-rw-r--r--README.hardware8
-rwxr-xr-xbitbake/bin/bitbake16
-rwxr-xr-xbitbake/bin/bitbake-layers54
-rwxr-xr-xbitbake/bin/bitbake-worker57
-rwxr-xr-xbitbake/bin/toaster112
-rwxr-xr-xbitbake/bin/toaster-eventreplay179
-rw-r--r--bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml2
-rw-r--r--bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml31
-rw-r--r--bitbake/doc/bitbake-user-manual/bitbake-user-manual.xml2
-rw-r--r--bitbake/lib/bb/__init__.py4
-rw-r--r--bitbake/lib/bb/build.py13
-rw-r--r--bitbake/lib/bb/cache.py4
-rw-r--r--bitbake/lib/bb/codeparser.py2
-rw-r--r--bitbake/lib/bb/command.py7
-rw-r--r--bitbake/lib/bb/cooker.py103
-rw-r--r--bitbake/lib/bb/cookerdata.py12
-rw-r--r--bitbake/lib/bb/daemonize.py9
-rw-r--r--bitbake/lib/bb/data.py49
-rw-r--r--bitbake/lib/bb/data_smart.py17
-rw-r--r--bitbake/lib/bb/event.py13
-rw-r--r--bitbake/lib/bb/fetch2/__init__.py20
-rw-r--r--bitbake/lib/bb/fetch2/git.py43
-rw-r--r--bitbake/lib/bb/fetch2/hg.py5
-rw-r--r--bitbake/lib/bb/fetch2/perforce.py21
-rw-r--r--bitbake/lib/bb/fetch2/wget.py266
-rw-r--r--bitbake/lib/bb/monitordisk.py18
-rw-r--r--bitbake/lib/bb/namedtuple_with_abc.py4
-rw-r--r--bitbake/lib/bb/parse/ast.py2
-rw-r--r--bitbake/lib/bb/runqueue.py26
-rw-r--r--bitbake/lib/bb/server/process.py19
-rw-r--r--bitbake/lib/bb/siggen.py50
-rw-r--r--bitbake/lib/bb/tests/data.py13
-rw-r--r--bitbake/lib/bb/tests/fetch.py86
-rw-r--r--bitbake/lib/bb/tinfoil.py9
-rw-r--r--bitbake/lib/bb/ui/buildinfohelper.py449
-rw-r--r--bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py5
-rw-r--r--bitbake/lib/bb/ui/depexp.py4
-rw-r--r--bitbake/lib/bb/ui/knotty.py13
-rw-r--r--bitbake/lib/bb/ui/ncurses.py4
-rw-r--r--bitbake/lib/bb/ui/toasterui.py54
-rw-r--r--bitbake/lib/bb/utils.py182
-rw-r--r--bitbake/lib/bs4/AUTHORS.txt43
-rw-r--r--bitbake/lib/bs4/COPYING.txt26
-rw-r--r--bitbake/lib/bs4/NEWS.txt1066
-rw-r--r--bitbake/lib/bs4/__init__.py406
-rw-r--r--bitbake/lib/bs4/builder/__init__.py321
-rw-r--r--bitbake/lib/bs4/builder/_html5lib.py285
-rw-r--r--bitbake/lib/bs4/builder/_htmlparser.py258
-rw-r--r--bitbake/lib/bs4/builder/_lxml.py233
-rw-r--r--bitbake/lib/bs4/dammit.py829
-rw-r--r--bitbake/lib/bs4/diagnose.py204
-rw-r--r--bitbake/lib/bs4/element.py1611
-rw-r--r--bitbake/lib/bs4/testing.py592
-rw-r--r--bitbake/lib/bs4/tests/__init__.py1
-rw-r--r--bitbake/lib/bs4/tests/test_builder_registry.py141
-rw-r--r--bitbake/lib/bs4/tests/test_docs.py36
-rw-r--r--bitbake/lib/bs4/tests/test_html5lib.py85
-rw-r--r--bitbake/lib/bs4/tests/test_htmlparser.py19
-rw-r--r--bitbake/lib/bs4/tests/test_lxml.py91
-rw-r--r--bitbake/lib/bs4/tests/test_soup.py434
-rw-r--r--bitbake/lib/bs4/tests/test_tree.py1829
-rw-r--r--bitbake/lib/progressbar.py2
-rw-r--r--bitbake/lib/prserv/db.py15
-rw-r--r--bitbake/lib/prserv/serv.py21
-rw-r--r--bitbake/lib/toaster/bldcontrol/admin.py8
-rw-r--r--bitbake/lib/toaster/bldcontrol/bbcontroller.py192
-rw-r--r--bitbake/lib/toaster/bldcontrol/localhostbecontroller.py219
-rw-r--r--bitbake/lib/toaster/bldcontrol/management/commands/checksettings.py130
-rw-r--r--bitbake/lib/toaster/bldcontrol/management/commands/loadconf.py174
-rw-r--r--bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py30
-rw-r--r--bitbake/lib/toaster/bldcontrol/migrations/0006_auto__add_brbitbake.py128
-rw-r--r--bitbake/lib/toaster/bldcontrol/migrations/0007_auto__add_field_buildrequest_environment__chg_field_buildrequest_build.py145
-rw-r--r--bitbake/lib/toaster/bldcontrol/models.py54
-rw-r--r--bitbake/lib/toaster/bldcontrol/sshbecontroller.py209
-rw-r--r--bitbake/lib/toaster/bldcontrol/tests.py137
-rw-r--r--bitbake/lib/toaster/bldviewer/templates/simple_build.html2
-rw-r--r--bitbake/lib/toaster/bldviewer/templates/simple_layer.html2
-rw-r--r--bitbake/lib/toaster/bldviewer/templates/simple_recipe.html2
-rw-r--r--bitbake/lib/toaster/bldviewer/templatetags/simple_projecttags.py (renamed from bitbake/lib/toaster/bldviewer/templatetags/projecttags.py)0
-rw-r--r--bitbake/lib/toaster/orm/admin.py34
-rw-r--r--bitbake/lib/toaster/orm/management/__init__.py (renamed from meta/recipes-core/eglibc/eglibc-2.19/etc/ld.so.conf)0
-rw-r--r--bitbake/lib/toaster/orm/management/commands/__init__.py (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg)0
-rw-r--r--bitbake/lib/toaster/orm/management/commands/lsupdates.py12
-rw-r--r--bitbake/lib/toaster/orm/migrations/0013_auto__add_release__add_layerversiondependency__add_unique_layerversion.py710
-rw-r--r--bitbake/lib/toaster/orm/migrations/0014_auto__chg_field_package_summary__chg_field_layer_summary__chg_field_re.py336
-rw-r--r--bitbake/lib/toaster/orm/migrations/0015_auto__add_field_layer_vcs_web_url__add_field_layer_vcs_web_tree_base_u.py336
-rw-r--r--bitbake/lib/toaster/orm/migrations/0016_auto__add_field_release_helptext__chg_field_release_branch__add_index_.py359
-rw-r--r--bitbake/lib/toaster/orm/migrations/0017_auto__del_toastersettingdefaultlayer__add_releaselayersourcepriority__.py396
-rw-r--r--bitbake/lib/toaster/orm/migrations/0018_auto__add_field_layer_version_project.py331
-rw-r--r--bitbake/lib/toaster/orm/migrations/0019_auto__add_buildartifact.py342
-rw-r--r--bitbake/lib/toaster/orm/models.py690
-rw-r--r--bitbake/lib/toaster/orm/tests.py36
-rw-r--r--bitbake/lib/toaster/orm/urls.py27
-rw-r--r--bitbake/lib/toaster/orm/views.py60
-rw-r--r--bitbake/lib/toaster/toastergui/static/css/default.css108
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/angular-animate.min.js28
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/angular-cookies.min.js8
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/angular-route.min.js14
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/angular-sanitize.min.js15
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/angular.min.js215
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/angular.min.js.map8
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/base.js134
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js11
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/importlayer.js289
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/jquery-ui.js15003
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/libtoaster.js323
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/main.js111
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/projectapp.js690
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/ui-bootstrap-tpls-0.11.0.js10
-rw-r--r--bitbake/lib/toaster/toastergui/static/js/ui-bootstrap-tpls-0.11.0.min.js10
-rw-r--r--bitbake/lib/toaster/toastergui/templates/base.html124
-rw-r--r--[-rwxr-xr-x]bitbake/lib/toaster/toastergui/templates/basebuilddetailpage.html5
-rw-r--r--bitbake/lib/toaster/toastergui/templates/basebuildpage.html7
-rw-r--r--bitbake/lib/toaster/toastergui/templates/baseprojectpage.html43
-rw-r--r--bitbake/lib/toaster/toastergui/templates/basetable_bottom.html53
-rw-r--r--bitbake/lib/toaster/toastergui/templates/basetable_top.html19
-rw-r--r--bitbake/lib/toaster/toastergui/templates/basetable_top_buildprojects.html16
-rw-r--r--bitbake/lib/toaster/toastergui/templates/basetable_top_layers.html5
-rw-r--r--bitbake/lib/toaster/toastergui/templates/basetable_top_projectbuilds.html16
-rw-r--r--bitbake/lib/toaster/toastergui/templates/build.html114
-rw-r--r--bitbake/lib/toaster/toastergui/templates/builddashboard.html177
-rw-r--r--bitbake/lib/toaster/toastergui/templates/configuration.html6
-rw-r--r--bitbake/lib/toaster/toastergui/templates/filtersnippet.html4
-rw-r--r--bitbake/lib/toaster/toastergui/templates/importlayer.html116
-rw-r--r--bitbake/lib/toaster/toastergui/templates/landing.html66
-rw-r--r--bitbake/lib/toaster/toastergui/templates/layerdetails.html159
-rw-r--r--bitbake/lib/toaster/toastergui/templates/layers.html288
-rw-r--r--bitbake/lib/toaster/toastergui/templates/layers_dep_modal.html90
-rw-r--r--bitbake/lib/toaster/toastergui/templates/machines.html63
-rw-r--r--bitbake/lib/toaster/toastergui/templates/mrb_section.html108
-rw-r--r--bitbake/lib/toaster/toastergui/templates/newproject.html122
-rw-r--r--bitbake/lib/toaster/toastergui/templates/package_detail_base.html3
-rw-r--r--bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html4
-rw-r--r--bitbake/lib/toaster/toastergui/templates/project.html694
-rw-r--r--bitbake/lib/toaster/toastergui/templates/projectbuilds.html59
-rw-r--r--bitbake/lib/toaster/toastergui/templates/projectconf.html62
-rw-r--r--bitbake/lib/toaster/toastergui/templates/projects.html36
-rw-r--r--bitbake/lib/toaster/toastergui/templates/recipe.html11
-rw-r--r--[-rwxr-xr-x]bitbake/lib/toaster/toastergui/templates/recipes.html7
-rw-r--r--bitbake/lib/toaster/toastergui/templates/target.html18
-rw-r--r--bitbake/lib/toaster/toastergui/templates/targets.html272
-rw-r--r--bitbake/lib/toaster/toastergui/templates/task.html19
-rw-r--r--bitbake/lib/toaster/toastergui/templates/tasks.html12
-rw-r--r--bitbake/lib/toaster/toastergui/templatetags/projecttags.py23
-rw-r--r--bitbake/lib/toaster/toastergui/urls.py29
-rwxr-xr-xbitbake/lib/toaster/toastergui/views.py1407
-rw-r--r--bitbake/lib/toaster/toastermain/settings.py66
-rw-r--r--bitbake/lib/toaster/toastermain/urls.py28
-rw-r--r--documentation/adt-manual/adt-command.xml6
-rw-r--r--documentation/adt-manual/adt-manual.xml7
-rw-r--r--documentation/adt-manual/adt-package.xml12
-rw-r--r--documentation/adt-manual/adt-prepare.xml18
-rw-r--r--documentation/bsp-guide/bsp-guide.xml7
-rw-r--r--documentation/bsp-guide/bsp.xml2
-rw-r--r--documentation/dev-manual/dev-manual-common-tasks.xml678
-rw-r--r--documentation/dev-manual/dev-manual-model.xml43
-rw-r--r--documentation/dev-manual/dev-manual-qemu.xml8
-rw-r--r--documentation/dev-manual/dev-manual-start.xml34
-rw-r--r--documentation/dev-manual/dev-manual.xml7
-rw-r--r--documentation/kernel-dev/kernel-dev-advanced.xml35
-rw-r--r--documentation/kernel-dev/kernel-dev-common.xml4
-rw-r--r--documentation/kernel-dev/kernel-dev-maint-appx.xml6
-rw-r--r--documentation/kernel-dev/kernel-dev.xml7
-rw-r--r--documentation/mega-manual/figures/buildhistory.png  bin 42532 -> 44913 bytes
-rw-r--r--documentation/poky.ent10
-rw-r--r--documentation/profile-manual/profile-manual-usage.xml26
-rw-r--r--documentation/profile-manual/profile-manual.xml7
-rw-r--r--documentation/ref-manual/closer-look.xml32
-rw-r--r--documentation/ref-manual/faq.xml2
-rw-r--r--documentation/ref-manual/figures/buildhistory.png  bin 42532 -> 44913 bytes
-rw-r--r--documentation/ref-manual/introduction.xml14
-rw-r--r--documentation/ref-manual/migration.xml342
-rw-r--r--documentation/ref-manual/ref-classes.xml250
-rw-r--r--documentation/ref-manual/ref-features.xml167
-rw-r--r--documentation/ref-manual/ref-images.xml46
-rw-r--r--documentation/ref-manual/ref-manual-customization.xsl1
-rw-r--r--documentation/ref-manual/ref-manual.xml7
-rw-r--r--documentation/ref-manual/ref-qa-checks.xml139
-rw-r--r--documentation/ref-manual/ref-structure.xml6
-rw-r--r--documentation/ref-manual/ref-style.css15
-rw-r--r--documentation/ref-manual/ref-tasks.xml21
-rw-r--r--documentation/ref-manual/ref-variables.xml3122
-rw-r--r--documentation/ref-manual/technical-details.xml11
-rw-r--r--documentation/ref-manual/usingpoky.xml176
-rw-r--r--documentation/template/qa-code-permalinks.xsl23
-rw-r--r--documentation/tools/mega-manual.sed36
-rw-r--r--documentation/yocto-project-qs/yocto-project-qs.xml61
-rw-r--r--meta-yocto-bsp/conf/machine/beaglebone.conf4
-rw-r--r--meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.14.bbappend10
-rw-r--r--meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.4.bbappend7
-rw-r--r--meta-yocto/conf/distro/include/maintainers.inc172
-rw-r--r--meta-yocto/conf/distro/include/package_regex.inc409
-rw-r--r--meta-yocto/conf/distro/include/poky-floating-revisions.inc16
-rw-r--r--meta-yocto/conf/distro/include/upstream_tracking.inc153
-rw-r--r--meta-yocto/conf/distro/poky-lsb.conf3
-rw-r--r--meta-yocto/conf/distro/poky-tiny.conf7
-rw-r--r--meta-yocto/conf/distro/poky.conf24
-rw-r--r--meta-yocto/conf/local.conf.sample1
-rw-r--r--meta-yocto/conf/local.conf.sample.extended21
-rw-r--r--meta-yocto/conf/toasterconf.json97
-rw-r--r--meta/classes/allarch.bbclass2
-rw-r--r--meta/classes/archiver.bbclass9
-rw-r--r--meta/classes/autotools.bbclass8
-rw-r--r--meta/classes/base.bbclass24
-rw-r--r--meta/classes/boot-directdisk.bbclass3
-rw-r--r--meta/classes/bootimg.bbclass3
-rw-r--r--meta/classes/buildhistory.bbclass29
-rw-r--r--meta/classes/cmake.bbclass7
-rw-r--r--meta/classes/cml1.bbclass5
-rw-r--r--meta/classes/compress_doc.bbclass260
-rw-r--r--meta/classes/cpan.bbclass4
-rw-r--r--meta/classes/cpan_build.bbclass5
-rw-r--r--meta/classes/cross-canadian.bbclass1
-rw-r--r--meta/classes/debian.bbclass13
-rw-r--r--meta/classes/distrodata.bbclass492
-rw-r--r--meta/classes/externalsrc.bbclass3
-rw-r--r--meta/classes/gnomebase.bbclass14
-rw-r--r--meta/classes/image-buildinfo.bbclass69
-rw-r--r--meta/classes/image-mklibs.bbclass2
-rw-r--r--meta/classes/image-swab.bbclass2
-rw-r--r--meta/classes/image.bbclass44
-rw-r--r--meta/classes/image_types.bbclass28
-rw-r--r--meta/classes/insane.bbclass123
-rw-r--r--meta/classes/kernel-yocto.bbclass215
-rw-r--r--meta/classes/kernel.bbclass133
-rw-r--r--meta/classes/kernelsrc.bbclass10
-rw-r--r--meta/classes/libc-common.bbclass19
-rw-r--r--meta/classes/libc-package.bbclass5
-rw-r--r--meta/classes/license.bbclass82
-rw-r--r--meta/classes/linux-kernel-base.bbclass13
-rw-r--r--meta/classes/metadata_scm.bbclass2
-rw-r--r--meta/classes/multilib.bbclass4
-rw-r--r--meta/classes/native.bbclass23
-rw-r--r--meta/classes/nativesdk.bbclass1
-rw-r--r--meta/classes/package.bbclass136
-rw-r--r--meta/classes/package_deb.bbclass17
-rw-r--r--meta/classes/package_ipk.bbclass2
-rw-r--r--meta/classes/package_rpm.bbclass38
-rw-r--r--meta/classes/packagegroup.bbclass7
-rw-r--r--meta/classes/populate_sdk_base.bbclass53
-rw-r--r--meta/classes/prserv.bbclass31
-rw-r--r--meta/classes/pythonnative.bbclass2
-rw-r--r--meta/classes/qemu.bbclass28
-rw-r--r--meta/classes/qt4e.bbclass3
-rw-r--r--meta/classes/qt4x11.bbclass3
-rw-r--r--meta/classes/report-error.bbclass9
-rw-r--r--meta/classes/rm_work.bbclass13
-rw-r--r--meta/classes/sanity.bbclass55
-rw-r--r--meta/classes/siteconfig.bbclass8
-rw-r--r--meta/classes/siteinfo.bbclass8
-rw-r--r--meta/classes/spdx.bbclass338
-rw-r--r--meta/classes/sstate.bbclass100
-rw-r--r--meta/classes/staging.bbclass1
-rw-r--r--meta/classes/systemd.bbclass7
-rw-r--r--meta/classes/testimage.bbclass12
-rw-r--r--meta/classes/toaster.bbclass56
-rw-r--r--meta/classes/toolchain-scripts.bbclass22
-rw-r--r--meta/classes/uboot-config.bbclass3
-rw-r--r--meta/classes/uninative.bbclass44
-rw-r--r--meta/classes/update-rc.d.bbclass3
-rw-r--r--meta/classes/useradd-staticids.bbclass8
-rw-r--r--meta/classes/useradd.bbclass4
-rw-r--r--meta/conf/abi_version.conf2
-rw-r--r--meta/conf/bitbake.conf21
-rw-r--r--meta/conf/distro/defaultsetup.conf2
-rw-r--r--meta/conf/distro/include/default-distrovars.inc2
-rw-r--r--meta/conf/distro/include/default-providers.inc3
-rw-r--r--meta/conf/distro/include/default-versions.inc4
-rw-r--r--meta/conf/distro/include/security_flags.inc6
-rw-r--r--meta/conf/distro/include/tclibc-eglibc.inc40
-rw-r--r--meta/conf/distro/include/tclibc-glibc.inc40
-rw-r--r--meta/conf/distro/include/tclibc-musl.inc4
-rw-r--r--meta/conf/distro/include/tclibc-uclibc.inc4
-rw-r--r--meta/conf/distro/include/tcmode-default.inc24
-rw-r--r--meta/conf/documentation.conf18
-rw-r--r--meta/conf/layer.conf7
-rw-r--r--meta/conf/licenses.conf25
-rw-r--r--meta/conf/machine/include/arm/arch-arm64.inc36
-rw-r--r--meta/conf/machine/include/arm/arch-armv8.inc1
-rw-r--r--meta/conf/machine/include/arm/feature-arm-thumb.inc10
-rw-r--r--meta/conf/machine/include/tune-power5.inc21
-rw-r--r--meta/conf/machine/include/tune-power6.inc21
-rw-r--r--meta/conf/machine/include/tune-power7.inc21
-rw-r--r--meta/conf/machine/qemuarm64.conf12
-rw-r--r--meta/conf/machine/qemumips.conf2
-rw-r--r--meta/conf/machine/qemux86-64.conf2
-rw-r--r--meta/conf/machine/qemux86.conf2
-rw-r--r--meta/conf/multilib.conf2
-rw-r--r--meta/conf/sanity.conf2
-rw-r--r--meta/conf/toasterconf.json84
-rw-r--r--meta/files/common-licenses/SMAIL_GPL164
-rw-r--r--meta/files/toolchain-shar-template.sh14
-rw-r--r--meta/lib/oe/classextend.py7
-rw-r--r--meta/lib/oe/image.py20
-rw-r--r--meta/lib/oe/package.py26
-rw-r--r--meta/lib/oe/package_manager.py208
-rw-r--r--meta/lib/oe/patch.py158
-rw-r--r--meta/lib/oe/recipeutils.py279
-rw-r--r--meta/lib/oe/rootfs.py22
-rw-r--r--meta/lib/oe/sdk.py2
-rw-r--r--meta/lib/oe/sstatesig.py108
-rw-r--r--meta/lib/oe/utils.py33
-rw-r--r--meta/lib/oeqa/oetest.py11
-rw-r--r--meta/lib/oeqa/runtime/_ptest.py124
-rw-r--r--meta/lib/oeqa/runtime/dmesg.py2
-rw-r--r--meta/lib/oeqa/runtime/parselogs.py200
-rw-r--r--meta/lib/oeqa/runtime/xorg.py5
-rw-r--r--meta/lib/oeqa/sdk/buildsudoku.py4
-rw-r--r--meta/lib/oeqa/selftest/bbtests.py3
-rw-r--r--meta/lib/oeqa/selftest/buildoptions.py12
-rw-r--r--meta/lib/oeqa/selftest/devtool.py241
-rw-r--r--meta/lib/oeqa/selftest/sstatetests.py10
-rw-r--r--meta/lib/oeqa/utils/commands.py14
-rw-r--r--meta/lib/oeqa/utils/decorators.py85
-rw-r--r--meta/lib/oeqa/utils/httpserver.py2
-rw-r--r--meta/lib/oeqa/utils/logparser.py125
-rw-r--r--meta/recipes-bsp/acpid/acpid.inc23
-rw-r--r--meta/recipes-bsp/acpid/acpid/acpid.service10
-rw-r--r--meta/recipes-bsp/acpid/acpid/set_socket_noblock.patch10
-rw-r--r--meta/recipes-bsp/acpid/acpid_2.0.23.bb (renamed from meta/recipes-bsp/acpid/acpid_1.0.10.bb)5
-rw-r--r--meta/recipes-bsp/alsa-state/alsa-state.bb11
-rw-r--r--meta/recipes-bsp/grub/files/0001-Fix-build-with-glibc-2.20.patch32
-rw-r--r--meta/recipes-bsp/grub/files/cfg2
-rw-r--r--meta/recipes-bsp/grub/files/grub-efi-fix-with-glibc-2.20.patch32
-rw-r--r--meta/recipes-bsp/grub/grub-2.00/grub2-remove-sparc64-setup-from-x86-builds.patch104
-rw-r--r--meta/recipes-bsp/grub/grub-efi_2.00.bb1
-rw-r--r--meta/recipes-bsp/grub/grub_2.00.bb1
-rw-r--r--meta/recipes-bsp/grub/grub_git.bb1
-rw-r--r--meta/recipes-bsp/hostap/hostap-utils-0.4.7/ldflags.patch27
-rw-r--r--meta/recipes-bsp/hostap/hostap-utils_0.4.7.bb1
-rw-r--r--meta/recipes-bsp/keymaps/keymaps_1.0.bb32
-rw-r--r--meta/recipes-bsp/pciutils/pciutils/lib-build-fix.patch92
-rw-r--r--meta/recipes-bsp/pciutils/pciutils_3.3.0.bb (renamed from meta/recipes-bsp/pciutils/pciutils_3.2.1.bb)23
-rw-r--r--meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb2
-rw-r--r--meta/recipes-bsp/setserial/setserial/ldflags.patch24
-rw-r--r--meta/recipes-bsp/setserial/setserial_2.17.bb1
-rw-r--r--meta/recipes-bsp/u-boot/files/0001-am335x_evm.h-Add-use-DEFAULT_LINUX_BOOT_ENV-environm.patch74
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-fw-utils-cross_2013.07.bb38
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-fw-utils-cross_2014.07.bb38
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-fw-utils_2013.07.bb35
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-fw-utils_2014.07.bb35
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-mkimage_2013.07.bb32
-rw-r--r--meta/recipes-bsp/u-boot/u-boot-mkimage_2014.07.bb34
-rw-r--r--meta/recipes-bsp/u-boot/u-boot.inc11
-rw-r--r--meta/recipes-bsp/u-boot/u-boot_2013.07.bb25
-rw-r--r--meta/recipes-bsp/u-boot/u-boot_2014.07.bb9
-rw-r--r--meta/recipes-bsp/v86d/v86d_0.1.10.bb17
-rw-r--r--meta/recipes-connectivity/avahi/avahi-ui_0.6.31.bb6
-rw-r--r--meta/recipes-connectivity/bind/bind/bind92
-rw-r--r--meta/recipes-connectivity/bind/bind/bind9_9_5-CVE-2014-8500.patch990
-rw-r--r--meta/recipes-connectivity/bind/bind/conf.patch16
-rw-r--r--meta/recipes-connectivity/bind/bind/named.service2
-rw-r--r--meta/recipes-connectivity/bind/bind_9.9.5.bb20
-rw-r--r--meta/recipes-connectivity/bluez/bluez-hcidump_2.5.bb2
-rw-r--r--meta/recipes-connectivity/bluez/gst-plugin-bluetooth_4.101.bb1
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5.inc31
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5/bluetooth.conf1
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5_5.21.bb3
-rw-r--r--meta/recipes-connectivity/bluez5/bluez5_5.25.bb50
-rw-r--r--meta/recipes-connectivity/connman/connman-gnome/connman-gnome-fix-dbus-interface-name.patch187
-rw-r--r--meta/recipes-connectivity/connman/connman-gnome_0.7.bb7
-rw-r--r--meta/recipes-connectivity/connman/connman.inc3
-rw-r--r--meta/recipes-connectivity/connman/connman/build-libppp-plugin-without-versioning-info.patch32
-rw-r--r--meta/recipes-connectivity/connman/connman_1.26.bb (renamed from meta/recipes-connectivity/connman/connman_1.24.bb)5
-rw-r--r--meta/recipes-connectivity/dhcp/dhcp.inc23
-rw-r--r--meta/recipes-connectivity/dhcp/dhcp/replace-ifconfig-route.patch176
-rw-r--r--meta/recipes-connectivity/dhcp/dhcp_4.3.0.bb1
-rw-r--r--meta/recipes-connectivity/dhcp/files/dhcpd.service11
-rw-r--r--meta/recipes-connectivity/dhcp/files/dhcrelay.service9
-rw-r--r--meta/recipes-connectivity/iproute2/iproute2_3.17.0.bb (renamed from meta/recipes-connectivity/iproute2/iproute2_3.15.0.bb)4
-rw-r--r--meta/recipes-connectivity/irda-utils/irda-utils-0.9.18/ldflags.patch75
-rw-r--r--meta/recipes-connectivity/irda-utils/irda-utils_0.9.18.bb18
-rw-r--r--meta/recipes-connectivity/libpcap/libpcap.inc4
-rw-r--r--meta/recipes-connectivity/libpcap/libpcap/ieee80215-arphrd.patch24
-rw-r--r--meta/recipes-connectivity/libpcap/libpcap_1.6.2.bb (renamed from meta/recipes-connectivity/libpcap/libpcap_1.5.3.bb)8
-rw-r--r--meta/recipes-connectivity/neard/neard/neard.service.in6
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils/0001-statd-fixed-the-with-statdpath-flag.patch41
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-utils-1.0.6-uclibc.patch27
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils/nfscommon27
-rw-r--r--meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.1.bb (renamed from meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.0.bb)13
-rw-r--r--meta/recipes-connectivity/ofono/ofono.inc16
-rw-r--r--meta/recipes-connectivity/ofono/ofono/Revert-test-Convert-to-Python-3.patch1270
-rw-r--r--meta/recipes-connectivity/ofono/ofono_1.14.bb13
-rw-r--r--meta/recipes-connectivity/ofono/ofono_1.15.bb12
-rw-r--r--meta/recipes-connectivity/openssh/openssh/openssh-CVE-2014-2532.patch22
-rw-r--r--meta/recipes-connectivity/openssh/openssh/sshd@.service1
-rw-r--r--meta/recipes-connectivity/openssh/openssh_6.6p1.bb10
-rw-r--r--meta/recipes-connectivity/openssl/openssl.inc6
-rw-r--r--meta/recipes-connectivity/openssl/openssl_1.0.1j.bb (renamed from meta/recipes-connectivity/openssl/openssl_1.0.1i.bb)4
-rw-r--r--meta/recipes-connectivity/portmap/portmap.inc7
-rw-r--r--meta/recipes-connectivity/portmap/portmap/portmap.service10
-rw-r--r--meta/recipes-connectivity/portmap/portmap_6.0.bb7
-rw-r--r--meta/recipes-connectivity/ppp/ppp_2.4.7.bb (renamed from meta/recipes-connectivity/ppp/ppp_2.4.6.bb)4
-rw-r--r--meta/recipes-connectivity/resolvconf/resolvconf/99_resolvconf4
-rw-r--r--meta/recipes-connectivity/resolvconf/resolvconf/fix-path-for-busybox.patch20
-rw-r--r--meta/recipes-connectivity/resolvconf/resolvconf_1.76.bb (renamed from meta/recipes-connectivity/resolvconf/resolvconf_1.75.bb)21
-rw-r--r--meta/recipes-connectivity/socat/socat/socat-1.7.2.4-linux-3.17.patch29
-rw-r--r--meta/recipes-connectivity/socat/socat_1.7.2.4.bb1
-rw-r--r--meta/recipes-connectivity/wpa-supplicant/wpa-supplicant.inc8
-rw-r--r--meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/fix-libnl3-host-contamination.patch14
-rw-r--r--meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.3.bb (renamed from meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.2.bb)0
-rw-r--r--meta/recipes-core/base-files/base-files_3.0.14.bb1
-rw-r--r--meta/recipes-core/busybox/busybox.inc5
-rw-r--r--meta/recipes-core/busybox/busybox/busybox-cross-menuconfig.patch71
-rw-r--r--meta/recipes-core/busybox/busybox/defconfig2
-rw-r--r--meta/recipes-core/busybox/busybox_1.22.1.bb2
-rw-r--r--meta/recipes-core/busybox/busybox_git.bb1
-rw-r--r--meta/recipes-core/busybox/files/find-touchscreen.sh7
-rw-r--r--meta/recipes-core/busybox/files/mdev-mount.sh63
-rw-r--r--meta/recipes-core/busybox/files/mdev.conf5
-rw-r--r--meta/recipes-core/busybox/files/simple.script4
-rw-r--r--meta/recipes-core/busybox/files/syslog2
-rw-r--r--meta/recipes-core/coreutils/coreutils-8.22/fix-selinux-flask.patch39
-rw-r--r--meta/recipes-core/coreutils/coreutils_8.22.bb6
-rw-r--r--meta/recipes-core/dbus/dbus-glib-0.100.2/obsolete_automake_macros.patch15
-rw-r--r--meta/recipes-core/dbus/dbus-glib.inc1
-rw-r--r--meta/recipes-core/dbus/dbus-glib/no-examples.patch (renamed from meta/recipes-core/dbus/dbus-glib-0.100.2/no-examples.patch)0
-rw-r--r--meta/recipes-core/dbus/dbus-glib/test-install-makefile.patch (renamed from meta/recipes-core/dbus/dbus-glib-0.100.2/test-install-makefile.patch)57
-rw-r--r--meta/recipes-core/dbus/dbus-glib_0.100.2.bb5
-rw-r--r--meta/recipes-core/dbus/dbus-glib_0.102.bb5
-rw-r--r--meta/recipes-core/dbus/dbus-test_1.8.10.bb (renamed from meta/recipes-core/dbus/dbus-test_1.8.2.bb)4
-rw-r--r--meta/recipes-core/dbus/dbus.inc2
-rw-r--r--meta/recipes-core/dbus/dbus_1.8.10.bb4
-rw-r--r--meta/recipes-core/dbus/dbus_1.8.2.bb4
-rw-r--r--meta/recipes-core/dropbear/dropbear.inc7
-rw-r--r--meta/recipes-core/eglibc/cross-localedef-native_2.19.bb48
-rw-r--r--meta/recipes-core/eglibc/eglibc-2.19/0001-eglibc-menuconfig-support.patch912
-rw-r--r--meta/recipes-core/eglibc/eglibc-2.19/0002-eglibc-menuconfig-hex-string-options.patch169
-rw-r--r--meta/recipes-core/eglibc/eglibc-2.19/0003-eglibc-menuconfig-build-instructions.patch176
-rw-r--r--meta/recipes-core/eglibc/eglibc-2.19/GLRO_dl_debug_mask.patch143
-rw-r--r--meta/recipes-core/eglibc/eglibc-2.19/eglibc-svn-arm-lowlevellock-include-tls.patch21
-rw-r--r--meta/recipes-core/eglibc/eglibc-2.19/eglibc_fix_findidx_parameters.patch38
-rw-r--r--meta/recipes-core/eglibc/eglibc-2.19/fileops-without-wchar-io.patch22
-rw-r--r--meta/recipes-core/eglibc/eglibc-2.19/fix_am_rootsbindir.patch32
-rw-r--r--meta/recipes-core/eglibc/eglibc-common.inc9
-rw-r--r--meta/recipes-core/eglibc/eglibc-initial_2.19.bb11
-rw-r--r--meta/recipes-core/eglibc/eglibc-locale_2.19.bb1
-rw-r--r--meta/recipes-core/eglibc/eglibc-mtrace.inc13
-rw-r--r--meta/recipes-core/eglibc/eglibc-mtrace_2.19.bb1
-rw-r--r--meta/recipes-core/eglibc/eglibc-scripts.inc16
-rw-r--r--meta/recipes-core/eglibc/eglibc-scripts_2.19.bb1
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0/allow-run-media-sdX-drive-mount-if-username-root.patch39
-rw-r--r--meta/recipes-core/glib-2.0/glib-2.0_2.40.0.bb1
-rw-r--r--meta/recipes-core/glib-2.0/glib.inc22
-rw-r--r--meta/recipes-core/glibc/cross-localedef-native/fix_for_centos_5.8.patch (renamed from meta/recipes-core/eglibc/cross-localedef-native-2.19/fix_for_centos_5.8.patch)6
-rw-r--r--meta/recipes-core/glibc/cross-localedef-native_2.20.bb56
-rw-r--r--meta/recipes-core/glibc/glibc-collateral.inc (renamed from meta/recipes-core/eglibc/eglibc-collateral.inc)2
-rw-r--r--meta/recipes-core/glibc/glibc-common.inc9
-rw-r--r--meta/recipes-core/glibc/glibc-initial.inc (renamed from meta/recipes-core/eglibc/eglibc-initial.inc)46
-rw-r--r--meta/recipes-core/glibc/glibc-initial_2.20.bb11
-rw-r--r--meta/recipes-core/glibc/glibc-ld.inc (renamed from meta/recipes-core/eglibc/eglibc-ld.inc)6
-rw-r--r--meta/recipes-core/glibc/glibc-locale.inc (renamed from meta/recipes-core/eglibc/eglibc-locale.inc)48
-rw-r--r--meta/recipes-core/glibc/glibc-locale_2.20.bb1
-rw-r--r--meta/recipes-core/glibc/glibc-mtrace.inc13
-rw-r--r--meta/recipes-core/glibc/glibc-mtrace_2.20.bb1
-rw-r--r--meta/recipes-core/glibc/glibc-options.inc (renamed from meta/recipes-core/eglibc/eglibc-options.inc)82
-rw-r--r--meta/recipes-core/glibc/glibc-package.inc (renamed from meta/recipes-core/eglibc/eglibc-package.inc)81
-rw-r--r--meta/recipes-core/glibc/glibc-scripts.inc16
-rw-r--r--meta/recipes-core/glibc/glibc-scripts_2.20.bb1
-rw-r--r--meta/recipes-core/glibc/glibc-testing.inc (renamed from meta/recipes-core/eglibc/eglibc-testing.inc)72
-rw-r--r--meta/recipes-core/glibc/glibc.inc (renamed from meta/recipes-core/eglibc/eglibc.inc)20
-rw-r--r--meta/recipes-core/glibc/glibc/0001-R_ARM_TLS_DTPOFF32.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/0001-R_ARM_TLS_DTPOFF32.patch)10
-rw-r--r--meta/recipes-core/glibc/glibc/0001-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/0001-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch)13
-rw-r--r--meta/recipes-core/glibc/glibc/CVE-2012-3406-Stack-overflow-in-vfprintf-BZ-16617.patch339
-rw-r--r--meta/recipes-core/glibc/glibc/CVE-2014-7817-wordexp-fails-to-honour-WRDE_NOCMD.patch215
-rw-r--r--meta/recipes-core/glibc/glibc/GLRO_dl_debug_mask.patch529
-rw-r--r--meta/recipes-core/glibc/glibc/IO-acquire-lock-fix.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/IO-acquire-lock-fix.patch)8
-rw-r--r--meta/recipes-core/glibc/glibc/add_resource_h_to_wait_h.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/add_resource_h_to_wait_h.patch)8
-rw-r--r--meta/recipes-core/glibc/glibc/eglibc-header-bootstrap.patch85
-rw-r--r--meta/recipes-core/glibc/glibc/eglibc-install-pic-archives.patch109
-rw-r--r--meta/recipes-core/glibc/glibc/eglibc-ppc8xx-cache-line-workaround.patch68
-rw-r--r--meta/recipes-core/glibc/glibc/eglibc-resolv-dynamic.patch54
-rw-r--r--meta/recipes-core/glibc/glibc/eglibc-sh4-fpscr_values.patch42
-rw-r--r--meta/recipes-core/glibc/glibc/eglibc-use-option-groups.patch16577
-rw-r--r--meta/recipes-core/glibc/glibc/eglibc.patch602
-rw-r--r--meta/recipes-core/glibc/glibc/etc/ld.so.conf (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc)0
-rw-r--r--meta/recipes-core/glibc/glibc/fix-tibetian-locales.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/fix-tibetian-locales.patch)16
-rw-r--r--meta/recipes-core/glibc/glibc/fix_am_rootsbindir.patch29
-rw-r--r--meta/recipes-core/glibc/glibc/fsl-ppc-no-fsqrt.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/fsl-ppc-no-fsqrt.patch)38
-rw-r--r--meta/recipes-core/glibc/glibc/generate-supported.mk (renamed from meta/recipes-core/eglibc/eglibc-2.19/generate-supported.mk)0
-rw-r--r--meta/recipes-core/glibc/glibc/glibc.fix_sqrt2.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/glibc.fix_sqrt2.patch)114
-rw-r--r--meta/recipes-core/glibc/glibc/grok_gold.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/grok_gold.patch)16
-rw-r--r--meta/recipes-core/glibc/glibc/initgroups_keys.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/initgroups_keys.patch)14
-rw-r--r--meta/recipes-core/glibc/glibc/ld-search-order.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/ld-search-order.patch)20
-rw-r--r--meta/recipes-core/glibc/glibc/mips-rld-map-check.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/mips-rld-map-check.patch)7
-rw-r--r--meta/recipes-core/glibc/glibc/multilib_readlib.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/multilib_readlib.patch)8
-rw-r--r--meta/recipes-core/glibc/glibc/option-groups.patch1397
-rw-r--r--meta/recipes-core/glibc/glibc/ppc-sqrt_finite.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/ppc-sqrt_finite.patch)0
-rw-r--r--meta/recipes-core/glibc/glibc/ppc_slow_ieee754_sqrt.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/ppc_slow_ieee754_sqrt.patch)136
-rw-r--r--meta/recipes-core/glibc/glibc/ppce6500-32b_slow_ieee754_sqrt.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/ppce6500-32b_slow_ieee754_sqrt.patch)0
-rw-r--r--meta/recipes-core/glibc/glibc/relocatable_sdk.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/relocatable_sdk.patch)0
-rw-r--r--meta/recipes-core/glibc/glibc/relocatable_sdk_fix_openpath.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/relocatable_sdk_fix_openpath.patch)0
-rw-r--r--meta/recipes-core/glibc/glibc/timezone-re-written-tzselect-as-posix-sh.patch (renamed from meta/recipes-core/eglibc/eglibc-2.19/timezone-re-written-tzselect-as-posix-sh.patch)28
-rw-r--r--meta/recipes-core/glibc/glibc_2.20.bb (renamed from meta/recipes-core/eglibc/eglibc_2.19.bb)45
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/32and64bit.patch (renamed from meta/recipes-core/eglibc/ldconfig-native-2.12.1/32and64bit.patch)0
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/README (renamed from meta/recipes-core/eglibc/ldconfig-native-2.12.1/README)0
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/endian-ness_handling.patch (renamed from meta/recipes-core/eglibc/ldconfig-native-2.12.1/endian-ness_handling.patch)0
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/endian-ness_handling_fix.patch47
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/endianess-header.patch (renamed from meta/recipes-core/eglibc/ldconfig-native-2.12.1/endianess-header.patch)0
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/flag_fix.patch (renamed from meta/recipes-core/eglibc/ldconfig-native-2.12.1/flag_fix.patch)0
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig-default-to-all-multilib-dirs.patch (renamed from meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig-default-to-all-multilib-dirs.patch)0
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig-native-2.12.1.tar.bz2 (renamed from meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig-native-2.12.1.tar.bz2)  bin 21491 -> 21491 bytes
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig.patch (renamed from meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig.patch)0
-rw-r--r--meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig_aux-cache_path_fix.patch (renamed from meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig_aux-cache_path_fix.patch)0
-rw-r--r--meta/recipes-core/glibc/ldconfig-native_2.12.1.bb (renamed from meta/recipes-core/eglibc/ldconfig-native_2.12.1.bb)1
-rw-r--r--meta/recipes-core/glibc/site_config/funcs (renamed from meta/recipes-core/eglibc/site_config/funcs)0
-rw-r--r--meta/recipes-core/glibc/site_config/headers (renamed from meta/recipes-core/eglibc/site_config/headers)0
-rw-r--r--meta/recipes-core/glibc/site_config/types (renamed from meta/recipes-core/eglibc/site_config/types)0
-rw-r--r--meta/recipes-core/images/build-appliance-image/Yocto_Build_Appliance.vmx58
-rw-r--r--meta/recipes-core/images/build-appliance-image/Yocto_Build_Appliance.vmxf2
-rw-r--r--meta/recipes-core/images/build-appliance-image_8.0.bb4
-rw-r--r--meta/recipes-core/init-ifupdown/init-ifupdown-1.0/qemuarm64/interfaces (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-core/init-ifupdown/init-ifupdown/{{=machine}}/interfaces)0
-rw-r--r--meta/recipes-core/initscripts/initscripts_1.0.bb6
-rw-r--r--meta/recipes-core/libxml/libxml2.inc16
-rw-r--r--meta/recipes-core/libxml/libxml2/configure.ac-fix-cross-compiling-warning.patch45
-rw-r--r--meta/recipes-core/libxml/libxml2/libxml2-CVE-2014-3660.patch147
-rw-r--r--meta/recipes-core/libxml/libxml2/python-sitepackages-dir.patch17
-rw-r--r--meta/recipes-core/libxml/libxml2_2.9.2.bb (renamed from meta/recipes-core/libxml/libxml2_2.9.1.bb)4
-rw-r--r--meta/recipes-core/meta/buildtools-tarball.bb32
-rw-r--r--meta/recipes-core/meta/meta-environment.bb47
-rw-r--r--meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb19
-rw-r--r--meta/recipes-core/meta/uninative-tarball.bb48
-rw-r--r--meta/recipes-core/ncurses/ncurses.inc2
-rw-r--r--meta/recipes-core/netbase/netbase_5.3.bb (renamed from meta/recipes-core/netbase/netbase_5.2.bb)6
-rw-r--r--meta/recipes-core/os-release/os-release.bb36
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-base.bb10
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-boot.bb6
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb1
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-tools-debug.bb4
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb7
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-core-tools-testapps.bb4
-rw-r--r--meta/recipes-core/packagegroups/packagegroup-self-hosted.bb17
-rw-r--r--meta/recipes-core/psplash/psplash_git.bb12
-rw-r--r--meta/recipes-core/readline/readline-6.3/readline-dispatch-multikey.patch32
-rw-r--r--meta/recipes-core/readline/readline-6.3/readline63-00343
-rw-r--r--meta/recipes-core/readline/readline_6.3.bb3
-rw-r--r--meta/recipes-core/systemd/systemd-serialgetty/serial-getty@.service2
-rwxr-xr-xmeta/recipes-core/systemd/systemd-systemctl/systemctl45
-rw-r--r--meta/recipes-core/systemd/systemd/0001-Make-root-s-home-directory-configurable.patch180
-rw-r--r--meta/recipes-core/systemd/systemd/0001-add-support-for-executing-scripts-under-etc-rcS.d.patch138
-rw-r--r--meta/recipes-core/systemd/systemd/0001-journal-Fix-navigating-backwards-missing-entries.patch34
-rw-r--r--meta/recipes-core/systemd/systemd/0001-missing.h-add-fake-__NR_memfd_create-for-MIPS.patch29
-rw-r--r--meta/recipes-core/systemd/systemd/0001-systemd-user-avoid-using-system-auth.patch27
-rw-r--r--meta/recipes-core/systemd/systemd/0001-tmpfiles.d-etc.conf-disable-resolv.conf-symlink.patch35
-rw-r--r--meta/recipes-core/systemd/systemd/0001-uClibc-doesn-t-implement-pwritev-preadv.patch34
-rw-r--r--meta/recipes-core/systemd/systemd/run-ptest2
-rw-r--r--meta/recipes-core/systemd/systemd/systemd-older-kernel.patch56
-rw-r--r--meta/recipes-core/systemd/systemd_216.bb (renamed from meta/recipes-core/systemd/systemd_213.bb)51
-rw-r--r--meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb2
-rw-r--r--meta/recipes-core/uclibc/uclibc-git.inc6
-rw-r--r--meta/recipes-core/uclibc/uclibc-git/0001-Define-IPTOS_CLASS_-macros-according-to-RFC-2474.patch75
-rw-r--r--meta/recipes-core/uclibc/uclibc-git/0001-timex-Sync-with-glibc.patch33
-rw-r--r--meta/recipes-core/uclibc/uclibc-git/0003-fcntl.h-Define-F_SETPIPE_SZ-and-F_GETPIPE_SZ.patch377
-rw-r--r--meta/recipes-core/uclibc/uclibc-git/0004-Add-clock_adjtime-syscall.patch75
-rw-r--r--meta/recipes-core/uclibc/uclibc-git/remove_attribute_optimize_Os.patch125
-rw-r--r--meta/recipes-core/uclibc/uclibc.inc6
-rw-r--r--meta/recipes-core/udev/udev.inc2
-rw-r--r--meta/recipes-core/udev/udev/init66
-rw-r--r--meta/recipes-core/udev/udev/udev-cache45
-rw-r--r--meta/recipes-core/udev/udev/udev-cache.default2
-rw-r--r--meta/recipes-core/util-linux/util-linux.inc14
-rw-r--r--meta/recipes-core/volatile-binds/volatile-binds.bb2
-rw-r--r--meta/recipes-core/zlib/zlib-1.2.8/ldflags-tests.patch45
-rw-r--r--meta/recipes-core/zlib/zlib_1.2.8.bb1
-rw-r--r--meta/recipes-devtools/apt/apt-0.9.9.4/apt-0.9.9.4-CVE-2014-0478.patch193
-rw-r--r--meta/recipes-devtools/apt/apt-package.inc1
-rw-r--r--meta/recipes-devtools/apt/apt.inc1
-rw-r--r--meta/recipes-devtools/apt/apt_0.9.9.4.bb2
-rw-r--r--meta/recipes-devtools/autoconf/autoconf_2.69.bb2
-rw-r--r--meta/recipes-devtools/autogen/autogen-native_5.18.3.bb2
-rw-r--r--meta/recipes-devtools/binutils/binutils-2.24.inc8
-rw-r--r--meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8484.patch67
-rw-r--r--meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8485.patch102
-rw-r--r--meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8501.patch60
-rw-r--r--meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8502.patch89
-rw-r--r--meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8502_1.patch523
-rw-r--r--meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8503.patch47
-rw-r--r--meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8504.patch75
-rw-r--r--meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8737.patch177
-rw-r--r--meta/recipes-devtools/bootchart2/bootchart2/bootchartd-no-bashism.patch27
-rw-r--r--meta/recipes-devtools/bootchart2/bootchart2_git.bb12
-rw-r--r--meta/recipes-devtools/btrfs-tools/btrfs-tools_git.bb6
-rw-r--r--meta/recipes-devtools/cmake/cmake.inc2
-rw-r--r--meta/recipes-devtools/cmake/cmake/OEToolchainConfig.cmake18
-rw-r--r--meta/recipes-devtools/cmake/cmake/aarch64-cmake.patch37
-rw-r--r--meta/recipes-devtools/cmake/cmake/aarch64-kwsys.patch40
-rw-r--r--meta/recipes-devtools/cmake/cmake/environment.d-cmake.sh1
-rw-r--r--meta/recipes-devtools/cmake/cmake_2.8.12.2.bb14
-rw-r--r--meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets/docbook-xsl-stylesheets-no-bashism-in-docbook-xsl-up.patch24
-rw-r--r--meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets_1.78.1.bb4
-rw-r--r--meta/recipes-devtools/dosfstools/dosfstools_2.11.bb2
-rw-r--r--meta/recipes-devtools/dpkg/dpkg.inc3
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/dpkg-1.17.4-CVE-2014-0471-CVE-2014-3127.patch68
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/dpkg-1.17.4-CVE-2014-0471.patch97
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/ignore_extra_fields.patch21
-rw-r--r--meta/recipes-devtools/dpkg/dpkg/no-vla-warning.patch24
-rw-r--r--meta/recipes-devtools/dpkg/dpkg_1.17.21.bb (renamed from meta/recipes-devtools/dpkg/dpkg_1.17.4.bb)6
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/0012-Fix-musl-build-failures.patch54
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs/acinclude.m453
-rw-r--r--meta/recipes-devtools/e2fsprogs/e2fsprogs_1.42.9.bb14
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/arm_backend.diff (renamed from meta/recipes-devtools/elfutils/elfutils/arm_backend.diff)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/elf_additions.diff (renamed from meta/recipes-devtools/elfutils/elfutils/elf_additions.diff)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-ar-c-fix-num-passed-to-memset.patch (renamed from meta/recipes-devtools/elfutils/elfutils/elfutils-ar-c-fix-num-passed-to-memset.patch)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-fsize.patch (renamed from meta/recipes-devtools/elfutils/elfutils/elfutils-fsize.patch)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/fix-build-gcc-4.8.patch (renamed from meta/recipes-devtools/elfutils/elfutils/fix-build-gcc-4.8.patch)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/fix_for_gcc-4.7.patch (renamed from meta/recipes-devtools/elfutils/elfutils/fix_for_gcc-4.7.patch)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/hppa_backend.diff (renamed from meta/recipes-devtools/elfutils/elfutils/hppa_backend.diff)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/m68k_backend.diff (renamed from meta/recipes-devtools/elfutils/elfutils/m68k_backend.diff)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/mips_backend.diff (renamed from meta/recipes-devtools/elfutils/elfutils/mips_backend.diff)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch (renamed from meta/recipes-devtools/elfutils/elfutils/nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/redhat-portability.diff (renamed from meta/recipes-devtools/elfutils/elfutils/redhat-portability.diff)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/redhat-robustify.diff (renamed from meta/recipes-devtools/elfutils/elfutils/redhat-robustify.diff)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/remove-unused.patch (renamed from meta/recipes-devtools/elfutils/elfutils/remove-unused.patch)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.148/testsuite-ignore-elflint.diff (renamed from meta/recipes-devtools/elfutils/elfutils/testsuite-ignore-elflint.diff)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.158/CVE-2014-0172.patch35
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.158/core_filename.patch27
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.158/elf_additions.diff77
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.158/m4-biarch.m4-tweak-AC_RUN_IFELSE-for-cross-compiling.patch34
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.158/redhat-robustify.diff1756
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.158/unwind_non_linux.patch256
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/arm_backend.diff (renamed from meta/recipes-devtools/elfutils/elfutils-0.158/arm_backend.diff)382
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/arm_func_value.patch166
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/arm_unwind_ret_mask.patch83
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/fixheadercheck.patch (renamed from meta/recipes-devtools/elfutils/elfutils-0.158/fixheadercheck.patch)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/hppa_backend.diff (renamed from meta/recipes-devtools/elfutils/elfutils-0.158/hppa_backend.diff)2
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/m68k_backend.diff (renamed from meta/recipes-devtools/elfutils/elfutils-0.158/m68k_backend.diff)2
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/mips_backend.diff (renamed from meta/recipes-devtools/elfutils/elfutils-0.158/mips_backend.diff)2
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/mips_readelf_w.patch (renamed from meta/recipes-devtools/elfutils/elfutils-0.158/mips_readelf_w.patch)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/non_linux.patch35
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/redhat-portability.diff (renamed from meta/recipes-devtools/elfutils/elfutils-0.158/redhat-portability.diff)408
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/scanf-format.patch (renamed from meta/recipes-devtools/elfutils/elfutils-0.158/scanf-format.patch)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils-0.160/testsuite-ignore-elflint.diff (renamed from meta/recipes-devtools/elfutils/elfutils-0.158/testsuite-ignore-elflint.diff)0
-rw-r--r--meta/recipes-devtools/elfutils/elfutils/Fix_elf_cvt_gunhash.patch29
-rw-r--r--meta/recipes-devtools/elfutils/elfutils_0.148.bb1
-rw-r--r--meta/recipes-devtools/elfutils/elfutils_0.160.bb (renamed from meta/recipes-devtools/elfutils/elfutils_0.158.bb)27
-rw-r--r--meta/recipes-devtools/expect/expect_5.45.bb1
-rw-r--r--meta/recipes-devtools/fdisk/gptfdisk_git.bb22
-rw-r--r--meta/recipes-devtools/file/file/debian-742262.patch4
-rw-r--r--meta/recipes-devtools/file/file_5.21.bb (renamed from meta/recipes-devtools/file/file_5.18.bb)9
-rw-r--r--meta/recipes-devtools/flex/flex.inc2
-rw-r--r--meta/recipes-devtools/gcc/gcc-4.8/0051-fix-unwind-race.patch33
-rw-r--r--meta/recipes-devtools/gcc/gcc-4.9.inc8
-rw-r--r--meta/recipes-devtools/gcc/gcc-4.9/0023-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch30
-rw-r--r--meta/recipes-devtools/gcc/gcc-4.9/0056-top-level-reorder_gcc-bug-61144.patch31
-rw-r--r--meta/recipes-devtools/gcc/gcc-4.9/0057-aarch64-config.patch32
-rw-r--r--meta/recipes-devtools/gcc/gcc-4.9/0058-gcc-r212171.patch113
-rw-r--r--meta/recipes-devtools/gcc/gcc-4.9/0059-gcc-PR-rtl-optimization-63348.patch59
-rw-r--r--meta/recipes-devtools/gcc/gcc-common.inc41
-rw-r--r--meta/recipes-devtools/gcc/gcc-configure-common.inc11
-rw-r--r--meta/recipes-devtools/gcc/gcc-cross-canadian.inc5
-rw-r--r--meta/recipes-devtools/gcc/gcc-cross-initial.inc2
-rw-r--r--meta/recipes-devtools/gcc/gcc-cross.inc8
-rw-r--r--meta/recipes-devtools/gcc/gcc-runtime.inc36
-rw-r--r--meta/recipes-devtools/gcc/gcc-shared-source.inc9
-rw-r--r--meta/recipes-devtools/gcc/gcc-source.inc34
-rw-r--r--meta/recipes-devtools/gcc/gcc-source_4.8.bb2
-rw-r--r--meta/recipes-devtools/gcc/gcc-source_4.9.bb2
-rw-r--r--meta/recipes-devtools/gcc/libgcc-common.inc2
-rw-r--r--meta/recipes-devtools/gcc/libgcc.inc7
-rw-r--r--meta/recipes-devtools/gcc/libgfortran.inc2
-rw-r--r--meta/recipes-devtools/gdb/gdb-7.8.1.inc (renamed from meta/recipes-devtools/gdb/gdb-7.7.1.inc)5
-rw-r--r--meta/recipes-devtools/gdb/gdb-common.inc2
-rw-r--r--meta/recipes-devtools/gdb/gdb-cross-canadian.inc2
-rw-r--r--meta/recipes-devtools/gdb/gdb-cross-canadian_7.8.1.bb (renamed from meta/recipes-devtools/gdb/gdb-cross-canadian_7.7.1.bb)0
-rw-r--r--meta/recipes-devtools/gdb/gdb-cross.inc13
-rw-r--r--meta/recipes-devtools/gdb/gdb-cross_7.8.1.bb (renamed from meta/recipes-devtools/gdb/gdb-cross_7.7.1.bb)0
-rw-r--r--meta/recipes-devtools/gdb/gdb.inc3
-rw-r--r--meta/recipes-devtools/gdb/gdb/kill_arm_map_symbols.patch26
-rw-r--r--meta/recipes-devtools/gdb/gdb_7.8.1.bb (renamed from meta/recipes-devtools/gdb/gdb_7.7.1.bb)0
-rw-r--r--meta/recipes-devtools/git/git.inc21
-rw-r--r--meta/recipes-devtools/git/git_2.2.0.bb (renamed from meta/recipes-devtools/git/git_2.0.1.bb)4
-rw-r--r--meta/recipes-devtools/gnu-config/gnu-config_20120814.bb2
-rw-r--r--meta/recipes-devtools/gnu-config/gnu-config_git.bb2
-rw-r--r--meta/recipes-devtools/guile/files/arm_aarch64.patch19
-rw-r--r--meta/recipes-devtools/guile/files/workaround-ice-ssa-corruption.patch60
-rw-r--r--meta/recipes-devtools/guile/guile_2.0.11.bb2
-rw-r--r--meta/recipes-devtools/help2man/help2man-native_1.46.4.bb (renamed from meta/recipes-devtools/help2man/help2man-native_1.46.1.bb)4
-rw-r--r--meta/recipes-devtools/i2c-tools/i2c-tools_3.1.1.bb12
-rw-r--r--meta/recipes-devtools/insserv/files/run-ptest2
-rwxr-xr-xmeta/recipes-devtools/installer/adt-installer/scripts/adt_installer_internal2
-rw-r--r--meta/recipes-devtools/installer/adt-installer_1.0.bb1
-rw-r--r--meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb2
-rw-r--r--meta/recipes-devtools/libtool/libtool-2.4.2.inc2
-rw-r--r--meta/recipes-devtools/libtool/libtool-cross_2.4.2.bb2
-rw-r--r--meta/recipes-devtools/libtool/libtool-native_2.4.2.bb2
-rw-r--r--meta/recipes-devtools/libtool/libtool/fix-final-rpath.patch16
-rw-r--r--meta/recipes-devtools/libtool/libtool_2.4.2.bb13
-rw-r--r--meta/recipes-devtools/libtool/nativesdk-libtool_2.4.2.bb2
-rw-r--r--meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools-native_0.9.69.bb4
-rw-r--r--meta/recipes-devtools/m4/m4.inc1
-rw-r--r--meta/recipes-devtools/make/make_4.1.bb (renamed from meta/recipes-devtools/make/make_4.0.bb)4
-rw-r--r--meta/recipes-devtools/mklibs/files/sysrooted-ldso.patch18
-rw-r--r--meta/recipes-devtools/mklibs/mklibs-native_0.1.39.bb1
-rw-r--r--meta/recipes-devtools/mtd/mtd-utils/fix-armv7-neon-alignment.patch44
-rw-r--r--meta/recipes-devtools/mtd/mtd-utils_git.bb1
-rw-r--r--meta/recipes-devtools/mtools/mtools/fix-broken-lz.patch23
-rw-r--r--meta/recipes-devtools/mtools/mtools_3.9.9.bb4
-rw-r--r--meta/recipes-devtools/nasm/nasm_2.11.06.bb (renamed from meta/recipes-devtools/nasm/nasm_2.11.05.bb)4
-rw-r--r--meta/recipes-devtools/opkg-utils/opkg-utils_git.bb2
-rw-r--r--meta/recipes-devtools/opkg/opkg-collateral.bb2
-rw-r--r--meta/recipes-devtools/opkg/opkg.inc2
-rw-r--r--meta/recipes-devtools/opkg/opkg/add-exclude.patch24
-rw-r--r--meta/recipes-devtools/opkg/opkg/libopkg-opkg_remove.c-avoid-remove-pkg-repeatly-with.patch39
-rw-r--r--meta/recipes-devtools/opkg/opkg/no-install-recommends.patch26
-rw-r--r--meta/recipes-devtools/opkg/opkg_0.2.4.bb (renamed from meta/recipes-devtools/opkg/opkg_0.2.2.bb)5
-rw-r--r--meta/recipes-devtools/ossp-uuid/ossp-uuid/ldflags.patch26
-rw-r--r--meta/recipes-devtools/ossp-uuid/ossp-uuid_1.6.2.bb1
-rw-r--r--meta/recipes-devtools/patchelf/patchelf_0.8.bb12
-rw-r--r--meta/recipes-devtools/pax-utils/pax-utils_0.9.2.bb (renamed from meta/recipes-devtools/pax-utils/pax-utils_0.8.1.bb)6
-rw-r--r--meta/recipes-devtools/perl/perl-5.20.0/config.sh7
-rw-r--r--meta/recipes-devtools/perl/perl-5.20.0/run-ptest2
-rw-r--r--meta/recipes-devtools/perl/perl-native_5.20.0.bb9
-rw-r--r--meta/recipes-devtools/perl/perl-ptest.inc2
-rw-r--r--meta/recipes-devtools/perl/perl_5.20.0.bb19
-rw-r--r--meta/recipes-devtools/postinst-intercept/nativesdk-postinst-intercept_1.0.bb (renamed from meta/recipes-devtools/postinst-intercept/postinst-intercept_1.0.bb)6
-rw-r--r--meta/recipes-devtools/pseudo/files/0001-pseudo_has_unload-add-function.patch190
-rw-r--r--meta/recipes-devtools/pseudo/files/fallback-group2
-rw-r--r--meta/recipes-devtools/pseudo/files/fallback-passwd1
-rw-r--r--meta/recipes-devtools/pseudo/files/pseudo-1.5.1-install-directory-mode.patch18
-rw-r--r--meta/recipes-devtools/pseudo/files/pseudo-fchmodat-permissions.patch264
-rw-r--r--meta/recipes-devtools/pseudo/files/shutdownping.patch53
-rw-r--r--meta/recipes-devtools/pseudo/files/symver.patch26
-rw-r--r--meta/recipes-devtools/pseudo/pseudo-1.6.2/0001-pseudo_client.c-protect-pwd_lck-against-magic.patch56
-rw-r--r--meta/recipes-devtools/pseudo/pseudo-1.6.2/0002-pseudo_util-modify-interface-to-pseudo_etc_file.patch70
-rw-r--r--meta/recipes-devtools/pseudo/pseudo-1.6.2/0003-pseudo_client.c-support-multiple-directories-in-PSEU.patch116
-rw-r--r--meta/recipes-devtools/pseudo/pseudo_1.5.1.bb20
-rw-r--r--meta/recipes-devtools/pseudo/pseudo_1.6.1.bb10
-rw-r--r--meta/recipes-devtools/pseudo/pseudo_1.6.2.bb22
-rw-r--r--meta/recipes-devtools/pseudo/pseudo_git.bb4
-rw-r--r--meta/recipes-devtools/python/fix-path.inc22
-rw-r--r--meta/recipes-devtools/python/python-2.7-manifest.inc14
-rw-r--r--meta/recipes-devtools/python/python-3.3-manifest.inc14
-rw-r--r--meta/recipes-devtools/python/python-argparse_1.2.1.bb17
-rw-r--r--meta/recipes-devtools/python/python-native_2.7.3.bb4
-rw-r--r--meta/recipes-devtools/python/python-numpy/mips64/_numpyconfig.h30
-rw-r--r--meta/recipes-devtools/python/python-numpy/mips64/config.h139
-rw-r--r--meta/recipes-devtools/python/python-numpy_1.7.0.bb4
-rw-r--r--meta/recipes-devtools/python/python-pycurl_7.19.5.bb (renamed from meta/recipes-devtools/python/python-pycurl_7.19.3.1.bb)11
-rw-r--r--meta/recipes-devtools/python/python-pygobject_2.28.3.bb4
-rw-r--r--meta/recipes-devtools/python/python-pygtk/fix-pygtk-2.0.pc.patch13
-rw-r--r--meta/recipes-devtools/python/python-pygtk_2.24.0.bb29
-rw-r--r--meta/recipes-devtools/python/python-smartpm/smart-add-for-rpm-ignoresize-check.patch37
-rw-r--r--meta/recipes-devtools/python/python-smartpm/smart-attempt.patch27
-rw-r--r--meta/recipes-devtools/python/python-smartpm/smart-dflags.patch53
-rw-r--r--meta/recipes-devtools/python/python-smartpm/smart-rpm4-fixes.patch49
-rw-r--r--meta/recipes-devtools/python/python-smartpm_1.4.1.bb6
-rw-r--r--meta/recipes-devtools/python/python/python-2.7.3-CVE-2014-7185.patch75
-rw-r--r--meta/recipes-devtools/python/python/remove-BOM-insection-code.patch24
-rw-r--r--meta/recipes-devtools/python/python3-distribute_0.6.32.bb2
-rw-r--r--meta/recipes-devtools/python/python3/python3-setup.py-no-host-headers-libs.patch33
-rw-r--r--meta/recipes-devtools/python/python3/python3-use-CROSSPYTHONPATH-for-PYTHON_FOR_BUILD.patch28
-rw-r--r--meta/recipes-devtools/python/python3_3.3.3.bb19
-rw-r--r--meta/recipes-devtools/python/python_2.7.3.bb9
-rw-r--r--meta/recipes-devtools/qemu/qemu.inc10
-rw-r--r--meta/recipes-devtools/qemu/qemu/wacom.patch130
-rw-r--r--meta/recipes-devtools/qemu/qemu_2.1.2.bb (renamed from meta/recipes-devtools/qemu/qemu_2.1.0.bb)12
-rw-r--r--meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb2
-rw-r--r--meta/recipes-devtools/quilt/quilt-0.63.inc2
-rwxr-xr-xmeta/recipes-devtools/quilt/quilt/run-ptest2
-rw-r--r--meta/recipes-devtools/rpm/rpm-4.11.2/add_RPMSENSE_MISSINGOK_to_rpmmodule.patch20
-rw-r--r--meta/recipes-devtools/rpm/rpm-4.11.2/disable_shortcircuited.patch23
-rw-r--r--meta/recipes-devtools/rpm/rpm-4.11.2/fix_libdir.patch19
-rwxr-xr-xmeta/recipes-devtools/rpm/rpm-4.11.2/pythondeps.sh16
-rw-r--r--meta/recipes-devtools/rpm/rpm-4.11.2/remove-db3-from-configure.patch26
-rw-r--r--meta/recipes-devtools/rpm/rpm-4.11.2/remove-dir-check.patch23
-rw-r--r--meta/recipes-devtools/rpm/rpm-4.11.2/rpm-scriptetexechelp.patch194
-rw-r--r--meta/recipes-devtools/rpm/rpm-4.11.2/support-suggests-tag.patch384
-rw-r--r--meta/recipes-devtools/rpm/rpm-4.11.2/use-pkgconfig-for-python.patch38
-rw-r--r--meta/recipes-devtools/rpm/rpm/0001-using-poptParseArgvString-to-parse-the-_gpg_check_pa.patch49
-rw-r--r--meta/recipes-devtools/rpm/rpm/rpm-realpath.patch24
-rw-r--r--meta/recipes-devtools/rpm/rpm_4.11.2.bb133
-rw-r--r--meta/recipes-devtools/rpm/rpm_5.4+cvs.bb5
-rw-r--r--meta/recipes-devtools/rpm/rpm_5.4.14.bb7
-rw-r--r--meta/recipes-devtools/rpm/rpmresolve/rpmresolve.c8
-rw-r--r--meta/recipes-devtools/rsync/rsync_3.1.0.bb26
-rw-r--r--meta/recipes-devtools/rsync/rsync_3.1.1.bb27
-rw-r--r--meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts.service1
-rw-r--r--meta/recipes-devtools/squashfs-tools/squashfs-tools_4.3.bb1
-rw-r--r--meta/recipes-devtools/strace/strace-4.8/0001-Work-around-conflict-between-sys-ptrace.h-and-linux-.patch108
-rw-r--r--meta/recipes-devtools/strace/strace-4.8/Makefile-ptest.patch28
-rw-r--r--meta/recipes-devtools/strace/strace-4.8/strace-fix-64-bit-process-detection.patch34
-rw-r--r--meta/recipes-devtools/strace/strace-4.9/Makefile-ptest.patch53
-rwxr-xr-xmeta/recipes-devtools/strace/strace-4.9/git-version-gen (renamed from meta/recipes-devtools/strace/strace-4.8/git-version-gen)0
-rwxr-xr-xmeta/recipes-devtools/strace/strace-4.9/run-ptest (renamed from meta/recipes-devtools/strace/strace-4.8/run-ptest)0
-rw-r--r--meta/recipes-devtools/strace/strace-4.9/strace-add-configure-options.patch (renamed from meta/recipes-devtools/strace/strace-4.8/strace-add-configure-options.patch)37
-rw-r--r--meta/recipes-devtools/strace/strace_4.9.bb (renamed from meta/recipes-devtools/strace/strace_4.8.bb)9
-rw-r--r--meta/recipes-devtools/subversion/subversion-1.8.10/disable_macos.patch (renamed from meta/recipes-devtools/subversion/subversion-1.8.9/disable_macos.patch)0
-rw-r--r--meta/recipes-devtools/subversion/subversion-1.8.10/libtool2.patch (renamed from meta/recipes-devtools/subversion/subversion-1.8.9/libtool2.patch)0
-rw-r--r--meta/recipes-devtools/subversion/subversion/subversion-CVE-2014-3522.patch439
-rw-r--r--meta/recipes-devtools/subversion/subversion/subversion-CVE-2014-3528.patch29
-rw-r--r--meta/recipes-devtools/subversion/subversion_1.6.15.bb5
-rw-r--r--meta/recipes-devtools/subversion/subversion_1.8.10.bb (renamed from meta/recipes-devtools/subversion/subversion_1.8.9.bb)4
-rw-r--r--meta/recipes-devtools/syslinux/syslinux_6.01.bb1
-rw-r--r--meta/recipes-devtools/tcltk/tcl/run-ptest4
-rw-r--r--meta/recipes-devtools/tcltk/tcl_8.6.3.bb (renamed from meta/recipes-devtools/tcltk/tcl_8.6.1.bb)5
-rw-r--r--meta/recipes-devtools/unifdef/files/unifdef.c1005
-rw-r--r--meta/recipes-devtools/unifdef/unifdef-native_2.6.18+git.bb21
-rw-r--r--meta/recipes-devtools/unifdef/unifdef_2.10.bb15
-rw-r--r--meta/recipes-devtools/vala/vala.inc8
-rw-r--r--meta/recipes-devtools/vala/vala/0001-git-version-gen-don-t-append-dirty-if-we-re-not-in-g.patch (renamed from meta/recipes-devtools/vala/vala-0.16.0/0001-git-version-gen-don-t-append-dirty-if-we-re-not-in-g.patch)2
-rw-r--r--meta/recipes-devtools/vala/vala_0.16.0.bb8
-rw-r--r--meta/recipes-devtools/vala/vala_0.26.1.bb6
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/add-ptest.patch68
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/glibc-2.19.patch23
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/glibc-2.20.patch30
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/remove-arm-variant-specific.patch20
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/remove-ppc-tests-failing-build.patch26
-rwxr-xr-xmeta/recipes-devtools/valgrind/valgrind/run-ptest3
-rw-r--r--meta/recipes-devtools/valgrind/valgrind/valgrind-remove-rpath.patch25
-rw-r--r--meta/recipes-devtools/valgrind/valgrind_3.10.1.bb (renamed from meta/recipes-devtools/valgrind/valgrind_3.9.0.bb)10
-rw-r--r--meta/recipes-devtools/xmlto/xmlto_0.0.25.bb3
-rw-r--r--meta/recipes-extended/at/at/atd.init (renamed from meta/recipes-extended/at/files/S99at)0
-rw-r--r--meta/recipes-extended/at/at/atd.service (renamed from meta/recipes-extended/at/files/atd.service)0
-rw-r--r--meta/recipes-extended/at/at/configure-add-enable-pam.patch (renamed from meta/recipes-extended/at/files/configure-add-enable-pam.patch)0
-rw-r--r--meta/recipes-extended/at/at/file_replacement_with_gplv2.patch (renamed from meta/recipes-extended/at/files/file_replacement_with_gplv2.patch)0
-rw-r--r--meta/recipes-extended/at/at/fix_parallel_build_error.patch (renamed from meta/recipes-extended/at/files/fix_parallel_build_error.patch)0
-rw-r--r--meta/recipes-extended/at/at/pam.conf.patch (renamed from meta/recipes-extended/at/files/pam.conf.patch)24
-rw-r--r--meta/recipes-extended/at/at/posixtm.c (renamed from meta/recipes-extended/at/files/posixtm.c)0
-rw-r--r--meta/recipes-extended/at/at/posixtm.h (renamed from meta/recipes-extended/at/files/posixtm.h)0
-rw-r--r--meta/recipes-extended/at/at_3.1.16.bb (renamed from meta/recipes-extended/at/at_3.1.14.bb)17
-rw-r--r--meta/recipes-extended/bash/bash.inc7
-rw-r--r--meta/recipes-extended/bash/bash_3.2.48.bb39
-rw-r--r--meta/recipes-extended/bash/bash_4.3.bb91
-rw-r--r--meta/recipes-extended/byacc/byacc_20141128.bb (renamed from meta/recipes-extended/byacc/byacc_20140422.bb)4
-rw-r--r--meta/recipes-extended/bzip2/bzip2_1.0.6.bb14
-rw-r--r--meta/recipes-extended/cpio/cpio-2.11/fix-memory-overrun.patch220
-rw-r--r--meta/recipes-extended/cpio/cpio-2.8/fix-memory-overrun.patch217
-rw-r--r--meta/recipes-extended/cpio/cpio_2.11.bb3
-rw-r--r--meta/recipes-extended/cpio/cpio_2.8.bb7
-rw-r--r--meta/recipes-extended/cracklib/cracklib_2.9.2.bb (renamed from meta/recipes-extended/cracklib/cracklib_2.9.1.bb)4
-rw-r--r--meta/recipes-extended/cronie/cronie/crontab6
-rw-r--r--meta/recipes-extended/cronie/cronie/fix-out-of-tree-build.patch31
-rw-r--r--meta/recipes-extended/cronie/cronie_1.4.12.bb (renamed from meta/recipes-extended/cronie/cronie_1.4.11.bb)16
-rw-r--r--meta/recipes-extended/cups/cups.inc28
-rw-r--r--meta/recipes-extended/cups/cups/0001-don-t-try-to-run-generated-binaries.patch35
-rw-r--r--meta/recipes-extended/cups/cups/cups-no-gcrypt.patch49
-rw-r--r--meta/recipes-extended/cups/cups/cups.path8
-rw-r--r--meta/recipes-extended/cups/cups/cups.service10
-rw-r--r--meta/recipes-extended/cups/cups/cups.socket8
-rw-r--r--meta/recipes-extended/cups/cups_1.7.5.bb6
-rw-r--r--meta/recipes-extended/cups/cups_2.0.1.bb6
-rw-r--r--meta/recipes-extended/cwautomacros/cwautomacros_20110201.bb6
-rw-r--r--meta/recipes-extended/diffutils/diffutils_3.3.bb2
-rw-r--r--meta/recipes-extended/ethtool/ethtool/ethtool-uint.patch50
-rw-r--r--meta/recipes-extended/ethtool/ethtool_3.16.bb (renamed from meta/recipes-extended/ethtool/ethtool_3.14.bb)5
-rw-r--r--meta/recipes-extended/findutils/findutils-4.4.2/01-27017.patch781
-rw-r--r--meta/recipes-extended/findutils/findutils-4.4.2/02-28824.patch294
-rw-r--r--meta/recipes-extended/findutils/findutils-4.4.2/03-28872.patch58
-rw-r--r--meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_doc.patch84
-rw-r--r--meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_for_automake-1.12.patch22
-rw-r--r--meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_for_x32.patch40
-rw-r--r--meta/recipes-extended/findutils/findutils.inc2
-rw-r--r--meta/recipes-extended/findutils/findutils_4.4.2.bb25
-rw-r--r--meta/recipes-extended/findutils/findutils_4.5.14.bb15
-rw-r--r--meta/recipes-extended/gawk/gawk-4.1.1/run-ptest4
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript/cups-no-gcrypt.patch31
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.02-parallel-make.patch99
-rw-r--r--meta/recipes-extended/ghostscript/ghostscript_9.15.bb (renamed from meta/recipes-extended/ghostscript/ghostscript_9.14.bb)5
-rw-r--r--meta/recipes-extended/grep/grep_2.21.bb (renamed from meta/recipes-extended/grep/grep_2.19.bb)5
-rw-r--r--meta/recipes-extended/grep/grep_2.5.1a.bb2
-rw-r--r--meta/recipes-extended/gzip/gzip-1.6/wrong-path-fix.patch31
-rw-r--r--meta/recipes-extended/gzip/gzip.inc3
-rw-r--r--meta/recipes-extended/hdparm/hdparm_9.45.bb (renamed from meta/recipes-extended/hdparm/hdparm_9.43.bb)4
-rw-r--r--meta/recipes-extended/images/core-image-kernel-dev.bb17
-rw-r--r--meta/recipes-extended/less/less_471.bb (renamed from meta/recipes-extended/less/less_458.bb)4
-rw-r--r--meta/recipes-extended/libaio/libaio/libaio_fix_for_mips64.patch42
-rw-r--r--meta/recipes-extended/libaio/libaio_0.3.109.bb1
-rw-r--r--meta/recipes-extended/libtirpc/libtirpc_0.2.5.bb (renamed from meta/recipes-extended/libtirpc/libtirpc_0.2.4.bb)6
-rw-r--r--meta/recipes-extended/lighttpd/lighttpd_1.4.35.bb2
-rw-r--r--meta/recipes-extended/logrotate/logrotate/disable-check-different-filesystems.patch14
-rw-r--r--meta/recipes-extended/logrotate/logrotate_3.8.8.bb (renamed from meta/recipes-extended/logrotate/logrotate_3.8.7.bb)32
-rw-r--r--meta/recipes-extended/lsb/lsb_4.1.bb12
-rw-r--r--meta/recipes-extended/lsb/lsbinitscripts_9.56.1.bb (renamed from meta/recipes-extended/lsb/lsbinitscripts_9.55.bb)6
-rw-r--r--meta/recipes-extended/lsb/lsbtest/LSB_Test.sh2
-rw-r--r--meta/recipes-extended/lsof/lsof_4.88.bb (renamed from meta/recipes-extended/lsof/lsof_4.87.bb)8
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-Realtime-tests-Fix-bad-priority-inheritance-conditio.patch48
-rw-r--r--meta/recipes-extended/ltp/ltp/0001-Realtime-tests-Fix-robust-mutex-conditionals.patch62
-rw-r--r--meta/recipes-extended/ltp/ltp/add-knob-for-numa.patch39
-rw-r--r--meta/recipes-extended/ltp/ltp/add-knob-for-tirpc.patch37
-rw-r--r--meta/recipes-extended/ltp/ltp/automake-foreign.patch20
-rw-r--r--meta/recipes-extended/ltp/ltp/make-setregid02-work.patch61
-rw-r--r--meta/recipes-extended/ltp/ltp_20140828.bb (renamed from meta/recipes-extended/ltp/ltp_20140422.bb)16
-rw-r--r--meta/recipes-extended/man-pages/man-pages_3.75.bb (renamed from meta/recipes-extended/man-pages/man-pages_3.70.bb)11
-rw-r--r--meta/recipes-extended/man/man/man.conf3
-rw-r--r--meta/recipes-extended/man/man_1.6g.bb13
-rw-r--r--meta/recipes-extended/mc/mc/mc-CTRL.patch31
-rw-r--r--meta/recipes-extended/mc/mc_4.7.5.2.bb4
-rw-r--r--meta/recipes-extended/mc/mc_4.8.13.bb (renamed from meta/recipes-extended/mc/mc_4.8.12.bb)8
-rw-r--r--meta/recipes-extended/mdadm/mdadm_3.3.2.bb (renamed from meta/recipes-extended/mdadm/mdadm_3.3.1.bb)4
-rw-r--r--meta/recipes-extended/newt/libnewt-python_0.52.18.bb (renamed from meta/recipes-extended/newt/libnewt-python_0.52.17.bb)0
-rw-r--r--meta/recipes-extended/newt/libnewt_0.52.18.bb (renamed from meta/recipes-extended/newt/libnewt_0.52.17.bb)8
-rw-r--r--meta/recipes-extended/packagegroups/packagegroup-core-lsb.bb18
-rw-r--r--meta/recipes-extended/pam/libpam/libpam-xtests-remove-bash-dependency.patch226
-rw-r--r--meta/recipes-extended/pam/libpam_1.1.6.bb40
-rw-r--r--meta/recipes-extended/parted/files/Makefile (renamed from meta/recipes-extended/parted/parted-3.1/Makefile)0
-rw-r--r--meta/recipes-extended/parted/files/fix-compile-failure-while-dis.patch57
-rw-r--r--meta/recipes-extended/parted/files/fix-doc-mandir.patch (renamed from meta/recipes-extended/parted/parted-3.1/fix-doc-mandir.patch)0
-rw-r--r--meta/recipes-extended/parted/files/no_check.patch (renamed from meta/recipes-extended/parted/parted-3.1/no_check.patch)0
-rw-r--r--meta/recipes-extended/parted/files/run-ptest (renamed from meta/recipes-extended/parted/parted-3.1/run-ptest)0
-rw-r--r--meta/recipes-extended/parted/files/syscalls.patch (renamed from meta/recipes-extended/parted/parted-3.1/syscalls.patch)0
-rw-r--r--meta/recipes-extended/parted/parted-3.1/fix-deprecated-readline.patch34
-rw-r--r--meta/recipes-extended/parted/parted-3.1/fix-dvh-overflows.patch35
-rw-r--r--meta/recipes-extended/parted/parted-3.1/fix-git-version-gen.patch43
-rw-r--r--meta/recipes-extended/parted/parted_3.2.bb (renamed from meta/recipes-extended/parted/parted_3.1.bb)8
-rw-r--r--meta/recipes-extended/procps/procps-3.2.8/60_linux_version_init.patch54
-rw-r--r--meta/recipes-extended/procps/procps-3.2.8/detect_bitness.patch26
-rw-r--r--meta/recipes-extended/procps/procps-3.2.8/gnu-kbsd-version.patch44
-rw-r--r--meta/recipes-extended/procps/procps-3.2.8/install.patch39
-rw-r--r--meta/recipes-extended/procps/procps-3.2.8/linux-limits.patch15
-rw-r--r--meta/recipes-extended/procps/procps-3.2.8/procmodule.patch38
-rw-r--r--meta/recipes-extended/procps/procps-3.2.8/procps-3.2.7-top-remcpu.patch111
-rw-r--r--meta/recipes-extended/procps/procps-3.2.8/procps-3.2.8+gmake-3.82.patch19
-rw-r--r--meta/recipes-extended/procps/procps-3.2.8/procps-3.2.8-ps-cgroup.patch82
-rw-r--r--meta/recipes-extended/procps/procps-3.2.8/psmodule.patch23
-rw-r--r--meta/recipes-extended/procps/procps.inc31
-rw-r--r--meta/recipes-extended/procps/procps/fix-configure.patch19
-rw-r--r--meta/recipes-extended/procps/procps/sysctl.conf (renamed from meta/recipes-extended/procps/procps-3.2.8/sysctl.conf)0
-rw-r--r--meta/recipes-extended/procps/procps_3.2.8.bb32
-rw-r--r--meta/recipes-extended/procps/procps_3.3.10.bb62
-rw-r--r--meta/recipes-extended/psmisc/files/0002-Include-limits.h-for-PATH_MAX.patch29
-rw-r--r--meta/recipes-extended/psmisc/psmisc_22.21.bb4
-rw-r--r--meta/recipes-extended/rpcbind/rpcbind/rpcbind.service10
-rw-r--r--meta/recipes-extended/rpcbind/rpcbind/rpcbind.socket8
-rw-r--r--meta/recipes-extended/rpcbind/rpcbind_0.2.1.bb7
-rw-r--r--meta/recipes-extended/sed/sed_4.2.2.bb2
-rw-r--r--meta/recipes-extended/shadow/files/0001-Do-not-read-login.defs-before-doing-chroot.patch46
-rw-r--r--meta/recipes-extended/shadow/files/check_size_of_uid_t_and_gid_t_using_AC_CHECK_SIZEOF.patch41
-rw-r--r--meta/recipes-extended/shadow/files/securetty19
-rw-r--r--meta/recipes-extended/shadow/shadow.inc9
-rw-r--r--meta/recipes-extended/sudo/files/volatiles.99_sudo1
-rw-r--r--meta/recipes-extended/sudo/sudo.inc16
-rw-r--r--meta/recipes-extended/sudo/sudo_1.8.11p2.bb (renamed from meta/recipes-extended/sudo/sudo_1.8.10p3.bb)13
-rw-r--r--meta/recipes-extended/tar/tar-replacement-native_1.28.bb (renamed from meta/recipes-extended/tar/tar-replacement-native_1.27.1.bb)0
-rw-r--r--meta/recipes-extended/tar/tar_1.28.bb (renamed from meta/recipes-extended/tar/tar_1.27.1.bb)5
-rw-r--r--meta/recipes-extended/texinfo-dummy-native/texinfo-dummy/template.py2
-rw-r--r--meta/recipes-extended/texinfo/texinfo-4.8/check-locale-h.patch28
-rw-r--r--meta/recipes-extended/texinfo/texinfo-4.8/do-compile-native-tools.patch49
-rw-r--r--meta/recipes-extended/texinfo/texinfo-4.8/using-native-makeinfo.patch24
-rw-r--r--meta/recipes-extended/texinfo/texinfo_4.8.bb56
-rw-r--r--meta/recipes-extended/texinfo/texinfo_5.2.bb15
-rw-r--r--meta/recipes-extended/tzcode/tzcode-native.inc2
-rw-r--r--meta/recipes-extended/tzcode/tzcode-native_2014f.bb11
-rw-r--r--meta/recipes-extended/tzcode/tzcode-native_2014j.bb10
-rw-r--r--meta/recipes-extended/tzdata/tzdata_2014f.bb6
-rw-r--r--meta/recipes-extended/tzdata/tzdata_2014j.bb6
-rw-r--r--meta/recipes-extended/watchdog/watchdog/fix-ping-failure.patch (renamed from meta/recipes-extended/watchdog/files/fix-ping-failure.patch)56
-rw-r--r--meta/recipes-extended/watchdog/watchdog/fixsepbuild.patch (renamed from meta/recipes-extended/watchdog/files/fixsepbuild.patch)0
-rw-r--r--meta/recipes-extended/watchdog/watchdog_5.14.bb (renamed from meta/recipes-extended/watchdog/watchdog_5.13.bb)5
-rw-r--r--meta/recipes-extended/wget/wget.inc5
-rw-r--r--meta/recipes-extended/wget/wget_1.15.bb7
-rw-r--r--meta/recipes-extended/wget/wget_1.16.1.bb8
-rw-r--r--meta/recipes-extended/which/which-2.18/automake-foreign.patch28
-rw-r--r--meta/recipes-extended/which/which-2.20/automake.patch5
-rw-r--r--meta/recipes-extended/which/which_2.18.bb4
-rw-r--r--meta/recipes-extended/xinetd/xinetd/xinetd.service13
-rw-r--r--meta/recipes-extended/xinetd/xinetd_2.3.15.bb12
-rw-r--r--meta/recipes-extended/xz/xz_5.2.0.bb (renamed from meta/recipes-extended/xz/xz_5.1.3alpha.bb)6
-rw-r--r--meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.30.8.bb12
-rw-r--r--meta/recipes-gnome/gnome/gnome-common_3.14.0.bb (renamed from meta/recipes-gnome/gnome/gnome-common_3.12.0.bb)4
-rw-r--r--meta/recipes-gnome/gnome/gnome-desktop.inc23
-rw-r--r--meta/recipes-gnome/gnome/gnome-desktop_2.32.1.bb7
-rw-r--r--meta/recipes-gnome/gnome/gnome-doc-utils.inc2
-rw-r--r--meta/recipes-gnome/gnome/gsettings-desktop-schemas_3.10.1.bb16
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.22/hardcoded_libtool.patch1814
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.24/0001-GtkButton-do-not-prelight-in-touchscreen-mode.patch (renamed from meta/recipes-gnome/gtk+/gtk+-2.24.22/0001-GtkButton-do-not-prelight-in-touchscreen-mode.patch)0
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.24/0001-bgo-584832-Duplicate-the-exec-string-returned-by-gtk.patch (renamed from meta/recipes-gnome/gtk+/gtk+-2.24.22/0001-bgo-584832-Duplicate-the-exec-string-returned-by-gtk.patch)0
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.24/cellrenderer-cairo.patch (renamed from meta/recipes-gnome/gtk+/gtk+-2.24.22/cellrenderer-cairo.patch)0
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.24/configure-nm.patch (renamed from meta/recipes-gnome/gtk+/gtk+-2.24.22/configure-nm.patch)0
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.24/configurefix.patch (renamed from meta/recipes-gnome/gtk+/gtk+-2.24.22/configurefix.patch)0
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.24/doc-fixes.patch (renamed from meta/recipes-gnome/gtk+/gtk+-2.24.22/doc-fixes.patch)0
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.24/entry-cairo.patch (renamed from meta/recipes-gnome/gtk+/gtk+-2.24.22/entry-cairo.patch)0
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.24/hardcoded_libtool.patch35
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.24/run-iconcache.patch (renamed from meta/recipes-gnome/gtk+/gtk+-2.24.22/run-iconcache.patch)11
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.24/toggle-font.diff (renamed from meta/recipes-gnome/gtk+/gtk+-2.24.22/toggle-font.diff)0
-rw-r--r--meta/recipes-gnome/gtk+/gtk+-2.24.24/xsettings.patch (renamed from meta/recipes-gnome/gtk+/gtk+-2.24.22/xsettings.patch)0
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3.inc8
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3/fix-build-when-wayland-backend-enabled.patch43
-rw-r--r--meta/recipes-gnome/gtk+/gtk+3_3.12.2.bb (renamed from meta/recipes-gnome/gtk+/gtk+3_3.10.7.bb)5
-rw-r--r--meta/recipes-gnome/gtk+/gtk+_2.24.24.bb (renamed from meta/recipes-gnome/gtk+/gtk+_2.24.22.bb)4
-rw-r--r--meta/recipes-gnome/gtk-doc-stub/gtk-doc-stub_git.bb4
-rw-r--r--meta/recipes-gnome/hicolor-icon-theme/hicolor-icon-theme_0.13.bb (renamed from meta/recipes-gnome/hicolor-icon-theme/hicolor-icon-theme_0.12.bb)9
-rw-r--r--meta/recipes-graphics/clutter/clutter-1.0.inc7
-rw-r--r--meta/recipes-graphics/clutter/clutter-1.0/run-installed-tests-with-tap-output.patch20
-rw-r--r--meta/recipes-graphics/clutter/clutter-1.0/run-ptest3
-rw-r--r--meta/recipes-graphics/clutter/clutter-1.0_1.18.2.bb8
-rw-r--r--meta/recipes-graphics/clutter/clutter-1.0_1.20.0.bb10
-rw-r--r--meta/recipes-graphics/drm/libdrm.inc1
-rw-r--r--meta/recipes-graphics/drm/libdrm/GNU_SOURCE_definition.patch30
-rw-r--r--meta/recipes-graphics/drm/libdrm_2.4.54.bb8
-rw-r--r--meta/recipes-graphics/drm/libdrm_2.4.58.bb6
-rw-r--r--meta/recipes-graphics/glew/glew/fix-glew.pc-install.patch3
-rw-r--r--meta/recipes-graphics/harfbuzz/harfbuzz_0.9.35.bb (renamed from meta/recipes-graphics/harfbuzz/harfbuzz_0.9.29.bb)4
-rw-r--r--meta/recipes-graphics/mesa/mesa-demos/0003-configure-Allow-to-disable-demos-which-require-GLEW-.patch10
-rw-r--r--meta/recipes-graphics/mesa/mesa-gl_10.3.4.bb (renamed from meta/recipes-graphics/mesa/mesa-gl_10.1.3.bb)0
-rw-r--r--meta/recipes-graphics/mesa/mesa.inc8
-rw-r--r--meta/recipes-graphics/mesa/mesa/0002-pipe_loader_sw-include-xlib_sw_winsys.h-only-when-HA.patch52
-rw-r--r--meta/recipes-graphics/mesa/mesa/0003-EGL-Mutate-NativeDisplayType-depending-on-config.patch362
-rw-r--r--meta/recipes-graphics/mesa/mesa/0006-fix-out-of-tree-egl.patch48
-rw-r--r--meta/recipes-graphics/mesa/mesa_10.3.4.bb (renamed from meta/recipes-graphics/mesa/mesa_10.1.3.bb)9
-rw-r--r--meta/recipes-graphics/mesa/mesa_git.bb12
-rw-r--r--meta/recipes-graphics/mx/mx-1.0/fix-build-dir.patch (renamed from meta/recipes-graphics/mx/mx-1.0-1.4.7+gitAUTOINC+9b1db6b806/fix-build-dir.patch)0
-rw-r--r--meta/recipes-graphics/mx/mx-1.0/fix-test-includes.patch (renamed from meta/recipes-graphics/mx/mx-1.0-1.4.7+gitAUTOINC+9b1db6b806/fix-test-includes.patch)0
-rw-r--r--meta/recipes-graphics/packagegroups/packagegroup-core-directfb.bb2
-rw-r--r--meta/recipes-graphics/packagegroups/packagegroup-core-x11-xserver.bb4
-rw-r--r--meta/recipes-graphics/pango/pango.inc2
-rw-r--r--meta/recipes-graphics/pango/pango_1.36.6.bb (renamed from meta/recipes-graphics/pango/pango_1.36.5.bb)4
-rw-r--r--meta/recipes-graphics/piglit/piglit_git.bb5
-rw-r--r--meta/recipes-graphics/wayland/libinput_0.6.0.bb14
-rw-r--r--meta/recipes-graphics/wayland/wayland_1.6.0.bb (renamed from meta/recipes-graphics/wayland/wayland_1.5.0.bb)4
-rw-r--r--meta/recipes-graphics/wayland/weston_1.6.0.bb (renamed from meta/recipes-graphics/wayland/weston_1.5.0.bb)8
-rw-r--r--meta/recipes-graphics/x11-common/xserver-nodm-init.bb2
-rwxr-xr-xmeta/recipes-graphics/x11-common/xserver-nodm-init/xserver-nodm4
-rw-r--r--meta/recipes-graphics/xorg-app/xmodmap/gnu-source.patch60
-rw-r--r--meta/recipes-graphics/xorg-app/xmodmap_1.0.8.bb2
-rw-r--r--meta/recipes-graphics/xorg-app/xrandr_1.4.3.bb (renamed from meta/recipes-graphics/xorg-app/xrandr_1.4.2.bb)4
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.1.bb (renamed from meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.0.bb)4
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-synaptics/always_include_xorg_server.h.patch60
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.8.0.bb (renamed from meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.7.4.bb)8
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-vmmouse/always_include_config.h.patch81
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.0.0.bb2
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.5.2.bb13
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-intel/always_include_xorg_server.h.patch24
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-intel_2.99.912.bb4
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-omapfb/0007-always_include_xorg_server.h.patch48
-rw-r--r--meta/recipes-graphics/xorg-driver/xf86-video-omapfb_git.bb1
-rw-r--r--meta/recipes-graphics/xorg-font/font-util_1.3.0.bb2
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcb_1.10.bb10
-rw-r--r--meta/recipes-graphics/xorg-lib/libxcb_1.11.bb10
-rw-r--r--meta/recipes-graphics/xorg-lib/libxkbcommon_0.5.0.bb (renamed from meta/recipes-graphics/xorg-lib/libxkbcommon_0.4.2.bb)8
-rw-r--r--meta/recipes-graphics/xorg-lib/pixman/mips-export-revert.patch22
-rw-r--r--meta/recipes-graphics/xorg-lib/pixman_0.32.6.bb (renamed from meta/recipes-graphics/xorg-lib/pixman_0.32.4.bb)5
-rw-r--r--meta/recipes-graphics/xorg-lib/xkeyboard-config_2.12.bb (renamed from meta/recipes-graphics/xorg-lib/xkeyboard-config_2.11.bb)6
-rw-r--r--meta/recipes-graphics/xorg-proto/xcb-proto_1.11.bb (renamed from meta/recipes-graphics/xorg-proto/xcb-proto_1.10.bb)4
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86-64/xorg.conf5
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86/xorg.conf5
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg.inc5
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg/aarch64.patch35
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg/crosscompile.patch22
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg/mips64-compiler.patch29
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg/xshmfence-option.patch32
-rw-r--r--meta/recipes-graphics/xorg-xserver/xserver-xorg_1.16.2.bb (renamed from meta/recipes-graphics/xorg-xserver/xserver-xorg_1.15.1.bb)9
-rw-r--r--meta/recipes-kernel/blktrace/blktrace/ldflags.patch8
-rw-r--r--meta/recipes-kernel/cryptodev/cryptodev_1.6.inc2
-rw-r--r--meta/recipes-kernel/dtc/dtc.inc12
-rw-r--r--meta/recipes-kernel/kern-tools/kern-tools-native_git.bb2
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools.inc2
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools/kexec-aarch64.patch801
-rw-r--r--meta/recipes-kernel/kexec/kexec-tools_2.0.8.bb (renamed from meta/recipes-kernel/kexec/kexec-tools_2.0.7.bb)8
-rw-r--r--meta/recipes-kernel/kmod/kmod.inc7
-rw-r--r--meta/recipes-kernel/kmod/kmod/0001-Add-missing-O_CLOEXEC-in-kmod_module_get_size.patch27
-rw-r--r--meta/recipes-kernel/kmod/kmod/Change-to-calling-bswap_-instead-of-htobe-and-be-toh.patch21
-rwxr-xr-xmeta/recipes-kernel/kmod/kmod/run-ptest2
-rw-r--r--meta/recipes-kernel/kmod/kmod_git.bb9
-rw-r--r--meta/recipes-kernel/latencytop/latencytop_0.5.bb8
-rw-r--r--meta/recipes-kernel/linux-firmware/linux-firmware_git.bb50
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers_3.14.bb7
-rw-r--r--meta/recipes-kernel/linux-libc-headers/linux-libc-headers_3.17.7.bb7
-rw-r--r--meta/recipes-kernel/linux/kernel-devsrc.bb65
-rw-r--r--meta/recipes-kernel/linux/linux-dummy.bb3
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-dev.bb3
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_3.10.bb14
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_3.14.bb14
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-rt_3.4.bb30
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_3.10.bb10
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_3.14.bb12
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_3.17.bb21
-rw-r--r--meta/recipes-kernel/linux/linux-yocto-tiny_3.4.bb26
-rw-r--r--meta/recipes-kernel/linux/linux-yocto.inc3
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_3.10.bb37
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_3.14.bb40
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_3.17.bb39
-rw-r--r--meta/recipes-kernel/linux/linux-yocto_3.4.bb38
-rw-r--r--meta/recipes-kernel/lttng/babeltrace/0001-Fix-Support-out-of-tree-builds-in-babeltrace.patch17
-rw-r--r--meta/recipes-kernel/lttng/babeltrace/Fix-Align-buffers-from-objstack_alloc-on-sizeof-void.patch54
-rw-r--r--meta/recipes-kernel/lttng/babeltrace_1.2.4.bb (renamed from meta/recipes-kernel/lttng/babeltrace_1.2.1.bb)11
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/Fix-noargs-probes-should-calculate-alignment-and-eve.patch130
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/Update-kvm-instrumentation-compile-on-3.17-rc1.patch46
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/Update-statedump-to-3.17-nsproxy-locking.patch70
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/compaction-fix-mm_compaction_isolate_template-build.patch41
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules/fix_build_with_v3.17_kernel.patch113
-rw-r--r--meta/recipes-kernel/lttng/lttng-modules_2.5.0.bb6
-rw-r--r--meta/recipes-kernel/lttng/lttng-tools_2.5.0.bb34
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust/add-aarch64.patch19
-rw-r--r--meta/recipes-kernel/lttng/lttng-ust_2.5.0.bb2
-rw-r--r--meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb10
-rw-r--r--meta/recipes-kernel/oprofile/oprofile.inc2
-rw-r--r--meta/recipes-kernel/oprofile/oprofile/0001-Add-freescale-e500mc-support.patch219
-rw-r--r--meta/recipes-kernel/oprofile/oprofile/0001-Add-rmb-definition-for-AArch64-architecture.patch31
-rw-r--r--meta/recipes-kernel/oprofile/oprofile/0001-Tidy-powerpc64-bfd-target-check.patch123
-rw-r--r--meta/recipes-kernel/oprofile/oprofile/0002-Add-freescale-e6500-support.patch364
-rw-r--r--meta/recipes-kernel/oprofile/oprofile/automake-foreign.patch5
-rw-r--r--meta/recipes-kernel/oprofile/oprofile/filemode-fix.patch41
-rw-r--r--meta/recipes-kernel/oprofile/oprofile/opstart.patch245
-rw-r--r--meta/recipes-kernel/oprofile/oprofile/root-home-dir.patch134
-rw-r--r--meta/recipes-kernel/oprofile/oprofile/run-ptest3
-rw-r--r--meta/recipes-kernel/oprofile/oprofile_0.9.9.bb17
-rw-r--r--meta/recipes-kernel/oprofile/oprofile_1.0.0.bb12
-rw-r--r--meta/recipes-kernel/oprofile/oprofile_git.bb11
-rw-r--r--meta/recipes-kernel/perf/perf-features.inc2
-rw-r--r--meta/recipes-kernel/perf/perf.bb37
-rw-r--r--meta/recipes-kernel/powertop/powertop_2.7.bb (renamed from meta/recipes-kernel/powertop/powertop_2.6.1.bb)9
-rw-r--r--meta/recipes-kernel/sysprof/sysprof_git.bb5
-rw-r--r--meta/recipes-kernel/systemtap/systemtap/configure-allow-to-disable-libvirt.patch39
-rw-r--r--meta/recipes-kernel/systemtap/systemtap/system_map_location.patch23
-rw-r--r--meta/recipes-kernel/systemtap/systemtap_git.bb3
-rw-r--r--meta/recipes-kernel/systemtap/systemtap_git.inc6
-rw-r--r--meta/recipes-multimedia/alsa/alsa-lib/Check-if-wordexp-function-is-supported.patch31
-rw-r--r--meta/recipes-multimedia/alsa/alsa-lib/Update-iatomic.h-functions-definitions-for-mips.patch68
-rw-r--r--meta/recipes-multimedia/alsa/alsa-lib_1.0.28.bb (renamed from meta/recipes-multimedia/alsa/alsa-lib_1.0.27.2.bb)9
-rw-r--r--meta/recipes-multimedia/alsa/alsa-tools/mips_has_no_io_h.patch16
-rw-r--r--meta/recipes-multimedia/alsa/alsa-tools_1.0.28.bb (renamed from meta/recipes-multimedia/alsa/alsa-tools_1.0.27.bb)7
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils-alsaconf_1.0.28.bb (renamed from meta/recipes-multimedia/alsa/alsa-utils-alsaconf_1.0.27.2.bb)0
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils/0001-alsactl-don-t-let-systemd-unit-restore-the-volume-wh.patch42
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils/alsa-utils-aplay-interrupt-signal-handling.patch48
-rw-r--r--meta/recipes-multimedia/alsa/alsa-utils_1.0.28.bb (renamed from meta/recipes-multimedia/alsa/alsa-utils_1.0.27.2.bb)5
-rw-r--r--meta/recipes-multimedia/flac/flac_1.3.0.bb2
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-avcodec-smc-fix-off-by-1-error.patch32
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-h264-set-parameters-from-SPS-whenever-it-changes.patch145
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-h264-skip-error-concealment-when-SPS-and-slices-are-.patch33
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0002-avcodec-mjpegdec-check-bits-per-pixel-for-changes-si.patch68
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-ffmpeg_0.10.13.bb4
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-meta-base_0.10.bb7
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-plugins-bad_0.10.23.bb1
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-plugins-base-0.10.36/audioresample-Fix-build-on-x86-if-emmintrin.h-is-ava.patch37
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-plugins-base_0.10.36.bb4
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-plugins-package.inc2
-rw-r--r--meta/recipes-multimedia/gstreamer/gst-plugins.inc6
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.4.1.bb (renamed from meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.4.0.bb)7
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_git.bb8
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb5
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc2
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_git.bb5
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad.inc20
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-gl-do-not-check-for-GL-GLU-EGL-GLES2-libs-if-disable.patch41
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/configure-allow-to-disable-libssh2.patch64
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.4.1.bb (renamed from meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.4.0.bb)7
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_git.bb5
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.4.1.bb (renamed from meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.4.0.bb)4
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_git.bb5
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.4.1.bb (renamed from meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.4.0.bb)5
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_git.bb5
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.4.1.bb (renamed from meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.4.0.bb)4
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_git.bb5
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.4.0.bb6
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.4.1.bb6
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0_1.4.1.bb (renamed from meta/recipes-multimedia/gstreamer/gstreamer1.0_1.4.0.bb)5
-rw-r--r--meta/recipes-multimedia/gstreamer/gstreamer1.0_git.bb5
-rw-r--r--meta/recipes-multimedia/libpng/libpng/0001-configure-lower-automake-requirement.patch31
-rw-r--r--meta/recipes-multimedia/libpng/libpng_1.6.10.bb29
-rw-r--r--meta/recipes-multimedia/libpng/libpng_1.6.13.bb28
-rw-r--r--meta/recipes-multimedia/libtiff/files/libtiff-CVE-2013-1961.patch786
-rw-r--r--meta/recipes-multimedia/libtiff/tiff_4.0.3.bb1
-rw-r--r--meta/recipes-multimedia/pulseaudio/files/0001-libatomic_ops-Aarch64-basic-port.patch239
-rw-r--r--meta/recipes-multimedia/pulseaudio/libatomics-ops_7.2.bb1
-rw-r--r--meta/recipes-multimedia/sbc/sbc_1.3.bb (renamed from meta/recipes-multimedia/sbc/sbc_1.2.bb)4
-rw-r--r--meta/recipes-qt/meta/meta-toolchain-qt.inc25
-rw-r--r--meta/recipes-qt/qt-demo/qt-demo-init/qtdemo-init10
-rw-r--r--meta/recipes-qt/qt4/qt-mobility_1.2.0.inc2
-rw-r--r--meta/recipes-qt/qt4/qt4-4.8.6.inc12
-rw-r--r--meta/recipes-qt/qt4/qt4-4.8.6/0007-dbus-Remove-const-usage-that-causes-compile-failure-.patch2
-rw-r--r--meta/recipes-qt/qt4/qt4-4.8.6/0012-Add-2bpp-support.patch2
-rw-r--r--meta/recipes-qt/qt4/qt4-4.8.6/0029-aarch64_arm64_fix_arch_detection.patch53
-rw-r--r--meta/recipes-qt/qt4/qt4-4.8.6/0030-aarch64_arm64_qatomic_support.patch491
-rw-r--r--meta/recipes-qt/qt4/qt4-4.8.6/0031-aarch64_arm64_mkspecs.patch124
-rw-r--r--meta/recipes-qt/qt4/qt4-4.8.6/0032-aarch64_add_header.patch18
-rw-r--r--meta/recipes-qt/qt4/qt4-native.inc4
-rw-r--r--meta/recipes-qt/qt4/qt4.inc2
-rw-r--r--meta/recipes-qt/qt4/qt4_arch.inc1
-rw-r--r--meta/recipes-sato/images/core-image-sato-sdk.bb2
-rw-r--r--meta/recipes-sato/midori/midori_0.5.5.bb47
-rw-r--r--meta/recipes-sato/midori/midori_0.5.8.bb26
-rw-r--r--meta/recipes-sato/packagegroups/packagegroup-core-x11-sato.bb4
-rw-r--r--meta/recipes-sato/puzzles/files/fix-compiling-failure-with-option-g-O.patch52
-rw-r--r--meta/recipes-sato/puzzles/puzzles_r10116.bb4
-rw-r--r--meta/recipes-support/apr/apr-util_1.5.4.bb (renamed from meta/recipes-support/apr/apr-util_1.5.3.bb)4
-rw-r--r--meta/recipes-support/apr/apr_1.5.1.bb8
-rw-r--r--meta/recipes-support/aspell/aspell_0.60.6.1.bb2
-rw-r--r--meta/recipes-support/atk/at-spi2-core_2.12.0.bb5
-rw-r--r--meta/recipes-support/atk/files/core_acinclude_m4.patch40
-rw-r--r--meta/recipes-support/attr/acl.inc2
-rw-r--r--meta/recipes-support/attr/ea-acl.inc2
-rw-r--r--meta/recipes-support/beecrypt/beecrypt/add-option-dev-dsp.patch34
-rw-r--r--meta/recipes-support/beecrypt/beecrypt/run-ptest2
-rw-r--r--meta/recipes-support/beecrypt/beecrypt_4.2.1.bb1
-rw-r--r--meta/recipes-support/boost/bjam-native_1.57.0.bb (renamed from meta/recipes-support/boost/bjam-native_1.56.0.bb)0
-rw-r--r--meta/recipes-support/boost/boost-1.57.0.inc (renamed from meta/recipes-support/boost/boost-1.56.0.inc)4
-rw-r--r--meta/recipes-support/boost/boost.inc30
-rw-r--r--meta/recipes-support/boost/boost_1.56.0.bb4
-rw-r--r--meta/recipes-support/boost/boost_1.57.0.bb6
-rw-r--r--meta/recipes-support/curl/curl_7.38.0.bb (renamed from meta/recipes-support/curl/curl_7.37.1.bb)11
-rw-r--r--meta/recipes-support/db/db_5.3.28.bb1
-rw-r--r--meta/recipes-support/db/db_6.0.30.bb1
-rw-r--r--meta/recipes-support/debianutils/debianutils_4.4.bb35
-rw-r--r--meta/recipes-support/gdbm/gdbm-1.8.3/ldflags.patch22
-rw-r--r--meta/recipes-support/gdbm/gdbm_1.8.3.bb3
-rw-r--r--meta/recipes-support/gmp/gmp/gmp-6.0.0-ppc64.patch26
-rw-r--r--meta/recipes-support/gmp/gmp_6.0.0.bb1
-rw-r--r--meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing/0001-gsystem-subprocess.c-Enable-GNU-extensions-in-system.patch35
-rw-r--r--meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing_2014.1.bb4
-rw-r--r--meta/recipes-support/gnupg/gnupg-1.4.7/CVE-2013-4242.patch62
-rw-r--r--meta/recipes-support/gnupg/gnupg_1.4.7.bb2
-rw-r--r--meta/recipes-support/gnupg/gnupg_2.0.26.bb1
-rw-r--r--meta/recipes-support/gnutls/gnutls.inc3
-rw-r--r--meta/recipes-support/gnutls/gnutls_3.3.11.bb6
-rw-r--r--meta/recipes-support/gnutls/gnutls_3.3.5.bb7
-rw-r--r--meta/recipes-support/gpgme/gpgme-1.4.3/gpgme.pc10
-rw-r--r--meta/recipes-support/gpgme/gpgme_1.4.3.bb8
-rw-r--r--meta/recipes-support/icu/icu.inc1
-rw-r--r--meta/recipes-support/libassuan/libassuan/libassuan-add-pkgconfig-support.patch38
-rw-r--r--meta/recipes-support/libassuan/libassuan_2.1.2.bb (renamed from meta/recipes-support/libassuan/libassuan_2.1.1.bb)6
-rw-r--r--meta/recipes-support/libcap/libcap/fix-CAP_LAST_CAP.patch39
-rw-r--r--meta/recipes-support/libcap/libcap_2.22.bb6
-rw-r--r--meta/recipes-support/libcap/libcap_2.24.bb (renamed from meta/recipes-support/libcap/libcap.inc)12
-rw-r--r--meta/recipes-support/libgcrypt/libgcrypt_1.6.1.bb4
-rw-r--r--meta/recipes-support/libgcrypt/libgcrypt_1.6.2.bb4
-rw-r--r--meta/recipes-support/libical/files/pthread-fix.patch52
-rw-r--r--meta/recipes-support/libical/libical_0.48.bb16
-rw-r--r--meta/recipes-support/libical/libical_1.0.0.bb13
-rw-r--r--meta/recipes-support/libiconv/libiconv_1.11.1.bb4
-rw-r--r--meta/recipes-support/libiconv/libiconv_1.14.bb4
-rw-r--r--meta/recipes-support/libksba/libksba_1.3.1.bb (renamed from meta/recipes-support/libksba/libksba_1.3.0.bb)5
-rw-r--r--meta/recipes-support/libpcre/libpcre/Makefile2
-rw-r--r--meta/recipes-support/libpcre/libpcre_8.36.bb (renamed from meta/recipes-support/libpcre/libpcre_8.35.bb)4
-rw-r--r--meta/recipes-support/libproxy/libproxy_0.4.11.bb2
-rw-r--r--meta/recipes-support/libunistring/libunistring/iconv-m4-remove-the-test-to-convert-euc-jp.patch20
-rw-r--r--meta/recipes-support/libunistring/libunistring/libunistring_fix_for_automake_1.12.patch81
-rw-r--r--meta/recipes-support/libunistring/libunistring/parallelmake.patch26
-rw-r--r--meta/recipes-support/libunistring/libunistring_0.9.4.bb (renamed from meta/recipes-support/libunistring/libunistring_0.9.3.bb)8
-rw-r--r--meta/recipes-support/libunwind/libunwind-1.1/AArch64-port.patch2529
-rw-r--r--meta/recipes-support/libunwind/libunwind-1.1/Support-building-with-older-compilers.patch72
-rw-r--r--meta/recipes-support/libunwind/libunwind.inc31
-rw-r--r--meta/recipes-support/libunwind/libunwind_1.1.bb9
-rw-r--r--meta/recipes-support/liburcu/files/aarch64.patch19
-rw-r--r--meta/recipes-support/liburcu/liburcu/Revert-Blacklist-ARM-gcc-4.8.0-4.8.1-4.8.2.patch47
-rw-r--r--meta/recipes-support/liburcu/liburcu_0.8.5.bb (renamed from meta/recipes-support/liburcu/liburcu_0.8.4.bb)6
-rw-r--r--meta/recipes-support/libxslt/libxslt_1.1.28.bb5
-rw-r--r--meta/recipes-support/lz4/lz4_svn.bb4
-rw-r--r--meta/recipes-support/lzo/lzo/0001-Use-memcpy-instead-of-reinventing-it.patch70
-rw-r--r--meta/recipes-support/lzo/lzo_2.08.bb1
-rw-r--r--meta/recipes-support/npth/npth/pkgconfig.patch49
-rw-r--r--meta/recipes-support/npth/npth_1.1.bb (renamed from meta/recipes-support/npth/npth_0.91.bb)6
-rw-r--r--meta/recipes-support/nspr/nspr/fix-build-on-x86_64.patch31
-rw-r--r--meta/recipes-support/nspr/nspr/remove-srcdir-from-configure-in.patch19
-rw-r--r--meta/recipes-support/nspr/nspr/trickly-fix-build-on-x86_64.patch62
-rw-r--r--meta/recipes-support/nspr/nspr_4.10.7.bb (renamed from meta/recipes-support/nspr/nspr_4.10.3.bb)13
-rw-r--r--meta/recipes-support/nss-myhostname/nss-myhostname_0.3.bb9
-rw-r--r--meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-1739.patch81
-rw-r--r--meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-1741.patch92
-rw-r--r--meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-5605.patch18
-rw-r--r--meta/recipes-support/nss/files/nss-CVE-2013-1740.patch916
-rw-r--r--meta/recipes-support/nss/files/nss-CVE-2013-5606.patch48
-rw-r--r--meta/recipes-support/nss/files/nss-CVE-2014-1492.patch68
-rw-r--r--meta/recipes-support/nss/nss.inc16
-rw-r--r--meta/recipes-support/nss/nss/nss-fix-incorrect-shebang-of-perl.patch (renamed from meta/recipes-support/nss/files/nss-fix-incorrect-shebang-of-perl.patch)0
-rw-r--r--meta/recipes-support/nss/nss/nss-fix-support-cross-compiling.patch (renamed from meta/recipes-support/nss/files/nss-fix-support-cross-compiling.patch)0
-rw-r--r--meta/recipes-support/nss/nss/nss-no-rpath-for-cross-compiling.patch (renamed from meta/recipes-support/nss/files/nss-no-rpath-for-cross-compiling.patch)0
-rw-r--r--meta/recipes-support/nss/nss/nss.pc.in (renamed from meta/recipes-support/nss/files/nss.pc.in)0
-rw-r--r--meta/recipes-support/nss/nss/signlibs.sh (renamed from meta/recipes-support/nss/files/signlibs.sh)0
-rw-r--r--meta/recipes-support/nss/nss_3.15.1.bb9
-rw-r--r--meta/recipes-support/nss/nss_3.17.2.bb8
-rw-r--r--meta/recipes-support/pinentry/pinentry_0.9.0.bb27
-rw-r--r--meta/recipes-support/ptest-runner/files/ptest-runner8
-rw-r--r--meta/recipes-support/serf/serf/env.patch28
-rw-r--r--meta/recipes-support/serf/serf_1.3.8.bb (renamed from meta/recipes-support/serf/serf_1.3.6.bb)15
-rw-r--r--meta/recipes-support/sqlite/sqlite3_3.8.7.4.bb (renamed from meta/recipes-support/sqlite/sqlite3_3.8.6.0.bb)4
-rw-r--r--meta/site/arm-3247
-rw-r--r--meta/site/arm-6446
-rw-r--r--meta/site/arm-common38
-rwxr-xr-xoe-init-build-env-memres8
-rwxr-xr-xscripts/contrib/build-perf-test.sh6
-rwxr-xr-xscripts/contrib/list-packageconfig-flags.py4
-rwxr-xr-xscripts/contrib/python/generate-manifest-2.7.py7
-rwxr-xr-xscripts/contrib/python/generate-manifest-3.3.py9
-rwxr-xr-xscripts/create-recipe3
-rwxr-xr-xscripts/devtool255
-rwxr-xr-xscripts/gen-lockedsig-cache40
-rw-r--r--scripts/lib/bsp/engine.py173
-rw-r--r--scripts/lib/bsp/help.py1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/conf/machine/{{=machine}}.conf105
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/{{ if xserver == "y": }} xorg.conf)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend2
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-non_hardware.cfg (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-non_hardware.cfg)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-preempt-rt.scc (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-standard.scc (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-tiny.scc (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-config.cfg1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-features.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-patches.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine.cfg (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine.scc (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall4
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.14.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.17.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend)3
-rw-r--r--scripts/lib/bsp/substrate/target/arch/common/recipes-bsp/formfactor/formfactor/machine.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/common/recipes-bsp/formfactor/formfactor/machine/machconfig (renamed from scripts/lib/bsp/substrate/target/arch/common/recipes-bsp/formfactor/formfactor/{{=machine}}/machconfig)0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/kernel-list.noinstall8
-rw-r--r--scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.bb (renamed from scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom.bb)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/defconfig (renamed from scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/defconfig)0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine-user-config.cfg (renamed from scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}-user-config.cfg)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine-user-patches.scc (renamed from scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}-user-patches.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine.cfg (renamed from scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}.cfg)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine.scc (renamed from scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf (renamed from scripts/lib/bsp/substrate/target/arch/i386/conf/machine/{{=machine}}.conf)21
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend2
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-preempt-rt.scc (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-standard.scc (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-tiny.scc (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-config.cfg1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-features.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-patches.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine.cfg (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine.scc (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall4
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.14.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.17.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend/example-bbappend/example-bbappend-version.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_bbappend == "y": }} recipes-example-bbappend/example-bbappend/{{=example_bbappend_name}}_{{=example_bbappend_version}}.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend/example-bbappend/example-bbappend-version.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend/example-bbappend/example-bbappend-version/example.patch (renamed from scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_bbappend == "y": }} recipes-example-bbappend/example-bbappend/{{=example_bbappend_name}}-{{=example_bbappend_version}}/example.patch)0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/layer/recipes-example.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1.bb (renamed from scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_recipe == "y": }} recipes-example/example/{{=example_recipe_name}}_0.1.bb)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1/example.patch (renamed from scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_recipe == "y": }} recipes-example/example/{{=example_recipe_name}}-0.1/example.patch)0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1/helloworld.c (renamed from scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_recipe == "y": }} recipes-example/example/{{=example_recipe_name}}-0.1/helloworld.c)0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/conf/machine/machine.conf (renamed from scripts/lib/bsp/substrate/target/arch/mips/conf/machine/{{=machine}}.conf)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-preempt-rt.scc (renamed from scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-standard.scc (renamed from scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-tiny.scc (renamed from scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-config.cfg1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-features.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-patches.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.cfg2
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.scc (renamed from scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall4
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.14.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.17.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend32
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/.gitignore (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc)0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/conf/machine/machine.conf39
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-preempt-rt.scc10
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-standard.scc10
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-tiny.scc10
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-config.cfg1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-features.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-patches.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.cfg66
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.scc8
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall5
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.10.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.17.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/{{=machine}}.conf)13
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-preempt-rt.scc (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-standard.scc (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-tiny.scc (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-config.cfg1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-features.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-patches.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine.cfg (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine.scc (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall4
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend26
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend)3
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.14.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.17.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/conf/machine/machine.conf (renamed from scripts/lib/bsp/substrate/target/arch/qemu/conf/machine/{{=machine}}.conf)7
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-core/init-ifupdown/init-ifupdown/machine.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-core/init-ifupdown/init-ifupdown/machine/interfaces5
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/xorg.conf)4
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-preempt-rt.scc (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-standard.scc (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc)3
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-tiny.scc (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-config.cfg1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-features.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-patches.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.cfg1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.scc (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall4
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend)11
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend)7
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend62
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend)7
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend62
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.4.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.4.bbappend)7
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend)11
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.14.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend)11
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.17.bbappend62
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/conf/machine/machine.conf (renamed from scripts/lib/bsp/substrate/target/arch/x86_64/conf/machine/{{=machine}}.conf)11
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/{{ if xserver == "y": }} xorg.conf0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend2
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files.noinstall1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-preempt-rt.scc (renamed from scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-standard.scc (renamed from scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-tiny.scc (renamed from scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-config.cfg1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-features.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-patches.scc1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine.cfg (renamed from scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine.scc (renamed from scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall4
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend26
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.10.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.14.bbappend (renamed from scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend)1
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.17.bbappend33
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc0
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend25
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend32
-rw-r--r--scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend32
-rw-r--r--scripts/lib/bsp/tags.py12
-rw-r--r--scripts/lib/devtool/__init__.py78
-rw-r--r--scripts/lib/devtool/deploy.py100
-rw-r--r--scripts/lib/devtool/standard.py545
-rw-r--r--scripts/lib/image/canned-wks/sdimage-bootpart.wks6
-rw-r--r--scripts/lib/image/engine.py37
-rw-r--r--scripts/lib/image/help.py23
-rw-r--r--scripts/lib/recipetool/__init__.py (renamed from scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/{{ if xserver == "y": }} xorg.conf)0
-rw-r--r--scripts/lib/recipetool/create.py413
-rw-r--r--scripts/lib/recipetool/create_buildsys.py319
-rw-r--r--scripts/lib/scriptutils.py60
-rw-r--r--scripts/lib/wic/3rdparty/pykickstart/parser.py87
-rw-r--r--scripts/lib/wic/3rdparty/pykickstart/version.py29
-rw-r--r--scripts/lib/wic/conf.py2
-rw-r--r--scripts/lib/wic/creator.py2
-rw-r--r--scripts/lib/wic/imager/baseimager.py2
-rw-r--r--scripts/lib/wic/imager/direct.py21
-rw-r--r--scripts/lib/wic/kickstart/__init__.py2
-rw-r--r--scripts/lib/wic/kickstart/custom_commands/micboot.py2
-rw-r--r--scripts/lib/wic/kickstart/custom_commands/micpartition.py2
-rw-r--r--scripts/lib/wic/kickstart/custom_commands/partition.py38
-rw-r--r--scripts/lib/wic/msger.py2
-rw-r--r--scripts/lib/wic/plugin.py2
-rw-r--r--scripts/lib/wic/pluginbase.py2
-rw-r--r--scripts/lib/wic/plugins/imager/direct_plugin.py20
-rw-r--r--scripts/lib/wic/plugins/source/bootimg-efi.py5
-rw-r--r--scripts/lib/wic/plugins/source/bootimg-partition.py138
-rw-r--r--scripts/lib/wic/plugins/source/bootimg-pcbios.py14
-rw-r--r--scripts/lib/wic/utils/errors.py2
-rw-r--r--scripts/lib/wic/utils/fs_related.py2
-rw-r--r--scripts/lib/wic/utils/misc.py2
-rw-r--r--scripts/lib/wic/utils/oe/misc.py1
-rw-r--r--scripts/lib/wic/utils/partitionedfs.py24
-rw-r--r--scripts/lib/wic/utils/runner.py2
-rwxr-xr-xscripts/oe-git-proxy2
-rwxr-xr-xscripts/pybootchartgui/pybootchartgui.py2
-rwxr-xr-xscripts/recipetool99
-rwxr-xr-xscripts/runqemu5
-rwxr-xr-xscripts/runqemu-internal26
-rwxr-xr-xscripts/wic12
1517 files changed, 74520 insertions, 37203 deletions
diff --git a/README.hardware b/README.hardware
index d8faaa3bdb..a97f104117 100644
--- a/README.hardware
+++ b/README.hardware
@@ -251,14 +251,14 @@ if used via a usb card reader):
5. If using core-image-minimal rootfs, install the modules
# tar x -C /media/root -f modules-beaglebone.tgz
- 6. If using core-image-minimal rootfs, install the kernel uImage into /boot
+ 6. If using core-image-minimal rootfs, install the kernel zImage into /boot
directory of rootfs
- # cp uImage-beaglebone.bin /media/root/boot/uImage
+ # cp zImage-beaglebone.bin /media/root/boot/zImage
7. If using core-image-minimal rootfs, also install device tree (DTB) files
into /boot directory of rootfs
- # cp uImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
- # cp uImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb
+ # cp zImage-am335x-bone.dtb /media/root/boot/am335x-bone.dtb
+ # cp zImage-am335x-boneblack.dtb /media/root/boot/am335x-boneblack.dtb
8. Unmount the SD partitions, insert the SD card into the Beaglebone, and
boot the Beaglebone
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index b8acd6e631..d46c3dde3b 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -41,7 +41,7 @@ from bb import ui
from bb import server
from bb import cookerdata
-__version__ = "1.23.1"
+__version__ = "1.25.0"
logger = logging.getLogger("BitBake")
# Python multiprocessing requires /dev/shm
@@ -196,6 +196,9 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
action = "store_true", dest = "status_only", default = False)
+ parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.",
+ action = "store", dest = "writeeventlog")
+
options, targets = parser.parse_args(sys.argv)
# some environmental variables set also configuration options
@@ -206,6 +209,14 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
if "BBTOKEN" in os.environ:
options.xmlrpctoken = os.environ["BBTOKEN"]
+ if "BBEVENTLOG" is os.environ:
+ options.writeeventlog = os.environ["BBEVENTLOG"]
+
+ # fill in proper log name if not supplied
+ if options.writeeventlog is not None and len(options.writeeventlog) == 0:
+ import datetime
+ options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S")
+
# if BBSERVER says to autodetect, let's do that
if options.remote_server:
[host, port] = options.remote_server.split(":", 2)
@@ -266,7 +277,6 @@ def start_server(servermodule, configParams, configuration, features):
return server
-
def main():
configParams = BitBakeConfigParameters()
@@ -372,7 +382,7 @@ def main():
bb.event.ui_queue = []
server_connection.terminate()
else:
- print("server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
+ print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
return 0
return 1
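
The new --write-log/-w option (and the BBEVENTLOG environment variable) makes the cooker record every build event to a JSON event log; an empty value picks a timestamped file name automatically. A hedged usage sketch; the target name is hypothetical and the log lands in the directory bitbake is run from:

    # Let bitbake pick the name: bitbake_eventlog_<YYYYMMDDHHMMSS>.json
    bitbake -w '' core-image-minimal

    # Or name the log explicitly via the environment
    BBEVENTLOG=mybuild-events.json bitbake core-image-minimal
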
diff --git a/bitbake/bin/bitbake-layers b/bitbake/bin/bitbake-layers
index 9964040bf7..98794981a0 100755
--- a/bitbake/bin/bitbake-layers
+++ b/bitbake/bin/bitbake-layers
@@ -103,6 +103,60 @@ class Commands(cmd.Cmd):
logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), layerpri))
+ def do_add_layer(self, dirname):
+ """Add a layer to bblayers.conf
+
+usage: add-layer <layerdir>
+"""
+ if not dirname:
+ sys.stderr.write("Please specify the layer directory to add\n")
+ return
+
+ layerdir = os.path.abspath(dirname)
+ if not os.path.exists(layerdir):
+ sys.stderr.write("Specified layer directory doesn't exist\n")
+ return
+
+ layer_conf = os.path.join(layerdir, 'conf', 'layer.conf')
+ if not os.path.exists(layer_conf):
+ sys.stderr.write("Specified layer directory doesn't contain a conf/layer.conf file\n")
+ return
+
+ bblayers_conf = os.path.join('conf', 'bblayers.conf')
+ if not os.path.exists(bblayers_conf):
+ sys.stderr.write("Unable to find bblayers.conf\n")
+ return
+
+ (notadded, _) = bb.utils.edit_bblayers_conf(bblayers_conf, layerdir, None)
+ if notadded:
+ for item in notadded:
+ sys.stderr.write("Specified layer %s is already in BBLAYERS\n" % item)
+
+
+ def do_remove_layer(self, dirname):
+ """Remove a layer from bblayers.conf
+
+usage: remove-layer <layerdir>
+"""
+ if not dirname:
+ sys.stderr.write("Please specify the layer directory to remove\n")
+ return
+
+ bblayers_conf = os.path.join('conf', 'bblayers.conf')
+ if not os.path.exists(bblayers_conf):
+ sys.stderr.write("Unable to find bblayers.conf\n")
+ return
+
+ if dirname.startswith('*'):
+ layerdir = dirname
+ else:
+ layerdir = os.path.abspath(dirname)
+ (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdir)
+ if notremoved:
+ for item in notremoved:
+ sys.stderr.write("No layers matching %s found in BBLAYERS\n" % item)
+
+
def version_str(self, pe, pv, pr = None):
verstr = "%s" % pv
if pr:
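
The new add-layer and remove-layer commands edit conf/bblayers.conf in the current build directory through bb.utils.edit_bblayers_conf(). A minimal usage sketch (the layer path is hypothetical):

    # Run from the build directory that contains conf/bblayers.conf
    bitbake-layers add-layer ../meta-mylayer
    bitbake-layers remove-layer ../meta-mylayer
    # remove-layer also appears to accept a pattern starting with '*',
    # e.g. remove-layer '*/meta-mylayer'
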
diff --git a/bitbake/bin/bitbake-worker b/bitbake/bin/bitbake-worker
index 05e0cf6313..371c99a677 100755
--- a/bitbake/bin/bitbake-worker
+++ b/bitbake/bin/bitbake-worker
@@ -12,10 +12,18 @@ import errno
import signal
# Users shouldn't be running this code directly
-if len(sys.argv) != 2 or sys.argv[1] != "decafbad":
+if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
sys.exit(1)
+profiling = False
+if sys.argv[1] == "decafbadbad":
+ profiling = True
+ try:
+ import cProfile as profile
+ except:
+ import profile
+
logger = logging.getLogger("BitBake")
try:
@@ -81,6 +89,11 @@ def workerlog_write(msg):
lf.write(msg)
lf.flush()
+def sigterm_handler(signum, frame):
+ signal.signal(signal.SIGTERM, signal.SIG_DFL)
+ os.killpg(0, signal.SIGTERM)
+ sys.exit()
+
def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False):
# We need to setup the environment BEFORE the fork, since
# a fork() or exec*() activates PSEUDO...
@@ -129,10 +142,13 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
bb.msg.fatal("RunQueue", "fork failed: %d (%s)" % (e.errno, e.strerror))
if pid == 0:
+ def child():
global worker_pipe
pipein.close()
- signal.signal(signal.SIGTERM, signal.SIG_DFL)
+ signal.signal(signal.SIGTERM, sigterm_handler)
+ # Let SIGHUP exit as SIGTERM
+ signal.signal(signal.SIGHUP, sigterm_handler)
# Save out the PID so that the event can include it the
# events
@@ -154,7 +170,7 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
data.setVar("BUILDNAME", workerdata["buildname"])
data.setVar("DATE", workerdata["date"])
data.setVar("TIME", workerdata["time"])
- bb.parse.siggen.set_taskdata(workerdata["hashes"], workerdata["hash_deps"], workerdata["sigchecksums"])
+ bb.parse.siggen.set_taskdata(workerdata["sigdata"])
ret = 0
try:
the_data = bb.cache.Cache.loadDataFull(fn, appends, data)
@@ -179,11 +195,22 @@ def fork_off_task(cfg, data, workerdata, fn, task, taskname, appends, taskdepdat
logger.critical(str(exc))
os._exit(1)
try:
- if not cfg.dry_run:
- ret = bb.build.exec_task(fn, taskname, the_data, cfg.profile)
- os._exit(ret)
+ if cfg.dry_run:
+ return 0
+ return bb.build.exec_task(fn, taskname, the_data, cfg.profile)
except:
os._exit(1)
+ if not profiling:
+ os._exit(child())
+ else:
+ profname = "profile-%s.log" % (fn.replace("/", "-") + "-" + taskname)
+ prof = profile.Profile()
+ try:
+ ret = profile.Profile.runcall(prof, child)
+ finally:
+ prof.dump_stats(profname)
+ bb.utils.process_profilelog(profname)
+ os._exit(ret)
else:
for key, value in envbackup.iteritems():
if value is None:
@@ -241,9 +268,14 @@ class BitbakeWorker(object):
self.build_pipes = {}
signal.signal(signal.SIGTERM, self.sigterm_exception)
+ # Let SIGHUP exit as SIGTERM
+ signal.signal(signal.SIGHUP, self.sigterm_exception)
def sigterm_exception(self, signum, stackframe):
- bb.warn("Worker recieved SIGTERM, shutting down...")
+ if signum == signal.SIGTERM:
+ bb.warn("Worker recieved SIGTERM, shutting down...")
+ elif signum == signal.SIGHUP:
+ bb.warn("Worker recieved SIGHUP, shutting down...")
self.handle_finishnow(None)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
os.kill(os.getpid(), signal.SIGTERM)
@@ -358,7 +390,16 @@ class BitbakeWorker(object):
try:
worker = BitbakeWorker(sys.stdin)
- worker.serve()
+ if not profiling:
+ worker.serve()
+ else:
+ profname = "profile-worker.log"
+ prof = profile.Profile()
+ try:
+ profile.Profile.runcall(prof, worker.serve)
+ finally:
+ prof.dump_stats(profname)
+ bb.utils.process_profilelog(profname)
except BaseException as e:
if not normalexit:
import traceback
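
When the worker is started with the extended magic word ("decafbadbad") it wraps both its serve loop and every forked task in cProfile and dumps the results for bb.utils.process_profilelog() to post-process. A hedged sketch of what to look for afterwards, assuming profiling was requested and the dumps end up in the directory the worker runs from:

    # Worker-level profile plus one per-task dump named
    # profile-<recipe-path-with-slashes-replaced>-<task>.log
    ls profile-worker.log profile-*-do_compile.log
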
diff --git a/bitbake/bin/toaster b/bitbake/bin/toaster
index ce16de6c3b..7511012552 100755
--- a/bitbake/bin/toaster
+++ b/bitbake/bin/toaster
@@ -37,7 +37,7 @@ function webserverKillAll()
kill -SIGTERM -$(< ${pidfile}) 2>/dev/null
sleep 1;
# Kill processes if they are still running - may happen in interactive shells
- ps fux | grep "python.*manage.py" | awk '{print $2}' | xargs kill
+ ps fux | grep "python.*manage.py runserver" | awk '{print $2}' | xargs kill
done;
rm ${pidfile}
fi
@@ -65,15 +65,17 @@ function webserverStartAll()
fi
if [ "x$TOASTER_MANAGED" == "x1" ]; then
python $BBBASEDIR/lib/toaster/manage.py migrate bldcontrol || retval=1
- python $BBBASEDIR/lib/toaster/manage.py checksettings || retval=1
+ python $BBBASEDIR/lib/toaster/manage.py checksettings --traceback || retval=1
fi
- echo "Starting webserver"
if [ $retval -eq 0 ]; then
- python $BBBASEDIR/lib/toaster/manage.py runserver 0.0.0.0:8000 </dev/null >${BUILDDIR}/toaster_web.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
+ echo "Starting webserver"
+ python $BBBASEDIR/lib/toaster/manage.py runserver "0.0.0.0:$WEB_PORT" </dev/null >${BUILDDIR}/toaster_web_$$.log 2>&1 & echo $! >${BUILDDIR}/.toastermain.pid
sleep 1
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
retval=1
rm "${BUILDDIR}/.toastermain.pid"
+ else
+ echo "Webserver address: http://0.0.0.0:$WEB_PORT/"
fi
fi
return $retval
@@ -83,8 +85,10 @@ function webserverStartAll()
function addtoConfiguration()
{
- echo "#Created by toaster start script" > ${BUILDDIR}/conf/$2
- echo $1 >> ${BUILDDIR}/conf/$2
+ file=$1
+ shift
+ echo "#Created by toaster start script" > ${BUILDDIR}/conf/$file
+ for var in "$@"; do echo $var >> ${BUILDDIR}/conf/$file; done
}
INSTOPSYSTEM=0
@@ -99,7 +103,7 @@ function stop_system()
kill $(< ${BUILDDIR}/.toasterui.pid ) 2>/dev/null
rm ${BUILDDIR}/.toasterui.pid
fi
- BBSERVER=localhost:8200 bitbake -m
+ BBSERVER=0.0.0.0:-1 bitbake -m
unset BBSERVER
webserverKillAll
# force stop any misbehaving bitbake server
@@ -122,9 +126,46 @@ function notify_chldexit() {
}
+# Verify prerequisites
+
+if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (5,6)" | python 2>/dev/null | grep True >/dev/null; then
+ echo -e "This program needs Django 1.5 or 1.6. Please install with\n\npip install django==1.6"
+ return 2
+fi
+
+if ! echo "import south; print [0,8,4] == map(int,south.__version__.split(\".\"))" | python 2>/dev/null | grep True >/dev/null; then
+ echo -e "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4"
+ return 2
+fi
+
+
+# read command line parameters
+
BBBASEDIR=`dirname ${BASH_SOURCE}`/..
RUNNING=0
+NOTOASTERUI=0
+WEBSERVER=1
+TOASTER_BRBE=""
+WEB_PORT="8000"
+
+for param in $*; do
+ case $param in
+ noui )
+ NOTOASTERUI=1
+ ;;
+ noweb )
+ WEBSERVER=0
+ ;;
+ brbe=* )
+ TOASTER_BRBE=$'\n'"TOASTER_BRBE=\""${param#*=}"\""
+ ;;
+ webport=*)
+ WEB_PORT="${param#*=}"
+ esac
+done
+
+
if [ -z "$ZSH_NAME" ] && [ `basename \"$0\"` = `basename \"$BASH_SOURCE\"` ]; then
# We are called as standalone. We refuse to run in a build environment - we need the interactive mode for that.
# Start just the web server, point the web browser to the interface, and start any Django services.
@@ -144,8 +185,11 @@ if [ -z "$ZSH_NAME" ] && [ `basename \"$0\"` = `basename \"$BASH_SOURCE\"` ]; th
}
TOASTER_MANAGED=1
export TOASTER_MANAGED=1
- webserverStartAll || (echo "Fail to start the web server, stopping" 1>&2 && exit 1)
- xdg-open http://0.0.0.0:8000/ >/dev/null 2>&1 &
+ if ! webserverStartAll; then
+ echo "Failed to start the web server, stopping" 1>&2;
+ exit 1;
+ fi
+ xdg-open http://0.0.0.0:$WEB_PORT/ >/dev/null 2>&1 &
trap trap_ctrlc SIGINT
echo "Running. Stop with Ctrl-C"
while [ $RUNNING -gt 0 ]; do
@@ -163,23 +207,6 @@ if [ -z "$BUILDDIR" ] || [ -z `which bitbake` ]; then
fi
-
-# Verify prerequisites
-
-if ! echo "import django; print (1,) == django.VERSION[0:1] and django.VERSION[1:2][0] in (5,6)" | python 2>/dev/null | grep True >/dev/null; then
- echo -e "This program needs Django 1.5 or 1.6. Please install with\n\npip install django==1.6"
- return 2
-fi
-
-if ! echo "import south; print [0,8,4] == map(int,south.__version__.split(\".\"))" | python 2>/dev/null | grep True >/dev/null; then
- echo -e "This program needs South 0.8.4. Please install with\n\npip install south==0.8.4"
- return 2
-fi
-
-
-
-
-
# Determine the action. If specified by arguments, fine, if not, toggle it
if [ "x$1" == "xstart" ] || [ "x$1" == "xstop" ]; then
CMD="$1"
@@ -191,19 +218,6 @@ else
fi;
fi
-NOTOASTERUI=0
-WEBSERVER=1
-for param in $*; do
- case $param in
- noui )
- NOTOASTERUI=1
- ;;
- noweb )
- WEBSERVER=0
- ;;
- esac
-done
-
echo "The system will $CMD."
# Make sure it's safe to run by checking bitbake lock
@@ -213,30 +227,36 @@ if [ -e $BUILDDIR/bitbake.lock ]; then
(flock -n 200 ) 200<$BUILDDIR/bitbake.lock || lock=0
fi
-if [ ${CMD} == "start" ] && ( [ $lock -eq 0 ] || [ -e $BUILDDIR/.toastermain.pid ] ); then
- echo "Error: bitbake lock state error. File locks show that the system is on." 2>&1
- echo "If you see problems, stop and then start the system again." 2>&1
+if [ ${CMD} == "start" ] && [ $lock -eq 0 ]; then
+ echo "Error: bitbake lock state error. File locks show that the system is on." 1>&2
+ echo "Please wait for the current build to finish, stop and then start the system again." 1>&2
return 3
fi
+if [ ${CMD} == "start" ] && [ -e $BUILDDIR/.toastermain.pid ] && kill -0 `cat $BUILDDIR/.toastermain.pid`; then
+ echo "Error: bitbake appears to be dead, but the webserver is alive. Something fishy is going on." 1>&2
+ echo "Cleaning up the web server at to start a clean slate."
+ webserverKillAll
+fi
+
# Execute the commands
case $CMD in
start )
start_success=1
- addtoConfiguration "INHERIT+=\"toaster buildhistory\"" toaster.conf
+ addtoConfiguration toaster.conf "INHERIT+=\"toaster buildhistory\"" $TOASTER_BRBE
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
echo "Failed ${CMD}."
return 4
fi
unset BBSERVER
- bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B localhost:8200
+ bitbake --postread conf/toaster.conf --server-only -t xmlrpc -B 0.0.0.0:0
if [ $? -ne 0 ]; then
start_success=0
echo "Bitbake server start failed"
else
- export BBSERVER=localhost:8200
+ export BBSERVER=0.0.0.0:-1
if [ $NOTOASTERUI == 0 ]; then # we start the TOASTERUI only if not inhibited
bitbake --observe-only -u toasterui >${BUILDDIR}/toaster_ui.log 2>&1 & echo $! >${BUILDDIR}/.toasterui.pid
fi
@@ -245,10 +265,12 @@ case $CMD in
# set fail safe stop system on terminal exit
trap stop_system SIGHUP
echo "Successful ${CMD}."
+ return 0
else
# failed start, do stop
stop_system
echo "Failed ${CMD}."
+ return 1
fi
# stop system on terminal exit
set -o monitor
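
The toaster script now parses extra positional parameters: noui and noweb as before, plus webport= to choose the Django port and brbe= to pass a build request identifier into toaster.conf. A hedged sketch of a managed session, started from an initialized build environment:

    # Start Toaster on port 8400 without the toasterui observer process
    source toaster start webport=8400 noui

    # Stop the web server and the bitbake server again
    source toaster stop
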
diff --git a/bitbake/bin/toaster-eventreplay b/bitbake/bin/toaster-eventreplay
new file mode 100755
index 0000000000..624829aea0
--- /dev/null
+++ b/bitbake/bin/toaster-eventreplay
@@ -0,0 +1,179 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (C) 2014 Alex Damian
+#
+# This file re-uses code spread throughout other Bitbake source files.
+# As such, all other copyrights belong to their own right holders.
+#
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+
+# This command takes a filename as a single parameter. The filename is read
+# as a build eventlog, and the ToasterUI is used to process events in the file
+# and log data in the database
+
+import os
+import sys, logging
+
+# mangle syspath to allow easy import of modules
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
+ 'lib'))
+
+
+import bb.cooker
+from bb.ui import toasterui
+import sys
+import logging
+
+logger = logging.getLogger(__name__)
+console = logging.StreamHandler(sys.stdout)
+format_str = "%(levelname)s: %(message)s"
+logging.basicConfig(format=format_str)
+
+
+import json, pickle
+
+
+class FileReadEventsServerConnection():
+ """ Emulates a connection to a bitbake server that feeds
+ events coming actually read from a saved log file.
+ """
+
+ class MockConnection():
+ """ fill-in for the proxy to the server. we just return generic data
+ """
+ def __init__(self, sc):
+ self._sc = sc
+
+ def runCommand(self, commandArray):
+ """ emulates running a command on the server; only read-only commands are accepted """
+ command_name = commandArray[0]
+
+ if command_name == "getVariable":
+ if commandArray[1] in self._sc._variables:
+ return (self._sc._variables[commandArray[1]]['v'], None)
+ return (None, "Missing variable")
+
+ elif command_name == "getAllKeysWithFlags":
+ dump = {}
+ flaglist = commandArray[1]
+ for k in self._sc._variables.keys():
+ try:
+ if not k.startswith("__"):
+ v = self._sc._variables[k]['v']
+ dump[k] = {
+ 'v' : v ,
+ 'history' : self._sc._variables[k]['history'],
+ }
+ for d in flaglist:
+ dump[k][d] = self._sc._variables[k][d]
+ except Exception as e:
+ print(e)
+ return (dump, None)
+ else:
+ raise Exception("Command %s not implemented" % commandArray[0])
+
+ def terminateServer(self):
+ """ do not do anything """
+ pass
+
+
+
+ class EventReader():
+ def __init__(self, sc):
+ self._sc = sc
+ self.firstraise = 0
+
+ def _create_event(self, line):
+ def _import_class(name):
+ assert len(name) > 0
+ assert "." in name, name
+
+ components = name.strip().split(".")
+ modulename = ".".join(components[:-1])
+ moduleklass = components[-1]
+
+ module = __import__(modulename, fromlist=[str(moduleklass)])
+ return getattr(module, moduleklass)
+
+ # we build a toaster event out of current event log line
+ try:
+ event_data = json.loads(line.strip())
+ event_class = _import_class(event_data['class'])
+ event_object = pickle.loads(json.loads(event_data['vars']))
+ except ValueError as e:
+ print("Failed loading ", line)
+ raise e
+
+ if not isinstance(event_object, event_class):
+ raise Exception("Error loading objects %s class %s ", event_object, event_class)
+
+ return event_object
+
+ def waitEvent(self, timeout):
+
+ nextline = self._sc._eventfile.readline()
+ if len(nextline) == 0:
+ # the build data ended, while toasterui still waits for events.
+ # this happens when the server was abruptly stopped, so we simulate this
+ self.firstraise += 1
+ if self.firstraise == 1:
+ raise KeyboardInterrupt()
+ else:
+ return None
+ else:
+ self._sc.lineno += 1
+ return self._create_event(nextline)
+
+
+ def _readVariables(self, variableline):
+ self._variables = json.loads(variableline.strip())['allvariables']
+
+
+ def __init__(self, file_name):
+ self.connection = FileReadEventsServerConnection.MockConnection(self)
+ self._eventfile = open(file_name, "r")
+
+ # we expect to have the variable dump at the start of the file
+ self.lineno = 1
+ self._readVariables(self._eventfile.readline())
+
+ self.events = FileReadEventsServerConnection.EventReader(self)
+
+
+
+
+
+class MockConfigParameters():
+ """ stand-in for cookerdata.ConfigParameters; as we don't really config a cooker, this
+ serves just to supply needed interfaces for the toaster ui to work """
+ def __init__(self):
+ self.observe_only = True # we can only read files
+
+
+# run toaster ui on our mock bitbake class
+if __name__ == "__main__":
+ if len(sys.argv) < 2:
+ logger.error("Usage: %s event.log " % sys.argv[0])
+ sys.exit(1)
+
+ file_name = sys.argv[-1]
+ mock_connection = FileReadEventsServerConnection(file_name)
+ configParams = MockConfigParameters()
+
+ # run the main program
+ toasterui.main(mock_connection.connection, mock_connection.events, configParams)
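
toaster-eventreplay feeds a previously captured event log back through the ToasterUI, so a build recorded with --write-log can be loaded into the Toaster database without being re-run. A minimal usage sketch with a hypothetical log file name:

    # Replay a saved event log into the Toaster database
    ./bitbake/bin/toaster-eventreplay bitbake_eventlog_20150101120000.json
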
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml
index 4ce7ed9f8c..fd5a92316c 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml
@@ -471,7 +471,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh
Time: 00:00:00
Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
NOTE: Resolving any missing task queue dependencies
- NOTE: Preparing runqueue
+ NOTE: Preparing RunQueue
NOTE: Executing RunQueue Tasks
********************
* *
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml
index 992f65058f..0dd543bcc0 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml
@@ -1308,8 +1308,8 @@
BitBake allows installation of event handlers within
recipe and class files.
Events are triggered at certain points during operation,
- such as the beginning of operation against a given
- <filename>.bb</filename>, the start of a given task,
+ such as the beginning of an operation against a given recipe
+ (<filename>*.bb</filename> file), the start of a given task,
task failure, task success, and so forth.
The intent is to make it easy to do things like email
notification on build failure.
@@ -1338,6 +1338,27 @@
</para>
<para>
+ Because you probably are only interested in a subset of events,
+ you would likely use the <filename>[eventmask]</filename> flag
+ for your event handler to be sure that only certain events
+ trigger the handler.
+ Given the previous example, suppose you only wanted the
+ <filename>bb.build.TaskFailed</filename> event to trigger that
+ event handler.
+ Use the flag as follows:
+ <literallayout class='monospaced'>
+ addhandler myclass_eventhandler
+ myclass_eventhandler[eventmask] = "bb.build.TaskFailed"
+ python myclass_eventhandler() {
+ from bb.event import getName
+ from bb import data
+ print("The name of the Event is %s" % getName(e))
+ print("The file we run for is %s" % data.getVar('FILE', e.data, True))
+ }
+ </literallayout>
+ </para>
+
+ <para>
During a standard build, the following common events might occur:
<itemizedlist>
<listitem><para>
@@ -1551,11 +1572,11 @@
item runtime dependency which must have completed before that
task can be executed.
<literallayout class='monospaced'>
- do_package_write[rdeptask] = "do_package"
+ do_package_qa[rdeptask] = "do_packagedata"
</literallayout>
- In the previous example, the <filename>do_package</filename>
+ In the previous example, the <filename>do_packagedata</filename>
task of each item in <filename>RDEPENDS</filename> must have
- completed before <filename>do_package_write</filename> can execute.
+ completed before <filename>do_package_qa</filename> can execute.
</para>
</section>
diff --git a/bitbake/doc/bitbake-user-manual/bitbake-user-manual.xml b/bitbake/doc/bitbake-user-manual/bitbake-user-manual.xml
index e927c4d9e3..7fff933be8 100644
--- a/bitbake/doc/bitbake-user-manual/bitbake-user-manual.xml
+++ b/bitbake/doc/bitbake-user-manual/bitbake-user-manual.xml
@@ -56,7 +56,7 @@
-->
<copyright>
- <year>2004-2014</year>
+ <year>2004-2015</year>
<holder>Richard Purdie</holder>
<holder>Chris Larson</holder>
<holder>and Phil Blundell</holder>
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 84f6ec3f3c..36c4d9663d 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-__version__ = "1.23.1"
+__version__ = "1.25.0"
import sys
if sys.version_info < (2, 7, 3):
@@ -103,7 +103,7 @@ def fatal(*args):
def deprecated(func, name=None, advice=""):
"""This is a decorator which can be used to mark functions
- as deprecated. It will result in a warning being emmitted
+ as deprecated. It will result in a warning being emitted
when the function is used."""
import warnings
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index dcd42ef8cf..65cc851df4 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -23,7 +23,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import os
import sys
@@ -42,9 +42,8 @@ logger = logging.getLogger('BitBake.Build')
NULL = open(os.devnull, 'r+')
-
-# When we execute a python function we'd like certain things
-# in all namespaces, hence we add them to __builtins__
+# When we execute a Python function, we'd like certain things
+# in all namespaces, hence we add them to __builtins__.
# If we do not do this and use the exec globals, they will
# not be available to subfunctions.
__builtins__['bb'] = bb
@@ -143,7 +142,7 @@ class LogTee(object):
self.outfile.flush()
def exec_func(func, d, dirs = None):
- """Execute an BB 'function'"""
+ """Execute a BB 'function'"""
body = d.getVar(func)
if not body:
@@ -228,7 +227,7 @@ def exec_func_python(func, d, runfile, cwd=None):
code = _functionfmt.format(function=func, body=d.getVar(func, True))
bb.utils.mkdirhier(os.path.dirname(runfile))
with open(runfile, 'w') as script:
- script.write(code)
+ bb.data.emit_func_python(func, script, d)
if cwd:
try:
@@ -417,7 +416,7 @@ def _exec_task(fn, task, d, quieterr):
os.dup2(logfile.fileno(), oso[1])
os.dup2(logfile.fileno(), ose[1])
- # Ensure python logging goes to the logfile
+ # Ensure Python logging goes to the logfile
handler = logging.StreamHandler(logfile)
handler.setFormatter(logformatter)
# Always enable full debug output into task logfiles
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index f892d7dc32..ac0c27f922 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -225,7 +225,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
for package in self.packages_dynamic:
cachedata.packages_dynamic[package].append(fn)
- # Build hash of runtime depends and rececommends
+ # Build hash of runtime depends and recommends
for package in self.packages + [self.pn]:
cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]
@@ -261,7 +261,7 @@ class Cache(object):
def __init__(self, data, data_hash, caches_array):
# Pass caches_array information into Cache Constructor
- # It will be used in later for deciding whether we
+ # It will be used later for deciding whether we
# need extra cache file dump/load support
self.caches_array = caches_array
self.cachedir = data.getVar("CACHE", True)
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 8b8f91a762..21a36f64ca 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -178,7 +178,7 @@ class BufferedLogger(Logger):
class PythonParser():
getvars = (".getVar", ".appendVar", ".prependVar")
- containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains", "bb.utils.contains_any")
+ containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
execfuncs = ("bb.build.exec_func", "bb.build.exec_task")
def warn(self, func, arg):
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index d797fcf930..60f9ac08aa 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -87,6 +87,9 @@ class Command:
def runAsyncCommand(self):
try:
if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
+ # updateCache will trigger a shutdown of the parser
+ # and then raise BBHandledException triggering an exit
+ self.cooker.updateCache()
return False
if self.currentAsyncCommand is not None:
(command, options) = self.currentAsyncCommand
@@ -268,6 +271,10 @@ class CommandsSync:
# we always take and leave the cooker in state.initial
setFeatures.readonly = True
+ def updateConfig(self, command, params):
+ options = params[0]
+ command.cooker.updateConfigOpts(options)
+
class CommandsAsync:
"""
A class of asynchronous commands
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 2c7788ea04..0d9b85e604 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -153,20 +153,25 @@ class BBCooker:
self.parser = None
signal.signal(signal.SIGTERM, self.sigterm_exception)
+ # Let SIGHUP exit as SIGTERM
+ signal.signal(signal.SIGHUP, self.sigterm_exception)
def sigterm_exception(self, signum, stackframe):
- bb.warn("Cooker recieved SIGTERM, shutting down...")
+ if signum == signal.SIGTERM:
+ bb.warn("Cooker recieved SIGTERM, shutting down...")
+ elif signum == signal.SIGHUP:
+ bb.warn("Cooker recieved SIGHUP, shutting down...")
self.state = state.forceshutdown
def setFeatures(self, features):
# we only accept a new feature set if we're in state initial, so we can reset without problems
- if self.state != state.initial:
+ if self.state != state.initial and self.state != state.error:
raise Exception("Illegal state for feature set change")
original_featureset = list(self.featureset)
for feature in features:
self.featureset.setFeature(feature)
bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
- if (original_featureset != list(self.featureset)):
+ if (original_featureset != list(self.featureset)) and self.state != state.error:
self.reset()
def initConfigurationData(self):
@@ -200,6 +205,75 @@ class BBCooker:
self.data = self.databuilder.data
self.data_hash = self.databuilder.data_hash
+
+ # we log all events to a file if so directed
+ if self.configuration.writeeventlog:
+ import json, pickle
+ DEFAULT_EVENTFILE = self.configuration.writeeventlog
+ class EventLogWriteHandler():
+
+ class EventWriter():
+ def __init__(self, cooker):
+ self.file_inited = None
+ self.cooker = cooker
+ self.event_queue = []
+
+ def init_file(self):
+ try:
+ # delete the old log
+ os.remove(DEFAULT_EVENTFILE)
+ except:
+ pass
+
+ # write current configuration data
+ with open(DEFAULT_EVENTFILE, "w") as f:
+ f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
+
+ def write_event(self, event):
+ with open(DEFAULT_EVENTFILE, "a") as f:
+ try:
+ f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
+ except Exception as e:
+ import traceback
+ print(e, traceback.format_exc(e))
+
+
+ def send(self, event):
+ event_class = event.__module__ + "." + event.__class__.__name__
+
+ # init on bb.event.BuildStarted
+ if self.file_inited is None:
+ if event_class == "bb.event.BuildStarted":
+ self.init_file()
+ self.file_inited = True
+
+ # write pending events
+ for e in self.event_queue:
+ self.write_event(e)
+
+ # also write the current event
+ self.write_event(event)
+
+ else:
+ # queue all events until the file is inited
+ self.event_queue.append(event)
+
+ else:
+ # we have the file, just write the event
+ self.write_event(event)
+
+ # set our handler's event processor
+ event = EventWriter(self) # self is the cooker here
+
+
+ # set up cooker features for this mock UI handler
+
+ # we need to write the dependency tree in the log
+ self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
+ # register the log file writer as UI Handler
+ bb.event.register_UIHhandler(EventLogWriteHandler())
+
+
#
# Special updated configuration we use for firing events
#
@@ -240,7 +314,7 @@ class BBCooker:
f.write(total)
#add to history
- loginfo = {"op":append, "file":default_file, "line":total.count("\n")}
+ loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
self.data.appendVar(var, val, **loginfo)
def saveConfigurationVar(self, var, val, default_file, op):
@@ -309,7 +383,7 @@ class BBCooker:
f.write(total)
#add to history
- loginfo = {"op":set, "file":default_file, "line":total.count("\n")}
+ loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
self.data.setVar(var, val, **loginfo)
def removeConfigurationVar(self, var):
@@ -371,6 +445,10 @@ class BBCooker:
self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
+ def updateConfigOpts(self,options):
+ for o in options:
+ setattr(self.configuration, o, options[o])
+
def runCommands(self, server, data, abort):
"""
Run any queued asynchronous command
@@ -496,7 +574,7 @@ class BBCooker:
taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False)
return runlist, taskdata
-
+
######## WARNING : this function requires cache_extra to be enabled ########
def generateTaskDepTreeData(self, pkgs_to_build, task):
@@ -1307,7 +1385,7 @@ class BBCooker:
raise bb.BBHandledException()
self.show_appends_with_no_recipes()
self.handlePrefProviders()
- self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn)
+ self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
self.state = state.running
return None
@@ -1435,7 +1513,7 @@ class CookerCollectFiles(object):
for ignored in ('SCCS', 'CVS', '.svn'):
if ignored in dirs:
dirs.remove(ignored)
- found += [os.path.join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]
+ found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
return found
@@ -1532,7 +1610,7 @@ class CookerCollectFiles(object):
filelist.append(filename)
return filelist
- def collection_priorities(self, pkgfns):
+ def collection_priorities(self, pkgfns, d):
priorities = {}
@@ -1541,10 +1619,10 @@ class CookerCollectFiles(object):
for p in pkgfns:
realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
priorities[p] = self.calc_bbfile_priority(realfn, matched)
-
+
# Don't show the warning if the BBFILE_PATTERN did match .bbappend files
unmatched = set()
- for _, _, regex, pri in self.bbfile_config_priorities:
+ for _, _, regex, pri in self.bbfile_config_priorities:
if not regex in matched:
unmatched.add(regex)
@@ -1561,7 +1639,8 @@ class CookerCollectFiles(object):
for collection, pattern, regex, _ in self.bbfile_config_priorities:
if regex in unmatched:
- collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
+ if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
+ collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
return priorities
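
The EventWriter the cooker registers here stores one JSON object per line: the first line is the variable dump ({"allvariables": ...}) and each later line is {"class": <event class name>, "vars": <pickled event, JSON-encoded>}, which is the layout toaster-eventreplay unpacks. A hedged sketch for inspecting such a file (the file name is hypothetical, and the class name assumes the standard bb.build.TaskSucceeded event):

    # The first record is the variable dump, every following line is one event
    head -n 1 bitbake_eventlog_20150101120000.json | cut -c 1-60

    # Count how many task-success events the log holds
    grep -c '"class": "bb.build.TaskSucceeded"' bitbake_eventlog_20150101120000.json
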
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
index 60a6d516af..2ceed2d867 100644
--- a/bitbake/lib/bb/cookerdata.py
+++ b/bitbake/lib/bb/cookerdata.py
@@ -69,6 +69,17 @@ class ConfigParameters(object):
if bbpkgs:
self.options.pkgs_to_build.extend(bbpkgs.split())
+ def updateToServer(self, server):
+ options = {}
+ for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
+ "verbose", "debug", "dry_run", "dump_signatures",
+ "debug_domains", "extra_assume_provided", "profile"]:
+ options[o] = getattr(self.options, o)
+
+ ret, error = server.runCommand(["updateConfig", options])
+ if error:
+ raise Exception("Unable to update the server configuration with local parameters: %s" % error)
+
def parseActions(self):
# Parse any commandline into actions
action = {'action':None, 'msg':None}
@@ -128,6 +139,7 @@ class CookerConfiguration(object):
self.dry_run = False
self.tracking = False
self.interface = []
+ self.writeeventlog = False
self.env = {}
diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py
index 898820b069..346a618582 100644
--- a/bitbake/lib/bb/daemonize.py
+++ b/bitbake/lib/bb/daemonize.py
@@ -12,8 +12,11 @@ A failed call to fork() now raises an exception.
References:
1) Advanced Programming in the Unix Environment: W. Richard Stevens
- 2) Unix Programming Frequently Asked Questions:
- http://www.erlenstar.demon.co.uk/unix/faq_toc.html
+ http://www.apuebook.com/apue3e.html
+ 2) The Linux Programming Interface: Michael Kerrisk
+ http://man7.org/tlpi/index.html
+ 3) Unix Programming Frequently Asked Questions:
+ http://www.faqs.org/faqs/unix-faq/programmer/faq/
Modified to allow a function to be daemonized and return for
bitbake use by Richard Purdie
@@ -146,7 +149,7 @@ def createDaemon(function, logfile):
# OR
#
# Use the getrlimit method to retrieve the maximum file descriptor number
- # that can be opened by this process. If there is not limit on the
+ # that can be opened by this process. If there is no limit on the
# resource, use the default value.
#
import resource # Resource usage information.
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index db938be1e6..82eefef1a6 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -6,7 +6,7 @@ BitBake 'Data' implementations
Functions for interacting with the data structure used by the
BitBake build tools.
-The expandData and update_data are the most expensive
+The expandKeys and update_data are the most expensive
operations. At night the cookie monster came by and
suggested 'give me cookies on setting the variables and
things will work out'. Taking this suggestion into account
@@ -15,7 +15,7 @@ Analyse von Algorithmen' lecture and the cookie
monster seems to be right. We will track setVar more carefully
to have faster update_data and expandKeys operations.
-This is a treade-off between speed and memory again but
+This is a trade-off between speed and memory again but
the speed is more critical here.
"""
@@ -35,7 +35,7 @@ the speed is more critical here.
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import sys, os, re
if sys.argv[0][-5:] == "pydoc":
@@ -219,6 +219,13 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
val = str(val)
+ if varExpanded.startswith("BASH_FUNC_"):
+ varExpanded = varExpanded[10:-2]
+ val = val[3:] # Strip off "() "
+ o.write("%s() %s\n" % (varExpanded, val))
+ o.write("export -f %s\n" % (varExpanded))
+ return 1
+
if func:
# NOTE: should probably check for unbalanced {} within the var
o.write("%s() {\n%s\n}\n" % (varExpanded, val))
@@ -231,6 +238,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
# to a shell, we need to escape the quotes in the var
alter = re.sub('"', '\\"', val)
alter = re.sub('\n', ' \\\n', alter)
+ alter = re.sub('\\$', '\\\\$', alter)
o.write('%s="%s"\n' % (varExpanded, alter))
return 0
@@ -281,6 +289,41 @@ def emit_func(func, o=sys.__stdout__, d = init()):
newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
newdeps -= seen
+_functionfmt = """
+def {function}(d):
+{body}"""
+
+def emit_func_python(func, o=sys.__stdout__, d = init()):
+    """Emits the function 'func', and any Python functions it depends on, in a form that can be executed as Python code."""
+
+ def write_func(func, o, call = False):
+ body = d.getVar(func, True)
+ if not body.startswith("def"):
+ body = _functionfmt.format(function=func, body=body)
+
+ o.write(body.strip() + "\n\n")
+ if call:
+ o.write(func + "(d)" + "\n\n")
+
+ write_func(func, o, True)
+ pp = bb.codeparser.PythonParser(func, logger)
+ pp.parse_python(d.getVar(func, True))
+ newdeps = pp.execs
+ newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
+ seen = set()
+ while newdeps:
+ deps = newdeps
+ seen |= deps
+ newdeps = set()
+ for dep in deps:
+ if d.getVarFlag(dep, "func") and d.getVarFlag(dep, "python"):
+ write_func(dep, o)
+ pp = bb.codeparser.PythonParser(dep, logger)
+ pp.parse_python(d.getVar(dep, True))
+ newdeps |= pp.execs
+ newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split())
+ newdeps -= seen
+
def update_data(d):
"""Performs final steps upon the datastore, including application of overrides"""
d.finalize(parent = True)
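
The BASH_FUNC_ branch handles exported shell functions, which bash stores in the environment as variables named BASH_FUNC_<name>() (or BASH_FUNC_<name>%% on post-Shellshock bash) with values beginning "() ". A worked example of the string handling, using a hypothetical function name:

    var = "BASH_FUNC_myfunc()"           # key as it appears in os.environ
    val = "() {  echo hello; }"
    name = var[10:-2]                    # drop the "BASH_FUNC_" prefix and the "()"/"%%" suffix -> "myfunc"
    body = val[3:]                       # drop the leading "() " -> "{  echo hello; }"
    print("%s() %s" % (name, body))      # myfunc() {  echo hello; }
    print("export -f %s" % name)         # re-export it as a function rather than a plain variable
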
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 3d773b1d69..68d273b3a6 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -263,7 +263,7 @@ class VariableHistory(object):
flag = ''
o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail'])))
if len(history) > 1:
- o.write("# computed:\n")
+ o.write("# pre-expansion value:\n")
o.write('# "%s"\n' % (commentVal))
else:
o.write("#\n# $%s\n# [no history recorded]\n#\n" % var)
@@ -589,7 +589,7 @@ class DataSmart(MutableMapping):
self._makeShadowCopy(var)
self.dict[var][flag] = value
- if flag == "defaultval" and '_' in var:
+ if flag == "_defaultval" and '_' in var:
self._setvar_update_overrides(var)
if flag == "unexport" or flag == "export":
@@ -605,8 +605,8 @@ class DataSmart(MutableMapping):
if local_var is not None:
if flag in local_var:
value = copy.copy(local_var[flag])
- elif flag == "_content" and "defaultval" in local_var and not noweakdefault:
- value = copy.copy(local_var["defaultval"])
+ elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
+ value = copy.copy(local_var["_defaultval"])
if expand and value:
# Only getvar (flag == _content) hits the expand cache
cachename = None
@@ -616,8 +616,9 @@ class DataSmart(MutableMapping):
cachename = var + "[" + flag + "]"
value = self.expand(value, cachename)
if value and flag == "_content" and local_var is not None and "_removeactive" in local_var:
- filtered = filter(lambda v: v not in local_var["_removeactive"],
- value.split(" "))
+ removes = [self.expand(r) for r in local_var["_removeactive"]]
+ filtered = filter(lambda v: v not in removes,
+ value.split())
value = " ".join(filtered)
if expand:
# We need to ensure the expand cache has the correct value
@@ -739,12 +740,16 @@ class DataSmart(MutableMapping):
yield key
def __iter__(self):
+ deleted = set()
def keylist(d):
klist = set()
for key in d:
if key == "_data":
continue
+ if key in deleted:
+ continue
if not d[key]:
+ deleted.add(key)
continue
klist.add(key)
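
With the _removeactive change, entries in a VAR_remove override are expanded before filtering and the value is split on arbitrary whitespace. The test_remove_expansion test added further down pins this behaviour; a condensed, runnable sketch:

    import bb.data
    d = bb.data.init()
    d.setVar("BAR", "Z")
    d.setVar("TEST", "${BAR}/X Y")
    d.setVar("TEST_remove", "${BAR}/X")   # expands to "Z/X" before the removal is applied
    bb.data.update_data(d)
    assert d.getVar("TEST", True) == "Y"
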
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index e2f6b9cad2..fec6a05b38 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -55,6 +55,7 @@ def get_class_handlers():
return _handlers
def set_class_handlers(h):
+ global _handlers
_handlers = h
def clean_class_handlers():
@@ -67,6 +68,7 @@ _ui_logfilters = {}
_ui_handler_seq = 0
_event_handler_map = {}
_catchall_handlers = {}
+_eventfilter = None
def execute_handler(name, handler, event, d):
event.data = d
@@ -94,6 +96,9 @@ def fire_class_handlers(event, d):
evt_hmap = _event_handler_map.get(eid, {})
for name, handler in _handlers.iteritems():
if name in _catchall_handlers or name in evt_hmap:
+ if _eventfilter:
+ if not _eventfilter(name, handler, event, d):
+ continue
execute_handler(name, handler, event, d)
ui_queue = []
@@ -204,6 +209,10 @@ def remove(name, handler):
"""Remove an Event handler"""
_handlers.pop(name)
+def set_eventfilter(func):
+ global _eventfilter
+ _eventfilter = func
+
def register_UIHhandler(handler):
bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
_ui_handlers[_ui_handler_seq] = handler
@@ -598,7 +607,7 @@ class MetadataEvent(Event):
class SanityCheck(Event):
"""
- Event to runs sanity checks, either raise errors or generate events as return status.
+ Event to run sanity checks, either raise errors or generate events as return status.
"""
def __init__(self, generateevents = True):
Event.__init__(self)
@@ -606,7 +615,7 @@ class SanityCheck(Event):
class SanityCheckPassed(Event):
"""
- Event to indicate sanity check is passed
+ Event to indicate sanity check has passed
"""
class SanityCheckFailed(Event):
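
set_eventfilter() installs a single module-level predicate that is consulted before each class handler fires; a false return suppresses the event for that handler. A minimal sketch of a filter that only lets build start/completion events through (any callable with the (name, handler, event, d) signature works):

    import bb.event

    def only_build_events(name, handler, event, d):
        # Return True to let the handler run, False to drop the event for it.
        return isinstance(event, (bb.event.BuildStarted, bb.event.BuildCompleted))

    bb.event.set_eventfilter(only_build_events)
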
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index df2f2b056b..5b26524f45 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -535,7 +535,7 @@ def verify_checksum(ud, d):
"""
- if not ud.method.supports_checksum(ud):
+ if ud.ignore_checksums or not ud.method.supports_checksum(ud):
return
md5data = bb.utils.md5_file(ud.localpath)
@@ -543,8 +543,8 @@ def verify_checksum(ud, d):
if ud.method.recommends_checksum(ud):
# If strict checking enabled and neither sum defined, raise error
- strict = d.getVar("BB_STRICT_CHECKSUM", True) or None
- if strict and not (ud.md5_expected or ud.sha256_expected):
+ strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
+ if (strict == "1") and not (ud.md5_expected or ud.sha256_expected):
logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
(ud.localpath, ud.md5_name, md5data,
@@ -1041,6 +1041,7 @@ class FetchData(object):
self.sha256_expected = None
else:
self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
+ self.ignore_checksums = False
self.names = self.parm.get("name",'default').split(',')
@@ -1198,7 +1199,7 @@ class FetchMethod(object):
(file, urldata.parm.get('unpack')))
dots = file.split(".")
- if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
+ if dots[-1] in ['gz', 'bz2', 'Z', 'xz', 'lz']:
efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
else:
efile = file
@@ -1219,6 +1220,10 @@ class FetchMethod(object):
cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
elif file.endswith('.xz'):
cmd = 'xz -dc %s > %s' % (file, efile)
+ elif file.endswith('.tar.lz'):
+ cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
+ elif file.endswith('.lz'):
+ cmd = 'lzip -dc %s > %s' % (file, efile)
elif file.endswith('.zip') or file.endswith('.jar'):
try:
dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
@@ -1264,8 +1269,13 @@ class FetchMethod(object):
# items. So, only do so for file:// entries.
if urldata.type == "file" and urldata.path.find("/") != -1:
destdir = urldata.path.rsplit("/", 1)[0]
+ if urldata.parm.get('subdir') != None:
+ destdir = urldata.parm.get('subdir') + "/" + destdir
else:
- destdir = "."
+ if urldata.parm.get('subdir') != None:
+ destdir = urldata.parm.get('subdir')
+ else:
+ destdir = "."
bb.utils.mkdirhier("%s/%s" % (rootdir, destdir))
cmd = 'cp -f %s %s/%s/' % (file, rootdir, destdir)
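
Alongside the new .lz handling, unpack now honours a subdir= URL parameter for file:// entries, so single files can be staged below a chosen directory. The test_local_subdirparam tests added below pin this; a sketch through the fetcher API, assuming DL_DIR/FILESPATH point at directories that already contain the file "a":

    import bb.data, bb.fetch2
    d = bb.data.init()
    d.setVar("DL_DIR", "/tmp/dl")                      # arbitrary paths for the sketch
    d.setVar("FILESPATH", "/tmp/src")                  # directory holding the local file "a"
    fetcher = bb.fetch2.Fetch(["file://a;subdir=bar"], d)
    fetcher.download()
    fetcher.unpack("/tmp/unpack")                      # "a" ends up as /tmp/unpack/bar/a
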
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index 799fb6c0fe..f771fd02b6 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -67,6 +67,7 @@ Supported SRC_URI options are:
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
+import re
import bb
from bb import data
from bb.fetch2 import FetchMethod
@@ -339,10 +340,50 @@ class Git(FetchMethod):
"""
Compute the HEAD revision for the url
"""
- search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
+ if ud.unresolvedrev[name][:5] == "refs/":
+ search = "%s %s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
+ else:
+ search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
output = self._lsremote(ud, d, search)
return output.split()[0]
+ def latest_versionstring(self, ud, d):
+ """
+        Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
+ by searching through the tags output of ls-remote, comparing
+ versions and returning the highest match.
+ """
+ verstring = ""
+ tagregex = re.compile(d.getVar('GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
+ try:
+ output = self._lsremote(ud, d, "refs/tags/*^{}")
+        except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess):
+ return ""
+
+ for line in output.split("\n"):
+ if not line:
+ break
+
+ line = line.split("/")[-1]
+ # Ignore non-released branches
+ m = re.search("(alpha|beta|rc|final)+", line)
+ if m:
+ continue
+
+ # search for version in the line
+ tag = tagregex.search(line)
+ if tag == None:
+ continue
+
+ tag = tag.group('pver')
+ tag = tag.replace("_", ".")
+
+ if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
+ continue
+ verstring = tag
+
+ return verstring
+
def _build_revision(self, ud, d, name):
return ud.revisions[name]
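
latest_versionstring() walks the output of "git ls-remote ... refs/tags/*^{}", skips alpha/beta/rc/final tags, and keeps the highest version matched by GITTAGREGEX (defaulting to "(?P<pver>([0-9][\.|_]?)+)"). A sketch built from one entry in the new test table below:

    import bb.data, bb.fetch2
    d = bb.data.init()
    d.setVar("DL_DIR", "/tmp/dl")                                       # arbitrary path for the sketch
    d.setVar("SRCREV", "ae0394e687f1a77e966cf72f895da91840dffb8f")      # revision from the test table
    d.setVar("GITTAGREGEX", r"(?P<pver>(\d+\.(\d\.?)*))")               # regex from the test table
    ud = bb.fetch2.FetchData("git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", d)
    print(ud.method.latest_versionstring(ud, d))                        # the test expects at least "0.4.3"
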
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
index 5760fcdc58..81592f6e04 100644
--- a/bitbake/lib/bb/fetch2/hg.py
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -123,7 +123,10 @@ class Hg(FetchMethod):
else:
cmd = "%s pull" % (basecmd)
elif command == "update":
- cmd = "%s update --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
+ if ud.user and ud.pswd:
+ cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (basecmd, ud.user, ud.pswd, proto, " ".join(options))
+ else:
+ cmd = "%s update -C %s" % (basecmd, " ".join(options))
else:
raise FetchError("Invalid hg command %s" % command, ud.url)
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
index 9836bd7268..d079a33c62 100644
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -103,22 +103,15 @@ class Perforce(FetchMethod):
def urldata_init(self, ud, d):
(host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
- # If a label is specified, we use that as our filename
-
+ base_path = path.replace('/...', '')
+ base_path = self._strip_leading_slashes(base_path)
+
if "label" in parm:
- ud.localfile = "%s.tar.gz" % (parm["label"])
- return
-
- base = path
- which = path.find('/...')
- if which != -1:
- base = path[:which-1]
-
- base = self._strip_leading_slashes(base)
-
- cset = Perforce.getcset(d, path, host, user, pswd, parm)
+ version = parm["label"]
+ else:
+ version = Perforce.getcset(d, path, host, user, pswd, parm)
- ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
+ ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base_path.replace('/', '.'), version), d)
def download(self, ud, d):
"""
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index 0456490368..b081b7621b 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -25,6 +25,9 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+import re
+import tempfile
+import subprocess
import os
import logging
import bb
@@ -34,6 +37,7 @@ from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
+from bs4 import BeautifulSoup
class Wget(FetchMethod):
"""Class to fetch urls via 'wget'"""
@@ -104,3 +108,265 @@ class Wget(FetchMethod):
self._runwget(ud, d, fetchcmd, True)
return True
+
+
+ def _parse_path(self, regex, s):
+ """
+ Find and group name, version and archive type in the given string s
+ """
+ bb.debug(3, "parse_path(%s, %s)" % (regex.pattern, s))
+ m = regex.search(s)
+ if m:
+ bb.debug(3, "%s, %s, %s" % (m.group('name'), m.group('ver'), m.group('type')))
+ return (m.group('name'), m.group('ver'), m.group('type'))
+ return None
+
+ def _modelate_version(self, version):
+ if version[0] in ['.', '-']:
+ if version[1].isdigit():
+ version = version[1] + version[0] + version[2:len(version)]
+ else:
+ version = version[1:len(version)]
+
+ version = re.sub('\-', '.', version)
+ version = re.sub('_', '.', version)
+ version = re.sub('(rc)+', '.-1.', version)
+ version = re.sub('(alpha)+', '.-3.', version)
+ version = re.sub('(beta)+', '.-2.', version)
+ if version[0] == 'v':
+ version = version[1:len(version)]
+ return version
+
+ def _vercmp(self, old, new):
+ """
+        Check whether 'new' is newer than the 'old' version. We use the existing vercmp()
+        for this purpose. PE is cleared in the comparison as it is not relevant to the build,
+        and PR is cleared too for simplicity, as it is hard to derive reliably from the
+        various upstream formats.
+ """
+
+ (oldpn, oldpv, oldsuffix) = old
+ (newpn, newpv, newsuffix) = new
+
+ """
+ Check for a new suffix type that we have never heard of before
+ """
+ if (newsuffix):
+ m = self.suffix_regex_comp.search(newsuffix)
+ if not m:
+ bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
+ return False
+
+ """
+ Not our package so ignore it
+ """
+ if oldpn != newpn:
+ return False
+
+ oldpv = self._modelate_version(oldpv)
+ newpv = self._modelate_version(newpv)
+
+ if bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, "")) < 0:
+ return True
+ else:
+ return False
+
+ def _fetch_index(self, uri, ud, d):
+ """
+ Run fetch checkstatus to get directory information
+ """
+ f = tempfile.NamedTemporaryFile()
+
+ agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
+ fetchcmd = self.basecmd
+ fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
+ try:
+ self._runwget(ud, d, fetchcmd, True)
+ fetchresult = f.read()
+ except bb.fetch2.BBFetchException:
+ fetchresult = ""
+
+ f.close()
+ return fetchresult
+
+ def _check_latest_dir(self, url, versionstring, ud, d):
+ """
+ Return the name of the directory with the greatest package version
+ If error or no version, return None
+ """
+ bb.debug(3, "DirURL: %s, %s" % (url, versionstring))
+ soup = BeautifulSoup(self._fetch_index(url, ud, d))
+ if not soup:
+ return None
+
+ valid = 0
+ prefix = ''
+ regex = re.compile("(\D*)((\d+[\.\-_])+(\d+))")
+ m = regex.search(versionstring)
+ if m:
+ version = ('', m.group(2), '')
+ prefix = m.group(1)
+ bb.debug(3, "version: %s, prefix: %s" % (version, prefix))
+ else:
+ version = ('', versionstring, '')
+
+ for href in soup.find_all('a', href=True):
+ bb.debug(3, "href: %s" % (href['href']))
+ if href['href'].find(versionstring) >= 0:
+ valid = 1
+ m = regex.search(href['href'].strip("/"))
+ if m:
+ thisversion = ('', m.group(2), '')
+ if thisversion and self._vercmp(version, thisversion) == True:
+ version = thisversion
+
+ if valid:
+ bb.debug(3, "Would return %s" % (prefix+version[1]))
+ return prefix+version[1]
+ else:
+ bb.debug(3, "Not Valid")
+ return None
+
+ def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
+ """
+ Return the latest version of a package inside a given directory path
+ If error or no version, return None
+ """
+ valid = 0
+ version = ('', '', '')
+
+ bb.debug(3, "VersionURL: %s" % (url))
+ soup = BeautifulSoup(self._fetch_index(url, ud, d))
+ if not soup:
+ bb.debug(3, "*** %s NO SOUP" % (url))
+ return None
+
+ pn_regex = d.getVar('REGEX', True)
+ if pn_regex:
+ pn_regex = re.compile(pn_regex)
+ bb.debug(3, "pn_regex = '%s'" % (pn_regex.pattern))
+
+ for line in soup.find_all('a', href=True):
+ newver = None
+ bb.debug(3, "line = '%s'" % (line['href']))
+ if pn_regex:
+ m = pn_regex.search(line['href'])
+ if m:
+ bb.debug(3, "Pver = '%s'" % (m.group('pver')))
+ newver = ('', m.group('pver'), '')
+ else:
+ m = pn_regex.search(str(line))
+ if m:
+ bb.debug(3, "Pver = '%s'" % (m.group('pver')))
+ newver = ('', m.group('pver'), '')
+ else:
+ newver = self._parse_path(package_regex, line['href'])
+ if not newver:
+ newver = self._parse_path(package_regex, str(line))
+
+ if newver:
+ bb.debug(3, "Upstream version found: %s" % newver[1])
+ if valid == 0:
+ version = newver
+ valid = 1
+ elif self._vercmp(version, newver) == True:
+ version = newver
+
+ # check whether a valid package and version were found
+ bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
+ (package, version[1] or "N/A", current_version[1]))
+
+ if valid and version:
+ return re.sub('_', '.', version[1])
+
+ return None
+
+ def _init_regexes(self, package):
+ """
+ Match as many patterns as possible such as:
+ gnome-common-2.20.0.tar.gz (most common format)
+ gtk+-2.90.1.tar.gz
+ xf86-input-synaptics-12.6.9.tar.gz
+ dri2proto-2.3.tar.gz
+ blktool_4.orig.tar.gz
+ libid3tag-0.15.1b.tar.gz
+ unzip552.tar.gz
+ icu4c-3_6-src.tgz
+ genext2fs_1.3.orig.tar.gz
+ gst-fluendo-mp3
+ """
+        # match most patterns, which use "-" as the separator before the version digits
+ pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*\+?[\-_]"
+ # a loose pattern such as for unzip552.tar.gz
+ pn_prefix2 = "[a-zA-Z]+"
+ # a loose pattern such as for 80325-quicky-0.4.tar.gz
+ pn_prefix3 = "[0-9]+[\-]?[a-zA-Z]+"
+ # Save the Package Name (pn) Regex for use later
+ pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
+
+ # match version
+ pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"
+
+ # match arch
+ parch_regex = "\-source|_all_"
+
+        # src.rpm extension was added only for rpm packages. It can be removed if rpm
+        # packages will always be considered as having to be manually upgraded
+ psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+
+ # match name, version and archive type of a package
+ self.package_regex_comp = re.compile("(?P<name>%s?)\.?v?(?P<ver>%s)(?P<arch>%s)?[\.\-](?P<type>%s$)"
+ % (pn_regex, pver_regex, parch_regex, psuffix_regex))
+ self.suffix_regex_comp = re.compile(psuffix_regex)
+
+ # search for version matches on folders inside the path, like:
+ # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
+ self.dirver_regex_comp = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([\-_]r\d+)*)/")
+
+ # make custom regex for search in uri's
+ package_custom_regex_comp = None
+ version = self._parse_path(self.package_regex_comp, package)
+ if version:
+ package_custom_regex_comp = re.compile(
+ "(?P<name>%s)(?P<ver>%s)(?P<arch>%s)?[\.\-](?P<type>%s)$" %
+ (version[0], pver_regex, parch_regex, psuffix_regex))
+
+ return package_custom_regex_comp
+
+ def latest_versionstring(self, ud, d):
+ """
+        Manipulate the URL and try to obtain the latest package version.
+
+        A sanity check ensures the package name and archive type stay the same.
+ """
+ package = ud.path.split("/")[-1]
+ regex_uri = d.getVar("REGEX_URI", True)
+ newpath = regex_uri or ud.path
+ pupver = ""
+
+ package_custom_regex_comp = self._init_regexes(package)
+
+ current_version = ('', d.getVar('PV', True), '')
+
+ """possible to have no version in pkg name, such as spectrum-fw"""
+ if not re.search("\d+", package):
+ return re.sub('_', '.', current_version[1])
+
+ if not regex_uri:
+ # generate the new uri with the appropriate latest directory
+ m = self.dirver_regex_comp.search(ud.path)
+ if m:
+ dirver = m.group('dirver')
+ newuri = bb.fetch.encodeurl([ud.type, ud.host,
+ ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
+ new_dirver = self._check_latest_dir(newuri, dirver, ud, d)
+ if new_dirver and dirver != new_dirver:
+ newpath = ud.path.replace(dirver, new_dirver, True)
+
+ newpath = newpath.split(package)[0] or "/" # path to directory
+ newuri = bb.fetch.encodeurl([ud.type, ud.host, newpath, ud.user, ud.pswd, {}])
+ else:
+ newuri = newpath
+
+ return self._check_latest_version(newuri, package,
+                package_custom_regex_comp or self.package_regex_comp,
+ current_version, ud, d) or ""
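
For wget URLs the same hook first tries to bump any version-looking directory component of the path, then scans the resulting index page with BeautifulSoup. REGEX_URI points the scan at a different page and REGEX overrides the generated package regex (it must provide a "pver" group). A sketch based on the cups entry in the new test table:

    import bb.data, bb.fetch2
    d = bb.data.init()
    d.setVar("DL_DIR", "/tmp/dl")          # arbitrary path for the sketch
    d.setVar("REGEX_URI", "http://www.cups.org/software.php")
    d.setVar("REGEX", r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
    ud = bb.fetch2.FetchData("http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2", d)
    print(ud.method.latest_versionstring(ud, d))   # "2.0.0" at the time the test table was written
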
diff --git a/bitbake/lib/bb/monitordisk.py b/bitbake/lib/bb/monitordisk.py
index fca43eefd0..466523c6e4 100644
--- a/bitbake/lib/bb/monitordisk.py
+++ b/bitbake/lib/bb/monitordisk.py
@@ -52,10 +52,10 @@ def getMountedDev(path):
parentDev = os.stat(path).st_dev
currentDev = parentDev
# When the current directory's device is different from the
- # parrent's, then the current directory is a mount point
+ # parent's, then the current directory is a mount point
while parentDev == currentDev:
mountPoint = path
- # Use dirname to get the parrent's directory
+ # Use dirname to get the parent's directory
path = os.path.dirname(path)
# Reach the "/"
if path == mountPoint:
@@ -77,7 +77,7 @@ def getDiskData(BBDirs, configuration):
"""Prepare disk data for disk space monitor"""
# Save the device IDs, need the ID to be unique (the dictionary's key is
- # unique), so that when more than one directories are located in the same
+ # unique), so that when more than one directory is located on the same
# device, we just monitor it once
devDict = {}
for pathSpaceInode in BBDirs.split():
@@ -187,11 +187,11 @@ class diskMonitor:
if self.spaceInterval and self.inodeInterval:
self.enableMonitor = True
# These are for saving the previous disk free space and inode, we
- # use them to avoid print too many warning messages
+ # use them to avoid printing too many warning messages
self.preFreeS = {}
self.preFreeI = {}
- # This is for STOPTASKS and ABORT, to avoid print the message repeatly
- # during waiting the tasks to finish
+ # This is for STOPTASKS and ABORT, to avoid printing the message
+ # repeatedly while waiting for the tasks to finish
self.checked = {}
for k in self.devDict:
self.preFreeS[k] = 0
@@ -239,11 +239,9 @@ class diskMonitor:
freeInode = st.f_favail
if minInode and freeInode < minInode:
- # Some fs formats' (e.g., btrfs) statvfs.f_files (inodes) is
- # zero, this is a feature of the fs, we disable the inode
- # checking for such a fs.
+ # Some filesystems use dynamic inodes so can't run out
+ # (e.g. btrfs). This is reported by the inode count being 0.
if st.f_files == 0:
- logger.info("Inode check for %s is unavaliable, will remove it from disk monitor" % path)
self.devDict[k][2] = None
continue
# Always show warning, the self.checked would always be False if the action is WARN
diff --git a/bitbake/lib/bb/namedtuple_with_abc.py b/bitbake/lib/bb/namedtuple_with_abc.py
index f5e0a3f3d5..32f2fc642c 100644
--- a/bitbake/lib/bb/namedtuple_with_abc.py
+++ b/bitbake/lib/bb/namedtuple_with_abc.py
@@ -202,8 +202,8 @@ if __name__ == '__main__':
print(rec5._replace(k=222)._my_custom_method()) # MyMixIn's
print(rec5._replace(k=222).count(2)) # MyMixIn's
- # None that behavior: the standard namedtuple methods cannot be
- # overriden by a foreign mix-in -- even if the mix-in is declared
+ # Note that behavior: the standard namedtuple methods cannot be
+ # overridden by a foreign mix-in -- even if the mix-in is declared
# as the leftmost base class (but, obviously, you can override them
# in the defined class or its subclasses):
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 4e5a06e761..af42a0c0d6 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -128,7 +128,7 @@ class DataNode(AstNode):
if 'flag' in groupd and groupd['flag'] != None:
flag = groupd['flag']
elif groupd["lazyques"]:
- flag = "defaultval"
+ flag = "_defaultval"
loginfo['op'] = op
loginfo['detail'] = groupd["value"]
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index e13dc57ae8..c503980666 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -430,7 +430,7 @@ class RunQueueData:
# Nothing to do
return 0
- logger.info("Preparing runqueue")
+ logger.info("Preparing RunQueue")
# Step A - Work out a list of tasks to run
#
@@ -859,15 +859,18 @@ class RunQueue:
def _start_worker(self, fakeroot = False, rqexec = None):
logger.debug(1, "Starting bitbake-worker")
+ magic = "decafbad"
+ if self.cooker.configuration.profile:
+ magic = "decafbadbad"
if fakeroot:
fakerootcmd = self.cfgData.getVar("FAKEROOTCMD", True)
fakerootenv = (self.cfgData.getVar("FAKEROOTBASEENV", True) or "").split()
env = os.environ.copy()
for key, value in (var.split('=') for var in fakerootenv):
env[key] = value
- worker = subprocess.Popen([fakerootcmd, "bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
+ worker = subprocess.Popen([fakerootcmd, "bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
else:
- worker = subprocess.Popen(["bitbake-worker", "decafbad"], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+ worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
bb.utils.nonblockingfd(worker.stdout)
workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)
@@ -876,9 +879,7 @@ class RunQueue:
"fakerootenv" : self.rqdata.dataCache.fakerootenv,
"fakerootdirs" : self.rqdata.dataCache.fakerootdirs,
"fakerootnoenv" : self.rqdata.dataCache.fakerootnoenv,
- "hashes" : bb.parse.siggen.taskhash,
- "hash_deps" : bb.parse.siggen.runtaskdeps,
- "sigchecksums" : bb.parse.siggen.file_checksum_values,
+ "sigdata" : bb.parse.siggen.get_taskdata(),
"runq_hash" : self.rqdata.runq_hash,
"logdefaultdebug" : bb.msg.loggerDefaultDebugLevel,
"logdefaultverbose" : bb.msg.loggerDefaultVerbose,
@@ -1063,7 +1064,7 @@ class RunQueue:
retval = self.rqexe.execute()
if self.state is runQueueCleanUp:
- self.rqexe.finish()
+ retval = self.rqexe.finish()
if (self.state is runQueueComplete or self.state is runQueueFailed) and self.rqexe:
self.teardown_workers()
@@ -1305,15 +1306,14 @@ class RunQueueExecute:
if self.stats.active > 0:
bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
self.rq.read_workers()
-
- return
+ return self.rq.active_fds()
if len(self.failed_fnids) != 0:
self.rq.state = runQueueFailed
- return
+ return True
self.rq.state = runQueueComplete
- return
+ return True
def check_dependencies(self, task, taskdeps, setscene = False):
if not self.rq.depvalidate:
@@ -1844,6 +1844,10 @@ class RunQueueExecuteScenequeue(RunQueueExecute):
realtask = self.rqdata.runq_setscene[task]
realdep = self.rqdata.runq_setscene[dep]
logger.debug(2, "%s was unavailable and is a hard dependency of %s so skipping" % (self.rqdata.get_user_idstring(realtask), self.rqdata.get_user_idstring(realdep)))
+ self.scenequeue_updatecounters(dep, fail)
+ continue
+ if task not in self.sq_revdeps2[dep]:
+ # May already have been removed by the fail case above
continue
self.sq_revdeps2[dep].remove(task)
if len(self.sq_revdeps2[dep]) == 0:
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
index 577c2503ac..d362f8d7fe 100644
--- a/bitbake/lib/bb/server/process.py
+++ b/bitbake/lib/bb/server/process.py
@@ -38,14 +38,18 @@ from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
logger = logging.getLogger('BitBake')
class ServerCommunicator():
- def __init__(self, connection, event_handle):
+ def __init__(self, connection, event_handle, server):
self.connection = connection
self.event_handle = event_handle
+ self.server = server
def runCommand(self, command):
# @todo try/except
self.connection.send(command)
+ if not self.server.is_alive():
+ raise SystemExit
+
while True:
# don't let the user ctrl-c while we're waiting for a response
try:
@@ -139,6 +143,8 @@ class ProcessServer(Process, BaseImplServer):
raise
except Exception:
logger.exception('Running idle function')
+ del self._idlefuns[function]
+ self.quit = True
if nextsleep is not None:
select.select(fds,[],[],nextsleep)
@@ -158,7 +164,7 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection):
self.procserver = serverImpl
self.ui_channel = ui_channel
self.event_queue = event_queue
- self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle)
+ self.connection = ServerCommunicator(self.ui_channel, self.procserver.event_handle, self.procserver)
self.events = self.event_queue
def sigterm_terminate(self):
@@ -197,14 +203,20 @@ class ProcessEventQueue(multiprocessing.queues.Queue):
def waitEvent(self, timeout):
if self.exit:
- raise KeyboardInterrupt()
+ sys.exit(1)
try:
+ if not self.server.is_alive():
+ self.setexit()
+ return None
return self.get(True, timeout)
except Empty:
return None
def getEvent(self):
try:
+ if not self.server.is_alive():
+ self.setexit()
+ return None
return self.get(False)
except Empty:
return None
@@ -219,6 +231,7 @@ class BitBakeServer(BitBakeBaseServer):
self.ui_channel, self.server_channel = Pipe()
self.event_queue = ProcessEventQueue(0)
self.serverImpl = ProcessServer(self.server_channel, self.event_queue, None)
+ self.event_queue.server = self.serverImpl
def detach(self):
self.serverImpl.start()
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 933311cccb..0c77d72112 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -187,6 +187,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
self.file_checksum_values[k][f] = cs
data = data + cs
+ taskdep = dataCache.task_deps[fn]
+ if 'nostamp' in taskdep and task in taskdep['nostamp']:
+ # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
+ import uuid
+ data = data + str(uuid.uuid4())
+
taint = self.read_taint(fn, task, dataCache.stamp[fn])
if taint:
data = data + taint
@@ -197,10 +203,11 @@ class SignatureGeneratorBasic(SignatureGenerator):
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
return h
- def set_taskdata(self, hashes, deps, checksums):
- self.runtaskdeps = deps
- self.taskhash = hashes
- self.file_checksum_values = checksums
+ def get_taskdata(self):
+ return (self.runtaskdeps, self.taskhash, self.file_checksum_values)
+
+ def set_taskdata(self, data):
+ self.runtaskdeps, self.taskhash, self.file_checksum_values = data
def dump_sigtask(self, fn, task, stampbase, runtime):
k = fn + "." + task
@@ -294,10 +301,9 @@ def dump_this_task(outfile, d):
bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")
def clean_basepath(a):
+ b = a.rsplit("/", 2)[1] + a.rsplit("/", 2)[2]
if a.startswith("virtual:"):
- b = a.rsplit(":", 1)[0] + ":" + a.rsplit("/", 1)[1]
- else:
- b = a.rsplit("/", 1)[1]
+ b = b + ":" + a.rsplit(":", 1)[0]
return b
def clean_basepaths(a):
@@ -306,6 +312,12 @@ def clean_basepaths(a):
b[clean_basepath(x)] = a[x]
return b
+def clean_basepaths_list(a):
+ b = []
+ for x in a:
+ b.append(clean_basepath(x))
+ return b
+
def compare_sigfiles(a, b, recursecb = None):
output = []
@@ -405,6 +417,17 @@ def compare_sigfiles(a, b, recursecb = None):
for f in removed:
output.append("Dependency on checksum of file %s was removed" % (f))
+ changed = []
+ for idx, task in enumerate(a_data['runtaskdeps']):
+ a = a_data['runtaskdeps'][idx]
+ b = b_data['runtaskdeps'][idx]
+ if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
+ changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
+
+ if changed:
+ output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
+ output.append("\n".join(changed))
+
if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
a = a_data['runtaskhashes']
@@ -481,4 +504,17 @@ def dump_sigfile(a):
if 'taint' in a_data:
output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])
+ data = a_data['basehash']
+ for dep in a_data['runtaskdeps']:
+ data = data + a_data['runtaskhashes'][dep]
+
+ for c in a_data['file_checksum_values']:
+ data = data + c[1]
+
+ if 'taint' in a_data:
+ data = data + a_data['taint']
+
+ h = hashlib.md5(data).hexdigest()
+ output.append("Computed Hash is %s" % h)
+
return output
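
get_taskdata()/set_taskdata() collapse the former hashes/deps/checksums trio into one opaque tuple, which is what the runqueue now ships to bitbake-worker as the single "sigdata" entry. A paraphrased sketch of the round trip, assuming bb.parse.siggen has already been initialised by the cooker:

    import bb.parse
    # cooker/runqueue side: one entry replaces the old hashes/hash_deps/sigchecksums keys
    workerdata = {"sigdata": bb.parse.siggen.get_taskdata()}
    # worker side: restore the signature generator state in a single call
    bb.parse.siggen.set_taskdata(workerdata["sigdata"])
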
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py
index 944a906653..81e4091ff7 100644
--- a/bitbake/lib/bb/tests/data.py
+++ b/bitbake/lib/bb/tests/data.py
@@ -121,6 +121,12 @@ class DataExpansions(unittest.TestCase):
keys = self.d.keys()
self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])
+ def test_keys_deletion(self):
+ newd = bb.data.createCopy(self.d)
+ newd.delVar("bar")
+ keys = newd.keys()
+ self.assertEqual(keys, ['value_of_foo', 'foo'])
+
class TestNestedExpansions(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
@@ -259,6 +265,13 @@ class TestConcatOverride(unittest.TestCase):
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "")
+ def test_remove_expansion(self):
+ self.d.setVar("BAR", "Z")
+ self.d.setVar("TEST", "${BAR}/X Y")
+ self.d.setVar("TEST_remove", "${BAR}/X")
+ bb.data.update_data(self.d)
+ self.assertEqual(self.d.getVar("TEST", True), "Y")
+
class TestOverrides(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index 7df7a0ef51..6eb0f2dc18 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -24,6 +24,7 @@ import tempfile
import subprocess
import os
from bb.fetch2 import URI
+from bb.fetch2 import FetchMethod
import bb
class URITest(unittest.TestCase):
@@ -444,6 +445,13 @@ class FetcherLocalTest(FetcherTest):
tree = self.fetchUnpack(['file://dir/subdir/e'])
self.assertEqual(tree, ['dir/subdir/e'])
+ def test_local_subdirparam(self):
+ tree = self.fetchUnpack(['file://a;subdir=bar'])
+ self.assertEqual(tree, ['bar/a'])
+
+ def test_local_deepsubdirparam(self):
+ tree = self.fetchUnpack(['file://dir/subdir/e;subdir=bar'])
+ self.assertEqual(tree, ['bar/dir/subdir/e'])
class FetcherNetworkTest(FetcherTest):
@@ -558,5 +566,81 @@ class URLHandle(unittest.TestCase):
result = bb.fetch.encodeurl(v)
self.assertEqual(result, k)
+class FetchMethodTest(FetcherTest):
+
+ test_git_uris = {
+ # version pattern "X.Y.Z"
+ ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
+ : "1.99.4",
+ # version pattern "vX.Y"
+ ("mtd-utils", "git://git.infradead.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
+ : "1.5.0",
+ # version pattern "pkg_name-X.Y"
+ ("presentproto", "git://anongit.freedesktop.org/git/xorg/proto/presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
+ : "1.0",
+ # version pattern "pkg_name-vX.Y.Z"
+ ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
+ : "1.4.0",
+ # combination version pattern
+ ("sysprof", "git://git.gnome.org/sysprof", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
+ : "1.2.0",
+ ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
+ : "2014.01",
+ # version pattern "yyyymmdd"
+ ("mobile-broadband-provider-info", "git://git.gnome.org/mobile-broadband-provider-info", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
+ : "20120614",
+ # packages with a valid GITTAGREGEX
+ ("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
+ : "0.4.3",
+ ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
+ : "11.0.0",
+ ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
+ : "1.3.59",
+ ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
+ : "3.82+dbg0.9",
+ }
-
+ test_wget_uris = {
+ # packages with versions inside directory name
+ ("util-linux", "http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "")
+ : "2.24.2",
+ ("enchant", "http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "")
+ : "1.6.0",
+ ("cmake", "http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
+ : "2.8.12.1",
+ # packages with versions only in current directory
+ ("eglic", "http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
+ : "2.19",
+ ("gnu-config", "http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
+ : "20120814",
+ # packages with "99" in the name of possible version
+ ("pulseaudio", "http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "")
+ : "5.0",
+ ("xserver-xorg", "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "")
+ : "1.15.1",
+ # packages with valid REGEX_URI and REGEX
+ ("cups", "http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2", "http://www.cups.org/software.php", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
+ : "2.0.0",
+ ("db", "http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html", "http://download.oracle.com/otn/berkeley-db/(?P<name>db-)(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz")
+ : "6.1.19",
+ }
+ if os.environ.get("BB_SKIP_NETTESTS") == "yes":
+ print("Unset BB_SKIP_NETTESTS to run network tests")
+ else:
+ def test_git_latest_versionstring(self):
+ for k, v in self.test_git_uris.items():
+ self.d.setVar("SRCREV", k[2])
+ self.d.setVar("GITTAGREGEX", k[3])
+ ud = bb.fetch2.FetchData(k[1], self.d)
+ verstring = ud.method.latest_versionstring(ud, self.d)
+ r = bb.utils.vercmp_string(v, verstring)
+ self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
+
+ def test_wget_latest_versionstring(self):
+ for k, v in self.test_wget_uris.items():
+ self.d.setVar("REGEX_URI", k[2])
+ self.d.setVar("REGEX", k[3])
+ ud = bb.fetch2.FetchData(k[1], self.d)
+ verstring = ud.method.latest_versionstring(ud, self.d)
+ r = bb.utils.vercmp_string(v, verstring)
+ self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
index 751a2d7a23..6bcbd47ab3 100644
--- a/bitbake/lib/bb/tinfoil.py
+++ b/bitbake/lib/bb/tinfoil.py
@@ -25,12 +25,12 @@ import bb.cache
import bb.cooker
import bb.providers
import bb.utils
-from bb.cooker import state, BBCooker
+from bb.cooker import state, BBCooker, CookerFeatures
from bb.cookerdata import CookerConfiguration, ConfigParameters
import bb.fetch2
class Tinfoil:
- def __init__(self, output=sys.stdout):
+ def __init__(self, output=sys.stdout, tracking=False):
# Needed to avoid deprecation warnings with python 2.6
warnings.filterwarnings("ignore", category=DeprecationWarning)
@@ -48,7 +48,10 @@ class Tinfoil:
configparams = TinfoilConfigParameters(parse_only=True)
self.config.setConfigParameters(configparams)
self.config.setServerRegIdleCallback(self.register_idle_function)
- self.cooker = BBCooker(self.config)
+ features = []
+ if tracking:
+ features.append(CookerFeatures.BASEDATASTORE_TRACKING)
+ self.cooker = BBCooker(self.config, features)
self.config_data = self.cooker.data
bb.providers.logger.setLevel(logging.ERROR)
self.cooker_data = None
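
The new tracking flag turns on CookerFeatures.BASEDATASTORE_TRACKING, so variable history is recorded for the base configuration datastore. A minimal sketch, assuming the existing prepare() entry point of this Tinfoil class:

    import bb.tinfoil
    tinfoil = bb.tinfoil.Tinfoil(tracking=True)    # enables CookerFeatures.BASEDATASTORE_TRACKING
    tinfoil.prepare(config_only=True)
    print(tinfoil.config_data.getVar("MACHINE", True))
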
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
index f64423b733..f825b57bea 100644
--- a/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -26,12 +26,18 @@ os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"
import toaster.toastermain.settings as toaster_django_settings
from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
-from toaster.orm.models import Target_Image_File
+from toaster.orm.models import Target_Image_File, BuildArtifact
from toaster.orm.models import Variable, VariableHistory
from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File
from toaster.orm.models import Task_Dependency, Package_Dependency
from toaster.orm.models import Recipe_Dependency
from bb.msg import BBLogFormatter as format
+from django.db import models
+import logging
+
+
+logger = logging.getLogger("BitBake")
+
class NotExisting(Exception):
pass
@@ -43,8 +49,57 @@ class ORMWrapper(object):
"""
def __init__(self):
+ self.layer_version_objects = []
+ self.task_objects = {}
+ self.recipe_objects = {}
pass
+ @staticmethod
+ def _build_key(**kwargs):
+ key = "0"
+ for k in sorted(kwargs.keys()):
+ if isinstance(kwargs[k], models.Model):
+ key += "-%d" % kwargs[k].id
+ else:
+ key += "-%s" % str(kwargs[k])
+ return key
+
+
+ def _cached_get_or_create(self, clazz, **kwargs):
+ """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
+ database through any other means.
+ """
+
+ assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"
+
+ key = ORMWrapper._build_key(**kwargs)
+ dictname = "objects_%s" % clazz.__name__
+ if not dictname in vars(self).keys():
+ vars(self)[dictname] = {}
+
+ created = False
+ if not key in vars(self)[dictname].keys():
+ vars(self)[dictname][key] = clazz.objects.create(**kwargs)
+ created = True
+
+ return (vars(self)[dictname][key], created)
+
+
+ def _cached_get(self, clazz, **kwargs):
+ """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
+ """
+ assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"
+
+ key = ORMWrapper._build_key(**kwargs)
+ dictname = "objects_%s" % clazz.__name__
+
+ if not dictname in vars(self).keys():
+ vars(self)[dictname] = {}
+
+ if not key in vars(self)[dictname].keys():
+ vars(self)[dictname][key] = clazz.objects.get(**kwargs)
+
+ return vars(self)[dictname][key]
def create_build_object(self, build_info, brbe):
assert 'machine' in build_info
@@ -65,13 +120,18 @@ class ORMWrapper(object):
build_name=build_info['build_name'],
bitbake_version=build_info['bitbake_version'])
+ logger.debug(1, "buildinfohelper: build is created %s" % build)
if brbe is not None:
+ logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
from bldcontrol.models import BuildEnvironment, BuildRequest
br, be = brbe.split(":")
+
buildrequest = BuildRequest.objects.get(pk = br)
- build.project = buildrequest.project
- build.save()
+ buildrequest.build = build
+ buildrequest.save()
+ build.project_id = buildrequest.project_id
+ build.save()
return build
def create_target_objects(self, target_info):
@@ -83,7 +143,7 @@ class ORMWrapper(object):
tgt_object = Target.objects.create( build = target_info['build'],
target = tgt_name,
is_image = False,
- );
+ )
targets.append(tgt_object)
return targets
@@ -103,8 +163,7 @@ class ORMWrapper(object):
build.outcome = outcome
build.save()
- def update_target_object(self, target, license_manifest_path):
-
+ def update_target_set_license_manifest(self, target, license_manifest_path):
target.license_manifest_path = license_manifest_path
target.save()
@@ -113,39 +172,47 @@ class ORMWrapper(object):
assert 'recipe' in task_information
assert 'task_name' in task_information
- task_object, created = Task.objects.get_or_create(
- build=task_information['build'],
- recipe=task_information['recipe'],
- task_name=task_information['task_name'],
- )
-
- if must_exist and created:
- task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
- task_object.delete()
- raise NotExisting("Task object created when expected to exist", task_information)
+ # we use must_exist info for database look-up optimization
+ task_object, created = self._cached_get_or_create(Task,
+ build=task_information['build'],
+ recipe=task_information['recipe'],
+ task_name=task_information['task_name']
+ )
+ if created and must_exist:
+ task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
+ raise NotExisting("Task object created when expected to exist", task_information)
+ object_changed = False
for v in vars(task_object):
if v in task_information.keys():
- vars(task_object)[v] = task_information[v]
+ if vars(task_object)[v] != task_information[v]:
+ vars(task_object)[v] = task_information[v]
+ object_changed = True
- # update setscene-related information
- if 1 == Task.objects.related_setscene(task_object).count():
- if task_object.outcome == Task.OUTCOME_COVERED:
- task_object.outcome = Task.OUTCOME_CACHED
+ # update setscene-related information if the task was just created
+ if created and task_object.outcome == Task.OUTCOME_COVERED and 1 == Task.objects.related_setscene(task_object).count():
+ task_object.outcome = Task.OUTCOME_CACHED
+ object_changed = True
outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
if outcome_task_setscene == Task.OUTCOME_SUCCESS:
task_object.sstate_result = Task.SSTATE_RESTORED
+ object_changed = True
elif outcome_task_setscene == Task.OUTCOME_FAILED:
task_object.sstate_result = Task.SSTATE_FAILED
+ object_changed = True
# mark down duration if we have a start time and a current time
if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
duration = task_information['end_time'] - task_information['start_time']
task_object.elapsed_time = duration
+ object_changed = True
+ del task_information['start_time']
+ del task_information['end_time']
- task_object.save()
+ if object_changed:
+ task_object.save()
return task_object
@@ -153,20 +220,19 @@ class ORMWrapper(object):
assert 'layer_version' in recipe_information
assert 'file_path' in recipe_information
-
- recipe_object, created = Recipe.objects.get_or_create(
- layer_version=recipe_information['layer_version'],
- file_path=recipe_information['file_path'])
-
- if must_exist and created:
- recipe_object.delete()
+ recipe_object, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
+ file_path=recipe_information['file_path'])
+ if created and must_exist:
raise NotExisting("Recipe object created when expected to exist", recipe_information)
+ object_changed = False
for v in vars(recipe_object):
if v in recipe_information.keys():
+ object_changed = True
vars(recipe_object)[v] = recipe_information[v]
- recipe_object.save()
+ if object_changed:
+ recipe_object.save()
return recipe_object
@@ -185,19 +251,34 @@ class ORMWrapper(object):
priority = layer_version_information['priority']
)
+ self.layer_version_objects.append(layer_version_object)
+
return layer_version_object
- def get_update_layer_object(self, layer_information):
+ def get_update_layer_object(self, layer_information, brbe):
assert 'name' in layer_information
assert 'local_path' in layer_information
assert 'layer_index_url' in layer_information
- layer_object, created = Layer.objects.get_or_create(
+ if brbe is None:
+ layer_object, created = Layer.objects.get_or_create(
name=layer_information['name'],
local_path=layer_information['local_path'],
layer_index_url=layer_information['layer_index_url'])
+ return layer_object
+ else:
+ # we are under managed mode; we must match the layer used in the Project Layer
+ from bldcontrol.models import BuildEnvironment, BuildRequest
+ br, be = brbe.split(":")
+
+ buildrequest = BuildRequest.objects.get(pk = br)
+
+ # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
+            # but we can only match on the layer name, so the worst that can happen is a mis-identification of the layer, not a total failure
+ layer_object = buildrequest.project.projectlayer_set.get(layercommit__layer__name=layer_information['name']).layercommit.layer
+
+ return layer_object
- return layer_object
def save_target_file_information(self, build_obj, target_obj, filedata):
assert isinstance(build_obj, Build)
@@ -229,7 +310,7 @@ class ORMWrapper(object):
parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
if len(parent_path) == 0:
parent_path = "/"
- parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
+ parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj = Target_File.objects.create(
target = target_obj,
path = path,
@@ -263,7 +344,7 @@ class ORMWrapper(object):
permission = permission,
owner = user,
group = group)
- parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
+ parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj.directory = parent_obj
tf_obj.save()
@@ -318,8 +399,7 @@ class ORMWrapper(object):
searchname = pkgpnmap[p]['OPKGN']
packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
- if created:
- # package was not build in the current build, but
+            if created or packagedict[p]['object'].size == -1: # save the data any way we can, not just if it was not created here; bug [YOCTO #6887]
# fill in everything we can from the runtime-reverse package data
try:
packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
@@ -333,11 +413,14 @@ class ORMWrapper(object):
packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])
# no files recorded for this package, so save files info
+ packagefile_objects = []
for targetpath in pkgpnmap[p]['FILES_INFO']:
targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
- Package_File.objects.create( package = packagedict[p]['object'],
+ packagefile_objects.append(Package_File( package = packagedict[p]['object'],
path = targetpath,
- size = targetfilesize)
+ size = targetfilesize))
+ if len(packagefile_objects):
+ Package_File.objects.bulk_create(packagefile_objects)
except KeyError as e:
errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )
@@ -347,6 +430,7 @@ class ORMWrapper(object):
Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])
+ packagedeps_objs = []
for p in packagedict:
for (px,deptype) in packagedict[p]['depends']:
if deptype == 'depends':
@@ -354,10 +438,13 @@ class ORMWrapper(object):
elif deptype == 'recommends':
tdeptype = Package_Dependency.TYPE_TRECOMMENDS
- Package_Dependency.objects.create( package = packagedict[p]['object'],
+ packagedeps_objs.append(Package_Dependency( package = packagedict[p]['object'],
depends_on = packagedict[px]['object'],
dep_type = tdeptype,
- target = target_obj);
+ target = target_obj))
+
+ if len(packagedeps_objs) > 0:
+ Package_Dependency.objects.bulk_create(packagedeps_objs)
if (len(errormsg) > 0):
raise Exception(errormsg)
@@ -366,7 +453,17 @@ class ORMWrapper(object):
target_image_file = Target_Image_File.objects.create( target = target_obj,
file_name = file_name,
file_size = file_size)
- target_image_file.save()
+
+ def save_artifact_information(self, build_obj, file_name, file_size):
+ # we skip the image files from other builds
+ if Target_Image_File.objects.filter(file_name = file_name).count() > 0:
+ return
+
+ # do not update artifacts found in other builds
+ if BuildArtifact.objects.filter(file_name = file_name).count() > 0:
+ return
+
+ BuildArtifact.objects.create(build = build_obj, file_name = file_name, file_size = file_size)
def create_logmessage(self, log_information):
assert 'build' in log_information
@@ -408,10 +505,13 @@ class ORMWrapper(object):
bp_object.save()
# save any attached file information
+ packagefile_objects = []
for path in package_info['FILES_INFO']:
- fo = Package_File.objects.create( package = bp_object,
+ packagefile_objects.append(Package_File( package = bp_object,
path = path,
- size = package_info['FILES_INFO'][path] )
+ size = package_info['FILES_INFO'][path] ))
+ if len(packagefile_objects):
+ Package_File.objects.bulk_create(packagefile_objects)
def _po_byname(p):
pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
@@ -420,39 +520,44 @@ class ORMWrapper(object):
pkg.save()
return pkg
+ packagedeps_objs = []
# save soft dependency information
if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
for p in bb.utils.explode_deps(package_info['RDEPENDS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
for p in bb.utils.explode_deps(package_info['RPROVIDES']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
if 'RREPLACES' in package_info and package_info['RREPLACES']:
for p in bb.utils.explode_deps(package_info['RREPLACES']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
+
+ if len(packagedeps_objs) > 0:
+ Package_Dependency.objects.bulk_create(packagedeps_objs)
return bp_object
def save_build_variables(self, build_obj, vardump):
assert isinstance(build_obj, Build)
+ helptext_objects = []
for k in vardump:
- desc = vardump[k]['doc'];
+ desc = vardump[k]['doc']
if desc is None:
var_words = [word for word in k.split('_')]
root_var = "_".join([word for word in var_words if word.isupper()])
@@ -460,25 +565,33 @@ class ORMWrapper(object):
desc = vardump[root_var]['doc']
if desc is None:
desc = ''
- if desc:
- helptext_obj = HelpText.objects.create(build=build_obj,
+ if len(desc):
+ helptext_objects.append(HelpText(build=build_obj,
area=HelpText.VARIABLE,
key=k,
- text=desc)
+ text=desc))
if not bool(vardump[k]['func']):
- value = vardump[k]['v'];
+ value = vardump[k]['v']
if value is None:
value = ''
variable_obj = Variable.objects.create( build = build_obj,
variable_name = k,
variable_value = value,
description = desc)
+
+ varhist_objects = []
for vh in vardump[k]['history']:
if not 'documentation.conf' in vh['file']:
- VariableHistory.objects.create( variable = variable_obj,
+ varhist_objects.append(VariableHistory( variable = variable_obj,
file_name = vh['file'],
line_number = vh['line'],
- operation = vh['op'])
+ operation = vh['op']))
+ if len(varhist_objects):
+ VariableHistory.objects.bulk_create(varhist_objects)
+
+ HelpText.objects.bulk_create(helptext_objects)
+
+class MockEvent: pass # sometimes we mock an event, declare it here
class BuildInfoHelper(object):
""" This class gathers the build information from the server and sends it
@@ -487,6 +600,7 @@ class BuildInfoHelper(object):
Keeps in memory all data that needs matching before writing it to the database
"""
+
def __init__(self, server, has_build_history = False):
self._configure_django()
self.internal_state = {}
@@ -496,6 +610,9 @@ class BuildInfoHelper(object):
self.orm_wrapper = ORMWrapper()
self.has_build_history = has_build_history
self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
+ self.brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0]
+ logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))
+
def _configure_django(self):
# Add toaster to sys path for importing modules
@@ -545,13 +662,15 @@ class BuildInfoHelper(object):
# Heuristics: we always match recipe to the deepest layer path that
# we can match to the recipe file path
- for bl in sorted(Layer_Version.objects.filter(build = self.internal_state['build']), reverse=True, key=_slkey):
+ for bl in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey):
if (path.startswith(bl.layer.local_path)):
return bl
- #TODO: if we get here, we didn't read layers correctly
- assert False
- return None
+ #if we get here, we didn't read layers correctly; mockup the new layer
+ unknown_layer, created = Layer.objects.get_or_create(name="unknown", local_path="/", layer_index_url="")
+ unknown_layer_version_obj, created = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])
+
+ return unknown_layer_version_obj
def _get_recipe_information_from_taskfile(self, taskfile):
localfilepath = taskfile.split(":")[-1]
@@ -594,30 +713,40 @@ class BuildInfoHelper(object):
################################
## external available methods to store information
+ @staticmethod
+ def _get_data_from_event(event):
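+ # The event payload can be exposed as either '_localdata' or 'data',
+ # depending on how the event was delivered; accept both forms here.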
+ evdata = None
+ if '_localdata' in vars(event):
+ evdata = event._localdata
+ elif 'data' in vars(event):
+ evdata = event.data
+ else:
+ raise Exception("Event with neither _localdata or data properties")
+ return evdata
def store_layer_info(self, event):
- assert '_localdata' in vars(event)
- layerinfos = event._localdata
+ layerinfos = BuildInfoHelper._get_data_from_event(event)
self.internal_state['lvs'] = {}
for layer in layerinfos:
- self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer])] = layerinfos[layer]['version']
+ self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
def store_started_build(self, event):
assert '_pkgs' in vars(event)
build_information = self._get_build_information()
- brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0]
-
- build_obj = self.orm_wrapper.create_build_object(build_information, brbe)
+ build_obj = self.orm_wrapper.create_build_object(build_information, self.brbe)
self.internal_state['build'] = build_obj
# save layer version information for this build
- for layer_obj in self.internal_state['lvs']:
- self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
+ if not 'lvs' in self.internal_state:
+ logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
+ else:
+ for layer_obj in self.internal_state['lvs']:
+ self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
- del self.internal_state['lvs']
+ del self.internal_state['lvs']
# create target information
target_information = {}
@@ -627,19 +756,27 @@ class BuildInfoHelper(object):
self.internal_state['targets'] = self.orm_wrapper.create_target_objects(target_information)
# Save build configuration
- self.orm_wrapper.save_build_variables(build_obj, self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0])
+ data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
+ self.orm_wrapper.save_build_variables(build_obj, data)
- return brbe
+ return self.brbe
def update_target_image_file(self, event):
image_fstypes = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"])[0]
+ evdata = BuildInfoHelper._get_data_from_event(event)
+
for t in self.internal_state['targets']:
if t.is_image == True:
- output_files = list(event._localdata.viewkeys())
+ output_files = list(evdata.viewkeys())
for output in output_files:
if t.target in output and output.split('.rootfs.')[1] in image_fstypes:
- self.orm_wrapper.save_target_image_file_information(t, output, event._localdata[output])
+ self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
+
+ def update_artifact_image_file(self, event):
+ evdata = BuildInfoHelper._get_data_from_event(event)
+ for artifact_path in evdata.keys():
+ self.orm_wrapper.save_artifact_information(self.internal_state['build'], artifact_path, evdata[artifact_path])
def update_build_information(self, event, errors, warnings, taskfailures):
if 'build' in self.internal_state:
@@ -647,12 +784,12 @@ class BuildInfoHelper(object):
def store_license_manifest_path(self, event):
- deploy_dir = event._localdata['deploy_dir']
- image_name = event._localdata['image_name']
- path = deploy_dir + "/licenses/" + image_name + "/"
+ deploy_dir = BuildInfoHelper._get_data_from_event(event)['deploy_dir']
+ image_name = BuildInfoHelper._get_data_from_event(event)['image_name']
+ path = deploy_dir + "/licenses/" + image_name + "/license.manifest"
for target in self.internal_state['targets']:
if target.target in image_name:
- self.orm_wrapper.update_target_object(target, path)
+ self.orm_wrapper.update_target_set_license_manifest(target, path)
def store_started_task(self, event):
@@ -696,7 +833,7 @@ class BuildInfoHelper(object):
def store_tasks_stats(self, event):
- for (taskfile, taskname, taskstats, recipename) in event._localdata:
+ for (taskfile, taskname, taskstats, recipename) in BuildInfoHelper._get_data_from_event(event):
localfilepath = taskfile.split(":")[-1]
assert localfilepath.startswith("/")
@@ -711,6 +848,8 @@ class BuildInfoHelper(object):
task_information['task_name'] = taskname
task_information['cpu_usage'] = taskstats['cpu_usage']
task_information['disk_io'] = taskstats['disk_io']
+ if 'elapsed_time' in taskstats:
+ task_information['elapsed_time'] = taskstats['elapsed_time']
task_obj = self.orm_wrapper.get_update_task_object(task_information, True) # must exist
def update_and_store_task(self, event):
@@ -768,12 +907,11 @@ class BuildInfoHelper(object):
def store_missed_state_tasks(self, event):
- for (fn, taskname, taskhash, sstatefile) in event._localdata['missed']:
+ for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
identifier = fn + taskname + "_setscene"
recipe_information = self._get_recipe_information_from_taskfile(fn)
recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
- class MockEvent: pass
mevent = MockEvent()
mevent.taskname = taskname
mevent.taskhash = taskhash
@@ -787,12 +925,11 @@ class BuildInfoHelper(object):
self.orm_wrapper.get_update_task_object(task_information)
- for (fn, taskname, taskhash, sstatefile) in event._localdata['found']:
+ for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
identifier = fn + taskname + "_setscene"
recipe_information = self._get_recipe_information_from_taskfile(fn)
recipe = self.orm_wrapper.get_update_recipe_object(recipe_information)
- class MockEvent: pass
mevent = MockEvent()
mevent.taskname = taskname
mevent.taskhash = taskhash
@@ -804,15 +941,14 @@ class BuildInfoHelper(object):
def store_target_package_data(self, event):
- assert '_localdata' in vars(event)
# for all image targets
for target in self.internal_state['targets']:
if target.is_image:
try:
- pkgdata = event._localdata['pkgdata']
- imgdata = event._localdata['imgdata'][target.target]
+ pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
+ imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'][target.target]
self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'])
- filedata = event._localdata['filedata'][target.target]
+ filedata = BuildInfoHelper._get_data_from_event(event)['filedata'][target.target]
self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
except KeyError:
# we must have not got the data for this image, nothing to save
@@ -848,14 +984,29 @@ class BuildInfoHelper(object):
recipe_info = {}
recipe_info['name'] = pn
- recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
recipe_info['layer_version'] = layer_version_obj
- recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
- recipe_info['license'] = event._depgraph['pn'][pn]['license']
- recipe_info['description'] = event._depgraph['pn'][pn]['description']
- recipe_info['section'] = event._depgraph['pn'][pn]['section']
- recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
- recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
+
+ if 'version' in event._depgraph['pn'][pn]:
+ recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
+
+ if 'summary' in event._depgraph['pn'][pn]:
+ recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
+
+ if 'license' in event._depgraph['pn'][pn]:
+ recipe_info['license'] = event._depgraph['pn'][pn]['license']
+
+ if 'description' in event._depgraph['pn'][pn]:
+ recipe_info['description'] = event._depgraph['pn'][pn]['description']
+
+ if 'section' in event._depgraph['pn'][pn]:
+ recipe_info['section'] = event._depgraph['pn'][pn]['section']
+
+ if 'homepage' in event._depgraph['pn'][pn]:
+ recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
+
+ if 'bugtracker' in event._depgraph['pn'][pn]:
+ recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
+
recipe_info['file_path'] = file_name
recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
recipe.is_image = False
@@ -877,20 +1028,22 @@ class BuildInfoHelper(object):
# save recipe dependency
# buildtime
+ recipedeps_objects = []
for recipe in event._depgraph['depends']:
try:
target = self.internal_state['recipes'][recipe]
for dep in event._depgraph['depends'][recipe]:
dependency = self.internal_state['recipes'][dep]
- Recipe_Dependency.objects.get_or_create( recipe = target,
- depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS)
+ recipedeps_objects.append(Recipe_Dependency( recipe = target,
+ depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS))
except KeyError as e:
if e not in assume_provided and not str(e).startswith("virtual/"):
errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)
+ Recipe_Dependency.objects.bulk_create(recipedeps_objects)
# save all task information
def _save_a_task(taskdesc):
- spec = re.split(r'\.', taskdesc);
+ spec = re.split(r'\.', taskdesc)
pn = ".".join(spec[0:-1])
taskname = spec[-1]
e = event
@@ -907,6 +1060,7 @@ class BuildInfoHelper(object):
tasks[taskdesc] = _save_a_task(taskdesc)
# create dependencies between tasks
+ taskdeps_objects = []
for taskdesc in event._depgraph['tdepends']:
target = tasks[taskdesc]
for taskdep in event._depgraph['tdepends'][taskdesc]:
@@ -915,73 +1069,100 @@ class BuildInfoHelper(object):
dep = _save_a_task(taskdep)
else:
dep = tasks[taskdep]
- Task_Dependency.objects.get_or_create( task = target, depends_on = dep )
+ taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
+ Task_Dependency.objects.bulk_create(taskdeps_objects)
if (len(errormsg) > 0):
raise Exception(errormsg)
def store_build_package_information(self, event):
- assert '_localdata' in vars(event)
- package_info = event._localdata
+ package_info = BuildInfoHelper._get_data_from_event(event)
self.orm_wrapper.save_build_package_information(self.internal_state['build'],
package_info,
self.internal_state['recipes'],
)
- def store_build_done(self, br_id, be_id):
+ def _store_build_done(self, errorcode):
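+ # self.brbe holds the "<BuildRequest id>:<BuildEnvironment id>" pair read
+ # from the TOASTER_BRBE variable in __init__.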
+ br_id, be_id = self.brbe.split(":")
from bldcontrol.models import BuildEnvironment, BuildRequest
be = BuildEnvironment.objects.get(pk = be_id)
be.lock = BuildEnvironment.LOCK_LOCK
be.save()
br = BuildRequest.objects.get(pk = br_id)
- br.state = BuildRequest.REQ_COMPLETED
- br.build = self.internal_state['build']
+ if errorcode == 0:
+ br.state = BuildRequest.REQ_COMPLETED
+ else:
+ br.state = BuildRequest.REQ_FAILED
br.save()
- def _store_log_information(self, level, text):
- log_information = {}
- log_information['build'] = self.internal_state['build']
- log_information['level'] = level
- log_information['message'] = text
- self.orm_wrapper.create_logmessage(log_information)
-
- def store_log_info(self, text):
- self._store_log_information(LogMessage.INFO, text)
-
- def store_log_warn(self, text):
- self._store_log_information(LogMessage.WARNING, text)
def store_log_error(self, text):
- self._store_log_information(LogMessage.ERROR, text)
+ mockevent = MockEvent()
+ mockevent.levelno = format.ERROR
+ mockevent.msg = text
+ mockevent.pathname = '-- None'
+ mockevent.lineno = -1
+ self.store_log_event(mockevent)
+
+ def store_log_exception(self, text, backtrace = ""):
+ mockevent = MockEvent()
+ mockevent.levelno = -1
+ mockevent.msg = text
+ mockevent.pathname = backtrace
+ mockevent.lineno = -1
+ self.store_log_event(mockevent)
+
def store_log_event(self, event):
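+ # Until the Build object exists, log events are either kept in a backlog
+ # (standalone runs) or recorded as BRError objects (under Toaster control),
+ # and the backlog is replayed once 'build' appears in internal_state.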
+ if event.levelno < format.WARNING:
+ return
+
+ if 'args' in vars(event):
+ event.msg = event.msg % event.args
+
+ if not 'build' in self.internal_state:
+ if self.brbe is None:
+ if not 'backlog' in self.internal_state:
+ self.internal_state['backlog'] = []
+ self.internal_state['backlog'].append(event)
+ else: # we're under Toaster control, post the errors to the build request
+ from bldcontrol.models import BuildRequest, BRError
+ br, be = self.brbe.split(":")
+ buildrequest = BuildRequest.objects.get(pk = br)
+ brerror = BRError.objects.create(req = buildrequest, errtype="build", errmsg = event.msg)
+
+ return
+
if 'build' in self.internal_state and 'backlog' in self.internal_state:
if len(self.internal_state['backlog']):
tempevent = self.internal_state['backlog'].pop()
- print "Saving stored event ", tempevent
+ logger.debug(1, "buildinfohelper: Saving stored event %s " % tempevent)
self.store_log_event(tempevent)
else:
+ logger.error("buildinfohelper: Events not saved: %s" % self.internal_state['backlog'])
del self.internal_state['backlog']
- if event.levelno < format.WARNING:
- return
-
- if not 'build' in self.internal_state:
- print "Save event for later"
- if not 'backlog' in self.internal_state:
- self.internal_state['backlog'] = []
- self.internal_state['backlog'].append(event)
-
- return
log_information = {}
log_information['build'] = self.internal_state['build']
- if event.levelno >= format.ERROR:
+ if event.levelno == format.ERROR:
log_information['level'] = LogMessage.ERROR
elif event.levelno == format.WARNING:
log_information['level'] = LogMessage.WARNING
+ elif event.levelno == -1: # toaster self-logging
+ log_information['level'] = -1
+ else:
+ log_information['level'] = LogMessage.INFO
+
log_information['message'] = event.msg
log_information['pathname'] = event.pathname
log_information['lineno'] = event.lineno
self.orm_wrapper.create_logmessage(log_information)
+ def close(self, errorcode):
+ if self.brbe is not None:
+ self._store_build_done(errorcode)
+
+ if 'backlog' in self.internal_state:
+ for event in self.internal_state['backlog']:
+ logger.error("Unsaved log: %s", event.msg)
diff --git a/bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py b/bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py
index ab5b614c8d..b5eb3d8738 100644
--- a/bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py
+++ b/bitbake/lib/bb/ui/crumbs/hig/simplesettingsdialog.py
@@ -230,10 +230,7 @@ class SimpleSettingsDialog (CrumbsDialog, SettingsUIHelper):
self.configuration.sstatemirror = ""
for mirror in self.sstatemirrors_list:
if mirror[1] != "" and mirror[2].startswith("file://"):
- if mirror[1].endswith("\\1"):
- smirror = mirror[2] + " " + mirror[1] + " \\n "
- else:
- smirror = mirror[2] + " " + mirror[1] + "\\1 \\n "
+ smirror = mirror[2] + " " + mirror[1] + " \\n "
self.configuration.sstatemirror += smirror
self.configuration.bbthread = self.bb_spinner.get_value_as_int()
self.configuration.pmake = self.pmake_spinner.get_value_as_int()
diff --git a/bitbake/lib/bb/ui/depexp.py b/bitbake/lib/bb/ui/depexp.py
index 4578dce615..0c71a3ebd2 100644
--- a/bitbake/lib/bb/ui/depexp.py
+++ b/bitbake/lib/bb/ui/depexp.py
@@ -198,7 +198,7 @@ def main(server, eventHandler, params):
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
return 1
if 'msg' in cmdline and cmdline['msg']:
- logger.error(cmdline['msg'])
+ print(cmdline['msg'])
return 1
cmdline = cmdline['action']
if not cmdline or cmdline[0] != "generateDotGraph":
@@ -236,7 +236,7 @@ def main(server, eventHandler, params):
try:
event = eventHandler.waitEvent(0.25)
if gtkthread.quit.isSet():
- _, error = server.runCommand(["stateStop"])
+ _, error = server.runCommand(["stateForceShutdown"])
if error:
print('Unable to cleanly stop: %s' % error)
break
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index 746dcf462a..9e58b31727 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -284,6 +284,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if not params.observe_only:
params.updateFromServer(server)
+ params.updateToServer(server)
cmdline = params.parseActions()
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
@@ -507,7 +508,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
termfilter.clearFooter()
# ignore interrupted io
if ioerror.args[0] == 4:
- pass
+ continue
+ sys.stderr.write(str(ioerror))
+ if not params.observe_only:
+ _, error = server.runCommand(["stateForceShutdown"])
+ main.shutdown = 2
except KeyboardInterrupt:
termfilter.clearFooter()
if params.observe_only:
@@ -526,7 +531,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
logger.error("Unable to cleanly shutdown: %s" % error)
main.shutdown = main.shutdown + 1
pass
-
+ except Exception as e:
+ sys.stderr.write(str(e))
+ if not params.observe_only:
+ _, error = server.runCommand(["stateForceShutdown"])
+ main.shutdown = 2
summary = ""
if taskfailures:
summary += pluralise("\nSummary: %s task failed:",
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
index b6c20ec388..9589a77d75 100644
--- a/bitbake/lib/bb/ui/ncurses.py
+++ b/bitbake/lib/bb/ui/ncurses.py
@@ -361,13 +361,13 @@ class NCursesUI:
shutdown = shutdown + 1
pass
-def main(server, eventHandler):
+def main(server, eventHandler, params):
if not os.isatty(sys.stdout.fileno()):
print("FATAL: Unable to run 'ncurses' UI without a TTY.")
return
ui = NCursesUI()
try:
- curses.wrapper(ui.main, server, eventHandler)
+ curses.wrapper(ui.main, server, eventHandler, params)
except:
import traceback
traceback.print_exc()
diff --git a/bitbake/lib/bb/ui/toasterui.py b/bitbake/lib/bb/ui/toasterui.py
index 2f628e9a72..a85ad5a06a 100644
--- a/bitbake/lib/bb/ui/toasterui.py
+++ b/bitbake/lib/bb/ui/toasterui.py
@@ -62,15 +62,6 @@ def _log_settings_from_server(server):
def main(server, eventHandler, params ):
- includelogs, loglines = _log_settings_from_server(server)
-
- # verify and warn
- build_history_enabled = True
- inheritlist, error = server.runCommand(["getVariable", "INHERIT"])
- if not "buildhistory" in inheritlist.split(" "):
- logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
- build_history_enabled = False
-
helper = uihelper.BBUIHelper()
console = logging.StreamHandler(sys.stdout)
@@ -80,6 +71,16 @@ def main(server, eventHandler, params ):
console.setFormatter(format)
logger.addHandler(console)
+ includelogs, loglines = _log_settings_from_server(server)
+
+ # verify and warn
+ build_history_enabled = True
+ inheritlist, error = server.runCommand(["getVariable", "INHERIT"])
+
+ if not "buildhistory" in inheritlist.split(" "):
+ logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
+ build_history_enabled = False
+
if not params.observe_only:
logger.error("ToasterUI can only work in observer mode")
return
@@ -94,7 +95,6 @@ def main(server, eventHandler, params ):
first = True
buildinfohelper = BuildInfoHelper(server, build_history_enabled)
- brbe = None
while True:
try:
@@ -111,7 +111,7 @@ def main(server, eventHandler, params ):
helper.eventHandler(event)
if isinstance(event, bb.event.BuildStarted):
- brbe = buildinfohelper.store_started_build(event)
+ buildinfohelper.store_started_build(event)
if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
buildinfohelper.update_and_store_task(event)
@@ -220,26 +220,28 @@ def main(server, eventHandler, params ):
if isinstance(event, (bb.command.CommandCompleted,
bb.command.CommandFailed,
bb.command.CommandExit)):
+ errorcode = 0
if (isinstance(event, bb.command.CommandFailed)):
event.levelno = format.ERROR
- event.msg = event.error
+ event.msg = "Command Failed " + event.error
event.pathname = ""
event.lineno = 0
buildinfohelper.store_log_event(event)
errors += 1
+ errorcode = 1
buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
-
+ buildinfohelper.close(errorcode)
+ # mark the log output; controllers may kill the toasterUI after seeing this log
+ logger.info("ToasterUI build done")
# we start a new build info
- if brbe is not None:
- br_id, be_id = brbe.split(":")
- buildinfohelper.store_build_done(br_id, be_id)
+ if buildinfohelper.brbe is not None:
- print "we are under BuildEnvironment management - after the build, we exit"
+ logger.debug(1, "ToasterUI under BuildEnvironment management - exiting after the build")
server.terminateServer()
else:
- print "prepared for new build"
+ logger.debug(1, "ToasterUI prepared for new build")
errors = 0
warnings = 0
taskfailures = []
@@ -260,8 +262,12 @@ def main(server, eventHandler, params ):
buildinfohelper.store_missed_state_tasks(event)
elif event.type == "ImageFileSize":
buildinfohelper.update_target_image_file(event)
+ elif event.type == "ArtifactFileSize":
+ buildinfohelper.update_artifact_image_file(event)
elif event.type == "LicenseManifestPath":
buildinfohelper.store_license_manifest_path(event)
+ else:
+ logger.error("Unprocessed MetadataEvent %s " % str(event))
continue
if isinstance(event, bb.cooker.CookerExit):
@@ -294,9 +300,17 @@ def main(server, eventHandler, params ):
main.shutdown = 1
pass
except Exception as e:
- logger.error(e)
+ # print errors to log
import traceback
- traceback.print_exc()
+ exception_data = traceback.format_exc()
+ logger.error("%s\n%s" % (e, exception_data))
+
+ # save them to database, if possible; if it fails, we already logged to console.
+ try:
+ buildinfohelper.store_log_exception("%s\n%s" % (str(e), exception_data))
+ except Exception as ce:
+ logger.error("CRITICAL - Failed to to save toaster exception to the database: %s" % str(ce))
+
pass
if interrupted:
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index f62709bed5..90090b2fe3 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -264,7 +264,7 @@ def _print_trace(body, line):
def better_compile(text, file, realfile, mode = "exec"):
"""
A better compile method. This method
- will print the offending lines.
+ will print the offending lines.
"""
try:
return compile(text, file, mode)
@@ -522,7 +522,7 @@ def filter_environment(good_vars):
os.unsetenv(key)
del os.environ[key]
- if len(removed_vars):
+ if removed_vars:
logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
return removed_vars
@@ -530,7 +530,7 @@ def filter_environment(good_vars):
def approved_variables():
"""
Determine and return the list of whitelisted variables which are approved
- to remain in the envrionment.
+ to remain in the environment.
"""
if 'BB_PRESERVE_ENV' in os.environ:
return os.environ.keys()
@@ -862,21 +862,16 @@ def nonblockingfd(fd):
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
def process_profilelog(fn):
- # Redirect stdout to capture profile information
pout = open(fn + '.processed', 'w')
- so = sys.stdout.fileno()
- orig_so = os.dup(sys.stdout.fileno())
- os.dup2(pout.fileno(), so)
import pstats
- p = pstats.Stats(fn)
+ p = pstats.Stats(fn, stream=pout)
p.sort_stats('time')
p.print_stats()
p.print_callers()
p.sort_stats('cumulative')
p.print_stats()
- os.dup2(orig_so, so)
pout.flush()
pout.close()
@@ -884,5 +879,174 @@ def process_profilelog(fn):
# Was present to work around multiprocessing pool bugs in python < 2.7.3
#
def multiprocessingpool(*args, **kwargs):
+
+ import multiprocessing.pool
+ #import multiprocessing.util
+ #multiprocessing.util.log_to_stderr(10)
+ # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
+ # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
+ def wrapper(func):
+ def wrap(self, timeout=None):
+ return func(self, timeout=timeout if timeout is not None else 1e100)
+ return wrap
+ multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)
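+ # As a result, iterating e.g. "for r in pool.imap(fn, items):" can be
+ # interrupted with SIGINT/SIGTERM instead of blocking until all work completes.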
+
return multiprocessing.Pool(*args, **kwargs)
+def exec_flat_python_func(func, *args, **kwargs):
+ """Execute a flat python function (defined with def funcname(args):...)"""
+ # Prepare a small piece of python code which calls the requested function
+ # To do this we need to prepare two things - a set of variables we can use to pass
+ # the values of arguments into the calling function, and the list of arguments for
+ # the function being called
+ context = {}
+ funcargs = []
+ # Handle unnamed arguments
+ aidx = 1
+ for arg in args:
+ argname = 'arg_%s' % aidx
+ context[argname] = arg
+ funcargs.append(argname)
+ aidx += 1
+ # Handle keyword arguments
+ context.update(kwargs)
+ funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
+ code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
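+ # Illustration (hypothetical name): exec_flat_python_func('my_helper', 1, 2, flag=True)
+ # produces code = "retval = my_helper(arg_1, arg_2, flag=flag)" at this point.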
+ comp = bb.utils.better_compile(code, '<string>', '<string>')
+ bb.utils.better_exec(comp, context, code, '<string>')
+ return context['retval']
+
+def edit_metadata_file(meta_file, variables, func):
+ """Edit a recipe or config file and modify one or more specified
+ variable values set in the file using a specified callback function.
+ The file is only written to if the value(s) actually change.
+ """
+ var_res = {}
+ for var in variables:
+ var_res[var] = re.compile(r'^%s[ \t]*[?+]*=' % var)
+
+ updated = False
+ varset_start = ''
+ newlines = []
+ in_var = None
+ full_value = ''
+
+ def handle_var_end():
+ (newvalue, indent, minbreak) = func(in_var, full_value)
+ if newvalue != full_value:
+ if isinstance(newvalue, list):
+ indentspc = ' ' * indent
+ if minbreak:
+ # First item on first line
+ if len(newvalue) == 1:
+ newlines.append('%s "%s"\n' % (varset_start, newvalue[0]))
+ else:
+ newlines.append('%s "%s\\\n' % (varset_start, newvalue[0]))
+ for item in newvalue[1:]:
+ newlines.append('%s%s \\\n' % (indentspc, item))
+ newlines.append('%s"\n' % indentspc)
+ else:
+ # No item on first line
+ newlines.append('%s " \\\n' % varset_start)
+ for item in newvalue:
+ newlines.append('%s%s \\\n' % (indentspc, item))
+ newlines.append('%s"\n' % indentspc)
+ else:
+ newlines.append('%s "%s"\n' % (varset_start, newvalue))
+ return True
+ return False
+
+ with open(meta_file, 'r') as f:
+ for line in f:
+ if in_var:
+ value = line.rstrip()
+ full_value += value[:-1]
+ if value.endswith('"') or value.endswith("'"):
+ if handle_var_end():
+ updated = True
+ in_var = None
+ else:
+ matched = False
+ for (varname, var_re) in var_res.iteritems():
+ if var_re.match(line):
+ splitvalue = line.split('"', 1)
+ varset_start = splitvalue[0].rstrip()
+ value = splitvalue[1].rstrip()
+ if value.endswith('\\'):
+ value = value[:-1]
+ full_value = value
+ if value.endswith('"') or value.endswith("'"):
+ if handle_var_end():
+ updated = True
+ else:
+ in_var = varname
+ matched = True
+ break
+ if not matched:
+ newlines.append(line)
+ if updated:
+ with open(meta_file, 'w') as f:
+ f.writelines(newlines)
+
+def edit_bblayers_conf(bblayers_conf, add, remove):
+ """Edit bblayers.conf, adding and/or removing layers"""
+
+ import fnmatch
+
+ def remove_trailing_sep(pth):
+ if pth and pth[-1] == os.sep:
+ pth = pth[:-1]
+ return pth
+
+ def layerlist_param(value):
+ if not value:
+ return []
+ elif isinstance(value, list):
+ return [remove_trailing_sep(x) for x in value]
+ else:
+ return [remove_trailing_sep(value)]
+
+ notadded = []
+ notremoved = []
+
+ addlayers = layerlist_param(add)
+ removelayers = layerlist_param(remove)
+
+ # Need to use a list here because we can't set non-local variables from a callback in python 2.x
+ bblayercalls = []
+
+ def handle_bblayers(varname, origvalue):
+ bblayercalls.append(varname)
+ updated = False
+ bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
+ if removelayers:
+ for removelayer in removelayers:
+ matched = False
+ for layer in bblayers:
+ if fnmatch.fnmatch(layer, removelayer):
+ updated = True
+ matched = True
+ bblayers.remove(layer)
+ break
+ if not matched:
+ notremoved.append(removelayer)
+ if addlayers:
+ for addlayer in addlayers:
+ if addlayer not in bblayers:
+ updated = True
+ bblayers.append(addlayer)
+ else:
+ notadded.append(addlayer)
+
+ if updated:
+ return (bblayers, 2, False)
+ else:
+ return (origvalue, 2, False)
+
+ edit_metadata_file(bblayers_conf, ['BBLAYERS'], handle_bblayers)
+
+ if not bblayercalls:
+ raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)
+
+ return (notadded, notremoved)
+
diff --git a/bitbake/lib/bs4/AUTHORS.txt b/bitbake/lib/bs4/AUTHORS.txt
new file mode 100644
index 0000000000..2ac8fcc8cc
--- /dev/null
+++ b/bitbake/lib/bs4/AUTHORS.txt
@@ -0,0 +1,43 @@
+Behold, mortal, the origins of Beautiful Soup...
+================================================
+
+Leonard Richardson is the primary programmer.
+
+Aaron DeVore is awesome.
+
+Mark Pilgrim provided the encoding detection code that forms the base
+of UnicodeDammit.
+
+Thomas Kluyver and Ezio Melotti finished the work of getting Beautiful
+Soup 4 working under Python 3.
+
+Simon Willison wrote soupselect, which was used to make Beautiful Soup
+support CSS selectors.
+
+Sam Ruby helped with a lot of edge cases.
+
+Jonathan Ellis was awarded the prestigious Beau Potage D'Or for his
+work in solving the nestable tags conundrum.
+
+An incomplete list of people who have contributed patches to Beautiful
+Soup:
+
+ Istvan Albert, Andrew Lin, Anthony Baxter, Andrew Boyko, Tony Chang,
+ Zephyr Fang, Fuzzy, Roman Gaufman, Yoni Gilad, Richie Hindle, Peteris
+ Krumins, Kent Johnson, Ben Last, Robert Leftwich, Staffan Malmgren,
+ Ksenia Marasanova, JP Moins, Adam Monsen, John Nagle, "Jon", Ed
+ Oskiewicz, Greg Phillips, Giles Radford, Arthur Rudolph, Marko
+ Samastur, Jouni Seppänen, Alexander Schmolck, Andy Theyers, Glyn
+ Webster, Paul Wright, Danny Yoo
+
+An incomplete list of people who made suggestions or found bugs or
+found ways to break Beautiful Soup:
+
+ Hanno Böck, Matteo Bertini, Chris Curvey, Simon Cusack, Bruce Eckel,
+ Matt Ernst, Michael Foord, Tom Harris, Bill de hOra, Donald Howes,
+ Matt Patterson, Scott Roberts, Steve Strassmann, Mike Williams,
+ warchild at redho dot com, Sami Kuisma, Carlos Rocha, Bob Hutchison,
+ Joren Mc, Michal Migurski, John Kleven, Tim Heaney, Tripp Lilley, Ed
+ Summers, Dennis Sutch, Chris Smith, Aaron Sweep^W Swartz, Stuart
+ Turner, Greg Edwards, Kevin J Kalupson, Nikos Kouremenos, Artur de
+ Sousa Rocha, Yichun Wei, Per Vognsen
diff --git a/bitbake/lib/bs4/COPYING.txt b/bitbake/lib/bs4/COPYING.txt
new file mode 100644
index 0000000000..d668d13f04
--- /dev/null
+++ b/bitbake/lib/bs4/COPYING.txt
@@ -0,0 +1,26 @@
+Beautiful Soup is made available under the MIT license:
+
+ Copyright (c) 2004-2012 Leonard Richardson
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+ BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE, DAMMIT.
+
+Beautiful Soup incorporates code from the html5lib library, which is
+also made available under the MIT license.
diff --git a/bitbake/lib/bs4/NEWS.txt b/bitbake/lib/bs4/NEWS.txt
new file mode 100644
index 0000000000..88a60a2458
--- /dev/null
+++ b/bitbake/lib/bs4/NEWS.txt
@@ -0,0 +1,1066 @@
+= 4.3.2 (20131002) =
+
+* Fixed a bug in which short Unicode input was improperly encoded to
+ ASCII when checking whether or not it was the name of a file on
+ disk. [bug=1227016]
+
+* Fixed a crash when a short input contains data not valid in
+ filenames. [bug=1232604]
+
+* Fixed a bug that caused Unicode data put into UnicodeDammit to
+ return None instead of the original data. [bug=1214983]
+
+* Combined two tests to stop a spurious test failure when tests are
+ run by nosetests. [bug=1212445]
+
+= 4.3.1 (20130815) =
+
+* Fixed yet another problem with the html5lib tree builder, caused by
+ html5lib's tendency to rearrange the tree during
+ parsing. [bug=1189267]
+
+* Fixed a bug that caused the optimized version of find_all() to
+ return nothing. [bug=1212655]
+
+= 4.3.0 (20130812) =
+
+* Instead of converting incoming data to Unicode and feeding it to the
+ lxml tree builder in chunks, Beautiful Soup now makes successive
+ guesses at the encoding of the incoming data, and tells lxml to
+ parse the data as that encoding. Giving lxml more control over the
+ parsing process improves performance and avoids a number of bugs and
+ issues with the lxml parser which had previously required elaborate
+ workarounds:
+
+ - An issue in which lxml refuses to parse Unicode strings on some
+ systems. [bug=1180527]
+
+ - A returning bug that truncated documents longer than a (very
+ small) size. [bug=963880]
+
+ - A returning bug in which extra spaces were added to a document if
+ the document defined a charset other than UTF-8. [bug=972466]
+
+ This required a major overhaul of the tree builder architecture. If
+ you wrote your own tree builder and didn't tell me, you'll need to
+ modify your prepare_markup() method.
+
+* The UnicodeDammit code that makes guesses at encodings has been
+ split into its own class, EncodingDetector. A lot of apparently
+ redundant code has been removed from Unicode, Dammit, and some
+ undocumented features have also been removed.
+
+* Beautiful Soup will issue a warning if instead of markup you pass it
+ a URL or the name of a file on disk (a common beginner's mistake).
+
+* A number of optimizations improve the performance of the lxml tree
+ builder by about 33%, the html.parser tree builder by about 20%, and
+ the html5lib tree builder by about 15%.
+
+* All find_all calls should now return a ResultSet object. Patch by
+ Aaron DeVore. [bug=1194034]
+
+= 4.2.1 (20130531) =
+
+* The default XML formatter will now replace ampersands even if they
+ appear to be part of entities. That is, "&lt;" will become
+ "&amp;lt;". The old code was left over from Beautiful Soup 3, which
+ didn't always turn entities into Unicode characters.
+
+ If you really want the old behavior (maybe because you add new
+ strings to the tree, those strings include entities, and you want
+ the formatter to leave them alone on output), it can be found in
+ EntitySubstitution.substitute_xml_containing_entities(). [bug=1182183]
+
+* Gave new_string() the ability to create subclasses of
+ NavigableString. [bug=1181986]
+
+* Fixed another bug by which the html5lib tree builder could create a
+ disconnected tree. [bug=1182089]
+
+* The .previous_element of a BeautifulSoup object is now always None,
+ not the last element to be parsed. [bug=1182089]
+
+* Fixed test failures when lxml is not installed. [bug=1181589]
+
+* html5lib now supports Python 3. Fixed some Python 2-specific
+ code in the html5lib test suite. [bug=1181624]
+
+* The html.parser treebuilder can now handle numeric attributes in
+ text when the hexadecimal name of the attribute starts with a
+ capital X. Patch by Tim Shirley. [bug=1186242]
+
+= 4.2.0 (20130514) =
+
+* The Tag.select() method now supports a much wider variety of CSS
+ selectors.
+
+ - Added support for the adjacent sibling combinator (+) and the
+ general sibling combinator (~). Tests by "liquider". [bug=1082144]
+
+ - The combinators (>, +, and ~) can now combine with any supported
+ selector, not just one that selects based on tag name.
+
+ - Added limited support for the "nth-of-type" pseudo-class. Code
+ by Sven Slootweg. [bug=1109952]
+
+* The BeautifulSoup class is now aliased to "_s" and "_soup", making
+ it quicker to type the import statement in an interactive session:
+
+ from bs4 import _s
+ or
+ from bs4 import _soup
+
+ The alias may change in the future, so don't use this in code you're
+ going to run more than once.
+
+* Added the 'diagnose' submodule, which includes several useful
+ functions for reporting problems and doing tech support.
+
+ - diagnose(data) tries the given markup on every installed parser,
+ reporting exceptions and displaying successes. If a parser is not
+ installed, diagnose() mentions this fact.
+
+ - lxml_trace(data, html=True) runs the given markup through lxml's
+ XML parser or HTML parser, and prints out the parser events as
+ they happen. This helps you quickly determine whether a given
+ problem occurs in lxml code or Beautiful Soup code.
+
+ - htmlparser_trace(data) is the same thing, but for Python's
+ built-in HTMLParser class.
+
+* In an HTML document, the contents of a <script> or <style> tag will
+ no longer undergo entity substitution by default. XML documents work
+ the same way they did before. [bug=1085953]
+
+* Methods like get_text() and properties like .strings now only give
+ you strings that are visible in the document--no comments or
+ processing commands. [bug=1050164]
+
+* The prettify() method now leaves the contents of <pre> tags
+ alone. [bug=1095654]
+
+* Fix a bug in the html5lib treebuilder which sometimes created
+ disconnected trees. [bug=1039527]
+
+* Fix a bug in the lxml treebuilder which crashed when a tag included
+ an attribute from the predefined "xml:" namespace. [bug=1065617]
+
+* Fix a bug by which keyword arguments to find_parent() were not
+ being passed on. [bug=1126734]
+
+* Stop a crash when unwisely messing with a tag that's been
+ decomposed. [bug=1097699]
+
+* Now that lxml's segfault on invalid doctype has been fixed, fixed a
+ corresponding problem on the Beautiful Soup end that was previously
+ invisible. [bug=984936]
+
+* Fixed an exception when an overspecified CSS selector didn't match
+ anything. Code by Stefaan Lippens. [bug=1168167]
+
+= 4.1.3 (20120820) =
+
+* Skipped a test under Python 2.6 and Python 3.1 to avoid a spurious
+ test failure caused by the lousy HTMLParser in those
+ versions. [bug=1038503]
+
+* Raise a more specific error (FeatureNotFound) when a requested
+ parser or parser feature is not installed. Raise NotImplementedError
+ instead of ValueError when the user calls insert_before() or
+ insert_after() on the BeautifulSoup object itself. Patch by Aaron
+ Devore. [bug=1038301]
+
+= 4.1.2 (20120817) =
+
+* As per PEP-8, allow searching by CSS class using the 'class_'
+ keyword argument. [bug=1037624]
+
+* Display namespace prefixes for namespaced attribute names, instead of
+ the fully-qualified names given by the lxml parser. [bug=1037597]
+
+* Fixed a crash on encoding when an attribute name contained
+ non-ASCII characters.
+
+* When sniffing encodings, if the cchardet library is installed,
+ Beautiful Soup uses it instead of chardet. cchardet is much
+ faster. [bug=1020748]
+
+* Use logging.warning() instead of warning.warn() to notify the user
+ that characters were replaced with REPLACEMENT
+ CHARACTER. [bug=1013862]
+
+= 4.1.1 (20120703) =
+
+* Fixed an html5lib tree builder crash which happened when html5lib
+ moved a tag with a multivalued attribute from one part of the tree
+ to another. [bug=1019603]
+
+* Correctly display closing tags with an XML namespace declared. Patch
+ by Andreas Kostyrka. [bug=1019635]
+
+* Fixed a typo that made parsing significantly slower than it should
+ have been, and also waited too long to close tags with XML
+ namespaces. [bug=1020268]
+
+* get_text() now returns an empty Unicode string if there is no text,
+ rather than an empty bytestring. [bug=1020387]
+
+= 4.1.0 (20120529) =
+
+* Added experimental support for fixing Windows-1252 characters
+ embedded in UTF-8 documents. (UnicodeDammit.detwingle())
+
+* Fixed the handling of &quot; with the built-in parser. [bug=993871]
+
+* Comments, processing instructions, document type declarations, and
+ markup declarations are now treated as preformatted strings, the way
+ CData blocks are. [bug=1001025]
+
+* Fixed a bug with the lxml treebuilder that prevented the user from
+ adding attributes to a tag that didn't originally have
+ attributes. [bug=1002378] Thanks to Oliver Beattie for the patch.
+
+* Fixed some edge-case bugs having to do with inserting an element
+ into a tag it's already inside, and replacing one of a tag's
+ children with another. [bug=997529]
+
+* Added the ability to search for attribute values specified in UTF-8. [bug=1003974]
+
+ This caused a major refactoring of the search code. All the tests
+ pass, but it's possible that some searches will behave differently.
+
+= 4.0.5 (20120427) =
+
+* Added a new method, wrap(), which wraps an element in a tag.
+
+* Renamed replace_with_children() to unwrap(), which is easier to
+ understand and also the jQuery name of the function.
+
+* Made encoding substitution in <meta> tags completely transparent (no
+ more %SOUP-ENCODING%).
+
+* Fixed a bug in decoding data that contained a byte-order mark, such
+ as data encoded in UTF-16LE. [bug=988980]
+
+* Fixed a bug that made the HTMLParser treebuilder generate XML
+ definitions ending with two question marks instead of
+ one. [bug=984258]
+
+* Upon document generation, CData objects are no longer run through
+ the formatter. [bug=988905]
+
+* The test suite now passes when lxml is not installed, whether or not
+ html5lib is installed. [bug=987004]
+
+* Print a warning on HTMLParseErrors to let people know they should
+ install a better parser library.
+
+= 4.0.4 (20120416) =
+
+* Fixed a bug that sometimes created disconnected trees.
+
+* Fixed a bug with the string setter that moved a string around the
+ tree instead of copying it. [bug=983050]
+
+* Attribute values are now run through the provided output formatter.
+ Previously they were always run through the 'minimal' formatter. In
+ the future I may make it possible to specify different formatters
+ for attribute values and strings, but for now, consistent behavior
+ is better than inconsistent behavior. [bug=980237]
+
+* Added the missing renderContents method from Beautiful Soup 3. Also
+ added an encode_contents() method to go along with decode_contents().
+
+* Give a more useful error when the user tries to run the Python 2
+ version of BS under Python 3.
+
+* UnicodeDammit can now convert Microsoft smart quotes to ASCII with
+ UnicodeDammit(markup, smart_quotes_to="ascii").
+
+= 4.0.3 (20120403) =
+
+* Fixed a typo that caused some versions of Python 3 to convert the
+ Beautiful Soup codebase incorrectly.
+
+* Got rid of the 4.0.2 workaround for HTML documents--it was
+ unnecessary and the workaround was triggering a (possibly different,
+ but related) bug in lxml. [bug=972466]
+
+= 4.0.2 (20120326) =
+
+* Worked around a possible bug in lxml that prevents non-tiny XML
+ documents from being parsed. [bug=963880, bug=963936]
+
+* Fixed a bug where specifying `text` while also searching for a tag
+ only worked if `text` wanted an exact string match. [bug=955942]
+
+= 4.0.1 (20120314) =
+
+* This is the first official release of Beautiful Soup 4. There is no
+ 4.0.0 release, to eliminate any possibility that packaging software
+ might treat "4.0.0" as being an earlier version than "4.0.0b10".
+
+* Brought BS up to date with the latest release of soupselect, adding
+ CSS selector support for direct descendant matches and multiple CSS
+ class matches.
+
+= 4.0.0b10 (20120302) =
+
+* Added support for simple CSS selectors, taken from the soupselect project.
+
+* Fixed a crash when using html5lib. [bug=943246]
+
+* In HTML5-style <meta charset="foo"> tags, the value of the "charset"
+ attribute is now replaced with the appropriate encoding on
+ output. [bug=942714]
+
+* Fixed a bug that caused calling a tag to sometimes call find_all()
+ with the wrong arguments. [bug=944426]
+
+* For backwards compatibility, brought back the BeautifulStoneSoup
+ class as a deprecated wrapper around BeautifulSoup.
+
+= 4.0.0b9 (20120228) =
+
+* Fixed the string representation of DOCTYPEs that have both a public
+ ID and a system ID.
+
+* Fixed the generated XML declaration.
+
+* Renamed Tag.nsprefix to Tag.prefix, for consistency with
+ NamespacedAttribute.
+
+* Fixed a test failure that occurred on Python 3.x when chardet was
+ installed.
+
+* Made prettify() return Unicode by default, so it will look nice on
+ Python 3 when passed into print().
+
+= 4.0.0b8 (20120224) =
+
+* All tree builders now preserve namespace information in the
+ documents they parse. If you use the html5lib parser or lxml's XML
+ parser, you can access the namespace URL for a tag as tag.namespace.
+
+ However, there is no special support for namespace-oriented
+ searching or tree manipulation. When you search the tree, you need
+ to use namespace prefixes exactly as they're used in the original
+ document.
+
+* The string representation of a DOCTYPE always ends in a newline.
+
+* Issue a warning if the user tries to use a SoupStrainer in
+ conjunction with the html5lib tree builder, which doesn't support
+ them.
+
+= 4.0.0b7 (20120223) =
+
+* Upon decoding to string, any characters that can't be represented in
+ your chosen encoding will be converted into numeric XML entity
+ references.
+
+* Issue a warning if characters were replaced with REPLACEMENT
+ CHARACTER during Unicode conversion.
+
+* Restored compatibility with Python 2.6.
+
+* The install process no longer installs docs or auxiliary text files.
+
+* It's now possible to deepcopy a BeautifulSoup object created with
+ Python's built-in HTML parser.
+
+* About 100 unit tests that "test" the behavior of various parsers on
+ invalid markup have been removed. Legitimate changes to those
+ parsers caused these tests to fail, indicating that perhaps
+ Beautiful Soup should not test the behavior of foreign
+ libraries.
+
+ The problematic unit tests have been reformulated as informational
+ comparisons generated by the script
+ scripts/demonstrate_parser_differences.py.
+
+ This makes Beautiful Soup compatible with html5lib version 0.95 and
+ future versions of HTMLParser.
+
+= 4.0.0b6 (20120216) =
+
+* Multi-valued attributes like "class" always have a list of values,
+ even if there's only one value in the list.
+
+* Added a number of multi-valued attributes defined in HTML5.
+
+* Stopped generating a space before the slash that closes an
+ empty-element tag. This may come back if I add a special XHTML mode
+ (http://www.w3.org/TR/xhtml1/#C_2), but right now it's pretty
+ useless.
+
+* Passing text along with tag-specific arguments to a find* method:
+
+ find("a", text="Click here")
+
+ will find tags that contain the given text as their
+ .string. Previously, the tag-specific arguments were ignored and
+ only strings were searched.
+
+* Fixed a bug that caused the html5lib tree builder to build a
+ partially disconnected tree. Generally cleaned up the html5lib tree
+ builder.
+
+* If you restrict a multi-valued attribute like "class" to a string
+ that contains spaces, Beautiful Soup will only consider it a match
+ if the values correspond to that specific string.
+
+= 4.0.0b5 (20120209) =
+
+* Rationalized Beautiful Soup's treatment of CSS class. A tag
+ belonging to multiple CSS classes is treated as having a list of
+ values for the 'class' attribute. Searching for a CSS class will
+ match *any* of the CSS classes.
+
+ This actually affects all attributes that the HTML standard defines
+ as taking multiple values (class, rel, rev, archive, accept-charset,
+ and headers), but 'class' is by far the most common. [bug=41034]
+
+* If you pass anything other than a dictionary as the second argument
+ to one of the find* methods, it'll assume you want to use that
+ object to search against a tag's CSS classes. Previously this only
+ worked if you passed in a string.
+
+* Fixed a bug that caused a crash when you passed a dictionary as an
+ attribute value (possibly because you mistyped "attrs"). [bug=842419]
+
+* Unicode, Dammit now detects the encoding in HTML 5-style <meta> tags
+ like <meta charset="utf-8" />. [bug=837268]
+
+* If Unicode, Dammit can't figure out a consistent encoding for a
+ page, it will try each of its guesses again, with errors="replace"
+ instead of errors="strict". This may mean that some data gets
+ replaced with REPLACEMENT CHARACTER, but at least most of it will
+ get turned into Unicode. [bug=754903]
+
+* Patched over a bug in html5lib (?) that was crashing Beautiful Soup
+ on certain kinds of markup. [bug=838800]
+
+* Fixed a bug that wrecked the tree if you replaced an element with an
+ empty string. [bug=728697]
+
+* Improved Unicode, Dammit's behavior when you give it Unicode to
+ begin with.
+
+= 4.0.0b4 (20120208) =
+
+* Added BeautifulSoup.new_string() to go along with BeautifulSoup.new_tag()
+
+* BeautifulSoup.new_tag() will follow the rules of whatever
+ tree-builder was used to create the original BeautifulSoup object. A
+ new <p> tag will look like "<p />" if the soup object was created to
+ parse XML, but it will look like "<p></p>" if the soup object was
+ created to parse HTML.
+
+* We pass in strict=False to html.parser on Python 3, greatly
+ improving html.parser's ability to handle bad HTML.
+
+* We also monkeypatch a serious bug in html.parser that made
+ strict=False disastrous on Python 3.2.2.
+
+* Replaced the "substitute_html_entities" argument with the
+ more general "formatter" argument.
+
+* Bare ampersands and angle brackets are always converted to XML
+ entities unless the user prevents it.
+
+* Added PageElement.insert_before() and PageElement.insert_after(),
+ which let you put an element into the parse tree with respect to
+ some other element.
+
+* Raise an exception when the user tries to do something nonsensical
+ like insert a tag into itself.
+
+
+= 4.0.0b3 (20120203) =
+
+Beautiful Soup 4 is a nearly-complete rewrite that removes Beautiful
+Soup's custom HTML parser in favor of a system that lets you write a
+little glue code and plug in any HTML or XML parser you want.
+
+Beautiful Soup 4.0 comes with glue code for four parsers:
+
+ * Python's standard HTMLParser (html.parser in Python 3)
+ * lxml's HTML and XML parsers
+ * html5lib's HTML parser
+
+HTMLParser is the default, but I recommend you install lxml if you
+can.
+
+For complete documentation, see the Sphinx documentation in
+bs4/doc/source/. What follows is a summary of the changes from
+Beautiful Soup 3.
+
+=== The module name has changed ===
+
+Previously you imported the BeautifulSoup class from a module also
+called BeautifulSoup. To save keystrokes and make it clear which
+version of the API is in use, the module is now called 'bs4':
+
+ >>> from bs4 import BeautifulSoup
+
+=== It works with Python 3 ===
+
+Beautiful Soup 3.1.0 worked with Python 3, but the parser it used was
+so bad that it barely worked at all. Beautiful Soup 4 works with
+Python 3, and since its parser is pluggable, you don't sacrifice
+quality.
+
+Special thanks to Thomas Kluyver and Ezio Melotti for getting Python 3
+support to the finish line. Ezio Melotti is also to thank for greatly
+improving the HTML parser that comes with Python 3.2.
+
+=== CDATA sections are normal text, if they're understood at all. ===
+
+Currently, the lxml and html5lib HTML parsers ignore CDATA sections in
+markup:
+
+ <p><![CDATA[foo]]></p> => <p></p>
+
+A future version of html5lib will turn CDATA sections into text nodes,
+but only within tags like <svg> and <math>:
+
+ <svg><![CDATA[foo]]></svg> => <p>foo</p>
+
+The default XML parser (which uses lxml behind the scenes) turns CDATA
+sections into ordinary text elements:
+
+ <p><![CDATA[foo]]></p> => <p>foo</p>
+
+In theory it's possible to preserve the CDATA sections when using the
+XML parser, but I don't see how to get it to work in practice.
+
+=== Miscellaneous other stuff ===
+
+If the BeautifulSoup instance has .is_xml set to True, an appropriate
+XML declaration will be emitted when the tree is transformed into a
+string:
+
+ <?xml version="1.0" encoding="utf-8">
+ <markup>
+ ...
+ </markup>
+
+The ['lxml', 'xml'] tree builder sets .is_xml to True; the other tree
+builders set it to False. If you want to parse XHTML with an HTML
+parser, you can set it manually.
+
+
+= 3.2.0 =
+
+The 3.1 series wasn't very useful, so I renamed the 3.0 series to 3.2
+to make it obvious which one you should use.
+
+= 3.1.0 =
+
+A hybrid version that supports 2.4 and can be automatically converted
+to run under Python 3.0. There are three backwards-incompatible
+changes you should be aware of, but no new features or deliberate
+behavior changes.
+
+1. str() may no longer do what you want. This is because the meaning
+of str() inverts between Python 2 and 3; in Python 2 it gives you a
+byte string, in Python 3 it gives you a Unicode string.
+
+The effect of this is that you can't pass an encoding to .__str__
+anymore. Use encode() to get a string and decode() to get Unicode, and
+you'll be ready (well, readier) for Python 3.
+
+2. Beautiful Soup is now based on HTMLParser rather than SGMLParser,
+which is gone in Python 3. There's some bad HTML that SGMLParser
+handled but HTMLParser doesn't, usually to do with attribute values
+that aren't closed or have brackets inside them:
+
+ <a href="foo</a>, </a><a href="bar">baz</a>
+ <a b="<a>">', '<a b="&lt;a&gt;"></a><a>"></a>
+
+A later version of Beautiful Soup will allow you to plug in different
+parsers to make tradeoffs between speed and the ability to handle bad
+HTML.
+
+3. In Python 3 (but not Python 2), HTMLParser converts entities within
+attributes to the corresponding Unicode characters. In Python 2 it's
+possible to parse this string and leave the &eacute; intact.
+
+ <a href="http://crummy.com?sacr&eacute;&bleu">
+
+In Python 3, the &eacute; is always converted to \xe9 during
+parsing.
+
+
+= 3.0.7a =
+
+Added an import that makes BS work in Python 2.3.
+
+
+= 3.0.7 =
+
+Fixed a UnicodeDecodeError when unpickling documents that contain
+non-ASCII characters.
+
+Fixed a TypeError that occurred in some circumstances when a tag
+contained no text.
+
+Jump through hoops to avoid the use of chardet, which can be extremely
+slow in some circumstances. UTF-8 documents should never trigger the
+use of chardet.
+
+Whitespace is preserved inside <pre> and <textarea> tags that contain
+nothing but whitespace.
+
+Beautiful Soup can now parse a doctype that's scoped to an XML namespace.
+
+
+= 3.0.6 =
+
+Got rid of a very old debug line that prevented chardet from working.
+
+Added a Tag.decompose() method that completely disconnects a tree or a
+subset of a tree, breaking it up into bite-sized pieces that are
+easy for the garbage collector to collect.
+
+Tag.extract() now returns the tag that was extracted.
+
+Tag.findNext() now does something with the keyword arguments you pass
+it instead of dropping them on the floor.
+
+Fixed a Unicode conversion bug.
+
+Fixed a bug that garbled some <meta> tags when rewriting them.
+
+
+= 3.0.5 =
+
+Soup objects can now be pickled, and copied with copy.deepcopy.
+
+Tag.append now works properly on existing BS objects. (It wasn't
+originally intended for outside use, but it can be now.) (Giles
+Radford)
+
+Passing in a nonexistent encoding will no longer crash the parser on
+Python 2.4 (John Nagle).
+
+Fixed an underlying bug in SGMLParser that thinks ASCII has 255
+characters instead of 127 (John Nagle).
+
+Entities are converted more consistently to Unicode characters.
+
+Entity references in attribute values are now converted to Unicode
+characters when appropriate. Numeric entities are always converted,
+because SGMLParser always converts them outside of attribute values.
+
+ALL_ENTITIES happens to just be the XHTML entities, so I renamed it to
+XHTML_ENTITIES.
+
+The regular expression for bare ampersands was too loose. In some
+cases ampersands were not being escaped. (Sam Ruby?)
+
+Non-breaking spaces and other special Unicode space characters are no
+longer folded to ASCII spaces. (Robert Leftwich)
+
+Information inside a TEXTAREA tag is now parsed literally, not as HTML
+tags. TEXTAREA now works exactly the same way as SCRIPT. (Zephyr Fang)
+
+= 3.0.4 =
+
+Fixed a bug that crashed Unicode conversion in some cases.
+
+Fixed a bug that prevented UnicodeDammit from being used as a
+general-purpose data scrubber.
+
+Fixed some unit test failures when running against Python 2.5.
+
+When considering whether to convert smart quotes, UnicodeDammit now
+looks at the original encoding in a case-insensitive way.
+
+= 3.0.3 (20060606) =
+
+Beautiful Soup is now usable as a way to clean up invalid XML/HTML (be
+sure to pass in an appropriate value for convertEntities, or XML/HTML
+entities might stick around that aren't valid in HTML/XML). The result
+may not validate, but it should be good enough to not choke a
+real-world XML parser. Specifically, the output of a properly
+constructed soup object should always be valid as part of an XML
+document, but parts may be missing if they were missing in the
+original. As always, if the input is valid XML, the output will also
+be valid.
+
+= 3.0.2 (20060602) =
+
+Previously, Beautiful Soup correctly handled attribute values that
+contained embedded quotes (sometimes by escaping), but not other kinds
+of XML character. Now, it correctly handles or escapes all special XML
+characters in attribute values.
+
+I aliased methods to the 2.x names (fetch, find, findText, etc.) for
+backwards compatibility purposes. Those names are deprecated and if I
+ever do a 4.0 I will remove them. I will, I tell you!
+
+Fixed a bug where the findAll method wasn't passing along any keyword
+arguments.
+
+When run from the command line, Beautiful Soup now acts as an HTML
+pretty-printer, not an XML pretty-printer.
+
+= 3.0.1 (20060530) =
+
+Reintroduced the "fetch by CSS class" shortcut. I thought keyword
+arguments would replace it, but they don't. You can't call soup('a',
+class='foo') because class is a Python keyword.
+
+If Beautiful Soup encounters a meta tag that declares the encoding,
+but a SoupStrainer tells it not to parse that tag, Beautiful Soup will
+no longer try to rewrite the meta tag to mention the new
+encoding. Basically, this makes SoupStrainers work in real-world
+applications instead of crashing the parser.
+
+= 3.0.0 "Who would not give all else for two p" (20060528) =
+
+This release is not backward-compatible with previous releases. If
+you've got code written with a previous version of the library, go
+ahead and keep using it, unless one of the features mentioned here
+really makes your life easier. Since the library is self-contained,
+you can include an old copy of the library in your old applications,
+and use the new version for everything else.
+
+The documentation has been rewritten and greatly expanded with many
+more examples.
+
+Beautiful Soup autodetects the encoding of a document (or uses the one
+you specify), and converts it from its native encoding to
+Unicode. Internally, it only deals with Unicode strings. When you
+print out the document, it converts to UTF-8 (or another encoding you
+specify). [Doc reference]
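+
+A sketch of that flow (latin1_bytes is a placeholder; fromEncoding is
+the constructor argument for specifying the encoding yourself):
+
+ >>> soup = BeautifulSoup(latin1_bytes, fromEncoding="ISO-8859-1")
+ >>> unicode(soup)   # the whole document as a Unicode string
+ >>> print soup      # converted to UTF-8 on the way out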
+
+It's now easy to make large-scale changes to the parse tree without
+screwing up the navigation members. The methods are extract,
+replaceWith, and insert. [Doc reference. See also Improving Memory
+Usage with extract]
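+
+For instance (a sketch; the tag names are arbitrary):
+
+ >>> tag = soup.find('b')
+ >>> tag.extract()                     # detach the tag from the tree
+ >>> soup.find('i').replaceWith(tag)   # swap another tag for it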
+
+Passing True in as an attribute value gives you tags that have any
+value for that attribute. You don't have to create a regular
+expression. Passing None for an attribute value gives you tags that
+don't have that attribute at all.
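+
+For example (a sketch; the tag and attribute names are arbitrary):
+
+ >>> soup.findAll('a', attrs={'href': True})  # <a> tags with any href
+ >>> soup.findAll('a', attrs={'href': None})  # <a> tags with no href at all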
+
+Tag objects now know whether or not they're self-closing. This avoids
+the problem where Beautiful Soup thought that tags like <BR /> were
+self-closing even in XML documents. You can customize the self-closing
+tags for a parser object by passing them in as a list of
+selfClosingTags: you don't have to subclass anymore.
+
+There's a new built-in parser, MinimalSoup, which has most of
+BeautifulSoup's HTML-specific rules, but no tag nesting rules. [Doc
+reference]
+
+You can use a SoupStrainer to tell Beautiful Soup to parse only part
+of a document. This saves time and memory, often making Beautiful Soup
+about as fast as a custom-built SGMLParser subclass. [Doc reference,
+SoupStrainer reference]
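+
+A sketch of the idea (page is a placeholder; parseOnlyThese is the
+constructor argument that takes the SoupStrainer):
+
+ >>> from BeautifulSoup import BeautifulSoup, SoupStrainer
+ >>> links_only = SoupStrainer('a')
+ >>> soup = BeautifulSoup(page, parseOnlyThese=links_only)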
+
+You can (usually) use keyword arguments instead of passing a
+dictionary of attributes to a search method. That is, you can replace
+soup(args={"id" : "5"}) with soup(id="5"). You can still use args if
+(for instance) you need to find an attribute whose name clashes with
+the name of an argument to findAll. [Doc reference: **kwargs attrs]
+
+The method names have changed to the better method names used in
+Rubyful Soup. Instead of find methods and fetch methods, there are
+only find methods. Instead of a scheme where you can't remember which
+method finds one element and which one finds them all, we have find
+and findAll. In general, if the method name mentions All or a plural
+noun (eg. findNextSiblings), then it finds many
+elements. Otherwise, it only finds one element. [Doc reference]
+
+Some of the argument names have been renamed for clarity. For instance
+avoidParserProblems is now parserMassage.
+
+Beautiful Soup no longer implements a feed method. You need to pass a
+string or a filehandle into the soup constructor, rather than feeding
+data in after the soup has been created. There is still a feed method,
+but it's the feed method implemented by SGMLParser, and calling it will
+bypass Beautiful Soup and cause problems.
+
+The NavigableText class has been renamed to NavigableString. There is
+no NavigableUnicodeString anymore, because every string inside a
+Beautiful Soup parse tree is a Unicode string.
+
+findText and fetchText are gone. Just pass a text argument into find
+or findAll.
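+
+For example (a sketch; the strings and pattern are arbitrary):
+
+ >>> import re
+ >>> soup.find(text="An exact string")
+ >>> soup.findAll(text=re.compile("^Price:"))  # what fetchText used to do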
+
+Null was more trouble than it was worth, so I got rid of it. Anything
+that used to return Null now returns None.
+
+Special XML constructs like comments and CDATA now have their own
+NavigableString subclasses, instead of being treated as oddly-formed
+data. If you parse a document that contains CDATA and write it back
+out, the CDATA will still be there.
+
+When you're parsing a document, you can get Beautiful Soup to convert
+XML or HTML entities into the corresponding Unicode characters. [Doc
+reference]
+
+= 2.1.1 (20050918) =
+
+Fixed a serious performance bug in BeautifulStoneSoup which was
+causing parsing to be incredibly slow.
+
+Corrected several entities that were previously being incorrectly
+translated from Microsoft smart-quote-like characters.
+
+Fixed a bug that was breaking text fetch.
+
+Fixed a bug that crashed the parser when text chunks that look like
+HTML tag names showed up within a SCRIPT tag.
+
+THEAD, TBODY, and TFOOT tags are now nestable within TABLE
+tags. Nested tables should parse more sensibly now.
+
+BASE is now considered a self-closing tag.
+
+= 2.1.0 "Game, or any other dish?" (20050504) =
+
+Added a wide variety of new search methods which, given a starting
+point inside the tree, follow a particular navigation member (like
+nextSibling) over and over again, looking for Tag and NavigableText
+objects that match certain criteria. The new methods are findNext,
+fetchNext, findPrevious, fetchPrevious, findNextSibling,
+fetchNextSiblings, findPreviousSibling, fetchPreviousSiblings,
+findParent, and fetchParents. All of these use the same basic code
+used by first and fetch, so you can pass your weird ways of matching
+things into these methods.
+
+The fetch method and its derivatives now accept a limit argument.
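+
+For instance (a sketch combining the new methods with the limit
+argument; the tag names are arbitrary):
+
+ >>> start = soup.first('td')
+ >>> start.findNext('td')                    # the next <td> in the document
+ >>> start.fetchNextSiblings('td', limit=3)  # at most three later siblings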
+
+You can now pass keyword arguments when calling a Tag object as though
+it were a method.
+
+Fixed a bug that caused all hand-created tags to share a single set of
+attributes.
+
+= 2.0.3 (20050501) =
+
+Fixed Python 2.2 support for iterators.
+
+Fixed a bug that gave the wrong representation to tags within quote
+tags like <script>.
+
+Took some code from Mark Pilgrim that treats CDATA declarations as
+data instead of ignoring them.
+
+Beautiful Soup's setup.py will now do an install even if the unit
+tests fail. It won't build a source distribution if the unit tests
+fail, so I can't release a new version unless they pass.
+
+= 2.0.2 (20050416) =
+
+Added the unit tests in a separate module, and packaged it with
+distutils.
+
+Fixed a bug that sometimes caused renderContents() to return a Unicode
+string even if there was no Unicode in the original string.
+
+Added the done() method, which closes all of the parser's open
+tags. It gets called automatically when you pass in some text to the
+constructor of a parser class; otherwise you must call it yourself.
+
+Reinstated some backwards compatibility with 1.x versions: referencing
+the string member of a NavigableText object returns the NavigableText
+object instead of throwing an error.
+
+= 2.0.1 (20050412) =
+
+Fixed a bug that caused bad results when you tried to reference a tag
+name shorter than 3 characters as a member of a Tag, eg. tag.table.td.
+
+Made sure all Tags have the 'hidden' attribute so that an attempt to
+access tag.hidden doesn't spawn an attempt to find a tag named
+'hidden'.
+
+Fixed a bug in the comparison operator.
+
+= 2.0.0 "Who cares for fish?" (20050410) =
+
+Beautiful Soup version 1 was very useful but also pretty stupid. I
+originally wrote it without noticing any of the problems inherent in
+trying to build a parse tree out of ambiguous HTML tags. This version
+solves all of those problems to my satisfaction. It also adds many new
+clever things to make up for the removal of the stupid things.
+
+== Parsing ==
+
+The parser logic has been greatly improved, and the BeautifulSoup
+class should much more reliably yield a parse tree that looks like
+what the page author intended. For a particular class of odd edge
+cases that now causes problems, there is a new class,
+ICantBelieveItsBeautifulSoup.
+
+By default, Beautiful Soup now performs some cleanup operations on
+text before parsing it. This is to avoid common problems with bad
+definitions and self-closing tags that crash SGMLParser. You can
+provide your own set of cleanup operations, or turn it off
+altogether. The cleanup operations include fixing self-closing tags
+that don't close, and replacing Microsoft smart quotes and similar
+characters with their HTML entity equivalents.
+
+You can now get a pretty-print version of parsed HTML to get a visual
+picture of how Beautiful Soup parses it, with the Tag.prettify()
+method.
+
+== Strings and Unicode ==
+
+There are separate NavigableText subclasses for ASCII and Unicode
+strings. These classes directly subclass the corresponding base data
+types. This means you can treat NavigableText objects as strings
+instead of having to call methods on them to get the strings.
+
+str() on a Tag always returns a string, and unicode() always returns
+Unicode. Previously it was inconsistent.
+
+== Tree traversal ==
+
+In a first() or fetch() call, the tag name or the desired value of an
+attribute can now be any of the following:
+
+ * A string (matches that specific tag or that specific attribute value)
+ * A list of strings (matches any tag or attribute value in the list)
+ * A compiled regular expression object (matches any tag or attribute
+ value that matches the regular expression)
+ * A callable object that takes the Tag object or attribute value as a
+ string. It returns None/false/empty string if the given string
+ doesn't match, and any other value if it does.
+
+This is much easier to use than SQL-style wildcards (see, regular
+expressions are good for something). Because of this, I took out
+SQL-style wildcards. I'll put them back if someone complains, but
+their removal simplifies the code a lot.
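+
+A sketch of the four forms (the names and patterns are arbitrary):
+
+ >>> import re
+ >>> soup.fetch('b')                                  # a string
+ >>> soup.fetch(['b', 'i'])                           # a list of strings
+ >>> soup.fetch(re.compile('^h[1-6]$'))               # a compiled regex
+ >>> soup.fetch(lambda tag: tag.name in ('b', 'i'))   # a callable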
+
+You can use fetch() and first() to search for text in the parse tree,
+not just tags. There are new alias methods fetchText() and firstText()
+designed for this purpose. As with searching for tags, you can pass in
+a string, a regular expression object, or a method to match your text.
+
+If you pass in something besides a map to the attrs argument of
+fetch() or first(), Beautiful Soup will assume you want to match that
+thing against the "class" attribute. When you're scraping
+well-structured HTML, this makes your code a lot cleaner.
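+
+For example (a sketch; the class name is arbitrary):
+
+ >>> soup.fetch('td', 'footer')   # same as attrs={'class': 'footer'}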
+
+1.x and 2.x both let you call a Tag object as a shorthand for
+fetch(). For instance, foo("bar") is a shorthand for
+foo.fetch("bar"). In 2.x, you can also access a specially-named member
+of a Tag object as a shorthand for first(). For instance, foo.barTag
+is a shorthand for foo.first("bar"). By chaining these shortcuts you
+traverse a tree in very little code: for header in
+soup.bodyTag.pTag.tableTag('th'):
+
+If an element relationship (like parent or next) doesn't apply to a
+tag, it'll now show up as Null instead of None. first() will also return
+Null if you ask it for a nonexistent tag. Null is an object that's
+just like None, except you can do whatever you want to it and it'll
+give you Null instead of throwing an error.
+
+This lets you do tree traversals like soup.htmlTag.headTag.titleTag
+without having to worry if the intermediate stages are actually
+there. Previously, if there was no 'head' tag in the document, headTag
+in that instance would have been None, and accessing its 'titleTag'
+member would have thrown an AttributeError. Now, you can get what you
+want when it exists, and get Null when it doesn't, without having to
+do a lot of conditional checks to see if every stage is None.
+
+There are two new relations between page elements: previousSibling and
+nextSibling. They reference the previous and next element at the same
+level of the parse tree. For instance, if you have HTML like this:
+
+ <p><ul><li>Foo<br /><li>Bar</ul>
+
+The first 'li' tag has a previousSibling of Null and its nextSibling
+is the second 'li' tag. The second 'li' tag has a nextSibling of Null
+and its previousSibling is the first 'li' tag. The previousSibling of
+the 'ul' tag is the first 'p' tag. The nextSibling of 'Foo' is the
+'br' tag.
+
+I took out the ability to use fetch() to find tags that have a
+specific list of contents. See, I can't even explain it well. It was
+really difficult to use, I never used it, and I don't think anyone
+else ever used it. To the extent anyone did, they can probably use
+fetchText() instead. If it turns out someone needs it I'll think of
+another solution.
+
+== Tree manipulation ==
+
+You can add new attributes to a tag, and delete attributes from a
+tag. In 1.x you could only change a tag's existing attributes.
+
+== Porting Considerations ==
+
+There are three changes in 2.0 that break old code:
+
+In the post-1.2 release you could pass in a function into fetch(). The
+function took a string, the tag name. In 2.0, the function takes the
+actual Tag object.
+
+It's no longer possible to pass in SQL-style wildcards to fetch(). Use a
+regular expression instead.
+
+The different parsing algorithm means the parse tree may not be shaped
+like you expect. This will only actually affect you if your code uses
+one of the affected parts. I haven't run into this problem yet while
+porting my code.
+
+= Between 1.2 and 2.0 =
+
+This is the release to get if you want Python 1.5 compatibility.
+
+The desired value of an attribute can now be any of the following:
+
+ * A string
+ * A string with SQL-style wildcards
+ * A compiled RE object
+ * A callable that returns None/false/empty string if the given value
+ doesn't match, and any other value otherwise.
+
+This is much easier to use than SQL-style wildcards (see, regular
+expressions are good for something). Because of this, I no longer
+recommend you use SQL-style wildcards. They may go away in a future
+release to clean up the code.
+
+Made Beautiful Soup handle processing instructions as text instead of
+ignoring them.
+
+Applied patch from Richie Hindle (richie at entrian dot com) that
+makes tag.string a shorthand for tag.contents[0].string when the tag
+has only one string-owning child.
+
+Added still more nestable tags. The nestable tags thing won't work in
+a lot of cases and needs to be rethought.
+
+Fixed an edge case where searching for "%foo" would match any string
+shorter than "foo".
+
+= 1.2 "Who for such dainties would not stoop?" (20040708) =
+
+Applied patch from Ben Last (ben at benlast dot com) that made
+Tag.renderContents() correctly handle Unicode.
+
+Made BeautifulStoneSoup even dumber by making it not implicitly close
+a tag when another tag of the same type is encountered; only when an
+actual closing tag is encountered. This change courtesy of Fuzzy (mike
+at pcblokes dot com). BeautifulSoup still works as before.
+
+= 1.1 "Swimming in a hot tureen" =
+
+Added more 'nestable' tags. Changed popping semantics so that when a
+nestable tag is encountered, tags are popped up to the previously
+encountered nestable tag (of whatever kind). I will revert this if
+enough people complain, but it should make more people's lives easier
+than harder. This enhancement was suggested by Anthony Baxter (anthony
+at interlink dot com dot au).
+
+= 1.0 "So rich and green" (20040420) =
+
+Initial release.
diff --git a/bitbake/lib/bs4/__init__.py b/bitbake/lib/bs4/__init__.py
new file mode 100644
index 0000000000..7ba34269af
--- /dev/null
+++ b/bitbake/lib/bs4/__init__.py
@@ -0,0 +1,406 @@
+"""Beautiful Soup
+Elixir and Tonic
+"The Screen-Scraper's Friend"
+http://www.crummy.com/software/BeautifulSoup/
+
+Beautiful Soup uses a pluggable XML or HTML parser to parse a
+(possibly invalid) document into a tree representation. Beautiful Soup
+provides methods and Pythonic idioms that make it easy to
+navigate, search, and modify the parse tree.
+
+Beautiful Soup works with Python 2.6 and up. It works better if lxml
+and/or html5lib is installed.
+
+For more than you ever wanted to know about Beautiful Soup, see the
+documentation:
+http://www.crummy.com/software/BeautifulSoup/bs4/doc/
+"""
+
+__author__ = "Leonard Richardson (leonardr@segfault.org)"
+__version__ = "4.3.2"
+__copyright__ = "Copyright (c) 2004-2013 Leonard Richardson"
+__license__ = "MIT"
+
+__all__ = ['BeautifulSoup']
+
+import os
+import re
+import warnings
+
+from .builder import builder_registry, ParserRejectedMarkup
+from .dammit import UnicodeDammit
+from .element import (
+ CData,
+ Comment,
+ DEFAULT_OUTPUT_ENCODING,
+ Declaration,
+ Doctype,
+ NavigableString,
+ PageElement,
+ ProcessingInstruction,
+ ResultSet,
+ SoupStrainer,
+ Tag,
+ )
+
+# The very first thing we do is give a useful error if someone is
+# running this code under Python 3 without converting it.
+syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'
+
+class BeautifulSoup(Tag):
+ """
+ This class defines the basic interface called by the tree builders.
+
+ These methods will be called by the parser:
+ reset()
+ feed(markup)
+
+ The tree builder may call these methods from its feed() implementation:
+ handle_starttag(name, attrs) # See note about return value
+ handle_endtag(name)
+ handle_data(data) # Appends to the current data node
+ endData(containerClass=NavigableString) # Ends the current data node
+
+ No matter how complicated the underlying parser is, you should be
+ able to build a tree using 'start tag' events, 'end tag' events,
+ 'data' events, and "done with data" events.
+
+ If you encounter an empty-element tag (aka a self-closing tag,
+ like HTML's <br> tag), call handle_starttag and then
+ handle_endtag.
+ """
+ ROOT_TAG_NAME = u'[document]'
+
+ # If the end-user gives no indication which tree builder they
+ # want, look for one with these features.
+ DEFAULT_BUILDER_FEATURES = ['html', 'fast']
+
+ ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'
+
+ def __init__(self, markup="", features=None, builder=None,
+ parse_only=None, from_encoding=None, **kwargs):
+ """The Soup object is initialized as the 'root tag', and the
+ provided markup (which can be a string or a file-like object)
+ is fed into the underlying parser."""
+
+ if 'convertEntities' in kwargs:
+ warnings.warn(
+ "BS4 does not respect the convertEntities argument to the "
+ "BeautifulSoup constructor. Entities are always converted "
+ "to Unicode characters.")
+
+ if 'markupMassage' in kwargs:
+ del kwargs['markupMassage']
+ warnings.warn(
+ "BS4 does not respect the markupMassage argument to the "
+ "BeautifulSoup constructor. The tree builder is responsible "
+ "for any necessary markup massage.")
+
+ if 'smartQuotesTo' in kwargs:
+ del kwargs['smartQuotesTo']
+ warnings.warn(
+ "BS4 does not respect the smartQuotesTo argument to the "
+ "BeautifulSoup constructor. Smart quotes are always converted "
+ "to Unicode characters.")
+
+ if 'selfClosingTags' in kwargs:
+ del kwargs['selfClosingTags']
+ warnings.warn(
+ "BS4 does not respect the selfClosingTags argument to the "
+ "BeautifulSoup constructor. The tree builder is responsible "
+ "for understanding self-closing tags.")
+
+ if 'isHTML' in kwargs:
+ del kwargs['isHTML']
+ warnings.warn(
+ "BS4 does not respect the isHTML argument to the "
+ "BeautifulSoup constructor. You can pass in features='html' "
+ "or features='xml' to get a builder capable of handling "
+ "one or the other.")
+
+ def deprecated_argument(old_name, new_name):
+ if old_name in kwargs:
+ warnings.warn(
+ 'The "%s" argument to the BeautifulSoup constructor '
+ 'has been renamed to "%s."' % (old_name, new_name))
+ value = kwargs[old_name]
+ del kwargs[old_name]
+ return value
+ return None
+
+ parse_only = parse_only or deprecated_argument(
+ "parseOnlyThese", "parse_only")
+
+ from_encoding = from_encoding or deprecated_argument(
+ "fromEncoding", "from_encoding")
+
+ if len(kwargs) > 0:
+ arg = kwargs.keys().pop()
+ raise TypeError(
+ "__init__() got an unexpected keyword argument '%s'" % arg)
+
+ if builder is None:
+ if isinstance(features, basestring):
+ features = [features]
+ if features is None or len(features) == 0:
+ features = self.DEFAULT_BUILDER_FEATURES
+ builder_class = builder_registry.lookup(*features)
+ if builder_class is None:
+ raise FeatureNotFound(
+ "Couldn't find a tree builder with the features you "
+ "requested: %s. Do you need to install a parser library?"
+ % ",".join(features))
+ builder = builder_class()
+ self.builder = builder
+ self.is_xml = builder.is_xml
+ self.builder.soup = self
+
+ self.parse_only = parse_only
+
+ if hasattr(markup, 'read'): # It's a file-type object.
+ markup = markup.read()
+ elif len(markup) <= 256:
+ # Print out warnings for a couple beginner problems
+ # involving passing non-markup to Beautiful Soup.
+ # Beautiful Soup will still parse the input as markup,
+ # just in case that's what the user really wants.
+ if (isinstance(markup, unicode)
+ and not os.path.supports_unicode_filenames):
+ possible_filename = markup.encode("utf8")
+ else:
+ possible_filename = markup
+ is_file = False
+ try:
+ is_file = os.path.exists(possible_filename)
+ except Exception, e:
+ # This is almost certainly a problem involving
+ # characters not valid in filenames on this
+ # system. Just let it go.
+ pass
+ if is_file:
+ warnings.warn(
+ '"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
+ if markup[:5] == "http:" or markup[:6] == "https:":
+ # TODO: This is ugly but I couldn't get it to work in
+ # Python 3 otherwise.
+ if ((isinstance(markup, bytes) and not b' ' in markup)
+ or (isinstance(markup, unicode) and not u' ' in markup)):
+ warnings.warn(
+ '"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)
+
+ for (self.markup, self.original_encoding, self.declared_html_encoding,
+ self.contains_replacement_characters) in (
+ self.builder.prepare_markup(markup, from_encoding)):
+ self.reset()
+ try:
+ self._feed()
+ break
+ except ParserRejectedMarkup:
+ pass
+
+ # Clear out the markup and remove the builder's circular
+ # reference to this object.
+ self.markup = None
+ self.builder.soup = None
+
+ def _feed(self):
+ # Convert the document to Unicode.
+ self.builder.reset()
+
+ self.builder.feed(self.markup)
+ # Close out any unfinished strings and close all the open tags.
+ self.endData()
+ while self.currentTag.name != self.ROOT_TAG_NAME:
+ self.popTag()
+
+ def reset(self):
+ Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
+ self.hidden = 1
+ self.builder.reset()
+ self.current_data = []
+ self.currentTag = None
+ self.tagStack = []
+ self.preserve_whitespace_tag_stack = []
+ self.pushTag(self)
+
+ def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
+ """Create a new tag associated with this soup."""
+ return Tag(None, self.builder, name, namespace, nsprefix, attrs)
+
+ def new_string(self, s, subclass=NavigableString):
+ """Create a new NavigableString associated with this soup."""
+ navigable = subclass(s)
+ navigable.setup()
+ return navigable
+
+ def insert_before(self, successor):
+ raise NotImplementedError("BeautifulSoup objects don't support insert_before().")
+
+ def insert_after(self, successor):
+ raise NotImplementedError("BeautifulSoup objects don't support insert_after().")
+
+ def popTag(self):
+ tag = self.tagStack.pop()
+ if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]:
+ self.preserve_whitespace_tag_stack.pop()
+ #print "Pop", tag.name
+ if self.tagStack:
+ self.currentTag = self.tagStack[-1]
+ return self.currentTag
+
+ def pushTag(self, tag):
+ #print "Push", tag.name
+ if self.currentTag:
+ self.currentTag.contents.append(tag)
+ self.tagStack.append(tag)
+ self.currentTag = self.tagStack[-1]
+ if tag.name in self.builder.preserve_whitespace_tags:
+ self.preserve_whitespace_tag_stack.append(tag)
+
+ def endData(self, containerClass=NavigableString):
+ if self.current_data:
+ current_data = u''.join(self.current_data)
+ # If whitespace is not preserved, and this string contains
+ # nothing but ASCII spaces, replace it with a single space
+ # or newline.
+ if not self.preserve_whitespace_tag_stack:
+ strippable = True
+ for i in current_data:
+ if i not in self.ASCII_SPACES:
+ strippable = False
+ break
+ if strippable:
+ if '\n' in current_data:
+ current_data = '\n'
+ else:
+ current_data = ' '
+
+ # Reset the data collector.
+ self.current_data = []
+
+ # Should we add this string to the tree at all?
+ if self.parse_only and len(self.tagStack) <= 1 and \
+ (not self.parse_only.text or \
+ not self.parse_only.search(current_data)):
+ return
+
+ o = containerClass(current_data)
+ self.object_was_parsed(o)
+
+ def object_was_parsed(self, o, parent=None, most_recent_element=None):
+ """Add an object to the parse tree."""
+ parent = parent or self.currentTag
+ most_recent_element = most_recent_element or self._most_recent_element
+ o.setup(parent, most_recent_element)
+
+ if most_recent_element is not None:
+ most_recent_element.next_element = o
+ self._most_recent_element = o
+ parent.contents.append(o)
+
+ def _popToTag(self, name, nsprefix=None, inclusivePop=True):
+ """Pops the tag stack up to and including the most recent
+ instance of the given tag. If inclusivePop is false, pops the tag
+        stack up to but *not* including the most recent instance of
+ the given tag."""
+ #print "Popping to %s" % name
+ if name == self.ROOT_TAG_NAME:
+ # The BeautifulSoup object itself can never be popped.
+ return
+
+ most_recently_popped = None
+
+ stack_size = len(self.tagStack)
+ for i in range(stack_size - 1, 0, -1):
+ t = self.tagStack[i]
+ if (name == t.name and nsprefix == t.prefix):
+ if inclusivePop:
+ most_recently_popped = self.popTag()
+ break
+ most_recently_popped = self.popTag()
+
+ return most_recently_popped
+
+ def handle_starttag(self, name, namespace, nsprefix, attrs):
+ """Push a start tag on to the stack.
+
+ If this method returns None, the tag was rejected by the
+        SoupStrainer. You should proceed as if the tag had not occurred
+ in the document. For instance, if this was a self-closing tag,
+ don't call handle_endtag.
+ """
+
+ # print "Start tag %s: %s" % (name, attrs)
+ self.endData()
+
+ if (self.parse_only and len(self.tagStack) <= 1
+ and (self.parse_only.text
+ or not self.parse_only.search_tag(name, attrs))):
+ return None
+
+ tag = Tag(self, self.builder, name, namespace, nsprefix, attrs,
+ self.currentTag, self._most_recent_element)
+ if tag is None:
+ return tag
+ if self._most_recent_element:
+ self._most_recent_element.next_element = tag
+ self._most_recent_element = tag
+ self.pushTag(tag)
+ return tag
+
+ def handle_endtag(self, name, nsprefix=None):
+ #print "End tag: " + name
+ self.endData()
+ self._popToTag(name, nsprefix)
+
+ def handle_data(self, data):
+ self.current_data.append(data)
+
+ def decode(self, pretty_print=False,
+ eventual_encoding=DEFAULT_OUTPUT_ENCODING,
+ formatter="minimal"):
+ """Returns a string or Unicode representation of this document.
+ To get Unicode, pass None for encoding."""
+
+ if self.is_xml:
+ # Print the XML declaration
+ encoding_part = ''
+ if eventual_encoding != None:
+ encoding_part = ' encoding="%s"' % eventual_encoding
+ prefix = u'<?xml version="1.0"%s?>\n' % encoding_part
+ else:
+ prefix = u''
+ if not pretty_print:
+ indent_level = None
+ else:
+ indent_level = 0
+ return prefix + super(BeautifulSoup, self).decode(
+ indent_level, eventual_encoding, formatter)
+
+# Alias to make it easier to type import: 'from bs4 import _soup'
+_s = BeautifulSoup
+_soup = BeautifulSoup
+
+class BeautifulStoneSoup(BeautifulSoup):
+ """Deprecated interface to an XML parser."""
+
+ def __init__(self, *args, **kwargs):
+ kwargs['features'] = 'xml'
+ warnings.warn(
+ 'The BeautifulStoneSoup class is deprecated. Instead of using '
+ 'it, pass features="xml" into the BeautifulSoup constructor.')
+ super(BeautifulStoneSoup, self).__init__(*args, **kwargs)
+
+
+class StopParsing(Exception):
+ pass
+
+class FeatureNotFound(ValueError):
+ pass
+
+
+#By default, act as an HTML pretty-printer.
+if __name__ == '__main__':
+ import sys
+ soup = BeautifulSoup(sys.stdin)
+ print soup.prettify()
diff --git a/bitbake/lib/bs4/builder/__init__.py b/bitbake/lib/bs4/builder/__init__.py
new file mode 100644
index 0000000000..740f5f29cd
--- /dev/null
+++ b/bitbake/lib/bs4/builder/__init__.py
@@ -0,0 +1,321 @@
+from collections import defaultdict
+import itertools
+import sys
+from bs4.element import (
+ CharsetMetaAttributeValue,
+ ContentMetaAttributeValue,
+ whitespace_re
+ )
+
+__all__ = [
+ 'HTMLTreeBuilder',
+ 'SAXTreeBuilder',
+ 'TreeBuilder',
+ 'TreeBuilderRegistry',
+ ]
+
+# Some useful features for a TreeBuilder to have.
+FAST = 'fast'
+PERMISSIVE = 'permissive'
+STRICT = 'strict'
+XML = 'xml'
+HTML = 'html'
+HTML_5 = 'html5'
+
+
+class TreeBuilderRegistry(object):
+
+ def __init__(self):
+ self.builders_for_feature = defaultdict(list)
+ self.builders = []
+
+ def register(self, treebuilder_class):
+ """Register a treebuilder based on its advertised features."""
+ for feature in treebuilder_class.features:
+ self.builders_for_feature[feature].insert(0, treebuilder_class)
+ self.builders.insert(0, treebuilder_class)
+
+ def lookup(self, *features):
+ if len(self.builders) == 0:
+ # There are no builders at all.
+ return None
+
+ if len(features) == 0:
+ # They didn't ask for any features. Give them the most
+ # recently registered builder.
+ return self.builders[0]
+
+ # Go down the list of features in order, and eliminate any builders
+ # that don't match every feature.
+ features = list(features)
+ features.reverse()
+ candidates = None
+ candidate_set = None
+ while len(features) > 0:
+ feature = features.pop()
+ we_have_the_feature = self.builders_for_feature.get(feature, [])
+ if len(we_have_the_feature) > 0:
+ if candidates is None:
+ candidates = we_have_the_feature
+ candidate_set = set(candidates)
+ else:
+ # Eliminate any candidates that don't have this feature.
+ candidate_set = candidate_set.intersection(
+ set(we_have_the_feature))
+
+ # The only valid candidates are the ones in candidate_set.
+ # Go through the original list of candidates and pick the first one
+ # that's in candidate_set.
+ if candidate_set is None:
+ return None
+ for candidate in candidates:
+ if candidate in candidate_set:
+ return candidate
+ return None
+
+# The BeautifulSoup class will take feature lists from developers and use them
+# to look up builders in this registry.
+builder_registry = TreeBuilderRegistry()
+
+class TreeBuilder(object):
+ """Turn a document into a Beautiful Soup object tree."""
+
+ features = []
+
+ is_xml = False
+ preserve_whitespace_tags = set()
+ empty_element_tags = None # A tag will be considered an empty-element
+ # tag when and only when it has no contents.
+
+ # A value for these tag/attribute combinations is a space- or
+ # comma-separated list of CDATA, rather than a single CDATA.
+ cdata_list_attributes = {}
+
+
+ def __init__(self):
+ self.soup = None
+
+ def reset(self):
+ pass
+
+ def can_be_empty_element(self, tag_name):
+ """Might a tag with this name be an empty-element tag?
+
+ The final markup may or may not actually present this tag as
+ self-closing.
+
+ For instance: an HTMLBuilder does not consider a <p> tag to be
+ an empty-element tag (it's not in
+ HTMLBuilder.empty_element_tags). This means an empty <p> tag
+ will be presented as "<p></p>", not "<p />".
+
+ The default implementation has no opinion about which tags are
+ empty-element tags, so a tag will be presented as an
+ empty-element tag if and only if it has no contents.
+ "<foo></foo>" will become "<foo />", and "<foo>bar</foo>" will
+ be left alone.
+ """
+ if self.empty_element_tags is None:
+ return True
+ return tag_name in self.empty_element_tags
+
+ def feed(self, markup):
+ raise NotImplementedError()
+
+ def prepare_markup(self, markup, user_specified_encoding=None,
+ document_declared_encoding=None):
+ return markup, None, None, False
+
+ def test_fragment_to_document(self, fragment):
+ """Wrap an HTML fragment to make it look like a document.
+
+ Different parsers do this differently. For instance, lxml
+ introduces an empty <head> tag, and html5lib
+ doesn't. Abstracting this away lets us write simple tests
+ which run HTML fragments through the parser and compare the
+ results against other HTML fragments.
+
+ This method should not be used outside of tests.
+ """
+ return fragment
+
+ def set_up_substitutions(self, tag):
+ return False
+
+ def _replace_cdata_list_attribute_values(self, tag_name, attrs):
+ """Replaces class="foo bar" with class=["foo", "bar"]
+
+ Modifies its input in place.
+ """
+ if not attrs:
+ return attrs
+ if self.cdata_list_attributes:
+ universal = self.cdata_list_attributes.get('*', [])
+ tag_specific = self.cdata_list_attributes.get(
+ tag_name.lower(), None)
+ for attr in attrs.keys():
+ if attr in universal or (tag_specific and attr in tag_specific):
+ # We have a "class"-type attribute whose string
+ # value is a whitespace-separated list of
+ # values. Split it into a list.
+ value = attrs[attr]
+ if isinstance(value, basestring):
+ values = whitespace_re.split(value)
+ else:
+ # html5lib sometimes calls setAttributes twice
+ # for the same tag when rearranging the parse
+ # tree. On the second call the attribute value
+ # here is already a list. If this happens,
+ # leave the value alone rather than trying to
+ # split it again.
+ values = value
+ attrs[attr] = values
+ return attrs
+
+class SAXTreeBuilder(TreeBuilder):
+ """A Beautiful Soup treebuilder that listens for SAX events."""
+
+ def feed(self, markup):
+ raise NotImplementedError()
+
+ def close(self):
+ pass
+
+ def startElement(self, name, attrs):
+ attrs = dict((key[1], value) for key, value in list(attrs.items()))
+ #print "Start %s, %r" % (name, attrs)
+ self.soup.handle_starttag(name, attrs)
+
+ def endElement(self, name):
+ #print "End %s" % name
+ self.soup.handle_endtag(name)
+
+ def startElementNS(self, nsTuple, nodeName, attrs):
+ # Throw away (ns, nodeName) for now.
+ self.startElement(nodeName, attrs)
+
+ def endElementNS(self, nsTuple, nodeName):
+ # Throw away (ns, nodeName) for now.
+ self.endElement(nodeName)
+ #handler.endElementNS((ns, node.nodeName), node.nodeName)
+
+ def startPrefixMapping(self, prefix, nodeValue):
+ # Ignore the prefix for now.
+ pass
+
+ def endPrefixMapping(self, prefix):
+ # Ignore the prefix for now.
+ # handler.endPrefixMapping(prefix)
+ pass
+
+ def characters(self, content):
+ self.soup.handle_data(content)
+
+ def startDocument(self):
+ pass
+
+ def endDocument(self):
+ pass
+
+
+class HTMLTreeBuilder(TreeBuilder):
+ """This TreeBuilder knows facts about HTML.
+
+ Such as which tags are empty-element tags.
+ """
+
+ preserve_whitespace_tags = set(['pre', 'textarea'])
+ empty_element_tags = set(['br' , 'hr', 'input', 'img', 'meta',
+ 'spacer', 'link', 'frame', 'base'])
+
+ # The HTML standard defines these attributes as containing a
+ # space-separated list of values, not a single value. That is,
+ # class="foo bar" means that the 'class' attribute has two values,
+ # 'foo' and 'bar', not the single value 'foo bar'. When we
+ # encounter one of these attributes, we will parse its value into
+ # a list of values if possible. Upon output, the list will be
+ # converted back into a string.
+ cdata_list_attributes = {
+ "*" : ['class', 'accesskey', 'dropzone'],
+ "a" : ['rel', 'rev'],
+ "link" : ['rel', 'rev'],
+ "td" : ["headers"],
+ "th" : ["headers"],
+ "td" : ["headers"],
+ "form" : ["accept-charset"],
+ "object" : ["archive"],
+
+ # These are HTML5 specific, as are *.accesskey and *.dropzone above.
+ "area" : ["rel"],
+ "icon" : ["sizes"],
+ "iframe" : ["sandbox"],
+ "output" : ["for"],
+ }
+
+ def set_up_substitutions(self, tag):
+ # We are only interested in <meta> tags
+ if tag.name != 'meta':
+ return False
+
+ http_equiv = tag.get('http-equiv')
+ content = tag.get('content')
+ charset = tag.get('charset')
+
+ # We are interested in <meta> tags that say what encoding the
+ # document was originally in. This means HTML 5-style <meta>
+ # tags that provide the "charset" attribute. It also means
+ # HTML 4-style <meta> tags that provide the "content"
+ # attribute and have "http-equiv" set to "content-type".
+ #
+ # In both cases we will replace the value of the appropriate
+ # attribute with a standin object that can take on any
+ # encoding.
+ meta_encoding = None
+ if charset is not None:
+ # HTML 5 style:
+ # <meta charset="utf8">
+ meta_encoding = charset
+ tag['charset'] = CharsetMetaAttributeValue(charset)
+
+ elif (content is not None and http_equiv is not None
+ and http_equiv.lower() == 'content-type'):
+ # HTML 4 style:
+ # <meta http-equiv="content-type" content="text/html; charset=utf8">
+ tag['content'] = ContentMetaAttributeValue(content)
+
+ return (meta_encoding is not None)
+
+def register_treebuilders_from(module):
+ """Copy TreeBuilders from the given module into this module."""
+ # I'm fairly sure this is not the best way to do this.
+ this_module = sys.modules['bs4.builder']
+ for name in module.__all__:
+ obj = getattr(module, name)
+
+ if issubclass(obj, TreeBuilder):
+ setattr(this_module, name, obj)
+ this_module.__all__.append(name)
+ # Register the builder while we're at it.
+ this_module.builder_registry.register(obj)
+
+class ParserRejectedMarkup(Exception):
+ pass
+
+# Builders are registered in reverse order of priority, so that custom
+# builder registrations will take precedence. In general, we want lxml
+# to take precedence over html5lib, because it's faster. And we only
+# want to use HTMLParser as a last resort.
+from . import _htmlparser
+register_treebuilders_from(_htmlparser)
+try:
+ from . import _html5lib
+ register_treebuilders_from(_html5lib)
+except ImportError:
+ # They don't have html5lib installed.
+ pass
+try:
+ from . import _lxml
+ register_treebuilders_from(_lxml)
+except ImportError:
+ # They don't have lxml installed.
+ pass
diff --git a/bitbake/lib/bs4/builder/_html5lib.py b/bitbake/lib/bs4/builder/_html5lib.py
new file mode 100644
index 0000000000..7de36ae75e
--- /dev/null
+++ b/bitbake/lib/bs4/builder/_html5lib.py
@@ -0,0 +1,285 @@
+__all__ = [
+ 'HTML5TreeBuilder',
+ ]
+
+import warnings
+from bs4.builder import (
+ PERMISSIVE,
+ HTML,
+ HTML_5,
+ HTMLTreeBuilder,
+ )
+from bs4.element import NamespacedAttribute
+import html5lib
+from html5lib.constants import namespaces
+from bs4.element import (
+ Comment,
+ Doctype,
+ NavigableString,
+ Tag,
+ )
+
+class HTML5TreeBuilder(HTMLTreeBuilder):
+ """Use html5lib to build a tree."""
+
+ features = ['html5lib', PERMISSIVE, HTML_5, HTML]
+
+ def prepare_markup(self, markup, user_specified_encoding):
+ # Store the user-specified encoding for use later on.
+ self.user_specified_encoding = user_specified_encoding
+ yield (markup, None, None, False)
+
+ # These methods are defined by Beautiful Soup.
+ def feed(self, markup):
+ if self.soup.parse_only is not None:
+ warnings.warn("You provided a value for parse_only, but the html5lib tree builder doesn't support parse_only. The entire document will be parsed.")
+ parser = html5lib.HTMLParser(tree=self.create_treebuilder)
+ doc = parser.parse(markup, encoding=self.user_specified_encoding)
+
+ # Set the character encoding detected by the tokenizer.
+ if isinstance(markup, unicode):
+ # We need to special-case this because html5lib sets
+ # charEncoding to UTF-8 if it gets Unicode input.
+ doc.original_encoding = None
+ else:
+ doc.original_encoding = parser.tokenizer.stream.charEncoding[0]
+
+ def create_treebuilder(self, namespaceHTMLElements):
+ self.underlying_builder = TreeBuilderForHtml5lib(
+ self.soup, namespaceHTMLElements)
+ return self.underlying_builder
+
+ def test_fragment_to_document(self, fragment):
+ """See `TreeBuilder`."""
+ return u'<html><head></head><body>%s</body></html>' % fragment
+
+
+class TreeBuilderForHtml5lib(html5lib.treebuilders._base.TreeBuilder):
+
+ def __init__(self, soup, namespaceHTMLElements):
+ self.soup = soup
+ super(TreeBuilderForHtml5lib, self).__init__(namespaceHTMLElements)
+
+ def documentClass(self):
+ self.soup.reset()
+ return Element(self.soup, self.soup, None)
+
+ def insertDoctype(self, token):
+ name = token["name"]
+ publicId = token["publicId"]
+ systemId = token["systemId"]
+
+ doctype = Doctype.for_name_and_ids(name, publicId, systemId)
+ self.soup.object_was_parsed(doctype)
+
+ def elementClass(self, name, namespace):
+ tag = self.soup.new_tag(name, namespace)
+ return Element(tag, self.soup, namespace)
+
+ def commentClass(self, data):
+ return TextNode(Comment(data), self.soup)
+
+ def fragmentClass(self):
+ self.soup = BeautifulSoup("")
+ self.soup.name = "[document_fragment]"
+ return Element(self.soup, self.soup, None)
+
+ def appendChild(self, node):
+ # XXX This code is not covered by the BS4 tests.
+ self.soup.append(node.element)
+
+ def getDocument(self):
+ return self.soup
+
+ def getFragment(self):
+ return html5lib.treebuilders._base.TreeBuilder.getFragment(self).element
+
+class AttrList(object):
+ def __init__(self, element):
+ self.element = element
+ self.attrs = dict(self.element.attrs)
+ def __iter__(self):
+ return list(self.attrs.items()).__iter__()
+ def __setitem__(self, name, value):
+ "set attr", name, value
+ self.element[name] = value
+ def items(self):
+ return list(self.attrs.items())
+ def keys(self):
+ return list(self.attrs.keys())
+ def __len__(self):
+ return len(self.attrs)
+ def __getitem__(self, name):
+ return self.attrs[name]
+ def __contains__(self, name):
+ return name in list(self.attrs.keys())
+
+
+class Element(html5lib.treebuilders._base.Node):
+ def __init__(self, element, soup, namespace):
+ html5lib.treebuilders._base.Node.__init__(self, element.name)
+ self.element = element
+ self.soup = soup
+ self.namespace = namespace
+
+ def appendChild(self, node):
+ string_child = child = None
+ if isinstance(node, basestring):
+ # Some other piece of code decided to pass in a string
+ # instead of creating a TextElement object to contain the
+ # string.
+ string_child = child = node
+ elif isinstance(node, Tag):
+ # Some other piece of code decided to pass in a Tag
+ # instead of creating an Element object to contain the
+ # Tag.
+ child = node
+ elif node.element.__class__ == NavigableString:
+ string_child = child = node.element
+ else:
+ child = node.element
+
+ if not isinstance(child, basestring) and child.parent is not None:
+ node.element.extract()
+
+ if (string_child and self.element.contents
+ and self.element.contents[-1].__class__ == NavigableString):
+ # We are appending a string onto another string.
+ # TODO This has O(n^2) performance, for input like
+ # "a</a>a</a>a</a>..."
+ old_element = self.element.contents[-1]
+ new_element = self.soup.new_string(old_element + string_child)
+ old_element.replace_with(new_element)
+ self.soup._most_recent_element = new_element
+ else:
+ if isinstance(node, basestring):
+ # Create a brand new NavigableString from this string.
+ child = self.soup.new_string(node)
+
+ # Tell Beautiful Soup to act as if it parsed this element
+ # immediately after the parent's last descendant. (Or
+ # immediately after the parent, if it has no children.)
+ if self.element.contents:
+ most_recent_element = self.element._last_descendant(False)
+ else:
+ most_recent_element = self.element
+
+ self.soup.object_was_parsed(
+ child, parent=self.element,
+ most_recent_element=most_recent_element)
+
+ def getAttributes(self):
+ return AttrList(self.element)
+
+ def setAttributes(self, attributes):
+ if attributes is not None and len(attributes) > 0:
+
+ converted_attributes = []
+ for name, value in list(attributes.items()):
+ if isinstance(name, tuple):
+ new_name = NamespacedAttribute(*name)
+ del attributes[name]
+ attributes[new_name] = value
+
+ self.soup.builder._replace_cdata_list_attribute_values(
+ self.name, attributes)
+ for name, value in attributes.items():
+ self.element[name] = value
+
+ # The attributes may contain variables that need substitution.
+ # Call set_up_substitutions manually.
+ #
+ # The Tag constructor called this method when the Tag was created,
+ # but we just set/changed the attributes, so call it again.
+ self.soup.builder.set_up_substitutions(self.element)
+ attributes = property(getAttributes, setAttributes)
+
+ def insertText(self, data, insertBefore=None):
+ if insertBefore:
+ text = TextNode(self.soup.new_string(data), self.soup)
+ self.insertBefore(data, insertBefore)
+ else:
+ self.appendChild(data)
+
+ def insertBefore(self, node, refNode):
+ index = self.element.index(refNode.element)
+ if (node.element.__class__ == NavigableString and self.element.contents
+ and self.element.contents[index-1].__class__ == NavigableString):
+ # (See comments in appendChild)
+ old_node = self.element.contents[index-1]
+ new_str = self.soup.new_string(old_node + node.element)
+ old_node.replace_with(new_str)
+ else:
+ self.element.insert(index, node.element)
+ node.parent = self
+
+ def removeChild(self, node):
+ node.element.extract()
+
+ def reparentChildren(self, new_parent):
+ """Move all of this tag's children into another tag."""
+ element = self.element
+ new_parent_element = new_parent.element
+ # Determine what this tag's next_element will be once all the children
+ # are removed.
+ final_next_element = element.next_sibling
+
+ new_parents_last_descendant = new_parent_element._last_descendant(False, False)
+ if len(new_parent_element.contents) > 0:
+ # The new parent already contains children. We will be
+ # appending this tag's children to the end.
+ new_parents_last_child = new_parent_element.contents[-1]
+ new_parents_last_descendant_next_element = new_parents_last_descendant.next_element
+ else:
+ # The new parent contains no children.
+ new_parents_last_child = None
+ new_parents_last_descendant_next_element = new_parent_element.next_element
+
+ to_append = element.contents
+ append_after = new_parent.element.contents
+ if len(to_append) > 0:
+ # Set the first child's previous_element and previous_sibling
+ # to elements within the new parent
+ first_child = to_append[0]
+ first_child.previous_element = new_parents_last_descendant
+ first_child.previous_sibling = new_parents_last_child
+
+ # Fix the last child's next_element and next_sibling
+ last_child = to_append[-1]
+ last_child.next_element = new_parents_last_descendant_next_element
+ last_child.next_sibling = None
+
+ for child in to_append:
+ child.parent = new_parent_element
+ new_parent_element.contents.append(child)
+
+ # Now that this element has no children, change its .next_element.
+ element.contents = []
+ element.next_element = final_next_element
+
+ def cloneNode(self):
+ tag = self.soup.new_tag(self.element.name, self.namespace)
+ node = Element(tag, self.soup, self.namespace)
+ for key,value in self.attributes:
+ node.attributes[key] = value
+ return node
+
+ def hasContent(self):
+ return self.element.contents
+
+ def getNameTuple(self):
+ if self.namespace == None:
+ return namespaces["html"], self.name
+ else:
+ return self.namespace, self.name
+
+ nameTuple = property(getNameTuple)
+
+class TextNode(Element):
+ def __init__(self, element, soup):
+ html5lib.treebuilders._base.Node.__init__(self, None)
+ self.element = element
+ self.soup = soup
+
+ def cloneNode(self):
+ raise NotImplementedError
diff --git a/bitbake/lib/bs4/builder/_htmlparser.py b/bitbake/lib/bs4/builder/_htmlparser.py
new file mode 100644
index 0000000000..ca8d8b892b
--- /dev/null
+++ b/bitbake/lib/bs4/builder/_htmlparser.py
@@ -0,0 +1,258 @@
+"""Use the HTMLParser library to parse HTML files that aren't too bad."""
+
+__all__ = [
+ 'HTMLParserTreeBuilder',
+ ]
+
+from HTMLParser import (
+ HTMLParser,
+ HTMLParseError,
+ )
+import sys
+import warnings
+
+# Starting in Python 3.2, the HTMLParser constructor takes a 'strict'
+# argument, which we'd like to set to False. Unfortunately,
+# http://bugs.python.org/issue13273 makes strict=True a better bet
+# before Python 3.2.3.
+#
+# At the end of this file, we monkeypatch HTMLParser so that
+# strict=True works well on Python 3.2.2.
+major, minor, release = sys.version_info[:3]
+CONSTRUCTOR_TAKES_STRICT = (
+ major > 3
+ or (major == 3 and minor > 2)
+ or (major == 3 and minor == 2 and release >= 3))
+
+from bs4.element import (
+ CData,
+ Comment,
+ Declaration,
+ Doctype,
+ ProcessingInstruction,
+ )
+from bs4.dammit import EntitySubstitution, UnicodeDammit
+
+from bs4.builder import (
+ HTML,
+ HTMLTreeBuilder,
+ STRICT,
+ )
+
+
+HTMLPARSER = 'html.parser'
+
+class BeautifulSoupHTMLParser(HTMLParser):
+ def handle_starttag(self, name, attrs):
+ # XXX namespace
+ attr_dict = {}
+ for key, value in attrs:
+ # Change None attribute values to the empty string
+ # for consistency with the other tree builders.
+ if value is None:
+ value = ''
+ attr_dict[key] = value
+ attrvalue = '""'
+ self.soup.handle_starttag(name, None, None, attr_dict)
+
+ def handle_endtag(self, name):
+ self.soup.handle_endtag(name)
+
+ def handle_data(self, data):
+ self.soup.handle_data(data)
+
+ def handle_charref(self, name):
+ # XXX workaround for a bug in HTMLParser. Remove this once
+ # it's fixed.
+ if name.startswith('x'):
+ real_name = int(name.lstrip('x'), 16)
+ elif name.startswith('X'):
+ real_name = int(name.lstrip('X'), 16)
+ else:
+ real_name = int(name)
+
+ try:
+ data = unichr(real_name)
+ except (ValueError, OverflowError), e:
+ data = u"\N{REPLACEMENT CHARACTER}"
+
+ self.handle_data(data)
+
+ def handle_entityref(self, name):
+ character = EntitySubstitution.HTML_ENTITY_TO_CHARACTER.get(name)
+ if character is not None:
+ data = character
+ else:
+ data = "&%s;" % name
+ self.handle_data(data)
+
+ def handle_comment(self, data):
+ self.soup.endData()
+ self.soup.handle_data(data)
+ self.soup.endData(Comment)
+
+ def handle_decl(self, data):
+ self.soup.endData()
+ if data.startswith("DOCTYPE "):
+ data = data[len("DOCTYPE "):]
+ elif data == 'DOCTYPE':
+ # i.e. "<!DOCTYPE>"
+ data = ''
+ self.soup.handle_data(data)
+ self.soup.endData(Doctype)
+
+ def unknown_decl(self, data):
+ if data.upper().startswith('CDATA['):
+ cls = CData
+ data = data[len('CDATA['):]
+ else:
+ cls = Declaration
+ self.soup.endData()
+ self.soup.handle_data(data)
+ self.soup.endData(cls)
+
+ def handle_pi(self, data):
+ self.soup.endData()
+ if data.endswith("?") and data.lower().startswith("xml"):
+ # "An XHTML processing instruction using the trailing '?'
+ # will cause the '?' to be included in data." - HTMLParser
+ # docs.
+ #
+ # Strip the question mark so we don't end up with two
+ # question marks.
+ data = data[:-1]
+ self.soup.handle_data(data)
+ self.soup.endData(ProcessingInstruction)
+
+
+class HTMLParserTreeBuilder(HTMLTreeBuilder):
+
+ is_xml = False
+ features = [HTML, STRICT, HTMLPARSER]
+
+ def __init__(self, *args, **kwargs):
+ if CONSTRUCTOR_TAKES_STRICT:
+ kwargs['strict'] = False
+ self.parser_args = (args, kwargs)
+
+ def prepare_markup(self, markup, user_specified_encoding=None,
+ document_declared_encoding=None):
+ """
+ :return: A 4-tuple (markup, original encoding, encoding
+ declared within markup, whether any characters had to be
+ replaced with REPLACEMENT CHARACTER).
+ """
+ if isinstance(markup, unicode):
+ yield (markup, None, None, False)
+ return
+
+ try_encodings = [user_specified_encoding, document_declared_encoding]
+ dammit = UnicodeDammit(markup, try_encodings, is_html=True)
+ yield (dammit.markup, dammit.original_encoding,
+ dammit.declared_html_encoding,
+ dammit.contains_replacement_characters)
+
+ def feed(self, markup):
+ args, kwargs = self.parser_args
+ parser = BeautifulSoupHTMLParser(*args, **kwargs)
+ parser.soup = self.soup
+ try:
+ parser.feed(markup)
+ except HTMLParseError, e:
+ warnings.warn(RuntimeWarning(
+ "Python's built-in HTMLParser cannot parse the given document. This is not a bug in Beautiful Soup. The best solution is to install an external parser (lxml or html5lib), and use Beautiful Soup with that parser. See http://www.crummy.com/software/BeautifulSoup/bs4/doc/#installing-a-parser for help."))
+ raise e
+
+# Patch 3.2 versions of HTMLParser earlier than 3.2.3 to use some
+# 3.2.3 code. This ensures they don't treat markup like <p></p> as a
+# string.
+#
+# XXX This code can be removed once most Python 3 users are on 3.2.3.
+if major == 3 and minor == 2 and not CONSTRUCTOR_TAKES_STRICT:
+ import re
+ attrfind_tolerant = re.compile(
+ r'\s*((?<=[\'"\s])[^\s/>][^\s/=>]*)(\s*=+\s*'
+ r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?')
+ HTMLParserTreeBuilder.attrfind_tolerant = attrfind_tolerant
+
+ locatestarttagend = re.compile(r"""
+ <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name
+ (?:\s+ # whitespace before attribute name
+ (?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name
+ (?:\s*=\s* # value indicator
+ (?:'[^']*' # LITA-enclosed value
+ |\"[^\"]*\" # LIT-enclosed value
+ |[^'\">\s]+ # bare value
+ )
+ )?
+ )
+ )*
+ \s* # trailing whitespace
+""", re.VERBOSE)
+ BeautifulSoupHTMLParser.locatestarttagend = locatestarttagend
+
+ from html.parser import tagfind, attrfind
+
+ def parse_starttag(self, i):
+ self.__starttag_text = None
+ endpos = self.check_for_whole_start_tag(i)
+ if endpos < 0:
+ return endpos
+ rawdata = self.rawdata
+ self.__starttag_text = rawdata[i:endpos]
+
+ # Now parse the data between i+1 and j into a tag and attrs
+ attrs = []
+ match = tagfind.match(rawdata, i+1)
+ assert match, 'unexpected call to parse_starttag()'
+ k = match.end()
+ self.lasttag = tag = rawdata[i+1:k].lower()
+ while k < endpos:
+ if self.strict:
+ m = attrfind.match(rawdata, k)
+ else:
+ m = attrfind_tolerant.match(rawdata, k)
+ if not m:
+ break
+ attrname, rest, attrvalue = m.group(1, 2, 3)
+ if not rest:
+ attrvalue = None
+ elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
+ attrvalue[:1] == '"' == attrvalue[-1:]:
+ attrvalue = attrvalue[1:-1]
+ if attrvalue:
+ attrvalue = self.unescape(attrvalue)
+ attrs.append((attrname.lower(), attrvalue))
+ k = m.end()
+
+ end = rawdata[k:endpos].strip()
+ if end not in (">", "/>"):
+ lineno, offset = self.getpos()
+ if "\n" in self.__starttag_text:
+ lineno = lineno + self.__starttag_text.count("\n")
+ offset = len(self.__starttag_text) \
+ - self.__starttag_text.rfind("\n")
+ else:
+ offset = offset + len(self.__starttag_text)
+ if self.strict:
+ self.error("junk characters in start tag: %r"
+ % (rawdata[k:endpos][:20],))
+ self.handle_data(rawdata[i:endpos])
+ return endpos
+ if end.endswith('/>'):
+ # XHTML-style empty tag: <span attr="value" />
+ self.handle_startendtag(tag, attrs)
+ else:
+ self.handle_starttag(tag, attrs)
+ if tag in self.CDATA_CONTENT_ELEMENTS:
+ self.set_cdata_mode(tag)
+ return endpos
+
+ def set_cdata_mode(self, elem):
+ self.cdata_elem = elem.lower()
+ self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)
+
+ BeautifulSoupHTMLParser.parse_starttag = parse_starttag
+ BeautifulSoupHTMLParser.set_cdata_mode = set_cdata_mode
+
+ CONSTRUCTOR_TAKES_STRICT = True
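+
+# Editor's illustrative sketch (not part of upstream Beautiful Soup). The
+# builder above is looked up through the feature strings in its `features`
+# list, so a caller selects it by name rather than importing this module:
+def _example_use_htmlparser_builder(markup="<p>Hello <b>world</b></p>"):
+    # Imported lazily so that loading this builder module alone does not
+    # pull in the rest of the package.
+    from bs4 import BeautifulSoup
+    soup = BeautifulSoup(markup, "html.parser")
+    return soup.b.get_text()  # -> u'world' for the default markup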
diff --git a/bitbake/lib/bs4/builder/_lxml.py b/bitbake/lib/bs4/builder/_lxml.py
new file mode 100644
index 0000000000..fa5d49875e
--- /dev/null
+++ b/bitbake/lib/bs4/builder/_lxml.py
@@ -0,0 +1,233 @@
+__all__ = [
+ 'LXMLTreeBuilderForXML',
+ 'LXMLTreeBuilder',
+ ]
+
+from io import BytesIO
+from StringIO import StringIO
+import collections
+from lxml import etree
+from bs4.element import Comment, Doctype, NamespacedAttribute
+from bs4.builder import (
+ FAST,
+ HTML,
+ HTMLTreeBuilder,
+ PERMISSIVE,
+ ParserRejectedMarkup,
+ TreeBuilder,
+ XML)
+from bs4.dammit import EncodingDetector
+
+LXML = 'lxml'
+
+class LXMLTreeBuilderForXML(TreeBuilder):
+ DEFAULT_PARSER_CLASS = etree.XMLParser
+
+ is_xml = True
+
+ # Well, it's permissive by XML parser standards.
+ features = [LXML, XML, FAST, PERMISSIVE]
+
+ CHUNK_SIZE = 512
+
+ # This namespace mapping is specified in the XML Namespace
+ # standard.
+ DEFAULT_NSMAPS = {'http://www.w3.org/XML/1998/namespace' : "xml"}
+
+ def default_parser(self, encoding):
+ # This can either return a parser object or a class, which
+ # will be instantiated with default arguments.
+ if self._default_parser is not None:
+ return self._default_parser
+ return etree.XMLParser(
+ target=self, strip_cdata=False, recover=True, encoding=encoding)
+
+ def parser_for(self, encoding):
+ # Use the default parser.
+ parser = self.default_parser(encoding)
+
+ if isinstance(parser, collections.Callable):
+ # Instantiate the parser with default arguments
+ parser = parser(target=self, strip_cdata=False, encoding=encoding)
+ return parser
+
+ def __init__(self, parser=None, empty_element_tags=None):
+ # TODO: Issue a warning if parser is present but not a
+ # callable, since that means there's no way to create new
+ # parsers for different encodings.
+ self._default_parser = parser
+ if empty_element_tags is not None:
+ self.empty_element_tags = set(empty_element_tags)
+ self.soup = None
+ self.nsmaps = [self.DEFAULT_NSMAPS]
+
+ def _getNsTag(self, tag):
+ # Split the namespace URL out of a fully-qualified lxml tag
+ # name. Copied from lxml's src/lxml/sax.py.
+ if tag[0] == '{':
+ return tuple(tag[1:].split('}', 1))
+ else:
+ return (None, tag)
+
+ def prepare_markup(self, markup, user_specified_encoding=None,
+ document_declared_encoding=None):
+ """
+ :yield: A series of 4-tuples.
+ (markup, encoding, declared encoding,
+ has undergone character replacement)
+
+ Each 4-tuple represents a strategy for parsing the document.
+ """
+ if isinstance(markup, unicode):
+ # We were given Unicode. Maybe lxml can parse Unicode on
+ # this system?
+ yield markup, None, document_declared_encoding, False
+
+ if isinstance(markup, unicode):
+ # No, apparently not. Convert the Unicode to UTF-8 and
+ # tell lxml to parse it as UTF-8.
+ yield (markup.encode("utf8"), "utf8",
+ document_declared_encoding, False)
+
+ # Instead of using UnicodeDammit to convert the bytestring to
+ # Unicode using different encodings, use EncodingDetector to
+ # iterate over the encodings, and tell lxml to try to parse
+ # the document as each one in turn.
+ is_html = not self.is_xml
+ try_encodings = [user_specified_encoding, document_declared_encoding]
+ detector = EncodingDetector(markup, try_encodings, is_html)
+ for encoding in detector.encodings:
+ yield (detector.markup, encoding, document_declared_encoding, False)
+
+ def feed(self, markup):
+ if isinstance(markup, bytes):
+ markup = BytesIO(markup)
+ elif isinstance(markup, unicode):
+ markup = StringIO(markup)
+
+ # Call feed() at least once, even if the markup is empty,
+ # or the parser won't be initialized.
+ data = markup.read(self.CHUNK_SIZE)
+ try:
+ self.parser = self.parser_for(self.soup.original_encoding)
+ self.parser.feed(data)
+ while len(data) != 0:
+ # Now call feed() on the rest of the data, chunk by chunk.
+ data = markup.read(self.CHUNK_SIZE)
+ if len(data) != 0:
+ self.parser.feed(data)
+ self.parser.close()
+ except (UnicodeDecodeError, LookupError, etree.ParserError), e:
+ raise ParserRejectedMarkup(str(e))
+
+ def close(self):
+ self.nsmaps = [self.DEFAULT_NSMAPS]
+
+ def start(self, name, attrs, nsmap={}):
+ # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy.
+ attrs = dict(attrs)
+ nsprefix = None
+ # Invert each namespace map as it comes in.
+ if len(self.nsmaps) > 1:
+ # There are no new namespaces for this tag, but
+ # non-default namespaces are in play, so we need a
+ # separate tag stack to know when they end.
+ self.nsmaps.append(None)
+ elif len(nsmap) > 0:
+ # A new namespace mapping has come into play.
+ inverted_nsmap = dict((value, key) for key, value in nsmap.items())
+ self.nsmaps.append(inverted_nsmap)
+ # Also treat the namespace mapping as a set of attributes on the
+ # tag, so we can recreate it later.
+ attrs = attrs.copy()
+ for prefix, namespace in nsmap.items():
+ attribute = NamespacedAttribute(
+ "xmlns", prefix, "http://www.w3.org/2000/xmlns/")
+ attrs[attribute] = namespace
+
+ # Namespaces are in play. Find any attributes that came in
+ # from lxml with namespaces attached to their names, and
+ # turn them into NamespacedAttribute objects.
+ new_attrs = {}
+ for attr, value in attrs.items():
+ namespace, attr = self._getNsTag(attr)
+ if namespace is None:
+ new_attrs[attr] = value
+ else:
+ nsprefix = self._prefix_for_namespace(namespace)
+ attr = NamespacedAttribute(nsprefix, attr, namespace)
+ new_attrs[attr] = value
+ attrs = new_attrs
+
+ namespace, name = self._getNsTag(name)
+ nsprefix = self._prefix_for_namespace(namespace)
+ self.soup.handle_starttag(name, namespace, nsprefix, attrs)
+
+ def _prefix_for_namespace(self, namespace):
+ """Find the currently active prefix for the given namespace."""
+ if namespace is None:
+ return None
+ for inverted_nsmap in reversed(self.nsmaps):
+ if inverted_nsmap is not None and namespace in inverted_nsmap:
+ return inverted_nsmap[namespace]
+ return None
+
+ def end(self, name):
+ self.soup.endData()
+ completed_tag = self.soup.tagStack[-1]
+ namespace, name = self._getNsTag(name)
+ nsprefix = None
+ if namespace is not None:
+ for inverted_nsmap in reversed(self.nsmaps):
+ if inverted_nsmap is not None and namespace in inverted_nsmap:
+ nsprefix = inverted_nsmap[namespace]
+ break
+ self.soup.handle_endtag(name, nsprefix)
+ if len(self.nsmaps) > 1:
+ # This tag, or one of its parents, introduced a namespace
+ # mapping, so pop it off the stack.
+ self.nsmaps.pop()
+
+ def pi(self, target, data):
+ pass
+
+ def data(self, content):
+ self.soup.handle_data(content)
+
+ def doctype(self, name, pubid, system):
+ self.soup.endData()
+ doctype = Doctype.for_name_and_ids(name, pubid, system)
+ self.soup.object_was_parsed(doctype)
+
+ def comment(self, content):
+ "Handle comments as Comment objects."
+ self.soup.endData()
+ self.soup.handle_data(content)
+ self.soup.endData(Comment)
+
+ def test_fragment_to_document(self, fragment):
+ """See `TreeBuilder`."""
+ return u'<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment
+
+
+class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):
+
+ features = [LXML, HTML, FAST, PERMISSIVE]
+ is_xml = False
+
+ def default_parser(self, encoding):
+ return etree.HTMLParser
+
+ def feed(self, markup):
+ encoding = self.soup.original_encoding
+ try:
+ self.parser = self.parser_for(encoding)
+ self.parser.feed(markup)
+ self.parser.close()
+ except (UnicodeDecodeError, LookupError, etree.ParserError), e:
+ raise ParserRejectedMarkup(str(e))
+
+
+ def test_fragment_to_document(self, fragment):
+ """See `TreeBuilder`."""
+ return u'<html><body>%s</body></html>' % fragment
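+
+# Editor's illustrative sketch (not part of upstream Beautiful Soup). The two
+# classes above register the "lxml" (HTML) and "xml" features, so a caller
+# picks the HTML or the XML builder purely by feature name:
+def _example_use_lxml_builders(markup="<root><child/></root>"):
+    from bs4 import BeautifulSoup
+    html_soup = BeautifulSoup(markup, "lxml")  # LXMLTreeBuilder (HTML rules)
+    xml_soup = BeautifulSoup(markup, "xml")    # LXMLTreeBuilderForXML
+    # The HTML builder wraps the fragment in <html><body>...</body></html>;
+    # the XML builder leaves the document structure alone.
+    return html_soup, xml_soup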
diff --git a/bitbake/lib/bs4/dammit.py b/bitbake/lib/bs4/dammit.py
new file mode 100644
index 0000000000..59640b7ce3
--- /dev/null
+++ b/bitbake/lib/bs4/dammit.py
@@ -0,0 +1,829 @@
+# -*- coding: utf-8 -*-
+"""Beautiful Soup bonus library: Unicode, Dammit
+
+This library converts a bytestream to Unicode through any means
+necessary. It is heavily based on code from Mark Pilgrim's Universal
+Feed Parser. It works best on XML and HTML, but it does not rewrite the
+XML or HTML to reflect a new encoding; that's the tree builder's job.
+"""
+
+import codecs
+from htmlentitydefs import codepoint2name
+import re
+import logging
+import string
+
+# Import a library to autodetect character encodings.
+chardet_type = None
+try:
+ # First try the fast C implementation.
+ # PyPI package: cchardet
+ import cchardet
+ def chardet_dammit(s):
+ return cchardet.detect(s)['encoding']
+except ImportError:
+ try:
+ # Fall back to the pure Python implementation
+ # Debian package: python-chardet
+ # PyPI package: chardet
+ import chardet
+ def chardet_dammit(s):
+ return chardet.detect(s)['encoding']
+ #import chardet.constants
+ #chardet.constants._debug = 1
+ except ImportError:
+ # No chardet available.
+ def chardet_dammit(s):
+ return None
+
+# Available from http://cjkpython.i18n.org/.
+try:
+ import iconv_codec
+except ImportError:
+ pass
+
+xml_encoding_re = re.compile(
+ '^<\?.*encoding=[\'"](.*?)[\'"].*\?>'.encode(), re.I)
+html_meta_re = re.compile(
+ '<\s*meta[^>]+charset\s*=\s*["\']?([^>]*?)[ /;\'">]'.encode(), re.I)
+
+class EntitySubstitution(object):
+
+ """Substitute XML or HTML entities for the corresponding characters."""
+
+ def _populate_class_variables():
+ lookup = {}
+ reverse_lookup = {}
+ characters_for_re = []
+ for codepoint, name in list(codepoint2name.items()):
+ character = unichr(codepoint)
+ if codepoint != 34:
+ # There's no point in turning the quotation mark into
+ # &quot;, unless it happens within an attribute value, which
+ # is handled elsewhere.
+ characters_for_re.append(character)
+ lookup[character] = name
+ # But we do want to turn &quot; into the quotation mark.
+ reverse_lookup[name] = character
+ re_definition = "[%s]" % "".join(characters_for_re)
+ return lookup, reverse_lookup, re.compile(re_definition)
+ (CHARACTER_TO_HTML_ENTITY, HTML_ENTITY_TO_CHARACTER,
+ CHARACTER_TO_HTML_ENTITY_RE) = _populate_class_variables()
+
+ CHARACTER_TO_XML_ENTITY = {
+ "'": "apos",
+ '"': "quot",
+ "&": "amp",
+ "<": "lt",
+ ">": "gt",
+ }
+
+ BARE_AMPERSAND_OR_BRACKET = re.compile("([<>]|"
+ "&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)"
+ ")")
+
+ AMPERSAND_OR_BRACKET = re.compile("([<>&])")
+
+ @classmethod
+ def _substitute_html_entity(cls, matchobj):
+ entity = cls.CHARACTER_TO_HTML_ENTITY.get(matchobj.group(0))
+ return "&%s;" % entity
+
+ @classmethod
+ def _substitute_xml_entity(cls, matchobj):
+ """Used with a regular expression to substitute the
+ appropriate XML entity for an XML special character."""
+ entity = cls.CHARACTER_TO_XML_ENTITY[matchobj.group(0)]
+ return "&%s;" % entity
+
+ @classmethod
+ def quoted_attribute_value(cls, value):
+ """Make a value into a quoted XML attribute, possibly escaping it.
+
+ Most strings will be quoted using double quotes.
+
+ Bob's Bar -> "Bob's Bar"
+
+ If a string contains double quotes, it will be quoted using
+ single quotes.
+
+ Welcome to "my bar" -> 'Welcome to "my bar"'
+
+ If a string contains both single and double quotes, the
+ double quotes will be escaped, and the string will be quoted
+ using double quotes.
+
+ Welcome to "Bob's Bar" -> "Welcome to &quot;Bob's bar&quot;
+ """
+ quote_with = '"'
+ if '"' in value:
+ if "'" in value:
+ # The string contains both single and double
+ # quotes. Turn the double quotes into
+ # entities. We quote the double quotes rather than
+ # the single quotes because the entity name is
+ # "&quot;" whether this is HTML or XML. If we
+ # quoted the single quotes, we'd have to decide
+ # between &apos; and &squot;.
+ replace_with = "&quot;"
+ value = value.replace('"', replace_with)
+ else:
+ # There are double quotes but no single quotes.
+ # We can use single quotes to quote the attribute.
+ quote_with = "'"
+ return quote_with + value + quote_with
+
+ @classmethod
+ def substitute_xml(cls, value, make_quoted_attribute=False):
+ """Substitute XML entities for special XML characters.
+
+ :param value: A string to be substituted. The less-than sign
+ will become &lt;, the greater-than sign will become &gt;,
+ and any ampersands will become &amp;. If you want ampersands
+ that appear to be part of an entity definition to be left
+ alone, use substitute_xml_containing_entities() instead.
+
+ :param make_quoted_attribute: If True, then the string will be
+ quoted, as befits an attribute value.
+ """
+ # Escape angle brackets and ampersands.
+ value = cls.AMPERSAND_OR_BRACKET.sub(
+ cls._substitute_xml_entity, value)
+
+ if make_quoted_attribute:
+ value = cls.quoted_attribute_value(value)
+ return value
+
+ @classmethod
+ def substitute_xml_containing_entities(
+ cls, value, make_quoted_attribute=False):
+ """Substitute XML entities for special XML characters.
+
+ :param value: A string to be substituted. The less-than sign will
+ become &lt;, the greater-than sign will become &gt;, and any
+ ampersands that are not part of an entity definition will
+ become &amp;.
+
+ :param make_quoted_attribute: If True, then the string will be
+ quoted, as befits an attribute value.
+ """
+ # Escape angle brackets, and ampersands that aren't part of
+ # entities.
+ value = cls.BARE_AMPERSAND_OR_BRACKET.sub(
+ cls._substitute_xml_entity, value)
+
+ if make_quoted_attribute:
+ value = cls.quoted_attribute_value(value)
+ return value
+
+ @classmethod
+ def substitute_html(cls, s):
+ """Replace certain Unicode characters with named HTML entities.
+
+ This differs from data.encode(encoding, 'xmlcharrefreplace')
+ in that the goal is to make the result more readable (to those
+ with ASCII displays) rather than to recover from
+ errors. There's absolutely nothing wrong with a UTF-8 string
+ containing a LATIN SMALL LETTER E WITH ACUTE, but replacing that
+ character with "&eacute;" will make it more readable to some
+ people.
+ """
+ return cls.CHARACTER_TO_HTML_ENTITY_RE.sub(
+ cls._substitute_html_entity, s)
+
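+# Editor's illustrative sketch (not part of upstream Beautiful Soup).
+# EntitySubstitution is a stateless helper; a typical use is escaping a value
+# before embedding it in markup:
+def _example_entity_substitution():
+    escaped = EntitySubstitution.substitute_xml('AT&T <"rules">')
+    # -> 'AT&amp;T &lt;"rules"&gt;' (quotes in plain text are left alone)
+    quoted = EntitySubstitution.substitute_xml('1 < 2',
+                                               make_quoted_attribute=True)
+    # -> '"1 &lt; 2"' (wrapped in quotes, ready to use as an attribute value)
+    return escaped, quoted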
+
+class EncodingDetector:
+ """Suggests a number of possible encodings for a bytestring.
+
+ Order of precedence:
+
+ 1. Encodings you specifically tell EncodingDetector to try first
+ (the override_encodings argument to the constructor).
+
+ 2. An encoding declared within the bytestring itself, either in an
+ XML declaration (if the bytestring is to be interpreted as an XML
+ document), or in a <meta> tag (if the bytestring is to be
+ interpreted as an HTML document.)
+
+ 3. An encoding detected through textual analysis by chardet,
+ cchardet, or a similar external library.
+
+ 4. UTF-8.
+
+ 5. Windows-1252.
+ """
+ def __init__(self, markup, override_encodings=None, is_html=False):
+ self.override_encodings = override_encodings or []
+ self.chardet_encoding = None
+ self.is_html = is_html
+ self.declared_encoding = None
+
+ # First order of business: strip a byte-order mark.
+ self.markup, self.sniffed_encoding = self.strip_byte_order_mark(markup)
+
+ def _usable(self, encoding, tried):
+ if encoding is not None:
+ encoding = encoding.lower()
+ if encoding not in tried:
+ tried.add(encoding)
+ return True
+ return False
+
+ @property
+ def encodings(self):
+ """Yield a number of encodings that might work for this markup."""
+ tried = set()
+ for e in self.override_encodings:
+ if self._usable(e, tried):
+ yield e
+
+ # Did the document originally start with a byte-order mark
+ # that indicated its encoding?
+ if self._usable(self.sniffed_encoding, tried):
+ yield self.sniffed_encoding
+
+ # Look within the document for an XML or HTML encoding
+ # declaration.
+ if self.declared_encoding is None:
+ self.declared_encoding = self.find_declared_encoding(
+ self.markup, self.is_html)
+ if self._usable(self.declared_encoding, tried):
+ yield self.declared_encoding
+
+ # Use third-party character set detection to guess at the
+ # encoding.
+ if self.chardet_encoding is None:
+ self.chardet_encoding = chardet_dammit(self.markup)
+ if self._usable(self.chardet_encoding, tried):
+ yield self.chardet_encoding
+
+ # As a last-ditch effort, try utf-8 and windows-1252.
+ for e in ('utf-8', 'windows-1252'):
+ if self._usable(e, tried):
+ yield e
+
+ @classmethod
+ def strip_byte_order_mark(cls, data):
+ """If a byte-order mark is present, strip it and return the encoding it implies."""
+ encoding = None
+ if (len(data) >= 4) and (data[:2] == b'\xfe\xff') \
+ and (data[2:4] != '\x00\x00'):
+ encoding = 'utf-16be'
+ data = data[2:]
+ elif (len(data) >= 4) and (data[:2] == b'\xff\xfe') \
+ and (data[2:4] != '\x00\x00'):
+ encoding = 'utf-16le'
+ data = data[2:]
+ elif data[:3] == b'\xef\xbb\xbf':
+ encoding = 'utf-8'
+ data = data[3:]
+ elif data[:4] == b'\x00\x00\xfe\xff':
+ encoding = 'utf-32be'
+ data = data[4:]
+ elif data[:4] == b'\xff\xfe\x00\x00':
+ encoding = 'utf-32le'
+ data = data[4:]
+ return data, encoding
+
+ @classmethod
+ def find_declared_encoding(cls, markup, is_html=False, search_entire_document=False):
+ """Given a document, tries to find its declared encoding.
+
+ An XML encoding is declared at the beginning of the document.
+
+ An HTML encoding is declared in a <meta> tag, hopefully near the
+ beginning of the document.
+ """
+ if search_entire_document:
+ xml_endpos = html_endpos = len(markup)
+ else:
+ xml_endpos = 1024
+ html_endpos = max(2048, int(len(markup) * 0.05))
+
+ declared_encoding = None
+ declared_encoding_match = xml_encoding_re.search(markup, endpos=xml_endpos)
+ if not declared_encoding_match and is_html:
+ declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos)
+ if declared_encoding_match is not None:
+ declared_encoding = declared_encoding_match.groups()[0].decode(
+ 'ascii')
+ if declared_encoding:
+ return declared_encoding.lower()
+ return None
+
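+# Editor's illustrative sketch (not part of upstream Beautiful Soup). The
+# detector above only *suggests* encodings, in the precedence order described
+# in its docstring; the caller is expected to try each candidate in turn:
+def _example_detect_and_decode(data):
+    detector = EncodingDetector(data, is_html=True)
+    for encoding in detector.encodings:
+        try:
+            # detector.markup is the input with any byte-order mark stripped.
+            return detector.markup.decode(encoding), encoding
+        except (UnicodeDecodeError, LookupError):
+            continue
+    return None, None
+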
+class UnicodeDammit:
+ """A class for detecting the encoding of a *ML document and
+ converting it to a Unicode string. If the source encoding is
+ windows-1252, can replace MS smart quotes with their HTML or XML
+ equivalents."""
+
+ # This dictionary maps commonly seen values for "charset" in HTML
+ # meta tags to the corresponding Python codec names. It only covers
+ # values that aren't in Python's aliases and can't be determined
+ # by the heuristics in find_codec.
+ CHARSET_ALIASES = {"macintosh": "mac-roman",
+ "x-sjis": "shift-jis"}
+
+ ENCODINGS_WITH_SMART_QUOTES = [
+ "windows-1252",
+ "iso-8859-1",
+ "iso-8859-2",
+ ]
+
+ def __init__(self, markup, override_encodings=[],
+ smart_quotes_to=None, is_html=False):
+ self.smart_quotes_to = smart_quotes_to
+ self.tried_encodings = []
+ self.contains_replacement_characters = False
+ self.is_html = is_html
+
+ self.detector = EncodingDetector(markup, override_encodings, is_html)
+
+ # Short-circuit if the data is in Unicode to begin with.
+ if isinstance(markup, unicode) or markup == '':
+ self.markup = markup
+ self.unicode_markup = unicode(markup)
+ self.original_encoding = None
+ return
+
+ # The encoding detector may have stripped a byte-order mark.
+ # Use the stripped markup from this point on.
+ self.markup = self.detector.markup
+
+ u = None
+ for encoding in self.detector.encodings:
+ markup = self.detector.markup
+ u = self._convert_from(encoding)
+ if u is not None:
+ break
+
+ if not u:
+ # None of the encodings worked. As an absolute last resort,
+ # try them again with character replacement.
+
+ for encoding in self.detector.encodings:
+ if encoding != "ascii":
+ u = self._convert_from(encoding, "replace")
+ if u is not None:
+ logging.warning(
+ "Some characters could not be decoded, and were "
+ "replaced with REPLACEMENT CHARACTER.")
+ self.contains_replacement_characters = True
+ break
+
+ # If none of that worked, we could at this point force it to
+ # ASCII, but that would destroy so much data that I think
+ # giving up is better.
+ self.unicode_markup = u
+ if not u:
+ self.original_encoding = None
+
+ def _sub_ms_char(self, match):
+ """Changes a MS smart quote character to an XML or HTML
+ entity, or an ASCII character."""
+ orig = match.group(1)
+ if self.smart_quotes_to == 'ascii':
+ sub = self.MS_CHARS_TO_ASCII.get(orig).encode()
+ else:
+ sub = self.MS_CHARS.get(orig)
+ if type(sub) == tuple:
+ if self.smart_quotes_to == 'xml':
+ sub = '&#x'.encode() + sub[1].encode() + ';'.encode()
+ else:
+ sub = '&'.encode() + sub[0].encode() + ';'.encode()
+ else:
+ sub = sub.encode()
+ return sub
+
+ def _convert_from(self, proposed, errors="strict"):
+ proposed = self.find_codec(proposed)
+ if not proposed or (proposed, errors) in self.tried_encodings:
+ return None
+ self.tried_encodings.append((proposed, errors))
+ markup = self.markup
+ # Convert smart quotes to HTML if coming from an encoding
+ # that might have them.
+ if (self.smart_quotes_to is not None
+ and proposed in self.ENCODINGS_WITH_SMART_QUOTES):
+ smart_quotes_re = b"([\x80-\x9f])"
+ smart_quotes_compiled = re.compile(smart_quotes_re)
+ markup = smart_quotes_compiled.sub(self._sub_ms_char, markup)
+
+ try:
+ #print "Trying to convert document to %s (errors=%s)" % (
+ # proposed, errors)
+ u = self._to_unicode(markup, proposed, errors)
+ self.markup = u
+ self.original_encoding = proposed
+ except Exception as e:
+ #print "That didn't work!"
+ #print e
+ return None
+ #print "Correct encoding: %s" % proposed
+ return self.markup
+
+ def _to_unicode(self, data, encoding, errors="strict"):
+ '''Given a string and its encoding, decodes the string into Unicode.
+ %encoding is a string recognized by encodings.aliases'''
+ return unicode(data, encoding, errors)
+
+ @property
+ def declared_html_encoding(self):
+ if not self.is_html:
+ return None
+ return self.detector.declared_encoding
+
+ def find_codec(self, charset):
+ value = (self._codec(self.CHARSET_ALIASES.get(charset, charset))
+ or (charset and self._codec(charset.replace("-", "")))
+ or (charset and self._codec(charset.replace("-", "_")))
+ or (charset and charset.lower())
+ or charset
+ )
+ if value:
+ return value.lower()
+ return None
+
+ def _codec(self, charset):
+ if not charset:
+ return charset
+ codec = None
+ try:
+ codecs.lookup(charset)
+ codec = charset
+ except (LookupError, ValueError):
+ pass
+ return codec
+
+
+ # A partial mapping of ISO-Latin-1 to HTML entities/XML numeric entities.
+ MS_CHARS = {b'\x80': ('euro', '20AC'),
+ b'\x81': ' ',
+ b'\x82': ('sbquo', '201A'),
+ b'\x83': ('fnof', '192'),
+ b'\x84': ('bdquo', '201E'),
+ b'\x85': ('hellip', '2026'),
+ b'\x86': ('dagger', '2020'),
+ b'\x87': ('Dagger', '2021'),
+ b'\x88': ('circ', '2C6'),
+ b'\x89': ('permil', '2030'),
+ b'\x8A': ('Scaron', '160'),
+ b'\x8B': ('lsaquo', '2039'),
+ b'\x8C': ('OElig', '152'),
+ b'\x8D': '?',
+ b'\x8E': ('#x17D', '17D'),
+ b'\x8F': '?',
+ b'\x90': '?',
+ b'\x91': ('lsquo', '2018'),
+ b'\x92': ('rsquo', '2019'),
+ b'\x93': ('ldquo', '201C'),
+ b'\x94': ('rdquo', '201D'),
+ b'\x95': ('bull', '2022'),
+ b'\x96': ('ndash', '2013'),
+ b'\x97': ('mdash', '2014'),
+ b'\x98': ('tilde', '2DC'),
+ b'\x99': ('trade', '2122'),
+ b'\x9a': ('scaron', '161'),
+ b'\x9b': ('rsaquo', '203A'),
+ b'\x9c': ('oelig', '153'),
+ b'\x9d': '?',
+ b'\x9e': ('#x17E', '17E'),
+ b'\x9f': ('Yuml', ''),}
+
+ # A parochial partial mapping of ISO-Latin-1 to ASCII. Contains
+ # horrors like stripping diacritical marks to turn á into a, but also
+ # contains non-horrors like turning “ into ".
+ MS_CHARS_TO_ASCII = {
+ b'\x80' : 'EUR',
+ b'\x81' : ' ',
+ b'\x82' : ',',
+ b'\x83' : 'f',
+ b'\x84' : ',,',
+ b'\x85' : '...',
+ b'\x86' : '+',
+ b'\x87' : '++',
+ b'\x88' : '^',
+ b'\x89' : '%',
+ b'\x8a' : 'S',
+ b'\x8b' : '<',
+ b'\x8c' : 'OE',
+ b'\x8d' : '?',
+ b'\x8e' : 'Z',
+ b'\x8f' : '?',
+ b'\x90' : '?',
+ b'\x91' : "'",
+ b'\x92' : "'",
+ b'\x93' : '"',
+ b'\x94' : '"',
+ b'\x95' : '*',
+ b'\x96' : '-',
+ b'\x97' : '--',
+ b'\x98' : '~',
+ b'\x99' : '(TM)',
+ b'\x9a' : 's',
+ b'\x9b' : '>',
+ b'\x9c' : 'oe',
+ b'\x9d' : '?',
+ b'\x9e' : 'z',
+ b'\x9f' : 'Y',
+ b'\xa0' : ' ',
+ b'\xa1' : '!',
+ b'\xa2' : 'c',
+ b'\xa3' : 'GBP',
+ b'\xa4' : '$', #This approximation is especially parochial--this is the
+ #generic currency symbol.
+ b'\xa5' : 'YEN',
+ b'\xa6' : '|',
+ b'\xa7' : 'S',
+ b'\xa8' : '..',
+ b'\xa9' : '',
+ b'\xaa' : '(th)',
+ b'\xab' : '<<',
+ b'\xac' : '!',
+ b'\xad' : ' ',
+ b'\xae' : '(R)',
+ b'\xaf' : '-',
+ b'\xb0' : 'o',
+ b'\xb1' : '+-',
+ b'\xb2' : '2',
+ b'\xb3' : '3',
+ b'\xb4' : ("'", 'acute'),
+ b'\xb5' : 'u',
+ b'\xb6' : 'P',
+ b'\xb7' : '*',
+ b'\xb8' : ',',
+ b'\xb9' : '1',
+ b'\xba' : '(th)',
+ b'\xbb' : '>>',
+ b'\xbc' : '1/4',
+ b'\xbd' : '1/2',
+ b'\xbe' : '3/4',
+ b'\xbf' : '?',
+ b'\xc0' : 'A',
+ b'\xc1' : 'A',
+ b'\xc2' : 'A',
+ b'\xc3' : 'A',
+ b'\xc4' : 'A',
+ b'\xc5' : 'A',
+ b'\xc6' : 'AE',
+ b'\xc7' : 'C',
+ b'\xc8' : 'E',
+ b'\xc9' : 'E',
+ b'\xca' : 'E',
+ b'\xcb' : 'E',
+ b'\xcc' : 'I',
+ b'\xcd' : 'I',
+ b'\xce' : 'I',
+ b'\xcf' : 'I',
+ b'\xd0' : 'D',
+ b'\xd1' : 'N',
+ b'\xd2' : 'O',
+ b'\xd3' : 'O',
+ b'\xd4' : 'O',
+ b'\xd5' : 'O',
+ b'\xd6' : 'O',
+ b'\xd7' : '*',
+ b'\xd8' : 'O',
+ b'\xd9' : 'U',
+ b'\xda' : 'U',
+ b'\xdb' : 'U',
+ b'\xdc' : 'U',
+ b'\xdd' : 'Y',
+ b'\xde' : 'b',
+ b'\xdf' : 'B',
+ b'\xe0' : 'a',
+ b'\xe1' : 'a',
+ b'\xe2' : 'a',
+ b'\xe3' : 'a',
+ b'\xe4' : 'a',
+ b'\xe5' : 'a',
+ b'\xe6' : 'ae',
+ b'\xe7' : 'c',
+ b'\xe8' : 'e',
+ b'\xe9' : 'e',
+ b'\xea' : 'e',
+ b'\xeb' : 'e',
+ b'\xec' : 'i',
+ b'\xed' : 'i',
+ b'\xee' : 'i',
+ b'\xef' : 'i',
+ b'\xf0' : 'o',
+ b'\xf1' : 'n',
+ b'\xf2' : 'o',
+ b'\xf3' : 'o',
+ b'\xf4' : 'o',
+ b'\xf5' : 'o',
+ b'\xf6' : 'o',
+ b'\xf7' : '/',
+ b'\xf8' : 'o',
+ b'\xf9' : 'u',
+ b'\xfa' : 'u',
+ b'\xfb' : 'u',
+ b'\xfc' : 'u',
+ b'\xfd' : 'y',
+ b'\xfe' : 'b',
+ b'\xff' : 'y',
+ }
+
+ # A map used when removing rogue Windows-1252/ISO-8859-1
+ # characters in otherwise UTF-8 documents.
+ #
+ # Note that \x81, \x8d, \x8f, \x90, and \x9d are undefined in
+ # Windows-1252.
+ WINDOWS_1252_TO_UTF8 = {
+ 0x80 : b'\xe2\x82\xac', # €
+ 0x82 : b'\xe2\x80\x9a', # ‚
+ 0x83 : b'\xc6\x92', # ƒ
+ 0x84 : b'\xe2\x80\x9e', # „
+ 0x85 : b'\xe2\x80\xa6', # …
+ 0x86 : b'\xe2\x80\xa0', # †
+ 0x87 : b'\xe2\x80\xa1', # ‡
+ 0x88 : b'\xcb\x86', # ˆ
+ 0x89 : b'\xe2\x80\xb0', # ‰
+ 0x8a : b'\xc5\xa0', # Š
+ 0x8b : b'\xe2\x80\xb9', # ‹
+ 0x8c : b'\xc5\x92', # Œ
+ 0x8e : b'\xc5\xbd', # Ž
+ 0x91 : b'\xe2\x80\x98', # ‘
+ 0x92 : b'\xe2\x80\x99', # ’
+ 0x93 : b'\xe2\x80\x9c', # “
+ 0x94 : b'\xe2\x80\x9d', # ”
+ 0x95 : b'\xe2\x80\xa2', # •
+ 0x96 : b'\xe2\x80\x93', # –
+ 0x97 : b'\xe2\x80\x94', # —
+ 0x98 : b'\xcb\x9c', # ˜
+ 0x99 : b'\xe2\x84\xa2', # ™
+ 0x9a : b'\xc5\xa1', # š
+ 0x9b : b'\xe2\x80\xba', # ›
+ 0x9c : b'\xc5\x93', # œ
+ 0x9e : b'\xc5\xbe', # ž
+ 0x9f : b'\xc5\xb8', # Ÿ
+ 0xa0 : b'\xc2\xa0', # non-breaking space
+ 0xa1 : b'\xc2\xa1', # ¡
+ 0xa2 : b'\xc2\xa2', # ¢
+ 0xa3 : b'\xc2\xa3', # £
+ 0xa4 : b'\xc2\xa4', # ¤
+ 0xa5 : b'\xc2\xa5', # ¥
+ 0xa6 : b'\xc2\xa6', # ¦
+ 0xa7 : b'\xc2\xa7', # §
+ 0xa8 : b'\xc2\xa8', # ¨
+ 0xa9 : b'\xc2\xa9', # ©
+ 0xaa : b'\xc2\xaa', # ª
+ 0xab : b'\xc2\xab', # «
+ 0xac : b'\xc2\xac', # ¬
+ 0xad : b'\xc2\xad', # soft hyphen
+ 0xae : b'\xc2\xae', # ®
+ 0xaf : b'\xc2\xaf', # ¯
+ 0xb0 : b'\xc2\xb0', # °
+ 0xb1 : b'\xc2\xb1', # ±
+ 0xb2 : b'\xc2\xb2', # ²
+ 0xb3 : b'\xc2\xb3', # ³
+ 0xb4 : b'\xc2\xb4', # ´
+ 0xb5 : b'\xc2\xb5', # µ
+ 0xb6 : b'\xc2\xb6', # ¶
+ 0xb7 : b'\xc2\xb7', # ·
+ 0xb8 : b'\xc2\xb8', # ¸
+ 0xb9 : b'\xc2\xb9', # ¹
+ 0xba : b'\xc2\xba', # º
+ 0xbb : b'\xc2\xbb', # »
+ 0xbc : b'\xc2\xbc', # ¼
+ 0xbd : b'\xc2\xbd', # ½
+ 0xbe : b'\xc2\xbe', # ¾
+ 0xbf : b'\xc2\xbf', # ¿
+ 0xc0 : b'\xc3\x80', # À
+ 0xc1 : b'\xc3\x81', # Á
+ 0xc2 : b'\xc3\x82', # Â
+ 0xc3 : b'\xc3\x83', # Ã
+ 0xc4 : b'\xc3\x84', # Ä
+ 0xc5 : b'\xc3\x85', # Å
+ 0xc6 : b'\xc3\x86', # Æ
+ 0xc7 : b'\xc3\x87', # Ç
+ 0xc8 : b'\xc3\x88', # È
+ 0xc9 : b'\xc3\x89', # É
+ 0xca : b'\xc3\x8a', # Ê
+ 0xcb : b'\xc3\x8b', # Ë
+ 0xcc : b'\xc3\x8c', # Ì
+ 0xcd : b'\xc3\x8d', # Í
+ 0xce : b'\xc3\x8e', # Î
+ 0xcf : b'\xc3\x8f', # Ï
+ 0xd0 : b'\xc3\x90', # Ð
+ 0xd1 : b'\xc3\x91', # Ñ
+ 0xd2 : b'\xc3\x92', # Ò
+ 0xd3 : b'\xc3\x93', # Ó
+ 0xd4 : b'\xc3\x94', # Ô
+ 0xd5 : b'\xc3\x95', # Õ
+ 0xd6 : b'\xc3\x96', # Ö
+ 0xd7 : b'\xc3\x97', # ×
+ 0xd8 : b'\xc3\x98', # Ø
+ 0xd9 : b'\xc3\x99', # Ù
+ 0xda : b'\xc3\x9a', # Ú
+ 0xdb : b'\xc3\x9b', # Û
+ 0xdc : b'\xc3\x9c', # Ü
+ 0xdd : b'\xc3\x9d', # Ý
+ 0xde : b'\xc3\x9e', # Þ
+ 0xdf : b'\xc3\x9f', # ß
+ 0xe0 : b'\xc3\xa0', # à
+ 0xe1 : b'\xc3\xa1', # á
+ 0xe2 : b'\xc3\xa2', # â
+ 0xe3 : b'\xc3\xa3', # ã
+ 0xe4 : b'\xc3\xa4', # ä
+ 0xe5 : b'\xc3\xa5', # å
+ 0xe6 : b'\xc3\xa6', # æ
+ 0xe7 : b'\xc3\xa7', # ç
+ 0xe8 : b'\xc3\xa8', # è
+ 0xe9 : b'\xc3\xa9', # é
+ 0xea : b'\xc3\xaa', # ê
+ 0xeb : b'\xc3\xab', # ë
+ 0xec : b'\xc3\xac', # ì
+ 0xed : b'\xc3\xad', # í
+ 0xee : b'\xc3\xae', # î
+ 0xef : b'\xc3\xaf', # ï
+ 0xf0 : b'\xc3\xb0', # ð
+ 0xf1 : b'\xc3\xb1', # ñ
+ 0xf2 : b'\xc3\xb2', # ò
+ 0xf3 : b'\xc3\xb3', # ó
+ 0xf4 : b'\xc3\xb4', # ô
+ 0xf5 : b'\xc3\xb5', # õ
+ 0xf6 : b'\xc3\xb6', # ö
+ 0xf7 : b'\xc3\xb7', # ÷
+ 0xf8 : b'\xc3\xb8', # ø
+ 0xf9 : b'\xc3\xb9', # ù
+ 0xfa : b'\xc3\xba', # ú
+ 0xfb : b'\xc3\xbb', # û
+ 0xfc : b'\xc3\xbc', # ü
+ 0xfd : b'\xc3\xbd', # ý
+ 0xfe : b'\xc3\xbe', # þ
+ }
+
+ MULTIBYTE_MARKERS_AND_SIZES = [
+ (0xc2, 0xdf, 2), # 2-byte characters start with a byte C2-DF
+ (0xe0, 0xef, 3), # 3-byte characters start with E0-EF
+ (0xf0, 0xf4, 4), # 4-byte characters start with F0-F4
+ ]
+
+ FIRST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[0][0]
+ LAST_MULTIBYTE_MARKER = MULTIBYTE_MARKERS_AND_SIZES[-1][1]
+
+ @classmethod
+ def detwingle(cls, in_bytes, main_encoding="utf8",
+ embedded_encoding="windows-1252"):
+ """Fix characters from one encoding embedded in some other encoding.
+
+ Currently the only situation supported is Windows-1252 (or its
+ subset ISO-8859-1), embedded in UTF-8.
+
+ The input must be a bytestring. If you've already converted
+ the document to Unicode, you're too late.
+
+ The output is a bytestring in which `embedded_encoding`
+ characters have been converted to their `main_encoding`
+ equivalents.
+ """
+ if embedded_encoding.replace('_', '-').lower() not in (
+ 'windows-1252', 'windows_1252'):
+ raise NotImplementedError(
+ "Windows-1252 and ISO-8859-1 are the only currently supported "
+ "embedded encodings.")
+
+ if main_encoding.lower() not in ('utf8', 'utf-8'):
+ raise NotImplementedError(
+ "UTF-8 is the only currently supported main encoding.")
+
+ byte_chunks = []
+
+ chunk_start = 0
+ pos = 0
+ while pos < len(in_bytes):
+ byte = in_bytes[pos]
+ if not isinstance(byte, int):
+ # Python 2.x
+ byte = ord(byte)
+ if (byte >= cls.FIRST_MULTIBYTE_MARKER
+ and byte <= cls.LAST_MULTIBYTE_MARKER):
+ # This is the start of a UTF-8 multibyte character. Skip
+ # to the end.
+ for start, end, size in cls.MULTIBYTE_MARKERS_AND_SIZES:
+ if byte >= start and byte <= end:
+ pos += size
+ break
+ elif byte >= 0x80 and byte in cls.WINDOWS_1252_TO_UTF8:
+ # We found a Windows-1252 character!
+ # Save the string up to this point as a chunk.
+ byte_chunks.append(in_bytes[chunk_start:pos])
+
+ # Now translate the Windows-1252 character into UTF-8
+ # and add it as another, one-byte chunk.
+ byte_chunks.append(cls.WINDOWS_1252_TO_UTF8[byte])
+ pos += 1
+ chunk_start = pos
+ else:
+ # Go on to the next character.
+ pos += 1
+ if chunk_start == 0:
+ # The string is unchanged.
+ return in_bytes
+ else:
+ # Store the final chunk.
+ byte_chunks.append(in_bytes[chunk_start:])
+ return b''.join(byte_chunks)
+
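+
+# Editor's illustrative sketch (not part of upstream Beautiful Soup). Typical
+# use of the two entry points in this module: detwingle() repairs stray
+# Windows-1252 bytes pasted into an otherwise UTF-8 document, and
+# UnicodeDammit then decodes the result:
+def _example_unicode_dammit(byte_string):
+    cleaned = UnicodeDammit.detwingle(byte_string)
+    dammit = UnicodeDammit(cleaned, ["utf-8", "windows-1252"], is_html=True)
+    # unicode_markup is None if every candidate encoding failed outright.
+    return dammit.unicode_markup, dammit.original_encoding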
diff --git a/bitbake/lib/bs4/diagnose.py b/bitbake/lib/bs4/diagnose.py
new file mode 100644
index 0000000000..4d0b00afad
--- /dev/null
+++ b/bitbake/lib/bs4/diagnose.py
@@ -0,0 +1,204 @@
+"""Diagnostic functions, mainly for use when doing tech support."""
+import cProfile
+from StringIO import StringIO
+from HTMLParser import HTMLParser
+import bs4
+from bs4 import BeautifulSoup, __version__
+from bs4.builder import builder_registry
+
+import os
+import pstats
+import random
+import tempfile
+import time
+import traceback
+import sys
+import cProfile
+
+def diagnose(data):
+ """Diagnostic suite for isolating common problems."""
+ print "Diagnostic running on Beautiful Soup %s" % __version__
+ print "Python version %s" % sys.version
+
+ basic_parsers = ["html.parser", "html5lib", "lxml"]
+ for name in basic_parsers:
+ for builder in builder_registry.builders:
+ if name in builder.features:
+ break
+ else:
+ basic_parsers.remove(name)
+ print (
+ "I noticed that %s is not installed. Installing it may help." %
+ name)
+
+ if 'lxml' in basic_parsers:
+ basic_parsers.append(["lxml", "xml"])
+ from lxml import etree
+ print "Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION))
+
+ if 'html5lib' in basic_parsers:
+ import html5lib
+ print "Found html5lib version %s" % html5lib.__version__
+
+ if hasattr(data, 'read'):
+ data = data.read()
+ elif os.path.exists(data):
+ print '"%s" looks like a filename. Reading data from the file.' % data
+ data = open(data).read()
+ elif data.startswith("http:") or data.startswith("https:"):
+ print '"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data
+ print "You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup."
+ return
+ print
+
+ for parser in basic_parsers:
+ print "Trying to parse your markup with %s" % parser
+ success = False
+ try:
+ soup = BeautifulSoup(data, parser)
+ success = True
+ except Exception, e:
+ print "%s could not parse the markup." % parser
+ traceback.print_exc()
+ if success:
+ print "Here's what %s did with the markup:" % parser
+ print soup.prettify()
+
+ print "-" * 80
+
+def lxml_trace(data, html=True, **kwargs):
+ """Print out the lxml events that occur during parsing.
+
+ This lets you see how lxml parses a document when no Beautiful
+ Soup code is running.
+ """
+ from lxml import etree
+ for event, element in etree.iterparse(StringIO(data), html=html, **kwargs):
+ print("%s, %4s, %s" % (event, element.tag, element.text))
+
+class AnnouncingParser(HTMLParser):
+ """Announces HTMLParser parse events, without doing anything else."""
+
+ def _p(self, s):
+ print(s)
+
+ def handle_starttag(self, name, attrs):
+ self._p("%s START" % name)
+
+ def handle_endtag(self, name):
+ self._p("%s END" % name)
+
+ def handle_data(self, data):
+ self._p("%s DATA" % data)
+
+ def handle_charref(self, name):
+ self._p("%s CHARREF" % name)
+
+ def handle_entityref(self, name):
+ self._p("%s ENTITYREF" % name)
+
+ def handle_comment(self, data):
+ self._p("%s COMMENT" % data)
+
+ def handle_decl(self, data):
+ self._p("%s DECL" % data)
+
+ def unknown_decl(self, data):
+ self._p("%s UNKNOWN-DECL" % data)
+
+ def handle_pi(self, data):
+ self._p("%s PI" % data)
+
+def htmlparser_trace(data):
+ """Print out the HTMLParser events that occur during parsing.
+
+ This lets you see how HTMLParser parses a document when no
+ Beautiful Soup code is running.
+ """
+ parser = AnnouncingParser()
+ parser.feed(data)
+
+_vowels = "aeiou"
+_consonants = "bcdfghjklmnpqrstvwxyz"
+
+def rword(length=5):
+ "Generate a random word-like string."
+ s = ''
+ for i in range(length):
+ if i % 2 == 0:
+ t = _consonants
+ else:
+ t = _vowels
+ s += random.choice(t)
+ return s
+
+def rsentence(length=4):
+ "Generate a random sentence-like string."
+ return " ".join(rword(random.randint(4,9)) for i in range(length))
+
+def rdoc(num_elements=1000):
+ """Randomly generate an invalid HTML document."""
+ tag_names = ['p', 'div', 'span', 'i', 'b', 'script', 'table']
+ elements = []
+ for i in range(num_elements):
+ choice = random.randint(0,3)
+ if choice == 0:
+ # New tag.
+ tag_name = random.choice(tag_names)
+ elements.append("<%s>" % tag_name)
+ elif choice == 1:
+ elements.append(rsentence(random.randint(1,4)))
+ elif choice == 2:
+ # Close a tag.
+ tag_name = random.choice(tag_names)
+ elements.append("</%s>" % tag_name)
+ return "<html>" + "\n".join(elements) + "</html>"
+
+def benchmark_parsers(num_elements=100000):
+ """Very basic head-to-head performance benchmark."""
+ print "Comparative parser benchmark on Beautiful Soup %s" % __version__
+ data = rdoc(num_elements)
+ print "Generated a large invalid HTML document (%d bytes)." % len(data)
+
+ for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
+ success = False
+ try:
+ a = time.time()
+ soup = BeautifulSoup(data, parser)
+ b = time.time()
+ success = True
+ except Exception, e:
+ print "%s could not parse the markup." % parser
+ traceback.print_exc()
+ if success:
+ print "BS4+%s parsed the markup in %.2fs." % (parser, b-a)
+
+ from lxml import etree
+ a = time.time()
+ etree.HTML(data)
+ b = time.time()
+ print "Raw lxml parsed the markup in %.2fs." % (b-a)
+
+ import html5lib
+ parser = html5lib.HTMLParser()
+ a = time.time()
+ parser.parse(data)
+ b = time.time()
+ print "Raw html5lib parsed the markup in %.2fs." % (b-a)
+
+def profile(num_elements=100000, parser="lxml"):
+
+ filehandle = tempfile.NamedTemporaryFile()
+ filename = filehandle.name
+
+ data = rdoc(num_elements)
+ vars = dict(bs4=bs4, data=data, parser=parser)
+ cProfile.runctx('bs4.BeautifulSoup(data, parser)' , vars, vars, filename)
+
+ stats = pstats.Stats(filename)
+ # stats.strip_dirs()
+ stats.sort_stats("cumulative")
+ stats.print_stats('_html5lib|bs4', 50)
+
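+# Editor's note (not part of upstream Beautiful Soup): these helpers are
+# meant to be run interactively, e.g.
+#
+#     from bs4.diagnose import diagnose, benchmark_parsers
+#     diagnose("<p>Some markup that behaves strangely</p>")
+#     benchmark_parsers(10000)
+#
+# diagnose() reports which parsers are installed and how each handles the
+# markup; benchmark_parsers() times them against a large generated document.
+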
+if __name__ == '__main__':
+ diagnose(sys.stdin.read())
diff --git a/bitbake/lib/bs4/element.py b/bitbake/lib/bs4/element.py
new file mode 100644
index 0000000000..da9afdf48e
--- /dev/null
+++ b/bitbake/lib/bs4/element.py
@@ -0,0 +1,1611 @@
+import collections
+import re
+import sys
+import warnings
+from bs4.dammit import EntitySubstitution
+
+DEFAULT_OUTPUT_ENCODING = "utf-8"
+PY3K = (sys.version_info[0] > 2)
+
+whitespace_re = re.compile("\s+")
+
+def _alias(attr):
+ """Alias one attribute name to another for backward compatibility"""
+ @property
+ def alias(self):
+ return getattr(self, attr)
+
+ @alias.setter
+ def alias(self, value):
+ return setattr(self, attr, value)
+ return alias
+
+
+class NamespacedAttribute(unicode):
+
+ def __new__(cls, prefix, name, namespace=None):
+ if name is None:
+ obj = unicode.__new__(cls, prefix)
+ elif prefix is None:
+ # Not really namespaced.
+ obj = unicode.__new__(cls, name)
+ else:
+ obj = unicode.__new__(cls, prefix + ":" + name)
+ obj.prefix = prefix
+ obj.name = name
+ obj.namespace = namespace
+ return obj
+
+class AttributeValueWithCharsetSubstitution(unicode):
+ """A stand-in object for a character encoding specified in HTML."""
+
+class CharsetMetaAttributeValue(AttributeValueWithCharsetSubstitution):
+ """A generic stand-in for the value of a meta tag's 'charset' attribute.
+
+ When Beautiful Soup parses the markup '<meta charset="utf8">', the
+ value of the 'charset' attribute will be one of these objects.
+ """
+
+ def __new__(cls, original_value):
+ obj = unicode.__new__(cls, original_value)
+ obj.original_value = original_value
+ return obj
+
+ def encode(self, encoding):
+ return encoding
+
+
+class ContentMetaAttributeValue(AttributeValueWithCharsetSubstitution):
+ """A generic stand-in for the value of a meta tag's 'content' attribute.
+
+ When Beautiful Soup parses the markup:
+ <meta http-equiv="content-type" content="text/html; charset=utf8">
+
+ The value of the 'content' attribute will be one of these objects.
+ """
+
+ CHARSET_RE = re.compile("((^|;)\s*charset=)([^;]*)", re.M)
+
+ def __new__(cls, original_value):
+ match = cls.CHARSET_RE.search(original_value)
+ if match is None:
+ # No substitution necessary.
+ return unicode.__new__(unicode, original_value)
+
+ obj = unicode.__new__(cls, original_value)
+ obj.original_value = original_value
+ return obj
+
+ def encode(self, encoding):
+ def rewrite(match):
+ return match.group(1) + encoding
+ return self.CHARSET_RE.sub(rewrite, self.original_value)
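+
+# Editor's illustrative sketch (not part of upstream Beautiful Soup). The two
+# stand-in classes above exist so that re-encoding a document also rewrites
+# its declared charset:
+def _example_charset_rewriting():
+    charset = CharsetMetaAttributeValue("utf8")
+    content = ContentMetaAttributeValue("text/html; charset=utf8")
+    # Encoding the values for another charset rewrites the declaration:
+    # returns ('iso-8859-1', 'text/html; charset=iso-8859-1')
+    return charset.encode("iso-8859-1"), content.encode("iso-8859-1")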
+
+class HTMLAwareEntitySubstitution(EntitySubstitution):
+
+ """Entity substitution rules that are aware of some HTML quirks.
+
+ Specifically, the contents of <script> and <style> tags should not
+ undergo entity substitution.
+
+ Incoming NavigableString objects are checked to see if they're the
+ direct children of a <script> or <style> tag.
+ """
+
+ cdata_containing_tags = set(["script", "style"])
+
+ preformatted_tags = set(["pre"])
+
+ @classmethod
+ def _substitute_if_appropriate(cls, ns, f):
+ if (isinstance(ns, NavigableString)
+ and ns.parent is not None
+ and ns.parent.name in cls.cdata_containing_tags):
+ # Do nothing.
+ return ns
+ # Substitute.
+ return f(ns)
+
+ @classmethod
+ def substitute_html(cls, ns):
+ return cls._substitute_if_appropriate(
+ ns, EntitySubstitution.substitute_html)
+
+ @classmethod
+ def substitute_xml(cls, ns):
+ return cls._substitute_if_appropriate(
+ ns, EntitySubstitution.substitute_xml)
+
+class PageElement(object):
+ """Contains the navigational information for some part of the page
+ (either a tag or a piece of text)"""
+
+ # There are four possible values for the "formatter" argument passed in
+ # to methods like encode() and prettify():
+ #
+ # "html" - All Unicode characters with corresponding HTML entities
+ # are converted to those entities on output.
+ # "minimal" - Bare ampersands and angle brackets are converted to
+ # XML entities: &amp; &lt; &gt;
+ # None - The null formatter. Unicode characters are never
+ # converted to entities. This is not recommended, but it's
+ # faster than "minimal".
+ # A function - This function will be called on every string that
+ # needs to undergo entity substitution.
+ #
+
+ # In an HTML document, the default "html" and "minimal" functions
+ # will leave the contents of <script> and <style> tags alone. For
+ # an XML document, all tags will be given the same treatment.
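+ #
+ # Editor's illustration (not from upstream): given
+ # soup = BeautifulSoup(u'<p>caf\xe9 & "quotes"</p>', 'html.parser')
+ # the three built-in formatters produce:
+ # soup.p.decode(formatter="minimal") -> u'<p>caf\xe9 &amp; "quotes"</p>'
+ # soup.p.decode(formatter="html") -> u'<p>caf&eacute; &amp; "quotes"</p>'
+ # soup.p.decode(formatter=None) -> u'<p>caf\xe9 & "quotes"</p>'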
+
+ HTML_FORMATTERS = {
+ "html" : HTMLAwareEntitySubstitution.substitute_html,
+ "minimal" : HTMLAwareEntitySubstitution.substitute_xml,
+ None : None
+ }
+
+ XML_FORMATTERS = {
+ "html" : EntitySubstitution.substitute_html,
+ "minimal" : EntitySubstitution.substitute_xml,
+ None : None
+ }
+
+ def format_string(self, s, formatter='minimal'):
+ """Format the given string using the given formatter."""
+ if not callable(formatter):
+ formatter = self._formatter_for_name(formatter)
+ if formatter is None:
+ output = s
+ else:
+ output = formatter(s)
+ return output
+
+ @property
+ def _is_xml(self):
+ """Is this element part of an XML tree or an HTML tree?
+
+ This is used when mapping a formatter name ("minimal") to an
+ appropriate function (one that performs entity-substitution on
+ the contents of <script> and <style> tags, or not). It's
+ inefficient, but it should be called very rarely.
+ """
+ if self.parent is None:
+ # This is the top-level object. It should have .is_xml set
+ # from tree creation. If not, take a guess--BS is usually
+ # used on HTML markup.
+ return getattr(self, 'is_xml', False)
+ return self.parent._is_xml
+
+ def _formatter_for_name(self, name):
+ "Look up a formatter function based on its name and the tree."
+ if self._is_xml:
+ return self.XML_FORMATTERS.get(
+ name, EntitySubstitution.substitute_xml)
+ else:
+ return self.HTML_FORMATTERS.get(
+ name, HTMLAwareEntitySubstitution.substitute_xml)
+
+ def setup(self, parent=None, previous_element=None):
+ """Sets up the initial relations between this element and
+ other elements."""
+ self.parent = parent
+ self.previous_element = previous_element
+ if previous_element is not None:
+ self.previous_element.next_element = self
+ self.next_element = None
+ self.previous_sibling = None
+ self.next_sibling = None
+ if self.parent is not None and self.parent.contents:
+ self.previous_sibling = self.parent.contents[-1]
+ self.previous_sibling.next_sibling = self
+
+ nextSibling = _alias("next_sibling") # BS3
+ previousSibling = _alias("previous_sibling") # BS3
+
+ def replace_with(self, replace_with):
+ if replace_with is self:
+ return
+ if replace_with is self.parent:
+ raise ValueError("Cannot replace a Tag with its parent.")
+ old_parent = self.parent
+ my_index = self.parent.index(self)
+ self.extract()
+ old_parent.insert(my_index, replace_with)
+ return self
+ replaceWith = replace_with # BS3
+
+ def unwrap(self):
+ my_parent = self.parent
+ my_index = self.parent.index(self)
+ self.extract()
+ for child in reversed(self.contents[:]):
+ my_parent.insert(my_index, child)
+ return self
+ replace_with_children = unwrap
+ replaceWithChildren = unwrap # BS3
+
+ def wrap(self, wrap_inside):
+ me = self.replace_with(wrap_inside)
+ wrap_inside.append(me)
+ return wrap_inside
+
+ def extract(self):
+ """Destructively rips this element out of the tree."""
+ if self.parent is not None:
+ del self.parent.contents[self.parent.index(self)]
+
+ #Find the two elements that would be next to each other if
+ #this element (and any children) hadn't been parsed. Connect
+ #the two.
+ last_child = self._last_descendant()
+ next_element = last_child.next_element
+
+ if self.previous_element is not None:
+ self.previous_element.next_element = next_element
+ if next_element is not None:
+ next_element.previous_element = self.previous_element
+ self.previous_element = None
+ last_child.next_element = None
+
+ self.parent = None
+ if self.previous_sibling is not None:
+ self.previous_sibling.next_sibling = self.next_sibling
+ if self.next_sibling is not None:
+ self.next_sibling.previous_sibling = self.previous_sibling
+ self.previous_sibling = self.next_sibling = None
+ return self
+
+ def _last_descendant(self, is_initialized=True, accept_self=True):
+ "Finds the last element beneath this object to be parsed."
+ if is_initialized and self.next_sibling:
+ last_child = self.next_sibling.previous_element
+ else:
+ last_child = self
+ while isinstance(last_child, Tag) and last_child.contents:
+ last_child = last_child.contents[-1]
+ if not accept_self and last_child == self:
+ last_child = None
+ return last_child
+ # BS3: Not part of the API!
+ _lastRecursiveChild = _last_descendant
+
+ def insert(self, position, new_child):
+ if new_child is self:
+ raise ValueError("Cannot insert a tag into itself.")
+ if (isinstance(new_child, basestring)
+ and not isinstance(new_child, NavigableString)):
+ new_child = NavigableString(new_child)
+
+ position = min(position, len(self.contents))
+ if hasattr(new_child, 'parent') and new_child.parent is not None:
+ # We're 'inserting' an element that's already one
+ # of this object's children.
+ if new_child.parent is self:
+ current_index = self.index(new_child)
+ if current_index < position:
+ # We're moving this element further down the list
+ # of this object's children. That means that when
+ # we extract this element, our target index will
+ # jump down one.
+ position -= 1
+ new_child.extract()
+
+ new_child.parent = self
+ previous_child = None
+ if position == 0:
+ new_child.previous_sibling = None
+ new_child.previous_element = self
+ else:
+ previous_child = self.contents[position - 1]
+ new_child.previous_sibling = previous_child
+ new_child.previous_sibling.next_sibling = new_child
+ new_child.previous_element = previous_child._last_descendant(False)
+ if new_child.previous_element is not None:
+ new_child.previous_element.next_element = new_child
+
+ new_childs_last_element = new_child._last_descendant(False)
+
+ if position >= len(self.contents):
+ new_child.next_sibling = None
+
+ parent = self
+ parents_next_sibling = None
+ while parents_next_sibling is None and parent is not None:
+ parents_next_sibling = parent.next_sibling
+ parent = parent.parent
+ if parents_next_sibling is not None:
+ # We found the element that comes next in the document.
+ break
+ if parents_next_sibling is not None:
+ new_childs_last_element.next_element = parents_next_sibling
+ else:
+ # The last element of this tag is the last element in
+ # the document.
+ new_childs_last_element.next_element = None
+ else:
+ next_child = self.contents[position]
+ new_child.next_sibling = next_child
+ if new_child.next_sibling is not None:
+ new_child.next_sibling.previous_sibling = new_child
+ new_childs_last_element.next_element = next_child
+
+ if new_childs_last_element.next_element is not None:
+ new_childs_last_element.next_element.previous_element = new_childs_last_element
+ self.contents.insert(position, new_child)
+
+ def append(self, tag):
+ """Appends the given tag to the contents of this tag."""
+ self.insert(len(self.contents), tag)
+
+ def insert_before(self, predecessor):
+ """Makes the given element the immediate predecessor of this one.
+
+ The two elements will have the same parent, and the given element
+ will be immediately before this one.
+ """
+ if self is predecessor:
+ raise ValueError("Can't insert an element before itself.")
+ parent = self.parent
+ if parent is None:
+ raise ValueError(
+ "Element has no parent, so 'before' has no meaning.")
+ # Extract first so that the index won't be screwed up if they
+ # are siblings.
+ if isinstance(predecessor, PageElement):
+ predecessor.extract()
+ index = parent.index(self)
+ parent.insert(index, predecessor)
+
+ def insert_after(self, successor):
+ """Makes the given element the immediate successor of this one.
+
+ The two elements will have the same parent, and the given element
+ will be immediately after this one.
+ """
+ if self is successor:
+ raise ValueError("Can't insert an element after itself.")
+ parent = self.parent
+ if parent is None:
+ raise ValueError(
+ "Element has no parent, so 'after' has no meaning.")
+ # Extract first so that the index won't be screwed up if they
+ # are siblings.
+ if isinstance(successor, PageElement):
+ successor.extract()
+ index = parent.index(self)
+ parent.insert(index+1, successor)
+
+ def find_next(self, name=None, attrs={}, text=None, **kwargs):
+ """Returns the first item that matches the given criteria and
+ appears after this Tag in the document."""
+ return self._find_one(self.find_all_next, name, attrs, text, **kwargs)
+ findNext = find_next # BS3
+
+ def find_all_next(self, name=None, attrs={}, text=None, limit=None,
+ **kwargs):
+ """Returns all items that match the given criteria and appear
+ after this Tag in the document."""
+ return self._find_all(name, attrs, text, limit, self.next_elements,
+ **kwargs)
+ findAllNext = find_all_next # BS3
+
+ def find_next_sibling(self, name=None, attrs={}, text=None, **kwargs):
+ """Returns the closest sibling to this Tag that matches the
+ given criteria and appears after this Tag in the document."""
+ return self._find_one(self.find_next_siblings, name, attrs, text,
+ **kwargs)
+ findNextSibling = find_next_sibling # BS3
+
+ def find_next_siblings(self, name=None, attrs={}, text=None, limit=None,
+ **kwargs):
+ """Returns the siblings of this Tag that match the given
+ criteria and appear after this Tag in the document."""
+ return self._find_all(name, attrs, text, limit,
+ self.next_siblings, **kwargs)
+ findNextSiblings = find_next_siblings # BS3
+ fetchNextSiblings = find_next_siblings # BS2
+
+ def find_previous(self, name=None, attrs={}, text=None, **kwargs):
+ """Returns the first item that matches the given criteria and
+ appears before this Tag in the document."""
+ return self._find_one(
+ self.find_all_previous, name, attrs, text, **kwargs)
+ findPrevious = find_previous # BS3
+
+ def find_all_previous(self, name=None, attrs={}, text=None, limit=None,
+ **kwargs):
+ """Returns all items that match the given criteria and appear
+ before this Tag in the document."""
+ return self._find_all(name, attrs, text, limit, self.previous_elements,
+ **kwargs)
+ findAllPrevious = find_all_previous # BS3
+ fetchPrevious = find_all_previous # BS2
+
+ def find_previous_sibling(self, name=None, attrs={}, text=None, **kwargs):
+ """Returns the closest sibling to this Tag that matches the
+ given criteria and appears before this Tag in the document."""
+ return self._find_one(self.find_previous_siblings, name, attrs, text,
+ **kwargs)
+ findPreviousSibling = find_previous_sibling # BS3
+
+ def find_previous_siblings(self, name=None, attrs={}, text=None,
+ limit=None, **kwargs):
+ """Returns the siblings of this Tag that match the given
+ criteria and appear before this Tag in the document."""
+ return self._find_all(name, attrs, text, limit,
+ self.previous_siblings, **kwargs)
+ findPreviousSiblings = find_previous_siblings # BS3
+ fetchPreviousSiblings = find_previous_siblings # BS2
+
+ def find_parent(self, name=None, attrs={}, **kwargs):
+ """Returns the closest parent of this Tag that matches the given
+ criteria."""
+ # NOTE: We can't use _find_one because findParents takes a different
+ # set of arguments.
+ r = None
+ l = self.find_parents(name, attrs, 1, **kwargs)
+ if l:
+ r = l[0]
+ return r
+ findParent = find_parent # BS3
+
+ def find_parents(self, name=None, attrs={}, limit=None, **kwargs):
+ """Returns the parents of this Tag that match the given
+ criteria."""
+
+ return self._find_all(name, attrs, None, limit, self.parents,
+ **kwargs)
+ findParents = find_parents # BS3
+ fetchParents = find_parents # BS2
+
+ @property
+ def next(self):
+ return self.next_element
+
+ @property
+ def previous(self):
+ return self.previous_element
+
+ #These methods do the real heavy lifting.
+
+ def _find_one(self, method, name, attrs, text, **kwargs):
+ r = None
+ l = method(name, attrs, text, 1, **kwargs)
+ if l:
+ r = l[0]
+ return r
+
+ def _find_all(self, name, attrs, text, limit, generator, **kwargs):
+ "Iterates over a generator looking for things that match."
+
+ if isinstance(name, SoupStrainer):
+ strainer = name
+ else:
+ strainer = SoupStrainer(name, attrs, text, **kwargs)
+
+ if text is None and not limit and not attrs and not kwargs:
+ if name is True or name is None:
+ # Optimization to find all tags.
+ result = (element for element in generator
+ if isinstance(element, Tag))
+ return ResultSet(strainer, result)
+ elif isinstance(name, basestring):
+ # Optimization to find all tags with a given name.
+ result = (element for element in generator
+ if isinstance(element, Tag)
+ and element.name == name)
+ return ResultSet(strainer, result)
+ results = ResultSet(strainer)
+ while True:
+ try:
+ i = next(generator)
+ except StopIteration:
+ break
+ if i:
+ found = strainer.search(i)
+ if found:
+ results.append(found)
+ if limit and len(results) >= limit:
+ break
+ return results
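+ # Editorial usage sketch, not upstream bs4 code: the find_next*/find_previous*/
+ # find_parent* helpers above all funnel into _find_one/_find_all over one of
+ # the generators defined below. Assuming this vendored module is importable
+ # as bs4 with the html.parser builder:
+ #
+ #   from bs4 import BeautifulSoup
+ #   soup = BeautifulSoup("<p>one</p><p>two</p><p>three</p>", "html.parser")
+ #   first = soup.p
+ #   first.find_next("p")                       # -> <p>two</p>
+ #   first.find_all_next("p")                   # -> [<p>two</p>, <p>three</p>]
+ #   first.find_next_sibling("p")               # -> <p>two</p>
+ #   soup.find(text="two").find_parent("p")     # -> <p>two</p>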
+
+ #These generators can be used to navigate starting from both
+ #NavigableStrings and Tags.
+ @property
+ def next_elements(self):
+ i = self.next_element
+ while i is not None:
+ yield i
+ i = i.next_element
+
+ @property
+ def next_siblings(self):
+ i = self.next_sibling
+ while i is not None:
+ yield i
+ i = i.next_sibling
+
+ @property
+ def previous_elements(self):
+ i = self.previous_element
+ while i is not None:
+ yield i
+ i = i.previous_element
+
+ @property
+ def previous_siblings(self):
+ i = self.previous_sibling
+ while i is not None:
+ yield i
+ i = i.previous_sibling
+
+ @property
+ def parents(self):
+ i = self.parent
+ while i is not None:
+ yield i
+ i = i.parent
+
+ # Methods for supporting CSS selectors.
+
+ tag_name_re = re.compile('^[a-z0-9]+$')
+
+ # /^(\w+)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/
+ # \---/ \---/\-------------/ \-------/
+ # | | | |
+ # | | | The value
+ # | | ~,|,^,$,* or =
+ # | Attribute
+ # Tag
+ attribselect_re = re.compile(
+ r'^(?P<tag>\w+)?\[(?P<attribute>\w+)(?P<operator>[=~\|\^\$\*]?)' +
+ r'=?"?(?P<value>[^\]"]*)"?\]$'
+ )
+
+ def _attr_value_as_string(self, value, default=None):
+ """Force an attribute value into a string representation.
+
+ A multi-valued attribute will be converted into a
+ space-separated string.
+ """
+ value = self.get(value, default)
+ if isinstance(value, list) or isinstance(value, tuple):
+ value = " ".join(value)
+ return value
+
+ def _tag_name_matches_and(self, function, tag_name):
+ if not tag_name:
+ return function
+ else:
+ def _match(tag):
+ return tag.name == tag_name and function(tag)
+ return _match
+
+ def _attribute_checker(self, operator, attribute, value=''):
+ """Create a function that performs a CSS selector operation.
+
+ Takes an operator, attribute and optional value. Returns a
+ function that will return True for elements that match that
+ combination.
+ """
+ if operator == '=':
+ # string representation of `attribute` is equal to `value`
+ return lambda el: el._attr_value_as_string(attribute) == value
+ elif operator == '~':
+ # space-separated list representation of `attribute`
+ # contains `value`
+ def _includes_value(element):
+ attribute_value = element.get(attribute, [])
+ if not isinstance(attribute_value, list):
+ attribute_value = attribute_value.split()
+ return value in attribute_value
+ return _includes_value
+ elif operator == '^':
+ # string representation of `attribute` starts with `value`
+ return lambda el: el._attr_value_as_string(
+ attribute, '').startswith(value)
+ elif operator == '$':
+ # string representation of `attribute` ends with `value`
+ return lambda el: el._attr_value_as_string(
+ attribute, '').endswith(value)
+ elif operator == '*':
+ # string representation of `attribute` contains `value`
+ return lambda el: value in el._attr_value_as_string(attribute, '')
+ elif operator == '|':
+ # string representation of `attribute` is either exactly
+ # `value` or starts with `value` and then a dash.
+ def _is_or_starts_with_dash(element):
+ attribute_value = element._attr_value_as_string(attribute, '')
+ return (attribute_value == value or attribute_value.startswith(
+ value + '-'))
+ return _is_or_starts_with_dash
+ else:
+ return lambda el: el.has_attr(attribute)
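+ # Editorial sketch, not upstream bs4 code: attribselect_re splits a selector
+ # token such as 'a[href^=http]' into tag/attribute/operator/value, and
+ # _attribute_checker turns the operator into a predicate used by select():
+ #
+ #   from bs4 import BeautifulSoup
+ #   soup = BeautifulSoup('<a href="http://example.org/">x</a>', "html.parser")
+ #   soup.a.attribselect_re.match('a[href^=http]').groups()
+ #   # -> ('a', 'href', '^', 'http')
+ #   checker = soup.a._attribute_checker('^', 'href', 'http')
+ #   checker(soup.a)                  # -> True
+ #   soup.select('a[href^=http]')     # -> [<a href="http://example.org/">x</a>]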
+
+ # Old non-property versions of the generators, for backwards
+ # compatibility with BS3.
+ def nextGenerator(self):
+ return self.next_elements
+
+ def nextSiblingGenerator(self):
+ return self.next_siblings
+
+ def previousGenerator(self):
+ return self.previous_elements
+
+ def previousSiblingGenerator(self):
+ return self.previous_siblings
+
+ def parentGenerator(self):
+ return self.parents
+
+
+class NavigableString(unicode, PageElement):
+
+ PREFIX = ''
+ SUFFIX = ''
+
+ def __new__(cls, value):
+ """Create a new NavigableString.
+
+ When unpickling a NavigableString, this method is called with
+ the string in DEFAULT_OUTPUT_ENCODING. That encoding needs to be
+ passed in to the superclass's __new__ or the superclass won't know
+ how to handle non-ASCII characters.
+ """
+ if isinstance(value, unicode):
+ return unicode.__new__(cls, value)
+ return unicode.__new__(cls, value, DEFAULT_OUTPUT_ENCODING)
+
+ def __copy__(self):
+ return self
+
+ def __getnewargs__(self):
+ return (unicode(self),)
+
+ def __getattr__(self, attr):
+ """text.string gives you text. This is for backwards
+ compatibility for Navigable*String, but for CData* it lets you
+ get the string without the CData wrapper."""
+ if attr == 'string':
+ return self
+ else:
+ raise AttributeError(
+ "'%s' object has no attribute '%s'" % (
+ self.__class__.__name__, attr))
+
+ def output_ready(self, formatter="minimal"):
+ output = self.format_string(self, formatter)
+ return self.PREFIX + output + self.SUFFIX
+
+ @property
+ def name(self):
+ return None
+
+ @name.setter
+ def name(self, name):
+ raise AttributeError("A NavigableString cannot be given a name.")
+
+class PreformattedString(NavigableString):
+ """A NavigableString not subject to the normal formatting rules.
+
+ The string will be passed into the formatter (to trigger side effects),
+ but the return value will be ignored.
+ """
+
+ def output_ready(self, formatter="minimal"):
+ """CData strings are passed into the formatter.
+ But the return value is ignored."""
+ self.format_string(self, formatter)
+ return self.PREFIX + self + self.SUFFIX
+
+class CData(PreformattedString):
+
+ PREFIX = u'<![CDATA['
+ SUFFIX = u']]>'
+
+class ProcessingInstruction(PreformattedString):
+
+ PREFIX = u'<?'
+ SUFFIX = u'?>'
+
+class Comment(PreformattedString):
+
+ PREFIX = u'<!--'
+ SUFFIX = u'-->'
+
+
+class Declaration(PreformattedString):
+ PREFIX = u'<!'
+ SUFFIX = u'!>'
+
+
+class Doctype(PreformattedString):
+
+ @classmethod
+ def for_name_and_ids(cls, name, pub_id, system_id):
+ value = name or ''
+ if pub_id is not None:
+ value += ' PUBLIC "%s"' % pub_id
+ if system_id is not None:
+ value += ' "%s"' % system_id
+ elif system_id is not None:
+ value += ' SYSTEM "%s"' % system_id
+
+ return Doctype(value)
+
+ PREFIX = u'<!DOCTYPE '
+ SUFFIX = u'>\n'
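+ # Editorial sketch, not upstream bs4 code: for_name_and_ids builds the doctype
+ # text and output_ready() wraps it in the PREFIX/SUFFIX above:
+ #
+ #   from bs4.element import Doctype
+ #   Doctype.for_name_and_ids('html', None, None).output_ready()
+ #   # -> u'<!DOCTYPE html>\n'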
+
+
+class Tag(PageElement):
+
+ """Represents a found HTML tag with its attributes and contents."""
+
+ def __init__(self, parser=None, builder=None, name=None, namespace=None,
+ prefix=None, attrs=None, parent=None, previous=None):
+ "Basic constructor."
+
+ if parser is None:
+ self.parser_class = None
+ else:
+ # We don't actually store the parser object: that lets extracted
+ # chunks be garbage-collected.
+ self.parser_class = parser.__class__
+ if name is None:
+ raise ValueError("No value provided for new tag's name.")
+ self.name = name
+ self.namespace = namespace
+ self.prefix = prefix
+ if attrs is None:
+ attrs = {}
+ elif attrs and builder.cdata_list_attributes:
+ attrs = builder._replace_cdata_list_attribute_values(
+ self.name, attrs)
+ else:
+ attrs = dict(attrs)
+ self.attrs = attrs
+ self.contents = []
+ self.setup(parent, previous)
+ self.hidden = False
+
+ # Set up any substitutions, such as the charset in a META tag.
+ if builder is not None:
+ builder.set_up_substitutions(self)
+ self.can_be_empty_element = builder.can_be_empty_element(name)
+ else:
+ self.can_be_empty_element = False
+
+ parserClass = _alias("parser_class") # BS3
+
+ @property
+ def is_empty_element(self):
+ """Is this tag an empty-element tag? (aka a self-closing tag)
+
+ A tag that has contents is never an empty-element tag.
+
+ A tag that has no contents may or may not be an empty-element
+ tag. It depends on the builder used to create the tag. If the
+ builder has a designated list of empty-element tags, then only
+ a tag whose name shows up in that list is considered an
+ empty-element tag.
+
+ If the builder has no designated list of empty-element tags,
+ then any tag with no contents is an empty-element tag.
+ """
+ return len(self.contents) == 0 and self.can_be_empty_element
+ isSelfClosing = is_empty_element # BS3
+
+ @property
+ def string(self):
+ """Convenience property to get the single string within this tag.
+
+ :Return: If this tag has a single string child, return value
+ is that string. If this tag has no children, or more than one
+ child, return value is None. If this tag has one child tag,
+ return value is the 'string' attribute of the child tag,
+ recursively.
+ """
+ if len(self.contents) != 1:
+ return None
+ child = self.contents[0]
+ if isinstance(child, NavigableString):
+ return child
+ return child.string
+
+ @string.setter
+ def string(self, string):
+ self.clear()
+ self.append(string.__class__(string))
+
+ def _all_strings(self, strip=False, types=(NavigableString, CData)):
+ """Yield all strings of certain classes, possibly stripping them.
+
+ By default, yields only NavigableString and CData objects. So
+ no comments, processing instructions, etc.
+ """
+ for descendant in self.descendants:
+ if (
+ (types is None and not isinstance(descendant, NavigableString))
+ or
+ (types is not None and type(descendant) not in types)):
+ continue
+ if strip:
+ descendant = descendant.strip()
+ if len(descendant) == 0:
+ continue
+ yield descendant
+
+ strings = property(_all_strings)
+
+ @property
+ def stripped_strings(self):
+ for string in self._all_strings(True):
+ yield string
+
+ def get_text(self, separator=u"", strip=False,
+ types=(NavigableString, CData)):
+ """
+ Get all child strings, concatenated using the given separator.
+ """
+ return separator.join([s for s in self._all_strings(
+ strip, types=types)])
+ getText = get_text
+ text = property(get_text)
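+ # Editorial sketch, not upstream bs4 code: .string only answers when there is
+ # exactly one string inside a tag, while get_text() concatenates all of them:
+ #
+ #   from bs4 import BeautifulSoup
+ #   soup = BeautifulSoup('<p><b>one</b> two</p>', "html.parser")
+ #   soup.b.string                     # -> u'one'
+ #   soup.p.string                     # -> None (more than one child)
+ #   soup.p.get_text()                 # -> u'one two'
+ #   soup.p.get_text("|", strip=True)  # -> u'one|two'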
+
+ def decompose(self):
+ """Recursively destroys the contents of this tree."""
+ self.extract()
+ i = self
+ while i is not None:
+ next = i.next_element
+ i.__dict__.clear()
+ i.contents = []
+ i = next
+
+ def clear(self, decompose=False):
+ """
+ Extract all children. If decompose is True, decompose instead.
+ """
+ if decompose:
+ for element in self.contents[:]:
+ if isinstance(element, Tag):
+ element.decompose()
+ else:
+ element.extract()
+ else:
+ for element in self.contents[:]:
+ element.extract()
+
+ def index(self, element):
+ """
+ Find the index of a child by identity, not value. Avoids issues with
+ tag.contents.index(element) getting the index of equal elements.
+ """
+ for i, child in enumerate(self.contents):
+ if child is element:
+ return i
+ raise ValueError("Tag.index: element not in tag")
+
+ def get(self, key, default=None):
+ """Returns the value of the 'key' attribute for the tag, or
+ the value given for 'default' if it doesn't have that
+ attribute."""
+ return self.attrs.get(key, default)
+
+ def has_attr(self, key):
+ return key in self.attrs
+
+ def __hash__(self):
+ return str(self).__hash__()
+
+ def __getitem__(self, key):
+ """tag[key] returns the value of the 'key' attribute for the tag,
+ and throws an exception if it's not there."""
+ return self.attrs[key]
+
+ def __iter__(self):
+ "Iterating over a tag iterates over its contents."
+ return iter(self.contents)
+
+ def __len__(self):
+ "The length of a tag is the length of its list of contents."
+ return len(self.contents)
+
+ def __contains__(self, x):
+ return x in self.contents
+
+ def __nonzero__(self):
+ "A tag is non-None even if it has no contents."
+ return True
+
+ def __setitem__(self, key, value):
+ """Setting tag[key] sets the value of the 'key' attribute for the
+ tag."""
+ self.attrs[key] = value
+
+ def __delitem__(self, key):
+ "Deleting tag[key] deletes all 'key' attributes for the tag."
+ self.attrs.pop(key, None)
+
+ def __call__(self, *args, **kwargs):
+ """Calling a tag like a function is the same as calling its
+ find_all() method. Eg. tag('a') returns a list of all the A tags
+ found within this tag."""
+ return self.find_all(*args, **kwargs)
+
+ def __getattr__(self, tag):
+ #print "Getattr %s.%s" % (self.__class__, tag)
+ if len(tag) > 3 and tag.endswith('Tag'):
+ # BS3: soup.aTag -> soup.find("a")
+ tag_name = tag[:-3]
+ warnings.warn(
+ '.%sTag is deprecated, use .find("%s") instead.' % (
+ tag_name, tag_name))
+ return self.find(tag_name)
+ # We special case contents to avoid recursion.
+ elif not tag.startswith("__") and not tag=="contents":
+ return self.find(tag)
+ raise AttributeError(
+ "'%s' object has no attribute '%s'" % (self.__class__, tag))
+
+ def __eq__(self, other):
+ """Returns true iff this tag has the same name, the same attributes,
+ and the same contents (recursively) as the given tag."""
+ if self is other:
+ return True
+ if (not hasattr(other, 'name') or
+ not hasattr(other, 'attrs') or
+ not hasattr(other, 'contents') or
+ self.name != other.name or
+ self.attrs != other.attrs or
+ len(self) != len(other)):
+ return False
+ for i, my_child in enumerate(self.contents):
+ if my_child != other.contents[i]:
+ return False
+ return True
+
+ def __ne__(self, other):
+ """Returns true iff this tag is not identical to the other tag,
+ as defined in __eq__."""
+ return not self == other
+
+ def __repr__(self, encoding=DEFAULT_OUTPUT_ENCODING):
+ """Renders this tag as a string."""
+ return self.encode(encoding)
+
+ def __unicode__(self):
+ return self.decode()
+
+ def __str__(self):
+ return self.encode()
+
+ if PY3K:
+ __str__ = __repr__ = __unicode__
+
+ def encode(self, encoding=DEFAULT_OUTPUT_ENCODING,
+ indent_level=None, formatter="minimal",
+ errors="xmlcharrefreplace"):
+ # Turn the data structure into Unicode, then encode the
+ # Unicode.
+ u = self.decode(indent_level, encoding, formatter)
+ return u.encode(encoding, errors)
+
+ def _should_pretty_print(self, indent_level):
+ """Should this tag be pretty-printed?"""
+ return (
+ indent_level is not None and
+ (self.name not in HTMLAwareEntitySubstitution.preformatted_tags
+ or self._is_xml))
+
+ def decode(self, indent_level=None,
+ eventual_encoding=DEFAULT_OUTPUT_ENCODING,
+ formatter="minimal"):
+ """Returns a Unicode representation of this tag and its contents.
+
+ :param eventual_encoding: The tag is destined to be
+ encoded into this encoding. This method is _not_
+ responsible for performing that encoding. This information
+ is passed in so that it can be substituted in if the
+ document contains a <META> tag that mentions the document's
+ encoding.
+ """
+
+ # First off, turn a string formatter into a function. This
+ # will stop the lookup from happening over and over again.
+ if not callable(formatter):
+ formatter = self._formatter_for_name(formatter)
+
+ attrs = []
+ if self.attrs:
+ for key, val in sorted(self.attrs.items()):
+ if val is None:
+ decoded = key
+ else:
+ if isinstance(val, list) or isinstance(val, tuple):
+ val = ' '.join(val)
+ elif not isinstance(val, basestring):
+ val = unicode(val)
+ elif (
+ isinstance(val, AttributeValueWithCharsetSubstitution)
+ and eventual_encoding is not None):
+ val = val.encode(eventual_encoding)
+
+ text = self.format_string(val, formatter)
+ decoded = (
+ unicode(key) + '='
+ + EntitySubstitution.quoted_attribute_value(text))
+ attrs.append(decoded)
+ close = ''
+ closeTag = ''
+
+ prefix = ''
+ if self.prefix:
+ prefix = self.prefix + ":"
+
+ if self.is_empty_element:
+ close = '/'
+ else:
+ closeTag = '</%s%s>' % (prefix, self.name)
+
+ pretty_print = self._should_pretty_print(indent_level)
+ space = ''
+ indent_space = ''
+ if indent_level is not None:
+ indent_space = (' ' * (indent_level - 1))
+ if pretty_print:
+ space = indent_space
+ indent_contents = indent_level + 1
+ else:
+ indent_contents = None
+ contents = self.decode_contents(
+ indent_contents, eventual_encoding, formatter)
+
+ if self.hidden:
+ # This is the 'document root' object.
+ s = contents
+ else:
+ s = []
+ attribute_string = ''
+ if attrs:
+ attribute_string = ' ' + ' '.join(attrs)
+ if indent_level is not None:
+ # Even if this particular tag is not pretty-printed,
+ # we should indent up to the start of the tag.
+ s.append(indent_space)
+ s.append('<%s%s%s%s>' % (
+ prefix, self.name, attribute_string, close))
+ if pretty_print:
+ s.append("\n")
+ s.append(contents)
+ if pretty_print and contents and contents[-1] != "\n":
+ s.append("\n")
+ if pretty_print and closeTag:
+ s.append(space)
+ s.append(closeTag)
+ if indent_level is not None and closeTag and self.next_sibling:
+ # Even if this particular tag is not pretty-printed,
+ # we're now done with the tag, and we should add a
+ # newline if appropriate.
+ s.append("\n")
+ s = ''.join(s)
+ return s
+
+ def prettify(self, encoding=None, formatter="minimal"):
+ if encoding is None:
+ return self.decode(True, formatter=formatter)
+ else:
+ return self.encode(encoding, True, formatter=formatter)
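+ # Editorial sketch, not upstream bs4 code: decode()/encode() render the tree,
+ # and prettify() is decode() with indentation switched on; roughly:
+ #
+ #   from bs4 import BeautifulSoup
+ #   soup = BeautifulSoup("<p>One<br/>Two</p>", "html.parser")
+ #   soup.p.decode()          # -> u'<p>One<br/>Two</p>'
+ #   print soup.p.prettify()  # one child per line, one space of indent per level:
+ #   # <p>
+ #   #  One
+ #   #  <br/>
+ #   #  Two
+ #   # </p>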
+
+ def decode_contents(self, indent_level=None,
+ eventual_encoding=DEFAULT_OUTPUT_ENCODING,
+ formatter="minimal"):
+ """Renders the contents of this tag as a Unicode string.
+
+ :param eventual_encoding: The tag is destined to be
+ encoded into this encoding. This method is _not_
+ responsible for performing that encoding. This information
+ is passed in so that it can be substituted in if the
+ document contains a <META> tag that mentions the document's
+ encoding.
+ """
+ # First off, turn a string formatter into a function. This
+ # will stop the lookup from happening over and over again.
+ if not callable(formatter):
+ formatter = self._formatter_for_name(formatter)
+
+ pretty_print = (indent_level is not None)
+ s = []
+ for c in self:
+ text = None
+ if isinstance(c, NavigableString):
+ text = c.output_ready(formatter)
+ elif isinstance(c, Tag):
+ s.append(c.decode(indent_level, eventual_encoding,
+ formatter))
+ if text and indent_level and not self.name == 'pre':
+ text = text.strip()
+ if text:
+ if pretty_print and not self.name == 'pre':
+ s.append(" " * (indent_level - 1))
+ s.append(text)
+ if pretty_print and not self.name == 'pre':
+ s.append("\n")
+ return ''.join(s)
+
+ def encode_contents(
+ self, indent_level=None, encoding=DEFAULT_OUTPUT_ENCODING,
+ formatter="minimal"):
+ """Renders the contents of this tag as a bytestring."""
+ contents = self.decode_contents(indent_level, encoding, formatter)
+ return contents.encode(encoding)
+
+ # Old method for BS3 compatibility
+ def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING,
+ prettyPrint=False, indentLevel=0):
+ if not prettyPrint:
+ indentLevel = None
+ return self.encode_contents(
+ indent_level=indentLevel, encoding=encoding)
+
+ #Soup methods
+
+ def find(self, name=None, attrs={}, recursive=True, text=None,
+ **kwargs):
+ """Return only the first child of this Tag matching the given
+ criteria."""
+ r = None
+ l = self.find_all(name, attrs, recursive, text, 1, **kwargs)
+ if l:
+ r = l[0]
+ return r
+ findChild = find
+
+ def find_all(self, name=None, attrs={}, recursive=True, text=None,
+ limit=None, **kwargs):
+ """Extracts a list of Tag objects that match the given
+ criteria. You can specify the name of the Tag and any
+ attributes you want the Tag to have.
+
+ The value of a key-value pair in the 'attrs' map can be a
+ string, a list of strings, a regular expression object, or a
+ callable that takes a string and returns whether or not the
+ string matches for some custom definition of 'matches'. The
+ same is true of the tag name."""
+
+ generator = self.descendants
+ if not recursive:
+ generator = self.children
+ return self._find_all(name, attrs, text, limit, generator, **kwargs)
+ findAll = find_all # BS3
+ findChildren = find_all # BS2
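+ # Editorial sketch, not upstream bs4 code: name and attribute filters may be
+ # strings, regular expressions, lists or callables, and class_ is accepted as
+ # a keyword (both are normalized by SoupStrainer further down):
+ #
+ #   import re
+ #   from bs4 import BeautifulSoup
+ #   soup = BeautifulSoup(
+ #       '<a href="http://x.example/">1</a><a id="two" class="c">2</a>',
+ #       "html.parser")
+ #   soup.find_all("a")                         # both <a> tags
+ #   soup.find_all("a", id="two")               # -> [<a class="c" id="two">2</a>]
+ #   soup.find_all(href=re.compile("^http"))    # -> [<a href="http://x.example/">1</a>]
+ #   soup.find_all(class_="c")                  # -> [<a class="c" id="two">2</a>]
+ #   soup("a")                                  # calling a tag is the same as find_all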
+
+ #Generator methods
+ @property
+ def children(self):
+ # return iter() to make the purpose of the method clear
+ return iter(self.contents) # XXX This seems to be untested.
+
+ @property
+ def descendants(self):
+ if not len(self.contents):
+ return
+ stopNode = self._last_descendant().next_element
+ current = self.contents[0]
+ while current is not stopNode:
+ yield current
+ current = current.next_element
+
+ # CSS selector code
+
+ _selector_combinators = ['>', '+', '~']
+ _select_debug = False
+ def select(self, selector, _candidate_generator=None):
+ """Perform a CSS selection operation on the current element."""
+ tokens = selector.split()
+ current_context = [self]
+
+ if tokens[-1] in self._selector_combinators:
+ raise ValueError(
+ 'Final combinator "%s" is missing an argument.' % tokens[-1])
+ if self._select_debug:
+ print 'Running CSS selector "%s"' % selector
+ for index, token in enumerate(tokens):
+ if self._select_debug:
+ print ' Considering token "%s"' % token
+ recursive_candidate_generator = None
+ tag_name = None
+ if tokens[index-1] in self._selector_combinators:
+ # This token was consumed by the previous combinator. Skip it.
+ if self._select_debug:
+ print ' Token was consumed by the previous combinator.'
+ continue
+ # Each operation corresponds to a checker function, a rule
+ # for determining whether a candidate matches the
+ # selector. Candidates are generated by the active
+ # iterator.
+ checker = None
+
+ m = self.attribselect_re.match(token)
+ if m is not None:
+ # Attribute selector
+ tag_name, attribute, operator, value = m.groups()
+ checker = self._attribute_checker(operator, attribute, value)
+
+ elif '#' in token:
+ # ID selector
+ tag_name, tag_id = token.split('#', 1)
+ def id_matches(tag):
+ return tag.get('id', None) == tag_id
+ checker = id_matches
+
+ elif '.' in token:
+ # Class selector
+ tag_name, klass = token.split('.', 1)
+ classes = set(klass.split('.'))
+ def classes_match(candidate):
+ return classes.issubset(candidate.get('class', []))
+ checker = classes_match
+
+ elif ':' in token:
+ # Pseudo-class
+ tag_name, pseudo = token.split(':', 1)
+ if tag_name == '':
+ raise ValueError(
+ "A pseudo-class must be prefixed with a tag name.")
+ pseudo_attributes = re.match('([a-zA-Z\d-]+)\(([a-zA-Z\d]+)\)', pseudo)
+ found = []
+ if pseudo_attributes is not None:
+ pseudo_type, pseudo_value = pseudo_attributes.groups()
+ if pseudo_type == 'nth-of-type':
+ try:
+ pseudo_value = int(pseudo_value)
+ except:
+ raise NotImplementedError(
+ 'Only numeric values are currently supported for the nth-of-type pseudo-class.')
+ if pseudo_value < 1:
+ raise ValueError(
+ 'nth-of-type pseudo-class value must be at least 1.')
+ class Counter(object):
+ def __init__(self, destination):
+ self.count = 0
+ self.destination = destination
+
+ def nth_child_of_type(self, tag):
+ self.count += 1
+ if self.count == self.destination:
+ return True
+ if self.count > self.destination:
+ # Stop the generator that's sending us
+ # these things.
+ raise StopIteration()
+ return False
+ checker = Counter(pseudo_value).nth_child_of_type
+ else:
+ raise NotImplementedError(
+ 'Only the following pseudo-classes are implemented: nth-of-type.')
+
+ elif token == '*':
+ # Star selector -- matches everything
+ pass
+ elif token == '>':
+ # Run the next token as a CSS selector against the
+ # direct children of each tag in the current context.
+ recursive_candidate_generator = lambda tag: tag.children
+ elif token == '~':
+ # Run the next token as a CSS selector against the
+ # siblings of each tag in the current context.
+ recursive_candidate_generator = lambda tag: tag.next_siblings
+ elif token == '+':
+ # For each tag in the current context, run the next
+ # token as a CSS selector against the tag's next
+ # sibling that's a tag.
+ def next_tag_sibling(tag):
+ yield tag.find_next_sibling(True)
+ recursive_candidate_generator = next_tag_sibling
+
+ elif self.tag_name_re.match(token):
+ # Just a tag name.
+ tag_name = token
+ else:
+ raise ValueError(
+ 'Unsupported or invalid CSS selector: "%s"' % token)
+
+ if recursive_candidate_generator:
+ # This happens when the selector looks like "> foo".
+ #
+ # The generator calls select() recursively on every
+ # member of the current context, passing in a different
+ # candidate generator and a different selector.
+ #
+ # In the case of "> foo", the candidate generator is
+ # one that yields a tag's direct children (">"), and
+ # the selector is "foo".
+ next_token = tokens[index+1]
+ def recursive_select(tag):
+ if self._select_debug:
+ print ' Calling select("%s") recursively on %s %s' % (next_token, tag.name, tag.attrs)
+ print '-' * 40
+ for i in tag.select(next_token, recursive_candidate_generator):
+ if self._select_debug:
+ print '(Recursive select picked up candidate %s %s)' % (i.name, i.attrs)
+ yield i
+ if self._select_debug:
+ print '-' * 40
+ _use_candidate_generator = recursive_select
+ elif _candidate_generator is None:
+ # By default, a tag's candidates are all of its
+ # children. If tag_name is defined, only yield tags
+ # with that name.
+ if self._select_debug:
+ if tag_name:
+ check = tag_name
+ else:
+ check = "[any]"
+ print ' Default candidate generator, tag name="%s"' % check
+ if self._select_debug:
+ # This is redundant with later code, but it stops
+ # a bunch of bogus tags from cluttering up the
+ # debug log.
+ def default_candidate_generator(tag):
+ for child in tag.descendants:
+ if not isinstance(child, Tag):
+ continue
+ if tag_name and not child.name == tag_name:
+ continue
+ yield child
+ _use_candidate_generator = default_candidate_generator
+ else:
+ _use_candidate_generator = lambda tag: tag.descendants
+ else:
+ _use_candidate_generator = _candidate_generator
+
+ new_context = []
+ new_context_ids = set([])
+ for tag in current_context:
+ if self._select_debug:
+ print " Running candidate generator on %s %s" % (
+ tag.name, repr(tag.attrs))
+ for candidate in _use_candidate_generator(tag):
+ if not isinstance(candidate, Tag):
+ continue
+ if tag_name and candidate.name != tag_name:
+ continue
+ if checker is not None:
+ try:
+ result = checker(candidate)
+ except StopIteration:
+ # The checker has decided we should no longer
+ # run the generator.
+ break
+ if checker is None or result:
+ if self._select_debug:
+ print " SUCCESS %s %s" % (candidate.name, repr(candidate.attrs))
+ if id(candidate) not in new_context_ids:
+ # If a tag matches a selector more than once,
+ # don't include it in the context more than once.
+ new_context.append(candidate)
+ new_context_ids.add(id(candidate))
+ elif self._select_debug:
+ print " FAILURE %s %s" % (candidate.name, repr(candidate.attrs))
+
+ current_context = new_context
+
+ if self._select_debug:
+ print "Final verdict:"
+ for i in current_context:
+ print " %s %s" % (i.name, i.attrs)
+ return current_context
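+ # Editorial sketch, not upstream bs4 code: this early select() understands tag
+ # names, #id, .class, [attr] / [attr=value] (plus ^= $= *= ~= |=), the
+ # combinators '>', '+' and '~', and :nth-of-type(n):
+ #
+ #   from bs4 import BeautifulSoup
+ #   soup = BeautifulSoup(
+ #       '<div id="main"><p class="note">hi</p><p>bye</p></div>', "html.parser")
+ #   soup.select("div p.note")        # -> [<p class="note">hi</p>]
+ #   soup.select("#main > p")         # both <p> tags (direct children of the div)
+ #   soup.select('p[class=note]')     # -> [<p class="note">hi</p>]
+ #   soup.select("p:nth-of-type(2)")  # -> [<p>bye</p>]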
+
+ # Old names for backwards compatibility
+ def childGenerator(self):
+ return self.children
+
+ def recursiveChildGenerator(self):
+ return self.descendants
+
+ def has_key(self, key):
+ """This was kind of misleading because has_key() (attributes)
+ was different from __in__ (contents). has_key() is gone in
+ Python 3, anyway."""
+ warnings.warn('has_key is deprecated. Use has_attr("%s") instead.' % (
+ key))
+ return self.has_attr(key)
+
+# Next, a couple classes to represent queries and their results.
+class SoupStrainer(object):
+ """Encapsulates a number of ways of matching a markup element (tag or
+ text)."""
+
+ def __init__(self, name=None, attrs={}, text=None, **kwargs):
+ self.name = self._normalize_search_value(name)
+ if not isinstance(attrs, dict):
+ # Treat a non-dict value for attrs as a search for the 'class'
+ # attribute.
+ kwargs['class'] = attrs
+ attrs = None
+
+ if 'class_' in kwargs:
+ # Treat class_="foo" as a search for the 'class'
+ # attribute, overriding any non-dict value for attrs.
+ kwargs['class'] = kwargs['class_']
+ del kwargs['class_']
+
+ if kwargs:
+ if attrs:
+ attrs = attrs.copy()
+ attrs.update(kwargs)
+ else:
+ attrs = kwargs
+ normalized_attrs = {}
+ for key, value in attrs.items():
+ normalized_attrs[key] = self._normalize_search_value(value)
+
+ self.attrs = normalized_attrs
+ self.text = self._normalize_search_value(text)
+
+ def _normalize_search_value(self, value):
+ # Leave it alone if it's a Unicode string, a callable, a
+ # regular expression, a boolean, or None.
+ if (isinstance(value, unicode) or callable(value) or hasattr(value, 'match')
+ or isinstance(value, bool) or value is None):
+ return value
+
+ # If it's a bytestring, convert it to Unicode, treating it as UTF-8.
+ if isinstance(value, bytes):
+ return value.decode("utf8")
+
+ # If it's listlike, convert it into a list of strings.
+ if hasattr(value, '__iter__'):
+ new_value = []
+ for v in value:
+ if (hasattr(v, '__iter__') and not isinstance(v, bytes)
+ and not isinstance(v, unicode)):
+ # This is almost certainly the user's mistake. In the
+ # interests of avoiding infinite loops, we'll let
+ # it through as-is rather than doing a recursive call.
+ new_value.append(v)
+ else:
+ new_value.append(self._normalize_search_value(v))
+ return new_value
+
+ # Otherwise, convert it into a Unicode string.
+ # The unicode(str()) thing is so this will do the same thing on Python 2
+ # and Python 3.
+ return unicode(str(value))
+
+ def __str__(self):
+ if self.text:
+ return self.text
+ else:
+ return "%s|%s" % (self.name, self.attrs)
+
+ def search_tag(self, markup_name=None, markup_attrs={}):
+ found = None
+ markup = None
+ if isinstance(markup_name, Tag):
+ markup = markup_name
+ markup_attrs = markup
+ call_function_with_tag_data = (
+ isinstance(self.name, collections.Callable)
+ and not isinstance(markup_name, Tag))
+
+ if ((not self.name)
+ or call_function_with_tag_data
+ or (markup and self._matches(markup, self.name))
+ or (not markup and self._matches(markup_name, self.name))):
+ if call_function_with_tag_data:
+ match = self.name(markup_name, markup_attrs)
+ else:
+ match = True
+ markup_attr_map = None
+ for attr, match_against in list(self.attrs.items()):
+ if not markup_attr_map:
+ if hasattr(markup_attrs, 'get'):
+ markup_attr_map = markup_attrs
+ else:
+ markup_attr_map = {}
+ for k, v in markup_attrs:
+ markup_attr_map[k] = v
+ attr_value = markup_attr_map.get(attr)
+ if not self._matches(attr_value, match_against):
+ match = False
+ break
+ if match:
+ if markup:
+ found = markup
+ else:
+ found = markup_name
+ if found and self.text and not self._matches(found.string, self.text):
+ found = None
+ return found
+ searchTag = search_tag
+
+ def search(self, markup):
+ # print 'looking for %s in %s' % (self, markup)
+ found = None
+ # If given a list of items, scan it for a text element that
+ # matches.
+ if hasattr(markup, '__iter__') and not isinstance(markup, (Tag, basestring)):
+ for element in markup:
+ if isinstance(element, NavigableString) \
+ and self.search(element):
+ found = element
+ break
+ # If it's a Tag, make sure its name or attributes match.
+ # Don't bother with Tags if we're searching for text.
+ elif isinstance(markup, Tag):
+ if not self.text or self.name or self.attrs:
+ found = self.search_tag(markup)
+ # If it's text, make sure the text matches.
+ elif isinstance(markup, NavigableString) or \
+ isinstance(markup, basestring):
+ if not self.name and not self.attrs and self._matches(markup, self.text):
+ found = markup
+ else:
+ raise Exception(
+ "I don't know how to match against a %s" % markup.__class__)
+ return found
+
+ def _matches(self, markup, match_against):
+ # print u"Matching %s against %s" % (markup, match_against)
+ result = False
+ if isinstance(markup, list) or isinstance(markup, tuple):
+ # This should only happen when searching a multi-valued attribute
+ # like 'class'.
+ if (isinstance(match_against, unicode)
+ and ' ' in match_against):
+ # A bit of a special case. If they try to match "foo
+ # bar" on a multivalue attribute's value, only accept
+ # the literal value "foo bar"
+ #
+ # XXX This is going to be pretty slow because we keep
+ # splitting match_against. But it shouldn't come up
+ # too often.
+ return (whitespace_re.split(match_against) == markup)
+ else:
+ for item in markup:
+ if self._matches(item, match_against):
+ return True
+ return False
+
+ if match_against is True:
+ # True matches any non-None value.
+ return markup is not None
+
+ if isinstance(match_against, collections.Callable):
+ return match_against(markup)
+
+ # Custom callables take the tag as an argument, but all
+ # other ways of matching match the tag name as a string.
+ if isinstance(markup, Tag):
+ markup = markup.name
+
+ # Ensure that `markup` is either a Unicode string, or None.
+ markup = self._normalize_search_value(markup)
+
+ if markup is None:
+ # None matches None, False, an empty string, an empty list, and so on.
+ return not match_against
+
+ if isinstance(match_against, unicode):
+ # Exact string match
+ return markup == match_against
+
+ if hasattr(match_against, 'match'):
+ # Regexp match
+ return match_against.search(markup)
+
+ if hasattr(match_against, '__iter__'):
+ # The markup must be an exact match against something
+ # in the iterable.
+ return markup in match_against
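+ # Editorial sketch, not upstream bs4 code: a SoupStrainer can be handed to the
+ # parser via parse_only to keep only matching tags, or queried directly:
+ #
+ #   import re
+ #   from bs4 import BeautifulSoup, SoupStrainer
+ #   only_links = SoupStrainer("a", href=re.compile("^https?://"))
+ #   page = '<a href="http://example.org/">x</a><a href="#top">y</a><p>z</p>'
+ #   BeautifulSoup(page, "html.parser", parse_only=only_links)
+ #   # -> a document holding only <a href="http://example.org/">x</a>
+ #   only_links.search_tag("a", {"href": "http://example.org/"})   # truthy
+ #   only_links.search_tag("a", {"href": "#top"})                  # -> None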
+
+
+class ResultSet(list):
+ """A ResultSet is just a list that keeps track of the SoupStrainer
+ that created it."""
+ def __init__(self, source, result=()):
+ super(ResultSet, self).__init__(result)
+ self.source = source
diff --git a/bitbake/lib/bs4/testing.py b/bitbake/lib/bs4/testing.py
new file mode 100644
index 0000000000..fd4495ac58
--- /dev/null
+++ b/bitbake/lib/bs4/testing.py
@@ -0,0 +1,592 @@
+"""Helper classes for tests."""
+
+import copy
+import functools
+import unittest
+from unittest import TestCase
+from bs4 import BeautifulSoup
+from bs4.element import (
+ CharsetMetaAttributeValue,
+ Comment,
+ ContentMetaAttributeValue,
+ Doctype,
+ SoupStrainer,
+)
+
+from bs4.builder import HTMLParserTreeBuilder
+default_builder = HTMLParserTreeBuilder
+
+
+class SoupTest(unittest.TestCase):
+
+ @property
+ def default_builder(self):
+ return default_builder()
+
+ def soup(self, markup, **kwargs):
+ """Build a Beautiful Soup object from markup."""
+ builder = kwargs.pop('builder', self.default_builder)
+ return BeautifulSoup(markup, builder=builder, **kwargs)
+
+ def document_for(self, markup):
+ """Turn an HTML fragment into a document.
+
+ The details depend on the builder.
+ """
+ return self.default_builder.test_fragment_to_document(markup)
+
+ def assertSoupEquals(self, to_parse, compare_parsed_to=None):
+ builder = self.default_builder
+ obj = BeautifulSoup(to_parse, builder=builder)
+ if compare_parsed_to is None:
+ compare_parsed_to = to_parse
+
+ self.assertEqual(obj.decode(), self.document_for(compare_parsed_to))
+
+
+class HTMLTreeBuilderSmokeTest(object):
+
+ """A basic test of a treebuilder's competence.
+
+ Any HTML treebuilder, present or future, should be able to pass
+ these tests. With invalid markup, there's room for interpretation,
+ and different parsers can handle it differently. But with the
+ markup in these tests, there's not much room for interpretation.
+ """
+
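+ # Editorial note, not upstream bs4 code: concrete test modules in this tree
+ # are expected to mix this class into a SoupTest subclass and point
+ # default_builder at the builder under test, along the lines of:
+ #
+ #   class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
+ #       @property
+ #       def default_builder(self):
+ #           return HTMLParserTreeBuilder()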
+ def assertDoctypeHandled(self, doctype_fragment):
+ """Assert that a given doctype string is handled correctly."""
+ doctype_str, soup = self._document_with_doctype(doctype_fragment)
+
+ # Make sure a Doctype object was created.
+ doctype = soup.contents[0]
+ self.assertEqual(doctype.__class__, Doctype)
+ self.assertEqual(doctype, doctype_fragment)
+ self.assertEqual(str(soup)[:len(doctype_str)], doctype_str)
+
+ # Make sure that the doctype was correctly associated with the
+ # parse tree and that the rest of the document parsed.
+ self.assertEqual(soup.p.contents[0], 'foo')
+
+ def _document_with_doctype(self, doctype_fragment):
+ """Generate and parse a document with the given doctype."""
+ doctype = '<!DOCTYPE %s>' % doctype_fragment
+ markup = doctype + '\n<p>foo</p>'
+ soup = self.soup(markup)
+ return doctype, soup
+
+ def test_normal_doctypes(self):
+ """Make sure normal, everyday HTML doctypes are handled correctly."""
+ self.assertDoctypeHandled("html")
+ self.assertDoctypeHandled(
+ 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"')
+
+ def test_empty_doctype(self):
+ soup = self.soup("<!DOCTYPE>")
+ doctype = soup.contents[0]
+ self.assertEqual("", doctype.strip())
+
+ def test_public_doctype_with_url(self):
+ doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"'
+ self.assertDoctypeHandled(doctype)
+
+ def test_system_doctype(self):
+ self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"')
+
+ def test_namespaced_system_doctype(self):
+ # We can handle a namespaced doctype with a system ID.
+ self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"')
+
+ def test_namespaced_public_doctype(self):
+ # Test a namespaced doctype with a public id.
+ self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"')
+
+ def test_real_xhtml_document(self):
+ """A real XHTML document should come out more or less the same as it went in."""
+ markup = b"""<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head><title>Hello.</title></head>
+<body>Goodbye.</body>
+</html>"""
+ soup = self.soup(markup)
+ self.assertEqual(
+ soup.encode("utf-8").replace(b"\n", b""),
+ markup.replace(b"\n", b""))
+
+ def test_deepcopy(self):
+ """Make sure you can copy the tree builder.
+
+ This is important because the builder is part of a
+ BeautifulSoup object, and we want to be able to copy that.
+ """
+ copy.deepcopy(self.default_builder)
+
+ def test_p_tag_is_never_empty_element(self):
+ """A <p> tag is never designated as an empty-element tag.
+
+ Even if the markup shows it as an empty-element tag, it
+ shouldn't be presented that way.
+ """
+ soup = self.soup("<p/>")
+ self.assertFalse(soup.p.is_empty_element)
+ self.assertEqual(str(soup.p), "<p></p>")
+
+ def test_unclosed_tags_get_closed(self):
+ """A tag that's not closed by the end of the document should be closed.
+
+ This applies to all tags except empty-element tags.
+ """
+ self.assertSoupEquals("<p>", "<p></p>")
+ self.assertSoupEquals("<b>", "<b></b>")
+
+ self.assertSoupEquals("<br>", "<br/>")
+
+ def test_br_is_always_empty_element_tag(self):
+ """A <br> tag is designated as an empty-element tag.
+
+ Some parsers treat <br></br> as one <br/> tag, some parsers as
+ two tags, but it should always be an empty-element tag.
+ """
+ soup = self.soup("<br></br>")
+ self.assertTrue(soup.br.is_empty_element)
+ self.assertEqual(str(soup.br), "<br/>")
+
+ def test_nested_formatting_elements(self):
+ self.assertSoupEquals("<em><em></em></em>")
+
+ def test_comment(self):
+ # Comments are represented as Comment objects.
+ markup = "<p>foo<!--foobar-->baz</p>"
+ self.assertSoupEquals(markup)
+
+ soup = self.soup(markup)
+ comment = soup.find(text="foobar")
+ self.assertEqual(comment.__class__, Comment)
+
+ # The comment is properly integrated into the tree.
+ foo = soup.find(text="foo")
+ self.assertEqual(comment, foo.next_element)
+ baz = soup.find(text="baz")
+ self.assertEqual(comment, baz.previous_element)
+
+ def test_preserved_whitespace_in_pre_and_textarea(self):
+ """Whitespace must be preserved in <pre> and <textarea> tags."""
+ self.assertSoupEquals("<pre> </pre>")
+ self.assertSoupEquals("<textarea> woo </textarea>")
+
+ def test_nested_inline_elements(self):
+ """Inline elements can be nested indefinitely."""
+ b_tag = "<b>Inside a B tag</b>"
+ self.assertSoupEquals(b_tag)
+
+ nested_b_tag = "<p>A <i>nested <b>tag</b></i></p>"
+ self.assertSoupEquals(nested_b_tag)
+
+ double_nested_b_tag = "<p>A <a>doubly <i>nested <b>tag</b></i></a></p>"
+ self.assertSoupEquals(double_nested_b_tag)
+
+ def test_nested_block_level_elements(self):
+ """Block elements can be nested."""
+ soup = self.soup('<blockquote><p><b>Foo</b></p></blockquote>')
+ blockquote = soup.blockquote
+ self.assertEqual(blockquote.p.b.string, 'Foo')
+ self.assertEqual(blockquote.b.string, 'Foo')
+
+ def test_correctly_nested_tables(self):
+ """One table can go inside another one."""
+ markup = ('<table id="1">'
+ '<tr>'
+ "<td>Here's another table:"
+ '<table id="2">'
+ '<tr><td>foo</td></tr>'
+ '</table></td>')
+
+ self.assertSoupEquals(
+ markup,
+ '<table id="1"><tr><td>Here\'s another table:'
+ '<table id="2"><tr><td>foo</td></tr></table>'
+ '</td></tr></table>')
+
+ self.assertSoupEquals(
+ "<table><thead><tr><td>Foo</td></tr></thead>"
+ "<tbody><tr><td>Bar</td></tr></tbody>"
+ "<tfoot><tr><td>Baz</td></tr></tfoot></table>")
+
+ def test_deeply_nested_multivalued_attribute(self):
+ # html5lib can set the attributes of the same tag many times
+ # as it rearranges the tree. This has caused problems with
+ # multivalued attributes.
+ markup = '<table><div><div class="css"></div></div></table>'
+ soup = self.soup(markup)
+ self.assertEqual(["css"], soup.div.div['class'])
+
+ def test_angle_brackets_in_attribute_values_are_escaped(self):
+ self.assertSoupEquals('<a b="<a>"></a>', '<a b="&lt;a&gt;"></a>')
+
+ def test_entities_in_attributes_converted_to_unicode(self):
+ expect = u'<p id="pi\N{LATIN SMALL LETTER N WITH TILDE}ata"></p>'
+ self.assertSoupEquals('<p id="pi&#241;ata"></p>', expect)
+ self.assertSoupEquals('<p id="pi&#xf1;ata"></p>', expect)
+ self.assertSoupEquals('<p id="pi&#Xf1;ata"></p>', expect)
+ self.assertSoupEquals('<p id="pi&ntilde;ata"></p>', expect)
+
+ def test_entities_in_text_converted_to_unicode(self):
+ expect = u'<p>pi\N{LATIN SMALL LETTER N WITH TILDE}ata</p>'
+ self.assertSoupEquals("<p>pi&#241;ata</p>", expect)
+ self.assertSoupEquals("<p>pi&#xf1;ata</p>", expect)
+ self.assertSoupEquals("<p>pi&#Xf1;ata</p>", expect)
+ self.assertSoupEquals("<p>pi&ntilde;ata</p>", expect)
+
+ def test_quot_entity_converted_to_quotation_mark(self):
+ self.assertSoupEquals("<p>I said &quot;good day!&quot;</p>",
+ '<p>I said "good day!"</p>')
+
+ def test_out_of_range_entity(self):
+ expect = u"\N{REPLACEMENT CHARACTER}"
+ self.assertSoupEquals("&#10000000000000;", expect)
+ self.assertSoupEquals("&#x10000000000000;", expect)
+ self.assertSoupEquals("&#1000000000;", expect)
+
+ def test_multipart_strings(self):
+ "Mostly to prevent a recurrence of a bug in the html5lib treebuilder."
+ soup = self.soup("<html><h2>\nfoo</h2><p></p></html>")
+ self.assertEqual("p", soup.h2.string.next_element.name)
+ self.assertEqual("p", soup.p.name)
+
+ def test_basic_namespaces(self):
+ """Parsers don't need to *understand* namespaces, but at the
+ very least they should not choke on namespaces or lose
+ data."""
+
+ markup = b'<html xmlns="http://www.w3.org/1999/xhtml" xmlns:mathml="http://www.w3.org/1998/Math/MathML" xmlns:svg="http://www.w3.org/2000/svg"><head></head><body><mathml:msqrt>4</mathml:msqrt><b svg:fill="red"></b></body></html>'
+ soup = self.soup(markup)
+ self.assertEqual(markup, soup.encode())
+ html = soup.html
+ self.assertEqual('http://www.w3.org/1999/xhtml', soup.html['xmlns'])
+ self.assertEqual(
+ 'http://www.w3.org/1998/Math/MathML', soup.html['xmlns:mathml'])
+ self.assertEqual(
+ 'http://www.w3.org/2000/svg', soup.html['xmlns:svg'])
+
+ def test_multivalued_attribute_value_becomes_list(self):
+ markup = b'<a class="foo bar">'
+ soup = self.soup(markup)
+ self.assertEqual(['foo', 'bar'], soup.a['class'])
+
+ #
+ # Generally speaking, tests below this point are more tests of
+ # Beautiful Soup than tests of the tree builders. But parsers are
+ # weird, so we run these tests separately for every tree builder
+ # to detect any differences between them.
+ #
+
+ def test_can_parse_unicode_document(self):
+ # A seemingly innocuous document... but it's in Unicode! And
+ # it contains characters that can't be represented in the
+ # encoding found in the declaration! The horror!
+ markup = u'<html><head><meta encoding="euc-jp"></head><body>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</body>'
+ soup = self.soup(markup)
+ self.assertEqual(u'Sacr\xe9 bleu!', soup.body.string)
+
+ def test_soupstrainer(self):
+ """Parsers should be able to work with SoupStrainers."""
+ strainer = SoupStrainer("b")
+ soup = self.soup("A <b>bold</b> <meta/> <i>statement</i>",
+ parse_only=strainer)
+ self.assertEqual(soup.decode(), "<b>bold</b>")
+
+ def test_single_quote_attribute_values_become_double_quotes(self):
+ self.assertSoupEquals("<foo attr='bar'></foo>",
+ '<foo attr="bar"></foo>')
+
+ def test_attribute_values_with_nested_quotes_are_left_alone(self):
+ text = """<foo attr='bar "brawls" happen'>a</foo>"""
+ self.assertSoupEquals(text)
+
+ def test_attribute_values_with_double_nested_quotes_get_quoted(self):
+ text = """<foo attr='bar "brawls" happen'>a</foo>"""
+ soup = self.soup(text)
+ soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"'
+ self.assertSoupEquals(
+ soup.foo.decode(),
+ """<foo attr="Brawls happen at &quot;Bob\'s Bar&quot;">a</foo>""")
+
+ def test_ampersand_in_attribute_value_gets_escaped(self):
+ self.assertSoupEquals('<this is="really messed up & stuff"></this>',
+ '<this is="really messed up &amp; stuff"></this>')
+
+ self.assertSoupEquals(
+ '<a href="http://example.org?a=1&b=2;3">foo</a>',
+ '<a href="http://example.org?a=1&amp;b=2;3">foo</a>')
+
+ def test_escaped_ampersand_in_attribute_value_is_left_alone(self):
+ self.assertSoupEquals('<a href="http://example.org?a=1&amp;b=2;3"></a>')
+
+ def test_entities_in_strings_converted_during_parsing(self):
+ # Both XML and HTML entities are converted to Unicode characters
+ # during parsing.
+ text = "<p>&lt;&lt;sacr&eacute;&#32;bleu!&gt;&gt;</p>"
+ expected = u"<p>&lt;&lt;sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</p>"
+ self.assertSoupEquals(text, expected)
+
+ def test_smart_quotes_converted_on_the_way_in(self):
+ # Microsoft smart quotes are converted to Unicode characters during
+ # parsing.
+ quote = b"<p>\x91Foo\x92</p>"
+ soup = self.soup(quote)
+ self.assertEqual(
+ soup.p.string,
+ u"\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}")
+
+ def test_non_breaking_spaces_converted_on_the_way_in(self):
+ soup = self.soup("<a>&nbsp;&nbsp;</a>")
+ self.assertEqual(soup.a.string, u"\N{NO-BREAK SPACE}" * 2)
+
+ def test_entities_converted_on_the_way_out(self):
+ text = "<p>&lt;&lt;sacr&eacute;&#32;bleu!&gt;&gt;</p>"
+ expected = u"<p>&lt;&lt;sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</p>".encode("utf-8")
+ soup = self.soup(text)
+ self.assertEqual(soup.p.encode("utf-8"), expected)
+
+ def test_real_iso_latin_document(self):
+ # Smoke test of interrelated functionality, using an
+ # easy-to-understand document.
+
+ # Here it is in Unicode. Note that it claims to be in ISO-Latin-1.
+ unicode_html = u'<html><head><meta content="text/html; charset=ISO-Latin-1" http-equiv="Content-type"/></head><body><p>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</p></body></html>'
+
+ # That's because we're going to encode it into ISO-Latin-1, and use
+ # that to test.
+ iso_latin_html = unicode_html.encode("iso-8859-1")
+
+ # Parse the ISO-Latin-1 HTML.
+ soup = self.soup(iso_latin_html)
+ # Encode it to UTF-8.
+ result = soup.encode("utf-8")
+
+ # What do we expect the result to look like? Well, it would
+ # look like unicode_html, except that the META tag would say
+ # UTF-8 instead of ISO-Latin-1.
+ expected = unicode_html.replace("ISO-Latin-1", "utf-8")
+
+ # And, of course, it would be in UTF-8, not Unicode.
+ expected = expected.encode("utf-8")
+
+ # Ta-da!
+ self.assertEqual(result, expected)
+
+ def test_real_shift_jis_document(self):
+ # Smoke test to make sure the parser can handle a document in
+ # Shift-JIS encoding, without choking.
+ shift_jis_html = (
+ b'<html><head></head><body><pre>'
+ b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f'
+ b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c'
+ b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B'
+ b'</pre></body></html>')
+ unicode_html = shift_jis_html.decode("shift-jis")
+ soup = self.soup(unicode_html)
+
+ # Make sure the parse tree is correctly encoded to various
+ # encodings.
+ self.assertEqual(soup.encode("utf-8"), unicode_html.encode("utf-8"))
+ self.assertEqual(soup.encode("euc_jp"), unicode_html.encode("euc_jp"))
+
+ def test_real_hebrew_document(self):
+ # A real-world test to make sure we can convert ISO-8859-8 (a
+ # Hebrew encoding) to UTF-8.
+ hebrew_document = b'<html><head><title>Hebrew (ISO 8859-8) in Visual Directionality</title></head><body><h1>Hebrew (ISO 8859-8) in Visual Directionality</h1>\xed\xe5\xec\xf9</body></html>'
+ soup = self.soup(
+ hebrew_document, from_encoding="iso8859-8")
+ self.assertEqual(soup.original_encoding, 'iso8859-8')
+ self.assertEqual(
+ soup.encode('utf-8'),
+ hebrew_document.decode("iso8859-8").encode("utf-8"))
+
+ def test_meta_tag_reflects_current_encoding(self):
+ # Here's the <meta> tag saying that a document is
+ # encoded in Shift-JIS.
+ meta_tag = ('<meta content="text/html; charset=x-sjis" '
+ 'http-equiv="Content-type"/>')
+
+ # Here's a document incorporating that meta tag.
+ shift_jis_html = (
+ '<html><head>\n%s\n'
+ '<meta http-equiv="Content-language" content="ja"/>'
+ '</head><body>Shift-JIS markup goes here.') % meta_tag
+ soup = self.soup(shift_jis_html)
+
+ # Parse the document, and the charset is seemingly unaffected.
+ parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'})
+ content = parsed_meta['content']
+ self.assertEqual('text/html; charset=x-sjis', content)
+
+ # But that value is actually a ContentMetaAttributeValue object.
+ self.assertTrue(isinstance(content, ContentMetaAttributeValue))
+
+ # And it will take on a value that reflects its current
+ # encoding.
+ self.assertEqual('text/html; charset=utf8', content.encode("utf8"))
+
+ # For the rest of the story, see TestSubstitutions in
+ # test_tree.py.
+
+ def test_html5_style_meta_tag_reflects_current_encoding(self):
+ # Here's the <meta> tag saying that a document is
+ # encoded in Shift-JIS.
+ meta_tag = ('<meta id="encoding" charset="x-sjis" />')
+
+ # Here's a document incorporating that meta tag.
+ shift_jis_html = (
+ '<html><head>\n%s\n'
+ '<meta http-equiv="Content-language" content="ja"/>'
+ '</head><body>Shift-JIS markup goes here.') % meta_tag
+ soup = self.soup(shift_jis_html)
+
+ # Parse the document, and the charset is seemingly unaffected.
+ parsed_meta = soup.find('meta', id="encoding")
+ charset = parsed_meta['charset']
+ self.assertEqual('x-sjis', charset)
+
+ # But that value is actually a CharsetMetaAttributeValue object.
+ self.assertTrue(isinstance(charset, CharsetMetaAttributeValue))
+
+ # And it will take on a value that reflects its current
+ # encoding.
+ self.assertEqual('utf8', charset.encode("utf8"))
+
+ def test_tag_with_no_attributes_can_have_attributes_added(self):
+ data = self.soup("<a>text</a>")
+ data.a['foo'] = 'bar'
+ self.assertEqual('<a foo="bar">text</a>', data.a.decode())
+
+class XMLTreeBuilderSmokeTest(object):
+
+ def test_docstring_generated(self):
+ soup = self.soup("<root/>")
+ self.assertEqual(
+ soup.encode(), b'<?xml version="1.0" encoding="utf-8"?>\n<root/>')
+
+ def test_real_xhtml_document(self):
+ """A real XHTML document should come out *exactly* the same as it went in."""
+ markup = b"""<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head><title>Hello.</title></head>
+<body>Goodbye.</body>
+</html>"""
+ soup = self.soup(markup)
+ self.assertEqual(
+ soup.encode("utf-8"), markup)
+
+ def test_formatter_processes_script_tag_for_xml_documents(self):
+ doc = """
+ <script type="text/javascript">
+ </script>
+"""
+ soup = BeautifulSoup(doc, "xml")
+ # lxml would have stripped this while parsing, but we can add
+ # it later.
+ soup.script.string = 'console.log("< < hey > > ");'
+ encoded = soup.encode()
+ self.assertTrue(b"&lt; &lt; hey &gt; &gt;" in encoded)
+
+ def test_can_parse_unicode_document(self):
+ markup = u'<?xml version="1.0" encoding="euc-jp"><root>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</root>'
+ soup = self.soup(markup)
+ self.assertEqual(u'Sacr\xe9 bleu!', soup.root.string)
+
+ def test_popping_namespaced_tag(self):
+ markup = '<rss xmlns:dc="foo"><dc:creator>b</dc:creator><dc:date>2012-07-02T20:33:42Z</dc:date><dc:rights>c</dc:rights><image>d</image></rss>'
+ soup = self.soup(markup)
+ self.assertEqual(
+ unicode(soup.rss), markup)
+
+ def test_docstring_includes_correct_encoding(self):
+ soup = self.soup("<root/>")
+ self.assertEqual(
+ soup.encode("latin1"),
+ b'<?xml version="1.0" encoding="latin1"?>\n<root/>')
+
+ def test_large_xml_document(self):
+ """A large XML document should come out the same as it went in."""
+ markup = (b'<?xml version="1.0" encoding="utf-8"?>\n<root>'
+ + b'0' * (2**12)
+ + b'</root>')
+ soup = self.soup(markup)
+ self.assertEqual(soup.encode("utf-8"), markup)
+
+
+ def test_tags_are_empty_element_if_and_only_if_they_are_empty(self):
+ self.assertSoupEquals("<p>", "<p/>")
+ self.assertSoupEquals("<p>foo</p>")
+
+ def test_namespaces_are_preserved(self):
+ markup = '<root xmlns:a="http://example.com/" xmlns:b="http://example.net/"><a:foo>This tag is in the a namespace</a:foo><b:foo>This tag is in the b namespace</b:foo></root>'
+ soup = self.soup(markup)
+ root = soup.root
+ self.assertEqual("http://example.com/", root['xmlns:a'])
+ self.assertEqual("http://example.net/", root['xmlns:b'])
+
+ def test_closing_namespaced_tag(self):
+ markup = '<p xmlns:dc="http://purl.org/dc/elements/1.1/"><dc:date>20010504</dc:date></p>'
+ soup = self.soup(markup)
+ self.assertEqual(unicode(soup.p), markup)
+
+ def test_namespaced_attributes(self):
+ markup = '<foo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><bar xsi:schemaLocation="http://www.example.com"/></foo>'
+ soup = self.soup(markup)
+ self.assertEqual(unicode(soup.foo), markup)
+
+ def test_namespaced_attributes_xml_namespace(self):
+ markup = '<foo xml:lang="fr">bar</foo>'
+ soup = self.soup(markup)
+ self.assertEqual(unicode(soup.foo), markup)
+
+class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest):
+ """Smoke test for a tree builder that supports HTML5."""
+
+ def test_real_xhtml_document(self):
+ # Since XHTML is not HTML5, HTML5 parsers are not tested to handle
+ # XHTML documents in any particular way.
+ pass
+
+ def test_html_tags_have_namespace(self):
+ markup = "<a>"
+ soup = self.soup(markup)
+ self.assertEqual("http://www.w3.org/1999/xhtml", soup.a.namespace)
+
+ def test_svg_tags_have_namespace(self):
+ markup = '<svg><circle/></svg>'
+ soup = self.soup(markup)
+ namespace = "http://www.w3.org/2000/svg"
+ self.assertEqual(namespace, soup.svg.namespace)
+ self.assertEqual(namespace, soup.circle.namespace)
+
+
+ def test_mathml_tags_have_namespace(self):
+ markup = '<math><msqrt>5</msqrt></math>'
+ soup = self.soup(markup)
+ namespace = 'http://www.w3.org/1998/Math/MathML'
+ self.assertEqual(namespace, soup.math.namespace)
+ self.assertEqual(namespace, soup.msqrt.namespace)
+
+ def test_xml_declaration_becomes_comment(self):
+ markup = '<?xml version="1.0" encoding="utf-8"?><html></html>'
+ soup = self.soup(markup)
+ self.assertTrue(isinstance(soup.contents[0], Comment))
+ self.assertEqual(soup.contents[0], '?xml version="1.0" encoding="utf-8"?')
+ self.assertEqual("html", soup.contents[0].next_element.name)
+
+def skipIf(condition, reason):
+ def nothing(test, *args, **kwargs):
+ return None
+
+ def decorator(test_item):
+ if condition:
+ return nothing
+ else:
+ return test_item
+
+ return decorator
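+ # Editorial note, not upstream bs4 code: this shim stands in for unittest's
+ # skipIf on older Pythons by silently replacing the decorated test or test
+ # class with a no-op when the condition holds; the optional-parser test
+ # modules use it along the lines of:
+ #
+ #   @skipIf(not LXML_PRESENT,
+ #           "lxml seems not to be present, not testing its tree builder.")
+ #   class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
+ #       ...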
diff --git a/bitbake/lib/bs4/tests/__init__.py b/bitbake/lib/bs4/tests/__init__.py
new file mode 100644
index 0000000000..142c8cc3f1
--- /dev/null
+++ b/bitbake/lib/bs4/tests/__init__.py
@@ -0,0 +1 @@
+"The beautifulsoup tests."
diff --git a/bitbake/lib/bs4/tests/test_builder_registry.py b/bitbake/lib/bs4/tests/test_builder_registry.py
new file mode 100644
index 0000000000..92ad10fb04
--- /dev/null
+++ b/bitbake/lib/bs4/tests/test_builder_registry.py
@@ -0,0 +1,141 @@
+"""Tests of the builder registry."""
+
+import unittest
+
+from bs4 import BeautifulSoup
+from bs4.builder import (
+ builder_registry as registry,
+ HTMLParserTreeBuilder,
+ TreeBuilderRegistry,
+)
+
+try:
+ from bs4.builder import HTML5TreeBuilder
+ HTML5LIB_PRESENT = True
+except ImportError:
+ HTML5LIB_PRESENT = False
+
+try:
+ from bs4.builder import (
+ LXMLTreeBuilderForXML,
+ LXMLTreeBuilder,
+ )
+ LXML_PRESENT = True
+except ImportError:
+ LXML_PRESENT = False
+
+
+class BuiltInRegistryTest(unittest.TestCase):
+ """Test the built-in registry with the default builders registered."""
+
+ def test_combination(self):
+ if LXML_PRESENT:
+ self.assertEqual(registry.lookup('fast', 'html'),
+ LXMLTreeBuilder)
+
+ if LXML_PRESENT:
+ self.assertEqual(registry.lookup('permissive', 'xml'),
+ LXMLTreeBuilderForXML)
+ self.assertEqual(registry.lookup('strict', 'html'),
+ HTMLParserTreeBuilder)
+ if HTML5LIB_PRESENT:
+ self.assertEqual(registry.lookup('html5lib', 'html'),
+ HTML5TreeBuilder)
+
+ def test_lookup_by_markup_type(self):
+ if LXML_PRESENT:
+ self.assertEqual(registry.lookup('html'), LXMLTreeBuilder)
+ self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML)
+ else:
+ self.assertEqual(registry.lookup('xml'), None)
+ if HTML5LIB_PRESENT:
+ self.assertEqual(registry.lookup('html'), HTML5TreeBuilder)
+ else:
+ self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)
+
+ def test_named_library(self):
+ if LXML_PRESENT:
+ self.assertEqual(registry.lookup('lxml', 'xml'),
+ LXMLTreeBuilderForXML)
+ self.assertEqual(registry.lookup('lxml', 'html'),
+ LXMLTreeBuilder)
+ if HTML5LIB_PRESENT:
+ self.assertEqual(registry.lookup('html5lib'),
+ HTML5TreeBuilder)
+
+ self.assertEqual(registry.lookup('html.parser'),
+ HTMLParserTreeBuilder)
+
+ def test_beautifulsoup_constructor_does_lookup(self):
+ # You can pass in a string.
+ BeautifulSoup("", features="html")
+ # Or a list of strings.
+ BeautifulSoup("", features=["html", "fast"])
+
+ # You'll get an exception if BS can't find an appropriate
+ # builder.
+ self.assertRaises(ValueError, BeautifulSoup,
+ "", features="no-such-feature")
+
+class RegistryTest(unittest.TestCase):
+ """Test the TreeBuilderRegistry class in general."""
+
+ def setUp(self):
+ self.registry = TreeBuilderRegistry()
+
+ def builder_for_features(self, *feature_list):
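+        # Create a throwaway builder class that advertises the given
+        # features, register it with the test registry, and return it.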
+ cls = type('Builder_' + '_'.join(feature_list),
+ (object,), {'features' : feature_list})
+
+ self.registry.register(cls)
+ return cls
+
+ def test_register_with_no_features(self):
+ builder = self.builder_for_features()
+
+ # Since the builder advertises no features, you can't find it
+ # by looking up features.
+ self.assertEqual(self.registry.lookup('foo'), None)
+
+ # But you can find it by doing a lookup with no features, if
+ # this happens to be the only registered builder.
+ self.assertEqual(self.registry.lookup(), builder)
+
+ def test_register_with_features_makes_lookup_succeed(self):
+ builder = self.builder_for_features('foo', 'bar')
+ self.assertEqual(self.registry.lookup('foo'), builder)
+ self.assertEqual(self.registry.lookup('bar'), builder)
+
+ def test_lookup_fails_when_no_builder_implements_feature(self):
+ builder = self.builder_for_features('foo', 'bar')
+ self.assertEqual(self.registry.lookup('baz'), None)
+
+ def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
+ builder1 = self.builder_for_features('foo')
+ builder2 = self.builder_for_features('bar')
+ self.assertEqual(self.registry.lookup(), builder2)
+
+ def test_lookup_fails_when_no_tree_builders_registered(self):
+ self.assertEqual(self.registry.lookup(), None)
+
+ def test_lookup_gets_most_recent_builder_supporting_all_features(self):
+ has_one = self.builder_for_features('foo')
+ has_the_other = self.builder_for_features('bar')
+ has_both_early = self.builder_for_features('foo', 'bar', 'baz')
+ has_both_late = self.builder_for_features('foo', 'bar', 'quux')
+ lacks_one = self.builder_for_features('bar')
+ has_the_other = self.builder_for_features('foo')
+
+ # There are two builders featuring 'foo' and 'bar', but
+ # the one that also features 'quux' was registered later.
+ self.assertEqual(self.registry.lookup('foo', 'bar'),
+ has_both_late)
+
+ # There is only one builder featuring 'foo', 'bar', and 'baz'.
+ self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'),
+ has_both_early)
+
+ def test_lookup_fails_when_cannot_reconcile_requested_features(self):
+ builder1 = self.builder_for_features('foo', 'bar')
+ builder2 = self.builder_for_features('foo', 'baz')
+ self.assertEqual(self.registry.lookup('bar', 'baz'), None)
diff --git a/bitbake/lib/bs4/tests/test_docs.py b/bitbake/lib/bs4/tests/test_docs.py
new file mode 100644
index 0000000000..5b9f677093
--- /dev/null
+++ b/bitbake/lib/bs4/tests/test_docs.py
@@ -0,0 +1,36 @@
+"Test harness for doctests."
+
+# pylint: disable-msg=E0611,W0142
+
+__metaclass__ = type
+__all__ = [
+ 'additional_tests',
+ ]
+
+import atexit
+import doctest
+import os
+#from pkg_resources import (
+# resource_filename, resource_exists, resource_listdir, cleanup_resources)
+import unittest
+
+DOCTEST_FLAGS = (
+ doctest.ELLIPSIS |
+ doctest.NORMALIZE_WHITESPACE |
+ doctest.REPORT_NDIFF)
+
+
+# def additional_tests():
+# "Run the doc tests (README.txt and docs/*, if any exist)"
+# doctest_files = [
+# os.path.abspath(resource_filename('bs4', 'README.txt'))]
+# if resource_exists('bs4', 'docs'):
+# for name in resource_listdir('bs4', 'docs'):
+# if name.endswith('.txt'):
+# doctest_files.append(
+# os.path.abspath(
+# resource_filename('bs4', 'docs/%s' % name)))
+# kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS)
+# atexit.register(cleanup_resources)
+# return unittest.TestSuite((
+# doctest.DocFileSuite(*doctest_files, **kwargs)))
diff --git a/bitbake/lib/bs4/tests/test_html5lib.py b/bitbake/lib/bs4/tests/test_html5lib.py
new file mode 100644
index 0000000000..594c3e1f26
--- /dev/null
+++ b/bitbake/lib/bs4/tests/test_html5lib.py
@@ -0,0 +1,85 @@
+"""Tests to ensure that the html5lib tree builder generates good trees."""
+
+import warnings
+
+try:
+ from bs4.builder import HTML5TreeBuilder
+ HTML5LIB_PRESENT = True
+except ImportError, e:
+ HTML5LIB_PRESENT = False
+from bs4.element import SoupStrainer
+from bs4.testing import (
+ HTML5TreeBuilderSmokeTest,
+ SoupTest,
+ skipIf,
+)
+
+@skipIf(
+ not HTML5LIB_PRESENT,
+ "html5lib seems not to be present, not testing its tree builder.")
+class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
+ """See ``HTML5TreeBuilderSmokeTest``."""
+
+ @property
+ def default_builder(self):
+ return HTML5TreeBuilder()
+
+ def test_soupstrainer(self):
+ # The html5lib tree builder does not support SoupStrainers.
+ strainer = SoupStrainer("b")
+ markup = "<p>A <b>bold</b> statement.</p>"
+ with warnings.catch_warnings(record=True) as w:
+ soup = self.soup(markup, parse_only=strainer)
+ self.assertEqual(
+ soup.decode(), self.document_for(markup))
+
+ self.assertTrue(
+ "the html5lib tree builder doesn't support parse_only" in
+ str(w[0].message))
+
+ def test_correctly_nested_tables(self):
+ """html5lib inserts <tbody> tags where other parsers don't."""
+ markup = ('<table id="1">'
+ '<tr>'
+ "<td>Here's another table:"
+ '<table id="2">'
+ '<tr><td>foo</td></tr>'
+ '</table></td>')
+
+ self.assertSoupEquals(
+ markup,
+ '<table id="1"><tbody><tr><td>Here\'s another table:'
+ '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
+ '</td></tr></tbody></table>')
+
+ self.assertSoupEquals(
+ "<table><thead><tr><td>Foo</td></tr></thead>"
+ "<tbody><tr><td>Bar</td></tr></tbody>"
+ "<tfoot><tr><td>Baz</td></tr></tfoot></table>")
+
+ def test_xml_declaration_followed_by_doctype(self):
+ markup = '''<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE html>
+<html>
+ <head>
+ </head>
+ <body>
+ <p>foo</p>
+ </body>
+</html>'''
+ soup = self.soup(markup)
+ # Verify that we can reach the <p> tag; this means the tree is connected.
+ self.assertEqual(b"<p>foo</p>", soup.p.encode())
+
+ def test_reparented_markup(self):
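+        # html5lib's handling of mis-nested formatting elements (the
+        # "adoption agency" rules) reparents the <em> content, so the
+        # <em> ends up split across both <p> tags as asserted below.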
+ markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
+ soup = self.soup(markup)
+ self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
+ self.assertEqual(2, len(soup.find_all('p')))
+
+
+ def test_reparented_markup_ends_with_whitespace(self):
+ markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
+ soup = self.soup(markup)
+ self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
+ self.assertEqual(2, len(soup.find_all('p')))
diff --git a/bitbake/lib/bs4/tests/test_htmlparser.py b/bitbake/lib/bs4/tests/test_htmlparser.py
new file mode 100644
index 0000000000..bcb5ed232f
--- /dev/null
+++ b/bitbake/lib/bs4/tests/test_htmlparser.py
@@ -0,0 +1,19 @@
+"""Tests to ensure that the html.parser tree builder generates good
+trees."""
+
+from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest
+from bs4.builder import HTMLParserTreeBuilder
+
+class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
+
+ @property
+ def default_builder(self):
+ return HTMLParserTreeBuilder()
+
+ def test_namespaced_system_doctype(self):
+ # html.parser can't handle namespaced doctypes, so skip this one.
+ pass
+
+ def test_namespaced_public_doctype(self):
+ # html.parser can't handle namespaced doctypes, so skip this one.
+ pass
diff --git a/bitbake/lib/bs4/tests/test_lxml.py b/bitbake/lib/bs4/tests/test_lxml.py
new file mode 100644
index 0000000000..2b2e9b7e78
--- /dev/null
+++ b/bitbake/lib/bs4/tests/test_lxml.py
@@ -0,0 +1,91 @@
+"""Tests to ensure that the lxml tree builder generates good trees."""
+
+import re
+import warnings
+
+try:
+ import lxml.etree
+ LXML_PRESENT = True
+ LXML_VERSION = lxml.etree.LXML_VERSION
+except ImportError, e:
+ LXML_PRESENT = False
+ LXML_VERSION = (0,)
+
+if LXML_PRESENT:
+ from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
+
+from bs4 import (
+ BeautifulSoup,
+ BeautifulStoneSoup,
+ )
+from bs4.element import Comment, Doctype, SoupStrainer
+from bs4.testing import skipIf
+from bs4.tests import test_htmlparser
+from bs4.testing import (
+ HTMLTreeBuilderSmokeTest,
+ XMLTreeBuilderSmokeTest,
+ SoupTest,
+ skipIf,
+)
+
+@skipIf(
+ not LXML_PRESENT,
+ "lxml seems not to be present, not testing its tree builder.")
+class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
+ """See ``HTMLTreeBuilderSmokeTest``."""
+
+ @property
+ def default_builder(self):
+ return LXMLTreeBuilder()
+
+ def test_out_of_range_entity(self):
+ self.assertSoupEquals(
+ "<p>foo&#10000000000000;bar</p>", "<p>foobar</p>")
+ self.assertSoupEquals(
+ "<p>foo&#x10000000000000;bar</p>", "<p>foobar</p>")
+ self.assertSoupEquals(
+ "<p>foo&#1000000000;bar</p>", "<p>foobar</p>")
+
+ # In lxml < 2.3.5, an empty doctype causes a segfault. Skip this
+ # test if an old version of lxml is installed.
+
+ @skipIf(
+ not LXML_PRESENT or LXML_VERSION < (2,3,5,0),
+ "Skipping doctype test for old version of lxml to avoid segfault.")
+ def test_empty_doctype(self):
+ soup = self.soup("<!DOCTYPE>")
+ doctype = soup.contents[0]
+ self.assertEqual("", doctype.strip())
+
+ def test_beautifulstonesoup_is_xml_parser(self):
+ # Make sure that the deprecated BSS class uses an xml builder
+ # if one is installed.
+ with warnings.catch_warnings(record=True) as w:
+ soup = BeautifulStoneSoup("<b />")
+ self.assertEqual(u"<b/>", unicode(soup.b))
+ self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message))
+
+ def test_real_xhtml_document(self):
+ """lxml strips the XML definition from an XHTML doc, which is fine."""
+ markup = b"""<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN">
+<html xmlns="http://www.w3.org/1999/xhtml">
+<head><title>Hello.</title></head>
+<body>Goodbye.</body>
+</html>"""
+ soup = self.soup(markup)
+ self.assertEqual(
+ soup.encode("utf-8").replace(b"\n", b''),
+ markup.replace(b'\n', b'').replace(
+ b'<?xml version="1.0" encoding="utf-8"?>', b''))
+
+
+@skipIf(
+ not LXML_PRESENT,
+ "lxml seems not to be present, not testing its XML tree builder.")
+class LXMLXMLTreeBuilderSmokeTest(SoupTest, XMLTreeBuilderSmokeTest):
+ """See ``HTMLTreeBuilderSmokeTest``."""
+
+ @property
+ def default_builder(self):
+ return LXMLTreeBuilderForXML()
diff --git a/bitbake/lib/bs4/tests/test_soup.py b/bitbake/lib/bs4/tests/test_soup.py
new file mode 100644
index 0000000000..47ac245f99
--- /dev/null
+++ b/bitbake/lib/bs4/tests/test_soup.py
@@ -0,0 +1,434 @@
+# -*- coding: utf-8 -*-
+"""Tests of Beautiful Soup as a whole."""
+
+import logging
+import unittest
+import sys
+import tempfile
+
+from bs4 import (
+ BeautifulSoup,
+ BeautifulStoneSoup,
+)
+from bs4.element import (
+ CharsetMetaAttributeValue,
+ ContentMetaAttributeValue,
+ SoupStrainer,
+ NamespacedAttribute,
+ )
+import bs4.dammit
+from bs4.dammit import (
+ EntitySubstitution,
+ UnicodeDammit,
+)
+from bs4.testing import (
+ SoupTest,
+ skipIf,
+)
+import warnings
+
+try:
+ from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
+ LXML_PRESENT = True
+except ImportError, e:
+ LXML_PRESENT = False
+
+PYTHON_2_PRE_2_7 = (sys.version_info < (2,7))
+PYTHON_3_PRE_3_2 = (sys.version_info[0] == 3 and sys.version_info < (3,2))
+
+class TestConstructor(SoupTest):
+
+ def test_short_unicode_input(self):
+ data = u"<h1>éé</h1>"
+ soup = self.soup(data)
+ self.assertEqual(u"éé", soup.h1.string)
+
+ def test_embedded_null(self):
+ data = u"<h1>foo\0bar</h1>"
+ soup = self.soup(data)
+ self.assertEqual(u"foo\0bar", soup.h1.string)
+
+
+class TestDeprecatedConstructorArguments(SoupTest):
+
+ def test_parseOnlyThese_renamed_to_parse_only(self):
+ with warnings.catch_warnings(record=True) as w:
+ soup = self.soup("<a><b></b></a>", parseOnlyThese=SoupStrainer("b"))
+ msg = str(w[0].message)
+ self.assertTrue("parseOnlyThese" in msg)
+ self.assertTrue("parse_only" in msg)
+ self.assertEqual(b"<b></b>", soup.encode())
+
+ def test_fromEncoding_renamed_to_from_encoding(self):
+ with warnings.catch_warnings(record=True) as w:
+ utf8 = b"\xc3\xa9"
+ soup = self.soup(utf8, fromEncoding="utf8")
+ msg = str(w[0].message)
+ self.assertTrue("fromEncoding" in msg)
+ self.assertTrue("from_encoding" in msg)
+ self.assertEqual("utf8", soup.original_encoding)
+
+ def test_unrecognized_keyword_argument(self):
+ self.assertRaises(
+ TypeError, self.soup, "<a>", no_such_argument=True)
+
+class TestWarnings(SoupTest):
+
+ def test_disk_file_warning(self):
+ filehandle = tempfile.NamedTemporaryFile()
+ filename = filehandle.name
+ try:
+ with warnings.catch_warnings(record=True) as w:
+ soup = self.soup(filename)
+ msg = str(w[0].message)
+ self.assertTrue("looks like a filename" in msg)
+ finally:
+ filehandle.close()
+
+ # The file no longer exists, so Beautiful Soup will no longer issue the warning.
+ with warnings.catch_warnings(record=True) as w:
+ soup = self.soup(filename)
+ self.assertEqual(0, len(w))
+
+ def test_url_warning(self):
+ with warnings.catch_warnings(record=True) as w:
+ soup = self.soup("http://www.crummy.com/")
+ msg = str(w[0].message)
+ self.assertTrue("looks like a URL" in msg)
+
+ with warnings.catch_warnings(record=True) as w:
+ soup = self.soup("http://www.crummy.com/ is great")
+ self.assertEqual(0, len(w))
+
+class TestSelectiveParsing(SoupTest):
+
+ def test_parse_with_soupstrainer(self):
+ markup = "No<b>Yes</b><a>No<b>Yes <c>Yes</c></b>"
+ strainer = SoupStrainer("b")
+ soup = self.soup(markup, parse_only=strainer)
+ self.assertEqual(soup.encode(), b"<b>Yes</b><b>Yes <c>Yes</c></b>")
+
+
+class TestEntitySubstitution(unittest.TestCase):
+ """Standalone tests of the EntitySubstitution class."""
+ def setUp(self):
+ self.sub = EntitySubstitution
+
+ def test_simple_html_substitution(self):
+        # Unicode characters corresponding to named HTML entities
+ # are substituted, and no others.
+ s = u"foo\u2200\N{SNOWMAN}\u00f5bar"
+ self.assertEqual(self.sub.substitute_html(s),
+ u"foo&forall;\N{SNOWMAN}&otilde;bar")
+
+ def test_smart_quote_substitution(self):
+ # MS smart quotes are a common source of frustration, so we
+ # give them a special test.
+ quotes = b"\x91\x92foo\x93\x94"
+ dammit = UnicodeDammit(quotes)
+ self.assertEqual(self.sub.substitute_html(dammit.markup),
+ "&lsquo;&rsquo;foo&ldquo;&rdquo;")
+
+    def test_xml_conversion_includes_no_quotes_if_make_quoted_attribute_is_false(self):
+ s = 'Welcome to "my bar"'
+ self.assertEqual(self.sub.substitute_xml(s, False), s)
+
+ def test_xml_attribute_quoting_normally_uses_double_quotes(self):
+ self.assertEqual(self.sub.substitute_xml("Welcome", True),
+ '"Welcome"')
+ self.assertEqual(self.sub.substitute_xml("Bob's Bar", True),
+ '"Bob\'s Bar"')
+
+ def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self):
+ s = 'Welcome to "my bar"'
+ self.assertEqual(self.sub.substitute_xml(s, True),
+ "'Welcome to \"my bar\"'")
+
+ def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self):
+ s = 'Welcome to "Bob\'s Bar"'
+ self.assertEqual(
+ self.sub.substitute_xml(s, True),
+ '"Welcome to &quot;Bob\'s Bar&quot;"')
+
+ def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self):
+ quoted = 'Welcome to "Bob\'s Bar"'
+ self.assertEqual(self.sub.substitute_xml(quoted), quoted)
+
+ def test_xml_quoting_handles_angle_brackets(self):
+ self.assertEqual(
+ self.sub.substitute_xml("foo<bar>"),
+ "foo&lt;bar&gt;")
+
+ def test_xml_quoting_handles_ampersands(self):
+ self.assertEqual(self.sub.substitute_xml("AT&T"), "AT&amp;T")
+
+ def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self):
+ self.assertEqual(
+ self.sub.substitute_xml("&Aacute;T&T"),
+ "&amp;Aacute;T&amp;T")
+
+ def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self):
+ self.assertEqual(
+ self.sub.substitute_xml_containing_entities("&Aacute;T&T"),
+ "&Aacute;T&amp;T")
+
+ def test_quotes_not_html_substituted(self):
+ """There's no need to do this except inside attribute values."""
+ text = 'Bob\'s "bar"'
+ self.assertEqual(self.sub.substitute_html(text), text)
+
+
+class TestEncodingConversion(SoupTest):
+ # Test Beautiful Soup's ability to decode and encode from various
+ # encodings.
+
+ def setUp(self):
+ super(TestEncodingConversion, self).setUp()
+ self.unicode_data = u'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</foo></body></html>'
+ self.utf8_data = self.unicode_data.encode("utf-8")
+ # Just so you know what it looks like.
+ self.assertEqual(
+ self.utf8_data,
+ b'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\xc3\xa9 bleu!</foo></body></html>')
+
+ def test_ascii_in_unicode_out(self):
+ # ASCII input is converted to Unicode. The original_encoding
+ # attribute is set to 'utf-8', a superset of ASCII.
+ chardet = bs4.dammit.chardet_dammit
+ logging.disable(logging.WARNING)
+ try:
+ def noop(str):
+ return None
+ # Disable chardet, which will realize that the ASCII is ASCII.
+ bs4.dammit.chardet_dammit = noop
+ ascii = b"<foo>a</foo>"
+ soup_from_ascii = self.soup(ascii)
+ unicode_output = soup_from_ascii.decode()
+ self.assertTrue(isinstance(unicode_output, unicode))
+ self.assertEqual(unicode_output, self.document_for(ascii.decode()))
+ self.assertEqual(soup_from_ascii.original_encoding.lower(), "utf-8")
+ finally:
+ logging.disable(logging.NOTSET)
+ bs4.dammit.chardet_dammit = chardet
+
+ def test_unicode_in_unicode_out(self):
+ # Unicode input is left alone. The original_encoding attribute
+ # is not set.
+ soup_from_unicode = self.soup(self.unicode_data)
+ self.assertEqual(soup_from_unicode.decode(), self.unicode_data)
+ self.assertEqual(soup_from_unicode.foo.string, u'Sacr\xe9 bleu!')
+ self.assertEqual(soup_from_unicode.original_encoding, None)
+
+ def test_utf8_in_unicode_out(self):
+ # UTF-8 input is converted to Unicode. The original_encoding
+ # attribute is set.
+ soup_from_utf8 = self.soup(self.utf8_data)
+ self.assertEqual(soup_from_utf8.decode(), self.unicode_data)
+ self.assertEqual(soup_from_utf8.foo.string, u'Sacr\xe9 bleu!')
+
+ def test_utf8_out(self):
+ # The internal data structures can be encoded as UTF-8.
+ soup_from_unicode = self.soup(self.unicode_data)
+ self.assertEqual(soup_from_unicode.encode('utf-8'), self.utf8_data)
+
+ @skipIf(
+ PYTHON_2_PRE_2_7 or PYTHON_3_PRE_3_2,
+ "Bad HTMLParser detected; skipping test of non-ASCII characters in attribute name.")
+ def test_attribute_name_containing_unicode_characters(self):
+ markup = u'<div><a \N{SNOWMAN}="snowman"></a></div>'
+ self.assertEqual(self.soup(markup).div.encode("utf8"), markup.encode("utf8"))
+
+class TestUnicodeDammit(unittest.TestCase):
+ """Standalone tests of UnicodeDammit."""
+
+ def test_unicode_input(self):
+ markup = u"I'm already Unicode! \N{SNOWMAN}"
+ dammit = UnicodeDammit(markup)
+ self.assertEqual(dammit.unicode_markup, markup)
+
+ def test_smart_quotes_to_unicode(self):
+ markup = b"<foo>\x91\x92\x93\x94</foo>"
+ dammit = UnicodeDammit(markup)
+ self.assertEqual(
+ dammit.unicode_markup, u"<foo>\u2018\u2019\u201c\u201d</foo>")
+
+ def test_smart_quotes_to_xml_entities(self):
+ markup = b"<foo>\x91\x92\x93\x94</foo>"
+ dammit = UnicodeDammit(markup, smart_quotes_to="xml")
+ self.assertEqual(
+ dammit.unicode_markup, "<foo>&#x2018;&#x2019;&#x201C;&#x201D;</foo>")
+
+ def test_smart_quotes_to_html_entities(self):
+ markup = b"<foo>\x91\x92\x93\x94</foo>"
+ dammit = UnicodeDammit(markup, smart_quotes_to="html")
+ self.assertEqual(
+ dammit.unicode_markup, "<foo>&lsquo;&rsquo;&ldquo;&rdquo;</foo>")
+
+ def test_smart_quotes_to_ascii(self):
+ markup = b"<foo>\x91\x92\x93\x94</foo>"
+ dammit = UnicodeDammit(markup, smart_quotes_to="ascii")
+ self.assertEqual(
+ dammit.unicode_markup, """<foo>''""</foo>""")
+
+ def test_detect_utf8(self):
+ utf8 = b"\xc3\xa9"
+ dammit = UnicodeDammit(utf8)
+ self.assertEqual(dammit.unicode_markup, u'\xe9')
+ self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
+
+ def test_convert_hebrew(self):
+ hebrew = b"\xed\xe5\xec\xf9"
+ dammit = UnicodeDammit(hebrew, ["iso-8859-8"])
+ self.assertEqual(dammit.original_encoding.lower(), 'iso-8859-8')
+ self.assertEqual(dammit.unicode_markup, u'\u05dd\u05d5\u05dc\u05e9')
+
+ def test_dont_see_smart_quotes_where_there_are_none(self):
+ utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch"
+ dammit = UnicodeDammit(utf_8)
+ self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
+ self.assertEqual(dammit.unicode_markup.encode("utf-8"), utf_8)
+
+ def test_ignore_inappropriate_codecs(self):
+ utf8_data = u"Räksmörgås".encode("utf-8")
+ dammit = UnicodeDammit(utf8_data, ["iso-8859-8"])
+ self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
+
+ def test_ignore_invalid_codecs(self):
+ utf8_data = u"Räksmörgås".encode("utf-8")
+ for bad_encoding in ['.utf8', '...', 'utF---16.!']:
+ dammit = UnicodeDammit(utf8_data, [bad_encoding])
+ self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
+
+ def test_detect_html5_style_meta_tag(self):
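+        # With is_html=True, Unicode, Dammit sniffs the HTML5-style
+        # <meta charset=...> declaration in each markup variant and
+        # uses it as the document encoding.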
+
+ for data in (
+ b'<html><meta charset="euc-jp" /></html>',
+ b"<html><meta charset='euc-jp' /></html>",
+ b"<html><meta charset=euc-jp /></html>",
+ b"<html><meta charset=euc-jp/></html>"):
+ dammit = UnicodeDammit(data, is_html=True)
+ self.assertEqual(
+ "euc-jp", dammit.original_encoding)
+
+ def test_last_ditch_entity_replacement(self):
+ # This is a UTF-8 document that contains bytestrings
+ # completely incompatible with UTF-8 (ie. encoded with some other
+ # encoding).
+ #
+ # Since there is no consistent encoding for the document,
+ # Unicode, Dammit will eventually encode the document as UTF-8
+ # and encode the incompatible characters as REPLACEMENT
+ # CHARACTER.
+ #
+ # If chardet is installed, it will detect that the document
+ # can be converted into ISO-8859-1 without errors. This happens
+ # to be the wrong encoding, but it is a consistent encoding, so the
+ # code we're testing here won't run.
+ #
+ # So we temporarily disable chardet if it's present.
+ doc = b"""\357\273\277<?xml version="1.0" encoding="UTF-8"?>
+<html><b>\330\250\330\252\330\261</b>
+<i>\310\322\321\220\312\321\355\344</i></html>"""
+ chardet = bs4.dammit.chardet_dammit
+ logging.disable(logging.WARNING)
+ try:
+ def noop(str):
+ return None
+ bs4.dammit.chardet_dammit = noop
+ dammit = UnicodeDammit(doc)
+ self.assertEqual(True, dammit.contains_replacement_characters)
+ self.assertTrue(u"\ufffd" in dammit.unicode_markup)
+
+ soup = BeautifulSoup(doc, "html.parser")
+ self.assertTrue(soup.contains_replacement_characters)
+ finally:
+ logging.disable(logging.NOTSET)
+ bs4.dammit.chardet_dammit = chardet
+
+ def test_byte_order_mark_removed(self):
+ # A document written in UTF-16LE will have its byte order marker stripped.
+ data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00'
+ dammit = UnicodeDammit(data)
+ self.assertEqual(u"<a>áé</a>", dammit.unicode_markup)
+ self.assertEqual("utf-16le", dammit.original_encoding)
+
+ def test_detwingle(self):
+ # Here's a UTF8 document.
+ utf8 = (u"\N{SNOWMAN}" * 3).encode("utf8")
+
+ # Here's a Windows-1252 document.
+ windows_1252 = (
+ u"\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!"
+ u"\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")
+
+ # Through some unholy alchemy, they've been stuck together.
+ doc = utf8 + windows_1252 + utf8
+
+ # The document can't be turned into UTF-8:
+ self.assertRaises(UnicodeDecodeError, doc.decode, "utf8")
+
+ # Unicode, Dammit thinks the whole document is Windows-1252,
+        # and decodes it into "☃☃☃“Hi, I like Windows!”☃☃☃"
+
+ # But if we run it through fix_embedded_windows_1252, it's fixed:
+
+ fixed = UnicodeDammit.detwingle(doc)
+ self.assertEqual(
+ u"☃☃☃“Hi, I like Windows!â€â˜ƒâ˜ƒâ˜ƒ", fixed.decode("utf8"))
+
+ def test_detwingle_ignores_multibyte_characters(self):
+ # Each of these characters has a UTF-8 representation ending
+ # in \x93. \x93 is a smart quote if interpreted as
+ # Windows-1252. But our code knows to skip over multibyte
+ # UTF-8 characters, so they'll survive the process unscathed.
+ for tricky_unicode_char in (
+ u"\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93'
+ u"\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93'
+ u"\xf0\x90\x90\x93", # This is a CJK character, not sure which one.
+ ):
+ input = tricky_unicode_char.encode("utf8")
+ self.assertTrue(input.endswith(b'\x93'))
+ output = UnicodeDammit.detwingle(input)
+ self.assertEqual(output, input)
+
+class TestNamespacedAttribute(SoupTest):
+
+ def test_name_may_be_none(self):
+ a = NamespacedAttribute("xmlns", None)
+ self.assertEqual(a, "xmlns")
+
+ def test_attribute_is_equivalent_to_colon_separated_string(self):
+ a = NamespacedAttribute("a", "b")
+ self.assertEqual("a:b", a)
+
+ def test_attributes_are_equivalent_if_prefix_and_name_identical(self):
+ a = NamespacedAttribute("a", "b", "c")
+ b = NamespacedAttribute("a", "b", "c")
+ self.assertEqual(a, b)
+
+ # The actual namespace is not considered.
+ c = NamespacedAttribute("a", "b", None)
+ self.assertEqual(a, c)
+
+ # But name and prefix are important.
+ d = NamespacedAttribute("a", "z", "c")
+ self.assertNotEqual(a, d)
+
+ e = NamespacedAttribute("z", "b", "c")
+ self.assertNotEqual(a, e)
+
+
+class TestAttributeValueWithCharsetSubstitution(unittest.TestCase):
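+    # These attribute values remember their original form but substitute
+    # the output charset when the document is re-encoded.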
+
+    def test_charset_meta_attribute_value(self):
+ value = CharsetMetaAttributeValue("euc-jp")
+ self.assertEqual("euc-jp", value)
+ self.assertEqual("euc-jp", value.original_value)
+ self.assertEqual("utf8", value.encode("utf8"))
+
+
+ def test_content_meta_attribute_value(self):
+ value = ContentMetaAttributeValue("text/html; charset=euc-jp")
+ self.assertEqual("text/html; charset=euc-jp", value)
+ self.assertEqual("text/html; charset=euc-jp", value.original_value)
+ self.assertEqual("text/html; charset=utf8", value.encode("utf8"))
diff --git a/bitbake/lib/bs4/tests/test_tree.py b/bitbake/lib/bs4/tests/test_tree.py
new file mode 100644
index 0000000000..f8515c0ea1
--- /dev/null
+++ b/bitbake/lib/bs4/tests/test_tree.py
@@ -0,0 +1,1829 @@
+# -*- coding: utf-8 -*-
+"""Tests for Beautiful Soup's tree traversal methods.
+
+The tree traversal methods are the main advantage of using Beautiful
+Soup over just using a parser.
+
+Different parsers will build different Beautiful Soup trees given the
+same markup, but all Beautiful Soup trees can be traversed with the
+methods tested here.
+"""
+
+import copy
+import pickle
+import re
+import warnings
+from bs4 import BeautifulSoup
+from bs4.builder import (
+ builder_registry,
+ HTMLParserTreeBuilder,
+)
+from bs4.element import (
+ CData,
+ Comment,
+ Doctype,
+ NavigableString,
+ SoupStrainer,
+ Tag,
+)
+from bs4.testing import (
+ SoupTest,
+ skipIf,
+)
+
+XML_BUILDER_PRESENT = (builder_registry.lookup("xml") is not None)
+LXML_PRESENT = (builder_registry.lookup("lxml") is not None)
+
+class TreeTest(SoupTest):
+
+ def assertSelects(self, tags, should_match):
+ """Make sure that the given tags have the correct text.
+
+ This is used in tests that define a bunch of tags, each
+ containing a single string, and then select certain strings by
+ some mechanism.
+ """
+ self.assertEqual([tag.string for tag in tags], should_match)
+
+ def assertSelectsIDs(self, tags, should_match):
+ """Make sure that the given tags have the correct IDs.
+
+ This is used in tests that define a bunch of tags, each
+ containing a single string, and then select certain strings by
+ some mechanism.
+ """
+ self.assertEqual([tag['id'] for tag in tags], should_match)
+
+
+class TestFind(TreeTest):
+ """Basic tests of the find() method.
+
+ find() just calls find_all() with limit=1, so it's not tested all
+    that thoroughly here.
+ """
+
+ def test_find_tag(self):
+ soup = self.soup("<a>1</a><b>2</b><a>3</a><b>4</b>")
+ self.assertEqual(soup.find("b").string, "2")
+
+ def test_unicode_text_find(self):
+ soup = self.soup(u'<h1>Räksmörgås</h1>')
+ self.assertEqual(soup.find(text=u'Räksmörgås'), u'Räksmörgås')
+
+ def test_find_everything(self):
+ """Test an optimization that finds all tags."""
+ soup = self.soup("<a>foo</a><b>bar</b>")
+ self.assertEqual(2, len(soup.find_all()))
+
+ def test_find_everything_with_name(self):
+ """Test an optimization that finds all tags with a given name."""
+ soup = self.soup("<a>foo</a><b>bar</b><a>baz</a>")
+ self.assertEqual(2, len(soup.find_all('a')))
+
+class TestFindAll(TreeTest):
+ """Basic tests of the find_all() method."""
+
+ def test_find_all_text_nodes(self):
+ """You can search the tree for text nodes."""
+ soup = self.soup("<html>Foo<b>bar</b>\xbb</html>")
+ # Exact match.
+ self.assertEqual(soup.find_all(text="bar"), [u"bar"])
+ # Match any of a number of strings.
+ self.assertEqual(
+ soup.find_all(text=["Foo", "bar"]), [u"Foo", u"bar"])
+ # Match a regular expression.
+ self.assertEqual(soup.find_all(text=re.compile('.*')),
+ [u"Foo", u"bar", u'\xbb'])
+ # Match anything.
+ self.assertEqual(soup.find_all(text=True),
+ [u"Foo", u"bar", u'\xbb'])
+
+ def test_find_all_limit(self):
+ """You can limit the number of items returned by find_all."""
+ soup = self.soup("<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a>")
+ self.assertSelects(soup.find_all('a', limit=3), ["1", "2", "3"])
+ self.assertSelects(soup.find_all('a', limit=1), ["1"])
+ self.assertSelects(
+ soup.find_all('a', limit=10), ["1", "2", "3", "4", "5"])
+
+ # A limit of 0 means no limit.
+ self.assertSelects(
+ soup.find_all('a', limit=0), ["1", "2", "3", "4", "5"])
+
+ def test_calling_a_tag_is_calling_findall(self):
+ soup = self.soup("<a>1</a><b>2<a id='foo'>3</a></b>")
+ self.assertSelects(soup('a', limit=1), ["1"])
+ self.assertSelects(soup.b(id="foo"), ["3"])
+
+ def test_find_all_with_self_referential_data_structure_does_not_cause_infinite_recursion(self):
+ soup = self.soup("<a></a>")
+ # Create a self-referential list.
+ l = []
+ l.append(l)
+
+ # Without special code in _normalize_search_value, this would cause infinite
+ # recursion.
+ self.assertEqual([], soup.find_all(l))
+
+ def test_find_all_resultset(self):
+ """All find_all calls return a ResultSet"""
+ soup = self.soup("<a></a>")
+ result = soup.find_all("a")
+ self.assertTrue(hasattr(result, "source"))
+
+ result = soup.find_all(True)
+ self.assertTrue(hasattr(result, "source"))
+
+ result = soup.find_all(text="foo")
+ self.assertTrue(hasattr(result, "source"))
+
+
+class TestFindAllBasicNamespaces(TreeTest):
+
+ def test_find_by_namespaced_name(self):
+ soup = self.soup('<mathml:msqrt>4</mathml:msqrt><a svg:fill="red">')
+ self.assertEqual("4", soup.find("mathml:msqrt").string)
+ self.assertEqual("a", soup.find(attrs= { "svg:fill" : "red" }).name)
+
+
+class TestFindAllByName(TreeTest):
+ """Test ways of finding tags by tag name."""
+
+ def setUp(self):
+ super(TreeTest, self).setUp()
+ self.tree = self.soup("""<a>First tag.</a>
+ <b>Second tag.</b>
+ <c>Third <a>Nested tag.</a> tag.</c>""")
+
+ def test_find_all_by_tag_name(self):
+ # Find all the <a> tags.
+ self.assertSelects(
+ self.tree.find_all('a'), ['First tag.', 'Nested tag.'])
+
+ def test_find_all_by_name_and_text(self):
+ self.assertSelects(
+ self.tree.find_all('a', text='First tag.'), ['First tag.'])
+
+ self.assertSelects(
+ self.tree.find_all('a', text=True), ['First tag.', 'Nested tag.'])
+
+ self.assertSelects(
+ self.tree.find_all('a', text=re.compile("tag")),
+ ['First tag.', 'Nested tag.'])
+
+
+ def test_find_all_on_non_root_element(self):
+ # You can call find_all on any node, not just the root.
+ self.assertSelects(self.tree.c.find_all('a'), ['Nested tag.'])
+
+ def test_calling_element_invokes_find_all(self):
+ self.assertSelects(self.tree('a'), ['First tag.', 'Nested tag.'])
+
+ def test_find_all_by_tag_strainer(self):
+ self.assertSelects(
+ self.tree.find_all(SoupStrainer('a')),
+ ['First tag.', 'Nested tag.'])
+
+ def test_find_all_by_tag_names(self):
+ self.assertSelects(
+ self.tree.find_all(['a', 'b']),
+ ['First tag.', 'Second tag.', 'Nested tag.'])
+
+ def test_find_all_by_tag_dict(self):
+ self.assertSelects(
+ self.tree.find_all({'a' : True, 'b' : True}),
+ ['First tag.', 'Second tag.', 'Nested tag.'])
+
+ def test_find_all_by_tag_re(self):
+ self.assertSelects(
+ self.tree.find_all(re.compile('^[ab]$')),
+ ['First tag.', 'Second tag.', 'Nested tag.'])
+
+ def test_find_all_with_tags_matching_method(self):
+ # You can define an oracle method that determines whether
+ # a tag matches the search.
+ def id_matches_name(tag):
+ return tag.name == tag.get('id')
+
+ tree = self.soup("""<a id="a">Match 1.</a>
+ <a id="1">Does not match.</a>
+ <b id="b">Match 2.</a>""")
+
+ self.assertSelects(
+ tree.find_all(id_matches_name), ["Match 1.", "Match 2."])
+
+
+class TestFindAllByAttribute(TreeTest):
+
+ def test_find_all_by_attribute_name(self):
+ # You can pass in keyword arguments to find_all to search by
+ # attribute.
+ tree = self.soup("""
+ <a id="first">Matching a.</a>
+ <a id="second">
+ Non-matching <b id="first">Matching b.</b>a.
+ </a>""")
+ self.assertSelects(tree.find_all(id='first'),
+ ["Matching a.", "Matching b."])
+
+ def test_find_all_by_utf8_attribute_value(self):
+        peace = u"םולש".encode("utf8")
+        data = u'<a title="םולש"></a>'.encode("utf8")
+ soup = self.soup(data)
+ self.assertEqual([soup.a], soup.find_all(title=peace))
+ self.assertEqual([soup.a], soup.find_all(title=peace.decode("utf8")))
+ self.assertEqual([soup.a], soup.find_all(title=[peace, "something else"]))
+
+ def test_find_all_by_attribute_dict(self):
+ # You can pass in a dictionary as the argument 'attrs'. This
+ # lets you search for attributes like 'name' (a fixed argument
+ # to find_all) and 'class' (a reserved word in Python.)
+ tree = self.soup("""
+ <a name="name1" class="class1">Name match.</a>
+ <a name="name2" class="class2">Class match.</a>
+ <a name="name3" class="class3">Non-match.</a>
+ <name1>A tag called 'name1'.</name1>
+ """)
+
+ # This doesn't do what you want.
+ self.assertSelects(tree.find_all(name='name1'),
+ ["A tag called 'name1'."])
+ # This does what you want.
+ self.assertSelects(tree.find_all(attrs={'name' : 'name1'}),
+ ["Name match."])
+
+ self.assertSelects(tree.find_all(attrs={'class' : 'class2'}),
+ ["Class match."])
+
+ def test_find_all_by_class(self):
+ tree = self.soup("""
+ <a class="1">Class 1.</a>
+ <a class="2">Class 2.</a>
+ <b class="1">Class 1.</b>
+ <c class="3 4">Class 3 and 4.</c>
+ """)
+
+ # Passing in the class_ keyword argument will search against
+ # the 'class' attribute.
+ self.assertSelects(tree.find_all('a', class_='1'), ['Class 1.'])
+ self.assertSelects(tree.find_all('c', class_='3'), ['Class 3 and 4.'])
+ self.assertSelects(tree.find_all('c', class_='4'), ['Class 3 and 4.'])
+
+ # Passing in a string to 'attrs' will also search the CSS class.
+ self.assertSelects(tree.find_all('a', '1'), ['Class 1.'])
+ self.assertSelects(tree.find_all(attrs='1'), ['Class 1.', 'Class 1.'])
+ self.assertSelects(tree.find_all('c', '3'), ['Class 3 and 4.'])
+ self.assertSelects(tree.find_all('c', '4'), ['Class 3 and 4.'])
+
+ def test_find_by_class_when_multiple_classes_present(self):
+ tree = self.soup("<gar class='foo bar'>Found it</gar>")
+
+ f = tree.find_all("gar", class_=re.compile("o"))
+ self.assertSelects(f, ["Found it"])
+
+ f = tree.find_all("gar", class_=re.compile("a"))
+ self.assertSelects(f, ["Found it"])
+
+ # Since the class is not the string "foo bar", but the two
+ # strings "foo" and "bar", this will not find anything.
+ f = tree.find_all("gar", class_=re.compile("o b"))
+ self.assertSelects(f, [])
+
+ def test_find_all_with_non_dictionary_for_attrs_finds_by_class(self):
+ soup = self.soup("<a class='bar'>Found it</a>")
+
+ self.assertSelects(soup.find_all("a", re.compile("ba")), ["Found it"])
+
+ def big_attribute_value(value):
+ return len(value) > 3
+
+ self.assertSelects(soup.find_all("a", big_attribute_value), [])
+
+ def small_attribute_value(value):
+ return len(value) <= 3
+
+ self.assertSelects(
+ soup.find_all("a", small_attribute_value), ["Found it"])
+
+ def test_find_all_with_string_for_attrs_finds_multiple_classes(self):
+ soup = self.soup('<a class="foo bar"></a><a class="foo"></a>')
+ a, a2 = soup.find_all("a")
+ self.assertEqual([a, a2], soup.find_all("a", "foo"))
+ self.assertEqual([a], soup.find_all("a", "bar"))
+
+ # If you specify the class as a string that contains a
+ # space, only that specific value will be found.
+ self.assertEqual([a], soup.find_all("a", class_="foo bar"))
+ self.assertEqual([a], soup.find_all("a", "foo bar"))
+ self.assertEqual([], soup.find_all("a", "bar foo"))
+
+ def test_find_all_by_attribute_soupstrainer(self):
+ tree = self.soup("""
+ <a id="first">Match.</a>
+ <a id="second">Non-match.</a>""")
+
+ strainer = SoupStrainer(attrs={'id' : 'first'})
+ self.assertSelects(tree.find_all(strainer), ['Match.'])
+
+    def test_find_all_with_missing_attribute(self):
+ # You can pass in None as the value of an attribute to find_all.
+ # This will match tags that do not have that attribute set.
+ tree = self.soup("""<a id="1">ID present.</a>
+ <a>No ID present.</a>
+ <a id="">ID is empty.</a>""")
+ self.assertSelects(tree.find_all('a', id=None), ["No ID present."])
+
+ def test_find_all_with_defined_attribute(self):
+ # You can pass in None as the value of an attribute to find_all.
+ # This will match tags that have that attribute set to any value.
+ tree = self.soup("""<a id="1">ID present.</a>
+ <a>No ID present.</a>
+ <a id="">ID is empty.</a>""")
+ self.assertSelects(
+ tree.find_all(id=True), ["ID present.", "ID is empty."])
+
+ def test_find_all_with_numeric_attribute(self):
+ # If you search for a number, it's treated as a string.
+ tree = self.soup("""<a id=1>Unquoted attribute.</a>
+ <a id="1">Quoted attribute.</a>""")
+
+ expected = ["Unquoted attribute.", "Quoted attribute."]
+ self.assertSelects(tree.find_all(id=1), expected)
+ self.assertSelects(tree.find_all(id="1"), expected)
+
+ def test_find_all_with_list_attribute_values(self):
+ # You can pass a list of attribute values instead of just one,
+ # and you'll get tags that match any of the values.
+ tree = self.soup("""<a id="1">1</a>
+ <a id="2">2</a>
+ <a id="3">3</a>
+ <a>No ID.</a>""")
+ self.assertSelects(tree.find_all(id=["1", "3", "4"]),
+ ["1", "3"])
+
+ def test_find_all_with_regular_expression_attribute_value(self):
+ # You can pass a regular expression as an attribute value, and
+ # you'll get tags whose values for that attribute match the
+ # regular expression.
+ tree = self.soup("""<a id="a">One a.</a>
+ <a id="aa">Two as.</a>
+ <a id="ab">Mixed as and bs.</a>
+ <a id="b">One b.</a>
+ <a>No ID.</a>""")
+
+ self.assertSelects(tree.find_all(id=re.compile("^a+$")),
+ ["One a.", "Two as."])
+
+ def test_find_by_name_and_containing_string(self):
+ soup = self.soup("<b>foo</b><b>bar</b><a>foo</a>")
+ a = soup.a
+
+ self.assertEqual([a], soup.find_all("a", text="foo"))
+ self.assertEqual([], soup.find_all("a", text="bar"))
+ self.assertEqual([], soup.find_all("a", text="bar"))
+
+ def test_find_by_name_and_containing_string_when_string_is_buried(self):
+ soup = self.soup("<a>foo</a><a><b><c>foo</c></b></a>")
+ self.assertEqual(soup.find_all("a"), soup.find_all("a", text="foo"))
+
+ def test_find_by_attribute_and_containing_string(self):
+ soup = self.soup('<b id="1">foo</b><a id="2">foo</a>')
+ a = soup.a
+
+ self.assertEqual([a], soup.find_all(id=2, text="foo"))
+ self.assertEqual([], soup.find_all(id=1, text="bar"))
+
+
+
+
+class TestIndex(TreeTest):
+ """Test Tag.index"""
+ def test_index(self):
+ tree = self.soup("""<div>
+ <a>Identical</a>
+ <b>Not identical</b>
+ <a>Identical</a>
+
+ <c><d>Identical with child</d></c>
+ <b>Also not identical</b>
+ <c><d>Identical with child</d></c>
+ </div>""")
+ div = tree.div
+ for i, element in enumerate(div.contents):
+ self.assertEqual(i, div.index(element))
+ self.assertRaises(ValueError, tree.index, 1)
+
+
+class TestParentOperations(TreeTest):
+ """Test navigation and searching through an element's parents."""
+
+ def setUp(self):
+ super(TestParentOperations, self).setUp()
+ self.tree = self.soup('''<ul id="empty"></ul>
+ <ul id="top">
+ <ul id="middle">
+ <ul id="bottom">
+ <b>Start here</b>
+ </ul>
+ </ul>''')
+ self.start = self.tree.b
+
+
+ def test_parent(self):
+ self.assertEqual(self.start.parent['id'], 'bottom')
+ self.assertEqual(self.start.parent.parent['id'], 'middle')
+ self.assertEqual(self.start.parent.parent.parent['id'], 'top')
+
+ def test_parent_of_top_tag_is_soup_object(self):
+ top_tag = self.tree.contents[0]
+ self.assertEqual(top_tag.parent, self.tree)
+
+ def test_soup_object_has_no_parent(self):
+ self.assertEqual(None, self.tree.parent)
+
+ def test_find_parents(self):
+ self.assertSelectsIDs(
+ self.start.find_parents('ul'), ['bottom', 'middle', 'top'])
+ self.assertSelectsIDs(
+ self.start.find_parents('ul', id="middle"), ['middle'])
+
+ def test_find_parent(self):
+ self.assertEqual(self.start.find_parent('ul')['id'], 'bottom')
+ self.assertEqual(self.start.find_parent('ul', id='top')['id'], 'top')
+
+ def test_parent_of_text_element(self):
+ text = self.tree.find(text="Start here")
+ self.assertEqual(text.parent.name, 'b')
+
+ def test_text_element_find_parent(self):
+ text = self.tree.find(text="Start here")
+ self.assertEqual(text.find_parent('ul')['id'], 'bottom')
+
+ def test_parent_generator(self):
+ parents = [parent['id'] for parent in self.start.parents
+ if parent is not None and 'id' in parent.attrs]
+ self.assertEqual(parents, ['bottom', 'middle', 'top'])
+
+
+class ProximityTest(TreeTest):
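+    # Shared fixture: a small document with three <b> siblings, used by
+    # the next/previous element navigation tests below.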
+
+ def setUp(self):
+ super(TreeTest, self).setUp()
+ self.tree = self.soup(
+ '<html id="start"><head></head><body><b id="1">One</b><b id="2">Two</b><b id="3">Three</b></body></html>')
+
+
+class TestNextOperations(ProximityTest):
+
+ def setUp(self):
+ super(TestNextOperations, self).setUp()
+ self.start = self.tree.b
+
+ def test_next(self):
+ self.assertEqual(self.start.next_element, "One")
+ self.assertEqual(self.start.next_element.next_element['id'], "2")
+
+ def test_next_of_last_item_is_none(self):
+ last = self.tree.find(text="Three")
+ self.assertEqual(last.next_element, None)
+
+ def test_next_of_root_is_none(self):
+ # The document root is outside the next/previous chain.
+ self.assertEqual(self.tree.next_element, None)
+
+ def test_find_all_next(self):
+ self.assertSelects(self.start.find_all_next('b'), ["Two", "Three"])
+ self.start.find_all_next(id=3)
+ self.assertSelects(self.start.find_all_next(id=3), ["Three"])
+
+ def test_find_next(self):
+ self.assertEqual(self.start.find_next('b')['id'], '2')
+ self.assertEqual(self.start.find_next(text="Three"), "Three")
+
+ def test_find_next_for_text_element(self):
+ text = self.tree.find(text="One")
+ self.assertEqual(text.find_next("b").string, "Two")
+ self.assertSelects(text.find_all_next("b"), ["Two", "Three"])
+
+ def test_next_generator(self):
+ start = self.tree.find(text="Two")
+ successors = [node for node in start.next_elements]
+ # There are two successors: the final <b> tag and its text contents.
+ tag, contents = successors
+ self.assertEqual(tag['id'], '3')
+ self.assertEqual(contents, "Three")
+
+class TestPreviousOperations(ProximityTest):
+
+ def setUp(self):
+ super(TestPreviousOperations, self).setUp()
+ self.end = self.tree.find(text="Three")
+
+ def test_previous(self):
+ self.assertEqual(self.end.previous_element['id'], "3")
+ self.assertEqual(self.end.previous_element.previous_element, "Two")
+
+ def test_previous_of_first_item_is_none(self):
+ first = self.tree.find('html')
+ self.assertEqual(first.previous_element, None)
+
+ def test_previous_of_root_is_none(self):
+ # The document root is outside the next/previous chain.
+ # XXX This is broken!
+ #self.assertEqual(self.tree.previous_element, None)
+ pass
+
+ def test_find_all_previous(self):
+ # The <b> tag containing the "Three" node is the predecessor
+ # of the "Three" node itself, which is why "Three" shows up
+ # here.
+ self.assertSelects(
+ self.end.find_all_previous('b'), ["Three", "Two", "One"])
+ self.assertSelects(self.end.find_all_previous(id=1), ["One"])
+
+ def test_find_previous(self):
+ self.assertEqual(self.end.find_previous('b')['id'], '3')
+ self.assertEqual(self.end.find_previous(text="One"), "One")
+
+ def test_find_previous_for_text_element(self):
+ text = self.tree.find(text="Three")
+ self.assertEqual(text.find_previous("b").string, "Three")
+ self.assertSelects(
+ text.find_all_previous("b"), ["Three", "Two", "One"])
+
+ def test_previous_generator(self):
+ start = self.tree.find(text="One")
+ predecessors = [node for node in start.previous_elements]
+
+ # There are four predecessors: the <b> tag containing "One"
+ # the <body> tag, the <head> tag, and the <html> tag.
+ b, body, head, html = predecessors
+ self.assertEqual(b['id'], '1')
+ self.assertEqual(body.name, "body")
+ self.assertEqual(head.name, "head")
+ self.assertEqual(html.name, "html")
+
+
+class SiblingTest(TreeTest):
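+    # Shared fixture for the sibling navigation tests: four <span>
+    # siblings, each but the last containing one nested <span>.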
+
+ def setUp(self):
+ super(SiblingTest, self).setUp()
+ markup = '''<html>
+ <span id="1">
+ <span id="1.1"></span>
+ </span>
+ <span id="2">
+ <span id="2.1"></span>
+ </span>
+ <span id="3">
+ <span id="3.1"></span>
+ </span>
+ <span id="4"></span>
+ </html>'''
+ # All that whitespace looks good but makes the tests more
+ # difficult. Get rid of it.
+ markup = re.compile("\n\s*").sub("", markup)
+ self.tree = self.soup(markup)
+
+
+class TestNextSibling(SiblingTest):
+
+ def setUp(self):
+ super(TestNextSibling, self).setUp()
+ self.start = self.tree.find(id="1")
+
+ def test_next_sibling_of_root_is_none(self):
+ self.assertEqual(self.tree.next_sibling, None)
+
+ def test_next_sibling(self):
+ self.assertEqual(self.start.next_sibling['id'], '2')
+ self.assertEqual(self.start.next_sibling.next_sibling['id'], '3')
+
+ # Note the difference between next_sibling and next_element.
+ self.assertEqual(self.start.next_element['id'], '1.1')
+
+ def test_next_sibling_may_not_exist(self):
+ self.assertEqual(self.tree.html.next_sibling, None)
+
+ nested_span = self.tree.find(id="1.1")
+ self.assertEqual(nested_span.next_sibling, None)
+
+ last_span = self.tree.find(id="4")
+ self.assertEqual(last_span.next_sibling, None)
+
+ def test_find_next_sibling(self):
+ self.assertEqual(self.start.find_next_sibling('span')['id'], '2')
+
+ def test_next_siblings(self):
+ self.assertSelectsIDs(self.start.find_next_siblings("span"),
+ ['2', '3', '4'])
+
+ self.assertSelectsIDs(self.start.find_next_siblings(id='3'), ['3'])
+
+ def test_next_sibling_for_text_element(self):
+ soup = self.soup("Foo<b>bar</b>baz")
+ start = soup.find(text="Foo")
+ self.assertEqual(start.next_sibling.name, 'b')
+ self.assertEqual(start.next_sibling.next_sibling, 'baz')
+
+ self.assertSelects(start.find_next_siblings('b'), ['bar'])
+ self.assertEqual(start.find_next_sibling(text="baz"), "baz")
+ self.assertEqual(start.find_next_sibling(text="nonesuch"), None)
+
+
+class TestPreviousSibling(SiblingTest):
+
+ def setUp(self):
+ super(TestPreviousSibling, self).setUp()
+ self.end = self.tree.find(id="4")
+
+ def test_previous_sibling_of_root_is_none(self):
+ self.assertEqual(self.tree.previous_sibling, None)
+
+ def test_previous_sibling(self):
+ self.assertEqual(self.end.previous_sibling['id'], '3')
+ self.assertEqual(self.end.previous_sibling.previous_sibling['id'], '2')
+
+ # Note the difference between previous_sibling and previous_element.
+ self.assertEqual(self.end.previous_element['id'], '3.1')
+
+ def test_previous_sibling_may_not_exist(self):
+ self.assertEqual(self.tree.html.previous_sibling, None)
+
+ nested_span = self.tree.find(id="1.1")
+ self.assertEqual(nested_span.previous_sibling, None)
+
+ first_span = self.tree.find(id="1")
+ self.assertEqual(first_span.previous_sibling, None)
+
+ def test_find_previous_sibling(self):
+ self.assertEqual(self.end.find_previous_sibling('span')['id'], '3')
+
+ def test_previous_siblings(self):
+ self.assertSelectsIDs(self.end.find_previous_siblings("span"),
+ ['3', '2', '1'])
+
+ self.assertSelectsIDs(self.end.find_previous_siblings(id='1'), ['1'])
+
+ def test_previous_sibling_for_text_element(self):
+ soup = self.soup("Foo<b>bar</b>baz")
+ start = soup.find(text="baz")
+ self.assertEqual(start.previous_sibling.name, 'b')
+ self.assertEqual(start.previous_sibling.previous_sibling, 'Foo')
+
+ self.assertSelects(start.find_previous_siblings('b'), ['bar'])
+ self.assertEqual(start.find_previous_sibling(text="Foo"), "Foo")
+ self.assertEqual(start.find_previous_sibling(text="nonesuch"), None)
+
+
+class TestTagCreation(SoupTest):
+ """Test the ability to create new tags."""
+ def test_new_tag(self):
+ soup = self.soup("")
+ new_tag = soup.new_tag("foo", bar="baz")
+ self.assertTrue(isinstance(new_tag, Tag))
+ self.assertEqual("foo", new_tag.name)
+ self.assertEqual(dict(bar="baz"), new_tag.attrs)
+ self.assertEqual(None, new_tag.parent)
+
+ def test_tag_inherits_self_closing_rules_from_builder(self):
+ if XML_BUILDER_PRESENT:
+ xml_soup = BeautifulSoup("", "xml")
+ xml_br = xml_soup.new_tag("br")
+ xml_p = xml_soup.new_tag("p")
+
+            # Both the <br> and <p> tags are empty-element, just because
+ # they have no contents.
+ self.assertEqual(b"<br/>", xml_br.encode())
+ self.assertEqual(b"<p/>", xml_p.encode())
+
+ html_soup = BeautifulSoup("", "html")
+ html_br = html_soup.new_tag("br")
+ html_p = html_soup.new_tag("p")
+
+            # The HTML builder uses HTML's rules about which tags are
+ # empty-element tags, and the new tags reflect these rules.
+ self.assertEqual(b"<br/>", html_br.encode())
+ self.assertEqual(b"<p></p>", html_p.encode())
+
+ def test_new_string_creates_navigablestring(self):
+ soup = self.soup("")
+ s = soup.new_string("foo")
+ self.assertEqual("foo", s)
+ self.assertTrue(isinstance(s, NavigableString))
+
+ def test_new_string_can_create_navigablestring_subclass(self):
+ soup = self.soup("")
+ s = soup.new_string("foo", Comment)
+ self.assertEqual("foo", s)
+ self.assertTrue(isinstance(s, Comment))
+
+class TestTreeModification(SoupTest):
+
+ def test_attribute_modification(self):
+ soup = self.soup('<a id="1"></a>')
+ soup.a['id'] = 2
+ self.assertEqual(soup.decode(), self.document_for('<a id="2"></a>'))
+ del(soup.a['id'])
+ self.assertEqual(soup.decode(), self.document_for('<a></a>'))
+ soup.a['id2'] = 'foo'
+ self.assertEqual(soup.decode(), self.document_for('<a id2="foo"></a>'))
+
+ def test_new_tag_creation(self):
+ builder = builder_registry.lookup('html')()
+ soup = self.soup("<body></body>", builder=builder)
+ a = Tag(soup, builder, 'a')
+ ol = Tag(soup, builder, 'ol')
+ a['href'] = 'http://foo.com/'
+ soup.body.insert(0, a)
+ soup.body.insert(1, ol)
+ self.assertEqual(
+ soup.body.encode(),
+ b'<body><a href="http://foo.com/"></a><ol></ol></body>')
+
+ def test_append_to_contents_moves_tag(self):
+ doc = """<p id="1">Don't leave me <b>here</b>.</p>
+ <p id="2">Don\'t leave!</p>"""
+ soup = self.soup(doc)
+ second_para = soup.find(id='2')
+ bold = soup.b
+
+ # Move the <b> tag to the end of the second paragraph.
+ soup.find(id='2').append(soup.b)
+
+ # The <b> tag is now a child of the second paragraph.
+ self.assertEqual(bold.parent, second_para)
+
+ self.assertEqual(
+ soup.decode(), self.document_for(
+ '<p id="1">Don\'t leave me .</p>\n'
+ '<p id="2">Don\'t leave!<b>here</b></p>'))
+
+ def test_replace_with_returns_thing_that_was_replaced(self):
+ text = "<a></a><b><c></c></b>"
+ soup = self.soup(text)
+ a = soup.a
+ new_a = a.replace_with(soup.c)
+ self.assertEqual(a, new_a)
+
+ def test_unwrap_returns_thing_that_was_replaced(self):
+ text = "<a><b></b><c></c></a>"
+ soup = self.soup(text)
+ a = soup.a
+ new_a = a.unwrap()
+ self.assertEqual(a, new_a)
+
+ def test_replace_tag_with_itself(self):
+ text = "<a><b></b><c>Foo<d></d></c></a><a><e></e></a>"
+ soup = self.soup(text)
+ c = soup.c
+ soup.c.replace_with(c)
+ self.assertEqual(soup.decode(), self.document_for(text))
+
+ def test_replace_tag_with_its_parent_raises_exception(self):
+ text = "<a><b></b></a>"
+ soup = self.soup(text)
+ self.assertRaises(ValueError, soup.b.replace_with, soup.a)
+
+ def test_insert_tag_into_itself_raises_exception(self):
+ text = "<a><b></b></a>"
+ soup = self.soup(text)
+ self.assertRaises(ValueError, soup.a.insert, 0, soup.a)
+
+ def test_replace_with_maintains_next_element_throughout(self):
+ soup = self.soup('<p><a>one</a><b>three</b></p>')
+ a = soup.a
+ b = a.contents[0]
+ # Make it so the <a> tag has two text children.
+ a.insert(1, "two")
+
+ # Now replace each one with the empty string.
+ left, right = a.contents
+ left.replaceWith('')
+ right.replaceWith('')
+
+ # The <b> tag is still connected to the tree.
+ self.assertEqual("three", soup.b.string)
+
+ def test_replace_final_node(self):
+ soup = self.soup("<b>Argh!</b>")
+ soup.find(text="Argh!").replace_with("Hooray!")
+ new_text = soup.find(text="Hooray!")
+ b = soup.b
+ self.assertEqual(new_text.previous_element, b)
+ self.assertEqual(new_text.parent, b)
+ self.assertEqual(new_text.previous_element.next_element, new_text)
+ self.assertEqual(new_text.next_element, None)
+
+ def test_consecutive_text_nodes(self):
+ # A builder should never create two consecutive text nodes,
+ # but if you insert one next to another, Beautiful Soup will
+ # handle it correctly.
+ soup = self.soup("<a><b>Argh!</b><c></c></a>")
+ soup.b.insert(1, "Hooray!")
+
+ self.assertEqual(
+ soup.decode(), self.document_for(
+ "<a><b>Argh!Hooray!</b><c></c></a>"))
+
+ new_text = soup.find(text="Hooray!")
+ self.assertEqual(new_text.previous_element, "Argh!")
+ self.assertEqual(new_text.previous_element.next_element, new_text)
+
+ self.assertEqual(new_text.previous_sibling, "Argh!")
+ self.assertEqual(new_text.previous_sibling.next_sibling, new_text)
+
+ self.assertEqual(new_text.next_sibling, None)
+ self.assertEqual(new_text.next_element, soup.c)
+
+ def test_insert_string(self):
+ soup = self.soup("<a></a>")
+ soup.a.insert(0, "bar")
+ soup.a.insert(0, "foo")
+        # The strings were added to the tag.
+ self.assertEqual(["foo", "bar"], soup.a.contents)
+ # And they were converted to NavigableStrings.
+ self.assertEqual(soup.a.contents[0].next_element, "bar")
+
+ def test_insert_tag(self):
+ builder = self.default_builder
+ soup = self.soup(
+ "<a><b>Find</b><c>lady!</c><d></d></a>", builder=builder)
+ magic_tag = Tag(soup, builder, 'magictag')
+ magic_tag.insert(0, "the")
+ soup.a.insert(1, magic_tag)
+
+ self.assertEqual(
+ soup.decode(), self.document_for(
+ "<a><b>Find</b><magictag>the</magictag><c>lady!</c><d></d></a>"))
+
+ # Make sure all the relationships are hooked up correctly.
+ b_tag = soup.b
+ self.assertEqual(b_tag.next_sibling, magic_tag)
+ self.assertEqual(magic_tag.previous_sibling, b_tag)
+
+ find = b_tag.find(text="Find")
+ self.assertEqual(find.next_element, magic_tag)
+ self.assertEqual(magic_tag.previous_element, find)
+
+ c_tag = soup.c
+ self.assertEqual(magic_tag.next_sibling, c_tag)
+ self.assertEqual(c_tag.previous_sibling, magic_tag)
+
+ the = magic_tag.find(text="the")
+ self.assertEqual(the.parent, magic_tag)
+ self.assertEqual(the.next_element, c_tag)
+ self.assertEqual(c_tag.previous_element, the)
+
+ def test_append_child_thats_already_at_the_end(self):
+ data = "<a><b></b></a>"
+ soup = self.soup(data)
+ soup.a.append(soup.b)
+ self.assertEqual(data, soup.decode())
+
+ def test_move_tag_to_beginning_of_parent(self):
+ data = "<a><b></b><c></c><d></d></a>"
+ soup = self.soup(data)
+ soup.a.insert(0, soup.d)
+ self.assertEqual("<a><d></d><b></b><c></c></a>", soup.decode())
+
+ def test_insert_works_on_empty_element_tag(self):
+ # This is a little strange, since most HTML parsers don't allow
+ # markup like this to come through. But in general, we don't
+ # know what the parser would or wouldn't have allowed, so
+ # I'm letting this succeed for now.
+ soup = self.soup("<br/>")
+ soup.br.insert(1, "Contents")
+ self.assertEqual(str(soup.br), "<br>Contents</br>")
+
+ def test_insert_before(self):
+ soup = self.soup("<a>foo</a><b>bar</b>")
+ soup.b.insert_before("BAZ")
+ soup.a.insert_before("QUUX")
+ self.assertEqual(
+ soup.decode(), self.document_for("QUUX<a>foo</a>BAZ<b>bar</b>"))
+
+ soup.a.insert_before(soup.b)
+ self.assertEqual(
+ soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ"))
+
+ def test_insert_after(self):
+ soup = self.soup("<a>foo</a><b>bar</b>")
+ soup.b.insert_after("BAZ")
+ soup.a.insert_after("QUUX")
+ self.assertEqual(
+ soup.decode(), self.document_for("<a>foo</a>QUUX<b>bar</b>BAZ"))
+ soup.b.insert_after(soup.a)
+ self.assertEqual(
+ soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ"))
+
+ def test_insert_after_raises_exception_if_after_has_no_meaning(self):
+ soup = self.soup("")
+ tag = soup.new_tag("a")
+ string = soup.new_string("")
+ self.assertRaises(ValueError, string.insert_after, tag)
+ self.assertRaises(NotImplementedError, soup.insert_after, tag)
+ self.assertRaises(ValueError, tag.insert_after, tag)
+
+ def test_insert_before_raises_notimplementederror_if_before_has_no_meaning(self):
+ soup = self.soup("")
+ tag = soup.new_tag("a")
+ string = soup.new_string("")
+ self.assertRaises(ValueError, string.insert_before, tag)
+ self.assertRaises(NotImplementedError, soup.insert_before, tag)
+ self.assertRaises(ValueError, tag.insert_before, tag)
+
+ def test_replace_with(self):
+ soup = self.soup(
+ "<p>There's <b>no</b> business like <b>show</b> business</p>")
+ no, show = soup.find_all('b')
+ show.replace_with(no)
+ self.assertEqual(
+ soup.decode(),
+ self.document_for(
+ "<p>There's business like <b>no</b> business</p>"))
+
+ self.assertEqual(show.parent, None)
+ self.assertEqual(no.parent, soup.p)
+ self.assertEqual(no.next_element, "no")
+ self.assertEqual(no.next_sibling, " business")
+
+ def test_replace_first_child(self):
+ data = "<a><b></b><c></c></a>"
+ soup = self.soup(data)
+ soup.b.replace_with(soup.c)
+ self.assertEqual("<a><c></c></a>", soup.decode())
+
+ def test_replace_last_child(self):
+ data = "<a><b></b><c></c></a>"
+ soup = self.soup(data)
+ soup.c.replace_with(soup.b)
+ self.assertEqual("<a><b></b></a>", soup.decode())
+
+ def test_nested_tag_replace_with(self):
+ soup = self.soup(
+ """<a>We<b>reserve<c>the</c><d>right</d></b></a><e>to<f>refuse</f><g>service</g></e>""")
+
+ # Replace the entire <b> tag and its contents ("reserve the
+ # right") with the <f> tag ("refuse").
+ remove_tag = soup.b
+ move_tag = soup.f
+ remove_tag.replace_with(move_tag)
+
+ self.assertEqual(
+ soup.decode(), self.document_for(
+ "<a>We<f>refuse</f></a><e>to<g>service</g></e>"))
+
+ # The <b> tag is now an orphan.
+ self.assertEqual(remove_tag.parent, None)
+ self.assertEqual(remove_tag.find(text="right").next_element, None)
+ self.assertEqual(remove_tag.previous_element, None)
+ self.assertEqual(remove_tag.next_sibling, None)
+ self.assertEqual(remove_tag.previous_sibling, None)
+
+ # The <f> tag is now connected to the <a> tag.
+ self.assertEqual(move_tag.parent, soup.a)
+ self.assertEqual(move_tag.previous_element, "We")
+ self.assertEqual(move_tag.next_element.next_element, soup.e)
+ self.assertEqual(move_tag.next_sibling, None)
+
+ # The gap where the <f> tag used to be has been mended, and
+ # the word "to" is now connected to the <g> tag.
+ to_text = soup.find(text="to")
+ g_tag = soup.g
+ self.assertEqual(to_text.next_element, g_tag)
+ self.assertEqual(to_text.next_sibling, g_tag)
+ self.assertEqual(g_tag.previous_element, to_text)
+ self.assertEqual(g_tag.previous_sibling, to_text)
+
+ def test_unwrap(self):
+ tree = self.soup("""
+ <p>Unneeded <em>formatting</em> is unneeded</p>
+ """)
+ tree.em.unwrap()
+ self.assertEqual(tree.em, None)
+ self.assertEqual(tree.p.text, "Unneeded formatting is unneeded")
+
+ def test_wrap(self):
+ soup = self.soup("I wish I was bold.")
+ value = soup.string.wrap(soup.new_tag("b"))
+ self.assertEqual(value.decode(), "<b>I wish I was bold.</b>")
+ self.assertEqual(
+ soup.decode(), self.document_for("<b>I wish I was bold.</b>"))
+
+ def test_wrap_extracts_tag_from_elsewhere(self):
+ soup = self.soup("<b></b>I wish I was bold.")
+ soup.b.next_sibling.wrap(soup.b)
+ self.assertEqual(
+ soup.decode(), self.document_for("<b>I wish I was bold.</b>"))
+
+ def test_wrap_puts_new_contents_at_the_end(self):
+ soup = self.soup("<b>I like being bold.</b>I wish I was bold.")
+ soup.b.next_sibling.wrap(soup.b)
+ self.assertEqual(2, len(soup.b.contents))
+ self.assertEqual(
+ soup.decode(), self.document_for(
+ "<b>I like being bold.I wish I was bold.</b>"))
+
+ def test_extract(self):
+ soup = self.soup(
+ '<html><body>Some content. <div id="nav">Nav crap</div> More content.</body></html>')
+
+ self.assertEqual(len(soup.body.contents), 3)
+ extracted = soup.find(id="nav").extract()
+
+ self.assertEqual(
+ soup.decode(), "<html><body>Some content. More content.</body></html>")
+ self.assertEqual(extracted.decode(), '<div id="nav">Nav crap</div>')
+
+ # The extracted tag is now an orphan.
+ self.assertEqual(len(soup.body.contents), 2)
+ self.assertEqual(extracted.parent, None)
+ self.assertEqual(extracted.previous_element, None)
+ self.assertEqual(extracted.next_element.next_element, None)
+
+ # The gap where the extracted tag used to be has been mended.
+ content_1 = soup.find(text="Some content. ")
+ content_2 = soup.find(text=" More content.")
+ self.assertEqual(content_1.next_element, content_2)
+ self.assertEqual(content_1.next_sibling, content_2)
+ self.assertEqual(content_2.previous_element, content_1)
+ self.assertEqual(content_2.previous_sibling, content_1)
+
+ def test_extract_distinguishes_between_identical_strings(self):
+ soup = self.soup("<a>foo</a><b>bar</b>")
+ foo_1 = soup.a.string
+ bar_1 = soup.b.string
+ foo_2 = soup.new_string("foo")
+ bar_2 = soup.new_string("bar")
+ soup.a.append(foo_2)
+ soup.b.append(bar_2)
+
+ # Now there are two identical strings in the <a> tag, and two
+ # in the <b> tag. Let's remove the first "foo" and the second
+ # "bar".
+ foo_1.extract()
+ bar_2.extract()
+ self.assertEqual(foo_2, soup.a.string)
+ self.assertEqual(bar_2, soup.b.string)
+
+ def test_clear(self):
+ """Tag.clear()"""
+ soup = self.soup("<p><a>String <em>Italicized</em></a> and another</p>")
+ # clear using extract()
+ a = soup.a
+ soup.p.clear()
+ self.assertEqual(len(soup.p.contents), 0)
+ self.assertTrue(hasattr(a, "contents"))
+
+ # clear using decompose()
+ em = a.em
+ a.clear(decompose=True)
+ self.assertEqual(0, len(em.contents))
+
+ def test_string_set(self):
+ """Tag.string = 'string'"""
+ soup = self.soup("<a></a> <b><c></c></b>")
+ soup.a.string = "foo"
+ self.assertEqual(soup.a.contents, ["foo"])
+ soup.b.string = "bar"
+ self.assertEqual(soup.b.contents, ["bar"])
+
+ def test_string_set_does_not_affect_original_string(self):
+ soup = self.soup("<a><b>foo</b><c>bar</c>")
+ soup.b.string = soup.c.string
+ self.assertEqual(soup.a.encode(), b"<a><b>bar</b><c>bar</c></a>")
+
+ def test_set_string_preserves_class_of_string(self):
+ soup = self.soup("<a></a>")
+ cdata = CData("foo")
+ soup.a.string = cdata
+ self.assertTrue(isinstance(soup.a.string, CData))
+
+class TestElementObjects(SoupTest):
+ """Test various features of element objects."""
+
+ def test_len(self):
+ """The length of an element is its number of children."""
+ soup = self.soup("<top>1<b>2</b>3</top>")
+
+ # The BeautifulSoup object itself contains one element: the
+ # <top> tag.
+ self.assertEqual(len(soup.contents), 1)
+ self.assertEqual(len(soup), 1)
+
+ # The <top> tag contains three elements: the text node "1", the
+ # <b> tag, and the text node "3".
+ self.assertEqual(len(soup.top), 3)
+ self.assertEqual(len(soup.top.contents), 3)
+
+ def test_member_access_invokes_find(self):
+ """Accessing a Python member .foo invokes find('foo')"""
+ soup = self.soup('<b><i></i></b>')
+ self.assertEqual(soup.b, soup.find('b'))
+ self.assertEqual(soup.b.i, soup.find('b').find('i'))
+ self.assertEqual(soup.a, None)
+
+ def test_deprecated_member_access(self):
+ soup = self.soup('<b><i></i></b>')
+ with warnings.catch_warnings(record=True) as w:
+ tag = soup.bTag
+ self.assertEqual(soup.b, tag)
+ self.assertEqual(
+ '.bTag is deprecated, use .find("b") instead.',
+ str(w[0].message))
+
+ def test_has_attr(self):
+ """has_attr() checks for the presence of an attribute.
+
+ Please note: has_attr() is different from
+ __in__. has_attr() checks the tag's attributes and __in__
+ checks the tag's children.
+ """
+ soup = self.soup("<foo attr='bar'>")
+ self.assertTrue(soup.foo.has_attr('attr'))
+ self.assertFalse(soup.foo.has_attr('attr2'))
+
+
+ def test_attributes_come_out_in_alphabetical_order(self):
+ markup = '<b a="1" z="5" m="3" f="2" y="4"></b>'
+ self.assertSoupEquals(markup, '<b a="1" f="2" m="3" y="4" z="5"></b>')
+
+ def test_string(self):
+ # A tag that contains only a text node makes that node
+ # available as .string.
+ soup = self.soup("<b>foo</b>")
+ self.assertEqual(soup.b.string, 'foo')
+
+ def test_empty_tag_has_no_string(self):
+ # A tag with no children has no .string.
+ soup = self.soup("<b></b>")
+ self.assertEqual(soup.b.string, None)
+
+ def test_tag_with_multiple_children_has_no_string(self):
+ # A tag with multiple children has no .string.
+ soup = self.soup("<a>foo<b></b><b></b></b>")
+ self.assertEqual(soup.b.string, None)
+
+ soup = self.soup("<a>foo<b></b>bar</b>")
+ self.assertEqual(soup.b.string, None)
+
+ # Even if all the children are strings, due to trickery,
+ # it won't work--but this would be a good optimization.
+ soup = self.soup("<a>foo</b>")
+ soup.a.insert(1, "bar")
+ self.assertEqual(soup.a.string, None)
+
+ def test_tag_with_recursive_string_has_string(self):
+ # A tag with a single child which has a .string inherits that
+ # .string.
+ soup = self.soup("<a><b>foo</b></a>")
+ self.assertEqual(soup.a.string, "foo")
+ self.assertEqual(soup.string, "foo")
+
+ def test_lack_of_string(self):
+ """Only a tag containing a single text node has a .string."""
+ soup = self.soup("<b>f<i>e</i>o</b>")
+ self.assertFalse(soup.b.string)
+
+ soup = self.soup("<b></b>")
+ self.assertFalse(soup.b.string)
+
+ def test_all_text(self):
+ """Tag.text and Tag.get_text(sep=u"") -> all child text, concatenated"""
+ soup = self.soup("<a>a<b>r</b> <r> t </r></a>")
+ self.assertEqual(soup.a.text, "ar t ")
+ self.assertEqual(soup.a.get_text(strip=True), "art")
+ self.assertEqual(soup.a.get_text(","), "a,r, , t ")
+ self.assertEqual(soup.a.get_text(",", strip=True), "a,r,t")
+
+ def test_get_text_ignores_comments(self):
+ soup = self.soup("foo<!--IGNORE-->bar")
+ self.assertEqual(soup.get_text(), "foobar")
+
+ self.assertEqual(
+ soup.get_text(types=(NavigableString, Comment)), "fooIGNOREbar")
+ self.assertEqual(
+ soup.get_text(types=None), "fooIGNOREbar")
+
+ def test_all_strings_ignores_comments(self):
+ soup = self.soup("foo<!--IGNORE-->bar")
+ self.assertEqual(['foo', 'bar'], list(soup.strings))
+
+class TestCDAtaListAttributes(SoupTest):
+
+ """Testing cdata-list attributes like 'class'.
+ """
+ def test_single_value_becomes_list(self):
+ soup = self.soup("<a class='foo'>")
+ self.assertEqual(["foo"],soup.a['class'])
+
+ def test_multiple_values_becomes_list(self):
+ soup = self.soup("<a class='foo bar'>")
+ self.assertEqual(["foo", "bar"], soup.a['class'])
+
+ def test_multiple_values_separated_by_weird_whitespace(self):
+ soup = self.soup("<a class='foo\tbar\nbaz'>")
+ self.assertEqual(["foo", "bar", "baz"],soup.a['class'])
+
+ def test_attributes_joined_into_string_on_output(self):
+ soup = self.soup("<a class='foo\tbar'>")
+ self.assertEqual(b'<a class="foo bar"></a>', soup.a.encode())
+
+ def test_accept_charset(self):
+ soup = self.soup('<form accept-charset="ISO-8859-1 UTF-8">')
+ self.assertEqual(['ISO-8859-1', 'UTF-8'], soup.form['accept-charset'])
+
+ def test_cdata_attribute_applying_only_to_one_tag(self):
+ data = '<a accept-charset="ISO-8859-1 UTF-8"></a>'
+ soup = self.soup(data)
+ # We saw in another test that accept-charset is a cdata-list
+ # attribute for the <form> tag. But it's not a cdata-list
+ # attribute for any other tag.
+ self.assertEqual('ISO-8859-1 UTF-8', soup.a['accept-charset'])
+
+ def test_string_has_immutable_name_property(self):
+ string = self.soup("s").string
+ self.assertEqual(None, string.name)
+ def t():
+ string.name = 'foo'
+ self.assertRaises(AttributeError, t)
+
+class TestPersistence(SoupTest):
+ "Testing features like pickle and deepcopy."
+
+ def setUp(self):
+ super(TestPersistence, self).setUp()
+ self.page = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN"
+"http://www.w3.org/TR/REC-html40/transitional.dtd">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+<title>Beautiful Soup: We called him Tortoise because he taught us.</title>
+<link rev="made" href="mailto:leonardr@segfault.org">
+<meta name="Description" content="Beautiful Soup: an HTML parser optimized for screen-scraping.">
+<meta name="generator" content="Markov Approximation 1.4 (module: leonardr)">
+<meta name="author" content="Leonard Richardson">
+</head>
+<body>
+<a href="foo">foo</a>
+<a href="foo"><b>bar</b></a>
+</body>
+</html>"""
+ self.tree = self.soup(self.page)
+
+ def test_pickle_and_unpickle_identity(self):
+ # Pickling a tree, then unpickling it, yields a tree identical
+ # to the original.
+ dumped = pickle.dumps(self.tree, 2)
+ loaded = pickle.loads(dumped)
+ self.assertEqual(loaded.__class__, BeautifulSoup)
+ self.assertEqual(loaded.decode(), self.tree.decode())
+
+ def test_deepcopy_identity(self):
+ # Making a deepcopy of a tree yields an identical tree.
+ copied = copy.deepcopy(self.tree)
+ self.assertEqual(copied.decode(), self.tree.decode())
+
+ def test_unicode_pickle(self):
+ # A tree containing Unicode characters can be pickled.
+ html = u"<b>\N{SNOWMAN}</b>"
+ soup = self.soup(html)
+ dumped = pickle.dumps(soup, pickle.HIGHEST_PROTOCOL)
+ loaded = pickle.loads(dumped)
+ self.assertEqual(loaded.decode(), soup.decode())
+
+
+class TestSubstitutions(SoupTest):
+
+ def test_default_formatter_is_minimal(self):
+ markup = u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
+ soup = self.soup(markup)
+ decoded = soup.decode(formatter="minimal")
+ # The < is converted back into &lt; but the e-with-acute is left alone.
+ self.assertEqual(
+ decoded,
+ self.document_for(
+ u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"))
+
+ def test_formatter_html(self):
+ markup = u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
+ soup = self.soup(markup)
+ decoded = soup.decode(formatter="html")
+ self.assertEqual(
+ decoded,
+ self.document_for("<b>&lt;&lt;Sacr&eacute; bleu!&gt;&gt;</b>"))
+
+ def test_formatter_minimal(self):
+ markup = u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
+ soup = self.soup(markup)
+ decoded = soup.decode(formatter="minimal")
+ # The < is converted back into &lt; but the e-with-acute is left alone.
+ self.assertEqual(
+ decoded,
+ self.document_for(
+ u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"))
+
+ def test_formatter_null(self):
+ markup = u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
+ soup = self.soup(markup)
+ decoded = soup.decode(formatter=None)
+ # Neither the angle brackets nor the e-with-acute are converted.
+ # This is not valid HTML, but it's what the user wanted.
+ self.assertEqual(decoded,
+ self.document_for(u"<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>"))
+
+ def test_formatter_custom(self):
+ markup = u"<b>&lt;foo&gt;</b><b>bar</b>"
+ soup = self.soup(markup)
+ decoded = soup.decode(formatter = lambda x: x.upper())
+ # Instead of normal entity conversion code, the custom
+ # callable is called on every string.
+ self.assertEqual(
+ decoded,
+ self.document_for(u"<b><FOO></b><b>BAR</b>"))
+
+ def test_formatter_is_run_on_attribute_values(self):
+ markup = u'<a href="http://a.com?a=b&c=é">e</a>'
+ soup = self.soup(markup)
+ a = soup.a
+
+ expect_minimal = u'<a href="http://a.com?a=b&amp;c=é">e</a>'
+
+ self.assertEqual(expect_minimal, a.decode())
+ self.assertEqual(expect_minimal, a.decode(formatter="minimal"))
+
+ expect_html = u'<a href="http://a.com?a=b&amp;c=&eacute;">e</a>'
+ self.assertEqual(expect_html, a.decode(formatter="html"))
+
+ self.assertEqual(markup, a.decode(formatter=None))
+ expect_upper = u'<a href="HTTP://A.COM?A=B&C=É">E</a>'
+ self.assertEqual(expect_upper, a.decode(formatter=lambda x: x.upper()))
+
+ def test_formatter_skips_script_tag_for_html_documents(self):
+ doc = """
+ <script type="text/javascript">
+ console.log("< < hey > > ");
+ </script>
+"""
+ encoded = BeautifulSoup(doc).encode()
+ self.assertTrue(b"< < hey > >" in encoded)
+
+ def test_formatter_skips_style_tag_for_html_documents(self):
+ doc = """
+ <style type="text/css">
+ console.log("< < hey > > ");
+ </style>
+"""
+ encoded = BeautifulSoup(doc).encode()
+ self.assertTrue(b"< < hey > >" in encoded)
+
+ def test_prettify_leaves_preformatted_text_alone(self):
+ soup = self.soup("<div> foo <pre> \tbar\n \n </pre> baz ")
+ # Everything outside the <pre> tag is reformatted, but everything
+ # inside is left alone.
+ self.assertEqual(
+ u'<div>\n foo\n <pre> \tbar\n \n </pre>\n baz\n</div>',
+ soup.div.prettify())
+
+ def test_prettify_accepts_formatter(self):
+ soup = BeautifulSoup("<html><body>foo</body></html>")
+ pretty = soup.prettify(formatter = lambda x: x.upper())
+ self.assertTrue("FOO" in pretty)
+
+ def test_prettify_outputs_unicode_by_default(self):
+ soup = self.soup("<a></a>")
+ self.assertEqual(unicode, type(soup.prettify()))
+
+ def test_prettify_can_encode_data(self):
+ soup = self.soup("<a></a>")
+ self.assertEqual(bytes, type(soup.prettify("utf-8")))
+
+ def test_html_entity_substitution_off_by_default(self):
+ markup = u"<b>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</b>"
+ soup = self.soup(markup)
+ encoded = soup.b.encode("utf-8")
+ self.assertEqual(encoded, markup.encode('utf-8'))
+
+ def test_encoding_substitution(self):
+ # Here's the <meta> tag saying that a document is
+ # encoded in Shift-JIS.
+ meta_tag = ('<meta content="text/html; charset=x-sjis" '
+ 'http-equiv="Content-type"/>')
+ soup = self.soup(meta_tag)
+
+ # Parse the document, and the charset appears unchanged.
+ self.assertEqual(soup.meta['content'], 'text/html; charset=x-sjis')
+
+ # Encode the document into some encoding, and the encoding is
+ # substituted into the meta tag.
+ utf_8 = soup.encode("utf-8")
+ self.assertTrue(b"charset=utf-8" in utf_8)
+
+ euc_jp = soup.encode("euc_jp")
+ self.assertTrue(b"charset=euc_jp" in euc_jp)
+
+ shift_jis = soup.encode("shift-jis")
+ self.assertTrue(b"charset=shift-jis" in shift_jis)
+
+ utf_16_u = soup.encode("utf-16").decode("utf-16")
+ self.assertTrue("charset=utf-16" in utf_16_u)
+
+ def test_encoding_substitution_doesnt_happen_if_tag_is_strained(self):
+ markup = ('<head><meta content="text/html; charset=x-sjis" '
+ 'http-equiv="Content-type"/></head><pre>foo</pre>')
+
+ # Beautiful Soup used to try to rewrite the meta tag even if the
+ # meta tag got filtered out by the strainer. This test makes
+ # sure that doesn't happen.
+ strainer = SoupStrainer('pre')
+ soup = self.soup(markup, parse_only=strainer)
+ self.assertEqual(soup.contents[0].name, 'pre')
+
+class TestEncoding(SoupTest):
+ """Test the ability to encode objects into strings."""
+
+ def test_unicode_string_can_be_encoded(self):
+ html = u"<b>\N{SNOWMAN}</b>"
+ soup = self.soup(html)
+ self.assertEqual(soup.b.string.encode("utf-8"),
+ u"\N{SNOWMAN}".encode("utf-8"))
+
+ def test_tag_containing_unicode_string_can_be_encoded(self):
+ html = u"<b>\N{SNOWMAN}</b>"
+ soup = self.soup(html)
+ self.assertEqual(
+ soup.b.encode("utf-8"), html.encode("utf-8"))
+
+ def test_encoding_substitutes_unrecognized_characters_by_default(self):
+ html = u"<b>\N{SNOWMAN}</b>"
+ soup = self.soup(html)
+ self.assertEqual(soup.b.encode("ascii"), b"<b>&#9731;</b>")
+
+ def test_encoding_can_be_made_strict(self):
+ html = u"<b>\N{SNOWMAN}</b>"
+ soup = self.soup(html)
+ self.assertRaises(
+ UnicodeEncodeError, soup.encode, "ascii", errors="strict")
+
+ def test_decode_contents(self):
+ html = u"<b>\N{SNOWMAN}</b>"
+ soup = self.soup(html)
+ self.assertEqual(u"\N{SNOWMAN}", soup.b.decode_contents())
+
+ def test_encode_contents(self):
+ html = u"<b>\N{SNOWMAN}</b>"
+ soup = self.soup(html)
+ self.assertEqual(
+ u"\N{SNOWMAN}".encode("utf8"), soup.b.encode_contents(
+ encoding="utf8"))
+
+ def test_deprecated_renderContents(self):
+ html = u"<b>\N{SNOWMAN}</b>"
+ soup = self.soup(html)
+ self.assertEqual(
+ u"\N{SNOWMAN}".encode("utf8"), soup.b.renderContents())
+
+class TestNavigableStringSubclasses(SoupTest):
+
+ def test_cdata(self):
+ # None of the current builders turn CDATA sections into CData
+ # objects, but you can create them manually.
+ soup = self.soup("")
+ cdata = CData("foo")
+ soup.insert(1, cdata)
+ self.assertEqual(str(soup), "<![CDATA[foo]]>")
+ self.assertEqual(soup.find(text="foo"), "foo")
+ self.assertEqual(soup.contents[0], "foo")
+
+ def test_cdata_is_never_formatted(self):
+ """Text inside a CData object is passed into the formatter.
+
+ But the return value is ignored.
+ """
+
+ self.count = 0
+ def increment(*args):
+ self.count += 1
+ return "BITTER FAILURE"
+
+ soup = self.soup("")
+ cdata = CData("<><><>")
+ soup.insert(1, cdata)
+ self.assertEqual(
+ b"<![CDATA[<><><>]]>", soup.encode(formatter=increment))
+ self.assertEqual(1, self.count)
+
+ def test_doctype_ends_in_newline(self):
+ # Unlike other NavigableString subclasses, a DOCTYPE always ends
+ # in a newline.
+ doctype = Doctype("foo")
+ soup = self.soup("")
+ soup.insert(1, doctype)
+ self.assertEqual(soup.encode(), b"<!DOCTYPE foo>\n")
+
+
+class TestSoupSelector(TreeTest):
+
+ HTML = """
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
+"http://www.w3.org/TR/html4/strict.dtd">
+<html>
+<head>
+<title>The title</title>
+<link rel="stylesheet" href="blah.css" type="text/css" id="l1">
+</head>
+<body>
+
+<div id="main" class="fancy">
+<div id="inner">
+<h1 id="header1">An H1</h1>
+<p>Some text</p>
+<p class="onep" id="p1">Some more text</p>
+<h2 id="header2">An H2</h2>
+<p class="class1 class2 class3" id="pmulti">Another</p>
+<a href="http://bob.example.org/" rel="friend met" id="bob">Bob</a>
+<h2 id="header3">Another H2</h2>
+<a id="me" href="http://simonwillison.net/" rel="me">me</a>
+<span class="s1">
+<a href="#" id="s1a1">span1a1</a>
+<a href="#" id="s1a2">span1a2 <span id="s1a2s1">test</span></a>
+<span class="span2">
+<a href="#" id="s2a1">span2a1</a>
+</span>
+<span class="span3"></span>
+</span>
+</div>
+<p lang="en" id="lang-en">English</p>
+<p lang="en-gb" id="lang-en-gb">English UK</p>
+<p lang="en-us" id="lang-en-us">English US</p>
+<p lang="fr" id="lang-fr">French</p>
+</div>
+
+<div id="footer">
+</div>
+"""
+
+ def setUp(self):
+ self.soup = BeautifulSoup(self.HTML)
+
+ def assertSelects(self, selector, expected_ids):
+ el_ids = [el['id'] for el in self.soup.select(selector)]
+ el_ids.sort()
+ expected_ids.sort()
+ self.assertEqual(expected_ids, el_ids,
+ "Selector %s, expected [%s], got [%s]" % (
+ selector, ', '.join(expected_ids), ', '.join(el_ids)
+ )
+ )
+
+ assertSelect = assertSelects
+
+ def assertSelectMultiple(self, *tests):
+ for selector, expected_ids in tests:
+ self.assertSelect(selector, expected_ids)
+
+ def test_one_tag_one(self):
+ els = self.soup.select('title')
+ self.assertEqual(len(els), 1)
+ self.assertEqual(els[0].name, 'title')
+ self.assertEqual(els[0].contents, [u'The title'])
+
+ def test_one_tag_many(self):
+ els = self.soup.select('div')
+ self.assertEqual(len(els), 3)
+ for div in els:
+ self.assertEqual(div.name, 'div')
+
+ def test_tag_in_tag_one(self):
+ els = self.soup.select('div div')
+ self.assertSelects('div div', ['inner'])
+
+ def test_tag_in_tag_many(self):
+ for selector in ('html div', 'html body div', 'body div'):
+ self.assertSelects(selector, ['main', 'inner', 'footer'])
+
+ def test_tag_no_match(self):
+ self.assertEqual(len(self.soup.select('del')), 0)
+
+ def test_invalid_tag(self):
+ self.assertRaises(ValueError, self.soup.select, 'tag%t')
+
+ def test_header_tags(self):
+ self.assertSelectMultiple(
+ ('h1', ['header1']),
+ ('h2', ['header2', 'header3']),
+ )
+
+ def test_class_one(self):
+ for selector in ('.onep', 'p.onep', 'html p.onep'):
+ els = self.soup.select(selector)
+ self.assertEqual(len(els), 1)
+ self.assertEqual(els[0].name, 'p')
+ self.assertEqual(els[0]['class'], ['onep'])
+
+ def test_class_mismatched_tag(self):
+ els = self.soup.select('div.onep')
+ self.assertEqual(len(els), 0)
+
+ def test_one_id(self):
+ for selector in ('div#inner', '#inner', 'div div#inner'):
+ self.assertSelects(selector, ['inner'])
+
+ def test_bad_id(self):
+ els = self.soup.select('#doesnotexist')
+ self.assertEqual(len(els), 0)
+
+ def test_items_in_id(self):
+ els = self.soup.select('div#inner p')
+ self.assertEqual(len(els), 3)
+ for el in els:
+ self.assertEqual(el.name, 'p')
+ self.assertEqual(els[1]['class'], ['onep'])
+ self.assertFalse(els[0].has_attr('class'))
+
+ def test_a_bunch_of_emptys(self):
+ for selector in ('div#main del', 'div#main div.oops', 'div div#main'):
+ self.assertEqual(len(self.soup.select(selector)), 0)
+
+ def test_multi_class_support(self):
+ for selector in ('.class1', 'p.class1', '.class2', 'p.class2',
+ '.class3', 'p.class3', 'html p.class2', 'div#inner .class2'):
+ self.assertSelects(selector, ['pmulti'])
+
+ def test_multi_class_selection(self):
+ for selector in ('.class1.class3', '.class3.class2',
+ '.class1.class2.class3'):
+ self.assertSelects(selector, ['pmulti'])
+
+ def test_child_selector(self):
+ self.assertSelects('.s1 > a', ['s1a1', 's1a2'])
+ self.assertSelects('.s1 > a span', ['s1a2s1'])
+
+ def test_child_selector_id(self):
+ self.assertSelects('.s1 > a#s1a2 span', ['s1a2s1'])
+
+ def test_attribute_equals(self):
+ self.assertSelectMultiple(
+ ('p[class="onep"]', ['p1']),
+ ('p[id="p1"]', ['p1']),
+ ('[class="onep"]', ['p1']),
+ ('[id="p1"]', ['p1']),
+ ('link[rel="stylesheet"]', ['l1']),
+ ('link[type="text/css"]', ['l1']),
+ ('link[href="blah.css"]', ['l1']),
+ ('link[href="no-blah.css"]', []),
+ ('[rel="stylesheet"]', ['l1']),
+ ('[type="text/css"]', ['l1']),
+ ('[href="blah.css"]', ['l1']),
+ ('[href="no-blah.css"]', []),
+ ('p[href="no-blah.css"]', []),
+ ('[href="no-blah.css"]', []),
+ )
+
+ def test_attribute_tilde(self):
+ self.assertSelectMultiple(
+ ('p[class~="class1"]', ['pmulti']),
+ ('p[class~="class2"]', ['pmulti']),
+ ('p[class~="class3"]', ['pmulti']),
+ ('[class~="class1"]', ['pmulti']),
+ ('[class~="class2"]', ['pmulti']),
+ ('[class~="class3"]', ['pmulti']),
+ ('a[rel~="friend"]', ['bob']),
+ ('a[rel~="met"]', ['bob']),
+ ('[rel~="friend"]', ['bob']),
+ ('[rel~="met"]', ['bob']),
+ )
+
+ def test_attribute_startswith(self):
+ self.assertSelectMultiple(
+ ('[rel^="style"]', ['l1']),
+ ('link[rel^="style"]', ['l1']),
+ ('notlink[rel^="notstyle"]', []),
+ ('[rel^="notstyle"]', []),
+ ('link[rel^="notstyle"]', []),
+ ('link[href^="bla"]', ['l1']),
+ ('a[href^="http://"]', ['bob', 'me']),
+ ('[href^="http://"]', ['bob', 'me']),
+ ('[id^="p"]', ['pmulti', 'p1']),
+ ('[id^="m"]', ['me', 'main']),
+ ('div[id^="m"]', ['main']),
+ ('a[id^="m"]', ['me']),
+ )
+
+ def test_attribute_endswith(self):
+ self.assertSelectMultiple(
+ ('[href$=".css"]', ['l1']),
+ ('link[href$=".css"]', ['l1']),
+ ('link[id$="1"]', ['l1']),
+ ('[id$="1"]', ['l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1']),
+ ('div[id$="1"]', []),
+ ('[id$="noending"]', []),
+ )
+
+ def test_attribute_contains(self):
+ self.assertSelectMultiple(
+ # From test_attribute_startswith
+ ('[rel*="style"]', ['l1']),
+ ('link[rel*="style"]', ['l1']),
+ ('notlink[rel*="notstyle"]', []),
+ ('[rel*="notstyle"]', []),
+ ('link[rel*="notstyle"]', []),
+ ('link[href*="bla"]', ['l1']),
+ ('a[href*="http://"]', ['bob', 'me']),
+ ('[href*="http://"]', ['bob', 'me']),
+ ('[id*="p"]', ['pmulti', 'p1']),
+ ('div[id*="m"]', ['main']),
+ ('a[id*="m"]', ['me']),
+ # From test_attribute_endswith
+ ('[href*=".css"]', ['l1']),
+ ('link[href*=".css"]', ['l1']),
+ ('link[id*="1"]', ['l1']),
+ ('[id*="1"]', ['l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1']),
+ ('div[id*="1"]', []),
+ ('[id*="noending"]', []),
+ # New for this test
+ ('[href*="."]', ['bob', 'me', 'l1']),
+ ('a[href*="."]', ['bob', 'me']),
+ ('link[href*="."]', ['l1']),
+ ('div[id*="n"]', ['main', 'inner']),
+ ('div[id*="nn"]', ['inner']),
+ )
+
+ def test_attribute_exact_or_hypen(self):
+ self.assertSelectMultiple(
+ ('p[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
+ ('[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
+ ('p[lang|="fr"]', ['lang-fr']),
+ ('p[lang|="gb"]', []),
+ )
+
+ def test_attribute_exists(self):
+ self.assertSelectMultiple(
+ ('[rel]', ['l1', 'bob', 'me']),
+ ('link[rel]', ['l1']),
+ ('a[rel]', ['bob', 'me']),
+ ('[lang]', ['lang-en', 'lang-en-gb', 'lang-en-us', 'lang-fr']),
+ ('p[class]', ['p1', 'pmulti']),
+ ('[blah]', []),
+ ('p[blah]', []),
+ )
+
+ def test_nth_of_type(self):
+ # Try to select first paragraph
+ els = self.soup.select('div#inner p:nth-of-type(1)')
+ self.assertEqual(len(els), 1)
+ self.assertEqual(els[0].string, u'Some text')
+
+ # Try to select third paragraph
+ els = self.soup.select('div#inner p:nth-of-type(3)')
+ self.assertEqual(len(els), 1)
+ self.assertEqual(els[0].string, u'Another')
+
+ # Try to select (non-existent!) fourth paragraph
+ els = self.soup.select('div#inner p:nth-of-type(4)')
+ self.assertEqual(len(els), 0)
+
+ # Pass in an invalid value.
+ self.assertRaises(
+ ValueError, self.soup.select, 'div p:nth-of-type(0)')
+
+ def test_nth_of_type_direct_descendant(self):
+ els = self.soup.select('div#inner > p:nth-of-type(1)')
+ self.assertEqual(len(els), 1)
+ self.assertEqual(els[0].string, u'Some text')
+
+ def test_id_child_selector_nth_of_type(self):
+ self.assertSelects('#inner > p:nth-of-type(2)', ['p1'])
+
+ def test_select_on_element(self):
+ # Other tests operate on the tree; this operates on an element
+ # within the tree.
+ inner = self.soup.find("div", id="main")
+ selected = inner.select("div")
+ # The <div id="inner"> tag was selected. The <div id="footer">
+ # tag was not.
+ self.assertSelectsIDs(selected, ['inner'])
+
+ def test_overspecified_child_id(self):
+ self.assertSelects(".fancy #inner", ['inner'])
+ self.assertSelects(".normal #inner", [])
+
+ def test_adjacent_sibling_selector(self):
+ self.assertSelects('#p1 + h2', ['header2'])
+ self.assertSelects('#p1 + h2 + p', ['pmulti'])
+ self.assertSelects('#p1 + #header2 + .class1', ['pmulti'])
+ self.assertEqual([], self.soup.select('#p1 + p'))
+
+ def test_general_sibling_selector(self):
+ self.assertSelects('#p1 ~ h2', ['header2', 'header3'])
+ self.assertSelects('#p1 ~ #header2', ['header2'])
+ self.assertSelects('#p1 ~ h2 + a', ['me'])
+ self.assertSelects('#p1 ~ h2 + [rel="me"]', ['me'])
+ self.assertEqual([], self.soup.select('#inner ~ h2'))
+
+ def test_dangling_combinator(self):
+ self.assertRaises(ValueError, self.soup.select, 'h1 >')
+
+ def test_sibling_combinator_wont_select_same_tag_twice(self):
+ self.assertSelects('p[lang] ~ p', ['lang-en-gb', 'lang-en-us', 'lang-fr'])
diff --git a/bitbake/lib/progressbar.py b/bitbake/lib/progressbar.py
index b668647a36..114cdc16ba 100644
--- a/bitbake/lib/progressbar.py
+++ b/bitbake/lib/progressbar.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
#
# progressbar - Text progressbar library for python.
diff --git a/bitbake/lib/prserv/db.py b/bitbake/lib/prserv/db.py
index 49f36da1ad..9d6d11526a 100644
--- a/bitbake/lib/prserv/db.py
+++ b/bitbake/lib/prserv/db.py
@@ -19,6 +19,7 @@ class PRTable(object):
def __init__(self, conn, table, nohist):
self.conn = conn
self.nohist = nohist
+ self.dirty = False
if nohist:
self.table = "%s_nohist" % table
else:
@@ -47,6 +48,11 @@ class PRTable(object):
self.conn.commit()
self._execute("BEGIN EXCLUSIVE TRANSACTION")
+ def sync_if_dirty(self):
+ if self.dirty:
+ self.sync()
+ self.dirty = False
+
def _getValueHist(self, version, pkgarch, checksum):
data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
(version, pkgarch, checksum))
@@ -62,6 +68,8 @@ class PRTable(object):
except sqlite3.IntegrityError as exc:
logger.error(str(exc))
+ self.dirty = True
+
data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
(version, pkgarch, checksum))
row=data.fetchone()
@@ -89,6 +97,8 @@ class PRTable(object):
logger.error(str(exc))
self.conn.rollback()
+ self.dirty = True
+
data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
(version, pkgarch, checksum))
row=data.fetchone()
@@ -118,6 +128,8 @@ class PRTable(object):
except sqlite3.IntegrityError as exc:
logger.error(str(exc))
+ self.dirty = True
+
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
(version, pkgarch, checksum))
row = data.fetchone()
@@ -139,6 +151,8 @@ class PRTable(object):
except sqlite3.IntegrityError as exc:
logger.error(str(exc))
+ self.dirty = True
+
data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table,
(version,pkgarch,checksum,value))
row=data.fetchone()
@@ -221,6 +235,7 @@ class PRData(object):
self.connection=sqlite3.connect(self.filename, isolation_level="EXCLUSIVE", check_same_thread = False)
self.connection.row_factory=sqlite3.Row
self.connection.execute("pragma synchronous = off;")
+ self.connection.execute("PRAGMA journal_mode = WAL;")
self._tables={}
def __del__(self):
diff --git a/bitbake/lib/prserv/serv.py b/bitbake/lib/prserv/serv.py
index 1e170cea0a..25eb46a410 100644
--- a/bitbake/lib/prserv/serv.py
+++ b/bitbake/lib/prserv/serv.py
@@ -38,8 +38,17 @@ singleton = None
class PRServer(SimpleXMLRPCServer):
def __init__(self, dbfile, logfile, interface, daemon=True):
''' constructor '''
- SimpleXMLRPCServer.__init__(self, interface,
- logRequests=False, allow_none=True)
+ import socket
+ try:
+ SimpleXMLRPCServer.__init__(self, interface,
+ logRequests=False, allow_none=True)
+ except socket.error:
+ ip=socket.gethostbyname(interface[0])
+ port=interface[1]
+ msg="PR Server unable to bind to %s:%s\n" % (ip, port)
+ sys.stderr.write(msg)
+ raise PRServiceConfigError
+
self.dbfile=dbfile
self.daemon=daemon
self.logfile=logfile
@@ -67,11 +76,19 @@ class PRServer(SimpleXMLRPCServer):
In addition, exception handling is done here.
"""
+ iter_count = 1
+ # With 60 iterations between syncs and a 0.5 second timeout between
+ # iterations, this will sync if dirty every ~30 seconds.
+ iterations_between_sync = 60
+
while True:
(request, client_address) = self.requestqueue.get()
try:
self.finish_request(request, client_address)
self.shutdown_request(request)
+ iter_count = (iter_count + 1) % iterations_between_sync
+ if iter_count == 0:
+ self.table.sync_if_dirty()
except:
self.handle_error(request, client_address)
self.shutdown_request(request)
diff --git a/bitbake/lib/toaster/bldcontrol/admin.py b/bitbake/lib/toaster/bldcontrol/admin.py
new file mode 100644
index 0000000000..fcbe5f5935
--- /dev/null
+++ b/bitbake/lib/toaster/bldcontrol/admin.py
@@ -0,0 +1,8 @@
+from django.contrib import admin
+from django.contrib.admin.filters import RelatedFieldListFilter
+from .models import BuildEnvironment
+
+class BuildEnvironmentAdmin(admin.ModelAdmin):
+ pass
+
+admin.site.register(BuildEnvironment, BuildEnvironmentAdmin)
diff --git a/bitbake/lib/toaster/bldcontrol/bbcontroller.py b/bitbake/lib/toaster/bldcontrol/bbcontroller.py
index 1e58c67fcd..7c27fe110e 100644
--- a/bitbake/lib/toaster/bldcontrol/bbcontroller.py
+++ b/bitbake/lib/toaster/bldcontrol/bbcontroller.py
@@ -25,11 +25,7 @@ import sys
import re
from django.db import transaction
from django.db.models import Q
-from bldcontrol.models import BuildEnvironment, BRLayer, BRVariable, BRTarget
-import subprocess
-
-from toastermain import settings
-
+from bldcontrol.models import BuildEnvironment, BRLayer, BRVariable, BRTarget, BRBitbake
# load Bitbake components
path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
@@ -72,6 +68,10 @@ def getBuildEnvironmentController(**kwargs):
The return object MUST always be a BuildEnvironmentController.
"""
+
+ from localhostbecontroller import LocalhostBEController
+ from sshbecontroller import SSHBEController
+
be = BuildEnvironment.objects.filter(Q(**kwargs))[0]
if be.betype == BuildEnvironment.TYPE_LOCAL:
return LocalhostBEController(be)
@@ -81,6 +81,13 @@ def getBuildEnvironmentController(**kwargs):
raise Exception("FIXME: Implement BEC for type %s" % str(be.betype))
+def _getgitcheckoutdirectoryname(url):
+ """ Utility that returns the last component of a git path as directory
+ """
+ import re
+ components = re.split(r'[:\.\/]', url)
+ return components[-2] if components[-1] == "git" else components[-1]
+
class BuildEnvironmentController(object):
""" BuildEnvironmentController (BEC) is the abstract class that defines the operations that MUST
@@ -110,7 +117,27 @@ class BuildEnvironmentController(object):
self.be = be
self.connection = None
- def startBBServer(self):
+ @staticmethod
+ def _updateBBLayers(bblayerconf, layerlist):
+ conflines = open(bblayerconf, "r").readlines()
+
+ bblayerconffile = open(bblayerconf, "w")
+ skip = 0
+ for i in xrange(len(conflines)):
+ if skip > 0:
+ skip -= 1
+ continue
+ if conflines[i].startswith("# line added by toaster"):
+ skip = 1
+ else:
+ bblayerconffile.write(conflines[i])
+
+ bblayerconffile.write("# line added by toaster build control\nBBLAYERS = \"" + " ".join(layerlist) + "\"")
+ bblayerconffile.close()
+
+
+
+ def startBBServer(self, brbe):
""" Starts a BB server with Toaster toasterui set up to record the builds, an no controlling UI.
After this method executes, self.be bbaddress/bbport MUST point to a running and free server,
and the bbstate MUST be updated to "started".
@@ -123,8 +150,10 @@ class BuildEnvironmentController(object):
self.connection must be none.
"""
- def setLayers(self,ls):
- """ Sets the layer variables in the config file, after validating local layer paths.
+ def setLayers(self, bbs, ls):
+ """ Checks-out bitbake executor and layers from git repositories.
+ Sets the layer variables in the config file, after validating local layer paths.
+ The bitbakes must be a 1-length list of BRBitbake
The layer paths must be in a list of BRLayer object
a word of attention: by convention, the first layer for any build will be poky!
@@ -132,12 +161,12 @@ class BuildEnvironmentController(object):
raise Exception("Must override setLayers")
- def getBBController(self):
+ def getBBController(self, brbe):
""" returns a BitbakeController to an already started server; this is the point where the server
starts if needed; or reconnects to the server if we can
"""
if not self.connection:
- self.startBBServer()
+ self.startBBServer(brbe)
self.be.lock = BuildEnvironment.LOCK_RUNNING
self.be.save()
@@ -171,146 +200,3 @@ class ShellCmdException(Exception):
class BuildSetupException(Exception):
pass
-class LocalhostBEController(BuildEnvironmentController):
- """ Implementation of the BuildEnvironmentController for the localhost;
- this controller manages the default build directory,
- the server setup and system start and stop for the localhost-type build environment
-
- """
-
- def __init__(self, be):
- super(LocalhostBEController, self).__init__(be)
- self.dburl = settings.getDATABASE_URL()
- self.pokydirname = None
-
- def _shellcmd(self, command, cwd = None):
- if cwd is None:
- cwd = self.be.sourcedir
-
- p = subprocess.Popen(command, cwd = cwd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- (out,err) = p.communicate()
- if p.returncode:
- if len(err) == 0:
- err = "command: %s \n%s" % (command, out)
- else:
- err = "command: %s \n%s" % (command, err)
- raise ShellCmdException(err)
- else:
- return out
-
- def _createdirpath(self, path):
- from os.path import dirname as DN
- if not os.path.exists(DN(path)):
- self._createdirpath(DN(path))
- if not os.path.exists(path):
- os.mkdir(path, 0755)
-
- def _startBE(self):
- assert self.pokydirname and os.path.exists(self.pokydirname)
- self._createdirpath(self.be.builddir)
- self._shellcmd("bash -c \"source %s/oe-init-build-env %s\"" % (self.pokydirname, self.be.builddir))
-
- def startBBServer(self):
- assert self.pokydirname and os.path.exists(self.pokydirname)
- print self._shellcmd("bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb && sleep 1\"" % (self.pokydirname, self.be.builddir, self.dburl))
- # FIXME unfortunate sleep 1 - we need to make sure that bbserver is started and the toaster ui is connected
- # but since they start async without any return, we just wait a bit
- print "Started server"
- assert self.be.sourcedir and os.path.exists(self.be.builddir)
- self.be.bbaddress = "localhost"
- self.be.bbport = "8200"
- self.be.bbstate = BuildEnvironment.SERVER_STARTED
- self.be.save()
-
- def stopBBServer(self):
- assert self.be.sourcedir
- print self._shellcmd("bash -c \"source %s/oe-init-build-env %s && %s source toaster stop\"" %
- (self.be.sourcedir, self.be.builddir, (lambda: "" if self.be.bbtoken is None else "BBTOKEN=%s" % self.be.bbtoken)()))
- self.be.bbstate = BuildEnvironment.SERVER_STOPPED
- self.be.save()
- print "Stopped server"
-
- def setLayers(self, layers):
- """ a word of attention: by convention, the first layer for any build will be poky! """
-
- assert self.be.sourcedir is not None
- # set layers in the layersource
-
- # 1. get a list of repos, and map dirpaths for each layer
- gitrepos = {}
- for layer in layers:
- if not layer.giturl in gitrepos:
- gitrepos[layer.giturl] = []
- gitrepos[layer.giturl].append( (layer.name, layer.dirpath, layer.commit))
- for giturl in gitrepos.keys():
- commitid = gitrepos[giturl][0][2]
- for e in gitrepos[giturl]:
- if commitid != e[2]:
- raise BuildSetupException("More than one commit per git url, unsupported configuration")
-
- def _getgitdirectoryname(url):
- import re
- components = re.split(r'[\.\/]', url)
- return components[-2] if components[-1] == "git" else components[-1]
-
- layerlist = []
-
- # 2. checkout the repositories
- for giturl in gitrepos.keys():
- localdirname = os.path.join(self.be.sourcedir, _getgitdirectoryname(giturl))
- print "DEBUG: giturl checking out in current directory", localdirname
-
- # make sure our directory is a git repository
- if os.path.exists(localdirname):
- if not giturl in self._shellcmd("git remote -v", localdirname):
- raise BuildSetupException("Existing git repository at %s, but with different remotes (not '%s'). Aborting." % (localdirname, giturl))
- else:
- self._shellcmd("git clone \"%s\" \"%s\"" % (giturl, localdirname))
- # checkout the needed commit
- commit = gitrepos[giturl][0][2]
- self._shellcmd("git fetch --all && git checkout \"%s\"" % commit , localdirname)
- print "DEBUG: checked out commit ", commit, "to", localdirname
-
- # if this is the first checkout, take the localdirname as poky dir
- if self.pokydirname is None:
- print "DEBUG: selected poky dir name", localdirname
- self.pokydirname = localdirname
-
- # verify our repositories
- for name, dirpath, commit in gitrepos[giturl]:
- localdirpath = os.path.join(localdirname, dirpath)
- if not os.path.exists(localdirpath):
- raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Aborting." % (localdirpath, giturl, commit))
-
- layerlist.append(localdirpath)
-
- print "DEBUG: current layer list ", layerlist
-
- # 3. configure the build environment, so we have a conf/bblayers.conf
- assert self.pokydirname is not None
- self._startBE()
-
- # 4. update the bblayers.conf
- bblayerconf = os.path.join(self.be.builddir, "conf/bblayers.conf")
- if not os.path.exists(bblayerconf):
- raise BuildSetupException("BE is not consistent: bblayers.conf file missing at %s" % bblayerconf)
-
- conflines = open(bblayerconf, "r").readlines()
-
- bblayerconffile = open(bblayerconf, "w")
- for i in xrange(len(conflines)):
- if conflines[i].startswith("# line added by toaster"):
- i += 2
- else:
- bblayerconffile.write(conflines[i])
-
- bblayerconffile.write("\n# line added by toaster build control\nBBLAYERS = \"" + " ".join(layerlist) + "\"")
- bblayerconffile.close()
-
- return True
-
- def release(self):
- assert self.be.sourcedir and os.path.exists(self.be.builddir)
- import shutil
- shutil.rmtree(os.path.join(self.be.sourcedir, "build"))
- assert not os.path.exists(self.be.builddir)
diff --git a/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py b/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py
new file mode 100644
index 0000000000..22d31e33f2
--- /dev/null
+++ b/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py
@@ -0,0 +1,219 @@
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# BitBake Toaster Implementation
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+
+import os
+import sys
+import re
+from django.db import transaction
+from django.db.models import Q
+from bldcontrol.models import BuildEnvironment, BRLayer, BRVariable, BRTarget, BRBitbake
+import subprocess
+
+from toastermain import settings
+
+from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException, _getgitcheckoutdirectoryname
+
+import logging
+logger = logging.getLogger("toaster")
+
+
+class LocalhostBEController(BuildEnvironmentController):
+ """ Implementation of the BuildEnvironmentController for the localhost;
+ this controller manages the default build directory,
+ the server setup and system start and stop for the localhost-type build environment
+
+ """
+
+ def __init__(self, be):
+ super(LocalhostBEController, self).__init__(be)
+ self.dburl = settings.getDATABASE_URL()
+ self.pokydirname = None
+ self.islayerset = False
+
+ def _shellcmd(self, command, cwd = None):
+ if cwd is None:
+ cwd = self.be.sourcedir
+
+ p = subprocess.Popen(command, cwd = cwd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (out,err) = p.communicate()
+ if p.returncode:
+ if len(err) == 0:
+ err = "command: %s \n%s" % (command, out)
+ else:
+ err = "command: %s \n%s" % (command, err)
+ logger.debug("localhostbecontroller: shellcmd error %s" % err)
+ raise ShellCmdException(err)
+ else:
+ logger.debug("localhostbecontroller: shellcmd success")
+ return out
+
+ def _createdirpath(self, path):
+ from os.path import dirname as DN
+ if path == "":
+ raise Exception("Invalid path creation specified.")
+ if not os.path.exists(DN(path)):
+ self._createdirpath(DN(path))
+ if not os.path.exists(path):
+ os.mkdir(path, 0755)
+
+ def _setupBE(self):
+ assert self.pokydirname and os.path.exists(self.pokydirname)
+ self._createdirpath(self.be.builddir)
+ self._shellcmd("bash -c \"source %s/oe-init-build-env %s\"" % (self.pokydirname, self.be.builddir))
+
+ def startBBServer(self, brbe):
+ assert self.pokydirname and os.path.exists(self.pokydirname)
+ assert self.islayerset
+
+ try:
+ os.remove(os.path.join(self.be.builddir, "toaster_ui.log"))
+ except OSError as e:
+ import errno
+ if e.errno != errno.ENOENT:
+ raise
+
+ cmd = "bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb brbe=%s\"" % (self.pokydirname, self.be.builddir, self.dburl, brbe)
+ port = "-1"
+ for i in self._shellcmd(cmd).split("\n"):
+ if i.startswith("Bitbake server address"):
+ port = i.split(" ")[-1]
+ logger.debug("localhostbecontroller: Found bitbake server port %s" % port)
+
+ def _toaster_ui_started(filepath):
+ if not os.path.exists(filepath):
+ return False
+ with open(filepath, "r") as f:
+ for line in f:
+ if line.startswith("NOTE: ToasterUI waiting for events"):
+ return True
+ return False
+
+ while not _toaster_ui_started(os.path.join(self.be.builddir, "toaster_ui.log")):
+ import time
+ logger.debug("localhostbecontroller: Waiting bitbake server to start")
+ time.sleep(0.5)
+
+ logger.debug("localhostbecontroller: Started bitbake server")
+ assert self.be.sourcedir and os.path.exists(self.be.builddir)
+ self.be.bbaddress = "localhost"
+ self.be.bbport = port
+ self.be.bbstate = BuildEnvironment.SERVER_STARTED
+ self.be.save()
+
+ def stopBBServer(self):
+ assert self.pokydirname and os.path.exists(self.pokydirname)
+ assert self.islayerset
+ self._shellcmd("bash -c \"source %s/oe-init-build-env %s && %s source toaster stop\"" %
+ (self.pokydirname, self.be.builddir, (lambda: "" if self.be.bbtoken is None else "BBTOKEN=%s" % self.be.bbtoken)()))
+ self.be.bbstate = BuildEnvironment.SERVER_STOPPED
+ self.be.save()
+ logger.debug("localhostbecontroller: Stopped bitbake server")
+
+ def setLayers(self, bitbakes, layers):
+ """ a word of attention: by convention, the first layer for any build will be poky! """
+
+ assert self.be.sourcedir is not None
+ assert len(bitbakes) == 1
+ # set layers in the layersource
+
+ # 1. get a list of repos, and map dirpaths for each layer
+ gitrepos = {}
+ gitrepos[bitbakes[0].giturl] = []
+ gitrepos[bitbakes[0].giturl].append( ("bitbake", bitbakes[0].dirpath, bitbakes[0].commit) )
+
+ for layer in layers:
+ # we don't process local URLs
+ if layer.giturl.startswith("file://"):
+ continue
+ if not layer.giturl in gitrepos:
+ gitrepos[layer.giturl] = []
+ gitrepos[layer.giturl].append( (layer.name, layer.dirpath, layer.commit))
+ for giturl in gitrepos.keys():
+ commitid = gitrepos[giturl][0][2]
+ for e in gitrepos[giturl]:
+ if commitid != e[2]:
+ import pprint
+ raise BuildSetupException("More than one commit per git url, unsupported configuration: \n%s" % pprint.pformat(gitrepos))
+
+
+ logger.debug("localhostbecontroller, our git repos are %s" % gitrepos)
+ layerlist = []
+
+ # 2. checkout the repositories
+ for giturl in gitrepos.keys():
+ localdirname = os.path.join(self.be.sourcedir, _getgitcheckoutdirectoryname(giturl))
+ logger.debug("localhostbecontroller: giturl %s checking out in current directory %s" % (giturl, localdirname))
+
+ # make sure our directory is a git repository
+ if os.path.exists(localdirname):
+ if not giturl in self._shellcmd("git remote -v", localdirname):
+ raise BuildSetupException("Existing git repository at %s, but with different remotes (not '%s'). Aborting." % (localdirname, giturl))
+ else:
+ self._shellcmd("git clone \"%s\" \"%s\"" % (giturl, localdirname))
+ # checkout the needed commit
+ commit = gitrepos[giturl][0][2]
+
+ # branch magic name "HEAD" will inhibit checkout
+ if commit != "HEAD":
+ logger.debug("localhostbecontroller: checking out commit %s to %s " % (commit, localdirname))
+ self._shellcmd("git fetch --all && git checkout \"%s\"" % commit , localdirname)
+
+ # take the localdirname as poky dir if we can find the oe-init-build-env
+ if self.pokydirname is None and os.path.exists(os.path.join(localdirname, "oe-init-build-env")):
+ logger.debug("localhostbecontroller: selected poky dir name %s" % localdirname)
+ self.pokydirname = localdirname
+
+ # make sure we have a working bitbake
+ if not os.path.exists(os.path.join(self.pokydirname, 'bitbake')):
+ logger.debug("localhostbecontroller: checking bitbake into the poky dirname %s " % self.pokydirname)
+ self._shellcmd("git clone -b \"%s\" \"%s\" \"%s\" " % (bitbakes[0].commit, bitbakes[0].giturl, os.path.join(self.pokydirname, 'bitbake')))
+
+ # verify our repositories
+ for name, dirpath, commit in gitrepos[giturl]:
+ localdirpath = os.path.join(localdirname, dirpath)
+ if not os.path.exists(localdirpath):
+ raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Aborting." % (localdirpath, giturl, commit))
+
+ if name != "bitbake":
+ layerlist.append(localdirpath.rstrip("/"))
+
+ logger.debug("localhostbecontroller: current layer list %s " % layerlist)
+
+ # 3. configure the build environment, so we have a conf/bblayers.conf
+ assert self.pokydirname is not None
+ self._setupBE()
+
+ # 4. update the bblayers.conf
+ bblayerconf = os.path.join(self.be.builddir, "conf/bblayers.conf")
+ if not os.path.exists(bblayerconf):
+ raise BuildSetupException("BE is not consistent: bblayers.conf file missing at %s" % bblayerconf)
+
+ BuildEnvironmentController._updateBBLayers(bblayerconf, layerlist)
+
+ self.islayerset = True
+ return True
+
+ def release(self):
+ assert self.be.sourcedir and os.path.exists(self.be.builddir)
+ import shutil
+ shutil.rmtree(os.path.join(self.be.sourcedir, "build"))
+ assert not os.path.exists(self.be.builddir)
diff --git a/bitbake/lib/toaster/bldcontrol/management/commands/checksettings.py b/bitbake/lib/toaster/bldcontrol/management/commands/checksettings.py
index a91dd150ef..55f118cf77 100644
--- a/bitbake/lib/toaster/bldcontrol/management/commands/checksettings.py
+++ b/bitbake/lib/toaster/bldcontrol/management/commands/checksettings.py
@@ -1,38 +1,150 @@
from django.core.management.base import NoArgsCommand, CommandError
from django.db import transaction
-from orm.models import Build
from bldcontrol.bbcontroller import getBuildEnvironmentController, ShellCmdException
-from bldcontrol.models import BuildRequest, BuildEnvironment
+from bldcontrol.models import BuildRequest, BuildEnvironment, BRError
+from orm.models import ToasterSetting
import os
+def DN(path):
+ if path is None:
+ return ""
+ else:
+ return os.path.dirname(path)
+
+
class Command(NoArgsCommand):
args = ""
- help = "Verifies thid %dthe configured settings are valid and usable, or prompts the user to fix the settings."
+ help = "Verifies that the configured settings are valid and usable, or prompts the user to fix the settings."
+
+
+ def _find_first_path_for_file(self, startdirectory, filename, level = 0):
+ if level < 0:
+ return None
+ dirs = []
+ for i in os.listdir(startdirectory):
+ j = os.path.join(startdirectory, i)
+ if os.path.isfile(j):
+ if i == filename:
+ return startdirectory
+ elif os.path.isdir(j):
+ dirs.append(j)
+ for j in dirs:
+ ret = self._find_first_path_for_file(j, filename, level - 1)
+ if ret is not None:
+ return ret
+ return None
+
+ def _recursive_list_directories(self, startdirectory, level = 0):
+ if level < 0:
+ return []
+ dirs = []
+ try:
+ for i in os.listdir(startdirectory):
+ j = os.path.join(startdirectory, i)
+ if os.path.isdir(j):
+ dirs.append(j)
+ except OSError:
+ pass
+ for j in dirs:
+ dirs = dirs + self._recursive_list_directories(j, level - 1)
+ return dirs
+
+
+ def _get_suggested_sourcedir(self, be):
+ if be.betype != BuildEnvironment.TYPE_LOCAL:
+ return ""
+ return DN(DN(DN(self._find_first_path_for_file(self.guesspath, "toasterconf.json", 4))))
+
+ def _get_suggested_builddir(self, be):
+ if be.betype != BuildEnvironment.TYPE_LOCAL:
+ return ""
+ return DN(self._find_first_path_for_file(DN(self.guesspath), "bblayers.conf", 4))
+
def handle(self, **options):
+ self.guesspath = DN(DN(DN(DN(DN(DN(DN(__file__)))))))
+ # refuse to start if we have no build environments
+ while BuildEnvironment.objects.count() == 0:
+ print(" !! No build environments found. Toaster needs at least one build environment in order to be able to run builds.\n" +
+ "You can manually define build environments in the database table bldcontrol_buildenvironment.\n" +
+ "Or Toaster can define a simple localhost-based build environment for you.")
+
+ i = raw_input(" -- Do you want to create a basic localhost build environment ? (Y/n) ");
+ if not len(i) or i.startswith("y") or i.startswith("Y"):
+ BuildEnvironment.objects.create(pk = 1, betype = 0)
+ else:
+ raise Exception("Toaster cannot start without build environments. Aborting.")
+
+
# we make sure we have builddir and sourcedir for all defined build envionments
for be in BuildEnvironment.objects.all():
def _verify_be():
is_changed = False
print("Verifying the Build Environment type %s id %d." % (be.get_betype_display(), be.pk))
if len(be.sourcedir) == 0:
- be.sourcedir = raw_input(" -- sourcedir may not be empty:")
+ suggesteddir = self._get_suggested_sourcedir(be)
+ be.sourcedir = raw_input(" -- Layer sources checkout directory may not be empty [guessed \"%s\"]:" % suggesteddir)
+ if len(be.sourcedir) == 0 and len(suggesteddir) > 0:
+ be.sourcedir = suggesteddir
is_changed = True
+
if not be.sourcedir.startswith("/"):
- be.sourcedir = raw_input(" -- sourcedir must be an absolute path:")
+ be.sourcedir = raw_input(" -- Layer sources checkout directory must be an absolute path:")
is_changed = True
+
if len(be.builddir) == 0:
- be.builddir = raw_input(" -- builddir may not be empty:")
+ suggesteddir = self._get_suggested_builddir(be)
+ be.builddir = raw_input(" -- Build directory may not be empty [guessed \"%s\"]:" % suggesteddir)
+ if len(be.builddir) == 0 and len(suggesteddir) > 0:
+ be.builddir = suggesteddir
is_changed = True
+
if not be.builddir.startswith("/"):
- be.builddir = raw_input(" -- builddir must be an absolute path:")
+ be.builddir = raw_input(" -- Build directory must be an absolute path:")
is_changed = True
+
+
if is_changed:
- print "saved"
+ print "Build configuration saved"
be.save()
+
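+ # for local build environments, look for a .templateconf under the new sourcedir; it exports TEMPLATECONF, which points at a directory that may hold an importable toasterconf.json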
+ if is_changed and be.betype == BuildEnvironment.TYPE_LOCAL:
+ for dirname in self._recursive_list_directories(be.sourcedir,2):
+ if os.path.exists(os.path.join(dirname, ".templateconf")):
+ import subprocess
+ conffilepath, error = subprocess.Popen('bash -c ". '+os.path.join(dirname, ".templateconf")+'; echo \"\$TEMPLATECONF\""', shell=True, stdout=subprocess.PIPE).communicate()
+ conffilepath = os.path.join(conffilepath.strip(), "toasterconf.json")
+ candidatefilepath = os.path.join(dirname, conffilepath)
+ if os.path.exists(candidatefilepath):
+ i = raw_input(" -- Do you want to import basic layer configuration from \"%s\" ? (y/N):" % candidatefilepath)
+ if len(i) and i.upper()[0] == 'Y':
+ from loadconf import Command as LoadConfigCommand
+
+ LoadConfigCommand()._import_layer_config(candidatefilepath)
+ # we run lsupdates after config update
+ print "Layer configuration imported. Updating information from the layer source, please wait."
+ from django.core.management import call_command
+ call_command("lsupdates")
+
+ # we don't look for any other config files
+ return is_changed
+
return is_changed
while (_verify_be()):
pass
- return 0
+ # verify that default settings are there
+ if ToasterSetting.objects.filter(name = 'DEFAULT_RELEASE').count() != 1:
+ ToasterSetting.objects.filter(name = 'DEFAULT_RELEASE').delete()
+ ToasterSetting.objects.get_or_create(name = 'DEFAULT_RELEASE', value = '')
+
+ # we are just starting up; there must be no builds in progress and no build environments left locked
+ for b in BuildRequest.objects.filter(state = BuildRequest.REQ_INPROGRESS):
+ BRError.objects.create(req = b, errtype = "toaster", errmsg = "Toaster found this build IN PROGRESS while Toaster started up. This is an inconsistent state, and the build was marked as failed")
+
+ BuildRequest.objects.filter(state = BuildRequest.REQ_INPROGRESS).update(state = BuildRequest.REQ_FAILED)
+
+ BuildEnvironment.objects.update(lock = BuildEnvironment.LOCK_FREE)
+
+ return 0
diff --git a/bitbake/lib/toaster/bldcontrol/management/commands/loadconf.py b/bitbake/lib/toaster/bldcontrol/management/commands/loadconf.py
new file mode 100644
index 0000000000..2257a7143b
--- /dev/null
+++ b/bitbake/lib/toaster/bldcontrol/management/commands/loadconf.py
@@ -0,0 +1,174 @@
+from django.core.management.base import BaseCommand, CommandError
+from orm.models import LayerSource, ToasterSetting, Branch, Layer, Layer_Version
+from orm.models import BitbakeVersion, Release, ReleaseDefaultLayer, ReleaseLayerSourcePriority
+import os
+
+from checksettings import DN
+
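+# collapse '.' and '..' components in a path, e.g. "/home/../ana" becomes "/ana"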
+def _reduce_canon_path(path):
+ components = []
+ for c in path.split("/"):
+ if c == "..":
+ del components[-1]
+ elif c == ".":
+ pass
+ else:
+ components.append(c)
+ if len(components) < 2:
+ components.append('')
+ return "/".join(components)
+
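+# map a source type name from the config file (e.g. "layerindex", "local", "imported") to its numeric id in LayerSource.SOURCE_TYPE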
+def _get_id_for_sourcetype(s):
+ for i in LayerSource.SOURCE_TYPE:
+ if s == i[1]:
+ return i[0]
+ raise Exception("Could not find definition for sourcetype " + s)
+
+class Command(BaseCommand):
+ help = "Loads a toasterconf.json file in the database"
+ args = "filepath"
+
+
+
+ def _import_layer_config(self, filepath):
+ if not os.path.exists(filepath) or not os.path.isfile(filepath):
+ raise Exception("Failed to find toaster config file %s ." % filepath)
+
+ import json
+ data = json.loads(open(filepath, "r").read())
+
+ # verify config file validity before updating settings
+ for i in ['bitbake', 'releases', 'defaultrelease', 'config', 'layersources']:
+ assert i in data
+
+ def _read_git_url_from_local_repository(address):
+ url = None
+ # we detect the remote name at runtime
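+ # giturl values of the form "remote:<name>" are resolved by looking up the URL of remote <name> in the repository that contains the config file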
+ import subprocess
+ (remote, remote_name) = address.split(":", 1)
+ cmd = subprocess.Popen("git remote -v", shell=True, cwd = os.path.dirname(filepath), stdout=subprocess.PIPE, stderr = subprocess.PIPE)
+ (out,err) = cmd.communicate()
+ if cmd.returncode != 0:
+ raise Exception("Error while importing layer vcs_url: git error: %s" % err)
+ for line in out.split("\n"):
+ try:
+ (name, path) = line.split("\t", 1)
+ if name == remote_name:
+ url = path.split(" ")[0]
+ break
+ except ValueError:
+ pass
+ if url is None:
+ raise Exception("Error while looking for remote \"%s\" in \"%s\"" % (remote_name, os.path.dirname(filepath)))
+ return url
+
+
+ # import bitbake data
+ for bvi in data['bitbake']:
+ bvo, created = BitbakeVersion.objects.get_or_create(name=bvi['name'])
+ bvo.giturl = bvi['giturl']
+ if bvi['giturl'].startswith("remote:"):
+ bvo.giturl = _read_git_url_from_local_repository(bvi['giturl'])
+ bvo.branch = bvi['branch']
+ bvo.dirpath = bvi['dirpath']
+ bvo.save()
+
+ # set the layer sources
+ for lsi in data['layersources']:
+ assert 'sourcetype' in lsi
+ assert 'apiurl' in lsi
+ assert 'name' in lsi
+ assert 'branches' in lsi
+
+
+ if _get_id_for_sourcetype(lsi['sourcetype']) == LayerSource.TYPE_LAYERINDEX or lsi['apiurl'].startswith("/"):
+ apiurl = lsi['apiurl']
+ else:
+ apiurl = _reduce_canon_path(os.path.join(DN(os.path.abspath(filepath)), lsi['apiurl']))
+
+ assert ((_get_id_for_sourcetype(lsi['sourcetype']) == LayerSource.TYPE_LAYERINDEX) or apiurl.startswith("/")), (lsi['sourcetype'],apiurl)
+
+ try:
+ ls = LayerSource.objects.get(sourcetype = _get_id_for_sourcetype(lsi['sourcetype']), apiurl = apiurl)
+ except LayerSource.DoesNotExist:
+ ls = LayerSource.objects.create(
+ name = lsi['name'],
+ sourcetype = _get_id_for_sourcetype(lsi['sourcetype']),
+ apiurl = apiurl
+ )
+
+ layerbranches = []
+ for branchname in lsi['branches']:
+ bo, created = Branch.objects.get_or_create(layer_source = ls, name = branchname)
+ layerbranches.append(bo)
+
+ if 'layers' in lsi:
+ for layerinfo in lsi['layers']:
+ lo, created = Layer.objects.get_or_create(layer_source = ls, name = layerinfo['name'])
+ if layerinfo['local_path'].startswith("/"):
+ lo.local_path = layerinfo['local_path']
+ else:
+ lo.local_path = _reduce_canon_path(os.path.join(ls.apiurl, layerinfo['local_path']))
+
+ if not os.path.exists(lo.local_path):
+ raise Exception("Local layer path %s must exists." % lo.local_path)
+
+ if layerinfo['vcs_url'].startswith("remote:"):
+ lo.vcs_url = _read_git_url_from_local_repository(layerinfo['vcs_url'])
+ else:
+ lo.vcs_url = layerinfo['vcs_url']
+
+ if 'layer_index_url' in layerinfo:
+ lo.layer_index_url = layerinfo['layer_index_url']
+ lo.save()
+
+ for branch in layerbranches:
+ lvo, created = Layer_Version.objects.get_or_create(layer_source = ls,
+ up_branch = branch,
+ commit = branch.name,
+ layer = lo)
+ lvo.dirpath = layerinfo['dirpath']
+ lvo.save()
+ # set releases
+ for ri in data['releases']:
+ bvo = BitbakeVersion.objects.get(name = ri['bitbake'])
+ assert bvo is not None
+
+ ro, created = Release.objects.get_or_create(name = ri['name'], bitbake_version = bvo, branch_name = ri['branch'])
+ ro.description = ri['description']
+ ro.helptext = ri['helptext']
+ ro.save()
+
+ # save layer source priority for release
+ for ls_name in ri['layersourcepriority'].keys():
+ rlspo, created = ReleaseLayerSourcePriority.objects.get_or_create(release = ro, layer_source = LayerSource.objects.get(name=ls_name))
+ rlspo.priority = ri['layersourcepriority'][ls_name]
+ rlspo.save()
+
+ for dli in ri['defaultlayers']:
+ # find layers with the same name
+ ReleaseDefaultLayer.objects.get_or_create( release = ro, layer_name = dli)
+
+ # set default release
+ if ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").count() > 0:
+ ToasterSetting.objects.filter(name = "DEFAULT_RELEASE").update(value = data['defaultrelease'])
+ else:
+ ToasterSetting.objects.create(name = "DEFAULT_RELEASE", value = data['defaultrelease'])
+
+ # set default config variables
+ for configname in data['config']:
+ if ToasterSetting.objects.filter(name = "DEFCONF_" + configname).count() > 0:
+ ToasterSetting.objects.filter(name = "DEFCONF_" + configname).update(value = data['config'][configname])
+ else:
+ ToasterSetting.objects.create(name = "DEFCONF_" + configname, value = data['config'][configname])
+
+
+ def handle(self, *args, **options):
+ if len(args) == 0:
+ raise CommandError("Need a path to the toasterconf.json file")
+ filepath = args[0]
+ self._import_layer_config(filepath)
+
+
+
diff --git a/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py b/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py
index fa8c1a9906..56c989c9c9 100644
--- a/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py
+++ b/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py
@@ -4,6 +4,9 @@ from orm.models import Build
from bldcontrol.bbcontroller import getBuildEnvironmentController, ShellCmdException, BuildSetupException
from bldcontrol.models import BuildRequest, BuildEnvironment, BRError
import os
+import logging
+
+logger = logging.getLogger("toaster")
class Command(NoArgsCommand):
args = ""
@@ -32,6 +35,7 @@ class Command(NoArgsCommand):
# select the build environment and the request to build
br = self._selectBuildRequest()
except IndexError as e:
+ # logger.debug("runbuilds: No build request")
return
try:
bec = self._selectBuildEnvironment()
@@ -39,26 +43,31 @@ class Command(NoArgsCommand):
# we could not find a BEC; postpone the BR
br.state = BuildRequest.REQ_QUEUED
br.save()
+ logger.debug("runbuilds: No build env")
return
- # set up the buid environment with the needed layers
- print "Build %s, Environment %s" % (br, bec.be)
- bec.setLayers(br.brlayer_set.all())
+ logger.debug("runbuilds: starting build %s, environment %s" % (br, bec.be))
+ # let the build request know where it is being executed
+ br.environment = bec.be
+ br.save()
- # get the bb server running
- bbctrl = bec.getBBController()
+ # set up the build environment with the needed layers
+ bec.setLayers(br.brbitbake_set.all(), br.brlayer_set.all())
- # let toasterui that this is a managed build
- bbctrl.setVariable("TOASTER_BRBE", "%d:%d" % (br.pk, bec.be.pk))
+ # get the bb server running with the build req id and build env id
+ bbctrl = bec.getBBController("%d:%d" % (br.pk, bec.be.pk))
# set the build configuration
for variable in br.brvariable_set.all():
bbctrl.setVariable(variable.name, variable.value)
# trigger the build command
- bbctrl.build(list(map(lambda x:x.target, br.brtarget_set.all())))
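+ # pick the last non-empty task specified among the build targets, or pass None when no task was given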
+ task = reduce(lambda x, y: x if len(y)== 0 else y, map(lambda y: y.task, br.brtarget_set.all()))
+ if len(task) == 0:
+ task = None
+ bbctrl.build(list(map(lambda x:x.target, br.brtarget_set.all())), task)
- print "Build launched, exiting"
+ logger.debug("runbuilds: Build launched, exiting")
# disconnect from the server
bbctrl.disconnect()
@@ -66,7 +75,7 @@ class Command(NoArgsCommand):
except Exception as e:
- print " EE Error executing shell command\n", e
+ logger.error("runbuilds: Error executing shell command %s" % e)
traceback.print_exc(e)
BRError.objects.create(req = br,
errtype = str(type(e)),
@@ -78,6 +87,7 @@ class Command(NoArgsCommand):
bec.be.save()
+
def cleanup(self):
from django.utils import timezone
from datetime import timedelta
diff --git a/bitbake/lib/toaster/bldcontrol/migrations/0006_auto__add_brbitbake.py b/bitbake/lib/toaster/bldcontrol/migrations/0006_auto__add_brbitbake.py
new file mode 100644
index 0000000000..74388f8434
--- /dev/null
+++ b/bitbake/lib/toaster/bldcontrol/migrations/0006_auto__add_brbitbake.py
@@ -0,0 +1,128 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Adding model 'BRBitbake'
+ db.create_table(u'bldcontrol_brbitbake', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('req', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['bldcontrol.BuildRequest'], unique=True)),
+ ('giturl', self.gf('django.db.models.fields.CharField')(max_length=254)),
+ ('commit', self.gf('django.db.models.fields.CharField')(max_length=254)),
+ ('dirpath', self.gf('django.db.models.fields.CharField')(max_length=254)),
+ ))
+ db.send_create_signal(u'bldcontrol', ['BRBitbake'])
+
+
+ def backwards(self, orm):
+ # Deleting model 'BRBitbake'
+ db.delete_table(u'bldcontrol_brbitbake')
+
+
+ models = {
+ u'bldcontrol.brbitbake': {
+ 'Meta': {'object_name': 'BRBitbake'},
+ 'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ 'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']", 'unique': 'True'})
+ },
+ u'bldcontrol.brerror': {
+ 'Meta': {'object_name': 'BRError'},
+ 'errmsg': ('django.db.models.fields.TextField', [], {}),
+ 'errtype': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
+ 'traceback': ('django.db.models.fields.TextField', [], {})
+ },
+ u'bldcontrol.brlayer': {
+ 'Meta': {'object_name': 'BRLayer'},
+ 'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ 'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
+ },
+ u'bldcontrol.brtarget': {
+ 'Meta': {'object_name': 'BRTarget'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+ },
+ u'bldcontrol.brvariable': {
+ 'Meta': {'object_name': 'BRVariable'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'bldcontrol.buildenvironment': {
+ 'Meta': {'object_name': 'BuildEnvironment'},
+ 'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ 'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
+ 'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+ 'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
+ 'betype': ('django.db.models.fields.IntegerField', [], {}),
+ 'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
+ },
+ u'bldcontrol.buildrequest': {
+ 'Meta': {'object_name': 'BuildRequest'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']", 'null': 'True'}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
+ },
+ u'orm.bitbakeversion': {
+ 'Meta': {'object_name': 'BitbakeVersion'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.build': {
+ 'Meta': {'object_name': 'Build'},
+ 'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+ 'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
+ 'started_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
+ },
+ u'orm.project': {
+ 'Meta': {'object_name': 'Project'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+ 'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+ }
+ }
+
+ complete_apps = ['bldcontrol'] \ No newline at end of file
diff --git a/bitbake/lib/toaster/bldcontrol/migrations/0007_auto__add_field_buildrequest_environment__chg_field_buildrequest_build.py b/bitbake/lib/toaster/bldcontrol/migrations/0007_auto__add_field_buildrequest_environment__chg_field_buildrequest_build.py
new file mode 100644
index 0000000000..70677a294e
--- /dev/null
+++ b/bitbake/lib/toaster/bldcontrol/migrations/0007_auto__add_field_buildrequest_environment__chg_field_buildrequest_build.py
@@ -0,0 +1,145 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Adding field 'BuildRequest.environment'
+ db.add_column(u'bldcontrol_buildrequest', 'environment',
+ self.gf('django.db.models.fields.related.ForeignKey')(to=orm['bldcontrol.BuildEnvironment'], null=True),
+ keep_default=False)
+
+
+ # Changing field 'BuildRequest.build'
+ db.alter_column(u'bldcontrol_buildrequest', 'build_id', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['orm.Build'], unique=True, null=True))
+ # Adding unique constraint on 'BuildRequest', fields ['build']
+ db.create_unique(u'bldcontrol_buildrequest', ['build_id'])
+
+
+ def backwards(self, orm):
+ # Removing unique constraint on 'BuildRequest', fields ['build']
+ db.delete_unique(u'bldcontrol_buildrequest', ['build_id'])
+
+ # Deleting field 'BuildRequest.environment'
+ db.delete_column(u'bldcontrol_buildrequest', 'environment_id')
+
+
+ # Changing field 'BuildRequest.build'
+ db.alter_column(u'bldcontrol_buildrequest', 'build_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Build'], null=True))
+
+ models = {
+ u'bldcontrol.brbitbake': {
+ 'Meta': {'object_name': 'BRBitbake'},
+ 'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ 'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']", 'unique': 'True'})
+ },
+ u'bldcontrol.brerror': {
+ 'Meta': {'object_name': 'BRError'},
+ 'errmsg': ('django.db.models.fields.TextField', [], {}),
+ 'errtype': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
+ 'traceback': ('django.db.models.fields.TextField', [], {})
+ },
+ u'bldcontrol.brlayer': {
+ 'Meta': {'object_name': 'BRLayer'},
+ 'commit': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ 'giturl': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"})
+ },
+ u'bldcontrol.brtarget': {
+ 'Meta': {'object_name': 'BRTarget'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+ },
+ u'bldcontrol.brvariable': {
+ 'Meta': {'object_name': 'BRVariable'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'req': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildRequest']"}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'bldcontrol.buildenvironment': {
+ 'Meta': {'object_name': 'BuildEnvironment'},
+ 'address': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
+ 'bbaddress': ('django.db.models.fields.CharField', [], {'max_length': '254', 'blank': 'True'}),
+ 'bbport': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+ 'bbstate': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'bbtoken': ('django.db.models.fields.CharField', [], {'max_length': '126', 'blank': 'True'}),
+ 'betype': ('django.db.models.fields.IntegerField', [], {}),
+ 'builddir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'lock': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'sourcedir': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
+ },
+ u'bldcontrol.buildrequest': {
+ 'Meta': {'object_name': 'BuildRequest'},
+ 'build': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['orm.Build']", 'unique': 'True', 'null': 'True'}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ 'environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bldcontrol.BuildEnvironment']", 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'state': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
+ },
+ u'orm.bitbakeversion': {
+ 'Meta': {'object_name': 'BitbakeVersion'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.build': {
+ 'Meta': {'object_name': 'Build'},
+ 'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+ 'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
+ 'started_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
+ },
+ u'orm.project': {
+ 'Meta': {'object_name': 'Project'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+ 'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+ },
+ u'orm.release': {
+ 'Meta': {'object_name': 'Release'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ }
+ }
+
+ complete_apps = ['bldcontrol'] \ No newline at end of file
diff --git a/bitbake/lib/toaster/bldcontrol/models.py b/bitbake/lib/toaster/bldcontrol/models.py
index 8c271ffa94..cab4463647 100644
--- a/bitbake/lib/toaster/bldcontrol/models.py
+++ b/bitbake/lib/toaster/bldcontrol/models.py
@@ -40,6 +40,50 @@ class BuildEnvironment(models.Model):
updated = models.DateTimeField(auto_now = True)
+ def get_artifact_type(self, path):
+ if self.betype == BuildEnvironment.TYPE_LOCAL:
+ try:
+ import magic
+
+ # fair warning: this is a mess; there are multiple competing and incompatible
+ # magic modules floating around, so we try some of the most common combinations
+
+ try: # we try ubuntu's python-magic 5.4
+ m = magic.open(magic.MAGIC_MIME_TYPE)
+ m.load()
+ return m.file(path)
+ except AttributeError:
+ pass
+
+ try: # we try python-magic 0.4.6
+ m = magic.Magic(magic.MAGIC_MIME)
+ return m.from_file(path)
+ except AttributeError:
+ pass
+
+ try: # we try pip filemagic 1.6
+ m = magic.Magic(flags=magic.MAGIC_MIME_TYPE)
+ return m.id_filename(path)
+ except AttributeError:
+ pass
+
+ return "binary/octet-stream"
+ except ImportError:
+ return "binary/octet-stream"
+ raise Exception("FIXME: artifact type not implemented for build environment type %s" % be.get_betype_display())
+
+
+ def get_artifact(self, path):
+ if self.betype == BuildEnvironment.TYPE_LOCAL:
+ return open(path, "r")
+ raise Exception("FIXME: artifact download not implemented for build environment type %s" % be.get_betype_display())
+
+ def has_artifact(self, path):
+ import os
+ if self.betype == BuildEnvironment.TYPE_LOCAL:
+ return os.path.exists(path)
+ raise Exception("FIXME: has artifact not implemented for build environment type %s" % self.get_betype_display())
+
# a BuildRequest is a request that the scheduler will build using a BuildEnvironment
# the build request queue is the table itself, ordered by state
@@ -49,6 +93,7 @@ class BuildRequest(models.Model):
REQ_INPROGRESS = 2
REQ_COMPLETED = 3
REQ_FAILED = 4
+ REQ_DELETED = 5
REQUEST_STATE = (
(REQ_CREATED, "created"),
@@ -56,10 +101,12 @@ class BuildRequest(models.Model):
(REQ_INPROGRESS, "in progress"),
(REQ_COMPLETED, "completed"),
(REQ_FAILED, "failed"),
+ (REQ_DELETED, "deleted"),
)
project = models.ForeignKey(Project)
- build = models.ForeignKey(Build, null = True) # TODO: toasterui should set this when Build is created
+ build = models.OneToOneField(Build, null = True) # TODO: toasterui should set this when Build is created
+ environment = models.ForeignKey(BuildEnvironment, null = True)
state = models.IntegerField(choices = REQUEST_STATE, default = REQ_CREATED)
created = models.DateTimeField(auto_now_add = True)
updated = models.DateTimeField(auto_now = True)
@@ -75,6 +122,11 @@ class BRLayer(models.Model):
commit = models.CharField(max_length = 254)
dirpath = models.CharField(max_length = 254)
+class BRBitbake(models.Model):
+ req = models.ForeignKey(BuildRequest, unique = True) # only one bitbake for a request
+ giturl = models.CharField(max_length =254)
+ commit = models.CharField(max_length = 254)
+ dirpath = models.CharField(max_length = 254)
class BRVariable(models.Model):
req = models.ForeignKey(BuildRequest)
diff --git a/bitbake/lib/toaster/bldcontrol/sshbecontroller.py b/bitbake/lib/toaster/bldcontrol/sshbecontroller.py
new file mode 100644
index 0000000000..45e15392e5
--- /dev/null
+++ b/bitbake/lib/toaster/bldcontrol/sshbecontroller.py
@@ -0,0 +1,209 @@
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# BitBake Toaster Implementation
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+
+import sys
+import re
+from django.db import transaction
+from django.db.models import Q
+from bldcontrol.models import BuildEnvironment, BRLayer, BRVariable, BRTarget, BRBitbake
+import subprocess
+
+from toastermain import settings
+
+from bbcontroller import BuildEnvironmentController, ShellCmdException, BuildSetupException, _getgitcheckoutdirectoryname
+
+def DN(path):
+ return "/".join(path.split("/")[0:-1])
+
+class SSHBEController(BuildEnvironmentController):
+ """ Implementation of the BuildEnvironmentController for the localhost;
+ this controller manages the default build directory,
+ the server setup and system start and stop for the localhost-type build environment
+
+ """
+
+ def __init__(self, be):
+ super(SSHBEController, self).__init__(be)
+ self.dburl = settings.getDATABASE_URL()
+ self.pokydirname = None
+ self.islayerset = False
+
+ def _shellcmd(self, command, cwd = None):
+ if cwd is None:
+ cwd = self.be.sourcedir
+
+ p = subprocess.Popen("ssh %s 'cd %s && %s'" % (self.be.address, cwd, command), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ (out,err) = p.communicate()
+ if p.returncode:
+ if len(err) == 0:
+ err = "command: %s \n%s" % (command, out)
+ else:
+ err = "command: %s \n%s" % (command, err)
+ raise ShellCmdException(err)
+ else:
+ return out.strip()
+
+ def _pathexists(self, path):
+ try:
+ self._shellcmd("test -e \"%s\"" % path)
+ return True
+ except ShellCmdException as e:
+ return False
+
+ def _pathcreate(self, path):
+ self._shellcmd("mkdir -p \"%s\"" % path)
+
+ def _setupBE(self):
+ assert self.pokydirname and self._pathexists(self.pokydirname)
+ self._pathcreate(self.be.builddir)
+ self._shellcmd("bash -c \"source %s/oe-init-build-env %s\"" % (self.pokydirname, self.be.builddir))
+
+ def startBBServer(self, brbe):
+ assert self.pokydirname and self._pathexists(self.pokydirname)
+ assert self.islayerset
+ cmd = self._shellcmd("bash -c \"source %s/oe-init-build-env %s && DATABASE_URL=%s source toaster start noweb brbe=%s\"" % (self.pokydirname, self.be.builddir, self.dburl, brbe))
+
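+ # parse the bitbake server port out of the 'toaster start' output; -1 means it was not found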
+ port = "-1"
+ for i in cmd.split("\n"):
+ if i.startswith("Bitbake server address"):
+ port = i.split(" ")[-1]
+ print "Found bitbake server port ", port
+
+
+ assert self.be.sourcedir and self._pathexists(self.be.builddir)
+ self.be.bbaddress = self.be.address.split("@")[-1]
+ self.be.bbport = port
+ self.be.bbstate = BuildEnvironment.SERVER_STARTED
+ self.be.save()
+
+ def stopBBServer(self):
+ assert self.pokydirname and self._pathexists(self.pokydirname)
+ assert self.islayerset
+ print self._shellcmd("bash -c \"source %s/oe-init-build-env %s && %s source toaster stop\"" %
+ (self.pokydirname, self.be.builddir, (lambda: "" if self.be.bbtoken is None else "BBTOKEN=%s" % self.be.bbtoken)()))
+ self.be.bbstate = BuildEnvironment.SERVER_STOPPED
+ self.be.save()
+ print "Stopped server"
+
+
+ def _copyFile(self, filepath1, filepath2):
+ p = subprocess.Popen("scp '%s' '%s'" % (filepath1, filepath2), stdout=subprocess.PIPE, stderr = subprocess.PIPE, shell=True)
+ (out, err) = p.communicate()
+ if p.returncode:
+ raise ShellCmdException(err)
+
+ def pullFile(self, local_filename, remote_filename):
+ self._copyFile(local_filename, "%s:%s" % (self.be.address, remote_filename))
+
+ def pushFile(self, local_filename, remote_filename):
+ _copyFile("%s:%s" % (self.be.address, remote_filename), local_filename)
+
+ def setLayers(self, bitbakes, layers):
+ """ a word of attention: by convention, the first layer for any build will be poky! """
+
+ assert self.be.sourcedir is not None
+ assert len(bitbakes) == 1
+ # set layers in the layersource
+
+ # 1. get a list of repos, and map dirpaths for each layer
+ gitrepos = {}
+ gitrepos[bitbakes[0].giturl] = []
+ gitrepos[bitbakes[0].giturl].append( ("bitbake", bitbakes[0].dirpath, bitbakes[0].commit) )
+
+ for layer in layers:
+ # we don't process local URLs
+ if layer.giturl.startswith("file://"):
+ continue
+ if not layer.giturl in gitrepos:
+ gitrepos[layer.giturl] = []
+ gitrepos[layer.giturl].append( (layer.name, layer.dirpath, layer.commit))
+ for giturl in gitrepos.keys():
+ commitid = gitrepos[giturl][0][2]
+ for e in gitrepos[giturl]:
+ if commitid != e[2]:
+ raise BuildSetupException("More than one commit per git url, unsupported configuration")
+
+ layerlist = []
+
+ # 2. checkout the repositories
+ for giturl in gitrepos.keys():
+ import os
+ localdirname = os.path.join(self.be.sourcedir, _getgitcheckoutdirectoryname(giturl))
+ print "DEBUG: giturl ", giturl ,"checking out in current directory", localdirname
+
+ # make sure our directory is a git repository
+ if self._pathexists(localdirname):
+ if not giturl in self._shellcmd("git remote -v", localdirname):
+ raise BuildSetupException("Existing git repository at %s, but with different remotes (not '%s'). Aborting." % (localdirname, giturl))
+ else:
+ self._shellcmd("git clone \"%s\" \"%s\"" % (giturl, localdirname))
+ # checkout the needed commit
+ commit = gitrepos[giturl][0][2]
+
+ # branch magic name "HEAD" will inhibit checkout
+ if commit != "HEAD":
+ print "DEBUG: checking out commit ", commit, "to", localdirname
+ self._shellcmd("git fetch --all && git checkout \"%s\"" % commit , localdirname)
+
+ # take the localdirname as poky dir if we can find the oe-init-build-env
+ if self.pokydirname is None and self._pathexists(os.path.join(localdirname, "oe-init-build-env")):
+ print "DEBUG: selected poky dir name", localdirname
+ self.pokydirname = localdirname
+
+ # verify our repositories
+ for name, dirpath, commit in gitrepos[giturl]:
+ localdirpath = os.path.join(localdirname, dirpath)
+ if not self._pathexists(localdirpath):
+ raise BuildSetupException("Cannot find layer git path '%s' in checked out repository '%s:%s'. Aborting." % (localdirpath, giturl, commit))
+
+ if name != "bitbake":
+ layerlist.append(localdirpath)
+
+ print "DEBUG: current layer list ", layerlist
+
+ # 3. configure the build environment, so we have a conf/bblayers.conf
+ assert self.pokydirname is not None
+ self._setupBE()
+
+ # 4. update the bblayers.conf
+ bblayerconf = os.path.join(self.be.builddir, "conf/bblayers.conf")
+ if not self._pathexists(bblayerconf):
+ raise BuildSetupException("BE is not consistent: bblayers.conf file missing at %s" % bblayerconf)
+
+ import uuid
+ local_bblayerconf = "/tmp/" + str(uuid.uuid4()) + "-bblayer.conf"
+
+ self.pullFile(bblayerconf, local_bblayerconf)
+
+ BuildEnvironmentController._updateBBLayers(local_bblayerconf, layerlist)
+ self.pushFile(local_bblayerconf, bblayerconf)
+
+ os.unlink(local_bblayerconf)
+
+ self.islayerset = True
+ return True
+
+ def release(self):
+ assert self.be.sourcedir and self._pathexists(self.be.builddir)
+ import os
+ import shutil
+ shutil.rmtree(os.path.join(self.be.sourcedir, "build"))
+ assert not self._pathexists(self.be.builddir)
diff --git a/bitbake/lib/toaster/bldcontrol/tests.py b/bitbake/lib/toaster/bldcontrol/tests.py
index ebe477d8a8..5a9d1df37a 100644
--- a/bitbake/lib/toaster/bldcontrol/tests.py
+++ b/bitbake/lib/toaster/bldcontrol/tests.py
@@ -7,46 +7,108 @@ Replace this with more appropriate tests for your application.
from django.test import TestCase
-from bldcontrol.bbcontroller import LocalhostBEController, BitbakeController
+from bldcontrol.bbcontroller import BitbakeController
+from bldcontrol.localhostbecontroller import LocalhostBEController
+from bldcontrol.sshbecontroller import SSHBEController
from bldcontrol.models import BuildEnvironment, BuildRequest
from bldcontrol.management.commands.runbuilds import Command
import socket
import subprocess
-class LocalhostBEControllerTests(TestCase):
- def test_StartAndStopServer(self):
- obe = BuildEnvironment.objects.create(lock = BuildEnvironment.LOCK_FREE, betype = BuildEnvironment.TYPE_LOCAL)
- lbc = LocalhostBEController(obe)
+# standard poky data hardcoded for testing
+BITBAKE_LAYERS = [type('bitbake_info', (object,), { "giturl": "git://git.yoctoproject.org/poky.git", "dirpath": "", "commit": "HEAD"})]
+POKY_LAYERS = [
+ type('poky_info', (object,), { "name": "meta", "giturl": "git://git.yoctoproject.org/poky.git", "dirpath": "meta", "commit": "HEAD"}),
+ type('poky_info', (object,), { "name": "meta-yocto", "giturl": "git://git.yoctoproject.org/poky.git", "dirpath": "meta-yocto", "commit": "HEAD"}),
+ type('poky_info', (object,), { "name": "meta-yocto-bsp", "giturl": "git://git.yoctoproject.org/poky.git", "dirpath": "meta-yocto-bsp", "commit": "HEAD"}),
+ ]
- # test start server and stop
- self.assertTrue(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex(('localhost', 8200)), "Port already occupied")
- lbc.startBBServer()
- self.assertFalse(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex(('localhost', 8200)), "Server not answering")
- lbc.stopBBServer()
- self.assertTrue(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex(('localhost', 8200)), "Server not stopped")
- # clean up
- import subprocess
- out, err = subprocess.Popen("netstat -tapn 2>/dev/null | grep 8200 | awk '{print $7}' | sort -fu | cut -d \"/\" -f 1 | grep -v -- - | tee /dev/fd/2 | xargs -r kill", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
+# we have an abstract test class designed to ensure that the controllers use a single interface
+# specific controller tests only need to override the _getBuildEnvironment() and _getBEController() methods
+class BEControllerTests(object):
+
+ def _serverForceStop(self, bc):
+ err = bc._shellcmd("netstat -tapn 2>/dev/null | grep 8200 | awk '{print $7}' | sort -fu | cut -d \"/\" -f 1 | grep -v -- - | tee /dev/fd/2 | xargs -r kill")
self.assertTrue(err == '', "bitbake server pid %s not stopped" % err)
- obe = BuildEnvironment.objects.create(lock = BuildEnvironment.LOCK_FREE, betype = BuildEnvironment.TYPE_LOCAL)
- lbc = LocalhostBEController(obe)
+ def test_serverStartAndStop(self):
+ obe = self._getBuildEnvironment()
+ bc = self._getBEController(obe)
+ bc.setLayers(BITBAKE_LAYERS, POKY_LAYERS) # setting layers, skip any layer info
+
+ hostname = self.test_address.split("@")[-1]
+
+ # test start server and stop
+ self.assertTrue(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, 8200)), "Port already occupied")
+ bc.startBBServer("0:0")
+ self.assertFalse(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, 8200)), "Server not answering")
+
+ bc.stopBBServer()
+ self.assertTrue(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, 8200)), "Server not stopped")
- bbc = lbc.getBBController()
+ self._serverForceStop(bc)
+
+ def test_getBBController(self):
+ obe = self._getBuildEnvironment()
+ bc = self._getBEController(obe)
+ bc.setLayers(BITBAKE_LAYERS, POKY_LAYERS) # setting layers, skip any layer info
+
+ bbc = bc.getBBController("%d:%d" % (-1, obe.pk))
self.assertTrue(isinstance(bbc, BitbakeController))
- # test set variable
- try:
- bbc.setVariable
- except Exception as e :
- self.fail("setVariable raised %s", e)
-
- lbc.stopBBServer()
- out, err = subprocess.Popen("netstat -tapn 2>/dev/null | grep 8200 | awk '{print $7}' | sort -fu | cut -d \"/\" -f 1 | grep -v -- - | tee /dev/fd/2 | xargs -r kill", shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
- self.assertTrue(err == '', "bitbake server pid %s not stopped" % err)
+ bc.stopBBServer()
+
+ self._serverForceStop(bc)
+
+class LocalhostBEControllerTests(TestCase, BEControllerTests):
+ def __init__(self, *args):
+ super(LocalhostBEControllerTests, self).__init__(*args)
+ # hardcoded for Alex's machine; since the localhost BE is machine-dependent,
+ # I found no good way to abstract this away
+ self.test_sourcedir = "/home/ddalex/ssd/yocto"
+ self.test_builddir = "/home/ddalex/ssd/yocto/build"
+ self.test_address = "localhost"
+
+ def _getBuildEnvironment(self):
+ return BuildEnvironment.objects.create(
+ lock = BuildEnvironment.LOCK_FREE,
+ betype = BuildEnvironment.TYPE_LOCAL,
+ address = self.test_address,
+ sourcedir = self.test_sourcedir,
+ builddir = self.test_builddir )
+
+ def _getBEController(self, obe):
+ return LocalhostBEController(obe)
+
+class SSHBEControllerTests(TestCase, BEControllerTests):
+ def __init__(self, *args):
+ super(SSHBEControllerTests, self).__init__(*args)
+ self.test_address = "ddalex-desktop.local"
+ # hardcoded for ddalex-desktop.local machine; since the localhost BE is machine-dependent,
+ # I found no good way to abstract this away
+ self.test_sourcedir = "/home/ddalex/ssd/yocto"
+ self.test_builddir = "/home/ddalex/ssd/yocto/build"
+
+ def _getBuildEnvironment(self):
+ return BuildEnvironment.objects.create(
+ lock = BuildEnvironment.LOCK_FREE,
+ betype = BuildEnvironment.TYPE_SSH,
+ address = self.test_address,
+ sourcedir = self.test_sourcedir,
+ builddir = self.test_builddir )
+
+ def _getBEController(self, obe):
+ return SSHBEController(obe)
+
+ def test_pathExists(self):
+ obe = BuildEnvironment.objects.create(betype = BuildEnvironment.TYPE_SSH, address= self.test_address)
+ sbc = SSHBEController(obe)
+ self.assertTrue(sbc._pathexists("/"))
+ self.assertFalse(sbc._pathexists("/.deadbeef"))
+ self.assertTrue(sbc._pathexists(sbc._shellcmd("pwd")))
class RunBuildsCommandTests(TestCase):
@@ -67,8 +129,8 @@ class RunBuildsCommandTests(TestCase):
self.assertRaises(IndexError, command._selectBuildEnvironment)
def test_br_select(self):
- from orm.models import Project
- p, created = Project.objects.get_or_create(pk=1)
+ from orm.models import Project, Release, BitbakeVersion, Branch
+ p = Project.objects.create_project("test", Release.objects.get_or_create(name = "HEAD", bitbake_version = BitbakeVersion.objects.get_or_create(name="HEAD", branch=Branch.objects.get_or_create(name="HEAD"))[0])[0])
obr = BuildRequest.objects.create(state = BuildRequest.REQ_QUEUED, project = p)
command = Command()
br = command._selectBuildRequest()
@@ -79,3 +141,22 @@ class RunBuildsCommandTests(TestCase):
self.assertTrue(br.state == BuildRequest.REQ_INPROGRESS, "Request is not updated")
# no more selections possible here
self.assertRaises(IndexError, command._selectBuildRequest)
+
+
+class UtilityTests(TestCase):
+ def test_reduce_path(self):
+ from bldcontrol.management.commands.loadconf import _reduce_canon_path, _get_id_for_sourcetype
+
+ self.assertTrue( _reduce_canon_path("/") == "/")
+ self.assertTrue( _reduce_canon_path("/home/..") == "/")
+ self.assertTrue( _reduce_canon_path("/home/../ana") == "/ana")
+ self.assertTrue( _reduce_canon_path("/home/../ana/..") == "/")
+ self.assertTrue( _reduce_canon_path("/home/ana/mihai/../maria") == "/home/ana/maria")
+
+ def test_get_id_for_sourcetype(self):
+ from bldcontrol.management.commands.loadconf import _reduce_canon_path, _get_id_for_sourcetype
+ self.assertTrue( _get_id_for_sourcetype("layerindex") == 1)
+ self.assertTrue( _get_id_for_sourcetype("local") == 0)
+ self.assertTrue( _get_id_for_sourcetype("imported") == 2)
+ with self.assertRaises(Exception):
+ _get_id_for_sourcetype("unknown")
diff --git a/bitbake/lib/toaster/bldviewer/templates/simple_build.html b/bitbake/lib/toaster/bldviewer/templates/simple_build.html
index a6983f5804..230e7c21ca 100644
--- a/bitbake/lib/toaster/bldviewer/templates/simple_build.html
+++ b/bitbake/lib/toaster/bldviewer/templates/simple_build.html
@@ -6,7 +6,7 @@
{% block pagetable %}
- {% load projecttags %}
+ {% load simple_projecttags %}
<tr>
<th>Outcome</th>
<th>Started On</th>
diff --git a/bitbake/lib/toaster/bldviewer/templates/simple_layer.html b/bitbake/lib/toaster/bldviewer/templates/simple_layer.html
index ae7172d9a0..25e7bf818e 100644
--- a/bitbake/lib/toaster/bldviewer/templates/simple_layer.html
+++ b/bitbake/lib/toaster/bldviewer/templates/simple_layer.html
@@ -5,7 +5,7 @@
{% endblock %}
{% block pagetable %}
- {% load projecttags %}
+ {% load simple_projecttags %}
<tr>
<th>Name</th>
diff --git a/bitbake/lib/toaster/bldviewer/templates/simple_recipe.html b/bitbake/lib/toaster/bldviewer/templates/simple_recipe.html
index 77b9de2525..3bff3b9a25 100644
--- a/bitbake/lib/toaster/bldviewer/templates/simple_recipe.html
+++ b/bitbake/lib/toaster/bldviewer/templates/simple_recipe.html
@@ -8,7 +8,7 @@
{% endblock %}
{% block pagetable %}
- {% load projecttags %}
+ {% load simple_projecttags %}
<tr>
</tr>
diff --git a/bitbake/lib/toaster/bldviewer/templatetags/projecttags.py b/bitbake/lib/toaster/bldviewer/templatetags/simple_projecttags.py
index 1b8953c69d..1b8953c69d 100644
--- a/bitbake/lib/toaster/bldviewer/templatetags/projecttags.py
+++ b/bitbake/lib/toaster/bldviewer/templatetags/simple_projecttags.py
diff --git a/bitbake/lib/toaster/orm/admin.py b/bitbake/lib/toaster/orm/admin.py
new file mode 100644
index 0000000000..706e517e0e
--- /dev/null
+++ b/bitbake/lib/toaster/orm/admin.py
@@ -0,0 +1,34 @@
+from django.contrib import admin
+from django.contrib.admin.filters import RelatedFieldListFilter
+from .models import BitbakeVersion, Release, LayerSource, ToasterSetting
+from django.forms.widgets import Textarea
+from django import forms
+import django.db.models as models
+
+from django.contrib.admin import widgets, helpers
+
+class LayerSourceAdmin(admin.ModelAdmin):
+ pass
+
+class BitbakeVersionAdmin(admin.ModelAdmin):
+
+ # we override the formfield for db URLField because of broken URL validation
+
+ def formfield_for_dbfield(self, db_field, **kwargs):
+ if isinstance(db_field, models.fields.URLField):
+ return forms.fields.CharField()
+ return super(BitbakeVersionAdmin, self).formfield_for_dbfield(db_field, **kwargs)
+
+
+
+class ReleaseAdmin(admin.ModelAdmin):
+ pass
+
+class ToasterSettingAdmin(admin.ModelAdmin):
+ pass
+
+admin.site.register(LayerSource, LayerSourceAdmin)
+admin.site.register(BitbakeVersion, BitbakeVersionAdmin)
+admin.site.register(Release, ReleaseAdmin)
+admin.site.register(ToasterSetting, ToasterSettingAdmin)
+
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/etc/ld.so.conf b/bitbake/lib/toaster/orm/management/__init__.py
index e69de29bb2..e69de29bb2 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/etc/ld.so.conf
+++ b/bitbake/lib/toaster/orm/management/__init__.py
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg b/bitbake/lib/toaster/orm/management/commands/__init__.py
index e69de29bb2..e69de29bb2 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg
+++ b/bitbake/lib/toaster/orm/management/commands/__init__.py
diff --git a/bitbake/lib/toaster/orm/management/commands/lsupdates.py b/bitbake/lib/toaster/orm/management/commands/lsupdates.py
new file mode 100644
index 0000000000..75e9513fca
--- /dev/null
+++ b/bitbake/lib/toaster/orm/management/commands/lsupdates.py
@@ -0,0 +1,12 @@
+from django.core.management.base import NoArgsCommand, CommandError
+from orm.models import LayerSource
+import os
+
+class Command(NoArgsCommand):
+ args = ""
+ help = "Updates locally cached information from all LayerSources"
+
+
+ def handle_noargs(self, **options):
+ for ls in LayerSource.objects.all():
+ ls.update()
diff --git a/bitbake/lib/toaster/orm/migrations/0013_auto__add_release__add_layerversiondependency__add_unique_layerversion.py b/bitbake/lib/toaster/orm/migrations/0013_auto__add_release__add_layerversiondependency__add_unique_layerversion.py
new file mode 100644
index 0000000000..7c954e6bef
--- /dev/null
+++ b/bitbake/lib/toaster/orm/migrations/0013_auto__add_release__add_layerversiondependency__add_unique_layerversion.py
@@ -0,0 +1,710 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Adding model 'Release'
+ db.create_table(u'orm_release', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=32)),
+ ('description', self.gf('django.db.models.fields.CharField')(max_length=255)),
+ ('bitbake_version', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.BitbakeVersion'])),
+ ('branch', self.gf('django.db.models.fields.CharField')(max_length=32)),
+ ))
+ db.send_create_signal(u'orm', ['Release'])
+
+ # Adding model 'LayerVersionDependency'
+ db.create_table(u'orm_layerversiondependency', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('layer_source', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['orm.LayerSource'], null=True)),
+ ('up_id', self.gf('django.db.models.fields.IntegerField')(default=None, null=True)),
+ ('layer_version', self.gf('django.db.models.fields.related.ForeignKey')(related_name='dependencies', to=orm['orm.Layer_Version'])),
+ ('depends_on', self.gf('django.db.models.fields.related.ForeignKey')(related_name='dependees', to=orm['orm.Layer_Version'])),
+ ))
+ db.send_create_signal(u'orm', ['LayerVersionDependency'])
+
+ # Adding unique constraint on 'LayerVersionDependency', fields ['layer_source', 'up_id']
+ db.create_unique(u'orm_layerversiondependency', ['layer_source_id', 'up_id'])
+
+ # Adding model 'ToasterSetting'
+ db.create_table(u'orm_toastersetting', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('name', self.gf('django.db.models.fields.CharField')(max_length=63)),
+ ('helptext', self.gf('django.db.models.fields.TextField')()),
+ ('value', self.gf('django.db.models.fields.CharField')(max_length=255)),
+ ))
+ db.send_create_signal(u'orm', ['ToasterSetting'])
+
+ # Adding model 'Machine'
+ db.create_table(u'orm_machine', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('layer_source', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['orm.LayerSource'], null=True)),
+ ('up_id', self.gf('django.db.models.fields.IntegerField')(default=None, null=True)),
+ ('up_date', self.gf('django.db.models.fields.DateTimeField')(default=None, null=True)),
+ ('layer_version', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Layer_Version'])),
+ ('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
+ ('description', self.gf('django.db.models.fields.CharField')(max_length=255)),
+ ))
+ db.send_create_signal(u'orm', ['Machine'])
+
+ # Adding unique constraint on 'Machine', fields ['layer_source', 'up_id']
+ db.create_unique(u'orm_machine', ['layer_source_id', 'up_id'])
+
+ # Adding model 'ReleaseDefaultLayer'
+ db.create_table(u'orm_releasedefaultlayer', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('release', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Release'])),
+ ('layer', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Layer'])),
+ ))
+ db.send_create_signal(u'orm', ['ReleaseDefaultLayer'])
+
+ # Adding model 'BitbakeVersion'
+ db.create_table(u'orm_bitbakeversion', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=32)),
+ ('giturl', self.gf('django.db.models.fields.URLField')(max_length=200)),
+ ('branch', self.gf('django.db.models.fields.CharField')(max_length=32)),
+ ('dirpath', self.gf('django.db.models.fields.CharField')(max_length=255)),
+ ))
+ db.send_create_signal(u'orm', ['BitbakeVersion'])
+
+ # Adding model 'Branch'
+ db.create_table(u'orm_branch', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('layer_source', self.gf('django.db.models.fields.related.ForeignKey')(default=True, to=orm['orm.LayerSource'], null=True)),
+ ('up_id', self.gf('django.db.models.fields.IntegerField')(default=None, null=True)),
+ ('up_date', self.gf('django.db.models.fields.DateTimeField')(default=None, null=True)),
+ ('name', self.gf('django.db.models.fields.CharField')(max_length=50)),
+ ('bitbake_branch', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
+ ('short_description', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
+ ))
+ db.send_create_signal(u'orm', ['Branch'])
+
+ # Adding unique constraint on 'Branch', fields ['layer_source', 'name']
+ db.create_unique(u'orm_branch', ['layer_source_id', 'name'])
+
+ # Adding unique constraint on 'Branch', fields ['layer_source', 'up_id']
+ db.create_unique(u'orm_branch', ['layer_source_id', 'up_id'])
+
+ # Adding model 'ToasterSettingDefaultLayer'
+ db.create_table(u'orm_toastersettingdefaultlayer', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('layer_version', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Layer_Version'])),
+ ))
+ db.send_create_signal(u'orm', ['ToasterSettingDefaultLayer'])
+
+ # Adding model 'LayerSource'
+ db.create_table(u'orm_layersource', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('name', self.gf('django.db.models.fields.CharField')(max_length=63)),
+ ('sourcetype', self.gf('django.db.models.fields.IntegerField')()),
+ ('apiurl', self.gf('django.db.models.fields.CharField')(default=None, max_length=255, null=True)),
+ ))
+ db.send_create_signal(u'orm', ['LayerSource'])
+
+ # Adding unique constraint on 'LayerSource', fields ['sourcetype', 'apiurl']
+ db.create_unique(u'orm_layersource', ['sourcetype', 'apiurl'])
+
+ # Deleting field 'ProjectLayer.name'
+ db.delete_column(u'orm_projectlayer', 'name')
+
+ # Deleting field 'ProjectLayer.dirpath'
+ db.delete_column(u'orm_projectlayer', 'dirpath')
+
+ # Deleting field 'ProjectLayer.commit'
+ db.delete_column(u'orm_projectlayer', 'commit')
+
+ # Deleting field 'ProjectLayer.giturl'
+ db.delete_column(u'orm_projectlayer', 'giturl')
+
+ # Adding field 'ProjectLayer.layercommit'
+ db.add_column(u'orm_projectlayer', 'layercommit',
+ self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Layer_Version'], null=True),
+ keep_default=False)
+
+ # Adding field 'Layer_Version.layer_source'
+ db.add_column(u'orm_layer_version', 'layer_source',
+ self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['orm.LayerSource'], null=True),
+ keep_default=False)
+
+ # Adding field 'Layer_Version.up_id'
+ db.add_column(u'orm_layer_version', 'up_id',
+ self.gf('django.db.models.fields.IntegerField')(default=None, null=True),
+ keep_default=False)
+
+ # Adding field 'Layer_Version.up_date'
+ db.add_column(u'orm_layer_version', 'up_date',
+ self.gf('django.db.models.fields.DateTimeField')(default=None, null=True),
+ keep_default=False)
+
+ # Adding field 'Layer_Version.up_branch'
+ db.add_column(u'orm_layer_version', 'up_branch',
+ self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['orm.Branch'], null=True),
+ keep_default=False)
+
+ # Adding field 'Layer_Version.dirpath'
+ db.add_column(u'orm_layer_version', 'dirpath',
+ self.gf('django.db.models.fields.CharField')(default=None, max_length=255, null=True),
+ keep_default=False)
+
+
+ # Changing field 'Layer_Version.build'
+ db.alter_column(u'orm_layer_version', 'build_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['orm.Build']))
+
+ # Changing field 'Layer_Version.branch'
+ db.alter_column(u'orm_layer_version', 'branch', self.gf('django.db.models.fields.CharField')(max_length=80))
+ # Adding unique constraint on 'Layer_Version', fields ['layer_source', 'up_id']
+ db.create_unique(u'orm_layer_version', ['layer_source_id', 'up_id'])
+
+ # Adding field 'Recipe.layer_source'
+ db.add_column(u'orm_recipe', 'layer_source',
+ self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['orm.LayerSource'], null=True),
+ keep_default=False)
+
+ # Adding field 'Recipe.up_id'
+ db.add_column(u'orm_recipe', 'up_id',
+ self.gf('django.db.models.fields.IntegerField')(default=None, null=True),
+ keep_default=False)
+
+ # Adding field 'Recipe.up_date'
+ db.add_column(u'orm_recipe', 'up_date',
+ self.gf('django.db.models.fields.DateTimeField')(default=None, null=True),
+ keep_default=False)
+
+ # Adding field 'Layer.layer_source'
+ db.add_column(u'orm_layer', 'layer_source',
+ self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['orm.LayerSource'], null=True),
+ keep_default=False)
+
+ # Adding field 'Layer.up_id'
+ db.add_column(u'orm_layer', 'up_id',
+ self.gf('django.db.models.fields.IntegerField')(default=None, null=True),
+ keep_default=False)
+
+ # Adding field 'Layer.up_date'
+ db.add_column(u'orm_layer', 'up_date',
+ self.gf('django.db.models.fields.DateTimeField')(default=None, null=True),
+ keep_default=False)
+
+ # Adding field 'Layer.vcs_url'
+ db.add_column(u'orm_layer', 'vcs_url',
+ self.gf('django.db.models.fields.URLField')(default=None, max_length=200, null=True),
+ keep_default=False)
+
+ # Adding field 'Layer.vcs_web_file_base_url'
+ db.add_column(u'orm_layer', 'vcs_web_file_base_url',
+ self.gf('django.db.models.fields.URLField')(default=None, max_length=200, null=True),
+ keep_default=False)
+
+ # Adding field 'Layer.summary'
+ db.add_column(u'orm_layer', 'summary',
+ self.gf('django.db.models.fields.CharField')(default=None, max_length=200, null=True),
+ keep_default=False)
+
+ # Adding field 'Layer.description'
+ db.add_column(u'orm_layer', 'description',
+ self.gf('django.db.models.fields.TextField')(default=None, null=True),
+ keep_default=False)
+
+
+ # Changing field 'Layer.local_path'
+ db.alter_column(u'orm_layer', 'local_path', self.gf('django.db.models.fields.FilePathField')(max_length=255, null=True))
+ # Adding unique constraint on 'Layer', fields ['layer_source', 'up_id']
+ db.create_unique(u'orm_layer', ['layer_source_id', 'up_id'])
+
+ # Adding unique constraint on 'Layer', fields ['layer_source', 'name']
+ db.create_unique(u'orm_layer', ['layer_source_id', 'name'])
+
+ # Deleting field 'Project.branch'
+ db.delete_column(u'orm_project', 'branch')
+
+ # Adding field 'Project.bitbake_version'
+ db.add_column(u'orm_project', 'bitbake_version',
+ self.gf('django.db.models.fields.related.ForeignKey')(default=-1, to=orm['orm.BitbakeVersion']),
+ keep_default=False)
+
+ # Adding field 'Project.release'
+ db.add_column(u'orm_project', 'release',
+ self.gf('django.db.models.fields.related.ForeignKey')(default=-1, to=orm['orm.Release']),
+ keep_default=False)
+
+
+ def backwards(self, orm):
+ # Removing unique constraint on 'Layer', fields ['layer_source', 'name']
+ db.delete_unique(u'orm_layer', ['layer_source_id', 'name'])
+
+ # Removing unique constraint on 'Layer', fields ['layer_source', 'up_id']
+ db.delete_unique(u'orm_layer', ['layer_source_id', 'up_id'])
+
+ # Removing unique constraint on 'Layer_Version', fields ['layer_source', 'up_id']
+ db.delete_unique(u'orm_layer_version', ['layer_source_id', 'up_id'])
+
+ # Removing unique constraint on 'LayerSource', fields ['sourcetype', 'apiurl']
+ db.delete_unique(u'orm_layersource', ['sourcetype', 'apiurl'])
+
+ # Removing unique constraint on 'Branch', fields ['layer_source', 'up_id']
+ db.delete_unique(u'orm_branch', ['layer_source_id', 'up_id'])
+
+ # Removing unique constraint on 'Branch', fields ['layer_source', 'name']
+ db.delete_unique(u'orm_branch', ['layer_source_id', 'name'])
+
+ # Removing unique constraint on 'Machine', fields ['layer_source', 'up_id']
+ db.delete_unique(u'orm_machine', ['layer_source_id', 'up_id'])
+
+ # Removing unique constraint on 'LayerVersionDependency', fields ['layer_source', 'up_id']
+ db.delete_unique(u'orm_layerversiondependency', ['layer_source_id', 'up_id'])
+
+ # Deleting model 'Release'
+ db.delete_table(u'orm_release')
+
+ # Deleting model 'LayerVersionDependency'
+ db.delete_table(u'orm_layerversiondependency')
+
+ # Deleting model 'ToasterSetting'
+ db.delete_table(u'orm_toastersetting')
+
+ # Deleting model 'Machine'
+ db.delete_table(u'orm_machine')
+
+ # Deleting model 'ReleaseDefaultLayer'
+ db.delete_table(u'orm_releasedefaultlayer')
+
+ # Deleting model 'BitbakeVersion'
+ db.delete_table(u'orm_bitbakeversion')
+
+ # Deleting model 'Branch'
+ db.delete_table(u'orm_branch')
+
+ # Deleting model 'ToasterSettingDefaultLayer'
+ db.delete_table(u'orm_toastersettingdefaultlayer')
+
+ # Deleting model 'LayerSource'
+ db.delete_table(u'orm_layersource')
+
+
+ # User chose to not deal with backwards NULL issues for 'ProjectLayer.name'
+ raise RuntimeError("Cannot reverse this migration. 'ProjectLayer.name' and its values cannot be restored.")
+
+ # The following code is provided here to aid in writing a correct migration
+ # Adding field 'ProjectLayer.name'
+ db.add_column(u'orm_projectlayer', 'name',
+ self.gf('django.db.models.fields.CharField')(max_length=100),
+ keep_default=False)
+
+
+ # User chose to not deal with backwards NULL issues for 'ProjectLayer.dirpath'
+ raise RuntimeError("Cannot reverse this migration. 'ProjectLayer.dirpath' and its values cannot be restored.")
+
+ # The following code is provided here to aid in writing a correct migration
+ # Adding field 'ProjectLayer.dirpath'
+ db.add_column(u'orm_projectlayer', 'dirpath',
+ self.gf('django.db.models.fields.CharField')(max_length=254),
+ keep_default=False)
+
+
+ # User chose to not deal with backwards NULL issues for 'ProjectLayer.commit'
+ raise RuntimeError("Cannot reverse this migration. 'ProjectLayer.commit' and its values cannot be restored.")
+
+ # The following code is provided here to aid in writing a correct migration
+ # Adding field 'ProjectLayer.commit'
+ db.add_column(u'orm_projectlayer', 'commit',
+ self.gf('django.db.models.fields.CharField')(max_length=254),
+ keep_default=False)
+
+
+ # User chose to not deal with backwards NULL issues for 'ProjectLayer.giturl'
+ raise RuntimeError("Cannot reverse this migration. 'ProjectLayer.giturl' and its values cannot be restored.")
+
+ # The following code is provided here to aid in writing a correct migration
+ # Adding field 'ProjectLayer.giturl'
+ db.add_column(u'orm_projectlayer', 'giturl',
+ self.gf('django.db.models.fields.CharField')(max_length=254),
+ keep_default=False)
+
+ # Deleting field 'ProjectLayer.layercommit'
+ db.delete_column(u'orm_projectlayer', 'layercommit_id')
+
+ # Deleting field 'Layer_Version.layer_source'
+ db.delete_column(u'orm_layer_version', 'layer_source_id')
+
+ # Deleting field 'Layer_Version.up_id'
+ db.delete_column(u'orm_layer_version', 'up_id')
+
+ # Deleting field 'Layer_Version.up_date'
+ db.delete_column(u'orm_layer_version', 'up_date')
+
+ # Deleting field 'Layer_Version.up_branch'
+ db.delete_column(u'orm_layer_version', 'up_branch_id')
+
+ # Deleting field 'Layer_Version.dirpath'
+ db.delete_column(u'orm_layer_version', 'dirpath')
+
+
+ # User chose to not deal with backwards NULL issues for 'Layer_Version.build'
+ raise RuntimeError("Cannot reverse this migration. 'Layer_Version.build' and its values cannot be restored.")
+
+ # The following code is provided here to aid in writing a correct migration
+ # Changing field 'Layer_Version.build'
+ db.alter_column(u'orm_layer_version', 'build_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Build']))
+
+ # Changing field 'Layer_Version.branch'
+ db.alter_column(u'orm_layer_version', 'branch', self.gf('django.db.models.fields.CharField')(max_length=50))
+ # Deleting field 'Recipe.layer_source'
+ db.delete_column(u'orm_recipe', 'layer_source_id')
+
+ # Deleting field 'Recipe.up_id'
+ db.delete_column(u'orm_recipe', 'up_id')
+
+ # Deleting field 'Recipe.up_date'
+ db.delete_column(u'orm_recipe', 'up_date')
+
+ # Deleting field 'Layer.layer_source'
+ db.delete_column(u'orm_layer', 'layer_source_id')
+
+ # Deleting field 'Layer.up_id'
+ db.delete_column(u'orm_layer', 'up_id')
+
+ # Deleting field 'Layer.up_date'
+ db.delete_column(u'orm_layer', 'up_date')
+
+ # Deleting field 'Layer.vcs_url'
+ db.delete_column(u'orm_layer', 'vcs_url')
+
+ # Deleting field 'Layer.vcs_web_file_base_url'
+ db.delete_column(u'orm_layer', 'vcs_web_file_base_url')
+
+ # Deleting field 'Layer.summary'
+ db.delete_column(u'orm_layer', 'summary')
+
+ # Deleting field 'Layer.description'
+ db.delete_column(u'orm_layer', 'description')
+
+
+ # User chose to not deal with backwards NULL issues for 'Layer.local_path'
+ raise RuntimeError("Cannot reverse this migration. 'Layer.local_path' and its values cannot be restored.")
+
+ # The following code is provided here to aid in writing a correct migration
+ # Changing field 'Layer.local_path'
+ db.alter_column(u'orm_layer', 'local_path', self.gf('django.db.models.fields.FilePathField')(max_length=255))
+
+ # User chose to not deal with backwards NULL issues for 'Project.branch'
+ raise RuntimeError("Cannot reverse this migration. 'Project.branch' and its values cannot be restored.")
+
+ # The following code is provided here to aid in writing a correct migration
+ # Adding field 'Project.branch'
+ db.add_column(u'orm_project', 'branch',
+ self.gf('django.db.models.fields.CharField')(max_length=50),
+ keep_default=False)
+
+ # Deleting field 'Project.bitbake_version'
+ db.delete_column(u'orm_project', 'bitbake_version_id')
+
+ # Deleting field 'Project.release'
+ db.delete_column(u'orm_project', 'release_id')
+
+
+ models = {
+ u'orm.bitbakeversion': {
+ 'Meta': {'object_name': 'BitbakeVersion'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.branch': {
+ 'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
+ 'bitbake_branch': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.build': {
+ 'Meta': {'object_name': 'Build'},
+ 'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+ 'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
+ 'started_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
+ },
+ u'orm.helptext': {
+ 'Meta': {'object_name': 'HelpText'},
+ 'area': ('django.db.models.fields.IntegerField', [], {}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'text': ('django.db.models.fields.TextField', [], {})
+ },
+ u'orm.layer': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
+ 'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'summary': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
+ },
+ u'orm.layer_version': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
+ 'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.layersource': {
+ 'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
+ 'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+ 'sourcetype': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.layerversiondependency': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.logmessage': {
+ 'Meta': {'object_name': 'LogMessage'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
+ },
+ u'orm.machine': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.package': {
+ 'Meta': {'object_name': 'Package'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+ 'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
+ 'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'summary': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.package_dependency': {
+ 'Meta': {'object_name': 'Package_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
+ },
+ u'orm.package_file': {
+ 'Meta': {'object_name': 'Package_File'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.project': {
+ 'Meta': {'object_name': 'Project'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+ 'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+ },
+ u'orm.projectlayer': {
+ 'Meta': {'object_name': 'ProjectLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
+ 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
+ },
+ u'orm.projecttarget': {
+ 'Meta': {'object_name': 'ProjectTarget'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+ },
+ u'orm.projectvariable': {
+ 'Meta': {'object_name': 'ProjectVariable'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.recipe': {
+ 'Meta': {'object_name': 'Recipe'},
+ 'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ 'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'summary': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.recipe_dependency': {
+ 'Meta': {'object_name': 'Recipe_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
+ },
+ u'orm.release': {
+ 'Meta': {'object_name': 'Release'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.releasedefaultlayer': {
+ 'Meta': {'object_name': 'ReleaseDefaultLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer']"}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+ },
+ u'orm.target': {
+ 'Meta': {'object_name': 'Target'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ u'orm.target_file': {
+ 'Meta': {'object_name': 'Target_File'},
+ 'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'inodetype': ('django.db.models.fields.IntegerField', [], {}),
+ 'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+ 'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {}),
+ 'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_image_file': {
+ 'Meta': {'object_name': 'Target_Image_File'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
+ 'file_size': ('django.db.models.fields.IntegerField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_installed_package': {
+ 'Meta': {'object_name': 'Target_Installed_Package'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.task': {
+ 'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
+ 'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ 'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+ 'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
+ 'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
+ },
+ u'orm.task_dependency': {
+ 'Meta': {'object_name': 'Task_Dependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
+ },
+ u'orm.toastersetting': {
+ 'Meta': {'object_name': 'ToasterSetting'},
+ 'helptext': ('django.db.models.fields.TextField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+ 'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'orm.toastersettingdefaultlayer': {
+ 'Meta': {'object_name': 'ToasterSettingDefaultLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"})
+ },
+ u'orm.variable': {
+ 'Meta': {'object_name': 'Variable'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
+ 'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.variablehistory': {
+ 'Meta': {'object_name': 'VariableHistory'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
+ }
+ }
+
+ complete_apps = ['orm']
\ No newline at end of file
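The backwards() method above follows South's usual pattern for irreversible changes: where forwards() dropped columns such as ProjectLayer.name whose values cannot be restored, the reverse path raises RuntimeError before the add_column template code that would recreate the column. A minimal sketch of that pattern follows; the 'example' app and 'Widget.note' field are hypothetical placeholders, not part of this patch.

# -*- coding: utf-8 -*-
# Sketch only: hypothetical app/field names, mirroring the generated pattern above.
from south.db import db
from south.v2 import SchemaMigration


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Drop a column whose contents cannot be recovered afterwards.
        db.delete_column(u'example_widget', 'note')

    def backwards(self, orm):
        # Refuse to reverse: the original 'note' values are gone.
        raise RuntimeError("Cannot reverse this migration. 'Widget.note' and its values cannot be restored.")

        # Unreachable template code, left as a starting point for a hand-written reversal.
        db.add_column(u'example_widget', 'note',
                      self.gf('django.db.models.fields.CharField')(max_length=100),
                      keep_default=False)

    models = {}
    complete_apps = ['example']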
diff --git a/bitbake/lib/toaster/orm/migrations/0014_auto__chg_field_package_summary__chg_field_layer_summary__chg_field_re.py b/bitbake/lib/toaster/orm/migrations/0014_auto__chg_field_package_summary__chg_field_layer_summary__chg_field_re.py
new file mode 100644
index 0000000000..7945f15a17
--- /dev/null
+++ b/bitbake/lib/toaster/orm/migrations/0014_auto__chg_field_package_summary__chg_field_layer_summary__chg_field_re.py
@@ -0,0 +1,336 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+
+ # Changing field 'Package.summary'
+ db.alter_column(u'orm_package', 'summary', self.gf('django.db.models.fields.TextField')())
+
+ # Changing field 'Layer.summary'
+ db.alter_column(u'orm_layer', 'summary', self.gf('django.db.models.fields.TextField')(null=True))
+
+ # Changing field 'Recipe.summary'
+ db.alter_column(u'orm_recipe', 'summary', self.gf('django.db.models.fields.TextField')())
+
+ def backwards(self, orm):
+
+ # Changing field 'Package.summary'
+ db.alter_column(u'orm_package', 'summary', self.gf('django.db.models.fields.CharField')(max_length=200))
+
+ # Changing field 'Layer.summary'
+ db.alter_column(u'orm_layer', 'summary', self.gf('django.db.models.fields.CharField')(max_length=200, null=True))
+
+ # Changing field 'Recipe.summary'
+ db.alter_column(u'orm_recipe', 'summary', self.gf('django.db.models.fields.CharField')(max_length=100))
+
+ models = {
+ u'orm.bitbakeversion': {
+ 'Meta': {'object_name': 'BitbakeVersion'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.branch': {
+ 'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
+ 'bitbake_branch': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.build': {
+ 'Meta': {'object_name': 'Build'},
+ 'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+ 'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
+ 'started_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
+ },
+ u'orm.helptext': {
+ 'Meta': {'object_name': 'HelpText'},
+ 'area': ('django.db.models.fields.IntegerField', [], {}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'text': ('django.db.models.fields.TextField', [], {})
+ },
+ u'orm.layer': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
+ 'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
+ },
+ u'orm.layer_version': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
+ 'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.layersource': {
+ 'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
+ 'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+ 'sourcetype': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.layerversiondependency': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.logmessage': {
+ 'Meta': {'object_name': 'LogMessage'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
+ },
+ u'orm.machine': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.package': {
+ 'Meta': {'object_name': 'Package'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+ 'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
+ 'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.package_dependency': {
+ 'Meta': {'object_name': 'Package_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
+ },
+ u'orm.package_file': {
+ 'Meta': {'object_name': 'Package_File'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.project': {
+ 'Meta': {'object_name': 'Project'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+ 'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+ },
+ u'orm.projectlayer': {
+ 'Meta': {'object_name': 'ProjectLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
+ 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
+ },
+ u'orm.projecttarget': {
+ 'Meta': {'object_name': 'ProjectTarget'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+ },
+ u'orm.projectvariable': {
+ 'Meta': {'object_name': 'ProjectVariable'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.recipe': {
+ 'Meta': {'object_name': 'Recipe'},
+ 'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ 'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.recipe_dependency': {
+ 'Meta': {'object_name': 'Recipe_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
+ },
+ u'orm.release': {
+ 'Meta': {'object_name': 'Release'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.releasedefaultlayer': {
+ 'Meta': {'object_name': 'ReleaseDefaultLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer']"}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+ },
+ u'orm.target': {
+ 'Meta': {'object_name': 'Target'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ u'orm.target_file': {
+ 'Meta': {'object_name': 'Target_File'},
+ 'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'inodetype': ('django.db.models.fields.IntegerField', [], {}),
+ 'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+ 'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {}),
+ 'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_image_file': {
+ 'Meta': {'object_name': 'Target_Image_File'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
+ 'file_size': ('django.db.models.fields.IntegerField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_installed_package': {
+ 'Meta': {'object_name': 'Target_Installed_Package'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.task': {
+ 'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
+ 'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ 'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+ 'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
+ 'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
+ },
+ u'orm.task_dependency': {
+ 'Meta': {'object_name': 'Task_Dependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
+ },
+ u'orm.toastersetting': {
+ 'Meta': {'object_name': 'ToasterSetting'},
+ 'helptext': ('django.db.models.fields.TextField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+ 'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'orm.toastersettingdefaultlayer': {
+ 'Meta': {'object_name': 'ToasterSettingDefaultLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"})
+ },
+ u'orm.variable': {
+ 'Meta': {'object_name': 'Variable'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
+ 'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.variablehistory': {
+ 'Meta': {'object_name': 'VariableHistory'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
+ }
+ }
+
+ complete_apps = ['orm']
\ No newline at end of file
diff --git a/bitbake/lib/toaster/orm/migrations/0015_auto__add_field_layer_vcs_web_url__add_field_layer_vcs_web_tree_base_u.py b/bitbake/lib/toaster/orm/migrations/0015_auto__add_field_layer_vcs_web_url__add_field_layer_vcs_web_tree_base_u.py
new file mode 100644
index 0000000000..6e664c9fc8
--- /dev/null
+++ b/bitbake/lib/toaster/orm/migrations/0015_auto__add_field_layer_vcs_web_url__add_field_layer_vcs_web_tree_base_u.py
@@ -0,0 +1,336 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Adding field 'Layer.vcs_web_url'
+ db.add_column(u'orm_layer', 'vcs_web_url',
+ self.gf('django.db.models.fields.URLField')(default=None, max_length=200, null=True),
+ keep_default=False)
+
+ # Adding field 'Layer.vcs_web_tree_base_url'
+ db.add_column(u'orm_layer', 'vcs_web_tree_base_url',
+ self.gf('django.db.models.fields.URLField')(default=None, max_length=200, null=True),
+ keep_default=False)
+
+
+ def backwards(self, orm):
+ # Deleting field 'Layer.vcs_web_url'
+ db.delete_column(u'orm_layer', 'vcs_web_url')
+
+ # Deleting field 'Layer.vcs_web_tree_base_url'
+ db.delete_column(u'orm_layer', 'vcs_web_tree_base_url')
+
+
+ models = {
+ u'orm.bitbakeversion': {
+ 'Meta': {'object_name': 'BitbakeVersion'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.branch': {
+ 'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
+ 'bitbake_branch': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.build': {
+ 'Meta': {'object_name': 'Build'},
+ 'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+ 'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
+ 'started_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
+ },
+ u'orm.helptext': {
+ 'Meta': {'object_name': 'HelpText'},
+ 'area': ('django.db.models.fields.IntegerField', [], {}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'text': ('django.db.models.fields.TextField', [], {})
+ },
+ u'orm.layer': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
+ 'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
+ },
+ u'orm.layer_version': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
+ 'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.layersource': {
+ 'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
+ 'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+ 'sourcetype': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.layerversiondependency': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.logmessage': {
+ 'Meta': {'object_name': 'LogMessage'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
+ },
+ u'orm.machine': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.package': {
+ 'Meta': {'object_name': 'Package'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+ 'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
+ 'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.package_dependency': {
+ 'Meta': {'object_name': 'Package_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
+ },
+ u'orm.package_file': {
+ 'Meta': {'object_name': 'Package_File'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.project': {
+ 'Meta': {'object_name': 'Project'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+ 'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+ },
+ u'orm.projectlayer': {
+ 'Meta': {'object_name': 'ProjectLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
+ 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
+ },
+ u'orm.projecttarget': {
+ 'Meta': {'object_name': 'ProjectTarget'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+ },
+ u'orm.projectvariable': {
+ 'Meta': {'object_name': 'ProjectVariable'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.recipe': {
+ 'Meta': {'object_name': 'Recipe'},
+ 'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ 'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.recipe_dependency': {
+ 'Meta': {'object_name': 'Recipe_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
+ },
+ u'orm.release': {
+ 'Meta': {'object_name': 'Release'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.releasedefaultlayer': {
+ 'Meta': {'object_name': 'ReleaseDefaultLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer']"}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+ },
+ u'orm.target': {
+ 'Meta': {'object_name': 'Target'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ u'orm.target_file': {
+ 'Meta': {'object_name': 'Target_File'},
+ 'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'inodetype': ('django.db.models.fields.IntegerField', [], {}),
+ 'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+ 'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {}),
+ 'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_image_file': {
+ 'Meta': {'object_name': 'Target_Image_File'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
+ 'file_size': ('django.db.models.fields.IntegerField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_installed_package': {
+ 'Meta': {'object_name': 'Target_Installed_Package'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.task': {
+ 'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
+ 'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ 'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+ 'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
+ 'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
+ },
+ u'orm.task_dependency': {
+ 'Meta': {'object_name': 'Task_Dependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
+ },
+ u'orm.toastersetting': {
+ 'Meta': {'object_name': 'ToasterSetting'},
+ 'helptext': ('django.db.models.fields.TextField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+ 'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'orm.toastersettingdefaultlayer': {
+ 'Meta': {'object_name': 'ToasterSettingDefaultLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"})
+ },
+ u'orm.variable': {
+ 'Meta': {'object_name': 'Variable'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
+ 'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.variablehistory': {
+ 'Meta': {'object_name': 'VariableHistory'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
+ }
+ }
+
+ complete_apps = ['orm']
\ No newline at end of file
diff --git a/bitbake/lib/toaster/orm/migrations/0016_auto__add_field_release_helptext__chg_field_release_branch__add_index_.py b/bitbake/lib/toaster/orm/migrations/0016_auto__add_field_release_helptext__chg_field_release_branch__add_index_.py
new file mode 100644
index 0000000000..545c0ba586
--- /dev/null
+++ b/bitbake/lib/toaster/orm/migrations/0016_auto__add_field_release_helptext__chg_field_release_branch__add_index_.py
@@ -0,0 +1,359 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Adding field 'Release.helptext'
+ db.add_column(u'orm_release', 'helptext',
+ self.gf('django.db.models.fields.TextField')(null=True),
+ keep_default=False)
+
+
+ # Renaming column for 'Release.branch' to match new field type.
+ db.delete_column(u'orm_release', 'branch')
+
+ # Changing field 'Release.branch'
+ db.add_column(u'orm_release', 'branch', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Branch'], default=-1))
+
+ # Deleting field 'Branch.bitbake_branch'
+ db.delete_column(u'orm_branch', 'bitbake_branch')
+
+ # Adding unique constraint on 'Recipe', fields ['layer_version', 'file_path']
+ db.create_unique(u'orm_recipe', ['layer_version_id', 'file_path'])
+
+ # Adding unique constraint on 'ProjectLayer', fields ['project', 'layercommit']
+ db.create_unique(u'orm_projectlayer', ['project_id', 'layercommit_id'])
+
+
+ def backwards(self, orm):
+ # Removing unique constraint on 'ProjectLayer', fields ['project', 'layercommit']
+ db.delete_unique(u'orm_projectlayer', ['project_id', 'layercommit_id'])
+
+ # Removing unique constraint on 'Recipe', fields ['layer_version', 'file_path']
+ db.delete_unique(u'orm_recipe', ['layer_version_id', 'file_path'])
+
+ # Deleting field 'Release.helptext'
+ db.delete_column(u'orm_release', 'helptext')
+
+ # Renaming column for 'Release.branch' to match new field type.
+ db.rename_column(u'orm_release', 'branch_id', 'branch')
+ # Changing field 'Release.branch'
+ db.alter_column(u'orm_release', 'branch', self.gf('django.db.models.fields.CharField')(max_length=32))
+ # Adding field 'Branch.bitbake_branch'
+ db.add_column(u'orm_branch', 'bitbake_branch',
+ self.gf('django.db.models.fields.CharField')(default='', max_length=50, blank=True),
+ keep_default=False)
+
+
+ models = {
+ u'orm.bitbakeversion': {
+ 'Meta': {'object_name': 'BitbakeVersion'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.branch': {
+ 'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.build': {
+ 'Meta': {'object_name': 'Build'},
+ 'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+ 'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
+ 'started_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
+ },
+ u'orm.helptext': {
+ 'Meta': {'object_name': 'HelpText'},
+ 'area': ('django.db.models.fields.IntegerField', [], {}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'text': ('django.db.models.fields.TextField', [], {})
+ },
+ u'orm.layer': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
+ 'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
+ },
+ u'orm.layer_version': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
+ 'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.layersource': {
+ 'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
+ 'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+ 'sourcetype': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.layerversiondependency': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.logmessage': {
+ 'Meta': {'object_name': 'LogMessage'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
+ },
+ u'orm.machine': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.package': {
+ 'Meta': {'object_name': 'Package'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+ 'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
+ 'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.package_dependency': {
+ 'Meta': {'object_name': 'Package_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
+ },
+ u'orm.package_file': {
+ 'Meta': {'object_name': 'Package_File'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.project': {
+ 'Meta': {'object_name': 'Project'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+ 'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+ },
+ u'orm.projectlayer': {
+ 'Meta': {'unique_together': "(('project', 'layercommit'),)", 'object_name': 'ProjectLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
+ 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
+ },
+ u'orm.projecttarget': {
+ 'Meta': {'object_name': 'ProjectTarget'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+ },
+ u'orm.projectvariable': {
+ 'Meta': {'object_name': 'ProjectVariable'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.recipe': {
+ 'Meta': {'unique_together': "(('layer_version', 'file_path'),)", 'object_name': 'Recipe'},
+ 'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ 'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.recipe_dependency': {
+ 'Meta': {'object_name': 'Recipe_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
+ },
+ u'orm.release': {
+ 'Meta': {'object_name': 'Release'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'branch': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Branch']"}),
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.releasedefaultlayer': {
+ 'Meta': {'object_name': 'ReleaseDefaultLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer']"}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+ },
+ u'orm.target': {
+ 'Meta': {'object_name': 'Target'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ u'orm.target_file': {
+ 'Meta': {'object_name': 'Target_File'},
+ 'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'inodetype': ('django.db.models.fields.IntegerField', [], {}),
+ 'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+ 'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {}),
+ 'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_image_file': {
+ 'Meta': {'object_name': 'Target_Image_File'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
+ 'file_size': ('django.db.models.fields.IntegerField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_installed_package': {
+ 'Meta': {'object_name': 'Target_Installed_Package'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.task': {
+ 'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
+ 'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ 'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+ 'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
+ 'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
+ },
+ u'orm.task_dependency': {
+ 'Meta': {'object_name': 'Task_Dependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
+ },
+ u'orm.toastersetting': {
+ 'Meta': {'object_name': 'ToasterSetting'},
+ 'helptext': ('django.db.models.fields.TextField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+ 'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'orm.toastersettingdefaultlayer': {
+ 'Meta': {'object_name': 'ToasterSettingDefaultLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"})
+ },
+ u'orm.variable': {
+ 'Meta': {'object_name': 'Variable'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
+ 'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.variablehistory': {
+ 'Meta': {'object_name': 'VariableHistory'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
+ }
+ }
+
+ complete_apps = ['orm']
diff --git a/bitbake/lib/toaster/orm/migrations/0017_auto__del_toastersettingdefaultlayer__add_releaselayersourcepriority__.py b/bitbake/lib/toaster/orm/migrations/0017_auto__del_toastersettingdefaultlayer__add_releaselayersourcepriority__.py
new file mode 100644
index 0000000000..6685b55640
--- /dev/null
+++ b/bitbake/lib/toaster/orm/migrations/0017_auto__del_toastersettingdefaultlayer__add_releaselayersourcepriority__.py
@@ -0,0 +1,396 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Deleting model 'ToasterSettingDefaultLayer'
+ db.delete_table(u'orm_toastersettingdefaultlayer')
+
+ # Adding model 'ReleaseLayerSourcePriority'
+ db.create_table(u'orm_releaselayersourcepriority', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('release', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Release'])),
+ ('layer_source', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.LayerSource'])),
+ ('priority', self.gf('django.db.models.fields.IntegerField')(default=0)),
+ ))
+ db.send_create_signal(u'orm', ['ReleaseLayerSourcePriority'])
+
+ # Adding unique constraint on 'ReleaseLayerSourcePriority', fields ['release', 'layer_source']
+ db.create_unique(u'orm_releaselayersourcepriority', ['release_id', 'layer_source_id'])
+
+ # Deleting field 'Release.branch'
+ db.delete_column(u'orm_release', 'branch_id')
+
+ # Adding field 'Release.branch_name'
+ db.add_column(u'orm_release', 'branch_name',
+ self.gf('django.db.models.fields.CharField')(default='', max_length=50),
+ keep_default=False)
+
+ # Adding unique constraint on 'LayerSource', fields ['name']
+ db.create_unique(u'orm_layersource', ['name'])
+
+ # Deleting field 'ReleaseDefaultLayer.layer'
+ db.delete_column(u'orm_releasedefaultlayer', 'layer_id')
+
+ # Adding field 'ReleaseDefaultLayer.layer_name'
+ db.add_column(u'orm_releasedefaultlayer', 'layer_name',
+ self.gf('django.db.models.fields.CharField')(default='', max_length=100),
+ keep_default=False)
+
+
+ def backwards(self, orm):
+ # Removing unique constraint on 'LayerSource', fields ['name']
+ db.delete_unique(u'orm_layersource', ['name'])
+
+ # Removing unique constraint on 'ReleaseLayerSourcePriority', fields ['release', 'layer_source']
+ db.delete_unique(u'orm_releaselayersourcepriority', ['release_id', 'layer_source_id'])
+
+ # Adding model 'ToasterSettingDefaultLayer'
+ db.create_table(u'orm_toastersettingdefaultlayer', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('layer_version', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Layer_Version'])),
+ ))
+ db.send_create_signal(u'orm', ['ToasterSettingDefaultLayer'])
+
+ # Deleting model 'ReleaseLayerSourcePriority'
+ db.delete_table(u'orm_releaselayersourcepriority')
+
+
+ # User chose to not deal with backwards NULL issues for 'Release.branch'
+ raise RuntimeError("Cannot reverse this migration. 'Release.branch' and its values cannot be restored.")
+
+ # The following code is provided here to aid in writing a correct migration # Adding field 'Release.branch'
+ db.add_column(u'orm_release', 'branch',
+ self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Branch']),
+ keep_default=False)
+
+ # Deleting field 'Release.branch_name'
+ db.delete_column(u'orm_release', 'branch_name')
+
+
+ # User chose to not deal with backwards NULL issues for 'ReleaseDefaultLayer.layer'
+ raise RuntimeError("Cannot reverse this migration. 'ReleaseDefaultLayer.layer' and its values cannot be restored.")
+
+ # The following code is provided here to aid in writing a correct migration # Adding field 'ReleaseDefaultLayer.layer'
+ db.add_column(u'orm_releasedefaultlayer', 'layer',
+ self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Layer']),
+ keep_default=False)
+
+ # Deleting field 'ReleaseDefaultLayer.layer_name'
+ db.delete_column(u'orm_releasedefaultlayer', 'layer_name')
+
+
+ models = {
+ u'orm.bitbakeversion': {
+ 'Meta': {'object_name': 'BitbakeVersion'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.branch': {
+ 'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.build': {
+ 'Meta': {'object_name': 'Build'},
+ 'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+ 'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
+ 'started_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
+ },
+ u'orm.helptext': {
+ 'Meta': {'object_name': 'HelpText'},
+ 'area': ('django.db.models.fields.IntegerField', [], {}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'text': ('django.db.models.fields.TextField', [], {})
+ },
+ u'orm.layer': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
+ 'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
+ },
+ u'orm.layer_version': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
+ 'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.layersource': {
+ 'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
+ 'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '63'}),
+ 'sourcetype': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.layerversiondependency': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.logmessage': {
+ 'Meta': {'object_name': 'LogMessage'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
+ },
+ u'orm.machine': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.package': {
+ 'Meta': {'object_name': 'Package'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+ 'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
+ 'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.package_dependency': {
+ 'Meta': {'object_name': 'Package_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
+ },
+ u'orm.package_file': {
+ 'Meta': {'object_name': 'Package_File'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.project': {
+ 'Meta': {'object_name': 'Project'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+ 'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+ },
+ u'orm.projectlayer': {
+ 'Meta': {'unique_together': "(('project', 'layercommit'),)", 'object_name': 'ProjectLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
+ 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
+ },
+ u'orm.projecttarget': {
+ 'Meta': {'object_name': 'ProjectTarget'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+ },
+ u'orm.projectvariable': {
+ 'Meta': {'object_name': 'ProjectVariable'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.recipe': {
+ 'Meta': {'unique_together': "(('layer_version', 'file_path'),)", 'object_name': 'Recipe'},
+ 'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ 'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.recipe_dependency': {
+ 'Meta': {'object_name': 'Recipe_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
+ },
+ u'orm.release': {
+ 'Meta': {'object_name': 'Release'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'branch_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.releasedefaultlayer': {
+ 'Meta': {'object_name': 'ReleaseDefaultLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+ },
+ u'orm.releaselayersourcepriority': {
+ 'Meta': {'unique_together': "(('release', 'layer_source'),)", 'object_name': 'ReleaseLayerSourcePriority'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.LayerSource']"}),
+ 'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+ },
+ u'orm.target': {
+ 'Meta': {'object_name': 'Target'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ u'orm.target_file': {
+ 'Meta': {'object_name': 'Target_File'},
+ 'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'inodetype': ('django.db.models.fields.IntegerField', [], {}),
+ 'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+ 'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {}),
+ 'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_image_file': {
+ 'Meta': {'object_name': 'Target_Image_File'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
+ 'file_size': ('django.db.models.fields.IntegerField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_installed_package': {
+ 'Meta': {'object_name': 'Target_Installed_Package'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.task': {
+ 'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
+ 'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ 'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+ 'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
+ 'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
+ },
+ u'orm.task_dependency': {
+ 'Meta': {'object_name': 'Task_Dependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
+ },
+ u'orm.toastersetting': {
+ 'Meta': {'object_name': 'ToasterSetting'},
+ 'helptext': ('django.db.models.fields.TextField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+ 'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'orm.variable': {
+ 'Meta': {'object_name': 'Variable'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
+ 'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.variablehistory': {
+ 'Meta': {'object_name': 'VariableHistory'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
+ }
+ }
+
+ complete_apps = ['orm']
\ No newline at end of file
diff --git a/bitbake/lib/toaster/orm/migrations/0018_auto__add_field_layer_version_project.py b/bitbake/lib/toaster/orm/migrations/0018_auto__add_field_layer_version_project.py
new file mode 100644
index 0000000000..7284bb8429
--- /dev/null
+++ b/bitbake/lib/toaster/orm/migrations/0018_auto__add_field_layer_version_project.py
@@ -0,0 +1,331 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Adding field 'Layer_Version.project'
+ db.add_column(u'orm_layer_version', 'project',
+ self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['orm.Project'], null=True),
+ keep_default=False)
+
+
+ def backwards(self, orm):
+ # Deleting field 'Layer_Version.project'
+ db.delete_column(u'orm_layer_version', 'project_id')
+
+
+ models = {
+ u'orm.bitbakeversion': {
+ 'Meta': {'object_name': 'BitbakeVersion'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.branch': {
+ 'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.build': {
+ 'Meta': {'object_name': 'Build'},
+ 'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+ 'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
+ 'started_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
+ },
+ u'orm.helptext': {
+ 'Meta': {'object_name': 'HelpText'},
+ 'area': ('django.db.models.fields.IntegerField', [], {}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'text': ('django.db.models.fields.TextField', [], {})
+ },
+ u'orm.layer': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
+ 'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
+ },
+ u'orm.layer_version': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
+ 'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Project']", 'null': 'True'}),
+ 'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.layersource': {
+ 'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
+ 'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '63'}),
+ 'sourcetype': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.layerversiondependency': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.logmessage': {
+ 'Meta': {'object_name': 'LogMessage'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
+ },
+ u'orm.machine': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.package': {
+ 'Meta': {'object_name': 'Package'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+ 'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
+ 'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.package_dependency': {
+ 'Meta': {'object_name': 'Package_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
+ },
+ u'orm.package_file': {
+ 'Meta': {'object_name': 'Package_File'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.project': {
+ 'Meta': {'object_name': 'Project'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+ 'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+ },
+ u'orm.projectlayer': {
+ 'Meta': {'unique_together': "(('project', 'layercommit'),)", 'object_name': 'ProjectLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
+ 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
+ },
+ u'orm.projecttarget': {
+ 'Meta': {'object_name': 'ProjectTarget'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+ },
+ u'orm.projectvariable': {
+ 'Meta': {'object_name': 'ProjectVariable'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.recipe': {
+ 'Meta': {'unique_together': "(('layer_version', 'file_path'),)", 'object_name': 'Recipe'},
+ 'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ 'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.recipe_dependency': {
+ 'Meta': {'object_name': 'Recipe_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
+ },
+ u'orm.release': {
+ 'Meta': {'object_name': 'Release'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'branch_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.releasedefaultlayer': {
+ 'Meta': {'object_name': 'ReleaseDefaultLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+ },
+ u'orm.releaselayersourcepriority': {
+ 'Meta': {'unique_together': "(('release', 'layer_source'),)", 'object_name': 'ReleaseLayerSourcePriority'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.LayerSource']"}),
+ 'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+ },
+ u'orm.target': {
+ 'Meta': {'object_name': 'Target'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ u'orm.target_file': {
+ 'Meta': {'object_name': 'Target_File'},
+ 'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'inodetype': ('django.db.models.fields.IntegerField', [], {}),
+ 'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+ 'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {}),
+ 'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_image_file': {
+ 'Meta': {'object_name': 'Target_Image_File'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
+ 'file_size': ('django.db.models.fields.IntegerField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_installed_package': {
+ 'Meta': {'object_name': 'Target_Installed_Package'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.task': {
+ 'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
+ 'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ 'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+ 'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
+ 'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
+ },
+ u'orm.task_dependency': {
+ 'Meta': {'object_name': 'Task_Dependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
+ },
+ u'orm.toastersetting': {
+ 'Meta': {'object_name': 'ToasterSetting'},
+ 'helptext': ('django.db.models.fields.TextField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+ 'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'orm.variable': {
+ 'Meta': {'object_name': 'Variable'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
+ 'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.variablehistory': {
+ 'Meta': {'object_name': 'VariableHistory'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
+ }
+ }
+
+ complete_apps = ['orm']
\ No newline at end of file
diff --git a/bitbake/lib/toaster/orm/migrations/0019_auto__add_buildartifact.py b/bitbake/lib/toaster/orm/migrations/0019_auto__add_buildartifact.py
new file mode 100644
index 0000000000..0dce9ead20
--- /dev/null
+++ b/bitbake/lib/toaster/orm/migrations/0019_auto__add_buildartifact.py
@@ -0,0 +1,342 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ def forwards(self, orm):
+ # Adding model 'BuildArtifact'
+ db.create_table(u'orm_buildartifact', (
+ (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('build', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['orm.Build'])),
+ ('file_name', self.gf('django.db.models.fields.FilePathField')(max_length=100)),
+ ('file_size', self.gf('django.db.models.fields.IntegerField')()),
+ ))
+ db.send_create_signal(u'orm', ['BuildArtifact'])
+
+
+ def backwards(self, orm):
+ # Deleting model 'BuildArtifact'
+ db.delete_table(u'orm_buildartifact')
+
+
+ models = {
+ u'orm.bitbakeversion': {
+ 'Meta': {'object_name': 'BitbakeVersion'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'giturl': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.branch': {
+ 'Meta': {'unique_together': "(('layer_source', 'name'), ('layer_source', 'up_id'))", 'object_name': 'Branch'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'True', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.build': {
+ 'Meta': {'object_name': 'Build'},
+ 'bitbake_version': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
+ 'build_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'completed_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'cooker_log_path': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
+ 'distro': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'distro_version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'errors_no': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'machine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']", 'null': 'True'}),
+ 'started_on': ('django.db.models.fields.DateTimeField', [], {}),
+ 'timespent': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'warnings_no': ('django.db.models.fields.IntegerField', [], {'default': '0'})
+ },
+ u'orm.buildartifact': {
+ 'Meta': {'object_name': 'BuildArtifact'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+ 'file_size': ('django.db.models.fields.IntegerField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
+ },
+ u'orm.helptext': {
+ 'Meta': {'object_name': 'HelpText'},
+ 'area': ('django.db.models.fields.IntegerField', [], {}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'helptext_build'", 'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'text': ('django.db.models.fields.TextField', [], {})
+ },
+ u'orm.layer': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'), ('layer_source', 'name'))", 'object_name': 'Layer'},
+ 'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_index_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'local_path': ('django.db.models.fields.FilePathField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'vcs_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_file_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_tree_base_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'}),
+ 'vcs_web_url': ('django.db.models.fields.URLField', [], {'default': 'None', 'max_length': '200', 'null': 'True'})
+ },
+ u'orm.layer_version': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Layer_Version'},
+ 'branch': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'layer_version_build'", 'null': 'True', 'to': u"orm['orm.Build']"}),
+ 'commit': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'dirpath': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'layer_version_layer'", 'to': u"orm['orm.Layer']"}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Project']", 'null': 'True'}),
+ 'up_branch': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.Branch']", 'null': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.layersource': {
+ 'Meta': {'unique_together': "(('sourcetype', 'apiurl'),)", 'object_name': 'LayerSource'},
+ 'apiurl': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '63'}),
+ 'sourcetype': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.layerversiondependency': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'LayerVersionDependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependees'", 'to': u"orm['orm.Layer_Version']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': u"orm['orm.Layer_Version']"}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.logmessage': {
+ 'Meta': {'object_name': 'LogMessage'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'lineno': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'pathname': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Task']", 'null': 'True', 'blank': 'True'})
+ },
+ u'orm.machine': {
+ 'Meta': {'unique_together': "(('layer_source', 'up_id'),)", 'object_name': 'Machine'},
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']"}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'})
+ },
+ u'orm.package': {
+ 'Meta': {'object_name': 'Package'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'installed_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+ 'installed_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Recipe']", 'null': 'True'}),
+ 'revision': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.package_dependency': {
+ 'Meta': {'object_name': 'Package_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_target'", 'to': u"orm['orm.Package']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'package_dependencies_source'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']", 'null': 'True'})
+ },
+ u'orm.package_file': {
+ 'Meta': {'object_name': 'Package_File'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildfilelist_package'", 'to': u"orm['orm.Package']"}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {})
+ },
+ u'orm.project': {
+ 'Meta': {'object_name': 'Project'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"}),
+ 'short_description': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
+ 'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
+ 'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
+ },
+ u'orm.projectlayer': {
+ 'Meta': {'unique_together': "(('project', 'layercommit'),)", 'object_name': 'ProjectLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layercommit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Layer_Version']", 'null': 'True'}),
+ 'optional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"})
+ },
+ u'orm.projecttarget': {
+ 'Meta': {'object_name': 'ProjectTarget'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'task': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
+ },
+ u'orm.projectvariable': {
+ 'Meta': {'object_name': 'ProjectVariable'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Project']"}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.recipe': {
+ 'Meta': {'unique_together': "(('layer_version', 'file_path'),)", 'object_name': 'Recipe'},
+ 'bugtracker': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'file_path': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ 'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['orm.LayerSource']", 'null': 'True'}),
+ 'layer_version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'recipe_layer_version'", 'to': u"orm['orm.Layer_Version']"}),
+ 'license': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'section': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'up_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
+ 'up_id': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
+ },
+ u'orm.recipe_dependency': {
+ 'Meta': {'object_name': 'Recipe_Dependency'},
+ 'dep_type': ('django.db.models.fields.IntegerField', [], {}),
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_depends'", 'to': u"orm['orm.Recipe']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'r_dependencies_recipe'", 'to': u"orm['orm.Recipe']"})
+ },
+ u'orm.release': {
+ 'Meta': {'object_name': 'Release'},
+ 'bitbake_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.BitbakeVersion']"}),
+ 'branch_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50'}),
+ 'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'helptext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
+ },
+ u'orm.releasedefaultlayer': {
+ 'Meta': {'object_name': 'ReleaseDefaultLayer'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+ },
+ u'orm.releaselayersourcepriority': {
+ 'Meta': {'unique_together': "(('release', 'layer_source'),)", 'object_name': 'ReleaseLayerSourcePriority'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'layer_source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.LayerSource']"}),
+ 'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'release': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Release']"})
+ },
+ u'orm.target': {
+ 'Meta': {'object_name': 'Target'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Build']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'image_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'is_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'license_manifest_path': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True'}),
+ 'target': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ u'orm.target_file': {
+ 'Meta': {'object_name': 'Target_File'},
+ 'directory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'directory_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'group': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'inodetype': ('django.db.models.fields.IntegerField', [], {}),
+ 'owner': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'path': ('django.db.models.fields.FilePathField', [], {'max_length': '100'}),
+ 'permission': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
+ 'size': ('django.db.models.fields.IntegerField', [], {}),
+ 'sym_target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'symlink_set'", 'null': 'True', 'to': u"orm['orm.Target_File']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_image_file': {
+ 'Meta': {'object_name': 'Target_Image_File'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '254'}),
+ 'file_size': ('django.db.models.fields.IntegerField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.target_installed_package': {
+ 'Meta': {'object_name': 'Target_Installed_Package'},
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'package': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buildtargetlist_package'", 'to': u"orm['orm.Package']"}),
+ 'target': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['orm.Target']"})
+ },
+ u'orm.task': {
+ 'Meta': {'ordering': "('order', 'recipe')", 'unique_together': "(('build', 'recipe', 'task_name'),)", 'object_name': 'Task'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_build'", 'to': u"orm['orm.Build']"}),
+ 'cpu_usage': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ 'disk_io': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'elapsed_time': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'logfile': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
+ 'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'outcome': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
+ 'path_to_sstate_obj': ('django.db.models.fields.FilePathField', [], {'max_length': '500', 'blank': 'True'}),
+ 'recipe': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'build_recipe'", 'to': u"orm['orm.Recipe']"}),
+ 'script_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'source_url': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'}),
+ 'sstate_checksum': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
+ 'sstate_result': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
+ 'task_executed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'task_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'work_directory': ('django.db.models.fields.FilePathField', [], {'max_length': '255', 'blank': 'True'})
+ },
+ u'orm.task_dependency': {
+ 'Meta': {'object_name': 'Task_Dependency'},
+ 'depends_on': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_depends'", 'to': u"orm['orm.Task']"}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'task_dependencies_task'", 'to': u"orm['orm.Task']"})
+ },
+ u'orm.toastersetting': {
+ 'Meta': {'object_name': 'ToasterSetting'},
+ 'helptext': ('django.db.models.fields.TextField', [], {}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '63'}),
+ 'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
+ },
+ u'orm.variable': {
+ 'Meta': {'object_name': 'Variable'},
+ 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variable_build'", 'to': u"orm['orm.Build']"}),
+ 'changed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'human_readable_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'variable_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'variable_value': ('django.db.models.fields.TextField', [], {'blank': 'True'})
+ },
+ u'orm.variablehistory': {
+ 'Meta': {'object_name': 'VariableHistory'},
+ 'file_name': ('django.db.models.fields.FilePathField', [], {'max_length': '255'}),
+ u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'line_number': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
+ 'operation': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'value': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
+ 'variable': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vhistory'", 'to': u"orm['orm.Variable']"})
+ }
+ }
+
+ complete_apps = ['orm']
\ No newline at end of file
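
The frozen dictionary above is South's snapshot of the ORM at the point this migration was generated; every u'orm.*' key mirrors a model defined in orm/models.py (the next file in this commit). As a reading aid, a minimal sketch of the model class implied by the u'orm.release' entry, reconstructed purely from the frozen field tuples:

    # Sketch: how a frozen South entry maps back onto a Django model declaration.
    # u'orm.release' lists name/description/bitbake_version/branch_name/helptext,
    # which corresponds to roughly the following class.
    from django.db import models

    class Release(models.Model):
        name = models.CharField(max_length=32, unique=True)
        description = models.CharField(max_length=255)
        bitbake_version = models.ForeignKey('BitbakeVersion')   # orm['orm.BitbakeVersion']
        branch_name = models.CharField(max_length=50, default='')
        helptext = models.TextField(null=True)
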
diff --git a/bitbake/lib/toaster/orm/models.py b/bitbake/lib/toaster/orm/models.py
index f19a4370c8..46b704c130 100644
--- a/bitbake/lib/toaster/orm/models.py
+++ b/bitbake/lib/toaster/orm/models.py
@@ -20,32 +20,57 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from django.db import models
-from django.db.models import F
-from django.utils.encoding import python_2_unicode_compatible
+from django.db.models import F, Q
+from django.utils import timezone
+
+
+from django.core import validators
+from django.conf import settings
+
+class GitURLValidator(validators.URLValidator):
+ import re
+ regex = re.compile(
+ r'^(?:ssh|git|http|ftp)s?://' # http:// or https://
+ r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
+ r'localhost|' # localhost...
+ r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|' # ...or ipv4
+ r'\[?[A-F0-9]*:[A-F0-9:]+\]?)' # ...or ipv6
+ r'(?::\d+)?' # optional port
+ r'(?:/?|[/?]\S+)$', re.IGNORECASE)
+
+def GitURLField(**kwargs):
+ r = models.URLField(**kwargs)
+ for i in xrange(len(r.validators)):
+ if isinstance(r.validators[i], validators.URLValidator):
+ r.validators[i] = GitURLValidator()
+ return r
+
+
+class ToasterSetting(models.Model):
+ name = models.CharField(max_length=63)
+ helptext = models.TextField()
+ value = models.CharField(max_length=255)
+
+ def __unicode__(self):
+ return "Setting %s" % self.name
class ProjectManager(models.Manager):
- def create_project(self, name, branch, short_description):
- prj = self.model(name = name, branch = branch, short_description = short_description)
+ def create_project(self, name, release):
+ prj = self.model(name = name, bitbake_version = release.bitbake_version, release = release)
prj.save()
- # create default variables
- ProjectVariable.objects.create(project = prj, name = "MACHINE", value = "qemux86")
- ProjectVariable.objects.create(project = prj, name = "DISTRO", value = "poky")
-
- # create default layers
- ProjectLayer.objects.create(project = prj,
- name = "meta",
- giturl = "git://git.yoctoproject.org/poky",
- commit = branch,
- dirpath = "meta",
- optional = False)
-
- ProjectLayer.objects.create(project = prj,
- name = "meta-yocto",
- giturl = "git://git.yoctoproject.org/poky",
- commit = branch,
- dirpath = "meta-yocto",
- optional = False)
+ for defaultconf in ToasterSetting.objects.filter(name__startswith="DEFCONF_"):
+ name = defaultconf.name[8:]
+ ProjectVariable.objects.create( project = prj,
+ name = name,
+ value = defaultconf.value)
+
+
+ for rdl in release.releasedefaultlayer_set.all():
+ lv = Layer_Version.objects.filter(layer__name = rdl.layer_name, up_branch__name = release.branch_name)[0].get_equivalents_wpriority(prj)[0]
+ ProjectLayer.objects.create( project = prj,
+ layercommit = lv,
+ optional = False )
return prj
@@ -56,9 +81,11 @@ class ProjectManager(models.Manager):
raise Exception("Invalid call to Project.objects.get_or_create. Use Project.objects.create_project() to create a project")
class Project(models.Model):
+ search_allowed_fields = ['name', 'short_description', 'release__name', 'release__branch_name']
name = models.CharField(max_length=100)
- branch = models.CharField(max_length=50)
short_description = models.CharField(max_length=50, blank=True)
+ bitbake_version = models.ForeignKey('BitbakeVersion')
+ release = models.ForeignKey("Release")
created = models.DateTimeField(auto_now_add = True)
updated = models.DateTimeField(auto_now = True)
# This is a horrible hack; since Toaster has no "User" model available when
@@ -68,21 +95,59 @@ class Project(models.Model):
user_id = models.IntegerField(null = True)
objects = ProjectManager()
+ def __unicode__(self):
+ return "%s (%s, %s)" % (self.name, self.release, self.bitbake_version)
+
+ # returns a queryset of compatible layers for a project
+ def compatible_layerversions(self, release = None, layer_name = None):
+ if release == None:
+ release = self.release
+ # layers on the same branch or layers specifically set for this project
+ queryset = Layer_Version.objects.filter((Q(up_branch__name = release.branch_name) & Q(project = None)) | Q(project = self))
+ if layer_name is not None:
+ # we select only a layer name
+ queryset = queryset.filter(layer__name = layer_name)
+
+ # order by layer version priority
+ queryset = queryset.filter(layer_source__releaselayersourcepriority__release = release).order_by("-layer_source__releaselayersourcepriority__priority")
+
+ return queryset
+
+ # returns a set of layer-equivalent set of layers already in project
+ def projectlayer_equivalent_set(self):
+ return [j for i in [x.layercommit.get_equivalents_wpriority(self) for x in self.projectlayer_set.all()] for j in i]
def schedule_build(self):
- from bldcontrol.models import BuildRequest, BRTarget, BRLayer, BRVariable
+ from bldcontrol.models import BuildRequest, BRTarget, BRLayer, BRVariable, BRBitbake
br = BuildRequest.objects.create(project = self)
- for l in self.projectlayer_set.all():
- BRLayer.objects.create(req = br, name = l.name, giturl = l.giturl, commit = l.commit, dirpath = l.dirpath)
- for t in self.projecttarget_set.all():
- BRTarget.objects.create(req = br, target = t.target, task = t.task)
- for v in self.projectvariable_set.all():
- BRVariable.objects.create(req = br, name = v.name, value = v.value)
-
- br.state = BuildRequest.REQ_QUEUED
- br.save()
- return br
+ try:
+ BRBitbake.objects.create(req = br,
+ giturl = self.bitbake_version.giturl,
+ commit = self.bitbake_version.branch,
+ dirpath = self.bitbake_version.dirpath)
+
+ for l in self.projectlayer_set.all().order_by("pk"):
+ commit = l.layercommit.commit
+ print("ii Building layer ", l.layercommit.layer.name, " at commit ", commit)
+ if l.layercommit.up_branch:
+ commit = l.layercommit.up_branch.name
+ print("ii Building layer ", l.layercommit.layer.name, " at upbranch ", commit)
+ if l.layercommit.branch:
+ commit = l.layercommit.branch
+ print("ii Building layer ", l.layercommit.layer.name, " at actual_branch ", commit)
+ BRLayer.objects.create(req = br, name = l.layercommit.layer.name, giturl = l.layercommit.layer.vcs_url, commit = commit, dirpath = l.layercommit.dirpath)
+ for t in self.projecttarget_set.all():
+ BRTarget.objects.create(req = br, target = t.target, task = t.task)
+ for v in self.projectvariable_set.all():
+ BRVariable.objects.create(req = br, name = v.name, value = v.value)
+
+ br.state = BuildRequest.REQ_QUEUED
+ br.save()
+ except Exception as e:
+ br.delete()
+ raise e
+ return br
class Build(models.Model):
SUCCEEDED = 0
@@ -111,16 +176,60 @@ class Build(models.Model):
build_name = models.CharField(max_length=100)
bitbake_version = models.CharField(max_length=50)
+ def completeper(self):
+ tf = Task.objects.filter(build = self)
+ tfc = tf.count()
+ if tfc > 0:
+ completeper = tf.exclude(order__isnull=True).count()*100/tf.count()
+ else:
+ completeper = 0
+ return completeper
+
+ def eta(self):
+ from django.utils import timezone
+ eta = timezone.now()
+ completeper = self.completeper()
+ if completeper > 0:
+ eta = timezone.now() + ((timezone.now() - self.started_on)*(100-completeper)/completeper)
+ return eta
+
+
def get_sorted_target_list(self):
tgts = Target.objects.filter(build_id = self.id).order_by( 'target' );
return( tgts );
+ @property
+ def toaster_exceptions(self):
+ return self.logmessage_set.filter(level=LogMessage.EXCEPTION)
+
+
+# an Artifact is anything that results from a Build, and may be of interest to the user, and is not stored elsewhere
+class BuildArtifact(models.Model):
+ build = models.ForeignKey(Build)
+ file_name = models.FilePathField()
+ file_size = models.IntegerField()
+
+
+ def get_local_file_name(self):
+ # path relative to the build's DEPLOY_DIR, so the artifact can be addressed inside the deploy directory
+ deploydir = Variable.objects.get(build = self.build, variable_name="DEPLOY_DIR").variable_value
+ return self.file_name[len(deploydir)+1:]
+
+
+ def is_available(self):
+ if settings.MANAGED and self.build.project is not None:
+ return self.build.buildrequest.environment.has_artifact(self.file_name)
+ return False
+
class ProjectTarget(models.Model):
project = models.ForeignKey(Project)
target = models.CharField(max_length=100)
task = models.CharField(max_length=100, null=True)
-@python_2_unicode_compatible
class Target(models.Model):
search_allowed_fields = ['target', 'file_name']
build = models.ForeignKey(Build)
@@ -132,7 +241,7 @@ class Target(models.Model):
def package_count(self):
return Target_Installed_Package.objects.filter(target_id__exact=self.id).count()
- def __str__(self):
+ def __unicode__(self):
return self.target
class Target_Image_File(models.Model):
@@ -294,7 +403,7 @@ class Package(models.Model):
installed_name = models.CharField(max_length=100, default='')
version = models.CharField(max_length=100, blank=True)
revision = models.CharField(max_length=32, blank=True)
- summary = models.CharField(max_length=200, blank=True)
+ summary = models.TextField(blank=True)
description = models.TextField(blank=True)
size = models.IntegerField(default=0)
installed_size = models.IntegerField(default=0)
@@ -327,10 +436,10 @@ class Package_Dependency(models.Model):
(TYPE_RREPLACES, "replaces"),
(TYPE_RCONFLICTS, "conflicts"),
)
- ''' Indexed by dep_type, in view order, key for short name and help
+ """ Indexed by dep_type, in view order, key for short name and help
description which when viewed will be printf'd with the
package name.
- '''
+ """
DEPENDS_DICT = {
TYPE_RDEPENDS : ("depends", "%s is required to run %s"),
TYPE_TRDEPENDS : ("depends", "%s is required to run %s"),
@@ -358,11 +467,16 @@ class Package_File(models.Model):
size = models.IntegerField()
class Recipe(models.Model):
- search_allowed_fields = ['name', 'version', 'file_path', 'section', 'license', 'layer_version__layer__name', 'layer_version__branch', 'layer_version__commit', 'layer_version__layer__local_path']
- name = models.CharField(max_length=100, blank=True)
- version = models.CharField(max_length=100, blank=True)
+ search_allowed_fields = ['name', 'version', 'file_path', 'section', 'description', 'license', 'layer_version__layer__name', 'layer_version__branch', 'layer_version__commit', 'layer_version__layer__local_path', 'layer_version__layer_source__name']
+
+ layer_source = models.ForeignKey('LayerSource', default = None, null = True) # from where did we get this recipe
+ up_id = models.IntegerField(null = True, default = None) # id of entry in the source
+ up_date = models.DateTimeField(null = True, default = None)
+
+ name = models.CharField(max_length=100, blank=True) # pn
+ version = models.CharField(max_length=100, blank=True) # pv
layer_version = models.ForeignKey('Layer_Version', related_name='recipe_layer_version')
- summary = models.CharField(max_length=100, blank=True)
+ summary = models.TextField(blank=True)
description = models.TextField(blank=True)
section = models.CharField(max_length=100, blank=True)
license = models.CharField(max_length=200, blank=True)
@@ -370,6 +484,30 @@ class Recipe(models.Model):
bugtracker = models.URLField(blank=True)
file_path = models.FilePathField(max_length=255)
+ def get_vcs_link_url(self):
+ if self.layer_version.layer.vcs_web_file_base_url is None:
+ return ""
+ return self.layer_version.layer.vcs_web_file_base_url.replace('%path%', self.file_path).replace('%branch%', self.layer_version.up_branch.name)
+
+ def get_layersource_view_url(self):
+ if self.layer_source is None:
+ return ""
+
+ url = self.layer_source.get_object_view(self.layer_version.up_branch, "recipes", self.name)
+ return url
+
+ def __unicode__(self):
+ return "Recipe " + self.name + ":" + self.version
+
+ def get_local_path(self):
+ if settings.MANAGED and self.layer_version.build.project is not None:
+ return self.file_path[len(self.layer_version.layer.local_path)+1:]
+
+ return self.file_path
+
+ class Meta:
+ unique_together = ("layer_version", "file_path")
+
class Recipe_DependencyManager(models.Manager):
use_for_related_fields = True
@@ -389,27 +527,465 @@ class Recipe_Dependency(models.Model):
dep_type = models.IntegerField(choices=DEPENDS_TYPE)
objects = Recipe_DependencyManager()
-class ProjectLayer(models.Model):
- project = models.ForeignKey(Project)
- name = models.CharField(max_length = 100)
- giturl = models.CharField(max_length = 254)
- commit = models.CharField(max_length = 254)
- dirpath = models.CharField(max_length = 254)
- optional = models.BooleanField(default = True)
+class Machine(models.Model):
+ layer_source = models.ForeignKey('LayerSource', default = None, null = True) # from where did we get this machine
+ up_id = models.IntegerField(null = True, default = None) # id of entry in the source
+ up_date = models.DateTimeField(null = True, default = None)
+
+ layer_version = models.ForeignKey('Layer_Version')
+ name = models.CharField(max_length=255)
+ description = models.CharField(max_length=255)
+
+ def __unicode__(self):
+ return "Machine " + self.name + "(" + self.description + ")"
+
+ class Meta:
+ unique_together = ("layer_source", "up_id")
+
+
+from django.db.models.base import ModelBase
+
+class InheritanceMetaclass(ModelBase):
+ def __call__(cls, *args, **kwargs):
+ obj = super(InheritanceMetaclass, cls).__call__(*args, **kwargs)
+ return obj.get_object()
+
+
+class LayerSource(models.Model):
+ __metaclass__ = InheritanceMetaclass
+
+ class Meta:
+ unique_together = (('sourcetype', 'apiurl'), )
+
+ TYPE_LOCAL = 0
+ TYPE_LAYERINDEX = 1
+ TYPE_IMPORTED = 2
+ SOURCE_TYPE = (
+ (TYPE_LOCAL, "local"),
+ (TYPE_LAYERINDEX, "layerindex"),
+ (TYPE_IMPORTED, "imported"),
+ )
+
+ name = models.CharField(max_length=63, unique = True)
+ sourcetype = models.IntegerField(choices=SOURCE_TYPE)
+ apiurl = models.CharField(max_length=255, null=True, default=None)
+
+ def update(self):
+ """
+ Updates the local database information from the upstream layer source
+ """
+ raise Exception("Abstract, update() must be implemented by all LayerSource-derived classes (object is %s)" % str(vars(self)))
+
+ def save(self, *args, **kwargs):
+ if isinstance(self, LocalLayerSource):
+ self.sourcetype = LayerSource.TYPE_LOCAL
+ elif isinstance(self, LayerIndexLayerSource):
+ self.sourcetype = LayerSource.TYPE_LAYERINDEX
+ elif isinstance(self, ImportedLayerSource):
+ self.sourcetype = LayerSource.TYPE_IMPORTED
+ elif self.sourcetype == None:
+ raise Exception("Unknown LayerSource-derived class. If you added a new layer source type, fill out all code stubs.")
+ return super(LayerSource, self).save(*args, **kwargs)
+
+ def get_object(self):
+ if self.sourcetype == LayerSource.TYPE_LOCAL:
+ self.__class__ = LocalLayerSource
+ elif self.sourcetype == LayerSource.TYPE_LAYERINDEX:
+ self.__class__ = LayerIndexLayerSource
+ elif self.sourcetype == LayerSource.TYPE_IMPORTED:
+ self.__class__ = ImportedLayerSource
+ else:
+ raise Exception("Unknown LayerSource type. If you added a new layer source type, fill out all code stubs.")
+ return self
+
+ def __unicode__(self):
+ return "%s (%s)" % (self.name, self.sourcetype)
+
+
+class LocalLayerSource(LayerSource):
+ class Meta(LayerSource._meta.__class__):
+ proxy = True
+
+ def __init__(self, *args, **kwargs):
+ super(LocalLayerSource, self).__init__(*args, **kwargs)
+ self.sourcetype = LayerSource.TYPE_LOCAL
+
+ def update(self):
+ """
+ Fetches layer, recipe and machine information from local repository
+ """
+ pass
+
+class ImportedLayerSource(LayerSource):
+ class Meta(LayerSource._meta.__class__):
+ proxy = True
+
+ def __init__(self, *args, **kwargs):
+ super(ImportedLayerSource, self).__init__(*args, **kwargs)
+ self.sourcetype = LayerSource.TYPE_IMPORTED
+
+ def update(self):
+ """
+ Fetches layer, recipe and machine information from local repository
+ """
+ pass
+
+
+class LayerIndexLayerSource(LayerSource):
+ class Meta(LayerSource._meta.__class__):
+ proxy = True
+
+ def __init__(self, *args, **kwargs):
+ super(LayerIndexLayerSource, self).__init__(*args, **kwargs)
+ self.sourcetype = LayerSource.TYPE_LAYERINDEX
+
+ def get_object_view(self, branch, objectype, upid):
+ if self != branch.layer_source:
+ raise Exception("Invalid branch specification")
+ return self.apiurl + "../branch/" + branch.name + "/" + objectype + "/?q=" + str(upid)
+
+ def update(self):
+ """
+ Fetches layer, recipe and machine information from remote repository
+ """
+ assert self.apiurl is not None
+ from django.db import IntegrityError
+
+ import httplib, urlparse, json
+ import os
+ proxy_settings = os.environ.get("http_proxy", None)
+
+ def _get_json_response(apiurl = self.apiurl):
+ conn = None
+ _parsedurl = urlparse.urlparse(apiurl)
+ path = _parsedurl.path
+ query = _parsedurl.query
+ def parse_url(url):
+ parsedurl = urlparse.urlparse(url)
+ try:
+ (host, port) = parsedurl.netloc.split(":")
+ except ValueError:
+ host = parsedurl.netloc
+ port = None
+
+ if port is None:
+ port = 80
+ else:
+ port = int(port)
+ return (host, port)
+
+ if proxy_settings is None:
+ host, port = parse_url(apiurl)
+ conn = httplib.HTTPConnection(host, port)
+ conn.request("GET", path + "?" + query)
+ else:
+ host, port = parse_url(proxy_settings)
+ conn = httplib.HTTPConnection(host, port)
+ conn.request("GET", apiurl)
+
+ r = conn.getresponse()
+ if r.status != 200:
+ raise Exception("Failed to read " + path + ": %d %s" % (r.status, r.reason))
+ return json.loads(r.read())
+
+ # verify we can get the basic api
+ try:
+ apilinks = _get_json_response()
+ except Exception as e:
+ import traceback
+ if proxy_settings is not None:
+ print "EE: Using proxy ", proxy_settings
+ print "EE: could not connect to %s, skipping update: %s\n%s" % (self.apiurl, e, traceback.format_exc(e))
+ return
+
+ # update branches; only those that we already have names listed in the Releases table
+ whitelist_branch_names = map(lambda x: x.branch_name, Release.objects.all())
+
+ branches_info = _get_json_response(apilinks['branches']
+ + "?filter=name:%s" % "OR".join(whitelist_branch_names))
+ for bi in branches_info:
+ b, created = Branch.objects.get_or_create(layer_source = self, name = bi['name'])
+ b.up_id = bi['id']
+ b.up_date = bi['updated']
+ b.name = bi['name']
+ b.short_description = bi['short_description']
+ b.save()
+
+ # update layers
+ layers_info = _get_json_response(apilinks['layerItems'])
+ for li in layers_info:
+ l, created = Layer.objects.get_or_create(layer_source = self, name = li['name'])
+ l.up_id = li['id']
+ l.up_date = li['updated']
+ l.vcs_url = li['vcs_url']
+ l.vcs_web_url = li['vcs_web_url']
+ l.vcs_web_tree_base_url = li['vcs_web_tree_base_url']
+ l.vcs_web_file_base_url = li['vcs_web_file_base_url']
+ l.summary = li['summary']
+ l.description = li['description']
+ l.save()
+
+ # update layerbranches/layer_versions
+ layerbranches_info = _get_json_response(apilinks['layerBranches']
+ + "?filter=branch:%s" % "OR".join(map(lambda x: str(x.up_id), [i for i in Branch.objects.filter(layer_source = self) if i.up_id is not None] ))
+ )
+ for lbi in layerbranches_info:
+ lv, created = Layer_Version.objects.get_or_create(layer_source = self,
+ up_id = lbi['id'],
+ layer=Layer.objects.get(layer_source = self, up_id = lbi['layer'])
+ )
+
+ lv.up_date = lbi['updated']
+ lv.up_branch = Branch.objects.get(layer_source = self, up_id = lbi['branch'])
+ lv.branch = lbi['actual_branch']
+ lv.commit = lbi['vcs_last_rev']
+ lv.dirpath = lbi['vcs_subdir']
+ lv.save()
+
+ # update layer dependencies
+ layerdependencies_info = _get_json_response(apilinks['layerDependencies'])
+ dependlist = {}
+ for ldi in layerdependencies_info:
+ try:
+ lv = Layer_Version.objects.get(layer_source = self, up_id = ldi['layerbranch'])
+ except Layer_Version.DoesNotExist as e:
+ continue
+
+ if lv not in dependlist:
+ dependlist[lv] = []
+ try:
+ dependlist[lv].append(Layer_Version.objects.get(layer_source = self, layer__up_id = ldi['dependency'], up_branch = lv.up_branch))
+ except Layer_Version.DoesNotExist as e:
+ print "Cannot find layer version ", self, ldi['dependency'], lv.up_branch
+ raise e
+
+ for lv in dependlist:
+ LayerVersionDependency.objects.filter(layer_version = lv).delete()
+ for lvd in dependlist[lv]:
+ LayerVersionDependency.objects.get_or_create(layer_version = lv, depends_on = lvd)
+
+
+ # update machines
+ machines_info = _get_json_response(apilinks['machines']
+ + "?filter=layerbranch:%s" % "OR".join(map(lambda x: str(x.up_id), Layer_Version.objects.filter(layer_source = self)))
+ )
+ for mi in machines_info:
+ mo, created = Machine.objects.get_or_create(layer_source = self, up_id = mi['id'], layer_version = Layer_Version.objects.get(layer_source = self, up_id = mi['layerbranch']))
+ mo.up_date = mi['updated']
+ mo.name = mi['name']
+ mo.description = mi['description']
+ mo.save()
+
+ # update recipes; paginate by layer version / layer branch
+ recipes_info = _get_json_response(apilinks['recipes']
+ + "?filter=layerbranch:%s" % "OR".join(map(lambda x: str(x.up_id), Layer_Version.objects.filter(layer_source = self)))
+ )
+ for ri in recipes_info:
+ try:
+ ro, created = Recipe.objects.get_or_create(layer_source = self, up_id = ri['id'], layer_version = Layer_Version.objects.get(layer_source = self, up_id = ri['layerbranch']))
+ ro.up_date = ri['updated']
+ ro.name = ri['pn']
+ ro.version = ri['pv']
+ ro.summary = ri['summary']
+ ro.description = ri['description']
+ ro.section = ri['section']
+ ro.license = ri['license']
+ ro.homepage = ri['homepage']
+ ro.bugtracker = ri['bugtracker']
+ ro.file_path = ri['filepath'] + "/" + ri['filename']
+ ro.save()
+ except:
+ print "Duplicate Recipe, ignoring: ", vars(ro)
+ pass
+ pass
+
+class BitbakeVersion(models.Model):
+
+ name = models.CharField(max_length=32, unique = True)
+ giturl = GitURLField()
+ branch = models.CharField(max_length=32)
+ dirpath = models.CharField(max_length=255)
+
+ def __unicode__(self):
+ return "%s (%s)" % (self.name, self.branch)
+
+
+class Release(models.Model):
+ """ A release is a project template, used to pre-populate Project settings with a configuration set """
+ name = models.CharField(max_length=32, unique = True)
+ description = models.CharField(max_length=255)
+ bitbake_version = models.ForeignKey(BitbakeVersion)
+ branch_name = models.CharField(max_length=50, default = "")
+ helptext = models.TextField(null=True)
+
+ def __unicode__(self):
+ return "%s (%s)" % (self.name, self.branch_name)
+
+class ReleaseLayerSourcePriority(models.Model):
+ """ Each release selects layers from the set up layer sources, ordered by priority """
+ release = models.ForeignKey("Release")
+ layer_source = models.ForeignKey("LayerSource")
+ priority = models.IntegerField(default = 0)
+
+ def __unicode__(self):
+ return "%s-%s:%d" % (self.release.name, self.layer_source.name, self.priority)
+ class Meta:
+ unique_together = (('release', 'layer_source'),)
+
+
+class ReleaseDefaultLayer(models.Model):
+ release = models.ForeignKey(Release)
+ layer_name = models.CharField(max_length=100, default="")
+
+
+# Branch class is synced with layerindex.Branch, branches can only come from remote layer indexes
+class Branch(models.Model):
+ layer_source = models.ForeignKey('LayerSource', null = True, default = None)
+ up_id = models.IntegerField(null = True, default = None) # id of branch in the source
+ up_date = models.DateTimeField(null = True, default = None)
+
+ name = models.CharField(max_length=50)
+ short_description = models.CharField(max_length=50, blank=True)
+
+ class Meta:
+ verbose_name_plural = "Branches"
+ unique_together = (('layer_source', 'name'),('layer_source', 'up_id'))
+
+ def __unicode__(self):
+ return self.name
+
+
+# Layer class synced with layerindex.LayerItem
class Layer(models.Model):
+ layer_source = models.ForeignKey(LayerSource, null = True, default = None) # from where did we get this layer
+ up_id = models.IntegerField(null = True, default = None) # id of layer in the remote source
+ up_date = models.DateTimeField(null = True, default = None)
+
name = models.CharField(max_length=100)
- local_path = models.FilePathField(max_length=255)
+ local_path = models.FilePathField(max_length=255, null = True, default = None)
layer_index_url = models.URLField()
+ vcs_url = GitURLField(default = None, null = True)
+ vcs_web_url = models.URLField(null = True, default = None)
+ vcs_web_tree_base_url = models.URLField(null = True, default = None)
+ vcs_web_file_base_url = models.URLField(null = True, default = None)
+
+ summary = models.TextField(help_text='One-line description of the layer', null = True, default = None)
+ description = models.TextField(null = True, default = None)
+ def __unicode__(self):
+ return "%s / %s " % (self.name, self.layer_source)
+ class Meta:
+ unique_together = (("layer_source", "up_id"), ("layer_source", "name"))
+
+
+# LayerCommit class is synced with layerindex.LayerBranch
class Layer_Version(models.Model):
- build = models.ForeignKey(Build, related_name='layer_version_build')
+ search_allowed_fields = ["layer__name", "layer__summary", "layer__description", "layer__vcs_url", "dirpath", "up_branch__name", "commit", "branch"]
+ build = models.ForeignKey(Build, related_name='layer_version_build', default = None, null = True)
layer = models.ForeignKey(Layer, related_name='layer_version_layer')
- branch = models.CharField(max_length=50)
- commit = models.CharField(max_length=100)
- priority = models.IntegerField()
+ layer_source = models.ForeignKey(LayerSource, null = True, default = None) # from where did we get this Layer Version
+ up_id = models.IntegerField(null = True, default = None) # id of layerbranch in the remote source
+ up_date = models.DateTimeField(null = True, default = None)
+ up_branch = models.ForeignKey(Branch, null = True, default = None)
+
+ branch = models.CharField(max_length=80) # LayerBranch.actual_branch
+ commit = models.CharField(max_length=100) # LayerBranch.vcs_last_rev
+ dirpath = models.CharField(max_length=255, null = True, default = None) # LayerBranch.vcs_subdir
+ priority = models.IntegerField(default = 0) # if -1, this is a default layer
+
+ project = models.ForeignKey('Project', null = True, default = None) # Set if this layer is project-specific; always set for imported layers, and project-set branches
+
+ # code lifted, with adaptations, from the layerindex-web application https://git.yoctoproject.org/cgit/cgit.cgi/layerindex-web/
+ def _handle_url_path(self, base_url, path):
+ import re, posixpath
+ if base_url:
+ if self.dirpath:
+ if path:
+ extra_path = self.dirpath + '/' + path
+ # Normalise out ../ in path for usage URL
+ extra_path = posixpath.normpath(extra_path)
+ # Minor workaround to handle case where subdirectory has been added between branches
+ # (should probably support usage URL per branch to handle this... sigh...)
+ if extra_path.startswith('../'):
+ extra_path = extra_path[3:]
+ else:
+ extra_path = self.dirpath
+ else:
+ extra_path = path
+ branchname = self.up_branch.name
+ url = base_url.replace('%branch%', branchname)
+
+ # If there's a % in the path (e.g. a wildcard bbappend) we need to encode it
+ if extra_path:
+ extra_path = extra_path.replace('%', '%25')
+
+ if '%path%' in base_url:
+ if extra_path:
+ url = re.sub(r'\[([^\]]*%path%[^\]]*)\]', '\\1', url)
+ else:
+ url = re.sub(r'\[([^\]]*%path%[^\]]*)\]', '', url)
+ return url.replace('%path%', extra_path)
+ else:
+ return url + extra_path
+ return None
+
+ def get_vcs_link_url(self):
+ if self.layer.vcs_web_url is None:
+ return None
+ return self.layer.vcs_web_url
+
+ def get_vcs_file_link_url(self, file_path=""):
+ if self.layer.vcs_web_file_base_url is None:
+ return None
+ return self._handle_url_path(self.layer.vcs_web_file_base_url, file_path)
+
+ def get_vcs_dirpath_link_url(self):
+ if self.layer.vcs_web_tree_base_url is None:
+ return None
+ return self._handle_url_path(self.layer.vcs_web_tree_base_url, '')
+
+ def get_equivalents_wpriority(self, project):
+ """ Returns an ordered layerversion list that satisfies a LayerVersionDependency using the layer name and the current Project Releases' LayerSource priority """
+ def _get_ls_priority(ls):
+ try:
+ return ls.releaselayersourcepriority_set.get(release=project.release).priority
+ except ReleaseLayerSourcePriority.DoesNotExist:
+ raise
+ return sorted(
+ Layer_Version.objects.filter( layer__name = self.layer.name, up_branch__name = self.up_branch.name ),
+ key = lambda x: _get_ls_priority(x.layer_source),
+ reverse = True)
+
+
+ def __unicode__(self):
+ return str(self.layer) + " (" + self.commit +")"
+
+ class Meta:
+ unique_together = ("layer_source", "up_id")
+
+class LayerVersionDependency(models.Model):
+ layer_source = models.ForeignKey(LayerSource, null = True, default = None) # from where did we get this layer
+ up_id = models.IntegerField(null = True, default = None) # id of layerbranch in the remote source
+
+ layer_version = models.ForeignKey(Layer_Version, related_name="dependencies")
+ depends_on = models.ForeignKey(Layer_Version, related_name="dependees")
+
+ class Meta:
+ unique_together = ("layer_source", "up_id")
+
+class ProjectLayer(models.Model):
+ project = models.ForeignKey(Project)
+ layercommit = models.ForeignKey(Layer_Version, null=True)
+ optional = models.BooleanField(default = True)
+
+ def __unicode__(self):
+ return "%s, %s" % (self.project.name, self.layercommit)
+
+ class Meta:
+ unique_together = (("project", "layercommit"),)
class ProjectVariable(models.Model):
project = models.ForeignKey(Project)
@@ -443,13 +1019,15 @@ class HelpText(models.Model):
text = models.TextField()
class LogMessage(models.Model):
+ EXCEPTION = -1 # used to signal self-toaster-exceptions
INFO = 0
WARNING = 1
ERROR = 2
LOG_LEVEL = ( (INFO, "info"),
(WARNING, "warn"),
- (ERROR, "error") )
+ (ERROR, "error"),
+ (EXCEPTION, "toaster exception"))
build = models.ForeignKey(Build)
task = models.ForeignKey(Task, blank = True, null=True)
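
The new project-creation flow above no longer hard-codes MACHINE/DISTRO and the two poky layers; ProjectManager.create_project() now copies every DEFCONF_* ToasterSetting into a ProjectVariable and resolves default layers through the selected Release. A minimal usage sketch, assuming illustrative fixture data (the BitbakeVersion, Release and DEFCONF_MACHINE values below are made up for the example, not shipped by this change):

    # Hedged sketch: exercising Project.objects.create_project() as defined above.
    from orm.models import BitbakeVersion, Release, ToasterSetting, Project

    bbv = BitbakeVersion.objects.create(name="master", giturl="git://git.openembedded.org/bitbake",
                                        branch="master", dirpath="")
    release = Release.objects.create(name="master", description="Example release",
                                     bitbake_version=bbv, branch_name="master")
    # every DEFCONF_* setting becomes a default ProjectVariable on new projects
    ToasterSetting.objects.create(name="DEFCONF_MACHINE", helptext="", value="qemux86")

    prj = Project.objects.create_project(name="my-project", release=release)
    assert prj.projectvariable_set.get(name="MACHINE").value == "qemux86"
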
diff --git a/bitbake/lib/toaster/orm/tests.py b/bitbake/lib/toaster/orm/tests.py
new file mode 100644
index 0000000000..b965d8e50e
--- /dev/null
+++ b/bitbake/lib/toaster/orm/tests.py
@@ -0,0 +1,36 @@
+from django.test import TestCase
+from orm.models import LocalLayerSource, LayerIndexLayerSource, ImportedLayerSource, LayerSource
+from orm.models import Branch
+
+class LayerSourceVerifyInheritanceSaveLoad(TestCase):
+ def test_object_creation(self):
+ lls = LayerSource.objects.create(name = "a1", sourcetype = LayerSource.TYPE_LOCAL, apiurl = "")
+ lils = LayerSource.objects.create(name = "a2", sourcetype = LayerSource.TYPE_LAYERINDEX, apiurl = "")
+ imls = LayerSource.objects.create(name = "a3", sourcetype = LayerSource.TYPE_IMPORTED, apiurl = "")
+
+ import pprint
+ pprint.pprint([(x.__class__,vars(x)) for x in LayerSource.objects.all()])
+
+ self.assertTrue(True in map(lambda x: isinstance(x, LocalLayerSource), LayerSource.objects.all()))
+ self.assertTrue(True in map(lambda x: isinstance(x, LayerIndexLayerSource), LayerSource.objects.all()))
+ self.assertTrue(True in map(lambda x: isinstance(x, ImportedLayerSource), LayerSource.objects.all()))
+
+ def test_duplicate_error(self):
+ def duplicate():
+ LayerSource.objects.create(name = "a1", sourcetype = LayerSource.TYPE_LOCAL, apiurl = "")
+ LayerSource.objects.create(name = "a1", sourcetype = LayerSource.TYPE_LOCAL, apiurl = "")
+
+ self.assertRaises(Exception, duplicate)
+
+
+
+class LILSUpdateTestCase(TestCase):
+ def test_update(self):
+ lils = LayerSource.objects.create(name = "b1", sourcetype = LayerSource.TYPE_LAYERINDEX, apiurl = "http://adamian-desk.local:8080/layerindex/api/")
+ lils.update()
+
+ # run second update
+ # lils.update()
+
+ # print vars(lils)
+ #print map(lambda x: vars(x), Branch.objects.all())
diff --git a/bitbake/lib/toaster/orm/urls.py b/bitbake/lib/toaster/orm/urls.py
new file mode 100644
index 0000000000..961bc19070
--- /dev/null
+++ b/bitbake/lib/toaster/orm/urls.py
@@ -0,0 +1,27 @@
+#
+# BitBake Toaster Implementation
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+
+from django.conf.urls import patterns, include, url
+from django.views.generic import RedirectView
+
+urlpatterns = patterns('orm.views',
+ # landing point for pushing a bitbake_eventlog.json file to this toaster instance
+ url(r'^eventfile$', 'eventfile', name='eventfile'),
+ )
+
diff --git a/bitbake/lib/toaster/orm/views.py b/bitbake/lib/toaster/orm/views.py
new file mode 100644
index 0000000000..97d792b99e
--- /dev/null
+++ b/bitbake/lib/toaster/orm/views.py
@@ -0,0 +1,60 @@
+#
+# BitBake Toaster Implementation
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+from django.views.decorators.cache import cache_control
+from django.core.urlresolvers import reverse
+from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
+from django.http import HttpResponseBadRequest, HttpResponse
+from django.utils import timezone
+from django.utils.html import escape
+from datetime import timedelta
+from django.utils import formats
+from toastergui.templatetags.projecttags import json as jsonfilter
+import json
+import os
+import tempfile
+import subprocess
+import toastermain
+from django.views.decorators.csrf import csrf_exempt
+
+
+@csrf_exempt
+def eventfile(request):
+ """ Receives a file by POST, and runs toaster-eventreply on this file """
+ if request.method != "POST":
+ return HttpResponseBadRequest("This API only accepts POST requests. Post a file with:\n\ncurl -F eventlog=@bitbake_eventlog.json http[s]://[server-address]/orm/eventfile\n", content_type="text/plain;utf8")
+
+ # write temporary file
+ (handle, abstemppath) = tempfile.mkstemp(dir="/tmp/")
+ with os.fdopen(handle, "w") as tmpfile:
+ for chunk in request.FILES['eventlog'].chunks():
+ tmpfile.write(chunk)
+ tmpfile.close()
+
+ # compute the path to "bitbake/bin/toaster-eventreplay"
+ from os.path import dirname as DN
+ import_script = os.path.join(DN(DN(DN(DN(os.path.abspath(__file__))))), "bin/toaster-eventreplay")
+ if not os.path.exists(import_script):
+ raise Exception("script missing %s" % import_script)
+ scriptenv = os.environ.copy()
+ scriptenv["DATABASE_URL"] = toastermain.settings.getDATABASE_URL()
+
+ # run the data loading process and return the results
+ (out, err) = subprocess.Popen([import_script, abstemppath], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=scriptenv).communicate()
+ os.remove(abstemppath)
+ return HttpResponse("%s\n%s" % (out, err), content_type="text/plain;utf8")
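
The eventfile view accepts a multipart POST and feeds the uploaded JSON event log to toaster-eventreplay; its error message shows the curl form of the request. A client-side sketch, assuming the third-party requests package and a Toaster instance reachable at localhost:8000 (both assumptions for illustration only):

    # Sketch: push a bitbake_eventlog.json to the new /orm/eventfile endpoint.
    import requests

    with open("bitbake_eventlog.json", "rb") as f:
        resp = requests.post("http://localhost:8000/orm/eventfile",
                             files={"eventlog": f})
    print(resp.status_code)
    print(resp.text)   # stdout and stderr of the toaster-eventreplay run
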
diff --git a/bitbake/lib/toaster/toastergui/static/css/default.css b/bitbake/lib/toaster/toastergui/static/css/default.css
index 778d8b8456..199c7531dc 100644
--- a/bitbake/lib/toaster/toastergui/static/css/default.css
+++ b/bitbake/lib/toaster/toastergui/static/css/default.css
@@ -8,10 +8,15 @@
/* Styles for the help information */
.get-help { color: #CCCCCC; }
-.get-help:hover { color: #999999; cursor: pointer; }
+.get-help:hover, .icon-plus-sign:hover { color: #999999; cursor: pointer; }
.get-help-blue { color: #3A87AD; }
.get-help-blue:hover { color: #005580; cursor: pointer; }
-.manual { margin-top: 11px; }
+.get-help-yellow { color: #C09853; }
+.get-help-yellow:hover { color: #B38942; cursor: pointer; }
+.get-help-red { color: #B94A48; font-size: 16px; padding-left: 2px; }
+.get-help-red:hover { color: #943A38; cursor: pointer; }
+.build-form .get-help { margin-left: 5px; }
+.manual { margin: 11px 15px;}
.heading-help { font-size: 14px; }
/* Styles for the external link */
@@ -44,6 +49,7 @@ dd p { line-height: 20px; }
/* Some extra space before headings when needed */
.details { margin-top: 30px; }
+.air { margin-top: 30px; }
/* Required classes for the highlight behaviour in tables */
.highlight { -webkit-animation: target-fade 10s 1; -moz-animation: target-fade 10s 1; animation: target-fade 10s 1; }
@@ -55,11 +61,12 @@ dd p { line-height: 20px; }
.tooltip { z-index: 2000 !important; }
/* Override default Twitter Boostrap styles for anchor tags inside tables */
-td a { color: #333333; }
-td a:hover { color: #000000; text-decoration: underline; }
+td a, td a > code { color: #333333; }
+td code { white-space: normal; }
+td a:hover, td a > code:hover { color: #000000; text-decoration: underline; }
/* Override default Twitter Bootstrap styles for tr.error */
-.table tbody tr.error > td { background-color: #FFFFFF; } /* override default Bootstrap behaviour */
+.table tbody tr.error > td { background-color: transparent; } /* override default Bootstrap behaviour */
.table-hover tbody tr.error:hover > td { background-color: #F5F5F5;} /* override default Bootstrap behaviour */
/* Right justify Bootstrap table columns for size fields */
@@ -96,6 +103,10 @@ th > a, th > span { font-weight: normal; }
.content-directory a:hover { color: #005580; text-decoration: underline; }
.symlink { color: #CCCCCC; }
+/* Styles for the navbar actions */
+.btn-group + .btn-group { margin-right: 10px; }
+.navbar-inner > .btn-group { margin-top: 6px; }
+
/* Other styles */
.dropdown-menu { padding: 10px; }
select { width: auto; }
@@ -103,7 +114,11 @@ select { width: auto; }
.top-air { margin-top: 40px;}
.progress { margin-bottom: 0px; }
.lead .badge { font-size: 18px; font-weight: normal; border-radius: 15px; padding: 9px; }
+.lead ol > li, .lead ul > li {
+ line-height: 35px;
+}
.well > .lead, .alert .lead { margin-bottom: 0px; }
+.well-transparent { background-color: transparent; }
.no-results { margin: 10px 0; }
.task-name { margin-left: 7px; }
.icon-hand-right {color: #CCCCCC; }
@@ -116,12 +131,29 @@ select { width: auto; }
/* make tables Chrome-happy (me, not so much) */
#otable { table-layout: fixed; word-wrap: break-word; }
+/* styles for the new build button */
+.new-build .btn-primary { padding: 4px 30px; }
+.new-build .alert { margin-top: 10px; }
+.new-build .alert p { margin-top: 10px; }
+
+/* styles for showing the project name in build mode */
+.project-name { padding-top: 0; }
+.project-name .label { font-weight: normal; margin-bottom: 5px; margin-left: -15px; padding: 5px; }
+
+/* Remove bottom margin for forms inside modal dialogs */
+#dependencies_modal_form { margin-bottom: 0px; }
+
/* Configuration styles */
.icon-trash { color: #B94A48; font-size: 16px; padding-left: 2px; }
.icon-trash:hover { color: #943A38; text-decoration: none; cursor: pointer; }
-.icon-pencil, .icon-download-alt { font-size: 16px; color: #0088CC; padding-left: 2px; }
-.icon-pencil:hover, .icon-download-alt:hover { color: #005580; text-decoration: none; cursor: pointer; }
-.configuration-list li { line-height: 35px; font-size: 21px; font-weight: 200; }
+.icon-pencil, .icon-download-alt, .icon-refresh, .icon-star-empty, .icon-star, .icon-tasks { font-size: 16px; color: #0088CC; padding-left: 2px; }
+.icon-pencil:hover, .icon-download-alt:hover, .icon-refresh:hover, .icon-star-empty:hover, .icon-star:hover, .icon-tasks:hover { color: #005580; text-decoration: none; cursor: pointer; }
+.icon-share { padding-left: 2px; }
+.alert-success .icon-refresh, .alert-success .icon-tasks { color: #468847; }
+.alert-success .icon-refresh:hover, .alert-success .icon-tasks:hover { color: #347132; }
+.alert-error .icon-refresh, .alert-error .icon-tasks { color: #b94a48; }
+.alert-error .icon-refresh:hover, .alert-error .icon-tasks:hover { color: #943A38; }
+.configuration-list li, .configuration-list label { line-height: 35px; font-size: 21px; font-weight: 200; margin-bottom: 0px;}
.configuration-list { font-size: 16px; margin-bottom: 1.5em; }
.configuration-list i { font-size: 16px; }
/*.configuration-layers { height: 135px; overflow: scroll; }*/
@@ -132,15 +164,71 @@ select { width: auto; }
.configuration-alert p { margin-bottom: 0px; }
fieldset { padding-left: 19px; }
.project-form { margin-top: 10px; }
-.add-layers .btn-block + .btn-block { margin-top: 0px; }
+.add-layers .btn-block + .btn-block, .build .btn-block + .btn-block { margin-top: 0px; }
input.huge { font-size: 17.5px; padding: 11px 19px; }
-.build-form { margin-bottom: 0px; padding-left: 20px; }
+.build-form { margin-bottom: 0px; }
+.build-form .input-append { margin-bottom: 0px; }
+.build-form .btn-large { padding: 11px 35px; }
+.build-form p { font-size: 17.5px; margin: 12px 0 0 10px; }
+#layer-container form, #target-container form { margin-bottom: 0px; }
+.btn-primary .icon-question-sign, .btn-danger .icon-question-sign { color: #fff; }
+.btn-primary .icon-question-sign:hover, .btn-danger .icon-question-sign:hover { color: #999; }
a code { color: #0088CC; }
a code:hover { color: #005580; }
.localconf { font-size: 17.5px; margin-top: 40px; }
.localconf code { font-size: 17.5px; }
#add-layer-dependencies { margin-top: 5px; }
+.link-action { font-size: 17.5px; margin-top: 40px; }
+.link-action code { font-size: 17.5px; }
.artifact { width: 9em; }
.control-group { margin-bottom: 0px; }
#project-details form { margin: 0px; }
dd form { margin: 10px 0 0 0; }
+dd form { margin-bottom: 0px; }
+dl textarea { resize: vertical; }
+.navbar-fixed-top { z-index: 1; }
+.popover { z-index: 2; }
+.btn-danger .icon-trash { color: #fff; }
+.bbappends { list-style-type: none; margin-left: 0; }
+.bbappends li { line-height: 25px; }
+.configuration-list input[type="checkbox"] { margin-top:13px;margin-right:10px; }
+.alert input[type="checkbox"] { margin-top: 0px; margin-right: 3px; }
+.alert ol { padding: 10px 0px 0px 20px; }
+.alert ol > li { line-height: 35px; }
+.dl-vertical form { margin-top: 10px; }
+.scrolling { border: 1px solid #dddddd; height: 154px; overflow: auto; padding: 8px; width: 27.5%; margin-bottom: 10px; }
+.lead .help-block { font-size: 14px; line-height: 20px; font-weight: normal; }
+.button-place .btn { margin: 0 0 20px 0; }
+.tooltip-inner { max-width: 250px; }
+dd > span { line-height: 20px; }
+.new-build { padding: 20px; }
+.new-build li { line-height: 30px; }
+.new-build h6 { margin: 10px 0 0 0; color: #5a5a5a; }
+.new-build h3 { margin: 0; color: #5a5a5a; }
+.new-build form { margin: 5px 0 0; }
+.new-build .input-append { margin-bottom: 0; }
+#build-selected { margin-top: 15px; }
+
+
+.animate-repeat {
+ list-style:none;
+ box-sizing:border-box;
+}
+
+.animate-repeat.ng-move,
+.animate-repeat.ng-enter,
+.animate-repeat.ng-leave {
+ -webkit-transition:all linear 0.5s;
+ transition:all linear 0.5s;
+}
+
+.animate-repeat.ng-leave.ng-leave-active,
+.animate-repeat.ng-move,
+.animate-repeat.ng-enter {
+ opacity:0;
+}
+
+.animate-repeat.ng-leave,
+.animate-repeat.ng-enter.ng-enter-active {
+ opacity:1;
+}
diff --git a/bitbake/lib/toaster/toastergui/static/js/angular-animate.min.js b/bitbake/lib/toaster/toastergui/static/js/angular-animate.min.js
new file mode 100644
index 0000000000..591544a51a
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/angular-animate.min.js
@@ -0,0 +1,28 @@
+/*
+ AngularJS v1.2.23
+ (c) 2010-2014 Google, Inc. http://angularjs.org
+ License: MIT
+*/
+(function(F,e,O){'use strict';e.module("ngAnimate",["ng"]).directive("ngAnimateChildren",function(){return function(G,s,g){g=g.ngAnimateChildren;e.isString(g)&&0===g.length?s.data("$$ngAnimateChildren",!0):G.$watch(g,function(e){s.data("$$ngAnimateChildren",!!e)})}}).factory("$$animateReflow",["$$rAF","$document",function(e,s){return function(g){return e(function(){g()})}}]).config(["$provide","$animateProvider",function(G,s){function g(e){for(var g=0;g<e.length;g++){var l=e[g];if(l.nodeType==aa)return l}}
+function B(l){return e.element(g(l))}var m=e.noop,u=e.forEach,P=s.$$selectors,aa=1,l="$$ngAnimateState",V="$$ngAnimateChildren",J="ng-animate",n={running:!0};G.decorator("$animate",["$delegate","$injector","$sniffer","$rootElement","$$asyncCallback","$rootScope","$document",function(z,F,$,R,E,H,O){function K(a){var b=a.data(l)||{};b.running=!0;a.data(l,b)}function L(a){if(a){var b=[],c={};a=a.substr(1).split(".");($.transitions||$.animations)&&b.push(F.get(P[""]));for(var d=0;d<a.length;d++){var f=
+a[d],e=P[f];e&&!c[f]&&(b.push(F.get(e)),c[f]=!0)}return b}}function G(a,b,c){function d(a,b){var c=a[b],d=a["before"+b.charAt(0).toUpperCase()+b.substr(1)];if(c||d)return"leave"==b&&(d=c,c=null),n.push({event:b,fn:c}),h.push({event:b,fn:d}),!0}function f(b,d,e){var f=[];u(b,function(a){a.fn&&f.push(a)});var g=0;u(f,function(b,l){var C=function(){a:{if(d){(d[l]||m)();if(++g<f.length)break a;d=null}e()}};switch(b.event){case "setClass":d.push(b.fn(a,A,k,C));break;case "addClass":d.push(b.fn(a,A||c,
+C));break;case "removeClass":d.push(b.fn(a,k||c,C));break;default:d.push(b.fn(a,C))}});d&&0===d.length&&e()}var g=a[0];if(g){var l="setClass"==b,p=l||"addClass"==b||"removeClass"==b,A,k;e.isArray(c)&&(A=c[0],k=c[1],c=A+" "+k);var x=a.attr("class")+" "+c;if(M(x)){var t=m,w=[],h=[],q=m,y=[],n=[],x=(" "+x).replace(/\s+/g,".");u(L(x),function(a){!d(a,b)&&l&&(d(a,"addClass"),d(a,"removeClass"))});return{node:g,event:b,className:c,isClassBased:p,isSetClassOperation:l,before:function(a){t=a;f(h,w,function(){t=
+m;a()})},after:function(a){q=a;f(n,y,function(){q=m;a()})},cancel:function(){w&&(u(w,function(a){(a||m)(!0)}),t(!0));y&&(u(y,function(a){(a||m)(!0)}),q(!0))}}}}}function r(a,b,c,d,f,g,n){function p(d){var e="$animate:"+d;q&&(q[e]&&0<q[e].length)&&E(function(){c.triggerHandler(e,{event:a,className:b})})}function A(){p("before")}function m(){p("after")}function x(){p("close");n&&E(function(){n()})}function t(){t.hasBeenRun||(t.hasBeenRun=!0,g())}function w(){if(!w.hasBeenRun){w.hasBeenRun=!0;var d=
+c.data(l);d&&(h&&h.isClassBased?k(c,b):(E(function(){var d=c.data(l)||{};r==d.index&&k(c,b,a)}),c.data(l,d)));x()}}var h=G(c,a,b);if(h){b=h.className;var q=e.element._data(h.node),q=q&&q.events;d||(d=f?f.parent():c.parent());var y=c.data(l)||{};f=y.active||{};var z=y.totalActive||0,C=y.last,D;h.isClassBased&&(D=y.running||y.disabled||C&&!C.isClassBased);if(D||N(c,d))t(),A(),m(),w();else{d=!1;if(0<z){D=[];if(h.isClassBased)"setClass"==C.event?(D.push(C),k(c,b)):f[b]&&(v=f[b],v.event==a?d=!0:(D.push(v),
+k(c,b)));else if("leave"==a&&f["ng-leave"])d=!0;else{for(var v in f)D.push(f[v]),k(c,v);f={};z=0}0<D.length&&u(D,function(a){a.cancel()})}!h.isClassBased||(h.isSetClassOperation||d)||(d="addClass"==a==c.hasClass(b));if(d)t(),A(),m(),x();else{if("leave"==a)c.one("$destroy",function(a){a=e.element(this);var b=a.data(l);b&&(b=b.active["ng-leave"])&&(b.cancel(),k(a,"ng-leave"))});c.addClass(J);var r=Y++;z++;f[b]=h;c.data(l,{last:h,active:f,index:r,totalActive:z});A();h.before(function(d){var e=c.data(l);
+d=d||!e||!e.active[b]||h.isClassBased&&e.active[b].event!=a;t();!0===d?w():(m(),h.after(w))})}}}else t(),A(),m(),w()}function T(a){if(a=g(a))a=e.isFunction(a.getElementsByClassName)?a.getElementsByClassName(J):a.querySelectorAll("."+J),u(a,function(a){a=e.element(a);(a=a.data(l))&&a.active&&u(a.active,function(a){a.cancel()})})}function k(a,b){if(g(a)==g(R))n.disabled||(n.running=!1,n.structural=!1);else if(b){var c=a.data(l)||{},d=!0===b;!d&&(c.active&&c.active[b])&&(c.totalActive--,delete c.active[b]);
+if(d||!c.totalActive)a.removeClass(J),a.removeData(l)}}function N(a,b){if(n.disabled)return!0;if(g(a)==g(R))return n.running;var c,d,f;do{if(0===b.length)break;var m=g(b)==g(R),k=m?n:b.data(l)||{};if(k.disabled)return!0;m&&(f=!0);!1!==c&&(m=b.data(V),e.isDefined(m)&&(c=m));d=d||k.running||k.last&&!k.last.isClassBased}while(b=b.parent());return!f||!c&&d}var Y=0;R.data(l,n);H.$$postDigest(function(){H.$$postDigest(function(){n.running=!1})});var Q=s.classNameFilter(),M=Q?function(a){return Q.test(a)}:
+function(){return!0};return{enter:function(a,b,c,d){a=e.element(a);b=b&&e.element(b);c=c&&e.element(c);K(a);z.enter(a,b,c);H.$$postDigest(function(){a=B(a);r("enter","ng-enter",a,b,c,m,d)})},leave:function(a,b){a=e.element(a);T(a);K(a);H.$$postDigest(function(){r("leave","ng-leave",B(a),null,null,function(){z.leave(a)},b)})},move:function(a,b,c,d){a=e.element(a);b=b&&e.element(b);c=c&&e.element(c);T(a);K(a);z.move(a,b,c);H.$$postDigest(function(){a=B(a);r("move","ng-move",a,b,c,m,d)})},addClass:function(a,
+b,c){a=e.element(a);a=B(a);r("addClass",b,a,null,null,function(){z.addClass(a,b)},c)},removeClass:function(a,b,c){a=e.element(a);a=B(a);r("removeClass",b,a,null,null,function(){z.removeClass(a,b)},c)},setClass:function(a,b,c,d){a=e.element(a);a=B(a);r("setClass",[b,c],a,null,null,function(){z.setClass(a,b,c)},d)},enabled:function(a,b){switch(arguments.length){case 2:if(a)k(b);else{var c=b.data(l)||{};c.disabled=!0;b.data(l,c)}break;case 1:n.disabled=!a;break;default:a=!n.disabled}return!!a}}}]);s.register("",
+["$window","$sniffer","$timeout","$$animateReflow",function(l,n,s,B){function E(a,U){S&&S();W.push(U);S=B(function(){u(W,function(a){a()});W=[];S=null;v={}})}function H(a,U){var b=g(a);a=e.element(b);Z.push(a);b=Date.now()+U;b<=da||(s.cancel(ca),da=b,ca=s(function(){G(Z);Z=[]},U,!1))}function G(a){u(a,function(a){(a=a.data(q))&&(a.closeAnimationFn||m)()})}function K(a,b){var c=b?v[b]:null;if(!c){var d=0,e=0,f=0,g=0,m,k,h,q;u(a,function(a){if(a.nodeType==aa){a=l.getComputedStyle(a)||{};h=a[I+P];d=
+Math.max(L(h),d);q=a[I+x];m=a[I+t];e=Math.max(L(m),e);k=a[p+t];g=Math.max(L(k),g);var b=L(a[p+P]);0<b&&(b*=parseInt(a[p+w],10)||1);f=Math.max(b,f)}});c={total:0,transitionPropertyStyle:q,transitionDurationStyle:h,transitionDelayStyle:m,transitionDelay:e,transitionDuration:d,animationDelayStyle:k,animationDelay:g,animationDuration:f};b&&(v[b]=c)}return c}function L(a){var b=0;a=e.isString(a)?a.split(/\s*,\s*/):[];u(a,function(a){b=Math.max(parseFloat(a)||0,b)});return b}function J(a){var b=a.parent(),
+c=b.data(h);c||(b.data(h,++ba),c=ba);return c+"-"+g(a).getAttribute("class")}function r(a,b,c,d){var e=J(b),f=e+" "+c,l=v[f]?++v[f].total:0,k={};if(0<l){var h=c+"-stagger",k=e+" "+h;(e=!v[k])&&b.addClass(h);k=K(b,k);e&&b.removeClass(h)}d=d||function(a){return a()};b.addClass(c);var h=b.data(q)||{},n=d(function(){return K(b,f)});d=n.transitionDuration;e=n.animationDuration;if(0===d&&0===e)return b.removeClass(c),!1;b.data(q,{running:h.running||0,itemIndex:l,stagger:k,timings:n,closeAnimationFn:m});
+a=0<h.running||"setClass"==a;0<d&&T(b,c,a);0<e&&(0<k.animationDelay&&0===k.animationDuration)&&(g(b).style[p]="none 0s");return!0}function T(a,b,c){"ng-enter"!=b&&("ng-move"!=b&&"ng-leave"!=b)&&c?a.addClass(y):g(a).style[I+x]="none"}function k(a,b){var c=I+x,d=g(a);d.style[c]&&0<d.style[c].length&&(d.style[c]="");a.removeClass(y)}function N(a){var b=p;a=g(a);a.style[b]&&0<a.style[b].length&&(a.style[b]="")}function Y(a,b,d,e){function k(a){b.off(x,l);b.removeClass(m);c(b,d);a=g(b);for(var e in s)a.style.removeProperty(s[e])}
+function l(a){a.stopPropagation();var b=a.originalEvent||a;a=b.$manualTimeStamp||b.timeStamp||Date.now();b=parseFloat(b.elapsedTime.toFixed(V));Math.max(a-z,0)>=y&&b>=v&&e()}var h=g(b);a=b.data(q);if(-1!=h.getAttribute("class").indexOf(d)&&a){var m="";u(d.split(" "),function(a,b){m+=(0<b?" ":"")+a+"-active"});var n=a.stagger,p=a.timings,t=a.itemIndex,v=Math.max(p.transitionDuration,p.animationDuration),w=Math.max(p.transitionDelay,p.animationDelay),y=w*D,z=Date.now(),x=A+" "+X,r="",s=[];if(0<p.transitionDuration){var B=
+p.transitionPropertyStyle;-1==B.indexOf("all")&&(r+=f+"transition-property: "+B+";",r+=f+"transition-duration: "+p.transitionDurationStyle+";",s.push(f+"transition-property"),s.push(f+"transition-duration"))}0<t&&(0<n.transitionDelay&&0===n.transitionDuration&&(r+=f+"transition-delay: "+Q(p.transitionDelayStyle,n.transitionDelay,t)+"; ",s.push(f+"transition-delay")),0<n.animationDelay&&0===n.animationDuration&&(r+=f+"animation-delay: "+Q(p.animationDelayStyle,n.animationDelay,t)+"; ",s.push(f+"animation-delay")));
+0<s.length&&(p=h.getAttribute("style")||"",h.setAttribute("style",p+"; "+r));b.on(x,l);b.addClass(m);a.closeAnimationFn=function(){k();e()};h=(t*(Math.max(n.animationDelay,n.transitionDelay)||0)+(w+v)*C)*D;a.running++;H(b,h);return k}e()}function Q(a,b,c){var d="";u(a.split(","),function(a,e){d+=(0<e?",":"")+(c*b+parseInt(a,10))+"s"});return d}function M(a,b,d,e){if(r(a,b,d,e))return function(a){a&&c(b,d)}}function a(a,b,d,e){if(b.data(q))return Y(a,b,d,e);c(b,d);e()}function b(b,c,d,e){var f=M(b,
+c,d);if(f){var g=f;E(c,function(){k(c,d);N(c);g=a(b,c,d,e)});return function(a){(g||m)(a)}}e()}function c(a,b){a.removeClass(b);var c=a.data(q);c&&(c.running&&c.running--,c.running&&0!==c.running||a.removeData(q))}function d(a,b){var c="";a=e.isArray(a)?a:a.split(/\s+/);u(a,function(a,d){a&&0<a.length&&(c+=(0<d?" ":"")+a+b)});return c}var f="",I,X,p,A;F.ontransitionend===O&&F.onwebkittransitionend!==O?(f="-webkit-",I="WebkitTransition",X="webkitTransitionEnd transitionend"):(I="transition",X="transitionend");
+F.onanimationend===O&&F.onwebkitanimationend!==O?(f="-webkit-",p="WebkitAnimation",A="webkitAnimationEnd animationend"):(p="animation",A="animationend");var P="Duration",x="Property",t="Delay",w="IterationCount",h="$$ngAnimateKey",q="$$ngAnimateCSS3Data",y="ng-animate-block-transitions",V=3,C=1.5,D=1E3,v={},ba=0,W=[],S,ca=null,da=0,Z=[];return{enter:function(a,c){return b("enter",a,"ng-enter",c)},leave:function(a,c){return b("leave",a,"ng-leave",c)},move:function(a,c){return b("move",a,"ng-move",
+c)},beforeSetClass:function(a,b,c,e){var f=d(c,"-remove")+" "+d(b,"-add"),g=M("setClass",a,f,function(d){var e=a.attr("class");a.removeClass(c);a.addClass(b);d=d();a.attr("class",e);return d});if(g)return E(a,function(){k(a,f);N(a);e()}),g;e()},beforeAddClass:function(a,b,c){var e=M("addClass",a,d(b,"-add"),function(c){a.addClass(b);c=c();a.removeClass(b);return c});if(e)return E(a,function(){k(a,b);N(a);c()}),e;c()},setClass:function(b,c,e,f){e=d(e,"-remove");c=d(c,"-add");return a("setClass",b,
+e+" "+c,f)},addClass:function(b,c,e){return a("addClass",b,d(c,"-add"),e)},beforeRemoveClass:function(a,b,c){var e=M("removeClass",a,d(b,"-remove"),function(c){var d=a.attr("class");a.removeClass(b);c=c();a.attr("class",d);return c});if(e)return E(a,function(){k(a,b);N(a);c()}),e;c()},removeClass:function(b,c,e){return a("removeClass",b,d(c,"-remove"),e)}}}])}])})(window,window.angular);
+//# sourceMappingURL=angular-animate.min.js.map
diff --git a/bitbake/lib/toaster/toastergui/static/js/angular-cookies.min.js b/bitbake/lib/toaster/toastergui/static/js/angular-cookies.min.js
new file mode 100644
index 0000000000..24a1d50016
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/angular-cookies.min.js
@@ -0,0 +1,8 @@
+/*
+ AngularJS v1.2.23
+ (c) 2010-2014 Google, Inc. http://angularjs.org
+ License: MIT
+*/
+(function(p,f,n){'use strict';f.module("ngCookies",["ng"]).factory("$cookies",["$rootScope","$browser",function(e,b){var c={},g={},h,k=!1,l=f.copy,m=f.isUndefined;b.addPollFn(function(){var a=b.cookies();h!=a&&(h=a,l(a,g),l(a,c),k&&e.$apply())})();k=!0;e.$watch(function(){var a,d,e;for(a in g)m(c[a])&&b.cookies(a,n);for(a in c)d=c[a],f.isString(d)||(d=""+d,c[a]=d),d!==g[a]&&(b.cookies(a,d),e=!0);if(e)for(a in d=b.cookies(),c)c[a]!==d[a]&&(m(d[a])?delete c[a]:c[a]=d[a])});return c}]).factory("$cookieStore",
+["$cookies",function(e){return{get:function(b){return(b=e[b])?f.fromJson(b):b},put:function(b,c){e[b]=f.toJson(c)},remove:function(b){delete e[b]}}}])})(window,window.angular);
+//# sourceMappingURL=angular-cookies.min.js.map
diff --git a/bitbake/lib/toaster/toastergui/static/js/angular-route.min.js b/bitbake/lib/toaster/toastergui/static/js/angular-route.min.js
new file mode 100644
index 0000000000..ef8d614e18
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/angular-route.min.js
@@ -0,0 +1,14 @@
+/*
+ AngularJS v1.2.23
+ (c) 2010-2014 Google, Inc. http://angularjs.org
+ License: MIT
+*/
+(function(n,e,A){'use strict';function x(s,g,h){return{restrict:"ECA",terminal:!0,priority:400,transclude:"element",link:function(a,c,b,f,w){function y(){p&&(p.remove(),p=null);k&&(k.$destroy(),k=null);l&&(h.leave(l,function(){p=null}),p=l,l=null)}function v(){var b=s.current&&s.current.locals;if(e.isDefined(b&&b.$template)){var b=a.$new(),d=s.current;l=w(b,function(d){h.enter(d,null,l||c,function(){!e.isDefined(t)||t&&!a.$eval(t)||g()});y()});k=d.scope=b;k.$emit("$viewContentLoaded");k.$eval(u)}else y()}
+var k,l,p,t=b.autoscroll,u=b.onload||"";a.$on("$routeChangeSuccess",v);v()}}}function z(e,g,h){return{restrict:"ECA",priority:-400,link:function(a,c){var b=h.current,f=b.locals;c.html(f.$template);var w=e(c.contents());b.controller&&(f.$scope=a,f=g(b.controller,f),b.controllerAs&&(a[b.controllerAs]=f),c.data("$ngControllerController",f),c.children().data("$ngControllerController",f));w(a)}}}n=e.module("ngRoute",["ng"]).provider("$route",function(){function s(a,c){return e.extend(new (e.extend(function(){},
+{prototype:a})),c)}function g(a,e){var b=e.caseInsensitiveMatch,f={originalPath:a,regexp:a},h=f.keys=[];a=a.replace(/([().])/g,"\\$1").replace(/(\/)?:(\w+)([\?\*])?/g,function(a,e,b,c){a="?"===c?c:null;c="*"===c?c:null;h.push({name:b,optional:!!a});e=e||"";return""+(a?"":e)+"(?:"+(a?e:"")+(c&&"(.+?)"||"([^/]+)")+(a||"")+")"+(a||"")}).replace(/([\/$\*])/g,"\\$1");f.regexp=RegExp("^"+a+"$",b?"i":"");return f}var h={};this.when=function(a,c){h[a]=e.extend({reloadOnSearch:!0},c,a&&g(a,c));if(a){var b=
+"/"==a[a.length-1]?a.substr(0,a.length-1):a+"/";h[b]=e.extend({redirectTo:a},g(b,c))}return this};this.otherwise=function(a){this.when(null,a);return this};this.$get=["$rootScope","$location","$routeParams","$q","$injector","$http","$templateCache","$sce",function(a,c,b,f,g,n,v,k){function l(){var d=p(),m=r.current;if(d&&m&&d.$$route===m.$$route&&e.equals(d.pathParams,m.pathParams)&&!d.reloadOnSearch&&!u)m.params=d.params,e.copy(m.params,b),a.$broadcast("$routeUpdate",m);else if(d||m)u=!1,a.$broadcast("$routeChangeStart",
+d,m),(r.current=d)&&d.redirectTo&&(e.isString(d.redirectTo)?c.path(t(d.redirectTo,d.params)).search(d.params).replace():c.url(d.redirectTo(d.pathParams,c.path(),c.search())).replace()),f.when(d).then(function(){if(d){var a=e.extend({},d.resolve),c,b;e.forEach(a,function(d,c){a[c]=e.isString(d)?g.get(d):g.invoke(d)});e.isDefined(c=d.template)?e.isFunction(c)&&(c=c(d.params)):e.isDefined(b=d.templateUrl)&&(e.isFunction(b)&&(b=b(d.params)),b=k.getTrustedResourceUrl(b),e.isDefined(b)&&(d.loadedTemplateUrl=
+b,c=n.get(b,{cache:v}).then(function(a){return a.data})));e.isDefined(c)&&(a.$template=c);return f.all(a)}}).then(function(c){d==r.current&&(d&&(d.locals=c,e.copy(d.params,b)),a.$broadcast("$routeChangeSuccess",d,m))},function(c){d==r.current&&a.$broadcast("$routeChangeError",d,m,c)})}function p(){var a,b;e.forEach(h,function(f,h){var q;if(q=!b){var g=c.path();q=f.keys;var l={};if(f.regexp)if(g=f.regexp.exec(g)){for(var k=1,p=g.length;k<p;++k){var n=q[k-1],r=g[k];n&&r&&(l[n.name]=r)}q=l}else q=null;
+else q=null;q=a=q}q&&(b=s(f,{params:e.extend({},c.search(),a),pathParams:a}),b.$$route=f)});return b||h[null]&&s(h[null],{params:{},pathParams:{}})}function t(a,c){var b=[];e.forEach((a||"").split(":"),function(a,d){if(0===d)b.push(a);else{var e=a.match(/(\w+)(.*)/),f=e[1];b.push(c[f]);b.push(e[2]||"");delete c[f]}});return b.join("")}var u=!1,r={routes:h,reload:function(){u=!0;a.$evalAsync(l)}};a.$on("$locationChangeSuccess",l);return r}]});n.provider("$routeParams",function(){this.$get=function(){return{}}});
+n.directive("ngView",x);n.directive("ngView",z);x.$inject=["$route","$anchorScroll","$animate"];z.$inject=["$compile","$controller","$route"]})(window,window.angular);
+//# sourceMappingURL=angular-route.min.js.map
diff --git a/bitbake/lib/toaster/toastergui/static/js/angular-sanitize.min.js b/bitbake/lib/toaster/toastergui/static/js/angular-sanitize.min.js
new file mode 100644
index 0000000000..e10308b6ff
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/angular-sanitize.min.js
@@ -0,0 +1,15 @@
+/*
+ AngularJS v1.2.23
+ (c) 2010-2014 Google, Inc. http://angularjs.org
+ License: MIT
+*/
+(function(q,g,r){'use strict';function F(a){var d=[];t(d,g.noop).chars(a);return d.join("")}function m(a){var d={};a=a.split(",");var c;for(c=0;c<a.length;c++)d[a[c]]=!0;return d}function G(a,d){function c(a,b,c,h){b=g.lowercase(b);if(u[b])for(;f.last()&&v[f.last()];)e("",f.last());w[b]&&f.last()==b&&e("",b);(h=x[b]||!!h)||f.push(b);var n={};c.replace(H,function(a,b,d,c,e){n[b]=s(d||c||e||"")});d.start&&d.start(b,n,h)}function e(a,b){var c=0,e;if(b=g.lowercase(b))for(c=f.length-1;0<=c&&f[c]!=b;c--);
+if(0<=c){for(e=f.length-1;e>=c;e--)d.end&&d.end(f[e]);f.length=c}}"string"!==typeof a&&(a=null===a||"undefined"===typeof a?"":""+a);var b,l,f=[],n=a,h;for(f.last=function(){return f[f.length-1]};a;){h="";l=!0;if(f.last()&&y[f.last()])a=a.replace(RegExp("(.*)<\\s*\\/\\s*"+f.last()+"[^>]*>","i"),function(a,b){b=b.replace(I,"$1").replace(J,"$1");d.chars&&d.chars(s(b));return""}),e("",f.last());else{if(0===a.indexOf("\x3c!--"))b=a.indexOf("--",4),0<=b&&a.lastIndexOf("--\x3e",b)===b&&(d.comment&&d.comment(a.substring(4,
+b)),a=a.substring(b+3),l=!1);else if(z.test(a)){if(b=a.match(z))a=a.replace(b[0],""),l=!1}else if(K.test(a)){if(b=a.match(A))a=a.substring(b[0].length),b[0].replace(A,e),l=!1}else L.test(a)&&((b=a.match(B))?(b[4]&&(a=a.substring(b[0].length),b[0].replace(B,c)),l=!1):(h+="<",a=a.substring(1)));l&&(b=a.indexOf("<"),h+=0>b?a:a.substring(0,b),a=0>b?"":a.substring(b),d.chars&&d.chars(s(h)))}if(a==n)throw M("badparse",a);n=a}e()}function s(a){if(!a)return"";var d=N.exec(a);a=d[1];var c=d[3];if(d=d[2])p.innerHTML=
+d.replace(/</g,"&lt;"),d="textContent"in p?p.textContent:p.innerText;return a+d+c}function C(a){return a.replace(/&/g,"&amp;").replace(O,function(a){var c=a.charCodeAt(0);a=a.charCodeAt(1);return"&#"+(1024*(c-55296)+(a-56320)+65536)+";"}).replace(P,function(a){return"&#"+a.charCodeAt(0)+";"}).replace(/</g,"&lt;").replace(/>/g,"&gt;")}function t(a,d){var c=!1,e=g.bind(a,a.push);return{start:function(a,l,f){a=g.lowercase(a);!c&&y[a]&&(c=a);c||!0!==D[a]||(e("<"),e(a),g.forEach(l,function(c,f){var k=
+g.lowercase(f),l="img"===a&&"src"===k||"background"===k;!0!==Q[k]||!0===E[k]&&!d(c,l)||(e(" "),e(f),e('="'),e(C(c)),e('"'))}),e(f?"/>":">"))},end:function(a){a=g.lowercase(a);c||!0!==D[a]||(e("</"),e(a),e(">"));a==c&&(c=!1)},chars:function(a){c||e(C(a))}}}var M=g.$$minErr("$sanitize"),B=/^<((?:[a-zA-Z])[\w:-]*)((?:\s+[\w:-]+(?:\s*=\s*(?:(?:"[^"]*")|(?:'[^']*')|[^>\s]+))?)*)\s*(\/?)\s*(>?)/,A=/^<\/\s*([\w:-]+)[^>]*>/,H=/([\w:-]+)(?:\s*=\s*(?:(?:"((?:[^"])*)")|(?:'((?:[^'])*)')|([^>\s]+)))?/g,L=/^</,
+K=/^<\//,I=/\x3c!--(.*?)--\x3e/g,z=/<!DOCTYPE([^>]*?)>/i,J=/<!\[CDATA\[(.*?)]]\x3e/g,O=/[\uD800-\uDBFF][\uDC00-\uDFFF]/g,P=/([^\#-~| |!])/g,x=m("area,br,col,hr,img,wbr");q=m("colgroup,dd,dt,li,p,tbody,td,tfoot,th,thead,tr");r=m("rp,rt");var w=g.extend({},r,q),u=g.extend({},q,m("address,article,aside,blockquote,caption,center,del,dir,div,dl,figure,figcaption,footer,h1,h2,h3,h4,h5,h6,header,hgroup,hr,ins,map,menu,nav,ol,pre,script,section,table,ul")),v=g.extend({},r,m("a,abbr,acronym,b,bdi,bdo,big,br,cite,code,del,dfn,em,font,i,img,ins,kbd,label,map,mark,q,ruby,rp,rt,s,samp,small,span,strike,strong,sub,sup,time,tt,u,var")),
+y=m("script,style"),D=g.extend({},x,u,v,w),E=m("background,cite,href,longdesc,src,usemap"),Q=g.extend({},E,m("abbr,align,alt,axis,bgcolor,border,cellpadding,cellspacing,class,clear,color,cols,colspan,compact,coords,dir,face,headers,height,hreflang,hspace,ismap,lang,language,nohref,nowrap,rel,rev,rows,rowspan,rules,scope,scrolling,shape,size,span,start,summary,target,title,type,valign,value,vspace,width")),p=document.createElement("pre"),N=/^(\s*)([\s\S]*?)(\s*)$/;g.module("ngSanitize",[]).provider("$sanitize",
+function(){this.$get=["$$sanitizeUri",function(a){return function(d){var c=[];G(d,t(c,function(c,b){return!/^unsafe/.test(a(c,b))}));return c.join("")}}]});g.module("ngSanitize").filter("linky",["$sanitize",function(a){var d=/((ftp|https?):\/\/|(mailto:)?[A-Za-z0-9._%+-]+@)\S*[^\s.;,(){}<>"]/,c=/^mailto:/;return function(e,b){function l(a){a&&k.push(F(a))}function f(a,c){k.push("<a ");g.isDefined(b)&&(k.push('target="'),k.push(b),k.push('" '));k.push('href="');k.push(a);k.push('">');l(c);k.push("</a>")}
+if(!e)return e;for(var n,h=e,k=[],m,p;n=h.match(d);)m=n[0],n[2]==n[3]&&(m="mailto:"+m),p=n.index,l(h.substr(0,p)),f(m,n[0].replace(c,"")),h=h.substring(p+n[0].length);l(h);return a(k.join(""))}}])})(window,window.angular);
+//# sourceMappingURL=angular-sanitize.min.js.map
diff --git a/bitbake/lib/toaster/toastergui/static/js/angular.min.js b/bitbake/lib/toaster/toastergui/static/js/angular.min.js
new file mode 100644
index 0000000000..22af422a19
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/angular.min.js
@@ -0,0 +1,215 @@
+/*
+ AngularJS v1.2.23
+ (c) 2010-2014 Google, Inc. http://angularjs.org
+ License: MIT
+*/
+(function(Q,X,t){'use strict';function x(b){return function(){var a=arguments[0],c,a="["+(b?b+":":"")+a+"] http://errors.angularjs.org/1.2.23/"+(b?b+"/":"")+a;for(c=1;c<arguments.length;c++)a=a+(1==c?"?":"&")+"p"+(c-1)+"="+encodeURIComponent("function"==typeof arguments[c]?arguments[c].toString().replace(/ \{[\s\S]*$/,""):"undefined"==typeof arguments[c]?"undefined":"string"!=typeof arguments[c]?JSON.stringify(arguments[c]):arguments[c]);return Error(a)}}function fb(b){if(null==b||Fa(b))return!1;
+var a=b.length;return 1===b.nodeType&&a?!0:z(b)||H(b)||0===a||"number"===typeof a&&0<a&&a-1 in b}function r(b,a,c){var d;if(b)if(P(b))for(d in b)"prototype"==d||("length"==d||"name"==d||b.hasOwnProperty&&!b.hasOwnProperty(d))||a.call(c,b[d],d);else if(H(b)||fb(b))for(d=0;d<b.length;d++)a.call(c,b[d],d);else if(b.forEach&&b.forEach!==r)b.forEach(a,c);else for(d in b)b.hasOwnProperty(d)&&a.call(c,b[d],d);return b}function Zb(b){var a=[],c;for(c in b)b.hasOwnProperty(c)&&a.push(c);return a.sort()}function Tc(b,
+a,c){for(var d=Zb(b),e=0;e<d.length;e++)a.call(c,b[d[e]],d[e]);return d}function $b(b){return function(a,c){b(c,a)}}function gb(){for(var b=la.length,a;b;){b--;a=la[b].charCodeAt(0);if(57==a)return la[b]="A",la.join("");if(90==a)la[b]="0";else return la[b]=String.fromCharCode(a+1),la.join("")}la.unshift("0");return la.join("")}function ac(b,a){a?b.$$hashKey=a:delete b.$$hashKey}function B(b){var a=b.$$hashKey;r(arguments,function(a){a!==b&&r(a,function(a,c){b[c]=a})});ac(b,a);return b}function Z(b){return parseInt(b,
+10)}function bc(b,a){return B(new (B(function(){},{prototype:b})),a)}function y(){}function Ga(b){return b}function $(b){return function(){return b}}function D(b){return"undefined"===typeof b}function A(b){return"undefined"!==typeof b}function T(b){return null!=b&&"object"===typeof b}function z(b){return"string"===typeof b}function Ab(b){return"number"===typeof b}function sa(b){return"[object Date]"===ya.call(b)}function P(b){return"function"===typeof b}function hb(b){return"[object RegExp]"===ya.call(b)}
+function Fa(b){return b&&b.document&&b.location&&b.alert&&b.setInterval}function Uc(b){return!(!b||!(b.nodeName||b.prop&&b.attr&&b.find))}function Vc(b,a,c){var d=[];r(b,function(b,f,g){d.push(a.call(c,b,f,g))});return d}function Qa(b,a){if(b.indexOf)return b.indexOf(a);for(var c=0;c<b.length;c++)if(a===b[c])return c;return-1}function Ra(b,a){var c=Qa(b,a);0<=c&&b.splice(c,1);return a}function Ha(b,a,c,d){if(Fa(b)||b&&b.$evalAsync&&b.$watch)throw Sa("cpws");if(a){if(b===a)throw Sa("cpi");c=c||[];
+d=d||[];if(T(b)){var e=Qa(c,b);if(-1!==e)return d[e];c.push(b);d.push(a)}if(H(b))for(var f=a.length=0;f<b.length;f++)e=Ha(b[f],null,c,d),T(b[f])&&(c.push(b[f]),d.push(e)),a.push(e);else{var g=a.$$hashKey;H(a)?a.length=0:r(a,function(b,c){delete a[c]});for(f in b)e=Ha(b[f],null,c,d),T(b[f])&&(c.push(b[f]),d.push(e)),a[f]=e;ac(a,g)}}else if(a=b)H(b)?a=Ha(b,[],c,d):sa(b)?a=new Date(b.getTime()):hb(b)?(a=RegExp(b.source,b.toString().match(/[^\/]*$/)[0]),a.lastIndex=b.lastIndex):T(b)&&(a=Ha(b,{},c,d));
+return a}function ga(b,a){if(H(b)){a=a||[];for(var c=0;c<b.length;c++)a[c]=b[c]}else if(T(b))for(c in a=a||{},b)!ib.call(b,c)||"$"===c.charAt(0)&&"$"===c.charAt(1)||(a[c]=b[c]);return a||b}function za(b,a){if(b===a)return!0;if(null===b||null===a)return!1;if(b!==b&&a!==a)return!0;var c=typeof b,d;if(c==typeof a&&"object"==c)if(H(b)){if(!H(a))return!1;if((c=b.length)==a.length){for(d=0;d<c;d++)if(!za(b[d],a[d]))return!1;return!0}}else{if(sa(b))return sa(a)?isNaN(b.getTime())&&isNaN(a.getTime())||b.getTime()===
+a.getTime():!1;if(hb(b)&&hb(a))return b.toString()==a.toString();if(b&&b.$evalAsync&&b.$watch||a&&a.$evalAsync&&a.$watch||Fa(b)||Fa(a)||H(a))return!1;c={};for(d in b)if("$"!==d.charAt(0)&&!P(b[d])){if(!za(b[d],a[d]))return!1;c[d]=!0}for(d in a)if(!c.hasOwnProperty(d)&&"$"!==d.charAt(0)&&a[d]!==t&&!P(a[d]))return!1;return!0}return!1}function Bb(b,a){var c=2<arguments.length?Aa.call(arguments,2):[];return!P(a)||a instanceof RegExp?a:c.length?function(){return arguments.length?a.apply(b,c.concat(Aa.call(arguments,
+0))):a.apply(b,c)}:function(){return arguments.length?a.apply(b,arguments):a.call(b)}}function Wc(b,a){var c=a;"string"===typeof b&&"$"===b.charAt(0)?c=t:Fa(a)?c="$WINDOW":a&&X===a?c="$DOCUMENT":a&&(a.$evalAsync&&a.$watch)&&(c="$SCOPE");return c}function ta(b,a){return"undefined"===typeof b?t:JSON.stringify(b,Wc,a?" ":null)}function cc(b){return z(b)?JSON.parse(b):b}function Ta(b){"function"===typeof b?b=!0:b&&0!==b.length?(b=N(""+b),b=!("f"==b||"0"==b||"false"==b||"no"==b||"n"==b||"[]"==b)):b=!1;
+return b}function ha(b){b=u(b).clone();try{b.empty()}catch(a){}var c=u("<div>").append(b).html();try{return 3===b[0].nodeType?N(c):c.match(/^(<[^>]+>)/)[1].replace(/^<([\w\-]+)/,function(a,b){return"<"+N(b)})}catch(d){return N(c)}}function dc(b){try{return decodeURIComponent(b)}catch(a){}}function ec(b){var a={},c,d;r((b||"").split("&"),function(b){b&&(c=b.replace(/\+/g,"%20").split("="),d=dc(c[0]),A(d)&&(b=A(c[1])?dc(c[1]):!0,ib.call(a,d)?H(a[d])?a[d].push(b):a[d]=[a[d],b]:a[d]=b))});return a}function Cb(b){var a=
+[];r(b,function(b,d){H(b)?r(b,function(b){a.push(Ba(d,!0)+(!0===b?"":"="+Ba(b,!0)))}):a.push(Ba(d,!0)+(!0===b?"":"="+Ba(b,!0)))});return a.length?a.join("&"):""}function jb(b){return Ba(b,!0).replace(/%26/gi,"&").replace(/%3D/gi,"=").replace(/%2B/gi,"+")}function Ba(b,a){return encodeURIComponent(b).replace(/%40/gi,"@").replace(/%3A/gi,":").replace(/%24/g,"$").replace(/%2C/gi,",").replace(/%20/g,a?"%20":"+")}function Xc(b,a){function c(a){a&&d.push(a)}var d=[b],e,f,g=["ng:app","ng-app","x-ng-app",
+"data-ng-app"],k=/\sng[:\-]app(:\s*([\w\d_]+);?)?\s/;r(g,function(a){g[a]=!0;c(X.getElementById(a));a=a.replace(":","\\:");b.querySelectorAll&&(r(b.querySelectorAll("."+a),c),r(b.querySelectorAll("."+a+"\\:"),c),r(b.querySelectorAll("["+a+"]"),c))});r(d,function(a){if(!e){var b=k.exec(" "+a.className+" ");b?(e=a,f=(b[2]||"").replace(/\s+/g,",")):r(a.attributes,function(b){!e&&g[b.name]&&(e=a,f=b.value)})}});e&&a(e,f?[f]:[])}function fc(b,a){var c=function(){b=u(b);if(b.injector()){var c=b[0]===X?
+"document":ha(b);throw Sa("btstrpd",c.replace(/</,"&lt;").replace(/>/,"&gt;"));}a=a||[];a.unshift(["$provide",function(a){a.value("$rootElement",b)}]);a.unshift("ng");c=gc(a);c.invoke(["$rootScope","$rootElement","$compile","$injector","$animate",function(a,b,c,d,e){a.$apply(function(){b.data("$injector",d);c(b)(a)})}]);return c},d=/^NG_DEFER_BOOTSTRAP!/;if(Q&&!d.test(Q.name))return c();Q.name=Q.name.replace(d,"");Ua.resumeBootstrap=function(b){r(b,function(b){a.push(b)});c()}}function kb(b,a){a=
+a||"_";return b.replace(Yc,function(b,d){return(d?a:"")+b.toLowerCase()})}function Db(b,a,c){if(!b)throw Sa("areq",a||"?",c||"required");return b}function Va(b,a,c){c&&H(b)&&(b=b[b.length-1]);Db(P(b),a,"not a function, got "+(b&&"object"===typeof b?b.constructor.name||"Object":typeof b));return b}function Ca(b,a){if("hasOwnProperty"===b)throw Sa("badname",a);}function hc(b,a,c){if(!a)return b;a=a.split(".");for(var d,e=b,f=a.length,g=0;g<f;g++)d=a[g],b&&(b=(e=b)[d]);return!c&&P(b)?Bb(e,b):b}function Eb(b){var a=
+b[0];b=b[b.length-1];if(a===b)return u(a);var c=[a];do{a=a.nextSibling;if(!a)break;c.push(a)}while(a!==b);return u(c)}function Zc(b){var a=x("$injector"),c=x("ng");b=b.angular||(b.angular={});b.$$minErr=b.$$minErr||x;return b.module||(b.module=function(){var b={};return function(e,f,g){if("hasOwnProperty"===e)throw c("badname","module");f&&b.hasOwnProperty(e)&&(b[e]=null);return b[e]||(b[e]=function(){function b(a,d,e){return function(){c[e||"push"]([a,d,arguments]);return n}}if(!f)throw a("nomod",
+e);var c=[],d=[],l=b("$injector","invoke"),n={_invokeQueue:c,_runBlocks:d,requires:f,name:e,provider:b("$provide","provider"),factory:b("$provide","factory"),service:b("$provide","service"),value:b("$provide","value"),constant:b("$provide","constant","unshift"),animation:b("$animateProvider","register"),filter:b("$filterProvider","register"),controller:b("$controllerProvider","register"),directive:b("$compileProvider","directive"),config:l,run:function(a){d.push(a);return this}};g&&l(g);return n}())}}())}
+function $c(b){B(b,{bootstrap:fc,copy:Ha,extend:B,equals:za,element:u,forEach:r,injector:gc,noop:y,bind:Bb,toJson:ta,fromJson:cc,identity:Ga,isUndefined:D,isDefined:A,isString:z,isFunction:P,isObject:T,isNumber:Ab,isElement:Uc,isArray:H,version:ad,isDate:sa,lowercase:N,uppercase:Ia,callbacks:{counter:0},$$minErr:x,$$csp:Wa});Xa=Zc(Q);try{Xa("ngLocale")}catch(a){Xa("ngLocale",[]).provider("$locale",bd)}Xa("ng",["ngLocale"],["$provide",function(a){a.provider({$$sanitizeUri:cd});a.provider("$compile",
+ic).directive({a:dd,input:jc,textarea:jc,form:ed,script:fd,select:gd,style:hd,option:id,ngBind:jd,ngBindHtml:kd,ngBindTemplate:ld,ngClass:md,ngClassEven:nd,ngClassOdd:od,ngCloak:pd,ngController:qd,ngForm:rd,ngHide:sd,ngIf:td,ngInclude:ud,ngInit:vd,ngNonBindable:wd,ngPluralize:xd,ngRepeat:yd,ngShow:zd,ngStyle:Ad,ngSwitch:Bd,ngSwitchWhen:Cd,ngSwitchDefault:Dd,ngOptions:Ed,ngTransclude:Fd,ngModel:Gd,ngList:Hd,ngChange:Id,required:kc,ngRequired:kc,ngValue:Jd}).directive({ngInclude:Kd}).directive(Fb).directive(lc);
+a.provider({$anchorScroll:Ld,$animate:Md,$browser:Nd,$cacheFactory:Od,$controller:Pd,$document:Qd,$exceptionHandler:Rd,$filter:mc,$interpolate:Sd,$interval:Td,$http:Ud,$httpBackend:Vd,$location:Wd,$log:Xd,$parse:Yd,$rootScope:Zd,$q:$d,$sce:ae,$sceDelegate:be,$sniffer:ce,$templateCache:de,$timeout:ee,$window:fe,$$rAF:ge,$$asyncCallback:he})}])}function Ya(b){return b.replace(ie,function(a,b,d,e){return e?d.toUpperCase():d}).replace(je,"Moz$1")}function Gb(b,a,c,d){function e(b){var e=c&&b?[this.filter(b)]:
+[this],m=a,h,l,n,p,q,s;if(!d||null!=b)for(;e.length;)for(h=e.shift(),l=0,n=h.length;l<n;l++)for(p=u(h[l]),m?p.triggerHandler("$destroy"):m=!m,q=0,p=(s=p.children()).length;q<p;q++)e.push(Da(s[q]));return f.apply(this,arguments)}var f=Da.fn[b],f=f.$original||f;e.$original=f;Da.fn[b]=e}function S(b){if(b instanceof S)return b;z(b)&&(b=aa(b));if(!(this instanceof S)){if(z(b)&&"<"!=b.charAt(0))throw Hb("nosel");return new S(b)}if(z(b)){var a=b;b=X;var c;if(c=ke.exec(a))b=[b.createElement(c[1])];else{var d=
+b,e;b=d.createDocumentFragment();c=[];if(Ib.test(a)){d=b.appendChild(d.createElement("div"));e=(le.exec(a)||["",""])[1].toLowerCase();e=ba[e]||ba._default;d.innerHTML="<div>&#160;</div>"+e[1]+a.replace(me,"<$1></$2>")+e[2];d.removeChild(d.firstChild);for(a=e[0];a--;)d=d.lastChild;a=0;for(e=d.childNodes.length;a<e;++a)c.push(d.childNodes[a]);d=b.firstChild;d.textContent=""}else c.push(d.createTextNode(a));b.textContent="";b.innerHTML="";b=c}Jb(this,b);u(X.createDocumentFragment()).append(this)}else Jb(this,
+b)}function Kb(b){return b.cloneNode(!0)}function Ja(b){Lb(b);var a=0;for(b=b.childNodes||[];a<b.length;a++)Ja(b[a])}function nc(b,a,c,d){if(A(d))throw Hb("offargs");var e=ma(b,"events");ma(b,"handle")&&(D(a)?r(e,function(a,c){Za(b,c,a);delete e[c]}):r(a.split(" "),function(a){D(c)?(Za(b,a,e[a]),delete e[a]):Ra(e[a]||[],c)}))}function Lb(b,a){var c=b.ng339,d=$a[c];d&&(a?delete $a[c].data[a]:(d.handle&&(d.events.$destroy&&d.handle({},"$destroy"),nc(b)),delete $a[c],b.ng339=t))}function ma(b,a,c){var d=
+b.ng339,d=$a[d||-1];if(A(c))d||(b.ng339=d=++ne,d=$a[d]={}),d[a]=c;else return d&&d[a]}function Mb(b,a,c){var d=ma(b,"data"),e=A(c),f=!e&&A(a),g=f&&!T(a);d||g||ma(b,"data",d={});if(e)d[a]=c;else if(f){if(g)return d&&d[a];B(d,a)}else return d}function Nb(b,a){return b.getAttribute?-1<(" "+(b.getAttribute("class")||"")+" ").replace(/[\n\t]/g," ").indexOf(" "+a+" "):!1}function lb(b,a){a&&b.setAttribute&&r(a.split(" "),function(a){b.setAttribute("class",aa((" "+(b.getAttribute("class")||"")+" ").replace(/[\n\t]/g,
+" ").replace(" "+aa(a)+" "," ")))})}function mb(b,a){if(a&&b.setAttribute){var c=(" "+(b.getAttribute("class")||"")+" ").replace(/[\n\t]/g," ");r(a.split(" "),function(a){a=aa(a);-1===c.indexOf(" "+a+" ")&&(c+=a+" ")});b.setAttribute("class",aa(c))}}function Jb(b,a){if(a){a=a.nodeName||!A(a.length)||Fa(a)?[a]:a;for(var c=0;c<a.length;c++)b.push(a[c])}}function oc(b,a){return nb(b,"$"+(a||"ngController")+"Controller")}function nb(b,a,c){9==b.nodeType&&(b=b.documentElement);for(a=H(a)?a:[a];b;){for(var d=
+0,e=a.length;d<e;d++)if((c=u.data(b,a[d]))!==t)return c;b=b.parentNode||11===b.nodeType&&b.host}}function pc(b){for(var a=0,c=b.childNodes;a<c.length;a++)Ja(c[a]);for(;b.firstChild;)b.removeChild(b.firstChild)}function qc(b,a){var c=ob[a.toLowerCase()];return c&&rc[b.nodeName]&&c}function oe(b,a){var c=function(c,e){c.preventDefault||(c.preventDefault=function(){c.returnValue=!1});c.stopPropagation||(c.stopPropagation=function(){c.cancelBubble=!0});c.target||(c.target=c.srcElement||X);if(D(c.defaultPrevented)){var f=
+c.preventDefault;c.preventDefault=function(){c.defaultPrevented=!0;f.call(c)};c.defaultPrevented=!1}c.isDefaultPrevented=function(){return c.defaultPrevented||!1===c.returnValue};var g=ga(a[e||c.type]||[]);r(g,function(a){a.call(b,c)});8>=R?(c.preventDefault=null,c.stopPropagation=null,c.isDefaultPrevented=null):(delete c.preventDefault,delete c.stopPropagation,delete c.isDefaultPrevented)};c.elem=b;return c}function Ka(b,a){var c=typeof b,d;"function"==c||"object"==c&&null!==b?"function"==typeof(d=
+b.$$hashKey)?d=b.$$hashKey():d===t&&(d=b.$$hashKey=(a||gb)()):d=b;return c+":"+d}function ab(b,a){if(a){var c=0;this.nextUid=function(){return++c}}r(b,this.put,this)}function sc(b){var a,c;"function"===typeof b?(a=b.$inject)||(a=[],b.length&&(c=b.toString().replace(pe,""),c=c.match(qe),r(c[1].split(re),function(b){b.replace(se,function(b,c,d){a.push(d)})})),b.$inject=a):H(b)?(c=b.length-1,Va(b[c],"fn"),a=b.slice(0,c)):Va(b,"fn",!0);return a}function gc(b){function a(a){return function(b,c){if(T(b))r(b,
+$b(a));else return a(b,c)}}function c(a,b){Ca(a,"service");if(P(b)||H(b))b=n.instantiate(b);if(!b.$get)throw bb("pget",a);return l[a+k]=b}function d(a,b){return c(a,{$get:b})}function e(a){var b=[],c,d,f,k;r(a,function(a){if(!h.get(a)){h.put(a,!0);try{if(z(a))for(c=Xa(a),b=b.concat(e(c.requires)).concat(c._runBlocks),d=c._invokeQueue,f=0,k=d.length;f<k;f++){var g=d[f],m=n.get(g[0]);m[g[1]].apply(m,g[2])}else P(a)?b.push(n.invoke(a)):H(a)?b.push(n.invoke(a)):Va(a,"module")}catch(l){throw H(a)&&(a=
+a[a.length-1]),l.message&&(l.stack&&-1==l.stack.indexOf(l.message))&&(l=l.message+"\n"+l.stack),bb("modulerr",a,l.stack||l.message||l);}}});return b}function f(a,b){function c(d){if(a.hasOwnProperty(d)){if(a[d]===g)throw bb("cdep",d+" <- "+m.join(" <- "));return a[d]}try{return m.unshift(d),a[d]=g,a[d]=b(d)}catch(e){throw a[d]===g&&delete a[d],e;}finally{m.shift()}}function d(a,b,e){var f=[],k=sc(a),g,m,h;m=0;for(g=k.length;m<g;m++){h=k[m];if("string"!==typeof h)throw bb("itkn",h);f.push(e&&e.hasOwnProperty(h)?
+e[h]:c(h))}H(a)&&(a=a[g]);return a.apply(b,f)}return{invoke:d,instantiate:function(a,b){var c=function(){},e;c.prototype=(H(a)?a[a.length-1]:a).prototype;c=new c;e=d(a,c,b);return T(e)||P(e)?e:c},get:c,annotate:sc,has:function(b){return l.hasOwnProperty(b+k)||a.hasOwnProperty(b)}}}var g={},k="Provider",m=[],h=new ab([],!0),l={$provide:{provider:a(c),factory:a(d),service:a(function(a,b){return d(a,["$injector",function(a){return a.instantiate(b)}])}),value:a(function(a,b){return d(a,$(b))}),constant:a(function(a,
+b){Ca(a,"constant");l[a]=b;p[a]=b}),decorator:function(a,b){var c=n.get(a+k),d=c.$get;c.$get=function(){var a=q.invoke(d,c);return q.invoke(b,null,{$delegate:a})}}}},n=l.$injector=f(l,function(){throw bb("unpr",m.join(" <- "));}),p={},q=p.$injector=f(p,function(a){a=n.get(a+k);return q.invoke(a.$get,a)});r(e(b),function(a){q.invoke(a||y)});return q}function Ld(){var b=!0;this.disableAutoScrolling=function(){b=!1};this.$get=["$window","$location","$rootScope",function(a,c,d){function e(a){var b=null;
+r(a,function(a){b||"a"!==N(a.nodeName)||(b=a)});return b}function f(){var b=c.hash(),d;b?(d=g.getElementById(b))?d.scrollIntoView():(d=e(g.getElementsByName(b)))?d.scrollIntoView():"top"===b&&a.scrollTo(0,0):a.scrollTo(0,0)}var g=a.document;b&&d.$watch(function(){return c.hash()},function(){d.$evalAsync(f)});return f}]}function he(){this.$get=["$$rAF","$timeout",function(b,a){return b.supported?function(a){return b(a)}:function(b){return a(b,0,!1)}}]}function te(b,a,c,d){function e(a){try{a.apply(null,
+Aa.call(arguments,1))}finally{if(s--,0===s)for(;L.length;)try{L.pop()()}catch(b){c.error(b)}}}function f(a,b){(function ca(){r(v,function(a){a()});C=b(ca,a)})()}function g(){w=null;O!=k.url()&&(O=k.url(),r(da,function(a){a(k.url())}))}var k=this,m=a[0],h=b.location,l=b.history,n=b.setTimeout,p=b.clearTimeout,q={};k.isMock=!1;var s=0,L=[];k.$$completeOutstandingRequest=e;k.$$incOutstandingRequestCount=function(){s++};k.notifyWhenNoOutstandingRequests=function(a){r(v,function(a){a()});0===s?a():L.push(a)};
+var v=[],C;k.addPollFn=function(a){D(C)&&f(100,n);v.push(a);return a};var O=h.href,I=a.find("base"),w=null;k.url=function(a,c){h!==b.location&&(h=b.location);l!==b.history&&(l=b.history);if(a){if(O!=a)return O=a,d.history?c?l.replaceState(null,"",a):(l.pushState(null,"",a),I.attr("href",I.attr("href"))):(w=a,c?h.replace(a):h.href=a),k}else return w||h.href.replace(/%27/g,"'")};var da=[],K=!1;k.onUrlChange=function(a){if(!K){if(d.history)u(b).on("popstate",g);if(d.hashchange)u(b).on("hashchange",g);
+else k.addPollFn(g);K=!0}da.push(a);return a};k.baseHref=function(){var a=I.attr("href");return a?a.replace(/^(https?\:)?\/\/[^\/]*/,""):""};var W={},ea="",J=k.baseHref();k.cookies=function(a,b){var d,e,f,k;if(a)b===t?m.cookie=escape(a)+"=;path="+J+";expires=Thu, 01 Jan 1970 00:00:00 GMT":z(b)&&(d=(m.cookie=escape(a)+"="+escape(b)+";path="+J).length+1,4096<d&&c.warn("Cookie '"+a+"' possibly not set or overflowed because it was too large ("+d+" > 4096 bytes)!"));else{if(m.cookie!==ea)for(ea=m.cookie,
+d=ea.split("; "),W={},f=0;f<d.length;f++)e=d[f],k=e.indexOf("="),0<k&&(a=unescape(e.substring(0,k)),W[a]===t&&(W[a]=unescape(e.substring(k+1))));return W}};k.defer=function(a,b){var c;s++;c=n(function(){delete q[c];e(a)},b||0);q[c]=!0;return c};k.defer.cancel=function(a){return q[a]?(delete q[a],p(a),e(y),!0):!1}}function Nd(){this.$get=["$window","$log","$sniffer","$document",function(b,a,c,d){return new te(b,d,a,c)}]}function Od(){this.$get=function(){function b(b,d){function e(a){a!=n&&(p?p==a&&
+(p=a.n):p=a,f(a.n,a.p),f(a,n),n=a,n.n=null)}function f(a,b){a!=b&&(a&&(a.p=b),b&&(b.n=a))}if(b in a)throw x("$cacheFactory")("iid",b);var g=0,k=B({},d,{id:b}),m={},h=d&&d.capacity||Number.MAX_VALUE,l={},n=null,p=null;return a[b]={put:function(a,b){if(h<Number.MAX_VALUE){var c=l[a]||(l[a]={key:a});e(c)}if(!D(b))return a in m||g++,m[a]=b,g>h&&this.remove(p.key),b},get:function(a){if(h<Number.MAX_VALUE){var b=l[a];if(!b)return;e(b)}return m[a]},remove:function(a){if(h<Number.MAX_VALUE){var b=l[a];if(!b)return;
+b==n&&(n=b.p);b==p&&(p=b.n);f(b.n,b.p);delete l[a]}delete m[a];g--},removeAll:function(){m={};g=0;l={};n=p=null},destroy:function(){l=k=m=null;delete a[b]},info:function(){return B({},k,{size:g})}}}var a={};b.info=function(){var b={};r(a,function(a,e){b[e]=a.info()});return b};b.get=function(b){return a[b]};return b}}function de(){this.$get=["$cacheFactory",function(b){return b("templates")}]}function ic(b,a){var c={},d="Directive",e=/^\s*directive\:\s*([\d\w_\-]+)\s+(.*)$/,f=/(([\d\w_\-]+)(?:\:([^;]+))?;?)/,
+g=/^(on[a-z]+|formaction)$/;this.directive=function m(a,e){Ca(a,"directive");z(a)?(Db(e,"directiveFactory"),c.hasOwnProperty(a)||(c[a]=[],b.factory(a+d,["$injector","$exceptionHandler",function(b,d){var e=[];r(c[a],function(c,f){try{var g=b.invoke(c);P(g)?g={compile:$(g)}:!g.compile&&g.link&&(g.compile=$(g.link));g.priority=g.priority||0;g.index=f;g.name=g.name||a;g.require=g.require||g.controller&&g.name;g.restrict=g.restrict||"A";e.push(g)}catch(m){d(m)}});return e}])),c[a].push(e)):r(a,$b(m));
+return this};this.aHrefSanitizationWhitelist=function(b){return A(b)?(a.aHrefSanitizationWhitelist(b),this):a.aHrefSanitizationWhitelist()};this.imgSrcSanitizationWhitelist=function(b){return A(b)?(a.imgSrcSanitizationWhitelist(b),this):a.imgSrcSanitizationWhitelist()};this.$get=["$injector","$interpolate","$exceptionHandler","$http","$templateCache","$parse","$controller","$rootScope","$document","$sce","$animate","$$sanitizeUri",function(a,b,l,n,p,q,s,L,v,C,O,I){function w(a,b,c,d,e){a instanceof
+u||(a=u(a));r(a,function(b,c){3==b.nodeType&&b.nodeValue.match(/\S+/)&&(a[c]=u(b).wrap("<span></span>").parent()[0])});var f=K(a,b,a,c,d,e);da(a,"ng-scope");return function(b,c,d,e){Db(b,"scope");var g=c?La.clone.call(a):a;r(d,function(a,b){g.data("$"+b+"Controller",a)});d=0;for(var m=g.length;d<m;d++){var h=g[d].nodeType;1!==h&&9!==h||g.eq(d).data("$scope",b)}c&&c(g,b);f&&f(b,g,g,e);return g}}function da(a,b){try{a.addClass(b)}catch(c){}}function K(a,b,c,d,e,f){function g(a,c,d,e){var f,h,l,q,n,
+p,s;f=c.length;var M=Array(f);for(q=0;q<f;q++)M[q]=c[q];p=q=0;for(n=m.length;q<n;p++)h=M[p],c=m[q++],f=m[q++],c?(c.scope?(l=a.$new(),u.data(h,"$scope",l)):l=a,s=c.transcludeOnThisElement?W(a,c.transclude,e):!c.templateOnThisElement&&e?e:!e&&b?W(a,b):null,c(f,l,h,d,s)):f&&f(a,h.childNodes,t,e)}for(var m=[],h,l,q,n,p=0;p<a.length;p++)h=new Ob,l=ea(a[p],[],h,0===p?d:t,e),(f=l.length?F(l,a[p],h,b,c,null,[],[],f):null)&&f.scope&&da(h.$$element,"ng-scope"),h=f&&f.terminal||!(q=a[p].childNodes)||!q.length?
+null:K(q,f?(f.transcludeOnThisElement||!f.templateOnThisElement)&&f.transclude:b),m.push(f,h),n=n||f||h,f=null;return n?g:null}function W(a,b,c){return function(d,e,f){var g=!1;d||(d=a.$new(),g=d.$$transcluded=!0);e=b(d,e,f,c);if(g)e.on("$destroy",function(){d.$destroy()});return e}}function ea(a,b,c,d,g){var h=c.$attr,m;switch(a.nodeType){case 1:ca(b,na(Ma(a).toLowerCase()),"E",d,g);for(var l,q,n,p=a.attributes,s=0,L=p&&p.length;s<L;s++){var C=!1,O=!1;l=p[s];if(!R||8<=R||l.specified){m=l.name;q=
+aa(l.value);l=na(m);if(n=V.test(l))m=kb(l.substr(6),"-");var v=l.replace(/(Start|End)$/,"");l===v+"Start"&&(C=m,O=m.substr(0,m.length-5)+"end",m=m.substr(0,m.length-6));l=na(m.toLowerCase());h[l]=m;if(n||!c.hasOwnProperty(l))c[l]=q,qc(a,l)&&(c[l]=!0);Q(a,b,q,l);ca(b,l,"A",d,g,C,O)}}a=a.className;if(z(a)&&""!==a)for(;m=f.exec(a);)l=na(m[2]),ca(b,l,"C",d,g)&&(c[l]=aa(m[3])),a=a.substr(m.index+m[0].length);break;case 3:x(b,a.nodeValue);break;case 8:try{if(m=e.exec(a.nodeValue))l=na(m[1]),ca(b,l,"M",
+d,g)&&(c[l]=aa(m[2]))}catch(w){}}b.sort(D);return b}function J(a,b,c){var d=[],e=0;if(b&&a.hasAttribute&&a.hasAttribute(b)){do{if(!a)throw ia("uterdir",b,c);1==a.nodeType&&(a.hasAttribute(b)&&e++,a.hasAttribute(c)&&e--);d.push(a);a=a.nextSibling}while(0<e)}else d.push(a);return u(d)}function E(a,b,c){return function(d,e,f,g,m){e=J(e[0],b,c);return a(d,e,f,g,m)}}function F(a,c,d,e,f,g,m,n,p){function L(a,b,c,d){if(a){c&&(a=E(a,c,d));a.require=G.require;a.directiveName=oa;if(K===G||G.$$isolateScope)a=
+tc(a,{isolateScope:!0});m.push(a)}if(b){c&&(b=E(b,c,d));b.require=G.require;b.directiveName=oa;if(K===G||G.$$isolateScope)b=tc(b,{isolateScope:!0});n.push(b)}}function C(a,b,c,d){var e,f="data",g=!1;if(z(b)){for(;"^"==(e=b.charAt(0))||"?"==e;)b=b.substr(1),"^"==e&&(f="inheritedData"),g=g||"?"==e;e=null;d&&"data"===f&&(e=d[b]);e=e||c[f]("$"+b+"Controller");if(!e&&!g)throw ia("ctreq",b,a);}else H(b)&&(e=[],r(b,function(b){e.push(C(a,b,c,d))}));return e}function O(a,e,f,g,p){function L(a,b){var c;2>
+arguments.length&&(b=a,a=t);Ea&&(c=ea);return p(a,b,c)}var v,M,w,I,E,J,ea={},qb;v=c===f?d:ga(d,new Ob(u(f),d.$attr));M=v.$$element;if(K){var Na=/^\s*([@=&])(\??)\s*(\w*)\s*$/;J=e.$new(!0);!F||F!==K&&F!==K.$$originalDirective?M.data("$isolateScopeNoTemplate",J):M.data("$isolateScope",J);da(M,"ng-isolate-scope");r(K.scope,function(a,c){var d=a.match(Na)||[],f=d[3]||c,g="?"==d[2],d=d[1],m,l,n,p;J.$$isolateBindings[c]=d+f;switch(d){case "@":v.$observe(f,function(a){J[c]=a});v.$$observers[f].$$scope=e;
+v[f]&&(J[c]=b(v[f])(e));break;case "=":if(g&&!v[f])break;l=q(v[f]);p=l.literal?za:function(a,b){return a===b||a!==a&&b!==b};n=l.assign||function(){m=J[c]=l(e);throw ia("nonassign",v[f],K.name);};m=J[c]=l(e);J.$watch(function(){var a=l(e);p(a,J[c])||(p(a,m)?n(e,a=J[c]):J[c]=a);return m=a},null,l.literal);break;case "&":l=q(v[f]);J[c]=function(a){return l(e,a)};break;default:throw ia("iscp",K.name,c,a);}})}qb=p&&L;W&&r(W,function(a){var b={$scope:a===K||a.$$isolateScope?J:e,$element:M,$attrs:v,$transclude:qb},
+c;E=a.controller;"@"==E&&(E=v[a.name]);c=s(E,b);ea[a.name]=c;Ea||M.data("$"+a.name+"Controller",c);a.controllerAs&&(b.$scope[a.controllerAs]=c)});g=0;for(w=m.length;g<w;g++)try{I=m[g],I(I.isolateScope?J:e,M,v,I.require&&C(I.directiveName,I.require,M,ea),qb)}catch(ca){l(ca,ha(M))}g=e;K&&(K.template||null===K.templateUrl)&&(g=J);a&&a(g,f.childNodes,t,p);for(g=n.length-1;0<=g;g--)try{I=n[g],I(I.isolateScope?J:e,M,v,I.require&&C(I.directiveName,I.require,M,ea),qb)}catch(pb){l(pb,ha(M))}}p=p||{};for(var v=
+-Number.MAX_VALUE,I,W=p.controllerDirectives,K=p.newIsolateScopeDirective,F=p.templateDirective,ca=p.nonTlbTranscludeDirective,D=!1,B=!1,Ea=p.hasElementTranscludeDirective,x=d.$$element=u(c),G,oa,U,S=e,R,Q=0,pa=a.length;Q<pa;Q++){G=a[Q];var V=G.$$start,Y=G.$$end;V&&(x=J(c,V,Y));U=t;if(v>G.priority)break;if(U=G.scope)I=I||G,G.templateUrl||(N("new/isolated scope",K,G,x),T(U)&&(K=G));oa=G.name;!G.templateUrl&&G.controller&&(U=G.controller,W=W||{},N("'"+oa+"' controller",W[oa],G,x),W[oa]=G);if(U=G.transclude)D=
+!0,G.$$tlb||(N("transclusion",ca,G,x),ca=G),"element"==U?(Ea=!0,v=G.priority,U=x,x=d.$$element=u(X.createComment(" "+oa+": "+d[oa]+" ")),c=x[0],Na(f,Aa.call(U,0),c),S=w(U,e,v,g&&g.name,{nonTlbTranscludeDirective:ca})):(U=u(Kb(c)).contents(),x.empty(),S=w(U,e));if(G.template)if(B=!0,N("template",F,G,x),F=G,U=P(G.template)?G.template(x,d):G.template,U=Z(U),G.replace){g=G;U=Ib.test(U)?u(aa(U)):[];c=U[0];if(1!=U.length||1!==c.nodeType)throw ia("tplrt",oa,"");Na(f,x,c);pa={$attr:{}};U=ea(c,[],pa);var $=
+a.splice(Q+1,a.length-(Q+1));K&&pb(U);a=a.concat(U).concat($);A(d,pa);pa=a.length}else x.html(U);if(G.templateUrl)B=!0,N("template",F,G,x),F=G,G.replace&&(g=G),O=y(a.splice(Q,a.length-Q),x,d,f,D&&S,m,n,{controllerDirectives:W,newIsolateScopeDirective:K,templateDirective:F,nonTlbTranscludeDirective:ca}),pa=a.length;else if(G.compile)try{R=G.compile(x,d,S),P(R)?L(null,R,V,Y):R&&L(R.pre,R.post,V,Y)}catch(ba){l(ba,ha(x))}G.terminal&&(O.terminal=!0,v=Math.max(v,G.priority))}O.scope=I&&!0===I.scope;O.transcludeOnThisElement=
+D;O.templateOnThisElement=B;O.transclude=S;p.hasElementTranscludeDirective=Ea;return O}function pb(a){for(var b=0,c=a.length;b<c;b++)a[b]=bc(a[b],{$$isolateScope:!0})}function ca(b,e,f,g,h,q,n){if(e===h)return null;h=null;if(c.hasOwnProperty(e)){var p;e=a.get(e+d);for(var s=0,v=e.length;s<v;s++)try{p=e[s],(g===t||g>p.priority)&&-1!=p.restrict.indexOf(f)&&(q&&(p=bc(p,{$$start:q,$$end:n})),b.push(p),h=p)}catch(L){l(L)}}return h}function A(a,b){var c=b.$attr,d=a.$attr,e=a.$$element;r(a,function(d,e){"$"!=
+e.charAt(0)&&(b[e]&&b[e]!==d&&(d+=("style"===e?";":" ")+b[e]),a.$set(e,d,!0,c[e]))});r(b,function(b,f){"class"==f?(da(e,b),a["class"]=(a["class"]?a["class"]+" ":"")+b):"style"==f?(e.attr("style",e.attr("style")+";"+b),a.style=(a.style?a.style+";":"")+b):"$"==f.charAt(0)||a.hasOwnProperty(f)||(a[f]=b,d[f]=c[f])})}function y(a,b,c,d,e,f,g,m){var h=[],l,q,s=b[0],v=a.shift(),L=B({},v,{templateUrl:null,transclude:null,replace:null,$$originalDirective:v}),O=P(v.templateUrl)?v.templateUrl(b,c):v.templateUrl;
+b.empty();n.get(C.getTrustedResourceUrl(O),{cache:p}).success(function(n){var p,C;n=Z(n);if(v.replace){n=Ib.test(n)?u(aa(n)):[];p=n[0];if(1!=n.length||1!==p.nodeType)throw ia("tplrt",v.name,O);n={$attr:{}};Na(d,b,p);var w=ea(p,[],n);T(v.scope)&&pb(w);a=w.concat(a);A(c,n)}else p=s,b.html(n);a.unshift(L);l=F(a,p,c,e,b,v,f,g,m);r(d,function(a,c){a==p&&(d[c]=b[0])});for(q=K(b[0].childNodes,e);h.length;){n=h.shift();C=h.shift();var I=h.shift(),E=h.shift(),w=b[0];if(C!==s){var J=C.className;m.hasElementTranscludeDirective&&
+v.replace||(w=Kb(p));Na(I,u(C),w);da(u(w),J)}C=l.transcludeOnThisElement?W(n,l.transclude,E):E;l(q,n,w,d,C)}h=null}).error(function(a,b,c,d){throw ia("tpload",d.url);});return function(a,b,c,d,e){a=e;h?(h.push(b),h.push(c),h.push(d),h.push(a)):(l.transcludeOnThisElement&&(a=W(b,l.transclude,e)),l(q,b,c,d,a))}}function D(a,b){var c=b.priority-a.priority;return 0!==c?c:a.name!==b.name?a.name<b.name?-1:1:a.index-b.index}function N(a,b,c,d){if(b)throw ia("multidir",b.name,c.name,a,ha(d));}function x(a,
+c){var d=b(c,!0);d&&a.push({priority:0,compile:function(a){var b=a.parent().length;b&&da(a.parent(),"ng-binding");return function(a,c){var e=c.parent(),f=e.data("$binding")||[];f.push(d);e.data("$binding",f);b||da(e,"ng-binding");a.$watch(d,function(a){c[0].nodeValue=a})}}})}function S(a,b){if("srcdoc"==b)return C.HTML;var c=Ma(a);if("xlinkHref"==b||"FORM"==c&&"action"==b||"IMG"!=c&&("src"==b||"ngSrc"==b))return C.RESOURCE_URL}function Q(a,c,d,e){var f=b(d,!0);if(f){if("multiple"===e&&"SELECT"===
+Ma(a))throw ia("selmulti",ha(a));c.push({priority:100,compile:function(){return{pre:function(c,d,m){d=m.$$observers||(m.$$observers={});if(g.test(e))throw ia("nodomevents");if(f=b(m[e],!0,S(a,e)))m[e]=f(c),(d[e]||(d[e]=[])).$$inter=!0,(m.$$observers&&m.$$observers[e].$$scope||c).$watch(f,function(a,b){"class"===e&&a!=b?m.$updateClass(a,b):m.$set(e,a)})}}}})}}function Na(a,b,c){var d=b[0],e=b.length,f=d.parentNode,g,m;if(a)for(g=0,m=a.length;g<m;g++)if(a[g]==d){a[g++]=c;m=g+e-1;for(var h=a.length;g<
+h;g++,m++)m<h?a[g]=a[m]:delete a[g];a.length-=e-1;break}f&&f.replaceChild(c,d);a=X.createDocumentFragment();a.appendChild(d);c[u.expando]=d[u.expando];d=1;for(e=b.length;d<e;d++)f=b[d],u(f).remove(),a.appendChild(f),delete b[d];b[0]=c;b.length=1}function tc(a,b){return B(function(){return a.apply(null,arguments)},a,b)}var Ob=function(a,b){this.$$element=a;this.$attr=b||{}};Ob.prototype={$normalize:na,$addClass:function(a){a&&0<a.length&&O.addClass(this.$$element,a)},$removeClass:function(a){a&&0<
+a.length&&O.removeClass(this.$$element,a)},$updateClass:function(a,b){var c=uc(a,b),d=uc(b,a);0===c.length?O.removeClass(this.$$element,d):0===d.length?O.addClass(this.$$element,c):O.setClass(this.$$element,c,d)},$set:function(a,b,c,d){var e=qc(this.$$element[0],a);e&&(this.$$element.prop(a,b),d=e);this[a]=b;d?this.$attr[a]=d:(d=this.$attr[a])||(this.$attr[a]=d=kb(a,"-"));e=Ma(this.$$element);if("A"===e&&"href"===a||"IMG"===e&&"src"===a)this[a]=b=I(b,"src"===a);!1!==c&&(null===b||b===t?this.$$element.removeAttr(d):
+this.$$element.attr(d,b));(c=this.$$observers)&&r(c[a],function(a){try{a(b)}catch(c){l(c)}})},$observe:function(a,b){var c=this,d=c.$$observers||(c.$$observers={}),e=d[a]||(d[a]=[]);e.push(b);L.$evalAsync(function(){e.$$inter||b(c[a])});return b}};var pa=b.startSymbol(),Ea=b.endSymbol(),Z="{{"==pa||"}}"==Ea?Ga:function(a){return a.replace(/\{\{/g,pa).replace(/}}/g,Ea)},V=/^ngAttr[A-Z]/;return w}]}function na(b){return Ya(b.replace(ue,""))}function uc(b,a){var c="",d=b.split(/\s+/),e=a.split(/\s+/),
+f=0;a:for(;f<d.length;f++){for(var g=d[f],k=0;k<e.length;k++)if(g==e[k])continue a;c+=(0<c.length?" ":"")+g}return c}function Pd(){var b={},a=/^(\S+)(\s+as\s+(\w+))?$/;this.register=function(a,d){Ca(a,"controller");T(a)?B(b,a):b[a]=d};this.$get=["$injector","$window",function(c,d){return function(e,f){var g,k,m;z(e)&&(g=e.match(a),k=g[1],m=g[3],e=b.hasOwnProperty(k)?b[k]:hc(f.$scope,k,!0)||hc(d,k,!0),Va(e,k,!0));g=c.instantiate(e,f);if(m){if(!f||"object"!==typeof f.$scope)throw x("$controller")("noscp",
+k||e.name,m);f.$scope[m]=g}return g}}]}function Qd(){this.$get=["$window",function(b){return u(b.document)}]}function Rd(){this.$get=["$log",function(b){return function(a,c){b.error.apply(b,arguments)}}]}function vc(b){var a={},c,d,e;if(!b)return a;r(b.split("\n"),function(b){e=b.indexOf(":");c=N(aa(b.substr(0,e)));d=aa(b.substr(e+1));c&&(a[c]=a[c]?a[c]+", "+d:d)});return a}function wc(b){var a=T(b)?b:t;return function(c){a||(a=vc(b));return c?a[N(c)]||null:a}}function xc(b,a,c){if(P(c))return c(b,
+a);r(c,function(c){b=c(b,a)});return b}function Ud(){var b=/^\s*(\[|\{[^\{])/,a=/[\}\]]\s*$/,c=/^\)\]\}',?\n/,d={"Content-Type":"application/json;charset=utf-8"},e=this.defaults={transformResponse:[function(d){z(d)&&(d=d.replace(c,""),b.test(d)&&a.test(d)&&(d=cc(d)));return d}],transformRequest:[function(a){return T(a)&&"[object File]"!==ya.call(a)&&"[object Blob]"!==ya.call(a)?ta(a):a}],headers:{common:{Accept:"application/json, text/plain, */*"},post:ga(d),put:ga(d),patch:ga(d)},xsrfCookieName:"XSRF-TOKEN",
+xsrfHeaderName:"X-XSRF-TOKEN"},f=this.interceptors=[],g=this.responseInterceptors=[];this.$get=["$httpBackend","$browser","$cacheFactory","$rootScope","$q","$injector",function(a,b,c,d,n,p){function q(a){function b(a){var d=B({},a,{data:xc(a.data,a.headers,c.transformResponse)});return 200<=a.status&&300>a.status?d:n.reject(d)}var c={method:"get",transformRequest:e.transformRequest,transformResponse:e.transformResponse},d=function(a){var b=e.headers,c=B({},a.headers),d,f,b=B({},b.common,b[N(a.method)]);
+a:for(d in b){a=N(d);for(f in c)if(N(f)===a)continue a;c[d]=b[d]}(function(a){var b;r(a,function(c,d){P(c)&&(b=c(),null!=b?a[d]=b:delete a[d])})})(c);return c}(a);B(c,a);c.headers=d;c.method=Ia(c.method);var f=[function(a){d=a.headers;var c=xc(a.data,wc(d),a.transformRequest);D(c)&&r(d,function(a,b){"content-type"===N(b)&&delete d[b]});D(a.withCredentials)&&!D(e.withCredentials)&&(a.withCredentials=e.withCredentials);return s(a,c,d).then(b,b)},t],g=n.when(c);for(r(C,function(a){(a.request||a.requestError)&&
+f.unshift(a.request,a.requestError);(a.response||a.responseError)&&f.push(a.response,a.responseError)});f.length;){a=f.shift();var m=f.shift(),g=g.then(a,m)}g.success=function(a){g.then(function(b){a(b.data,b.status,b.headers,c)});return g};g.error=function(a){g.then(null,function(b){a(b.data,b.status,b.headers,c)});return g};return g}function s(c,f,g){function h(a,b,c,e){E&&(200<=a&&300>a?E.put(u,[a,b,vc(c),e]):E.remove(u));p(b,a,c,e);d.$$phase||d.$apply()}function p(a,b,d,e){b=Math.max(b,0);(200<=
+b&&300>b?C.resolve:C.reject)({data:a,status:b,headers:wc(d),config:c,statusText:e})}function s(){var a=Qa(q.pendingRequests,c);-1!==a&&q.pendingRequests.splice(a,1)}var C=n.defer(),r=C.promise,E,F,u=L(c.url,c.params);q.pendingRequests.push(c);r.then(s,s);!c.cache&&!e.cache||(!1===c.cache||"GET"!==c.method&&"JSONP"!==c.method)||(E=T(c.cache)?c.cache:T(e.cache)?e.cache:v);if(E)if(F=E.get(u),A(F)){if(F&&P(F.then))return F.then(s,s),F;H(F)?p(F[1],F[0],ga(F[2]),F[3]):p(F,200,{},"OK")}else E.put(u,r);D(F)&&
+((F=Pb(c.url)?b.cookies()[c.xsrfCookieName||e.xsrfCookieName]:t)&&(g[c.xsrfHeaderName||e.xsrfHeaderName]=F),a(c.method,u,f,h,g,c.timeout,c.withCredentials,c.responseType));return r}function L(a,b){if(!b)return a;var c=[];Tc(b,function(a,b){null===a||D(a)||(H(a)||(a=[a]),r(a,function(a){T(a)&&(sa(a)?a=a.toISOString():T(a)&&(a=ta(a)));c.push(Ba(b)+"="+Ba(a))}))});0<c.length&&(a+=(-1==a.indexOf("?")?"?":"&")+c.join("&"));return a}var v=c("$http"),C=[];r(f,function(a){C.unshift(z(a)?p.get(a):p.invoke(a))});
+r(g,function(a,b){var c=z(a)?p.get(a):p.invoke(a);C.splice(b,0,{response:function(a){return c(n.when(a))},responseError:function(a){return c(n.reject(a))}})});q.pendingRequests=[];(function(a){r(arguments,function(a){q[a]=function(b,c){return q(B(c||{},{method:a,url:b}))}})})("get","delete","head","jsonp");(function(a){r(arguments,function(a){q[a]=function(b,c,d){return q(B(d||{},{method:a,url:b,data:c}))}})})("post","put");q.defaults=e;return q}]}function ve(b){if(8>=R&&(!b.match(/^(get|post|head|put|delete|options)$/i)||
+!Q.XMLHttpRequest))return new Q.ActiveXObject("Microsoft.XMLHTTP");if(Q.XMLHttpRequest)return new Q.XMLHttpRequest;throw x("$httpBackend")("noxhr");}function Vd(){this.$get=["$browser","$window","$document",function(b,a,c){return we(b,ve,b.defer,a.angular.callbacks,c[0])}]}function we(b,a,c,d,e){function f(a,b,c){var f=e.createElement("script"),g=null;f.type="text/javascript";f.src=a;f.async=!0;g=function(a){Za(f,"load",g);Za(f,"error",g);e.body.removeChild(f);f=null;var k=-1,s="unknown";a&&("load"!==
+a.type||d[b].called||(a={type:"error"}),s=a.type,k="error"===a.type?404:200);c&&c(k,s)};rb(f,"load",g);rb(f,"error",g);8>=R&&(f.onreadystatechange=function(){z(f.readyState)&&/loaded|complete/.test(f.readyState)&&(f.onreadystatechange=null,g({type:"load"}))});e.body.appendChild(f);return g}var g=-1;return function(e,m,h,l,n,p,q,s){function L(){C=g;I&&I();w&&w.abort()}function v(a,d,e,f,g){K&&c.cancel(K);I=w=null;0===d&&(d=e?200:"file"==ua(m).protocol?404:0);a(1223===d?204:d,e,f,g||"");b.$$completeOutstandingRequest(y)}
+var C;b.$$incOutstandingRequestCount();m=m||b.url();if("jsonp"==N(e)){var O="_"+(d.counter++).toString(36);d[O]=function(a){d[O].data=a;d[O].called=!0};var I=f(m.replace("JSON_CALLBACK","angular.callbacks."+O),O,function(a,b){v(l,a,d[O].data,"",b);d[O]=y})}else{var w=a(e);w.open(e,m,!0);r(n,function(a,b){A(a)&&w.setRequestHeader(b,a)});w.onreadystatechange=function(){if(w&&4==w.readyState){var a=null,b=null,c="";C!==g&&(a=w.getAllResponseHeaders(),b="response"in w?w.response:w.responseText);C===g&&
+10>R||(c=w.statusText);v(l,C||w.status,b,a,c)}};q&&(w.withCredentials=!0);if(s)try{w.responseType=s}catch(da){if("json"!==s)throw da;}w.send(h||null)}if(0<p)var K=c(L,p);else p&&P(p.then)&&p.then(L)}}function Sd(){var b="{{",a="}}";this.startSymbol=function(a){return a?(b=a,this):b};this.endSymbol=function(b){return b?(a=b,this):a};this.$get=["$parse","$exceptionHandler","$sce",function(c,d,e){function f(f,h,l){for(var n,p,q=0,s=[],L=f.length,v=!1,C=[];q<L;)-1!=(n=f.indexOf(b,q))&&-1!=(p=f.indexOf(a,
+n+g))?(q!=n&&s.push(f.substring(q,n)),s.push(q=c(v=f.substring(n+g,p))),q.exp=v,q=p+k,v=!0):(q!=L&&s.push(f.substring(q)),q=L);(L=s.length)||(s.push(""),L=1);if(l&&1<s.length)throw yc("noconcat",f);if(!h||v)return C.length=L,q=function(a){try{for(var b=0,c=L,g;b<c;b++){if("function"==typeof(g=s[b]))if(g=g(a),g=l?e.getTrusted(l,g):e.valueOf(g),null==g)g="";else switch(typeof g){case "string":break;case "number":g=""+g;break;default:g=ta(g)}C[b]=g}return C.join("")}catch(k){a=yc("interr",f,k.toString()),
+d(a)}},q.exp=f,q.parts=s,q}var g=b.length,k=a.length;f.startSymbol=function(){return b};f.endSymbol=function(){return a};return f}]}function Td(){this.$get=["$rootScope","$window","$q",function(b,a,c){function d(d,g,k,m){var h=a.setInterval,l=a.clearInterval,n=c.defer(),p=n.promise,q=0,s=A(m)&&!m;k=A(k)?k:0;p.then(null,null,d);p.$$intervalId=h(function(){n.notify(q++);0<k&&q>=k&&(n.resolve(q),l(p.$$intervalId),delete e[p.$$intervalId]);s||b.$apply()},g);e[p.$$intervalId]=n;return p}var e={};d.cancel=
+function(b){return b&&b.$$intervalId in e?(e[b.$$intervalId].reject("canceled"),a.clearInterval(b.$$intervalId),delete e[b.$$intervalId],!0):!1};return d}]}function bd(){this.$get=function(){return{id:"en-us",NUMBER_FORMATS:{DECIMAL_SEP:".",GROUP_SEP:",",PATTERNS:[{minInt:1,minFrac:0,maxFrac:3,posPre:"",posSuf:"",negPre:"-",negSuf:"",gSize:3,lgSize:3},{minInt:1,minFrac:2,maxFrac:2,posPre:"\u00a4",posSuf:"",negPre:"(\u00a4",negSuf:")",gSize:3,lgSize:3}],CURRENCY_SYM:"$"},DATETIME_FORMATS:{MONTH:"January February March April May June July August September October November December".split(" "),
+SHORTMONTH:"Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split(" "),DAY:"Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),SHORTDAY:"Sun Mon Tue Wed Thu Fri Sat".split(" "),AMPMS:["AM","PM"],medium:"MMM d, y h:mm:ss a","short":"M/d/yy h:mm a",fullDate:"EEEE, MMMM d, y",longDate:"MMMM d, y",mediumDate:"MMM d, y",shortDate:"M/d/yy",mediumTime:"h:mm:ss a",shortTime:"h:mm a"},pluralCat:function(b){return 1===b?"one":"other"}}}}function Qb(b){b=b.split("/");for(var a=b.length;a--;)b[a]=
+jb(b[a]);return b.join("/")}function zc(b,a,c){b=ua(b,c);a.$$protocol=b.protocol;a.$$host=b.hostname;a.$$port=Z(b.port)||xe[b.protocol]||null}function Ac(b,a,c){var d="/"!==b.charAt(0);d&&(b="/"+b);b=ua(b,c);a.$$path=decodeURIComponent(d&&"/"===b.pathname.charAt(0)?b.pathname.substring(1):b.pathname);a.$$search=ec(b.search);a.$$hash=decodeURIComponent(b.hash);a.$$path&&"/"!=a.$$path.charAt(0)&&(a.$$path="/"+a.$$path)}function qa(b,a){if(0===a.indexOf(b))return a.substr(b.length)}function cb(b){var a=
+b.indexOf("#");return-1==a?b:b.substr(0,a)}function Rb(b){return b.substr(0,cb(b).lastIndexOf("/")+1)}function Bc(b,a){this.$$html5=!0;a=a||"";var c=Rb(b);zc(b,this,b);this.$$parse=function(a){var e=qa(c,a);if(!z(e))throw Sb("ipthprfx",a,c);Ac(e,this,b);this.$$path||(this.$$path="/");this.$$compose()};this.$$compose=function(){var a=Cb(this.$$search),b=this.$$hash?"#"+jb(this.$$hash):"";this.$$url=Qb(this.$$path)+(a?"?"+a:"")+b;this.$$absUrl=c+this.$$url.substr(1)};this.$$rewrite=function(d){var e;
+if((e=qa(b,d))!==t)return d=e,(e=qa(a,e))!==t?c+(qa("/",e)||e):b+d;if((e=qa(c,d))!==t)return c+e;if(c==d+"/")return c}}function Tb(b,a){var c=Rb(b);zc(b,this,b);this.$$parse=function(d){var e=qa(b,d)||qa(c,d),e="#"==e.charAt(0)?qa(a,e):this.$$html5?e:"";if(!z(e))throw Sb("ihshprfx",d,a);Ac(e,this,b);d=this.$$path;var f=/^\/[A-Z]:(\/.*)/;0===e.indexOf(b)&&(e=e.replace(b,""));f.exec(e)||(d=(e=f.exec(d))?e[1]:d);this.$$path=d;this.$$compose()};this.$$compose=function(){var c=Cb(this.$$search),e=this.$$hash?
+"#"+jb(this.$$hash):"";this.$$url=Qb(this.$$path)+(c?"?"+c:"")+e;this.$$absUrl=b+(this.$$url?a+this.$$url:"")};this.$$rewrite=function(a){if(cb(b)==cb(a))return a}}function Ub(b,a){this.$$html5=!0;Tb.apply(this,arguments);var c=Rb(b);this.$$rewrite=function(d){var e;if(b==cb(d))return d;if(e=qa(c,d))return b+a+e;if(c===d+"/")return c};this.$$compose=function(){var c=Cb(this.$$search),e=this.$$hash?"#"+jb(this.$$hash):"";this.$$url=Qb(this.$$path)+(c?"?"+c:"")+e;this.$$absUrl=b+a+this.$$url}}function sb(b){return function(){return this[b]}}
+function Cc(b,a){return function(c){if(D(c))return this[b];this[b]=a(c);this.$$compose();return this}}function Wd(){var b="",a=!1;this.hashPrefix=function(a){return A(a)?(b=a,this):b};this.html5Mode=function(b){return A(b)?(a=b,this):a};this.$get=["$rootScope","$browser","$sniffer","$rootElement",function(c,d,e,f){function g(a){c.$broadcast("$locationChangeSuccess",k.absUrl(),a)}var k,m,h=d.baseHref(),l=d.url(),n;a?(n=l.substring(0,l.indexOf("/",l.indexOf("//")+2))+(h||"/"),m=e.history?Bc:Ub):(n=
+cb(l),m=Tb);k=new m(n,"#"+b);k.$$parse(k.$$rewrite(l));var p=/^\s*(javascript|mailto):/i;f.on("click",function(a){if(!a.ctrlKey&&!a.metaKey&&2!=a.which){for(var e=u(a.target);"a"!==N(e[0].nodeName);)if(e[0]===f[0]||!(e=e.parent())[0])return;var g=e.prop("href");T(g)&&"[object SVGAnimatedString]"===g.toString()&&(g=ua(g.animVal).href);if(!p.test(g)){if(m===Ub){var h=e.attr("href")||e.attr("xlink:href");if(h&&0>h.indexOf("://"))if(g="#"+b,"/"==h[0])g=n+g+h;else if("#"==h[0])g=n+g+(k.path()||"/")+h;
+else{var l=k.path().split("/"),h=h.split("/");2!==l.length||l[1]||(l.length=1);for(var q=0;q<h.length;q++)"."!=h[q]&&(".."==h[q]?l.pop():h[q].length&&l.push(h[q]));g=n+g+l.join("/")}}l=k.$$rewrite(g);g&&(!e.attr("target")&&l&&!a.isDefaultPrevented())&&(a.preventDefault(),l!=d.url()&&(k.$$parse(l),c.$apply(),Q.angular["ff-684208-preventDefault"]=!0))}}});k.absUrl()!=l&&d.url(k.absUrl(),!0);d.onUrlChange(function(a){k.absUrl()!=a&&(c.$evalAsync(function(){var b=k.absUrl();k.$$parse(a);c.$broadcast("$locationChangeStart",
+a,b).defaultPrevented?(k.$$parse(b),d.url(b)):g(b)}),c.$$phase||c.$digest())});var q=0;c.$watch(function(){var a=d.url(),b=k.$$replace;q&&a==k.absUrl()||(q++,c.$evalAsync(function(){c.$broadcast("$locationChangeStart",k.absUrl(),a).defaultPrevented?k.$$parse(a):(d.url(k.absUrl(),b),g(a))}));k.$$replace=!1;return q});return k}]}function Xd(){var b=!0,a=this;this.debugEnabled=function(a){return A(a)?(b=a,this):b};this.$get=["$window",function(c){function d(a){a instanceof Error&&(a.stack?a=a.message&&
+-1===a.stack.indexOf(a.message)?"Error: "+a.message+"\n"+a.stack:a.stack:a.sourceURL&&(a=a.message+"\n"+a.sourceURL+":"+a.line));return a}function e(a){var b=c.console||{},e=b[a]||b.log||y;a=!1;try{a=!!e.apply}catch(m){}return a?function(){var a=[];r(arguments,function(b){a.push(d(b))});return e.apply(b,a)}:function(a,b){e(a,null==b?"":b)}}return{log:e("log"),info:e("info"),warn:e("warn"),error:e("error"),debug:function(){var c=e("debug");return function(){b&&c.apply(a,arguments)}}()}}]}function ja(b,
+a){if("__defineGetter__"===b||"__defineSetter__"===b||"__lookupGetter__"===b||"__lookupSetter__"===b||"__proto__"===b)throw ka("isecfld",a);return b}function Oa(b,a){if(b){if(b.constructor===b)throw ka("isecfn",a);if(b.document&&b.location&&b.alert&&b.setInterval)throw ka("isecwindow",a);if(b.children&&(b.nodeName||b.prop&&b.attr&&b.find))throw ka("isecdom",a);if(b===Object)throw ka("isecobj",a);}return b}function tb(b,a,c,d,e){e=e||{};a=a.split(".");for(var f,g=0;1<a.length;g++){f=ja(a.shift(),d);
+var k=b[f];k||(k={},b[f]=k);b=k;b.then&&e.unwrapPromises&&(va(d),"$$v"in b||function(a){a.then(function(b){a.$$v=b})}(b),b.$$v===t&&(b.$$v={}),b=b.$$v)}f=ja(a.shift(),d);Oa(b,d);Oa(b[f],d);return b[f]=c}function Dc(b,a,c,d,e,f,g){ja(b,f);ja(a,f);ja(c,f);ja(d,f);ja(e,f);return g.unwrapPromises?function(g,m){var h=m&&m.hasOwnProperty(b)?m:g,l;if(null==h)return h;(h=h[b])&&h.then&&(va(f),"$$v"in h||(l=h,l.$$v=t,l.then(function(a){l.$$v=a})),h=h.$$v);if(!a)return h;if(null==h)return t;(h=h[a])&&h.then&&
+(va(f),"$$v"in h||(l=h,l.$$v=t,l.then(function(a){l.$$v=a})),h=h.$$v);if(!c)return h;if(null==h)return t;(h=h[c])&&h.then&&(va(f),"$$v"in h||(l=h,l.$$v=t,l.then(function(a){l.$$v=a})),h=h.$$v);if(!d)return h;if(null==h)return t;(h=h[d])&&h.then&&(va(f),"$$v"in h||(l=h,l.$$v=t,l.then(function(a){l.$$v=a})),h=h.$$v);if(!e)return h;if(null==h)return t;(h=h[e])&&h.then&&(va(f),"$$v"in h||(l=h,l.$$v=t,l.then(function(a){l.$$v=a})),h=h.$$v);return h}:function(f,g){var h=g&&g.hasOwnProperty(b)?g:f;if(null==
+h)return h;h=h[b];if(!a)return h;if(null==h)return t;h=h[a];if(!c)return h;if(null==h)return t;h=h[c];if(!d)return h;if(null==h)return t;h=h[d];return e?null==h?t:h=h[e]:h}}function Ec(b,a,c){if(Vb.hasOwnProperty(b))return Vb[b];var d=b.split("."),e=d.length,f;if(a.csp)f=6>e?Dc(d[0],d[1],d[2],d[3],d[4],c,a):function(b,f){var g=0,k;do k=Dc(d[g++],d[g++],d[g++],d[g++],d[g++],c,a)(b,f),f=t,b=k;while(g<e);return k};else{var g="var p;\n";r(d,function(b,d){ja(b,c);g+="if(s == null) return undefined;\ns="+
+(d?"s":'((k&&k.hasOwnProperty("'+b+'"))?k:s)')+'["'+b+'"];\n'+(a.unwrapPromises?'if (s && s.then) {\n pw("'+c.replace(/(["\r\n])/g,"\\$1")+'");\n if (!("$$v" in s)) {\n p=s;\n p.$$v = undefined;\n p.then(function(v) {p.$$v=v;});\n}\n s=s.$$v\n}\n':"")});var g=g+"return s;",k=new Function("s","k","pw",g);k.toString=$(g);f=a.unwrapPromises?function(a,b){return k(a,b,va)}:k}"hasOwnProperty"!==b&&(Vb[b]=f);return f}function Yd(){var b={},a={csp:!1,unwrapPromises:!1,logPromiseWarnings:!0};this.unwrapPromises=
+function(b){return A(b)?(a.unwrapPromises=!!b,this):a.unwrapPromises};this.logPromiseWarnings=function(b){return A(b)?(a.logPromiseWarnings=b,this):a.logPromiseWarnings};this.$get=["$filter","$sniffer","$log",function(c,d,e){a.csp=d.csp;va=function(b){a.logPromiseWarnings&&!Fc.hasOwnProperty(b)&&(Fc[b]=!0,e.warn("[$parse] Promise found in the expression `"+b+"`. Automatic unwrapping of promises in Angular expressions is deprecated."))};return function(d){var e;switch(typeof d){case "string":if(b.hasOwnProperty(d))return b[d];
+e=new Wb(a);e=(new db(e,c,a)).parse(d);"hasOwnProperty"!==d&&(b[d]=e);return e;case "function":return d;default:return y}}}]}function $d(){this.$get=["$rootScope","$exceptionHandler",function(b,a){return ye(function(a){b.$evalAsync(a)},a)}]}function ye(b,a){function c(a){return a}function d(a){return g(a)}var e=function(){var g=[],h,l;return l={resolve:function(a){if(g){var c=g;g=t;h=f(a);c.length&&b(function(){for(var a,b=0,d=c.length;b<d;b++)a=c[b],h.then(a[0],a[1],a[2])})}},reject:function(a){l.resolve(k(a))},
+notify:function(a){if(g){var c=g;g.length&&b(function(){for(var b,d=0,e=c.length;d<e;d++)b=c[d],b[2](a)})}},promise:{then:function(b,f,k){var l=e(),L=function(d){try{l.resolve((P(b)?b:c)(d))}catch(e){l.reject(e),a(e)}},v=function(b){try{l.resolve((P(f)?f:d)(b))}catch(c){l.reject(c),a(c)}},C=function(b){try{l.notify((P(k)?k:c)(b))}catch(d){a(d)}};g?g.push([L,v,C]):h.then(L,v,C);return l.promise},"catch":function(a){return this.then(null,a)},"finally":function(a){function b(a,c){var d=e();c?d.resolve(a):
+d.reject(a);return d.promise}function d(e,f){var g=null;try{g=(a||c)()}catch(k){return b(k,!1)}return g&&P(g.then)?g.then(function(){return b(e,f)},function(a){return b(a,!1)}):b(e,f)}return this.then(function(a){return d(a,!0)},function(a){return d(a,!1)})}}}},f=function(a){return a&&P(a.then)?a:{then:function(c){var d=e();b(function(){d.resolve(c(a))});return d.promise}}},g=function(a){var b=e();b.reject(a);return b.promise},k=function(c){return{then:function(f,g){var k=e();b(function(){try{k.resolve((P(g)?
+g:d)(c))}catch(b){k.reject(b),a(b)}});return k.promise}}};return{defer:e,reject:g,when:function(k,h,l,n){var p=e(),q,s=function(b){try{return(P(h)?h:c)(b)}catch(d){return a(d),g(d)}},L=function(b){try{return(P(l)?l:d)(b)}catch(c){return a(c),g(c)}},v=function(b){try{return(P(n)?n:c)(b)}catch(d){a(d)}};b(function(){f(k).then(function(a){q||(q=!0,p.resolve(f(a).then(s,L,v)))},function(a){q||(q=!0,p.resolve(L(a)))},function(a){q||p.notify(v(a))})});return p.promise},all:function(a){var b=e(),c=0,d=H(a)?
+[]:{};r(a,function(a,e){c++;f(a).then(function(a){d.hasOwnProperty(e)||(d[e]=a,--c||b.resolve(d))},function(a){d.hasOwnProperty(e)||b.reject(a)})});0===c&&b.resolve(d);return b.promise}}}function ge(){this.$get=["$window","$timeout",function(b,a){var c=b.requestAnimationFrame||b.webkitRequestAnimationFrame||b.mozRequestAnimationFrame,d=b.cancelAnimationFrame||b.webkitCancelAnimationFrame||b.mozCancelAnimationFrame||b.webkitCancelRequestAnimationFrame,e=!!c,f=e?function(a){var b=c(a);return function(){d(b)}}:
+function(b){var c=a(b,16.66,!1);return function(){a.cancel(c)}};f.supported=e;return f}]}function Zd(){var b=10,a=x("$rootScope"),c=null;this.digestTtl=function(a){arguments.length&&(b=a);return b};this.$get=["$injector","$exceptionHandler","$parse","$browser",function(d,e,f,g){function k(){this.$id=gb();this.$$phase=this.$parent=this.$$watchers=this.$$nextSibling=this.$$prevSibling=this.$$childHead=this.$$childTail=null;this["this"]=this.$root=this;this.$$destroyed=!1;this.$$asyncQueue=[];this.$$postDigestQueue=
+[];this.$$listeners={};this.$$listenerCount={};this.$$isolateBindings={}}function m(b){if(p.$$phase)throw a("inprog",p.$$phase);p.$$phase=b}function h(a,b){var c=f(a);Va(c,b);return c}function l(a,b,c){do a.$$listenerCount[c]-=b,0===a.$$listenerCount[c]&&delete a.$$listenerCount[c];while(a=a.$parent)}function n(){}k.prototype={constructor:k,$new:function(a){a?(a=new k,a.$root=this.$root,a.$$asyncQueue=this.$$asyncQueue,a.$$postDigestQueue=this.$$postDigestQueue):(this.$$childScopeClass||(this.$$childScopeClass=
+function(){this.$$watchers=this.$$nextSibling=this.$$childHead=this.$$childTail=null;this.$$listeners={};this.$$listenerCount={};this.$id=gb();this.$$childScopeClass=null},this.$$childScopeClass.prototype=this),a=new this.$$childScopeClass);a["this"]=a;a.$parent=this;a.$$prevSibling=this.$$childTail;this.$$childHead?this.$$childTail=this.$$childTail.$$nextSibling=a:this.$$childHead=this.$$childTail=a;return a},$watch:function(a,b,d){var e=h(a,"watch"),f=this.$$watchers,g={fn:b,last:n,get:e,exp:a,
+eq:!!d};c=null;if(!P(b)){var k=h(b||y,"listener");g.fn=function(a,b,c){k(c)}}if("string"==typeof a&&e.constant){var m=g.fn;g.fn=function(a,b,c){m.call(this,a,b,c);Ra(f,g)}}f||(f=this.$$watchers=[]);f.unshift(g);return function(){Ra(f,g);c=null}},$watchCollection:function(a,b){var c=this,d,e,g,k=1<b.length,h=0,m=f(a),l=[],p={},n=!0,r=0;return this.$watch(function(){d=m(c);var a,b,f;if(T(d))if(fb(d))for(e!==l&&(e=l,r=e.length=0,h++),a=d.length,r!==a&&(h++,e.length=r=a),b=0;b<a;b++)f=e[b]!==e[b]&&d[b]!==
+d[b],f||e[b]===d[b]||(h++,e[b]=d[b]);else{e!==p&&(e=p={},r=0,h++);a=0;for(b in d)d.hasOwnProperty(b)&&(a++,e.hasOwnProperty(b)?(f=e[b]!==e[b]&&d[b]!==d[b],f||e[b]===d[b]||(h++,e[b]=d[b])):(r++,e[b]=d[b],h++));if(r>a)for(b in h++,e)e.hasOwnProperty(b)&&!d.hasOwnProperty(b)&&(r--,delete e[b])}else e!==d&&(e=d,h++);return h},function(){n?(n=!1,b(d,d,c)):b(d,g,c);if(k)if(T(d))if(fb(d)){g=Array(d.length);for(var a=0;a<d.length;a++)g[a]=d[a]}else for(a in g={},d)ib.call(d,a)&&(g[a]=d[a]);else g=d})},$digest:function(){var d,
+f,g,k,h=this.$$asyncQueue,l=this.$$postDigestQueue,r,w,t=b,K,W=[],u,J,E;m("$digest");c=null;do{w=!1;for(K=this;h.length;){try{E=h.shift(),E.scope.$eval(E.expression)}catch(F){p.$$phase=null,e(F)}c=null}a:do{if(k=K.$$watchers)for(r=k.length;r--;)try{if(d=k[r])if((f=d.get(K))!==(g=d.last)&&!(d.eq?za(f,g):"number"===typeof f&&"number"===typeof g&&isNaN(f)&&isNaN(g)))w=!0,c=d,d.last=d.eq?Ha(f,null):f,d.fn(f,g===n?f:g,K),5>t&&(u=4-t,W[u]||(W[u]=[]),J=P(d.exp)?"fn: "+(d.exp.name||d.exp.toString()):d.exp,
+J+="; newVal: "+ta(f)+"; oldVal: "+ta(g),W[u].push(J));else if(d===c){w=!1;break a}}catch(A){p.$$phase=null,e(A)}if(!(k=K.$$childHead||K!==this&&K.$$nextSibling))for(;K!==this&&!(k=K.$$nextSibling);)K=K.$parent}while(K=k);if((w||h.length)&&!t--)throw p.$$phase=null,a("infdig",b,ta(W));}while(w||h.length);for(p.$$phase=null;l.length;)try{l.shift()()}catch(x){e(x)}},$destroy:function(){if(!this.$$destroyed){var a=this.$parent;this.$broadcast("$destroy");this.$$destroyed=!0;this!==p&&(r(this.$$listenerCount,
+Bb(null,l,this)),a.$$childHead==this&&(a.$$childHead=this.$$nextSibling),a.$$childTail==this&&(a.$$childTail=this.$$prevSibling),this.$$prevSibling&&(this.$$prevSibling.$$nextSibling=this.$$nextSibling),this.$$nextSibling&&(this.$$nextSibling.$$prevSibling=this.$$prevSibling),this.$parent=this.$$nextSibling=this.$$prevSibling=this.$$childHead=this.$$childTail=this.$root=null,this.$$listeners={},this.$$watchers=this.$$asyncQueue=this.$$postDigestQueue=[],this.$destroy=this.$digest=this.$apply=y,this.$on=
+this.$watch=function(){return y})}},$eval:function(a,b){return f(a)(this,b)},$evalAsync:function(a){p.$$phase||p.$$asyncQueue.length||g.defer(function(){p.$$asyncQueue.length&&p.$digest()});this.$$asyncQueue.push({scope:this,expression:a})},$$postDigest:function(a){this.$$postDigestQueue.push(a)},$apply:function(a){try{return m("$apply"),this.$eval(a)}catch(b){e(b)}finally{p.$$phase=null;try{p.$digest()}catch(c){throw e(c),c;}}},$on:function(a,b){var c=this.$$listeners[a];c||(this.$$listeners[a]=
+c=[]);c.push(b);var d=this;do d.$$listenerCount[a]||(d.$$listenerCount[a]=0),d.$$listenerCount[a]++;while(d=d.$parent);var e=this;return function(){c[Qa(c,b)]=null;l(e,1,a)}},$emit:function(a,b){var c=[],d,f=this,g=!1,k={name:a,targetScope:f,stopPropagation:function(){g=!0},preventDefault:function(){k.defaultPrevented=!0},defaultPrevented:!1},h=[k].concat(Aa.call(arguments,1)),m,l;do{d=f.$$listeners[a]||c;k.currentScope=f;m=0;for(l=d.length;m<l;m++)if(d[m])try{d[m].apply(null,h)}catch(p){e(p)}else d.splice(m,
+1),m--,l--;if(g)break;f=f.$parent}while(f);return k},$broadcast:function(a,b){for(var c=this,d=this,f={name:a,targetScope:this,preventDefault:function(){f.defaultPrevented=!0},defaultPrevented:!1},g=[f].concat(Aa.call(arguments,1)),k,h;c=d;){f.currentScope=c;d=c.$$listeners[a]||[];k=0;for(h=d.length;k<h;k++)if(d[k])try{d[k].apply(null,g)}catch(m){e(m)}else d.splice(k,1),k--,h--;if(!(d=c.$$listenerCount[a]&&c.$$childHead||c!==this&&c.$$nextSibling))for(;c!==this&&!(d=c.$$nextSibling);)c=c.$parent}return f}};
+var p=new k;return p}]}function cd(){var b=/^\s*(https?|ftp|mailto|tel|file):/,a=/^\s*((https?|ftp|file):|data:image\/)/;this.aHrefSanitizationWhitelist=function(a){return A(a)?(b=a,this):b};this.imgSrcSanitizationWhitelist=function(b){return A(b)?(a=b,this):a};this.$get=function(){return function(c,d){var e=d?a:b,f;if(!R||8<=R)if(f=ua(c).href,""!==f&&!f.match(e))return"unsafe:"+f;return c}}}function ze(b){if("self"===b)return b;if(z(b)){if(-1<b.indexOf("***"))throw wa("iwcard",b);b=b.replace(/([-()\[\]{}+?*.$\^|,:#<!\\])/g,
+"\\$1").replace(/\x08/g,"\\x08").replace("\\*\\*",".*").replace("\\*","[^:/.?&;]*");return RegExp("^"+b+"$")}if(hb(b))return RegExp("^"+b.source+"$");throw wa("imatcher");}function Gc(b){var a=[];A(b)&&r(b,function(b){a.push(ze(b))});return a}function be(){this.SCE_CONTEXTS=fa;var b=["self"],a=[];this.resourceUrlWhitelist=function(a){arguments.length&&(b=Gc(a));return b};this.resourceUrlBlacklist=function(b){arguments.length&&(a=Gc(b));return a};this.$get=["$injector",function(c){function d(a){var b=
+function(a){this.$$unwrapTrustedValue=function(){return a}};a&&(b.prototype=new a);b.prototype.valueOf=function(){return this.$$unwrapTrustedValue()};b.prototype.toString=function(){return this.$$unwrapTrustedValue().toString()};return b}var e=function(a){throw wa("unsafe");};c.has("$sanitize")&&(e=c.get("$sanitize"));var f=d(),g={};g[fa.HTML]=d(f);g[fa.CSS]=d(f);g[fa.URL]=d(f);g[fa.JS]=d(f);g[fa.RESOURCE_URL]=d(g[fa.URL]);return{trustAs:function(a,b){var c=g.hasOwnProperty(a)?g[a]:null;if(!c)throw wa("icontext",
+a,b);if(null===b||b===t||""===b)return b;if("string"!==typeof b)throw wa("itype",a);return new c(b)},getTrusted:function(c,d){if(null===d||d===t||""===d)return d;var f=g.hasOwnProperty(c)?g[c]:null;if(f&&d instanceof f)return d.$$unwrapTrustedValue();if(c===fa.RESOURCE_URL){var f=ua(d.toString()),l,n,p=!1;l=0;for(n=b.length;l<n;l++)if("self"===b[l]?Pb(f):b[l].exec(f.href)){p=!0;break}if(p)for(l=0,n=a.length;l<n;l++)if("self"===a[l]?Pb(f):a[l].exec(f.href)){p=!1;break}if(p)return d;throw wa("insecurl",
+d.toString());}if(c===fa.HTML)return e(d);throw wa("unsafe");},valueOf:function(a){return a instanceof f?a.$$unwrapTrustedValue():a}}}]}function ae(){var b=!0;this.enabled=function(a){arguments.length&&(b=!!a);return b};this.$get=["$parse","$sniffer","$sceDelegate",function(a,c,d){if(b&&c.msie&&8>c.msieDocumentMode)throw wa("iequirks");var e=ga(fa);e.isEnabled=function(){return b};e.trustAs=d.trustAs;e.getTrusted=d.getTrusted;e.valueOf=d.valueOf;b||(e.trustAs=e.getTrusted=function(a,b){return b},
+e.valueOf=Ga);e.parseAs=function(b,c){var d=a(c);return d.literal&&d.constant?d:function(a,c){return e.getTrusted(b,d(a,c))}};var f=e.parseAs,g=e.getTrusted,k=e.trustAs;r(fa,function(a,b){var c=N(b);e[Ya("parse_as_"+c)]=function(b){return f(a,b)};e[Ya("get_trusted_"+c)]=function(b){return g(a,b)};e[Ya("trust_as_"+c)]=function(b){return k(a,b)}});return e}]}function ce(){this.$get=["$window","$document",function(b,a){var c={},d=Z((/android (\d+)/.exec(N((b.navigator||{}).userAgent))||[])[1]),e=/Boxee/i.test((b.navigator||
+{}).userAgent),f=a[0]||{},g=f.documentMode,k,m=/^(Moz|webkit|O|ms)(?=[A-Z])/,h=f.body&&f.body.style,l=!1,n=!1;if(h){for(var p in h)if(l=m.exec(p)){k=l[0];k=k.substr(0,1).toUpperCase()+k.substr(1);break}k||(k="WebkitOpacity"in h&&"webkit");l=!!("transition"in h||k+"Transition"in h);n=!!("animation"in h||k+"Animation"in h);!d||l&&n||(l=z(f.body.style.webkitTransition),n=z(f.body.style.webkitAnimation))}return{history:!(!b.history||!b.history.pushState||4>d||e),hashchange:"onhashchange"in b&&(!g||7<
+g),hasEvent:function(a){if("input"==a&&9==R)return!1;if(D(c[a])){var b=f.createElement("div");c[a]="on"+a in b}return c[a]},csp:Wa(),vendorPrefix:k,transitions:l,animations:n,android:d,msie:R,msieDocumentMode:g}}]}function ee(){this.$get=["$rootScope","$browser","$q","$exceptionHandler",function(b,a,c,d){function e(e,k,m){var h=c.defer(),l=h.promise,n=A(m)&&!m;k=a.defer(function(){try{h.resolve(e())}catch(a){h.reject(a),d(a)}finally{delete f[l.$$timeoutId]}n||b.$apply()},k);l.$$timeoutId=k;f[k]=h;
+return l}var f={};e.cancel=function(b){return b&&b.$$timeoutId in f?(f[b.$$timeoutId].reject("canceled"),delete f[b.$$timeoutId],a.defer.cancel(b.$$timeoutId)):!1};return e}]}function ua(b,a){var c=b;R&&(V.setAttribute("href",c),c=V.href);V.setAttribute("href",c);return{href:V.href,protocol:V.protocol?V.protocol.replace(/:$/,""):"",host:V.host,search:V.search?V.search.replace(/^\?/,""):"",hash:V.hash?V.hash.replace(/^#/,""):"",hostname:V.hostname,port:V.port,pathname:"/"===V.pathname.charAt(0)?V.pathname:
+"/"+V.pathname}}function Pb(b){b=z(b)?ua(b):b;return b.protocol===Hc.protocol&&b.host===Hc.host}function fe(){this.$get=$(Q)}function mc(b){function a(d,e){if(T(d)){var f={};r(d,function(b,c){f[c]=a(c,b)});return f}return b.factory(d+c,e)}var c="Filter";this.register=a;this.$get=["$injector",function(a){return function(b){return a.get(b+c)}}];a("currency",Ic);a("date",Jc);a("filter",Ae);a("json",Be);a("limitTo",Ce);a("lowercase",De);a("number",Kc);a("orderBy",Lc);a("uppercase",Ee)}function Ae(){return function(b,
+a,c){if(!H(b))return b;var d=typeof c,e=[];e.check=function(a){for(var b=0;b<e.length;b++)if(!e[b](a))return!1;return!0};"function"!==d&&(c="boolean"===d&&c?function(a,b){return Ua.equals(a,b)}:function(a,b){if(a&&b&&"object"===typeof a&&"object"===typeof b){for(var d in a)if("$"!==d.charAt(0)&&ib.call(a,d)&&c(a[d],b[d]))return!0;return!1}b=(""+b).toLowerCase();return-1<(""+a).toLowerCase().indexOf(b)});var f=function(a,b){if("string"==typeof b&&"!"===b.charAt(0))return!f(a,b.substr(1));switch(typeof a){case "boolean":case "number":case "string":return c(a,
+b);case "object":switch(typeof b){case "object":return c(a,b);default:for(var d in a)if("$"!==d.charAt(0)&&f(a[d],b))return!0}return!1;case "array":for(d=0;d<a.length;d++)if(f(a[d],b))return!0;return!1;default:return!1}};switch(typeof a){case "boolean":case "number":case "string":a={$:a};case "object":for(var g in a)(function(b){"undefined"!==typeof a[b]&&e.push(function(c){return f("$"==b?c:c&&c[b],a[b])})})(g);break;case "function":e.push(a);break;default:return b}d=[];for(g=0;g<b.length;g++){var k=
+b[g];e.check(k)&&d.push(k)}return d}}function Ic(b){var a=b.NUMBER_FORMATS;return function(b,d){D(d)&&(d=a.CURRENCY_SYM);return Mc(b,a.PATTERNS[1],a.GROUP_SEP,a.DECIMAL_SEP,2).replace(/\u00A4/g,d)}}function Kc(b){var a=b.NUMBER_FORMATS;return function(b,d){return Mc(b,a.PATTERNS[0],a.GROUP_SEP,a.DECIMAL_SEP,d)}}function Mc(b,a,c,d,e){if(null==b||!isFinite(b)||T(b))return"";var f=0>b;b=Math.abs(b);var g=b+"",k="",m=[],h=!1;if(-1!==g.indexOf("e")){var l=g.match(/([\d\.]+)e(-?)(\d+)/);l&&"-"==l[2]&&
+l[3]>e+1?(g="0",b=0):(k=g,h=!0)}if(h)0<e&&(-1<b&&1>b)&&(k=b.toFixed(e));else{g=(g.split(Nc)[1]||"").length;D(e)&&(e=Math.min(Math.max(a.minFrac,g),a.maxFrac));b=+(Math.round(+(b.toString()+"e"+e)).toString()+"e"+-e);b=(""+b).split(Nc);g=b[0];b=b[1]||"";var l=0,n=a.lgSize,p=a.gSize;if(g.length>=n+p)for(l=g.length-n,h=0;h<l;h++)0===(l-h)%p&&0!==h&&(k+=c),k+=g.charAt(h);for(h=l;h<g.length;h++)0===(g.length-h)%n&&0!==h&&(k+=c),k+=g.charAt(h);for(;b.length<e;)b+="0";e&&"0"!==e&&(k+=d+b.substr(0,e))}m.push(f?
+a.negPre:a.posPre);m.push(k);m.push(f?a.negSuf:a.posSuf);return m.join("")}function Xb(b,a,c){var d="";0>b&&(d="-",b=-b);for(b=""+b;b.length<a;)b="0"+b;c&&(b=b.substr(b.length-a));return d+b}function Y(b,a,c,d){c=c||0;return function(e){e=e["get"+b]();if(0<c||e>-c)e+=c;0===e&&-12==c&&(e=12);return Xb(e,a,d)}}function ub(b,a){return function(c,d){var e=c["get"+b](),f=Ia(a?"SHORT"+b:b);return d[f][e]}}function Jc(b){function a(a){var b;if(b=a.match(c)){a=new Date(0);var f=0,g=0,k=b[8]?a.setUTCFullYear:
+a.setFullYear,m=b[8]?a.setUTCHours:a.setHours;b[9]&&(f=Z(b[9]+b[10]),g=Z(b[9]+b[11]));k.call(a,Z(b[1]),Z(b[2])-1,Z(b[3]));f=Z(b[4]||0)-f;g=Z(b[5]||0)-g;k=Z(b[6]||0);b=Math.round(1E3*parseFloat("0."+(b[7]||0)));m.call(a,f,g,k,b)}return a}var c=/^(\d{4})-?(\d\d)-?(\d\d)(?:T(\d\d)(?::?(\d\d)(?::?(\d\d)(?:\.(\d+))?)?)?(Z|([+-])(\d\d):?(\d\d))?)?$/;return function(c,e){var f="",g=[],k,m;e=e||"mediumDate";e=b.DATETIME_FORMATS[e]||e;z(c)&&(c=Fe.test(c)?Z(c):a(c));Ab(c)&&(c=new Date(c));if(!sa(c))return c;
+for(;e;)(m=Ge.exec(e))?(g=g.concat(Aa.call(m,1)),e=g.pop()):(g.push(e),e=null);r(g,function(a){k=He[a];f+=k?k(c,b.DATETIME_FORMATS):a.replace(/(^'|'$)/g,"").replace(/''/g,"'")});return f}}function Be(){return function(b){return ta(b,!0)}}function Ce(){return function(b,a){if(!H(b)&&!z(b))return b;a=Infinity===Math.abs(Number(a))?Number(a):Z(a);if(z(b))return a?0<=a?b.slice(0,a):b.slice(a,b.length):"";var c=[],d,e;a>b.length?a=b.length:a<-b.length&&(a=-b.length);0<a?(d=0,e=a):(d=b.length+a,e=b.length);
+for(;d<e;d++)c.push(b[d]);return c}}function Lc(b){return function(a,c,d){function e(a,b){return Ta(b)?function(b,c){return a(c,b)}:a}function f(a,b){var c=typeof a,d=typeof b;return c==d?(sa(a)&&sa(b)&&(a=a.valueOf(),b=b.valueOf()),"string"==c&&(a=a.toLowerCase(),b=b.toLowerCase()),a===b?0:a<b?-1:1):c<d?-1:1}if(!H(a)||!c)return a;c=H(c)?c:[c];c=Vc(c,function(a){var c=!1,d=a||Ga;if(z(a)){if("+"==a.charAt(0)||"-"==a.charAt(0))c="-"==a.charAt(0),a=a.substring(1);d=b(a);if(d.constant){var g=d();return e(function(a,
+b){return f(a[g],b[g])},c)}}return e(function(a,b){return f(d(a),d(b))},c)});for(var g=[],k=0;k<a.length;k++)g.push(a[k]);return g.sort(e(function(a,b){for(var d=0;d<c.length;d++){var e=c[d](a,b);if(0!==e)return e}return 0},d))}}function xa(b){P(b)&&(b={link:b});b.restrict=b.restrict||"AC";return $(b)}function Oc(b,a,c,d){function e(a,c){c=c?"-"+kb(c,"-"):"";d.removeClass(b,(a?vb:wb)+c);d.addClass(b,(a?wb:vb)+c)}var f=this,g=b.parent().controller("form")||xb,k=0,m=f.$error={},h=[];f.$name=a.name||
+a.ngForm;f.$dirty=!1;f.$pristine=!0;f.$valid=!0;f.$invalid=!1;g.$addControl(f);b.addClass(Pa);e(!0);f.$addControl=function(a){Ca(a.$name,"input");h.push(a);a.$name&&(f[a.$name]=a)};f.$removeControl=function(a){a.$name&&f[a.$name]===a&&delete f[a.$name];r(m,function(b,c){f.$setValidity(c,!0,a)});Ra(h,a)};f.$setValidity=function(a,b,c){var d=m[a];if(b)d&&(Ra(d,c),d.length||(k--,k||(e(b),f.$valid=!0,f.$invalid=!1),m[a]=!1,e(!0,a),g.$setValidity(a,!0,f)));else{k||e(b);if(d){if(-1!=Qa(d,c))return}else m[a]=
+d=[],k++,e(!1,a),g.$setValidity(a,!1,f);d.push(c);f.$valid=!1;f.$invalid=!0}};f.$setDirty=function(){d.removeClass(b,Pa);d.addClass(b,yb);f.$dirty=!0;f.$pristine=!1;g.$setDirty()};f.$setPristine=function(){d.removeClass(b,yb);d.addClass(b,Pa);f.$dirty=!1;f.$pristine=!0;r(h,function(a){a.$setPristine()})}}function ra(b,a,c,d){b.$setValidity(a,c);return c?d:t}function Pc(b,a){var c,d;if(a)for(c=0;c<a.length;++c)if(d=a[c],b[d])return!0;return!1}function Ie(b,a,c,d,e){T(e)&&(b.$$hasNativeValidators=!0,
+b.$parsers.push(function(f){if(b.$error[a]||Pc(e,d)||!Pc(e,c))return f;b.$setValidity(a,!1)}))}function zb(b,a,c,d,e,f){var g=a.prop(Je),k=a[0].placeholder,m={},h=N(a[0].type);d.$$validityState=g;if(!e.android){var l=!1;a.on("compositionstart",function(a){l=!0});a.on("compositionend",function(){l=!1;n()})}var n=function(e){if(!l){var f=a.val();if(R&&"input"===(e||m).type&&a[0].placeholder!==k)k=a[0].placeholder;else if("password"!==h&&Ta(c.ngTrim||"T")&&(f=aa(f)),e=g&&d.$$hasNativeValidators,d.$viewValue!==
+f||""===f&&e)b.$$phase?d.$setViewValue(f):b.$apply(function(){d.$setViewValue(f)})}};if(e.hasEvent("input"))a.on("input",n);else{var p,q=function(){p||(p=f.defer(function(){n();p=null}))};a.on("keydown",function(a){a=a.keyCode;91===a||(15<a&&19>a||37<=a&&40>=a)||q()});if(e.hasEvent("paste"))a.on("paste cut",q)}a.on("change",n);d.$render=function(){a.val(d.$isEmpty(d.$viewValue)?"":d.$viewValue)};var s=c.ngPattern;s&&((e=s.match(/^\/(.*)\/([gim]*)$/))?(s=RegExp(e[1],e[2]),e=function(a){return ra(d,
+"pattern",d.$isEmpty(a)||s.test(a),a)}):e=function(c){var e=b.$eval(s);if(!e||!e.test)throw x("ngPattern")("noregexp",s,e,ha(a));return ra(d,"pattern",d.$isEmpty(c)||e.test(c),c)},d.$formatters.push(e),d.$parsers.push(e));if(c.ngMinlength){var r=Z(c.ngMinlength);e=function(a){return ra(d,"minlength",d.$isEmpty(a)||a.length>=r,a)};d.$parsers.push(e);d.$formatters.push(e)}if(c.ngMaxlength){var v=Z(c.ngMaxlength);e=function(a){return ra(d,"maxlength",d.$isEmpty(a)||a.length<=v,a)};d.$parsers.push(e);
+d.$formatters.push(e)}}function Yb(b,a){b="ngClass"+b;return["$animate",function(c){function d(a,b){var c=[],d=0;a:for(;d<a.length;d++){for(var e=a[d],l=0;l<b.length;l++)if(e==b[l])continue a;c.push(e)}return c}function e(a){if(!H(a)){if(z(a))return a.split(" ");if(T(a)){var b=[];r(a,function(a,c){a&&(b=b.concat(c.split(" ")))});return b}}return a}return{restrict:"AC",link:function(f,g,k){function m(a,b){var c=g.data("$classCounts")||{},d=[];r(a,function(a){if(0<b||c[a])c[a]=(c[a]||0)+b,c[a]===+(0<
+b)&&d.push(a)});g.data("$classCounts",c);return d.join(" ")}function h(b){if(!0===a||f.$index%2===a){var h=e(b||[]);if(!l){var q=m(h,1);k.$addClass(q)}else if(!za(b,l)){var s=e(l),q=d(h,s),h=d(s,h),h=m(h,-1),q=m(q,1);0===q.length?c.removeClass(g,h):0===h.length?c.addClass(g,q):c.setClass(g,q,h)}}l=ga(b)}var l;f.$watch(k[b],h,!0);k.$observe("class",function(a){h(f.$eval(k[b]))});"ngClass"!==b&&f.$watch("$index",function(c,d){var g=c&1;if(g!==(d&1)){var h=e(f.$eval(k[b]));g===a?(g=m(h,1),k.$addClass(g)):
+(g=m(h,-1),k.$removeClass(g))}})}}}]}var Je="validity",N=function(b){return z(b)?b.toLowerCase():b},ib=Object.prototype.hasOwnProperty,Ia=function(b){return z(b)?b.toUpperCase():b},R,u,Da,Aa=[].slice,Ke=[].push,ya=Object.prototype.toString,Sa=x("ng"),Ua=Q.angular||(Q.angular={}),Xa,Ma,la=["0","0","0"];R=Z((/msie (\d+)/.exec(N(navigator.userAgent))||[])[1]);isNaN(R)&&(R=Z((/trident\/.*; rv:(\d+)/.exec(N(navigator.userAgent))||[])[1]));y.$inject=[];Ga.$inject=[];var H=function(){return P(Array.isArray)?
+Array.isArray:function(b){return"[object Array]"===ya.call(b)}}(),aa=function(){return String.prototype.trim?function(b){return z(b)?b.trim():b}:function(b){return z(b)?b.replace(/^\s\s*/,"").replace(/\s\s*$/,""):b}}();Ma=9>R?function(b){b=b.nodeName?b:b[0];return b.scopeName&&"HTML"!=b.scopeName?Ia(b.scopeName+":"+b.nodeName):b.nodeName}:function(b){return b.nodeName?b.nodeName:b[0].nodeName};var Wa=function(){if(A(Wa.isActive_))return Wa.isActive_;var b=!(!X.querySelector("[ng-csp]")&&!X.querySelector("[data-ng-csp]"));
+if(!b)try{new Function("")}catch(a){b=!0}return Wa.isActive_=b},Yc=/[A-Z]/g,ad={full:"1.2.23",major:1,minor:2,dot:23,codeName:"superficial-malady"};S.expando="ng339";var $a=S.cache={},ne=1,rb=Q.document.addEventListener?function(b,a,c){b.addEventListener(a,c,!1)}:function(b,a,c){b.attachEvent("on"+a,c)},Za=Q.document.removeEventListener?function(b,a,c){b.removeEventListener(a,c,!1)}:function(b,a,c){b.detachEvent("on"+a,c)};S._data=function(b){return this.cache[b[this.expando]]||{}};var ie=/([\:\-\_]+(.))/g,
+je=/^moz([A-Z])/,Hb=x("jqLite"),ke=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,Ib=/<|&#?\w+;/,le=/<([\w:]+)/,me=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,ba={option:[1,'<select multiple="multiple">',"</select>"],thead:[1,"<table>","</table>"],col:[2,"<table><colgroup>","</colgroup></table>"],tr:[2,"<table><tbody>","</tbody></table>"],td:[3,"<table><tbody><tr>","</tr></tbody></table>"],_default:[0,"",""]};ba.optgroup=ba.option;ba.tbody=ba.tfoot=ba.colgroup=ba.caption=ba.thead;ba.th=
+ba.td;var La=S.prototype={ready:function(b){function a(){c||(c=!0,b())}var c=!1;"complete"===X.readyState?setTimeout(a):(this.on("DOMContentLoaded",a),S(Q).on("load",a))},toString:function(){var b=[];r(this,function(a){b.push(""+a)});return"["+b.join(", ")+"]"},eq:function(b){return 0<=b?u(this[b]):u(this[this.length+b])},length:0,push:Ke,sort:[].sort,splice:[].splice},ob={};r("multiple selected checked disabled readOnly required open".split(" "),function(b){ob[N(b)]=b});var rc={};r("input select option textarea button form details".split(" "),
+function(b){rc[Ia(b)]=!0});r({data:Mb,removeData:Lb},function(b,a){S[a]=b});r({data:Mb,inheritedData:nb,scope:function(b){return u.data(b,"$scope")||nb(b.parentNode||b,["$isolateScope","$scope"])},isolateScope:function(b){return u.data(b,"$isolateScope")||u.data(b,"$isolateScopeNoTemplate")},controller:oc,injector:function(b){return nb(b,"$injector")},removeAttr:function(b,a){b.removeAttribute(a)},hasClass:Nb,css:function(b,a,c){a=Ya(a);if(A(c))b.style[a]=c;else{var d;8>=R&&(d=b.currentStyle&&b.currentStyle[a],
+""===d&&(d="auto"));d=d||b.style[a];8>=R&&(d=""===d?t:d);return d}},attr:function(b,a,c){var d=N(a);if(ob[d])if(A(c))c?(b[a]=!0,b.setAttribute(a,d)):(b[a]=!1,b.removeAttribute(d));else return b[a]||(b.attributes.getNamedItem(a)||y).specified?d:t;else if(A(c))b.setAttribute(a,c);else if(b.getAttribute)return b=b.getAttribute(a,2),null===b?t:b},prop:function(b,a,c){if(A(c))b[a]=c;else return b[a]},text:function(){function b(b,d){var e=a[b.nodeType];if(D(d))return e?b[e]:"";b[e]=d}var a=[];9>R?(a[1]=
+"innerText",a[3]="nodeValue"):a[1]=a[3]="textContent";b.$dv="";return b}(),val:function(b,a){if(D(a)){if("SELECT"===Ma(b)&&b.multiple){var c=[];r(b.options,function(a){a.selected&&c.push(a.value||a.text)});return 0===c.length?null:c}return b.value}b.value=a},html:function(b,a){if(D(a))return b.innerHTML;for(var c=0,d=b.childNodes;c<d.length;c++)Ja(d[c]);b.innerHTML=a},empty:pc},function(b,a){S.prototype[a]=function(a,d){var e,f,g=this.length;if(b!==pc&&(2==b.length&&b!==Nb&&b!==oc?a:d)===t){if(T(a)){for(e=
+0;e<g;e++)if(b===Mb)b(this[e],a);else for(f in a)b(this[e],f,a[f]);return this}e=b.$dv;g=e===t?Math.min(g,1):g;for(f=0;f<g;f++){var k=b(this[f],a,d);e=e?e+k:k}return e}for(e=0;e<g;e++)b(this[e],a,d);return this}});r({removeData:Lb,dealoc:Ja,on:function a(c,d,e,f){if(A(f))throw Hb("onargs");var g=ma(c,"events"),k=ma(c,"handle");g||ma(c,"events",g={});k||ma(c,"handle",k=oe(c,g));r(d.split(" "),function(d){var f=g[d];if(!f){if("mouseenter"==d||"mouseleave"==d){var l=X.body.contains||X.body.compareDocumentPosition?
+function(a,c){var d=9===a.nodeType?a.documentElement:a,e=c&&c.parentNode;return a===e||!!(e&&1===e.nodeType&&(d.contains?d.contains(e):a.compareDocumentPosition&&a.compareDocumentPosition(e)&16))}:function(a,c){if(c)for(;c=c.parentNode;)if(c===a)return!0;return!1};g[d]=[];a(c,{mouseleave:"mouseout",mouseenter:"mouseover"}[d],function(a){var c=a.relatedTarget;c&&(c===this||l(this,c))||k(a,d)})}else rb(c,d,k),g[d]=[];f=g[d]}f.push(e)})},off:nc,one:function(a,c,d){a=u(a);a.on(c,function f(){a.off(c,
+d);a.off(c,f)});a.on(c,d)},replaceWith:function(a,c){var d,e=a.parentNode;Ja(a);r(new S(c),function(c){d?e.insertBefore(c,d.nextSibling):e.replaceChild(c,a);d=c})},children:function(a){var c=[];r(a.childNodes,function(a){1===a.nodeType&&c.push(a)});return c},contents:function(a){return a.contentDocument||a.childNodes||[]},append:function(a,c){r(new S(c),function(c){1!==a.nodeType&&11!==a.nodeType||a.appendChild(c)})},prepend:function(a,c){if(1===a.nodeType){var d=a.firstChild;r(new S(c),function(c){a.insertBefore(c,
+d)})}},wrap:function(a,c){c=u(c)[0];var d=a.parentNode;d&&d.replaceChild(c,a);c.appendChild(a)},remove:function(a){Ja(a);var c=a.parentNode;c&&c.removeChild(a)},after:function(a,c){var d=a,e=a.parentNode;r(new S(c),function(a){e.insertBefore(a,d.nextSibling);d=a})},addClass:mb,removeClass:lb,toggleClass:function(a,c,d){c&&r(c.split(" "),function(c){var f=d;D(f)&&(f=!Nb(a,c));(f?mb:lb)(a,c)})},parent:function(a){return(a=a.parentNode)&&11!==a.nodeType?a:null},next:function(a){if(a.nextElementSibling)return a.nextElementSibling;
+for(a=a.nextSibling;null!=a&&1!==a.nodeType;)a=a.nextSibling;return a},find:function(a,c){return a.getElementsByTagName?a.getElementsByTagName(c):[]},clone:Kb,triggerHandler:function(a,c,d){var e,f;e=c.type||c;var g=(ma(a,"events")||{})[e];g&&(e={preventDefault:function(){this.defaultPrevented=!0},isDefaultPrevented:function(){return!0===this.defaultPrevented},stopPropagation:y,type:e,target:a},c.type&&(e=B(e,c)),c=ga(g),f=d?[e].concat(d):[e],r(c,function(c){c.apply(a,f)}))}},function(a,c){S.prototype[c]=
+function(c,e,f){for(var g,k=0;k<this.length;k++)D(g)?(g=a(this[k],c,e,f),A(g)&&(g=u(g))):Jb(g,a(this[k],c,e,f));return A(g)?g:this};S.prototype.bind=S.prototype.on;S.prototype.unbind=S.prototype.off});ab.prototype={put:function(a,c){this[Ka(a,this.nextUid)]=c},get:function(a){return this[Ka(a,this.nextUid)]},remove:function(a){var c=this[a=Ka(a,this.nextUid)];delete this[a];return c}};var qe=/^function\s*[^\(]*\(\s*([^\)]*)\)/m,re=/,/,se=/^\s*(_?)(\S+?)\1\s*$/,pe=/((\/\/.*$)|(\/\*[\s\S]*?\*\/))/mg,
+bb=x("$injector"),Le=x("$animate"),Md=["$provide",function(a){this.$$selectors={};this.register=function(c,d){var e=c+"-animation";if(c&&"."!=c.charAt(0))throw Le("notcsel",c);this.$$selectors[c.substr(1)]=e;a.factory(e,d)};this.classNameFilter=function(a){1===arguments.length&&(this.$$classNameFilter=a instanceof RegExp?a:null);return this.$$classNameFilter};this.$get=["$timeout","$$asyncCallback",function(a,d){return{enter:function(a,c,g,k){g?g.after(a):(c&&c[0]||(c=g.parent()),c.append(a));k&&
+d(k)},leave:function(a,c){a.remove();c&&d(c)},move:function(a,c,d,k){this.enter(a,c,d,k)},addClass:function(a,c,g){c=z(c)?c:H(c)?c.join(" "):"";r(a,function(a){mb(a,c)});g&&d(g)},removeClass:function(a,c,g){c=z(c)?c:H(c)?c.join(" "):"";r(a,function(a){lb(a,c)});g&&d(g)},setClass:function(a,c,g,k){r(a,function(a){mb(a,c);lb(a,g)});k&&d(k)},enabled:y}}]}],ia=x("$compile");ic.$inject=["$provide","$$sanitizeUriProvider"];var ue=/^(x[\:\-_]|data[\:\-_])/i,yc=x("$interpolate"),Me=/^([^\?#]*)(\?([^#]*))?(#(.*))?$/,
+xe={http:80,https:443,ftp:21},Sb=x("$location");Ub.prototype=Tb.prototype=Bc.prototype={$$html5:!1,$$replace:!1,absUrl:sb("$$absUrl"),url:function(a,c){if(D(a))return this.$$url;var d=Me.exec(a);d[1]&&this.path(decodeURIComponent(d[1]));(d[2]||d[1])&&this.search(d[3]||"");this.hash(d[5]||"",c);return this},protocol:sb("$$protocol"),host:sb("$$host"),port:sb("$$port"),path:Cc("$$path",function(a){return"/"==a.charAt(0)?a:"/"+a}),search:function(a,c){switch(arguments.length){case 0:return this.$$search;
+case 1:if(z(a))this.$$search=ec(a);else if(T(a))r(a,function(c,e){null==c&&delete a[e]}),this.$$search=a;else throw Sb("isrcharg");break;default:D(c)||null===c?delete this.$$search[a]:this.$$search[a]=c}this.$$compose();return this},hash:Cc("$$hash",Ga),replace:function(){this.$$replace=!0;return this}};var ka=x("$parse"),Fc={},va,Ne=Function.prototype.call,Oe=Function.prototype.apply,Qc=Function.prototype.bind,eb={"null":function(){return null},"true":function(){return!0},"false":function(){return!1},
+undefined:y,"+":function(a,c,d,e){d=d(a,c);e=e(a,c);return A(d)?A(e)?d+e:d:A(e)?e:t},"-":function(a,c,d,e){d=d(a,c);e=e(a,c);return(A(d)?d:0)-(A(e)?e:0)},"*":function(a,c,d,e){return d(a,c)*e(a,c)},"/":function(a,c,d,e){return d(a,c)/e(a,c)},"%":function(a,c,d,e){return d(a,c)%e(a,c)},"^":function(a,c,d,e){return d(a,c)^e(a,c)},"=":y,"===":function(a,c,d,e){return d(a,c)===e(a,c)},"!==":function(a,c,d,e){return d(a,c)!==e(a,c)},"==":function(a,c,d,e){return d(a,c)==e(a,c)},"!=":function(a,c,d,e){return d(a,
+c)!=e(a,c)},"<":function(a,c,d,e){return d(a,c)<e(a,c)},">":function(a,c,d,e){return d(a,c)>e(a,c)},"<=":function(a,c,d,e){return d(a,c)<=e(a,c)},">=":function(a,c,d,e){return d(a,c)>=e(a,c)},"&&":function(a,c,d,e){return d(a,c)&&e(a,c)},"||":function(a,c,d,e){return d(a,c)||e(a,c)},"&":function(a,c,d,e){return d(a,c)&e(a,c)},"|":function(a,c,d,e){return e(a,c)(a,c,d(a,c))},"!":function(a,c,d){return!d(a,c)}},Pe={n:"\n",f:"\f",r:"\r",t:"\t",v:"\v","'":"'",'"':'"'},Wb=function(a){this.options=a};Wb.prototype=
+{constructor:Wb,lex:function(a){this.text=a;this.index=0;this.ch=t;this.lastCh=":";for(this.tokens=[];this.index<this.text.length;){this.ch=this.text.charAt(this.index);if(this.is("\"'"))this.readString(this.ch);else if(this.isNumber(this.ch)||this.is(".")&&this.isNumber(this.peek()))this.readNumber();else if(this.isIdent(this.ch))this.readIdent();else if(this.is("(){}[].,;:?"))this.tokens.push({index:this.index,text:this.ch}),this.index++;else if(this.isWhitespace(this.ch)){this.index++;continue}else{a=
+this.ch+this.peek();var c=a+this.peek(2),d=eb[this.ch],e=eb[a],f=eb[c];f?(this.tokens.push({index:this.index,text:c,fn:f}),this.index+=3):e?(this.tokens.push({index:this.index,text:a,fn:e}),this.index+=2):d?(this.tokens.push({index:this.index,text:this.ch,fn:d}),this.index+=1):this.throwError("Unexpected next character ",this.index,this.index+1)}this.lastCh=this.ch}return this.tokens},is:function(a){return-1!==a.indexOf(this.ch)},was:function(a){return-1!==a.indexOf(this.lastCh)},peek:function(a){a=
+a||1;return this.index+a<this.text.length?this.text.charAt(this.index+a):!1},isNumber:function(a){return"0"<=a&&"9">=a},isWhitespace:function(a){return" "===a||"\r"===a||"\t"===a||"\n"===a||"\v"===a||"\u00a0"===a},isIdent:function(a){return"a"<=a&&"z">=a||"A"<=a&&"Z">=a||"_"===a||"$"===a},isExpOperator:function(a){return"-"===a||"+"===a||this.isNumber(a)},throwError:function(a,c,d){d=d||this.index;c=A(c)?"s "+c+"-"+this.index+" ["+this.text.substring(c,d)+"]":" "+d;throw ka("lexerr",a,c,this.text);
+},readNumber:function(){for(var a="",c=this.index;this.index<this.text.length;){var d=N(this.text.charAt(this.index));if("."==d||this.isNumber(d))a+=d;else{var e=this.peek();if("e"==d&&this.isExpOperator(e))a+=d;else if(this.isExpOperator(d)&&e&&this.isNumber(e)&&"e"==a.charAt(a.length-1))a+=d;else if(!this.isExpOperator(d)||e&&this.isNumber(e)||"e"!=a.charAt(a.length-1))break;else this.throwError("Invalid exponent")}this.index++}a*=1;this.tokens.push({index:c,text:a,literal:!0,constant:!0,fn:function(){return a}})},
+readIdent:function(){for(var a=this,c="",d=this.index,e,f,g,k;this.index<this.text.length;){k=this.text.charAt(this.index);if("."===k||this.isIdent(k)||this.isNumber(k))"."===k&&(e=this.index),c+=k;else break;this.index++}if(e)for(f=this.index;f<this.text.length;){k=this.text.charAt(f);if("("===k){g=c.substr(e-d+1);c=c.substr(0,e-d);this.index=f;break}if(this.isWhitespace(k))f++;else break}d={index:d,text:c};if(eb.hasOwnProperty(c))d.fn=eb[c],d.literal=!0,d.constant=!0;else{var m=Ec(c,this.options,
+this.text);d.fn=B(function(a,c){return m(a,c)},{assign:function(d,e){return tb(d,c,e,a.text,a.options)}})}this.tokens.push(d);g&&(this.tokens.push({index:e,text:"."}),this.tokens.push({index:e+1,text:g}))},readString:function(a){var c=this.index;this.index++;for(var d="",e=a,f=!1;this.index<this.text.length;){var g=this.text.charAt(this.index),e=e+g;if(f)"u"===g?(f=this.text.substring(this.index+1,this.index+5),f.match(/[\da-f]{4}/i)||this.throwError("Invalid unicode escape [\\u"+f+"]"),this.index+=
+4,d+=String.fromCharCode(parseInt(f,16))):d+=Pe[g]||g,f=!1;else if("\\"===g)f=!0;else{if(g===a){this.index++;this.tokens.push({index:c,text:e,string:d,literal:!0,constant:!0,fn:function(){return d}});return}d+=g}this.index++}this.throwError("Unterminated quote",c)}};var db=function(a,c,d){this.lexer=a;this.$filter=c;this.options=d};db.ZERO=B(function(){return 0},{constant:!0});db.prototype={constructor:db,parse:function(a){this.text=a;this.tokens=this.lexer.lex(a);a=this.statements();0!==this.tokens.length&&
+this.throwError("is an unexpected token",this.tokens[0]);a.literal=!!a.literal;a.constant=!!a.constant;return a},primary:function(){var a;if(this.expect("("))a=this.filterChain(),this.consume(")");else if(this.expect("["))a=this.arrayDeclaration();else if(this.expect("{"))a=this.object();else{var c=this.expect();(a=c.fn)||this.throwError("not a primary expression",c);a.literal=!!c.literal;a.constant=!!c.constant}for(var d;c=this.expect("(","[",".");)"("===c.text?(a=this.functionCall(a,d),d=null):
+"["===c.text?(d=a,a=this.objectIndex(a)):"."===c.text?(d=a,a=this.fieldAccess(a)):this.throwError("IMPOSSIBLE");return a},throwError:function(a,c){throw ka("syntax",c.text,a,c.index+1,this.text,this.text.substring(c.index));},peekToken:function(){if(0===this.tokens.length)throw ka("ueoe",this.text);return this.tokens[0]},peek:function(a,c,d,e){if(0<this.tokens.length){var f=this.tokens[0],g=f.text;if(g===a||g===c||g===d||g===e||!(a||c||d||e))return f}return!1},expect:function(a,c,d,e){return(a=this.peek(a,
+c,d,e))?(this.tokens.shift(),a):!1},consume:function(a){this.expect(a)||this.throwError("is unexpected, expecting ["+a+"]",this.peek())},unaryFn:function(a,c){return B(function(d,e){return a(d,e,c)},{constant:c.constant})},ternaryFn:function(a,c,d){return B(function(e,f){return a(e,f)?c(e,f):d(e,f)},{constant:a.constant&&c.constant&&d.constant})},binaryFn:function(a,c,d){return B(function(e,f){return c(e,f,a,d)},{constant:a.constant&&d.constant})},statements:function(){for(var a=[];;)if(0<this.tokens.length&&
+!this.peek("}",")",";","]")&&a.push(this.filterChain()),!this.expect(";"))return 1===a.length?a[0]:function(c,d){for(var e,f=0;f<a.length;f++){var g=a[f];g&&(e=g(c,d))}return e}},filterChain:function(){for(var a=this.expression(),c;;)if(c=this.expect("|"))a=this.binaryFn(a,c.fn,this.filter());else return a},filter:function(){for(var a=this.expect(),c=this.$filter(a.text),d=[];;)if(a=this.expect(":"))d.push(this.expression());else{var e=function(a,e,k){k=[k];for(var m=0;m<d.length;m++)k.push(d[m](a,
+e));return c.apply(a,k)};return function(){return e}}},expression:function(){return this.assignment()},assignment:function(){var a=this.ternary(),c,d;return(d=this.expect("="))?(a.assign||this.throwError("implies assignment but ["+this.text.substring(0,d.index)+"] can not be assigned to",d),c=this.ternary(),function(d,f){return a.assign(d,c(d,f),f)}):a},ternary:function(){var a=this.logicalOR(),c,d;if(this.expect("?")){c=this.assignment();if(d=this.expect(":"))return this.ternaryFn(a,c,this.assignment());
+this.throwError("expected :",d)}else return a},logicalOR:function(){for(var a=this.logicalAND(),c;;)if(c=this.expect("||"))a=this.binaryFn(a,c.fn,this.logicalAND());else return a},logicalAND:function(){var a=this.equality(),c;if(c=this.expect("&&"))a=this.binaryFn(a,c.fn,this.logicalAND());return a},equality:function(){var a=this.relational(),c;if(c=this.expect("==","!=","===","!=="))a=this.binaryFn(a,c.fn,this.equality());return a},relational:function(){var a=this.additive(),c;if(c=this.expect("<",
+">","<=",">="))a=this.binaryFn(a,c.fn,this.relational());return a},additive:function(){for(var a=this.multiplicative(),c;c=this.expect("+","-");)a=this.binaryFn(a,c.fn,this.multiplicative());return a},multiplicative:function(){for(var a=this.unary(),c;c=this.expect("*","/","%");)a=this.binaryFn(a,c.fn,this.unary());return a},unary:function(){var a;return this.expect("+")?this.primary():(a=this.expect("-"))?this.binaryFn(db.ZERO,a.fn,this.unary()):(a=this.expect("!"))?this.unaryFn(a.fn,this.unary()):
+this.primary()},fieldAccess:function(a){var c=this,d=this.expect().text,e=Ec(d,this.options,this.text);return B(function(c,d,k){return e(k||a(c,d))},{assign:function(e,g,k){(k=a(e,k))||a.assign(e,k={});return tb(k,d,g,c.text,c.options)}})},objectIndex:function(a){var c=this,d=this.expression();this.consume("]");return B(function(e,f){var g=a(e,f),k=d(e,f),m;ja(k,c.text);if(!g)return t;(g=Oa(g[k],c.text))&&(g.then&&c.options.unwrapPromises)&&(m=g,"$$v"in g||(m.$$v=t,m.then(function(a){m.$$v=a})),g=
+g.$$v);return g},{assign:function(e,f,g){var k=ja(d(e,g),c.text);(g=Oa(a(e,g),c.text))||a.assign(e,g={});return g[k]=f}})},functionCall:function(a,c){var d=[];if(")"!==this.peekToken().text){do d.push(this.expression());while(this.expect(","))}this.consume(")");var e=this;return function(f,g){for(var k=[],m=c?c(f,g):f,h=0;h<d.length;h++)k.push(d[h](f,g));h=a(f,g,m)||y;Oa(m,e.text);var l=e.text;if(h){if(h.constructor===h)throw ka("isecfn",l);if(h===Ne||h===Oe||Qc&&h===Qc)throw ka("isecff",l);}k=h.apply?
+h.apply(m,k):h(k[0],k[1],k[2],k[3],k[4]);return Oa(k,e.text)}},arrayDeclaration:function(){var a=[],c=!0;if("]"!==this.peekToken().text){do{if(this.peek("]"))break;var d=this.expression();a.push(d);d.constant||(c=!1)}while(this.expect(","))}this.consume("]");return B(function(c,d){for(var g=[],k=0;k<a.length;k++)g.push(a[k](c,d));return g},{literal:!0,constant:c})},object:function(){var a=[],c=!0;if("}"!==this.peekToken().text){do{if(this.peek("}"))break;var d=this.expect(),d=d.string||d.text;this.consume(":");
+var e=this.expression();a.push({key:d,value:e});e.constant||(c=!1)}while(this.expect(","))}this.consume("}");return B(function(c,d){for(var e={},m=0;m<a.length;m++){var h=a[m];e[h.key]=h.value(c,d)}return e},{literal:!0,constant:c})}};var Vb={},wa=x("$sce"),fa={HTML:"html",CSS:"css",URL:"url",RESOURCE_URL:"resourceUrl",JS:"js"},V=X.createElement("a"),Hc=ua(Q.location.href,!0);mc.$inject=["$provide"];Ic.$inject=["$locale"];Kc.$inject=["$locale"];var Nc=".",He={yyyy:Y("FullYear",4),yy:Y("FullYear",
+2,0,!0),y:Y("FullYear",1),MMMM:ub("Month"),MMM:ub("Month",!0),MM:Y("Month",2,1),M:Y("Month",1,1),dd:Y("Date",2),d:Y("Date",1),HH:Y("Hours",2),H:Y("Hours",1),hh:Y("Hours",2,-12),h:Y("Hours",1,-12),mm:Y("Minutes",2),m:Y("Minutes",1),ss:Y("Seconds",2),s:Y("Seconds",1),sss:Y("Milliseconds",3),EEEE:ub("Day"),EEE:ub("Day",!0),a:function(a,c){return 12>a.getHours()?c.AMPMS[0]:c.AMPMS[1]},Z:function(a){a=-1*a.getTimezoneOffset();return a=(0<=a?"+":"")+(Xb(Math[0<a?"floor":"ceil"](a/60),2)+Xb(Math.abs(a%60),
+2))}},Ge=/((?:[^yMdHhmsaZE']+)|(?:'(?:[^']|'')*')|(?:E+|y+|M+|d+|H+|h+|m+|s+|a|Z))(.*)/,Fe=/^\-?\d+$/;Jc.$inject=["$locale"];var De=$(N),Ee=$(Ia);Lc.$inject=["$parse"];var dd=$({restrict:"E",compile:function(a,c){8>=R&&(c.href||c.name||c.$set("href",""),a.append(X.createComment("IE fix")));if(!c.href&&!c.xlinkHref&&!c.name)return function(a,c){var f="[object SVGAnimatedString]"===ya.call(c.prop("href"))?"xlink:href":"href";c.on("click",function(a){c.attr(f)||a.preventDefault()})}}}),Fb={};r(ob,function(a,
+c){if("multiple"!=a){var d=na("ng-"+c);Fb[d]=function(){return{priority:100,link:function(a,f,g){a.$watch(g[d],function(a){g.$set(c,!!a)})}}}}});r(["src","srcset","href"],function(a){var c=na("ng-"+a);Fb[c]=function(){return{priority:99,link:function(d,e,f){var g=a,k=a;"href"===a&&"[object SVGAnimatedString]"===ya.call(e.prop("href"))&&(k="xlinkHref",f.$attr[k]="xlink:href",g=null);f.$observe(c,function(c){c?(f.$set(k,c),R&&g&&e.prop(g,f[k])):"href"===a&&f.$set(k,null)})}}}});var xb={$addControl:y,
+$removeControl:y,$setValidity:y,$setDirty:y,$setPristine:y};Oc.$inject=["$element","$attrs","$scope","$animate"];var Rc=function(a){return["$timeout",function(c){return{name:"form",restrict:a?"EAC":"E",controller:Oc,compile:function(){return{pre:function(a,e,f,g){if(!f.action){var k=function(a){a.preventDefault?a.preventDefault():a.returnValue=!1};rb(e[0],"submit",k);e.on("$destroy",function(){c(function(){Za(e[0],"submit",k)},0,!1)})}var m=e.parent().controller("form"),h=f.name||f.ngForm;h&&tb(a,
+h,g,h);if(m)e.on("$destroy",function(){m.$removeControl(g);h&&tb(a,h,t,h);B(g,xb)})}}}}}]},ed=Rc(),rd=Rc(!0),Qe=/^(ftp|http|https):\/\/(\w+:{0,1}\w*@)?(\S+)(:[0-9]+)?(\/|\/([\w#!:.?+=&%@!\-\/]))?$/,Re=/^[a-z0-9!#$%&'*+\/=?^_`{|}~.-]+@[a-z0-9]([a-z0-9-]*[a-z0-9])?(\.[a-z0-9]([a-z0-9-]*[a-z0-9])?)*$/i,Se=/^\s*(\-|\+)?(\d+|(\d*(\.\d*)))\s*$/,Sc={text:zb,number:function(a,c,d,e,f,g){zb(a,c,d,e,f,g);e.$parsers.push(function(a){var c=e.$isEmpty(a);if(c||Se.test(a))return e.$setValidity("number",!0),""===
+a?null:c?a:parseFloat(a);e.$setValidity("number",!1);return t});Ie(e,"number",Te,null,e.$$validityState);e.$formatters.push(function(a){return e.$isEmpty(a)?"":""+a});d.min&&(a=function(a){var c=parseFloat(d.min);return ra(e,"min",e.$isEmpty(a)||a>=c,a)},e.$parsers.push(a),e.$formatters.push(a));d.max&&(a=function(a){var c=parseFloat(d.max);return ra(e,"max",e.$isEmpty(a)||a<=c,a)},e.$parsers.push(a),e.$formatters.push(a));e.$formatters.push(function(a){return ra(e,"number",e.$isEmpty(a)||Ab(a),a)})},
+url:function(a,c,d,e,f,g){zb(a,c,d,e,f,g);a=function(a){return ra(e,"url",e.$isEmpty(a)||Qe.test(a),a)};e.$formatters.push(a);e.$parsers.push(a)},email:function(a,c,d,e,f,g){zb(a,c,d,e,f,g);a=function(a){return ra(e,"email",e.$isEmpty(a)||Re.test(a),a)};e.$formatters.push(a);e.$parsers.push(a)},radio:function(a,c,d,e){D(d.name)&&c.attr("name",gb());c.on("click",function(){c[0].checked&&a.$apply(function(){e.$setViewValue(d.value)})});e.$render=function(){c[0].checked=d.value==e.$viewValue};d.$observe("value",
+e.$render)},checkbox:function(a,c,d,e){var f=d.ngTrueValue,g=d.ngFalseValue;z(f)||(f=!0);z(g)||(g=!1);c.on("click",function(){a.$apply(function(){e.$setViewValue(c[0].checked)})});e.$render=function(){c[0].checked=e.$viewValue};e.$isEmpty=function(a){return a!==f};e.$formatters.push(function(a){return a===f});e.$parsers.push(function(a){return a?f:g})},hidden:y,button:y,submit:y,reset:y,file:y},Te=["badInput"],jc=["$browser","$sniffer",function(a,c){return{restrict:"E",require:"?ngModel",link:function(d,
+e,f,g){g&&(Sc[N(f.type)]||Sc.text)(d,e,f,g,c,a)}}}],wb="ng-valid",vb="ng-invalid",Pa="ng-pristine",yb="ng-dirty",Ue=["$scope","$exceptionHandler","$attrs","$element","$parse","$animate",function(a,c,d,e,f,g){function k(a,c){c=c?"-"+kb(c,"-"):"";g.removeClass(e,(a?vb:wb)+c);g.addClass(e,(a?wb:vb)+c)}this.$modelValue=this.$viewValue=Number.NaN;this.$parsers=[];this.$formatters=[];this.$viewChangeListeners=[];this.$pristine=!0;this.$dirty=!1;this.$valid=!0;this.$invalid=!1;this.$name=d.name;var m=f(d.ngModel),
+h=m.assign;if(!h)throw x("ngModel")("nonassign",d.ngModel,ha(e));this.$render=y;this.$isEmpty=function(a){return D(a)||""===a||null===a||a!==a};var l=e.inheritedData("$formController")||xb,n=0,p=this.$error={};e.addClass(Pa);k(!0);this.$setValidity=function(a,c){p[a]!==!c&&(c?(p[a]&&n--,n||(k(!0),this.$valid=!0,this.$invalid=!1)):(k(!1),this.$invalid=!0,this.$valid=!1,n++),p[a]=!c,k(c,a),l.$setValidity(a,c,this))};this.$setPristine=function(){this.$dirty=!1;this.$pristine=!0;g.removeClass(e,yb);g.addClass(e,
+Pa)};this.$setViewValue=function(d){this.$viewValue=d;this.$pristine&&(this.$dirty=!0,this.$pristine=!1,g.removeClass(e,Pa),g.addClass(e,yb),l.$setDirty());r(this.$parsers,function(a){d=a(d)});this.$modelValue!==d&&(this.$modelValue=d,h(a,d),r(this.$viewChangeListeners,function(a){try{a()}catch(d){c(d)}}))};var q=this;a.$watch(function(){var c=m(a);if(q.$modelValue!==c){var d=q.$formatters,e=d.length;for(q.$modelValue=c;e--;)c=d[e](c);q.$viewValue!==c&&(q.$viewValue=c,q.$render())}return c})}],Gd=
+function(){return{require:["ngModel","^?form"],controller:Ue,link:function(a,c,d,e){var f=e[0],g=e[1]||xb;g.$addControl(f);a.$on("$destroy",function(){g.$removeControl(f)})}}},Id=$({require:"ngModel",link:function(a,c,d,e){e.$viewChangeListeners.push(function(){a.$eval(d.ngChange)})}}),kc=function(){return{require:"?ngModel",link:function(a,c,d,e){if(e){d.required=!0;var f=function(a){if(d.required&&e.$isEmpty(a))e.$setValidity("required",!1);else return e.$setValidity("required",!0),a};e.$formatters.push(f);
+e.$parsers.unshift(f);d.$observe("required",function(){f(e.$viewValue)})}}}},Hd=function(){return{require:"ngModel",link:function(a,c,d,e){var f=(a=/\/(.*)\//.exec(d.ngList))&&RegExp(a[1])||d.ngList||",";e.$parsers.push(function(a){if(!D(a)){var c=[];a&&r(a.split(f),function(a){a&&c.push(aa(a))});return c}});e.$formatters.push(function(a){return H(a)?a.join(", "):t});e.$isEmpty=function(a){return!a||!a.length}}}},Ve=/^(true|false|\d+)$/,Jd=function(){return{priority:100,compile:function(a,c){return Ve.test(c.ngValue)?
+function(a,c,f){f.$set("value",a.$eval(f.ngValue))}:function(a,c,f){a.$watch(f.ngValue,function(a){f.$set("value",a)})}}}},jd=xa({compile:function(a){a.addClass("ng-binding");return function(a,d,e){d.data("$binding",e.ngBind);a.$watch(e.ngBind,function(a){d.text(a==t?"":a)})}}}),ld=["$interpolate",function(a){return function(c,d,e){c=a(d.attr(e.$attr.ngBindTemplate));d.addClass("ng-binding").data("$binding",c);e.$observe("ngBindTemplate",function(a){d.text(a)})}}],kd=["$sce","$parse",function(a,c){return{compile:function(d){d.addClass("ng-binding");
+return function(d,f,g){f.data("$binding",g.ngBindHtml);var k=c(g.ngBindHtml);d.$watch(function(){return(k(d)||"").toString()},function(c){f.html(a.getTrustedHtml(k(d))||"")})}}}}],md=Yb("",!0),od=Yb("Odd",0),nd=Yb("Even",1),pd=xa({compile:function(a,c){c.$set("ngCloak",t);a.removeClass("ng-cloak")}}),qd=[function(){return{scope:!0,controller:"@",priority:500}}],lc={};r("click dblclick mousedown mouseup mouseover mouseout mousemove mouseenter mouseleave keydown keyup keypress submit focus blur copy cut paste".split(" "),
+function(a){var c=na("ng-"+a);lc[c]=["$parse",function(d){return{compile:function(e,f){var g=d(f[c]);return function(c,d){d.on(N(a),function(a){c.$apply(function(){g(c,{$event:a})})})}}}}]});var td=["$animate",function(a){return{transclude:"element",priority:600,terminal:!0,restrict:"A",$$tlb:!0,link:function(c,d,e,f,g){var k,m,h;c.$watch(e.ngIf,function(f){Ta(f)?m||(m=c.$new(),g(m,function(c){c[c.length++]=X.createComment(" end ngIf: "+e.ngIf+" ");k={clone:c};a.enter(c,d.parent(),d)})):(h&&(h.remove(),
+h=null),m&&(m.$destroy(),m=null),k&&(h=Eb(k.clone),a.leave(h,function(){h=null}),k=null))})}}}],ud=["$http","$templateCache","$anchorScroll","$animate","$sce",function(a,c,d,e,f){return{restrict:"ECA",priority:400,terminal:!0,transclude:"element",controller:Ua.noop,compile:function(g,k){var m=k.ngInclude||k.src,h=k.onload||"",l=k.autoscroll;return function(g,k,q,r,L){var v=0,t,u,I,w=function(){u&&(u.remove(),u=null);t&&(t.$destroy(),t=null);I&&(e.leave(I,function(){u=null}),u=I,I=null)};g.$watch(f.parseAsResourceUrl(m),
+function(f){var m=function(){!A(l)||l&&!g.$eval(l)||d()},q=++v;f?(a.get(f,{cache:c}).success(function(a){if(q===v){var c=g.$new();r.template=a;a=L(c,function(a){w();e.enter(a,null,k,m)});t=c;I=a;t.$emit("$includeContentLoaded");g.$eval(h)}}).error(function(){q===v&&w()}),g.$emit("$includeContentRequested")):(w(),r.template=null)})}}}}],Kd=["$compile",function(a){return{restrict:"ECA",priority:-400,require:"ngInclude",link:function(c,d,e,f){d.html(f.template);a(d.contents())(c)}}}],vd=xa({priority:450,
+compile:function(){return{pre:function(a,c,d){a.$eval(d.ngInit)}}}}),wd=xa({terminal:!0,priority:1E3}),xd=["$locale","$interpolate",function(a,c){var d=/{}/g;return{restrict:"EA",link:function(e,f,g){var k=g.count,m=g.$attr.when&&f.attr(g.$attr.when),h=g.offset||0,l=e.$eval(m)||{},n={},p=c.startSymbol(),q=c.endSymbol(),s=/^when(Minus)?(.+)$/;r(g,function(a,c){s.test(c)&&(l[N(c.replace("when","").replace("Minus","-"))]=f.attr(g.$attr[c]))});r(l,function(a,e){n[e]=c(a.replace(d,p+k+"-"+h+q))});e.$watch(function(){var c=
+parseFloat(e.$eval(k));if(isNaN(c))return"";c in l||(c=a.pluralCat(c-h));return n[c](e,f,!0)},function(a){f.text(a)})}}}],yd=["$parse","$animate",function(a,c){var d=x("ngRepeat");return{transclude:"element",priority:1E3,terminal:!0,$$tlb:!0,link:function(e,f,g,k,m){var h=g.ngRepeat,l=h.match(/^\s*([\s\S]+?)\s+in\s+([\s\S]+?)(?:\s+track\s+by\s+([\s\S]+?))?\s*$/),n,p,q,s,t,v,C={$id:Ka};if(!l)throw d("iexp",h);g=l[1];k=l[2];(l=l[3])?(n=a(l),p=function(a,c,d){v&&(C[v]=a);C[t]=c;C.$index=d;return n(e,
+C)}):(q=function(a,c){return Ka(c)},s=function(a){return a});l=g.match(/^(?:([\$\w]+)|\(([\$\w]+)\s*,\s*([\$\w]+)\))$/);if(!l)throw d("iidexp",g);t=l[3]||l[1];v=l[2];var A={};e.$watchCollection(k,function(a){var g,k,l=f[0],n,C={},J,E,F,x,z,y,H=[];if(fb(a))z=a,n=p||q;else{n=p||s;z=[];for(F in a)a.hasOwnProperty(F)&&"$"!=F.charAt(0)&&z.push(F);z.sort()}J=z.length;k=H.length=z.length;for(g=0;g<k;g++)if(F=a===z?g:z[g],x=a[F],x=n(F,x,g),Ca(x,"`track by` id"),A.hasOwnProperty(x))y=A[x],delete A[x],C[x]=
+y,H[g]=y;else{if(C.hasOwnProperty(x))throw r(H,function(a){a&&a.scope&&(A[a.id]=a)}),d("dupes",h,x);H[g]={id:x};C[x]=!1}for(F in A)A.hasOwnProperty(F)&&(y=A[F],g=Eb(y.clone),c.leave(g),r(g,function(a){a.$$NG_REMOVED=!0}),y.scope.$destroy());g=0;for(k=z.length;g<k;g++){F=a===z?g:z[g];x=a[F];y=H[g];H[g-1]&&(l=H[g-1].clone[H[g-1].clone.length-1]);if(y.scope){E=y.scope;n=l;do n=n.nextSibling;while(n&&n.$$NG_REMOVED);y.clone[0]!=n&&c.move(Eb(y.clone),null,u(l));l=y.clone[y.clone.length-1]}else E=e.$new();
+E[t]=x;v&&(E[v]=F);E.$index=g;E.$first=0===g;E.$last=g===J-1;E.$middle=!(E.$first||E.$last);E.$odd=!(E.$even=0===(g&1));y.scope||m(E,function(a){a[a.length++]=X.createComment(" end ngRepeat: "+h+" ");c.enter(a,null,u(l));l=a;y.scope=E;y.clone=a;C[y.id]=y})}A=C})}}}],zd=["$animate",function(a){return function(c,d,e){c.$watch(e.ngShow,function(c){a[Ta(c)?"removeClass":"addClass"](d,"ng-hide")})}}],sd=["$animate",function(a){return function(c,d,e){c.$watch(e.ngHide,function(c){a[Ta(c)?"addClass":"removeClass"](d,
+"ng-hide")})}}],Ad=xa(function(a,c,d){a.$watch(d.ngStyle,function(a,d){d&&a!==d&&r(d,function(a,d){c.css(d,"")});a&&c.css(a)},!0)}),Bd=["$animate",function(a){return{restrict:"EA",require:"ngSwitch",controller:["$scope",function(){this.cases={}}],link:function(c,d,e,f){var g=[],k=[],m=[],h=[];c.$watch(e.ngSwitch||e.on,function(d){var n,p;n=0;for(p=m.length;n<p;++n)m[n].remove();n=m.length=0;for(p=h.length;n<p;++n){var q=k[n];h[n].$destroy();m[n]=q;a.leave(q,function(){m.splice(n,1)})}k.length=0;h.length=
+0;if(g=f.cases["!"+d]||f.cases["?"])c.$eval(e.change),r(g,function(d){var e=c.$new();h.push(e);d.transclude(e,function(c){var e=d.element;k.push(c);a.enter(c,e.parent(),e)})})})}}}],Cd=xa({transclude:"element",priority:800,require:"^ngSwitch",link:function(a,c,d,e,f){e.cases["!"+d.ngSwitchWhen]=e.cases["!"+d.ngSwitchWhen]||[];e.cases["!"+d.ngSwitchWhen].push({transclude:f,element:c})}}),Dd=xa({transclude:"element",priority:800,require:"^ngSwitch",link:function(a,c,d,e,f){e.cases["?"]=e.cases["?"]||
+[];e.cases["?"].push({transclude:f,element:c})}}),Fd=xa({link:function(a,c,d,e,f){if(!f)throw x("ngTransclude")("orphan",ha(c));f(function(a){c.empty();c.append(a)})}}),fd=["$templateCache",function(a){return{restrict:"E",terminal:!0,compile:function(c,d){"text/ng-template"==d.type&&a.put(d.id,c[0].text)}}}],We=x("ngOptions"),Ed=$({terminal:!0}),gd=["$compile","$parse",function(a,c){var d=/^\s*([\s\S]+?)(?:\s+as\s+([\s\S]+?))?(?:\s+group\s+by\s+([\s\S]+?))?\s+for\s+(?:([\$\w][\$\w]*)|(?:\(\s*([\$\w][\$\w]*)\s*,\s*([\$\w][\$\w]*)\s*\)))\s+in\s+([\s\S]+?)(?:\s+track\s+by\s+([\s\S]+?))?$/,
+e={$setViewValue:y};return{restrict:"E",require:["select","?ngModel"],controller:["$element","$scope","$attrs",function(a,c,d){var m=this,h={},l=e,n;m.databound=d.ngModel;m.init=function(a,c,d){l=a;n=d};m.addOption=function(c){Ca(c,'"option value"');h[c]=!0;l.$viewValue==c&&(a.val(c),n.parent()&&n.remove())};m.removeOption=function(a){this.hasOption(a)&&(delete h[a],l.$viewValue==a&&this.renderUnknownOption(a))};m.renderUnknownOption=function(c){c="? "+Ka(c)+" ?";n.val(c);a.prepend(n);a.val(c);n.prop("selected",
+!0)};m.hasOption=function(a){return h.hasOwnProperty(a)};c.$on("$destroy",function(){m.renderUnknownOption=y})}],link:function(e,g,k,m){function h(a,c,d,e){d.$render=function(){var a=d.$viewValue;e.hasOption(a)?(z.parent()&&z.remove(),c.val(a),""===a&&v.prop("selected",!0)):D(a)&&v?c.val(""):e.renderUnknownOption(a)};c.on("change",function(){a.$apply(function(){z.parent()&&z.remove();d.$setViewValue(c.val())})})}function l(a,c,d){var e;d.$render=function(){var a=new ab(d.$viewValue);r(c.find("option"),
+function(c){c.selected=A(a.get(c.value))})};a.$watch(function(){za(e,d.$viewValue)||(e=ga(d.$viewValue),d.$render())});c.on("change",function(){a.$apply(function(){var a=[];r(c.find("option"),function(c){c.selected&&a.push(c.value)});d.$setViewValue(a)})})}function n(e,f,g){function k(){var a={"":[]},c=[""],d,h,s,t,w;s=g.$modelValue;t=v(e)||[];var E=n?Zb(t):t,I,M,B;M={};B=!1;if(q)if(h=g.$modelValue,u&&H(h))for(B=new ab([]),d={},w=0;w<h.length;w++)d[m]=h[w],B.put(u(e,d),h[w]);else B=new ab(h);w=B;
+var D,J;for(B=0;I=E.length,B<I;B++){h=B;if(n){h=E[B];if("$"===h.charAt(0))continue;M[n]=h}M[m]=t[h];d=p(e,M)||"";(h=a[d])||(h=a[d]=[],c.push(d));q?d=A(w.remove(u?u(e,M):r(e,M))):(u?(d={},d[m]=s,d=u(e,d)===u(e,M)):d=s===r(e,M),w=w||d);D=l(e,M);D=A(D)?D:"";h.push({id:u?u(e,M):n?E[B]:B,label:D,selected:d})}q||(x||null===s?a[""].unshift({id:"",label:"",selected:!w}):w||a[""].unshift({id:"?",label:"",selected:!0}));M=0;for(E=c.length;M<E;M++){d=c[M];h=a[d];z.length<=M?(s={element:y.clone().attr("label",
+d),label:h.label},t=[s],z.push(t),f.append(s.element)):(t=z[M],s=t[0],s.label!=d&&s.element.attr("label",s.label=d));D=null;B=0;for(I=h.length;B<I;B++)d=h[B],(w=t[B+1])?(D=w.element,w.label!==d.label&&D.text(w.label=d.label),w.id!==d.id&&D.val(w.id=d.id),D[0].selected!==d.selected&&(D.prop("selected",w.selected=d.selected),R&&D.prop("selected",w.selected))):(""===d.id&&x?J=x:(J=C.clone()).val(d.id).prop("selected",d.selected).attr("selected",d.selected).text(d.label),t.push({element:J,label:d.label,
+id:d.id,selected:d.selected}),D?D.after(J):s.element.append(J),D=J);for(B++;t.length>B;)t.pop().element.remove()}for(;z.length>M;)z.pop()[0].element.remove()}var h;if(!(h=s.match(d)))throw We("iexp",s,ha(f));var l=c(h[2]||h[1]),m=h[4]||h[6],n=h[5],p=c(h[3]||""),r=c(h[2]?h[1]:m),v=c(h[7]),u=h[8]?c(h[8]):null,z=[[{element:f,label:""}]];x&&(a(x)(e),x.removeClass("ng-scope"),x.remove());f.empty();f.on("change",function(){e.$apply(function(){var a,c=v(e)||[],d={},h,l,p,s,w,x,y;if(q)for(l=[],s=0,x=z.length;s<
+x;s++)for(a=z[s],p=1,w=a.length;p<w;p++){if((h=a[p].element)[0].selected){h=h.val();n&&(d[n]=h);if(u)for(y=0;y<c.length&&(d[m]=c[y],u(e,d)!=h);y++);else d[m]=c[h];l.push(r(e,d))}}else if(h=f.val(),"?"==h)l=t;else if(""===h)l=null;else if(u)for(y=0;y<c.length;y++){if(d[m]=c[y],u(e,d)==h){l=r(e,d);break}}else d[m]=c[h],n&&(d[n]=h),l=r(e,d);g.$setViewValue(l);k()})});g.$render=k;e.$watchCollection(v,k);q&&e.$watchCollection(function(){return g.$modelValue},k)}if(m[1]){var p=m[0];m=m[1];var q=k.multiple,
+s=k.ngOptions,x=!1,v,C=u(X.createElement("option")),y=u(X.createElement("optgroup")),z=C.clone();k=0;for(var w=g.children(),B=w.length;k<B;k++)if(""===w[k].value){v=x=w.eq(k);break}p.init(m,x,z);q&&(m.$isEmpty=function(a){return!a||0===a.length});s?n(e,g,m):q?l(e,g,m):h(e,g,m,p)}}}}],id=["$interpolate",function(a){var c={addOption:y,removeOption:y};return{restrict:"E",priority:100,compile:function(d,e){if(D(e.value)){var f=a(d.text(),!0);f||e.$set("value",d.text())}return function(a,d,e){var h=d.parent(),
+l=h.data("$selectController")||h.parent().data("$selectController");l&&l.databound?d.prop("selected",!1):l=c;f?a.$watch(f,function(a,c){e.$set("value",a);a!==c&&l.removeOption(c);l.addOption(a)}):l.addOption(e.value);d.on("$destroy",function(){l.removeOption(e.value)})}}}}],hd=$({restrict:"E",terminal:!0});Q.angular.bootstrap?console.log("WARNING: Tried to load angular more than once."):((Da=Q.jQuery)&&Da.fn.on?(u=Da,B(Da.fn,{scope:La.scope,isolateScope:La.isolateScope,controller:La.controller,injector:La.injector,
+inheritedData:La.inheritedData}),Gb("remove",!0,!0,!1),Gb("empty",!1,!1,!1),Gb("html",!1,!1,!0)):u=S,Ua.element=u,$c(Ua),u(X).ready(function(){Xc(X,fc)}))})(window,document);!window.angular.$$csp()&&window.angular.element(document).find("head").prepend('<style type="text/css">@charset "UTF-8";[ng\\:cloak],[ng-cloak],[data-ng-cloak],[x-ng-cloak],.ng-cloak,.x-ng-cloak,.ng-hide{display:none !important;}ng\\:form{display:block;}.ng-animate-block-transitions{transition:0s all!important;-webkit-transition:0s all!important;}.ng-hide-add-active,.ng-hide-remove{display:block!important;}</style>');
+//# sourceMappingURL=angular.min.js.map
diff --git a/bitbake/lib/toaster/toastergui/static/js/angular.min.js.map b/bitbake/lib/toaster/toastergui/static/js/angular.min.js.map
new file mode 100644
index 0000000000..8aa40d383c
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/angular.min.js.map
@@ -0,0 +1,8 @@
+{
+"version":3,
+"file":"angular.min.js",
+"lineCount":214,
+"mappings":"A;;;;;aAKC,SAAQ,CAACA,CAAD,CAASC,CAAT,CAAmBC,CAAnB,CAA8B,CA8BvCC,QAAAA,EAAAA,CAAAA,CAAAA,CAAAA,CAAAA,MAAAA,SAAAA,EAAAA,CAAAA,IAAAA,EAAAA,SAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,EAAAA,GAAAA,EAAAA,CAAAA,CAAAA,CAAAA,CAAAA,GAAAA,CAAAA,EAAAA,EAAAA,CAAAA,CAAAA,uCAAAA,EAAAA,CAAAA,CAAAA,CAAAA,CAAAA,GAAAA,CAAAA,EAAAA,EAAAA,CAAAA,KAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,SAAAA,OAAAA,CAAAA,CAAAA,EAAAA,CAAAA,CAAAA,CAAAA,CAAAA,EAAAA,CAAAA,EAAAA,CAAAA,CAAAA,GAAAA,CAAAA,GAAAA,EAAAA,GAAAA,EAAAA,CAAAA,CAAAA,CAAAA,EAAAA,GAAAA,CAAAA,kBAAAA,CAAAA,UAAAA,EAAAA,MAAAA,UAAAA,CAAAA,CAAAA,CAAAA,CAAAA,SAAAA,CAAAA,CAAAA,CAAAA,SAAAA,EAAAA,QAAAA,CAAAA,aAAAA,CAAAA,EAAAA,CAAAA,CAAAA,WAAAA,EAAAA,MAAAA,UAAAA,CAAAA,CAAAA,CAAAA,CAAAA,WAAAA,CAAAA,QAAAA,EAAAA,MAAAA,UAAAA,CAAAA,CAAAA,CAAAA,CAAAA,IAAAA,UAAAA,CAAAA,SAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,SAAAA,CAAAA,CAAAA,CAAAA,CAAAA,OAAAA,MAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAwOAC,QAASA,GAAW,CAACC,CAAD,CAAM,CACxB,GAAW,IAAX,EAAIA,CAAJ,EAAmBC,EAAA,CAASD,CAAT,CAAnB,CACE,MAAO,CAAA,CAGT;IAAIE,EAASF,CAAAE,OAEb,OAAqB,EAArB,GAAIF,CAAAG,SAAJ,EAA0BD,CAA1B,CACS,CAAA,CADT,CAIOE,CAAA,CAASJ,CAAT,CAJP,EAIwBK,CAAA,CAAQL,CAAR,CAJxB,EAImD,CAJnD,GAIwCE,CAJxC,EAKyB,QALzB,GAKO,MAAOA,EALd,EAK8C,CAL9C,CAKqCA,CALrC,EAKoDA,CALpD,CAK6D,CAL7D,GAKmEF,EAZ3C,CA4C1BM,QAASA,EAAO,CAACN,CAAD,CAAMO,CAAN,CAAgBC,CAAhB,CAAyB,CACvC,IAAIC,CACJ,IAAIT,CAAJ,CACE,GAAIU,CAAA,CAAWV,CAAX,CAAJ,CACE,IAAKS,CAAL,GAAYT,EAAZ,CAGa,WAAX,EAAIS,CAAJ,GAAiC,QAAjC,EAA0BA,CAA1B,EAAoD,MAApD,EAA6CA,CAA7C,EAAgET,CAAAW,eAAhE,EAAsF,CAAAX,CAAAW,eAAA,CAAmBF,CAAnB,CAAtF,GACEF,CAAAK,KAAA,CAAcJ,CAAd,CAAuBR,CAAA,CAAIS,CAAJ,CAAvB,CAAiCA,CAAjC,CALN,KAQO,IAAIJ,CAAA,CAAQL,CAAR,CAAJ,EAAoBD,EAAA,CAAYC,CAAZ,CAApB,CACL,IAAKS,CAAL,CAAW,CAAX,CAAcA,CAAd,CAAoBT,CAAAE,OAApB,CAAgCO,CAAA,EAAhC,CACEF,CAAAK,KAAA,CAAcJ,CAAd,CAAuBR,CAAA,CAAIS,CAAJ,CAAvB,CAAiCA,CAAjC,CAFG,KAIA,IAAIT,CAAAM,QAAJ,EAAmBN,CAAAM,QAAnB,GAAmCA,CAAnC,CACHN,CAAAM,QAAA,CAAYC,CAAZ,CAAsBC,CAAtB,CADG,KAGL,KAAKC,CAAL,GAAYT,EAAZ,CACMA,CAAAW,eAAA,CAAmBF,CAAnB,CAAJ,EACEF,CAAAK,KAAA,CAAcJ,CAAd,CAAuBR,CAAA,CAAIS,CAAJ,CAAvB,CAAiCA,CAAjC,CAKR,OAAOT,EAzBgC,CA4BzCa,QAASA,GAAU,CAACb,CAAD,CAAM,CACvB,IAAIc,EAAO,EAAX,CACSL,CAAT,KAASA,CAAT,GAAgBT,EAAhB,CACMA,CAAAW,eAAA,CAAmBF,CAAnB,CAAJ,EACEK,CAAAC,KAAA,CAAUN,CAAV,CAGJ,OAAOK,EAAAE,KAAA,EAPgB,CAUzBC,QAASA,GAAa,CAACjB,CAAD;AAAMO,CAAN,CAAgBC,CAAhB,CAAyB,CAE7C,IADA,IAAIM,EAAOD,EAAA,CAAWb,CAAX,CAAX,CACUkB,EAAI,CAAd,CAAiBA,CAAjB,CAAqBJ,CAAAZ,OAArB,CAAkCgB,CAAA,EAAlC,CACEX,CAAAK,KAAA,CAAcJ,CAAd,CAAuBR,CAAA,CAAIc,CAAA,CAAKI,CAAL,CAAJ,CAAvB,CAAqCJ,CAAA,CAAKI,CAAL,CAArC,CAEF,OAAOJ,EALsC,CAc/CK,QAASA,GAAa,CAACC,CAAD,CAAa,CACjC,MAAO,SAAQ,CAACC,CAAD,CAAQZ,CAAR,CAAa,CAAEW,CAAA,CAAWX,CAAX,CAAgBY,CAAhB,CAAF,CADK,CAYnCC,QAASA,GAAO,EAAG,CAIjB,IAHA,IAAIC,EAAQC,EAAAtB,OAAZ,CACIuB,CAEJ,CAAMF,CAAN,CAAA,CAAa,CACXA,CAAA,EACAE,EAAA,CAAQD,EAAA,CAAID,CAAJ,CAAAG,WAAA,CAAsB,CAAtB,CACR,IAAa,EAAb,EAAID,CAAJ,CAEE,MADAD,GAAA,CAAID,CAAJ,CACO,CADM,GACN,CAAAC,EAAAG,KAAA,CAAS,EAAT,CAET,IAAa,EAAb,EAAIF,CAAJ,CACED,EAAA,CAAID,CAAJ,CAAA,CAAa,GADf,KAIE,OADAC,GAAA,CAAID,CAAJ,CACO,CADMK,MAAAC,aAAA,CAAoBJ,CAApB,CAA4B,CAA5B,CACN,CAAAD,EAAAG,KAAA,CAAS,EAAT,CAXE,CAcbH,EAAAM,QAAA,CAAY,GAAZ,CACA,OAAON,GAAAG,KAAA,CAAS,EAAT,CAnBU,CA4BnBI,QAASA,GAAU,CAAC/B,CAAD,CAAMgC,CAAN,CAAS,CACtBA,CAAJ,CACEhC,CAAAiC,UADF,CACkBD,CADlB,CAIE,OAAOhC,CAAAiC,UALiB,CAuB5BC,QAASA,EAAM,CAACC,CAAD,CAAM,CACnB,IAAIH,EAAIG,CAAAF,UACR3B,EAAA,CAAQ8B,SAAR,CAAmB,QAAQ,CAACpC,CAAD,CAAM,CAC3BA,CAAJ,GAAYmC,CAAZ,EACE7B,CAAA,CAAQN,CAAR,CAAa,QAAQ,CAACqB,CAAD,CAAQZ,CAAR,CAAa,CAChC0B,CAAA,CAAI1B,CAAJ,CAAA,CAAWY,CADqB,CAAlC,CAF6B,CAAjC,CAQAU,GAAA,CAAWI
,CAAX,CAAeH,CAAf,CACA,OAAOG,EAXY,CAcrBE,QAASA,EAAG,CAACC,CAAD,CAAM,CAChB,MAAOC,SAAA,CAASD,CAAT;AAAc,EAAd,CADS,CAKlBE,QAASA,GAAO,CAACC,CAAD,CAASC,CAAT,CAAgB,CAC9B,MAAOR,EAAA,CAAO,KAAKA,CAAA,CAAO,QAAQ,EAAG,EAAlB,CAAsB,WAAWO,CAAX,CAAtB,CAAL,CAAP,CAA0DC,CAA1D,CADuB,CAoBhCC,QAASA,EAAI,EAAG,EAoBhBC,QAASA,GAAQ,CAACC,CAAD,CAAI,CAAC,MAAOA,EAAR,CAIrBC,QAASA,EAAO,CAACzB,CAAD,CAAQ,CAAC,MAAO,SAAQ,EAAG,CAAC,MAAOA,EAAR,CAAnB,CAcxB0B,QAASA,EAAW,CAAC1B,CAAD,CAAO,CAAC,MAAwB,WAAxB,GAAO,MAAOA,EAAf,CAe3B2B,QAASA,EAAS,CAAC3B,CAAD,CAAO,CAAC,MAAwB,WAAxB,GAAO,MAAOA,EAAf,CAgBzB4B,QAASA,EAAQ,CAAC5B,CAAD,CAAO,CAAC,MAAgB,KAAhB,EAAOA,CAAP,EAAyC,QAAzC,GAAwB,MAAOA,EAAhC,CAexBjB,QAASA,EAAQ,CAACiB,CAAD,CAAO,CAAC,MAAwB,QAAxB,GAAO,MAAOA,EAAf,CAexB6B,QAASA,GAAQ,CAAC7B,CAAD,CAAO,CAAC,MAAwB,QAAxB,GAAO,MAAOA,EAAf,CAexB8B,QAASA,GAAM,CAAC9B,CAAD,CAAQ,CACrB,MAAgC,eAAhC,GAAO+B,EAAAxC,KAAA,CAAcS,CAAd,CADc,CAsCvBX,QAASA,EAAU,CAACW,CAAD,CAAO,CAAC,MAAwB,UAAxB,GAAO,MAAOA,EAAf,CAU1BgC,QAASA,GAAQ,CAAChC,CAAD,CAAQ,CACvB,MAAgC,iBAAhC,GAAO+B,EAAAxC,KAAA,CAAcS,CAAd,CADgB,CA9mBc;AA0nBvCpB,QAASA,GAAQ,CAACD,CAAD,CAAM,CACrB,MAAOA,EAAP,EAAcA,CAAAJ,SAAd,EAA8BI,CAAAsD,SAA9B,EAA8CtD,CAAAuD,MAA9C,EAA2DvD,CAAAwD,YADtC,CAyDvBC,QAASA,GAAS,CAACC,CAAD,CAAO,CACvB,MAAO,EAAGA,CAAAA,CAAH,EACJ,EAAAA,CAAAC,SAAA,EACGD,CAAAE,KADH,EACgBF,CAAAG,KADhB,EAC6BH,CAAAI,KAD7B,CADI,CADgB,CA+BzBC,QAASA,GAAG,CAAC/D,CAAD,CAAMO,CAAN,CAAgBC,CAAhB,CAAyB,CACnC,IAAIwD,EAAU,EACd1D,EAAA,CAAQN,CAAR,CAAa,QAAQ,CAACqB,CAAD,CAAQE,CAAR,CAAe0C,CAAf,CAAqB,CACxCD,CAAAjD,KAAA,CAAaR,CAAAK,KAAA,CAAcJ,CAAd,CAAuBa,CAAvB,CAA8BE,CAA9B,CAAqC0C,CAArC,CAAb,CADwC,CAA1C,CAGA,OAAOD,EAL4B,CAwCrCE,QAASA,GAAO,CAACC,CAAD,CAAQnE,CAAR,CAAa,CAC3B,GAAImE,CAAAD,QAAJ,CAAmB,MAAOC,EAAAD,QAAA,CAAclE,CAAd,CAE1B,KAAK,IAAIkB,EAAI,CAAb,CAAgBA,CAAhB,CAAoBiD,CAAAjE,OAApB,CAAkCgB,CAAA,EAAlC,CACE,GAAIlB,CAAJ,GAAYmE,CAAA,CAAMjD,CAAN,CAAZ,CAAsB,MAAOA,EAE/B,OAAQ,EANmB,CAS7BkD,QAASA,GAAW,CAACD,CAAD,CAAQ9C,CAAR,CAAe,CACjC,IAAIE,EAAQ2C,EAAA,CAAQC,CAAR,CAAe9C,CAAf,CACA,EAAZ,EAAIE,CAAJ,EACE4C,CAAAE,OAAA,CAAa9C,CAAb,CAAoB,CAApB,CACF,OAAOF,EAJ0B,CA6EnCiD,QAASA,GAAI,CAACC,CAAD,CAASC,CAAT,CAAsBC,CAAtB,CAAmCC,CAAnC,CAA8C,CACzD,GAAIzE,EAAA,CAASsE,CAAT,CAAJ,EAAgCA,CAAhC,EAAgCA,CAjNlBI,WAiNd,EAAgCJ,CAjNAK,OAiNhC,CACE,KAAMC,GAAA,CAAS,MAAT,CAAN,CAIF,GAAKL,CAAL,CAcO,CACL,GAAID,CAAJ,GAAeC,CAAf,CAA4B,KAAMK,GAAA,CAAS,KAAT,CAAN,CAG5BJ,CAAA,CAAcA,CAAd,EAA6B,EAC7BC;CAAA,CAAYA,CAAZ,EAAyB,EAEzB,IAAIzB,CAAA,CAASsB,CAAT,CAAJ,CAAsB,CACpB,IAAIhD,EAAQ2C,EAAA,CAAQO,CAAR,CAAqBF,CAArB,CACZ,IAAe,EAAf,GAAIhD,CAAJ,CAAkB,MAAOmD,EAAA,CAAUnD,CAAV,CAEzBkD,EAAA1D,KAAA,CAAiBwD,CAAjB,CACAG,EAAA3D,KAAA,CAAeyD,CAAf,CALoB,CAStB,GAAInE,CAAA,CAAQkE,CAAR,CAAJ,CAEE,IAAM,IAAIrD,EADVsD,CAAAtE,OACUgB,CADW,CACrB,CAAiBA,CAAjB,CAAqBqD,CAAArE,OAArB,CAAoCgB,CAAA,EAApC,CACE4D,CAKA,CALSR,EAAA,CAAKC,CAAA,CAAOrD,CAAP,CAAL,CAAgB,IAAhB,CAAsBuD,CAAtB,CAAmCC,CAAnC,CAKT,CAJIzB,CAAA,CAASsB,CAAA,CAAOrD,CAAP,CAAT,CAIJ,GAHEuD,CAAA1D,KAAA,CAAiBwD,CAAA,CAAOrD,CAAP,CAAjB,CACA,CAAAwD,CAAA3D,KAAA,CAAe+D,CAAf,CAEF,EAAAN,CAAAzD,KAAA,CAAiB+D,CAAjB,CARJ,KAUO,CACL,IAAI9C,EAAIwC,CAAAvC,UACJ5B,EAAA,CAAQmE,CAAR,CAAJ,CACEA,CAAAtE,OADF,CACuB,CADvB,CAGEI,CAAA,CAAQkE,CAAR,CAAqB,QAAQ,CAACnD,CAAD,CAAQZ,CAAR,CAAa,CACxC,OAAO+D,CAAA,CAAY/D,CAAZ,CADiC,CAA1C,CAIF,KAAUA,CAAV,GAAiB8D,EAAjB,CACEO,CAKA,CALSR,EAAA,CAAKC,CAAA,CAAO9D,CAAP,CAAL,CAAkB,IAAlB,CAAwBgE,CAAxB,CAAqCC,CAArC,CAKT,CAJIzB,CAAA,CAASsB,CAAA,CAAO9D,CAAP,CAAT,CAIJ,GAHEgE,CAAA1D,KAAA,CAAiBwD,CAAA,CAAO9D,CAAP,CAAjB,CACA,CAAAiE,CAAA3D,KAAA,CAAe+D,CAAf,CAEF,EAAAN,CAAA,CAAY/D,CAAZ,CAAA,CAAmBqE,CAErB/C,GAAA,CAAWyC,CAAX,CAAuBxC,CAAvB,CAjBK,CA1BF,CAdP,IAEE,IADAwC,CACA,C
ADcD,CACd,CACMlE,CAAA,CAAQkE,CAAR,CAAJ,CACEC,CADF,CACgBF,EAAA,CAAKC,CAAL,CAAa,EAAb,CAAiBE,CAAjB,CAA8BC,CAA9B,CADhB,CAEWvB,EAAA,CAAOoB,CAAP,CAAJ,CACLC,CADK,CACS,IAAIO,IAAJ,CAASR,CAAAS,QAAA,EAAT,CADT,CAEI3B,EAAA,CAASkB,CAAT,CAAJ,EACLC,CACA,CADkBS,MAAJ,CAAWV,CAAAA,OAAX,CAA0BA,CAAAnB,SAAA,EAAA8B,MAAA,CAAwB,SAAxB,CAAA,CAAmC,CAAnC,CAA1B,CACd,CAAAV,CAAAW,UAAA,CAAwBZ,CAAAY,UAFnB,EAGIlC,CAAA,CAASsB,CAAT,CAHJ,GAILC,CAJK,CAISF,EAAA,CAAKC,CAAL,CAAa,EAAb,CAAiBE,CAAjB,CAA8BC,CAA9B,CAJT,CAsDX;MAAOF,EAnEkD,CAyE3DY,QAASA,GAAW,CAACC,CAAD,CAAMlD,CAAN,CAAW,CAC7B,GAAI9B,CAAA,CAAQgF,CAAR,CAAJ,CAAkB,CAChBlD,CAAA,CAAMA,CAAN,EAAa,EAEb,KAAM,IAAIjB,EAAI,CAAd,CAAiBA,CAAjB,CAAqBmE,CAAAnF,OAArB,CAAiCgB,CAAA,EAAjC,CACEiB,CAAA,CAAIjB,CAAJ,CAAA,CAASmE,CAAA,CAAInE,CAAJ,CAJK,CAAlB,IAMO,IAAI+B,CAAA,CAASoC,CAAT,CAAJ,CAGL,IAAS5E,CAAT,GAFA0B,EAEgBkD,CAFVlD,CAEUkD,EAFH,EAEGA,CAAAA,CAAhB,CACM,CAAA1E,EAAAC,KAAA,CAAoByE,CAApB,CAAyB5E,CAAzB,CAAJ,EAAyD,GAAzD,GAAuCA,CAAA6E,OAAA,CAAW,CAAX,CAAvC,EAAkF,GAAlF,GAAgE7E,CAAA6E,OAAA,CAAW,CAAX,CAAhE,GACEnD,CAAA,CAAI1B,CAAJ,CADF,CACa4E,CAAA,CAAI5E,CAAJ,CADb,CAMJ,OAAO0B,EAAP,EAAckD,CAjBe,CAkD/BE,QAASA,GAAM,CAACC,CAAD,CAAKC,CAAL,CAAS,CACtB,GAAID,CAAJ,GAAWC,CAAX,CAAe,MAAO,CAAA,CACtB,IAAW,IAAX,GAAID,CAAJ,EAA0B,IAA1B,GAAmBC,CAAnB,CAAgC,MAAO,CAAA,CACvC,IAAID,CAAJ,GAAWA,CAAX,EAAiBC,CAAjB,GAAwBA,CAAxB,CAA4B,MAAO,CAAA,CAHb,KAIlBC,EAAK,MAAOF,EAJM,CAIsB/E,CAC5C,IAAIiF,CAAJ,EADyBC,MAAOF,EAChC,EACY,QADZ,EACMC,CADN,CAEI,GAAIrF,CAAA,CAAQmF,CAAR,CAAJ,CAAiB,CACf,GAAI,CAACnF,CAAA,CAAQoF,CAAR,CAAL,CAAkB,MAAO,CAAA,CACzB,KAAKvF,CAAL,CAAcsF,CAAAtF,OAAd,GAA4BuF,CAAAvF,OAA5B,CAAuC,CACrC,IAAIO,CAAJ,CAAQ,CAAR,CAAWA,CAAX,CAAeP,CAAf,CAAuBO,CAAA,EAAvB,CACE,GAAI,CAAC8E,EAAA,CAAOC,CAAA,CAAG/E,CAAH,CAAP,CAAgBgF,CAAA,CAAGhF,CAAH,CAAhB,CAAL,CAA+B,MAAO,CAAA,CAExC,OAAO,CAAA,CAJ8B,CAFxB,CAAjB,IAQO,CAAA,GAAI0C,EAAA,CAAOqC,CAAP,CAAJ,CACL,MAAKrC,GAAA,CAAOsC,CAAP,CAAL,CACQG,KAAA,CAAMJ,CAAAR,QAAA,EAAN,CADR,EAC+BY,KAAA,CAAMH,CAAAT,QAAA,EAAN,CAD/B,EACwDQ,CAAAR,QAAA,EADxD;AACyES,CAAAT,QAAA,EADzE,CAAwB,CAAA,CAEnB,IAAI3B,EAAA,CAASmC,CAAT,CAAJ,EAAoBnC,EAAA,CAASoC,CAAT,CAApB,CACL,MAAOD,EAAApC,SAAA,EAAP,EAAwBqC,CAAArC,SAAA,EAExB,IAAYoC,CAAZ,EAAYA,CAhWJb,WAgWR,EAAYa,CAhWcZ,OAgW1B,EAA2Ba,CAA3B,EAA2BA,CAhWnBd,WAgWR,EAA2Bc,CAhWDb,OAgW1B,EAAkC3E,EAAA,CAASuF,CAAT,CAAlC,EAAkDvF,EAAA,CAASwF,CAAT,CAAlD,EAAkEpF,CAAA,CAAQoF,CAAR,CAAlE,CAA+E,MAAO,CAAA,CACtFI,EAAA,CAAS,EACT,KAAIpF,CAAJ,GAAW+E,EAAX,CACE,GAAsB,GAAtB,GAAI/E,CAAA6E,OAAA,CAAW,CAAX,CAAJ,EAA6B,CAAA5E,CAAA,CAAW8E,CAAA,CAAG/E,CAAH,CAAX,CAA7B,CAAA,CACA,GAAI,CAAC8E,EAAA,CAAOC,CAAA,CAAG/E,CAAH,CAAP,CAAgBgF,CAAA,CAAGhF,CAAH,CAAhB,CAAL,CAA+B,MAAO,CAAA,CACtCoF,EAAA,CAAOpF,CAAP,CAAA,CAAc,CAAA,CAFd,CAIF,IAAIA,CAAJ,GAAWgF,EAAX,CACE,GAAI,CAACI,CAAAlF,eAAA,CAAsBF,CAAtB,CAAL,EACsB,GADtB,GACIA,CAAA6E,OAAA,CAAW,CAAX,CADJ,EAEIG,CAAA,CAAGhF,CAAH,CAFJ,GAEgBZ,CAFhB,EAGI,CAACa,CAAA,CAAW+E,CAAA,CAAGhF,CAAH,CAAX,CAHL,CAG0B,MAAO,CAAA,CAEnC,OAAO,CAAA,CAnBF,CAuBX,MAAO,CAAA,CAtCe,CA0FxBqF,QAASA,GAAI,CAACC,CAAD,CAAOC,CAAP,CAAW,CACtB,IAAIC,EAA+B,CAAnB,CAAA7D,SAAAlC,OAAA,CAxBTgG,EAAAtF,KAAA,CAwB0CwB,SAxB1C,CAwBqD+D,CAxBrD,CAwBS,CAAiD,EACjE,OAAI,CAAAzF,CAAA,CAAWsF,CAAX,CAAJ,EAAwBA,CAAxB,WAAsCf,OAAtC,CAcSe,CAdT,CACSC,CAAA/F,OACA,CAAH,QAAQ,EAAG,CACT,MAAOkC,UAAAlC,OACA,CAAH8F,CAAAI,MAAA,CAASL,CAAT,CAAeE,CAAAI,OAAA,CAAiBH,EAAAtF,KAAA,CAAWwB,SAAX;AAAsB,CAAtB,CAAjB,CAAf,CAAG,CACH4D,CAAAI,MAAA,CAASL,CAAT,CAAeE,CAAf,CAHK,CAAR,CAKH,QAAQ,EAAG,CACT,MAAO7D,UAAAlC,OACA,CAAH8F,CAAAI,MAAA,CAASL,CAAT,CAAe3D,SAAf,CAAG,CACH4D,CAAApF,KAAA,CAAQmF,CAAR,CAHK,CATK,CAqBxBO,QAASA,GAAc,CAAC7F,CAAD,CAAMY,CAAN,CAAa,CAClC,IAAIkF,EAAMlF,CAES,SAAnB,GAAI,MAAO
Z,EAAX,EAAiD,GAAjD,GAA+BA,CAAA6E,OAAA,CAAW,CAAX,CAA/B,CACEiB,CADF,CACQ1G,CADR,CAEWI,EAAA,CAASoB,CAAT,CAAJ,CACLkF,CADK,CACC,SADD,CAEIlF,CAAJ,EAAczB,CAAd,GAA2ByB,CAA3B,CACLkF,CADK,CACC,WADD,CAEYlF,CAFZ,GAEYA,CAncLsD,WAicP,EAEYtD,CAncauD,OAiczB,IAGL2B,CAHK,CAGC,QAHD,CAMP,OAAOA,EAb2B,CA+BpCC,QAASA,GAAM,CAACxG,CAAD,CAAMyG,CAAN,CAAc,CAC3B,MAAmB,WAAnB,GAAI,MAAOzG,EAAX,CAAuCH,CAAvC,CACO6G,IAAAC,UAAA,CAAe3G,CAAf,CAAoBsG,EAApB,CAAoCG,CAAA,CAAS,IAAT,CAAgB,IAApD,CAFoB,CAkB7BG,QAASA,GAAQ,CAACC,CAAD,CAAO,CACtB,MAAOzG,EAAA,CAASyG,CAAT,CACA,CAADH,IAAAI,MAAA,CAAWD,CAAX,CAAC,CACDA,CAHgB,CAOxBE,QAASA,GAAS,CAAC1F,CAAD,CAAQ,CACH,UAArB,GAAI,MAAOA,EAAX,CACEA,CADF,CACU,CAAA,CADV,CAEWA,CAAJ,EAA8B,CAA9B,GAAaA,CAAAnB,OAAb,EACD8G,CACJ,CADQC,CAAA,CAAU,EAAV,CAAe5F,CAAf,CACR,CAAAA,CAAA,CAAQ,EAAO,GAAP,EAAE2F,CAAF,EAAmB,GAAnB,EAAcA,CAAd,EAA+B,OAA/B,EAA0BA,CAA1B,EAA+C,IAA/C,EAA0CA,CAA1C,EAA4D,GAA5D,EAAuDA,CAAvD,EAAwE,IAAxE,EAAmEA,CAAnE,CAFH,EAIL3F,CAJK,CAIG,CAAA,CAEV;MAAOA,EATiB,CAe1B6F,QAASA,GAAW,CAACC,CAAD,CAAU,CAC5BA,CAAA,CAAUC,CAAA,CAAOD,CAAP,CAAAE,MAAA,EACV,IAAI,CAGFF,CAAAG,MAAA,EAHE,CAIF,MAAMC,CAAN,CAAS,EAGX,IAAIC,EAAWJ,CAAA,CAAO,OAAP,CAAAK,OAAA,CAAuBN,CAAvB,CAAAO,KAAA,EACf,IAAI,CACF,MAHcC,EAGP,GAAAR,CAAA,CAAQ,CAAR,CAAAhH,SAAA,CAAoC8G,CAAA,CAAUO,CAAV,CAApC,CACHA,CAAAtC,MAAA,CACQ,YADR,CACA,CAAsB,CAAtB,CAAA0C,QAAA,CACU,aADV,CACyB,QAAQ,CAAC1C,CAAD,CAAQvB,CAAR,CAAkB,CAAE,MAAO,GAAP,CAAasD,CAAA,CAAUtD,CAAV,CAAf,CADnD,CAHF,CAKF,MAAM4D,CAAN,CAAS,CACT,MAAON,EAAA,CAAUO,CAAV,CADE,CAfiB,CAgC9BK,QAASA,GAAqB,CAACxG,CAAD,CAAQ,CACpC,GAAI,CACF,MAAOyG,mBAAA,CAAmBzG,CAAnB,CADL,CAEF,MAAMkG,CAAN,CAAS,EAHyB,CAatCQ,QAASA,GAAa,CAAYC,CAAZ,CAAsB,CAAA,IACtChI,EAAM,EADgC,CAC5BiI,CAD4B,CACjBxH,CACzBH,EAAA,CAAS4H,CAAAF,CAAAE,EAAY,EAAZA,OAAA,CAAsB,GAAtB,CAAT,CAAqC,QAAQ,CAACF,CAAD,CAAW,CACjDA,CAAL,GACEC,CAEA,CAFYD,CAAAJ,QAAA,CAAiB,KAAjB,CAAuB,KAAvB,CAAAM,MAAA,CAAoC,GAApC,CAEZ,CADAzH,CACA,CADMoH,EAAA,CAAsBI,CAAA,CAAU,CAAV,CAAtB,CACN,CAAKjF,CAAA,CAAUvC,CAAV,CAAL,GACM8F,CACJ,CADUvD,CAAA,CAAUiF,CAAA,CAAU,CAAV,CAAV,CAAA,CAA0BJ,EAAA,CAAsBI,CAAA,CAAU,CAAV,CAAtB,CAA1B,CAAgE,CAAA,CAC1E,CAAKtH,EAAAC,KAAA,CAAoBZ,CAApB,CAAyBS,CAAzB,CAAL,CAEUJ,CAAA,CAAQL,CAAA,CAAIS,CAAJ,CAAR,CAAH,CACLT,CAAA,CAAIS,CAAJ,CAAAM,KAAA,CAAcwF,CAAd,CADK,CAGLvG,CAAA,CAAIS,CAAJ,CAHK,CAGM,CAACT,CAAA,CAAIS,CAAJ,CAAD,CAAU8F,CAAV,CALb,CACEvG,CAAA,CAAIS,CAAJ,CADF,CACa8F,CAHf,CAHF,CADsD,CAAxD,CAgBA,OAAOvG,EAlBmC,CAqB5CmI,QAASA,GAAU,CAACnI,CAAD,CAAM,CACvB,IAAIoI;AAAQ,EACZ9H,EAAA,CAAQN,CAAR,CAAa,QAAQ,CAACqB,CAAD,CAAQZ,CAAR,CAAa,CAC5BJ,CAAA,CAAQgB,CAAR,CAAJ,CACEf,CAAA,CAAQe,CAAR,CAAe,QAAQ,CAACgH,CAAD,CAAa,CAClCD,CAAArH,KAAA,CAAWuH,EAAA,CAAe7H,CAAf,CAAoB,CAAA,CAApB,CAAX,EAC2B,CAAA,CAAf,GAAA4H,CAAA,CAAsB,EAAtB,CAA2B,GAA3B,CAAiCC,EAAA,CAAeD,CAAf,CAA2B,CAAA,CAA3B,CAD7C,EADkC,CAApC,CADF,CAMAD,CAAArH,KAAA,CAAWuH,EAAA,CAAe7H,CAAf,CAAoB,CAAA,CAApB,CAAX,EACsB,CAAA,CAAV,GAAAY,CAAA,CAAiB,EAAjB,CAAsB,GAAtB,CAA4BiH,EAAA,CAAejH,CAAf,CAAsB,CAAA,CAAtB,CADxC,EAPgC,CAAlC,CAWA,OAAO+G,EAAAlI,OAAA,CAAekI,CAAAzG,KAAA,CAAW,GAAX,CAAf,CAAiC,EAbjB,CA4BzB4G,QAASA,GAAgB,CAAChC,CAAD,CAAM,CAC7B,MAAO+B,GAAA,CAAe/B,CAAf,CAAoB,CAAA,CAApB,CAAAqB,QAAA,CACY,OADZ,CACqB,GADrB,CAAAA,QAAA,CAEY,OAFZ,CAEqB,GAFrB,CAAAA,QAAA,CAGY,OAHZ,CAGqB,GAHrB,CADsB,CAmB/BU,QAASA,GAAc,CAAC/B,CAAD,CAAMiC,CAAN,CAAuB,CAC5C,MAAOC,mBAAA,CAAmBlC,CAAnB,CAAAqB,QAAA,CACY,OADZ,CACqB,GADrB,CAAAA,QAAA,CAEY,OAFZ,CAEqB,GAFrB,CAAAA,QAAA,CAGY,MAHZ,CAGoB,GAHpB,CAAAA,QAAA,CAIY,OAJZ,CAIqB,GAJrB,CAAAA,QAAA,CAKY,MALZ,CAKqBY,CAAA,CAAkB,KAAlB,CAA0B,GAL/C,CADqC,CAwD9CE,QAASA,GAAW,CAACvB,CAAD,CAAUwB,CAAV,CAAqB,CAOvClB,QAASA,EAAM,CAACN,CAAD,CAAU,CACvBA,CAAA,EAAWyB,CAAA7H,KAAA,CAA
coG,CAAd,CADY,CAPc,IACnCyB,EAAW,CAACzB,CAAD,CADwB,CAEnC0B,CAFmC,CAGnCC,CAHmC,CAInCC,EAAQ,CAAC,QAAD,CAAW,QAAX,CAAqB,UAArB;AAAiC,aAAjC,CAJ2B,CAKnCC,EAAsB,mCAM1B1I,EAAA,CAAQyI,CAAR,CAAe,QAAQ,CAACE,CAAD,CAAO,CAC5BF,CAAA,CAAME,CAAN,CAAA,CAAc,CAAA,CACdxB,EAAA,CAAO7H,CAAAsJ,eAAA,CAAwBD,CAAxB,CAAP,CACAA,EAAA,CAAOA,CAAArB,QAAA,CAAa,GAAb,CAAkB,KAAlB,CACHT,EAAAgC,iBAAJ,GACE7I,CAAA,CAAQ6G,CAAAgC,iBAAA,CAAyB,GAAzB,CAA+BF,CAA/B,CAAR,CAA8CxB,CAA9C,CAEA,CADAnH,CAAA,CAAQ6G,CAAAgC,iBAAA,CAAyB,GAAzB,CAA+BF,CAA/B,CAAsC,KAAtC,CAAR,CAAsDxB,CAAtD,CACA,CAAAnH,CAAA,CAAQ6G,CAAAgC,iBAAA,CAAyB,GAAzB,CAA+BF,CAA/B,CAAsC,GAAtC,CAAR,CAAoDxB,CAApD,CAHF,CAJ4B,CAA9B,CAWAnH,EAAA,CAAQsI,CAAR,CAAkB,QAAQ,CAACzB,CAAD,CAAU,CAClC,GAAI,CAAC0B,CAAL,CAAiB,CAEf,IAAI3D,EAAQ8D,CAAAI,KAAA,CADI,GACJ,CADUjC,CAAAkC,UACV,CAD8B,GAC9B,CACRnE,EAAJ,EACE2D,CACA,CADa1B,CACb,CAAA2B,CAAA,CAAUlB,CAAA1C,CAAA,CAAM,CAAN,CAAA0C,EAAY,EAAZA,SAAA,CAAwB,MAAxB,CAAgC,GAAhC,CAFZ,EAIEtH,CAAA,CAAQ6G,CAAAmC,WAAR,CAA4B,QAAQ,CAACzF,CAAD,CAAO,CACpCgF,CAAAA,CAAL,EAAmBE,CAAA,CAAMlF,CAAAoF,KAAN,CAAnB,GACEJ,CACA,CADa1B,CACb,CAAA2B,CAAA,CAASjF,CAAAxC,MAFX,CADyC,CAA3C,CAPa,CADiB,CAApC,CAiBIwH,EAAJ,EACEF,CAAA,CAAUE,CAAV,CAAsBC,CAAA,CAAS,CAACA,CAAD,CAAT,CAAoB,EAA1C,CAxCqC,CAkGzCH,QAASA,GAAS,CAACxB,CAAD,CAAUoC,CAAV,CAAmB,CACnC,IAAIC,EAAcA,QAAQ,EAAG,CAC3BrC,CAAA,CAAUC,CAAA,CAAOD,CAAP,CAEV,IAAIA,CAAAsC,SAAA,EAAJ,CAAwB,CACtB,IAAIC,EAAOvC,CAAA,CAAQ,CAAR,CAAD,GAAgBvH,CAAhB;AAA4B,UAA5B,CAAyCsH,EAAA,CAAYC,CAAZ,CAEnD,MAAMtC,GAAA,CACF,SADE,CAGF6E,CAAA9B,QAAA,CAAY,GAAZ,CAAgB,MAAhB,CAAAA,QAAA,CAAgC,GAAhC,CAAoC,MAApC,CAHE,CAAN,CAHsB,CASxB2B,CAAA,CAAUA,CAAV,EAAqB,EACrBA,EAAAzH,QAAA,CAAgB,CAAC,UAAD,CAAa,QAAQ,CAAC6H,CAAD,CAAW,CAC9CA,CAAAtI,MAAA,CAAe,cAAf,CAA+B8F,CAA/B,CAD8C,CAAhC,CAAhB,CAGAoC,EAAAzH,QAAA,CAAgB,IAAhB,CACI2H,EAAAA,CAAWG,EAAA,CAAeL,CAAf,CACfE,EAAAI,OAAA,CAAgB,CAAC,YAAD,CAAe,cAAf,CAA+B,UAA/B,CAA2C,WAA3C,CAAwD,UAAxD,CACb,QAAQ,CAACC,CAAD,CAAQ3C,CAAR,CAAiB4C,CAAjB,CAA0BN,CAA1B,CAAoCO,CAApC,CAA6C,CACpDF,CAAAG,OAAA,CAAa,QAAQ,EAAG,CACtB9C,CAAA+C,KAAA,CAAa,WAAb,CAA0BT,CAA1B,CACAM,EAAA,CAAQ5C,CAAR,CAAA,CAAiB2C,CAAjB,CAFsB,CAAxB,CADoD,CADxC,CAAhB,CAQA,OAAOL,EA1BoB,CAA7B,CA6BIU,EAAqB,sBAEzB,IAAIxK,CAAJ,EAAc,CAACwK,CAAAC,KAAA,CAAwBzK,CAAAsJ,KAAxB,CAAf,CACE,MAAOO,EAAA,EAGT7J,EAAAsJ,KAAA,CAActJ,CAAAsJ,KAAArB,QAAA,CAAoBuC,CAApB,CAAwC,EAAxC,CACdE,GAAAC,gBAAA,CAA0BC,QAAQ,CAACC,CAAD,CAAe,CAC/ClK,CAAA,CAAQkK,CAAR,CAAsB,QAAQ,CAAC1B,CAAD,CAAS,CACrCS,CAAAxI,KAAA,CAAa+H,CAAb,CADqC,CAAvC,CAGAU,EAAA,EAJ+C,CArCd,CA8CrCiB,QAASA,GAAU,CAACxB,CAAD,CAAOyB,CAAP,CAAkB,CACnCA,CAAA;AAAYA,CAAZ,EAAyB,GACzB,OAAOzB,EAAArB,QAAA,CAAa+C,EAAb,CAAgC,QAAQ,CAACC,CAAD,CAASC,CAAT,CAAc,CAC3D,OAAQA,CAAA,CAAMH,CAAN,CAAkB,EAA1B,EAAgCE,CAAAE,YAAA,EAD2B,CAAtD,CAF4B,CAmCrCC,QAASA,GAAS,CAACC,CAAD,CAAM/B,CAAN,CAAYgC,CAAZ,CAAoB,CACpC,GAAI,CAACD,CAAL,CACE,KAAMnG,GAAA,CAAS,MAAT,CAA2CoE,CAA3C,EAAmD,GAAnD,CAA0DgC,CAA1D,EAAoE,UAApE,CAAN,CAEF,MAAOD,EAJ6B,CAOtCE,QAASA,GAAW,CAACF,CAAD,CAAM/B,CAAN,CAAYkC,CAAZ,CAAmC,CACjDA,CAAJ,EAA6B9K,CAAA,CAAQ2K,CAAR,CAA7B,GACIA,CADJ,CACUA,CAAA,CAAIA,CAAA9K,OAAJ,CAAiB,CAAjB,CADV,CAIA6K,GAAA,CAAUrK,CAAA,CAAWsK,CAAX,CAAV,CAA2B/B,CAA3B,CAAiC,sBAAjC,EACK+B,CAAA,EAAsB,QAAtB,GAAO,MAAOA,EAAd,CAAiCA,CAAAI,YAAAnC,KAAjC,EAAyD,QAAzD,CAAoE,MAAO+B,EADhF,EAEA,OAAOA,EAP8C,CAevDK,QAASA,GAAuB,CAACpC,CAAD,CAAOzI,CAAP,CAAgB,CAC9C,GAAa,gBAAb,GAAIyI,CAAJ,CACE,KAAMpE,GAAA,CAAS,SAAT,CAA8DrE,CAA9D,CAAN,CAF4C,CAchD8K,QAASA,GAAM,CAACtL,CAAD,CAAMuL,CAAN,CAAYC,CAAZ,CAA2B,CACxC,GAAI,CAACD,CAAL,CAAW,MAAOvL,EACdc,EAAAA,CAAOyK,CAAArD,MAAA,CAAW,GAAX,CAKX,KAJA,IAAIzH,CAAJ,CACIgL,EAAezL,CADnB,CAEI0L,EAAM5K,CAAAZ,OAFV,CAISgB,EAAI,CAAb,CAAgBA,CA
AhB,CAAoBwK,CAApB,CAAyBxK,CAAA,EAAzB,CACET,CACA,CADMK,CAAA,CAAKI,CAAL,CACN,CAAIlB,CAAJ,GACEA,CADF,CACQ,CAACyL,CAAD,CAAgBzL,CAAhB,EAAqBS,CAArB,CADR,CAIF,OAAI,CAAC+K,CAAL,EAAsB9K,CAAA,CAAWV,CAAX,CAAtB,CACS8F,EAAA,CAAK2F,CAAL,CAAmBzL,CAAnB,CADT,CAGOA,CAhBiC,CAwB1C2L,QAASA,GAAgB,CAACC,CAAD,CAAQ,CAAA,IAC3BC;AAAYD,CAAA,CAAM,CAAN,CACZE,EAAAA,CAAUF,CAAA,CAAMA,CAAA1L,OAAN,CAAqB,CAArB,CACd,IAAI2L,CAAJ,GAAkBC,CAAlB,CACE,MAAO1E,EAAA,CAAOyE,CAAP,CAIT,KAAIjD,EAAW,CAACzB,CAAD,CAEf,GAAG,CACDA,CAAA,CAAUA,CAAA4E,YACV,IAAI,CAAC5E,CAAL,CAAc,KACdyB,EAAA7H,KAAA,CAAcoG,CAAd,CAHC,CAAH,MAISA,CAJT,GAIqB2E,CAJrB,CAMA,OAAO1E,EAAA,CAAOwB,CAAP,CAhBwB,CA4BjCoD,QAASA,GAAiB,CAACrM,CAAD,CAAS,CAEjC,IAAIsM,EAAkBnM,CAAA,CAAO,WAAP,CAAtB,CACI+E,EAAW/E,CAAA,CAAO,IAAP,CAMXuK,EAAAA,CAAiB1K,CAHZ,QAGL0K,GAAiB1K,CAHE,QAGnB0K,CAH+B,EAG/BA,CAGJA,EAAA6B,SAAA,CAAmB7B,CAAA6B,SAAnB,EAAuCpM,CAEvC,OAAcuK,EARL,OAQT,GAAcA,CARS,OAQvB,CAAiC8B,QAAQ,EAAG,CAE1C,IAAI5C,EAAU,EAqDd,OAAOT,SAAe,CAACG,CAAD,CAAOmD,CAAP,CAAiBC,CAAjB,CAA2B,CAE7C,GAAa,gBAAb,GAKsBpD,CALtB,CACE,KAAMpE,EAAA,CAAS,SAAT,CAIoBrE,QAJpB,CAAN,CAKA4L,CAAJ,EAAgB7C,CAAA5I,eAAA,CAAuBsI,CAAvB,CAAhB,GACEM,CAAA,CAAQN,CAAR,CADF,CACkB,IADlB,CAGA,OAAcM,EA1ET,CA0EkBN,CA1ElB,CA0EL,GAAcM,CA1EK,CA0EIN,CA1EJ,CA0EnB,CAA6BkD,QAAQ,EAAG,CAmNtCG,QAASA,EAAW,CAACC,CAAD,CAAWC,CAAX,CAAmBC,CAAnB,CAAiC,CACnD,MAAO,SAAQ,EAAG,CAChBC,CAAA,CAAYD,CAAZ,EAA4B,MAA5B,CAAA,CAAoC,CAACF,CAAD,CAAWC,CAAX,CAAmBpK,SAAnB,CAApC,CACA,OAAOuK,EAFS,CADiC,CAlNrD,GAAI,CAACP,CAAL,CACE,KAAMH,EAAA,CAAgB,OAAhB;AAEiDhD,CAFjD,CAAN,CAMF,IAAIyD,EAAc,EAAlB,CAGIE,EAAY,EAHhB,CAKIC,EAASP,CAAA,CAAY,WAAZ,CAAyB,QAAzB,CALb,CAQIK,EAAiB,cAELD,CAFK,YAGPE,CAHO,UAcTR,CAdS,MAwBbnD,CAxBa,UAqCTqD,CAAA,CAAY,UAAZ,CAAwB,UAAxB,CArCS,SAgDVA,CAAA,CAAY,UAAZ,CAAwB,SAAxB,CAhDU,SA2DVA,CAAA,CAAY,UAAZ,CAAwB,SAAxB,CA3DU,OAsEZA,CAAA,CAAY,UAAZ,CAAwB,OAAxB,CAtEY,UAkFTA,CAAA,CAAY,UAAZ,CAAwB,UAAxB,CAAoC,SAApC,CAlFS,WAoHRA,CAAA,CAAY,kBAAZ,CAAgC,UAAhC,CApHQ,QA+HXA,CAAA,CAAY,iBAAZ,CAA+B,UAA/B,CA/HW,YA2IPA,CAAA,CAAY,qBAAZ,CAAmC,UAAnC,CA3IO,WAwJRA,CAAA,CAAY,kBAAZ,CAAgC,WAAhC,CAxJQ,QAqKXO,CArKW,KAiLdC,QAAQ,CAACC,CAAD,CAAQ,CACnBH,CAAA7L,KAAA,CAAegM,CAAf,CACA,OAAO,KAFY,CAjLF,CAuLjBV,EAAJ,EACEQ,CAAA,CAAOR,CAAP,CAGF,OAAQM,EA3M8B,CA1ET,EA0E/B,CAX+C,CAvDP,CART,EAQnC,CAdiC,CArjDI;AAw8DvCK,QAASA,GAAkB,CAAC3C,CAAD,CAAS,CAClCnI,CAAA,CAAOmI,CAAP,CAAgB,WACD1B,EADC,MAENrE,EAFM,QAGJpC,CAHI,QAIJqD,EAJI,SAKH6B,CALG,SAMH9G,CANG,UAOFsJ,EAPE,MAQNjH,CARM,MASNmD,EATM,QAUJU,EAVI,UAWFI,EAXE,UAYFhE,EAZE,aAaCG,CAbD,WAcDC,CAdC,UAeF5C,CAfE,YAgBAM,CAhBA,UAiBFuC,CAjBE,UAkBFC,EAlBE,WAmBDO,EAnBC,SAoBHpD,CApBG,SAqBH4M,EArBG,QAsBJ9J,EAtBI,WAuBD8D,CAvBC,WAwBDiG,EAxBC,WAyBD,SAAU,CAAV,CAzBC,UA0BFpN,CA1BE,OA2BLqN,EA3BK,CAAhB,CA8BAC,GAAA,CAAgBpB,EAAA,CAAkBrM,CAAlB,CAChB,IAAI,CACFyN,EAAA,CAAc,UAAd,CADE,CAEF,MAAO7F,CAAP,CAAU,CACV6F,EAAA,CAAc,UAAd,CAA0B,EAA1B,CAAAb,SAAA,CAAuC,SAAvC,CAAkDc,EAAlD,CADU,CAIZD,EAAA,CAAc,IAAd,CAAoB,CAAC,UAAD,CAApB,CAAkC,CAAC,UAAD,CAChCE,QAAiB,CAAC3D,CAAD,CAAW,CAE1BA,CAAA4C,SAAA,CAAkB,eACDgB,EADC,CAAlB,CAGA5D,EAAA4C,SAAA,CAAkB,UAAlB;AAA8BiB,EAA9B,CAAAC,UAAA,CACY,GACHC,EADG,OAECC,EAFD,UAGIA,EAHJ,MAIAC,EAJA,QAKEC,EALF,QAMEC,EANF,OAOCC,EAPD,QAQEC,EARF,QASEC,EATF,YAUMC,EAVN,gBAWUC,EAXV,SAYGC,EAZH,aAaOC,EAbP,YAcMC,EAdN,SAeGC,EAfH,cAgBQC,EAhBR,QAiBEC,EAjBF,QAkBEC,EAlBF,MAmBAC,EAnBA,WAoBKC,EApBL,QAqBEC,EArBF,eAsBSC,EAtBT,aAuBOC,EAvBP,UAwBIC,EAxBJ,QAyBEC,EAzBF,SA0BGC,EA1BH,UA2BIC,EA3BJ,cA4BQC,EA5BR,iBA6BWC,EA7BX,WA8BKC,EA9BL,cA+BQC,EA/BR,SAgCGC,EAhCH,QAiCEC,EAjCF,UAkCIC,EAlCJ,UAmCIC,EAnCJ,YAoCMA,EApCN,SAqCGC,EArCH,CADZ,CAAAnC,UAAA,CAwCY,WACGoC,EADH,CAxCZ,CAAApC,UAAA,CA2CYqC,EA3CZ,CAAArC,UAAA,CA4CYsC,EA5C
Z,CA6CApG;CAAA4C,SAAA,CAAkB,eACDyD,EADC,UAENC,EAFM,UAGNC,EAHM,eAIDC,EAJC,aAKHC,EALG,WAMLC,EANK,mBAOGC,EAPH,SAQPC,EARO,cASFC,EATE,WAULC,EAVK,OAWTC,EAXS,cAYFC,EAZE,WAaLC,EAbK,MAcVC,EAdU,QAeRC,EAfQ,YAgBJC,EAhBI,IAiBZC,EAjBY,MAkBVC,EAlBU,cAmBFC,EAnBE,UAoBNC,EApBM,gBAqBAC,EArBA,UAsBNC,EAtBM,SAuBPC,EAvBO,OAwBTC,EAxBS,iBAyBEC,EAzBF,CAAlB,CAlD0B,CADI,CAAlC,CAtCkC,CAuPpCC,QAASA,GAAS,CAACxI,CAAD,CAAO,CACvB,MAAOA,EAAArB,QAAA,CACG8J,EADH,CACyB,QAAQ,CAACC,CAAD,CAAIjH,CAAJ,CAAeE,CAAf,CAAuBgH,CAAvB,CAA+B,CACnE,MAAOA,EAAA,CAAShH,CAAAiH,YAAA,EAAT,CAAgCjH,CAD4B,CADhE,CAAAhD,QAAA,CAIGkK,EAJH,CAIoB,OAJpB,CADgB,CAgBzBC,QAASA,GAAuB,CAAC9I,CAAD,CAAO+I,CAAP,CAAqBC,CAArB,CAAkCC,CAAlC,CAAuD,CAMrFC,QAASA,EAAW,CAACC,CAAD,CAAQ,CAAA,IAEtBnO,EAAOgO,CAAA,EAAeG,CAAf,CAAuB,CAAC,IAAAC,OAAA,CAAYD,CAAZ,CAAD,CAAvB;AAA8C,CAAC,IAAD,CAF/B,CAGtBE,EAAYN,CAHU,CAItBO,CAJsB,CAIjBC,CAJiB,CAIPC,CAJO,CAKtBtL,CALsB,CAKbuL,CALa,CAKYC,CAEtC,IAAI,CAACT,CAAL,EAAqC,IAArC,EAA4BE,CAA5B,CACE,IAAA,CAAMnO,CAAA/D,OAAN,CAAA,CAEE,IADAqS,CACkB,CADZtO,CAAA2O,MAAA,EACY,CAAdJ,CAAc,CAAH,CAAG,CAAAC,CAAA,CAAYF,CAAArS,OAA9B,CAA0CsS,CAA1C,CAAqDC,CAArD,CAAgED,CAAA,EAAhE,CAOE,IANArL,CAMoB,CANVC,CAAA,CAAOmL,CAAA,CAAIC,CAAJ,CAAP,CAMU,CALhBF,CAAJ,CACEnL,CAAA0L,eAAA,CAAuB,UAAvB,CADF,CAGEP,CAHF,CAGc,CAACA,CAEK,CAAhBI,CAAgB,CAAH,CAAG,CAAAI,CAAA,CAAe5S,CAAAyS,CAAAzS,CAAWiH,CAAAwL,SAAA,EAAXzS,QAAnC,CACIwS,CADJ,CACiBI,CADjB,CAEIJ,CAAA,EAFJ,CAGEzO,CAAAlD,KAAA,CAAUgS,EAAA,CAAOJ,CAAA,CAASD,CAAT,CAAP,CAAV,CAKR,OAAOM,EAAA5M,MAAA,CAAmB,IAAnB,CAAyBhE,SAAzB,CAzBmB,CAL5B,IAAI4Q,EAAeD,EAAA/M,GAAA,CAAUiD,CAAV,CAAnB,CACA+J,EAAeA,CAAAC,UAAfD,EAAyCA,CACzCb,EAAAc,UAAA,CAAwBD,CACxBD,GAAA/M,GAAA,CAAUiD,CAAV,CAAA,CAAkBkJ,CAJmE,CAyGvFe,QAASA,EAAM,CAAC/L,CAAD,CAAU,CACvB,GAAIA,CAAJ,WAAuB+L,EAAvB,CACE,MAAO/L,EAEL/G,EAAA,CAAS+G,CAAT,CAAJ,GACEA,CADF,CACYgM,EAAA,CAAKhM,CAAL,CADZ,CAGA,IAAI,EAAE,IAAF,WAAkB+L,EAAlB,CAAJ,CAA+B,CAC7B,GAAI9S,CAAA,CAAS+G,CAAT,CAAJ,EAA8C,GAA9C,EAAyBA,CAAA7B,OAAA,CAAe,CAAf,CAAzB,CACE,KAAM8N,GAAA,CAAa,OAAb,CAAN,CAEF,MAAO,KAAIF,CAAJ,CAAW/L,CAAX,CAJsB,CAO/B,GAAI/G,CAAA,CAAS+G,CAAT,CAAJ,CAAuB,CACgBA,IAAAA,EAAAA,CA1BvC3G,EAAA,CAAqBZ,CACrB,KAAIyT,CAEJ,IAAKA,CAAL,CAAcC,EAAAlK,KAAA,CAAuB1B,CAAvB,CAAd,CACS,CAAA,CAAA,CAAA,CAAA,cAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CADT,KAAA,CAIO,IAAA;AAAA,CAAA,CA1CQgC,CACX6J,EAAAA,CAAW/S,CAAAgT,uBAAA,EACX5H,EAAAA,CAAQ,EAEZ,IARQ6H,EAAArJ,KAAA,CA8CD1C,CA9CC,CAQR,CAGO,CACLgM,CAAA,CAAMH,CAAAI,YAAA,CAAqBnT,CAAAoT,cAAA,CAAsB,KAAtB,CAArB,CAENlK,EAAA,CAAM,CAACmK,EAAAzK,KAAA,CAgCF1B,CAhCE,CAAD,EAA+B,CAAC,EAAD,CAAK,EAAL,CAA/B,EAAyC,CAAzC,CAAAoD,YAAA,EACNgJ,EAAA,CAAOC,EAAA,CAAQrK,CAAR,CAAP,EAAuBqK,EAAAC,SACvBN,EAAAO,UAAA,CAAgB,mBAAhB,CACEH,CAAA,CAAK,CAAL,CADF,CA8BKpM,CA7BOE,QAAA,CAAasM,EAAb,CAA+B,WAA/B,CADZ,CAC0DJ,CAAA,CAAK,CAAL,CAC1DJ,EAAAS,YAAA,CAAgBT,CAAAU,WAAhB,CAIA,KADAlT,CACA,CADI4S,CAAA,CAAK,CAAL,CACJ,CAAO5S,CAAA,EAAP,CAAA,CACEwS,CAAA,CAAMA,CAAAW,UAGHC,EAAA,CAAE,CAAP,KAAUC,CAAV,CAAab,CAAAc,WAAAtU,OAAb,CAAoCoU,CAApC,CAAsCC,CAAtC,CAA0C,EAAED,CAA5C,CAA+C1I,CAAA7K,KAAA,CAAW2S,CAAAc,WAAA,CAAeF,CAAf,CAAX,CAE/CZ,EAAA,CAAMH,CAAAa,WACNV,EAAAe,YAAA,CAAkB,EAlBb,CAHP,IAEE7I,EAAA7K,KAAA,CAAWP,CAAAkU,eAAA,CAoCNhN,CApCM,CAAX,CAuBF6L,EAAAkB,YAAA,CAAuB,EACvBlB,EAAAU,UAAA,CAAqB,EACrB,EAAA,CAAOrI,CAOP,CAuBE+I,EAAA,CAAe,IAAf,CAvBF,CAuBE,CACevN,EAAAmM,CAAO3T,CAAA4T,uBAAA,EAAPD,CACf9L,OAAA,CAAgB,IAAhB,CAHqB,CAAvB,IAKEkN,GAAA,CAAe,IAAf;AAAqBxN,CAArB,CAnBqB,CAuBzByN,QAASA,GAAW,CAACzN,CAAD,CAAU,CAC5B,MAAOA,EAAA0N,UAAA,CAAkB,CAAA,CAAlB,CADqB,CAI9BC,QAASA,GAAY,CAAC3N,CAAD,CAAS,CAC5B4N,EAAA,CAAiB5N,CAAjB,CAD4B,KAElBjG,EAAI,CAAd,KAAiByR,CAAjB,CAA4BxL,CAAAqN,WAA5B,EAAkD,EAAlD,CAAsDtT,
CAAtD,CAA0DyR,CAAAzS,OAA1D,CAA2EgB,CAAA,EAA3E,CACE4T,EAAA,CAAanC,CAAA,CAASzR,CAAT,CAAb,CAH0B,CAO9B8T,QAASA,GAAS,CAAC7N,CAAD,CAAU8N,CAAV,CAAgBjP,CAAhB,CAAoBkP,CAApB,CAAiC,CACjD,GAAIlS,CAAA,CAAUkS,CAAV,CAAJ,CAA4B,KAAM9B,GAAA,CAAa,SAAb,CAAN,CADqB,IAG7C+B,EAASC,EAAA,CAAmBjO,CAAnB,CAA4B,QAA5B,CACAiO,GAAAC,CAAmBlO,CAAnBkO,CAA4B,QAA5BA,CAEb,GAEItS,CAAA,CAAYkS,CAAZ,CAAJ,CACE3U,CAAA,CAAQ6U,CAAR,CAAgB,QAAQ,CAACG,CAAD,CAAeL,CAAf,CAAqB,CAC3CM,EAAA,CAAsBpO,CAAtB,CAA+B8N,CAA/B,CAAqCK,CAArC,CACA,QAAOH,CAAA,CAAOF,CAAP,CAFoC,CAA7C,CADF,CAME3U,CAAA,CAAQ2U,CAAA/M,MAAA,CAAW,GAAX,CAAR,CAAyB,QAAQ,CAAC+M,CAAD,CAAO,CAClClS,CAAA,CAAYiD,CAAZ,CAAJ,EACEuP,EAAA,CAAsBpO,CAAtB,CAA+B8N,CAA/B,CAAqCE,CAAA,CAAOF,CAAP,CAArC,CACA,CAAA,OAAOE,CAAA,CAAOF,CAAP,CAFT,EAIE7Q,EAAA,CAAY+Q,CAAA,CAAOF,CAAP,CAAZ,EAA4B,EAA5B,CAAgCjP,CAAhC,CALoC,CAAxC,CARF,CANiD,CAyBnD+O,QAASA,GAAgB,CAAC5N,CAAD,CAAU8B,CAAV,CAAgB,CAAA,IACnCuM,EAAYrO,CAAAsO,MADuB,CAEnCC,EAAeC,EAAA,CAAQH,CAAR,CAEfE,EAAJ,GACMzM,CAAJ,CACE,OAAO0M,EAAA,CAAQH,CAAR,CAAAtL,KAAA,CAAwBjB,CAAxB,CADT,EAKIyM,CAAAL,OAKJ,GAJEK,CAAAP,OAAAS,SACA,EADgCF,CAAAL,OAAA,CAAoB,EAApB,CAAwB,UAAxB,CAChC,CAAAL,EAAA,CAAU7N,CAAV,CAGF,EADA,OAAOwO,EAAA,CAAQH,CAAR,CACP,CAAArO,CAAAsO,MAAA,CAAgB5V,CAVhB,CADF,CAJuC,CAmBzCuV,QAASA,GAAkB,CAACjO,CAAD,CAAU1G,CAAV,CAAeY,CAAf,CAAsB,CAAA,IAC3CmU;AAAYrO,CAAAsO,MAD+B,CAE3CC,EAAeC,EAAA,CAAQH,CAAR,EAAsB,EAAtB,CAEnB,IAAIxS,CAAA,CAAU3B,CAAV,CAAJ,CACOqU,CAIL,GAHEvO,CAAAsO,MACA,CADgBD,CAChB,CA1NuB,EAAEK,EA0NzB,CAAAH,CAAA,CAAeC,EAAA,CAAQH,CAAR,CAAf,CAAoC,EAEtC,EAAAE,CAAA,CAAajV,CAAb,CAAA,CAAoBY,CALtB,KAOE,OAAOqU,EAAP,EAAuBA,CAAA,CAAajV,CAAb,CAXsB,CAejDqV,QAASA,GAAU,CAAC3O,CAAD,CAAU1G,CAAV,CAAeY,CAAf,CAAsB,CAAA,IACnC6I,EAAOkL,EAAA,CAAmBjO,CAAnB,CAA4B,MAA5B,CAD4B,CAEnC4O,EAAW/S,CAAA,CAAU3B,CAAV,CAFwB,CAGnC2U,EAAa,CAACD,CAAdC,EAA0BhT,CAAA,CAAUvC,CAAV,CAHS,CAInCwV,EAAiBD,CAAjBC,EAA+B,CAAChT,CAAA,CAASxC,CAAT,CAE/ByJ,EAAL,EAAc+L,CAAd,EACEb,EAAA,CAAmBjO,CAAnB,CAA4B,MAA5B,CAAoC+C,CAApC,CAA2C,EAA3C,CAGF,IAAI6L,CAAJ,CACE7L,CAAA,CAAKzJ,CAAL,CAAA,CAAYY,CADd,KAGE,IAAI2U,CAAJ,CAAgB,CACd,GAAIC,CAAJ,CAEE,MAAO/L,EAAP,EAAeA,CAAA,CAAKzJ,CAAL,CAEfyB,EAAA,CAAOgI,CAAP,CAAazJ,CAAb,CALY,CAAhB,IAQE,OAAOyJ,EArB4B,CA0BzCgM,QAASA,GAAc,CAAC/O,CAAD,CAAUgP,CAAV,CAAoB,CACzC,MAAKhP,EAAAiP,aAAL,CAEuC,EAFvC,CACSxO,CAAA,GAAAA,EAAOT,CAAAiP,aAAA,CAAqB,OAArB,CAAPxO,EAAwC,EAAxCA,EAA8C,GAA9CA,SAAA,CAA2D,SAA3D,CAAsE,GAAtE,CAAA1D,QAAA,CACI,GADJ,CACUiS,CADV,CACqB,GADrB,CADT,CAAkC,CAAA,CADO,CAM3CE,QAASA,GAAiB,CAAClP,CAAD,CAAUmP,CAAV,CAAsB,CAC1CA,CAAJ,EAAkBnP,CAAAoP,aAAlB,EACEjW,CAAA,CAAQgW,CAAApO,MAAA,CAAiB,GAAjB,CAAR,CAA+B,QAAQ,CAACsO,CAAD,CAAW,CAChDrP,CAAAoP,aAAA,CAAqB,OAArB,CAA8BpD,EAAA,CACzBvL,CAAA,GAAAA,EAAOT,CAAAiP,aAAA,CAAqB,OAArB,CAAPxO,EAAwC,EAAxCA,EAA8C,GAA9CA,SAAA,CACQ,SADR;AACmB,GADnB,CAAAA,QAAA,CAEQ,GAFR,CAEcuL,EAAA,CAAKqD,CAAL,CAFd,CAE+B,GAF/B,CAEoC,GAFpC,CADyB,CAA9B,CADgD,CAAlD,CAF4C,CAYhDC,QAASA,GAAc,CAACtP,CAAD,CAAUmP,CAAV,CAAsB,CAC3C,GAAIA,CAAJ,EAAkBnP,CAAAoP,aAAlB,CAAwC,CACtC,IAAIG,EAAmB9O,CAAA,GAAAA,EAAOT,CAAAiP,aAAA,CAAqB,OAArB,CAAPxO,EAAwC,EAAxCA,EAA8C,GAA9CA,SAAA,CACU,SADV,CACqB,GADrB,CAGvBtH,EAAA,CAAQgW,CAAApO,MAAA,CAAiB,GAAjB,CAAR,CAA+B,QAAQ,CAACsO,CAAD,CAAW,CAChDA,CAAA,CAAWrD,EAAA,CAAKqD,CAAL,CAC4C,GAAvD,GAAIE,CAAAxS,QAAA,CAAwB,GAAxB,CAA8BsS,CAA9B,CAAyC,GAAzC,CAAJ,GACEE,CADF,EACqBF,CADrB,CACgC,GADhC,CAFgD,CAAlD,CAOArP,EAAAoP,aAAA,CAAqB,OAArB,CAA8BpD,EAAA,CAAKuD,CAAL,CAA9B,CAXsC,CADG,CAgB7C/B,QAASA,GAAc,CAACgC,CAAD,CAAO/N,CAAP,CAAiB,CACtC,GAAIA,CAAJ,CAAc,CACZA,CAAA,CAAaA,CAAAjF,SACF,EADuB,CAAAX,CAAA,CAAU4F,CAAA1I,OAAV,CACvB,EADsDD,EAAA,CAAS2I,CAAT,CACtD,CACP,CAAEA,CAAF,CADO,CAAPA,CAEJ,KAAI,IAAI1H,
EAAE,CAAV,CAAaA,CAAb,CAAiB0H,CAAA1I,OAAjB,CAAkCgB,CAAA,EAAlC,CACEyV,CAAA5V,KAAA,CAAU6H,CAAA,CAAS1H,CAAT,CAAV,CALU,CADwB,CAWxC0V,QAASA,GAAgB,CAACzP,CAAD,CAAU8B,CAAV,CAAgB,CACvC,MAAO4N,GAAA,CAAoB1P,CAApB,CAA6B,GAA7B,EAAoC8B,CAApC,EAA4C,cAA5C,EAA+D,YAA/D,CADgC,CAIzC4N,QAASA,GAAmB,CAAC1P,CAAD,CAAU8B,CAAV,CAAgB5H,CAAhB,CAAuB,CAG1B,CAAvB,EAAG8F,CAAAhH,SAAH,GACEgH,CADF,CACYA,CAAA2P,gBADZ,CAKA,KAFI/N,CAEJ,CAFY1I,CAAA,CAAQ4I,CAAR,CAAA,CAAgBA,CAAhB,CAAuB,CAACA,CAAD,CAEnC,CAAO9B,CAAP,CAAA,CAAgB,CACd,IADc,IACLjG;AAAI,CADC,CACE6V,EAAKhO,CAAA7I,OAArB,CAAmCgB,CAAnC,CAAuC6V,CAAvC,CAA2C7V,CAAA,EAA3C,CACE,IAAKG,CAAL,CAAa+F,CAAA8C,KAAA,CAAY/C,CAAZ,CAAqB4B,CAAA,CAAM7H,CAAN,CAArB,CAAb,IAAiDrB,CAAjD,CAA4D,MAAOwB,EAMrE8F,EAAA,CAAUA,CAAA6P,WAAV,EAAsD,EAAtD,GAAiC7P,CAAAhH,SAAjC,EAA4DgH,CAAA8P,KAR9C,CARiC,CAoBnDC,QAASA,GAAW,CAAC/P,CAAD,CAAU,CAC5B,IAD4B,IACnBjG,EAAI,CADe,CACZsT,EAAarN,CAAAqN,WAA7B,CAAiDtT,CAAjD,CAAqDsT,CAAAtU,OAArD,CAAwEgB,CAAA,EAAxE,CACE4T,EAAA,CAAaN,CAAA,CAAWtT,CAAX,CAAb,CAEF,KAAA,CAAOiG,CAAAiN,WAAP,CAAA,CACEjN,CAAAgN,YAAA,CAAoBhN,CAAAiN,WAApB,CAL0B,CA+D9B+C,QAASA,GAAkB,CAAChQ,CAAD,CAAU8B,CAAV,CAAgB,CAEzC,IAAImO,EAAcC,EAAA,CAAapO,CAAA6B,YAAA,EAAb,CAGlB,OAAOsM,EAAP,EAAsBE,EAAA,CAAiBnQ,CAAAxD,SAAjB,CAAtB,EAA4DyT,CALnB,CAyM3CG,QAASA,GAAkB,CAACpQ,CAAD,CAAUgO,CAAV,CAAkB,CAC3C,IAAIG,EAAeA,QAAS,CAACkC,CAAD,CAAQvC,CAAR,CAAc,CACnCuC,CAAAC,eAAL,GACED,CAAAC,eADF,CACyBC,QAAQ,EAAG,CAChCF,CAAAG,YAAA,CAAoB,CAAA,CADY,CADpC,CAMKH,EAAAI,gBAAL,GACEJ,CAAAI,gBADF,CAC0BC,QAAQ,EAAG,CACjCL,CAAAM,aAAA,CAAqB,CAAA,CADY,CADrC,CAMKN,EAAAO,OAAL,GACEP,CAAAO,OADF,CACiBP,CAAAQ,WADjB,EACqCpY,CADrC,CAIA,IAAImD,CAAA,CAAYyU,CAAAS,iBAAZ,CAAJ,CAAyC,CACvC,IAAIC;AAAUV,CAAAC,eACdD,EAAAC,eAAA,CAAuBC,QAAQ,EAAG,CAChCF,CAAAS,iBAAA,CAAyB,CAAA,CACzBC,EAAAtX,KAAA,CAAa4W,CAAb,CAFgC,CAIlCA,EAAAS,iBAAA,CAAyB,CAAA,CANc,CASzCT,CAAAW,mBAAA,CAA2BC,QAAQ,EAAG,CACpC,MAAOZ,EAAAS,iBAAP,EAAuD,CAAA,CAAvD,GAAiCT,CAAAG,YADG,CAKtC,KAAIU,EAAoBjT,EAAA,CAAY+P,CAAA,CAAOF,CAAP,EAAeuC,CAAAvC,KAAf,CAAZ,EAA0C,EAA1C,CAExB3U,EAAA,CAAQ+X,CAAR,CAA2B,QAAQ,CAACrS,CAAD,CAAK,CACtCA,CAAApF,KAAA,CAAQuG,CAAR,CAAiBqQ,CAAjB,CADsC,CAAxC,CAMY,EAAZ,EAAIc,CAAJ,EAEEd,CAAAC,eAEA,CAFuB,IAEvB,CADAD,CAAAI,gBACA,CADwB,IACxB,CAAAJ,CAAAW,mBAAA,CAA2B,IAJ7B,GAOE,OAAOX,CAAAC,eAEP,CADA,OAAOD,CAAAI,gBACP,CAAA,OAAOJ,CAAAW,mBATT,CAvCwC,CAmD1C7C,EAAAiD,KAAA,CAAoBpR,CACpB,OAAOmO,EArDoC,CAiU7CkD,QAASA,GAAO,CAACxY,CAAD,CAAMyY,CAAN,CAAiB,CAAA,IAC3BC,EAAU,MAAO1Y,EADU,CAE3BS,CAEW,WAAf,EAAIiY,CAAJ,EAAyC,QAAzC,EAA8BA,CAA9B,EAA6D,IAA7D,GAAqD1Y,CAArD,CACsC,UAApC,EAAI,OAAQS,CAAR;AAAcT,CAAAiC,UAAd,CAAJ,CAEExB,CAFF,CAEQT,CAAAiC,UAAA,EAFR,CAGWxB,CAHX,GAGmBZ,CAHnB,GAIEY,CAJF,CAIQT,CAAAiC,UAJR,CAIyB,CAAAwW,CAAA,EAAanX,EAAb,GAJzB,CADF,CAQEb,CARF,CAQQT,CAGR,OAAO0Y,EAAP,CAAiB,GAAjB,CAAuBjY,CAfQ,CAqBjCkY,QAASA,GAAO,CAACxU,CAAD,CAAQyU,CAAR,CAAqB,CACnC,GAAIA,CAAJ,CAAiB,CACf,IAAIpX,EAAM,CACV,KAAAF,QAAA,CAAeuX,QAAQ,EAAG,CACxB,MAAO,EAAErX,CADe,CAFX,CAMjBlB,CAAA,CAAQ6D,CAAR,CAAe,IAAA2U,IAAf,CAAyB,IAAzB,CAPmC,CAwGrCC,QAASA,GAAQ,CAAC/S,CAAD,CAAK,CAAA,IAChBgT,CADgB,CAEhBC,CAIc,WAAlB,GAAI,MAAOjT,EAAX,EACQgT,CADR,CACkBhT,CAAAgT,QADlB,IAEIA,CAUA,CAVU,EAUV,CATIhT,CAAA9F,OASJ,GARE+Y,CAEA,CAFSjT,CAAA5C,SAAA,EAAAwE,QAAA,CAAsBsR,EAAtB,CAAsC,EAAtC,CAET,CADAC,CACA,CADUF,CAAA/T,MAAA,CAAakU,EAAb,CACV,CAAA9Y,CAAA,CAAQ6Y,CAAA,CAAQ,CAAR,CAAAjR,MAAA,CAAiBmR,EAAjB,CAAR,CAAwC,QAAQ,CAACrO,CAAD,CAAK,CACnDA,CAAApD,QAAA,CAAY0R,EAAZ,CAAoB,QAAQ,CAACC,CAAD,CAAMC,CAAN,CAAkBvQ,CAAlB,CAAuB,CACjD+P,CAAAjY,KAAA,CAAakI,CAAb,CADiD,CAAnD,CADmD,CAArD,CAMF,EAAAjD,CAAAgT,QAAA,CAAaA,CAZjB,EAcW3Y,CAAA,CAAQ2F,CAAR,CAAJ,EACLyT,CAEA,CAFOzT,CAAA9F,OAEP,CAFmB,CAEnB,CADAgL,E
AAA,CAAYlF,CAAA,CAAGyT,CAAH,CAAZ,CAAsB,IAAtB,CACA,CAAAT,CAAA,CAAUhT,CAAAE,MAAA,CAAS,CAAT,CAAYuT,CAAZ,CAHL,EAKLvO,EAAA,CAAYlF,CAAZ,CAAgB,IAAhB,CAAsB,CAAA,CAAtB,CAEF,OAAOgT,EA3Ba,CAygBtBpP,QAASA,GAAc,CAAC8P,CAAD,CAAgB,CAmCrCC,QAASA,EAAa,CAACC,CAAD,CAAW,CAC/B,MAAO,SAAQ,CAACnZ,CAAD,CAAMY,CAAN,CAAa,CAC1B,GAAI4B,CAAA,CAASxC,CAAT,CAAJ,CACEH,CAAA,CAAQG,CAAR;AAAaU,EAAA,CAAcyY,CAAd,CAAb,CADF,KAGE,OAAOA,EAAA,CAASnZ,CAAT,CAAcY,CAAd,CAJiB,CADG,CAUjCkL,QAASA,EAAQ,CAACtD,CAAD,CAAO4Q,CAAP,CAAkB,CACjCxO,EAAA,CAAwBpC,CAAxB,CAA8B,SAA9B,CACA,IAAIvI,CAAA,CAAWmZ,CAAX,CAAJ,EAA6BxZ,CAAA,CAAQwZ,CAAR,CAA7B,CACEA,CAAA,CAAYC,CAAAC,YAAA,CAA6BF,CAA7B,CAEd,IAAI,CAACA,CAAAG,KAAL,CACE,KAAM/N,GAAA,CAAgB,MAAhB,CAA2EhD,CAA3E,CAAN,CAEF,MAAOgR,EAAA,CAAchR,CAAd,CAAqBiR,CAArB,CAAP,CAA8CL,CARb,CAWnC1N,QAASA,EAAO,CAAClD,CAAD,CAAOkR,CAAP,CAAkB,CAAE,MAAO5N,EAAA,CAAStD,CAAT,CAAe,MAAQkR,CAAR,CAAf,CAAT,CA6BlCC,QAASA,EAAW,CAACV,CAAD,CAAe,CAAA,IAC7B9M,EAAY,EADiB,CACbyN,CADa,CACH3N,CADG,CACUxL,CADV,CACa6V,CAC9CzW,EAAA,CAAQoZ,CAAR,CAAuB,QAAQ,CAAC5Q,CAAD,CAAS,CACtC,GAAI,CAAAwR,CAAAC,IAAA,CAAkBzR,CAAlB,CAAJ,CAAA,CACAwR,CAAAxB,IAAA,CAAkBhQ,CAAlB,CAA0B,CAAA,CAA1B,CAEA,IAAI,CACF,GAAI1I,CAAA,CAAS0I,CAAT,CAAJ,CAIE,IAHAuR,CAGgD,CAHrCjN,EAAA,CAActE,CAAd,CAGqC,CAFhD8D,CAEgD,CAFpCA,CAAAvG,OAAA,CAAiB+T,CAAA,CAAYC,CAAAjO,SAAZ,CAAjB,CAAA/F,OAAA,CAAwDgU,CAAAG,WAAxD,CAEoC,CAA5C9N,CAA4C,CAA9B2N,CAAAI,aAA8B,CAAPvZ,CAAO,CAAH,CAAG,CAAA6V,CAAA,CAAKrK,CAAAxM,OAArD,CAAyEgB,CAAzE,CAA6E6V,CAA7E,CAAiF7V,CAAA,EAAjF,CAAsF,CAAA,IAChFwZ,EAAahO,CAAA,CAAYxL,CAAZ,CADmE,CAEhFqL,EAAWuN,CAAAS,IAAA,CAAqBG,CAAA,CAAW,CAAX,CAArB,CAEfnO,EAAA,CAASmO,CAAA,CAAW,CAAX,CAAT,CAAAtU,MAAA,CAA8BmG,CAA9B,CAAwCmO,CAAA,CAAW,CAAX,CAAxC,CAJoF,CAJxF,IAUWha,EAAA,CAAWoI,CAAX,CAAJ,CACH8D,CAAA7L,KAAA,CAAe+Y,CAAAjQ,OAAA,CAAwBf,CAAxB,CAAf,CADG,CAEIzI,CAAA,CAAQyI,CAAR,CAAJ,CACH8D,CAAA7L,KAAA,CAAe+Y,CAAAjQ,OAAA,CAAwBf,CAAxB,CAAf,CADG,CAGLoC,EAAA,CAAYpC,CAAZ,CAAoB,QAApB,CAhBA,CAkBF,MAAOvB,CAAP,CAAU,CAYV,KAXIlH,EAAA,CAAQyI,CAAR,CAWE,GAVJA,CAUI;AAVKA,CAAA,CAAOA,CAAA5I,OAAP,CAAuB,CAAvB,CAUL,EARFqH,CAAAoT,QAQE,GARWpT,CAAAqT,MAQX,EARqD,EAQrD,EARsBrT,CAAAqT,MAAA1W,QAAA,CAAgBqD,CAAAoT,QAAhB,CAQtB,IAFJpT,CAEI,CAFAA,CAAAoT,QAEA,CAFY,IAEZ,CAFmBpT,CAAAqT,MAEnB,EAAA3O,EAAA,CAAgB,UAAhB,CACInD,CADJ,CACYvB,CAAAqT,MADZ,EACuBrT,CAAAoT,QADvB,EACoCpT,CADpC,CAAN,CAZU,CArBZ,CADsC,CAAxC,CAsCA,OAAOqF,EAxC0B,CA+CnCiO,QAASA,EAAsB,CAACC,CAAD,CAAQ3O,CAAR,CAAiB,CAE9C4O,QAASA,EAAU,CAACC,CAAD,CAAc,CAC/B,GAAIF,CAAAna,eAAA,CAAqBqa,CAArB,CAAJ,CAAuC,CACrC,GAAIF,CAAA,CAAME,CAAN,CAAJ,GAA2BC,CAA3B,CACE,KAAMhP,GAAA,CAAgB,MAAhB,CACI+O,CADJ,CACkB,MADlB,CAC2BzP,CAAA5J,KAAA,CAAU,MAAV,CAD3B,CAAN,CAGF,MAAOmZ,EAAA,CAAME,CAAN,CAL8B,CAOrC,GAAI,CAGF,MAFAzP,EAAAzJ,QAAA,CAAakZ,CAAb,CAEO,CADPF,CAAA,CAAME,CAAN,CACO,CADcC,CACd,CAAAH,CAAA,CAAME,CAAN,CAAA,CAAqB7O,CAAA,CAAQ6O,CAAR,CAH1B,CAIF,MAAOE,CAAP,CAAY,CAIZ,KAHIJ,EAAA,CAAME,CAAN,CAGEE,GAHqBD,CAGrBC,EAFJ,OAAOJ,CAAA,CAAME,CAAN,CAEHE,CAAAA,CAAN,CAJY,CAJd,OASU,CACR3P,CAAAqH,MAAA,EADQ,CAjBmB,CAuBjC/I,QAASA,EAAM,CAAC7D,CAAD,CAAKD,CAAL,CAAWoV,CAAX,CAAkB,CAAA,IAC3BC,EAAO,EADoB,CAE3BpC,EAAUD,EAAA,CAAS/S,CAAT,CAFiB,CAG3B9F,CAH2B,CAGnBgB,CAHmB,CAI3BT,CAEAS,EAAA,CAAI,CAAR,KAAWhB,CAAX,CAAoB8Y,CAAA9Y,OAApB,CAAoCgB,CAApC,CAAwChB,CAAxC,CAAgDgB,CAAA,EAAhD,CAAqD,CACnDT,CAAA,CAAMuY,CAAA,CAAQ9X,CAAR,CACN,IAAmB,QAAnB,GAAI,MAAOT,EAAX,CACE,KAAMwL,GAAA,CAAgB,MAAhB,CACyExL,CADzE,CAAN,CAGF2a,CAAAra,KAAA,CACEoa,CACA,EADUA,CAAAxa,eAAA,CAAsBF,CAAtB,CACV;AAAE0a,CAAA,CAAO1a,CAAP,CAAF,CACEsa,CAAA,CAAWta,CAAX,CAHJ,CANmD,CAYjDJ,CAAA,CAAQ2F,CAAR,CAAJ,GACEA,CADF,CACOA,CAAA,CAAG9F,CAAH,CADP,CAMA,OAAO8F,EAAAI,MAAA,CAASL,CAAT,CAAeqV,CAAf,CAxBw
B,CAwCjC,MAAO,QACGvR,CADH,aAbPkQ,QAAoB,CAACsB,CAAD,CAAOF,CAAP,CAAe,CAAA,IAC7BG,EAAcA,QAAQ,EAAG,EADI,CAEnBC,CAIdD,EAAAE,UAAA,CAAyBA,CAAAnb,CAAA,CAAQgb,CAAR,CAAA,CAAgBA,CAAA,CAAKA,CAAAnb,OAAL,CAAmB,CAAnB,CAAhB,CAAwCmb,CAAxCG,WACzBC,EAAA,CAAW,IAAIH,CACfC,EAAA,CAAgB1R,CAAA,CAAOwR,CAAP,CAAaI,CAAb,CAAuBN,CAAvB,CAEhB,OAAOlY,EAAA,CAASsY,CAAT,CAAA,EAA2B7a,CAAA,CAAW6a,CAAX,CAA3B,CAAuDA,CAAvD,CAAuEE,CAV7C,CAa5B,KAGAV,CAHA,UAIKhC,EAJL,KAKA2C,QAAQ,CAACzS,CAAD,CAAO,CAClB,MAAOgR,EAAAtZ,eAAA,CAA6BsI,CAA7B,CAAoCiR,CAApC,CAAP,EAA8DY,CAAAna,eAAA,CAAqBsI,CAArB,CAD5C,CALf,CAjEuC,CApIX,IACjCgS,EAAgB,EADiB,CAEjCf,EAAiB,UAFgB,CAGjC3O,EAAO,EAH0B,CAIjC+O,EAAgB,IAAI3B,EAAJ,CAAY,EAAZ,CAAgB,CAAA,CAAhB,CAJiB,CAKjCsB,EAAgB,UACJ,UACIN,CAAA,CAAcpN,CAAd,CADJ,SAEGoN,CAAA,CAAcxN,CAAd,CAFH,SAGGwN,CAAA,CAiDnBgC,QAAgB,CAAC1S,CAAD,CAAOmC,CAAP,CAAoB,CAClC,MAAOe,EAAA,CAAQlD,CAAR,CAAc,CAAC,WAAD,CAAc,QAAQ,CAAC2S,CAAD,CAAY,CACrD,MAAOA,EAAA7B,YAAA,CAAsB3O,CAAtB,CAD8C,CAAlC,CAAd,CAD2B,CAjDjB,CAHH,OAICuO,CAAA,CAsDjBtY,QAAc,CAAC4H,CAAD,CAAO1C,CAAP,CAAY,CAAE,MAAO4F,EAAA,CAAQlD,CAAR,CAAcnG,CAAA,CAAQyD,CAAR,CAAd,CAAT,CAtDT,CAJD,UAKIoT,CAAA,CAuDpBkC,QAAiB,CAAC5S,CAAD;AAAO5H,CAAP,CAAc,CAC7BgK,EAAA,CAAwBpC,CAAxB,CAA8B,UAA9B,CACAgR,EAAA,CAAchR,CAAd,CAAA,CAAsB5H,CACtBya,EAAA,CAAc7S,CAAd,CAAA,CAAsB5H,CAHO,CAvDX,CALJ,WAkEhB0a,QAAkB,CAACf,CAAD,CAAcgB,CAAd,CAAuB,CAAA,IACnCC,EAAenC,CAAAS,IAAA,CAAqBS,CAArB,CAAmCd,CAAnC,CADoB,CAEnCgC,EAAWD,CAAAjC,KAEfiC,EAAAjC,KAAA,CAAoBmC,QAAQ,EAAG,CAC7B,IAAIC,EAAeC,CAAAxS,OAAA,CAAwBqS,CAAxB,CAAkCD,CAAlC,CACnB,OAAOI,EAAAxS,OAAA,CAAwBmS,CAAxB,CAAiC,IAAjC,CAAuC,WAAYI,CAAZ,CAAvC,CAFsB,CAJQ,CAlEzB,CADI,CALiB,CAejCtC,EAAoBG,CAAA2B,UAApB9B,CACIe,CAAA,CAAuBZ,CAAvB,CAAsC,QAAQ,EAAG,CAC/C,KAAMhO,GAAA,CAAgB,MAAhB,CAAiDV,CAAA5J,KAAA,CAAU,MAAV,CAAjD,CAAN,CAD+C,CAAjD,CAhB6B,CAmBjCma,EAAgB,EAnBiB,CAoBjCO,EAAoBP,CAAAF,UAApBS,CACIxB,CAAA,CAAuBiB,CAAvB,CAAsC,QAAQ,CAACQ,CAAD,CAAc,CACtD/P,CAAAA,CAAWuN,CAAAS,IAAA,CAAqB+B,CAArB,CAAmCpC,CAAnC,CACf,OAAOmC,EAAAxS,OAAA,CAAwB0C,CAAAyN,KAAxB,CAAuCzN,CAAvC,CAFmD,CAA5D,CAMRjM,EAAA,CAAQ8Z,CAAA,CAAYV,CAAZ,CAAR,CAAoC,QAAQ,CAAC1T,CAAD,CAAK,CAAEqW,CAAAxS,OAAA,CAAwB7D,CAAxB,EAA8BrD,CAA9B,CAAF,CAAjD,CAEA,OAAO0Z,EA7B8B,CAkQvCrM,QAASA,GAAqB,EAAG,CAE/B,IAAIuM,EAAuB,CAAA,CAE3B,KAAAC,qBAAA,CAA4BC,QAAQ,EAAG,CACrCF,CAAA,CAAuB,CAAA,CADc,CAIvC,KAAAvC,KAAA,CAAY,CAAC,SAAD,CAAY,WAAZ,CAAyB,YAAzB,CAAuC,QAAQ,CAAC0C,CAAD,CAAUC,CAAV,CAAqBC,CAArB,CAAiC,CAO1FC,QAASA,EAAc,CAAC5Y,CAAD,CAAO,CAC5B,IAAIa,EAAS,IACbxE;CAAA,CAAQ2D,CAAR,CAAc,QAAQ,CAACkD,CAAD,CAAU,CACzBrC,CAAL,EAA+C,GAA/C,GAAemC,CAAA,CAAUE,CAAAxD,SAAV,CAAf,GAAoDmB,CAApD,CAA6DqC,CAA7D,CAD8B,CAAhC,CAGA,OAAOrC,EALqB,CAQ9BgY,QAASA,EAAM,EAAG,CAAA,IACZC,EAAOJ,CAAAI,KAAA,EADK,CACaC,CAGxBD,EAAL,CAGK,CAAKC,CAAL,CAAWpd,CAAAsJ,eAAA,CAAwB6T,CAAxB,CAAX,EAA2CC,CAAAC,eAAA,EAA3C,CAGA,CAAKD,CAAL,CAAWH,CAAA,CAAejd,CAAAsd,kBAAA,CAA2BH,CAA3B,CAAf,CAAX,EAA8DC,CAAAC,eAAA,EAA9D,CAGa,KAHb,GAGIF,CAHJ,EAGoBL,CAAAS,SAAA,CAAiB,CAAjB,CAAoB,CAApB,CATzB,CAAWT,CAAAS,SAAA,CAAiB,CAAjB,CAAoB,CAApB,CAJK,CAdlB,IAAIvd,EAAW8c,CAAA9c,SAgCX2c,EAAJ,EACEK,CAAAhY,OAAA,CAAkBwY,QAAwB,EAAG,CAAC,MAAOT,EAAAI,KAAA,EAAR,CAA7C,CACEM,QAA8B,EAAG,CAC/BT,CAAAjY,WAAA,CAAsBmY,CAAtB,CAD+B,CADnC,CAMF,OAAOA,EAxCmF,CAAhF,CARmB,CA0SjCtL,QAASA,GAAuB,EAAE,CAChC,IAAAwI,KAAA,CAAY,CAAC,OAAD,CAAU,UAAV,CAAsB,QAAQ,CAACsD,CAAD,CAAQC,CAAR,CAAkB,CAC1D,MAAOD,EAAAE,UACA,CAAH,QAAQ,CAACxX,CAAD,CAAK,CAAE,MAAOsX,EAAA,CAAMtX,CAAN,CAAT,CAAV,CACH,QAAQ,CAACA,CAAD,CAAK,CACb,MAAOuX,EAAA,CAASvX,CAAT,CAAa,CAAb,CAAgB,CAAA,CAAhB,CADM,CAHyC,CAAhD,CADoB,CAgClCyX,QAASA,GAAO,CAAC9d,CAAD,CAASC,CAAT,CAAmB8d,CAAnB,CAAyBC,CAAzB,CAAmC,CAsBjDC,QAASA,EAA0B,CAAC5X,CA
AD,CAAK,CACtC,GAAI,CACFA,CAAAI,MAAA,CAAS,IAAT;AAzxGGF,EAAAtF,KAAA,CAyxGsBwB,SAzxGtB,CAyxGiC+D,CAzxGjC,CAyxGH,CADE,CAAJ,OAEU,CAER,GADA0X,CAAA,EACI,CAA4B,CAA5B,GAAAA,CAAJ,CACE,IAAA,CAAMC,CAAA5d,OAAN,CAAA,CACE,GAAI,CACF4d,CAAAC,IAAA,EAAA,EADE,CAEF,MAAOxW,CAAP,CAAU,CACVmW,CAAAM,MAAA,CAAWzW,CAAX,CADU,CANR,CAH4B,CAmExC0W,QAASA,EAAW,CAACC,CAAD,CAAWC,CAAX,CAAuB,CACxCC,SAASA,GAAK,EAAG,CAChB9d,CAAA,CAAQ+d,CAAR,CAAiB,QAAQ,CAACC,CAAD,CAAQ,CAAEA,CAAA,EAAF,CAAjC,CACAC,EAAA,CAAcJ,CAAA,CAAWC,EAAX,CAAkBF,CAAlB,CAFE,CAAjBE,CAAA,EADwC,CAuE3CI,QAASA,EAAa,EAAG,CACvBC,CAAA,CAAc,IACVC,EAAJ,EAAsB3Y,CAAA4Y,IAAA,EAAtB,GAEAD,CACA,CADiB3Y,CAAA4Y,IAAA,EACjB,CAAAre,CAAA,CAAQse,EAAR,CAA4B,QAAQ,CAACC,CAAD,CAAW,CAC7CA,CAAA,CAAS9Y,CAAA4Y,IAAA,EAAT,CAD6C,CAA/C,CAHA,CAFuB,CAhKwB,IAC7C5Y,EAAO,IADsC,CAE7C+Y,EAAclf,CAAA,CAAS,CAAT,CAF+B,CAG7C0D,EAAW3D,CAAA2D,SAHkC,CAI7Cyb,EAAUpf,CAAAof,QAJmC,CAK7CZ,EAAaxe,CAAAwe,WALgC,CAM7Ca,EAAerf,CAAAqf,aAN8B,CAO7CC,EAAkB,EAEtBlZ,EAAAmZ,OAAA,CAAc,CAAA,CAEd,KAAIrB,EAA0B,CAA9B,CACIC,EAA8B,EAGlC/X,EAAAoZ,6BAAA,CAAoCvB,CACpC7X,EAAAqZ,6BAAA,CAAoCC,QAAQ,EAAG,CAAExB,CAAA,EAAF,CA6B/C9X,EAAAuZ,gCAAA,CAAuCC,QAAQ,CAACC,CAAD,CAAW,CAIxDlf,CAAA,CAAQ+d,CAAR,CAAiB,QAAQ,CAACC,CAAD,CAAQ,CAAEA,CAAA,EAAF,CAAjC,CAEgC,EAAhC,GAAIT,CAAJ,CACE2B,CAAA,EADF,CAGE1B,CAAA/c,KAAA,CAAiCye,CAAjC,CATsD,CA7CT;IA6D7CnB,EAAU,EA7DmC,CA8D7CE,CAaJxY,EAAA0Z,UAAA,CAAiBC,QAAQ,CAAC1Z,CAAD,CAAK,CACxBjD,CAAA,CAAYwb,CAAZ,CAAJ,EAA8BN,CAAA,CAAY,GAAZ,CAAiBE,CAAjB,CAC9BE,EAAAtd,KAAA,CAAaiF,CAAb,CACA,OAAOA,EAHqB,CA3EmB,KAoG7C0Y,EAAiBpb,CAAAqc,KApG4B,CAqG7CC,EAAchgB,CAAAkE,KAAA,CAAc,MAAd,CArG+B,CAsG7C2a,EAAc,IAqBlB1Y,EAAA4Y,IAAA,CAAWkB,QAAQ,CAAClB,CAAD,CAAM/W,CAAN,CAAe,CAE5BtE,CAAJ,GAAiB3D,CAAA2D,SAAjB,GAAkCA,CAAlC,CAA6C3D,CAAA2D,SAA7C,CACIyb,EAAJ,GAAgBpf,CAAAof,QAAhB,GAAgCA,CAAhC,CAA0Cpf,CAAAof,QAA1C,CAGA,IAAIJ,CAAJ,CACE,IAAID,CAAJ,EAAsBC,CAAtB,CAiBA,MAhBAD,EAgBO3Y,CAhBU4Y,CAgBV5Y,CAfH4X,CAAAoB,QAAJ,CACMnX,CAAJ,CAAamX,CAAAe,aAAA,CAAqB,IAArB,CAA2B,EAA3B,CAA+BnB,CAA/B,CAAb,EAEEI,CAAAgB,UAAA,CAAkB,IAAlB,CAAwB,EAAxB,CAA4BpB,CAA5B,CAEA,CAAAiB,CAAA/b,KAAA,CAAiB,MAAjB,CAAyB+b,CAAA/b,KAAA,CAAiB,MAAjB,CAAzB,CAJF,CADF,EAQE4a,CACA,CADcE,CACd,CAAI/W,CAAJ,CACEtE,CAAAsE,QAAA,CAAiB+W,CAAjB,CADF,CAGErb,CAAAqc,KAHF,CAGkBhB,CAZpB,CAeO5Y,CAAAA,CAjBP,CADF,IAwBE,OAAO0Y,EAAP,EAAsBnb,CAAAqc,KAAA/X,QAAA,CAAsB,MAAtB,CAA6B,GAA7B,CA9BQ,CA3He,KA6J7CgX,GAAqB,EA7JwB,CA8J7CoB,EAAgB,CAAA,CAiCpBja,EAAAka,YAAA,CAAmBC,QAAQ,CAACV,CAAD,CAAW,CAEpC,GAAI,CAACQ,CAAL,CAAoB,CAMlB,GAAIrC,CAAAoB,QAAJ,CAAsB3X,CAAA,CAAOzH,CAAP,CAAAwgB,GAAA,CAAkB,UAAlB,CAA8B3B,CAA9B,CAEtB,IAAIb,CAAAyC,WAAJ,CAAyBhZ,CAAA,CAAOzH,CAAP,CAAAwgB,GAAA,CAAkB,YAAlB,CAAgC3B,CAAhC,CAAzB;IAEKzY,EAAA0Z,UAAA,CAAejB,CAAf,CAELwB,EAAA,CAAgB,CAAA,CAZE,CAepBpB,EAAA7d,KAAA,CAAwBye,CAAxB,CACA,OAAOA,EAlB6B,CAkCtCzZ,EAAAsa,SAAA,CAAgBC,QAAQ,EAAG,CACzB,IAAIX,EAAOC,CAAA/b,KAAA,CAAiB,MAAjB,CACX,OAAO8b,EAAA,CAAOA,CAAA/X,QAAA,CAAa,wBAAb,CAAuC,EAAvC,CAAP,CAAoD,EAFlC,CAQ3B,KAAI2Y,EAAc,EAAlB,CACIC,GAAmB,EADvB,CAEIC,EAAa1a,CAAAsa,SAAA,EAsBjBta,EAAA2a,QAAA,CAAeC,QAAQ,CAAC1X,CAAD,CAAO5H,CAAP,CAAc,CAAA,IAE/Buf,CAF+B,CAEJC,CAFI,CAEI3f,CAFJ,CAEOK,CAE1C,IAAI0H,CAAJ,CACM5H,CAAJ,GAAcxB,CAAd,CACEif,CAAA+B,OADF,CACuBC,MAAA,CAAO7X,CAAP,CADvB,CACsC,SADtC,CACkDwX,CADlD,CAE0B,wCAF1B,CAIMrgB,CAAA,CAASiB,CAAT,CAJN,GAKIuf,CAOA,CAPgB1gB,CAAA4e,CAAA+B,OAAA3gB,CAAqB4gB,MAAA,CAAO7X,CAAP,CAArB/I,CAAoC,GAApCA,CAA0C4gB,MAAA,CAAOzf,CAAP,CAA1CnB,CACM,QADNA,CACiBugB,CADjBvgB,QAOhB,CANsD,CAMtD,CAAmB,IAAnB,CAAI0gB,CAAJ,EACElD,CAAAqD,KAAA,CAAU,UAAV,CAAsB9X,CAAtB,CACE,6DADF,CAEE2X,CAFF,CAEiB,iBAFjB,CAbN,CADF,KAoBO,CACL,GAAI9B,CAAA+B,OAAJ,GAA2BL,EAA3B,CAKE,IAJAA,EAIK,CAJc1B,
[machine-generated source-map "mappings" data (base64 VLQ), apparently from the minified AngularJS bundle shipped with Toaster — blob omitted]
AviBgBr8B,QAAA,CAAY,IAAZ,CAAjB,CAAqC,CAArC,CAAjB,CAwiBH,EADoCmc,CACpC,EADgD,GAChD,EAAAigB,CAAA,CAAe3iB,CAAAoB,QAAA,CAAmBof,EAAnB,CAAsCsB,EAFvD,GAIE5C,CACA;AADUmB,EAAA,CAAUuC,CAAV,CACV,CAAAD,CAAA,CAAenB,EALjB,CAOAxiB,EAAA,CAAY,IAAI2jB,CAAJ,CAAiBzD,CAAjB,CAA0B,GAA1B,CAAgCuC,CAAhC,CACZziB,EAAA4hB,QAAA,CAAkB5hB,CAAAoiB,UAAA,CAAoBwB,CAApB,CAAlB,CAEA,KAAIC,EAAoB,2BAExBla,EAAAnG,GAAA,CAAgB,OAAhB,CAAyB,QAAQ,CAAC3I,CAAD,CAAQ,CAIvC,GAAIipB,CAAAjpB,CAAAipB,QAAJ,EAAqBC,CAAAlpB,CAAAkpB,QAArB,EAAqD,CAArD,EAAsClpB,CAAAmpB,MAAtC,CAAA,CAKA,IAHA,IAAI3jB,EAAM5V,CAAA,CAAOoQ,CAAAO,OAAP,CAGV,CAAsC,GAAtC,GAAO9Q,CAAA,CAAU+V,CAAA,CAAI,CAAJ,CAAArZ,SAAV,CAAP,CAAA,CAEE,GAAIqZ,CAAA,CAAI,CAAJ,CAAJ,GAAesJ,CAAA,CAAa,CAAb,CAAf,EAAkC,CAAC,CAACtJ,CAAD,CAAOA,CAAAva,OAAA,EAAP,EAAqB,CAArB,CAAnC,CAA4D,MAG9D,KAAIm+B,EAAU5jB,CAAApZ,KAAA,CAAS,MAAT,CAEVX,EAAA,CAAS29B,CAAT,CAAJ,EAAgD,4BAAhD,GAAyBA,CAAAx9B,SAAA,EAAzB,GAGEw9B,CAHF,CAGY3G,EAAA,CAAW2G,CAAAC,QAAX,CAAAlhB,KAHZ,CAOA,IAAI,CAAA6gB,CAAAp2B,KAAA,CAAuBw2B,CAAvB,CAAJ,CAAA,CAKA,GAAIN,CAAJ,GAAqBb,EAArB,CAAiD,CAG/C,IAAI9f,EAAO3C,CAAAnZ,KAAA,CAAS,MAAT,CAAP8b,EAA2B3C,CAAAnZ,KAAA,CAAS,YAAT,CAE/B,IAAI8b,CAAJ,EAAkC,CAAlC,CAAYA,CAAAzb,QAAA,CAAa,KAAb,CAAZ,CAEE,GADI87B,CACA,CADS,GACT,CADeZ,CACf,CAAW,GAAX,EAAAzf,CAAA,CAAK,CAAL,CAAJ,CAEEihB,CAAA,CAAU/D,CAAV,CAAoBmD,CAApB,CAA6BrgB,CAF/B,KAGO,IAAe,GAAf,EAAIA,CAAA,CAAK,CAAL,CAAJ,CAELihB,CAAA,CAAU/D,CAAV,CAAoBmD,CAApB,EAA8BrjB,CAAApR,KAAA,EAA9B,EAAkD,GAAlD,EAAyDoU,CAFpD;IAGA,CAAA,IAED/E,EAAQ+B,CAAApR,KAAA,EAAArD,MAAA,CAAuB,GAAvB,CAFP,CAGHE,EAAQuX,CAAAzX,MAAA,CAAW,GAAX,CACW,EAArB,GAAI0S,CAAA1a,OAAJ,EAA2B0a,CAAA,CAAM,CAAN,CAA3B,GAAqCA,CAAA1a,OAArC,CAAoD,CAApD,CACA,KAAK,IAAIgB,EAAE,CAAX,CAAcA,CAAd,CAAgBkH,CAAAlI,OAAhB,CAA8BgB,CAAA,EAA9B,CACkB,GAAhB,EAAIkH,CAAA,CAAMlH,CAAN,CAAJ,GAEqB,IAAhB,EAAIkH,CAAA,CAAMlH,CAAN,CAAJ,CACH0Z,CAAAmD,IAAA,EADG,CAEI3V,CAAA,CAAMlH,CAAN,CAAAhB,OAFJ,EAGH0a,CAAA7Z,KAAA,CAAWqH,CAAA,CAAMlH,CAAN,CAAX,CALF,CAOF0/B,EAAA,CAAU/D,CAAV,CAAoBmD,CAApB,CAA6BplB,CAAAjZ,KAAA,CAAW,GAAX,CAbxB,CAbsC,CA+B7Cm/B,CAAAA,CAAenkB,CAAAoiB,UAAA,CAAoB6B,CAApB,CAEfA,EAAJ,GAAgB,CAAA5jB,CAAAnZ,KAAA,CAAS,QAAT,CAAhB,EAAsCi9B,CAAtC,EAAuD,CAAAtpB,CAAAW,mBAAA,EAAvD,IACEX,CAAAC,eAAA,EACA,CAAIqpB,CAAJ,EAAoBhM,CAAAnW,IAAA,EAApB,GAEEhC,CAAA4hB,QAAA,CAAkBuC,CAAlB,CAGA,CAFAlkB,CAAA3S,OAAA,EAEA,CAAAtK,CAAA0K,QAAA,CAAe,0BAAf,CAAA,CAA6C,CAAA,CAL/C,CAFF,CAtCA,CAnBA,CAJuC,CAAzC,CA2EIsS,EAAA0jB,OAAA,EAAJ,EAA0BE,CAA1B,EACEzL,CAAAnW,IAAA,CAAahC,CAAA0jB,OAAA,EAAb,CAAiC,CAAA,CAAjC,CAIFvL,EAAA7U,YAAA,CAAqB,QAAQ,CAAC8gB,CAAD,CAAS,CAChCpkB,CAAA0jB,OAAA,EAAJ,EAA0BU,CAA1B,GACEnkB,CAAAjY,WAAA,CAAsB,QAAQ,EAAG,CAC/B,IAAIw7B,EAASxjB,CAAA0jB,OAAA,EAEb1jB,EAAA4hB,QAAA,CAAkBwC,CAAlB,CACInkB,EAAAwjB,WAAA,CAAsB,sBAAtB;AAA8CW,CAA9C,CACsBZ,CADtB,CAAAloB,iBAAJ,EAEE0E,CAAA4hB,QAAA,CAAkB4B,CAAlB,CACA,CAAArL,CAAAnW,IAAA,CAAawhB,CAAb,CAHF,EAKED,CAAA,CAAoBC,CAApB,CAT6B,CAAjC,CAYA,CAAKvjB,CAAA0a,QAAL,EAAyB1a,CAAAokB,QAAA,EAb3B,CADoC,CAAtC,CAmBA,KAAIC,EAAgB,CACpBrkB,EAAAhY,OAAA,CAAkBs8B,QAAuB,EAAG,CAC1C,IAAIf,EAASrL,CAAAnW,IAAA,EAAb,CACIwiB,EAAiBxkB,CAAAykB,UAEhBH,EAAL,EAAsBd,CAAtB,EAAgCxjB,CAAA0jB,OAAA,EAAhC,GACEY,CAAA,EACA,CAAArkB,CAAAjY,WAAA,CAAsB,QAAQ,EAAG,CAC3BiY,CAAAwjB,WAAA,CAAsB,sBAAtB,CAA8CzjB,CAAA0jB,OAAA,EAA9C,CAAkEF,CAAlE,CAAAloB,iBAAJ,CAEE0E,CAAA4hB,QAAA,CAAkB4B,CAAlB,CAFF,EAIErL,CAAAnW,IAAA,CAAahC,CAAA0jB,OAAA,EAAb,CAAiCc,CAAjC,CACA,CAAAjB,CAAA,CAAoBC,CAApB,CALF,CAD+B,CAAjC,CAFF,CAYAxjB,EAAAykB,UAAA,CAAsB,CAAA,CAEtB,OAAOH,EAlBmC,CAA5C,CAqBA,OAAOtkB,EA5IyD,CADtD,CA/Dc,CA+P5B9L,QAASA,GAAY,EAAE,CAAA,IACjBwwB,EAAQ,CAAA,CADS,CAEjBt7B,EAAO,IASX,KAAAu7B,aAAA,CAAoBC,QAAQ,CAACC,CAAD,CAAO,CACj
C,MAAIx+B,EAAA,CAAUw+B,CAAV,CAAJ,EACEH,CACK,CADGG,CACH,CAAA,IAFP,EAISH,CALwB,CASnC,KAAArnB,KAAA,CAAY,CAAC,SAAD,CAAY,QAAQ,CAAC0C,CAAD,CAAS,CAwDvC+kB,QAASA,EAAW,CAACz2B,CAAD,CAAM,CACpBA,CAAJ,WAAmB02B,MAAnB,GACM12B,CAAA4P,MAAJ,CACE5P,CADF,CACSA,CAAA2P,QACD;AADoD,EACpD,GADgB3P,CAAA4P,MAAA1W,QAAA,CAAkB8G,CAAA2P,QAAlB,CAChB,CAAA,SAAA,CAAY3P,CAAA2P,QAAZ,CAA0B,IAA1B,CAAiC3P,CAAA4P,MAAjC,CACA5P,CAAA4P,MAHR,CAIW5P,CAAA22B,UAJX,GAKE32B,CALF,CAKQA,CAAA2P,QALR,CAKsB,IALtB,CAK6B3P,CAAA22B,UAL7B,CAK6C,GAL7C,CAKmD32B,CAAA8oB,KALnD,CADF,CASA,OAAO9oB,EAViB,CAa1B42B,QAASA,EAAU,CAAC3sB,CAAD,CAAO,CAAA,IACpB4sB,EAAUnlB,CAAAmlB,QAAVA,EAA6B,EADT,CAEpBC,EAAQD,CAAA,CAAQ5sB,CAAR,CAAR6sB,EAAyBD,CAAAE,IAAzBD,EAAwCn/B,CACxCq/B,EAAAA,CAAW,CAAA,CAIf,IAAI,CACFA,CAAA,CAAW,CAAC,CAACF,CAAA17B,MADX,CAEF,MAAOmB,CAAP,CAAU,EAEZ,MAAIy6B,EAAJ,CACS,QAAQ,EAAG,CAChB,IAAI5mB,EAAO,EACX9a,EAAA,CAAQ8B,SAAR,CAAmB,QAAQ,CAAC4I,CAAD,CAAM,CAC/BoQ,CAAAra,KAAA,CAAU0gC,CAAA,CAAYz2B,CAAZ,CAAV,CAD+B,CAAjC,CAGA,OAAO82B,EAAA17B,MAAA,CAAYy7B,CAAZ,CAAqBzmB,CAArB,CALS,CADpB,CAYO,QAAQ,CAAC6mB,CAAD,CAAOC,CAAP,CAAa,CAC1BJ,CAAA,CAAMG,CAAN,CAAoB,IAAR,EAAAC,CAAA,CAAe,EAAf,CAAoBA,CAAhC,CAD0B,CAvBJ,CApE1B,MAAO,KAQAN,CAAA,CAAW,KAAX,CARA,MAiBCA,CAAA,CAAW,MAAX,CAjBD,MA0BCA,CAAA,CAAW,MAAX,CA1BD,OAmCEA,CAAA,CAAW,OAAX,CAnCF,OA4CG,QAAS,EAAG,CAClB,IAAI57B,EAAK47B,CAAA,CAAW,OAAX,CAET,OAAO,SAAQ,EAAG,CACZP,CAAJ,EACEr7B,CAAAI,MAAA,CAASL,CAAT,CAAe3D,SAAf,CAFc,CAHA,CAAZ,EA5CH,CADgC,CAA7B,CApBS,CAiJvB+/B,QAASA,GAAoB,CAACl5B,CAAD;AAAOm5B,CAAP,CAAuB,CAClD,GAAa,kBAAb,GAAIn5B,CAAJ,EAA4C,kBAA5C,GAAmCA,CAAnC,EACgB,kBADhB,GACOA,CADP,EAC+C,kBAD/C,GACsCA,CADtC,EAEgB,WAFhB,GAEOA,CAFP,CAGE,KAAMo5B,GAAA,CAAa,SAAb,CAEkBD,CAFlB,CAAN,CAIF,MAAOn5B,EAR2C,CAWpDq5B,QAASA,GAAgB,CAACtiC,CAAD,CAAMoiC,CAAN,CAAsB,CAE7C,GAAIpiC,CAAJ,CAAS,CACP,GAAIA,CAAAoL,YAAJ,GAAwBpL,CAAxB,CACE,KAAMqiC,GAAA,CAAa,QAAb,CAEFD,CAFE,CAAN,CAGK,GACHpiC,CAAAJ,SADG,EACaI,CAAAsD,SADb,EAC6BtD,CAAAuD,MAD7B,EAC0CvD,CAAAwD,YAD1C,CAEL,KAAM6+B,GAAA,CAAa,YAAb,CAEFD,CAFE,CAAN,CAGK,GACHpiC,CAAA2S,SADG,GACc3S,CAAA2D,SADd,EAC+B3D,CAAA4D,KAD/B,EAC2C5D,CAAA6D,KAD3C,EACuD7D,CAAA8D,KADvD,EAEL,KAAMu+B,GAAA,CAAa,SAAb,CAEFD,CAFE,CAAN,CAGK,GACHpiC,CADG,GACKuiC,MADL,CAEL,KAAMF,GAAA,CAAa,SAAb,CAEFD,CAFE,CAAN,CAjBK,CAsBT,MAAOpiC,EAxBsC,CAmyB/CwiC,QAASA,GAAM,CAACxiC,CAAD,CAAMuL,CAAN,CAAYk3B,CAAZ,CAAsBC,CAAtB,CAA+B5gB,CAA/B,CAAwC,CAErDA,CAAA,CAAUA,CAAV,EAAqB,EAEjB3a,EAAAA,CAAUoE,CAAArD,MAAA,CAAW,GAAX,CACd,KADA,IAA+BzH,CAA/B,CACSS,EAAI,CAAb,CAAiC,CAAjC,CAAgBiG,CAAAjH,OAAhB,CAAoCgB,CAAA,EAApC,CAAyC,CACvCT,CAAA,CAAM0hC,EAAA,CAAqBh7B,CAAAyL,MAAA,EAArB,CAAsC8vB,CAAtC,CACN;IAAIC,EAAc3iC,CAAA,CAAIS,CAAJ,CACbkiC,EAAL,GACEA,CACA,CADc,EACd,CAAA3iC,CAAA,CAAIS,CAAJ,CAAA,CAAWkiC,CAFb,CAIA3iC,EAAA,CAAM2iC,CACF3iC,EAAAs2B,KAAJ,EAAgBxU,CAAA8gB,eAAhB,GACEC,EAAA,CAAeH,CAAf,CASA,CARM,KAQN,EARe1iC,EAQf,EAPG,QAAQ,CAACu2B,CAAD,CAAU,CACjBA,CAAAD,KAAA,CAAa,QAAQ,CAAC/vB,CAAD,CAAM,CAAEgwB,CAAAuM,IAAA,CAAcv8B,CAAhB,CAA3B,CADiB,CAAlB,CAECvG,CAFD,CAOH,CAHIA,CAAA8iC,IAGJ,GAHgBjjC,CAGhB,GAFEG,CAAA8iC,IAEF,CAFY,EAEZ,EAAA9iC,CAAA,CAAMA,CAAA8iC,IAVR,CARuC,CAqBzCriC,CAAA,CAAM0hC,EAAA,CAAqBh7B,CAAAyL,MAAA,EAArB,CAAsC8vB,CAAtC,CACNJ,GAAA,CAAiBtiC,CAAjB,CAAsB0iC,CAAtB,CACAJ,GAAA,CAAiBtiC,CAAA,CAAIS,CAAJ,CAAjB,CAA2BiiC,CAA3B,CAEA,OADA1iC,EAAA,CAAIS,CAAJ,CACA,CADWgiC,CA7B0C,CAwCvDM,QAASA,GAAe,CAACC,CAAD,CAAOC,CAAP,CAAaC,CAAb,CAAmBC,CAAnB,CAAyBC,CAAzB,CAA+BV,CAA/B,CAAwC5gB,CAAxC,CAAiD,CACvEqgB,EAAA,CAAqBa,CAArB,CAA2BN,CAA3B,CACAP,GAAA,CAAqBc,CAArB,CAA2BP,CAA3B,CACAP,GAAA,CAAqBe,CAArB,CAA2BR,CAA3B,CACAP,GAAA,CAAqBgB,CAArB,CAA2BT,CAA3B,CACAP,GAAA,CAAqBiB,CAArB,CAA2BV,CAA3B,CAEA,OAAQ5gB,EAAA8
gB,eACD,CAwBDS,QAAoC,CAACv5B,CAAD,CAAQqR,CAAR,CAAgB,CAAA,IAC9CmoB,EAAWnoB,CAAD,EAAWA,CAAAxa,eAAA,CAAsBqiC,CAAtB,CAAX,CAA0C7nB,CAA1C,CAAmDrR,CADf,CAE9CysB,CAEJ,IAAe,IAAf,EAAI+M,CAAJ,CAAqB,MAAOA,EAG5B,EADAA,CACA,CADUA,CAAA,CAAQN,CAAR,CACV,GAAeM,CAAAhN,KAAf,GACEuM,EAAA,CAAeH,CAAf,CAMA,CALM,KAKN,EALeY,EAKf,GAJE/M,CAEA,CAFU+M,CAEV,CADA/M,CAAAuM,IACA,CADcjjC,CACd,CAAA02B,CAAAD,KAAA,CAAa,QAAQ,CAAC/vB,CAAD,CAAM,CAAEgwB,CAAAuM,IAAA,CAAcv8B,CAAhB,CAA3B,CAEF,EAAA+8B,CAAA,CAAUA,CAAAR,IAPZ,CAUA,IAAI,CAACG,CAAL,CAAW,MAAOK,EAClB,IAAe,IAAf,EAAIA,CAAJ,CAAqB,MAAOzjC,EAE5B,EADAyjC,CACA,CADUA,CAAA,CAAQL,CAAR,CACV,GAAeK,CAAAhN,KAAf;CACEuM,EAAA,CAAeH,CAAf,CAMA,CALM,KAKN,EALeY,EAKf,GAJE/M,CAEA,CAFU+M,CAEV,CADA/M,CAAAuM,IACA,CADcjjC,CACd,CAAA02B,CAAAD,KAAA,CAAa,QAAQ,CAAC/vB,CAAD,CAAM,CAAEgwB,CAAAuM,IAAA,CAAcv8B,CAAhB,CAA3B,CAEF,EAAA+8B,CAAA,CAAUA,CAAAR,IAPZ,CAUA,IAAI,CAACI,CAAL,CAAW,MAAOI,EAClB,IAAe,IAAf,EAAIA,CAAJ,CAAqB,MAAOzjC,EAE5B,EADAyjC,CACA,CADUA,CAAA,CAAQJ,CAAR,CACV,GAAeI,CAAAhN,KAAf,GACEuM,EAAA,CAAeH,CAAf,CAMA,CALM,KAKN,EALeY,EAKf,GAJE/M,CAEA,CAFU+M,CAEV,CADA/M,CAAAuM,IACA,CADcjjC,CACd,CAAA02B,CAAAD,KAAA,CAAa,QAAQ,CAAC/vB,CAAD,CAAM,CAAEgwB,CAAAuM,IAAA,CAAcv8B,CAAhB,CAA3B,CAEF,EAAA+8B,CAAA,CAAUA,CAAAR,IAPZ,CAUA,IAAI,CAACK,CAAL,CAAW,MAAOG,EAClB,IAAe,IAAf,EAAIA,CAAJ,CAAqB,MAAOzjC,EAE5B,EADAyjC,CACA,CADUA,CAAA,CAAQH,CAAR,CACV,GAAeG,CAAAhN,KAAf,GACEuM,EAAA,CAAeH,CAAf,CAMA,CALM,KAKN,EALeY,EAKf,GAJE/M,CAEA,CAFU+M,CAEV,CADA/M,CAAAuM,IACA,CADcjjC,CACd,CAAA02B,CAAAD,KAAA,CAAa,QAAQ,CAAC/vB,CAAD,CAAM,CAAEgwB,CAAAuM,IAAA,CAAcv8B,CAAhB,CAA3B,CAEF,EAAA+8B,CAAA,CAAUA,CAAAR,IAPZ,CAUA,IAAI,CAACM,CAAL,CAAW,MAAOE,EAClB,IAAe,IAAf,EAAIA,CAAJ,CAAqB,MAAOzjC,EAE5B,EADAyjC,CACA,CADUA,CAAA,CAAQF,CAAR,CACV,GAAeE,CAAAhN,KAAf,GACEuM,EAAA,CAAeH,CAAf,CAMA,CALM,KAKN,EALeY,EAKf,GAJE/M,CAEA,CAFU+M,CAEV,CADA/M,CAAAuM,IACA,CADcjjC,CACd,CAAA02B,CAAAD,KAAA,CAAa,QAAQ,CAAC/vB,CAAD,CAAM,CAAEgwB,CAAAuM,IAAA,CAAcv8B,CAAhB,CAA3B,CAEF,EAAA+8B,CAAA,CAAUA,CAAAR,IAPZ,CASA,OAAOQ,EApE2C,CAxBnD,CAADC,QAAsB,CAACz5B,CAAD,CAAQqR,CAAR,CAAgB,CACpC,IAAImoB,EAAWnoB,CAAD,EAAWA,CAAAxa,eAAA,CAAsBqiC,CAAtB,CAAX,CAA0C7nB,CAA1C,CAAmDrR,CAEjE,IAAe,IAAf;AAAIw5B,CAAJ,CAAqB,MAAOA,EAC5BA,EAAA,CAAUA,CAAA,CAAQN,CAAR,CAEV,IAAI,CAACC,CAAL,CAAW,MAAOK,EAClB,IAAe,IAAf,EAAIA,CAAJ,CAAqB,MAAOzjC,EAC5ByjC,EAAA,CAAUA,CAAA,CAAQL,CAAR,CAEV,IAAI,CAACC,CAAL,CAAW,MAAOI,EAClB,IAAe,IAAf,EAAIA,CAAJ,CAAqB,MAAOzjC,EAC5ByjC,EAAA,CAAUA,CAAA,CAAQJ,CAAR,CAEV,IAAI,CAACC,CAAL,CAAW,MAAOG,EAClB,IAAe,IAAf,EAAIA,CAAJ,CAAqB,MAAOzjC,EAC5ByjC,EAAA,CAAUA,CAAA,CAAQH,CAAR,CAEV,OAAKC,EAAL,CACe,IAAf,EAAIE,CAAJ,CAA4BzjC,CAA5B,CACAyjC,CADA,CACUA,CAAA,CAAQF,CAAR,CAFV,CAAkBE,CAlBkB,CAR2B,CAwGzEE,QAASA,GAAQ,CAACj4B,CAAD,CAAOuW,CAAP,CAAgB4gB,CAAhB,CAAyB,CAIxC,GAAIe,EAAA9iC,eAAA,CAA6B4K,CAA7B,CAAJ,CACE,MAAOk4B,GAAA,CAAcl4B,CAAd,CAL+B,KAQpCm4B,EAAWn4B,CAAArD,MAAA,CAAW,GAAX,CARyB,CASpCy7B,EAAiBD,CAAAxjC,OATmB,CAUpC8F,CAGJ,IAAI8b,CAAA3U,IAAJ,CAEInH,CAAA,CADmB,CAArB,CAAI29B,CAAJ,CACOZ,EAAA,CAAgBW,CAAA,CAAS,CAAT,CAAhB,CAA6BA,CAAA,CAAS,CAAT,CAA7B,CAA0CA,CAAA,CAAS,CAAT,CAA1C,CAAuDA,CAAA,CAAS,CAAT,CAAvD,CAAoEA,CAAA,CAAS,CAAT,CAApE,CAAiFhB,CAAjF,CACe5gB,CADf,CADP,CAIO9b,QAAQ,CAAC8D,CAAD,CAAQqR,CAAR,CAAgB,CAAA,IACvBja,EAAI,CADmB,CAChBqF,CACX,GACEA,EAIA,CAJMw8B,EAAA,CAAgBW,CAAA,CAASxiC,CAAA,EAAT,CAAhB,CAA+BwiC,CAAA,CAASxiC,CAAA,EAAT,CAA/B,CAA8CwiC,CAAA,CAASxiC,CAAA,EAAT,CAA9C,CAA6DwiC,CAAA,CAASxiC,CAAA,EAAT,CAA7D,CACgBwiC,CAAA,CAASxiC,CAAA,EAAT,CADhB,CAC+BwhC,CAD/B,CACwC5gB,CADxC,CAAA,CACiDhY,CADjD,CACwDqR,CADxD,CAIN,CADAA,CACA,CADStb,CACT,CAAAiK,CAAA,CAAQvD,CALV,OAMSrF,CANT,CAMayiC,CANb,CAOA,OAAOp9B,EAToB,CALjC,KAiBO,CACL,IAAIspB
,EAAO,UACXvvB,EAAA,CAAQojC,CAAR,CAAkB,QAAQ,CAACjjC,CAAD,CAAMc,CAAN,CAAa,CACrC4gC,EAAA,CAAqB1hC,CAArB,CAA0BiiC,CAA1B,CACA7S,EAAA,EAAQ,qCAAR;CACetuB,CAEA,CAAG,GAAH,CAEG,yBAFH,CAE+Bd,CAF/B,CAEqC,UALpD,EAKkE,IALlE,CAKyEA,CALzE,CAKsF,OALtF,EAMSqhB,CAAA8gB,eACA,CAAG,2BAAH,CACaF,CAAA96B,QAAA,CAAgB,YAAhB,CAA8B,MAA9B,CADb,CAQC,4GARD,CASG,EAhBZ,CAFqC,CAAvC,CAoBA,KAAAioB,EAAAA,CAAAA,CAAQ,WAAR,CAGI+T,EAAiB,IAAIC,QAAJ,CAAa,GAAb,CAAkB,GAAlB,CAAuB,IAAvB,CAA6BhU,CAA7B,CAErB+T,EAAAxgC,SAAA,CAA0BN,CAAA,CAAQ+sB,CAAR,CAC1B7pB,EAAA,CAAK8b,CAAA8gB,eAAA,CAAyB,QAAQ,CAAC94B,CAAD,CAAQqR,CAAR,CAAgB,CACpD,MAAOyoB,EAAA,CAAe95B,CAAf,CAAsBqR,CAAtB,CAA8B0nB,EAA9B,CAD6C,CAAjD,CAEDe,CA9BC,CAmCM,gBAAb,GAAIr4B,CAAJ,GACEk4B,EAAA,CAAcl4B,CAAd,CADF,CACwBvF,CADxB,CAGA,OAAOA,EApEiC,CA2H1C8K,QAASA,GAAc,EAAG,CACxB,IAAIgK,EAAQ,EAAZ,CAEIgpB,EAAgB,KACb,CAAA,CADa,gBAEF,CAAA,CAFE,oBAGE,CAAA,CAHF,CAmDpB,KAAAlB,eAAA;AAAsBmB,QAAQ,CAAC1iC,CAAD,CAAQ,CACpC,MAAI2B,EAAA,CAAU3B,CAAV,CAAJ,EACEyiC,CAAAlB,eACO,CADwB,CAAC,CAACvhC,CAC1B,CAAA,IAFT,EAISyiC,CAAAlB,eAL2B,CA2BvC,KAAAoB,mBAAA,CAA0BC,QAAQ,CAAC5iC,CAAD,CAAQ,CACvC,MAAI2B,EAAA,CAAU3B,CAAV,CAAJ,EACEyiC,CAAAE,mBACO,CAD4B3iC,CAC5B,CAAA,IAFT,EAISyiC,CAAAE,mBAL8B,CAUzC,KAAAhqB,KAAA,CAAY,CAAC,SAAD,CAAY,UAAZ,CAAwB,MAAxB,CAAgC,QAAQ,CAACkqB,CAAD,CAAUvmB,CAAV,CAAoBD,CAApB,CAA0B,CAC5EomB,CAAA32B,IAAA,CAAoBwQ,CAAAxQ,IAEpB01B,GAAA,CAAiBA,QAAyB,CAACH,CAAD,CAAU,CAC7CoB,CAAAE,mBAAL,EAAyC,CAAAG,EAAAxjC,eAAA,CAAmC+hC,CAAnC,CAAzC,GACAyB,EAAA,CAAoBzB,CAApB,CACA,CAD+B,CAAA,CAC/B,CAAAhlB,CAAAqD,KAAA,CAAU,4CAAV,CAAyD2hB,CAAzD,CACI,2EADJ,CAFA,CADkD,CAOpD,OAAO,SAAQ,CAACxH,CAAD,CAAM,CACnB,IAAIkJ,CAEJ,QAAQ,MAAOlJ,EAAf,EACE,KAAK,QAAL,CAEE,GAAIpgB,CAAAna,eAAA,CAAqBu6B,CAArB,CAAJ,CACE,MAAOpgB,EAAA,CAAMogB,CAAN,CAGLmJ;CAAAA,CAAQ,IAAIC,EAAJ,CAAUR,CAAV,CAEZM,EAAA,CAAmBt9B,CADNy9B,IAAIC,EAAJD,CAAWF,CAAXE,CAAkBL,CAAlBK,CAA2BT,CAA3BS,CACMz9B,OAAA,CAAao0B,CAAb,CAEP,iBAAZ,GAAIA,CAAJ,GAGEpgB,CAAA,CAAMogB,CAAN,CAHF,CAGekJ,CAHf,CAMA,OAAOA,EAET,MAAK,UAAL,CACE,MAAOlJ,EAET,SACE,MAAOv4B,EAvBX,CAHmB,CAVuD,CAAlE,CA3FY,CAyS1BqO,QAASA,GAAU,EAAG,CAEpB,IAAAgJ,KAAA,CAAY,CAAC,YAAD,CAAe,mBAAf,CAAoC,QAAQ,CAAC4C,CAAD,CAAaoH,CAAb,CAAgC,CACtF,MAAOygB,GAAA,CAAS,QAAQ,CAACjlB,CAAD,CAAW,CACjC5C,CAAAjY,WAAA,CAAsB6a,CAAtB,CADiC,CAA5B,CAEJwE,CAFI,CAD+E,CAA5E,CAFQ,CAkBtBygB,QAASA,GAAQ,CAACC,CAAD,CAAWC,CAAX,CAA6B,CAyR5CC,QAASA,EAAe,CAACvjC,CAAD,CAAQ,CAC9B,MAAOA,EADuB,CAKhCwjC,QAASA,EAAc,CAAC55B,CAAD,CAAS,CAC9B,MAAOmqB,EAAA,CAAOnqB,CAAP,CADuB,CAlRhC,IAAIkW,EAAQA,QAAQ,EAAG,CAAA,IACjB2jB,EAAU,EADO,CAEjBzjC,CAFiB,CAEVk2B,CA+HX,OA7HAA,EA6HA,CA7HW,SAEAC,QAAQ,CAACjxB,CAAD,CAAM,CACrB,GAAIu+B,CAAJ,CAAa,CACX,IAAI/L,EAAY+L,CAChBA,EAAA,CAAUjlC,CACVwB,EAAA,CAAQ0jC,CAAA,CAAIx+B,CAAJ,CAEJwyB,EAAA74B,OAAJ,EACEwkC,CAAA,CAAS,QAAQ,EAAG,CAElB,IADA,IAAIllB,CAAJ,CACSte,EAAI,CADb,CACgB6V,EAAKgiB,CAAA74B,OAArB,CAAuCgB,CAAvC,CAA2C6V,CAA3C,CAA+C7V,CAAA,EAA/C,CACEse,CACA,CADWuZ,CAAA,CAAU73B,CAAV,CACX,CAAAG,CAAAi1B,KAAA,CAAW9W,CAAA,CAAS,CAAT,CAAX,CAAwBA,CAAA,CAAS,CAAT,CAAxB,CAAqCA,CAAA,CAAS,CAAT,CAArC,CAJgB,CAApB,CANS,CADQ,CAFd,QAqBD4V,QAAQ,CAACnqB,CAAD,CAAS,CACvBssB,CAAAC,QAAA,CAAiBwN,CAAA,CAA8B/5B,CAA9B,CAAjB,CADuB,CArBhB;OA0BDixB,QAAQ,CAAC+I,CAAD,CAAW,CACzB,GAAIH,CAAJ,CAAa,CACX,IAAI/L,EAAY+L,CAEZA,EAAA5kC,OAAJ,EACEwkC,CAAA,CAAS,QAAQ,EAAG,CAElB,IADA,IAAIllB,CAAJ,CACSte,EAAI,CADb,CACgB6V,EAAKgiB,CAAA74B,OAArB,CAAuCgB,CAAvC,CAA2C6V,CAA3C,CAA+C7V,CAAA,EAA/C,CACEse,CACA,CADWuZ,CAAA,CAAU73B,CAAV,CACX,CAAAse,CAAA,CAAS,CAAT,CAAA,CAAYylB,CAAZ,CAJgB,CAApB,CAJS,CADY,CA1BlB,SA2CA,MACD3O,QAAQ,CAAC9W,CAAD,CAAW0lB,CAAX,CAAoBC,CAApB,CAAkC,CAC9C,IAAIrgC,EAASqc,CAAA,EAAb,CAEIikB,EAAkBA,QAAQ,CAAC/jC,CAA
D,CAAQ,CACpC,GAAI,CACFyD,CAAA0yB,QAAA,CAAgB,CAAA92B,CAAA,CAAW8e,CAAX,CAAA,CAAuBA,CAAvB,CAAkColB,CAAlC,EAAmDvjC,CAAnD,CAAhB,CADE,CAEF,MAAMkG,CAAN,CAAS,CACTzC,CAAAswB,OAAA,CAAc7tB,CAAd,CACA,CAAAo9B,CAAA,CAAiBp9B,CAAjB,CAFS,CAHyB,CAFtC,CAWI89B,EAAiBA,QAAQ,CAACp6B,CAAD,CAAS,CACpC,GAAI,CACFnG,CAAA0yB,QAAA,CAAgB,CAAA92B,CAAA,CAAWwkC,CAAX,CAAA,CAAsBA,CAAtB,CAAgCL,CAAhC,EAAgD55B,CAAhD,CAAhB,CADE,CAEF,MAAM1D,CAAN,CAAS,CACTzC,CAAAswB,OAAA,CAAc7tB,CAAd,CACA,CAAAo9B,CAAA,CAAiBp9B,CAAjB,CAFS,CAHyB,CAXtC,CAoBI+9B,EAAsBA,QAAQ,CAACL,CAAD,CAAW,CAC3C,GAAI,CACFngC,CAAAo3B,OAAA,CAAe,CAAAx7B,CAAA,CAAWykC,CAAX,CAAA,CAA2BA,CAA3B,CAA0CP,CAA1C,EAA2DK,CAA3D,CAAf,CADE,CAEF,MAAM19B,CAAN,CAAS,CACTo9B,CAAA,CAAiBp9B,CAAjB,CADS,CAHgC,CAQzCu9B,EAAJ,CACEA,CAAA/jC,KAAA,CAAa,CAACqkC,CAAD,CAAkBC,CAAlB,CAAkCC,CAAlC,CAAb,CADF,CAGEjkC,CAAAi1B,KAAA,CAAW8O,CAAX,CAA4BC,CAA5B,CAA4CC,CAA5C,CAGF,OAAOxgC,EAAAyxB,QAnCuC,CADzC,CAuCP,OAvCO,CAuCEgP,QAAQ,CAAC/lB,CAAD,CAAW,CAC1B,MAAO,KAAA8W,KAAA,CAAU,IAAV,CAAgB9W,CAAhB,CADmB,CAvCrB,CA2CP,SA3CO,CA2CIgmB,QAAQ,CAAChmB,CAAD,CAAW,CAE5BimB,QAASA,EAAW,CAACpkC,CAAD,CAAQqkC,CAAR,CAAkB,CACpC,IAAI5gC,EAASqc,CAAA,EACTukB,EAAJ,CACE5gC,CAAA0yB,QAAA,CAAen2B,CAAf,CADF;AAGEyD,CAAAswB,OAAA,CAAc/zB,CAAd,CAEF,OAAOyD,EAAAyxB,QAP6B,CAUtCoP,QAASA,EAAc,CAACtkC,CAAD,CAAQukC,CAAR,CAAoB,CACzC,IAAIC,EAAiB,IACrB,IAAI,CACFA,CAAA,CAAkB,CAAArmB,CAAA,EAAWolB,CAAX,GADhB,CAEF,MAAMr9B,CAAN,CAAS,CACT,MAAOk+B,EAAA,CAAYl+B,CAAZ,CAAe,CAAA,CAAf,CADE,CAGX,MAAkBs+B,EAAlB,EAvsVInlC,CAAA,CAusVcmlC,CAvsVHvP,KAAX,CAusVJ,CACSuP,CAAAvP,KAAA,CAAoB,QAAQ,EAAG,CACpC,MAAOmP,EAAA,CAAYpkC,CAAZ,CAAmBukC,CAAnB,CAD6B,CAA/B,CAEJ,QAAQ,CAAC5nB,CAAD,CAAQ,CACjB,MAAOynB,EAAA,CAAYznB,CAAZ,CAAmB,CAAA,CAAnB,CADU,CAFZ,CADT,CAOSynB,CAAA,CAAYpkC,CAAZ,CAAmBukC,CAAnB,CAdgC,CAkB3C,MAAO,KAAAtP,KAAA,CAAU,QAAQ,CAACj1B,CAAD,CAAQ,CAC/B,MAAOskC,EAAA,CAAetkC,CAAf,CAAsB,CAAA,CAAtB,CADwB,CAA1B,CAEJ,QAAQ,CAAC2c,CAAD,CAAQ,CACjB,MAAO2nB,EAAA,CAAe3nB,CAAf,CAAsB,CAAA,CAAtB,CADU,CAFZ,CA9BqB,CA3CvB,CA3CA,CAJU,CAAvB,CAqII+mB,EAAMA,QAAQ,CAAC1jC,CAAD,CAAQ,CACxB,MAAkBA,EAAlB,EAhuVYX,CAAA,CAguVMW,CAhuVKi1B,KAAX,CAguVZ,CAAiCj1B,CAAjC,CACO,MACCi1B,QAAQ,CAAC9W,CAAD,CAAW,CACvB,IAAI1a,EAASqc,CAAA,EACbujB,EAAA,CAAS,QAAQ,EAAG,CAClB5/B,CAAA0yB,QAAA,CAAehY,CAAA,CAASne,CAAT,CAAf,CADkB,CAApB,CAGA,OAAOyD,EAAAyxB,QALgB,CADpB,CAFiB,CArI1B,CAuLInB,EAASA,QAAQ,CAACnqB,CAAD,CAAS,CAC5B,IAAInG,EAASqc,CAAA,EACbrc,EAAAswB,OAAA,CAAcnqB,CAAd,CACA,OAAOnG,EAAAyxB,QAHqB,CAvL9B,CA6LIyO,EAAgCA,QAAQ,CAAC/5B,CAAD,CAAS,CACnD,MAAO,MACCqrB,QAAQ,CAAC9W,CAAD,CAAW0lB,CAAX,CAAoB,CAChC,IAAIpgC,EAASqc,CAAA,EACbujB,EAAA,CAAS,QAAQ,EAAG,CAClB,GAAI,CACF5/B,CAAA0yB,QAAA,CAAgB,CAAA92B,CAAA,CAAWwkC,CAAX,CAAA;AAAsBA,CAAtB,CAAgCL,CAAhC,EAAgD55B,CAAhD,CAAhB,CADE,CAEF,MAAM1D,CAAN,CAAS,CACTzC,CAAAswB,OAAA,CAAc7tB,CAAd,CACA,CAAAo9B,CAAA,CAAiBp9B,CAAjB,CAFS,CAHO,CAApB,CAQA,OAAOzC,EAAAyxB,QAVyB,CAD7B,CAD4C,CAiIrD,OAAO,OACEpV,CADF,QAEGiU,CAFH,MAlGIoB,QAAQ,CAACn1B,CAAD,CAAQme,CAAR,CAAkB0lB,CAAlB,CAA2BC,CAA3B,CAAyC,CAAA,IACtDrgC,EAASqc,CAAA,EAD6C,CAEtD+V,CAFsD,CAItDkO,EAAkBA,QAAQ,CAAC/jC,CAAD,CAAQ,CACpC,GAAI,CACF,MAAQ,CAAAX,CAAA,CAAW8e,CAAX,CAAA,CAAuBA,CAAvB,CAAkColB,CAAlC,EAAmDvjC,CAAnD,CADN,CAEF,MAAOkG,CAAP,CAAU,CAEV,MADAo9B,EAAA,CAAiBp9B,CAAjB,CACO,CAAA6tB,CAAA,CAAO7tB,CAAP,CAFG,CAHwB,CAJoB,CAatD89B,EAAiBA,QAAQ,CAACp6B,CAAD,CAAS,CACpC,GAAI,CACF,MAAQ,CAAAvK,CAAA,CAAWwkC,CAAX,CAAA,CAAsBA,CAAtB,CAAgCL,CAAhC,EAAgD55B,CAAhD,CADN,CAEF,MAAO1D,CAAP,CAAU,CAEV,MADAo9B,EAAA,CAAiBp9B,CAAjB,CACO,CAAA6tB,CAAA,CAAO7tB,CAAP,CAFG,CAHwB,CAboB,CAsBtD+9B,EAAsBA,QAAQ,CAACL,CAAD,CAAW,CAC3C,GAAI,CACF,MAAQ,CAAAvkC,CAAA,CAAWykC,CAAX,CAAA,CAA2BA,CAA3B,CAA0CP,CAA1C,
EAA2DK,CAA3D,CADN,CAEF,MAAO19B,CAAP,CAAU,CACVo9B,CAAA,CAAiBp9B,CAAjB,CADU,CAH+B,CAQ7Cm9B,EAAA,CAAS,QAAQ,EAAG,CAClBK,CAAA,CAAI1jC,CAAJ,CAAAi1B,KAAA,CAAgB,QAAQ,CAACj1B,CAAD,CAAQ,CAC1B61B,CAAJ,GACAA,CACA,CADO,CAAA,CACP,CAAApyB,CAAA0yB,QAAA,CAAeuN,CAAA,CAAI1jC,CAAJ,CAAAi1B,KAAA,CAAgB8O,CAAhB,CAAiCC,CAAjC,CAAiDC,CAAjD,CAAf,CAFA,CAD8B,CAAhC,CAIG,QAAQ,CAACr6B,CAAD,CAAS,CACdisB,CAAJ,GACAA,CACA,CADO,CAAA,CACP,CAAApyB,CAAA0yB,QAAA,CAAe6N,CAAA,CAAep6B,CAAf,CAAf,CAFA,CADkB,CAJpB,CAQG,QAAQ,CAACg6B,CAAD,CAAW,CAChB/N,CAAJ,EACApyB,CAAAo3B,OAAA,CAAcoJ,CAAA,CAAoBL,CAApB,CAAd,CAFoB,CARtB,CADkB,CAApB,CAeA,OAAOngC,EAAAyxB,QA7CmD,CAkGrD,KAxBPhd,QAAY,CAACusB,CAAD,CAAW,CAAA,IACjBvO,EAAWpW,CAAA,EADM,CAEjBgZ,EAAU,CAFO,CAGjBn2B,EAAU3D,CAAA,CAAQylC,CAAR,CAAA;AAAoB,EAApB,CAAyB,EAEvCxlC,EAAA,CAAQwlC,CAAR,CAAkB,QAAQ,CAACvP,CAAD,CAAU91B,CAAV,CAAe,CACvC05B,CAAA,EACA4K,EAAA,CAAIxO,CAAJ,CAAAD,KAAA,CAAkB,QAAQ,CAACj1B,CAAD,CAAQ,CAC5B2C,CAAArD,eAAA,CAAuBF,CAAvB,CAAJ,GACAuD,CAAA,CAAQvD,CAAR,CACA,CADeY,CACf,CAAM,EAAE84B,CAAR,EAAkB5C,CAAAC,QAAA,CAAiBxzB,CAAjB,CAFlB,CADgC,CAAlC,CAIG,QAAQ,CAACiH,CAAD,CAAS,CACdjH,CAAArD,eAAA,CAAuBF,CAAvB,CAAJ,EACA82B,CAAAnC,OAAA,CAAgBnqB,CAAhB,CAFkB,CAJpB,CAFuC,CAAzC,CAYgB,EAAhB,GAAIkvB,CAAJ,EACE5C,CAAAC,QAAA,CAAiBxzB,CAAjB,CAGF,OAAOuzB,EAAAhB,QArBc,CAwBhB,CA1UqC,CAkV9ChlB,QAASA,GAAa,EAAE,CACtB,IAAAyI,KAAA,CAAY,CAAC,SAAD,CAAY,UAAZ,CAAwB,QAAQ,CAAC0C,CAAD,CAAUa,CAAV,CAAoB,CAC9D,IAAIwoB,EAAwBrpB,CAAAqpB,sBAAxBA,EACwBrpB,CAAAspB,4BADxBD,EAEwBrpB,CAAAupB,yBAF5B,CAIIC,EAAuBxpB,CAAAwpB,qBAAvBA,EACuBxpB,CAAAypB,2BADvBD,EAEuBxpB,CAAA0pB,wBAFvBF,EAGuBxpB,CAAA2pB,kCAP3B,CASIC,EAAe,CAAC,CAACP,CATrB,CAUIQ,EAAMD,CACA,CAAN,QAAQ,CAACtgC,CAAD,CAAK,CACX,IAAIwgC,EAAKT,CAAA,CAAsB//B,CAAtB,CACT,OAAO,SAAQ,EAAG,CAChBkgC,CAAA,CAAqBM,CAArB,CADgB,CAFP,CAAP;AAMN,QAAQ,CAACxgC,CAAD,CAAK,CACX,IAAIygC,EAAQlpB,CAAA,CAASvX,CAAT,CAAa,KAAb,CAAoB,CAAA,CAApB,CACZ,OAAO,SAAQ,EAAG,CAChBuX,CAAAgE,OAAA,CAAgBklB,CAAhB,CADgB,CAFP,CAOjBF,EAAA/oB,UAAA,CAAgB8oB,CAEhB,OAAOC,EA3BuD,CAApD,CADU,CAmGxBx1B,QAASA,GAAkB,EAAE,CAC3B,IAAI21B,EAAM,EAAV,CACIC,EAAmB7mC,CAAA,CAAO,YAAP,CADvB,CAEI8mC,EAAiB,IAErB,KAAAC,UAAA,CAAiBC,QAAQ,CAACzlC,CAAD,CAAQ,CAC3Be,SAAAlC,OAAJ,GACEwmC,CADF,CACQrlC,CADR,CAGA,OAAOqlC,EAJwB,CAOjC,KAAA1sB,KAAA,CAAY,CAAC,WAAD,CAAc,mBAAd,CAAmC,QAAnC,CAA6C,UAA7C,CACR,QAAQ,CAAE4B,CAAF,CAAeoI,CAAf,CAAoCc,CAApC,CAA8CgQ,CAA9C,CAAwD,CA0ClEiS,QAASA,EAAK,EAAG,CACf,IAAAC,IAAA,CAAW1lC,EAAA,EACX,KAAAg2B,QAAA,CAAe,IAAA2P,QAAf,CAA8B,IAAAC,WAA9B,CACe,IAAAC,cADf,CACoC,IAAAC,cADpC,CAEe,IAAAC,YAFf,CAEkC,IAAAC,YAFlC,CAEqD,IACrD,KAAA,CAAK,MAAL,CAAA,CAAe,IAAAC,MAAf,CAA6B,IAC7B,KAAAC,YAAA,CAAmB,CAAA,CACnB,KAAAC,aAAA,CAAoB,EACpB,KAAAC,kBAAA;AAAyB,EACzB,KAAAC,YAAA,CAAmB,EACnB,KAAAC,gBAAA,CAAuB,EACvB,KAAA/b,kBAAA,CAAyB,EAXV,CA+9BjBgc,QAASA,EAAU,CAACC,CAAD,CAAQ,CACzB,GAAIlrB,CAAA0a,QAAJ,CACE,KAAMqP,EAAA,CAAiB,QAAjB,CAAsD/pB,CAAA0a,QAAtD,CAAN,CAGF1a,CAAA0a,QAAA,CAAqBwQ,CALI,CAY3BC,QAASA,EAAW,CAAC7M,CAAD,CAAMjyB,CAAN,CAAY,CAC9B,IAAIjD,EAAK8e,CAAA,CAAOoW,CAAP,CACThwB,GAAA,CAAYlF,CAAZ,CAAgBiD,CAAhB,CACA,OAAOjD,EAHuB,CAMhCgiC,QAASA,EAAsB,CAACC,CAAD,CAAUtM,CAAV,CAAiB1yB,CAAjB,CAAuB,CACpD,EACEg/B,EAAAL,gBAAA,CAAwB3+B,CAAxB,CAEA,EAFiC0yB,CAEjC,CAAsC,CAAtC,GAAIsM,CAAAL,gBAAA,CAAwB3+B,CAAxB,CAAJ,EACE,OAAOg/B,CAAAL,gBAAA,CAAwB3+B,CAAxB,CAJX,OAMUg/B,CANV,CAMoBA,CAAAhB,QANpB,CADoD,CActDiB,QAASA,EAAY,EAAG,EAz+BxBnB,CAAAvrB,UAAA,CAAkB,aACHurB,CADG,MA0BVhgB,QAAQ,CAACohB,CAAD,CAAU,CAIlBA,CAAJ,EACEC,CAIA,CAJQ,IAAIrB,CAIZ,CAHAqB,CAAAb,MAGA,CAHc,IAAAA,MAGd,CADAa,CAAAX,aACA,CADqB,IAAAA,aACrB,CAAAW,CAAAV,kBAAA,CAA0B,IAAAA,kBAL5B,GASO,IAAAW,kBAWL,GAVE,IAAAA,kBAQA;AARyBC,QAAQ,EAAG,CAClC,IA
AApB,WAAA,CAAkB,IAAAC,cAAlB,CACI,IAAAE,YADJ,CACuB,IAAAC,YADvB,CAC0C,IAC1C,KAAAK,YAAA,CAAmB,EACnB,KAAAC,gBAAA,CAAuB,EACvB,KAAAZ,IAAA,CAAW1lC,EAAA,EACX,KAAA+mC,kBAAA,CAAyB,IANS,CAQpC,CAAA,IAAAA,kBAAA7sB,UAAA,CAAmC,IAErC,EAAA4sB,CAAA,CAAQ,IAAI,IAAAC,kBApBd,CAsBAD,EAAA,CAAM,MAAN,CAAA,CAAgBA,CAChBA,EAAAnB,QAAA,CAAgB,IAChBmB,EAAAhB,cAAA,CAAsB,IAAAE,YAClB,KAAAD,YAAJ,CAEE,IAAAC,YAFF,CACE,IAAAA,YAAAH,cADF,CACmCiB,CADnC,CAIE,IAAAf,YAJF,CAIqB,IAAAC,YAJrB,CAIwCc,CAExC,OAAOA,EAnCe,CA1BR,QAsLRxjC,QAAQ,CAAC2jC,CAAD,CAAW1pB,CAAX,CAAqB2pB,CAArB,CAAqC,CAAA,IAE/CjuB,EAAMwtB,CAAA,CAAYQ,CAAZ,CAAsB,OAAtB,CAFyC,CAG/CpkC,EAFQ2F,IAEAo9B,WAHuC,CAI/CuB,EAAU,IACJ5pB,CADI,MAEFqpB,CAFE,KAGH3tB,CAHG,KAIHguB,CAJG;GAKJ,CAAC,CAACC,CALE,CAQd5B,EAAA,CAAiB,IAGjB,IAAI,CAAClmC,CAAA,CAAWme,CAAX,CAAL,CAA2B,CACzB,IAAI6pB,EAAWX,CAAA,CAAYlpB,CAAZ,EAAwBlc,CAAxB,CAA8B,UAA9B,CACf8lC,EAAAziC,GAAA,CAAa2iC,QAAQ,CAACC,CAAD,CAASC,CAAT,CAAiB/+B,CAAjB,CAAwB,CAAC4+B,CAAA,CAAS5+B,CAAT,CAAD,CAFpB,CAK3B,GAAuB,QAAvB,EAAI,MAAOy+B,EAAX,EAAmChuB,CAAAsB,SAAnC,CAAiD,CAC/C,IAAIitB,EAAaL,CAAAziC,GACjByiC,EAAAziC,GAAA,CAAa2iC,QAAQ,CAACC,CAAD,CAASC,CAAT,CAAiB/+B,CAAjB,CAAwB,CAC3Cg/B,CAAAloC,KAAA,CAAgB,IAAhB,CAAsBgoC,CAAtB,CAA8BC,CAA9B,CAAsC/+B,CAAtC,CACA1F,GAAA,CAAYD,CAAZ,CAAmBskC,CAAnB,CAF2C,CAFE,CAQ5CtkC,CAAL,GACEA,CADF,CA3BY2F,IA4BFo9B,WADV,CAC6B,EAD7B,CAKA/iC,EAAArC,QAAA,CAAc2mC,CAAd,CAEA,OAAOM,SAAwB,EAAG,CAChC3kC,EAAA,CAAYD,CAAZ,CAAmBskC,CAAnB,CACA7B,EAAA,CAAiB,IAFe,CAnCiB,CAtLrC,kBAuREoC,QAAQ,CAAChpC,CAAD,CAAM6e,CAAN,CAAgB,CACxC,IAAI9Y,EAAO,IAAX,CAEIorB,CAFJ,CAKIC,CALJ,CAOI6X,CAPJ,CASIC,EAAuC,CAAvCA,CAAqBrqB,CAAA3e,OATzB,CAUIipC,EAAiB,CAVrB,CAWIC,EAAYtkB,CAAA,CAAO9kB,CAAP,CAXhB,CAYIqpC,EAAgB,EAZpB,CAaIC,EAAiB,EAbrB,CAcIC,EAAU,CAAA,CAdd,CAeIC,EAAY,CAwGhB,OAAO,KAAA5kC,OAAA,CAtGP6kC,QAA8B,EAAG,CAC/BtY,CAAA,CAAWiY,CAAA,CAAUrjC,CAAV,CADoB,KAE3B2jC,CAF2B,CAEhBjpC,CAFgB,CAEXkpC,CAEpB,IAAK1mC,CAAA,CAASkuB,CAAT,CAAL,CAKO,GAAIpxB,EAAA,CAAYoxB,CAAZ,CAAJ,CAgBL,IAfIC,CAeKlwB,GAfQmoC,CAeRnoC,GAbPkwB,CAEA,CAFWiY,CAEX,CADAG,CACA,CADYpY,CAAAlxB,OACZ,CAD8B,CAC9B,CAAAipC,CAAA,EAWOjoC,EARTwoC,CAQSxoC,CARGiwB,CAAAjxB,OAQHgB,CANLsoC,CAMKtoC,GANSwoC,CAMTxoC,GAJPioC,CAAA,EACA,CAAA/X,CAAAlxB,OAAA,CAAkBspC,CAAlB,CAA8BE,CAGvBxoC,EAAAA,CAAAA,CAAI,CAAb,CAAgBA,CAAhB,CAAoBwoC,CAApB,CAA+BxoC,CAAA,EAA/B,CACEyoC,CAEA,CAFWvY,CAAA,CAASlwB,CAAT,CAEX,GAF2BkwB,CAAA,CAASlwB,CAAT,CAE3B,EADKiwB,CAAA,CAASjwB,CAAT,CACL;AADqBiwB,CAAA,CAASjwB,CAAT,CACrB,CAAKyoC,CAAL,EAAiBvY,CAAA,CAASlwB,CAAT,CAAjB,GAAiCiwB,CAAA,CAASjwB,CAAT,CAAjC,GACEioC,CAAA,EACA,CAAA/X,CAAA,CAASlwB,CAAT,CAAA,CAAciwB,CAAA,CAASjwB,CAAT,CAFhB,CAnBG,KAwBA,CACDkwB,CAAJ,GAAiBkY,CAAjB,GAEElY,CAEA,CAFWkY,CAEX,CAF4B,EAE5B,CADAE,CACA,CADY,CACZ,CAAAL,CAAA,EAJF,CAOAO,EAAA,CAAY,CACZ,KAAKjpC,CAAL,GAAY0wB,EAAZ,CACMA,CAAAxwB,eAAA,CAAwBF,CAAxB,CAAJ,GACEipC,CAAA,EACA,CAAItY,CAAAzwB,eAAA,CAAwBF,CAAxB,CAAJ,EACEkpC,CAEA,CAFWvY,CAAA,CAAS3wB,CAAT,CAEX,GAF6B2wB,CAAA,CAAS3wB,CAAT,CAE7B,EADK0wB,CAAA,CAAS1wB,CAAT,CACL,GADuB0wB,CAAA,CAAS1wB,CAAT,CACvB,CAAKkpC,CAAL,EAAiBvY,CAAA,CAAS3wB,CAAT,CAAjB,GAAmC0wB,CAAA,CAAS1wB,CAAT,CAAnC,GACE0oC,CAAA,EACA,CAAA/X,CAAA,CAAS3wB,CAAT,CAAA,CAAgB0wB,CAAA,CAAS1wB,CAAT,CAFlB,CAHF,GAQE+oC,CAAA,EAEA,CADApY,CAAA,CAAS3wB,CAAT,CACA,CADgB0wB,CAAA,CAAS1wB,CAAT,CAChB,CAAA0oC,CAAA,EAVF,CAFF,CAgBF,IAAIK,CAAJ,CAAgBE,CAAhB,CAGE,IAAIjpC,CAAJ,GADA0oC,EAAA,EACW/X,CAAAA,CAAX,CACMA,CAAAzwB,eAAA,CAAwBF,CAAxB,CAAJ,EAAqC,CAAA0wB,CAAAxwB,eAAA,CAAwBF,CAAxB,CAArC,GACE+oC,CAAA,EACA,CAAA,OAAOpY,CAAA,CAAS3wB,CAAT,CAFT,CA9BC,CA7BP,IACM2wB,EAAJ,GAAiBD,CAAjB,GACEC,CACA,CADWD,CACX,CAAAgY,CAAA,EAFF,CAiEF,OAAOA,EAtEwB,CAsG1B,CA7BPS,QAA+B,EAAG,CAC5BL,CAAJ
,EACEA,CACA,CADU,CAAA,CACV,CAAA1qB,CAAA,CAASsS,CAAT,CAAmBA,CAAnB,CAA6BprB,CAA7B,CAFF,EAIE8Y,CAAA,CAASsS,CAAT,CAAmB8X,CAAnB,CAAiCljC,CAAjC,CAIF,IAAImjC,CAAJ,CACE,GAAKjmC,CAAA,CAASkuB,CAAT,CAAL,CAGO,GAAIpxB,EAAA,CAAYoxB,CAAZ,CAAJ,CAA2B,CAChC8X,CAAA,CAAmBriB,KAAJ,CAAUuK,CAAAjxB,OAAV,CACf,KAAK,IAAIgB,EAAI,CAAb,CAAgBA,CAAhB,CAAoBiwB,CAAAjxB,OAApB,CAAqCgB,CAAA,EAArC,CACE+nC,CAAA,CAAa/nC,CAAb,CAAA,CAAkBiwB,CAAA,CAASjwB,CAAT,CAHY,CAA3B,IAOL,KAAST,CAAT,GADAwoC,EACgB9X,CADD,EACCA,CAAAA,CAAhB,CACMxwB,EAAAC,KAAA,CAAoBuwB,CAApB,CAA8B1wB,CAA9B,CAAJ,GACEwoC,CAAA,CAAaxoC,CAAb,CADF,CACsB0wB,CAAA,CAAS1wB,CAAT,CADtB,CAXJ,KAEEwoC,EAAA,CAAe9X,CAZa,CA6B3B,CAxHiC,CAvR1B,SAqcP6P,QAAQ,EAAG,CAAA,IACd6I,CADc;AACPxoC,CADO,CACAoY,CADA,CAEdqwB,CAFc,CAGdC,EAAa,IAAAtC,aAHC,CAIduC,EAAkB,IAAAtC,kBAJJ,CAKdxnC,CALc,CAMd+pC,CANc,CAMPC,EAAMxD,CANC,CAORuB,CAPQ,CAQdkC,EAAW,EARG,CASdC,CATc,CASNC,CATM,CASEC,CAEpBzC,EAAA,CAAW,SAAX,CAEAjB,EAAA,CAAiB,IAEjB,GAAG,CACDqD,CAAA,CAAQ,CAAA,CAGR,KAFAhC,CAEA,CAZ0BlwB,IAY1B,CAAMgyB,CAAA7pC,OAAN,CAAA,CAAyB,CACvB,GAAI,CACFoqC,CACA,CADYP,CAAAn3B,MAAA,EACZ,CAAA03B,CAAAxgC,MAAAygC,MAAA,CAAsBD,CAAA7W,WAAtB,CAFE,CAGF,MAAOlsB,CAAP,CAAU,CAsflBqV,CAAA0a,QApfQ,CAofa,IApfb,CAAAtT,CAAA,CAAkBzc,CAAlB,CAFU,CAIZq/B,CAAA,CAAiB,IARM,CAWzB,CAAA,CACA,EAAG,CACD,GAAKkD,CAAL,CAAgB7B,CAAAf,WAAhB,CAGE,IADAhnC,CACA,CADS4pC,CAAA5pC,OACT,CAAOA,CAAA,EAAP,CAAA,CACE,GAAI,CAIF,GAHA2pC,CAGA,CAHQC,CAAA,CAAS5pC,CAAT,CAGR,CACE,IAAKmB,CAAL,CAAawoC,CAAAtvB,IAAA,CAAU0tB,CAAV,CAAb,KAAsCxuB,CAAtC,CAA6CowB,CAAApwB,KAA7C,GACI,EAAEowB,CAAA3jB,GACA,CAAI3gB,EAAA,CAAOlE,CAAP,CAAcoY,CAAd,CAAJ,CACsB,QADtB,GACK,MAAOpY,EADZ,EACkD,QADlD,GACkC,MAAOoY,EADzC,EAEQ7T,KAAA,CAAMvE,CAAN,CAFR,EAEwBuE,KAAA,CAAM6T,CAAN,CAH1B,CADJ,CAKEwwB,CAIA,CAJQ,CAAA,CAIR,CAHArD,CAGA,CAHiBiD,CAGjB,CAFAA,CAAApwB,KAEA,CAFaowB,CAAA3jB,GAAA,CAAW5hB,EAAA,CAAKjD,CAAL,CAAY,IAAZ,CAAX,CAA+BA,CAE5C,CADAwoC,CAAA7jC,GAAA,CAAS3E,CAAT,CAAkBoY,CAAD,GAAUyuB,CAAV,CAA0B7mC,CAA1B,CAAkCoY,CAAnD,CAA0DwuB,CAA1D,CACA,CAAU,CAAV,CAAIiC,CAAJ,GACEE,CAMA,CANS,CAMT,CANaF,CAMb,CALKC,CAAA,CAASC,CAAT,CAKL,GALuBD,CAAA,CAASC,CAAT,CAKvB,CAL0C,EAK1C,EAJAC,CAIA,CAJU3pC,CAAA,CAAWmpC,CAAA3O,IAAX,CACD,CAAH,MAAG,EAAO2O,CAAA3O,IAAAjyB,KAAP,EAAyB4gC,CAAA3O,IAAA93B,SAAA,EAAzB,EACHymC,CAAA3O,IAEN;AADAmP,CACA,EADU,YACV,CADyB7jC,EAAA,CAAOnF,CAAP,CACzB,CADyC,YACzC,CADwDmF,EAAA,CAAOiT,CAAP,CACxD,CAAA0wB,CAAA,CAASC,CAAT,CAAArpC,KAAA,CAAsBspC,CAAtB,CAPF,CATF,KAkBO,IAAIR,CAAJ,GAAcjD,CAAd,CAA8B,CAGnCqD,CAAA,CAAQ,CAAA,CACR,OAAM,CAJ6B,CAvBrC,CA8BF,MAAO1iC,CAAP,CAAU,CA2ctBqV,CAAA0a,QAzcY,CAycS,IAzcT,CAAAtT,CAAA,CAAkBzc,CAAlB,CAFU,CAUhB,GAAI,EAAEijC,CAAF,CAAUvC,CAAAZ,YAAV,EACCY,CADD,GArEoBlwB,IAqEpB,EACuBkwB,CAAAd,cADvB,CAAJ,CAEE,IAAA,CAAMc,CAAN,GAvEsBlwB,IAuEtB,EAA4B,EAAEyyB,CAAF,CAASvC,CAAAd,cAAT,CAA5B,CAAA,CACEc,CAAA,CAAUA,CAAAhB,QAhDb,CAAH,MAmDUgB,CAnDV,CAmDoBuC,CAnDpB,CAuDA,KAAIP,CAAJ,EAAaF,CAAA7pC,OAAb,GAAmC,CAAEgqC,CAAA,EAArC,CAEE,KAqbNttB,EAAA0a,QArbY,CAqbS,IArbT,CAAAqP,CAAA,CAAiB,QAAjB,CAGFD,CAHE,CAGGlgC,EAAA,CAAO2jC,CAAP,CAHH,CAAN,CAzED,CAAH,MA+ESF,CA/ET,EA+EkBF,CAAA7pC,OA/ElB,CAmFA,KA2aF0c,CAAA0a,QA3aE,CA2amB,IA3anB,CAAM0S,CAAA9pC,OAAN,CAAA,CACE,GAAI,CACF8pC,CAAAp3B,MAAA,EAAA,EADE,CAEF,MAAOrL,CAAP,CAAU,CACVyc,CAAA,CAAkBzc,CAAlB,CADU,CArGI,CArcJ,UAmlBNqO,QAAQ,EAAG,CAEnB,GAAI4xB,CAAA,IAAAA,YAAJ,CAAA,CACA,IAAI/kC,EAAS,IAAAwkC,QAEb,KAAA7G,WAAA,CAAgB,UAAhB,CACA,KAAAoH,YAAA,CAAmB,CAAA,CACf,KAAJ,GAAa5qB,CAAb,GAEAtc,CAAA,CAAQ,IAAAsnC,gBAAR;AAA8B9hC,EAAA,CAAK,IAAL,CAAWkiC,CAAX,CAAmC,IAAnC,CAA9B,CA2BA,CAvBIvlC,CAAA4kC,YAuBJ,EAvB0B,IAuB1B,GAvBgC5kC,CAAA4kC,YAuBhC,CAvBqD,IAAAF,cAuBrD,EAtBI1kC,CAAA6kC,YAsBJ,EAtB0B,IAsB1B,GAtBgC7
kC,CAAA6kC,YAsBhC,CAtBqD,IAAAF,cAsBrD,EArBI,IAAAA,cAqBJ,GArBwB,IAAAA,cAAAD,cAqBxB,CArB2D,IAAAA,cAqB3D,EApBI,IAAAA,cAoBJ,GApBwB,IAAAA,cAAAC,cAoBxB,CApB2D,IAAAA,cAoB3D,EATA,IAAAH,QASA,CATe,IAAAE,cASf,CAToC,IAAAC,cASpC,CATyD,IAAAC,YASzD,CARI,IAAAC,YAQJ,CARuB,IAAAC,MAQvB,CARoC,IAQpC,CALA,IAAAI,YAKA,CALmB,EAKnB,CAJA,IAAAT,WAIA,CAJkB,IAAAO,aAIlB,CAJsC,IAAAC,kBAItC,CAJ+D,EAI/D,CADA,IAAA9xB,SACA,CADgB,IAAAorB,QAChB,CAD+B,IAAA/2B,OAC/B,CAD6CtH,CAC7C,CAAA,IAAA8nC,IAAA;AAAW,IAAA7lC,OAAX,CAAyB8lC,QAAQ,EAAG,CAAE,MAAO/nC,EAAT,CA7BpC,CALA,CAFmB,CAnlBL,OAspBT4nC,QAAQ,CAACI,CAAD,CAAOxvB,CAAP,CAAe,CAC5B,MAAO2J,EAAA,CAAO6lB,CAAP,CAAA,CAAa,IAAb,CAAmBxvB,CAAnB,CADqB,CAtpBd,YAurBJxW,QAAQ,CAACgmC,CAAD,CAAO,CAGpB/tB,CAAA0a,QAAL,EAA4B1a,CAAA6qB,aAAAvnC,OAA5B,EACE40B,CAAA3T,MAAA,CAAe,QAAQ,EAAG,CACpBvE,CAAA6qB,aAAAvnC,OAAJ,EACE0c,CAAAokB,QAAA,EAFsB,CAA1B,CAOF,KAAAyG,aAAA1mC,KAAA,CAAuB,OAAQ,IAAR,YAA0B4pC,CAA1B,CAAvB,CAXyB,CAvrBX,cAqsBDC,QAAQ,CAAC5kC,CAAD,CAAK,CAC1B,IAAA0hC,kBAAA3mC,KAAA,CAA4BiF,CAA5B,CAD0B,CArsBZ,QAsvBRiE,QAAQ,CAAC0gC,CAAD,CAAO,CACrB,GAAI,CAEF,MADA9C,EAAA,CAAW,QAAX,CACO,CAAA,IAAA0C,MAAA,CAAWI,CAAX,CAFL,CAGF,MAAOpjC,CAAP,CAAU,CACVyc,CAAA,CAAkBzc,CAAlB,CADU,CAHZ,OAKU,CAsNZqV,CAAA0a,QAAA,CAAqB,IApNjB,IAAI,CACF1a,CAAAokB,QAAA,EADE,CAEF,MAAOz5B,CAAP,CAAU,CAEV,KADAyc,EAAA,CAAkBzc,CAAlB,CACMA,CAAAA,CAAN,CAFU,CAJJ,CANW,CAtvBP,KAiyBXkjC,QAAQ,CAACxhC,CAAD,CAAO4V,CAAP,CAAiB,CAC5B,IAAIgsB,EAAiB,IAAAlD,YAAA,CAAiB1+B,CAAjB,CAChB4hC,EAAL,GACE,IAAAlD,YAAA,CAAiB1+B,CAAjB,CADF;AAC2B4hC,CAD3B,CAC4C,EAD5C,CAGAA,EAAA9pC,KAAA,CAAoB8d,CAApB,CAEA,KAAIopB,EAAU,IACd,GACOA,EAAAL,gBAAA,CAAwB3+B,CAAxB,CAGL,GAFEg/B,CAAAL,gBAAA,CAAwB3+B,CAAxB,CAEF,CAFkC,CAElC,EAAAg/B,CAAAL,gBAAA,CAAwB3+B,CAAxB,CAAA,EAJF,OAKUg/B,CALV,CAKoBA,CAAAhB,QALpB,CAOA,KAAIlhC,EAAO,IACX,OAAO,SAAQ,EAAG,CAChB8kC,CAAA,CAAe3mC,EAAA,CAAQ2mC,CAAR,CAAwBhsB,CAAxB,CAAf,CAAA,CAAoD,IACpDmpB,EAAA,CAAuBjiC,CAAvB,CAA6B,CAA7B,CAAgCkD,CAAhC,CAFgB,CAhBU,CAjyBd,OA80BT6hC,QAAQ,CAAC7hC,CAAD,CAAOmS,CAAP,CAAa,CAAA,IACtB9T,EAAQ,EADc,CAEtBujC,CAFsB,CAGtB/gC,EAAQ,IAHc,CAItB8N,EAAkB,CAAA,CAJI,CAKtBJ,EAAQ,MACAvO,CADA,aAEOa,CAFP,iBAGW8N,QAAQ,EAAG,CAACA,CAAA,CAAkB,CAAA,CAAnB,CAHtB,gBAIUH,QAAQ,EAAG,CACzBD,CAAAS,iBAAA,CAAyB,CAAA,CADA,CAJrB,kBAOY,CAAA,CAPZ,CALc,CActB8yB,EAAsBC,CAACxzB,CAADwzB,CAtjXzB3kC,OAAA,CAAcH,EAAAtF,KAAA,CAsjXoBwB,SAtjXpB,CAsjX+Bb,CAtjX/B,CAAd,CAwiXyB,CAetBL,CAfsB,CAenBhB,CAEP,GAAG,CACD2qC,CAAA,CAAiB/gC,CAAA69B,YAAA,CAAkB1+B,CAAlB,CAAjB,EAA4C3B,CAC5CkQ,EAAAyzB,aAAA,CAAqBnhC,CAChB5I,EAAA,CAAE,CAAP,KAAUhB,CAAV,CAAiB2qC,CAAA3qC,OAAjB,CAAwCgB,CAAxC,CAA0ChB,CAA1C,CAAkDgB,CAAA,EAAlD,CAGE,GAAK2pC,CAAA,CAAe3pC,CAAf,CAAL,CAMA,GAAI,CAEF2pC,CAAA,CAAe3pC,CAAf,CAAAkF,MAAA,CAAwB,IAAxB,CAA8B2kC,CAA9B,CAFE,CAGF,MAAOxjC,CAAP,CAAU,CACVyc,CAAA,CAAkBzc,CAAlB,CADU,CATZ,IACEsjC,EAAAxmC,OAAA,CAAsBnD,CAAtB;AAAyB,CAAzB,CAEA,CADAA,CAAA,EACA,CAAAhB,CAAA,EAWJ,IAAI0X,CAAJ,CAAqB,KAErB9N,EAAA,CAAQA,CAAAm9B,QAtBP,CAAH,MAuBSn9B,CAvBT,CAyBA,OAAO0N,EA1CmB,CA90BZ,YAi5BJ4oB,QAAQ,CAACn3B,CAAD,CAAOmS,CAAP,CAAa,CAgB/B,IAhB+B,IAE3B6sB,EADSlwB,IADkB,CAG3ByyB,EAFSzyB,IADkB,CAI3BP,EAAQ,MACAvO,CADA,aAHC8O,IAGD,gBAGUN,QAAQ,EAAG,CACzBD,CAAAS,iBAAA,CAAyB,CAAA,CADA,CAHrB,kBAMY,CAAA,CANZ,CAJmB,CAY3B8yB,EAAsBC,CAACxzB,CAADwzB,CAvnXzB3kC,OAAA,CAAcH,EAAAtF,KAAA,CAunXoBwB,SAvnXpB,CAunX+Bb,CAvnX/B,CAAd,CA2mX8B,CAahBL,CAbgB,CAabhB,CAGlB,CAAQ+nC,CAAR,CAAkBuC,CAAlB,CAAA,CAAyB,CACvBhzB,CAAAyzB,aAAA,CAAqBhD,CACrBrV,EAAA,CAAYqV,CAAAN,YAAA,CAAoB1+B,CAApB,CAAZ,EAAyC,EACpC/H,EAAA,CAAE,CAAP,KAAUhB,CAAV,CAAmB0yB,CAAA1yB,OAAnB,CAAqCgB,CAArC,CAAuChB,CAAvC,CAA+CgB,CAAA,EAA/C,CAEE,GAAK0xB,CAAA,CAAU1xB,CAAV,CAAL,CAOA,GAAI,CACF0xB,CAAA,CAAU
1xB,CAAV,CAAAkF,MAAA,CAAmB,IAAnB,CAAyB2kC,CAAzB,CADE,CAEF,MAAMxjC,CAAN,CAAS,CACTyc,CAAA,CAAkBzc,CAAlB,CADS,CATX,IACEqrB,EAAAvuB,OAAA,CAAiBnD,CAAjB,CAAoB,CAApB,CAEA,CADAA,CAAA,EACA,CAAAhB,CAAA,EAeJ,IAAI,EAAEsqC,CAAF,CAAWvC,CAAAL,gBAAA,CAAwB3+B,CAAxB,CAAX,EAA4Cg/B,CAAAZ,YAA5C,EACCY,CADD,GAtCOlwB,IAsCP,EACuBkwB,CAAAd,cADvB,CAAJ,CAEE,IAAA,CAAMc,CAAN,GAxCSlwB,IAwCT,EAA4B,EAAEyyB,CAAF,CAASvC,CAAAd,cAAT,CAA5B,CAAA,CACEc,CAAA,CAAUA,CAAAhB,QA1BS,CA+BzB,MAAOzvB,EA/CwB,CAj5BjB,CAo8BlB;IAAIoF,EAAa,IAAImqB,CAErB,OAAOnqB,EAtgC2D,CADxD,CAZe,CA8jC7BrP,QAASA,GAAqB,EAAG,CAAA,IAC3B+W,EAA6B,mCADF,CAE7BG,EAA8B,uCAkBhC,KAAAH,2BAAA,CAAkCC,QAAQ,CAACC,CAAD,CAAS,CACjD,MAAIxhB,EAAA,CAAUwhB,CAAV,CAAJ,EACEF,CACO,CADsBE,CACtB,CAAA,IAFT,EAIOF,CAL0C,CAyBnD,KAAAG,4BAAA,CAAmCC,QAAQ,CAACF,CAAD,CAAS,CAClD,MAAIxhB,EAAA,CAAUwhB,CAAV,CAAJ,EACEC,CACO,CADuBD,CACvB,CAAA,IAFT,EAIOC,CAL2C,CAQpD,KAAAzK,KAAA,CAAY2H,QAAQ,EAAG,CACrB,MAAOupB,SAAoB,CAACC,CAAD,CAAMC,CAAN,CAAe,CACxC,IAAIC,EAAQD,CAAA,CAAU3mB,CAAV,CAAwCH,CAApD,CACIgnB,CAEJ,IAAI,CAAChzB,CAAL,EAAqB,CAArB,EAAaA,CAAb,CAEE,GADAgzB,CACI,CADYrR,EAAA,CAAWkR,CAAX,CAAAxrB,KACZ,CAAkB,EAAlB,GAAA2rB,CAAA,EAAwB,CAACA,CAAApmC,MAAA,CAAoBmmC,CAApB,CAA7B,CACE,MAAO,SAAP,CAAiBC,CAGrB,OAAOH,EAViC,CADrB,CArDQ,CA4FjCI,QAASA,GAAa,CAACC,CAAD,CAAU,CAC9B,GAAgB,MAAhB,GAAIA,CAAJ,CACE,MAAOA,EACF,IAAIprC,CAAA,CAASorC,CAAT,CAAJ,CAAuB,CAK5B,GAA8B,EAA9B,CAAIA,CAAAtnC,QAAA,CAAgB,KAAhB,CAAJ,CACE,KAAMunC,GAAA,CAAW,QAAX,CACsDD,CADtD,CAAN,CAGFA,CAAA,CAA0BA,CAjBrB5jC,QAAA,CAAU,+BAAV;AAA2C,MAA3C,CAAAA,QAAA,CACU,OADV,CACmB,OADnB,CAiBKA,QAAA,CACY,QADZ,CACsB,IADtB,CAAAA,QAAA,CAEY,KAFZ,CAEmB,YAFnB,CAGV,OAAW3C,OAAJ,CAAW,GAAX,CAAiBumC,CAAjB,CAA2B,GAA3B,CAZqB,CAavB,GAAInoC,EAAA,CAASmoC,CAAT,CAAJ,CAIL,MAAWvmC,OAAJ,CAAW,GAAX,CAAiBumC,CAAAjnC,OAAjB,CAAkC,GAAlC,CAEP,MAAMknC,GAAA,CAAW,UAAX,CAAN,CAtB4B,CA4BhCC,QAASA,GAAc,CAACC,CAAD,CAAW,CAChC,IAAIC,EAAmB,EACnB5oC,EAAA,CAAU2oC,CAAV,CAAJ,EACErrC,CAAA,CAAQqrC,CAAR,CAAkB,QAAQ,CAACH,CAAD,CAAU,CAClCI,CAAA7qC,KAAA,CAAsBwqC,EAAA,CAAcC,CAAd,CAAtB,CADkC,CAApC,CAIF,OAAOI,EAPyB,CA8ElC16B,QAASA,GAAoB,EAAG,CAC9B,IAAA26B,aAAA,CAAoBA,EADU,KAI1BC,EAAuB,CAAC,MAAD,CAJG,CAK1BC,EAAuB,EAwB3B,KAAAD,qBAAA,CAA4BE,QAAS,CAAC3qC,CAAD,CAAQ,CACvCe,SAAAlC,OAAJ,GACE4rC,CADF,CACyBJ,EAAA,CAAerqC,CAAf,CADzB,CAGA,OAAOyqC,EAJoC,CAkC7C,KAAAC,qBAAA,CAA4BE,QAAS,CAAC5qC,CAAD,CAAQ,CACvCe,SAAAlC,OAAJ,GACE6rC,CADF,CACyBL,EAAA,CAAerqC,CAAf,CADzB,CAGA,OAAO0qC,EAJoC,CAO7C,KAAA/xB,KAAA,CAAY,CAAC,WAAD,CAAc,QAAQ,CAAC4B,CAAD,CAAY,CA0C5CswB,QAASA,EAAkB,CAACC,CAAD,CAAO,CAChC,IAAIC;AAAaA,QAA+B,CAACC,CAAD,CAAe,CAC7D,IAAAC,qBAAA,CAA4BC,QAAQ,EAAG,CACrC,MAAOF,EAD8B,CADsB,CAK3DF,EAAJ,GACEC,CAAA5wB,UADF,CACyB,IAAI2wB,CAD7B,CAGAC,EAAA5wB,UAAA+f,QAAA,CAA+BiR,QAAmB,EAAG,CACnD,MAAO,KAAAF,qBAAA,EAD4C,CAGrDF,EAAA5wB,UAAApY,SAAA,CAAgCqpC,QAAoB,EAAG,CACrD,MAAO,KAAAH,qBAAA,EAAAlpC,SAAA,EAD8C,CAGvD,OAAOgpC,EAfyB,CAxClC,IAAIM,EAAgBA,QAAsB,CAAChlC,CAAD,CAAO,CAC/C,KAAM+jC,GAAA,CAAW,QAAX,CAAN,CAD+C,CAI7C7vB,EAAAF,IAAA,CAAc,WAAd,CAAJ,GACEgxB,CADF,CACkB9wB,CAAArB,IAAA,CAAc,WAAd,CADlB,CAN4C,KA4DxCoyB,EAAyBT,CAAA,EA5De,CA6DxCU,EAAS,EAEbA,EAAA,CAAOf,EAAA9a,KAAP,CAAA,CAA4Bmb,CAAA,CAAmBS,CAAnB,CAC5BC,EAAA,CAAOf,EAAAgB,IAAP,CAAA,CAA2BX,CAAA,CAAmBS,CAAnB,CAC3BC,EAAA,CAAOf,EAAAiB,IAAP,CAAA,CAA2BZ,CAAA,CAAmBS,CAAnB,CAC3BC,EAAA,CAAOf,EAAAkB,GAAP,CAAA,CAA0Bb,CAAA,CAAmBS,CAAnB,CAC1BC,EAAA,CAAOf,EAAA7a,aAAP,CAAA,CAAoCkb,CAAA,CAAmBU,CAAA,CAAOf,EAAAiB,IAAP,CAAnB,CAyGpC,OAAO,SAtFPE,QAAgB,CAAC/3B,CAAD,CAAOo3B,CAAP,CAAqB,CACnC,IAAI/wB,EAAesxB,CAAAjsC,eAAA,CAAsBsU,CAAtB,CAAA,CAA8B23B,CAAA,CAAO33B,CAAP,CAA9B,CAA6C,IAChE,IAAI,CAACqG,CAAL,CACE,KAAMmwB,GAAA,CAAW,UAAX;AAEFx2B,C
AFE,CAEIo3B,CAFJ,CAAN,CAIF,GAAqB,IAArB,GAAIA,CAAJ,EAA6BA,CAA7B,GAA8CxsC,CAA9C,EAA4E,EAA5E,GAA2DwsC,CAA3D,CACE,MAAOA,EAIT,IAA4B,QAA5B,GAAI,MAAOA,EAAX,CACE,KAAMZ,GAAA,CAAW,OAAX,CAEFx2B,CAFE,CAAN,CAIF,MAAO,KAAIqG,CAAJ,CAAgB+wB,CAAhB,CAjB4B,CAsF9B,YAzBP/Q,QAAmB,CAACrmB,CAAD,CAAOg4B,CAAP,CAAqB,CACtC,GAAqB,IAArB,GAAIA,CAAJ,EAA6BA,CAA7B,GAA8CptC,CAA9C,EAA4E,EAA5E,GAA2DotC,CAA3D,CACE,MAAOA,EAET,KAAI7hC,EAAewhC,CAAAjsC,eAAA,CAAsBsU,CAAtB,CAAA,CAA8B23B,CAAA,CAAO33B,CAAP,CAA9B,CAA6C,IAChE,IAAI7J,CAAJ,EAAmB6hC,CAAnB,WAA2C7hC,EAA3C,CACE,MAAO6hC,EAAAX,qBAAA,EAKT,IAAIr3B,CAAJ,GAAa42B,EAAA7a,aAAb,CAAwC,CAzIpC8L,IAAAA,EAAY7C,EAAA,CA0ImBgT,CA1IR7pC,SAAA,EAAX,CAAZ05B,CACA57B,CADA47B,CACG3a,CADH2a,CACMoQ,EAAU,CAAA,CAEfhsC,EAAA,CAAI,CAAT,KAAYihB,CAAZ,CAAgB2pB,CAAA5rC,OAAhB,CAA6CgB,CAA7C,CAAiDihB,CAAjD,CAAoDjhB,CAAA,EAApD,CACE,GAbc,MAAhB,GAae4qC,CAAAN,CAAqBtqC,CAArBsqC,CAbf,CACSvT,EAAA,CAY+B6E,CAZ/B,CADT,CAaegP,CAAAN,CAAqBtqC,CAArBsqC,CATJpiC,KAAA,CAS6B0zB,CAThBnd,KAAb,CAST,CAAkD,CAChDutB,CAAA,CAAU,CAAA,CACV,MAFgD,CAKpD,GAAIA,CAAJ,CAEE,IAAKhsC,CAAO,CAAH,CAAG,CAAAihB,CAAA,CAAI4pB,CAAA7rC,OAAhB,CAA6CgB,CAA7C,CAAiDihB,CAAjD,CAAoDjhB,CAAA,EAApD,CACE,GArBY,MAAhB,GAqBiB6qC,CAAAP,CAAqBtqC,CAArBsqC,CArBjB,CACSvT,EAAA,CAoBiC6E,CApBjC,CADT,CAqBiBiP,CAAAP,CAAqBtqC,CAArBsqC,CAjBNpiC,KAAA,CAiB+B0zB,CAjBlBnd,KAAb,CAiBP,CAAkD,CAChDutB,CAAA,CAAU,CAAA,CACV,MAFgD,CA8HpD,GAxHKA,CAwHL,CACE,MAAOD,EAEP,MAAMxB,GAAA,CAAW,UAAX;AAEFwB,CAAA7pC,SAAA,EAFE,CAAN,CAJoC,CAQjC,GAAI6R,CAAJ,GAAa42B,EAAA9a,KAAb,CACL,MAAO2b,EAAA,CAAcO,CAAd,CAET,MAAMxB,GAAA,CAAW,QAAX,CAAN,CAtBsC,CAyBjC,SAhDPlQ,QAAgB,CAAC0R,CAAD,CAAe,CAC7B,MAAIA,EAAJ,WAA4BN,EAA5B,CACSM,CAAAX,qBAAA,EADT,CAGSW,CAJoB,CAgDxB,CA5KqC,CAAlC,CAtEkB,CAkhBhCh8B,QAASA,GAAY,EAAG,CACtB,IAAIk8B,EAAU,CAAA,CAad,KAAAA,QAAA,CAAeC,QAAS,CAAC/rC,CAAD,CAAQ,CAC1Be,SAAAlC,OAAJ,GACEitC,CADF,CACY,CAAC,CAAC9rC,CADd,CAGA,OAAO8rC,EAJuB,CAsDhC,KAAAnzB,KAAA,CAAY,CAAC,QAAD,CAAW,UAAX,CAAuB,cAAvB,CAAuC,QAAQ,CAC7C8K,CAD6C,CACnCnH,CADmC,CACvB0vB,CADuB,CACT,CAGhD,GAAIF,CAAJ,EAAexvB,CAAArF,KAAf,EAA4D,CAA5D,CAAgCqF,CAAA2vB,iBAAhC,CACE,KAAM7B,GAAA,CAAW,UAAX,CAAN,CAMF,IAAI8B,EAAMnoC,EAAA,CAAYymC,EAAZ,CAaV0B,EAAAC,UAAA,CAAgBC,QAAS,EAAG,CAC1B,MAAON,EADmB,CAG5BI,EAAAP,QAAA,CAAcK,CAAAL,QACdO,EAAAjS,WAAA,CAAiB+R,CAAA/R,WACjBiS,EAAAhS,QAAA,CAAc8R,CAAA9R,QAET4R,EAAL,GACEI,CAAAP,QACA,CADcO,CAAAjS,WACd,CAD+BoS,QAAQ,CAACz4B,CAAD,CAAO5T,CAAP,CAAc,CAAE,MAAOA,EAAT,CACrD;AAAAksC,CAAAhS,QAAA,CAAc34B,EAFhB,CAwBA2qC,EAAAI,QAAA,CAAcC,QAAmB,CAAC34B,CAAD,CAAO01B,CAAP,CAAa,CAC5C,IAAIt3B,EAASyR,CAAA,CAAO6lB,CAAP,CACb,OAAIt3B,EAAA4Y,QAAJ,EAAsB5Y,CAAAwI,SAAtB,CACSxI,CADT,CAGSw6B,QAA0B,CAAC9nC,CAAD,CAAOoV,CAAP,CAAe,CAC9C,MAAOoyB,EAAAjS,WAAA,CAAermB,CAAf,CAAqB5B,CAAA,CAAOtN,CAAP,CAAaoV,CAAb,CAArB,CADuC,CALN,CAtDE,KAoT5CrU,EAAQymC,CAAAI,QApToC,CAqT5CrS,EAAaiS,CAAAjS,WArT+B,CAsT5C0R,EAAUO,CAAAP,QAEd1sC,EAAA,CAAQurC,EAAR,CAAsB,QAAS,CAACiC,CAAD,CAAY7kC,CAAZ,CAAkB,CAC/C,IAAI8kC,EAAQ9mC,CAAA,CAAUgC,CAAV,CACZskC,EAAA,CAAI97B,EAAA,CAAU,WAAV,CAAwBs8B,CAAxB,CAAJ,CAAA,CAAsC,QAAS,CAACpD,CAAD,CAAO,CACpD,MAAO7jC,EAAA,CAAMgnC,CAAN,CAAiBnD,CAAjB,CAD6C,CAGtD4C,EAAA,CAAI97B,EAAA,CAAU,cAAV,CAA2Bs8B,CAA3B,CAAJ,CAAA,CAAyC,QAAS,CAAC1sC,CAAD,CAAQ,CACxD,MAAOi6B,EAAA,CAAWwS,CAAX,CAAsBzsC,CAAtB,CADiD,CAG1DksC,EAAA,CAAI97B,EAAA,CAAU,WAAV,CAAwBs8B,CAAxB,CAAJ,CAAA,CAAsC,QAAS,CAAC1sC,CAAD,CAAQ,CACrD,MAAO2rC,EAAA,CAAQc,CAAR,CAAmBzsC,CAAnB,CAD8C,CARR,CAAjD,CAaA,OAAOksC,EArUyC,CADtC,CApEU,CA6ZxBp8B,QAASA,GAAgB,EAAG,CAC1B,IAAA6I,KAAA,CAAY,CAAC,SAAD,CAAY,WAAZ,CAAyB,QAAQ,CAAC0C,CAAD,CAAUgF,CAAV,CAAqB,CAAA,IAC5DssB,EAAe,EAD6C,CAE5DC,EACE5rC,CAAA,CAAI,CAAC,eAAA+G,KAAA,CAAqBnC,CAAA,CAAWin
C,CAAAxxB,CAAAyxB,UAAAD,EAAqB,EAArBA,WAAX,CAArB,CAAD,EAAyE,EAAzE,EAA6E,CAA7E,CAAJ,CAH0D,CAI5DE,EAAQ,QAAAhkC,KAAA,CAAe8jC,CAAAxxB,CAAAyxB,UAAAD;AAAqB,EAArBA,WAAf,CAJoD,CAK5DtuC,EAAW8hB,CAAA,CAAU,CAAV,CAAX9hB,EAA2B,EALiC,CAM5DyuC,EAAezuC,CAAAyuC,aAN6C,CAO5DC,CAP4D,CAQ5DC,EAAc,6BAR8C,CAS5DC,EAAY5uC,CAAAy5B,KAAZmV,EAA6B5uC,CAAAy5B,KAAAoV,MAT+B,CAU5DC,EAAc,CAAA,CAV8C,CAW5DC,EAAa,CAAA,CAGjB,IAAIH,CAAJ,CAAe,CACb,IAAI5qC,IAAIA,CAAR,GAAgB4qC,EAAhB,CACE,GAAGtpC,CAAH,CAAWqpC,CAAAnlC,KAAA,CAAiBxF,CAAjB,CAAX,CAAmC,CACjC0qC,CAAA,CAAeppC,CAAA,CAAM,CAAN,CACfopC,EAAA,CAAeA,CAAAxlB,OAAA,CAAoB,CAApB,CAAuB,CAAvB,CAAAjX,YAAA,EAAf,CAAyDy8B,CAAAxlB,OAAA,CAAoB,CAApB,CACzD,MAHiC,CAOjCwlB,CAAJ,GACEA,CADF,CACkB,eADlB,EACqCE,EADrC,EACmD,QADnD,CAIAE,EAAA,CAAc,CAAC,EAAG,YAAH,EAAmBF,EAAnB,EAAkCF,CAAlC,CAAiD,YAAjD,EAAiEE,EAAjE,CACfG,EAAA,CAAc,CAAC,EAAG,WAAH,EAAkBH,EAAlB,EAAiCF,CAAjC,CAAgD,WAAhD,EAA+DE,EAA/D,CAEXP,EAAAA,CAAJ,EAAiBS,CAAjB,EAA+BC,CAA/B,GACED,CACA,CADctuC,CAAA,CAASR,CAAAy5B,KAAAoV,MAAAG,iBAAT,CACd,CAAAD,CAAA,CAAavuC,CAAA,CAASR,CAAAy5B,KAAAoV,MAAAI,gBAAT,CAFf,CAhBa,CAuBf,MAAO,SAUI,EAAG9vB,CAAArC,CAAAqC,QAAH,EAAsBgB,CAAArD,CAAAqC,QAAAgB,UAAtB,EAA+D,CAA/D,CAAqDkuB,CAArD,EAAsEG,CAAtE,CAVJ,YAYO,cAZP,EAYyB1xB,EAZzB,GAcQ,CAAC2xB,CAdT,EAcwC,CAdxC;AAcyBA,CAdzB,WAeKS,QAAQ,CAACt3B,CAAD,CAAQ,CAIxB,GAAa,OAAb,EAAIA,CAAJ,EAAgC,CAAhC,EAAwBc,CAAxB,CAAmC,MAAO,CAAA,CAE1C,IAAIvV,CAAA,CAAYirC,CAAA,CAAax2B,CAAb,CAAZ,CAAJ,CAAsC,CACpC,IAAIu3B,EAASnvC,CAAAgU,cAAA,CAAuB,KAAvB,CACbo6B,EAAA,CAAax2B,CAAb,CAAA,CAAsB,IAAtB,CAA6BA,CAA7B,GAAsCu3B,EAFF,CAKtC,MAAOf,EAAA,CAAax2B,CAAb,CAXiB,CAfrB,KA4BArK,EAAA,EA5BA,cA6BSmhC,CA7BT,aA8BSI,CA9BT,YA+BQC,CA/BR,SAgCIV,CAhCJ,MAiCE31B,CAjCF,kBAkCa+1B,CAlCb,CArCyD,CAAtD,CADc,CA6E5Bh9B,QAASA,GAAgB,EAAG,CAC1B,IAAA2I,KAAA,CAAY,CAAC,YAAD,CAAe,UAAf,CAA2B,IAA3B,CAAiC,mBAAjC,CACP,QAAQ,CAAC4C,CAAD,CAAekY,CAAf,CAA2BC,CAA3B,CAAiC/Q,CAAjC,CAAoD,CA6B/DoU,QAASA,EAAO,CAACpyB,CAAD,CAAKqb,CAAL,CAAYua,CAAZ,CAAyB,CAAA,IACnCrE,EAAWxC,CAAA5T,MAAA,EADwB,CAEnCoV,EAAUgB,CAAAhB,QAFyB,CAGnCwF,EAAa/4B,CAAA,CAAU44B,CAAV,CAAbG,EAAuC,CAACH,CAG5Cta,EAAA,CAAYwT,CAAA3T,MAAA,CAAe,QAAQ,EAAG,CACpC,GAAI,CACFoW,CAAAC,QAAA,CAAiBxxB,CAAA,EAAjB,CADE,CAEF,MAAMuB,CAAN,CAAS,CACTgwB,CAAAnC,OAAA,CAAgB7tB,CAAhB,CACA,CAAAyc,CAAA,CAAkBzc,CAAlB,CAFS,CAFX,OAMQ,CACN,OAAOynC,CAAA,CAAUzY,CAAA0Y,YAAV,CADD,CAIHlT,CAAL,EAAgBnf,CAAA3S,OAAA,EAXoB,CAA1B,CAYToX,CAZS,CAcZkV,EAAA0Y,YAAA,CAAsB3tB,CACtB0tB,EAAA,CAAU1tB,CAAV,CAAA,CAAuBiW,CAEvB;MAAOhB,EAvBgC,CA5BzC,IAAIyY,EAAY,EAmEhB5W,EAAA7W,OAAA,CAAiB2tB,QAAQ,CAAC3Y,CAAD,CAAU,CACjC,MAAIA,EAAJ,EAAeA,CAAA0Y,YAAf,GAAsCD,EAAtC,EACEA,CAAA,CAAUzY,CAAA0Y,YAAV,CAAA7Z,OAAA,CAAsC,UAAtC,CAEO,CADP,OAAO4Z,CAAA,CAAUzY,CAAA0Y,YAAV,CACA,CAAAna,CAAA3T,MAAAI,OAAA,CAAsBgV,CAAA0Y,YAAtB,CAHT,EAKO,CAAA,CAN0B,CASnC,OAAO7W,EA7EwD,CADrD,CADc,CAkJ5B6B,QAASA,GAAU,CAACtb,CAAD,CAAMwwB,CAAN,CAAY,CAC7B,IAAIxvB,EAAOhB,CAEPrG,EAAJ,GAGE82B,CAAA74B,aAAA,CAA4B,MAA5B,CAAoCoJ,CAApC,CACA,CAAAA,CAAA,CAAOyvB,CAAAzvB,KAJT,CAOAyvB,EAAA74B,aAAA,CAA4B,MAA5B,CAAoCoJ,CAApC,CAGA,OAAO,MACCyvB,CAAAzvB,KADD,UAEKyvB,CAAAlV,SAAA,CAA0BkV,CAAAlV,SAAAtyB,QAAA,CAAgC,IAAhC,CAAsC,EAAtC,CAA1B,CAAsE,EAF3E,MAGCwnC,CAAAn4B,KAHD,QAIGm4B,CAAAzR,OAAA,CAAwByR,CAAAzR,OAAA/1B,QAAA,CAA8B,KAA9B,CAAqC,EAArC,CAAxB,CAAmE,EAJtE,MAKCwnC,CAAAryB,KAAA,CAAsBqyB,CAAAryB,KAAAnV,QAAA,CAA4B,IAA5B,CAAkC,EAAlC,CAAtB,CAA8D,EAL/D,UAMKwnC,CAAAnS,SANL,MAOCmS,CAAAjS,KAPD,UAQ4C,GACvC,GADCiS,CAAA3R,SAAAn4B,OAAA,CAA+B,CAA/B,CACD,CAAN8pC,CAAA3R,SAAM;AACN,GADM,CACA2R,CAAA3R,SAVL,CAbsB,CAkC/BxF,QAASA,GAAe,CAACoX,CAAD,CAAa,CAC/Bh8B,CAAAA,CAAUjT,CAAA,CAASivC,CAAT,CAAD,CAAyBpV,EAAA,CAAWoV,CAAX,CAAzB,CAAkDA,CAC/D,O
AAQh8B,EAAA6mB,SAAR,GAA4BoV,EAAApV,SAA5B,EACQ7mB,CAAA4D,KADR,GACwBq4B,EAAAr4B,KAHW,CA+CrC3F,QAASA,GAAe,EAAE,CACxB,IAAA0I,KAAA,CAAYlX,CAAA,CAAQnD,CAAR,CADY,CA2G1B4Q,QAASA,GAAe,CAAC5G,CAAD,CAAW,CAWjC4pB,QAASA,EAAQ,CAACtqB,CAAD,CAAOkD,CAAP,CAAgB,CAC/B,GAAGlJ,CAAA,CAASgG,CAAT,CAAH,CAAmB,CACjB,IAAIsmC,EAAU,EACdjvC,EAAA,CAAQ2I,CAAR,CAAc,QAAQ,CAACoJ,CAAD,CAAS5R,CAAT,CAAc,CAClC8uC,CAAA,CAAQ9uC,CAAR,CAAA,CAAe8yB,CAAA,CAAS9yB,CAAT,CAAc4R,CAAd,CADmB,CAApC,CAGA,OAAOk9B,EALU,CAOjB,MAAO5lC,EAAAwC,QAAA,CAAiBlD,CAAjB,CAAwBumC,CAAxB,CAAgCrjC,CAAhC,CARsB,CAVjC,IAAIqjC,EAAS,QAqBb,KAAAjc,SAAA,CAAgBA,CAEhB,KAAAvZ,KAAA,CAAY,CAAC,WAAD,CAAc,QAAQ,CAAC4B,CAAD,CAAY,CAC5C,MAAO,SAAQ,CAAC3S,CAAD,CAAO,CACpB,MAAO2S,EAAArB,IAAA,CAActR,CAAd,CAAqBumC,CAArB,CADa,CADsB,CAAlC,CAoBZjc,EAAA,CAAS,UAAT,CAAqBkc,EAArB,CACAlc,EAAA,CAAS,MAAT,CAAiBmc,EAAjB,CACAnc,EAAA,CAAS,QAAT,CAAmBoc,EAAnB,CACApc,EAAA,CAAS,MAAT,CAAiBqc,EAAjB,CACArc,EAAA,CAAS,SAAT,CAAoBsc,EAApB,CACAtc,EAAA,CAAS,WAAT,CAAsBuc,EAAtB,CACAvc,EAAA,CAAS,QAAT,CAAmBwc,EAAnB,CACAxc,EAAA,CAAS,SAAT,CAAoByc,EAApB,CACAzc,EAAA,CAAS,WAAT,CAAsB0c,EAAtB,CApDiC,CAwKnCN,QAASA,GAAY,EAAG,CACtB,MAAO,SAAQ,CAACxrC,CAAD;AAAQsvB,CAAR,CAAoByc,CAApB,CAAgC,CAC7C,GAAI,CAAC7vC,CAAA,CAAQ8D,CAAR,CAAL,CAAqB,MAAOA,EADiB,KAGzCgsC,EAAiB,MAAOD,EAHiB,CAIzCE,EAAa,EAEjBA,EAAAhyB,MAAA,CAAmBiyB,QAAQ,CAAChvC,CAAD,CAAQ,CACjC,IAAK,IAAIiT,EAAI,CAAb,CAAgBA,CAAhB,CAAoB87B,CAAAlwC,OAApB,CAAuCoU,CAAA,EAAvC,CACE,GAAG,CAAC87B,CAAA,CAAW97B,CAAX,CAAA,CAAcjT,CAAd,CAAJ,CACE,MAAO,CAAA,CAGX,OAAO,CAAA,CAN0B,CASZ,WAAvB,GAAI8uC,CAAJ,GAEID,CAFJ,CACyB,SAAvB,GAAIC,CAAJ,EAAoCD,CAApC,CACeA,QAAQ,CAAClwC,CAAD,CAAMqwB,CAAN,CAAY,CAC/B,MAAOhmB,GAAA9E,OAAA,CAAevF,CAAf,CAAoBqwB,CAApB,CADwB,CADnC,CAKe6f,QAAQ,CAAClwC,CAAD,CAAMqwB,CAAN,CAAY,CAC/B,GAAIrwB,CAAJ,EAAWqwB,CAAX,EAAkC,QAAlC,GAAmB,MAAOrwB,EAA1B,EAA8D,QAA9D,GAA8C,MAAOqwB,EAArD,CAAwE,CACtE,IAAKigB,IAAIA,CAAT,GAAmBtwC,EAAnB,CACE,GAAyB,GAAzB,GAAIswC,CAAAhrC,OAAA,CAAc,CAAd,CAAJ,EAAgC3E,EAAAC,KAAA,CAAoBZ,CAApB,CAAyBswC,CAAzB,CAAhC,EACIJ,CAAA,CAAWlwC,CAAA,CAAIswC,CAAJ,CAAX,CAAwBjgB,CAAA,CAAKigB,CAAL,CAAxB,CADJ,CAEE,MAAO,CAAA,CAGX,OAAO,CAAA,CAP+D,CASxEjgB,CAAA,CAAQvlB,CAAA,EAAAA,CAAGulB,CAAHvlB,aAAA,EACR,OAA+C,EAA/C,CAAQA,CAAA,EAAAA,CAAG9K,CAAH8K,aAAA,EAAA5G,QAAA,CAA8BmsB,CAA9B,CAXuB,CANrC,CAsBA,KAAIsN,EAASA,QAAQ,CAAC39B,CAAD,CAAMqwB,CAAN,CAAW,CAC9B,GAAmB,QAAnB,EAAI,MAAOA,EAAX,EAAkD,GAAlD,GAA+BA,CAAA/qB,OAAA,CAAY,CAAZ,CAA/B,CACE,MAAO,CAACq4B,CAAA,CAAO39B,CAAP,CAAYqwB,CAAAvH,OAAA,CAAY,CAAZ,CAAZ,CAEV,QAAQ,MAAO9oB,EAAf,EACE,KAAK,SAAL,CACA,KAAK,QAAL,CACA,KAAK,QAAL,CACE,MAAOkwC,EAAA,CAAWlwC,CAAX;AAAgBqwB,CAAhB,CACT,MAAK,QAAL,CACE,OAAQ,MAAOA,EAAf,EACE,KAAK,QAAL,CACE,MAAO6f,EAAA,CAAWlwC,CAAX,CAAgBqwB,CAAhB,CACT,SACE,IAAMigB,IAAIA,CAAV,GAAoBtwC,EAApB,CACE,GAAyB,GAAzB,GAAIswC,CAAAhrC,OAAA,CAAc,CAAd,CAAJ,EAAgCq4B,CAAA,CAAO39B,CAAA,CAAIswC,CAAJ,CAAP,CAAoBjgB,CAApB,CAAhC,CACE,MAAO,CAAA,CANf,CAWA,MAAO,CAAA,CACT,MAAK,OAAL,CACE,IAAUnvB,CAAV,CAAc,CAAd,CAAiBA,CAAjB,CAAqBlB,CAAAE,OAArB,CAAiCgB,CAAA,EAAjC,CACE,GAAIy8B,CAAA,CAAO39B,CAAA,CAAIkB,CAAJ,CAAP,CAAemvB,CAAf,CAAJ,CACE,MAAO,CAAA,CAGX,OAAO,CAAA,CACT,SACE,MAAO,CAAA,CA1BX,CAJ8B,CAiChC,QAAQ,MAAOoD,EAAf,EACE,KAAK,SAAL,CACA,KAAK,QAAL,CACA,KAAK,QAAL,CAEEA,CAAA,CAAa,GAAGA,CAAH,CAEf,MAAK,QAAL,CAEE,IAAKhzB,IAAIA,CAAT,GAAgBgzB,EAAhB,CACG,SAAQ,CAACloB,CAAD,CAAO,CACkB,WAAhC,GAAI,MAAOkoB,EAAA,CAAWloB,CAAX,CAAX,EACA6kC,CAAArvC,KAAA,CAAgB,QAAQ,CAACM,CAAD,CAAQ,CAC9B,MAAOs8B,EAAA,CAAe,GAAR,EAAApyB,CAAA,CAAclK,CAAd,CAAuBA,CAAvB,EAAgCA,CAAA,CAAMkK,CAAN,CAAvC,CAAqDkoB,CAAA,CAAWloB,CAAX,CAArD,CADuB,CAAhC,CAFc,CAAf,CAAA,CAKE9K,CALF,CAOH,MACF,MAAK,UAAL,CACE2vC,CAAArvC,KAAA,CAAgB0y
B,CAAhB,CACA,MACF,SACE,MAAOtvB,EAtBX,CAwBIosC,CAAAA,CAAW,EACf,KAAUj8B,CAAV,CAAc,CAAd,CAAiBA,CAAjB,CAAqBnQ,CAAAjE,OAArB,CAAmCoU,CAAA,EAAnC,CAAwC,CACtC,IAAIjT;AAAQ8C,CAAA,CAAMmQ,CAAN,CACR87B,EAAAhyB,MAAA,CAAiB/c,CAAjB,CAAJ,EACEkvC,CAAAxvC,KAAA,CAAcM,CAAd,CAHoC,CAMxC,MAAOkvC,EArGsC,CADzB,CA2JxBd,QAASA,GAAc,CAACe,CAAD,CAAU,CAC/B,IAAIC,EAAUD,CAAAE,eACd,OAAO,SAAQ,CAACC,CAAD,CAASC,CAAT,CAAwB,CACjC7tC,CAAA,CAAY6tC,CAAZ,CAAJ,GAAiCA,CAAjC,CAAkDH,CAAAI,aAAlD,CACA,OAAOC,GAAA,CAAaH,CAAb,CAAqBF,CAAAM,SAAA,CAAiB,CAAjB,CAArB,CAA0CN,CAAAO,UAA1C,CAA6DP,CAAAQ,YAA7D,CAAkF,CAAlF,CAAArpC,QAAA,CACa,SADb,CACwBgpC,CADxB,CAF8B,CAFR,CA6DjCb,QAASA,GAAY,CAACS,CAAD,CAAU,CAC7B,IAAIC,EAAUD,CAAAE,eACd,OAAO,SAAQ,CAACQ,CAAD,CAASC,CAAT,CAAuB,CACpC,MAAOL,GAAA,CAAaI,CAAb,CAAqBT,CAAAM,SAAA,CAAiB,CAAjB,CAArB,CAA0CN,CAAAO,UAA1C,CAA6DP,CAAAQ,YAA7D,CACLE,CADK,CAD6B,CAFT,CAS/BL,QAASA,GAAY,CAACI,CAAD,CAASE,CAAT,CAAkBC,CAAlB,CAA4BC,CAA5B,CAAwCH,CAAxC,CAAsD,CACzE,GAAc,IAAd,EAAID,CAAJ,EAAsB,CAACK,QAAA,CAASL,CAAT,CAAvB,EAA2CjuC,CAAA,CAASiuC,CAAT,CAA3C,CAA6D,MAAO,EAEpE,KAAIM,EAAsB,CAAtBA,CAAaN,CACjBA,EAAA,CAAS5iB,IAAAmjB,IAAA,CAASP,CAAT,CAJgE,KAKrEQ,EAASR,CAATQ,CAAkB,EALmD,CAMrEC,EAAe,EANsD,CAOrEvpC,EAAQ,EAP6D,CASrEwpC,EAAc,CAAA,CAClB,IAA6B,EAA7B,GAAIF,CAAAxtC,QAAA,CAAe,GAAf,CAAJ,CAAgC,CAC9B,IAAIgB,EAAQwsC,CAAAxsC,MAAA,CAAa,qBAAb,CACRA,EAAJ,EAAyB,GAAzB,EAAaA,CAAA,CAAM,CAAN,CAAb;AAAgCA,CAAA,CAAM,CAAN,CAAhC,CAA2CisC,CAA3C,CAA0D,CAA1D,EACEO,CACA,CADS,GACT,CAAAR,CAAA,CAAS,CAFX,GAIES,CACA,CADeD,CACf,CAAAE,CAAA,CAAc,CAAA,CALhB,CAF8B,CAWhC,GAAKA,CAAL,CA8CqB,CAAnB,CAAIT,CAAJ,GAAkC,EAAlC,CAAwBD,CAAxB,EAAgD,CAAhD,CAAuCA,CAAvC,IACES,CADF,CACiBT,CAAAW,QAAA,CAAeV,CAAf,CADjB,CA9CF,KAAkB,CACZW,CAAAA,CAAe5xC,CAAAwxC,CAAAxpC,MAAA,CAAa+oC,EAAb,CAAA,CAA0B,CAA1B,CAAA/wC,EAAgC,EAAhCA,QAGf6C,EAAA,CAAYouC,CAAZ,CAAJ,GACEA,CADF,CACiB7iB,IAAAyjB,IAAA,CAASzjB,IAAAC,IAAA,CAAS6iB,CAAAY,QAAT,CAA0BF,CAA1B,CAAT,CAAiDV,CAAAa,QAAjD,CADjB,CAOAf,EAAA,CAAS,EAAE5iB,IAAA4jB,MAAA,CAAW,EAAEhB,CAAA9tC,SAAA,EAAF,CAAsB,GAAtB,CAA4B+tC,CAA5B,CAAX,CAAA/tC,SAAA,EAAF,CAAqE,GAArE,CAA2E,CAAC+tC,CAA5E,CAELgB,EAAAA,CAAYjqC,CAAA,EAAAA,CAAKgpC,CAALhpC,OAAA,CAAmB+oC,EAAnB,CACZlT,EAAAA,CAAQoU,CAAA,CAAS,CAAT,CACZA,EAAA,CAAWA,CAAA,CAAS,CAAT,CAAX,EAA0B,EAEnBtnC,KAAAA,EAAM,CAANA,CACHunC,EAAShB,CAAAiB,OADNxnC,CAEHynC,EAAQlB,CAAAmB,MAEZ,IAAIxU,CAAA79B,OAAJ,EAAqBkyC,CAArB,CAA8BE,CAA9B,CAEE,IADAznC,CACK,CADCkzB,CAAA79B,OACD,CADgBkyC,CAChB,CAAAlxC,CAAA,CAAI,CAAT,CAAYA,CAAZ,CAAgB2J,CAAhB,CAAqB3J,CAAA,EAArB,CAC0B,CAGxB,IAHK2J,CAGL,CAHW3J,CAGX,EAHcoxC,CAGd,EAHmC,CAGnC,GAH6BpxC,CAG7B,GAFEywC,CAEF,EAFkBN,CAElB,EAAAM,CAAA,EAAgB5T,CAAAz4B,OAAA,CAAapE,CAAb,CAIpB,KAAKA,CAAL,CAAS2J,CAAT,CAAc3J,CAAd,CAAkB68B,CAAA79B,OAAlB,CAAgCgB,CAAA,EAAhC,CACoC,CAGlC,IAHK68B,CAAA79B,OAGL,CAHoBgB,CAGpB,EAHuBkxC,CAGvB,EAH6C,CAG7C,GAHuClxC,CAGvC,GAFEywC,CAEF,EAFkBN,CAElB,EAAAM,CAAA,EAAgB5T,CAAAz4B,OAAA,CAAapE,CAAb,CAIlB,KAAA,CAAMixC,CAAAjyC,OAAN,CAAwBixC,CAAxB,CAAA,CACEgB,CAAA,EAAY,GAGVhB,EAAJ,EAAqC,GAArC,GAAoBA,CAApB,GAA0CQ,CAA1C,EAA0DL,CAA1D,CAAuEa,CAAArpB,OAAA,CAAgB,CAAhB,CAAmBqoB,CAAnB,CAAvE,CA3CgB,CAmDlB/oC,CAAArH,KAAA,CAAWywC,CAAA;AAAaJ,CAAAoB,OAAb,CAA8BpB,CAAAqB,OAAzC,CACArqC,EAAArH,KAAA,CAAW4wC,CAAX,CACAvpC,EAAArH,KAAA,CAAWywC,CAAA,CAAaJ,CAAAsB,OAAb,CAA8BtB,CAAAuB,OAAzC,CACA,OAAOvqC,EAAAzG,KAAA,CAAW,EAAX,CA3EkE,CA8E3EixC,QAASA,GAAS,CAACrW,CAAD,CAAMsW,CAAN,CAAc1/B,CAAd,CAAoB,CACpC,IAAI2/B,EAAM,EACA,EAAV,CAAIvW,CAAJ,GACEuW,CACA,CADO,GACP,CAAAvW,CAAA,CAAM,CAACA,CAFT,CAKA,KADAA,CACA,CADM,EACN,CADWA,CACX,CAAMA,CAAAr8B,OAAN,CAAmB2yC,CAAnB,CAAA,CAA2BtW,CAAA,CAAM,GAAN,CAAYA,CACnCppB,EAAJ,GACEopB,CADF,CACQA,CAAAzT,OAAA,CAAWyT,CAAAr8B
,OAAX,CAAwB2yC,CAAxB,CADR,CAEA,OAAOC,EAAP,CAAavW,CAVuB,CActCwW,QAASA,EAAU,CAAC9pC,CAAD,CAAOwZ,CAAP,CAAa7Q,CAAb,CAAqBuB,CAArB,CAA2B,CAC5CvB,CAAA,CAASA,CAAT,EAAmB,CACnB,OAAO,SAAQ,CAACohC,CAAD,CAAO,CAChB3xC,CAAAA,CAAQ2xC,CAAA,CAAK,KAAL,CAAa/pC,CAAb,CAAA,EACZ,IAAa,CAAb,CAAI2I,CAAJ,EAAkBvQ,CAAlB,CAA0B,CAACuQ,CAA3B,CACEvQ,CAAA,EAASuQ,CACG,EAAd,GAAIvQ,CAAJ,EAA8B,GAA9B,EAAmBuQ,CAAnB,GAAmCvQ,CAAnC,CAA2C,EAA3C,CACA,OAAOuxC,GAAA,CAAUvxC,CAAV,CAAiBohB,CAAjB,CAAuBtP,CAAvB,CALa,CAFsB,CAW9C8/B,QAASA,GAAa,CAAChqC,CAAD,CAAOiqC,CAAP,CAAkB,CACtC,MAAO,SAAQ,CAACF,CAAD,CAAOvC,CAAP,CAAgB,CAC7B,IAAIpvC,EAAQ2xC,CAAA,CAAK,KAAL,CAAa/pC,CAAb,CAAA,EAAZ,CACIsR,EAAMrN,EAAA,CAAUgmC,CAAA,CAAa,OAAb,CAAuBjqC,CAAvB,CAA+BA,CAAzC,CAEV,OAAOwnC,EAAA,CAAQl2B,CAAR,CAAA,CAAalZ,CAAb,CAJsB,CADO,CAuIxCquC,QAASA,GAAU,CAACc,CAAD,CAAU,CAK3B2C,QAASA,EAAgB,CAACC,CAAD,CAAS,CAChC,IAAIluC,CACJ,IAAIA,CAAJ,CAAYkuC,CAAAluC,MAAA,CAAamuC,CAAb,CAAZ,CAAyC,CACnCL,CAAAA,CAAO,IAAIjuC,IAAJ,CAAS,CAAT,CAD4B,KAEnCuuC,EAAS,CAF0B,CAGnCC,EAAS,CAH0B,CAInCC,EAAatuC,CAAA,CAAM,CAAN,CAAA,CAAW8tC,CAAAS,eAAX;AAAiCT,CAAAU,YAJX,CAKnCC,EAAazuC,CAAA,CAAM,CAAN,CAAA,CAAW8tC,CAAAY,YAAX,CAA8BZ,CAAAa,SAE3C3uC,EAAA,CAAM,CAAN,CAAJ,GACEouC,CACA,CADSjxC,CAAA,CAAI6C,CAAA,CAAM,CAAN,CAAJ,CAAeA,CAAA,CAAM,EAAN,CAAf,CACT,CAAAquC,CAAA,CAAQlxC,CAAA,CAAI6C,CAAA,CAAM,CAAN,CAAJ,CAAeA,CAAA,CAAM,EAAN,CAAf,CAFV,CAIAsuC,EAAA5yC,KAAA,CAAgBoyC,CAAhB,CAAsB3wC,CAAA,CAAI6C,CAAA,CAAM,CAAN,CAAJ,CAAtB,CAAqC7C,CAAA,CAAI6C,CAAA,CAAM,CAAN,CAAJ,CAArC,CAAqD,CAArD,CAAwD7C,CAAA,CAAI6C,CAAA,CAAM,CAAN,CAAJ,CAAxD,CACIlD,EAAAA,CAAIK,CAAA,CAAI6C,CAAA,CAAM,CAAN,CAAJ,EAAc,CAAd,CAAJlD,CAAuBsxC,CACvBQ,EAAAA,CAAIzxC,CAAA,CAAI6C,CAAA,CAAM,CAAN,CAAJ,EAAc,CAAd,CAAJ4uC,CAAuBP,CACvBQ,EAAAA,CAAI1xC,CAAA,CAAI6C,CAAA,CAAM,CAAN,CAAJ,EAAc,CAAd,CACJ8uC,EAAAA,CAAK1lB,IAAA4jB,MAAA,CAA8C,GAA9C,CAAW+B,UAAA,CAAW,IAAX,EAAmB/uC,CAAA,CAAM,CAAN,CAAnB,EAA6B,CAA7B,EAAX,CACTyuC,EAAA/yC,KAAA,CAAgBoyC,CAAhB,CAAsBhxC,CAAtB,CAAyB8xC,CAAzB,CAA4BC,CAA5B,CAA+BC,CAA/B,CAhBuC,CAmBzC,MAAOZ,EArByB,CAFlC,IAAIC,EAAgB,sGA2BpB,OAAO,SAAQ,CAACL,CAAD,CAAOkB,CAAP,CAAe,CAAA,IACxB7jB,EAAO,EADiB,CAExBjoB,EAAQ,EAFgB,CAGxBpC,CAHwB,CAGpBd,CAERgvC,EAAA,CAASA,CAAT,EAAmB,YACnBA,EAAA,CAAS1D,CAAA2D,iBAAA,CAAyBD,CAAzB,CAAT,EAA6CA,CACzC9zC,EAAA,CAAS4yC,CAAT,CAAJ,GACEA,CADF,CACSoB,EAAAhqC,KAAA,CAAmB4oC,CAAnB,CAAA,CAA2B3wC,CAAA,CAAI2wC,CAAJ,CAA3B,CAAuCG,CAAA,CAAiBH,CAAjB,CADhD,CAII9vC,GAAA,CAAS8vC,CAAT,CAAJ,GACEA,CADF,CACS,IAAIjuC,IAAJ,CAASiuC,CAAT,CADT,CAIA,IAAI,CAAC7vC,EAAA,CAAO6vC,CAAP,CAAL,CACE,MAAOA,EAGT;IAAA,CAAMkB,CAAN,CAAA,CAEE,CADAhvC,CACA,CADQmvC,EAAAjrC,KAAA,CAAwB8qC,CAAxB,CACR,GACE9rC,CACA,CADeA,CAt4bd/B,OAAA,CAAcH,EAAAtF,KAAA,CAs4bOsE,CAt4bP,CAs4bc3D,CAt4bd,CAAd,CAu4bD,CAAA2yC,CAAA,CAAS9rC,CAAA2V,IAAA,EAFX,GAIE3V,CAAArH,KAAA,CAAWmzC,CAAX,CACA,CAAAA,CAAA,CAAS,IALX,CASF5zC,EAAA,CAAQ8H,CAAR,CAAe,QAAQ,CAAC/G,CAAD,CAAO,CAC5B2E,CAAA,CAAKsuC,EAAA,CAAajzC,CAAb,CACLgvB,EAAA,EAAQrqB,CAAA,CAAKA,CAAA,CAAGgtC,CAAH,CAASxC,CAAA2D,iBAAT,CAAL,CACK9yC,CAAAuG,QAAA,CAAc,UAAd,CAA0B,EAA1B,CAAAA,QAAA,CAAsC,KAAtC,CAA6C,GAA7C,CAHe,CAA9B,CAMA,OAAOyoB,EApCqB,CA9BH,CAmG7Buf,QAASA,GAAU,EAAG,CACpB,MAAO,SAAQ,CAAC2E,CAAD,CAAS,CACtB,MAAO/tC,GAAA,CAAO+tC,CAAP,CAAe,CAAA,CAAf,CADe,CADJ,CAkGtB1E,QAASA,GAAa,EAAE,CACtB,MAAO,SAAQ,CAAC2E,CAAD,CAAQC,CAAR,CAAe,CAC5B,GAAI,CAACp0C,CAAA,CAAQm0C,CAAR,CAAL,EAAuB,CAACp0C,CAAA,CAASo0C,CAAT,CAAxB,CAAyC,MAAOA,EAG9CC,EAAA,CAD8BC,QAAhC,GAAIpmB,IAAAmjB,IAAA,CAAS7uB,MAAA,CAAO6xB,CAAP,CAAT,CAAJ,CACU7xB,MAAA,CAAO6xB,CAAP,CADV,CAGUpyC,CAAA,CAAIoyC,CAAJ,CAGV,IAAIr0C,CAAA,CAASo0C,CAAT,CAAJ,CAEE,MAAIC,EAAJ,CACkB,CAAT,EAAAA,CAAA,CAAaD,CAAAtuC,MAAA,CAAY,CAAZ,CAAeuuC,CAAf,CAA
b,CAAqCD,CAAAtuC,MAAA,CAAYuuC,CAAZ,CAAmBD,CAAAt0C,OAAnB,CAD9C,CAGS,EAdiB,KAkBxBy0C,EAAM,EAlBkB,CAmB1BzzC,CAnB0B,CAmBvBihB,CAGDsyB,EAAJ,CAAYD,CAAAt0C,OAAZ,CACEu0C,CADF,CACUD,CAAAt0C,OADV,CAESu0C,CAFT,CAEiB,CAACD,CAAAt0C,OAFlB,GAGEu0C,CAHF,CAGU,CAACD,CAAAt0C,OAHX,CAKY,EAAZ,CAAIu0C,CAAJ,EACEvzC,CACA,CADI,CACJ,CAAAihB,CAAA,CAAIsyB,CAFN,GAIEvzC,CACA,CADIszC,CAAAt0C,OACJ,CADmBu0C,CACnB,CAAAtyB,CAAA,CAAIqyB,CAAAt0C,OALN,CAQA;IAAA,CAAOgB,CAAP,CAASihB,CAAT,CAAYjhB,CAAA,EAAZ,CACEyzC,CAAA5zC,KAAA,CAASyzC,CAAA,CAAMtzC,CAAN,CAAT,CAGF,OAAOyzC,EAvCqB,CADR,CA6JxB3E,QAASA,GAAa,CAAClrB,CAAD,CAAQ,CAC5B,MAAO,SAAQ,CAAC3gB,CAAD,CAAQywC,CAAR,CAAuBC,CAAvB,CAAqC,CAkClDC,QAASA,EAAiB,CAACC,CAAD,CAAOC,CAAP,CAAmB,CAC3C,MAAOjuC,GAAA,CAAUiuC,CAAV,CACA,CAAD,QAAQ,CAAC9oB,CAAD,CAAGC,CAAH,CAAK,CAAC,MAAO4oB,EAAA,CAAK5oB,CAAL,CAAOD,CAAP,CAAR,CAAZ,CACD6oB,CAHqC,CAK7CnpB,QAASA,EAAO,CAACqpB,CAAD,CAAKC,CAAL,CAAQ,CACtB,IAAIxvC,EAAK,MAAOuvC,EAAhB,CACItvC,EAAK,MAAOuvC,EAChB,OAAIxvC,EAAJ,EAAUC,CAAV,EACMxC,EAAA,CAAO8xC,CAAP,CAQJ,EARkB9xC,EAAA,CAAO+xC,CAAP,CAQlB,GAPED,CACA,CADKA,CAAA1Z,QAAA,EACL,CAAA2Z,CAAA,CAAKA,CAAA3Z,QAAA,EAMP,EAJU,QAIV,EAJI71B,CAIJ,GAHGuvC,CACA,CADKA,CAAAnqC,YAAA,EACL,CAAAoqC,CAAA,CAAKA,CAAApqC,YAAA,EAER,EAAImqC,CAAJ,GAAWC,CAAX,CAAsB,CAAtB,CACOD,CAAA,CAAKC,CAAL,CAAW,EAAX,CAAe,CAVxB,EAYSxvC,CAAA,CAAKC,CAAL,CAAW,EAAX,CAAe,CAfF,CArCxB,GADI,CAACtF,CAAA,CAAQ8D,CAAR,CACL,EAAI,CAACywC,CAAL,CAAoB,MAAOzwC,EAC3BywC,EAAA,CAAgBv0C,CAAA,CAAQu0C,CAAR,CAAA,CAAyBA,CAAzB,CAAwC,CAACA,CAAD,CACxDA,EAAA,CAAgB7wC,EAAA,CAAI6wC,CAAJ,CAAmB,QAAQ,CAACO,CAAD,CAAW,CAAA,IAChDH,EAAa,CAAA,CADmC,CAC5Bz6B,EAAM46B,CAAN56B,EAAmB3X,EAC3C,IAAIxC,CAAA,CAAS+0C,CAAT,CAAJ,CAAyB,CACvB,GAA4B,GAA5B,EAAKA,CAAA7vC,OAAA,CAAiB,CAAjB,CAAL,EAA0D,GAA1D,EAAmC6vC,CAAA7vC,OAAA,CAAiB,CAAjB,CAAnC,CACE0vC,CACA,CADoC,GACpC,EADaG,CAAA7vC,OAAA,CAAiB,CAAjB,CACb,CAAA6vC,CAAA,CAAYA,CAAAj0B,UAAA,CAAoB,CAApB,CAEd3G,EAAA,CAAMuK,CAAA,CAAOqwB,CAAP,CACN,IAAI56B,CAAAsB,SAAJ,CAAkB,CAChB,IAAIpb,EAAM8Z,CAAA,EACV,OAAOu6B,EAAA,CAAkB,QAAQ,CAAC5oB,CAAD;AAAGC,CAAH,CAAM,CACrC,MAAOP,EAAA,CAAQM,CAAA,CAAEzrB,CAAF,CAAR,CAAgB0rB,CAAA,CAAE1rB,CAAF,CAAhB,CAD8B,CAAhC,CAEJu0C,CAFI,CAFS,CANK,CAazB,MAAOF,EAAA,CAAkB,QAAQ,CAAC5oB,CAAD,CAAGC,CAAH,CAAK,CACpC,MAAOP,EAAA,CAAQrR,CAAA,CAAI2R,CAAJ,CAAR,CAAe3R,CAAA,CAAI4R,CAAJ,CAAf,CAD6B,CAA/B,CAEJ6oB,CAFI,CAf6C,CAAtC,CAoBhB,KADA,IAAII,EAAY,EAAhB,CACUl0C,EAAI,CAAd,CAAiBA,CAAjB,CAAqBiD,CAAAjE,OAArB,CAAmCgB,CAAA,EAAnC,CAA0Ck0C,CAAAr0C,KAAA,CAAeoD,CAAA,CAAMjD,CAAN,CAAf,CAC1C,OAAOk0C,EAAAp0C,KAAA,CAAe8zC,CAAA,CAEtB5E,QAAmB,CAAC1qC,CAAD,CAAKC,CAAL,CAAQ,CACzB,IAAM,IAAIvE,EAAI,CAAd,CAAiBA,CAAjB,CAAqB0zC,CAAA10C,OAArB,CAA2CgB,CAAA,EAA3C,CAAgD,CAC9C,IAAI6zC,EAAOH,CAAA,CAAc1zC,CAAd,CAAA,CAAiBsE,CAAjB,CAAqBC,CAArB,CACX,IAAa,CAAb,GAAIsvC,CAAJ,CAAgB,MAAOA,EAFuB,CAIhD,MAAO,EALkB,CAFL,CAA8BF,CAA9B,CAAf,CAzB2C,CADxB,CA6D9BQ,QAASA,GAAW,CAAC5nC,CAAD,CAAY,CAC1B/M,CAAA,CAAW+M,CAAX,CAAJ,GACEA,CADF,CACc,MACJA,CADI,CADd,CAKAA,EAAA4W,SAAA,CAAqB5W,CAAA4W,SAArB,EAA2C,IAC3C,OAAOvhB,EAAA,CAAQ2K,CAAR,CAPuB,CAyfhC6nC,QAASA,GAAc,CAACnuC,CAAD,CAAUigB,CAAV,CAAiBsF,CAAjB,CAAyBzH,CAAzB,CAAmC,CAqBxDswB,QAASA,EAAc,CAACC,CAAD,CAAUC,CAAV,CAA8B,CACnDA,CAAA,CAAqBA,CAAA,CAAqB,GAArB,CAA2BhrC,EAAA,CAAWgrC,CAAX,CAA+B,GAA/B,CAA3B,CAAiE,EACtFxwB,EAAAkN,YAAA,CAAqBhrB,CAArB,EAA+BquC,CAAA,CAAUE,EAAV,CAA0BC,EAAzD,EAAwEF,CAAxE,CACAxwB,EAAAmB,SAAA,CAAkBjf,CAAlB,EAA4BquC,CAAA,CAAUG,EAAV,CAAwBD,EAApD,EAAqED,CAArE,CAHmD,CArBG,IACpDG,EAAO,IAD6C,CAEpDC,EAAa1uC,CAAA1E,OAAA,EAAA2hB,WAAA,CAA4B,MAA5B,CAAbyxB,EAAoDC,EAFA,CAGpDC,EAAe,CAHqC,CAIpDC,EAASJ,CAAAK,OAATD,CAAuB,EAJ6B,CAKpDE,EAAW,EAGfN,EAAAO,MAAA,CAAa/uB,CAAAne,KAAb;AAA
2Bme,CAAAgvB,OAC3BR,EAAAS,OAAA,CAAc,CAAA,CACdT,EAAAU,UAAA,CAAiB,CAAA,CACjBV,EAAAW,OAAA,CAAc,CAAA,CACdX,EAAAY,SAAA,CAAgB,CAAA,CAEhBX,EAAAY,YAAA,CAAuBb,CAAvB,CAGAzuC,EAAAif,SAAA,CAAiBswB,EAAjB,CACAnB,EAAA,CAAe,CAAA,CAAf,CAkBAK,EAAAa,YAAA,CAAmBE,QAAQ,CAACC,CAAD,CAAU,CAGnCvrC,EAAA,CAAwBurC,CAAAT,MAAxB,CAAuC,OAAvC,CACAD,EAAAn1C,KAAA,CAAc61C,CAAd,CAEIA,EAAAT,MAAJ,GACEP,CAAA,CAAKgB,CAAAT,MAAL,CADF,CACwBS,CADxB,CANmC,CAoBrChB,EAAAiB,eAAA,CAAsBC,QAAQ,CAACF,CAAD,CAAU,CAClCA,CAAAT,MAAJ,EAAqBP,CAAA,CAAKgB,CAAAT,MAAL,CAArB,GAA6CS,CAA7C,EACE,OAAOhB,CAAA,CAAKgB,CAAAT,MAAL,CAET71C,EAAA,CAAQ01C,CAAR,CAAgB,QAAQ,CAACe,CAAD,CAAQC,CAAR,CAAyB,CAC/CpB,CAAAqB,aAAA,CAAkBD,CAAlB,CAAmC,CAAA,CAAnC,CAAyCJ,CAAzC,CAD+C,CAAjD,CAIAxyC,GAAA,CAAY8xC,CAAZ,CAAsBU,CAAtB,CARsC,CAoBxChB,EAAAqB,aAAA,CAAoBC,QAAQ,CAACF,CAAD,CAAkBxB,CAAlB,CAA2BoB,CAA3B,CAAoC,CAC9D,IAAIG,EAAQf,CAAA,CAAOgB,CAAP,CAEZ,IAAIxB,CAAJ,CACMuB,CAAJ,GACE3yC,EAAA,CAAY2yC,CAAZ,CAAmBH,CAAnB,CACA,CAAKG,CAAA72C,OAAL,GACE61C,CAAA,EAQA,CAPKA,CAOL,GANER,CAAA,CAAeC,CAAf,CAEA,CADAI,CAAAW,OACA,CADc,CAAA,CACd,CAAAX,CAAAY,SAAA,CAAgB,CAAA,CAIlB,EAFAR,CAAA,CAAOgB,CAAP,CAEA,CAF0B,CAAA,CAE1B,CADAzB,CAAA,CAAe,CAAA,CAAf,CAAqByB,CAArB,CACA,CAAAnB,CAAAoB,aAAA,CAAwBD,CAAxB,CAAyC,CAAA,CAAzC,CAA+CpB,CAA/C,CATF,CAFF,CADF,KAgBO,CACAG,CAAL,EACER,CAAA,CAAeC,CAAf,CAEF,IAAIuB,CAAJ,CACE,IAjmeyB,EAimezB,EAjmeC7yC,EAAA,CAimeY6yC,CAjmeZ,CAimemBH,CAjmenB,CAimeD,CAA8B,MAA9B,CADF,IAGEZ,EAAA,CAAOgB,CAAP,CAGA;AAH0BD,CAG1B,CAHkC,EAGlC,CAFAhB,CAAA,EAEA,CADAR,CAAA,CAAe,CAAA,CAAf,CAAsByB,CAAtB,CACA,CAAAnB,CAAAoB,aAAA,CAAwBD,CAAxB,CAAyC,CAAA,CAAzC,CAAgDpB,CAAhD,CAEFmB,EAAAh2C,KAAA,CAAW61C,CAAX,CAEAhB,EAAAW,OAAA,CAAc,CAAA,CACdX,EAAAY,SAAA,CAAgB,CAAA,CAfX,CAnBuD,CAgDhEZ,EAAAuB,UAAA,CAAiBC,QAAQ,EAAG,CAC1BnyB,CAAAkN,YAAA,CAAqBhrB,CAArB,CAA8BuvC,EAA9B,CACAzxB,EAAAmB,SAAA,CAAkBjf,CAAlB,CAA2BkwC,EAA3B,CACAzB,EAAAS,OAAA,CAAc,CAAA,CACdT,EAAAU,UAAA,CAAiB,CAAA,CACjBT,EAAAsB,UAAA,EAL0B,CAsB5BvB,EAAA0B,aAAA,CAAoBC,QAAS,EAAG,CAC9BtyB,CAAAkN,YAAA,CAAqBhrB,CAArB,CAA8BkwC,EAA9B,CACApyB,EAAAmB,SAAA,CAAkBjf,CAAlB,CAA2BuvC,EAA3B,CACAd,EAAAS,OAAA,CAAc,CAAA,CACdT,EAAAU,UAAA,CAAiB,CAAA,CACjBh2C,EAAA,CAAQ41C,CAAR,CAAkB,QAAQ,CAACU,CAAD,CAAU,CAClCA,CAAAU,aAAA,EADkC,CAApC,CAL8B,CAlJwB,CAizB1DE,QAASA,GAAQ,CAACC,CAAD,CAAOC,CAAP,CAAsBC,CAAtB,CAAgCt2C,CAAhC,CAAsC,CACrDo2C,CAAAR,aAAA,CAAkBS,CAAlB,CAAiCC,CAAjC,CACA,OAAOA,EAAA,CAAWt2C,CAAX,CAAmBxB,CAF2B,CAKvD+3C,QAASA,GAAS,CAACD,CAAD,CAAWE,CAAX,CAAkB,CAAA,IAC9B32C,CAD8B,CAC3BsgC,CACP,IAAIqW,CAAJ,CACE,IAAK32C,CAAL,CAAO,CAAP,CAAUA,CAAV,CAAY22C,CAAA33C,OAAZ,CAA0B,EAAEgB,CAA5B,CAEE,GADAsgC,CACI,CADGqW,CAAA,CAAM32C,CAAN,CACH,CAAAy2C,CAAA,CAASnW,CAAT,CAAJ,CACE,MAAO,CAAA,CAIb,OAAO,CAAA,CAV2B,CAcpCsW,QAASA,GAAwB,CAACL,CAAD,CAAOC,CAAP,CAAsBK,CAAtB,CAAgCC,CAAhC,CAA6CL,CAA7C,CAAuD,CAClF10C,CAAA,CAAS00C,CAAT,CAAJ,GACEF,CAAAQ,sBAYA,CAZ6B,CAAA,CAY7B;AAAAR,CAAAS,SAAAn3C,KAAA,CAXgBo3C,QAAQ,CAAC92C,CAAD,CAAQ,CAG9B,GAAKo2C,CAAAxB,OAAA,CAAYyB,CAAZ,CAAL,EACKE,EAAA,CAAUD,CAAV,CAAoBK,CAApB,CADL,EAEI,CAAAJ,EAAA,CAAUD,CAAV,CAAoBI,CAApB,CAFJ,CAMA,MAAO12C,EAHLo2C,EAAAR,aAAA,CAAkBS,CAAlB,CAAiC,CAAA,CAAjC,CAN4B,CAWhC,CAbF,CADsF,CAkBxFU,QAASA,GAAa,CAACtuC,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB4zC,CAAvB,CAA6B95B,CAA7B,CAAuCmX,CAAvC,CAAiD,CACrE,IAAI6iB,EAAWxwC,CAAAvD,KAAA,CAAay0C,EAAb,CAAf,CACIC,EAAcnxC,CAAA,CAAQ,CAAR,CAAAmxC,YADlB,CAC0CC,EAAU,EADpD,CAEItjC,EAAOhO,CAAA,CAAUE,CAAA,CAAQ,CAAR,CAAA8N,KAAV,CACXwiC,EAAAe,gBAAA,CAAuBb,CAKvB,IAAI,CAACh6B,CAAAswB,QAAL,CAAuB,CACrB,IAAIwK,EAAY,CAAA,CAEhBtxC,EAAAgZ,GAAA,CAAW,kBAAX,CAA+B,QAAQ,CAACjW,CAAD,CAAO,CAC5CuuC,CAAA,CAAY,CAAA,CADgC,CAA9C,CAIAtxC,EAAAgZ,GAAA,CAAW,gBAAX,CAA6B,QA
AQ,EAAG,CACtCs4B,CAAA,CAAY,CAAA,CACZ55B,EAAA,EAFsC,CAAxC,CAPqB,CAavB,IAAIA,EAAWA,QAAQ,CAAC65B,CAAD,CAAK,CAC1B,GAAID,CAAAA,CAAJ,CAAA,CACA,IAAIp3C,EAAQ8F,CAAAZ,IAAA,EAMZ,IAAI+R,CAAJ,EAAqC,OAArC,GAAarD,CAAAyjC,CAAAzjC,EAAMsjC,CAANtjC,MAAb,EAAgD9N,CAAA,CAAQ,CAAR,CAAAmxC,YAAhD,GAA2EA,CAA3E,CACEA,CAAA,CAAcnxC,CAAA,CAAQ,CAAR,CAAAmxC,YADhB,KAgBA,IARa,UAQT,GARArjC,CAQA,EARwBlO,EAAA,CAAUlD,CAAA80C,OAAV,EAAyB,GAAzB,CAQxB,GAPFt3C,CAOE,CAPM8R,EAAA,CAAK9R,CAAL,CAON,EADAu3C,CACA,CADajB,CACb,EADyBF,CAAAQ,sBACzB,CAAAR,CAAAoB,WAAA;AAAoBx3C,CAApB,EAAwC,EAAxC,GAA8BA,CAA9B,EAA8Cu3C,CAAlD,CACM9uC,CAAAwtB,QAAJ,CACEmgB,CAAAqB,cAAA,CAAmBz3C,CAAnB,CADF,CAGEyI,CAAAG,OAAA,CAAa,QAAQ,EAAG,CACtBwtC,CAAAqB,cAAA,CAAmBz3C,CAAnB,CADsB,CAAxB,CA3BJ,CAD0B,CAqC5B,IAAIsc,CAAAmxB,SAAA,CAAkB,OAAlB,CAAJ,CACE3nC,CAAAgZ,GAAA,CAAW,OAAX,CAAoBtB,CAApB,CADF,KAEO,CACL,IAAIuZ,CAAJ,CAEI2gB,EAAgBA,QAAQ,EAAG,CACxB3gB,CAAL,GACEA,CADF,CACYtD,CAAA3T,MAAA,CAAe,QAAQ,EAAG,CAClCtC,CAAA,EACAuZ,EAAA,CAAU,IAFwB,CAA1B,CADZ,CAD6B,CAS/BjxB,EAAAgZ,GAAA,CAAW,SAAX,CAAsB,QAAQ,CAAC3I,CAAD,CAAQ,CAChC/W,CAAAA,CAAM+W,CAAAwhC,QAIE,GAAZ,GAAIv4C,CAAJ,GAAmB,EAAnB,CAAwBA,CAAxB,EAAqC,EAArC,CAA+BA,CAA/B,EAA6C,EAA7C,EAAmDA,CAAnD,EAAiE,EAAjE,EAA0DA,CAA1D,GAEAs4C,CAAA,EAPoC,CAAtC,CAWA,IAAIp7B,CAAAmxB,SAAA,CAAkB,OAAlB,CAAJ,CACE3nC,CAAAgZ,GAAA,CAAW,WAAX,CAAwB44B,CAAxB,CAxBG,CA8BP5xC,CAAAgZ,GAAA,CAAW,QAAX,CAAqBtB,CAArB,CAEA44B,EAAAwB,QAAA,CAAeC,QAAQ,EAAG,CACxB/xC,CAAAZ,IAAA,CAAYkxC,CAAA0B,SAAA,CAAc1B,CAAAoB,WAAd,CAAA,CAAiC,EAAjC,CAAsCpB,CAAAoB,WAAlD,CADwB,CA7F2C,KAkGjEzH,EAAUvtC,CAAAu1C,UAIVhI,EAAJ,GAKE,CADAlsC,CACA,CADQksC,CAAAlsC,MAAA,CAAc,oBAAd,CACR,GACEksC,CACA,CADcnsC,MAAJ,CAAWC,CAAA,CAAM,CAAN,CAAX,CAAqBA,CAAA,CAAM,CAAN,CAArB,CACV,CAAAm0C,CAAA,CAAmBA,QAAQ,CAACh4C,CAAD,CAAQ,CACjC,MANKm2C,GAAA,CAASC,CAAT;AAAe,SAAf,CAA0BA,CAAA0B,SAAA,CAMD93C,CANC,CAA1B,EAMgB+vC,CANkChnC,KAAA,CAMzB/I,CANyB,CAAlD,CAMyBA,CANzB,CAK4B,CAFrC,EAMEg4C,CANF,CAMqBA,QAAQ,CAACh4C,CAAD,CAAQ,CACjC,IAAIi4C,EAAaxvC,CAAAygC,MAAA,CAAY6G,CAAZ,CAEjB,IAAI,CAACkI,CAAL,EAAmB,CAACA,CAAAlvC,KAApB,CACE,KAAMtK,EAAA,CAAO,WAAP,CAAA,CAAoB,UAApB,CACqDsxC,CADrD,CAEJkI,CAFI,CAEQpyC,EAAA,CAAYC,CAAZ,CAFR,CAAN,CAIF,MAjBKqwC,GAAA,CAASC,CAAT,CAAe,SAAf,CAA0BA,CAAA0B,SAAA,CAiBE93C,CAjBF,CAA1B,EAiBgBi4C,CAjBkClvC,KAAA,CAiBtB/I,CAjBsB,CAAlD,CAiB4BA,CAjB5B,CAS4B,CAarC,CADAo2C,CAAA8B,YAAAx4C,KAAA,CAAsBs4C,CAAtB,CACA,CAAA5B,CAAAS,SAAAn3C,KAAA,CAAmBs4C,CAAnB,CAxBF,CA4BA,IAAIx1C,CAAA21C,YAAJ,CAAsB,CACpB,IAAIC,EAAYp3C,CAAA,CAAIwB,CAAA21C,YAAJ,CACZE,EAAAA,CAAqBA,QAAQ,CAACr4C,CAAD,CAAQ,CACvC,MAAOm2C,GAAA,CAASC,CAAT,CAAe,WAAf,CAA4BA,CAAA0B,SAAA,CAAc93C,CAAd,CAA5B,EAAoDA,CAAAnB,OAApD,EAAoEu5C,CAApE,CAA+Ep4C,CAA/E,CADgC,CAIzCo2C,EAAAS,SAAAn3C,KAAA,CAAmB24C,CAAnB,CACAjC,EAAA8B,YAAAx4C,KAAA,CAAsB24C,CAAtB,CAPoB,CAWtB,GAAI71C,CAAA81C,YAAJ,CAAsB,CACpB,IAAIC,EAAYv3C,CAAA,CAAIwB,CAAA81C,YAAJ,CACZE,EAAAA,CAAqBA,QAAQ,CAACx4C,CAAD,CAAQ,CACvC,MAAOm2C,GAAA,CAASC,CAAT,CAAe,WAAf,CAA4BA,CAAA0B,SAAA,CAAc93C,CAAd,CAA5B,EAAoDA,CAAAnB,OAApD,EAAoE05C,CAApE,CAA+Ev4C,CAA/E,CADgC,CAIzCo2C,EAAAS,SAAAn3C,KAAA,CAAmB84C,CAAnB,CACApC;CAAA8B,YAAAx4C,KAAA,CAAsB84C,CAAtB,CAPoB,CA7I+C,CAu1CvEC,QAASA,GAAc,CAAC7wC,CAAD,CAAOkN,CAAP,CAAiB,CACtClN,CAAA,CAAO,SAAP,CAAmBA,CACnB,OAAO,CAAC,UAAD,CAAa,QAAQ,CAACgc,CAAD,CAAW,CAiFrC80B,QAASA,EAAe,CAAC5mB,CAAD,CAAUC,CAAV,CAAmB,CACzC,IAAIF,EAAS,EAAb,CAGQhyB,EAAI,CADZ,EAAA,CACA,IAAA,CAAeA,CAAf,CAAmBiyB,CAAAjzB,OAAnB,CAAmCgB,CAAA,EAAnC,CAAwC,CAEtC,IADA,IAAImyB,EAAQF,CAAA,CAAQjyB,CAAR,CAAZ,CACQoT,EAAI,CAAZ,CAAeA,CAAf,CAAmB8e,CAAAlzB,OAAnB,CAAmCoU,CAAA,EAAnC,CACE,GAAG+e,CAAH,EAAYD,CAAA,CAAQ9e,CAAR,CAAZ,CAAwB,SAAS,CAEnC4e,EAAAnyB,KAAA,CAAYsyB,CA
AZ,CALsC,CAOxC,MAAOH,EAXkC,CAc3C8mB,QAASA,EAAa,CAAC/nB,CAAD,CAAW,CAC/B,GAAI,CAAA5xB,CAAA,CAAQ4xB,CAAR,CAAJ,CAEO,CAAA,GAAI7xB,CAAA,CAAS6xB,CAAT,CAAJ,CACL,MAAOA,EAAA/pB,MAAA,CAAe,GAAf,CACF,IAAIjF,CAAA,CAASgvB,CAAT,CAAJ,CAAwB,CAAA,IACzBgoB,EAAU,EACd35C,EAAA,CAAQ2xB,CAAR,CAAkB,QAAQ,CAACjrB,CAAD,CAAI6qB,CAAJ,CAAO,CAC3B7qB,CAAJ,GACEizC,CADF,CACYA,CAAA5zC,OAAA,CAAewrB,CAAA3pB,MAAA,CAAQ,GAAR,CAAf,CADZ,CAD+B,CAAjC,CAKA,OAAO+xC,EAPsB,CAFxB,CAWP,MAAOhoB,EAdwB,CA9FjC,MAAO,UACK,IADL,MAEC7P,QAAQ,CAACtY,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB,CAiCnCq2C,QAASA,EAAkB,CAACD,CAAD,CAAUte,CAAV,CAAiB,CAC1C,IAAIwe,EAAchzC,CAAA+C,KAAA,CAAa,cAAb,CAAdiwC,EAA8C,EAAlD,CACIC,EAAkB,EACtB95C,EAAA,CAAQ25C,CAAR,CAAiB,QAAS,CAAC5wC,CAAD,CAAY,CACpC,GAAY,CAAZ,CAAIsyB,CAAJ,EAAiBwe,CAAA,CAAY9wC,CAAZ,CAAjB,CACE8wC,CAAA,CAAY9wC,CAAZ,CACA,EAD0B8wC,CAAA,CAAY9wC,CAAZ,CAC1B,EADoD,CACpD,EADyDsyB,CACzD,CAAIwe,CAAA,CAAY9wC,CAAZ,CAAJ,GAA+B,EAAU,CAAV;AAAEsyB,CAAF,CAA/B,EACEye,CAAAr5C,KAAA,CAAqBsI,CAArB,CAJgC,CAAtC,CAQAlC,EAAA+C,KAAA,CAAa,cAAb,CAA6BiwC,CAA7B,CACA,OAAOC,EAAAz4C,KAAA,CAAqB,GAArB,CAZmC,CA8B5C04C,QAASA,EAAkB,CAACzR,CAAD,CAAS,CAClC,GAAiB,CAAA,CAAjB,GAAIzyB,CAAJ,EAAyBrM,CAAAwwC,OAAzB,CAAwC,CAAxC,GAA8CnkC,CAA9C,CAAwD,CACtD,IAAIic,EAAa4nB,CAAA,CAAapR,CAAb,EAAuB,EAAvB,CACjB,IAAI,CAACC,CAAL,CAAa,CA1Cf,IAAIzW,EAAa8nB,CAAA,CA2CF9nB,CA3CE,CAA2B,CAA3B,CACjBvuB,EAAAmuB,UAAA,CAAeI,CAAf,CAyCe,CAAb,IAEO,IAAI,CAAC7sB,EAAA,CAAOqjC,CAAP,CAAcC,CAAd,CAAL,CAA4B,CAEnBlZ,IAAAA,EADGqqB,CAAArqB,CAAakZ,CAAblZ,CACHA,CArBd0C,EAAQ0nB,CAAA,CAqBkB3nB,CArBlB,CAA4BzC,CAA5B,CAqBMA,CApBd4C,EAAWwnB,CAAA,CAAgBpqB,CAAhB,CAoBeyC,CApBf,CAoBGzC,CAnBlB4C,EAAW2nB,CAAA,CAAkB3nB,CAAlB,CAA6B,EAA7B,CAmBO5C,CAlBlB0C,EAAQ6nB,CAAA,CAAkB7nB,CAAlB,CAAyB,CAAzB,CAEa,EAArB,GAAIA,CAAAnyB,OAAJ,CACE+kB,CAAAkN,YAAA,CAAqBhrB,CAArB,CAA8BorB,CAA9B,CADF,CAE+B,CAAxB,GAAIA,CAAAryB,OAAJ,CACL+kB,CAAAmB,SAAA,CAAkBjf,CAAlB,CAA2BkrB,CAA3B,CADK,CAGLpN,CAAAuN,SAAA,CAAkBrrB,CAAlB,CAA2BkrB,CAA3B,CAAkCE,CAAlC,CASmC,CAJmB,CASxDsW,CAAA,CAASzjC,EAAA,CAAYwjC,CAAZ,CAVyB,CA9DpC,IAAIC,CAEJ/+B,EAAAlF,OAAA,CAAaf,CAAA,CAAKoF,CAAL,CAAb,CAAyBoxC,CAAzB,CAA6C,CAAA,CAA7C,CAEAx2C,EAAAioB,SAAA,CAAc,OAAd,CAAuB,QAAQ,CAACzqB,CAAD,CAAQ,CACrCg5C,CAAA,CAAmBvwC,CAAAygC,MAAA,CAAY1mC,CAAA,CAAKoF,CAAL,CAAZ,CAAnB,CADqC,CAAvC,CAKa,UAAb,GAAIA,CAAJ,EACEa,CAAAlF,OAAA,CAAa,QAAb,CAAuB,QAAQ,CAAC01C,CAAD,CAASC,CAAT,CAAoB,CAEjD,IAAIC,EAAMF,CAANE,CAAe,CACnB,IAAIA,CAAJ,IAAaD,CAAb,CAAyB,CAAzB,EAA6B,CAC3B,IAAIN,EAAUD,CAAA,CAAalwC,CAAAygC,MAAA,CAAY1mC,CAAA,CAAKoF,CAAL,CAAZ,CAAb,CACduxC,EAAA,GAAQrkC,CAAR,EAQAic,CACJ,CADiB8nB,CAAA,CAPAD,CAOA,CAA2B,CAA3B,CACjB,CAAAp2C,CAAAmuB,UAAA,CAAeI,CAAf,CATI;CAaAA,CACJ,CADiB8nB,CAAA,CAXGD,CAWH,CAA4B,EAA5B,CACjB,CAAAp2C,CAAAquB,aAAA,CAAkBE,CAAlB,CAdI,CAF2B,CAHoB,CAAnD,CAXiC,CAFhC,CAD8B,CAAhC,CAF+B,CA7ujBxC,IAAIimB,GAA0B,UAA9B,CAYIpxC,EAAYA,QAAQ,CAACmsC,CAAD,CAAQ,CAAC,MAAOhzC,EAAA,CAASgzC,CAAT,CAAA,CAAmBA,CAAAtoC,YAAA,EAAnB,CAA0CsoC,CAAlD,CAZhC,CAaIzyC,GAAiB4hC,MAAA/mB,UAAA7a,eAbrB,CAyBIuM,GAAYA,QAAQ,CAACkmC,CAAD,CAAQ,CAAC,MAAOhzC,EAAA,CAASgzC,CAAT,CAAA,CAAmBA,CAAAvhC,YAAA,EAAnB,CAA0CuhC,CAAlD,CAzBhC,CAoDI96B,CApDJ,CAqDIlR,CArDJ,CAsDI2L,EAtDJ,CAuDI7M,GAAoB,EAAAA,MAvDxB,CAwDInF,GAAoB,EAAAA,KAxDxB,CAyDIqC,GAAoBm/B,MAAA/mB,UAAApY,SAzDxB,CA0DIyB,GAAoB/E,CAAA,CAAO,IAAP,CA1DxB,CA6DIuK,GAAoB1K,CAAA0K,QAApBA,GAAuC1K,CAAA0K,QAAvCA,CAAwD,EAAxDA,CA7DJ,CA8DI+C,EA9DJ,CA+DIkb,EA/DJ,CAgEI9mB,GAAoB,CAAC,GAAD,CAAM,GAAN,CAAW,GAAX,CAMxB8W,EAAA,CAAOjW,CAAA,CAAI,CAAC,YAAA+G,KAAA,CAAkBnC,CAAA,CAAUknC,SAAAD,UAAV,CAAlB,CAAD,EAAsD,EAAtD,EAA0D,CAA1D,CAAJ,CACHtoC,MAAA,CAAM0S,CAAN,CAAJ,GACEA,CADF,CACSjW,CAAA,CAAI,CAAC,uBAAA+G,KAAA,CAA6BnC,
CAAA,CAAUknC,SAAAD,UAAV,CAA7B,CAAD,EAAiE,EAAjE,EAAqE,CAArE,CAAJ,CADT,CAkNAvrC,EAAAqW,QAAA,CAAe,EAoBfpW,GAAAoW,QAAA,CAAmB,EA8GnB,KAAI3Y,EAAW,QAAQ,EAAG,CACxB,MAAKK,EAAA,CAAWkmB,KAAAvmB,QAAX,CAAL;AAKOumB,KAAAvmB,QALP,CACS,QAAQ,CAACgB,CAAD,CAAQ,CACrB,MAAgC,gBAAhC,GAAO+B,EAAAxC,KAAA,CAAcS,CAAd,CADc,CAFD,CAAX,EAAf,CAyEI8R,GAAQ,QAAQ,EAAG,CAIrB,MAAKvR,OAAA4Z,UAAArI,KAAL,CAKO,QAAQ,CAAC9R,CAAD,CAAQ,CACrB,MAAOjB,EAAA,CAASiB,CAAT,CAAA,CAAkBA,CAAA8R,KAAA,EAAlB,CAAiC9R,CADnB,CALvB,CACS,QAAQ,CAACA,CAAD,CAAQ,CACrB,MAAOjB,EAAA,CAASiB,CAAT,CAAA,CAAkBA,CAAAuG,QAAA,CAAc,QAAd,CAAwB,EAAxB,CAAAA,QAAA,CAAoC,QAApC,CAA8C,EAA9C,CAAlB,CAAsEvG,CADxD,CALJ,CAAX,EA8CVinB,GAAA,CADS,CAAX,CAAIhQ,CAAJ,CACcgQ,QAAQ,CAACnhB,CAAD,CAAU,CAC5BA,CAAA,CAAUA,CAAAxD,SAAA,CAAmBwD,CAAnB,CAA6BA,CAAA,CAAQ,CAAR,CACvC,OAAQA,EAAAmkB,UACD,EAD2C,MAC3C,EADsBnkB,CAAAmkB,UACtB,CAAHpe,EAAA,CAAU/F,CAAAmkB,UAAV,CAA8B,GAA9B,CAAoCnkB,CAAAxD,SAApC,CAAG,CAAqDwD,CAAAxD,SAHhC,CADhC,CAOc2kB,QAAQ,CAACnhB,CAAD,CAAU,CAC5B,MAAOA,EAAAxD,SAAA,CAAmBwD,CAAAxD,SAAnB,CAAsCwD,CAAA,CAAQ,CAAR,CAAAxD,SADjB,CAwShC,KAAIwJ,GAAMA,QAAQ,EAAG,CACnB,GAAInK,CAAA,CAAUmK,EAAAstC,UAAV,CAAJ,CAA8B,MAAOttC,GAAAstC,UAErC,KAAIC,EAAS,EAAG,CAAA96C,CAAA+6C,cAAA,CAAuB,UAAvB,CAAH,EACG,CAAA/6C,CAAA+6C,cAAA,CAAuB,eAAvB,CADH,CAGb;GAAI,CAACD,CAAL,CACE,GAAI,CAEF,IAAI7W,QAAJ,CAAa,EAAb,CAFE,CAIF,MAAOt8B,CAAP,CAAU,CACVmzC,CAAA,CAAS,CAAA,CADC,CAKd,MAAQvtC,GAAAstC,UAAR,CAAwBC,CAhBL,CAArB,CAqcI/vC,GAAoB,QArcxB,CA28BIsC,GAAU,MACN,QADM,OAEL,CAFK,OAGL,CAHK,KAIP,EAJO,UAKF,oBALE,CAiOdiG,EAAA0e,QAAA,CAAiB,OAhqEsB,KAkqEnCjc,GAAUzC,CAAA4H,MAAVnF,CAAyB,EAlqEU,CAmqEnCE,GAAO,CAnqE4B,CAoqEnC0jB,GAAsB55B,CAAAC,SAAAg7C,iBACA,CAAlB,QAAQ,CAACzzC,CAAD,CAAU8N,CAAV,CAAgBjP,CAAhB,CAAoB,CAACmB,CAAAyzC,iBAAA,CAAyB3lC,CAAzB,CAA+BjP,CAA/B,CAAmC,CAAA,CAAnC,CAAD,CAAV,CAClB,QAAQ,CAACmB,CAAD,CAAU8N,CAAV,CAAgBjP,CAAhB,CAAoB,CAACmB,CAAA0zC,YAAA,CAAoB,IAApB,CAA2B5lC,CAA3B,CAAiCjP,CAAjC,CAAD,CAtqEG,CAuqEnCuP,GAAyB5V,CAAAC,SAAAk7C,oBACA,CAArB,QAAQ,CAAC3zC,CAAD,CAAU8N,CAAV,CAAgBjP,CAAhB,CAAoB,CAACmB,CAAA2zC,oBAAA,CAA4B7lC,CAA5B,CAAkCjP,CAAlC,CAAsC,CAAA,CAAtC,CAAD,CAAP,CACrB,QAAQ,CAACmB,CAAD,CAAU8N,CAAV,CAAgBjP,CAAhB,CAAoB,CAACmB,CAAA4zC,YAAA,CAAoB,IAApB,CAA2B9lC,CAA3B,CAAiCjP,CAAjC,CAAD,CAKvBkN,EAAA8nC,MAAb,CAA4BC,QAAQ,CAACv3C,CAAD,CAAO,CAEzC,MAAO,KAAAoX,MAAA,CAAWpX,CAAA,CAAK,IAAAkuB,QAAL,CAAX,CAAP,EAAyC,EAFA,CAQ3C,KAAIlgB,GAAuB,iBAA3B;AACII,GAAkB,aADtB,CAEIsB,GAAetT,CAAA,CAAO,QAAP,CAFnB,CA4DIwT,GAAoB,4BA5DxB,CA6DIG,GAAc,WA7DlB,CA8DII,GAAkB,WA9DtB,CA+DIK,GAAmB,yEA/DvB,CAiEIH,GAAU,QACF,CAAC,CAAD,CAAI,8BAAJ,CAAoC,WAApC,CADE,OAGH,CAAC,CAAD,CAAI,SAAJ,CAAe,UAAf,CAHG,KAIL,CAAC,CAAD,CAAI,mBAAJ,CAAyB,qBAAzB,CAJK,IAKN,CAAC,CAAD,CAAI,gBAAJ,CAAsB,kBAAtB,CALM,IAMN,CAAC,CAAD,CAAI,oBAAJ,CAA0B,uBAA1B,CANM,UAOA,CAAC,CAAD,CAAI,EAAJ,CAAQ,EAAR,CAPA,CAUdA,GAAAmnC,SAAA,CAAmBnnC,EAAAonC,OACnBpnC,GAAAqnC,MAAA,CAAgBrnC,EAAAsnC,MAAhB,CAAgCtnC,EAAAunC,SAAhC,CAAmDvnC,EAAAwnC,QAAnD,CAAqExnC,EAAAynC,MACrEznC,GAAA0nC,GAAA;AAAa1nC,EAAA2nC,GA6Pb,KAAIz1B,GAAkB/S,CAAAsI,UAAlByK,CAAqC,OAChC01B,QAAQ,CAAC31C,CAAD,CAAK,CAGlB41C,QAASA,EAAO,EAAG,CACbC,CAAJ,GACAA,CACA,CADQ,CAAA,CACR,CAAA71C,CAAA,EAFA,CADiB,CAFnB,IAAI61C,EAAQ,CAAA,CASgB,WAA5B,GAAIj8C,CAAA85B,WAAJ,CACEvb,UAAA,CAAWy9B,CAAX,CADF,EAGE,IAAAz7B,GAAA,CAAQ,kBAAR,CAA4By7B,CAA5B,CAGA,CAAA1oC,CAAA,CAAOvT,CAAP,CAAAwgB,GAAA,CAAkB,MAAlB,CAA0By7B,CAA1B,CANF,CAVkB,CADmB,UAqB7Bx4C,QAAQ,EAAG,CACnB,IAAI/B,EAAQ,EACZf,EAAA,CAAQ,IAAR,CAAc,QAAQ,CAACiH,CAAD,CAAG,CAAElG,CAAAN,KAAA,CAAW,EAAX,CAAgBwG,CAAhB,CAAF,CAAzB,CACA,OAAO,GAAP,CAAalG,CAAAM,KAAA,CAAW,IAAX,CAAb,CAAgC,GAHb,CArBkB,IA2BnCukB,QAAQ,CAAC3kB,CAAD,CAAQ,CAChB,MAAiB,EAAV,EAACA,C
AAD,CAAe6F,CAAA,CAAO,IAAA,CAAK7F,CAAL,CAAP,CAAf,CAAqC6F,CAAA,CAAO,IAAA,CAAK,IAAAlH,OAAL,CAAmBqB,CAAnB,CAAP,CAD5B,CA3BmB,QA+B/B,CA/B+B,MAgCjCR,EAhCiC,MAiCjC,EAAAC,KAjCiC,QAkC/B,EAAAqD,OAlC+B,CAAzC,CA0CIgT,GAAe,EACnB/W,EAAA,CAAQ,2DAAA,MAAA,CAAA,GAAA,CAAR,CAAgF,QAAQ,CAACe,CAAD,CAAQ,CAC9FgW,EAAA,CAAapQ,CAAA,CAAU5F,CAAV,CAAb,CAAA,CAAiCA,CAD6D,CAAhG,CAGA,KAAIiW,GAAmB,EACvBhX,EAAA,CAAQ,kDAAA,MAAA,CAAA,GAAA,CAAR;AAAuE,QAAQ,CAACe,CAAD,CAAQ,CACrFiW,EAAA,CAAiBpK,EAAA,CAAU7L,CAAV,CAAjB,CAAA,CAAqC,CAAA,CADgD,CAAvF,CAYAf,EAAA,CAAQ,MACAwV,EADA,YAEMf,EAFN,CAAR,CAGG,QAAQ,CAAC/O,CAAD,CAAKiD,CAAL,CAAW,CACpBiK,CAAA,CAAOjK,CAAP,CAAA,CAAejD,CADK,CAHtB,CAOA1F,EAAA,CAAQ,MACAwV,EADA,eAESe,EAFT,OAIC/M,QAAQ,CAAC3C,CAAD,CAAU,CAEvB,MAAOC,EAAA8C,KAAA,CAAY/C,CAAZ,CAAqB,QAArB,CAAP,EAAyC0P,EAAA,CAAoB1P,CAAA6P,WAApB,EAA0C7P,CAA1C,CAAmD,CAAC,eAAD,CAAkB,QAAlB,CAAnD,CAFlB,CAJnB,cASQ8jB,QAAQ,CAAC9jB,CAAD,CAAU,CAE9B,MAAOC,EAAA8C,KAAA,CAAY/C,CAAZ,CAAqB,eAArB,CAAP,EAAgDC,CAAA8C,KAAA,CAAY/C,CAAZ,CAAqB,yBAArB,CAFlB,CAT1B,YAcMyP,EAdN,UAgBInN,QAAQ,CAACtC,CAAD,CAAU,CAC1B,MAAO0P,GAAA,CAAoB1P,CAApB,CAA6B,WAA7B,CADmB,CAhBtB,YAoBMwrB,QAAQ,CAACxrB,CAAD,CAAS8B,CAAT,CAAe,CACjC9B,CAAA20C,gBAAA,CAAwB7yC,CAAxB,CADiC,CApB7B,UAwBIiN,EAxBJ,KA0BD6lC,QAAQ,CAAC50C,CAAD,CAAU8B,CAAV,CAAgB5H,CAAhB,CAAuB,CAClC4H,CAAA,CAAOwI,EAAA,CAAUxI,CAAV,CAEP,IAAIjG,CAAA,CAAU3B,CAAV,CAAJ,CACE8F,CAAAsnC,MAAA,CAAcxlC,CAAd,CAAA,CAAsB5H,CADxB,KAEO,CACL,IAAIkF,CAEQ,EAAZ,EAAI+R,CAAJ,GAEE/R,CACA,CADMY,CAAA60C,aACN,EAD8B70C,CAAA60C,aAAA,CAAqB/yC,CAArB,CAC9B;AAAY,EAAZ,GAAI1C,CAAJ,GAAgBA,CAAhB,CAAsB,MAAtB,CAHF,CAMAA,EAAA,CAAMA,CAAN,EAAaY,CAAAsnC,MAAA,CAAcxlC,CAAd,CAED,EAAZ,EAAIqP,CAAJ,GAEE/R,CAFF,CAEiB,EAAT,GAACA,CAAD,CAAe1G,CAAf,CAA2B0G,CAFnC,CAKA,OAAQA,EAhBH,CAL2B,CA1B9B,MAmDA1C,QAAQ,CAACsD,CAAD,CAAU8B,CAAV,CAAgB5H,CAAhB,CAAsB,CAClC,IAAI46C,EAAiBh1C,CAAA,CAAUgC,CAAV,CACrB,IAAIoO,EAAA,CAAa4kC,CAAb,CAAJ,CACE,GAAIj5C,CAAA,CAAU3B,CAAV,CAAJ,CACQA,CAAN,EACE8F,CAAA,CAAQ8B,CAAR,CACA,CADgB,CAAA,CAChB,CAAA9B,CAAAoP,aAAA,CAAqBtN,CAArB,CAA2BgzC,CAA3B,CAFF,GAIE90C,CAAA,CAAQ8B,CAAR,CACA,CADgB,CAAA,CAChB,CAAA9B,CAAA20C,gBAAA,CAAwBG,CAAxB,CALF,CADF,KASE,OAAQ90C,EAAA,CAAQ8B,CAAR,CAED,EADG0f,CAAAxhB,CAAAmC,WAAA4yC,aAAA,CAAgCjzC,CAAhC,CAAA0f,EAAwChmB,CAAxCgmB,WACH,CAAEszB,CAAF,CACEp8C,CAbb,KAeO,IAAImD,CAAA,CAAU3B,CAAV,CAAJ,CACL8F,CAAAoP,aAAA,CAAqBtN,CAArB,CAA2B5H,CAA3B,CADK,KAEA,IAAI8F,CAAAiP,aAAJ,CAKL,MAFI+lC,EAEG,CAFGh1C,CAAAiP,aAAA,CAAqBnN,CAArB,CAA2B,CAA3B,CAEH,CAAQ,IAAR,GAAAkzC,CAAA,CAAet8C,CAAf,CAA2Bs8C,CAxBF,CAnD9B,MA+EAv4C,QAAQ,CAACuD,CAAD,CAAU8B,CAAV,CAAgB5H,CAAhB,CAAuB,CACnC,GAAI2B,CAAA,CAAU3B,CAAV,CAAJ,CACE8F,CAAA,CAAQ8B,CAAR,CAAA,CAAgB5H,CADlB,KAGE,OAAO8F,EAAA,CAAQ8B,CAAR,CAJ0B,CA/E/B,MAuFC,QAAQ,EAAG,CAYhBmzC,QAASA,EAAO,CAACj1C,CAAD,CAAU9F,CAAV,CAAiB,CAC/B,IAAIg7C,EAAWC,CAAA,CAAwBn1C,CAAAhH,SAAxB,CACf,IAAI4C,CAAA,CAAY1B,CAAZ,CAAJ,CACE,MAAOg7C,EAAA,CAAWl1C,CAAA,CAAQk1C,CAAR,CAAX,CAA+B,EAExCl1C,EAAA,CAAQk1C,CAAR,CAAA,CAAoBh7C,CALW,CAXjC,IAAIi7C,EAA0B,EACnB,EAAX,CAAIhkC,CAAJ,EACEgkC,CAAA,CAAwB,CAAxB,CACA;AAD6B,WAC7B,CAAAA,CAAA,CAAwB,CAAxB,CAAA,CAA6B,WAF/B,EAIEA,CAAA,CAAwB,CAAxB,CAJF,CAKEA,CAAA,CAAwB,CAAxB,CALF,CAK+B,aAE/BF,EAAAG,IAAA,CAAc,EACd,OAAOH,EAVS,CAAX,EAvFD,KA4GD71C,QAAQ,CAACY,CAAD,CAAU9F,CAAV,CAAiB,CAC5B,GAAI0B,CAAA,CAAY1B,CAAZ,CAAJ,CAAwB,CACtB,GAA2B,QAA3B,GAAIinB,EAAA,CAAUnhB,CAAV,CAAJ,EAAuCA,CAAAq1C,SAAvC,CAAyD,CACvD,IAAI13C,EAAS,EACbxE,EAAA,CAAQ6G,CAAA2a,QAAR,CAAyB,QAAS,CAACq5B,CAAD,CAAS,CACrCA,CAAAsB,SAAJ,EACE33C,CAAA/D,KAAA,CAAYo6C,CAAA95C,MAAZ,EAA4B85C,CAAA9qB,KAA5B,CAFuC,CAA3C,CAKA,OAAyB,EAAlB,GAAAvrB,CAAA5E,OAAA,CAAsB,IAAtB,CAA6B4E,CAPmB,CASzD,MAAOqC,EAAA9F,MAVe,CAYxB8F
,CAAA9F,MAAA,CAAgBA,CAbY,CA5GxB,MA4HAqG,QAAQ,CAACP,CAAD,CAAU9F,CAAV,CAAiB,CAC7B,GAAI0B,CAAA,CAAY1B,CAAZ,CAAJ,CACE,MAAO8F,EAAA8M,UAET,KAJ6B,IAIpB/S,EAAI,CAJgB,CAIbsT,EAAarN,CAAAqN,WAA7B,CAAiDtT,CAAjD,CAAqDsT,CAAAtU,OAArD,CAAwEgB,CAAA,EAAxE,CACE4T,EAAA,CAAaN,CAAA,CAAWtT,CAAX,CAAb,CAEFiG,EAAA8M,UAAA,CAAoB5S,CAPS,CA5HzB,OAsIC6V,EAtID,CAAR,CAuIG,QAAQ,CAAClR,CAAD,CAAKiD,CAAL,CAAU,CAInBiK,CAAAsI,UAAA,CAAiBvS,CAAjB,CAAA,CAAyB,QAAQ,CAACg5B,CAAD,CAAOC,CAAP,CAAa,CAAA,IACxChhC,CADwC,CACrCT,CADqC,CAExCi8C,EAAY,IAAAx8C,OAKhB,IAAI8F,CAAJ,GAAWkR,EAAX,GACoB,CAAd,EAAClR,CAAA9F,OAAD,EAAoB8F,CAApB,GAA2BkQ,EAA3B,EAA6ClQ,CAA7C,GAAoD4Q,EAApD,CAAyEqrB,CAAzE,CAAgFC,CADtF,IACgGriC,CADhG,CAC4G,CAC1G,GAAIoD,CAAA,CAASg/B,CAAT,CAAJ,CAAoB,CAGlB,IAAK/gC,CAAL;AAAS,CAAT,CAAYA,CAAZ,CAAgBw7C,CAAhB,CAA2Bx7C,CAAA,EAA3B,CACE,GAAI8E,CAAJ,GAAW8P,EAAX,CAEE9P,CAAA,CAAG,IAAA,CAAK9E,CAAL,CAAH,CAAY+gC,CAAZ,CAFF,KAIE,KAAKxhC,CAAL,GAAYwhC,EAAZ,CACEj8B,CAAA,CAAG,IAAA,CAAK9E,CAAL,CAAH,CAAYT,CAAZ,CAAiBwhC,CAAA,CAAKxhC,CAAL,CAAjB,CAKN,OAAO,KAdW,CAkBdY,CAAAA,CAAQ2E,CAAAu2C,IAERhoC,EAAAA,CAAMlT,CAAD,GAAWxB,CAAX,CAAwByuB,IAAAyjB,IAAA,CAAS2K,CAAT,CAAoB,CAApB,CAAxB,CAAiDA,CAC1D,KAASpoC,CAAT,CAAa,CAAb,CAAgBA,CAAhB,CAAoBC,CAApB,CAAwBD,CAAA,EAAxB,CAA6B,CAC3B,IAAIkR,EAAYxf,CAAA,CAAG,IAAA,CAAKsO,CAAL,CAAH,CAAY2tB,CAAZ,CAAkBC,CAAlB,CAChB7gC,EAAA,CAAQA,CAAA,CAAQA,CAAR,CAAgBmkB,CAAhB,CAA4BA,CAFT,CAI7B,MAAOnkB,EA1BiG,CA8B1G,IAAKH,CAAL,CAAS,CAAT,CAAYA,CAAZ,CAAgBw7C,CAAhB,CAA2Bx7C,CAAA,EAA3B,CACE8E,CAAA,CAAG,IAAA,CAAK9E,CAAL,CAAH,CAAY+gC,CAAZ,CAAkBC,CAAlB,CAGF,OAAO,KA1CmC,CAJ3B,CAvIrB,CAuPA5hC,EAAA,CAAQ,YACMyU,EADN,QAGED,EAHF,IAKF6nC,QAASA,EAAI,CAACx1C,CAAD,CAAU8N,CAAV,CAAgBjP,CAAhB,CAAoBkP,CAApB,CAAgC,CAC/C,GAAIlS,CAAA,CAAUkS,CAAV,CAAJ,CAA4B,KAAM9B,GAAA,CAAa,QAAb,CAAN,CADmB,IAG3C+B,EAASC,EAAA,CAAmBjO,CAAnB,CAA4B,QAA5B,CAHkC,CAI3CkO,EAASD,EAAA,CAAmBjO,CAAnB,CAA4B,QAA5B,CAERgO,EAAL,EAAaC,EAAA,CAAmBjO,CAAnB,CAA4B,QAA5B,CAAsCgO,CAAtC,CAA+C,EAA/C,CACRE,EAAL,EAAaD,EAAA,CAAmBjO,CAAnB,CAA4B,QAA5B,CAAsCkO,CAAtC,CAA+CkC,EAAA,CAAmBpQ,CAAnB,CAA4BgO,CAA5B,CAA/C,CAEb7U,EAAA,CAAQ2U,CAAA/M,MAAA,CAAW,GAAX,CAAR,CAAyB,QAAQ,CAAC+M,CAAD,CAAM,CACrC,IAAI2nC,EAAWznC,CAAA,CAAOF,CAAP,CAEf,IAAI,CAAC2nC,CAAL,CAAe,CACb,GAAY,YAAZ,EAAI3nC,CAAJ,EAAoC,YAApC,EAA4BA,CAA5B,CAAkD,CAChD,IAAI4nC,EAAWj9C,CAAAy5B,KAAAwjB,SAAA,EAA0Bj9C,CAAAy5B,KAAAyjB,wBAA1B;AACf,QAAQ,CAAE5wB,CAAF,CAAKC,CAAL,CAAS,CAAA,IAEX4wB,EAAuB,CAAf,GAAA7wB,CAAA/rB,SAAA,CAAmB+rB,CAAApV,gBAAnB,CAAuCoV,CAFpC,CAGf8wB,EAAM7wB,CAAN6wB,EAAW7wB,CAAAnV,WACX,OAAOkV,EAAP,GAAa8wB,CAAb,EAAoB,CAAC,EAAGA,CAAH,EAA2B,CAA3B,GAAUA,CAAA78C,SAAV,GACnB48C,CAAAF,SAAA,CACAE,CAAAF,SAAA,CAAgBG,CAAhB,CADA,CAEA9wB,CAAA4wB,wBAFA,EAE6B5wB,CAAA4wB,wBAAA,CAA2BE,CAA3B,CAF7B,CAEgE,EAH7C,EAJN,CADF,CAWb,QAAQ,CAAE9wB,CAAF,CAAKC,CAAL,CAAS,CACf,GAAKA,CAAL,CACE,IAAA,CAASA,CAAT,CAAaA,CAAAnV,WAAb,CAAA,CACE,GAAKmV,CAAL,GAAWD,CAAX,CACE,MAAO,CAAA,CAIb,OAAO,CAAA,CARQ,CAWnB/W,EAAA,CAAOF,CAAP,CAAA,CAAe,EAOf0nC,EAAA,CAAKx1C,CAAL,CAFe81C,YAAe,UAAfA,YAAwC,WAAxCA,CAED,CAAShoC,CAAT,CAAd,CAA8B,QAAQ,CAACuC,CAAD,CAAQ,CAC5C,IAAmB0lC,EAAU1lC,CAAA2lC,cAGvBD,EAAN,GAAkBA,CAAlB,GAHanlC,IAGb,EAAyC8kC,CAAA,CAH5B9kC,IAG4B,CAAiBmlC,CAAjB,CAAzC,GACE7nC,CAAA,CAAOmC,CAAP,CAAcvC,CAAd,CAL0C,CAA9C,CA9BgD,CAAlD,IAwCEskB,GAAA,CAAmBpyB,CAAnB,CAA4B8N,CAA5B,CAAkCI,CAAlC,CACA,CAAAF,CAAA,CAAOF,CAAP,CAAA,CAAe,EAEjB2nC,EAAA,CAAWznC,CAAA,CAAOF,CAAP,CA5CE,CA8Cf2nC,CAAA77C,KAAA,CAAciF,CAAd,CAjDqC,CAAvC,CAT+C,CAL3C,KAmEDgP,EAnEC,KAqEDooC,QAAQ,CAACj2C,CAAD,CAAU8N,CAAV,CAAgBjP,CAAhB,CAAoB,CAC/BmB,CAAA,CAAUC,CAAA,CAAOD,CAAP,CAKVA,EAAAgZ,GAAA,CAAWlL,CAAX,CAAiB0nC,QAASA,EAAI,EAAG,CAC/Bx1C,CAAAk2C,IAAA,CAAYpoC,CAAZ
;AAAkBjP,CAAlB,CACAmB,EAAAk2C,IAAA,CAAYpoC,CAAZ,CAAkB0nC,CAAlB,CAF+B,CAAjC,CAIAx1C,EAAAgZ,GAAA,CAAWlL,CAAX,CAAiBjP,CAAjB,CAV+B,CArE3B,aAkFO4nB,QAAQ,CAACzmB,CAAD,CAAUm2C,CAAV,CAAuB,CAAA,IACtC/7C,CADsC,CAC/BkB,EAAS0E,CAAA6P,WACpBlC,GAAA,CAAa3N,CAAb,CACA7G,EAAA,CAAQ,IAAI4S,CAAJ,CAAWoqC,CAAX,CAAR,CAAiC,QAAQ,CAAC55C,CAAD,CAAM,CACzCnC,CAAJ,CACEkB,CAAA86C,aAAA,CAAoB75C,CAApB,CAA0BnC,CAAAwK,YAA1B,CADF,CAGEtJ,CAAAkvB,aAAA,CAAoBjuB,CAApB,CAA0ByD,CAA1B,CAEF5F,EAAA,CAAQmC,CANqC,CAA/C,CAH0C,CAlFtC,UA+FIiP,QAAQ,CAACxL,CAAD,CAAU,CAC1B,IAAIwL,EAAW,EACfrS,EAAA,CAAQ6G,CAAAqN,WAAR,CAA4B,QAAQ,CAACrN,CAAD,CAAS,CAClB,CAAzB,GAAIA,CAAAhH,SAAJ,EACEwS,CAAA5R,KAAA,CAAcoG,CAAd,CAFyC,CAA7C,CAIA,OAAOwL,EANmB,CA/FtB,UAwGImb,QAAQ,CAAC3mB,CAAD,CAAU,CAC1B,MAAOA,EAAAq2C,gBAAP,EAAkCr2C,CAAAqN,WAAlC,EAAwD,EAD9B,CAxGtB,QA4GE/M,QAAQ,CAACN,CAAD,CAAUzD,CAAV,CAAgB,CAC9BpD,CAAA,CAAQ,IAAI4S,CAAJ,CAAWxP,CAAX,CAAR,CAA0B,QAAQ,CAAC0kC,CAAD,CAAO,CACd,CAAzB,GAAIjhC,CAAAhH,SAAJ,EAAmD,EAAnD,GAA8BgH,CAAAhH,SAA9B,EACEgH,CAAAwM,YAAA,CAAoBy0B,CAApB,CAFqC,CAAzC,CAD8B,CA5G1B,SAoHGqV,QAAQ,CAACt2C,CAAD,CAAUzD,CAAV,CAAgB,CAC/B,GAAyB,CAAzB,GAAIyD,CAAAhH,SAAJ,CAA4B,CAC1B,IAAIoB,EAAQ4F,CAAAiN,WACZ9T,EAAA,CAAQ,IAAI4S,CAAJ,CAAWxP,CAAX,CAAR,CAA0B,QAAQ,CAAC0kC,CAAD,CAAO,CACvCjhC,CAAAo2C,aAAA,CAAqBnV,CAArB;AAA4B7mC,CAA5B,CADuC,CAAzC,CAF0B,CADG,CApH3B,MA6HAuS,QAAQ,CAAC3M,CAAD,CAAUu2C,CAAV,CAAoB,CAChCA,CAAA,CAAWt2C,CAAA,CAAOs2C,CAAP,CAAA,CAAiB,CAAjB,CACX,KAAIj7C,EAAS0E,CAAA6P,WACTvU,EAAJ,EACEA,CAAAkvB,aAAA,CAAoB+rB,CAApB,CAA8Bv2C,CAA9B,CAEFu2C,EAAA/pC,YAAA,CAAqBxM,CAArB,CANgC,CA7H5B,QAsIE6b,QAAQ,CAAC7b,CAAD,CAAU,CACxB2N,EAAA,CAAa3N,CAAb,CACA,KAAI1E,EAAS0E,CAAA6P,WACTvU,EAAJ,EAAYA,CAAA0R,YAAA,CAAmBhN,CAAnB,CAHY,CAtIpB,OA4ICw2C,QAAQ,CAACx2C,CAAD,CAAUy2C,CAAV,CAAsB,CAAA,IAC/Br8C,EAAQ4F,CADuB,CACd1E,EAAS0E,CAAA6P,WAC9B1W,EAAA,CAAQ,IAAI4S,CAAJ,CAAW0qC,CAAX,CAAR,CAAgC,QAAQ,CAACl6C,CAAD,CAAM,CAC5CjB,CAAA86C,aAAA,CAAoB75C,CAApB,CAA0BnC,CAAAwK,YAA1B,CACAxK,EAAA,CAAQmC,CAFoC,CAA9C,CAFmC,CA5I/B,UAoJI+S,EApJJ,aAqJOJ,EArJP,aAuJOwnC,QAAQ,CAAC12C,CAAD,CAAUgP,CAAV,CAAoB2nC,CAApB,CAA+B,CAC9C3nC,CAAJ,EACE7V,CAAA,CAAQ6V,CAAAjO,MAAA,CAAe,GAAf,CAAR,CAA6B,QAAQ,CAACmB,CAAD,CAAW,CAC9C,IAAI00C,EAAiBD,CACjB/6C,EAAA,CAAYg7C,CAAZ,CAAJ,GACEA,CADF,CACmB,CAAC7nC,EAAA,CAAe/O,CAAf,CAAwBkC,CAAxB,CADpB,CAGC,EAAA00C,CAAA,CAAiBtnC,EAAjB,CAAkCJ,EAAlC,EAAqDlP,CAArD,CAA8DkC,CAA9D,CAL6C,CAAhD,CAFgD,CAvJ9C,QAmKE5G,QAAQ,CAAC0E,CAAD,CAAU,CAExB,MAAO,CADH1E,CACG,CADM0E,CAAA6P,WACN,GAA8B,EAA9B,GAAUvU,CAAAtC,SAAV,CAAmCsC,CAAnC,CAA4C,IAF3B,CAnKpB,MAwKA+nC,QAAQ,CAACrjC,CAAD,CAAU,CACtB,GAAIA,CAAA62C,mBAAJ,CACE,MAAO72C,EAAA62C,mBAKT;IADIhhC,CACJ,CADU7V,CAAA4E,YACV,CAAc,IAAd,EAAOiR,CAAP,EAAuC,CAAvC,GAAsBA,CAAA7c,SAAtB,CAAA,CACE6c,CAAA,CAAMA,CAAAjR,YAER,OAAOiR,EAVe,CAxKlB,MAqLAlZ,QAAQ,CAACqD,CAAD,CAAUgP,CAAV,CAAoB,CAChC,MAAIhP,EAAA82C,qBAAJ,CACS92C,CAAA82C,qBAAA,CAA6B9nC,CAA7B,CADT,CAGS,EAJuB,CArL5B,OA6LCvB,EA7LD,gBA+LU/B,QAAQ,CAAC1L,CAAD,CAAUqQ,CAAV,CAAiB0mC,CAAjB,CAAkC,CAAA,IAEpDC,CAFoD,CAE1BC,CAC1BC,EAAAA,CAAY7mC,CAAAvC,KAAZopC,EAA0B7mC,CAC9B,KAAIolC,EAAW,CAACxnC,EAAA,CAAmBjO,CAAnB,CAA4B,QAA5B,CAAD,EAA0C,EAA1C,EAA8Ck3C,CAA9C,CAEXzB,EAAJ,GAGEuB,CAiBA,CAjBa,gBACK1mC,QAAQ,EAAG,CAAE,IAAAQ,iBAAA,CAAwB,CAAA,CAA1B,CADhB,oBAESE,QAAQ,EAAG,CAAE,MAAiC,CAAA,CAAjC,GAAO,IAAAF,iBAAT,CAFpB,iBAGMtV,CAHN,MAIL07C,CAJK,QAKHl3C,CALG,CAiBb,CARIqQ,CAAAvC,KAQJ,GAPEkpC,CAOF,CAPej8C,CAAA,CAAOi8C,CAAP,CAAmB3mC,CAAnB,CAOf,EAHA8mC,CAGA,CAHel5C,EAAA,CAAYw3C,CAAZ,CAGf,CAFAwB,CAEA,CAFcF,CAAA,CAAkB,CAACC,CAAD,CAAA93C,OAAA,CAAoB63C,CAApB,CAAlB,CAAyD,CAACC,CAAD,CAEvE,CAAA79C,CAAA,CAAQg+C,CAAR,CAAsB,QAAQ,CAACt4C,CAAD,CAAK,CACjCA,CAAAI,MAAA,CAASe
,CAAT,CAAkBi3C,CAAlB,CADiC,CAAnC,CApBF,CANwD,CA/LpD,CAAR,CA+NG,QAAQ,CAACp4C,CAAD,CAAKiD,CAAL,CAAU,CAInBiK,CAAAsI,UAAA,CAAiBvS,CAAjB,CAAA;AAAyB,QAAQ,CAACg5B,CAAD,CAAOC,CAAP,CAAaqc,CAAb,CAAmB,CAElD,IADA,IAAIl9C,CAAJ,CACQH,EAAE,CAAV,CAAaA,CAAb,CAAiB,IAAAhB,OAAjB,CAA8BgB,CAAA,EAA9B,CACM6B,CAAA,CAAY1B,CAAZ,CAAJ,EACEA,CACA,CADQ2E,CAAA,CAAG,IAAA,CAAK9E,CAAL,CAAH,CAAY+gC,CAAZ,CAAkBC,CAAlB,CAAwBqc,CAAxB,CACR,CAAIv7C,CAAA,CAAU3B,CAAV,CAAJ,GAEEA,CAFF,CAEU+F,CAAA,CAAO/F,CAAP,CAFV,CAFF,EAOEsT,EAAA,CAAetT,CAAf,CAAsB2E,CAAA,CAAG,IAAA,CAAK9E,CAAL,CAAH,CAAY+gC,CAAZ,CAAkBC,CAAlB,CAAwBqc,CAAxB,CAAtB,CAGJ,OAAOv7C,EAAA,CAAU3B,CAAV,CAAA,CAAmBA,CAAnB,CAA2B,IAbgB,CAiBpD6R,EAAAsI,UAAA1V,KAAA,CAAwBoN,CAAAsI,UAAA2E,GACxBjN,EAAAsI,UAAAgjC,OAAA,CAA0BtrC,CAAAsI,UAAA6hC,IAtBP,CA/NrB,CAkSA1kC,GAAA6C,UAAA,CAAoB,KAMb1C,QAAQ,CAACrY,CAAD,CAAMY,CAAN,CAAa,CACxB,IAAA,CAAKmX,EAAA,CAAQ/X,CAAR,CAAa,IAAAa,QAAb,CAAL,CAAA,CAAmCD,CADX,CANR,KAcbkZ,QAAQ,CAAC9Z,CAAD,CAAM,CACjB,MAAO,KAAA,CAAK+X,EAAA,CAAQ/X,CAAR,CAAa,IAAAa,QAAb,CAAL,CADU,CAdD,QAsBV0hB,QAAQ,CAACviB,CAAD,CAAM,CACpB,IAAIY,EAAQ,IAAA,CAAKZ,CAAL,CAAW+X,EAAA,CAAQ/X,CAAR,CAAa,IAAAa,QAAb,CAAX,CACZ,QAAO,IAAA,CAAKb,CAAL,CACP,OAAOY,EAHa,CAtBJ,CA0FpB,KAAI+X,GAAU,oCAAd,CACIC,GAAe,GADnB,CAEIC,GAAS,sBAFb,CAGIJ,GAAiB,kCAHrB;AAIIjN,GAAkBnM,CAAA,CAAO,WAAP,CAJtB,CAo0BI2+C,GAAiB3+C,CAAA,CAAO,UAAP,CAp0BrB,CAm1BImQ,GAAmB,CAAC,UAAD,CAAa,QAAQ,CAACtG,CAAD,CAAW,CAGrD,IAAA+0C,YAAA,CAAmB,EAkCnB,KAAAnrB,SAAA,CAAgBC,QAAQ,CAACvqB,CAAD,CAAOkD,CAAP,CAAgB,CACtC,IAAI1L,EAAMwI,CAANxI,CAAa,YACjB,IAAIwI,CAAJ,EAA8B,GAA9B,EAAYA,CAAA3D,OAAA,CAAY,CAAZ,CAAZ,CAAmC,KAAMm5C,GAAA,CAAe,SAAf,CACoBx1C,CADpB,CAAN,CAEnC,IAAAy1C,YAAA,CAAiBz1C,CAAA6f,OAAA,CAAY,CAAZ,CAAjB,CAAA,CAAmCroB,CACnCkJ,EAAAwC,QAAA,CAAiB1L,CAAjB,CAAsB0L,CAAtB,CALsC,CAsBxC,KAAAwyC,gBAAA,CAAuBC,QAAQ,CAACnrB,CAAD,CAAa,CAClB,CAAxB,GAAGrxB,SAAAlC,OAAH,GACE,IAAA2+C,kBADF,CAC4BprB,CAAD,WAAuBxuB,OAAvB,CAAiCwuB,CAAjC,CAA8C,IADzE,CAGA,OAAO,KAAAorB,kBAJmC,CAO5C,KAAA7kC,KAAA,CAAY,CAAC,UAAD,CAAa,iBAAb,CAAgC,QAAQ,CAACuD,CAAD,CAAWuhC,CAAX,CAA4B,CAuB9E,MAAO,OAiBGC,QAAQ,CAAC53C,CAAD,CAAU1E,CAAV,CAAkBk7C,CAAlB,CAAyBzmB,CAAzB,CAA+B,CACzCymB,CAAJ,CACEA,CAAAA,MAAA,CAAYx2C,CAAZ,CADF,EAGO1E,CAGL,EAHgBA,CAAA,CAAO,CAAP,CAGhB,GAFEA,CAEF,CAFWk7C,CAAAl7C,OAAA,EAEX,EAAAA,CAAAgF,OAAA,CAAcN,CAAd,CANF,CAQM+vB,EA9CR;AAAM4nB,CAAA,CA8CE5nB,CA9CF,CAqCyC,CAjB1C,OAwCG8nB,QAAQ,CAAC73C,CAAD,CAAU+vB,CAAV,CAAgB,CAC9B/vB,CAAA6b,OAAA,EACMkU,EA9DR,EAAM4nB,CAAA,CA8DE5nB,CA9DF,CA4D0B,CAxC3B,MA+DE+nB,QAAQ,CAAC93C,CAAD,CAAU1E,CAAV,CAAkBk7C,CAAlB,CAAyBzmB,CAAzB,CAA+B,CAG5C,IAAA6nB,MAAA,CAAW53C,CAAX,CAAoB1E,CAApB,CAA4Bk7C,CAA5B,CAAmCzmB,CAAnC,CAH4C,CA/DzC,UAkFM9Q,QAAQ,CAACjf,CAAD,CAAUkC,CAAV,CAAqB6tB,CAArB,CAA2B,CAC5C7tB,CAAA,CAAYjJ,CAAA,CAASiJ,CAAT,CAAA,CACEA,CADF,CAEEhJ,CAAA,CAAQgJ,CAAR,CAAA,CAAqBA,CAAA1H,KAAA,CAAe,GAAf,CAArB,CAA2C,EACzDrB,EAAA,CAAQ6G,CAAR,CAAiB,QAAS,CAACA,CAAD,CAAU,CAClCsP,EAAA,CAAetP,CAAf,CAAwBkC,CAAxB,CADkC,CAApC,CAGM6tB,EA7GR,EAAM4nB,CAAA,CA6GE5nB,CA7GF,CAsGwC,CAlFzC,aAyGS/E,QAAQ,CAAChrB,CAAD,CAAUkC,CAAV,CAAqB6tB,CAArB,CAA2B,CAC/C7tB,CAAA,CAAYjJ,CAAA,CAASiJ,CAAT,CAAA,CACEA,CADF,CAEEhJ,CAAA,CAAQgJ,CAAR,CAAA,CAAqBA,CAAA1H,KAAA,CAAe,GAAf,CAArB,CAA2C,EACzDrB,EAAA,CAAQ6G,CAAR,CAAiB,QAAS,CAACA,CAAD,CAAU,CAClCkP,EAAA,CAAkBlP,CAAlB,CAA2BkC,CAA3B,CADkC,CAApC,CAGM6tB,EApIR,EAAM4nB,CAAA,CAoIE5nB,CApIF,CA6H2C,CAzG5C,UAiIM1E,QAAQ,CAACrrB,CAAD,CAAU+3C,CAAV,CAAel8B,CAAf,CAAuBkU,CAAvB,CAA6B,CAC9C52B,CAAA,CAAQ6G,CAAR,CAAiB,QAAS,CAACA,CAAD,CAAU,CAClCsP,EAAA,CAAetP,CAAf,CAAwB+3C,CAAxB,CACA7oC,GAAA,CAAkBlP,CAAlB,CAA2B6b,CAA3B,CAFkC,CAApC,CAIMkU,EA1JR,EAAM4nB,CAAA,CA0JE5nB,CA1JF,CAqJ0C,CAjI3C,SAyIKv0B,C
AzIL,CAvBuE,CAApE,CAlEyC,CAAhC,CAn1BvB,CAy0EI8mB,GAAiB3pB,CAAA,CAAO,UAAP,CASrB0N,GAAAwL,QAAA,CAA2B,CAAC,UAAD,CAAa,uBAAb,CA07C3B,KAAI+Z,GAAgB,0BAApB,CAw/CIqI,GAAqBt7B,CAAA,CAAO,cAAP,CAx/CzB,CAm/DIq/C,GAAa,iCAn/DjB;AAo/DI/hB,GAAgB,MAAS,EAAT,OAAsB,GAAtB,KAAkC,EAAlC,CAp/DpB,CAq/DIsB,GAAkB5+B,CAAA,CAAO,WAAP,CAoRtB2/B,GAAAjkB,UAAA,CACE2jB,EAAA3jB,UADF,CAEE2iB,EAAA3iB,UAFF,CAE+B,SAMpB,CAAA,CANoB,WAYlB,CAAA,CAZkB,QA0BrBkkB,EAAA,CAAe,UAAf,CA1BqB,KA2CxB/gB,QAAQ,CAACA,CAAD,CAAM/W,CAAN,CAAe,CAC1B,GAAI7E,CAAA,CAAY4b,CAAZ,CAAJ,CACE,MAAO,KAAAkgB,MAET,KAAI35B,EAAQi6C,EAAA/1C,KAAA,CAAgBuV,CAAhB,CACRzZ,EAAA,CAAM,CAAN,CAAJ,EAAc,IAAAqG,KAAA,CAAUzD,kBAAA,CAAmB5C,CAAA,CAAM,CAAN,CAAnB,CAAV,CACd,EAAIA,CAAA,CAAM,CAAN,CAAJ,EAAgBA,CAAA,CAAM,CAAN,CAAhB,GAA0B,IAAAy4B,OAAA,CAAYz4B,CAAA,CAAM,CAAN,CAAZ,EAAwB,EAAxB,CAC1B,KAAA6X,KAAA,CAAU7X,CAAA,CAAM,CAAN,CAAV,EAAsB,EAAtB,CAA0B0C,CAA1B,CAEA,OAAO,KATmB,CA3CC,UAkEnB83B,EAAA,CAAe,YAAf,CAlEmB,MA+EvBA,EAAA,CAAe,QAAf,CA/EuB,MA4FvBA,EAAA,CAAe,QAAf,CA5FuB,MA+GvBE,EAAA,CAAqB,QAArB,CAA+B,QAAQ,CAACr0B,CAAD,CAAO,CAClD,MAAyB,GAAlB,EAAAA,CAAAjG,OAAA,CAAY,CAAZ,CAAA,CAAwBiG,CAAxB,CAA+B,GAA/B,CAAqCA,CADM,CAA9C,CA/GuB,QAiKrBoyB,QAAQ,CAACA,CAAD,CAASyhB,CAAT,CAAqB,CACnC,OAAQh9C,SAAAlC,OAAR,EACE,KAAK,CAAL,CACE,MAAO,KAAAw9B,SACT;KAAK,CAAL,CACE,GAAIt9B,CAAA,CAASu9B,CAAT,CAAJ,CACE,IAAAD,SAAA,CAAgB31B,EAAA,CAAc41B,CAAd,CADlB,KAEO,IAAI16B,CAAA,CAAS06B,CAAT,CAAJ,CAELr9B,CAAA,CAAQq9B,CAAR,CAAgB,QAAQ,CAACt8B,CAAD,CAAQZ,CAAR,CAAa,CACtB,IAAb,EAAIY,CAAJ,EAAmB,OAAOs8B,CAAA,CAAOl9B,CAAP,CADS,CAArC,CAIA,CAAA,IAAAi9B,SAAA,CAAgBC,CANX,KAQL,MAAMe,GAAA,CAAgB,UAAhB,CAAN,CAGF,KACF,SACM37B,CAAA,CAAYq8C,CAAZ,CAAJ,EAA8C,IAA9C,GAA+BA,CAA/B,CACE,OAAO,IAAA1hB,SAAA,CAAcC,CAAd,CADT,CAGE,IAAAD,SAAA,CAAcC,CAAd,CAHF,CAG0ByhB,CAtB9B,CA0BA,IAAAzgB,UAAA,EACA,OAAO,KA5B4B,CAjKR,MA8MvBiB,EAAA,CAAqB,QAArB,CAA+Bh9B,EAA/B,CA9MuB,SAwNpBgF,QAAQ,EAAG,CAClB,IAAAw5B,UAAA,CAAiB,CAAA,CACjB,OAAO,KAFW,CAxNS,CAwoB/B,KAAIiB,GAAeviC,CAAA,CAAO,QAAP,CAAnB,CACIqkC,GAAsB,EAD1B,CAEItB,EAFJ,CAgEIwc,GAAOxb,QAAAroB,UAAA5a,KAhEX,CAiEI0+C,GAAQzb,QAAAroB,UAAApV,MAjEZ,CAkEIm5C,GAAO1b,QAAAroB,UAAA1V,KAlEX,CAkFI05C,GAAY,CAEZ,MAFY,CAELC,QAAQ,EAAE,CAAC,MAAO,KAAR,CAFL,CAGZ,MAHY,CAGLC,QAAQ,EAAE,CAAC,MAAO,CAAA,CAAR,CAHL,CAIZ,OAJY,CAIJC,QAAQ,EAAE,CAAC,MAAO,CAAA,CAAR,CAJN;UAKFh9C,CALE,CAMZ,GANY,CAMRi9C,QAAQ,CAAC75C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAC7BD,CAAA,CAAEA,CAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAiBgR,EAAA,CAAEA,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CACrB,OAAInY,EAAA,CAAUkpB,CAAV,CAAJ,CACMlpB,CAAA,CAAUmpB,CAAV,CAAJ,CACSD,CADT,CACaC,CADb,CAGOD,CAJT,CAMOlpB,CAAA,CAAUmpB,CAAV,CAAA,CAAaA,CAAb,CAAetsB,CARO,CANnB,CAeZ,GAfY,CAeRggD,QAAQ,CAAC95C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CACzBD,CAAA,CAAEA,CAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAiBgR,EAAA,CAAEA,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CACrB,QAAQnY,CAAA,CAAUkpB,CAAV,CAAA,CAAaA,CAAb,CAAe,CAAvB,GAA2BlpB,CAAA,CAAUmpB,CAAV,CAAA,CAAaA,CAAb,CAAe,CAA1C,CAFyB,CAfnB,CAmBZ,GAnBY,CAmBR2zB,QAAQ,CAAC/5C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,CAAuBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAxB,CAnBnB,CAoBZ,GApBY,CAoBR4kC,QAAQ,CAACh6C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,CAAuBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAxB,CApBnB,CAqBZ,GArBY,CAqBR6kC,QAAQ,CAACj6C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,CAAuBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAxB,CArBnB,CAsBZ,GAtBY,CAsBR8kC,QAAQ,CAACl6C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,CAAu
BgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAxB,CAtBnB,CAuBZ,GAvBY,CAuBRxY,CAvBQ,CAwBZ,KAxBY,CAwBNu9C,QAAQ,CAACn6C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAkBC,CAAlB,CAAoB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,GAAyBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAA1B,CAxBtB,CAyBZ,KAzBY,CAyBNglC,QAAQ,CAACp6C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAkBC,CAAlB,CAAoB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,GAAyBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAA1B,CAzBtB,CA0BZ,IA1BY,CA0BPilC,QAAQ,CAACr6C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,EAAwBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAzB,CA1BpB,CA2BZ,IA3BY,CA2BPklC,QAAQ,CAACt6C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF;AAAQoV,CAAR,CAAP,EAAwBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAzB,CA3BpB,CA4BZ,GA5BY,CA4BRmlC,QAAQ,CAACv6C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,CAAuBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAxB,CA5BnB,CA6BZ,GA7BY,CA6BRolC,QAAQ,CAACx6C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,CAAuBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAxB,CA7BnB,CA8BZ,IA9BY,CA8BPqlC,QAAQ,CAACz6C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,EAAwBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAzB,CA9BpB,CA+BZ,IA/BY,CA+BPslC,QAAQ,CAAC16C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,EAAwBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAzB,CA/BpB,CAgCZ,IAhCY,CAgCPulC,QAAQ,CAAC36C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,EAAwBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAzB,CAhCpB,CAiCZ,IAjCY,CAiCPwlC,QAAQ,CAAC56C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,EAAwBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAzB,CAjCpB,CAkCZ,GAlCY,CAkCRylC,QAAQ,CAAC76C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOD,EAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAP,CAAuBgR,CAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAxB,CAlCnB,CAoCZ,GApCY,CAoCR0lC,QAAQ,CAAC96C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiBC,CAAjB,CAAmB,CAAC,MAAOA,EAAA,CAAEpmB,CAAF,CAAQoV,CAAR,CAAA,CAAgBpV,CAAhB,CAAsBoV,CAAtB,CAA8B+Q,CAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAA9B,CAAR,CApCnB,CAqCZ,GArCY,CAqCR2lC,QAAQ,CAAC/6C,CAAD,CAAOoV,CAAP,CAAe+Q,CAAf,CAAiB,CAAC,MAAO,CAACA,CAAA,CAAEnmB,CAAF,CAAQoV,CAAR,CAAT,CArCjB,CAlFhB,CA0HI4lC,GAAS,GAAK,IAAL,GAAe,IAAf,GAAyB,IAAzB,GAAmC,IAAnC,GAA6C,IAA7C,CAAmD,GAAnD,CAAuD,GAAvD,CAA4D,GAA5D,CAAgE,GAAhE,CA1Hb,CAmIIzc,GAAQA,QAAS,CAACxiB,CAAD,CAAU,CAC7B,IAAAA,QAAA,CAAeA,CADc,CAI/BwiB,GAAA9oB,UAAA;AAAkB,aACH8oB,EADG,KAGX0c,QAAS,CAAC3wB,CAAD,CAAO,CACnB,IAAAA,KAAA,CAAYA,CAEZ,KAAA9uB,MAAA,CAAa,CACb,KAAA0/C,GAAA,CAAUphD,CACV,KAAAqhD,OAAA,CAAc,GAId,KAFA,IAAAC,OAEA,CAFc,EAEd,CAAO,IAAA5/C,MAAP,CAAoB,IAAA8uB,KAAAnwB,OAApB,CAAA,CAAsC,CACpC,IAAA+gD,GAAA,CAAU,IAAA5wB,KAAA/qB,OAAA,CAAiB,IAAA/D,MAAjB,CACV,IAAI,IAAA6/C,GAAA,CAAQ,KAAR,CAAJ,CACE,IAAAC,WAAA,CAAgB,IAAAJ,GAAhB,CADF,KAEO,IAAI,IAAA/9C,SAAA,CAAc,IAAA+9C,GAAd,CAAJ,EAA8B,IAAAG,GAAA,CAAQ,GAAR,CAA9B,EAA8C,IAAAl+C,SAAA,CAAc,IAAAo+C,KAAA,EAAd,CAA9C,CACL,IAAAC,WAAA,EADK,KAEA,IAAI,IAAAC,QAAA,CAAa,IAAAP,GAAb,CAAJ,CACL,IAAAQ,UAAA,EADK,KAEA,IAAI,IAAAL,GAAA,CAAQ,aAAR,CAAJ,CACL,IAAAD,OAAApgD,KAAA,CAAiB,OACR,IAAAQ,MADQ,MAET,IAAA0/C,GAFS,CAAjB,CAIA,CAAA,IAAA1/C,MAAA,EALK,KAMA,IAAI,IAAAmgD,aAAA,CAAkB,IAAAT,GAAlB,CAAJ,CAAgC,CACrC,IAAA1/C,MAAA,EACA,SAFqC,CAAhC,IAGA,CACDogD,CAAAA;AAAM,IAAAV,GAANU,CAAgB,IAAAL,KAAA,EACpB,KAAIM,EAAMD,CAANC,CAAY,IAAAN,KAAA,CAAU,CAAV,CAAhB,CACIt7C,EAAKw5C,EAAA,CAAU,IAAAyB,GAAV,CAD
T,CAEIY,EAAMrC,EAAA,CAAUmC,CAAV,CAFV,CAGIG,EAAMtC,EAAA,CAAUoC,CAAV,CACNE,EAAJ,EACE,IAAAX,OAAApgD,KAAA,CAAiB,OAAQ,IAAAQ,MAAR,MAA0BqgD,CAA1B,IAAmCE,CAAnC,CAAjB,CACA,CAAA,IAAAvgD,MAAA,EAAc,CAFhB,EAGWsgD,CAAJ,EACL,IAAAV,OAAApgD,KAAA,CAAiB,OAAQ,IAAAQ,MAAR,MAA0BogD,CAA1B,IAAmCE,CAAnC,CAAjB,CACA,CAAA,IAAAtgD,MAAA,EAAc,CAFT,EAGIyE,CAAJ,EACL,IAAAm7C,OAAApgD,KAAA,CAAiB,OACR,IAAAQ,MADQ,MAET,IAAA0/C,GAFS,IAGXj7C,CAHW,CAAjB,CAKA,CAAA,IAAAzE,MAAA,EAAc,CANT,EAQL,IAAAwgD,WAAA,CAAgB,4BAAhB,CAA8C,IAAAxgD,MAA9C,CAA0D,IAAAA,MAA1D,CAAuE,CAAvE,CApBG,CAuBP,IAAA2/C,OAAA,CAAc,IAAAD,GAxCsB,CA0CtC,MAAO,KAAAE,OAnDY,CAHL,IAyDZC,QAAQ,CAACY,CAAD,CAAQ,CAClB,MAAmC,EAAnC,GAAOA,CAAA99C,QAAA,CAAc,IAAA+8C,GAAd,CADW,CAzDJ,KA6DXgB,QAAQ,CAACD,CAAD,CAAQ,CACnB,MAAuC,EAAvC,GAAOA,CAAA99C,QAAA,CAAc,IAAAg9C,OAAd,CADY,CA7DL,MAiEVI,QAAQ,CAACpgD,CAAD,CAAI,CACZq7B,CAAAA;AAAMr7B,CAANq7B,EAAW,CACf,OAAQ,KAAAh7B,MAAD,CAAcg7B,CAAd,CAAoB,IAAAlM,KAAAnwB,OAApB,CAAwC,IAAAmwB,KAAA/qB,OAAA,CAAiB,IAAA/D,MAAjB,CAA8Bg7B,CAA9B,CAAxC,CAA6E,CAAA,CAFpE,CAjEF,UAsENr5B,QAAQ,CAAC+9C,CAAD,CAAK,CACrB,MAAQ,GAAR,EAAeA,CAAf,EAA2B,GAA3B,EAAqBA,CADA,CAtEP,cA0EFS,QAAQ,CAACT,CAAD,CAAK,CAEzB,MAAe,GAAf,GAAQA,CAAR,EAA6B,IAA7B,GAAsBA,CAAtB,EAA4C,IAA5C,GAAqCA,CAArC,EACe,IADf,GACQA,CADR,EAC8B,IAD9B,GACuBA,CADvB,EAC6C,QAD7C,GACsCA,CAHb,CA1EX,SAgFPO,QAAQ,CAACP,CAAD,CAAK,CACpB,MAAQ,GAAR,EAAeA,CAAf,EAA2B,GAA3B,EAAqBA,CAArB,EACQ,GADR,EACeA,CADf,EAC2B,GAD3B,EACqBA,CADrB,EAEQ,GAFR,GAEgBA,CAFhB,EAE6B,GAF7B,GAEsBA,CAHF,CAhFN,eAsFDiB,QAAQ,CAACjB,CAAD,CAAK,CAC1B,MAAe,GAAf,GAAQA,CAAR,EAA6B,GAA7B,GAAsBA,CAAtB,EAAoC,IAAA/9C,SAAA,CAAc+9C,CAAd,CADV,CAtFZ,YA0FJc,QAAQ,CAAC/jC,CAAD,CAAQmkC,CAAR,CAAeC,CAAf,CAAoB,CACtCA,CAAA,CAAMA,CAAN,EAAa,IAAA7gD,MACT8gD,EAAAA,CAAUr/C,CAAA,CAAUm/C,CAAV,CACA,CAAJ,IAAI,CAAGA,CAAH,CAAY,GAAZ,CAAkB,IAAA5gD,MAAlB,CAA+B,IAA/B,CAAsC,IAAA8uB,KAAAnP,UAAA,CAAoBihC,CAApB,CAA2BC,CAA3B,CAAtC,CAAwE,GAAxE,CACJ,GADI,CACEA,CAChB,MAAM/f,GAAA,CAAa,QAAb,CACFrkB,CADE,CACKqkC,CADL,CACa,IAAAhyB,KADb,CAAN;AALsC,CA1FxB,YAmGJkxB,QAAQ,EAAG,CAGrB,IAFA,IAAIrQ,EAAS,EAAb,CACIiR,EAAQ,IAAA5gD,MACZ,CAAO,IAAAA,MAAP,CAAoB,IAAA8uB,KAAAnwB,OAApB,CAAA,CAAsC,CACpC,IAAI+gD,EAAKh6C,CAAA,CAAU,IAAAopB,KAAA/qB,OAAA,CAAiB,IAAA/D,MAAjB,CAAV,CACT,IAAU,GAAV,EAAI0/C,CAAJ,EAAiB,IAAA/9C,SAAA,CAAc+9C,CAAd,CAAjB,CACE/P,CAAA,EAAU+P,CADZ,KAEO,CACL,IAAIqB,EAAS,IAAAhB,KAAA,EACb,IAAU,GAAV,EAAIL,CAAJ,EAAiB,IAAAiB,cAAA,CAAmBI,CAAnB,CAAjB,CACEpR,CAAA,EAAU+P,CADZ,KAEO,IAAI,IAAAiB,cAAA,CAAmBjB,CAAnB,CAAJ,EACHqB,CADG,EACO,IAAAp/C,SAAA,CAAco/C,CAAd,CADP,EAEiC,GAFjC,EAEHpR,CAAA5rC,OAAA,CAAc4rC,CAAAhxC,OAAd,CAA8B,CAA9B,CAFG,CAGLgxC,CAAA,EAAU+P,CAHL,KAIA,IAAI,CAAA,IAAAiB,cAAA,CAAmBjB,CAAnB,CAAJ,EACDqB,CADC,EACU,IAAAp/C,SAAA,CAAco/C,CAAd,CADV,EAEiC,GAFjC,EAEHpR,CAAA5rC,OAAA,CAAc4rC,CAAAhxC,OAAd,CAA8B,CAA9B,CAFG,CAKL,KALK,KAGL,KAAA6hD,WAAA,CAAgB,kBAAhB,CAXG,CAgBP,IAAAxgD,MAAA,EApBoC,CAsBtC2vC,CAAA,EAAS,CACT,KAAAiQ,OAAApgD,KAAA,CAAiB,OACRohD,CADQ,MAETjR,CAFS,SAGN,CAAA,CAHM,UAIL,CAAA,CAJK,IAKXlrC,QAAQ,EAAG,CAAE,MAAOkrC,EAAT,CALA,CAAjB,CA1BqB,CAnGP;UAsILuQ,QAAQ,EAAG,CAQpB,IAPA,IAAIld,EAAS,IAAb,CAEIge,EAAQ,EAFZ,CAGIJ,EAAQ,IAAA5gD,MAHZ,CAKIihD,CALJ,CAKaC,CALb,CAKwBC,CALxB,CAKoCzB,CAEpC,CAAO,IAAA1/C,MAAP,CAAoB,IAAA8uB,KAAAnwB,OAApB,CAAA,CAAsC,CACpC+gD,CAAA,CAAK,IAAA5wB,KAAA/qB,OAAA,CAAiB,IAAA/D,MAAjB,CACL,IAAW,GAAX,GAAI0/C,CAAJ,EAAkB,IAAAO,QAAA,CAAaP,CAAb,CAAlB,EAAsC,IAAA/9C,SAAA,CAAc+9C,CAAd,CAAtC,CACa,GACX,GADIA,CACJ,GADgBuB,CAChB,CAD0B,IAAAjhD,MAC1B,EAAAghD,CAAA,EAAStB,CAFX,KAIE,MAEF,KAAA1/C,MAAA,EARoC,CAYtC,GAAIihD,CAAJ,CAEE,IADAC,CACA,CADY,IAAAlhD,MACZ,CAAOkhD,CAAP,CAAmB,IAAApyB,KAAAnwB,OAAnB,CAAA,CAAqC,CACnC+gD,CAAA,CAAK,IAAA5wB,K
AAA/qB,OAAA,CAAiBm9C,CAAjB,CACL,IAAW,GAAX,GAAIxB,CAAJ,CAAgB,CACdyB,CAAA,CAAaH,CAAAz5B,OAAA,CAAa05B,CAAb,CAAuBL,CAAvB,CAA+B,CAA/B,CACbI,EAAA,CAAQA,CAAAz5B,OAAA,CAAa,CAAb,CAAgB05B,CAAhB,CAA0BL,CAA1B,CACR,KAAA5gD,MAAA,CAAakhD,CACb,MAJc,CAMhB,GAAI,IAAAf,aAAA,CAAkBT,CAAlB,CAAJ,CACEwB,CAAA,EADF,KAGE,MAXiC,CAiBnCpvB,CAAAA,CAAQ,OACH8uB,CADG,MAEJI,CAFI,CAMZ,IAAI/C,EAAA7+C,eAAA,CAAyB4hD,CAAzB,CAAJ,CACElvB,CAAArtB,GAEA,CAFWw5C,EAAA,CAAU+C,CAAV,CAEX,CADAlvB,CAAApH,QACA,CADgB,CAAA,CAChB,CAAAoH,CAAAxX,SAAA,CAAiB,CAAA,CAHnB,KAIO,CACL,IAAIvQ,EAASk4B,EAAA,CAAS+e,CAAT,CAAgB,IAAAzgC,QAAhB;AAA8B,IAAAuO,KAA9B,CACbgD,EAAArtB,GAAA,CAAW9D,CAAA,CAAO,QAAQ,CAAC6D,CAAD,CAAOoV,CAAP,CAAe,CACvC,MAAQ7P,EAAA,CAAOvF,CAAP,CAAaoV,CAAb,CAD+B,CAA9B,CAER,QACOiR,QAAQ,CAACrmB,CAAD,CAAO1E,CAAP,CAAc,CAC5B,MAAOmhC,GAAA,CAAOz8B,CAAP,CAAaw8C,CAAb,CAAoBlhD,CAApB,CAA2BkjC,CAAAlU,KAA3B,CAAwCkU,CAAAziB,QAAxC,CADqB,CAD7B,CAFQ,CAFN,CAWP,IAAAq/B,OAAApgD,KAAA,CAAiBsyB,CAAjB,CAEIqvB,EAAJ,GACE,IAAAvB,OAAApgD,KAAA,CAAiB,OACTyhD,CADS,MAET,GAFS,CAAjB,CAIA,CAAA,IAAArB,OAAApgD,KAAA,CAAiB,OACRyhD,CADQ,CACE,CADF,MAETE,CAFS,CAAjB,CALF,CA9DoB,CAtIN,YAgNJrB,QAAQ,CAACsB,CAAD,CAAQ,CAC1B,IAAIR,EAAQ,IAAA5gD,MACZ,KAAAA,MAAA,EAIA,KAHA,IAAI6xC,EAAS,EAAb,CACIwP,EAAYD,CADhB,CAEI7hC,EAAS,CAAA,CACb,CAAO,IAAAvf,MAAP,CAAoB,IAAA8uB,KAAAnwB,OAApB,CAAA,CAAsC,CACpC,IAAI+gD,EAAK,IAAA5wB,KAAA/qB,OAAA,CAAiB,IAAA/D,MAAjB,CAAT,CACAqhD,EAAAA,CAAAA,CAAa3B,CACb,IAAIngC,CAAJ,CACa,GAAX,GAAImgC,CAAJ,EACM4B,CAIJ,CAJU,IAAAxyB,KAAAnP,UAAA,CAAoB,IAAA3f,MAApB,CAAiC,CAAjC,CAAoC,IAAAA,MAApC,CAAiD,CAAjD,CAIV,CAHKshD,CAAA39C,MAAA,CAAU,aAAV,CAGL,EAFE,IAAA68C,WAAA,CAAgB,6BAAhB,CAAgDc,CAAhD,CAAsD,GAAtD,CAEF,CADA,IAAAthD,MACA;AADc,CACd,CAAA6xC,CAAA,EAAUxxC,MAAAC,aAAA,CAAoBU,QAAA,CAASsgD,CAAT,CAAc,EAAd,CAApB,CALZ,EAQEzP,CARF,EAOY2N,EAAA+B,CAAO7B,CAAP6B,CAPZ,EAQ4B7B,CAE5B,CAAAngC,CAAA,CAAS,CAAA,CAXX,KAYO,IAAW,IAAX,GAAImgC,CAAJ,CACLngC,CAAA,CAAS,CAAA,CADJ,KAEA,CAAA,GAAImgC,CAAJ,GAAW0B,CAAX,CAAkB,CACvB,IAAAphD,MAAA,EACA,KAAA4/C,OAAApgD,KAAA,CAAiB,OACRohD,CADQ,MAETS,CAFS,QAGPxP,CAHO,SAIN,CAAA,CAJM,UAKL,CAAA,CALK,IAMXptC,QAAQ,EAAG,CAAE,MAAOotC,EAAT,CANA,CAAjB,CAQA,OAVuB,CAYvBA,CAAA,EAAU6N,CAZL,CAcP,IAAA1/C,MAAA,EA/BoC,CAiCtC,IAAAwgD,WAAA,CAAgB,oBAAhB,CAAsCI,CAAtC,CAvC0B,CAhNZ,CA+PlB,KAAI3d,GAASA,QAAS,CAACH,CAAD,CAAQH,CAAR,CAAiBpiB,CAAjB,CAA0B,CAC9C,IAAAuiB,MAAA,CAAaA,CACb,KAAAH,QAAA,CAAeA,CACf,KAAApiB,QAAA,CAAeA,CAH+B,CAMhD0iB,GAAAue,KAAA,CAAc7gD,CAAA,CAAO,QAAS,EAAG,CAC/B,MAAO,EADwB,CAAnB,CAEX,UACS,CAAA,CADT,CAFW,CAMdsiC,GAAAhpB,UAAA,CAAmB,aACJgpB,EADI,OAGV19B,QAAS,CAACupB,CAAD,CAAO,CACrB,IAAAA,KAAA,CAAYA,CAEZ,KAAA8wB,OAAA,CAAc,IAAA9c,MAAA2c,IAAA,CAAe3wB,CAAf,CAEVhvB,EAAAA,CAAQ,IAAA2hD,WAAA,EAEe,EAA3B,GAAI,IAAA7B,OAAAjhD,OAAJ;AACE,IAAA6hD,WAAA,CAAgB,wBAAhB,CAA0C,IAAAZ,OAAA,CAAY,CAAZ,CAA1C,CAGF9/C,EAAA4qB,QAAA,CAAgB,CAAC,CAAC5qB,CAAA4qB,QAClB5qB,EAAAwa,SAAA,CAAiB,CAAC,CAACxa,CAAAwa,SAEnB,OAAOxa,EAdc,CAHN,SAoBR4hD,QAAS,EAAG,CACnB,IAAIA,CACJ,IAAI,IAAAC,OAAA,CAAY,GAAZ,CAAJ,CACED,CACA,CADU,IAAAE,YAAA,EACV,CAAA,IAAAC,QAAA,CAAa,GAAb,CAFF,KAGO,IAAI,IAAAF,OAAA,CAAY,GAAZ,CAAJ,CACLD,CAAA,CAAU,IAAAI,iBAAA,EADL,KAEA,IAAI,IAAAH,OAAA,CAAY,GAAZ,CAAJ,CACLD,CAAA,CAAU,IAAA1O,OAAA,EADL,KAEA,CACL,IAAIlhB,EAAQ,IAAA6vB,OAAA,EAEZ,EADAD,CACA,CADU5vB,CAAArtB,GACV,GACE,IAAA+7C,WAAA,CAAgB,0BAAhB,CAA4C1uB,CAA5C,CAEF4vB,EAAAh3B,QAAA,CAAkB,CAAC,CAACoH,CAAApH,QACpBg3B,EAAApnC,SAAA,CAAmB,CAAC,CAACwX,CAAAxX,SAPhB,CAWP,IADA,IAAUrb,CACV,CAAQgqC,CAAR,CAAe,IAAA0Y,OAAA,CAAY,GAAZ,CAAiB,GAAjB,CAAsB,GAAtB,CAAf,CAAA,CACoB,GAAlB,GAAI1Y,CAAAna,KAAJ,EACE4yB,CACA,CADU,IAAAK,aAAA,CAAkBL,CAAlB,CAA2BziD,CAA3B,CACV,CAAAA,CAAA,CAAU,IAFZ;AAGyB,GAAlB,GAA
IgqC,CAAAna,KAAJ,EACL7vB,CACA,CADUyiD,CACV,CAAAA,CAAA,CAAU,IAAAM,YAAA,CAAiBN,CAAjB,CAFL,EAGkB,GAAlB,GAAIzY,CAAAna,KAAJ,EACL7vB,CACA,CADUyiD,CACV,CAAAA,CAAA,CAAU,IAAAO,YAAA,CAAiBP,CAAjB,CAFL,EAIL,IAAAlB,WAAA,CAAgB,YAAhB,CAGJ,OAAOkB,EAlCY,CApBJ,YAyDLlB,QAAQ,CAAC0B,CAAD,CAAMpwB,CAAN,CAAa,CAC/B,KAAMgP,GAAA,CAAa,QAAb,CAEAhP,CAAAhD,KAFA,CAEYozB,CAFZ,CAEkBpwB,CAAA9xB,MAFlB,CAEgC,CAFhC,CAEoC,IAAA8uB,KAFpC,CAE+C,IAAAA,KAAAnP,UAAA,CAAoBmS,CAAA9xB,MAApB,CAF/C,CAAN,CAD+B,CAzDhB,WA+DNmiD,QAAQ,EAAG,CACpB,GAA2B,CAA3B,GAAI,IAAAvC,OAAAjhD,OAAJ,CACE,KAAMmiC,GAAA,CAAa,MAAb,CAA0D,IAAAhS,KAA1D,CAAN,CACF,MAAO,KAAA8wB,OAAA,CAAY,CAAZ,CAHa,CA/DL,MAqEXG,QAAQ,CAACqC,CAAD,CAAKC,CAAL,CAASC,CAAT,CAAaC,CAAb,CAAiB,CAC7B,GAAyB,CAAzB,CAAI,IAAA3C,OAAAjhD,OAAJ,CAA4B,CAC1B,IAAImzB,EAAQ,IAAA8tB,OAAA,CAAY,CAAZ,CAAZ,CACI4C,EAAI1wB,CAAAhD,KACR,IAAI0zB,CAAJ,GAAUJ,CAAV,EAAgBI,CAAhB,GAAsBH,CAAtB,EAA4BG,CAA5B,GAAkCF,CAAlC,EAAwCE,CAAxC,GAA8CD,CAA9C,EACK,EAACH,CAAD,EAAQC,CAAR,EAAeC,CAAf,EAAsBC,CAAtB,CADL,CAEE,MAAOzwB,EALiB,CAQ5B,MAAO,CAAA,CATsB,CArEd,QAiFT6vB,QAAQ,CAACS,CAAD,CAAKC,CAAL,CAASC,CAAT,CAAaC,CAAb,CAAgB,CAE9B,MAAA,CADIzwB,CACJ,CADY,IAAAiuB,KAAA,CAAUqC,CAAV;AAAcC,CAAd,CAAkBC,CAAlB,CAAsBC,CAAtB,CACZ,GACE,IAAA3C,OAAAvuC,MAAA,EACOygB,CAAAA,CAFT,EAIO,CAAA,CANuB,CAjFf,SA0FR+vB,QAAQ,CAACO,CAAD,CAAI,CACd,IAAAT,OAAA,CAAYS,CAAZ,CAAL,EACE,IAAA5B,WAAA,CAAgB,4BAAhB,CAA+C4B,CAA/C,CAAoD,GAApD,CAAyD,IAAArC,KAAA,EAAzD,CAFiB,CA1FJ,SAgGR0C,QAAQ,CAACh+C,CAAD,CAAKi+C,CAAL,CAAY,CAC3B,MAAO/hD,EAAA,CAAO,QAAQ,CAAC6D,CAAD,CAAOoV,CAAP,CAAe,CACnC,MAAOnV,EAAA,CAAGD,CAAH,CAASoV,CAAT,CAAiB8oC,CAAjB,CAD4B,CAA9B,CAEJ,UACQA,CAAApoC,SADR,CAFI,CADoB,CAhGZ,WAwGNqoC,QAAQ,CAACC,CAAD,CAAOC,CAAP,CAAeH,CAAf,CAAqB,CACtC,MAAO/hD,EAAA,CAAO,QAAQ,CAAC6D,CAAD,CAAOoV,CAAP,CAAc,CAClC,MAAOgpC,EAAA,CAAKp+C,CAAL,CAAWoV,CAAX,CAAA,CAAqBipC,CAAA,CAAOr+C,CAAP,CAAaoV,CAAb,CAArB,CAA4C8oC,CAAA,CAAMl+C,CAAN,CAAYoV,CAAZ,CADjB,CAA7B,CAEJ,UACSgpC,CAAAtoC,SADT,EAC0BuoC,CAAAvoC,SAD1B,EAC6CooC,CAAApoC,SAD7C,CAFI,CAD+B,CAxGvB,UAgHPwoC,QAAQ,CAACF,CAAD,CAAOn+C,CAAP,CAAWi+C,CAAX,CAAkB,CAClC,MAAO/hD,EAAA,CAAO,QAAQ,CAAC6D,CAAD,CAAOoV,CAAP,CAAe,CACnC,MAAOnV,EAAA,CAAGD,CAAH,CAASoV,CAAT,CAAiBgpC,CAAjB,CAAuBF,CAAvB,CAD4B,CAA9B,CAEJ,UACQE,CAAAtoC,SADR,EACyBooC,CAAApoC,SADzB,CAFI,CAD2B,CAhHnB,YAwHLmnC,QAAQ,EAAG,CAErB,IADA,IAAIA,EAAa,EACjB,CAAA,CAAA,CAGE,GAFyB,CAErB,CAFA,IAAA7B,OAAAjhD,OAEA;AAF2B,CAAA,IAAAohD,KAAA,CAAU,GAAV,CAAe,GAAf,CAAoB,GAApB,CAAyB,GAAzB,CAE3B,EADF0B,CAAAjiD,KAAA,CAAgB,IAAAoiD,YAAA,EAAhB,CACE,CAAA,CAAC,IAAAD,OAAA,CAAY,GAAZ,CAAL,CAGE,MAA8B,EACvB,GADCF,CAAA9iD,OACD,CAAD8iD,CAAA,CAAW,CAAX,CAAC,CACD,QAAQ,CAACj9C,CAAD,CAAOoV,CAAP,CAAe,CAErB,IADA,IAAI9Z,CAAJ,CACSH,EAAI,CAAb,CAAgBA,CAAhB,CAAoB8hD,CAAA9iD,OAApB,CAAuCgB,CAAA,EAAvC,CAA4C,CAC1C,IAAIojD,EAAYtB,CAAA,CAAW9hD,CAAX,CACZojD,EAAJ,GACEjjD,CADF,CACUijD,CAAA,CAAUv+C,CAAV,CAAgBoV,CAAhB,CADV,CAF0C,CAM5C,MAAO9Z,EARc,CAVZ,CAxHN,aAgJJ8hD,QAAQ,EAAG,CAGtB,IAFA,IAAIgB,EAAO,IAAA1wB,WAAA,EAAX,CACIJ,CACJ,CAAA,CAAA,CACE,GAAKA,CAAL,CAAa,IAAA6vB,OAAA,CAAY,GAAZ,CAAb,CACEiB,CAAA,CAAO,IAAAE,SAAA,CAAcF,CAAd,CAAoB9wB,CAAArtB,GAApB,CAA8B,IAAAqM,OAAA,EAA9B,CADT,KAGE,OAAO8xC,EAPW,CAhJP,QA4JT9xC,QAAQ,EAAG,CAIjB,IAHA,IAAIghB,EAAQ,IAAA6vB,OAAA,EAAZ,CACIl9C,EAAK,IAAAk+B,QAAA,CAAa7Q,CAAAhD,KAAb,CADT,CAEIk0B,EAAS,EACb,CAAA,CAAA,CACE,GAAKlxB,CAAL,CAAa,IAAA6vB,OAAA,CAAY,GAAZ,CAAb,CACEqB,CAAAxjD,KAAA,CAAY,IAAA0yB,WAAA,EAAZ,CADF,KAEO,CACL,IAAI+wB,EAAWA,QAAQ,CAACz+C,CAAD,CAAOoV,CAAP,CAAeq5B,CAAf,CAAsB,CACvCp5B,CAAAA,CAAO,CAACo5B,CAAD,CACX,KAAK,IAAItzC,EAAI,CAAb,CAAgBA,CAAhB,CAAoBqjD,CAAArkD,OAApB,CAAmCgB,CAAA,EAAnC,CACEka,CAAAra,KAAA,CAAUwjD,CAAA,CAAOrjD,CAAP,CAAA,CAAU6E,
CAAV;AAAgBoV,CAAhB,CAAV,CAEF,OAAOnV,EAAAI,MAAA,CAASL,CAAT,CAAeqV,CAAf,CALoC,CAO7C,OAAO,SAAQ,EAAG,CAChB,MAAOopC,EADS,CARb,CAPQ,CA5JF,YAkLL/wB,QAAQ,EAAG,CACrB,MAAO,KAAAgxB,WAAA,EADc,CAlLN,YAsLLA,QAAQ,EAAG,CACrB,IAAIN,EAAO,IAAAO,QAAA,EAAX,CACIT,CADJ,CAEI5wB,CACJ,OAAA,CAAKA,CAAL,CAAa,IAAA6vB,OAAA,CAAY,GAAZ,CAAb,GACOiB,CAAA/3B,OAKE,EAJL,IAAA21B,WAAA,CAAgB,0BAAhB,CACI,IAAA1xB,KAAAnP,UAAA,CAAoB,CAApB,CAAuBmS,CAAA9xB,MAAvB,CADJ,CAC0C,0BAD1C,CACsE8xB,CADtE,CAIK,CADP4wB,CACO,CADC,IAAAS,QAAA,EACD,CAAA,QAAQ,CAAC56C,CAAD,CAAQqR,CAAR,CAAgB,CAC7B,MAAOgpC,EAAA/3B,OAAA,CAAYtiB,CAAZ,CAAmBm6C,CAAA,CAAMn6C,CAAN,CAAaqR,CAAb,CAAnB,CAAyCA,CAAzC,CADsB,CANjC,EAUOgpC,CAdc,CAtLN,SAuMRO,QAAQ,EAAG,CAClB,IAAIP,EAAO,IAAAQ,UAAA,EAAX,CACIP,CADJ,CAEI/wB,CACJ,IAAa,IAAA6vB,OAAA,CAAY,GAAZ,CAAb,CAAgC,CAC9BkB,CAAA,CAAS,IAAAK,WAAA,EACT,IAAKpxB,CAAL,CAAa,IAAA6vB,OAAA,CAAY,GAAZ,CAAb,CACE,MAAO,KAAAgB,UAAA,CAAeC,CAAf,CAAqBC,CAArB,CAA6B,IAAAK,WAAA,EAA7B,CAEP;IAAA1C,WAAA,CAAgB,YAAhB,CAA8B1uB,CAA9B,CAL4B,CAAhC,IAQE,OAAO8wB,EAZS,CAvMH,WAuNNQ,QAAQ,EAAG,CAGpB,IAFA,IAAIR,EAAO,IAAAS,WAAA,EAAX,CACIvxB,CACJ,CAAA,CAAA,CACE,GAAKA,CAAL,CAAa,IAAA6vB,OAAA,CAAY,IAAZ,CAAb,CACEiB,CAAA,CAAO,IAAAE,SAAA,CAAcF,CAAd,CAAoB9wB,CAAArtB,GAApB,CAA8B,IAAA4+C,WAAA,EAA9B,CADT,KAGE,OAAOT,EAPS,CAvNL,YAmOLS,QAAQ,EAAG,CACrB,IAAIT,EAAO,IAAAU,SAAA,EAAX,CACIxxB,CACJ,IAAKA,CAAL,CAAa,IAAA6vB,OAAA,CAAY,IAAZ,CAAb,CACEiB,CAAA,CAAO,IAAAE,SAAA,CAAcF,CAAd,CAAoB9wB,CAAArtB,GAApB,CAA8B,IAAA4+C,WAAA,EAA9B,CAET,OAAOT,EANc,CAnON,UA4OPU,QAAQ,EAAG,CACnB,IAAIV,EAAO,IAAAW,WAAA,EAAX,CACIzxB,CACJ,IAAKA,CAAL,CAAa,IAAA6vB,OAAA,CAAY,IAAZ,CAAiB,IAAjB,CAAsB,KAAtB,CAA4B,KAA5B,CAAb,CACEiB,CAAA,CAAO,IAAAE,SAAA,CAAcF,CAAd,CAAoB9wB,CAAArtB,GAApB,CAA8B,IAAA6+C,SAAA,EAA9B,CAET,OAAOV,EANY,CA5OJ,YAqPLW,QAAQ,EAAG,CACrB,IAAIX,EAAO,IAAAY,SAAA,EAAX,CACI1xB,CACJ,IAAKA,CAAL,CAAa,IAAA6vB,OAAA,CAAY,GAAZ;AAAiB,GAAjB,CAAsB,IAAtB,CAA4B,IAA5B,CAAb,CACEiB,CAAA,CAAO,IAAAE,SAAA,CAAcF,CAAd,CAAoB9wB,CAAArtB,GAApB,CAA8B,IAAA8+C,WAAA,EAA9B,CAET,OAAOX,EANc,CArPN,UA8PPY,QAAQ,EAAG,CAGnB,IAFA,IAAIZ,EAAO,IAAAa,eAAA,EAAX,CACI3xB,CACJ,CAAQA,CAAR,CAAgB,IAAA6vB,OAAA,CAAY,GAAZ,CAAgB,GAAhB,CAAhB,CAAA,CACEiB,CAAA,CAAO,IAAAE,SAAA,CAAcF,CAAd,CAAoB9wB,CAAArtB,GAApB,CAA8B,IAAAg/C,eAAA,EAA9B,CAET,OAAOb,EANY,CA9PJ,gBAuQDa,QAAQ,EAAG,CAGzB,IAFA,IAAIb,EAAO,IAAAc,MAAA,EAAX,CACI5xB,CACJ,CAAQA,CAAR,CAAgB,IAAA6vB,OAAA,CAAY,GAAZ,CAAgB,GAAhB,CAAoB,GAApB,CAAhB,CAAA,CACEiB,CAAA,CAAO,IAAAE,SAAA,CAAcF,CAAd,CAAoB9wB,CAAArtB,GAApB,CAA8B,IAAAi/C,MAAA,EAA9B,CAET,OAAOd,EANkB,CAvQV,OAgRVc,QAAQ,EAAG,CAChB,IAAI5xB,CACJ,OAAI,KAAA6vB,OAAA,CAAY,GAAZ,CAAJ,CACS,IAAAD,QAAA,EADT,CAEO,CAAK5vB,CAAL,CAAa,IAAA6vB,OAAA,CAAY,GAAZ,CAAb,EACE,IAAAmB,SAAA,CAAc7f,EAAAue,KAAd,CAA2B1vB,CAAArtB,GAA3B,CAAqC,IAAAi/C,MAAA,EAArC,CADF,CAEA,CAAK5xB,CAAL,CAAa,IAAA6vB,OAAA,CAAY,GAAZ,CAAb,EACE,IAAAc,QAAA,CAAa3wB,CAAArtB,GAAb,CAAuB,IAAAi/C,MAAA,EAAvB,CADF;AAGE,IAAAhC,QAAA,EATO,CAhRD,aA6RJO,QAAQ,CAACjP,CAAD,CAAS,CAC5B,IAAIhQ,EAAS,IAAb,CACI2gB,EAAQ,IAAAhC,OAAA,EAAA7yB,KADZ,CAEI/kB,EAASk4B,EAAA,CAAS0hB,CAAT,CAAgB,IAAApjC,QAAhB,CAA8B,IAAAuO,KAA9B,CAEb,OAAOnuB,EAAA,CAAO,QAAQ,CAAC4H,CAAD,CAAQqR,CAAR,CAAgBpV,CAAhB,CAAsB,CAC1C,MAAOuF,EAAA,CAAOvF,CAAP,EAAewuC,CAAA,CAAOzqC,CAAP,CAAcqR,CAAd,CAAf,CADmC,CAArC,CAEJ,QACOiR,QAAQ,CAACtiB,CAAD,CAAQzI,CAAR,CAAe8Z,CAAf,CAAuB,CAErC,CADIgqC,CACJ,CADQ5Q,CAAA,CAAOzqC,CAAP,CAAcqR,CAAd,CACR,GAAQo5B,CAAAnoB,OAAA,CAActiB,CAAd,CAAqBq7C,CAArB,CAAyB,EAAzB,CACR,OAAO3iB,GAAA,CAAO2iB,CAAP,CAAUD,CAAV,CAAiB7jD,CAAjB,CAAwBkjC,CAAAlU,KAAxB,CAAqCkU,CAAAziB,QAArC,CAH8B,CADtC,CAFI,CALqB,CA7Rb,aA6SJyhC,QAAQ,CAACvjD,CAAD,CAAM,CACzB,IAAIukC,EAAS,IAAb,CAEI6gB,EAAU,IAAA3xB,WAAA,EACd,KA
AA2vB,QAAA,CAAa,GAAb,CAEA,OAAOlhD,EAAA,CAAO,QAAQ,CAAC6D,CAAD,CAAOoV,CAAP,CAAe,CAAA,IAC/BgqC,EAAInlD,CAAA,CAAI+F,CAAJ,CAAUoV,CAAV,CAD2B,CAE/Bja,EAAIkkD,CAAA,CAAQr/C,CAAR,CAAcoV,CAAd,CAF2B,CAG5BkH,CAEP8f,GAAA,CAAqBjhC,CAArB,CAAwBqjC,CAAAlU,KAAxB,CACA,IAAI,CAAC80B,CAAL,CAAQ,MAAOtlD,EAEf,EADAmH,CACA,CADIs7B,EAAA,CAAiB6iB,CAAA,CAAEjkD,CAAF,CAAjB,CAAuBqjC,CAAAlU,KAAvB,CACJ,IAASrpB,CAAAsvB,KAAT,EAAmBiO,CAAAziB,QAAA8gB,eAAnB,IACEvgB,CAKA,CALIrb,CAKJ,CAJM,KAIN,EAJeA,EAIf,GAHEqb,CAAAygB,IACA,CADQjjC,CACR,CAAAwiB,CAAAiU,KAAA,CAAO,QAAQ,CAAC/vB,CAAD,CAAM,CAAE8b,CAAAygB,IAAA,CAAQv8B,CAAV,CAArB,CAEF,EAAAS,CAAA;AAAIA,CAAA87B,IANN,CAQA,OAAO97B,EAhB4B,CAA9B,CAiBJ,QACOolB,QAAQ,CAACrmB,CAAD,CAAO1E,CAAP,CAAc8Z,CAAd,CAAsB,CACpC,IAAI1a,EAAM0hC,EAAA,CAAqBijB,CAAA,CAAQr/C,CAAR,CAAcoV,CAAd,CAArB,CAA4CopB,CAAAlU,KAA5C,CAGV,EADI80B,CACJ,CADQ7iB,EAAA,CAAiBtiC,CAAA,CAAI+F,CAAJ,CAAUoV,CAAV,CAAjB,CAAoCopB,CAAAlU,KAApC,CACR,GAAQrwB,CAAAosB,OAAA,CAAWrmB,CAAX,CAAiBo/C,CAAjB,CAAqB,EAArB,CACR,OAAOA,EAAA,CAAE1kD,CAAF,CAAP,CAAgBY,CALoB,CADrC,CAjBI,CANkB,CA7SV,cA+UHiiD,QAAQ,CAACt9C,CAAD,CAAKq/C,CAAL,CAAoB,CACxC,IAAId,EAAS,EACb,IAA8B,GAA9B,GAAI,IAAAb,UAAA,EAAArzB,KAAJ,EACE,EACEk0B,EAAAxjD,KAAA,CAAY,IAAA0yB,WAAA,EAAZ,CADF,OAES,IAAAyvB,OAAA,CAAY,GAAZ,CAFT,CADF,CAKA,IAAAE,QAAA,CAAa,GAAb,CAEA,KAAI7e,EAAS,IAEb,OAAO,SAAQ,CAACz6B,CAAD,CAAQqR,CAAR,CAAgB,CAI7B,IAHA,IAAIC,EAAO,EAAX,CACI5a,EAAU6kD,CAAA,CAAgBA,CAAA,CAAcv7C,CAAd,CAAqBqR,CAArB,CAAhB,CAA+CrR,CAD7D,CAGS5I,EAAI,CAAb,CAAgBA,CAAhB,CAAoBqjD,CAAArkD,OAApB,CAAmCgB,CAAA,EAAnC,CACEka,CAAAra,KAAA,CAAUwjD,CAAA,CAAOrjD,CAAP,CAAA,CAAU4I,CAAV,CAAiBqR,CAAjB,CAAV,CAEEmqC,EAAAA,CAAQt/C,CAAA,CAAG8D,CAAH,CAAUqR,CAAV,CAAkB3a,CAAlB,CAAR8kD,EAAsC3iD,CAE1C2/B,GAAA,CAAiB9hC,CAAjB,CAA0B+jC,CAAAlU,KAA1B,CAC0BA,KAAAA,EAAAkU,CAAAlU,KAjrB9B,IAirBuBi1B,CAjrBvB,CAAS,CACP,GAgrBqBA,CAhrBjBl6C,YAAJ,GAgrBqBk6C,CAhrBrB,CACE,KAAMjjB,GAAA,CAAa,QAAb,CAEJD,CAFI,CAAN,CAGK,GA4qBckjB,CA5qBd,GAAYjG,EAAZ,EA4qBciG,CA5qBd,GAA4BhG,EAA5B,EAAsCC,EAAtC,EA4qBc+F,CA5qBd,GAAsD/F,EAAtD,CACL,KAAMld,GAAA,CAAa,QAAb,CAEJD,CAFI,CAAN,CANK,CAorBDp7B,CAAAA,CAAIs+C,CAAAl/C,MACA;AAAAk/C,CAAAl/C,MAAA,CAAY5F,CAAZ,CAAqB4a,CAArB,CAAA,CACAkqC,CAAA,CAAMlqC,CAAA,CAAK,CAAL,CAAN,CAAeA,CAAA,CAAK,CAAL,CAAf,CAAwBA,CAAA,CAAK,CAAL,CAAxB,CAAiCA,CAAA,CAAK,CAAL,CAAjC,CAA0CA,CAAA,CAAK,CAAL,CAA1C,CAER,OAAOknB,GAAA,CAAiBt7B,CAAjB,CAAoBu9B,CAAAlU,KAApB,CAjBsB,CAXS,CA/UzB,kBAgXCgzB,QAAS,EAAG,CAC5B,IAAIkC,EAAa,EAAjB,CACIC,EAAc,CAAA,CAClB,IAA8B,GAA9B,GAAI,IAAA9B,UAAA,EAAArzB,KAAJ,EACE,EAAG,CACD,GAAI,IAAAixB,KAAA,CAAU,GAAV,CAAJ,CAEE,KAEF,KAAImE,EAAY,IAAAhyB,WAAA,EAChB8xB,EAAAxkD,KAAA,CAAgB0kD,CAAhB,CACKA,EAAA5pC,SAAL,GACE2pC,CADF,CACgB,CAAA,CADhB,CAPC,CAAH,MAUS,IAAAtC,OAAA,CAAY,GAAZ,CAVT,CADF,CAaA,IAAAE,QAAA,CAAa,GAAb,CAEA,OAAOlhD,EAAA,CAAO,QAAQ,CAAC6D,CAAD,CAAOoV,CAAP,CAAe,CAEnC,IADA,IAAIhX,EAAQ,EAAZ,CACSjD,EAAI,CAAb,CAAgBA,CAAhB,CAAoBqkD,CAAArlD,OAApB,CAAuCgB,CAAA,EAAvC,CACEiD,CAAApD,KAAA,CAAWwkD,CAAA,CAAWrkD,CAAX,CAAA,CAAc6E,CAAd,CAAoBoV,CAApB,CAAX,CAEF,OAAOhX,EAL4B,CAA9B,CAMJ,SACQ,CAAA,CADR,UAESqhD,CAFT,CANI,CAlBqB,CAhXb,QA8YTjR,QAAS,EAAG,CAClB,IAAImR,EAAY,EAAhB,CACIF,EAAc,CAAA,CAClB,IAA8B,GAA9B,GAAI,IAAA9B,UAAA,EAAArzB,KAAJ,EACE,EAAG,CACD,GAAI,IAAAixB,KAAA,CAAU,GAAV,CAAJ,CAEE,KAHD,KAKGjuB,EAAQ,IAAA6vB,OAAA,EALX,CAMDziD,EAAM4yB,CAAA+f,OAAN3yC,EAAsB4yB,CAAAhD,KACtB,KAAA+yB,QAAA,CAAa,GAAb,CACA;IAAI/hD,EAAQ,IAAAoyB,WAAA,EACZiyB,EAAA3kD,KAAA,CAAe,KAAMN,CAAN,OAAkBY,CAAlB,CAAf,CACKA,EAAAwa,SAAL,GACE2pC,CADF,CACgB,CAAA,CADhB,CAVC,CAAH,MAaS,IAAAtC,OAAA,CAAY,GAAZ,CAbT,CADF,CAgBA,IAAAE,QAAA,CAAa,GAAb,CAEA,OAAOlhD,EAAA,CAAO,QAAQ,CAAC6D,CAAD,CAAOoV,CAAP,CAAe,CAEnC,IADA,IAAIo
5B,EAAS,EAAb,CACSrzC,EAAI,CAAb,CAAgBA,CAAhB,CAAoBwkD,CAAAxlD,OAApB,CAAsCgB,CAAA,EAAtC,CAA2C,CACzC,IAAI8G,EAAW09C,CAAA,CAAUxkD,CAAV,CACfqzC,EAAA,CAAOvsC,CAAAvH,IAAP,CAAA,CAAuBuH,CAAA3G,MAAA,CAAe0E,CAAf,CAAqBoV,CAArB,CAFkB,CAI3C,MAAOo5B,EAN4B,CAA9B,CAOJ,SACQ,CAAA,CADR,UAESiR,CAFT,CAPI,CArBW,CA9YH,CAudnB,KAAI/hB,GAAgB,EAApB,CA6lEIgI,GAAa3rC,CAAA,CAAO,MAAP,CA7lEjB,CA+lEI+rC,GAAe,MACX,MADW,KAEZ,KAFY,KAGZ,KAHY,cAMH,aANG,IAOb,IAPa,CA/lEnB,CAozGIuD,EAAiBxvC,CAAAgU,cAAA,CAAuB,GAAvB,CApzGrB,CAqzGI07B,GAAYrV,EAAA,CAAWt6B,CAAA2D,SAAAqc,KAAX,CAAiC,CAAA,CAAjC,CAkPhBpP,GAAAyI,QAAA,CAA0B,CAAC,UAAD,CAmU1By2B,GAAAz2B,QAAA,CAAyB,CAAC,SAAD,CA6DzB+2B,GAAA/2B,QAAA,CAAuB,CAAC,SAAD,CASvB,KAAIi4B,GAAc,GAAlB,CA+HIqD,GAAe,MACXvB,CAAA,CAAW,UAAX,CAAuB,CAAvB,CADW,IAEXA,CAAA,CAAW,UAAX;AAAuB,CAAvB,CAA0B,CAA1B,CAA6B,CAAA,CAA7B,CAFW,GAGXA,CAAA,CAAW,UAAX,CAAuB,CAAvB,CAHW,MAIXE,EAAA,CAAc,OAAd,CAJW,KAKXA,EAAA,CAAc,OAAd,CAAuB,CAAA,CAAvB,CALW,IAMXF,CAAA,CAAW,OAAX,CAAoB,CAApB,CAAuB,CAAvB,CANW,GAOXA,CAAA,CAAW,OAAX,CAAoB,CAApB,CAAuB,CAAvB,CAPW,IAQXA,CAAA,CAAW,MAAX,CAAmB,CAAnB,CARW,GASXA,CAAA,CAAW,MAAX,CAAmB,CAAnB,CATW,IAUXA,CAAA,CAAW,OAAX,CAAoB,CAApB,CAVW,GAWXA,CAAA,CAAW,OAAX,CAAoB,CAApB,CAXW,IAYXA,CAAA,CAAW,OAAX,CAAoB,CAApB,CAAwB,GAAxB,CAZW,GAaXA,CAAA,CAAW,OAAX,CAAoB,CAApB,CAAwB,GAAxB,CAbW,IAcXA,CAAA,CAAW,SAAX,CAAsB,CAAtB,CAdW,GAeXA,CAAA,CAAW,SAAX,CAAsB,CAAtB,CAfW,IAgBXA,CAAA,CAAW,SAAX,CAAsB,CAAtB,CAhBW,GAiBXA,CAAA,CAAW,SAAX,CAAsB,CAAtB,CAjBW,KAoBXA,CAAA,CAAW,cAAX,CAA2B,CAA3B,CApBW,MAqBXE,EAAA,CAAc,KAAd,CArBW,KAsBXA,EAAA,CAAc,KAAd,CAAqB,CAAA,CAArB,CAtBW,GAJnB0S,QAAmB,CAAC3S,CAAD,CAAOvC,CAAP,CAAgB,CACjC,MAAyB,GAAlB,CAAAuC,CAAA4S,SAAA,EAAA,CAAuBnV,CAAAoV,MAAA,CAAc,CAAd,CAAvB,CAA0CpV,CAAAoV,MAAA,CAAc,CAAd,CADhB,CAIhB,GAdnBC,QAAuB,CAAC9S,CAAD,CAAO,CACxB+S,CAAAA,CAAQ,EAARA,CAAY/S,CAAAgT,kBAAA,EAMhB,OAHAC,EAGA,EAL0B,CAATA,EAACF,CAADE,CAAc,GAAdA,CAAoB,EAKrC,GAHcrT,EAAA,CAAUtkB,IAAA,CAAY,CAAP,CAAAy3B,CAAA,CAAW,OAAX,CAAqB,MAA1B,CAAA,CAAkCA,CAAlC,CAAyC,EAAzC,CAAV,CAAwD,CAAxD,CAGd,CAFcnT,EAAA,CAAUtkB,IAAAmjB,IAAA,CAASsU,CAAT,CAAgB,EAAhB,CAAV;AAA+B,CAA/B,CAEd,CAP4B,CAcX,CA/HnB,CA0JI1R,GAAqB,8EA1JzB,CA2JID,GAAgB,UAmFpB1E,GAAA12B,QAAA,CAAqB,CAAC,SAAD,CAmHrB,KAAI82B,GAAkBhtC,CAAA,CAAQmE,CAAR,CAAtB,CAWIgpC,GAAkBntC,CAAA,CAAQoK,EAAR,CAoOtB8iC,GAAAh3B,QAAA,CAAwB,CAAC,QAAD,CAqFxB,KAAItL,GAAsB5K,CAAA,CAAQ,UACtB,GADsB,SAEvBiH,QAAQ,CAAC5C,CAAD,CAAUtD,CAAV,CAAgB,CAEnB,CAAZ,EAAIyU,CAAJ,GAIOzU,CAAA8b,KAQL,EARmB9b,CAAAoF,KAQnB,EAPEpF,CAAAgrB,KAAA,CAAU,MAAV,CAAkB,EAAlB,CAOF,CAAA1nB,CAAAM,OAAA,CAAe7H,CAAA+tB,cAAA,CAAuB,QAAvB,CAAf,CAZF,CAeA,IAAI,CAAC9pB,CAAA8b,KAAL,EAAkB,CAAC9b,CAAAqiD,UAAnB,EAAqC,CAACriD,CAAAoF,KAAtC,CACE,MAAO,SAAQ,CAACa,CAAD,CAAQ3C,CAAR,CAAiB,CAE9B,IAAIwY,EAA+C,4BAAxC,GAAAvc,EAAAxC,KAAA,CAAcuG,CAAAvD,KAAA,CAAa,MAAb,CAAd,CAAA,CACA,YADA,CACe,MAC1BuD,EAAAgZ,GAAA,CAAW,OAAX,CAAoB,QAAQ,CAAC3I,CAAD,CAAO,CAE5BrQ,CAAAtD,KAAA,CAAa8b,CAAb,CAAL,EACEnI,CAAAC,eAAA,EAH+B,CAAnC,CAJ8B,CAlBH,CAFD,CAAR,CAA1B,CAuXI3H,GAA6B,EAIjCxP,EAAA,CAAQ+W,EAAR,CAAsB,QAAQ,CAAC8uC,CAAD;AAAW56B,CAAX,CAAqB,CAEjD,GAAgB,UAAhB,EAAI46B,CAAJ,CAAA,CAEA,IAAIC,EAAa/9B,EAAA,CAAmB,KAAnB,CAA2BkD,CAA3B,CACjBzb,GAAA,CAA2Bs2C,CAA3B,CAAA,CAAyC,QAAQ,EAAG,CAClD,MAAO,UACK,GADL,MAEChkC,QAAQ,CAACtY,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB,CACnCiG,CAAAlF,OAAA,CAAaf,CAAA,CAAKuiD,CAAL,CAAb,CAA+BC,QAAiC,CAAChlD,CAAD,CAAQ,CACtEwC,CAAAgrB,KAAA,CAAUtD,CAAV,CAAoB,CAAC,CAAClqB,CAAtB,CADsE,CAAxE,CADmC,CAFhC,CAD2C,CAHpD,CAFiD,CAAnD,CAmBAf,EAAA,CAAQ,CAAC,KAAD,CAAQ,QAAR,CAAkB,MAAlB,CAAR,CAAmC,QAAQ,CAACirB,CAAD,CAAW,CACpD,IAAI66B,EAAa/9B,EAAA,CAAmB,KAAnB,CAA2BkD,CAA3B,CACjBzb,GAAA,CAA2Bs2C,CAA3B,CAAA,CAAyC,QAAQ,EA
AG,CAClD,MAAO,UACK,EADL,MAEChkC,QAAQ,CAACtY,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB,CAAA,IAC/BsiD,EAAW56B,CADoB,CAE/BtiB,EAAOsiB,CAEM,OAAjB,GAAIA,CAAJ,EAC4C,4BAD5C,GACInoB,EAAAxC,KAAA,CAAcuG,CAAAvD,KAAA,CAAa,MAAb,CAAd,CADJ,GAEEqF,CAEA,CAFO,WAEP,CADApF,CAAAskB,MAAA,CAAWlf,CAAX,CACA,CADmB,YACnB,CAAAk9C,CAAA,CAAW,IAJb,CAOAtiD,EAAAioB,SAAA,CAAcs6B,CAAd,CAA0B,QAAQ,CAAC/kD,CAAD,CAAQ,CACnCA,CAAL,EAOAwC,CAAAgrB,KAAA,CAAU5lB,CAAV,CAAgB5H,CAAhB,CAMA,CAAIiX,CAAJ,EAAY6tC,CAAZ,EAAsBh/C,CAAAvD,KAAA,CAAauiD,CAAb,CAAuBtiD,CAAA,CAAKoF,CAAL,CAAvB,CAbtB,EACmB,MADnB,GACMsiB,CADN,EAEI1nB,CAAAgrB,KAAA,CAAU5lB,CAAV,CAAgB,IAAhB,CAHoC,CAA1C,CAXmC,CAFhC,CAD2C,CAFA,CAAtD,CAsCA,KAAI6sC,GAAe,aACJnzC,CADI;eAEDA,CAFC,cAGHA,CAHG,WAINA,CAJM,cAKHA,CALG,CA6CnB2yC,GAAAt8B,QAAA,CAAyB,CAAC,UAAD,CAAa,QAAb,CAAuB,QAAvB,CAAiC,UAAjC,CAgUzB,KAAIstC,GAAuBA,QAAQ,CAACC,CAAD,CAAW,CAC5C,MAAO,CAAC,UAAD,CAAa,QAAQ,CAAChpC,CAAD,CAAW,CAoDrC,MAnDoB3P,MACZ,MADYA,UAER24C,CAAA,CAAW,KAAX,CAAmB,GAFX34C,YAGN0nC,EAHM1nC,SAIT7D,QAAQ,EAAG,CAClB,MAAO,KACAogB,QAAQ,CAACrgB,CAAD,CAAQ08C,CAAR,CAAqB3iD,CAArB,CAA2BugB,CAA3B,CAAuC,CAClD,GAAI,CAACvgB,CAAA4iD,OAAL,CAAkB,CAOhB,IAAIC,EAAyBA,QAAQ,CAAClvC,CAAD,CAAQ,CAC3CA,CAAAC,eACA,CAAID,CAAAC,eAAA,EAAJ,CACID,CAAAG,YADJ,CACwB,CAAA,CAHmB,CAM7C4hB,GAAA,CAAmBitB,CAAA,CAAY,CAAZ,CAAnB,CAAmC,QAAnC,CAA6CE,CAA7C,CAIAF,EAAArmC,GAAA,CAAe,UAAf,CAA2B,QAAQ,EAAG,CACpC5C,CAAA,CAAS,QAAQ,EAAG,CAClBhI,EAAA,CAAsBixC,CAAA,CAAY,CAAZ,CAAtB,CAAsC,QAAtC,CAAgDE,CAAhD,CADkB,CAApB,CAEG,CAFH,CAEM,CAAA,CAFN,CADoC,CAAtC,CAjBgB,CADgC,IAyB9CC,EAAiBH,CAAA/jD,OAAA,EAAA2hB,WAAA,CAAgC,MAAhC,CAzB6B,CA0B9CwiC,EAAQ/iD,CAAAoF,KAAR29C,EAAqB/iD,CAAAuyC,OAErBwQ,EAAJ,EACEpkB,EAAA,CAAO14B,CAAP;AAAc88C,CAAd,CAAqBxiC,CAArB,CAAiCwiC,CAAjC,CAEF,IAAID,CAAJ,CACEH,CAAArmC,GAAA,CAAe,UAAf,CAA2B,QAAQ,EAAG,CACpCwmC,CAAA9P,eAAA,CAA8BzyB,CAA9B,CACIwiC,EAAJ,EACEpkB,EAAA,CAAO14B,CAAP,CAAc88C,CAAd,CAAqB/mD,CAArB,CAAgC+mD,CAAhC,CAEF1kD,EAAA,CAAOkiB,CAAP,CAAmB0xB,EAAnB,CALoC,CAAtC,CAhCgD,CAD/C,CADW,CAJFloC,CADiB,CAAhC,CADqC,CAA9C,CAyDIA,GAAgB04C,EAAA,EAzDpB,CA0DI73C,GAAkB63C,EAAA,CAAqB,CAAA,CAArB,CA1DtB,CAkEIO,GAAa,qFAlEjB,CAmEIC,GAAe,mGAnEnB,CAoEIC,GAAgB,oCApEpB,CAsEIC,GAAY,MAkFN5O,EAlFM,QA2mBhB6O,QAAwB,CAACn9C,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB4zC,CAAvB,CAA6B95B,CAA7B,CAAuCmX,CAAvC,CAAiD,CACvEsjB,EAAA,CAActuC,CAAd,CAAqB3C,CAArB,CAA8BtD,CAA9B,CAAoC4zC,CAApC,CAA0C95B,CAA1C,CAAoDmX,CAApD,CAEA2iB,EAAAS,SAAAn3C,KAAA,CAAmB,QAAQ,CAACM,CAAD,CAAQ,CACjC,IAAIiG,EAAQmwC,CAAA0B,SAAA,CAAc93C,CAAd,CACZ,IAAIiG,CAAJ,EAAay/C,EAAA38C,KAAA,CAAmB/I,CAAnB,CAAb,CAEE,MADAo2C,EAAAR,aAAA,CAAkB,QAAlB,CAA4B,CAAA,CAA5B,CACO,CAAU,EAAV;AAAA51C,CAAA,CAAe,IAAf,CAAuBiG,CAAA,CAAQjG,CAAR,CAAgB4yC,UAAA,CAAW5yC,CAAX,CAE9Co2C,EAAAR,aAAA,CAAkB,QAAlB,CAA4B,CAAA,CAA5B,CACA,OAAOp3C,EAPwB,CAAnC,CAWAi4C,GAAA,CAAyBL,CAAzB,CAA+B,QAA/B,CAAyCyP,EAAzC,CAAyD,IAAzD,CAA+DzP,CAAAe,gBAA/D,CAEAf,EAAA8B,YAAAx4C,KAAA,CAAsB,QAAQ,CAACM,CAAD,CAAQ,CACpC,MAAOo2C,EAAA0B,SAAA,CAAc93C,CAAd,CAAA,CAAuB,EAAvB,CAA4B,EAA5B,CAAiCA,CADJ,CAAtC,CAIIwC,EAAAkuC,IAAJ,GACMoV,CAMJ,CANmBA,QAAQ,CAAC9lD,CAAD,CAAQ,CACjC,IAAI0wC,EAAMkC,UAAA,CAAWpwC,CAAAkuC,IAAX,CACV,OAAOyF,GAAA,CAASC,CAAT,CAAe,KAAf,CAAsBA,CAAA0B,SAAA,CAAc93C,CAAd,CAAtB,EAA8CA,CAA9C,EAAuD0wC,CAAvD,CAA4D1wC,CAA5D,CAF0B,CAMnC,CADAo2C,CAAAS,SAAAn3C,KAAA,CAAmBomD,CAAnB,CACA,CAAA1P,CAAA8B,YAAAx4C,KAAA,CAAsBomD,CAAtB,CAPF,CAUItjD,EAAA0qB,IAAJ,GACM64B,CAMJ,CANmBA,QAAQ,CAAC/lD,CAAD,CAAQ,CACjC,IAAIktB,EAAM0lB,UAAA,CAAWpwC,CAAA0qB,IAAX,CACV,OAAOipB,GAAA,CAASC,CAAT,CAAe,KAAf,CAAsBA,CAAA0B,SAAA,CAAc93C,CAAd,CAAtB,EAA8CA,CAA9C,EAAuDktB,CAAvD,CAA4DltB,CAA5D,CAF0B,CAMnC,CADAo2C,CAAAS,SAAAn3C,KAAA,CAAmBqmD,CAA
nB,CACA,CAAA3P,CAAA8B,YAAAx4C,KAAA,CAAsBqmD,CAAtB,CAPF,CAUA3P,EAAA8B,YAAAx4C,KAAA,CAAsB,QAAQ,CAACM,CAAD,CAAQ,CACpC,MAAOm2C,GAAA,CAASC,CAAT,CAAe,QAAf,CAAyBA,CAAA0B,SAAA,CAAc93C,CAAd,CAAzB,EAAiD6B,EAAA,CAAS7B,CAAT,CAAjD,CAAkEA,CAAlE,CAD6B,CAAtC,CAxCuE,CA3mBzD;IAwpBhBgmD,QAAqB,CAACv9C,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB4zC,CAAvB,CAA6B95B,CAA7B,CAAuCmX,CAAvC,CAAiD,CACpEsjB,EAAA,CAActuC,CAAd,CAAqB3C,CAArB,CAA8BtD,CAA9B,CAAoC4zC,CAApC,CAA0C95B,CAA1C,CAAoDmX,CAApD,CAEIwyB,EAAAA,CAAeA,QAAQ,CAACjmD,CAAD,CAAQ,CACjC,MAAOm2C,GAAA,CAASC,CAAT,CAAe,KAAf,CAAsBA,CAAA0B,SAAA,CAAc93C,CAAd,CAAtB,EAA8CwlD,EAAAz8C,KAAA,CAAgB/I,CAAhB,CAA9C,CAAsEA,CAAtE,CAD0B,CAInCo2C,EAAA8B,YAAAx4C,KAAA,CAAsBumD,CAAtB,CACA7P,EAAAS,SAAAn3C,KAAA,CAAmBumD,CAAnB,CARoE,CAxpBtD,OAmqBhBC,QAAuB,CAACz9C,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB4zC,CAAvB,CAA6B95B,CAA7B,CAAuCmX,CAAvC,CAAiD,CACtEsjB,EAAA,CAActuC,CAAd,CAAqB3C,CAArB,CAA8BtD,CAA9B,CAAoC4zC,CAApC,CAA0C95B,CAA1C,CAAoDmX,CAApD,CAEI0yB,EAAAA,CAAiBA,QAAQ,CAACnmD,CAAD,CAAQ,CACnC,MAAOm2C,GAAA,CAASC,CAAT,CAAe,OAAf,CAAwBA,CAAA0B,SAAA,CAAc93C,CAAd,CAAxB,EAAgDylD,EAAA18C,KAAA,CAAkB/I,CAAlB,CAAhD,CAA0EA,CAA1E,CAD4B,CAIrCo2C,EAAA8B,YAAAx4C,KAAA,CAAsBymD,CAAtB,CACA/P,EAAAS,SAAAn3C,KAAA,CAAmBymD,CAAnB,CARsE,CAnqBxD,OA8qBhBC,QAAuB,CAAC39C,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB4zC,CAAvB,CAA6B,CAE9C10C,CAAA,CAAYc,CAAAoF,KAAZ,CAAJ,EACE9B,CAAAtD,KAAA,CAAa,MAAb,CAAqBvC,EAAA,EAArB,CAGF6F,EAAAgZ,GAAA,CAAW,OAAX,CAAoB,QAAQ,EAAG,CACzBhZ,CAAA,CAAQ,CAAR,CAAAugD,QAAJ,EACE59C,CAAAG,OAAA,CAAa,QAAQ,EAAG,CACtBwtC,CAAAqB,cAAA,CAAmBj1C,CAAAxC,MAAnB,CADsB,CAAxB,CAF2B,CAA/B,CAQAo2C,EAAAwB,QAAA,CAAeC,QAAQ,EAAG,CAExB/xC,CAAA,CAAQ,CAAR,CAAAugD,QAAA,CADY7jD,CAAAxC,MACZ,EAA+Bo2C,CAAAoB,WAFP,CAK1Bh1C,EAAAioB,SAAA,CAAc,OAAd;AAAuB2rB,CAAAwB,QAAvB,CAnBkD,CA9qBpC,UAosBhB0O,QAA0B,CAAC79C,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB4zC,CAAvB,CAA6B,CAAA,IACjDmQ,EAAY/jD,CAAAgkD,YADqC,CAEjDC,EAAajkD,CAAAkkD,aAEZ3nD,EAAA,CAASwnD,CAAT,CAAL,GAA0BA,CAA1B,CAAsC,CAAA,CAAtC,CACKxnD,EAAA,CAAS0nD,CAAT,CAAL,GAA2BA,CAA3B,CAAwC,CAAA,CAAxC,CAEA3gD,EAAAgZ,GAAA,CAAW,OAAX,CAAoB,QAAQ,EAAG,CAC7BrW,CAAAG,OAAA,CAAa,QAAQ,EAAG,CACtBwtC,CAAAqB,cAAA,CAAmB3xC,CAAA,CAAQ,CAAR,CAAAugD,QAAnB,CADsB,CAAxB,CAD6B,CAA/B,CAMAjQ,EAAAwB,QAAA,CAAeC,QAAQ,EAAG,CACxB/xC,CAAA,CAAQ,CAAR,CAAAugD,QAAA,CAAqBjQ,CAAAoB,WADG,CAK1BpB,EAAA0B,SAAA,CAAgB6O,QAAQ,CAAC3mD,CAAD,CAAQ,CAC9B,MAAOA,EAAP,GAAiBumD,CADa,CAIhCnQ,EAAA8B,YAAAx4C,KAAA,CAAsB,QAAQ,CAACM,CAAD,CAAQ,CACpC,MAAOA,EAAP,GAAiBumD,CADmB,CAAtC,CAIAnQ,EAAAS,SAAAn3C,KAAA,CAAmB,QAAQ,CAACM,CAAD,CAAQ,CACjC,MAAOA,EAAA,CAAQumD,CAAR,CAAoBE,CADM,CAAnC,CA1BqD,CApsBvC,QAmaJnlD,CAnaI,QAoaJA,CApaI,QAqaJA,CAraI,OAsaLA,CAtaK,MAuaNA,CAvaM,CAtEhB,CA+qBIukD,GAAiB,CAAC,UAAD,CA/qBrB,CA27BIv5C,GAAiB,CAAC,UAAD,CAAa,UAAb,CAAyB,QAAQ,CAACmnB,CAAD,CAAWnX,CAAX,CAAqB,CACzE,MAAO,UACK,GADL,SAEI,UAFJ,MAGCyE,QAAQ,CAACtY,CAAD;AAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB4zC,CAAvB,CAA6B,CACrCA,CAAJ,EACG,CAAAuP,EAAA,CAAU//C,CAAA,CAAUpD,CAAAoR,KAAV,CAAV,CAAA,EAAmC+xC,EAAA32B,KAAnC,EAAmDvmB,CAAnD,CAA0D3C,CAA1D,CAAmEtD,CAAnE,CAAyE4zC,CAAzE,CAA+E95B,CAA/E,CACmDmX,CADnD,CAFsC,CAHtC,CADkE,CAAtD,CA37BrB,CAw8BI6gB,GAAc,UAx8BlB,CAy8BID,GAAgB,YAz8BpB,CA08BIgB,GAAiB,aA18BrB,CA28BIW,GAAc,UA38BlB,CAwlCI4Q,GAAoB,CAAC,QAAD,CAAW,mBAAX,CAAgC,QAAhC,CAA0C,UAA1C,CAAsD,QAAtD,CAAgE,UAAhE,CACpB,QAAQ,CAACv7B,CAAD,CAAS1I,CAAT,CAA4BmE,CAA5B,CAAmChC,CAAnC,CAA6CrB,CAA7C,CAAqDG,CAArD,CAA+D,CA6DzEswB,QAASA,EAAc,CAACC,CAAD,CAAUC,CAAV,CAA8B,CACnDA,CAAA,CAAqBA,CAAA,CAAqB,GAArB,CAA2BhrC,EAAA,CAAWgrC,CAAX,CAA+B,GAA/B,CAA3B,CAAiE,EACtFxwB,EAAAkN,YAAA,CAAqBhM,CAArB,EAAgCqvB,CAAA,CAAUE,EAAV,CAA0BC,EAA1D,EAAyEF,CAAzE,CACAxw
B,EAAAmB,SAAA,CAAkBD,CAAlB,EAA6BqvB,CAAA,CAAUG,EAAV,CAAwBD,EAArD,EAAsED,CAAtE,CAHmD,CA3DrD,IAAAyS,YAAA,CADA,IAAArP,WACA,CADkBj2B,MAAAulC,IAElB,KAAAjQ,SAAA,CAAgB,EAChB,KAAAqB,YAAA,CAAmB,EACnB,KAAA6O,qBAAA,CAA4B,EAC5B,KAAA9R,UAAA,CAAiB,CAAA,CACjB,KAAAD,OAAA,CAAc,CAAA,CACd,KAAAE,OAAA,CAAc,CAAA,CACd,KAAAC,SAAA,CAAgB,CAAA,CAChB,KAAAL,MAAA,CAAahuB,CAAAlf,KAV4D,KAYrEo/C,EAAavjC,CAAA,CAAOqD,CAAAmgC,QAAP,CAZwD;AAarEC,EAAaF,CAAAj8B,OAEjB,IAAI,CAACm8B,CAAL,CACE,KAAMzoD,EAAA,CAAO,SAAP,CAAA,CAAkB,WAAlB,CACFqoB,CAAAmgC,QADE,CACaphD,EAAA,CAAYif,CAAZ,CADb,CAAN,CAYF,IAAA8yB,QAAA,CAAet2C,CAmBf,KAAAw2C,SAAA,CAAgBqP,QAAQ,CAACnnD,CAAD,CAAQ,CAC9B,MAAO0B,EAAA,CAAY1B,CAAZ,CAAP,EAAuC,EAAvC,GAA6BA,CAA7B,EAAuD,IAAvD,GAA6CA,CAA7C,EAA+DA,CAA/D,GAAyEA,CAD3C,CA/CyC,KAmDrEw0C,EAAa1vB,CAAAsiC,cAAA,CAAuB,iBAAvB,CAAb5S,EAA0DC,EAnDW,CAoDrEC,EAAe,CApDsD,CAqDrEE,EAAS,IAAAA,OAATA,CAAuB,EAI3B9vB,EAAAC,SAAA,CAAkBswB,EAAlB,CACAnB,EAAA,CAAe,CAAA,CAAf,CA0BA,KAAA0B,aAAA,CAAoByR,QAAQ,CAACjT,CAAD,CAAqBD,CAArB,CAA8B,CAGpDS,CAAA,CAAOR,CAAP,CAAJ,GAAmC,CAACD,CAApC,GAGIA,CAAJ,EACMS,CAAA,CAAOR,CAAP,CACJ,EADgCM,CAAA,EAChC,CAAKA,CAAL,GACER,CAAA,CAAe,CAAA,CAAf,CAEA,CADA,IAAAgB,OACA,CADc,CAAA,CACd,CAAA,IAAAC,SAAA,CAAgB,CAAA,CAHlB,CAFF,GAQEjB,CAAA,CAAe,CAAA,CAAf,CAGA,CAFA,IAAAiB,SAEA,CAFgB,CAAA,CAEhB,CADA,IAAAD,OACA,CADc,CAAA,CACd,CAAAR,CAAA,EAXF,CAiBA,CAHAE,CAAA,CAAOR,CAAP,CAGA,CAH6B,CAACD,CAG9B,CAFAD,CAAA,CAAeC,CAAf,CAAwBC,CAAxB,CAEA,CAAAI,CAAAoB,aAAA,CAAwBxB,CAAxB,CAA4CD,CAA5C,CAAqD,IAArD,CApBA,CAHwD,CAoC1D,KAAA8B,aAAA,CAAoBqR,QAAS,EAAG,CAC9B,IAAAtS,OAAA,CAAc,CAAA,CACd,KAAAC,UAAA,CAAiB,CAAA,CACjBrxB,EAAAkN,YAAA,CAAqBhM,CAArB,CAA+BkxB,EAA/B,CACApyB,EAAAmB,SAAA,CAAkBD,CAAlB;AAA4BuwB,EAA5B,CAJ8B,CA4BhC,KAAAoC,cAAA,CAAqB8P,QAAQ,CAACvnD,CAAD,CAAQ,CACnC,IAAAw3C,WAAA,CAAkBx3C,CAGd,KAAAi1C,UAAJ,GACE,IAAAD,OAIA,CAJc,CAAA,CAId,CAHA,IAAAC,UAGA,CAHiB,CAAA,CAGjB,CAFArxB,CAAAkN,YAAA,CAAqBhM,CAArB,CAA+BuwB,EAA/B,CAEA,CADAzxB,CAAAmB,SAAA,CAAkBD,CAAlB,CAA4BkxB,EAA5B,CACA,CAAAxB,CAAAsB,UAAA,EALF,CAQA72C,EAAA,CAAQ,IAAA43C,SAAR,CAAuB,QAAQ,CAAClyC,CAAD,CAAK,CAClC3E,CAAA,CAAQ2E,CAAA,CAAG3E,CAAH,CAD0B,CAApC,CAII,KAAA6mD,YAAJ,GAAyB7mD,CAAzB,GACE,IAAA6mD,YAEA,CAFmB7mD,CAEnB,CADAknD,CAAA,CAAW77B,CAAX,CAAmBrrB,CAAnB,CACA,CAAAf,CAAA,CAAQ,IAAA8nD,qBAAR,CAAmC,QAAQ,CAACvpC,CAAD,CAAW,CACpD,GAAI,CACFA,CAAA,EADE,CAEF,MAAMtX,CAAN,CAAS,CACTyc,CAAA,CAAkBzc,CAAlB,CADS,CAHyC,CAAtD,CAHF,CAhBmC,CA8BrC,KAAIkwC,EAAO,IAEX/qB,EAAA9nB,OAAA,CAAcikD,QAAqB,EAAG,CACpC,IAAIxnD,EAAQgnD,CAAA,CAAW37B,CAAX,CAGZ,IAAI+qB,CAAAyQ,YAAJ,GAAyB7mD,CAAzB,CAAgC,CAAA,IAE1BynD,EAAarR,CAAA8B,YAFa,CAG1B7hB,EAAMoxB,CAAA5oD,OAGV,KADAu3C,CAAAyQ,YACA,CADmB7mD,CACnB,CAAMq2B,CAAA,EAAN,CAAA,CACEr2B,CAAA,CAAQynD,CAAA,CAAWpxB,CAAX,CAAA,CAAgBr2B,CAAhB,CAGNo2C,EAAAoB,WAAJ,GAAwBx3C,CAAxB,GACEo2C,CAAAoB,WACA,CADkBx3C,CAClB,CAAAo2C,CAAAwB,QAAA,EAFF,CAV8B,CAgBhC,MAAO53C,EApB6B,CAAtC,CApLyE,CADnD,CAxlCxB,CA64CImO;AAAmBA,QAAQ,EAAG,CAChC,MAAO,SACI,CAAC,SAAD,CAAY,QAAZ,CADJ,YAEOy4C,EAFP,MAGC7lC,QAAQ,CAACtY,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuBklD,CAAvB,CAA8B,CAAA,IAGtCC,EAAYD,CAAA,CAAM,CAAN,CAH0B,CAItCE,EAAWF,CAAA,CAAM,CAAN,CAAXE,EAAuBnT,EAE3BmT,EAAAxS,YAAA,CAAqBuS,CAArB,CAEAl/C,EAAA2gC,IAAA,CAAU,UAAV,CAAsB,QAAQ,EAAG,CAC/Bwe,CAAApS,eAAA,CAAwBmS,CAAxB,CAD+B,CAAjC,CAR0C,CAHvC,CADyB,CA74ClC,CA49CIt5C,GAAoB5M,CAAA,CAAQ,SACrB,SADqB,MAExBsf,QAAQ,CAACtY,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB4zC,CAAvB,CAA6B,CACzCA,CAAA2Q,qBAAArnD,KAAA,CAA+B,QAAQ,EAAG,CACxC+I,CAAAygC,MAAA,CAAY1mC,CAAAqlD,SAAZ,CADwC,CAA1C,CADyC,CAFb,CAAR,CA59CxB,CAs+CIv5C,GAAoBA,QAAQ,EAAG,CACjC,MAAO,SACI,UADJ,MAECyS,QAAQ,CAACtY,CAAD,CAAQkT,CAAR,CAAanZ,CAAb,CAAmB4zC,CAAnB,CAAyB,
CACrC,GAAKA,CAAL,CAAA,CACA5zC,CAAAslD,SAAA,CAAgB,CAAA,CAEhB,KAAIhR,EAAYA,QAAQ,CAAC92C,CAAD,CAAQ,CAC9B,GAAIwC,CAAAslD,SAAJ,EAAqB1R,CAAA0B,SAAA,CAAc93C,CAAd,CAArB,CACEo2C,CAAAR,aAAA,CAAkB,UAAlB,CAA8B,CAAA,CAA9B,CADF,KAKE,OADAQ,EAAAR,aAAA,CAAkB,UAAlB,CAA8B,CAAA,CAA9B,CACO51C,CAAAA,CANqB,CAUhCo2C,EAAA8B,YAAAx4C,KAAA,CAAsBo3C,CAAtB,CACAV;CAAAS,SAAAp2C,QAAA,CAAsBq2C,CAAtB,CAEAt0C,EAAAioB,SAAA,CAAc,UAAd,CAA0B,QAAQ,EAAG,CACnCqsB,CAAA,CAAUV,CAAAoB,WAAV,CADmC,CAArC,CAhBA,CADqC,CAFlC,CAD0B,CAt+CnC,CAyjDIppC,GAAkBA,QAAQ,EAAG,CAC/B,MAAO,SACI,SADJ,MAEC2S,QAAQ,CAACtY,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB4zC,CAAvB,CAA6B,CACzC,IACI/sC,GADAxF,CACAwF,CADQ,UAAAtB,KAAA,CAAgBvF,CAAAulD,OAAhB,CACR1+C,GAAyBzF,MAAJ,CAAWC,CAAA,CAAM,CAAN,CAAX,CAArBwF,EAA6C7G,CAAAulD,OAA7C1+C,EAA4D,GAiBhE+sC,EAAAS,SAAAn3C,KAAA,CAfY+F,QAAQ,CAACuiD,CAAD,CAAY,CAE9B,GAAI,CAAAtmD,CAAA,CAAYsmD,CAAZ,CAAJ,CAAA,CAEA,IAAIplD,EAAO,EAEPolD,EAAJ,EACE/oD,CAAA,CAAQ+oD,CAAAnhD,MAAA,CAAgBwC,CAAhB,CAAR,CAAoC,QAAQ,CAACrJ,CAAD,CAAQ,CAC9CA,CAAJ,EAAW4C,CAAAlD,KAAA,CAAUoS,EAAA,CAAK9R,CAAL,CAAV,CADuC,CAApD,CAKF,OAAO4C,EAVP,CAF8B,CAehC,CACAwzC,EAAA8B,YAAAx4C,KAAA,CAAsB,QAAQ,CAACM,CAAD,CAAQ,CACpC,MAAIhB,EAAA,CAAQgB,CAAR,CAAJ,CACSA,CAAAM,KAAA,CAAW,IAAX,CADT,CAIO9B,CAL6B,CAAtC,CASA43C,EAAA0B,SAAA,CAAgB6O,QAAQ,CAAC3mD,CAAD,CAAQ,CAC9B,MAAO,CAACA,CAAR,EAAiB,CAACA,CAAAnB,OADY,CA7BS,CAFtC,CADwB,CAzjDjC,CAimDIopD,GAAwB,oBAjmD5B,CAspDI15C,GAAmBA,QAAQ,EAAG,CAChC,MAAO,UACK,GADL,SAEI7F,QAAQ,CAACw/C,CAAD,CAAMC,CAAN,CAAe,CAC9B,MAAIF,GAAAl/C,KAAA,CAA2Bo/C,CAAAC,QAA3B,CAAJ;AACSC,QAA4B,CAAC5/C,CAAD,CAAQkT,CAAR,CAAanZ,CAAb,CAAmB,CACpDA,CAAAgrB,KAAA,CAAU,OAAV,CAAmB/kB,CAAAygC,MAAA,CAAY1mC,CAAA4lD,QAAZ,CAAnB,CADoD,CADxD,CAKSE,QAAoB,CAAC7/C,CAAD,CAAQkT,CAAR,CAAanZ,CAAb,CAAmB,CAC5CiG,CAAAlF,OAAA,CAAaf,CAAA4lD,QAAb,CAA2BG,QAAyB,CAACvoD,CAAD,CAAQ,CAC1DwC,CAAAgrB,KAAA,CAAU,OAAV,CAAmBxtB,CAAnB,CAD0D,CAA5D,CAD4C,CANlB,CAF3B,CADyB,CAtpDlC,CA4tDI4M,GAAkBonC,EAAA,CAAY,SACvBtrC,QAAQ,CAAC8/C,CAAD,CAAkB,CACjCA,CAAAzjC,SAAA,CAAyB,YAAzB,CACA,OAAO,SAAS,CAACtc,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB,CACrCsD,CAAA+C,KAAA,CAAa,UAAb,CAAyBrG,CAAAimD,OAAzB,CACAhgD,EAAAlF,OAAA,CAAaf,CAAAimD,OAAb,CAA0BC,QAA0B,CAAC1oD,CAAD,CAAQ,CAI1D8F,CAAAkpB,KAAA,CAAahvB,CAAA,EAASxB,CAAT,CAAqB,EAArB,CAA0BwB,CAAvC,CAJ0D,CAA5D,CAFqC,CAFN,CADH,CAAZ,CA5tDtB,CA+xDI8M,GAA0B,CAAC,cAAD,CAAiB,QAAQ,CAACwW,CAAD,CAAe,CACpE,MAAO,SAAQ,CAAC7a,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB,CAEhCysB,CAAAA,CAAgB3L,CAAA,CAAaxd,CAAAtD,KAAA,CAAaA,CAAAskB,MAAA6hC,eAAb,CAAb,CACpB7iD,EAAAif,SAAA,CAAiB,YAAjB,CAAAlc,KAAA,CAAoC,UAApC,CAAgDomB,CAAhD,CACAzsB,EAAAioB,SAAA,CAAc,gBAAd,CAAgC,QAAQ,CAACzqB,CAAD,CAAQ,CAC9C8F,CAAAkpB,KAAA,CAAahvB,CAAb,CAD8C,CAAhD,CAJoC,CAD8B,CAAxC,CA/xD9B,CAw1DI6M,GAAsB,CAAC,MAAD,CAAS,QAAT,CAAmB,QAAQ,CAAC8W,CAAD,CAAOF,CAAP,CAAe,CAClE,MAAO,SACI/a,QAAS,CAACkgD,CAAD,CAAW,CAC3BA,CAAA7jC,SAAA,CAAkB,YAAlB,CAEA;MAAO,SAAS,CAACtc,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB,CACrCsD,CAAA+C,KAAA,CAAa,UAAb,CAAyBrG,CAAAqmD,WAAzB,CAEA,KAAI72C,EAASyR,CAAA,CAAOjhB,CAAAqmD,WAAP,CAMbpgD,EAAAlF,OAAA,CAJAulD,QAAuB,EAAG,CACxB,MAAQ/mD,CAAAiQ,CAAA,CAAOvJ,CAAP,CAAA1G,EAAiB,EAAjBA,UAAA,EADgB,CAI1B,CAA6BgnD,QAA8B,CAAC/oD,CAAD,CAAQ,CACjE8F,CAAAO,KAAA,CAAasd,CAAAqlC,eAAA,CAAoBh3C,CAAA,CAAOvJ,CAAP,CAApB,CAAb,EAAmD,EAAnD,CADiE,CAAnE,CATqC,CAHZ,CADxB,CAD2D,CAA1C,CAx1D1B,CAknEIsE,GAAmB0rC,EAAA,CAAe,EAAf,CAAmB,CAAA,CAAnB,CAlnEvB,CAkqEIxrC,GAAsBwrC,EAAA,CAAe,KAAf,CAAsB,CAAtB,CAlqE1B,CAktEIzrC,GAAuByrC,EAAA,CAAe,MAAf,CAAuB,CAAvB,CAltE3B,CA4wEIvrC,GAAmB8mC,EAAA,CAAY,SACxBtrC,QAAQ,CAAC5C,CAAD,CAAUtD,CAAV,CAAgB,CAC/BA,CAAAgrB,KAAA,CAAU,SAAV,CAAqBhvB,CAArB,CACAsH,EAAAgrB,YAAA,CAAoB,
UAApB,CAF+B,CADA,CAAZ,CA5wEvB,CA8+EI3jB,GAAwB,CAAC,QAAQ,EAAG,CACtC,MAAO,OACE,CAAA,CADF,YAEO,GAFP,UAGK,GAHL,CAD+B,CAAZ,CA9+E5B,CAilFIuB,GAAoB,EACxBzP,EAAA,CACE,6IAAA,MAAA,CAAA,GAAA,CADF;AAEE,QAAQ,CAAC2I,CAAD,CAAO,CACb,IAAIohB,EAAgBhC,EAAA,CAAmB,KAAnB,CAA2Bpf,CAA3B,CACpB8G,GAAA,CAAkBsa,CAAlB,CAAA,CAAmC,CAAC,QAAD,CAAW,QAAQ,CAACvF,CAAD,CAAS,CAC7D,MAAO,SACI/a,QAAQ,CAACoc,CAAD,CAAWtiB,CAAX,CAAiB,CAChC,IAAImC,EAAK8e,CAAA,CAAOjhB,CAAA,CAAKwmB,CAAL,CAAP,CACT,OAAOigC,SAAuB,CAACxgD,CAAD,CAAQ3C,CAAR,CAAiB,CAC7CA,CAAAgZ,GAAA,CAAWlZ,CAAA,CAAUgC,CAAV,CAAX,CAA4B,QAAQ,CAACuO,CAAD,CAAQ,CAC1C1N,CAAAG,OAAA,CAAa,QAAQ,EAAG,CACtBjE,CAAA,CAAG8D,CAAH,CAAU,QAAQ0N,CAAR,CAAV,CADsB,CAAxB,CAD0C,CAA5C,CAD6C,CAFf,CAD7B,CADsD,CAA5B,CAFtB,CAFjB,CA6eA,KAAI7I,GAAgB,CAAC,UAAD,CAAa,QAAQ,CAACsW,CAAD,CAAW,CAClD,MAAO,YACO,SADP,UAEK,GAFL,UAGK,CAAA,CAHL,UAIK,GAJL,OAKE,CAAA,CALF,MAMC7C,QAAS,CAACsK,CAAD,CAASvG,CAAT,CAAmBgC,CAAnB,CAA0BsvB,CAA1B,CAAgC8S,CAAhC,CAA6C,CAAA,IACpDx9C,CADoD,CAC7CyZ,CAD6C,CACjCgkC,CACvB99B,EAAA9nB,OAAA,CAAcujB,CAAAsiC,KAAd,CAA0BC,QAAwB,CAACrpD,CAAD,CAAQ,CAEpD0F,EAAA,CAAU1F,CAAV,CAAJ,CACOmlB,CADP,GAEIA,CACA,CADakG,CAAA3F,KAAA,EACb,CAAAwjC,CAAA,CAAY/jC,CAAZ,CAAwB,QAAS,CAACnf,CAAD,CAAQ,CACvCA,CAAA,CAAMA,CAAAnH,OAAA,EAAN,CAAA,CAAwBN,CAAA+tB,cAAA,CAAuB,aAAvB,CAAuCxF,CAAAsiC,KAAvC,CAAoD,GAApD,CAIxB19C,EAAA,CAAQ,OACC1F,CADD,CAGR4d,EAAA85B,MAAA,CAAe13C,CAAf,CAAsB8e,CAAA1jB,OAAA,EAAtB,CAAyC0jB,CAAzC,CARuC,CAAzC,CAHJ,GAeKqkC,CAQH,GAPEA,CAAAxnC,OAAA,EACA;AAAAwnC,CAAA,CAAmB,IAMrB,EAJGhkC,CAIH,GAHEA,CAAA5Q,SAAA,EACA,CAAA4Q,CAAA,CAAa,IAEf,EAAGzZ,CAAH,GACEy9C,CAIA,CAJmB7+C,EAAA,CAAiBoB,CAAA1F,MAAjB,CAInB,CAHA4d,CAAA+5B,MAAA,CAAewL,CAAf,CAAiC,QAAQ,EAAG,CAC1CA,CAAA,CAAmB,IADuB,CAA5C,CAGA,CAAAz9C,CAAA,CAAQ,IALV,CAvBF,CAFwD,CAA1D,CAFwD,CANvD,CAD2C,CAAhC,CAApB,CA+MI6B,GAAqB,CAAC,OAAD,CAAU,gBAAV,CAA4B,eAA5B,CAA6C,UAA7C,CAAyD,MAAzD,CACP,QAAQ,CAACgW,CAAD,CAAUC,CAAV,CAA4B8lC,CAA5B,CAA6C1lC,CAA7C,CAAyDD,CAAzD,CAA+D,CACvF,MAAO,UACK,KADL,UAEK,GAFL,UAGK,CAAA,CAHL,YAIO,SAJP,YAKO3a,EAAA1H,KALP,SAMIoH,QAAQ,CAAC5C,CAAD,CAAUtD,CAAV,CAAgB,CAAA,IAC3B+mD,EAAS/mD,CAAAgnD,UAATD,EAA2B/mD,CAAAwB,IADA,CAE3BylD,EAAYjnD,CAAAknD,OAAZD,EAA2B,EAFA,CAG3BE,EAAgBnnD,CAAAonD,WAEpB,OAAO,SAAQ,CAACnhD,CAAD,CAAQqc,CAAR,CAAkBgC,CAAlB,CAAyBsvB,CAAzB,CAA+B8S,CAA/B,CAA4C,CAAA,IACrDtpB,EAAgB,CADqC,CAErDgK,CAFqD,CAGrDigB,CAHqD,CAIrDC,CAJqD,CAMrDC,EAA4BA,QAAQ,EAAG,CACtCF,CAAH,GACEA,CAAAloC,OAAA,EACA,CAAAkoC,CAAA,CAAkB,IAFpB,CAIGjgB,EAAH,GACEA,CAAAr1B,SAAA,EACA,CAAAq1B,CAAA,CAAe,IAFjB,CAIGkgB,EAAH,GACElmC,CAAA+5B,MAAA,CAAemM,CAAf,CAA+B,QAAQ,EAAG,CACxCD,CAAA,CAAkB,IADsB,CAA1C,CAIA,CADAA,CACA,CADkBC,CAClB,CAAAA,CAAA,CAAiB,IALnB,CATyC,CAkB3CrhD,EAAAlF,OAAA,CAAaogB,CAAAqmC,mBAAA,CAAwBT,CAAxB,CAAb;AAA8CU,QAA6B,CAACjmD,CAAD,CAAM,CAC/E,IAAIkmD,EAAiBA,QAAQ,EAAG,CAC1B,CAAAvoD,CAAA,CAAUgoD,CAAV,CAAJ,EAAkCA,CAAlC,EAAmD,CAAAlhD,CAAAygC,MAAA,CAAYygB,CAAZ,CAAnD,EACEL,CAAA,EAF4B,CAAhC,CAKIa,EAAe,EAAEvqB,CAEjB57B,EAAJ,EACEuf,CAAArK,IAAA,CAAUlV,CAAV,CAAe,OAAQwf,CAAR,CAAf,CAAAyK,QAAA,CAAgD,QAAQ,CAACM,CAAD,CAAW,CACjE,GAAI47B,CAAJ,GAAqBvqB,CAArB,CAAA,CACA,IAAIwqB,EAAW3hD,CAAAid,KAAA,EACf0wB,EAAA7qB,SAAA,CAAgBgD,CAQZvoB,EAAAA,CAAQkjD,CAAA,CAAYkB,CAAZ,CAAsB,QAAQ,CAACpkD,CAAD,CAAQ,CAChD+jD,CAAA,EACAnmC,EAAA85B,MAAA,CAAe13C,CAAf,CAAsB,IAAtB,CAA4B8e,CAA5B,CAAsColC,CAAtC,CAFgD,CAAtC,CAKZtgB,EAAA,CAAewgB,CACfN,EAAA,CAAiB9jD,CAEjB4jC,EAAAH,MAAA,CAAmB,uBAAnB,CACAhhC,EAAAygC,MAAA,CAAYugB,CAAZ,CAnBA,CADiE,CAAnE,CAAA9sC,MAAA,CAqBS,QAAQ,EAAG,CACdwtC,CAAJ,GAAqBvqB,CAArB,EAAoCmqB,CAAA,EADlB,CArBpB,CAwBA,CAAAthD,CAAAghC,MAAA,CAAY,0BAAZ,CAzBF,GA2BEsgB,CAAA,EACA,CAAA3T,CAAA7qB,SAAA,CAAgB,IA5BlB,CAR+E,CAAjF,C
AxByD,CAL5B,CAN5B,CADgF,CADhE,CA/MzB,CAqSI/c,GAAgC,CAAC,UAAD,CAClC,QAAQ,CAAC67C,CAAD,CAAW,CACjB,MAAO,UACK,KADL,UAEM,IAFN,SAGI,WAHJ,MAICtpC,QAAQ,CAACtY,CAAD,CAAQqc,CAAR,CAAkBgC,CAAlB,CAAyBsvB,CAAzB,CAA+B,CAC3CtxB,CAAAze,KAAA,CAAc+vC,CAAA7qB,SAAd,CACA8+B,EAAA,CAASvlC,CAAA2H,SAAA,EAAT,CAAA,CAA8BhkB,CAA9B,CAF2C,CAJxC,CADU,CADe,CArSpC,CA0WI+E,GAAkBwmC,EAAA,CAAY,UACtB,GADsB;QAEvBtrC,QAAQ,EAAG,CAClB,MAAO,KACAogB,QAAQ,CAACrgB,CAAD,CAAQ3C,CAAR,CAAiBigB,CAAjB,CAAwB,CACnCtd,CAAAygC,MAAA,CAAYnjB,CAAAukC,OAAZ,CADmC,CADhC,CADW,CAFY,CAAZ,CA1WtB,CAqZI78C,GAAyBumC,EAAA,CAAY,UAAY,CAAA,CAAZ,UAA4B,GAA5B,CAAZ,CArZ7B,CAmkBItmC,GAAuB,CAAC,SAAD,CAAY,cAAZ,CAA4B,QAAQ,CAACyhC,CAAD,CAAU7rB,CAAV,CAAwB,CACrF,IAAIinC,EAAQ,KACZ,OAAO,UACK,IADL,MAECxpC,QAAQ,CAACtY,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB,CAAA,IAC/BgoD,EAAYhoD,CAAA83B,MADmB,CAE/BmwB,EAAUjoD,CAAAskB,MAAAqO,KAAVs1B,EAA6B3kD,CAAAtD,KAAA,CAAaA,CAAAskB,MAAAqO,KAAb,CAFE,CAG/B5kB,EAAS/N,CAAA+N,OAATA,EAAwB,CAHO,CAI/Bm6C,EAAQjiD,CAAAygC,MAAA,CAAYuhB,CAAZ,CAARC,EAAgC,EAJD,CAK/BC,EAAc,EALiB,CAM/Bn5B,EAAclO,CAAAkO,YAAA,EANiB,CAO/BC,EAAYnO,CAAAmO,UAAA,EAPmB,CAQ/Bm5B,EAAS,oBAEb3rD,EAAA,CAAQuD,CAAR,CAAc,QAAQ,CAAC4vB,CAAD,CAAay4B,CAAb,CAA4B,CAC5CD,CAAA7hD,KAAA,CAAY8hD,CAAZ,CAAJ,GACEH,CAAA,CAAM9kD,CAAA,CAAUilD,CAAAtkD,QAAA,CAAsB,MAAtB,CAA8B,EAA9B,CAAAA,QAAA,CAA0C,OAA1C,CAAmD,GAAnD,CAAV,CAAN,CADF,CAEIT,CAAAtD,KAAA,CAAaA,CAAAskB,MAAA,CAAW+jC,CAAX,CAAb,CAFJ,CADgD,CAAlD,CAMA5rD,EAAA,CAAQyrD,CAAR,CAAe,QAAQ,CAACt4B,CAAD,CAAahzB,CAAb,CAAkB,CACvCurD,CAAA,CAAYvrD,CAAZ,CAAA,CACEkkB,CAAA,CAAa8O,CAAA7rB,QAAA,CAAmBgkD,CAAnB,CAA0B/4B,CAA1B,CAAwCg5B,CAAxC,CAAoD,GAApD,CACXj6C,CADW,CACFkhB,CADE,CAAb,CAFqC,CAAzC,CAMAhpB,EAAAlF,OAAA,CAAaunD,QAAyB,EAAG,CACvC,IAAI9qD;AAAQ4yC,UAAA,CAAWnqC,CAAAygC,MAAA,CAAYshB,CAAZ,CAAX,CAEZ,IAAKjmD,KAAA,CAAMvE,CAAN,CAAL,CAME,MAAO,EAHDA,EAAN,GAAe0qD,EAAf,GAAuB1qD,CAAvB,CAA+BmvC,CAAAlU,UAAA,CAAkBj7B,CAAlB,CAA0BuQ,CAA1B,CAA/B,CACC,OAAOo6C,EAAA,CAAY3qD,CAAZ,CAAA,CAAmByI,CAAnB,CAA0B3C,CAA1B,CAAmC,CAAA,CAAnC,CAP6B,CAAzC,CAWGilD,QAA+B,CAACxjB,CAAD,CAAS,CACzCzhC,CAAAkpB,KAAA,CAAauY,CAAb,CADyC,CAX3C,CAtBmC,CAFhC,CAF8E,CAA5D,CAnkB3B,CAqzBI55B,GAAoB,CAAC,QAAD,CAAW,UAAX,CAAuB,QAAQ,CAAC8V,CAAD,CAASG,CAAT,CAAmB,CAExE,IAAIonC,EAAiBvsD,CAAA,CAAO,UAAP,CACrB,OAAO,YACO,SADP,UAEK,GAFL,UAGK,CAAA,CAHL,OAIE,CAAA,CAJF,MAKCsiB,QAAQ,CAACsK,CAAD,CAASvG,CAAT,CAAmBgC,CAAnB,CAA0BsvB,CAA1B,CAAgC8S,CAAhC,CAA4C,CACtD,IAAI92B,EAAatL,CAAAmkC,SAAjB,CACIpnD,EAAQuuB,CAAAvuB,MAAA,CAAiB,qEAAjB,CADZ,CAEcqnD,CAFd,CAEgCC,CAFhC,CAEgDC,CAFhD,CAEkEC,CAFlE,CAGYC,CAHZ,CAG6BC,CAH7B,CAIEC,EAAe,KAAMr0C,EAAN,CAEjB,IAAI,CAACtT,CAAL,CACE,KAAMmnD,EAAA,CAAe,MAAf,CACJ54B,CADI,CAAN,CAIFq5B,CAAA,CAAM5nD,CAAA,CAAM,CAAN,CACN6nD,EAAA,CAAM7nD,CAAA,CAAM,CAAN,CAGN,EAFA8nD,CAEA,CAFa9nD,CAAA,CAAM,CAAN,CAEb,GACEqnD,CACA,CADmBznC,CAAA,CAAOkoC,CAAP,CACnB,CAAAR,CAAA,CAAiBA,QAAQ,CAAC/rD,CAAD,CAAMY,CAAN,CAAaE,CAAb,CAAoB,CAEvCqrD,CAAJ,GAAmBC,CAAA,CAAaD,CAAb,CAAnB,CAAiDnsD,CAAjD,CACAosD,EAAA,CAAaF,CAAb,CAAA,CAAgCtrD,CAChCwrD,EAAAvS,OAAA,CAAsB/4C,CACtB,OAAOgrD,EAAA,CAAiB7/B,CAAjB;AAAyBmgC,CAAzB,CALoC,CAF/C,GAUEJ,CAGA,CAHmBA,QAAQ,CAAChsD,CAAD,CAAMY,CAAN,CAAa,CACtC,MAAOmX,GAAA,CAAQnX,CAAR,CAD+B,CAGxC,CAAAqrD,CAAA,CAAiBA,QAAQ,CAACjsD,CAAD,CAAM,CAC7B,MAAOA,EADsB,CAbjC,CAkBAyE,EAAA,CAAQ4nD,CAAA5nD,MAAA,CAAU,+CAAV,CACR,IAAI,CAACA,CAAL,CACE,KAAMmnD,EAAA,CAAe,QAAf,CACoDS,CADpD,CAAN,CAGFH,CAAA,CAAkBznD,CAAA,CAAM,CAAN,CAAlB,EAA8BA,CAAA,CAAM,CAAN,CAC9B0nD,EAAA,CAAgB1nD,CAAA,CAAM,CAAN,CAOhB,KAAI+nD,EAAe,EAGnBvgC,EAAAsc,iBAAA,CAAwB+jB,CAAxB,CAA6BG,QAAuB,CAACC,CAAD,CAAY,CAAA,IAC1D5rD,CAD0D,CACnDrB,CADmD,CAE1DktD,EAAejnC,CAAA,CAAS,CAAT,CAF2C,CAG1DknC,CAH0D
,CAM1DC,EAAe,EAN2C,CAO1DC,CAP0D,CAQ1D/mC,CAR0D,CAS1D/lB,CAT0D,CASrDY,CATqD,CAY1DmsD,CAZ0D,CAa1DzgD,CAb0D,CAc1D0gD,EAAiB,EAIrB,IAAI1tD,EAAA,CAAYotD,CAAZ,CAAJ,CACEK,CACA,CADiBL,CACjB,CAAAO,CAAA,CAAclB,CAAd,EAAgCC,CAFlC,KAGO,CACLiB,CAAA,CAAclB,CAAd,EAAgCE,CAEhCc,EAAA,CAAiB,EACjB,KAAK/sD,CAAL,GAAY0sD,EAAZ,CACMA,CAAAxsD,eAAA,CAA0BF,CAA1B,CAAJ,EAAuD,GAAvD,EAAsCA,CAAA6E,OAAA,CAAW,CAAX,CAAtC,EACEkoD,CAAAzsD,KAAA,CAAoBN,CAApB,CAGJ+sD,EAAAxsD,KAAA,EATK,CAYPusD,CAAA,CAAcC,CAAAttD,OAGdA,EAAA,CAASutD,CAAAvtD,OAAT,CAAiCstD,CAAAttD,OACjC,KAAIqB,CAAJ,CAAY,CAAZ,CAAeA,CAAf,CAAuBrB,CAAvB,CAA+BqB,CAAA,EAA/B,CAKC,GAJAd,CAIG,CAJI0sD,CAAD,GAAgBK,CAAhB,CAAkCjsD,CAAlC,CAA0CisD,CAAA,CAAejsD,CAAf,CAI7C,CAHHF,CAGG,CAHK8rD,CAAA,CAAW1sD,CAAX,CAGL,CAFHktD,CAEG,CAFSD,CAAA,CAAYjtD,CAAZ,CAAiBY,CAAjB,CAAwBE,CAAxB,CAET,CADH8J,EAAA,CAAwBsiD,CAAxB,CAAmC,eAAnC,CACG,CAAAV,CAAAtsD,eAAA,CAA4BgtD,CAA5B,CAAH,CACE5gD,CAGA,CAHQkgD,CAAA,CAAaU,CAAb,CAGR,CAFA,OAAOV,CAAA,CAAaU,CAAb,CAEP,CADAL,CAAA,CAAaK,CAAb,CACA;AAD0B5gD,CAC1B,CAAA0gD,CAAA,CAAelsD,CAAf,CAAA,CAAwBwL,CAJ1B,KAKO,CAAA,GAAIugD,CAAA3sD,eAAA,CAA4BgtD,CAA5B,CAAJ,CAML,KAJArtD,EAAA,CAAQmtD,CAAR,CAAwB,QAAQ,CAAC1gD,CAAD,CAAQ,CAClCA,CAAJ,EAAaA,CAAAjD,MAAb,GAA0BmjD,CAAA,CAAalgD,CAAAy5B,GAAb,CAA1B,CAAmDz5B,CAAnD,CADsC,CAAxC,CAIM,CAAAs/C,CAAA,CAAe,OAAf,CACiI54B,CADjI,CACmJk6B,CADnJ,CAAN,CAIAF,CAAA,CAAelsD,CAAf,CAAA,CAAwB,IAAMosD,CAAN,CACxBL,EAAA,CAAaK,CAAb,CAAA,CAA0B,CAAA,CAXrB,CAgBR,IAAKltD,CAAL,GAAYwsD,EAAZ,CAEMA,CAAAtsD,eAAA,CAA4BF,CAA5B,CAAJ,GACEsM,CAIA,CAJQkgD,CAAA,CAAaxsD,CAAb,CAIR,CAHA6wB,CAGA,CAHmB3lB,EAAA,CAAiBoB,CAAA1F,MAAjB,CAGnB,CAFA4d,CAAA+5B,MAAA,CAAe1tB,CAAf,CAEA,CADAhxB,CAAA,CAAQgxB,CAAR,CAA0B,QAAQ,CAACnqB,CAAD,CAAU,CAAEA,CAAA,aAAA,CAAsB,CAAA,CAAxB,CAA5C,CACA,CAAA4F,CAAAjD,MAAA8L,SAAA,EALF,CAUGrU,EAAA,CAAQ,CAAb,KAAgBrB,CAAhB,CAAyBstD,CAAAttD,OAAzB,CAAgDqB,CAAhD,CAAwDrB,CAAxD,CAAgEqB,CAAA,EAAhE,CAAyE,CACvEd,CAAA,CAAO0sD,CAAD,GAAgBK,CAAhB,CAAkCjsD,CAAlC,CAA0CisD,CAAA,CAAejsD,CAAf,CAChDF,EAAA,CAAQ8rD,CAAA,CAAW1sD,CAAX,CACRsM,EAAA,CAAQ0gD,CAAA,CAAelsD,CAAf,CACJksD,EAAA,CAAelsD,CAAf,CAAuB,CAAvB,CAAJ,GAA+B6rD,CAA/B,CAA0DK,CAAA1gD,CAAexL,CAAfwL,CAAuB,CAAvBA,CAwD3D1F,MAAA,CAxD2DomD,CAAA1gD,CAAexL,CAAfwL,CAAuB,CAAvBA,CAwD/C1F,MAAAnH,OAAZ,CAAiC,CAAjC,CAxDC,CAEA,IAAI6M,CAAAjD,MAAJ,CAAiB,CAGf0c,CAAA,CAAazZ,CAAAjD,MAEbujD,EAAA,CAAWD,CACX,GACEC,EAAA,CAAWA,CAAAthD,YADb,OAEQshD,CAFR,EAEoBA,CAAA,aAFpB,CAIkBtgD,EAwCrB1F,MAAA,CAAY,CAAZ,CAxCG,EAA4BgmD,CAA5B,EAEEpoC,CAAAg6B,KAAA,CAActzC,EAAA,CAAiBoB,CAAA1F,MAAjB,CAAd,CAA6C,IAA7C,CAAmDD,CAAA,CAAOgmD,CAAP,CAAnD,CAEFA,EAAA,CAA2BrgD,CAwC9B1F,MAAA,CAxC8B0F,CAwClB1F,MAAAnH,OAAZ,CAAiC,CAAjC,CAtDkB,CAAjB,IAiBEsmB,EAAA,CAAakG,CAAA3F,KAAA,EAGfP;CAAA,CAAWmmC,CAAX,CAAA,CAA8BtrD,CAC1BurD,EAAJ,GAAmBpmC,CAAA,CAAWomC,CAAX,CAAnB,CAA+CnsD,CAA/C,CACA+lB,EAAA8zB,OAAA,CAAoB/4C,CACpBilB,EAAAonC,OAAA,CAA+B,CAA/B,GAAqBrsD,CACrBilB,EAAAqnC,MAAA,CAAoBtsD,CAApB,GAA+BgsD,CAA/B,CAA6C,CAC7C/mC,EAAAsnC,QAAA,CAAqB,EAAEtnC,CAAAonC,OAAF,EAAuBpnC,CAAAqnC,MAAvB,CAErBrnC,EAAAunC,KAAA,CAAkB,EAAEvnC,CAAAwnC,MAAF,CAAmC,CAAnC,IAAsBzsD,CAAtB,CAA4B,CAA5B,EAGbwL,EAAAjD,MAAL,EACEygD,CAAA,CAAY/jC,CAAZ,CAAwB,QAAQ,CAACnf,CAAD,CAAQ,CACtCA,CAAA,CAAMA,CAAAnH,OAAA,EAAN,CAAA,CAAwBN,CAAA+tB,cAAA,CAAuB,iBAAvB,CAA2C8F,CAA3C,CAAwD,GAAxD,CACxBxO,EAAA85B,MAAA,CAAe13C,CAAf,CAAsB,IAAtB,CAA4BD,CAAA,CAAOgmD,CAAP,CAA5B,CACAA,EAAA,CAAe/lD,CACf0F,EAAAjD,MAAA,CAAc0c,CAIdzZ,EAAA1F,MAAA,CAAcA,CACdimD,EAAA,CAAavgD,CAAAy5B,GAAb,CAAA,CAAyBz5B,CATa,CAAxC,CArCqE,CAkDzEkgD,CAAA,CAAeK,CA7H+C,CAAhE,CAlDsD,CALrD,CAHiE,CAAlD,CArzBxB,CA8oCIr+C,GAAkB,CAAC,UAAD,CAAa,QAAQ,CAACgW,CAAD,CAAW,CACpD,MAAO,SAAQ,CAACnb,CAAD,CAAQ3C,
CAAR,CAAiBtD,CAAjB,CAAuB,CACpCiG,CAAAlF,OAAA,CAAaf,CAAAoqD,OAAb,CAA0BC,QAA0B,CAAC7sD,CAAD,CAAO,CACzD4jB,CAAA,CAASle,EAAA,CAAU1F,CAAV,CAAA,CAAmB,aAAnB,CAAmC,UAA5C,CAAA,CAAwD8F,CAAxD,CAAiE,SAAjE,CADyD,CAA3D,CADoC,CADc,CAAhC,CA9oCtB,CA0yCIuH,GAAkB,CAAC,UAAD,CAAa,QAAQ,CAACuW,CAAD,CAAW,CACpD,MAAO,SAAQ,CAACnb,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB,CACpCiG,CAAAlF,OAAA,CAAaf,CAAAsqD,OAAb,CAA0BC,QAA0B,CAAC/sD,CAAD,CAAO,CACzD4jB,CAAA,CAASle,EAAA,CAAU1F,CAAV,CAAA,CAAmB,UAAnB,CAAgC,aAAzC,CAAA,CAAwD8F,CAAxD;AAAiE,SAAjE,CADyD,CAA3D,CADoC,CADc,CAAhC,CA1yCtB,CAg2CI+H,GAAmBmmC,EAAA,CAAY,QAAQ,CAACvrC,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB,CAChEiG,CAAAlF,OAAA,CAAaf,CAAAwqD,QAAb,CAA2BC,QAA2B,CAACC,CAAD,CAAYC,CAAZ,CAAuB,CACvEA,CAAJ,EAAkBD,CAAlB,GAAgCC,CAAhC,EACEluD,CAAA,CAAQkuD,CAAR,CAAmB,QAAQ,CAACjoD,CAAD,CAAMkoC,CAAN,CAAa,CAAEtnC,CAAA40C,IAAA,CAAYtN,CAAZ,CAAmB,EAAnB,CAAF,CAAxC,CAEE8f,EAAJ,EAAepnD,CAAA40C,IAAA,CAAYwS,CAAZ,CAJ4D,CAA7E,CAKG,CAAA,CALH,CADgE,CAA3C,CAh2CvB,CAy+CIp/C,GAAoB,CAAC,UAAD,CAAa,QAAQ,CAAC8V,CAAD,CAAW,CACtD,MAAO,UACK,IADL,SAEI,UAFJ,YAKO,CAAC,QAAD,CAAWwpC,QAA2B,EAAG,CACpD,IAAAC,MAAA,CAAa,EADuC,CAAzC,CALP,MAQCtsC,QAAQ,CAACtY,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB4qD,CAAvB,CAA2C,CAAA,IAEnDE,EAAsB,EAF6B,CAGnDC,EAAmB,EAHgC,CAInDpE,EAAmB,EAJgC,CAKnDqE,EAAiB,EAErB/kD,EAAAlF,OAAA,CANgBf,CAAAirD,SAMhB,EANiCjrD,CAAAsc,GAMjC,CAAwB4uC,QAA4B,CAAC1tD,CAAD,CAAQ,CAAA,IACtDH,CADsD,CACnD6V,CACF7V,EAAA,CAAI,CAAT,KAAY6V,CAAZ,CAAiByzC,CAAAtqD,OAAjB,CAA0CgB,CAA1C,CAA8C6V,CAA9C,CAAkD,EAAE7V,CAApD,CACEspD,CAAA,CAAiBtpD,CAAjB,CAAA8hB,OAAA,EAIG9hB,EAAA,CAFLspD,CAAAtqD,OAEK,CAFqB,CAE1B,KAAY6W,CAAZ,CAAiB83C,CAAA3uD,OAAjB,CAAwCgB,CAAxC,CAA4C6V,CAA5C,CAAgD,EAAE7V,CAAlD,CAAqD,CACnD,IAAIu7C,EAAWmS,CAAA,CAAiB1tD,CAAjB,CACf2tD,EAAA,CAAe3tD,CAAf,CAAA0U,SAAA,EACA40C,EAAA,CAAiBtpD,CAAjB,CAAA,CAAsBu7C,CACtBx3B,EAAA+5B,MAAA,CAAevC,CAAf,CAAyB,QAAQ,EAAG,CAClC+N,CAAAnmD,OAAA,CAAwBnD,CAAxB,CAA2B,CAA3B,CADkC,CAApC,CAJmD,CASrD0tD,CAAA1uD,OAAA,CAA0B,CAC1B2uD,EAAA3uD,OAAA;AAAwB,CAExB,IAAKyuD,CAAL,CAA2BF,CAAAC,MAAA,CAAyB,GAAzB,CAA+BrtD,CAA/B,CAA3B,EAAoEotD,CAAAC,MAAA,CAAyB,GAAzB,CAApE,CACE5kD,CAAAygC,MAAA,CAAY1mC,CAAAmrD,OAAZ,CACA,CAAA1uD,CAAA,CAAQquD,CAAR,CAA6B,QAAQ,CAACM,CAAD,CAAqB,CACxD,IAAIC,EAAgBplD,CAAAid,KAAA,EACpB8nC,EAAA9tD,KAAA,CAAoBmuD,CAApB,CACAD,EAAA/nC,WAAA,CAA8BgoC,CAA9B,CAA6C,QAAQ,CAACC,CAAD,CAAc,CACjE,IAAIC,EAASH,CAAA9nD,QAEbynD,EAAA7tD,KAAA,CAAsBouD,CAAtB,CACAlqC,EAAA85B,MAAA,CAAeoQ,CAAf,CAA4BC,CAAA3sD,OAAA,EAA5B,CAA6C2sD,CAA7C,CAJiE,CAAnE,CAHwD,CAA1D,CArBwD,CAA5D,CAPuD,CARpD,CAD+C,CAAhC,CAz+CxB,CA8hDIhgD,GAAwBimC,EAAA,CAAY,YAC1B,SAD0B,UAE5B,GAF4B,SAG7B,WAH6B,MAIhCjzB,QAAQ,CAACtY,CAAD,CAAQ3C,CAAR,CAAiBigB,CAAjB,CAAwBqwB,CAAxB,CAA8B8S,CAA9B,CAA2C,CACvD9S,CAAAiX,MAAA,CAAW,GAAX,CAAiBtnC,CAAAioC,aAAjB,CAAA,CAAwC5X,CAAAiX,MAAA,CAAW,GAAX,CAAiBtnC,CAAAioC,aAAjB,CAAxC,EAAgF,EAChF5X,EAAAiX,MAAA,CAAW,GAAX,CAAiBtnC,CAAAioC,aAAjB,CAAAtuD,KAAA,CAA0C,YAAcwpD,CAAd,SAAoCpjD,CAApC,CAA1C,CAFuD,CAJnB,CAAZ,CA9hD5B,CAwiDIkI,GAA2BgmC,EAAA,CAAY,YAC7B,SAD6B,UAE/B,GAF+B,SAGhC,WAHgC,MAInCjzB,QAAQ,CAACtY,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB4zC,CAAvB,CAA6B8S,CAA7B,CAA0C,CACtD9S,CAAAiX,MAAA,CAAW,GAAX,CAAA,CAAmBjX,CAAAiX,MAAA,CAAW,GAAX,CAAnB;AAAsC,EACtCjX,EAAAiX,MAAA,CAAW,GAAX,CAAA3tD,KAAA,CAAqB,YAAcwpD,CAAd,SAAoCpjD,CAApC,CAArB,CAFsD,CAJf,CAAZ,CAxiD/B,CAwmDIoI,GAAwB8lC,EAAA,CAAY,MAChCjzB,QAAQ,CAACsK,CAAD,CAASvG,CAAT,CAAmBmpC,CAAnB,CAA2BlrC,CAA3B,CAAuCmmC,CAAvC,CAAoD,CAChE,GAAI,CAACA,CAAL,CACE,KAAMzqD,EAAA,CAAO,cAAP,CAAA,CAAuB,QAAvB,CAILoH,EAAA,CAAYif,CAAZ,CAJK,CAAN,CAOFokC,CAAA,CAAY,QAAQ,CAACljD,CAAD,CAAQ,CAC1B8e,CAAA7e,MAAA,EACA6e,EAAA1e,OAAA,CAAgBJ,CAAh
B,CAF0B,CAA5B,CATgE,CAD5B,CAAZ,CAxmD5B,CA0pDIwG,GAAkB,CAAC,gBAAD,CAAmB,QAAQ,CAACgX,CAAD,CAAiB,CAChE,MAAO,UACK,GADL,UAEK,CAAA,CAFL,SAGI9a,QAAQ,CAAC5C,CAAD,CAAUtD,CAAV,CAAgB,CACd,kBAAjB,EAAIA,CAAAoR,KAAJ,EAKE4P,CAAA/L,IAAA,CAJkBjV,CAAA2iC,GAIlB,CAFWr/B,CAAA,CAAQ,CAAR,CAAAkpB,KAEX,CAN6B,CAH5B,CADyD,CAA5C,CA1pDtB,CA0qDIk/B,GAAkBzvD,CAAA,CAAO,WAAP,CA1qDtB,CAizDIwP,GAAqBxM,CAAA,CAAQ,UAAY,CAAA,CAAZ,CAAR,CAjzDzB,CAmzDIgL,GAAkB,CAAC,UAAD,CAAa,QAAb,CAAuB,QAAQ,CAAC49C,CAAD,CAAa5mC,CAAb,CAAqB,CAAA,IAEpE0qC,EAAoB,wMAFgD;AAGpEC,EAAgB,eAAgB9sD,CAAhB,CAGpB,OAAO,UACK,GADL,SAEI,CAAC,QAAD,CAAW,UAAX,CAFJ,YAGO,CAAC,UAAD,CAAa,QAAb,CAAuB,QAAvB,CAAiC,QAAQ,CAACwjB,CAAD,CAAWuG,CAAX,CAAmB4iC,CAAnB,CAA2B,CAAA,IAC1EvpD,EAAO,IADmE,CAE1E2pD,EAAa,EAF6D,CAG1EC,EAAcF,CAH4D,CAK1EG,CAGJ7pD,EAAA8pD,UAAA,CAAiBP,CAAAhH,QAGjBviD,EAAA+pD,KAAA,CAAYC,QAAQ,CAACC,CAAD,CAAeC,CAAf,CAA4BC,CAA5B,CAA4C,CAC9DP,CAAA,CAAcK,CAEdJ,EAAA,CAAgBM,CAH8C,CAOhEnqD,EAAAoqD,UAAA,CAAiBC,QAAQ,CAAC/uD,CAAD,CAAQ,CAC/BgK,EAAA,CAAwBhK,CAAxB,CAA+B,gBAA/B,CACAquD,EAAA,CAAWruD,CAAX,CAAA,CAAoB,CAAA,CAEhBsuD,EAAA9W,WAAJ,EAA8Bx3C,CAA9B,GACE8kB,CAAA5f,IAAA,CAAalF,CAAb,CACA,CAAIuuD,CAAAntD,OAAA,EAAJ,EAA4BmtD,CAAA5sC,OAAA,EAF9B,CAJ+B,CAWjCjd,EAAAsqD,aAAA,CAAoBC,QAAQ,CAACjvD,CAAD,CAAQ,CAC9B,IAAAkvD,UAAA,CAAelvD,CAAf,CAAJ,GACE,OAAOquD,CAAA,CAAWruD,CAAX,CACP,CAAIsuD,CAAA9W,WAAJ,EAA8Bx3C,CAA9B,EACE,IAAAmvD,oBAAA,CAAyBnvD,CAAzB,CAHJ,CADkC,CAUpC0E,EAAAyqD,oBAAA,CAA2BC,QAAQ,CAAClqD,CAAD,CAAM,CACnCmqD,CAAAA,CAAa,IAAbA,CAAoBl4C,EAAA,CAAQjS,CAAR,CAApBmqD,CAAmC,IACvCd,EAAArpD,IAAA,CAAkBmqD,CAAlB,CACAvqC,EAAAs3B,QAAA,CAAiBmS,CAAjB,CACAzpC,EAAA5f,IAAA,CAAamqD,CAAb,CACAd,EAAAhsD,KAAA,CAAmB,UAAnB;AAA+B,CAAA,CAA/B,CALuC,CASzCmC,EAAAwqD,UAAA,CAAiBI,QAAQ,CAACtvD,CAAD,CAAQ,CAC/B,MAAOquD,EAAA/uD,eAAA,CAA0BU,CAA1B,CADwB,CAIjCqrB,EAAA+d,IAAA,CAAW,UAAX,CAAuB,QAAQ,EAAG,CAEhC1kC,CAAAyqD,oBAAA,CAA2B7tD,CAFK,CAAlC,CApD8E,CAApE,CAHP,MA6DCyf,QAAQ,CAACtY,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuBklD,CAAvB,CAA8B,CA0C1C6H,QAASA,EAAa,CAAC9mD,CAAD,CAAQ+mD,CAAR,CAAuBlB,CAAvB,CAAoCmB,CAApC,CAAgD,CACpEnB,CAAA1W,QAAA,CAAsB8X,QAAQ,EAAG,CAC/B,IAAI1H,EAAYsG,CAAA9W,WAEZiY,EAAAP,UAAA,CAAqBlH,CAArB,CAAJ,EACMuG,CAAAntD,OAAA,EAEJ,EAF4BmtD,CAAA5sC,OAAA,EAE5B,CADA6tC,CAAAtqD,IAAA,CAAkB8iD,CAAlB,CACA,CAAkB,EAAlB,GAAIA,CAAJ,EAAsB2H,CAAAptD,KAAA,CAAiB,UAAjB,CAA6B,CAAA,CAA7B,CAHxB,EAKMb,CAAA,CAAYsmD,CAAZ,CAAJ,EAA8B2H,CAA9B,CACEH,CAAAtqD,IAAA,CAAkB,EAAlB,CADF,CAGEuqD,CAAAN,oBAAA,CAA+BnH,CAA/B,CAX2B,CAgBjCwH,EAAA1wC,GAAA,CAAiB,QAAjB,CAA2B,QAAQ,EAAG,CACpCrW,CAAAG,OAAA,CAAa,QAAQ,EAAG,CAClB2lD,CAAAntD,OAAA,EAAJ,EAA4BmtD,CAAA5sC,OAAA,EAC5B2sC,EAAA7W,cAAA,CAA0B+X,CAAAtqD,IAAA,EAA1B,CAFsB,CAAxB,CADoC,CAAtC,CAjBoE,CAyBtE0qD,QAASA,EAAe,CAACnnD,CAAD,CAAQ+mD,CAAR,CAAuBpZ,CAAvB,CAA6B,CACnD,IAAIyZ,CACJzZ,EAAAwB,QAAA,CAAeC,QAAQ,EAAG,CACxB,IAAIiY,EAAQ,IAAIx4C,EAAJ,CAAY8+B,CAAAoB,WAAZ,CACZv4C,EAAA,CAAQuwD,CAAA/sD,KAAA,CAAmB,QAAnB,CAAR;AAAsC,QAAQ,CAACq3C,CAAD,CAAS,CACrDA,CAAAsB,SAAA,CAAkBz5C,CAAA,CAAUmuD,CAAA52C,IAAA,CAAU4gC,CAAA95C,MAAV,CAAV,CADmC,CAAvD,CAFwB,CAS1ByI,EAAAlF,OAAA,CAAawsD,QAA4B,EAAG,CACrC7rD,EAAA,CAAO2rD,CAAP,CAAiBzZ,CAAAoB,WAAjB,CAAL,GACEqY,CACA,CADW9rD,EAAA,CAAYqyC,CAAAoB,WAAZ,CACX,CAAApB,CAAAwB,QAAA,EAFF,CAD0C,CAA5C,CAOA4X,EAAA1wC,GAAA,CAAiB,QAAjB,CAA2B,QAAQ,EAAG,CACpCrW,CAAAG,OAAA,CAAa,QAAQ,EAAG,CACtB,IAAI9F,EAAQ,EACZ7D,EAAA,CAAQuwD,CAAA/sD,KAAA,CAAmB,QAAnB,CAAR,CAAsC,QAAQ,CAACq3C,CAAD,CAAS,CACjDA,CAAAsB,SAAJ,EACEt4C,CAAApD,KAAA,CAAWo6C,CAAA95C,MAAX,CAFmD,CAAvD,CAKAo2C,EAAAqB,cAAA,CAAmB30C,CAAnB,CAPsB,CAAxB,CADoC,CAAtC,CAlBmD,CA+BrDktD,QAASA,EAAc,CAACvnD,CAAD,CAAQ+mD,CAAR,CAAuBpZ,CAAvB,CAA6B,CA6HlD6Z,QAASA,EAAM,EAAG,CAAA,I
AEZC,EAAe,CAAC,EAAD,CAAI,EAAJ,CAFH,CAGZC,EAAmB,CAAC,EAAD,CAHP,CAIZC,CAJY,CAKZC,CALY,CAOZC,CAPY,CAOIC,CAPJ,CAOqBC,CACjCC,EAAAA,CAAara,CAAAyQ,YACbh1B,EAAAA,CAAS6+B,CAAA,CAASjoD,CAAT,CAATopB,EAA4B,EAThB,KAUZpyB,EAAOkxD,CAAA,CAAUnxD,EAAA,CAAWqyB,CAAX,CAAV,CAA+BA,CAV1B,CAYChzB,CAZD,CAaZ+xD,CAbY,CAaA1wD,CACZ4Z,EAAAA,CAAS,EAhCT+2C,EAAAA,CAAc,CAAA,CAClB,IAAI1V,CAAJ,CAEE,GADIsV,CACA,CADara,CAAAyQ,YACb,CAAAiK,CAAA,EAAW9xD,CAAA,CAAQyxD,CAAR,CAAf,CAGE,IAFAI,CAESE,CAFK,IAAIz5C,EAAJ,CAAY,EAAZ,CAELy5C,CADLj3C,CACKi3C,CADI,EACJA,CAAAA,CAAAA,CAAa,CAAtB,CAAyBA,CAAzB,CAAsCN,CAAA5xD,OAAtC,CAAyDkyD,CAAA,EAAzD,CACEj3C,CAAA,CAAOk3C,CAAP,CACA,CADoBP,CAAA,CAAWM,CAAX,CACpB,CAAAF,CAAAp5C,IAAA,CAAgBq5C,CAAA,CAAQroD,CAAR,CAAeqR,CAAf,CAAhB,CAAwC22C,CAAA,CAAWM,CAAX,CAAxC,CALJ,KAQEF,EAAA,CAAc,IAAIv5C,EAAJ,CAAYm5C,CAAZ,CAGlB,EAAA,CAAOI,CAIS;IAiBZI,CAjBY,CAkBZnrD,CAKJ,KAAK5F,CAAL,CAAa,CAAb,CAAgBrB,CAAA,CAASY,CAAAZ,OAAT,CAAsBqB,CAAtB,CAA8BrB,CAA9C,CAAsDqB,CAAA,EAAtD,CAA+D,CAE7Dd,CAAA,CAAMc,CACN,IAAIywD,CAAJ,CAAa,CACXvxD,CAAA,CAAMK,CAAA,CAAKS,CAAL,CACN,IAAuB,GAAvB,GAAKd,CAAA6E,OAAA,CAAW,CAAX,CAAL,CAA6B,QAC7B6V,EAAA,CAAO62C,CAAP,CAAA,CAAkBvxD,CAHP,CAMb0a,CAAA,CAAOk3C,CAAP,CAAA,CAAoBn/B,CAAA,CAAOzyB,CAAP,CAEpBgxD,EAAA,CAAkBc,CAAA,CAAUzoD,CAAV,CAAiBqR,CAAjB,CAAlB,EAA8C,EAC9C,EAAMu2C,CAAN,CAAoBH,CAAA,CAAaE,CAAb,CAApB,IACEC,CACA,CADcH,CAAA,CAAaE,CAAb,CACd,CAD8C,EAC9C,CAAAD,CAAAzwD,KAAA,CAAsB0wD,CAAtB,CAFF,CAIIjV,EAAJ,CACEC,CADF,CACaz5C,CAAA,CACTkvD,CAAAlvC,OAAA,CAAmBmvC,CAAA,CAAUA,CAAA,CAAQroD,CAAR,CAAeqR,CAAf,CAAV,CAAmCrY,CAAA,CAAQgH,CAAR,CAAeqR,CAAf,CAAtD,CADS,CADb,EAKMg3C,CAAJ,EACMK,CAEJ,CAFgB,EAEhB,CADAA,CAAA,CAAUH,CAAV,CACA,CADuBP,CACvB,CAAArV,CAAA,CAAW0V,CAAA,CAAQroD,CAAR,CAAe0oD,CAAf,CAAX,GAAyCL,CAAA,CAAQroD,CAAR,CAAeqR,CAAf,CAH3C,EAKEshC,CALF,CAKaqV,CALb,GAK4BhvD,CAAA,CAAQgH,CAAR,CAAeqR,CAAf,CAE5B,CAAA+2C,CAAA,CAAcA,CAAd,EAA6BzV,CAZ/B,CAcAgW,EAAA,CAAQC,CAAA,CAAU5oD,CAAV,CAAiBqR,CAAjB,CAGRs3C,EAAA,CAAQzvD,CAAA,CAAUyvD,CAAV,CAAA,CAAmBA,CAAnB,CAA2B,EACnCf,EAAA3wD,KAAA,CAAiB,IAEXoxD,CAAA,CAAUA,CAAA,CAAQroD,CAAR,CAAeqR,CAAf,CAAV,CAAoC62C,CAAA,CAAUlxD,CAAA,CAAKS,CAAL,CAAV,CAAwBA,CAFjD,OAGRkxD,CAHQ,UAILhW,CAJK,CAAjB,CAlC6D,CAyC1DD,CAAL,GACMmW,CAAJ,EAAiC,IAAjC,GAAkBb,CAAlB,CAEEP,CAAA,CAAa,EAAb,CAAAzvD,QAAA,CAAyB,IAAI,EAAJ,OAAc,EAAd,UAA2B,CAACowD,CAA5B,CAAzB,CAFF,CAGYA,CAHZ,EAKEX,CAAA,CAAa,EAAb,CAAAzvD,QAAA,CAAyB,IAAI,GAAJ,OAAe,EAAf,UAA4B,CAAA,CAA5B,CAAzB,CANJ,CAWKmwD,EAAA,CAAa,CAAlB,KAAqBW,CAArB,CAAmCpB,CAAAtxD,OAAnC,CACK+xD,CADL,CACkBW,CADlB,CAEKX,CAAA,EAFL,CAEmB,CAEjBR,CAAA,CAAkBD,CAAA,CAAiBS,CAAjB,CAGlBP,EAAA,CAAcH,CAAA,CAAaE,CAAb,CAEVoB,EAAA3yD,OAAJ,EAAgC+xD,CAAhC,EAEEN,CAMA,CANiB,SACNmB,CAAAzrD,MAAA,EAAAxD,KAAA,CAA8B,OAA9B;AAAuC4tD,CAAvC,CADM,OAERC,CAAAe,MAFQ,CAMjB,CAFAb,CAEA,CAFkB,CAACD,CAAD,CAElB,CADAkB,CAAA9xD,KAAA,CAAuB6wD,CAAvB,CACA,CAAAf,CAAAppD,OAAA,CAAqBkqD,CAAAxqD,QAArB,CARF,GAUEyqD,CAIA,CAJkBiB,CAAA,CAAkBZ,CAAlB,CAIlB,CAHAN,CAGA,CAHiBC,CAAA,CAAgB,CAAhB,CAGjB,CAAID,CAAAc,MAAJ,EAA4BhB,CAA5B,EACEE,CAAAxqD,QAAAtD,KAAA,CAA4B,OAA5B,CAAqC8tD,CAAAc,MAArC,CAA4DhB,CAA5D,CAfJ,CAmBAa,EAAA,CAAc,IACV/wD,EAAA,CAAQ,CAAZ,KAAerB,CAAf,CAAwBwxD,CAAAxxD,OAAxB,CAA4CqB,CAA5C,CAAoDrB,CAApD,CAA4DqB,CAAA,EAA5D,CACE45C,CACA,CADSuW,CAAA,CAAYnwD,CAAZ,CACT,CAAA,CAAKswD,CAAL,CAAsBD,CAAA,CAAgBrwD,CAAhB,CAAsB,CAAtB,CAAtB,GAEE+wD,CAQA,CARcT,CAAA1qD,QAQd,CAPI0qD,CAAAY,MAOJ,GAP6BtX,CAAAsX,MAO7B,EANEH,CAAAjiC,KAAA,CAAiBwhC,CAAAY,MAAjB,CAAwCtX,CAAAsX,MAAxC,CAMF,CAJIZ,CAAArrB,GAIJ,GAJ0B2U,CAAA3U,GAI1B,EAHE8rB,CAAA/rD,IAAA,CAAgBsrD,CAAArrB,GAAhB,CAAoC2U,CAAA3U,GAApC,CAGF,CAAI8rB,CAAA,CAAY,CAAZ,CAAA7V,SAAJ,GAAgCtB,CAAAsB,SAAhC,GACE6V,CAAA1uD,KAAA,CAAiB,UAA
jB,CAA8BiuD,CAAApV,SAA9B,CAAwDtB,CAAAsB,SAAxD,CACA,CAAInkC,CAAJ,EAIEg6C,CAAA1uD,KAAA,CAAiB,UAAjB,CAA6BiuD,CAAApV,SAA7B,CANJ,CAVF,GAuBoB,EAAlB,GAAItB,CAAA3U,GAAJ,EAAwBmsB,CAAxB,CAEExrD,CAFF,CAEYwrD,CAFZ,CAOGpsD,CAAAY,CAAAZ,CAAUwsD,CAAA1rD,MAAA,EAAVd,KAAA,CACQ40C,CAAA3U,GADR,CAAA5iC,KAAA,CAES,UAFT,CAEqBu3C,CAAAsB,SAFrB,CAAA54C,KAAA,CAGS,UAHT,CAGqBs3C,CAAAsB,SAHrB,CAAApsB,KAAA,CAIS8qB,CAAAsX,MAJT,CAkBH,CAXAb,CAAA7wD,KAAA,CAAsC,SACzBoG,CADyB,OAE3Bg0C,CAAAsX,MAF2B;GAG9BtX,CAAA3U,GAH8B,UAIxB2U,CAAAsB,SAJwB,CAAtC,CAWA,CALI6V,CAAJ,CACEA,CAAA3U,MAAA,CAAkBx2C,CAAlB,CADF,CAGEwqD,CAAAxqD,QAAAM,OAAA,CAA8BN,CAA9B,CAEF,CAAAmrD,CAAA,CAAcnrD,CAhDhB,CAqDF,KADA5F,CAAA,EACA,CAAMqwD,CAAA1xD,OAAN,CAA+BqB,CAA/B,CAAA,CACEqwD,CAAA7zC,IAAA,EAAA5W,QAAA6b,OAAA,EAnFe,CAuFnB,IAAA,CAAM6vC,CAAA3yD,OAAN,CAAiC+xD,CAAjC,CAAA,CACEY,CAAA90C,IAAA,EAAA,CAAwB,CAAxB,CAAA5W,QAAA6b,OAAA,EArKc,CA5HlB,IAAI9d,CAEJ,IAAI,EAAEA,CAAF,CAAU8tD,CAAA9tD,MAAA,CAAiBsqD,CAAjB,CAAV,CAAJ,CACE,KAAMD,GAAA,CAAgB,MAAhB,CAIJyD,CAJI,CAIQ9rD,EAAA,CAAY2pD,CAAZ,CAJR,CAAN,CAJgD,IAW9C6B,EAAY5tC,CAAA,CAAO5f,CAAA,CAAM,CAAN,CAAP,EAAmBA,CAAA,CAAM,CAAN,CAAnB,CAXkC,CAY9CmtD,EAAYntD,CAAA,CAAM,CAAN,CAAZmtD,EAAwBntD,CAAA,CAAM,CAAN,CAZsB,CAa9C8sD,EAAU9sD,CAAA,CAAM,CAAN,CAboC,CAc9CqtD,EAAYztC,CAAA,CAAO5f,CAAA,CAAM,CAAN,CAAP,EAAmB,EAAnB,CAdkC,CAe9CpC,EAAUgiB,CAAA,CAAO5f,CAAA,CAAM,CAAN,CAAA,CAAWA,CAAA,CAAM,CAAN,CAAX,CAAsBmtD,CAA7B,CAfoC,CAgB9CN,EAAWjtC,CAAA,CAAO5f,CAAA,CAAM,CAAN,CAAP,CAhBmC,CAkB9CitD,EADQjtD,CAAA+tD,CAAM,CAANA,CACE,CAAQnuC,CAAA,CAAO5f,CAAA,CAAM,CAAN,CAAP,CAAR,CAA2B,IAlBS,CAuB9C2tD,EAAoB,CAAC,CAAC,SAAUhC,CAAV,OAA+B,EAA/B,CAAD,CAAD,CAEpB8B,EAAJ,GAEEjH,CAAA,CAASiH,CAAT,CAAA,CAAqB7oD,CAArB,CAQA,CAJA6oD,CAAAxgC,YAAA,CAAuB,UAAvB,CAIA,CAAAwgC,CAAA3vC,OAAA,EAVF,CAcA6tC,EAAAvpD,MAAA,EAEAupD,EAAA1wC,GAAA,CAAiB,QAAjB,CAA2B,QAAQ,EAAG,CACpCrW,CAAAG,OAAA,CAAa,QAAQ,EAAG,CAAA,IAClBynD,CADkB,CAElBvE,EAAa4E,CAAA,CAASjoD,CAAT,CAAbqjD,EAAgC,EAFd,CAGlBhyC,EAAS,EAHS,CAIlB1a,CAJkB,CAIbY,CAJa,CAISE,CAJT,CAIgB0wD,CAJhB,CAI4B/xD,CAJ5B,CAIoC0yD,CAJpC,CAIiDR,CAEvE,IAAI5V,CAAJ,CAEE,IADAn7C,CACqB,CADb,EACa,CAAhB4wD,CAAgB,CAAH,CAAG,CAAAW,CAAA,CAAcC,CAAA3yD,OAAnC,CACK+xD,CADL;AACkBW,CADlB,CAEKX,CAAA,EAFL,CAME,IAFAP,CAEe,CAFDmB,CAAA,CAAkBZ,CAAlB,CAEC,CAAX1wD,CAAW,CAAH,CAAG,CAAArB,CAAA,CAASwxD,CAAAxxD,OAAxB,CAA4CqB,CAA5C,CAAoDrB,CAApD,CAA4DqB,CAAA,EAA5D,CACE,IAAI,CAAC2xD,CAAD,CAAiBxB,CAAA,CAAYnwD,CAAZ,CAAA4F,QAAjB,EAA6C,CAA7C,CAAAs1C,SAAJ,CAA8D,CAC5Dh8C,CAAA,CAAMyyD,CAAA3sD,IAAA,EACFyrD,EAAJ,GAAa72C,CAAA,CAAO62C,CAAP,CAAb,CAA+BvxD,CAA/B,CACA,IAAI0xD,CAAJ,CACE,IAAKC,CAAL,CAAkB,CAAlB,CAAqBA,CAArB,CAAkCjF,CAAAjtD,OAAlC,GACEib,CAAA,CAAOk3C,CAAP,CACI,CADgBlF,CAAA,CAAWiF,CAAX,CAChB,CAAAD,CAAA,CAAQroD,CAAR,CAAeqR,CAAf,CAAA,EAA0B1a,CAFhC,EAAqD2xD,CAAA,EAArD,EADF,IAMEj3C,EAAA,CAAOk3C,CAAP,CAAA,CAAoBlF,CAAA,CAAW1sD,CAAX,CAEtBY,EAAAN,KAAA,CAAW+B,CAAA,CAAQgH,CAAR,CAAeqR,CAAf,CAAX,CAX4D,CAA9D,CATN,IA0BE,IADA1a,CACI,CADEowD,CAAAtqD,IAAA,EACF,CAAO,GAAP,EAAA9F,CAAJ,CACEY,CAAA,CAAQxB,CADV,KAEO,IAAY,EAAZ,GAAIY,CAAJ,CACLY,CAAA,CAAQ,IADH,KAGL,IAAI8wD,CAAJ,CACE,IAAKC,CAAL,CAAkB,CAAlB,CAAqBA,CAArB,CAAkCjF,CAAAjtD,OAAlC,CAAqDkyD,CAAA,EAArD,CAEE,IADAj3C,CAAA,CAAOk3C,CAAP,CACI,CADgBlF,CAAA,CAAWiF,CAAX,CAChB,CAAAD,CAAA,CAAQroD,CAAR,CAAeqR,CAAf,CAAA,EAA0B1a,CAA9B,CAAmC,CACjCY,CAAA,CAAQyB,CAAA,CAAQgH,CAAR,CAAeqR,CAAf,CACR,MAFiC,CAAnC,CAHJ,IASEA,EAAA,CAAOk3C,CAAP,CAEA,CAFoBlF,CAAA,CAAW1sD,CAAX,CAEpB,CADIuxD,CACJ,GADa72C,CAAA,CAAO62C,CAAP,CACb,CAD+BvxD,CAC/B,EAAAY,CAAA,CAAQyB,CAAA,CAAQgH,CAAR,CAAeqR,CAAf,CAIds8B,EAAAqB,cAAA,CAAmBz3C,CAAnB,CACAiwD,EAAA,EArDsB,CAAxB,CADoC,CAAtC,CA0DA7Z,EAAAwB,QAAA,CAAeqY,CAEfxnD,EAAAk/B
,iBAAA,CAAuB+oB,CAAvB,CAAiCT,CAAjC,CACK9U,EAAL,EACE1yC,CAAAk/B,iBAAA,CAAuB,QAAQ,EAAG,CAAE,MAAOyO,EAAAyQ,YAAT,CAAlC,CAAgEoJ,CAAhE,CAvGgD,CAhGpD,GAAKvI,CAAA,CAAM,CAAN,CAAL,CAAA,CAF0C,IAItC+H,EAAa/H,CAAA,CAAM,CAAN,CACb4G,EAAAA,CAAc5G,CAAA,CAAM,CAAN,CALwB,KAMtCvM,EAAW34C,CAAA24C,SAN2B;AAOtCwW,EAAanvD,CAAAsvD,UAPyB,CAQtCR,EAAa,CAAA,CARyB,CAStC3B,CATsC,CAYtC+B,EAAiB3rD,CAAA,CAAOxH,CAAAgU,cAAA,CAAuB,QAAvB,CAAP,CAZqB,CAatCk/C,EAAkB1rD,CAAA,CAAOxH,CAAAgU,cAAA,CAAuB,UAAvB,CAAP,CAboB,CActCg8C,EAAgBmD,CAAA1rD,MAAA,EAGZnG,EAAAA,CAAI,CAAZ,KAjB0C,IAiB3ByR,EAAWxL,CAAAwL,SAAA,EAjBgB,CAiBIoE,EAAKpE,CAAAzS,OAAnD,CAAoEgB,CAApE,CAAwE6V,CAAxE,CAA4E7V,CAAA,EAA5E,CACE,GAA0B,EAA1B,GAAIyR,CAAA,CAASzR,CAAT,CAAAG,MAAJ,CAA8B,CAC5B2vD,CAAA,CAAc2B,CAAd,CAA2BhgD,CAAAuT,GAAA,CAAYhlB,CAAZ,CAC3B,MAF4B,CAMhC4vD,CAAAhB,KAAA,CAAgBH,CAAhB,CAA6BgD,CAA7B,CAAyC/C,CAAzC,CAGIpT,EAAJ,GACEmT,CAAAxW,SADF,CACyBia,QAAQ,CAAC/xD,CAAD,CAAQ,CACrC,MAAO,CAACA,CAAR,EAAkC,CAAlC,GAAiBA,CAAAnB,OADoB,CADzC,CAMI8yD,EAAJ,CAAgB3B,CAAA,CAAevnD,CAAf,CAAsB3C,CAAtB,CAA+BwoD,CAA/B,CAAhB,CACSnT,CAAJ,CAAcyU,CAAA,CAAgBnnD,CAAhB,CAAuB3C,CAAvB,CAAgCwoD,CAAhC,CAAd,CACAiB,CAAA,CAAc9mD,CAAd,CAAqB3C,CAArB,CAA8BwoD,CAA9B,CAA2CmB,CAA3C,CAjCL,CAF0C,CA7DvC,CANiE,CAApD,CAnzDtB,CAkwEI9iD,GAAkB,CAAC,cAAD,CAAiB,QAAQ,CAAC2W,CAAD,CAAe,CAC5D,IAAI0uC,EAAiB,WACR1wD,CADQ,cAELA,CAFK,CAKrB,OAAO,UACK,GADL,UAEK,GAFL,SAGIoH,QAAQ,CAAC5C,CAAD,CAAUtD,CAAV,CAAgB,CAC/B,GAAId,CAAA,CAAYc,CAAAxC,MAAZ,CAAJ,CAA6B,CAC3B,IAAIivB,EAAgB3L,CAAA,CAAaxd,CAAAkpB,KAAA,EAAb,CAA6B,CAAA,CAA7B,CACfC,EAAL,EACEzsB,CAAAgrB,KAAA,CAAU,OAAV,CAAmB1nB,CAAAkpB,KAAA,EAAnB,CAHyB,CAO7B,MAAO,SAAS,CAACvmB,CAAD,CAAQ3C,CAAR,CAAiBtD,CAAjB,CAAuB,CAAA,IAEjCpB,EAAS0E,CAAA1E,OAAA,EAFwB;AAGjCquD,EAAaruD,CAAAyH,KAAA,CAFIopD,mBAEJ,CAAbxC,EACEruD,CAAAA,OAAA,EAAAyH,KAAA,CAHeopD,mBAGf,CAEFxC,EAAJ,EAAkBA,CAAAjB,UAAlB,CAGE1oD,CAAAvD,KAAA,CAAa,UAAb,CAAyB,CAAA,CAAzB,CAHF,CAKEktD,CALF,CAKeuC,CAGX/iC,EAAJ,CACExmB,CAAAlF,OAAA,CAAa0rB,CAAb,CAA4BijC,QAA+B,CAAC3qB,CAAD,CAASC,CAAT,CAAiB,CAC1EhlC,CAAAgrB,KAAA,CAAU,OAAV,CAAmB+Z,CAAnB,CACIA,EAAJ,GAAeC,CAAf,EAAuBioB,CAAAT,aAAA,CAAwBxnB,CAAxB,CACvBioB,EAAAX,UAAA,CAAqBvnB,CAArB,CAH0E,CAA5E,CADF,CAOEkoB,CAAAX,UAAA,CAAqBtsD,CAAAxC,MAArB,CAGF8F,EAAAgZ,GAAA,CAAW,UAAX,CAAuB,QAAQ,EAAG,CAChC2wC,CAAAT,aAAA,CAAwBxsD,CAAAxC,MAAxB,CADgC,CAAlC,CAxBqC,CARR,CAH5B,CANqD,CAAxC,CAlwEtB,CAmzEI0M,GAAiBjL,CAAA,CAAQ,UACjB,GADiB,UAEjB,CAAA,CAFiB,CAAR,CAKfnD,EAAA0K,QAAA1B,UAAJ,CAEEk5B,OAAAE,IAAA,CAAY,gDAAZ,CAFF,EAp+nBA,CAHAhvB,EAGA,CAHSpT,CAAAoT,OAGT,GAAcA,EAAA/M,GAAAma,GAAd,EACE/Y,CAYA,CAZS2L,EAYT,CAXA7Q,CAAA,CAAO6Q,EAAA/M,GAAP,CAAkB,OACTigB,EAAAnc,MADS,cAEFmc,EAAAgF,aAFE,YAGJhF,EAAA7B,WAHI,UAIN6B,EAAAxc,SAJM;cAKDwc,EAAAwiC,cALC,CAAlB,CAWA,CAFA12C,EAAA,CAAwB,QAAxB,CAAkC,CAAA,CAAlC,CAAwC,CAAA,CAAxC,CAA8C,CAAA,CAA9C,CAEA,CADAA,EAAA,CAAwB,OAAxB,CAAiC,CAAA,CAAjC,CAAwC,CAAA,CAAxC,CAA+C,CAAA,CAA/C,CACA,CAAAA,EAAA,CAAwB,MAAxB,CAAgC,CAAA,CAAhC,CAAuC,CAAA,CAAvC,CAA8C,CAAA,CAA9C,CAbF,EAeE3K,CAfF,CAeW8L,CAi+nBX,CA/9nBA7I,EAAAlD,QA+9nBA,CA/9nBkBC,CA+9nBlB,CAFA4F,EAAA,CAAmB3C,EAAnB,CAEA,CAAAjD,CAAA,CAAOxH,CAAP,CAAA+7C,MAAA,CAAuB,QAAQ,EAAG,CAChCjzC,EAAA,CAAY9I,CAAZ,CAAsB+I,EAAtB,CADgC,CAAlC,CAZA,CA16qBqC,CAAtC,CAAA,CA07qBEhJ,MA17qBF,CA07qBUC,QA17qBV,CA47qBD,EAACD,MAAA0K,QAAAmpD,MAAA,EAAD,EAA2B7zD,MAAA0K,QAAAlD,QAAA,CAAuBvH,QAAvB,CAAAkE,KAAA,CAAsC,MAAtC,CAAA25C,QAAA,CAAsD,oVAAtD;",
+"sources":["angular.js"],
+"names":["window","document","undefined","minErr","isArrayLike","obj","isWindow","length","nodeType","isString","isArray","forEach","iterator","context","key","isFunction","hasOwnProperty","call","sortedKeys","keys","push","sort","forEachSorted","i","reverseParams","iteratorFn","value","nextUid","index","uid","digit","charCodeAt","join","String","fromCharCode","unshift","setHashKey","h","$$hashKey","extend","dst","arguments","int","str","parseInt","inherit","parent","extra","noop","identity","$","valueFn","isUndefined","isDefined","isObject","isNumber","isDate","toString","isRegExp","location","alert","setInterval","isElement","node","nodeName","prop","attr","find","map","results","list","indexOf","array","arrayRemove","splice","copy","source","destination","stackSource","stackDest","$evalAsync","$watch","ngMinErr","result","Date","getTime","RegExp","match","lastIndex","shallowCopy","src","charAt","equals","o1","o2","t1","t2","isNaN","keySet","bind","self","fn","curryArgs","slice","startIndex","apply","concat","toJsonReplacer","val","toJson","pretty","JSON","stringify","fromJson","json","parse","toBoolean","v","lowercase","startingTag","element","jqLite","clone","empty","e","elemHtml","append","html","TEXT_NODE","replace","tryDecodeURIComponent","decodeURIComponent","parseKeyValue","keyValue","key_value","split","toKeyValue","parts","arrayValue","encodeUriQuery","encodeUriSegment","pctEncodeSpaces","encodeURIComponent","angularInit","bootstrap","elements","appElement","module","names","NG_APP_CLASS_REGEXP","name","getElementById","querySelectorAll","exec","className","attributes","modules","doBootstrap","injector","tag","$provide","createInjector","invoke","scope","compile","animate","$apply","data","NG_DEFER_BOOTSTRAP","test","angular","resumeBootstrap","angular.resumeBootstrap","extraModules","snake_case","separator","SNAKE_CASE_REGEXP","letter","pos","toLowerCase","assertArg","arg","reason","assertArgFn","acceptArrayAnnotation","constructor","assertNotHasOwnProperty","getter","path","bindFnToScope","lastInstance","len","getBlockElements","nodes","startNode","endNode","nextSibling","setupModuleLoader","$injectorMinErr","$$minErr","factory","requires","configFn","invokeLater","provider","method","insertMethod","invokeQueue","moduleInstance","runBlocks","config","run","block","publishExternalAPI","version","uppercase","csp","angularModule","$LocaleProvider","ngModule","$$SanitizeUriProvider","$CompileProvider","directive","htmlAnchorDirective","inputDirective","formDirective","scriptDirective","selectDirective","styleDirective","optionDirective","ngBindDirective","ngBindHtmlDirective","ngBindTemplateDirective","ngClassDirective","ngClassEvenDirective","ngClassOddDirective","ngCloakDirective","ngControllerDirective","ngFormDirective","ngHideDirective","ngIfDirective","ngIncludeDirective","ngInitDirective","ngNonBindableDirective","ngPluralizeDirective","ngRepeatDirective","ngShowDirective","ngStyleDirective","ngSwitchDirective","ngSwitchWhenDirective","ngSwitchDefaultDirective","ngOptionsDirective","ngTranscludeDirective","ngModelDirective","ngListDirective","ngChangeDirective","requiredDirective","ngValueDirective","ngIncludeFillContentDirective","ngAttributeAliasDirectives","ngEventDirectives","$AnchorScrollProvider","$AnimateProvider","$BrowserProvider","$CacheFactoryProvider","$ControllerProvider","$DocumentProvider","$ExceptionHandlerProvider","$FilterProvider","$InterpolateProvider","$IntervalProvider","$HttpProvider","$HttpBackendProvider","$LocationProvider","$LogProvider","$ParsePr
ovider","$RootScopeProvider","$QProvider","$SceProvider","$SceDelegateProvider","$SnifferProvider","$TemplateCacheProvider","$TimeoutProvider","$WindowProvider","$$RAFProvider","$$AsyncCallbackProvider","camelCase","SPECIAL_CHARS_REGEXP","_","offset","toUpperCase","MOZ_HACK_REGEXP","jqLitePatchJQueryRemove","dispatchThis","filterElems","getterIfNoArguments","removePatch","param","filter","fireEvent","set","setIndex","setLength","childIndex","children","shift","triggerHandler","childLength","jQuery","originalJqFn","$original","JQLite","trim","jqLiteMinErr","parsed","SINGLE_TAG_REGEXP","fragment","createDocumentFragment","HTML_REGEXP","tmp","appendChild","createElement","TAG_NAME_REGEXP","wrap","wrapMap","_default","innerHTML","XHTML_TAG_REGEXP","removeChild","firstChild","lastChild","j","jj","childNodes","textContent","createTextNode","jqLiteAddNodes","jqLiteClone","cloneNode","jqLiteDealoc","jqLiteRemoveData","jqLiteOff","type","unsupported","events","jqLiteExpandoStore","handle","eventHandler","removeEventListenerFn","expandoId","ng339","expandoStore","jqCache","$destroy","jqId","jqLiteData","isSetter","keyDefined","isSimpleGetter","jqLiteHasClass","selector","getAttribute","jqLiteRemoveClass","cssClasses","setAttribute","cssClass","jqLiteAddClass","existingClasses","root","jqLiteController","jqLiteInheritedData","documentElement","ii","parentNode","host","jqLiteEmpty","getBooleanAttrName","booleanAttr","BOOLEAN_ATTR","BOOLEAN_ELEMENTS","createEventHandler","event","preventDefault","event.preventDefault","returnValue","stopPropagation","event.stopPropagation","cancelBubble","target","srcElement","defaultPrevented","prevent","isDefaultPrevented","event.isDefaultPrevented","eventHandlersCopy","msie","elem","hashKey","nextUidFn","objType","HashMap","isolatedUid","this.nextUid","put","annotate","$inject","fnText","STRIP_COMMENTS","argDecl","FN_ARGS","FN_ARG_SPLIT","FN_ARG","all","underscore","last","modulesToLoad","supportObject","delegate","provider_","providerInjector","instantiate","$get","providerCache","providerSuffix","factoryFn","loadModules","moduleFn","loadedModules","get","_runBlocks","_invokeQueue","invokeArgs","message","stack","createInternalInjector","cache","getService","serviceName","INSTANTIATING","err","locals","args","Type","Constructor","returnedValue","prototype","instance","has","service","$injector","constant","instanceCache","decorator","decorFn","origProvider","orig$get","origProvider.$get","origInstance","instanceInjector","servicename","autoScrollingEnabled","disableAutoScrolling","this.disableAutoScrolling","$window","$location","$rootScope","getFirstAnchor","scroll","hash","elm","scrollIntoView","getElementsByName","scrollTo","autoScrollWatch","autoScrollWatchAction","$$rAF","$timeout","supported","Browser","$log","$sniffer","completeOutstandingRequest","outstandingRequestCount","outstandingRequestCallbacks","pop","error","startPoller","interval","setTimeout","check","pollFns","pollFn","pollTimeout","fireUrlChange","newLocation","lastBrowserUrl","url","urlChangeListeners","listener","rawDocument","history","clearTimeout","pendingDeferIds","isMock","$$completeOutstandingRequest","$$incOutstandingRequestCount","self.$$incOutstandingRequestCount","notifyWhenNoOutstandingRequests","self.notifyWhenNoOutstandingRequests","callback","addPollFn","self.addPollFn","href","baseElement","self.url","replaceState","pushState","urlChangeInit","onUrlChange","self.onUrlChange","on","hashchange","baseHref","self.baseHref","lastCookies","lastCookieString","cookiePath","cookies","self
.cookies","cookieLength","cookie","escape","warn","cookieArray","unescape","substring","defer","self.defer","delay","timeoutId","cancel","self.defer.cancel","deferId","$document","this.$get","cacheFactory","cacheId","options","refresh","entry","freshEnd","staleEnd","n","link","p","nextEntry","prevEntry","caches","size","stats","capacity","Number","MAX_VALUE","lruHash","lruEntry","remove","removeAll","destroy","info","cacheFactory.info","cacheFactory.get","$cacheFactory","$$sanitizeUriProvider","hasDirectives","Suffix","COMMENT_DIRECTIVE_REGEXP","CLASS_DIRECTIVE_REGEXP","EVENT_HANDLER_ATTR_REGEXP","this.directive","registerDirective","directiveFactory","$exceptionHandler","directives","priority","require","controller","restrict","aHrefSanitizationWhitelist","this.aHrefSanitizationWhitelist","regexp","imgSrcSanitizationWhitelist","this.imgSrcSanitizationWhitelist","$interpolate","$http","$templateCache","$parse","$controller","$sce","$animate","$$sanitizeUri","$compileNodes","transcludeFn","maxPriority","ignoreDirective","previousCompileContext","nodeValue","compositeLinkFn","compileNodes","safeAddClass","publicLinkFn","cloneConnectFn","transcludeControllers","parentBoundTranscludeFn","$linkNode","JQLitePrototype","eq","$element","addClass","nodeList","$rootElement","childLinkFn","childScope","childBoundTranscludeFn","nodeListLength","stableNodeList","Array","linkFns","nodeLinkFn","$new","transcludeOnThisElement","createBoundTranscludeFn","transclude","templateOnThisElement","attrs","linkFnFound","Attributes","collectDirectives","applyDirectivesToNode","$$element","terminal","previousBoundTranscludeFn","boundTranscludeFn","transcludedScope","cloneFn","controllers","scopeCreated","$$transcluded","attrsMap","$attr","addDirective","directiveNormalize","nodeName_","isNgAttr","nAttrs","attrStartName","attrEndName","specified","ngAttrName","NG_ATTR_BINDING","substr","directiveNName","nName","addAttrInterpolateDirective","addTextInterpolateDirective","byPriority","groupScan","attrStart","attrEnd","depth","hasAttribute","$compileMinErr","groupElementsLinkFnWrapper","linkFn","compileNode","templateAttrs","jqCollection","originalReplaceDirective","preLinkFns","postLinkFns","addLinkFns","pre","post","directiveName","newIsolateScopeDirective","$$isolateScope","cloneAndAnnotateFn","getControllers","elementControllers","retrievalMethod","optional","linkNode","controllersBoundTransclude","cloneAttachFn","hasElementTranscludeDirective","isolateScope","LOCAL_REGEXP","templateDirective","$$originalDirective","definition","scopeName","attrName","mode","lastValue","parentGet","parentSet","compare","$$isolateBindings","$observe","$$observers","$$scope","literal","a","b","assign","parentValueWatch","parentValue","controllerDirectives","controllerInstance","controllerAs","$scope","scopeToChild","template","templateUrl","terminalPriority","newScopeDirective","nonTlbTranscludeDirective","hasTranscludeDirective","hasTemplate","$compileNode","$template","childTranscludeFn","$$start","$$end","directiveValue","assertNoDuplicate","$$tlb","createComment","replaceWith","replaceDirective","contents","denormalizeTemplate","newTemplateAttrs","templateDirectives","unprocessedDirectives","markDirectivesAsIsolate","mergeTemplateAttributes","compileTemplateUrl","Math","max","tDirectives","startAttrName","endAttrName","srcAttr","dstAttr","$set","tAttrs","linkQueue","afterTemplateNodeLinkFn","afterTemplateChildLinkFn","beforeTemplateCompileNode","origAsyncDirective","derivedSyncDirective","getTrustedResourceUrl","success","content",
"tempTemplateAttrs","beforeTemplateLinkNode","linkRootElement","oldClasses","response","code","headers","delayedNodeLinkFn","ignoreChildLinkFn","rootElement","diff","what","previousDirective","text","interpolateFn","textInterpolateCompileFn","templateNode","hasCompileParent","textInterpolateLinkFn","bindings","interpolateFnWatchAction","getTrustedContext","attrNormalizedName","HTML","RESOURCE_URL","attrInterpolatePreLinkFn","$$inter","newValue","oldValue","$updateClass","elementsToRemove","newNode","firstElementToRemove","removeCount","j2","replaceChild","expando","k","kk","annotation","$addClass","classVal","$removeClass","removeClass","newClasses","toAdd","tokenDifference","toRemove","setClass","writeAttr","booleanKey","removeAttr","listeners","startSymbol","endSymbol","PREFIX_REGEXP","str1","str2","values","tokens1","tokens2","token","CNTRL_REG","register","this.register","expression","identifier","exception","cause","parseHeaders","line","headersGetter","headersObj","transformData","fns","JSON_START","JSON_END","PROTECTION_PREFIX","CONTENT_TYPE_APPLICATION_JSON","defaults","d","interceptorFactories","interceptors","responseInterceptorFactories","responseInterceptors","$httpBackend","$browser","$q","requestConfig","transformResponse","resp","status","reject","transformRequest","mergeHeaders","defHeaders","reqHeaders","defHeaderName","reqHeaderName","common","lowercaseDefHeaderName","execHeaders","headerContent","headerFn","header","chain","serverRequest","reqData","withCredentials","sendReq","then","promise","when","reversedInterceptors","interceptor","request","requestError","responseError","thenFn","rejectFn","promise.success","promise.error","done","headersString","statusText","resolvePromise","$$phase","deferred","resolve","removePendingReq","idx","pendingRequests","cachedResp","buildUrl","params","defaultCache","xsrfValue","urlIsSameOrigin","xsrfCookieName","xsrfHeaderName","timeout","responseType","toISOString","interceptorFactory","responseFn","createShortMethods","createShortMethodsWithData","createXhr","XMLHttpRequest","ActiveXObject","createHttpBackend","callbacks","$browserDefer","jsonpReq","callbackId","script","async","body","called","addEventListenerFn","onreadystatechange","script.onreadystatechange","readyState","ABORTED","timeoutRequest","jsonpDone","xhr","abort","completeRequest","urlResolve","protocol","counter","open","setRequestHeader","xhr.onreadystatechange","responseHeaders","getAllResponseHeaders","responseText","send","this.startSymbol","this.endSymbol","mustHaveExpression","trustedContext","endIndex","hasInterpolation","startSymbolLength","exp","endSymbolLength","$interpolateMinErr","part","getTrusted","valueOf","newErr","$interpolate.startSymbol","$interpolate.endSymbol","count","invokeApply","clearInterval","iteration","skipApply","$$intervalId","tick","notify","intervals","interval.cancel","short","pluralCat","num","encodePath","segments","parseAbsoluteUrl","absoluteUrl","locationObj","appBase","parsedUrl","$$protocol","$$host","hostname","$$port","port","DEFAULT_PORTS","parseAppUrl","relativeUrl","prefixed","$$path","pathname","$$search","search","$$hash","beginsWith","begin","whole","stripHash","stripFile","lastIndexOf","LocationHtml5Url","basePrefix","$$html5","appBaseNoFile","$$parse","this.$$parse","pathUrl","$locationMinErr","$$compose","this.$$compose","$$url","$$absUrl","$$rewrite","this.$$rewrite","appUrl","prevAppUrl","LocationHashbangUrl","hashPrefix","withoutBaseUrl","withoutHashUrl","windowsFilePathExp","firstPathSegmentMatch","LocationHashbangI
nHtml5Url","locationGetter","property","locationGetterSetter","preprocess","html5Mode","this.hashPrefix","prefix","this.html5Mode","afterLocationChange","oldUrl","$broadcast","absUrl","LocationMode","initialUrl","IGNORE_URI_REGEXP","ctrlKey","metaKey","which","absHref","animVal","rewrittenUrl","newUrl","$digest","changeCounter","$locationWatch","currentReplace","$$replace","debug","debugEnabled","this.debugEnabled","flag","formatError","Error","sourceURL","consoleLog","console","logFn","log","hasApply","arg1","arg2","ensureSafeMemberName","fullExpression","$parseMinErr","ensureSafeObject","Object","setter","setValue","fullExp","propertyObj","unwrapPromises","promiseWarning","$$v","cspSafeGetterFn","key0","key1","key2","key3","key4","cspSafePromiseEnabledGetter","pathVal","cspSafeGetter","getterFn","getterFnCache","pathKeys","pathKeysLength","evaledFnGetter","Function","$parseOptions","this.unwrapPromises","logPromiseWarnings","this.logPromiseWarnings","$filter","promiseWarningCache","parsedExpression","lexer","Lexer","parser","Parser","qFactory","nextTick","exceptionHandler","defaultCallback","defaultErrback","pending","ref","createInternalRejectedPromise","progress","errback","progressback","wrappedCallback","wrappedErrback","wrappedProgressback","catch","finally","makePromise","resolved","handleCallback","isResolved","callbackOutput","promises","requestAnimationFrame","webkitRequestAnimationFrame","mozRequestAnimationFrame","cancelAnimationFrame","webkitCancelAnimationFrame","mozCancelAnimationFrame","webkitCancelRequestAnimationFrame","rafSupported","raf","id","timer","TTL","$rootScopeMinErr","lastDirtyWatch","digestTtl","this.digestTtl","Scope","$id","$parent","$$watchers","$$nextSibling","$$prevSibling","$$childHead","$$childTail","$root","$$destroyed","$$asyncQueue","$$postDigestQueue","$$listeners","$$listenerCount","beginPhase","phase","compileToFn","decrementListenerCount","current","initWatchVal","isolate","child","$$childScopeClass","this.$$childScopeClass","watchExp","objectEquality","watcher","listenFn","watcher.fn","newVal","oldVal","originalFn","deregisterWatch","$watchCollection","veryOldValue","trackVeryOldValue","changeDetected","objGetter","internalArray","internalObject","initRun","oldLength","$watchCollectionWatch","newLength","bothNaN","$watchCollectionAction","watch","watchers","asyncQueue","postDigestQueue","dirty","ttl","watchLog","logIdx","logMsg","asyncTask","$eval","next","$on","this.$watch","expr","$$postDigest","namedListeners","$emit","listenerArgs","array1","currentScope","sanitizeUri","uri","isImage","regex","normalizedVal","adjustMatcher","matcher","$sceMinErr","adjustMatchers","matchers","adjustedMatchers","SCE_CONTEXTS","resourceUrlWhitelist","resourceUrlBlacklist","this.resourceUrlWhitelist","this.resourceUrlBlacklist","generateHolderType","Base","holderType","trustedValue","$$unwrapTrustedValue","this.$$unwrapTrustedValue","holderType.prototype.valueOf","holderType.prototype.toString","htmlSanitizer","trustedValueHolderBase","byType","CSS","URL","JS","trustAs","maybeTrusted","allowed","enabled","this.enabled","$sceDelegate","msieDocumentMode","sce","isEnabled","sce.isEnabled","sce.getTrusted","parseAs","sce.parseAs","sceParseAsTrusted","enumValue","lName","eventSupport","android","userAgent","navigator","boxee","documentMode","vendorPrefix","vendorRegex","bodyStyle","style","transitions","animations","webkitTransition","webkitAnimation","hasEvent","divElm","deferreds","$$timeoutId","timeout.cancel","base","urlParsingNode","requestUrl","originUrl","filte
rs","suffix","currencyFilter","dateFilter","filterFilter","jsonFilter","limitToFilter","lowercaseFilter","numberFilter","orderByFilter","uppercaseFilter","comparator","comparatorType","predicates","predicates.check","objKey","filtered","$locale","formats","NUMBER_FORMATS","amount","currencySymbol","CURRENCY_SYM","formatNumber","PATTERNS","GROUP_SEP","DECIMAL_SEP","number","fractionSize","pattern","groupSep","decimalSep","isFinite","isNegative","abs","numStr","formatedText","hasExponent","toFixed","fractionLen","min","minFrac","maxFrac","round","fraction","lgroup","lgSize","group","gSize","negPre","posPre","negSuf","posSuf","padNumber","digits","neg","dateGetter","date","dateStrGetter","shortForm","jsonStringToDate","string","R_ISO8601_STR","tzHour","tzMin","dateSetter","setUTCFullYear","setFullYear","timeSetter","setUTCHours","setHours","m","s","ms","parseFloat","format","DATETIME_FORMATS","NUMBER_STRING","DATE_FORMATS_SPLIT","DATE_FORMATS","object","input","limit","Infinity","out","sortPredicate","reverseOrder","reverseComparator","comp","descending","v1","v2","predicate","arrayCopy","ngDirective","FormController","toggleValidCss","isValid","validationErrorKey","INVALID_CLASS","VALID_CLASS","form","parentForm","nullFormCtrl","invalidCount","errors","$error","controls","$name","ngForm","$dirty","$pristine","$valid","$invalid","$addControl","PRISTINE_CLASS","form.$addControl","control","$removeControl","form.$removeControl","queue","validationToken","$setValidity","form.$setValidity","$setDirty","form.$setDirty","DIRTY_CLASS","$setPristine","form.$setPristine","validate","ctrl","validatorName","validity","testFlags","flags","addNativeHtml5Validators","badFlags","ignoreFlags","$$hasNativeValidators","$parsers","validator","textInputType","VALIDITY_STATE_PROPERTY","placeholder","noevent","$$validityState","composing","ev","ngTrim","revalidate","$viewValue","$setViewValue","deferListener","keyCode","$render","ctrl.$render","$isEmpty","ngPattern","patternValidator","patternObj","$formatters","ngMinlength","minlength","minLengthValidator","ngMaxlength","maxlength","maxLengthValidator","classDirective","arrayDifference","arrayClasses","classes","digestClassCounts","classCounts","classesToUpdate","ngClassWatchAction","$index","old$index","mod","isActive_","active","querySelector","addEventListener","attachEvent","removeEventListener","detachEvent","_data","JQLite._data","optgroup","option","tbody","tfoot","colgroup","caption","thead","th","td","ready","trigger","fired","removeAttribute","css","currentStyle","lowercasedName","getNamedItem","ret","getText","textProp","NODE_TYPE_TEXT_PROPERTY","$dv","multiple","selected","nodeCount","onFn","eventFns","contains","compareDocumentPosition","adown","bup","eventmap","related","relatedTarget","one","off","replaceNode","insertBefore","contentDocument","prepend","wrapNode","after","newElement","toggleClass","condition","classCondition","nextElementSibling","getElementsByTagName","extraParameters","dummyEvent","handlerArgs","eventName","eventFnsCopy","arg3","unbind","$animateMinErr","$$selectors","classNameFilter","this.classNameFilter","$$classNameFilter","$$asyncCallback","enter","leave","move","add","PATH_MATCH","paramValue","CALL","APPLY","BIND","OPERATORS","null","true","false","+","-","*","/","%","^","===","!==","==","!=","<",">","<=",">=","&&","||","&","|","!","ESCAPE","lex","ch","lastCh","tokens","is","readString","peek","readNumber","isIdent","readIdent","isWhitespace","ch2","ch3","fn2","fn3","throwError","chars","was","isExpOperator","start","end","c
olStr","peekCh","ident","lastDot","peekIndex","methodName","quote","rawString","hex","rep","ZERO","statements","primary","expect","filterChain","consume","arrayDeclaration","functionCall","objectIndex","fieldAccess","msg","peekToken","e1","e2","e3","e4","t","unaryFn","right","ternaryFn","left","middle","binaryFn","statement","argsFn","fnInvoke","assignment","ternary","logicalOR","logicalAND","equality","relational","additive","multiplicative","unary","field","o","indexFn","contextGetter","fnPtr","elementFns","allConstant","elementFn","keyValues","ampmGetter","getHours","AMPMS","timeZoneGetter","zone","getTimezoneOffset","paddedZone","xlinkHref","propName","normalized","ngBooleanAttrWatchAction","formDirectiveFactory","isNgForm","formElement","action","preventDefaultListener","parentFormCtrl","alias","URL_REGEXP","EMAIL_REGEXP","NUMBER_REGEXP","inputType","numberInputType","numberBadFlags","minValidator","maxValidator","urlInputType","urlValidator","emailInputType","emailValidator","radioInputType","checked","checkboxInputType","trueValue","ngTrueValue","falseValue","ngFalseValue","ctrl.$isEmpty","NgModelController","$modelValue","NaN","$viewChangeListeners","ngModelGet","ngModel","ngModelSet","this.$isEmpty","inheritedData","this.$setValidity","this.$setPristine","this.$setViewValue","ngModelWatch","formatters","ctrls","modelCtrl","formCtrl","ngChange","required","ngList","viewValue","CONSTANT_VALUE_REGEXP","tpl","tplAttr","ngValue","ngValueConstantLink","ngValueLink","valueWatchAction","templateElement","ngBind","ngBindWatchAction","ngBindTemplate","tElement","ngBindHtml","getStringValue","ngBindHtmlWatchAction","getTrustedHtml","ngEventHandler","$transclude","previousElements","ngIf","ngIfWatchAction","$anchorScroll","srcExp","ngInclude","onloadExp","onload","autoScrollExp","autoscroll","previousElement","currentElement","cleanupLastIncludeContent","parseAsResourceUrl","ngIncludeWatchAction","afterAnimation","thisChangeId","newScope","$compile","ngInit","BRACE","numberExp","whenExp","whens","whensExpFns","isWhen","attributeName","ngPluralizeWatch","ngPluralizeWatchAction","ngRepeatMinErr","ngRepeat","trackByExpGetter","trackByIdExpFn","trackByIdArrayFn","trackByIdObjFn","valueIdentifier","keyIdentifier","hashFnLocals","lhs","rhs","trackByExp","lastBlockMap","ngRepeatAction","collection","previousNode","nextNode","nextBlockMap","arrayLength","collectionKeys","nextBlockOrder","trackByIdFn","trackById","$first","$last","$middle","$odd","$even","ngShow","ngShowWatchAction","ngHide","ngHideWatchAction","ngStyle","ngStyleWatchAction","newStyles","oldStyles","ngSwitchController","cases","selectedTranscludes","selectedElements","selectedScopes","ngSwitch","ngSwitchWatchAction","change","selectedTransclude","selectedScope","caseElement","anchor","ngSwitchWhen","$attrs","ngOptionsMinErr","NG_OPTIONS_REGEXP","nullModelCtrl","optionsMap","ngModelCtrl","unknownOption","databound","init","self.init","ngModelCtrl_","nullOption_","unknownOption_","addOption","self.addOption","removeOption","self.removeOption","hasOption","renderUnknownOption","self.renderUnknownOption","unknownVal","self.hasOption","setupAsSingle","selectElement","selectCtrl","ngModelCtrl.$render","emptyOption","setupAsMultiple","lastView","items","selectMultipleWatch","setupAsOptions","render","optionGroups","optionGroupNames","optionGroupName","optionGroup","existingParent","existingOptions","existingOption","modelValue","valuesFn","keyName","groupIndex","selectedSet","trackFn","trackIndex","valueName","lastElement","groupByFn","modelC
ast","label","displayFn","nullOption","groupLength","optionGroupsCache","optGroupTemplate","optionTemplate","optionsExp","track","optionElement","ngOptions","ngModelCtrl.$isEmpty","nullSelectCtrl","selectCtrlName","interpolateWatchAction","$$csp"]
+}
diff --git a/bitbake/lib/toaster/toastergui/static/js/base.js b/bitbake/lib/toaster/toastergui/static/js/base.js
new file mode 100644
index 0000000000..619ad287c4
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/base.js
@@ -0,0 +1,134 @@
+
+
+function basePageInit (ctx) {
+
+ var newBuildButton = $("#new-build-button");
+ /* Hide the button if we're on the project, newproject or importlayer page
+ * or if there are no projects yet defined
+ */
+ if (ctx.numProjects == 0 || ctx.currentUrl.search('newproject|project/\\d/$|importlayer/$') > 0){
+ newBuildButton.hide();
+ return;
+ }
+
+ /* Hide the change project icon when there is only one project */
+ if (ctx.numProjects == 1){
+ $('#project .icon-pencil').hide();
+ }
+
+ newBuildButton.show().removeAttr("disabled");
+
+ _checkProjectBuildable();
+ _setupNewBuildButton();
+
+
+ function _checkProjectBuildable(){
+ if (ctx.projectId == undefined)
+ return;
+
+ libtoaster.getProjectInfo(ctx.projectInfoUrl, ctx.projectId,
+ function(data){
+ if (data.machine.name == undefined || data.layers.length == 0) {
+ /* we can't build anything without a machine and some layers */
+ $("#new-build-button #targets-form").hide();
+ $("#new-build-button .alert").show();
+ } else {
+ $("#new-build-button #targets-form").show();
+ $("#new-build-button .alert").hide();
+ }
+ }, null);
+ }
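+
+ /* Sketch (inferred from the callback above, not authoritative): the
+ * getProjectInfo response exposes at least data.machine.name and
+ * data.layers (an array), e.g.
+ * { "machine": { "name": "qemux86" }, "layers": [ ... ] }
+ * where "qemux86" and the layer list are illustrative values only.
+ */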
+
+ function _setupNewBuildButton() {
+ /* Setup New build button */
+ var newBuildProjectInput = $("#new-build-button #project-name-input");
+ var newBuildTargetBuildBtn = $("#new-build-button #build-button");
+ var newBuildTargetInput = $("#new-build-button #build-target-input");
+ var newBuildProjectSaveBtn = $("#new-build-button #save-project-button");
+ var selectedTarget;
+ var selectedProject;
+
+ /* If we don't have a current project then present the set project
+ * form.
+ */
+ if (ctx.projectId == undefined) {
+ $('#change-project-form').show();
+ $('#project .icon-pencil').hide();
+ }
+
+ libtoaster.makeTypeahead(newBuildTargetInput, ctx.xhrDataTypeaheadUrl, { type : "targets", project_id: ctx.projectId }, function(item){
+ /* successfully selected a target */
+ selectedTarget = item;
+ });
+
+
+ libtoaster.makeTypeahead(newBuildProjectInput, ctx.xhrDataTypeaheadUrl, { type : "projects" }, function(item){
+ /* successfully selected a project */
+ newBuildProjectSaveBtn.removeAttr("disabled");
+ selectedProject = item;
+ });
+
+ /* Any typing in the input other than the Enter key invalidates the value
+ * that was set by selecting a suggestion from the typeahead
+ */
+ newBuildProjectInput.keyup(function(event) {
+ if (event.keyCode == 13)
+ return;
+ newBuildProjectSaveBtn.attr("disabled", "disabled");
+ });
+
+ newBuildTargetInput.keyup(function() {
+ if ($(this).val().length == 0)
+ newBuildTargetBuildBtn.attr("disabled", "disabled");
+ else
+ newBuildTargetBuildBtn.removeAttr("disabled");
+ });
+
+ newBuildTargetBuildBtn.click(function() {
+ if (!newBuildTargetInput.val())
+ return;
+
+ /* fire and forget */
+ libtoaster.startABuild(ctx.projectBuildUrl, ctx.projectId, selectedTarget.name, null, null);
+ window.location.replace(ctx.projectPageUrl+ctx.projectId);
+ });
+
+ newBuildProjectSaveBtn.click(function() {
+ ctx.projectId = selectedProject.id;
+ /* Update the typeahead project_id parameter */
+ _checkProjectBuildable();
+ newBuildTargetInput.data('typeahead').options.xhrParams.project_id = ctx.projectId;
+ newBuildTargetInput.val("");
+
+ $("#new-build-button #project a").text(selectedProject.name).attr('href', ctx.projectPageUrl+ctx.projectId);
+ $("#new-build-button .alert a").attr('href', ctx.projectPageUrl+ctx.projectId);
+
+
+ $("#change-project-form").slideUp({ 'complete' : function() {
+ $("#new-build-button #project").show();
+ }});
+ });
+
+ $('#new-build-button #project .icon-pencil').click(function() {
+ newBuildProjectSaveBtn.attr("disabled", "disabled");
+ newBuildProjectInput.val($("#new-build-button #project a").text());
+ $(this).parent().hide();
+ $("#change-project-form").slideDown();
+ });
+
+ $("#new-build-button #cancel-change-project").click(function() {
+ $("#change-project-form").hide(function(){
+ $('#new-build-button #project').show();
+ });
+
+ newBuildProjectInput.val("");
+ newBuildProjectSaveBtn.attr("disabled", "disabled");
+ });
+
+ /* Keep the dropdown open unless we click outside the dropdown area */
+ $(".new-build").click (function(event) {
+ event.stopPropagation();
+ });
+ };
+
+}
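+
+/* Usage sketch (illustrative): a page template is expected to call
+ * basePageInit with a ctx object carrying the fields read above; the
+ * numeric values and URLs below are placeholder assumptions, not the
+ * actual Toaster endpoints.
+ *
+ *   $(document).ready(function () {
+ *     basePageInit({
+ *       numProjects: 2,
+ *       currentUrl: window.location.href,
+ *       projectId: 1,
+ *       projectPageUrl: "/toastergui/project/",
+ *       projectInfoUrl: "/toastergui/xhr_projectinfo/",
+ *       projectBuildUrl: "/toastergui/xhr_projectbuild/",
+ *       xhrDataTypeaheadUrl: "/toastergui/xhr_datatypeahead/"
+ *     });
+ *   });
+ */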
diff --git a/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js b/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js
index e4f481784e..848258d380 100644
--- a/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js
+++ b/bitbake/lib/toaster/toastergui/static/js/bootstrap.min.js
@@ -1,7 +1,6 @@
/*!
- * Bootstrap v3.0.3 (http://getbootstrap.com)
- * Copyright 2013 Twitter, Inc.
- * Licensed under http://www.apache.org/licenses/LICENSE-2.0
- */
-
-if("undefined"==typeof jQuery)throw new Error("Bootstrap requires jQuery");+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]}}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one(a.support.transition.end,function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b()})}(jQuery),+function(a){"use strict";var b='[data-dismiss="alert"]',c=function(c){a(c).on("click",b,this.close)};c.prototype.close=function(b){function c(){f.trigger("closed.bs.alert").remove()}var d=a(this),e=d.attr("data-target");e||(e=d.attr("href"),e=e&&e.replace(/.*(?=#[^\s]*$)/,""));var f=a(e);b&&b.preventDefault(),f.length||(f=d.hasClass("alert")?d:d.parent()),f.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(f.removeClass("in"),a.support.transition&&f.hasClass("fade")?f.one(a.support.transition.end,c).emulateTransitionEnd(150):c())};var d=a.fn.alert;a.fn.alert=function(b){return this.each(function(){var d=a(this),e=d.data("bs.alert");e||d.data("bs.alert",e=new c(this)),"string"==typeof b&&e[b].call(d)})},a.fn.alert.Constructor=c,a.fn.alert.noConflict=function(){return a.fn.alert=d,this},a(document).on("click.bs.alert.data-api",b,c.prototype.close)}(jQuery),+function(a){"use strict";var b=function(c,d){this.$element=a(c),this.options=a.extend({},b.DEFAULTS,d)};b.DEFAULTS={loadingText:"loading..."},b.prototype.setState=function(a){var b="disabled",c=this.$element,d=c.is("input")?"val":"html",e=c.data();a+="Text",e.resetText||c.data("resetText",c[d]()),c[d](e[a]||this.options[a]),setTimeout(function(){"loadingText"==a?c.addClass(b).attr(b,b):c.removeClass(b).removeAttr(b)},0)},b.prototype.toggle=function(){var a=this.$element.closest('[data-toggle="buttons"]'),b=!0;if(a.length){var c=this.$element.find("input");"radio"===c.prop("type")&&(c.prop("checked")&&this.$element.hasClass("active")?b=!1:a.find(".active").removeClass("active")),b&&c.prop("checked",!this.$element.hasClass("active")).trigger("change")}b&&this.$element.toggleClass("active")};var c=a.fn.button;a.fn.button=function(c){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof c&&c;e||d.data("bs.button",e=new b(this,f)),"toggle"==c?e.toggle():c&&e.setState(c)})},a.fn.button.Constructor=b,a.fn.button.noConflict=function(){return a.fn.button=c,this},a(document).on("click.bs.button.data-api","[data-toggle^=button]",function(b){var c=a(b.target);c.hasClass("btn")||(c=c.closest(".btn")),c.button("toggle"),b.preventDefault()})}(jQuery),+function(a){"use strict";var b=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=this.sliding=this.interval=this.$active=this.$items=null,"hover"==this.options.pause&&this.$element.on("mouseenter",a.proxy(this.pause,this)).on("mouseleave",a.proxy(this.cycle,this))};b.DEFAULTS={interval:5e3,pause:"hover",wrap:!0},b.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},b.prototype.getActiveIndex=function(){return 
this.$active=this.$element.find(".item.active"),this.$items=this.$active.parent().children(),this.$items.index(this.$active)},b.prototype.to=function(b){var c=this,d=this.getActiveIndex();return b>this.$items.length-1||0>b?void 0:this.sliding?this.$element.one("slid.bs.carousel",function(){c.to(b)}):d==b?this.pause().cycle():this.slide(b>d?"next":"prev",a(this.$items[b]))},b.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition.end&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},b.prototype.next=function(){return this.sliding?void 0:this.slide("next")},b.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},b.prototype.slide=function(b,c){var d=this.$element.find(".item.active"),e=c||d[b](),f=this.interval,g="next"==b?"left":"right",h="next"==b?"first":"last",i=this;if(!e.length){if(!this.options.wrap)return;e=this.$element.find(".item")[h]()}this.sliding=!0,f&&this.pause();var j=a.Event("slide.bs.carousel",{relatedTarget:e[0],direction:g});if(!e.hasClass("active")){if(this.$indicators.length&&(this.$indicators.find(".active").removeClass("active"),this.$element.one("slid.bs.carousel",function(){var b=a(i.$indicators.children()[i.getActiveIndex()]);b&&b.addClass("active")})),a.support.transition&&this.$element.hasClass("slide")){if(this.$element.trigger(j),j.isDefaultPrevented())return;e.addClass(b),e[0].offsetWidth,d.addClass(g),e.addClass(g),d.one(a.support.transition.end,function(){e.removeClass([b,g].join(" ")).addClass("active"),d.removeClass(["active",g].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger("slid.bs.carousel")},0)}).emulateTransitionEnd(600)}else{if(this.$element.trigger(j),j.isDefaultPrevented())return;d.removeClass("active"),e.addClass("active"),this.sliding=!1,this.$element.trigger("slid.bs.carousel")}return f&&this.cycle(),this}};var c=a.fn.carousel;a.fn.carousel=function(c){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},b.DEFAULTS,d.data(),"object"==typeof c&&c),g="string"==typeof c?c:f.slide;e||d.data("bs.carousel",e=new b(this,f)),"number"==typeof c?e.to(c):g?e[g]():f.interval&&e.pause().cycle()})},a.fn.carousel.Constructor=b,a.fn.carousel.noConflict=function(){return a.fn.carousel=c,this},a(document).on("click.bs.carousel.data-api","[data-slide], [data-slide-to]",function(b){var c,d=a(this),e=a(d.attr("data-target")||(c=d.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"")),f=a.extend({},e.data(),d.data()),g=d.attr("data-slide-to");g&&(f.interval=!1),e.carousel(f),(g=d.attr("data-slide-to"))&&e.data("bs.carousel").to(g),b.preventDefault()}),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var b=a(this);b.carousel(b.data())})})}(jQuery),+function(a){"use strict";var b=function(c,d){this.$element=a(c),this.options=a.extend({},b.DEFAULTS,d),this.transitioning=null,this.options.parent&&(this.$parent=a(this.options.parent)),this.options.toggle&&this.toggle()};b.DEFAULTS={toggle:!0},b.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},b.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b=a.Event("show.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.$parent&&this.$parent.find("> .panel > .in");if(c&&c.length){var d=c.data("bs.collapse");if(d&&d.transitioning)return;c.collapse("hide"),d||c.data("bs.collapse",null)}var 
e=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[e](0),this.transitioning=1;var f=function(){this.$element.removeClass("collapsing").addClass("in")[e]("auto"),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return f.call(this);var g=a.camelCase(["scroll",e].join("-"));this.$element.one(a.support.transition.end,a.proxy(f,this)).emulateTransitionEnd(350)[e](this.$element[0][g])}}},b.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse").removeClass("in"),this.transitioning=1;var d=function(){this.transitioning=0,this.$element.trigger("hidden.bs.collapse").removeClass("collapsing").addClass("collapse")};return a.support.transition?(this.$element[c](0).one(a.support.transition.end,a.proxy(d,this)).emulateTransitionEnd(350),void 0):d.call(this)}}},b.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()};var c=a.fn.collapse;a.fn.collapse=function(c){return this.each(function(){var d=a(this),e=d.data("bs.collapse"),f=a.extend({},b.DEFAULTS,d.data(),"object"==typeof c&&c);e||d.data("bs.collapse",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.collapse.Constructor=b,a.fn.collapse.noConflict=function(){return a.fn.collapse=c,this},a(document).on("click.bs.collapse.data-api","[data-toggle=collapse]",function(b){var c,d=a(this),e=d.attr("data-target")||b.preventDefault()||(c=d.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,""),f=a(e),g=f.data("bs.collapse"),h=g?"toggle":d.data(),i=d.attr("data-parent"),j=i&&a(i);g&&g.transitioning||(j&&j.find('[data-toggle=collapse][data-parent="'+i+'"]').not(d).addClass("collapsed"),d[f.hasClass("in")?"addClass":"removeClass"]("collapsed")),f.collapse(h)})}(jQuery),+function(a){"use strict";function b(){a(d).remove(),a(e).each(function(b){var d=c(a(this));d.hasClass("open")&&(d.trigger(b=a.Event("hide.bs.dropdown")),b.isDefaultPrevented()||d.removeClass("open").trigger("hidden.bs.dropdown"))})}function c(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}var d=".dropdown-backdrop",e="[data-toggle=dropdown]",f=function(b){a(b).on("click.bs.dropdown",this.toggle)};f.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=c(e),g=f.hasClass("open");if(b(),!g){if("ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a('<div class="dropdown-backdrop"/>').insertAfter(a(this)).on("click",b),f.trigger(d=a.Event("show.bs.dropdown")),d.isDefaultPrevented())return;f.toggleClass("open").trigger("shown.bs.dropdown"),e.focus()}return!1}},f.prototype.keydown=function(b){if(/(38|40|27)/.test(b.keyCode)){var d=a(this);if(b.preventDefault(),b.stopPropagation(),!d.is(".disabled, :disabled")){var f=c(d),g=f.hasClass("open");if(!g||g&&27==b.keyCode)return 27==b.which&&f.find(e).focus(),d.click();var h=a("[role=menu] li:not(.divider):visible a",f);if(h.length){var i=h.index(h.filter(":focus"));38==b.keyCode&&i>0&&i--,40==b.keyCode&&i<h.length-1&&i++,~i||(i=0),h.eq(i).focus()}}}};var g=a.fn.dropdown;a.fn.dropdown=function(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new f(this)),"string"==typeof 
b&&d[b].call(c)})},a.fn.dropdown.Constructor=f,a.fn.dropdown.noConflict=function(){return a.fn.dropdown=g,this},a(document).on("click.bs.dropdown.data-api",b).on("click.bs.dropdown.data-api",".dropdown form",function(a){a.stopPropagation()}).on("click.bs.dropdown.data-api",e,f.prototype.toggle).on("keydown.bs.dropdown.data-api",e+", [role=menu]",f.prototype.keydown)}(jQuery),+function(a){"use strict";var b=function(b,c){this.options=c,this.$element=a(b),this.$backdrop=this.isShown=null,this.options.remote&&this.$element.load(this.options.remote)};b.DEFAULTS={backdrop:!0,keyboard:!0,show:!0},b.prototype.toggle=function(a){return this[this.isShown?"hide":"show"](a)},b.prototype.show=function(b){var c=this,d=a.Event("show.bs.modal",{relatedTarget:b});this.$element.trigger(d),this.isShown||d.isDefaultPrevented()||(this.isShown=!0,this.escape(),this.$element.on("click.dismiss.modal",'[data-dismiss="modal"]',a.proxy(this.hide,this)),this.backdrop(function(){var d=a.support.transition&&c.$element.hasClass("fade");c.$element.parent().length||c.$element.appendTo(document.body),c.$element.show(),d&&c.$element[0].offsetWidth,c.$element.addClass("in").attr("aria-hidden",!1),c.enforceFocus();var e=a.Event("shown.bs.modal",{relatedTarget:b});d?c.$element.find(".modal-dialog").one(a.support.transition.end,function(){c.$element.focus().trigger(e)}).emulateTransitionEnd(300):c.$element.focus().trigger(e)}))},b.prototype.hide=function(b){b&&b.preventDefault(),b=a.Event("hide.bs.modal"),this.$element.trigger(b),this.isShown&&!b.isDefaultPrevented()&&(this.isShown=!1,this.escape(),a(document).off("focusin.bs.modal"),this.$element.removeClass("in").attr("aria-hidden",!0).off("click.dismiss.modal"),a.support.transition&&this.$element.hasClass("fade")?this.$element.one(a.support.transition.end,a.proxy(this.hideModal,this)).emulateTransitionEnd(300):this.hideModal())},b.prototype.enforceFocus=function(){a(document).off("focusin.bs.modal").on("focusin.bs.modal",a.proxy(function(a){this.$element[0]===a.target||this.$element.has(a.target).length||this.$element.focus()},this))},b.prototype.escape=function(){this.isShown&&this.options.keyboard?this.$element.on("keyup.dismiss.bs.modal",a.proxy(function(a){27==a.which&&this.hide()},this)):this.isShown||this.$element.off("keyup.dismiss.bs.modal")},b.prototype.hideModal=function(){var a=this;this.$element.hide(),this.backdrop(function(){a.removeBackdrop(),a.$element.trigger("hidden.bs.modal")})},b.prototype.removeBackdrop=function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},b.prototype.backdrop=function(b){var c=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var d=a.support.transition&&c;if(this.$backdrop=a('<div class="modal-backdrop '+c+'" />').appendTo(document.body),this.$element.on("click.dismiss.modal",a.proxy(function(a){a.target===a.currentTarget&&("static"==this.options.backdrop?this.$element[0].focus.call(this.$element[0]):this.hide.call(this))},this)),d&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in"),!b)return;d?this.$backdrop.one(a.support.transition.end,b).emulateTransitionEnd(150):b()}else!this.isShown&&this.$backdrop?(this.$backdrop.removeClass("in"),a.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one(a.support.transition.end,b).emulateTransitionEnd(150):b()):b&&b()};var c=a.fn.modal;a.fn.modal=function(c,d){return this.each(function(){var e=a(this),f=e.data("bs.modal"),g=a.extend({},b.DEFAULTS,e.data(),"object"==typeof c&&c);f||e.data("bs.modal",f=new 
b(this,g)),"string"==typeof c?f[c](d):g.show&&f.show(d)})},a.fn.modal.Constructor=b,a.fn.modal.noConflict=function(){return a.fn.modal=c,this},a(document).on("click.bs.modal.data-api",'[data-toggle="modal"]',function(b){var c=a(this),d=c.attr("href"),e=a(c.attr("data-target")||d&&d.replace(/.*(?=#[^\s]+$)/,"")),f=e.data("modal")?"toggle":a.extend({remote:!/#/.test(d)&&d},e.data(),c.data());b.preventDefault(),e.modal(f,this).one("hide",function(){c.is(":visible")&&c.focus()})}),a(document).on("show.bs.modal",".modal",function(){a(document.body).addClass("modal-open")}).on("hidden.bs.modal",".modal",function(){a(document.body).removeClass("modal-open")})}(jQuery),+function(a){"use strict";var b=function(a,b){this.type=this.options=this.enabled=this.timeout=this.hoverState=this.$element=null,this.init("tooltip",a,b)};b.DEFAULTS={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1},b.prototype.init=function(b,c,d){this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d);for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var h="hover"==g?"mouseenter":"focus",i="hover"==g?"mouseleave":"blur";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},b.prototype.getDefaults=function(){return b.DEFAULTS},b.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},b.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},b.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget)[this.type](this.getDelegateOptions()).data("bs."+this.type);return clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show),void 0):c.show()},b.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget)[this.type](this.getDelegateOptions()).data("bs."+this.type);return clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide),void 0):c.hide()},b.prototype.show=function(){var b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){if(this.$element.trigger(b),b.isDefaultPrevented())return;var c=this.tip();this.setContent(),this.options.animation&&c.addClass("fade");var d="function"==typeof this.options.placement?this.options.placement.call(this,c[0],this.$element[0]):this.options.placement,e=/\s?auto?\s?/i,f=e.test(d);f&&(d=d.replace(e,"")||"top"),c.detach().css({top:0,left:0,display:"block"}).addClass(d),this.options.container?c.appendTo(this.options.container):c.insertAfter(this.$element);var g=this.getPosition(),h=c[0].offsetWidth,i=c[0].offsetHeight;if(f){var 
j=this.$element.parent(),k=d,l=document.documentElement.scrollTop||document.body.scrollTop,m="body"==this.options.container?window.innerWidth:j.outerWidth(),n="body"==this.options.container?window.innerHeight:j.outerHeight(),o="body"==this.options.container?0:j.offset().left;d="bottom"==d&&g.top+g.height+i-l>n?"top":"top"==d&&g.top-l-i<0?"bottom":"right"==d&&g.right+h>m?"left":"left"==d&&g.left-h<o?"right":d,c.removeClass(k).addClass(d)}var p=this.getCalculatedOffset(d,g,h,i);this.applyPlacement(p,d),this.$element.trigger("shown.bs."+this.type)}},b.prototype.applyPlacement=function(a,b){var c,d=this.tip(),e=d[0].offsetWidth,f=d[0].offsetHeight,g=parseInt(d.css("margin-top"),10),h=parseInt(d.css("margin-left"),10);isNaN(g)&&(g=0),isNaN(h)&&(h=0),a.top=a.top+g,a.left=a.left+h,d.offset(a).addClass("in");var i=d[0].offsetWidth,j=d[0].offsetHeight;if("top"==b&&j!=f&&(c=!0,a.top=a.top+f-j),/bottom|top/.test(b)){var k=0;a.left<0&&(k=-2*a.left,a.left=0,d.offset(a),i=d[0].offsetWidth,j=d[0].offsetHeight),this.replaceArrow(k-e+i,i,"left")}else this.replaceArrow(j-f,j,"top");c&&d.offset(a)},b.prototype.replaceArrow=function(a,b,c){this.arrow().css(c,a?50*(1-a/b)+"%":"")},b.prototype.setContent=function(){var a=this.tip(),b=this.getTitle();a.find(".tooltip-inner")[this.options.html?"html":"text"](b),a.removeClass("fade in top bottom left right")},b.prototype.hide=function(){function b(){"in"!=c.hoverState&&d.detach()}var c=this,d=this.tip(),e=a.Event("hide.bs."+this.type);return this.$element.trigger(e),e.isDefaultPrevented()?void 0:(d.removeClass("in"),a.support.transition&&this.$tip.hasClass("fade")?d.one(a.support.transition.end,b).emulateTransitionEnd(150):b(),this.$element.trigger("hidden.bs."+this.type),this)},b.prototype.fixTitle=function(){var a=this.$element;(a.attr("title")||"string"!=typeof a.attr("data-original-title"))&&a.attr("data-original-title",a.attr("title")||"").attr("title","")},b.prototype.hasContent=function(){return this.getTitle()},b.prototype.getPosition=function(){var b=this.$element[0];return a.extend({},"function"==typeof b.getBoundingClientRect?b.getBoundingClientRect():{width:b.offsetWidth,height:b.offsetHeight},this.$element.offset())},b.prototype.getCalculatedOffset=function(a,b,c,d){return"bottom"==a?{top:b.top+b.height,left:b.left+b.width/2-c/2}:"top"==a?{top:b.top-d,left:b.left+b.width/2-c/2}:"left"==a?{top:b.top+b.height/2-d/2,left:b.left-c}:{top:b.top+b.height/2-d/2,left:b.left+b.width}},b.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},b.prototype.tip=function(){return this.$tip=this.$tip||a(this.options.template)},b.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},b.prototype.validate=function(){this.$element[0].parentNode||(this.hide(),this.$element=null,this.options=null)},b.prototype.enable=function(){this.enabled=!0},b.prototype.disable=function(){this.enabled=!1},b.prototype.toggleEnabled=function(){this.enabled=!this.enabled},b.prototype.toggle=function(b){var c=b?a(b.currentTarget)[this.type](this.getDelegateOptions()).data("bs."+this.type):this;c.tip().hasClass("in")?c.leave(c):c.enter(c)},b.prototype.destroy=function(){this.hide().$element.off("."+this.type).removeData("bs."+this.type)};var c=a.fn.tooltip;a.fn.tooltip=function(c){return this.each(function(){var d=a(this),e=d.data("bs.tooltip"),f="object"==typeof c&&c;e||d.data("bs.tooltip",e=new b(this,f)),"string"==typeof 
c&&e[c]()})},a.fn.tooltip.Constructor=b,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=c,this}}(jQuery),+function(a){"use strict";var b=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires tooltip.js");b.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:'<div class="popover"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),b.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),b.prototype.constructor=b,b.prototype.getDefaults=function(){return b.DEFAULTS},b.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content")[this.options.html?"html":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},b.prototype.hasContent=function(){return this.getTitle()||this.getContent()},b.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},b.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")},b.prototype.tip=function(){return this.$tip||(this.$tip=a(this.options.template)),this.$tip};var c=a.fn.popover;a.fn.popover=function(c){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof c&&c;e||d.data("bs.popover",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.popover.Constructor=b,a.fn.popover.noConflict=function(){return a.fn.popover=c,this}}(jQuery),+function(a){"use strict";function b(c,d){var e,f=a.proxy(this.process,this);this.$element=a(c).is("body")?a(window):a(c),this.$body=a("body"),this.$scrollElement=this.$element.on("scroll.bs.scroll-spy.data-api",f),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||(e=a(c).attr("href"))&&e.replace(/.*(?=#[^\s]+$)/,"")||"")+" .nav li > a",this.offsets=a([]),this.targets=a([]),this.activeTarget=null,this.refresh(),this.process()}b.DEFAULTS={offset:10},b.prototype.refresh=function(){var b=this.$element[0]==window?"offset":"position";this.offsets=a([]),this.targets=a([]);var c=this;this.$body.find(this.selector).map(function(){var d=a(this),e=d.data("target")||d.attr("href"),f=/^#\w/.test(e)&&a(e);return f&&f.length&&[[f[b]().top+(!a.isWindow(c.$scrollElement.get(0))&&c.$scrollElement.scrollTop()),e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){c.offsets.push(this[0]),c.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.$scrollElement[0].scrollHeight||this.$body[0].scrollHeight,d=c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(b>=d)return g!=(a=f.last()[0])&&this.activate(a);for(a=e.length;a--;)g!=f[a]&&b>=e[a]&&(!e[a+1]||b<=e[a+1])&&this.activate(f[a])},b.prototype.activate=function(b){this.activeTarget=b,a(this.selector).parents(".active").removeClass("active");var c=this.selector+'[data-target="'+b+'"],'+this.selector+'[href="'+b+'"]',d=a(c).parents("li").addClass("active");d.parent(".dropdown-menu").length&&(d=d.closest("li.dropdown").addClass("active")),d.trigger("activate.bs.scrollspy")};var c=a.fn.scrollspy;a.fn.scrollspy=function(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof 
c&&e[c]()})},a.fn.scrollspy.Constructor=b,a.fn.scrollspy.noConflict=function(){return a.fn.scrollspy=c,this},a(window).on("load",function(){a('[data-spy="scroll"]').each(function(){var b=a(this);b.scrollspy(b.data())})})}(jQuery),+function(a){"use strict";var b=function(b){this.element=a(b)};b.prototype.show=function(){var b=this.element,c=b.closest("ul:not(.dropdown-menu)"),d=b.data("target");if(d||(d=b.attr("href"),d=d&&d.replace(/.*(?=#[^\s]*$)/,"")),!b.parent("li").hasClass("active")){var e=c.find(".active:last a")[0],f=a.Event("show.bs.tab",{relatedTarget:e});if(b.trigger(f),!f.isDefaultPrevented()){var g=a(d);this.activate(b.parent("li"),c),this.activate(g,g.parent(),function(){b.trigger({type:"shown.bs.tab",relatedTarget:e})})}}},b.prototype.activate=function(b,c,d){function e(){f.removeClass("active").find("> .dropdown-menu > .active").removeClass("active"),b.addClass("active"),g?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu")&&b.closest("li.dropdown").addClass("active"),d&&d()}var f=c.find("> .active"),g=d&&a.support.transition&&f.hasClass("fade");g?f.one(a.support.transition.end,e).emulateTransitionEnd(150):e(),f.removeClass("in")};var c=a.fn.tab;a.fn.tab=function(c){return this.each(function(){var d=a(this),e=d.data("bs.tab");e||d.data("bs.tab",e=new b(this)),"string"==typeof c&&e[c]()})},a.fn.tab.Constructor=b,a.fn.tab.noConflict=function(){return a.fn.tab=c,this},a(document).on("click.bs.tab.data-api",'[data-toggle="tab"], [data-toggle="pill"]',function(b){b.preventDefault(),a(this).tab("show")})}(jQuery),+function(a){"use strict";var b=function(c,d){this.options=a.extend({},b.DEFAULTS,d),this.$window=a(window).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(c),this.affixed=this.unpin=null,this.checkPosition()};b.RESET="affix affix-top affix-bottom",b.DEFAULTS={offset:0},b.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},b.prototype.checkPosition=function(){if(this.$element.is(":visible")){var c=a(document).height(),d=this.$window.scrollTop(),e=this.$element.offset(),f=this.options.offset,g=f.top,h=f.bottom;"object"!=typeof f&&(h=g=f),"function"==typeof g&&(g=f.top()),"function"==typeof h&&(h=f.bottom());var i=null!=this.unpin&&d+this.unpin<=e.top?!1:null!=h&&e.top+this.$element.height()>=c-h?"bottom":null!=g&&g>=d?"top":!1;this.affixed!==i&&(this.unpin&&this.$element.css("top",""),this.affixed=i,this.unpin="bottom"==i?e.top-d:null,this.$element.removeClass(b.RESET).addClass("affix"+(i?"-"+i:"")),"bottom"==i&&this.$element.offset({top:document.body.offsetHeight-h-this.$element.height()}))}};var c=a.fn.affix;a.fn.affix=function(c){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof c&&c;e||d.data("bs.affix",e=new b(this,f)),"string"==typeof c&&e[c]()})},a.fn.affix.Constructor=b,a.fn.affix.noConflict=function(){return a.fn.affix=c,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var b=a(this),c=b.data();c.offset=c.offset||{},c.offsetBottom&&(c.offset.bottom=c.offsetBottom),c.offsetTop&&(c.offset.top=c.offsetTop),b.affix(c)})})}(jQuery);
+* Bootstrap.js by @fat & @mdo
+* Copyright 2013 Twitter, Inc.
+* http://www.apache.org/licenses/LICENSE-2.0.txt
+*/
+!function(e){"use strict";e(function(){e.support.transition=function(){var e=function(){var e=document.createElement("bootstrap"),t={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"},n;for(n in t)if(e.style[n]!==undefined)return t[n]}();return e&&{end:e}}()})}(window.jQuery),!function(e){"use strict";var t='[data-dismiss="alert"]',n=function(n){e(n).on("click",t,this.close)};n.prototype.close=function(t){function s(){i.trigger("closed").remove()}var n=e(this),r=n.attr("data-target"),i;r||(r=n.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,"")),i=e(r),t&&t.preventDefault(),i.length||(i=n.hasClass("alert")?n:n.parent()),i.trigger(t=e.Event("close"));if(t.isDefaultPrevented())return;i.removeClass("in"),e.support.transition&&i.hasClass("fade")?i.on(e.support.transition.end,s):s()};var r=e.fn.alert;e.fn.alert=function(t){return this.each(function(){var r=e(this),i=r.data("alert");i||r.data("alert",i=new n(this)),typeof t=="string"&&i[t].call(r)})},e.fn.alert.Constructor=n,e.fn.alert.noConflict=function(){return e.fn.alert=r,this},e(document).on("click.alert.data-api",t,n.prototype.close)}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.button.defaults,n)};t.prototype.setState=function(e){var t="disabled",n=this.$element,r=n.data(),i=n.is("input")?"val":"html";e+="Text",r.resetText||n.data("resetText",n[i]()),n[i](r[e]||this.options[e]),setTimeout(function(){e=="loadingText"?n.addClass(t).attr(t,t):n.removeClass(t).removeAttr(t)},0)},t.prototype.toggle=function(){var e=this.$element.closest('[data-toggle="buttons-radio"]');e&&e.find(".active").removeClass("active"),this.$element.toggleClass("active")};var n=e.fn.button;e.fn.button=function(n){return this.each(function(){var r=e(this),i=r.data("button"),s=typeof n=="object"&&n;i||r.data("button",i=new t(this,s)),n=="toggle"?i.toggle():n&&i.setState(n)})},e.fn.button.defaults={loadingText:"loading..."},e.fn.button.Constructor=t,e.fn.button.noConflict=function(){return e.fn.button=n,this},e(document).on("click.button.data-api","[data-toggle^=button]",function(t){var n=e(t.target);n.hasClass("btn")||(n=n.closest(".btn")),n.button("toggle")})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.$indicators=this.$element.find(".carousel-indicators"),this.options=n,this.options.pause=="hover"&&this.$element.on("mouseenter",e.proxy(this.pause,this)).on("mouseleave",e.proxy(this.cycle,this))};t.prototype={cycle:function(t){return t||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(e.proxy(this.next,this),this.options.interval)),this},getActiveIndex:function(){return this.$active=this.$element.find(".item.active"),this.$items=this.$active.parent().children(),this.$items.index(this.$active)},to:function(t){var n=this.getActiveIndex(),r=this;if(t>this.$items.length-1||t<0)return;return this.sliding?this.$element.one("slid",function(){r.to(t)}):n==t?this.pause().cycle():this.slide(t>n?"next":"prev",e(this.$items[t]))},pause:function(t){return t||(this.paused=!0),this.$element.find(".next, .prev").length&&e.support.transition.end&&(this.$element.trigger(e.support.transition.end),this.cycle(!0)),clearInterval(this.interval),this.interval=null,this},next:function(){if(this.sliding)return;return this.slide("next")},prev:function(){if(this.sliding)return;return this.slide("prev")},slide:function(t,n){var 
r=this.$element.find(".item.active"),i=n||r[t](),s=this.interval,o=t=="next"?"left":"right",u=t=="next"?"first":"last",a=this,f;this.sliding=!0,s&&this.pause(),i=i.length?i:this.$element.find(".item")[u](),f=e.Event("slide",{relatedTarget:i[0],direction:o});if(i.hasClass("active"))return;this.$indicators.length&&(this.$indicators.find(".active").removeClass("active"),this.$element.one("slid",function(){var t=e(a.$indicators.children()[a.getActiveIndex()]);t&&t.addClass("active")}));if(e.support.transition&&this.$element.hasClass("slide")){this.$element.trigger(f);if(f.isDefaultPrevented())return;i.addClass(t),i[0].offsetWidth,r.addClass(o),i.addClass(o),this.$element.one(e.support.transition.end,function(){i.removeClass([t,o].join(" ")).addClass("active"),r.removeClass(["active",o].join(" ")),a.sliding=!1,setTimeout(function(){a.$element.trigger("slid")},0)})}else{this.$element.trigger(f);if(f.isDefaultPrevented())return;r.removeClass("active"),i.addClass("active"),this.sliding=!1,this.$element.trigger("slid")}return s&&this.cycle(),this}};var n=e.fn.carousel;e.fn.carousel=function(n){return this.each(function(){var r=e(this),i=r.data("carousel"),s=e.extend({},e.fn.carousel.defaults,typeof n=="object"&&n),o=typeof n=="string"?n:s.slide;i||r.data("carousel",i=new t(this,s)),typeof n=="number"?i.to(n):o?i[o]():s.interval&&i.pause().cycle()})},e.fn.carousel.defaults={interval:5e3,pause:"hover"},e.fn.carousel.Constructor=t,e.fn.carousel.noConflict=function(){return e.fn.carousel=n,this},e(document).on("click.carousel.data-api","[data-slide], [data-slide-to]",function(t){var n=e(this),r,i=e(n.attr("data-target")||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,"")),s=e.extend({},i.data(),n.data()),o;i.carousel(s),(o=n.attr("data-slide-to"))&&i.data("carousel").pause().to(o).cycle(),t.preventDefault()})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.collapse.defaults,n),this.options.parent&&(this.$parent=e(this.options.parent)),this.options.toggle&&this.toggle()};t.prototype={constructor:t,dimension:function(){var e=this.$element.hasClass("width");return e?"width":"height"},show:function(){var t,n,r,i;if(this.transitioning||this.$element.hasClass("in"))return;t=this.dimension(),n=e.camelCase(["scroll",t].join("-")),r=this.$parent&&this.$parent.find("> .accordion-group > .in");if(r&&r.length){i=r.data("collapse");if(i&&i.transitioning)return;r.collapse("hide"),i||r.data("collapse",null)}this.$element[t](0),this.transition("addClass",e.Event("show"),"shown"),e.support.transition&&this.$element[t](this.$element[0][n])},hide:function(){var t;if(this.transitioning||!this.$element.hasClass("in"))return;t=this.dimension(),this.reset(this.$element[t]()),this.transition("removeClass",e.Event("hide"),"hidden"),this.$element[t](0)},reset:function(e){var t=this.dimension();return this.$element.removeClass("collapse")[t](e||"auto")[0].offsetWidth,this.$element[e!==null?"addClass":"removeClass"]("collapse"),this},transition:function(t,n,r){var i=this,s=function(){n.type=="show"&&i.reset(),i.transitioning=0,i.$element.trigger(r)};this.$element.trigger(n);if(n.isDefaultPrevented())return;this.transitioning=1,this.$element[t]("in"),e.support.transition&&this.$element.hasClass("collapse")?this.$element.one(e.support.transition.end,s):s()},toggle:function(){this[this.$element.hasClass("in")?"hide":"show"]()}};var n=e.fn.collapse;e.fn.collapse=function(n){return this.each(function(){var 
r=e(this),i=r.data("collapse"),s=e.extend({},e.fn.collapse.defaults,r.data(),typeof n=="object"&&n);i||r.data("collapse",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.collapse.defaults={toggle:!0},e.fn.collapse.Constructor=t,e.fn.collapse.noConflict=function(){return e.fn.collapse=n,this},e(document).on("click.collapse.data-api","[data-toggle=collapse]",function(t){var n=e(this),r,i=n.attr("data-target")||t.preventDefault()||(r=n.attr("href"))&&r.replace(/.*(?=#[^\s]+$)/,""),s=e(i).data("collapse")?"toggle":n.data();n[e(i).hasClass("in")?"addClass":"removeClass"]("collapsed"),e(i).collapse(s)})}(window.jQuery),!function(e){"use strict";function r(){e(".dropdown-backdrop").remove(),e(t).each(function(){i(e(this)).removeClass("open")})}function i(t){var n=t.attr("data-target"),r;n||(n=t.attr("href"),n=n&&/#/.test(n)&&n.replace(/.*(?=#[^\s]*$)/,"")),r=n&&e(n);if(!r||!r.length)r=t.parent();return r}var t="[data-toggle=dropdown]",n=function(t){var n=e(t).on("click.dropdown.data-api",this.toggle);e("html").on("click.dropdown.data-api",function(){n.parent().removeClass("open")})};n.prototype={constructor:n,toggle:function(t){var n=e(this),s,o;if(n.is(".disabled, :disabled"))return;return s=i(n),o=s.hasClass("open"),r(),o||("ontouchstart"in document.documentElement&&e('<div class="dropdown-backdrop"/>').insertBefore(e(this)).on("click",r),s.toggleClass("open")),n.focus(),!1},keydown:function(n){var r,s,o,u,a,f;if(!/(38|40|27)/.test(n.keyCode))return;r=e(this),n.preventDefault(),n.stopPropagation();if(r.is(".disabled, :disabled"))return;u=i(r),a=u.hasClass("open");if(!a||a&&n.keyCode==27)return n.which==27&&u.find(t).focus(),r.click();s=e("[role=menu] li:not(.divider):visible a",u);if(!s.length)return;f=s.index(s.filter(":focus")),n.keyCode==38&&f>0&&f--,n.keyCode==40&&f<s.length-1&&f++,~f||(f=0),s.eq(f).focus()}};var s=e.fn.dropdown;e.fn.dropdown=function(t){return this.each(function(){var r=e(this),i=r.data("dropdown");i||r.data("dropdown",i=new n(this)),typeof t=="string"&&i[t].call(r)})},e.fn.dropdown.Constructor=n,e.fn.dropdown.noConflict=function(){return e.fn.dropdown=s,this},e(document).on("click.dropdown.data-api",r).on("click.dropdown.data-api",".dropdown form",function(e){e.stopPropagation()}).on("click.dropdown.data-api",t,n.prototype.toggle).on("keydown.dropdown.data-api",t+", [role=menu]",n.prototype.keydown)}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.options=n,this.$element=e(t).delegate('[data-dismiss="modal"]',"click.dismiss.modal",e.proxy(this.hide,this)),this.options.remote&&this.$element.find(".modal-body").load(this.options.remote)};t.prototype={constructor:t,toggle:function(){return this[this.isShown?"hide":"show"]()},show:function(){var t=this,n=e.Event("show");this.$element.trigger(n);if(this.isShown||n.isDefaultPrevented())return;this.isShown=!0,this.escape(),this.backdrop(function(){var n=e.support.transition&&t.$element.hasClass("fade");t.$element.parent().length||t.$element.appendTo(document.body),t.$element.show(),n&&t.$element[0].offsetWidth,t.$element.addClass("in").attr("aria-hidden",!1),t.enforceFocus(),n?t.$element.one(e.support.transition.end,function(){t.$element.focus().trigger("shown")}):t.$element.focus().trigger("shown")})},hide:function(t){t&&t.preventDefault();var 
n=this;t=e.Event("hide"),this.$element.trigger(t);if(!this.isShown||t.isDefaultPrevented())return;this.isShown=!1,this.escape(),e(document).off("focusin.modal"),this.$element.removeClass("in").attr("aria-hidden",!0),e.support.transition&&this.$element.hasClass("fade")?this.hideWithTransition():this.hideModal()},enforceFocus:function(){var t=this;e(document).on("focusin.modal",function(e){t.$element[0]!==e.target&&!t.$element.has(e.target).length&&t.$element.focus()})},escape:function(){var e=this;this.isShown&&this.options.keyboard?this.$element.on("keyup.dismiss.modal",function(t){t.which==27&&e.hide()}):this.isShown||this.$element.off("keyup.dismiss.modal")},hideWithTransition:function(){var t=this,n=setTimeout(function(){t.$element.off(e.support.transition.end),t.hideModal()},500);this.$element.one(e.support.transition.end,function(){clearTimeout(n),t.hideModal()})},hideModal:function(){var e=this;this.$element.hide(),this.backdrop(function(){e.removeBackdrop(),e.$element.trigger("hidden")})},removeBackdrop:function(){this.$backdrop&&this.$backdrop.remove(),this.$backdrop=null},backdrop:function(t){var n=this,r=this.$element.hasClass("fade")?"fade":"";if(this.isShown&&this.options.backdrop){var i=e.support.transition&&r;this.$backdrop=e('<div class="modal-backdrop '+r+'" />').appendTo(document.body),this.$backdrop.click(this.options.backdrop=="static"?e.proxy(this.$element[0].focus,this.$element[0]):e.proxy(this.hide,this)),i&&this.$backdrop[0].offsetWidth,this.$backdrop.addClass("in");if(!t)return;i?this.$backdrop.one(e.support.transition.end,t):t()}else!this.isShown&&this.$backdrop?(this.$backdrop.removeClass("in"),e.support.transition&&this.$element.hasClass("fade")?this.$backdrop.one(e.support.transition.end,t):t()):t&&t()}};var n=e.fn.modal;e.fn.modal=function(n){return this.each(function(){var r=e(this),i=r.data("modal"),s=e.extend({},e.fn.modal.defaults,r.data(),typeof n=="object"&&n);i||r.data("modal",i=new t(this,s)),typeof n=="string"?i[n]():s.show&&i.show()})},e.fn.modal.defaults={backdrop:!0,keyboard:!0,show:!0},e.fn.modal.Constructor=t,e.fn.modal.noConflict=function(){return e.fn.modal=n,this},e(document).on("click.modal.data-api",'[data-toggle="modal"]',function(t){var n=e(this),r=n.attr("href"),i=e(n.attr("data-target")||r&&r.replace(/.*(?=#[^\s]+$)/,"")),s=i.data("modal")?"toggle":e.extend({remote:!/#/.test(r)&&r},i.data(),n.data());t.preventDefault(),i.modal(s).one("hide",function(){n.focus()})})}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("tooltip",e,t)};t.prototype={constructor:t,init:function(t,n,r){var i,s,o,u,a;this.type=t,this.$element=e(n),this.options=this.getOptions(r),this.enabled=!0,o=this.options.trigger.split(" ");for(a=o.length;a--;)u=o[a],u=="click"?this.$element.on("click."+this.type,this.options.selector,e.proxy(this.toggle,this)):u!="manual"&&(i=u=="hover"?"mouseenter":"focus",s=u=="hover"?"mouseleave":"blur",this.$element.on(i+"."+this.type,this.options.selector,e.proxy(this.enter,this)),this.$element.on(s+"."+this.type,this.options.selector,e.proxy(this.leave,this)));this.options.selector?this._options=e.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},getOptions:function(t){return t=e.extend({},e.fn[this.type].defaults,this.$element.data(),t),t.delay&&typeof t.delay=="number"&&(t.delay={show:t.delay,hide:t.delay}),t},enter:function(t){var 
n=e.fn[this.type].defaults,r={},i;this._options&&e.each(this._options,function(e,t){n[e]!=t&&(r[e]=t)},this),i=e(t.currentTarget)[this.type](r).data(this.type);if(!i.options.delay||!i.options.delay.show)return i.show();clearTimeout(this.timeout),i.hoverState="in",this.timeout=setTimeout(function(){i.hoverState=="in"&&i.show()},i.options.delay.show)},leave:function(t){var n=e(t.currentTarget)[this.type](this._options).data(this.type);this.timeout&&clearTimeout(this.timeout);if(!n.options.delay||!n.options.delay.hide)return n.hide();n.hoverState="out",this.timeout=setTimeout(function(){n.hoverState=="out"&&n.hide()},n.options.delay.hide)},show:function(){var t,n,r,i,s,o,u=e.Event("show");if(this.hasContent()&&this.enabled){this.$element.trigger(u);if(u.isDefaultPrevented())return;t=this.tip(),this.setContent(),this.options.animation&&t.addClass("fade"),s=typeof this.options.placement=="function"?this.options.placement.call(this,t[0],this.$element[0]):this.options.placement,t.detach().css({top:0,left:0,display:"block"}),this.options.container?t.appendTo(this.options.container):t.insertAfter(this.$element),n=this.getPosition(),r=t[0].offsetWidth,i=t[0].offsetHeight;switch(s){case"bottom":o={top:n.top+n.height,left:n.left+n.width/2-r/2};break;case"top":o={top:n.top-i,left:n.left+n.width/2-r/2};break;case"left":o={top:n.top+n.height/2-i/2,left:n.left-r};break;case"right":o={top:n.top+n.height/2-i/2,left:n.left+n.width}}this.applyPlacement(o,s),this.$element.trigger("shown")}},applyPlacement:function(e,t){var n=this.tip(),r=n[0].offsetWidth,i=n[0].offsetHeight,s,o,u,a;n.offset(e).addClass(t).addClass("in"),s=n[0].offsetWidth,o=n[0].offsetHeight,t=="top"&&o!=i&&(e.top=e.top+i-o,a=!0),t=="bottom"||t=="top"?(u=0,e.left<0&&(u=e.left*-2,e.left=0,n.offset(e),s=n[0].offsetWidth,o=n[0].offsetHeight),this.replaceArrow(u-r+s,s,"left")):this.replaceArrow(o-i,o,"top"),a&&n.offset(e)},replaceArrow:function(e,t,n){this.arrow().css(n,e?50*(1-e/t)+"%":"")},setContent:function(){var e=this.tip(),t=this.getTitle();e.find(".tooltip-inner")[this.options.html?"html":"text"](t),e.removeClass("fade in top bottom left right")},hide:function(){function i(){var t=setTimeout(function(){n.off(e.support.transition.end).detach()},500);n.one(e.support.transition.end,function(){clearTimeout(t),n.detach()})}var t=this,n=this.tip(),r=e.Event("hide");this.$element.trigger(r);if(r.isDefaultPrevented())return;return n.removeClass("in"),e.support.transition&&this.$tip.hasClass("fade")?i():n.detach(),this.$element.trigger("hidden"),this},fixTitle:function(){var e=this.$element;(e.attr("title")||typeof e.attr("data-original-title")!="string")&&e.attr("data-original-title",e.attr("title")||"").attr("title","")},hasContent:function(){return this.getTitle()},getPosition:function(){var t=this.$element[0];return e.extend({},typeof t.getBoundingClientRect=="function"?t.getBoundingClientRect():{width:t.offsetWidth,height:t.offsetHeight},this.$element.offset())},getTitle:function(){var e,t=this.$element,n=this.options;return e=t.attr("data-original-title")||(typeof n.title=="function"?n.title.call(t[0]):n.title),e},tip:function(){return this.$tip=this.$tip||e(this.options.template)},arrow:function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},validate:function(){this.$element[0].parentNode||(this.hide(),this.$element=null,this.options=null)},enable:function(){this.enabled=!0},disable:function(){this.enabled=!1},toggleEnabled:function(){this.enabled=!this.enabled},toggle:function(t){var 
n=t?e(t.currentTarget)[this.type](this._options).data(this.type):this;n.tip().hasClass("in")?n.hide():n.show()},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}};var n=e.fn.tooltip;e.fn.tooltip=function(n){return this.each(function(){var r=e(this),i=r.data("tooltip"),s=typeof n=="object"&&n;i||r.data("tooltip",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.tooltip.Constructor=t,e.fn.tooltip.defaults={animation:!0,placement:"top",selector:!1,template:'<div class="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,container:!1},e.fn.tooltip.noConflict=function(){return e.fn.tooltip=n,this}}(window.jQuery),!function(e){"use strict";var t=function(e,t){this.init("popover",e,t)};t.prototype=e.extend({},e.fn.tooltip.Constructor.prototype,{constructor:t,setContent:function(){var e=this.tip(),t=this.getTitle(),n=this.getContent();e.find(".popover-title")[this.options.html?"html":"text"](t),e.find(".popover-content")[this.options.html?"html":"text"](n),e.removeClass("fade top bottom left right in")},hasContent:function(){return this.getTitle()||this.getContent()},getContent:function(){var e,t=this.$element,n=this.options;return e=(typeof n.content=="function"?n.content.call(t[0]):n.content)||t.attr("data-content"),e},tip:function(){return this.$tip||(this.$tip=e(this.options.template)),this.$tip},destroy:function(){this.hide().$element.off("."+this.type).removeData(this.type)}});var n=e.fn.popover;e.fn.popover=function(n){return this.each(function(){var r=e(this),i=r.data("popover"),s=typeof n=="object"&&n;i||r.data("popover",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.popover.Constructor=t,e.fn.popover.defaults=e.extend({},e.fn.tooltip.defaults,{placement:"right",trigger:"click",content:"",template:'<div class="popover"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'}),e.fn.popover.noConflict=function(){return e.fn.popover=n,this}}(window.jQuery),!function(e){"use strict";function t(t,n){var r=e.proxy(this.process,this),i=e(t).is("body")?e(window):e(t),s;this.options=e.extend({},e.fn.scrollspy.defaults,n),this.$scrollElement=i.on("scroll.scroll-spy.data-api",r),this.selector=(this.options.target||(s=e(t).attr("href"))&&s.replace(/.*(?=#[^\s]+$)/,"")||"")+" .nav li > a",this.$body=e("body"),this.refresh(),this.process()}t.prototype={constructor:t,refresh:function(){var t=this,n;this.offsets=e([]),this.targets=e([]),n=this.$body.find(this.selector).map(function(){var n=e(this),r=n.data("target")||n.attr("href"),i=/^#\w/.test(r)&&e(r);return i&&i.length&&[[i.position().top+(!e.isWindow(t.$scrollElement.get(0))&&t.$scrollElement.scrollTop()),r]]||null}).sort(function(e,t){return e[0]-t[0]}).each(function(){t.offsets.push(this[0]),t.targets.push(this[1])})},process:function(){var e=this.$scrollElement.scrollTop()+this.options.offset,t=this.$scrollElement[0].scrollHeight||this.$body[0].scrollHeight,n=t-this.$scrollElement.height(),r=this.offsets,i=this.targets,s=this.activeTarget,o;if(e>=n)return s!=(o=i.last()[0])&&this.activate(o);for(o=r.length;o--;)s!=i[o]&&e>=r[o]&&(!r[o+1]||e<=r[o+1])&&this.activate(i[o])},activate:function(t){var 
n,r;this.activeTarget=t,e(this.selector).parent(".active").removeClass("active"),r=this.selector+'[data-target="'+t+'"],'+this.selector+'[href="'+t+'"]',n=e(r).parent("li").addClass("active"),n.parent(".dropdown-menu").length&&(n=n.closest("li.dropdown").addClass("active")),n.trigger("activate")}};var n=e.fn.scrollspy;e.fn.scrollspy=function(n){return this.each(function(){var r=e(this),i=r.data("scrollspy"),s=typeof n=="object"&&n;i||r.data("scrollspy",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.scrollspy.Constructor=t,e.fn.scrollspy.defaults={offset:10},e.fn.scrollspy.noConflict=function(){return e.fn.scrollspy=n,this},e(window).on("load",function(){e('[data-spy="scroll"]').each(function(){var t=e(this);t.scrollspy(t.data())})})}(window.jQuery),!function(e){"use strict";var t=function(t){this.element=e(t)};t.prototype={constructor:t,show:function(){var t=this.element,n=t.closest("ul:not(.dropdown-menu)"),r=t.attr("data-target"),i,s,o;r||(r=t.attr("href"),r=r&&r.replace(/.*(?=#[^\s]*$)/,""));if(t.parent("li").hasClass("active"))return;i=n.find(".active:last a")[0],o=e.Event("show",{relatedTarget:i}),t.trigger(o);if(o.isDefaultPrevented())return;s=e(r),this.activate(t.parent("li"),n),this.activate(s,s.parent(),function(){t.trigger({type:"shown",relatedTarget:i})})},activate:function(t,n,r){function o(){i.removeClass("active").find("> .dropdown-menu > .active").removeClass("active"),t.addClass("active"),s?(t[0].offsetWidth,t.addClass("in")):t.removeClass("fade"),t.parent(".dropdown-menu")&&t.closest("li.dropdown").addClass("active"),r&&r()}var i=n.find("> .active"),s=r&&e.support.transition&&i.hasClass("fade");s?i.one(e.support.transition.end,o):o(),i.removeClass("in")}};var n=e.fn.tab;e.fn.tab=function(n){return this.each(function(){var r=e(this),i=r.data("tab");i||r.data("tab",i=new t(this)),typeof n=="string"&&i[n]()})},e.fn.tab.Constructor=t,e.fn.tab.noConflict=function(){return e.fn.tab=n,this},e(document).on("click.tab.data-api",'[data-toggle="tab"], [data-toggle="pill"]',function(t){t.preventDefault(),e(this).tab("show")})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.$element=e(t),this.options=e.extend({},e.fn.typeahead.defaults,n),this.matcher=this.options.matcher||this.matcher,this.sorter=this.options.sorter||this.sorter,this.highlighter=this.options.highlighter||this.highlighter,this.updater=this.options.updater||this.updater,this.source=this.options.source,this.$menu=e(this.options.menu),this.shown=!1,this.listen()};t.prototype={constructor:t,select:function(){var e=this.$menu.find(".active").attr("data-value");return this.$element.val(this.updater(e)).change(),this.hide()},updater:function(e){return e},show:function(){var t=e.extend({},this.$element.position(),{height:this.$element[0].offsetHeight});return this.$menu.insertAfter(this.$element).css({top:t.top+t.height,left:t.left}).show(),this.shown=!0,this},hide:function(){return this.$menu.hide(),this.shown=!1,this},lookup:function(t){var n;return this.query=this.$element.val(),!this.query||this.query.length<this.options.minLength?this.shown?this.hide():this:(n=e.isFunction(this.source)?this.source(this.query,e.proxy(this.process,this)):this.source,n?this.process(n):this)},process:function(t){var n=this;return t=e.grep(t,function(e){return n.matcher(e)}),t=this.sorter(t),t.length?this.render(t.slice(0,this.options.items)).show():this.shown?this.hide():this},matcher:function(e){return~e.toLowerCase().indexOf(this.query.toLowerCase())},sorter:function(e){var 
t=[],n=[],r=[],i;while(i=e.shift())i.toLowerCase().indexOf(this.query.toLowerCase())?~i.indexOf(this.query)?n.push(i):r.push(i):t.push(i);return t.concat(n,r)},highlighter:function(e){var t=this.query.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g,"\\$&");return e.replace(new RegExp("("+t+")","ig"),function(e,t){return"<strong>"+t+"</strong>"})},render:function(t){var n=this;return t=e(t).map(function(t,r){return t=e(n.options.item).attr("data-value",r),t.find("a").html(n.highlighter(r)),t[0]}),t.first().addClass("active"),this.$menu.html(t),this},next:function(t){var n=this.$menu.find(".active").removeClass("active"),r=n.next();r.length||(r=e(this.$menu.find("li")[0])),r.addClass("active")},prev:function(e){var t=this.$menu.find(".active").removeClass("active"),n=t.prev();n.length||(n=this.$menu.find("li").last()),n.addClass("active")},listen:function(){this.$element.on("focus",e.proxy(this.focus,this)).on("blur",e.proxy(this.blur,this)).on("keypress",e.proxy(this.keypress,this)).on("keyup",e.proxy(this.keyup,this)),this.eventSupported("keydown")&&this.$element.on("keydown",e.proxy(this.keydown,this)),this.$menu.on("click",e.proxy(this.click,this)).on("mouseenter","li",e.proxy(this.mouseenter,this)).on("mouseleave","li",e.proxy(this.mouseleave,this))},eventSupported:function(e){var t=e in this.$element;return t||(this.$element.setAttribute(e,"return;"),t=typeof this.$element[e]=="function"),t},move:function(e){if(!this.shown)return;switch(e.keyCode){case 9:case 13:case 27:e.preventDefault();break;case 38:e.preventDefault(),this.prev();break;case 40:e.preventDefault(),this.next()}e.stopPropagation()},keydown:function(t){this.suppressKeyPressRepeat=~e.inArray(t.keyCode,[40,38,9,13,27]),this.move(t)},keypress:function(e){if(this.suppressKeyPressRepeat)return;this.move(e)},keyup:function(e){switch(e.keyCode){case 40:case 38:case 16:case 17:case 18:break;case 9:case 13:if(!this.shown)return;this.select();break;case 27:if(!this.shown)return;this.hide();break;default:this.lookup()}e.stopPropagation(),e.preventDefault()},focus:function(e){this.focused=!0},blur:function(e){this.focused=!1,!this.mousedover&&this.shown&&this.hide()},click:function(e){e.stopPropagation(),e.preventDefault(),this.select(),this.$element.focus()},mouseenter:function(t){this.mousedover=!0,this.$menu.find(".active").removeClass("active"),e(t.currentTarget).addClass("active")},mouseleave:function(e){this.mousedover=!1,!this.focused&&this.shown&&this.hide()}};var n=e.fn.typeahead;e.fn.typeahead=function(n){return this.each(function(){var r=e(this),i=r.data("typeahead"),s=typeof n=="object"&&n;i||r.data("typeahead",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.typeahead.defaults={source:[],items:8,menu:'<ul class="typeahead dropdown-menu"></ul>',item:'<li><a href="#"></a></li>',minLength:1},e.fn.typeahead.Constructor=t,e.fn.typeahead.noConflict=function(){return e.fn.typeahead=n,this},e(document).on("focus.typeahead.data-api",'[data-provide="typeahead"]',function(t){var n=e(this);if(n.data("typeahead"))return;n.typeahead(n.data())})}(window.jQuery),!function(e){"use strict";var t=function(t,n){this.options=e.extend({},e.fn.affix.defaults,n),this.$window=e(window).on("scroll.affix.data-api",e.proxy(this.checkPosition,this)).on("click.affix.data-api",e.proxy(function(){setTimeout(e.proxy(this.checkPosition,this),1)},this)),this.$element=e(t),this.checkPosition()};t.prototype.checkPosition=function(){if(!this.$element.is(":visible"))return;var 
t=e(document).height(),n=this.$window.scrollTop(),r=this.$element.offset(),i=this.options.offset,s=i.bottom,o=i.top,u="affix affix-top affix-bottom",a;typeof i!="object"&&(s=o=i),typeof o=="function"&&(o=i.top()),typeof s=="function"&&(s=i.bottom()),a=this.unpin!=null&&n+this.unpin<=r.top?!1:s!=null&&r.top+this.$element.height()>=t-s?"bottom":o!=null&&n<=o?"top":!1;if(this.affixed===a)return;this.affixed=a,this.unpin=a=="bottom"?r.top-n:null,this.$element.removeClass(u).addClass("affix"+(a?"-"+a:""))};var n=e.fn.affix;e.fn.affix=function(n){return this.each(function(){var r=e(this),i=r.data("affix"),s=typeof n=="object"&&n;i||r.data("affix",i=new t(this,s)),typeof n=="string"&&i[n]()})},e.fn.affix.Constructor=t,e.fn.affix.defaults={offset:0},e.fn.affix.noConflict=function(){return e.fn.affix=n,this},e(window).on("load",function(){e('[data-spy="affix"]').each(function(){var t=e(this),n=t.data();n.offset=n.offset||{},n.offsetBottom&&(n.offset.bottom=n.offsetBottom),n.offsetTop&&(n.offset.top=n.offsetTop),t.affix(n)})})}(window.jQuery); \ No newline at end of file
diff --git a/bitbake/lib/toaster/toastergui/static/js/importlayer.js b/bitbake/lib/toaster/toastergui/static/js/importlayer.js
new file mode 100644
index 0000000000..d6e140ffdc
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/importlayer.js
@@ -0,0 +1,289 @@
+"use strict"
+
+function importLayerPageInit (ctx) {
+
+ var layerDepBtn = $("#add-layer-dependency-btn");
+ var importAndAddBtn = $("#import-and-add-btn");
+ var layerNameInput = $("#import-layer-name");
+ var vcsURLInput = $("#layer-git-repo-url");
+ var gitRefInput = $("#layer-git-ref");
+ var layerDepInput = $("#layer-dependency");
+ var layerNameCtrl = $("#layer-name-ctrl");
+ var duplicatedLayerName = $("#duplicated-layer-name-hint");
+
+ var layerDeps = {};
+ var layerDepsDeps = {};
+ var currentLayerDepSelection;
+ var validLayerName = /^(\w|-)+$/;
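+ /* Note: the regex above only accepts word characters (letters, digits,
+ * underscore) and hyphens; anything else triggers the invalid-layer-name
+ * hint in the keyup handler further down.
+ */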
+
+ $("#new-project-button").hide();
+
+ libtoaster.makeTypeahead(layerDepInput, ctx.xhrDataTypeaheadUrl, { type : "layers", project_id: ctx.projectId, include_added: "true" }, function(item){
+ currentLayerDepSelection = item;
+
+ layerDepBtn.removeAttr("disabled");
+ });
+
+
+ /* We automatically add the "openembedded-core" layer as a dependency
+ * for convenience, as pretty much all layers depend on it
+ */
+ $.getJSON(ctx.xhrDataTypeaheadUrl, { type : "layers", project_id: ctx.projectId, include_added: "true" , value: "openembedded-core" }, function(layer) {
+ if (layer.list.length == 1) {
+ currentLayerDepSelection = layer.list[0];
+ layerDepBtn.click();
+ }
+ });
+
+ layerDepBtn.click(function(){
+ if (currentLayerDepSelection == undefined)
+ return;
+
+ layerDeps[currentLayerDepSelection.id] = currentLayerDepSelection;
+
+ /* Make a list item for the new layer dependency */
+ var newLayerDep = $("<li><a></a><span class=\"icon-trash\" data-toggle=\"tooltip\" title=\"Delete\"></span></li>");
+
+ newLayerDep.data('layer-id', currentLayerDepSelection.id);
+ newLayerDep.children("span").tooltip();
+
+ var link = newLayerDep.children("a");
+ link.attr("href", ctx.layerDetailsUrl+String(currentLayerDepSelection.id));
+ link.text(currentLayerDepSelection.name);
+ link.tooltip({title: currentLayerDepSelection.tooltip, placement: "right"});
+
+ var trashItem = newLayerDep.children("span");
+ trashItem.click(function () {
+ var toRemove = $(this).parent().data('layer-id');
+ delete layerDeps[toRemove];
+ $(this).parent().fadeOut(function (){
+ $(this).remove();
+ });
+ });
+
+ $("#layer-deps-list").append(newLayerDep);
+
+ libtoaster.getLayerDepsForProject(ctx.xhrDataTypeaheadUrl, ctx.projectId, currentLayerDepSelection.id, function (data){
+ /* These are the dependencies of the layer added as a dependency */
+ if (data.list.length > 0) {
+ currentLayerDepSelection.url = ctx.layerDetailsUrl+currentLayerDepSelection.id;
+ layerDeps[currentLayerDepSelection.id].deps = data.list;
+ }
+
+ /* Clear the current selection */
+ layerDepInput.val("");
+ currentLayerDepSelection = undefined;
+ layerDepBtn.attr("disabled","disabled");
+ }, null);
+ });
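+
+ /* At this point layerDeps maps each selected dependency's id to its
+ * typeahead item; an entry gains an extra "deps" array when that layer
+ * itself has dependencies (fetched above via getLayerDepsForProject).
+ */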
+
+ importAndAddBtn.click(function(){
+ /* This is a list of the names from layerDeps for the layer deps
+ * modal dialog body
+ */
+ var depNames = [];
+
+ /* array of all layer dep ids; includes parent and child deps */
+ var allDeps = [];
+
+ /* temporary object used to reduce the dependencies of each added
+ * layer dependency down to a unique set
+ */
+ var depDeps = {};
+
+ /* layers that have dependencies carry an extra "deps" property;
+ * look at this for each layer and reduce it to a unique object
+ * of deps.
+ */
+ for (var key in layerDeps){
+ if (layerDeps[key].hasOwnProperty('deps')){
+ for (var dep in layerDeps[key].deps){
+ var layer = layerDeps[key].deps[dep];
+ depDeps[layer.id] = layer;
+ }
+ }
+ depNames.push(layerDeps[key].name);
+ allDeps.push(layerDeps[key].id);
+ }
+
+ /* we actually want it as an array so convert it now */
+ var depDepsArray = [];
+ for (var key in depDeps)
+ depDepsArray.push (depDeps[key]);
+
+ if (depDepsArray.length > 0) {
+ var layer = { name: layerNameInput.val(), url: "#", id: -1 };
+ var title = "Layer";
+ var body = "<strong>"+layer.name+"</strong>'s dependencies ("+
+ depNames.join(", ")+") require some layers that are not added to your project. Select the ones you want to add:";
+
+ show_layer_deps_modal(ctx.projectId, layer, depDepsArray, title, body, false, function(selected){
+ /* Add the accepted dependencies to the allDeps array */
+ if (selected.length > 0){
+ allDeps = allDeps.concat (selected);
+ }
+ import_and_add ();
+ });
+ } else {
+ import_and_add ();
+ }
+
+ function import_and_add () {
+ /* convert to a csv of all the deps to be added */
+ var layerDepsCsv = allDeps.join(",");
+
+ var layerData = {
+ name: layerNameInput.val(),
+ vcs_url: vcsURLInput.val(),
+ git_ref: gitRefInput.val(),
+ summary: $("#layer-summary").val(),
+ dir_path: $("#layer-subdir").val(),
+ project_id: ctx.projectId,
+ layer_deps: layerDepsCsv,
+ };
+
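+ /* The import endpoint is expected to reply with {error: "ok"} on
+ * success, or with one of the "hint-*" error strings handled in
+ * show_error_message() below.
+ */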
+ $.ajax({
+ type: "POST",
+ url: ctx.xhrImportLayerUrl,
+ data: layerData,
+ headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
+ success: function (data) {
+ if (data.error != "ok") {
+ show_error_message(data, layerData);
+ console.log(data.error);
+ } else {
+ /* Layer import succeeded; now go to the project page */
+ $.cookie('layer-imported-alert', JSON.stringify(data), { path: '/'});
+ window.location.replace(ctx.projectPageUrl+'#/layerimported');
+ }
+ },
+ error: function (data) {
+ console.log("Call failed");
+ console.log(data);
+ }
+ });
+ }
+ });
+
+ function show_error_message(error, layerData) {
+
+ var errorMsg = $("#import-error").fadeIn();
+ var errorType = error.error;
+ var body = errorMsg.children("p");
+ var title = errorMsg.children("h3");
+ var optionsList = errorMsg.children("ul");
+ var invalidLayerRevision = $("#invalid-layer-revision-hint");
+ var layerRevisionCtrl = $("#layer-revision-ctrl");
+
+ /* remove any existing items */
+ optionsList.children().each(function(){ $(this).remove(); });
+ body.text("");
+ title.text("");
+ invalidLayerRevision.hide();
+ layerNameCtrl.removeClass("error");
+ layerRevisionCtrl.removeClass("error");
+
+ switch (errorType){
+ case 'hint-layer-version-exists':
+ title.text("This layer already exists");
+ body.html("A layer <strong>"+layerData.name+"</strong> already exists with this Git repository URL and this revision. You can:");
+ optionsList.append("<li>Import <strong>"+layerData.name+"</strong> with a different revision </li>");
+ optionsList.append("<li>or <a href=\""+ctx.layerDetailsUrl+error.existing_layer_version+"/\" >change the revision of the existing layer</a></li>");
+
+ layerRevisionCtrl.addClass("error");
+
+ invalidLayerRevision.html("A layer <strong>"+layerData.name+"</strong> already exists with this revision.<br />You can import <strong>"+layerData.name+"</strong> with a different revision");
+ invalidLayerRevision.show();
+ break;
+
+ case 'hint-layer-exists-with-different-url':
+ title.text("This layer already exists");
+ body.html("A layer <strong>"+layerData.name+"</strong> already exists with a different Git repository URL:<p style='margin-top:10px;'><strong>"+error.current_url+"</strong></p><p>You can:</p>");
+ optionsList.append("<li>Import the layer under a different name</li>");
+ optionsList.append("<li>or <a href=\""+ctx.layerDetailsUrl+error.current_id+"/\" >change the Git repository URL of the existing layer</a></li>");
+ duplicatedLayerName.html("A layer <strong>"+layerData.name+"</strong> already exists with a different Git repository URL.<br />To import this layer give it a different name.");
+ duplicatedLayerName.show();
+ layerNameCtrl.addClass("error");
+ break;
+
+ case 'hint-layer-exists':
+ title.text("This layer already exists");
+ body.html("A layer <strong>"+layerData.name+"</strong> already exists. You can:");
+ optionsList.append("<li>Import the layer under a different name</li>");
+ break;
+ default:
+ title.text("Error")
+ body.text(data.error);
+ }
+ }
+
+ function enable_import_btn (enabled) {
+ var importAndAddHint = $("#import-and-add-hint");
+
+ if (enabled) {
+ importAndAddBtn.removeAttr("disabled");
+ importAndAddHint.hide();
+ return;
+ }
+
+ importAndAddBtn.attr("disabled", "disabled");
+ importAndAddHint.show();
+ }
+
+ function check_form() {
+ var valid = false;
+ var inputs = $("input:required");
+
+ for (var i=0; i<inputs.length; i++){
+ if (!(valid = inputs[i].value)){
+ enable_import_btn(false);
+ break;
+ }
+ }
+
+ if (valid)
+ enable_import_btn(true);
+ }
+
+ vcsURLInput.keyup(function() {
+ check_form();
+ });
+
+ gitRefInput.keyup(function() {
+ check_form();
+ });
+
+ layerNameInput.keyup(function() {
+ if ($(this).val() && !validLayerName.test($(this).val())){
+ layerNameCtrl.addClass("error");
+ $("#invalid-layer-name-hint").show();
+ enable_import_btn(false);
+ return;
+ }
+
+ /* Don't remove the error class if we're displaying the error for another
+ * reason.
+ */
+ if (!duplicatedLayerName.is(":visible"))
+ layerNameCtrl.removeClass("error");
+
+ $("#invalid-layer-name-hint").hide();
+ check_form();
+ });
+
+ /* Have a guess at the layer name */
+ vcsURLInput.focusout(function (){
+ /* If we have a layer name specified don't overwrite it, and if there
+ * isn't a url typed in yet just return
+ */
+ if (layerNameInput.val() || !$(this).val())
+ return;
+
+ if ($(this).val().indexOf("/") > -1){
+ var urlPts = $(this).val().split("/");
+ var suggestion = urlPts[urlPts.length-1].replace(".git","");
+ layerNameInput.val(suggestion);
+ }
+ });
+
+}
diff --git a/bitbake/lib/toaster/toastergui/static/js/jquery-ui.js b/bitbake/lib/toaster/toastergui/static/js/jquery-ui.js
deleted file mode 100644
index 7b727e7435..0000000000
--- a/bitbake/lib/toaster/toastergui/static/js/jquery-ui.js
+++ /dev/null
@@ -1,15003 +0,0 @@
-/*! jQuery UI - v1.10.3 - 2013-05-03
-* http://jqueryui.com
-* Includes: jquery.ui.core.js, jquery.ui.widget.js, jquery.ui.mouse.js, jquery.ui.draggable.js, jquery.ui.droppable.js, jquery.ui.resizable.js, jquery.ui.selectable.js, jquery.ui.sortable.js, jquery.ui.effect.js, jquery.ui.accordion.js, jquery.ui.autocomplete.js, jquery.ui.button.js, jquery.ui.datepicker.js, jquery.ui.dialog.js, jquery.ui.effect-blind.js, jquery.ui.effect-bounce.js, jquery.ui.effect-clip.js, jquery.ui.effect-drop.js, jquery.ui.effect-explode.js, jquery.ui.effect-fade.js, jquery.ui.effect-fold.js, jquery.ui.effect-highlight.js, jquery.ui.effect-pulsate.js, jquery.ui.effect-scale.js, jquery.ui.effect-shake.js, jquery.ui.effect-slide.js, jquery.ui.effect-transfer.js, jquery.ui.menu.js, jquery.ui.position.js, jquery.ui.progressbar.js, jquery.ui.slider.js, jquery.ui.spinner.js, jquery.ui.tabs.js, jquery.ui.tooltip.js
-* Copyright 2013 jQuery Foundation and other contributors; Licensed MIT */
-(function( $, undefined ) {
-
-var uuid = 0,
- runiqueId = /^ui-id-\d+$/;
-
-// $.ui might exist from components with no dependencies, e.g., $.ui.position
-$.ui = $.ui || {};
-
-$.extend( $.ui, {
- version: "1.10.3",
-
- keyCode: {
- BACKSPACE: 8,
- COMMA: 188,
- DELETE: 46,
- DOWN: 40,
- END: 35,
- ENTER: 13,
- ESCAPE: 27,
- HOME: 36,
- LEFT: 37,
- NUMPAD_ADD: 107,
- NUMPAD_DECIMAL: 110,
- NUMPAD_DIVIDE: 111,
- NUMPAD_ENTER: 108,
- NUMPAD_MULTIPLY: 106,
- NUMPAD_SUBTRACT: 109,
- PAGE_DOWN: 34,
- PAGE_UP: 33,
- PERIOD: 190,
- RIGHT: 39,
- SPACE: 32,
- TAB: 9,
- UP: 38
- }
-});
-
-// plugins
-$.fn.extend({
- focus: (function( orig ) {
- return function( delay, fn ) {
- return typeof delay === "number" ?
- this.each(function() {
- var elem = this;
- setTimeout(function() {
- $( elem ).focus();
- if ( fn ) {
- fn.call( elem );
- }
- }, delay );
- }) :
- orig.apply( this, arguments );
- };
- })( $.fn.focus ),
-
- scrollParent: function() {
- var scrollParent;
- if (($.ui.ie && (/(static|relative)/).test(this.css("position"))) || (/absolute/).test(this.css("position"))) {
- scrollParent = this.parents().filter(function() {
- return (/(relative|absolute|fixed)/).test($.css(this,"position")) && (/(auto|scroll)/).test($.css(this,"overflow")+$.css(this,"overflow-y")+$.css(this,"overflow-x"));
- }).eq(0);
- } else {
- scrollParent = this.parents().filter(function() {
- return (/(auto|scroll)/).test($.css(this,"overflow")+$.css(this,"overflow-y")+$.css(this,"overflow-x"));
- }).eq(0);
- }
-
- return (/fixed/).test(this.css("position")) || !scrollParent.length ? $(document) : scrollParent;
- },
-
- zIndex: function( zIndex ) {
- if ( zIndex !== undefined ) {
- return this.css( "zIndex", zIndex );
- }
-
- if ( this.length ) {
- var elem = $( this[ 0 ] ), position, value;
- while ( elem.length && elem[ 0 ] !== document ) {
- // Ignore z-index if position is set to a value where z-index is ignored by the browser
- // This makes behavior of this function consistent across browsers
- // WebKit always returns auto if the element is positioned
- position = elem.css( "position" );
- if ( position === "absolute" || position === "relative" || position === "fixed" ) {
- // IE returns 0 when zIndex is not specified
- // other browsers return a string
- // we ignore the case of nested elements with an explicit value of 0
- // <div style="z-index: -10;"><div style="z-index: 0;"></div></div>
- value = parseInt( elem.css( "zIndex" ), 10 );
- if ( !isNaN( value ) && value !== 0 ) {
- return value;
- }
- }
- elem = elem.parent();
- }
- }
-
- return 0;
- },
-
- uniqueId: function() {
- return this.each(function() {
- if ( !this.id ) {
- this.id = "ui-id-" + (++uuid);
- }
- });
- },
-
- removeUniqueId: function() {
- return this.each(function() {
- if ( runiqueId.test( this.id ) ) {
- $( this ).removeAttr( "id" );
- }
- });
- }
-});
-
-// selectors
-function focusable( element, isTabIndexNotNaN ) {
- var map, mapName, img,
- nodeName = element.nodeName.toLowerCase();
- if ( "area" === nodeName ) {
- map = element.parentNode;
- mapName = map.name;
- if ( !element.href || !mapName || map.nodeName.toLowerCase() !== "map" ) {
- return false;
- }
- img = $( "img[usemap=#" + mapName + "]" )[0];
- return !!img && visible( img );
- }
- return ( /input|select|textarea|button|object/.test( nodeName ) ?
- !element.disabled :
- "a" === nodeName ?
- element.href || isTabIndexNotNaN :
- isTabIndexNotNaN) &&
- // the element and all of its ancestors must be visible
- visible( element );
-}
-
-function visible( element ) {
- return $.expr.filters.visible( element ) &&
- !$( element ).parents().addBack().filter(function() {
- return $.css( this, "visibility" ) === "hidden";
- }).length;
-}
-
-$.extend( $.expr[ ":" ], {
- data: $.expr.createPseudo ?
- $.expr.createPseudo(function( dataName ) {
- return function( elem ) {
- return !!$.data( elem, dataName );
- };
- }) :
- // support: jQuery <1.8
- function( elem, i, match ) {
- return !!$.data( elem, match[ 3 ] );
- },
-
- focusable: function( element ) {
- return focusable( element, !isNaN( $.attr( element, "tabindex" ) ) );
- },
-
- tabbable: function( element ) {
- var tabIndex = $.attr( element, "tabindex" ),
- isTabIndexNaN = isNaN( tabIndex );
- return ( isTabIndexNaN || tabIndex >= 0 ) && focusable( element, !isTabIndexNaN );
- }
-});
-
-// support: jQuery <1.8
-if ( !$( "<a>" ).outerWidth( 1 ).jquery ) {
- $.each( [ "Width", "Height" ], function( i, name ) {
- var side = name === "Width" ? [ "Left", "Right" ] : [ "Top", "Bottom" ],
- type = name.toLowerCase(),
- orig = {
- innerWidth: $.fn.innerWidth,
- innerHeight: $.fn.innerHeight,
- outerWidth: $.fn.outerWidth,
- outerHeight: $.fn.outerHeight
- };
-
- function reduce( elem, size, border, margin ) {
- $.each( side, function() {
- size -= parseFloat( $.css( elem, "padding" + this ) ) || 0;
- if ( border ) {
- size -= parseFloat( $.css( elem, "border" + this + "Width" ) ) || 0;
- }
- if ( margin ) {
- size -= parseFloat( $.css( elem, "margin" + this ) ) || 0;
- }
- });
- return size;
- }
-
- $.fn[ "inner" + name ] = function( size ) {
- if ( size === undefined ) {
- return orig[ "inner" + name ].call( this );
- }
-
- return this.each(function() {
- $( this ).css( type, reduce( this, size ) + "px" );
- });
- };
-
- $.fn[ "outer" + name] = function( size, margin ) {
- if ( typeof size !== "number" ) {
- return orig[ "outer" + name ].call( this, size );
- }
-
- return this.each(function() {
- $( this).css( type, reduce( this, size, true, margin ) + "px" );
- });
- };
- });
-}
-
-// support: jQuery <1.8
-if ( !$.fn.addBack ) {
- $.fn.addBack = function( selector ) {
- return this.add( selector == null ?
- this.prevObject : this.prevObject.filter( selector )
- );
- };
-}
-
-// support: jQuery 1.6.1, 1.6.2 (http://bugs.jquery.com/ticket/9413)
-if ( $( "<a>" ).data( "a-b", "a" ).removeData( "a-b" ).data( "a-b" ) ) {
- $.fn.removeData = (function( removeData ) {
- return function( key ) {
- if ( arguments.length ) {
- return removeData.call( this, $.camelCase( key ) );
- } else {
- return removeData.call( this );
- }
- };
- })( $.fn.removeData );
-}
-
-
-
-
-
-// deprecated
-$.ui.ie = !!/msie [\w.]+/.exec( navigator.userAgent.toLowerCase() );
-
-$.support.selectstart = "onselectstart" in document.createElement( "div" );
-$.fn.extend({
- disableSelection: function() {
- return this.bind( ( $.support.selectstart ? "selectstart" : "mousedown" ) +
- ".ui-disableSelection", function( event ) {
- event.preventDefault();
- });
- },
-
- enableSelection: function() {
- return this.unbind( ".ui-disableSelection" );
- }
-});
-
-$.extend( $.ui, {
- // $.ui.plugin is deprecated. Use $.widget() extensions instead.
- plugin: {
- add: function( module, option, set ) {
- var i,
- proto = $.ui[ module ].prototype;
- for ( i in set ) {
- proto.plugins[ i ] = proto.plugins[ i ] || [];
- proto.plugins[ i ].push( [ option, set[ i ] ] );
- }
- },
- call: function( instance, name, args ) {
- var i,
- set = instance.plugins[ name ];
- if ( !set || !instance.element[ 0 ].parentNode || instance.element[ 0 ].parentNode.nodeType === 11 ) {
- return;
- }
-
- for ( i = 0; i < set.length; i++ ) {
- if ( instance.options[ set[ i ][ 0 ] ] ) {
- set[ i ][ 1 ].apply( instance.element, args );
- }
- }
- }
- },
-
- // only used by resizable
- hasScroll: function( el, a ) {
-
- //If overflow is hidden, the element might have extra content, but the user wants to hide it
- if ( $( el ).css( "overflow" ) === "hidden") {
- return false;
- }
-
- var scroll = ( a && a === "left" ) ? "scrollLeft" : "scrollTop",
- has = false;
-
- if ( el[ scroll ] > 0 ) {
- return true;
- }
-
- // TODO: determine which cases actually cause this to happen
- // if the element doesn't have the scroll set, see if it's possible to
- // set the scroll
- el[ scroll ] = 1;
- has = ( el[ scroll ] > 0 );
- el[ scroll ] = 0;
- return has;
- }
-});
-
-})( jQuery );
-
-(function( $, undefined ) {
-
-var uuid = 0,
- slice = Array.prototype.slice,
- _cleanData = $.cleanData;
-$.cleanData = function( elems ) {
- for ( var i = 0, elem; (elem = elems[i]) != null; i++ ) {
- try {
- $( elem ).triggerHandler( "remove" );
- // http://bugs.jquery.com/ticket/8235
- } catch( e ) {}
- }
- _cleanData( elems );
-};
-
-$.widget = function( name, base, prototype ) {
- var fullName, existingConstructor, constructor, basePrototype,
- // proxiedPrototype allows the provided prototype to remain unmodified
- // so that it can be used as a mixin for multiple widgets (#8876)
- proxiedPrototype = {},
- namespace = name.split( "." )[ 0 ];
-
- name = name.split( "." )[ 1 ];
- fullName = namespace + "-" + name;
-
- if ( !prototype ) {
- prototype = base;
- base = $.Widget;
- }
-
- // create selector for plugin
- $.expr[ ":" ][ fullName.toLowerCase() ] = function( elem ) {
- return !!$.data( elem, fullName );
- };
-
- $[ namespace ] = $[ namespace ] || {};
- existingConstructor = $[ namespace ][ name ];
- constructor = $[ namespace ][ name ] = function( options, element ) {
- // allow instantiation without "new" keyword
- if ( !this._createWidget ) {
- return new constructor( options, element );
- }
-
- // allow instantiation without initializing for simple inheritance
- // must use "new" keyword (the code above always passes args)
- if ( arguments.length ) {
- this._createWidget( options, element );
- }
- };
- // extend with the existing constructor to carry over any static properties
- $.extend( constructor, existingConstructor, {
- version: prototype.version,
- // copy the object used to create the prototype in case we need to
- // redefine the widget later
- _proto: $.extend( {}, prototype ),
- // track widgets that inherit from this widget in case this widget is
- // redefined after a widget inherits from it
- _childConstructors: []
- });
-
- basePrototype = new base();
- // we need to make the options hash a property directly on the new instance
- // otherwise we'll modify the options hash on the prototype that we're
- // inheriting from
- basePrototype.options = $.widget.extend( {}, basePrototype.options );
- $.each( prototype, function( prop, value ) {
- if ( !$.isFunction( value ) ) {
- proxiedPrototype[ prop ] = value;
- return;
- }
- proxiedPrototype[ prop ] = (function() {
- var _super = function() {
- return base.prototype[ prop ].apply( this, arguments );
- },
- _superApply = function( args ) {
- return base.prototype[ prop ].apply( this, args );
- };
- return function() {
- var __super = this._super,
- __superApply = this._superApply,
- returnValue;
-
- this._super = _super;
- this._superApply = _superApply;
-
- returnValue = value.apply( this, arguments );
-
- this._super = __super;
- this._superApply = __superApply;
-
- return returnValue;
- };
- })();
- });
- constructor.prototype = $.widget.extend( basePrototype, {
- // TODO: remove support for widgetEventPrefix
- // always use the name + a colon as the prefix, e.g., draggable:start
- // don't prefix for widgets that aren't DOM-based
- widgetEventPrefix: existingConstructor ? basePrototype.widgetEventPrefix : name
- }, proxiedPrototype, {
- constructor: constructor,
- namespace: namespace,
- widgetName: name,
- widgetFullName: fullName
- });
-
- // If this widget is being redefined then we need to find all widgets that
- // are inheriting from it and redefine all of them so that they inherit from
- // the new version of this widget. We're essentially trying to replace one
- // level in the prototype chain.
- if ( existingConstructor ) {
- $.each( existingConstructor._childConstructors, function( i, child ) {
- var childPrototype = child.prototype;
-
- // redefine the child widget using the same prototype that was
- // originally used, but inherit from the new version of the base
- $.widget( childPrototype.namespace + "." + childPrototype.widgetName, constructor, child._proto );
- });
- // remove the list of existing child constructors from the old constructor
- // so the old child constructors can be garbage collected
- delete existingConstructor._childConstructors;
- } else {
- base._childConstructors.push( constructor );
- }
-
- $.widget.bridge( name, constructor );
-};
-
-$.widget.extend = function( target ) {
- var input = slice.call( arguments, 1 ),
- inputIndex = 0,
- inputLength = input.length,
- key,
- value;
- for ( ; inputIndex < inputLength; inputIndex++ ) {
- for ( key in input[ inputIndex ] ) {
- value = input[ inputIndex ][ key ];
- if ( input[ inputIndex ].hasOwnProperty( key ) && value !== undefined ) {
- // Clone objects
- if ( $.isPlainObject( value ) ) {
- target[ key ] = $.isPlainObject( target[ key ] ) ?
- $.widget.extend( {}, target[ key ], value ) :
- // Don't extend strings, arrays, etc. with objects
- $.widget.extend( {}, value );
- // Copy everything else by reference
- } else {
- target[ key ] = value;
- }
- }
- }
- }
- return target;
-};
-
-$.widget.bridge = function( name, object ) {
- var fullName = object.prototype.widgetFullName || name;
- $.fn[ name ] = function( options ) {
- var isMethodCall = typeof options === "string",
- args = slice.call( arguments, 1 ),
- returnValue = this;
-
- // allow multiple hashes to be passed on init
- options = !isMethodCall && args.length ?
- $.widget.extend.apply( null, [ options ].concat(args) ) :
- options;
-
- if ( isMethodCall ) {
- this.each(function() {
- var methodValue,
- instance = $.data( this, fullName );
- if ( !instance ) {
- return $.error( "cannot call methods on " + name + " prior to initialization; " +
- "attempted to call method '" + options + "'" );
- }
- if ( !$.isFunction( instance[options] ) || options.charAt( 0 ) === "_" ) {
- return $.error( "no such method '" + options + "' for " + name + " widget instance" );
- }
- methodValue = instance[ options ].apply( instance, args );
- if ( methodValue !== instance && methodValue !== undefined ) {
- returnValue = methodValue && methodValue.jquery ?
- returnValue.pushStack( methodValue.get() ) :
- methodValue;
- return false;
- }
- });
- } else {
- this.each(function() {
- var instance = $.data( this, fullName );
- if ( instance ) {
- instance.option( options || {} )._init();
- } else {
- $.data( this, fullName, new object( options, this ) );
- }
- });
- }
-
- return returnValue;
- };
-};
-
-$.Widget = function( /* options, element */ ) {};
-$.Widget._childConstructors = [];
-
-$.Widget.prototype = {
- widgetName: "widget",
- widgetEventPrefix: "",
- defaultElement: "<div>",
- options: {
- disabled: false,
-
- // callbacks
- create: null
- },
- _createWidget: function( options, element ) {
- element = $( element || this.defaultElement || this )[ 0 ];
- this.element = $( element );
- this.uuid = uuid++;
- this.eventNamespace = "." + this.widgetName + this.uuid;
- this.options = $.widget.extend( {},
- this.options,
- this._getCreateOptions(),
- options );
-
- this.bindings = $();
- this.hoverable = $();
- this.focusable = $();
-
- if ( element !== this ) {
- $.data( element, this.widgetFullName, this );
- this._on( true, this.element, {
- remove: function( event ) {
- if ( event.target === element ) {
- this.destroy();
- }
- }
- });
- this.document = $( element.style ?
- // element within the document
- element.ownerDocument :
- // element is window or document
- element.document || element );
- this.window = $( this.document[0].defaultView || this.document[0].parentWindow );
- }
-
- this._create();
- this._trigger( "create", null, this._getCreateEventData() );
- this._init();
- },
- _getCreateOptions: $.noop,
- _getCreateEventData: $.noop,
- _create: $.noop,
- _init: $.noop,
-
- destroy: function() {
- this._destroy();
- // we can probably remove the unbind calls in 2.0
- // all event bindings should go through this._on()
- this.element
- .unbind( this.eventNamespace )
- // 1.9 BC for #7810
- // TODO remove dual storage
- .removeData( this.widgetName )
- .removeData( this.widgetFullName )
- // support: jquery <1.6.3
- // http://bugs.jquery.com/ticket/9413
- .removeData( $.camelCase( this.widgetFullName ) );
- this.widget()
- .unbind( this.eventNamespace )
- .removeAttr( "aria-disabled" )
- .removeClass(
- this.widgetFullName + "-disabled " +
- "ui-state-disabled" );
-
- // clean up events and states
- this.bindings.unbind( this.eventNamespace );
- this.hoverable.removeClass( "ui-state-hover" );
- this.focusable.removeClass( "ui-state-focus" );
- },
- _destroy: $.noop,
-
- widget: function() {
- return this.element;
- },
-
- option: function( key, value ) {
- var options = key,
- parts,
- curOption,
- i;
-
- if ( arguments.length === 0 ) {
- // don't return a reference to the internal hash
- return $.widget.extend( {}, this.options );
- }
-
- if ( typeof key === "string" ) {
- // handle nested keys, e.g., "foo.bar" => { foo: { bar: ___ } }
- options = {};
- parts = key.split( "." );
- key = parts.shift();
- if ( parts.length ) {
- curOption = options[ key ] = $.widget.extend( {}, this.options[ key ] );
- for ( i = 0; i < parts.length - 1; i++ ) {
- curOption[ parts[ i ] ] = curOption[ parts[ i ] ] || {};
- curOption = curOption[ parts[ i ] ];
- }
- key = parts.pop();
- if ( value === undefined ) {
- return curOption[ key ] === undefined ? null : curOption[ key ];
- }
- curOption[ key ] = value;
- } else {
- if ( value === undefined ) {
- return this.options[ key ] === undefined ? null : this.options[ key ];
- }
- options[ key ] = value;
- }
- }
-
- this._setOptions( options );
-
- return this;
- },
- _setOptions: function( options ) {
- var key;
-
- for ( key in options ) {
- this._setOption( key, options[ key ] );
- }
-
- return this;
- },
- _setOption: function( key, value ) {
- this.options[ key ] = value;
-
- if ( key === "disabled" ) {
- this.widget()
- .toggleClass( this.widgetFullName + "-disabled ui-state-disabled", !!value )
- .attr( "aria-disabled", value );
- this.hoverable.removeClass( "ui-state-hover" );
- this.focusable.removeClass( "ui-state-focus" );
- }
-
- return this;
- },
-
- enable: function() {
- return this._setOption( "disabled", false );
- },
- disable: function() {
- return this._setOption( "disabled", true );
- },
-
- _on: function( suppressDisabledCheck, element, handlers ) {
- var delegateElement,
- instance = this;
-
- // no suppressDisabledCheck flag, shuffle arguments
- if ( typeof suppressDisabledCheck !== "boolean" ) {
- handlers = element;
- element = suppressDisabledCheck;
- suppressDisabledCheck = false;
- }
-
- // no element argument, shuffle and use this.element
- if ( !handlers ) {
- handlers = element;
- element = this.element;
- delegateElement = this.widget();
- } else {
- // accept selectors, DOM elements
- element = delegateElement = $( element );
- this.bindings = this.bindings.add( element );
- }
-
- $.each( handlers, function( event, handler ) {
- function handlerProxy() {
- // allow widgets to customize the disabled handling
- // - disabled as an array instead of boolean
- // - disabled class as method for disabling individual parts
- if ( !suppressDisabledCheck &&
- ( instance.options.disabled === true ||
- $( this ).hasClass( "ui-state-disabled" ) ) ) {
- return;
- }
- return ( typeof handler === "string" ? instance[ handler ] : handler )
- .apply( instance, arguments );
- }
-
- // copy the guid so direct unbinding works
- if ( typeof handler !== "string" ) {
- handlerProxy.guid = handler.guid =
- handler.guid || handlerProxy.guid || $.guid++;
- }
-
- var match = event.match( /^(\w+)\s*(.*)$/ ),
- eventName = match[1] + instance.eventNamespace,
- selector = match[2];
- if ( selector ) {
- delegateElement.delegate( selector, eventName, handlerProxy );
- } else {
- element.bind( eventName, handlerProxy );
- }
- });
- },
-
- _off: function( element, eventName ) {
- eventName = (eventName || "").split( " " ).join( this.eventNamespace + " " ) + this.eventNamespace;
- element.unbind( eventName ).undelegate( eventName );
- },
-
- _delay: function( handler, delay ) {
- function handlerProxy() {
- return ( typeof handler === "string" ? instance[ handler ] : handler )
- .apply( instance, arguments );
- }
- var instance = this;
- return setTimeout( handlerProxy, delay || 0 );
- },
-
- _hoverable: function( element ) {
- this.hoverable = this.hoverable.add( element );
- this._on( element, {
- mouseenter: function( event ) {
- $( event.currentTarget ).addClass( "ui-state-hover" );
- },
- mouseleave: function( event ) {
- $( event.currentTarget ).removeClass( "ui-state-hover" );
- }
- });
- },
-
- _focusable: function( element ) {
- this.focusable = this.focusable.add( element );
- this._on( element, {
- focusin: function( event ) {
- $( event.currentTarget ).addClass( "ui-state-focus" );
- },
- focusout: function( event ) {
- $( event.currentTarget ).removeClass( "ui-state-focus" );
- }
- });
- },
-
- _trigger: function( type, event, data ) {
- var prop, orig,
- callback = this.options[ type ];
-
- data = data || {};
- event = $.Event( event );
- event.type = ( type === this.widgetEventPrefix ?
- type :
- this.widgetEventPrefix + type ).toLowerCase();
- // the original event may come from any element
- // so we need to reset the target on the new event
- event.target = this.element[ 0 ];
-
- // copy original event properties over to the new event
- orig = event.originalEvent;
- if ( orig ) {
- for ( prop in orig ) {
- if ( !( prop in event ) ) {
- event[ prop ] = orig[ prop ];
- }
- }
- }
-
- this.element.trigger( event, data );
- return !( $.isFunction( callback ) &&
- callback.apply( this.element[0], [ event ].concat( data ) ) === false ||
- event.isDefaultPrevented() );
- }
-};
-
-$.each( { show: "fadeIn", hide: "fadeOut" }, function( method, defaultEffect ) {
- $.Widget.prototype[ "_" + method ] = function( element, options, callback ) {
- if ( typeof options === "string" ) {
- options = { effect: options };
- }
- var hasOptions,
- effectName = !options ?
- method :
- options === true || typeof options === "number" ?
- defaultEffect :
- options.effect || defaultEffect;
- options = options || {};
- if ( typeof options === "number" ) {
- options = { duration: options };
- }
- hasOptions = !$.isEmptyObject( options );
- options.complete = callback;
- if ( options.delay ) {
- element.delay( options.delay );
- }
- if ( hasOptions && $.effects && $.effects.effect[ effectName ] ) {
- element[ method ]( options );
- } else if ( effectName !== method && element[ effectName ] ) {
- element[ effectName ]( options.duration, options.easing, callback );
- } else {
- element.queue(function( next ) {
- $( this )[ method ]();
- if ( callback ) {
- callback.call( element[ 0 ] );
- }
- next();
- });
- }
- };
-});
-
-})( jQuery );
-
-(function( $, undefined ) {
-
-var mouseHandled = false;
-$( document ).mouseup( function() {
- mouseHandled = false;
-});
-
-$.widget("ui.mouse", {
- version: "1.10.3",
- options: {
- cancel: "input,textarea,button,select,option",
- distance: 1,
- delay: 0
- },
- _mouseInit: function() {
- var that = this;
-
- this.element
- .bind("mousedown."+this.widgetName, function(event) {
- return that._mouseDown(event);
- })
- .bind("click."+this.widgetName, function(event) {
- if (true === $.data(event.target, that.widgetName + ".preventClickEvent")) {
- $.removeData(event.target, that.widgetName + ".preventClickEvent");
- event.stopImmediatePropagation();
- return false;
- }
- });
-
- this.started = false;
- },
-
- // TODO: make sure destroying one instance of mouse doesn't mess with
- // other instances of mouse
- _mouseDestroy: function() {
- this.element.unbind("."+this.widgetName);
- if ( this._mouseMoveDelegate ) {
- $(document)
- .unbind("mousemove."+this.widgetName, this._mouseMoveDelegate)
- .unbind("mouseup."+this.widgetName, this._mouseUpDelegate);
- }
- },
-
- _mouseDown: function(event) {
- // don't let more than one widget handle mouseStart
- if( mouseHandled ) { return; }
-
- // we may have missed mouseup (out of window)
- (this._mouseStarted && this._mouseUp(event));
-
- this._mouseDownEvent = event;
-
- var that = this,
- btnIsLeft = (event.which === 1),
- // event.target.nodeName works around a bug in IE 8 with
- // disabled inputs (#7620)
- elIsCancel = (typeof this.options.cancel === "string" && event.target.nodeName ? $(event.target).closest(this.options.cancel).length : false);
- if (!btnIsLeft || elIsCancel || !this._mouseCapture(event)) {
- return true;
- }
-
- this.mouseDelayMet = !this.options.delay;
- if (!this.mouseDelayMet) {
- this._mouseDelayTimer = setTimeout(function() {
- that.mouseDelayMet = true;
- }, this.options.delay);
- }
-
- if (this._mouseDistanceMet(event) && this._mouseDelayMet(event)) {
- this._mouseStarted = (this._mouseStart(event) !== false);
- if (!this._mouseStarted) {
- event.preventDefault();
- return true;
- }
- }
-
- // Click event may never have fired (Gecko & Opera)
- if (true === $.data(event.target, this.widgetName + ".preventClickEvent")) {
- $.removeData(event.target, this.widgetName + ".preventClickEvent");
- }
-
- // these delegates are required to keep context
- this._mouseMoveDelegate = function(event) {
- return that._mouseMove(event);
- };
- this._mouseUpDelegate = function(event) {
- return that._mouseUp(event);
- };
- $(document)
- .bind("mousemove."+this.widgetName, this._mouseMoveDelegate)
- .bind("mouseup."+this.widgetName, this._mouseUpDelegate);
-
- event.preventDefault();
-
- mouseHandled = true;
- return true;
- },
-
- _mouseMove: function(event) {
- // IE mouseup check - mouseup happened when mouse was out of window
- if ($.ui.ie && ( !document.documentMode || document.documentMode < 9 ) && !event.button) {
- return this._mouseUp(event);
- }
-
- if (this._mouseStarted) {
- this._mouseDrag(event);
- return event.preventDefault();
- }
-
- if (this._mouseDistanceMet(event) && this._mouseDelayMet(event)) {
- this._mouseStarted =
- (this._mouseStart(this._mouseDownEvent, event) !== false);
- (this._mouseStarted ? this._mouseDrag(event) : this._mouseUp(event));
- }
-
- return !this._mouseStarted;
- },
-
- _mouseUp: function(event) {
- $(document)
- .unbind("mousemove."+this.widgetName, this._mouseMoveDelegate)
- .unbind("mouseup."+this.widgetName, this._mouseUpDelegate);
-
- if (this._mouseStarted) {
- this._mouseStarted = false;
-
- if (event.target === this._mouseDownEvent.target) {
- $.data(event.target, this.widgetName + ".preventClickEvent", true);
- }
-
- this._mouseStop(event);
- }
-
- return false;
- },
-
- _mouseDistanceMet: function(event) {
- return (Math.max(
- Math.abs(this._mouseDownEvent.pageX - event.pageX),
- Math.abs(this._mouseDownEvent.pageY - event.pageY)
- ) >= this.options.distance
- );
- },
-
- _mouseDelayMet: function(/* event */) {
- return this.mouseDelayMet;
- },
-
- // These are placeholder methods, to be overriden by extending plugin
- _mouseStart: function(/* event */) {},
- _mouseDrag: function(/* event */) {},
- _mouseStop: function(/* event */) {},
- _mouseCapture: function(/* event */) { return true; }
-});
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.widget("ui.draggable", $.ui.mouse, {
- version: "1.10.3",
- widgetEventPrefix: "drag",
- options: {
- addClasses: true,
- appendTo: "parent",
- axis: false,
- connectToSortable: false,
- containment: false,
- cursor: "auto",
- cursorAt: false,
- grid: false,
- handle: false,
- helper: "original",
- iframeFix: false,
- opacity: false,
- refreshPositions: false,
- revert: false,
- revertDuration: 500,
- scope: "default",
- scroll: true,
- scrollSensitivity: 20,
- scrollSpeed: 20,
- snap: false,
- snapMode: "both",
- snapTolerance: 20,
- stack: false,
- zIndex: false,
-
- // callbacks
- drag: null,
- start: null,
- stop: null
- },
- _create: function() {
-
- if (this.options.helper === "original" && !(/^(?:r|a|f)/).test(this.element.css("position"))) {
- this.element[0].style.position = "relative";
- }
- if (this.options.addClasses){
- this.element.addClass("ui-draggable");
- }
- if (this.options.disabled){
- this.element.addClass("ui-draggable-disabled");
- }
-
- this._mouseInit();
-
- },
-
- _destroy: function() {
- this.element.removeClass( "ui-draggable ui-draggable-dragging ui-draggable-disabled" );
- this._mouseDestroy();
- },
-
- _mouseCapture: function(event) {
-
- var o = this.options;
-
- // among others, prevent a drag on a resizable-handle
- if (this.helper || o.disabled || $(event.target).closest(".ui-resizable-handle").length > 0) {
- return false;
- }
-
- //Quit if we're not on a valid handle
- this.handle = this._getHandle(event);
- if (!this.handle) {
- return false;
- }
-
- $(o.iframeFix === true ? "iframe" : o.iframeFix).each(function() {
- $("<div class='ui-draggable-iframeFix' style='background: #fff;'></div>")
- .css({
- width: this.offsetWidth+"px", height: this.offsetHeight+"px",
- position: "absolute", opacity: "0.001", zIndex: 1000
- })
- .css($(this).offset())
- .appendTo("body");
- });
-
- return true;
-
- },
-
- _mouseStart: function(event) {
-
- var o = this.options;
-
- //Create and append the visible helper
- this.helper = this._createHelper(event);
-
- this.helper.addClass("ui-draggable-dragging");
-
- //Cache the helper size
- this._cacheHelperProportions();
-
- //If ddmanager is used for droppables, set the global draggable
- if($.ui.ddmanager) {
- $.ui.ddmanager.current = this;
- }
-
- /*
- * - Position generation -
- * This block generates everything position related - it's the core of draggables.
- */
-
- //Cache the margins of the original element
- this._cacheMargins();
-
- //Store the helper's css position
- this.cssPosition = this.helper.css( "position" );
- this.scrollParent = this.helper.scrollParent();
- this.offsetParent = this.helper.offsetParent();
- this.offsetParentCssPosition = this.offsetParent.css( "position" );
-
- //The element's absolute position on the page minus margins
- this.offset = this.positionAbs = this.element.offset();
- this.offset = {
- top: this.offset.top - this.margins.top,
- left: this.offset.left - this.margins.left
- };
-
- //Reset scroll cache
- this.offset.scroll = false;
-
- $.extend(this.offset, {
- click: { //Where the click happened, relative to the element
- left: event.pageX - this.offset.left,
- top: event.pageY - this.offset.top
- },
- parent: this._getParentOffset(),
- relative: this._getRelativeOffset() //This is a relative to absolute position minus the actual position calculation - only used for relative positioned helper
- });
-
- //Generate the original position
- this.originalPosition = this.position = this._generatePosition(event);
- this.originalPageX = event.pageX;
- this.originalPageY = event.pageY;
-
- //Adjust the mouse offset relative to the helper if "cursorAt" is supplied
- (o.cursorAt && this._adjustOffsetFromHelper(o.cursorAt));
-
- //Set a containment if given in the options
- this._setContainment();
-
- //Trigger event + callbacks
- if(this._trigger("start", event) === false) {
- this._clear();
- return false;
- }
-
- //Recache the helper size
- this._cacheHelperProportions();
-
- //Prepare the droppable offsets
- if ($.ui.ddmanager && !o.dropBehaviour) {
- $.ui.ddmanager.prepareOffsets(this, event);
- }
-
-
- this._mouseDrag(event, true); //Execute the drag once - this causes the helper not to be visible before getting its correct position
-
- //If the ddmanager is used for droppables, inform the manager that dragging has started (see #5003)
- if ( $.ui.ddmanager ) {
- $.ui.ddmanager.dragStart(this, event);
- }
-
- return true;
- },
-
- _mouseDrag: function(event, noPropagation) {
- // reset any necessary cached properties (see #5009)
- if ( this.offsetParentCssPosition === "fixed" ) {
- this.offset.parent = this._getParentOffset();
- }
-
- //Compute the helpers position
- this.position = this._generatePosition(event);
- this.positionAbs = this._convertPositionTo("absolute");
-
- //Call plugins and callbacks and use the resulting position if something is returned
- if (!noPropagation) {
- var ui = this._uiHash();
- if(this._trigger("drag", event, ui) === false) {
- this._mouseUp({});
- return false;
- }
- this.position = ui.position;
- }
-
- if(!this.options.axis || this.options.axis !== "y") {
- this.helper[0].style.left = this.position.left+"px";
- }
- if(!this.options.axis || this.options.axis !== "x") {
- this.helper[0].style.top = this.position.top+"px";
- }
- if($.ui.ddmanager) {
- $.ui.ddmanager.drag(this, event);
- }
-
- return false;
- },
-
- _mouseStop: function(event) {
-
- //If we are using droppables, inform the manager about the drop
- var that = this,
- dropped = false;
- if ($.ui.ddmanager && !this.options.dropBehaviour) {
- dropped = $.ui.ddmanager.drop(this, event);
- }
-
- //if a drop comes from outside (a sortable)
- if(this.dropped) {
- dropped = this.dropped;
- this.dropped = false;
- }
-
- //if the original element is no longer in the DOM don't bother to continue (see #8269)
- if ( this.options.helper === "original" && !$.contains( this.element[ 0 ].ownerDocument, this.element[ 0 ] ) ) {
- return false;
- }
-
- if((this.options.revert === "invalid" && !dropped) || (this.options.revert === "valid" && dropped) || this.options.revert === true || ($.isFunction(this.options.revert) && this.options.revert.call(this.element, dropped))) {
- $(this.helper).animate(this.originalPosition, parseInt(this.options.revertDuration, 10), function() {
- if(that._trigger("stop", event) !== false) {
- that._clear();
- }
- });
- } else {
- if(this._trigger("stop", event) !== false) {
- this._clear();
- }
- }
-
- return false;
- },
-
- _mouseUp: function(event) {
- //Remove frame helpers
- $("div.ui-draggable-iframeFix").each(function() {
- this.parentNode.removeChild(this);
- });
-
- //If the ddmanager is used for droppables, inform the manager that dragging has stopped (see #5003)
- if( $.ui.ddmanager ) {
- $.ui.ddmanager.dragStop(this, event);
- }
-
- return $.ui.mouse.prototype._mouseUp.call(this, event);
- },
-
- cancel: function() {
-
- if(this.helper.is(".ui-draggable-dragging")) {
- this._mouseUp({});
- } else {
- this._clear();
- }
-
- return this;
-
- },
-
- _getHandle: function(event) {
- return this.options.handle ?
- !!$( event.target ).closest( this.element.find( this.options.handle ) ).length :
- true;
- },
-
- _createHelper: function(event) {
-
- var o = this.options,
- helper = $.isFunction(o.helper) ? $(o.helper.apply(this.element[0], [event])) : (o.helper === "clone" ? this.element.clone().removeAttr("id") : this.element);
-
- if(!helper.parents("body").length) {
- helper.appendTo((o.appendTo === "parent" ? this.element[0].parentNode : o.appendTo));
- }
-
- if(helper[0] !== this.element[0] && !(/(fixed|absolute)/).test(helper.css("position"))) {
- helper.css("position", "absolute");
- }
-
- return helper;
-
- },
-
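-	// Normalize the cursorAt option (an "x y" string, an [x, y] array, or an object with left/right/top/bottom keys) into adjustments of the cached click offset.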
- _adjustOffsetFromHelper: function(obj) {
- if (typeof obj === "string") {
- obj = obj.split(" ");
- }
- if ($.isArray(obj)) {
- obj = {left: +obj[0], top: +obj[1] || 0};
- }
- if ("left" in obj) {
- this.offset.click.left = obj.left + this.margins.left;
- }
- if ("right" in obj) {
- this.offset.click.left = this.helperProportions.width - obj.right + this.margins.left;
- }
- if ("top" in obj) {
- this.offset.click.top = obj.top + this.margins.top;
- }
- if ("bottom" in obj) {
- this.offset.click.top = this.helperProportions.height - obj.bottom + this.margins.top;
- }
- },
-
- _getParentOffset: function() {
-
- //Get the offsetParent and cache its position
- var po = this.offsetParent.offset();
-
-		// This is a special case where we need to modify an offset calculated on start, since the following happened:
-		// 1. The position of the helper is absolute, so its position is calculated based on the next positioned parent
- // 2. The actual offset parent is a child of the scroll parent, and the scroll parent isn't the document, which means that
- // the scroll is included in the initial calculation of the offset of the parent, and never recalculated upon drag
- if(this.cssPosition === "absolute" && this.scrollParent[0] !== document && $.contains(this.scrollParent[0], this.offsetParent[0])) {
- po.left += this.scrollParent.scrollLeft();
- po.top += this.scrollParent.scrollTop();
- }
-
- //This needs to be actually done for all browsers, since pageX/pageY includes this information
- //Ugly IE fix
- if((this.offsetParent[0] === document.body) ||
- (this.offsetParent[0].tagName && this.offsetParent[0].tagName.toLowerCase() === "html" && $.ui.ie)) {
- po = { top: 0, left: 0 };
- }
-
- return {
- top: po.top + (parseInt(this.offsetParent.css("borderTopWidth"),10) || 0),
- left: po.left + (parseInt(this.offsetParent.css("borderLeftWidth"),10) || 0)
- };
-
- },
-
- _getRelativeOffset: function() {
-
- if(this.cssPosition === "relative") {
- var p = this.element.position();
- return {
- top: p.top - (parseInt(this.helper.css("top"),10) || 0) + this.scrollParent.scrollTop(),
- left: p.left - (parseInt(this.helper.css("left"),10) || 0) + this.scrollParent.scrollLeft()
- };
- } else {
- return { top: 0, left: 0 };
- }
-
- },
-
- _cacheMargins: function() {
- this.margins = {
- left: (parseInt(this.element.css("marginLeft"),10) || 0),
- top: (parseInt(this.element.css("marginTop"),10) || 0),
- right: (parseInt(this.element.css("marginRight"),10) || 0),
- bottom: (parseInt(this.element.css("marginBottom"),10) || 0)
- };
- },
-
- _cacheHelperProportions: function() {
- this.helperProportions = {
- width: this.helper.outerWidth(),
- height: this.helper.outerHeight()
- };
- },
-
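-	// Resolve the containment option ("window", "document", "parent", a selector/element, or an explicit [x1, y1, x2, y2] array) into the pixel bounds used while dragging.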
- _setContainment: function() {
-
- var over, c, ce,
- o = this.options;
-
- if ( !o.containment ) {
- this.containment = null;
- return;
- }
-
- if ( o.containment === "window" ) {
- this.containment = [
- $( window ).scrollLeft() - this.offset.relative.left - this.offset.parent.left,
- $( window ).scrollTop() - this.offset.relative.top - this.offset.parent.top,
- $( window ).scrollLeft() + $( window ).width() - this.helperProportions.width - this.margins.left,
- $( window ).scrollTop() + ( $( window ).height() || document.body.parentNode.scrollHeight ) - this.helperProportions.height - this.margins.top
- ];
- return;
- }
-
- if ( o.containment === "document") {
- this.containment = [
- 0,
- 0,
- $( document ).width() - this.helperProportions.width - this.margins.left,
- ( $( document ).height() || document.body.parentNode.scrollHeight ) - this.helperProportions.height - this.margins.top
- ];
- return;
- }
-
- if ( o.containment.constructor === Array ) {
- this.containment = o.containment;
- return;
- }
-
- if ( o.containment === "parent" ) {
- o.containment = this.helper[ 0 ].parentNode;
- }
-
- c = $( o.containment );
- ce = c[ 0 ];
-
- if( !ce ) {
- return;
- }
-
- over = c.css( "overflow" ) !== "hidden";
-
- this.containment = [
- ( parseInt( c.css( "borderLeftWidth" ), 10 ) || 0 ) + ( parseInt( c.css( "paddingLeft" ), 10 ) || 0 ),
- ( parseInt( c.css( "borderTopWidth" ), 10 ) || 0 ) + ( parseInt( c.css( "paddingTop" ), 10 ) || 0 ) ,
- ( over ? Math.max( ce.scrollWidth, ce.offsetWidth ) : ce.offsetWidth ) - ( parseInt( c.css( "borderRightWidth" ), 10 ) || 0 ) - ( parseInt( c.css( "paddingRight" ), 10 ) || 0 ) - this.helperProportions.width - this.margins.left - this.margins.right,
- ( over ? Math.max( ce.scrollHeight, ce.offsetHeight ) : ce.offsetHeight ) - ( parseInt( c.css( "borderBottomWidth" ), 10 ) || 0 ) - ( parseInt( c.css( "paddingBottom" ), 10 ) || 0 ) - this.helperProportions.height - this.margins.top - this.margins.bottom
- ];
- this.relative_container = c;
- },
-
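-	// Convert a helper position between absolute page coordinates and coordinates relative to the offset parent, using the cached parent/relative offsets and scroll.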
- _convertPositionTo: function(d, pos) {
-
- if(!pos) {
- pos = this.position;
- }
-
- var mod = d === "absolute" ? 1 : -1,
- scroll = this.cssPosition === "absolute" && !( this.scrollParent[ 0 ] !== document && $.contains( this.scrollParent[ 0 ], this.offsetParent[ 0 ] ) ) ? this.offsetParent : this.scrollParent;
-
- //Cache the scroll
- if (!this.offset.scroll) {
- this.offset.scroll = {top : scroll.scrollTop(), left : scroll.scrollLeft()};
- }
-
- return {
- top: (
- pos.top + // The absolute mouse position
- this.offset.relative.top * mod + // Only for relative positioned nodes: Relative offset from element to offset parent
- this.offset.parent.top * mod - // The offsetParent's offset without borders (offset + border)
- ( ( this.cssPosition === "fixed" ? -this.scrollParent.scrollTop() : this.offset.scroll.top ) * mod )
- ),
- left: (
- pos.left + // The absolute mouse position
- this.offset.relative.left * mod + // Only for relative positioned nodes: Relative offset from element to offset parent
- this.offset.parent.left * mod - // The offsetParent's offset without borders (offset + border)
- ( ( this.cssPosition === "fixed" ? -this.scrollParent.scrollLeft() : this.offset.scroll.left ) * mod )
- )
- };
-
- },
-
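-	// Compute the helper's new top/left from the current mouse position, applying containment bounds and grid snapping when those options are set.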
- _generatePosition: function(event) {
-
- var containment, co, top, left,
- o = this.options,
- scroll = this.cssPosition === "absolute" && !( this.scrollParent[ 0 ] !== document && $.contains( this.scrollParent[ 0 ], this.offsetParent[ 0 ] ) ) ? this.offsetParent : this.scrollParent,
- pageX = event.pageX,
- pageY = event.pageY;
-
- //Cache the scroll
- if (!this.offset.scroll) {
- this.offset.scroll = {top : scroll.scrollTop(), left : scroll.scrollLeft()};
- }
-
- /*
- * - Position constraining -
- * Constrain the position to a mix of grid, containment.
- */
-
- // If we are not dragging yet, we won't check for options
- if ( this.originalPosition ) {
- if ( this.containment ) {
- if ( this.relative_container ){
- co = this.relative_container.offset();
- containment = [
- this.containment[ 0 ] + co.left,
- this.containment[ 1 ] + co.top,
- this.containment[ 2 ] + co.left,
- this.containment[ 3 ] + co.top
- ];
- }
- else {
- containment = this.containment;
- }
-
- if(event.pageX - this.offset.click.left < containment[0]) {
- pageX = containment[0] + this.offset.click.left;
- }
- if(event.pageY - this.offset.click.top < containment[1]) {
- pageY = containment[1] + this.offset.click.top;
- }
- if(event.pageX - this.offset.click.left > containment[2]) {
- pageX = containment[2] + this.offset.click.left;
- }
- if(event.pageY - this.offset.click.top > containment[3]) {
- pageY = containment[3] + this.offset.click.top;
- }
- }
-
- if(o.grid) {
- //Check for grid elements set to 0 to prevent divide by 0 error causing invalid argument errors in IE (see ticket #6950)
- top = o.grid[1] ? this.originalPageY + Math.round((pageY - this.originalPageY) / o.grid[1]) * o.grid[1] : this.originalPageY;
- pageY = containment ? ((top - this.offset.click.top >= containment[1] || top - this.offset.click.top > containment[3]) ? top : ((top - this.offset.click.top >= containment[1]) ? top - o.grid[1] : top + o.grid[1])) : top;
-
- left = o.grid[0] ? this.originalPageX + Math.round((pageX - this.originalPageX) / o.grid[0]) * o.grid[0] : this.originalPageX;
- pageX = containment ? ((left - this.offset.click.left >= containment[0] || left - this.offset.click.left > containment[2]) ? left : ((left - this.offset.click.left >= containment[0]) ? left - o.grid[0] : left + o.grid[0])) : left;
- }
-
- }
-
- return {
- top: (
- pageY - // The absolute mouse position
- this.offset.click.top - // Click offset (relative to the element)
- this.offset.relative.top - // Only for relative positioned nodes: Relative offset from element to offset parent
- this.offset.parent.top + // The offsetParent's offset without borders (offset + border)
- ( this.cssPosition === "fixed" ? -this.scrollParent.scrollTop() : this.offset.scroll.top )
- ),
- left: (
- pageX - // The absolute mouse position
- this.offset.click.left - // Click offset (relative to the element)
- this.offset.relative.left - // Only for relative positioned nodes: Relative offset from element to offset parent
- this.offset.parent.left + // The offsetParent's offset without borders (offset + border)
- ( this.cssPosition === "fixed" ? -this.scrollParent.scrollLeft() : this.offset.scroll.left )
- )
- };
-
- },
-
- _clear: function() {
- this.helper.removeClass("ui-draggable-dragging");
- if(this.helper[0] !== this.element[0] && !this.cancelHelperRemoval) {
- this.helper.remove();
- }
- this.helper = null;
- this.cancelHelperRemoval = false;
- },
-
-	// From here on: mostly helpers
-
- _trigger: function(type, event, ui) {
- ui = ui || this._uiHash();
- $.ui.plugin.call(this, type, [event, ui]);
- //The absolute position has to be recalculated after plugins
- if(type === "drag") {
- this.positionAbs = this._convertPositionTo("absolute");
- }
- return $.Widget.prototype._trigger.call(this, type, event, ui);
- },
-
- plugins: {},
-
- _uiHash: function() {
- return {
- helper: this.helper,
- position: this.position,
- originalPosition: this.originalPosition,
- offset: this.positionAbs
- };
- }
-
-});
-
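-// Lets a draggable be dropped into connected sortables by faking the sortable's own drag lifecycle (start/drag/stop) while the helper is over it.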
-$.ui.plugin.add("draggable", "connectToSortable", {
- start: function(event, ui) {
-
- var inst = $(this).data("ui-draggable"), o = inst.options,
- uiSortable = $.extend({}, ui, { item: inst.element });
- inst.sortables = [];
- $(o.connectToSortable).each(function() {
- var sortable = $.data(this, "ui-sortable");
- if (sortable && !sortable.options.disabled) {
- inst.sortables.push({
- instance: sortable,
- shouldRevert: sortable.options.revert
- });
- sortable.refreshPositions(); // Call the sortable's refreshPositions at drag start to refresh the containerCache since the sortable container cache is used in drag and needs to be up to date (this will ensure it's initialised as well as being kept in step with any changes that might have happened on the page).
- sortable._trigger("activate", event, uiSortable);
- }
- });
-
- },
- stop: function(event, ui) {
-
- //If we are still over the sortable, we fake the stop event of the sortable, but also remove helper
- var inst = $(this).data("ui-draggable"),
- uiSortable = $.extend({}, ui, { item: inst.element });
-
- $.each(inst.sortables, function() {
- if(this.instance.isOver) {
-
- this.instance.isOver = 0;
-
- inst.cancelHelperRemoval = true; //Don't remove the helper in the draggable instance
- this.instance.cancelHelperRemoval = false; //Remove it in the sortable instance (so sortable plugins like revert still work)
-
- //The sortable revert is supported, and we have to set a temporary dropped variable on the draggable to support revert: "valid/invalid"
- if(this.shouldRevert) {
- this.instance.options.revert = this.shouldRevert;
- }
-
- //Trigger the stop of the sortable
- this.instance._mouseStop(event);
-
- this.instance.options.helper = this.instance.options._helper;
-
- //If the helper has been the original item, restore properties in the sortable
- if(inst.options.helper === "original") {
- this.instance.currentItem.css({ top: "auto", left: "auto" });
- }
-
- } else {
- this.instance.cancelHelperRemoval = false; //Remove the helper in the sortable instance
- this.instance._trigger("deactivate", event, uiSortable);
- }
-
- });
-
- },
- drag: function(event, ui) {
-
- var inst = $(this).data("ui-draggable"), that = this;
-
- $.each(inst.sortables, function() {
-
- var innermostIntersecting = false,
- thisSortable = this;
-
- //Copy over some variables to allow calling the sortable's native _intersectsWith
- this.instance.positionAbs = inst.positionAbs;
- this.instance.helperProportions = inst.helperProportions;
- this.instance.offset.click = inst.offset.click;
-
- if(this.instance._intersectsWith(this.instance.containerCache)) {
- innermostIntersecting = true;
- $.each(inst.sortables, function () {
- this.instance.positionAbs = inst.positionAbs;
- this.instance.helperProportions = inst.helperProportions;
- this.instance.offset.click = inst.offset.click;
- if (this !== thisSortable &&
- this.instance._intersectsWith(this.instance.containerCache) &&
- $.contains(thisSortable.instance.element[0], this.instance.element[0])
- ) {
- innermostIntersecting = false;
- }
- return innermostIntersecting;
- });
- }
-
-
- if(innermostIntersecting) {
- //If it intersects, we use a little isOver variable and set it once, so our move-in stuff gets fired only once
- if(!this.instance.isOver) {
-
- this.instance.isOver = 1;
- //Now we fake the start of dragging for the sortable instance,
- //by cloning the list group item, appending it to the sortable and using it as inst.currentItem
- //We can then fire the start event of the sortable with our passed browser event, and our own helper (so it doesn't create a new one)
- this.instance.currentItem = $(that).clone().removeAttr("id").appendTo(this.instance.element).data("ui-sortable-item", true);
- this.instance.options._helper = this.instance.options.helper; //Store helper option to later restore it
- this.instance.options.helper = function() { return ui.helper[0]; };
-
- event.target = this.instance.currentItem[0];
- this.instance._mouseCapture(event, true);
- this.instance._mouseStart(event, true, true);
-
- //Because the browser event is way off the new appended portlet, we modify a couple of variables to reflect the changes
- this.instance.offset.click.top = inst.offset.click.top;
- this.instance.offset.click.left = inst.offset.click.left;
- this.instance.offset.parent.left -= inst.offset.parent.left - this.instance.offset.parent.left;
- this.instance.offset.parent.top -= inst.offset.parent.top - this.instance.offset.parent.top;
-
- inst._trigger("toSortable", event);
- inst.dropped = this.instance.element; //draggable revert needs that
- //hack so receive/update callbacks work (mostly)
- inst.currentItem = inst.element;
- this.instance.fromOutside = inst;
-
- }
-
- //Provided we did all the previous steps, we can fire the drag event of the sortable on every draggable drag, when it intersects with the sortable
- if(this.instance.currentItem) {
- this.instance._mouseDrag(event);
- }
-
- } else {
-
- //If it doesn't intersect with the sortable, and it intersected before,
- //we fake the drag stop of the sortable, but make sure it doesn't remove the helper by using cancelHelperRemoval
- if(this.instance.isOver) {
-
- this.instance.isOver = 0;
- this.instance.cancelHelperRemoval = true;
-
- //Prevent reverting on this forced stop
- this.instance.options.revert = false;
-
- // The out event needs to be triggered independently
- this.instance._trigger("out", event, this.instance._uiHash(this.instance));
-
- this.instance._mouseStop(event, true);
- this.instance.options.helper = this.instance.options._helper;
-
-					//Now we remove our currentItem, the list group clone again, and the placeholder, and animate the helper back to its original size
- this.instance.currentItem.remove();
- if(this.instance.placeholder) {
- this.instance.placeholder.remove();
- }
-
- inst._trigger("fromSortable", event);
- inst.dropped = false; //draggable revert needs that
- }
-
- }
-
- });
-
- }
-});
-
-$.ui.plugin.add("draggable", "cursor", {
- start: function() {
- var t = $("body"), o = $(this).data("ui-draggable").options;
- if (t.css("cursor")) {
- o._cursor = t.css("cursor");
- }
- t.css("cursor", o.cursor);
- },
- stop: function() {
- var o = $(this).data("ui-draggable").options;
- if (o._cursor) {
- $("body").css("cursor", o._cursor);
- }
- }
-});
-
-$.ui.plugin.add("draggable", "opacity", {
- start: function(event, ui) {
- var t = $(ui.helper), o = $(this).data("ui-draggable").options;
- if(t.css("opacity")) {
- o._opacity = t.css("opacity");
- }
- t.css("opacity", o.opacity);
- },
- stop: function(event, ui) {
- var o = $(this).data("ui-draggable").options;
- if(o._opacity) {
- $(ui.helper).css("opacity", o._opacity);
- }
- }
-});
-
-$.ui.plugin.add("draggable", "scroll", {
- start: function() {
- var i = $(this).data("ui-draggable");
- if(i.scrollParent[0] !== document && i.scrollParent[0].tagName !== "HTML") {
- i.overflowOffset = i.scrollParent.offset();
- }
- },
- drag: function( event ) {
-
- var i = $(this).data("ui-draggable"), o = i.options, scrolled = false;
-
- if(i.scrollParent[0] !== document && i.scrollParent[0].tagName !== "HTML") {
-
- if(!o.axis || o.axis !== "x") {
- if((i.overflowOffset.top + i.scrollParent[0].offsetHeight) - event.pageY < o.scrollSensitivity) {
- i.scrollParent[0].scrollTop = scrolled = i.scrollParent[0].scrollTop + o.scrollSpeed;
- } else if(event.pageY - i.overflowOffset.top < o.scrollSensitivity) {
- i.scrollParent[0].scrollTop = scrolled = i.scrollParent[0].scrollTop - o.scrollSpeed;
- }
- }
-
- if(!o.axis || o.axis !== "y") {
- if((i.overflowOffset.left + i.scrollParent[0].offsetWidth) - event.pageX < o.scrollSensitivity) {
- i.scrollParent[0].scrollLeft = scrolled = i.scrollParent[0].scrollLeft + o.scrollSpeed;
- } else if(event.pageX - i.overflowOffset.left < o.scrollSensitivity) {
- i.scrollParent[0].scrollLeft = scrolled = i.scrollParent[0].scrollLeft - o.scrollSpeed;
- }
- }
-
- } else {
-
- if(!o.axis || o.axis !== "x") {
- if(event.pageY - $(document).scrollTop() < o.scrollSensitivity) {
- scrolled = $(document).scrollTop($(document).scrollTop() - o.scrollSpeed);
- } else if($(window).height() - (event.pageY - $(document).scrollTop()) < o.scrollSensitivity) {
- scrolled = $(document).scrollTop($(document).scrollTop() + o.scrollSpeed);
- }
- }
-
- if(!o.axis || o.axis !== "y") {
- if(event.pageX - $(document).scrollLeft() < o.scrollSensitivity) {
- scrolled = $(document).scrollLeft($(document).scrollLeft() - o.scrollSpeed);
- } else if($(window).width() - (event.pageX - $(document).scrollLeft()) < o.scrollSensitivity) {
- scrolled = $(document).scrollLeft($(document).scrollLeft() + o.scrollSpeed);
- }
- }
-
- }
-
- if(scrolled !== false && $.ui.ddmanager && !o.dropBehaviour) {
- $.ui.ddmanager.prepareOffsets(i, event);
- }
-
- }
-});
-
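-// Snaps the dragged helper to the edges of other elements once it comes within snapTolerance pixels, honoring the "inner"/"outer"/"both" snapMode.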
-$.ui.plugin.add("draggable", "snap", {
- start: function() {
-
- var i = $(this).data("ui-draggable"),
- o = i.options;
-
- i.snapElements = [];
-
- $(o.snap.constructor !== String ? ( o.snap.items || ":data(ui-draggable)" ) : o.snap).each(function() {
- var $t = $(this),
- $o = $t.offset();
- if(this !== i.element[0]) {
- i.snapElements.push({
- item: this,
- width: $t.outerWidth(), height: $t.outerHeight(),
- top: $o.top, left: $o.left
- });
- }
- });
-
- },
- drag: function(event, ui) {
-
- var ts, bs, ls, rs, l, r, t, b, i, first,
- inst = $(this).data("ui-draggable"),
- o = inst.options,
- d = o.snapTolerance,
- x1 = ui.offset.left, x2 = x1 + inst.helperProportions.width,
- y1 = ui.offset.top, y2 = y1 + inst.helperProportions.height;
-
- for (i = inst.snapElements.length - 1; i >= 0; i--){
-
- l = inst.snapElements[i].left;
- r = l + inst.snapElements[i].width;
- t = inst.snapElements[i].top;
- b = t + inst.snapElements[i].height;
-
- if ( x2 < l - d || x1 > r + d || y2 < t - d || y1 > b + d || !$.contains( inst.snapElements[ i ].item.ownerDocument, inst.snapElements[ i ].item ) ) {
- if(inst.snapElements[i].snapping) {
- (inst.options.snap.release && inst.options.snap.release.call(inst.element, event, $.extend(inst._uiHash(), { snapItem: inst.snapElements[i].item })));
- }
- inst.snapElements[i].snapping = false;
- continue;
- }
-
- if(o.snapMode !== "inner") {
- ts = Math.abs(t - y2) <= d;
- bs = Math.abs(b - y1) <= d;
- ls = Math.abs(l - x2) <= d;
- rs = Math.abs(r - x1) <= d;
- if(ts) {
- ui.position.top = inst._convertPositionTo("relative", { top: t - inst.helperProportions.height, left: 0 }).top - inst.margins.top;
- }
- if(bs) {
- ui.position.top = inst._convertPositionTo("relative", { top: b, left: 0 }).top - inst.margins.top;
- }
- if(ls) {
- ui.position.left = inst._convertPositionTo("relative", { top: 0, left: l - inst.helperProportions.width }).left - inst.margins.left;
- }
- if(rs) {
- ui.position.left = inst._convertPositionTo("relative", { top: 0, left: r }).left - inst.margins.left;
- }
- }
-
- first = (ts || bs || ls || rs);
-
- if(o.snapMode !== "outer") {
- ts = Math.abs(t - y1) <= d;
- bs = Math.abs(b - y2) <= d;
- ls = Math.abs(l - x1) <= d;
- rs = Math.abs(r - x2) <= d;
- if(ts) {
- ui.position.top = inst._convertPositionTo("relative", { top: t, left: 0 }).top - inst.margins.top;
- }
- if(bs) {
- ui.position.top = inst._convertPositionTo("relative", { top: b - inst.helperProportions.height, left: 0 }).top - inst.margins.top;
- }
- if(ls) {
- ui.position.left = inst._convertPositionTo("relative", { top: 0, left: l }).left - inst.margins.left;
- }
- if(rs) {
- ui.position.left = inst._convertPositionTo("relative", { top: 0, left: r - inst.helperProportions.width }).left - inst.margins.left;
- }
- }
-
- if(!inst.snapElements[i].snapping && (ts || bs || ls || rs || first)) {
- (inst.options.snap.snap && inst.options.snap.snap.call(inst.element, event, $.extend(inst._uiHash(), { snapItem: inst.snapElements[i].item })));
- }
- inst.snapElements[i].snapping = (ts || bs || ls || rs || first);
-
- }
-
- }
-});
-
-$.ui.plugin.add("draggable", "stack", {
- start: function() {
- var min,
- o = this.data("ui-draggable").options,
- group = $.makeArray($(o.stack)).sort(function(a,b) {
- return (parseInt($(a).css("zIndex"),10) || 0) - (parseInt($(b).css("zIndex"),10) || 0);
- });
-
- if (!group.length) { return; }
-
- min = parseInt($(group[0]).css("zIndex"), 10) || 0;
- $(group).each(function(i) {
- $(this).css("zIndex", min + i);
- });
- this.css("zIndex", (min + group.length));
- }
-});
-
-$.ui.plugin.add("draggable", "zIndex", {
- start: function(event, ui) {
- var t = $(ui.helper), o = $(this).data("ui-draggable").options;
- if(t.css("zIndex")) {
- o._zIndex = t.css("zIndex");
- }
- t.css("zIndex", o.zIndex);
- },
- stop: function(event, ui) {
- var o = $(this).data("ui-draggable").options;
- if(o._zIndex) {
- $(ui.helper).css("zIndex", o._zIndex);
- }
- }
-});
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-function isOverAxis( x, reference, size ) {
- return ( x > reference ) && ( x < ( reference + size ) );
-}
-
-$.widget("ui.droppable", {
- version: "1.10.3",
- widgetEventPrefix: "drop",
- options: {
- accept: "*",
- activeClass: false,
- addClasses: true,
- greedy: false,
- hoverClass: false,
- scope: "default",
- tolerance: "intersect",
-
- // callbacks
- activate: null,
- deactivate: null,
- drop: null,
- out: null,
- over: null
- },
- _create: function() {
-
- var o = this.options,
- accept = o.accept;
-
- this.isover = false;
- this.isout = true;
-
- this.accept = $.isFunction(accept) ? accept : function(d) {
- return d.is(accept);
- };
-
- //Store the droppable's proportions
- this.proportions = { width: this.element[0].offsetWidth, height: this.element[0].offsetHeight };
-
- // Add the reference and positions to the manager
- $.ui.ddmanager.droppables[o.scope] = $.ui.ddmanager.droppables[o.scope] || [];
- $.ui.ddmanager.droppables[o.scope].push(this);
-
- (o.addClasses && this.element.addClass("ui-droppable"));
-
- },
-
- _destroy: function() {
- var i = 0,
- drop = $.ui.ddmanager.droppables[this.options.scope];
-
- for ( ; i < drop.length; i++ ) {
- if ( drop[i] === this ) {
- drop.splice(i, 1);
- }
- }
-
- this.element.removeClass("ui-droppable ui-droppable-disabled");
- },
-
- _setOption: function(key, value) {
-
- if(key === "accept") {
- this.accept = $.isFunction(value) ? value : function(d) {
- return d.is(value);
- };
- }
- $.Widget.prototype._setOption.apply(this, arguments);
- },
-
- _activate: function(event) {
- var draggable = $.ui.ddmanager.current;
- if(this.options.activeClass) {
- this.element.addClass(this.options.activeClass);
- }
- if(draggable){
- this._trigger("activate", event, this.ui(draggable));
- }
- },
-
- _deactivate: function(event) {
- var draggable = $.ui.ddmanager.current;
- if(this.options.activeClass) {
- this.element.removeClass(this.options.activeClass);
- }
- if(draggable){
- this._trigger("deactivate", event, this.ui(draggable));
- }
- },
-
- _over: function(event) {
-
- var draggable = $.ui.ddmanager.current;
-
- // Bail if draggable and droppable are same element
- if (!draggable || (draggable.currentItem || draggable.element)[0] === this.element[0]) {
- return;
- }
-
- if (this.accept.call(this.element[0],(draggable.currentItem || draggable.element))) {
- if(this.options.hoverClass) {
- this.element.addClass(this.options.hoverClass);
- }
- this._trigger("over", event, this.ui(draggable));
- }
-
- },
-
- _out: function(event) {
-
- var draggable = $.ui.ddmanager.current;
-
- // Bail if draggable and droppable are same element
- if (!draggable || (draggable.currentItem || draggable.element)[0] === this.element[0]) {
- return;
- }
-
- if (this.accept.call(this.element[0],(draggable.currentItem || draggable.element))) {
- if(this.options.hoverClass) {
- this.element.removeClass(this.options.hoverClass);
- }
- this._trigger("out", event, this.ui(draggable));
- }
-
- },
-
- _drop: function(event,custom) {
-
- var draggable = custom || $.ui.ddmanager.current,
- childrenIntersection = false;
-
- // Bail if draggable and droppable are same element
- if (!draggable || (draggable.currentItem || draggable.element)[0] === this.element[0]) {
- return false;
- }
-
- this.element.find(":data(ui-droppable)").not(".ui-draggable-dragging").each(function() {
- var inst = $.data(this, "ui-droppable");
- if(
- inst.options.greedy &&
- !inst.options.disabled &&
- inst.options.scope === draggable.options.scope &&
- inst.accept.call(inst.element[0], (draggable.currentItem || draggable.element)) &&
- $.ui.intersect(draggable, $.extend(inst, { offset: inst.element.offset() }), inst.options.tolerance)
- ) { childrenIntersection = true; return false; }
- });
- if(childrenIntersection) {
- return false;
- }
-
- if(this.accept.call(this.element[0],(draggable.currentItem || draggable.element))) {
- if(this.options.activeClass) {
- this.element.removeClass(this.options.activeClass);
- }
- if(this.options.hoverClass) {
- this.element.removeClass(this.options.hoverClass);
- }
- this._trigger("drop", event, this.ui(draggable));
- return this.element;
- }
-
- return false;
-
- },
-
- ui: function(c) {
- return {
- draggable: (c.currentItem || c.element),
- helper: c.helper,
- position: c.position,
- offset: c.positionAbs
- };
- }
-
-});
-
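-// Hit test between a draggable helper and a droppable, according to the droppable's tolerance mode ("fit", "intersect", "pointer" or "touch").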
-$.ui.intersect = function(draggable, droppable, toleranceMode) {
-
- if (!droppable.offset) {
- return false;
- }
-
- var draggableLeft, draggableTop,
- x1 = (draggable.positionAbs || draggable.position.absolute).left, x2 = x1 + draggable.helperProportions.width,
- y1 = (draggable.positionAbs || draggable.position.absolute).top, y2 = y1 + draggable.helperProportions.height,
- l = droppable.offset.left, r = l + droppable.proportions.width,
- t = droppable.offset.top, b = t + droppable.proportions.height;
-
- switch (toleranceMode) {
- case "fit":
- return (l <= x1 && x2 <= r && t <= y1 && y2 <= b);
- case "intersect":
- return (l < x1 + (draggable.helperProportions.width / 2) && // Right Half
- x2 - (draggable.helperProportions.width / 2) < r && // Left Half
- t < y1 + (draggable.helperProportions.height / 2) && // Bottom Half
- y2 - (draggable.helperProportions.height / 2) < b ); // Top Half
- case "pointer":
- draggableLeft = ((draggable.positionAbs || draggable.position.absolute).left + (draggable.clickOffset || draggable.offset.click).left);
- draggableTop = ((draggable.positionAbs || draggable.position.absolute).top + (draggable.clickOffset || draggable.offset.click).top);
- return isOverAxis( draggableTop, t, droppable.proportions.height ) && isOverAxis( draggableLeft, l, droppable.proportions.width );
- case "touch":
- return (
- (y1 >= t && y1 <= b) || // Top edge touching
- (y2 >= t && y2 <= b) || // Bottom edge touching
- (y1 < t && y2 > b) // Surrounded vertically
- ) && (
- (x1 >= l && x1 <= r) || // Left edge touching
- (x2 >= l && x2 <= r) || // Right edge touching
- (x1 < l && x2 > r) // Surrounded horizontally
- );
- default:
- return false;
- }
-
-};
-
-/*
- This manager tracks offsets of draggables and droppables
-*/
-$.ui.ddmanager = {
- current: null,
- droppables: { "default": [] },
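-	// Cache each droppable's offset and proportions (and activate it on a mousedown) so intersection checks during drag stay cheap.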
- prepareOffsets: function(t, event) {
-
- var i, j,
- m = $.ui.ddmanager.droppables[t.options.scope] || [],
- type = event ? event.type : null, // workaround for #2317
- list = (t.currentItem || t.element).find(":data(ui-droppable)").addBack();
-
- droppablesLoop: for (i = 0; i < m.length; i++) {
-
-			// Skip droppables that are disabled or that do not accept the current draggable
- if(m[i].options.disabled || (t && !m[i].accept.call(m[i].element[0],(t.currentItem || t.element)))) {
- continue;
- }
-
- // Filter out elements in the current dragged item
- for (j=0; j < list.length; j++) {
- if(list[j] === m[i].element[0]) {
- m[i].proportions.height = 0;
- continue droppablesLoop;
- }
- }
-
- m[i].visible = m[i].element.css("display") !== "none";
- if(!m[i].visible) {
- continue;
- }
-
- //Activate the droppable if used directly from draggables
- if(type === "mousedown") {
- m[i]._activate.call(m[i], event);
- }
-
- m[i].offset = m[i].element.offset();
- m[i].proportions = { width: m[i].element[0].offsetWidth, height: m[i].element[0].offsetHeight };
-
- }
-
- },
- drop: function(draggable, event) {
-
- var dropped = false;
- // Create a copy of the droppables in case the list changes during the drop (#9116)
- $.each(($.ui.ddmanager.droppables[draggable.options.scope] || []).slice(), function() {
-
- if(!this.options) {
- return;
- }
- if (!this.options.disabled && this.visible && $.ui.intersect(draggable, this, this.options.tolerance)) {
- dropped = this._drop.call(this, event) || dropped;
- }
-
- if (!this.options.disabled && this.visible && this.accept.call(this.element[0],(draggable.currentItem || draggable.element))) {
- this.isout = true;
- this.isover = false;
- this._deactivate.call(this, event);
- }
-
- });
- return dropped;
-
- },
- dragStart: function( draggable, event ) {
- //Listen for scrolling so that if the dragging causes scrolling the position of the droppables can be recalculated (see #5003)
- draggable.element.parentsUntil( "body" ).bind( "scroll.droppable", function() {
- if( !draggable.options.refreshPositions ) {
- $.ui.ddmanager.prepareOffsets( draggable, event );
- }
- });
- },
- drag: function(draggable, event) {
-
- //If you have a highly dynamic page, you might try this option. It renders positions every time you move the mouse.
- if(draggable.options.refreshPositions) {
- $.ui.ddmanager.prepareOffsets(draggable, event);
- }
-
- //Run through all droppables and check their positions based on specific tolerance options
- $.each($.ui.ddmanager.droppables[draggable.options.scope] || [], function() {
-
- if(this.options.disabled || this.greedyChild || !this.visible) {
- return;
- }
-
- var parentInstance, scope, parent,
- intersects = $.ui.intersect(draggable, this, this.options.tolerance),
- c = !intersects && this.isover ? "isout" : (intersects && !this.isover ? "isover" : null);
- if(!c) {
- return;
- }
-
- if (this.options.greedy) {
- // find droppable parents with same scope
- scope = this.options.scope;
- parent = this.element.parents(":data(ui-droppable)").filter(function () {
- return $.data(this, "ui-droppable").options.scope === scope;
- });
-
- if (parent.length) {
- parentInstance = $.data(parent[0], "ui-droppable");
- parentInstance.greedyChild = (c === "isover");
- }
- }
-
- // we just moved into a greedy child
- if (parentInstance && c === "isover") {
- parentInstance.isover = false;
- parentInstance.isout = true;
- parentInstance._out.call(parentInstance, event);
- }
-
- this[c] = true;
- this[c === "isout" ? "isover" : "isout"] = false;
- this[c === "isover" ? "_over" : "_out"].call(this, event);
-
- // we just moved out of a greedy child
- if (parentInstance && c === "isout") {
- parentInstance.isout = false;
- parentInstance.isover = true;
- parentInstance._over.call(parentInstance, event);
- }
- });
-
- },
- dragStop: function( draggable, event ) {
- draggable.element.parentsUntil( "body" ).unbind( "scroll.droppable" );
- //Call prepareOffsets one final time since IE does not fire return scroll events when overflow was caused by drag (see #5003)
- if( !draggable.options.refreshPositions ) {
- $.ui.ddmanager.prepareOffsets( draggable, event );
- }
- }
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-function num(v) {
- return parseInt(v, 10) || 0;
-}
-
-function isNumber(value) {
- return !isNaN(parseInt(value, 10));
-}
-
-$.widget("ui.resizable", $.ui.mouse, {
- version: "1.10.3",
- widgetEventPrefix: "resize",
- options: {
- alsoResize: false,
- animate: false,
- animateDuration: "slow",
- animateEasing: "swing",
- aspectRatio: false,
- autoHide: false,
- containment: false,
- ghost: false,
- grid: false,
- handles: "e,s,se",
- helper: false,
- maxHeight: null,
- maxWidth: null,
- minHeight: 10,
- minWidth: 10,
- // See #7960
- zIndex: 90,
-
- // callbacks
- resize: null,
- start: null,
- stop: null
- },
- _create: function() {
-
- var n, i, handle, axis, hname,
- that = this,
- o = this.options;
- this.element.addClass("ui-resizable");
-
- $.extend(this, {
- _aspectRatio: !!(o.aspectRatio),
- aspectRatio: o.aspectRatio,
- originalElement: this.element,
- _proportionallyResizeElements: [],
- _helper: o.helper || o.ghost || o.animate ? o.helper || "ui-resizable-helper" : null
- });
-
- //Wrap the element if it cannot hold child nodes
- if(this.element[0].nodeName.match(/canvas|textarea|input|select|button|img/i)) {
-
- //Create a wrapper element and set the wrapper to the new current internal element
- this.element.wrap(
- $("<div class='ui-wrapper' style='overflow: hidden;'></div>").css({
- position: this.element.css("position"),
- width: this.element.outerWidth(),
- height: this.element.outerHeight(),
- top: this.element.css("top"),
- left: this.element.css("left")
- })
- );
-
- //Overwrite the original this.element
- this.element = this.element.parent().data(
- "ui-resizable", this.element.data("ui-resizable")
- );
-
- this.elementIsWrapper = true;
-
- //Move margins to the wrapper
- this.element.css({ marginLeft: this.originalElement.css("marginLeft"), marginTop: this.originalElement.css("marginTop"), marginRight: this.originalElement.css("marginRight"), marginBottom: this.originalElement.css("marginBottom") });
- this.originalElement.css({ marginLeft: 0, marginTop: 0, marginRight: 0, marginBottom: 0});
-
- //Prevent Safari textarea resize
- this.originalResizeStyle = this.originalElement.css("resize");
- this.originalElement.css("resize", "none");
-
- //Push the actual element to our proportionallyResize internal array
- this._proportionallyResizeElements.push(this.originalElement.css({ position: "static", zoom: 1, display: "block" }));
-
- // avoid IE jump (hard set the margin)
- this.originalElement.css({ margin: this.originalElement.css("margin") });
-
- // fix handlers offset
- this._proportionallyResize();
-
- }
-
- this.handles = o.handles || (!$(".ui-resizable-handle", this.element).length ? "e,s,se" : { n: ".ui-resizable-n", e: ".ui-resizable-e", s: ".ui-resizable-s", w: ".ui-resizable-w", se: ".ui-resizable-se", sw: ".ui-resizable-sw", ne: ".ui-resizable-ne", nw: ".ui-resizable-nw" });
- if(this.handles.constructor === String) {
-
- if ( this.handles === "all") {
- this.handles = "n,e,s,w,se,sw,ne,nw";
- }
-
- n = this.handles.split(",");
- this.handles = {};
-
- for(i = 0; i < n.length; i++) {
-
- handle = $.trim(n[i]);
- hname = "ui-resizable-"+handle;
- axis = $("<div class='ui-resizable-handle " + hname + "'></div>");
-
- // Apply zIndex to all handles - see #7960
- axis.css({ zIndex: o.zIndex });
-
-				//TODO: What's going on here?
- if ("se" === handle) {
- axis.addClass("ui-icon ui-icon-gripsmall-diagonal-se");
- }
-
- //Insert into internal handles object and append to element
- this.handles[handle] = ".ui-resizable-"+handle;
- this.element.append(axis);
- }
-
- }
-
- this._renderAxis = function(target) {
-
- var i, axis, padPos, padWrapper;
-
- target = target || this.element;
-
- for(i in this.handles) {
-
- if(this.handles[i].constructor === String) {
- this.handles[i] = $(this.handles[i], this.element).show();
- }
-
- //Apply pad to wrapper element, needed to fix axis position (textarea, inputs, scrolls)
- if (this.elementIsWrapper && this.originalElement[0].nodeName.match(/textarea|input|select|button/i)) {
-
- axis = $(this.handles[i], this.element);
-
- //Checking the correct pad and border
- padWrapper = /sw|ne|nw|se|n|s/.test(i) ? axis.outerHeight() : axis.outerWidth();
-
-					//The padding side we have to apply...
- padPos = [ "padding",
- /ne|nw|n/.test(i) ? "Top" :
- /se|sw|s/.test(i) ? "Bottom" :
- /^e$/.test(i) ? "Right" : "Left" ].join("");
-
- target.css(padPos, padWrapper);
-
- this._proportionallyResize();
-
- }
-
-				//TODO: What's this good for? There's nothing left to execute here
- if(!$(this.handles[i]).length) {
- continue;
- }
- }
- };
-
- //TODO: make renderAxis a prototype function
- this._renderAxis(this.element);
-
- this._handles = $(".ui-resizable-handle", this.element)
- .disableSelection();
-
- //Matching axis name
- this._handles.mouseover(function() {
- if (!that.resizing) {
- if (this.className) {
- axis = this.className.match(/ui-resizable-(se|sw|ne|nw|n|e|s|w)/i);
- }
- //Axis, default = se
- that.axis = axis && axis[1] ? axis[1] : "se";
- }
- });
-
- //If we want to auto hide the elements
- if (o.autoHide) {
- this._handles.hide();
- $(this.element)
- .addClass("ui-resizable-autohide")
- .mouseenter(function() {
- if (o.disabled) {
- return;
- }
- $(this).removeClass("ui-resizable-autohide");
- that._handles.show();
- })
- .mouseleave(function(){
- if (o.disabled) {
- return;
- }
- if (!that.resizing) {
- $(this).addClass("ui-resizable-autohide");
- that._handles.hide();
- }
- });
- }
-
- //Initialize the mouse interaction
- this._mouseInit();
-
- },
-
- _destroy: function() {
-
- this._mouseDestroy();
-
- var wrapper,
- _destroy = function(exp) {
- $(exp).removeClass("ui-resizable ui-resizable-disabled ui-resizable-resizing")
- .removeData("resizable").removeData("ui-resizable").unbind(".resizable").find(".ui-resizable-handle").remove();
- };
-
- //TODO: Unwrap at same DOM position
- if (this.elementIsWrapper) {
- _destroy(this.element);
- wrapper = this.element;
- this.originalElement.css({
- position: wrapper.css("position"),
- width: wrapper.outerWidth(),
- height: wrapper.outerHeight(),
- top: wrapper.css("top"),
- left: wrapper.css("left")
- }).insertAfter( wrapper );
- wrapper.remove();
- }
-
- this.originalElement.css("resize", this.originalResizeStyle);
- _destroy(this.originalElement);
-
- return this;
- },
-
- _mouseCapture: function(event) {
- var i, handle,
- capture = false;
-
- for (i in this.handles) {
- handle = $(this.handles[i])[0];
- if (handle === event.target || $.contains(handle, event.target)) {
- capture = true;
- }
- }
-
- return !this.options.disabled && capture;
- },
-
- _mouseStart: function(event) {
-
- var curleft, curtop, cursor,
- o = this.options,
- iniPos = this.element.position(),
- el = this.element;
-
- this.resizing = true;
-
- // bugfix for http://dev.jquery.com/ticket/1749
- if ( (/absolute/).test( el.css("position") ) ) {
- el.css({ position: "absolute", top: el.css("top"), left: el.css("left") });
- } else if (el.is(".ui-draggable")) {
- el.css({ position: "absolute", top: iniPos.top, left: iniPos.left });
- }
-
- this._renderProxy();
-
- curleft = num(this.helper.css("left"));
- curtop = num(this.helper.css("top"));
-
- if (o.containment) {
- curleft += $(o.containment).scrollLeft() || 0;
- curtop += $(o.containment).scrollTop() || 0;
- }
-
- //Store needed variables
- this.offset = this.helper.offset();
- this.position = { left: curleft, top: curtop };
- this.size = this._helper ? { width: el.outerWidth(), height: el.outerHeight() } : { width: el.width(), height: el.height() };
- this.originalSize = this._helper ? { width: el.outerWidth(), height: el.outerHeight() } : { width: el.width(), height: el.height() };
- this.originalPosition = { left: curleft, top: curtop };
- this.sizeDiff = { width: el.outerWidth() - el.width(), height: el.outerHeight() - el.height() };
- this.originalMousePosition = { left: event.pageX, top: event.pageY };
-
- //Aspect Ratio
- this.aspectRatio = (typeof o.aspectRatio === "number") ? o.aspectRatio : ((this.originalSize.width / this.originalSize.height) || 1);
-
- cursor = $(".ui-resizable-" + this.axis).css("cursor");
- $("body").css("cursor", cursor === "auto" ? this.axis + "-resize" : cursor);
-
- el.addClass("ui-resizable-resizing");
- this._propagate("start", event);
- return true;
- },
-
- _mouseDrag: function(event) {
-
- //Increase performance, avoid regex
- var data,
- el = this.helper, props = {},
- smp = this.originalMousePosition,
- a = this.axis,
- prevTop = this.position.top,
- prevLeft = this.position.left,
- prevWidth = this.size.width,
- prevHeight = this.size.height,
- dx = (event.pageX-smp.left)||0,
- dy = (event.pageY-smp.top)||0,
- trigger = this._change[a];
-
- if (!trigger) {
- return false;
- }
-
-		// Calculate the attrs that will be changed
- data = trigger.apply(this, [event, dx, dy]);
-
- // Put this in the mouseDrag handler since the user can start pressing shift while resizing
- this._updateVirtualBoundaries(event.shiftKey);
- if (this._aspectRatio || event.shiftKey) {
- data = this._updateRatio(data, event);
- }
-
- data = this._respectSize(data, event);
-
- this._updateCache(data);
-
- // plugins callbacks need to be called first
- this._propagate("resize", event);
-
- if (this.position.top !== prevTop) {
- props.top = this.position.top + "px";
- }
- if (this.position.left !== prevLeft) {
- props.left = this.position.left + "px";
- }
- if (this.size.width !== prevWidth) {
- props.width = this.size.width + "px";
- }
- if (this.size.height !== prevHeight) {
- props.height = this.size.height + "px";
- }
- el.css(props);
-
- if (!this._helper && this._proportionallyResizeElements.length) {
- this._proportionallyResize();
- }
-
- // Call the user callback if the element was resized
- if ( ! $.isEmptyObject(props) ) {
- this._trigger("resize", event, this.ui());
- }
-
- return false;
- },
-
- _mouseStop: function(event) {
-
- this.resizing = false;
- var pr, ista, soffseth, soffsetw, s, left, top,
- o = this.options, that = this;
-
- if(this._helper) {
-
- pr = this._proportionallyResizeElements;
- ista = pr.length && (/textarea/i).test(pr[0].nodeName);
- soffseth = ista && $.ui.hasScroll(pr[0], "left") /* TODO - jump height */ ? 0 : that.sizeDiff.height;
- soffsetw = ista ? 0 : that.sizeDiff.width;
-
- s = { width: (that.helper.width() - soffsetw), height: (that.helper.height() - soffseth) };
- left = (parseInt(that.element.css("left"), 10) + (that.position.left - that.originalPosition.left)) || null;
- top = (parseInt(that.element.css("top"), 10) + (that.position.top - that.originalPosition.top)) || null;
-
- if (!o.animate) {
- this.element.css($.extend(s, { top: top, left: left }));
- }
-
- that.helper.height(that.size.height);
- that.helper.width(that.size.width);
-
- if (this._helper && !o.animate) {
- this._proportionallyResize();
- }
- }
-
- $("body").css("cursor", "auto");
-
- this.element.removeClass("ui-resizable-resizing");
-
- this._propagate("stop", event);
-
- if (this._helper) {
- this.helper.remove();
- }
-
- return false;
-
- },
-
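-	// Derive the effective min/max width/height, tightening them when an aspect ratio is enforced so both dimensions stay within bounds.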
- _updateVirtualBoundaries: function(forceAspectRatio) {
- var pMinWidth, pMaxWidth, pMinHeight, pMaxHeight, b,
- o = this.options;
-
- b = {
- minWidth: isNumber(o.minWidth) ? o.minWidth : 0,
- maxWidth: isNumber(o.maxWidth) ? o.maxWidth : Infinity,
- minHeight: isNumber(o.minHeight) ? o.minHeight : 0,
- maxHeight: isNumber(o.maxHeight) ? o.maxHeight : Infinity
- };
-
- if(this._aspectRatio || forceAspectRatio) {
-			// We want to create an enclosing box whose aspect ratio is the requested one
-			// First, compute the "projected" size for each dimension based on the aspect ratio and the other dimension
- pMinWidth = b.minHeight * this.aspectRatio;
- pMinHeight = b.minWidth / this.aspectRatio;
- pMaxWidth = b.maxHeight * this.aspectRatio;
- pMaxHeight = b.maxWidth / this.aspectRatio;
-
- if(pMinWidth > b.minWidth) {
- b.minWidth = pMinWidth;
- }
- if(pMinHeight > b.minHeight) {
- b.minHeight = pMinHeight;
- }
- if(pMaxWidth < b.maxWidth) {
- b.maxWidth = pMaxWidth;
- }
- if(pMaxHeight < b.maxHeight) {
- b.maxHeight = pMaxHeight;
- }
- }
- this._vBoundaries = b;
- },
-
- _updateCache: function(data) {
- this.offset = this.helper.offset();
- if (isNumber(data.left)) {
- this.position.left = data.left;
- }
- if (isNumber(data.top)) {
- this.position.top = data.top;
- }
- if (isNumber(data.height)) {
- this.size.height = data.height;
- }
- if (isNumber(data.width)) {
- this.size.width = data.width;
- }
- },
-
- _updateRatio: function( data ) {
-
- var cpos = this.position,
- csize = this.size,
- a = this.axis;
-
- if (isNumber(data.height)) {
- data.width = (data.height * this.aspectRatio);
- } else if (isNumber(data.width)) {
- data.height = (data.width / this.aspectRatio);
- }
-
- if (a === "sw") {
- data.left = cpos.left + (csize.width - data.width);
- data.top = null;
- }
- if (a === "nw") {
- data.top = cpos.top + (csize.height - data.height);
- data.left = cpos.left + (csize.width - data.width);
- }
-
- return data;
- },
-
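-	// Clamp the computed size to the min/max boundaries and, for west/north handles, shift the position so the opposite edge stays anchored.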
- _respectSize: function( data ) {
-
- var o = this._vBoundaries,
- a = this.axis,
- ismaxw = isNumber(data.width) && o.maxWidth && (o.maxWidth < data.width), ismaxh = isNumber(data.height) && o.maxHeight && (o.maxHeight < data.height),
- isminw = isNumber(data.width) && o.minWidth && (o.minWidth > data.width), isminh = isNumber(data.height) && o.minHeight && (o.minHeight > data.height),
- dw = this.originalPosition.left + this.originalSize.width,
- dh = this.position.top + this.size.height,
- cw = /sw|nw|w/.test(a), ch = /nw|ne|n/.test(a);
- if (isminw) {
- data.width = o.minWidth;
- }
- if (isminh) {
- data.height = o.minHeight;
- }
- if (ismaxw) {
- data.width = o.maxWidth;
- }
- if (ismaxh) {
- data.height = o.maxHeight;
- }
-
- if (isminw && cw) {
- data.left = dw - o.minWidth;
- }
- if (ismaxw && cw) {
- data.left = dw - o.maxWidth;
- }
- if (isminh && ch) {
- data.top = dh - o.minHeight;
- }
- if (ismaxh && ch) {
- data.top = dh - o.maxHeight;
- }
-
- // fixing jump error on top/left - bug #2330
- if (!data.width && !data.height && !data.left && data.top) {
- data.top = null;
- } else if (!data.width && !data.height && !data.top && data.left) {
- data.left = null;
- }
-
- return data;
- },
-
- _proportionallyResize: function() {
-
- if (!this._proportionallyResizeElements.length) {
- return;
- }
-
- var i, j, borders, paddings, prel,
- element = this.helper || this.element;
-
- for ( i=0; i < this._proportionallyResizeElements.length; i++) {
-
- prel = this._proportionallyResizeElements[i];
-
- if (!this.borderDif) {
- this.borderDif = [];
- borders = [prel.css("borderTopWidth"), prel.css("borderRightWidth"), prel.css("borderBottomWidth"), prel.css("borderLeftWidth")];
- paddings = [prel.css("paddingTop"), prel.css("paddingRight"), prel.css("paddingBottom"), prel.css("paddingLeft")];
-
- for ( j = 0; j < borders.length; j++ ) {
- this.borderDif[ j ] = ( parseInt( borders[ j ], 10 ) || 0 ) + ( parseInt( paddings[ j ], 10 ) || 0 );
- }
- }
-
- prel.css({
- height: (element.height() - this.borderDif[0] - this.borderDif[2]) || 0,
- width: (element.width() - this.borderDif[1] - this.borderDif[3]) || 0
- });
-
- }
-
- },
-
- _renderProxy: function() {
-
- var el = this.element, o = this.options;
- this.elementOffset = el.offset();
-
- if(this._helper) {
-
- this.helper = this.helper || $("<div style='overflow:hidden;'></div>");
-
- this.helper.addClass(this._helper).css({
- width: this.element.outerWidth() - 1,
- height: this.element.outerHeight() - 1,
- position: "absolute",
- left: this.elementOffset.left +"px",
- top: this.elementOffset.top +"px",
- zIndex: ++o.zIndex //TODO: Don't modify option
- });
-
- this.helper
- .appendTo("body")
- .disableSelection();
-
- } else {
- this.helper = this.element;
- }
-
- },
-
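-	// Per-handle delta calculators: given the mouse delta (dx, dy), each returns the new size/position for its axis; corner handles combine two of them.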
- _change: {
- e: function(event, dx) {
- return { width: this.originalSize.width + dx };
- },
- w: function(event, dx) {
- var cs = this.originalSize, sp = this.originalPosition;
- return { left: sp.left + dx, width: cs.width - dx };
- },
- n: function(event, dx, dy) {
- var cs = this.originalSize, sp = this.originalPosition;
- return { top: sp.top + dy, height: cs.height - dy };
- },
- s: function(event, dx, dy) {
- return { height: this.originalSize.height + dy };
- },
- se: function(event, dx, dy) {
- return $.extend(this._change.s.apply(this, arguments), this._change.e.apply(this, [event, dx, dy]));
- },
- sw: function(event, dx, dy) {
- return $.extend(this._change.s.apply(this, arguments), this._change.w.apply(this, [event, dx, dy]));
- },
- ne: function(event, dx, dy) {
- return $.extend(this._change.n.apply(this, arguments), this._change.e.apply(this, [event, dx, dy]));
- },
- nw: function(event, dx, dy) {
- return $.extend(this._change.n.apply(this, arguments), this._change.w.apply(this, [event, dx, dy]));
- }
- },
-
- _propagate: function(n, event) {
- $.ui.plugin.call(this, n, [event, this.ui()]);
- (n !== "resize" && this._trigger(n, event, this.ui()));
- },
-
- plugins: {},
-
- ui: function() {
- return {
- originalElement: this.originalElement,
- element: this.element,
- helper: this.helper,
- position: this.position,
- size: this.size,
- originalSize: this.originalSize,
- originalPosition: this.originalPosition
- };
- }
-
-});
-
-/*
- * Resizable Extensions
- */
-
-$.ui.plugin.add("resizable", "animate", {
-
- stop: function( event ) {
- var that = $(this).data("ui-resizable"),
- o = that.options,
- pr = that._proportionallyResizeElements,
- ista = pr.length && (/textarea/i).test(pr[0].nodeName),
- soffseth = ista && $.ui.hasScroll(pr[0], "left") /* TODO - jump height */ ? 0 : that.sizeDiff.height,
- soffsetw = ista ? 0 : that.sizeDiff.width,
- style = { width: (that.size.width - soffsetw), height: (that.size.height - soffseth) },
- left = (parseInt(that.element.css("left"), 10) + (that.position.left - that.originalPosition.left)) || null,
- top = (parseInt(that.element.css("top"), 10) + (that.position.top - that.originalPosition.top)) || null;
-
- that.element.animate(
- $.extend(style, top && left ? { top: top, left: left } : {}), {
- duration: o.animateDuration,
- easing: o.animateEasing,
- step: function() {
-
- var data = {
- width: parseInt(that.element.css("width"), 10),
- height: parseInt(that.element.css("height"), 10),
- top: parseInt(that.element.css("top"), 10),
- left: parseInt(that.element.css("left"), 10)
- };
-
- if (pr && pr.length) {
- $(pr[0]).css({ width: data.width, height: data.height });
- }
-
- // propagating resize, and updating values for each animation step
- that._updateCache(data);
- that._propagate("resize", event);
-
- }
- }
- );
- }
-
-});
-
-$.ui.plugin.add("resizable", "containment", {
-
- start: function() {
- var element, p, co, ch, cw, width, height,
- that = $(this).data("ui-resizable"),
- o = that.options,
- el = that.element,
- oc = o.containment,
- ce = (oc instanceof $) ? oc.get(0) : (/parent/.test(oc)) ? el.parent().get(0) : oc;
-
- if (!ce) {
- return;
- }
-
- that.containerElement = $(ce);
-
- if (/document/.test(oc) || oc === document) {
- that.containerOffset = { left: 0, top: 0 };
- that.containerPosition = { left: 0, top: 0 };
-
- that.parentData = {
- element: $(document), left: 0, top: 0,
- width: $(document).width(), height: $(document).height() || document.body.parentNode.scrollHeight
- };
- }
-
-		// It's a DOM node, so compute top, left, right, bottom
- else {
- element = $(ce);
- p = [];
- $([ "Top", "Right", "Left", "Bottom" ]).each(function(i, name) { p[i] = num(element.css("padding" + name)); });
-
- that.containerOffset = element.offset();
- that.containerPosition = element.position();
- that.containerSize = { height: (element.innerHeight() - p[3]), width: (element.innerWidth() - p[1]) };
-
- co = that.containerOffset;
- ch = that.containerSize.height;
- cw = that.containerSize.width;
- width = ($.ui.hasScroll(ce, "left") ? ce.scrollWidth : cw );
- height = ($.ui.hasScroll(ce) ? ce.scrollHeight : ch);
-
- that.parentData = {
- element: ce, left: co.left, top: co.top, width: width, height: height
- };
- }
- },
-
- resize: function( event ) {
- var woset, hoset, isParent, isOffsetRelative,
- that = $(this).data("ui-resizable"),
- o = that.options,
- co = that.containerOffset, cp = that.position,
- pRatio = that._aspectRatio || event.shiftKey,
- cop = { top:0, left:0 }, ce = that.containerElement;
-
- if (ce[0] !== document && (/static/).test(ce.css("position"))) {
- cop = co;
- }
-
- if (cp.left < (that._helper ? co.left : 0)) {
- that.size.width = that.size.width + (that._helper ? (that.position.left - co.left) : (that.position.left - cop.left));
- if (pRatio) {
- that.size.height = that.size.width / that.aspectRatio;
- }
- that.position.left = o.helper ? co.left : 0;
- }
-
- if (cp.top < (that._helper ? co.top : 0)) {
- that.size.height = that.size.height + (that._helper ? (that.position.top - co.top) : that.position.top);
- if (pRatio) {
- that.size.width = that.size.height * that.aspectRatio;
- }
- that.position.top = that._helper ? co.top : 0;
- }
-
- that.offset.left = that.parentData.left+that.position.left;
- that.offset.top = that.parentData.top+that.position.top;
-
- woset = Math.abs( (that._helper ? that.offset.left - cop.left : (that.offset.left - cop.left)) + that.sizeDiff.width );
- hoset = Math.abs( (that._helper ? that.offset.top - cop.top : (that.offset.top - co.top)) + that.sizeDiff.height );
-
- isParent = that.containerElement.get(0) === that.element.parent().get(0);
- isOffsetRelative = /relative|absolute/.test(that.containerElement.css("position"));
-
- if(isParent && isOffsetRelative) {
- woset -= that.parentData.left;
- }
-
- if (woset + that.size.width >= that.parentData.width) {
- that.size.width = that.parentData.width - woset;
- if (pRatio) {
- that.size.height = that.size.width / that.aspectRatio;
- }
- }
-
- if (hoset + that.size.height >= that.parentData.height) {
- that.size.height = that.parentData.height - hoset;
- if (pRatio) {
- that.size.width = that.size.height * that.aspectRatio;
- }
- }
- },
-
- stop: function(){
- var that = $(this).data("ui-resizable"),
- o = that.options,
- co = that.containerOffset,
- cop = that.containerPosition,
- ce = that.containerElement,
- helper = $(that.helper),
- ho = helper.offset(),
- w = helper.outerWidth() - that.sizeDiff.width,
- h = helper.outerHeight() - that.sizeDiff.height;
-
- if (that._helper && !o.animate && (/relative/).test(ce.css("position"))) {
- $(this).css({ left: ho.left - cop.left - co.left, width: w, height: h });
- }
-
- if (that._helper && !o.animate && (/static/).test(ce.css("position"))) {
- $(this).css({ left: ho.left - cop.left - co.left, width: w, height: h });
- }
-
- }
-});
-
-$.ui.plugin.add("resizable", "alsoResize", {
-
- start: function () {
- var that = $(this).data("ui-resizable"),
- o = that.options,
- _store = function (exp) {
- $(exp).each(function() {
- var el = $(this);
- el.data("ui-resizable-alsoresize", {
- width: parseInt(el.width(), 10), height: parseInt(el.height(), 10),
- left: parseInt(el.css("left"), 10), top: parseInt(el.css("top"), 10)
- });
- });
- };
-
- if (typeof(o.alsoResize) === "object" && !o.alsoResize.parentNode) {
- if (o.alsoResize.length) { o.alsoResize = o.alsoResize[0]; _store(o.alsoResize); }
- else { $.each(o.alsoResize, function (exp) { _store(exp); }); }
- }else{
- _store(o.alsoResize);
- }
- },
-
- resize: function (event, ui) {
- var that = $(this).data("ui-resizable"),
- o = that.options,
- os = that.originalSize,
- op = that.originalPosition,
- delta = {
- height: (that.size.height - os.height) || 0, width: (that.size.width - os.width) || 0,
- top: (that.position.top - op.top) || 0, left: (that.position.left - op.left) || 0
- },
-
- _alsoResize = function (exp, c) {
- $(exp).each(function() {
- var el = $(this), start = $(this).data("ui-resizable-alsoresize"), style = {},
- css = c && c.length ? c : el.parents(ui.originalElement[0]).length ? ["width", "height"] : ["width", "height", "top", "left"];
-
- $.each(css, function (i, prop) {
- var sum = (start[prop]||0) + (delta[prop]||0);
- if (sum && sum >= 0) {
- style[prop] = sum || null;
- }
- });
-
- el.css(style);
- });
- };
-
- if (typeof(o.alsoResize) === "object" && !o.alsoResize.nodeType) {
- $.each(o.alsoResize, function (exp, c) { _alsoResize(exp, c); });
- }else{
- _alsoResize(o.alsoResize);
- }
- },
-
- stop: function () {
- $(this).removeData("ui-resizable-alsoresize");
- }
-});
-
-$.ui.plugin.add("resizable", "ghost", {
-
- start: function() {
-
- var that = $(this).data("ui-resizable"), o = that.options, cs = that.size;
-
- that.ghost = that.originalElement.clone();
- that.ghost
- .css({ opacity: 0.25, display: "block", position: "relative", height: cs.height, width: cs.width, margin: 0, left: 0, top: 0 })
- .addClass("ui-resizable-ghost")
- .addClass(typeof o.ghost === "string" ? o.ghost : "");
-
- that.ghost.appendTo(that.helper);
-
- },
-
- resize: function(){
- var that = $(this).data("ui-resizable");
- if (that.ghost) {
- that.ghost.css({ position: "relative", height: that.size.height, width: that.size.width });
- }
- },
-
- stop: function() {
- var that = $(this).data("ui-resizable");
- if (that.ghost && that.helper) {
- that.helper.get(0).removeChild(that.ghost.get(0));
- }
- }
-
-});
-
-$.ui.plugin.add("resizable", "grid", {
-
- resize: function() {
- var that = $(this).data("ui-resizable"),
- o = that.options,
- cs = that.size,
- os = that.originalSize,
- op = that.originalPosition,
- a = that.axis,
- grid = typeof o.grid === "number" ? [o.grid, o.grid] : o.grid,
- gridX = (grid[0]||1),
- gridY = (grid[1]||1),
- ox = Math.round((cs.width - os.width) / gridX) * gridX,
- oy = Math.round((cs.height - os.height) / gridY) * gridY,
- newWidth = os.width + ox,
- newHeight = os.height + oy,
- isMaxWidth = o.maxWidth && (o.maxWidth < newWidth),
- isMaxHeight = o.maxHeight && (o.maxHeight < newHeight),
- isMinWidth = o.minWidth && (o.minWidth > newWidth),
- isMinHeight = o.minHeight && (o.minHeight > newHeight);
-
- o.grid = grid;
-
- if (isMinWidth) {
- newWidth = newWidth + gridX;
- }
- if (isMinHeight) {
- newHeight = newHeight + gridY;
- }
- if (isMaxWidth) {
- newWidth = newWidth - gridX;
- }
- if (isMaxHeight) {
- newHeight = newHeight - gridY;
- }
-
- if (/^(se|s|e)$/.test(a)) {
- that.size.width = newWidth;
- that.size.height = newHeight;
- } else if (/^(ne)$/.test(a)) {
- that.size.width = newWidth;
- that.size.height = newHeight;
- that.position.top = op.top - oy;
- } else if (/^(sw)$/.test(a)) {
- that.size.width = newWidth;
- that.size.height = newHeight;
- that.position.left = op.left - ox;
- } else {
- that.size.width = newWidth;
- that.size.height = newHeight;
- that.position.top = op.top - oy;
- that.position.left = op.left - ox;
- }
- }
-
-});
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.widget("ui.selectable", $.ui.mouse, {
- version: "1.10.3",
- options: {
- appendTo: "body",
- autoRefresh: true,
- distance: 0,
- filter: "*",
- tolerance: "touch",
-
- // callbacks
- selected: null,
- selecting: null,
- start: null,
- stop: null,
- unselected: null,
- unselecting: null
- },
- _create: function() {
- var selectees,
- that = this;
-
- this.element.addClass("ui-selectable");
-
- this.dragged = false;
-
- // cache selectee children based on filter
- this.refresh = function() {
- selectees = $(that.options.filter, that.element[0]);
- selectees.addClass("ui-selectee");
- selectees.each(function() {
- var $this = $(this),
- pos = $this.offset();
- $.data(this, "selectable-item", {
- element: this,
- $element: $this,
- left: pos.left,
- top: pos.top,
- right: pos.left + $this.outerWidth(),
- bottom: pos.top + $this.outerHeight(),
- startselected: false,
- selected: $this.hasClass("ui-selected"),
- selecting: $this.hasClass("ui-selecting"),
- unselecting: $this.hasClass("ui-unselecting")
- });
- });
- };
- this.refresh();
-
- this.selectees = selectees.addClass("ui-selectee");
-
- this._mouseInit();
-
- this.helper = $("<div class='ui-selectable-helper'></div>");
- },
-
- _destroy: function() {
- this.selectees
- .removeClass("ui-selectee")
- .removeData("selectable-item");
- this.element
- .removeClass("ui-selectable ui-selectable-disabled");
- this._mouseDestroy();
- },
-
- _mouseStart: function(event) {
- var that = this,
- options = this.options;
-
- this.opos = [event.pageX, event.pageY];
-
- if (this.options.disabled) {
- return;
- }
-
- this.selectees = $(options.filter, this.element[0]);
-
- this._trigger("start", event);
-
- $(options.appendTo).append(this.helper);
- // position helper (lasso)
- this.helper.css({
- "left": event.pageX,
- "top": event.pageY,
- "width": 0,
- "height": 0
- });
-
- if (options.autoRefresh) {
- this.refresh();
- }
-
- this.selectees.filter(".ui-selected").each(function() {
- var selectee = $.data(this, "selectable-item");
- selectee.startselected = true;
- if (!event.metaKey && !event.ctrlKey) {
- selectee.$element.removeClass("ui-selected");
- selectee.selected = false;
- selectee.$element.addClass("ui-unselecting");
- selectee.unselecting = true;
- // selectable UNSELECTING callback
- that._trigger("unselecting", event, {
- unselecting: selectee.element
- });
- }
- });
-
- $(event.target).parents().addBack().each(function() {
- var doSelect,
- selectee = $.data(this, "selectable-item");
- if (selectee) {
- doSelect = (!event.metaKey && !event.ctrlKey) || !selectee.$element.hasClass("ui-selected");
- selectee.$element
- .removeClass(doSelect ? "ui-unselecting" : "ui-selected")
- .addClass(doSelect ? "ui-selecting" : "ui-unselecting");
- selectee.unselecting = !doSelect;
- selectee.selecting = doSelect;
- selectee.selected = doSelect;
- // selectable (UN)SELECTING callback
- if (doSelect) {
- that._trigger("selecting", event, {
- selecting: selectee.element
- });
- } else {
- that._trigger("unselecting", event, {
- unselecting: selectee.element
- });
- }
- return false;
- }
- });
-
- },
-
- _mouseDrag: function(event) {
-
- this.dragged = true;
-
- if (this.options.disabled) {
- return;
- }
-
- var tmp,
- that = this,
- options = this.options,
- x1 = this.opos[0],
- y1 = this.opos[1],
- x2 = event.pageX,
- y2 = event.pageY;
-
- if (x1 > x2) { tmp = x2; x2 = x1; x1 = tmp; }
- if (y1 > y2) { tmp = y2; y2 = y1; y1 = tmp; }
- this.helper.css({left: x1, top: y1, width: x2-x1, height: y2-y1});
-
- this.selectees.each(function() {
- var selectee = $.data(this, "selectable-item"),
- hit = false;
-
- //prevent helper from being selected if appendTo: selectable
- if (!selectee || selectee.element === that.element[0]) {
- return;
- }
-
- if (options.tolerance === "touch") {
- hit = ( !(selectee.left > x2 || selectee.right < x1 || selectee.top > y2 || selectee.bottom < y1) );
- } else if (options.tolerance === "fit") {
- hit = (selectee.left > x1 && selectee.right < x2 && selectee.top > y1 && selectee.bottom < y2);
- }
-
- if (hit) {
- // SELECT
- if (selectee.selected) {
- selectee.$element.removeClass("ui-selected");
- selectee.selected = false;
- }
- if (selectee.unselecting) {
- selectee.$element.removeClass("ui-unselecting");
- selectee.unselecting = false;
- }
- if (!selectee.selecting) {
- selectee.$element.addClass("ui-selecting");
- selectee.selecting = true;
- // selectable SELECTING callback
- that._trigger("selecting", event, {
- selecting: selectee.element
- });
- }
- } else {
- // UNSELECT
- if (selectee.selecting) {
- if ((event.metaKey || event.ctrlKey) && selectee.startselected) {
- selectee.$element.removeClass("ui-selecting");
- selectee.selecting = false;
- selectee.$element.addClass("ui-selected");
- selectee.selected = true;
- } else {
- selectee.$element.removeClass("ui-selecting");
- selectee.selecting = false;
- if (selectee.startselected) {
- selectee.$element.addClass("ui-unselecting");
- selectee.unselecting = true;
- }
- // selectable UNSELECTING callback
- that._trigger("unselecting", event, {
- unselecting: selectee.element
- });
- }
- }
- if (selectee.selected) {
- if (!event.metaKey && !event.ctrlKey && !selectee.startselected) {
- selectee.$element.removeClass("ui-selected");
- selectee.selected = false;
-
- selectee.$element.addClass("ui-unselecting");
- selectee.unselecting = true;
- // selectable UNSELECTING callback
- that._trigger("unselecting", event, {
- unselecting: selectee.element
- });
- }
- }
- }
- });
-
- return false;
- },
-
- _mouseStop: function(event) {
- var that = this;
-
- this.dragged = false;
-
- $(".ui-unselecting", this.element[0]).each(function() {
- var selectee = $.data(this, "selectable-item");
- selectee.$element.removeClass("ui-unselecting");
- selectee.unselecting = false;
- selectee.startselected = false;
- that._trigger("unselected", event, {
- unselected: selectee.element
- });
- });
- $(".ui-selecting", this.element[0]).each(function() {
- var selectee = $.data(this, "selectable-item");
- selectee.$element.removeClass("ui-selecting").addClass("ui-selected");
- selectee.selecting = false;
- selectee.selected = true;
- selectee.startselected = true;
- that._trigger("selected", event, {
- selected: selectee.element
- });
- });
- this._trigger("stop", event);
-
- this.helper.remove();
-
- return false;
- }
-
-});
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-/*jshint loopfunc: true */
-
-function isOverAxis( x, reference, size ) {
- return ( x > reference ) && ( x < ( reference + size ) );
-}
-
-function isFloating(item) {
- return (/left|right/).test(item.css("float")) || (/inline|table-cell/).test(item.css("display"));
-}
-
-$.widget("ui.sortable", $.ui.mouse, {
- version: "1.10.3",
- widgetEventPrefix: "sort",
- ready: false,
- options: {
- appendTo: "parent",
- axis: false,
- connectWith: false,
- containment: false,
- cursor: "auto",
- cursorAt: false,
- dropOnEmpty: true,
- forcePlaceholderSize: false,
- forceHelperSize: false,
- grid: false,
- handle: false,
- helper: "original",
- items: "> *",
- opacity: false,
- placeholder: false,
- revert: false,
- scroll: true,
- scrollSensitivity: 20,
- scrollSpeed: 20,
- scope: "default",
- tolerance: "intersect",
- zIndex: 1000,
-
- // callbacks
- activate: null,
- beforeStop: null,
- change: null,
- deactivate: null,
- out: null,
- over: null,
- receive: null,
- remove: null,
- sort: null,
- start: null,
- stop: null,
- update: null
- },
- _create: function() {
-
- var o = this.options;
- this.containerCache = {};
- this.element.addClass("ui-sortable");
-
- //Get the items
- this.refresh();
-
- //Let's determine if the items are being displayed horizontally
- this.floating = this.items.length ? o.axis === "x" || isFloating(this.items[0].item) : false;
-
- //Let's determine the parent's offset
- this.offset = this.element.offset();
-
- //Initialize mouse events for interaction
- this._mouseInit();
-
- //We're ready to go
- this.ready = true;
-
- },
-
- _destroy: function() {
- this.element
- .removeClass("ui-sortable ui-sortable-disabled");
- this._mouseDestroy();
-
- for ( var i = this.items.length - 1; i >= 0; i-- ) {
- this.items[i].item.removeData(this.widgetName + "-item");
- }
-
- return this;
- },
-
- _setOption: function(key, value){
- if ( key === "disabled" ) {
- this.options[ key ] = value;
-
- this.widget().toggleClass( "ui-sortable-disabled", !!value );
- } else {
- // Don't call widget base _setOption for disable as it adds ui-state-disabled class
- $.Widget.prototype._setOption.apply(this, arguments);
- }
- },
-
- _mouseCapture: function(event, overrideHandle) {
- var currentItem = null,
- validHandle = false,
- that = this;
-
- if (this.reverting) {
- return false;
- }
-
- if(this.options.disabled || this.options.type === "static") {
- return false;
- }
-
- //We have to refresh the items data once first
- this._refreshItems(event);
-
- //Find out if the clicked node (or one of its parents) is an actual item in this.items
- $(event.target).parents().each(function() {
- if($.data(this, that.widgetName + "-item") === that) {
- currentItem = $(this);
- return false;
- }
- });
- if($.data(event.target, that.widgetName + "-item") === that) {
- currentItem = $(event.target);
- }
-
- if(!currentItem) {
- return false;
- }
- if(this.options.handle && !overrideHandle) {
- $(this.options.handle, currentItem).find("*").addBack().each(function() {
- if(this === event.target) {
- validHandle = true;
- }
- });
- if(!validHandle) {
- return false;
- }
- }
-
- this.currentItem = currentItem;
- this._removeCurrentsFromItems();
- return true;
-
- },
-
- _mouseStart: function(event, overrideHandle, noActivation) {
-
- var i, body,
- o = this.options;
-
- this.currentContainer = this;
-
- //We only need to call refreshPositions, because the refreshItems call has been moved to mouseCapture
- this.refreshPositions();
-
- //Create and append the visible helper
- this.helper = this._createHelper(event);
-
- //Cache the helper size
- this._cacheHelperProportions();
-
- /*
- * - Position generation -
- * This block generates everything position related - it's the core of draggables.
- */
-
- //Cache the margins of the original element
- this._cacheMargins();
-
- //Get the next scrolling parent
- this.scrollParent = this.helper.scrollParent();
-
- //The element's absolute position on the page minus margins
- this.offset = this.currentItem.offset();
- this.offset = {
- top: this.offset.top - this.margins.top,
- left: this.offset.left - this.margins.left
- };
-
- $.extend(this.offset, {
- click: { //Where the click happened, relative to the element
- left: event.pageX - this.offset.left,
- top: event.pageY - this.offset.top
- },
- parent: this._getParentOffset(),
- relative: this._getRelativeOffset() //This is the relative-to-absolute position minus the actual position calculation - only used for relatively positioned helpers
- });
-
- // Only after we have the offset can we change the helper's position to absolute
- // TODO: Still need to figure out a way to make relative sorting possible
- this.helper.css("position", "absolute");
- this.cssPosition = this.helper.css("position");
-
- //Generate the original position
- this.originalPosition = this._generatePosition(event);
- this.originalPageX = event.pageX;
- this.originalPageY = event.pageY;
-
- //Adjust the mouse offset relative to the helper if "cursorAt" is supplied
- (o.cursorAt && this._adjustOffsetFromHelper(o.cursorAt));
-
- //Cache the former DOM position
- this.domPosition = { prev: this.currentItem.prev()[0], parent: this.currentItem.parent()[0] };
-
- //If the helper is not the original, hide the original so it's not playing any role during the drag; this won't cause anything bad
- if(this.helper[0] !== this.currentItem[0]) {
- this.currentItem.hide();
- }
-
- //Create the placeholder
- this._createPlaceholder();
-
- //Set a containment if given in the options
- if(o.containment) {
- this._setContainment();
- }
-
- if( o.cursor && o.cursor !== "auto" ) { // cursor option
- body = this.document.find( "body" );
-
- // support: IE
- this.storedCursor = body.css( "cursor" );
- body.css( "cursor", o.cursor );
-
- this.storedStylesheet = $( "<style>*{ cursor: "+o.cursor+" !important; }</style>" ).appendTo( body );
- }
-
- if(o.opacity) { // opacity option
- if (this.helper.css("opacity")) {
- this._storedOpacity = this.helper.css("opacity");
- }
- this.helper.css("opacity", o.opacity);
- }
-
- if(o.zIndex) { // zIndex option
- if (this.helper.css("zIndex")) {
- this._storedZIndex = this.helper.css("zIndex");
- }
- this.helper.css("zIndex", o.zIndex);
- }
-
- //Prepare scrolling
- if(this.scrollParent[0] !== document && this.scrollParent[0].tagName !== "HTML") {
- this.overflowOffset = this.scrollParent.offset();
- }
-
- //Call callbacks
- this._trigger("start", event, this._uiHash());
-
- //Recache the helper size
- if(!this._preserveHelperProportions) {
- this._cacheHelperProportions();
- }
-
-
- //Post "activate" events to possible containers
- if( !noActivation ) {
- for ( i = this.containers.length - 1; i >= 0; i-- ) {
- this.containers[ i ]._trigger( "activate", event, this._uiHash( this ) );
- }
- }
-
- //Prepare possible droppables
- if($.ui.ddmanager) {
- $.ui.ddmanager.current = this;
- }
-
- if ($.ui.ddmanager && !o.dropBehaviour) {
- $.ui.ddmanager.prepareOffsets(this, event);
- }
-
- this.dragging = true;
-
- this.helper.addClass("ui-sortable-helper");
- this._mouseDrag(event); //Execute the drag once - this causes the helper not to be visible before getting its correct position
- return true;
-
- },
-
- _mouseDrag: function(event) {
- var i, item, itemElement, intersection,
- o = this.options,
- scrolled = false;
-
- //Compute the helpers position
- this.position = this._generatePosition(event);
- this.positionAbs = this._convertPositionTo("absolute");
-
- if (!this.lastPositionAbs) {
- this.lastPositionAbs = this.positionAbs;
- }
-
- //Do scrolling
- if(this.options.scroll) {
- if(this.scrollParent[0] !== document && this.scrollParent[0].tagName !== "HTML") {
-
- if((this.overflowOffset.top + this.scrollParent[0].offsetHeight) - event.pageY < o.scrollSensitivity) {
- this.scrollParent[0].scrollTop = scrolled = this.scrollParent[0].scrollTop + o.scrollSpeed;
- } else if(event.pageY - this.overflowOffset.top < o.scrollSensitivity) {
- this.scrollParent[0].scrollTop = scrolled = this.scrollParent[0].scrollTop - o.scrollSpeed;
- }
-
- if((this.overflowOffset.left + this.scrollParent[0].offsetWidth) - event.pageX < o.scrollSensitivity) {
- this.scrollParent[0].scrollLeft = scrolled = this.scrollParent[0].scrollLeft + o.scrollSpeed;
- } else if(event.pageX - this.overflowOffset.left < o.scrollSensitivity) {
- this.scrollParent[0].scrollLeft = scrolled = this.scrollParent[0].scrollLeft - o.scrollSpeed;
- }
-
- } else {
-
- if(event.pageY - $(document).scrollTop() < o.scrollSensitivity) {
- scrolled = $(document).scrollTop($(document).scrollTop() - o.scrollSpeed);
- } else if($(window).height() - (event.pageY - $(document).scrollTop()) < o.scrollSensitivity) {
- scrolled = $(document).scrollTop($(document).scrollTop() + o.scrollSpeed);
- }
-
- if(event.pageX - $(document).scrollLeft() < o.scrollSensitivity) {
- scrolled = $(document).scrollLeft($(document).scrollLeft() - o.scrollSpeed);
- } else if($(window).width() - (event.pageX - $(document).scrollLeft()) < o.scrollSensitivity) {
- scrolled = $(document).scrollLeft($(document).scrollLeft() + o.scrollSpeed);
- }
-
- }
-
- if(scrolled !== false && $.ui.ddmanager && !o.dropBehaviour) {
- $.ui.ddmanager.prepareOffsets(this, event);
- }
- }
-
- //Regenerate the absolute position used for position checks
- this.positionAbs = this._convertPositionTo("absolute");
-
- //Set the helper position
- if(!this.options.axis || this.options.axis !== "y") {
- this.helper[0].style.left = this.position.left+"px";
- }
- if(!this.options.axis || this.options.axis !== "x") {
- this.helper[0].style.top = this.position.top+"px";
- }
-
- //Rearrange
- for (i = this.items.length - 1; i >= 0; i--) {
-
- //Cache variables and intersection, continue if no intersection
- item = this.items[i];
- itemElement = item.item[0];
- intersection = this._intersectsWithPointer(item);
- if (!intersection) {
- continue;
- }
-
- // Only put the placeholder inside the current container, skip all
- // items from other containers. This works because when moving
- // an item from one container to another the
- // currentContainer is switched before the placeholder is moved.
- //
- // Without this, moving items in "sub-sortables" can cause the placeholder to jitter
- // between the outer and inner container.
- if (item.instance !== this.currentContainer) {
- continue;
- }
-
- // cannot intersect with itself
- // no useless actions that have been done before
- // no action if the item moved is the parent of the item checked
- if (itemElement !== this.currentItem[0] &&
- this.placeholder[intersection === 1 ? "next" : "prev"]()[0] !== itemElement &&
- !$.contains(this.placeholder[0], itemElement) &&
- (this.options.type === "semi-dynamic" ? !$.contains(this.element[0], itemElement) : true)
- ) {
-
- this.direction = intersection === 1 ? "down" : "up";
-
- if (this.options.tolerance === "pointer" || this._intersectsWithSides(item)) {
- this._rearrange(event, item);
- } else {
- break;
- }
-
- this._trigger("change", event, this._uiHash());
- break;
- }
- }
-
- //Post events to containers
- this._contactContainers(event);
-
- //Interconnect with droppables
- if($.ui.ddmanager) {
- $.ui.ddmanager.drag(this, event);
- }
-
- //Call callbacks
- this._trigger("sort", event, this._uiHash());
-
- this.lastPositionAbs = this.positionAbs;
- return false;
-
- },
-
- _mouseStop: function(event, noPropagation) {
-
- if(!event) {
- return;
- }
-
- //If we are using droppables, inform the manager about the drop
- if ($.ui.ddmanager && !this.options.dropBehaviour) {
- $.ui.ddmanager.drop(this, event);
- }
-
- if(this.options.revert) {
- var that = this,
- cur = this.placeholder.offset(),
- axis = this.options.axis,
- animation = {};
-
- if ( !axis || axis === "x" ) {
- animation.left = cur.left - this.offset.parent.left - this.margins.left + (this.offsetParent[0] === document.body ? 0 : this.offsetParent[0].scrollLeft);
- }
- if ( !axis || axis === "y" ) {
- animation.top = cur.top - this.offset.parent.top - this.margins.top + (this.offsetParent[0] === document.body ? 0 : this.offsetParent[0].scrollTop);
- }
- this.reverting = true;
- $(this.helper).animate( animation, parseInt(this.options.revert, 10) || 500, function() {
- that._clear(event);
- });
- } else {
- this._clear(event, noPropagation);
- }
-
- return false;
-
- },
-
- cancel: function() {
-
- if(this.dragging) {
-
- this._mouseUp({ target: null });
-
- if(this.options.helper === "original") {
- this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper");
- } else {
- this.currentItem.show();
- }
-
- //Post deactivating events to containers
- for (var i = this.containers.length - 1; i >= 0; i--){
- this.containers[i]._trigger("deactivate", null, this._uiHash(this));
- if(this.containers[i].containerCache.over) {
- this.containers[i]._trigger("out", null, this._uiHash(this));
- this.containers[i].containerCache.over = 0;
- }
- }
-
- }
-
- if (this.placeholder) {
- //$(this.placeholder[0]).remove(); would have been the jQuery way - unfortunately, it unbinds ALL events from the original node!
- if(this.placeholder[0].parentNode) {
- this.placeholder[0].parentNode.removeChild(this.placeholder[0]);
- }
- if(this.options.helper !== "original" && this.helper && this.helper[0].parentNode) {
- this.helper.remove();
- }
-
- $.extend(this, {
- helper: null,
- dragging: false,
- reverting: false,
- _noFinalSort: null
- });
-
- if(this.domPosition.prev) {
- $(this.domPosition.prev).after(this.currentItem);
- } else {
- $(this.domPosition.parent).prepend(this.currentItem);
- }
- }
-
- return this;
-
- },
-
- serialize: function(o) {
-
- var items = this._getItemsAsjQuery(o && o.connected),
- str = [];
- o = o || {};
-
- $(items).each(function() {
- var res = ($(o.item || this).attr(o.attribute || "id") || "").match(o.expression || (/(.+)[\-=_](.+)/));
- if (res) {
- str.push((o.key || res[1]+"[]")+"="+(o.key && o.expression ? res[1] : res[2]));
- }
- });
-
- if(!str.length && o.key) {
- str.push(o.key + "=");
- }
-
- return str.join("&");
-
- },
-
- toArray: function(o) {
-
- var items = this._getItemsAsjQuery(o && o.connected),
- ret = [];
-
- o = o || {};
-
- items.each(function() { ret.push($(o.item || this).attr(o.attribute || "id") || ""); });
- return ret;
-
- },
-
- /* Be careful with the following core functions */
- _intersectsWith: function(item) {
-
- var x1 = this.positionAbs.left,
- x2 = x1 + this.helperProportions.width,
- y1 = this.positionAbs.top,
- y2 = y1 + this.helperProportions.height,
- l = item.left,
- r = l + item.width,
- t = item.top,
- b = t + item.height,
- dyClick = this.offset.click.top,
- dxClick = this.offset.click.left,
- isOverElementHeight = ( this.options.axis === "x" ) || ( ( y1 + dyClick ) > t && ( y1 + dyClick ) < b ),
- isOverElementWidth = ( this.options.axis === "y" ) || ( ( x1 + dxClick ) > l && ( x1 + dxClick ) < r ),
- isOverElement = isOverElementHeight && isOverElementWidth;
-
- if ( this.options.tolerance === "pointer" ||
- this.options.forcePointerForContainers ||
- (this.options.tolerance !== "pointer" && this.helperProportions[this.floating ? "width" : "height"] > item[this.floating ? "width" : "height"])
- ) {
- return isOverElement;
- } else {
-
- return (l < x1 + (this.helperProportions.width / 2) && // Right Half
- x2 - (this.helperProportions.width / 2) < r && // Left Half
- t < y1 + (this.helperProportions.height / 2) && // Bottom Half
- y2 - (this.helperProportions.height / 2) < b ); // Top Half
-
- }
- },
-
- _intersectsWithPointer: function(item) {
-
- var isOverElementHeight = (this.options.axis === "x") || isOverAxis(this.positionAbs.top + this.offset.click.top, item.top, item.height),
- isOverElementWidth = (this.options.axis === "y") || isOverAxis(this.positionAbs.left + this.offset.click.left, item.left, item.width),
- isOverElement = isOverElementHeight && isOverElementWidth,
- verticalDirection = this._getDragVerticalDirection(),
- horizontalDirection = this._getDragHorizontalDirection();
-
- if (!isOverElement) {
- return false;
- }
-
- return this.floating ?
- ( ((horizontalDirection && horizontalDirection === "right") || verticalDirection === "down") ? 2 : 1 )
- : ( verticalDirection && (verticalDirection === "down" ? 2 : 1) );
-
- },
-
- _intersectsWithSides: function(item) {
-
- var isOverBottomHalf = isOverAxis(this.positionAbs.top + this.offset.click.top, item.top + (item.height/2), item.height),
- isOverRightHalf = isOverAxis(this.positionAbs.left + this.offset.click.left, item.left + (item.width/2), item.width),
- verticalDirection = this._getDragVerticalDirection(),
- horizontalDirection = this._getDragHorizontalDirection();
-
- if (this.floating && horizontalDirection) {
- return ((horizontalDirection === "right" && isOverRightHalf) || (horizontalDirection === "left" && !isOverRightHalf));
- } else {
- return verticalDirection && ((verticalDirection === "down" && isOverBottomHalf) || (verticalDirection === "up" && !isOverBottomHalf));
- }
-
- },
-
- _getDragVerticalDirection: function() {
- var delta = this.positionAbs.top - this.lastPositionAbs.top;
- return delta !== 0 && (delta > 0 ? "down" : "up");
- },
-
- _getDragHorizontalDirection: function() {
- var delta = this.positionAbs.left - this.lastPositionAbs.left;
- return delta !== 0 && (delta > 0 ? "right" : "left");
- },
-
- refresh: function(event) {
- this._refreshItems(event);
- this.refreshPositions();
- return this;
- },
-
- _connectWith: function() {
- var options = this.options;
- return options.connectWith.constructor === String ? [options.connectWith] : options.connectWith;
- },
-
- _getItemsAsjQuery: function(connected) {
-
- var i, j, cur, inst,
- items = [],
- queries = [],
- connectWith = this._connectWith();
-
- if(connectWith && connected) {
- for (i = connectWith.length - 1; i >= 0; i--){
- cur = $(connectWith[i]);
- for ( j = cur.length - 1; j >= 0; j--){
- inst = $.data(cur[j], this.widgetFullName);
- if(inst && inst !== this && !inst.options.disabled) {
- queries.push([$.isFunction(inst.options.items) ? inst.options.items.call(inst.element) : $(inst.options.items, inst.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"), inst]);
- }
- }
- }
- }
-
- queries.push([$.isFunction(this.options.items) ? this.options.items.call(this.element, null, { options: this.options, item: this.currentItem }) : $(this.options.items, this.element).not(".ui-sortable-helper").not(".ui-sortable-placeholder"), this]);
-
- for (i = queries.length - 1; i >= 0; i--){
- queries[i][0].each(function() {
- items.push(this);
- });
- }
-
- return $(items);
-
- },
-
- _removeCurrentsFromItems: function() {
-
- var list = this.currentItem.find(":data(" + this.widgetName + "-item)");
-
- this.items = $.grep(this.items, function (item) {
- for (var j=0; j < list.length; j++) {
- if(list[j] === item.item[0]) {
- return false;
- }
- }
- return true;
- });
-
- },
-
- _refreshItems: function(event) {
-
- this.items = [];
- this.containers = [this];
-
- var i, j, cur, inst, targetData, _queries, item, queriesLength,
- items = this.items,
- queries = [[$.isFunction(this.options.items) ? this.options.items.call(this.element[0], event, { item: this.currentItem }) : $(this.options.items, this.element), this]],
- connectWith = this._connectWith();
-
- if(connectWith && this.ready) { //Shouldn't be run the first time through due to massive slow-down
- for (i = connectWith.length - 1; i >= 0; i--){
- cur = $(connectWith[i]);
- for (j = cur.length - 1; j >= 0; j--){
- inst = $.data(cur[j], this.widgetFullName);
- if(inst && inst !== this && !inst.options.disabled) {
- queries.push([$.isFunction(inst.options.items) ? inst.options.items.call(inst.element[0], event, { item: this.currentItem }) : $(inst.options.items, inst.element), inst]);
- this.containers.push(inst);
- }
- }
- }
- }
-
- for (i = queries.length - 1; i >= 0; i--) {
- targetData = queries[i][1];
- _queries = queries[i][0];
-
- for (j=0, queriesLength = _queries.length; j < queriesLength; j++) {
- item = $(_queries[j]);
-
- item.data(this.widgetName + "-item", targetData); // Data for target checking (mouse manager)
-
- items.push({
- item: item,
- instance: targetData,
- width: 0, height: 0,
- left: 0, top: 0
- });
- }
- }
-
- },
-
- refreshPositions: function(fast) {
-
- //This has to be redone because, due to the item being moved out of/into the offsetParent, the offsetParent's position will change
- if(this.offsetParent && this.helper) {
- this.offset.parent = this._getParentOffset();
- }
-
- var i, item, t, p;
-
- for (i = this.items.length - 1; i >= 0; i--){
- item = this.items[i];
-
- //We ignore calculating positions of all connected containers when we're not over them
- if(item.instance !== this.currentContainer && this.currentContainer && item.item[0] !== this.currentItem[0]) {
- continue;
- }
-
- t = this.options.toleranceElement ? $(this.options.toleranceElement, item.item) : item.item;
-
- if (!fast) {
- item.width = t.outerWidth();
- item.height = t.outerHeight();
- }
-
- p = t.offset();
- item.left = p.left;
- item.top = p.top;
- }
-
- if(this.options.custom && this.options.custom.refreshContainers) {
- this.options.custom.refreshContainers.call(this);
- } else {
- for (i = this.containers.length - 1; i >= 0; i--){
- p = this.containers[i].element.offset();
- this.containers[i].containerCache.left = p.left;
- this.containers[i].containerCache.top = p.top;
- this.containers[i].containerCache.width = this.containers[i].element.outerWidth();
- this.containers[i].containerCache.height = this.containers[i].element.outerHeight();
- }
- }
-
- return this;
- },
-
- _createPlaceholder: function(that) {
- that = that || this;
- var className,
- o = that.options;
-
- if(!o.placeholder || o.placeholder.constructor === String) {
- className = o.placeholder;
- o.placeholder = {
- element: function() {
-
- var nodeName = that.currentItem[0].nodeName.toLowerCase(),
- element = $( "<" + nodeName + ">", that.document[0] )
- .addClass(className || that.currentItem[0].className+" ui-sortable-placeholder")
- .removeClass("ui-sortable-helper");
-
- if ( nodeName === "tr" ) {
- that.currentItem.children().each(function() {
- $( "<td>&#160;</td>", that.document[0] )
- .attr( "colspan", $( this ).attr( "colspan" ) || 1 )
- .appendTo( element );
- });
- } else if ( nodeName === "img" ) {
- element.attr( "src", that.currentItem.attr( "src" ) );
- }
-
- if ( !className ) {
- element.css( "visibility", "hidden" );
- }
-
- return element;
- },
- update: function(container, p) {
-
- // 1. If a className is set as the 'placeholder' option, we don't force sizes - the class is responsible for that
- // 2. The 'forcePlaceholderSize' option can be enabled to force it even if a class name is specified
- if(className && !o.forcePlaceholderSize) {
- return;
- }
-
- //If the element doesn't have an actual height by itself (without styles coming from a stylesheet), it receives the inline height from the dragged item
- if(!p.height()) { p.height(that.currentItem.innerHeight() - parseInt(that.currentItem.css("paddingTop")||0, 10) - parseInt(that.currentItem.css("paddingBottom")||0, 10)); }
- if(!p.width()) { p.width(that.currentItem.innerWidth() - parseInt(that.currentItem.css("paddingLeft")||0, 10) - parseInt(that.currentItem.css("paddingRight")||0, 10)); }
- }
- };
- }
-
- //Create the placeholder
- that.placeholder = $(o.placeholder.element.call(that.element, that.currentItem));
-
- //Append it after the actual current item
- that.currentItem.after(that.placeholder);
-
- //Update the size of the placeholder (TODO: logic too fuzzy, see line 316/317)
- o.placeholder.update(that, that.placeholder);
-
- },
-
- _contactContainers: function(event) {
- var i, j, dist, itemWithLeastDistance, posProperty, sizeProperty, base, cur, nearBottom, floating,
- innermostContainer = null,
- innermostIndex = null;
-
- // get innermost container that intersects with item
- for (i = this.containers.length - 1; i >= 0; i--) {
-
- // never consider a container that's located within the item itself
- if($.contains(this.currentItem[0], this.containers[i].element[0])) {
- continue;
- }
-
- if(this._intersectsWith(this.containers[i].containerCache)) {
-
- // if we've already found a container and it's more "inner" than this, then continue
- if(innermostContainer && $.contains(this.containers[i].element[0], innermostContainer.element[0])) {
- continue;
- }
-
- innermostContainer = this.containers[i];
- innermostIndex = i;
-
- } else {
- // container doesn't intersect. trigger "out" event if necessary
- if(this.containers[i].containerCache.over) {
- this.containers[i]._trigger("out", event, this._uiHash(this));
- this.containers[i].containerCache.over = 0;
- }
- }
-
- }
-
- // if no intersecting containers found, return
- if(!innermostContainer) {
- return;
- }
-
- // move the item into the container if it's not there already
- if(this.containers.length === 1) {
- if (!this.containers[innermostIndex].containerCache.over) {
- this.containers[innermostIndex]._trigger("over", event, this._uiHash(this));
- this.containers[innermostIndex].containerCache.over = 1;
- }
- } else {
-
- //When entering a new container, we will find the item with the least distance and append our item near it
- dist = 10000;
- itemWithLeastDistance = null;
- floating = innermostContainer.floating || isFloating(this.currentItem);
- posProperty = floating ? "left" : "top";
- sizeProperty = floating ? "width" : "height";
- base = this.positionAbs[posProperty] + this.offset.click[posProperty];
- for (j = this.items.length - 1; j >= 0; j--) {
- if(!$.contains(this.containers[innermostIndex].element[0], this.items[j].item[0])) {
- continue;
- }
- if(this.items[j].item[0] === this.currentItem[0]) {
- continue;
- }
- if (floating && !isOverAxis(this.positionAbs.top + this.offset.click.top, this.items[j].top, this.items[j].height)) {
- continue;
- }
- cur = this.items[j].item.offset()[posProperty];
- nearBottom = false;
- if(Math.abs(cur - base) > Math.abs(cur + this.items[j][sizeProperty] - base)){
- nearBottom = true;
- cur += this.items[j][sizeProperty];
- }
-
- if(Math.abs(cur - base) < dist) {
- dist = Math.abs(cur - base); itemWithLeastDistance = this.items[j];
- this.direction = nearBottom ? "up": "down";
- }
- }
-
- //Check if dropOnEmpty is enabled
- if(!itemWithLeastDistance && !this.options.dropOnEmpty) {
- return;
- }
-
- if(this.currentContainer === this.containers[innermostIndex]) {
- return;
- }
-
- itemWithLeastDistance ? this._rearrange(event, itemWithLeastDistance, null, true) : this._rearrange(event, null, this.containers[innermostIndex].element, true);
- this._trigger("change", event, this._uiHash());
- this.containers[innermostIndex]._trigger("change", event, this._uiHash(this));
- this.currentContainer = this.containers[innermostIndex];
-
- //Update the placeholder
- this.options.placeholder.update(this.currentContainer, this.placeholder);
-
- this.containers[innermostIndex]._trigger("over", event, this._uiHash(this));
- this.containers[innermostIndex].containerCache.over = 1;
- }
-
-
- },
-
- _createHelper: function(event) {
-
- var o = this.options,
- helper = $.isFunction(o.helper) ? $(o.helper.apply(this.element[0], [event, this.currentItem])) : (o.helper === "clone" ? this.currentItem.clone() : this.currentItem);
-
- //Add the helper to the DOM if that didn't happen already
- if(!helper.parents("body").length) {
- $(o.appendTo !== "parent" ? o.appendTo : this.currentItem[0].parentNode)[0].appendChild(helper[0]);
- }
-
- if(helper[0] === this.currentItem[0]) {
- this._storedCSS = { width: this.currentItem[0].style.width, height: this.currentItem[0].style.height, position: this.currentItem.css("position"), top: this.currentItem.css("top"), left: this.currentItem.css("left") };
- }
-
- if(!helper[0].style.width || o.forceHelperSize) {
- helper.width(this.currentItem.width());
- }
- if(!helper[0].style.height || o.forceHelperSize) {
- helper.height(this.currentItem.height());
- }
-
- return helper;
-
- },
-
- _adjustOffsetFromHelper: function(obj) {
- if (typeof obj === "string") {
- obj = obj.split(" ");
- }
- if ($.isArray(obj)) {
- obj = {left: +obj[0], top: +obj[1] || 0};
- }
- if ("left" in obj) {
- this.offset.click.left = obj.left + this.margins.left;
- }
- if ("right" in obj) {
- this.offset.click.left = this.helperProportions.width - obj.right + this.margins.left;
- }
- if ("top" in obj) {
- this.offset.click.top = obj.top + this.margins.top;
- }
- if ("bottom" in obj) {
- this.offset.click.top = this.helperProportions.height - obj.bottom + this.margins.top;
- }
- },
-
- _getParentOffset: function() {
-
-
- //Get the offsetParent and cache its position
- this.offsetParent = this.helper.offsetParent();
- var po = this.offsetParent.offset();
-
- // This is a special case where we need to modify an offset calculated on start, since the following happened:
- // 1. The position of the helper is absolute, so its position is calculated based on the next positioned parent
- // 2. The actual offset parent is a child of the scroll parent, and the scroll parent isn't the document, which means that
- // the scroll is included in the initial calculation of the offset of the parent, and never recalculated upon drag
- if(this.cssPosition === "absolute" && this.scrollParent[0] !== document && $.contains(this.scrollParent[0], this.offsetParent[0])) {
- po.left += this.scrollParent.scrollLeft();
- po.top += this.scrollParent.scrollTop();
- }
-
- // This actually needs to be done for all browsers, since pageX/pageY includes this information
- // with an ugly IE fix
- if( this.offsetParent[0] === document.body || (this.offsetParent[0].tagName && this.offsetParent[0].tagName.toLowerCase() === "html" && $.ui.ie)) {
- po = { top: 0, left: 0 };
- }
-
- return {
- top: po.top + (parseInt(this.offsetParent.css("borderTopWidth"),10) || 0),
- left: po.left + (parseInt(this.offsetParent.css("borderLeftWidth"),10) || 0)
- };
-
- },
-
- _getRelativeOffset: function() {
-
- if(this.cssPosition === "relative") {
- var p = this.currentItem.position();
- return {
- top: p.top - (parseInt(this.helper.css("top"),10) || 0) + this.scrollParent.scrollTop(),
- left: p.left - (parseInt(this.helper.css("left"),10) || 0) + this.scrollParent.scrollLeft()
- };
- } else {
- return { top: 0, left: 0 };
- }
-
- },
-
- _cacheMargins: function() {
- this.margins = {
- left: (parseInt(this.currentItem.css("marginLeft"),10) || 0),
- top: (parseInt(this.currentItem.css("marginTop"),10) || 0)
- };
- },
-
- _cacheHelperProportions: function() {
- this.helperProportions = {
- width: this.helper.outerWidth(),
- height: this.helper.outerHeight()
- };
- },
-
- _setContainment: function() {
-
- var ce, co, over,
- o = this.options;
- if(o.containment === "parent") {
- o.containment = this.helper[0].parentNode;
- }
- if(o.containment === "document" || o.containment === "window") {
- this.containment = [
- 0 - this.offset.relative.left - this.offset.parent.left,
- 0 - this.offset.relative.top - this.offset.parent.top,
- $(o.containment === "document" ? document : window).width() - this.helperProportions.width - this.margins.left,
- ($(o.containment === "document" ? document : window).height() || document.body.parentNode.scrollHeight) - this.helperProportions.height - this.margins.top
- ];
- }
-
- if(!(/^(document|window|parent)$/).test(o.containment)) {
- ce = $(o.containment)[0];
- co = $(o.containment).offset();
- over = ($(ce).css("overflow") !== "hidden");
-
- this.containment = [
- co.left + (parseInt($(ce).css("borderLeftWidth"),10) || 0) + (parseInt($(ce).css("paddingLeft"),10) || 0) - this.margins.left,
- co.top + (parseInt($(ce).css("borderTopWidth"),10) || 0) + (parseInt($(ce).css("paddingTop"),10) || 0) - this.margins.top,
- co.left+(over ? Math.max(ce.scrollWidth,ce.offsetWidth) : ce.offsetWidth) - (parseInt($(ce).css("borderLeftWidth"),10) || 0) - (parseInt($(ce).css("paddingRight"),10) || 0) - this.helperProportions.width - this.margins.left,
- co.top+(over ? Math.max(ce.scrollHeight,ce.offsetHeight) : ce.offsetHeight) - (parseInt($(ce).css("borderTopWidth"),10) || 0) - (parseInt($(ce).css("paddingBottom"),10) || 0) - this.helperProportions.height - this.margins.top
- ];
- }
-
- },
-
- _convertPositionTo: function(d, pos) {
-
- if(!pos) {
- pos = this.position;
- }
- var mod = d === "absolute" ? 1 : -1,
- scroll = this.cssPosition === "absolute" && !(this.scrollParent[0] !== document && $.contains(this.scrollParent[0], this.offsetParent[0])) ? this.offsetParent : this.scrollParent,
- scrollIsRootNode = (/(html|body)/i).test(scroll[0].tagName);
-
- return {
- top: (
- pos.top + // The absolute mouse position
- this.offset.relative.top * mod + // Only for relative positioned nodes: Relative offset from element to offset parent
- this.offset.parent.top * mod - // The offsetParent's offset without borders (offset + border)
- ( ( this.cssPosition === "fixed" ? -this.scrollParent.scrollTop() : ( scrollIsRootNode ? 0 : scroll.scrollTop() ) ) * mod)
- ),
- left: (
- pos.left + // The absolute mouse position
- this.offset.relative.left * mod + // Only for relative positioned nodes: Relative offset from element to offset parent
- this.offset.parent.left * mod - // The offsetParent's offset without borders (offset + border)
- ( ( this.cssPosition === "fixed" ? -this.scrollParent.scrollLeft() : scrollIsRootNode ? 0 : scroll.scrollLeft() ) * mod)
- )
- };
-
- },
-
- _generatePosition: function(event) {
-
- var top, left,
- o = this.options,
- pageX = event.pageX,
- pageY = event.pageY,
- scroll = this.cssPosition === "absolute" && !(this.scrollParent[0] !== document && $.contains(this.scrollParent[0], this.offsetParent[0])) ? this.offsetParent : this.scrollParent, scrollIsRootNode = (/(html|body)/i).test(scroll[0].tagName);
-
- // This is another very weird special case that only happens for relative elements:
- // 1. If the css position is relative
- // 2. and the scroll parent is the document or the same as the offset parent
- // we have to refresh the relative offset during the scroll so there are no jumps
- if(this.cssPosition === "relative" && !(this.scrollParent[0] !== document && this.scrollParent[0] !== this.offsetParent[0])) {
- this.offset.relative = this._getRelativeOffset();
- }
-
- /*
- * - Position constraining -
- * Constrain the position to a mix of grid, containment.
- */
-
- if(this.originalPosition) { //If we are not dragging yet, we won't check for options
-
- if(this.containment) {
- if(event.pageX - this.offset.click.left < this.containment[0]) {
- pageX = this.containment[0] + this.offset.click.left;
- }
- if(event.pageY - this.offset.click.top < this.containment[1]) {
- pageY = this.containment[1] + this.offset.click.top;
- }
- if(event.pageX - this.offset.click.left > this.containment[2]) {
- pageX = this.containment[2] + this.offset.click.left;
- }
- if(event.pageY - this.offset.click.top > this.containment[3]) {
- pageY = this.containment[3] + this.offset.click.top;
- }
- }
-
- if(o.grid) {
- top = this.originalPageY + Math.round((pageY - this.originalPageY) / o.grid[1]) * o.grid[1];
- pageY = this.containment ? ( (top - this.offset.click.top >= this.containment[1] && top - this.offset.click.top <= this.containment[3]) ? top : ((top - this.offset.click.top >= this.containment[1]) ? top - o.grid[1] : top + o.grid[1])) : top;
-
- left = this.originalPageX + Math.round((pageX - this.originalPageX) / o.grid[0]) * o.grid[0];
- pageX = this.containment ? ( (left - this.offset.click.left >= this.containment[0] && left - this.offset.click.left <= this.containment[2]) ? left : ((left - this.offset.click.left >= this.containment[0]) ? left - o.grid[0] : left + o.grid[0])) : left;
- }
-
- }
-
- return {
- top: (
- pageY - // The absolute mouse position
- this.offset.click.top - // Click offset (relative to the element)
- this.offset.relative.top - // Only for relative positioned nodes: Relative offset from element to offset parent
- this.offset.parent.top + // The offsetParent's offset without borders (offset + border)
- ( ( this.cssPosition === "fixed" ? -this.scrollParent.scrollTop() : ( scrollIsRootNode ? 0 : scroll.scrollTop() ) ))
- ),
- left: (
- pageX - // The absolute mouse position
- this.offset.click.left - // Click offset (relative to the element)
- this.offset.relative.left - // Only for relative positioned nodes: Relative offset from element to offset parent
- this.offset.parent.left + // The offsetParent's offset without borders (offset + border)
- ( ( this.cssPosition === "fixed" ? -this.scrollParent.scrollLeft() : scrollIsRootNode ? 0 : scroll.scrollLeft() ))
- )
- };
-
- },
-
- _rearrange: function(event, i, a, hardRefresh) {
-
- a ? a[0].appendChild(this.placeholder[0]) : i.item[0].parentNode.insertBefore(this.placeholder[0], (this.direction === "down" ? i.item[0] : i.item[0].nextSibling));
-
- //Various things done here to improve the performance:
- // 1. we create a setTimeout that calls refreshPositions
- // 2. on the instance, we have a counter variable that gets incremented after every append
- // 3. in the local scope, we copy the counter variable and check in the timeout whether it's still the same
- // 4. this lets only the last addition to the timeout stack through
- this.counter = this.counter ? ++this.counter : 1;
- var counter = this.counter;
-
- this._delay(function() {
- if(counter === this.counter) {
- this.refreshPositions(!hardRefresh); //Precompute after each DOM insertion, NOT on mousemove
- }
- });
-
- },
-
- _clear: function(event, noPropagation) {
-
- this.reverting = false;
- // We delay all events that have to be triggered to after the point where the placeholder has been removed and
- // everything else normalized again
- var i,
- delayedTriggers = [];
-
- // We first have to update the dom position of the actual currentItem
- // Note: don't do it if the current item is already removed (by a user), or it gets reappended (see #4088)
- if(!this._noFinalSort && this.currentItem.parent().length) {
- this.placeholder.before(this.currentItem);
- }
- this._noFinalSort = null;
-
- if(this.helper[0] === this.currentItem[0]) {
- for(i in this._storedCSS) {
- if(this._storedCSS[i] === "auto" || this._storedCSS[i] === "static") {
- this._storedCSS[i] = "";
- }
- }
- this.currentItem.css(this._storedCSS).removeClass("ui-sortable-helper");
- } else {
- this.currentItem.show();
- }
-
- if(this.fromOutside && !noPropagation) {
- delayedTriggers.push(function(event) { this._trigger("receive", event, this._uiHash(this.fromOutside)); });
- }
- if((this.fromOutside || this.domPosition.prev !== this.currentItem.prev().not(".ui-sortable-helper")[0] || this.domPosition.parent !== this.currentItem.parent()[0]) && !noPropagation) {
- delayedTriggers.push(function(event) { this._trigger("update", event, this._uiHash()); }); //Trigger update callback if the DOM position has changed
- }
-
- // Check if the item's container has changed and trigger the appropriate
- // events.
- if (this !== this.currentContainer) {
- if(!noPropagation) {
- delayedTriggers.push(function(event) { this._trigger("remove", event, this._uiHash()); });
- delayedTriggers.push((function(c) { return function(event) { c._trigger("receive", event, this._uiHash(this)); }; }).call(this, this.currentContainer));
- delayedTriggers.push((function(c) { return function(event) { c._trigger("update", event, this._uiHash(this)); }; }).call(this, this.currentContainer));
- }
- }
-
-
- //Post events to containers
- for (i = this.containers.length - 1; i >= 0; i--){
- if(!noPropagation) {
- delayedTriggers.push((function(c) { return function(event) { c._trigger("deactivate", event, this._uiHash(this)); }; }).call(this, this.containers[i]));
- }
- if(this.containers[i].containerCache.over) {
- delayedTriggers.push((function(c) { return function(event) { c._trigger("out", event, this._uiHash(this)); }; }).call(this, this.containers[i]));
- this.containers[i].containerCache.over = 0;
- }
- }
-
- //Do what was originally in plugins
- if ( this.storedCursor ) {
- this.document.find( "body" ).css( "cursor", this.storedCursor );
- this.storedStylesheet.remove();
- }
- if(this._storedOpacity) {
- this.helper.css("opacity", this._storedOpacity);
- }
- if(this._storedZIndex) {
- this.helper.css("zIndex", this._storedZIndex === "auto" ? "" : this._storedZIndex);
- }
-
- this.dragging = false;
- if(this.cancelHelperRemoval) {
- if(!noPropagation) {
- this._trigger("beforeStop", event, this._uiHash());
- for (i=0; i < delayedTriggers.length; i++) {
- delayedTriggers[i].call(this, event);
- } //Trigger all delayed events
- this._trigger("stop", event, this._uiHash());
- }
-
- this.fromOutside = false;
- return false;
- }
-
- if(!noPropagation) {
- this._trigger("beforeStop", event, this._uiHash());
- }
-
- //$(this.placeholder[0]).remove(); would have been the jQuery way - unfortunately, it unbinds ALL events from the original node!
- this.placeholder[0].parentNode.removeChild(this.placeholder[0]);
-
- if(this.helper[0] !== this.currentItem[0]) {
- this.helper.remove();
- }
- this.helper = null;
-
- if(!noPropagation) {
- for (i=0; i < delayedTriggers.length; i++) {
- delayedTriggers[i].call(this, event);
- } //Trigger all delayed events
- this._trigger("stop", event, this._uiHash());
- }
-
- this.fromOutside = false;
- return true;
-
- },
-
- _trigger: function() {
- if ($.Widget.prototype._trigger.apply(this, arguments) === false) {
- this.cancel();
- }
- },
-
- _uiHash: function(_inst) {
- var inst = _inst || this;
- return {
- helper: inst.helper,
- placeholder: inst.placeholder || $([]),
- position: inst.position,
- originalPosition: inst.originalPosition,
- offset: inst.positionAbs,
- item: inst.currentItem,
- sender: _inst ? _inst.element : null
- };
- }
-
-});
-
-})(jQuery);
-
-(function($, undefined) {
-
-var dataSpace = "ui-effects-";
-
-$.effects = {
- effect: {}
-};
-
-/*!
- * jQuery Color Animations v2.1.2
- * https://github.com/jquery/jquery-color
- *
- * Copyright 2013 jQuery Foundation and other contributors
- * Released under the MIT license.
- * http://jquery.org/license
- *
- * Date: Wed Jan 16 08:47:09 2013 -0600
- */
-(function( jQuery, undefined ) {
-
- var stepHooks = "backgroundColor borderBottomColor borderLeftColor borderRightColor borderTopColor color columnRuleColor outlineColor textDecorationColor textEmphasisColor",
-
- // plusequals test for += 100 -= 100
- rplusequals = /^([\-+])=\s*(\d+\.?\d*)/,
- // a set of RE's that can match strings and generate color tuples.
- stringParsers = [{
- re: /rgba?\(\s*(\d{1,3})\s*,\s*(\d{1,3})\s*,\s*(\d{1,3})\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,
- parse: function( execResult ) {
- return [
- execResult[ 1 ],
- execResult[ 2 ],
- execResult[ 3 ],
- execResult[ 4 ]
- ];
- }
- }, {
- re: /rgba?\(\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,
- parse: function( execResult ) {
- return [
- execResult[ 1 ] * 2.55,
- execResult[ 2 ] * 2.55,
- execResult[ 3 ] * 2.55,
- execResult[ 4 ]
- ];
- }
- }, {
- // this regex ignores A-F because it's compared against an already lowercased string
- re: /#([a-f0-9]{2})([a-f0-9]{2})([a-f0-9]{2})/,
- parse: function( execResult ) {
- return [
- parseInt( execResult[ 1 ], 16 ),
- parseInt( execResult[ 2 ], 16 ),
- parseInt( execResult[ 3 ], 16 )
- ];
- }
- }, {
- // this regex ignores A-F because it's compared against an already lowercased string
- re: /#([a-f0-9])([a-f0-9])([a-f0-9])/,
- parse: function( execResult ) {
- return [
- parseInt( execResult[ 1 ] + execResult[ 1 ], 16 ),
- parseInt( execResult[ 2 ] + execResult[ 2 ], 16 ),
- parseInt( execResult[ 3 ] + execResult[ 3 ], 16 )
- ];
- }
- }, {
- re: /hsla?\(\s*(\d+(?:\.\d+)?)\s*,\s*(\d+(?:\.\d+)?)\%\s*,\s*(\d+(?:\.\d+)?)\%\s*(?:,\s*(\d?(?:\.\d+)?)\s*)?\)/,
- space: "hsla",
- parse: function( execResult ) {
- return [
- execResult[ 1 ],
- execResult[ 2 ] / 100,
- execResult[ 3 ] / 100,
- execResult[ 4 ]
- ];
- }
- }],
-
- // jQuery.Color( )
- color = jQuery.Color = function( color, green, blue, alpha ) {
- return new jQuery.Color.fn.parse( color, green, blue, alpha );
- },
- spaces = {
- rgba: {
- props: {
- red: {
- idx: 0,
- type: "byte"
- },
- green: {
- idx: 1,
- type: "byte"
- },
- blue: {
- idx: 2,
- type: "byte"
- }
- }
- },
-
- hsla: {
- props: {
- hue: {
- idx: 0,
- type: "degrees"
- },
- saturation: {
- idx: 1,
- type: "percent"
- },
- lightness: {
- idx: 2,
- type: "percent"
- }
- }
- }
- },
- propTypes = {
- "byte": {
- floor: true,
- max: 255
- },
- "percent": {
- max: 1
- },
- "degrees": {
- mod: 360,
- floor: true
- }
- },
- support = color.support = {},
-
- // element for support tests
- supportElem = jQuery( "<p>" )[ 0 ],
-
- // colors = jQuery.Color.names
- colors,
-
- // local aliases of functions called often
- each = jQuery.each;
-
-// determine rgba support immediately
-supportElem.style.cssText = "background-color:rgba(1,1,1,.5)";
-support.rgba = supportElem.style.backgroundColor.indexOf( "rgba" ) > -1;
-
-// define cache name and alpha properties
-// for rgba and hsla spaces
-each( spaces, function( spaceName, space ) {
- space.cache = "_" + spaceName;
- space.props.alpha = {
- idx: 3,
- type: "percent",
- def: 1
- };
-});
-
-function clamp( value, prop, allowEmpty ) {
- var type = propTypes[ prop.type ] || {};
-
- if ( value == null ) {
- return (allowEmpty || !prop.def) ? null : prop.def;
- }
-
-	// ~~ is a short way of doing floor for positive numbers
- value = type.floor ? ~~value : parseFloat( value );
-
- // IE will pass in empty strings as value for alpha,
- // which will hit this case
- if ( isNaN( value ) ) {
- return prop.def;
- }
-
- if ( type.mod ) {
-		// we add mod before modding to make sure that negative values
- // get converted properly: -10 -> 350
- return (value + type.mod) % type.mod;
- }
-
- // for now all property types without mod have min and max
- return 0 > value ? 0 : type.max < value ? type.max : value;
-}
-
-function stringParse( string ) {
- var inst = color(),
- rgba = inst._rgba = [];
-
- string = string.toLowerCase();
-
- each( stringParsers, function( i, parser ) {
- var parsed,
- match = parser.re.exec( string ),
- values = match && parser.parse( match ),
- spaceName = parser.space || "rgba";
-
- if ( values ) {
- parsed = inst[ spaceName ]( values );
-
- // if this was an rgba parse the assignment might happen twice
- // oh well....
- inst[ spaces[ spaceName ].cache ] = parsed[ spaces[ spaceName ].cache ];
- rgba = inst._rgba = parsed._rgba;
-
- // exit each( stringParsers ) here because we matched
- return false;
- }
- });
-
- // Found a stringParser that handled it
- if ( rgba.length ) {
-
- // if this came from a parsed string, force "transparent" when alpha is 0
-		// chrome (and maybe others) return "transparent" as rgba(0,0,0,0)
- if ( rgba.join() === "0,0,0,0" ) {
- jQuery.extend( rgba, colors.transparent );
- }
- return inst;
- }
-
- // named colors
- return colors[ string ];
-}
-
-color.fn = jQuery.extend( color.prototype, {
- parse: function( red, green, blue, alpha ) {
- if ( red === undefined ) {
- this._rgba = [ null, null, null, null ];
- return this;
- }
- if ( red.jquery || red.nodeType ) {
- red = jQuery( red ).css( green );
- green = undefined;
- }
-
- var inst = this,
- type = jQuery.type( red ),
- rgba = this._rgba = [];
-
- // more than 1 argument specified - assume ( red, green, blue, alpha )
- if ( green !== undefined ) {
- red = [ red, green, blue, alpha ];
- type = "array";
- }
-
- if ( type === "string" ) {
- return this.parse( stringParse( red ) || colors._default );
- }
-
- if ( type === "array" ) {
- each( spaces.rgba.props, function( key, prop ) {
- rgba[ prop.idx ] = clamp( red[ prop.idx ], prop );
- });
- return this;
- }
-
- if ( type === "object" ) {
- if ( red instanceof color ) {
- each( spaces, function( spaceName, space ) {
- if ( red[ space.cache ] ) {
- inst[ space.cache ] = red[ space.cache ].slice();
- }
- });
- } else {
- each( spaces, function( spaceName, space ) {
- var cache = space.cache;
- each( space.props, function( key, prop ) {
-
- // if the cache doesn't exist, and we know how to convert
- if ( !inst[ cache ] && space.to ) {
-
- // if the value was null, we don't need to copy it
- // if the key was alpha, we don't need to copy it either
- if ( key === "alpha" || red[ key ] == null ) {
- return;
- }
- inst[ cache ] = space.to( inst._rgba );
- }
-
- // this is the only case where we allow nulls for ALL properties.
- // call clamp with alwaysAllowEmpty
- inst[ cache ][ prop.idx ] = clamp( red[ key ], prop, true );
- });
-
- // everything defined but alpha?
- if ( inst[ cache ] && jQuery.inArray( null, inst[ cache ].slice( 0, 3 ) ) < 0 ) {
- // use the default of 1
- inst[ cache ][ 3 ] = 1;
- if ( space.from ) {
- inst._rgba = space.from( inst[ cache ] );
- }
- }
- });
- }
- return this;
- }
- },
- is: function( compare ) {
- var is = color( compare ),
- same = true,
- inst = this;
-
- each( spaces, function( _, space ) {
- var localCache,
- isCache = is[ space.cache ];
- if (isCache) {
- localCache = inst[ space.cache ] || space.to && space.to( inst._rgba ) || [];
- each( space.props, function( _, prop ) {
- if ( isCache[ prop.idx ] != null ) {
- same = ( isCache[ prop.idx ] === localCache[ prop.idx ] );
- return same;
- }
- });
- }
- return same;
- });
- return same;
- },
- _space: function() {
- var used = [],
- inst = this;
- each( spaces, function( spaceName, space ) {
- if ( inst[ space.cache ] ) {
- used.push( spaceName );
- }
- });
- return used.pop();
- },
- transition: function( other, distance ) {
- var end = color( other ),
- spaceName = end._space(),
- space = spaces[ spaceName ],
- startColor = this.alpha() === 0 ? color( "transparent" ) : this,
- start = startColor[ space.cache ] || space.to( startColor._rgba ),
- result = start.slice();
-
- end = end[ space.cache ];
- each( space.props, function( key, prop ) {
- var index = prop.idx,
- startValue = start[ index ],
- endValue = end[ index ],
- type = propTypes[ prop.type ] || {};
-
- // if null, don't override start value
- if ( endValue === null ) {
- return;
- }
- // if null - use end
- if ( startValue === null ) {
- result[ index ] = endValue;
- } else {
- if ( type.mod ) {
- if ( endValue - startValue > type.mod / 2 ) {
- startValue += type.mod;
- } else if ( startValue - endValue > type.mod / 2 ) {
- startValue -= type.mod;
- }
- }
- result[ index ] = clamp( ( endValue - startValue ) * distance + startValue, prop );
- }
- });
- return this[ spaceName ]( result );
- },
- blend: function( opaque ) {
- // if we are already opaque - return ourself
- if ( this._rgba[ 3 ] === 1 ) {
- return this;
- }
-
- var rgb = this._rgba.slice(),
- a = rgb.pop(),
- blend = color( opaque )._rgba;
-
- return color( jQuery.map( rgb, function( v, i ) {
- return ( 1 - a ) * blend[ i ] + a * v;
- }));
- },
- toRgbaString: function() {
- var prefix = "rgba(",
- rgba = jQuery.map( this._rgba, function( v, i ) {
- return v == null ? ( i > 2 ? 1 : 0 ) : v;
- });
-
- if ( rgba[ 3 ] === 1 ) {
- rgba.pop();
- prefix = "rgb(";
- }
-
- return prefix + rgba.join() + ")";
- },
- toHslaString: function() {
- var prefix = "hsla(",
- hsla = jQuery.map( this.hsla(), function( v, i ) {
- if ( v == null ) {
- v = i > 2 ? 1 : 0;
- }
-
- // catch 1 and 2
- if ( i && i < 3 ) {
- v = Math.round( v * 100 ) + "%";
- }
- return v;
- });
-
- if ( hsla[ 3 ] === 1 ) {
- hsla.pop();
- prefix = "hsl(";
- }
- return prefix + hsla.join() + ")";
- },
- toHexString: function( includeAlpha ) {
- var rgba = this._rgba.slice(),
- alpha = rgba.pop();
-
- if ( includeAlpha ) {
- rgba.push( ~~( alpha * 255 ) );
- }
-
- return "#" + jQuery.map( rgba, function( v ) {
-
- // default to 0 when nulls exist
- v = ( v || 0 ).toString( 16 );
- return v.length === 1 ? "0" + v : v;
- }).join("");
- },
- toString: function() {
- return this._rgba[ 3 ] === 0 ? "transparent" : this.toRgbaString();
- }
-});
-color.fn.parse.prototype = color.fn;
-
-// hsla conversions adapted from:
-// https://code.google.com/p/maashaack/source/browse/packages/graphics/trunk/src/graphics/colors/HUE2RGB.as?r=5021
-
-function hue2rgb( p, q, h ) {
- h = ( h + 1 ) % 1;
- if ( h * 6 < 1 ) {
- return p + (q - p) * h * 6;
- }
- if ( h * 2 < 1) {
- return q;
- }
- if ( h * 3 < 2 ) {
- return p + (q - p) * ((2/3) - h) * 6;
- }
- return p;
-}
-
-spaces.hsla.to = function ( rgba ) {
- if ( rgba[ 0 ] == null || rgba[ 1 ] == null || rgba[ 2 ] == null ) {
- return [ null, null, null, rgba[ 3 ] ];
- }
- var r = rgba[ 0 ] / 255,
- g = rgba[ 1 ] / 255,
- b = rgba[ 2 ] / 255,
- a = rgba[ 3 ],
- max = Math.max( r, g, b ),
- min = Math.min( r, g, b ),
- diff = max - min,
- add = max + min,
- l = add * 0.5,
- h, s;
-
- if ( min === max ) {
- h = 0;
- } else if ( r === max ) {
- h = ( 60 * ( g - b ) / diff ) + 360;
- } else if ( g === max ) {
- h = ( 60 * ( b - r ) / diff ) + 120;
- } else {
- h = ( 60 * ( r - g ) / diff ) + 240;
- }
-
-	// chroma (diff) == 0 means greyscale, which by definition has saturation = 0%
- // otherwise, saturation is based on the ratio of chroma (diff) to lightness (add)
- if ( diff === 0 ) {
- s = 0;
- } else if ( l <= 0.5 ) {
- s = diff / add;
- } else {
- s = diff / ( 2 - add );
- }
- return [ Math.round(h) % 360, s, l, a == null ? 1 : a ];
-};
-
-spaces.hsla.from = function ( hsla ) {
- if ( hsla[ 0 ] == null || hsla[ 1 ] == null || hsla[ 2 ] == null ) {
- return [ null, null, null, hsla[ 3 ] ];
- }
- var h = hsla[ 0 ] / 360,
- s = hsla[ 1 ],
- l = hsla[ 2 ],
- a = hsla[ 3 ],
- q = l <= 0.5 ? l * ( 1 + s ) : l + s - l * s,
- p = 2 * l - q;
-
- return [
- Math.round( hue2rgb( p, q, h + ( 1 / 3 ) ) * 255 ),
- Math.round( hue2rgb( p, q, h ) * 255 ),
- Math.round( hue2rgb( p, q, h - ( 1 / 3 ) ) * 255 ),
- a
- ];
-};
-
-
-each( spaces, function( spaceName, space ) {
- var props = space.props,
- cache = space.cache,
- to = space.to,
- from = space.from;
-
- // makes rgba() and hsla()
- color.fn[ spaceName ] = function( value ) {
-
- // generate a cache for this space if it doesn't exist
- if ( to && !this[ cache ] ) {
- this[ cache ] = to( this._rgba );
- }
- if ( value === undefined ) {
- return this[ cache ].slice();
- }
-
- var ret,
- type = jQuery.type( value ),
- arr = ( type === "array" || type === "object" ) ? value : arguments,
- local = this[ cache ].slice();
-
- each( props, function( key, prop ) {
- var val = arr[ type === "object" ? key : prop.idx ];
- if ( val == null ) {
- val = local[ prop.idx ];
- }
- local[ prop.idx ] = clamp( val, prop );
- });
-
- if ( from ) {
- ret = color( from( local ) );
- ret[ cache ] = local;
- return ret;
- } else {
- return color( local );
- }
- };
-
- // makes red() green() blue() alpha() hue() saturation() lightness()
- each( props, function( key, prop ) {
- // alpha is included in more than one space
- if ( color.fn[ key ] ) {
- return;
- }
- color.fn[ key ] = function( value ) {
- var vtype = jQuery.type( value ),
- fn = ( key === "alpha" ? ( this._hsla ? "hsla" : "rgba" ) : spaceName ),
- local = this[ fn ](),
- cur = local[ prop.idx ],
- match;
-
- if ( vtype === "undefined" ) {
- return cur;
- }
-
- if ( vtype === "function" ) {
- value = value.call( this, cur );
- vtype = jQuery.type( value );
- }
- if ( value == null && prop.empty ) {
- return this;
- }
- if ( vtype === "string" ) {
- match = rplusequals.exec( value );
- if ( match ) {
- value = cur + parseFloat( match[ 2 ] ) * ( match[ 1 ] === "+" ? 1 : -1 );
- }
- }
- local[ prop.idx ] = value;
- return this[ fn ]( local );
- };
- });
-});
-
-// add cssHook and .fx.step function for each named hook.
-// accept a space separated string of properties
-color.hook = function( hook ) {
- var hooks = hook.split( " " );
- each( hooks, function( i, hook ) {
- jQuery.cssHooks[ hook ] = {
- set: function( elem, value ) {
- var parsed, curElem,
- backgroundColor = "";
-
- if ( value !== "transparent" && ( jQuery.type( value ) !== "string" || ( parsed = stringParse( value ) ) ) ) {
- value = color( parsed || value );
- if ( !support.rgba && value._rgba[ 3 ] !== 1 ) {
- curElem = hook === "backgroundColor" ? elem.parentNode : elem;
- while (
- (backgroundColor === "" || backgroundColor === "transparent") &&
- curElem && curElem.style
- ) {
- try {
- backgroundColor = jQuery.css( curElem, "backgroundColor" );
- curElem = curElem.parentNode;
- } catch ( e ) {
- }
- }
-
- value = value.blend( backgroundColor && backgroundColor !== "transparent" ?
- backgroundColor :
- "_default" );
- }
-
- value = value.toRgbaString();
- }
- try {
- elem.style[ hook ] = value;
- } catch( e ) {
- // wrapped to prevent IE from throwing errors on "invalid" values like 'auto' or 'inherit'
- }
- }
- };
- jQuery.fx.step[ hook ] = function( fx ) {
- if ( !fx.colorInit ) {
- fx.start = color( fx.elem, hook );
- fx.end = color( fx.end );
- fx.colorInit = true;
- }
- jQuery.cssHooks[ hook ].set( fx.elem, fx.start.transition( fx.end, fx.pos ) );
- };
- });
-
-};
-
-color.hook( stepHooks );
-
-jQuery.cssHooks.borderColor = {
- expand: function( value ) {
- var expanded = {};
-
- each( [ "Top", "Right", "Bottom", "Left" ], function( i, part ) {
- expanded[ "border" + part + "Color" ] = value;
- });
- return expanded;
- }
-};
-
-// Basic color names only.
-// Usage of any of the other color names requires adding yourself or including
-// jquery.color.svg-names.js.
-colors = jQuery.Color.names = {
- // 4.1. Basic color keywords
- aqua: "#00ffff",
- black: "#000000",
- blue: "#0000ff",
- fuchsia: "#ff00ff",
- gray: "#808080",
- green: "#008000",
- lime: "#00ff00",
- maroon: "#800000",
- navy: "#000080",
- olive: "#808000",
- purple: "#800080",
- red: "#ff0000",
- silver: "#c0c0c0",
- teal: "#008080",
- white: "#ffffff",
- yellow: "#ffff00",
-
- // 4.2.3. "transparent" color keyword
- transparent: [ null, null, null, 0 ],
-
- _default: "#ffffff"
-};
-
-})( jQuery );
-
-
-/******************************************************************************/
-/****************************** CLASS ANIMATIONS ******************************/
-/******************************************************************************/
-(function() {
-
-var classAnimationActions = [ "add", "remove", "toggle" ],
- shorthandStyles = {
- border: 1,
- borderBottom: 1,
- borderColor: 1,
- borderLeft: 1,
- borderRight: 1,
- borderTop: 1,
- borderWidth: 1,
- margin: 1,
- padding: 1
- };
-
-$.each([ "borderLeftStyle", "borderRightStyle", "borderBottomStyle", "borderTopStyle" ], function( _, prop ) {
- $.fx.step[ prop ] = function( fx ) {
- if ( fx.end !== "none" && !fx.setAttr || fx.pos === 1 && !fx.setAttr ) {
- jQuery.style( fx.elem, prop, fx.end );
- fx.setAttr = true;
- }
- };
-});
-
-function getElementStyles( elem ) {
- var key, len,
- style = elem.ownerDocument.defaultView ?
- elem.ownerDocument.defaultView.getComputedStyle( elem, null ) :
- elem.currentStyle,
- styles = {};
-
- if ( style && style.length && style[ 0 ] && style[ style[ 0 ] ] ) {
- len = style.length;
- while ( len-- ) {
- key = style[ len ];
- if ( typeof style[ key ] === "string" ) {
- styles[ $.camelCase( key ) ] = style[ key ];
- }
- }
- // support: Opera, IE <9
- } else {
- for ( key in style ) {
- if ( typeof style[ key ] === "string" ) {
- styles[ key ] = style[ key ];
- }
- }
- }
-
- return styles;
-}
-
-
-function styleDifference( oldStyle, newStyle ) {
- var diff = {},
- name, value;
-
- for ( name in newStyle ) {
- value = newStyle[ name ];
- if ( oldStyle[ name ] !== value ) {
- if ( !shorthandStyles[ name ] ) {
- if ( $.fx.step[ name ] || !isNaN( parseFloat( value ) ) ) {
- diff[ name ] = value;
- }
- }
- }
- }
-
- return diff;
-}
-
-// support: jQuery <1.8
-if ( !$.fn.addBack ) {
- $.fn.addBack = function( selector ) {
- return this.add( selector == null ?
- this.prevObject : this.prevObject.filter( selector )
- );
- };
-}
-
-$.effects.animateClass = function( value, duration, easing, callback ) {
- var o = $.speed( duration, easing, callback );
-
- return this.queue( function() {
- var animated = $( this ),
- baseClass = animated.attr( "class" ) || "",
- applyClassChange,
- allAnimations = o.children ? animated.find( "*" ).addBack() : animated;
-
- // map the animated objects to store the original styles.
- allAnimations = allAnimations.map(function() {
- var el = $( this );
- return {
- el: el,
- start: getElementStyles( this )
- };
- });
-
- // apply class change
- applyClassChange = function() {
- $.each( classAnimationActions, function(i, action) {
- if ( value[ action ] ) {
- animated[ action + "Class" ]( value[ action ] );
- }
- });
- };
- applyClassChange();
-
- // map all animated objects again - calculate new styles and diff
- allAnimations = allAnimations.map(function() {
- this.end = getElementStyles( this.el[ 0 ] );
- this.diff = styleDifference( this.start, this.end );
- return this;
- });
-
- // apply original class
- animated.attr( "class", baseClass );
-
- // map all animated objects again - this time collecting a promise
- allAnimations = allAnimations.map(function() {
- var styleInfo = this,
- dfd = $.Deferred(),
- opts = $.extend({}, o, {
- queue: false,
- complete: function() {
- dfd.resolve( styleInfo );
- }
- });
-
- this.el.animate( this.diff, opts );
- return dfd.promise();
- });
-
- // once all animations have completed:
- $.when.apply( $, allAnimations.get() ).done(function() {
-
- // set the final class
- applyClassChange();
-
- // for each animated element,
- // clear all css properties that were animated
- $.each( arguments, function() {
- var el = this.el;
- $.each( this.diff, function(key) {
- el.css( key, "" );
- });
- });
-
-			// this is guaranteed to be there if you use jQuery.speed()
- // it also handles dequeuing the next anim...
- o.complete.call( animated[ 0 ] );
- });
- });
-};
-
-$.fn.extend({
- addClass: (function( orig ) {
- return function( classNames, speed, easing, callback ) {
- return speed ?
- $.effects.animateClass.call( this,
- { add: classNames }, speed, easing, callback ) :
- orig.apply( this, arguments );
- };
- })( $.fn.addClass ),
-
- removeClass: (function( orig ) {
- return function( classNames, speed, easing, callback ) {
- return arguments.length > 1 ?
- $.effects.animateClass.call( this,
- { remove: classNames }, speed, easing, callback ) :
- orig.apply( this, arguments );
- };
- })( $.fn.removeClass ),
-
- toggleClass: (function( orig ) {
- return function( classNames, force, speed, easing, callback ) {
- if ( typeof force === "boolean" || force === undefined ) {
- if ( !speed ) {
- // without speed parameter
- return orig.apply( this, arguments );
- } else {
- return $.effects.animateClass.call( this,
- (force ? { add: classNames } : { remove: classNames }),
- speed, easing, callback );
- }
- } else {
- // without force parameter
- return $.effects.animateClass.call( this,
- { toggle: classNames }, force, speed, easing );
- }
- };
- })( $.fn.toggleClass ),
-
- switchClass: function( remove, add, speed, easing, callback) {
- return $.effects.animateClass.call( this, {
- add: add,
- remove: remove
- }, speed, easing, callback );
- }
-});
-
-})();
-
-/******************************************************************************/
-/*********************************** EFFECTS **********************************/
-/******************************************************************************/
-
-(function() {
-
-$.extend( $.effects, {
- version: "1.10.3",
-
- // Saves a set of properties in a data storage
- save: function( element, set ) {
- for( var i=0; i < set.length; i++ ) {
- if ( set[ i ] !== null ) {
- element.data( dataSpace + set[ i ], element[ 0 ].style[ set[ i ] ] );
- }
- }
- },
-
- // Restores a set of previously saved properties from a data storage
- restore: function( element, set ) {
- var val, i;
- for( i=0; i < set.length; i++ ) {
- if ( set[ i ] !== null ) {
- val = element.data( dataSpace + set[ i ] );
- // support: jQuery 1.6.2
- // http://bugs.jquery.com/ticket/9917
- // jQuery 1.6.2 incorrectly returns undefined for any falsy value.
- // We can't differentiate between "" and 0 here, so we just assume
- // empty string since it's likely to be a more common value...
- if ( val === undefined ) {
- val = "";
- }
- element.css( set[ i ], val );
- }
- }
- },
-
- setMode: function( el, mode ) {
- if (mode === "toggle") {
- mode = el.is( ":hidden" ) ? "show" : "hide";
- }
- return mode;
- },
-
- // Translates a [top,left] array into a baseline value
- // this should be a little more flexible in the future to handle a string & hash
- getBaseline: function( origin, original ) {
- var y, x;
- switch ( origin[ 0 ] ) {
- case "top": y = 0; break;
- case "middle": y = 0.5; break;
- case "bottom": y = 1; break;
- default: y = origin[ 0 ] / original.height;
- }
- switch ( origin[ 1 ] ) {
- case "left": x = 0; break;
- case "center": x = 0.5; break;
- case "right": x = 1; break;
- default: x = origin[ 1 ] / original.width;
- }
- return {
- x: x,
- y: y
- };
- },
-
- // Wraps the element around a wrapper that copies position properties
- createWrapper: function( element ) {
-
- // if the element is already wrapped, return it
- if ( element.parent().is( ".ui-effects-wrapper" )) {
- return element.parent();
- }
-
- // wrap the element
- var props = {
- width: element.outerWidth(true),
- height: element.outerHeight(true),
- "float": element.css( "float" )
- },
- wrapper = $( "<div></div>" )
- .addClass( "ui-effects-wrapper" )
- .css({
- fontSize: "100%",
- background: "transparent",
- border: "none",
- margin: 0,
- padding: 0
- }),
- // Store the size in case width/height are defined in % - Fixes #5245
- size = {
- width: element.width(),
- height: element.height()
- },
- active = document.activeElement;
-
- // support: Firefox
- // Firefox incorrectly exposes anonymous content
- // https://bugzilla.mozilla.org/show_bug.cgi?id=561664
- try {
- active.id;
- } catch( e ) {
- active = document.body;
- }
-
- element.wrap( wrapper );
-
- // Fixes #7595 - Elements lose focus when wrapped.
- if ( element[ 0 ] === active || $.contains( element[ 0 ], active ) ) {
- $( active ).focus();
- }
-
- wrapper = element.parent(); //Hotfix for jQuery 1.4 since some change in wrap() seems to actually lose the reference to the wrapped element
-
- // transfer positioning properties to the wrapper
- if ( element.css( "position" ) === "static" ) {
- wrapper.css({ position: "relative" });
- element.css({ position: "relative" });
- } else {
- $.extend( props, {
- position: element.css( "position" ),
- zIndex: element.css( "z-index" )
- });
- $.each([ "top", "left", "bottom", "right" ], function(i, pos) {
- props[ pos ] = element.css( pos );
- if ( isNaN( parseInt( props[ pos ], 10 ) ) ) {
- props[ pos ] = "auto";
- }
- });
- element.css({
- position: "relative",
- top: 0,
- left: 0,
- right: "auto",
- bottom: "auto"
- });
- }
- element.css(size);
-
- return wrapper.css( props ).show();
- },
-
- removeWrapper: function( element ) {
- var active = document.activeElement;
-
- if ( element.parent().is( ".ui-effects-wrapper" ) ) {
- element.parent().replaceWith( element );
-
- // Fixes #7595 - Elements lose focus when wrapped.
- if ( element[ 0 ] === active || $.contains( element[ 0 ], active ) ) {
- $( active ).focus();
- }
- }
-
-
- return element;
- },
-
- setTransition: function( element, list, factor, value ) {
- value = value || {};
- $.each( list, function( i, x ) {
- var unit = element.cssUnit( x );
- if ( unit[ 0 ] > 0 ) {
- value[ x ] = unit[ 0 ] * factor + unit[ 1 ];
- }
- });
- return value;
- }
-});
-
-// return an effect options object for the given parameters:
-function _normalizeArguments( effect, options, speed, callback ) {
-
- // allow passing all options as the first parameter
- if ( $.isPlainObject( effect ) ) {
- options = effect;
- effect = effect.effect;
- }
-
- // convert to an object
- effect = { effect: effect };
-
- // catch (effect, null, ...)
- if ( options == null ) {
- options = {};
- }
-
- // catch (effect, callback)
- if ( $.isFunction( options ) ) {
- callback = options;
- speed = null;
- options = {};
- }
-
- // catch (effect, speed, ?)
- if ( typeof options === "number" || $.fx.speeds[ options ] ) {
- callback = speed;
- speed = options;
- options = {};
- }
-
- // catch (effect, options, callback)
- if ( $.isFunction( speed ) ) {
- callback = speed;
- speed = null;
- }
-
- // add options to effect
- if ( options ) {
- $.extend( effect, options );
- }
-
- speed = speed || options.duration;
- effect.duration = $.fx.off ? 0 :
- typeof speed === "number" ? speed :
- speed in $.fx.speeds ? $.fx.speeds[ speed ] :
- $.fx.speeds._default;
-
- effect.complete = callback || options.complete;
-
- return effect;
-}
-
-function standardAnimationOption( option ) {
- // Valid standard speeds (nothing, number, named speed)
- if ( !option || typeof option === "number" || $.fx.speeds[ option ] ) {
- return true;
- }
-
- // Invalid strings - treat as "normal" speed
- if ( typeof option === "string" && !$.effects.effect[ option ] ) {
- return true;
- }
-
- // Complete callback
- if ( $.isFunction( option ) ) {
- return true;
- }
-
- // Options hash (but not naming an effect)
- if ( typeof option === "object" && !option.effect ) {
- return true;
- }
-
- // Didn't match any standard API
- return false;
-}
-
-$.fn.extend({
- effect: function( /* effect, options, speed, callback */ ) {
- var args = _normalizeArguments.apply( this, arguments ),
- mode = args.mode,
- queue = args.queue,
- effectMethod = $.effects.effect[ args.effect ];
-
- if ( $.fx.off || !effectMethod ) {
- // delegate to the original method (e.g., .show()) if possible
- if ( mode ) {
- return this[ mode ]( args.duration, args.complete );
- } else {
- return this.each( function() {
- if ( args.complete ) {
- args.complete.call( this );
- }
- });
- }
- }
-
- function run( next ) {
- var elem = $( this ),
- complete = args.complete,
- mode = args.mode;
-
- function done() {
- if ( $.isFunction( complete ) ) {
- complete.call( elem[0] );
- }
- if ( $.isFunction( next ) ) {
- next();
- }
- }
-
- // If the element already has the correct final state, delegate to
- // the core methods so the internal tracking of "olddisplay" works.
- if ( elem.is( ":hidden" ) ? mode === "hide" : mode === "show" ) {
- elem[ mode ]();
- done();
- } else {
- effectMethod.call( elem[0], args, done );
- }
- }
-
- return queue === false ? this.each( run ) : this.queue( queue || "fx", run );
- },
-
- show: (function( orig ) {
- return function( option ) {
- if ( standardAnimationOption( option ) ) {
- return orig.apply( this, arguments );
- } else {
- var args = _normalizeArguments.apply( this, arguments );
- args.mode = "show";
- return this.effect.call( this, args );
- }
- };
- })( $.fn.show ),
-
- hide: (function( orig ) {
- return function( option ) {
- if ( standardAnimationOption( option ) ) {
- return orig.apply( this, arguments );
- } else {
- var args = _normalizeArguments.apply( this, arguments );
- args.mode = "hide";
- return this.effect.call( this, args );
- }
- };
- })( $.fn.hide ),
-
- toggle: (function( orig ) {
- return function( option ) {
- if ( standardAnimationOption( option ) || typeof option === "boolean" ) {
- return orig.apply( this, arguments );
- } else {
- var args = _normalizeArguments.apply( this, arguments );
- args.mode = "toggle";
- return this.effect.call( this, args );
- }
- };
- })( $.fn.toggle ),
-
- // helper functions
- cssUnit: function(key) {
- var style = this.css( key ),
- val = [];
-
- $.each( [ "em", "px", "%", "pt" ], function( i, unit ) {
- if ( style.indexOf( unit ) > 0 ) {
- val = [ parseFloat( style ), unit ];
- }
- });
- return val;
- }
-});
-
-})();
-
-/******************************************************************************/
-/*********************************** EASING ***********************************/
-/******************************************************************************/
-
-(function() {
-
-// based on easing equations from Robert Penner (http://www.robertpenner.com/easing)
-
-var baseEasings = {};
-
-$.each( [ "Quad", "Cubic", "Quart", "Quint", "Expo" ], function( i, name ) {
- baseEasings[ name ] = function( p ) {
- return Math.pow( p, i + 2 );
- };
-});
-
-$.extend( baseEasings, {
- Sine: function ( p ) {
- return 1 - Math.cos( p * Math.PI / 2 );
- },
- Circ: function ( p ) {
- return 1 - Math.sqrt( 1 - p * p );
- },
- Elastic: function( p ) {
- return p === 0 || p === 1 ? p :
- -Math.pow( 2, 8 * (p - 1) ) * Math.sin( ( (p - 1) * 80 - 7.5 ) * Math.PI / 15 );
- },
- Back: function( p ) {
- return p * p * ( 3 * p - 2 );
- },
- Bounce: function ( p ) {
- var pow2,
- bounce = 4;
-
- while ( p < ( ( pow2 = Math.pow( 2, --bounce ) ) - 1 ) / 11 ) {}
- return 1 / Math.pow( 4, 3 - bounce ) - 7.5625 * Math.pow( ( pow2 * 3 - 2 ) / 22 - p, 2 );
- }
-});
-
-$.each( baseEasings, function( name, easeIn ) {
- $.easing[ "easeIn" + name ] = easeIn;
- $.easing[ "easeOut" + name ] = function( p ) {
- return 1 - easeIn( 1 - p );
- };
- $.easing[ "easeInOut" + name ] = function( p ) {
- return p < 0.5 ?
- easeIn( p * 2 ) / 2 :
- 1 - easeIn( p * -2 + 2 ) / 2;
- };
-});
-
-})();
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-var uid = 0,
- hideProps = {},
- showProps = {};
-
-hideProps.height = hideProps.paddingTop = hideProps.paddingBottom =
- hideProps.borderTopWidth = hideProps.borderBottomWidth = "hide";
-showProps.height = showProps.paddingTop = showProps.paddingBottom =
- showProps.borderTopWidth = showProps.borderBottomWidth = "show";
-
-$.widget( "ui.accordion", {
- version: "1.10.3",
- options: {
- active: 0,
- animate: {},
- collapsible: false,
- event: "click",
- header: "> li > :first-child,> :not(li):even",
- heightStyle: "auto",
- icons: {
- activeHeader: "ui-icon-triangle-1-s",
- header: "ui-icon-triangle-1-e"
- },
-
- // callbacks
- activate: null,
- beforeActivate: null
- },
-
- _create: function() {
- var options = this.options;
- this.prevShow = this.prevHide = $();
- this.element.addClass( "ui-accordion ui-widget ui-helper-reset" )
- // ARIA
- .attr( "role", "tablist" );
-
- // don't allow collapsible: false and active: false / null
- if ( !options.collapsible && (options.active === false || options.active == null) ) {
- options.active = 0;
- }
-
- this._processPanels();
- // handle negative values
- if ( options.active < 0 ) {
- options.active += this.headers.length;
- }
- this._refresh();
- },
-
- _getCreateEventData: function() {
- return {
- header: this.active,
- panel: !this.active.length ? $() : this.active.next(),
- content: !this.active.length ? $() : this.active.next()
- };
- },
-
- _createIcons: function() {
- var icons = this.options.icons;
- if ( icons ) {
- $( "<span>" )
- .addClass( "ui-accordion-header-icon ui-icon " + icons.header )
- .prependTo( this.headers );
- this.active.children( ".ui-accordion-header-icon" )
- .removeClass( icons.header )
- .addClass( icons.activeHeader );
- this.headers.addClass( "ui-accordion-icons" );
- }
- },
-
- _destroyIcons: function() {
- this.headers
- .removeClass( "ui-accordion-icons" )
- .children( ".ui-accordion-header-icon" )
- .remove();
- },
-
- _destroy: function() {
- var contents;
-
- // clean up main element
- this.element
- .removeClass( "ui-accordion ui-widget ui-helper-reset" )
- .removeAttr( "role" );
-
- // clean up headers
- this.headers
- .removeClass( "ui-accordion-header ui-accordion-header-active ui-helper-reset ui-state-default ui-corner-all ui-state-active ui-state-disabled ui-corner-top" )
- .removeAttr( "role" )
- .removeAttr( "aria-selected" )
- .removeAttr( "aria-controls" )
- .removeAttr( "tabIndex" )
- .each(function() {
- if ( /^ui-accordion/.test( this.id ) ) {
- this.removeAttribute( "id" );
- }
- });
- this._destroyIcons();
-
- // clean up content panels
- contents = this.headers.next()
- .css( "display", "" )
- .removeAttr( "role" )
- .removeAttr( "aria-expanded" )
- .removeAttr( "aria-hidden" )
- .removeAttr( "aria-labelledby" )
- .removeClass( "ui-helper-reset ui-widget-content ui-corner-bottom ui-accordion-content ui-accordion-content-active ui-state-disabled" )
- .each(function() {
- if ( /^ui-accordion/.test( this.id ) ) {
- this.removeAttribute( "id" );
- }
- });
- if ( this.options.heightStyle !== "content" ) {
- contents.css( "height", "" );
- }
- },
-
- _setOption: function( key, value ) {
- if ( key === "active" ) {
- // _activate() will handle invalid values and update this.options
- this._activate( value );
- return;
- }
-
- if ( key === "event" ) {
- if ( this.options.event ) {
- this._off( this.headers, this.options.event );
- }
- this._setupEvents( value );
- }
-
- this._super( key, value );
-
- // setting collapsible: false while collapsed; open first panel
- if ( key === "collapsible" && !value && this.options.active === false ) {
- this._activate( 0 );
- }
-
- if ( key === "icons" ) {
- this._destroyIcons();
- if ( value ) {
- this._createIcons();
- }
- }
-
- // #5332 - opacity doesn't cascade to positioned elements in IE
- // so we need to add the disabled class to the headers and panels
- if ( key === "disabled" ) {
- this.headers.add( this.headers.next() )
- .toggleClass( "ui-state-disabled", !!value );
- }
- },
-
- _keydown: function( event ) {
- /*jshint maxcomplexity:15*/
- if ( event.altKey || event.ctrlKey ) {
- return;
- }
-
- var keyCode = $.ui.keyCode,
- length = this.headers.length,
- currentIndex = this.headers.index( event.target ),
- toFocus = false;
-
- switch ( event.keyCode ) {
- case keyCode.RIGHT:
- case keyCode.DOWN:
- toFocus = this.headers[ ( currentIndex + 1 ) % length ];
- break;
- case keyCode.LEFT:
- case keyCode.UP:
- toFocus = this.headers[ ( currentIndex - 1 + length ) % length ];
- break;
- case keyCode.SPACE:
- case keyCode.ENTER:
- this._eventHandler( event );
- break;
- case keyCode.HOME:
- toFocus = this.headers[ 0 ];
- break;
- case keyCode.END:
- toFocus = this.headers[ length - 1 ];
- break;
- }
-
- if ( toFocus ) {
- $( event.target ).attr( "tabIndex", -1 );
- $( toFocus ).attr( "tabIndex", 0 );
- toFocus.focus();
- event.preventDefault();
- }
- },
-
- _panelKeyDown : function( event ) {
- if ( event.keyCode === $.ui.keyCode.UP && event.ctrlKey ) {
- $( event.currentTarget ).prev().focus();
- }
- },
-
- refresh: function() {
- var options = this.options;
- this._processPanels();
-
- // was collapsed or no panel
- if ( ( options.active === false && options.collapsible === true ) || !this.headers.length ) {
- options.active = false;
- this.active = $();
- // active false only when collapsible is true
- } else if ( options.active === false ) {
- this._activate( 0 );
- // was active, but active panel is gone
- } else if ( this.active.length && !$.contains( this.element[ 0 ], this.active[ 0 ] ) ) {
-		// all remaining panels are disabled
- if ( this.headers.length === this.headers.find(".ui-state-disabled").length ) {
- options.active = false;
- this.active = $();
- // activate previous panel
- } else {
- this._activate( Math.max( 0, options.active - 1 ) );
- }
- // was active, active panel still exists
- } else {
- // make sure active index is correct
- options.active = this.headers.index( this.active );
- }
-
- this._destroyIcons();
-
- this._refresh();
- },
-
- _processPanels: function() {
- this.headers = this.element.find( this.options.header )
- .addClass( "ui-accordion-header ui-helper-reset ui-state-default ui-corner-all" );
-
- this.headers.next()
- .addClass( "ui-accordion-content ui-helper-reset ui-widget-content ui-corner-bottom" )
- .filter(":not(.ui-accordion-content-active)")
- .hide();
- },
-
- _refresh: function() {
- var maxHeight,
- options = this.options,
- heightStyle = options.heightStyle,
- parent = this.element.parent(),
- accordionId = this.accordionId = "ui-accordion-" +
- (this.element.attr( "id" ) || ++uid);
-
- this.active = this._findActive( options.active )
- .addClass( "ui-accordion-header-active ui-state-active ui-corner-top" )
- .removeClass( "ui-corner-all" );
- this.active.next()
- .addClass( "ui-accordion-content-active" )
- .show();
-
- this.headers
- .attr( "role", "tab" )
- .each(function( i ) {
- var header = $( this ),
- headerId = header.attr( "id" ),
- panel = header.next(),
- panelId = panel.attr( "id" );
- if ( !headerId ) {
- headerId = accordionId + "-header-" + i;
- header.attr( "id", headerId );
- }
- if ( !panelId ) {
- panelId = accordionId + "-panel-" + i;
- panel.attr( "id", panelId );
- }
- header.attr( "aria-controls", panelId );
- panel.attr( "aria-labelledby", headerId );
- })
- .next()
- .attr( "role", "tabpanel" );
-
- this.headers
- .not( this.active )
- .attr({
- "aria-selected": "false",
- tabIndex: -1
- })
- .next()
- .attr({
- "aria-expanded": "false",
- "aria-hidden": "true"
- })
- .hide();
-
- // make sure at least one header is in the tab order
- if ( !this.active.length ) {
- this.headers.eq( 0 ).attr( "tabIndex", 0 );
- } else {
- this.active.attr({
- "aria-selected": "true",
- tabIndex: 0
- })
- .next()
- .attr({
- "aria-expanded": "true",
- "aria-hidden": "false"
- });
- }
-
- this._createIcons();
-
- this._setupEvents( options.event );
-
- if ( heightStyle === "fill" ) {
- maxHeight = parent.height();
- this.element.siblings( ":visible" ).each(function() {
- var elem = $( this ),
- position = elem.css( "position" );
-
- if ( position === "absolute" || position === "fixed" ) {
- return;
- }
- maxHeight -= elem.outerHeight( true );
- });
-
- this.headers.each(function() {
- maxHeight -= $( this ).outerHeight( true );
- });
-
- this.headers.next()
- .each(function() {
- $( this ).height( Math.max( 0, maxHeight -
- $( this ).innerHeight() + $( this ).height() ) );
- })
- .css( "overflow", "auto" );
- } else if ( heightStyle === "auto" ) {
- maxHeight = 0;
- this.headers.next()
- .each(function() {
- maxHeight = Math.max( maxHeight, $( this ).css( "height", "" ).height() );
- })
- .height( maxHeight );
- }
- },
-
- _activate: function( index ) {
- var active = this._findActive( index )[ 0 ];
-
- // trying to activate the already active panel
- if ( active === this.active[ 0 ] ) {
- return;
- }
-
- // trying to collapse, simulate a click on the currently active header
- active = active || this.active[ 0 ];
-
- this._eventHandler({
- target: active,
- currentTarget: active,
- preventDefault: $.noop
- });
- },
-
- _findActive: function( selector ) {
- return typeof selector === "number" ? this.headers.eq( selector ) : $();
- },
-
- _setupEvents: function( event ) {
- var events = {
- keydown: "_keydown"
- };
- if ( event ) {
- $.each( event.split(" "), function( index, eventName ) {
- events[ eventName ] = "_eventHandler";
- });
- }
-
- this._off( this.headers.add( this.headers.next() ) );
- this._on( this.headers, events );
- this._on( this.headers.next(), { keydown: "_panelKeyDown" });
- this._hoverable( this.headers );
- this._focusable( this.headers );
- },
-
- _eventHandler: function( event ) {
- var options = this.options,
- active = this.active,
- clicked = $( event.currentTarget ),
- clickedIsActive = clicked[ 0 ] === active[ 0 ],
- collapsing = clickedIsActive && options.collapsible,
- toShow = collapsing ? $() : clicked.next(),
- toHide = active.next(),
- eventData = {
- oldHeader: active,
- oldPanel: toHide,
- newHeader: collapsing ? $() : clicked,
- newPanel: toShow
- };
-
- event.preventDefault();
-
- if (
- // click on active header, but not collapsible
- ( clickedIsActive && !options.collapsible ) ||
- // allow canceling activation
- ( this._trigger( "beforeActivate", event, eventData ) === false ) ) {
- return;
- }
-
- options.active = collapsing ? false : this.headers.index( clicked );
-
- // when the call to ._toggle() comes after the class changes
- // it causes a very odd bug in IE 8 (see #6720)
- this.active = clickedIsActive ? $() : clicked;
- this._toggle( eventData );
-
- // switch classes
- // corner classes on the previously active header stay after the animation
- active.removeClass( "ui-accordion-header-active ui-state-active" );
- if ( options.icons ) {
- active.children( ".ui-accordion-header-icon" )
- .removeClass( options.icons.activeHeader )
- .addClass( options.icons.header );
- }
-
- if ( !clickedIsActive ) {
- clicked
- .removeClass( "ui-corner-all" )
- .addClass( "ui-accordion-header-active ui-state-active ui-corner-top" );
- if ( options.icons ) {
- clicked.children( ".ui-accordion-header-icon" )
- .removeClass( options.icons.header )
- .addClass( options.icons.activeHeader );
- }
-
- clicked
- .next()
- .addClass( "ui-accordion-content-active" );
- }
- },
-
- _toggle: function( data ) {
- var toShow = data.newPanel,
- toHide = this.prevShow.length ? this.prevShow : data.oldPanel;
-
- // handle activating a panel during the animation for another activation
- this.prevShow.add( this.prevHide ).stop( true, true );
- this.prevShow = toShow;
- this.prevHide = toHide;
-
- if ( this.options.animate ) {
- this._animate( toShow, toHide, data );
- } else {
- toHide.hide();
- toShow.show();
- this._toggleComplete( data );
- }
-
- toHide.attr({
- "aria-expanded": "false",
- "aria-hidden": "true"
- });
- toHide.prev().attr( "aria-selected", "false" );
- // if we're switching panels, remove the old header from the tab order
- // if we're opening from collapsed state, remove the previous header from the tab order
- // if we're collapsing, then keep the collapsing header in the tab order
- if ( toShow.length && toHide.length ) {
- toHide.prev().attr( "tabIndex", -1 );
- } else if ( toShow.length ) {
- this.headers.filter(function() {
- return $( this ).attr( "tabIndex" ) === 0;
- })
- .attr( "tabIndex", -1 );
- }
-
- toShow
- .attr({
- "aria-expanded": "true",
- "aria-hidden": "false"
- })
- .prev()
- .attr({
- "aria-selected": "true",
- tabIndex: 0
- });
- },
-
- _animate: function( toShow, toHide, data ) {
- var total, easing, duration,
- that = this,
- adjust = 0,
- down = toShow.length &&
- ( !toHide.length || ( toShow.index() < toHide.index() ) ),
- animate = this.options.animate || {},
- options = down && animate.down || animate,
- complete = function() {
- that._toggleComplete( data );
- };
-
- if ( typeof options === "number" ) {
- duration = options;
- }
- if ( typeof options === "string" ) {
- easing = options;
- }
- // fall back from options to animation in case of partial down settings
- easing = easing || options.easing || animate.easing;
- duration = duration || options.duration || animate.duration;
-
- if ( !toHide.length ) {
- return toShow.animate( showProps, duration, easing, complete );
- }
- if ( !toShow.length ) {
- return toHide.animate( hideProps, duration, easing, complete );
- }
-
- total = toShow.show().outerHeight();
- toHide.animate( hideProps, {
- duration: duration,
- easing: easing,
- step: function( now, fx ) {
- fx.now = Math.round( now );
- }
- });
- toShow
- .hide()
- .animate( showProps, {
- duration: duration,
- easing: easing,
- complete: complete,
- step: function( now, fx ) {
- fx.now = Math.round( now );
- if ( fx.prop !== "height" ) {
- adjust += fx.now;
- } else if ( that.options.heightStyle !== "content" ) {
- fx.now = Math.round( total - toHide.outerHeight() - adjust );
- adjust = 0;
- }
- }
- });
- },
-
- _toggleComplete: function( data ) {
- var toHide = data.oldPanel;
-
- toHide
- .removeClass( "ui-accordion-content-active" )
- .prev()
- .removeClass( "ui-corner-top" )
- .addClass( "ui-corner-all" );
-
- // Work around for rendering bug in IE (#5421)
- if ( toHide.length ) {
- toHide.parent()[0].className = toHide.parent()[0].className;
- }
-
- this._trigger( "activate", null, data );
- }
-});
-
-})( jQuery );
-
-(function( $, undefined ) {
-
-// used to prevent race conditions with remote data sources
-var requestIndex = 0;
-
-$.widget( "ui.autocomplete", {
- version: "1.10.3",
- defaultElement: "<input>",
- options: {
- appendTo: null,
- autoFocus: false,
- delay: 300,
- minLength: 1,
- position: {
- my: "left top",
- at: "left bottom",
- collision: "none"
- },
- source: null,
-
- // callbacks
- change: null,
- close: null,
- focus: null,
- open: null,
- response: null,
- search: null,
- select: null
- },
-
- pending: 0,
-
- _create: function() {
- // Some browsers only repeat keydown events, not keypress events,
- // so we use the suppressKeyPress flag to determine if we've already
- // handled the keydown event. #7269
- // Unfortunately the code for & in keypress is the same as the up arrow,
- // so we use the suppressKeyPressRepeat flag to avoid handling keypress
- // events when we know the keydown event was used to modify the
- // search term. #7799
- var suppressKeyPress, suppressKeyPressRepeat, suppressInput,
- nodeName = this.element[0].nodeName.toLowerCase(),
- isTextarea = nodeName === "textarea",
- isInput = nodeName === "input";
-
- this.isMultiLine =
- // Textareas are always multi-line
- isTextarea ? true :
- // Inputs are always single-line, even if inside a contentEditable element
- // IE also treats inputs as contentEditable
- isInput ? false :
- // All other element types are determined by whether or not they're contentEditable
- this.element.prop( "isContentEditable" );
-
- this.valueMethod = this.element[ isTextarea || isInput ? "val" : "text" ];
- this.isNewMenu = true;
-
- this.element
- .addClass( "ui-autocomplete-input" )
- .attr( "autocomplete", "off" );
-
- this._on( this.element, {
- keydown: function( event ) {
- /*jshint maxcomplexity:15*/
- if ( this.element.prop( "readOnly" ) ) {
- suppressKeyPress = true;
- suppressInput = true;
- suppressKeyPressRepeat = true;
- return;
- }
-
- suppressKeyPress = false;
- suppressInput = false;
- suppressKeyPressRepeat = false;
- var keyCode = $.ui.keyCode;
- switch( event.keyCode ) {
- case keyCode.PAGE_UP:
- suppressKeyPress = true;
- this._move( "previousPage", event );
- break;
- case keyCode.PAGE_DOWN:
- suppressKeyPress = true;
- this._move( "nextPage", event );
- break;
- case keyCode.UP:
- suppressKeyPress = true;
- this._keyEvent( "previous", event );
- break;
- case keyCode.DOWN:
- suppressKeyPress = true;
- this._keyEvent( "next", event );
- break;
- case keyCode.ENTER:
- case keyCode.NUMPAD_ENTER:
- // when menu is open and has focus
- if ( this.menu.active ) {
- // #6055 - Opera still allows the keypress to occur
- // which causes forms to submit
- suppressKeyPress = true;
- event.preventDefault();
- this.menu.select( event );
- }
- break;
- case keyCode.TAB:
- if ( this.menu.active ) {
- this.menu.select( event );
- }
- break;
- case keyCode.ESCAPE:
- if ( this.menu.element.is( ":visible" ) ) {
- this._value( this.term );
- this.close( event );
- // Different browsers have different default behavior for escape
- // Single press can mean undo or clear
- // Double press in IE means clear the whole form
- event.preventDefault();
- }
- break;
- default:
- suppressKeyPressRepeat = true;
- // search timeout should be triggered before the input value is changed
- this._searchTimeout( event );
- break;
- }
- },
- keypress: function( event ) {
- if ( suppressKeyPress ) {
- suppressKeyPress = false;
- if ( !this.isMultiLine || this.menu.element.is( ":visible" ) ) {
- event.preventDefault();
- }
- return;
- }
- if ( suppressKeyPressRepeat ) {
- return;
- }
-
- // replicate some key handlers to allow them to repeat in Firefox and Opera
- var keyCode = $.ui.keyCode;
- switch( event.keyCode ) {
- case keyCode.PAGE_UP:
- this._move( "previousPage", event );
- break;
- case keyCode.PAGE_DOWN:
- this._move( "nextPage", event );
- break;
- case keyCode.UP:
- this._keyEvent( "previous", event );
- break;
- case keyCode.DOWN:
- this._keyEvent( "next", event );
- break;
- }
- },
- input: function( event ) {
- if ( suppressInput ) {
- suppressInput = false;
- event.preventDefault();
- return;
- }
- this._searchTimeout( event );
- },
- focus: function() {
- this.selectedItem = null;
- this.previous = this._value();
- },
- blur: function( event ) {
- if ( this.cancelBlur ) {
- delete this.cancelBlur;
- return;
- }
-
- clearTimeout( this.searching );
- this.close( event );
- this._change( event );
- }
- });
-
- this._initSource();
- this.menu = $( "<ul>" )
- .addClass( "ui-autocomplete ui-front" )
- .appendTo( this._appendTo() )
- .menu({
- // disable ARIA support, the live region takes care of that
- role: null
- })
- .hide()
- .data( "ui-menu" );
-
- this._on( this.menu.element, {
- mousedown: function( event ) {
- // prevent moving focus out of the text field
- event.preventDefault();
-
- // IE doesn't prevent moving focus even with event.preventDefault()
- // so we set a flag to know when we should ignore the blur event
- this.cancelBlur = true;
- this._delay(function() {
- delete this.cancelBlur;
- });
-
- // clicking on the scrollbar causes focus to shift to the body
- // but we can't detect a mouseup or a click immediately afterward
- // so we have to track the next mousedown and close the menu if
- // the user clicks somewhere outside of the autocomplete
- var menuElement = this.menu.element[ 0 ];
- if ( !$( event.target ).closest( ".ui-menu-item" ).length ) {
- this._delay(function() {
- var that = this;
- this.document.one( "mousedown", function( event ) {
- if ( event.target !== that.element[ 0 ] &&
- event.target !== menuElement &&
- !$.contains( menuElement, event.target ) ) {
- that.close();
- }
- });
- });
- }
- },
- menufocus: function( event, ui ) {
- // support: Firefox
- // Prevent accidental activation of menu items in Firefox (#7024 #9118)
- if ( this.isNewMenu ) {
- this.isNewMenu = false;
- if ( event.originalEvent && /^mouse/.test( event.originalEvent.type ) ) {
- this.menu.blur();
-
- this.document.one( "mousemove", function() {
- $( event.target ).trigger( event.originalEvent );
- });
-
- return;
- }
- }
-
- var item = ui.item.data( "ui-autocomplete-item" );
- if ( false !== this._trigger( "focus", event, { item: item } ) ) {
- // use value to match what will end up in the input, if it was a key event
- if ( event.originalEvent && /^key/.test( event.originalEvent.type ) ) {
- this._value( item.value );
- }
- } else {
- // Normally the input is populated with the item's value as the
- // menu is navigated, causing screen readers to notice a change and
- // announce the item. Since the focus event was canceled, this doesn't
- // happen, so we update the live region so that screen readers can
- // still notice the change and announce it.
- this.liveRegion.text( item.value );
- }
- },
- menuselect: function( event, ui ) {
- var item = ui.item.data( "ui-autocomplete-item" ),
- previous = this.previous;
-
- // only trigger when focus was lost (click on menu)
- if ( this.element[0] !== this.document[0].activeElement ) {
- this.element.focus();
- this.previous = previous;
- // #6109 - IE triggers two focus events and the second
- // is asynchronous, so we need to reset the previous
- // term synchronously and asynchronously :-(
- this._delay(function() {
- this.previous = previous;
- this.selectedItem = item;
- });
- }
-
- if ( false !== this._trigger( "select", event, { item: item } ) ) {
- this._value( item.value );
- }
- // reset the term after the select event
- // this allows custom select handling to work properly
- this.term = this._value();
-
- this.close( event );
- this.selectedItem = item;
- }
- });
-
- this.liveRegion = $( "<span>", {
- role: "status",
- "aria-live": "polite"
- })
- .addClass( "ui-helper-hidden-accessible" )
- .insertBefore( this.element );
-
- // turning off autocomplete prevents the browser from remembering the
- // value when navigating through history, so we re-enable autocomplete
- // if the page is unloaded before the widget is destroyed. #7790
- this._on( this.window, {
- beforeunload: function() {
- this.element.removeAttr( "autocomplete" );
- }
- });
- },
-
- _destroy: function() {
- clearTimeout( this.searching );
- this.element
- .removeClass( "ui-autocomplete-input" )
- .removeAttr( "autocomplete" );
- this.menu.element.remove();
- this.liveRegion.remove();
- },
-
- _setOption: function( key, value ) {
- this._super( key, value );
- if ( key === "source" ) {
- this._initSource();
- }
- if ( key === "appendTo" ) {
- this.menu.element.appendTo( this._appendTo() );
- }
- if ( key === "disabled" && value && this.xhr ) {
- this.xhr.abort();
- }
- },
-
- _appendTo: function() {
- var element = this.options.appendTo;
-
- if ( element ) {
- element = element.jquery || element.nodeType ?
- $( element ) :
- this.document.find( element ).eq( 0 );
- }
-
- if ( !element ) {
- element = this.element.closest( ".ui-front" );
- }
-
- if ( !element.length ) {
- element = this.document[0].body;
- }
-
- return element;
- },
-
- _initSource: function() {
- var array, url,
- that = this;
- if ( $.isArray(this.options.source) ) {
- array = this.options.source;
- this.source = function( request, response ) {
- response( $.ui.autocomplete.filter( array, request.term ) );
- };
- } else if ( typeof this.options.source === "string" ) {
- url = this.options.source;
- this.source = function( request, response ) {
- if ( that.xhr ) {
- that.xhr.abort();
- }
- that.xhr = $.ajax({
- url: url,
- data: request,
- dataType: "json",
- success: function( data ) {
- response( data );
- },
- error: function() {
- response( [] );
- }
- });
- };
- } else {
- this.source = this.options.source;
- }
- },
-
- _searchTimeout: function( event ) {
- clearTimeout( this.searching );
- this.searching = this._delay(function() {
- // only search if the value has changed
- if ( this.term !== this._value() ) {
- this.selectedItem = null;
- this.search( null, event );
- }
- }, this.options.delay );
- },
-
- search: function( value, event ) {
- value = value != null ? value : this._value();
-
- // always save the actual value, not the one passed as an argument
- this.term = this._value();
-
- if ( value.length < this.options.minLength ) {
- return this.close( event );
- }
-
- if ( this._trigger( "search", event ) === false ) {
- return;
- }
-
- return this._search( value );
- },
-
- _search: function( value ) {
- this.pending++;
- this.element.addClass( "ui-autocomplete-loading" );
- this.cancelSearch = false;
-
- this.source( { term: value }, this._response() );
- },
-
- _response: function() {
- var that = this,
- index = ++requestIndex;
-
- return function( content ) {
- if ( index === requestIndex ) {
- that.__response( content );
- }
-
- that.pending--;
- if ( !that.pending ) {
- that.element.removeClass( "ui-autocomplete-loading" );
- }
- };
- },
-
- __response: function( content ) {
- if ( content ) {
- content = this._normalize( content );
- }
- this._trigger( "response", null, { content: content } );
- if ( !this.options.disabled && content && content.length && !this.cancelSearch ) {
- this._suggest( content );
- this._trigger( "open" );
- } else {
- // use ._close() instead of .close() so we don't cancel future searches
- this._close();
- }
- },
-
- close: function( event ) {
- this.cancelSearch = true;
- this._close( event );
- },
-
- _close: function( event ) {
- if ( this.menu.element.is( ":visible" ) ) {
- this.menu.element.hide();
- this.menu.blur();
- this.isNewMenu = true;
- this._trigger( "close", event );
- }
- },
-
- _change: function( event ) {
- if ( this.previous !== this._value() ) {
- this._trigger( "change", event, { item: this.selectedItem } );
- }
- },
-
- _normalize: function( items ) {
- // assume all items have the right format when the first item is complete
- if ( items.length && items[0].label && items[0].value ) {
- return items;
- }
- return $.map( items, function( item ) {
- if ( typeof item === "string" ) {
- return {
- label: item,
- value: item
- };
- }
- return $.extend({
- label: item.label || item.value,
- value: item.value || item.label
- }, item );
- });
- },
-
- _suggest: function( items ) {
- var ul = this.menu.element.empty();
- this._renderMenu( ul, items );
- this.isNewMenu = true;
- this.menu.refresh();
-
- // size and position menu
- ul.show();
- this._resizeMenu();
- ul.position( $.extend({
- of: this.element
- }, this.options.position ));
-
- if ( this.options.autoFocus ) {
- this.menu.next();
- }
- },
-
- _resizeMenu: function() {
- var ul = this.menu.element;
- ul.outerWidth( Math.max(
- // Firefox wraps long text (possibly a rounding bug)
- // so we add 1px to avoid the wrapping (#7513)
- ul.width( "" ).outerWidth() + 1,
- this.element.outerWidth()
- ) );
- },
-
- _renderMenu: function( ul, items ) {
- var that = this;
- $.each( items, function( index, item ) {
- that._renderItemData( ul, item );
- });
- },
-
- _renderItemData: function( ul, item ) {
- return this._renderItem( ul, item ).data( "ui-autocomplete-item", item );
- },
-
- _renderItem: function( ul, item ) {
- return $( "<li>" )
- .append( $( "<a>" ).text( item.label ) )
- .appendTo( ul );
- },
-
- _move: function( direction, event ) {
- if ( !this.menu.element.is( ":visible" ) ) {
- this.search( null, event );
- return;
- }
- if ( this.menu.isFirstItem() && /^previous/.test( direction ) ||
- this.menu.isLastItem() && /^next/.test( direction ) ) {
- this._value( this.term );
- this.menu.blur();
- return;
- }
- this.menu[ direction ]( event );
- },
-
- widget: function() {
- return this.menu.element;
- },
-
- _value: function() {
- return this.valueMethod.apply( this.element, arguments );
- },
-
- _keyEvent: function( keyEvent, event ) {
- if ( !this.isMultiLine || this.menu.element.is( ":visible" ) ) {
- this._move( keyEvent, event );
-
- // prevents moving cursor to beginning/end of the text field in some browsers
- event.preventDefault();
- }
- }
-});
-
-$.extend( $.ui.autocomplete, {
- escapeRegex: function( value ) {
- return value.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g, "\\$&");
- },
- filter: function(array, term) {
- var matcher = new RegExp( $.ui.autocomplete.escapeRegex(term), "i" );
- return $.grep( array, function(value) {
- return matcher.test( value.label || value.value || value );
- });
- }
-});
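// Illustrative sketch, not from the removed source: the static escapeRegex()/filter()
// helpers above are typically used to back an autocomplete with a local array.
// The "#tags" selector and the availableTags list are hypothetical.
var availableTags = [ "ActionScript", "AppleScript", "BASIC", "C", "C++", "Clojure" ];
$( "#tags" ).autocomplete({
	source: function( request, response ) {
		// filter() escapes request.term via escapeRegex() and matches label/value case-insensitively
		response( $.ui.autocomplete.filter( availableTags, request.term ) );
	}
});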
-
-
-// live region extension, adding a `messages` option
-// NOTE: This is an experimental API. We are still investigating
-// a full solution for string manipulation and internationalization.
-$.widget( "ui.autocomplete", $.ui.autocomplete, {
- options: {
- messages: {
- noResults: "No search results.",
- results: function( amount ) {
- return amount + ( amount > 1 ? " results are" : " result is" ) +
- " available, use up and down arrow keys to navigate.";
- }
- }
- },
-
- __response: function( content ) {
- var message;
- this._superApply( arguments );
- if ( this.options.disabled || this.cancelSearch ) {
- return;
- }
- if ( content && content.length ) {
- message = this.options.messages.results( content.length );
- } else {
- message = this.options.messages.noResults;
- }
- this.liveRegion.text( message );
- }
-});
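// Illustrative sketch, not from the removed source: overriding the experimental `messages`
// option added by the live region extension above; "#search" is a hypothetical input.
$( "#search" ).autocomplete({
	source: [ "red", "green", "blue" ],
	messages: {
		noResults: "Nothing found.",
		results: function( amount ) {
			return amount + " colour" + ( amount > 1 ? "s" : "" ) + " found.";
		}
	}
});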
-
-}( jQuery ));
-
-(function( $, undefined ) {
-
-var lastActive, startXPos, startYPos, clickDragged,
- baseClasses = "ui-button ui-widget ui-state-default ui-corner-all",
- stateClasses = "ui-state-hover ui-state-active ",
- typeClasses = "ui-button-icons-only ui-button-icon-only ui-button-text-icons ui-button-text-icon-primary ui-button-text-icon-secondary ui-button-text-only",
- formResetHandler = function() {
- var form = $( this );
- setTimeout(function() {
- form.find( ":ui-button" ).button( "refresh" );
- }, 1 );
- },
- radioGroup = function( radio ) {
- var name = radio.name,
- form = radio.form,
- radios = $( [] );
- if ( name ) {
- name = name.replace( /'/g, "\\'" );
- if ( form ) {
- radios = $( form ).find( "[name='" + name + "']" );
- } else {
- radios = $( "[name='" + name + "']", radio.ownerDocument )
- .filter(function() {
- return !this.form;
- });
- }
- }
- return radios;
- };
-
-$.widget( "ui.button", {
- version: "1.10.3",
- defaultElement: "<button>",
- options: {
- disabled: null,
- text: true,
- label: null,
- icons: {
- primary: null,
- secondary: null
- }
- },
- _create: function() {
- this.element.closest( "form" )
- .unbind( "reset" + this.eventNamespace )
- .bind( "reset" + this.eventNamespace, formResetHandler );
-
- if ( typeof this.options.disabled !== "boolean" ) {
- this.options.disabled = !!this.element.prop( "disabled" );
- } else {
- this.element.prop( "disabled", this.options.disabled );
- }
-
- this._determineButtonType();
- this.hasTitle = !!this.buttonElement.attr( "title" );
-
- var that = this,
- options = this.options,
- toggleButton = this.type === "checkbox" || this.type === "radio",
- activeClass = !toggleButton ? "ui-state-active" : "",
- focusClass = "ui-state-focus";
-
- if ( options.label === null ) {
- options.label = (this.type === "input" ? this.buttonElement.val() : this.buttonElement.html());
- }
-
- this._hoverable( this.buttonElement );
-
- this.buttonElement
- .addClass( baseClasses )
- .attr( "role", "button" )
- .bind( "mouseenter" + this.eventNamespace, function() {
- if ( options.disabled ) {
- return;
- }
- if ( this === lastActive ) {
- $( this ).addClass( "ui-state-active" );
- }
- })
- .bind( "mouseleave" + this.eventNamespace, function() {
- if ( options.disabled ) {
- return;
- }
- $( this ).removeClass( activeClass );
- })
- .bind( "click" + this.eventNamespace, function( event ) {
- if ( options.disabled ) {
- event.preventDefault();
- event.stopImmediatePropagation();
- }
- });
-
- this.element
- .bind( "focus" + this.eventNamespace, function() {
- // no need to check disabled, focus won't be triggered anyway
- that.buttonElement.addClass( focusClass );
- })
- .bind( "blur" + this.eventNamespace, function() {
- that.buttonElement.removeClass( focusClass );
- });
-
- if ( toggleButton ) {
- this.element.bind( "change" + this.eventNamespace, function() {
- if ( clickDragged ) {
- return;
- }
- that.refresh();
- });
-			// if the mouse moves between mousedown and mouseup (a drag), set the clickDragged
-			// flag; this prevents an issue in Firefox where the button state changes but the
-			// checkbox/radio checked state does not (see ticket #6970)
- this.buttonElement
- .bind( "mousedown" + this.eventNamespace, function( event ) {
- if ( options.disabled ) {
- return;
- }
- clickDragged = false;
- startXPos = event.pageX;
- startYPos = event.pageY;
- })
- .bind( "mouseup" + this.eventNamespace, function( event ) {
- if ( options.disabled ) {
- return;
- }
- if ( startXPos !== event.pageX || startYPos !== event.pageY ) {
- clickDragged = true;
- }
- });
- }
-
- if ( this.type === "checkbox" ) {
- this.buttonElement.bind( "click" + this.eventNamespace, function() {
- if ( options.disabled || clickDragged ) {
- return false;
- }
- });
- } else if ( this.type === "radio" ) {
- this.buttonElement.bind( "click" + this.eventNamespace, function() {
- if ( options.disabled || clickDragged ) {
- return false;
- }
- $( this ).addClass( "ui-state-active" );
- that.buttonElement.attr( "aria-pressed", "true" );
-
- var radio = that.element[ 0 ];
- radioGroup( radio )
- .not( radio )
- .map(function() {
- return $( this ).button( "widget" )[ 0 ];
- })
- .removeClass( "ui-state-active" )
- .attr( "aria-pressed", "false" );
- });
- } else {
- this.buttonElement
- .bind( "mousedown" + this.eventNamespace, function() {
- if ( options.disabled ) {
- return false;
- }
- $( this ).addClass( "ui-state-active" );
- lastActive = this;
- that.document.one( "mouseup", function() {
- lastActive = null;
- });
- })
- .bind( "mouseup" + this.eventNamespace, function() {
- if ( options.disabled ) {
- return false;
- }
- $( this ).removeClass( "ui-state-active" );
- })
- .bind( "keydown" + this.eventNamespace, function(event) {
- if ( options.disabled ) {
- return false;
- }
- if ( event.keyCode === $.ui.keyCode.SPACE || event.keyCode === $.ui.keyCode.ENTER ) {
- $( this ).addClass( "ui-state-active" );
- }
- })
-				// see #8559: we bind to blur here in case the button element loses
-				// focus between keydown and keyup; otherwise it would be left in an "active" state
- .bind( "keyup" + this.eventNamespace + " blur" + this.eventNamespace, function() {
- $( this ).removeClass( "ui-state-active" );
- });
-
- if ( this.buttonElement.is("a") ) {
- this.buttonElement.keyup(function(event) {
- if ( event.keyCode === $.ui.keyCode.SPACE ) {
-					// TODO pass through the original event correctly (just passing it as the 2nd argument doesn't work)
- $( this ).click();
- }
- });
- }
- }
-
- // TODO: pull out $.Widget's handling for the disabled option into
- // $.Widget.prototype._setOptionDisabled so it's easy to proxy and can
- // be overridden by individual plugins
- this._setOption( "disabled", options.disabled );
- this._resetButton();
- },
-
- _determineButtonType: function() {
- var ancestor, labelSelector, checked;
-
- if ( this.element.is("[type=checkbox]") ) {
- this.type = "checkbox";
- } else if ( this.element.is("[type=radio]") ) {
- this.type = "radio";
- } else if ( this.element.is("input") ) {
- this.type = "input";
- } else {
- this.type = "button";
- }
-
- if ( this.type === "checkbox" || this.type === "radio" ) {
- // we don't search against the document in case the element
- // is disconnected from the DOM
- ancestor = this.element.parents().last();
- labelSelector = "label[for='" + this.element.attr("id") + "']";
- this.buttonElement = ancestor.find( labelSelector );
- if ( !this.buttonElement.length ) {
- ancestor = ancestor.length ? ancestor.siblings() : this.element.siblings();
- this.buttonElement = ancestor.filter( labelSelector );
- if ( !this.buttonElement.length ) {
- this.buttonElement = ancestor.find( labelSelector );
- }
- }
- this.element.addClass( "ui-helper-hidden-accessible" );
-
- checked = this.element.is( ":checked" );
- if ( checked ) {
- this.buttonElement.addClass( "ui-state-active" );
- }
- this.buttonElement.prop( "aria-pressed", checked );
- } else {
- this.buttonElement = this.element;
- }
- },
-
- widget: function() {
- return this.buttonElement;
- },
-
- _destroy: function() {
- this.element
- .removeClass( "ui-helper-hidden-accessible" );
- this.buttonElement
- .removeClass( baseClasses + " " + stateClasses + " " + typeClasses )
- .removeAttr( "role" )
- .removeAttr( "aria-pressed" )
- .html( this.buttonElement.find(".ui-button-text").html() );
-
- if ( !this.hasTitle ) {
- this.buttonElement.removeAttr( "title" );
- }
- },
-
- _setOption: function( key, value ) {
- this._super( key, value );
- if ( key === "disabled" ) {
- if ( value ) {
- this.element.prop( "disabled", true );
- } else {
- this.element.prop( "disabled", false );
- }
- return;
- }
- this._resetButton();
- },
-
- refresh: function() {
- //See #8237 & #8828
- var isDisabled = this.element.is( "input, button" ) ? this.element.is( ":disabled" ) : this.element.hasClass( "ui-button-disabled" );
-
- if ( isDisabled !== this.options.disabled ) {
- this._setOption( "disabled", isDisabled );
- }
- if ( this.type === "radio" ) {
- radioGroup( this.element[0] ).each(function() {
- if ( $( this ).is( ":checked" ) ) {
- $( this ).button( "widget" )
- .addClass( "ui-state-active" )
- .attr( "aria-pressed", "true" );
- } else {
- $( this ).button( "widget" )
- .removeClass( "ui-state-active" )
- .attr( "aria-pressed", "false" );
- }
- });
- } else if ( this.type === "checkbox" ) {
- if ( this.element.is( ":checked" ) ) {
- this.buttonElement
- .addClass( "ui-state-active" )
- .attr( "aria-pressed", "true" );
- } else {
- this.buttonElement
- .removeClass( "ui-state-active" )
- .attr( "aria-pressed", "false" );
- }
- }
- },
-
- _resetButton: function() {
- if ( this.type === "input" ) {
- if ( this.options.label ) {
- this.element.val( this.options.label );
- }
- return;
- }
- var buttonElement = this.buttonElement.removeClass( typeClasses ),
- buttonText = $( "<span></span>", this.document[0] )
- .addClass( "ui-button-text" )
- .html( this.options.label )
- .appendTo( buttonElement.empty() )
- .text(),
- icons = this.options.icons,
- multipleIcons = icons.primary && icons.secondary,
- buttonClasses = [];
-
- if ( icons.primary || icons.secondary ) {
- if ( this.options.text ) {
- buttonClasses.push( "ui-button-text-icon" + ( multipleIcons ? "s" : ( icons.primary ? "-primary" : "-secondary" ) ) );
- }
-
- if ( icons.primary ) {
- buttonElement.prepend( "<span class='ui-button-icon-primary ui-icon " + icons.primary + "'></span>" );
- }
-
- if ( icons.secondary ) {
- buttonElement.append( "<span class='ui-button-icon-secondary ui-icon " + icons.secondary + "'></span>" );
- }
-
- if ( !this.options.text ) {
- buttonClasses.push( multipleIcons ? "ui-button-icons-only" : "ui-button-icon-only" );
-
- if ( !this.hasTitle ) {
- buttonElement.attr( "title", $.trim( buttonText ) );
- }
- }
- } else {
- buttonClasses.push( "ui-button-text-only" );
- }
- buttonElement.addClass( buttonClasses.join( " " ) );
- }
-});
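// Illustrative sketch, not from the removed source: a button using both icon slots with the
// text suppressed, exercising the icon/text branches of _resetButton() above.
// "#mute" is a hypothetical element; the ui-icon-* classes come from the jQuery UI theme sprite.
$( "#mute" ).button({
	text: false,
	label: "Mute",
	icons: { primary: "ui-icon-volume-off", secondary: "ui-icon-triangle-1-s" }
});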
-
-$.widget( "ui.buttonset", {
- version: "1.10.3",
- options: {
- items: "button, input[type=button], input[type=submit], input[type=reset], input[type=checkbox], input[type=radio], a, :data(ui-button)"
- },
-
- _create: function() {
- this.element.addClass( "ui-buttonset" );
- },
-
- _init: function() {
- this.refresh();
- },
-
- _setOption: function( key, value ) {
- if ( key === "disabled" ) {
- this.buttons.button( "option", key, value );
- }
-
- this._super( key, value );
- },
-
- refresh: function() {
- var rtl = this.element.css( "direction" ) === "rtl";
-
- this.buttons = this.element.find( this.options.items )
- .filter( ":ui-button" )
- .button( "refresh" )
- .end()
- .not( ":ui-button" )
- .button()
- .end()
- .map(function() {
- return $( this ).button( "widget" )[ 0 ];
- })
- .removeClass( "ui-corner-all ui-corner-left ui-corner-right" )
- .filter( ":first" )
- .addClass( rtl ? "ui-corner-right" : "ui-corner-left" )
- .end()
- .filter( ":last" )
- .addClass( rtl ? "ui-corner-left" : "ui-corner-right" )
- .end()
- .end();
- },
-
- _destroy: function() {
- this.element.removeClass( "ui-buttonset" );
- this.buttons
- .map(function() {
- return $( this ).button( "widget" )[ 0 ];
- })
- .removeClass( "ui-corner-left ui-corner-right" )
- .end()
- .button( "destroy" );
- }
-});
-
-}( jQuery ) );
-
-(function( $, undefined ) {
-
-$.extend($.ui, { datepicker: { version: "1.10.3" } });
-
-var PROP_NAME = "datepicker",
- instActive;
-
-/* Date picker manager.
- Use the singleton instance of this class, $.datepicker, to interact with the date picker.
- Settings for (groups of) date pickers are maintained in an instance object,
- allowing multiple different settings on the same page. */
-
-function Datepicker() {
- this._curInst = null; // The current instance in use
- this._keyEvent = false; // If the last event was a key event
- this._disabledInputs = []; // List of date picker inputs that have been disabled
-	this._datepickerShowing = false; // True if the popup picker is showing, false if not
- this._inDialog = false; // True if showing within a "dialog", false if not
- this._mainDivId = "ui-datepicker-div"; // The ID of the main datepicker division
- this._inlineClass = "ui-datepicker-inline"; // The name of the inline marker class
- this._appendClass = "ui-datepicker-append"; // The name of the append marker class
- this._triggerClass = "ui-datepicker-trigger"; // The name of the trigger marker class
- this._dialogClass = "ui-datepicker-dialog"; // The name of the dialog marker class
- this._disableClass = "ui-datepicker-disabled"; // The name of the disabled covering marker class
- this._unselectableClass = "ui-datepicker-unselectable"; // The name of the unselectable cell marker class
- this._currentClass = "ui-datepicker-current-day"; // The name of the current day marker class
- this._dayOverClass = "ui-datepicker-days-cell-over"; // The name of the day hover marker class
- this.regional = []; // Available regional settings, indexed by language code
- this.regional[""] = { // Default regional settings
- closeText: "Done", // Display text for close link
- prevText: "Prev", // Display text for previous month link
- nextText: "Next", // Display text for next month link
- currentText: "Today", // Display text for current month link
- monthNames: ["January","February","March","April","May","June",
- "July","August","September","October","November","December"], // Names of months for drop-down and formatting
- monthNamesShort: ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"], // For formatting
- dayNames: ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"], // For formatting
- dayNamesShort: ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"], // For formatting
- dayNamesMin: ["Su","Mo","Tu","We","Th","Fr","Sa"], // Column headings for days starting at Sunday
- weekHeader: "Wk", // Column header for week of the year
- dateFormat: "mm/dd/yy", // See format options on parseDate
- firstDay: 0, // The first day of the week, Sun = 0, Mon = 1, ...
- isRTL: false, // True if right-to-left language, false if left-to-right
- showMonthAfterYear: false, // True if the year select precedes month, false for month then year
- yearSuffix: "" // Additional text to append to the year in the month headers
- };
- this._defaults = { // Global defaults for all the date picker instances
- showOn: "focus", // "focus" for popup on focus,
- // "button" for trigger button, or "both" for either
- showAnim: "fadeIn", // Name of jQuery animation for popup
- showOptions: {}, // Options for enhanced animations
- defaultDate: null, // Used when field is blank: actual date,
- // +/-number for offset from today, null for today
- appendText: "", // Display text following the input box, e.g. showing the format
- buttonText: "...", // Text for trigger button
- buttonImage: "", // URL for trigger button image
- buttonImageOnly: false, // True if the image appears alone, false if it appears on a button
- hideIfNoPrevNext: false, // True to hide next/previous month links
- // if not applicable, false to just disable them
- navigationAsDateFormat: false, // True if date formatting applied to prev/today/next links
- gotoCurrent: false, // True if today link goes back to current selection instead
- changeMonth: false, // True if month can be selected directly, false if only prev/next
- changeYear: false, // True if year can be selected directly, false if only prev/next
- yearRange: "c-10:c+10", // Range of years to display in drop-down,
- // either relative to today's year (-nn:+nn), relative to currently displayed year
- // (c-nn:c+nn), absolute (nnnn:nnnn), or a combination of the above (nnnn:-n)
- showOtherMonths: false, // True to show dates in other months, false to leave blank
- selectOtherMonths: false, // True to allow selection of dates in other months, false for unselectable
- showWeek: false, // True to show week of the year, false to not show it
- calculateWeek: this.iso8601Week, // How to calculate the week of the year,
- // takes a Date and returns the number of the week for it
- shortYearCutoff: "+10", // Short year values < this are in the current century,
- // > this are in the previous century,
- // string value starting with "+" for current year + value
- minDate: null, // The earliest selectable date, or null for no limit
- maxDate: null, // The latest selectable date, or null for no limit
- duration: "fast", // Duration of display/closure
- beforeShowDay: null, // Function that takes a date and returns an array with
- // [0] = true if selectable, false if not, [1] = custom CSS class name(s) or "",
- // [2] = cell title (optional), e.g. $.datepicker.noWeekends
- beforeShow: null, // Function that takes an input field and
- // returns a set of custom settings for the date picker
- onSelect: null, // Define a callback function when a date is selected
- onChangeMonthYear: null, // Define a callback function when the month or year is changed
- onClose: null, // Define a callback function when the datepicker is closed
- numberOfMonths: 1, // Number of months to show at a time
-		showCurrentAtPos: 0, // The position in multiple months at which to show the current month (starting at 0)
- stepMonths: 1, // Number of months to step back/forward
- stepBigMonths: 12, // Number of months to step back/forward for the big links
- altField: "", // Selector for an alternate field to store selected dates into
- altFormat: "", // The date format to use for the alternate field
- constrainInput: true, // The input is constrained by the current date format
- showButtonPanel: false, // True to show button panel, false to not show it
- autoSize: false, // True to size the input for the date format, false to leave as is
- disabled: false // The initial disabled state
- };
- $.extend(this._defaults, this.regional[""]);
- this.dpDiv = bindHover($("<div id='" + this._mainDivId + "' class='ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all'></div>"));
-}
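// Illustrative sketch, not from the removed source: typical initialisation overriding a few
// of the defaults documented above; "#start-date" is a hypothetical input element.
$( "#start-date" ).datepicker({
	dateFormat: "yy-mm-dd",  // ISO-style format instead of the "mm/dd/yy" regional default
	changeMonth: true,       // month drop-down
	changeYear: true,        // year drop-down
	numberOfMonths: 2,       // show two months side by side
	showButtonPanel: true    // Today/Done button panel
});
// Defaults for all instances can be changed through the $.datepicker manager singleton:
$.datepicker.setDefaults({ firstDay: 1 });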
-
-$.extend(Datepicker.prototype, {
- /* Class name added to elements to indicate already configured with a date picker. */
- markerClassName: "hasDatepicker",
-
- //Keep track of the maximum number of rows displayed (see #7043)
- maxRows: 4,
-
- // TODO rename to "widget" when switching to widget factory
- _widgetDatepicker: function() {
- return this.dpDiv;
- },
-
- /* Override the default settings for all instances of the date picker.
- * @param settings object - the new settings to use as defaults (anonymous object)
- * @return the manager object
- */
- setDefaults: function(settings) {
- extendRemove(this._defaults, settings || {});
- return this;
- },
-
- /* Attach the date picker to a jQuery selection.
- * @param target element - the target input field or division or span
- * @param settings object - the new settings to use for this date picker instance (anonymous)
- */
- _attachDatepicker: function(target, settings) {
- var nodeName, inline, inst;
- nodeName = target.nodeName.toLowerCase();
- inline = (nodeName === "div" || nodeName === "span");
- if (!target.id) {
- this.uuid += 1;
- target.id = "dp" + this.uuid;
- }
- inst = this._newInst($(target), inline);
- inst.settings = $.extend({}, settings || {});
- if (nodeName === "input") {
- this._connectDatepicker(target, inst);
- } else if (inline) {
- this._inlineDatepicker(target, inst);
- }
- },
-
- /* Create a new instance object. */
- _newInst: function(target, inline) {
- var id = target[0].id.replace(/([^A-Za-z0-9_\-])/g, "\\\\$1"); // escape jQuery meta chars
- return {id: id, input: target, // associated target
- selectedDay: 0, selectedMonth: 0, selectedYear: 0, // current selection
- drawMonth: 0, drawYear: 0, // month being drawn
- inline: inline, // is datepicker inline or not
- dpDiv: (!inline ? this.dpDiv : // presentation div
- bindHover($("<div class='" + this._inlineClass + " ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all'></div>")))};
- },
-
- /* Attach the date picker to an input field. */
- _connectDatepicker: function(target, inst) {
- var input = $(target);
- inst.append = $([]);
- inst.trigger = $([]);
- if (input.hasClass(this.markerClassName)) {
- return;
- }
- this._attachments(input, inst);
- input.addClass(this.markerClassName).keydown(this._doKeyDown).
- keypress(this._doKeyPress).keyup(this._doKeyUp);
- this._autoSize(inst);
- $.data(target, PROP_NAME, inst);
- //If disabled option is true, disable the datepicker once it has been attached to the input (see ticket #5665)
- if( inst.settings.disabled ) {
- this._disableDatepicker( target );
- }
- },
-
- /* Make attachments based on settings. */
- _attachments: function(input, inst) {
- var showOn, buttonText, buttonImage,
- appendText = this._get(inst, "appendText"),
- isRTL = this._get(inst, "isRTL");
-
- if (inst.append) {
- inst.append.remove();
- }
- if (appendText) {
- inst.append = $("<span class='" + this._appendClass + "'>" + appendText + "</span>");
- input[isRTL ? "before" : "after"](inst.append);
- }
-
- input.unbind("focus", this._showDatepicker);
-
- if (inst.trigger) {
- inst.trigger.remove();
- }
-
- showOn = this._get(inst, "showOn");
- if (showOn === "focus" || showOn === "both") { // pop-up date picker when in the marked field
- input.focus(this._showDatepicker);
- }
- if (showOn === "button" || showOn === "both") { // pop-up date picker when button clicked
- buttonText = this._get(inst, "buttonText");
- buttonImage = this._get(inst, "buttonImage");
- inst.trigger = $(this._get(inst, "buttonImageOnly") ?
- $("<img/>").addClass(this._triggerClass).
- attr({ src: buttonImage, alt: buttonText, title: buttonText }) :
- $("<button type='button'></button>").addClass(this._triggerClass).
- html(!buttonImage ? buttonText : $("<img/>").attr(
- { src:buttonImage, alt:buttonText, title:buttonText })));
- input[isRTL ? "before" : "after"](inst.trigger);
- inst.trigger.click(function() {
- if ($.datepicker._datepickerShowing && $.datepicker._lastInput === input[0]) {
- $.datepicker._hideDatepicker();
- } else if ($.datepicker._datepickerShowing && $.datepicker._lastInput !== input[0]) {
- $.datepicker._hideDatepicker();
- $.datepicker._showDatepicker(input[0]);
- } else {
- $.datepicker._showDatepicker(input[0]);
- }
- return false;
- });
- }
- },
-
- /* Apply the maximum length for the date format. */
- _autoSize: function(inst) {
- if (this._get(inst, "autoSize") && !inst.inline) {
- var findMax, max, maxI, i,
- date = new Date(2009, 12 - 1, 20), // Ensure double digits
- dateFormat = this._get(inst, "dateFormat");
-
- if (dateFormat.match(/[DM]/)) {
- findMax = function(names) {
- max = 0;
- maxI = 0;
- for (i = 0; i < names.length; i++) {
- if (names[i].length > max) {
- max = names[i].length;
- maxI = i;
- }
- }
- return maxI;
- };
- date.setMonth(findMax(this._get(inst, (dateFormat.match(/MM/) ?
- "monthNames" : "monthNamesShort"))));
- date.setDate(findMax(this._get(inst, (dateFormat.match(/DD/) ?
- "dayNames" : "dayNamesShort"))) + 20 - date.getDay());
- }
- inst.input.attr("size", this._formatDate(inst, date).length);
- }
- },
-
- /* Attach an inline date picker to a div. */
- _inlineDatepicker: function(target, inst) {
- var divSpan = $(target);
- if (divSpan.hasClass(this.markerClassName)) {
- return;
- }
- divSpan.addClass(this.markerClassName).append(inst.dpDiv);
- $.data(target, PROP_NAME, inst);
- this._setDate(inst, this._getDefaultDate(inst), true);
- this._updateDatepicker(inst);
- this._updateAlternate(inst);
- //If disabled option is true, disable the datepicker before showing it (see ticket #5665)
- if( inst.settings.disabled ) {
- this._disableDatepicker( target );
- }
- // Set display:block in place of inst.dpDiv.show() which won't work on disconnected elements
- // http://bugs.jqueryui.com/ticket/7552 - A Datepicker created on a detached div has zero height
- inst.dpDiv.css( "display", "block" );
- },
-
- /* Pop-up the date picker in a "dialog" box.
- * @param input element - ignored
- * @param date string or Date - the initial date to display
- * @param onSelect function - the function to call when a date is selected
- * @param settings object - update the dialog date picker instance's settings (anonymous object)
- * @param pos int[2] - coordinates for the dialog's position within the screen or
- * event - with x/y coordinates or
- * leave empty for default (screen centre)
- * @return the manager object
- */
- _dialogDatepicker: function(input, date, onSelect, settings, pos) {
- var id, browserWidth, browserHeight, scrollX, scrollY,
- inst = this._dialogInst; // internal instance
-
- if (!inst) {
- this.uuid += 1;
- id = "dp" + this.uuid;
- this._dialogInput = $("<input type='text' id='" + id +
- "' style='position: absolute; top: -100px; width: 0px;'/>");
- this._dialogInput.keydown(this._doKeyDown);
- $("body").append(this._dialogInput);
- inst = this._dialogInst = this._newInst(this._dialogInput, false);
- inst.settings = {};
- $.data(this._dialogInput[0], PROP_NAME, inst);
- }
- extendRemove(inst.settings, settings || {});
- date = (date && date.constructor === Date ? this._formatDate(inst, date) : date);
- this._dialogInput.val(date);
-
- this._pos = (pos ? (pos.length ? pos : [pos.pageX, pos.pageY]) : null);
- if (!this._pos) {
- browserWidth = document.documentElement.clientWidth;
- browserHeight = document.documentElement.clientHeight;
- scrollX = document.documentElement.scrollLeft || document.body.scrollLeft;
- scrollY = document.documentElement.scrollTop || document.body.scrollTop;
- this._pos = // should use actual width/height below
- [(browserWidth / 2) - 100 + scrollX, (browserHeight / 2) - 150 + scrollY];
- }
-
- // move input on screen for focus, but hidden behind dialog
- this._dialogInput.css("left", (this._pos[0] + 20) + "px").css("top", this._pos[1] + "px");
- inst.settings.onSelect = onSelect;
- this._inDialog = true;
- this.dpDiv.addClass(this._dialogClass);
- this._showDatepicker(this._dialogInput[0]);
- if ($.blockUI) {
- $.blockUI(this.dpDiv);
- }
- $.data(this._dialogInput[0], PROP_NAME, inst);
- return this;
- },
-
- /* Detach a datepicker from its control.
- * @param target element - the target input field or division or span
- */
- _destroyDatepicker: function(target) {
- var nodeName,
- $target = $(target),
- inst = $.data(target, PROP_NAME);
-
- if (!$target.hasClass(this.markerClassName)) {
- return;
- }
-
- nodeName = target.nodeName.toLowerCase();
- $.removeData(target, PROP_NAME);
- if (nodeName === "input") {
- inst.append.remove();
- inst.trigger.remove();
- $target.removeClass(this.markerClassName).
- unbind("focus", this._showDatepicker).
- unbind("keydown", this._doKeyDown).
- unbind("keypress", this._doKeyPress).
- unbind("keyup", this._doKeyUp);
- } else if (nodeName === "div" || nodeName === "span") {
- $target.removeClass(this.markerClassName).empty();
- }
- },
-
-	/* Enable the date picker for a jQuery selection.
- * @param target element - the target input field or division or span
- */
- _enableDatepicker: function(target) {
- var nodeName, inline,
- $target = $(target),
- inst = $.data(target, PROP_NAME);
-
- if (!$target.hasClass(this.markerClassName)) {
- return;
- }
-
- nodeName = target.nodeName.toLowerCase();
- if (nodeName === "input") {
- target.disabled = false;
- inst.trigger.filter("button").
- each(function() { this.disabled = false; }).end().
- filter("img").css({opacity: "1.0", cursor: ""});
- } else if (nodeName === "div" || nodeName === "span") {
- inline = $target.children("." + this._inlineClass);
- inline.children().removeClass("ui-state-disabled");
- inline.find("select.ui-datepicker-month, select.ui-datepicker-year").
- prop("disabled", false);
- }
- this._disabledInputs = $.map(this._disabledInputs,
- function(value) { return (value === target ? null : value); }); // delete entry
- },
-
-	/* Disable the date picker for a jQuery selection.
- * @param target element - the target input field or division or span
- */
- _disableDatepicker: function(target) {
- var nodeName, inline,
- $target = $(target),
- inst = $.data(target, PROP_NAME);
-
- if (!$target.hasClass(this.markerClassName)) {
- return;
- }
-
- nodeName = target.nodeName.toLowerCase();
- if (nodeName === "input") {
- target.disabled = true;
- inst.trigger.filter("button").
- each(function() { this.disabled = true; }).end().
- filter("img").css({opacity: "0.5", cursor: "default"});
- } else if (nodeName === "div" || nodeName === "span") {
- inline = $target.children("." + this._inlineClass);
- inline.children().addClass("ui-state-disabled");
- inline.find("select.ui-datepicker-month, select.ui-datepicker-year").
- prop("disabled", true);
- }
- this._disabledInputs = $.map(this._disabledInputs,
- function(value) { return (value === target ? null : value); }); // delete entry
- this._disabledInputs[this._disabledInputs.length] = target;
- },
-
- /* Is the first field in a jQuery collection disabled as a datepicker?
- * @param target element - the target input field or division or span
- * @return boolean - true if disabled, false if enabled
- */
- _isDisabledDatepicker: function(target) {
- if (!target) {
- return false;
- }
- for (var i = 0; i < this._disabledInputs.length; i++) {
- if (this._disabledInputs[i] === target) {
- return true;
- }
- }
- return false;
- },
-
- /* Retrieve the instance data for the target control.
- * @param target element - the target input field or division or span
- * @return object - the associated instance data
-	 * @throws error if there is a jQuery problem getting the data
- */
- _getInst: function(target) {
- try {
- return $.data(target, PROP_NAME);
- }
- catch (err) {
- throw "Missing instance data for this datepicker";
- }
- },
-
- /* Update or retrieve the settings for a date picker attached to an input field or division.
- * @param target element - the target input field or division or span
- * @param name object - the new settings to update or
- * string - the name of the setting to change or retrieve,
- * when retrieving also "all" for all instance settings or
- * "defaults" for all global defaults
- * @param value any - the new value for the setting
- * (omit if above is an object or to retrieve a value)
- */
- _optionDatepicker: function(target, name, value) {
- var settings, date, minDate, maxDate,
- inst = this._getInst(target);
-
- if (arguments.length === 2 && typeof name === "string") {
- return (name === "defaults" ? $.extend({}, $.datepicker._defaults) :
- (inst ? (name === "all" ? $.extend({}, inst.settings) :
- this._get(inst, name)) : null));
- }
-
- settings = name || {};
- if (typeof name === "string") {
- settings = {};
- settings[name] = value;
- }
-
- if (inst) {
- if (this._curInst === inst) {
- this._hideDatepicker();
- }
-
- date = this._getDateDatepicker(target, true);
- minDate = this._getMinMaxDate(inst, "min");
- maxDate = this._getMinMaxDate(inst, "max");
- extendRemove(inst.settings, settings);
- // reformat the old minDate/maxDate values if dateFormat changes and a new minDate/maxDate isn't provided
- if (minDate !== null && settings.dateFormat !== undefined && settings.minDate === undefined) {
- inst.settings.minDate = this._formatDate(inst, minDate);
- }
- if (maxDate !== null && settings.dateFormat !== undefined && settings.maxDate === undefined) {
- inst.settings.maxDate = this._formatDate(inst, maxDate);
- }
- if ( "disabled" in settings ) {
- if ( settings.disabled ) {
- this._disableDatepicker(target);
- } else {
- this._enableDatepicker(target);
- }
- }
- this._attachments($(target), inst);
- this._autoSize(inst);
- this._setDate(inst, date);
- this._updateAlternate(inst);
- this._updateDatepicker(inst);
- }
- },
-
- // change method deprecated
- _changeDatepicker: function(target, name, value) {
- this._optionDatepicker(target, name, value);
- },
-
- /* Redraw the date picker attached to an input field or division.
- * @param target element - the target input field or division or span
- */
- _refreshDatepicker: function(target) {
- var inst = this._getInst(target);
- if (inst) {
- this._updateDatepicker(inst);
- }
- },
-
- /* Set the dates for a jQuery selection.
- * @param target element - the target input field or division or span
- * @param date Date - the new date
- */
- _setDateDatepicker: function(target, date) {
- var inst = this._getInst(target);
- if (inst) {
- this._setDate(inst, date);
- this._updateDatepicker(inst);
- this._updateAlternate(inst);
- }
- },
-
- /* Get the date(s) for the first entry in a jQuery selection.
- * @param target element - the target input field or division or span
- * @param noDefault boolean - true if no default date is to be used
- * @return Date - the current date
- */
- _getDateDatepicker: function(target, noDefault) {
- var inst = this._getInst(target);
- if (inst && !inst.inline) {
- this._setDateFromField(inst, noDefault);
- }
- return (inst ? this._getDate(inst) : null);
- },
-
- /* Handle keystrokes. */
- _doKeyDown: function(event) {
- var onSelect, dateStr, sel,
- inst = $.datepicker._getInst(event.target),
- handled = true,
- isRTL = inst.dpDiv.is(".ui-datepicker-rtl");
-
- inst._keyEvent = true;
- if ($.datepicker._datepickerShowing) {
- switch (event.keyCode) {
- case 9: $.datepicker._hideDatepicker();
- handled = false;
- break; // hide on tab out
- case 13: sel = $("td." + $.datepicker._dayOverClass + ":not(." +
- $.datepicker._currentClass + ")", inst.dpDiv);
- if (sel[0]) {
- $.datepicker._selectDay(event.target, inst.selectedMonth, inst.selectedYear, sel[0]);
- }
-
- onSelect = $.datepicker._get(inst, "onSelect");
- if (onSelect) {
- dateStr = $.datepicker._formatDate(inst);
-
- // trigger custom callback
- onSelect.apply((inst.input ? inst.input[0] : null), [dateStr, inst]);
- } else {
- $.datepicker._hideDatepicker();
- }
-
- return false; // don't submit the form
- case 27: $.datepicker._hideDatepicker();
- break; // hide on escape
- case 33: $.datepicker._adjustDate(event.target, (event.ctrlKey ?
- -$.datepicker._get(inst, "stepBigMonths") :
- -$.datepicker._get(inst, "stepMonths")), "M");
- break; // previous month/year on page up/+ ctrl
- case 34: $.datepicker._adjustDate(event.target, (event.ctrlKey ?
- +$.datepicker._get(inst, "stepBigMonths") :
- +$.datepicker._get(inst, "stepMonths")), "M");
- break; // next month/year on page down/+ ctrl
- case 35: if (event.ctrlKey || event.metaKey) {
- $.datepicker._clearDate(event.target);
- }
- handled = event.ctrlKey || event.metaKey;
- break; // clear on ctrl or command +end
- case 36: if (event.ctrlKey || event.metaKey) {
- $.datepicker._gotoToday(event.target);
- }
- handled = event.ctrlKey || event.metaKey;
- break; // current on ctrl or command +home
- case 37: if (event.ctrlKey || event.metaKey) {
- $.datepicker._adjustDate(event.target, (isRTL ? +1 : -1), "D");
- }
- handled = event.ctrlKey || event.metaKey;
- // -1 day on ctrl or command +left
- if (event.originalEvent.altKey) {
- $.datepicker._adjustDate(event.target, (event.ctrlKey ?
- -$.datepicker._get(inst, "stepBigMonths") :
- -$.datepicker._get(inst, "stepMonths")), "M");
- }
-					// previous month/year on alt + left on Mac
- break;
- case 38: if (event.ctrlKey || event.metaKey) {
- $.datepicker._adjustDate(event.target, -7, "D");
- }
- handled = event.ctrlKey || event.metaKey;
- break; // -1 week on ctrl or command +up
- case 39: if (event.ctrlKey || event.metaKey) {
- $.datepicker._adjustDate(event.target, (isRTL ? -1 : +1), "D");
- }
- handled = event.ctrlKey || event.metaKey;
- // +1 day on ctrl or command +right
- if (event.originalEvent.altKey) {
- $.datepicker._adjustDate(event.target, (event.ctrlKey ?
- +$.datepicker._get(inst, "stepBigMonths") :
- +$.datepicker._get(inst, "stepMonths")), "M");
- }
- // next month/year on alt +right
- break;
- case 40: if (event.ctrlKey || event.metaKey) {
- $.datepicker._adjustDate(event.target, +7, "D");
- }
- handled = event.ctrlKey || event.metaKey;
- break; // +1 week on ctrl or command +down
- default: handled = false;
- }
- } else if (event.keyCode === 36 && event.ctrlKey) { // display the date picker on ctrl+home
- $.datepicker._showDatepicker(this);
- } else {
- handled = false;
- }
-
- if (handled) {
- event.preventDefault();
- event.stopPropagation();
- }
- },
-
- /* Filter entered characters - based on date format. */
- _doKeyPress: function(event) {
- var chars, chr,
- inst = $.datepicker._getInst(event.target);
-
- if ($.datepicker._get(inst, "constrainInput")) {
- chars = $.datepicker._possibleChars($.datepicker._get(inst, "dateFormat"));
- chr = String.fromCharCode(event.charCode == null ? event.keyCode : event.charCode);
- return event.ctrlKey || event.metaKey || (chr < " " || !chars || chars.indexOf(chr) > -1);
- }
- },
-
- /* Synchronise manual entry and field/alternate field. */
- _doKeyUp: function(event) {
- var date,
- inst = $.datepicker._getInst(event.target);
-
- if (inst.input.val() !== inst.lastVal) {
- try {
- date = $.datepicker.parseDate($.datepicker._get(inst, "dateFormat"),
- (inst.input ? inst.input.val() : null),
- $.datepicker._getFormatConfig(inst));
-
- if (date) { // only if valid
- $.datepicker._setDateFromField(inst);
- $.datepicker._updateAlternate(inst);
- $.datepicker._updateDatepicker(inst);
- }
- }
- catch (err) {
- }
- }
- return true;
- },
-
- /* Pop-up the date picker for a given input field.
-	 * If false is returned from the beforeShow event handler, do not show the picker.
- * @param input element - the input field attached to the date picker or
- * event - if triggered by focus
- */
- _showDatepicker: function(input) {
- input = input.target || input;
- if (input.nodeName.toLowerCase() !== "input") { // find from button/image trigger
- input = $("input", input.parentNode)[0];
- }
-
- if ($.datepicker._isDisabledDatepicker(input) || $.datepicker._lastInput === input) { // already here
- return;
- }
-
- var inst, beforeShow, beforeShowSettings, isFixed,
- offset, showAnim, duration;
-
- inst = $.datepicker._getInst(input);
- if ($.datepicker._curInst && $.datepicker._curInst !== inst) {
- $.datepicker._curInst.dpDiv.stop(true, true);
- if ( inst && $.datepicker._datepickerShowing ) {
- $.datepicker._hideDatepicker( $.datepicker._curInst.input[0] );
- }
- }
-
- beforeShow = $.datepicker._get(inst, "beforeShow");
- beforeShowSettings = beforeShow ? beforeShow.apply(input, [input, inst]) : {};
- if(beforeShowSettings === false){
- return;
- }
- extendRemove(inst.settings, beforeShowSettings);
-
- inst.lastVal = null;
- $.datepicker._lastInput = input;
- $.datepicker._setDateFromField(inst);
-
- if ($.datepicker._inDialog) { // hide cursor
- input.value = "";
- }
- if (!$.datepicker._pos) { // position below input
- $.datepicker._pos = $.datepicker._findPos(input);
- $.datepicker._pos[1] += input.offsetHeight; // add the height
- }
-
- isFixed = false;
- $(input).parents().each(function() {
- isFixed |= $(this).css("position") === "fixed";
- return !isFixed;
- });
-
- offset = {left: $.datepicker._pos[0], top: $.datepicker._pos[1]};
- $.datepicker._pos = null;
- //to avoid flashes on Firefox
- inst.dpDiv.empty();
- // determine sizing offscreen
- inst.dpDiv.css({position: "absolute", display: "block", top: "-1000px"});
- $.datepicker._updateDatepicker(inst);
- // fix width for dynamic number of date pickers
- // and adjust position before showing
- offset = $.datepicker._checkOffset(inst, offset, isFixed);
- inst.dpDiv.css({position: ($.datepicker._inDialog && $.blockUI ?
- "static" : (isFixed ? "fixed" : "absolute")), display: "none",
- left: offset.left + "px", top: offset.top + "px"});
-
- if (!inst.inline) {
- showAnim = $.datepicker._get(inst, "showAnim");
- duration = $.datepicker._get(inst, "duration");
- inst.dpDiv.zIndex($(input).zIndex()+1);
- $.datepicker._datepickerShowing = true;
-
- if ( $.effects && $.effects.effect[ showAnim ] ) {
- inst.dpDiv.show(showAnim, $.datepicker._get(inst, "showOptions"), duration);
- } else {
- inst.dpDiv[showAnim || "show"](showAnim ? duration : null);
- }
-
- if ( $.datepicker._shouldFocusInput( inst ) ) {
- inst.input.focus();
- }
-
- $.datepicker._curInst = inst;
- }
- },
-
- /* Generate the date picker content. */
- _updateDatepicker: function(inst) {
- this.maxRows = 4; //Reset the max number of rows being displayed (see #7043)
- instActive = inst; // for delegate hover events
- inst.dpDiv.empty().append(this._generateHTML(inst));
- this._attachHandlers(inst);
- inst.dpDiv.find("." + this._dayOverClass + " a").mouseover();
-
- var origyearshtml,
- numMonths = this._getNumberOfMonths(inst),
- cols = numMonths[1],
- width = 17;
-
- inst.dpDiv.removeClass("ui-datepicker-multi-2 ui-datepicker-multi-3 ui-datepicker-multi-4").width("");
- if (cols > 1) {
- inst.dpDiv.addClass("ui-datepicker-multi-" + cols).css("width", (width * cols) + "em");
- }
- inst.dpDiv[(numMonths[0] !== 1 || numMonths[1] !== 1 ? "add" : "remove") +
- "Class"]("ui-datepicker-multi");
- inst.dpDiv[(this._get(inst, "isRTL") ? "add" : "remove") +
- "Class"]("ui-datepicker-rtl");
-
- if (inst === $.datepicker._curInst && $.datepicker._datepickerShowing && $.datepicker._shouldFocusInput( inst ) ) {
- inst.input.focus();
- }
-
-		// deferred render of the years select (to avoid flashes on Firefox)
- if( inst.yearshtml ){
- origyearshtml = inst.yearshtml;
- setTimeout(function(){
-				// make sure inst.yearshtml didn't change
- if( origyearshtml === inst.yearshtml && inst.yearshtml ){
- inst.dpDiv.find("select.ui-datepicker-year:first").replaceWith(inst.yearshtml);
- }
- origyearshtml = inst.yearshtml = null;
- }, 0);
- }
- },
-
- // #6694 - don't focus the input if it's already focused
- // this breaks the change event in IE
- // Support: IE and jQuery <1.9
- _shouldFocusInput: function( inst ) {
- return inst.input && inst.input.is( ":visible" ) && !inst.input.is( ":disabled" ) && !inst.input.is( ":focus" );
- },
-
- /* Check positioning to remain on screen. */
- _checkOffset: function(inst, offset, isFixed) {
- var dpWidth = inst.dpDiv.outerWidth(),
- dpHeight = inst.dpDiv.outerHeight(),
- inputWidth = inst.input ? inst.input.outerWidth() : 0,
- inputHeight = inst.input ? inst.input.outerHeight() : 0,
- viewWidth = document.documentElement.clientWidth + (isFixed ? 0 : $(document).scrollLeft()),
- viewHeight = document.documentElement.clientHeight + (isFixed ? 0 : $(document).scrollTop());
-
- offset.left -= (this._get(inst, "isRTL") ? (dpWidth - inputWidth) : 0);
- offset.left -= (isFixed && offset.left === inst.input.offset().left) ? $(document).scrollLeft() : 0;
- offset.top -= (isFixed && offset.top === (inst.input.offset().top + inputHeight)) ? $(document).scrollTop() : 0;
-
- // now check if datepicker is showing outside window viewport - move to a better place if so.
- offset.left -= Math.min(offset.left, (offset.left + dpWidth > viewWidth && viewWidth > dpWidth) ?
- Math.abs(offset.left + dpWidth - viewWidth) : 0);
- offset.top -= Math.min(offset.top, (offset.top + dpHeight > viewHeight && viewHeight > dpHeight) ?
- Math.abs(dpHeight + inputHeight) : 0);
-
- return offset;
- },
-
- /* Find an object's position on the screen. */
- _findPos: function(obj) {
- var position,
- inst = this._getInst(obj),
- isRTL = this._get(inst, "isRTL");
-
- while (obj && (obj.type === "hidden" || obj.nodeType !== 1 || $.expr.filters.hidden(obj))) {
- obj = obj[isRTL ? "previousSibling" : "nextSibling"];
- }
-
- position = $(obj).offset();
- return [position.left, position.top];
- },
-
- /* Hide the date picker from view.
- * @param input element - the input field attached to the date picker
- */
- _hideDatepicker: function(input) {
- var showAnim, duration, postProcess, onClose,
- inst = this._curInst;
-
- if (!inst || (input && inst !== $.data(input, PROP_NAME))) {
- return;
- }
-
- if (this._datepickerShowing) {
- showAnim = this._get(inst, "showAnim");
- duration = this._get(inst, "duration");
- postProcess = function() {
- $.datepicker._tidyDialog(inst);
- };
-
-			// DEPRECATED: the $.effects[ showAnim ] fallback is only needed for backwards compatibility with 1.8.x
- if ( $.effects && ( $.effects.effect[ showAnim ] || $.effects[ showAnim ] ) ) {
- inst.dpDiv.hide(showAnim, $.datepicker._get(inst, "showOptions"), duration, postProcess);
- } else {
- inst.dpDiv[(showAnim === "slideDown" ? "slideUp" :
- (showAnim === "fadeIn" ? "fadeOut" : "hide"))]((showAnim ? duration : null), postProcess);
- }
-
- if (!showAnim) {
- postProcess();
- }
- this._datepickerShowing = false;
-
- onClose = this._get(inst, "onClose");
- if (onClose) {
- onClose.apply((inst.input ? inst.input[0] : null), [(inst.input ? inst.input.val() : ""), inst]);
- }
-
- this._lastInput = null;
- if (this._inDialog) {
- this._dialogInput.css({ position: "absolute", left: "0", top: "-100px" });
- if ($.blockUI) {
- $.unblockUI();
- $("body").append(this.dpDiv);
- }
- }
- this._inDialog = false;
- }
- },
-
- /* Tidy up after a dialog display. */
- _tidyDialog: function(inst) {
- inst.dpDiv.removeClass(this._dialogClass).unbind(".ui-datepicker-calendar");
- },
-
- /* Close date picker if clicked elsewhere. */
- _checkExternalClick: function(event) {
- if (!$.datepicker._curInst) {
- return;
- }
-
- var $target = $(event.target),
- inst = $.datepicker._getInst($target[0]);
-
- if ( ( ( $target[0].id !== $.datepicker._mainDivId &&
- $target.parents("#" + $.datepicker._mainDivId).length === 0 &&
- !$target.hasClass($.datepicker.markerClassName) &&
- !$target.closest("." + $.datepicker._triggerClass).length &&
- $.datepicker._datepickerShowing && !($.datepicker._inDialog && $.blockUI) ) ) ||
- ( $target.hasClass($.datepicker.markerClassName) && $.datepicker._curInst !== inst ) ) {
- $.datepicker._hideDatepicker();
- }
- },
-
- /* Adjust one of the date sub-fields. */
- _adjustDate: function(id, offset, period) {
- var target = $(id),
- inst = this._getInst(target[0]);
-
- if (this._isDisabledDatepicker(target[0])) {
- return;
- }
- this._adjustInstDate(inst, offset +
- (period === "M" ? this._get(inst, "showCurrentAtPos") : 0), // undo positioning
- period);
- this._updateDatepicker(inst);
- },
-
- /* Action for current link. */
- _gotoToday: function(id) {
- var date,
- target = $(id),
- inst = this._getInst(target[0]);
-
- if (this._get(inst, "gotoCurrent") && inst.currentDay) {
- inst.selectedDay = inst.currentDay;
- inst.drawMonth = inst.selectedMonth = inst.currentMonth;
- inst.drawYear = inst.selectedYear = inst.currentYear;
- } else {
- date = new Date();
- inst.selectedDay = date.getDate();
- inst.drawMonth = inst.selectedMonth = date.getMonth();
- inst.drawYear = inst.selectedYear = date.getFullYear();
- }
- this._notifyChange(inst);
- this._adjustDate(target);
- },
-
- /* Action for selecting a new month/year. */
- _selectMonthYear: function(id, select, period) {
- var target = $(id),
- inst = this._getInst(target[0]);
-
- inst["selected" + (period === "M" ? "Month" : "Year")] =
- inst["draw" + (period === "M" ? "Month" : "Year")] =
- parseInt(select.options[select.selectedIndex].value,10);
-
- this._notifyChange(inst);
- this._adjustDate(target);
- },
-
- /* Action for selecting a day. */
- _selectDay: function(id, month, year, td) {
- var inst,
- target = $(id);
-
- if ($(td).hasClass(this._unselectableClass) || this._isDisabledDatepicker(target[0])) {
- return;
- }
-
- inst = this._getInst(target[0]);
- inst.selectedDay = inst.currentDay = $("a", td).html();
- inst.selectedMonth = inst.currentMonth = month;
- inst.selectedYear = inst.currentYear = year;
- this._selectDate(id, this._formatDate(inst,
- inst.currentDay, inst.currentMonth, inst.currentYear));
- },
-
- /* Erase the input field and hide the date picker. */
- _clearDate: function(id) {
- var target = $(id);
- this._selectDate(target, "");
- },
-
- /* Update the input field with the selected date. */
- _selectDate: function(id, dateStr) {
- var onSelect,
- target = $(id),
- inst = this._getInst(target[0]);
-
- dateStr = (dateStr != null ? dateStr : this._formatDate(inst));
- if (inst.input) {
- inst.input.val(dateStr);
- }
- this._updateAlternate(inst);
-
- onSelect = this._get(inst, "onSelect");
- if (onSelect) {
- onSelect.apply((inst.input ? inst.input[0] : null), [dateStr, inst]); // trigger custom callback
- } else if (inst.input) {
- inst.input.trigger("change"); // fire the change event
- }
-
- if (inst.inline){
- this._updateDatepicker(inst);
- } else {
- this._hideDatepicker();
- this._lastInput = inst.input[0];
- if (typeof(inst.input[0]) !== "object") {
- inst.input.focus(); // restore focus
- }
- this._lastInput = null;
- }
- },
-
- /* Update any alternate field to synchronise with the main field. */
- _updateAlternate: function(inst) {
- var altFormat, date, dateStr,
- altField = this._get(inst, "altField");
-
- if (altField) { // update alternate field too
- altFormat = this._get(inst, "altFormat") || this._get(inst, "dateFormat");
- date = this._getDate(inst);
- dateStr = this.formatDate(altFormat, date, this._getFormatConfig(inst));
- $(altField).each(function() { $(this).val(dateStr); });
- }
- },
-
- /* Set as beforeShowDay function to prevent selection of weekends.
- * @param date Date - the date to customise
- * @return [boolean, string] - is this date selectable?, what is its CSS class?
- */
- noWeekends: function(date) {
- var day = date.getDay();
- return [(day > 0 && day < 6), ""];
- },
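// Illustrative sketch, not from the removed source: noWeekends() is meant to be passed as
// the beforeShowDay option so Saturdays and Sundays become unselectable;
// "#delivery" is a hypothetical input.
$( "#delivery" ).datepicker({
	beforeShowDay: $.datepicker.noWeekends
});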
-
- /* Set as calculateWeek to determine the week of the year based on the ISO 8601 definition.
- * @param date Date - the date to get the week for
- * @return number - the number of the week within the year that contains this date
- */
- iso8601Week: function(date) {
- var time,
- checkDate = new Date(date.getTime());
-
- // Find Thursday of this week starting on Monday
- checkDate.setDate(checkDate.getDate() + 4 - (checkDate.getDay() || 7));
-
- time = checkDate.getTime();
- checkDate.setMonth(0); // Compare with Jan 1
- checkDate.setDate(1);
- return Math.floor(Math.round((time - checkDate) / 86400000) / 7) + 1;
- },
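// Illustrative sketch, not from the removed source: iso8601Week() can also be called
// directly; 1 January 2014 falls on a Wednesday, so it lies in ISO week 1 of that year.
var week = $.datepicker.iso8601Week( new Date( 2014, 0, 1 ) ); // 1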
-
- /* Parse a string value into a date object.
- * See formatDate below for the possible formats.
- *
- * @param format string - the expected format of the date
- * @param value string - the date in the above format
- * @param settings Object - attributes include:
- * shortYearCutoff number - the cutoff year for determining the century (optional)
- * dayNamesShort string[7] - abbreviated names of the days from Sunday (optional)
- * dayNames string[7] - names of the days from Sunday (optional)
- * monthNamesShort string[12] - abbreviated names of the months (optional)
- * monthNames string[12] - names of the months (optional)
- * @return Date - the extracted date value or null if value is blank
- */
- parseDate: function (format, value, settings) {
- if (format == null || value == null) {
- throw "Invalid arguments";
- }
-
- value = (typeof value === "object" ? value.toString() : value + "");
- if (value === "") {
- return null;
- }
-
- var iFormat, dim, extra,
- iValue = 0,
- shortYearCutoffTemp = (settings ? settings.shortYearCutoff : null) || this._defaults.shortYearCutoff,
- shortYearCutoff = (typeof shortYearCutoffTemp !== "string" ? shortYearCutoffTemp :
- new Date().getFullYear() % 100 + parseInt(shortYearCutoffTemp, 10)),
- dayNamesShort = (settings ? settings.dayNamesShort : null) || this._defaults.dayNamesShort,
- dayNames = (settings ? settings.dayNames : null) || this._defaults.dayNames,
- monthNamesShort = (settings ? settings.monthNamesShort : null) || this._defaults.monthNamesShort,
- monthNames = (settings ? settings.monthNames : null) || this._defaults.monthNames,
- year = -1,
- month = -1,
- day = -1,
- doy = -1,
- literal = false,
- date,
- // Check whether a format character is doubled
- lookAhead = function(match) {
- var matches = (iFormat + 1 < format.length && format.charAt(iFormat + 1) === match);
- if (matches) {
- iFormat++;
- }
- return matches;
- },
- // Extract a number from the string value
- getNumber = function(match) {
- var isDoubled = lookAhead(match),
- size = (match === "@" ? 14 : (match === "!" ? 20 :
- (match === "y" && isDoubled ? 4 : (match === "o" ? 3 : 2)))),
- digits = new RegExp("^\\d{1," + size + "}"),
- num = value.substring(iValue).match(digits);
- if (!num) {
- throw "Missing number at position " + iValue;
- }
- iValue += num[0].length;
- return parseInt(num[0], 10);
- },
- // Extract a name from the string value and convert to an index
- getName = function(match, shortNames, longNames) {
- var index = -1,
- names = $.map(lookAhead(match) ? longNames : shortNames, function (v, k) {
- return [ [k, v] ];
- }).sort(function (a, b) {
- return -(a[1].length - b[1].length);
- });
-
- $.each(names, function (i, pair) {
- var name = pair[1];
- if (value.substr(iValue, name.length).toLowerCase() === name.toLowerCase()) {
- index = pair[0];
- iValue += name.length;
- return false;
- }
- });
- if (index !== -1) {
- return index + 1;
- } else {
- throw "Unknown name at position " + iValue;
- }
- },
- // Confirm that a literal character matches the string value
- checkLiteral = function() {
- if (value.charAt(iValue) !== format.charAt(iFormat)) {
- throw "Unexpected literal at position " + iValue;
- }
- iValue++;
- };
-
- for (iFormat = 0; iFormat < format.length; iFormat++) {
- if (literal) {
- if (format.charAt(iFormat) === "'" && !lookAhead("'")) {
- literal = false;
- } else {
- checkLiteral();
- }
- } else {
- switch (format.charAt(iFormat)) {
- case "d":
- day = getNumber("d");
- break;
- case "D":
- getName("D", dayNamesShort, dayNames);
- break;
- case "o":
- doy = getNumber("o");
- break;
- case "m":
- month = getNumber("m");
- break;
- case "M":
- month = getName("M", monthNamesShort, monthNames);
- break;
- case "y":
- year = getNumber("y");
- break;
- case "@":
- date = new Date(getNumber("@"));
- year = date.getFullYear();
- month = date.getMonth() + 1;
- day = date.getDate();
- break;
- case "!":
- date = new Date((getNumber("!") - this._ticksTo1970) / 10000);
- year = date.getFullYear();
- month = date.getMonth() + 1;
- day = date.getDate();
- break;
- case "'":
- if (lookAhead("'")){
- checkLiteral();
- } else {
- literal = true;
- }
- break;
- default:
- checkLiteral();
- }
- }
- }
-
- if (iValue < value.length){
- extra = value.substr(iValue);
- if (!/^\s+/.test(extra)) {
- throw "Extra/unparsed characters found in date: " + extra;
- }
- }
-
- if (year === -1) {
- year = new Date().getFullYear();
- } else if (year < 100) {
- year += new Date().getFullYear() - new Date().getFullYear() % 100 +
- (year <= shortYearCutoff ? 0 : -100);
- }
-
- if (doy > -1) {
- month = 1;
- day = doy;
- do {
- dim = this._getDaysInMonth(year, month - 1);
- if (day <= dim) {
- break;
- }
- month++;
- day -= dim;
- } while (true);
- }
-
- date = this._daylightSavingAdjust(new Date(year, month - 1, day));
- if (date.getFullYear() !== year || date.getMonth() + 1 !== month || date.getDate() !== day) {
- throw "Invalid date"; // E.g. 31/02/00
- }
- return date;
- },
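// Illustrative sketch, not from the removed source: parsing a string with the format
// characters documented for formatDate below; invalid input throws a string error.
var parsed = $.datepicker.parseDate( "dd/mm/yy", "25/12/2014" ); // Date for 25 Dec 2014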
-
- /* Standard date formats. */
- ATOM: "yy-mm-dd", // RFC 3339 (ISO 8601)
- COOKIE: "D, dd M yy",
- ISO_8601: "yy-mm-dd",
- RFC_822: "D, d M y",
- RFC_850: "DD, dd-M-y",
- RFC_1036: "D, d M y",
- RFC_1123: "D, d M yy",
- RFC_2822: "D, d M yy",
- RSS: "D, d M y", // RFC 822
- TICKS: "!",
- TIMESTAMP: "@",
- W3C: "yy-mm-dd", // ISO 8601
-
- _ticksTo1970: (((1970 - 1) * 365 + Math.floor(1970 / 4) - Math.floor(1970 / 100) +
- Math.floor(1970 / 400)) * 24 * 60 * 60 * 10000000),
-
- /* Format a date object into a string value.
- * The format can be combinations of the following:
- * d - day of month (no leading zero)
- * dd - day of month (two digit)
- * o - day of year (no leading zeros)
- * oo - day of year (three digit)
- * D - day name short
- * DD - day name long
- * m - month of year (no leading zero)
- * mm - month of year (two digit)
- * M - month name short
- * MM - month name long
- * y - year (two digit)
- * yy - year (four digit)
- * @ - Unix timestamp (ms since 01/01/1970)
- * ! - Windows ticks (100ns since 01/01/0001)
- * "..." - literal text
- * '' - single quote
- *
- * @param format string - the desired format of the date
- * @param date Date - the date value to format
- * @param settings Object - attributes include:
- * dayNamesShort string[7] - abbreviated names of the days from Sunday (optional)
- * dayNames string[7] - names of the days from Sunday (optional)
- * monthNamesShort string[12] - abbreviated names of the months (optional)
- * monthNames string[12] - names of the months (optional)
- * @return string - the date in the above format
- */
- formatDate: function (format, date, settings) {
- if (!date) {
- return "";
- }
-
- var iFormat,
- dayNamesShort = (settings ? settings.dayNamesShort : null) || this._defaults.dayNamesShort,
- dayNames = (settings ? settings.dayNames : null) || this._defaults.dayNames,
- monthNamesShort = (settings ? settings.monthNamesShort : null) || this._defaults.monthNamesShort,
- monthNames = (settings ? settings.monthNames : null) || this._defaults.monthNames,
- // Check whether a format character is doubled
- lookAhead = function(match) {
- var matches = (iFormat + 1 < format.length && format.charAt(iFormat + 1) === match);
- if (matches) {
- iFormat++;
- }
- return matches;
- },
- // Format a number, with leading zero if necessary
- formatNumber = function(match, value, len) {
- var num = "" + value;
- if (lookAhead(match)) {
- while (num.length < len) {
- num = "0" + num;
- }
- }
- return num;
- },
- // Format a name, short or long as requested
- formatName = function(match, value, shortNames, longNames) {
- return (lookAhead(match) ? longNames[value] : shortNames[value]);
- },
- output = "",
- literal = false;
-
- if (date) {
- for (iFormat = 0; iFormat < format.length; iFormat++) {
- if (literal) {
- if (format.charAt(iFormat) === "'" && !lookAhead("'")) {
- literal = false;
- } else {
- output += format.charAt(iFormat);
- }
- } else {
- switch (format.charAt(iFormat)) {
- case "d":
- output += formatNumber("d", date.getDate(), 2);
- break;
- case "D":
- output += formatName("D", date.getDay(), dayNamesShort, dayNames);
- break;
- case "o":
- output += formatNumber("o",
- Math.round((new Date(date.getFullYear(), date.getMonth(), date.getDate()).getTime() - new Date(date.getFullYear(), 0, 0).getTime()) / 86400000), 3);
- break;
- case "m":
- output += formatNumber("m", date.getMonth() + 1, 2);
- break;
- case "M":
- output += formatName("M", date.getMonth(), monthNamesShort, monthNames);
- break;
- case "y":
- output += (lookAhead("y") ? date.getFullYear() :
- (date.getYear() % 100 < 10 ? "0" : "") + date.getYear() % 100);
- break;
- case "@":
- output += date.getTime();
- break;
- case "!":
- output += date.getTime() * 10000 + this._ticksTo1970;
- break;
- case "'":
- if (lookAhead("'")) {
- output += "'";
- } else {
- literal = true;
- }
- break;
- default:
- output += format.charAt(iFormat);
- }
- }
- }
- }
- return output;
- },
-
- /* Extract all possible characters from the date format. */
- _possibleChars: function (format) {
- var iFormat,
- chars = "",
- literal = false,
- // Check whether a format character is doubled
- lookAhead = function(match) {
- var matches = (iFormat + 1 < format.length && format.charAt(iFormat + 1) === match);
- if (matches) {
- iFormat++;
- }
- return matches;
- };
-
- for (iFormat = 0; iFormat < format.length; iFormat++) {
- if (literal) {
- if (format.charAt(iFormat) === "'" && !lookAhead("'")) {
- literal = false;
- } else {
- chars += format.charAt(iFormat);
- }
- } else {
- switch (format.charAt(iFormat)) {
- case "d": case "m": case "y": case "@":
- chars += "0123456789";
- break;
- case "D": case "M":
- return null; // Accept anything
- case "'":
- if (lookAhead("'")) {
- chars += "'";
- } else {
- literal = true;
- }
- break;
- default:
- chars += format.charAt(iFormat);
- }
- }
- }
- return chars;
- },
-
- /* Get a setting value, defaulting if necessary. */
- _get: function(inst, name) {
- return inst.settings[name] !== undefined ?
- inst.settings[name] : this._defaults[name];
- },
-
- /* Parse existing date and initialise date picker. */
- _setDateFromField: function(inst, noDefault) {
- if (inst.input.val() === inst.lastVal) {
- return;
- }
-
- var dateFormat = this._get(inst, "dateFormat"),
- dates = inst.lastVal = inst.input ? inst.input.val() : null,
- defaultDate = this._getDefaultDate(inst),
- date = defaultDate,
- settings = this._getFormatConfig(inst);
-
- try {
- date = this.parseDate(dateFormat, dates, settings) || defaultDate;
- } catch (event) {
- dates = (noDefault ? "" : dates);
- }
- inst.selectedDay = date.getDate();
- inst.drawMonth = inst.selectedMonth = date.getMonth();
- inst.drawYear = inst.selectedYear = date.getFullYear();
- inst.currentDay = (dates ? date.getDate() : 0);
- inst.currentMonth = (dates ? date.getMonth() : 0);
- inst.currentYear = (dates ? date.getFullYear() : 0);
- this._adjustInstDate(inst);
- },
-
- /* Retrieve the default date shown on opening. */
- _getDefaultDate: function(inst) {
- return this._restrictMinMax(inst,
- this._determineDate(inst, this._get(inst, "defaultDate"), new Date()));
- },
-
- /* A date may be specified as an exact value or a relative one. */
- _determineDate: function(inst, date, defaultDate) {
- var offsetNumeric = function(offset) {
- var date = new Date();
- date.setDate(date.getDate() + offset);
- return date;
- },
- offsetString = function(offset) {
- try {
- return $.datepicker.parseDate($.datepicker._get(inst, "dateFormat"),
- offset, $.datepicker._getFormatConfig(inst));
- }
- catch (e) {
- // Ignore
- }
-
- var date = (offset.toLowerCase().match(/^c/) ?
- $.datepicker._getDate(inst) : null) || new Date(),
- year = date.getFullYear(),
- month = date.getMonth(),
- day = date.getDate(),
- pattern = /([+\-]?[0-9]+)\s*(d|D|w|W|m|M|y|Y)?/g,
- matches = pattern.exec(offset);
-
- while (matches) {
- switch (matches[2] || "d") {
- case "d" : case "D" :
- day += parseInt(matches[1],10); break;
- case "w" : case "W" :
- day += parseInt(matches[1],10) * 7; break;
- case "m" : case "M" :
- month += parseInt(matches[1],10);
- day = Math.min(day, $.datepicker._getDaysInMonth(year, month));
- break;
- case "y": case "Y" :
- year += parseInt(matches[1],10);
- day = Math.min(day, $.datepicker._getDaysInMonth(year, month));
- break;
- }
- matches = pattern.exec(offset);
- }
- return new Date(year, month, day);
- },
- newDate = (date == null || date === "" ? defaultDate : (typeof date === "string" ? offsetString(date) :
- (typeof date === "number" ? (isNaN(date) ? defaultDate : offsetNumeric(date)) : new Date(date.getTime()))));
-
- newDate = (newDate && newDate.toString() === "Invalid Date" ? defaultDate : newDate);
- if (newDate) {
- newDate.setHours(0);
- newDate.setMinutes(0);
- newDate.setSeconds(0);
- newDate.setMilliseconds(0);
- }
- return this._daylightSavingAdjust(newDate);
- },
-
- /* Handle switch to/from daylight saving.
- * Hours may be non-zero on daylight saving cut-over:
- * > 12 when midnight changeover, but then cannot generate
- * midnight datetime, so jump to 1AM, otherwise reset.
- * @param date (Date) the date to check
- * @return (Date) the corrected date
- */
- _daylightSavingAdjust: function(date) {
- if (!date) {
- return null;
- }
- date.setHours(date.getHours() > 12 ? date.getHours() + 2 : 0);
- return date;
- },
-
- /* Set the date(s) directly. */
- _setDate: function(inst, date, noChange) {
- var clear = !date,
- origMonth = inst.selectedMonth,
- origYear = inst.selectedYear,
- newDate = this._restrictMinMax(inst, this._determineDate(inst, date, new Date()));
-
- inst.selectedDay = inst.currentDay = newDate.getDate();
- inst.drawMonth = inst.selectedMonth = inst.currentMonth = newDate.getMonth();
- inst.drawYear = inst.selectedYear = inst.currentYear = newDate.getFullYear();
- if ((origMonth !== inst.selectedMonth || origYear !== inst.selectedYear) && !noChange) {
- this._notifyChange(inst);
- }
- this._adjustInstDate(inst);
- if (inst.input) {
- inst.input.val(clear ? "" : this._formatDate(inst));
- }
- },
-
- /* Retrieve the date(s) directly. */
- _getDate: function(inst) {
- var startDate = (!inst.currentYear || (inst.input && inst.input.val() === "") ? null :
- this._daylightSavingAdjust(new Date(
- inst.currentYear, inst.currentMonth, inst.currentDay)));
- return startDate;
- },
-
- /* Attach the onxxx handlers. These are declared statically so
- * they work with static code transformers like Caja.
- */
- _attachHandlers: function(inst) {
- var stepMonths = this._get(inst, "stepMonths"),
- id = "#" + inst.id.replace( /\\\\/g, "\\" );
- inst.dpDiv.find("[data-handler]").map(function () {
- var handler = {
- prev: function () {
- $.datepicker._adjustDate(id, -stepMonths, "M");
- },
- next: function () {
- $.datepicker._adjustDate(id, +stepMonths, "M");
- },
- hide: function () {
- $.datepicker._hideDatepicker();
- },
- today: function () {
- $.datepicker._gotoToday(id);
- },
- selectDay: function () {
- $.datepicker._selectDay(id, +this.getAttribute("data-month"), +this.getAttribute("data-year"), this);
- return false;
- },
- selectMonth: function () {
- $.datepicker._selectMonthYear(id, this, "M");
- return false;
- },
- selectYear: function () {
- $.datepicker._selectMonthYear(id, this, "Y");
- return false;
- }
- };
- $(this).bind(this.getAttribute("data-event"), handler[this.getAttribute("data-handler")]);
- });
- },
-
- /* Generate the HTML for the current state of the date picker. */
- _generateHTML: function(inst) {
- var maxDraw, prevText, prev, nextText, next, currentText, gotoDate,
- controls, buttonPanel, firstDay, showWeek, dayNames, dayNamesMin,
- monthNames, monthNamesShort, beforeShowDay, showOtherMonths,
- selectOtherMonths, defaultDate, html, dow, row, group, col, selectedDate,
- cornerClass, calender, thead, day, daysInMonth, leadDays, curRows, numRows,
- printDate, dRow, tbody, daySettings, otherMonth, unselectable,
- tempDate = new Date(),
- today = this._daylightSavingAdjust(
- new Date(tempDate.getFullYear(), tempDate.getMonth(), tempDate.getDate())), // clear time
- isRTL = this._get(inst, "isRTL"),
- showButtonPanel = this._get(inst, "showButtonPanel"),
- hideIfNoPrevNext = this._get(inst, "hideIfNoPrevNext"),
- navigationAsDateFormat = this._get(inst, "navigationAsDateFormat"),
- numMonths = this._getNumberOfMonths(inst),
- showCurrentAtPos = this._get(inst, "showCurrentAtPos"),
- stepMonths = this._get(inst, "stepMonths"),
- isMultiMonth = (numMonths[0] !== 1 || numMonths[1] !== 1),
- currentDate = this._daylightSavingAdjust((!inst.currentDay ? new Date(9999, 9, 9) :
- new Date(inst.currentYear, inst.currentMonth, inst.currentDay))),
- minDate = this._getMinMaxDate(inst, "min"),
- maxDate = this._getMinMaxDate(inst, "max"),
- drawMonth = inst.drawMonth - showCurrentAtPos,
- drawYear = inst.drawYear;
-
- if (drawMonth < 0) {
- drawMonth += 12;
- drawYear--;
- }
- if (maxDate) {
- maxDraw = this._daylightSavingAdjust(new Date(maxDate.getFullYear(),
- maxDate.getMonth() - (numMonths[0] * numMonths[1]) + 1, maxDate.getDate()));
- maxDraw = (minDate && maxDraw < minDate ? minDate : maxDraw);
- while (this._daylightSavingAdjust(new Date(drawYear, drawMonth, 1)) > maxDraw) {
- drawMonth--;
- if (drawMonth < 0) {
- drawMonth = 11;
- drawYear--;
- }
- }
- }
- inst.drawMonth = drawMonth;
- inst.drawYear = drawYear;
-
- prevText = this._get(inst, "prevText");
- prevText = (!navigationAsDateFormat ? prevText : this.formatDate(prevText,
- this._daylightSavingAdjust(new Date(drawYear, drawMonth - stepMonths, 1)),
- this._getFormatConfig(inst)));
-
- prev = (this._canAdjustMonth(inst, -1, drawYear, drawMonth) ?
- "<a class='ui-datepicker-prev ui-corner-all' data-handler='prev' data-event='click'" +
- " title='" + prevText + "'><span class='ui-icon ui-icon-circle-triangle-" + ( isRTL ? "e" : "w") + "'>" + prevText + "</span></a>" :
- (hideIfNoPrevNext ? "" : "<a class='ui-datepicker-prev ui-corner-all ui-state-disabled' title='"+ prevText +"'><span class='ui-icon ui-icon-circle-triangle-" + ( isRTL ? "e" : "w") + "'>" + prevText + "</span></a>"));
-
- nextText = this._get(inst, "nextText");
- nextText = (!navigationAsDateFormat ? nextText : this.formatDate(nextText,
- this._daylightSavingAdjust(new Date(drawYear, drawMonth + stepMonths, 1)),
- this._getFormatConfig(inst)));
-
- next = (this._canAdjustMonth(inst, +1, drawYear, drawMonth) ?
- "<a class='ui-datepicker-next ui-corner-all' data-handler='next' data-event='click'" +
- " title='" + nextText + "'><span class='ui-icon ui-icon-circle-triangle-" + ( isRTL ? "w" : "e") + "'>" + nextText + "</span></a>" :
- (hideIfNoPrevNext ? "" : "<a class='ui-datepicker-next ui-corner-all ui-state-disabled' title='"+ nextText + "'><span class='ui-icon ui-icon-circle-triangle-" + ( isRTL ? "w" : "e") + "'>" + nextText + "</span></a>"));
-
- currentText = this._get(inst, "currentText");
- gotoDate = (this._get(inst, "gotoCurrent") && inst.currentDay ? currentDate : today);
- currentText = (!navigationAsDateFormat ? currentText :
- this.formatDate(currentText, gotoDate, this._getFormatConfig(inst)));
-
- controls = (!inst.inline ? "<button type='button' class='ui-datepicker-close ui-state-default ui-priority-primary ui-corner-all' data-handler='hide' data-event='click'>" +
- this._get(inst, "closeText") + "</button>" : "");
-
- buttonPanel = (showButtonPanel) ? "<div class='ui-datepicker-buttonpane ui-widget-content'>" + (isRTL ? controls : "") +
- (this._isInRange(inst, gotoDate) ? "<button type='button' class='ui-datepicker-current ui-state-default ui-priority-secondary ui-corner-all' data-handler='today' data-event='click'" +
- ">" + currentText + "</button>" : "") + (isRTL ? "" : controls) + "</div>" : "";
-
- firstDay = parseInt(this._get(inst, "firstDay"),10);
- firstDay = (isNaN(firstDay) ? 0 : firstDay);
-
- showWeek = this._get(inst, "showWeek");
- dayNames = this._get(inst, "dayNames");
- dayNamesMin = this._get(inst, "dayNamesMin");
- monthNames = this._get(inst, "monthNames");
- monthNamesShort = this._get(inst, "monthNamesShort");
- beforeShowDay = this._get(inst, "beforeShowDay");
- showOtherMonths = this._get(inst, "showOtherMonths");
- selectOtherMonths = this._get(inst, "selectOtherMonths");
- defaultDate = this._getDefaultDate(inst);
- html = "";
- dow;
- for (row = 0; row < numMonths[0]; row++) {
- group = "";
- this.maxRows = 4;
- for (col = 0; col < numMonths[1]; col++) {
- selectedDate = this._daylightSavingAdjust(new Date(drawYear, drawMonth, inst.selectedDay));
- cornerClass = " ui-corner-all";
- calender = "";
- if (isMultiMonth) {
- calender += "<div class='ui-datepicker-group";
- if (numMonths[1] > 1) {
- switch (col) {
- case 0: calender += " ui-datepicker-group-first";
- cornerClass = " ui-corner-" + (isRTL ? "right" : "left"); break;
- case numMonths[1]-1: calender += " ui-datepicker-group-last";
- cornerClass = " ui-corner-" + (isRTL ? "left" : "right"); break;
- default: calender += " ui-datepicker-group-middle"; cornerClass = ""; break;
- }
- }
- calender += "'>";
- }
- calender += "<div class='ui-datepicker-header ui-widget-header ui-helper-clearfix" + cornerClass + "'>" +
- (/all|left/.test(cornerClass) && row === 0 ? (isRTL ? next : prev) : "") +
- (/all|right/.test(cornerClass) && row === 0 ? (isRTL ? prev : next) : "") +
- this._generateMonthYearHeader(inst, drawMonth, drawYear, minDate, maxDate,
- row > 0 || col > 0, monthNames, monthNamesShort) + // draw month headers
- "</div><table class='ui-datepicker-calendar'><thead>" +
- "<tr>";
- thead = (showWeek ? "<th class='ui-datepicker-week-col'>" + this._get(inst, "weekHeader") + "</th>" : "");
- for (dow = 0; dow < 7; dow++) { // days of the week
- day = (dow + firstDay) % 7;
- thead += "<th" + ((dow + firstDay + 6) % 7 >= 5 ? " class='ui-datepicker-week-end'" : "") + ">" +
- "<span title='" + dayNames[day] + "'>" + dayNamesMin[day] + "</span></th>";
- }
- calender += thead + "</tr></thead><tbody>";
- daysInMonth = this._getDaysInMonth(drawYear, drawMonth);
- if (drawYear === inst.selectedYear && drawMonth === inst.selectedMonth) {
- inst.selectedDay = Math.min(inst.selectedDay, daysInMonth);
- }
- leadDays = (this._getFirstDayOfMonth(drawYear, drawMonth) - firstDay + 7) % 7;
- curRows = Math.ceil((leadDays + daysInMonth) / 7); // calculate the number of rows to generate
- numRows = (isMultiMonth ? this.maxRows > curRows ? this.maxRows : curRows : curRows); //If multiple months, use the higher number of rows (see #7043)
- this.maxRows = numRows;
- printDate = this._daylightSavingAdjust(new Date(drawYear, drawMonth, 1 - leadDays));
- for (dRow = 0; dRow < numRows; dRow++) { // create date picker rows
- calender += "<tr>";
- tbody = (!showWeek ? "" : "<td class='ui-datepicker-week-col'>" +
- this._get(inst, "calculateWeek")(printDate) + "</td>");
- for (dow = 0; dow < 7; dow++) { // create date picker days
- daySettings = (beforeShowDay ?
- beforeShowDay.apply((inst.input ? inst.input[0] : null), [printDate]) : [true, ""]);
- otherMonth = (printDate.getMonth() !== drawMonth);
- unselectable = (otherMonth && !selectOtherMonths) || !daySettings[0] ||
- (minDate && printDate < minDate) || (maxDate && printDate > maxDate);
- tbody += "<td class='" +
- ((dow + firstDay + 6) % 7 >= 5 ? " ui-datepicker-week-end" : "") + // highlight weekends
- (otherMonth ? " ui-datepicker-other-month" : "") + // highlight days from other months
- ((printDate.getTime() === selectedDate.getTime() && drawMonth === inst.selectedMonth && inst._keyEvent) || // user pressed key
- (defaultDate.getTime() === printDate.getTime() && defaultDate.getTime() === selectedDate.getTime()) ?
- // or defaultDate is current printedDate and defaultDate is selectedDate
- " " + this._dayOverClass : "") + // highlight selected day
- (unselectable ? " " + this._unselectableClass + " ui-state-disabled": "") + // highlight unselectable days
- (otherMonth && !showOtherMonths ? "" : " " + daySettings[1] + // highlight custom dates
- (printDate.getTime() === currentDate.getTime() ? " " + this._currentClass : "") + // highlight selected day
- (printDate.getTime() === today.getTime() ? " ui-datepicker-today" : "")) + "'" + // highlight today (if different)
- ((!otherMonth || showOtherMonths) && daySettings[2] ? " title='" + daySettings[2].replace(/'/g, "&#39;") + "'" : "") + // cell title
- (unselectable ? "" : " data-handler='selectDay' data-event='click' data-month='" + printDate.getMonth() + "' data-year='" + printDate.getFullYear() + "'") + ">" + // actions
- (otherMonth && !showOtherMonths ? "&#xa0;" : // display for other months
- (unselectable ? "<span class='ui-state-default'>" + printDate.getDate() + "</span>" : "<a class='ui-state-default" +
- (printDate.getTime() === today.getTime() ? " ui-state-highlight" : "") +
- (printDate.getTime() === currentDate.getTime() ? " ui-state-active" : "") + // highlight selected day
- (otherMonth ? " ui-priority-secondary" : "") + // distinguish dates from other months
- "' href='#'>" + printDate.getDate() + "</a>")) + "</td>"; // display selectable date
- printDate.setDate(printDate.getDate() + 1);
- printDate = this._daylightSavingAdjust(printDate);
- }
- calender += tbody + "</tr>";
- }
- drawMonth++;
- if (drawMonth > 11) {
- drawMonth = 0;
- drawYear++;
- }
- calender += "</tbody></table>" + (isMultiMonth ? "</div>" +
- ((numMonths[0] > 0 && col === numMonths[1]-1) ? "<div class='ui-datepicker-row-break'></div>" : "") : "");
- group += calender;
- }
- html += group;
- }
- html += buttonPanel;
- inst._keyEvent = false;
- return html;
- },
-
- /* Generate the month and year header. */
- _generateMonthYearHeader: function(inst, drawMonth, drawYear, minDate, maxDate,
- secondary, monthNames, monthNamesShort) {
-
- var inMinYear, inMaxYear, month, years, thisYear, determineYear, year, endYear,
- changeMonth = this._get(inst, "changeMonth"),
- changeYear = this._get(inst, "changeYear"),
- showMonthAfterYear = this._get(inst, "showMonthAfterYear"),
- html = "<div class='ui-datepicker-title'>",
- monthHtml = "";
-
- // month selection
- if (secondary || !changeMonth) {
- monthHtml += "<span class='ui-datepicker-month'>" + monthNames[drawMonth] + "</span>";
- } else {
- inMinYear = (minDate && minDate.getFullYear() === drawYear);
- inMaxYear = (maxDate && maxDate.getFullYear() === drawYear);
- monthHtml += "<select class='ui-datepicker-month' data-handler='selectMonth' data-event='change'>";
- for ( month = 0; month < 12; month++) {
- if ((!inMinYear || month >= minDate.getMonth()) && (!inMaxYear || month <= maxDate.getMonth())) {
- monthHtml += "<option value='" + month + "'" +
- (month === drawMonth ? " selected='selected'" : "") +
- ">" + monthNamesShort[month] + "</option>";
- }
- }
- monthHtml += "</select>";
- }
-
- if (!showMonthAfterYear) {
- html += monthHtml + (secondary || !(changeMonth && changeYear) ? "&#xa0;" : "");
- }
-
- // year selection
- if ( !inst.yearshtml ) {
- inst.yearshtml = "";
- if (secondary || !changeYear) {
- html += "<span class='ui-datepicker-year'>" + drawYear + "</span>";
- } else {
- // determine range of years to display
- years = this._get(inst, "yearRange").split(":");
- thisYear = new Date().getFullYear();
- determineYear = function(value) {
- var year = (value.match(/c[+\-].*/) ? drawYear + parseInt(value.substring(1), 10) :
- (value.match(/[+\-].*/) ? thisYear + parseInt(value, 10) :
- parseInt(value, 10)));
- return (isNaN(year) ? thisYear : year);
- };
- year = determineYear(years[0]);
- endYear = Math.max(year, determineYear(years[1] || ""));
- year = (minDate ? Math.max(year, minDate.getFullYear()) : year);
- endYear = (maxDate ? Math.min(endYear, maxDate.getFullYear()) : endYear);
- inst.yearshtml += "<select class='ui-datepicker-year' data-handler='selectYear' data-event='change'>";
- for (; year <= endYear; year++) {
- inst.yearshtml += "<option value='" + year + "'" +
- (year === drawYear ? " selected='selected'" : "") +
- ">" + year + "</option>";
- }
- inst.yearshtml += "</select>";
-
- html += inst.yearshtml;
- inst.yearshtml = null;
- }
- }
-
- html += this._get(inst, "yearSuffix");
- if (showMonthAfterYear) {
- html += (secondary || !(changeMonth && changeYear) ? "&#xa0;" : "") + monthHtml;
- }
- html += "</div>"; // Close datepicker_header
- return html;
- },
-
- /* Adjust one of the date sub-fields. */
- _adjustInstDate: function(inst, offset, period) {
- var year = inst.drawYear + (period === "Y" ? offset : 0),
- month = inst.drawMonth + (period === "M" ? offset : 0),
- day = Math.min(inst.selectedDay, this._getDaysInMonth(year, month)) + (period === "D" ? offset : 0),
- date = this._restrictMinMax(inst, this._daylightSavingAdjust(new Date(year, month, day)));
-
- inst.selectedDay = date.getDate();
- inst.drawMonth = inst.selectedMonth = date.getMonth();
- inst.drawYear = inst.selectedYear = date.getFullYear();
- if (period === "M" || period === "Y") {
- this._notifyChange(inst);
- }
- },
-
- /* Ensure a date is within any min/max bounds. */
- _restrictMinMax: function(inst, date) {
- var minDate = this._getMinMaxDate(inst, "min"),
- maxDate = this._getMinMaxDate(inst, "max"),
- newDate = (minDate && date < minDate ? minDate : date);
- return (maxDate && newDate > maxDate ? maxDate : newDate);
- },
-
- /* Notify change of month/year. */
- _notifyChange: function(inst) {
- var onChange = this._get(inst, "onChangeMonthYear");
- if (onChange) {
- onChange.apply((inst.input ? inst.input[0] : null),
- [inst.selectedYear, inst.selectedMonth + 1, inst]);
- }
- },
-
- /* Determine the number of months to show. */
- _getNumberOfMonths: function(inst) {
- var numMonths = this._get(inst, "numberOfMonths");
- return (numMonths == null ? [1, 1] : (typeof numMonths === "number" ? [1, numMonths] : numMonths));
- },
-
- /* Determine the current maximum date - ensure no time components are set. */
- _getMinMaxDate: function(inst, minMax) {
- return this._determineDate(inst, this._get(inst, minMax + "Date"), null);
- },
-
- /* Find the number of days in a given month. */
- _getDaysInMonth: function(year, month) {
- return 32 - this._daylightSavingAdjust(new Date(year, month, 32)).getDate();
- },
-
- /* Find the day of the week of the first of a month. */
- _getFirstDayOfMonth: function(year, month) {
- return new Date(year, month, 1).getDay();
- },
-
- /* Determines if we should allow a "next/prev" month display change. */
- _canAdjustMonth: function(inst, offset, curYear, curMonth) {
- var numMonths = this._getNumberOfMonths(inst),
- date = this._daylightSavingAdjust(new Date(curYear,
- curMonth + (offset < 0 ? offset : numMonths[0] * numMonths[1]), 1));
-
- if (offset < 0) {
- date.setDate(this._getDaysInMonth(date.getFullYear(), date.getMonth()));
- }
- return this._isInRange(inst, date);
- },
-
- /* Is the given date in the accepted range? */
- _isInRange: function(inst, date) {
- var yearSplit, currentYear,
- minDate = this._getMinMaxDate(inst, "min"),
- maxDate = this._getMinMaxDate(inst, "max"),
- minYear = null,
- maxYear = null,
- years = this._get(inst, "yearRange");
- if (years){
- yearSplit = years.split(":");
- currentYear = new Date().getFullYear();
- minYear = parseInt(yearSplit[0], 10);
- maxYear = parseInt(yearSplit[1], 10);
- if ( yearSplit[0].match(/[+\-].*/) ) {
- minYear += currentYear;
- }
- if ( yearSplit[1].match(/[+\-].*/) ) {
- maxYear += currentYear;
- }
- }
-
- return ((!minDate || date.getTime() >= minDate.getTime()) &&
- (!maxDate || date.getTime() <= maxDate.getTime()) &&
- (!minYear || date.getFullYear() >= minYear) &&
- (!maxYear || date.getFullYear() <= maxYear));
- },
-
- /* Provide the configuration settings for formatting/parsing. */
- _getFormatConfig: function(inst) {
- var shortYearCutoff = this._get(inst, "shortYearCutoff");
- shortYearCutoff = (typeof shortYearCutoff !== "string" ? shortYearCutoff :
- new Date().getFullYear() % 100 + parseInt(shortYearCutoff, 10));
- return {shortYearCutoff: shortYearCutoff,
- dayNamesShort: this._get(inst, "dayNamesShort"), dayNames: this._get(inst, "dayNames"),
- monthNamesShort: this._get(inst, "monthNamesShort"), monthNames: this._get(inst, "monthNames")};
- },
-
- /* Format the given date for display. */
- _formatDate: function(inst, day, month, year) {
- if (!day) {
- inst.currentDay = inst.selectedDay;
- inst.currentMonth = inst.selectedMonth;
- inst.currentYear = inst.selectedYear;
- }
- var date = (day ? (typeof day === "object" ? day :
- this._daylightSavingAdjust(new Date(year, month, day))) :
- this._daylightSavingAdjust(new Date(inst.currentYear, inst.currentMonth, inst.currentDay)));
- return this.formatDate(this._get(inst, "dateFormat"), date, this._getFormatConfig(inst));
- }
-});
-
-/*
- * Bind hover events for datepicker elements.
- * Done via delegate so the binding only occurs once in the lifetime of the parent div.
- * Global instActive, set by _updateDatepicker allows the handlers to find their way back to the active picker.
- */
-function bindHover(dpDiv) {
- var selector = "button, .ui-datepicker-prev, .ui-datepicker-next, .ui-datepicker-calendar td a";
- return dpDiv.delegate(selector, "mouseout", function() {
- $(this).removeClass("ui-state-hover");
- if (this.className.indexOf("ui-datepicker-prev") !== -1) {
- $(this).removeClass("ui-datepicker-prev-hover");
- }
- if (this.className.indexOf("ui-datepicker-next") !== -1) {
- $(this).removeClass("ui-datepicker-next-hover");
- }
- })
- .delegate(selector, "mouseover", function(){
- if (!$.datepicker._isDisabledDatepicker( instActive.inline ? dpDiv.parent()[0] : instActive.input[0])) {
- $(this).parents(".ui-datepicker-calendar").find("a").removeClass("ui-state-hover");
- $(this).addClass("ui-state-hover");
- if (this.className.indexOf("ui-datepicker-prev") !== -1) {
- $(this).addClass("ui-datepicker-prev-hover");
- }
- if (this.className.indexOf("ui-datepicker-next") !== -1) {
- $(this).addClass("ui-datepicker-next-hover");
- }
- }
- });
-}
-
-/* jQuery extend now ignores nulls! */
-function extendRemove(target, props) {
- $.extend(target, props);
- for (var name in props) {
- if (props[name] == null) {
- target[name] = props[name];
- }
- }
- return target;
-}
-
-/* Invoke the datepicker functionality.
- @param options string - a command, optionally followed by additional parameters or
- Object - settings for attaching new datepicker functionality
- @return jQuery object */
-$.fn.datepicker = function(options){
-
- /* Verify an empty collection wasn't passed - Fixes #6976 */
- if ( !this.length ) {
- return this;
- }
-
- /* Initialise the date picker. */
- if (!$.datepicker.initialized) {
- $(document).mousedown($.datepicker._checkExternalClick);
- $.datepicker.initialized = true;
- }
-
- /* Append datepicker main container to body if not exist. */
- if ($("#"+$.datepicker._mainDivId).length === 0) {
- $("body").append($.datepicker.dpDiv);
- }
-
- var otherArgs = Array.prototype.slice.call(arguments, 1);
- if (typeof options === "string" && (options === "isDisabled" || options === "getDate" || options === "widget")) {
- return $.datepicker["_" + options + "Datepicker"].
- apply($.datepicker, [this[0]].concat(otherArgs));
- }
- if (options === "option" && arguments.length === 2 && typeof arguments[1] === "string") {
- return $.datepicker["_" + options + "Datepicker"].
- apply($.datepicker, [this[0]].concat(otherArgs));
- }
- return this.each(function() {
- typeof options === "string" ?
- $.datepicker["_" + options + "Datepicker"].
- apply($.datepicker, [this].concat(otherArgs)) :
- $.datepicker._attachDatepicker(this, options);
- });
-};
-
-$.datepicker = new Datepicker(); // singleton instance
-$.datepicker.initialized = false;
-$.datepicker.uuid = new Date().getTime();
-$.datepicker.version = "1.10.3";
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-var sizeRelatedOptions = {
- buttons: true,
- height: true,
- maxHeight: true,
- maxWidth: true,
- minHeight: true,
- minWidth: true,
- width: true
- },
- resizableRelatedOptions = {
- maxHeight: true,
- maxWidth: true,
- minHeight: true,
- minWidth: true
- };
-
-$.widget( "ui.dialog", {
- version: "1.10.3",
- options: {
- appendTo: "body",
- autoOpen: true,
- buttons: [],
- closeOnEscape: true,
- closeText: "close",
- dialogClass: "",
- draggable: true,
- hide: null,
- height: "auto",
- maxHeight: null,
- maxWidth: null,
- minHeight: 150,
- minWidth: 150,
- modal: false,
- position: {
- my: "center",
- at: "center",
- of: window,
- collision: "fit",
- // Ensure the titlebar is always visible
- using: function( pos ) {
- var topOffset = $( this ).css( pos ).offset().top;
- if ( topOffset < 0 ) {
- $( this ).css( "top", pos.top - topOffset );
- }
- }
- },
- resizable: true,
- show: null,
- title: null,
- width: 300,
-
- // callbacks
- beforeClose: null,
- close: null,
- drag: null,
- dragStart: null,
- dragStop: null,
- focus: null,
- open: null,
- resize: null,
- resizeStart: null,
- resizeStop: null
- },
-
- _create: function() {
- this.originalCss = {
- display: this.element[0].style.display,
- width: this.element[0].style.width,
- minHeight: this.element[0].style.minHeight,
- maxHeight: this.element[0].style.maxHeight,
- height: this.element[0].style.height
- };
- this.originalPosition = {
- parent: this.element.parent(),
- index: this.element.parent().children().index( this.element )
- };
- this.originalTitle = this.element.attr("title");
- this.options.title = this.options.title || this.originalTitle;
-
- this._createWrapper();
-
- this.element
- .show()
- .removeAttr("title")
- .addClass("ui-dialog-content ui-widget-content")
- .appendTo( this.uiDialog );
-
- this._createTitlebar();
- this._createButtonPane();
-
- if ( this.options.draggable && $.fn.draggable ) {
- this._makeDraggable();
- }
- if ( this.options.resizable && $.fn.resizable ) {
- this._makeResizable();
- }
-
- this._isOpen = false;
- },
-
- _init: function() {
- if ( this.options.autoOpen ) {
- this.open();
- }
- },
-
- _appendTo: function() {
- var element = this.options.appendTo;
- if ( element && (element.jquery || element.nodeType) ) {
- return $( element );
- }
- return this.document.find( element || "body" ).eq( 0 );
- },
-
- _destroy: function() {
- var next,
- originalPosition = this.originalPosition;
-
- this._destroyOverlay();
-
- this.element
- .removeUniqueId()
- .removeClass("ui-dialog-content ui-widget-content")
- .css( this.originalCss )
- // Without detaching first, the following becomes really slow
- .detach();
-
- this.uiDialog.stop( true, true ).remove();
-
- if ( this.originalTitle ) {
- this.element.attr( "title", this.originalTitle );
- }
-
- next = originalPosition.parent.children().eq( originalPosition.index );
- // Don't try to place the dialog next to itself (#8613)
- if ( next.length && next[0] !== this.element[0] ) {
- next.before( this.element );
- } else {
- originalPosition.parent.append( this.element );
- }
- },
-
- widget: function() {
- return this.uiDialog;
- },
-
- disable: $.noop,
- enable: $.noop,
-
- close: function( event ) {
- var that = this;
-
- if ( !this._isOpen || this._trigger( "beforeClose", event ) === false ) {
- return;
- }
-
- this._isOpen = false;
- this._destroyOverlay();
-
- if ( !this.opener.filter(":focusable").focus().length ) {
- // Hiding a focused element doesn't trigger blur in WebKit
- // so in case we have nothing to focus on, explicitly blur the active element
- // https://bugs.webkit.org/show_bug.cgi?id=47182
- $( this.document[0].activeElement ).blur();
- }
-
- this._hide( this.uiDialog, this.options.hide, function() {
- that._trigger( "close", event );
- });
- },
-
- isOpen: function() {
- return this._isOpen;
- },
-
- moveToTop: function() {
- this._moveToTop();
- },
-
- _moveToTop: function( event, silent ) {
- var moved = !!this.uiDialog.nextAll(":visible").insertBefore( this.uiDialog ).length;
- if ( moved && !silent ) {
- this._trigger( "focus", event );
- }
- return moved;
- },
-
- open: function() {
- var that = this;
- if ( this._isOpen ) {
- if ( this._moveToTop() ) {
- this._focusTabbable();
- }
- return;
- }
-
- this._isOpen = true;
- this.opener = $( this.document[0].activeElement );
-
- this._size();
- this._position();
- this._createOverlay();
- this._moveToTop( null, true );
- this._show( this.uiDialog, this.options.show, function() {
- that._focusTabbable();
- that._trigger("focus");
- });
-
- this._trigger("open");
- },
-
- _focusTabbable: function() {
- // Set focus to the first match:
- // 1. First element inside the dialog matching [autofocus]
- // 2. Tabbable element inside the content element
- // 3. Tabbable element inside the buttonpane
- // 4. The close button
- // 5. The dialog itself
- var hasFocus = this.element.find("[autofocus]");
- if ( !hasFocus.length ) {
- hasFocus = this.element.find(":tabbable");
- }
- if ( !hasFocus.length ) {
- hasFocus = this.uiDialogButtonPane.find(":tabbable");
- }
- if ( !hasFocus.length ) {
- hasFocus = this.uiDialogTitlebarClose.filter(":tabbable");
- }
- if ( !hasFocus.length ) {
- hasFocus = this.uiDialog;
- }
- hasFocus.eq( 0 ).focus();
- },
-
- _keepFocus: function( event ) {
- function checkFocus() {
- var activeElement = this.document[0].activeElement,
- isActive = this.uiDialog[0] === activeElement ||
- $.contains( this.uiDialog[0], activeElement );
- if ( !isActive ) {
- this._focusTabbable();
- }
- }
- event.preventDefault();
- checkFocus.call( this );
- // support: IE
- // IE <= 8 doesn't prevent moving focus even with event.preventDefault()
- // so we check again later
- this._delay( checkFocus );
- },
-
- _createWrapper: function() {
- this.uiDialog = $("<div>")
- .addClass( "ui-dialog ui-widget ui-widget-content ui-corner-all ui-front " +
- this.options.dialogClass )
- .hide()
- .attr({
- // Setting tabIndex makes the div focusable
- tabIndex: -1,
- role: "dialog"
- })
- .appendTo( this._appendTo() );
-
- this._on( this.uiDialog, {
- keydown: function( event ) {
- if ( this.options.closeOnEscape && !event.isDefaultPrevented() && event.keyCode &&
- event.keyCode === $.ui.keyCode.ESCAPE ) {
- event.preventDefault();
- this.close( event );
- return;
- }
-
- // prevent tabbing out of dialogs
- if ( event.keyCode !== $.ui.keyCode.TAB ) {
- return;
- }
- var tabbables = this.uiDialog.find(":tabbable"),
- first = tabbables.filter(":first"),
- last = tabbables.filter(":last");
-
- if ( ( event.target === last[0] || event.target === this.uiDialog[0] ) && !event.shiftKey ) {
- first.focus( 1 );
- event.preventDefault();
- } else if ( ( event.target === first[0] || event.target === this.uiDialog[0] ) && event.shiftKey ) {
- last.focus( 1 );
- event.preventDefault();
- }
- },
- mousedown: function( event ) {
- if ( this._moveToTop( event ) ) {
- this._focusTabbable();
- }
- }
- });
-
- // We assume that any existing aria-describedby attribute means
- // that the dialog content is marked up properly
- // otherwise we brute force the content as the description
- if ( !this.element.find("[aria-describedby]").length ) {
- this.uiDialog.attr({
- "aria-describedby": this.element.uniqueId().attr("id")
- });
- }
- },
-
- _createTitlebar: function() {
- var uiDialogTitle;
-
- this.uiDialogTitlebar = $("<div>")
- .addClass("ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix")
- .prependTo( this.uiDialog );
- this._on( this.uiDialogTitlebar, {
- mousedown: function( event ) {
- // Don't prevent click on close button (#8838)
- // Focusing a dialog that is partially scrolled out of view
- // causes the browser to scroll it into view, preventing the click event
- if ( !$( event.target ).closest(".ui-dialog-titlebar-close") ) {
- // Dialog isn't getting focus when dragging (#8063)
- this.uiDialog.focus();
- }
- }
- });
-
- this.uiDialogTitlebarClose = $("<button></button>")
- .button({
- label: this.options.closeText,
- icons: {
- primary: "ui-icon-closethick"
- },
- text: false
- })
- .addClass("ui-dialog-titlebar-close")
- .appendTo( this.uiDialogTitlebar );
- this._on( this.uiDialogTitlebarClose, {
- click: function( event ) {
- event.preventDefault();
- this.close( event );
- }
- });
-
- uiDialogTitle = $("<span>")
- .uniqueId()
- .addClass("ui-dialog-title")
- .prependTo( this.uiDialogTitlebar );
- this._title( uiDialogTitle );
-
- this.uiDialog.attr({
- "aria-labelledby": uiDialogTitle.attr("id")
- });
- },
-
- _title: function( title ) {
- if ( !this.options.title ) {
- title.html("&#160;");
- }
- title.text( this.options.title );
- },
-
- _createButtonPane: function() {
- this.uiDialogButtonPane = $("<div>")
- .addClass("ui-dialog-buttonpane ui-widget-content ui-helper-clearfix");
-
- this.uiButtonSet = $("<div>")
- .addClass("ui-dialog-buttonset")
- .appendTo( this.uiDialogButtonPane );
-
- this._createButtons();
- },
-
- _createButtons: function() {
- var that = this,
- buttons = this.options.buttons;
-
- // if we already have a button pane, remove it
- this.uiDialogButtonPane.remove();
- this.uiButtonSet.empty();
-
- if ( $.isEmptyObject( buttons ) || ($.isArray( buttons ) && !buttons.length) ) {
- this.uiDialog.removeClass("ui-dialog-buttons");
- return;
- }
-
- $.each( buttons, function( name, props ) {
- var click, buttonOptions;
- props = $.isFunction( props ) ?
- { click: props, text: name } :
- props;
- // Default to a non-submitting button
- props = $.extend( { type: "button" }, props );
- // Change the context for the click callback to be the main element
- click = props.click;
- props.click = function() {
- click.apply( that.element[0], arguments );
- };
- buttonOptions = {
- icons: props.icons,
- text: props.showText
- };
- delete props.icons;
- delete props.showText;
- $( "<button></button>", props )
- .button( buttonOptions )
- .appendTo( that.uiButtonSet );
- });
- this.uiDialog.addClass("ui-dialog-buttons");
- this.uiDialogButtonPane.appendTo( this.uiDialog );
- },
-
- _makeDraggable: function() {
- var that = this,
- options = this.options;
-
- function filteredUi( ui ) {
- return {
- position: ui.position,
- offset: ui.offset
- };
- }
-
- this.uiDialog.draggable({
- cancel: ".ui-dialog-content, .ui-dialog-titlebar-close",
- handle: ".ui-dialog-titlebar",
- containment: "document",
- start: function( event, ui ) {
- $( this ).addClass("ui-dialog-dragging");
- that._blockFrames();
- that._trigger( "dragStart", event, filteredUi( ui ) );
- },
- drag: function( event, ui ) {
- that._trigger( "drag", event, filteredUi( ui ) );
- },
- stop: function( event, ui ) {
- options.position = [
- ui.position.left - that.document.scrollLeft(),
- ui.position.top - that.document.scrollTop()
- ];
- $( this ).removeClass("ui-dialog-dragging");
- that._unblockFrames();
- that._trigger( "dragStop", event, filteredUi( ui ) );
- }
- });
- },
-
- _makeResizable: function() {
- var that = this,
- options = this.options,
- handles = options.resizable,
- // .ui-resizable has position: relative defined in the stylesheet
- // but dialogs have to use absolute or fixed positioning
- position = this.uiDialog.css("position"),
- resizeHandles = typeof handles === "string" ?
- handles :
- "n,e,s,w,se,sw,ne,nw";
-
- function filteredUi( ui ) {
- return {
- originalPosition: ui.originalPosition,
- originalSize: ui.originalSize,
- position: ui.position,
- size: ui.size
- };
- }
-
- this.uiDialog.resizable({
- cancel: ".ui-dialog-content",
- containment: "document",
- alsoResize: this.element,
- maxWidth: options.maxWidth,
- maxHeight: options.maxHeight,
- minWidth: options.minWidth,
- minHeight: this._minHeight(),
- handles: resizeHandles,
- start: function( event, ui ) {
- $( this ).addClass("ui-dialog-resizing");
- that._blockFrames();
- that._trigger( "resizeStart", event, filteredUi( ui ) );
- },
- resize: function( event, ui ) {
- that._trigger( "resize", event, filteredUi( ui ) );
- },
- stop: function( event, ui ) {
- options.height = $( this ).height();
- options.width = $( this ).width();
- $( this ).removeClass("ui-dialog-resizing");
- that._unblockFrames();
- that._trigger( "resizeStop", event, filteredUi( ui ) );
- }
- })
- .css( "position", position );
- },
-
- _minHeight: function() {
- var options = this.options;
-
- return options.height === "auto" ?
- options.minHeight :
- Math.min( options.minHeight, options.height );
- },
-
- _position: function() {
- // Need to show the dialog to get the actual offset in the position plugin
- var isVisible = this.uiDialog.is(":visible");
- if ( !isVisible ) {
- this.uiDialog.show();
- }
- this.uiDialog.position( this.options.position );
- if ( !isVisible ) {
- this.uiDialog.hide();
- }
- },
-
- _setOptions: function( options ) {
- var that = this,
- resize = false,
- resizableOptions = {};
-
- $.each( options, function( key, value ) {
- that._setOption( key, value );
-
- if ( key in sizeRelatedOptions ) {
- resize = true;
- }
- if ( key in resizableRelatedOptions ) {
- resizableOptions[ key ] = value;
- }
- });
-
- if ( resize ) {
- this._size();
- this._position();
- }
- if ( this.uiDialog.is(":data(ui-resizable)") ) {
- this.uiDialog.resizable( "option", resizableOptions );
- }
- },
-
- _setOption: function( key, value ) {
- /*jshint maxcomplexity:15*/
- var isDraggable, isResizable,
- uiDialog = this.uiDialog;
-
- if ( key === "dialogClass" ) {
- uiDialog
- .removeClass( this.options.dialogClass )
- .addClass( value );
- }
-
- if ( key === "disabled" ) {
- return;
- }
-
- this._super( key, value );
-
- if ( key === "appendTo" ) {
- this.uiDialog.appendTo( this._appendTo() );
- }
-
- if ( key === "buttons" ) {
- this._createButtons();
- }
-
- if ( key === "closeText" ) {
- this.uiDialogTitlebarClose.button({
- // Ensure that we always pass a string
- label: "" + value
- });
- }
-
- if ( key === "draggable" ) {
- isDraggable = uiDialog.is(":data(ui-draggable)");
- if ( isDraggable && !value ) {
- uiDialog.draggable("destroy");
- }
-
- if ( !isDraggable && value ) {
- this._makeDraggable();
- }
- }
-
- if ( key === "position" ) {
- this._position();
- }
-
- if ( key === "resizable" ) {
- // currently resizable, becoming non-resizable
- isResizable = uiDialog.is(":data(ui-resizable)");
- if ( isResizable && !value ) {
- uiDialog.resizable("destroy");
- }
-
- // currently resizable, changing handles
- if ( isResizable && typeof value === "string" ) {
- uiDialog.resizable( "option", "handles", value );
- }
-
- // currently non-resizable, becoming resizable
- if ( !isResizable && value !== false ) {
- this._makeResizable();
- }
- }
-
- if ( key === "title" ) {
- this._title( this.uiDialogTitlebar.find(".ui-dialog-title") );
- }
- },
-
- _size: function() {
- // If the user has resized the dialog, the .ui-dialog and .ui-dialog-content
- // divs will both have width and height set, so we need to reset them
- var nonContentHeight, minContentHeight, maxContentHeight,
- options = this.options;
-
- // Reset content sizing
- this.element.show().css({
- width: "auto",
- minHeight: 0,
- maxHeight: "none",
- height: 0
- });
-
- if ( options.minWidth > options.width ) {
- options.width = options.minWidth;
- }
-
- // reset wrapper sizing
- // determine the height of all the non-content elements
- nonContentHeight = this.uiDialog.css({
- height: "auto",
- width: options.width
- })
- .outerHeight();
- minContentHeight = Math.max( 0, options.minHeight - nonContentHeight );
- maxContentHeight = typeof options.maxHeight === "number" ?
- Math.max( 0, options.maxHeight - nonContentHeight ) :
- "none";
-
- if ( options.height === "auto" ) {
- this.element.css({
- minHeight: minContentHeight,
- maxHeight: maxContentHeight,
- height: "auto"
- });
- } else {
- this.element.height( Math.max( 0, options.height - nonContentHeight ) );
- }
-
- if (this.uiDialog.is(":data(ui-resizable)") ) {
- this.uiDialog.resizable( "option", "minHeight", this._minHeight() );
- }
- },
-
- _blockFrames: function() {
- this.iframeBlocks = this.document.find( "iframe" ).map(function() {
- var iframe = $( this );
-
- return $( "<div>" )
- .css({
- position: "absolute",
- width: iframe.outerWidth(),
- height: iframe.outerHeight()
- })
- .appendTo( iframe.parent() )
- .offset( iframe.offset() )[0];
- });
- },
-
- _unblockFrames: function() {
- if ( this.iframeBlocks ) {
- this.iframeBlocks.remove();
- delete this.iframeBlocks;
- }
- },
-
- _allowInteraction: function( event ) {
- if ( $( event.target ).closest(".ui-dialog").length ) {
- return true;
- }
-
- // TODO: Remove hack when datepicker implements
- // the .ui-front logic (#8989)
- return !!$( event.target ).closest(".ui-datepicker").length;
- },
-
- _createOverlay: function() {
- if ( !this.options.modal ) {
- return;
- }
-
- var that = this,
- widgetFullName = this.widgetFullName;
- if ( !$.ui.dialog.overlayInstances ) {
- // Prevent use of anchors and inputs.
- // We use a delay in case the overlay is created from an
- // event that we're going to be cancelling. (#2804)
- this._delay(function() {
- // Handle .dialog().dialog("close") (#4065)
- if ( $.ui.dialog.overlayInstances ) {
- this.document.bind( "focusin.dialog", function( event ) {
- if ( !that._allowInteraction( event ) ) {
- event.preventDefault();
- $(".ui-dialog:visible:last .ui-dialog-content")
- .data( widgetFullName )._focusTabbable();
- }
- });
- }
- });
- }
-
- this.overlay = $("<div>")
- .addClass("ui-widget-overlay ui-front")
- .appendTo( this._appendTo() );
- this._on( this.overlay, {
- mousedown: "_keepFocus"
- });
- $.ui.dialog.overlayInstances++;
- },
-
- _destroyOverlay: function() {
- if ( !this.options.modal ) {
- return;
- }
-
- if ( this.overlay ) {
- $.ui.dialog.overlayInstances--;
-
- if ( !$.ui.dialog.overlayInstances ) {
- this.document.unbind( "focusin.dialog" );
- }
- this.overlay.remove();
- this.overlay = null;
- }
- }
-});
-
-$.ui.dialog.overlayInstances = 0;
-
-// DEPRECATED
-if ( $.uiBackCompat !== false ) {
- // position option with array notation
- // just override with old implementation
- $.widget( "ui.dialog", $.ui.dialog, {
- _position: function() {
- var position = this.options.position,
- myAt = [],
- offset = [ 0, 0 ],
- isVisible;
-
- if ( position ) {
- if ( typeof position === "string" || (typeof position === "object" && "0" in position ) ) {
- myAt = position.split ? position.split(" ") : [ position[0], position[1] ];
- if ( myAt.length === 1 ) {
- myAt[1] = myAt[0];
- }
-
- $.each( [ "left", "top" ], function( i, offsetPosition ) {
- if ( +myAt[ i ] === myAt[ i ] ) {
- offset[ i ] = myAt[ i ];
- myAt[ i ] = offsetPosition;
- }
- });
-
- position = {
- my: myAt[0] + (offset[0] < 0 ? offset[0] : "+" + offset[0]) + " " +
- myAt[1] + (offset[1] < 0 ? offset[1] : "+" + offset[1]),
- at: myAt.join(" ")
- };
- }
-
- position = $.extend( {}, $.ui.dialog.prototype.options.position, position );
- } else {
- position = $.ui.dialog.prototype.options.position;
- }
-
- // need to show the dialog to get the actual offset in the position plugin
- isVisible = this.uiDialog.is(":visible");
- if ( !isVisible ) {
- this.uiDialog.show();
- }
- this.uiDialog.position( position );
- if ( !isVisible ) {
- this.uiDialog.hide();
- }
- }
- });
-}
-
-}( jQuery ) );
-
-(function( $, undefined ) {
-
-var rvertical = /up|down|vertical/,
- rpositivemotion = /up|left|vertical|horizontal/;
-
-$.effects.effect.blind = function( o, done ) {
- // Create element
- var el = $( this ),
- props = [ "position", "top", "bottom", "left", "right", "height", "width" ],
- mode = $.effects.setMode( el, o.mode || "hide" ),
- direction = o.direction || "up",
- vertical = rvertical.test( direction ),
- ref = vertical ? "height" : "width",
- ref2 = vertical ? "top" : "left",
- motion = rpositivemotion.test( direction ),
- animation = {},
- show = mode === "show",
- wrapper, distance, margin;
-
- // if already wrapped, the wrapper's properties are my property. #6245
- if ( el.parent().is( ".ui-effects-wrapper" ) ) {
- $.effects.save( el.parent(), props );
- } else {
- $.effects.save( el, props );
- }
- el.show();
- wrapper = $.effects.createWrapper( el ).css({
- overflow: "hidden"
- });
-
- distance = wrapper[ ref ]();
- margin = parseFloat( wrapper.css( ref2 ) ) || 0;
-
- animation[ ref ] = show ? distance : 0;
- if ( !motion ) {
- el
- .css( vertical ? "bottom" : "right", 0 )
- .css( vertical ? "top" : "left", "auto" )
- .css({ position: "absolute" });
-
- animation[ ref2 ] = show ? margin : distance + margin;
- }
-
- // start at 0 if we are showing
- if ( show ) {
- wrapper.css( ref, 0 );
- if ( ! motion ) {
- wrapper.css( ref2, margin + distance );
- }
- }
-
- // Animate
- wrapper.animate( animation, {
- duration: o.duration,
- easing: o.easing,
- queue: false,
- complete: function() {
- if ( mode === "hide" ) {
- el.hide();
- }
- $.effects.restore( el, props );
- $.effects.removeWrapper( el );
- done();
- }
- });
-
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.effects.effect.bounce = function( o, done ) {
- var el = $( this ),
- props = [ "position", "top", "bottom", "left", "right", "height", "width" ],
-
- // defaults:
- mode = $.effects.setMode( el, o.mode || "effect" ),
- hide = mode === "hide",
- show = mode === "show",
- direction = o.direction || "up",
- distance = o.distance,
- times = o.times || 5,
-
- // number of internal animations
- anims = times * 2 + ( show || hide ? 1 : 0 ),
- speed = o.duration / anims,
- easing = o.easing,
-
- // utility:
- ref = ( direction === "up" || direction === "down" ) ? "top" : "left",
- motion = ( direction === "up" || direction === "left" ),
- i,
- upAnim,
- downAnim,
-
- // we will need to re-assemble the queue to stack our animations in place
- queue = el.queue(),
- queuelen = queue.length;
-
- // Avoid touching opacity to prevent clearType and PNG issues in IE
- if ( show || hide ) {
- props.push( "opacity" );
- }
-
- $.effects.save( el, props );
- el.show();
- $.effects.createWrapper( el ); // Create Wrapper
-
- // default distance for the BIGGEST bounce is the outer Distance / 3
- if ( !distance ) {
- distance = el[ ref === "top" ? "outerHeight" : "outerWidth" ]() / 3;
- }
-
- if ( show ) {
- downAnim = { opacity: 1 };
- downAnim[ ref ] = 0;
-
- // if we are showing, force opacity 0 and set the initial position
- // then do the "first" animation
- el.css( "opacity", 0 )
- .css( ref, motion ? -distance * 2 : distance * 2 )
- .animate( downAnim, speed, easing );
- }
-
- // start at the smallest distance if we are hiding
- if ( hide ) {
- distance = distance / Math.pow( 2, times - 1 );
- }
-
- downAnim = {};
- downAnim[ ref ] = 0;
- // Bounces up/down/left/right then back to 0 -- times * 2 animations happen here
- for ( i = 0; i < times; i++ ) {
- upAnim = {};
- upAnim[ ref ] = ( motion ? "-=" : "+=" ) + distance;
-
- el.animate( upAnim, speed, easing )
- .animate( downAnim, speed, easing );
-
- distance = hide ? distance * 2 : distance / 2;
- }
-
- // Last Bounce when Hiding
- if ( hide ) {
- upAnim = { opacity: 0 };
- upAnim[ ref ] = ( motion ? "-=" : "+=" ) + distance;
-
- el.animate( upAnim, speed, easing );
- }
-
- el.queue(function() {
- if ( hide ) {
- el.hide();
- }
- $.effects.restore( el, props );
- $.effects.removeWrapper( el );
- done();
- });
-
- // inject all the animations we just queued to be first in line (after "inprogress")
- if ( queuelen > 1) {
- queue.splice.apply( queue,
- [ 1, 0 ].concat( queue.splice( queuelen, anims + 1 ) ) );
- }
- el.dequeue();
-
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.effects.effect.clip = function( o, done ) {
- // Create element
- var el = $( this ),
- props = [ "position", "top", "bottom", "left", "right", "height", "width" ],
- mode = $.effects.setMode( el, o.mode || "hide" ),
- show = mode === "show",
- direction = o.direction || "vertical",
- vert = direction === "vertical",
- size = vert ? "height" : "width",
- position = vert ? "top" : "left",
- animation = {},
- wrapper, animate, distance;
-
- // Save & Show
- $.effects.save( el, props );
- el.show();
-
- // Create Wrapper
- wrapper = $.effects.createWrapper( el ).css({
- overflow: "hidden"
- });
- animate = ( el[0].tagName === "IMG" ) ? wrapper : el;
- distance = animate[ size ]();
-
- // Shift
- if ( show ) {
- animate.css( size, 0 );
- animate.css( position, distance / 2 );
- }
-
- // Create Animation Object:
- animation[ size ] = show ? distance : 0;
- animation[ position ] = show ? 0 : distance / 2;
-
- // Animate
- animate.animate( animation, {
- queue: false,
- duration: o.duration,
- easing: o.easing,
- complete: function() {
- if ( !show ) {
- el.hide();
- }
- $.effects.restore( el, props );
- $.effects.removeWrapper( el );
- done();
- }
- });
-
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.effects.effect.drop = function( o, done ) {
-
- var el = $( this ),
- props = [ "position", "top", "bottom", "left", "right", "opacity", "height", "width" ],
- mode = $.effects.setMode( el, o.mode || "hide" ),
- show = mode === "show",
- direction = o.direction || "left",
- ref = ( direction === "up" || direction === "down" ) ? "top" : "left",
- motion = ( direction === "up" || direction === "left" ) ? "pos" : "neg",
- animation = {
- opacity: show ? 1 : 0
- },
- distance;
-
- // Adjust
- $.effects.save( el, props );
- el.show();
- $.effects.createWrapper( el );
-
- distance = o.distance || el[ ref === "top" ? "outerHeight": "outerWidth" ]( true ) / 2;
-
- if ( show ) {
- el
- .css( "opacity", 0 )
- .css( ref, motion === "pos" ? -distance : distance );
- }
-
- // Animation
- animation[ ref ] = ( show ?
- ( motion === "pos" ? "+=" : "-=" ) :
- ( motion === "pos" ? "-=" : "+=" ) ) +
- distance;
-
- // Animate
- el.animate( animation, {
- queue: false,
- duration: o.duration,
- easing: o.easing,
- complete: function() {
- if ( mode === "hide" ) {
- el.hide();
- }
- $.effects.restore( el, props );
- $.effects.removeWrapper( el );
- done();
- }
- });
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.effects.effect.explode = function( o, done ) {
-
- var rows = o.pieces ? Math.round( Math.sqrt( o.pieces ) ) : 3,
- cells = rows,
- el = $( this ),
- mode = $.effects.setMode( el, o.mode || "hide" ),
- show = mode === "show",
-
- // show and then visibility:hidden the element before calculating offset
- offset = el.show().css( "visibility", "hidden" ).offset(),
-
- // width and height of a piece
- width = Math.ceil( el.outerWidth() / cells ),
- height = Math.ceil( el.outerHeight() / rows ),
- pieces = [],
-
- // loop
- i, j, left, top, mx, my;
-
- // children animate complete:
- function childComplete() {
- pieces.push( this );
- if ( pieces.length === rows * cells ) {
- animComplete();
- }
- }
-
- // clone the element for each row and cell.
- for( i = 0; i < rows ; i++ ) { // ===>
- top = offset.top + i * height;
- my = i - ( rows - 1 ) / 2 ;
-
- for( j = 0; j < cells ; j++ ) { // |||
- left = offset.left + j * width;
- mx = j - ( cells - 1 ) / 2 ;
-
- // Create a clone of the now hidden main element that will be absolute positioned
- // within a wrapper div off the -left and -top equal to size of our pieces
- el
- .clone()
- .appendTo( "body" )
- .wrap( "<div></div>" )
- .css({
- position: "absolute",
- visibility: "visible",
- left: -j * width,
- top: -i * height
- })
-
- // select the wrapper - make it overflow: hidden and absolute positioned based on
- // where the original was located +left and +top equal to the size of pieces
- .parent()
- .addClass( "ui-effects-explode" )
- .css({
- position: "absolute",
- overflow: "hidden",
- width: width,
- height: height,
- left: left + ( show ? mx * width : 0 ),
- top: top + ( show ? my * height : 0 ),
- opacity: show ? 0 : 1
- }).animate({
- left: left + ( show ? 0 : mx * width ),
- top: top + ( show ? 0 : my * height ),
- opacity: show ? 1 : 0
- }, o.duration || 500, o.easing, childComplete );
- }
- }
-
- function animComplete() {
- el.css({
- visibility: "visible"
- });
- $( pieces ).remove();
- if ( !show ) {
- el.hide();
- }
- done();
- }
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.effects.effect.fade = function( o, done ) {
- var el = $( this ),
- mode = $.effects.setMode( el, o.mode || "toggle" );
-
- el.animate({
- opacity: mode
- }, {
- queue: false,
- duration: o.duration,
- easing: o.easing,
- complete: done
- });
-};
-
-})( jQuery );
-
-(function( $, undefined ) {
-
-$.effects.effect.fold = function( o, done ) {
-
- // Create element
- var el = $( this ),
- props = [ "position", "top", "bottom", "left", "right", "height", "width" ],
- mode = $.effects.setMode( el, o.mode || "hide" ),
- show = mode === "show",
- hide = mode === "hide",
- size = o.size || 15,
- percent = /([0-9]+)%/.exec( size ),
- horizFirst = !!o.horizFirst,
- widthFirst = show !== horizFirst,
- ref = widthFirst ? [ "width", "height" ] : [ "height", "width" ],
- duration = o.duration / 2,
- wrapper, distance,
- animation1 = {},
- animation2 = {};
-
- $.effects.save( el, props );
- el.show();
-
- // Create Wrapper
- wrapper = $.effects.createWrapper( el ).css({
- overflow: "hidden"
- });
- distance = widthFirst ?
- [ wrapper.width(), wrapper.height() ] :
- [ wrapper.height(), wrapper.width() ];
-
- if ( percent ) {
- size = parseInt( percent[ 1 ], 10 ) / 100 * distance[ hide ? 0 : 1 ];
- }
- if ( show ) {
- wrapper.css( horizFirst ? {
- height: 0,
- width: size
- } : {
- height: size,
- width: 0
- });
- }
-
- // Animation
- animation1[ ref[ 0 ] ] = show ? distance[ 0 ] : size;
- animation2[ ref[ 1 ] ] = show ? distance[ 1 ] : 0;
-
- // Animate
- wrapper
- .animate( animation1, duration, o.easing )
- .animate( animation2, duration, o.easing, function() {
- if ( hide ) {
- el.hide();
- }
- $.effects.restore( el, props );
- $.effects.removeWrapper( el );
- done();
- });
-
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.effects.effect.highlight = function( o, done ) {
- var elem = $( this ),
- props = [ "backgroundImage", "backgroundColor", "opacity" ],
- mode = $.effects.setMode( elem, o.mode || "show" ),
- animation = {
- backgroundColor: elem.css( "backgroundColor" )
- };
-
- if (mode === "hide") {
- animation.opacity = 0;
- }
-
- $.effects.save( elem, props );
-
- elem
- .show()
- .css({
- backgroundImage: "none",
- backgroundColor: o.color || "#ffff99"
- })
- .animate( animation, {
- queue: false,
- duration: o.duration,
- easing: o.easing,
- complete: function() {
- if ( mode === "hide" ) {
- elem.hide();
- }
- $.effects.restore( elem, props );
- done();
- }
- });
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.effects.effect.pulsate = function( o, done ) {
- var elem = $( this ),
- mode = $.effects.setMode( elem, o.mode || "show" ),
- show = mode === "show",
- hide = mode === "hide",
- showhide = ( show || mode === "hide" ),
-
- // showing or hiding leaves of the "last" animation
- anims = ( ( o.times || 5 ) * 2 ) + ( showhide ? 1 : 0 ),
- duration = o.duration / anims,
- animateTo = 0,
- queue = elem.queue(),
- queuelen = queue.length,
- i;
-
- if ( show || !elem.is(":visible")) {
- elem.css( "opacity", 0 ).show();
- animateTo = 1;
- }
-
- // anims - 1 opacity "toggles"
- for ( i = 1; i < anims; i++ ) {
- elem.animate({
- opacity: animateTo
- }, duration, o.easing );
- animateTo = 1 - animateTo;
- }
-
- elem.animate({
- opacity: animateTo
- }, duration, o.easing);
-
- elem.queue(function() {
- if ( hide ) {
- elem.hide();
- }
- done();
- });
-
- // We just queued up "anims" animations, we need to put them next in the queue
- if ( queuelen > 1 ) {
- queue.splice.apply( queue,
- [ 1, 0 ].concat( queue.splice( queuelen, anims + 1 ) ) );
- }
- elem.dequeue();
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.effects.effect.puff = function( o, done ) {
- var elem = $( this ),
- mode = $.effects.setMode( elem, o.mode || "hide" ),
- hide = mode === "hide",
- percent = parseInt( o.percent, 10 ) || 150,
- factor = percent / 100,
- original = {
- height: elem.height(),
- width: elem.width(),
- outerHeight: elem.outerHeight(),
- outerWidth: elem.outerWidth()
- };
-
- $.extend( o, {
- effect: "scale",
- queue: false,
- fade: true,
- mode: mode,
- complete: done,
- percent: hide ? percent : 100,
- from: hide ?
- original :
- {
- height: original.height * factor,
- width: original.width * factor,
- outerHeight: original.outerHeight * factor,
- outerWidth: original.outerWidth * factor
- }
- });
-
- elem.effect( o );
-};
-
-$.effects.effect.scale = function( o, done ) {
-
- // Create element
- var el = $( this ),
- options = $.extend( true, {}, o ),
- mode = $.effects.setMode( el, o.mode || "effect" ),
- percent = parseInt( o.percent, 10 ) ||
- ( parseInt( o.percent, 10 ) === 0 ? 0 : ( mode === "hide" ? 0 : 100 ) ),
- direction = o.direction || "both",
- origin = o.origin,
- original = {
- height: el.height(),
- width: el.width(),
- outerHeight: el.outerHeight(),
- outerWidth: el.outerWidth()
- },
- factor = {
- y: direction !== "horizontal" ? (percent / 100) : 1,
- x: direction !== "vertical" ? (percent / 100) : 1
- };
-
- // We are going to pass this effect to the size effect:
- options.effect = "size";
- options.queue = false;
- options.complete = done;
-
- // Set default origin and restore for show/hide
- if ( mode !== "effect" ) {
- options.origin = origin || ["middle","center"];
- options.restore = true;
- }
-
- options.from = o.from || ( mode === "show" ? {
- height: 0,
- width: 0,
- outerHeight: 0,
- outerWidth: 0
- } : original );
- options.to = {
- height: original.height * factor.y,
- width: original.width * factor.x,
- outerHeight: original.outerHeight * factor.y,
- outerWidth: original.outerWidth * factor.x
- };
-
- // Fade option to support puff
- if ( options.fade ) {
- if ( mode === "show" ) {
- options.from.opacity = 0;
- options.to.opacity = 1;
- }
- if ( mode === "hide" ) {
- options.from.opacity = 1;
- options.to.opacity = 0;
- }
- }
-
- // Animate
- el.effect( options );
-
-};
-
-$.effects.effect.size = function( o, done ) {
-
- // Create element
- var original, baseline, factor,
- el = $( this ),
- props0 = [ "position", "top", "bottom", "left", "right", "width", "height", "overflow", "opacity" ],
-
- // Always restore
- props1 = [ "position", "top", "bottom", "left", "right", "overflow", "opacity" ],
-
- // Copy for children
- props2 = [ "width", "height", "overflow" ],
- cProps = [ "fontSize" ],
- vProps = [ "borderTopWidth", "borderBottomWidth", "paddingTop", "paddingBottom" ],
- hProps = [ "borderLeftWidth", "borderRightWidth", "paddingLeft", "paddingRight" ],
-
- // Set options
- mode = $.effects.setMode( el, o.mode || "effect" ),
- restore = o.restore || mode !== "effect",
- scale = o.scale || "both",
- origin = o.origin || [ "middle", "center" ],
- position = el.css( "position" ),
- props = restore ? props0 : props1,
- zero = {
- height: 0,
- width: 0,
- outerHeight: 0,
- outerWidth: 0
- };
-
- if ( mode === "show" ) {
- el.show();
- }
- original = {
- height: el.height(),
- width: el.width(),
- outerHeight: el.outerHeight(),
- outerWidth: el.outerWidth()
- };
-
- if ( o.mode === "toggle" && mode === "show" ) {
- el.from = o.to || zero;
- el.to = o.from || original;
- } else {
- el.from = o.from || ( mode === "show" ? zero : original );
- el.to = o.to || ( mode === "hide" ? zero : original );
- }
-
- // Set scaling factor
- factor = {
- from: {
- y: el.from.height / original.height,
- x: el.from.width / original.width
- },
- to: {
- y: el.to.height / original.height,
- x: el.to.width / original.width
- }
- };
-
- // Scale the css box
- if ( scale === "box" || scale === "both" ) {
-
- // Vertical props scaling
- if ( factor.from.y !== factor.to.y ) {
- props = props.concat( vProps );
- el.from = $.effects.setTransition( el, vProps, factor.from.y, el.from );
- el.to = $.effects.setTransition( el, vProps, factor.to.y, el.to );
- }
-
- // Horizontal props scaling
- if ( factor.from.x !== factor.to.x ) {
- props = props.concat( hProps );
- el.from = $.effects.setTransition( el, hProps, factor.from.x, el.from );
- el.to = $.effects.setTransition( el, hProps, factor.to.x, el.to );
- }
- }
-
- // Scale the content
- if ( scale === "content" || scale === "both" ) {
-
- // Vertical props scaling
- if ( factor.from.y !== factor.to.y ) {
- props = props.concat( cProps ).concat( props2 );
- el.from = $.effects.setTransition( el, cProps, factor.from.y, el.from );
- el.to = $.effects.setTransition( el, cProps, factor.to.y, el.to );
- }
- }
-
- $.effects.save( el, props );
- el.show();
- $.effects.createWrapper( el );
- el.css( "overflow", "hidden" ).css( el.from );
-
- // Adjust
- if (origin) { // Calculate baseline shifts
- baseline = $.effects.getBaseline( origin, original );
- el.from.top = ( original.outerHeight - el.outerHeight() ) * baseline.y;
- el.from.left = ( original.outerWidth - el.outerWidth() ) * baseline.x;
- el.to.top = ( original.outerHeight - el.to.outerHeight ) * baseline.y;
- el.to.left = ( original.outerWidth - el.to.outerWidth ) * baseline.x;
- }
- el.css( el.from ); // set top & left
-
- // Animate
- if ( scale === "content" || scale === "both" ) { // Scale the children
-
- // Add margins/font-size
- vProps = vProps.concat([ "marginTop", "marginBottom" ]).concat(cProps);
- hProps = hProps.concat([ "marginLeft", "marginRight" ]);
- props2 = props0.concat(vProps).concat(hProps);
-
- el.find( "*[width]" ).each( function(){
- var child = $( this ),
- c_original = {
- height: child.height(),
- width: child.width(),
- outerHeight: child.outerHeight(),
- outerWidth: child.outerWidth()
- };
- if (restore) {
- $.effects.save(child, props2);
- }
-
- child.from = {
- height: c_original.height * factor.from.y,
- width: c_original.width * factor.from.x,
- outerHeight: c_original.outerHeight * factor.from.y,
- outerWidth: c_original.outerWidth * factor.from.x
- };
- child.to = {
- height: c_original.height * factor.to.y,
- width: c_original.width * factor.to.x,
- outerHeight: c_original.height * factor.to.y,
- outerWidth: c_original.width * factor.to.x
- };
-
- // Vertical props scaling
- if ( factor.from.y !== factor.to.y ) {
- child.from = $.effects.setTransition( child, vProps, factor.from.y, child.from );
- child.to = $.effects.setTransition( child, vProps, factor.to.y, child.to );
- }
-
- // Horizontal props scaling
- if ( factor.from.x !== factor.to.x ) {
- child.from = $.effects.setTransition( child, hProps, factor.from.x, child.from );
- child.to = $.effects.setTransition( child, hProps, factor.to.x, child.to );
- }
-
- // Animate children
- child.css( child.from );
- child.animate( child.to, o.duration, o.easing, function() {
-
- // Restore children
- if ( restore ) {
- $.effects.restore( child, props2 );
- }
- });
- });
- }
-
- // Animate
- el.animate( el.to, {
- queue: false,
- duration: o.duration,
- easing: o.easing,
- complete: function() {
- if ( el.to.opacity === 0 ) {
- el.css( "opacity", el.from.opacity );
- }
- if( mode === "hide" ) {
- el.hide();
- }
- $.effects.restore( el, props );
- if ( !restore ) {
-
- // we need to calculate our new positioning based on the scaling
- if ( position === "static" ) {
- el.css({
- position: "relative",
- top: el.to.top,
- left: el.to.left
- });
- } else {
- $.each([ "top", "left" ], function( idx, pos ) {
- el.css( pos, function( _, str ) {
- var val = parseInt( str, 10 ),
- toRef = idx ? el.to.left : el.to.top;
-
- // if original was "auto", recalculate the new value from wrapper
- if ( str === "auto" ) {
- return toRef + "px";
- }
-
- return val + toRef + "px";
- });
- });
- }
- }
-
- $.effects.removeWrapper( el );
- done();
- }
- });
-
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.effects.effect.shake = function( o, done ) {
-
- var el = $( this ),
- props = [ "position", "top", "bottom", "left", "right", "height", "width" ],
- mode = $.effects.setMode( el, o.mode || "effect" ),
- direction = o.direction || "left",
- distance = o.distance || 20,
- times = o.times || 3,
- anims = times * 2 + 1,
- speed = Math.round(o.duration/anims),
- ref = (direction === "up" || direction === "down") ? "top" : "left",
- positiveMotion = (direction === "up" || direction === "left"),
- animation = {},
- animation1 = {},
- animation2 = {},
- i,
-
- // we will need to re-assemble the queue to stack our animations in place
- queue = el.queue(),
- queuelen = queue.length;
-
- $.effects.save( el, props );
- el.show();
- $.effects.createWrapper( el );
-
- // Animation
- animation[ ref ] = ( positiveMotion ? "-=" : "+=" ) + distance;
- animation1[ ref ] = ( positiveMotion ? "+=" : "-=" ) + distance * 2;
- animation2[ ref ] = ( positiveMotion ? "-=" : "+=" ) + distance * 2;
-
- // Animate
- el.animate( animation, speed, o.easing );
-
- // Shakes
- for ( i = 1; i < times; i++ ) {
- el.animate( animation1, speed, o.easing ).animate( animation2, speed, o.easing );
- }
- el
- .animate( animation1, speed, o.easing )
- .animate( animation, speed / 2, o.easing )
- .queue(function() {
- if ( mode === "hide" ) {
- el.hide();
- }
- $.effects.restore( el, props );
- $.effects.removeWrapper( el );
- done();
- });
-
- // inject all the animations we just queued to be first in line (after "inprogress")
- if ( queuelen > 1) {
- queue.splice.apply( queue,
- [ 1, 0 ].concat( queue.splice( queuelen, anims + 1 ) ) );
- }
- el.dequeue();
-
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.effects.effect.slide = function( o, done ) {
-
- // Create element
- var el = $( this ),
- props = [ "position", "top", "bottom", "left", "right", "width", "height" ],
- mode = $.effects.setMode( el, o.mode || "show" ),
- show = mode === "show",
- direction = o.direction || "left",
- ref = (direction === "up" || direction === "down") ? "top" : "left",
- positiveMotion = (direction === "up" || direction === "left"),
- distance,
- animation = {};
-
- // Adjust
- $.effects.save( el, props );
- el.show();
- distance = o.distance || el[ ref === "top" ? "outerHeight" : "outerWidth" ]( true );
-
- $.effects.createWrapper( el ).css({
- overflow: "hidden"
- });
-
- if ( show ) {
- el.css( ref, positiveMotion ? (isNaN(distance) ? "-" + distance : -distance) : distance );
- }
-
- // Animation
- animation[ ref ] = ( show ?
- ( positiveMotion ? "+=" : "-=") :
- ( positiveMotion ? "-=" : "+=")) +
- distance;
-
- // Animate
- el.animate( animation, {
- queue: false,
- duration: o.duration,
- easing: o.easing,
- complete: function() {
- if ( mode === "hide" ) {
- el.hide();
- }
- $.effects.restore( el, props );
- $.effects.removeWrapper( el );
- done();
- }
- });
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.effects.effect.transfer = function( o, done ) {
- var elem = $( this ),
- target = $( o.to ),
- targetFixed = target.css( "position" ) === "fixed",
- body = $("body"),
- fixTop = targetFixed ? body.scrollTop() : 0,
- fixLeft = targetFixed ? body.scrollLeft() : 0,
- endPosition = target.offset(),
- animation = {
- top: endPosition.top - fixTop ,
- left: endPosition.left - fixLeft ,
- height: target.innerHeight(),
- width: target.innerWidth()
- },
- startPosition = elem.offset(),
- transfer = $( "<div class='ui-effects-transfer'></div>" )
- .appendTo( document.body )
- .addClass( o.className )
- .css({
- top: startPosition.top - fixTop ,
- left: startPosition.left - fixLeft ,
- height: elem.innerHeight(),
- width: elem.innerWidth(),
- position: targetFixed ? "fixed" : "absolute"
- })
- .animate( animation, o.duration, o.easing, function() {
- transfer.remove();
- done();
- });
-};
-
-})(jQuery);
-
-(function( $, undefined ) {
-
-$.widget( "ui.menu", {
- version: "1.10.3",
- defaultElement: "<ul>",
- delay: 300,
- options: {
- icons: {
- submenu: "ui-icon-carat-1-e"
- },
- menus: "ul",
- position: {
- my: "left top",
- at: "right top"
- },
- role: "menu",
-
- // callbacks
- blur: null,
- focus: null,
- select: null
- },
-
- _create: function() {
- this.activeMenu = this.element;
- // flag used to prevent firing of the click handler
- // as the event bubbles up through nested menus
- this.mouseHandled = false;
- this.element
- .uniqueId()
- .addClass( "ui-menu ui-widget ui-widget-content ui-corner-all" )
- .toggleClass( "ui-menu-icons", !!this.element.find( ".ui-icon" ).length )
- .attr({
- role: this.options.role,
- tabIndex: 0
- })
- // need to catch all clicks on disabled menu
- // not possible through _on
- .bind( "click" + this.eventNamespace, $.proxy(function( event ) {
- if ( this.options.disabled ) {
- event.preventDefault();
- }
- }, this ));
-
- if ( this.options.disabled ) {
- this.element
- .addClass( "ui-state-disabled" )
- .attr( "aria-disabled", "true" );
- }
-
- this._on({
- // Prevent focus from sticking to links inside menu after clicking
- // them (focus should always stay on UL during navigation).
- "mousedown .ui-menu-item > a": function( event ) {
- event.preventDefault();
- },
- "click .ui-state-disabled > a": function( event ) {
- event.preventDefault();
- },
- "click .ui-menu-item:has(a)": function( event ) {
- var target = $( event.target ).closest( ".ui-menu-item" );
- if ( !this.mouseHandled && target.not( ".ui-state-disabled" ).length ) {
- this.mouseHandled = true;
-
- this.select( event );
- // Open submenu on click
- if ( target.has( ".ui-menu" ).length ) {
- this.expand( event );
- } else if ( !this.element.is( ":focus" ) ) {
- // Redirect focus to the menu
- this.element.trigger( "focus", [ true ] );
-
- // If the active item is on the top level, let it stay active.
- // Otherwise, blur the active item since it is no longer visible.
- if ( this.active && this.active.parents( ".ui-menu" ).length === 1 ) {
- clearTimeout( this.timer );
- }
- }
- }
- },
- "mouseenter .ui-menu-item": function( event ) {
- var target = $( event.currentTarget );
- // Remove ui-state-active class from siblings of the newly focused menu item
- // to avoid a jump caused by adjacent elements both having a class with a border
- target.siblings().children( ".ui-state-active" ).removeClass( "ui-state-active" );
- this.focus( event, target );
- },
- mouseleave: "collapseAll",
- "mouseleave .ui-menu": "collapseAll",
- focus: function( event, keepActiveItem ) {
- // If there's already an active item, keep it active
- // If not, activate the first item
- var item = this.active || this.element.children( ".ui-menu-item" ).eq( 0 );
-
- if ( !keepActiveItem ) {
- this.focus( event, item );
- }
- },
- blur: function( event ) {
- this._delay(function() {
- if ( !$.contains( this.element[0], this.document[0].activeElement ) ) {
- this.collapseAll( event );
- }
- });
- },
- keydown: "_keydown"
- });
-
- this.refresh();
-
- // Clicks outside of a menu collapse any open menus
- this._on( this.document, {
- click: function( event ) {
- if ( !$( event.target ).closest( ".ui-menu" ).length ) {
- this.collapseAll( event );
- }
-
- // Reset the mouseHandled flag
- this.mouseHandled = false;
- }
- });
- },
-
- _destroy: function() {
- // Destroy (sub)menus
- this.element
- .removeAttr( "aria-activedescendant" )
- .find( ".ui-menu" ).addBack()
- .removeClass( "ui-menu ui-widget ui-widget-content ui-corner-all ui-menu-icons" )
- .removeAttr( "role" )
- .removeAttr( "tabIndex" )
- .removeAttr( "aria-labelledby" )
- .removeAttr( "aria-expanded" )
- .removeAttr( "aria-hidden" )
- .removeAttr( "aria-disabled" )
- .removeUniqueId()
- .show();
-
- // Destroy menu items
- this.element.find( ".ui-menu-item" )
- .removeClass( "ui-menu-item" )
- .removeAttr( "role" )
- .removeAttr( "aria-disabled" )
- .children( "a" )
- .removeUniqueId()
- .removeClass( "ui-corner-all ui-state-hover" )
- .removeAttr( "tabIndex" )
- .removeAttr( "role" )
- .removeAttr( "aria-haspopup" )
- .children().each( function() {
- var elem = $( this );
- if ( elem.data( "ui-menu-submenu-carat" ) ) {
- elem.remove();
- }
- });
-
- // Destroy menu dividers
- this.element.find( ".ui-menu-divider" ).removeClass( "ui-menu-divider ui-widget-content" );
- },
-
- _keydown: function( event ) {
- /*jshint maxcomplexity:20*/
- var match, prev, character, skip, regex,
- preventDefault = true;
-
- function escape( value ) {
- return value.replace( /[\-\[\]{}()*+?.,\\\^$|#\s]/g, "\\$&" );
- }
-
- switch ( event.keyCode ) {
- case $.ui.keyCode.PAGE_UP:
- this.previousPage( event );
- break;
- case $.ui.keyCode.PAGE_DOWN:
- this.nextPage( event );
- break;
- case $.ui.keyCode.HOME:
- this._move( "first", "first", event );
- break;
- case $.ui.keyCode.END:
- this._move( "last", "last", event );
- break;
- case $.ui.keyCode.UP:
- this.previous( event );
- break;
- case $.ui.keyCode.DOWN:
- this.next( event );
- break;
- case $.ui.keyCode.LEFT:
- this.collapse( event );
- break;
- case $.ui.keyCode.RIGHT:
- if ( this.active && !this.active.is( ".ui-state-disabled" ) ) {
- this.expand( event );
- }
- break;
- case $.ui.keyCode.ENTER:
- case $.ui.keyCode.SPACE:
- this._activate( event );
- break;
- case $.ui.keyCode.ESCAPE:
- this.collapse( event );
- break;
- default:
- preventDefault = false;
- prev = this.previousFilter || "";
- character = String.fromCharCode( event.keyCode );
- skip = false;
-
- clearTimeout( this.filterTimer );
-
- if ( character === prev ) {
- skip = true;
- } else {
- character = prev + character;
- }
-
- regex = new RegExp( "^" + escape( character ), "i" );
- match = this.activeMenu.children( ".ui-menu-item" ).filter(function() {
- return regex.test( $( this ).children( "a" ).text() );
- });
- match = skip && match.index( this.active.next() ) !== -1 ?
- this.active.nextAll( ".ui-menu-item" ) :
- match;
-
- // If no matches on the current filter, reset to the last character pressed
- // to move down the menu to the first item that starts with that character
- if ( !match.length ) {
- character = String.fromCharCode( event.keyCode );
- regex = new RegExp( "^" + escape( character ), "i" );
- match = this.activeMenu.children( ".ui-menu-item" ).filter(function() {
- return regex.test( $( this ).children( "a" ).text() );
- });
- }
-
- if ( match.length ) {
- this.focus( event, match );
- if ( match.length > 1 ) {
- this.previousFilter = character;
- this.filterTimer = this._delay(function() {
- delete this.previousFilter;
- }, 1000 );
- } else {
- delete this.previousFilter;
- }
- } else {
- delete this.previousFilter;
- }
- }
-
- if ( preventDefault ) {
- event.preventDefault();
- }
- },
-
- _activate: function( event ) {
- if ( !this.active.is( ".ui-state-disabled" ) ) {
- if ( this.active.children( "a[aria-haspopup='true']" ).length ) {
- this.expand( event );
- } else {
- this.select( event );
- }
- }
- },
-
- refresh: function() {
- var menus,
- icon = this.options.icons.submenu,
- submenus = this.element.find( this.options.menus );
-
- // Initialize nested menus
- submenus.filter( ":not(.ui-menu)" )
- .addClass( "ui-menu ui-widget ui-widget-content ui-corner-all" )
- .hide()
- .attr({
- role: this.options.role,
- "aria-hidden": "true",
- "aria-expanded": "false"
- })
- .each(function() {
- var menu = $( this ),
- item = menu.prev( "a" ),
- submenuCarat = $( "<span>" )
- .addClass( "ui-menu-icon ui-icon " + icon )
- .data( "ui-menu-submenu-carat", true );
-
- item
- .attr( "aria-haspopup", "true" )
- .prepend( submenuCarat );
- menu.attr( "aria-labelledby", item.attr( "id" ) );
- });
-
- menus = submenus.add( this.element );
-
- // Don't refresh list items that are already adapted
- menus.children( ":not(.ui-menu-item):has(a)" )
- .addClass( "ui-menu-item" )
- .attr( "role", "presentation" )
- .children( "a" )
- .uniqueId()
- .addClass( "ui-corner-all" )
- .attr({
- tabIndex: -1,
- role: this._itemRole()
- });
-
- // Initialize unlinked menu-items containing spaces and/or dashes only as dividers
- menus.children( ":not(.ui-menu-item)" ).each(function() {
- var item = $( this );
- // hyphen, em dash, en dash
- if ( !/[^\-\u2014\u2013\s]/.test( item.text() ) ) {
- item.addClass( "ui-widget-content ui-menu-divider" );
- }
- });
-
- // Add aria-disabled attribute to any disabled menu item
- menus.children( ".ui-state-disabled" ).attr( "aria-disabled", "true" );
-
- // If the active item has been removed, blur the menu
- if ( this.active && !$.contains( this.element[ 0 ], this.active[ 0 ] ) ) {
- this.blur();
- }
- },
-
- _itemRole: function() {
- return {
- menu: "menuitem",
- listbox: "option"
- }[ this.options.role ];
- },
-
- _setOption: function( key, value ) {
- if ( key === "icons" ) {
- this.element.find( ".ui-menu-icon" )
- .removeClass( this.options.icons.submenu )
- .addClass( value.submenu );
- }
- this._super( key, value );
- },
-
- focus: function( event, item ) {
- var nested, focused;
- this.blur( event, event && event.type === "focus" );
-
- this._scrollIntoView( item );
-
- this.active = item.first();
- focused = this.active.children( "a" ).addClass( "ui-state-focus" );
- // Only update aria-activedescendant if there's a role
- // otherwise we assume focus is managed elsewhere
- if ( this.options.role ) {
- this.element.attr( "aria-activedescendant", focused.attr( "id" ) );
- }
-
- // Highlight active parent menu item, if any
- this.active
- .parent()
- .closest( ".ui-menu-item" )
- .children( "a:first" )
- .addClass( "ui-state-active" );
-
- if ( event && event.type === "keydown" ) {
- this._close();
- } else {
- this.timer = this._delay(function() {
- this._close();
- }, this.delay );
- }
-
- nested = item.children( ".ui-menu" );
- if ( nested.length && ( /^mouse/.test( event.type ) ) ) {
- this._startOpening(nested);
- }
- this.activeMenu = item.parent();
-
- this._trigger( "focus", event, { item: item } );
- },
-
- _scrollIntoView: function( item ) {
- var borderTop, paddingTop, offset, scroll, elementHeight, itemHeight;
- if ( this._hasScroll() ) {
- borderTop = parseFloat( $.css( this.activeMenu[0], "borderTopWidth" ) ) || 0;
- paddingTop = parseFloat( $.css( this.activeMenu[0], "paddingTop" ) ) || 0;
- offset = item.offset().top - this.activeMenu.offset().top - borderTop - paddingTop;
- scroll = this.activeMenu.scrollTop();
- elementHeight = this.activeMenu.height();
- itemHeight = item.height();
-
- if ( offset < 0 ) {
- this.activeMenu.scrollTop( scroll + offset );
- } else if ( offset + itemHeight > elementHeight ) {
- this.activeMenu.scrollTop( scroll + offset - elementHeight + itemHeight );
- }
- }
- },
-
- blur: function( event, fromFocus ) {
- if ( !fromFocus ) {
- clearTimeout( this.timer );
- }
-
- if ( !this.active ) {
- return;
- }
-
- this.active.children( "a" ).removeClass( "ui-state-focus" );
- this.active = null;
-
- this._trigger( "blur", event, { item: this.active } );
- },
-
- _startOpening: function( submenu ) {
- clearTimeout( this.timer );
-
- // Don't open if already open fixes a Firefox bug that caused a .5 pixel
- // shift in the submenu position when mousing over the carat icon
- if ( submenu.attr( "aria-hidden" ) !== "true" ) {
- return;
- }
-
- this.timer = this._delay(function() {
- this._close();
- this._open( submenu );
- }, this.delay );
- },
-
- _open: function( submenu ) {
- var position = $.extend({
- of: this.active
- }, this.options.position );
-
- clearTimeout( this.timer );
- this.element.find( ".ui-menu" ).not( submenu.parents( ".ui-menu" ) )
- .hide()
- .attr( "aria-hidden", "true" );
-
- submenu
- .show()
- .removeAttr( "aria-hidden" )
- .attr( "aria-expanded", "true" )
- .position( position );
- },
-
- collapseAll: function( event, all ) {
- clearTimeout( this.timer );
- this.timer = this._delay(function() {
- // If we were passed an event, look for the submenu that contains the event
- var currentMenu = all ? this.element :
- $( event && event.target ).closest( this.element.find( ".ui-menu" ) );
-
- // If we found no valid submenu ancestor, use the main menu to close all sub menus anyway
- if ( !currentMenu.length ) {
- currentMenu = this.element;
- }
-
- this._close( currentMenu );
-
- this.blur( event );
- this.activeMenu = currentMenu;
- }, this.delay );
- },
-
- // With no arguments, closes the currently active menu - if nothing is active
- // it closes all menus. If passed an argument, it will search for menus BELOW
- _close: function( startMenu ) {
- if ( !startMenu ) {
- startMenu = this.active ? this.active.parent() : this.element;
- }
-
- startMenu
- .find( ".ui-menu" )
- .hide()
- .attr( "aria-hidden", "true" )
- .attr( "aria-expanded", "false" )
- .end()
- .find( "a.ui-state-active" )
- .removeClass( "ui-state-active" );
- },
-
- collapse: function( event ) {
- var newItem = this.active &&
- this.active.parent().closest( ".ui-menu-item", this.element );
- if ( newItem && newItem.length ) {
- this._close();
- this.focus( event, newItem );
- }
- },
-
- expand: function( event ) {
- var newItem = this.active &&
- this.active
- .children( ".ui-menu " )
- .children( ".ui-menu-item" )
- .first();
-
- if ( newItem && newItem.length ) {
- this._open( newItem.parent() );
-
- // Delay so Firefox will not hide activedescendant change in expanding submenu from AT
- this._delay(function() {
- this.focus( event, newItem );
- });
- }
- },
-
- next: function( event ) {
- this._move( "next", "first", event );
- },
-
- previous: function( event ) {
- this._move( "prev", "last", event );
- },
-
- isFirstItem: function() {
- return this.active && !this.active.prevAll( ".ui-menu-item" ).length;
- },
-
- isLastItem: function() {
- return this.active && !this.active.nextAll( ".ui-menu-item" ).length;
- },
-
- _move: function( direction, filter, event ) {
- var next;
- if ( this.active ) {
- if ( direction === "first" || direction === "last" ) {
- next = this.active
- [ direction === "first" ? "prevAll" : "nextAll" ]( ".ui-menu-item" )
- .eq( -1 );
- } else {
- next = this.active
- [ direction + "All" ]( ".ui-menu-item" )
- .eq( 0 );
- }
- }
- if ( !next || !next.length || !this.active ) {
- next = this.activeMenu.children( ".ui-menu-item" )[ filter ]();
- }
-
- this.focus( event, next );
- },
-
- nextPage: function( event ) {
- var item, base, height;
-
- if ( !this.active ) {
- this.next( event );
- return;
- }
- if ( this.isLastItem() ) {
- return;
- }
- if ( this._hasScroll() ) {
- base = this.active.offset().top;
- height = this.element.height();
- this.active.nextAll( ".ui-menu-item" ).each(function() {
- item = $( this );
- return item.offset().top - base - height < 0;
- });
-
- this.focus( event, item );
- } else {
- this.focus( event, this.activeMenu.children( ".ui-menu-item" )
- [ !this.active ? "first" : "last" ]() );
- }
- },
-
- previousPage: function( event ) {
- var item, base, height;
- if ( !this.active ) {
- this.next( event );
- return;
- }
- if ( this.isFirstItem() ) {
- return;
- }
- if ( this._hasScroll() ) {
- base = this.active.offset().top;
- height = this.element.height();
- this.active.prevAll( ".ui-menu-item" ).each(function() {
- item = $( this );
- return item.offset().top - base + height > 0;
- });
-
- this.focus( event, item );
- } else {
- this.focus( event, this.activeMenu.children( ".ui-menu-item" ).first() );
- }
- },
-
- _hasScroll: function() {
- return this.element.outerHeight() < this.element.prop( "scrollHeight" );
- },
-
- select: function( event ) {
- // TODO: It should never be possible to not have an active item at this
- // point, but the tests don't trigger mouseenter before click.
- this.active = this.active || $( event.target ).closest( ".ui-menu-item" );
- var ui = { item: this.active };
- if ( !this.active.has( ".ui-menu" ).length ) {
- this.collapseAll( event, true );
- }
- this._trigger( "select", event, ui );
- }
-});
-
-}( jQuery ));
-
-(function( $, undefined ) {
-
-$.ui = $.ui || {};
-
-var cachedScrollbarWidth,
- max = Math.max,
- abs = Math.abs,
- round = Math.round,
- rhorizontal = /left|center|right/,
- rvertical = /top|center|bottom/,
- roffset = /[\+\-]\d+(\.[\d]+)?%?/,
- rposition = /^\w+/,
- rpercent = /%$/,
- _position = $.fn.position;
-
-function getOffsets( offsets, width, height ) {
- return [
- parseFloat( offsets[ 0 ] ) * ( rpercent.test( offsets[ 0 ] ) ? width / 100 : 1 ),
- parseFloat( offsets[ 1 ] ) * ( rpercent.test( offsets[ 1 ] ) ? height / 100 : 1 )
- ];
-}
-
-function parseCss( element, property ) {
- return parseInt( $.css( element, property ), 10 ) || 0;
-}
-
-function getDimensions( elem ) {
- var raw = elem[0];
- if ( raw.nodeType === 9 ) {
- return {
- width: elem.width(),
- height: elem.height(),
- offset: { top: 0, left: 0 }
- };
- }
- if ( $.isWindow( raw ) ) {
- return {
- width: elem.width(),
- height: elem.height(),
- offset: { top: elem.scrollTop(), left: elem.scrollLeft() }
- };
- }
- if ( raw.preventDefault ) {
- return {
- width: 0,
- height: 0,
- offset: { top: raw.pageY, left: raw.pageX }
- };
- }
- return {
- width: elem.outerWidth(),
- height: elem.outerHeight(),
- offset: elem.offset()
- };
-}
-
-$.position = {
- scrollbarWidth: function() {
- if ( cachedScrollbarWidth !== undefined ) {
- return cachedScrollbarWidth;
- }
- var w1, w2,
- div = $( "<div style='display:block;width:50px;height:50px;overflow:hidden;'><div style='height:100px;width:auto;'></div></div>" ),
- innerDiv = div.children()[0];
-
- $( "body" ).append( div );
- w1 = innerDiv.offsetWidth;
- div.css( "overflow", "scroll" );
-
- w2 = innerDiv.offsetWidth;
-
- if ( w1 === w2 ) {
- w2 = div[0].clientWidth;
- }
-
- div.remove();
-
- return (cachedScrollbarWidth = w1 - w2);
- },
- getScrollInfo: function( within ) {
- var overflowX = within.isWindow ? "" : within.element.css( "overflow-x" ),
- overflowY = within.isWindow ? "" : within.element.css( "overflow-y" ),
- hasOverflowX = overflowX === "scroll" ||
- ( overflowX === "auto" && within.width < within.element[0].scrollWidth ),
- hasOverflowY = overflowY === "scroll" ||
- ( overflowY === "auto" && within.height < within.element[0].scrollHeight );
- return {
- width: hasOverflowY ? $.position.scrollbarWidth() : 0,
- height: hasOverflowX ? $.position.scrollbarWidth() : 0
- };
- },
- getWithinInfo: function( element ) {
- var withinElement = $( element || window ),
- isWindow = $.isWindow( withinElement[0] );
- return {
- element: withinElement,
- isWindow: isWindow,
- offset: withinElement.offset() || { left: 0, top: 0 },
- scrollLeft: withinElement.scrollLeft(),
- scrollTop: withinElement.scrollTop(),
- width: isWindow ? withinElement.width() : withinElement.outerWidth(),
- height: isWindow ? withinElement.height() : withinElement.outerHeight()
- };
- }
-};
-
-$.fn.position = function( options ) {
- if ( !options || !options.of ) {
- return _position.apply( this, arguments );
- }
-
- // make a copy, we don't want to modify arguments
- options = $.extend( {}, options );
-
- var atOffset, targetWidth, targetHeight, targetOffset, basePosition, dimensions,
- target = $( options.of ),
- within = $.position.getWithinInfo( options.within ),
- scrollInfo = $.position.getScrollInfo( within ),
- collision = ( options.collision || "flip" ).split( " " ),
- offsets = {};
-
- dimensions = getDimensions( target );
- if ( target[0].preventDefault ) {
- // force left top to allow flipping
- options.at = "left top";
- }
- targetWidth = dimensions.width;
- targetHeight = dimensions.height;
- targetOffset = dimensions.offset;
- // clone to reuse original targetOffset later
- basePosition = $.extend( {}, targetOffset );
-
- // force my and at to have valid horizontal and vertical positions
- // if a value is missing or invalid, it will be converted to center
- $.each( [ "my", "at" ], function() {
- var pos = ( options[ this ] || "" ).split( " " ),
- horizontalOffset,
- verticalOffset;
-
- if ( pos.length === 1) {
- pos = rhorizontal.test( pos[ 0 ] ) ?
- pos.concat( [ "center" ] ) :
- rvertical.test( pos[ 0 ] ) ?
- [ "center" ].concat( pos ) :
- [ "center", "center" ];
- }
- pos[ 0 ] = rhorizontal.test( pos[ 0 ] ) ? pos[ 0 ] : "center";
- pos[ 1 ] = rvertical.test( pos[ 1 ] ) ? pos[ 1 ] : "center";
-
- // calculate offsets
- horizontalOffset = roffset.exec( pos[ 0 ] );
- verticalOffset = roffset.exec( pos[ 1 ] );
- offsets[ this ] = [
- horizontalOffset ? horizontalOffset[ 0 ] : 0,
- verticalOffset ? verticalOffset[ 0 ] : 0
- ];
-
- // reduce to just the positions without the offsets
- options[ this ] = [
- rposition.exec( pos[ 0 ] )[ 0 ],
- rposition.exec( pos[ 1 ] )[ 0 ]
- ];
- });
-
- // normalize collision option
- if ( collision.length === 1 ) {
- collision[ 1 ] = collision[ 0 ];
- }
-
- if ( options.at[ 0 ] === "right" ) {
- basePosition.left += targetWidth;
- } else if ( options.at[ 0 ] === "center" ) {
- basePosition.left += targetWidth / 2;
- }
-
- if ( options.at[ 1 ] === "bottom" ) {
- basePosition.top += targetHeight;
- } else if ( options.at[ 1 ] === "center" ) {
- basePosition.top += targetHeight / 2;
- }
-
- atOffset = getOffsets( offsets.at, targetWidth, targetHeight );
- basePosition.left += atOffset[ 0 ];
- basePosition.top += atOffset[ 1 ];
-
- return this.each(function() {
- var collisionPosition, using,
- elem = $( this ),
- elemWidth = elem.outerWidth(),
- elemHeight = elem.outerHeight(),
- marginLeft = parseCss( this, "marginLeft" ),
- marginTop = parseCss( this, "marginTop" ),
- collisionWidth = elemWidth + marginLeft + parseCss( this, "marginRight" ) + scrollInfo.width,
- collisionHeight = elemHeight + marginTop + parseCss( this, "marginBottom" ) + scrollInfo.height,
- position = $.extend( {}, basePosition ),
- myOffset = getOffsets( offsets.my, elem.outerWidth(), elem.outerHeight() );
-
- if ( options.my[ 0 ] === "right" ) {
- position.left -= elemWidth;
- } else if ( options.my[ 0 ] === "center" ) {
- position.left -= elemWidth / 2;
- }
-
- if ( options.my[ 1 ] === "bottom" ) {
- position.top -= elemHeight;
- } else if ( options.my[ 1 ] === "center" ) {
- position.top -= elemHeight / 2;
- }
-
- position.left += myOffset[ 0 ];
- position.top += myOffset[ 1 ];
-
- // if the browser doesn't support fractions, then round for consistent results
- if ( !$.support.offsetFractions ) {
- position.left = round( position.left );
- position.top = round( position.top );
- }
-
- collisionPosition = {
- marginLeft: marginLeft,
- marginTop: marginTop
- };
-
- $.each( [ "left", "top" ], function( i, dir ) {
- if ( $.ui.position[ collision[ i ] ] ) {
- $.ui.position[ collision[ i ] ][ dir ]( position, {
- targetWidth: targetWidth,
- targetHeight: targetHeight,
- elemWidth: elemWidth,
- elemHeight: elemHeight,
- collisionPosition: collisionPosition,
- collisionWidth: collisionWidth,
- collisionHeight: collisionHeight,
- offset: [ atOffset[ 0 ] + myOffset[ 0 ], atOffset [ 1 ] + myOffset[ 1 ] ],
- my: options.my,
- at: options.at,
- within: within,
- elem : elem
- });
- }
- });
-
- if ( options.using ) {
- // adds feedback as second argument to using callback, if present
- using = function( props ) {
- var left = targetOffset.left - position.left,
- right = left + targetWidth - elemWidth,
- top = targetOffset.top - position.top,
- bottom = top + targetHeight - elemHeight,
- feedback = {
- target: {
- element: target,
- left: targetOffset.left,
- top: targetOffset.top,
- width: targetWidth,
- height: targetHeight
- },
- element: {
- element: elem,
- left: position.left,
- top: position.top,
- width: elemWidth,
- height: elemHeight
- },
- horizontal: right < 0 ? "left" : left > 0 ? "right" : "center",
- vertical: bottom < 0 ? "top" : top > 0 ? "bottom" : "middle"
- };
- if ( targetWidth < elemWidth && abs( left + right ) < targetWidth ) {
- feedback.horizontal = "center";
- }
- if ( targetHeight < elemHeight && abs( top + bottom ) < targetHeight ) {
- feedback.vertical = "middle";
- }
- if ( max( abs( left ), abs( right ) ) > max( abs( top ), abs( bottom ) ) ) {
- feedback.important = "horizontal";
- } else {
- feedback.important = "vertical";
- }
- options.using.call( this, props, feedback );
- };
- }
-
- elem.offset( $.extend( position, { using: using } ) );
- });
-};
-
-$.ui.position = {
- fit: {
- left: function( position, data ) {
- var within = data.within,
- withinOffset = within.isWindow ? within.scrollLeft : within.offset.left,
- outerWidth = within.width,
- collisionPosLeft = position.left - data.collisionPosition.marginLeft,
- overLeft = withinOffset - collisionPosLeft,
- overRight = collisionPosLeft + data.collisionWidth - outerWidth - withinOffset,
- newOverRight;
-
- // element is wider than within
- if ( data.collisionWidth > outerWidth ) {
- // element is initially over the left side of within
- if ( overLeft > 0 && overRight <= 0 ) {
- newOverRight = position.left + overLeft + data.collisionWidth - outerWidth - withinOffset;
- position.left += overLeft - newOverRight;
- // element is initially over right side of within
- } else if ( overRight > 0 && overLeft <= 0 ) {
- position.left = withinOffset;
- // element is initially over both left and right sides of within
- } else {
- if ( overLeft > overRight ) {
- position.left = withinOffset + outerWidth - data.collisionWidth;
- } else {
- position.left = withinOffset;
- }
- }
- // too far left -> align with left edge
- } else if ( overLeft > 0 ) {
- position.left += overLeft;
- // too far right -> align with right edge
- } else if ( overRight > 0 ) {
- position.left -= overRight;
- // adjust based on position and margin
- } else {
- position.left = max( position.left - collisionPosLeft, position.left );
- }
- },
- top: function( position, data ) {
- var within = data.within,
- withinOffset = within.isWindow ? within.scrollTop : within.offset.top,
- outerHeight = data.within.height,
- collisionPosTop = position.top - data.collisionPosition.marginTop,
- overTop = withinOffset - collisionPosTop,
- overBottom = collisionPosTop + data.collisionHeight - outerHeight - withinOffset,
- newOverBottom;
-
- // element is taller than within
- if ( data.collisionHeight > outerHeight ) {
- // element is initially over the top of within
- if ( overTop > 0 && overBottom <= 0 ) {
- newOverBottom = position.top + overTop + data.collisionHeight - outerHeight - withinOffset;
- position.top += overTop - newOverBottom;
- // element is initially over bottom of within
- } else if ( overBottom > 0 && overTop <= 0 ) {
- position.top = withinOffset;
- // element is initially over both top and bottom of within
- } else {
- if ( overTop > overBottom ) {
- position.top = withinOffset + outerHeight - data.collisionHeight;
- } else {
- position.top = withinOffset;
- }
- }
- // too far up -> align with top
- } else if ( overTop > 0 ) {
- position.top += overTop;
- // too far down -> align with bottom edge
- } else if ( overBottom > 0 ) {
- position.top -= overBottom;
- // adjust based on position and margin
- } else {
- position.top = max( position.top - collisionPosTop, position.top );
- }
- }
- },
- flip: {
- left: function( position, data ) {
- var within = data.within,
- withinOffset = within.offset.left + within.scrollLeft,
- outerWidth = within.width,
- offsetLeft = within.isWindow ? within.scrollLeft : within.offset.left,
- collisionPosLeft = position.left - data.collisionPosition.marginLeft,
- overLeft = collisionPosLeft - offsetLeft,
- overRight = collisionPosLeft + data.collisionWidth - outerWidth - offsetLeft,
- myOffset = data.my[ 0 ] === "left" ?
- -data.elemWidth :
- data.my[ 0 ] === "right" ?
- data.elemWidth :
- 0,
- atOffset = data.at[ 0 ] === "left" ?
- data.targetWidth :
- data.at[ 0 ] === "right" ?
- -data.targetWidth :
- 0,
- offset = -2 * data.offset[ 0 ],
- newOverRight,
- newOverLeft;
-
- if ( overLeft < 0 ) {
- newOverRight = position.left + myOffset + atOffset + offset + data.collisionWidth - outerWidth - withinOffset;
- if ( newOverRight < 0 || newOverRight < abs( overLeft ) ) {
- position.left += myOffset + atOffset + offset;
- }
- }
- else if ( overRight > 0 ) {
- newOverLeft = position.left - data.collisionPosition.marginLeft + myOffset + atOffset + offset - offsetLeft;
- if ( newOverLeft > 0 || abs( newOverLeft ) < overRight ) {
- position.left += myOffset + atOffset + offset;
- }
- }
- },
- top: function( position, data ) {
- var within = data.within,
- withinOffset = within.offset.top + within.scrollTop,
- outerHeight = within.height,
- offsetTop = within.isWindow ? within.scrollTop : within.offset.top,
- collisionPosTop = position.top - data.collisionPosition.marginTop,
- overTop = collisionPosTop - offsetTop,
- overBottom = collisionPosTop + data.collisionHeight - outerHeight - offsetTop,
- top = data.my[ 1 ] === "top",
- myOffset = top ?
- -data.elemHeight :
- data.my[ 1 ] === "bottom" ?
- data.elemHeight :
- 0,
- atOffset = data.at[ 1 ] === "top" ?
- data.targetHeight :
- data.at[ 1 ] === "bottom" ?
- -data.targetHeight :
- 0,
- offset = -2 * data.offset[ 1 ],
- newOverTop,
- newOverBottom;
- if ( overTop < 0 ) {
- newOverBottom = position.top + myOffset + atOffset + offset + data.collisionHeight - outerHeight - withinOffset;
- if ( ( position.top + myOffset + atOffset + offset) > overTop && ( newOverBottom < 0 || newOverBottom < abs( overTop ) ) ) {
- position.top += myOffset + atOffset + offset;
- }
- }
- else if ( overBottom > 0 ) {
- newOverTop = position.top - data.collisionPosition.marginTop + myOffset + atOffset + offset - offsetTop;
- if ( ( position.top + myOffset + atOffset + offset) > overBottom && ( newOverTop > 0 || abs( newOverTop ) < overBottom ) ) {
- position.top += myOffset + atOffset + offset;
- }
- }
- }
- },
- flipfit: {
- left: function() {
- $.ui.position.flip.left.apply( this, arguments );
- $.ui.position.fit.left.apply( this, arguments );
- },
- top: function() {
- $.ui.position.flip.top.apply( this, arguments );
- $.ui.position.fit.top.apply( this, arguments );
- }
- }
-};
-
-// fraction support test
-(function () {
- var testElement, testElementParent, testElementStyle, offsetLeft, i,
- body = document.getElementsByTagName( "body" )[ 0 ],
- div = document.createElement( "div" );
-
- //Create a "fake body" for testing based on method used in jQuery.support
- testElement = document.createElement( body ? "div" : "body" );
- testElementStyle = {
- visibility: "hidden",
- width: 0,
- height: 0,
- border: 0,
- margin: 0,
- background: "none"
- };
- if ( body ) {
- $.extend( testElementStyle, {
- position: "absolute",
- left: "-1000px",
- top: "-1000px"
- });
- }
- for ( i in testElementStyle ) {
- testElement.style[ i ] = testElementStyle[ i ];
- }
- testElement.appendChild( div );
- testElementParent = body || document.documentElement;
- testElementParent.insertBefore( testElement, testElementParent.firstChild );
-
- div.style.cssText = "position: absolute; left: 10.7432222px;";
-
- offsetLeft = $( div ).offset().left;
- $.support.offsetFractions = offsetLeft > 10 && offsetLeft < 11;
-
- testElement.innerHTML = "";
- testElementParent.removeChild( testElement );
-})();
-
-}( jQuery ) );
-
-(function( $, undefined ) {
-
-$.widget( "ui.progressbar", {
- version: "1.10.3",
- options: {
- max: 100,
- value: 0,
-
- change: null,
- complete: null
- },
-
- min: 0,
-
- _create: function() {
- // Constrain initial value
- this.oldValue = this.options.value = this._constrainedValue();
-
- this.element
- .addClass( "ui-progressbar ui-widget ui-widget-content ui-corner-all" )
- .attr({
- // Only set static values, aria-valuenow and aria-valuemax are
- // set inside _refreshValue()
- role: "progressbar",
- "aria-valuemin": this.min
- });
-
- this.valueDiv = $( "<div class='ui-progressbar-value ui-widget-header ui-corner-left'></div>" )
- .appendTo( this.element );
-
- this._refreshValue();
- },
-
- _destroy: function() {
- this.element
- .removeClass( "ui-progressbar ui-widget ui-widget-content ui-corner-all" )
- .removeAttr( "role" )
- .removeAttr( "aria-valuemin" )
- .removeAttr( "aria-valuemax" )
- .removeAttr( "aria-valuenow" );
-
- this.valueDiv.remove();
- },
-
- value: function( newValue ) {
- if ( newValue === undefined ) {
- return this.options.value;
- }
-
- this.options.value = this._constrainedValue( newValue );
- this._refreshValue();
- },
-
- _constrainedValue: function( newValue ) {
- if ( newValue === undefined ) {
- newValue = this.options.value;
- }
-
- this.indeterminate = newValue === false;
-
- // sanitize value
- if ( typeof newValue !== "number" ) {
- newValue = 0;
- }
-
- return this.indeterminate ? false :
- Math.min( this.options.max, Math.max( this.min, newValue ) );
- },
-
- _setOptions: function( options ) {
- // Ensure "value" option is set after other values (like max)
- var value = options.value;
- delete options.value;
-
- this._super( options );
-
- this.options.value = this._constrainedValue( value );
- this._refreshValue();
- },
-
- _setOption: function( key, value ) {
- if ( key === "max" ) {
- // Don't allow a max less than min
- value = Math.max( this.min, value );
- }
-
- this._super( key, value );
- },
-
- _percentage: function() {
- return this.indeterminate ? 100 : 100 * ( this.options.value - this.min ) / ( this.options.max - this.min );
- },
-
- _refreshValue: function() {
- var value = this.options.value,
- percentage = this._percentage();
-
- this.valueDiv
- .toggle( this.indeterminate || value > this.min )
- .toggleClass( "ui-corner-right", value === this.options.max )
- .width( percentage.toFixed(0) + "%" );
-
- this.element.toggleClass( "ui-progressbar-indeterminate", this.indeterminate );
-
- if ( this.indeterminate ) {
- this.element.removeAttr( "aria-valuenow" );
- if ( !this.overlayDiv ) {
- this.overlayDiv = $( "<div class='ui-progressbar-overlay'></div>" ).appendTo( this.valueDiv );
- }
- } else {
- this.element.attr({
- "aria-valuemax": this.options.max,
- "aria-valuenow": value
- });
- if ( this.overlayDiv ) {
- this.overlayDiv.remove();
- this.overlayDiv = null;
- }
- }
-
- if ( this.oldValue !== value ) {
- this.oldValue = value;
- this._trigger( "change" );
- }
- if ( value === this.options.max ) {
- this._trigger( "complete" );
- }
- }
-});
-
-})( jQuery );
-
-(function( $, undefined ) {
-
-// number of pages in a slider
-// (how many times can you page up/down to go through the whole range)
-var numPages = 5;
-
-$.widget( "ui.slider", $.ui.mouse, {
- version: "1.10.3",
- widgetEventPrefix: "slide",
-
- options: {
- animate: false,
- distance: 0,
- max: 100,
- min: 0,
- orientation: "horizontal",
- range: false,
- step: 1,
- value: 0,
- values: null,
-
- // callbacks
- change: null,
- slide: null,
- start: null,
- stop: null
- },
-
- _create: function() {
- this._keySliding = false;
- this._mouseSliding = false;
- this._animateOff = true;
- this._handleIndex = null;
- this._detectOrientation();
- this._mouseInit();
-
- this.element
- .addClass( "ui-slider" +
- " ui-slider-" + this.orientation +
- " ui-widget" +
- " ui-widget-content" +
- " ui-corner-all");
-
- this._refresh();
- this._setOption( "disabled", this.options.disabled );
-
- this._animateOff = false;
- },
-
- _refresh: function() {
- this._createRange();
- this._createHandles();
- this._setupEvents();
- this._refreshValue();
- },
-
- _createHandles: function() {
- var i, handleCount,
- options = this.options,
- existingHandles = this.element.find( ".ui-slider-handle" ).addClass( "ui-state-default ui-corner-all" ),
- handle = "<a class='ui-slider-handle ui-state-default ui-corner-all' href='#'></a>",
- handles = [];
-
- handleCount = ( options.values && options.values.length ) || 1;
-
- if ( existingHandles.length > handleCount ) {
- existingHandles.slice( handleCount ).remove();
- existingHandles = existingHandles.slice( 0, handleCount );
- }
-
- for ( i = existingHandles.length; i < handleCount; i++ ) {
- handles.push( handle );
- }
-
- this.handles = existingHandles.add( $( handles.join( "" ) ).appendTo( this.element ) );
-
- this.handle = this.handles.eq( 0 );
-
- this.handles.each(function( i ) {
- $( this ).data( "ui-slider-handle-index", i );
- });
- },
-
- _createRange: function() {
- var options = this.options,
- classes = "";
-
- if ( options.range ) {
- if ( options.range === true ) {
- if ( !options.values ) {
- options.values = [ this._valueMin(), this._valueMin() ];
- } else if ( options.values.length && options.values.length !== 2 ) {
- options.values = [ options.values[0], options.values[0] ];
- } else if ( $.isArray( options.values ) ) {
- options.values = options.values.slice(0);
- }
- }
-
- if ( !this.range || !this.range.length ) {
- this.range = $( "<div></div>" )
- .appendTo( this.element );
-
- classes = "ui-slider-range" +
- // note: this isn't the most fittingly semantic framework class for this element,
- // but worked best visually with a variety of themes
- " ui-widget-header ui-corner-all";
- } else {
- this.range.removeClass( "ui-slider-range-min ui-slider-range-max" )
- // Handle range switching from true to min/max
- .css({
- "left": "",
- "bottom": ""
- });
- }
-
- this.range.addClass( classes +
- ( ( options.range === "min" || options.range === "max" ) ? " ui-slider-range-" + options.range : "" ) );
- } else {
- this.range = $([]);
- }
- },
-
- _setupEvents: function() {
- var elements = this.handles.add( this.range ).filter( "a" );
- this._off( elements );
- this._on( elements, this._handleEvents );
- this._hoverable( elements );
- this._focusable( elements );
- },
-
- _destroy: function() {
- this.handles.remove();
- this.range.remove();
-
- this.element
- .removeClass( "ui-slider" +
- " ui-slider-horizontal" +
- " ui-slider-vertical" +
- " ui-widget" +
- " ui-widget-content" +
- " ui-corner-all" );
-
- this._mouseDestroy();
- },
-
- _mouseCapture: function( event ) {
- var position, normValue, distance, closestHandle, index, allowed, offset, mouseOverHandle,
- that = this,
- o = this.options;
-
- if ( o.disabled ) {
- return false;
- }
-
- this.elementSize = {
- width: this.element.outerWidth(),
- height: this.element.outerHeight()
- };
- this.elementOffset = this.element.offset();
-
- position = { x: event.pageX, y: event.pageY };
- normValue = this._normValueFromMouse( position );
- distance = this._valueMax() - this._valueMin() + 1;
- this.handles.each(function( i ) {
- var thisDistance = Math.abs( normValue - that.values(i) );
- if (( distance > thisDistance ) ||
- ( distance === thisDistance &&
- (i === that._lastChangedValue || that.values(i) === o.min ))) {
- distance = thisDistance;
- closestHandle = $( this );
- index = i;
- }
- });
-
- allowed = this._start( event, index );
- if ( allowed === false ) {
- return false;
- }
- this._mouseSliding = true;
-
- this._handleIndex = index;
-
- closestHandle
- .addClass( "ui-state-active" )
- .focus();
-
- offset = closestHandle.offset();
- mouseOverHandle = !$( event.target ).parents().addBack().is( ".ui-slider-handle" );
- this._clickOffset = mouseOverHandle ? { left: 0, top: 0 } : {
- left: event.pageX - offset.left - ( closestHandle.width() / 2 ),
- top: event.pageY - offset.top -
- ( closestHandle.height() / 2 ) -
- ( parseInt( closestHandle.css("borderTopWidth"), 10 ) || 0 ) -
- ( parseInt( closestHandle.css("borderBottomWidth"), 10 ) || 0) +
- ( parseInt( closestHandle.css("marginTop"), 10 ) || 0)
- };
-
- if ( !this.handles.hasClass( "ui-state-hover" ) ) {
- this._slide( event, index, normValue );
- }
- this._animateOff = true;
- return true;
- },
-
- _mouseStart: function() {
- return true;
- },
-
- _mouseDrag: function( event ) {
- var position = { x: event.pageX, y: event.pageY },
- normValue = this._normValueFromMouse( position );
-
- this._slide( event, this._handleIndex, normValue );
-
- return false;
- },
-
- _mouseStop: function( event ) {
- this.handles.removeClass( "ui-state-active" );
- this._mouseSliding = false;
-
- this._stop( event, this._handleIndex );
- this._change( event, this._handleIndex );
-
- this._handleIndex = null;
- this._clickOffset = null;
- this._animateOff = false;
-
- return false;
- },
-
- _detectOrientation: function() {
- this.orientation = ( this.options.orientation === "vertical" ) ? "vertical" : "horizontal";
- },
-
- _normValueFromMouse: function( position ) {
- var pixelTotal,
- pixelMouse,
- percentMouse,
- valueTotal,
- valueMouse;
-
- if ( this.orientation === "horizontal" ) {
- pixelTotal = this.elementSize.width;
- pixelMouse = position.x - this.elementOffset.left - ( this._clickOffset ? this._clickOffset.left : 0 );
- } else {
- pixelTotal = this.elementSize.height;
- pixelMouse = position.y - this.elementOffset.top - ( this._clickOffset ? this._clickOffset.top : 0 );
- }
-
- percentMouse = ( pixelMouse / pixelTotal );
- if ( percentMouse > 1 ) {
- percentMouse = 1;
- }
- if ( percentMouse < 0 ) {
- percentMouse = 0;
- }
- if ( this.orientation === "vertical" ) {
- percentMouse = 1 - percentMouse;
- }
-
- valueTotal = this._valueMax() - this._valueMin();
- valueMouse = this._valueMin() + percentMouse * valueTotal;
-
- return this._trimAlignValue( valueMouse );
- },
-
- _start: function( event, index ) {
- var uiHash = {
- handle: this.handles[ index ],
- value: this.value()
- };
- if ( this.options.values && this.options.values.length ) {
- uiHash.value = this.values( index );
- uiHash.values = this.values();
- }
- return this._trigger( "start", event, uiHash );
- },
-
- _slide: function( event, index, newVal ) {
- var otherVal,
- newValues,
- allowed;
-
- if ( this.options.values && this.options.values.length ) {
- otherVal = this.values( index ? 0 : 1 );
-
- if ( ( this.options.values.length === 2 && this.options.range === true ) &&
- ( ( index === 0 && newVal > otherVal) || ( index === 1 && newVal < otherVal ) )
- ) {
- newVal = otherVal;
- }
-
- if ( newVal !== this.values( index ) ) {
- newValues = this.values();
- newValues[ index ] = newVal;
- // A slide can be canceled by returning false from the slide callback
- allowed = this._trigger( "slide", event, {
- handle: this.handles[ index ],
- value: newVal,
- values: newValues
- } );
- otherVal = this.values( index ? 0 : 1 );
- if ( allowed !== false ) {
- this.values( index, newVal, true );
- }
- }
- } else {
- if ( newVal !== this.value() ) {
- // A slide can be canceled by returning false from the slide callback
- allowed = this._trigger( "slide", event, {
- handle: this.handles[ index ],
- value: newVal
- } );
- if ( allowed !== false ) {
- this.value( newVal );
- }
- }
- }
- },
-
- _stop: function( event, index ) {
- var uiHash = {
- handle: this.handles[ index ],
- value: this.value()
- };
- if ( this.options.values && this.options.values.length ) {
- uiHash.value = this.values( index );
- uiHash.values = this.values();
- }
-
- this._trigger( "stop", event, uiHash );
- },
-
- _change: function( event, index ) {
- if ( !this._keySliding && !this._mouseSliding ) {
- var uiHash = {
- handle: this.handles[ index ],
- value: this.value()
- };
- if ( this.options.values && this.options.values.length ) {
- uiHash.value = this.values( index );
- uiHash.values = this.values();
- }
-
- //store the last changed value index for reference when handles overlap
- this._lastChangedValue = index;
-
- this._trigger( "change", event, uiHash );
- }
- },
-
- value: function( newValue ) {
- if ( arguments.length ) {
- this.options.value = this._trimAlignValue( newValue );
- this._refreshValue();
- this._change( null, 0 );
- return;
- }
-
- return this._value();
- },
-
- values: function( index, newValue ) {
- var vals,
- newValues,
- i;
-
- if ( arguments.length > 1 ) {
- this.options.values[ index ] = this._trimAlignValue( newValue );
- this._refreshValue();
- this._change( null, index );
- return;
- }
-
- if ( arguments.length ) {
- if ( $.isArray( arguments[ 0 ] ) ) {
- vals = this.options.values;
- newValues = arguments[ 0 ];
- for ( i = 0; i < vals.length; i += 1 ) {
- vals[ i ] = this._trimAlignValue( newValues[ i ] );
- this._change( null, i );
- }
- this._refreshValue();
- } else {
- if ( this.options.values && this.options.values.length ) {
- return this._values( index );
- } else {
- return this.value();
- }
- }
- } else {
- return this._values();
- }
- },
-
- _setOption: function( key, value ) {
- var i,
- valsLength = 0;
-
- if ( key === "range" && this.options.range === true ) {
- if ( value === "min" ) {
- this.options.value = this._values( 0 );
- this.options.values = null;
- } else if ( value === "max" ) {
- this.options.value = this._values( this.options.values.length-1 );
- this.options.values = null;
- }
- }
-
- if ( $.isArray( this.options.values ) ) {
- valsLength = this.options.values.length;
- }
-
- $.Widget.prototype._setOption.apply( this, arguments );
-
- switch ( key ) {
- case "orientation":
- this._detectOrientation();
- this.element
- .removeClass( "ui-slider-horizontal ui-slider-vertical" )
- .addClass( "ui-slider-" + this.orientation );
- this._refreshValue();
- break;
- case "value":
- this._animateOff = true;
- this._refreshValue();
- this._change( null, 0 );
- this._animateOff = false;
- break;
- case "values":
- this._animateOff = true;
- this._refreshValue();
- for ( i = 0; i < valsLength; i += 1 ) {
- this._change( null, i );
- }
- this._animateOff = false;
- break;
- case "min":
- case "max":
- this._animateOff = true;
- this._refreshValue();
- this._animateOff = false;
- break;
- case "range":
- this._animateOff = true;
- this._refresh();
- this._animateOff = false;
- break;
- }
- },
-
- //internal value getter
- // _value() returns value trimmed by min and max, aligned by step
- _value: function() {
- var val = this.options.value;
- val = this._trimAlignValue( val );
-
- return val;
- },
-
- //internal values getter
- // _values() returns array of values trimmed by min and max, aligned by step
- // _values( index ) returns single value trimmed by min and max, aligned by step
- _values: function( index ) {
- var val,
- vals,
- i;
-
- if ( arguments.length ) {
- val = this.options.values[ index ];
- val = this._trimAlignValue( val );
-
- return val;
- } else if ( this.options.values && this.options.values.length ) {
- // .slice() creates a copy of the array
- // this copy gets trimmed by min and max and then returned
- vals = this.options.values.slice();
- for ( i = 0; i < vals.length; i+= 1) {
- vals[ i ] = this._trimAlignValue( vals[ i ] );
- }
-
- return vals;
- } else {
- return [];
- }
- },
-
- // returns the step-aligned value that val is closest to, between (inclusive) min and max
- _trimAlignValue: function( val ) {
- if ( val <= this._valueMin() ) {
- return this._valueMin();
- }
- if ( val >= this._valueMax() ) {
- return this._valueMax();
- }
- var step = ( this.options.step > 0 ) ? this.options.step : 1,
- valModStep = (val - this._valueMin()) % step,
- alignValue = val - valModStep;
-
- if ( Math.abs(valModStep) * 2 >= step ) {
- alignValue += ( valModStep > 0 ) ? step : ( -step );
- }
-
- // Since JavaScript has problems with large floats, round
- // the final value to 5 digits after the decimal point (see #4124)
- return parseFloat( alignValue.toFixed(5) );
- },
-
- _valueMin: function() {
- return this.options.min;
- },
-
- _valueMax: function() {
- return this.options.max;
- },
-
- _refreshValue: function() {
- var lastValPercent, valPercent, value, valueMin, valueMax,
- oRange = this.options.range,
- o = this.options,
- that = this,
- animate = ( !this._animateOff ) ? o.animate : false,
- _set = {};
-
- if ( this.options.values && this.options.values.length ) {
- this.handles.each(function( i ) {
- valPercent = ( that.values(i) - that._valueMin() ) / ( that._valueMax() - that._valueMin() ) * 100;
- _set[ that.orientation === "horizontal" ? "left" : "bottom" ] = valPercent + "%";
- $( this ).stop( 1, 1 )[ animate ? "animate" : "css" ]( _set, o.animate );
- if ( that.options.range === true ) {
- if ( that.orientation === "horizontal" ) {
- if ( i === 0 ) {
- that.range.stop( 1, 1 )[ animate ? "animate" : "css" ]( { left: valPercent + "%" }, o.animate );
- }
- if ( i === 1 ) {
- that.range[ animate ? "animate" : "css" ]( { width: ( valPercent - lastValPercent ) + "%" }, { queue: false, duration: o.animate } );
- }
- } else {
- if ( i === 0 ) {
- that.range.stop( 1, 1 )[ animate ? "animate" : "css" ]( { bottom: ( valPercent ) + "%" }, o.animate );
- }
- if ( i === 1 ) {
- that.range[ animate ? "animate" : "css" ]( { height: ( valPercent - lastValPercent ) + "%" }, { queue: false, duration: o.animate } );
- }
- }
- }
- lastValPercent = valPercent;
- });
- } else {
- value = this.value();
- valueMin = this._valueMin();
- valueMax = this._valueMax();
- valPercent = ( valueMax !== valueMin ) ?
- ( value - valueMin ) / ( valueMax - valueMin ) * 100 :
- 0;
- _set[ this.orientation === "horizontal" ? "left" : "bottom" ] = valPercent + "%";
- this.handle.stop( 1, 1 )[ animate ? "animate" : "css" ]( _set, o.animate );
-
- if ( oRange === "min" && this.orientation === "horizontal" ) {
- this.range.stop( 1, 1 )[ animate ? "animate" : "css" ]( { width: valPercent + "%" }, o.animate );
- }
- if ( oRange === "max" && this.orientation === "horizontal" ) {
- this.range[ animate ? "animate" : "css" ]( { width: ( 100 - valPercent ) + "%" }, { queue: false, duration: o.animate } );
- }
- if ( oRange === "min" && this.orientation === "vertical" ) {
- this.range.stop( 1, 1 )[ animate ? "animate" : "css" ]( { height: valPercent + "%" }, o.animate );
- }
- if ( oRange === "max" && this.orientation === "vertical" ) {
- this.range[ animate ? "animate" : "css" ]( { height: ( 100 - valPercent ) + "%" }, { queue: false, duration: o.animate } );
- }
- }
- },
-
- _handleEvents: {
- keydown: function( event ) {
- /*jshint maxcomplexity:25*/
- var allowed, curVal, newVal, step,
- index = $( event.target ).data( "ui-slider-handle-index" );
-
- switch ( event.keyCode ) {
- case $.ui.keyCode.HOME:
- case $.ui.keyCode.END:
- case $.ui.keyCode.PAGE_UP:
- case $.ui.keyCode.PAGE_DOWN:
- case $.ui.keyCode.UP:
- case $.ui.keyCode.RIGHT:
- case $.ui.keyCode.DOWN:
- case $.ui.keyCode.LEFT:
- event.preventDefault();
- if ( !this._keySliding ) {
- this._keySliding = true;
- $( event.target ).addClass( "ui-state-active" );
- allowed = this._start( event, index );
- if ( allowed === false ) {
- return;
- }
- }
- break;
- }
-
- step = this.options.step;
- if ( this.options.values && this.options.values.length ) {
- curVal = newVal = this.values( index );
- } else {
- curVal = newVal = this.value();
- }
-
- switch ( event.keyCode ) {
- case $.ui.keyCode.HOME:
- newVal = this._valueMin();
- break;
- case $.ui.keyCode.END:
- newVal = this._valueMax();
- break;
- case $.ui.keyCode.PAGE_UP:
- newVal = this._trimAlignValue( curVal + ( (this._valueMax() - this._valueMin()) / numPages ) );
- break;
- case $.ui.keyCode.PAGE_DOWN:
- newVal = this._trimAlignValue( curVal - ( (this._valueMax() - this._valueMin()) / numPages ) );
- break;
- case $.ui.keyCode.UP:
- case $.ui.keyCode.RIGHT:
- if ( curVal === this._valueMax() ) {
- return;
- }
- newVal = this._trimAlignValue( curVal + step );
- break;
- case $.ui.keyCode.DOWN:
- case $.ui.keyCode.LEFT:
- if ( curVal === this._valueMin() ) {
- return;
- }
- newVal = this._trimAlignValue( curVal - step );
- break;
- }
-
- this._slide( event, index, newVal );
- },
- click: function( event ) {
- event.preventDefault();
- },
- keyup: function( event ) {
- var index = $( event.target ).data( "ui-slider-handle-index" );
-
- if ( this._keySliding ) {
- this._keySliding = false;
- this._stop( event, index );
- this._change( event, index );
- $( event.target ).removeClass( "ui-state-active" );
- }
- }
- }
-
-});
-
-}(jQuery));
-
-(function( $ ) {
-
-function modifier( fn ) {
- return function() {
- var previous = this.element.val();
- fn.apply( this, arguments );
- this._refresh();
- if ( previous !== this.element.val() ) {
- this._trigger( "change" );
- }
- };
-}
-
-$.widget( "ui.spinner", {
- version: "1.10.3",
- defaultElement: "<input>",
- widgetEventPrefix: "spin",
- options: {
- culture: null,
- icons: {
- down: "ui-icon-triangle-1-s",
- up: "ui-icon-triangle-1-n"
- },
- incremental: true,
- max: null,
- min: null,
- numberFormat: null,
- page: 10,
- step: 1,
-
- change: null,
- spin: null,
- start: null,
- stop: null
- },
-
- _create: function() {
- // handle string values that need to be parsed
- this._setOption( "max", this.options.max );
- this._setOption( "min", this.options.min );
- this._setOption( "step", this.options.step );
-
- // format the value, but don't constrain
- this._value( this.element.val(), true );
-
- this._draw();
- this._on( this._events );
- this._refresh();
-
- // turning off autocomplete prevents the browser from remembering the
- // value when navigating through history, so we re-enable autocomplete
- // if the page is unloaded before the widget is destroyed. #7790
- this._on( this.window, {
- beforeunload: function() {
- this.element.removeAttr( "autocomplete" );
- }
- });
- },
-
- _getCreateOptions: function() {
- var options = {},
- element = this.element;
-
- $.each( [ "min", "max", "step" ], function( i, option ) {
- var value = element.attr( option );
- if ( value !== undefined && value.length ) {
- options[ option ] = value;
- }
- });
-
- return options;
- },
-
- _events: {
- keydown: function( event ) {
- if ( this._start( event ) && this._keydown( event ) ) {
- event.preventDefault();
- }
- },
- keyup: "_stop",
- focus: function() {
- this.previous = this.element.val();
- },
- blur: function( event ) {
- if ( this.cancelBlur ) {
- delete this.cancelBlur;
- return;
- }
-
- this._stop();
- this._refresh();
- if ( this.previous !== this.element.val() ) {
- this._trigger( "change", event );
- }
- },
- mousewheel: function( event, delta ) {
- if ( !delta ) {
- return;
- }
- if ( !this.spinning && !this._start( event ) ) {
- return false;
- }
-
- this._spin( (delta > 0 ? 1 : -1) * this.options.step, event );
- clearTimeout( this.mousewheelTimer );
- this.mousewheelTimer = this._delay(function() {
- if ( this.spinning ) {
- this._stop( event );
- }
- }, 100 );
- event.preventDefault();
- },
- "mousedown .ui-spinner-button": function( event ) {
- var previous;
-
- // We never want the buttons to have focus; whenever the user is
- // interacting with the spinner, the focus should be on the input.
- // If the input is focused then this.previous is properly set from
- // when the input first received focus. If the input is not focused
- // then we need to set this.previous based on the value before spinning.
- previous = this.element[0] === this.document[0].activeElement ?
- this.previous : this.element.val();
- function checkFocus() {
- var isActive = this.element[0] === this.document[0].activeElement;
- if ( !isActive ) {
- this.element.focus();
- this.previous = previous;
- // support: IE
- // IE sets focus asynchronously, so we need to check if focus
- // moved off of the input because the user clicked on the button.
- this._delay(function() {
- this.previous = previous;
- });
- }
- }
-
- // ensure focus is on (or stays on) the text field
- event.preventDefault();
- checkFocus.call( this );
-
- // support: IE
- // IE doesn't prevent moving focus even with event.preventDefault()
- // so we set a flag to know when we should ignore the blur event
- // and check (again) if focus moved off of the input.
- this.cancelBlur = true;
- this._delay(function() {
- delete this.cancelBlur;
- checkFocus.call( this );
- });
-
- if ( this._start( event ) === false ) {
- return;
- }
-
- this._repeat( null, $( event.currentTarget ).hasClass( "ui-spinner-up" ) ? 1 : -1, event );
- },
- "mouseup .ui-spinner-button": "_stop",
- "mouseenter .ui-spinner-button": function( event ) {
- // button will add ui-state-active if mouse was down while mouseleave and kept down
- if ( !$( event.currentTarget ).hasClass( "ui-state-active" ) ) {
- return;
- }
-
- if ( this._start( event ) === false ) {
- return false;
- }
- this._repeat( null, $( event.currentTarget ).hasClass( "ui-spinner-up" ) ? 1 : -1, event );
- },
- // TODO: do we really want to consider this a stop?
- // shouldn't we just stop the repeater and wait until mouseup before
- // we trigger the stop event?
- "mouseleave .ui-spinner-button": "_stop"
- },
-
- _draw: function() {
- var uiSpinner = this.uiSpinner = this.element
- .addClass( "ui-spinner-input" )
- .attr( "autocomplete", "off" )
- .wrap( this._uiSpinnerHtml() )
- .parent()
- // add buttons
- .append( this._buttonHtml() );
-
- this.element.attr( "role", "spinbutton" );
-
- // button bindings
- this.buttons = uiSpinner.find( ".ui-spinner-button" )
- .attr( "tabIndex", -1 )
- .button()
- .removeClass( "ui-corner-all" );
-
- // IE 6 doesn't understand height: 50% for the buttons
- // unless the wrapper has an explicit height
- if ( this.buttons.height() > Math.ceil( uiSpinner.height() * 0.5 ) &&
- uiSpinner.height() > 0 ) {
- uiSpinner.height( uiSpinner.height() );
- }
-
- // disable spinner if element was already disabled
- if ( this.options.disabled ) {
- this.disable();
- }
- },
-
- _keydown: function( event ) {
- var options = this.options,
- keyCode = $.ui.keyCode;
-
- switch ( event.keyCode ) {
- case keyCode.UP:
- this._repeat( null, 1, event );
- return true;
- case keyCode.DOWN:
- this._repeat( null, -1, event );
- return true;
- case keyCode.PAGE_UP:
- this._repeat( null, options.page, event );
- return true;
- case keyCode.PAGE_DOWN:
- this._repeat( null, -options.page, event );
- return true;
- }
-
- return false;
- },
-
- _uiSpinnerHtml: function() {
- return "<span class='ui-spinner ui-widget ui-widget-content ui-corner-all'></span>";
- },
-
- _buttonHtml: function() {
- return "" +
- "<a class='ui-spinner-button ui-spinner-up ui-corner-tr'>" +
- "<span class='ui-icon " + this.options.icons.up + "'>&#9650;</span>" +
- "</a>" +
- "<a class='ui-spinner-button ui-spinner-down ui-corner-br'>" +
- "<span class='ui-icon " + this.options.icons.down + "'>&#9660;</span>" +
- "</a>";
- },
-
- _start: function( event ) {
- if ( !this.spinning && this._trigger( "start", event ) === false ) {
- return false;
- }
-
- if ( !this.counter ) {
- this.counter = 1;
- }
- this.spinning = true;
- return true;
- },
-
- _repeat: function( i, steps, event ) {
- i = i || 500;
-
- clearTimeout( this.timer );
- this.timer = this._delay(function() {
- this._repeat( 40, steps, event );
- }, i );
-
- this._spin( steps * this.options.step, event );
- },
-
- _spin: function( step, event ) {
- var value = this.value() || 0;
-
- if ( !this.counter ) {
- this.counter = 1;
- }
-
- value = this._adjustValue( value + step * this._increment( this.counter ) );
-
- if ( !this.spinning || this._trigger( "spin", event, { value: value } ) !== false) {
- this._value( value );
- this.counter++;
- }
- },
-
- _increment: function( i ) {
- var incremental = this.options.incremental;
-
- if ( incremental ) {
- return $.isFunction( incremental ) ?
- incremental( i ) :
- Math.floor( i*i*i/50000 - i*i/500 + 17*i/200 + 1 );
- }
-
- return 1;
- },
-
- _precision: function() {
- var precision = this._precisionOf( this.options.step );
- if ( this.options.min !== null ) {
- precision = Math.max( precision, this._precisionOf( this.options.min ) );
- }
- return precision;
- },
-
- _precisionOf: function( num ) {
- var str = num.toString(),
- decimal = str.indexOf( "." );
- return decimal === -1 ? 0 : str.length - decimal - 1;
- },
-
- _adjustValue: function( value ) {
- var base, aboveMin,
- options = this.options;
-
- // make sure we're at a valid step
- // - find out where we are relative to the base (min or 0)
- base = options.min !== null ? options.min : 0;
- aboveMin = value - base;
- // - round to the nearest step
- aboveMin = Math.round(aboveMin / options.step) * options.step;
- // - rounding is based on 0, so adjust back to our base
- value = base + aboveMin;
-
- // fix precision from bad JS floating point math
- value = parseFloat( value.toFixed( this._precision() ) );
-
- // clamp the value
- if ( options.max !== null && value > options.max) {
- return options.max;
- }
- if ( options.min !== null && value < options.min ) {
- return options.min;
- }
-
- return value;
- },
-
- _stop: function( event ) {
- if ( !this.spinning ) {
- return;
- }
-
- clearTimeout( this.timer );
- clearTimeout( this.mousewheelTimer );
- this.counter = 0;
- this.spinning = false;
- this._trigger( "stop", event );
- },
-
- _setOption: function( key, value ) {
- if ( key === "culture" || key === "numberFormat" ) {
- var prevValue = this._parse( this.element.val() );
- this.options[ key ] = value;
- this.element.val( this._format( prevValue ) );
- return;
- }
-
- if ( key === "max" || key === "min" || key === "step" ) {
- if ( typeof value === "string" ) {
- value = this._parse( value );
- }
- }
- if ( key === "icons" ) {
- this.buttons.first().find( ".ui-icon" )
- .removeClass( this.options.icons.up )
- .addClass( value.up );
- this.buttons.last().find( ".ui-icon" )
- .removeClass( this.options.icons.down )
- .addClass( value.down );
- }
-
- this._super( key, value );
-
- if ( key === "disabled" ) {
- if ( value ) {
- this.element.prop( "disabled", true );
- this.buttons.button( "disable" );
- } else {
- this.element.prop( "disabled", false );
- this.buttons.button( "enable" );
- }
- }
- },
-
- _setOptions: modifier(function( options ) {
- this._super( options );
- this._value( this.element.val() );
- }),
-
- _parse: function( val ) {
- if ( typeof val === "string" && val !== "" ) {
- val = window.Globalize && this.options.numberFormat ?
- Globalize.parseFloat( val, 10, this.options.culture ) : +val;
- }
- return val === "" || isNaN( val ) ? null : val;
- },
-
- _format: function( value ) {
- if ( value === "" ) {
- return "";
- }
- return window.Globalize && this.options.numberFormat ?
- Globalize.format( value, this.options.numberFormat, this.options.culture ) :
- value;
- },
-
- _refresh: function() {
- this.element.attr({
- "aria-valuemin": this.options.min,
- "aria-valuemax": this.options.max,
- // TODO: what should we do with values that can't be parsed?
- "aria-valuenow": this._parse( this.element.val() )
- });
- },
-
- // update the value without triggering change
- _value: function( value, allowAny ) {
- var parsed;
- if ( value !== "" ) {
- parsed = this._parse( value );
- if ( parsed !== null ) {
- if ( !allowAny ) {
- parsed = this._adjustValue( parsed );
- }
- value = this._format( parsed );
- }
- }
- this.element.val( value );
- this._refresh();
- },
-
- _destroy: function() {
- this.element
- .removeClass( "ui-spinner-input" )
- .prop( "disabled", false )
- .removeAttr( "autocomplete" )
- .removeAttr( "role" )
- .removeAttr( "aria-valuemin" )
- .removeAttr( "aria-valuemax" )
- .removeAttr( "aria-valuenow" );
- this.uiSpinner.replaceWith( this.element );
- },
-
- stepUp: modifier(function( steps ) {
- this._stepUp( steps );
- }),
- _stepUp: function( steps ) {
- if ( this._start() ) {
- this._spin( (steps || 1) * this.options.step );
- this._stop();
- }
- },
-
- stepDown: modifier(function( steps ) {
- this._stepDown( steps );
- }),
- _stepDown: function( steps ) {
- if ( this._start() ) {
- this._spin( (steps || 1) * -this.options.step );
- this._stop();
- }
- },
-
- pageUp: modifier(function( pages ) {
- this._stepUp( (pages || 1) * this.options.page );
- }),
-
- pageDown: modifier(function( pages ) {
- this._stepDown( (pages || 1) * this.options.page );
- }),
-
- value: function( newVal ) {
- if ( !arguments.length ) {
- return this._parse( this.element.val() );
- }
- modifier( this._value ).call( this, newVal );
- },
-
- widget: function() {
- return this.uiSpinner;
- }
-});
-
-}( jQuery ) );
-
-(function( $, undefined ) {
-
-var tabId = 0,
- rhash = /#.*$/;
-
-function getNextTabId() {
- return ++tabId;
-}
-
-function isLocal( anchor ) {
- return anchor.hash.length > 1 &&
- decodeURIComponent( anchor.href.replace( rhash, "" ) ) ===
- decodeURIComponent( location.href.replace( rhash, "" ) );
-}
-
-$.widget( "ui.tabs", {
- version: "1.10.3",
- delay: 300,
- options: {
- active: null,
- collapsible: false,
- event: "click",
- heightStyle: "content",
- hide: null,
- show: null,
-
- // callbacks
- activate: null,
- beforeActivate: null,
- beforeLoad: null,
- load: null
- },
-
- _create: function() {
- var that = this,
- options = this.options;
-
- this.running = false;
-
- this.element
- .addClass( "ui-tabs ui-widget ui-widget-content ui-corner-all" )
- .toggleClass( "ui-tabs-collapsible", options.collapsible )
- // Prevent users from focusing disabled tabs via click
- .delegate( ".ui-tabs-nav > li", "mousedown" + this.eventNamespace, function( event ) {
- if ( $( this ).is( ".ui-state-disabled" ) ) {
- event.preventDefault();
- }
- })
- // support: IE <9
- // Preventing the default action in mousedown doesn't prevent IE
- // from focusing the element, so if the anchor gets focused, blur.
- // We don't have to worry about focusing the previously focused
- // element since clicking on a non-focusable element should focus
- // the body anyway.
- .delegate( ".ui-tabs-anchor", "focus" + this.eventNamespace, function() {
- if ( $( this ).closest( "li" ).is( ".ui-state-disabled" ) ) {
- this.blur();
- }
- });
-
- this._processTabs();
- options.active = this._initialActive();
-
- // Take disabling tabs via class attribute from HTML
- // into account and update option properly.
- if ( $.isArray( options.disabled ) ) {
- options.disabled = $.unique( options.disabled.concat(
- $.map( this.tabs.filter( ".ui-state-disabled" ), function( li ) {
- return that.tabs.index( li );
- })
- ) ).sort();
- }
-
- // check for length avoids error when initializing empty list
- if ( this.options.active !== false && this.anchors.length ) {
- this.active = this._findActive( options.active );
- } else {
- this.active = $();
- }
-
- this._refresh();
-
- if ( this.active.length ) {
- this.load( options.active );
- }
- },
-
- _initialActive: function() {
- var active = this.options.active,
- collapsible = this.options.collapsible,
- locationHash = location.hash.substring( 1 );
-
- if ( active === null ) {
- // check the fragment identifier in the URL
- if ( locationHash ) {
- this.tabs.each(function( i, tab ) {
- if ( $( tab ).attr( "aria-controls" ) === locationHash ) {
- active = i;
- return false;
- }
- });
- }
-
- // check for a tab marked active via a class
- if ( active === null ) {
- active = this.tabs.index( this.tabs.filter( ".ui-tabs-active" ) );
- }
-
- // no active tab, set to false
- if ( active === null || active === -1 ) {
- active = this.tabs.length ? 0 : false;
- }
- }
-
- // handle numbers: negative, out of range
- if ( active !== false ) {
- active = this.tabs.index( this.tabs.eq( active ) );
- if ( active === -1 ) {
- active = collapsible ? false : 0;
- }
- }
-
- // don't allow collapsible: false and active: false
- if ( !collapsible && active === false && this.anchors.length ) {
- active = 0;
- }
-
- return active;
- },
-
- _getCreateEventData: function() {
- return {
- tab: this.active,
- panel: !this.active.length ? $() : this._getPanelForTab( this.active )
- };
- },
-
- _tabKeydown: function( event ) {
- /*jshint maxcomplexity:15*/
- var focusedTab = $( this.document[0].activeElement ).closest( "li" ),
- selectedIndex = this.tabs.index( focusedTab ),
- goingForward = true;
-
- if ( this._handlePageNav( event ) ) {
- return;
- }
-
- switch ( event.keyCode ) {
- case $.ui.keyCode.RIGHT:
- case $.ui.keyCode.DOWN:
- selectedIndex++;
- break;
- case $.ui.keyCode.UP:
- case $.ui.keyCode.LEFT:
- goingForward = false;
- selectedIndex--;
- break;
- case $.ui.keyCode.END:
- selectedIndex = this.anchors.length - 1;
- break;
- case $.ui.keyCode.HOME:
- selectedIndex = 0;
- break;
- case $.ui.keyCode.SPACE:
- // Activate only, no collapsing
- event.preventDefault();
- clearTimeout( this.activating );
- this._activate( selectedIndex );
- return;
- case $.ui.keyCode.ENTER:
- // Toggle (cancel delayed activation, allow collapsing)
- event.preventDefault();
- clearTimeout( this.activating );
- // Determine if we should collapse or activate
- this._activate( selectedIndex === this.options.active ? false : selectedIndex );
- return;
- default:
- return;
- }
-
- // Focus the appropriate tab, based on which key was pressed
- event.preventDefault();
- clearTimeout( this.activating );
- selectedIndex = this._focusNextTab( selectedIndex, goingForward );
-
- // Navigating with control key will prevent automatic activation
- if ( !event.ctrlKey ) {
- // Update aria-selected immediately so that AT think the tab is already selected.
- // Otherwise AT may confuse the user by stating that they need to activate the tab,
- // but the tab will already be activated by the time the announcement finishes.
- focusedTab.attr( "aria-selected", "false" );
- this.tabs.eq( selectedIndex ).attr( "aria-selected", "true" );
-
- this.activating = this._delay(function() {
- this.option( "active", selectedIndex );
- }, this.delay );
- }
- },
-
- _panelKeydown: function( event ) {
- if ( this._handlePageNav( event ) ) {
- return;
- }
-
- // Ctrl+up moves focus to the current tab
- if ( event.ctrlKey && event.keyCode === $.ui.keyCode.UP ) {
- event.preventDefault();
- this.active.focus();
- }
- },
-
- // Alt+page up/down moves focus to the previous/next tab (and activates)
- _handlePageNav: function( event ) {
- if ( event.altKey && event.keyCode === $.ui.keyCode.PAGE_UP ) {
- this._activate( this._focusNextTab( this.options.active - 1, false ) );
- return true;
- }
- if ( event.altKey && event.keyCode === $.ui.keyCode.PAGE_DOWN ) {
- this._activate( this._focusNextTab( this.options.active + 1, true ) );
- return true;
- }
- },
-
- _findNextTab: function( index, goingForward ) {
- var lastTabIndex = this.tabs.length - 1;
-
- function constrain() {
- if ( index > lastTabIndex ) {
- index = 0;
- }
- if ( index < 0 ) {
- index = lastTabIndex;
- }
- return index;
- }
-
- while ( $.inArray( constrain(), this.options.disabled ) !== -1 ) {
- index = goingForward ? index + 1 : index - 1;
- }
-
- return index;
- },
-
- _focusNextTab: function( index, goingForward ) {
- index = this._findNextTab( index, goingForward );
- this.tabs.eq( index ).focus();
- return index;
- },
-
- _setOption: function( key, value ) {
- if ( key === "active" ) {
- // _activate() will handle invalid values and update this.options
- this._activate( value );
- return;
- }
-
- if ( key === "disabled" ) {
- // don't use the widget factory's disabled handling
- this._setupDisabled( value );
- return;
- }
-
- this._super( key, value);
-
- if ( key === "collapsible" ) {
- this.element.toggleClass( "ui-tabs-collapsible", value );
- // Setting collapsible: false while collapsed; open first panel
- if ( !value && this.options.active === false ) {
- this._activate( 0 );
- }
- }
-
- if ( key === "event" ) {
- this._setupEvents( value );
- }
-
- if ( key === "heightStyle" ) {
- this._setupHeightStyle( value );
- }
- },
-
- _tabId: function( tab ) {
- return tab.attr( "aria-controls" ) || "ui-tabs-" + getNextTabId();
- },
-
- _sanitizeSelector: function( hash ) {
- return hash ? hash.replace( /[!"$%&'()*+,.\/:;<=>?@\[\]\^`{|}~]/g, "\\$&" ) : "";
- },
-
- refresh: function() {
- var options = this.options,
- lis = this.tablist.children( ":has(a[href])" );
-
- // get disabled tabs from class attribute from HTML
- // this will get converted to a boolean if needed in _refresh()
- options.disabled = $.map( lis.filter( ".ui-state-disabled" ), function( tab ) {
- return lis.index( tab );
- });
-
- this._processTabs();
-
- // was collapsed or no tabs
- if ( options.active === false || !this.anchors.length ) {
- options.active = false;
- this.active = $();
- // was active, but active tab is gone
- } else if ( this.active.length && !$.contains( this.tablist[ 0 ], this.active[ 0 ] ) ) {
- // all remaining tabs are disabled
- if ( this.tabs.length === options.disabled.length ) {
- options.active = false;
- this.active = $();
- // activate previous tab
- } else {
- this._activate( this._findNextTab( Math.max( 0, options.active - 1 ), false ) );
- }
- // was active, active tab still exists
- } else {
- // make sure active index is correct
- options.active = this.tabs.index( this.active );
- }
-
- this._refresh();
- },
-
- _refresh: function() {
- this._setupDisabled( this.options.disabled );
- this._setupEvents( this.options.event );
- this._setupHeightStyle( this.options.heightStyle );
-
- this.tabs.not( this.active ).attr({
- "aria-selected": "false",
- tabIndex: -1
- });
- this.panels.not( this._getPanelForTab( this.active ) )
- .hide()
- .attr({
- "aria-expanded": "false",
- "aria-hidden": "true"
- });
-
- // Make sure one tab is in the tab order
- if ( !this.active.length ) {
- this.tabs.eq( 0 ).attr( "tabIndex", 0 );
- } else {
- this.active
- .addClass( "ui-tabs-active ui-state-active" )
- .attr({
- "aria-selected": "true",
- tabIndex: 0
- });
- this._getPanelForTab( this.active )
- .show()
- .attr({
- "aria-expanded": "true",
- "aria-hidden": "false"
- });
- }
- },
-
- _processTabs: function() {
- var that = this;
-
- this.tablist = this._getList()
- .addClass( "ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all" )
- .attr( "role", "tablist" );
-
- this.tabs = this.tablist.find( "> li:has(a[href])" )
- .addClass( "ui-state-default ui-corner-top" )
- .attr({
- role: "tab",
- tabIndex: -1
- });
-
- this.anchors = this.tabs.map(function() {
- return $( "a", this )[ 0 ];
- })
- .addClass( "ui-tabs-anchor" )
- .attr({
- role: "presentation",
- tabIndex: -1
- });
-
- this.panels = $();
-
- this.anchors.each(function( i, anchor ) {
- var selector, panel, panelId,
- anchorId = $( anchor ).uniqueId().attr( "id" ),
- tab = $( anchor ).closest( "li" ),
- originalAriaControls = tab.attr( "aria-controls" );
-
- // inline tab
- if ( isLocal( anchor ) ) {
- selector = anchor.hash;
- panel = that.element.find( that._sanitizeSelector( selector ) );
- // remote tab
- } else {
- panelId = that._tabId( tab );
- selector = "#" + panelId;
- panel = that.element.find( selector );
- if ( !panel.length ) {
- panel = that._createPanel( panelId );
- panel.insertAfter( that.panels[ i - 1 ] || that.tablist );
- }
- panel.attr( "aria-live", "polite" );
- }
-
- if ( panel.length) {
- that.panels = that.panels.add( panel );
- }
- if ( originalAriaControls ) {
- tab.data( "ui-tabs-aria-controls", originalAriaControls );
- }
- tab.attr({
- "aria-controls": selector.substring( 1 ),
- "aria-labelledby": anchorId
- });
- panel.attr( "aria-labelledby", anchorId );
- });
-
- this.panels
- .addClass( "ui-tabs-panel ui-widget-content ui-corner-bottom" )
- .attr( "role", "tabpanel" );
- },
-
- // allow overriding how to find the list for rare usage scenarios (#7715)
- _getList: function() {
- return this.element.find( "ol,ul" ).eq( 0 );
- },
-
- _createPanel: function( id ) {
- return $( "<div>" )
- .attr( "id", id )
- .addClass( "ui-tabs-panel ui-widget-content ui-corner-bottom" )
- .data( "ui-tabs-destroy", true );
- },
-
- _setupDisabled: function( disabled ) {
- if ( $.isArray( disabled ) ) {
- if ( !disabled.length ) {
- disabled = false;
- } else if ( disabled.length === this.anchors.length ) {
- disabled = true;
- }
- }
-
- // disable tabs
- for ( var i = 0, li; ( li = this.tabs[ i ] ); i++ ) {
- if ( disabled === true || $.inArray( i, disabled ) !== -1 ) {
- $( li )
- .addClass( "ui-state-disabled" )
- .attr( "aria-disabled", "true" );
- } else {
- $( li )
- .removeClass( "ui-state-disabled" )
- .removeAttr( "aria-disabled" );
- }
- }
-
- this.options.disabled = disabled;
- },
-
- _setupEvents: function( event ) {
- var events = {
- click: function( event ) {
- event.preventDefault();
- }
- };
- if ( event ) {
- $.each( event.split(" "), function( index, eventName ) {
- events[ eventName ] = "_eventHandler";
- });
- }
-
- this._off( this.anchors.add( this.tabs ).add( this.panels ) );
- this._on( this.anchors, events );
- this._on( this.tabs, { keydown: "_tabKeydown" } );
- this._on( this.panels, { keydown: "_panelKeydown" } );
-
- this._focusable( this.tabs );
- this._hoverable( this.tabs );
- },
-
- _setupHeightStyle: function( heightStyle ) {
- var maxHeight,
- parent = this.element.parent();
-
- if ( heightStyle === "fill" ) {
- maxHeight = parent.height();
- maxHeight -= this.element.outerHeight() - this.element.height();
-
- this.element.siblings( ":visible" ).each(function() {
- var elem = $( this ),
- position = elem.css( "position" );
-
- if ( position === "absolute" || position === "fixed" ) {
- return;
- }
- maxHeight -= elem.outerHeight( true );
- });
-
- this.element.children().not( this.panels ).each(function() {
- maxHeight -= $( this ).outerHeight( true );
- });
-
- this.panels.each(function() {
- $( this ).height( Math.max( 0, maxHeight -
- $( this ).innerHeight() + $( this ).height() ) );
- })
- .css( "overflow", "auto" );
- } else if ( heightStyle === "auto" ) {
- maxHeight = 0;
- this.panels.each(function() {
- maxHeight = Math.max( maxHeight, $( this ).height( "" ).height() );
- }).height( maxHeight );
- }
- },
-
- _eventHandler: function( event ) {
- var options = this.options,
- active = this.active,
- anchor = $( event.currentTarget ),
- tab = anchor.closest( "li" ),
- clickedIsActive = tab[ 0 ] === active[ 0 ],
- collapsing = clickedIsActive && options.collapsible,
- toShow = collapsing ? $() : this._getPanelForTab( tab ),
- toHide = !active.length ? $() : this._getPanelForTab( active ),
- eventData = {
- oldTab: active,
- oldPanel: toHide,
- newTab: collapsing ? $() : tab,
- newPanel: toShow
- };
-
- event.preventDefault();
-
- if ( tab.hasClass( "ui-state-disabled" ) ||
- // tab is already loading
- tab.hasClass( "ui-tabs-loading" ) ||
-				// can't switch during an animation
- this.running ||
- // click on active header, but not collapsible
- ( clickedIsActive && !options.collapsible ) ||
- // allow canceling activation
- ( this._trigger( "beforeActivate", event, eventData ) === false ) ) {
- return;
- }
-
- options.active = collapsing ? false : this.tabs.index( tab );
-
- this.active = clickedIsActive ? $() : tab;
- if ( this.xhr ) {
- this.xhr.abort();
- }
-
- if ( !toHide.length && !toShow.length ) {
- $.error( "jQuery UI Tabs: Mismatching fragment identifier." );
- }
-
- if ( toShow.length ) {
- this.load( this.tabs.index( tab ), event );
- }
- this._toggle( event, eventData );
- },
-
- // handles show/hide for selecting tabs
- _toggle: function( event, eventData ) {
- var that = this,
- toShow = eventData.newPanel,
- toHide = eventData.oldPanel;
-
- this.running = true;
-
- function complete() {
- that.running = false;
- that._trigger( "activate", event, eventData );
- }
-
- function show() {
- eventData.newTab.closest( "li" ).addClass( "ui-tabs-active ui-state-active" );
-
- if ( toShow.length && that.options.show ) {
- that._show( toShow, that.options.show, complete );
- } else {
- toShow.show();
- complete();
- }
- }
-
- // start out by hiding, then showing, then completing
- if ( toHide.length && this.options.hide ) {
- this._hide( toHide, this.options.hide, function() {
- eventData.oldTab.closest( "li" ).removeClass( "ui-tabs-active ui-state-active" );
- show();
- });
- } else {
- eventData.oldTab.closest( "li" ).removeClass( "ui-tabs-active ui-state-active" );
- toHide.hide();
- show();
- }
-
- toHide.attr({
- "aria-expanded": "false",
- "aria-hidden": "true"
- });
- eventData.oldTab.attr( "aria-selected", "false" );
- // If we're switching tabs, remove the old tab from the tab order.
- // If we're opening from collapsed state, remove the previous tab from the tab order.
- // If we're collapsing, then keep the collapsing tab in the tab order.
- if ( toShow.length && toHide.length ) {
- eventData.oldTab.attr( "tabIndex", -1 );
- } else if ( toShow.length ) {
- this.tabs.filter(function() {
- return $( this ).attr( "tabIndex" ) === 0;
- })
- .attr( "tabIndex", -1 );
- }
-
- toShow.attr({
- "aria-expanded": "true",
- "aria-hidden": "false"
- });
- eventData.newTab.attr({
- "aria-selected": "true",
- tabIndex: 0
- });
- },
-
- _activate: function( index ) {
- var anchor,
- active = this._findActive( index );
-
- // trying to activate the already active panel
- if ( active[ 0 ] === this.active[ 0 ] ) {
- return;
- }
-
- // trying to collapse, simulate a click on the current active header
- if ( !active.length ) {
- active = this.active;
- }
-
- anchor = active.find( ".ui-tabs-anchor" )[ 0 ];
- this._eventHandler({
- target: anchor,
- currentTarget: anchor,
- preventDefault: $.noop
- });
- },
-
- _findActive: function( index ) {
- return index === false ? $() : this.tabs.eq( index );
- },
-
- _getIndex: function( index ) {
- // meta-function to give users option to provide a href string instead of a numerical index.
- if ( typeof index === "string" ) {
- index = this.anchors.index( this.anchors.filter( "[href$='" + index + "']" ) );
- }
-
- return index;
- },
-
- _destroy: function() {
- if ( this.xhr ) {
- this.xhr.abort();
- }
-
- this.element.removeClass( "ui-tabs ui-widget ui-widget-content ui-corner-all ui-tabs-collapsible" );
-
- this.tablist
- .removeClass( "ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all" )
- .removeAttr( "role" );
-
- this.anchors
- .removeClass( "ui-tabs-anchor" )
- .removeAttr( "role" )
- .removeAttr( "tabIndex" )
- .removeUniqueId();
-
- this.tabs.add( this.panels ).each(function() {
- if ( $.data( this, "ui-tabs-destroy" ) ) {
- $( this ).remove();
- } else {
- $( this )
- .removeClass( "ui-state-default ui-state-active ui-state-disabled " +
- "ui-corner-top ui-corner-bottom ui-widget-content ui-tabs-active ui-tabs-panel" )
- .removeAttr( "tabIndex" )
- .removeAttr( "aria-live" )
- .removeAttr( "aria-busy" )
- .removeAttr( "aria-selected" )
- .removeAttr( "aria-labelledby" )
- .removeAttr( "aria-hidden" )
- .removeAttr( "aria-expanded" )
- .removeAttr( "role" );
- }
- });
-
- this.tabs.each(function() {
- var li = $( this ),
- prev = li.data( "ui-tabs-aria-controls" );
- if ( prev ) {
- li
- .attr( "aria-controls", prev )
- .removeData( "ui-tabs-aria-controls" );
- } else {
- li.removeAttr( "aria-controls" );
- }
- });
-
- this.panels.show();
-
- if ( this.options.heightStyle !== "content" ) {
- this.panels.css( "height", "" );
- }
- },
-
- enable: function( index ) {
- var disabled = this.options.disabled;
- if ( disabled === false ) {
- return;
- }
-
- if ( index === undefined ) {
- disabled = false;
- } else {
- index = this._getIndex( index );
- if ( $.isArray( disabled ) ) {
- disabled = $.map( disabled, function( num ) {
- return num !== index ? num : null;
- });
- } else {
- disabled = $.map( this.tabs, function( li, num ) {
- return num !== index ? num : null;
- });
- }
- }
- this._setupDisabled( disabled );
- },
-
- disable: function( index ) {
- var disabled = this.options.disabled;
- if ( disabled === true ) {
- return;
- }
-
- if ( index === undefined ) {
- disabled = true;
- } else {
- index = this._getIndex( index );
- if ( $.inArray( index, disabled ) !== -1 ) {
- return;
- }
- if ( $.isArray( disabled ) ) {
- disabled = $.merge( [ index ], disabled ).sort();
- } else {
- disabled = [ index ];
- }
- }
- this._setupDisabled( disabled );
- },
-
- load: function( index, event ) {
- index = this._getIndex( index );
- var that = this,
- tab = this.tabs.eq( index ),
- anchor = tab.find( ".ui-tabs-anchor" ),
- panel = this._getPanelForTab( tab ),
- eventData = {
- tab: tab,
- panel: panel
- };
-
- // not remote
- if ( isLocal( anchor[ 0 ] ) ) {
- return;
- }
-
- this.xhr = $.ajax( this._ajaxSettings( anchor, event, eventData ) );
-
- // support: jQuery <1.8
- // jQuery <1.8 returns false if the request is canceled in beforeSend,
- // but as of 1.8, $.ajax() always returns a jqXHR object.
- if ( this.xhr && this.xhr.statusText !== "canceled" ) {
- tab.addClass( "ui-tabs-loading" );
- panel.attr( "aria-busy", "true" );
-
- this.xhr
- .success(function( response ) {
- // support: jQuery <1.8
- // http://bugs.jquery.com/ticket/11778
- setTimeout(function() {
- panel.html( response );
- that._trigger( "load", event, eventData );
- }, 1 );
- })
- .complete(function( jqXHR, status ) {
- // support: jQuery <1.8
- // http://bugs.jquery.com/ticket/11778
- setTimeout(function() {
- if ( status === "abort" ) {
- that.panels.stop( false, true );
- }
-
- tab.removeClass( "ui-tabs-loading" );
- panel.removeAttr( "aria-busy" );
-
- if ( jqXHR === that.xhr ) {
- delete that.xhr;
- }
- }, 1 );
- });
- }
- },
-
- _ajaxSettings: function( anchor, event, eventData ) {
- var that = this;
- return {
- url: anchor.attr( "href" ),
- beforeSend: function( jqXHR, settings ) {
- return that._trigger( "beforeLoad", event,
- $.extend( { jqXHR : jqXHR, ajaxSettings: settings }, eventData ) );
- }
- };
- },
-
- _getPanelForTab: function( tab ) {
- var id = $( tab ).attr( "aria-controls" );
- return this.element.find( this._sanitizeSelector( "#" + id ) );
- }
-});
-
-})( jQuery );
-
-(function( $ ) {
-
-var increments = 0;
-
-function addDescribedBy( elem, id ) {
- var describedby = (elem.attr( "aria-describedby" ) || "").split( /\s+/ );
- describedby.push( id );
- elem
- .data( "ui-tooltip-id", id )
- .attr( "aria-describedby", $.trim( describedby.join( " " ) ) );
-}
-
-function removeDescribedBy( elem ) {
- var id = elem.data( "ui-tooltip-id" ),
- describedby = (elem.attr( "aria-describedby" ) || "").split( /\s+/ ),
- index = $.inArray( id, describedby );
- if ( index !== -1 ) {
- describedby.splice( index, 1 );
- }
-
- elem.removeData( "ui-tooltip-id" );
- describedby = $.trim( describedby.join( " " ) );
- if ( describedby ) {
- elem.attr( "aria-describedby", describedby );
- } else {
- elem.removeAttr( "aria-describedby" );
- }
-}
-
-$.widget( "ui.tooltip", {
- version: "1.10.3",
- options: {
- content: function() {
- // support: IE<9, Opera in jQuery <1.7
- // .text() can't accept undefined, so coerce to a string
- var title = $( this ).attr( "title" ) || "";
- // Escape title, since we're going from an attribute to raw HTML
- return $( "<a>" ).text( title ).html();
- },
- hide: true,
- // Disabled elements have inconsistent behavior across browsers (#8661)
- items: "[title]:not([disabled])",
- position: {
- my: "left top+15",
- at: "left bottom",
- collision: "flipfit flip"
- },
- show: true,
- tooltipClass: null,
- track: false,
-
- // callbacks
- close: null,
- open: null
- },
-
- _create: function() {
- this._on({
- mouseover: "open",
- focusin: "open"
- });
-
- // IDs of generated tooltips, needed for destroy
- this.tooltips = {};
- // IDs of parent tooltips where we removed the title attribute
- this.parents = {};
-
- if ( this.options.disabled ) {
- this._disable();
- }
- },
-
- _setOption: function( key, value ) {
- var that = this;
-
- if ( key === "disabled" ) {
- this[ value ? "_disable" : "_enable" ]();
- this.options[ key ] = value;
- // disable element style changes
- return;
- }
-
- this._super( key, value );
-
- if ( key === "content" ) {
- $.each( this.tooltips, function( id, element ) {
- that._updateContent( element );
- });
- }
- },
-
- _disable: function() {
- var that = this;
-
- // close open tooltips
- $.each( this.tooltips, function( id, element ) {
- var event = $.Event( "blur" );
- event.target = event.currentTarget = element[0];
- that.close( event, true );
- });
-
- // remove title attributes to prevent native tooltips
- this.element.find( this.options.items ).addBack().each(function() {
- var element = $( this );
- if ( element.is( "[title]" ) ) {
- element
- .data( "ui-tooltip-title", element.attr( "title" ) )
- .attr( "title", "" );
- }
- });
- },
-
- _enable: function() {
- // restore title attributes
- this.element.find( this.options.items ).addBack().each(function() {
- var element = $( this );
- if ( element.data( "ui-tooltip-title" ) ) {
- element.attr( "title", element.data( "ui-tooltip-title" ) );
- }
- });
- },
-
- open: function( event ) {
- var that = this,
- target = $( event ? event.target : this.element )
- // we need closest here due to mouseover bubbling,
- // but always pointing at the same event target
- .closest( this.options.items );
-
- // No element to show a tooltip for or the tooltip is already open
- if ( !target.length || target.data( "ui-tooltip-id" ) ) {
- return;
- }
-
- if ( target.attr( "title" ) ) {
- target.data( "ui-tooltip-title", target.attr( "title" ) );
- }
-
- target.data( "ui-tooltip-open", true );
-
- // kill parent tooltips, custom or native, for hover
- if ( event && event.type === "mouseover" ) {
- target.parents().each(function() {
- var parent = $( this ),
- blurEvent;
- if ( parent.data( "ui-tooltip-open" ) ) {
- blurEvent = $.Event( "blur" );
- blurEvent.target = blurEvent.currentTarget = this;
- that.close( blurEvent, true );
- }
- if ( parent.attr( "title" ) ) {
- parent.uniqueId();
- that.parents[ this.id ] = {
- element: this,
- title: parent.attr( "title" )
- };
- parent.attr( "title", "" );
- }
- });
- }
-
- this._updateContent( target, event );
- },
-
- _updateContent: function( target, event ) {
- var content,
- contentOption = this.options.content,
- that = this,
- eventType = event ? event.type : null;
-
- if ( typeof contentOption === "string" ) {
- return this._open( event, target, contentOption );
- }
-
- content = contentOption.call( target[0], function( response ) {
- // ignore async response if tooltip was closed already
- if ( !target.data( "ui-tooltip-open" ) ) {
- return;
- }
- // IE may instantly serve a cached response for ajax requests
- // delay this call to _open so the other call to _open runs first
- that._delay(function() {
- // jQuery creates a special event for focusin when it doesn't
- // exist natively. To improve performance, the native event
- // object is reused and the type is changed. Therefore, we can't
- // rely on the type being correct after the event finished
- // bubbling, so we set it back to the previous value. (#8740)
- if ( event ) {
- event.type = eventType;
- }
- this._open( event, target, response );
- });
- });
- if ( content ) {
- this._open( event, target, content );
- }
- },
-
- _open: function( event, target, content ) {
- var tooltip, events, delayedShow,
- positionOption = $.extend( {}, this.options.position );
-
- if ( !content ) {
- return;
- }
-
- // Content can be updated multiple times. If the tooltip already
- // exists, then just update the content and bail.
- tooltip = this._find( target );
- if ( tooltip.length ) {
- tooltip.find( ".ui-tooltip-content" ).html( content );
- return;
- }
-
- // if we have a title, clear it to prevent the native tooltip
- // we have to check first to avoid defining a title if none exists
- // (we don't want to cause an element to start matching [title])
- //
- // We use removeAttr only for key events, to allow IE to export the correct
- // accessible attributes. For mouse events, set to empty string to avoid
- // native tooltip showing up (happens only when removing inside mouseover).
- if ( target.is( "[title]" ) ) {
- if ( event && event.type === "mouseover" ) {
- target.attr( "title", "" );
- } else {
- target.removeAttr( "title" );
- }
- }
-
- tooltip = this._tooltip( target );
- addDescribedBy( target, tooltip.attr( "id" ) );
- tooltip.find( ".ui-tooltip-content" ).html( content );
-
- function position( event ) {
- positionOption.of = event;
- if ( tooltip.is( ":hidden" ) ) {
- return;
- }
- tooltip.position( positionOption );
- }
- if ( this.options.track && event && /^mouse/.test( event.type ) ) {
- this._on( this.document, {
- mousemove: position
- });
- // trigger once to override element-relative positioning
- position( event );
- } else {
- tooltip.position( $.extend({
- of: target
- }, this.options.position ) );
- }
-
- tooltip.hide();
-
- this._show( tooltip, this.options.show );
- // Handle tracking tooltips that are shown with a delay (#8644). As soon
- // as the tooltip is visible, position the tooltip using the most recent
- // event.
- if ( this.options.show && this.options.show.delay ) {
- delayedShow = this.delayedShow = setInterval(function() {
- if ( tooltip.is( ":visible" ) ) {
- position( positionOption.of );
- clearInterval( delayedShow );
- }
- }, $.fx.interval );
- }
-
- this._trigger( "open", event, { tooltip: tooltip } );
-
- events = {
- keyup: function( event ) {
- if ( event.keyCode === $.ui.keyCode.ESCAPE ) {
- var fakeEvent = $.Event(event);
- fakeEvent.currentTarget = target[0];
- this.close( fakeEvent, true );
- }
- },
- remove: function() {
- this._removeTooltip( tooltip );
- }
- };
- if ( !event || event.type === "mouseover" ) {
- events.mouseleave = "close";
- }
- if ( !event || event.type === "focusin" ) {
- events.focusout = "close";
- }
- this._on( true, target, events );
- },
-
- close: function( event ) {
- var that = this,
- target = $( event ? event.currentTarget : this.element ),
- tooltip = this._find( target );
-
- // disabling closes the tooltip, so we need to track when we're closing
- // to avoid an infinite loop in case the tooltip becomes disabled on close
- if ( this.closing ) {
- return;
- }
-
- // Clear the interval for delayed tracking tooltips
- clearInterval( this.delayedShow );
-
- // only set title if we had one before (see comment in _open())
- if ( target.data( "ui-tooltip-title" ) ) {
- target.attr( "title", target.data( "ui-tooltip-title" ) );
- }
-
- removeDescribedBy( target );
-
- tooltip.stop( true );
- this._hide( tooltip, this.options.hide, function() {
- that._removeTooltip( $( this ) );
- });
-
- target.removeData( "ui-tooltip-open" );
- this._off( target, "mouseleave focusout keyup" );
- // Remove 'remove' binding only on delegated targets
- if ( target[0] !== this.element[0] ) {
- this._off( target, "remove" );
- }
- this._off( this.document, "mousemove" );
-
- if ( event && event.type === "mouseleave" ) {
- $.each( this.parents, function( id, parent ) {
- $( parent.element ).attr( "title", parent.title );
- delete that.parents[ id ];
- });
- }
-
- this.closing = true;
- this._trigger( "close", event, { tooltip: tooltip } );
- this.closing = false;
- },
-
- _tooltip: function( element ) {
- var id = "ui-tooltip-" + increments++,
- tooltip = $( "<div>" )
- .attr({
- id: id,
- role: "tooltip"
- })
- .addClass( "ui-tooltip ui-widget ui-corner-all ui-widget-content " +
- ( this.options.tooltipClass || "" ) );
- $( "<div>" )
- .addClass( "ui-tooltip-content" )
- .appendTo( tooltip );
- tooltip.appendTo( this.document[0].body );
- this.tooltips[ id ] = element;
- return tooltip;
- },
-
- _find: function( target ) {
- var id = target.data( "ui-tooltip-id" );
- return id ? $( "#" + id ) : $();
- },
-
- _removeTooltip: function( tooltip ) {
- tooltip.remove();
- delete this.tooltips[ tooltip.attr( "id" ) ];
- },
-
- _destroy: function() {
- var that = this;
-
- // close open tooltips
- $.each( this.tooltips, function( id, element ) {
- // Delegate to close method to handle common cleanup
- var event = $.Event( "blur" );
- event.target = event.currentTarget = element[0];
- that.close( event, true );
-
- // Remove immediately; destroying an open tooltip doesn't use the
- // hide animation
- $( "#" + id ).remove();
-
- // Restore the title
- if ( element.data( "ui-tooltip-title" ) ) {
- element.attr( "title", element.data( "ui-tooltip-title" ) );
- element.removeData( "ui-tooltip-title" );
- }
- });
- }
-});
-
-}( jQuery ) );
diff --git a/bitbake/lib/toaster/toastergui/static/js/libtoaster.js b/bitbake/lib/toaster/toastergui/static/js/libtoaster.js
new file mode 100644
index 0000000000..15815b333e
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/libtoaster.js
@@ -0,0 +1,323 @@
+
+/* All shared functionality goes in the libtoaster object.
+ * This object mainly helps readability by giving us a
+ * traceable namespace.
+ */
+var libtoaster = (function (){
+
+  /* makeTypeahead parameters
+   * jQElement: the jQuery element to attach the typeahead to
+   * xhrUrl: the URL to fetch JSON from; the response is expected in the form:
+   *  { "list": [ { "name": "test", "detail" : "a test thing" }, .... ] }
+   * xhrParams: the data/parameters to pass to the getJSON url e.g.
+   *  { 'type' : 'projects' }; the text typed will be passed as 'value'.
+   * selectedCB: function called once an item has been selected, with the
+   * selected item as its single argument.
+   * (a usage sketch follows this function)
+   */
+ function _makeTypeahead (jQElement, xhrUrl, xhrParams, selectedCB) {
+
+ jQElement.typeahead({
+ source: function(query, process){
+ xhrParams.value = query;
+ $.getJSON(xhrUrl, this.options.xhrParams, function(data){
+ if (data.error != "ok") {
+ console.log("Error getting data from server "+data.error);
+ return;
+ }
+
+ return process (data.list);
+ });
+ },
+ updater: function(item) {
+ var itemObj = this.$menu.find('.active').data('itemObject');
+ selectedCB(itemObj);
+ return item;
+ },
+ matcher: function(item) { return ~item.name.toLowerCase().indexOf(this.query.toLowerCase()); },
+ highlighter: function (item) {
+ if (item.hasOwnProperty('detail'))
+ /* Use jquery to escape the value as text into a span */
+ return $('<span></span>').text(item.name+' '+item.detail).get(0);
+ return $('<span></span>').text(item.name).get(0);
+ },
+ sorter: function (items) { return items; },
+ xhrUrl: xhrUrl,
+ xhrParams: xhrParams,
+ });
+
+
+ /* Copy of bootstrap's render func but sets selectedObject value */
+ function customRenderFunc (items) {
+ var that = this;
+
+ items = $(items).map(function (i, item) {
+ i = $(that.options.item).attr('data-value', item.name).data('itemObject', item);
+ i.find('a').html(that.highlighter(item));
+ return i[0];
+ });
+
+ items.first().addClass('active');
+ this.$menu.html(items);
+ return this;
+ }
+
+ jQElement.data('typeahead').render = customRenderFunc;
+ };
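+  /* A minimal usage sketch (the element id and URL below are placeholders,
+   * not part of this change): wire a typeahead to a project search box.
+   *
+   *   libtoaster.makeTypeahead($("#project-search"),
+   *     "/path/to/xhr_datatypeahead", { type: "projects" },
+   *     function (item) { console.log("selected", item.name); });
+   */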
+
+  /* _startABuild parameters
+   * url - the URL to POST the build request to
+   * project_id - the id of the project to build for (may be undefined)
+   * targets - the build target(s) to request */
+ function _startABuild (url, project_id, targets, onsuccess, onfail) {
+ var data;
+
+ if (project_id)
+ data = 'project_id='+project_id+'&targets='+targets;
+ else
+ data = 'targets='+targets;
+
+ $.ajax( {
+ type: "POST",
+ url: url,
+ data: data,
+ headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
+ success: function (_data) {
+ if (_data.error != "ok") {
+ console.log(_data.error);
+ } else {
+ if (onsuccess != undefined) onsuccess(_data);
+ }
+ },
+ error: function (_data) {
+ console.log("Call failed");
+ console.log(_data);
+ if (onfail) onfail(_data);
+ } });
+ };
+
+ /* Get a project's configuration info */
+ function _getProjectInfo(url, projectId, onsuccess, onfail){
+ $.ajax({
+ type: "POST",
+ url: url,
+ data: { project_id : projectId },
+ headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
+ success: function (_data) {
+ if (_data.error != "ok") {
+ console.log(_data.error);
+ } else {
+ if (onsuccess != undefined) onsuccess(_data);
+ }
+ },
+ error: function (_data) {
+ console.log(_data);
+ if (onfail) onfail(_data);
+ }
+ });
+ };
+
+ /* Properties for data can be:
+ * layerDel (csv)
+ * layerAdd (csv)
+ * projectName
+ * projectVersion
+ * machineName
+ */
+ function _editProject(url, projectId, data, onSuccess, onFail){
+ $.ajax({
+ type: "POST",
+ url: url,
+ data: data,
+ headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
+ success: function (data) {
+ if (data.error != "ok") {
+ console.log(data.error);
+ if (onFail != undefined)
+ onFail(data);
+ } else {
+ if (onSuccess != undefined)
+ onSuccess(data);
+ }
+ },
+ error: function (data) {
+ console.log("Call failed");
+ console.log(data);
+ }
+ });
+ };
+
+ function _getLayerDepsForProject(xhrDataTypeaheadUrl, projectId, layerId, onSuccess, onFail){
+ /* Check for dependencies not in the current project */
+ $.getJSON(xhrDataTypeaheadUrl,
+ { type: 'layerdeps', 'value': layerId , project_id: projectId },
+ function(data) {
+ if (data.error != "ok") {
+ console.log(data.error);
+ if (onFail != undefined)
+ onFail(data);
+ } else {
+ onSuccess(data);
+ }
+ }, function() {
+ console.log("E: Failed to make request");
+ });
+ };
+
+ return {
+ reload_params : reload_params,
+ startABuild : _startABuild,
+ makeTypeahead : _makeTypeahead,
+ getProjectInfo: _getProjectInfo,
+ getLayerDepsForProject : _getLayerDepsForProject,
+ editProject : _editProject,
+ }
+})();
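+
+/* Illustrative usage sketch (editor addition, not part of the original patch):
+ * calling the public libtoaster API from page code. The URLs, project id and
+ * target names are hypothetical; editProject's data object uses the
+ * properties documented above (layerAdd/layerDel as csv, projectName,
+ * projectVersion, machineName).
+ */
+function _exampleLibtoasterUsage() {
+ /* queue a build of two targets for project 1 */
+ libtoaster.startABuild("/toastergui/xhr_build/", 1,
+ "core-image-minimal core-image-sato",
+ function (data) { console.log("build queued", data); },
+ function (data) { console.log("build request failed", data); });
+
+ /* rename the project and switch its machine in a single edit call */
+ libtoaster.editProject("/toastergui/xhr_edit/", 1,
+ { projectName : "my-renamed-project", machineName : "qemux86" },
+ function (data) { console.log("project updated", data); },
+ function (data) { console.log("edit failed", data); });
+}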
+
+/* keep this in the global scope for compatibility */
+function reload_params(params) {
+ var uri = window.location.href;
+ var splitlist = uri.split("?");
+ var url = splitlist[0], parameters = splitlist[1];
+ var i, temp, cparams, nparams = {}, callparams = [];
+ // deserialize the call parameters
+ if (parameters) {
+ cparams = parameters.split("&");
+ } else {
+ cparams = [];
+ }
+ for (i = 0; i < cparams.length; i++) {
+ temp = cparams[i].split("=");
+ nparams[temp[0]] = temp[1];
+ }
+ // update parameter values
+ for (i in params) {
+ nparams[encodeURIComponent(i)] = encodeURIComponent(params[i]);
+ }
+ // serialize the structure
+ for (i in nparams) {
+ callparams.push(i + "=" + nparams[i]);
+ }
+ window.location.href = url + "?" + callparams.join('&');
+}
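+
+/* Illustrative sketch (editor addition, not part of the original patch): with
+ * the page at a hypothetical http://localhost:8000/builds/?page=1&count=25,
+ * the call below merges the new value into the query string and navigates to
+ * http://localhost:8000/builds/?page=2&count=25.
+ */
+function _exampleReloadParams() {
+ reload_params({ page : 2 });
+}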
+
+
+/* Things that happen for all pages */
+$(document).ready(function() {
+
+ /*
+ * PrettyPrint plugin.
+ *
+ */
+ // Init
+ prettyPrint();
+
+ // Prevent invalid links from jumping page scroll
+ $('a[href=#]').click(function() {
+ return false;
+ });
+
+
+ /* Belen's additions */
+
+ // turn Edit columns dropdown into a multiselect menu
+ $('.dropdown-menu input, .dropdown-menu label').click(function(e) {
+ e.stopPropagation();
+ });
+
+ // enable popovers in any table cells that contain an anchor with the
+ // .btn class applied, and make sure popovers work on click, are mutually
+ // exclusive and they close when you click outside their area
+
+ $('html').click(function(e){
+ $('td > a.btn').popover('hide');
+ });
+
+ $('td > a.btn').popover({
+ html:true,
+ placement:'left',
+ container:'body',
+ trigger:'manual'
+ }).click(function(e){
+ $('td > a.btn').not(this).popover('hide');
+ // ideally we would use 'toggle' here
+ // but it seems buggy in our Bootstrap version
+ $(this).popover('show');
+ e.stopPropagation();
+ });
+
+ // enable tooltips for applied filters
+ $('th a.btn-primary').tooltip({container:'body', html:true, placement:'bottom', delay:{hide:1500}});
+
+ // hide applied filter tooltip when you click on the filter button
+ $('th a.btn-primary').click(function () {
+ $('.tooltip').hide();
+ });
+
+ // enable help information tooltip
+ $(".get-help").tooltip({container:'body', html:true, delay:{show:300}});
+
+ // show help bubble only on hover inside tables
+ $(".hover-help").css("visibility","hidden");
+ $("th, td").hover(function () {
+ $(this).find(".hover-help").css("visibility","visible");
+ });
+ $("th, td").mouseleave(function () {
+ $(this).find(".hover-help").css("visibility","hidden");
+ });
+
+ // show task type and outcome in task details pages
+ $(".task-info").tooltip({ container: 'body', html: true, delay: {show: 200}, placement: 'right' });
+
+ // initialise the tooltips for the icon-pencil icons
+ $(".icon-pencil").tooltip({ container: 'body', html: true, delay: {show: 400}, title: "Change" });
+
+ // linking directly to tabs
+ $(function(){
+ var hash = window.location.hash;
+ hash && $('ul.nav a[href="' + hash + '"]').tab('show');
+
+ $('.nav-tabs a').click(function (e) {
+ $(this).tab('show');
+ $('body').scrollTop();
+ });
+ });
+
+ // toggle for long content (variables, python stack trace, etc)
+ $('.full, .full-hide').hide();
+ $('.full-show').click(function(){
+ $('.full').slideDown(function(){
+ $('.full-hide').show();
+ });
+ $(this).hide();
+ });
+ $('.full-hide').click(function(){
+ $(this).hide();
+ $('.full').slideUp(function(){
+ $('.full-show').show();
+ });
+ });
+
+ //toggle the errors and warnings sections
+ $('.show-errors').click(function() {
+ $('#collapse-errors').addClass('in');
+ });
+ $('.toggle-errors').click(function() {
+ $('#collapse-errors').toggleClass('in');
+ });
+ $('.show-warnings').click(function() {
+ $('#collapse-warnings').addClass('in');
+ });
+ $('.toggle-warnings').click(function() {
+ $('#collapse-warnings').toggleClass('in');
+ });
+ $('.show-exceptions').click(function() {
+ $('#collapse-exceptions').addClass('in');
+ });
+ $('.toggle-exceptions').click(function() {
+ $('#collapse-exceptions').toggleClass('in');
+ });
+
+ //show warnings section when requested from the previous page
+ if (location.href.search('#warnings') > -1) {
+ $('#collapse-warnings').addClass('in');
+ }
+});
diff --git a/bitbake/lib/toaster/toastergui/static/js/main.js b/bitbake/lib/toaster/toastergui/static/js/main.js
deleted file mode 100644
index eef6b468f4..0000000000
--- a/bitbake/lib/toaster/toastergui/static/js/main.js
+++ /dev/null
@@ -1,111 +0,0 @@
-$(document).ready(function() {
-
- /*
- * PrettyPrint plugin.
- *
- */
- // Init
- prettyPrint();
-
- // Prevent invalid links from jumping page scroll
- $('a[href=#]').click(function() {
- return false;
- });
-
-
- /* Belen's additions */
-
- // turn Edit columns dropdown into a multiselect menu
- $('.dropdown-menu input, .dropdown-menu label').click(function(e) {
- e.stopPropagation();
- });
-
- // enable popovers in any table cells that contain an anchor with the
- // .btn class applied, and make sure popovers work on click, are mutually
- // exclusive and they close when your click outside their area
-
- $('html').click(function(e){
- $('td > a.btn').popover('hide');
- });
-
- $('td > a.btn').popover({
- html:true,
- placement:'left',
- container:'body',
- trigger:'manual'
- }).click(function(e){
- $('td > a.btn').not(this).popover('hide');
- // ideally we would use 'toggle' here
- // but it seems buggy in our Bootstrap version
- $(this).popover('show');
- e.stopPropagation();
- });
-
- // enable tooltips for applied filters
- $('th a.btn-primary').tooltip({container:'body', html:true, placement:'bottom', delay:{hide:1500}});
-
- // hide applied filter tooltip when you click on the filter button
- $('th a.btn-primary').click(function () {
- $('.tooltip').hide();
- });
-
- // enable help information tooltip
- $(".get-help").tooltip({container:'body', html:true, delay:{show:300}});
-
- // show help bubble only on hover inside tables
- $(".hover-help").css("visibility","hidden");
- $("th, td").hover(function () {
- $(this).find(".hover-help").css("visibility","visible");
- });
- $("th, td").mouseleave(function () {
- $(this).find(".hover-help").css("visibility","hidden");
- });
-
- // show task type and outcome in task details pages
- $(".task-info").tooltip({ container: 'body', html: true, delay: {show: 200}, placement: 'right' });
-
- // linking directly to tabs
- $(function(){
- var hash = window.location.hash;
- hash && $('ul.nav a[href="' + hash + '"]').tab('show');
-
- $('.nav-tabs a').click(function (e) {
- $(this).tab('show');
- $('body').scrollTop();
- });
- });
-
- // toggle for long content (variables, python stack trace, etc)
- $('.full, .full-hide').hide();
- $('.full-show').click(function(){
- $('.full').slideDown(function(){
- $('.full-hide').show();
- });
- $(this).hide();
- });
- $('.full-hide').click(function(){
- $(this).hide();
- $('.full').slideUp(function(){
- $('.full-show').show();
- });
- });
-
- //toggle the errors and warnings sections
- $('.show-errors').click(function() {
- $('#collapse-errors').addClass('in');
- });
- $('.toggle-errors').click(function() {
- $('#collapse-errors').toggleClass('in');
- });
- $('.show-warnings').click(function() {
- $('#collapse-warnings').addClass('in');
- });
- $('.toggle-warnings').click(function() {
- $('#collapse-warnings').toggleClass('in');
- });
- //show warnings section when requested from the previous page
- if (location.href.search('#warnings') > -1) {
- $('#collapse-warnings').addClass('in');
- }
-
-});
diff --git a/bitbake/lib/toaster/toastergui/static/js/projectapp.js b/bitbake/lib/toaster/toastergui/static/js/projectapp.js
new file mode 100644
index 0000000000..bb97f3292c
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/projectapp.js
@@ -0,0 +1,690 @@
+// vim: set tabstop=4 expandtab ai:
+// BitBake Toaster Implementation
+//
+// Copyright (C) 2013 Intel Corporation
+//
+// This program is free software; you can redistribute it and/or modify
+// it under the terms of the GNU General Public License version 2 as
+// published by the Free Software Foundation.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License along
+// with this program; if not, write to the Free Software Foundation, Inc.,
+// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+angular_formpost = function($httpProvider) {
+ // Use x-www-form-urlencoded Content-Type
+ // By Ezekiel Victor, http://victorblog.com/2012/12/20/make-angularjs-http-service-behave-like-jquery-ajax/, no license, with attribution
+ $httpProvider.defaults.headers.post['Content-Type'] = 'application/x-www-form-urlencoded;charset=utf-8';
+
+ /**
+ * The workhorse; converts an object to x-www-form-urlencoded serialization.
+ * @param {Object} obj
+ * @return {String}
+ */
+ var param = function(obj) {
+ var query = '', name, value, fullSubName, subName, subValue, innerObj, i;
+
+ for(name in obj) {
+ value = obj[name];
+
+ if(value instanceof Array) {
+ for(i=0; i<value.length; ++i) {
+ subValue = value[i];
+ fullSubName = name + '[' + i + ']';
+ innerObj = {};
+ innerObj[fullSubName] = subValue;
+ query += param(innerObj) + '&';
+ }
+ }
+ else if(value instanceof Object) {
+ for(subName in value) {
+ subValue = value[subName];
+ fullSubName = name + '[' + subName + ']';
+ innerObj = {};
+ innerObj[fullSubName] = subValue;
+ query += param(innerObj) + '&';
+ }
+ }
+ else if(value !== undefined && value !== null)
+ query += encodeURIComponent(name) + '=' + encodeURIComponent(value) + '&';
+ }
+
+ return query.length ? query.substr(0, query.length - 1) : query;
+ };
+
+ // Override $http service's default transformRequest
+ $httpProvider.defaults.transformRequest = [function(data) {
+ return angular.isObject(data) && String(data) !== '[object File]' ? param(data) : data;
+ }];
+}
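+
+/* Illustrative sketch (editor addition, not part of the original patch): once
+ * angular_formpost is installed as the module config, POST bodies are sent
+ * x-www-form-urlencoded. For the hypothetical URL and data below, the body
+ * becomes layerAdd=1%2C2&options%5Bdeep%5D=true, i.e. nested objects turn
+ * into bracketed keys and all values are URI-encoded.
+ */
+function _exampleFormpostSerialization($http) {
+ return $http.post("/toastergui/xhr_edit/", { layerAdd: "1,2", options: { deep: true } });
+}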
+
+
+/**
+ * Helper to execute callbacks on the differences between two arrays; useful for incremental UI updating.
+ * @param {Array} existingArray
+ * @param {Array} newArray
+ * @param {function} compareElements
+ * @param {function} onAdded
+ * @param {function} onDeleted
+ *
+ * no return
+ */
+function _diffArrays(existingArray, newArray, compareElements, onAdded, onDeleted ) {
+ var added = [];
+ var removed = [];
+ newArray.forEach( function( newElement, newIndex, _newArray) {
+ var existingIndex = existingArray.findIndex(function ( existingElement, _existingIndex, _existingArray ) {
+ return compareElements(newElement, existingElement);
+ });
+ if (existingIndex < 0 && onAdded) { added.push(newElement); }
+ });
+ existingArray.forEach( function( existingElement, existingIndex, _existingArray) {
+ var newIndex = newArray.findIndex(function ( newElement, _newIndex, _newArray ) {
+ return compareElements(newElement, existingElement);
+ });
+ if (newIndex < 0 && onDeleted) { removed.push(existingElement); }
+ });
+
+ if (onAdded) {
+ added.map(onAdded);
+ }
+
+ if (onDeleted) {
+ removed.map(onDeleted);
+ }
+
+}
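+
+/* Illustrative sketch (editor addition, not part of the original patch):
+ * _diffArrays only reports differences, so for the call below onAdded fires
+ * once (for 4) and onDeleted fires once (for 1); elements present in both
+ * arrays are ignored.
+ */
+function _exampleDiffArrays() {
+ _diffArrays([1, 2, 3], [2, 3, 4],
+ function (a, b) { return a == b; },
+ function (added) { console.log("added", added); }, // logs: added 4
+ function (removed) { console.log("deleted", removed); }); // logs: deleted 1
+}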
+
+
+var projectApp = angular.module('project', ['ngCookies', 'ngAnimate', 'ui.bootstrap', 'ngRoute', 'ngSanitize'], angular_formpost);
+
+// modify the template tag markers to prevent conflicts with Django
+projectApp.config(function($interpolateProvider) {
+ $interpolateProvider.startSymbol("{[");
+ $interpolateProvider.endSymbol("]}");
+});
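+
+/* Illustrative note (editor addition, not part of the original patch): with
+ * these symbols an Angular binding in the Django-rendered templates is
+ * written as {[ project.name ]} instead of {{ project.name }}, so it no
+ * longer collides with Django's own {{ ... }} template syntax.
+ */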
+
+
+// filter to format a time interval given in seconds as HH:MM:SS
+projectApp.filter('timediff', function() {
+ return function(input) {
+ function pad(j) {
+ if (parseInt(j) < 10) {return "0" + j}
+ return j;
+ }
+ var seconds = parseInt(input);
+ var hours = Math.floor(seconds / 3600);
+ seconds = seconds - hours * 3600;
+ var minutes = Math.floor(seconds / 60);
+ seconds = seconds - minutes * 60;
+ return pad(hours) + ":" + pad(minutes) + ":" + pad(seconds);
+ }
+});
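+
+/* Illustrative usage sketch (editor addition, not part of the original patch):
+ * the filter can be applied in templates or programmatically via $filter;
+ * e.g. a duration of 125 seconds is rendered as "00:02:05".
+ */
+function _exampleTimediff($filter) {
+ return $filter('timediff')(125);
+}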
+
+
+// main controller for the project page
+projectApp.controller('prjCtrl', function($scope, $modal, $http, $interval, $location, $cookies, $cookieStore, $q, $sce, $anchorScroll, $animate, $sanitize) {
+
+ $scope.getSuggestions = function(type, currentValue) {
+ var deffered = $q.defer();
+
+ $http({method:"GET", url: $scope.urls.xhr_datatypeahead, params : { type: type, value: currentValue}})
+ .success(function (_data) {
+ if (_data.error != "ok") {
+ alert(_data.error);
+ deffered.reject(_data.error);
+ }
+ deffered.resolve(_data.list);
+ });
+
+ return deffered.promise;
+ }
+
+ var inXHRcall = false;
+
+ // default handler for XHR calls; handles errors and refreshes the commonly used page data
+ $scope._makeXHRCall = function(callparams) {
+ if (inXHRcall) {
+ if (callparams.data === undefined) {
+ // we simply skip the data refresh calls
+ console.log("race on XHR, aborted");
+ return;
+ } else {
+ // we return a promise that we'll resolve by reissuing the command later
+ var delayed = $q.defer();
+ console.log("race on XHR, delayed");
+ $interval(function () {$scope._makeXHRCall(callparams).then(function (d) { delayed.resolve(d); });}, 100, 1);
+
+ return delayed.promise;
+ }
+
+ }
+ var deffered = $q.defer();
+
+ if (undefined === callparams.headers) { callparams.headers = {} };
+ callparams.headers['X-CSRFToken'] = $cookies.csrftoken;
+
+ $http(callparams).success(function(_data, _status, _headers, _config) {
+ if (_data.error != "ok") {
+ alert("Failed XHR request (" + _status + "): " + _data.error);
+ console.error("Failed XHR request: ", _data, _status, _headers, _config);
+ // stop refreshing the page
+ $interval.cancel($scope.pollHandle);
+ deffered.reject(_data.error);
+ }
+ else {
+
+ if (_data.builds !== undefined) {
+ var toDelete = [];
+ // step 1 - delete entries not found
+ $scope.builds.forEach(function (elem) {
+ if (-1 == _data.builds.findIndex(function (elemX) { return elemX.id == elem.id && elemX.status == elem.status; })) {
+ toDelete.push(elem);
+ }
+ });
+ toDelete.forEach(function (elem) {
+ $scope.builds.splice($scope.builds.indexOf(elem),1);
+ });
+ // step 2 - merge new entries
+ _data.builds.forEach(function (elem) {
+ var found = false;
+ var i = 0;
+ for (i = 0 ; i < $scope.builds.length; i ++) {
+ if ($scope.builds[i].id > elem.id) continue;
+ if ($scope.builds[i].id == elem.id) { found=true; break;}
+ if ($scope.builds[i].id < elem.id) break;
+ }
+ if (!found) {
+ $scope.builds.splice(i, 0, elem);
+ }
+ });
+
+ }
+ if (_data.layers !== undefined) {
+
+ var addedLayers = [];
+ var deletedLayers = [];
+
+ // step 1 - delete entries not found
+ $scope.layers.forEach(function (elem) {
+ if (-1 == _data.layers.findIndex(function (elemX) { return elemX.id == elem.id && elemX.name == elem.name; })) {
+ deletedLayers.push(elem);
+ }
+ });
+ deletedLayers.forEach(function (elem) {
+ $scope.layers.splice($scope.layers.indexOf(elem),1);
+ });
+ // step 2 - merge new entries
+ _data.layers.forEach(function (elem) {
+ var found = false;
+ var i;
+ for (i = 0 ; i < $scope.layers.length; i ++) {
+ if ($scope.layers[i].orderid < elem.orderid) continue;
+ if ($scope.layers[i].orderid == elem.orderid) {
+ found = true; break;
+ }
+ if ($scope.layers[i].orderid > elem.orderid) break;
+ }
+ if (!found) {
+ $scope.layers.splice(i, 0, elem);
+ addedLayers.push(elem);
+ }
+ });
+
+ // step 3 - display alerts.
+ if (addedLayers.length > 0) {
+ $scope.displayAlert($scope.zone2alerts, "You have added <b>"+addedLayers.length+"</b> layer" + ((addedLayers.length>1)?"s: ":": ") + addedLayers.map(function (e) { return "<a href=\""+e.layerdetailurl+"\">"+e.name+"</a>" }).join(", "), "alert-info");
+ }
+ if (deletedLayers.length > 0) {
+ $scope.displayAlert($scope.zone2alerts, "You have deleted <b>"+deletedLayers.length+"</b> layer" + ((deletedLayers.length>1)?"s: ":": ") + deletedLayers.map(function (e) { return "<a href=\""+e.layerdetailurl+"\">"+e.name+"</a>" }).join(", "), "alert-info");
+ }
+
+ }
+ if (_data.targets !== undefined) {
+ $scope.targets = _data.targets;
+ }
+ if (_data.machine !== undefined) {
+ $scope.machine = _data.machine;
+ }
+ if (_data.user !== undefined) {
+ $scope.user = _data.user;
+ }
+
+ if (_data.prj !== undefined) {
+ $scope.project = _data.prj;
+
+ // update breadcrumb, outside the controller
+ $('#project_name').text($scope.project.name);
+ }
+
+ $scope.validateData();
+ inXHRcall = false;
+ deffered.resolve(_data);
+ }
+ }).error(function(_data, _status, _headers, _config) {
+ alert("Failed HTTP XHR request (" + _status + ")" + _data);
+ console.error("Failed HTTP XHR request: ", _data, _status, _headers, _config);
+ inXHRcall = false;
+ deffered.reject(_data.error);
+ });
+
+ return deffered.promise;
+ }
+
+ $scope.layeralert = undefined;
+ // shows user alerts on invalid project data
+ $scope.validateData = function () {
+ if ($scope.layers.length == 0) {
+ $scope.layeralert = $scope.displayAlert($scope.zone1alerts, "You need to add some layers to this project. <a href=\""+$scope.urls.layers+"\">View all layers available in Toaster</a> or <a href=\""+$scope.urls.importlayer+"\">import a layer</a>");
+ } else {
+ if ($scope.layeralert != undefined) {
+ $scope.layeralert.close();
+ $scope.layeralert = undefined;
+ }
+ }
+ }
+
+ $scope.targetExistingBuild = function(targets) {
+ var oldTargetName = $scope.targetName;
+ $scope.targetName = targets.map(function(v,i,a){return v.target}).join(' ');
+ $scope.targetNamedBuild();
+ $scope.targetName = oldTargetName;
+ }
+
+ $scope.targetNamedBuild = function(target) {
+ if ($scope.targetName === undefined && $scope.targetName1 === undefined){
+ alert("No target defined, please type in a target name");
+ return;
+ }
+
+ $scope.sanitizeTargetName();
+
+ $scope._makeXHRCall({
+ method: "POST", url: $scope.urls.xhr_build,
+ data : {
+ targets: $scope.safeTargetName,
+ }
+ }).then(function (data) {
+ console.log("received ", data);
+ $scope.targetName = undefined;
+ $scope.targetName1 = undefined;
+ $location.hash('buildslist');
+ // call $anchorScroll()
+ $anchorScroll();
+ });
+ }
+
+ $scope.sanitizeTargetName = function() {
+ $scope.safeTargetName = undefined;
+ if (undefined === $scope.targetName) $scope.safeTargetName = $scope.targetName1;
+ if (undefined === $scope.targetName1) $scope.safeTargetName = $scope.targetName;
+
+ if (undefined === $scope.safeTargetName) return;
+
+ $scope.safeTargetName = $scope.safeTargetName.replace(/\[.*\]/, '').trim();
+ }
+
+ $scope.buildCancel = function(id) {
+ $scope._makeXHRCall({
+ method: "POST", url: $scope.urls.xhr_build,
+ data: {
+ buildCancel: id,
+ }
+ });
+ }
+
+ $scope.buildDelete = function(id) {
+ $scope._makeXHRCall({
+ method: "POST", url: $scope.urls.xhr_build,
+ data: {
+ buildDelete: id,
+ }
+ });
+ }
+
+
+ $scope.onLayerSelect = function (item, model, label) {
+ $scope.layerAddId = item.id;
+ }
+
+ $scope.layerAdd = function() {
+
+ $http({method:"GET", url: $scope.urls.xhr_datatypeahead, params : { type: "layerdeps", value: $scope.layerAddId }})
+ .success(function (_data) {
+ if (_data.error != "ok") {
+ alert(_data.error);
+ } else {
+ if (_data.list.length > 0) {
+ // activate modal
+ var modalInstance = $modal.open({
+ templateUrl: 'dependencies_modal',
+ controller: function ($scope, $modalInstance, items, layerAddName) {
+ $scope.items = items;
+ $scope.layerAddName = layerAddName;
+ $scope.selectedItems = (function () { s = {}; for (var i = 0; i < items.length; i++) { s[items[i].id] = true; };return s; })();
+
+ $scope.ok = function() {
+ console.log("scope selected is ", $scope.selectedItems);
+ $modalInstance.close(Object.keys($scope.selectedItems).filter(function (e) { return $scope.selectedItems[e];}));
+ };
+
+ $scope.cancel = function() {
+ $modalInstance.dismiss('cancel');
+ };
+
+ $scope.update = function() {
+ console.log("updated ", $scope.selectedItems);
+ };
+ },
+ resolve: {
+ items: function () {
+ return _data.list;
+ },
+ layerAddName: function () {
+ return $scope.layerAddName;
+ },
+ }
+ });
+
+ modalInstance.result.then(function (selectedArray) {
+ selectedArray.push($scope.layerAddId);
+ console.log("selected", selectedArray);
+
+ $scope._makeXHRCall({
+ method: "POST", url: $scope.urls.xhr_edit,
+ data: {
+ layerAdd: selectedArray.join(","),
+ }
+ }).then(function () {
+ $scope.layerAddName = undefined;
+ });
+ });
+ }
+ else {
+ $scope._makeXHRCall({
+ method: "POST", url: $scope.urls.xhr_edit,
+ data: {
+ layerAdd: $scope.layerAddId,
+ }
+ }).then(function () {
+ $scope.layerAddName = undefined;
+ });
+ }
+ }
+ });
+ }
+
+ $scope.layerDel = function(id) {
+ $scope._makeXHRCall({
+ method: "POST", url: $scope.urls.xhr_edit,
+ data: {
+ layerDel: id,
+ }
+ });
+ }
+
+
+ $scope.test = function(elementid) {
+ $http({method:"GET", url: $scope.urls.xhr_datatypeahead, params : { type: "versionlayers", value: $scope.projectVersion }}).
+ success(function (_data) {
+ if (_data.error != "ok") {
+ alert (_data.error);
+ }
+ else {
+ if (_data.list.length > 0) {
+ // activate modal
+ var modalInstance = $modal.open({
+ templateUrl: 'change_version_modal',
+ controller: function ($scope, $modalInstance, items, releaseName) {
+ $scope.items = items;
+ $scope.releaseName = releaseName;
+
+ $scope.ok = function() {
+ $modalInstance.close();
+ };
+
+ $scope.cancel = function() {
+ $modalInstance.dismiss('cancel');
+ };
+
+ },
+ resolve: {
+ items: function () {
+ return _data.list;
+ },
+ releaseName: function () {
+ return $scope.releases.filter(function (e) { if (e.id == $scope.projectVersion) return e;})[0].name;
+ },
+ }
+ });
+
+ modalInstance.result.then(function () { $scope.edit(elementid)});
+ } else {
+ $scope.edit(elementid);
+ }
+ }
+ });
+ }
+
+ $scope.edit = function(elementid) {
+ var data = {};
+ console.log("edit with ", elementid);
+ var alertText = undefined;
+ var alertZone = undefined;
+ var oldLayers = [];
+
+ switch(elementid) {
+ case '#select-machine':
+ alertText = "You have changed the machine to: <strong>" + $scope.machineName + "</strong>";
+ alertZone = $scope.zone2alerts;
+ data['machineName'] = $scope.machineName;
+ break;
+ case '#change-project-name':
+ data['projectName'] = $scope.projectName;
+ alertText = "You have changed the project name to: <strong>" + $scope.projectName + "</strong>";
+ alertZone = $scope.zone3alerts;
+ break;
+ case '#change-project-version':
+ data['projectVersion'] = $scope.projectVersion;
+ alertText = "You have changed the release to: ";
+ alertZone = $scope.zone3alerts;
+ // save old layers
+ oldLayers = $scope.layers.slice(0);
+ break;
+ default:
+ throw "FIXME: implement conversion for element " + elementid;
+ }
+
+ $scope._makeXHRCall({
+ method: "POST", url: $scope.urls.xhr_edit, data: data,
+ }).then( function (_data) {
+ $scope.toggle(elementid);
+ if (data['projectVersion'] != undefined) {
+ alertText += "<strong>" + $scope.project.release.desc + "</strong>. ";
+ }
+ if (elementid == '#change-project-version') {
+ // requirement https://bugzilla.yoctoproject.org/attachment.cgi?id=2229, notification for changed version to include layers
+ $scope.zone2alerts.forEach(function (e) { e.close() });
+ alertText += "This has caused the following changes in your project layers:<ul>"
+
+ if (_data.layers !== undefined) {
+ // show added/deleted layer notifications; scope.layers is already updated by this point.
+ var addedLayers = [];
+ var deletedLayers = [];
+ _diffArrays( oldLayers, $scope.layers, function (e, f) { return e.id == f.id },
+ function (e) {addedLayers.push(e); },
+ function (e) {deletedLayers.push(e); });
+
+ // some of the deleted layers are actually replaced (changed) layers
+ var changedLayers = [];
+ deletedLayers.forEach(function (e) {
+ if ( -1 < addedLayers.findIndex(function (f) { return f.name == e.name })) {
+ changedLayers.push(e);
+ }
+ });
+
+ changedLayers.forEach(function (e) {
+ deletedLayers.splice(deletedLayers.indexOf(e), 1);
+ });
+
+ if (addedLayers.length > 0) {
+ alertText += "<li><strong>"+addedLayers.length+"</strong> layer" + ((addedLayers.length>1)?"s changed: ":" changed: ") + addedLayers.map(function (e) { return "<a href=\""+e.layerdetailurl+"\">"+e.name+"</a>" }).join(", ") + "</li>";
+ }
+ if (deletedLayers.length > 0) {
+ alertText += "<li><strong>"+deletedLayers.length+"</strong> layer" + ((deletedLayers.length>1)?"s deleted: ":"deleted: ") + deletedLayers.map(function (e) { return "<a href=\""+e.layerdetailurl+"\">"+e.name+"</a>" }).join(", ") + "</li>";
+ }
+
+ }
+ alertText += "</ul>";
+ }
+ $scope.displayAlert(alertZone, alertText, "alert-info");
+ });
+ }
+
+
+ $scope.updateDisplayWithCommands = function() {
+ cmd = $location.path();
+
+ function _cmdExecuteWithParam(param, f) {
+ if (cmd.indexOf(param)==0) {
+ if (cmd.indexOf("=") > -1) {
+ var parameter = cmd.split("=", 2)[1];
+ if (parameter != undefined && parameter.length > 0) {
+ f(parameter);
+ }
+ } else {
+ f();
+ };
+ }
+ }
+
+ _cmdExecuteWithParam("/newproject", function () {
+ $scope.displayAlert($scope.zone1alerts,
+ "Your project <strong>" + $scope.project.name +
+ "</strong> has been created. You can now <a href=\""+ $scope.urls.layers +
+ "\">add layers</a> and <a href=\""+ $scope.urls.targets +
+ "\">select targets</a> you want to build.", "alert-success");
+ });
+
+ _cmdExecuteWithParam("/layerimported", function (layer) {
+ var imported = $cookieStore.get("layer-imported-alert");
+ var text;
+
+ if (!imported)
+ return;
+
+ if (imported.deps_added.length == 0) {
+ text = "You have imported <strong><a href=\""+$scope.urls.layer+
+ imported.imported_layer.id+"\">"+imported.imported_layer.name+
+ "</a></strong> and added it to your project.";
+ } else {
+ var links = "<a href=\""+$scope.urls.layer+
+ imported.imported_layer.id+"\">"+imported.imported_layer.name+
+ "</a>, ";
+
+ imported.deps_added.map (function(item, index){
+ links +="<a href=\""+$scope.urls.layer+item.id+"\" >"+item.name+
+ "</a>";
+ /*If we're at the last element we don't want the trailing comma */
+ if (imported.deps_added[index+1] != undefined)
+ links += ", ";
+ });
+
+ /* Length + 1 here to do deps + the imported layer */
+ text = "You have imported <strong><a href=\""+$scope.urls.layer+
+ imported.imported_layer.id+"\">"+imported.imported_layer.name+
+ "</a></strong> and added <strong>"+(imported.deps_added.length+1)+
+ "</strong> layers to your project: <strong>"+links+"</strong>";
+ }
+
+ $scope.displayAlert($scope.zone2alerts, text, "alert-info");
+ // This doesn't work
+ $cookieStore.remove("layer-imported-alert");
+ //use jquery plugin instead
+ $.removeCookie("layer-imported-alert", { path: "/"});
+ });
+
+ _cmdExecuteWithParam("/targetbuild=", function (targets) {
+ var oldTargetName = $scope.targetName;
+ $scope.targetName = targets.split(",").join(" ");
+ $scope.targetNamedBuild();
+ $scope.targetName = oldTargetName;
+ });
+
+ _cmdExecuteWithParam("/machineselect=", function (machine) {
+ $scope.machineName = machine;
+ $scope.toggle('#select-machine');
+ });
+
+
+ _cmdExecuteWithParam("/layeradd=", function (layer) {
+ angular.forEach(layer.split(","), function (l) {
+ $scope.layerAddId = l;
+ $scope.layerAdd();
+ });
+ });
+ }
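+
+ /* Illustrative note (editor addition, not part of the original patch):
+ * these commands arrive through the URL fragment, e.g. on a hypothetical
+ * project page .../project/1/#/targetbuild=core-image-minimal queues a
+ * build of that target, and .../project/1/#/machineselect=qemux86 opens
+ * the machine selector pre-filled with "qemux86".
+ */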
+
+ $scope.displayAlert = function(zone, text, type) {
+ if (zone.maxid === undefined) { zone.maxid = 0; }
+ var crtid = zone.maxid ++;
+ angular.forEach(zone, function (o) { o.close() });
+ o = {
+ id: crtid, text: text, type: type,
+ close: function() {
+ zone.splice((function(id){ for (var i = 0; i < zone.length; i++) if (id == zone[i].id) { return i}; return undefined;})(crtid), 1);
+ },
+ }
+ zone.push(o);
+ return o;
+ }
+
+ $scope.toggle = function(id) {
+ $scope.projectName = $scope.project.name;
+ $scope.projectVersion = $scope.project.release.id;
+ $scope.machineName = $scope.machine.name;
+
+ angular.element(id).toggle();
+ angular.element(id+"-opposite").toggle();
+ }
+
+ $scope.selectedMostBuildTargets = function () {
+ keys = Object.keys($scope.mostBuiltTargets);
+ keys = keys.filter(function (e) { if ($scope.mostBuiltTargets[e]) return e });
+ return keys.length == 0;
+
+ }
+
+ // init code
+ //
+ $scope.init = function() {
+ $scope.pollHandle = $interval(function () { $scope._makeXHRCall({method: "GET", url: $scope.urls.xhr_edit, data: undefined});}, 2000, 0);
+ }
+
+ $scope.init();
+});
+
+
+/**
+ TESTING CODE
+*/
+
+function test_diff_arrays() {
+ _diffArrays([1,2,3], [2,3,4], function(e,f) { return e==f; }, function(e) {console.log("added", e)}, function(e) {console.log("deleted", e);})
+}
+
+// test_diff_arrays();
+
+var s = undefined;
+
+function test_set_alert(text) {
+ s = angular.element("div#main").scope();
+ s.displayAlert(s.zone3alerts, text);
+ console.log(s.zone3alerts);
+ s.$digest();
+}
diff --git a/bitbake/lib/toaster/toastergui/static/js/ui-bootstrap-tpls-0.11.0.js b/bitbake/lib/toaster/toastergui/static/js/ui-bootstrap-tpls-0.11.0.js
new file mode 100644
index 0000000000..fa6a861317
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/ui-bootstrap-tpls-0.11.0.js
@@ -0,0 +1,10 @@
+/*
+ * angular-ui-bootstrap
+ * http://angular-ui.github.io/bootstrap/
+
+ * Version: 0.11.0 - 2014-05-01
+ * License: MIT
+ */
+angular.module("ui.bootstrap",["ui.bootstrap.tpls","ui.bootstrap.transition","ui.bootstrap.collapse","ui.bootstrap.accordion","ui.bootstrap.alert","ui.bootstrap.bindHtml","ui.bootstrap.buttons","ui.bootstrap.carousel","ui.bootstrap.dateparser","ui.bootstrap.position","ui.bootstrap.datepicker","ui.bootstrap.dropdown","ui.bootstrap.modal","ui.bootstrap.pagination","ui.bootstrap.tooltip","ui.bootstrap.popover","ui.bootstrap.progressbar","ui.bootstrap.rating","ui.bootstrap.tabs","ui.bootstrap.timepicker","ui.bootstrap.typeahead"]),angular.module("ui.bootstrap.tpls",["template/accordion/accordion-group.html","template/accordion/accordion.html","template/alert/alert.html","template/carousel/carousel.html","template/carousel/slide.html","template/datepicker/datepicker.html","template/datepicker/day.html","template/datepicker/month.html","template/datepicker/popup.html","template/datepicker/year.html","template/modal/backdrop.html","template/modal/window.html","template/pagination/pager.html","template/pagination/pagination.html","template/tooltip/tooltip-html-unsafe-popup.html","template/tooltip/tooltip-popup.html","template/popover/popover.html","template/progressbar/bar.html","template/progressbar/progress.html","template/progressbar/progressbar.html","template/rating/rating.html","template/tabs/tab.html","template/tabs/tabset.html","template/timepicker/timepicker.html","template/typeahead/typeahead-match.html","template/typeahead/typeahead-popup.html"]),angular.module("ui.bootstrap.transition",[]).factory("$transition",["$q","$timeout","$rootScope",function(a,b,c){function d(a){for(var b in a)if(void 0!==f.style[b])return a[b]}var e=function(d,f,g){g=g||{};var h=a.defer(),i=e[g.animation?"animationEndEventName":"transitionEndEventName"],j=function(){c.$apply(function(){d.unbind(i,j),h.resolve(d)})};return i&&d.bind(i,j),b(function(){angular.isString(f)?d.addClass(f):angular.isFunction(f)?f(d):angular.isObject(f)&&d.css(f),i||h.resolve(d)}),h.promise.cancel=function(){i&&d.unbind(i,j),h.reject("Transition cancelled")},h.promise},f=document.createElement("trans"),g={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd",transition:"transitionend"},h={WebkitTransition:"webkitAnimationEnd",MozTransition:"animationend",OTransition:"oAnimationEnd",transition:"animationend"};return e.transitionEndEventName=d(g),e.animationEndEventName=d(h),e}]),angular.module("ui.bootstrap.collapse",["ui.bootstrap.transition"]).directive("collapse",["$transition",function(a){return{link:function(b,c,d){function e(b){function d(){j===e&&(j=void 0)}var e=a(c,b);return j&&j.cancel(),j=e,e.then(d,d),e}function f(){k?(k=!1,g()):(c.removeClass("collapse").addClass("collapsing"),e({height:c[0].scrollHeight+"px"}).then(g))}function g(){c.removeClass("collapsing"),c.addClass("collapse in"),c.css({height:"auto"})}function h(){if(k)k=!1,i(),c.css({height:0});else{c.css({height:c[0].scrollHeight+"px"});{c[0].offsetWidth}c.removeClass("collapse in").addClass("collapsing"),e({height:0}).then(i)}}function i(){c.removeClass("collapsing"),c.addClass("collapse")}var j,k=!0;b.$watch(d.collapse,function(a){a?h():f()})}}}]),angular.module("ui.bootstrap.accordion",["ui.bootstrap.collapse"]).constant("accordionConfig",{closeOthers:!0}).controller("AccordionController",["$scope","$attrs","accordionConfig",function(a,b,c){this.groups=[],this.closeOthers=function(d){var 
e=angular.isDefined(b.closeOthers)?a.$eval(b.closeOthers):c.closeOthers;e&&angular.forEach(this.groups,function(a){a!==d&&(a.isOpen=!1)})},this.addGroup=function(a){var b=this;this.groups.push(a),a.$on("$destroy",function(){b.removeGroup(a)})},this.removeGroup=function(a){var b=this.groups.indexOf(a);-1!==b&&this.groups.splice(b,1)}}]).directive("accordion",function(){return{restrict:"EA",controller:"AccordionController",transclude:!0,replace:!1,templateUrl:"template/accordion/accordion.html"}}).directive("accordionGroup",function(){return{require:"^accordion",restrict:"EA",transclude:!0,replace:!0,templateUrl:"template/accordion/accordion-group.html",scope:{heading:"@",isOpen:"=?",isDisabled:"=?"},controller:function(){this.setHeading=function(a){this.heading=a}},link:function(a,b,c,d){d.addGroup(a),a.$watch("isOpen",function(b){b&&d.closeOthers(a)}),a.toggleOpen=function(){a.isDisabled||(a.isOpen=!a.isOpen)}}}}).directive("accordionHeading",function(){return{restrict:"EA",transclude:!0,template:"",replace:!0,require:"^accordionGroup",link:function(a,b,c,d,e){d.setHeading(e(a,function(){}))}}}).directive("accordionTransclude",function(){return{require:"^accordionGroup",link:function(a,b,c,d){a.$watch(function(){return d[c.accordionTransclude]},function(a){a&&(b.html(""),b.append(a))})}}}),angular.module("ui.bootstrap.alert",[]).controller("AlertController",["$scope","$attrs",function(a,b){a.closeable="close"in b}]).directive("alert",function(){return{restrict:"EA",controller:"AlertController",templateUrl:"template/alert/alert.html",transclude:!0,replace:!0,scope:{type:"@",close:"&"}}}),angular.module("ui.bootstrap.bindHtml",[]).directive("bindHtmlUnsafe",function(){return function(a,b,c){b.addClass("ng-binding").data("$binding",c.bindHtmlUnsafe),a.$watch(c.bindHtmlUnsafe,function(a){b.html(a||"")})}}),angular.module("ui.bootstrap.buttons",[]).constant("buttonConfig",{activeClass:"active",toggleEvent:"click"}).controller("ButtonsController",["buttonConfig",function(a){this.activeClass=a.activeClass||"active",this.toggleEvent=a.toggleEvent||"click"}]).directive("btnRadio",function(){return{require:["btnRadio","ngModel"],controller:"ButtonsController",link:function(a,b,c,d){var e=d[0],f=d[1];f.$render=function(){b.toggleClass(e.activeClass,angular.equals(f.$modelValue,a.$eval(c.btnRadio)))},b.bind(e.toggleEvent,function(){var d=b.hasClass(e.activeClass);(!d||angular.isDefined(c.uncheckable))&&a.$apply(function(){f.$setViewValue(d?null:a.$eval(c.btnRadio)),f.$render()})})}}}).directive("btnCheckbox",function(){return{require:["btnCheckbox","ngModel"],controller:"ButtonsController",link:function(a,b,c,d){function e(){return g(c.btnCheckboxTrue,!0)}function f(){return g(c.btnCheckboxFalse,!1)}function g(b,c){var d=a.$eval(b);return angular.isDefined(d)?d:c}var h=d[0],i=d[1];i.$render=function(){b.toggleClass(h.activeClass,angular.equals(i.$modelValue,e()))},b.bind(h.toggleEvent,function(){a.$apply(function(){i.$setViewValue(b.hasClass(h.activeClass)?f():e()),i.$render()})})}}}),angular.module("ui.bootstrap.carousel",["ui.bootstrap.transition"]).controller("CarouselController",["$scope","$timeout","$transition",function(a,b,c){function d(){e();var c=+a.interval;!isNaN(c)&&c>=0&&(g=b(f,c))}function e(){g&&(b.cancel(g),g=null)}function f(){h?(a.next(),d()):a.pause()}var g,h,i=this,j=i.slides=a.slides=[],k=-1;i.currentSlide=null;var l=!1;i.select=a.select=function(e,f){function 
g(){if(!l){if(i.currentSlide&&angular.isString(f)&&!a.noTransition&&e.$element){e.$element.addClass(f);{e.$element[0].offsetWidth}angular.forEach(j,function(a){angular.extend(a,{direction:"",entering:!1,leaving:!1,active:!1})}),angular.extend(e,{direction:f,active:!0,entering:!0}),angular.extend(i.currentSlide||{},{direction:f,leaving:!0}),a.$currentTransition=c(e.$element,{}),function(b,c){a.$currentTransition.then(function(){h(b,c)},function(){h(b,c)})}(e,i.currentSlide)}else h(e,i.currentSlide);i.currentSlide=e,k=m,d()}}function h(b,c){angular.extend(b,{direction:"",active:!0,leaving:!1,entering:!1}),angular.extend(c||{},{direction:"",active:!1,leaving:!1,entering:!1}),a.$currentTransition=null}var m=j.indexOf(e);void 0===f&&(f=m>k?"next":"prev"),e&&e!==i.currentSlide&&(a.$currentTransition?(a.$currentTransition.cancel(),b(g)):g())},a.$on("$destroy",function(){l=!0}),i.indexOfSlide=function(a){return j.indexOf(a)},a.next=function(){var b=(k+1)%j.length;return a.$currentTransition?void 0:i.select(j[b],"next")},a.prev=function(){var b=0>k-1?j.length-1:k-1;return a.$currentTransition?void 0:i.select(j[b],"prev")},a.isActive=function(a){return i.currentSlide===a},a.$watch("interval",d),a.$on("$destroy",e),a.play=function(){h||(h=!0,d())},a.pause=function(){a.noPause||(h=!1,e())},i.addSlide=function(b,c){b.$element=c,j.push(b),1===j.length||b.active?(i.select(j[j.length-1]),1==j.length&&a.play()):b.active=!1},i.removeSlide=function(a){var b=j.indexOf(a);j.splice(b,1),j.length>0&&a.active?i.select(b>=j.length?j[b-1]:j[b]):k>b&&k--}}]).directive("carousel",[function(){return{restrict:"EA",transclude:!0,replace:!0,controller:"CarouselController",require:"carousel",templateUrl:"template/carousel/carousel.html",scope:{interval:"=",noTransition:"=",noPause:"="}}}]).directive("slide",function(){return{require:"^carousel",restrict:"EA",transclude:!0,replace:!0,templateUrl:"template/carousel/slide.html",scope:{active:"=?"},link:function(a,b,c,d){d.addSlide(a,b),a.$on("$destroy",function(){d.removeSlide(a)}),a.$watch("active",function(b){b&&d.select(a)})}}}),angular.module("ui.bootstrap.dateparser",[]).service("dateParser",["$locale","orderByFilter",function(a,b){function c(a,b,c){return 1===b&&c>28?29===c&&(a%4===0&&a%100!==0||a%400===0):3===b||5===b||8===b||10===b?31>c:!0}this.parsers={};var d={yyyy:{regex:"\\d{4}",apply:function(a){this.year=+a}},yy:{regex:"\\d{2}",apply:function(a){this.year=+a+2e3}},y:{regex:"\\d{1,4}",apply:function(a){this.year=+a}},MMMM:{regex:a.DATETIME_FORMATS.MONTH.join("|"),apply:function(b){this.month=a.DATETIME_FORMATS.MONTH.indexOf(b)}},MMM:{regex:a.DATETIME_FORMATS.SHORTMONTH.join("|"),apply:function(b){this.month=a.DATETIME_FORMATS.SHORTMONTH.indexOf(b)}},MM:{regex:"0[1-9]|1[0-2]",apply:function(a){this.month=a-1}},M:{regex:"[1-9]|1[0-2]",apply:function(a){this.month=a-1}},dd:{regex:"[0-2][0-9]{1}|3[0-1]{1}",apply:function(a){this.date=+a}},d:{regex:"[1-2]?[0-9]{1}|3[0-1]{1}",apply:function(a){this.date=+a}},EEEE:{regex:a.DATETIME_FORMATS.DAY.join("|")},EEE:{regex:a.DATETIME_FORMATS.SHORTDAY.join("|")}};this.createParser=function(a){var c=[],e=a.split("");return angular.forEach(d,function(b,d){var f=a.indexOf(d);if(f>-1){a=a.split(""),e[f]="("+b.regex+")",a[f]="$";for(var g=f+1,h=f+d.length;h>g;g++)e[g]="",a[g]="$";a=a.join(""),c.push({index:f,apply:b.apply})}}),{regex:new RegExp("^"+e.join("")+"$"),map:b(c,"index")}},this.parse=function(b,d){if(!angular.isString(b))return b;d=a.DATETIME_FORMATS[d]||d,this.parsers[d]||(this.parsers[d]=this.createParser(d));var 
e=this.parsers[d],f=e.regex,g=e.map,h=b.match(f);if(h&&h.length){for(var i,j={year:1900,month:0,date:1,hours:0},k=1,l=h.length;l>k;k++){var m=g[k-1];m.apply&&m.apply.call(j,h[k])}return c(j.year,j.month,j.date)&&(i=new Date(j.year,j.month,j.date,j.hours)),i}}}]),angular.module("ui.bootstrap.position",[]).factory("$position",["$document","$window",function(a,b){function c(a,c){return a.currentStyle?a.currentStyle[c]:b.getComputedStyle?b.getComputedStyle(a)[c]:a.style[c]}function d(a){return"static"===(c(a,"position")||"static")}var e=function(b){for(var c=a[0],e=b.offsetParent||c;e&&e!==c&&d(e);)e=e.offsetParent;return e||c};return{position:function(b){var c=this.offset(b),d={top:0,left:0},f=e(b[0]);f!=a[0]&&(d=this.offset(angular.element(f)),d.top+=f.clientTop-f.scrollTop,d.left+=f.clientLeft-f.scrollLeft);var g=b[0].getBoundingClientRect();return{width:g.width||b.prop("offsetWidth"),height:g.height||b.prop("offsetHeight"),top:c.top-d.top,left:c.left-d.left}},offset:function(c){var d=c[0].getBoundingClientRect();return{width:d.width||c.prop("offsetWidth"),height:d.height||c.prop("offsetHeight"),top:d.top+(b.pageYOffset||a[0].documentElement.scrollTop),left:d.left+(b.pageXOffset||a[0].documentElement.scrollLeft)}},positionElements:function(a,b,c,d){var e,f,g,h,i=c.split("-"),j=i[0],k=i[1]||"center";e=d?this.offset(a):this.position(a),f=b.prop("offsetWidth"),g=b.prop("offsetHeight");var l={center:function(){return e.left+e.width/2-f/2},left:function(){return e.left},right:function(){return e.left+e.width}},m={center:function(){return e.top+e.height/2-g/2},top:function(){return e.top},bottom:function(){return e.top+e.height}};switch(j){case"right":h={top:m[k](),left:l[j]()};break;case"left":h={top:m[k](),left:e.left-f};break;case"bottom":h={top:m[j](),left:l[k]()};break;default:h={top:e.top-g,left:l[k]()}}return h}}}]),angular.module("ui.bootstrap.datepicker",["ui.bootstrap.dateparser","ui.bootstrap.position"]).constant("datepickerConfig",{formatDay:"dd",formatMonth:"MMMM",formatYear:"yyyy",formatDayHeader:"EEE",formatDayTitle:"MMMM yyyy",formatMonthTitle:"yyyy",datepickerMode:"day",minMode:"day",maxMode:"year",showWeeks:!0,startingDay:0,yearRange:20,minDate:null,maxDate:null}).controller("DatepickerController",["$scope","$attrs","$parse","$interpolate","$timeout","$log","dateFilter","datepickerConfig",function(a,b,c,d,e,f,g,h){var i=this,j={$setViewValue:angular.noop};this.modes=["day","month","year"],angular.forEach(["formatDay","formatMonth","formatYear","formatDayHeader","formatDayTitle","formatMonthTitle","minMode","maxMode","showWeeks","startingDay","yearRange"],function(c,e){i[c]=angular.isDefined(b[c])?8>e?d(b[c])(a.$parent):a.$parent.$eval(b[c]):h[c]}),angular.forEach(["minDate","maxDate"],function(d){b[d]?a.$parent.$watch(c(b[d]),function(a){i[d]=a?new Date(a):null,i.refreshView()}):i[d]=h[d]?new Date(h[d]):null}),a.datepickerMode=a.datepickerMode||h.datepickerMode,a.uniqueId="datepicker-"+a.$id+"-"+Math.floor(1e4*Math.random()),this.activeDate=angular.isDefined(b.initDate)?a.$parent.$eval(b.initDate):new Date,a.isActive=function(b){return 0===i.compare(b.date,i.activeDate)?(a.activeDateId=b.uid,!0):!1},this.init=function(a){j=a,j.$render=function(){i.render()}},this.render=function(){if(j.$modelValue){var a=new Date(j.$modelValue),b=!isNaN(a);b?this.activeDate=a:f.error('Datepicker directive: "ng-model" value must be a Date object, a number of milliseconds since 01.01.1970 or a string representing an RFC2822 or ISO 8601 
date.'),j.$setValidity("date",b)}this.refreshView()},this.refreshView=function(){if(this.element){this._refreshView();var a=j.$modelValue?new Date(j.$modelValue):null;j.$setValidity("date-disabled",!a||this.element&&!this.isDisabled(a))}},this.createDateObject=function(a,b){var c=j.$modelValue?new Date(j.$modelValue):null;return{date:a,label:g(a,b),selected:c&&0===this.compare(a,c),disabled:this.isDisabled(a),current:0===this.compare(a,new Date)}},this.isDisabled=function(c){return this.minDate&&this.compare(c,this.minDate)<0||this.maxDate&&this.compare(c,this.maxDate)>0||b.dateDisabled&&a.dateDisabled({date:c,mode:a.datepickerMode})},this.split=function(a,b){for(var c=[];a.length>0;)c.push(a.splice(0,b));return c},a.select=function(b){if(a.datepickerMode===i.minMode){var c=j.$modelValue?new Date(j.$modelValue):new Date(0,0,0,0,0,0,0);c.setFullYear(b.getFullYear(),b.getMonth(),b.getDate()),j.$setViewValue(c),j.$render()}else i.activeDate=b,a.datepickerMode=i.modes[i.modes.indexOf(a.datepickerMode)-1]},a.move=function(a){var b=i.activeDate.getFullYear()+a*(i.step.years||0),c=i.activeDate.getMonth()+a*(i.step.months||0);i.activeDate.setFullYear(b,c,1),i.refreshView()},a.toggleMode=function(b){b=b||1,a.datepickerMode===i.maxMode&&1===b||a.datepickerMode===i.minMode&&-1===b||(a.datepickerMode=i.modes[i.modes.indexOf(a.datepickerMode)+b])},a.keys={13:"enter",32:"space",33:"pageup",34:"pagedown",35:"end",36:"home",37:"left",38:"up",39:"right",40:"down"};var k=function(){e(function(){i.element[0].focus()},0,!1)};a.$on("datepicker.focus",k),a.keydown=function(b){var c=a.keys[b.which];if(c&&!b.shiftKey&&!b.altKey)if(b.preventDefault(),b.stopPropagation(),"enter"===c||"space"===c){if(i.isDisabled(i.activeDate))return;a.select(i.activeDate),k()}else!b.ctrlKey||"up"!==c&&"down"!==c?(i.handleKeyDown(c,b),i.refreshView()):(a.toggleMode("up"===c?1:-1),k())}}]).directive("datepicker",function(){return{restrict:"EA",replace:!0,templateUrl:"template/datepicker/datepicker.html",scope:{datepickerMode:"=?",dateDisabled:"&"},require:["datepicker","?^ngModel"],controller:"DatepickerController",link:function(a,b,c,d){var e=d[0],f=d[1];f&&e.init(f)}}}).directive("daypicker",["dateFilter",function(a){return{restrict:"EA",replace:!0,templateUrl:"template/datepicker/day.html",require:"^datepicker",link:function(b,c,d,e){function f(a,b){return 1!==b||a%4!==0||a%100===0&&a%400!==0?i[b]:29}function g(a,b){var c=new Array(b),d=new Date(a),e=0;for(d.setHours(12);b>e;)c[e++]=new Date(d),d.setDate(d.getDate()+1);return c}function h(a){var b=new Date(a);b.setDate(b.getDate()+4-(b.getDay()||7));var c=b.getTime();return b.setMonth(0),b.setDate(1),Math.floor(Math.round((c-b)/864e5)/7)+1}b.showWeeks=e.showWeeks,e.step={months:1},e.element=c;var i=[31,28,31,30,31,30,31,31,30,31,30,31];e._refreshView=function(){var c=e.activeDate.getFullYear(),d=e.activeDate.getMonth(),f=new Date(c,d,1),i=e.startingDay-f.getDay(),j=i>0?7-i:-i,k=new Date(f);j>0&&k.setDate(-j+1);for(var l=g(k,42),m=0;42>m;m++)l[m]=angular.extend(e.createDateObject(l[m],e.formatDay),{secondary:l[m].getMonth()!==d,uid:b.uniqueId+"-"+m});b.labels=new Array(7);for(var n=0;7>n;n++)b.labels[n]={abbr:a(l[n].date,e.formatDayHeader),full:a(l[n].date,"EEEE")};if(b.title=a(e.activeDate,e.formatDayTitle),b.rows=e.split(l,7),b.showWeeks){b.weekNumbers=[];for(var o=h(b.rows[0][0].date),p=b.rows.length;b.weekNumbers.push(o++)<p;);}},e.compare=function(a,b){return new Date(a.getFullYear(),a.getMonth(),a.getDate())-new 
Date(b.getFullYear(),b.getMonth(),b.getDate())},e.handleKeyDown=function(a){var b=e.activeDate.getDate();if("left"===a)b-=1;else if("up"===a)b-=7;else if("right"===a)b+=1;else if("down"===a)b+=7;else if("pageup"===a||"pagedown"===a){var c=e.activeDate.getMonth()+("pageup"===a?-1:1);e.activeDate.setMonth(c,1),b=Math.min(f(e.activeDate.getFullYear(),e.activeDate.getMonth()),b)}else"home"===a?b=1:"end"===a&&(b=f(e.activeDate.getFullYear(),e.activeDate.getMonth()));e.activeDate.setDate(b)},e.refreshView()}}}]).directive("monthpicker",["dateFilter",function(a){return{restrict:"EA",replace:!0,templateUrl:"template/datepicker/month.html",require:"^datepicker",link:function(b,c,d,e){e.step={years:1},e.element=c,e._refreshView=function(){for(var c=new Array(12),d=e.activeDate.getFullYear(),f=0;12>f;f++)c[f]=angular.extend(e.createDateObject(new Date(d,f,1),e.formatMonth),{uid:b.uniqueId+"-"+f});b.title=a(e.activeDate,e.formatMonthTitle),b.rows=e.split(c,3)},e.compare=function(a,b){return new Date(a.getFullYear(),a.getMonth())-new Date(b.getFullYear(),b.getMonth())},e.handleKeyDown=function(a){var b=e.activeDate.getMonth();if("left"===a)b-=1;else if("up"===a)b-=3;else if("right"===a)b+=1;else if("down"===a)b+=3;else if("pageup"===a||"pagedown"===a){var c=e.activeDate.getFullYear()+("pageup"===a?-1:1);e.activeDate.setFullYear(c)}else"home"===a?b=0:"end"===a&&(b=11);e.activeDate.setMonth(b)},e.refreshView()}}}]).directive("yearpicker",["dateFilter",function(){return{restrict:"EA",replace:!0,templateUrl:"template/datepicker/year.html",require:"^datepicker",link:function(a,b,c,d){function e(a){return parseInt((a-1)/f,10)*f+1}var f=d.yearRange;d.step={years:f},d.element=b,d._refreshView=function(){for(var b=new Array(f),c=0,g=e(d.activeDate.getFullYear());f>c;c++)b[c]=angular.extend(d.createDateObject(new Date(g+c,0,1),d.formatYear),{uid:a.uniqueId+"-"+c});a.title=[b[0].label,b[f-1].label].join(" - "),a.rows=d.split(b,5)},d.compare=function(a,b){return a.getFullYear()-b.getFullYear()},d.handleKeyDown=function(a){var b=d.activeDate.getFullYear();"left"===a?b-=1:"up"===a?b-=5:"right"===a?b+=1:"down"===a?b+=5:"pageup"===a||"pagedown"===a?b+=("pageup"===a?-1:1)*d.step.years:"home"===a?b=e(d.activeDate.getFullYear()):"end"===a&&(b=e(d.activeDate.getFullYear())+f-1),d.activeDate.setFullYear(b)},d.refreshView()}}}]).constant("datepickerPopupConfig",{datepickerPopup:"yyyy-MM-dd",currentText:"Today",clearText:"Clear",closeText:"Done",closeOnDateSelection:!0,appendToBody:!1,showButtonBar:!0}).directive("datepickerPopup",["$compile","$parse","$document","$position","dateFilter","dateParser","datepickerPopupConfig",function(a,b,c,d,e,f,g){return{restrict:"EA",require:"ngModel",scope:{isOpen:"=?",currentText:"@",clearText:"@",closeText:"@",dateDisabled:"&"},link:function(h,i,j,k){function l(a){return a.replace(/([A-Z])/g,function(a){return"-"+a.toLowerCase()})}function m(a){if(a){if(angular.isDate(a)&&!isNaN(a))return k.$setValidity("date",!0),a;if(angular.isString(a)){var b=f.parse(a,n)||new Date(a);return isNaN(b)?void k.$setValidity("date",!1):(k.$setValidity("date",!0),b)}return void k.$setValidity("date",!1)}return k.$setValidity("date",!0),null}var n,o=angular.isDefined(j.closeOnDateSelection)?h.$parent.$eval(j.closeOnDateSelection):g.closeOnDateSelection,p=angular.isDefined(j.datepickerAppendToBody)?h.$parent.$eval(j.datepickerAppendToBody):g.appendToBody;h.showButtonBar=angular.isDefined(j.showButtonBar)?h.$parent.$eval(j.showButtonBar):g.showButtonBar,h.getText=function(a){return 
h[a+"Text"]||g[a+"Text"]},j.$observe("datepickerPopup",function(a){n=a||g.datepickerPopup,k.$render()});var q=angular.element("<div datepicker-popup-wrap><div datepicker></div></div>");q.attr({"ng-model":"date","ng-change":"dateSelection()"});var r=angular.element(q.children()[0]);j.datepickerOptions&&angular.forEach(h.$parent.$eval(j.datepickerOptions),function(a,b){r.attr(l(b),a)}),angular.forEach(["minDate","maxDate"],function(a){j[a]&&(h.$parent.$watch(b(j[a]),function(b){h[a]=b}),r.attr(l(a),a))}),j.dateDisabled&&r.attr("date-disabled","dateDisabled({ date: date, mode: mode })"),k.$parsers.unshift(m),h.dateSelection=function(a){angular.isDefined(a)&&(h.date=a),k.$setViewValue(h.date),k.$render(),o&&(h.isOpen=!1,i[0].focus())},i.bind("input change keyup",function(){h.$apply(function(){h.date=k.$modelValue})}),k.$render=function(){var a=k.$viewValue?e(k.$viewValue,n):"";i.val(a),h.date=m(k.$modelValue)};var s=function(a){h.isOpen&&a.target!==i[0]&&h.$apply(function(){h.isOpen=!1})},t=function(a){h.keydown(a)};i.bind("keydown",t),h.keydown=function(a){27===a.which?(a.preventDefault(),a.stopPropagation(),h.close()):40!==a.which||h.isOpen||(h.isOpen=!0)},h.$watch("isOpen",function(a){a?(h.$broadcast("datepicker.focus"),h.position=p?d.offset(i):d.position(i),h.position.top=h.position.top+i.prop("offsetHeight"),c.bind("click",s)):c.unbind("click",s)}),h.select=function(a){if("today"===a){var b=new Date;angular.isDate(k.$modelValue)?(a=new Date(k.$modelValue),a.setFullYear(b.getFullYear(),b.getMonth(),b.getDate())):a=new Date(b.setHours(0,0,0,0))}h.dateSelection(a)},h.close=function(){h.isOpen=!1,i[0].focus()};var u=a(q)(h);p?c.find("body").append(u):i.after(u),h.$on("$destroy",function(){u.remove(),i.unbind("keydown",t),c.unbind("click",s)})}}}]).directive("datepickerPopupWrap",function(){return{restrict:"EA",replace:!0,transclude:!0,templateUrl:"template/datepicker/popup.html",link:function(a,b){b.bind("click",function(a){a.preventDefault(),a.stopPropagation()})}}}),angular.module("ui.bootstrap.dropdown",[]).constant("dropdownConfig",{openClass:"open"}).service("dropdownService",["$document",function(a){var b=null;this.open=function(e){b||(a.bind("click",c),a.bind("keydown",d)),b&&b!==e&&(b.isOpen=!1),b=e},this.close=function(e){b===e&&(b=null,a.unbind("click",c),a.unbind("keydown",d))};var c=function(a){a&&a.isDefaultPrevented()||b.$apply(function(){b.isOpen=!1})},d=function(a){27===a.which&&(b.focusToggleElement(),c())}}]).controller("DropdownController",["$scope","$attrs","$parse","dropdownConfig","dropdownService","$animate",function(a,b,c,d,e,f){var g,h=this,i=a.$new(),j=d.openClass,k=angular.noop,l=b.onToggle?c(b.onToggle):angular.noop;this.init=function(d){h.$element=d,b.isOpen&&(g=c(b.isOpen),k=g.assign,a.$watch(g,function(a){i.isOpen=!!a}))},this.toggle=function(a){return i.isOpen=arguments.length?!!a:!i.isOpen},this.isOpen=function(){return i.isOpen},i.focusToggleElement=function(){h.toggleElement&&h.toggleElement[0].focus()},i.$watch("isOpen",function(b,c){f[b?"addClass":"removeClass"](h.$element,j),b?(i.focusToggleElement(),e.open(i)):e.close(i),k(a,b),angular.isDefined(b)&&b!==c&&l(a,{open:!!b})}),a.$on("$locationChangeSuccess",function(){i.isOpen=!1}),a.$on("$destroy",function(){i.$destroy()})}]).directive("dropdown",function(){return{restrict:"CA",controller:"DropdownController",link:function(a,b,c,d){d.init(b)}}}).directive("dropdownToggle",function(){return{restrict:"CA",require:"?^dropdown",link:function(a,b,c,d){if(d){d.toggleElement=b;var 
e=function(e){e.preventDefault(),b.hasClass("disabled")||c.disabled||a.$apply(function(){d.toggle()})};b.bind("click",e),b.attr({"aria-haspopup":!0,"aria-expanded":!1}),a.$watch(d.isOpen,function(a){b.attr("aria-expanded",!!a)}),a.$on("$destroy",function(){b.unbind("click",e)})}}}}),angular.module("ui.bootstrap.modal",["ui.bootstrap.transition"]).factory("$$stackedMap",function(){return{createNew:function(){var a=[];return{add:function(b,c){a.push({key:b,value:c})},get:function(b){for(var c=0;c<a.length;c++)if(b==a[c].key)return a[c]},keys:function(){for(var b=[],c=0;c<a.length;c++)b.push(a[c].key);return b},top:function(){return a[a.length-1]},remove:function(b){for(var c=-1,d=0;d<a.length;d++)if(b==a[d].key){c=d;break}return a.splice(c,1)[0]},removeTop:function(){return a.splice(a.length-1,1)[0]},length:function(){return a.length}}}}}).directive("modalBackdrop",["$timeout",function(a){return{restrict:"EA",replace:!0,templateUrl:"template/modal/backdrop.html",link:function(b){b.animate=!1,a(function(){b.animate=!0})}}}]).directive("modalWindow",["$modalStack","$timeout",function(a,b){return{restrict:"EA",scope:{index:"@",animate:"="},replace:!0,transclude:!0,templateUrl:function(a,b){return b.templateUrl||"template/modal/window.html"},link:function(c,d,e){d.addClass(e.windowClass||""),c.size=e.size,b(function(){c.animate=!0,d[0].focus()}),c.close=function(b){var c=a.getTop();c&&c.value.backdrop&&"static"!=c.value.backdrop&&b.target===b.currentTarget&&(b.preventDefault(),b.stopPropagation(),a.dismiss(c.key,"backdrop click"))}}}}]).factory("$modalStack",["$transition","$timeout","$document","$compile","$rootScope","$$stackedMap",function(a,b,c,d,e,f){function g(){for(var a=-1,b=n.keys(),c=0;c<b.length;c++)n.get(b[c]).value.backdrop&&(a=c);return a}function h(a){var b=c.find("body").eq(0),d=n.get(a).value;n.remove(a),j(d.modalDomEl,d.modalScope,300,function(){d.modalScope.$destroy(),b.toggleClass(m,n.length()>0),i()})}function i(){if(k&&-1==g()){var a=l;j(k,l,150,function(){a.$destroy(),a=null}),k=void 0,l=void 0}}function j(c,d,e,f){function g(){g.done||(g.done=!0,c.remove(),f&&f())}d.animate=!1;var h=a.transitionEndEventName;if(h){var i=b(g,e);c.bind(h,function(){b.cancel(i),g(),d.$apply()})}else b(g,0)}var k,l,m="modal-open",n=f.createNew(),o={};return e.$watch(g,function(a){l&&(l.index=a)}),c.bind("keydown",function(a){var b;27===a.which&&(b=n.top(),b&&b.value.keyboard&&(a.preventDefault(),e.$apply(function(){o.dismiss(b.key,"escape key press")})))}),o.open=function(a,b){n.add(a,{deferred:b.deferred,modalScope:b.scope,backdrop:b.backdrop,keyboard:b.keyboard});var f=c.find("body").eq(0),h=g();h>=0&&!k&&(l=e.$new(!0),l.index=h,k=d("<div modal-backdrop></div>")(l),f.append(k));var i=angular.element("<div modal-window></div>");i.attr({"template-url":b.windowTemplateUrl,"window-class":b.windowClass,size:b.size,index:n.length()-1,animate:"animate"}).html(b.content);var j=d(i)(b.scope);n.top().value.modalDomEl=j,f.append(j),f.addClass(m)},o.close=function(a,b){var c=n.get(a).value;c&&(c.deferred.resolve(b),h(a))},o.dismiss=function(a,b){var c=n.get(a).value;c&&(c.deferred.reject(b),h(a))},o.dismissAll=function(a){for(var b=this.getTop();b;)this.dismiss(b.key,a),b=this.getTop()},o.getTop=function(){return n.top()},o}]).provider("$modal",function(){var a={options:{backdrop:!0,keyboard:!0},$get:["$injector","$rootScope","$q","$http","$templateCache","$controller","$modalStack",function(b,c,d,e,f,g,h){function i(a){return 
a.template?d.when(a.template):e.get(a.templateUrl,{cache:f}).then(function(a){return a.data})}function j(a){var c=[];return angular.forEach(a,function(a){(angular.isFunction(a)||angular.isArray(a))&&c.push(d.when(b.invoke(a)))}),c}var k={};return k.open=function(b){var e=d.defer(),f=d.defer(),k={result:e.promise,opened:f.promise,close:function(a){h.close(k,a)},dismiss:function(a){h.dismiss(k,a)}};if(b=angular.extend({},a.options,b),b.resolve=b.resolve||{},!b.template&&!b.templateUrl)throw new Error("One of template or templateUrl options is required.");var l=d.all([i(b)].concat(j(b.resolve)));return l.then(function(a){var d=(b.scope||c).$new();d.$close=k.close,d.$dismiss=k.dismiss;var f,i={},j=1;b.controller&&(i.$scope=d,i.$modalInstance=k,angular.forEach(b.resolve,function(b,c){i[c]=a[j++]}),f=g(b.controller,i)),h.open(k,{scope:d,deferred:e,content:a[0],backdrop:b.backdrop,keyboard:b.keyboard,windowClass:b.windowClass,windowTemplateUrl:b.windowTemplateUrl,size:b.size})},function(a){e.reject(a)}),l.then(function(){f.resolve(!0)},function(){f.reject(!1)}),k},k}]};return a}),angular.module("ui.bootstrap.pagination",[]).controller("PaginationController",["$scope","$attrs","$parse",function(a,b,c){var d=this,e={$setViewValue:angular.noop},f=b.numPages?c(b.numPages).assign:angular.noop;this.init=function(f,g){e=f,this.config=g,e.$render=function(){d.render()},b.itemsPerPage?a.$parent.$watch(c(b.itemsPerPage),function(b){d.itemsPerPage=parseInt(b,10),a.totalPages=d.calculateTotalPages()}):this.itemsPerPage=g.itemsPerPage},this.calculateTotalPages=function(){var b=this.itemsPerPage<1?1:Math.ceil(a.totalItems/this.itemsPerPage);return Math.max(b||0,1)},this.render=function(){a.page=parseInt(e.$viewValue,10)||1},a.selectPage=function(b){a.page!==b&&b>0&&b<=a.totalPages&&(e.$setViewValue(b),e.$render())},a.getText=function(b){return a[b+"Text"]||d.config[b+"Text"]},a.noPrevious=function(){return 1===a.page},a.noNext=function(){return a.page===a.totalPages},a.$watch("totalItems",function(){a.totalPages=d.calculateTotalPages()}),a.$watch("totalPages",function(b){f(a.$parent,b),a.page>b?a.selectPage(b):e.$render()})}]).constant("paginationConfig",{itemsPerPage:10,boundaryLinks:!1,directionLinks:!0,firstText:"First",previousText:"Previous",nextText:"Next",lastText:"Last",rotate:!0}).directive("pagination",["$parse","paginationConfig",function(a,b){return{restrict:"EA",scope:{totalItems:"=",firstText:"@",previousText:"@",nextText:"@",lastText:"@"},require:["pagination","?ngModel"],controller:"PaginationController",templateUrl:"template/pagination/pagination.html",replace:!0,link:function(c,d,e,f){function g(a,b,c){return{number:a,text:b,active:c}}function h(a,b){var c=[],d=1,e=b,f=angular.isDefined(k)&&b>k;f&&(l?(d=Math.max(a-Math.floor(k/2),1),e=d+k-1,e>b&&(e=b,d=e-k+1)):(d=(Math.ceil(a/k)-1)*k+1,e=Math.min(d+k-1,b)));for(var h=d;e>=h;h++){var i=g(h,h,h===a);c.push(i)}if(f&&!l){if(d>1){var j=g(d-1,"...",!1);c.unshift(j)}if(b>e){var m=g(e+1,"...",!1);c.push(m)}}return c}var i=f[0],j=f[1];if(j){var k=angular.isDefined(e.maxSize)?c.$parent.$eval(e.maxSize):b.maxSize,l=angular.isDefined(e.rotate)?c.$parent.$eval(e.rotate):b.rotate;c.boundaryLinks=angular.isDefined(e.boundaryLinks)?c.$parent.$eval(e.boundaryLinks):b.boundaryLinks,c.directionLinks=angular.isDefined(e.directionLinks)?c.$parent.$eval(e.directionLinks):b.directionLinks,i.init(j,b),e.maxSize&&c.$parent.$watch(a(e.maxSize),function(a){k=parseInt(a,10),i.render()});var 
m=i.render;i.render=function(){m(),c.page>0&&c.page<=c.totalPages&&(c.pages=h(c.page,c.totalPages))}}}}}]).constant("pagerConfig",{itemsPerPage:10,previousText:"« Previous",nextText:"Next »",align:!0}).directive("pager",["pagerConfig",function(a){return{restrict:"EA",scope:{totalItems:"=",previousText:"@",nextText:"@"},require:["pager","?ngModel"],controller:"PaginationController",templateUrl:"template/pagination/pager.html",replace:!0,link:function(b,c,d,e){var f=e[0],g=e[1];g&&(b.align=angular.isDefined(d.align)?b.$parent.$eval(d.align):a.align,f.init(g,a))}}}]),angular.module("ui.bootstrap.tooltip",["ui.bootstrap.position","ui.bootstrap.bindHtml"]).provider("$tooltip",function(){function a(a){var b=/[A-Z]/g,c="-";
+return a.replace(b,function(a,b){return(b?c:"")+a.toLowerCase()})}var b={placement:"top",animation:!0,popupDelay:0},c={mouseenter:"mouseleave",click:"click",focus:"blur"},d={};this.options=function(a){angular.extend(d,a)},this.setTriggers=function(a){angular.extend(c,a)},this.$get=["$window","$compile","$timeout","$parse","$document","$position","$interpolate",function(e,f,g,h,i,j,k){return function(e,l,m){function n(a){var b=a||o.trigger||m,d=c[b]||b;return{show:b,hide:d}}var o=angular.extend({},b,d),p=a(e),q=k.startSymbol(),r=k.endSymbol(),s="<div "+p+'-popup title="'+q+"tt_title"+r+'" content="'+q+"tt_content"+r+'" placement="'+q+"tt_placement"+r+'" animation="tt_animation" is-open="tt_isOpen"></div>';return{restrict:"EA",scope:!0,compile:function(){var a=f(s);return function(b,c,d){function f(){b.tt_isOpen?m():k()}function k(){(!y||b.$eval(d[l+"Enable"]))&&(b.tt_popupDelay?v||(v=g(p,b.tt_popupDelay,!1),v.then(function(a){a()})):p()())}function m(){b.$apply(function(){q()})}function p(){return v=null,u&&(g.cancel(u),u=null),b.tt_content?(r(),t.css({top:0,left:0,display:"block"}),w?i.find("body").append(t):c.after(t),z(),b.tt_isOpen=!0,b.$digest(),z):angular.noop}function q(){b.tt_isOpen=!1,g.cancel(v),v=null,b.tt_animation?u||(u=g(s,500)):s()}function r(){t&&s(),t=a(b,function(){}),b.$digest()}function s(){u=null,t&&(t.remove(),t=null)}var t,u,v,w=angular.isDefined(o.appendToBody)?o.appendToBody:!1,x=n(void 0),y=angular.isDefined(d[l+"Enable"]),z=function(){var a=j.positionElements(c,t,b.tt_placement,w);a.top+="px",a.left+="px",t.css(a)};b.tt_isOpen=!1,d.$observe(e,function(a){b.tt_content=a,!a&&b.tt_isOpen&&q()}),d.$observe(l+"Title",function(a){b.tt_title=a}),d.$observe(l+"Placement",function(a){b.tt_placement=angular.isDefined(a)?a:o.placement}),d.$observe(l+"PopupDelay",function(a){var c=parseInt(a,10);b.tt_popupDelay=isNaN(c)?o.popupDelay:c});var A=function(){c.unbind(x.show,k),c.unbind(x.hide,m)};d.$observe(l+"Trigger",function(a){A(),x=n(a),x.show===x.hide?c.bind(x.show,f):(c.bind(x.show,k),c.bind(x.hide,m))});var B=b.$eval(d[l+"Animation"]);b.tt_animation=angular.isDefined(B)?!!B:o.animation,d.$observe(l+"AppendToBody",function(a){w=angular.isDefined(a)?h(a)(b):w}),w&&b.$on("$locationChangeSuccess",function(){b.tt_isOpen&&q()}),b.$on("$destroy",function(){g.cancel(u),g.cancel(v),A(),s()})}}}}}]}).directive("tooltipPopup",function(){return{restrict:"EA",replace:!0,scope:{content:"@",placement:"@",animation:"&",isOpen:"&"},templateUrl:"template/tooltip/tooltip-popup.html"}}).directive("tooltip",["$tooltip",function(a){return a("tooltip","tooltip","mouseenter")}]).directive("tooltipHtmlUnsafePopup",function(){return{restrict:"EA",replace:!0,scope:{content:"@",placement:"@",animation:"&",isOpen:"&"},templateUrl:"template/tooltip/tooltip-html-unsafe-popup.html"}}).directive("tooltipHtmlUnsafe",["$tooltip",function(a){return a("tooltipHtmlUnsafe","tooltip","mouseenter")}]),angular.module("ui.bootstrap.popover",["ui.bootstrap.tooltip"]).directive("popoverPopup",function(){return{restrict:"EA",replace:!0,scope:{title:"@",content:"@",placement:"@",animation:"&",isOpen:"&"},templateUrl:"template/popover/popover.html"}}).directive("popover",["$tooltip",function(a){return a("popover","popover","click")}]),angular.module("ui.bootstrap.progressbar",[]).constant("progressConfig",{animate:!0,max:100}).controller("ProgressController",["$scope","$attrs","progressConfig",function(a,b,c){var 
d=this,e=angular.isDefined(b.animate)?a.$parent.$eval(b.animate):c.animate;this.bars=[],a.max=angular.isDefined(b.max)?a.$parent.$eval(b.max):c.max,this.addBar=function(b,c){e||c.css({transition:"none"}),this.bars.push(b),b.$watch("value",function(c){b.percent=+(100*c/a.max).toFixed(2)}),b.$on("$destroy",function(){c=null,d.removeBar(b)})},this.removeBar=function(a){this.bars.splice(this.bars.indexOf(a),1)}}]).directive("progress",function(){return{restrict:"EA",replace:!0,transclude:!0,controller:"ProgressController",require:"progress",scope:{},templateUrl:"template/progressbar/progress.html"}}).directive("bar",function(){return{restrict:"EA",replace:!0,transclude:!0,require:"^progress",scope:{value:"=",type:"@"},templateUrl:"template/progressbar/bar.html",link:function(a,b,c,d){d.addBar(a,b)}}}).directive("progressbar",function(){return{restrict:"EA",replace:!0,transclude:!0,controller:"ProgressController",scope:{value:"=",type:"@"},templateUrl:"template/progressbar/progressbar.html",link:function(a,b,c,d){d.addBar(a,angular.element(b.children()[0]))}}}),angular.module("ui.bootstrap.rating",[]).constant("ratingConfig",{max:5,stateOn:null,stateOff:null}).controller("RatingController",["$scope","$attrs","ratingConfig",function(a,b,c){var d={$setViewValue:angular.noop};this.init=function(e){d=e,d.$render=this.render,this.stateOn=angular.isDefined(b.stateOn)?a.$parent.$eval(b.stateOn):c.stateOn,this.stateOff=angular.isDefined(b.stateOff)?a.$parent.$eval(b.stateOff):c.stateOff;var f=angular.isDefined(b.ratingStates)?a.$parent.$eval(b.ratingStates):new Array(angular.isDefined(b.max)?a.$parent.$eval(b.max):c.max);a.range=this.buildTemplateObjects(f)},this.buildTemplateObjects=function(a){for(var b=0,c=a.length;c>b;b++)a[b]=angular.extend({index:b},{stateOn:this.stateOn,stateOff:this.stateOff},a[b]);return a},a.rate=function(b){!a.readonly&&b>=0&&b<=a.range.length&&(d.$setViewValue(b),d.$render())},a.enter=function(b){a.readonly||(a.value=b),a.onHover({value:b})},a.reset=function(){a.value=d.$viewValue,a.onLeave()},a.onKeydown=function(b){/(37|38|39|40)/.test(b.which)&&(b.preventDefault(),b.stopPropagation(),a.rate(a.value+(38===b.which||39===b.which?1:-1)))},this.render=function(){a.value=d.$viewValue}}]).directive("rating",function(){return{restrict:"EA",require:["rating","ngModel"],scope:{readonly:"=?",onHover:"&",onLeave:"&"},controller:"RatingController",templateUrl:"template/rating/rating.html",replace:!0,link:function(a,b,c,d){var e=d[0],f=d[1];f&&e.init(f)}}}),angular.module("ui.bootstrap.tabs",[]).controller("TabsetController",["$scope",function(a){var b=this,c=b.tabs=a.tabs=[];b.select=function(a){angular.forEach(c,function(b){b.active&&b!==a&&(b.active=!1,b.onDeselect())}),a.active=!0,a.onSelect()},b.addTab=function(a){c.push(a),1===c.length?a.active=!0:a.active&&b.select(a)},b.removeTab=function(a){var d=c.indexOf(a);if(a.active&&c.length>1){var 
e=d==c.length-1?d-1:d+1;b.select(c[e])}c.splice(d,1)}}]).directive("tabset",function(){return{restrict:"EA",transclude:!0,replace:!0,scope:{type:"@"},controller:"TabsetController",templateUrl:"template/tabs/tabset.html",link:function(a,b,c){a.vertical=angular.isDefined(c.vertical)?a.$parent.$eval(c.vertical):!1,a.justified=angular.isDefined(c.justified)?a.$parent.$eval(c.justified):!1}}}).directive("tab",["$parse",function(a){return{require:"^tabset",restrict:"EA",replace:!0,templateUrl:"template/tabs/tab.html",transclude:!0,scope:{active:"=?",heading:"@",onSelect:"&select",onDeselect:"&deselect"},controller:function(){},compile:function(b,c,d){return function(b,c,e,f){b.$watch("active",function(a){a&&f.select(b)}),b.disabled=!1,e.disabled&&b.$parent.$watch(a(e.disabled),function(a){b.disabled=!!a}),b.select=function(){b.disabled||(b.active=!0)},f.addTab(b),b.$on("$destroy",function(){f.removeTab(b)}),b.$transcludeFn=d}}}}]).directive("tabHeadingTransclude",[function(){return{restrict:"A",require:"^tab",link:function(a,b){a.$watch("headingElement",function(a){a&&(b.html(""),b.append(a))})}}}]).directive("tabContentTransclude",function(){function a(a){return a.tagName&&(a.hasAttribute("tab-heading")||a.hasAttribute("data-tab-heading")||"tab-heading"===a.tagName.toLowerCase()||"data-tab-heading"===a.tagName.toLowerCase())}return{restrict:"A",require:"^tabset",link:function(b,c,d){var e=b.$eval(d.tabContentTransclude);e.$transcludeFn(e.$parent,function(b){angular.forEach(b,function(b){a(b)?e.headingElement=b:c.append(b)})})}}}),angular.module("ui.bootstrap.timepicker",[]).constant("timepickerConfig",{hourStep:1,minuteStep:1,showMeridian:!0,meridians:null,readonlyInput:!1,mousewheel:!0}).controller("TimepickerController",["$scope","$attrs","$parse","$log","$locale","timepickerConfig",function(a,b,c,d,e,f){function g(){var b=parseInt(a.hours,10),c=a.showMeridian?b>0&&13>b:b>=0&&24>b;return c?(a.showMeridian&&(12===b&&(b=0),a.meridian===p[1]&&(b+=12)),b):void 0}function h(){var b=parseInt(a.minutes,10);return b>=0&&60>b?b:void 0}function i(a){return angular.isDefined(a)&&a.toString().length<2?"0"+a:a}function j(a){k(),o.$setViewValue(new Date(n)),l(a)}function k(){o.$setValidity("time",!0),a.invalidHours=!1,a.invalidMinutes=!1}function l(b){var c=n.getHours(),d=n.getMinutes();a.showMeridian&&(c=0===c||12===c?12:c%12),a.hours="h"===b?c:i(c),a.minutes="m"===b?d:i(d),a.meridian=n.getHours()<12?p[0]:p[1]}function m(a){var b=new Date(n.getTime()+6e4*a);n.setHours(b.getHours(),b.getMinutes()),j()}var n=new Date,o={$setViewValue:angular.noop},p=angular.isDefined(b.meridians)?a.$parent.$eval(b.meridians):f.meridians||e.DATETIME_FORMATS.AMPMS;this.init=function(c,d){o=c,o.$render=this.render;var e=d.eq(0),g=d.eq(1),h=angular.isDefined(b.mousewheel)?a.$parent.$eval(b.mousewheel):f.mousewheel;h&&this.setupMousewheelEvents(e,g),a.readonlyInput=angular.isDefined(b.readonlyInput)?a.$parent.$eval(b.readonlyInput):f.readonlyInput,this.setupInputEvents(e,g)};var q=f.hourStep;b.hourStep&&a.$parent.$watch(c(b.hourStep),function(a){q=parseInt(a,10)});var r=f.minuteStep;b.minuteStep&&a.$parent.$watch(c(b.minuteStep),function(a){r=parseInt(a,10)}),a.showMeridian=f.showMeridian,b.showMeridian&&a.$parent.$watch(c(b.showMeridian),function(b){if(a.showMeridian=!!b,o.$error.time){var c=g(),d=h();angular.isDefined(c)&&angular.isDefined(d)&&(n.setHours(c),j())}else l()}),this.setupMousewheelEvents=function(b,c){var d=function(a){a.originalEvent&&(a=a.originalEvent);var b=a.wheelDelta?a.wheelDelta:-a.deltaY;return 
a.detail||b>0};b.bind("mousewheel wheel",function(b){a.$apply(d(b)?a.incrementHours():a.decrementHours()),b.preventDefault()}),c.bind("mousewheel wheel",function(b){a.$apply(d(b)?a.incrementMinutes():a.decrementMinutes()),b.preventDefault()})},this.setupInputEvents=function(b,c){if(a.readonlyInput)return a.updateHours=angular.noop,void(a.updateMinutes=angular.noop);var d=function(b,c){o.$setViewValue(null),o.$setValidity("time",!1),angular.isDefined(b)&&(a.invalidHours=b),angular.isDefined(c)&&(a.invalidMinutes=c)};a.updateHours=function(){var a=g();angular.isDefined(a)?(n.setHours(a),j("h")):d(!0)},b.bind("blur",function(){!a.invalidHours&&a.hours<10&&a.$apply(function(){a.hours=i(a.hours)})}),a.updateMinutes=function(){var a=h();angular.isDefined(a)?(n.setMinutes(a),j("m")):d(void 0,!0)},c.bind("blur",function(){!a.invalidMinutes&&a.minutes<10&&a.$apply(function(){a.minutes=i(a.minutes)})})},this.render=function(){var a=o.$modelValue?new Date(o.$modelValue):null;isNaN(a)?(o.$setValidity("time",!1),d.error('Timepicker directive: "ng-model" value must be a Date object, a number of milliseconds since 01.01.1970 or a string representing an RFC2822 or ISO 8601 date.')):(a&&(n=a),k(),l())},a.incrementHours=function(){m(60*q)},a.decrementHours=function(){m(60*-q)},a.incrementMinutes=function(){m(r)},a.decrementMinutes=function(){m(-r)},a.toggleMeridian=function(){m(720*(n.getHours()<12?1:-1))}}]).directive("timepicker",function(){return{restrict:"EA",require:["timepicker","?^ngModel"],controller:"TimepickerController",replace:!0,scope:{},templateUrl:"template/timepicker/timepicker.html",link:function(a,b,c,d){var e=d[0],f=d[1];f&&e.init(f,b.find("input"))}}}),angular.module("ui.bootstrap.typeahead",["ui.bootstrap.position","ui.bootstrap.bindHtml"]).factory("typeaheadParser",["$parse",function(a){var b=/^\s*(.*?)(?:\s+as\s+(.*?))?\s+for\s+(?:([\$\w][\$\w\d]*))\s+in\s+(.*)$/;return{parse:function(c){var d=c.match(b);if(!d)throw new Error('Expected typeahead specification in form of "_modelValue_ (as _label_)? 
for _item_ in _collection_" but got "'+c+'".');return{itemName:d[3],source:a(d[4]),viewMapper:a(d[2]||d[1]),modelMapper:a(d[1])}}}}]).directive("typeahead",["$compile","$parse","$q","$timeout","$document","$position","typeaheadParser",function(a,b,c,d,e,f,g){var h=[9,13,27,38,40];return{require:"ngModel",link:function(i,j,k,l){var m,n=i.$eval(k.typeaheadMinLength)||1,o=i.$eval(k.typeaheadWaitMs)||0,p=i.$eval(k.typeaheadEditable)!==!1,q=b(k.typeaheadLoading).assign||angular.noop,r=b(k.typeaheadOnSelect),s=k.typeaheadInputFormatter?b(k.typeaheadInputFormatter):void 0,t=k.typeaheadAppendToBody?i.$eval(k.typeaheadAppendToBody):!1,u=b(k.ngModel).assign,v=g.parse(k.typeahead),w=i.$new();i.$on("$destroy",function(){w.$destroy()});var x="typeahead-"+w.$id+"-"+Math.floor(1e4*Math.random());j.attr({"aria-autocomplete":"list","aria-expanded":!1,"aria-owns":x});var y=angular.element("<div typeahead-popup></div>");y.attr({id:x,matches:"matches",active:"activeIdx",select:"select(activeIdx)",query:"query",position:"position"}),angular.isDefined(k.typeaheadTemplateUrl)&&y.attr("template-url",k.typeaheadTemplateUrl);var z=function(){w.matches=[],w.activeIdx=-1,j.attr("aria-expanded",!1)},A=function(a){return x+"-option-"+a};w.$watch("activeIdx",function(a){0>a?j.removeAttr("aria-activedescendant"):j.attr("aria-activedescendant",A(a))});var B=function(a){var b={$viewValue:a};q(i,!0),c.when(v.source(i,b)).then(function(c){var d=a===l.$viewValue;if(d&&m)if(c.length>0){w.activeIdx=0,w.matches.length=0;for(var e=0;e<c.length;e++)b[v.itemName]=c[e],w.matches.push({id:A(e),label:v.viewMapper(w,b),model:c[e]});w.query=a,w.position=t?f.offset(j):f.position(j),w.position.top=w.position.top+j.prop("offsetHeight"),j.attr("aria-expanded",!0)}else z();d&&q(i,!1)},function(){z(),q(i,!1)})};z(),w.query=void 0;var C;l.$parsers.unshift(function(a){return m=!0,a&&a.length>=n?o>0?(C&&d.cancel(C),C=d(function(){B(a)},o)):B(a):(q(i,!1),z()),p?a:a?void l.$setValidity("editable",!1):(l.$setValidity("editable",!0),a)}),l.$formatters.push(function(a){var b,c,d={};return s?(d.$model=a,s(i,d)):(d[v.itemName]=a,b=v.viewMapper(i,d),d[v.itemName]=void 0,c=v.viewMapper(i,d),b!==c?b:a)}),w.select=function(a){var b,c,e={};e[v.itemName]=c=w.matches[a].model,b=v.modelMapper(i,e),u(i,b),l.$setValidity("editable",!0),r(i,{$item:c,$model:b,$label:v.viewMapper(i,e)}),z(),d(function(){j[0].focus()},0,!1)},j.bind("keydown",function(a){0!==w.matches.length&&-1!==h.indexOf(a.which)&&(a.preventDefault(),40===a.which?(w.activeIdx=(w.activeIdx+1)%w.matches.length,w.$digest()):38===a.which?(w.activeIdx=(w.activeIdx?w.activeIdx:w.matches.length)-1,w.$digest()):13===a.which||9===a.which?w.$apply(function(){w.select(w.activeIdx)}):27===a.which&&(a.stopPropagation(),z(),w.$digest()))}),j.bind("blur",function(){m=!1});var D=function(a){j[0]!==a.target&&(z(),w.$digest())};e.bind("click",D),i.$on("$destroy",function(){e.unbind("click",D)});var E=a(y)(w);t?e.find("body").append(E):j.after(E)}}}]).directive("typeaheadPopup",function(){return{restrict:"EA",scope:{matches:"=",query:"=",active:"=",position:"=",select:"&"},replace:!0,templateUrl:"template/typeahead/typeahead-popup.html",link:function(a,b,c){a.templateUrl=c.templateUrl,a.isOpen=function(){return a.matches.length>0},a.isActive=function(b){return 
a.active==b},a.selectActive=function(b){a.active=b},a.selectMatch=function(b){a.select({activeIdx:b})}}}}).directive("typeaheadMatch",["$http","$templateCache","$compile","$parse",function(a,b,c,d){return{restrict:"EA",scope:{index:"=",match:"=",query:"="},link:function(e,f,g){var h=d(g.templateUrl)(e.$parent)||"template/typeahead/typeahead-match.html";a.get(h,{cache:b}).success(function(a){f.replaceWith(c(a.trim())(e))})}}}]).filter("typeaheadHighlight",function(){function a(a){return a.replace(/([.?*+^$[\]\\(){}|-])/g,"\\$1")}return function(b,c){return c?(""+b).replace(new RegExp(a(c),"gi"),"<strong>$&</strong>"):b}}),angular.module("template/accordion/accordion-group.html",[]).run(["$templateCache",function(a){a.put("template/accordion/accordion-group.html",'<div class="panel panel-default">\n <div class="panel-heading">\n <h4 class="panel-title">\n <a class="accordion-toggle" ng-click="toggleOpen()" accordion-transclude="heading"><span ng-class="{\'text-muted\': isDisabled}">{{heading}}</span></a>\n </h4>\n </div>\n <div class="panel-collapse" collapse="!isOpen">\n <div class="panel-body" ng-transclude></div>\n </div>\n</div>')}]),angular.module("template/accordion/accordion.html",[]).run(["$templateCache",function(a){a.put("template/accordion/accordion.html",'<div class="panel-group" ng-transclude></div>')}]),angular.module("template/alert/alert.html",[]).run(["$templateCache",function(a){a.put("template/alert/alert.html",'<div class="alert" ng-class="{\'alert-{{type || \'warning\'}}\': true, \'alert-dismissable\': closeable}" role="alert">\n <button ng-show="closeable" type="button" class="close" ng-click="close()">\n <span aria-hidden="true">&times;</span>\n <span class="sr-only">Close</span>\n </button>\n <div ng-transclude></div>\n</div>\n')}]),angular.module("template/carousel/carousel.html",[]).run(["$templateCache",function(a){a.put("template/carousel/carousel.html",'<div ng-mouseenter="pause()" ng-mouseleave="play()" class="carousel" ng-swipe-right="prev()" ng-swipe-left="next()">\n <ol class="carousel-indicators" ng-show="slides.length > 1">\n <li ng-repeat="slide in slides track by $index" ng-class="{active: isActive(slide)}" ng-click="select(slide)"></li>\n </ol>\n <div class="carousel-inner" ng-transclude></div>\n <a class="left carousel-control" ng-click="prev()" ng-show="slides.length > 1"><span class="glyphicon glyphicon-chevron-left"></span></a>\n <a class="right carousel-control" ng-click="next()" ng-show="slides.length > 1"><span class="glyphicon glyphicon-chevron-right"></span></a>\n</div>\n')}]),angular.module("template/carousel/slide.html",[]).run(["$templateCache",function(a){a.put("template/carousel/slide.html","<div ng-class=\"{\n 'active': leaving || (active && !entering),\n 'prev': (next || active) && direction=='prev',\n 'next': (next || active) && direction=='next',\n 'right': direction=='prev',\n 'left': direction=='next'\n }\" class=\"item text-center\" ng-transclude></div>\n")}]),angular.module("template/datepicker/datepicker.html",[]).run(["$templateCache",function(a){a.put("template/datepicker/datepicker.html",'<div ng-switch="datepickerMode" role="application" ng-keydown="keydown($event)">\n <daypicker ng-switch-when="day" tabindex="0"></daypicker>\n <monthpicker ng-switch-when="month" tabindex="0"></monthpicker>\n <yearpicker ng-switch-when="year" tabindex="0"></yearpicker>\n</div>')}]),angular.module("template/datepicker/day.html",[]).run(["$templateCache",function(a){a.put("template/datepicker/day.html",'<table role="grid" 
aria-labelledby="{{uniqueId}}-title" aria-activedescendant="{{activeDateId}}">\n <thead>\n <tr>\n <th><button type="button" class="btn btn-default btn-sm pull-left" ng-click="move(-1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-left"></i></button></th>\n <th colspan="{{5 + showWeeks}}"><button id="{{uniqueId}}-title" role="heading" aria-live="assertive" aria-atomic="true" type="button" class="btn btn-default btn-sm" ng-click="toggleMode()" tabindex="-1" style="width:100%;"><strong>{{title}}</strong></button></th>\n <th><button type="button" class="btn btn-default btn-sm pull-right" ng-click="move(1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-right"></i></button></th>\n </tr>\n <tr>\n <th ng-show="showWeeks" class="text-center"></th>\n <th ng-repeat="label in labels track by $index" class="text-center"><small aria-label="{{label.full}}">{{label.abbr}}</small></th>\n </tr>\n </thead>\n <tbody>\n <tr ng-repeat="row in rows track by $index">\n <td ng-show="showWeeks" class="text-center h6"><em>{{ weekNumbers[$index] }}</em></td>\n <td ng-repeat="dt in row track by dt.date" class="text-center" role="gridcell" id="{{dt.uid}}" aria-disabled="{{!!dt.disabled}}">\n <button type="button" style="width:100%;" class="btn btn-default btn-sm" ng-class="{\'btn-info\': dt.selected, active: isActive(dt)}" ng-click="select(dt.date)" ng-disabled="dt.disabled" tabindex="-1"><span ng-class="{\'text-muted\': dt.secondary, \'text-info\': dt.current}">{{dt.label}}</span></button>\n </td>\n </tr>\n </tbody>\n</table>\n')}]),angular.module("template/datepicker/month.html",[]).run(["$templateCache",function(a){a.put("template/datepicker/month.html",'<table role="grid" aria-labelledby="{{uniqueId}}-title" aria-activedescendant="{{activeDateId}}">\n <thead>\n <tr>\n <th><button type="button" class="btn btn-default btn-sm pull-left" ng-click="move(-1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-left"></i></button></th>\n <th><button id="{{uniqueId}}-title" role="heading" aria-live="assertive" aria-atomic="true" type="button" class="btn btn-default btn-sm" ng-click="toggleMode()" tabindex="-1" style="width:100%;"><strong>{{title}}</strong></button></th>\n <th><button type="button" class="btn btn-default btn-sm pull-right" ng-click="move(1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-right"></i></button></th>\n </tr>\n </thead>\n <tbody>\n <tr ng-repeat="row in rows track by $index">\n <td ng-repeat="dt in row track by dt.date" class="text-center" role="gridcell" id="{{dt.uid}}" aria-disabled="{{!!dt.disabled}}">\n <button type="button" style="width:100%;" class="btn btn-default" ng-class="{\'btn-info\': dt.selected, active: isActive(dt)}" ng-click="select(dt.date)" ng-disabled="dt.disabled" tabindex="-1"><span ng-class="{\'text-info\': dt.current}">{{dt.label}}</span></button>\n </td>\n </tr>\n </tbody>\n</table>\n')}]),angular.module("template/datepicker/popup.html",[]).run(["$templateCache",function(a){a.put("template/datepicker/popup.html",'<ul class="dropdown-menu" ng-style="{display: (isOpen && \'block\') || \'none\', top: position.top+\'px\', left: position.left+\'px\'}" ng-keydown="keydown($event)">\n <li ng-transclude></li>\n <li ng-if="showButtonBar" style="padding:10px 9px 2px">\n <span class="btn-group">\n <button type="button" class="btn btn-sm btn-info" ng-click="select(\'today\')">{{ getText(\'current\') }}</button>\n <button type="button" class="btn btn-sm btn-danger" ng-click="select(null)">{{ getText(\'clear\') }}</button>\n </span>\n <button type="button" class="btn 
btn-sm btn-success pull-right" ng-click="close()">{{ getText(\'close\') }}</button>\n </li>\n</ul>\n')}]),angular.module("template/datepicker/year.html",[]).run(["$templateCache",function(a){a.put("template/datepicker/year.html",'<table role="grid" aria-labelledby="{{uniqueId}}-title" aria-activedescendant="{{activeDateId}}">\n <thead>\n <tr>\n <th><button type="button" class="btn btn-default btn-sm pull-left" ng-click="move(-1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-left"></i></button></th>\n <th colspan="3"><button id="{{uniqueId}}-title" role="heading" aria-live="assertive" aria-atomic="true" type="button" class="btn btn-default btn-sm" ng-click="toggleMode()" tabindex="-1" style="width:100%;"><strong>{{title}}</strong></button></th>\n <th><button type="button" class="btn btn-default btn-sm pull-right" ng-click="move(1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-right"></i></button></th>\n </tr>\n </thead>\n <tbody>\n <tr ng-repeat="row in rows track by $index">\n <td ng-repeat="dt in row track by dt.date" class="text-center" role="gridcell" id="{{dt.uid}}" aria-disabled="{{!!dt.disabled}}">\n <button type="button" style="width:100%;" class="btn btn-default" ng-class="{\'btn-info\': dt.selected, active: isActive(dt)}" ng-click="select(dt.date)" ng-disabled="dt.disabled" tabindex="-1"><span ng-class="{\'text-info\': dt.current}">{{dt.label}}</span></button>\n </td>\n </tr>\n </tbody>\n</table>\n')}]),angular.module("template/modal/backdrop.html",[]).run(["$templateCache",function(a){a.put("template/modal/backdrop.html",'<div class="modal-backdrop fade"\n ng-class="{in: animate}"\n ng-style="{\'z-index\': 1040 + (index && 1 || 0) + index*10}"\n></div>\n')}]),angular.module("template/modal/window.html",[]).run(["$templateCache",function(a){a.put("template/modal/window.html",'<div tabindex="-1" role="dialog" class="modal fade" ng-class="{in: animate}" ng-style="{\'z-index\': 1050 + index*10, display: \'block\'}" ng-click="close($event)">\n <div class="modal-dialog" ng-class="{\'modal-sm\': size == \'sm\', \'modal-lg\': size == \'lg\'}"><div class="modal-content" ng-transclude></div></div>\n</div>')}]),angular.module("template/pagination/pager.html",[]).run(["$templateCache",function(a){a.put("template/pagination/pager.html",'<ul class="pager">\n <li ng-class="{disabled: noPrevious(), previous: align}"><a href ng-click="selectPage(page - 1)">{{getText(\'previous\')}}</a></li>\n <li ng-class="{disabled: noNext(), next: align}"><a href ng-click="selectPage(page + 1)">{{getText(\'next\')}}</a></li>\n</ul>')}]),angular.module("template/pagination/pagination.html",[]).run(["$templateCache",function(a){a.put("template/pagination/pagination.html",'<ul class="pagination">\n <li ng-if="boundaryLinks" ng-class="{disabled: noPrevious()}"><a href ng-click="selectPage(1)">{{getText(\'first\')}}</a></li>\n <li ng-if="directionLinks" ng-class="{disabled: noPrevious()}"><a href ng-click="selectPage(page - 1)">{{getText(\'previous\')}}</a></li>\n <li ng-repeat="page in pages track by $index" ng-class="{active: page.active}"><a href ng-click="selectPage(page.number)">{{page.text}}</a></li>\n <li ng-if="directionLinks" ng-class="{disabled: noNext()}"><a href ng-click="selectPage(page + 1)">{{getText(\'next\')}}</a></li>\n <li ng-if="boundaryLinks" ng-class="{disabled: noNext()}"><a href 
ng-click="selectPage(totalPages)">{{getText(\'last\')}}</a></li>\n</ul>')}]),angular.module("template/tooltip/tooltip-html-unsafe-popup.html",[]).run(["$templateCache",function(a){a.put("template/tooltip/tooltip-html-unsafe-popup.html",'<div class="tooltip {{placement}}" ng-class="{ in: isOpen(), fade: animation() }">\n <div class="tooltip-arrow"></div>\n <div class="tooltip-inner" bind-html-unsafe="content"></div>\n</div>\n')}]),angular.module("template/tooltip/tooltip-popup.html",[]).run(["$templateCache",function(a){a.put("template/tooltip/tooltip-popup.html",'<div class="tooltip {{placement}}" ng-class="{ in: isOpen(), fade: animation() }">\n <div class="tooltip-arrow"></div>\n <div class="tooltip-inner" ng-bind="content"></div>\n</div>\n')}]),angular.module("template/popover/popover.html",[]).run(["$templateCache",function(a){a.put("template/popover/popover.html",'<div class="popover {{placement}}" ng-class="{ in: isOpen(), fade: animation() }">\n <div class="arrow"></div>\n\n <div class="popover-inner">\n <h3 class="popover-title" ng-bind="title" ng-show="title"></h3>\n <div class="popover-content" ng-bind="content"></div>\n </div>\n</div>\n')}]),angular.module("template/progressbar/bar.html",[]).run(["$templateCache",function(a){a.put("template/progressbar/bar.html",'<div class="progress-bar" ng-class="type && \'progress-bar-\' + type" role="progressbar" aria-valuenow="{{value}}" aria-valuemin="0" aria-valuemax="{{max}}" ng-style="{width: percent + \'%\'}" aria-valuetext="{{percent | number:0}}%" ng-transclude></div>')}]),angular.module("template/progressbar/progress.html",[]).run(["$templateCache",function(a){a.put("template/progressbar/progress.html",'<div class="progress" ng-transclude></div>')}]),angular.module("template/progressbar/progressbar.html",[]).run(["$templateCache",function(a){a.put("template/progressbar/progressbar.html",'<div class="progress">\n <div class="progress-bar" ng-class="type && \'progress-bar-\' + type" role="progressbar" aria-valuenow="{{value}}" aria-valuemin="0" aria-valuemax="{{max}}" ng-style="{width: percent + \'%\'}" aria-valuetext="{{percent | number:0}}%" ng-transclude></div>\n</div>')}]),angular.module("template/rating/rating.html",[]).run(["$templateCache",function(a){a.put("template/rating/rating.html",'<span ng-mouseleave="reset()" ng-keydown="onKeydown($event)" tabindex="0" role="slider" aria-valuemin="0" aria-valuemax="{{range.length}}" aria-valuenow="{{value}}">\n <i ng-repeat="r in range track by $index" ng-mouseenter="enter($index + 1)" ng-click="rate($index + 1)" class="glyphicon" ng-class="$index < value && (r.stateOn || \'glyphicon-star\') || (r.stateOff || \'glyphicon-star-empty\')">\n <span class="sr-only">({{ $index < value ? 
\'*\' : \' \' }})</span>\n </i>\n</span>')}]),angular.module("template/tabs/tab.html",[]).run(["$templateCache",function(a){a.put("template/tabs/tab.html",'<li ng-class="{active: active, disabled: disabled}">\n <a ng-click="select()" tab-heading-transclude>{{heading}}</a>\n</li>\n')}]),angular.module("template/tabs/tabset-titles.html",[]).run(["$templateCache",function(a){a.put("template/tabs/tabset-titles.html","<ul class=\"nav {{type && 'nav-' + type}}\" ng-class=\"{'nav-stacked': vertical}\">\n</ul>\n")}]),angular.module("template/tabs/tabset.html",[]).run(["$templateCache",function(a){a.put("template/tabs/tabset.html",'\n<div>\n <ul class="nav nav-{{type || \'tabs\'}}" ng-class="{\'nav-stacked\': vertical, \'nav-justified\': justified}" ng-transclude></ul>\n <div class="tab-content">\n <div class="tab-pane" \n ng-repeat="tab in tabs" \n ng-class="{active: tab.active}"\n tab-content-transclude="tab">\n </div>\n </div>\n</div>\n')}]),angular.module("template/timepicker/timepicker.html",[]).run(["$templateCache",function(a){a.put("template/timepicker/timepicker.html",'<table>\n <tbody>\n <tr class="text-center">\n <td><a ng-click="incrementHours()" class="btn btn-link"><span class="glyphicon glyphicon-chevron-up"></span></a></td>\n <td>&nbsp;</td>\n <td><a ng-click="incrementMinutes()" class="btn btn-link"><span class="glyphicon glyphicon-chevron-up"></span></a></td>\n <td ng-show="showMeridian"></td>\n </tr>\n <tr>\n <td style="width:50px;" class="form-group" ng-class="{\'has-error\': invalidHours}">\n <input type="text" ng-model="hours" ng-change="updateHours()" class="form-control text-center" ng-mousewheel="incrementHours()" ng-readonly="readonlyInput" maxlength="2">\n </td>\n <td>:</td>\n <td style="width:50px;" class="form-group" ng-class="{\'has-error\': invalidMinutes}">\n <input type="text" ng-model="minutes" ng-change="updateMinutes()" class="form-control text-center" ng-readonly="readonlyInput" maxlength="2">\n </td>\n <td ng-show="showMeridian"><button type="button" class="btn btn-default text-center" ng-click="toggleMeridian()">{{meridian}}</button></td>\n </tr>\n <tr class="text-center">\n <td><a ng-click="decrementHours()" class="btn btn-link"><span class="glyphicon glyphicon-chevron-down"></span></a></td>\n <td>&nbsp;</td>\n <td><a ng-click="decrementMinutes()" class="btn btn-link"><span class="glyphicon glyphicon-chevron-down"></span></a></td>\n <td ng-show="showMeridian"></td>\n </tr>\n </tbody>\n</table>\n')}]),angular.module("template/typeahead/typeahead-match.html",[]).run(["$templateCache",function(a){a.put("template/typeahead/typeahead-match.html",'<a tabindex="-1" bind-html-unsafe="match.label | typeaheadHighlight:query"></a>')}]),angular.module("template/typeahead/typeahead-popup.html",[]).run(["$templateCache",function(a){a.put("template/typeahead/typeahead-popup.html",'<ul class="dropdown-menu" ng-if="isOpen()" ng-style="{top: position.top+\'px\', left: position.left+\'px\'}" style="display: block;" role="listbox" aria-hidden="{{!isOpen()}}">\n <li ng-repeat="match in matches track by $index" ng-class="{active: isActive($index) }" ng-mouseenter="selectActive($index)" ng-click="selectMatch($index)" role="option" id="{{match.id}}">\n <div typeahead-match index="$index" match="match" query="query" template-url="templateUrl"></div>\n </li>\n</ul>')
+}]); \ No newline at end of file
diff --git a/bitbake/lib/toaster/toastergui/static/js/ui-bootstrap-tpls-0.11.0.min.js b/bitbake/lib/toaster/toastergui/static/js/ui-bootstrap-tpls-0.11.0.min.js
new file mode 100644
index 0000000000..fa6a861317
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/static/js/ui-bootstrap-tpls-0.11.0.min.js
@@ -0,0 +1,10 @@
+/*
+ * angular-ui-bootstrap
+ * http://angular-ui.github.io/bootstrap/
+
+ * Version: 0.11.0 - 2014-05-01
+ * License: MIT
+ */
+angular.module("ui.bootstrap",["ui.bootstrap.tpls","ui.bootstrap.transition","ui.bootstrap.collapse","ui.bootstrap.accordion","ui.bootstrap.alert","ui.bootstrap.bindHtml","ui.bootstrap.buttons","ui.bootstrap.carousel","ui.bootstrap.dateparser","ui.bootstrap.position","ui.bootstrap.datepicker","ui.bootstrap.dropdown","ui.bootstrap.modal","ui.bootstrap.pagination","ui.bootstrap.tooltip","ui.bootstrap.popover","ui.bootstrap.progressbar","ui.bootstrap.rating","ui.bootstrap.tabs","ui.bootstrap.timepicker","ui.bootstrap.typeahead"]),angular.module("ui.bootstrap.tpls",["template/accordion/accordion-group.html","template/accordion/accordion.html","template/alert/alert.html","template/carousel/carousel.html","template/carousel/slide.html","template/datepicker/datepicker.html","template/datepicker/day.html","template/datepicker/month.html","template/datepicker/popup.html","template/datepicker/year.html","template/modal/backdrop.html","template/modal/window.html","template/pagination/pager.html","template/pagination/pagination.html","template/tooltip/tooltip-html-unsafe-popup.html","template/tooltip/tooltip-popup.html","template/popover/popover.html","template/progressbar/bar.html","template/progressbar/progress.html","template/progressbar/progressbar.html","template/rating/rating.html","template/tabs/tab.html","template/tabs/tabset.html","template/timepicker/timepicker.html","template/typeahead/typeahead-match.html","template/typeahead/typeahead-popup.html"]),angular.module("ui.bootstrap.transition",[]).factory("$transition",["$q","$timeout","$rootScope",function(a,b,c){function d(a){for(var b in a)if(void 0!==f.style[b])return a[b]}var e=function(d,f,g){g=g||{};var h=a.defer(),i=e[g.animation?"animationEndEventName":"transitionEndEventName"],j=function(){c.$apply(function(){d.unbind(i,j),h.resolve(d)})};return i&&d.bind(i,j),b(function(){angular.isString(f)?d.addClass(f):angular.isFunction(f)?f(d):angular.isObject(f)&&d.css(f),i||h.resolve(d)}),h.promise.cancel=function(){i&&d.unbind(i,j),h.reject("Transition cancelled")},h.promise},f=document.createElement("trans"),g={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd",transition:"transitionend"},h={WebkitTransition:"webkitAnimationEnd",MozTransition:"animationend",OTransition:"oAnimationEnd",transition:"animationend"};return e.transitionEndEventName=d(g),e.animationEndEventName=d(h),e}]),angular.module("ui.bootstrap.collapse",["ui.bootstrap.transition"]).directive("collapse",["$transition",function(a){return{link:function(b,c,d){function e(b){function d(){j===e&&(j=void 0)}var e=a(c,b);return j&&j.cancel(),j=e,e.then(d,d),e}function f(){k?(k=!1,g()):(c.removeClass("collapse").addClass("collapsing"),e({height:c[0].scrollHeight+"px"}).then(g))}function g(){c.removeClass("collapsing"),c.addClass("collapse in"),c.css({height:"auto"})}function h(){if(k)k=!1,i(),c.css({height:0});else{c.css({height:c[0].scrollHeight+"px"});{c[0].offsetWidth}c.removeClass("collapse in").addClass("collapsing"),e({height:0}).then(i)}}function i(){c.removeClass("collapsing"),c.addClass("collapse")}var j,k=!0;b.$watch(d.collapse,function(a){a?h():f()})}}}]),angular.module("ui.bootstrap.accordion",["ui.bootstrap.collapse"]).constant("accordionConfig",{closeOthers:!0}).controller("AccordionController",["$scope","$attrs","accordionConfig",function(a,b,c){this.groups=[],this.closeOthers=function(d){var 
e=angular.isDefined(b.closeOthers)?a.$eval(b.closeOthers):c.closeOthers;e&&angular.forEach(this.groups,function(a){a!==d&&(a.isOpen=!1)})},this.addGroup=function(a){var b=this;this.groups.push(a),a.$on("$destroy",function(){b.removeGroup(a)})},this.removeGroup=function(a){var b=this.groups.indexOf(a);-1!==b&&this.groups.splice(b,1)}}]).directive("accordion",function(){return{restrict:"EA",controller:"AccordionController",transclude:!0,replace:!1,templateUrl:"template/accordion/accordion.html"}}).directive("accordionGroup",function(){return{require:"^accordion",restrict:"EA",transclude:!0,replace:!0,templateUrl:"template/accordion/accordion-group.html",scope:{heading:"@",isOpen:"=?",isDisabled:"=?"},controller:function(){this.setHeading=function(a){this.heading=a}},link:function(a,b,c,d){d.addGroup(a),a.$watch("isOpen",function(b){b&&d.closeOthers(a)}),a.toggleOpen=function(){a.isDisabled||(a.isOpen=!a.isOpen)}}}}).directive("accordionHeading",function(){return{restrict:"EA",transclude:!0,template:"",replace:!0,require:"^accordionGroup",link:function(a,b,c,d,e){d.setHeading(e(a,function(){}))}}}).directive("accordionTransclude",function(){return{require:"^accordionGroup",link:function(a,b,c,d){a.$watch(function(){return d[c.accordionTransclude]},function(a){a&&(b.html(""),b.append(a))})}}}),angular.module("ui.bootstrap.alert",[]).controller("AlertController",["$scope","$attrs",function(a,b){a.closeable="close"in b}]).directive("alert",function(){return{restrict:"EA",controller:"AlertController",templateUrl:"template/alert/alert.html",transclude:!0,replace:!0,scope:{type:"@",close:"&"}}}),angular.module("ui.bootstrap.bindHtml",[]).directive("bindHtmlUnsafe",function(){return function(a,b,c){b.addClass("ng-binding").data("$binding",c.bindHtmlUnsafe),a.$watch(c.bindHtmlUnsafe,function(a){b.html(a||"")})}}),angular.module("ui.bootstrap.buttons",[]).constant("buttonConfig",{activeClass:"active",toggleEvent:"click"}).controller("ButtonsController",["buttonConfig",function(a){this.activeClass=a.activeClass||"active",this.toggleEvent=a.toggleEvent||"click"}]).directive("btnRadio",function(){return{require:["btnRadio","ngModel"],controller:"ButtonsController",link:function(a,b,c,d){var e=d[0],f=d[1];f.$render=function(){b.toggleClass(e.activeClass,angular.equals(f.$modelValue,a.$eval(c.btnRadio)))},b.bind(e.toggleEvent,function(){var d=b.hasClass(e.activeClass);(!d||angular.isDefined(c.uncheckable))&&a.$apply(function(){f.$setViewValue(d?null:a.$eval(c.btnRadio)),f.$render()})})}}}).directive("btnCheckbox",function(){return{require:["btnCheckbox","ngModel"],controller:"ButtonsController",link:function(a,b,c,d){function e(){return g(c.btnCheckboxTrue,!0)}function f(){return g(c.btnCheckboxFalse,!1)}function g(b,c){var d=a.$eval(b);return angular.isDefined(d)?d:c}var h=d[0],i=d[1];i.$render=function(){b.toggleClass(h.activeClass,angular.equals(i.$modelValue,e()))},b.bind(h.toggleEvent,function(){a.$apply(function(){i.$setViewValue(b.hasClass(h.activeClass)?f():e()),i.$render()})})}}}),angular.module("ui.bootstrap.carousel",["ui.bootstrap.transition"]).controller("CarouselController",["$scope","$timeout","$transition",function(a,b,c){function d(){e();var c=+a.interval;!isNaN(c)&&c>=0&&(g=b(f,c))}function e(){g&&(b.cancel(g),g=null)}function f(){h?(a.next(),d()):a.pause()}var g,h,i=this,j=i.slides=a.slides=[],k=-1;i.currentSlide=null;var l=!1;i.select=a.select=function(e,f){function 
g(){if(!l){if(i.currentSlide&&angular.isString(f)&&!a.noTransition&&e.$element){e.$element.addClass(f);{e.$element[0].offsetWidth}angular.forEach(j,function(a){angular.extend(a,{direction:"",entering:!1,leaving:!1,active:!1})}),angular.extend(e,{direction:f,active:!0,entering:!0}),angular.extend(i.currentSlide||{},{direction:f,leaving:!0}),a.$currentTransition=c(e.$element,{}),function(b,c){a.$currentTransition.then(function(){h(b,c)},function(){h(b,c)})}(e,i.currentSlide)}else h(e,i.currentSlide);i.currentSlide=e,k=m,d()}}function h(b,c){angular.extend(b,{direction:"",active:!0,leaving:!1,entering:!1}),angular.extend(c||{},{direction:"",active:!1,leaving:!1,entering:!1}),a.$currentTransition=null}var m=j.indexOf(e);void 0===f&&(f=m>k?"next":"prev"),e&&e!==i.currentSlide&&(a.$currentTransition?(a.$currentTransition.cancel(),b(g)):g())},a.$on("$destroy",function(){l=!0}),i.indexOfSlide=function(a){return j.indexOf(a)},a.next=function(){var b=(k+1)%j.length;return a.$currentTransition?void 0:i.select(j[b],"next")},a.prev=function(){var b=0>k-1?j.length-1:k-1;return a.$currentTransition?void 0:i.select(j[b],"prev")},a.isActive=function(a){return i.currentSlide===a},a.$watch("interval",d),a.$on("$destroy",e),a.play=function(){h||(h=!0,d())},a.pause=function(){a.noPause||(h=!1,e())},i.addSlide=function(b,c){b.$element=c,j.push(b),1===j.length||b.active?(i.select(j[j.length-1]),1==j.length&&a.play()):b.active=!1},i.removeSlide=function(a){var b=j.indexOf(a);j.splice(b,1),j.length>0&&a.active?i.select(b>=j.length?j[b-1]:j[b]):k>b&&k--}}]).directive("carousel",[function(){return{restrict:"EA",transclude:!0,replace:!0,controller:"CarouselController",require:"carousel",templateUrl:"template/carousel/carousel.html",scope:{interval:"=",noTransition:"=",noPause:"="}}}]).directive("slide",function(){return{require:"^carousel",restrict:"EA",transclude:!0,replace:!0,templateUrl:"template/carousel/slide.html",scope:{active:"=?"},link:function(a,b,c,d){d.addSlide(a,b),a.$on("$destroy",function(){d.removeSlide(a)}),a.$watch("active",function(b){b&&d.select(a)})}}}),angular.module("ui.bootstrap.dateparser",[]).service("dateParser",["$locale","orderByFilter",function(a,b){function c(a,b,c){return 1===b&&c>28?29===c&&(a%4===0&&a%100!==0||a%400===0):3===b||5===b||8===b||10===b?31>c:!0}this.parsers={};var d={yyyy:{regex:"\\d{4}",apply:function(a){this.year=+a}},yy:{regex:"\\d{2}",apply:function(a){this.year=+a+2e3}},y:{regex:"\\d{1,4}",apply:function(a){this.year=+a}},MMMM:{regex:a.DATETIME_FORMATS.MONTH.join("|"),apply:function(b){this.month=a.DATETIME_FORMATS.MONTH.indexOf(b)}},MMM:{regex:a.DATETIME_FORMATS.SHORTMONTH.join("|"),apply:function(b){this.month=a.DATETIME_FORMATS.SHORTMONTH.indexOf(b)}},MM:{regex:"0[1-9]|1[0-2]",apply:function(a){this.month=a-1}},M:{regex:"[1-9]|1[0-2]",apply:function(a){this.month=a-1}},dd:{regex:"[0-2][0-9]{1}|3[0-1]{1}",apply:function(a){this.date=+a}},d:{regex:"[1-2]?[0-9]{1}|3[0-1]{1}",apply:function(a){this.date=+a}},EEEE:{regex:a.DATETIME_FORMATS.DAY.join("|")},EEE:{regex:a.DATETIME_FORMATS.SHORTDAY.join("|")}};this.createParser=function(a){var c=[],e=a.split("");return angular.forEach(d,function(b,d){var f=a.indexOf(d);if(f>-1){a=a.split(""),e[f]="("+b.regex+")",a[f]="$";for(var g=f+1,h=f+d.length;h>g;g++)e[g]="",a[g]="$";a=a.join(""),c.push({index:f,apply:b.apply})}}),{regex:new RegExp("^"+e.join("")+"$"),map:b(c,"index")}},this.parse=function(b,d){if(!angular.isString(b))return b;d=a.DATETIME_FORMATS[d]||d,this.parsers[d]||(this.parsers[d]=this.createParser(d));var 
e=this.parsers[d],f=e.regex,g=e.map,h=b.match(f);if(h&&h.length){for(var i,j={year:1900,month:0,date:1,hours:0},k=1,l=h.length;l>k;k++){var m=g[k-1];m.apply&&m.apply.call(j,h[k])}return c(j.year,j.month,j.date)&&(i=new Date(j.year,j.month,j.date,j.hours)),i}}}]),angular.module("ui.bootstrap.position",[]).factory("$position",["$document","$window",function(a,b){function c(a,c){return a.currentStyle?a.currentStyle[c]:b.getComputedStyle?b.getComputedStyle(a)[c]:a.style[c]}function d(a){return"static"===(c(a,"position")||"static")}var e=function(b){for(var c=a[0],e=b.offsetParent||c;e&&e!==c&&d(e);)e=e.offsetParent;return e||c};return{position:function(b){var c=this.offset(b),d={top:0,left:0},f=e(b[0]);f!=a[0]&&(d=this.offset(angular.element(f)),d.top+=f.clientTop-f.scrollTop,d.left+=f.clientLeft-f.scrollLeft);var g=b[0].getBoundingClientRect();return{width:g.width||b.prop("offsetWidth"),height:g.height||b.prop("offsetHeight"),top:c.top-d.top,left:c.left-d.left}},offset:function(c){var d=c[0].getBoundingClientRect();return{width:d.width||c.prop("offsetWidth"),height:d.height||c.prop("offsetHeight"),top:d.top+(b.pageYOffset||a[0].documentElement.scrollTop),left:d.left+(b.pageXOffset||a[0].documentElement.scrollLeft)}},positionElements:function(a,b,c,d){var e,f,g,h,i=c.split("-"),j=i[0],k=i[1]||"center";e=d?this.offset(a):this.position(a),f=b.prop("offsetWidth"),g=b.prop("offsetHeight");var l={center:function(){return e.left+e.width/2-f/2},left:function(){return e.left},right:function(){return e.left+e.width}},m={center:function(){return e.top+e.height/2-g/2},top:function(){return e.top},bottom:function(){return e.top+e.height}};switch(j){case"right":h={top:m[k](),left:l[j]()};break;case"left":h={top:m[k](),left:e.left-f};break;case"bottom":h={top:m[j](),left:l[k]()};break;default:h={top:e.top-g,left:l[k]()}}return h}}}]),angular.module("ui.bootstrap.datepicker",["ui.bootstrap.dateparser","ui.bootstrap.position"]).constant("datepickerConfig",{formatDay:"dd",formatMonth:"MMMM",formatYear:"yyyy",formatDayHeader:"EEE",formatDayTitle:"MMMM yyyy",formatMonthTitle:"yyyy",datepickerMode:"day",minMode:"day",maxMode:"year",showWeeks:!0,startingDay:0,yearRange:20,minDate:null,maxDate:null}).controller("DatepickerController",["$scope","$attrs","$parse","$interpolate","$timeout","$log","dateFilter","datepickerConfig",function(a,b,c,d,e,f,g,h){var i=this,j={$setViewValue:angular.noop};this.modes=["day","month","year"],angular.forEach(["formatDay","formatMonth","formatYear","formatDayHeader","formatDayTitle","formatMonthTitle","minMode","maxMode","showWeeks","startingDay","yearRange"],function(c,e){i[c]=angular.isDefined(b[c])?8>e?d(b[c])(a.$parent):a.$parent.$eval(b[c]):h[c]}),angular.forEach(["minDate","maxDate"],function(d){b[d]?a.$parent.$watch(c(b[d]),function(a){i[d]=a?new Date(a):null,i.refreshView()}):i[d]=h[d]?new Date(h[d]):null}),a.datepickerMode=a.datepickerMode||h.datepickerMode,a.uniqueId="datepicker-"+a.$id+"-"+Math.floor(1e4*Math.random()),this.activeDate=angular.isDefined(b.initDate)?a.$parent.$eval(b.initDate):new Date,a.isActive=function(b){return 0===i.compare(b.date,i.activeDate)?(a.activeDateId=b.uid,!0):!1},this.init=function(a){j=a,j.$render=function(){i.render()}},this.render=function(){if(j.$modelValue){var a=new Date(j.$modelValue),b=!isNaN(a);b?this.activeDate=a:f.error('Datepicker directive: "ng-model" value must be a Date object, a number of milliseconds since 01.01.1970 or a string representing an RFC2822 or ISO 8601 
date.'),j.$setValidity("date",b)}this.refreshView()},this.refreshView=function(){if(this.element){this._refreshView();var a=j.$modelValue?new Date(j.$modelValue):null;j.$setValidity("date-disabled",!a||this.element&&!this.isDisabled(a))}},this.createDateObject=function(a,b){var c=j.$modelValue?new Date(j.$modelValue):null;return{date:a,label:g(a,b),selected:c&&0===this.compare(a,c),disabled:this.isDisabled(a),current:0===this.compare(a,new Date)}},this.isDisabled=function(c){return this.minDate&&this.compare(c,this.minDate)<0||this.maxDate&&this.compare(c,this.maxDate)>0||b.dateDisabled&&a.dateDisabled({date:c,mode:a.datepickerMode})},this.split=function(a,b){for(var c=[];a.length>0;)c.push(a.splice(0,b));return c},a.select=function(b){if(a.datepickerMode===i.minMode){var c=j.$modelValue?new Date(j.$modelValue):new Date(0,0,0,0,0,0,0);c.setFullYear(b.getFullYear(),b.getMonth(),b.getDate()),j.$setViewValue(c),j.$render()}else i.activeDate=b,a.datepickerMode=i.modes[i.modes.indexOf(a.datepickerMode)-1]},a.move=function(a){var b=i.activeDate.getFullYear()+a*(i.step.years||0),c=i.activeDate.getMonth()+a*(i.step.months||0);i.activeDate.setFullYear(b,c,1),i.refreshView()},a.toggleMode=function(b){b=b||1,a.datepickerMode===i.maxMode&&1===b||a.datepickerMode===i.minMode&&-1===b||(a.datepickerMode=i.modes[i.modes.indexOf(a.datepickerMode)+b])},a.keys={13:"enter",32:"space",33:"pageup",34:"pagedown",35:"end",36:"home",37:"left",38:"up",39:"right",40:"down"};var k=function(){e(function(){i.element[0].focus()},0,!1)};a.$on("datepicker.focus",k),a.keydown=function(b){var c=a.keys[b.which];if(c&&!b.shiftKey&&!b.altKey)if(b.preventDefault(),b.stopPropagation(),"enter"===c||"space"===c){if(i.isDisabled(i.activeDate))return;a.select(i.activeDate),k()}else!b.ctrlKey||"up"!==c&&"down"!==c?(i.handleKeyDown(c,b),i.refreshView()):(a.toggleMode("up"===c?1:-1),k())}}]).directive("datepicker",function(){return{restrict:"EA",replace:!0,templateUrl:"template/datepicker/datepicker.html",scope:{datepickerMode:"=?",dateDisabled:"&"},require:["datepicker","?^ngModel"],controller:"DatepickerController",link:function(a,b,c,d){var e=d[0],f=d[1];f&&e.init(f)}}}).directive("daypicker",["dateFilter",function(a){return{restrict:"EA",replace:!0,templateUrl:"template/datepicker/day.html",require:"^datepicker",link:function(b,c,d,e){function f(a,b){return 1!==b||a%4!==0||a%100===0&&a%400!==0?i[b]:29}function g(a,b){var c=new Array(b),d=new Date(a),e=0;for(d.setHours(12);b>e;)c[e++]=new Date(d),d.setDate(d.getDate()+1);return c}function h(a){var b=new Date(a);b.setDate(b.getDate()+4-(b.getDay()||7));var c=b.getTime();return b.setMonth(0),b.setDate(1),Math.floor(Math.round((c-b)/864e5)/7)+1}b.showWeeks=e.showWeeks,e.step={months:1},e.element=c;var i=[31,28,31,30,31,30,31,31,30,31,30,31];e._refreshView=function(){var c=e.activeDate.getFullYear(),d=e.activeDate.getMonth(),f=new Date(c,d,1),i=e.startingDay-f.getDay(),j=i>0?7-i:-i,k=new Date(f);j>0&&k.setDate(-j+1);for(var l=g(k,42),m=0;42>m;m++)l[m]=angular.extend(e.createDateObject(l[m],e.formatDay),{secondary:l[m].getMonth()!==d,uid:b.uniqueId+"-"+m});b.labels=new Array(7);for(var n=0;7>n;n++)b.labels[n]={abbr:a(l[n].date,e.formatDayHeader),full:a(l[n].date,"EEEE")};if(b.title=a(e.activeDate,e.formatDayTitle),b.rows=e.split(l,7),b.showWeeks){b.weekNumbers=[];for(var o=h(b.rows[0][0].date),p=b.rows.length;b.weekNumbers.push(o++)<p;);}},e.compare=function(a,b){return new Date(a.getFullYear(),a.getMonth(),a.getDate())-new 
Date(b.getFullYear(),b.getMonth(),b.getDate())},e.handleKeyDown=function(a){var b=e.activeDate.getDate();if("left"===a)b-=1;else if("up"===a)b-=7;else if("right"===a)b+=1;else if("down"===a)b+=7;else if("pageup"===a||"pagedown"===a){var c=e.activeDate.getMonth()+("pageup"===a?-1:1);e.activeDate.setMonth(c,1),b=Math.min(f(e.activeDate.getFullYear(),e.activeDate.getMonth()),b)}else"home"===a?b=1:"end"===a&&(b=f(e.activeDate.getFullYear(),e.activeDate.getMonth()));e.activeDate.setDate(b)},e.refreshView()}}}]).directive("monthpicker",["dateFilter",function(a){return{restrict:"EA",replace:!0,templateUrl:"template/datepicker/month.html",require:"^datepicker",link:function(b,c,d,e){e.step={years:1},e.element=c,e._refreshView=function(){for(var c=new Array(12),d=e.activeDate.getFullYear(),f=0;12>f;f++)c[f]=angular.extend(e.createDateObject(new Date(d,f,1),e.formatMonth),{uid:b.uniqueId+"-"+f});b.title=a(e.activeDate,e.formatMonthTitle),b.rows=e.split(c,3)},e.compare=function(a,b){return new Date(a.getFullYear(),a.getMonth())-new Date(b.getFullYear(),b.getMonth())},e.handleKeyDown=function(a){var b=e.activeDate.getMonth();if("left"===a)b-=1;else if("up"===a)b-=3;else if("right"===a)b+=1;else if("down"===a)b+=3;else if("pageup"===a||"pagedown"===a){var c=e.activeDate.getFullYear()+("pageup"===a?-1:1);e.activeDate.setFullYear(c)}else"home"===a?b=0:"end"===a&&(b=11);e.activeDate.setMonth(b)},e.refreshView()}}}]).directive("yearpicker",["dateFilter",function(){return{restrict:"EA",replace:!0,templateUrl:"template/datepicker/year.html",require:"^datepicker",link:function(a,b,c,d){function e(a){return parseInt((a-1)/f,10)*f+1}var f=d.yearRange;d.step={years:f},d.element=b,d._refreshView=function(){for(var b=new Array(f),c=0,g=e(d.activeDate.getFullYear());f>c;c++)b[c]=angular.extend(d.createDateObject(new Date(g+c,0,1),d.formatYear),{uid:a.uniqueId+"-"+c});a.title=[b[0].label,b[f-1].label].join(" - "),a.rows=d.split(b,5)},d.compare=function(a,b){return a.getFullYear()-b.getFullYear()},d.handleKeyDown=function(a){var b=d.activeDate.getFullYear();"left"===a?b-=1:"up"===a?b-=5:"right"===a?b+=1:"down"===a?b+=5:"pageup"===a||"pagedown"===a?b+=("pageup"===a?-1:1)*d.step.years:"home"===a?b=e(d.activeDate.getFullYear()):"end"===a&&(b=e(d.activeDate.getFullYear())+f-1),d.activeDate.setFullYear(b)},d.refreshView()}}}]).constant("datepickerPopupConfig",{datepickerPopup:"yyyy-MM-dd",currentText:"Today",clearText:"Clear",closeText:"Done",closeOnDateSelection:!0,appendToBody:!1,showButtonBar:!0}).directive("datepickerPopup",["$compile","$parse","$document","$position","dateFilter","dateParser","datepickerPopupConfig",function(a,b,c,d,e,f,g){return{restrict:"EA",require:"ngModel",scope:{isOpen:"=?",currentText:"@",clearText:"@",closeText:"@",dateDisabled:"&"},link:function(h,i,j,k){function l(a){return a.replace(/([A-Z])/g,function(a){return"-"+a.toLowerCase()})}function m(a){if(a){if(angular.isDate(a)&&!isNaN(a))return k.$setValidity("date",!0),a;if(angular.isString(a)){var b=f.parse(a,n)||new Date(a);return isNaN(b)?void k.$setValidity("date",!1):(k.$setValidity("date",!0),b)}return void k.$setValidity("date",!1)}return k.$setValidity("date",!0),null}var n,o=angular.isDefined(j.closeOnDateSelection)?h.$parent.$eval(j.closeOnDateSelection):g.closeOnDateSelection,p=angular.isDefined(j.datepickerAppendToBody)?h.$parent.$eval(j.datepickerAppendToBody):g.appendToBody;h.showButtonBar=angular.isDefined(j.showButtonBar)?h.$parent.$eval(j.showButtonBar):g.showButtonBar,h.getText=function(a){return 
h[a+"Text"]||g[a+"Text"]},j.$observe("datepickerPopup",function(a){n=a||g.datepickerPopup,k.$render()});var q=angular.element("<div datepicker-popup-wrap><div datepicker></div></div>");q.attr({"ng-model":"date","ng-change":"dateSelection()"});var r=angular.element(q.children()[0]);j.datepickerOptions&&angular.forEach(h.$parent.$eval(j.datepickerOptions),function(a,b){r.attr(l(b),a)}),angular.forEach(["minDate","maxDate"],function(a){j[a]&&(h.$parent.$watch(b(j[a]),function(b){h[a]=b}),r.attr(l(a),a))}),j.dateDisabled&&r.attr("date-disabled","dateDisabled({ date: date, mode: mode })"),k.$parsers.unshift(m),h.dateSelection=function(a){angular.isDefined(a)&&(h.date=a),k.$setViewValue(h.date),k.$render(),o&&(h.isOpen=!1,i[0].focus())},i.bind("input change keyup",function(){h.$apply(function(){h.date=k.$modelValue})}),k.$render=function(){var a=k.$viewValue?e(k.$viewValue,n):"";i.val(a),h.date=m(k.$modelValue)};var s=function(a){h.isOpen&&a.target!==i[0]&&h.$apply(function(){h.isOpen=!1})},t=function(a){h.keydown(a)};i.bind("keydown",t),h.keydown=function(a){27===a.which?(a.preventDefault(),a.stopPropagation(),h.close()):40!==a.which||h.isOpen||(h.isOpen=!0)},h.$watch("isOpen",function(a){a?(h.$broadcast("datepicker.focus"),h.position=p?d.offset(i):d.position(i),h.position.top=h.position.top+i.prop("offsetHeight"),c.bind("click",s)):c.unbind("click",s)}),h.select=function(a){if("today"===a){var b=new Date;angular.isDate(k.$modelValue)?(a=new Date(k.$modelValue),a.setFullYear(b.getFullYear(),b.getMonth(),b.getDate())):a=new Date(b.setHours(0,0,0,0))}h.dateSelection(a)},h.close=function(){h.isOpen=!1,i[0].focus()};var u=a(q)(h);p?c.find("body").append(u):i.after(u),h.$on("$destroy",function(){u.remove(),i.unbind("keydown",t),c.unbind("click",s)})}}}]).directive("datepickerPopupWrap",function(){return{restrict:"EA",replace:!0,transclude:!0,templateUrl:"template/datepicker/popup.html",link:function(a,b){b.bind("click",function(a){a.preventDefault(),a.stopPropagation()})}}}),angular.module("ui.bootstrap.dropdown",[]).constant("dropdownConfig",{openClass:"open"}).service("dropdownService",["$document",function(a){var b=null;this.open=function(e){b||(a.bind("click",c),a.bind("keydown",d)),b&&b!==e&&(b.isOpen=!1),b=e},this.close=function(e){b===e&&(b=null,a.unbind("click",c),a.unbind("keydown",d))};var c=function(a){a&&a.isDefaultPrevented()||b.$apply(function(){b.isOpen=!1})},d=function(a){27===a.which&&(b.focusToggleElement(),c())}}]).controller("DropdownController",["$scope","$attrs","$parse","dropdownConfig","dropdownService","$animate",function(a,b,c,d,e,f){var g,h=this,i=a.$new(),j=d.openClass,k=angular.noop,l=b.onToggle?c(b.onToggle):angular.noop;this.init=function(d){h.$element=d,b.isOpen&&(g=c(b.isOpen),k=g.assign,a.$watch(g,function(a){i.isOpen=!!a}))},this.toggle=function(a){return i.isOpen=arguments.length?!!a:!i.isOpen},this.isOpen=function(){return i.isOpen},i.focusToggleElement=function(){h.toggleElement&&h.toggleElement[0].focus()},i.$watch("isOpen",function(b,c){f[b?"addClass":"removeClass"](h.$element,j),b?(i.focusToggleElement(),e.open(i)):e.close(i),k(a,b),angular.isDefined(b)&&b!==c&&l(a,{open:!!b})}),a.$on("$locationChangeSuccess",function(){i.isOpen=!1}),a.$on("$destroy",function(){i.$destroy()})}]).directive("dropdown",function(){return{restrict:"CA",controller:"DropdownController",link:function(a,b,c,d){d.init(b)}}}).directive("dropdownToggle",function(){return{restrict:"CA",require:"?^dropdown",link:function(a,b,c,d){if(d){d.toggleElement=b;var 
e=function(e){e.preventDefault(),b.hasClass("disabled")||c.disabled||a.$apply(function(){d.toggle()})};b.bind("click",e),b.attr({"aria-haspopup":!0,"aria-expanded":!1}),a.$watch(d.isOpen,function(a){b.attr("aria-expanded",!!a)}),a.$on("$destroy",function(){b.unbind("click",e)})}}}}),angular.module("ui.bootstrap.modal",["ui.bootstrap.transition"]).factory("$$stackedMap",function(){return{createNew:function(){var a=[];return{add:function(b,c){a.push({key:b,value:c})},get:function(b){for(var c=0;c<a.length;c++)if(b==a[c].key)return a[c]},keys:function(){for(var b=[],c=0;c<a.length;c++)b.push(a[c].key);return b},top:function(){return a[a.length-1]},remove:function(b){for(var c=-1,d=0;d<a.length;d++)if(b==a[d].key){c=d;break}return a.splice(c,1)[0]},removeTop:function(){return a.splice(a.length-1,1)[0]},length:function(){return a.length}}}}}).directive("modalBackdrop",["$timeout",function(a){return{restrict:"EA",replace:!0,templateUrl:"template/modal/backdrop.html",link:function(b){b.animate=!1,a(function(){b.animate=!0})}}}]).directive("modalWindow",["$modalStack","$timeout",function(a,b){return{restrict:"EA",scope:{index:"@",animate:"="},replace:!0,transclude:!0,templateUrl:function(a,b){return b.templateUrl||"template/modal/window.html"},link:function(c,d,e){d.addClass(e.windowClass||""),c.size=e.size,b(function(){c.animate=!0,d[0].focus()}),c.close=function(b){var c=a.getTop();c&&c.value.backdrop&&"static"!=c.value.backdrop&&b.target===b.currentTarget&&(b.preventDefault(),b.stopPropagation(),a.dismiss(c.key,"backdrop click"))}}}}]).factory("$modalStack",["$transition","$timeout","$document","$compile","$rootScope","$$stackedMap",function(a,b,c,d,e,f){function g(){for(var a=-1,b=n.keys(),c=0;c<b.length;c++)n.get(b[c]).value.backdrop&&(a=c);return a}function h(a){var b=c.find("body").eq(0),d=n.get(a).value;n.remove(a),j(d.modalDomEl,d.modalScope,300,function(){d.modalScope.$destroy(),b.toggleClass(m,n.length()>0),i()})}function i(){if(k&&-1==g()){var a=l;j(k,l,150,function(){a.$destroy(),a=null}),k=void 0,l=void 0}}function j(c,d,e,f){function g(){g.done||(g.done=!0,c.remove(),f&&f())}d.animate=!1;var h=a.transitionEndEventName;if(h){var i=b(g,e);c.bind(h,function(){b.cancel(i),g(),d.$apply()})}else b(g,0)}var k,l,m="modal-open",n=f.createNew(),o={};return e.$watch(g,function(a){l&&(l.index=a)}),c.bind("keydown",function(a){var b;27===a.which&&(b=n.top(),b&&b.value.keyboard&&(a.preventDefault(),e.$apply(function(){o.dismiss(b.key,"escape key press")})))}),o.open=function(a,b){n.add(a,{deferred:b.deferred,modalScope:b.scope,backdrop:b.backdrop,keyboard:b.keyboard});var f=c.find("body").eq(0),h=g();h>=0&&!k&&(l=e.$new(!0),l.index=h,k=d("<div modal-backdrop></div>")(l),f.append(k));var i=angular.element("<div modal-window></div>");i.attr({"template-url":b.windowTemplateUrl,"window-class":b.windowClass,size:b.size,index:n.length()-1,animate:"animate"}).html(b.content);var j=d(i)(b.scope);n.top().value.modalDomEl=j,f.append(j),f.addClass(m)},o.close=function(a,b){var c=n.get(a).value;c&&(c.deferred.resolve(b),h(a))},o.dismiss=function(a,b){var c=n.get(a).value;c&&(c.deferred.reject(b),h(a))},o.dismissAll=function(a){for(var b=this.getTop();b;)this.dismiss(b.key,a),b=this.getTop()},o.getTop=function(){return n.top()},o}]).provider("$modal",function(){var a={options:{backdrop:!0,keyboard:!0},$get:["$injector","$rootScope","$q","$http","$templateCache","$controller","$modalStack",function(b,c,d,e,f,g,h){function i(a){return 
a.template?d.when(a.template):e.get(a.templateUrl,{cache:f}).then(function(a){return a.data})}function j(a){var c=[];return angular.forEach(a,function(a){(angular.isFunction(a)||angular.isArray(a))&&c.push(d.when(b.invoke(a)))}),c}var k={};return k.open=function(b){var e=d.defer(),f=d.defer(),k={result:e.promise,opened:f.promise,close:function(a){h.close(k,a)},dismiss:function(a){h.dismiss(k,a)}};if(b=angular.extend({},a.options,b),b.resolve=b.resolve||{},!b.template&&!b.templateUrl)throw new Error("One of template or templateUrl options is required.");var l=d.all([i(b)].concat(j(b.resolve)));return l.then(function(a){var d=(b.scope||c).$new();d.$close=k.close,d.$dismiss=k.dismiss;var f,i={},j=1;b.controller&&(i.$scope=d,i.$modalInstance=k,angular.forEach(b.resolve,function(b,c){i[c]=a[j++]}),f=g(b.controller,i)),h.open(k,{scope:d,deferred:e,content:a[0],backdrop:b.backdrop,keyboard:b.keyboard,windowClass:b.windowClass,windowTemplateUrl:b.windowTemplateUrl,size:b.size})},function(a){e.reject(a)}),l.then(function(){f.resolve(!0)},function(){f.reject(!1)}),k},k}]};return a}),angular.module("ui.bootstrap.pagination",[]).controller("PaginationController",["$scope","$attrs","$parse",function(a,b,c){var d=this,e={$setViewValue:angular.noop},f=b.numPages?c(b.numPages).assign:angular.noop;this.init=function(f,g){e=f,this.config=g,e.$render=function(){d.render()},b.itemsPerPage?a.$parent.$watch(c(b.itemsPerPage),function(b){d.itemsPerPage=parseInt(b,10),a.totalPages=d.calculateTotalPages()}):this.itemsPerPage=g.itemsPerPage},this.calculateTotalPages=function(){var b=this.itemsPerPage<1?1:Math.ceil(a.totalItems/this.itemsPerPage);return Math.max(b||0,1)},this.render=function(){a.page=parseInt(e.$viewValue,10)||1},a.selectPage=function(b){a.page!==b&&b>0&&b<=a.totalPages&&(e.$setViewValue(b),e.$render())},a.getText=function(b){return a[b+"Text"]||d.config[b+"Text"]},a.noPrevious=function(){return 1===a.page},a.noNext=function(){return a.page===a.totalPages},a.$watch("totalItems",function(){a.totalPages=d.calculateTotalPages()}),a.$watch("totalPages",function(b){f(a.$parent,b),a.page>b?a.selectPage(b):e.$render()})}]).constant("paginationConfig",{itemsPerPage:10,boundaryLinks:!1,directionLinks:!0,firstText:"First",previousText:"Previous",nextText:"Next",lastText:"Last",rotate:!0}).directive("pagination",["$parse","paginationConfig",function(a,b){return{restrict:"EA",scope:{totalItems:"=",firstText:"@",previousText:"@",nextText:"@",lastText:"@"},require:["pagination","?ngModel"],controller:"PaginationController",templateUrl:"template/pagination/pagination.html",replace:!0,link:function(c,d,e,f){function g(a,b,c){return{number:a,text:b,active:c}}function h(a,b){var c=[],d=1,e=b,f=angular.isDefined(k)&&b>k;f&&(l?(d=Math.max(a-Math.floor(k/2),1),e=d+k-1,e>b&&(e=b,d=e-k+1)):(d=(Math.ceil(a/k)-1)*k+1,e=Math.min(d+k-1,b)));for(var h=d;e>=h;h++){var i=g(h,h,h===a);c.push(i)}if(f&&!l){if(d>1){var j=g(d-1,"...",!1);c.unshift(j)}if(b>e){var m=g(e+1,"...",!1);c.push(m)}}return c}var i=f[0],j=f[1];if(j){var k=angular.isDefined(e.maxSize)?c.$parent.$eval(e.maxSize):b.maxSize,l=angular.isDefined(e.rotate)?c.$parent.$eval(e.rotate):b.rotate;c.boundaryLinks=angular.isDefined(e.boundaryLinks)?c.$parent.$eval(e.boundaryLinks):b.boundaryLinks,c.directionLinks=angular.isDefined(e.directionLinks)?c.$parent.$eval(e.directionLinks):b.directionLinks,i.init(j,b),e.maxSize&&c.$parent.$watch(a(e.maxSize),function(a){k=parseInt(a,10),i.render()});var 
m=i.render;i.render=function(){m(),c.page>0&&c.page<=c.totalPages&&(c.pages=h(c.page,c.totalPages))}}}}}]).constant("pagerConfig",{itemsPerPage:10,previousText:"« Previous",nextText:"Next »",align:!0}).directive("pager",["pagerConfig",function(a){return{restrict:"EA",scope:{totalItems:"=",previousText:"@",nextText:"@"},require:["pager","?ngModel"],controller:"PaginationController",templateUrl:"template/pagination/pager.html",replace:!0,link:function(b,c,d,e){var f=e[0],g=e[1];g&&(b.align=angular.isDefined(d.align)?b.$parent.$eval(d.align):a.align,f.init(g,a))}}}]),angular.module("ui.bootstrap.tooltip",["ui.bootstrap.position","ui.bootstrap.bindHtml"]).provider("$tooltip",function(){function a(a){var b=/[A-Z]/g,c="-";
+return a.replace(b,function(a,b){return(b?c:"")+a.toLowerCase()})}var b={placement:"top",animation:!0,popupDelay:0},c={mouseenter:"mouseleave",click:"click",focus:"blur"},d={};this.options=function(a){angular.extend(d,a)},this.setTriggers=function(a){angular.extend(c,a)},this.$get=["$window","$compile","$timeout","$parse","$document","$position","$interpolate",function(e,f,g,h,i,j,k){return function(e,l,m){function n(a){var b=a||o.trigger||m,d=c[b]||b;return{show:b,hide:d}}var o=angular.extend({},b,d),p=a(e),q=k.startSymbol(),r=k.endSymbol(),s="<div "+p+'-popup title="'+q+"tt_title"+r+'" content="'+q+"tt_content"+r+'" placement="'+q+"tt_placement"+r+'" animation="tt_animation" is-open="tt_isOpen"></div>';return{restrict:"EA",scope:!0,compile:function(){var a=f(s);return function(b,c,d){function f(){b.tt_isOpen?m():k()}function k(){(!y||b.$eval(d[l+"Enable"]))&&(b.tt_popupDelay?v||(v=g(p,b.tt_popupDelay,!1),v.then(function(a){a()})):p()())}function m(){b.$apply(function(){q()})}function p(){return v=null,u&&(g.cancel(u),u=null),b.tt_content?(r(),t.css({top:0,left:0,display:"block"}),w?i.find("body").append(t):c.after(t),z(),b.tt_isOpen=!0,b.$digest(),z):angular.noop}function q(){b.tt_isOpen=!1,g.cancel(v),v=null,b.tt_animation?u||(u=g(s,500)):s()}function r(){t&&s(),t=a(b,function(){}),b.$digest()}function s(){u=null,t&&(t.remove(),t=null)}var t,u,v,w=angular.isDefined(o.appendToBody)?o.appendToBody:!1,x=n(void 0),y=angular.isDefined(d[l+"Enable"]),z=function(){var a=j.positionElements(c,t,b.tt_placement,w);a.top+="px",a.left+="px",t.css(a)};b.tt_isOpen=!1,d.$observe(e,function(a){b.tt_content=a,!a&&b.tt_isOpen&&q()}),d.$observe(l+"Title",function(a){b.tt_title=a}),d.$observe(l+"Placement",function(a){b.tt_placement=angular.isDefined(a)?a:o.placement}),d.$observe(l+"PopupDelay",function(a){var c=parseInt(a,10);b.tt_popupDelay=isNaN(c)?o.popupDelay:c});var A=function(){c.unbind(x.show,k),c.unbind(x.hide,m)};d.$observe(l+"Trigger",function(a){A(),x=n(a),x.show===x.hide?c.bind(x.show,f):(c.bind(x.show,k),c.bind(x.hide,m))});var B=b.$eval(d[l+"Animation"]);b.tt_animation=angular.isDefined(B)?!!B:o.animation,d.$observe(l+"AppendToBody",function(a){w=angular.isDefined(a)?h(a)(b):w}),w&&b.$on("$locationChangeSuccess",function(){b.tt_isOpen&&q()}),b.$on("$destroy",function(){g.cancel(u),g.cancel(v),A(),s()})}}}}}]}).directive("tooltipPopup",function(){return{restrict:"EA",replace:!0,scope:{content:"@",placement:"@",animation:"&",isOpen:"&"},templateUrl:"template/tooltip/tooltip-popup.html"}}).directive("tooltip",["$tooltip",function(a){return a("tooltip","tooltip","mouseenter")}]).directive("tooltipHtmlUnsafePopup",function(){return{restrict:"EA",replace:!0,scope:{content:"@",placement:"@",animation:"&",isOpen:"&"},templateUrl:"template/tooltip/tooltip-html-unsafe-popup.html"}}).directive("tooltipHtmlUnsafe",["$tooltip",function(a){return a("tooltipHtmlUnsafe","tooltip","mouseenter")}]),angular.module("ui.bootstrap.popover",["ui.bootstrap.tooltip"]).directive("popoverPopup",function(){return{restrict:"EA",replace:!0,scope:{title:"@",content:"@",placement:"@",animation:"&",isOpen:"&"},templateUrl:"template/popover/popover.html"}}).directive("popover",["$tooltip",function(a){return a("popover","popover","click")}]),angular.module("ui.bootstrap.progressbar",[]).constant("progressConfig",{animate:!0,max:100}).controller("ProgressController",["$scope","$attrs","progressConfig",function(a,b,c){var 
d=this,e=angular.isDefined(b.animate)?a.$parent.$eval(b.animate):c.animate;this.bars=[],a.max=angular.isDefined(b.max)?a.$parent.$eval(b.max):c.max,this.addBar=function(b,c){e||c.css({transition:"none"}),this.bars.push(b),b.$watch("value",function(c){b.percent=+(100*c/a.max).toFixed(2)}),b.$on("$destroy",function(){c=null,d.removeBar(b)})},this.removeBar=function(a){this.bars.splice(this.bars.indexOf(a),1)}}]).directive("progress",function(){return{restrict:"EA",replace:!0,transclude:!0,controller:"ProgressController",require:"progress",scope:{},templateUrl:"template/progressbar/progress.html"}}).directive("bar",function(){return{restrict:"EA",replace:!0,transclude:!0,require:"^progress",scope:{value:"=",type:"@"},templateUrl:"template/progressbar/bar.html",link:function(a,b,c,d){d.addBar(a,b)}}}).directive("progressbar",function(){return{restrict:"EA",replace:!0,transclude:!0,controller:"ProgressController",scope:{value:"=",type:"@"},templateUrl:"template/progressbar/progressbar.html",link:function(a,b,c,d){d.addBar(a,angular.element(b.children()[0]))}}}),angular.module("ui.bootstrap.rating",[]).constant("ratingConfig",{max:5,stateOn:null,stateOff:null}).controller("RatingController",["$scope","$attrs","ratingConfig",function(a,b,c){var d={$setViewValue:angular.noop};this.init=function(e){d=e,d.$render=this.render,this.stateOn=angular.isDefined(b.stateOn)?a.$parent.$eval(b.stateOn):c.stateOn,this.stateOff=angular.isDefined(b.stateOff)?a.$parent.$eval(b.stateOff):c.stateOff;var f=angular.isDefined(b.ratingStates)?a.$parent.$eval(b.ratingStates):new Array(angular.isDefined(b.max)?a.$parent.$eval(b.max):c.max);a.range=this.buildTemplateObjects(f)},this.buildTemplateObjects=function(a){for(var b=0,c=a.length;c>b;b++)a[b]=angular.extend({index:b},{stateOn:this.stateOn,stateOff:this.stateOff},a[b]);return a},a.rate=function(b){!a.readonly&&b>=0&&b<=a.range.length&&(d.$setViewValue(b),d.$render())},a.enter=function(b){a.readonly||(a.value=b),a.onHover({value:b})},a.reset=function(){a.value=d.$viewValue,a.onLeave()},a.onKeydown=function(b){/(37|38|39|40)/.test(b.which)&&(b.preventDefault(),b.stopPropagation(),a.rate(a.value+(38===b.which||39===b.which?1:-1)))},this.render=function(){a.value=d.$viewValue}}]).directive("rating",function(){return{restrict:"EA",require:["rating","ngModel"],scope:{readonly:"=?",onHover:"&",onLeave:"&"},controller:"RatingController",templateUrl:"template/rating/rating.html",replace:!0,link:function(a,b,c,d){var e=d[0],f=d[1];f&&e.init(f)}}}),angular.module("ui.bootstrap.tabs",[]).controller("TabsetController",["$scope",function(a){var b=this,c=b.tabs=a.tabs=[];b.select=function(a){angular.forEach(c,function(b){b.active&&b!==a&&(b.active=!1,b.onDeselect())}),a.active=!0,a.onSelect()},b.addTab=function(a){c.push(a),1===c.length?a.active=!0:a.active&&b.select(a)},b.removeTab=function(a){var d=c.indexOf(a);if(a.active&&c.length>1){var 
e=d==c.length-1?d-1:d+1;b.select(c[e])}c.splice(d,1)}}]).directive("tabset",function(){return{restrict:"EA",transclude:!0,replace:!0,scope:{type:"@"},controller:"TabsetController",templateUrl:"template/tabs/tabset.html",link:function(a,b,c){a.vertical=angular.isDefined(c.vertical)?a.$parent.$eval(c.vertical):!1,a.justified=angular.isDefined(c.justified)?a.$parent.$eval(c.justified):!1}}}).directive("tab",["$parse",function(a){return{require:"^tabset",restrict:"EA",replace:!0,templateUrl:"template/tabs/tab.html",transclude:!0,scope:{active:"=?",heading:"@",onSelect:"&select",onDeselect:"&deselect"},controller:function(){},compile:function(b,c,d){return function(b,c,e,f){b.$watch("active",function(a){a&&f.select(b)}),b.disabled=!1,e.disabled&&b.$parent.$watch(a(e.disabled),function(a){b.disabled=!!a}),b.select=function(){b.disabled||(b.active=!0)},f.addTab(b),b.$on("$destroy",function(){f.removeTab(b)}),b.$transcludeFn=d}}}}]).directive("tabHeadingTransclude",[function(){return{restrict:"A",require:"^tab",link:function(a,b){a.$watch("headingElement",function(a){a&&(b.html(""),b.append(a))})}}}]).directive("tabContentTransclude",function(){function a(a){return a.tagName&&(a.hasAttribute("tab-heading")||a.hasAttribute("data-tab-heading")||"tab-heading"===a.tagName.toLowerCase()||"data-tab-heading"===a.tagName.toLowerCase())}return{restrict:"A",require:"^tabset",link:function(b,c,d){var e=b.$eval(d.tabContentTransclude);e.$transcludeFn(e.$parent,function(b){angular.forEach(b,function(b){a(b)?e.headingElement=b:c.append(b)})})}}}),angular.module("ui.bootstrap.timepicker",[]).constant("timepickerConfig",{hourStep:1,minuteStep:1,showMeridian:!0,meridians:null,readonlyInput:!1,mousewheel:!0}).controller("TimepickerController",["$scope","$attrs","$parse","$log","$locale","timepickerConfig",function(a,b,c,d,e,f){function g(){var b=parseInt(a.hours,10),c=a.showMeridian?b>0&&13>b:b>=0&&24>b;return c?(a.showMeridian&&(12===b&&(b=0),a.meridian===p[1]&&(b+=12)),b):void 0}function h(){var b=parseInt(a.minutes,10);return b>=0&&60>b?b:void 0}function i(a){return angular.isDefined(a)&&a.toString().length<2?"0"+a:a}function j(a){k(),o.$setViewValue(new Date(n)),l(a)}function k(){o.$setValidity("time",!0),a.invalidHours=!1,a.invalidMinutes=!1}function l(b){var c=n.getHours(),d=n.getMinutes();a.showMeridian&&(c=0===c||12===c?12:c%12),a.hours="h"===b?c:i(c),a.minutes="m"===b?d:i(d),a.meridian=n.getHours()<12?p[0]:p[1]}function m(a){var b=new Date(n.getTime()+6e4*a);n.setHours(b.getHours(),b.getMinutes()),j()}var n=new Date,o={$setViewValue:angular.noop},p=angular.isDefined(b.meridians)?a.$parent.$eval(b.meridians):f.meridians||e.DATETIME_FORMATS.AMPMS;this.init=function(c,d){o=c,o.$render=this.render;var e=d.eq(0),g=d.eq(1),h=angular.isDefined(b.mousewheel)?a.$parent.$eval(b.mousewheel):f.mousewheel;h&&this.setupMousewheelEvents(e,g),a.readonlyInput=angular.isDefined(b.readonlyInput)?a.$parent.$eval(b.readonlyInput):f.readonlyInput,this.setupInputEvents(e,g)};var q=f.hourStep;b.hourStep&&a.$parent.$watch(c(b.hourStep),function(a){q=parseInt(a,10)});var r=f.minuteStep;b.minuteStep&&a.$parent.$watch(c(b.minuteStep),function(a){r=parseInt(a,10)}),a.showMeridian=f.showMeridian,b.showMeridian&&a.$parent.$watch(c(b.showMeridian),function(b){if(a.showMeridian=!!b,o.$error.time){var c=g(),d=h();angular.isDefined(c)&&angular.isDefined(d)&&(n.setHours(c),j())}else l()}),this.setupMousewheelEvents=function(b,c){var d=function(a){a.originalEvent&&(a=a.originalEvent);var b=a.wheelDelta?a.wheelDelta:-a.deltaY;return 
a.detail||b>0};b.bind("mousewheel wheel",function(b){a.$apply(d(b)?a.incrementHours():a.decrementHours()),b.preventDefault()}),c.bind("mousewheel wheel",function(b){a.$apply(d(b)?a.incrementMinutes():a.decrementMinutes()),b.preventDefault()})},this.setupInputEvents=function(b,c){if(a.readonlyInput)return a.updateHours=angular.noop,void(a.updateMinutes=angular.noop);var d=function(b,c){o.$setViewValue(null),o.$setValidity("time",!1),angular.isDefined(b)&&(a.invalidHours=b),angular.isDefined(c)&&(a.invalidMinutes=c)};a.updateHours=function(){var a=g();angular.isDefined(a)?(n.setHours(a),j("h")):d(!0)},b.bind("blur",function(){!a.invalidHours&&a.hours<10&&a.$apply(function(){a.hours=i(a.hours)})}),a.updateMinutes=function(){var a=h();angular.isDefined(a)?(n.setMinutes(a),j("m")):d(void 0,!0)},c.bind("blur",function(){!a.invalidMinutes&&a.minutes<10&&a.$apply(function(){a.minutes=i(a.minutes)})})},this.render=function(){var a=o.$modelValue?new Date(o.$modelValue):null;isNaN(a)?(o.$setValidity("time",!1),d.error('Timepicker directive: "ng-model" value must be a Date object, a number of milliseconds since 01.01.1970 or a string representing an RFC2822 or ISO 8601 date.')):(a&&(n=a),k(),l())},a.incrementHours=function(){m(60*q)},a.decrementHours=function(){m(60*-q)},a.incrementMinutes=function(){m(r)},a.decrementMinutes=function(){m(-r)},a.toggleMeridian=function(){m(720*(n.getHours()<12?1:-1))}}]).directive("timepicker",function(){return{restrict:"EA",require:["timepicker","?^ngModel"],controller:"TimepickerController",replace:!0,scope:{},templateUrl:"template/timepicker/timepicker.html",link:function(a,b,c,d){var e=d[0],f=d[1];f&&e.init(f,b.find("input"))}}}),angular.module("ui.bootstrap.typeahead",["ui.bootstrap.position","ui.bootstrap.bindHtml"]).factory("typeaheadParser",["$parse",function(a){var b=/^\s*(.*?)(?:\s+as\s+(.*?))?\s+for\s+(?:([\$\w][\$\w\d]*))\s+in\s+(.*)$/;return{parse:function(c){var d=c.match(b);if(!d)throw new Error('Expected typeahead specification in form of "_modelValue_ (as _label_)? 
for _item_ in _collection_" but got "'+c+'".');return{itemName:d[3],source:a(d[4]),viewMapper:a(d[2]||d[1]),modelMapper:a(d[1])}}}}]).directive("typeahead",["$compile","$parse","$q","$timeout","$document","$position","typeaheadParser",function(a,b,c,d,e,f,g){var h=[9,13,27,38,40];return{require:"ngModel",link:function(i,j,k,l){var m,n=i.$eval(k.typeaheadMinLength)||1,o=i.$eval(k.typeaheadWaitMs)||0,p=i.$eval(k.typeaheadEditable)!==!1,q=b(k.typeaheadLoading).assign||angular.noop,r=b(k.typeaheadOnSelect),s=k.typeaheadInputFormatter?b(k.typeaheadInputFormatter):void 0,t=k.typeaheadAppendToBody?i.$eval(k.typeaheadAppendToBody):!1,u=b(k.ngModel).assign,v=g.parse(k.typeahead),w=i.$new();i.$on("$destroy",function(){w.$destroy()});var x="typeahead-"+w.$id+"-"+Math.floor(1e4*Math.random());j.attr({"aria-autocomplete":"list","aria-expanded":!1,"aria-owns":x});var y=angular.element("<div typeahead-popup></div>");y.attr({id:x,matches:"matches",active:"activeIdx",select:"select(activeIdx)",query:"query",position:"position"}),angular.isDefined(k.typeaheadTemplateUrl)&&y.attr("template-url",k.typeaheadTemplateUrl);var z=function(){w.matches=[],w.activeIdx=-1,j.attr("aria-expanded",!1)},A=function(a){return x+"-option-"+a};w.$watch("activeIdx",function(a){0>a?j.removeAttr("aria-activedescendant"):j.attr("aria-activedescendant",A(a))});var B=function(a){var b={$viewValue:a};q(i,!0),c.when(v.source(i,b)).then(function(c){var d=a===l.$viewValue;if(d&&m)if(c.length>0){w.activeIdx=0,w.matches.length=0;for(var e=0;e<c.length;e++)b[v.itemName]=c[e],w.matches.push({id:A(e),label:v.viewMapper(w,b),model:c[e]});w.query=a,w.position=t?f.offset(j):f.position(j),w.position.top=w.position.top+j.prop("offsetHeight"),j.attr("aria-expanded",!0)}else z();d&&q(i,!1)},function(){z(),q(i,!1)})};z(),w.query=void 0;var C;l.$parsers.unshift(function(a){return m=!0,a&&a.length>=n?o>0?(C&&d.cancel(C),C=d(function(){B(a)},o)):B(a):(q(i,!1),z()),p?a:a?void l.$setValidity("editable",!1):(l.$setValidity("editable",!0),a)}),l.$formatters.push(function(a){var b,c,d={};return s?(d.$model=a,s(i,d)):(d[v.itemName]=a,b=v.viewMapper(i,d),d[v.itemName]=void 0,c=v.viewMapper(i,d),b!==c?b:a)}),w.select=function(a){var b,c,e={};e[v.itemName]=c=w.matches[a].model,b=v.modelMapper(i,e),u(i,b),l.$setValidity("editable",!0),r(i,{$item:c,$model:b,$label:v.viewMapper(i,e)}),z(),d(function(){j[0].focus()},0,!1)},j.bind("keydown",function(a){0!==w.matches.length&&-1!==h.indexOf(a.which)&&(a.preventDefault(),40===a.which?(w.activeIdx=(w.activeIdx+1)%w.matches.length,w.$digest()):38===a.which?(w.activeIdx=(w.activeIdx?w.activeIdx:w.matches.length)-1,w.$digest()):13===a.which||9===a.which?w.$apply(function(){w.select(w.activeIdx)}):27===a.which&&(a.stopPropagation(),z(),w.$digest()))}),j.bind("blur",function(){m=!1});var D=function(a){j[0]!==a.target&&(z(),w.$digest())};e.bind("click",D),i.$on("$destroy",function(){e.unbind("click",D)});var E=a(y)(w);t?e.find("body").append(E):j.after(E)}}}]).directive("typeaheadPopup",function(){return{restrict:"EA",scope:{matches:"=",query:"=",active:"=",position:"=",select:"&"},replace:!0,templateUrl:"template/typeahead/typeahead-popup.html",link:function(a,b,c){a.templateUrl=c.templateUrl,a.isOpen=function(){return a.matches.length>0},a.isActive=function(b){return 
a.active==b},a.selectActive=function(b){a.active=b},a.selectMatch=function(b){a.select({activeIdx:b})}}}}).directive("typeaheadMatch",["$http","$templateCache","$compile","$parse",function(a,b,c,d){return{restrict:"EA",scope:{index:"=",match:"=",query:"="},link:function(e,f,g){var h=d(g.templateUrl)(e.$parent)||"template/typeahead/typeahead-match.html";a.get(h,{cache:b}).success(function(a){f.replaceWith(c(a.trim())(e))})}}}]).filter("typeaheadHighlight",function(){function a(a){return a.replace(/([.?*+^$[\]\\(){}|-])/g,"\\$1")}return function(b,c){return c?(""+b).replace(new RegExp(a(c),"gi"),"<strong>$&</strong>"):b}}),angular.module("template/accordion/accordion-group.html",[]).run(["$templateCache",function(a){a.put("template/accordion/accordion-group.html",'<div class="panel panel-default">\n <div class="panel-heading">\n <h4 class="panel-title">\n <a class="accordion-toggle" ng-click="toggleOpen()" accordion-transclude="heading"><span ng-class="{\'text-muted\': isDisabled}">{{heading}}</span></a>\n </h4>\n </div>\n <div class="panel-collapse" collapse="!isOpen">\n <div class="panel-body" ng-transclude></div>\n </div>\n</div>')}]),angular.module("template/accordion/accordion.html",[]).run(["$templateCache",function(a){a.put("template/accordion/accordion.html",'<div class="panel-group" ng-transclude></div>')}]),angular.module("template/alert/alert.html",[]).run(["$templateCache",function(a){a.put("template/alert/alert.html",'<div class="alert" ng-class="{\'alert-{{type || \'warning\'}}\': true, \'alert-dismissable\': closeable}" role="alert">\n <button ng-show="closeable" type="button" class="close" ng-click="close()">\n <span aria-hidden="true">&times;</span>\n <span class="sr-only">Close</span>\n </button>\n <div ng-transclude></div>\n</div>\n')}]),angular.module("template/carousel/carousel.html",[]).run(["$templateCache",function(a){a.put("template/carousel/carousel.html",'<div ng-mouseenter="pause()" ng-mouseleave="play()" class="carousel" ng-swipe-right="prev()" ng-swipe-left="next()">\n <ol class="carousel-indicators" ng-show="slides.length > 1">\n <li ng-repeat="slide in slides track by $index" ng-class="{active: isActive(slide)}" ng-click="select(slide)"></li>\n </ol>\n <div class="carousel-inner" ng-transclude></div>\n <a class="left carousel-control" ng-click="prev()" ng-show="slides.length > 1"><span class="glyphicon glyphicon-chevron-left"></span></a>\n <a class="right carousel-control" ng-click="next()" ng-show="slides.length > 1"><span class="glyphicon glyphicon-chevron-right"></span></a>\n</div>\n')}]),angular.module("template/carousel/slide.html",[]).run(["$templateCache",function(a){a.put("template/carousel/slide.html","<div ng-class=\"{\n 'active': leaving || (active && !entering),\n 'prev': (next || active) && direction=='prev',\n 'next': (next || active) && direction=='next',\n 'right': direction=='prev',\n 'left': direction=='next'\n }\" class=\"item text-center\" ng-transclude></div>\n")}]),angular.module("template/datepicker/datepicker.html",[]).run(["$templateCache",function(a){a.put("template/datepicker/datepicker.html",'<div ng-switch="datepickerMode" role="application" ng-keydown="keydown($event)">\n <daypicker ng-switch-when="day" tabindex="0"></daypicker>\n <monthpicker ng-switch-when="month" tabindex="0"></monthpicker>\n <yearpicker ng-switch-when="year" tabindex="0"></yearpicker>\n</div>')}]),angular.module("template/datepicker/day.html",[]).run(["$templateCache",function(a){a.put("template/datepicker/day.html",'<table role="grid" 
aria-labelledby="{{uniqueId}}-title" aria-activedescendant="{{activeDateId}}">\n <thead>\n <tr>\n <th><button type="button" class="btn btn-default btn-sm pull-left" ng-click="move(-1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-left"></i></button></th>\n <th colspan="{{5 + showWeeks}}"><button id="{{uniqueId}}-title" role="heading" aria-live="assertive" aria-atomic="true" type="button" class="btn btn-default btn-sm" ng-click="toggleMode()" tabindex="-1" style="width:100%;"><strong>{{title}}</strong></button></th>\n <th><button type="button" class="btn btn-default btn-sm pull-right" ng-click="move(1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-right"></i></button></th>\n </tr>\n <tr>\n <th ng-show="showWeeks" class="text-center"></th>\n <th ng-repeat="label in labels track by $index" class="text-center"><small aria-label="{{label.full}}">{{label.abbr}}</small></th>\n </tr>\n </thead>\n <tbody>\n <tr ng-repeat="row in rows track by $index">\n <td ng-show="showWeeks" class="text-center h6"><em>{{ weekNumbers[$index] }}</em></td>\n <td ng-repeat="dt in row track by dt.date" class="text-center" role="gridcell" id="{{dt.uid}}" aria-disabled="{{!!dt.disabled}}">\n <button type="button" style="width:100%;" class="btn btn-default btn-sm" ng-class="{\'btn-info\': dt.selected, active: isActive(dt)}" ng-click="select(dt.date)" ng-disabled="dt.disabled" tabindex="-1"><span ng-class="{\'text-muted\': dt.secondary, \'text-info\': dt.current}">{{dt.label}}</span></button>\n </td>\n </tr>\n </tbody>\n</table>\n')}]),angular.module("template/datepicker/month.html",[]).run(["$templateCache",function(a){a.put("template/datepicker/month.html",'<table role="grid" aria-labelledby="{{uniqueId}}-title" aria-activedescendant="{{activeDateId}}">\n <thead>\n <tr>\n <th><button type="button" class="btn btn-default btn-sm pull-left" ng-click="move(-1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-left"></i></button></th>\n <th><button id="{{uniqueId}}-title" role="heading" aria-live="assertive" aria-atomic="true" type="button" class="btn btn-default btn-sm" ng-click="toggleMode()" tabindex="-1" style="width:100%;"><strong>{{title}}</strong></button></th>\n <th><button type="button" class="btn btn-default btn-sm pull-right" ng-click="move(1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-right"></i></button></th>\n </tr>\n </thead>\n <tbody>\n <tr ng-repeat="row in rows track by $index">\n <td ng-repeat="dt in row track by dt.date" class="text-center" role="gridcell" id="{{dt.uid}}" aria-disabled="{{!!dt.disabled}}">\n <button type="button" style="width:100%;" class="btn btn-default" ng-class="{\'btn-info\': dt.selected, active: isActive(dt)}" ng-click="select(dt.date)" ng-disabled="dt.disabled" tabindex="-1"><span ng-class="{\'text-info\': dt.current}">{{dt.label}}</span></button>\n </td>\n </tr>\n </tbody>\n</table>\n')}]),angular.module("template/datepicker/popup.html",[]).run(["$templateCache",function(a){a.put("template/datepicker/popup.html",'<ul class="dropdown-menu" ng-style="{display: (isOpen && \'block\') || \'none\', top: position.top+\'px\', left: position.left+\'px\'}" ng-keydown="keydown($event)">\n <li ng-transclude></li>\n <li ng-if="showButtonBar" style="padding:10px 9px 2px">\n <span class="btn-group">\n <button type="button" class="btn btn-sm btn-info" ng-click="select(\'today\')">{{ getText(\'current\') }}</button>\n <button type="button" class="btn btn-sm btn-danger" ng-click="select(null)">{{ getText(\'clear\') }}</button>\n </span>\n <button type="button" class="btn 
btn-sm btn-success pull-right" ng-click="close()">{{ getText(\'close\') }}</button>\n </li>\n</ul>\n')}]),angular.module("template/datepicker/year.html",[]).run(["$templateCache",function(a){a.put("template/datepicker/year.html",'<table role="grid" aria-labelledby="{{uniqueId}}-title" aria-activedescendant="{{activeDateId}}">\n <thead>\n <tr>\n <th><button type="button" class="btn btn-default btn-sm pull-left" ng-click="move(-1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-left"></i></button></th>\n <th colspan="3"><button id="{{uniqueId}}-title" role="heading" aria-live="assertive" aria-atomic="true" type="button" class="btn btn-default btn-sm" ng-click="toggleMode()" tabindex="-1" style="width:100%;"><strong>{{title}}</strong></button></th>\n <th><button type="button" class="btn btn-default btn-sm pull-right" ng-click="move(1)" tabindex="-1"><i class="glyphicon glyphicon-chevron-right"></i></button></th>\n </tr>\n </thead>\n <tbody>\n <tr ng-repeat="row in rows track by $index">\n <td ng-repeat="dt in row track by dt.date" class="text-center" role="gridcell" id="{{dt.uid}}" aria-disabled="{{!!dt.disabled}}">\n <button type="button" style="width:100%;" class="btn btn-default" ng-class="{\'btn-info\': dt.selected, active: isActive(dt)}" ng-click="select(dt.date)" ng-disabled="dt.disabled" tabindex="-1"><span ng-class="{\'text-info\': dt.current}">{{dt.label}}</span></button>\n </td>\n </tr>\n </tbody>\n</table>\n')}]),angular.module("template/modal/backdrop.html",[]).run(["$templateCache",function(a){a.put("template/modal/backdrop.html",'<div class="modal-backdrop fade"\n ng-class="{in: animate}"\n ng-style="{\'z-index\': 1040 + (index && 1 || 0) + index*10}"\n></div>\n')}]),angular.module("template/modal/window.html",[]).run(["$templateCache",function(a){a.put("template/modal/window.html",'<div tabindex="-1" role="dialog" class="modal fade" ng-class="{in: animate}" ng-style="{\'z-index\': 1050 + index*10, display: \'block\'}" ng-click="close($event)">\n <div class="modal-dialog" ng-class="{\'modal-sm\': size == \'sm\', \'modal-lg\': size == \'lg\'}"><div class="modal-content" ng-transclude></div></div>\n</div>')}]),angular.module("template/pagination/pager.html",[]).run(["$templateCache",function(a){a.put("template/pagination/pager.html",'<ul class="pager">\n <li ng-class="{disabled: noPrevious(), previous: align}"><a href ng-click="selectPage(page - 1)">{{getText(\'previous\')}}</a></li>\n <li ng-class="{disabled: noNext(), next: align}"><a href ng-click="selectPage(page + 1)">{{getText(\'next\')}}</a></li>\n</ul>')}]),angular.module("template/pagination/pagination.html",[]).run(["$templateCache",function(a){a.put("template/pagination/pagination.html",'<ul class="pagination">\n <li ng-if="boundaryLinks" ng-class="{disabled: noPrevious()}"><a href ng-click="selectPage(1)">{{getText(\'first\')}}</a></li>\n <li ng-if="directionLinks" ng-class="{disabled: noPrevious()}"><a href ng-click="selectPage(page - 1)">{{getText(\'previous\')}}</a></li>\n <li ng-repeat="page in pages track by $index" ng-class="{active: page.active}"><a href ng-click="selectPage(page.number)">{{page.text}}</a></li>\n <li ng-if="directionLinks" ng-class="{disabled: noNext()}"><a href ng-click="selectPage(page + 1)">{{getText(\'next\')}}</a></li>\n <li ng-if="boundaryLinks" ng-class="{disabled: noNext()}"><a href 
ng-click="selectPage(totalPages)">{{getText(\'last\')}}</a></li>\n</ul>')}]),angular.module("template/tooltip/tooltip-html-unsafe-popup.html",[]).run(["$templateCache",function(a){a.put("template/tooltip/tooltip-html-unsafe-popup.html",'<div class="tooltip {{placement}}" ng-class="{ in: isOpen(), fade: animation() }">\n <div class="tooltip-arrow"></div>\n <div class="tooltip-inner" bind-html-unsafe="content"></div>\n</div>\n')}]),angular.module("template/tooltip/tooltip-popup.html",[]).run(["$templateCache",function(a){a.put("template/tooltip/tooltip-popup.html",'<div class="tooltip {{placement}}" ng-class="{ in: isOpen(), fade: animation() }">\n <div class="tooltip-arrow"></div>\n <div class="tooltip-inner" ng-bind="content"></div>\n</div>\n')}]),angular.module("template/popover/popover.html",[]).run(["$templateCache",function(a){a.put("template/popover/popover.html",'<div class="popover {{placement}}" ng-class="{ in: isOpen(), fade: animation() }">\n <div class="arrow"></div>\n\n <div class="popover-inner">\n <h3 class="popover-title" ng-bind="title" ng-show="title"></h3>\n <div class="popover-content" ng-bind="content"></div>\n </div>\n</div>\n')}]),angular.module("template/progressbar/bar.html",[]).run(["$templateCache",function(a){a.put("template/progressbar/bar.html",'<div class="progress-bar" ng-class="type && \'progress-bar-\' + type" role="progressbar" aria-valuenow="{{value}}" aria-valuemin="0" aria-valuemax="{{max}}" ng-style="{width: percent + \'%\'}" aria-valuetext="{{percent | number:0}}%" ng-transclude></div>')}]),angular.module("template/progressbar/progress.html",[]).run(["$templateCache",function(a){a.put("template/progressbar/progress.html",'<div class="progress" ng-transclude></div>')}]),angular.module("template/progressbar/progressbar.html",[]).run(["$templateCache",function(a){a.put("template/progressbar/progressbar.html",'<div class="progress">\n <div class="progress-bar" ng-class="type && \'progress-bar-\' + type" role="progressbar" aria-valuenow="{{value}}" aria-valuemin="0" aria-valuemax="{{max}}" ng-style="{width: percent + \'%\'}" aria-valuetext="{{percent | number:0}}%" ng-transclude></div>\n</div>')}]),angular.module("template/rating/rating.html",[]).run(["$templateCache",function(a){a.put("template/rating/rating.html",'<span ng-mouseleave="reset()" ng-keydown="onKeydown($event)" tabindex="0" role="slider" aria-valuemin="0" aria-valuemax="{{range.length}}" aria-valuenow="{{value}}">\n <i ng-repeat="r in range track by $index" ng-mouseenter="enter($index + 1)" ng-click="rate($index + 1)" class="glyphicon" ng-class="$index < value && (r.stateOn || \'glyphicon-star\') || (r.stateOff || \'glyphicon-star-empty\')">\n <span class="sr-only">({{ $index < value ? 
\'*\' : \' \' }})</span>\n </i>\n</span>')}]),angular.module("template/tabs/tab.html",[]).run(["$templateCache",function(a){a.put("template/tabs/tab.html",'<li ng-class="{active: active, disabled: disabled}">\n <a ng-click="select()" tab-heading-transclude>{{heading}}</a>\n</li>\n')}]),angular.module("template/tabs/tabset-titles.html",[]).run(["$templateCache",function(a){a.put("template/tabs/tabset-titles.html","<ul class=\"nav {{type && 'nav-' + type}}\" ng-class=\"{'nav-stacked': vertical}\">\n</ul>\n")}]),angular.module("template/tabs/tabset.html",[]).run(["$templateCache",function(a){a.put("template/tabs/tabset.html",'\n<div>\n <ul class="nav nav-{{type || \'tabs\'}}" ng-class="{\'nav-stacked\': vertical, \'nav-justified\': justified}" ng-transclude></ul>\n <div class="tab-content">\n <div class="tab-pane" \n ng-repeat="tab in tabs" \n ng-class="{active: tab.active}"\n tab-content-transclude="tab">\n </div>\n </div>\n</div>\n')}]),angular.module("template/timepicker/timepicker.html",[]).run(["$templateCache",function(a){a.put("template/timepicker/timepicker.html",'<table>\n <tbody>\n <tr class="text-center">\n <td><a ng-click="incrementHours()" class="btn btn-link"><span class="glyphicon glyphicon-chevron-up"></span></a></td>\n <td>&nbsp;</td>\n <td><a ng-click="incrementMinutes()" class="btn btn-link"><span class="glyphicon glyphicon-chevron-up"></span></a></td>\n <td ng-show="showMeridian"></td>\n </tr>\n <tr>\n <td style="width:50px;" class="form-group" ng-class="{\'has-error\': invalidHours}">\n <input type="text" ng-model="hours" ng-change="updateHours()" class="form-control text-center" ng-mousewheel="incrementHours()" ng-readonly="readonlyInput" maxlength="2">\n </td>\n <td>:</td>\n <td style="width:50px;" class="form-group" ng-class="{\'has-error\': invalidMinutes}">\n <input type="text" ng-model="minutes" ng-change="updateMinutes()" class="form-control text-center" ng-readonly="readonlyInput" maxlength="2">\n </td>\n <td ng-show="showMeridian"><button type="button" class="btn btn-default text-center" ng-click="toggleMeridian()">{{meridian}}</button></td>\n </tr>\n <tr class="text-center">\n <td><a ng-click="decrementHours()" class="btn btn-link"><span class="glyphicon glyphicon-chevron-down"></span></a></td>\n <td>&nbsp;</td>\n <td><a ng-click="decrementMinutes()" class="btn btn-link"><span class="glyphicon glyphicon-chevron-down"></span></a></td>\n <td ng-show="showMeridian"></td>\n </tr>\n </tbody>\n</table>\n')}]),angular.module("template/typeahead/typeahead-match.html",[]).run(["$templateCache",function(a){a.put("template/typeahead/typeahead-match.html",'<a tabindex="-1" bind-html-unsafe="match.label | typeaheadHighlight:query"></a>')}]),angular.module("template/typeahead/typeahead-popup.html",[]).run(["$templateCache",function(a){a.put("template/typeahead/typeahead-popup.html",'<ul class="dropdown-menu" ng-if="isOpen()" ng-style="{top: position.top+\'px\', left: position.left+\'px\'}" style="display: block;" role="listbox" aria-hidden="{{!isOpen()}}">\n <li ng-repeat="match in matches track by $index" ng-class="{active: isActive($index) }" ng-mouseenter="selectActive($index)" ng-click="selectMatch($index)" role="option" id="{{match.id}}">\n <div typeahead-match index="$index" match="match" query="query" template-url="templateUrl"></div>\n </li>\n</ul>')
+}]); \ No newline at end of file
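The hunk above ends the minified Angular UI Bootstrap bundle being added to the Toaster static assets: it registers the datepicker, dropdown, modal, pagination, tooltip, popover, progressbar, rating, tabs, timepicker and typeahead directives, plus their $templateCache templates. A minimal sketch of how a page script could pull in just the pieces it needs is below; the application module name is hypothetical and not part of this commit, while the dependency names are the ones registered by the bundle.

    // Hypothetical app module; 'ui.bootstrap.typeahead' and 'ui.bootstrap.dropdown'
    // are registered by the minified bundle shown in the hunk above.
    angular.module('toasterDemoApp', [
        'ui.bootstrap.typeahead',
        'ui.bootstrap.dropdown'
    ]);
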
diff --git a/bitbake/lib/toaster/toastergui/templates/base.html b/bitbake/lib/toaster/toastergui/templates/base.html
index 9ef249aab3..bc7a0ee436 100644
--- a/bitbake/lib/toaster/toastergui/templates/base.html
+++ b/bitbake/lib/toaster/toastergui/templates/base.html
@@ -1,14 +1,16 @@
<!DOCTYPE html>
{% load static %}
-<html>
+<html lang="en">
<head>
<title>{% if objectname %} {{objectname|title}} - {% endif %}Toaster</title>
-<link rel="stylesheet" href="{% static 'css/bootstrap.min.css' %}" type="text/css">
-<link rel="stylesheet" href="{% static 'css/bootstrap-responsive.min.css' %}" type='text/css'>
-<link rel="stylesheet" href="{% static 'css/font-awesome.min.css' %}" type='text/css'>
-<link rel="stylesheet" href="{% static 'css/prettify.css' %}" type='text/css'>
-<link rel="stylesheet" href="{% static 'css/default.css' %}" type='text/css'>
+<link rel="stylesheet" href="{% static 'css/bootstrap.min.css' %}" type="text/css"/>
+<link rel="stylesheet" href="{% static 'css/bootstrap-responsive.min.css' %}" type='text/css'/>
+<link rel="stylesheet" href="{% static 'css/font-awesome.min.css' %}" type='text/css'/>
+<link rel="stylesheet" href="{% static 'css/prettify.css' %}" type='text/css'/>
+<link rel="stylesheet" href="{% static 'css/default.css' %}" type='text/css'/>
+
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
+<meta http-equiv="Content-Type" content="text/html;charset=UTF-8" />
<script src="{% static 'js/jquery-2.0.3.min.js' %}">
</script>
<script src="{% static 'js/jquery.cookie.js' %}">
@@ -17,36 +19,30 @@
</script>
<script src="{% static 'js/prettify.js' %}">
</script>
-<script src="{% static 'js/main.js' %}">
+<script src="{% static 'js/libtoaster.js' %}">
+</script>
+<script src="{% static 'js/base.js' %}"></script>
+{%if MANAGED %}
+<script>
+ $(document).ready(function () {
+ /* Vars needed for base.js */
+ var ctx = {};
+ ctx.xhrDataTypeaheadUrl = "{% url 'xhr_datatypeahead' %}";
+ ctx.projectBuildUrl = "{% url 'xhr_build' %}";
+ ctx.projectPageUrl = "{% url 'project' %}";
+ ctx.projectInfoUrl = "{% url 'xhr_projectinfo' %}";
+ ctx.numProjects = {{projects|length}};
+ {% if project %}
+ ctx.projectId = {{project.id}};
+ {% endif %}
+ ctx.currentUrl = "{{request.path|escapejs}}";
+
+ basePageInit(ctx);
+ });
</script>
+{% endif %}
<script>
-function reload_params(params) {
- uri = window.location.href;
- splitlist = uri.split("?");
- url = splitlist[0], parameters=splitlist[1];
- // deserialize the call parameters
- if(parameters){
- cparams = parameters.split("&");
- }else{
- cparams = []
- }
- nparams = {}
- for (i = 0; i < cparams.length; i++) {
- temp = cparams[i].split("=");
- nparams[temp[0]] = temp[1];
- }
- // update parameter values
- for (i in params) {
- nparams[encodeURIComponent(i)] = encodeURIComponent(params[i]);
- }
- // serialize the structure
- callparams = []
- for (i in nparams) {
- callparams.push(i+"="+nparams[i]);
- }
- window.location.href = url+"?"+callparams.join('&');
-}
</script>
{% block extraheadcontent %}
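The inline reload_params helper removed above is not re-added anywhere in base.html, yet the basetable templates later in this change still call it, so it presumably moves into the newly included js/libtoaster.js. A tidier sketch of the same helper under that assumption (this is not the commit's actual libtoaster.js code):

    /* Sketch only: assumes reload_params now lives in libtoaster.js.
       Rebuilds the query string with the given parameters and reloads the page. */
    function reload_params(params) {
        var url = window.location.href.split("?")[0];
        var query = window.location.search.replace(/^\?/, "");
        var nparams = {};
        // deserialize the current query string
        query.split("&").forEach(function (pair) {
            if (pair.length > 0) {
                var kv = pair.split("=");
                nparams[kv[0]] = kv[1];
            }
        });
        // update or add the requested parameters
        for (var key in params) {
            nparams[encodeURIComponent(key)] = encodeURIComponent(params[key]);
        }
        // serialize and reload
        var callparams = [];
        for (var k in nparams) {
            callparams.push(k + "=" + nparams[k]);
        }
        window.location.href = url + "?" + callparams.join("&");
    }
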
@@ -58,27 +54,55 @@ function reload_params(params) {
<div class="navbar-inner">
<a class="brand logo" href="#"><img src="{% static 'img/logo.png' %}" class="" alt="Yocto logo project"/></a>
<a class="brand" href="/">Toaster</a>
- {%if MANAGED %}
- <div class="btn-group pull-right">
- <a class="btn" href="{% url 'newproject' %}">New project</a>
- <button class="btn dropdown-toggle" data-toggle="dropdown">
- <i class="icon-caret-down"></i>
- </button>
- <ul class="dropdown-menu">
-{% for prj in projects %}
- <li><a href="{% url 'project' prj.id %}">{{prj.name}}</a></li>
-{% endfor %}
- <li><hr/></li>
- <li><a href="#">Clone project</a></li>
- <li><a href="#">Export project</a></li>
- <li><a href="#">Import project</a></li>
- </ul>
- </div>
- {%endif%}
<a class="pull-right manual" target="_blank" href="http://www.yoctoproject.org/documentation/toaster-manual">
<i class="icon-book"></i>
Toaster manual
</a>
+ {%if MANAGED %}
+ <div class="btn-group pull-right">
+ <a class="btn" id="new-project-button" href="{% url 'newproject' %}">New project</a>
+ </div>
+ <!-- New build popover -->
+ <div class="btn-group pull-right" id="new-build-button">
+ <button class="btn dropdown-toggle" data-toggle="dropdown">
+ New build
+ <i class="icon-caret-down"></i>
+ </button>
+ <ul class="dropdown-menu new-build multi-select">
+ <li>
+ <h3>New build</h3>
+ <h6>Project:</h6>
+ <span id="project">
+ <a class="lead" href="{% if project.id %}{% url 'project' project.id %}{% endif %}">{{project.name}}</a>
+ <i class="icon-pencil"></i>
+ </span>
+ <form id="change-project-form" style="display:none;">
+ <div class="input-append">
+ <input type="text" class="input-medium" id="project-name-input" placeholder="Type a project name" autocomplete="off" data-minLength="1" data-autocomplete="off" data-provide="typeahead"/>
+ <button id="save-project-button" class="btn" type="button">Save</button>
+ <a href="#" id="cancel-change-project" class="btn btn-link">Cancel</a>
+ </div>
+ <p><a id="view-all-projects" href="{% url 'all-projects' %}">View all projects</a></p>
+ </form>
+ </li>
+ <div class="alert" style="display:none">
+ This project's configuration is incomplete,<br/>so you cannot run builds.<br/>
+ <p><a href="{% if project.id %}{% url 'project' project.id %}{% endif %}">View project configuration</a></p>
+ </div>
+ <li id="targets-form">
+ <h6>Target(s):</h6>
+ <form>
+ <input type="text" class="input-xlarge" id="build-target-input" placeholder="Type a target name" autocomplete="off" data-minLength="1" data-autocomplete="off" data-provide="typeahead" />
+ <div>
+ <a class="btn btn-primary" id="build-button" disabled="disabled" data-project-id="{{project.id}}">Build</a>
+ </div>
+ </form>
+ </li>
+ </ul>
+ </div>
+
+ {%endif%}
+
</div>
</div>
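base.js itself is not part of this diff, but the ctx object assembled in the template above suggests its contract: basePageInit() receives the XHR endpoints, the project id and the current URL, and wires up the "New build" controls added to the navbar. A speculative sketch of that entry point follows, using only ctx fields and element ids visible in the template; the real implementation may differ.

    /* Speculative sketch of base.js's entry point; the real file is not shown in this diff.
       Only ctx fields and element ids visible in the template above are referenced. */
    function basePageInit(ctx) {
        // Enable the Build button only once a target name has been typed.
        $("#build-target-input").on("input", function () {
            var hasTarget = $(this).val().length > 0;
            $("#build-button").toggleClass("disabled", !hasTarget);
        });

        // Show/hide the inline "change project" form behind the pencil icon.
        $("#project .icon-pencil").click(function () {
            $("#change-project-form").toggle();
        });
        $("#cancel-change-project").click(function (e) {
            e.preventDefault();
            $("#change-project-form").hide();
        });

        // ctx.xhrDataTypeaheadUrl and ctx.projectBuildUrl would back the target
        // typeahead lookups and the actual build submission, respectively.
    }
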
diff --git a/bitbake/lib/toaster/toastergui/templates/basebuilddetailpage.html b/bitbake/lib/toaster/toastergui/templates/basebuilddetailpage.html
index 5149768517..c8e217e9dd 100755..100644
--- a/bitbake/lib/toaster/toastergui/templates/basebuilddetailpage.html
+++ b/bitbake/lib/toaster/toastergui/templates/basebuilddetailpage.html
@@ -7,7 +7,10 @@
<div class="section">
<ul class="breadcrumb" id="breadcrumb">
<li><a href="{% url 'all-builds' %}">All builds</a></li>
- <li><a href="{%url 'builddashboard' build.pk%}">{{build.target_set.all.0.target}} {%if build.target_set.all.count > 1%}(+ {{build.target_set.all.count|add:"-1"}}){%endif%} {{build.machine}} ({{build.completed_on|date:"d/m/y H:i"}})</a></li>
+ {% if MANAGED and build.project %}
+ <li><a href="{% url 'project' build.project.id %}">{{build.project.name}}</a></li>
+ {%endif%}
+ <li><a href="{%url 'builddashboard' build.pk%}">{{build.target_set.all.0.target}} {%if build.target_set.all.count > 1%}(+ {{build.target_set.all.count|add:"-1"}}){%endif%}{%if not MANAGED %}{{build.machine}}{%endif%} ({{build.completed_on|date:"d/m/y H:i"}})</a></li>
{% block localbreadcrumb %}{% endblock %}
</ul>
<script>
diff --git a/bitbake/lib/toaster/toastergui/templates/basebuildpage.html b/bitbake/lib/toaster/toastergui/templates/basebuildpage.html
index 46110519e9..c03f1b4015 100644
--- a/bitbake/lib/toaster/toastergui/templates/basebuildpage.html
+++ b/bitbake/lib/toaster/toastergui/templates/basebuildpage.html
@@ -8,11 +8,14 @@
<!-- Breadcrumbs -->
<div class="section">
<ul class="breadcrumb" id="breadcrumb">
-<li><a href="{% url 'all-builds' %}">All builds</a></li>
+ <li><a href="{% url 'all-builds' %}">All builds</a></li>
+ {% if MANAGED and build.project %}
+ <li><a href="{% url 'project' build.project.id %}">{{build.project.name}}</a></li>
+ {%endif%}
<li>
{% block parentbreadcrumb %}
<a href="{%url 'builddashboard' build.pk%}">
- {{build.get_sorted_target_list.0.target}} {%if build.target_set.all.count > 1%}(+ {{build.target_set.all.count|add:"-1"}}){%endif%} {{build.machine}} ({{build.completed_on|date:"d/m/y H:i"}})
+ {{build.get_sorted_target_list.0.target}} {%if build.target_set.all.count > 1%}(+ {{build.target_set.all.count|add:"-1"}}){%endif%} {%if not MANAGED %}{{build.machine}}{% endif %} ({{build.completed_on|date:"d/m/y H:i"}})
</a>
{% endblock %}
</li>
diff --git a/bitbake/lib/toaster/toastergui/templates/baseprojectpage.html b/bitbake/lib/toaster/toastergui/templates/baseprojectpage.html
new file mode 100644
index 0000000000..95a9f470ba
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/baseprojectpage.html
@@ -0,0 +1,43 @@
+{% extends "base.html" %}
+{% load projecttags %}
+{% load humanize %}
+{% block pagecontent %}
+
+
+ <div class="">
+<!-- Breadcrumbs -->
+ <div class="section">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'all-builds' %}">All builds</a></li>
+ {% block parentbreadcrumb %}
+ {% if project %}
+ <li>
+ <a href="{%url 'project' project.id %}"><span id="project_name">{{project.name}}</span>
+ </a>
+ </li>
+ {% endif %}
+ {% endblock %}
+ {% block localbreadcrumb %}{% endblock %}
+ </ul>
+ <script>
+ $( function () {
+ $('#breadcrumb > li').append("<span class=\"divider\">→</span>");
+ $('#breadcrumb > li:last').addClass("active");
+ $('#breadcrumb > li:last > span').remove();
+ });
+ </script>
+ </div>
+
+ <div>
+
+ <!-- Begin main page container -->
+ {% block projectinfomain %}{% endblock %}
+ <!-- End main container -->
+
+
+ </div>
+ </div>
+
+
+{% endblock %}
+
diff --git a/bitbake/lib/toaster/toastergui/templates/basetable_bottom.html b/bitbake/lib/toaster/toastergui/templates/basetable_bottom.html
index ac14363798..d48ad92020 100644
--- a/bitbake/lib/toaster/toastergui/templates/basetable_bottom.html
+++ b/bitbake/lib/toaster/toastergui/templates/basetable_bottom.html
@@ -23,13 +23,14 @@
{%endif%}
</ul>
<div class="pull-right">
- <span class="help-inline" style="padding-top:5px;">Show rows:</span>
- <select style="margin-top:5px;margin-bottom:0px;" class="pagesize">
- {% with "2 5 10 25 50 100" as list%}
- {% for i in list.split %}<option{%if i == request.GET.count %} selected{%endif%}>{{i}}</option>
- {% endfor %}
- {% endwith %}
- </select>
+ <span class="help-inline" style="padding-top:5px;">Show rows:</span>
+ <select style="margin-top:5px;margin-bottom:0px;" class="pagesize">
+ {% with "10 25 50 100 150" as list%}
+ {% for i in list.split %}
+ <option value="{{i}}">{{i}}</option>
+ {% endfor %}
+ {% endwith %}
+ </select>
</div>
</div>
@@ -40,22 +41,33 @@
// we load cookies for the column display
save = $.cookie('_displaycols_{{objectname}}');
- if (save != undefined) {
- setting = save.split(';');
- for ( i = 0; i < setting.length; i++) {
- if (setting[i].length > 0) {
- splitlist = setting[i].split(':');
- id = splitlist[0], v = splitlist[1];
- if (v == 'true') {
- $('.chbxtoggle#'+id).prop('checked', true);
- }
- else {
- $('.chbxtoggle#'+id).prop('checked', false);
+ if (save != undefined) {
+ setting = save.split(';');
+ for ( i = 0; i < setting.length; i++) {
+ if (setting[i].length > 0) {
+ splitlist = setting[i].split(':');
+ id = splitlist[0], v = splitlist[1];
+ if (v == 'true') {
+ $('.chbxtoggle#'+id).prop('checked', true);
+ }
+ else {
+ $('.chbxtoggle#'+id).prop('checked', false);
+ }
}
}
}
+
+ // load cookie for number of entries to be displayed on page
+ if ({{request.GET.count}} != "") {
+ pagesize = {{request.GET.count}};
+ } else {
+ pagesize = $.cookie('_count');
}
+ $('.pagesize option').prop('selected', false)
+ .filter('[value="' + pagesize + '"]')
+ .attr('selected', true);
+
$('.chbxtoggle').each(function () {
showhideTableColumn($(this).attr('id'), $(this).is(':checked'))
});
@@ -72,8 +84,9 @@
$('.progress, .lead span').tooltip({container:'table', placement:'top'});
$(".pagesize").change(function () {
- console.log("page size change");
- reload_params({"count":$(this).val()}); ;
+ // save cookie with pagesize
+ $.cookie("_count", $(this).val(), { path : $(location).attr('pathname') });
+ reload_params({"count":$(this).val()});
});
});
</script>
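One thing worth noting about the new page-size handling above: {{request.GET.count}} is interpolated into the script unquoted, so when the count query parameter is absent the rendered line becomes "if ( != \"\")", which is a JavaScript syntax error. A hedged sketch of the same cookie-backed fallback with the value quoted (names kept from the template; this is not part of the commit):

    // Sketch only (not part of the commit): quote the interpolated value so the
    // generated script stays valid when the "count" query parameter is absent.
    var pagesize = "{{request.GET.count}}";
    if (pagesize === "") {
        pagesize = $.cookie('_count');   // fall back to the saved per-path cookie
    }
    $('.pagesize option').prop('selected', false)
        .filter('[value="' + pagesize + '"]')
        .prop('selected', true);
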
diff --git a/bitbake/lib/toaster/toastergui/templates/basetable_top.html b/bitbake/lib/toaster/toastergui/templates/basetable_top.html
index 1231e1f924..92a3b50801 100644
--- a/bitbake/lib/toaster/toastergui/templates/basetable_top.html
+++ b/bitbake/lib/toaster/toastergui/templates/basetable_top.html
@@ -156,6 +156,13 @@
showhideImmediateTableAction( clname, sh, orderkey );
}
+ //
+ // saves a cookie with selected order field
+ //
+ function saveOrderCookie( orderfield ) {
+ $.cookie("orderby", orderfield, { path: $(location).attr('pathname') });
+ }
+
</script>
<!-- control header -->
@@ -168,6 +175,7 @@
<button class="btn" type="submit" value="Search">Search</button>
</form>
<div class="pull-right">
+ {% block custombuttons%} {% endblock %}
{% if tablecols %}
<div class="btn-group">
<button class="btn dropdown-toggle" data-toggle="dropdown">Edit columns
@@ -204,10 +212,11 @@
<span class="divider-vertical"></span>
<span class="help-inline" style="padding-top:5px;">Show rows:</span>
<select style="margin-top:5px;margin-bottom:0px;" class="pagesize">
- {% with "2 5 10 25 50 100" as list%}
-{% for i in list.split %} <option{%if i == request.GET.count %} selected{%endif%}>{{i}}</option>
- {% endfor %}
- {% endwith %}
+ {% with "10 25 50 100 150" as list%}
+ {% for i in list.split %}
+ <option value="{{i}}">{{i}}</option>
+ {% endfor %}
+ {% endwith %}
</select>
</div>
</div>
@@ -221,7 +230,7 @@
<tr>
{% for tc in tablecols %}<th class="{{tc.dclass}} {{tc.clclass}}">
{%if tc.qhelp%}<i class="icon-question-sign get-help" title="{{tc.qhelp}}"></i>{%endif%}
- {%if tc.orderfield%}<a {%if tc.ordericon%} class="sorted" {%endif%}href="javascript:reload_params({'page': 1, 'orderby' : '{{tc.orderfield}}' })" >{{tc.name}}</a>{%else%}<span class="muted">{{tc.name}}</span>{%endif%}
+ {%if tc.orderfield%}<a {%if tc.ordericon%} class="sorted" {%endif%}href="javascript:reload_params({'page': 1, 'orderby' : '{{tc.orderfield}}' })" onclick="saveOrderCookie('{{tc.orderfield}}')">{{tc.name}}</a>{%else%}<span class="muted">{{tc.name}}</span>{%endif%}
{%if tc.ordericon%} <i class="icon-caret-{{tc.ordericon}}"></i>{%endif%}
{%if tc.filter%}<div class="btn-group pull-right">
<a href="#filter_{{tc.filter.class}}" role="button" class="btn btn-mini {%if request.GET.filter%}{{tc.filter.options|filtered_icon:request.GET.filter}} {%endif%}" {%if request.GET.filter and tc.filter.options|filtered_tooltip:request.GET.filter %} title="<p>{{tc.filter.options|filtered_tooltip:request.GET.filter}}</p><p><a class='btn btn-small btn-primary' href=javascript:reload_params({'filter':''})>Show all {% if filter_search_display %}{{filter_search_display}}{% else %}{{objectname}}{% endif %}</a></p>" {%endif%} data-toggle="modal"> <i class="icon-filter filtered"></i> </a>
diff --git a/bitbake/lib/toaster/toastergui/templates/basetable_top_buildprojects.html b/bitbake/lib/toaster/toastergui/templates/basetable_top_buildprojects.html
new file mode 100644
index 0000000000..d517179592
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/basetable_top_buildprojects.html
@@ -0,0 +1,16 @@
+{% extends "basetable_top.html" %}
+
+{%block custombuttons %}
+{% if MANAGED %}
+ <div class="btn-group builds-projects">
+ <button class="btn dropdown-toggle" data-toggle="dropdown">
+ <span class="selection">Show all builds</span>
+ <i class="icon-caret-down"></i>
+ </button>
+ <ul class="dropdown-menu">
+ <li><a href="{% url 'all-builds'%}">Show all builds</a></li>
+ <li><a href="{% url 'all-projects'%}">Show all projects</a></li>
+ </ul>
+ </div>
+{% endif %}
+{%endblock%}
diff --git a/bitbake/lib/toaster/toastergui/templates/basetable_top_layers.html b/bitbake/lib/toaster/toastergui/templates/basetable_top_layers.html
new file mode 100644
index 0000000000..bd6e7dd2de
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/basetable_top_layers.html
@@ -0,0 +1,5 @@
+{% extends "basetable_top.html" %}
+
+{%block custombuttons %}
+ <a class="btn" href="{% url 'importlayer' %}" style="margin-right:5px;">Import layer</a>
+{%endblock%}
diff --git a/bitbake/lib/toaster/toastergui/templates/basetable_top_projectbuilds.html b/bitbake/lib/toaster/toastergui/templates/basetable_top_projectbuilds.html
new file mode 100644
index 0000000000..bfefff5e33
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/basetable_top_projectbuilds.html
@@ -0,0 +1,16 @@
+{% extends "basetable_top.html" %}
+
+{%block custombuttons %}
+{% if MANAGED %}
+ <div class="btn-group builds-projects">
+ <button class="btn dropdown-toggle" data-toggle="dropdown">
+ <span class="selection">Show all projects</span>
+ <i class="icon-caret-down"></i>
+ </button>
+ <ul class="dropdown-menu">
+ <li><a href="{% url 'all-builds'%}">Show all builds</a></li>
+ <li><a href="{% url 'all-projects'%}">Show all projects</a></li>
+ </ul>
+ </div>
+{% endif %}
+{%endblock%}
diff --git a/bitbake/lib/toaster/toastergui/templates/build.html b/bitbake/lib/toaster/toastergui/templates/build.html
index faabd22f8b..e71e38feb9 100644
--- a/bitbake/lib/toaster/toastergui/templates/build.html
+++ b/bitbake/lib/toaster/toastergui/templates/build.html
@@ -6,83 +6,11 @@
{% block pagecontent %}
<div class="row-fluid">
- {% if not objects.paginator.count and not request.GET.filter and not request.GET.search %}
- <!-- Empty - no data in database -->
- <div class="hero-unit span12">
- <button type="button" class="close" data-dismiss="alert">&times;</button>
- <div class="row-fluid">
- <div class="span6">
- <h1>This is Toaster</h1>
- <p>A web interface to <a href="http://www.yoctoproject.org/tools-resources/projects/bitbake">BitBake</a>, the <a href="http://www.yoctoproject.org">Yocto Project</a> build system.</p>
- <p class="hero-actions">
- <a class="btn btn-primary btn-large" href="https://www.yoctoproject.org/documentation/toaster-manual">Show me the manual</a>
- <a class="btn btn-large" href="https://wiki.yoctoproject.org/wiki/Contribute_to_Toaster">I want to contribute</a>
- </p>
- </div>
- <div class="span5">
- <a href="http://www.yoctoproject.org"><img src="{% static 'img/toaster.png' %}" class="thumbnail" alt="Yocto Project"/> </a>
- </div>
- </div>
- </div>
- {% endif %}
- {%if mru.count > 0%}
- <div class="page-header top-air">
- <h1>
- Recent Builds
- </h1>
- </div>
- {% for build in mru %}
- <div class="alert {%if build.outcome == build.SUCCEEDED%}alert-success{%elif build.outcome == build.FAILED%}alert-error{%else%}alert-info{%endif%}">
- <div class="row-fluid">
- <div class="lead span5">
- {%if build.outcome == build.SUCCEEDED%}<i class="icon-ok-sign success"></i>{%elif build.outcome == build.FAILED%}<i class="icon-minus-sign error"></i>{%else%}{%endif%}
- {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
- <a href="{%url 'builddashboard' build.pk%}" class="{%if build.outcome == build.SUCCEEDED %}success{%else%}error{%endif%}">
- {% endif %}
- <span data-toggle="tooltip" {%if build.target_set.all.count > 1%}title="Targets: {%for target in build.target_set.all%}{{target.target}} {%endfor%}"{%endif%}>{{build.target_set.all.0.target}} {%if build.target_set.all.count > 1%}(+ {{build.target_set.all.count|add:"-1"}}){%endif%} {{build.machine}} ({{build.completed_on|naturaltime}})</span>
- {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
- </a>
- {% endif %}
- </div>
- {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
- <div class="span2 lead">
- {% if build.errors_no %}
- <i class="icon-minus-sign red"></i> <a href="{%url 'builddashboard' build.pk%}#errors" class="error">{{build.errors_no}} error{{build.errors_no|pluralize}}</a>
- {% endif %}
- </div>
- <div class="span2 lead">
- {% if build.warnings_no %}
- <i class="icon-warning-sign yellow"></i> <a href="{%url 'builddashboard' build.pk%}#warnings" class="warning">{{build.warnings_no}} warning{{build.warnings_no|pluralize}}</a>
- {% endif %}
- </div>
- <div class="lead pull-right">
- Build time: <a href="{% url 'buildtime' build.pk %}">{{ build.timespent|sectohms }}</a>
- </div>
- {%endif%}{%if build.outcome == build.IN_PROGRESS %}
- <div class="span4">
- <div class="progress" style="margin-top:5px;" data-toggle="tooltip" title="{{build.completeper}}% of tasks complete">
- <div style="width: {{build.completeper}}%;" class="bar"></div>
- </div>
- </div>
- <div class="lead pull-right">ETA: in {{build.eta|naturaltime}}</div>
- {%endif%}
- </div>
- </div>
+ {% include "mrb_section.html" %}
- {% endfor %}{%endif%}
- {% if not objects.paginator.count and not request.GET.filter and not request.GET.search %}
- <!-- Empty - no data in database -->
- {% if mru.count == 0 %}
- <div class="page-header top-air">
- <h1>All builds</h1>
- </div>
- <div class="alert alert-info lead">
- Toaster has not recorded any builds yet. Go build something with <a href="http://www.yoctoproject.org/docs/current/yocto-project-qs/yocto-project-qs.html#test-run">Knotty</a> or <a href="https://www.yoctoproject.org/documentation/hob-manual">Hob</a>
- </div>
- {% endif %}
- {% else %}
+ {% if 1 %}
<div class="page-header top-air">
<h1>
{% if request.GET.filter and objects.paginator.count > 0 or request.GET.search and objects.paginator.count > 0 %}
@@ -108,7 +36,7 @@
{% else %}
- {% include "basetable_top.html" %}
+ {% include "basetable_top_buildprojects.html" %}
<!-- Table data rows; the order needs to match the order of "tablecols" definitions; and the <td class value needs to match the tablecols clclass value for show/hide buttons to work -->
{% for build in objects %}
<tr class="data">
@@ -117,16 +45,46 @@
<td class="machine"><a href="{% url "builddashboard" build.id %}">{{build.machine}}</a></td>
<td class="started_on"><a href="{% url "builddashboard" build.id %}">{{build.started_on|date:"d/m/y H:i"}}</a></td>
<td class="completed_on"><a href="{% url "builddashboard" build.id %}">{{build.completed_on|date:"d/m/y H:i"}}</a></td>
- <td class="failed_tasks error">{% query build.task_build outcome=4 order__gt=0 as exectask%}{% if exectask.count == 1 %}<a href="{% url "task" build.id exectask.0.id %}">{{exectask.0.recipe.name}}.{{exectask.0.task_name}}</a>{% elif exectask.count > 1%}<a href="{% url "tasks" build.id %}?filter=outcome%3A4">{{exectask.count}}</a>{%endif%}</td>
- <td class="errors_no">{% if build.errors_no %}<a class="errors_no error" href="{% url "builddashboard" build.id %}#errors">{{build.errors_no}} error{{build.errors_no|pluralize}}</a>{%endif%}</td>
+ <td class="failed_tasks error">
+ {% query build.task_build outcome=4 order__gt=0 as exectask%}
+ {% if exectask.count == 1 %}
+ <a href="{% url "task" build.id exectask.0.id %}">{{exectask.0.recipe.name}}.{{exectask.0.task_name}}</a>
+ {% if MANAGED and build.project %}
+ <a href="{% url 'build_artifact' build.id "tasklogfile" exectask.0.id %}">
+ <i class="icon-download-alt" title="" data-original-title="Download task log file"></i>
+ </a>
+ {% endif %}
+ {% elif exectask.count > 1%}
+ <a href="{% url "tasks" build.id %}?filter=outcome%3A4">{{exectask.count}} task{{exectask.count|pluralize}}</a>
+ {%endif%}
+ </td>
+ <td class="errors_no">
+ {% if build.errors_no %}
+ <a class="errors_no error" href="{% url "builddashboard" build.id %}#errors">{{build.errors_no}} error{{build.errors_no|pluralize}}</a>
+ {% if MANAGED and build.project %}
+ <a href="{% url 'build_artifact' build.id "cookerlog" build.id %}">
+ <i class="icon-download-alt" title="" data-original-title="Download build log"></i>
+ </a>
+ {% endif %}
+ {%endif%}
+ </td>
<td class="warnings_no">{% if build.warnings_no %}<a class="warnings_no warning" href="{% url "builddashboard" build.id %}#warnings">{{build.warnings_no}} warning{{build.warnings_no|pluralize}}</a>{%endif%}</td>
<td class="time"><a href="{% url "buildtime" build.id %}">{{build.timespent|sectohms}}</a></td>
- <td class="log">{{build.cooker_log_path}}</td>
+ {% if not MANAGED or not build.project %}
+ <td class="log">{{build.cooker_log_path}}</td>
+ {% endif %}
<td class="output">
{% if build.outcome == build.SUCCEEDED %}
<a href="{%url "builddashboard" build.id%}#images">{{fstypes|get_dict_value:build.id}}</a>
{% endif %}
</td>
+ {% if MANAGED %}
+ <td class="project">
+ {% if build.project %}
+ <a href="{% url 'project' build.project.id %}">{{build.project.name}}</a>
+ {% endif %}
+ </td>
+ {% endif %}
</tr>
{% endfor %}
diff --git a/bitbake/lib/toaster/toastergui/templates/builddashboard.html b/bitbake/lib/toaster/toastergui/templates/builddashboard.html
index acf4d0a361..2458cdb6d1 100644
--- a/bitbake/lib/toaster/toastergui/templates/builddashboard.html
+++ b/bitbake/lib/toaster/toastergui/templates/builddashboard.html
@@ -39,6 +39,14 @@
<span class="pull-right">Build time: <a href="{% url 'buildtime' build.pk %}">{{ build.timespent|sectohms }}</a></span>
{%endif%}
</div>
+ {% if build.toaster_exceptions.count > 0 %}
+ <div class="row">
+ <small class="pull-right">
+ <i class="icon-question-sign get-help get-help-blue" title="" data-original-title="Toaster exceptions do not affect your build: only the operation of Toaster"></i>
+ <a class="show-exceptions" href="#exceptions">Toaster threw {{build.toaster_exceptions.count}} exception{{build.toaster_exceptions.count|pluralize}}</a>
+ </small>
+ </div>
+ {% endif %}
</div>
</div>
@@ -46,6 +54,9 @@
<div class="accordion span10 pull-right" id="errors">
<div class="accordion-group">
<div class="accordion-heading">
+ {% if MANAGED and build.project %}
+ <a class="btn btn-large pull-right" href="{% url 'build_artifact' build.id "cookerlog" build.id %}" style="margin:15px;">Download build log</a>
+ {% endif %}
<a class="accordion-toggle error toggle-errors">
<h2 id="error-toggle">
<i class="icon-minus-sign"></i>
@@ -60,7 +71,8 @@
<div class="alert alert-error">
<pre>{{error.message}}</pre>
</div>
- {% endif %}{% endfor %}
+ {% endif %}
+ {% endfor %}
</div>
</div>
</div>
@@ -70,23 +82,23 @@
{%if build.outcome == build.SUCCEEDED%}
<!-- built images -->
+{% if hasImages %}
<div class="row-fluid span10 pull-right">
- {% if hasImages %}
- <h2>Images</h2>
- {% for target in targets %}
+ <h2>Images</h2>
+ {% for target in targets %}
{% if target.target.is_image %}
- <div class="well dashboard-section">
- <h3><a href="{% url 'target' build.pk target.target.pk %}">{{target.target}}</a>
+ <div class="well dashboard-section">
+ <h3><a href="{% url 'target' build.pk target.target.pk %}">{{target.target}}</a>
</h3>
- <dl class="dl-horizontal">
- <dt>Packages included</dt>
- <dd><a href="{% url 'target' build.pk target.target.pk %}">{{target.npkg}}</a></dd>
- <dt>Total package size</dt>
- <dd>{{target.pkgsz|filtered_filesizeformat}}</dd>
+ <dl class="dl-horizontal">
+ <dt>Packages included</dt>
+ <dd><a href="{% url 'target' build.pk target.target.pk %}">{{target.npkg}}</a></dd>
+ <dt>Total package size</dt>
+ <dd>{{target.pkgsz|filtered_filesizeformat}}</dd>
{% if target.targetHasNoImages %}
- </dl>
- <div class="row-fluid">
- <div class="alert alert-info span7">
+ </dl>
+ <div class="row-fluid">
+ <div class="alert alert-info span7">
<p>
<b>This build did not create any image files</b>
</p>
@@ -99,38 +111,81 @@
license manifest information</a> in Toaster.
</p>
</div>
- </div>
+ </div>
{% else %}
- <dt>
- <i class="icon-question-sign get-help" title="The location in disk of the license manifest, a document listing all packages installed in your image and their licenses"></i>
- <a href="{% url 'targetpkg' build.pk target.target.pk %}">License manifest</a>
- </dt>
- <dd><code>{{target.target.license_manifest_path}}</code></dd>
- <dt>
- <i class="icon-question-sign get-help" title="Image files are stored in <code>/build/tmp/deploy/images/</code>"></i>
- Image files
- </dt>
- <dd>
- <ul>
+ <dt>
+ <i class="icon-question-sign get-help" title="The location in disk of the license manifest, a document listing all packages installed in your image and their licenses"></i>
+
+ {% if MANAGED and build.project %}
+ License manifest
+ {% else %}
+ <a href="{% url 'targetpkg' build.pk target.target.pk %}">License manifest</a>
+ {% endif %}
+ </dt>
+ {% if MANAGED and build.project %}
+ <dd>
+ <a href="{% url 'targetpkg' build.pk target.target.pk %}">View in Toaster</a> |
+ <a href="{% url 'build_artifact' build.pk 'licensemanifest' target.target.pk %}">Download</a></dd>
+ {% else %}
+ <dd><code>{{target.target.license_manifest_path}}</code></dd>
+ {% endif %}
+ <dt>
+ <i class="icon-question-sign get-help" title="Image files are stored in <code>/build/tmp/deploy/images/</code>"></i>
+ Image files
+ </dt>
+ <dd>
+ <ul>
{% for i in target.imageFiles %}
- <li>{{i.path}}
- ({{i.size|filtered_filesizeformat}})</li>
+ {% if build.project %}
+ <li><a href="{% url 'build_artifact' build.pk 'imagefile' i.id %}">{{i.path}}</a>
+ {% else %}
+ <li>{{i.path}}
+ {% endif %}
+ ({{i.size|filtered_filesizeformat}})</li>
{% endfor %}
- </ul>
- </dd>
- </dl>
+ </ul>
+ </dd>
+ </dl>
{% endif %}
- </div>
+ </div>
{% endif %}
- {% endfor %}
-
- {% endif %}
+ {% endfor %}
</div>
+{% endif %}
{%else%}
<!-- error dump -->
{%endif%}
+<!-- other artifacts -->
+{% if build.buildartifact_set.all.count > 0 %}
+<div class="row-fluid span10 pull-right">
+<h2>Other artifacts</h2>
+
+ <div class="well dashboard-section">
+ <dl class="dl-horizontal">
+ <dt>
+ <i class="icon-question-sign get-help" title="Build artifacts discovered in <i>tmp/deploy/images</i>. Usually kernel images and kernel modules."></i>
+ Other artifacts</dt>
+ <dd><div>
+ {% for ba in build.buildartifact_set.all|dictsort:"file_name" %}
+ {% if MANAGED and build.project %}
+ <a href="{%url 'build_artifact' build.id 'buildartifact' ba.id %}">
+ {% endif %}
+ {{ba.get_local_file_name}}
+ {% if MANAGED and build.project %}
+ </a>
+ {% endif %}
+
+ ({{ba.file_size|filtered_filesizeformat}}) <br/>
+ {% endfor %}
+ </div>
+ </dd>
+
+ </div>
+
+</div>
+{% endif %}
<!-- build summary -->
<div class="row-fluid span10 pull-right">
<h2>Build summary</h2>
@@ -145,6 +200,27 @@
<div class="well span4 dashboard-section">
<h4><a href="{%url 'tasks' build.pk%}">Tasks</a></h4>
<dl>
+ {% query build.task_build outcome=4 order__gt=0 as exectask%}
+ {% if exectask.count > 0 %}
+ <dt>Failed tasks</dt>
+ <dd>
+ {% if exectask.count == 1 %}
+ <a class="error" href="{% url "task" build.id exectask.0.id %}">
+ {{exectask.0.recipe.name}}
+ <span class="task-name">{{exectask.0.task_name}}</span>
+
+ {% if MANAGED and build.project %}
+ <a href="{% url 'build_artifact' build.id "tasklogfile" exectask.0.id %}">
+ <i class="icon-download-alt" title="" data-original-title="Download task log file"></i>
+ </a>
+ {% endif %}
+
+ </a>
+ {% elif exectask.count > 1%}
+ <a class="error" href="{% url "tasks" build.id %}?filter=outcome%3A4">{{exectask.count}}</a>
+ {% endif %}
+ </dd>
+ {% endif %}
<dt>Total number of tasks</dt><dd><a href="{% url 'tasks' build.pk %}">{% query build.task_build order__gt=0 as alltasks %}{{alltasks.count}}</a></dd>
<dt>
Tasks executed
@@ -154,12 +230,12 @@
<dt>
Tasks not executed
<i class="icon-question-sign get-help" title="'Not executed' tasks don't need to run because their outcome is provided by another task"></i>
- </dt>
+ </dt>
<dd><a href="{% url 'tasks' build.pk %}?filter=task_executed%3A0&amp;count=25&amp;search=&amp;page=1&amp;orderby=order%3A%2B">{% query build.task_build task_executed=0 order__gt=0 as noexectask%}{{noexectask.count}}</a></dd>
<dt>
Reuse
<i class="icon-question-sign get-help" title="The percentage of 'not executed' tasks over the total number of tasks, which is a measure of the efficiency of your build"></i>
- </dt>
+ </dt>
<dd>
{% query build.task_build order__gt=0 as texec %}
{% if noexectask.count|multiply:100|divide:texec.count < 0 %}
@@ -206,6 +282,33 @@
</div>
{% endif %}
+
+{% if build.toaster_exceptions.count > 0 %}
+<div class="accordion span10 pull-right" id="exceptions">
+ <div class="accordion-group">
+ <div class="accordion-heading">
+ <a class="accordion-toggle exception toggle-exceptions">
+ <h2 id="exception-toggle">
+ <i class="icon-warning-sign"></i>
+ {{build.toaster_exceptions.count}} Toaster exception{{build.toaster_exceptions.count|pluralize}}
+ </h2>
+ </a>
+ </div>
+ <div class="accordion-body collapse" id="collapse-exceptions">
+ <div class="accordion-inner">
+ <div class="span10">
+ {% for exception in build.toaster_exceptions %}
+ <div class="alert alert-exception">
+ <pre>{{exception.message}}</pre>
+ </div>
+ {% endfor %}
+ </div>
+ </div>
+ </div>
+ </div>
+</div>
+{% endif %}
+
<script type="text/javascript">
$(document).ready(function() {
//show warnings section when requested from the previous page
diff --git a/bitbake/lib/toaster/toastergui/templates/configuration.html b/bitbake/lib/toaster/toastergui/templates/configuration.html
index 49a6a89d5c..d3b34a2096 100644
--- a/bitbake/lib/toaster/toastergui/templates/configuration.html
+++ b/bitbake/lib/toaster/toastergui/templates/configuration.html
@@ -50,7 +50,9 @@
<th>Layer</th>
<th>Layer branch</th>
<th>Layer commit</th>
- <th>Layer directory</th>
+ {% if not MANAGED or not build.project %}
+ <th>Layer directory</th>
+ {% endif %}
</tr>
</thead>
<tbody>{% for lv in build.layer_version_build.all|dictsort:"layer.name" %}
@@ -61,7 +63,9 @@
<li>{{lv.commit}}</li> </ul>">
{{lv.commit|truncatechars:13}}
</a></td>
+ {% if not MANAGED or not build.project %}
<td>{{lv.layer.local_path}}</td>
+ {% endif %}
</tr>{% endfor %}
</tbody>
</table>
diff --git a/bitbake/lib/toaster/toastergui/templates/filtersnippet.html b/bitbake/lib/toaster/toastergui/templates/filtersnippet.html
index 90ffd3de6c..fe70e7143e 100644
--- a/bitbake/lib/toaster/toastergui/templates/filtersnippet.html
+++ b/bitbake/lib/toaster/toastergui/templates/filtersnippet.html
@@ -18,10 +18,10 @@
{% for option in f.options %}
{% if option.2 %}
<label class="radio">
- <input type="radio" name="filter" {%if request.GET.filter == option.1 %}checked{%endif%} value="{{option.1}}"> {{option.0}} ({{option.2}})
+ <input type="radio" name="filter" {%if request.GET.filter == option.1 %}checked{%endif%} value="{{option.1}}"> {{option.0}} (<span id="{{option.1}}_count">{{option.2}}</span>)
{% else %}
<label class="radio muted">
- <input type="radio" name="filter" disabled {%if request.GET.filter == option.1 %}checked{%endif%} value="{{option.1}}"> {{option.0}} ({{option.2}})
+ <input type="radio" name="filter" disabled {%if request.GET.filter == option.1 %}checked{%endif%} value="{{option.1}}"> {{option.0}} (<span id="{{option.1}}_count">{{option.2}}</span>)
{% endif %}
{% if option.3 %}<i class="icon-question-sign get-help" data-placement="right" title="{{option.3}}"></i>{% endif %}
</label>
diff --git a/bitbake/lib/toaster/toastergui/templates/importlayer.html b/bitbake/lib/toaster/toastergui/templates/importlayer.html
new file mode 100644
index 0000000000..a4d8ee1ebd
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/importlayer.html
@@ -0,0 +1,116 @@
+{% extends "baseprojectpage.html" %}
+{% load projecttags %}
+{% load humanize %}
+{% load static %}
+
+{% block localbreadcrumb %}
+<li>Import layer</li>
+{% endblock %}
+
+{% block projectinfomain %}
+
+ <script src="{% static 'js/importlayer.js' %}"></script>
+ <script>
+ $(document).ready(function (){
+ var ctx = {
+ xhrDataTypeaheadUrl : "{% url 'xhr_datatypeahead' %}",
+ layerDetailsUrl : "{% url 'layerdetails' %}",
+ xhrImportLayerUrl : "{% url 'xhr_importlayer' %}",
+ xhrEditProjectUrl : "{% url 'xhr_projectedit' project.id %}",
+ projectPageUrl : "{% url 'project' project.id %}",
+ projectId : {{project.id}}
+ };
+
+ try {
+ importLayerPageInit(ctx);
+ } catch (e) {
+          document.write("Sorry, an error occurred while loading this page");
+ console.warn(e);
+ }
+ });
+ </script>
+
+ <div class="page-header">
+ <h1>Import layer</h1>
+ </div>
+
+ {% include "layers_dep_modal.html" %}
+ <form>
+ {% if project %}
+ <span class="help-block" style="padding-left:19px;">The layer you are importing must be compatible with <strong>{{project.release.description}}</strong>, which is the release you are using in this project.</span>
+ {% endif %}
+ <fieldset class="air">
+ <legend>Layer repository information</legend>
+ <div class="alert alert-error" id="import-error" style="display:none">
+ <button type="button" class="close" data-dismiss="alert">&times;</button>
+ <h3></h3>
+ <p></p>
+ <ul></ul>
+ </div>
+
+ <div class="control-group" id="layer-name-ctrl">
+ <label class="control-label" for="import-layer-name">
+ Layer name
+ <span class="icon-question-sign get-help" title="Something like 'meta-mylayer'. Your layer name must be unique and can only include letters, numbers and dashes" />
+ </label>
+ <div class="controls">
+ <input id="import-layer-name" type="text" required autofocus>
+ <span class="help-inline" style="display: none;" id="invalid-layer-name-hint">A valid layer name can only include letters, numbers and dashes</span>
+ <span class="help-inline" style="display: none;" id="duplicated-layer-name-hint"></span>
+ </div>
+
+ </div>
+
+ <label for="layer-git-repo-url" class="project-form">
+ Git repository URL
+ <span class="icon-question-sign get-help" title="Fetch/clone URL of the repository. Currently, Toaster only supports Git repositories." />
+ </label>
+
+ <input type="text" id="layer-git-repo-url" class="input-xxlarge" required>
+ <label class="project-form" for="layer-subdir">
+ Repository subdirectory
+ <span class="muted">(optional)</span>
+ <span class="icon-question-sign get-help" title="Subdirectory within the repository where the layer is located, if not in the root (usually only used if the repository contains more than one layer)" />
+ </label>
+ <input type="text" id="layer-subdir">
+
+ <div class="control-group" id="layer-revision-ctrl">
+ <label class="control-label" for="layer-git-ref">Revision
+ <span class="icon-question-sign get-help" title="You can provide a Git branch, a tag or a commit SHA as the revision"></span>
+ </label>
+ <div class="controls">
+ <input type="text" class="span4" id="layer-git-ref" required>
+        <span class="help-inline" style="display:none;" id="invalid-layer-revision-hint"></span>
+ </div>
+ </div>
+
+ <label class="project-form" for="layer-description">Layer description
+ <span class="muted">(optional)</span>
+ <span class="icon-question-sign get-help" title="A short layer explanation" />
+ </label>
+ <textarea id="layer-description" class="input-xxlarge"></textarea>
+
+ </fieldset>
+ <fieldset class="air">
+ <legend>
+ Layer dependencies
+ <span class="muted">(optional)</span>
+ <span class="icon-question-sign get-help heading-help" title="Other layers this layer depends upon" />
+ </legend>
+ <ul class="unstyled configuration-list" id="layer-deps-list">
+ </ul>
+ <div class="input-append">
+ <input type="text" autocomplete="off" data-minLength="1" data-autocomplete="off" data-provide="typeahead" placeholder="Type a layer name" id="layer-dependency" class="input-xlarge">
+ <a class="btn" type="button" id="add-layer-dependency-btn" disabled>
+ Add layer
+ </a>
+ </div>
+ <span class="help-inline">You can only add layers Toaster knows about</span>
+ </fieldset>
+ <div class="form-actions" id="form-actions">
+ <button class="btn btn-primary btn-large" data-toggle="modal" id="import-and-add-btn" data-target="#dependencies-message" disabled>Import and add to project</button>
+ <span class="help-inline" id="import-and-add-hint" style="vertical-align: middle;">To import a layer, you need to enter a repository URL, a branch, tag or commit and a layer name</span>
+ </div>
+ </form>
+
+{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/landing.html b/bitbake/lib/toaster/toastergui/templates/landing.html
new file mode 100644
index 0000000000..071edf86ef
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/landing.html
@@ -0,0 +1,66 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load projecttags %}
+{% load humanize %}
+
+{% block pagecontent %}
+
+ <div class="container-fluid">
+ <div class="row-fluid">
+ <!-- Empty - no data in database -->
+ <div class="hero-unit span12">
+ <button class="close" data-dismiss="alert" type="button">
+ ×
+ </button>
+ <div class="row-fluid">
+ <div class="span6">
+ <h1>
+ This is Toaster
+ </h1>
+ <p>
+ A web interface to
+ <a href="http://www.yoctoproject.org/tools-resources/projects/bitbake">
+ BitBake
+ </a>
+ , the
+ <a href="http://www.yoctoproject.org">
+ Yocto Project
+ </a>
+ build system.
+ </p>
+ <p class="hero-actions">
+ <a class="btn btn-primary btn-large" href="https://www.yoctoproject.org/documentation/toaster-manual">
+ Show me the manual
+ </a>
+ <a class="btn btn-large" href="https://wiki.yoctoproject.org/wiki/Contribute_to_Toaster">
+ I want to contribute
+ </a>
+ </p>
+ </div>
+ <div class="span5">
+ <a href="http://www.yoctoproject.org">
+ <img alt="Yocto Project" class="thumbnail" src="/static/img/toaster.png"/>
+ </a>
+ </div>
+ </div>
+ </div>
+ <!-- Empty - no data in database -->
+ <div class="page-header top-air">
+ <h1>
+ All builds
+ </h1>
+ </div>
+ <div class="alert alert-info lead">
+ Toaster has not recorded any builds yet. Go build something with
+ <a href="http://www.yoctoproject.org/docs/current/yocto-project-qs/yocto-project-qs.html#test-run">
+ Knotty
+ </a>
+ or
+ <a href="https://www.yoctoproject.org/documentation/hob-manual">
+ Hob
+ </a>
+ </div>
+ </div>
+
+{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/layerdetails.html b/bitbake/lib/toaster/toastergui/templates/layerdetails.html
new file mode 100644
index 0000000000..78dc54bfd1
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/layerdetails.html
@@ -0,0 +1,159 @@
+{% extends "baseprojectpage.html" %}
+{% load projecttags %}
+{% load humanize %}
+
+{% block localbreadcrumb %}
+<li>Layer Details</li>
+{% endblock %}
+
+{% block projectinfomain %}
+<div class="page-header">
+ <h1>Layer Details</h1>
+</div>
+
+ <div class="row-fluid span7 tabbable">
+ <ul class="nav nav-pills">
+ <li class="active">
+ <a data-toggle="tab" href="#information">Layer details</a>
+ </li>
+ <li>
+ <a data-toggle="tab" href="#targets">Targets (0)</a>
+ </li>
+ <li>
+ <a data-toggle="tab" href="#machines">Machines (0)</a>
+ </li>
+ <li>
+ <a data-toggle="tab" href="#classes">Classes (0)</a>
+ </li>
+ <li>
+ <a data-toggle="tab" href="#bbappends">bbappends (0)</a>
+ </li>
+ </ul>
+ <div class="tab-content">
+ <div name="information" id="information" class="tab-pane active">
+ <dl class="dl-horizontal">
+ <dt class="">
+ <i class="icon-question-sign get-help" title="Fetch/clone URL of the repository"></i>
+ Repository URL
+ </dt>
+ <dd>
+ <form id="change-repo-form" class="control-group">
+ <div class="input-append">
+ <input type="text" class="input-xlarge" id="type-repo" value="{{layerversion.layer.vcs_url}}">
+ <button id="apply-change-repo" class="btn" type="button">Change</button>
+ <!--a href="#" id="cancel-change-repo" class="btn btn-link">Cancel</a-->
+ </div>
+ <span class="help-block">Cloning this Git repository failed</span>
+ </form>
+ </dd>
+ <dt>
+ <i class="icon-question-sign get-help" title="Subdirectory within the repository where the layer is located, if not in the root (usually only used if the repository contains more than one layer)"></i>
+ Repository subdirectory
+ </dt>
+ <dd>
+ <span id="subdir">{{layerversion.dirpath}}</span>
+ <i id="change-subdir" class="icon-pencil"></i>
+ <i id="delete-subdir" class="icon-trash"></i>
+ <form id="change-subdir-form" style="display:none;">
+ <div class="input-append">
+ <input type="text" id="type-subdir" value="meta-acer">
+ <button id="apply-change-subdir" class="btn" type="button">Change</button>
+ <a href="#" id="cancel-change-subdir" class="btn btn-link">Cancel</a>
+ </div>
+ </form>
+ </dd>
+          <dt>Branch, tag or commit</dt>
+ <dd>
+ {{layerversion.up_branch.name}}
+ <i class="icon-pencil"></i>
+ </dd>
+ <dt>
+ <i class="icon-question-sign get-help" title="The Yocto Project versions with which this layer is compatible. Currently Toaster supports Yocto Project 1.6 and 1.7"></i>
+ Yocto Project compatibility
+ </dt>
+ <dd>
+ <i class="icon-pencil"></i>
+ </dd>
+ <dt>
+ <i class="icon-question-sign get-help" title="Other layers this layer depends upon"></i>
+ Layer dependencies
+ </dt>
+ <dd>
+ <ul class="unstyled">
+ {% for ld in layer.dependencies.all %}
+ <li>
+ <a href="#">openembedded core (meta)</a>
+ <i class="icon-trash"></i>
+ </li>
+ {% endfor %}
+ </ul>
+ <div class="input-append">
+ <input type="text" autocomplete="off" data-minLength="1" data-autocomplete="off"
+ data-provide="typeahead" data-source='
+ '
+ placeholder="Type a layer name" id="layer-dependency">
+ <a class="btn" type="button" id="add-layer-dependency" disabled>
+ Add layer
+ </a>
+ </div>
+ <span class="help-block">You can only add layers Toaster knows about</span>
+ </dd>
+ </dl>
+ </div>
+ <div name="targets" id="targets" class="tab-pane">
+ <div class="alert alert-info">
+ <strong>There is no target data for {{layerversion.layer.name}} ... yet</strong> <br />
+          Toaster learns about layers as they are built. Once you have used {{layerversion.layer.name}} in a build, Toaster will show you
+          the targets it provides here.
+ </div>
+ </div>
+ <div name="machines" id="machines" class="tab-pane">
+ <div class="alert alert-info">
+ <strong>There is no machine data for {{layerversion.layer.name}} ... yet</strong> <br />
+          Toaster learns about layers as they are built. Once you have used {{layerversion.layer.name}} in a build, Toaster will show you
+          the machines it provides here.
+ </div>
+ </div>
+ </div>
+</div>
+<div class="row span4 well">
+<h2>About {{layerversion.layer.name}}</h2>
+<dl>
+
+ <dt>
+ Summary
+ <i class="icon-question-sign get-help" title="One-line description of the layer"></i>
+ </dt>
+ <dd>
+ <span >{{layerversion.layer.summary}}</span>
+ <i class="icon-pencil"></i>
+ </dd>
+ <!--form>
+ <textarea class="span12" rows="2"></textarea>
+ <button class="btn" type="button">Change</button>
+ <a href="#" class="btn btn-link">Cancel</a>
+ </form-->
+ <dt>
+ Description
+ </dt>
+ <dd>
+ <span >{{layerversion.layer.description}}</span>
+ <i class="icon-pencil"></i>
+ </dd>
+ <!--form>
+ <textarea class="span12" rows="6"></textarea>
+ <button class="btn" type="button">Change</button>
+ <a href="#" class="btn btn-link">Cancel</a>
+ </form-->
+ <dt>
+ Maintainer(s)
+ </dt>
+ <dd>
+ <span class="muted">Not set</span>
+ <i class="icon-pencil"></i>
+ </dd>
+</dl>
+</div>
+
+
+{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/layers.html b/bitbake/lib/toaster/toastergui/templates/layers.html
new file mode 100644
index 0000000000..ced54c2098
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/layers.html
@@ -0,0 +1,288 @@
+{% extends "baseprojectpage.html" %}
+{% load projecttags %}
+{% load humanize %}
+
+{% block localbreadcrumb %}
+<li>Layers</li>
+{% endblock %}
+
+{% block projectinfomain %}
+ <div class="page-header">
+ <h1>
+ {% if request.GET.filter and total_count > 0 or request.GET.search and total_count > 0 %}
+ {{total_count}} layer{{total_count|pluralize}} found
+ {% elif request.GET.filter and total_count == 0 or request.GET.search and total_count == 0 %}
+ No layers found
+ {%else%}
+ All layers
+ {%endif%}
+ <i class="icon-question-sign get-help heading-help" title="This page lists all the layers compatible with {{project.release.name}} that Toaster knows about."></i>
+ </h1>
+ </div>
+
+ <div id="zone1alerts">
+ </div>
+
+{% if objects.paginator.count == 0 %}
+ <div class="row-fluid">
+ <div class="alert">
+ <form class="no-results input-append" id="searchform">
+ <input id="search" name="search" class="input-xxlarge" type="text" value="{{request.GET.search}}"/>{% if request.GET.search %}<a href="javascript:$('#search').val('');searchform.submit()" class="add-on btn" tabindex="-1"><i class="icon-remove"></i></a>{% endif %}
+ <button class="btn" type="submit" value="Search">Search</button>
+ <button class="btn btn-link" onclick="javascript:$('#search').val('');searchform.submit()">Show all layers</button>
+ </form>
+ </div>
+ </div>
+
+{% else %}
+
+{% include "basetable_top_layers.html" %}
+ {% for o in objects %}
+ <tr class="data">
+ <td class="layer"><a href="{% url 'layerdetails' o.id %}">{{o.layer.name}}</a></td>
+ <td class="description">{% if o.layer.summary %}{{o.layer.summary}}{%endif%}</td>
+ <td class="source"><a href="{% url 'layerdetails' o.pk %}">{{o.layer_source.name}}</a></td>
+ <td class="git-repo"><a href="{% url 'layerdetails' o.pk %}"><code>{{o.layer.vcs_url}}</code></a>
+ {% if o.get_vcs_link_url %}
+ <a target="_blank" href="{{ o.get_vcs_link_url }}"><i class="icon-share get-info"></i></a>
+ {% endif %}
+ </td>
+ <td class="git-subdir" style="display: table-cell;"><a href="{% url 'layerdetails' o.pk %}"><code>{{o.dirpath}}</code></a>
+ {% if o.dirpath and o.get_vcs_dirpath_link_url %}
+ <a target="_blank" href="{{ o.get_vcs_dirpath_link_url }}"><i class="icon-share get-info"></i></a>
+ {% endif %}
+ </td>
+ <td class="branch">
+ {% if o.branch %}
+ {{o.branch}}
+ {% else %}
+ {{o.up_branch.name}}
+ <i class="icon-question-sign get-help hover-help" title="Your builds will use the tip of the branch you have cloned or downloaded to your computer, so nothing will be fetched"></i>
+ {% endif %}
+ </td>
+ <td class="dependencies">
+ {% with ods=o.dependencies.all%}
+ {% if ods.count %}
+ <a class="btn"
+ title="<a href='{% url "layerdetails" o.pk %}'>{{o.layer.name}}</a> dependencies"
+ data-content="<ul class='unstyled'>
+ {% for i in ods%}
+ <li><a href='{% url "layerdetails" i.depends_on.pk %}'>{{i.depends_on.layer.name}}</a></li>
+ {% endfor %}
+ </ul>">
+ {{ods.count}}
+ </a>
+ {% endif %}
+ {% endwith %}
+ </td>
+ {% if project %}
+ <td class="add-del-layers" value="{{o.pk}}">
+ <div id="layer-tooltip-{{o.pk}}" style="display: none; font-size: 11px; line-height: 1.3;" class="tooltip-inner">layer was modified</div>
+ <button id="layer-del-{{o.pk}}" class="btn btn-danger btn-block remove-layer layerbtn" style="display:none;" onclick="layerDel({{o.pk}}, '{{o.layer.name}}', '{%url 'layerdetails' o.pk%}')" >
+ <i class="icon-trash"></i>
+ Delete layer
+ </button>
+ <button id="layer-add-{{o.pk}}" class="btn btn-block layerbtn" style="display:none;" onclick="layerAdd({{o.pk}}, '{{o.layer.name}}', '{%url 'layerdetails' o.pk%}')" title="layer added">
+ <i class="icon-plus"></i>
+ Add layer
+ </button>
+ </td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+{% include "basetable_bottom.html" %}
+
+ <!-- Modals -->
+
+ <!-- 'Layer dependencies modal' -->
+ <div id="dependencies_modal" class="modal hide fade" tabindex="-1" role="dialog" aria-hidden="true">
+ <form id="dependencies_modal_form">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">x</button>
+ <h3><span class="layer-name"></span> dependencies</h3>
+ </div>
+ <div class="modal-body">
+ <p><strong class="layer-name"></strong> depends on some layers that are not added to your project. Select the ones you want to add:</p>
+ <ul class="unstyled" id="dependencies_list">
+ </ul>
+ </div>
+ <div class="modal-footer">
+ <button class="btn btn-primary" type="submit">Add layers</button>
+ <button class="btn" type="reset" data-dismiss="modal">Cancel</button>
+ </div>
+ </form>
+ </div>
+
+{% if project %}
+<script>
+
+var tooltipUpdateText;
+
+function _makeXHREditCall(data, onsuccess, onfail) {
+ $.ajax( {
+ type: "POST",
+ url: "{% url 'xhr_projectedit' project.id %}",
+ data: data,
+ headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
+ success: function (_data) {
+ if (_data.error != "ok") {
+ alert(_data.error);
+ } else {
+ updateButtons(_data.layers.map(function (e) {return e.id}));
+ if (onsuccess != undefined) onsuccess(_data);
+ }
+ },
+ error: function (_data) {
+ alert("Call failed");
+ console.log(_data);
+ }
+ });
+}
+
+function updateLayerCountLabels (amount) {
+ /* Update the filter labels */
+ var countLabel = $("#projectlayer__project\\:{{project.id}}_count");
+ countLabel.text(Number(countLabel.text())+amount);
+
+ var countLabelRemaining = $("#projectlayer__project\\:NOT{{project.id}}_count");
+ countLabelRemaining.text(Number(countLabelRemaining.text())-amount);
+}
+
+
+function layerDel(layerId, layerName, layerURL) {
+ tooltipUpdateText = "1 layer deleted";
+ _makeXHREditCall({ 'layerDel': layerId }, function () {
+ updateLayerCountLabels(-1);
+
+ show_alert("You have deleted <strong>1</strong> layer from <a href=\"{% url 'project' project.id%}\">{{project.name}}</a>: <a href=\""+layerURL+"\">" + layerName +"</a>");
+ });
+}
+
+function show_alert(text, cls) {
+ $("#zone1alerts").html("<div class=\"alert alert-info lead\"><button type=\"button\" class=\"close\" data-dismiss=\"alert\">&times;</button>" + text + "</div>");
+}
+
+function show_dependencies_modal(layerId, layerName, layerURL, dependencies) {
+ // update layer name
+ $('.layer-name').text(layerName);
+ var deplistHtml = "";
+ for (var i = 0; i < dependencies.length; i++) {
+ deplistHtml += "<li><label class=\"checkbox\"><input name=\"dependencies\" value=\""
+ deplistHtml += dependencies[i].id;
+ deplistHtml +="\" type=\"checkbox\" checked=\"checked\"/>";
+ deplistHtml += dependencies[i].name;
+ deplistHtml += "</label></li>";
+ }
+ $('#dependencies_list').html(deplistHtml);
+
+ var selected = [layerId];
+ var layer_link_list = "<a href='"+layerURL+"'>"+layerName+"</a>";
+
+ $("#dependencies_modal_form").submit(function (e) {
+ e.preventDefault();
+ $("input[name='dependencies']:checked").map(function () { selected.push(parseInt($(this).val()))});
+ if (selected.length > 1) {
+ tooltipUpdateText = "" + selected.length + " layers added";
+ } else {
+ tooltipUpdateText = "1 layer added";
+ }
+
+ for (var i = 0; i < selected.length; i++) {
+ for (var j = 0; j < dependencies.length; j++) {
+ if (dependencies[j].id == selected[i]) {
+ layer_link_list+= ", <a href='"+dependencies[j].layerdetailurl+"'>"+dependencies[j].name+"</a>"
+ break;
+ }
+ }
+ }
+
+ $('#dependencies_modal').modal('hide');
+
+ {% if project %}
+ _makeXHREditCall({ 'layerAdd': selected.join(",") }, function () {
+ show_alert("You have added <strong>"+selected.length+"</strong> layers to <a href=\"{% url 'project' project.id%}\">{{project.name}}</a>: " + layer_link_list);
+ });
+ {% endif %}
+
+ });
+ $('#dependencies_modal').modal('show');
+}
+
+
+function layerAdd(layerId, layerName, layerURL) {
+ $.ajax({
+ url: '{% url "xhr_datatypeahead" %}',
+ data: {'type': 'layerdeps','value':layerId},
+ success: function(_data) {
+ if (_data.error != "ok") {
+ alert(_data.error);
+ } else {
+ updateLayerCountLabels(_data.list.length+1);
+
+ if (_data.list.length > 0) {
+ show_dependencies_modal(layerId, layerName, layerURL, _data.list);
+ }
+ else {
+ tooltipUpdateText = "1 layer added";
+ _makeXHREditCall({ 'layerAdd': layerId }, function () {
+ show_alert("You have added <strong>1</strong> layer to <a href=\"{% url 'project' project.id%}\">{{project.name}}</a>: <a href=\""+layerURL+"\">" + layerName +"</a>");
+ });
+ }
+ }
+ }
+ })
+}
+
+function button_set(id, state) {
+ var tohide, toshow;
+ if (state == "add")
+ {
+ tohide = "#layer-del-";
+ toshow = "#layer-add-";
+ }
+ else if (state == "del")
+ {
+ tohide = "#layer-add-";
+ toshow = "#layer-del-";
+ }
+
+
+ var previouslyvisible = $(tohide + id).is(":visible");
+ if (previouslyvisible) {
+ $(tohide + id).fadeOut( function() {
+ $("#layer-tooltip-" + id).text(tooltipUpdateText);
+ $("#layer-tooltip-" + id).fadeIn().delay(2000).fadeOut(function(){
+ $(toshow + id).delay(300).fadeIn();
+ });
+ });
+ } else {
+ $(tohide + id).hide();
+ $("#layer-tooltip-" + id).hide();
+ $(toshow + id).show();
+ }
+};
+
+function updateButtons(projectLayers) {
+ var displayedLayers = [];
+ $(".add-del-layers").map(function () { displayedLayers.push(parseInt($(this).attr('value')))});
+ for (var i=0; i < displayedLayers.length; i++) {
+ if (projectLayers.indexOf(displayedLayers[i]) > -1) {
+ button_set(displayedLayers[i], "del");
+ }
+ else {
+ button_set(displayedLayers[i], "add");
+ }
+ }
+}
+
+$(document).ready(function (){
+ $('.layerbtn').tooltip({ trigger: 'manual' });
+ updateButtons({{projectlayerset}});
+});
+
+</script>
+{%endif%}
+
+{%endif%}
+
+{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/layers_dep_modal.html b/bitbake/lib/toaster/toastergui/templates/layers_dep_modal.html
new file mode 100644
index 0000000000..b03fd0b218
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/layers_dep_modal.html
@@ -0,0 +1,90 @@
+<!-- 'Layer dependencies modal' -->
+ <div id="dependencies_modal" class="modal hide fade" tabindex="-1" role="dialog" aria-hidden="true">
+ <form id="dependencies_modal_form">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">x</button>
+ <h3><span id="title"></span> dependencies</h3>
+ </div>
+ <div class="modal-body">
+ <p id="body-text"> <strong id="layer-name"></strong> depends on some layers that are not added to your project. Select the ones you want to add:</p>
+ <ul class="unstyled" id="dependencies_list">
+ </ul>
+ </div>
+ <div class="modal-footer">
+ <button class="btn btn-primary" type="submit">Add layers</button>
+ <button class="btn" type="reset" data-dismiss="modal">Cancel</button>
+ </div>
+ </form>
+ </div>
+
+<script>
+function show_layer_deps_modal(projectId, layer, dependencies, title, body, addToProject, successAdd) {
+ // update layer name
+ if (title) {
+ $('#dependencies_modal #title').text(title);
+ } else {
+ $('#dependencies_modal #title').text(layer.name);
+ }
+
+ if (body) {
+ $("#dependencies_modal #body-text").html(body);
+ } else {
+ $("#dependencies_modal #layer-name").text(layer.name);
+ }
+
+ var deplistHtml = "";
+ for (var i = 0; i < dependencies.length; i++) {
+ deplistHtml += "<li><label class=\"checkbox\"><input name=\"dependencies\" value=\"";
+ deplistHtml += dependencies[i].id;
+ deplistHtml +="\" type=\"checkbox\" checked=\"checked\"/>";
+ deplistHtml += dependencies[i].name;
+ deplistHtml += "</label></li>";
+ }
+ $('#dependencies_list').html(deplistHtml);
+
+ var selected = [];
+ /* -1 is a special dummy Id which we use when the layer isn't yet in the
+ * system, normally we would add the current layer to the selection.
+ */
+ if (layer.id != -1)
+ selected.push(layer.id);
+
+ var layer_link_list = "<a href='"+layer.url+"'>"+layer.name+"</a>";
+
+ $("#dependencies_modal_form").submit(function (e) {
+ e.preventDefault();
+ $("input[name='dependencies']:checked").map(function () { selected.push(parseInt($(this).val()))});
+ if (selected.length > 1) {
+ tooltipUpdateText = "" + selected.length + " layers added";
+ } else {
+ tooltipUpdateText = "1 layer added";
+ }
+
+ for (var i = 0; i < selected.length; i++) {
+ for (var j = 0; j < dependencies.length; j++) {
+ if (dependencies[j].id == selected[i]) {
+ layer_link_list+= ", <a href='"+dependencies[j].layerdetailurl+"'>"+dependencies[j].name+"</a>"
+ break;
+ }
+ }
+ }
+
+ $('#dependencies_modal').modal('hide');
+
+ if (addToProject) {
+ var editProjectUrl = "{% url 'xhr_projectedit' project.id %}";
+ libtoaster.editProject(editProjectUrl, projectId, { 'layerAdd': selected.join(",") }, function () {
+ if (successAdd) {
+ successAdd(selected);
+ }
+ }, function () {
+ console.log ("Adding layers to project failed");
+ });
+ } else {
+ successAdd(selected);
+ }
+ });
+
+ $('#dependencies_modal').modal('show');
+}
+</script>
diff --git a/bitbake/lib/toaster/toastergui/templates/machines.html b/bitbake/lib/toaster/toastergui/templates/machines.html
new file mode 100644
index 0000000000..18e7485d50
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/machines.html
@@ -0,0 +1,63 @@
+{% extends "baseprojectpage.html" %}
+{% load projecttags %}
+{% load humanize %}
+
+{% block localbreadcrumb %}
+<li>Machines</li>
+{% endblock %}
+
+{% block projectinfomain %}
+ <div class="page-header">
+ <h1>
+ All machines
+ <i class="icon-question-sign get-help heading-help" title="This page lists all the machines compatible with Yocto Project 1.7 'Dxxxx' that Toaster knows about. They include community-created targets suitable for use on top of OpenEmbedded Core and any targets you have imported"></i>
+ </h1>
+ </div>
+ <!--div class="alert">
+ <div class="input-append" style="margin-bottom:0px;">
+ <input class="input-xxlarge" type="text" placeholder="Search targets" value="browser" />
+ <a class="add-on btn">
+ <i class="icon-remove"></i>
+ </a>
+ <button class="btn" type="button">Search</button>
+ <a class="btn btn-link" href="#">Show all targets</a>
+ </div>
+ </div-->
+ <div id="target-added" class="alert alert-info lead" style="display:none;"></div>
+ <div id="target-removed" class="alert alert-info lead" style="display:none;">
+ <button type="button" class="close" data-dismiss="alert">&times;</button>
+ <strong>1</strong> target deleted from <a href="project-with-targets.html">your project</a>: <a href="#">meta-aarch64</a>
+ </div>
+
+
+{% include "basetable_top.html" %}
+ {% for o in objects %}
+ <tr class="data">
+ <td class="machine">
+ {{o.name}}
+        <a target="_blank" href="http://layers.openembedded.org/layerindex/branch/master/machines/?q=3g-router-image"><i class="icon-share get-info"></i></a>
+ </td>
+ <td class="description">{{o.description}}</td>
+ <td class="machine-file">
+ <code>{{o.file_path}}</code>
+        <a href="http://github.com/embeddedgeeks/meta-embeddedgeeks/blob/master/machines-core/images/3g-router-image.bb" target="_blank"><i class="icon-share get-info"></i></a>
+ </td>
+ <td class="layer"><a href="#">{{o.layer_version.layer.name}}</a></td>
+ <td class="source">{{o.layer_source.name}}</td>
+ <td class="branch">{{o.layer_version.commit}}</td>
+ <td class="build">
+ <a id="build-machine" href="project-with-machines.html?machine=3g-router-image" class="btn btn-block" style="display:none;">
+ Build machine
+ </a>
+ <a id="add-layer" href="#" class="btn btn-block nopop" title="1 layer added">
+ <i class="icon-plus"></i>
+ Add layer
+ <i class="icon-question-sign get-help" title="To build this machine, you must first add the meta-embeddedgeeks layer to your project"></i>
+ </a>
+ </td>
+ </tr>
+ {% endfor %}
+
+{% include "basetable_bottom.html" %}
+
+{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/mrb_section.html b/bitbake/lib/toaster/toastergui/templates/mrb_section.html
new file mode 100644
index 0000000000..73031e2d1f
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/mrb_section.html
@@ -0,0 +1,108 @@
+{% load static %}
+{% load projecttags %}
+{% load humanize %}
+
+
+{%if mru.count > 0%}
+
+ <div class="page-header top-air">
+ <h1>
+ Latest builds
+ </h1>
+ </div>
+ <div id="latest-builds">
+ {% for build in mru %}
+ <div class="alert {%if build.outcome == build.SUCCEEDED%}alert-success{%elif build.outcome == build.FAILED%}alert-error{%else%}alert-info{%endif%} {% if MANAGED and build.project %}project-name{% endif %} ">
+ {% if MANAGED and build.project %}
+ <span class="label {%if build.outcome == build.SUCCEEDED%}label-success{%elif build.outcome == build.FAILED%}label-danger{%else%}label-info{%endif%}"> {{build.project.name}} </span>
+ {% endif %}
+
+ <div class="row-fluid">
+ <div class="span3 lead">
+ {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
+ <a href="{%url 'builddashboard' build.pk%}" class="{%if build.outcome == build.SUCCEEDED %}success{%else%}error{%endif%}">
+ {% endif %}
+ <span data-toggle="tooltip" {%if build.target_set.all.count > 1%}title="Targets: {%for target in build.target_set.all%}{{target.target}} {%endfor%}"{%endif%}>{{build.target_set.all.0.target}} {%if build.target_set.all.count > 1%}(+ {{build.target_set.all.count|add:"-1"}}){%endif%}
+ </span>
+ {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
+ </a>
+ {% endif %}
+ </div>
+ <div class="span2 lead">
+ {% if build.completed_on|format_build_date %}
+ {{ build.completed_on|date:'d/m/y H:i' }}
+ {% else %}
+ {{ build.completed_on|date:'H:i' }}
+ {% endif %}
+ </div>
+ {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
+ <div class="span2 lead">
+ {% if build.errors_no %}
+ <i class="icon-minus-sign red"></i> <a href="{%url 'builddashboard' build.pk%}#errors" class="error">{{build.errors_no}} error{{build.errors_no|pluralize}}</a>
+ {% endif %}
+ </div>
+ <div class="span2 lead">
+ {% if build.warnings_no %}
+ <i class="icon-warning-sign yellow"></i> <a href="{%url 'builddashboard' build.pk%}#warnings" class="warning">{{build.warnings_no}} warning{{build.warnings_no|pluralize}}</a>
+ {% endif %}
+ </div>
+ <div class="lead ">
+ <span class="lead{%if not MANAGED or not build.project%} pull-right{%endif%}">
+ Build time: <a href="{% url 'buildtime' build.pk %}">{{ build.timespent|sectohms }}</a>
+ </span>
+ {% if MANAGED and build.project %}
+ <a class="btn {%if build.outcome == build.SUCCEEDED%}btn-success{%elif build.outcome == build.FAILED%}btn-danger{%else%}btn-info{%endif%} pull-right" onclick="scheduleBuild({% url 'xhr_projectbuild' build.project.id as bpi%}{{bpi|json}}, {{build.project.name|json}}, {{build.get_sorted_target_list|mapselect:'target'|json}})">Run again</a>
+ {% endif %}
+ </div>
+ {%endif%}
+ {%if build.outcome == build.IN_PROGRESS %}
+ <div class="span4">
+ <div class="progress" style="margin-top:5px;" data-toggle="tooltip" title="{{build.completeper}}% of tasks complete">
+ <div style="width: {{build.completeper}}%;" class="bar"></div>
+ </div>
+ </div>
+ <div class="lead pull-right">ETA: in {{build.eta|naturaltime}}</div>
+ {%endif%}
+ </div>
+ </div>
+
+ {% endfor %}
+ </div>
+
+<script>
+
+function _makeXHRBuildCall(url, data, onsuccess, onfail) {
+ $.ajax( {
+ type: "POST",
+ url: url,
+ data: data,
+ headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
+ success: function (_data) {
+ if (_data.error != "ok") {
+ alert(_data.error);
+ } else {
+ if (onsuccess != undefined) onsuccess(_data);
+ }
+ },
+ error: function (_data) {
+ alert("Call failed");
+ console.log(_data);
+ if (onfail) onfail(data);
+ } });
+}
+
+
+function scheduleBuild(url, projectName, buildlist) {
+ console.log("scheduleBuild");
+ _makeXHRBuildCall(url, {targets: buildlist.join(" ")}, function (_data) {
+
+ $('#latest-builds').prepend('<div class="alert alert-info" style="padding-top:0px">' + '<span class="label label-info" style="font-weight: normal; margin-bottom: 5px; margin-left:-15px; padding-top:5px;">'+projectName+'</span><div class="row-fluid">' +
+ '<div class="span4 lead">' + buildlist.join(" ") +
+ '</div><div class="span4 lead pull-right">Build queued. Your build will start shortly.</div></div></div>');
+ });
+}
+
+</script>
+
+{%endif%}
+
diff --git a/bitbake/lib/toaster/toastergui/templates/newproject.html b/bitbake/lib/toaster/toastergui/templates/newproject.html
index 8f1867a94f..512a8fa68f 100644
--- a/bitbake/lib/toaster/toastergui/templates/newproject.html
+++ b/bitbake/lib/toaster/toastergui/templates/newproject.html
@@ -3,41 +3,91 @@
{% load humanize %}
{% block pagecontent %}
<div class="row-fluid">
- <div class="span6">
- <div class="page-header">
- <h1>Create a new project</h1>
- </div>
- <div class="container-fluid">
- {% if alert %}
- <div class="alert alert-error row-fluid" role="alert">{{alert}}</div>
- {% endif %}
- </div>
- <form method="POST">{% csrf_token %}
- <fieldset>
- <label>Project name <span class="muted">(required)</span></label>
- <input type="text" class="input-xlarge" required name="projectname" value="{{projectname}}">
- <label class="project-form">
- Project owner
- <i class="icon-question-sign get-help" title="The go-to person for this project"></i>
- </label>
- <input type="text" name="username" value="{{username}}">
- <label class="project-form">Owner's email</label>
- <input type="email" class="input-large" name="email" value="{{email}}">
- <label class="project-form">
- Yocto Project version
- <i class="icon-question-sign get-help" title="This sets the branch for the Yocto Project core layers (meta, meta-yocto and meta-yocto-bsp), and for the layers you use from the OpenEmbedded Metadata Index"></i>
- </label>
- <select name="projectversion" id="projectversion">
- <!-- TODO: XHR data from http://layers.openembedded.org/layerindex/branch/master/layers/ -->
- <option value="master" {%if projectversion == "master" %}selected{%endif%}>master</option>
- </select>
- </fieldset>
+ <div class="page-header">
+ <h1>Create a new project</h1>
+ </div>
+ <div class="container-fluid">
+ {% if alert %}
+ <div class="alert alert-error row-fluid" role="alert">{{alert}}</div>
+ {% endif %}
+ </div>
+ {% if releases.count > 0 %}
+ <form method="POST">{% csrf_token %}
+ <fieldset>
+ <label>Project name <span class="muted">(required)</span></label>
+ <input type="text" class="input-xlarge" required id="new-project-name" name="projectname">
+ {% if releases.count > 1 %}
+ <label class="project-form">
+ Release
+ <i class="icon-question-sign get-help" title="The version of the build system you want to use"></i>
+ </label>
+ <select name="projectversion" id="projectversion">
+ {% for release in releases %}
+ <option value="{{release.id}}"{%if projectversion == release.id %} selected{%endif%}>{{release.description}}</option>
+ {% endfor %}
+ </select>
+ {% for release in releases %}
+ <div class="row-fluid helptext" id="description-{{release.id}}" style="display: none">
+ <span class="help-block span5">{{release.helptext|safe}}</span>
+ </div>
+ {% endfor %}
+ {% else %}
+ <input type="hidden" name="projectversion" value="{{releases.0.id}}"/>
+ {% endif %}
+ </fieldset>
- <div class="form-actions">
- <input type="submit" class="btn btn-primary btn-large" value="Create project"/>
- </div>
- </form>
- </div>
- </div>
- </div>
+ <div class="form-actions">
+ <input type="submit" class="btn btn-primary btn-large" value="Create project"></input>
+ <span class="help-inline" style="vertical-align:middle;">To create a project, you need to enter a project name</span>
+ </div>
+ </form>
+ {% else %}
+ <br/>
+ <div class="alert alert-warning row-fluid span6">
+ <h3>No releases configured</h3>
+ <p>
+ It looks like Toaster releases have not been configured properly. Contact the person who set up Toaster, and tell them about it.
+ </p>
+ <p>
+ If you are the Toaster administrator, we are sorry: setting up Toaster is not easy.
+ <ul>
+ <li><a href="{% url 'admin:orm_release_changelist' %}">Log in to the Django administration interface</a> and check the "Releases" section.</li>
+ <li>Check out the <a href="https://wiki.yoctoproject.org/wiki/Setting_up_a_hosted_managed_mode_for_Toaster#Releases">documentation about configuring releases</a></li>
+ </ul>
+ </p>
+ </div>
+ {% endif %}
+
+ </div>
+ <script type="text/javascript">
+ $(document).ready(function () {
+ // hide the new project button
+ $("#new-project-button").hide();
+ $('.btn-primary').attr('disabled', 'disabled');
+
+ // enable submit button when all required fields are populated
+ $("input#new-project-name").keyup(function() {
+ if ($("input#new-project-name").val().length > 0 ){
+ $('.btn-primary').removeAttr('disabled');
+ $(".help-inline").css('visibility','hidden');
+ }
+ else {
+ $('.btn-primary').attr('disabled', 'disabled');
+ $(".help-inline").css('visibility','visible');
+ }
+ });
+
+ // show relevant help text for the selected release
+ var selected_release = $('select').val();
+ $("#description-" + selected_release).show();
+
+
+ $('select').change(function(){
+ var new_release = $('select').val();
+ $(".helptext").hide();
+ $('#description-' + new_release).fadeIn();
+ });
+ })
+ </script>
+</div>
{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/package_detail_base.html b/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
index cd015d3555..dfeba55058 100644
--- a/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
+++ b/bitbake/lib/toaster/toastergui/templates/package_detail_base.html
@@ -135,11 +135,14 @@
</dt>
<dd class="iscommit">{{package.recipe.layer_version.commit}}</dd>
+
+ {% if not MANAGED or not build.project %}
<dt>
Layer directory
<i class="icon-question-sign get-help" title="Path to the layer providing the recipe that builds this package"></i>
</dt>
<dd><code>{{package.recipe.layer_version.layer.local_path}}</code></dd>
+ {% endif %}
</dl>
</div> <!-- row4 well -->
{% endblock twocolumns %}
diff --git a/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html b/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
index a36464cad5..4ba472f2d6 100644
--- a/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
+++ b/bitbake/lib/toaster/toastergui/templates/package_included_reverse_dependencies.html
@@ -15,7 +15,7 @@
<div class="tab-content">
<div class="tab-pane active" id="brought-in-by">
- {% ifequal reverse_deps|length 0 %}
+ {% ifequal reverse_count 0 %}
<div class="alert alert-info">
<strong>{{package.fullpackagespec}}</strong> has no reverse runtime dependencies.
</div>
@@ -29,7 +29,7 @@
<a href="{% url 'package_included_detail' build.id target.id reverse_dep.package_id %}">
{{reverse_dep.package.name}}
</a>
- <script>fmtAliasHelp("{{reverse_dep.name}}", "{{reverse_dep.alias}}", true)</script>
+ <script>fmtAliasHelp("{{reverse_dep.package.name}}", "{{reverse_dep.alias}}", true)</script>
</td>
{% else %}
<td>
diff --git a/bitbake/lib/toaster/toastergui/templates/project.html b/bitbake/lib/toaster/toastergui/templates/project.html
index c859f6bcd1..2979db74ed 100644
--- a/bitbake/lib/toaster/toastergui/templates/project.html
+++ b/bitbake/lib/toaster/toastergui/templates/project.html
@@ -1,362 +1,412 @@
-{% extends "base.html" %}
+{% extends "baseprojectpage.html" %}
+<!--
+vim: expandtab tabstop=2
+-->
{% load projecttags %}
{% load humanize %}
-{% block pagecontent %}
+{% load static %}
-<script>
-var buildrequests = [];
-
-function targetInPage(targetname) {
- return targetname in $("ul#target-list > li > a").map(function (i, x) {return x.text});
-}
-
-function setEventHandlers() {
- $("i#del-target-icon").unbind().click(function (evt) {
- console.log("del target", evt.target.attributes["x-data"].value);
- postEditAjaxRequest({"targetDel": evt.target.attributes["x-data"].value});
- });
- $("button#add-target-button").unbind().click( function (evt) {
- if ( $("input#target")[0].value.length == 0) {
- alert("cannot add empty target");
- return;
- }
- postEditAjaxRequest({"targetAdd" : $("input#target")[0].value});
- });
-}
-
-function onEditPageUpdate(data) {
- // update targets
- var i; var orightml = "";
-
- $("span#target-count").html(data.targets.length);
- for (i = 0; i < data.targets.length; i++) {
- if (! targetInPage(data.targets[i].target)) {
- orightml += '<li><a href="#">'+data.targets[i].target;
- if (data.targets[i].task != "" && data.targets[i].task !== null) {
- orightml += " ("+data.targets[i].task+")";
- }
- orightml += '</a><i title="" data-original-title="" class="icon-trash" id="del-target-icon" x-data="'+data.targets[i].pk+'"></i></li>';
- }
- }
-
- $("ul#target-list").html(orightml);
-
- // update recent builds
-
- setEventHandlers();
-}
-
-function onEditAjaxSuccess(data, textstatus) {
- console.log("XHR returned:", data, "(" + textstatus + ")");
- if (data.error != "ok") {
- alert("error on request:\n" + data.error);
- return;
- }
- onEditPageUpdate(data);
-}
-
-function onEditAjaxError(jqXHR, textstatus, error) {
- alert("XHR errored:\n" + error + "\n(" + textstatus + ")");
-}
-
-function postEditAjaxRequest(reqdata) {
- var ajax = $.ajax({
- type:"POST",
- data: $.param(reqdata),
- url:"{% url 'xhr_projectedit' project.id%}",
- headers: { 'X-CSRFToken': $.cookie("csrftoken")},
- success: onEditAjaxSuccess,
- error: onEditAjaxError,
- })
-}
-
-$(document).ready(function () {
- setEventHandlers();
-});
+{% block projectinfomain %}
+<script src="{% static "js/angular.min.js" %}"></script>
+<script src="{% static "js/angular-animate.min.js" %}"></script>
+<script src="{% static "js/angular-cookies.min.js" %}"></script>
+<script src="{% static "js/angular-route.min.js" %}"></script>
+<script src="{% static "js/angular-sanitize.min.js" %}"></script>
+<script src="{% static "js/ui-bootstrap-tpls-0.11.0.js" %}"></script>
-</script>
+<div id="main" role="main" ng-app="project" ng-controller="prjCtrl" class="top-padded">
- <div class="page-header">
- <h1>
- {{project.name}}
- {% if project.build_set.all.count == 0 %}
- <small>No builds yet</small>
- {% else %}
- <small><a href="#">{{project.build_set.all.count}} builds</a></small>
- {% endif %}
- </h1>
- </div>
+ <!-- project name -->
+ <div class="page-header">
+ <h1>{[project.name]}</h1>
+ </div>
+
+ <!-- alerts section 1-->
+ <div ng-repeat="a in zone1alerts">
+ <div class="alert alert-dismissible lead" role="alert" ng-class="a.type"><button type="button" class="close" data-dismiss="alert"><span aria-hidden="true">&times;</span></button>
+ <span ng-bind-html="a.text"></span>
+ </div>
+ </div>
+
+ <!-- custom templates for ng -->
+
+ <script type="text/ng-template" id="suggestion_details">
+ <a> {[match.model.name]} {[match.model.detail]} </a>
+ </script>
- <div class="well">
- <!--div class="control-group error"-->
- <button id="build-all-button" class="btn btn-primary btn-large">Build all added targets</button>
- <div class="input-append build-form controls">
- <input class="huge input-xxlarge" placeholder="Or enter the target you want to build" autocomplete="off" data-minlength="1" data-autocomplete="off" data-provide="typeahead" data-source="" type="text">
- <button id="build-button" class="btn btn-large" disabled="">Build</button>
- </div>
- <script>
-/* Provide XHR calls for the "build" buttons.*/
-$("button#build-all-button").click( function (evt) {
- var ajax = $.ajax({
- type:"POST",
- url:"{% url 'xhr_projectbuild' project.id %}",
- headers: { 'X-CSRFToken': $.cookie("csrftoken")},
- success: function (data, textstatus) {
- if (data.error != "ok") {
- alert("XHR fail: " + data.error );
- }
- },
- error: function (jqXHR, textstatus, error) { alert("XHR errored:" + error + "(" + textstatus + ")"); },
- })
-});
-
- </script>
- <!--span class="help-inline">This target is not provided <br />by any of your added layers
- <i class="icon-question-sign get-help get-help-red" title="Review your list of added layers to make sure one of them provides core-image-xyz. Clicking on a layer name will give you all the information Toaster has about the layer"></i>
- </span>
- </div-->
+ <!-- modal dialogs -->
+ <script type="text/ng-template" id="dependencies_modal">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">x</button>
+ <h3><span ng-bind="layerAddName"></span> dependencies</h3>
+ </div>
+ <div class="modal-body">
+ <p><strong>{[layerAddName]}</strong> depends on some layers that are not added to your project. Select the ones you want to add:</p>
+ <ul class="unstyled">
+ <li ng-repeat="ld in items">
+ <label class="checkbox">
+ <input type="checkbox" ng-model="selectedItems[ld.id]"> {[ld.name]}
+ </label>
+ </li>
+ </ul>
+ </div>
+ <div class="modal-footer">
+ <button class="btn btn-primary" ng-click="ok()">Add layers</button>
+ <button class="btn" ng-click="cancel()">Cancel</button>
+ </div>
+ </form>
+ </script>
+
+
+ <script type="text/ng-template" id="change_version_modal">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">x</button>
+ <h3>Changing release to {[releaseName]}</h3>
+ </div>
+ <div class="modal-body">
+ <p>The following project layers do not exist for {[releaseName]}:</p>
+ <ul>
+ <li ng-repeat="i in items"><span class="layer-info" data-toggle="tooltip" tooltip="{[i.detail]}">{[i.name]}</span></li>
+ </ul>
+ <p>If you change the release to {[releaseName]}, the above layers will be deleted from your project layers.</p>
+ </div>
+ <div class="modal-footer">
+ <button class="btn btn-primary" ng-click="ok()">Change release and delete layers</button>
+ <button class="btn" ng-click="cancel()">Cancel</button>
+ </div>
+ </script>
+
+ <script type="text/ng-template" id="target_display">
+ <div ng-switch on="t.task.length">
+ <div ng-switch-when="0">{[t.target]}</div>
+ <div ng-switch-default>{[t.target]}:{[t.task]}</div>
+ </div>
+ </script>
+
+
+ <!-- build form -->
+ <div class="well">
+ <form class="build-form" ng-submit="targetNamedBuild()">
+ <div class="input-append controls">
+ <input type="text" class="huge span7" placeholder="Type the target(s) you want to build" autocomplete="off" ng-model="targetName" typeahead="e.name for e in getSuggestions('targets', $viewValue)|filter:$viewValue" typeahead-template-url="suggestion_details" ng-disabled="!layers.length"/>
+ <button type="submit" id="build-button" class="btn btn-large btn-primary" ng-disabled="!targetName.length">
+ Build
+ </button>
+ </div>
+ <i class="icon-question-sign get-help get-help-blue" title="Type the name of one or more targets you want to build, separated by a space. You can also specify a task by appending a semicolon and a task name to a target name, like so: <code>core-image-minimal:do_build</code>"></i>
+ <p>
+ <a href="{% url 'targets' %}">
+ View all targets
+ </a>
+ {% if completedbuilds.count %}
+ | <a href="{% url 'projectbuilds' project.id %}">View all project builds ({{completedbuilds.count}})</a>
+ {% endif %}
+ </p>
+ </form>
+ </div>
+
+ <a id="buildslist"></a>
+ <h2 class="air" ng-if="builds.length">Latest builds</h2>
+ <div class="animate-repeat alert" ng-repeat="b in builds track by b.id" ng-class="{'queued':'alert-info', 'deleted':'alert-info', 'in progress': 'alert-info', 'failed':'alert-error', 'completed':{'In Progress':'alert-info', 'Succeeded':'alert-success', 'Failed':'alert-error'}[b.build[0].status]}[b.status]">
+ <div class="row-fluid">
+ <switch ng-switch="b.status">
+
+ <case ng-switch-when="failed">
+ <div class="lead span3"> <span ng-repeat="t in b.targets" ng-include src="'target_display'"></span></div>
+ <div class="row-fluid">
+ <div class="air well" ng-repeat="e in b.errors">
+ Error type {[e.type]}: <pre>{[e.msg]}</pre>
+ </div>
</div>
+ </case>
- <div id="meta-tizen-alert" class="alert alert-info lead air" style="display:none;">
- <button type="button" class="close" data-dismiss="alert">?</button>
- You have added <strong>6</strong> layers: <a href="#">meta-tizen</a> and its dependencies (<a href="#">meta-efl</a>, <a href="#">meta-intel</a>, <a href="#">meta-multimedia</a>, <a href="#">meta-oe</a> and <a href="#">meta-ruby</a>).
+ <case ng-switch-when="queued">
+ <div class="lead span5"> <span ng-repeat="t in b.targets" ng-include src="'target_display'"></span> </div>
+ <div class="span4 lead" >Build queued
+ <i title="This build will start as soon as a build server is available" class="icon-question-sign get-help get-help-blue heading-help" data-toggle="tooltip"></i>
</div>
+ <button class="btn pull-right btn-info" ng-click="buildCancel(b.id)">Cancel</button>
+ </case>
+ <case ng-switch-when="created">
+ <div class="lead span3"> <span ng-repeat="t in b.targets" ng-include src="'target_display'"></span> </div>
+ <div class="span6" >
+ <span class="lead">Creating build</span>
+ </div>
+ <button class="btn pull-right btn-info" ng-click="buildCancel(b.id)">Cancel</button>
+ </case>
+ <case ng-switch-when="deleted">
+ <div class="lead span3"> <span ng-repeat="t in b.targets" ng-include src="'target_display'"></span> </div>
+ <div class="span6" id="{[b.id]}-deleted" >
+ <span class="lead">Build deleted</span>
+ </div>
+ <button class="btn pull-right btn-info" ng-click="buildDelete(b.id)">Close</button>
+ </case>
+ <case ng-switch-when="in progress">
+ <switch ng-switch="b.build.length">
+ <case ng-switch-when="0">
+ <div class="lead span5"> <span ng-repeat="t in b.targets" ng-include src="'target_display'"></span> </div>
+ <div class="span4 lead">
+ Checking out layers
+ </div>
+ </case>
+ <case ng-switch-default="">
+ <div class="lead span3"> <span ng-repeat="t in b.targets" ng-include src="'target_display'"></span> </div>
+ <div class="span4 offset1" >
+ <div class="progress" style="margin-top:5px;" data-toggle="tooltip" tooltip="{[b.build[0].completeper]}% of tasks complete">
+ <div style="width: {[b.build[0].completeper]}%;" class="bar"></div>
+ </div>
+ </div>
+ <div class="text-right lead">ETA: {[b.build[0].eta|date:"HH:mm:ss"]}</div>
+ </case>
+ </case>
- {% if builds|length > 0 or buildrequests|length > 0 %}
- <h2 class="air">Recent Builds</h2>
- <div id="scheduled-builds">
- {% for br in buildrequests %}
-<div class="alert {% if br.0.state == br.0.REQ_FAILED%}alert-error{%else%}alert-info{%endif%}" id="build-request">
- <div class="row-fluid">
- <div class="lead span4">
+ <case ng-switch-when="completed">
+ <div class="lead span3"><a class="success" href="{[b.build[0].build_page_url]}"><span ng-repeat="t in b.targets" ng-include src="'target_display'"></span></a></div>
+ <div class="span2 lead">
+ <ngif ng-if="b.build[0].completed_on - todaydate > 0">
+ {[b.build[0].completed_on|date:'HH:mm']}
+ </ngif>
+ <ngif ng-if="b.build[0].completed_on - todaydate < 0">
+ {[b.build[0].completed_on|date:'dd/MM/yy HH:mm']}
+ </ngif>
+ </div>
+ <div class="span2">
+ <ngif ng-if="b.build[0].errors">
<span>
- {{br.0.brtarget_set.all.0.target}} {%if br.brtarget_set.all.count > 1%}(+ {{br.brtarget_set.all.count|add:"-1"}}){%endif%} {{br.1.machine.value}} (Created {{br.0.created}})
+ <i class="icon-minus-sign red lead"></i>
+ <a href="{[b.build[0].build_page_url]}#errors" class="lead error">{[b.build[0].errors]}
+ <ng-pluralize count="b.build[0].errors" when="{'1':'error','other':'errors'}"></ng-pluralize></a>
</span>
+ </ngif>
</div>
<div class="span2">
- {{br.0.get_state_display}}
+ <ngif ng-if="b.build[0].warnings">
+ <span>
+ <i class="icon-warning-sign yellow lead"></i>
+ <a href="{[b.build[0].build_page_url]}#warnings" class="lead warning">{[b.build[0].warnings]}
+ <ng-pluralize count="b.build[0].warnings" when="{'1':'warning','other':'warnings'}"></ng-pluralize></a>
+ </span>
+ </ngif>
</div>
- <div class="span8">
-{% if br.state == br.REQ_FAILED%}
- {% for bre in br.0.brerror_set.all %} {{bre.errmsg}} ({{bre.errtype}}) <br/><hr/><code>{{bre.traceback}}</code>{%endfor%}
-{%endif%}
+ <div> <span class="lead">Build time: <a href="{[b.build[0].build_time_page_url]}">{[b.build[0].build_time|timediff]}</a></span>
+ <button class="btn pull-right" ng-class="{'Succeeded': 'btn-success', 'Failed': 'btn-danger'}[b.build[0].status]"
+ ng-click="targetExistingBuild(b.targets)">Run again</button>
+
</div>
+ </case>
- </div>
-</div>
- {% endfor %}
+ <case ng-switch-default="">
+ <div>FIXME!</div>
+ </case>
+ </switch>
+ <div class="lead pull-right">
+ </div>
+ </div>
+ </div>
+
+ <h2 class="air">Project configuration</h2>
+ <!-- alerts section 2 -->
+ <div ng-repeat="a in zone2alerts">
+ <div class="alert alert-dismissible lead" role="alert" ng-class="a.type"><button type="button" class="close" data-dismiss="alert"><span aria-hidden="true">&times;</span></button>
+ <span ng-bind-html="a.text"></span>
+ </div>
+ </div>
+
+ <div class="row-fluid">
+
+ <!-- project layers -->
+ <div id="layer-container" class="well well-transparent span4">
+ <h3>
+ Layers <span class="muted counter">({[layers.length]})</span>
+ <i class="icon-question-sign get-help heading-help" title="OpenEmbedded organises metadata into modules called 'layers'. Layers allow you to isolate different types of customizations from each other. <a href='http://www.yoctoproject.org/docs/current/dev-manual/dev-manual.html#understanding-and-creating-layers' target='_blank'>More on layers</a>"></i>
+ </h3>
+ <div class="alert" ng-if="!layers.length">
+ <b>You need to add some layers </b>
+ <p>
+ You can:
+ <ul>
+ <li> <a href="{% url 'layers'%}">View all layers available in Toaster</a>
+ <li> <a href="{% url 'importlayer' %}">Import a layer</a>
+ <li> <a href="https://www.yoctoproject.org/docs/1.6.1/dev-manual/dev-manual.html#understanding-and-creating-layers" target="_blank">Read about layers in the manual</a>
+ </ul>
+ Or type a layer name below.
+ </p>
+ </div>
+ <form ng-submit="layerAdd()">
+ <div class="input-append">
+ <input type="text" class="input-xlarge" id="layer" autocomplete="off" placeholder="Type a layer name" data-minLength="1" ng-model="layerAddName" typeahead="e.name for e in getSuggestions('layers', $viewValue)|filter:$viewValue" typeahead-template-url="suggestion_details" typeahead-on-select="onLayerSelect($item, $model, $label)" typeahead-editable="false" ng-class="{ 'has-error': layerAddName.$invalid }" />
+ <input type="submit" id="add-layer" class="btn" value="Add" ng-disabled="!layerAddName.length"/>
+ </div>
+ {% csrf_token %}
+ </form>
+ <p><a href="{% url 'layers' %}">View all layers</a> | <a href="{% url 'importlayer' %}">Import layer</a></p>
+ <ul class="unstyled configuration-list">
+ <li ng-repeat="l in layers track by l.id" class="animate-repeat">
+ <a href="{[l.layerdetailurl]}" target="_#" class="layer-info" data-toggle="tooltip" tooltip="{[l.giturl]} | {[l.branch.name]}">{[l.name]}</a>
+ <i class="icon-trash" ng-click="layerDel(l.id)" tooltip="Delete"></i>
+ </li>
+ </ul>
</div>
+ <!-- project targets -->
+ <div id="target-container" class="well well-transparent span4">
+ <h3>
+ Targets
+ <i class="icon-question-sign get-help heading-help" title="What you build, often a recipe producing a root file system file (an image). Something like <code>core-image-minimal</code> or <code>core-image-sato</code>"></i>
+ </h3>
+ <form ng-submit="targetNamedBuild()">
+ <div class="input-append">
+ <input type="text" class="input-xlarge" placeholder="Type the target(s) you want to build" autocomplete="off" data-minLength="1" ng-model="targetName1" typeahead="e.name for e in getSuggestions('targets', $viewValue)|filter:$viewValue" typeahead-template-url="suggestion_details" ng-disabled="!layers.length">
+ <button type="submit" id="build-button" class="btn btn-primary" ng-disabled="!targetName1.length">
+ Build </button>
+ </div>
+ {% csrf_token %}
+ </form>
+ <p><a href="{% url 'targets' %}">View all targets</a></p>
+ <div ng-if="frequenttargets.length">
+ <h4 class="air">
+ Most built targets
+ </h4>
+ <ul class="unstyled configuration-list">
+ <li ng-repeat="t in frequenttargets">
+ <label class="checkbox">
+ <input type="checkbox" ng-model="mostBuiltTargets[t]">{[t]}
+ </label>
+ </li>
+ </ul>
+ <button class="btn btn-large btn-primary" ng-disabled="selectedMostBuildTargets()">Build selected targets</button>
+ </div>
+ </div>
-<!-- Lifted from build.html -->
- {% for build in builds %}
-<div class="alert {%if build.outcome == build.SUCCEEDED%}alert-success{%elif build.outcome == build.FAILED%}alert-error{%else%}alert-info{%endif%}">
- <div class="row-fluid">
- <div class="lead span5">
- {%if build.outcome == build.SUCCEEDED%}<i class="icon-ok-sign success"></i>{%elif build.outcome == build.FAILED%}<i class="icon-minus-sign error"></i>{%else%}{%endif%}
- {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
- <a href="{%url 'builddashboard' build.pk%}" class="{%if build.outcome == build.SUCCEEDED %}success{%else%}error{%endif%}">
- {% endif %}
- <span data-toggle="tooltip" {%if build.target_set.all.count > 1%}title="Targets: {%for target in build.target_set.all%}{{target.target}} {%endfor%}"{%endif%}>{{build.target_set.all.0.target}} {%if build.target_set.all.count > 1%}(+ {{build.target_set.all.count|add:"-1"}}){%endif%} {{build.machine}} ({{build.completed_on|naturaltime}})</span>
- {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
- </a>
- {% endif %}
- </div>
- {%if build.outcome == build.SUCCEEDED or build.outcome == build.FAILED %}
- <div class="span2 lead">
- {% if build.errors_no %}
- <i class="icon-minus-sign red"></i> <a href="{%url 'builddashboard' build.pk%}#errors" class="error">{{build.errors_no}} error{{build.errors_no|pluralize}}</a>
- {% endif %}
- </div>
- <div class="span2 lead">
- {% if build.warnings_no %}
- <i class="icon-warning-sign yellow"></i> <a href="{%url 'builddashboard' build.pk%}#warnings" class="warning">{{build.warnings_no}} warning{{build.warnings_no|pluralize}}</a>
- {% endif %}
- </div >
- <div class="lead pull-right">
- Build time: <a href="{% url 'buildtime' build.pk %}">{{ build.timespent|sectohms }}</a>
- </div>
- {%endif%}{%if build.outcome == build.IN_PROGRESS %}
- <div class="span4">
- <div class="progress" style="margin-top:5px;" data-toggle="tooltip" title="{{build.completeper}}% of tasks complete">
- <div style="width: {{build.completeper}}%;" class="bar"></div>
- </div>
- </div>
- <div class="lead pull-right">ETA: in {{build.eta|naturaltime}}</div>
- {%endif%}
+ <!-- project configuration -->
+ <div id="machine-distro" class="well well-transparent span4">
+ <h3>
+ Machine
+ <i class="icon-question-sign get-help heading-help" title="The machine is the hardware for which you want to build. You can only set one machine per project"></i>
+ </h3>
+ <p class="lead" id="select-machine-opposite">
+ <span>{[machine.name]}</span>
+ <i id="change-machine" class="icon-pencil" ng-click="toggle('#select-machine')"></i>
+ </p>
+ <div id="select-machine" style="display: none">
+ <div class="alert alert-info">
+ <strong>Machine changes have a big impact on build outcome.</strong>
+ You cannot really compare the builds for the new machine with the previous ones.
</div>
+ <form ng-submit="edit('#select-machine')" class="input-append">
+ <input type="text" id="machine" autocomplete="off" ng-model="machineName" typeahead="m.name for m in getSuggestions('machines', $viewValue)"/>
+ <input type="submit" id="apply-change-machine" class="btn" type="button" ng-disabled="machineName == machine.name || machineName.length == 0" value="Save"></input>
+ <input type="reset" id="cancel-machine" class="btn btn-link" ng-click="toggle('#select-machine')" value="Cancel"></input>
+ {% csrf_token %}
+ </form>
+ <p><a href="{% url 'machines' %}" class="link">View all machines</a></p>
+ </div>
+ <p class="link-action">
+ <a href="{% url 'projectconf' project.id %}" class="link">Edit configuration variables</a>
+ <i data-original-title="You can set other project configuration options here. Each option, like everything else in the build system, is a variable - value pair" class="icon-question-sign get-help heading-help" title=""></i>
+ </p>
</div>
- {% endfor %}
-<!-- end of lift-->
- {%endif%}
+ </div>
- <h2 class="air">Project configuration</h2>
- <div class="row-fluid">
+ <h2>Project details</h2>
- <div id="layer-container" class="well well-transparent span4">
- <h3>
- Add layers
- <i data-original-title="OpenEmbedded organises metadata into modules called 'layers'. Layers allow you to isolate different types of customizations from each other. <a href='http://www.yoctoproject.org/docs/1.6.1/dev-manual/dev-manual.html#understanding-and-creating-layers' target='_blank'>More on layers</a>" class="icon-question-sign get-help heading-help" title=""></i>
- </h3>
- <form style="margin-top:20px;">
- <div class="input-append">
- <input class="input-xlarge" id="layer" autocomplete="off" placeholder="Type a layer name" data-provide="typeahead" data-source="" data-minlength="1" data-autocomplete="off" type="text">
- <button id="add-layer" class="btn" disabled="">Add</button>
- </div>
- <div id="import-alert" class="alert alert-info" style="display:none;">
- Toaster does not know about this layer. Please <a href="#">import it</a>
- </div>
- <div id="dependency-alert" class="alert alert-info" style="display:none;">
- <p><strong>meta-tizen</strong> depends on the layers below. Check the ones you want to add: </p>
- <ul class="unstyled">
- <li>
- <label class="checkbox">
- <input checked="checked" type="checkbox">
- meta-efl
- </label>
- </li>
- <li>
- <label class="checkbox">
- <input checked="checked" type="checkbox">
- meta-intel
- </label>
- </li>
- <li>
- <label class="checkbox">
- <input checked="checked" type="checkbox">
- meta-multimedia
- </label>
- </li>
- <li>
- <label class="checkbox">
- <input checked="checked" type="checkbox">
- meta-oe
- </label>
- </li>
- <li>
- <label class="checkbox">
- <input checked="checked" type="checkbox">
- meta-ruby
- </label>
- </li>
- </ul>
- <button id="add-layer-dependencies" class="btn btn-info add-layer">Add layers</button>
- </div>
-
- <p><a href="#">Import your layer</a> | <a href="#">View all layers</a></p>
- </form>
-
- <h4 class="air">
- Added layers
- <span class="muted counter">{{project.projectlayer_set.count}}</span>
- <i data-original-title="Your added layers will be listed in this same order in your <code>bblayers.conf</code> file" class="icon-question-sign get-help heading-help" title=""></i>
- </h4>
- <ul class="unstyled configuration-list">
- {% for pl in project.projectlayer_set.all %}
- <li>
- <a href="#">{{pl.name}} (<span class="layer-version">{{pl.giturl}}</span>)</a>
- {% if pl.optional %}
- <i title="" data-original-title="" class="icon-trash" id="del-layer-icon" x-data="{{pl.pk}}"></i>
- {% endif %}
- </li>
- {% endfor %}
- </ul>
- </div>
+ <!-- alerts section 3 -->
+ <div ng-repeat="a in zone3alerts">
+ <div class="alert alert-dismissible lead" role="alert" ng-class="a.type"><button type="button" class="close" data-dismiss="alert"><span aria-hidden="true">&times;</span></button>
+ <span ng-bind-html="a.text"></span>
+ </div>
+ </div>
+
+
+ <div id="project-details" class="well well-transparent">
+ <h3>Project name</h3>
+ <p class="lead" id="change-project-name-opposite">
+ <span >{[project.name]}</span>
+ <i class="icon-pencil" ng-click="toggle('#change-project-name')" ></i>
+ </p>
+ <div id="change-project-name" style="display:none;">
+ <form ng-submit="edit('#change-project-name')" class="input-append">
+ <input type="text" class="input-xlarge" id="type-project-name" ng-model="projectName">
+ <input type="submit" class="btn" value="Save" ng-disabled="project.name == projectName"/>
+ <input type="reset" class="btn btn-link" value="Cancel" ng-click="toggle('#change-project-name')">
+ </form>
+ </div>
- <div id="target-container" class="well well-transparent span4">
- <h3>
- Add targets
- <i data-original-title="A target is what you want to build, usually an image recipe that produces a root file system" class="icon-question-sign get-help heading-help" title=""></i>
- </h3>
- <form style="margin-top:20px;">
- <div class="input-append">
- <input id="target" class="input-xlarge" autocomplete="off" placeholder="Type a target name" data-provide="typeahead" data-source="" data-minlength="1" data-autocomplete="off" type="text">
- <button id="add-target-button" class="btn" type="button">Add</button>
- </div>
-
- <p><a href="#" class="link">View all targets</a></p>
- </form>
- <h4 class="air">
- Added targets
- <span id="target-count" class="muted counter">{{project.projecttarget_set.count}}</span>
- </h4>
- <ul class="unstyled configuration-list" id="target-list">
- {% for target in project.projecttarget_set.all %}
- {% if target %}
- <li>
- <a href="#">{{target.target}}{% if target.task%} (target.task){%endif%}</a>
- {% if target.notprovided %}
- <i title="" data-original-title="" id="msg1" class="icon-exclamation-sign get-help-yellow" data-title="<strong>Target may not be provided</strong>" data-content="From the layer information it currently has, Toaster thinks this target is not provided by any of your added layers. If a target is not provided by one of your added layers, the build will fail.<h5>What Toaster suggests</h5><p>The <a href='#'>meta-abc</a> and <a href='#'>meta-efg</a> layers provide core-image-notprovided. You could add one of them to your project.</p><button class='btn btn-block'>Add meta-abc</button><button class='btn btn-block'>Add meta-efg</button><button id='dismiss1' class='btn btn-block btn-info'>Stop showing this message</button>"></i>
- {% elif target.notknown %}
- <i title="" data-original-title="" id="msg2" class="icon-exclamation-sign get-help-yellow" data-title="<strong>Target may not be provided</strong>" data-content="From the layer information it currently has, Toaster thinks this target is not provided by any of your added layers. If a target is not provided by one of your added layers, the build will fail.<h5>What Toaster suggests</h5><p>Review your added layers to make sure one of them provides core-image-unknown. Clicking on a layer name will give you all the information Toaster has about the layer. </p> <button class='btn btn-block btn-info'>Stop showing this message</button>"></i>
- {% endif %}
- <i title="" data-original-title="" class="icon-trash" id="del-target-icon" x-data="{{target.pk}}"></i>
- </li>
- {% endif %}
- {% endfor %}
-
-
- </ul>
- </div>
- <div class="well well-transparent span4">
- <h3>
- Set machine
- <i data-original-title="The machine is the hardware for which you want to build. You can only set one machine per project" class="icon-question-sign get-help heading-help" title=""></i>
- </h3>
- <p class="lead">
- {{machine}}
- <i title="" data-original-title="" class="icon-pencil"></i>
- </p>
- <h3>
- Set distro
- <i data-original-title="When you build an image using the Yocto Project and do not alter the distro, you are creating a Poky distribution" class="icon-question-sign get-help heading-help" title=""></i>
- </h3>
- <p class="lead">
- {{distro}}
- <i title="" data-original-title="" class="icon-pencil"></i>
- </p>
- <p class="localconf">
- <a href="#" class="link">Edit the <code>local.conf</code> file</a>
- <i data-original-title="The <code>local.conf</code> file is where other project configuration options are set. Pretty much any configuration option can be set in this file. Each option, like everything else in the build system, is a variable - value pair" class="icon-question-sign get-help heading-help" title=""></i>
- </p>
- </div>
- </div>
+ <h3>
+ Release
+ <i class="icon-question-sign get-help heading-help" title="The version of the build system you want to use"></i>
+ </h3>
+ <p class="lead" id="change-project-version-opposite">
+ <span id="project-version">{[project.release.desc]}</span>
+ <i id="change-version" class="icon-pencil" ng-click="toggle('#change-project-version')" ></i>
+ </p>
+ <div class="div-inline" id="change-project-version" style="display:none;">
+ <form ng-submit="test('#change-project-version')" class="input-append">
+ <select id="select-version" ng-model="projectVersion">
+ <option ng-repeat="r in releases" value="{[r.id]}" ng-selected="r.id == project.release.id">{[r.description]}</option>
+ </select>
+ <input type="submit" class="btn" style="margin-left:5px;" value="Save" ng-disabled="project.release.id == projectVersion"/>
+ <input type="reset" class="btn btn-link" value="Cancel" ng-click="toggle('#change-project-version')" ng-disabled="project.release.id == projectVersion"/>
+
+ </form>
+ </div>
+ </div>
+
+<!-- end main -->
+</div>
+
+
+<!-- load application logic -->
+<script src="{% static "js/projectapp.js" %}"></script>
+
+<!-- dump initial data for use in the angular app -->
+<script>
+angular.element(document).ready(function() {
+ scope = angular.element("#main").scope();
+ scope.urls = {};
+ scope.urls.xhr_build = "{% url 'xhr_projectbuild' project.id %}";
+ scope.urls.xhr_edit = "{% url 'xhr_projectedit' project.id %}";
+ scope.urls.xhr_datatypeahead = "{% url 'xhr_datatypeahead' %}";
+ scope.urls.layers = "{% url 'layers' %}";
+ scope.urls.targets = "{% url 'targets' %}";
+ scope.urls.importlayer = "{% url 'importlayer'%}";
+ scope.urls.layer = "{% url 'layerdetails' %}";
+ scope.project = {{prj|json}};
+ scope.builds = {{builds|json}};
+ scope.layers = {{layers|json}};
+ scope.targets = {{targets|json}};
+ scope.frequenttargets = {{freqtargets|json}};
+ scope.machine = {{machine|json}};
+ scope.releases = {{releases|json}};
+
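+ // Start of the current day (UTC) in epoch milliseconds; builds that completed after this
+ // point are shown with a time-of-day stamp only, older ones with a full date (see the
+ // completed_on comparisons in the "completed" case above).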
+ var now = (new Date()).getTime();
+ scope.todaydate = now - (now % 86400000);
+
+ scope.zone1alerts = [];
+ scope.zone2alerts = [];
+ scope.zone3alerts = [];
+
+ scope.mostBuiltTargets = {};
+
+ scope.updateDisplayWithCommands();
+ scope.validateData();
+
+ scope.$digest();
+
+ });
+</script>
- <h2>Project details</h2>
-
- <div class="well well-transparent">
- <h3>Project name</h3>
- <p class="lead">
- {{project.name}}
- <i title="" data-original-title="" class="icon-pencil"></i>
- </p>
- <h3>Project owner</h3>
- <p class="lead">
- {{puser.username}}
- <i title="" data-original-title="" class="icon-pencil"></i>
- </p>
- <h3>Owner's email</h3>
- <p class="lead">
- {{puser.email}}
- <i title="" data-original-title="" class="icon-pencil"></i>
- </p>
- <h3>Yocto Project version</h3>
- <p class="lead">
- {{project.branch}} - {{project.short_description}}
- <i title="" data-original-title="" class="icon-pencil"></i>
- </p>
- </div>
{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/projectbuilds.html b/bitbake/lib/toaster/toastergui/templates/projectbuilds.html
new file mode 100644
index 0000000000..8c5942c7cb
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/projectbuilds.html
@@ -0,0 +1,59 @@
+{% extends "baseprojectpage.html" %}
+{% load projecttags %}
+{% load humanize %}
+
+{% block localbreadcrumb %}
+<li>Project builds</li>
+{% endblock %}
+
+{% block projectinfomain %}
+ <div class="page-header">
+ <h1>
+ All builds
+ <i class="icon-question-sign get-help heading-help" title="This page lists all the layers compatible with Yocto Project 1.7 'Dxxxx' that Toaster knows about. They include community-created layers suitable for use on top of OpenEmbedded Core and any layers you have imported"></i>
+ </h1>
+ </div>
+ <!--div class="alert">
+ <div class="input-append" style="margin-bottom:0px;">
+ <input class="input-xxlarge" type="text" placeholder="Search layers" value="browser" />
+ <a class="add-on btn">
+ <i class="icon-remove"></i>
+ </a>
+ <button class="btn" type="button">Search</button>
+ <a class="btn btn-link" href="#">Show all layers</a>
+ </div>
+ </div-->
+ <div id="layer-added" class="alert alert-info lead" style="display:none;"></div>
+ <div id="layer-removed" class="alert alert-info lead" style="display:none;">
+ <button type="button" class="close" data-dismiss="alert">&times;</button>
+ <strong>1</strong> layer deleted from <a href="project-with-targets.html">your project</a>: <a href="#">meta-aarch64</a>
+ </div>
+
+
+{% include "basetable_top.html" %}
+ {% for build in objects %}
+ <tr class="data">
+ <td class="outcome"><a href="{% url "builddashboard" build.id %}">{%if build.outcome == build.SUCCEEDED%}<i class="icon-ok-sign success"></i>{%elif build.outcome == build.FAILED%}<i class="icon-minus-sign error"></i>{%else%}{%endif%}</a></td>
+ <td class="target">{% for t in build.target_set.all %} <a href="{% url "builddashboard" build.id %}"> {{t.target}} </a> <br />{% endfor %}</td>
+ <td class="machine"><a href="{% url "builddashboard" build.id %}">{{build.machine}}</a></td>
+ <td class="started_on"><a href="{% url "builddashboard" build.id %}">{{build.started_on|date:"d/m/y H:i"}}</a></td>
+ <td class="completed_on"><a href="{% url "builddashboard" build.id %}">{{build.completed_on|date:"d/m/y H:i"}}</a></td>
+ <td class="failed_tasks error">{% query build.task_build outcome=4 order__gt=0 as exectask%}{% if exectask.count == 1 %}<a href="{% url "task" build.id exectask.0.id %}">{{exectask.0.recipe.name}}.{{exectask.0.task_name}}</a>{% elif exectask.count > 1%}<a href="{% url "tasks" build.id %}?filter=outcome%3A4">{{exectask.count}}</a>{%endif%}</td>
+ <td class="errors_no">{% if build.errors_no %}<a class="errors_no error" href="{% url "builddashboard" build.id %}#errors">{{build.errors_no}} error{{build.errors_no|pluralize}}</a>{%endif%}</td>
+ <td class="warnings_no">{% if build.warnings_no %}<a class="warnings_no warning" href="{% url "builddashboard" build.id %}#warnings">{{build.warnings_no}} warning{{build.warnings_no|pluralize}}</a>{%endif%}</td>
+ <td class="time"><a href="{% url "buildtime" build.id %}">{{build.timespent|sectohms}}</a></td>
+ <td class="log">{{build.cooker_log_path}}</td>
+ <td class="output">
+ {% if build.outcome == build.SUCCEEDED %}
+ <a href="{%url "builddashboard" build.id%}#images">{{fstypes|get_dict_value:build.id}}</a>
+ {% endif %}
+ </td>
+ </tr>
+
+ {% endfor %}
+{% include "basetable_bottom.html" %}
+
+ <!-- Modals -->
+
+
+{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/projectconf.html b/bitbake/lib/toaster/toastergui/templates/projectconf.html
new file mode 100644
index 0000000000..e8b0c39f25
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/projectconf.html
@@ -0,0 +1,62 @@
+{% extends "baseprojectpage.html" %}
+{% load projecttags %}
+{% load humanize %}
+
+{% block localbreadcrumb %}
+<li>Project configuration</li>
+{% endblock %}
+
+{% block projectinfomain %}
+ <div class="page-header">
+ <h1>Configuration Variables</h1>
+ </div>
+
+ <div style="padding-left:19px;">
+
+ <dl class="dl-vertical">
+ {% for c in configvars %}
+ <dt>
+ {{c.name}}
+ <i class="icon-question-sign get-help" title="{{c.desc}}"></i>
+ </dt>
+ <dd class="lead">
+ <span id="distro">{{c.value}}</span>
+ <i class="icon-pencil" id="change-distro-icon"></i>
+ <form id="change-distro-form" style="display:none;">
+ <div class="input-append">
+ <input type="text" id="new-distro" value="poky tiny">
+ <button id="apply-change-distro" class="btn" type="button">Save</button>
+ <button id="cancel-change-distro" type="button" class="btn btn-link">Cancel</button>
+ </div>
+ </form>
+ </dd>
+ {% endfor %}
+
+
+ </dl>
+ <form id="variable-form">
+ <fieldset style="padding-left:0px;">
+ <legend>Add variable</legend>
+ <label>
+ Variable
+ <i class="icon-question-sign get-help" title="Variable names are case sensitive, cannot have spaces, and can only include letters, numbers, underscores and dashes"></i>
+ </label>
+ <input id="variable" type="text" placeholder="Type variable name">
+ <label>Value</label>
+ <input id="value" type="text" placeholder="Type variable value">
+ <div style="display:block;margin-top:10px;">
+ <a href="#" class="btn save" disabled>
+ Add variable
+ </a>
+ </div>
+ </fieldset>
+ </form>
+ <!--button id="add-variable" class="btn air">
+ <i class="icon-plus"></i>
+ Add variable
+ </button-->
+
+ </div>
+
+
+{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/projects.html b/bitbake/lib/toaster/toastergui/templates/projects.html
new file mode 100644
index 0000000000..432f025d3c
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/projects.html
@@ -0,0 +1,36 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load projecttags %}
+{% load humanize %}
+
+{% block pagecontent %}
+
+
+ {% include "mrb_section.html" %}
+
+
+ <div class="page-header top-air">
+ <h1>
+ All projects
+ </h1>
+ </div>
+
+{% include "basetable_top_projectbuilds.html" %}
+ {% for o in objects %}
+ <tr class="data">
+ <td><a href="{% url 'project' o.id %}">{{o.name}}</a></td>
+ <td><a href="{% url 'project' o.id %}">{{o.release.name}}</a></td>
+ <td>{{o.get_current_machine_name}}</td>
+ <td>{{o.get_number_of_builds}}</td>
+ <td>{{o.get_last_outcome}}</td>
+ <td>{{o.get_last_target}}</td>
+ <td>{{o.get_last_errors}}</td>
+ <td>{{o.get_last_warnings}}</td>
+ <td>{{o.get_last_imgfiles}}</td>
+ <td>{{o.updated|date:"d/m/y H:i"}}</td>
+ </tr>
+ {% endfor %}
+{% include "basetable_bottom.html" %}
+
+{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/recipe.html b/bitbake/lib/toaster/toastergui/templates/recipe.html
index a830ba9fb8..b20c65e3c8 100644
--- a/bitbake/lib/toaster/toastergui/templates/recipe.html
+++ b/bitbake/lib/toaster/toastergui/templates/recipe.html
@@ -52,16 +52,19 @@
Layer
</dt>
<dd>{{layer.name}}</dd>
+
+ {% if not MANAGED or not build.project %}
<dt>
<i class="icon-question-sign get-help" title="Path to the layer providing the recipe"></i>
Layer directory
</dt>
<dd><code>{{layer.local_path}}</code></dd>
+ {% endif %}
<dt>
<i class="icon-question-sign get-help" title="Path to the recipe .bb file"></i>
Recipe file
</dt>
- <dd><code>{{object.file_path}}</code></dd>
+ <dd><code>{{object.get_local_path}}</code></dd>
{% if layer_version.branch %}
<dt>
<i class="icon-question-sign get-help" title="The Git branch of the layer providing the recipe"></i>
@@ -126,6 +129,12 @@
<td>
{% ifnotequal task.sstate_result task.SSTATE_NA %}
<a {{ task|task_color }} href="{% url "task" build.pk task.pk %}">{{task.get_sstate_result_display}}</a>
+ {% if MANAGED and build.project and task.outcome == task.OUTCOME_FAILED %}
+ <a href="{% url 'build_artifact' build.pk "tasklogfile" task.pk %}">
+ <i class="icon-download-alt" title="" data-original-title="Download task log file"></i>
+ </a>
+ {% endif %}
+
{% endifnotequal %}
</td>
diff --git a/bitbake/lib/toaster/toastergui/templates/recipes.html b/bitbake/lib/toaster/toastergui/templates/recipes.html
index 791a487a81..889e676b45 100755..100644
--- a/bitbake/lib/toaster/toastergui/templates/recipes.html
+++ b/bitbake/lib/toaster/toastergui/templates/recipes.html
@@ -98,8 +98,11 @@
{{recipe.layer_version.commit|truncatechars:13}}
</a>
</td>
- <!-- Layer directory -->
- <td class="layer_version__layer__local_path">{{recipe.layer_version.layer.local_path}}</td>
+
+ {% if not MANAGED or not build.project %}
+ <!-- Layer directory -->
+ <td class="layer_version__layer__local_path">{{recipe.layer_version.layer.local_path}}</td>
+ {% endif %}
</tr>
{% endfor %}
diff --git a/bitbake/lib/toaster/toastergui/templates/target.html b/bitbake/lib/toaster/toastergui/templates/target.html
index 564fd27162..1309b52dad 100644
--- a/bitbake/lib/toaster/toastergui/templates/target.html
+++ b/bitbake/lib/toaster/toastergui/templates/target.html
@@ -65,6 +65,7 @@
{% include "basetable_top.html" %}
{% for package in objects %}
<tr>
+ {# the order of the table data must match the columns defined in the template context's tablecols #}
<td class="package_name">
<a href="{% url 'package_included_detail' build.id target.id package.id %}">
{{package.name}}
@@ -79,16 +80,17 @@
{{package.version|filtered_packageversion:package.revision}}
</a>
</td>
+ <td class="license">
+ {{package.license}}
+ </td>
<td class="size sizecol">
{{package.size|filtered_installedsize:package.installed_size|filtered_filesizeformat}}
</td>
+
<td class="size_over_total sizecol">
{{package|filter_sizeovertotal:packages_sum}}
</td>
- <td class="license">
- {{package.license}}
- </td>
- <td class="depends">
+ <td class="depends">
{% with deps=package.runtime_dependencies %}
{% with deps_count=deps|length %}
{% if deps_count > 0 %}
@@ -150,9 +152,11 @@
{{package.recipe.layer_version.commit|truncatechars:13}}
</a>
</td>
- <td class="layer_directory">
- {{ package.recipe.layer_version.layer.local_path }}
- </td>
+ {% if not MANAGED or not build.project %}
+ <td class="layer_directory">
+ {{ package.recipe.layer_version.layer.local_path }}
+ </td>
+ {% endif %}
</tr>
{% endfor %}
diff --git a/bitbake/lib/toaster/toastergui/templates/targets.html b/bitbake/lib/toaster/toastergui/templates/targets.html
new file mode 100644
index 0000000000..f4313f92b2
--- /dev/null
+++ b/bitbake/lib/toaster/toastergui/templates/targets.html
@@ -0,0 +1,272 @@
+{% extends "baseprojectpage.html" %}
+{% load projecttags %}
+{% load humanize %}
+
+{% block localbreadcrumb %}
+<li>Targets</li>
+{% endblock %}
+
+{% block projectinfomain %}
+ <div class="page-header">
+ <h1>
+ {% if request.GET.filter and objects.paginator.count > 0 or request.GET.search and objects.paginator.count > 0 %}
+ {{objects.paginator.count}} target{{objects.paginator.count|pluralize}} found
+ {% elif request.GET.filter and objects.paginator.count == 0 or request.GET.search and objects.paginator.count == 0 %}
+ No targets found
+ {%else%}
+ All targets
+ {%endif%}
+ <i class="icon-question-sign get-help heading-help" title="This page lists all the targets compatible with " + {{project.release.name}} + " that Toaster knows about."></i>
+ </h1>
+ </div>
+
+ <div id="zone1alerts">
+
+ </div>
+
+{% if objects.paginator.count == 0 %}
+ <div class="row-fluid">
+ <div class="alert">
+ <form class="no-results input-append" id="searchform">
+ <input id="search" name="search" class="input-xxlarge" type="text" value="{{request.GET.search}}"/>{% if request.GET.search %}<a href="javascript:$('#search').val('');searchform.submit()" class="add-on btn" tabindex="-1"><i class="icon-remove"></i></a>{% endif %}
+ <button class="btn" type="submit" value="Search">Search</button>
+ <button class="btn btn-link" onclick="javascript:$('#search').val('');searchform.submit()">Show all targets</button>
+ </form>
+ </div>
+ </div>
+
+{% else %}
+
+{% include "basetable_top.html" %}
+ {% for o in objects %}
+ {% if o.name %}
+ <tr class="data">
+ <td class="target">
+ {{o.name}}
+ <a target="_blank" href="{{o.get_layersource_view_url}}"><i class="icon-share get-info"></i></a>
+ </td>
+ <td class="version">{{o.version}}</td>
+ <td class="description">{% if o.description %}{{o.description}}{% else %}{{o.summary}}{%endif%}</td>
+ <td class="recipe-file">
+ <code>{{o.file_path}}</code>
+ <a href="{{o.get_vcs_link_url}}{{o.file_path}}" target="_blank"><i class="icon-share get-info"></i></a>
+ </td>
+ <td class="target-section">{{o.section}}</td>
+ <td class="license">{{o.license}}</td>
+ <td class="layer"><a href="{% url 'layerdetails' o.layer_version.id%}">{{o.layer_version.layer.name}}</a></td>
+ <td class="source">{{o.layer_source.name}}</td>
+ <td class="branch">
+ {% if o.layer_version.up_branch %}
+ {{o.layer_version.up_branch.name}}
+ {% else %}
+ <a class="btn"
+ data-content="<ul class='unstyled'>
+ <li>{{o.layer_version.commit}}</li>
+ </ul>">
+ {{o.layer_version.commit|truncatechars:13}}
+ </a>
+ {% endif %}
+ </td>
+ <td class="add-layer" value="{{o.pk}}" layerversion_id="{{o.layer_version.pk}}">
+ <div id="layer-tooltip-{{o.pk}}" style="display: none; font-size: 11px; line-height: 1.3;" class="tooltip-inner">layer was modified</div>
+ <a href="{% url 'project' project.id %}#/targetbuild={{o.name}}" id="target-build-{{o.pk}}" class="btn btn-block remove-layer" style="display:none;" >
+ Build target
+ </a>
+ <a id="layer-add-{{o.pk}}" class="btn btn-block" style="display:none;" href="javascript:layerAdd({{o.layer_version.pk}}, '{{o.layer_version.layer.name}}', '{%url 'layerdetails' o.layer_version.pk%}', {{o.pk}})" >
+ <i class="icon-plus"></i>
+ Add layer
+ <i title="" class="icon-question-sign get-help" data-original-title="To build this target, you must first add the {{o.layer_version.layer.name}} layer to your project"></i>
+ </a>
+ </td>
+ </tr>
+ {% endif %}
+ {% endfor %}
+{% include "basetable_bottom.html" %}
+
+ <!-- Modals -->
+
+ <!-- 'Layer dependencies modal' -->
+ <div id="dependencies_modal" class="modal hide fade" tabindex="-1" role="dialog" aria-hidden="true">
+ <form id="dependencies_modal_form">
+ <div class="modal-header">
+ <button type="button" class="close" data-dismiss="modal" aria-hidden="true">x</button>
+ <h3><span class="layer-name"></span> dependencies</h3>
+ </div>
+ <div class="modal-body">
+ <p><strong class="layer-name"></strong> depends on some layers that are not added to your project. Select the ones you want to add:</p>
+ <ul class="unstyled" id="dependencies_list">
+ </ul>
+ </div>
+ <div class="modal-footer">
+ <button class="btn btn-primary" type="submit">Add layers</button>
+ <button class="btn" type="reset" data-dismiss="modal">Cancel</button>
+ </div>
+ </form>
+ </div>
+
+{% endif %}
+
+{% if project %}
+<script>
+
+var tooltipUpdateText;
+
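+/* Descriptive note: POSTs a project-edit request to the xhr_projectedit endpoint; on success it
+   refreshes the Add/Build buttons from the returned layer list and calls the optional onsuccess
+   callback. The onfail parameter is currently unused (errors simply raise an alert). */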
+function _makeXHREditCall(data, onsuccess, onfail) {
+ $.ajax( {
+ type: "POST",
+ url: "{% url 'xhr_projectedit' project.id %}",
+ data: data,
+ headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
+ success: function (_data) {
+ if (_data.error != "ok") {
+ alert(_data.error);
+ } else {
+ updateButtons(_data.layers.map(function (e) {return e.id}));
+ if (onsuccess != undefined) onsuccess(_data);
+ }
+ },
+ error: function (_data) {
+ alert("Call failed");
+ console.log(_data);
+ }
+ });
+}
+
+function show_alert(text, cls) {
+ $("#zone1alerts").html("<div class=\"alert alert-info lead\"><button type=\"button\" class=\"close\" data-dismiss=\"alert\">&times;</button>" + text + "</div>");
+}
+
+
+function show_dependencies_modal(layerId, layerName, layerURL, dependencies) {
+ // update layer name
+ $('.layer-name').text(layerName);
+ var deplistHtml = "";
+ for (var i = 0; i < dependencies.length; i++) {
+ deplistHtml += "<li><label class=\"checkbox\"><input name=\"dependencies\" value=\""
+ deplistHtml += dependencies[i].id;
+ deplistHtml +="\" type=\"checkbox\" checked=\"checked\"/>";
+ deplistHtml += dependencies[i].name;
+ deplistHtml += "</label></li>";
+ }
+ $('#dependencies_list').html(deplistHtml);
+
+ var selected = [layerId];
+ var layer_link_list = undefined;
+
+ $("#dependencies_modal_form").submit(function (e) {
+ e.preventDefault();
+ $("input[name='dependencies']:checked").map(function () { selected.push(parseInt($(this).val()))});
+ layer_link_list = "<a href='"+layerURL+"'>"+layerName+"</a>";
+ if (selected.length > 1) {
+ tooltipUpdateText = "" + selected.length + " layers added";
+ } else {
+ tooltipUpdateText = "1 layer added";
+ }
+
+ for (var i = 0; i < selected.length; i++) {
+ for (var j = 0; j < dependencies.length; j++) {
+ if (dependencies[j].id == selected[i]) {
+ layer_link_list+= ", <a href='"+dependencies[j].layerdetailurl+"'>"+dependencies[j].name+"</a>"
+ break;
+ }
+ }
+ }
+
+ $('#dependencies_modal').modal('hide');
+
+ {% if project %}
+ _makeXHREditCall({ 'layerAdd': selected.join(",") }, function onXHRSuccess() {
+ show_alert("You have added <strong>"+selected.length+"</strong> layer(s) to <a href=\"{% url 'project' project.id%}\">{{project.name}}</a>: " + layer_link_list);
+ });
+ {% endif %}
+
+ });
+ $('#dependencies_modal').modal('show');
+}
+
+function updateLayerCountLabels (amount) {
+ /* Update the filter labels */
+ var countLabel = $("#layer_version__projectlayer__project\\:{{project.id}}_count");
+ countLabel.text(Number(countLabel.text())+amount);
+
+ var countLabelRemaining = $("#layer_version__projectlayer__project\\:NOT{{project.id}}_count");
+ countLabelRemaining.text(Number(countLabelRemaining.text())-amount);
+}
+
+var pressedButton = undefined;
+
+function layerAdd(layerId, layerName, layerURL, pressedButtonId) {
+ pressedButton = pressedButtonId;
+ $.ajax({
+ url: '{% url "xhr_datatypeahead" %}',
+ data: {'type': 'layerdeps','value':layerId},
+ success: function(_data) {
+ if (_data.error != "ok") {
+ alert(_data.error);
+ } else {
+ updateLayerCountLabels(_data.list.length+1);
+
+ if (_data.list.length > 0) {
+ show_dependencies_modal(layerId, layerName, layerURL, _data.list);
+ }
+ else {
+ tooltipUpdateText = "1 layer added";
+ _makeXHREditCall({ 'layerAdd': layerId }, function () {
+ show_alert("You have added <strong>1</strong> layer to <a href=\"{% url 'project' project.id%}\">{{project.name}}</a>: <a href=\""+layerURL+"\">" + layerName +"</a>");
+ });
+ }
+ }
+ }
+ })
+}
+
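+/* Toggle a target row between its "Add layer" and "Build target" buttons. If the change was
+   triggered by the button the user just pressed, briefly fade in the "layer(s) added" tooltip
+   before revealing the other button; otherwise swap the buttons immediately. */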
+function buttonSet(id, state) {
+ var tohide, toshow;
+ if (state == "add")
+ {
+ toshow = "#layer-add-";
+ tohide = "#target-build-";
+ }
+ else if (state == "build")
+ {
+ tohide = "#layer-add-";
+ toshow = "#target-build-";
+ }
+
+ var previouslyvisible = $(tohide + id).is(":visible");
+ if (previouslyvisible && id == pressedButton) {
+ $(tohide + id).fadeOut( function() {
+ $("#layer-tooltip-" + id).text(tooltipUpdateText);
+ $("#layer-tooltip-" + id).fadeIn().delay(2000).fadeOut(function(){
+ $(toshow + id).delay(300).fadeIn();
+ });
+ });
+ } else {
+ $(tohide + id).hide();
+ $("#layer-tooltip-" + id).hide();
+ $(toshow + id).show();
+ }
+};
+
+
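+/* Given the layer-version ids already in the project, flip each listed target's button to
+   "Build target" when its providing layer is present, or back to "Add layer" when it is not. */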
+function updateButtons(projectLayers) {
+ var displayedLayers = [];
+ $(".add-layer").map(function () { displayedLayers.push( { "l": parseInt($(this).attr('layerversion_id')), "i": parseInt($(this).attr('value'))})});
+ for (var i=0; i < displayedLayers.length; i++) {
+ if (projectLayers.indexOf(displayedLayers[i].l) > -1) {
+ buttonSet(displayedLayers[i].i, "build");
+ }
+ else {
+ buttonSet(displayedLayers[i].i, "add");
+ }
+ }
+}
+
+$(document).ready(function (){
+ updateButtons({{projectlayerset}});
+});
+
+</script>
+{%endif%}
+
+{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templates/task.html b/bitbake/lib/toaster/toastergui/templates/task.html
index 3c4a3d4893..09fd25b259 100644
--- a/bitbake/lib/toaster/toastergui/templates/task.html
+++ b/bitbake/lib/toaster/toastergui/templates/task.html
@@ -24,13 +24,17 @@
{# executed tasks outcome #}
<dl class="dl-horizontal">
{% if task.logfile %}
+ {% if MANAGED and build.project %}
+ <a class="btn btn-large" href="{% url 'build_artifact' build.id "tasklogfile" task.pk %}" style="margin:15px;">Download task log</a>
+ {% else %}
<dt>
<i class="icon-question-sign get-help" title="Path the task log file"></i> Log file
</dt>
<dd>
- <code>{{task.logfile}}</code>
+ <code>{{task.logfile}}</code>
</dd>
{% endif %}
+ {% endif %}
{# show stack trace for failed task #}
{% if task.outcome == task.OUTCOME_FAILED and log_head %}
<h3>Python stack trace</h3>
@@ -82,6 +86,9 @@
</td>
<td>
<a href="{%url "task" match.build.pk match.pk%}">{{match.task_name}}</a>
+ {% if task.get_description %}
+ <i class="icon-question-sign get-help hover-help" title="{{task.get_description}}"></i>
+ {% endif %}
</td>
<td>
<a href="{%url "task" match.build.pk match.pk%}">{{match.get_executed_display}}</a>
@@ -105,7 +112,7 @@
{% elif task.outcome == task.OUTCOME_COVERED %}
<dl class="dl-horizontal">
<dt>
- <i class="icon-question-sign get-help" title="The task providing the outcome of this task"></i> Task covered by
+ <i class="icon-question-sign get-help" title="The task(s) providing the outcome of this task"></i> Task covered by
</dt>
<dd>
<ul>
@@ -184,6 +191,9 @@
<strong>Failed</strong> to restore output from sstate cache. The file was found but could not be unpacked.
</div>
<dl class="dl-horizontal">
+ {% if MANAGED and build.project %}
+ <a href="{% url 'build_artifact' build.id "tasklogfile" task.pk %}" style="margin:15px;">Download log</a>
+ {% else %}
<dt>
<i class="icon-question-sign get-help" title="Path to the cache attempt log file"></i>
Log file
@@ -194,6 +204,7 @@
Time (secs)
</dt>
<dd>{{task.elapsed_time|format_none_and_zero}}</dd>
+ {% endif %}
</dl>
<div class="alert alert-info">
Running the real task instead.
@@ -261,8 +272,8 @@
Time (secs)
</dt>
<dd>{{task.elapsed_time|format_none_and_zero|floatformat:2}}</dd>
- {% endif %}
- {% if task.cpu_usage > 0 %}
+ {% endif %}
+ {% if task.cpu_usage > 0 %}
<dt>
<i class="icon-question-sign get-help" title="The percentage of task CPU utilization"></i>
CPU usage
diff --git a/bitbake/lib/toaster/toastergui/templates/tasks.html b/bitbake/lib/toaster/toastergui/templates/tasks.html
index d0c6f4e326..4cbcc5ed68 100644
--- a/bitbake/lib/toaster/toastergui/templates/tasks.html
+++ b/bitbake/lib/toaster/toastergui/templates/tasks.html
@@ -94,6 +94,11 @@
<td class="outcome">
<a href="{%url "task" build.pk task.pk%} ">{{task.get_outcome_display}} </a>
<i class="icon-question-sign get-help hover-help" title="{{task.get_outcome_help}}"></i>
+ {% if MANAGED and build.project and task.outcome == task.OUTCOME_FAILED %}
+ <a href="{% url 'build_artifact' build.pk "tasklogfile" task.pk %}">
+ <i class="icon-download-alt" title="" data-original-title="Download task log file"></i>
+ </a>
+ {% endif %}
</td>
<td class="cache_attempt">
<a href="{%url "task" build.pk task.pk%} ">{{task.get_sstate_result_display|format_none_and_zero}}</a>
@@ -107,9 +112,12 @@
<td class="disk_io">
{{task.disk_io|format_none_and_zero}}
</td>
+
+ {% if not MANAGED or not build.project %}
<td class="task_log">
{{task.logfile}}
</td>
+ {% endif %}
</tr>
{% endfor %}
@@ -124,10 +132,10 @@
// enable blue highlight animation for the order link
if (location.href.search('#') > -1) {
var task_order = location.href.split('#')[1];
- $("#" + task_order).addClass("highlight");
+ $("#" + task_order).addClass("highlight");
}
});
-
+
</script>
{% endblock %}
diff --git a/bitbake/lib/toaster/toastergui/templatetags/projecttags.py b/bitbake/lib/toaster/toastergui/templatetags/projecttags.py
index b953aa1580..f564edfe49 100644
--- a/bitbake/lib/toaster/toastergui/templatetags/projecttags.py
+++ b/bitbake/lib/toaster/toastergui/templatetags/projecttags.py
@@ -24,6 +24,8 @@ import re
from django import template
from django.utils import timezone
from django.template.defaultfilters import filesizeformat
+import json as JsonLib
+from django.utils.safestring import mark_safe
register = template.Library()
@@ -40,6 +42,19 @@ def sectohms(time):
hours = int(tdsec / 3600)
return "%02d:%02d:%02d" % (hours, int((tdsec - (hours * 3600))/ 60), int(tdsec) % 60)
+
+@register.filter(name = 'mapselect')
+def mapselect(value, argument):
+ return map(lambda x: vars(x)[argument], value)
+
+
+@register.filter(name = "json")
+def json(value):
+ # JSON spec says that "\/" is functionally identical to "/" to allow for HTML-tag embedding in JSON strings
+ # unfortunately, I can't find any option in the json module to turn on forward-slash escaping, so we do
+ # it manually here
+ return mark_safe(JsonLib.dumps(value, ensure_ascii=False).replace('</', '<\\/'))
+
@register.assignment_tag
def query(qs, **kwargs):
""" template tag which allows queryset filtering. Usage:
@@ -253,3 +268,11 @@ def get_dict_value(dictionary, key):
return dictionary[key]
except (KeyError, IndexError):
return ''
+
+@register.filter
+def format_build_date(completed_on):
+ now = timezone.now()
+ delta = now - completed_on
+
+ if delta.days >= 1:
+ return True
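
The new json filter escapes "</" before marking the result safe, so a serialized value cannot terminate an enclosing <script> tag. A minimal, self-contained sketch of that escaping step (plain Python, not part of the patch):

    import json

    payload = {"html": "</script><script>alert(1)</script>"}
    escaped = json.dumps(payload, ensure_ascii=False).replace('</', '<\\/')
    print(escaped)
    # {"html": "<\/script><script>alert(1)<\/script>"}
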
diff --git a/bitbake/lib/toaster/toastergui/urls.py b/bitbake/lib/toaster/toastergui/urls.py
index 7c4f894b9c..6e1b0ab913 100644
--- a/bitbake/lib/toaster/toastergui/urls.py
+++ b/bitbake/lib/toaster/toastergui/urls.py
@@ -21,6 +21,8 @@ from django.views.generic import RedirectView
urlpatterns = patterns('toastergui.views',
# landing page
+ url(r'^landing/$', 'landing', name='landing'),
+
url(r'^builds/$', 'builds', name='all-builds'),
# build info navigation
url(r'^build/(?P<build_id>\d+)$', 'builddashboard', name="builddashboard"),
@@ -57,22 +59,43 @@ urlpatterns = patterns('toastergui.views',
url(r'^build/(?P<build_id>\d+)/cpuusage$', 'cpuusage', name='cpuusage'),
url(r'^build/(?P<build_id>\d+)/diskio$', 'diskio', name='diskio'),
- # image information dir - not yet implemented
+ # image information dir
url(r'^build/(?P<build_id>\d+)/target/(?P<target_id>\d+)/packagefile/(?P<packagefile_id>\d+)$',
'image_information_dir', name='image_information_dir'),
+ # build download artifact
+ url(r'^build/(?P<build_id>\d+)/artifact/(?P<artifact_type>\w+)/id/(?P<artifact_id>\w+)', 'build_artifact', name="build_artifact"),
+
# urls not linked from the dashboard
- url(r'^layers/$', 'layer', name='all-layers'),
url(r'^layerversions/(?P<layerversion_id>\d+)/recipes/.*$', 'layer_versions_recipes', name='layer_versions_recipes'),
# project URLs
url(r'^newproject/$', 'newproject', name='newproject'),
+ url(r'^importlayer/$', 'importlayer', name='importlayer'),
+
+ url(r'^layers/$', 'layers', name='layers'),
+ url(r'^layer/(?P<layerid>\d+)/$', 'layerdetails', name='layerdetails'),
+ url(r'^layer/$', 'layerdetails', name='layerdetails'),
+ url(r'^targets/$', 'targets', name='targets'),
+ url(r'^machines/$', 'machines', name='machines'),
+
+ url(r'^projects/$', 'projects', name='all-projects'),
+
+ url(r'^project/$', 'project', name='project'),
url(r'^project/(?P<pid>\d+)/$', 'project', name='project'),
+ url(r'^project/(?P<pid>\d+)/configuration$', 'projectconf', name='projectconf'),
+ url(r'^project/(?P<pid>\d+)/builds$', 'projectbuilds', name='projectbuilds'),
+
+ url(r'^xhr_build/$', 'xhr_build', name='xhr_build'),
url(r'^xhr_projectbuild/(?P<pid>\d+)/$', 'xhr_projectbuild', name='xhr_projectbuild'),
+ url(r'^xhr_projectinfo/$', 'xhr_projectinfo', name='xhr_projectinfo'),
url(r'^xhr_projectedit/(?P<pid>\d+)/$', 'xhr_projectedit', name='xhr_projectedit'),
+ url(r'^xhr_datatypeahead/$', 'xhr_datatypeahead', name='xhr_datatypeahead'),
+ url(r'^xhr_importlayer/$', 'xhr_importlayer', name='xhr_importlayer'),
+
# default redirection
- url(r'^$', RedirectView.as_view( url= 'builds/')),
+ url(r'^$', RedirectView.as_view( url= 'landing')),
)
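
The new build_artifact route captures a build id, an artifact type and an artifact id. A quick illustration of what the added pattern matches, using only the standard re module (assumed equivalent to Django's resolution of this regex; not part of the patch):

    import re

    pattern = re.compile(r'^build/(?P<build_id>\d+)/artifact/(?P<artifact_type>\w+)/id/(?P<artifact_id>\w+)')
    match = pattern.match('build/42/artifact/tasklogfile/id/7')
    # groups come back as strings, in the order the pattern names them
    print(match.group('build_id') + '/' + match.group('artifact_type') + '/' + match.group('artifact_id'))
    # 42/tasklogfile/7
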
diff --git a/bitbake/lib/toaster/toastergui/views.py b/bitbake/lib/toaster/toastergui/views.py
index bd65c08b06..679c1e9430 100755
--- a/bitbake/lib/toaster/toastergui/views.py
+++ b/bitbake/lib/toaster/toastergui/views.py
@@ -20,22 +20,71 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import operator,re
+import HTMLParser
from django.db.models import Q, Sum
from django.db import IntegrityError
from django.shortcuts import render, redirect
from orm.models import Build, Target, Task, Layer, Layer_Version, Recipe, LogMessage, Variable
from orm.models import Task_Dependency, Recipe_Dependency, Package, Package_File, Package_Dependency
-from orm.models import Target_Installed_Package, Target_File, Target_Image_File
+from orm.models import Target_Installed_Package, Target_File, Target_Image_File, BuildArtifact
from django.views.decorators.cache import cache_control
from django.core.urlresolvers import reverse
+from django.core.exceptions import MultipleObjectsReturned
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import HttpResponseBadRequest, HttpResponseNotFound
from django.utils import timezone
+from django.utils.html import escape
from datetime import timedelta
from django.utils import formats
+from toastergui.templatetags.projecttags import json as jsonfilter
import json
+
+# all new sessions should come through the landing page;
+# determine which mode we are running in, and redirect appropriately
+def landing(request):
+ if toastermain.settings.MANAGED and Build.objects.count() == 0 and Project.objects.count() > 0:
+ return redirect(reverse('all-projects'), permanent = False)
+
+ if Build.objects.all().count() > 0:
+ return redirect(reverse('all-builds'), permanent = False)
+
+ return render(request, 'landing.html')
+
+def _project_recent_build_list(prj):
+ # build requests not yet started
+ return (map(lambda x: {
+ "id": x.pk,
+ "targets" : map(lambda y: {"target": y.target }, x.brtarget_set.all()),
+ "status": x.get_state_display(),
+ }, prj.buildrequest_set.filter(state__lt = BuildRequest.REQ_INPROGRESS).order_by("-pk")) +
+ # build requests started, but with no build yet
+ map(lambda x: {
+ "id": x.pk,
+ "targets" : map(lambda y: {"target": y.target }, x.brtarget_set.all()),
+ "status": x.get_state_display(),
+ }, prj.buildrequest_set.filter(state = BuildRequest.REQ_INPROGRESS, build = None).order_by("-pk")) +
+ # build requests that failed
+ map(lambda x: {
+ "id": x.pk,
+ "targets" : map(lambda y: {"target": y.target }, x.brtarget_set.all()),
+ "status": x.get_state_display(),
+ "errors": map(lambda y: {"type": y.errtype, "msg": y.errmsg, "tb": y.traceback}, x.brerror_set.all()),
+ }, prj.buildrequest_set.filter(state = BuildRequest.REQ_FAILED).order_by("-pk")) +
+ # and already made builds
+ map(lambda x: {
+ "id": x.pk,
+ "targets": map(lambda y: {"target": y.target }, x.target_set.all()),
+ "status": x.get_outcome_display(),
+ "completed_on" : x.completed_on.strftime('%s')+"000",
+ "build_time" : (x.completed_on - x.started_on).total_seconds(),
+ "build_page_url" : reverse('builddashboard', args=(x.pk,)),
+ "completeper": x.completeper(),
+ "eta": x.eta().ctime(),
+ }, prj.build_set.all()))
+
+
def _build_page_range(paginator, index = 1):
try:
page = paginator.page(index)
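
The new landing() view routes first-time visitors according to what data already exists. A condensed sketch of that decision order, with plain arguments standing in for the ORM counts the patch uses (illustrative only):

    def landing_target(managed, build_count, project_count):
        # mirrors the redirect order of the new landing() view
        if managed and build_count == 0 and project_count > 0:
            return 'all-projects'
        if build_count > 0:
            return 'all-builds'
        return 'landing.html'

    print(landing_target(True, 0, 2))    # all-projects
    print(landing_target(False, 5, 0))   # all-builds
    print(landing_target(False, 0, 0))   # landing.html
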
@@ -76,7 +125,7 @@ def _redirect_parameters(view, g, mandatory_parameters, *args, **kwargs):
params[i] = g[i]
for i in mandatory_parameters:
if not i in params:
- params[i] = mandatory_parameters[i]
+ params[i] = urllib.unquote(str(mandatory_parameters[i]))
return redirect(url + "?%s" % urllib.urlencode(params), *args, **kwargs)
@@ -155,6 +204,7 @@ def _get_search_results(search_term, queryset, model):
search_objects.append(reduce(operator.or_, q_map))
search_object = reduce(operator.and_, search_objects)
+ print "search objects", search_object
queryset = queryset.filter(search_object)
return queryset
@@ -200,6 +250,24 @@ def _get_queryset(model, queryset, filter_string, search_term, ordering_string,
# ensure only distinct records (e.g. from multiple search hits) are returned
return queryset.distinct()
+# returns the entries-per-page value and the name of the applied sorting field.
+# a value given explicitly as a GET parameter takes precedence;
+# otherwise the cookie value is used.
+def _get_parameters_values(request, default_count, default_order):
+ pagesize = request.GET.get('count', request.COOKIES.get('count', default_count))
+ orderby = request.GET.get('orderby', request.COOKIES.get('orderby', default_order))
+ return (pagesize, orderby)
+
+
+# set cookies for parameters. this is useful when parameters are set
+# manually from the GET values of the link
+def _save_parameters_cookies(response, pagesize, orderby, request):
+ html_parser = HTMLParser.HTMLParser()
+ response.set_cookie(key='count', value=pagesize, path=request.path)
+ response.set_cookie(key='orderby', value=html_parser.unescape(orderby), path=request.path)
+ return response
+
+
# shows the "all builds" page
def builds(request):
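
The two helpers added above define one precedence rule used by the table views below: an explicit GET parameter wins, then the per-path cookie, then the view's default. A dictionary-based sketch of that rule (names mirror the patch; not part of it):

    def get_parameters_values(get_params, cookies, default_count, default_order):
        # GET parameter first, then cookie, then the view's default
        pagesize = get_params.get('count', cookies.get('count', default_count))
        orderby = get_params.get('orderby', cookies.get('orderby', default_order))
        return (pagesize, orderby)

    print(get_parameters_values({}, {'count': '50'}, 10, 'completed_on:-'))
    # ('50', 'completed_on:-')
    print(get_parameters_values({'orderby': 'errors_no:-'}, {}, 10, 'completed_on:-'))
    # (10, 'errors_no:-')
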
@@ -207,7 +275,8 @@ def builds(request):
# define here what parameters the view needs in the GET portion in order to
# be able to display something. 'count' and 'page' are mandatory for all views
# that use paginators.
- mandatory_parameters = { 'count': 10, 'page' : 1, 'orderby' : 'completed_on:-' };
+ (pagesize, orderby) = _get_parameters_values(request, 10, 'completed_on:-')
+ mandatory_parameters = { 'count': pagesize, 'page' : 1, 'orderby' : orderby }
retval = _verify_parameters( request.GET, mandatory_parameters )
if retval:
return _redirect_parameters( 'all-builds', request.GET, mandatory_parameters)
@@ -220,21 +289,10 @@ def builds(request):
queryset = _get_queryset(Build, queryset_all, filter_string, search_term, ordering_string, '-completed_on')
# retrieve the objects that will be displayed in the table; builds a paginator and gets a page range to display
- build_info = _build_page_range(Paginator(queryset, request.GET.get('count', 10)),request.GET.get('page', 1))
+ build_info = _build_page_range(Paginator(queryset, pagesize), request.GET.get('page', 1))
# build view-specific information; this is rendered specifically in the builds page, at the top of the page (i.e. Recent builds)
- build_mru = Build.objects.filter(completed_on__gte=(timezone.now()-timedelta(hours=24))).order_by("-started_on")[:3]
- for b in [ x for x in build_mru if x.outcome == Build.IN_PROGRESS ]:
- tf = Task.objects.filter(build = b)
- tfc = tf.count()
- if tfc > 0:
- b.completeper = tf.exclude(order__isnull=True).count()*100/tf.count()
- else:
- b.completeper = 0
-
- b.eta = 0
- if b.completeper > 0:
- b.eta = timezone.now() + ((timezone.now() - b.started_on)*(100-b.completeper)/b.completeper)
+ build_mru = Build.objects.order_by("-started_on")[:3]
# set up list of fstypes for each build
fstypes_map = {};
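
The inline progress/ETA computation removed here is referenced elsewhere in the patch as completeper() and eta() calls, presumably now provided by the Build model. For reference, the arithmetic the removed code used, as a runnable sketch (not part of the patch):

    import datetime

    def eta(now, started_on, completeper):
        # linear extrapolation: elapsed time scaled by the fraction of work still remaining
        return now + (now - started_on) * (100 - completeper) / completeper

    start = datetime.datetime(2014, 8, 1, 12, 0)
    now = start + datetime.timedelta(minutes=30)
    print(eta(now, start, 75))   # 2014-08-01 12:40:00 -- 30 minutes elapsed, 25% of the work left
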
@@ -364,22 +422,41 @@ def builds(request):
'ordericon':_get_toggle_order_icon(request, "timespent"),
'orderkey' : 'timespent',
},
- {'name': 'Log',
+ {'name': 'Image files', 'clclass': 'output',
+ 'qhelp': "The root file system types produced by the build. You can find them in your <code>/build/tmp/deploy/images/</code> directory",
+ # TODO: compute image fstypes from Target_Image_File
+ },
+ ]
+ }
+
+ if not toastermain.settings.MANAGED:
+ context['tablecols'].insert(-2,
+ {'name': 'Log1',
'dclass': "span4",
'qhelp': "Path to the build main log file",
'clclass': 'log', 'hidden': 1,
'orderfield': _get_toggle_order(request, "cooker_log_path"),
'ordericon':_get_toggle_order_icon(request, "cooker_log_path"),
'orderkey' : 'cooker_log_path',
- },
- {'name': 'Output', 'clclass': 'output',
- 'qhelp': "The root file system types produced by the build. You can find them in your <code>/build/tmp/deploy/images/</code> directory",
- # TODO: compute image fstypes from Target_Image_File
- },
- ]
- }
+ }
+ )
- return render(request, template, context)
+
+ if toastermain.settings.MANAGED:
+ context['tablecols'].append(
+ {'name': 'Project', 'clclass': 'project',
+ 'filter': {'class': 'project',
+ 'label': 'Project:',
+ 'options': map(lambda x: (x.name,'',x.build_set.filter(outcome__lt=Build.IN_PROGRESS).count()), Project.objects.all()),
+
+ }
+ }
+ )
+
+
+ response = render(request, template, context)
+ _save_parameters_cookies(response, pagesize, orderby, request)
+ return response
##
@@ -391,7 +468,7 @@ def builddashboard( request, build_id ):
template = "builddashboard.html"
if Build.objects.filter( pk=build_id ).count( ) == 0 :
return redirect( builds )
- build = Build.objects.filter( pk = build_id )[ 0 ];
+ build = Build.objects.get( pk = build_id );
layerVersionId = Layer_Version.objects.filter( build = build_id );
recipeCount = Recipe.objects.filter( layer_version__id__in = layerVersionId ).count( );
tgts = Target.objects.filter( build_id = build_id ).order_by( 'target' );
@@ -411,12 +488,8 @@ def builddashboard( request, build_id ):
hasImages = True
npkg = 0
pkgsz = 0
- pid= 0
- tp = Target_Installed_Package.objects.filter( target_id = t.id )
package = None
- for p in tp:
- pid = p.package_id
- package = Package.objects.get( pk = p.package_id )
+ for package in Package.objects.filter(id__in = [x.package_id for x in t.target_installed_package_set.all()]):
pkgsz = pkgsz + package.size
if ( package.installed_name ):
npkg = npkg + 1
@@ -429,7 +502,7 @@ def builddashboard( request, build_id ):
if ( ndx < 0 ):
ndx = 0;
f = i.file_name[ ndx + 1: ]
- imageFiles.append({ 'path': f, 'size' : i.file_size })
+ imageFiles.append({ 'id': i.id, 'path': f, 'size' : i.file_size })
if ( t.is_image and
(( len( imageFiles ) <= 0 ) or ( len( t.license_manifest_path ) <= 0 ))):
targetHasNoImages = True
@@ -447,6 +520,8 @@ def builddashboard( request, build_id ):
if ( p.installed_name ):
packageCount = packageCount + 1
+ logmessages = list(LogMessage.objects.filter( build = build_id ))
+
context = {
'build' : build,
'hasImages' : hasImages,
@@ -454,7 +529,7 @@ def builddashboard( request, build_id ):
'targets' : targets,
'recipecount' : recipeCount,
'packagecount' : packageCount,
- 'logmessages' : LogMessage.objects.filter( build = build_id ),
+ 'logmessages' : logmessages,
}
return render( request, template, context )
@@ -530,14 +605,14 @@ def recipe(request, build_id, recipe_id):
if Recipe.objects.filter(pk=recipe_id).count() == 0 :
return redirect(builds)
- object = Recipe.objects.filter(pk=recipe_id)[0]
- layer_version = Layer_Version.objects.filter(pk=object.layer_version_id)[0]
- layer = Layer.objects.filter(pk=layer_version.layer_id)[0]
+ object = Recipe.objects.get(pk=recipe_id)
+ layer_version = Layer_Version.objects.get(pk=object.layer_version_id)
+ layer = Layer.objects.get(pk=layer_version.layer_id)
tasks = Task.objects.filter(recipe_id = recipe_id, build_id = build_id).exclude(order__isnull=True).exclude(task_name__endswith='_setscene').exclude(outcome=Task.OUTCOME_NA)
packages = Package.objects.filter(recipe_id = recipe_id).filter(build_id = build_id).filter(size__gte=0)
context = {
- 'build' : Build.objects.filter(pk=build_id)[0],
+ 'build' : Build.objects.get(pk=build_id),
'object' : object,
'layer_version' : layer_version,
'layer' : layer,
@@ -548,8 +623,8 @@ def recipe(request, build_id, recipe_id):
def target_common( request, build_id, target_id, variant ):
template = "target.html"
- default_orderby = 'name:+';
- mandatory_parameters = { 'count': 25, 'page' : 1, 'orderby':'name:+'};
+ (pagesize, orderby) = _get_parameters_values(request, 25, 'name:+')
+ mandatory_parameters = { 'count': pagesize, 'page' : 1, 'orderby': orderby }
retval = _verify_parameters( request.GET, mandatory_parameters )
if retval:
return _redirect_parameters(
@@ -565,8 +640,10 @@ def target_common( request, build_id, target_id, variant ):
packages_sum = queryset.aggregate( Sum( 'installed_size' ))
queryset = _get_queryset(
Package, queryset, filter_string, search_term, ordering_string, 'name' )
- packages = _build_page_range( Paginator(
- queryset, request.GET.get( 'count', 25 )),request.GET.get( 'page', 1 ))
+ packages = _build_page_range( Paginator(queryset, pagesize), request.GET.get( 'page', 1 ))
+
+
+ build = Build.objects.get( pk = build_id )
# bring in package dependencies
for p in packages.object_list:
@@ -600,8 +677,6 @@ def target_common( request, build_id, target_id, variant ):
tc_sizePercentage = {
'name' : 'Size over total (%)',
'qhelp' : 'Proportion of the overall size represented by this package',
- 'orderfield' : _get_toggle_order( request, "size" ),
- 'ordericon' : _get_toggle_order_icon( request, "size" ),
'clclass' : 'size_over_total',
'hidden' : 1,
}
@@ -630,8 +705,7 @@ eans multiple licenses exist that cover different parts of the source',
tc_dependencies[ "hidden" ] = 1
tc_rdependencies = {
'name' : 'Reverse dependencies',
- 'qhelp' : 'Package run-time reverse dependencies (i.e. which other packages depend on t\
-his package',
+ 'qhelp' : 'Package run-time reverse dependencies (i.e. which other packages depend on this package)',
'clclass' : 'brought_in_by',
}
if ( variant == 'target' ):
@@ -674,23 +748,15 @@ his package',
'clclass' : 'layer_commit',
'hidden' : 1,
}
- tc_layerDir = {
- 'name':'Layer directory',
- 'qhelp':'Location in disk of the layer providing the recipe that builds the package',
- 'orderfield' : _get_toggle_order( request, "recipe__layer_version__layer__local_path" ),
- 'ordericon' : _get_toggle_order_icon( request, "recipe__layer_version__layer__local_path" )\
-,
- 'clclass' : 'layer_directory',
- 'hidden' : 1,
- }
+
context = {
'objectname': variant,
- 'build' : Build.objects.filter( pk = build_id )[ 0 ],
+ 'build' : build,
'target' : Target.objects.filter( pk = target_id )[ 0 ],
'objects' : packages,
'packages_sum' : packages_sum[ 'installed_size__sum' ],
'object_search_display': "packages included",
- 'default_orderby' : default_orderby,
+ 'default_orderby' : orderby,
'tablecols' : [
tc_package,
tc_packageVersion,
@@ -704,10 +770,24 @@ his package',
tc_layer,
tc_layerBranch,
tc_layerCommit,
- tc_layerDir,
]
}
- return( render( request, template, context ))
+
+ if not toastermain.settings.MANAGED or build.project is None:
+
+ tc_layerDir = {
+ 'name':'Layer directory',
+ 'qhelp':'Location in disk of the layer providing the recipe that builds the package',
+ 'orderfield' : _get_toggle_order( request, "recipe__layer_version__layer__local_path" ),
+ 'ordericon' : _get_toggle_order_icon( request, "recipe__layer_version__layer__local_path" ),
+ 'clclass' : 'layer_directory',
+ 'hidden' : 1,
+ }
+ context['tablecols'].append(tc_layerDir)
+
+ response = render(request, template, context)
+ _save_parameters_cookies(response, pagesize, orderby, request)
+ return response
def target( request, build_id, target_id ):
return( target_common( request, build_id, target_id, "target" ))
@@ -730,7 +810,6 @@ class LazyEncoder(json.JSONEncoder):
return super(LazyEncoder, self).default(obj)
from toastergui.templatetags.projecttags import filtered_filesizeformat
-from django import template
import os
def _get_dir_entries(build_id, target_id, start):
node_str = {
@@ -785,9 +864,7 @@ def _get_dir_entries(build_id, target_id, start):
# don't use resolved path from above, show immediate link-to
if o.sym_target_id != "" and o.sym_target_id != None:
entry['link_to'] = Target_File.objects.get(pk=o.sym_target_id).path
- t = template.Template('{% load projecttags %} {{ size|filtered_filesizeformat }}')
- c = template.Context({'size': o.size})
- entry['size'] = str(t.render(c))
+ entry['size'] = filtered_filesizeformat(o.size)
if entry['link_to'] != None:
entry['permission'] = node_str[o.inodetype] + o.permission
else:
@@ -796,13 +873,16 @@ def _get_dir_entries(build_id, target_id, start):
entry['group'] = o.group
response.append(entry)
- except:
+ except Exception as e:
+ print "Exception ", e
+ import traceback
+ traceback.print_exc(e)
pass
# sort by directories first, then by name
rsorted = sorted(response, key=lambda entry : entry['name'])
rsorted = sorted(rsorted, key=lambda entry : entry['isdir'], reverse=True)
- return json.dumps(rsorted, cls=LazyEncoder)
+ return json.dumps(rsorted, cls=LazyEncoder).replace('</', '<\\/')
def dirinfo(request, build_id, target_id, file_path=None):
template = "dirinfo.html"
@@ -825,8 +905,8 @@ def dirinfo(request, build_id, target_id, file_path=None):
if head != sep:
dir_list.insert(0, head)
- context = { 'build': Build.objects.filter(pk=build_id)[0],
- 'target': Target.objects.filter(pk=target_id)[0],
+ context = { 'build': Build.objects.get(pk=build_id),
+ 'target': Target.objects.get(pk=target_id),
'packages_sum': packages_sum['installed_size__sum'],
'objects': objects,
'dir_list': dir_list,
@@ -889,32 +969,31 @@ def tasks_common(request, build_id, variant, task_anchor):
title_variant='Time'
object_search_display="time data"
filter_search_display="tasks"
- mandatory_parameters = { 'count': 25, 'page' : 1, 'orderby':'elapsed_time:-'};
- default_orderby = 'elapsed_time:-';
+ (pagesize, orderby) = _get_parameters_values(request, 25, 'elapsed_time:-')
elif 'diskio' == variant:
title_variant='Disk I/O'
object_search_display="disk I/O data"
filter_search_display="tasks"
- mandatory_parameters = { 'count': 25, 'page' : 1, 'orderby':'disk_io:-'};
- default_orderby = 'disk_io:-';
+ (pagesize, orderby) = _get_parameters_values(request, 25, 'disk_io:-')
elif 'cpuusage' == variant:
title_variant='CPU usage'
object_search_display="CPU usage data"
filter_search_display="tasks"
- mandatory_parameters = { 'count': 25, 'page' : 1, 'orderby':'cpu_usage:-'};
- default_orderby = 'cpu_usage:-';
+ (pagesize, orderby) = _get_parameters_values(request, 25, 'cpu_usage:-')
else :
title_variant='Tasks'
object_search_display="tasks"
filter_search_display="tasks"
- mandatory_parameters = { 'count': 25, 'page' : 1, 'orderby':'order:+'};
- default_orderby = 'order:+';
+ (pagesize, orderby) = _get_parameters_values(request, 25, 'order:+')
+
+
+ mandatory_parameters = { 'count': pagesize, 'page' : 1, 'orderby': orderby }
template = 'tasks.html'
retval = _verify_parameters( request.GET, mandatory_parameters )
if retval:
if task_anchor:
- mandatory_parameters['anchor']=task_anchor
+ mandatory_parameters['anchor']=task_anchor
return _redirect_parameters( variant, request.GET, mandatory_parameters, build_id = build_id)
(filter_string, search_term, ordering_string) = _search_tuple(request, Task)
queryset_all = Task.objects.filter(build=build_id).exclude(order__isnull=True).exclude(outcome=Task.OUTCOME_NA)
@@ -928,22 +1007,22 @@ def tasks_common(request, build_id, variant, task_anchor):
else:
queryset = _get_queryset(Task, queryset_all, filter_string, search_term, ordering_string, 'order')
- # compute the anchor's page
+ # compute the anchor's page
if anchor:
- request.GET = request.GET.copy()
+ request.GET = request.GET.copy()
del request.GET['anchor']
i=0
a=int(anchor)
- count_per_page=int(request.GET.get('count', 100))
+ count_per_page=int(pagesize)
for task in queryset.iterator():
if a == task.order:
- new_page= (i / count_per_page ) + 1
- request.GET.__setitem__('page', new_page)
- mandatory_parameters['page']=new_page
- return _redirect_parameters( variant, request.GET, mandatory_parameters, build_id = build_id)
+ new_page= (i / count_per_page ) + 1
+ request.GET.__setitem__('page', new_page)
+ mandatory_parameters['page']=new_page
+ return _redirect_parameters( variant, request.GET, mandatory_parameters, build_id = build_id)
i += 1
- tasks = _build_page_range(Paginator(queryset, request.GET.get('count', 100)),request.GET.get('page', 1))
+ tasks = _build_page_range(Paginator(queryset, pagesize),request.GET.get('page', 1))
# define (and modify by variants) the 'tablecols' members
tc_order={
@@ -1067,14 +1146,15 @@ def tasks_common(request, build_id, variant, task_anchor):
}
if 'diskio' == variant: tc_diskio['hidden']='0'; del tc_diskio['clclass']; tc_cache['hidden']='1';
+ build = Build.objects.get(pk=build_id)
context = { 'objectname': variant,
'object_search_display': object_search_display,
'filter_search_display': filter_search_display,
'title': title_variant,
- 'build': Build.objects.filter(pk=build_id)[0],
+ 'build': build,
'objects': tasks,
- 'default_orderby' : default_orderby,
+ 'default_orderby' : orderby,
'search_term': search_term,
'total_count': queryset_with_search.count(),
'tablecols':[
@@ -1088,10 +1168,15 @@ def tasks_common(request, build_id, variant, task_anchor):
tc_time,
tc_cpu,
tc_diskio,
- tc_log,
]}
- return render(request, template, context)
+
+ if not toastermain.settings.MANAGED or build.project is None:
+ context['tablecols'].append(tc_log)
+
+ response = render(request, template, context)
+ _save_parameters_cookies(response, pagesize, orderby, request)
+ return response
def tasks(request, build_id):
return tasks_common(request, build_id, 'tasks', '')
@@ -1111,7 +1196,8 @@ def cpuusage(request, build_id):
def recipes(request, build_id):
template = 'recipes.html'
- mandatory_parameters = { 'count': 100, 'page' : 1, 'orderby':'name:+'};
+ (pagesize, orderby) = _get_parameters_values(request, 100, 'name:+')
+ mandatory_parameters = { 'count': pagesize, 'page' : 1, 'orderby' : orderby }
retval = _verify_parameters( request.GET, mandatory_parameters )
if retval:
return _redirect_parameters( 'recipes', request.GET, mandatory_parameters, build_id = build_id)
@@ -1119,7 +1205,7 @@ def recipes(request, build_id):
queryset = Recipe.objects.filter(layer_version__id__in=Layer_Version.objects.filter(build=build_id))
queryset = _get_queryset(Recipe, queryset, filter_string, search_term, ordering_string, 'name')
- recipes = _build_page_range(Paginator(queryset, request.GET.get('count', 100)),request.GET.get('page', 1))
+ recipes = _build_page_range(Paginator(queryset, pagesize),request.GET.get('page', 1))
# prefetch the forward and reverse recipe dependencies
deps = { }; revs = { }
@@ -1134,9 +1220,11 @@ def recipes(request, build_id):
revlist.append(recipe_dep)
revs[recipe.id] = revlist
+ build = Build.objects.get(pk=build_id)
+
context = {
'objectname': 'recipes',
- 'build': Build.objects.filter(pk=build_id)[0],
+ 'build': build,
'objects': recipes,
'default_orderby' : 'name:+',
'recipe_deps' : deps,
@@ -1207,6 +1295,11 @@ def recipes(request, build_id):
'qhelp':'The Git commit of the layer providing the recipe',
'clclass': 'layer_version__layer__commit', 'hidden': 1,
},
+ ]
+ }
+
+ if not toastermain.settings.MANAGED or build.project is None:
+ context['tablecols'].append(
{
'name':'Layer directory',
'qhelp':'Path to the layer providing the recipe',
@@ -1214,26 +1307,34 @@ def recipes(request, build_id):
'ordericon':_get_toggle_order_icon(request, "layer_version__layer__local_path"),
'orderkey' : 'layer_version__layer__local_path',
'clclass': 'layer_version__layer__local_path', 'hidden': 1,
- },
- ]
- }
+ })
- return render(request, template, context)
+ response = render(request, template, context)
+ _save_parameters_cookies(response, pagesize, orderby, request)
+ return response
def configuration(request, build_id):
template = 'configuration.html'
variables = Variable.objects.filter(build=build_id)
- BB_VERSION=variables.filter(variable_name='BB_VERSION')[0].variable_value
- BUILD_SYS=variables.filter(variable_name='BUILD_SYS')[0].variable_value
- NATIVELSBSTRING=variables.filter(variable_name='NATIVELSBSTRING')[0].variable_value
- TARGET_SYS=variables.filter(variable_name='TARGET_SYS')[0].variable_value
- MACHINE=variables.filter(variable_name='MACHINE')[0].variable_value
- DISTRO=variables.filter(variable_name='DISTRO')[0].variable_value
- DISTRO_VERSION=variables.filter(variable_name='DISTRO_VERSION')[0].variable_value
- TUNE_FEATURES=variables.filter(variable_name='TUNE_FEATURES')[0].variable_value
- TARGET_FPU=variables.filter(variable_name='TARGET_FPU')[0].variable_value
+
+ def _get_variable_or_empty(variable_name):
+ from django.core.exceptions import ObjectDoesNotExist
+ try:
+ return variables.get(variable_name=variable_name).variable_value
+ except ObjectDoesNotExist:
+ return ''
+
+ BB_VERSION=_get_variable_or_empty(variable_name='BB_VERSION')
+ BUILD_SYS=_get_variable_or_empty(variable_name='BUILD_SYS')
+ NATIVELSBSTRING=_get_variable_or_empty(variable_name='NATIVELSBSTRING')
+ TARGET_SYS=_get_variable_or_empty(variable_name='TARGET_SYS')
+ MACHINE=_get_variable_or_empty(variable_name='MACHINE')
+ DISTRO=_get_variable_or_empty(variable_name='DISTRO')
+ DISTRO_VERSION=_get_variable_or_empty(variable_name='DISTRO_VERSION')
+ TUNE_FEATURES=_get_variable_or_empty(variable_name='TUNE_FEATURES')
+ TARGET_FPU=_get_variable_or_empty(variable_name='TARGET_FPU')
targets = Target.objects.filter(build=build_id)
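
The _get_variable_or_empty() closure above turns a missing configuration variable into an empty string instead of the IndexError the old [0] indexing produced. The same fallback shape, shown with a plain dict (illustrative only):

    def get_variable_or_empty(variables, name):
        # return the value when present, an empty string otherwise
        try:
            return variables[name]
        except KeyError:
            return ''

    variables = {'MACHINE': 'qemux86', 'DISTRO': 'poky'}
    print(get_variable_or_empty(variables, 'MACHINE'))     # qemux86
    print(get_variable_or_empty(variables, 'TARGET_FPU'))  # (empty string)
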
@@ -1241,7 +1342,7 @@ def configuration(request, build_id):
'objectname': 'configuration',
'object_search_display':'variables',
'filter_search_display':'variables',
- 'build': Build.objects.filter(pk=build_id)[0],
+ 'build': Build.objects.get(pk=build_id),
'BB_VERSION':BB_VERSION,
'BUILD_SYS':BUILD_SYS,
'NATIVELSBSTRING':NATIVELSBSTRING,
@@ -1258,7 +1359,8 @@ def configuration(request, build_id):
def configvars(request, build_id):
template = 'configvars.html'
- mandatory_parameters = { 'count': 100, 'page' : 1, 'orderby':'variable_name:+', 'filter':'description__regex:.+'};
+ (pagesize, orderby) = _get_parameters_values(request, 100, 'variable_name:+')
+ mandatory_parameters = { 'count': pagesize, 'page' : 1, 'orderby' : orderby, 'filter' : 'description__regex:.+' }
retval = _verify_parameters( request.GET, mandatory_parameters )
(filter_string, search_term, ordering_string) = _search_tuple(request, Variable)
if retval:
@@ -1273,7 +1375,7 @@ def configvars(request, build_id):
# remove records where the value is empty AND there are no history files
queryset = queryset.exclude(variable_value='',vhistory__file_name__isnull=True)
- variables = _build_page_range(Paginator(queryset, request.GET.get('count', 50)), request.GET.get('page', 1))
+ variables = _build_page_range(Paginator(queryset, pagesize), request.GET.get('page', 1))
# show all matching files (not just the last one)
file_filter= search_term + ":"
@@ -1285,14 +1387,14 @@ def configvars(request, build_id):
file_filter += 'conf/distro/'
if filter_string.find('/bitbake.conf') > 0:
file_filter += '/bitbake.conf'
- build_dir=re.sub("/tmp/log/.*","",Build.objects.filter(pk=build_id)[0].cooker_log_path)
+ build_dir=re.sub("/tmp/log/.*","",Build.objects.get(pk=build_id).cooker_log_path)
context = {
'objectname': 'configvars',
'object_search_display':'BitBake variables',
'filter_search_display':'variables',
'file_filter': file_filter,
- 'build': Build.objects.filter(pk=build_id)[0],
+ 'build': Build.objects.get(pk=build_id),
'objects' : variables,
'total_count':queryset_with_search.count(),
'default_orderby' : 'variable_name:+',
@@ -1339,12 +1441,14 @@ def configvars(request, build_id):
],
}
- return render(request, template, context)
-
+ response = render(request, template, context)
+ _save_parameters_cookies(response, pagesize, orderby, request)
+ return response
def bpackage(request, build_id):
template = 'bpackage.html'
- mandatory_parameters = { 'count': 100, 'page' : 1, 'orderby':'name:+'};
+ (pagesize, orderby) = _get_parameters_values(request, 100, 'name:+')
+ mandatory_parameters = { 'count' : pagesize, 'page' : 1, 'orderby' : orderby }
retval = _verify_parameters( request.GET, mandatory_parameters )
if retval:
return _redirect_parameters( 'packages', request.GET, mandatory_parameters, build_id = build_id)
@@ -1352,11 +1456,11 @@ def bpackage(request, build_id):
queryset = Package.objects.filter(build = build_id).filter(size__gte=0)
queryset = _get_queryset(Package, queryset, filter_string, search_term, ordering_string, 'name')
- packages = _build_page_range(Paginator(queryset, request.GET.get('count', 100)),request.GET.get('page', 1))
+ packages = _build_page_range(Paginator(queryset, pagesize),request.GET.get('page', 1))
context = {
'objectname': 'packages built',
- 'build': Build.objects.filter(pk=build_id)[0],
+ 'build': Build.objects.get(pk=build_id),
'objects' : packages,
'default_orderby' : 'name:+',
'tablecols':[
@@ -1432,18 +1536,20 @@ def bpackage(request, build_id):
]
}
- return render(request, template, context)
+ response = render(request, template, context)
+ _save_parameters_cookies(response, pagesize, orderby, request)
+ return response
def bfile(request, build_id, package_id):
template = 'bfile.html'
files = Package_File.objects.filter(package = package_id)
- context = {'build': Build.objects.filter(pk=build_id)[0], 'objects' : files}
+ context = {'build': Build.objects.get(pk=build_id), 'objects' : files}
return render(request, template, context)
def tpackage(request, build_id, target_id):
template = 'package.html'
packages = map(lambda x: x.package, list(Target_Installed_Package.objects.filter(target=target_id)))
- context = {'build': Build.objects.filter(pk=build_id)[0], 'objects' : packages}
+ context = {'build': Build.objects.get(pk=build_id), 'objects' : packages}
return render(request, template, context)
def layer(request):
@@ -1465,7 +1571,7 @@ def layer_versions_recipes(request, layerversion_id):
recipes = Recipe.objects.filter(layer_version__id = layerversion_id)
context = {'objects': recipes,
- 'layer_version' : Layer_Version.objects.filter( id = layerversion_id )[0]
+ 'layer_version' : Layer_Version.objects.get( id = layerversion_id )
}
return render(request, template, context)
@@ -1587,7 +1693,8 @@ def package_built_detail(request, build_id, package_id):
# follow convention for pagination w/ search although not used for this view
queryset = Package_File.objects.filter(package_id__exact=package_id)
- mandatory_parameters = { 'count': 25, 'page' : 1, 'orderby':'path:+'};
+ (pagesize, orderby) = _get_parameters_values(request, 25, 'path:+')
+ mandatory_parameters = { 'count': pagesize, 'page' : 1, 'orderby' : orderby }
retval = _verify_parameters( request.GET, mandatory_parameters )
if retval:
return _redirect_parameters( 'package_built_detail', request.GET, mandatory_parameters, build_id = build_id, package_id = package_id)
@@ -1595,10 +1702,10 @@ def package_built_detail(request, build_id, package_id):
(filter_string, search_term, ordering_string) = _search_tuple(request, Package_File)
paths = _get_queryset(Package_File, queryset, filter_string, search_term, ordering_string, 'path')
- package = Package.objects.filter(pk=package_id)[0]
+ package = Package.objects.get(pk=package_id)
package.fullpackagespec = _get_fullpackagespec(package)
context = {
- 'build' : Build.objects.filter(pk=build_id)[0],
+ 'build' : Build.objects.get(pk=build_id),
'package' : package,
'dependency_count' : _get_package_dependency_count(package, -1, False),
'objects' : paths,
@@ -1618,18 +1725,21 @@ def package_built_detail(request, build_id, package_id):
}
if paths.all().count() < 2:
context['disable_sort'] = True;
- return render(request, template, context)
+
+ response = render(request, template, context)
+ _save_parameters_cookies(response, pagesize, orderby, request)
+ return response
def package_built_dependencies(request, build_id, package_id):
template = "package_built_dependencies.html"
if Build.objects.filter(pk=build_id).count() == 0 :
return redirect(builds)
- package = Package.objects.filter(pk=package_id)[0]
+ package = Package.objects.get(pk=package_id)
package.fullpackagespec = _get_fullpackagespec(package)
dependencies = _get_package_dependencies(package_id)
context = {
- 'build' : Build.objects.filter(pk=build_id)[0],
+ 'build' : Build.objects.get(pk=build_id),
'package' : package,
'runtime_deps' : dependencies['runtime_deps'],
'other_deps' : dependencies['other_deps'],
@@ -1643,9 +1753,9 @@ def package_included_detail(request, build_id, target_id, package_id):
if Build.objects.filter(pk=build_id).count() == 0 :
return redirect(builds)
-
# follow convention for pagination w/ search although not used for this view
- mandatory_parameters = { 'count': 25, 'page' : 1, 'orderby':'path:+'};
+ (pagesize, orderby) = _get_parameters_values(request, 25, 'path:+')
+ mandatory_parameters = { 'count': pagesize, 'page' : 1, 'orderby' : orderby }
retval = _verify_parameters( request.GET, mandatory_parameters )
if retval:
return _redirect_parameters( 'package_included_detail', request.GET, mandatory_parameters, build_id = build_id, target_id = target_id, package_id = package_id)
@@ -1654,12 +1764,12 @@ def package_included_detail(request, build_id, target_id, package_id):
queryset = Package_File.objects.filter(package_id__exact=package_id)
paths = _get_queryset(Package_File, queryset, filter_string, search_term, ordering_string, 'path')
- package = Package.objects.filter(pk=package_id)[0]
+ package = Package.objects.get(pk=package_id)
package.fullpackagespec = _get_fullpackagespec(package)
package.alias = _get_package_alias(package)
- target = Target.objects.filter(pk=target_id)[0]
+ target = Target.objects.get(pk=target_id)
context = {
- 'build' : Build.objects.filter(pk=build_id)[0],
+ 'build' : Build.objects.get(pk=build_id),
'target' : target,
'package' : package,
'reverse_count' : _get_package_reverse_dep_count(package, target_id),
@@ -1680,22 +1790,24 @@ def package_included_detail(request, build_id, target_id, package_id):
]
}
if paths.all().count() < 2:
- context['disable_sort'] = True;
- return render(request, template, context)
+ context['disable_sort'] = True
+ response = render(request, template, context)
+ _save_parameters_cookies(response, pagesize, orderby, request)
+ return response
def package_included_dependencies(request, build_id, target_id, package_id):
template = "package_included_dependencies.html"
if Build.objects.filter(pk=build_id).count() == 0 :
return redirect(builds)
- package = Package.objects.filter(pk=package_id)[0]
+ package = Package.objects.get(pk=package_id)
package.fullpackagespec = _get_fullpackagespec(package)
package.alias = _get_package_alias(package)
- target = Target.objects.filter(pk=target_id)[0]
+ target = Target.objects.get(pk=target_id)
dependencies = _get_package_dependencies(package_id, target_id)
context = {
- 'build' : Build.objects.filter(pk=build_id)[0],
+ 'build' : Build.objects.get(pk=build_id),
'package' : package,
'target' : target,
'runtime_deps' : dependencies['runtime_deps'],
@@ -1710,7 +1822,8 @@ def package_included_reverse_dependencies(request, build_id, target_id, package_
if Build.objects.filter(pk=build_id).count() == 0 :
return redirect(builds)
- mandatory_parameters = { 'count': 25, 'page' : 1, 'orderby':'package__name:+'};
+ (pagesize, orderby) = _get_parameters_values(request, 25, 'package__name:+')
+ mandatory_parameters = { 'count': pagesize, 'page' : 1, 'orderby': orderby }
retval = _verify_parameters( request.GET, mandatory_parameters )
if retval:
return _redirect_parameters( 'package_included_reverse_dependencies', request.GET, mandatory_parameters, build_id = build_id, target_id = target_id, package_id = package_id)
@@ -1719,16 +1832,16 @@ def package_included_reverse_dependencies(request, build_id, target_id, package_
queryset = Package_Dependency.objects.select_related('depends_on__name', 'depends_on__size').filter(depends_on=package_id, target_id=target_id, dep_type=Package_Dependency.TYPE_TRDEPENDS)
objects = _get_queryset(Package_Dependency, queryset, filter_string, search_term, ordering_string, 'package__name')
- package = Package.objects.filter(pk=package_id)[0]
+ package = Package.objects.get(pk=package_id)
package.fullpackagespec = _get_fullpackagespec(package)
package.alias = _get_package_alias(package)
- target = Target.objects.filter(pk=target_id)[0]
+ target = Target.objects.get(pk=target_id)
for o in objects:
if o.package.version != '':
o.package.version += '-' + o.package.revision
o.alias = _get_package_alias(o.package)
context = {
- 'build' : Build.objects.filter(pk=build_id)[0],
+ 'build' : Build.objects.get(pk=build_id),
'package' : package,
'target' : target,
'objects' : objects,
@@ -1752,8 +1865,10 @@ def package_included_reverse_dependencies(request, build_id, target_id, package_
]
}
if objects.all().count() < 2:
- context['disable_sort'] = True;
- return render(request, template, context)
+ context['disable_sort'] = True
+ response = render(request, template, context)
+ _save_parameters_cookies(response, pagesize, orderby, request)
+ return response
def image_information_dir(request, build_id, target_id, packagefile_id):
# stubbed for now
@@ -1772,6 +1887,7 @@ if toastermain.settings.MANAGED:
from django.contrib.auth.decorators import login_required
from orm.models import Project, ProjectLayer, ProjectTarget, ProjectVariable
+ from orm.models import Branch, LayerSource, ToasterSetting, Release, Machine, LayerVersionDependency
from bldcontrol.models import BuildRequest
import traceback
@@ -1780,10 +1896,16 @@ if toastermain.settings.MANAGED:
# the context processor that supplies data used across all the pages
def managedcontextprocessor(request):
- return {
+ ret = {
"projects": Project.objects.all(),
"MANAGED" : toastermain.settings.MANAGED
}
+ if 'project_id' in request.session:
+ try:
+ ret['project'] = Project.objects.get(pk = request.session['project_id'])
+ except Project.DoesNotExist:
+ del request.session['project_id']
+ return ret
# new project
def newproject(request):
@@ -1791,14 +1913,19 @@ if toastermain.settings.MANAGED:
context = {
'email': request.user.email if request.user.is_authenticated() else '',
'username': request.user.username if request.user.is_authenticated() else '',
+ 'releases': Release.objects.order_by("description"),
}
+ try:
+ context['defaultbranch'] = ToasterSetting.objects.get(name = "DEFAULT_RELEASE").value
+ except ToasterSetting.DoesNotExist:
+ pass
if request.method == "GET":
# render new project page
return render(request, template, context)
elif request.method == "POST":
- mandatory_fields = ['projectname', 'email', 'username', 'projectversion']
+ mandatory_fields = ['projectname', 'projectversion']
try:
# make sure we have values for all mandatory_fields
if reduce( lambda x, y: x or y, map(lambda x: len(request.POST.get(x, '')) == 0, mandatory_fields)):
@@ -1807,24 +1934,22 @@ if toastermain.settings.MANAGED:
", ".join([x for x in mandatory_fields if len(request.POST.get(x, '')) == 0 ]))
if not request.user.is_authenticated():
- user = authenticate(username = request.POST['username'], password = 'nopass')
+ user = authenticate(username = request.POST.get('username', '_anonuser'), password = 'nopass')
if user is None:
- user = User.objects.create_user(username = request.POST['username'], email = request.POST['email'], password = "nopass")
+ user = User.objects.create_user(username = request.POST.get('username', '_anonuser'), email = request.POST.get('email', ''), password = "nopass")
user = authenticate(username = user.username, password = 'nopass')
login(request, user)
# save the project
- prj = Project.objects.create_project(name = request.POST['projectname'],
- branch = request.POST['projectversion'].split(" ")[0],
- short_description=request.POST['projectversion'].split(" ")[1:])
+ prj = Project.objects.create_project(name = request.POST['projectname'], release = Release.objects.get(pk = request.POST['projectversion']))
prj.user_id = request.user.pk
prj.save()
- return redirect(reverse(project, args = (prj.pk,)))
+ return redirect(reverse(project, args = (prj.pk,)) + "#/newproject")
except (IntegrityError, BadParameterException) as e:
# fill in page with previously submitted values
- map(lambda x: context.__setitem__(x, request.POST[x]), mandatory_fields)
+ map(lambda x: context.__setitem__(x, request.POST.get(x, "-- missing")), mandatory_fields)
if isinstance(e, IntegrityError) and "username" in str(e):
context['alert'] = "Your chosen username is already used"
else:
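
The mandatory-field check above relies on a reduce/map pair over request.POST. A standalone sketch of that idiom with a plain dict standing in for the POST data (the functools import is only needed on Python 3; the patch targets Python 2, where reduce is a builtin):

    from functools import reduce  # builtin on the Python 2 code in the patch

    post = {'projectname': 'demo', 'projectversion': ''}
    mandatory_fields = ['projectname', 'projectversion']

    missing = reduce(lambda x, y: x or y,
                     map(lambda f: len(post.get(f, '')) == 0, mandatory_fields))
    print(missing)   # True -- 'projectversion' is present but empty
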
@@ -1833,6 +1958,27 @@ if toastermain.settings.MANAGED:
raise Exception("Invalid HTTP method for this page")
+ # returns a list of the most recent builds; used in the Project page, xhr_ updates, and other places as needed
+ def _project_recent_build_list(prj):
+ return map(lambda x: {
+ "id": x.pk,
+ "targets" : map(lambda y: {"target": y.target, "task": y.task }, x.brtarget_set.all()),
+ "status": x.get_state_display(),
+ "errors": map(lambda y: {"type": y.errtype, "msg": y.errmsg, "tb": y.traceback}, x.brerror_set.all()),
+ "build" : map( lambda y: {"id": y.pk,
+ "status": y.get_outcome_display(),
+ "completed_on" : y.completed_on.strftime('%s')+"000",
+ "build_time" : (y.completed_on - y.started_on).total_seconds(),
+ "build_page_url" : reverse('builddashboard', args=(y.pk,)),
+ 'build_time_page_url': reverse('buildtime', args=(y.pk,)),
+ "errors": y.errors_no,
+ "warnings": y.warnings_no,
+ "completeper": y.completeper(),
+ "eta": y.eta().strftime('%s')+"000"}, Build.objects.filter(buildrequest = x)),
+ }, list(prj.buildrequest_set.filter(Q(state__lt=BuildRequest.REQ_COMPLETED) or Q(state=BuildRequest.REQ_DELETED)).order_by("-pk")) +
+ list(prj.buildrequest_set.filter(state__in=[BuildRequest.REQ_COMPLETED, BuildRequest.REQ_FAILED]).order_by("-pk")[:3]))
+
+
# Shows the edit project page
def project(request, pid):
template = "project.html"
@@ -1846,79 +1992,931 @@ if toastermain.settings.MANAGED:
except User.DoesNotExist:
puser = None
+ # we use implicit knowledge of the current user's project to filter layer information
+ request.session['project_id'] = prj.id
+
+ from collections import Counter
+ freqtargets = []
+ try:
+ freqtargets += map(lambda x: x.target, reduce(lambda x, y: x + y, map(lambda x: list(x.target_set.all()), Build.objects.filter(project = prj, outcome__lt = Build.IN_PROGRESS))))
+ freqtargets += map(lambda x: x.target, reduce(lambda x, y: x + y, map(lambda x: list(x.brtarget_set.all()), BuildRequest.objects.filter(project = prj, state__lte = BuildRequest.REQ_QUEUED))))
+ except TypeError:
+ pass
+ freqtargets = Counter(freqtargets)
+ freqtargets = sorted(freqtargets, key = lambda x: freqtargets[x])
+
context = {
"project" : prj,
+ "completedbuilds": Build.objects.filter(project = prj).exclude(outcome = Build.IN_PROGRESS),
+ "prj" : {"name": prj.name, "release": { "id": prj.release.pk, "name": prj.release.name, "desc": prj.release.description}},
#"buildrequests" : prj.buildrequest_set.filter(state=BuildRequest.REQ_QUEUED),
- "buildrequests" : map(lambda x: (x, {"machine" : x.brvariable_set.filter(name="MACHINE")[0]}), prj.buildrequest_set.order_by("-pk")),
- "builds" : prj.build_set.all(),
- "puser": puser,
+ "builds" : _project_recent_build_list(prj),
+ "layers" : map(lambda x: {
+ "id": x.layercommit.pk,
+ "orderid": x.pk,
+ "name" : x.layercommit.layer.name,
+ "giturl": x.layercommit.layer.vcs_url,
+ "url": x.layercommit.layer.layer_index_url,
+ "layerdetailurl": reverse("layerdetails", args=(x.layercommit.pk,)),
+ # This branch name is actually the release
+ "branch" : { "name" : x.layercommit.commit, "layersource" : x.layercommit.up_branch.layer_source.name}},
+ prj.projectlayer_set.all().order_by("id")),
+ "targets" : map(lambda x: {"target" : x.target, "task" : x.task, "pk": x.pk}, prj.projecttarget_set.all()),
+ "freqtargets": freqtargets,
+ "releases": map(lambda x: {"id": x.pk, "name": x.name, "description":x.description}, Release.objects.all()),
}
try:
- context["machine"] = prj.projectvariable_set.get(name="MACHINE").value
+ context["machine"] = {"name": prj.projectvariable_set.get(name="MACHINE").value}
except ProjectVariable.DoesNotExist:
- context["machine"] = "-- not set yet"
-
+ context["machine"] = None
try:
context["distro"] = prj.projectvariable_set.get(name="DISTRO").value
except ProjectVariable.DoesNotExist:
context["distro"] = "-- not set yet"
-
- return render(request, template, context)
-
- import json
+ response = render(request, template, context)
+ response['Cache-Control'] = "no-cache, must-revalidate, no-store"
+ response['Pragma'] = "no-cache"
+ return response
+
+ # This is a wrapper for xhr_projectbuild which allows for a project id
+ # which only becomes known client side.
+ def xhr_build(request):
+ if request.POST.has_key("project_id"):
+ pid = request.POST['project_id']
+ return xhr_projectbuild(request, pid)
+ else:
+ raise BadParameterException("invalid project id")
def xhr_projectbuild(request, pid):
try:
if request.method != "POST":
raise BadParameterException("invalid method")
+ request.session['project_id'] = pid
prj = Project.objects.get(id = pid)
- if prj.projecttarget_set.count() == 0:
- raise BadParameterException("no targets selected")
-
- br = prj.schedule_build()
- return HttpResponse(json.dumps({"error":"ok",
- "brtarget" : map(lambda x: x.target, br.brtarget_set.all()),
- "machine" : br.brvariable_set.get(name="MACHINE").value,
- }), content_type = "application/json")
- except Exception as e:
- return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
-
- def xhr_projectedit(request, pid):
- try:
- prj = Project.objects.get(id = pid)
- # add targets
- if 'targetAdd' in request.POST:
- for t in request.POST['targetAdd'].strip().split(" "):
+ if 'buildCancel' in request.POST:
+ for i in request.POST['buildCancel'].strip().split(" "):
+ try:
+ br = BuildRequest.objects.select_for_update().get(project = prj, pk = i, state__lte = BuildRequest.REQ_QUEUED)
+ br.state = BuildRequest.REQ_DELETED
+ br.save()
+ except BuildRequest.DoesNotExist:
+ pass
+
+ if 'buildDelete' in request.POST:
+ for i in request.POST['buildDelete'].strip().split(" "):
+ try:
+ br = BuildRequest.objects.select_for_update().get(project = prj, pk = i, state__lte = BuildRequest.REQ_DELETED).delete()
+ except BuildRequest.DoesNotExist:
+ pass
+
+ if 'targets' in request.POST:
+ ProjectTarget.objects.filter(project = prj).delete()
+ s = str(request.POST['targets'])
+ for t in s.translate(None, ";%|\"").split(" "):
if ":" in t:
target, task = t.split(":")
else:
target = t
task = ""
+ ProjectTarget.objects.create(project = prj, target = target, task = task)
+
+ br = prj.schedule_build()
+
+ return HttpResponse(jsonfilter({"error":"ok",
+ "builds" : _project_recent_build_list(prj),
+ }), content_type = "application/json")
+ except Exception as e:
+ return HttpResponse(jsonfilter({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+ # This is a wrapper for xhr_projectedit which allows for a project id
+ # which only becomes known client side
+ def xhr_projectinfo(request):
+ if request.POST.has_key("project_id") == False:
+ raise BadParameterException("invalid project id")
- pt, created = ProjectTarget.objects.get_or_create(project = prj, target = target, task = task)
- # remove targets
- if 'targetDel' in request.POST:
- for t in request.POST['targetDel'].strip().split(" "):
- pt = ProjectTarget.objects.get(pk = int(t)).delete()
+ return xhr_projectedit(request, request.POST['project_id'])
+ def xhr_projectedit(request, pid):
+ try:
+ prj = Project.objects.get(id = pid)
# add layers
+ if 'layerAdd' in request.POST:
+ for lc in Layer_Version.objects.filter(pk__in=request.POST['layerAdd'].split(",")):
+ ProjectLayer.objects.get_or_create(project = prj, layercommit = lc)
# remove layers
+ if 'layerDel' in request.POST:
+ for t in request.POST['layerDel'].strip().split(" "):
+ pt = ProjectLayer.objects.filter(project = prj, layercommit_id = int(t)).delete()
+
+ if 'projectName' in request.POST:
+ prj.name = request.POST['projectName']
+ prj.save();
+
+ if 'projectVersion' in request.POST:
+ prj.release = Release.objects.get(pk = request.POST['projectVersion'])
+ # we need to change the bitbake version
+ prj.bitbake_version = prj.release.bitbake_version
+ prj.save()
+ # we need to change the layers
+ for i in prj.projectlayer_set.all():
+ # find and add a similarly-named layer on the new branch
+ try:
+ lv = prj.compatible_layerversions(layer_name = i.layercommit.layer.name)[0]
+ ProjectLayer.objects.get_or_create(project = prj, layercommit = lv)
+ except IndexError:
+ pass
+ finally:
+ # get rid of the old entry
+ i.delete()
+
+ if 'machineName' in request.POST:
+ machinevar = prj.projectvariable_set.get(name="MACHINE")
+ machinevar.value=request.POST['machineName']
+ machinevar.save()
# return all project settings
- return HttpResponse(json.dumps( {
+ return HttpResponse(jsonfilter( {
"error": "ok",
- "layers": map(lambda x: (x.name, x.giturl), prj.projectlayer_set.all()),
- "targets" : map(lambda x: {"target" : x.target, "task" : x.task, "pk": x.pk}, prj.projecttarget_set.all()),
+ "layers" : map(lambda x: {"id": x.layercommit.pk, "orderid" : x.pk, "name" : x.layercommit.layer.name, "giturl" : x.layercommit.layer.vcs_url, "url": x.layercommit.layer.layer_index_url, "layerdetailurl": reverse("layerdetails", args=(x.layercommit.layer.pk,)), "branch" : { "name" : x.layercommit.up_branch.name, "layersource" : x.layercommit.up_branch.layer_source.name}}, prj.projectlayer_set.all().order_by("id")),
+ "builds" : _project_recent_build_list(prj),
"variables": map(lambda x: (x.name, x.value), prj.projectvariable_set.all()),
+ "machine": {"name": prj.projectvariable_set.get(name="MACHINE").value},
+ "prj": {"name": prj.name, "release": { "id": prj.release.pk, "name": prj.release.name, "desc": prj.release.description}},
}), content_type = "application/json")
except Exception as e:
- return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+ return HttpResponse(jsonfilter({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+
+ from django.views.decorators.csrf import csrf_exempt
+ @csrf_exempt
+ def xhr_datatypeahead(request):
+ try:
+ prj = None
+ if request.GET.has_key('project_id'):
+ prj = Project.objects.get(pk = request.GET['project_id'])
+ elif 'project_id' in request.session:
+ prj = Project.objects.get(pk = request.session['project_id'])
+ else:
+ raise Exception("No valid project selected")
+
+
+ def _lv_to_dict(x):
+ return {"id": x.pk, "name": x.layer.name, "tooltip": x.layer.vcs_url+" | "+x.commit,
+ "detail": "(" + x.layer.vcs_url + (")" if x.up_branch == None else " | "+x.up_branch.name+")"),
+ "giturl": x.layer.vcs_url, "layerdetailurl" : reverse('layerdetails', args=(x.pk,))}
+
+
+ # returns layers for current project release that are not in the project set, matching the name
+ if request.GET['type'] == "layers":
+ # all layers for the current project
+ queryset_all = prj.compatible_layerversions().filter(layer__name__icontains=request.GET.get('value',''))
+
+ # but not layers with equivalent layers already in project
+ if not request.GET.has_key('include_added'):
+ queryset_all = queryset_all.exclude(pk__in = [x.id for x in prj.projectlayer_equivalent_set()])[:8]
+
+ # and show only the selected layers for this project
+ final_list = set([x.get_equivalents_wpriority(prj)[0] for x in queryset_all])
+
+ return HttpResponse(jsonfilter( { "error":"ok", "list" : map( _lv_to_dict, final_list) }), content_type = "application/json")
+
+
+ # returns layer dependencies for a layer, excluding current project layers
+ if request.GET['type'] == "layerdeps":
+ queryset = prj.compatible_layerversions().exclude(pk__in = [x.id for x in prj.projectlayer_equivalent_set()]).filter(
+ layer__name__in = [ x.depends_on.layer.name for x in LayerVersionDependency.objects.filter(layer_version_id = request.GET['value'])])
+
+ return HttpResponse(jsonfilter( { "error":"ok", "list" : map( _lv_to_dict, queryset) }), content_type = "application/json")
+
+
+
+ # returns layer versions that would be deleted on the new release__pk
+ if request.GET['type'] == "versionlayers":
+ if not 'project_id' in request.session:
+ raise Exception("This call cannot makes no sense outside a project context")
+
+ retval = []
+ for i in prj.projectlayer_set.all():
+ lv = prj.compatible_layerversions(release = Release.objects.get(pk=request.GET['value'])).filter(layer__name = i.layercommit.layer.name)
+ # there is no layer_version with the new release id, and the same name
+ if lv.count() < 1:
+ retval.append(i)
+
+ return HttpResponse(jsonfilter( {"error":"ok",
+ "list" : map( _lv_to_dict, map(lambda x: x.layercommit, retval ))
+ }), content_type = "application/json")
+
+
+ # returns targets provided by current project layers
+ if request.GET['type'] == "targets":
+ queryset_all = Recipe.objects.all()
+ queryset_all = queryset_all.filter(layer_version__in = prj.projectlayer_equivalent_set())
+ return HttpResponse(jsonfilter({ "error":"ok",
+ "list" : map ( lambda x: {"id": x.pk, "name": x.name, "detail":"[" + x.layer_version.layer.name+ (" | " + x.layer_version.up_branch.name + "]" if x.layer_version.up_branch is not None else "]")},
+ queryset_all.filter(name__icontains=request.GET.get('value',''))[:8]),
+
+ }), content_type = "application/json")
+
+ if request.GET['type'] == "machines":
+ queryset_all = Machine.objects.all()
+ if 'project_id' in request.session:
+ queryset_all = queryset_all.filter(layer_version__in = prj.projectlayer_equivalent_set())
+
+ return HttpResponse(jsonfilter({ "error":"ok",
+ "list" : map ( lambda x: {"id": x.pk, "name": x.name, "detail":"[" + x.layer_version.layer.name+ (" | " + x.layer_version.up_branch.name + "]" if x.layer_version.up_branch is not None else "]")},
+ queryset_all.filter(name__icontains=request.GET.get('value',''))[:8]),
+
+ }), content_type = "application/json")
+
+ if request.GET['type'] == "projects":
+ queryset_all = Project.objects.all()
+ ret = { "error": "ok",
+ "list": map (lambda x: {"id":x.pk, "name": x.name},
+ queryset_all.filter(name__icontains=request.GET.get('value',''))[:8])}
+
+ return HttpResponse(jsonfilter(ret), content_type = "application/json")
+
+ raise Exception("Unknown request! " + request.GET.get('type', "No parameter supplied"))
+ except Exception as e:
+ return HttpResponse(jsonfilter({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+
+ def xhr_importlayer(request):
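+ """
+ Imports an external layer into a project (summary of the code below).
+ Expects a POST with 'vcs_url', 'name', 'git_ref' and 'project_id';
+ 'dir_path' is used when the layer version is created, while 'summary'
+ and 'layer_deps' (a comma-separated list of Layer_Version ids) are
+ optional. Returns JSON {"error": "ok", "imported_layer": {...},
+ "deps_added": [...]} on success, or {"error": <hint or message>}
+ otherwise.
+ """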
+ if ('vcs_url' not in request.POST or
+ 'name' not in request.POST or
+ 'git_ref' not in request.POST or
+ 'project_id' not in request.POST):
+ return HttpResponse(jsonfilter({"error": "Missing parameters; requires vcs_url, name, git_ref and project_id"}), content_type = "application/json")
+
+ layers_added = [];
+
+ # Rudimentary check for html tags in any of the submitted values
+ if any("<" in val for val in request.POST.values()):
+ return HttpResponse(jsonfilter({"error": "Invalid character <"}), content_type = "application/json")
+
+ prj = Project.objects.get(pk=request.POST['project_id'])
+
+ # Strip trailing/leading whitespace from all values
+ # put into a new dict because POST one is immutable
+ post_data = dict()
+ for key,val in request.POST.iteritems():
+ post_data[key] = val.strip()
+
+
+ # We need to know what release the current project is so that we
+ # can set the imported layer's up_branch_id
+ prj_branch_name = Release.objects.get(pk=prj.release_id).branch_name
+ up_branch, branch_created = Branch.objects.get_or_create(name=prj_branch_name, layer_source_id=LayerSource.TYPE_IMPORTED)
+
+ layer_source = LayerSource.objects.get(sourcetype=LayerSource.TYPE_IMPORTED)
+ try:
+ layer, layer_created = Layer.objects.get_or_create(name=post_data['name'])
+ except MultipleObjectsReturned:
+ return HttpResponse(jsonfilter({"error": "hint-layer-exists"}), content_type = "application/json")
+
+ if layer:
+ if layer_created:
+ layer.layer_source = layer_source
+ layer.vcs_url = post_data['vcs_url']
+ if 'summary' in post_data:
+ layer.summary = layer.description = post_data['summary']
+
+ layer.up_date = timezone.now()
+ layer.save()
+ else:
+ # We have an existing layer by this name. If the git url is the
+ # same we can just create a new layer version for this layer;
+ # otherwise we need to bail out.
+ if layer.vcs_url != post_data['vcs_url']:
+ return HttpResponse(jsonfilter({"error": "hint-layer-exists-with-different-url" , "current_url" : layer.vcs_url, "current_id": layer.id }), content_type = "application/json")
+
+
+ layer_version, version_created = Layer_Version.objects.get_or_create(layer_source=layer_source, layer=layer, project=prj, up_branch_id=up_branch.id,branch=post_data['git_ref'], commit=post_data['git_ref'], dirpath=post_data['dir_path'])
+
+ if layer_version:
+ if not version_created:
+ return HttpResponse(jsonfilter({"error": "hint-layer-version-exists", "existing_layer_version": layer_version.id }), content_type = "application/json")
+
+ layer_version.up_date = timezone.now()
+ layer_version.save()
+
+ # Add the dependencies specified for this new layer
+ if (post_data.has_key("layer_deps") and
+ version_created and
+ len(post_data["layer_deps"]) > 0):
+ for layer_dep_id in post_data["layer_deps"].split(","):
+
+ layer_dep_obj = Layer_Version.objects.get(pk=layer_dep_id)
+ LayerVersionDependency.objects.get_or_create(layer_version=layer_version, depends_on=layer_dep_obj)
+ # Now add them to the project; we could get an exception
+ # if the project already contains this exact
+ # dependency (e.g. it was modified on another page)
+ try:
+ prj_layer, prj_layer_created = ProjectLayer.objects.get_or_create(layercommit=layer_dep_obj, project=prj)
+ except:
+ continue
+
+ if prj_layer_created:
+ layers_added.append({'id': layer_dep_obj.id, 'name': Layer.objects.get(id=layer_dep_obj.layer_id).name})
+
+
+ # If an old layer version exists in our project then remove it
+ for prj_layers in ProjectLayer.objects.filter(project=prj):
+ dup_layer_v = Layer_Version.objects.filter(id=prj_layers.layercommit_id, layer_id=layer.id)
+ if len(dup_layer_v) > 0:
+ prj_layers.delete()
+
+ # finally add the imported layer (version id) to the project
+ ProjectLayer.objects.create(layercommit=layer_version, project=prj,optional=1)
+
+ else:
+ # We didn't create a layer version so back out now and clean up.
+ if layer_created:
+ layer.delete()
+ return HttpResponse(jsonfilter({"error": "Uncaught error: Could not create layer version"}), content_type = "application/json")
+
+
+ return HttpResponse(jsonfilter({"error": "ok", "imported_layer" : { "name" : layer.name, "id": layer_version.id }, "deps_added": layers_added }), content_type = "application/json")
+
+
+
+ def importlayer(request):
+ template = "importlayer.html"
+ context = {
+ }
+ return render(request, template, context)
+
+
+ def layers(request):
+ if 'project_id' not in request.session:
+ raise Exception("invalid page: cannot show page without a project")
+
+ template = "layers.html"
+ # define here what parameters the view needs in the GET portion in order to
+ # be able to display something. 'count' and 'page' are mandatory for all views
+ # that use paginators.
+ (pagesize, orderby) = _get_parameters_values(request, 100, 'layer__name:+')
+ mandatory_parameters = { 'count': pagesize, 'page' : 1, 'orderby' : orderby };
+ retval = _verify_parameters( request.GET, mandatory_parameters )
+ if retval:
+ return _redirect_parameters( 'layers', request.GET, mandatory_parameters)
+
+ # boilerplate code that takes a request for an object type and returns a queryset
+ # for that object type. copypasta for all needed table searches
+ (filter_string, search_term, ordering_string) = _search_tuple(request, Layer_Version)
+
+ prj = Project.objects.get(pk = request.session['project_id'])
+
+ queryset_all = prj.compatible_layerversions()
+
+ queryset_all = _get_queryset(Layer_Version, queryset_all, filter_string, search_term, ordering_string, '-layer__name')
+
+
+ # retrieve the objects that will be displayed in the table; builds a paginator and gets a page range to display
+ layer_info = _build_page_range(Paginator(queryset_all, request.GET.get('count', 10)),request.GET.get('page', 1))
+
+
+ context = {
+ 'projectlayerset' : jsonfilter(map(lambda x: x.layercommit.id, prj.projectlayer_set.all())),
+ 'objects' : layer_info,
+ 'objectname' : "layers",
+ 'default_orderby' : 'layer__name:+',
+ 'total_count': queryset_all.count(),
+
+ 'tablecols' : [
+ { 'name': 'Layer',
+ 'orderfield': _get_toggle_order(request, "layer__name"),
+ 'ordericon' : _get_toggle_order_icon(request, "layer__name"),
+ },
+ { 'name': 'Description',
+ 'dclass': 'span4',
+ 'clclass': 'description',
+ },
+ { 'name': 'Layer source',
+ 'clclass': 'source',
+ 'qhelp': "Where the layer is coming from, for example, if it's part of the OpenEmbedded collection of layers or if it's a layer you have imported",
+ 'orderfield': _get_toggle_order(request, "layer_source__name"),
+ 'ordericon': _get_toggle_order_icon(request, "layer_source__name"),
+ 'filter': {
+ 'class': 'layer',
+ 'label': 'Show:',
+ 'options': map(lambda x: (x.name + " layers", 'layer_source__pk:' + str(x.id), queryset_all.filter(layer_source__pk = x.id).count() ), LayerSource.objects.all()),
+ }
+ },
+ { 'name': 'Git repository URL',
+ 'dclass': 'span6',
+ 'clclass': 'git-repo', 'hidden': 1,
+ 'qhelp': "The Git repository for the layer source code",
+ },
+ { 'name': 'Subdirectory',
+ 'clclass': 'git-subdir',
+ 'hidden': 1,
+ 'qhelp': "The layer directory within the Git repository",
+ },
+ { 'name': 'Branch, tag or commit',
+ 'clclass': 'branch',
+ 'qhelp': "The Git branch of the layer. For the layers from the OpenEmbedded source, the branch matches the Yocto Project version you selected for this project",
+ },
+ { 'name': 'Dependencies',
+ 'clclass': 'dependencies',
+ 'qhelp': "Other layers a layer depends upon",
+ },
+ { 'name': 'Add | Delete',
+ 'dclass': 'span2',
+ 'qhelp': "Add or delete layers to / from your project ",
+ 'filter': {
+ 'class': 'add-del-layers',
+ 'label': 'Show:',
+ 'options': [
+ ('Layers added to this project', "projectlayer__project:" + str(prj.id), queryset_all.filter(projectlayer__project = prj.id).count()),
+ ('Layers not added to this project', "projectlayer__project:NOT" + str(prj.id), queryset_all.exclude(projectlayer__project = prj.id).count()),
+ ]
+
+ }
+ },
+ ]
+ }
+
+ return render(request, template, context)
+
+ def layerdetails(request, layerid):
+ template = "layerdetails.html"
+ context = {
+ 'layerversion': Layer_Version.objects.get(pk = layerid),
+ }
+ return render(request, template, context)
+
+ def targets(request):
+ if 'project_id' not in request.session:
+ raise Exception("invalid page: cannot show page without a project")
+
+ template = 'targets.html'
+ (pagesize, orderby) = _get_parameters_values(request, 100, 'name:+')
+ mandatory_parameters = { 'count': pagesize, 'page' : 1, 'orderby' : orderby }
+ retval = _verify_parameters( request.GET, mandatory_parameters )
+ if retval:
+ return _redirect_parameters( 'targets', request.GET, mandatory_parameters)
+ (filter_string, search_term, ordering_string) = _search_tuple(request, Recipe)
+
+ prj = Project.objects.get(pk = request.session['project_id'])
+ queryset_all = Recipe.objects.filter(Q(layer_version__up_branch__name= prj.release.name) | Q(layer_version__build__in = prj.build_set.all()))
+
+ queryset_with_search = _get_queryset(Recipe, queryset_all, None, search_term, ordering_string, '-name')
+
+ queryset_with_search = queryset_with_search.prefetch_related("layer_source")
+
+ # retrieve the objects that will be displayed in the table; builds a paginator and gets a page range to display
+ target_info = _build_page_range(Paginator(queryset_with_search, request.GET.get('count', 10)),request.GET.get('page', 1))
+
+
+ context = {
+ 'projectlayerset' : jsonfilter(map(lambda x: x.layercommit.id, prj.projectlayer_set.all())),
+ 'objects' : target_info,
+ 'objectname' : "targets",
+ 'default_orderby' : 'name:+',
+ 'total_count': queryset_with_search.count(),
+
+ 'tablecols' : [
+ { 'name': 'Target',
+ 'orderfield': _get_toggle_order(request, "name"),
+ 'ordericon' : _get_toggle_order_icon(request, "name"),
+ },
+ { 'name': 'Target version',
+ 'dclass': 'span2',
+ },
+ { 'name': 'Description',
+ 'dclass': 'span5',
+ 'clclass': 'description',
+ },
+ { 'name': 'Recipe file',
+ 'clclass': 'recipe-file',
+ 'hidden': 1,
+ 'dclass': 'span5',
+ },
+ { 'name': 'Section',
+ 'clclass': 'target-section',
+ 'hidden': 1,
+ 'orderfield': _get_toggle_order(request, "section"),
+ 'ordericon': _get_toggle_order_icon(request, "section"),
+ },
+ { 'name': 'License',
+ 'clclass': 'license',
+ 'hidden': 1,
+ 'orderfield': _get_toggle_order(request, "license"),
+ 'ordericon': _get_toggle_order_icon(request, "license"),
+ },
+ { 'name': 'Layer',
+ 'clclass': 'layer',
+ 'orderfield': _get_toggle_order(request, "layer_version__layer__name"),
+ 'ordericon': _get_toggle_order_icon(request, "layer_version__layer__name"),
+ },
+ { 'name': 'Layer source',
+ 'clclass': 'source',
+ 'qhelp': "Where the target is coming from, for example, if it's part of the OpenEmbedded collection of targets or if it's a target you have imported",
+ 'orderfield': _get_toggle_order(request, "layer_source__name"),
+ 'ordericon': _get_toggle_order_icon(request, "layer_source__name"),
+ 'filter': {
+ 'class': 'target',
+ 'label': 'Show:',
+ 'options': map(lambda x: ("Targets provided by " + x.name + " layers", 'layer_source__pk:' + str(x.id), queryset_with_search.filter(layer_source__pk = x.id).count() ), LayerSource.objects.all()),
+ }
+ },
+ { 'name': 'Branch, tag or commit',
+ 'clclass': 'branch',
+ 'hidden': 1,
+ },
+ ]
+ }
+
+ if 'project_id' in request.session:
+ context['tablecols'] += [
+ { 'name': 'Build',
+ 'dclass': 'span2',
+ 'qhelp': "Add or delete targets to / from your project ",
+ 'filter': {
+ 'class': 'add-layer',
+ 'label': 'Show:',
+ 'options': [
+ ('Targets provided by layers added to this project', "layer_version__projectlayer__project:" + str(prj.id), queryset_with_search.filter(layer_version__projectlayer__project = prj.id).count()),
+ ('Targets provided by layers not added to this project', "layer_version__projectlayer__project:NOT" + str(prj.id), queryset_with_search.exclude(layer_version__projectlayer__project = prj.id).count()),
+ ]
+
+ }
+ }, ]
+
+
+ return render(request, template, context)
+
+ def machines(request):
+ template = "machines.html"
+ # define here what parameters the view needs in the GET portion in order to
+ # be able to display something. 'count' and 'page' are mandatory for all views
+ # that use paginators.
+ mandatory_parameters = { 'count': 10, 'page' : 1, 'orderby' : 'name:+' };
+ retval = _verify_parameters( request.GET, mandatory_parameters )
+ if retval:
+ return _redirect_parameters( 'machines', request.GET, mandatory_parameters)
+
+ # boilerplate code that takes a request for an object type and returns a queryset
+ # for that object type. copypasta for all needed table searches
+ (filter_string, search_term, ordering_string) = _search_tuple(request, Machine)
+
+ queryset_all = Machine.objects.all()
+# if 'project_id' in request.session:
+# queryset_all = queryset_all.filter(Q(layer_version__up_branch__name = Project.objects.get(request.session['project_id']).release.branch_name) | Q(layer_version__build__in = Project.objects.get(request.session['project_id']).build_set.all()))
+
+ queryset_with_search = _get_queryset(Machine, queryset_all, None, search_term, ordering_string, '-name')
+ queryset = _get_queryset(Machine, queryset_all, filter_string, search_term, ordering_string, '-name')
+
+ # retrieve the objects that will be displayed in the table; builds a paginator and gets a page range to display
+ machine_info = _build_page_range(Paginator(queryset, request.GET.get('count', 10)),request.GET.get('page', 1))
+
+
+ context = {
+ 'objects' : machine_info,
+ 'objectname' : "machines",
+ 'default_orderby' : 'name:+',
+ 'total_count': queryset_with_search.count(),
+
+ 'tablecols' : [
+ { 'name': 'Machine',
+ 'orderfield': _get_toggle_order(request, "name"),
+ 'ordericon' : _get_toggle_order_icon(request, "name"),
+ },
+ { 'name': 'Description',
+ 'dclass': 'span5',
+ 'clclass': 'description',
+ },
+ { 'name': 'Machine file',
+ 'clclass': 'machine-file',
+ 'hidden': 1,
+ },
+ { 'name': 'Layer',
+ 'clclass': 'layer',
+ },
+ { 'name': 'Layer source',
+ 'clclass': 'source',
+ 'qhelp': "Where the machine is coming from, for example, if it's part of the OpenEmbedded collection of machines or if it's a machine you have imported",
+ 'orderfield': _get_toggle_order(request, "layer_source__name"),
+ 'ordericon': _get_toggle_order_icon(request, "layer_source__name"),
+ 'filter': {
+ 'class': 'machine',
+ 'label': 'Show:',
+ 'options': map(lambda x: (x.name, 'layer_source__pk:' + str(x.id), queryset_with_search.filter(layer_source__pk = x.id).count() ), LayerSource.objects.all()),
+ }
+ },
+ { 'name': 'Branch, tag or commit',
+ 'clclass': 'branch',
+ 'hidden': 1,
+ },
+ { 'name': 'Select',
+ 'dclass': 'span2',
+ 'qhelp': "Add or delete machines to / from your project ",
+ },
+
+ ]
+ }
+
+ return render(request, template, context)
+
+ def projectconf(request, pid):
+ template = "projectconf.html"
+ context = {
+ 'configvars': ProjectVariable.objects.filter(project_id = pid),
+ }
+ return render(request, template, context)
+
+ def projectbuilds(request, pid):
+ template = 'projectbuilds.html'
+ # define here what parameters the view needs in the GET portion in order to
+ # be able to display something. 'count' and 'page' are mandatory for all views
+ # that use paginators.
+ mandatory_parameters = { 'count': 10, 'page' : 1, 'orderby' : 'completed_on:-' };
+ retval = _verify_parameters( request.GET, mandatory_parameters )
+
+ # boilerplate code that takes a request for an object type and returns a queryset
+ # for that object type. copypasta for all needed table searches
+ (filter_string, search_term, ordering_string) = _search_tuple(request, Build)
+ queryset_all = Build.objects.all().exclude(outcome = Build.IN_PROGRESS)
+ queryset_with_search = _get_queryset(Build, queryset_all, None, search_term, ordering_string, '-completed_on')
+ queryset = _get_queryset(Build, queryset_all, filter_string, search_term, ordering_string, '-completed_on')
+
+ # retrieve the objects that will be displayed in the table; builds a paginator and gets a page range to display
+ build_info = _build_page_range(Paginator(queryset, request.GET.get('count', 10)),request.GET.get('page', 1))
+
+
+ # set up list of fstypes for each build
+ fstypes_map = {};
+ for build in build_info:
+ targets = Target.objects.filter( build_id = build.id )
+ comma = "";
+ extensions = "";
+ for t in targets:
+ if ( not t.is_image ):
+ continue
+ tif = Target_Image_File.objects.filter( target_id = t.id )
+ for i in tif:
+ s = re.sub('.*tar.bz2', 'tar.bz2', i.file_name)
+ if s == i.file_name:
+ s = re.sub(r'.*\.', '', i.file_name)
+ if re.search(s, extensions) is None:
+ extensions += comma + s
+ comma = ", "
+ fstypes_map[build.id] = extensions
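+ # Worked example of the extension extraction above, with illustrative
+ # (hypothetical) image file names:
+ #   "core-image-minimal-qemux86.tar.bz2" -> "tar.bz2"
+ #   "core-image-minimal-qemux86.ext4"    -> "ext4"
+ # so fstypes_map[build.id] becomes e.g. "tar.bz2, ext4"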
+
+ # send the data to the template
+ context = {
+ 'objects' : build_info,
+ 'objectname' : "builds",
+ 'default_orderby' : 'completed_on:-',
+ 'fstypes' : fstypes_map,
+ 'search_term' : search_term,
+ 'total_count' : queryset_with_search.count(),
+ # Specifies the display of columns for the table, appearance in "Edit columns" box, toggling default show/hide, and specifying filters for columns
+ 'tablecols' : [
+ {'name': 'Outcome', # column with a single filter
+ 'qhelp' : "The outcome tells you if a build successfully completed or failed", # the help button content
+ 'dclass' : "span2", # indication about column width; comes from the design
+ 'orderfield': _get_toggle_order(request, "outcome"), # adds ordering by the field value; default ascending unless clicked from ascending into descending
+ 'ordericon':_get_toggle_order_icon(request, "outcome"),
+ # filter field will set a filter on that column with the specs in the filter description
+ # the class field in the filter has no relation with clclass; they control different aspects of the UI
+ # still, it is recommended for the values to be identical for easy tracking in the generated HTML
+ 'filter' : {'class' : 'outcome',
+ 'label': 'Show:',
+ 'options' : [
+ ('Successful builds', 'outcome:' + str(Build.SUCCEEDED), queryset_with_search.filter(outcome=str(Build.SUCCEEDED)).count()), # this is the field search expression
+ ('Failed builds', 'outcome:'+ str(Build.FAILED), queryset_with_search.filter(outcome=str(Build.FAILED)).count()),
+ ]
+ }
+ },
+ {'name': 'Target', # default column, disabled box, with just the name in the list
+ 'qhelp': "This is the build target or build targets (i.e. one or more recipes or image recipes)",
+ 'orderfield': _get_toggle_order(request, "target__target"),
+ 'ordericon':_get_toggle_order_icon(request, "target__target"),
+ },
+ {'name': 'Machine',
+ 'qhelp': "The machine is the hardware for which you are building a recipe or image recipe",
+ 'orderfield': _get_toggle_order(request, "machine"),
+ 'ordericon':_get_toggle_order_icon(request, "machine"),
+ 'dclass': 'span3'
+ }, # a slightly wider column
+ {'name': 'Started on', 'clclass': 'started_on', 'hidden' : 1, # this is an unchecked box, which hides the column
+ 'qhelp': "The date and time you started the build",
+ 'orderfield': _get_toggle_order(request, "started_on", True),
+ 'ordericon':_get_toggle_order_icon(request, "started_on"),
+ 'filter' : {'class' : 'started_on',
+ 'label': 'Show:',
+ 'options' : [
+ ("Today's builds" , 'started_on__gte:'+timezone.now().strftime("%Y-%m-%d"), queryset_with_search.filter(started_on__gte=timezone.now()).count()),
+ ("Yesterday's builds", 'started_on__gte:'+(timezone.now()-timedelta(hours=24)).strftime("%Y-%m-%d"), queryset_with_search.filter(started_on__gte=(timezone.now()-timedelta(hours=24))).count()),
+ ("This week's builds", 'started_on__gte:'+(timezone.now()-timedelta(days=7)).strftime("%Y-%m-%d"), queryset_with_search.filter(started_on__gte=(timezone.now()-timedelta(days=7))).count()),
+ ]
+ }
+ },
+ {'name': 'Completed on',
+ 'qhelp': "The date and time the build finished",
+ 'orderfield': _get_toggle_order(request, "completed_on", True),
+ 'ordericon':_get_toggle_order_icon(request, "completed_on"),
+ 'orderkey' : 'completed_on',
+ 'filter' : {'class' : 'completed_on',
+ 'label': 'Show:',
+ 'options' : [
+ ("Today's builds", 'completed_on__gte:'+timezone.now().strftime("%Y-%m-%d"), queryset_with_search.filter(completed_on__gte=timezone.now()).count()),
+ ("Yesterday's builds", 'completed_on__gte:'+(timezone.now()-timedelta(hours=24)).strftime("%Y-%m-%d"), queryset_with_search.filter(completed_on__gte=(timezone.now()-timedelta(hours=24))).count()),
+ ("This week's builds", 'completed_on__gte:'+(timezone.now()-timedelta(days=7)).strftime("%Y-%m-%d"), queryset_with_search.filter(completed_on__gte=(timezone.now()-timedelta(days=7))).count()),
+ ]
+ }
+ },
+ {'name': 'Failed tasks', 'clclass': 'failed_tasks', # specifying a clclass will enable the checkbox
+ 'qhelp': "How many tasks failed during the build",
+ 'filter' : {'class' : 'failed_tasks',
+ 'label': 'Show:',
+ 'options' : [
+ ('Builds with failed tasks', 'task_build__outcome:4', queryset_with_search.filter(task_build__outcome=4).count()),
+ ('Builds without failed tasks', 'task_build__outcome:NOT4', queryset_with_search.filter(~Q(task_build__outcome=4)).count()),
+ ]
+ }
+ },
+ {'name': 'Errors', 'clclass': 'errors_no',
+ 'qhelp': "How many errors were encountered during the build (if any)",
+ 'orderfield': _get_toggle_order(request, "errors_no", True),
+ 'ordericon':_get_toggle_order_icon(request, "errors_no"),
+ 'orderkey' : 'errors_no',
+ 'filter' : {'class' : 'errors_no',
+ 'label': 'Show:',
+ 'options' : [
+ ('Builds with errors', 'errors_no__gte:1', queryset_with_search.filter(errors_no__gte=1).count()),
+ ('Builds without errors', 'errors_no:0', queryset_with_search.filter(errors_no=0).count()),
+ ]
+ }
+ },
+ {'name': 'Warnings', 'clclass': 'warnings_no',
+ 'qhelp': "How many warnings were encountered during the build (if any)",
+ 'orderfield': _get_toggle_order(request, "warnings_no", True),
+ 'ordericon':_get_toggle_order_icon(request, "warnings_no"),
+ 'orderkey' : 'warnings_no',
+ 'filter' : {'class' : 'warnings_no',
+ 'label': 'Show:',
+ 'options' : [
+ ('Builds with warnings','warnings_no__gte:1', queryset_with_search.filter(warnings_no__gte=1).count()),
+ ('Builds without warnings','warnings_no:0', queryset_with_search.filter(warnings_no=0).count()),
+ ]
+ }
+ },
+ {'name': 'Time', 'clclass': 'time', 'hidden' : 1,
+ 'qhelp': "How long it took the build to finish",
+ 'orderfield': _get_toggle_order(request, "timespent", True),
+ 'ordericon':_get_toggle_order_icon(request, "timespent"),
+ 'orderkey' : 'timespent',
+ },
+ {'name': 'Log',
+ 'dclass': "span4",
+ 'qhelp': "Path to the build main log file",
+ 'clclass': 'log', 'hidden': 1,
+ 'orderfield': _get_toggle_order(request, "cooker_log_path"),
+ 'ordericon':_get_toggle_order_icon(request, "cooker_log_path"),
+ 'orderkey' : 'cooker_log_path',
+ },
+ {'name': 'Output', 'clclass': 'output',
+ 'qhelp': "The root file system types produced by the build. You can find them in your <code>/build/tmp/deploy/images/</code> directory",
+ },
+ ]
+ }
+
+ return render(request, template, context)
+
+
+ def _file_name_for_artifact(b, artifact_type, artifact_id):
+ file_name = None
+ # Target_Image_File file_name
+ if artifact_type == "imagefile":
+ file_name = Target_Image_File.objects.get(target__build = b, pk = artifact_id).file_name
+
+ elif artifact_type == "cookerlog":
+ file_name = b.cooker_log_path
+
+ elif artifact_type == "buildartifact":
+ file_name = BuildArtifact.objects.get(build = b, pk = artifact_id).file_name
+
+ elif artifact_type == "licensemanifest":
+ file_name = Target.objects.get(build = b, pk = artifact_id).license_manifest_path
+
+ elif artifact_type == "tasklogfile":
+ file_name = Task.objects.get(build = b, pk = artifact_id).logfile
+
+ elif artifact_type == "logmessagefile":
+ file_name = LogMessage.objects.get(build = b, pk = artifact_id).pathname
+ else:
+ raise Exception("FIXME: artifact type %s not implemented" % (artifact_type))
+
+ return file_name
+
+
+ def build_artifact(request, build_id, artifact_type, artifact_id):
+ b = Build.objects.get(pk=build_id)
+ if b.buildrequest is None or b.buildrequest.environment is None:
+ raise Exception("Artifact not available for download (missing build request or build environment)")
+
+ file_name = _file_name_for_artifact(b, artifact_type, artifact_id)
+ fsock = None
+ content_type='application/force-download'
+
+ if file_name is None:
+ raise Exception("Could not handle artifact %s id %s" % (artifact_type, artifact_id))
+ else:
+ content_type = b.buildrequest.environment.get_artifact_type(file_name)
+ fsock = b.buildrequest.environment.get_artifact(file_name)
+ file_name = os.path.basename(file_name) # we assume that the build environment system has the same path conventions as the host
+
+ response = HttpResponse(fsock, content_type = content_type)
+
+ # returns a file from the environment
+ response['Content-Disposition'] = 'attachment; filename=' + file_name
+ return response
+
+
+
+ def projects(request):
+ template="projects.html"
+
+ (pagesize, orderby) = _get_parameters_values(request, 10, 'updated:+')
+ mandatory_parameters = { 'count': pagesize, 'page' : 1, 'orderby' : orderby }
+ retval = _verify_parameters( request.GET, mandatory_parameters )
+ if retval:
+ return _redirect_parameters( 'all-projects', request.GET, mandatory_parameters)
+
+ queryset_all = Project.objects.all()
+
+ # boilerplate code that takes a request for an object type and returns a queryset
+ # for that object type. copypasta for all needed table searches
+ (filter_string, search_term, ordering_string) = _search_tuple(request, Project)
+ queryset_with_search = _get_queryset(Project, queryset_all, None, search_term, ordering_string, '-updated')
+ queryset = _get_queryset(Project, queryset_all, filter_string, search_term, ordering_string, '-updated')
+
+ # retrieve the objects that will be displayed in the table; builds a paginator and gets a page range to display
+ project_info = _build_page_range(Paginator(queryset, pagesize), request.GET.get('page', 1))
+
+ # build view-specific information; this is rendered specifically in the builds page, at the top of the page (i.e. Recent builds)
+ build_mru = Build.objects.order_by("-started_on")[:3]
+
+
+
+ context = {
+ 'mru' : build_mru,
+
+ 'objects' : project_info,
+ 'objectname' : "projects",
+ 'default_orderby' : 'id:-',
+ 'search_term' : search_term,
+ 'total_count' : queryset_with_search.count(),
+ 'tablecols': [
+ {'name': 'Project',
+ 'orderfield': _get_toggle_order(request, "name"),
+ 'ordericon':_get_toggle_order_icon(request, "name"),
+ 'orderkey' : 'name',
+ },
+ {'name': 'Release',
+ 'qhelp' : "The version of the build system used by the project",
+ 'orderfield': _get_toggle_order(request, "release__name"),
+ 'ordericon':_get_toggle_order_icon(request, "release__name"),
+ 'orderkey' : 'release__name',
+ },
+ {'name': 'Machine',
+ 'qhelp': "The hardware currently selected for the project",
+ },
+ {'name': 'Number of builds',
+ 'qhelp': "How many builds have been run for the project",
+ },
+ {'name': 'Last outcome', 'clclass': 'loutcome',
+ 'qhelp': "Tells you if the last project build completed successfully or failed",
+ },
+ {'name': 'Last target', 'clclass': 'ltarget',
+ 'qhelp': "The last project build target(s): one or more recipes or image recipes",
+ },
+ {'name': 'Last errors', 'clclass': 'lerrors',
+ 'qhelp': "How many errors were encountered during the last project build (if any)",
+ },
+ {'name': 'Last warnings', 'clclass': 'lwarnings',
+ 'qhelp': "How many warnigns were encountered during the last project build (if any)",
+ },
+ {'name': 'Last image files', 'clclass': 'limagefiles', 'hidden': 1,
+ 'qhelp': "The root file system types produced by the last project build",
+ },
+ {'name': 'Last updated', 'clclass': 'updated',
+ 'orderfield': _get_toggle_order(request, "updated"),
+ 'ordericon':_get_toggle_order_icon(request, "updated"),
+ 'orderkey' : 'updated',
+ }
+ ]
+ }
+ return render(request, template, context)
else:
# these are pages that are NOT available in interactive mode
@@ -1937,6 +2935,47 @@ else:
def xhr_projectbuild(request, pid):
raise Exception("page not available in interactive mode")
+ def xhr_build(request, pid):
+ raise Exception("page not available in interactive mode")
+
+ def xhr_projectinfo(request, pid):
+ raise Exception("page not available in interactive mode")
+
def xhr_projectedit(request, pid):
raise Exception("page not available in interactive mode")
+ def xhr_datatypeahead(request):
+ raise Exception("page not available in interactive mode")
+
+ def importlayer(request):
+ raise Exception("page not available in interactive mode")
+
+ def layers(request):
+ raise Exception("page not available in interactive mode")
+
+ def layerdetails(request):
+ raise Exception("page not available in interactive mode")
+
+ def targets(request):
+ raise Exception("page not available in interactive mode")
+
+ def targetdetails(request):
+ raise Exception("page not available in interactive mode")
+
+ def machines(request):
+ raise Exception("page not available in interactive mode")
+
+ def projectconf(request):
+ raise Exception("page not available in interactive mode")
+
+ def projectbuilds(request):
+ raise Exception("page not available in interactive mode")
+
+ def build_artifact(request, build_id, artifact_type, artifact_id):
+ raise Exception("page not available in interactive mode")
+
+ def projects(request):
+ raise Exception("page not available in interactive mode")
+
+ def xhr_importlayer(request):
+ raise Exception("page not available in interactive mode")
diff --git a/bitbake/lib/toaster/toastermain/settings.py b/bitbake/lib/toaster/toastermain/settings.py
index 09ec2bda98..acc20cc11d 100644
--- a/bitbake/lib/toaster/toastermain/settings.py
+++ b/bitbake/lib/toaster/toastermain/settings.py
@@ -21,9 +21,17 @@
# Django settings for Toaster project.
+import os, re
+
DEBUG = True
TEMPLATE_DEBUG = DEBUG
+# Set to True to see the SQL queries in console
+SQL_DEBUG = False
+if os.environ.get("TOASTER_SQLDEBUG", None) is not None:
+ SQL_DEBUG = True
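+# For example, start Toaster with TOASTER_SQLDEBUG=1 in the environment; only the presence of the variable is checked, not its value.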
+
+
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
@@ -42,7 +50,6 @@ DATABASES = {
}
# Reinterpret database settings if we have DATABASE_URL environment variable defined
-import os, re
if 'DATABASE_URL' in os.environ:
dburl = os.environ['DATABASE_URL']
@@ -208,6 +215,9 @@ MIDDLEWARE_CLASSES = (
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
+from os.path import dirname as DN
+SITE_ROOT = DN(DN(os.path.abspath(__file__)))
+
ROOT_URLCONF = 'toastermain.urls'
# Python dotted path to the WSGI application used by Django's runserver.
@@ -232,26 +242,39 @@ TEMPLATE_CONTEXT_PROCESSORS = ('django.contrib.auth.context_processors.auth',
INSTALLED_APPS = (
#'django.contrib.sites',
- #'django.contrib.messages',
'django.contrib.staticfiles',
- # Uncomment the next line to enable the admin:
- # 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'django.contrib.humanize',
'orm',
'toastermain',
- 'toastergui',
- 'bldviewer',
'south',
- 'bldcontrol',
)
+
+# Load django-fresh if TOASTER_DEVEL is set and the module is available
+FRESH_ENABLED = False
+if os.environ.get('TOASTER_DEVEL', None) is not None:
+ try:
+ import fresh
+ MIDDLEWARE_CLASSES = MIDDLEWARE_CLASSES + ("fresh.middleware.FreshMiddleware",)
+ INSTALLED_APPS = INSTALLED_APPS + ('fresh',)
+ FRESH_ENABLED = True
+ except ImportError:
+ pass
+
+
+SOUTH_TESTS_MIGRATE = False
+
# if we run in managed mode, we need user support
if MANAGED:
INSTALLED_APPS = ('django.contrib.auth',
'django.contrib.contenttypes',
- 'django.contrib.sessions',) + INSTALLED_APPS
+ 'django.contrib.messages',
+ 'django.contrib.sessions',
+ # Uncomment the next line to enable the admin:
+ 'django.contrib.admin',
+ ) + INSTALLED_APPS
# We automatically detect and install applications here if
@@ -260,8 +283,12 @@ import os
currentdir = os.path.dirname(__file__)
for t in os.walk(os.path.dirname(currentdir)):
modulename = os.path.basename(t[0])
+ # If we have a virtualenv, skip it to avoid incorrect imports
+ if 'VIRTUAL_ENV' in os.environ and os.environ['VIRTUAL_ENV'] in t[0]:
+ continue
+
if ("views.py" in t[2] or "models.py" in t[2]) and not modulename in INSTALLED_APPS:
- INSTALLED_APPS.append(modulename)
+ INSTALLED_APPS = INSTALLED_APPS + (modulename,)
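+ # INSTALLED_APPS is a tuple here, so extend it by concatenation rather than by append()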
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
@@ -276,14 +303,28 @@ LOGGING = {
'()': 'django.utils.log.RequireDebugFalse'
}
},
+ 'formatters': {
+ 'datetime': {
+ 'format': '%(levelname)s %(asctime)s %(message)s'
+ }
+ },
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
+ },
+ 'console': {
+ 'level': 'DEBUG',
+ 'class': 'logging.StreamHandler',
+ 'formatter': 'datetime',
}
},
'loggers': {
+ 'toaster' : {
+ 'handlers': ['console'],
+ 'level': 'DEBUG',
+ },
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
@@ -292,6 +333,13 @@ LOGGING = {
}
}
+if DEBUG and SQL_DEBUG:
+ LOGGING['loggers']['django.db.backends'] = {
+ 'level': 'DEBUG',
+ 'handlers': ['console'],
+ }
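+# With both DEBUG and SQL_DEBUG set, every ORM query is echoed through the 'console' handler defined above.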
+
+
# If we're using sqlite, we need to tweak the performance a bit
from django.db.backends.signals import connection_created
def activate_synchronous_off(sender, connection, **kwargs):
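
The body of activate_synchronous_off lies outside this hunk's context lines. A handler of this kind typically issues a PRAGMA on each new SQLite connection; the following is a minimal, illustrative sketch (the vendor check and the exact PRAGMA value are assumptions, not necessarily the code Toaster ships):

    from django.db.backends.signals import connection_created

    def activate_synchronous_off(sender, connection, **kwargs):
        # Only tune SQLite connections; leave other backends untouched
        if connection.vendor == 'sqlite':
            cursor = connection.cursor()
            # Trade durability for speed: skip fsync after each transaction
            cursor.execute('PRAGMA synchronous = OFF;')

    # Register the handler so the PRAGMA runs for every new connection
    connection_created.connect(activate_synchronous_off)
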
diff --git a/bitbake/lib/toaster/toastermain/urls.py b/bitbake/lib/toaster/toastermain/urls.py
index 0e7b5c2d77..a2916e2dd7 100644
--- a/bitbake/lib/toaster/toastermain/urls.py
+++ b/bitbake/lib/toaster/toastermain/urls.py
@@ -25,11 +25,12 @@ from django.views.decorators.cache import never_cache
# Uncomment the next two lines to enable the admin:
-# from django.contrib import admin
-# admin.autodiscover()
+from django.contrib import admin
+admin.autodiscover()
+
urlpatterns = patterns('',
+ # the APIs are not auto-discoverable
url(r'^api/1.0/', include('bldviewer.api')),
- url(r'^$', never_cache(RedirectView.as_view(url='/toastergui/'))),
# Examples:
# url(r'^toaster/', include('toaster.foo.urls')),
@@ -37,16 +38,31 @@ urlpatterns = patterns('',
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
- # Uncomment the next line to enable the admin:
- # url(r'^admin/', include(admin.site.urls)),
+
+ # if no application is selected, we have the magic toastergui app here
+ url(r'^$', never_cache(RedirectView.as_view(url='/toastergui/'))),
)
+import toastermain.settings
+
+if toastermain.settings.FRESH_ENABLED:
+ urlpatterns.insert(1, url(r'', include('fresh.urls')))
+
+if toastermain.settings.MANAGED:
+ urlpatterns = [
+ # Uncomment the next line to enable the admin:
+ url(r'^admin/', include(admin.site.urls)),
+ ] + urlpatterns
# Automatically discover urls.py in various apps, beside our own
# and map module directories to the patterns
import os
currentdir = os.path.dirname(__file__)
for t in os.walk(os.path.dirname(currentdir)):
+ # If we have a virtualenv, skip it to avoid incorrect imports
+ if 'VIRTUAL_ENV' in os.environ and os.environ['VIRTUAL_ENV'] in t[0]:
+ continue
+
if "urls.py" in t[2] and t[0] != currentdir:
modulename = os.path.basename(t[0])
- urlpatterns.append( url(r'^' + modulename + '/', include ( modulename + '.urls')))
+ urlpatterns.insert(0, url(r'^' + modulename + '/', include(modulename + '.urls')))
diff --git a/documentation/adt-manual/adt-command.xml b/documentation/adt-manual/adt-command.xml
index 9aa25fad40..164b1efbff 100644
--- a/documentation/adt-manual/adt-command.xml
+++ b/documentation/adt-manual/adt-command.xml
@@ -177,7 +177,7 @@
Thus, the following command works:
<literallayout class='monospaced'>
$ ./configure --host=armv5te-poky-linux-gnueabi \
- --with-libtool-sysroot=&lt;sysroot-dir&gt;
+ --with-libtool-sysroot=<replaceable>sysroot-dir</replaceable>
</literallayout>
</para>
@@ -186,13 +186,13 @@
cross-toolchain tools.
<note>
If the <filename>configure</filename> script results in problems recognizing the
- <filename>--with-libtool-sysroot=&lt;sysroot-dir&gt;</filename> option,
+ <filename>--with-libtool-sysroot=</filename><replaceable>sysroot-dir</replaceable> option,
regenerate the script to enable the support by doing the following and then
run the script again:
<literallayout class='monospaced'>
$ libtoolize --automake
$ aclocal -I ${OECORE_NATIVE_SYSROOT}/usr/share/aclocal \
- [-I &lt;dir_containing_your_project-specific_m4_macros&gt;]
+ [-I <replaceable>dir_containing_your_project-specific_m4_macros</replaceable>]
$ autoconf
$ autoheader
$ automake -a
diff --git a/documentation/adt-manual/adt-manual.xml b/documentation/adt-manual/adt-manual.xml
index 31695b0387..5492c6ff09 100644
--- a/documentation/adt-manual/adt-manual.xml
+++ b/documentation/adt-manual/adt-manual.xml
@@ -78,9 +78,14 @@
</revision>
<revision>
<revnumber>1.7</revnumber>
- <date>Fall of 2014</date>
+ <date>October 2014</date>
<revremark>Released with the Yocto Project 1.7 Release.</revremark>
</revision>
+ <revision>
+ <revnumber>1.8</revnumber>
+ <date>Sometime in 2015</date>
+ <revremark>Released with the Yocto Project 1.8 Release.</revremark>
+ </revision>
</revhistory>
<copyright>
diff --git a/documentation/adt-manual/adt-package.xml b/documentation/adt-manual/adt-package.xml
index da032eee5b..5c3196ea91 100644
--- a/documentation/adt-manual/adt-package.xml
+++ b/documentation/adt-manual/adt-package.xml
@@ -80,17 +80,17 @@
Next, source the environment setup script found in the
<ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink>.
Follow that by setting up the installation destination to point to your
- sysroot as <filename>&lt;sysroot_dir&gt;</filename>.
- Finally, have an OPKG configuration file <filename>&lt;conf_file&gt;</filename>
+ sysroot as <replaceable>sysroot_dir</replaceable>.
+ Finally, have an OPKG configuration file <replaceable>conf_file</replaceable>
that corresponds to the <filename>opkg</filename> repository you have just created.
The following command forms should now work:
<literallayout class='monospaced'>
- $ opkg-cl –f &lt;conf_file&gt; -o &lt;sysroot_dir&gt; update
- $ opkg-cl –f &lt;cconf_file&gt; -o &lt;sysroot_dir&gt; \
+ $ opkg-cl -f <replaceable>conf_file</replaceable> -o <replaceable>sysroot_dir</replaceable> update
+ $ opkg-cl -f <replaceable>conf_file</replaceable> -o <replaceable>sysroot_dir</replaceable> \
--force-overwrite install libglade
- $ opkg-cl –f &lt;cconf_file&gt; -o &lt;sysroot_dir&gt; \
+ $ opkg-cl -f <replaceable>conf_file</replaceable> -o <replaceable>sysroot_dir</replaceable> \
--force-overwrite install libglade-dbg
- $ opkg-cl –f &lt;conf_file&gt; -o &lt;sysroot_dir&gt; \
+ $ opkg-cl -f <replaceable>conf_file</replaceable> -o <replaceable>sysroot_dir</replaceable> \
--force-overwrite install libglade-dev
</literallayout>
</para>
diff --git a/documentation/adt-manual/adt-prepare.xml b/documentation/adt-manual/adt-prepare.xml
index 89ef09fb24..7faf39b9e2 100644
--- a/documentation/adt-manual/adt-prepare.xml
+++ b/documentation/adt-manual/adt-prepare.xml
@@ -183,20 +183,20 @@
Please make sure you understand the security implications of doing this.
You might also have to modify your firewall settings to allow
NFS booting to work.</note></para></listitem>
- <listitem><para><filename>YOCTOADT_ROOTFS_&lt;arch&gt;</filename>: The root
+ <listitem><para><filename>YOCTOADT_ROOTFS_</filename><replaceable>arch</replaceable>: The root
filesystem images you want to download from the
<filename>YOCTOADT_IPKG_REPO</filename> repository.</para></listitem>
- <listitem><para><filename>YOCTOADT_TARGET_SYSROOT_IMAGE_&lt;arch&gt;</filename>: The
+ <listitem><para><filename>YOCTOADT_TARGET_SYSROOT_IMAGE_</filename><replaceable>arch</replaceable>: The
particular root filesystem used to extract and create the target sysroot.
The value of this variable must have been specified with
- <filename>YOCTOADT_ROOTFS_&lt;arch&gt;</filename>.
+ <filename>YOCTOADT_ROOTFS_</filename><replaceable>arch</replaceable>.
For example, if you downloaded both <filename>minimal</filename> and
<filename>sato-sdk</filename> images by setting
- <filename>YOCTOADT_ROOTFS_&lt;arch&gt;</filename>
- to "minimal sato-sdk", then <filename>YOCTOADT_ROOTFS_&lt;arch&gt;</filename>
+ <filename>YOCTOADT_ROOTFS_</filename><replaceable>arch</replaceable>
+ to "minimal sato-sdk", then <filename>YOCTOADT_ROOTFS_</filename><replaceable>arch</replaceable>
must be set to either "minimal" or "sato-sdk".
</para></listitem>
- <listitem><para><filename>YOCTOADT_TARGET_SYSROOT_LOC_&lt;arch&gt;</filename>: The
+ <listitem><para><filename>YOCTOADT_TARGET_SYSROOT_LOC_</filename><replaceable>arch</replaceable>: The
location on the development host where the target sysroot is created.
</para></listitem>
</itemizedlist>
@@ -212,7 +212,7 @@
Once the installer begins to run, you are asked to enter the
location for cross-toolchain installation.
The default location is
- <filename>/opt/poky/&lt;release&gt;</filename>.
+ <filename>/opt/poky/</filename><replaceable>release</replaceable>.
After either accepting the default location or selecting your
own location, you are prompted to run the installation script
interactively or in silent mode.
@@ -230,7 +230,7 @@
<filename>adt-installer</filename> directory according to your
installer configurations, and the target sysroot located
according to the
- <filename>YOCTOADT_TARGET_SYSROOT_LOC_&lt;arch&gt;</filename>
+ <filename>YOCTOADT_TARGET_SYSROOT_LOC_</filename><replaceable>arch</replaceable>
variable also in your configuration file.
</para>
</section>
@@ -593,7 +593,7 @@
section.
</para></listitem>
<listitem><para>
- Use <filename>bitbake &lt;image&gt; -c populate_sdk</filename>.
+ Use <filename>bitbake</filename> <replaceable>image</replaceable> <filename>-c populate_sdk</filename>.
This method has significant advantages over the previous method
because it results in a toolchain installer that contains the
sysroot that matches your target root filesystem.
diff --git a/documentation/bsp-guide/bsp-guide.xml b/documentation/bsp-guide/bsp-guide.xml
index af4ea64f43..0fd4728807 100644
--- a/documentation/bsp-guide/bsp-guide.xml
+++ b/documentation/bsp-guide/bsp-guide.xml
@@ -90,9 +90,14 @@
</revision>
<revision>
<revnumber>1.7</revnumber>
- <date>Fall of 2014</date>
+ <date>October 2014</date>
<revremark>Released with the Yocto Project 1.7 Release.</revremark>
</revision>
+ <revision>
+ <revnumber>1.8</revnumber>
+ <date>Sometime in 2015</date>
+ <revremark>Released with the Yocto Project 1.8 Release.</revremark>
+ </revision>
</revhistory>
<copyright>
diff --git a/documentation/bsp-guide/bsp.xml b/documentation/bsp-guide/bsp.xml
index 482f2ebe7f..d4850234d1 100644
--- a/documentation/bsp-guide/bsp.xml
+++ b/documentation/bsp-guide/bsp.xml
@@ -1351,7 +1351,7 @@
<listitem><para>By default, the script creates the new BSP Layer in the
current working directory of the
<ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink>,
- which is <filename>poky</filename> in this case.
+ (i.e. <filename>poky/build</filename>).
</para></listitem>
</orderedlist>
</para>
diff --git a/documentation/dev-manual/dev-manual-common-tasks.xml b/documentation/dev-manual/dev-manual-common-tasks.xml
index 32500921aa..0085cefa81 100644
--- a/documentation/dev-manual/dev-manual-common-tasks.xml
+++ b/documentation/dev-manual/dev-manual-common-tasks.xml
@@ -421,7 +421,7 @@
<itemizedlist>
<listitem><para>Store custom layers in a Git repository
that uses the
- <filename>meta-&lt;layer_name&gt;</filename> format.
+ <filename>meta-<replaceable>layer_name</replaceable></filename> format.
</para></listitem>
<listitem><para>Clone the repository alongside other
<filename>meta</filename> directories in the
@@ -678,7 +678,7 @@
<para>
Use the following form when running the layer management tool.
<literallayout class='monospaced'>
- $ bitbake-layers &lt;command&gt; [arguments]
+ $ bitbake-layers <replaceable>command</replaceable> [<replaceable>arguments</replaceable>]
</literallayout>
The following list describes the available commands:
<itemizedlist>
@@ -782,7 +782,7 @@
The default mode of the script's operation is to prompt you for
information needed to generate the layer:
<itemizedlist>
- <listitem><para>The layer priority
+ <listitem><para>The layer priority.
</para></listitem>
<listitem><para>Whether or not to create a sample recipe.
</para></listitem>
@@ -823,7 +823,7 @@
This directory contains the layer's configuration file.
The root name for the file is the same as the root name
your provided for the layer (e.g.
- <filename>&lt;layer&gt;.conf</filename>).
+ <filename><replaceable>layer</replaceable>.conf</filename>).
</para></listitem>
<listitem><para><emphasis>The
<filename>COPYING.MIT</filename> file:</emphasis>
@@ -839,7 +839,7 @@
<para>
If you choose to generate a sample recipe file, the script
prompts you for the name for the recipe and then creates it
- in <filename>&lt;layer&gt;/recipes-example/example/</filename>.
+ in <filename><replaceable>layer</replaceable>/recipes-example/example/</filename>.
The script creates a <filename>.bb</filename> file and a
directory, which contains a sample
<filename>helloworld.c</filename> source file, along with
@@ -851,7 +851,7 @@
<para>
If you choose to generate a sample append file, the script
prompts you for the name for the file and then creates it
- in <filename>&lt;layer&gt;/recipes-example-bbappend/example-bbappend/</filename>.
+ in <filename><replaceable>layer</replaceable>/recipes-example-bbappend/example-bbappend/</filename>.
The script creates a <filename>.bbappend</filename> file and a
directory, which contains a sample patch file.
If you do not provide a recipe name, the script uses
@@ -1228,7 +1228,7 @@
S = "${WORKDIR}/${PN}-${PV}"
- inherit &lt;stuff&gt;
+ inherit <replaceable>stuff</replaceable>
</literallayout>
Modifying this recipe is the recommended method for
creating a new recipe.
@@ -1274,7 +1274,7 @@
When you name your recipe, you need to follow this naming
convention:
<literallayout class='monospaced'>
- &lt;basename&gt;_&lt;version&gt;.bb
+ <replaceable>basename</replaceable>_<replaceable>version</replaceable>.bb
</literallayout>
Use lower-cased characters and do not include the reserved
suffixes <filename>-native</filename>,
@@ -1394,13 +1394,13 @@
</note>
</para></listitem>
<listitem><para><emphasis>Using Variables: <filename>${...}</filename></emphasis> -
- Use the <filename>${&lt;varname&gt;}</filename> syntax to
+ Use the <filename>${<replaceable>varname</replaceable>}</filename> syntax to
access the contents of a variable:
<literallayout class='monospaced'>
SRC_URI = "${SOURCEFORGE_MIRROR}/libpng/zlib-${PV}.tar.gz"
</literallayout>
</para></listitem>
- <listitem><para><emphasis>Quote All Assignments: <filename>"&lt;value&gt;"</filename></emphasis> -
+ <listitem><para><emphasis>Quote All Assignments: <filename>"<replaceable>value</replaceable>"</filename></emphasis> -
Use double quotes around the value in all variable
assignments.
<literallayout class='monospaced'>
@@ -1541,12 +1541,12 @@
Realize that some layers have a policy to use spaces
for all indentation.
</para></listitem>
- <listitem><para><emphasis>Using Python for Complex Operations: <filename>${@&lt;python_code&gt;}</filename></emphasis> -
+ <listitem><para><emphasis>Using Python for Complex Operations: <filename>${@<replaceable>python_code</replaceable>}</filename></emphasis> -
For more advanced processing, it is possible to use
Python code during variable assignments (e.g.
search and replacement on a variable).</para>
<para>You indicate Python code using the
- <filename>${@&lt;python_code&gt;}</filename>
+ <filename>${@<replaceable>python_code</replaceable>}</filename>
syntax for the variable assignment:
<literallayout class='monospaced'>
SRC_URI = "ftp://ftp.info-zip.org/pub/infozip/src/zip${@d.getVar('PV',1).replace('.', '')}.tgz
@@ -1589,10 +1589,10 @@
<link linkend='build-directory'>Build Directory</link>,
use BitBake to process your recipe.
All you need to provide is the
- <filename>&lt;basename&gt;</filename> of the recipe as described
+ <filename><replaceable>basename</replaceable></filename> of the recipe as described
in the previous section:
<literallayout class='monospaced'>
- $ bitbake &lt;basename&gt;
+ $ bitbake <replaceable>basename</replaceable>
</literallayout>
</para>
@@ -1632,7 +1632,7 @@
You can find log files for each task in the recipe's
<filename>temp</filename> directory (e.g.
<filename>poky/build/tmp/work/qemux86-poky-linux/foo/1.3.0-r0/temp</filename>).
- Log files are named <filename>log.&lt;taskname&gt;</filename>
+ Log files are named <filename>log.<replaceable>taskname</replaceable></filename>
(e.g. <filename>log.do_configure</filename>,
<filename>log.do_fetch</filename>, and
<filename>log.do_compile</filename>).
@@ -1993,6 +1993,12 @@
incorrect md5 strings, attempt to build the software,
and then note the resulting error messages that will
report the correct md5 strings.
+ See the
+ "<link linkend='new-recipe-fetching-code'>Fetching Code</link>"
+ section for additional information.
+ </para>
+
+ <para>
Here is an example that assumes the software has a
<filename>COPYING</filename> file:
<literallayout class='monospaced'>
@@ -2087,7 +2093,7 @@
<filename>configure.ac</filename> file, then your
software is built using Autotools.
If this is the case, you just need to worry about
- tweaking the configuration.</para>
+ modifying the configuration.</para>
<para>When using Autotools, your recipe needs to inherit
the
<ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-autotools'><filename>autotools</filename></ulink>
@@ -2104,7 +2110,7 @@
<filename>CMakeLists.txt</filename> file, then your
software is built using CMake.
If this is the case, you just need to worry about
- tweaking the configuration.</para>
+ modifying the configuration.</para>
<para>When you use CMake, your recipe needs to inherit
the
<ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-cmake'><filename>cmake</filename></ulink>
@@ -2152,8 +2158,8 @@
that it did not find something that it needed for some
desired optional functionality, then you would need to add
those to <filename>DEPENDS</filename>.
- Looking at the log might also reveal items being checked for
- and/or enabled that you do not want, or items not being found
+ Looking at the log might also reveal items being checked for,
+ enabled, or both that you do not want, or items not being found
that are in <filename>DEPENDS</filename>, in which case
you would need to look at passing extra options to the
configure script as needed.
@@ -2197,7 +2203,12 @@
to an empty string:
<literallayout class='monospaced'>
PARALLEL_MAKE = ""
- </literallayout></para></listitem>
+ </literallayout></para>
+ <para>
+ For information on parallel Makefile issues, see the
+ "<link linkend='debugging-parallel-make-races'>Debugging Parallel Make Races</link>"
+ section.
+ </para></listitem>
<listitem><para><emphasis>Improper host path usage:</emphasis>
This failure applies to recipes building for the target
or <filename>nativesdk</filename> only.
@@ -2287,7 +2298,7 @@
<filename>make install</filename>, you should do this
using a <filename>do_install_append</filename> function
using the install command as described in
- <emphasis>Manual</emphasis> later in this list.
+ the "Manual" bulleted item later in this list.
</para></listitem>
<listitem><para><emphasis>Other (using
<filename>make install</filename>):</emphasis>
@@ -2336,39 +2347,47 @@
files have been installed correctly.
</para>
- <note>
- During the installation process, you might need to modify
- some of the installed files to suit the target layout.
- For example, you might need to replace hard-coded paths in an
- initscript with values of variables provided by the build
- system, such as replacing <filename>/usr/bin/</filename> with
- <filename>${bindir}</filename>.
- If you do perform such modifications during
- <filename>do_install</filename>, be sure to modify the
- destination file after copying rather than before copying.
- Modifying after copying ensures that the build system can
- re-execute <filename>do_install</filename> if needed.
- </note>
-
- <note>
- <filename>oe_runmake install</filename>, which can be run
- directly or can be run indirectly by the
- <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-autotools'><filename>autotools</filename></ulink>
- and
- <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-cmake'><filename>cmake</filename></ulink>
- classes, runs <filename>make install</filename> in parallel.
- Sometimes, a Makefile can have missing dependencies between
- targets that can result in race conditions.
- If you experience intermittent failures during
- <filename>do_install</filename>, you might be able to work
- around them by disabling parallel Makefile installs
- by adding the following to the recipe:
- <literallayout class='monospaced'>
+ <note><title>Notes</title>
+ <itemizedlist>
+ <listitem><para>
+ During the installation process, you might need to
+ modify some of the installed files to suit the target
+ layout.
+ For example, you might need to replace hard-coded paths
+ in an initscript with values of variables provided by
+ the build system, such as replacing
+ <filename>/usr/bin/</filename> with
+ <filename>${bindir}</filename>.
+ If you do perform such modifications during
+ <filename>do_install</filename>, be sure to modify the
+ destination file after copying rather than before
+ copying.
+ Modifying after copying ensures that the build system
+ can re-execute <filename>do_install</filename> if
+ needed.
+ </para></listitem>
+ <listitem><para>
+ <filename>oe_runmake install</filename>, which can be
+ run directly or can be run indirectly by the
+ <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-autotools'><filename>autotools</filename></ulink>
+ and
+ <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-cmake'><filename>cmake</filename></ulink>
+ classes, runs <filename>make install</filename> in
+ parallel.
+ Sometimes, a Makefile can have missing dependencies
+ between targets that can result in race conditions.
+ If you experience intermittent failures during
+ <filename>do_install</filename>, you might be able to
+ work around them by disabling parallel Makefile
+ installs by adding the following to the recipe:
+ <literallayout class='monospaced'>
PARALLEL_MAKEINST = ""
- </literallayout>
- See
- <ulink url='&YOCTO_DOCS_REF_URL;#var-PARALLEL_MAKEINST'><filename>PARALLEL_MAKEINST</filename></ulink>
- for additional information.
+ </literallayout>
+ See
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-PARALLEL_MAKEINST'><filename>PARALLEL_MAKEINST</filename></ulink>
+ for additional information.
+ </para></listitem>
+ </itemizedlist>
</note>
</section>
@@ -2464,7 +2483,9 @@
that files are split up and packaged correctly.
</para></listitem>
<listitem><para><emphasis>Running QA Checks</emphasis>:
- The <filename>insane</filename> class adds a step to
+ The
+ <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-insane'><filename>insane</filename></ulink>
+ class adds a step to
the package generation process so that output quality
assurance checks are generated by the OpenEmbedded
build system.
@@ -2528,7 +2549,7 @@
machine or architecture at all (e.g. recipes
that simply package script files or configuration
files), you should use the
- <ulink url='&YOCTO_DOCS_REF_URL;#allarch'><filename>allarch</filename></ulink>
+ <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-allarch'><filename>allarch</filename></ulink>
class to do this for you by adding this to your
recipe:
<literallayout class='monospaced'>
@@ -2537,7 +2558,7 @@
Ensuring that the package architecture is correct is
not critical while you are doing the first few builds
of your recipe.
- However, it is important in order to
+ However, it is important in order
to ensure that your recipe rebuilds (or does not
rebuild) appropriately in response to changes in
configuration, and to ensure that you get the
@@ -2575,7 +2596,7 @@
recommended convention is to set
<ulink url='&YOCTO_DOCS_REF_URL;#var-PV'><filename>PV</filename></ulink>
within the recipe to
- "&lt;previous version&gt;+&lt;current version&gt;".
+ "<replaceable>previous_version</replaceable>+<replaceable>current_version</replaceable>".
You can use an additional variable so that you can use the
current version elsewhere.
Here is an example:
@@ -2591,7 +2612,7 @@
<para>
Post-installation scripts run immediately after installing
- a package on the target, or during image creation when a
+ a package on the target or during image creation when a
package is included in an image.
To add a post-installation script to a package, add a
<filename>pkg_postinst_PACKAGENAME()</filename> function to
@@ -2703,7 +2724,7 @@
<para>
Building an application from a single file that is stored
- locally (e.g. under <filename>files/</filename>) requires
+ locally (e.g. under <filename>files</filename>) requires
a recipe that has the file listed in the
<filename><ulink url='&YOCTO_DOCS_REF_URL;#var-SRC_URI'>SRC_URI</ulink></filename>
variable.
@@ -2794,7 +2815,7 @@
If you need additional <filename>make</filename> options, you should store them in the
<filename><ulink url='&YOCTO_DOCS_REF_URL;#var-EXTRA_OEMAKE'>EXTRA_OEMAKE</ulink></filename>
variable.
- BitBake passes these options into the <filename>make</filename> GNU invocation.
+ BitBake passes these options into the GNU <filename>make</filename> invocation.
Note that a <filename>do_install</filename> task is still required.
Otherwise, BitBake runs an empty <filename>do_install</filename> task by default.
</para>
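+            <para>
+                For example, a hypothetical Makefile-based recipe might pass
+                extra options and provide its own installation step as follows
+                (the option values and the <filename>myapp</filename> binary
+                name are illustrative only):
+                <literallayout class='monospaced'>
+     EXTRA_OEMAKE = "'CC=${CC}' 'PREFIX=${prefix}'"
+
+     do_install () {
+         install -d ${D}${bindir}
+         install -m 0755 myapp ${D}${bindir}
+     }
+                </literallayout>
+            </para>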
@@ -3045,7 +3066,7 @@
<listitem><para><filename><ulink url='&YOCTO_DOCS_REF_URL;#var-TARGET_ARCH'>TARGET_ARCH</ulink></filename>
(e.g. "arm")</para></listitem>
<listitem><para><filename><ulink url='&YOCTO_DOCS_REF_URL;#var-PREFERRED_PROVIDER'>PREFERRED_PROVIDER</ulink>_virtual/kernel</filename>
- (see below)</para></listitem>
+ </para></listitem>
<listitem><para><filename><ulink url='&YOCTO_DOCS_REF_URL;#var-MACHINE_FEATURES'>MACHINE_FEATURES</ulink></filename>
(e.g. "apm screen wifi")</para></listitem>
</itemizedlist>
@@ -3515,7 +3536,7 @@
The <filename>wic</filename> command and the infrastructure
                    it is based on are by definition incomplete.
Its purpose is to allow the generation of customized images,
- and as such was designed to be completely extensible via a
+ and as such was designed to be completely extensible through a
plugin interface.
See the
"<link linkend='openembedded-kickstart-plugins'>Plugins</link>"
@@ -3638,7 +3659,7 @@
<filename>create</filename> and <filename>list</filename>.
You can get help for these commands as follows:
<literallayout class='monospaced'>
- $ wic help &lt;command&gt;
+ $ wic help <replaceable>command</replaceable>
</literallayout>
</para>
@@ -3652,7 +3673,7 @@
a given topic by prefacing the topic with
<filename>wic help</filename>:
<literallayout class='monospaced'>
- $ wic help &lt;help topic&gt;
+ $ wic help <replaceable>help_topic</replaceable>
</literallayout>
</para>
@@ -3661,9 +3682,9 @@
<filename>wic</filename> creates using the existing
kickstart files with the following form of the command:
<literallayout class='monospaced'>
- $ wic list &lt;image&gt; help
+ $ wic list <replaceable>image</replaceable> help
</literallayout>
- where <filename>&lt;image&gt;</filename> is either
+ where <filename><replaceable>image</replaceable></filename> is either
<filename>directdisk</filename> or
<filename>mkefidisk</filename>.
</para>
@@ -3881,8 +3902,8 @@
</para>
<para>
- The output specifies exactly which image was
- created as well as where it was created.
+                    The output specifies the exact image created as well
+                    as where it was created.
The output also names the artifacts used and the exact
<filename>.wks</filename> script that was used to generate
the image.
@@ -4139,7 +4160,7 @@
<para>
To be more concrete, here is the plugin definition that
matches a
- <filename>'&dash;&dash;source bootimg-pcbios'</filename> usage,
+ <filename>&dash;&dash;source bootimg-pcbios</filename> usage,
along with an example
method called by the <filename>wic</filename> implementation
when it needs to invoke an implementation-specific
@@ -4223,7 +4244,7 @@
The code that then needs to call the plugin methods uses
<filename>plugin.get_source_plugin_methods()</filename>
to find the method or methods needed by the call.
- Retrieval of those methods is accomplished is accomplished
+ Retrieval of those methods is accomplished
by filling up a dict with keys
containing the method names of interest.
On success, these will be filled in with the actual
@@ -4273,13 +4294,14 @@
This command creates a partition on the system and uses the
following syntax:
<literallayout class='monospaced'>
- part &lt;mntpoint&gt;
+ part <replaceable>mntpoint</replaceable>
</literallayout>
- The <filename>&lt;mntpoint&gt;</filename> is where the
+ The <filename><replaceable>mntpoint</replaceable></filename>
+ is where the
partition will be mounted and must be of one of the
following forms:
<itemizedlist>
- <listitem><para><filename>/&lt;path&gt;</filename>:
+ <listitem><para><filename>/<replaceable>path</replaceable></filename>:
For example, <filename>/</filename>,
<filename>/usr</filename>, and
<filename>/home</filename></para></listitem>
@@ -4327,7 +4349,7 @@
follows for more information.
</para>
<para>If you use
- <filename>&dash;&dash;source &lt;plugin-name&gt;</filename>,
+ <filename>&dash;&dash;source <replaceable>plugin-name</replaceable></filename>,
<filename>wic</filename> creates a partition as
large as needed and fills it with the contents of
the partition that is generated by the
@@ -4426,7 +4448,7 @@
You can use the <filename>menuconfig</filename> tool and configuration fragments to
make sure your <filename>.config</filename> file is just how you need it.
This section describes how to use <filename>menuconfig</filename>, create and use
- configuration fragments, and how to interactively tweak your <filename>.config</filename>
+ configuration fragments, and how to interactively modify your <filename>.config</filename>
file to create the leanest kernel configuration file possible.
</para>
@@ -5230,8 +5252,8 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</para>
<para>
To set up passwords, use the
- <filename>extrausers</filename> class, which is the
- preferred method.
+ <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-extrausers'><filename>extrausers</filename></ulink>
+ class, which is the preferred method.
For an example on how to set up both root and user
passwords, see the
"<ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-extrausers'><filename>extrausers.bbclass</filename></ulink>"
@@ -5252,8 +5274,11 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</para></listitem>
<listitem><para>
Consider enabling a Mandatory Access Control (MAC)
- framework (such as SMACK or SELinux) and tuning it
+ framework such as SMACK or SELinux and tuning it
appropriately for your device's usage.
+ You can find more information in the
+ <ulink url='http://git.yoctoproject.org/cgit/cgit.cgi/meta-selinux/'><filename>meta-selinux</filename></ulink>
+ layer.
</para></listitem>
</itemizedlist>
</para>
@@ -5682,7 +5707,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
section of the Yocto Project Linux Kernel Development
Manual and the "<link linkend='creating-config-fragments'>Creating Configuration Fragments</link>"
section, which is in this manual.</para></listitem>
- <listitem><para><filename>bitbake -u depexp -g &lt;bitbake_target&gt;</filename>:
+ <listitem><para><filename>bitbake -u depexp -g <replaceable>bitbake_target</replaceable></filename>:
Using the BitBake command with these options brings up
a Dependency Explorer from which you can view file
dependencies.
@@ -5701,7 +5726,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
libraries, and applications.
To change things, you can configure how the packaging happens,
which changes the way you build them.
- You can also tweak the filesystem itself or select a different
+ You can also modify the filesystem itself or select a different
filesystem.
</para>
@@ -5709,7 +5734,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
First, find out what is hogging your root filesystem by running the
<filename>dirsize.py</filename> script from your root directory:
<literallayout class='monospaced'>
- $ cd &lt;root-directory-of-image&gt;
+ $ cd <replaceable>root-directory-of-image</replaceable>
$ dirsize.py 100000 > dirsize-100k.log
$ cat dirsize-100k.log
</literallayout>
@@ -5729,8 +5754,8 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
One way to see how packages relate to each other is by using
the Dependency Explorer UI with the BitBake command:
<literallayout class='monospaced'>
- $ cd &lt;image-directory&gt;
- $ bitbake -u depexp -g &lt;image&gt;
+ $ cd <replaceable>image-directory</replaceable>
+ $ bitbake -u depexp -g <replaceable>image</replaceable>
</literallayout>
Use the interface to select potential packages you wish to
eliminate and see their dependency relationships.
@@ -5797,7 +5822,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
Linux build directory to get an idea of what is making up
the kernel:
<literallayout class='monospaced'>
- $ cd &lt;top-level-linux-build-directory&gt;
+ $ cd <replaceable>top-level-linux-build-directory</replaceable>
$ ksize.py > ksize.log
$ cat ksize.log
</literallayout>
@@ -5948,6 +5973,192 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</section>
</section>
+ <section id='building-images-for-more-than-one-machine'>
+ <title>Building Images for More than One Machine</title>
+
+ <para>
+ A common scenario developers face is creating images for several
+ different machines that use the same software environment.
+ In this situation, it is tempting to set the
+ tunings and optimization flags for each build specifically for
+ the targeted hardware (i.e. "maxing out" the tunings).
+ Doing so can considerably add to build times and package feed
+ maintenance collectively for the machines.
+                    For example, selecting tunes that are extremely specific
+                    to a CPU core used in a system (e.g. setting
+                    <ulink url='&YOCTO_DOCS_REF_URL;#var-DEFAULTTUNE'><filename>DEFAULTTUNE</filename></ulink>
+                    specifically for each machine's build) might enable some
+                    micro optimizations in GCC for that particular system but
+                    would otherwise not gain you much of a performance
+                    difference across the other systems as compared to using
+                    a more general tuning across all the builds.
+ Rather than "max out" each build's tunings, you can take steps that
+ cause the OpenEmbedded build system to reuse software across the
+ various machines where it makes sense.
+ </para>
+ <para>
+                If build speed and package feed maintenance are considerations,
+                the points in this section can help you choose tunings that
+                balance build time against package feed maintenance.
+ <itemizedlist>
+ <listitem><para><emphasis>Share the Build Directory:</emphasis>
+ If at all possible, share the
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-TMPDIR'><filename>TMPDIR</filename></ulink>
+ across builds.
+ The Yocto Project supports switching between different
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-MACHINE'><filename>MACHINE</filename></ulink>
+ values in the same <filename>TMPDIR</filename>.
+ This practice is well supported and regularly used by
+ developers when building for multiple machines.
+ When you use the same <filename>TMPDIR</filename> for
+ multiple machine builds, the OpenEmbedded build system can
+ reuse the existing native and often cross-recipes for
+ multiple machines.
+                        Thus, build time decreases (see the sketch that
+                        follows this list).
+ <note>
+ If
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-DISTRO'><filename>DISTRO</filename></ulink>
+                                settings change or fundamental configuration
+                                settings such as the filesystem layout change,
+                                you need to work with
+ a clean <filename>TMPDIR</filename>.
+ Sharing <filename>TMPDIR</filename> under these
+ circumstances might work but since it is not
+ guaranteed, you should use a clean
+ <filename>TMPDIR</filename>.
+ </note>
+ </para></listitem>
+ <listitem><para><emphasis>Enable the Appropriate Package Architecture:</emphasis>
+ By default, the OpenEmbedded build system enables three
+ levels of package architectures: "all", "tune" or "package",
+ and "machine".
+ Any given recipe usually selects one of these package
+ architectures (types) for its output.
+                        Depending on what a given recipe creates packages for, making
+ sure you enable the appropriate package architecture can
+ directly impact the build time.</para>
+ <para>A recipe that just generates scripts can enable
+ "all" architecture because there are no binaries to build.
+ To specifically enable "all" architecture, be sure your
+ recipe inherits the
+ <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-allarch'><filename>allarch</filename></ulink>
+ class.
+ This class is useful for "all" architectures because it
+ configures many variables so packages can be used across
+ multiple architectures.</para>
+ <para>If your recipe needs to generate packages that are
+ machine-specific or when one of the build or runtime
+ dependencies is already machine-architecture dependent,
+ which makes your recipe also machine-architecture dependent,
+ make sure your recipe enables the "machine" package
+ architecture through the
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-MACHINE_ARCH'><filename>MACHINE_ARCH</filename></ulink>
+ variable:
+ <literallayout class='monospaced'>
+ PACKAGE_ARCH = "${MACHINE_ARCH}"
+ </literallayout>
+                        When you do not specifically enable a package
+                        architecture through the
+                        <ulink url='&YOCTO_DOCS_REF_URL;#var-PACKAGE_ARCH'><filename>PACKAGE_ARCH</filename></ulink>
+                        variable, the OpenEmbedded build system defaults to the
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-TUNE_PKGARCH'><filename>TUNE_PKGARCH</filename></ulink>
+ setting:
+ <literallayout class='monospaced'>
+ PACKAGE_ARCH = "${TUNE_PKGARCH}"
+ </literallayout>
+ </para></listitem>
+ <listitem><para><emphasis>Choose a Generic Tuning File if Possible:</emphasis>
+ Some tunes are more generic and can run on multiple targets
+ (e.g. an <filename>armv5</filename> set of packages could
+ run on <filename>armv6</filename> and
+ <filename>armv7</filename> processors in most cases).
+ Similarly, <filename>i486</filename> binaries could work
+ on <filename>i586</filename> and higher processors.
+ You should realize, however, that advances on newer
+ processor versions would not be used.</para>
+ <para>If you select the same tune for several different
+ machines, the OpenEmbedded build system reuses software
+ previously built, thus speeding up the overall build time.
+ Realize that even though a new sysroot for each machine is
+ generated, the software is not recompiled and only one
+ package feed exists.
+ </para></listitem>
+ <listitem><para><emphasis>Manage Granular Level Packaging:</emphasis>
+ Sometimes cases exist where injecting another level
+ of package architecture beyond the three higher levels
+ noted earlier can be useful.
+ For example, consider the <filename>emgd</filename>
+ graphics stack in the
+ <filename>meta-intel</filename> layer.
+ In this layer, a subset of software exists that is
+ compiled against something different from the rest of the
+ generic packages.
+ You can examine the key code in the
+ <ulink url='http://git.yoctoproject.org/cgit/cgit.cgi'>Source Repositories</ulink>
+ "daisy" branch in
+ <filename>classes/emgd-gl.bbclass</filename>.
+ For a specific set of packages, the code redefines
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-PACKAGE_ARCH'><filename>PACKAGE_ARCH</filename></ulink>.
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-PACKAGE_EXTRA_ARCHS'><filename>PACKAGE_EXTRA_ARCHS</filename></ulink>
+ is then appended with this extra tune name in
+ <filename>meta-intel-emgd.inc</filename>.
+ The result is that when searching for packages, the
+ build system uses a four-level search and the packages
+ in this new level are preferred as compared to the standard
+ tune.
+ The overall result is that the build system reuses most
+ software from the common tune except for specific cases
+ as needed.
+ </para></listitem>
+ <listitem><para><emphasis>Use Tools to Debug Issues:</emphasis>
+ Sometimes you can run into situations where software is
+ being rebuilt when you think it should not be.
+ For example, the OpenEmbedded build system might not be
+ using shared state between machines when you think it
+ should be.
+ These types of situations are usually due to references
+ to machine-specific variables such as
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-MACHINE'><filename>MACHINE</filename></ulink>,
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-SERIAL_CONSOLE'><filename>SERIAL_CONSOLE</filename></ulink>,
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-XSERVER'><filename>XSERVER</filename></ulink>,
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-MACHINE_FEATURES'><filename>MACHINE_FEATURES</filename></ulink>,
+ and so forth in code that is supposed to only be
+ tune-specific or when the recipe depends
+ (<ulink url='&YOCTO_DOCS_REF_URL;#var-DEPENDS'><filename>DEPENDS</filename></ulink>,
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-RDEPENDS'><filename>RDEPENDS</filename></ulink>,
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-RRECOMMENDS'><filename>RRECOMMENDS</filename></ulink>,
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-RSUGGESTS'><filename>RSUGGESTS</filename></ulink>,
+ and so forth) on some other recipe that already has
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-PACKAGE_ARCH'><filename>PACKAGE_ARCH</filename></ulink>
+ defined as "${MACHINE_ARCH}".
+ <note>
+ Patches to fix any issues identified are most welcome
+ as these issues occasionally do occur.
+ </note></para>
+ <para>For such cases, you can use some tools to help you
+ sort out the situation:
+ <itemizedlist>
+ <listitem><para><emphasis><filename>sstate-diff-machines.sh</filename>:</emphasis>
+ You can find this tool in the
+ <filename>scripts</filename> directory of the
+ Source Repositories.
+ See the comments in the script for information on
+ how to use the tool.
+ </para></listitem>
+ <listitem><para><emphasis>BitBake's "-S printdiff" Option:</emphasis>
+ Using this option causes BitBake to try to
+ establish the closest signature match it can
+ (e.g. in the shared state cache) and then run
+ <filename>bitbake-diffsigs</filename> over the
+ matches to determine the stamps and delta where
+ these two stamp trees diverge.
+ </para></listitem>
+ </itemizedlist>
+ </para></listitem>
+ </itemizedlist>
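+            As a sketch of the "Share the Build Directory" and "Choose a
+            Generic Tuning File" points above, the following hypothetical
+            <filename>local.conf</filename> fragment shares one
+            <filename>TMPDIR</filename> and one generic tune across two
+            machines.
+            Here, <replaceable>machine_a</replaceable> and
+            <replaceable>machine_b</replaceable> are placeholders, "armv5te"
+            is assumed to be a tune that both machines' tune files provide,
+            and the explicit <filename>TMPDIR</filename> assignment is only
+            illustrative because building both machines from the same Build
+            Directory already shares <filename>TMPDIR</filename> by default:
+            <literallayout class='monospaced'>
+     TMPDIR = "${TOPDIR}/tmp-common"
+     DEFAULTTUNE = "armv5te"
+            </literallayout>
+            With the default Poky setup, which passes
+            <filename>MACHINE</filename> through to BitBake, you could then
+            build each machine in turn from the same Build Directory:
+            <literallayout class='monospaced'>
+     $ MACHINE=<replaceable>machine_a</replaceable> bitbake core-image-minimal
+     $ MACHINE=<replaceable>machine_b</replaceable> bitbake core-image-minimal
+            </literallayout>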
+ </para>
+ </section>
+
<section id='working-with-packages'>
<title>Working with Packages</title>
@@ -6148,7 +6359,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
For this scenario, you need to start the PR Service using
the <filename>bitbake-prserv</filename> command:
<literallayout class='monospaced'>
- bitbake-prserv &dash;&dash;host &lt;ip&gt; &dash;&dash;port &lt;port&gt; &dash;&dash;start
+ bitbake-prserv &dash;&dash;host <replaceable>ip</replaceable> &dash;&dash;port <replaceable>port</replaceable> &dash;&dash;start
</literallayout>
In addition to hand-starting the service, you need to
update the <filename>local.conf</filename> file of each
@@ -6667,7 +6878,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
Realize that it is not sufficient to simply do the
following:
<literallayout class='monospaced'>
- $ bitbake &lt;some-package&gt; package-index
+ $ bitbake <replaceable>some-package</replaceable> package-index
</literallayout>
This is because BitBake does not properly schedule the
<filename>package-index</filename> target fully after any
@@ -6687,7 +6898,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<para>
When your build is complete, your packages reside in the
- <filename>${TMPDIR}/deploy/&lt;package-format&gt;</filename>
+ <filename>${TMPDIR}/deploy/<replaceable>package-format</replaceable></filename>
directory.
For example, if <filename>${TMPDIR}</filename>
is <filename>tmp</filename> and your selected package type
@@ -6785,7 +6996,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<para>
If you are using lighttpd, all you need
to do is to provide a link from your
- <filename>${TMPDIR}/deploy/&lt;package-format&gt;</filename>
+ <filename>${TMPDIR}/deploy/<replaceable>package-format</replaceable></filename>
directory to lighttpd's document-root.
You can determine the specifics of your lighttpd
installation by looking through its configuration file,
@@ -6936,7 +7147,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
a shell script (<filename>run-ptest</filename>) that starts
the test.
The shell script that starts the test must not contain
- the actual test, the script only starts it.
+ the actual test - the script only starts the test.
On the other hand, the test can be anything from a simple
shell script that runs a binary and checks the output to
an elaborate system of test binaries and data files.
@@ -6946,18 +7157,24 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
The test generates output in the format used by
Automake:
<literallayout class='monospaced'>
- &lt;result&gt;: &lt;testname&gt;
+ <replaceable>result</replaceable>: <replaceable>testname</replaceable>
</literallayout>
where the result can be <filename>PASS</filename>,
<filename>FAIL</filename>, or <filename>SKIP</filename>,
and the testname can be any identifying string.
</para>
- <note>
- A recipe is "ptest-enabled" if it inherits the
- <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-ptest'><filename>ptest</filename></ulink>
- class.
- </note>
+ <para>
+ For a list of Yocto Project recipes that are already
+ enabled with ptest, see the
+ <ulink url='https://wiki.yoctoproject.org/wiki/Ptest'>Ptest</ulink>
+ wiki page.
+ <note>
+ A recipe is "ptest-enabled" if it inherits the
+ <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-ptest'><filename>ptest</filename></ulink>
+ class.
+ </note>
+ </para>
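+                <para>
+                    As a minimal, hypothetical sketch, a
+                    <filename>run-ptest</filename> script for a package that
+                    installs a test binary named
+                    <filename>simple-test</filename> could look like the
+                    following.
+                    The real test logic lives in the binary; the script only
+                    starts it and reports the outcome in the format described
+                    above:
+                    <literallayout class='monospaced'>
+     #!/bin/sh
+     # Start the packaged test and report the result in Automake style.
+     if ./simple-test; then
+         echo "PASS: simple-test"
+     else
+         echo "FAIL: simple-test"
+     fi
+                    </literallayout>
+                </para>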
<section id='adding-ptest-to-your-build'>
<title>Adding ptest to Your Build</title>
@@ -6974,10 +7191,11 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
EXTRA_IMAGE_FEATURES += "ptest-pkgs"
</literallayout>
Once your build is complete, the ptest files are installed
- into the <filename>/usr/lib/&lt;package&gt;/ptest</filename>
+ into the
+ <filename>/usr/lib/<replaceable>package</replaceable>/ptest</filename>
directory within the image, where
- <filename>&lt;package&gt;</filename> is the name of the
- package.
+ <filename><replaceable>package</replaceable></filename>
+ is the name of the package.
</para>
</section>
@@ -7062,7 +7280,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
Consequently, packages that use the unaltered,
patched version of <filename>make check</filename>
                    automatically cross-compile.</para>
- <para>However, you still must add a
+ <para>Regardless, you still must add a
<filename>do_compile_ptest</filename> function to
build the test suite.
Add a function similar to the following to your
@@ -7185,7 +7403,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
Use the following BitBake command form to fetch all the
necessary sources without starting the build:
<literallayout class='monospaced'>
- $ bitbake -c fetchall &lt;target&gt;
+ $ bitbake -c fetchall <replaceable>target</replaceable>
</literallayout>
This variation of the BitBake command guarantees that you
have all the sources for that BitBake target should you
@@ -7200,8 +7418,8 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<para>
By default, the OpenEmbedded build system uses the
- <link linkend='build-directory'>Build Directory</link> to
- build source code.
+ <link linkend='build-directory'>Build Directory</link> when
+ building source code.
The build process involves fetching the source files, unpacking
them, and then patching them if necessary before the build takes
place.
@@ -7235,8 +7453,24 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<filename>local.conf</filename> file:
<literallayout class='monospaced'>
INHERIT += "externalsrc"
- EXTERNALSRC_pn-myrecipe = "/some/path/to/your/source/tree"
+ EXTERNALSRC_pn-<replaceable>myrecipe</replaceable> = "<replaceable>path-to-your-source-tree</replaceable>"
+ </literallayout>
+ </para>
+
+ <para>
+ This next example shows how to accomplish the same thing by setting
+ <filename>EXTERNALSRC</filename> in the recipe itself or in the
+ recipe's append file:
+ <literallayout class='monospaced'>
+ EXTERNALSRC = "<replaceable>path</replaceable>"
+ EXTERNALSRC_BUILD = "<replaceable>path</replaceable>"
</literallayout>
+ <note>
+ In order for these settings to take effect, you must globally
+ or locally inherit the
+ <ulink url='&YOCTO_DOCS_REF_URL;#ref-classes-externalsrc'><filename>externalsrc</filename></ulink>
+ class.
+ </note>
</para>
<para>
@@ -7249,7 +7483,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<ulink url='&YOCTO_DOCS_REF_URL;#var-EXTERNALSRC_BUILD'><filename>EXTERNALSRC_BUILD</filename></ulink>
to point to that directory:
<literallayout class='monospaced'>
- EXTERNALSRC_BUILD_pn-myrecipe = "/path/to/my/source/tree"
+ EXTERNALSRC_BUILD_pn-<replaceable>myrecipe</replaceable> = "<replaceable>path-to-your-source-tree</replaceable>"
</literallayout>
</para>
</section>
@@ -7310,7 +7544,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<title>Using systemd for the Main Image and Using SysVinit for the Rescue Image</title>
<para>
- Set the these variables in your distribution configuration
+ Set these variables in your distribution configuration
file as follows:
<literallayout class='monospaced'>
DISTRO_FEATURES_append = " systemd"
@@ -7354,7 +7588,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
Then, you can add the following to your
<filename>local.conf</filename>:
<literallayout class='monospaced'>
- SRCREV_pn-&lt;PN&gt; = "${AUTOREV}"
+ SRCREV_pn-<replaceable>PN</replaceable> = "${AUTOREV}"
</literallayout>
<ulink url='&YOCTO_DOCS_REF_URL;#var-PN'><filename>PN</filename></ulink>
is the name of the recipe for which you want to enable automatic source
@@ -7559,8 +7793,8 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<title>Enabling Tests</title>
<para>
- Depending on whether you are planning on running tests using
- QEMU or on running them on the hardware, you have to take
+ Depending on whether you are planning to run tests using
+ QEMU or on the hardware, you have to take
different steps to enable the tests.
See the following subsections for information on how to
enable both types of tests.
@@ -7580,7 +7814,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<listitem><para>Add
<filename>NOPASSWD</filename> for your user
in <filename>/etc/sudoers</filename> either for
- ALL commands or just for
+ all commands or just for
<filename>runqemu-ifup</filename>.
You must provide the full path as that can
change if you are using multiple clones of the
@@ -7622,7 +7856,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<para>
Once you start running the tests, the following happens:
- <itemizedlist>
+ <orderedlist>
<listitem><para>A copy of the root filesystem is written
to <filename>${WORKDIR}/testimage</filename>.
</para></listitem>
@@ -7652,7 +7886,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<filename>unittest</filename> in the task log at
<filename>${WORKDIR}/temp/log.do_testimage</filename>.
</para></listitem>
- </itemizedlist>
+ </orderedlist>
</para>
</section>
@@ -7700,8 +7934,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<filename>TEST_TARGET</filename> to an appropriate value.
                    For QEMU, you do not have to change anything because the
                    default value is "QemuTarget".
- For running tests on hardware, two options exist:
- "SimpleRemoteTarget" and "GummibootTarget".
+ For running tests on hardware, the following options exist:
<itemizedlist>
<listitem><para><emphasis>"SimpleRemoteTarget":</emphasis>
Choose "SimpleRemoteTarget" if you are going to
@@ -7728,6 +7961,45 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
"<link linkend='selecting-gummiboottarget'>Selecting GummibootTarget</link>"
section, which follows, for more information.
</para></listitem>
+ <listitem><para><emphasis>"BeagleBoneTarget":</emphasis>
+ Choose "BeagleBoneTarget" if you are deploying
+ images and running tests on the BeagleBone
+ "Black" or original "White" hardware.
+ For information on how to use these tests, see the
+ comments at the top of the BeagleBoneTarget
+ <filename>meta-yocto-bsp/lib/oeqa/controllers/beaglebonetarget.py</filename>
+ file.
+ </para></listitem>
+ <listitem><para><emphasis>"EdgeRouterTarget":</emphasis>
+                        Choose "EdgeRouterTarget" if you are deploying
+ images and running tests on the Ubiquiti Networks
+ EdgeRouter Lite.
+ For information on how to use these tests, see the
+ comments at the top of the EdgeRouterTarget
+ <filename>meta-yocto-bsp/lib/oeqa/controllers/edgeroutertarget.py</filename>
+ file.
+ </para></listitem>
+ <listitem><para><emphasis>"GrubTarget":</emphasis>
+                        Choose "GrubTarget" if you are deploying images
+                        and running tests on any generic PC that boots
+                        using GRUB.
+ For information on how to use these tests, see the
+ comments at the top of the GrubTarget
+ <filename>meta-yocto-bsp/lib/oeqa/controllers/grubtarget.py</filename>
+ file.
+ </para></listitem>
+ <listitem><para><emphasis>"<replaceable>your-target</replaceable>":</emphasis>
+                        Create your own custom target if you want to deploy
+                        images and run tests on a custom machine within
+                        your BSP layer.
+ To do this, you need to add a Python unit that
+ defines the target class under
+ <filename>lib/oeqa/controllers/</filename> within
+ your layer.
+ You must also provide an empty
+ <filename>__init__.py</filename>.
+ For examples, see files in
+ <filename>meta-yocto-bsp/lib/oeqa/controllers/</filename>.
+ </para></listitem>
</itemizedlist>
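+                Whichever class you choose, you select it by setting
+                <filename>TEST_TARGET</filename> in your
+                <filename>local.conf</filename> file.
+                For example, the following sketch selects the BeagleBone
+                support described above:
+                <literallayout class='monospaced'>
+     TEST_TARGET = "BeagleBoneTarget"
+                </literallayout>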
</para>
</section>
@@ -7779,7 +8051,8 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<filename>kmod</filename>).
</para></listitem>
<listitem><para>Uses a custom
- initramfs image with a custom installer.
+                            initial RAM filesystem (initramfs) image with
+                            a custom installer.
A normal image that you can install usually
creates a single rootfs partition.
This image uses another installer that
@@ -7837,10 +8110,14 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</para></listitem>
</orderedlist>
</para>
+ </section>
+
+ <section id='power-control'>
+ <title>Power Control</title>
<para>
- Here is some additional information regarding running
- "GummibootTarget" as your test target:
+ For most hardware targets other than SimpleRemoteTarget,
+ you can control power:
<itemizedlist>
<listitem><para>
You can use
@@ -7859,7 +8136,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
In this example, the expect script does the
following:
<literallayout class='monospaced'>
- ssh test@10.11.12.1 "pyctl nuc1 &lt;arg&gt;"
+ ssh test@10.11.12.1 "pyctl nuc1 <replaceable>arg</replaceable>"
</literallayout>
It then runs a Python script that controls power
for a label called <filename>nuc1</filename>.
@@ -7885,6 +8162,63 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
some manual interaction is okay from time to time.
</para></listitem>
</itemizedlist>
+ If you have no hardware to automatically perform power
+ control but still wish to experiment with automated
+ hardware testing, you can use the dialog-power-control
+ script that shows a dialog prompting you to perform the
+ required power action.
+ This script requires either KDialog or Zenity to be
+ installed.
+ To use this script, set the
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-TEST_POWERCONTROL_CMD'><filename>TEST_POWERCONTROL_CMD</filename></ulink>
+ variable as follows:
+ <literallayout class='monospaced'>
+ TEST_POWERCONTROL_CMD = "${COREBASE}/scripts/contrib/dialog-power-control"
+ </literallayout>
+ </para>
+ </section>
+
+ <section id='serial-console-connection'>
+ <title>Serial Console Connection</title>
+
+ <para>
+ For test target classes requiring a serial console
+ to interact with the bootloader (e.g. BeagleBoneTarget,
+ EdgeRouterTarget, and GrubTarget), you need to
+ specify a command to use to connect to the serial console
+ of the target machine by using the
+                    <ulink url='&YOCTO_DOCS_REF_URL;#var-TEST_SERIALCONTROL_CMD'><filename>TEST_SERIALCONTROL_CMD</filename></ulink>
+ variable and optionally the
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-TEST_SERIALCONTROL_EXTRA_ARGS'><filename>TEST_SERIALCONTROL_EXTRA_ARGS</filename></ulink>
+ variable.
+ </para>
+
+ <para>
+                    This command could be a serial terminal program if the
+ machine is connected to a local serial port, or a
+ <filename>telnet</filename> or
+ <filename>ssh</filename> command connecting to a remote
+ console server.
+ Regardless of the case, the command simply needs to
+ connect to the serial console and forward that connection
+ to standard input and output as any normal terminal
+ program does.
+ For example, to use the picocom terminal program on
+ serial device <filename>/dev/ttyUSB0</filename>
+ at 115200bps, you would set the variable as follows:
+ <literallayout class='monospaced'>
+ TEST_SERIALCONTROL_CMD = "picocom /dev/ttyUSB0 -b 115200"
+ </literallayout>
+ For local devices where the serial port device disappears
+ when the device reboots, an additional "serdevtry" wrapper
+ script is provided.
+ To use this wrapper, simply prefix the terminal command
+ with
+ <filename>${COREBASE}/scripts/contrib/serdevtry</filename>:
+ <literallayout class='monospaced'>
+ TEST_SERIALCONTROL_CMD = "${COREBASE}/scripts/contrib/serdevtry picocom -b
+115200 /dev/ttyUSB0"
+ </literallayout>
</para>
</section>
</section>
@@ -7922,7 +8256,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</literallayout>
Next, use BitBake to run the tests:
<literallayout class='monospaced'>
- bitbake -c testimage &lt;image&gt;
+ bitbake -c testimage <replaceable>image</replaceable>
</literallayout></para></listitem>
</itemizedlist>
</para>
@@ -7944,7 +8278,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<ulink url='&YOCTO_DOCS_REF_URL;#var-BBPATH'><filename>BBPATH</filename></ulink>
in the <filename>local.conf</filename> file as normal.
Be sure that tests reside in
- <filename>&lt;layer&gt;/lib/oeqa/runtime</filename>.
+ <filename><replaceable>layer</replaceable>/lib/oeqa/runtime</filename>.
<note>
Be sure that module names do not collide with module names
used in the default set of test modules in
@@ -7990,7 +8324,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<listitem><para>The default tests for the image are defined
as:
<literallayout class='monospaced'>
- DEFAULT_TEST_SUITES_pn-&lt;image&gt; = "ping ssh df connman syslog xorg scp vnc date rpm smart dmesg"
+ DEFAULT_TEST_SUITES_pn-<replaceable>image</replaceable> = "ping ssh df connman syslog xorg scp vnc date rpm smart dmesg"
</literallayout></para></listitem>
                    <listitem><para>Add your own test to the list of tests
by using the following:
@@ -8021,7 +8355,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</para>
<para>
- If you image is already built, make sure the following are set
+ If your image is already built, make sure the following are set
in your <filename>local.conf</filename> file.
Be sure to provide the IP address you need:
<literallayout class='monospaced'>
@@ -8042,22 +8376,6 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</para>
<para>
- The exported data (i.e. <filename>testdata.json</filename>)
- contains paths to the Build Directory.
- Thus, the contents of the directory can be moved
- to another machine as long as you update some paths in the
- JSON.
- Usually you only care about the
- ${DEPLOY_DIR}/rpm directory (assuming the RPM and Smart tests
- are enabled).
- Consequently, running the tests on other machine
- means that you have to move the contents and call
- <filename>runexported</filename> with "&dash;&dash;deploy-dir PATH:
- ./runexported.py &dash;&dash;deploy-dir /new/path/on/this/machine testdata.json
- runexported.py accepts other arguments as well, see &dash;&dash;help.
- </para>
-
- <para>
You can now run the tests outside of the build environment:
<literallayout class='monospaced'>
$ cd tmp/testimage/core-image-sato
@@ -8071,6 +8389,27 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<filename>runexported.py</filename>
</note>
</para>
+
+ <para>
+ The exported data (i.e. <filename>testdata.json</filename>)
+ contains paths to the Build Directory.
+ Thus, the contents of the directory can be moved
+ to another machine as long as you update some paths in the
+ JSON.
+ Usually, you only care about the
+ <filename>${DEPLOY_DIR}/rpm</filename> directory
+ (assuming the RPM and Smart tests are enabled).
+                    Consequently, running the tests on another machine
+ means that you have to move the contents and call
+ <filename>runexported.py</filename> with
+ "&dash;&dash;deploy-dir <replaceable>path</replaceable>" as
+ follows:
+ <literallayout class='monospaced'>
+ ./runexported.py &dash;&dash;deploy-dir /new/path/on/this/machine testdata.json
+ </literallayout>
+                <filename>runexported.py</filename> accepts other arguments
+                as well, as described in its <filename>&dash;&dash;help</filename> output.
+ </para>
</section>
<section id="qemu-image-writing-new-tests">
@@ -8081,8 +8420,8 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
proper place for the build system to find them.
New tests for additional functionality outside of the core
should be added to the layer that adds the functionality, in
- <filename>&lt;layer&gt;/lib/oeqa/runtime</filename> (as
- long as
+ <filename><replaceable>layer</replaceable>/lib/oeqa/runtime</filename>
+ (as long as
<ulink url='&YOCTO_DOCS_REF_URL;#var-BBPATH'><filename>BBPATH</filename></ulink>
is extended in the layer's
<filename>layer.conf</filename> file as normal).
@@ -8192,11 +8531,11 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
The command returns a tuple:
(status, output), which are what
their names imply - the return code
- of 'cmd' and whatever output
+ of "cmd" and whatever output
it produces.
The optional timeout argument
represents the number of seconds the
- test should wait for 'cmd' to
+ test should wait for "cmd" to
return.
If the argument is "None", the
test uses the default instance's
@@ -8250,7 +8589,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</para>
<tip>
- For best results, install DBG (<filename>-dbg</filename>) packages
+ For best results, install debug (<filename>-dbg</filename>) packages
for the applications you are going to debug.
Doing so makes extra debug symbols available that give you more
meaningful output.
@@ -8280,12 +8619,21 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
as all the heavy debugging is done by the host GDB.
Offloading these processes gives the Gdbserver running on the target a chance to remain
small and fast.
+ <note>
+ By default, source files are part of the
+ <filename>*-dbg</filename> packages in order to enable GDB
+ to show source lines in its output.
+ You can save further space on the target by setting the
+ <ulink url='&YOCTO_DOCS_REF_URL;#var-PACKAGE_DEBUG_SPLIT_STYLE'><filename>PACKAGE_DEBUG_SPLIT_STYLE</filename></ulink>
+ variable to "debug-without-src" so that these packages do not
+ include the source files.
+ </note>
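+                    For example, a single (illustrative) line in your
+                    <filename>local.conf</filename> file accomplishes this:
+                    <literallayout class='monospaced'>
+     PACKAGE_DEBUG_SPLIT_STYLE = "debug-without-src"
+                    </literallayout>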
</para>
<para>
Because the host GDB is responsible for loading the debugging information and
- for doing the necessary processing to make actual debugging happen, the
- user has to make sure the host can access the unstripped binaries complete
+ for doing the necessary processing to make actual debugging happen,
+ you have to make sure the host can access the unstripped binaries complete
with their debugging information and also be sure the target is compiled with no optimizations.
The host GDB must also have local access to all the libraries used by the
debugged program.
@@ -8333,7 +8681,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</para>
<para>
- Here is an example that when entered from the host
+                    Here is an example that, when entered from the host,
connects to the target and launches Gdbserver in order to
"debug" a binary named <filename>helloworld</filename>:
<literallayout class='monospaced'>
@@ -8384,7 +8732,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</literallayout>
Once the binary is built, you can find it here:
<literallayout class='monospaced'>
- tmp/sysroots/&lt;host-arch&gt;/usr/bin/&lt;target-platform&gt;/&lt;target-abi&gt;-gdb
+ tmp/sysroots/<replaceable>host-arch</replaceable>/usr/bin/<replaceable>target-platform</replaceable>/<replaceable>target-abi</replaceable>-gdb
</literallayout>
</para>
</section>
@@ -8700,7 +9048,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</literallayout>
The final thing you need to do to implement the fix in the
build is to update the "neard" recipe (i.e.
- <filename>neard-0.14.bb</filename> so that the
+ <filename>neard-0.14.bb</filename>) so that the
<ulink url='&YOCTO_DOCS_REF_URL;#var-SRC_URI'><filename>SRC_URI</filename></ulink>
statement includes the patch file.
The recipe file is in the folder above the patch.
@@ -8913,19 +9261,20 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
</para>
<note><title>Notes</title>
- <para>
- For information on how to delete information from the Toaster
- database, see the
- <ulink url='https://wiki.yoctoproject.org/wiki/Toaster#Deleting_a_Build_from_the_Toaster_Database'>Deleting a Build from the Toaster Database</ulink>
- wiki page.
- </para>
-
- <para>
- For information on how to set up an instance of Toaster on
- a remote host, see the
- <ulink url='https://wiki.yoctoproject.org/wiki/Toaster#Setting_up_a_Toaster_Instance_on_a_Remote_Host'>Setting Up a Toaster Instance on a Remote Host</ulink>
- wiki page.
- </para>
+ <itemizedlist>
+ <listitem><para>
+ For information on how to delete information from the
+ Toaster database, see the
+ <ulink url='https://wiki.yoctoproject.org/wiki/Toaster#Deleting_a_Build_from_the_Toaster_Database'>Deleting a Build from the Toaster Database</ulink>
+ wiki page.
+ </para></listitem>
+ <listitem><para>
+ For information on how to set up an instance of Toaster
+ on a remote host, see the
+ <ulink url='https://wiki.yoctoproject.org/wiki/Toaster#Setting_up_a_Toaster_Instance_on_a_Remote_Host'>Setting Up a Toaster Instance on a Remote Host</ulink>
+ wiki page.
+ </para></listitem>
+ </itemizedlist>
</note>
</section>
@@ -8995,7 +9344,8 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
takes place.
You can gain access to the symbols by using "dbg-pkgs" in the
<filename>IMAGE_FEATURES</filename> variable or by
- installing the appropriate DBG (<filename>-dbg</filename>) packages.
+ installing the appropriate debug (<filename>-dbg</filename>)
+ packages.
</para>
<para>
@@ -9169,8 +9519,8 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
If you wish to perform kernel profiling, you need to be sure
a <filename>vmlinux</filename> file that matches the running kernel is available.
In the source directory, that file is usually located in
- <filename>/boot/vmlinux-KERNELVERSION</filename>, where
- <filename>KERNEL-version</filename> is the version of the kernel.
+ <filename>/boot/vmlinux-<replaceable>kernelversion</replaceable></filename>, where
+ <filename><replaceable>kernelversion</replaceable></filename> is the version of the kernel.
The OpenEmbedded build system generates separate <filename>vmlinux</filename>
packages for each kernel it builds.
Thus, it should just be a question of making sure a matching package is
@@ -9272,7 +9622,7 @@ Gateways via their Web Interfaces</ulink>"</emphasis>
<note>
The Yocto Project generates a license manifest during
image creation that is located
- in <filename>${DEPLOY_DIR}/licenses/&lt;image_name-datestamp&gt;</filename>
+ in <filename>${DEPLOY_DIR}/licenses/<replaceable>image_name-datestamp</replaceable></filename>
to assist with any audits.
</note>
</para>
diff --git a/documentation/dev-manual/dev-manual-model.xml b/documentation/dev-manual/dev-manual-model.xml
index 45015e4e1c..43c31b8405 100644
--- a/documentation/dev-manual/dev-manual-model.xml
+++ b/documentation/dev-manual/dev-manual-model.xml
@@ -279,20 +279,35 @@
Within this group, you will find several kernels supported by
the Yocto Project:
<itemizedlist>
- <listitem><para><emphasis><filename>linux-yocto-3.4</filename></emphasis> - The
- stable Yocto Project kernel to use with the Yocto Project Release 1.3. This kernel
- is based on the Linux 3.4 released kernel.</para></listitem>
- <listitem><para><emphasis><filename>linux-yocto-3.8</filename></emphasis> - The
- stable Yocto Project kernel to use with the Yocto Project Release 1.4. This kernel
- is based on the Linux 3.8 released kernel.</para></listitem>
- <listitem><para><emphasis><filename>linux-yocto-3.10</filename></emphasis> - The
- stable Yocto Project kernel to use with the Yocto Project Release 1.5. This kernel
- is based on the Linux 3.10 released kernel.</para></listitem>
- <listitem><para><emphasis><filename>linux-yocto-3.14</filename></emphasis> - The
- stable Yocto Project kernel to use with the Yocto Project Release 1.6. This kernel
- is based on the Linux 3.14 released kernel.</para></listitem>
- <listitem><para><emphasis><filename>linux-yocto-dev</filename></emphasis> - A development
- kernel based on the latest upstream release candidate available.</para></listitem>
+ <listitem><para><emphasis>
+ <filename>linux-yocto-3.8</filename></emphasis> - The
+ stable Yocto Project kernel to use with the Yocto
+ Project Release 1.4. This kernel is based on the
+ Linux 3.8 released kernel.
+ </para></listitem>
+ <listitem><para><emphasis>
+ <filename>linux-yocto-3.10</filename></emphasis> - The
+ stable Yocto Project kernel to use with the Yocto
+ Project Release 1.5.
+ This kernel is based on the Linux 3.10 released kernel.
+ </para></listitem>
+ <listitem><para><emphasis>
+ <filename>linux-yocto-3.14</filename></emphasis> - The
+ stable Yocto Project kernel to use with the Yocto
+ Project Releases 1.6 and 1.7.
+ This kernel is based on the Linux 3.14 released kernel.
+ </para></listitem>
+ <listitem><para><emphasis>
+ <filename>linux-yocto-3.17</filename></emphasis> - An
+ additional Yocto Project kernel used with the Yocto
+ Project Release 1.7.
+ This kernel is based on the Linux 3.17 released kernel.
+ </para></listitem>
+ <listitem><para><emphasis>
+ <filename>linux-yocto-dev</filename></emphasis> - A
+ development kernel based on the latest upstream release
+ candidate available.
+ </para></listitem>
</itemizedlist>
</para>
diff --git a/documentation/dev-manual/dev-manual-qemu.xml b/documentation/dev-manual/dev-manual-qemu.xml
index d206c111ed..739fd7104b 100644
--- a/documentation/dev-manual/dev-manual-qemu.xml
+++ b/documentation/dev-manual/dev-manual-qemu.xml
@@ -349,19 +349,19 @@
<listitem><para><emphasis><filename>start</filename>:</emphasis>
Starts the NFS share:
<literallayout class='monospaced'>
- runqemu-export-rootfs start &lt;<replaceable>file-system-location</replaceable>&gt;
+ runqemu-export-rootfs start <replaceable>file-system-location</replaceable>
</literallayout>
</para></listitem>
<listitem><para><emphasis><filename>stop</filename>:</emphasis>
Stops the NFS share:
<literallayout class='monospaced'>
- runqemu-export-rootfs stop &lt;<replaceable>file-system-location</replaceable>&gt;
+ runqemu-export-rootfs stop <replaceable>file-system-location</replaceable>
</literallayout>
</para></listitem>
<listitem><para><emphasis><filename>restart</filename>:</emphasis>
Restarts the NFS share:
<literallayout class='monospaced'>
- runqemu-export-rootfs restart &lt;<replaceable>file-system-location</replaceable>&gt;
+ runqemu-export-rootfs restart <replaceable>file-system-location</replaceable>
</literallayout>
</para></listitem>
</itemizedlist>
@@ -379,7 +379,7 @@
<listitem><para><emphasis>Switching Between Consoles:</emphasis>
When booting or running QEMU, you can switch between
supported consoles by using
- Ctrl+Alt+&lt;<replaceable>number</replaceable>&gt;.
+ Ctrl+Alt+<replaceable>number</replaceable>.
For example, Ctrl+Alt+3 switches you to the serial console as
long as that console is enabled.
Being able to switch consoles is helpful, for example, if the
diff --git a/documentation/dev-manual/dev-manual-start.xml b/documentation/dev-manual/dev-manual-start.xml
index b79f65e878..61434ff72c 100644
--- a/documentation/dev-manual/dev-manual-start.xml
+++ b/documentation/dev-manual/dev-manual-start.xml
@@ -190,13 +190,13 @@
Resolving deltas: 100% (260/260), done.
</literallayout></para></listitem>
<listitem><para id='supported-board-support-packages-(bsps)'><emphasis>Supported Board Support Packages (BSPs):</emphasis>
- The Yocto Project provides a layer called
- <filename>meta-intel</filename> and it is maintained in its own
- separate Git repository.
- The <filename>meta-intel</filename> layer contains many
- supported
- <ulink url='&YOCTO_DOCS_BSP_URL;#bsp-layers'>BSP Layers</ulink>.
- </para>
+ The Yocto Project supports many BSPs, which are maintained in
+ their own layers or in layers designed to contain several
+ BSPs.
+ To get an idea of machine support through BSP layers, you can
+ look at the
+ <ulink url='&YOCTO_RELEASE_DL_URL;/machines'>index of machines</ulink>
+ for the release.</para>
<para>The Yocto Project uses the following BSP layer naming
scheme:
@@ -214,18 +214,18 @@
See the
"<ulink url='&YOCTO_DOCS_BSP_URL;#bsp-layers'>BSP Layers</ulink>"
section in the Yocto Project Board Support Package (BSP)
- Developer's Guide for more information on BSP Layers.
- </para>
+ Developer's Guide for more information on BSP Layers.</para>
- <para>
+ <para>A useful Git repository released with the Yocto
+ Project is <filename>meta-intel</filename>, which is a
+ parent layer that contains many supported
+ <ulink url='&YOCTO_DOCS_BSP_URL;#bsp-layers'>BSP Layers</ulink>.
You can locate the <filename>meta-intel</filename> Git
repository in the "Yocto Metadata Layers" area of the Yocto
Project Source Repositories at
- <ulink url='&YOCTO_GIT_URL;/cgit.cgi'></ulink>.
- </para>
+ <ulink url='&YOCTO_GIT_URL;/cgit.cgi'></ulink>.</para>
- <para>
- Using
+ <para>Using
<link linkend='git'>Git</link> to create a local clone of the
upstream repository can be helpful if you are working with
BSPs.
@@ -252,11 +252,9 @@
remote: Total 8844 (delta 4931), reused 8780 (delta 4867)
Receiving objects: 100% (8844/8844), 2.48 MiB | 264 KiB/s, done.
Resolving deltas: 100% (4931/4931), done.
- </literallayout>
- </para>
+ </literallayout></para>
- <para>
- The same
+ <para>The same
<ulink url='&YOCTO_WIKI_URL;/wiki/Transcript:_from_git_checkout_to_meta-intel_BSP'>wiki page</ulink>
referenced earlier covers how to set up the
<filename>meta-intel</filename> Git repository.
diff --git a/documentation/dev-manual/dev-manual.xml b/documentation/dev-manual/dev-manual.xml
index d7e3177af8..4a7840f5df 100644
--- a/documentation/dev-manual/dev-manual.xml
+++ b/documentation/dev-manual/dev-manual.xml
@@ -68,9 +68,14 @@
</revision>
<revision>
<revnumber>1.7</revnumber>
- <date>Fall of 2014</date>
+ <date>October 2014</date>
<revremark>Released with the Yocto Project 1.7 Release.</revremark>
</revision>
+ <revision>
+ <revnumber>1.8</revnumber>
+ <date>Sometime in 2015</date>
+ <revremark>Released with the Yocto Project 1.8 Release.</revremark>
+ </revision>
</revhistory>
<copyright>
diff --git a/documentation/kernel-dev/kernel-dev-advanced.xml b/documentation/kernel-dev/kernel-dev-advanced.xml
index 4a6aeb7391..283f483112 100644
--- a/documentation/kernel-dev/kernel-dev-advanced.xml
+++ b/documentation/kernel-dev/kernel-dev-advanced.xml
@@ -214,7 +214,7 @@
Here is an example that shows a trivial tree of kernel Metadata
stored in recipe-space within a BSP layer:
<literallayout class='monospaced'>
- meta-my_bsp_layer/
+ meta-<replaceable>my_bsp_layer</replaceable>/
`-- recipes-kernel
`-- linux
`-- linux-yocto
@@ -370,7 +370,7 @@
of Metadata.
The following Metadata file hierarchy is recommended:
<literallayout class='monospaced'>
- &lt;base&gt;/
+ <replaceable>base</replaceable>/
bsp/
cfg/
features/
@@ -513,7 +513,7 @@
patch mypatch.patch
patches/mypatch.patch:
- &lt;typical-patch&gt;
+ <replaceable>typical-patch</replaceable>
</literallayout>
You can create the typical <filename>.patch</filename>
file using <filename>diff -Nurp</filename> or
@@ -968,37 +968,38 @@
hierarchical branching system similar to what the linux-yocto Linux
kernel repositories use:
<literallayout class='monospaced'>
- &lt;common&gt;/&lt;kernel_type&gt;/&lt;machine&gt;
+ <replaceable>common</replaceable>/<replaceable>kernel_type</replaceable>/<replaceable>machine</replaceable>
</literallayout>
</para>
<para>
If you had two kernel types, "standard" and "small" for
- instance, and three machines, the branches in your
+ instance, three machines, and <replaceable>common</replaceable>
+ as <filename>mydir</filename>, the branches in your
Git repository might look like this:
<literallayout class='monospaced'>
- common/base
- common/standard/base
- common/standard/machine_a
- common/standard/machine_b
- common/standard/machine_c
- common/small/base
- common/small/machine_a
+ mydir/base
+ mydir/standard/base
+ mydir/standard/machine_a
+ mydir/standard/machine_b
+ mydir/standard/machine_c
+ mydir/small/base
+ mydir/small/machine_a
</literallayout>
</para>
<para>
This organization can help clarify the branch relationships.
- In this case, <filename>common/standard/machine_a</filename>
- includes everything in <filename>common/base</filename> and
- <filename>common/standard/base</filename>.
+ In this case, <filename>mydir/standard/machine_a</filename>
+ includes everything in <filename>mydir/base</filename> and
+ <filename>mydir/standard/base</filename>.
The "standard" and "small" branches add sources specific to those
kernel types that for whatever reason are not appropriate for the
other branches.
<note>The "base" branches are an artifact of the way Git manages
its data internally on the filesystem: Git will not allow you
- to use <filename>common/standard</filename> and
- <filename>common/standard/machine_a</filename> because it
+ to use <filename>mydir/standard</filename> and
+ <filename>mydir/standard/machine_a</filename> because it
would have to create a file and a directory named "standard".
</note>
</para>
diff --git a/documentation/kernel-dev/kernel-dev-common.xml b/documentation/kernel-dev/kernel-dev-common.xml
index 35e7d8b080..58cc98ddff 100644
--- a/documentation/kernel-dev/kernel-dev-common.xml
+++ b/documentation/kernel-dev/kernel-dev-common.xml
@@ -88,7 +88,7 @@
recipe, the append file will typically be located as follows
within your custom layer:
<literallayout class='monospaced'>
- &lt;your-layer&gt;/recipes-kernel/linux/linux-yocto_3.4.bbappend
+ <replaceable>your-layer</replaceable>/recipes-kernel/linux/linux-yocto_3.4.bbappend
</literallayout>
The append file should initially extend the
<ulink url='&YOCTO_DOCS_REF_URL;#var-FILESPATH'><filename>FILESPATH</filename></ulink>
@@ -107,7 +107,7 @@
described above, you must place the files in your layer in the
following area:
<literallayout class='monospaced'>
- &lt;your-layer&gt;/recipes-kernel/linux/linux-yocto/
+ <replaceable>your-layer</replaceable>/recipes-kernel/linux/linux-yocto/
</literallayout>
<note>If you are working on a new machine Board Support Package
(BSP), be sure to refer to the
diff --git a/documentation/kernel-dev/kernel-dev-maint-appx.xml b/documentation/kernel-dev/kernel-dev-maint-appx.xml
index a7c144ff75..a72dcff01b 100644
--- a/documentation/kernel-dev/kernel-dev-maint-appx.xml
+++ b/documentation/kernel-dev/kernel-dev-maint-appx.xml
@@ -88,7 +88,7 @@
feature description in an <filename>.scc</filename> file
whose name follows this format:
<literallayout class='monospaced'>
- &lt;bsp_name&gt;-&lt;kernel_type&gt;.scc
+ <replaceable>bsp_name</replaceable>-<replaceable>kernel_type</replaceable>.scc
</literallayout>
</para></listitem>
<listitem><para>Once located, the feature description is either compiled into a simple script
@@ -157,7 +157,7 @@
<listitem><para>A BSP build branch exists.
This branch has the following form:
<literallayout class='monospaced'>
- &lt;kernel_type&gt;/&lt;bsp_name&gt;
+ <replaceable>kernel_type</replaceable>/<replaceable>bsp_name</replaceable>
</literallayout></para></listitem>
</itemizedlist>
@@ -194,7 +194,7 @@
<filename>${MACHINE}</filename> is the metadata name of the machine (BSP) and "kernel_type" is one
of the Yocto Project supported kernel types (e.g. "standard"):
<literallayout class='monospaced'>
- linux-${MACHINE}-&lt;kernel_type&gt;-build
+ linux-${MACHINE}-<replaceable>kernel_type</replaceable>-build
</literallayout>
</para>
diff --git a/documentation/kernel-dev/kernel-dev.xml b/documentation/kernel-dev/kernel-dev.xml
index dddc003ba0..c1b9d53e92 100644
--- a/documentation/kernel-dev/kernel-dev.xml
+++ b/documentation/kernel-dev/kernel-dev.xml
@@ -53,9 +53,14 @@
</revision>
<revision>
<revnumber>1.7</revnumber>
- <date>Fall of 2014</date>
+ <date>October 2014</date>
<revremark>Released with the Yocto Project 1.7 Release.</revremark>
</revision>
+ <revision>
+ <revnumber>1.8</revnumber>
+ <date>Sometime in 2015</date>
+ <revremark>Released with the Yocto Project 1.8 Release.</revremark>
+ </revision>
</revhistory>
<copyright>
diff --git a/documentation/mega-manual/figures/buildhistory.png b/documentation/mega-manual/figures/buildhistory.png
index edf98d95cf..9a77bde68b 100644
--- a/documentation/mega-manual/figures/buildhistory.png
+++ b/documentation/mega-manual/figures/buildhistory.png
Binary files differ
diff --git a/documentation/poky.ent b/documentation/poky.ent
index 37763481cd..d066a7e2e0 100644
--- a/documentation/poky.ent
+++ b/documentation/poky.ent
@@ -1,9 +1,9 @@
-<!ENTITY DISTRO "1.7">
-<!ENTITY DISTRO_COMPRESSED "17">
+<!ENTITY DISTRO "1.8">
+<!ENTITY DISTRO_COMPRESSED "18">
<!ENTITY DISTRO_NAME "tbd">
-<!ENTITY YOCTO_DOC_VERSION "1.7">
-<!ENTITY POKYVERSION "12.0.0">
-<!ENTITY POKYVERSION_COMPRESSED "1100">
+<!ENTITY YOCTO_DOC_VERSION "1.8">
+<!ENTITY POKYVERSION "13.0.0">
+<!ENTITY POKYVERSION_COMPRESSED "1300">
<!ENTITY YOCTO_POKY "poky-&DISTRO_NAME;-&POKYVERSION;">
<!ENTITY COPYRIGHT_YEAR "2010-2014">
<!ENTITY YOCTO_DL_URL "http://downloads.yoctoproject.org">
diff --git a/documentation/profile-manual/profile-manual-usage.xml b/documentation/profile-manual/profile-manual-usage.xml
index 5577b1b001..95ad73909c 100644
--- a/documentation/profile-manual/profile-manual-usage.xml
+++ b/documentation/profile-manual/profile-manual-usage.xml
@@ -842,7 +842,7 @@
idea. One of the first projects to do this was IBM's DProbes
dpcc compiler, an ANSI C compiler which targeted a low-level
assembly language running on an in-kernel interpreter on the
- target system. This is exactly analagous to what Sun's DTrace
+ target system. This is exactly analogous to what Sun's DTrace
did, except that DTrace invented its own language for the purpose.
Systemtap, heavily inspired by DTrace, also created its own
one-off language, but rather than running the product on an
@@ -1275,7 +1275,7 @@
</para>
<informalexample>
- <emphasis>Tying it Together:</emphasis> The trace events subsystem accomodate static
+ <emphasis>Tying it Together:</emphasis> The trace events subsystem accommodates static
and dynamic tracepoints in exactly the same way - there's no
difference as far as the infrastructure is concerned. See the
ftrace section for more details on the trace event subsystem.
@@ -2169,7 +2169,7 @@
### Shell environment set up for builds. ###
- You can now run 'bitbake &lt;target&gt;'
+ You can now run 'bitbake <replaceable>target</replaceable>'
Common targets are:
core-image-minimal
@@ -3257,15 +3257,25 @@
<title>Documentation</title>
<para>
- There doesn't seem to be any current documentation covering
- LTTng 2.0, but maybe that's because the project is in transition.
- The LTTng 2.0 website, however, is here:
+ You can find the primary LTTng Documentation on the
+ <ulink url='https://lttng.org/docs/'>LTTng Documentation</ulink>
+ site.
+ The documentation on this site is appropriate for intermediate to
+ advanced software developers who are working in a Linux environment
+ and are interested in efficient software tracing.
+ </para>
+
+ <para>
+ For information on LTTng in general, visit the
<ulink url='http://lttng.org/lttng2.0'>LTTng Project</ulink>
+ site.
+ You can find a "Getting Started" link on this site that takes
+ you to an LTTng Quick Start.
</para>
<para>
- You can access extensive help information on how to use the
- LTTng plug-in to search and analyze captured traces via the
+ Finally, you can access extensive help information on how to use
+ the LTTng plug-in to search and analyze captured traces via the
Eclipse help system:
<literallayout class='monospaced'>
Help | Help Contents | LTTng Plug-in User Guide
diff --git a/documentation/profile-manual/profile-manual.xml b/documentation/profile-manual/profile-manual.xml
index e5b253b552..9842c46cab 100644
--- a/documentation/profile-manual/profile-manual.xml
+++ b/documentation/profile-manual/profile-manual.xml
@@ -53,9 +53,14 @@
</revision>
<revision>
<revnumber>1.7</revnumber>
- <date>Fall of 2014</date>
+ <date>October 2014</date>
<revremark>Released with the Yocto Project 1.7 Release.</revremark>
</revision>
+ <revision>
+ <revnumber>1.8</revnumber>
+ <date>Sometime in 2015</date>
+ <revremark>Released with the Yocto Project 1.8 Release.</revremark>
+ </revision>
</revhistory>
<copyright>
diff --git a/documentation/ref-manual/closer-look.xml b/documentation/ref-manual/closer-look.xml
index f0ed967228..c0c0d619a4 100644
--- a/documentation/ref-manual/closer-look.xml
+++ b/documentation/ref-manual/closer-look.xml
@@ -255,7 +255,7 @@
<para>
When you launch your build with the
- <filename>bitbake &lt;target&gt;</filename> command, BitBake
+ <filename>bitbake <replaceable>target</replaceable></filename> command, BitBake
sorts out the configurations to ultimately define your build
environment.
</para>
@@ -351,7 +351,7 @@
Best practices dictate that you isolate these types of
configurations into their own layer.
Settings you provide in
- <filename>conf/distro/&lt;distro&gt;.conf</filename> override
+ <filename>conf/distro/<replaceable>distro</replaceable>.conf</filename> override
similar
settings that BitBake finds in your
<filename>conf/local.conf</filename> file in the Build
@@ -375,7 +375,7 @@
This area holds configuration files for the
layer (<filename>conf/layer.conf</filename>),
the distribution
- (<filename>conf/distro/&lt;distro&gt;.conf</filename>),
+ (<filename>conf/distro/<replaceable>distro</replaceable>.conf</filename>),
and any distribution-wide include files.
</para></listitem>
<listitem><para><emphasis>recipes-*:</emphasis>
@@ -408,7 +408,7 @@
<para>
The BSP Layer's configuration directory contains
configuration files for the machine
- (<filename>conf/machine/&lt;machine&gt;.conf</filename>) and,
+ (<filename>conf/machine/<replaceable>machine</replaceable>.conf</filename>) and,
of course, the layer (<filename>conf/layer.conf</filename>).
</para>
@@ -1145,7 +1145,7 @@
<para>
Images are written out to the
<ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>
- inside the <filename>tmp/deploy/images/&lt;machine&gt;/</filename>
+ inside the <filename>tmp/deploy/images/<replaceable>machine</replaceable>/</filename>
folder as shown in the figure.
This folder contains any files expected to be loaded on the
target device.
@@ -1157,43 +1157,43 @@
variable points to the appropriate directory containing images for
the current configuration.
<itemizedlist>
- <listitem><para><filename>&lt;kernel-image&gt;</filename>:
+ <listitem><para><filename><replaceable>kernel-image</replaceable></filename>:
A kernel binary file.
The <link linkend='var-KERNEL_IMAGETYPE'><filename>KERNEL_IMAGETYPE</filename></link>
variable setting determines the naming scheme for the
kernel image file.
Depending on that variable, the file could begin with
a variety of naming strings.
- The <filename>deploy/images/&lt;machine&gt;</filename>
+ The <filename>deploy/images/<replaceable>machine</replaceable></filename>
directory can contain multiple image files for the
machine.</para></listitem>
- <listitem><para><filename>&lt;root-filesystem-image&gt;</filename>:
+ <listitem><para><filename><replaceable>root-filesystem-image</replaceable></filename>:
Root filesystems for the target device (e.g.
<filename>*.ext3</filename> or <filename>*.bz2</filename>
files).
The <link linkend='var-IMAGE_FSTYPES'><filename>IMAGE_FSTYPES</filename></link>
variable setting determines the root filesystem image
type.
- The <filename>deploy/images/&lt;machine&gt;</filename>
+ The <filename>deploy/images/<replaceable>machine</replaceable></filename>
directory can contain multiple root filesystems for the
machine.</para></listitem>
- <listitem><para><filename>&lt;kernel-modules&gt;</filename>:
+ <listitem><para><filename><replaceable>kernel-modules</replaceable></filename>:
Tarballs that contain all the modules built for the kernel.
Kernel module tarballs exist for legacy purposes and
can be suppressed by setting the
<link linkend='var-MODULE_TARBALL_DEPLOY'><filename>MODULE_TARBALL_DEPLOY</filename></link>
variable to "0".
- The <filename>deploy/images/&lt;machine&gt;</filename>
+ The <filename>deploy/images/<replaceable>machine</replaceable></filename>
directory can contain multiple kernel module tarballs
for the machine.</para></listitem>
- <listitem><para><filename>&lt;bootloaders&gt;</filename>:
+ <listitem><para><filename><replaceable>bootloaders</replaceable></filename>:
Bootloaders supporting the image, if applicable to the
target machine.
- The <filename>deploy/images/&lt;machine&gt;</filename>
+ The <filename>deploy/images/<replaceable>machine</replaceable></filename>
directory can contain multiple bootloaders for the
machine.</para></listitem>
- <listitem><para><filename>&lt;symlinks&gt;</filename>:
- The <filename>deploy/images/&lt;machine&gt;</filename>
+ <listitem><para><filename><replaceable>symlinks</replaceable></filename>:
+ The <filename>deploy/images/<replaceable>machine</replaceable></filename>
folder contains
a symbolic link that points to the most recently built file
for each machine.
@@ -1280,7 +1280,7 @@
part of the SDK (i.e. the part that runs on
the <filename>SDKMACHINE</filename>).
When you use
- <filename>bitbake -c populate_sdk &lt;imagename&gt;</filename>
+ <filename>bitbake -c populate_sdk <replaceable>imagename</replaceable></filename>
to create the SDK, a set of default packages
apply.
This variable allows you to add more packages.
diff --git a/documentation/ref-manual/faq.xml b/documentation/ref-manual/faq.xml
index 5488f8ef23..da6ce20eef 100644
--- a/documentation/ref-manual/faq.xml
+++ b/documentation/ref-manual/faq.xml
@@ -321,7 +321,7 @@
<qandaentry>
<question>
<para>
- What’s the difference between <filename>foo</filename> and <filename>foo-native</filename>?
+ What’s the difference between <replaceable>target</replaceable> and <replaceable>target</replaceable><filename>-native</filename>?
</para>
</question>
<answer>
diff --git a/documentation/ref-manual/figures/buildhistory.png b/documentation/ref-manual/figures/buildhistory.png
index edf98d95cf..9a77bde68b 100644
--- a/documentation/ref-manual/figures/buildhistory.png
+++ b/documentation/ref-manual/figures/buildhistory.png
Binary files differ
diff --git a/documentation/ref-manual/introduction.xml b/documentation/ref-manual/introduction.xml
index 6a5937bdb1..f986f7c28e 100644
--- a/documentation/ref-manual/introduction.xml
+++ b/documentation/ref-manual/introduction.xml
@@ -73,6 +73,10 @@
Describes the tasks defined by the OpenEmbedded build system.
</para></listitem>
<listitem><para><emphasis>
+ <link linkend='ref-qa-checks'>QA Error and Warning Messages</link>:</emphasis>
+ Lists and describes QA warning and error messages.
+ </para></listitem>
+ <listitem><para><emphasis>
<link linkend='ref-images'>Images</link>:</emphasis>
Describes the standard images that the Yocto Project supports.
</para></listitem>
@@ -164,6 +168,8 @@
<listitem><para>Debian GNU/Linux 7.2 (Wheezy)</para></listitem>
<listitem><para>Debian GNU/Linux 7.3 (Wheezy)</para></listitem>
<listitem><para>Debian GNU/Linux 7.4 (Wheezy)</para></listitem>
+ <listitem><para>Debian GNU/Linux 7.5 (Wheezy)</para></listitem>
+ <listitem><para>Debian GNU/Linux 7.6 (Wheezy)</para></listitem>
<!-- <listitem><para>openSUSE 11.4</para></listitem>
<listitem><para>openSUSE 12.1</para></listitem> -->
<listitem><para>openSUSE 12.2</para></listitem>
@@ -398,14 +404,14 @@
choose the installation directory.
For example, you could choose the following:
<literallayout class='monospaced'>
- /home/your-username/buildtools
+ /home/<replaceable>your-username</replaceable>/buildtools
</literallayout>
</para></listitem>
<listitem><para>
Source the tools environment setup script by using a
command like the following:
<literallayout class='monospaced'>
- $ source /home/your-username/buildtools/environment-setup-i586-poky-linux
+ $ source /home/<replaceable>your-username</replaceable>/buildtools/environment-setup-i586-poky-linux
</literallayout>
Of course, you need to supply your installation directory and be
sure to use the right file (i.e. i586 or x86-64).
@@ -484,14 +490,14 @@
choose the installation directory.
For example, you could choose the following:
<literallayout class='monospaced'>
- /home/your-username/buildtools
+ /home/<replaceable>your-username</replaceable>/buildtools
</literallayout>
</para></listitem>
<listitem><para>
Source the tools environment setup script by using a
command like the following:
<literallayout class='monospaced'>
- $ source /home/your-username/buildtools/environment-setup-i586-poky-linux
+ $ source /home/<replaceable>your-username</replaceable>/buildtools/environment-setup-i586-poky-linux
</literallayout>
Of course, you need to supply your installation directory and be
sure to use the right file (i.e. i586 or x86-64).
diff --git a/documentation/ref-manual/migration.xml b/documentation/ref-manual/migration.xml
index efed11dcb2..d072ecfa0e 100644
--- a/documentation/ref-manual/migration.xml
+++ b/documentation/ref-manual/migration.xml
@@ -110,7 +110,7 @@
appended to the path used to access the mirror.
Here is an example:
<literallayout class='monospaced'>
- SSTATE_MIRRORS = "file://.* http://someserver.tld/share/sstate/PATH"
+ SSTATE_MIRRORS = "file://.* http://<replaceable>someserver</replaceable>.tld/share/sstate/PATH"
</literallayout>
</para>
</section>
@@ -375,10 +375,11 @@
<listitem><para><emphasis>Shared State Code:</emphasis>
The shared state code has been optimized to avoid running
unnecessary tasks.
- For example,
- <filename>bitbake -c rootfs some-image</filename> from
- shared state no longer populates the target sysroot
- since that is not necessary.
+ For example, the following no longer populates the target
+ sysroot since that is not necessary:
+ <literallayout class='monospaced'>
+ $ bitbake -c rootfs <replaceable>some-image</replaceable>
+ </literallayout>
Instead, the system just needs to extract the output
package contents, re-create the packages, and construct
the root filesystem.
@@ -832,7 +833,7 @@
This directory is located under
<filename>sysroots</filename> and uses a machine-specific
name (i.e.
- <filename>tmp/sysroots/&lt;machine&gt;/pkgdata</filename>).
+ <filename>tmp/sysroots/<replaceable>machine</replaceable>/pkgdata</filename>).
</para></listitem>
</itemizedlist>
</para>
@@ -1100,7 +1101,7 @@
</para></listitem>
<listitem><para>
<filename>base-files</filename>: Remove the unnecessary
- <filename>media/xxx</filename> directories.
+ <filename>media/</filename><replaceable>xxx</replaceable> directories.
</para></listitem>
<listitem><para>
<filename>alsa-state</filename>: Provide an empty
@@ -1228,7 +1229,7 @@
value against the branch.
You can specify the branch using the following form:
<literallayout class='monospaced'>
- SRC_URI = "git://server.name/repository;branch=&lt;branchname&gt;"
+ SRC_URI = "git://server.name/repository;branch=<replaceable>branchname</replaceable>"
</literallayout>
If you do not specify a branch, BitBake looks
in the default "master" branch.
@@ -1305,10 +1306,10 @@
</section>
<section id='migration-1.6-task-taskname-overrides'>
- <title><filename>task-&lt;taskname&gt;</filename> Overrides</title>
+ <title><filename>task-</filename><replaceable>taskname</replaceable> Overrides</title>
<para>
- <filename>task-&lt;taskname&gt;</filename> overrides have been
+ <filename>task-</filename><replaceable>taskname</replaceable> overrides have been
adjusted so that tasks whose names contain underscores have the
underscores replaced by hyphens for the override so that they
now function properly.
@@ -1691,6 +1692,327 @@
</para>
</section>
</section>
+
+<section id='moving-to-the-yocto-project-1.7-release'>
+ <title>Moving to the Yocto Project 1.7 Release</title>
+
+ <para>
+ This section provides migration information for moving to the
+ Yocto Project 1.7 Release from the prior release.
+ </para>
+
+ <section id='migration-1.7-changes-to-setting-qemu-packageconfig-options'>
+ <title>Changes to Setting QEMU <filename>PACKAGECONFIG</filename> Options in <filename>local.conf</filename></title>
+
+ <para>
+ The QEMU recipe now uses a number of
+ <link linkend='var-PACKAGECONFIG'><filename>PACKAGECONFIG</filename></link>
+ options to enable various optional features.
+ The method used to set defaults for these options means that
+ existing
+ <filename>local.conf</filename> files will need to be
+ modified to append to <filename>PACKAGECONFIG</filename> for
+ <filename>qemu-native</filename> and
+ <filename>nativesdk-qemu</filename> instead of setting it.
+ In other words, to enable graphical output for QEMU, you should
+ now have these lines in <filename>local.conf</filename>:
+ <literallayout class='monospaced'>
+ PACKAGECONFIG_append_pn-qemu-native = " sdl"
+ PACKAGECONFIG_append_pn-nativesdk-qemu = " sdl"
+ </literallayout>
+ </para>
+ </section>
+
+ <section id='migration-1.7-minimum-git-version'>
+ <title>Minimum Git version</title>
+
+ <para>
+ The minimum
+ <ulink url='&YOCTO_DOCS_DEV_URL;#git'>Git</ulink> version required
+ on the build host is now 1.7.8 because the
+ <filename>&dash;&dash;list</filename> option is now required by
+ BitBake's Git fetcher.
+ As always, if your host distribution does not provide a version of
+ Git that meets this requirement, you can use the
+ <filename>buildtools-tarball</filename> that does.
+ See the
+ "<link linkend='required-git-tar-and-python-versions'>Required Git, tar, and Python Versions</link>"
+ section for more information.
+ </para>
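+
+ <para>
+ As a quick check on the build host (a sketch only; output will
+ vary), you can verify the installed Git version and, if it is
+ too old, build the tarball yourself:
+ <literallayout class='monospaced'>
+     $ git --version
+     git version 1.7.1
+     $ bitbake buildtools-tarball
+ </literallayout>
+ </para>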
+ </section>
+
+ <section id='migration-1.7-autotools-class-changes'>
+ <title>Autotools Class Changes</title>
+
+ <para>
+ The following
+ <link linkend='ref-classes-autotools'><filename>autotools</filename></link>
+ class changes occurred:
+ <itemizedlist>
+ <listitem><para><emphasis>
+ A separate build directory is now used by default:</emphasis>
+ The <filename>autotools</filename> class has been changed
+ to use a directory for building
+ (<link linkend='var-B'><filename>B</filename></link>),
+ which is separate from the source directory
+ (<link linkend='var-S'><filename>S</filename></link>).
+ This is commonly referred to as
+ <filename>B != S</filename>, or an out-of-tree build.</para>
+ <para>If the software being built is already capable of
+ building in a directory separate from the source, you
+ do not need to do anything.
+ However, if the software is not capable of being built
+ in this manner, you need to either patch the software so
+ that it can build separately, or change the recipe to
+ inherit the
+ <link linkend='ref-classes-autotools-brokensep'><filename>autotools-brokensep</filename></link>
+ class instead of the <filename>autotools</filename> class,
+ as sketched after this list.
+ </para></listitem>
+ <listitem><para><emphasis>
+ The <filename>&dash;&dash;foreign</filename> option is
+ no longer passed to <filename>automake</filename> when
+ running <filename>autoconf</filename>:</emphasis>
+ This option tells <filename>automake</filename> that a
+ particular software package does not follow the GNU
+ standards and therefore should not be expected
+ to distribute certain files such as
+ <filename>ChangeLog</filename>,
+ <filename>AUTHORS</filename>, and so forth.
+ Because the majority of upstream software packages already
+ tell <filename>automake</filename> to enable foreign mode
+ themselves, the option is mostly superfluous.
+ However, some recipes will need patches for this change.
+ You can easily make the change by patching
+ <filename>configure.ac</filename> so that it passes
+ "foreign" to <filename>AM_INIT_AUTOMAKE()</filename>.
+ See
+ <ulink url='http://cgit.openembedded.org/openembedded-core/commit/?id=01943188f85ce6411717fb5bf702d609f55813f2'>this commit</ulink>
+ for an example showing how to make the patch.
+ </para></listitem>
+ </itemizedlist>
+ </para>
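+
+ <para>
+ As an illustrative sketch (the recipe name is hypothetical), a
+ recipe that cannot build out-of-tree simply switches the class
+ it inherits:
+ <literallayout class='monospaced'>
+     # myrecipe_1.0.bb - software that only supports building with B == S
+     inherit autotools-brokensep
+ </literallayout>
+ </para>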
+ </section>
+
+ <section id='migration-1.7-binary-configuration-scripts-disabled'>
+ <title>Binary Configuration Scripts Disabled</title>
+
+ <para>
+ Some of the core recipes that package binary configuration scripts
+ now disable those scripts because the scripts previously required
+ error-prone path substitution.
+ Software that links against these libraries using these scripts
+ should use the much more robust <filename>pkg-config</filename>
+ instead.
+ The list of recipes changed in this version (and their
+ configuration scripts) is as follows:
+ <literallayout class='monospaced'>
+ directfb (directfb-config)
+ freetype (freetype-config)
+ gpgme (gpgme-config)
+ libassuan (libassuan-config)
+ libcroco (croco-6.0-config)
+ libgcrypt (libgcrypt-config)
+ libgpg-error (gpg-error-config)
+ libksba (ksba-config)
+ libpcap (pcap-config)
+ libpcre (pcre-config)
+ libpng (libpng-config, libpng16-config)
+ libsdl (sdl-config)
+ libusb-compat (libusb-config)
+ libxml2 (xml2-config)
+ libxslt (xslt-config)
+ ncurses (ncurses-config)
+ neon (neon-config)
+ npth (npth-config)
+ pth (pth-config)
+ taglib (taglib-config)
+ </literallayout>
+ Additionally, support for <filename>pkg-config</filename> has been
+ added to some recipes in the previous list in the rare cases
+ where the upstream software package does not already provide
+ it.
+ </para>
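+
+ <para>
+ If a recipe's build still invokes one of these scripts, a typical
+ fix (shown here only as a sketch) is to query
+ <filename>pkg-config</filename> instead, for example:
+ <literallayout class='monospaced'>
+     # Before (the script now returns an error):
+     freetype-config --cflags
+     # After:
+     pkg-config --cflags freetype2
+ </literallayout>
+ </para>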
+ </section>
+
+ <section id='migration-1.7-glibc-replaces-eglibc'>
+ <title><filename>eglibc 2.19</filename> Replaced with <filename>glibc 2.20</filename></title>
+
+ <para>
+ Because <filename>eglibc</filename> and
+ <filename>glibc</filename> were already fairly close, this
+ replacement should not require any significant changes to other
+ software that links to <filename>eglibc</filename>.
+ However, there were a number of minor changes in
+ <filename>glibc 2.20</filename> upstream that could require
+ patching some software (e.g. the removal of the
+ <filename>_BSD_SOURCE</filename> feature test macro).
+ </para>
+
+ <para>
+ <filename>glibc 2.20</filename> requires version 2.6.32 or greater
+ of the Linux kernel.
+ Thus, older kernels will no longer be usable in conjunction with it.
+ </para>
+
+ <para>
+ For full details on the changes in <filename>glibc 2.20</filename>,
+ see the upstream release notes
+ <ulink url='https://sourceware.org/ml/libc-alpha/2014-09/msg00088.html'>here</ulink>.
+ </para>
+ </section>
+
+ <section id='migration-1.7-kernel-module-autoloading'>
+ <title>Kernel Module Autoloading</title>
+
+ <para>
+ The
+ <link linkend='var-module_autoload'><filename>module_autoload_*</filename></link>
+ variable is now deprecated and a new
+ <link linkend='var-KERNEL_MODULE_AUTOLOAD'><filename>KERNEL_MODULE_AUTOLOAD</filename></link>
+ variable should be used instead.
+ Also,
+ <link linkend='var-module_conf'><filename>module_conf_*</filename></link>
+ must now be used in conjunction with a new
+ <link linkend='var-KERNEL_MODULE_PROBECONF'><filename>KERNEL_MODULE_PROBECONF</filename></link>
+ variable.
+ The new variables no longer require you to specify the module name
+ as part of the variable name.
+ This change not only simplifies usage but also allows the values
+ of these variables to be appropriately incorporated into task
+ signatures and thus trigger the appropriate tasks to re-execute
+ when changed.
+ You should replace any references to
+ <filename>module_autoload_*</filename> with
+ <filename>KERNEL_MODULE_AUTOLOAD</filename>, and add any modules
+ for which <filename>module_conf_*</filename> is specified to
+ <filename>KERNEL_MODULE_PROBECONF</filename>.
+ </para>
+
+ <para>
+ For more information, see the
+ <link linkend='var-KERNEL_MODULE_AUTOLOAD'><filename>KERNEL_MODULE_AUTOLOAD</filename></link>
+ and
+ <link linkend='var-KERNEL_MODULE_PROBECONF'><filename>KERNEL_MODULE_PROBECONF</filename></link>
+ variables.
+ </para>
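+
+ <para>
+ For example (the module name here is purely illustrative), a
+ kernel module recipe or machine configuration might now contain:
+ <literallayout class='monospaced'>
+     KERNEL_MODULE_AUTOLOAD += "mymodule"
+     KERNEL_MODULE_PROBECONF += "mymodule"
+     module_conf_mymodule = "options mymodule paramA=1"
+ </literallayout>
+ </para>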
+ </section>
+
+ <section id='migration-1.7-qa-check-changes'>
+ <title>QA Check Changes</title>
+
+ <para>
+ The following changes have occurred to the QA check process:
+ <itemizedlist>
+ <listitem><para>
+ Additional QA checks <filename>file-rdeps</filename>
+ and <filename>build-deps</filename> have been added in
+ order to verify that file dependencies are satisfied
+ (e.g. package contains a script requiring
+ <filename>/bin/bash</filename>) and build-time dependencies
+ are declared, respectively.
+ For more information, please see the
+ "<link linkend='ref-qa-checks'>QA Error and Warning Messages</link>"
+ chapter.
+ </para></listitem>
+ <listitem><para>
+ Package QA checks are now performed during a new
+ <link linkend='ref-tasks-package_qa'><filename>do_package_qa</filename></link>
+ task rather than being part of the
+ <link linkend='ref-tasks-package'><filename>do_package</filename></link>
+ task.
+ This allows more parallel execution.
+ This change is unlikely to be an issue except for highly
+ customized recipes that disable packaging tasks themselves
+ by marking them as <filename>noexec</filename>.
+ For those recipes, you need to disable the
+ <filename>do_package_qa</filename> task as well, as
+ sketched after this list.
+ </para></listitem>
+ <listitem><para>
+ Files being overwritten during the
+ <link linkend='ref-tasks-populate_sysroot'><filename>do_populate_sysroot</filename></link>
+ task now trigger an error instead of a warning.
+ Recipes should not be overwriting files written to the
+ sysroot by other recipes.
+ If you have these types of recipes, you need to alter them
+ so that they do not overwrite these files.</para>
+ <para>You might now receive this error when changes in
+ configuration or metadata result in orphaned files
+ being left in the sysroot.
+ If you do receive this error, the way to resolve the issue
+ is to delete your
+ <link linkend='var-TMPDIR'><filename>TMPDIR</filename></link>
+ or to move it out of the way and then re-start the build.
+ Anything that has been fully built up to that point and
+ does not need rebuilding will be restored from the shared
+ state cache and the rest of the build will be able to
+ proceed as normal.
+ </para></listitem>
+ </itemizedlist>
+ </para>
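+
+ <para>
+ For the rare recipes mentioned above that already mark their
+ packaging tasks as <filename>noexec</filename>, the corresponding
+ change is along these lines (a sketch only):
+ <literallayout class='monospaced'>
+     do_package[noexec] = "1"
+     # New in this release: also disable the split-out QA task
+     do_package_qa[noexec] = "1"
+ </literallayout>
+ </para>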
+ </section>
+
+ <section id='migration-1.7-removed-recipes'>
+ <title>Removed Recipes</title>
+
+ <para>
+ The following recipes have been removed:
+ <itemizedlist>
+ <listitem><para>
+ <filename>x-load</filename>:
+ This recipe has been superseded by
+ U-boot SPL for all Cortex-based TI SoCs.
+ For legacy boards, the <filename>meta-ti</filename>
+ layer, which contains a maintained recipe, should be used
+ instead.
+ </para></listitem>
+ <listitem><para>
+ <filename>ubootchart</filename>:
+ This recipe is obsolete.
+ A <filename>bootchart2</filename> recipe has been added
+ to functionally replace it.
+ </para></listitem>
+ <listitem><para>
+ <filename>linux-yocto 3.4</filename>:
+ Support for the linux-yocto 3.4 kernel has been dropped.
+ Support for the 3.10 and 3.14 kernels remains, while
+ support for version 3.17 has been added.
+ </para></listitem>
+ <listitem><para>
+ <filename>eglibc</filename> has been removed in favor of
+ <filename>glibc</filename>.
+ See the
+ "<link linkend='migration-1.7-glibc-replaces-eglibc'><filename>eglibc 2.19</filename> Replaced with <filename>glibc 2.20</filename></link>"
+ section for more information.
+ </para></listitem>
+ </itemizedlist>
+ </para>
+ </section>
+
+ <section id='migration-1.7-miscellaneous-changes'>
+ <title>Miscellaneous Changes</title>
+
+ <para>
+ The following miscellaneous change occurred:
+ <itemizedlist>
+ <listitem><para>
+ The build history feature now writes
+ <filename>build-id.txt</filename> instead of
+ <filename>build-id</filename>.
+ Additionally, <filename>build-id.txt</filename>
+ now contains the full build header as printed by
+ BitBake upon starting the build.
+ You should manually remove old "build-id" files from your
+ existing build history repositories to avoid confusion.
+ For information on the build history feature, see the
+ "<link linkend='maintaining-build-output-quality'>Maintaining Build Output Quality</link>"
+ section.
+ </para></listitem>
+ </itemizedlist>
+ </para>
+ </section>
+</section>
+
</chapter>
<!--
vim: expandtab tw=80 ts=4
diff --git a/documentation/ref-manual/ref-classes.xml b/documentation/ref-manual/ref-classes.xml
index 868a0da5d4..fed25b25e6 100644
--- a/documentation/ref-manual/ref-classes.xml
+++ b/documentation/ref-manual/ref-classes.xml
@@ -102,10 +102,22 @@
</para>
<para>
+ By default, the <filename>autotools</filename> class
+ uses out-of-tree builds
+ (<link linkend='var-B'><filename>B</filename></link> <filename>!=</filename>
+ <link linkend='var-S'><filename>S</filename></link>).
+ If the software being built by a recipe does not support
+ using out-of-tree builds, you should have the recipe inherit the
+ <link linkend='ref-classes-autotools-brokensep'><filename>autotools-brokensep</filename></link>
+ class.
+ </para>
+
+ <para>
It's useful to have some idea of how the tasks defined by this class work
and what they do behind the scenes.
<itemizedlist>
- <listitem><para><link linkend='ref-tasks-configure'><filename>do_configure</filename></link> &dash; Regenerates the
+ <listitem><para><link linkend='ref-tasks-configure'><filename>do_configure</filename></link> &dash;
+ Regenerates the
configure script (using <filename>autoreconf</filename>) and then launches it
with a standard set of arguments used during cross-compilation.
You can pass additional parameters to <filename>configure</filename> through the
@@ -123,17 +135,6 @@
</para></listitem>
</itemizedlist>
</para>
-
- <note>
- It is planned for future Yocto Project releases that by default, the
- <filename>autotools</filename> class supports out-of-tree builds
- (<link linkend='var-B'><filename>B</filename></link> !=
- <link linkend='var-S'><filename>S</filename></link>).
- If your recipes do not support out-of-tree builds, you should
- have them inherit the
- <link linkend='ref-classes-autotools-brokensep'><filename>autotools-brokensep</filename></link>
- class.
- </note>
</section>
<section id='ref-classes-autotools-brokensep'>
@@ -186,6 +187,8 @@
binary from source.
The binary package is extracted and new packages in the configured
output package format are created.
+ Extracting and installing proprietary binaries is a common use
+ case for this class.
<note>
For RPMs and other packages that do not contain a subdirectory,
you should specify a "subdir" parameter.
@@ -229,6 +232,21 @@
</para>
</section>
+<section id='ref-classes-binconfig-disabled'>
+ <title><filename>binconfig-disabled.bbclass</filename></title>
+
+ <para>
+ An alternative version of the
+ <link linkend='ref-classes-binconfig'><filename>binconfig</filename></link>
+ class, which disables binary configuration scripts by making them
+ return an error in favor of using <filename>pkg-config</filename>
+ to query the information.
+ The scripts to be disabled should be specified using the
+ <link linkend='var-BINCONFIG'><filename>BINCONFIG</filename></link>
+ variable within the recipe inheriting the class.
+ </para>
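+
+ <para>
+ A minimal sketch of a recipe using this class (the script name is
+ illustrative):
+ <literallayout class='monospaced'>
+     inherit binconfig-disabled
+
+     BINCONFIG = "${bindir}/foo-config"
+ </literallayout>
+ </para>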
+</section>
+
<section id='ref-classes-blacklist'>
<title><filename>blacklist.bbclass</filename></title>
@@ -364,6 +382,18 @@
</para>
</section>
+<section id='ref-classes-buildstats-summary'>
+ <title><filename>buildstats-summary.bbclass</filename></title>
+
+ <para>
+ When inherited globally, prints statistics on shared state
+ (sstate) reuse at the end of the build.
+ In order to function, this class requires that the
+ <link linkend='ref-classes-buildstats'><filename>buildstats</filename></link>
+ class be enabled.
+ </para>
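+
+ <para>
+ For example, one way to enable both classes globally is to add
+ the following to <filename>local.conf</filename>:
+ <literallayout class='monospaced'>
+     INHERIT += "buildstats buildstats-summary"
+ </literallayout>
+ </para>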
+</section>
+
<section id='ref-classes-ccache'>
<title><filename>ccache.bbclass</filename></title>
@@ -431,6 +461,19 @@
</para>
</section>
+<section id='ref-classes-compress_doc'>
+ <title><filename>compress_doc.bbclass</filename></title>
+
+ <para>
+ Enables compression for man pages and info pages.
+ This class is intended to be inherited globally.
+ The default compression mechanism is gz (gzip) but you can
+ select an alternative mechanism by setting the
+ <link linkend='var-DOC_COMPRESS'><filename>DOC_COMPRESS</filename></link>
+ variable.
+ </para>
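+
+ <para>
+ For example, assuming "bz2" is among the supported mechanisms,
+ you could add the following to <filename>local.conf</filename>:
+ <literallayout class='monospaced'>
+     INHERIT += "compress_doc"
+     DOC_COMPRESS = "bz2"
+ </literallayout>
+ </para>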
+</section>
+
<section id='ref-classes-copyleft_compliance'>
<title><filename>copyleft_compliance.bbclass</filename></title>
@@ -445,6 +488,20 @@
</para>
</section>
+<section id='ref-classes-copyleft_filter'>
+ <title><filename>copyleft_filter.bbclass</filename></title>
+
+ <para>
+ A class used by the
+ <link linkend='ref-classes-archiver'><filename>archiver</filename></link>
+ and
+ <link linkend='ref-classes-copyleft_compliance'><filename>copyleft_compliance</filename></link>
+ classes for filtering licenses.
+ The <filename>copyleft_filter</filename> class is an internal class
+ and is not intended to be used directly.
+ </para>
+</section>
+
<section id='ref-classes-core-image'>
<title><filename>core-image.bbclass</filename></title>
@@ -980,6 +1037,36 @@
</para>
</section>
+<section id='ref-classes-gummiboot'>
+ <title><filename>gummiboot.bbclass</filename></title>
+
+ <para>
+ The <filename>gummiboot</filename> class provides functions specific
+ to the gummiboot bootloader for building bootable images.
+ This is an internal class and is not intended to be
+ used directly.
+ Set the
+ <link linkend='var-EFI_PROVIDER'><filename>EFI_PROVIDER</filename></link>
+ variable to "gummiboot" to use this class.
+ </para>
+
+ <para>
+ For information on more variables used and supported in this class,
+ see the
+ <link linkend='var-GUMMIBOOT_CFG'><filename>GUMMIBOOT_CFG</filename></link>,
+ <link linkend='var-GUMMIBOOT_ENTRIES'><filename>GUMMIBOOT_ENTRIES</filename></link>,
+ and
+ <link linkend='var-GUMMIBOOT_TIMEOUT'><filename>GUMMIBOOT_TIMEOUT</filename></link>
+ variables.
+ </para>
+
+ <para>
+ You can also see the
+ <ulink url='http://freedesktop.org/wiki/Software/gummiboot/'>Gummiboot documentation</ulink>
+ for more information.
+ </para>
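+
+ <para>
+ For example, to select this bootloader, set the following in
+ your machine configuration or <filename>local.conf</filename>:
+ <literallayout class='monospaced'>
+     EFI_PROVIDER = "gummiboot"
+ </literallayout>
+ </para>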
+</section>
+
<section id='ref-classes-gzipnative'>
<title><filename>gzipnative.bbclass</filename></title>
@@ -1333,6 +1420,35 @@
Currently, this test triggers too many false positives and
thus is not normally enabled.
</para></listitem>
+ <listitem><para><emphasis><filename>build-deps:</filename></emphasis>
+ Determines if a build-time dependency that is specified through
+ <link linkend='var-DEPENDS'><filename>DEPENDS</filename></link>,
+ explicit
+ <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>,
+ or task-level dependencies exists to match any runtime
+ dependency.
+ This determination is particularly useful for discovering cases
+ where runtime dependencies are detected and added only during
+ packaging.
+ If no explicit dependency has been specified within the
+ metadata, it is too late at the packaging stage to ensure that
+ the dependency is built.
+ Consequently, you can end up with an error when the package is
+ installed into the image during the
+ <link linkend='ref-tasks-rootfs'><filename>do_rootfs</filename></link>
+ task because the auto-detected dependency was not satisfied.
+ An example of this would be where the
+ <link linkend='ref-classes-update-rc.d'><filename>update-rc.d</filename></link>
+ class automatically adds a dependency on the
+ <filename>initscripts-functions</filename> package to packages
+ that install an initscript that refers to
+ <filename>/etc/init.d/functions</filename>.
+ The recipe should really have an explicit
+ <filename>RDEPENDS</filename> for the package in question on
+ <filename>initscripts-functions</filename> so that the
+ OpenEmbedded build system is able to ensure that the
+ <filename>initscripts</filename> recipe is actually built and
+ thus the <filename>initscripts-functions</filename> package is
+ made available.
+ </para></listitem>
<listitem><para><emphasis><filename>compile-host-path:</filename></emphasis>
Checks the
<link linkend='ref-tasks-compile'><filename>do_compile</filename></link>
@@ -1384,6 +1500,25 @@
Some very rare cases do exist for dynamically loaded modules where
these symlinks are needed instead in the main package.
</para></listitem>
+ <listitem><para><emphasis><filename>file-rdeps:</filename></emphasis>
+ Checks that file-level dependencies identified by the
+ OpenEmbedded build system at packaging time are satisfied.
+ For example, a shell script might start with the line
+ <filename>#!/bin/bash</filename>.
+ This line would translate to a file dependency on
+ <filename>/bin/bash</filename>.
+ Of the three package managers that the OpenEmbedded build
+ system supports, only RPM directly handles file-level
+ dependencies, resolving them automatically to packages
+ providing the files.
+ However, even though the other two package managers lack
+ that functionality, the dependencies still need to be
+ resolved.
+ This QA check attempts to ensure that explicitly declared
+ <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+ exist to handle any file-level dependency detected in
+ packaged files.
+ </para></listitem>
<listitem><para><emphasis><filename>files-invalid:</filename></emphasis>
Checks for
<link linkend='var-FILES'><filename>FILES</filename></link>
@@ -1896,7 +2031,7 @@
You can create a recipe that builds tools that run natively on the
host a couple different ways:
<itemizedlist>
- <listitem><para>Create a <filename>myrecipe-native.bb</filename>
+ <listitem><para>Create a <replaceable>myrecipe</replaceable><filename>-native.bb</filename>
that inherits the <filename>native</filename> class.
If you use this method, you must order the inherit statement
in the recipe after all other inherit statements so that the
@@ -1937,7 +2072,7 @@
You can create a recipe that builds tools that run on the SDK machine
a couple different ways:
<itemizedlist>
- <listitem><para>Create a <filename>myrecipe-nativesdk.bb</filename>
+ <listitem><para>Create a <replaceable>myrecipe</replaceable><filename>-nativesdk.bb</filename>
recipe that inherits the <filename>nativesdk</filename> class.
If you use this method, you must order the inherit statement
in the recipe after all other inherit statements so that the
@@ -2340,7 +2475,7 @@
<itemizedlist>
<listitem><para><emphasis><filename>populate_sdk_base</filename>:</emphasis>
The base class supporting SDK creation under all package
- managers (i.e. DEB, RPM, and IPK).</para></listitem>
+ managers (i.e. DEB, RPM, and opkg).</para></listitem>
<listitem><para><emphasis><filename>populate_sdk_deb</filename>:</emphasis>
Supports creation of the SDK given the Debian package manager.
</para></listitem>
@@ -2348,13 +2483,14 @@
Supports creation of the SDK given the RPM package manager.
</para></listitem>
<listitem><para><emphasis><filename>populate_sdk_ipk</filename>:</emphasis>
- Supports creation of the SDK given the IPK package manager.
+ Supports creation of the SDK given the opkg (IPK format)
+ package manager.
</para></listitem>
</itemizedlist>
</para>
<para>
- The <filename>populate_sdk_base</filename> package inherits the
+ The <filename>populate_sdk_base</filename> class inherits the
appropriate <filename>populate_sdk_*</filename> (i.e.
<filename>deb</filename>, <filename>rpm</filename>, and
<filename>ipk</filename>) based on
@@ -2365,8 +2501,8 @@
The base class ensures all source and destination directories are
established and then populates the SDK.
After populating the SDK, the <filename>populate_sdk_base</filename>
- class constructs two images:
- <link linkend='var-SDK_ARCH'><filename>SDK_ARCH</filename></link><filename>-nativesdk</filename>,
+ class constructs two sysroots:
+ <filename>${</filename><link linkend='var-SDK_ARCH'><filename>SDK_ARCH</filename></link><filename>}-nativesdk</filename>,
which contains the cross-compiler and associated tooling, and the
target, which contains a target root filesystem that is configured for
the SDK usage.
@@ -2374,8 +2510,8 @@
<link linkend='var-SDK_OUTPUT'><filename>SDK_OUTPUT</filename></link>,
which consists of the following:
<literallayout class='monospaced'>
- ${SDK_OUTPUT}/&lt;sdk_arch-nativesdk pkgs&gt;
- ${SDK_OUTPUT}/${SDKTARGETSYSROOT}/&lt;target pkgs&gt;
+ ${SDK_OUTPUT}/${SDK_ARCH}<replaceable>-nativesdk-pkgs</replaceable>
+ ${SDK_OUTPUT}/${SDKTARGETSYSROOT}/<replaceable>target-pkgs</replaceable>
</literallayout>
</para>
@@ -2481,6 +2617,22 @@
</para>
</section>
+<section id='ref-classes-ptest-gnome'>
+ <title><filename>ptest-gnome.bbclass</filename></title>
+
+ <para>
+ Enables package tests (ptests) specifically for GNOME packages,
+ which have tests intended to be executed with
+ <filename>gnome-desktop-testing</filename>.
+ </para>
+
+ <para>
+ For information on setting up and running ptests, see the
+ "<ulink url='&YOCTO_DOCS_DEV_URL;#testing-packages-with-ptest'>Testing Packages With ptest</ulink>"
+ section in the Yocto Project Development Manual.
+ </para>
+</section>
+
<section id='ref-classes-python-dir'>
<title><filename>python-dir.bbclass</filename></title>
@@ -2864,37 +3016,37 @@
<para>
The class supports the following variables:
<itemizedlist>
- <listitem><para><emphasis><link linkend='var-INITRD'><filename>INITRD</filename></link>:</emphasis>
+ <listitem><para><link linkend='var-INITRD'><filename>INITRD</filename></link>:
Indicates list of filesystem images to concatenate and use as
an initial RAM disk (initrd).
This variable is optional.</para></listitem>
- <listitem><para><emphasis><link linkend='var-ROOTFS'><filename>ROOTFS</filename></link>:</emphasis>
+ <listitem><para><link linkend='var-ROOTFS'><filename>ROOTFS</filename></link>:
Indicates a filesystem image to include as the root filesystem.
This variable is optional.</para></listitem>
- <listitem><para><emphasis><link linkend='var-AUTO_SYSLINUXMENU'><filename>AUTO_SYSLINUXMENU</filename></link>:</emphasis>
+ <listitem><para><link linkend='var-AUTO_SYSLINUXMENU'><filename>AUTO_SYSLINUXMENU</filename></link>:
Enables creating an automatic menu when set to "1".
</para></listitem>
- <listitem><para><emphasis><link linkend='var-LABELS'><filename>LABELS</filename></link>:</emphasis>
+ <listitem><para><link linkend='var-LABELS'><filename>LABELS</filename></link>:
Lists targets for automatic configuration.
</para></listitem>
- <listitem><para><emphasis><link linkend='var-APPEND'><filename>APPEND</filename></link>:</emphasis>
+ <listitem><para><link linkend='var-APPEND'><filename>APPEND</filename></link>:
Lists append string overrides for each label.
</para></listitem>
- <listitem><para><emphasis><link linkend='var-SYSLINUX_OPTS'><filename>SYSLINUX_OPTS</filename></link>:</emphasis>
+ <listitem><para><link linkend='var-SYSLINUX_OPTS'><filename>SYSLINUX_OPTS</filename></link>:
Lists additional options to add to the syslinux file.
Semicolon characters separate multiple options.
</para></listitem>
- <listitem><para><emphasis><link linkend='var-SYSLINUX_SPLASH'><filename>SYSLINUX_SPLASH</filename></link>:</emphasis>
+ <listitem><para><link linkend='var-SYSLINUX_SPLASH'><filename>SYSLINUX_SPLASH</filename></link>:
Lists a background for the VGA boot menu when you are using the
boot menu.</para></listitem>
- <listitem><para><emphasis><link linkend='var-SYSLINUX_DEFAULT_CONSOLE'><filename>SYSLINUX_DEFAULT_CONSOLE</filename></link>:</emphasis>
+ <listitem><para><link linkend='var-SYSLINUX_DEFAULT_CONSOLE'><filename>SYSLINUX_DEFAULT_CONSOLE</filename></link>:
Set to "console=ttyX" to change kernel boot default console.
</para></listitem>
- <listitem><para><emphasis><link linkend='var-SYSLINUX_SERIAL'><filename>SYSLINUX_SERIAL</filename></link>:</emphasis>
+ <listitem><para><link linkend='var-SYSLINUX_SERIAL'><filename>SYSLINUX_SERIAL</filename></link>:
Sets an alternate serial port.
Or, turns off serial when the variable is set with an
empty string.</para></listitem>
- <listitem><para><emphasis><link linkend='var-SYSLINUX_SERIAL_TTY'><filename>SYSLINUX_SERIAL_TTY</filename></link>:</emphasis>
+ <listitem><para><link linkend='var-SYSLINUX_SERIAL_TTY'><filename>SYSLINUX_SERIAL_TTY</filename></link>:
Sets an alternate "console=tty..." kernel boot argument.
</para></listitem>
</itemizedlist>
@@ -3004,6 +3156,27 @@
</para>
</section>
+<section id='ref-classes-texinfo'>
+ <title><filename>texinfo.bbclass</filename></title>
+
+ <para>
+ This class should be inherited by recipes whose upstream packages
+ invoke the <filename>texinfo</filename> utilities at build-time.
+ Native and cross recipes are made to use the dummy scripts provided
+ by <filename>texinfo-dummy-native</filename>, for improved performance.
+ Target architecture recipes use the genuine
+ Texinfo utilities.
+ By default, these utilities come from the host system.
+ <note>
+ If you want to use the Texinfo recipe shipped with the build
+ system, you can remove "texinfo-native" from
+ <link linkend='var-ASSUME_PROVIDED'><filename>ASSUME_PROVIDED</filename></link>
+ and makeinfo from
+ <link linkend='var-SANITY_REQUIRED_UTILITIES'><filename>SANITY_REQUIRED_UTILITIES</filename></link>.
+ </note>
+ </para>
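+
+ <para>
+ A sketch of the change described in the note, typically made in
+ <filename>local.conf</filename>:
+ <literallayout class='monospaced'>
+     ASSUME_PROVIDED_remove = "texinfo-native"
+     SANITY_REQUIRED_UTILITIES_remove = "makeinfo"
+ </literallayout>
+ </para>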
+</section>
+
<section id='ref-classes-tinderclient'>
<title><filename>tinderclient.bbclass</filename></title>
@@ -3079,6 +3252,21 @@
</para>
</section>
+<section id='ref-classes-uninative'>
+ <title><filename>uninative.bbclass</filename></title>
+
+ <para>
+ Provides a means of reusing <filename>native/cross</filename> over
+ multiple distros.
+ <note>
+ Currently, the method used by the <filename>uninative</filename>
+ class is experimental.
+ </note>
+ For more information, see the commit message
+ <ulink url='http://cgit.openembedded.org/openembedded-core/commit/?id=e66c96ae9c7ba21ebd04a4807390f0031238a85a'>here</ulink>.
+ </para>
+</section>
+
<section id='ref-classes-update-alternatives'>
<title><filename>update-alternatives.bbclass</filename></title>
diff --git a/documentation/ref-manual/ref-features.xml b/documentation/ref-manual/ref-features.xml
index c7fcb870f8..230cabd155 100644
--- a/documentation/ref-manual/ref-features.xml
+++ b/documentation/ref-manual/ref-features.xml
@@ -42,7 +42,7 @@
changed based on a given feature:
<literallayout class='monospaced'>
$ cd poky
- $ git grep 'contains.*MACHINE_FEATURES.*&lt;feature&gt;'
+ $ git grep 'contains.*MACHINE_FEATURES.*<replaceable>feature</replaceable>'
</literallayout>
</para>
@@ -72,16 +72,26 @@
</para></listitem>
<listitem><para><emphasis>bluetooth:</emphasis> Hardware has integrated BT
</para></listitem>
+ <listitem><para><emphasis>efi:</emphasis> Support for booting through EFI
+ </para></listitem>
<listitem><para><emphasis>ext2:</emphasis> Hardware HDD or Microdrive
</para></listitem>
<listitem><para><emphasis>irda:</emphasis> Hardware has IrDA support
</para></listitem>
<listitem><para><emphasis>keyboard:</emphasis> Hardware has a keyboard
</para></listitem>
+ <listitem><para><emphasis>pcbios:</emphasis> Support for booting through BIOS
+ </para></listitem>
<listitem><para><emphasis>pci:</emphasis> Hardware has a PCI bus
</para></listitem>
<listitem><para><emphasis>pcmcia:</emphasis> Hardware has PCMCIA or CompactFlash sockets
</para></listitem>
+ <listitem><para><emphasis>phone:</emphasis> Mobile phone (voice) support
+ </para></listitem>
+ <listitem><para><emphasis>qvga:</emphasis> Machine has a QVGA (320x240) display
+ </para></listitem>
+ <listitem><para><emphasis>rtc:</emphasis> Machine has a Real-Time Clock
+ </para></listitem>
<listitem><para><emphasis>screen:</emphasis> Hardware has a screen
</para></listitem>
<listitem><para><emphasis>serial:</emphasis> Hardware has serial support (usually RS232)
@@ -92,6 +102,8 @@
</para></listitem>
<listitem><para><emphasis>usbhost:</emphasis> Hardware is USB Host capable
</para></listitem>
+ <listitem><para><emphasis>vfat:</emphasis> FAT file system support
+ </para></listitem>
<listitem><para><emphasis>wifi:</emphasis> Hardware has integrated WiFi
</para></listitem>
</itemizedlist>
@@ -195,51 +207,100 @@
<title>Image Features</title>
<para>
- The contents of images generated by the OpenEmbedded build system can be controlled by the
- <filename><link linkend='var-IMAGE_FEATURES'>IMAGE_FEATURES</link></filename>
- and <filename><link linkend='var-EXTRA_IMAGE_FEATURES'>EXTRA_IMAGE_FEATURES</link></filename>
+ The contents of images generated by the OpenEmbedded build system
+ can be controlled by the
+ <link linkend='var-IMAGE_FEATURES'><filename>IMAGE_FEATURES</filename></link>
+ and
+ <link linkend='var-EXTRA_IMAGE_FEATURES'><filename>EXTRA_IMAGE_FEATURES</filename></link>
variables that you typically configure in your image recipes.
Through these variables, you can add several different
- predefined packages such as development utilities or packages with debug
- information needed to investigate application problems or profile applications.
+ predefined packages such as development utilities or packages with
+ debug information needed to investigate application problems or
+ profile applications.
</para>
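+
+ <para>
+ For example (using feature names from the lists below), an image
+ recipe might add features directly, while
+ <filename>local.conf</filename> typically uses
+ <filename>EXTRA_IMAGE_FEATURES</filename>:
+ <literallayout class='monospaced'>
+     IMAGE_FEATURES += "splash package-management"
+     EXTRA_IMAGE_FEATURES = "debug-tweaks tools-debug"
+ </literallayout>
+ </para>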
<para>
- Current list of
- <filename>IMAGE_FEATURES</filename> contains the following:
+ The following image features are available for all images:
<itemizedlist>
- <listitem><para><emphasis>dbg-pkgs:</emphasis> Installs debug symbol packages for all packages
- installed in a given image.</para></listitem>
- <listitem><para><emphasis>dev-pkgs:</emphasis> Installs development packages (headers and
- extra library links) for all packages installed in a given image.</para></listitem>
- <listitem><para><emphasis>doc-pkgs:</emphasis> Installs documentation packages for all packages
- installed in a given image.</para></listitem>
- <listitem><para><emphasis>nfs-server:</emphasis> Installs an NFS server.</para></listitem>
- <listitem><para><emphasis>read-only-rootfs:</emphasis> Creates
- an image whose root filesystem is read-only.
+ <listitem><para><emphasis>dbg-pkgs:</emphasis>
+ Installs debug symbol packages for all packages installed
+ in a given image.
+ </para></listitem>
+ <listitem><para><emphasis>debug-tweaks:</emphasis>
+ Makes an image suitable for development (e.g.
+ allows root logins without passwords).
+ </para></listitem>
+ <listitem><para><emphasis>dev-pkgs:</emphasis>
+ Installs development packages (headers and extra library
+ links) for all packages installed in a given image.
+ </para></listitem>
+ <listitem><para><emphasis>doc-pkgs:</emphasis> Installs
+ documentation packages for all packages installed in a
+ given image.
+ </para></listitem>
+ <listitem><para><emphasis>package-management:</emphasis>
+ Installs package management tools and preserves the package
+ manager database.
+ </para></listitem>
+ <listitem><para><emphasis>ptest-pkgs:</emphasis>
+ Installs ptest packages for all ptest-enabled recipes.
+ </para></listitem>
+ <listitem><para><emphasis>read-only-rootfs:</emphasis>
+ Creates an image whose root filesystem is read-only.
See the
"<ulink url='&YOCTO_DOCS_DEV_URL;#creating-a-read-only-root-filesystem'>Creating a Read-Only Root Filesystem</ulink>"
section in the Yocto Project Development Manual for more
- information.</para></listitem>
- <listitem><para><emphasis>splash:</emphasis> Enables showing a splash screen during boot.
- By default, this screen is provided by <filename>psplash</filename>, which does
- allow customization.
- If you prefer to use an alternative splash screen package, you can do so by
- setting the <filename>SPLASH</filename> variable
- to a different package name (or names) within the image recipe or at the distro
- configuration level.</para></listitem>
- <listitem><para><emphasis>ssh-server-dropbear:</emphasis> Installs the Dropbear minimal
- SSH server.
- </para></listitem>
- <listitem><para><emphasis>ssh-server-openssh:</emphasis> Installs the OpenSSH SSH server,
- which is more full-featured than Dropbear.
- Note that if both the OpenSSH SSH server and the Dropbear minimal SSH server
- are present in <filename>IMAGE_FEATURES</filename>, then OpenSSH will take
- precedence and Dropbear will not be installed.</para></listitem>
- <listitem><para><emphasis>staticdev-pkgs:</emphasis> Installs static development
- packages (i.e. static libraries containing <filename>*.a</filename> files) for all
- packages installed in a given image.</para></listitem>
- <listitem><para><emphasis>tools-debug:</emphasis> Installs debugging tools such as
+ information.
+ </para></listitem>
+ <listitem><para><emphasis>splash:</emphasis>
+ Enables showing a splash screen during boot.
+ By default, this screen is provided by
+ <filename>psplash</filename>, which does allow
+ customization.
+ If you prefer to use an alternative splash screen package,
+ you can do so by setting the <filename>SPLASH</filename>
+ variable to a different package name (or names) within the
+ image recipe or at the distro configuration level.
+ </para></listitem>
+ <listitem><para><emphasis>staticdev-pkgs:</emphasis>
+ Installs static development packages, which are
+ static libraries (i.e. <filename>*.a</filename> files), for
+ all packages installed in a given image.
+ </para></listitem>
+ </itemizedlist>
+ </para>
+
+ <para>
+ Some image features are available only when you inherit the
+ <link linkend='ref-classes-core-image'><filename>core-image</filename></link>
+ class.
+ The current list of these valid features is as follows:
+ <itemizedlist>
+ <listitem><para><emphasis>eclipse-debug:</emphasis> Provides
+ Eclipse remote debugging support.
+ </para></listitem>
+ <listitem><para><emphasis>hwcodecs:</emphasis> Installs
+ hardware acceleration codecs.
+ </para></listitem>
+ <listitem><para><emphasis>nfs-server:</emphasis>
+ Installs an NFS server.
+ </para></listitem>
+ <listitem><para><emphasis>qt4-pkgs:</emphasis>
+ Supports Qt4/X11 and demo applications.
+ </para></listitem>
+ <listitem><para><emphasis>ssh-server-dropbear:</emphasis>
+ Installs the Dropbear minimal SSH server.
+ </para></listitem>
+ <listitem><para><emphasis>ssh-server-openssh:</emphasis>
+ Installs the OpenSSH SSH server, which is more
+ full-featured than Dropbear.
+ Note that if both the OpenSSH SSH server and the Dropbear
+ minimal SSH server are present in
+ <filename>IMAGE_FEATURES</filename>, then OpenSSH will take
+ precedence and Dropbear will not be installed.
+ </para></listitem>
+ <listitem><para><emphasis>tools-debug:</emphasis>
+ Installs debugging tools such as
<filename>strace</filename> and <filename>gdb</filename>.
For information on GDB, see the
"<ulink url='&YOCTO_DOCS_DEV_URL;#platdev-gdb-remotedebug'>Debugging With the GNU Project Debugger (GDB) Remotely</ulink>"
@@ -247,23 +308,33 @@
For information on tracing and profiling, see the
<ulink url='&YOCTO_DOCS_PROF_URL;'>Yocto Project Profiling and Tracing Manual</ulink>.
</para></listitem>
- <listitem><para><emphasis>tools-profile:</emphasis> Installs profiling tools such as
- <filename>oprofile</filename>, <filename>exmap</filename>, and
- <filename>LTTng</filename>.
+ <listitem><para><emphasis>tools-profile:</emphasis>
+ Installs profiling tools such as
+ <filename>oprofile</filename>, <filename>exmap</filename>,
+ and <filename>LTTng</filename>.
For general information on user-space tools, see the
"<ulink url='&YOCTO_DOCS_ADT_URL;#user-space-tools'>User-Space Tools</ulink>"
- section in the Yocto Project Application Developer's Guide.</para></listitem>
- <listitem><para><emphasis>tools-sdk:</emphasis> Installs a full SDK that runs on the device.
+ section in the Yocto Project Application Developer's
+ Guide.
+ </para></listitem>
+ <listitem><para><emphasis>tools-sdk:</emphasis>
+ Installs a full SDK that runs on the device.
+ </para></listitem>
+ <listitem><para><emphasis>tools-testapps:</emphasis>
+ Installs device testing tools (e.g. touchscreen debugging).
</para></listitem>
- <listitem><para><emphasis>tools-testapps:</emphasis> Installs device testing tools (e.g.
- touchscreen debugging).</para></listitem>
- <listitem><para><emphasis>x11:</emphasis> Installs the X server</para></listitem>
- <listitem><para><emphasis>x11-base:</emphasis> Installs the X server with a
- minimal environment.</para></listitem>
- <listitem><para><emphasis>x11-sato:</emphasis> Installs the OpenedHand Sato environment.
+ <listitem><para><emphasis>x11:</emphasis>
+ Installs the X server.
+ </para></listitem>
+ <listitem><para><emphasis>x11-base:</emphasis>
+ Installs the X server with a minimal environment.
+ </para></listitem>
+ <listitem><para><emphasis>x11-sato:</emphasis>
+ Installs the OpenedHand Sato environment.
</para></listitem>
</itemizedlist>
</para>
+
</section>
<section id='ref-features-backfill'>
diff --git a/documentation/ref-manual/ref-images.xml b/documentation/ref-manual/ref-images.xml
index 503595c364..d15ca5b93a 100644
--- a/documentation/ref-manual/ref-images.xml
+++ b/documentation/ref-manual/ref-images.xml
@@ -38,9 +38,9 @@
</para>
<para>
- Following, is a list of supported recipes:
+ Following is a list of supported recipes:
<itemizedlist>
- <listitem><para><emphasis><filename>build-appliance-image</filename>:</emphasis>
+ <listitem><para><filename>build-appliance-image</filename>:
An example virtual machine that contains all the pieces required to
run builds using the build system as well as the build system itself.
You can boot and run the image using either the
@@ -49,18 +49,18 @@
For more information on this image, see the
<ulink url='&YOCTO_HOME_URL;/documentation/build-appliance'>Build Appliance</ulink> page on
the Yocto Project website.</para></listitem>
- <listitem><para><emphasis><filename>core-image-base</filename>:</emphasis>
+ <listitem><para><filename>core-image-base</filename>:
A console-only image that fully supports the target device hardware.</para></listitem>
- <listitem><para><emphasis><filename>core-image-clutter</filename>:</emphasis>
+ <listitem><para><filename>core-image-clutter</filename>:
An image with support for the OpenGL-based toolkit Clutter, which enables development of
rich and animated graphical user interfaces.</para></listitem>
- <listitem><para><emphasis><filename>core-image-directfb</filename>:</emphasis>
+ <listitem><para><filename>core-image-directfb</filename>:
An image that uses <filename>directfb</filename> instead of X11.
</para></listitem>
- <listitem><para><emphasis><filename>core-image-full-cmdline</filename>:</emphasis>
+ <listitem><para><filename>core-image-full-cmdline</filename>:
A console-only image with more full-featured Linux system
functionality installed.</para></listitem>
- <listitem><para><emphasis><filename>core-image-lsb</filename>:</emphasis>
+ <listitem><para><filename>core-image-lsb</filename>:
An image that conforms to the Linux Standard Base (LSB)
specification.
This image requires a distribution configuration that
@@ -68,7 +68,7 @@
If you build <filename>core-image-lsb</filename> without that
configuration, the image will not be LSB-compliant.
</para></listitem>
- <listitem><para><emphasis><filename>core-image-lsb-dev</filename>:</emphasis>
+ <listitem><para><filename>core-image-lsb-dev</filename>:
A <filename>core-image-lsb</filename> image that is suitable for development work
using the host.
The image includes headers and libraries you can use in a host development
@@ -78,7 +78,7 @@
If you build <filename>core-image-lsb-dev</filename> without that
configuration, the image will not be LSB-compliant.
</para></listitem>
- <listitem><para><emphasis><filename>core-image-lsb-sdk</filename>:</emphasis>
+ <listitem><para><filename>core-image-lsb-sdk</filename>:
A <filename>core-image-lsb</filename> that includes everything in
meta-toolchain but also includes development headers and libraries
to form a complete standalone SDK.
@@ -87,15 +87,15 @@
If you build <filename>core-image-lsb-sdk</filename> without that
configuration, the image will not be LSB-compliant.
This image is suitable for development using the target.</para></listitem>
- <listitem><para><emphasis><filename>core-image-minimal</filename>:</emphasis>
+ <listitem><para><filename>core-image-minimal</filename>:
A small image just capable of allowing a device to boot.</para></listitem>
- <listitem><para><emphasis><filename>core-image-minimal-dev</filename>:</emphasis>
+ <listitem><para><filename>core-image-minimal-dev</filename>:
A <filename>core-image-minimal</filename> image suitable for development work
using the host.
The image includes headers and libraries you can use in a host development
environment.
</para></listitem>
- <listitem><para id='images-core-image-minimal-initramfs'><emphasis><filename>core-image-minimal-initramfs</filename>:</emphasis>
+ <listitem><para id='images-core-image-minimal-initramfs'><filename>core-image-minimal-initramfs</filename>:
A <filename>core-image-minimal</filename> image that has the Minimal RAM-based
Initial Root Filesystem (initramfs) as part of the kernel,
which allows the system to find the first "init" program more efficiently.
@@ -104,38 +104,38 @@
variable for additional information helpful when working with
initramfs images.
</para></listitem>
- <listitem><para><emphasis><filename>core-image-minimal-mtdutils</filename>:</emphasis>
+ <listitem><para><filename>core-image-minimal-mtdutils</filename>:
A <filename>core-image-minimal</filename> image that has support
for the Minimal MTD Utilities, which let the user interact with the
MTD subsystem in the kernel to perform operations on flash devices.
</para></listitem>
- <listitem><para><emphasis><filename>core-image-rt</filename>:</emphasis>
+ <listitem><para><filename>core-image-rt</filename>:
A <filename>core-image-minimal</filename> image plus a real-time test suite and
tools appropriate for real-time use.</para></listitem>
- <listitem><para><emphasis><filename>core-image-rt-sdk</filename>:</emphasis>
+ <listitem><para><filename>core-image-rt-sdk</filename>:
A <filename>core-image-rt</filename> image that includes everything in
<filename>meta-toolchain</filename>.
The image also includes development headers and libraries to form a complete
stand-alone SDK and is suitable for development using the target.
</para></listitem>
- <listitem><para><emphasis><filename>core-image-sato</filename>:</emphasis>
+ <listitem><para><filename>core-image-sato</filename>:
An image with Sato support, a mobile environment and visual style that works well
with mobile devices.
The image supports X11 with a Sato theme and applications such as
a terminal, editor, file manager, media player, and so forth.
</para></listitem>
- <listitem><para><emphasis><filename>core-image-sato-dev</filename>:</emphasis>
+ <listitem><para><filename>core-image-sato-dev</filename>:
A <filename>core-image-sato</filename> image suitable for development
using the host.
The image includes libraries needed to build applications on the device itself,
testing and profiling tools, and debug symbols.
This image was formerly <filename>core-image-sdk</filename>.
</para></listitem>
- <listitem><para><emphasis><filename>core-image-sato-sdk</filename>:</emphasis>
+ <listitem><para><filename>core-image-sato-sdk</filename>:
A <filename>core-image-sato</filename> image that includes everything in meta-toolchain.
The image also includes development headers and libraries to form a complete standalone SDK
and is suitable for development using the target.</para></listitem>
- <listitem><para><emphasis><filename>core-image-testmaster</filename>:</emphasis>
+ <listitem><para><filename>core-image-testmaster</filename>:
A "master" image designed to be used for automated runtime testing.
Provides a "known good" image that is deployed to a separate
partition so that you can boot into it and use it to deploy a
@@ -144,21 +144,21 @@
"<ulink url='&YOCTO_DOCS_DEV_URL;#performing-automated-runtime-testing'>Performing Automated Runtime Testing</ulink>"
section in the Yocto Project Development Manual.
</para></listitem>
- <listitem><para><emphasis><filename>core-image-testmaster-initramfs</filename>:</emphasis>
+ <listitem><para><filename>core-image-testmaster-initramfs</filename>:
A RAM-based Initial Root Filesystem (initramfs) image tailored for
use with the <filename>core-image-testmaster</filename> image.
</para></listitem>
- <listitem><para><emphasis><filename>core-image-weston</filename>:</emphasis>
+ <listitem><para><filename>core-image-weston</filename>:
A very basic Wayland image with a terminal.
This image provides the Wayland protocol libraries and the
reference Weston compositor.
For more information, see the
"<link linkend='wayland'>Wayland</link>" section.
</para></listitem>
- <listitem><para><emphasis><filename>core-image-x11</filename>:</emphasis>
+ <listitem><para><filename>core-image-x11</filename>:
A very basic X11 image with a terminal.
</para></listitem>
- <listitem><para><emphasis><filename>qt4e-demo-image</filename>:</emphasis>
+ <listitem><para><filename>qt4e-demo-image</filename>:
An image that launches into the demo application for the embedded
(not based on X11) version of Qt.</para></listitem>
</itemizedlist>
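For example, any of the listed recipes can be built directly with BitBake; core-image-minimal is used here only as a sample target:

    $ bitbake core-image-minimal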
diff --git a/documentation/ref-manual/ref-manual-customization.xsl b/documentation/ref-manual/ref-manual-customization.xsl
index 05c9b979b1..b58b1dcaf9 100644
--- a/documentation/ref-manual/ref-manual-customization.xsl
+++ b/documentation/ref-manual/ref-manual-customization.xsl
@@ -9,6 +9,7 @@
<xsl:include href="../template/division.title.xsl"/>
<xsl:include href="../template/formal.object.heading.xsl"/>
<xsl:include href="../template/gloss-permalinks.xsl"/>
+ <xsl:include href="../template/qa-code-permalinks.xsl"/>
<xsl:param name="html.stylesheet" select="'ref-style.css'" />
<xsl:param name="chapter.autolabel" select="1" />
diff --git a/documentation/ref-manual/ref-manual.xml b/documentation/ref-manual/ref-manual.xml
index 3eff7de712..cf586e721d 100644
--- a/documentation/ref-manual/ref-manual.xml
+++ b/documentation/ref-manual/ref-manual.xml
@@ -84,9 +84,14 @@
</revision>
<revision>
<revnumber>1.7</revnumber>
- <date>Fall of 2014</date>
+ <date>October 2014</date>
<revremark>Released with the Yocto Project 1.7 Release.</revremark>
</revision>
+ <revision>
+ <revnumber>1.8</revnumber>
+ <date>Sometime in 2015</date>
+ <revremark>Released with the Yocto Project 1.8 Release.</revremark>
+ </revision>
</revhistory>
<copyright>
diff --git a/documentation/ref-manual/ref-qa-checks.xml b/documentation/ref-manual/ref-qa-checks.xml
index 15c1bbe4f5..871cd294f6 100644
--- a/documentation/ref-manual/ref-qa-checks.xml
+++ b/documentation/ref-manual/ref-qa-checks.xml
@@ -59,10 +59,21 @@
<section id='qa-errors-and-warnings'>
<title>Errors and Warnings</title>
+<!--
+This section uses the <para><code> construct to enable permalinks for the
+various QA issue and warning messages. The file templates/qa-code-permalinks.xsl
+is used to locate the construct and generate the permalink. This solution
+leverages the fact that right now this section in the ref-manual is the only
+place in all the YP docs that uses the <para><code> construct. If, in the
+future, that construct were to appear elsewhere in the YP docs, a generic permalink
+would be generated for the text between <code></code>. If a better solution
+can be found then it should be implemented. I can't find one at the moment.
+-->
+
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-libexec'>
<code>
&lt;packagename&gt;: &lt;path&gt; is using libexec please relocate to &lt;libexecdir&gt; [libexec]
</code>
@@ -87,7 +98,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-rpaths'>
<code>
package &lt;packagename&gt; contains bad RPATH &lt;rpath&gt; in file &lt;file&gt; [rpaths]
</code>
@@ -119,7 +130,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-useless-rpaths'>
<code>
&lt;packagename&gt;: &lt;file&gt; contains probably-redundant RPATH &lt;rpath&gt; [useless-rpaths]
</code>
@@ -147,7 +158,63 @@
<para>
<itemizedlist>
<listitem>
+ <para id='qa-issue-file-rdeps'>
+ <code>
+ &lt;packagename&gt; requires &lt;files&gt;, but no providers in its RDEPENDS [file-rdeps]
+ </code>
+ </para>
+
<para>
+ A file-level dependency has been identified from the
+ specified package on the specified files, but there is
+ no explicit corresponding entry in
+ <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>.
+ If particular files are required at runtime then
+ <filename>RDEPENDS</filename> should be declared in the
+ recipe to ensure the packages providing them are built.
+ </para>
+
+ <para>
+ &nbsp;
+ </para>
+ </listitem>
+ </itemizedlist>
+ </para>
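As a sketch of the usual fix, assuming the missing runtime provider is perl (a hypothetical example, not taken from the message above), the recipe would gain an explicit entry such as:

    RDEPENDS_${PN} += "perl"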
+
+ <para>
+ <itemizedlist>
+ <listitem>
+ <para id='qa-issue-build-deps'>
+ <code>
+ &lt;packagename1&gt; rdepends on &lt;packagename2&gt;, but it isn't a build dependency? [build-deps]
+ </code>
+ </para>
+
+ <para>
+ A runtime dependency exists between the two specified
+ packages, but there is nothing explicit within the recipe
+ to enable the OpenEmbedded build system to ensure that
+ dependency is satisfied.
+ This condition is usually triggered by an
+ <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+ value being added at the packaging stage rather than up
+ front, which is usually automatic based on the contents of
+ the package.
+ In most cases, you should change the recipe to add an
+ explicit <filename>RDEPENDS</filename> for the dependency.
+ </para>
+
+ <para>
+ &nbsp;
+ </para>
+ </listitem>
+ </itemizedlist>
+ </para>
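A sketch of the usual fix, reusing the placeholder names from the message above, is to make the dependency explicit in the recipe that produces packagename1:

    RDEPENDS_${PN} += "packagename2"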
+
+ <para>
+ <itemizedlist>
+ <listitem>
+ <para id='qa-issue-dev-so'>
<code>
non -dev/-dbg/-nativesdk package contains symlink .so: &lt;packagename&gt; path '&lt;path&gt;' [dev-so]
</code>
@@ -178,7 +245,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-staticdev'>
<code>
non -staticdev package contains static .a library: &lt;packagename&gt; path '&lt;path&gt;' [staticdev]
</code>
@@ -205,7 +272,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-libdir'>
<code>
&lt;packagename&gt;: found library in wrong location [libdir]
</code>
@@ -236,7 +303,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-debug-files'>
<code>
non debug package contains .debug directory: &lt;packagename&gt; path &lt;path&gt; [debug-files]
</code>
@@ -269,7 +336,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-arch'>
<code>
Architecture did not match (&lt;machine_arch&gt; to &lt;file_arch&gt;) on &lt;file&gt; [arch]
</code>
@@ -308,7 +375,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-arch-bit-size-no-match'>
<code>
Bit size did not match (&lt;machine_bits&gt; to &lt;file_bits&gt;) &lt;recipe&gt; on &lt;file&gt; [arch]
</code>
@@ -347,7 +414,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-arch-endianness-no-match'>
<code>
Endianness did not match (&lt;machine_endianness&gt; to &lt;file_endianness&gt;) on &lt;file&gt; [arch]
</code>
@@ -386,7 +453,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-textrel'>
<code>
ELF binary '&lt;file&gt;' has relocations in .text [textrel]
</code>
@@ -397,13 +464,6 @@
<filename>.text</filename> sections.
This situation can result in a performance impact
at runtime.
- <note>
- A bug currently exists that causes this
- warning to appear erroneously.
- See
- <ulink url='https://bugzilla.yoctoproject.org/show_bug.cgi?id=6104'></ulink>
- for more information.
- </note>
</para>
<para>
@@ -416,7 +476,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-ldflags'>
<code>
No GNU_HASH in the elf binary: '&lt;file&gt;' [ldflags]
</code>
@@ -448,7 +508,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-xorg-driver-abi'>
<code>
Package &lt;packagename&gt; contains Xorg driver (&lt;driver&gt;) but no xorg-abi- dependencies [xorg-driver-abi]
</code>
@@ -478,7 +538,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-infodir'>
<code>
The /usr/share/info/dir file is not meant to be shipped in a particular package. [infodir]
</code>
@@ -506,7 +566,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-symlink-to-sysroot'>
<code>
Symlink &lt;path&gt; in &lt;packagename&gt; points to TMPDIR [symlink-to-sysroot]
</code>
@@ -533,7 +593,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-la'>
<code>
&lt;file&gt; failed sanity test (workdir) in path &lt;path&gt; [la]
</code>
@@ -559,7 +619,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-pkgconfig'>
<code>
&lt;file&gt; failed sanity test (tmpdir) in path &lt;path&gt; [pkgconfig]
</code>
@@ -584,7 +644,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-debug-deps'>
<code>
&lt;packagename&gt; rdepends on &lt;debug_packagename&gt; [debug-deps]
</code>
@@ -632,7 +692,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-dev-deps'>
<code>
&lt;packagename&gt; rdepends on &lt;dev_packagename&gt; [dev-deps]
</code>
@@ -667,7 +727,7 @@
files that it should not have (e.g. a non-symlink
<filename>.so</filename> file) or it might have been added
manually (e.g. by adding to
- <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>.
+ <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>).
</para>
<para>
@@ -680,7 +740,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-dep-cmp'>
<code>
&lt;var&gt;_&lt;packagename&gt; is invalid: &lt;comparison&gt; (&lt;value&gt;) only comparisons &lt;, =, &gt;, &lt;=, and &gt;= are allowed [dep-cmp]
</code>
@@ -711,7 +771,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-compile-host-path'>
<code>
&lt;recipename&gt;: The compile log indicates that host include and/or library paths were used. Please check the log '&lt;logfile&gt;' for more information. [compile-host-path]
</code>
@@ -736,7 +796,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-install-host-path'>
<code>
&lt;recipename&gt;: The install log indicates that host include and/or library paths were used. Please check the log '&lt;logfile&gt;' for more information. [install-host-path]
</code>
@@ -761,7 +821,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-autoconf-log'>
<code>
This autoconf log indicates errors, it looked at host include and/or library paths while determining system capabilities. Rerun configure task after fixing this. The path was '&lt;path&gt;'
</code>
@@ -786,7 +846,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-pkgname'>
<code>
&lt;packagename&gt; doesn't match the [a-z0-9.+-]+ regex [pkgname]
</code>
@@ -818,7 +878,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-unknown-configure-option'>
<code>
&lt;recipe&gt;: configure was passed unrecognized options: &lt;options&gt; [unknown-configure-option]
</code>
@@ -854,7 +914,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-pn-overrides'>
<code>
Recipe &lt;recipefile&gt; has PN of "&lt;recipename&gt;" which is in OVERRIDES, this can result in unexpected behavior. [pn-overrides]
</code>
@@ -894,7 +954,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-pkgvarcheck'>
<code>
&lt;recipefile&gt;: Variable &lt;variable&gt; is set as not being package specific, please fix this. [pkgvarcheck]
</code>
@@ -932,7 +992,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-already-stripped'>
<code>
File '&lt;file&gt;' from &lt;recipename&gt; was already stripped, this will prevent future debugging! [already-stripped]
</code>
@@ -977,7 +1037,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-packages-list'>
<code>
&lt;packagename&gt; is listed in PACKAGES multiple times, this leads to packaging errors. [packages-list]
</code>
@@ -1002,7 +1062,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-files-invalid'>
<code>
FILES variable for package &lt;packagename&gt; contains '//' which is invalid. Attempting to fix this but you should correct the metadata. [files-invalid]
</code>
@@ -1025,7 +1085,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-installed-vs-shipped'>
<code>
&lt;recipename&gt;: Files/directories were installed but not shipped [installed-vs-shipped]
</code>
@@ -1065,7 +1125,7 @@
<para>
<itemizedlist>
<listitem>
- <para>
+ <para id='qa-issue-old-and-new-package-and-version-names'>
<code>
&lt;oldpackage&gt;-&lt;oldpkgversion&gt; was registered as shlib provider for &lt;library&gt;, changing it to &lt;newpackage&gt;-&lt;newpkgversion&gt; because it was built later
</code>
@@ -1151,7 +1211,6 @@ enabled by default:
</note>
</para>
</section>
-
</chapter>
<!--
vim: expandtab tw=80 ts=4
diff --git a/documentation/ref-manual/ref-structure.xml b/documentation/ref-manual/ref-structure.xml
index eb1be38701..14419d3a84 100644
--- a/documentation/ref-manual/ref-structure.xml
+++ b/documentation/ref-manual/ref-structure.xml
@@ -315,7 +315,7 @@
commands.
Following is the script syntax:
<literallayout class='monospaced'>
- $ source oe-init-build-env-memres &lt;port_number&gt; &lt;build_dir&gt;
+ $ source oe-init-build-env-memres <replaceable>port_number</replaceable> <replaceable>build_dir</replaceable>
</literallayout>
The script uses other scripts within the
<filename>scripts</filename> directory to do the bulk of the work.
@@ -499,7 +499,7 @@
the variable in the top-level build environment setup script as
follows:
<literallayout class='monospaced'>
- TEMPLATECONF=&lt;your_layer&gt;/conf
+ TEMPLATECONF=<replaceable>your_layer</replaceable>/conf
</literallayout>
Once the build process gets the sample file, it uses
<filename>sed</filename> to substitute final
@@ -554,7 +554,7 @@
you can base your build from any layer by setting the variable in
the top-level build environment setup script as follows:
<literallayout class='monospaced'>
- TEMPLATECONF=&lt;your_layer&gt;/conf
+ TEMPLATECONF=<replaceable>your_layer</replaceable>/conf
</literallayout>
Once the build process gets the sample file, it uses
<filename>sed</filename> to substitute final
diff --git a/documentation/ref-manual/ref-style.css b/documentation/ref-manual/ref-style.css
index e04b5006b4..e77ba718b3 100644
--- a/documentation/ref-manual/ref-style.css
+++ b/documentation/ref-manual/ref-style.css
@@ -201,8 +201,17 @@ div.variablelist dl {
.variablelist dl dt,
.variablelist dl dt span.term {
font-weight: normal;
- width: 20em;
+ width: 0em;
text-align: right;
+ margin-top: 4em;
+ margin-left: .5em;
+ margin-bottom: 0em;
+ -webkit-column-count: 2; /* Chrome, Safari, Opera */
+ -moz-column-count: 2; /* Firefox */
+ column-count: 2;
+ -webkit-column-gap: 4px; /* Chrome, Safari, Opera */
+ -moz-column-gap: 4px; /* Firefox */
+ column-gap: 4px;
}
.variablelist dl dt {
@@ -211,8 +220,8 @@ div.variablelist dl {
.glossary dl dd,
.variablelist dl dd {
- margin-top: -1em;
- margin-left: 25.5em;
+ margin-top: -3.5em;
+ margin-left: 15.5em;
}
.glossary dd p,
diff --git a/documentation/ref-manual/ref-tasks.xml b/documentation/ref-manual/ref-tasks.xml
index fc23a7ba0d..f325f0e233 100644
--- a/documentation/ref-manual/ref-tasks.xml
+++ b/documentation/ref-manual/ref-tasks.xml
@@ -129,6 +129,17 @@
</para>
</section>
+ <section id='ref-tasks-package_qa'>
+ <title><filename>do_package_qa</filename></title>
+
+ <para>
+ Runs QA checks on packaged files.
+ For more information on these checks, see the
+ <link linkend='ref-classes-insane'><filename>insane</filename></link>
+ class.
+ </para>
+ </section>
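As a usage sketch, the task can also be run on its own for a single recipe (recipe is a placeholder):

    $ bitbake -c package_qa recipe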
+
<section id='ref-tasks-package_write_deb'>
<title><filename>do_package_write_deb</filename></title>
@@ -313,7 +324,7 @@
<para>
You can run this task using BitBake as follows:
<literallayout class='monospaced'>
- $ bitbake -c clean &lt;recipe&gt;
+ $ bitbake -c clean <replaceable>recipe</replaceable>
</literallayout>
</para>
@@ -327,7 +338,7 @@
If you want to remove the sstate cache files for the recipe,
you need to use the
<link linkend='ref-tasks-cleansstate'><filename>do_cleansstate</filename></link>
- task instead (i.e. <filename>bitbake -c cleansstate &lt;recipe&gt;</filename>).
+ task instead (i.e. <filename>bitbake -c cleansstate</filename> <replaceable>recipe</replaceable>).
</para>
</section>
@@ -348,7 +359,7 @@
<para>
You can run this task using BitBake as follows:
<literallayout class='monospaced'>
- $ bitbake -c cleanall &lt;recipe&gt;
+ $ bitbake -c cleanall <replaceable>recipe</replaceable>
</literallayout>
</para>
@@ -378,7 +389,7 @@
<para>
You can run this task using BitBake as follows:
<literallayout class='monospaced'>
- $ bitbake -c cleansstate &lt;recipe&gt;
+ $ bitbake -c cleansstate <replaceable>recipe</replaceable>
</literallayout>
</para>
@@ -393,7 +404,7 @@
If you need to build a target from scratch using remote
mirrors, use the "-f" option as follows:
<literallayout class='monospaced'>
- $ bitbake -f -c do_cleansstate &lt;target&gt;
+ $ bitbake -f -c do_cleansstate <replaceable>target</replaceable>
</literallayout>
</note>
</para>
diff --git a/documentation/ref-manual/ref-variables.xml b/documentation/ref-manual/ref-variables.xml
index 7ea753df71..e3272fec79 100644
--- a/documentation/ref-manual/ref-variables.xml
+++ b/documentation/ref-manual/ref-variables.xml
@@ -20,9 +20,9 @@
<link linkend='var-B'>B</link>
<link linkend='var-CFLAGS'>C</link>
<link linkend='var-D'>D</link>
- <link linkend='var-ENABLE_BINARY_LOCALE_GENERATION'>E</link>
+ <link linkend='var-EFI_PROVIDER'>E</link>
<link linkend='var-FEATURE_PACKAGES'>F</link>
- <link linkend='var-GROUPADD_PARAM'>G</link>
+ <link linkend='var-GLIBC_GENERATE_LOCALES'>G</link>
<link linkend='var-HOMEPAGE'>H</link>
<link linkend='var-ICECC_DISABLED'>I</link>
<!-- <link linkend='var-glossary-j'>J</link> -->
@@ -39,14 +39,17 @@
<link linkend='var-UBOOT_CONFIG'>U</link>
<!-- <link linkend='var-glossary-v'>V</link> -->
<link linkend='var-WARN_QA'>W</link>
-<!-- <link linkend='var-glossary-x'>X</link> -->
+ <link linkend='var-XSERVER'>X</link>
<!-- <link linkend='var-glossary-y'>Y</link> -->
<!-- <link linkend='var-glossary-z'>Z</link>-->
</para>
<glossdiv id='var-glossary-a'><title>A</title>
- <glossentry id='var-ABIEXTENSION'><glossterm>ABIEXTENSION</glossterm>
+ <glossentry id='var-ABIEXTENSION'><glossterm><imagedata fileref="figures/define-generic.png" />ABIEXTENSION</glossterm>
+ <info>
+ ABIEXTENSION[doc] = "Extension to the Application Binary Interface (ABI) field of the GNU canonical architecture name (e.g. "eabi")."
+ </info>
<glossdef>
<para>
Extension to the Application Binary Interface (ABI)
@@ -66,7 +69,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-ALLOW_EMPTY'><glossterm>ALLOW_EMPTY</glossterm>
+ <glossentry id='var-ALLOW_EMPTY'><glossterm><imagedata fileref="figures/define-generic.png" />ALLOW_EMPTY</glossterm>
+ <info>
+ ALLOW_EMPTY[doc] = "Specifies if an output package should still be produced if it is empty."
+ </info>
<glossdef>
<para>
Specifies if an output package should still be produced if it is empty.
@@ -86,9 +92,12 @@
</literallayout>
</para>
</glossdef>
- </glossentry>
+ </glossentry>
- <glossentry id='var-ALTERNATIVE'><glossterm>ALTERNATIVE</glossterm>
+ <glossentry id='var-ALTERNATIVE'><glossterm><imagedata fileref="figures/define-generic.png" />ALTERNATIVE</glossterm>
+ <info>
+ ALTERNATIVE[doc] = "Lists commands in a package that need an alternative binary naming scheme."
+ </info>
<glossdef>
<para>
Lists commands in a package that need an alternative
@@ -113,9 +122,12 @@
section.
</para>
</glossdef>
- </glossentry>
+ </glossentry>
- <glossentry id='var-ALTERNATIVE_LINK_NAME'><glossterm>ALTERNATIVE_LINK_NAME</glossterm>
+ <glossentry id='var-ALTERNATIVE_LINK_NAME'><glossterm><imagedata fileref="figures/define-generic.png" />ALTERNATIVE_LINK_NAME</glossterm>
+ <info>
+ ALTERNATIVE_LINK_NAME[doc] = "Used by the alternatives system to map duplicated commands to actual locations."
+ </info>
<glossdef>
<para>
Used by the alternatives system to map duplicated commands
@@ -136,7 +148,7 @@
<note>
If <filename>ALTERNATIVE_LINK_NAME</filename> is not
defined, it defaults to
- <filename>${bindir}/&lt;name&gt;</filename>.
+ <filename>${bindir}/<replaceable>name</replaceable></filename>.
</note>
</para>
@@ -146,9 +158,12 @@
section.
</para>
</glossdef>
- </glossentry>
+ </glossentry>
- <glossentry id='var-ALTERNATIVE_PRIORITY'><glossterm>ALTERNATIVE_PRIORITY</glossterm>
+ <glossentry id='var-ALTERNATIVE_PRIORITY'><glossterm><imagedata fileref="figures/define-generic.png" />ALTERNATIVE_PRIORITY</glossterm>
+ <info>
+ ALTERNATIVE_PRIORITY[doc] = "Used by the alternatives system to create default priorities for duplicated commands."
+ </info>
<glossdef>
<para>
Used by the alternatives system to create default
@@ -159,9 +174,9 @@
a default for specific commands tied to particular packages.
Here are the available syntax forms:
<literallayout class='monospaced'>
- ALTERNATIVE_PRIORITY = "&lt;priority&gt;"
- ALTERNATIVE_PRIORITY[&lt;name&gt;] = "&lt;priority&gt;"
- ALTERNATIVE_PRIORITY_&lt;pkg&gt;[&lt;name&gt;] = "&lt;priority&gt;"
+ ALTERNATIVE_PRIORITY = "<replaceable>priority</replaceable>"
+ ALTERNATIVE_PRIORITY[<replaceable>name</replaceable>] = "<replaceable>priority</replaceable>"
+ ALTERNATIVE_PRIORITY_<replaceable>pkg</replaceable>[<replaceable>name</replaceable>] = "<replaceable>priority</replaceable>"
</literallayout>
</para>
@@ -171,9 +186,12 @@
section.
</para>
</glossdef>
- </glossentry>
+ </glossentry>
- <glossentry id='var-ALTERNATIVE_TARGET'><glossterm>ALTERNATIVE_TARGET</glossterm>
+ <glossentry id='var-ALTERNATIVE_TARGET'><glossterm><imagedata fileref="figures/define-generic.png" />ALTERNATIVE_TARGET</glossterm>
+ <info>
+ ALTERNATIVE_TARGET[doc] = "Used by the alternatives system to create default link locations for duplicated commands."
+ </info>
<glossdef>
<para>
Used by the alternatives system to create default link
@@ -185,9 +203,9 @@
a default for specific commands tied to particular packages.
Here are the available syntax forms:
<literallayout class='monospaced'>
- ALTERNATIVE_TARGET = "&lt;target&gt;"
- ALTERNATIVE_TARGET[&lt;name&gt;] = "&lt;target&gt;"
- ALTERNATIVE_TARGET_&lt;pkg&gt;[&lt;name&gt;] = "&lt;target&gt;"
+ ALTERNATIVE_TARGET = "<replaceable>target</replaceable>"
+ ALTERNATIVE_TARGET[<replaceable>name</replaceable>] = "<replaceable>target</replaceable>"
+ ALTERNATIVE_TARGET_<replaceable>pkg</replaceable>[<replaceable>name</replaceable>] = "<replaceable>target</replaceable>"
</literallayout>
<note>
<para>
@@ -222,9 +240,12 @@
section.
</para>
</glossdef>
- </glossentry>
+ </glossentry>
- <glossentry id='var-APPEND'><glossterm>APPEND</glossterm>
+ <glossentry id='var-APPEND'><glossterm><imagedata fileref="figures/define-generic.png" />APPEND</glossterm>
+ <info>
+ APPEND[doc] = "An override list of append strings for each LABEL."
+ </info>
<glossdef>
<para>
An override list of append strings for each
@@ -237,32 +258,65 @@
class for more information on how this variable is used.
</para>
</glossdef>
- </glossentry>
+ </glossentry>
+
+ <glossentry id='var-ASSUME_PROVIDED'><glossterm><imagedata fileref="figures/define-generic.png" />ASSUME_PROVIDED</glossterm>
+ <info>
+ ASSUME_PROVIDED[doc] = "Lists recipe names (PN values) BitBake does not attempt to build."
+ </info>
+ <glossdef>
+ <para>
+ Lists recipe names
+ (<link linkend='var-PN'><filename>PN</filename></link>
+ values) BitBake does not attempt to build.
+ Instead, BitBake assumes these recipes have already been
+ built.
+ </para>
+
+ <para>
+ In OpenEmbedded Core, <filename>ASSUME_PROVIDED</filename>
+ mostly specifies native tools that should not be built.
+ An example is <filename>git-native</filename>, which, when
+ specified, allows the Git binary from the host to be
+ used rather than building <filename>git-native</filename>.
+ </para>
+ </glossdef>
+ </glossentry>
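For instance, a minimal local.conf sketch based on the git-native case mentioned above:

    ASSUME_PROVIDED += "git-native"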
- <glossentry id='var-AUTHOR'><glossterm>AUTHOR</glossterm>
+ <glossentry id='var-AUTHOR'><glossterm><imagedata fileref="figures/define-generic.png" />AUTHOR</glossterm>
+ <info>
+ AUTHOR[doc] = "Email address used to contact the original author or authors in order to send patches and forward bugs."
+ </info>
<glossdef>
<para>The email address used to contact the original author
or authors in order to send patches and forward bugs.</para>
</glossdef>
- </glossentry>
+ </glossentry>
- <glossentry id='var-AUTO_SYSLINUXMENU'><glossterm>AUTO_SYSLINUXMENU</glossterm>
+ <glossentry id='var-AUTO_SYSLINUXMENU'><glossterm><imagedata fileref="figures/define-generic.png" />AUTO_SYSLINUXMENU</glossterm>
+ <info>
+ AUTO_SYSLINUXMENU[doc] = "Enables creating an automatic menu for the syslinux bootloader."
+ </info>
<glossdef>
<para>
- Enables creating an automatic menu.
- You must set this in your recipe.
+ Enables creating an automatic menu for the syslinux
+ bootloader.
+ You must set this variable in your recipe.
The
<link linkend='ref-classes-syslinux'><filename>syslinux</filename></link>
class checks this variable.
</para>
</glossdef>
- </glossentry>
+ </glossentry>
- <glossentry id='var-AUTOREV'><glossterm>AUTOREV</glossterm>
+ <glossentry id='var-AUTOREV'><glossterm><imagedata fileref="figures/define-generic.png" />AUTOREV</glossterm>
+ <info>
+ AUTOREV[doc] = "When SRCREV is set to the value of this variable, it specifies to use the latest source revision in the repository."
+ </info>
<glossdef>
<para>When <filename><link linkend='var-SRCREV'>SRCREV</link></filename>
- is set to the value of this variable, it specifies to use the latest
- source revision in the repository.
+ is set to the value of this variable, it specifies to use
+ the latest source revision in the repository.
Here is an example:
<literallayout class='monospaced'>
SRCREV = "${AUTOREV}"
@@ -271,9 +325,12 @@
</glossdef>
</glossentry>
- <glossentry id='var-AVAILTUNES'><glossterm>AVAILTUNES</glossterm>
+ <glossentry id='var-AVAILTUNES'><glossterm><imagedata fileref="figures/define-generic.png" />AVAILTUNES</glossterm>
+ <info>
+ AVAILTUNES[doc] = "The list of defined CPU and Application Binary Interface (ABI) tunings (i.e. "tunes") available for use by the OpenEmbedded build system."
+ </info>
<glossdef>
- <para>
+ <para id='var-define-AVAILTUNES'>
The list of defined CPU and Application Binary Interface
(ABI) tunings (i.e. "tunes") available for use by the
OpenEmbedded build system.
@@ -298,12 +355,14 @@
</glossdef>
</glossentry>
-
</glossdiv>
<glossdiv id='var-glossary-b'><title>B</title>
- <glossentry id='var-B'><glossterm>B</glossterm>
+ <glossentry id='var-B'><glossterm><imagedata fileref="figures/define-generic.png" />B</glossterm>
+ <info>
+ B[doc] = "The Build Directory. The OpenEmbedded build system places generated objects into the Build Directory during a recipe's build process."
+ </info>
<glossdef>
<para>
The directory within the
@@ -326,7 +385,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BAD_RECOMMENDATIONS'><glossterm>BAD_RECOMMENDATIONS</glossterm>
+ <glossentry id='var-BAD_RECOMMENDATIONS'><glossterm><imagedata fileref="figures/define-generic.png" />BAD_RECOMMENDATIONS</glossterm>
+ <info>
+ BAD_RECOMMENDATIONS[doc] = "A list of packages not to install despite being recommended by a recipe. Support for this variable exists only when using the IPK packaging backend."
+ </info>
<glossdef>
<para>
Lists "recommended-only" packages to not install.
@@ -338,13 +400,13 @@
being installed by listing them with the
<filename>BAD_RECOMMENDATIONS</filename> variable:
<literallayout class='monospaced'>
- BAD_RECOMMENDATIONS = "&lt;package_name&gt; &lt;package_name&gt; &lt;package_name&gt; ..."
+ BAD_RECOMMENDATIONS = "<replaceable>package_name</replaceable> <replaceable>package_name</replaceable> <replaceable>package_name</replaceable> ..."
</literallayout>
You can set this variable globally in your
<filename>local.conf</filename> file or you can attach it to
a specific image recipe by using the recipe name override:
<literallayout class='monospaced'>
- BAD_RECOMMENDATIONS_pn-&lt;target_image&gt; = "&lt;package_name&gt;"
+ BAD_RECOMMENDATIONS_pn-<replaceable>target_image</replaceable> = "<replaceable>package_name</replaceable>"
</literallayout>
</para>
@@ -374,7 +436,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BASE_LIB'><glossterm>BASE_LIB</glossterm>
+ <glossentry id='var-BASE_LIB'><glossterm><imagedata fileref="figures/define-generic.png" />BASE_LIB</glossterm>
+ <info>
+ BASE_LIB[doc] = "The library directory name for the CPU or Application Binary Interface (ABI) tune."
+ </info>
<glossdef>
<para>
The library directory name for the CPU or Application
@@ -396,7 +461,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_DANGLINGAPPENDS_WARNONLY'><glossterm>BB_DANGLINGAPPENDS_WARNONLY</glossterm>
+ <glossentry id='var-BB_DANGLINGAPPENDS_WARNONLY'><glossterm><imagedata fileref="figures/define-generic.png" />BB_DANGLINGAPPENDS_WARNONLY</glossterm>
+ <info>
+ BB_DANGLINGAPPENDS_WARNONLY[doc] = "Defines how BitBake handles situations where an append file (.bbappend) has no corresponding recipe file (.bb)."
+ </info>
<glossdef>
<para>
Defines how BitBake handles situations where an append
@@ -430,7 +498,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_DISKMON_DIRS'><glossterm>BB_DISKMON_DIRS</glossterm>
+ <glossentry id='var-BB_DISKMON_DIRS'><glossterm><imagedata fileref="figures/define-generic.png" />BB_DISKMON_DIRS</glossterm>
+ <info>
+ BB_DISKMON_DIRS[doc] = "Monitors disk space and available inodes during the build and allows you to control the build based on these parameters."
+ </info>
<glossdef>
<para>
Monitors disk space and available inodes during the build
@@ -445,11 +516,11 @@
<ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>.
Use the following form:
<literallayout class='monospaced'>
- BB_DISKMON_DIRS = "&lt;action&gt;,&lt;dir&gt;,&lt;threshold&gt; [...]"
+ BB_DISKMON_DIRS = "<replaceable>action</replaceable>,<replaceable>dir</replaceable>,<replaceable>threshold</replaceable> [...]"
where:
- &lt;action&gt; is:
+ <replaceable>action</replaceable> is:
ABORT: Immediately abort the build when
a threshold is broken.
STOPTASKS: Stop the build after the currently
@@ -463,14 +534,14 @@
which must be defined in the
conf/local.conf file.
- &lt;dir&gt; is:
+ <replaceable>dir</replaceable> is:
Any directory you choose. You can specify one or
more directories to monitor by separating the
groupings with a space. If two directories are
on the same device, only the first directory
is monitored.
- &lt;threshold&gt; is:
+ <replaceable>threshold</replaceable> is:
Either the minimum available disk space,
the minimum number of free inodes, or
both. You must specify at least one. To
@@ -525,7 +596,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_DISKMON_WARNINTERVAL'><glossterm>BB_DISKMON_WARNINTERVAL</glossterm>
+ <glossentry id='var-BB_DISKMON_WARNINTERVAL'><glossterm><imagedata fileref="figures/define-generic.png" />BB_DISKMON_WARNINTERVAL</glossterm>
+ <info>
+ BB_DISKMON_WARNINTERVAL[doc] = "Defines the disk space and free inode warning intervals. To set these intervals, define the variable in the conf/local.conf file in the Build Directory."
+ </info>
<glossdef>
<para>
Defines the disk space and free inode warning intervals.
@@ -559,16 +633,16 @@
When specifying the variable in your configuration file,
use the following form:
<literallayout class='monospaced'>
- BB_DISKMON_WARNINTERVAL = "&lt;disk_space_interval&gt;,&lt;disk_inode_interval&gt;"
+ BB_DISKMON_WARNINTERVAL = "<replaceable>disk_space_interval</replaceable>,<replaceable>disk_inode_interval</replaceable>"
where:
- &lt;disk_space_interval&gt; is:
+ <replaceable>disk_space_interval</replaceable> is:
An interval of memory expressed in either
G, M, or K for Gbytes, Mbytes, or Kbytes,
respectively. You cannot use GB, MB, or KB.
- &lt;disk_inode_interval&gt; is:
+ <replaceable>disk_inode_interval</replaceable> is:
An interval of free inodes expressed in either
G, M, or K for Gbytes, Mbytes, or Kbytes,
respectively. You cannot use GB, MB, or KB.
@@ -593,7 +667,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_GENERATE_MIRROR_TARBALLS'><glossterm>BB_GENERATE_MIRROR_TARBALLS</glossterm>
+ <glossentry id='var-BB_GENERATE_MIRROR_TARBALLS'><glossterm><imagedata fileref="figures/define-generic.png" />BB_GENERATE_MIRROR_TARBALLS</glossterm>
+ <info>
+ BB_GENERATE_MIRROR_TARBALLS[doc] = "Causes tarballs of the Git repositories to be placed in the DL_DIR directory."
+ </info>
<glossdef>
<para>
Causes tarballs of the Git repositories, including the
@@ -616,13 +693,16 @@
</glossdef>
</glossentry>
- <glossentry id='var-BB_NUMBER_THREADS'><glossterm>BB_NUMBER_THREADS</glossterm>
+ <glossentry id='var-BB_NUMBER_THREADS'><glossterm><imagedata fileref="figures/define-generic.png" />BB_NUMBER_THREADS</glossterm>
+ <info>
+ BB_NUMBER_THREADS[doc] = "The maximum number of tasks BitBake should run in parallel at any one time. A good rule of thumb is for this variable to be twice the number of cores."
+ </info>
<glossdef>
<para>
The maximum number of tasks BitBake should run in parallel
at any one time.
If your host development system supports multiple cores,
- a good rule of thumb is to set this variable to twice the
+ a good rule of thumb is to have this variable be twice the
number of cores.
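For instance, following that rule of thumb on a quad-core build host, a local.conf sketch would be:

    BB_NUMBER_THREADS = "8"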
</para>
@@ -633,7 +713,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBCLASSEXTEND'><glossterm>BBCLASSEXTEND</glossterm>
+ <glossentry id='var-BBCLASSEXTEND'><glossterm><imagedata fileref="figures/define-generic.png" />BBCLASSEXTEND</glossterm>
+ <info>
+ BBCLASSEXTEND[doc] = "Allows you to extend a recipe so that it builds variants of the software. Common variants for recipes are 'native', 'cross', 'nativesdk' and multilibs."
+ </info>
<glossdef>
<para>
Allows you to extend a recipe so that it builds variants of the software.
@@ -643,7 +726,7 @@
which is a compiler built to run on the build machine but produces binaries
that run on the target <link linkend='var-MACHINE'><filename>MACHINE</filename></link>;
"nativesdk", which targets the SDK machine instead of <filename>MACHINE</filename>;
- and "mulitlibs" in the form "<filename>multilib:&lt;multilib_name&gt;</filename>".
+ and "mulitlibs" in the form "<filename>multilib:</filename><replaceable>multilib_name</replaceable>".
</para>
<para>
@@ -651,13 +734,16 @@
is as simple as adding the following to your recipe:
<literallayout class='monospaced'>
BBCLASSEXTEND =+ "native nativesdk"
- BBCLASSEXTEND =+ "multilib:&lt;multilib_name&gt;"
+ BBCLASSEXTEND =+ "multilib:<replaceable>multilib_name</replaceable>"
</literallayout>
</para>
</glossdef>
</glossentry>
- <glossentry id='var-BBFILE_COLLECTIONS'><glossterm>BBFILE_COLLECTIONS</glossterm>
+ <glossentry id='var-BBFILE_COLLECTIONS'><glossterm><imagedata fileref="figures/define-generic.png" />BBFILE_COLLECTIONS</glossterm>
+ <info>
+ BBFILE_COLLECTIONS[doc] = "Lists the names of configured layers. These names are used to find the other BBFILE_* variables."
+ </info>
<glossdef>
<para>Lists the names of configured layers.
These names are used to find the other <filename>BBFILE_*</filename>
@@ -668,7 +754,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBFILE_PATTERN'><glossterm>BBFILE_PATTERN</glossterm>
+ <glossentry id='var-BBFILE_PATTERN'><glossterm><imagedata fileref="figures/define-generic.png" />BBFILE_PATTERN</glossterm>
+ <info>
+ BBFILE_PATTERN[doc] = "Variable that expands to match files from BBFILES in a particular layer. This variable is used in the layer.conf file and must be suffixed with the name of a layer."
+ </info>
<glossdef>
<para>Variable that expands to match files from
<link linkend='var-BBFILES'><filename>BBFILES</filename></link>
@@ -679,7 +768,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBFILE_PRIORITY'><glossterm>BBFILE_PRIORITY</glossterm>
+ <glossentry id='var-BBFILE_PRIORITY'><glossterm><imagedata fileref="figures/define-generic.png" />BBFILE_PRIORITY</glossterm>
+ <info>
+ BBFILE_PRIORITY[doc] = "Assigns the priority for recipe files in each layer. Setting this variable allows you to prioritize a layer against other layers that contain the same recipe."
+ </info>
<glossdef>
<para>Assigns the priority for recipe files in each layer.</para>
<para>This variable is useful in situations where the same recipe appears in
@@ -710,19 +802,28 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBFILES'><glossterm>BBFILES</glossterm>
+ <glossentry id='var-BBFILES'><glossterm><imagedata fileref="figures/define-generic.png" />BBFILES</glossterm>
+ <info>
+ BBFILES[doc] = "List of recipe files used by BitBake to build software."
+ </info>
<glossdef>
<para>List of recipe files used by BitBake to build software.</para>
</glossdef>
</glossentry>
- <glossentry id='var-BBINCLUDELOGS'><glossterm>BBINCLUDELOGS</glossterm>
+ <glossentry id='var-BBINCLUDELOGS'><glossterm><imagedata fileref="figures/define-generic.png" />BBINCLUDELOGS</glossterm>
+ <info>
+ BBINCLUDELOGS[doc] = "Variable that controls how BitBake displays logs on build failure."
+ </info>
<glossdef>
<para>Variable that controls how BitBake displays logs on build failure.</para>
</glossdef>
</glossentry>
- <glossentry id='var-BBLAYERS'><glossterm>BBLAYERS</glossterm>
+ <glossentry id='var-BBLAYERS'><glossterm><imagedata fileref="figures/define-generic.png" />BBLAYERS</glossterm>
+ <info>
+ BBLAYERS[doc] = "Lists the layers to enable during the build. This variable is defined in the bblayers.conf configuration file."
+ </info>
<glossdef>
<para>Lists the layers to enable during the build.
This variable is defined in the <filename>bblayers.conf</filename> configuration
@@ -747,7 +848,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBLAYERS_NON_REMOVABLE'><glossterm>BBLAYERS_NON_REMOVABLE</glossterm>
+ <glossentry id='var-BBLAYERS_NON_REMOVABLE'><glossterm><imagedata fileref="figures/define-generic.png" />BBLAYERS_NON_REMOVABLE</glossterm>
+ <info>
+ BBLAYERS_NON_REMOVABLE[doc] = "Lists core layers that cannot be removed from the bblayers.conf file."
+ </info>
<glossdef>
<para>Lists core layers that cannot be removed from the
<filename>bblayers.conf</filename> file during a build
@@ -780,8 +884,11 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBMASK'><glossterm>BBMASK</glossterm>
+ <glossentry id='var-BBMASK'><glossterm><imagedata fileref="figures/define-generic.png" />BBMASK</glossterm>
<glossdef>
+ <info>
+ BBMASK[doc] = "Prevents BitBake from processing specific recipes or recipe append files. Use the BBMASK variable from within conf/local.conf."
+ </info>
<para>
Prevents BitBake from processing recipes and recipe
append files.
@@ -840,7 +947,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BBPATH'><glossterm>BBPATH</glossterm>
+ <glossentry id='var-BBPATH'><glossterm><imagedata fileref="figures/define-generic.png" />BBPATH</glossterm>
+ <info>
+ BBPATH[doc] = "Used by BitBake to locate .bbclass and configuration files. This variable is analogous to the PATH variable."
+ </info>
<glossdef>
<para>
Used by BitBake to locate
@@ -856,16 +966,19 @@
Set the variable as you would any environment variable
and then run BitBake:
<literallayout class='monospaced'>
- $ BBPATH = "&lt;build_directory&gt;"
+ $ BBPATH="<replaceable>build_directory</replaceable>"
$ export BBPATH
- $ bitbake &lt;target&gt;
+ $ bitbake <replaceable>target</replaceable>
</literallayout>
</note>
</para>
</glossdef>
</glossentry>
- <glossentry id='var-BBSERVER'><glossterm>BBSERVER</glossterm>
+ <glossentry id='var-BBSERVER'><glossterm><imagedata fileref="figures/define-generic.png" />BBSERVER</glossterm>
+ <info>
+ BBSERVER[doc] = "Points to the server that runs memory-resident BitBake."
+ </info>
<glossdef>
<para>
Points to the server that runs memory-resident BitBake.
@@ -887,11 +1000,42 @@
</glossdef>
</glossentry>
- <glossentry id='var-BINCONFIG_GLOB'><glossterm>BINCONFIG_GLOB</glossterm>
+ <glossentry id='var-BINCONFIG'><glossterm><imagedata fileref="figures/define-generic.png" />BINCONFIG</glossterm>
+ <info>
+ BINCONFIG[doc] = "When inheriting the binconfig-disabled class, this variable specifies binary configuration scripts to disable in favor of using pkg-config to query the information."
+ </info>
<glossdef>
<para>
- When inheriting <filename>binconfig.bbclass</filename>
- from a recipe, this variable specifies a wildcard for
+ When inheriting the
+ <link linkend='ref-classes-binconfig-disabled'><filename>binconfig-disabled</filename></link>
+ class, this variable specifies binary configuration
+ scripts to disable in favor of using
+ <filename>pkg-config</filename> to query the information.
+ The <filename>binconfig-disabled</filename> class will
+ modify the specified scripts to return an error so that
+ calls to them can be easily found and replaced.
+ </para>
+
+ <para>
+ To add multiple scripts, separate them by spaces.
+ Here is an example from the <filename>libpng</filename>
+ recipe:
+ <literallayout class='monospaced'>
+ BINCONFIG = "${bindir}/libpng-config ${bindir}/libpng16-config"
+ </literallayout>
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-BINCONFIG_GLOB'><glossterm><imagedata fileref="figures/define-generic.png" />BINCONFIG_GLOB</glossterm>
+ <info>
+ BINCONFIG_GLOB[doc] = "When inheriting binconfig.bbclass from a recipe, this variable specifies a wildcard for configuration scripts that need editing."
+ </info>
+ <glossdef>
+ <para>
+ When inheriting the
+ <link linkend='ref-classes-binconfig'><filename>binconfig</filename></link>
+ class, this variable specifies a wildcard for
configuration scripts that need editing.
The scripts are edited to correct any paths that have been
set up during compilation so that they are correct for
@@ -910,7 +1054,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BP'><glossterm>BP</glossterm>
+ <glossentry id='var-BP'><glossterm><imagedata fileref="figures/define-generic.png" />BP</glossterm>
+ <info>
+ BP[doc] = "The base recipe name and version but without any special recipe name suffix (i.e. -native, lib64-, and so forth). BP is comprised of ${BPN}-${PV}"
+ </info>
<glossdef>
<para>The base recipe name and version but without any special
recipe name suffix (i.e. <filename>-native</filename>, <filename>lib64-</filename>,
@@ -922,7 +1069,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BPN'><glossterm>BPN</glossterm>
+ <glossentry id='var-BPN'><glossterm><imagedata fileref="figures/define-generic.png" />BPN</glossterm>
+ <info>
+ BPN[doc] = "The bare name of the recipe. This variable is a version of the PN variable but removes common suffixes and prefixes."
+ </info>
<glossdef>
<para>The bare name of the recipe.
This variable is a version of the <link linkend='var-PN'><filename>PN</filename></link> variable
@@ -937,7 +1087,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUGTRACKER'><glossterm>BUGTRACKER</glossterm>
+ <glossentry id='var-BUGTRACKER'><glossterm><imagedata fileref="figures/define-generic.png" />BUGTRACKER</glossterm>
+ <info>
+ BUGTRACKER[doc] = "Specifies a URL for an upstream bug tracking website for a recipe."
+ </info>
<glossdef>
<para>
Specifies a URL for an upstream bug tracking website for
@@ -949,7 +1102,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILD_CFLAGS'><glossterm>BUILD_CFLAGS</glossterm>
+ <glossentry id='var-BUILD_CFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />BUILD_CFLAGS</glossterm>
+ <info>
+ BUILD_CFLAGS[doc] = "Specifies the flags to pass to the C compiler when building for the build host."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C compiler when building
@@ -961,7 +1117,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILD_CPPFLAGS'><glossterm>BUILD_CPPFLAGS</glossterm>
+ <glossentry id='var-BUILD_CPPFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />BUILD_CPPFLAGS</glossterm>
+ <info>
+ BUILD_CPPFLAGS[doc] = "Specifies the flags to pass to the C pre-processor (i.e. to both the C and the C++ compilers) when building for the build host."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C pre-processor
@@ -974,7 +1133,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILD_CXXFLAGS'><glossterm>BUILD_CXXFLAGS</glossterm>
+ <glossentry id='var-BUILD_CXXFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />BUILD_CXXFLAGS</glossterm>
+ <info>
+ BUILD_CXXFLAGS[doc] = "Specifies the flags to pass to the C++ compiler when building for the build host."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C++ compiler when
@@ -986,7 +1148,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILD_LDFLAGS'><glossterm>BUILD_LDFLAGS</glossterm>
+ <glossentry id='var-BUILD_LDFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />BUILD_LDFLAGS</glossterm>
+ <info>
+ BUILD_LDFLAGS[doc] = "Specifies the flags to pass to the linker when building for the build host."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the linker when building
@@ -998,7 +1163,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILD_OPTIMIZATION'><glossterm>BUILD_OPTIMIZATION</glossterm>
+ <glossentry id='var-BUILD_OPTIMIZATION'><glossterm><imagedata fileref="figures/define-generic.png" />BUILD_OPTIMIZATION</glossterm>
+ <info>
+ BUILD_OPTIMIZATION[doc] = "Specifies the optimization flags passed to the C compiler when building for the build host or the SDK."
+ </info>
<glossdef>
<para>
Specifies the optimization flags passed to the C compiler
@@ -1018,7 +1186,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDDIR'><glossterm>BUILDDIR</glossterm>
+ <glossentry id='var-BUILDDIR'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDDIR</glossterm>
+ <info>
+ BUILDDIR[doc] = "Points to the location of the Build Directory."
+ </info>
<glossdef>
<para>
Points to the location of the
@@ -1036,13 +1207,16 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDHISTORY_COMMIT'><glossterm>BUILDHISTORY_COMMIT</glossterm>
+ <glossentry id='var-BUILDHISTORY_COMMIT'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDHISTORY_COMMIT</glossterm>
+ <info>
+ BUILDHISTORY_COMMIT[doc] = "When inheriting the buildhistory class, this variable specifies whether or not to commit the build history output in a local Git repository."
+ </info>
<glossdef>
<para>
When inheriting the
<link linkend='ref-classes-buildhistory'><filename>buildhistory</filename></link>
- class, specifies whether or not to commit the build
- history output in a local Git repository.
+ class, this variable specifies whether or not to commit the
+ build history output in a local Git repository.
If set to "1", this local repository will be maintained
automatically by the
<filename>buildhistory</filename>
@@ -1064,12 +1238,16 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDHISTORY_COMMIT_AUTHOR'><glossterm>BUILDHISTORY_COMMIT_AUTHOR</glossterm>
+ <glossentry id='var-BUILDHISTORY_COMMIT_AUTHOR'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDHISTORY_COMMIT_AUTHOR</glossterm>
+ <info>
+ BUILDHISTORY_COMMIT_AUTHOR[doc] = "When inheriting the buildhistory class, this variable specifies the author to use for each Git commit."
+ </info>
<glossdef>
<para>
When inheriting the
<link linkend='ref-classes-buildhistory'><filename>buildhistory</filename></link>
- class, specifies the author to use for each Git commit.
+ class, this variable specifies the author to use for each
+ Git commit.
In order for the <filename>BUILDHISTORY_COMMIT_AUTHOR</filename>
variable to work, the
<link linkend='var-BUILDHISTORY_COMMIT'><filename>BUILDHISTORY_COMMIT</filename></link>
@@ -1094,13 +1272,16 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDHISTORY_DIR'><glossterm>BUILDHISTORY_DIR</glossterm>
+ <glossentry id='var-BUILDHISTORY_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDHISTORY_DIR</glossterm>
+ <info>
+ BUILDHISTORY_DIR[doc] = "When inheriting the buildhistory class, this variable specifies the directory in which build history information is kept."
+ </info>
<glossdef>
<para>
When inheriting the
<link linkend='ref-classes-buildhistory'><filename>buildhistory</filename></link>
- class, specifies the directory in which build history
- information is kept.
+ class, this variable specifies the directory in which
+ build history information is kept.
For more information on how the variable works, see the
<filename>buildhistory.class</filename>.
</para>
@@ -1115,12 +1296,16 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDHISTORY_FEATURES'><glossterm>BUILDHISTORY_FEATURES</glossterm>
+ <glossentry id='var-BUILDHISTORY_FEATURES'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDHISTORY_FEATURES</glossterm>
+ <info>
+ BUILDHISTORY_FEATURES[doc] = "When inheriting the buildhistory class, this variable specifies the build history features to be enabled."
+ </info>
<glossdef>
<para>
When inheriting the
<link linkend='ref-classes-buildhistory'><filename>buildhistory</filename></link>
- class, specifies the build history features to be enabled.
+ class, this variable specifies the build history features
+ to be enabled.
For more information on how build history works, see the
"<link linkend='maintaining-build-output-quality'>Maintaining Build Output Quality</link>"
section.
@@ -1155,12 +1340,16 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDHISTORY_IMAGE_FILES'><glossterm>BUILDHISTORY_IMAGE_FILES</glossterm>
+ <glossentry id='var-BUILDHISTORY_IMAGE_FILES'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDHISTORY_IMAGE_FILES</glossterm>
+ <info>
+ BUILDHISTORY_IMAGE_FILES[doc] = "When inheriting the buildhistory class, this variable specifies a list of paths to files copied from the image contents into the build history directory under an "image-files" directory in the directory for the image, so that you can track the contents of each file."
+ </info>
<glossdef>
<para>
When inheriting the
<link linkend='ref-classes-buildhistory'><filename>buildhistory</filename></link>
- class, specifies a list of paths to files copied from the
+ class, this variable specifies a list of paths to files
+ copied from the
image contents into the build history directory under
an "image-files" directory in the directory for
the image, so that you can track the contents of each file.
@@ -1183,13 +1372,16 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDHISTORY_PUSH_REPO'><glossterm>BUILDHISTORY_PUSH_REPO</glossterm>
+ <glossentry id='var-BUILDHISTORY_PUSH_REPO'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDHISTORY_PUSH_REPO</glossterm>
+ <info>
+ BUILDHISTORY_PUSH_REPO[doc] = "When inheriting the buildhistory class, this variable optionally specifies a remote repository to which build history pushes Git changes."
+ </info>
<glossdef>
<para>
When inheriting the
<link linkend='ref-classes-buildhistory'><filename>buildhistory</filename></link>
- class, optionally specifies a remote repository
- to which build history pushes Git changes.
+ class, this variable optionally specifies a remote
+ repository to which build history pushes Git changes.
In order for <filename>BUILDHISTORY_PUSH_REPO</filename>
to work,
<link linkend='var-BUILDHISTORY_COMMIT'><filename>BUILDHISTORY_COMMIT</filename></link>
@@ -1214,7 +1406,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDSDK_CFLAGS'><glossterm>BUILDSDK_CFLAGS</glossterm>
+ <glossentry id='var-BUILDSDK_CFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDSDK_CFLAGS</glossterm>
+ <info>
+ BUILDSDK_CFLAGS[doc] = "Specifies the flags to pass to the C compiler when building for the SDK."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C compiler when building
@@ -1227,7 +1422,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDSDK_CPPFLAGS'><glossterm>BUILDSDK_CPPFLAGS</glossterm>
+ <glossentry id='var-BUILDSDK_CPPFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDSDK_CPPFLAGS</glossterm>
+ <info>
+ BUILDSDK_CPPFLAGS[doc] = "Specifies the flags to pass to the C pre-processor (i.e. to both the C and the C++ compilers) when building for the SDK."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C pre-processor
@@ -1241,7 +1439,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDSDK_CXXFLAGS'><glossterm>BUILDSDK_CXXFLAGS</glossterm>
+ <glossentry id='var-BUILDSDK_CXXFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDSDK_CXXFLAGS</glossterm>
+ <info>
+ BUILDSDK_CXXFLAGS[doc] = "Specifies the flags to pass to the C++ compiler when building for the SDK."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C++ compiler when
@@ -1254,7 +1455,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDSDK_LDFLAGS'><glossterm>BUILDSDK_LDFLAGS</glossterm>
+ <glossentry id='var-BUILDSDK_LDFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDSDK_LDFLAGS</glossterm>
+ <info>
+ BUILDSDK_LDFLAGS[doc] = "Specifies the flags to pass to the linker when building for the SDK."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the linker when building
@@ -1267,7 +1471,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUILDSTATS_BASE'><glossterm>BUILDSTATS_BASE</glossterm>
+ <glossentry id='var-BUILDSTATS_BASE'><glossterm><imagedata fileref="figures/define-generic.png" />BUILDSTATS_BASE</glossterm>
+ <info>
+ BUILDSTATS_BASE[doc] = "Points to the location of the directory that holds build statistics when you use and enable the buildstats class."
+ </info>
<glossdef>
<para>
Points to the location of the directory that holds build
@@ -1281,7 +1488,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-BUSYBOX_SPLIT_SUID'><glossterm>BUSYBOX_SPLIT_SUID</glossterm>
+ <glossentry id='var-BUSYBOX_SPLIT_SUID'><glossterm><imagedata fileref="figures/define-generic.png" />BUSYBOX_SPLIT_SUID</glossterm>
+ <info>
+ BUSYBOX_SPLIT_SUID[doc] = "For the BusyBox recipe, specifies whether to split the output executable file into two parts: one for features that require setuid root, and one for the remaining features."
+ </info>
<glossdef>
<para>
For the BusyBox recipe, specifies whether to split the
@@ -1304,7 +1514,10 @@
<glossdiv id='var-glossary-c'><title>C</title>
- <glossentry id='var-CFLAGS'><glossterm>CFLAGS</glossterm>
+ <glossentry id='var-CFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />CFLAGS</glossterm>
+ <info>
+ CFLAGS[doc] = "Flags passed to the C compiler for the target system. This variable evaluates to the same as TARGET_CFLAGS."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C compiler.
@@ -1336,7 +1549,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-CLASSOVERRIDE'><glossterm>CLASSOVERRIDE</glossterm>
+ <glossentry id='var-CLASSOVERRIDE'><glossterm><imagedata fileref="figures/define-generic.png" />CLASSOVERRIDE</glossterm>
+ <info>
+ CLASSOVERRIDE[doc] = "An internal variable specifying the special class override that should currently apply (e.g. "class-target", "class-native", and so forth)."
+ </info>
<glossdef>
<para>
An internal variable specifying the special class override
@@ -1362,7 +1578,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-COMBINED_FEATURES'><glossterm>COMBINED_FEATURES</glossterm>
+ <glossentry id='var-COMBINED_FEATURES'><glossterm><imagedata fileref="figures/define-generic.png" />COMBINED_FEATURES</glossterm>
+ <info>
+ COMBINED_FEATURES[doc] = "A set of features common between MACHINE_FEATURES and DISTRO_FEATURES."
+ </info>
<glossdef>
<para>
Provides a list of hardware features that are enabled in
@@ -1388,7 +1607,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-COMMON_LICENSE_DIR'><glossterm>COMMON_LICENSE_DIR</glossterm>
+ <glossentry id='var-COMMON_LICENSE_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />COMMON_LICENSE_DIR</glossterm>
+ <info>
+ COMMON_LICENSE_DIR[doc] = "Points to meta/files/common-licenses in the Source Directory, which is where generic license files reside."
+ </info>
<glossdef>
<para>
Points to <filename>meta/files/common-licenses</filename>
@@ -1399,7 +1621,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-COMPATIBLE_HOST'><glossterm>COMPATIBLE_HOST</glossterm>
+ <glossentry id='var-COMPATIBLE_HOST'><glossterm><imagedata fileref="figures/define-generic.png" />COMPATIBLE_HOST</glossterm>
+ <info>
+ COMPATIBLE_HOST[doc] = "A regular expression that resolves to one or more hosts (when the recipe is native) or one or more targets (when the recipe is non-native) with which a recipe is compatible."
+ </info>
<glossdef>
<para>A regular expression that resolves to one or more hosts
(when the recipe is native) or one or more targets (when
@@ -1416,7 +1641,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-COMPATIBLE_MACHINE'><glossterm>COMPATIBLE_MACHINE</glossterm>
+ <glossentry id='var-COMPATIBLE_MACHINE'><glossterm><imagedata fileref="figures/define-generic.png" />COMPATIBLE_MACHINE</glossterm>
+ <info>
+ COMPATIBLE_MACHINE[doc] = "A regular expression that resolves to one or more target machines with which a recipe is compatible."
+ </info>
<glossdef>
<para>A regular expression that resolves to one or more
target machines with which a recipe is compatible.
@@ -1431,7 +1659,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-COMPLEMENTARY_GLOB'><glossterm>COMPLEMENTARY_GLOB</glossterm>
+ <glossentry id='var-COMPLEMENTARY_GLOB'><glossterm><imagedata fileref="figures/define-generic.png" />COMPLEMENTARY_GLOB</glossterm>
+ <info>
+ COMPLEMENTARY_GLOB[doc] = "Defines wildcards to match when installing a list of complementary packages for all the packages installed in an image."
+ </info>
<glossdef>
<para>
Defines wildcards to match when installing a list of
@@ -1458,7 +1689,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-CONFFILES'><glossterm>CONFFILES</glossterm>
+ <glossentry id='var-CONFFILES'><glossterm><imagedata fileref="figures/define-generic.png" />CONFFILES</glossterm>
+ <info>
+ CONFFILES[doc] = "Identifies editable or configurable files that are part of a package."
+ </info>
<glossdef>
<para>
Identifies editable or configurable files that are part of a package.
@@ -1508,7 +1742,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-CONFIG_INITRAMFS_SOURCE'><glossterm>CONFIG_INITRAMFS_SOURCE</glossterm>
+ <glossentry id='var-CONFIG_INITRAMFS_SOURCE'><glossterm><imagedata fileref="figures/define-generic.png" />CONFIG_INITRAMFS_SOURCE</glossterm>
+ <info>
+ CONFIG_INITRAMFS_SOURCE[doc] = "Identifies the initial RAM disk (initramfs) source files. The OpenEmbedded build system receives and uses this kernel Kconfig variable as an environment variable."
+ </info>
<glossdef>
<para>
Identifies the initial RAM disk (initramfs) source files.
@@ -1540,7 +1777,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-CONFIG_SITE'><glossterm>CONFIG_SITE</glossterm>
+ <glossentry id='var-CONFIG_SITE'><glossterm><imagedata fileref="figures/define-generic.png" />CONFIG_SITE</glossterm>
+ <info>
+ CONFIG_SITE[doc] = "A list of files that contains autoconf test results relevant to the current build. This variable is used by the Autotools utilities when running configure."
+ </info>
<glossdef>
<para>
A list of files that contains <filename>autoconf</filename> test results relevant
@@ -1551,11 +1791,15 @@
</glossdef>
</glossentry>
- <glossentry id='var-CONFLICT_DISTRO_FEATURES'><glossterm>CONFLICT_DISTRO_FEATURES</glossterm>
+ <glossentry id='var-CONFLICT_DISTRO_FEATURES'><glossterm><imagedata fileref="figures/define-generic.png" />CONFLICT_DISTRO_FEATURES</glossterm>
+ <info>
+ CONFLICT_DISTRO_FEATURES[doc] = "When a recipe inherits the distro_features_check class, this variable identifies distribution features that would be in conflict should the recipe be built."
+ </info>
<glossdef>
<para>
- When a recipe inherits the
- <filename>distro_features_check</filename> class, this
+ When inheriting the
+ <link linkend='ref-classes-distro_features_check'><filename>distro_features_check</filename></link>
+ class, this
variable identifies distribution features that would
be in conflict should the recipe
be built.
@@ -1569,7 +1813,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-COPY_LIC_DIRS'><glossterm>COPY_LIC_DIRS</glossterm>
+ <glossentry id='var-COPY_LIC_DIRS'><glossterm><imagedata fileref="figures/define-generic.png" />COPY_LIC_DIRS</glossterm>
+ <info>
+ COPY_LIC_DIRS[doc] = "If set to "1" along with the COPY_LIC_MANIFEST variable, the OpenEmbedded build system copies into the image the license files, which are located in /usr/share/common-licenses, for each package."
+ </info>
<glossdef>
<para>
If set to "1" along with the
@@ -1584,7 +1831,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-COPY_LIC_MANIFEST'><glossterm>COPY_LIC_MANIFEST</glossterm>
+ <glossentry id='var-COPY_LIC_MANIFEST'><glossterm><imagedata fileref="figures/define-generic.png" />COPY_LIC_MANIFEST</glossterm>
+ <info>
+ COPY_LIC_MANIFEST[doc] = "If set to "1", the OpenEmbedded build system copies the license manifest for the image to /usr/share/common-licenses/license.manifest within the image itself."
+ </info>
<glossdef>
<para>
If set to "1", the OpenEmbedded build system copies
@@ -1595,7 +1845,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-CORE_IMAGE_EXTRA_INSTALL'><glossterm>CORE_IMAGE_EXTRA_INSTALL</glossterm>
+ <glossentry id='var-CORE_IMAGE_EXTRA_INSTALL'><glossterm><imagedata fileref="figures/define-generic.png" />CORE_IMAGE_EXTRA_INSTALL</glossterm>
+ <info>
+ CORE_IMAGE_EXTRA_INSTALL[doc] = "Specifies the list of packages to be added to the image. You should only set this variable in the conf/local.conf file in the Build Directory."
+ </info>
<glossdef>
<para>
Specifies the list of packages to be added to the image.
@@ -1611,7 +1864,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-COREBASE'><glossterm>COREBASE</glossterm>
+ <glossentry id='var-COREBASE'><glossterm><imagedata fileref="figures/define-generic.png" />COREBASE</glossterm>
+ <info>
+ COREBASE[doc] = "Specifies the parent directory of the OpenEmbedded Core Metadata layer (i.e. meta)."
+ </info>
<glossdef>
<para>
Specifies the parent directory of the OpenEmbedded
@@ -1633,7 +1889,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-CPPFLAGS'><glossterm>CPPFLAGS</glossterm>
+ <glossentry id='var-CPPFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />CPPFLAGS</glossterm>
+ <info>
+ CPPFLAGS[doc] = "Specifies the flags to pass to the C pre-processor (i.e. to both the C and the C++ compilers)."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C pre-processor
@@ -1666,7 +1925,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-CXXFLAGS'><glossterm>CXXFLAGS</glossterm>
+ <glossentry id='var-CXXFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />CXXFLAGS</glossterm>
+ <info>
+ CXXFLAGS[doc] = "Specifies the flags to pass to the C++ compiler."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C++ compiler.
@@ -1702,7 +1964,10 @@
<glossdiv id='var-glossary-d'><title>D</title>
- <glossentry id='var-D'><glossterm>D</glossterm>
+ <glossentry id='var-D'><glossterm><imagedata fileref="figures/define-generic.png" />D</glossterm>
+ <info>
+ D[doc] = "The destination directory."
+ </info>
<glossdef>
<para>
The destination directory.
@@ -1718,7 +1983,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DATETIME'><glossterm>DATETIME</glossterm>
+ <glossentry id='var-DATETIME'><glossterm><imagedata fileref="figures/define-generic.png" />DATETIME</glossterm>
+ <info>
+ DATETIME[doc] = "The date and time the build was started."
+ </info>
<glossdef>
<para>
The date and time on which the current build started.
@@ -1727,7 +1995,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DEBUG_BUILD'><glossterm>DEBUG_BUILD</glossterm>
+ <glossentry id='var-DEBUG_BUILD'><glossterm><imagedata fileref="figures/define-generic.png" />DEBUG_BUILD</glossterm>
+ <info>
+ DEBUG_BUILD[doc] = "Specifies to build packages with debugging information. This influences the value of the SELECTED_OPTIMIZATION variable."
+ </info>
<glossdef>
<para>
Specifies to build packages with debugging information.
@@ -1738,7 +2009,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DEBUG_OPTIMIZATION'><glossterm>DEBUG_OPTIMIZATION</glossterm>
+ <glossentry id='var-DEBUG_OPTIMIZATION'><glossterm><imagedata fileref="figures/define-generic.png" />DEBUG_OPTIMIZATION</glossterm>
+ <info>
+ DEBUG_OPTIMIZATION[doc] = "The options to pass in TARGET_CFLAGS and CFLAGS when compiling a system for debugging. This variable defaults to '-O -fno-omit-frame-pointer -g'."
+ </info>
<glossdef>
<para>
The options to pass in
@@ -1750,7 +2024,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DEFAULT_PREFERENCE'><glossterm>DEFAULT_PREFERENCE</glossterm>
+ <glossentry id='var-DEFAULT_PREFERENCE'><glossterm><imagedata fileref="figures/define-generic.png" />DEFAULT_PREFERENCE</glossterm>
+ <info>
+ DEFAULT_PREFERENCE[doc] = "Specifies a weak bias for recipe selection priority."
+ </info>
<glossdef>
<para>
Specifies a weak bias for recipe selection priority.
@@ -1774,7 +2051,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DEFAULTTUNE'><glossterm>DEFAULTTUNE</glossterm>
+ <glossentry id='var-DEFAULTTUNE'><glossterm><imagedata fileref="figures/define-generic.png" />DEFAULTTUNE</glossterm>
+ <info>
+ DEFAULTTUNE[doc] = "The default CPU and Application Binary Interface (ABI) tunings (i.e. the "tune") used by the OpenEmbedded build system."
+ </info>
<glossdef>
<para>
The default CPU and Application Binary Interface (ABI)
@@ -1795,7 +2075,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DEPENDS'><glossterm>DEPENDS</glossterm>
+ <glossentry id='var-DEPENDS'><glossterm><imagedata fileref="figures/define-generic.png" />DEPENDS</glossterm>
+ <info>
+ DEPENDS[doc] = "Lists a recipe's build-time dependencies (i.e. other recipe files)."
+ </info>
<glossdef>
<para>
Lists a recipe's build-time dependencies
@@ -1830,7 +2113,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DEPLOY_DIR'><glossterm>DEPLOY_DIR</glossterm>
+ <glossentry id='var-DEPLOY_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />DEPLOY_DIR</glossterm>
+ <info>
+ DEPLOY_DIR[doc] = "Points to the general area that the OpenEmbedded build system uses to place images, packages, SDKs and other output files that are ready to be used outside of the build system."
+ </info>
<glossdef>
<para>
Points to the general area that the OpenEmbedded build
@@ -1855,7 +2141,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DEPLOY_DIR_IMAGE'><glossterm>DEPLOY_DIR_IMAGE</glossterm>
+ <glossentry id='var-DEPLOY_DIR_IMAGE'><glossterm><imagedata fileref="figures/define-generic.png" />DEPLOY_DIR_IMAGE</glossterm>
+ <info>
+ DEPLOY_DIR_IMAGE[doc] = "Points to the area that the OpenEmbedded build system uses to place images and other associated output files that are ready to be deployed onto the target machine."
+ </info>
<glossdef>
<para>
Points to the area that the OpenEmbedded build system uses
@@ -1882,10 +2171,13 @@
</glossdef>
</glossentry>
- <glossentry id='var-DEPLOYDIR'><glossterm>DEPLOYDIR</glossterm>
+ <glossentry id='var-DEPLOYDIR'><glossterm><imagedata fileref="figures/define-generic.png" />DEPLOYDIR</glossterm>
+ <info>
+ DEPLOYDIR[doc] = "For recipes that inherit the deploy class, the DEPLOYDIR points to a temporary work area for deployed files."
+ </info>
<glossdef>
<para>
- For recipes that inherit the
+ When inheriting the
<link linkend='ref-classes-deploy'><filename>deploy</filename></link>
class, the <filename>DEPLOYDIR</filename> points to a
temporary work area for deployed files that is set in the
@@ -1903,7 +2195,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DESCRIPTION'><glossterm>DESCRIPTION</glossterm>
+ <glossentry id='var-DESCRIPTION'><glossterm><imagedata fileref="figures/define-generic.png" />DESCRIPTION</glossterm>
+ <info>
+ DESCRIPTION[doc] = "The package description used by package managers. If not set, DESCRIPTION takes the value of the SUMMARY variable."
+ </info>
<glossdef>
<para>The package description used by package managers.
If not set, <filename>DESCRIPTION</filename> takes
@@ -1914,7 +2209,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DISK_SIGNATURE'><glossterm>DISK_SIGNATURE</glossterm>
+ <glossentry id='var-DISK_SIGNATURE'><glossterm><imagedata fileref="figures/define-generic.png" />DISK_SIGNATURE</glossterm>
+ <info>
+ DISK_SIGNATURE[doc] = "A 32-bit MBR disk signature used by directdisk images."
+ </info>
<glossdef>
<para>
A 32-bit MBR disk signature used by
@@ -1964,7 +2262,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DISTRO'><glossterm>DISTRO</glossterm>
+ <glossentry id='var-DISTRO'><glossterm><imagedata fileref="figures/define-generic.png" />DISTRO</glossterm>
+ <info>
+ DISTRO[doc] = "The short name of the distribution. If the variable is blank, meta/conf/distro/defaultsetup.conf will be used."
+ </info>
<glossdef>
<para>
The short name of the distribution.
@@ -2006,7 +2307,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DISTRO_EXTRA_RDEPENDS'><glossterm>DISTRO_EXTRA_RDEPENDS</glossterm>
+ <glossentry id='var-DISTRO_EXTRA_RDEPENDS'><glossterm><imagedata fileref="figures/define-generic.png" />DISTRO_EXTRA_RDEPENDS</glossterm>
+ <info>
+ DISTRO_EXTRA_RDEPENDS[doc] = "Specifies a list of distro-specific packages to add to all images. The variable only applies to the images that include packagegroup-base."
+ </info>
<glossdef>
<para>
Specifies a list of distro-specific packages to add to all images.
@@ -2022,7 +2326,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DISTRO_EXTRA_RRECOMMENDS'><glossterm>DISTRO_EXTRA_RRECOMMENDS</glossterm>
+ <glossentry id='var-DISTRO_EXTRA_RRECOMMENDS'><glossterm><imagedata fileref="figures/define-generic.png" />DISTRO_EXTRA_RRECOMMENDS</glossterm>
+ <info>
+ DISTRO_EXTRA_RRECOMMENDS[doc] = "Specifies a list of distro-specific packages to add to all images if the packages exist. The packages are automatically installed but you can remove them."
+ </info>
<glossdef>
<para>
Specifies a list of distro-specific packages to add to all images
@@ -2034,7 +2341,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DISTRO_FEATURES'><glossterm>DISTRO_FEATURES</glossterm>
+ <glossentry id='var-DISTRO_FEATURES'><glossterm><imagedata fileref="figures/define-generic.png" />DISTRO_FEATURES</glossterm>
+ <info>
+ DISTRO_FEATURES[doc] = "The features enabled for the distribution."
+ </info>
<glossdef>
<para>
The software support you want in your distribution for
@@ -2067,7 +2377,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DISTRO_FEATURES_BACKFILL'><glossterm>DISTRO_FEATURES_BACKFILL</glossterm>
+ <glossentry id='var-DISTRO_FEATURES_BACKFILL'><glossterm><imagedata fileref="figures/define-generic.png" />DISTRO_FEATURES_BACKFILL</glossterm>
+ <info>
+ DISTRO_FEATURES_BACKFILL[doc] = "Features to be added to DISTRO_FEATURES if not also present in DISTRO_FEATURES_BACKFILL_CONSIDERED. This variable is set in the meta/conf/bitbake.conf file and it is not intended to be user-configurable."
+ </info>
<glossdef>
<para>Features to be added to
<filename><link linkend='var-DISTRO_FEATURES'>DISTRO_FEATURES</link></filename>
@@ -2086,7 +2399,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DISTRO_FEATURES_BACKFILL_CONSIDERED'><glossterm>DISTRO_FEATURES_BACKFILL_CONSIDERED</glossterm>
+ <glossentry id='var-DISTRO_FEATURES_BACKFILL_CONSIDERED'><glossterm><imagedata fileref="figures/define-generic.png" />DISTRO_FEATURES_BACKFILL_CONSIDERED</glossterm>
+ <info>
+ DISTRO_FEATURES_BACKFILL_CONSIDERED[doc] = "Features from DISTRO_FEATURES_BACKFILL that should not be backfilled (i.e. added to DISTRO_FEATURES) during the build."
+ </info>
<glossdef>
<para>Features from
<filename><link linkend='var-DISTRO_FEATURES_BACKFILL'>DISTRO_FEATURES_BACKFILL</link></filename>
@@ -2099,13 +2415,19 @@
</glossdef>
</glossentry>
- <glossentry id='var-DISTRO_NAME'><glossterm>DISTRO_NAME</glossterm>
+ <glossentry id='var-DISTRO_NAME'><glossterm><imagedata fileref="figures/define-generic.png" />DISTRO_NAME</glossterm>
+ <info>
+ DISTRO_NAME[doc] = "The long name of the distribution."
+ </info>
<glossdef>
<para>The long name of the distribution.</para>
</glossdef>
</glossentry>
- <glossentry id='var-DISTRO_PN_ALIAS'><glossterm>DISTRO_PN_ALIAS</glossterm>
+ <glossentry id='var-DISTRO_PN_ALIAS'><glossterm><imagedata fileref="figures/define-generic.png" />DISTRO_PN_ALIAS</glossterm>
+ <info>
+ DISTRO_PN_ALIAS[doc] = "Alias names used for the recipe in various Linux distributions."
+ </info>
<glossdef>
<para>Alias names used for the recipe in various Linux distributions.</para>
<para>See the
@@ -2115,13 +2437,19 @@
</glossdef>
</glossentry>
- <glossentry id='var-DISTRO_VERSION'><glossterm>DISTRO_VERSION</glossterm>
+ <glossentry id='var-DISTRO_VERSION'><glossterm><imagedata fileref="figures/define-generic.png" />DISTRO_VERSION</glossterm>
+ <info>
+ DISTRO_VERSION[doc] = "The version of the distribution."
+ </info>
<glossdef>
<para>The version of the distribution.</para>
</glossdef>
</glossentry>
- <glossentry id='var-DISTROOVERRIDES'><glossterm>DISTROOVERRIDES</glossterm>
+ <glossentry id='var-DISTROOVERRIDES'><glossterm><imagedata fileref="figures/define-generic.png" />DISTROOVERRIDES</glossterm>
+ <info>
+ DISTROOVERRIDES[doc] = "Lists overrides specific to the current distribution. By default, the variable list includes the value of the DISTRO variable."
+ </info>
<glossdef>
<para>
This variable lists overrides specific to the current
@@ -2137,7 +2465,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-DL_DIR'><glossterm>DL_DIR</glossterm>
+ <glossentry id='var-DL_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />DL_DIR</glossterm>
+ <info>
+ DL_DIR[doc] = "The central download directory used by the build process to store downloads. By default, the directory is 'downloads' in the Build Directory."
+ </info>
<glossdef>
<para>
The central download directory used by the build process to
@@ -2191,38 +2522,75 @@
chapter.
</para>
</glossdef>
+ </glossentry>
+ <glossentry id='var-DOC_COMPRESS'><glossterm><imagedata fileref="figures/define-generic.png" />DOC_COMPRESS</glossterm>
+ <info>
+ DOC_COMPRESS[doc] = "When inheriting the compress_doc class, this variable sets the compression policy used when the OpenEmbedded build system compresses man pages and info pages."
+ </info>
+ <glossdef>
+ <para>
+ When inheriting the
+ <link linkend='ref-classes-compress_doc'><filename>compress_doc</filename></link>
+ class, this variable sets the compression policy used when
+ the OpenEmbedded build system compresses man pages and info
+ pages.
+ By default, the compression method used is gz (gzip).
+ Other policies available are xz and bz2.
+ </para>
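+
+ <para>
+ For example, one possible way to select the xz policy
+ globally is to inherit the class and set the variable in
+ your <filename>local.conf</filename> file:
+ <literallayout class='monospaced'>
+ INHERIT += "compress_doc"
+ DOC_COMPRESS = "xz"
+ </literallayout>
+ </para>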
+
+ <para>
+ For information on policies and on how to use this
+ variable, see the comments in the
+ <filename>meta/classes/compress_doc.bbclass</filename> file.
+ </para>
+ </glossdef>
</glossentry>
+
</glossdiv>
<glossdiv id='var-glossary-e'><title>E</title>
- <glossentry id='var-ENABLE_BINARY_LOCALE_GENERATION'><glossterm>ENABLE_BINARY_LOCALE_GENERATION</glossterm>
+ <glossentry id='var-EFI_PROVIDER'><glossterm><imagedata fileref="figures/define-generic.png" />EFI_PROVIDER</glossterm>
+ <info>
+ EFI_PROVIDER[doc] = "When building bootable images (i.e. where hddimg or vmdk is in IMAGE_FSTYPES), the EFI_PROVIDER variable specifies the EFI bootloader to use."
+ </info>
<glossdef>
- <para></para>
- <para>Variable that controls which locales for
- <filename>eglibc</filename> are generated during the
- build (useful if the target device has 64Mbytes
- of RAM or less).</para>
+ <para>
+ When building bootable images (i.e. where
+ <filename>hddimg</filename> or <filename>vmdk</filename>
+ is in
+ <link linkend='var-IMAGE_FSTYPES'><filename>IMAGE_FSTYPES</filename></link>),
+ the <filename>EFI_PROVIDER</filename> variable specifies
+ the EFI bootloader to use.
+ The default is "grub-efi", but "gummiboot" can be used
+ instead.
+ </para>
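+
+ <para>
+ For example, you could select the alternative bootloader
+ in your <filename>local.conf</filename> file:
+ <literallayout class='monospaced'>
+ EFI_PROVIDER = "gummiboot"
+ </literallayout>
+ </para>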
+
+ <para>
+ See the
+ <link linkend='ref-classes-gummiboot'><filename>gummiboot</filename></link>
+ class for more information.
+ </para>
</glossdef>
</glossentry>
- <glossentry id='var-ERROR_QA'><glossterm>ERROR_QA</glossterm>
+ <glossentry id='var-ENABLE_BINARY_LOCALE_GENERATION'><glossterm><imagedata fileref="figures/define-generic.png" />ENABLE_BINARY_LOCALE_GENERATION</glossterm>
+ <info>
+ ENABLE_BINARY_LOCALE_GENERATION[doc] = "Controls which locales for glibc are generated during the build. The variable is useful if the target device has 64Mbytes of RAM or less."
+ </info>
<glossdef>
- <para>
- Specifies the quality assurance checks whose failures are
- reported as errors by the OpenEmbedded build system.
- You set this variable in your distribution configuration
- file.
- For a list of the checks you can control with this variable,
- see the
- "<link linkend='ref-classes-insane'><filename>insane.bbclass</filename></link>"
- section.
- </para>
+ <para>Controls which locales for
+ <filename>eglibc</filename> are generated during the
+ build, which is useful if the target device has
+ 64Mbytes of RAM or less.</para>
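+
+ <para>
+ For example, if generating binary locales is not needed,
+ one possible way to turn it off is in your
+ <filename>local.conf</filename> file:
+ <literallayout class='monospaced'>
+ ENABLE_BINARY_LOCALE_GENERATION = "0"
+ </literallayout>
+ </para>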
</glossdef>
</glossentry>
- <glossentry id='var-ERR_REPORT_DIR'><glossterm>ERR_REPORT_DIR</glossterm>
+ <glossentry id='var-ERR_REPORT_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />ERR_REPORT_DIR</glossterm>
+ <info>
+ ERR_REPORT_DIR[doc] = "When used with the report-error class, specifies the path used for storing the debug files created by the error reporting tool, which allows you to submit build errors you encounter to a central database."
+ </info>
<glossdef>
<para>
When used with the
@@ -2241,13 +2609,34 @@
you want the error reporting tool to store the debug files
as follows in your <filename>local.conf</filename> file:
<literallayout class='monospaced'>
- ERR_REPORT_DIR = "path"
+ ERR_REPORT_DIR = "<replaceable>path</replaceable>"
</literallayout>
</para>
</glossdef>
</glossentry>
- <glossentry id='var-EXCLUDE_FROM_WORLD'><glossterm>EXCLUDE_FROM_WORLD</glossterm>
+ <glossentry id='var-ERROR_QA'><glossterm><imagedata fileref="figures/define-generic.png" />ERROR_QA</glossterm>
+ <info>
+ ERROR_QA[doc] = "Specifies the quality assurance checks whose failures are reported as errors by the OpenEmbedded build system."
+ </info>
+ <glossdef>
+ <para>
+ Specifies the quality assurance checks whose failures are
+ reported as errors by the OpenEmbedded build system.
+ You set this variable in your distribution configuration
+ file.
+ For a list of the checks you can control with this variable,
+ see the
+ "<link linkend='ref-classes-insane'><filename>insane.bbclass</filename></link>"
+ section.
+ </para>
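+
+ <para>
+ For example, assuming a check such as "ldflags" is
+ currently listed in <filename>WARN_QA</filename>, one
+ possible way to treat its failures as errors instead is:
+ <literallayout class='monospaced'>
+ WARN_QA_remove = "ldflags"
+ ERROR_QA_append = " ldflags"
+ </literallayout>
+ </para>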
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-EXCLUDE_FROM_WORLD'><glossterm><imagedata fileref="figures/define-generic.png" />EXCLUDE_FROM_WORLD</glossterm>
+ <info>
+ EXCLUDE_FROM_WORLD[doc] = "Directs BitBake to exclude a recipe from world builds (i.e. bitbake world)."
+ </info>
<glossdef>
<para>
Directs BitBake to exclude a recipe from world builds (i.e.
@@ -2273,7 +2662,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-EXTENDPE'><glossterm>EXTENDPE</glossterm>
+ <glossentry id='var-EXTENDPE'><glossterm><imagedata fileref="figures/define-generic.png" />EXTENDPE</glossterm>
+ <info>
+ EXTENDPE[doc] = "Used with file and pathnames to create a prefix for a recipe's version based on the recipe's PE value. If PE is set and greater than zero for a recipe, EXTENDPE becomes that value."
+ </info>
<glossdef>
<para>
Used with file and pathnames to create a prefix for a recipe's
@@ -2291,7 +2683,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-EXTENDPKGV'><glossterm>EXTENDPKGV</glossterm>
+ <glossentry id='var-EXTENDPKGV'><glossterm><imagedata fileref="figures/define-generic.png" />EXTENDPKGV</glossterm>
+ <info>
+ EXTENDPKGV[doc] = "The full package version specification as it appears on the final packages produced by a recipe."
+ </info>
<glossdef>
<para>
The full package version specification as it appears on the
@@ -2312,11 +2707,15 @@
</glossdef>
</glossentry>
- <glossentry id='var-EXTERNALSRC'><glossterm>EXTERNALSRC</glossterm>
+ <glossentry id='var-EXTERNALSRC'><glossterm><imagedata fileref="figures/define-generic.png" />EXTERNALSRC</glossterm>
+ <info>
+ EXTERNALSRC[doc] = "If externalsrc.bbclass is inherited, this variable points to the source tree, which is outside of the OpenEmbedded build system."
+ </info>
<glossdef>
<para>
- If <filename>externalsrc.bbclass</filename> is inherited,
- this variable points to the source tree, which is
+ When inheriting the
+ <link linkend='ref-classes-externalsrc'><filename>externalsrc</filename></link>
+ class, this variable points to the source tree, which is
outside of the OpenEmbedded build system.
When set, this variable sets the
<link linkend='var-S'><filename>S</filename></link>
@@ -2337,13 +2736,17 @@
</glossdef>
</glossentry>
- <glossentry id='var-EXTERNALSRC_BUILD'><glossterm>EXTERNALSRC_BUILD</glossterm>
+ <glossentry id='var-EXTERNALSRC_BUILD'><glossterm><imagedata fileref="figures/define-generic.png" />EXTERNALSRC_BUILD</glossterm>
+ <info>
+ EXTERNALSRC_BUILD[doc] = "If externalsrc.bbclass is inherited, this variable points to the directory in which the recipe's source code is built, which is outside of the OpenEmbedded build system."
+ </info>
<glossdef>
<para>
- If <filename>externalsrc.bbclass</filename> is inherited,
- this variable points to the directory in which the recipe's
- source code is built,
- which is outside of the OpenEmbedded build system.
+ When inheriting the
+ <link linkend='ref-classes-externalsrc'><filename>externalsrc</filename></link>
+ class, this variable points to the directory in which the
+ recipe's source code is built, which is outside of the
+ OpenEmbedded build system.
When set, this variable sets the
<link linkend='var-B'><filename>B</filename></link>
variable, which is what the OpenEmbedded build system uses
@@ -2363,7 +2766,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-EXTRA_IMAGE_FEATURES'><glossterm>EXTRA_IMAGE_FEATURES</glossterm>
+ <glossentry id='var-EXTRA_IMAGE_FEATURES'><glossterm><imagedata fileref="figures/define-generic.png" />EXTRA_IMAGE_FEATURES</glossterm>
+ <info>
+ EXTRA_IMAGE_FEATURES[doc] = "The list of additional features to include in an image. Configure this variable in the conf/local.conf file in the Build Directory."
+ </info>
<glossdef>
<para>
The list of additional features to include in an image.
@@ -2435,7 +2841,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-EXTRA_IMAGECMD'><glossterm>EXTRA_IMAGECMD</glossterm>
+ <glossentry id='var-EXTRA_IMAGECMD'><glossterm><imagedata fileref="figures/define-generic.png" />EXTRA_IMAGECMD</glossterm>
+ <info>
+ EXTRA_IMAGECMD[doc] = "Specifies additional options for the image creation command that has been specified in IMAGE_CMD. When setting this variable, you should use an override for the associated type."
+ </info>
<glossdef>
<para>
Specifies additional options for the image
@@ -2451,7 +2860,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-EXTRA_IMAGEDEPENDS'><glossterm>EXTRA_IMAGEDEPENDS</glossterm>
+ <glossentry id='var-EXTRA_IMAGEDEPENDS'><glossterm><imagedata fileref="figures/define-generic.png" />EXTRA_IMAGEDEPENDS</glossterm>
+ <info>
+ EXTRA_IMAGEDEPENDS[doc] = "A list of recipes to build that do not provide packages for installing into the root filesystem. Use this variable to list recipes that are required to build the final image, but not needed in the root filesystem."
+ </info>
<glossdef>
<para>A list of recipes to build that do not provide packages
for installing into the root filesystem.
@@ -2471,28 +2883,40 @@
</glossdef>
</glossentry>
- <glossentry id='var-EXTRA_OECMAKE'><glossterm>EXTRA_OECMAKE</glossterm>
+ <glossentry id='var-EXTRA_OECMAKE'><glossterm><imagedata fileref="figures/define-generic.png" />EXTRA_OECMAKE</glossterm>
+ <info>
+ EXTRA_OECMAKE[doc] = "Additional cmake options."
+ </info>
<glossdef>
<para>Additional <filename>cmake</filename> options.</para>
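+ <para>
+ For example, a recipe might pass a project-specific
+ option (the option shown here is purely illustrative):
+ <literallayout class='monospaced'>
+ EXTRA_OECMAKE = "-DENABLE_TESTS=OFF"
+ </literallayout>
+ </para>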
</glossdef>
</glossentry>
- <glossentry id='var-EXTRA_OECONF'><glossterm>EXTRA_OECONF</glossterm>
+ <glossentry id='var-EXTRA_OECONF'><glossterm><imagedata fileref="figures/define-generic.png" />EXTRA_OECONF</glossterm>
+ <info>
+ EXTRA_OECONF[doc] = "Additional configure script options."
+ </info>
<glossdef>
<para>Additional <filename>configure</filename> script options.</para>
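+ <para>
+ For example, a recipe might disable a feature its
+ configure script offers (the exact option depends on the
+ software being built):
+ <literallayout class='monospaced'>
+ EXTRA_OECONF = "--disable-static"
+ </literallayout>
+ </para>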
</glossdef>
</glossentry>
- <glossentry id='var-EXTRA_OEMAKE'><glossterm>EXTRA_OEMAKE</glossterm>
+ <glossentry id='var-EXTRA_OEMAKE'><glossterm><imagedata fileref="figures/define-generic.png" />EXTRA_OEMAKE</glossterm>
+ <info>
+ EXTRA_OEMAKE[doc] = "Additional GNU make options."
+ </info>
<glossdef>
<para>Additional GNU <filename>make</filename> options.</para>
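+ <para>
+ For example, a recipe whose Makefiles do not pick up the
+ cross-toolchain from the environment might pass it
+ explicitly:
+ <literallayout class='monospaced'>
+ EXTRA_OEMAKE = "'CC=${CC}' 'CFLAGS=${CFLAGS}'"
+ </literallayout>
+ </para>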
</glossdef>
</glossentry>
- <glossentry id='var-EXTRA_OESCONS'><glossterm>EXTRA_OESCONS</glossterm>
+ <glossentry id='var-EXTRA_OESCONS'><glossterm><imagedata fileref="figures/define-generic.png" />EXTRA_OESCONS</glossterm>
+ <info>
+ EXTRA_OESCONS[doc] = "When a recipe inherits the scons class, this variable specifies additional configuration options you want to pass to the scons command line."
+ </info>
<glossdef>
<para>
- When a recipe inherits the
+ When inheriting the
<link linkend='ref-classes-scons'><filename>scons</filename></link>
class, this variable specifies additional configuration
options you want to pass to the
@@ -2501,7 +2925,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-EXTRA_QMAKEVARS_POST'><glossterm>EXTRA_QMAKEVARS_POST</glossterm>
+ <glossentry id='var-EXTRA_QMAKEVARS_POST'><glossterm><imagedata fileref="figures/define-generic.png" />EXTRA_QMAKEVARS_POST</glossterm>
+ <info>
+ EXTRA_QMAKEVARS_POST[doc] = "Configuration variables or options you want to pass to qmake when the arguments need to be after the .pro file list on the command line."
+ </info>
<glossdef>
<para>
Configuration variables or options you want to pass to
@@ -2519,7 +2946,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-EXTRA_QMAKEVARS_PRE'><glossterm>EXTRA_QMAKEVARS_PRE</glossterm>
+ <glossentry id='var-EXTRA_QMAKEVARS_PRE'><glossterm><imagedata fileref="figures/define-generic.png" />EXTRA_QMAKEVARS_PRE</glossterm>
+ <info>
+ EXTRA_QMAKEVARS_PRE[doc] = "Configuration variables or options you want to pass to qmake when the arguments need to be before the .pro file list on the command line."
+ </info>
<glossdef>
<para>
Configuration variables or options you want to pass to
@@ -2537,10 +2967,13 @@
</glossdef>
</glossentry>
- <glossentry id='var-EXTRA_USERS_PARAMS'><glossterm>EXTRA_USERS_PARAMS</glossterm>
+ <glossentry id='var-EXTRA_USERS_PARAMS'><glossterm><imagedata fileref="figures/define-generic.png" />EXTRA_USERS_PARAMS</glossterm>
+ <info>
+ EXTRA_USERS_PARAMS[doc] = "When a recipe inherits the extrausers class, this variable provides image level user and group operations."
+ </info>
<glossdef>
<para>
- When a recipe inherits the
+ When inheriting the
<link linkend='ref-classes-extrausers'><filename>extrausers</filename></link>
class, this variable provides image level user and group
operations.
@@ -2575,7 +3008,10 @@
<glossdiv id='var-glossary-f'><title>F</title>
- <glossentry id='var-FEATURE_PACKAGES'><glossterm>FEATURE_PACKAGES</glossterm>
+ <glossentry id='var-FEATURE_PACKAGES'><glossterm><imagedata fileref="figures/define-generic.png" />FEATURE_PACKAGES</glossterm>
+ <info>
+ FEATURE_PACKAGES[doc] = "Defines one or more packages to include in an image when a specific item is included in IMAGE_FEATURES. When setting the value, FEATURE_PACKAGES should have the name of the feature item as an override."
+ </info>
<glossdef>
<para>
Defines one or more packages to include in an image when
@@ -2585,11 +3021,11 @@
should have the name of the feature item as an override.
Here is an example:
<literallayout class='monospaced'>
- FEATURE_PACKAGES_widget = "package1 package2"
+ FEATURE_PACKAGES_widget = "<replaceable>package1</replaceable> <replaceable>package2</replaceable>"
</literallayout>
In this example, if "widget" were added to
- <filename>IMAGE_FEATURES</filename>, "package1" and
- "package2" would be included in the image.
+ <filename>IMAGE_FEATURES</filename>, <replaceable>package1</replaceable> and
+ <replaceable>package2</replaceable> would be included in the image.
<note>
Packages installed by features defined through
<filename>FEATURE_PACKAGES</filename> are often package
@@ -2603,7 +3039,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-FEED_DEPLOYDIR_BASE_URI'><glossterm>FEED_DEPLOYDIR_BASE_URI</glossterm>
+ <glossentry id='var-FEED_DEPLOYDIR_BASE_URI'><glossterm><imagedata fileref="figures/define-generic.png" />FEED_DEPLOYDIR_BASE_URI</glossterm>
+ <info>
+ FEED_DEPLOYDIR_BASE_URI[doc] = "Allows the ipk deploy directory to be served as an ad hoc feed (bogofeed). Set this variable to the base URL of the directory as exported by HTTP. A set of ad hoc feed configs will be generated in the image."
+ </info>
<glossdef>
<para>
Points to the base URL of the server and location within
@@ -2630,7 +3069,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-FILES'><glossterm>FILES</glossterm>
+ <glossentry id='var-FILES'><glossterm><imagedata fileref="figures/define-generic.png" />FILES</glossterm>
+ <info>
+ FILES[doc] = "The list of directories or files that are placed in packages."
+ </info>
<glossdef>
<para>
The list of directories or files that are placed in packages.
@@ -2677,7 +3119,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-FILESEXTRAPATHS'><glossterm>FILESEXTRAPATHS</glossterm>
+ <glossentry id='var-FILESEXTRAPATHS'><glossterm><imagedata fileref="figures/define-generic.png" />FILESEXTRAPATHS</glossterm>
+ <info>
+ FILESEXTRAPATHS[doc] = "Extends the search path the OpenEmbedded build system uses when looking for files and patches as it processes recipes and append files."
+ </info>
<glossdef>
<para>
Extends the search path the OpenEmbedded build system uses
@@ -2744,7 +3189,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-FILESOVERRIDES'><glossterm>FILESOVERRIDES</glossterm>
+ <glossentry id='var-FILESOVERRIDES'><glossterm><imagedata fileref="figures/define-generic.png" />FILESOVERRIDES</glossterm>
+ <info>
+ FILESOVERRIDES[doc] = "A subset of OVERRIDES used by the OpenEmbedded build system for creating FILESPATH."
+ </info>
<glossdef>
<para>
A subset of <link linkend='var-OVERRIDES'><filename>OVERRIDES</filename></link>
@@ -2772,7 +3220,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-FILESPATH'><glossterm>FILESPATH</glossterm>
+ <glossentry id='var-FILESPATH'><glossterm><imagedata fileref="figures/define-generic.png" />FILESPATH</glossterm>
+ <info>
+ FILESPATH[doc] = "The default set of directories the OpenEmbedded build system uses when searching for patches and files. It is defined in the base.bbclass class found in meta/classes in the Source Directory. Do not hand-edit the FILESPATH variable."
+ </info>
<glossdef>
<para>
The default set of directories the OpenEmbedded build system
@@ -2815,7 +3266,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-FILESYSTEM_PERMS_TABLES'><glossterm>FILESYSTEM_PERMS_TABLES</glossterm>
+ <glossentry id='var-FILESYSTEM_PERMS_TABLES'><glossterm><imagedata fileref="figures/define-generic.png" />FILESYSTEM_PERMS_TABLES</glossterm>
+ <info>
+ FILESYSTEM_PERMS_TABLES[doc] = "Allows you to define your own file permissions settings table as part of your configuration for the packaging process."
+ </info>
<glossdef>
<para>Allows you to define your own file permissions settings table as part of
your configuration for the packaging process.
@@ -2847,10 +3301,13 @@
</glossdef>
</glossentry>
- <glossentry id='var-FONT_PACKAGES'><glossterm>FONT_PACKAGES</glossterm>
+ <glossentry id='var-FONT_PACKAGES'><glossterm><imagedata fileref="figures/define-generic.png" />FONT_PACKAGES</glossterm>
+ <info>
+ FONT_PACKAGES[doc] = "When a recipe inherits the fontcache class, this variable identifies packages containing font files that need to be cached by Fontconfig."
+ </info>
<glossdef>
<para>
- When a recipe inherits the
+ When inheriting the
<link linkend='ref-classes-fontcache'><filename>fontcache</filename></link>
class, this variable identifies packages containing font
files that need to be cached by Fontconfig.
@@ -2863,7 +3320,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-FULL_OPTIMIZATION'><glossterm>FULL_OPTIMIZATION</glossterm>
+ <glossentry id='var-FULL_OPTIMIZATION'><glossterm><imagedata fileref="figures/define-generic.png" />FULL_OPTIMIZATION</glossterm>
+ <info>
+ FULL_OPTIMIZATION[doc] = "The options to pass in TARGET_CFLAGS and CFLAGS when compiling an optimized system. This variable defaults to '-fexpensive-optimizations -fomit-frame-pointer -frename-registers -O2'."
+ </info>
<glossdef>
<para>
The options to pass in
@@ -2879,11 +3339,40 @@
<glossdiv id='var-glossary-g'><title>G</title>
- <glossentry id='var-GROUPADD_PARAM'><glossterm>GROUPADD_PARAM</glossterm>
+ <glossentry id='var-GLIBC_GENERATE_LOCALES'><glossterm><imagedata fileref="figures/define-generic.png" />GLIBC_GENERATE_LOCALES</glossterm>
+ <info>
+ GLIBC_GENERATE_LOCALES[doc] = "Specifies the list of GLIBC locales to generate should you not wish to generate all LIBC locales, which can be time consuming."
+ </info>
+ <glossdef>
+ <para>
+ Specifies the list of GLIBC locales to generate should you
+ not wish to generate all LIBC locales, which can be time
+ consuming.
+ <note>
+ If you specifically remove the locale
+ <filename>en_US.UTF-8</filename>, you must set
+ <link linkend='var-IMAGE_LINGUAS'><filename>IMAGE_LINGUAS</filename></link>
+ appropriately.
+ </note>
+ You can set <filename>GLIBC_GENERATE_LOCALES</filename>
+ in your <filename>local.conf</filename> file.
+ By default, all locales are generated.
+ <literallayout class='monospaced'>
+ GLIBC_GENERATE_LOCALES = "en_GB.UTF-8 en_US.UTF-8"
+ </literallayout>
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-GROUPADD_PARAM'><glossterm><imagedata fileref="figures/define-generic.png" />GROUPADD_PARAM</glossterm>
+ <info>
+ GROUPADD_PARAM[doc] = "When a recipe inherits the useradd class, this variable specifies for a package what parameters should be passed to the groupadd command if you wish to add a group to the system when the package is installed."
+ </info>
<glossdef>
<para>
- When a recipe inherits the
- <filename>useradd</filename> class, this variable
+ When inheriting the
+ <link linkend='ref-classes-useradd'><filename>useradd</filename></link>
+ class, this variable
specifies for a package what parameters should be passed
to the <filename>groupadd</filename> command
if you wish to add a group to the system when the package
@@ -2903,11 +3392,15 @@
</glossdef>
</glossentry>
- <glossentry id='var-GROUPMEMS_PARAM'><glossterm>GROUPMEMS_PARAM</glossterm>
+ <glossentry id='var-GROUPMEMS_PARAM'><glossterm><imagedata fileref="figures/define-generic.png" />GROUPMEMS_PARAM</glossterm>
+ <info>
+ GROUPMEMS_PARAM[doc] = "When a recipe inherits the useradd class, this variable specifies for a package what parameters should be passed to the groupmems command if you wish to modify the members of a group when the package is installed."
+ </info>
<glossdef>
<para>
- When a recipe inherits the
- <filename>useradd</filename> class, this variable
+ When inheriting the
+ <link linkend='ref-classes-useradd'><filename>useradd</filename></link>
+ class, this variable
specifies for a package what parameters should be passed
to the <filename>groupmems</filename> command
if you wish to modify the members of a group when the
@@ -2922,7 +3415,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-GRUB_GFXSERIAL'><glossterm>GRUB_GFXSERIAL</glossterm>
+ <glossentry id='var-GRUB_GFXSERIAL'><glossterm><imagedata fileref="figures/define-generic.png" />GRUB_GFXSERIAL</glossterm>
+ <info>
+ GRUB_GFXSERIAL[doc] = "Configures the GNU GRand Unified Bootloader (GRUB) to have graphics and serial in the boot menu."
+ </info>
<glossdef>
<para>
Configures the GNU GRand Unified Bootloader (GRUB) to have
@@ -2941,7 +3437,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-GRUB_OPTS'><glossterm>GRUB_OPTS</glossterm>
+ <glossentry id='var-GRUB_OPTS'><glossterm><imagedata fileref="figures/define-generic.png" />GRUB_OPTS</glossterm>
+ <info>
+ GRUB_OPTS[doc] = "Additional options to add to the GNU GRand Unified Bootloader (GRUB) configuration."
+ </info>
<glossdef>
<para>
Additional options to add to the GNU GRand Unified
@@ -2959,7 +3458,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-GRUB_TIMEOUT'><glossterm>GRUB_TIMEOUT</glossterm>
+ <glossentry id='var-GRUB_TIMEOUT'><glossterm><imagedata fileref="figures/define-generic.png" />GRUB_TIMEOUT</glossterm>
+ <info>
+ GRUB_TIMEOUT[doc] = "Specifies the timeout before executing the default LABEL in the GNU GRand Unified Bootloader (GRUB)."
+ </info>
<glossdef>
<para>
Specifies the timeout before executing the default
@@ -2976,10 +3478,13 @@
</glossdef>
</glossentry>
- <glossentry id='var-GTKIMMODULES_PACKAGES'><glossterm>GTKIMMODULES_PACKAGES</glossterm>
+ <glossentry id='var-GTKIMMODULES_PACKAGES'><glossterm><imagedata fileref="figures/define-generic.png" />GTKIMMODULES_PACKAGES</glossterm>
+ <info>
+ GTKIMMODULES_PACKAGES[doc] = "For recipes that inherit the gtk-immodules-cache class, this variable specifies the packages that contain the GTK+ input method modules being installed when the modules are in packages other than the main package."
+ </info>
<glossdef>
<para>
- For recipes that inherit the
+ When inheriting the
<link linkend='ref-classes-gtk-immodules-cache'><filename>gtk-immodules-cache</filename></link>
class, this variable specifies the packages that contain the
GTK+ input method modules being installed when the modules
@@ -2988,18 +3493,107 @@
</glossdef>
</glossentry>
+ <glossentry id='var-GUMMIBOOT_CFG'><glossterm><imagedata fileref="figures/define-generic.png" />GUMMIBOOT_CFG</glossterm>
+ <info>
+ GUMMIBOOT_CFG[doc] = "When EFI_PROVIDER is set to "gummiboot", the GUMMIBOOT_CFG variable specifies the configuration file that should be used."
+ </info>
+ <glossdef>
+ <para>
+ When
+ <link linkend='var-EFI_PROVIDER'><filename>EFI_PROVIDER</filename></link>
+ is set to "gummiboot", the
+ <filename>GUMMIBOOT_CFG</filename> variable specifies the
+ configuration file that should be used.
+ By default, the
+ <link linkend='ref-classes-gummiboot'><filename>gummiboot</filename></link>
+ class sets the <filename>GUMMIBOOT_CFG</filename> as
+ follows:
+ <literallayout class='monospaced'>
+ GUMMIBOOT_CFG ?= "${<link linkend='var-S'>S</link>}/loader.conf"
+ </literallayout>
+ </para>
+
+ <para>
+ For information on Gummiboot, see the
+ <ulink url='http://freedesktop.org/wiki/Software/gummiboot/'>Gummiboot documentation</ulink>.
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-GUMMIBOOT_ENTRIES'><glossterm><imagedata fileref="figures/define-generic.png" />GUMMIBOOT_ENTRIES</glossterm>
+ <info>
+ GUMMIBOOT_ENTRIES[doc] = "When EFI_PROVIDER is set to "gummiboot", the GUMMIBOOT_ENTRIES variable specifies a list of entry files (*.conf) to be installed containing one boot entry per file."
+ </info>
+ <glossdef>
+ <para>
+ When
+ <link linkend='var-EFI_PROVIDER'><filename>EFI_PROVIDER</filename></link>
+ is set to "gummiboot", the
+ <filename>GUMMIBOOT_ENTRIES</filename> variable specifies
+ a list of entry files
+ (<filename>*.conf</filename>) to be installed
+ containing one boot entry per file.
+ By default, the
+ <link linkend='ref-classes-gummiboot'><filename>gummiboot</filename></link>
+ class sets the <filename>GUMMIBOOT_ENTRIES</filename> as
+ follows:
+ <literallayout class='monospaced'>
+ GUMMIBOOT_ENTRIES ?= ""
+ </literallayout>
+ </para>
+
+ <para>
+ For information on Gummiboot, see the
+ <ulink url='http://freedesktop.org/wiki/Software/gummiboot/'>Gummiboot documentation</ulink>.
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-GUMMIBOOT_TIMEOUT'><glossterm><imagedata fileref="figures/define-generic.png" />GUMMIBOOT_TIMEOUT</glossterm>
+ <info>
+ GUMMIBOOT_TIMEOUT[doc] = "When EFI_PROVIDER is set to "gummiboot", the GUMMIBOOT_TIMEOUT variable specifies the boot menu timeout in seconds."
+ </info>
+ <glossdef>
+ <para>
+ When
+ <link linkend='var-EFI_PROVIDER'><filename>EFI_PROVIDER</filename></link>
+ is set to "gummiboot", the
+ <filename>GUMMIBOOT_TIMEOUT</filename> variable specifies
+ the boot menu timeout in seconds.
+ By default, the
+ <link linkend='ref-classes-gummiboot'><filename>gummiboot</filename></link>
+ class sets the <filename>GUMMIBOOT_TIMEOUT</filename> as
+ follows:
+ <literallayout class='monospaced'>
+ GUMMIBOOT_TIMEOUT ?= "10"
+ </literallayout>
+ </para>
+
+ <para>
+ For information on Gummiboot, see the
+ <ulink url='http://freedesktop.org/wiki/Software/gummiboot/'>Gummiboot documentation</ulink>.
+ </para>
+ </glossdef>
+ </glossentry>
+
</glossdiv>
<glossdiv id='var-glossary-h'><title>H</title>
- <glossentry id='var-HOMEPAGE'><glossterm>HOMEPAGE</glossterm>
+ <glossentry id='var-HOMEPAGE'><glossterm><imagedata fileref="figures/define-generic.png" />HOMEPAGE</glossterm>
+ <info>
+ HOMEPAGE[doc] = "Website where more information about the software the recipe is building can be found."
+ </info>
<glossdef>
<para>Website where more information about the software the recipe is building
can be found.</para>
</glossdef>
</glossentry>
- <glossentry id='var-HOST_CC_ARCH'><glossterm>HOST_CC_ARCH</glossterm>
+ <glossentry id='var-HOST_CC_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />HOST_CC_ARCH</glossterm>
+ <info>
+ HOST_CC_ARCH[doc] = "The name of the host architecture. Normally the same as TARGET_CC_ARCH."
+ </info>
<glossdef>
<para>
Specifies architecture-specific compiler flags that are
@@ -3029,7 +3623,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-HOST_SYS'><glossterm>HOST_SYS</glossterm>
+ <glossentry id='var-HOST_SYS'><glossterm><imagedata fileref="figures/define-generic.png" />HOST_SYS</glossterm>
+ <info>
+ HOST_SYS[doc] = "Specifies the system, including the architecture and the operating system, for with the build is occurring in the context of the current recipe."
+ </info>
<glossdef>
<para>
Specifies the system, including the architecture and the
@@ -3061,7 +3658,10 @@
<glossdiv id='var-glossary-i'><title>I</title>
- <glossentry id='var-ICECC_DISABLED'><glossterm>ICECC_DISABLED</glossterm>
+ <glossentry id='var-ICECC_DISABLED'><glossterm><imagedata fileref="figures/define-generic.png" />ICECC_DISABLED</glossterm>
+ <info>
+ ICECC_DISABLED[doc] = "Disables or enables the icecc (Icecream) function."
+ </info>
<glossdef>
<para>
Disables or enables the <filename>icecc</filename>
@@ -3086,7 +3686,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-ICECC_ENV_EXEC'><glossterm>ICECC_ENV_EXEC</glossterm>
+ <glossentry id='var-ICECC_ENV_EXEC'><glossterm><imagedata fileref="figures/define-generic.png" />ICECC_ENV_EXEC</glossterm>
+ <info>
+ ICECC_ENV_EXEC[doc] = "Points to the icecc-create-env script that you provide."
+ </info>
<glossdef>
<para>
Points to the <filename>icecc-create-env</filename> script
@@ -3108,7 +3711,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-ICECC_PARALLEL_MAKE'><glossterm>ICECC_PARALLEL_MAKE</glossterm>
+ <glossentry id='var-ICECC_PARALLEL_MAKE'><glossterm><imagedata fileref="figures/define-generic.png" />ICECC_PARALLEL_MAKE</glossterm>
+ <info>
+ ICECC_PARALLEL_MAKE[doc] = "Extra options passed to the make command during the do_compile task that specify parallel compilation."
+ </info>
<glossdef>
<para>
Extra options passed to the <filename>make</filename>
@@ -3149,7 +3755,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-ICECC_PATH'><glossterm>ICECC_PATH</glossterm>
+ <glossentry id='var-ICECC_PATH'><glossterm><imagedata fileref="figures/define-generic.png" />ICECC_PATH</glossterm>
+ <info>
+ ICECC_PATH[doc] = "The location of the icecc binary."
+ </info>
<glossdef>
<para>
The location of the <filename>icecc</filename> binary.
@@ -3164,7 +3773,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-ICECC_USER_CLASS_BL'><glossterm>ICECC_USER_CLASS_BL</glossterm>
+ <glossentry id='var-ICECC_USER_CLASS_BL'><glossterm><imagedata fileref="figures/define-generic.png" />ICECC_USER_CLASS_BL</glossterm>
+ <info>
+ ICECC_USER_CLASS_BL[doc] = "Identifies user classes that you do not want the Icecream distributed compile support to consider."
+ </info>
<glossdef>
<para>
Identifies user classes that you do not want the
@@ -3186,7 +3798,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-ICECC_USER_PACKAGE_BL'><glossterm>ICECC_USER_PACKAGE_BL</glossterm>
+ <glossentry id='var-ICECC_USER_PACKAGE_BL'><glossterm><imagedata fileref="figures/define-generic.png" />ICECC_USER_PACKAGE_BL</glossterm>
+ <info>
+ ICECC_USER_PACKAGE_BL[doc] = "Identifies user recipes that you do not want the Icecream distributed compile support to consider."
+ </info>
<glossdef>
<para>
Identifies user recipes that you do not want the
@@ -3208,7 +3823,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-ICECC_USER_PACKAGE_WL'><glossterm>ICECC_USER_PACKAGE_WL</glossterm>
+ <glossentry id='var-ICECC_USER_PACKAGE_WL'><glossterm><imagedata fileref="figures/define-generic.png" />ICECC_USER_PACKAGE_WL</glossterm>
+ <info>
+ ICECC_USER_PACKAGE_WL[doc] = "Identifies user recipes that use an empty PARALLEL_MAKE variable that you want to force remote distributed compilation on using the Icecream distributed compile support."
+ </info>
<glossdef>
<para>
Identifies user recipes that use an empty
@@ -3225,7 +3843,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_BASENAME'><glossterm>IMAGE_BASENAME</glossterm>
+ <glossentry id='var-IMAGE_BASENAME'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_BASENAME</glossterm>
+ <info>
+ IMAGE_BASENAME[doc] = "The base name of image output files."
+ </info>
<glossdef>
<para>
The base name of image output files.
@@ -3235,7 +3856,56 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_CLASSES'><glossterm>IMAGE_CLASSES</glossterm>
+ <glossentry id='var-IMAGE_BOOT_FILES'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_BOOT_FILES</glossterm>
+ <info>
+ IMAGE_BOOT_FILES[doc] = "A space-separated list of files from ${DEPLOY_DIR_IMAGE} to place in the boot partition. Entries are installed under the same name as the source file. To change the destination file name, pass the desired name after a semicolon (e.g. u-boot.img;uboot)."
+ </info>
+ <glossdef>
+ <para>
+ A space-separated list of files installed into the
+ boot partition when preparing an image using the
+ <filename>wic</filename> tool with the
+ <filename>bootimg-partition</filename> source
+ plugin.
+ By default, the files are installed under
+ the same name as the source files.
+ To change the installed name, separate it from the
+ original name with a semi-colon (;).
+ Source files need to be located in
+ <link linkend='var-DEPLOY_DIR_IMAGE'><filename>DEPLOY_DIR_IMAGE</filename></link>.
+ Here are two examples:
+
+ <literallayout class="monospaced">
+ IMAGE_BOOT_FILES = "u-boot.img uImage;kernel"
+ IMAGE_BOOT_FILES = "u-boot.${UBOOT_SUFFIX} ${KERNEL_IMAGETYPE}"
+ </literallayout>
+ Alternatively, source files can be picked up using
+ a glob pattern.
+ In this case, the destination file
+ will have the same name as the base name of the source file
+ path.
+ To install files into a directory within the
+ target location, pass its name after a semi-colon
+ (;).
+ Here are two examples:
+ <literallayout class="monospaced">
+ IMAGE_BOOT_FILES = "bcm2835-bootfiles/*"
+ IMAGE_BOOT_FILES = "bcm2835-bootfiles/*;boot/"
+ </literallayout>
+ The first example installs all files from
+ <filename>${DEPLOY_DIR_IMAGE}/bcm2835-bootfiles</filename>
+ into the root of the target partition.
+ The second example installs the same files into a
+ <filename>boot</filename> directory within the
+ target partition.
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-IMAGE_CLASSES'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_CLASSES</glossterm>
+ <info>
+ IMAGE_CLASSES[doc] = "A list of classes that all images should inherit."
+ </info>
<glossdef>
<para>
A list of classes that all images should inherit.
@@ -3260,7 +3930,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_CMD'><glossterm>IMAGE_CMD</glossterm>
+ <glossentry id='var-IMAGE_CMD'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_CMD</glossterm>
+ <info>
+ IMAGE_CMD[doc] = "Specifies the command to create the image file for a specific image type, which corresponds to the value set in IMAGE_FSTYPES (e.g. ext3, btrfs, and so forth)."
+ </info>
<glossdef>
<para>
Specifies the command to create the image file for a
@@ -3287,7 +3960,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_DEVICE_TABLES'><glossterm>IMAGE_DEVICE_TABLES</glossterm>
+ <glossentry id='var-IMAGE_DEVICE_TABLES'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_DEVICE_TABLES</glossterm>
+ <info>
+ IMAGE_DEVICE_TABLES[doc] = "Specifies one or more files that contain custom device tables that are passed to the makedevs command as part of creating an image."
+ </info>
<glossdef>
<para>
Specifies one or more files that contain custom device
@@ -3307,7 +3983,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_FEATURES'><glossterm>IMAGE_FEATURES</glossterm>
+ <glossentry id='var-IMAGE_FEATURES'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_FEATURES</glossterm>
+ <info>
+ IMAGE_FEATURES[doc] = "The primary list of features to include in an image. Configure this variable in an image recipe."
+ </info>
<glossdef>
<para>
The primary list of features to include in an image.
@@ -3336,7 +4015,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_FSTYPES'><glossterm>IMAGE_FSTYPES</glossterm>
+ <glossentry id='var-IMAGE_FSTYPES'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_FSTYPES</glossterm>
+ <info>
+ IMAGE_FSTYPES[doc] = "Formats of root filesystem images that you want to have created."
+ </info>
<glossdef>
<para>
Specifies the formats the OpenEmbedded build system uses
@@ -3371,7 +4053,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_INSTALL'><glossterm>IMAGE_INSTALL</glossterm>
+ <glossentry id='var-IMAGE_INSTALL'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_INSTALL</glossterm>
+ <info>
+ IMAGE_INSTALL[doc] = "Specifies the packages to install into an image. Image recipes set IMAGE_INSTALL through image.bbclass to specify the packages to install."
+ </info>
<glossdef>
<para>
Specifies the packages to install into an image.
@@ -3426,7 +4111,7 @@
<para>
When you use this variable, it is best to use it as follows:
<literallayout class='monospaced'>
- IMAGE_INSTALL_append = " package-name"
+ IMAGE_INSTALL_append = " <replaceable>package-name</replaceable>"
</literallayout>
Be sure to include the space between the quotation character
and the start of the package name or names.
@@ -3434,7 +4119,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_LINGUAS'><glossterm>IMAGE_LINGUAS</glossterm>
+ <glossentry id='var-IMAGE_LINGUAS'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_LINGUAS</glossterm>
+ <info>
+ IMAGE_LINGUAS[doc] = "Specifies the list of locales to install into the image during the root filesystem construction process."
+ </info>
<glossdef>
<para>
Specifies the list of locales to install into the image
@@ -3460,10 +4148,19 @@
packages only provide locale files by language and not by
country-specific language).
</para>
+
+ <para>
+ See the
+ <link linkend='var-GLIBC_GENERATE_LOCALES'><filename>GLIBC_GENERATE_LOCALES</filename></link>
+ variable for information on generating GLIBC locales.
+ </para>
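+
+ <para>
+ For illustration only (the locale names shown are
+ arbitrary examples), an image recipe or
+ <filename>local.conf</filename> might set the
+ variable as follows:
+ <literallayout class='monospaced'>
+ # illustrative locale list only
+ IMAGE_LINGUAS = "pt-br de-de"
+ </literallayout>
+ </para>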
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_MANIFEST'><glossterm>IMAGE_MANIFEST</glossterm>
+ <glossentry id='var-IMAGE_MANIFEST'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_MANIFEST</glossterm>
+ <info>
+ IMAGE_MANIFEST[doc] = "The manifest file for the image. This file lists all the installed packages that make up the image."
+ </info>
<glossdef>
<para>
The manifest file for the image.
@@ -3472,7 +4169,7 @@
The file contains package information on a line-per-package
basis as follows:
<literallayout class='monospaced'>
- &lt;packagename&gt; &lt;packagearch&gt; &lt;version&gt;
+ <replaceable>packagename</replaceable> <replaceable>packagearch</replaceable> <replaceable>version</replaceable>
</literallayout>
</para>
@@ -3496,7 +4193,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_NAME'><glossterm>IMAGE_NAME</glossterm>
+ <glossentry id='var-IMAGE_NAME'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_NAME</glossterm>
+ <info>
+ IMAGE_NAME[doc] = "The name of the output image files minus the extension."
+ </info>
<glossdef>
<para>
The name of the output image files minus the extension.
@@ -3513,7 +4213,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_OVERHEAD_FACTOR'><glossterm>IMAGE_OVERHEAD_FACTOR</glossterm>
+ <glossentry id='var-IMAGE_OVERHEAD_FACTOR'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_OVERHEAD_FACTOR</glossterm>
+ <info>
+ IMAGE_OVERHEAD_FACTOR[doc] = "Defines a multiplier that the build system applies to the initial image size for cases when the multiplier times the returned disk usage value for the image is greater than the sum of IMAGE_ROOTFS_SIZE and IMAGE_ROOTFS_EXTRA_SPACE."
+ </info>
<glossdef>
<para>
Defines a multiplier that the build system applies to the initial image
@@ -3555,7 +4258,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_PKGTYPE'><glossterm>IMAGE_PKGTYPE</glossterm>
+ <glossentry id='var-IMAGE_PKGTYPE'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_PKGTYPE</glossterm>
+ <info>
+ IMAGE_PKGTYPE[doc] = "Defines the package type (DEB, RPM, IPK, or TAR) used by the OpenEmbedded build system."
+ </info>
<glossdef>
<para>
Defines the package type (DEB, RPM, IPK, or TAR) used
@@ -3598,14 +4304,17 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_POSTPROCESS_COMMAND'><glossterm>IMAGE_POSTPROCESS_COMMAND</glossterm>
+ <glossentry id='var-IMAGE_POSTPROCESS_COMMAND'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_POSTPROCESS_COMMAND</glossterm>
+ <info>
+ IMAGE_POSTPROCESS_COMMAND[doc] = "Added by classes to run post processing commands once the OpenEmbedded build system has created the image."
+ </info>
<glossdef>
<para>
Added by classes to run post processing commands once the
OpenEmbedded build system has created the image.
You can specify shell commands separated by semicolons:
<literallayout class='monospaced'>
- IMAGE_POSTPROCESS_COMMAND += "&lt;shell_command&gt;; ... "
+ IMAGE_POSTPROCESS_COMMAND += "<replaceable>shell_command</replaceable>; ... "
</literallayout>
If you need to pass the path to the root filesystem within
the command, you can use
@@ -3615,7 +4324,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_ROOTFS'><glossterm>IMAGE_ROOTFS</glossterm>
+ <glossentry id='var-IMAGE_ROOTFS'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_ROOTFS</glossterm>
+ <info>
+ IMAGE_ROOTFS[doc] = "The location of the root filesystem while it is under construction (i.e. during do_rootfs)."
+ </info>
<glossdef>
<para>
The location of the root filesystem while it is under
@@ -3628,7 +4340,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_ROOTFS_ALIGNMENT'><glossterm>IMAGE_ROOTFS_ALIGNMENT</glossterm>
+ <glossentry id='var-IMAGE_ROOTFS_ALIGNMENT'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_ROOTFS_ALIGNMENT</glossterm>
+ <info>
+ IMAGE_ROOTFS_ALIGNMENT[doc] = "Specifies the alignment for the output image file in Kbytes."
+ </info>
<glossdef>
<para>
Specifies the alignment for the output image file in
@@ -3644,7 +4359,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_ROOTFS_EXTRA_SPACE'><glossterm>IMAGE_ROOTFS_EXTRA_SPACE</glossterm>
+ <glossentry id='var-IMAGE_ROOTFS_EXTRA_SPACE'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_ROOTFS_EXTRA_SPACE</glossterm>
+ <info>
+ IMAGE_ROOTFS_EXTRA_SPACE[doc] = "Defines additional free disk space created in the image in Kbytes. By default, this variable is set to '0'."
+ </info>
<glossdef>
<para>
Defines additional free disk space created in the image in Kbytes.
@@ -3675,7 +4393,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_ROOTFS_SIZE'><glossterm>IMAGE_ROOTFS_SIZE</glossterm>
+ <glossentry id='var-IMAGE_ROOTFS_SIZE'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_ROOTFS_SIZE</glossterm>
+ <info>
+ IMAGE_ROOTFS_SIZE[doc] = "Defines the size in Kbytes for the generated image."
+ </info>
<glossdef>
<para>
Defines the size in Kbytes for the generated image.
@@ -3718,7 +4439,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_TYPEDEP'><glossterm>IMAGE_TYPEDEP</glossterm>
+ <glossentry id='var-IMAGE_TYPEDEP'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_TYPEDEP</glossterm>
+ <info>
+ IMAGE_TYPEDEP[doc] = "Specifies a dependency from one image type on another."
+ </info>
<glossdef>
<para>
Specifies a dependency from one image type on another.
@@ -3741,7 +4465,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-IMAGE_TYPES'><glossterm>IMAGE_TYPES</glossterm>
+ <glossentry id='var-IMAGE_TYPES'><glossterm><imagedata fileref="figures/define-generic.png" />IMAGE_TYPES</glossterm>
+ <info>
+ IMAGE_TYPES[doc] = "Specifies the complete list of supported image types by default."
+ </info>
<glossdef>
<para>
Specifies the complete list of supported image types
@@ -3781,7 +4508,10 @@
</glossdef>
</glossentry>
- <glossentry id='var-INC_PR'><glossterm>INC_PR</glossterm>
+ <glossentry id='var-INC_PR'><glossterm><imagedata fileref="figures/define-generic.png" />INC_PR</glossterm>
+ <info>
+ INC_PR[doc] = "Helps define the recipe revision for recipes that share a common include file."
+ </info>
<glossdef>
<para>Helps define the recipe revision for recipes that share
a common <filename>include</filename> file.
@@ -3834,7 +4564,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INCOMPATIBLE_LICENSE'><glossterm>INCOMPATIBLE_LICENSE</glossterm>
+ <glossentry id='var-INCOMPATIBLE_LICENSE'><glossterm><imagedata fileref="figures/define-generic.png" />INCOMPATIBLE_LICENSE</glossterm>
+ <info>
+ INCOMPATIBLE_LICENSE[doc] = "Specifies a space-separated list of license names (as they would appear in LICENSE) that should be excluded from the build."
+ </info>
<glossdef>
<para>
Specifies a space-separated list of license names
@@ -3861,7 +4594,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INHIBIT_DEFAULT_DEPS'><glossterm>INHIBIT_DEFAULT_DEPS</glossterm>
+ <glossentry id='var-INHIBIT_DEFAULT_DEPS'><glossterm><imagedata fileref="figures/define-generic.png" />INHIBIT_DEFAULT_DEPS</glossterm>
+ <info>
+ INHIBIT_DEFAULT_DEPS[doc] = "Prevents the default dependencies, namely the C compiler and standard C library (libc), from being added to DEPENDS."
+ </info>
<glossdef>
<para>
Prevents the default dependencies, namely the C compiler
@@ -3878,7 +4614,41 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INHIBIT_PACKAGE_STRIP'><glossterm>INHIBIT_PACKAGE_STRIP</glossterm>
+ <glossentry id='var-INHIBIT_PACKAGE_DEBUG_SPLIT'><glossterm><imagedata fileref="figures/define-generic.png" />INHIBIT_PACKAGE_DEBUG_SPLIT</glossterm>
+ <info>
+ INHIBIT_PACKAGE_DEBUG_SPLIT[doc] = "If set to '1', prevents the OpenEmbedded build system from splitting out debug information during packaging."
+ </info>
+ <glossdef>
+
+ <para>
+ Prevents the OpenEmbedded build system from splitting
+ out debug information during packaging.
+ By default, the build system splits out debugging
+ information during the
+ <link linkend='ref-tasks-package'><filename>do_package</filename></link>
+ task.
+ For more information on how debug information is split out,
+ see the
+ <link linkend='var-PACKAGE_DEBUG_SPLIT_STYLE'><filename>PACKAGE_DEBUG_SPLIT_STYLE</filename></link>
+ variable.
+ </para>
+
+ <para>
+ To prevent the build system from splitting out
+ debug information during packaging, set the
+ <filename>INHIBIT_PACKAGE_DEBUG_SPLIT</filename> variable
+ as follows:
+ <literallayout class='monospaced'>
+ INHIBIT_PACKAGE_DEBUG_SPLIT = "1"
+ </literallayout>
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-INHIBIT_PACKAGE_STRIP'><glossterm><imagedata fileref="figures/define-generic.png" />INHIBIT_PACKAGE_STRIP</glossterm>
+ <info>
+ INHIBIT_PACKAGE_STRIP[doc] = "If set to '1', causes the build to not strip binaries in resulting packages."
+ </info>
<glossdef>
<para>
If set to "1", causes the build to not strip binaries in resulting packages.
@@ -3886,7 +4656,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INHERIT'><glossterm>INHERIT</glossterm>
+ <glossentry id='var-INHERIT'><glossterm><imagedata fileref="figures/define-generic.png" />INHERIT</glossterm>
+ <info>
+ INHERIT[doc] = "Causes the named class to be inherited at this point during parsing. The variable is only valid in configuration files."
+ </info>
<glossdef>
<para>
Causes the named class to be inherited at
@@ -3896,7 +4669,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INHERIT_DISTRO'><glossterm>INHERIT_DISTRO</glossterm>
+ <glossentry id='var-INHERIT_DISTRO'><glossterm><imagedata fileref="figures/define-generic.png" />INHERIT_DISTRO</glossterm>
+ <info>
+ INHERIT_DISTRO[doc] = "Lists classes that will be inherited at the distribution level. It is unlikely that you want to edit this variable."
+ </info>
<glossdef>
<para>
Lists classes that will be inherited at the
@@ -3915,7 +4691,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INITRAMFS_FSTYPES'><glossterm>INITRAMFS_FSTYPES</glossterm>
+ <glossentry id='var-INITRAMFS_FSTYPES'><glossterm><imagedata fileref="figures/define-generic.png" />INITRAMFS_FSTYPES</glossterm>
+ <info>
+ INITRAMFS_FSTYPES[doc] = "Defines the format for the output image of an initial RAM disk (initramfs), which is used during boot."
+ </info>
<glossdef>
<para>
Defines the format for the output image of an initial
@@ -3927,7 +4706,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INITRAMFS_IMAGE'><glossterm>INITRAMFS_IMAGE</glossterm>
+ <glossentry id='var-INITRAMFS_IMAGE'><glossterm><imagedata fileref="figures/define-generic.png" />INITRAMFS_IMAGE</glossterm>
+ <info>
+ INITRAMFS_IMAGE[doc] = "Causes the OpenEmbedded build system to build an additional recipe as a dependency to your root filesystem recipe (e.g. core-image-sato)."
+ </info>
<glossdef>
<para>
Causes the OpenEmbedded build system to build an additional
@@ -3970,7 +4752,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INITRAMFS_IMAGE_BUNDLE'><glossterm>INITRAMFS_IMAGE_BUNDLE</glossterm>
+ <glossentry id='var-INITRAMFS_IMAGE_BUNDLE'><glossterm><imagedata fileref="figures/define-generic.png" />INITRAMFS_IMAGE_BUNDLE</glossterm>
+ <info>
+ INITRAMFS_IMAGE_BUNDLE[doc] = "Controls whether or not the image recipe specified by INITRAMFS_IMAGE is run through an extra pass during kernel compilation in order to build a single binary that contains both the kernel image and the initial RAM disk (initramfs)."
+ </info>
<glossdef>
<para>
Controls whether or not the image recipe specified by
@@ -4017,7 +4802,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INITRD'><glossterm>INITRD</glossterm>
+ <glossentry id='var-INITRD'><glossterm><imagedata fileref="figures/define-generic.png" />INITRD</glossterm>
+ <info>
+ INITRD[doc] = "Indicates a list of filesystem images to concatenate and use as an initial RAM disk (initrd)."
+ </info>
<glossdef>
<para>
Indicates list of filesystem images to concatenate and use
@@ -4033,7 +4821,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INITRD_IMAGE'><glossterm>INITRD_IMAGE</glossterm>
+ <glossentry id='var-INITRD_IMAGE'><glossterm><imagedata fileref="figures/define-generic.png" />INITRD_IMAGE</glossterm>
+ <info>
+ INITRD_IMAGE[doc] = "When building a 'live' bootable image (i.e. when IMAGE_FSTYPES contains 'live'), INITRD_IMAGE specifies the image recipe that should be built to provide the initial RAM disk image."
+ </info>
<glossdef>
<para>
When building a "live" bootable image (i.e. when
@@ -4052,7 +4843,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INITSCRIPT_NAME'><glossterm>INITSCRIPT_NAME</glossterm>
+ <glossentry id='var-INITSCRIPT_NAME'><glossterm><imagedata fileref="figures/define-generic.png" />INITSCRIPT_NAME</glossterm>
+ <info>
+ INITSCRIPT_NAME[doc] = "The filename of the initialization script as installed to ${sysconfdir}/init.d."
+ </info>
<glossdef>
<para>
The filename of the initialization script as installed to
@@ -4065,7 +4859,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INITSCRIPT_PACKAGES'><glossterm>INITSCRIPT_PACKAGES</glossterm>
+ <glossentry id='var-INITSCRIPT_PACKAGES'><glossterm><imagedata fileref="figures/define-generic.png" />INITSCRIPT_PACKAGES</glossterm>
+ <info>
+ INITSCRIPT_PACKAGES[doc] = "A list of the packages that contain initscripts. This variable is used in recipes when using update-rc.d.bbclass. The variable is optional and defaults to the PN variable."
+ </info>
<glossdef>
<para>
A list of the packages that contain initscripts.
@@ -4079,7 +4876,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INITSCRIPT_PARAMS'><glossterm>INITSCRIPT_PARAMS</glossterm>
+ <glossentry id='var-INITSCRIPT_PARAMS'><glossterm><imagedata fileref="figures/define-generic.png" />INITSCRIPT_PARAMS</glossterm>
+ <info>
+ INITSCRIPT_PARAMS[doc] = "Specifies the options to pass to update-rc.d. The variable is mandatory and is used in recipes when using update-rc.d.bbclass."
+ </info>
<glossdef>
<para>
Specifies the options to pass to <filename>update-rc.d</filename>.
@@ -4108,7 +4908,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-INSANE_SKIP'><glossterm>INSANE_SKIP</glossterm>
+ <glossentry id='var-INSANE_SKIP'><glossterm><imagedata fileref="figures/define-generic.png" />INSANE_SKIP</glossterm>
+ <info>
+ INSANE_SKIP[doc] = "Specifies the QA checks to skip for a specific package within a recipe."
+ </info>
<glossdef>
<para>
Specifies the QA checks to skip for a specific package
@@ -4130,7 +4933,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-IPK_FEED_URIS'><glossterm>IPK_FEED_URIS</glossterm>
+ <glossentry id='var-IPK_FEED_URIS'><glossterm><imagedata fileref="figures/define-generic.png" />IPK_FEED_URIS</glossterm>
+ <info>
+ IPK_FEED_URIS[doc] = "List of ipkg feed records to put into the generated image."
+ </info>
<glossdef>
<para>
When the IPK backend is in use and package management
@@ -4186,7 +4992,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-k'><title>K</title>
- <glossentry id='var-KARCH'><glossterm>KARCH</glossterm>
+ <glossentry id='var-KARCH'><glossterm><imagedata fileref="figures/define-generic.png" />KARCH</glossterm>
+ <info>
+ KARCH[doc] = "Defines the kernel architecture used when assembling the configuration. You define the KARCH variable in the BSP Descriptions."
+ </info>
<glossdef>
<para>
Defines the kernel architecture used when assembling
@@ -4209,7 +5018,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-KBRANCH'><glossterm>KBRANCH</glossterm>
+ <glossentry id='var-KBRANCH'><glossterm><imagedata fileref="figures/define-generic.png" />KBRANCH</glossterm>
+ <info>
+ KBRANCH[doc] = "A regular expression used by the build process to explicitly identify the kernel branch that is validated, patched and configured during a build."
+ </info>
<glossdef>
<para>
A regular expression used by the build process to explicitly
@@ -4270,7 +5082,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-KBRANCH_DEFAULT'><glossterm>KBRANCH_DEFAULT</glossterm>
+ <glossentry id='var-KBRANCH_DEFAULT'><glossterm><imagedata fileref="figures/define-generic.png" />KBRANCH_DEFAULT</glossterm>
+ <info>
+ KBRANCH_DEFAULT[doc] = "Defines the Linux kernel source repository's default branch used to build the Linux kernel. Unless you specify otherwise, the variable initializes to 'master'."
+ </info>
<glossdef>
<para>
Defines the Linux kernel source repository's default
@@ -4285,7 +5100,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-KERNEL_EXTRA_ARGS'><glossterm>KERNEL_EXTRA_ARGS</glossterm>
+ <glossentry id='var-KERNEL_EXTRA_ARGS'><glossterm><imagedata fileref="figures/define-generic.png" />KERNEL_EXTRA_ARGS</glossterm>
+ <info>
+ KERNEL_EXTRA_ARGS[doc] = "Specifies additional make command-line arguments the OpenEmbedded build system passes on when compiling the kernel."
+ </info>
<glossdef>
<para>
Specifies additional <filename>make</filename>
@@ -4295,7 +5113,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-KERNEL_FEATURES'><glossterm>KERNEL_FEATURES</glossterm>
+ <glossentry id='var-KERNEL_FEATURES'><glossterm><imagedata fileref="figures/define-generic.png" />KERNEL_FEATURES</glossterm>
+ <info>
+ KERNEL_FEATURES[doc] = "Includes additional metadata from the Yocto Project kernel Git repository. The metadata you add through this variable includes config fragments and feature descriptions."
+ </info>
<glossdef>
<para>Includes additional metadata from the Yocto Project kernel Git repository.
In the OpenEmbedded build system, the default Board Support Packages (BSPs)
@@ -4325,7 +5146,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-KERNEL_IMAGE_BASE_NAME'><glossterm>KERNEL_IMAGE_BASE_NAME</glossterm>
+ <glossentry id='var-KERNEL_IMAGE_BASE_NAME'><glossterm><imagedata fileref="figures/define-generic.png" />KERNEL_IMAGE_BASE_NAME</glossterm>
+ <info>
+ KERNEL_IMAGE_BASE_NAME[doc] = "The base name of the kernel image."
+ </info>
<glossdef>
<para>
The base name of the kernel image.
@@ -4348,7 +5172,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-KERNEL_IMAGETYPE'><glossterm>KERNEL_IMAGETYPE</glossterm>
+ <glossentry id='var-KERNEL_IMAGETYPE'><glossterm><imagedata fileref="figures/define-generic.png" />KERNEL_IMAGETYPE</glossterm>
+ <info>
+ KERNEL_IMAGETYPE[doc] = "The type of kernel to build for a device, usually set by the machine configuration files and defaults to 'zImage'."
+ </info>
<glossdef>
<para>The type of kernel to build for a device, usually set by the
machine configuration files and defaults to "zImage".
@@ -4358,13 +5185,89 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-KERNEL_PATH'><glossterm>KERNEL_PATH</glossterm>
+ <glossentry id='var-KERNEL_MODULE_AUTOLOAD'><glossterm><imagedata fileref="figures/define-generic.png" />KERNEL_MODULE_AUTOLOAD</glossterm>
+ <info>
+ KERNEL_MODULE_AUTOLOAD[doc] = "Lists kernel modules that need to be auto-loaded during boot."
+ </info>
+ <glossdef>
+ <para>
+ Lists kernel modules that need to be auto-loaded during
+ boot.
+ <note>
+ This variable replaces the deprecated
+ <link linkend='var-module_autoload'><filename>module_autoload</filename></link>
+ variable.
+ </note>
+ </para>
+
+ <para>
+ You can use the <filename>KERNEL_MODULE_AUTOLOAD</filename>
+ variable anywhere that it can be
+ recognized by the kernel recipe or by an out-of-tree kernel
+ module recipe (e.g. a machine configuration file, a
+ distribution configuration file, an append file for the
+ recipe, or the recipe itself).
+ </para>
+
+ <para>
+ Specify it as follows:
+ <literallayout class='monospaced'>
+ KERNEL_MODULE_AUTOLOAD += "<replaceable>module_name1</replaceable> <replaceable>module_name2</replaceable> <replaceable>module_name3</replaceable>"
+ </literallayout>
+ </para>
+
+ <para>
+ Including <filename>KERNEL_MODULE_AUTOLOAD</filename> causes
+ the OpenEmbedded build system to populate the
+ <filename>/etc/modules-load.d/modname.conf</filename>
+ file with the list of modules to be auto-loaded on boot.
+ The modules appear one-per-line in the file.
+ Here is an example of the most common use case:
+ <literallayout class='monospaced'>
+ KERNEL_MODULE_AUTOLOAD += "<replaceable>module_name</replaceable>"
+ </literallayout>
+ </para>
+
+ <para>
+ For information on how to populate the
+ <filename>modname.conf</filename> file with
+ <filename>modprobe.d</filename> syntax lines, see the
+ <link linkend='var-KERNEL_MODULE_PROBECONF'><filename>KERNEL_MODULE_PROBECONF</filename></link>
+ variable.
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-KERNEL_MODULE_PROBECONF'><glossterm><imagedata fileref="figures/define-generic.png" />KERNEL_MODULE_PROBECONF</glossterm>
+ <info>
+ KERNEL_MODULE_PROBECONF[doc] = "Lists kernel modules for which the build system expects to find module_conf_* values that specify configuration for each of the modules."
+ </info>
+ <glossdef>
+ <para>
+ Provides a list of modules for which the OpenEmbedded
+ build system expects to find
+ <filename>module_conf_</filename><replaceable>modname</replaceable>
+ values that specify configuration for each of the modules.
+ For information on how to provide those module
+ configurations, see the
+ <link linkend='var-module_conf'><filename>module_conf_*</filename></link>
+ variable.
+ </para>
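+
+ <para>
+ As an illustrative sketch only (the module name
+ <filename>mymodule</filename> and its options are
+ hypothetical), a recipe might pair the two variables
+ as follows:
+ <literallayout class='monospaced'>
+ # hypothetical module name and options
+ KERNEL_MODULE_PROBECONF += "mymodule"
+ module_conf_mymodule = "options mymodule arg1=val1"
+ </literallayout>
+ </para>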
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-KERNEL_PATH'><glossterm><imagedata fileref="figures/define-generic.png" />KERNEL_PATH</glossterm>
+ <info>
+ KERNEL_PATH[doc] = "The location of the kernel sources. This variable is set to the value of the STAGING_KERNEL_DIR within the module class (module.bbclass)."
+ </info>
<glossdef>
<para>
The location of the kernel sources.
This variable is set to the value of the
<link linkend='var-STAGING_KERNEL_DIR'><filename>STAGING_KERNEL_DIR</filename></link>
- within the <filename>module.bbclass</filename> class.
+ within the
+ <link linkend='ref-classes-module'><filename>module</filename></link>
+ class.
For information on how this variable is used, see the
"<ulink url='&YOCTO_DOCS_KERNEL_DEV_URL;#incorporating-out-of-tree-modules'>Incorporating Out-of-Tree Modules</ulink>"
section.
@@ -4383,13 +5286,18 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-KERNEL_SRC'><glossterm>KERNEL_SRC</glossterm>
+ <glossentry id='var-KERNEL_SRC'><glossterm><imagedata fileref="figures/define-generic.png" />KERNEL_SRC</glossterm>
+ <info>
+ KERNEL_SRC[doc] = "The location of the kernel sources. This variable is set to the value of the STAGING_KERNEL_DIR within the module class (module.bbclass)."
+ </info>
<glossdef>
<para>
The location of the kernel sources.
This variable is set to the value of the
<link linkend='var-STAGING_KERNEL_DIR'><filename>STAGING_KERNEL_DIR</filename></link>
- within the <filename>module.bbclass</filename> class.
+ within the
+ <link linkend='ref-classes-module'><filename>module</filename></link>
+ class.
For information on how this variable is used, see the
"<ulink url='&YOCTO_DOCS_KERNEL_DEV_URL;#incorporating-out-of-tree-modules'>Incorporating Out-of-Tree Modules</ulink>"
section.
@@ -4408,7 +5316,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-KFEATURE_DESCRIPTION'><glossterm>KFEATURE_DESCRIPTION</glossterm>
+ <glossentry id='var-KFEATURE_DESCRIPTION'><glossterm><imagedata fileref="figures/define-generic.png" />KFEATURE_DESCRIPTION</glossterm>
+ <info>
+ KFEATURE_DESCRIPTION[doc] = "Provides a short description of a configuration fragment. You use this variable in the .scc file that describes a configuration fragment file."
+ </info>
<glossdef>
<para>
Provides a short description of a configuration fragment.
@@ -4424,7 +5335,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-KMACHINE'><glossterm>KMACHINE</glossterm>
+ <glossentry id='var-KMACHINE'><glossterm><imagedata fileref="figures/define-generic.png" />KMACHINE</glossterm>
+ <info>
+ KMACHINE[doc] = "The machine as known by the kernel."
+ </info>
<glossdef>
<para>
The machine as known by the kernel.
@@ -4517,7 +5431,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-KTYPE'><glossterm>KTYPE</glossterm>
+ <glossentry id='var-KTYPE'><glossterm><imagedata fileref="figures/define-generic.png" />KTYPE</glossterm>
+ <info>
+ KTYPE[doc] = "Defines the kernel type to be used in assembling the configuration."
+ </info>
<glossdef>
<para>
Defines the kernel type to be used in assembling the
@@ -4543,7 +5460,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-l'><title>L</title>
- <glossentry id='var-LABELS'><glossterm>LABELS</glossterm>
+ <glossentry id='var-LABELS'><glossterm><imagedata fileref="figures/define-generic.png" />LABELS</glossterm>
+ <info>
+ LABELS[doc] = "Provides a list of targets for automatic configuration."
+ </info>
<glossdef>
<para>
Provides a list of targets for automatic configuration.
@@ -4557,7 +5477,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LAYERDEPENDS'><glossterm>LAYERDEPENDS</glossterm>
+ <glossentry id='var-LAYERDEPENDS'><glossterm><imagedata fileref="figures/define-generic.png" />LAYERDEPENDS</glossterm>
+ <info>
+ LAYERDEPENDS[doc] = "Lists the layers, separated by spaces, upon which this recipe depends. This variable is used in the conf/layer.conf file and must be suffixed with the name of the specific layer."
+ </info>
<glossdef>
<para>Lists the layers that this recipe depends upon, separated by spaces.
Optionally, you can specify a specific layer version for a dependency
@@ -4573,7 +5496,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LAYERDIR'><glossterm>LAYERDIR</glossterm>
+ <glossentry id='var-LAYERDIR'><glossterm><imagedata fileref="figures/define-generic.png" />LAYERDIR</glossterm>
+ <info>
+ LAYERDIR[doc] = "When used inside the layer.conf configuration file, this variable provides the path of the current layer."
+ </info>
<glossdef>
<para>When used inside the <filename>layer.conf</filename> configuration
file, this variable provides the path of the current layer.
@@ -4582,7 +5508,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LAYERVERSION'><glossterm>LAYERVERSION</glossterm>
+ <glossentry id='var-LAYERVERSION'><glossterm><imagedata fileref="figures/define-generic.png" />LAYERVERSION</glossterm>
+ <info>
+ LAYERVERSION[doc] = "Optionally specifies the version of a layer as a single number. This variable is used in the conf/layer.conf file and must be suffixed with the name of the specific layer."
+ </info>
<glossdef>
<para>Optionally specifies the version of a layer as a single number.
You can use this within
@@ -4595,7 +5524,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LDFLAGS'><glossterm>LDFLAGS</glossterm>
+ <glossentry id='var-LDFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />LDFLAGS</glossterm>
+ <info>
+ LDFLAGS[doc] = "Specifies the flags to pass to the linker."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the linker.
@@ -4627,7 +5559,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LEAD_SONAME'><glossterm>LEAD_SONAME</glossterm>
+ <glossentry id='var-LEAD_SONAME'><glossterm><imagedata fileref="figures/define-generic.png" />LEAD_SONAME</glossterm>
+ <info>
+ LEAD_SONAME[doc] = "Specifies the lead (or primary) compiled library file (.so) that the debian class applies its naming policy to given a recipe that packages multiple libraries."
+ </info>
<glossdef>
<para>
Specifies the lead (or primary) compiled library file
@@ -4644,7 +5579,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LIC_FILES_CHKSUM'><glossterm>LIC_FILES_CHKSUM</glossterm>
+ <glossentry id='var-LIC_FILES_CHKSUM'><glossterm><imagedata fileref="figures/define-generic.png" />LIC_FILES_CHKSUM</glossterm>
+ <info>
+ LIC_FILES_CHKSUM[doc] = "Checksums of the license text in the recipe source code."
+ </info>
<glossdef>
<para>Checksums of the license text in the recipe source code.</para>
<para>This variable tracks changes in license text of the source
@@ -4655,14 +5593,17 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<para>
This variable must be defined for all recipes (unless
<link linkend='var-LICENSE'><filename>LICENSE</filename></link>
- is set to "CLOSED")</para>
+ is set to "CLOSED").</para>
<para>For more information, see the
- <link linkend='usingpoky-configuring-LIC_FILES_CHKSUM'>
- Tracking License Changes</link> section</para>
+ "<link linkend='usingpoky-configuring-LIC_FILES_CHKSUM'>
+ Tracking License Changes</link>" section.</para>
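+
+ <para>
+ For illustration only, a typical assignment takes the
+ following general form, where the license file name and
+ checksum are placeholders:
+ <literallayout class='monospaced'>
+ LIC_FILES_CHKSUM = "file://COPYING;md5=<replaceable>md5sum</replaceable>"
+ </literallayout>
+ </para>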
</glossdef>
</glossentry>
- <glossentry id='var-LICENSE'><glossterm>LICENSE</glossterm>
+ <glossentry id='var-LICENSE'><glossterm><imagedata fileref="figures/define-generic.png" />LICENSE</glossterm>
+ <info>
+ LICENSE[doc] = "The list of source licenses for the recipe. The logical operators '&amp;' or '|' and parentheses can be used."
+ </info>
<glossdef>
<para>
The list of source licenses for the recipe.
@@ -4723,7 +5664,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LICENSE_FLAGS'><glossterm>LICENSE_FLAGS</glossterm>
+ <glossentry id='var-LICENSE_FLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />LICENSE_FLAGS</glossterm>
+ <info>
+ LICENSE_FLAGS[doc] = "Specifies additional flags for a recipe you must whitelist through LICENSE_FLAGS_WHITELIST in order to allow the recipe to be built."
+ </info>
<glossdef>
<para>
Specifies additional flags for a recipe you must
@@ -4747,7 +5691,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LICENSE_FLAGS_WHITELIST'><glossterm>LICENSE_FLAGS_WHITELIST</glossterm>
+ <glossentry id='var-LICENSE_FLAGS_WHITELIST'><glossterm><imagedata fileref="figures/define-generic.png" />LICENSE_FLAGS_WHITELIST</glossterm>
+ <info>
+ LICENSE_FLAGS_WHITELIST[doc] = "Lists license flags that when specified in LICENSE_FLAGS within a recipe should not prevent that recipe from being built."
+ </info>
<glossdef>
<para>
Lists license flags that when specified in
@@ -4763,7 +5710,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LICENSE_PATH'><glossterm>LICENSE_PATH</glossterm>
+ <glossentry id='var-LICENSE_PATH'><glossterm><imagedata fileref="figures/define-generic.png" />LICENSE_PATH</glossterm>
+ <info>
+ LICENSE_PATH[doc] = "Path to additional licenses used during the build."
+ </info>
<glossdef>
<para>Path to additional licenses used during the build.
By default, the OpenEmbedded build system uses <filename>COMMON_LICENSE_DIR</filename>
@@ -4771,12 +5721,15 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
The <filename>LICENSE_PATH</filename> variable allows you to extend that
location to other areas that have additional licenses:
<literallayout class='monospaced'>
- LICENSE_PATH += "/path/to/additional/common/licenses"
+ LICENSE_PATH += "<replaceable>path-to-additional-common-licenses</replaceable>"
</literallayout></para>
</glossdef>
</glossentry>
- <glossentry id='var-LINUX_KERNEL_TYPE'><glossterm>LINUX_KERNEL_TYPE</glossterm>
+ <glossentry id='var-LINUX_KERNEL_TYPE'><glossterm><imagedata fileref="figures/define-generic.png" />LINUX_KERNEL_TYPE</glossterm>
+ <info>
+ LINUX_KERNEL_TYPE[doc] = "Defines the kernel type to be used in assembling the configuration."
+ </info>
<glossdef>
<para>
Defines the kernel type to be used in assembling the
@@ -4805,7 +5758,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LINUX_VERSION'><glossterm>LINUX_VERSION</glossterm>
+ <glossentry id='var-LINUX_VERSION'><glossterm><imagedata fileref="figures/define-generic.png" />LINUX_VERSION</glossterm>
+ <info>
+ LINUX_VERSION[doc] = "The Linux version from kernel.org on which the Linux kernel image being built using the OpenEmbedded build system is based. You define this variable in the kernel recipe."
+ </info>
<glossdef>
<para>The Linux version from <filename>kernel.org</filename>
on which the Linux kernel image being built using the
@@ -4827,7 +5783,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LINUX_VERSION_EXTENSION'><glossterm>LINUX_VERSION_EXTENSION</glossterm>
+ <glossentry id='var-LINUX_VERSION_EXTENSION'><glossterm><imagedata fileref="figures/define-generic.png" />LINUX_VERSION_EXTENSION</glossterm>
+ <info>
+ LINUX_VERSION_EXTENSION[doc] = "A string extension compiled into the version string of the Linux kernel built with the OpenEmbedded build system. You define this variable in the kernel recipe."
+ </info>
<glossdef>
<para>A string extension compiled into the version
string of the Linux kernel built with the OpenEmbedded
@@ -4852,7 +5811,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-LOG_DIR'><glossterm>LOG_DIR</glossterm>
+ <glossentry id='var-LOG_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />LOG_DIR</glossterm>
+ <info>
+ LOG_DIR[doc] = "Specifies the directory to which the OpenEmbedded build system writes overall log files. The default directory is ${TMPDIR}/log."
+ </info>
<glossdef>
<para>
Specifies the directory to which the OpenEmbedded build
@@ -4871,7 +5833,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-m'><title>M</title>
- <glossentry id='var-MACHINE'><glossterm>MACHINE</glossterm>
+ <glossentry id='var-MACHINE'><glossterm><imagedata fileref="figures/define-generic.png" />MACHINE</glossterm>
+ <info>
+ MACHINE[doc] = "Specifies the target device for which the image is built. You define MACHINE in the conf/local.conf file in the Build Directory."
+ </info>
<glossdef>
<para>
Specifies the target device for which the image is built.
@@ -4918,7 +5883,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MACHINE_ARCH'><glossterm>MACHINE_ARCH</glossterm>
+ <glossentry id='var-MACHINE_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />MACHINE_ARCH</glossterm>
+ <info>
+ MACHINE_ARCH[doc] = "Specifies the name of the machine-specific architecture. This variable is set automatically from MACHINE or TUNE_PKGARCH."
+ </info>
<glossdef>
<para>
Specifies the name of the machine-specific architecture.
@@ -4932,9 +5900,11 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MACHINE_ESSENTIAL_EXTRA_RDEPENDS'><glossterm>MACHINE_ESSENTIAL_EXTRA_RDEPENDS</glossterm>
+ <glossentry id='var-MACHINE_ESSENTIAL_EXTRA_RDEPENDS'><glossterm><imagedata fileref="figures/define-generic.png" />MACHINE_ESSENTIAL_EXTRA_RDEPENDS</glossterm>
+ <info>
+ MACHINE_ESSENTIAL_EXTRA_RDEPENDS[doc] = "A list of required machine-specific packages to install as part of the image being built. Because this is a 'machine essential' variable, the list of packages is essential for the machine to boot."
+ </info>
<glossdef>
- <para></para>
<para>
A list of required machine-specific packages to install as part of
the image being built.
@@ -4964,9 +5934,11 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS'><glossterm>MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS</glossterm>
+ <glossentry id='var-MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS'><glossterm><imagedata fileref="figures/define-generic.png" />MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS</glossterm>
+ <info>
+ MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS[doc] = "A list of recommended machine-specific packages to install as part of the image being built. Because this is a 'machine essential' variable, the list of packages is essential for the machine to boot."
+ </info>
<glossdef>
- <para></para>
<para>
A list of recommended machine-specific packages to install as part of
the image being built.
@@ -5012,7 +5984,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MACHINE_EXTRA_RDEPENDS'><glossterm>MACHINE_EXTRA_RDEPENDS</glossterm>
+ <glossentry id='var-MACHINE_EXTRA_RDEPENDS'><glossterm><imagedata fileref="figures/define-generic.png" />MACHINE_EXTRA_RDEPENDS</glossterm>
+ <info>
+ MACHINE_EXTRA_RDEPENDS[doc] = "A list of machine-specific packages to install as part of the image being built that are not essential for the machine to boot. However, the build process for more fully-featured images depends on the packages being present."
+ </info>
<glossdef>
<para>
A list of machine-specific packages to install as part of the
@@ -5051,9 +6026,11 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MACHINE_EXTRA_RRECOMMENDS'><glossterm>MACHINE_EXTRA_RRECOMMENDS</glossterm>
+ <glossentry id='var-MACHINE_EXTRA_RRECOMMENDS'><glossterm><imagedata fileref="figures/define-generic.png" />MACHINE_EXTRA_RRECOMMENDS</glossterm>
+ <info>
+ MACHINE_EXTRA_RRECOMMENDS[doc] = "A list of machine-specific packages to install as part of the image being built that are not essential for booting the machine. The image being built has no build dependencies on the packages in this list."
+ </info>
<glossdef>
- <para></para>
<para>
A list of machine-specific packages to install as part of the
image being built that are not essential for booting the machine.
@@ -5090,7 +6067,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MACHINE_FEATURES'><glossterm>MACHINE_FEATURES</glossterm>
+ <glossentry id='var-MACHINE_FEATURES'><glossterm><imagedata fileref="figures/define-generic.png" />MACHINE_FEATURES</glossterm>
+ <info>
+ MACHINE_FEATURES[doc] = "Specifies the list of hardware features the MACHINE supports."
+ </info>
<glossdef>
<para>
Specifies the list of hardware features the
@@ -5113,7 +6093,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MACHINE_FEATURES_BACKFILL'><glossterm>MACHINE_FEATURES_BACKFILL</glossterm>
+ <glossentry id='var-MACHINE_FEATURES_BACKFILL'><glossterm><imagedata fileref="figures/define-generic.png" />MACHINE_FEATURES_BACKFILL</glossterm>
+ <info>
+ MACHINE_FEATURES_BACKFILL[doc] = "Features to be added to MACHINE_FEATURES if not also present in MACHINE_FEATURES_BACKFILL_CONSIDERED. This variable is set in the meta/conf/bitbake.conf file and is not intended to be user-configurable."
+ </info>
<glossdef>
<para>Features to be added to
<filename><link linkend='var-MACHINE_FEATURES'>MACHINE_FEATURES</link></filename>
@@ -5132,7 +6115,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MACHINE_FEATURES_BACKFILL_CONSIDERED'><glossterm>MACHINE_FEATURES_BACKFILL_CONSIDERED</glossterm>
+ <glossentry id='var-MACHINE_FEATURES_BACKFILL_CONSIDERED'><glossterm><imagedata fileref="figures/define-generic.png" />MACHINE_FEATURES_BACKFILL_CONSIDERED</glossterm>
+ <info>
+ MACHINE_FEATURES_BACKFILL_CONSIDERED[doc] = "Features from MACHINE_FEATURES_BACKFILL that should not be backfilled (i.e. added to MACHINE_FEATURES) during the build."
+ </info>
<glossdef>
<para>Features from
<filename><link linkend='var-MACHINE_FEATURES_BACKFILL'>MACHINE_FEATURES_BACKFILL</link></filename>
@@ -5145,7 +6131,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MACHINEOVERRIDES'><glossterm>MACHINEOVERRIDES</glossterm>
+ <glossentry id='var-MACHINEOVERRIDES'><glossterm><imagedata fileref="figures/define-generic.png" />MACHINEOVERRIDES</glossterm>
+ <info>
+ MACHINEOVERRIDES[doc] = "Lists overrides specific to the current machine. By default, this list includes the value of MACHINE."
+ </info>
<glossdef>
<para>
Lists overrides specific to the current machine.
@@ -5174,13 +6163,19 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MAINTAINER'><glossterm>MAINTAINER</glossterm>
+ <glossentry id='var-MAINTAINER'><glossterm><imagedata fileref="figures/define-generic.png" />MAINTAINER</glossterm>
+ <info>
+ MAINTAINER[doc] = "The email address of the distribution maintainer."
+ </info>
<glossdef>
<para>The email address of the distribution maintainer.</para>
</glossdef>
</glossentry>
- <glossentry id='var-MIRRORS'><glossterm>MIRRORS</glossterm>
+ <glossentry id='var-MIRRORS'><glossterm><imagedata fileref="figures/define-generic.png" />MIRRORS</glossterm>
+ <info>
+ MIRRORS[doc] = "Specifies additional paths from which the OpenEmbedded build system gets source code."
+ </info>
<glossdef>
<para>
Specifies additional paths from which the OpenEmbedded
@@ -5205,7 +6200,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MLPREFIX'><glossterm>MLPREFIX</glossterm>
+ <glossentry id='var-MLPREFIX'><glossterm><imagedata fileref="figures/define-generic.png" />MLPREFIX</glossterm>
+ <info>
+ MLPREFIX[doc] = "Specifies a prefix that has been added to PN to create a special version of a recipe or package, such as a Multilib version."
+ </info>
<glossdef>
<para>
Specifies a prefix has been added to
@@ -5220,56 +6218,40 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-module_autoload'><glossterm>module_autoload</glossterm>
+ <glossentry id='var-module_autoload'><glossterm><imagedata fileref="figures/define-generic.png" />module_autoload</glossterm>
+ <info>
+ module_autoload[doc] = "This variable has been replaced by the KERNEL_MODULE_AUTOLOAD variable. You should replace all occurrences of module_autoload with additions to KERNEL_MODULE_AUTOLOAD."
+ </info>
<glossdef>
<para>
- Lists kernel modules that need to be auto-loaded during
- boot.
- </para>
-
- <para>
- You can use this variable anywhere that it can be
- recognized by the kernel recipe or out-of-tree kernel
- module recipe (e.g. a machine configuration file, a
- distribution configuration file, an append file for the
- recipe, or the recipe itself).
- </para>
-
- <para>
- Specify it as follows:
+ This variable has been replaced by the
+ <filename>KERNEL_MODULE_AUTOLOAD</filename> variable.
+ You should replace all occurrences of
+ <filename>module_autoload</filename> with additions to
+ <filename>KERNEL_MODULE_AUTOLOAD</filename>, for example:
<literallayout class='monospaced'>
- module_autoload_&lt;modname&gt; = "modname1 modname2 modname3"
+ module_autoload_rfcomm = "rfcomm"
</literallayout>
- You must use the kernel module name override.
- </para>
-
- <para>
- Including <filename>module_autoload</filename> causes the
- OpenEmbedded build system to populate the
- <filename>/etc/modules-load.d/modname.conf</filename>
- file with the list of modules to be auto-loaded on boot.
- The modules appear one-per-line in the file.
- Here is an example of the most common use case:
+ should now be replaced with:
<literallayout class='monospaced'>
- module_autoload_modname = "modname"
+ KERNEL_MODULE_AUTOLOAD += "rfcomm"
</literallayout>
- </para>
-
- <para>
- For information on how to populate the
- <filename>modname.conf</filename> file with
- <filename>modprobe.d</filename> syntax lines, see the
- <link linkend='var-module_conf'><filename>module_conf</filename></link>
- variable.
+ See the
+ <link linkend='var-KERNEL_MODULE_AUTOLOAD'><filename>KERNEL_MODULE_AUTOLOAD</filename></link>
+ variable for more information.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-module_conf'><glossterm>module_conf</glossterm>
+ <glossentry id='var-module_conf'><glossterm><imagedata fileref="figures/define-generic.png" />module_conf</glossterm>
+ <info>
+ module_conf[doc] = "Specifies modprobe.d syntax lines for inclusion in the /etc/modprobe.d/modname.conf file."
+ </info>
<glossdef>
<para>
- Specifies <filename>modprobe.d</filename> syntax lines
- for inclusion in the
+ Specifies
+ <ulink url='http://linux.die.net/man/5/modprobe.d'><filename>modprobe.d</filename></ulink>
+ syntax lines for inclusion in the
<filename>/etc/modprobe.d/modname.conf</filename> file.
</para>
@@ -5279,19 +6261,23 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
module recipe (e.g. a machine configuration file, a
distribution configuration file, an append file for the
recipe, or the recipe itself).
+ If you use this variable, you must also be sure to list
+ the module name in the
+ <link linkend='var-KERNEL_MODULE_AUTOLOAD'><filename>KERNEL_MODULE_AUTOLOAD</filename></link>
+ variable.
</para>
<para>
Here is the general syntax:
<literallayout class='monospaced'>
- module_conf_&lt;modname&gt; = "modprobe.d-syntax"
+ module_conf_<replaceable>module_name</replaceable> = "<replaceable>modprobe.d-syntax</replaceable>"
</literallayout>
You must use the kernel module name override.
</para>
<para>
Run <filename>man modprobe.d</filename> in the shell to
- find out more information on the exact syntax for lines
+ find out more information on the exact syntax
you want to provide with <filename>module_conf</filename>.
</para>
@@ -5300,22 +6286,27 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
OpenEmbedded build system to populate the
<filename>/etc/modprobe.d/modname.conf</filename>
file with <filename>modprobe.d</filename> syntax lines.
- Here is an example:
+ Here is an example that adds the options
+ <filename>arg1</filename> and <filename>arg2</filename>
+ to a module named <filename>mymodule</filename>:
<literallayout class='monospaced'>
- module_conf_&lt;modname&gt; = "options modname arg1=val1 arg2=val2"
+ module_conf_mymodule = "options mymodule arg1=val1 arg2=val2"
</literallayout>
</para>
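+
+ <para>
+ Continuing the illustrative example above (the module
+ name <filename>mymodule</filename> is hypothetical),
+ the same module would then also be listed for
+ auto-loading:
+ <literallayout class='monospaced'>
+ # hypothetical module name
+ KERNEL_MODULE_AUTOLOAD += "mymodule"
+ </literallayout>
+ </para>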
<para>
For information on how to specify kernel modules to
auto-load on boot, see the
- <link linkend='var-module_autoload'><filename>module_autoload</filename></link>
+ <link linkend='var-KERNEL_MODULE_AUTOLOAD'><filename>KERNEL_MODULE_AUTOLOAD</filename></link>
variable.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-MODULE_IMAGE_BASE_NAME'><glossterm>MODULE_IMAGE_BASE_NAME</glossterm>
+ <glossentry id='var-MODULE_IMAGE_BASE_NAME'><glossterm><imagedata fileref="figures/define-generic.png" />MODULE_IMAGE_BASE_NAME</glossterm>
+ <info>
+ MODULE_IMAGE_BASE_NAME[doc] = "The base name of the kernel modules tarball."
+ </info>
<glossdef>
<para>
The base name of the kernel modules tarball.
@@ -5337,7 +6328,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MODULE_TARBALL_DEPLOY'><glossterm>MODULE_TARBALL_DEPLOY</glossterm>
+ <glossentry id='var-MODULE_TARBALL_DEPLOY'><glossterm><imagedata fileref="figures/define-generic.png" />MODULE_TARBALL_DEPLOY</glossterm>
+ <info>
+ MODULE_TARBALL_DEPLOY[doc] = "Controls creation of the modules-*.tgz file. Set this variable to '0' to disable creation of this file, which contains all of the kernel modules resulting from a kernel build."
+ </info>
<glossdef>
<para>
Controls creation of the <filename>modules-*.tgz</filename>
@@ -5349,7 +6343,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-MULTIMACH_TARGET_SYS'><glossterm>MULTIMACH_TARGET_SYS</glossterm>
+ <glossentry id='var-MULTIMACH_TARGET_SYS'><glossterm><imagedata fileref="figures/define-generic.png" />MULTIMACH_TARGET_SYS</glossterm>
+ <info>
+ MULTIMACH_TARGET_SYS[doc] = "Separates files for different machines such that you can build for multiple target machines using the same output directories."
+ </info>
<glossdef>
<para>
Separates files for different machines such that you can build
@@ -5364,7 +6361,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-n'><title>N</title>
- <glossentry id='var-NATIVELSBSTRING'><glossterm>NATIVELSBSTRING</glossterm>
+ <glossentry id='var-NATIVELSBSTRING'><glossterm><imagedata fileref="figures/define-generic.png" />NATIVELSBSTRING</glossterm>
+ <info>
+ NATIVELSBSTRING[doc] = "A string identifying the host distribution."
+ </info>
<glossdef>
<para>
A string identifying the host distribution.
@@ -5389,7 +6389,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-NO_RECOMMENDATIONS'><glossterm>NO_RECOMMENDATIONS</glossterm>
+ <glossentry id='var-NO_RECOMMENDATIONS'><glossterm><imagedata fileref="figures/define-generic.png" />NO_RECOMMENDATIONS</glossterm>
+ <info>
+ NO_RECOMMENDATIONS[doc] = "When set to '1', no recommended packages will be installed. Realize that some recommended packages might be required for certain system functionality, such as kernel-modules. It is up to the user to add packages to IMAGE_INSTALL as needed."
+ </info>
<glossdef>
<para>
Prevents installation of all "recommended-only" packages.
@@ -5406,7 +6409,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<filename>local.conf</filename> file or you can attach it to
a specific image recipe by using the recipe name override:
<literallayout class='monospaced'>
- NO_RECOMMENDATIONS_pn-&lt;target_image&gt; = "&lt;package_name&gt;"
+ NO_RECOMMENDATIONS_pn-<replaceable>target_image</replaceable> = "<replaceable>package_name</replaceable>"
</literallayout>
</para>
@@ -5443,7 +6446,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-NOHDD'><glossterm>NOHDD</glossterm>
+ <glossentry id='var-NOHDD'><glossterm><imagedata fileref="figures/define-generic.png" />NOHDD</glossterm>
+ <info>
+ NOHDD[doc] = "Causes the OpenEmbedded build system to skip building the .hddimg image."
+ </info>
<glossdef>
<para>
Causes the OpenEmbedded build system to skip building the
@@ -5457,7 +6463,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-NOISO'><glossterm>NOISO</glossterm>
+ <glossentry id='var-NOISO'><glossterm><imagedata fileref="figures/define-generic.png" />NOISO</glossterm>
+ <info>
+ NOISO[doc] = "Causes the OpenEmbedded build system to skip building the ISO image."
+ </info>
<glossdef>
<para>
Causes the OpenEmbedded build system to skip building the
@@ -5476,11 +6485,15 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-o'><title>O</title>
- <glossentry id='var-OE_BINCONFIG_EXTRA_MANGLE'><glossterm>OE_BINCONFIG_EXTRA_MANGLE</glossterm>
+ <glossentry id='var-OE_BINCONFIG_EXTRA_MANGLE'><glossterm><imagedata fileref="figures/define-generic.png" />OE_BINCONFIG_EXTRA_MANGLE</glossterm>
+ <info>
+ OE_BINCONFIG_EXTRA_MANGLE[doc] = "When a recipe inherits the binconfig class, this variable specifies additional arguments passed to the 'sed' command."
+ </info>
<glossdef>
<para>
- When a recipe inherits the
- <filename>binconfig.bbclass</filename> class, this variable
+ When inheriting the
+ <link linkend='ref-classes-binconfig'><filename>binconfig</filename></link>
+ class, this variable
specifies additional arguments passed to the "sed" command.
The sed command alters any paths in configuration scripts
that have been set up during compilation.
@@ -5505,7 +6518,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-OE_IMPORTS'><glossterm>OE_IMPORTS</glossterm>
+ <glossentry id='var-OE_IMPORTS'><glossterm><imagedata fileref="figures/define-generic.png" />OE_IMPORTS</glossterm>
+ <info>
+ OE_IMPORTS[doc] = "An internal variable used to tell the OpenEmbedded build system what Python modules to import for every Python function run by the system."
+ </info>
<glossdef>
<para>
An internal variable used to tell the OpenEmbedded build
@@ -5520,7 +6536,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-OE_TERMINAL'><glossterm>OE_TERMINAL</glossterm>
+ <glossentry id='var-OE_TERMINAL'><glossterm><imagedata fileref="figures/define-generic.png" />OE_TERMINAL</glossterm>
+ <info>
+ OE_TERMINAL[doc] = "Controls how the OpenEmbedded build system spawns interactive terminals on the host development system."
+ </info>
<glossdef>
<para>
Controls how the OpenEmbedded build system spawns
@@ -5551,7 +6570,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-OEROOT'><glossterm>OEROOT</glossterm>
+ <glossentry id='var-OEROOT'><glossterm><imagedata fileref="figures/define-generic.png" />OEROOT</glossterm>
+ <info>
+ OEROOT[doc] = "The directory from which the top-level build environment setup script is sourced."
+ </info>
<glossdef>
<para>
The directory from which the top-level build environment
@@ -5573,7 +6595,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-OLDEST_KERNEL'><glossterm>OLDEST_KERNEL</glossterm>
+ <glossentry id='var-OLDEST_KERNEL'><glossterm><imagedata fileref="figures/define-generic.png" />OLDEST_KERNEL</glossterm>
+ <info>
+ OLDEST_KERNEL[doc] = "Declares the oldest version of the Linux kernel that the produced binaries must support."
+ </info>
<glossdef>
<para>
Declares the oldest version of the Linux kernel that the
@@ -5592,7 +6617,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-OVERRIDES'><glossterm>OVERRIDES</glossterm>
+ <glossentry id='var-OVERRIDES'><glossterm><imagedata fileref="figures/define-generic.png" />OVERRIDES</glossterm>
+ <info>
+ OVERRIDES[doc] = "BitBake uses OVERRIDES to control what variables are overridden after BitBake parses recipes and configuration files."
+ </info>
<glossdef>
<para>
BitBake uses <filename>OVERRIDES</filename> to control
@@ -5609,7 +6637,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-p'><title>P</title>
- <glossentry id='var-P'><glossterm>P</glossterm>
+ <glossentry id='var-P'><glossterm><imagedata fileref="figures/define-generic.png" />P</glossterm>
+ <info>
+ P[doc] = "The recipe name and version. P is comprised of ${PN}-${PV}."
+ </info>
<glossdef>
<para>The recipe name and version.
<filename>P</filename> is comprised of the following:
@@ -5619,7 +6650,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGE_ARCH'><glossterm>PACKAGE_ARCH</glossterm>
+ <glossentry id='var-PACKAGE_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGE_ARCH</glossterm>
+ <info>
+ PACKAGE_ARCH[doc] = "The architecture of the resulting package or packages."
+ </info>
<glossdef>
<para>
The architecture of the resulting package or packages.
@@ -5645,7 +6679,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGE_ARCHS'><glossterm>PACKAGE_ARCHS</glossterm>
+ <glossentry id='var-PACKAGE_ARCHS'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGE_ARCHS</glossterm>
+ <info>
+ PACKAGE_ARCHS[doc] = "A list of architectures compatible with the given target in order of priority."
+ </info>
<glossdef>
<para>
Specifies a list of architectures compatible with
@@ -5661,9 +6698,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
-
-
- <glossentry id='var-PACKAGE_BEFORE_PN'><glossterm>PACKAGE_BEFORE_PN</glossterm>
+ <glossentry id='var-PACKAGE_BEFORE_PN'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGE_BEFORE_PN</glossterm>
+ <info>
+ PACKAGE_BEFORE_PN[doc] = "Enables easily adding packages to PACKAGES before ${PN} so that the packages can pick up files that would normally be included in the default package."
+ </info>
<glossdef>
<para>Enables easily adding packages to
<filename><link linkend='var-PACKAGES'>PACKAGES</link></filename>
@@ -5673,7 +6711,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGE_CLASSES'><glossterm>PACKAGE_CLASSES</glossterm>
+ <glossentry id='var-PACKAGE_CLASSES'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGE_CLASSES</glossterm>
+ <info>
+ PACKAGE_CLASSES[doc] = "This variable specifies the package manager to use when packaging data. It is set in the conf/local.conf file in the Build Directory."
+ </info>
<glossdef>
<para>
This variable, which is set in the
@@ -5722,19 +6763,80 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGE_EXCLUDE'><glossterm>PACKAGE_EXCLUDE</glossterm>
+ <glossentry id='var-PACKAGE_DEBUG_SPLIT_STYLE'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGE_DEBUG_SPLIT_STYLE</glossterm>
+ <info>
+ PACKAGE_DEBUG_SPLIT_STYLE[doc] = "Determines how to split up the binary and debug information when creating *-dbg packages to be used with the GNU Project Debugger (GDB)."
+ </info>
+ <glossdef>
+ <para>
+ Determines how to split up the binary and debug information
+ when creating <filename>*-dbg</filename> packages to be
+ used with the GNU Project Debugger (GDB).
+ </para>
+
+ <para>
+ With the
+ <filename>PACKAGE_DEBUG_SPLIT_STYLE</filename> variable,
+ you can control where debug information, which can include
+ or exclude source files, is stored:
+ <itemizedlist>
+ <listitem><para>
+ ".debug": Debug symbol files are placed next
+ to the binary in a <filename>.debug</filename>
+ directory on the target.
+ For example, if a binary is installed into
+ <filename>/bin</filename>, the corresponding debug
+ symbol files are installed in
+ <filename>/bin/.debug</filename>.
+ Source files are placed in
+ <filename>/usr/src/debug</filename>.
+ This is the default behavior.
+ </para></listitem>
+ <listitem><para>
+ "debug-file-directory": Debug symbol files are
+ placed under <filename>/usr/lib/debug</filename>
+ on the target, and separated by the path from where
+ the binary is installed.
+ For example, if a binary is installed in
+ <filename>/bin</filename>, the corresponding debug
+ symbols are installed in
+ <filename>/usr/lib/debug/bin</filename>.
+ Source files are placed in
+ <filename>/usr/src/debug</filename>.
+ </para></listitem>
+ <listitem><para>
+ "debug-without-src": The same behavior as
+ ".debug" previously described with the exception
+ that no source files are installed.
+ </para></listitem>
+ </itemizedlist>
+ </para>
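+
+ <para>
+ For example, to place debug symbol files under
+ <filename>/usr/lib/debug</filename> on the target, you
+ could add the following line (a minimal illustration) to
+ your <filename>local.conf</filename> file:
+ <literallayout class='monospaced'>
+ PACKAGE_DEBUG_SPLIT_STYLE = "debug-file-directory"
+ </literallayout>
+ </para>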
+
+ <para>
+ You can find out more about debugging using GDB by reading
+ the
+ "<ulink url='&YOCTO_DOCS_DEV_URL;#platdev-gdb-remotedebug'>Debugging With the GNU Project Debugger (GDB) Remotely</ulink>"
+ section in the Yocto Project Development Manual.
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-PACKAGE_EXCLUDE'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGE_EXCLUDE</glossterm>
+ <info>
+ PACKAGE_EXCLUDE[doc] = "Packages to exclude from the installation. If a listed package is required, an error is generated."
+ </info>
<glossdef>
<para>
Lists packages that should not be installed into an image.
For example:
<literallayout class='monospaced'>
- PACKAGE_EXCLUDE = "&lt;package_name&gt; &lt;package_name&gt; &lt;package_name&gt; ..."
+ PACKAGE_EXCLUDE = "<replaceable>package_name</replaceable> <replaceable>package_name</replaceable> <replaceable>package_name</replaceable> ..."
</literallayout>
You can set this variable globally in your
<filename>local.conf</filename> file or you can attach it to
a specific image recipe by using the recipe name override:
<literallayout class='monospaced'>
- PACKAGE_EXCLUDE_pn-&lt;target_image&gt; = "&lt;package_name&gt;"
+ PACKAGE_EXCLUDE_pn-<replaceable>target_image</replaceable> = "<replaceable>package_name</replaceable>"
</literallayout>
</para>
@@ -5767,17 +6869,22 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGE_EXTRA_ARCHS'><glossterm>PACKAGE_EXTRA_ARCHS</glossterm>
+ <glossentry id='var-PACKAGE_EXTRA_ARCHS'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGE_EXTRA_ARCHS</glossterm>
+ <info>
+ PACKAGE_EXTRA_ARCHS[doc] = "Specifies the list of architectures compatible with the device CPU. This variable is useful when you build for several different devices that use miscellaneous processors."
+ </info>
<glossdef>
<para>Specifies the list of architectures compatible with the device CPU.
This variable is useful when you build for several different devices that use
- miscellaneous processors such as XScale and ARM926-EJS).</para>
+ miscellaneous processors such as XScale and ARM926-EJS.</para>
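+ <para>
+ For example, a machine or tune configuration could list
+ additional compatible package architectures (the values
+ shown here are illustrative only):
+ <literallayout class='monospaced'>
+ PACKAGE_EXTRA_ARCHS = "armv4 armv4t armv5te"
+ </literallayout>
+ </para>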
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGE_GROUP'><glossterm>PACKAGE_GROUP</glossterm>
+ <glossentry id='var-PACKAGE_GROUP'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGE_GROUP</glossterm>
+ <info>
+ PACKAGE_GROUP[doc] = "Defines one or more packages to include in an image when a specific item is included in IMAGE_FEATURES."
+ </info>
<glossdef>
-
<para>
The <filename>PACKAGE_GROUP</filename> variable has been
renamed to
@@ -5794,7 +6901,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGE_INSTALL'><glossterm>PACKAGE_INSTALL</glossterm>
+ <glossentry id='var-PACKAGE_INSTALL'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGE_INSTALL</glossterm>
+ <info>
+ PACKAGE_INSTALL[doc] = "List of the packages to be installed into the image. The variable is generally not user-defined and uses IMAGE_INSTALL as part of the list."
+ </info>
<glossdef>
<para>
The final list of packages passed to the package manager
@@ -5822,7 +6932,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGE_PREPROCESS_FUNCS'><glossterm>PACKAGE_PREPROCESS_FUNCS</glossterm>
+ <glossentry id='var-PACKAGE_PREPROCESS_FUNCS'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGE_PREPROCESS_FUNCS</glossterm>
+ <info>
+ PACKAGE_PREPROCESS_FUNCS[doc] = "Specifies a list of functions run to pre-process the PKGD directory prior to splitting the files out to individual packages."
+ </info>
<glossdef>
<para>
Specifies a list of functions run to pre-process the
@@ -5833,7 +6946,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGECONFIG'><glossterm>PACKAGECONFIG</glossterm>
+ <glossentry id='var-PACKAGECONFIG'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGECONFIG</glossterm>
+ <info>
+ PACKAGECONFIG[doc] = "This variable provides a means of enabling or disabling features of a recipe on a per-recipe basis."
+ </info>
<glossdef>
<para>
This variable provides a means of enabling or disabling
@@ -5918,8 +7034,8 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<itemizedlist>
<listitem><para><emphasis>Append file:</emphasis>
Create an append file named
- <filename>&lt;recipename&gt;.bbappend</filename> in your
- layer and override the value of
+ <replaceable>recipename</replaceable><filename>.bbappend</filename>
+ in your layer and override the value of
<filename>PACKAGECONFIG</filename>.
You can either completely override the variable:
<literallayout class='monospaced'>
@@ -5933,22 +7049,25 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
This method is identical to changing the block
through an append file except you edit your
<filename>local.conf</filename> or
- <filename>&lt;mydistro&gt;.conf</filename> file.
+ <filename><replaceable>mydistro</replaceable>.conf</filename> file.
As with append files previously described,
you can either completely override the variable:
<literallayout class='monospaced'>
- PACKAGECONFIG_pn-&lt;recipename&gt;="f4 f5"
+ PACKAGECONFIG_pn-<replaceable>recipename</replaceable>="f4 f5"
</literallayout>
Or, you can just amend the variable:
<literallayout class='monospaced'>
- PACKAGECONFIG_append_pn-&lt;recipename&gt; = " f4"
+ PACKAGECONFIG_append_pn-<replaceable>recipename</replaceable> = " f4"
</literallayout></para></listitem>
</itemizedlist>
</para>
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGES'><glossterm>PACKAGES</glossterm>
+ <glossentry id='var-PACKAGES'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGES</glossterm>
+ <info>
+ PACKAGES[doc] = "The list of packages to be created from the recipe."
+ </info>
<glossdef>
<para>The list of packages to be created from the recipe.
The default value is the following:
@@ -5958,25 +7077,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PACKAGESPLITFUNCS'><glossterm>PACKAGESPLITFUNCS</glossterm>
- <glossdef>
- <para>
- Specifies a list of functions run to perform additional
- splitting of files into individual packages.
- Recipes can either prepend to this variable or prepend
- to the <filename>populate_packages</filename> function
- in order to perform additional package splitting.
- In either case, the function should set
- <link linkend='var-PACKAGES'><filename>PACKAGES</filename></link>,
- <link linkend='var-FILES'><filename>FILES</filename></link>,
- <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
- and other packaging variables appropriately in order to
- perform the desired splitting.
- </para>
- </glossdef>
- </glossentry>
-
- <glossentry id='var-PACKAGES_DYNAMIC'><glossterm>PACKAGES_DYNAMIC</glossterm>
+ <glossentry id='var-PACKAGES_DYNAMIC'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGES_DYNAMIC</glossterm>
+ <info>
+ PACKAGES_DYNAMIC[doc] = "A promise that your recipe satisfies runtime dependencies for optional modules that are found in other recipes."
+ </info>
<glossdef>
<para>
A promise that your recipe satisfies runtime dependencies
@@ -6014,7 +7118,31 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PARALLEL_MAKE'><glossterm>PARALLEL_MAKE</glossterm>
+ <glossentry id='var-PACKAGESPLITFUNCS'><glossterm><imagedata fileref="figures/define-generic.png" />PACKAGESPLITFUNCS</glossterm>
+ <info>
+ PACKAGESPLITFUNCS[doc] = "Specifies a list of functions run to perform additional splitting of files into individual packages."
+ </info>
+ <glossdef>
+ <para>
+ Specifies a list of functions run to perform additional
+ splitting of files into individual packages.
+ Recipes can either prepend to this variable or prepend
+ to the <filename>populate_packages</filename> function
+ in order to perform additional package splitting.
+ In either case, the function should set
+ <link linkend='var-PACKAGES'><filename>PACKAGES</filename></link>,
+ <link linkend='var-FILES'><filename>FILES</filename></link>,
+ <link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link>
+ and other packaging variables appropriately in order to
+ perform the desired splitting.
+ </para>
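+
+ <para>
+ The following is a minimal sketch of the prepend pattern;
+ the function name, package name, and file location are
+ illustrative only:
+ <literallayout class='monospaced'>
+ PACKAGESPLITFUNCS_prepend = "split_extra_packages "
+
+ python split_extra_packages () {
+     # Add an extra package ahead of ${PN} and claim its files
+     pn = d.getVar('PN', True)
+     d.prependVar('PACKAGES', pn + '-extras ')
+     d.setVar('FILES_' + pn + '-extras', '${datadir}/extras')
+ }
+ </literallayout>
+ </para>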
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-PARALLEL_MAKE'><glossterm><imagedata fileref="figures/define-generic.png" />PARALLEL_MAKE</glossterm>
+ <info>
+ PARALLEL_MAKE[doc] = "Specifies extra options that are passed to the make command during the compile tasks. This variable is usually in the form -j 4, where the number represents the maximum number of parallel jobs make can run."
+ </info>
<glossdef>
<para>
Extra options passed to the <filename>make</filename>
@@ -6042,7 +7170,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PARALLEL_MAKEINST'><glossterm>PARALLEL_MAKEINST</glossterm>
+ <glossentry id='var-PARALLEL_MAKEINST'><glossterm><imagedata fileref="figures/define-generic.png" />PARALLEL_MAKEINST</glossterm>
+ <info>
+ PARALLEL_MAKEINST[doc] = "Extra options passed to the make install command during the do_install task in order to specify parallel installation."
+ </info>
<glossdef>
<para>
Extra options passed to the
@@ -6060,7 +7191,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PATCHRESOLVE'><glossterm>PATCHRESOLVE</glossterm>
+ <glossentry id='var-PATCHRESOLVE'><glossterm><imagedata fileref="figures/define-generic.png" />PATCHRESOLVE</glossterm>
+ <info>
+ PATCHRESOLVE[doc] = "Enable or disable interactive patch resolution."
+ </info>
<glossdef>
<para>
Determines the action to take when a patch fails.
@@ -6084,7 +7218,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PATCHTOOL'><glossterm>PATCHTOOL</glossterm>
+ <glossentry id='var-PATCHTOOL'><glossterm><imagedata fileref="figures/define-generic.png" />PATCHTOOL</glossterm>
+ <info>
+ PATCHTOOL[doc] = "Specifies the utility used to apply patches for a recipe during do_patch."
+ </info>
<glossdef>
<para>
Specifies the utility used to apply patches for a recipe
@@ -6111,7 +7248,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PE'><glossterm>PE</glossterm>
+ <glossentry id='var-PE'><glossterm><imagedata fileref="figures/define-generic.png" />PE</glossterm>
+ <info>
+ PE[doc] = "The epoch of the recipe. The default value is '0'. The field is used to make upgrades possible when the versioning scheme changes in some backwards incompatible way."
+ </info>
<glossdef>
<para>
The epoch of the recipe.
@@ -6123,7 +7263,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PF'><glossterm>PF</glossterm>
+ <glossentry id='var-PF'><glossterm><imagedata fileref="figures/define-generic.png" />PF</glossterm>
+ <info>
+ PF[doc] = "Specifies the recipe or package name and includes all version and revision numbers. This variable is comprised of ${PN}-${EXTENDPE}${PV}-${PR}."
+ </info>
<glossdef>
<para>Specifies the recipe or package name and includes all version and revision
numbers (i.e. <filename>eglibc-2.13-r20+svnr15508/</filename> and
@@ -6135,10 +7278,13 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PIXBUF_PACKAGES'><glossterm>PIXBUF_PACKAGES</glossterm>
+ <glossentry id='var-PIXBUF_PACKAGES'><glossterm><imagedata fileref="figures/define-generic.png" />PIXBUF_PACKAGES</glossterm>
+ <info>
+ PIXBUF_PACKAGES[doc] = "When a recipe inherits the pixbufcache class, this variable identifies packages that contain the pixbuf loaders used with gdk-pixbuf."
+ </info>
<glossdef>
<para>
- When a recipe inherits the
+ When inheriting the
<link linkend='ref-classes-pixbufcache'><filename>pixbufcache</filename></link>
class, this variable identifies packages that contain
the pixbuf loaders used with
@@ -6152,7 +7298,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PKG'><glossterm>PKG</glossterm>
+ <glossentry id='var-PKG'><glossterm><imagedata fileref="figures/define-generic.png" />PKG</glossterm>
+ <info>
+ PKG[doc] = "The name of the resulting package created by the OpenEmbedded build system. When you use this variable, you must use a package name override."
+ </info>
<glossdef>
<para>
The name of the resulting package created by the
@@ -6164,12 +7313,15 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
For example, when the
<link linkend='ref-classes-debian'><filename>debian</filename></link>
class renames the output package, it does so by setting
- <filename>PKG_&lt;packagename&gt;</filename>.
+ <filename>PKG_<replaceable>packagename</replaceable></filename>.
</para>
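+ <para>
+ For example, to rename the main package produced by a
+ recipe, you could use the package name override as follows
+ (a minimal illustration):
+ <literallayout class='monospaced'>
+ PKG_${PN} = "<replaceable>new_package_name</replaceable>"
+ </literallayout>
+ </para>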
</glossdef>
</glossentry>
- <glossentry id='var-PKGD'><glossterm>PKGD</glossterm>
+ <glossentry id='var-PKGD'><glossterm><imagedata fileref="figures/define-generic.png" />PKGD</glossterm>
+ <info>
+ PKGD[doc] = "Points to the destination directory for files to be packaged before they are split into individual packages."
+ </info>
<glossdef>
<para>
Points to the destination directory for files to be
@@ -6183,7 +7335,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PKGDATA_DIR'><glossterm>PKGDATA_DIR</glossterm>
+ <glossentry id='var-PKGDATA_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />PKGDATA_DIR</glossterm>
+ <info>
+ PKGDATA_DIR[doc] = "Points to a shared, global-state directory that holds data generated during the packaging process."
+ </info>
<glossdef>
<para>
Points to a shared, global-state directory that holds data
@@ -6201,7 +7356,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PKGDEST'><glossterm>PKGDEST</glossterm>
+ <glossentry id='var-PKGDEST'><glossterm><imagedata fileref="figures/define-generic.png" />PKGDEST</glossterm>
+ <info>
+ PKGDEST[doc] = "Points to the parent directory for files to be packaged after they have been split into individual packages."
+ </info>
<glossdef>
<para>
Points to the parent directory for files to be packaged
@@ -6218,7 +7376,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PKGDESTWORK'><glossterm>PKGDESTWORK</glossterm>
+ <glossentry id='var-PKGDESTWORK'><glossterm><imagedata fileref="figures/define-generic.png" />PKGDESTWORK</glossterm>
+ <info>
+ PKGDESTWORK[doc] = "Points to a temporary work area used by the do_package task to write output from the do_packagedata task."
+ </info>
<glossdef>
<para>
Points to a temporary work area used by the
@@ -6243,7 +7404,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PKGE'><glossterm>PKGE</glossterm>
+ <glossentry id='var-PKGE'><glossterm><imagedata fileref="figures/define-generic.png" />PKGE</glossterm>
+ <info>
+ PKGE[doc] = "The epoch of the output package built by the OpenEmbedded build system."
+ </info>
<glossdef>
<para>
The epoch of the output package built by the
@@ -6254,7 +7418,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PKGR'><glossterm>PKGR</glossterm>
+ <glossentry id='var-PKGR'><glossterm><imagedata fileref="figures/define-generic.png" />PKGR</glossterm>
+ <info>
+ PKGR[doc] = "The revision of the output package built by the OpenEmbedded build system."
+ </info>
<glossdef>
<para>
The revision of the output package built by the
@@ -6265,7 +7432,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PKGV'><glossterm>PKGV</glossterm>
+ <glossentry id='var-PKGV'><glossterm><imagedata fileref="figures/define-generic.png" />PKGV</glossterm>
+ <info>
+ PKGV[doc] = "The version of the output package built by the OpenEmbedded build system."
+ </info>
<glossdef>
<para>
The version of the output package built by the
@@ -6276,7 +7446,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PN'><glossterm>PN</glossterm>
+ <glossentry id='var-PN'><glossterm><imagedata fileref="figures/define-generic.png" />PN</glossterm>
+ <info>
+ PN[doc] = "PN refers to a recipe name in the context of a file used by the OpenEmbedded build system as input to create a package. It refers to a package name in the context of a file created or produced by the OpenEmbedded build system."
+ </info>
<glossdef>
<para>This variable can have two separate functions depending on the context: a recipe
name or a resulting package name.</para>
@@ -6300,7 +7473,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PNBLACKLIST'><glossterm>PNBLACKLIST</glossterm>
+ <glossentry id='var-PNBLACKLIST'><glossterm><imagedata fileref="figures/define-generic.png" />PNBLACKLIST</glossterm>
+ <info>
+ PNBLACKLIST[doc] = "Lists recipes you do not want the OpenEmbedded build system to build."
+ </info>
<glossdef>
<para>
Lists recipes you do not want the OpenEmbedded build system
@@ -6324,7 +7500,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PR'><glossterm>PR</glossterm>
+ <glossentry id='var-PR'><glossterm><imagedata fileref="figures/define-generic.png" />PR</glossterm>
+ <info>
+ PR[doc] = "The revision of the recipe. The default value for this variable is 'r0'."
+ </info>
<glossdef>
<para>
The revision of the recipe.
@@ -6333,7 +7512,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PREFERRED_PROVIDER'><glossterm>PREFERRED_PROVIDER</glossterm>
+ <glossentry id='var-PREFERRED_PROVIDER'><glossterm><imagedata fileref="figures/define-generic.png" />PREFERRED_PROVIDER</glossterm>
+ <info>
+ PREFERRED_PROVIDER[doc] = "If multiple recipes provide an item, this variable determines which recipe should be given preference."
+ </info>
<glossdef>
<para>
If multiple recipes provide an item, this variable
@@ -6352,7 +7534,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PREFERRED_VERSION'><glossterm>PREFERRED_VERSION</glossterm>
+ <glossentry id='var-PREFERRED_VERSION'><glossterm><imagedata fileref="figures/define-generic.png" />PREFERRED_VERSION</glossterm>
+ <info>
+ PREFERRED_VERSION[doc] = "If there are multiple versions of recipes available, this variable determines which recipe should be given preference."
+ </info>
<glossdef>
<para>
If there are multiple versions of recipes available, this
@@ -6375,7 +7560,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PREMIRRORS'><glossterm>PREMIRRORS</glossterm>
+ <glossentry id='var-PREMIRRORS'><glossterm><imagedata fileref="figures/define-generic.png" />PREMIRRORS</glossterm>
+ <info>
+ PREMIRRORS[doc] = "Specifies additional paths from which the OpenEmbedded build system gets source code."
+ </info>
<glossdef>
<para>
Specifies additional paths from which the OpenEmbedded
@@ -6420,7 +7608,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PRINC'><glossterm>PRINC</glossterm>
+ <glossentry id='var-PRINC'><glossterm><imagedata fileref="figures/define-generic.png" />PRINC</glossterm>
+ <info>
+ PRINC[doc] = "Causes the PR variable of .bbappend files to dynamically increment. This increment minimizes the impact of layer ordering. This variable defaults to '0'."
+ </info>
<glossdef>
<para>
@@ -6466,7 +7657,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PRIVATE_LIBS'><glossterm>PRIVATE_LIBS</glossterm>
+ <glossentry id='var-PRIVATE_LIBS'><glossterm><imagedata fileref="figures/define-generic.png" />PRIVATE_LIBS</glossterm>
+ <info>
+ PRIVATE_LIBS[doc] = "Specifies libraries installed within a recipe that should be ignored by the OpenEmbedded build system's shared library resolver."
+ </info>
<glossdef>
<para>
Specifies libraries installed within a recipe that
@@ -6498,7 +7692,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PROVIDES'><glossterm>PROVIDES</glossterm>
+ <glossentry id='var-PROVIDES'><glossterm><imagedata fileref="figures/define-generic.png" />PROVIDES</glossterm>
+ <info>
+ PROVIDES[doc] = "A list of aliases that a recipe also provides. These aliases are useful for satisfying dependencies of other recipes during the build as specified by DEPENDS."
+ </info>
<glossdef>
<para>
A list of aliases by which a particular recipe can be
@@ -6527,7 +7724,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PRSERV_HOST'><glossterm>PRSERV_HOST</glossterm>
+ <glossentry id='var-PRSERV_HOST'><glossterm><imagedata fileref="figures/define-generic.png" />PRSERV_HOST</glossterm>
+ <info>
+ PRSERV_HOST[doc] = "The network based PR service host and port."
+ </info>
<glossdef>
<para>
The network based
@@ -6553,7 +7753,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PTEST_ENABLED'><glossterm>PTEST_ENABLED</glossterm>
+ <glossentry id='var-PTEST_ENABLED'><glossterm><imagedata fileref="figures/define-generic.png" />PTEST_ENABLED</glossterm>
+ <info>
+ PTEST_ENABLED[doc] = "Specifies whether or not Package Test (ptest) functionality is enabled when building a recipe."
+ </info>
<glossdef>
<para>
Specifies whether or not
@@ -6568,7 +7771,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PV'><glossterm>PV</glossterm>
+ <glossentry id='var-PV'><glossterm><imagedata fileref="figures/define-generic.png" />PV</glossterm>
+ <info>
+ PV[doc] = "The version of the recipe. The version is normally extracted from the recipe filename."
+ </info>
<glossdef>
<para>
The version of the recipe.
@@ -6583,7 +7789,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PYTHON_ABI'><glossterm>PYTHON_ABI</glossterm>
+ <glossentry id='var-PYTHON_ABI'><glossterm><imagedata fileref="figures/define-generic.png" />PYTHON_ABI</glossterm>
+ <info>
+ PYTHON_ABI[doc] = "When used by recipes that inherit the distutils3, setuptools3, distutils, or setuptools classes, denotes the Application Binary Interface (ABI) currently in use for Python."
+ </info>
<glossdef>
<para>
When used by recipes that inherit the
@@ -6616,7 +7825,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-PYTHON_PN'><glossterm>PYTHON_PN</glossterm>
+ <glossentry id='var-PYTHON_PN'><glossterm><imagedata fileref="figures/define-generic.png" />PYTHON_PN</glossterm>
+ <info>
+ PYTHON_PN[doc] = "When used by recipes that inherit the distutils3, setuptools3, distutils, or setuptools classes, specifies the major Python version being built."
+ </info>
<glossdef>
<para>
When used by recipes that inherit the
@@ -6649,7 +7861,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-q'><title>Q</title>
- <glossentry id='var-QMAKE_PROFILES'><glossterm>QMAKE_PROFILES</glossterm>
+ <glossentry id='var-QMAKE_PROFILES'><glossterm><imagedata fileref="figures/define-generic.png" />QMAKE_PROFILES</glossterm>
+ <info>
+ QMAKE_PROFILES[doc] = "Specifies your own subset of .pro files to be built for use with qmake."
+ </info>
<glossdef>
<para>
Specifies your own subset of <filename>.pro</filename>
@@ -6674,7 +7889,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-r'><title>R</title>
- <glossentry id='var-RCONFLICTS'><glossterm>RCONFLICTS</glossterm>
+ <glossentry id='var-RCONFLICTS'><glossterm><imagedata fileref="figures/define-generic.png" />RCONFLICTS</glossterm>
+ <info>
+ RCONFLICTS[doc] = "The list of packages that conflict with another package. Note that the package will not be installed if the conflicting packages are not first removed."
+ </info>
<glossdef>
<para>
The list of packages that conflict with another package.
@@ -6687,7 +7905,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
them in conjunction with a package name override.
Here is an example:
<literallayout class='monospaced'>
- RCONFLICTS_${PN} = "another-conflicting-package-name"
+ RCONFLICTS_${PN} = "<replaceable>another-conflicting-package-name</replaceable>"
</literallayout>
</para>
@@ -6699,7 +7917,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
Here is the general syntax to specify versions with
the <filename>RCONFLICTS</filename> variable:
<literallayout class='monospaced'>
- RCONFLICTS_${PN} = "&lt;package&gt; (&lt;operator&gt; &lt;version&gt;)"
+ RCONFLICTS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
</literallayout>
For <filename>operator</filename>, you can specify the
following:
@@ -6719,7 +7937,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-RDEPENDS'><glossterm>RDEPENDS</glossterm>
+ <glossentry id='var-RDEPENDS'><glossterm><imagedata fileref="figures/define-generic.png" />RDEPENDS</glossterm>
+ <info>
+ RDEPENDS[doc] = "Lists a package's runtime dependencies (i.e. other packages) that must be installed in order for the built package to work correctly. They must be the names of other packages as listed in the PACKAGES variable, not recipe names (PN)."
+ </info>
<glossdef>
<para>
Lists a package's runtime dependencies (i.e. other packages)
@@ -6791,7 +8012,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
as it would in the <filename>PACKAGES</filename>
namespace before any renaming of the output package by
classes like
- <link linkend='ref-classes-debian'><filename>debian.bbclass</filename></link>.
+ <link linkend='ref-classes-debian'><filename>debian</filename></link>.
</para>
<para>
@@ -6825,7 +8046,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
Here is the general syntax to specify versions with
the <filename>RDEPENDS</filename> variable:
<literallayout class='monospaced'>
- RDEPENDS_${PN} = "&lt;package&gt; (&lt;operator&gt; &lt;version&gt;)"
+ RDEPENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
</literallayout>
For <filename>operator</filename>, you can specify the
following:
@@ -6851,11 +8072,15 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-REQUIRED_DISTRO_FEATURES'><glossterm>REQUIRED_DISTRO_FEATURES</glossterm>
+ <glossentry id='var-REQUIRED_DISTRO_FEATURES'><glossterm><imagedata fileref="figures/define-generic.png" />REQUIRED_DISTRO_FEATURES</glossterm>
+ <info>
+ REQUIRED_DISTRO_FEATURES[doc] = "When a recipe inherits the distro_features_check class, this variable identifies distribution features that must exist in the current configuration in order for the OpenEmbedded build system to build the recipe."
+ </info>
<glossdef>
<para>
- When a recipe inherits the
- <filename>distro_features_check</filename> class, this
+ When inheriting the
+ <link linkend='ref-classes-distro_features_check'><filename>distro_features_check</filename></link>
+ class, this
variable identifies distribution features that must
exist in the current configuration in order for the
OpenEmbedded build system to build the recipe.
@@ -6869,7 +8094,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-RM_OLD_IMAGE'><glossterm>RM_OLD_IMAGE</glossterm>
+ <glossentry id='var-RM_OLD_IMAGE'><glossterm><imagedata fileref="figures/define-generic.png" />RM_OLD_IMAGE</glossterm>
+ <info>
+ RM_OLD_IMAGE[doc] = "Reclaims disk space by removing previously built versions of the same image from the images directory pointed to by the DEPLOY_DIR variable."
+ </info>
<glossdef>
<para>
Reclaims disk space by removing previously built
@@ -6887,7 +8115,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-RM_WORK_EXCLUDE'><glossterm>RM_WORK_EXCLUDE</glossterm>
+ <glossentry id='var-RM_WORK_EXCLUDE'><glossterm><imagedata fileref="figures/define-generic.png" />RM_WORK_EXCLUDE</glossterm>
+ <info>
+ RM_WORK_EXCLUDE[doc] = "With rm_work enabled, this variable specifies a list of packages whose work directories should not be removed."
+ </info>
<glossdef>
<para>
With <filename>rm_work</filename> enabled, this
@@ -6899,7 +8130,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-ROOT_HOME'><glossterm>ROOT_HOME</glossterm>
+ <glossentry id='var-ROOT_HOME'><glossterm><imagedata fileref="figures/define-generic.png" />ROOT_HOME</glossterm>
+ <info>
+ ROOT_HOME[doc] = "Defines the root home directory."
+ </info>
<glossdef>
<para>
Defines the root home directory.
@@ -6932,7 +8166,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-ROOTFS'><glossterm>ROOTFS</glossterm>
+ <glossentry id='var-ROOTFS'><glossterm><imagedata fileref="figures/define-generic.png" />ROOTFS</glossterm>
+ <info>
+ ROOTFS[doc] = "Indicates a filesystem image to include as the root filesystem."
+ </info>
<glossdef>
<para>
Indicates a filesystem image to include as the root
@@ -6948,14 +8185,17 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-ROOTFS_POSTPROCESS_COMMAND'><glossterm>ROOTFS_POSTPROCESS_COMMAND</glossterm>
+ <glossentry id='var-ROOTFS_POSTPROCESS_COMMAND'><glossterm><imagedata fileref="figures/define-generic.png" />ROOTFS_POSTPROCESS_COMMAND</glossterm>
+ <info>
+ ROOTFS_POSTPROCESS_COMMAND[doc] = "Added by classes to run post processing commands once the OpenEmbedded build system has created the root filesystem."
+ </info>
<glossdef>
<para>
Added by classes to run post processing commands once the
OpenEmbedded build system has created the root filesystem.
You can specify shell commands separated by semicolons:
<literallayout class='monospaced'>
- ROOTFS_POSTPROCESS_COMMAND += "&lt;shell_command&gt;; ... "
+ ROOTFS_POSTPROCESS_COMMAND += "<replaceable>shell_command</replaceable>; ... "
</literallayout>
If you need to pass the path to the root filesystem within
the command, you can use
@@ -6968,7 +8208,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-RPROVIDES'><glossterm>RPROVIDES</glossterm>
+ <glossentry id='var-RPROVIDES'><glossterm><imagedata fileref="figures/define-generic.png" />RPROVIDES</glossterm>
+ <info>
+ RPROVIDES[doc] = "A list of package name aliases that a package also provides. These aliases are useful for satisfying runtime dependencies of other packages both during the build and on the target."
+ </info>
<glossdef>
<para>
A list of package name aliases that a package also provides.
@@ -6992,37 +8235,49 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-RRECOMMENDS'><glossterm>RRECOMMENDS</glossterm>
+ <glossentry id='var-RRECOMMENDS'><glossterm><imagedata fileref="figures/define-generic.png" />RRECOMMENDS</glossterm>
+ <info>
+ RRECOMMENDS[doc] = "A list of packages that extends the usability of a package being built. The package being built does not depend on this list of packages in order to successfully build, but needs them for the extended usability."
+ </info>
<glossdef>
<para>
A list of packages that extends the usability of a package
being built.
The package being built does not depend on this list of
- packages in order to successfully build, but needs them for
- the extended usability.
+ packages in order to successfully build, but rather
+ uses them for extended usability.
To specify runtime dependencies for packages, see the
<filename><link linkend='var-RDEPENDS'>RDEPENDS</link></filename>
variable.
</para>
<para>
- The OpenEmbedded build process automatically installs the
- list of packages as part of the built package.
- However, you can remove these packages later if you want.
- If, during the build, a package from the
- <filename>RRECOMMENDS</filename> list cannot be
- found, the build process continues without an error.
- </para>
-
- <para>
- You can also prevent packages in the list from being
- installed by using several variables.
- See the
+ The package manager will automatically install the
+ <filename>RRECOMMENDS</filename> list of packages when
+ installing the built package.
+ However, you can prevent listed packages from being
+ installed by using the
<link linkend='var-BAD_RECOMMENDATIONS'><filename>BAD_RECOMMENDATIONS</filename></link>,
<link linkend='var-NO_RECOMMENDATIONS'><filename>NO_RECOMMENDATIONS</filename></link>,
and
<link linkend='var-PACKAGE_EXCLUDE'><filename>PACKAGE_EXCLUDE</filename></link>
- variables for more information.
+ variables.
+ </para>
+
+ <para>
+ Packages specified in
+ <filename>RRECOMMENDS</filename> need not actually be
+ produced.
+ However, a recipe must exist that provides each package,
+ either through the
+ <link linkend='var-PACKAGES'><filename>PACKAGES</filename></link>
+ or
+ <link linkend='var-PACKAGES_DYNAMIC'><filename>PACKAGES_DYNAMIC</filename></link>
+ variables or the
+ <link linkend='var-RPROVIDES'><filename>RPROVIDES</filename></link>
+ variable, or an error will occur during the build.
+ If such a recipe does exist and the package is not produced,
+ the build continues without error.
</para>
<para>
@@ -7034,7 +8289,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
that is extended to support wireless functionality.
In this case, you would use the following:
<literallayout class='monospaced'>
- RRECOMMENDS_${PN}-dev += "&lt;wireless_package_name&gt;"
+ RRECOMMENDS_${PN}-dev += "<replaceable>wireless_package_name</replaceable>"
</literallayout>
In the example, the package name
(<filename>${<link linkend='var-PN'>PN</link>}-dev</filename>)
@@ -7052,7 +8307,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
Here is the general syntax to specify versions with
the <filename>RRECOMMENDS</filename> variable:
<literallayout class='monospaced'>
- RRECOMMENDS_${PN} = "&lt;package&gt; (&lt;operator&gt; &lt;version&gt;)"
+ RRECOMMENDS_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
</literallayout>
For <filename>operator</filename>, you can specify the
following:
@@ -7072,7 +8327,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-RREPLACES'><glossterm>RREPLACES</glossterm>
+ <glossentry id='var-RREPLACES'><glossterm><imagedata fileref="figures/define-generic.png" />RREPLACES</glossterm>
+ <info>
+ RREPLACES[doc] = "A list of packages replaced by a package. The package manager uses this variable to determine which package should be installed to replace other package(s) during an upgrade."
+ </info>
<glossdef>
<para>
A list of packages replaced by a package.
@@ -7090,7 +8348,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
override.
Here is an example:
<literallayout class='monospaced'>
- RREPLACES_${PN} = "other-package-being-replaced"
+ RREPLACES_${PN} = "<replaceable>other-package-being-replaced</replaceable>"
</literallayout>
</para>
@@ -7102,7 +8360,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
Here is the general syntax to specify versions with
the <filename>RREPLACES</filename> variable:
<literallayout class='monospaced'>
- RREPLACES_${PN} = "&lt;package&gt; (&lt;operator&gt; &lt;version&gt;)"
+ RREPLACES_${PN} = "<replaceable>package</replaceable> (<replaceable>operator</replaceable> <replaceable>version</replaceable>)"
</literallayout>
For <filename>operator</filename>, you can specify the
following:
@@ -7123,7 +8381,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-RSUGGESTS'><glossterm>RSUGGESTS</glossterm>
+ <glossentry id='var-RSUGGESTS'><glossterm><imagedata fileref="figures/define-generic.png" />RSUGGESTS</glossterm>
+ <info>
+ RSUGGESTS[doc] = "A list of additional packages that you can suggest for installation by the package manager at the time a package is installed. Not all package managers support this functionality."
+ </info>
<glossdef>
<para>
A list of additional packages that you can suggest for
@@ -7137,7 +8398,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
override.
Here is an example:
<literallayout class='monospaced'>
- RSUGGESTS_${PN} = "useful-package another-package"
+ RSUGGESTS_${PN} = "<replaceable>useful-package</replaceable> <replaceable>another-package</replaceable>"
</literallayout>
</para>
</glossdef>
@@ -7147,7 +8408,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-s'><title>S</title>
- <glossentry id='var-S'><glossterm>S</glossterm>
+ <glossentry id='var-S'><glossterm><imagedata fileref="figures/define-generic.png" />S</glossterm>
+ <info>
+ S[doc] = "The location in the Build Directory where unpacked package source code resides."
+ </info>
<glossdef>
<para>
The location in the
@@ -7178,7 +8442,25 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SANITY_TESTED_DISTROS'><glossterm>SANITY_TESTED_DISTROS</glossterm>
+ <glossentry id='var-SANITY_REQUIRED_UTILITIES'><glossterm><imagedata fileref="figures/define-generic.png" />SANITY_REQUIRED_UTILITIES</glossterm>
+ <info>
+ SANITY_REQUIRED_UTILITIES[doc] = "Specifies a list of command-line utilities that should be checked for during the initial sanity checking process when running BitBake."
+ </info>
+ <glossdef>
+ <para>
+ Specifies a list of command-line utilities that should be
+ checked for during the initial sanity checking process when
+ running BitBake.
+ If any of the utilities are not installed on the build host,
+ then BitBake immediately exits with an error.
+ </para>
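+
+ <para>
+ For example, a distribution or local configuration could
+ extend the default list of required utilities (the
+ utilities named here are illustrative only):
+ <literallayout class='monospaced'>
+ SANITY_REQUIRED_UTILITIES_append = " makeinfo rpcgen"
+ </literallayout>
+ </para>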
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-SANITY_TESTED_DISTROS'><glossterm><imagedata fileref="figures/define-generic.png" />SANITY_TESTED_DISTROS</glossterm>
+ <info>
+ SANITY_TESTED_DISTROS[doc] = "A list of the host distribution identifiers that the build system has been tested against."
+ </info>
<glossdef>
<para>
A list of the host distribution identifiers that the
@@ -7199,7 +8481,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SDK_ARCH'><glossterm>SDK_ARCH</glossterm>
+ <glossentry id='var-SDK_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />SDK_ARCH</glossterm>
+ <info>
+ SDK_ARCH[doc] = "The target architecture for the SDK."
+ </info>
<glossdef>
<para>
The target architecture for the SDK.
@@ -7210,7 +8495,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SDK_DEPLOY'><glossterm>SDK_DEPLOY</glossterm>
+ <glossentry id='var-SDK_DEPLOY'><glossterm><imagedata fileref="figures/define-generic.png" />SDK_DEPLOY</glossterm>
+ <info>
+ SDK_DEPLOY[doc] = "The directory set up and used by the populate_sdk_base class to which the SDK is deployed."
+ </info>
<glossdef>
<para>
The directory set up and used by the
@@ -7225,7 +8513,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SDK_DIR'><glossterm>SDK_DIR</glossterm>
+ <glossentry id='var-SDK_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />SDK_DIR</glossterm>
+ <info>
+ SDK_DIR[doc] = "The parent directory used by the OpenEmbedded build system when creating SDK output."
+ </info>
<glossdef>
<para>
The parent directory used by the OpenEmbedded build system
@@ -7247,7 +8538,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SDK_NAME'><glossterm>SDK_NAME</glossterm>
+ <glossentry id='var-SDK_NAME'><glossterm><imagedata fileref="figures/define-generic.png" />SDK_NAME</glossterm>
+ <info>
+ SDK_NAME[doc] = "The base name for SDK output files."
+ </info>
<glossdef>
<para>
The base name for SDK output files.
@@ -7266,7 +8560,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SDK_OUTPUT'><glossterm>SDK_OUTPUT</glossterm>
+ <glossentry id='var-SDK_OUTPUT'><glossterm><imagedata fileref="figures/define-generic.png" />SDK_OUTPUT</glossterm>
+ <info>
+ SDK_OUTPUT[doc] = "The location used by the OpenEmbedded build system when creating SDK output."
+ </info>
<glossdef>
<para>
The location used by the OpenEmbedded build system when
@@ -7290,7 +8587,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SDK_PACKAGE_ARCHS'><glossterm>SDK_PACKAGE_ARCHS</glossterm>
+ <glossentry id='var-SDK_PACKAGE_ARCHS'><glossterm><imagedata fileref="figures/define-generic.png" />SDK_PACKAGE_ARCHS</glossterm>
+ <info>
+ SDK_PACKAGE_ARCHS[doc] = "Specifies a list of architectures compatible with the SDK machine. This variable is set automatically and should not normally be hand-edited."
+ </info>
<glossdef>
<para>
Specifies a list of architectures compatible with
@@ -7306,20 +8606,26 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SDKIMAGE_FEATURES'><glossterm>SDKIMAGE_FEATURES</glossterm>
+ <glossentry id='var-SDKIMAGE_FEATURES'><glossterm><imagedata fileref="figures/define-generic.png" />SDKIMAGE_FEATURES</glossterm>
+ <info>
+ SDKIMAGE_FEATURES[doc] = "Equivalent to IMAGE_FEATURES. However, this variable applies to the SDK generated from an image using the command 'bitbake -c populate_sdk imagename'."
+ </info>
<glossdef>
<para>Equivalent to
<filename><link linkend='var-IMAGE_FEATURES'>IMAGE_FEATURES</link></filename>.
However, this variable applies to the SDK generated from an
image using the following command:
<literallayout class='monospaced'>
- $ bitbake -c populate_sdk imagename
+ $ bitbake -c populate_sdk <replaceable>imagename</replaceable>
</literallayout>
</para>
</glossdef>
</glossentry>
- <glossentry id='var-SDKMACHINE'><glossterm>SDKMACHINE</glossterm>
+ <glossentry id='var-SDKMACHINE'><glossterm><imagedata fileref="figures/define-generic.png" />SDKMACHINE</glossterm>
+ <info>
+ SDKMACHINE[doc] = "Specifies the architecture (e.g. i686 or x86_64) for which to build SDK and ADT items."
+ </info>
<glossdef>
<para>
The machine for which the Application Development Toolkit
@@ -7348,7 +8654,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SDKPATH'><glossterm>SDKPATH</glossterm>
+ <glossentry id='var-SDKPATH'><glossterm><imagedata fileref="figures/define-generic.png" />SDKPATH</glossterm>
+ <info>
+ SDKPATH[doc] = "Defines the path offered to the user for installation of the SDK that is generated by the OpenEmbedded build system."
+ </info>
<glossdef>
<para>
Defines the path offered to the user for installation
@@ -7362,14 +8671,20 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SECTION'><glossterm>SECTION</glossterm>
+ <glossentry id='var-SECTION'><glossterm><imagedata fileref="figures/define-generic.png" />SECTION</glossterm>
+ <info>
+ SECTION[doc] = "The section in which packages should be categorized. Package management utilities can make use of this variable."
+ </info>
<glossdef>
<para>The section in which packages should be categorized.
Package management utilities can make use of this variable.</para>
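+ <para>
+ For example, a library recipe might use the following
+ (an illustrative value):
+ <literallayout class='monospaced'>
+ SECTION = "libs"
+ </literallayout>
+ </para>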
</glossdef>
</glossentry>
- <glossentry id='var-SELECTED_OPTIMIZATION'><glossterm>SELECTED_OPTIMIZATION</glossterm>
+ <glossentry id='var-SELECTED_OPTIMIZATION'><glossterm><imagedata fileref="figures/define-generic.png" />SELECTED_OPTIMIZATION</glossterm>
+ <info>
+ SELECTED_OPTIMIZATION[doc] = "The variable takes the value of FULL_OPTIMIZATION unless DEBUG_BUILD = '1'. In this case, the value of DEBUG_OPTIMIZATION is used."
+ </info>
<glossdef>
<para>
Specifies the optimization flags passed to the C compiler
@@ -7390,7 +8705,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SERIAL_CONSOLE'><glossterm>SERIAL_CONSOLE</glossterm>
+ <glossentry id='var-SERIAL_CONSOLE'><glossterm><imagedata fileref="figures/define-generic.png" />SERIAL_CONSOLE</glossterm>
+ <info>
+ SERIAL_CONSOLE[doc] = "The speed and device for the serial port used to attach the serial console. This variable is given to the kernel as the 'console' parameter. After booting occurs, getty is started on that port so remote login is possible."
+ </info>
<glossdef>
<para>
Defines a serial console (TTY) to enable using getty.
@@ -7411,7 +8729,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SERIAL_CONSOLES'><glossterm>SERIAL_CONSOLES</glossterm>
+ <glossentry id='var-SERIAL_CONSOLES'><glossterm><imagedata fileref="figures/define-generic.png" />SERIAL_CONSOLES</glossterm>
+ <info>
+ SERIAL_CONSOLES[doc] = "Defines the serial consoles (TTYs) to enable using getty."
+ </info>
<glossdef>
<para>
Defines the serial consoles (TTYs) to enable using getty.
@@ -7425,7 +8746,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SERIAL_CONSOLES_CHECK'><glossterm>SERIAL_CONSOLES_CHECK</glossterm>
+ <glossentry id='var-SERIAL_CONSOLES_CHECK'><glossterm><imagedata fileref="figures/define-generic.png" />SERIAL_CONSOLES_CHECK</glossterm>
+ <info>
+ SERIAL_CONSOLES_CHECK[doc] = "Similar to SERIAL_CONSOLES except the device is checked for existence before attempting to enable it. Supported only by SysVinit."
+ </info>
<glossdef>
<para>
Similar to
@@ -7438,7 +8762,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS'><glossterm>SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS</glossterm>
+ <glossentry id='var-SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS'><glossterm><imagedata fileref="figures/define-generic.png" />SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS</glossterm>
+ <info>
+ SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS[doc] = "A list of recipe dependencies that should not be used to determine signatures of tasks from one recipe when they depend on tasks from another recipe."
+ </info>
<glossdef>
<para>
A list of recipe dependencies that should not be used to
@@ -7466,7 +8793,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SIGGEN_EXCLUDERECIPES_ABISAFE'><glossterm>SIGGEN_EXCLUDERECIPES_ABISAFE</glossterm>
+ <glossentry id='var-SIGGEN_EXCLUDERECIPES_ABISAFE'><glossterm><imagedata fileref="figures/define-generic.png" />SIGGEN_EXCLUDERECIPES_ABISAFE</glossterm>
+ <info>
+ SIGGEN_EXCLUDERECIPES_ABISAFE[doc] = "A list of recipes that are completely stable and will never change."
+ </info>
<glossdef>
<para>
A list of recipes that are completely stable and will
@@ -7486,7 +8816,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SITEINFO_BITS'><glossterm>SITEINFO_BITS</glossterm>
+ <glossentry id='var-SITEINFO_BITS'><glossterm><imagedata fileref="figures/define-generic.png" />SITEINFO_BITS</glossterm>
+ <info>
+ SITEINFO_BITS[doc] = "Specifies the number of bits for the target system CPU."
+ </info>
<glossdef>
<para>
Specifies the number of bits for the target system CPU.
@@ -7495,7 +8828,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SITEINFO_ENDIANNESS'><glossterm>SITEINFO_ENDIANNESS</glossterm>
+ <glossentry id='var-SITEINFO_ENDIANNESS'><glossterm><imagedata fileref="figures/define-generic.png" />SITEINFO_ENDIANNESS</glossterm>
+ <info>
+ SITEINFO_ENDIANNESS[doc] = "Specifies the endian byte order of the target system. The value should be either 'le' for 'little-endian' or 'be' for 'big-endian'."
+ </info>
<glossdef>
<para>
Specifies the endian byte order of the target system.
@@ -7504,7 +8840,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SOC_FAMILY'><glossterm>SOC_FAMILY</glossterm>
+ <glossentry id='var-SOC_FAMILY'><glossterm><imagedata fileref="figures/define-generic.png" />SOC_FAMILY</glossterm>
+ <info>
+ SOC_FAMILY[doc] = "Groups together machines based upon the same family of SOC (System On Chip). You typically set this variable in a common .inc file that you include in the configuration files of all the machines."
+ </info>
<glossdef>
<para>
Groups together machines based upon the same family
@@ -7522,7 +8861,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
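            <para>
                As a sketch, a common include file shared by several machines
                built around a hypothetical "xyz1000" SoC might simply contain:
                <literallayout class='monospaced'>
     SOC_FAMILY = "xyz1000"
                </literallayout>
                Each machine configuration file then includes that common
                <filename>.inc</filename> file.
            </para>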
- <glossentry id='var-SOLIBS'><glossterm>SOLIBS</glossterm>
+ <glossentry id='var-SOLIBS'><glossterm><imagedata fileref="figures/define-generic.png" />SOLIBS</glossterm>
+ <info>
+ SOLIBS[doc] = "Defines the suffix for shared libraries used on the target platform."
+ </info>
<glossdef>
<para>
Defines the suffix for shared libraries used on the
@@ -7540,7 +8882,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SOLIBSDEV'><glossterm>SOLIBSDEV</glossterm>
+ <glossentry id='var-SOLIBSDEV'><glossterm><imagedata fileref="figures/define-generic.png" />SOLIBSDEV</glossterm>
+ <info>
+ SOLIBSDEV[doc] = "Defines the suffix for the development symbolic link (symlink) for shared libraries on the target platform."
+ </info>
<glossdef>
<para>
Defines the suffix for the development symbolic link
@@ -7558,7 +8903,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
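            <para>
                On typical ELF-based Linux targets these two variables usually
                carry the familiar shared library suffixes, for example:
                <literallayout class='monospaced'>
     SOLIBS = ".so.*"
     SOLIBSDEV = ".so"
                </literallayout>
            </para>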
- <glossentry id='var-SOURCE_MIRROR_URL'><glossterm>SOURCE_MIRROR_URL</glossterm>
+ <glossentry id='var-SOURCE_MIRROR_URL'><glossterm><imagedata fileref="figures/define-generic.png" />SOURCE_MIRROR_URL</glossterm>
+ <info>
+ SOURCE_MIRROR_URL[doc] = "URL of a source mirror that will be used before fetching from the original SRC_URI."
+ </info>
<glossdef>
<para>
Defines your own
@@ -7585,7 +8933,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
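            <para>
                A minimal sketch, assuming a hypothetical mirror server and use of
                the <filename>own-mirrors</filename> class:
                <literallayout class='monospaced'>
     INHERIT += "own-mirrors"
     SOURCE_MIRROR_URL = "http://example.com/my_source_mirror"
                </literallayout>
            </para>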
- <glossentry id='var-SPDXLICENSEMAP'><glossterm>SPDXLICENSEMAP</glossterm>
+ <glossentry id='var-SPDXLICENSEMAP'><glossterm><imagedata fileref="figures/define-generic.png" />SPDXLICENSEMAP</glossterm>
+ <info>
+ SPDXLICENSEMAP[doc] = "Maps commonly used license names to their SPDX counterparts found in meta/files/common-licenses/."
+ </info>
<glossdef>
<para>
Maps commonly used license names to their SPDX counterparts
@@ -7603,7 +8954,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
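            <para>
                Mappings are stored as variable flags.
                For example, mapping the commonly used name "GPLv2" to its SPDX
                counterpart might look like:
                <literallayout class='monospaced'>
     SPDXLICENSEMAP[GPLv2] = "GPL-2.0"
                </literallayout>
            </para>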
- <glossentry id='var-SPECIAL_PKGSUFFIX'><glossterm>SPECIAL_PKGSUFFIX</glossterm>
+ <glossentry id='var-SPECIAL_PKGSUFFIX'><glossterm><imagedata fileref="figures/define-generic.png" />SPECIAL_PKGSUFFIX</glossterm>
+ <info>
+ SPECIAL_PKGSUFFIX[doc] = "A list of prefixes for PN used by the OpenEmbedded build system to create variants of recipes or packages. The list specifies the prefixes to strip off under certain circumstances such as the generation of the BPN variable."
+ </info>
<glossdef>
<para>
A list of prefixes for <link linkend='var-PN'><filename>PN</filename></link> used by the
@@ -7614,7 +8968,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SRC_URI'><glossterm>SRC_URI</glossterm>
+ <glossentry id='var-SRC_URI'><glossterm><imagedata fileref="figures/define-generic.png" />SRC_URI</glossterm>
+ <info>
+ SRC_URI[doc] = "The list of source files - local or remote. This variable tells the OpenEmbedded build system what bits to pull in for the build and how to pull them in."
+ </info>
<glossdef>
<para>The list of source files - local or remote.
This variable tells the OpenEmbedded build system which bits
@@ -7766,9 +9123,11 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
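            <para>
                As a simple sketch, a recipe fetching a hypothetical release
                tarball plus a local patch might use:
                <literallayout class='monospaced'>
     SRC_URI = "http://example.com/hello-${PV}.tar.gz \
                file://fix-build.patch"
                </literallayout>
            </para>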
- <glossentry id='var-SRC_URI_OVERRIDES_PACKAGE_ARCH'><glossterm>SRC_URI_OVERRIDES_PACKAGE_ARCH</glossterm>
+ <glossentry id='var-SRC_URI_OVERRIDES_PACKAGE_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />SRC_URI_OVERRIDES_PACKAGE_ARCH</glossterm>
+ <info>
+ SRC_URI_OVERRIDES_PACKAGE_ARCH[doc] = "By default, the OpenEmbedded build system automatically detects whether SRC_URI contains files that are machine-specific. If so, the build system automatically changes PACKAGE_ARCH. Setting this variable to '0' disables this behavior."
+ </info>
<glossdef>
- <para></para>
<para>
By default, the OpenEmbedded build system automatically detects whether
<filename><link linkend='var-SRC_URI'>SRC_URI</link></filename>
@@ -7780,7 +9139,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SRCDATE'><glossterm>SRCDATE</glossterm>
+ <glossentry id='var-SRCDATE'><glossterm><imagedata fileref="figures/define-generic.png" />SRCDATE</glossterm>
+ <info>
+ SRCDATE[doc] = "The date of the source code used to build the package. This variable applies only if the source was fetched from a Source Code Manager (SCM)."
+ </info>
<glossdef>
<para>
The date of the source code used to build the package.
@@ -7789,7 +9151,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SRCPV'><glossterm>SRCPV</glossterm>
+ <glossentry id='var-SRCPV'><glossterm><imagedata fileref="figures/define-generic.png" />SRCPV</glossterm>
+ <info>
+ SRCPV[doc] = "Returns the version string of the current package. This string is used to help define the value of PV."
+ </info>
<glossdef>
<para>
Returns the version string of the current package.
@@ -7823,7 +9188,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SRCREV'><glossterm>SRCREV</glossterm>
+ <glossentry id='var-SRCREV'><glossterm><imagedata fileref="figures/define-generic.png" />SRCREV</glossterm>
+ <info>
+ SRCREV[doc] = "The revision of the source code used to build the package. This variable applies to Subversion, Git, Mercurial and Bazaar only."
+ </info>
<glossdef>
<para>
The revision of the source code used to build the package.
@@ -7837,13 +9205,19 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
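            <para>
                For example, a Git-based recipe might either pin a specific
                revision hash or track the latest revision by combining
                <filename>AUTOREV</filename> with
                <link linkend='var-SRCPV'><filename>SRCPV</filename></link>:
                <literallayout class='monospaced'>
     SRCREV = "${AUTOREV}"
     PV = "1.0+git${SRCPV}"
                </literallayout>
            </para>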
- <glossentry id='var-SSTATE_DIR'><glossterm>SSTATE_DIR</glossterm>
+ <glossentry id='var-SSTATE_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />SSTATE_DIR</glossterm>
+ <info>
+ SSTATE_DIR[doc] = "The directory for the shared state cache."
+ </info>
<glossdef>
<para>The directory for the shared state cache.</para>
</glossdef>
</glossentry>
- <glossentry id='var-SSTATE_MIRROR_ALLOW_NETWORK'><glossterm>SSTATE_MIRROR_ALLOW_NETWORK</glossterm>
+ <glossentry id='var-SSTATE_MIRROR_ALLOW_NETWORK'><glossterm><imagedata fileref="figures/define-generic.png" />SSTATE_MIRROR_ALLOW_NETWORK</glossterm>
+ <info>
+ SSTATE_MIRROR_ALLOW_NETWORK[doc] = "If set to "1", allows fetches from mirrors that are specified in SSTATE_MIRRORS to work even when fetching from the network has been disabled by setting BB_NO_NETWORK to "1"."
+ </info>
<glossdef>
<para>
If set to "1", allows fetches from
@@ -7862,7 +9236,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
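            <para>
                A sketch of a configuration that disables general network access
                but still permits fetching shared state from a hypothetical HTTP
                mirror:
                <literallayout class='monospaced'>
     BB_NO_NETWORK = "1"
     SSTATE_MIRRORS ?= "file://.* http://example.com/share/sstate/PATH"
     SSTATE_MIRROR_ALLOW_NETWORK = "1"
                </literallayout>
            </para>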
- <glossentry id='var-SSTATE_MIRRORS'><glossterm>SSTATE_MIRRORS</glossterm>
+ <glossentry id='var-SSTATE_MIRRORS'><glossterm><imagedata fileref="figures/define-generic.png" />SSTATE_MIRRORS</glossterm>
+ <info>
+ SSTATE_MIRRORS[doc] = "Configures the OpenEmbedded build system to search other mirror locations for prebuilt cache data objects before building out the data. You can specify a filesystem directory or a remote URL such as HTTP or FTP."
+ </info>
<glossdef>
<para>
Configures the OpenEmbedded build system to search other
@@ -7893,14 +9270,17 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
directory structure.
<literallayout class='monospaced'>
SSTATE_MIRRORS ?= "\
- file://.* http://someserver.tld/share/sstate/PATH \n \
- file://.* file:///some/local/dir/sstate/PATH"
+ file://.* http://<replaceable>someserver</replaceable>.tld/share/sstate/PATH \n \
+ file://.* file:///<replaceable>some-local-dir</replaceable>/sstate/PATH"
</literallayout>
</para>
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_BASE_LIBDIR_NATIVE'><glossterm>STAGING_BASE_LIBDIR_NATIVE</glossterm>
+ <glossentry id='var-STAGING_BASE_LIBDIR_NATIVE'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_BASE_LIBDIR_NATIVE</glossterm>
+ <info>
+ STAGING_BASE_LIBDIR_NATIVE[doc] = "Specifies the path to the /lib subdirectory of the sysroot directory for the build host."
+ </info>
<glossdef>
<para>
Specifies the path to the <filename>/lib</filename>
@@ -7910,7 +9290,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_BASELIBDIR'><glossterm>STAGING_BASELIBDIR</glossterm>
+ <glossentry id='var-STAGING_BASELIBDIR'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_BASELIBDIR</glossterm>
+ <info>
+ STAGING_BASELIBDIR[doc] = "Specifies the path to the /lib subdirectory of the sysroot directory for the target for which the current recipe is being built (STAGING_DIR_HOST)."
+ </info>
<glossdef>
<para>
Specifies the path to the <filename>/lib</filename>
@@ -7921,7 +9304,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_BINDIR'><glossterm>STAGING_BINDIR</glossterm>
+ <glossentry id='var-STAGING_BINDIR'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_BINDIR</glossterm>
+ <info>
+ STAGING_BINDIR[doc] = "Specifies the path to the /usr/bin subdirectory of the sysroot directory for the target for which the current recipe is being built (STAGING_DIR_HOST)."
+ </info>
<glossdef>
<para>
Specifies the path to the
@@ -7933,7 +9319,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_BINDIR_CROSS'><glossterm>STAGING_BINDIR_CROSS</glossterm>
+ <glossentry id='var-STAGING_BINDIR_CROSS'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_BINDIR_CROSS</glossterm>
+ <info>
+ STAGING_BINDIR_CROSS[doc] = "Specifies the path to the directory containing binary configuration scripts. These scripts provide configuration information for other software that wants to make use of libraries or include files provided by the software associated with the script."
+ </info>
<glossdef>
<para>
Specifies the path to the directory containing binary
@@ -7955,7 +9344,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_BINDIR_NATIVE'><glossterm>STAGING_BINDIR_NATIVE</glossterm>
+ <glossentry id='var-STAGING_BINDIR_NATIVE'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_BINDIR_NATIVE</glossterm>
+ <info>
+ STAGING_BINDIR_NATIVE[doc] = "Specifies the path to the /usr/bin subdirectory of the sysroot directory for the build host."
+ </info>
<glossdef>
<para>
Specifies the path to the
@@ -7965,7 +9357,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_DATADIR'><glossterm>STAGING_DATADIR</glossterm>
+ <glossentry id='var-STAGING_DATADIR'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_DATADIR</glossterm>
+ <info>
+ STAGING_DATADIR[doc] = "Specifies the path to the /usr/share subdirectory of the sysroot directory for the target for which the current recipe is being built (STAGING_DIR_HOST)."
+ </info>
<glossdef>
<para>
Specifies the path to the <filename>/usr/share</filename>
@@ -7976,7 +9371,22 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_DIR'><glossterm>STAGING_DIR</glossterm>
+ <glossentry id='var-STAGING_DATADIR_NATIVE'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_DATADIR_NATIVE</glossterm>
+ <info>
+ STAGING_DATADIR_NATIVE[doc] = "Specifies the path to the /usr/share subdirectory of the sysroot directory for the build host."
+ </info>
+ <glossdef>
+ <para>
+ Specifies the path to the <filename>/usr/share</filename>
+ subdirectory of the sysroot directory for the build host.
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-STAGING_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_DIR</glossterm>
+ <info>
+ STAGING_DIR[doc] = "Specifies the path to the top-level sysroots directory (i.e. ${TMPDIR}/sysroots)."
+ </info>
<glossdef>
<para>
Specifies the path to the top-level sysroots directory
@@ -7997,7 +9407,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_DIR_HOST'><glossterm>STAGING_DIR_HOST</glossterm>
+ <glossentry id='var-STAGING_DIR_HOST'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_DIR_HOST</glossterm>
+ <info>
+ STAGING_DIR_HOST[doc] = "Specifies the path to the primary sysroot directory for which the target is being built."
+ </info>
<glossdef>
<para>
Specifies the path to the primary sysroot directory for
@@ -8023,18 +9436,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_DATADIR_NATIVE'><glossterm>STAGING_DATADIR_NATIVE</glossterm>
- <glossdef>
- <para>
- Specifies the path to the <filename>/usr/share</filename>
- subdirectory of the sysroot directory for the build host.
- </para>
- </glossdef>
- </glossentry>
-
-
-
- <glossentry id='var-STAGING_DIR_NATIVE'><glossterm>STAGING_DIR_NATIVE</glossterm>
+ <glossentry id='var-STAGING_DIR_NATIVE'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_DIR_NATIVE</glossterm>
+ <info>
+ STAGING_DIR_NATIVE[doc] = "Specifies the path to the sysroot directory for the build host."
+ </info>
<glossdef>
<para>
Specifies the path to the sysroot directory for the
@@ -8043,7 +9448,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_DIR_TARGET'><glossterm>STAGING_DIR_TARGET</glossterm>
+ <glossentry id='var-STAGING_DIR_TARGET'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_DIR_TARGET</glossterm>
+ <info>
+ STAGING_DIR_TARGET[doc] = "Specifies the path to the sysroot directory for the target for which the current recipe is being built."
+ </info>
<glossdef>
<para>
Specifies the path to the sysroot directory for the
@@ -8067,7 +9475,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_ETCDIR_NATIVE'><glossterm>STAGING_ETCDIR_NATIVE</glossterm>
+ <glossentry id='var-STAGING_ETCDIR_NATIVE'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_ETCDIR_NATIVE</glossterm>
+ <info>
+ STAGING_ETCDIR_NATIVE[doc] = "Specifies the path to the /etc subdirectory of the sysroot directory for the build host."
+ </info>
<glossdef>
<para>
Specifies the path to the <filename>/etc</filename>
@@ -8077,7 +9488,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_EXECPREFIXDIR'><glossterm>STAGING_EXECPREFIXDIR</glossterm>
+ <glossentry id='var-STAGING_EXECPREFIXDIR'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_EXECPREFIXDIR</glossterm>
+ <info>
+ STAGING_EXECPREFIXDIR[doc] = "Specifies the path to the /usr subdirectory of the sysroot directory for the target for which the current recipe is being built (STAGING_DIR_HOST)."
+ </info>
<glossdef>
<para>
Specifies the path to the <filename>/usr</filename>
@@ -8088,7 +9502,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_INCDIR'><glossterm>STAGING_INCDIR</glossterm>
+ <glossentry id='var-STAGING_INCDIR'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_INCDIR</glossterm>
+ <info>
+ STAGING_INCDIR[doc] = "Specifies the path to the /usr/include subdirectory of the sysroot directory for the target for which the current recipe is being built (STAGING_DIR_HOST)."
+ </info>
<glossdef>
<para>
Specifies the path to the
@@ -8100,7 +9517,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_INCDIR_NATIVE'><glossterm>STAGING_INCDIR_NATIVE</glossterm>
+ <glossentry id='var-STAGING_INCDIR_NATIVE'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_INCDIR_NATIVE</glossterm>
+ <info>
+ STAGING_INCDIR_NATIVE[doc] = "Specifies the path to the /usr/include subdirectory of the sysroot directory for the build host."
+ </info>
<glossdef>
<para>
Specifies the path to the <filename>/usr/include</filename>
@@ -8109,36 +9529,48 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_LIBDIR'><glossterm>STAGING_LIBDIR</glossterm>
+ <glossentry id='var-STAGING_KERNEL_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_KERNEL_DIR</glossterm>
+ <info>
+ STAGING_KERNEL_DIR[doc] = "The directory with kernel headers that are required to build out-of-tree modules."
+ </info>
<glossdef>
<para>
- Specifies the path to the <filename>/usr/lib</filename>
- subdirectory of the sysroot directory for the target for
- which the current recipe is being built
- (<link linkend='var-STAGING_DIR_HOST'><filename>STAGING_DIR_HOST</filename></link>).
+ The directory with kernel headers that are required to build out-of-tree
+ modules.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_LIBDIR_NATIVE'><glossterm>STAGING_LIBDIR_NATIVE</glossterm>
+ <glossentry id='var-STAGING_LIBDIR'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_LIBDIR</glossterm>
+ <info>
+ STAGING_LIBDIR[doc] = "Specifies the path to the /usr/lib subdirectory of the sysroot directory for the target for which the current recipe is being built (STAGING_DIR_HOST)."
+ </info>
<glossdef>
<para>
Specifies the path to the <filename>/usr/lib</filename>
- subdirectory of the sysroot directory for the build host.
+ subdirectory of the sysroot directory for the target for
+ which the current recipe is being built
+ (<link linkend='var-STAGING_DIR_HOST'><filename>STAGING_DIR_HOST</filename></link>).
</para>
</glossdef>
</glossentry>
- <glossentry id='var-STAGING_KERNEL_DIR'><glossterm>STAGING_KERNEL_DIR</glossterm>
+ <glossentry id='var-STAGING_LIBDIR_NATIVE'><glossterm><imagedata fileref="figures/define-generic.png" />STAGING_LIBDIR_NATIVE</glossterm>
+ <info>
+ STAGING_LIBDIR_NATIVE[doc] = "Specifies the path to the /usr/lib subdirectory of the sysroot directory for the build host."
+ </info>
<glossdef>
<para>
- The directory with kernel headers that are required to build out-of-tree
- modules.
+ Specifies the path to the <filename>/usr/lib</filename>
+ subdirectory of the sysroot directory for the build host.
</para>
</glossdef>
</glossentry>
- <glossentry id='var-STAMP'><glossterm>STAMP</glossterm>
+ <glossentry id='var-STAMP'><glossterm><imagedata fileref="figures/define-generic.png" />STAMP</glossterm>
+ <info>
+ STAMP[doc] = "Specifies the base path used to create recipe stamp files. The path to an actual stamp file is constructed by evaluating this string and then appending additional information."
+ </info>
<glossdef>
<para>
Specifies the base path used to create recipe stamp files.
@@ -8161,7 +9593,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-STAMPS_DIR'><glossterm>STAMPS_DIR</glossterm>
+ <glossentry id='var-STAMPS_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />STAMPS_DIR</glossterm>
+ <info>
+ STAMPS_DIR[doc] = "Specifies the base directory in which the OpenEmbedded build system places stamps."
+ </info>
<glossdef>
<para>
Specifies the base directory in which the OpenEmbedded
@@ -8172,7 +9607,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SUMMARY'><glossterm>SUMMARY</glossterm>
+ <glossentry id='var-SUMMARY'><glossterm><imagedata fileref="figures/define-generic.png" />SUMMARY</glossterm>
+ <info>
+ SUMMARY[doc] = "The short (72 characters or less) summary of the binary package for packaging systems such as opkg, rpm or dpkg. By default, SUMMARY is used to define the DESCRIPTION variable if DESCRIPTION is not set in the recipe."
+ </info>
<glossdef>
<para>The short (72 characters or less) summary of the binary package for packaging
systems such as <filename>opkg</filename>, <filename>rpm</filename> or
@@ -8185,7 +9623,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
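            <para>
                For example, a hypothetical recipe might provide:
                <literallayout class='monospaced'>
     SUMMARY = "Lightweight command-line JSON parser"
                </literallayout>
            </para>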
- <glossentry id='var-SYSLINUX_DEFAULT_CONSOLE'><glossterm>SYSLINUX_DEFAULT_CONSOLE</glossterm>
+ <glossentry id='var-SYSLINUX_DEFAULT_CONSOLE'><glossterm><imagedata fileref="figures/define-generic.png" />SYSLINUX_DEFAULT_CONSOLE</glossterm>
+ <info>
+ SYSLINUX_DEFAULT_CONSOLE[doc] = "Specifies the kernel boot default console."
+ </info>
<glossdef>
<para>
Specifies the kernel boot default console.
@@ -8206,7 +9647,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SYSLINUX_OPTS'><glossterm>SYSLINUX_OPTS</glossterm>
+ <glossentry id='var-SYSLINUX_OPTS'><glossterm><imagedata fileref="figures/define-generic.png" />SYSLINUX_OPTS</glossterm>
+ <info>
+ SYSLINUX_OPTS[doc] = "Lists additional options to add to the syslinux file."
+ </info>
<glossdef>
<para>
Lists additional options to add to the syslinux file.
@@ -8223,7 +9667,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SYSLINUX_SERIAL'><glossterm>SYSLINUX_SERIAL</glossterm>
+ <glossentry id='var-SYSLINUX_SERIAL'><glossterm><imagedata fileref="figures/define-generic.png" />SYSLINUX_SERIAL</glossterm>
+ <info>
+ SYSLINUX_SERIAL[doc] = "Specifies the alternate serial port or turns it off."
+ </info>
<glossdef>
<para>
Specifies the alternate serial port or turns it off.
@@ -8239,7 +9686,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SYSLINUX_SPLASH'><glossterm>SYSLINUX_SPLASH</glossterm>
+ <glossentry id='var-SYSLINUX_SPLASH'><glossterm><imagedata fileref="figures/define-generic.png" />SYSLINUX_SPLASH</glossterm>
+ <info>
+ SYSLINUX_SPLASH[doc] = "An .LSS file used as the background for the VGA boot menu when you are using the boot menu."
+ </info>
<glossdef>
<para>
An <filename>.LSS</filename> file used as the background
@@ -8256,7 +9706,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SYSLINUX_SERIAL_TTY'><glossterm>SYSLINUX_SERIAL_TTY</glossterm>
+ <glossentry id='var-SYSLINUX_SERIAL_TTY'><glossterm><imagedata fileref="figures/define-generic.png" />SYSLINUX_SERIAL_TTY</glossterm>
+ <info>
+ SYSLINUX_SERIAL_TTY[doc] = "Specifies the alternate console=tty... kernel boot argument."
+ </info>
<glossdef>
<para>
Specifies the alternate console=tty... kernel boot argument.
@@ -8271,7 +9724,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SYSROOT_PREPROCESS_FUNCS'><glossterm>SYSROOT_PREPROCESS_FUNCS</glossterm>
+ <glossentry id='var-SYSROOT_PREPROCESS_FUNCS'><glossterm><imagedata fileref="figures/define-generic.png" />SYSROOT_PREPROCESS_FUNCS</glossterm>
+ <info>
+ SYSROOT_PREPROCESS_FUNCS[doc] = "A list of functions to execute after files are staged into the sysroot. These functions are usually used to apply additional processing on the staged files, or to stage additional files."
+ </info>
<glossdef>
<para>
A list of functions to execute after files are staged into
@@ -8283,10 +9739,13 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SYSTEMD_AUTO_ENABLE'><glossterm>SYSTEMD_AUTO_ENABLE</glossterm>
+ <glossentry id='var-SYSTEMD_AUTO_ENABLE'><glossterm><imagedata fileref="figures/define-generic.png" />SYSTEMD_AUTO_ENABLE</glossterm>
+ <info>
+ SYSTEMD_AUTO_ENABLE[doc] = "For recipes that inherit the systemd class, this variable specifies whether the service you have specified in SYSTEMD_SERVICE should be started automatically or not."
+ </info>
<glossdef>
<para>
- For recipes that inherit the
+ When inheriting the
<link linkend='ref-classes-systemd'><filename>systemd</filename></link>
class, this variable specifies whether the service you have
specified in
@@ -8306,10 +9765,13 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
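            <para>
                For example, to install the service but leave it disabled by
                default, a recipe might set:
                <literallayout class='monospaced'>
     SYSTEMD_AUTO_ENABLE = "disable"
                </literallayout>
            </para>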
- <glossentry id='var-SYSTEMD_PACKAGES'><glossterm>SYSTEMD_PACKAGES</glossterm>
+ <glossentry id='var-SYSTEMD_PACKAGES'><glossterm><imagedata fileref="figures/define-generic.png" />SYSTEMD_PACKAGES</glossterm>
+ <info>
+ SYSTEMD_PACKAGES[doc] = "For recipes that inherit the systemd class, this variable locates the systemd unit files when they are not found in the main recipe's package."
+ </info>
<glossdef>
<para>
- For recipes that inherit the
+ When inheriting the
<link linkend='ref-classes-systemd'><filename>systemd</filename></link>
class, this variable locates the systemd unit files when
they are not found in the main recipe's package.
@@ -8329,10 +9791,13 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-SYSTEMD_SERVICE'><glossterm>SYSTEMD_SERVICE</glossterm>
+ <glossentry id='var-SYSTEMD_SERVICE'><glossterm><imagedata fileref="figures/define-generic.png" />SYSTEMD_SERVICE</glossterm>
+ <info>
+ SYSTEMD_SERVICE[doc] = "For recipes that inherit the systemd class, this variable specifies the systemd service name for a package."
+ </info>
<glossdef>
<para>
- For recipes that inherit the
+ When inheriting the
<link linkend='ref-classes-systemd'><filename>systemd</filename></link>
class, this variable specifies the systemd service name for
a package.
@@ -8350,7 +9815,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
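            <para>
                A minimal sketch for a hypothetical unit file named
                <filename>myservice.service</filename> shipped in the main
                package:
                <literallayout class='monospaced'>
     SYSTEMD_SERVICE_${PN} = "myservice.service"
                </literallayout>
            </para>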
- <glossentry id='var-SYSVINIT_ENABLED_GETTYS'><glossterm>SYSVINIT_ENABLED_GETTYS</glossterm>
+ <glossentry id='var-SYSVINIT_ENABLED_GETTYS'><glossterm><imagedata fileref="figures/define-generic.png" />SYSVINIT_ENABLED_GETTYS</glossterm>
+ <info>
+ SYSVINIT_ENABLED_GETTYS[doc] = "Specifies which virtual terminals should run a getty. The default is '1'."
+ </info>
<glossdef>
<para>
When using
@@ -8375,7 +9843,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-t'><title>T</title>
- <glossentry id='var-T'><glossterm>T</glossterm>
+ <glossentry id='var-T'><glossterm><imagedata fileref="figures/define-generic.png" />T</glossterm>
+ <info>
+ T[doc] = "This variable points to a directory where BitBake places temporary files, which consist mostly of task logs and scripts, when building a particular recipe."
+ </info>
<glossdef>
<para>This variable points to a directory where BitBake places
temporary files, which consist mostly of task logs and
@@ -8396,7 +9867,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TARGET_ARCH'><glossterm>TARGET_ARCH</glossterm>
+ <glossentry id='var-TARGET_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />TARGET_ARCH</glossterm>
+ <info>
+ TARGET_ARCH[doc] = "The architecture of the device being built. The OpenEmbedded build system supports the following architectures: arm, mips, ppc, x86, x86-64."
+ </info>
<glossdef>
<para>
The target machine's architecture.
@@ -8422,7 +9896,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TARGET_AS_ARCH'><glossterm>TARGET_AS_ARCH</glossterm>
+ <glossentry id='var-TARGET_AS_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />TARGET_AS_ARCH</glossterm>
+ <info>
+ TARGET_AS_ARCH[doc] = "Specifies architecture-specific assembler flags for the target system."
+ </info>
<glossdef>
<para>
Specifies architecture-specific assembler flags for the
@@ -8438,7 +9915,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TARGET_CC_ARCH'><glossterm>TARGET_CC_ARCH</glossterm>
+ <glossentry id='var-TARGET_CC_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />TARGET_CC_ARCH</glossterm>
+ <info>
+ TARGET_CC_ARCH[doc] = "Specifies architecture-specific C compiler flags for the target system."
+ </info>
<glossdef>
<para>
Specifies architecture-specific C compiler flags for the
@@ -8458,7 +9938,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TARGET_CC_KERNEL_ARCH'><glossterm>TARGET_CC_KERNEL_ARCH</glossterm>
+ <glossentry id='var-TARGET_CC_KERNEL_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />TARGET_CC_KERNEL_ARCH</glossterm>
+ <info>
+ TARGET_CC_KERNEL_ARCH[doc] = "This is a specific kernel compiler flag for a CPU or Application Binary Interface (ABI) tune."
+ </info>
<glossdef>
<para>
This is a specific kernel compiler flag for a CPU or
@@ -8479,7 +9962,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TARGET_CFLAGS'><glossterm>TARGET_CFLAGS</glossterm>
+ <glossentry id='var-TARGET_CFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />TARGET_CFLAGS</glossterm>
+ <info>
+ TARGET_CFLAGS[doc] = "Flags passed to the C compiler for the target system. This variable evaluates to the same as CFLAGS."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C compiler when building
@@ -8501,7 +9987,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TARGET_CPPFLAGS'><glossterm>TARGET_CPPFLAGS</glossterm>
+ <glossentry id='var-TARGET_CPPFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />TARGET_CPPFLAGS</glossterm>
+ <info>
+ TARGET_CPPFLAGS[doc] = "Specifies the flags to pass to the C pre-processor (i.e. to both the C and the C++ compilers) when building for the target."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C pre-processor
@@ -8524,7 +10013,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TARGET_CXXFLAGS'><glossterm>TARGET_CXXFLAGS</glossterm>
+ <glossentry id='var-TARGET_CXXFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />TARGET_CXXFLAGS</glossterm>
+ <info>
+ TARGET_CXXFLAGS[doc] = "Specifies the flags to pass to the C++ compiler when building for the target."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the C++ compiler when
@@ -8546,7 +10038,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TARGET_FPU'><glossterm>TARGET_FPU</glossterm>
+ <glossentry id='var-TARGET_FPU'><glossterm><imagedata fileref="figures/define-generic.png" />TARGET_FPU</glossterm>
+ <info>
+ TARGET_FPU[doc] = "Specifies the method for handling FPU code. For FPU-less targets, which include most ARM CPUs, the variable must be set to 'soft'. If not, the kernel emulation gets used, which results in a performance penalty."
+ </info>
<glossdef>
<para>Specifies the method for handling FPU code.
For FPU-less targets, which include most ARM CPUs, the variable must be
@@ -8555,7 +10050,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TARGET_LD_ARCH'><glossterm>TARGET_LD_ARCH</glossterm>
+ <glossentry id='var-TARGET_LD_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />TARGET_LD_ARCH</glossterm>
+ <info>
+ TARGET_LD_ARCH[doc] = "Specifies architecture-specific linker flags for the target system."
+ </info>
<glossdef>
<para>
Specifies architecture-specific linker flags for the
@@ -8571,7 +10069,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TARGET_LDFLAGS'><glossterm>TARGET_LDFLAGS</glossterm>
+ <glossentry id='var-TARGET_LDFLAGS'><glossterm><imagedata fileref="figures/define-generic.png" />TARGET_LDFLAGS</glossterm>
+ <info>
+ TARGET_LDFLAGS[doc] = "Specifies the flags to pass to the linker when building for the target."
+ </info>
<glossdef>
<para>
Specifies the flags to pass to the linker when building
@@ -8593,7 +10094,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TARGET_OS'><glossterm>TARGET_OS</glossterm>
+ <glossentry id='var-TARGET_OS'><glossterm><imagedata fileref="figures/define-generic.png" />TARGET_OS</glossterm>
+ <info>
+ TARGET_OS[doc] = "Specifies the target's operating system."
+ </info>
<glossdef>
<para>Specifies the target's operating system.
The variable can be set to "linux" for <filename>eglibc</filename>-based systems and
@@ -8603,7 +10107,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TCLIBC'><glossterm>TCLIBC</glossterm>
+ <glossentry id='var-TCLIBC'><glossterm><imagedata fileref="figures/define-generic.png" />TCLIBC</glossterm>
+ <info>
+ TCLIBC[doc] = "Specifies the GNU standard C library (libc) variant to use during the build process. You can select 'glibc' or 'uclibc'."
+ </info>
<glossdef>
<para>
Specifies the GNU standard C library (<filename>libc</filename>)
@@ -8621,7 +10128,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
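            <para>
                For example, selecting the smaller C library variant in
                <filename>local.conf</filename>:
                <literallayout class='monospaced'>
     TCLIBC = "uclibc"
                </literallayout>
            </para>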
- <glossentry id='var-TCMODE'><glossterm>TCMODE</glossterm>
+ <glossentry id='var-TCMODE'><glossterm><imagedata fileref="figures/define-generic.png" />TCMODE</glossterm>
+ <info>
+ TCMODE[doc] = "Enables an external toolchain (where provided by an additional layer) if set to a value other than 'default'."
+ </info>
<glossdef>
<para>
Specifies the toolchain selector.
@@ -8679,7 +10189,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TEST_EXPORT_DIR'><glossterm>TEST_EXPORT_DIR</glossterm>
+ <glossentry id='var-TEST_EXPORT_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_EXPORT_DIR</glossterm>
+ <info>
+ TEST_EXPORT_DIR[doc] = "The location the OpenEmbedded build system uses to export tests when the TEST_EXPORT_ONLY variable is set to "1"."
+ </info>
<glossdef>
<para>
The location the OpenEmbedded build system uses to export
@@ -8695,7 +10208,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TEST_EXPORT_ONLY'><glossterm>TEST_EXPORT_ONLY</glossterm>
+ <glossentry id='var-TEST_EXPORT_ONLY'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_EXPORT_ONLY</glossterm>
+ <info>
+ TEST_EXPORT_ONLY[doc] = "Specifies to export the tests only. Set this variable to "1" if you do not want to run the tests but you want them to be exported in a manner that allows you to run them outside of the build system."
+ </info>
<glossdef>
<para>
Specifies to export the tests only.
@@ -8706,7 +10222,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TEST_IMAGE'><glossterm>TEST_IMAGE</glossterm>
+ <glossentry id='var-TEST_IMAGE'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_IMAGE</glossterm>
+ <info>
+ TEST_IMAGE[doc] = "Enables test booting of virtual machine images under the QEMU emulator after any root filesystems are created and runs tests against those images."
+ </info>
<glossdef>
<para>
Automatically runs the series of automated tests for
@@ -8736,7 +10255,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
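            <para>
                A minimal <filename>local.conf</filename> sketch that enables
                automated image testing after each image build:
                <literallayout class='monospaced'>
     INHERIT += "testimage"
     TEST_IMAGE = "1"
                </literallayout>
            </para>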
- <glossentry id='var-TEST_LOG_DIR'><glossterm>TEST_LOG_DIR</glossterm>
+ <glossentry id='var-TEST_LOG_DIR'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_LOG_DIR</glossterm>
+ <info>
+ TEST_LOG_DIR[doc] = "Holds the SSH log and the boot log for QEMU machines. The TEST_LOG_DIR variable defaults to "${WORKDIR}/testimage"."
+ </info>
<glossdef>
<para>
Holds the SSH log and the boot log for QEMU machines.
@@ -8751,7 +10273,47 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TEST_QEMUBOOT_TIMEOUT'><glossterm>TEST_QEMUBOOT_TIMEOUT</glossterm>
+ <glossentry id='var-TEST_POWERCONTROL_CMD'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_POWERCONTROL_CMD</glossterm>
+ <info>
+ TEST_POWERCONTROL_CMD[doc] = "For automated hardware testing, specifies the command to use to control the power of the target machine under test."
+ </info>
+ <glossdef>
+ <para>
+ For automated hardware testing, specifies the command to
+ use to control the power of the target machine under test.
+ Typically, this command would point to a script that
+ performs the appropriate action (e.g. interacting
+ with a web-enabled power strip).
+ The specified command should expect to receive as the last
+ argument "off", "on" or "cycle" specifying to power off,
+ on, or cycle (power off and then power on) the device,
+ respectively.
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-TEST_POWERCONTROL_EXTRA_ARGS'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_POWERCONTROL_EXTRA_ARGS</glossterm>
+ <info>
+ TEST_POWERCONTROL_EXTRA_ARGS[doc] = "For automated hardware testing, specifies additional arguments to pass through to the command specified in TEST_POWERCONTROL_CMD."
+ </info>
+ <glossdef>
+ <para>
+ For automated hardware testing, specifies additional
+ arguments to pass through to the command specified in
+ <link linkend='var-TEST_POWERCONTROL_CMD'><filename>TEST_POWERCONTROL_CMD</filename></link>.
+ Setting <filename>TEST_POWERCONTROL_EXTRA_ARGS</filename>
+ is optional.
+ You can use it if you wish, for example, to separate the
+ machine-specific and non-machine-specific parts of the
+ arguments.
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-TEST_QEMUBOOT_TIMEOUT'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_QEMUBOOT_TIMEOUT</glossterm>
+ <info>
+ TEST_QEMUBOOT_TIMEOUT[doc] = "The time in seconds allowed for an image to boot before automated runtime tests begin to run against an image."
+ </info>
<glossdef>
<para>
The time in seconds allowed for an image to boot before
@@ -8771,7 +10333,53 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TEST_SERVER_IP'><glossterm>TEST_SERVER_IP</glossterm>
+ <glossentry id='var-TEST_SERIALCONTROL_CMD'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_SERIALCONTROL_CMD</glossterm>
+ <info>
+ TEST_SERIALCONTROL_CMD[doc] = "For automated hardware testing, specifies the command to use to connect to the serial console of the target machine under test."
+ </info>
+ <glossdef>
+ <para>
+ For automated hardware testing, specifies the command
+ to use to connect to the serial console of the target
+ machine under test.
+ This command simply needs to connect to the serial console
+ and forward that connection to standard input and output
+ as any normal terminal program does.
+ </para>
+
+ <para>
+ For example, to use the Picocom terminal program on
+ serial device <filename>/dev/ttyUSB0</filename> at
+ 115200bps, you would set the variable as follows:
+ <literallayout class='monospaced'>
+ TEST_SERIALCONTROL_CMD = "picocom /dev/ttyUSB0 -b 115200"
+ </literallayout>
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-TEST_SERIALCONTROL_EXTRA_ARGS'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_SERIALCONTROL_EXTRA_ARGS</glossterm>
+ <info>
+ TEST_SERIALCONTROL_EXTRA_ARGS[doc] = "For automated hardware testing, specifies additional arguments to pass through to the command specified in TEST_SERIALCONTROL_CMD."
+ </info>
+ <glossdef>
+ <para>
+ For automated hardware testing, specifies additional
+ arguments to pass through to the command specified in
+ <link linkend='var-TEST_SERIALCONTROL_CMD'><filename>TEST_SERIALCONTROL_CMD</filename></link>.
+ Setting <filename>TEST_SERIALCONTROL_EXTRA_ARGS</filename>
+ is optional.
+ You can use it if you wish, for example, to separate the
+ machine-specific and non-machine-specific parts of the
+ command.
+ </para>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-TEST_SERVER_IP'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_SERVER_IP</glossterm>
+ <info>
+ TEST_SERVER_IP[doc] = "The IP address of the build machine (host machine). This IP address is usually automatically detected."
+ </info>
<glossdef>
<para>
The IP address of the build machine (host machine).
@@ -8789,7 +10397,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TEST_TARGET'><glossterm>TEST_TARGET</glossterm>
+ <glossentry id='var-TEST_TARGET'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_TARGET</glossterm>
+ <info>
+ TEST_TARGET[doc] = "For automated runtime testing, specifies the method of deploying the image and running tests on the target machine."
+ </info>
<glossdef>
<para>
Specifies the target controller to use when running tests
@@ -8854,7 +10465,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
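            <para>
                For example, running the tests against real hardware over SSH
                rather than under QEMU might use a configuration such as the
                following, where the IP address is hypothetical:
                <literallayout class='monospaced'>
     TEST_TARGET = "simpleremote"
     TEST_TARGET_IP = "192.168.7.2"
                </literallayout>
            </para>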
- <glossentry id='var-TEST_TARGET_IP'><glossterm>TEST_TARGET_IP</glossterm>
+ <glossentry id='var-TEST_TARGET_IP'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_TARGET_IP</glossterm>
+ <info>
+ TEST_TARGET_IP[doc] = "The IP address of your hardware under test."
+ </info>
<glossdef>
<para>
The IP address of your hardware under test.
@@ -8880,7 +10494,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TEST_SUITES'><glossterm>TEST_SUITES</glossterm>
+ <glossentry id='var-TEST_SUITES'><glossterm><imagedata fileref="figures/define-generic.png" />TEST_SUITES</glossterm>
+ <info>
+ TEST_SUITES[doc] = "An ordered list of tests (modules) to run against an image when performing automated runtime testing."
+ </info>
<glossdef>
<para>
An ordered list of tests (modules) to run against
@@ -8900,7 +10517,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
You can add your own tests to the list of tests by
appending <filename>TEST_SUITES</filename> as follows:
<literallayout class='monospaced'>
- TEST_SUITES_append = " mytest"
+ TEST_SUITES_append = " <replaceable>mytest</replaceable>"
</literallayout>
Alternatively, you can provide the "auto" option to
have all applicable tests run against the image.
@@ -8935,7 +10552,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-THISDIR'><glossterm>THISDIR</glossterm>
+ <glossentry id='var-THISDIR'><glossterm><imagedata fileref="figures/define-generic.png" />THISDIR</glossterm>
+ <info>
+ THISDIR[doc] = "The directory in which the file BitBake is currently parsing is located."
+ </info>
<glossdef>
<para>
The directory in which the file BitBake is currently
@@ -8945,7 +10565,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TMPDIR'><glossterm>TMPDIR</glossterm>
+ <glossentry id='var-TMPDIR'><glossterm><imagedata fileref="figures/define-generic.png" />TMPDIR</glossterm>
+ <info>
+ TMPDIR[doc] = "The temporary directory the OpenEmbedded build system uses when it does its work building images. By default, the TMPDIR variable is named tmp within the Build Directory."
+ </info>
<glossdef>
<para>
This variable is the base directory the OpenEmbedded
@@ -8983,7 +10606,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TOOLCHAIN_HOST_TASK'><glossterm>TOOLCHAIN_HOST_TASK</glossterm>
+ <glossentry id='var-TOOLCHAIN_HOST_TASK'><glossterm><imagedata fileref="figures/define-generic.png" />TOOLCHAIN_HOST_TASK</glossterm>
+ <info>
+ TOOLCHAIN_HOST_TASK[doc] = "This variable lists packages the OpenEmbedded build system uses when building an SDK, which contains a cross-development environment."
+ </info>
<glossdef>
<para>
This variable lists packages the OpenEmbedded build system
@@ -9013,7 +10639,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TOOLCHAIN_TARGET_TASK'><glossterm>TOOLCHAIN_TARGET_TASK</glossterm>
+ <glossentry id='var-TOOLCHAIN_TARGET_TASK'><glossterm><imagedata fileref="figures/define-generic.png" />TOOLCHAIN_TARGET_TASK</glossterm>
+ <info>
+ TOOLCHAIN_TARGET_TASK[doc] = "This variable lists packages the OpenEmbedded build system uses when it creates the target part of an SDK, which includes libraries and headers."
+ </info>
<glossdef>
<para>
This variable lists packages the OpenEmbedded build system
@@ -9035,7 +10664,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TOPDIR'><glossterm>TOPDIR</glossterm>
+ <glossentry id='var-TOPDIR'><glossterm><imagedata fileref="figures/define-generic.png" />TOPDIR</glossterm>
+ <info>
+ TOPDIR[doc] = "The Build Directory. BitBake automatically sets this variable. The OpenEmbedded build system uses the Build Directory when building images."
+ </info>
<glossdef>
<para>
The top-level
@@ -9049,7 +10681,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TRANSLATED_TARGET_ARCH'><glossterm>TRANSLATED_TARGET_ARCH</glossterm>
+ <glossentry id='var-TRANSLATED_TARGET_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />TRANSLATED_TARGET_ARCH</glossterm>
+ <info>
+ TRANSLATED_TARGET_ARCH[doc] = "A sanitized version of TARGET_ARCH. This variable is used where the architecture is needed in a value where underscores are not allowed."
+ </info>
<glossdef>
<para>
A sanitized version of
@@ -9067,7 +10702,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TUNE_ARCH'><glossterm>TUNE_ARCH</glossterm>
+ <glossentry id='var-TUNE_ARCH'><glossterm><imagedata fileref="figures/define-generic.png" />TUNE_ARCH</glossterm>
+ <info>
+ TUNE_ARCH[doc] = "The GNU canonical architecture for a specific architecture (i.e. arm, armeb, mips, mips64, and so forth)."
+ </info>
<glossdef>
<para>
The GNU canonical architecture for a specific architecture
@@ -9122,7 +10760,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TUNE_ASARGS'><glossterm>TUNE_ASARGS</glossterm>
+ <glossentry id='var-TUNE_ASARGS'><glossterm><imagedata fileref="figures/define-generic.png" />TUNE_ASARGS</glossterm>
+ <info>
+ TUNE_ASARGS[doc] = "Specifies architecture-specific assembler flags for the target system."
+ </info>
<glossdef>
<para>
Specifies architecture-specific assembler flags for
@@ -9131,7 +10772,8 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<filename>TUNE_ASARGS</filename> is set using
the tune include files, which are typically under
<filename>meta/conf/machine/include/</filename> and are
- influenced through <filename>TUNE_FEATURES</filename>.
+ influenced through
+ <link linkend='var-TUNE_FEATURES'><filename>TUNE_FEATURES</filename></link>.
For example, the
<filename>meta/conf/machine/include/x86/arch-x86.inc</filename>
file defines the flags for the x86 architecture as follows:
@@ -9139,14 +10781,19 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
TUNE_ASARGS += "${@bb.utils.contains("TUNE_FEATURES", "mx32", "-x32", "", d)}"
</literallayout>
<note>
- Board Support Packages (BSPs) can supply their own
- set of flags.
+ Board Support Packages (BSPs) select the tune.
+ The selected tune, in turn, affects the tune variables
+ themselves (i.e. the tune can supply its own
+ set of flags).
</note>
</para>
</glossdef>
</glossentry>
- <glossentry id='var-TUNE_CCARGS'><glossterm>TUNE_CCARGS</glossterm>
+ <glossentry id='var-TUNE_CCARGS'><glossterm><imagedata fileref="figures/define-generic.png" />TUNE_CCARGS</glossterm>
+ <info>
+ TUNE_CCARGS[doc] = "Specifies architecture-specific C compiler flags for the target system."
+ </info>
<glossdef>
<para>
Specifies architecture-specific C compiler flags for
@@ -9155,16 +10802,22 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<filename>TUNE_CCARGS</filename> is set using
the tune include files, which are typically under
<filename>meta/conf/machine/include/</filename> and are
- influenced through <filename>TUNE_FEATURES</filename>.
+ influenced through
+ <link linkend='var-TUNE_FEATURES'><filename>TUNE_FEATURES</filename></link>.
<note>
- Board Support Packages (BSPs) can supply their own
- set of flags.
+ Board Support Packages (BSPs) select the tune.
+ The selected tune, in turn, affects the tune variables
+ themselves (i.e. the tune can supply its own
+ set of flags).
</note>
</para>
</glossdef>
</glossentry>
- <glossentry id='var-TUNE_LDARGS'><glossterm>TUNE_LDARGS</glossterm>
+ <glossentry id='var-TUNE_LDARGS'><glossterm><imagedata fileref="figures/define-generic.png" />TUNE_LDARGS</glossterm>
+ <info>
+ TUNE_LDARGS[doc] = "Specifies architecture-specific linker flags for the target system."
+ </info>
<glossdef>
<para>
Specifies architecture-specific linker flags for
@@ -9173,7 +10826,8 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<filename>TUNE_LDARGS</filename> is set using
the tune include files, which are typically under
<filename>meta/conf/machine/include/</filename> and are
- influenced through <filename>TUNE_FEATURES</filename>.
+ influenced through
+ <link linkend='var-TUNE_FEATURES'><filename>TUNE_FEATURES</filename></link>.
For example, the
<filename>meta/conf/machine/include/x86/arch-x86.inc</filename>
file defines the flags for the x86 architecture as follows:
@@ -9181,14 +10835,19 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
TUNE_LDARGS += "${@bb.utils.contains("TUNE_FEATURES", "mx32", "-m elf32_x86_64", "", d)}"
</literallayout>
<note>
- Board Support Packages (BSPs) can supply their own
- set of flags.
+ Board Support Packages (BSPs) select the tune.
+ The selected tune, in turn, affects the tune variables
+ themselves (i.e. the tune can supply its own
+ set of flags).
</note>
</para>
</glossdef>
</glossentry>
- <glossentry id='var-TUNE_FEATURES'><glossterm>TUNE_FEATURES</glossterm>
+ <glossentry id='var-TUNE_FEATURES'><glossterm><imagedata fileref="figures/define-generic.png" />TUNE_FEATURES</glossterm>
+ <info>
+ TUNE_FEATURES[doc] = "Features used to "tune" a compiler for optimal use given a specific processor."
+ </info>
<glossdef>
<para>
Features used to "tune" a compiler for optimal use
@@ -9218,22 +10877,20 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TUNE_PKGARCH'><glossterm>TUNE_PKGARCH</glossterm>
+ <glossentry id='var-TUNE_PKGARCH'><glossterm><imagedata fileref="figures/define-generic.png" />TUNE_PKGARCH</glossterm>
+ <info>
+ TUNE_PKGARCH[doc] = "The package architecture understood by the packaging system to define the architecture, ABI, and tuning of output packages."
+ </info>
<glossdef>
<para>
The package architecture understood by the packaging
system to define the architecture, ABI, and tuning of
output packages.
- </para>
- </glossdef>
- </glossentry>
-
- <glossentry id='var-TUNE_PKGARCH_tune'><glossterm>TUNE_PKGARCH_tune</glossterm>
- <glossdef>
- <para>
- The CPU or Application Binary Interface (ABI) specific
- tuning of the
- <link linkend='var-TUNE_PKGARCH'><filename>TUNE_PKGARCH</filename></link>.
+ The specific tune is defined using the "_tune" override
+ as follows:
+ <literallayout class='monospaced'>
+ TUNE_PKGARCH_tune-<replaceable>tune</replaceable> = "<replaceable>tune</replaceable>"
+ </literallayout>
</para>
<para>
@@ -9250,7 +10907,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TUNEABI'><glossterm>TUNEABI</glossterm>
+ <glossentry id='var-TUNEABI'><glossterm><imagedata fileref="figures/define-generic.png" />TUNEABI</glossterm>
+ <info>
+ TUNEABI[doc] = "An underlying ABI used by a particular tuning in a given toolchain layer. This feature allows providers using prebuilt libraries to check compatibility of a tuning against their selection of libraries."
+ </info>
<glossdef>
<para>
An underlying Application Binary Interface (ABI) used by
@@ -9274,7 +10934,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TUNEABI_OVERRIDE'><glossterm>TUNEABI_OVERRIDE</glossterm>
+ <glossentry id='var-TUNEABI_OVERRIDE'><glossterm><imagedata fileref="figures/define-generic.png" />TUNEABI_OVERRIDE</glossterm>
+ <info>
+ TUNEABI_OVERRIDE[doc] = "If set, the OpenEmbedded build system ignores TUNEABI_WHITELIST."
+ </info>
<glossdef>
<para>
If set, the OpenEmbedded system ignores the
@@ -9297,7 +10960,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TUNEABI_WHITELIST'><glossterm>TUNEABI_WHITELIST</glossterm>
+ <glossentry id='var-TUNEABI_WHITELIST'><glossterm><imagedata fileref="figures/define-generic.png" />TUNEABI_WHITELIST</glossterm>
+ <info>
+ TUNEABI_WHITELIST[doc] = "A whitelist of permissible TUNEABI values. If the variable is not set, all values are allowed."
+ </info>
<glossdef>
<para>
A whitelist of permissible
@@ -9321,11 +10987,14 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TUNECONFLICT'><glossterm>TUNECONFLICT[&lt;feature&gt;]</glossterm>
+ <glossentry id='var-TUNECONFLICTS'><glossterm><imagedata fileref="figures/define-generic.png" />TUNECONFLICTS[<replaceable>feature</replaceable>]</glossterm>
+ <info>
+ TUNECONFLICTS[doc] = "Specifies CPU or Application Binary Interface (ABI) tuning features that conflict with the specified feature."
+ </info>
<glossdef>
<para>
Specifies CPU or Application Binary Interface (ABI)
- tuning features that conflict with &gt;feature&lt;.
+ tuning features that conflict with <replaceable>feature</replaceable>.
</para>
<para>
@@ -9343,7 +11012,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-TUNEVALID'><glossterm>TUNEVALID[&lt;feature&gt;]</glossterm>
+ <glossentry id='var-TUNEVALID'><glossterm><imagedata fileref="figures/define-generic.png" />TUNEVALID[<replaceable>feature</replaceable>]</glossterm>
+ <info>
+ TUNEVALID[doc] = "Descriptions, stored as flags, of valid tuning features."
+ </info>
<glossdef>
<para>
Specifies a valid CPU or Application Binary Interface (ABI)
@@ -9366,7 +11038,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-u'><title>U</title>
- <glossentry id='var-UBOOT_CONFIG'><glossterm>UBOOT_CONFIG</glossterm>
+ <glossentry id='var-UBOOT_CONFIG'><glossterm><imagedata fileref="figures/define-generic.png" />UBOOT_CONFIG</glossterm>
+ <info>
+ UBOOT_CONFIG[doc] = "Configures the UBOOT_MACHINE and can also define IMAGE_FSTYPES for individual cases."
+ </info>
<glossdef>
<para>
Configures the
@@ -9405,7 +11080,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-UBOOT_ENTRYPOINT'><glossterm>UBOOT_ENTRYPOINT</glossterm>
+ <glossentry id='var-UBOOT_ENTRYPOINT'><glossterm><imagedata fileref="figures/define-generic.png" />UBOOT_ENTRYPOINT</glossterm>
+ <info>
+ UBOOT_ENTRYPOINT[doc] = "Specifies the entry point for the U-Boot image."
+ </info>
<glossdef>
<para>
Specifies the entry point for the U-Boot image.
@@ -9417,7 +11095,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-UBOOT_LOADADDRESS'><glossterm>UBOOT_LOADADDRESS</glossterm>
+ <glossentry id='var-UBOOT_LOADADDRESS'><glossterm><imagedata fileref="figures/define-generic.png" />UBOOT_LOADADDRESS</glossterm>
+ <info>
+ UBOOT_LOADADDRESS[doc] = "Specifies the load address for the U-Boot image."
+ </info>
<glossdef>
<para>
Specifies the load address for the U-Boot image.
@@ -9429,7 +11110,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-UBOOT_LOCALVERSION'><glossterm>UBOOT_LOCALVERSION</glossterm>
+ <glossentry id='var-UBOOT_LOCALVERSION'><glossterm><imagedata fileref="figures/define-generic.png" />UBOOT_LOCALVERSION</glossterm>
+ <info>
+ UBOOT_LOCALVERSION[doc] = "Appends a string to the name of the local version of the U-Boot image."
+ </info>
<glossdef>
<para>
Appends a string to the name of the local version of the
@@ -9445,7 +11129,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-UBOOT_MACHINE'><glossterm>UBOOT_MACHINE</glossterm>
+ <glossentry id='var-UBOOT_MACHINE'><glossterm><imagedata fileref="figures/define-generic.png" />UBOOT_MACHINE</glossterm>
+ <info>
+ UBOOT_MACHINE[doc] = "Specifies the value passed on the make command line when building a U-Boot image."
+ </info>
<glossdef>
<para>
Specifies the value passed on the
@@ -9454,7 +11141,7 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
The value indicates the target platform configuration.
You typically set this variable from the machine
configuration file (i.e.
- <filename>conf/machine/&lt;machine_name&gt;.conf</filename>).
+ <filename>conf/machine/<replaceable>machine_name</replaceable>.conf</filename>).
</para>
<para>
@@ -9465,7 +11152,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-UBOOT_MAKE_TARGET'><glossterm>UBOOT_MAKE_TARGET</glossterm>
+ <glossentry id='var-UBOOT_MAKE_TARGET'><glossterm><imagedata fileref="figures/define-generic.png" />UBOOT_MAKE_TARGET</glossterm>
+ <info>
+ UBOOT_MAKE_TARGET[doc] = "Specifies the target called in the Makefile."
+ </info>
<glossdef>
<para>
Specifies the target called in the
@@ -9475,7 +11165,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-UBOOT_SUFFIX'><glossterm>UBOOT_SUFFIX</glossterm>
+ <glossentry id='var-UBOOT_SUFFIX'><glossterm><imagedata fileref="figures/define-generic.png" />UBOOT_SUFFIX</glossterm>
+ <info>
+ UBOOT_SUFFIX[doc] = "Points to the generated U-Boot extension."
+ </info>
<glossdef>
<para>
Points to the generated U-Boot extension.
@@ -9490,7 +11183,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-UBOOT_TARGET'><glossterm>UBOOT_TARGET</glossterm>
+ <glossentry id='var-UBOOT_TARGET'><glossterm><imagedata fileref="figures/define-generic.png" />UBOOT_TARGET</glossterm>
+ <info>
+ UBOOT_TARGET[doc] = "Specifies the target used for building U-Boot."
+ </info>
<glossdef>
<para>
Specifies the target used for building U-Boot.
@@ -9503,7 +11199,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-USE_VT'><glossterm>USE_VT</glossterm>
+ <glossentry id='var-USE_VT'><glossterm><imagedata fileref="figures/define-generic.png" />USE_VT</glossterm>
+ <info>
+ USE_VT[doc] = "When using SysVinit, determines whether or not to run a getty on any virtual terminals in order to enable logging in through those terminals."
+ </info>
<glossdef>
<para>
When using
@@ -9525,7 +11224,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-USER_CLASSES'><glossterm>USER_CLASSES</glossterm>
+ <glossentry id='var-USER_CLASSES'><glossterm><imagedata fileref="figures/define-generic.png" />USER_CLASSES</glossterm>
+ <info>
+ USER_CLASSES[doc] = "List of additional classes to use when building images that enable extra features."
+ </info>
<glossdef>
<para>
A list of classes to globally inherit.
@@ -9549,7 +11251,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-USERADD_ERROR_DYNAMIC'><glossterm>USERADD_ERROR_DYNAMIC</glossterm>
+ <glossentry id='var-USERADD_ERROR_DYNAMIC'><glossterm><imagedata fileref="figures/define-generic.png" />USERADD_ERROR_DYNAMIC</glossterm>
+ <info>
+ USERADD_ERROR_DYNAMIC[doc] = "Forces the OpenEmbedded build system to produce an error if the user identification (uid) and group identification (gid) values are not defined in files/passwd and files/group files."
+ </info>
<glossdef>
<para>
Forces the OpenEmbedded build system to produce an error
@@ -9586,7 +11291,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-USERADD_GID_TABLES'><glossterm>USERADD_GID_TABLES</glossterm>
+ <glossentry id='var-USERADD_GID_TABLES'><glossterm><imagedata fileref="figures/define-generic.png" />USERADD_GID_TABLES</glossterm>
+ <info>
+ USERADD_GID_TABLES[doc] = "Specifies a password file to use for obtaining static group identification (gid) values when the OpenEmbedded build system adds a group to the system during package installation."
+ </info>
<glossdef>
<para>
Specifies a password file to use for obtaining static
@@ -9618,43 +11326,15 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-USERADD_UID_TABLES'><glossterm>USERADD_UID_TABLES</glossterm>
+ <glossentry id='var-USERADD_PACKAGES'><glossterm><imagedata fileref="figures/define-generic.png" />USERADD_PACKAGES</glossterm>
+ <info>
+ USERADD_PACKAGES[doc] = "When a recipe inherits the useradd class, this variable specifies the individual packages within the recipe that require users and/or groups to be added."
+ </info>
<glossdef>
<para>
- Specifies a password file to use for obtaining static
- user identification (<filename>uid</filename>) values
- when the OpenEmbedded build system adds a user to the
- system during package installation.
- </para>
-
- <para>
- When applying static user identification
- (<filename>uid</filename>) values, the OpenEmbedded build
- system looks in
- <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
- for a <filename>files/passwd</filename> file and then applies
- those <filename>uid</filename> values.
- Set the variable as follows in your
- <filename>local.conf</filename> file:
- <literallayout class='monospaced'>
- USERADD_UID_TABLES = "files/passwd"
- </literallayout>
- </para>
-
- <note>
- Setting the
- <link linkend='var-USERADDEXTENSION'><filename>USERADDEXTENSION</filename></link>
- variable to "useradd-staticids" causes the build system
- to use static <filename>uid</filename> values.
- </note>
- </glossdef>
- </glossentry>
-
- <glossentry id='var-USERADD_PACKAGES'><glossterm>USERADD_PACKAGES</glossterm>
- <glossdef>
- <para>
- When a recipe inherits the
- <filename>useradd</filename> class, this variable
+ When inheriting the
+ <link linkend='ref-classes-useradd'><filename>useradd</filename></link>
+ class, this variable
specifies the individual packages within the recipe that
require users and/or groups to be added.
</para>
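+
+            <para>
+                As a brief sketch (using the main package name here is
+                illustrative only), a recipe whose main package needs a
+                user added might set:
+                <literallayout class='monospaced'>
+     USERADD_PACKAGES = "${PN}"
+                </literallayout>
+            </para>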
@@ -9682,11 +11362,15 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-USERADD_PARAM'><glossterm>USERADD_PARAM</glossterm>
+ <glossentry id='var-USERADD_PARAM'><glossterm><imagedata fileref="figures/define-generic.png" />USERADD_PARAM</glossterm>
+ <info>
+ USERADD_PARAM[doc] = "When a recipe inherits the useradd class, this variable specifies for a package what parameters should be passed to the useradd command if you wish to add a user to the system when the package is installed."
+ </info>
<glossdef>
<para>
- When a recipe inherits the
- <filename>useradd</filename> class, this variable
+ When inheriting the
+ <link linkend='ref-classes-useradd'><filename>useradd</filename></link>
+ class, this variable
specifies for a package what parameters should be passed
to the <filename>useradd</filename> command
if you wish to add a user to the system when the package
@@ -9708,7 +11392,45 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-USERADDEXTENSION'><glossterm>USERADDEXTENSION</glossterm>
+ <glossentry id='var-USERADD_UID_TABLES'><glossterm><imagedata fileref="figures/define-generic.png" />USERADD_UID_TABLES</glossterm>
+ <info>
+ USERADD_UID_TABLES[doc] = "Specifies a password file to use for obtaining static user identification (uid) values when the OpenEmbedded build system adds a user to the system during package installation."
+ </info>
+ <glossdef>
+ <para>
+ Specifies a password file to use for obtaining static
+ user identification (<filename>uid</filename>) values
+ when the OpenEmbedded build system adds a user to the
+ system during package installation.
+ </para>
+
+ <para>
+ When applying static user identification
+ (<filename>uid</filename>) values, the OpenEmbedded build
+ system looks in
+ <link linkend='var-BBPATH'><filename>BBPATH</filename></link>
+ for a <filename>files/passwd</filename> file and then applies
+ those <filename>uid</filename> values.
+ Set the variable as follows in your
+ <filename>local.conf</filename> file:
+ <literallayout class='monospaced'>
+ USERADD_UID_TABLES = "files/passwd"
+ </literallayout>
+ </para>
+
+ <note>
+ Setting the
+ <link linkend='var-USERADDEXTENSION'><filename>USERADDEXTENSION</filename></link>
+ variable to "useradd-staticids" causes the build system
+ to use static <filename>uid</filename> values.
+ </note>
+ </glossdef>
+ </glossentry>
+
+ <glossentry id='var-USERADDEXTENSION'><glossterm><imagedata fileref="figures/define-generic.png" />USERADDEXTENSION</glossterm>
+ <info>
+                USERADDEXTENSION[doc] = "When set to "useradd-staticids", causes the OpenEmbedded build system to base all user and group additions on static passwd and group files found in BBPATH."
+ </info>
<glossdef>
<para>
When set to "useradd-staticids", causes the
@@ -9760,7 +11482,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
<glossdiv id='var-glossary-w'><title>W</title>
- <glossentry id='var-WARN_QA'><glossterm>WARN_QA</glossterm>
+ <glossentry id='var-WARN_QA'><glossterm><imagedata fileref="figures/define-generic.png" />WARN_QA</glossterm>
+ <info>
+ WARN_QA[doc] = "Specifies the quality assurance checks whose failures are reported as warnings by the OpenEmbedded build system."
+ </info>
<glossdef>
<para>
Specifies the quality assurance checks whose failures are
@@ -9775,7 +11500,10 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdef>
</glossentry>
- <glossentry id='var-WORKDIR'><glossterm>WORKDIR</glossterm>
+ <glossentry id='var-WORKDIR'><glossterm><imagedata fileref="figures/define-generic.png" />WORKDIR</glossterm>
+ <info>
+ WORKDIR[doc] = "The pathname of the working directory in which the OpenEmbedded build system builds a recipe. This directory is located within the TMPDIR directory structure and changes as different packages are built."
+ </info>
<glossdef>
<para>
The pathname of the work directory in which the OpenEmbedded
@@ -9831,8 +11559,32 @@ recipes-graphics/xorg-font/font-alias_1.0.3.bb:PR = "${INC_PR}.3"
</glossdiv>
-<!-- <glossdiv id='var-glossary-x'><title>X</title>-->
-<!-- </glossdiv>-->
+ <glossdiv id='var-glossary-x'><title>X</title>
+
+ <glossentry id='var-XSERVER'><glossterm><imagedata fileref="figures/define-generic.png" />XSERVER</glossterm>
+ <info>
+                XSERVER[doc] = "Specifies the packages that should be installed to provide an X server and drivers for the current machine."
+ </info>
+ <glossdef>
+ <para>
+ Specifies the packages that should be installed to
+ provide an X server and drivers for the current machine,
+ assuming your image directly includes
+ <filename>packagegroup-core-x11-xserver</filename> or,
+ perhaps indirectly, includes "x11-base" in
+ <link linkend='var-IMAGE_FEATURES'><filename>IMAGE_FEATURES</filename></link>.
+ </para>
+
+ <para>
+ The default value of <filename>XSERVER</filename>, if not
+ specified in the machine configuration, is
+ "xserver-xorg xf86-video-fbdev xf86-input-evdev".
+ </para>
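+
+            <para>
+                As an illustrative sketch only (driver selection depends
+                on the machine), a machine configuration file could
+                override the default as follows:
+                <literallayout class='monospaced'>
+     XSERVER ?= "xserver-xorg xf86-input-evdev xf86-video-fbdev"
+                </literallayout>
+            </para>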
+ </glossdef>
+ </glossentry>
+
+ </glossdiv>
<!-- <glossdiv id='var-glossary-y'><title>Y</title>-->
<!-- </glossdiv>-->
diff --git a/documentation/ref-manual/technical-details.xml b/documentation/ref-manual/technical-details.xml
index b0588351d5..6bb3381e72 100644
--- a/documentation/ref-manual/technical-details.xml
+++ b/documentation/ref-manual/technical-details.xml
@@ -85,7 +85,7 @@
</para>
<para>
- The most common usage for BitBake is <filename>bitbake &lt;packagename&gt;</filename>, where
+ The most common usage for BitBake is <filename>bitbake <replaceable>packagename</replaceable></filename>, where
<filename>packagename</filename> is the name of the package you want to build
(referred to as the "target" in this manual).
The target often equates to the first part of a recipe's filename
@@ -304,7 +304,8 @@
<para>
Here is the bootstrap process for the relocatable toolchain:
<literallayout class='monospaced'>
- gcc -> binutils-crosssdk -> gcc-crosssdk-initial -> linux-libc-headers -> eglibc-initial -> nativesdk-eglibc -> gcc-crosssdk -> gcc-cross-canadian
+ gcc -> binutils-crosssdk -> gcc-crosssdk-initial -> linux-libc-headers ->
+ eglibc-initial -> nativesdk-eglibc -> gcc-crosssdk -> gcc-cross-canadian
</literallayout>
<itemizedlist>
<listitem><para><filename>gcc</filename>:
@@ -608,13 +609,13 @@
make some dependency and hash information available to the build.
This information includes:
<itemizedlist>
- <listitem><para><filename>BB_BASEHASH_task-&lt;taskname&gt;</filename>:
+ <listitem><para><filename>BB_BASEHASH_task-</filename><replaceable>taskname</replaceable>:
The base hashes for each task in the recipe.
</para></listitem>
- <listitem><para><filename>BB_BASEHASH_&lt;filename:taskname&gt;</filename>:
+ <listitem><para><filename>BB_BASEHASH_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
The base hashes for each dependent task.
</para></listitem>
- <listitem><para><filename>BBHASHDEPS_&lt;filename:taskname&gt;</filename>:
+ <listitem><para><filename>BBHASHDEPS_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
The task dependencies for each task.
</para></listitem>
<listitem><para><filename>BB_TASKHASH</filename>:
diff --git a/documentation/ref-manual/usingpoky.xml b/documentation/ref-manual/usingpoky.xml
index 0ab19fd1a6..41f872c07e 100644
--- a/documentation/ref-manual/usingpoky.xml
+++ b/documentation/ref-manual/usingpoky.xml
@@ -35,12 +35,12 @@
<link linkend='structure-memres-core-script'><filename>oe-init-build-env-memres</filename></link>).
Here is an example:
<literallayout class='monospaced'>
- $ source &OE_INIT_FILE; [&lt;build_dir&gt;]
+ $ source &OE_INIT_FILE; [<replaceable>build_dir</replaceable>]
</literallayout>
</para>
<para>
- The <filename>build_dir</filename> argument is optional and specifies the directory the
+ The <replaceable>build_dir</replaceable> argument is optional and specifies the directory the
OpenEmbedded build system uses for the build -
the <ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>.
If you do not specify a Build Directory, it defaults to a directory
@@ -53,12 +53,12 @@
<para>
Once the build environment is set up, you can build a target using:
<literallayout class='monospaced'>
- $ bitbake &lt;target&gt;
+ $ bitbake <replaceable>target</replaceable>
</literallayout>
</para>
<para>
- The <filename>target</filename> is the name of the recipe you want to build.
+ The <replaceable>target</replaceable> is the name of the recipe you want to build.
Common targets are the images in <filename>meta/recipes-core/images</filename>,
<filename>meta/recipes-sato/images</filename>, etc. all found in the
<ulink url='&YOCTO_DOCS_DEV_URL;#source-directory'>Source Directory</ulink>.
@@ -154,14 +154,14 @@
<title>Task Failures</title>
<para>The log file for shell tasks is available in
- <filename>${WORKDIR}/temp/log.do_taskname.pid</filename>.
- For example, the <filename>compile</filename> task for the QEMU minimal image for the x86
+ <filename>${WORKDIR}/temp/log.do_<replaceable>taskname</replaceable>.pid</filename>.
+ For example, the <filename>do_compile</filename> task for the QEMU minimal image for the x86
machine (<filename>qemux86</filename>) might be
<filename>tmp/work/qemux86-poky-linux/core-image-minimal/1.0-r0/temp/log.do_compile.20830</filename>.
To see what
<ulink url='&YOCTO_DOCS_DEV_URL;#bitbake-term'>BitBake</ulink>
runs to generate that log, look at the corresponding
- <filename>run.do_taskname.pid</filename> file located in the same directory.
+ <filename>run.do_<replaceable>taskname</replaceable>.pid</filename> file located in the same directory.
</para>
<para>
@@ -203,7 +203,7 @@
$ bitbake matchbox-desktop
.
.
- [make some changes to the source code in the work directory]
+ <replaceable>make some changes to the source code in the work directory</replaceable>
.
.
$ bitbake matchbox-desktop -c compile -f
@@ -238,7 +238,7 @@
<para>
Sometimes it can be hard to see why BitBake wants to build
other packages before building a given package you have specified.
- The <filename>bitbake -g &lt;targetname&gt;</filename> command
+ The <filename>bitbake -g <replaceable>targetname</replaceable></filename> command
creates the <filename>pn-buildlist</filename>,
<filename>pn-depends.dot</filename>,
<filename>package-depends.dot</filename>, and
@@ -247,7 +247,7 @@
These files show what will be built and the package and task
dependencies, which are useful for debugging problems.
You can use the
- <filename>bitbake -g -u depexp &lt;targetname&gt;</filename>
+ <filename>bitbake -g -u depexp <replaceable>targetname</replaceable></filename>
command to display the results in a more human-readable form.
</para>
</section>
@@ -264,7 +264,7 @@
</para>
<para>
- The output from <filename>bitbake -DDD -v targetname</filename> can reveal why
+ The output from <filename>bitbake -DDD -v</filename> <replaceable>targetname</replaceable> can reveal why
BitBake chose a certain version of a package or why BitBake
picked a certain provider.
This command could also help you in a situation where you think BitBake did something
@@ -310,7 +310,7 @@
To build a specific recipe (<filename>.bb</filename> file),
you can use the following command form:
<literallayout class='monospaced'>
- $ bitbake -b &lt;somepath/somerecipe.bb&gt;
+ $ bitbake -b <replaceable>somepath</replaceable>/<replaceable>somerecipe</replaceable>.bb
</literallayout>
This command form does not check for dependencies.
Consequently, you should use it
@@ -334,7 +334,7 @@
This next example shows the parsing environment for a specific
recipe:
<literallayout class='monospaced'>
- $ bitbake -e &lt;recipename&gt;
+ $ bitbake -e <replaceable>recipename</replaceable>
</literallayout>
</para>
</section>
@@ -540,7 +540,7 @@
<para>
Build history information is kept in
- <filename>$</filename><link linkend='var-TOPDIR'><filename>TOPDIR</filename></link><filename>/buildhistory</filename>
+ <filename>${</filename><link linkend='var-TOPDIR'><filename>TOPDIR</filename></link><filename>}/buildhistory</filename>
in the Build Directory as defined by the
<link linkend='var-BUILDHISTORY_DIR'><filename>BUILDHISTORY_DIR</filename></link>
variable.
@@ -679,9 +679,11 @@
The files are defined by
<link linkend='var-BUILDHISTORY_IMAGE_FILES'><filename>BUILDHISTORY_IMAGE_FILES</filename></link>.
</para></listitem>
- <listitem><para><filename>build-id:</filename>
+ <listitem><para><filename>build-id.txt:</filename>
Human-readable information about the build configuration
- and metadata source revisions.</para></listitem>
+ and metadata source revisions.
+ This file contains the full build header as printed
+ by BitBake.</para></listitem>
<listitem><para><filename>*.dot:</filename>
Dependency graphs for the image that are
compatible with <filename>graphviz</filename>.
@@ -886,6 +888,148 @@
</section>
</section>
+<section id='speeding-up-the-build'>
+ <title>Speeding Up the Build</title>
+
+ <para>
+ Build time can be an issue.
+        By default, the build system uses three simple controls to try to
+        maximize build efficiency:
+ <itemizedlist>
+ <listitem><para>
+ <link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
+ </para></listitem>
+ <listitem><para>
+ <ulink url='&YOCTO_DOCS_BB_URL;#var-BB_NUMBER_PARSE_THREADS'><filename>BB_NUMBER_PARSE_THREADS</filename></ulink>
+ </para></listitem>
+ <listitem><para>
+ <link linkend='var-PARALLEL_MAKE'><filename>PARALLEL_MAKE</filename></link>
+ </para></listitem>
+ </itemizedlist>
+ These three variables all scale to the number of processor cores
+ available on the build system.
+ This auto-scaling ensures that the build system fundamentally takes
+ advantage of potential parallel operations during the build
+ based on the build machine's capabilities.
+ </para>
+
+ <para>
+ If you need to achieve even faster builds than what the build system
+ produces by default, you can consider and implement some of the
+ following:
+ <itemizedlist>
+ <listitem><para>
+ <filename>BB_NUMBER_THREADS</filename>,
+ <filename>BB_NUMBER_PARSE_THREADS</filename>, and
+ <filename>PARALLEL_MAKE</filename>:
+ As previously mentioned, the build system scales the values
+ for these variables.
+                However, you can manually override them in your
+                <filename>local.conf</filename> file if you are not satisfied
+                with the defaults, as shown in the example following this
+                list.
+ </para></listitem>
+ <listitem><para>
+ File system type:
+ The file system type that the build is being performed on can
+ also influence performance.
+                        Using <filename>ext4</filename> is recommended over
+                        <filename>ext2</filename> and <filename>ext3</filename>
+                        due to <filename>ext4</filename>'s improved features,
+                        such as extents.
+ </para></listitem>
+ <listitem><para>
+ Disabling the updating of access time using
+ <filename>noatime</filename>:
+ The <filename>noatime</filename> mount option prevents the
+ build system from updating file and directory access times.
+ </para></listitem>
+ <listitem><para>
+                        Setting a longer commit interval:
+                        Using the "commit=" mount option increases the
+                        interval, in seconds, between disk cache writes.
+                        Changing this interval from the five-second default to
+                        something longer increases the risk of data loss but
+                        decreases the need to write to the disk, thus improving
+                        build performance.
+ </para></listitem>
+ <listitem><para>
+ Choosing the packaging backend:
+ Of the available packaging backends, IPK is the fastest.
+                        Additionally, selecting a single packaging backend
+                        rather than several helps.
+ </para></listitem>
+ <listitem><para>
+ Using <filename>/tmp</filename> as a temporary file system:
+ While this can help speed up the build, the benefits are
+ limited due to the compiler using
+ <filename>-pipe</filename>.
+ The build system goes to some lengths to avoid
+ <filename>sync()</filename> calls into the
+ file system on the principle that if there was a significant
+ failure, the
+ <ulink url='&YOCTO_DOCS_DEV_URL;#build-directory'>Build Directory</ulink>
+ contents could easily be rebuilt.
+ </para></listitem>
+ <listitem><para>
+ Inheriting the
+ <link linkend='ref-classes-rm-work'><filename>rm_work</filename></link>
+ class:
+                        Inheriting this class has been shown to speed up builds due to
+ significantly lower amounts of data stored in the data
+ cache as well as on disk.
+ Inheriting this class also makes cleanup of
+ <link linkend='var-TMPDIR'><filename>TMPDIR</filename></link>
+                        faster, at the expense of being able to easily dive
+                        into the source code.
+ File system maintainers have recommended that the fastest way
+ to clean up large numbers of files is to reformat partitions
+                        rather than to delete files, due to the linear nature
+                        of partitions.
+                        This, of course, assumes you structure the disk partitions and
+                        file systems in a way that makes this practical.
+ </para></listitem>
+ </itemizedlist>
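+        As a minimal sketch of the first item in the previous list (the
+        values shown are illustrative only and assume an 8-core build
+        host), you could override the parallelism controls in your
+        <filename>local.conf</filename> file as follows:
+        <literallayout class='monospaced'>
+     # Illustrative values for an 8-core build host
+     BB_NUMBER_THREADS = "8"
+     BB_NUMBER_PARSE_THREADS = "8"
+     PARALLEL_MAKE = "-j 8"
+        </literallayout>
+        Similarly, the mount options mentioned above could be applied to
+        the file system holding the build area through
+        <filename>/etc/fstab</filename>, for example (the device and
+        mount point shown are placeholders):
+        <literallayout class='monospaced'>
+     /dev/sdb1  /build  ext4  defaults,noatime,commit=60  0 0
+        </literallayout>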
+        Aside from the previous list, you should keep in mind some
+        trade-offs that can help you speed up the build:
+ <itemizedlist>
+ <listitem><para>
+ Exclude debug symbols and other debug information:
+ If you do not need these symbols and other debug information,
+ disabling the <filename>*-dbg</filename> package generation
+ can speed up the build.
+                You can disable this generation by setting the
+                <link linkend='var-INHIBIT_PACKAGE_DEBUG_SPLIT'><filename>INHIBIT_PACKAGE_DEBUG_SPLIT</filename></link>
+                variable to "1", as shown in the example following this
+                list.
+ </para></listitem>
+ <listitem><para>
+ Disable static library generation for recipes derived from
+ <filename>autoconf</filename> or <filename>libtool</filename>:
+ Following is an example showing how to disable static
+ libraries and still provide an override to handle exceptions:
+ <literallayout class='monospaced'>
+ STATICLIBCONF = "--disable-static"
+ STATICLIBCONF_sqlite3-native = ""
+ EXTRA_OECONF += "${STATICLIBCONF}"
+ </literallayout>
+ <note><title>Notes</title>
+ <itemizedlist>
+ <listitem><para>
+ Some recipes need static libraries in order to work
+ correctly (e.g. <filename>pseudo-native</filename>
+ needs <filename>sqlite3-native</filename>).
+ Overrides, as in the previous example, account for
+ these kinds of exceptions.
+ </para></listitem>
+ <listitem><para>
+ Some packages have packaging code that assumes the
+ presence of the static libraries.
+ If so, you might need to exclude them as well.
+ </para></listitem>
+ </itemizedlist>
+ </note>
+ </para></listitem>
+ </itemizedlist>
+ </para>
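+
+    <para>
+        As a minimal sketch of the first trade-off in the previous list
+        (placing the setting in <filename>local.conf</filename> is an
+        assumption; any global configuration file works), disabling
+        generation of the <filename>*-dbg</filename> packages is a single
+        assignment:
+        <literallayout class='monospaced'>
+     # Skip the debug package split to save build time and disk space
+     INHIBIT_PACKAGE_DEBUG_SPLIT = "1"
+        </literallayout>
+    </para>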
+</section>
</chapter>
<!--
vim: expandtab tw=80 ts=4
diff --git a/documentation/template/qa-code-permalinks.xsl b/documentation/template/qa-code-permalinks.xsl
new file mode 100644
index 0000000000..a309095c60
--- /dev/null
+++ b/documentation/template/qa-code-permalinks.xsl
@@ -0,0 +1,23 @@
+<!--
+This XSL sheet enables creation of permalinks for <para><code>
+constructs. Right now, this construct occurs only in the ref-manual
+book's qa issues and warnings chapter. However, if the construct
+were to appear anywhere in that ref-manual, a permalink would be
+generated. I don't foresee any <para><code> constructs being used
+in the future, but if they are, a generically numbered permalink
+would be generated.
+-->
+<xsl:stylesheet version="1.0"
+ xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
+ xmlns:d="http://docbook.org/ns/docbook"
+ xmlns="http://www.w3.org/1999/xhtml">
+
+ <xsl:template match="para/code">
+ <xsl:apply-imports/>
+ <xsl:if test="$generate.permalink != 0">
+ <xsl:call-template name="permalink">
+ <xsl:with-param name="node" select=".."/>
+ </xsl:call-template>
+ </xsl:if>
+ </xsl:template>
+</xsl:stylesheet>
diff --git a/documentation/tools/mega-manual.sed b/documentation/tools/mega-manual.sed
index c319d39c64..748632bb55 100644
--- a/documentation/tools/mega-manual.sed
+++ b/documentation/tools/mega-manual.sed
@@ -2,30 +2,30 @@
# This style is for manual folders like "yocto-project-qs" and "poky-ref-manual".
# This is the old way that did it. Can't do that now that we have "bitbake-user-manual" strings
# in the mega-manual.
-# s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/[a-z]*-[a-z]*-[a-z]*\/[a-z]*-[a-z]*-[a-z]*.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/yocto-project-qs\/yocto-project-qs.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/poky-ref-manual\/poky-ref-manual.html#/\"link\" href=\"#/g
+# s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/[a-z]*-[a-z]*-[a-z]*\/[a-z]*-[a-z]*-[a-z]*.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/yocto-project-qs\/yocto-project-qs.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/poky-ref-manual\/poky-ref-manual.html#/\"link\" href=\"#/g
# Processes all other manuals (<word>-<word> style) except for the BitBake User Manual because
# it is not included in the mega-manual.
# This style is for manual folders that use two word, which is the standard now (e.g. "ref-manual").
# This was the one-liner that worked before we introduced the BitBake User Manual, which is
# not in the mega-manual.
-# s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/[a-z]*-[a-z]*\/[a-z]*-[a-z]*.html#/\"link\" href=\"#/g
+# s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/[a-z]*-[a-z]*\/[a-z]*-[a-z]*.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/adt-manual\/adt-manual.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/bsp-guide\/bsp-guide.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/dev-manual\/dev-manual.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/kernel-dev\/kernel-dev.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/profile-manual\/profile-manual.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/ref-manual\/ref-manual.html#/\"link\" href=\"#/g
-s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/yocto-project-qs\/yocto-project-qs.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/adt-manual\/adt-manual.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/bsp-guide\/bsp-guide.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/dev-manual\/dev-manual.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/kernel-dev\/kernel-dev.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/profile-manual\/profile-manual.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/ref-manual\/ref-manual.html#/\"link\" href=\"#/g
+s/\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/yocto-project-qs\/yocto-project-qs.html#/\"link\" href=\"#/g
# Process cases where just an external manual is referenced without an id anchor
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/yocto-project-qs\/yocto-project-qs.html\" target=\"_top\">Yocto Project Quick Start<\/a>/Yocto Project Quick Start/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/dev-manual\/dev-manual.html\" target=\"_top\">Yocto Project Development Manual<\/a>/Yocto Project Development Manual/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/adt-manual\/adt-manual.html\" target=\"_top\">Yocto Project Application Developer's Guide<\/a>/Yocto Project Application Developer's Guide/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/bsp-guide\/bsp-guide.html\" target=\"_top\">Yocto Project Board Support Package (BSP) Developer's Guide<\/a>/Yocto Project Board Support Package (BSP) Developer's Guide/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/profile-manual\/profile-manual.html\" target=\"_top\">Yocto Project Profiling and Tracing Manual<\/a>/Yocto Project Profiling and Tracing Manual/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/kernel-dev\/kernel-dev.html\" target=\"_top\">Yocto Project Linux Kernel Development Manual<\/a>/Yocto Project Linux Kernel Development Manual/g
-s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.7\/ref-manual\/ref-manual.html\" target=\"_top\">Yocto Project Reference Manual<\/a>/Yocto Project Reference Manual/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/yocto-project-qs\/yocto-project-qs.html\" target=\"_top\">Yocto Project Quick Start<\/a>/Yocto Project Quick Start/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/dev-manual\/dev-manual.html\" target=\"_top\">Yocto Project Development Manual<\/a>/Yocto Project Development Manual/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/adt-manual\/adt-manual.html\" target=\"_top\">Yocto Project Application Developer's Guide<\/a>/Yocto Project Application Developer's Guide/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/bsp-guide\/bsp-guide.html\" target=\"_top\">Yocto Project Board Support Package (BSP) Developer's Guide<\/a>/Yocto Project Board Support Package (BSP) Developer's Guide/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/profile-manual\/profile-manual.html\" target=\"_top\">Yocto Project Profiling and Tracing Manual<\/a>/Yocto Project Profiling and Tracing Manual/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/kernel-dev\/kernel-dev.html\" target=\"_top\">Yocto Project Linux Kernel Development Manual<\/a>/Yocto Project Linux Kernel Development Manual/g
+s/<a class=\"ulink\" href=\"http:\/\/www.yoctoproject.org\/docs\/1.8\/ref-manual\/ref-manual.html\" target=\"_top\">Yocto Project Reference Manual<\/a>/Yocto Project Reference Manual/g
diff --git a/documentation/yocto-project-qs/yocto-project-qs.xml b/documentation/yocto-project-qs/yocto-project-qs.xml
index 333ccc4727..9f1a4fbb36 100644
--- a/documentation/yocto-project-qs/yocto-project-qs.xml
+++ b/documentation/yocto-project-qs/yocto-project-qs.xml
@@ -1,4 +1,4 @@
-<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
+<!DOCTYPE article PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd"
[<!ENTITY % poky SYSTEM "../poky.ent"> %poky; ] >
@@ -86,10 +86,12 @@
the Yocto Project Reference Manual.
</para></listitem>
<listitem><para><emphasis>Developer Screencast:</emphasis> The
- <ulink url='http://vimeo.com/36450321'>Getting Started with the Yocto Project - New
- Developer Screencast Tutorial</ulink> provides a 30-minute video
- created for users unfamiliar with the Yocto Project but familiar
- with Linux build systems.</para></listitem>
+ <ulink url='http://vimeo.com/36450321'>Getting Started with the Yocto Project - New Developer Screencast Tutorial</ulink>
+ provides a 30-minute video created for users unfamiliar with
+ the Yocto Project but familiar with Linux build systems.
+ While this screencast is somewhat dated, the introductory
+ and fundamental concepts are useful for the beginner.
+ </para></listitem>
</itemizedlist>
</para>
</section>
@@ -177,7 +179,8 @@
decrease the time needed to build images.
</para></listitem>
<listitem><para>
- The right packages.
+ Appropriate packages installed on the system you are using for
+ builds.
</para></listitem>
<listitem><para>
A release of the Yocto Project.
@@ -509,7 +512,7 @@
Another couple of variables of interest are the
<ulink url='&YOCTO_DOCS_REF_URL;#var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></ulink> and the
<ulink url='&YOCTO_DOCS_REF_URL;#var-PARALLEL_MAKE'><filename>PARALLEL_MAKE</filename></ulink> variables.
- By default, these variables are set to how ever many processor
+ By default, these variables are set to the number of processor
cores your build host uses.
However, if your build host uses multiple processor cores,
you should increase these settings to twice the number of
@@ -546,7 +549,7 @@
"<ulink url='&YOCTO_DOCS_REF_URL;#required-git-tar-and-python-versions'>Required Git, tar, and Python</ulink>"
section in the Yocto Project Reference Manual.
</note>
- The final command runs the image:
+ The final command runs the image using the QEMU emulator:
<literallayout class='monospaced'>
$ runqemu qemux86
</literallayout>
@@ -622,25 +625,25 @@
</para>
<literallayout class='monospaced'>
- poky-eglibc-&lt;<emphasis>host_system</emphasis>&gt;-&lt;<emphasis>image_type</emphasis>&gt;-&lt;<emphasis>arch</emphasis>&gt;-toolchain-&lt;<emphasis>release_version</emphasis>&gt;.sh
+ poky-eglibc-<replaceable>host_system</replaceable>-<replaceable>image_type</replaceable>-<replaceable>arch</replaceable>-toolchain-<replaceable>release_version</replaceable>.sh
Where:
- &lt;<emphasis>host_system</emphasis>&gt; is a string representing your development system:
+ <replaceable>host_system</replaceable> is a string representing your development system:
i686 or x86_64.
- &lt;<emphasis>image_type</emphasis>&gt; is a string representing the image you wish to
+ <replaceable>image_type</replaceable> is a string representing the image you wish to
develop a Software Development Toolkit (SDK) for use against.
The Yocto Project builds toolchain installers using the
following BitBake command:
bitbake core-image-sato -c populate_sdk
- &lt;<emphasis>arch</emphasis>&gt; is a string representing the tuned target architecture:
+ <replaceable>arch</replaceable> is a string representing the tuned target architecture:
i586, x86_64, powerpc, mips, armv7a or armv5te
- &lt;<emphasis>release_version</emphasis>&gt; is a string representing the release number of the
+ <replaceable>release_version</replaceable> is a string representing the release number of the
Yocto Project:
&DISTRO;, &DISTRO;+snapshot
@@ -706,11 +709,11 @@
<para>
Most kernel files have one of the following forms:
<literallayout class='monospaced'>
- *zImage-qemu&lt;<emphasis>arch</emphasis>&gt;.bin
- vmlinux-qemu&lt;<emphasis>arch</emphasis>&gt;.bin
+ *zImage-qemu<replaceable>arch</replaceable>.bin
+ vmlinux-qemu<replaceable>arch</replaceable>.bin
Where:
- &lt;<emphasis>arch</emphasis>&gt; is a string representing the target architecture:
+ <replaceable>arch</replaceable> is a string representing the target architecture:
x86, x86-64, ppc, mips, or arm.
</literallayout>
</para>
@@ -740,17 +743,17 @@
The <filename>tar</filename> form can be flattened out in your host development system
and used for build purposes with the Yocto Project.
<literallayout class='monospaced'>
- core-image-&lt;<emphasis>profile</emphasis>&gt;-qemu&lt;<emphasis>arch</emphasis>&gt;.ext3
- core-image-&lt;<emphasis>profile</emphasis>&gt;-qemu&lt;<emphasis>arch</emphasis>&gt;.tar.bz2
+ core-image-<replaceable>profile</replaceable>-qemu<replaceable>arch</replaceable>.ext3
+ core-image-<replaceable>profile</replaceable>-qemu<replaceable>arch</replaceable>.tar.bz2
Where:
- &lt;<emphasis>profile</emphasis>&gt; is the filesystem image's profile:
+ <replaceable>profile</replaceable> is the filesystem image's profile:
lsb, lsb-dev, lsb-sdk, lsb-qt3, minimal, minimal-dev, sato,
sato-dev, or sato-sdk. For information on these types of image
- profiles, see the "<ulink url='&YOCTO_DOCS_REF_URL;#ref-images'>Images</ulink>" chapter in the Yocto Project
- Reference Manual.
+ profiles, see the "<ulink url='&YOCTO_DOCS_REF_URL;#ref-images'>Images</ulink>"
+ chapter in the Yocto Project Reference Manual.
- &lt;<emphasis>arch</emphasis>&gt; is a string representing the target architecture:
+ <replaceable>arch</replaceable> is a string representing the target architecture:
x86, x86-64, ppc, mips, or arm.
</literallayout>
</para>
@@ -763,13 +766,13 @@
Before you start the QEMU emulator, you need to set up the emulation environment.
The following command form sets up the emulation environment.
<literallayout class='monospaced'>
- $ source &YOCTO_ADTPATH_DIR;/environment-setup-&lt;<emphasis>arch</emphasis>&gt;-poky-linux-&lt;<emphasis>if</emphasis>&gt;
+ $ source &YOCTO_ADTPATH_DIR;/environment-setup-<replaceable>arch</replaceable>-poky-linux-<replaceable>if</replaceable>
Where:
- &lt;<emphasis>arch</emphasis>&gt; is a string representing the target architecture:
+ <replaceable>arch</replaceable> is a string representing the target architecture:
i586, x86_64, ppc603e, mips, or armv5te.
- &lt;<emphasis>if</emphasis>&gt; is a string representing an embedded application binary interface.
+ <replaceable>if</replaceable> is a string representing an embedded application binary interface.
Not all setup scripts include this string.
</literallayout>
</para>
@@ -777,15 +780,15 @@
<para>
Finally, this command form invokes the QEMU emulator
<literallayout class='monospaced'>
- $ runqemu &lt;<emphasis>qemuarch</emphasis>&gt; &lt;<emphasis>kernel-image</emphasis>&gt; &lt;<emphasis>filesystem-image</emphasis>&gt;
+ $ runqemu <replaceable>qemuarch</replaceable> <replaceable>kernel-image</replaceable> <replaceable>filesystem-image</replaceable>
Where:
- &lt;<emphasis>qemuarch</emphasis>&gt; is a string representing the target architecture: qemux86, qemux86-64,
+ <replaceable>qemuarch</replaceable> is a string representing the target architecture: qemux86, qemux86-64,
qemuppc, qemumips, or qemuarm.
- &lt;<emphasis>kernel-image</emphasis>&gt; is the architecture-specific kernel image.
+ <replaceable>kernel-image</replaceable> is the architecture-specific kernel image.
- &lt;<emphasis>filesystem-image</emphasis>&gt; is the .ext3 filesystem image.
+ <replaceable>filesystem-image</replaceable> is the .ext3 filesystem image.
</literallayout>
</para>
diff --git a/meta-yocto-bsp/conf/machine/beaglebone.conf b/meta-yocto-bsp/conf/machine/beaglebone.conf
index 42637158df..fb0189d5d7 100644
--- a/meta-yocto-bsp/conf/machine/beaglebone.conf
+++ b/meta-yocto-bsp/conf/machine/beaglebone.conf
@@ -24,7 +24,7 @@ SERIAL_CONSOLE = "115200 ttyO0"
PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto"
PREFERRED_VERSION_linux-yocto ?= "3.14%"
-KERNEL_IMAGETYPE = "uImage"
+KERNEL_IMAGETYPE = "zImage"
KERNEL_DEVICETREE = "am335x-bone.dtb am335x-boneblack.dtb"
KERNEL_EXTRA_ARGS += "LOADADDR=${UBOOT_ENTRYPOINT}"
@@ -35,3 +35,5 @@ UBOOT_ENTRYPOINT = "0x80008000"
UBOOT_LOADADDRESS = "0x80008000"
MACHINE_FEATURES = "usbgadget usbhost vfat alsa"
+
+IMAGE_BOOT_FILES ?= "u-boot.${UBOOT_SUFFIX} MLO"
diff --git a/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.14.bbappend b/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.14.bbappend
index 2305c333ee..eb313ffe98 100644
--- a/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.14.bbappend
+++ b/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -7,11 +7,11 @@ KBRANCH_mpc8315e-rdb = "standard/fsl-mpc8315e-rdb"
KMACHINE_genericx86 ?= "common-pc"
KMACHINE_genericx86-64 ?= "common-pc-64"
-SRCREV_machine_genericx86 ?= "41d5fe27dc3d3e769cb6af01770cac3d75a91e1a"
-SRCREV_machine_genericx86-64 ?= "96930820e0cb6d4b31d5e0c8f3174805f4a868b3"
-SRCREV_machine_edgerouter ?= "96930820e0cb6d4b31d5e0c8f3174805f4a868b3"
-SRCREV_machine_beaglebone ?= "96930820e0cb6d4b31d5e0c8f3174805f4a868b3"
-SRCREV_machine_mpc8315e-rdb ?= "824683a5531beffbfc0537f55ff477ad28e761fa"
+SRCREV_machine_genericx86 ?= "a39fd81fa54776b2ac8c288251846890c3124dee"
+SRCREV_machine_genericx86-64 ?= "dbe5b52e93ff114b2c0f5da6f6af91f52c18f2b8"
+SRCREV_machine_edgerouter ?= "dbe5b52e93ff114b2c0f5da6f6af91f52c18f2b8"
+SRCREV_machine_beaglebone ?= "dbe5b52e93ff114b2c0f5da6f6af91f52c18f2b8"
+SRCREV_machine_mpc8315e-rdb ?= "4b2929392ec56ca8ef90cc98042609795c44aa3c"
COMPATIBLE_MACHINE_genericx86 = "genericx86"
COMPATIBLE_MACHINE_genericx86-64 = "genericx86-64"
diff --git a/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.4.bbappend b/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.4.bbappend
deleted file mode 100644
index f2023376ae..0000000000
--- a/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_3.4.bbappend
+++ /dev/null
@@ -1,7 +0,0 @@
-KBRANCH_mpc8315e-rdb = "standard/fsl-mpc8315e-rdb"
-
-SRCREV_machine_genericx86 ?= "cdd7a546922ca1c46c94adeec3b9c90dc9aaad2d"
-SRCREV_machine_mpc8315e-rdb ?= "ed373937e5460d3321e1f243cb89ed512d0c9047"
-
-COMPATIBLE_MACHINE_mpc8315e-rdb = "mpc8315e-rdb"
-
diff --git a/meta-yocto/conf/distro/include/maintainers.inc b/meta-yocto/conf/distro/include/maintainers.inc
index 6940d9e736..84797c3ca6 100644
--- a/meta-yocto/conf/distro/include/maintainers.inc
+++ b/meta-yocto/conf/distro/include/maintainers.inc
@@ -28,7 +28,7 @@
# Please keep this list in alphabetical order.
#
RECIPE_MAINTAINER_pn-acl = "Chong Lu <Chong.Lu@windriver.com>"
-RECIPE_MAINTAINER_pn-acpid = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-acpid = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-adt-installer = "Jessica Zhang <jessica.zhang@intel.com>"
RECIPE_MAINTAINER_pn-alsa-lib = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-alsa-state = "Cristian Iorga <cristian.iorga@intel.com>"
@@ -38,10 +38,10 @@ RECIPE_MAINTAINER_pn-alsa-utils-alsaconf = "Cristian Iorga <cristian.iorga@intel
RECIPE_MAINTAINER_pn-apmd = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-apr = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-apr-util = "Hongxu Jia <hongxu.jia@windriver.com>"
-RECIPE_MAINTAINER_pn-apt = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-aspell = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-apt = "Aníbal Limón <anibal.limon@linux.intel.com>"
+RECIPE_MAINTAINER_pn-aspell = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-atk = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-at = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-at = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-at-spi2-atk = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-at-spi2-core = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-attr = "Chong Lu <Chong.Lu@windriver.com>"
@@ -55,7 +55,7 @@ RECIPE_MAINTAINER_pn-babeltrace = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-base-files = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-base-passwd = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-bash = "Hongxu Jia <hongxu.jia@windriver.com>"
-RECIPE_MAINTAINER_pn-bc = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-bc = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-bdwgc = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-beecrypt = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-bigreqsproto = "Saul Wold <sgw@linux.intel.com>"
@@ -92,8 +92,8 @@ RECIPE_MAINTAINER_pn-cogl-1.0 = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-compositeproto = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-connman = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-connman-gnome = "Cristian Iorga <cristian.iorga@intel.com>"
-RECIPE_MAINTAINER_pn-consolekit = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-console-tools = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-consolekit = "Chen Qi <Qi.Chen@windriver.com>"
+RECIPE_MAINTAINER_pn-console-tools = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-core-image-base = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-core-image-clutter = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-core-image-directfb = "Saul Wold <sgw@linux.intel.com>"
@@ -113,12 +113,12 @@ RECIPE_MAINTAINER_pn-core-image-sato-sdk = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-core-image-x11 = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-coreutils = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-cpio = "Chen Qi <Qi.Chen@windriver.com>"
-RECIPE_MAINTAINER_pn-cracklib ="Hongxu Jia <hongxu.jia@windriver.com>"
-RECIPE_MAINTAINER_pn-createrepo = "Paul Eggleton <paul.eggleton@linux.intel.com>"
-RECIPE_MAINTAINER_pn-cronie = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-cracklib = "Hongxu Jia <hongxu.jia@windriver.com>"
+RECIPE_MAINTAINER_pn-createrepo = "Hongxu Jia <hongxu.jia@windriver.com>"
+RECIPE_MAINTAINER_pn-cronie = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-cross-localedef-native = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-cups = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-curl = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-cups = "Chong Lu <Chong.Lu@windriver.com>"
+RECIPE_MAINTAINER_pn-curl = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-cwautomacros = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-damageproto = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-db = "Cristian Iorga <cristian.iorga@intel.com>"
@@ -127,7 +127,7 @@ RECIPE_MAINTAINER_pn-dbus-ptest = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-dbus-glib = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-dbus-wait = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-desktop-file-utils-native = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-dhcp = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-dhcp = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-diffstat = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-diffutils = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-directfb-examples = "Hongxu Jia <hongxu.jia@windriver.com>"
@@ -143,7 +143,7 @@ RECIPE_MAINTAINER_pn-docbook-sgml-dtd-4.5-native = "Paul Eggleton <paul.eggleton
RECIPE_MAINTAINER_pn-docbook-sgml-dtd = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-docbook-utils = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-dosfstools = "Paul Eggleton <paul.eggleton@linux.intel.com>"
-RECIPE_MAINTAINER_pn-dpkg = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-dpkg = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-dri2proto = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-dropbear = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-dtc = "Saul Wold <sgw@linux.intel.com>"
@@ -168,7 +168,7 @@ RECIPE_MAINTAINER_pn-font-alias = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-fontconfig = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-fontsproto = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-font-util = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-foomatic-filters = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-foomatic-filters = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-formfactor = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-fotowall = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-freetype = "Ross Burton <ross.burton@intel.com>"
@@ -184,7 +184,7 @@ RECIPE_MAINTAINER_pn-gdb = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-gdbm = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-gdk-pixbuf = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-genext2fs = "Robert Yang <liezhi.yang@windriver.com>"
-RECIPE_MAINTAINER_pn-gettext-minimal-native = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-gettext-minimal-native = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-gettext = "Wenzong Fan <wenzong.fan@windriver.com>"
RECIPE_MAINTAINER_pn-ghostscript = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-git = "Robert Yang <liezhi.yang@windriver.com>"
@@ -194,20 +194,20 @@ RECIPE_MAINTAINER_pn-glibc = "Richard Purdie <richard.purdie@linuxfoundation.org
RECIPE_MAINTAINER_pn-glib-networking = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-glproto = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-gmp = "Robert Yang <liezhi.yang@windriver.com>"
-RECIPE_MAINTAINER_pn-gnome-common = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-gnome-desktop = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-gnome-doc-utils = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-gnome-icon-theme = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-gnome-mime-data = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-gnome-common = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
+RECIPE_MAINTAINER_pn-gnome-desktop = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
+RECIPE_MAINTAINER_pn-gnome-doc-utils = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
+RECIPE_MAINTAINER_pn-gnome-icon-theme = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
+RECIPE_MAINTAINER_pn-gnome-mime-data = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-gnu-config = "Robert Yang <liezhi.yang@windriver.com>"
-RECIPE_MAINTAINER_pn-gnupg = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-gnupg = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-gnutls = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-gperf = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-gpgme = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-grep = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-groff = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-grub = "Paul Eggleton <paul.eggleton@linux.intel.com>"
-RECIPE_MAINTAINER_pn-gsettings-desktop-schemas = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-gsettings-desktop-schemas = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-gst-ffmpeg = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-gst-fluendo-mp3 = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-gst-fluendo-mpegdemux = "Cristian Iorga <cristian.iorga@intel.com>"
@@ -240,8 +240,8 @@ RECIPE_MAINTAINER_pn-gummiboot = "Darren Hart <dvhart@linux.intel.com>"
RECIPE_MAINTAINER_pn-gzip = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-hal = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-harfbuzz = "Cristian Iorga <cristian.iorga@intel.com>"
-RECIPE_MAINTAINER_pn-hdparm = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-help2man-native = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-hdparm = "Chong Lu <Chong.Lu@windriver.com>"
+RECIPE_MAINTAINER_pn-help2man-native = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-hicolor-icon-theme = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-hostap-conf = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-hostap-utils = "Cristian Iorga <cristian.iorga@intel.com>"
@@ -259,9 +259,9 @@ RECIPE_MAINTAINER_pn-insserv = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-intltool = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-iproute2 = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-iptables = "Cristian Iorga <cristian.iorga@intel.com>"
-RECIPE_MAINTAINER_pn-iputils = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-iputils = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-irda-utils = "Cristian Iorga <cristian.iorga@intel.com>"
-RECIPE_MAINTAINER_pn-jpeg = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-jpeg = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-json-glib = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-kbd = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-kbproto = "Ross Burton <ross.burton@intel.com>"
@@ -270,24 +270,24 @@ RECIPE_MAINTAINER_pn-kernelshark = "Darren Hart <dvhart@linux.intel.com>"
RECIPE_MAINTAINER_pn-kern-tools-native = "Bruce Ashfield <bruce.ashfield@windriver.com>"
RECIPE_MAINTAINER_pn-kexec-tools = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-keymaps = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-kmod = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-kmod = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-lame = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-latencytop = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-ldconfig-native = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-leafpad = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-less = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-less = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-liba52 = "Cristian Iorga <cristian.iorga@intel.com>"
-RECIPE_MAINTAINER_pn-libacpi = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-libacpi = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-libaio = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-libarchive = "Kai Kang <kai.kang@windriver.com>"
+RECIPE_MAINTAINER_pn-libarchive = "Paul Barker <paul@paulbarker.me.uk>"
RECIPE_MAINTAINER_pn-libart-lgpl = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-libassuan = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-libatomics-ops = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-libav = "Paul Eggleton <paul.eggleton@linux.intel.com>"
-RECIPE_MAINTAINER_pn-libbsd = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-libcap = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-libbsd = "Chong Lu <Chong.Lu@windriver.com>"
+RECIPE_MAINTAINER_pn-libcap = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-libcgroup = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-libcheck = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-libcheck = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-libclass-isa-perl = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-libconvert-asn1-perl = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-libcroco = "Ross Burton <ross.burton@intel.com>"
@@ -297,6 +297,7 @@ RECIPE_MAINTAINER_pn-libdrm = "Richard Purdie <richard.purdie@linuxfoundation.or
RECIPE_MAINTAINER_pn-libdumpvalue-perl = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-libenv-perl = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-liberation-fonts = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-libevdev = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-libevent = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-libexif = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-libfakekey = "Ross Burton <ross.burton@intel.com>"
@@ -305,7 +306,7 @@ RECIPE_MAINTAINER_pn-libfile-checktree-perl = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-libfm = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-libfontenc = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-libgcc = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-libgcrypt = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-libgcrypt = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-libglade = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-libglu = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-libgpg-error = "Paul Eggleton <paul.eggleton@linux.intel.com>"
@@ -321,6 +322,7 @@ RECIPE_MAINTAINER_pn-libmad = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-libmatchbox = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-libmpc = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-libnewt = "Hongxu Jia <hongxu.jia@windriver.com>"
+RECIPE_MAINTAINER_pn-libnewt-python = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-libnfsidmap = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-libnl = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-libnotify = "Ross Burton <ross.burton@intel.com>"
@@ -356,14 +358,14 @@ RECIPE_MAINTAINER_pn-libunique = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-libunistring = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-liburcu = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-liburi-perl = "Kai Kang <kai.kang@windriver.com>"
-RECIPE_MAINTAINER_pn-libusb1 = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-libusb-compat = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-libusb1 = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
+RECIPE_MAINTAINER_pn-libusb-compat = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-libuser = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-libvorbis = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-libx11-diet = "Kai Kang <kai.kang@windriver.com>"
RECIPE_MAINTAINER_pn-libx11 = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-libxau = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-libxcalibrate ="Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-libxcalibrate = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-libxcb = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-libxcomposite = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-libxcursor = "Ross Burton <ross.burton@intel.com>"
@@ -413,21 +415,21 @@ RECIPE_MAINTAINER_pn-lrzsz = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-lsbinitscripts = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-lsbtest = "Yi Zhao <yi.zhao@windriver.com>"
RECIPE_MAINTAINER_pn-lsb = "Hongxu Jia <hongxu.jia@windriver.com>"
-RECIPE_MAINTAINER_pn-lsof = "Richard Purdie <richard.purdie@linuxfoundation.org>"
+RECIPE_MAINTAINER_pn-lsof = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-ltp = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-lttng-modules = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-lttng-tools = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-lttng-ust = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-lzop = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-lzo = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-lz4 = "Armin Kuster <akuster@mvista.com>"
+RECIPE_MAINTAINER_pn-lz4 = "Armin Kuster <akuster808@gmail.com>"
RECIPE_MAINTAINER_pn-m4 = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-mailx = "Kai Kang <kai.kang@windriver.com>"
RECIPE_MAINTAINER_pn-make = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-makedepend = "Robert Yang <liezhi.yang@windriver.com>"
-RECIPE_MAINTAINER_pn-makedevs = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-man-pages = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-man = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-makedevs = "Chen Qi <Qi.Chen@windriver.com>"
+RECIPE_MAINTAINER_pn-man-pages = "Hongxu Jia <hongxu.jia@windriver.com>"
+RECIPE_MAINTAINER_pn-man = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-matchbox-config-gtk = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-matchbox-desktop-sato = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-matchbox-desktop = "Ross Burton <ross.burton@intel.com>"
@@ -438,7 +440,7 @@ RECIPE_MAINTAINER_pn-matchbox-session = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-matchbox-terminal = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-matchbox-theme-sato = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-matchbox-wm = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-mc = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-mc = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-mdadm = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-menu-cache = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-mesa-demos = "Saul Wold <sgw@linux.intel.com>"
@@ -446,7 +448,7 @@ RECIPE_MAINTAINER_pn-mesa = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-meta-ide-support = "Jessica Zhang <jessica.zhang@intel.com>"
RECIPE_MAINTAINER_pn-meta-toolchain-qte = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-meta-toolchain = "Jessica Zhang <jessica.zhang@intel.com>"
-RECIPE_MAINTAINER_pn-midori = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-midori = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-mingetty = "Kai Kang <kai.kang@windriver.com>"
RECIPE_MAINTAINER_pn-minicom = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-mini-x-session = "Saul Wold <sgw@linux.intel.com>"
@@ -456,8 +458,7 @@ RECIPE_MAINTAINER_pn-mkfontscale = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-mklibs-native = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-mktemp = "Chong Lu <Chong.Lu@windriver.com>"
RECIPE_MAINTAINER_pn-mobile-broadband-provider-info = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-modutils-initscripts = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-modutils-initscripts = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-modutils-initscripts = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-mpeg2dec = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-mpfr = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-msmtp = "Saul Wold <sgw@linux.intel.com>"
@@ -473,23 +474,23 @@ RECIPE_MAINTAINER_pn-ncurses = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-neard = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-neon = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-netbase = "Cristian Iorga <cristian.iorga@intel.com>"
+RECIPE_MAINTAINER_pn-nettle = "Armin Kuster <akuster808@gmail.com>"
RECIPE_MAINTAINER_pn-net-tools = "Cristian Iorga <cristian.iorga@intel.com>"
-RECIPE_MAINTAINER_pn-nfs-export-root = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-nfs-utils = "Paul Eggleton <paul.eggleton@linux.intel.com>"
+RECIPE_MAINTAINER_pn-nfs-export-root = "Chong Lu <Chong.Lu@windriver.com>"
+RECIPE_MAINTAINER_pn-nfs-utils = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-npth = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-nspr = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-ocf-linux = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-ocf-linux = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-ofono = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-oh-puzzles = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-openjade = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-opensp = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-openssh = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-openssl = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-opkg-collateral = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-opkg-config-base = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-opkg = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-opkg-utils = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-opkg-collateral = "Paul Barker <paul@paulbarker.me.uk>"
+RECIPE_MAINTAINER_pn-opkg-config-base = "Paul Barker <paul@paulbarker.me.uk>"
+RECIPE_MAINTAINER_pn-opkg = "Paul Barker <paul@paulbarker.me.uk>"
+RECIPE_MAINTAINER_pn-opkg-utils = "Paul Barker <paul@paulbarker.me.uk>"
RECIPE_MAINTAINER_pn-oprofile = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-oprofileui = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-oprofileui-server = "Saul Wold <sgw@linux.intel.com>"
@@ -503,7 +504,7 @@ RECIPE_MAINTAINER_pn-packagegroup-core-device-devel = "Saul Wold <sgw@linux.inte
RECIPE_MAINTAINER_pn-packagegroup-core-directfb = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-packagegroup-core-full-cmdline = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-packagegroup-core-lsb = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-packagegroup-core-nfs = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-packagegroup-core-nfs = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-packagegroup-core-qt4e = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-packagegroup-core-qt = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-packagegroup-core-sdk = "Saul Wold <sgw@linux.intel.com>"
@@ -525,21 +526,22 @@ RECIPE_MAINTAINER_pn-parted = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-patch = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-pax-utils = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-pax = "Hongxu Jia <hongxu.jia@windriver.com>"
-RECIPE_MAINTAINER_pn-pciutils = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-pcmanfm = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-pcmciautils = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-pciutils = "Chen Qi <Qi.Chen@windriver.com>"
+RECIPE_MAINTAINER_pn-pcmanfm = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
+RECIPE_MAINTAINER_pn-pcmciautils = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-perf = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-perl = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-piglit = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-pigz = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-pigz = "Hongxu Jia <hongxu.jia@windriver.com>"
+RECIPE_MAINTAINER_pn-pinentry = "Armin Kuster <akuster808@gmail.com>"
RECIPE_MAINTAINER_pn-pixman = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-pkgconfig = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-pkgconfig = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-pm-utils = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-pointercal = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-poky-feed-config-opkg = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-pong-clock = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-popt = "Paul Eggleton <paul.eggleton@linux.intel.com>"
-RECIPE_MAINTAINER_pn-portmap = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-portmap = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-powertop = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-ppp = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-ppp-dialin = "Hongxu Jia <hongxu.jia@windriver.com>"
@@ -559,6 +561,9 @@ RECIPE_MAINTAINER_pn-python-distribute = "Richard Purdie <richard.purdie@linuxfo
RECIPE_MAINTAINER_pn-python-docutils = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-python-gst = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-python-imaging = "Richard Purdie <richard.purdie@linuxfoundation.org>"
+RECIPE_MAINTAINER_pn-python-mako = "Khem Raj <raj.khem@gmail.com>"
+RECIPE_MAINTAINER_pn-python-nose = "Khem Raj <raj.khem@gmail.com>"
+RECIPE_MAINTAINER_pn-python-numpy = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER_pn-python-pycairo = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-python-pycurl = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-python-pygobject = "Richard Purdie <richard.purdie@linuxfoundation.org>"
@@ -568,6 +573,7 @@ RECIPE_MAINTAINER_pn-python-scons = "Richard Purdie <richard.purdie@linuxfoundat
RECIPE_MAINTAINER_pn-python-setuptools = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-python-smartpm = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-python3 = "Richard Purdie <richard.purdie@linuxfoundation.org>"
+RECIPE_MAINTAINER_pn-python3-distribute = "Khem Raj <raj.khem@gmail.com>"
RECIPE_MAINTAINER_pn-qemu-helper-native = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-qemu = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-qmmp = "Hongxu Jia <hongxu.jia@windriver.com>"
@@ -587,7 +593,7 @@ RECIPE_MAINTAINER_pn-readline = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-recordproto = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-remake = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-renderproto = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-resolvconf = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-resolvconf = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-resourceproto = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-rgb = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-rpcbind = "Hongxu Jia <hongxu.jia@windriver.com>"
@@ -597,26 +603,26 @@ RECIPE_MAINTAINER_pn-rsync = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-rt-tests = "Darren Hart <dvhart@linux.intel.com>"
RECIPE_MAINTAINER_pn-run-postinsts = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-rxvt-unicode = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-sato-icon-theme = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-sato-icon-theme = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-sato-screenshot = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-sbc = "Cristian Iorga <cristian.iorga@intel.com>"
-RECIPE_MAINTAINER_pn-screen = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-screen = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-scrnsaverproto = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-sed = "Chen Qi <Qi.Chen@windriver.com>"
-RECIPE_MAINTAINER_pn-setserial = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-setserial = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-settings-daemon = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-sgml-common = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-sgmlspl = "Paul Eggleton <paul.eggleton@linux.intel.com>"
RECIPE_MAINTAINER_pn-shadow = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-shadow-securetty = "Chen Qi <Qi.Chen@windriver.com>"
-RECIPE_MAINTAINER_pn-shadow-sysroot = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-shadow-sysroot = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-shared-mime-info = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-shutdown-desktop = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-slang = "Kai Kang <kai.kang@windriver.com>"
RECIPE_MAINTAINER_pn-socat = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-speex = "Cristian Iorga <cristian.iorga@intel.com>"
-RECIPE_MAINTAINER_pn-sqlite3 = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-squashfs-tools = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-sqlite3 = "Aníbal Limón <anibal.limon@linux.intel.com>"
+RECIPE_MAINTAINER_pn-squashfs-tools = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-startup-notification = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-stat = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-strace = "Chong Lu <Chong.Lu@windriver.com>"
@@ -633,52 +639,52 @@ RECIPE_MAINTAINER_pn-systemtap-uprobes = "Tom Zanussi <tom.zanussi@intel.com>"
RECIPE_MAINTAINER_pn-sysvinit-inittab = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-sysvinit = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-taglib = "Cristian Iorga <cristian.iorga@intel.com>"
-RECIPE_MAINTAINER_pn-tar-replacement-native = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-tar-replacement-native = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-tar = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-tcf-agent = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-tcl = "Chong Lu <Chong.Lu@windriver.com>"
-RECIPE_MAINTAINER_pn-tcp-wrappers = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-tcp-wrappers = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-tcp-wrappers = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-telepathy-glib = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-telepathy-idle = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-telepathy-mission-control = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-telepathy-python = "Cristian Iorga <cristian.iorga@intel.com>"
-RECIPE_MAINTAINER_pn-texi2html = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-texinfo = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-texi2html = "Robert Yang <liezhi.yang@windriver.com>"
+RECIPE_MAINTAINER_pn-texinfo = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-tiff = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-time = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-tiny-init = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-trace-cmd = "Darren Hart <dvhart@linux.intel.com>"
RECIPE_MAINTAINER_pn-tremor = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-tslib = "Paul Eggleton <paul.eggleton@linux.intel.com>"
-RECIPE_MAINTAINER_pn-ttf-bitstream-vera = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-ttf-bitstream-vera = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-tzcode-native = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-tzdata = "Robert Yang <liezhi.yang@windriver.com>"
RECIPE_MAINTAINER_pn-ubootchart = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-u-boot-fw-utils = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-u-boot-mkimage = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-udev-extraconf = "Saul Wold <sgw@linux.intel.com>"
-RECIPE_MAINTAINER_pn-udev = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-udev-extraconf = "Aníbal Limón <anibal.limon@linux.intel.com>"
+RECIPE_MAINTAINER_pn-udev = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-unifdef = "Richard Purdie <richard.purdie@linuxfoundation.org>"
-RECIPE_MAINTAINER_pn-unzip = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-unzip = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-update-rc.d = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-usbinit = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-usbutils = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-unfs3 = "Paul Eggleton <paul.eggleton@linux.intel.com>"
-RECIPE_MAINTAINER_pn-util-linux = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-util-linux = "Chen Qi <Qi.Chen@windriver.com>"
RECIPE_MAINTAINER_pn-util-macros = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-v86d = "Richard Purdie <richard.purdie@linuxfoundation.org>"
RECIPE_MAINTAINER_pn-vala = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-valgrind = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-videoproto = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-vte ="Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-vte = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-watchdog = "Saul Wold <sgw@linux.intel.com>"
RECIPE_MAINTAINER_pn-wayland = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-webkit-gtk = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-web-webkit = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-weston = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-wget = "Robert Yang <liezhi.yang@windriver.com>"
-RECIPE_MAINTAINER_pn-which = "Saul Wold <sgw@linux.intel.com>"
+RECIPE_MAINTAINER_pn-which = "Alejandro Hernandez <alejandro.hernandez@linux.intel.com>"
RECIPE_MAINTAINER_pn-wireless-tools = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-wpa-supplicant = "Cristian Iorga <cristian.iorga@intel.com>"
RECIPE_MAINTAINER_pn-x11-common = "Ross Burton <ross.burton@intel.com>"
@@ -709,6 +715,7 @@ RECIPE_MAINTAINER_pn-xf86-input-vmmouse = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xf86miscproto = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xf86-video-fbdev = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xf86-video-intel = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-xf86-video-modesetting = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xf86-video-omapfb = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xf86-video-omap = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xf86-video-vesa = "Ross Burton <ross.burton@intel.com>"
@@ -716,22 +723,23 @@ RECIPE_MAINTAINER_pn-xf86-video-vmware = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xf86vidmodeproto = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xhost = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xineramaproto = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-xinetd = "Richard Purdie <richard.purdie@linuxfoundation.org>"
+RECIPE_MAINTAINER_pn-xinetd = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-xinit = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xinput = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xinput-calibrator = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xkbcomp = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xkeyboard-config = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-xmlto = "Hongxu Jia <hongxu.jia@windriver.com>"
RECIPE_MAINTAINER_pn-xmodmap = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xorg-minimal-fonts = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xprop = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xproto = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-xrandr = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-xrandr = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-xrestop = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xserver-nodm-init = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xserver-xf86-config = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xserver-xorg = "Ross Burton <ross.burton@intel.com>"
-RECIPE_MAINTAINER_pn-xset = "Ross Burton <ross.burton@intel.com>"
+RECIPE_MAINTAINER_pn-xset = "Aníbal Limón <anibal.limon@linux.intel.com>"
RECIPE_MAINTAINER_pn-xtrans = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xtscal = "Ross Burton <ross.burton@intel.com>"
RECIPE_MAINTAINER_pn-xvideo-tests = "Cristian Iorga <cristian.iorga@intel.com>"
diff --git a/meta-yocto/conf/distro/include/package_regex.inc b/meta-yocto/conf/distro/include/package_regex.inc
index 92202fd51f..119849c87c 100644
--- a/meta-yocto/conf/distro/include/package_regex.inc
+++ b/meta-yocto/conf/distro/include/package_regex.inc
@@ -7,361 +7,376 @@
#"
# The format is as a bitbake variable override for each recipe
#"
-# REGEX_URI_pn-<recipe name> = ""recipe_url"
+# REGEX_URI_pn-<recipe name> = "recipe_url"
# - This is the url used by the package checking system to
# get the latest version of the package
-# REGEX_pn-<recipe name> = ""package_regex"
+# REGEX_pn-<recipe name> = "package_regex"
# - This is the regex the package checking system uses to
# parse the page found at REGEX_URI_pn-<recipe name>
#
COMMON_REGEX = "((\d+[\.\-_]*)+)"
#This will need to be changed a little bit when versions are bigger than 9.
-REGEX_URI_pn-acpid = "http://sourceforge.net/projects/acpid/files/acpid/"
-REGEX_pn-acpid = "[hH][rR][eE][fF]=\"/projects/acpid/files/acpid/(acpid\-)?(?P<pver>(\d\.(\d+[\.\-_]*)+))/\""
+REGEX_URI_pn-acpid = "http://sourceforge.net/projects/acpid2/files/"
+REGEX_pn-acpid = "/projects/acpid2/files/(acpid\-)?(?P<pver>(\d\.(\d+[\.\-_]*)+)).tar.xz/"
REGEX_URI_pn-adt-installer = "http://code.google.com/p/opkg/downloads/list"
REGEX_URI_pn-arptables = "http://sourceforge.net/projects/ebtables/files/arptables/"
-REGEX_pn-arptables = "[hH][rR][eE][fF]=\"/projects/ebtables/files/arptables/arptables-v(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-arptables = "/projects/ebtables/files/arptables/arptables-v(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-beecrypt = "http://sourceforge.net/projects/beecrypt/files/beecrypt/"
-REGEX_pn-beecrypt = "[hH][rR][eE][fF]=\"/projects/beecrypt/files/beecrypt/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-beecrypt = "/projects/beecrypt/files/beecrypt/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-beecrypt-native = "http://sourceforge.net/projects/beecrypt/files/beecrypt/"
-REGEX_pn-beecrypt-native = "[hH][rR][eE][fF]=\"/projects/beecrypt/files/beecrypt/(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_pn-bdwgc = "[hH][rR][eE][fF]=\"gc\-(?P<pver>((\d+[a-z]?[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-beecrypt-native = "/projects/beecrypt/files/beecrypt/(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_URI_pn-bdwgc = "http://www.hboehm.info/gc/gc_source/"
+REGEX_pn-bdwgc = "gc\-(?P<pver>((\d+[a-z]?[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-bind = "ftp://ftp.isc.org/isc/bind9/"
-REGEX_pn-bind = "[hH][rR][eE][fF]=\"(?P<pver>((P?\d+[\.\-_]*)+))/\""
+REGEX_pn-bind = "(?P<pver>((P?\d+[\.\-_]*)+))/"
REGEX_URI_pn-bjam-native = "http://sourceforge.net/projects/boost/files/boost/"
-REGEX_pn-bjam-native = "[hH][rR][eE][fF]=\"/projects/boost/files/boost/(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_pn-blktool = "[hH][rR][eE][fF]=\"blktool_(?P<pver>((\d+[\.\-_]*)+))\.(diff|orig\.tar)\.gz\""
+REGEX_pn-bjam-native = "/projects/boost/files/boost/(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_pn-blktool = "blktool_(?P<pver>((\d+[\.\-_]*)+))\.(diff|orig\.tar)\.gz"
REGEX_URI_pn-boost = "http://sourceforge.net/projects/boost/files/boost/"
-REGEX_pn-boost = "[hH][rR][eE][fF]=\"/projects/boost/files/boost/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-boost = "/projects/boost/files/boost/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_pn-btrfs-tools = "(?P<pver>(\d+(\.\d+)*(\-rc\d+)*))"
REGEX_pn-build-appliance-image = "(?P<pver>([0-9][\.|_]?)+)$"
REGEX_URI_pn-bzip2 = "http://www.bzip.org/downloads.html"
REGEX_pn-chkconfig-alternatives-native = "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))"
REGEX_URI_pn-chrpath = "http://alioth.debian.org/frs/?group_id=31052"
-REGEX_pn-chrpath = "[hH][rR][eE][fF]=\"/frs/download.php/file/\d+/chrpath-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
-REGEX_pn-cmake = "[hH][rR][eE][fF]=\"cmake\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
-REGEX_pn-cmake-native = "[hH][rR][eE][fF]=\"cmake\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
-REGEX_pn-nativeesdk-cmake = "[hH][rR][eE][fF]=\"cmake\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
-REGEX_URI_pn-console-tools = "http://sourceforge.net/projects/lct/files/console-tools/"
-REGEX_pn-console-tools = "[hH][rR][eE][fF]=\"/projects/lct/files/console-tools/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-chrpath = "/frs/download.php/file/\d+/chrpath-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
+REGEX_pn-cmake = "cmake\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
+REGEX_pn-cmake-native = "cmake\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
+REGEX_pn-nativeesdk-cmake = "cmake\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
+REGEX_URI_pn-console-tools = "http://sourceforge.net/projects/lct/files/console-tools-devel/"
+REGEX_pn-console-tools = "/projects/lct/files/console-tools-devel/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-cracklib = "http://sourceforge.net/projects/cracklib/files/cracklib/"
-REGEX_pn-cracklib = "[hH][rR][eE][fF]=\"/projects/cracklib/files/cracklib/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-cracklib = "/projects/cracklib/files/cracklib/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-createrepo = "http://createrepo.baseurl.org/download/"
-REGEX_pn-createrepo = "[hH][rR][eE][fF]=\"createrepo\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-createrepo = "createrepo\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-createrepo-native = "http://createrepo.baseurl.org/download/"
-REGEX_pn-createrepo-native = "[hH][rR][eE][fF]=\"createrepo\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-createrepo-native = "createrepo\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-cups = "http://www.cups.org/software.php"
-REGEX_pn-cups = "<tt>cups\-(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz<\/tt>"
+REGEX_pn-cups = "cups\-(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz"
REGEX_URI_pn-curlpp = "http://code.google.com/p/curlpp/downloads/list"
REGEX_URI_pn-cwautomacros = "http://sourceforge.net/projects/cwautomacros.berlios/files/"
-REGEX_pn-cwautomacros = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/cwautomacros.berlios/files/cwautomacros\-(?P<pver>(\d+))\.tar\.bz2\/download""
+REGEX_pn-cwautomacros = "http://sourceforge.net/projects/cwautomacros.berlios/files/cwautomacros\-(?P<pver>(\d+))\.tar\.bz2\/download"
REGEX_URI_pn-cwautomacros-native = "http://sourceforge.net/projects/cwautomacros.berlios/files/"
-REGEX_pn-cwautomacros-native = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/cwautomacros.berlios/files/cwautomacros\-(?P<pver>(\d+))\.tar\.bz2\/download\""
+REGEX_pn-cwautomacros-native = "http://sourceforge.net/projects/cwautomacros.berlios/files/cwautomacros\-(?P<pver>(\d+))\.tar\.bz2\/download"
REGEX_URI_pn-db = "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html"
-REGEX_pn-db = "[hH][rR][eE][fF]=\"http://download.oracle.com/otn/berkeley-db/db-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-db = "http://download.oracle.com/otn/berkeley-db/db-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-db-native = "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html"
-REGEX_pn-db-native = "[hH][rR][eE][fF]=\"http://download.oracle.com/otn/berkeley-db/db-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-db-native = "http://download.oracle.com/otn/berkeley-db/db-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-nativesdk-db = "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html"
-REGEX_pn-nativesdk-db = "[hH][rR][eE][fF]=\"http://download.oracle.com/otn/berkeley-db/db-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-nativesdk-db = "http://download.oracle.com/otn/berkeley-db/db-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-distcc = "http://code.google.com/p/distcc/downloads/list"
-REGEX_pn-distcc = "[hH][rR][eE][fF]=\"//distcc.googlecode.com/files/distcc-(?P<pver>((\d+[\.\-_]*)+))\.tar\.bz2\""
+REGEX_pn-distcc = "//distcc.googlecode.com/files/distcc-(?P<pver>((\d+[\.\-_]*)+))\.tar\.bz2"
REGEX_URI_pn-docbook-dsssl-stylesheets-native = "http://sourceforge.net/projects/docbook/files/docbook-dsssl/"
-REGEX_pn-docbook-dsssl-stylesheets-native = "[hH][rR][eE][fF]=\"/projects/docbook/files/docbook-dsssl/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-docbook-dsssl-stylesheets-native = "/projects/docbook/files/docbook-dsssl/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-docbook-sgml-dtd-3.1-native = "http://docbook.org/sgml/3.1/"
-PRSPV_pn-docbook-sgml-dtd-3.1-native = "${@d.getVar('PV',1).replace('.','')}"
REGEX_URI_pn-docbook-sgml-dtd-4.1-native = "http://docbook.org/sgml/4.1/"
-PRSPV_pn-docbook-sgml-dtd-4.1-native = "${@d.getVar('PV',1).replace('.','')}"
REGEX_URI_pn-docbook-sgml-dtd-4.5-native = "http://docbook.org/sgml/4.5/"
REGEX_URI_pn-e2fsprogs = "http://sourceforge.net/projects/e2fsprogs/files/e2fsprogs/"
-REGEX_pn-e2fsprogs = "[hH][rR][eE][fF]=\"/projects/e2fsprogs/files/e2fsprogs/v(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_pn-elfutils = "[hH][rR][eE][fF]=\"(elfutils\-)?(?P<pver>((\d+[\.\-_]*)+))(/|\.tar\.bz2)\""
+REGEX_pn-e2fsprogs = "/projects/e2fsprogs/files/e2fsprogs/v(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_pn-elfutils = "(elfutils\-)?(?P<pver>((\d+[\.\-_]*)+))(/|\.tar\.bz2)"
REGEX_URI_pn-expat = "http://sourceforge.net/projects/expat/files/expat/"
-REGEX_pn-expat-native = "[hH][rR][eE][fF]=\"/projects/expat/files/expat/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-expat-native = "/projects/expat/files/expat/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-expat-native = "http://sourceforge.net/projects/expat/files/expat/"
-REGEX_pn-expat = "[hH][rR][eE][fF]=\"/projects/expat/files/expat/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-expat = "/projects/expat/files/expat/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-nativesdk-expat = "http://sourceforge.net/projects/expat/files/expat/"
-REGEX_pn-nativesdk-expat = "[hH][rR][eE][fF]=\"/projects/expat/files/expat/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-nativesdk-expat = "/projects/expat/files/expat/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-fetchmail = "http://sourceforge.net/projects/fetchmail.berlios/files/"
-REGEX_URI_pn-flac = "http://sourceforge.net/projects/flac/files/flac-linux-i386/"
-REGEX_pn-flac = "[hH][rR][eE][fF]=\"/projects/flac/files/flac-linux-i386/flac\-(?P<pver>((\d+[\.\-_]*)+))\-linux\-i386/\""
REGEX_URI_pn-flex = "http://sourceforge.net/projects/flex/files/"
-REGEX_pn-flex = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/flex/files/flex-(?P<pver>((\d+[\.\-_]*)+)).tar.gz/download\""
-REGEX_pn-foomatic-filters = "[hH][rR][eE][fF]=\"foomatic-filters-(?P<pver>((\d|\d\d)\.*)+)\.tar\.gz\""
+REGEX_pn-flex = "http://sourceforge.net/projects/flex/files/flex-(?P<pver>((\d+[\.\-_]*)+)).tar.gz/download"
+REGEX_pn-foomatic-filters = "foomatic-filters-(?P<pver>((\d|\d\d)\.*)+)\.tar\.gz"
REGEX_URI_pn-fotowall = "https://code.google.com/p/fotowall/downloads/list"
-REGEX_pn-fotowall = "[hH][rR][eE][fF]=\"//fotowall.googlecode.com/files/[F|f]otowall\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.bz2\""
+REGEX_pn-fotowall = "//fotowall.googlecode.com/files/[F|f]otowall\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.bz2"
REGEX_URI_pn-freetype = "http://sourceforge.net/projects/freetype/files/freetype2"
-REGEX_pn-freetype = "[hH][rR][eE][fF]=\"/projects/freetype/files/freetype\d/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-freetype = "/projects/freetype/files/freetype\d/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-freetype-native = "http://sourceforge.net/projects/freetype/files/freetype2"
-REGEX_pn-freetype-native = "[hH][rR][eE][fF]=\"/projects/freetype/files/freetype\d/(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_URI_pn-git = "http://code.google.com/p/git-core/downloads/list"
-REGEX_pn-git = "[hH][rR][eE][fF]=\"//git-core.googlecode.com/files/git-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-freetype-native = "/projects/freetype/files/freetype\d/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-glew = "http://sourceforge.net/projects/glew/files/glew"
-REGEX_pn-glew = " [hH][rR][eE][fF]=\"/projects/glew/files/glew/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-glew = "/projects/glew/files/glew/(?P<pver>((\d+[\.\-_]*)+))/"
#REGEX_pn-gnutls = "ftp://ftp.gnutls.org/gcrypt/gnutls/"
-REGEX_pn-gstreamer = "[hH][rR][eE][fF]=\"gstreamer\-(?P<pver>(0\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gstreamer1.0 = "[hH][rR][eE][fF]=\"gstreamer\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gst-ffmpeg = "[hH][rR][eE][fF]=\"gst-ffmpeg\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gst-fluendo-mpegdemux = "[hH][rR][eE][fF]=\"gst-fluendo-mpegdemux\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gst-fluendo-mp3 = "[hH][rR][eE][fF]=\"gst-fluendo-mp3\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gstreamer1.0-plugins-base = "[hH][rR][eE][fF]=\"gst\-plugins\-base\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gst-plugins-base = "[hH][rR][eE][fF]=\"gst\-plugins\-base\-(?P<pver>(0\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gstreamer1.0-plugins-bad = "[hH][rR][eE][fF]=\"gst\-plugins\-bad\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gst-plugins-bad = "[hH][rR][eE][fF]=\"gst\-plugins\-bad\-(?P<pver>(0\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gst-plugins-gl = "[hH][rR][eE][fF]=\"gst\-plugins\-gl\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gstreamer1.0-plugins-good = "[hH][rR][eE][fF]=\"gst\-plugins\-good\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gst-plugins-good = "[hH][rR][eE][fF]=\"gst\-plugins\-good\-(?P<pver>(0\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gstreamer1.0-libav = "[hH][rR][eE][fF]=\"gst\-libav\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gstreamer1.0-omx = "[hH][rR][eE][fF]=\"gst\-omx\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gst-openmax = "[hH][rR][eE][fF]=\"gst\-openmax\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gstreamer1.0-plugins-ugly = "[hH][rR][eE][fF]=\"gst\-plugins\-ugly\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gst-plugins-ugly = "[hH][rR][eE][fF]=\"gst\-plugins\-ugly\-(?P<pver>(0\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)\""
-REGEX_pn-gtk+ = "[hH][rR][eE][fF]=\"gtk\+\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.xz\""
-REGEX_pn-gtk+3 = "[hH][rR][eE][fF]=\"gtk\+\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.xz\""
-REGEX_pn-gtk-update-icon-cache-native = "[hH][rR][eE][fF]=\"gtk\+\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.xz\""
-REGEX_pn-webkit-gtk = "[hH][rR][eE][fF]=\"webkitgtk\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.xz\""
+REGEX_pn-gstreamer = "gstreamer\-(?P<pver>(0\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gstreamer1.0 = "gstreamer\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gst-ffmpeg = "gst-ffmpeg\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gst-fluendo-mpegdemux = "gst-fluendo-mpegdemux\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gst-fluendo-mp3 = "gst-fluendo-mp3\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gstreamer1.0-plugins-base = "gst\-plugins\-base\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gst-plugins-base = "gst\-plugins\-base\-(?P<pver>(0\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gstreamer1.0-plugins-bad = "gst\-plugins\-bad\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gst-plugins-bad = "gst\-plugins\-bad\-(?P<pver>(0\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gst-plugins-gl = "gst\-plugins\-gl\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gstreamer1.0-plugins-good = "gst\-plugins\-good\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gst-plugins-good = "gst\-plugins\-good\-(?P<pver>(0\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gstreamer1.0-libav = "gst\-libav\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gstreamer1.0-omx = "gst\-omx\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gst-openmax = "gst\-openmax\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gstreamer1.0-plugins-ugly = "gst\-plugins\-ugly\-(?P<pver>(\d+\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gst-plugins-ugly = "gst\-plugins\-ugly\-(?P<pver>(0\.\d*[0|2|4|6|8]\.\d+))\.tar\.(gz|xz|bz2)"
+REGEX_pn-gtk+ = "gtk\+\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.xz"
+REGEX_pn-gtk+3 = "gtk\+\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.xz"
+REGEX_pn-gtk-update-icon-cache-native = "gtk\+\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.xz"
+REGEX_pn-webkit-gtk = "webkitgtk\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.xz"
REGEX_URI_pn-hdparm = "http://sourceforge.net/projects/hdparm/files/hdparm/"
-REGEX_pn-hdparm = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/hdparm/files/hdparm/hdparm-(?P<pver>((\d+[\.\-_]*)+)).tar.gz/download\""
-REGEX_URI_pn-icu = "http://site.icu-project.org/download"
-REGEX_pn-icu = "[hH][rR][eE][fF]=\"http://site.icu-project.org/download/((\d+[\.\-_]*)+)#(TOC\-)?ICU4J\-Download\" rel=\"nofollow\">(<b>)?(?P<pver>((\d+[\.\-_]*)+))(</b>)?"
+REGEX_pn-hdparm = "http://sourceforge.net/projects/hdparm/files/hdparm/hdparm-(?P<pver>((\d+[\.\-_]*)+)).tar.gz/download"
REGEX_URI_pn-intltool = "https://launchpad.net/intltool/+download"
-REGEX_pn-intltool = "[hH][rR][eE][fF]=\"https://launchpad.net/intltool/trunk/(?P<pver>((\d+[\.\-_]*)+))/\+download/intltool\-(\d+[\.\-_]*)+\.tar\.gz\""
+REGEX_pn-intltool = "https://launchpad.net/intltool/trunk/(?P<pver>((\d+[\.\-_]*)+))/\+download/intltool\-(\d+[\.\-_]*)+\.tar\.gz"
REGEX_URI_pn-intltool-native = "https://launchpad.net/intltool/+download"
-REGEX_pn-intltool-native = "[hH][rR][eE][fF]=\"https://launchpad.net/intltool/trunk/(?P<pver>((\d+[\.\-_]*)+))/\+download/intltool\-(\d+[\.\-_]*)+\.tar\.gz\""
+REGEX_pn-intltool-native = "https://launchpad.net/intltool/trunk/(?P<pver>((\d+[\.\-_]*)+))/\+download/intltool\-(\d+[\.\-_]*)+\.tar\.gz"
REGEX_URI_pn-irda-utils = "http://sourceforge.net/projects/irda/files/irda-utils/"
-REGEX_pn-irda-utils = "[hH][rR][eE][fF]=\"/projects/irda/files/irda-utils/(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_pn-iputils = "[Hh][Rr][Ee][Ff]=\"iputils\-(?P<pver>(s\d+))\.tar\.bz2\""
+REGEX_pn-irda-utils = "/projects/irda/files/irda-utils/(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_pn-iputils = "iputils\-(?P<pver>(s\d+))\.tar\.bz2"
REGEX_URI_pn-jpeg = "http://www.ijg.org/files/"
-REGEX_pn-jpeg = "[hH][rR][eE][fF]=\"jpegsrc.v(?P<pver>((\d+[a-z]*\d*)+))\.tar\.gz""
+REGEX_pn-jpeg = "jpegsrc.v(?P<pver>((\d+[a-z]*\d*)+))\.tar\.gz"
REGEX_URI_pn-js = "http://ftp.mozilla.org/pub/mozilla.org/js/"
-REGEX_pn-js = "[hH][rR][eE][fF]=\"js-?(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
-REGEX_URI_pn-json-c = "http://s3.amazonaws.com/json-c_releases/"
-REGEX_pn-json-c = "releases/json-c-(?P<pver>(\d+[\.\-_]*)+).tar.gz"
-REGEX_URI_pn-kconfig-frontends = "http://ymorin.is-a-geek.org/download/kconfig-frontends/"
-REGEX_pn-kconfig-frontends = "[hH][rR][eE][fF]=\'kconfig\-frontends\-(?P<pver>((\d+[\.\-]*)+))\.tar\.xz\'"
-GIT_REGEX_pn-kern-tools-native = "HEEEAD"
+REGEX_pn-js = "js-?(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
+REGEX_URI_pn-json-c = "https://github.com/json-c/json-c/releases"
+REGEX_pn-json-c = "json-c-(?P<pver>(\d+[\.\-_]*)+)-\d\d\d\d\d\d\d\d\.tar\.gz"
REGEX_URI_pn-lame = "http://sourceforge.net/projects/lame/files/lame/"
-REGEX_pn-lame = "[hH][rR][eE][fF]=\"/projects/lame/files/lame/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-lame = "/projects/lame/files/lame/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-less = "http://www.greenwoodsoftware.com/less/download.html"
-REGEX_pn-less = "[hH][rR][eE][fF]=\"less\-(?P<pver>(\d+))\.tar\.gz\""
+REGEX_pn-less = "less\-(?P<pver>(\d+))\.tar\.gz"
REGEX_URI_pn-liba52 = "http://liba52.sourceforge.net/downloads.html"
-REGEX_pn-liba52 = "[hH][rR][eE][fF]=\"/files/a52\w*\-(?P<pver>((\d+[a-z]?[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-liba52 = "/files/a52\w*\-(?P<pver>((\d+[a-z]?[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-libacpi = "http://www.ngolde.de/libacpi.html"
REGEX_URI_pn-libaio = "http://ftp.debian.org/debian/pool/main/liba/libaio/"
-REGEX_pn-libaio = "[hH][rR][eE][fF]=\"libaio_(?P<pver>((\d+[\.\-_]*)+)).orig.tar.gz\""
+REGEX_pn-libaio = "libaio_(?P<pver>((\d+[\.\-_]*)+)).orig.tar.gz"
REGEX_URI_pn-libarchive = "http://www.libarchive.org/"
-REGEX_pn-libarchive = "[hH][rR][eE][fF]=\"downloads/libarchive\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-libarchive = "downloads/libarchive\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-libcgroup = "http://sourceforge.net/projects/libcg/files/libcgroup/"
-#REGEX_pn-libcgroup = "[hH][rR][eE][fF]=\"/projects/libcg/files/libcgroup/v?(?P<pver>((\.?\d+[\.\-_]*)+(rc\d?)*)+)/\""
-REGEX_pn-libcgroup = "[hH][rR][eE][fF]=\"/projects/libcg/files/libcgroup/v?(?P<pver>((\.?\d+[\.\-_]*))+)/\""
+#REGEX_pn-libcgroup = "/projects/libcg/files/libcgroup/v?(?P<pver>((\.?\d+[\.\-_]*)+(rc\d?)*)+)/"
+REGEX_pn-libcgroup = "/projects/libcg/files/libcgroup/v?(?P<pver>((\.?\d+[\.\-_]*))+)/"
REGEX_URI_pn-libcheck = "http://sourceforge.net/projects/check/files/check/"
-REGEX_pn-libcheck = "[hH][rR][eE][fF]=\"/projects/check/files/check/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-libcheck = "/projects/check/files/check/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-libdnet = "http://code.google.com/p/libdnet/downloads/list"
#REGEX_URI_pn-libevent = "http://sourceforge.net/projects/levent/files/libevent/libevent-2.0/"
REGEX_URI_pn-libevent = "http://libevent.org/"
-#REGEX_pn-libevent = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/levent/files/libevent/libevent-2.0/libevent-(?P<pver>((\d+[\.\-_]*)+))\-stable\.tar\.gz/download\""
-REGEX_pn-libevent = "[hH][rR][eE][fF]=\"https://github.com/downloads/libevent/libevent/libevent-(?P<pver>((\d+[\.\-_]*)+))\-stable\.tar\.gz\""
+#REGEX_pn-libevent = "http://sourceforge.net/projects/levent/files/libevent/libevent-2.0/libevent-(?P<pver>((\d+[\.\-_]*)+))\-stable\.tar\.gz/download"
+REGEX_pn-libevent = "https://github.com/downloads/libevent/libevent/libevent-(?P<pver>((\d+[\.\-_]*)+))\-stable\.tar\.gz"
REGEX_URI_pn-libexif = "http://sourceforge.net/projects/libexif/files/libexif/"
-REGEX_pn-libexif = "[hH][rR][eE][fF]=\"/projects/libexif/files/libexif/(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_pn-libffi = "[hH][rR][eE][fF]=\"libffi\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
-REGEX_pn-libffi-native = "[hH][rR][eE][fF]=\"ftp://sourceware.org:21/pub/libffi/libffi\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
-REGEX_pn-nativesdk-libffi = "[hH][rR][eE][fF]=\"ftp://sourceware.org:21/pub/libffi/libffi\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-libexif = "/projects/libexif/files/libexif/(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_pn-libffi = "libffi\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
+REGEX_pn-libffi-native = "ftp://sourceware.org:21/pub/libffi/libffi\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
+REGEX_pn-nativesdk-libffi = "ftp://sourceware.org:21/pub/libffi/libffi\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-libfm = "http://sourceforge.net/projects/pcmanfm/files/PCManFM%20%2B%20Libfm%20%28tarball%20release%29/PCManFM/"
-REGEX_pn-libfm = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/pcmanfm/files/PCManFM%20%2B%20Libfm%20%28tarball%20release%29/PCManFM/pcmanfm-(?P<pver>((\d+[\.\-_]*)+)).tar.gz/download\""
-REGEX_URI_pn-libical = "http://sourceforge.net/projects/libical/files/libical/"
-REGEX_pn-libical = "[hH][rR][eE][fF]=\"/projects/libical/files/libical/libical-(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-libfm = "pcmanfm-(?P<pver>((\d+[\.\-_]*)+)).tar.xz/download$"
+REGEX_URI_pn-libical = "https://github.com/libical/libical/releases"
+REGEX_pn-libical = "/releases/tag/v(?P<pver>((\d+[\.\-_]*)+))"
REGEX_URI_pn-libid3tag = "http://sourceforge.net/projects/mad/files/libid3tag/"
-REGEX_pn-libid3tag = "[hH][rR][eE][fF]=\"/projects/mad/files/libid3tag/(?P<pver>((\d+[\.\-_]*)+([a-z]?)))/\""
+REGEX_pn-libid3tag = "/projects/mad/files/libid3tag/(?P<pver>((\d+[\.\-_]*)+([a-z]?)))/"
REGEX_URI_pn-libmad = "http://sourceforge.net/projects/mad/files/libmad/"
-REGEX_pn-libmad = "[hH][rR][eE][fF]=\"/projects/mad/files/libmad/(?P<pver>((\d+[\.\-_]*)+([a-z]?)))/\""
+REGEX_pn-libmad = "/projects/mad/files/libmad/(?P<pver>((\d+[\.\-_]*)+([a-z]?)))/"
REGEX_URI_pn-libogg = "http://downloads.xiph.org/releases/ogg/"
-REGEX_pn-libogg = "[hH][rR][eE][fF]=\"libogg\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-libogg = "libogg\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-libmpc = "http://www.multiprecision.org/index.php?prog=mpc&page=download"
REGEX_URI_pn-libomxil = "http://sourceforge.net/projects/omxil/files/omxil/"
-REGEX_pn-libomxil = "[hH][rR][eE][fF]=\"/projects/omxil/files/omxil/Bellagio.20(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-libomxil = "/projects/omxil/files/omxil/Bellagio.20(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-libpcap = "http://www.tcpdump.org/release/"
-REGEX_pn-libpcap = "[hH][rR][eE][fF]=\"libpcap-(?P<pver>((\d+[\.\-_]*)+)).tar.gz\""
+REGEX_pn-libpcap = "libpcap-(?P<pver>((\d+[\.\-_]*)+)).tar.gz"
REGEX_URI_pn-libpcre = "http://sourceforge.net/projects/pcre/files/pcre/"
-REGEX_pn-libpcre = "[hH][rR][eE][fF]=\"/projects/pcre/files/pcre/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-libpcre = "/projects/pcre/files/pcre/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-libpcre-native = "http://sourceforge.net/projects/pcre/files/pcre/"
-REGEX_pn-libpcre-native = "[hH][rR][eE][fF]=\"/projects/pcre/files/pcre/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-libpcre-native = "/projects/pcre/files/pcre/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-libpfm4 = "http://sourceforge.net/projects/perfmon2/files/libpfm4/"
-REGEX_URI_pn-libpng = "http://sourceforge.net/projects/libpng/files/"
-REGEX_pn-libpng = "[hH][rR][eE][fF]=\"/projects/libpng/files/latest/download\?source=files\" title=\"/libpng\d+(/(\d+\.?)+/)?libpng-(?P<pver>((\d+[\.\-_]*)+)).tar.gz"
+REGEX_pn-libpfm4 = "/projects/perfmon2/files/libpfm4/libpfm-(?P<pver>((\d+[\.\-_]*)+)).tar.gz/"
+REGEX_URI_pn-libpng = "http://sourceforge.net/projects/libpng/files/libpng16/"
+REGEX_pn-libpng = "/projects/libpng/files/libpng16/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-libpng-native = "ftp://ftp.simplesystems.org/pub/libpng/png/src/"
-REGEX_pn-libpng-native = "[hH][rR][eE][fF]=\"ftp://ftp.simplesystems.org:21/pub/libpng/png/src/libpng-(?P<pver>((\d+[\.\-_]*)+)).tar.gz""
+REGEX_pn-libpng-native = "ftp://ftp.simplesystems.org:21/pub/libpng/png/src/libpng-(?P<pver>((\d+[\.\-_]*)+)).tar.gz"
REGEX_URI_pn-libpng12 = "http://sourceforge.net/projects/libpng/files/libpng12/"
-REGEX_pn-libpng12 = "[hH][rR][eE][fF]=\"/projects/libpng/files/libpng12/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-libpng12 = "/projects/libpng/files/libpng12/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-libproxy = "http://code.google.com/p/libproxy/downloads/list"
-REGEX_pn-libtheora = "[Hh][Rr][Ee][Ff]=\"libtheora\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-libtheora = "libtheora\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-libsamplerate0 = "http://www.mega-nerd.com/SRC/download.html"
-REGEX_pn-libsamplerate0 = "[hH][rR][eE][fF]=\"libsamplerate\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-libsamplerate0 = "libsamplerate\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-libtirpc = "http://sourceforge.net/projects/libtirpc/files/libtirpc/"
-REGEX_pn-libtirpc = "[hH][rR][eE][fF]=\"/projects/libtirpc/files/libtirpc/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-libtirpc = "/projects/libtirpc/files/libtirpc/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-libusb-compat = "http://sourceforge.net/projects/libusb/files/libusb-compat-0.1/"
-REGEX_pn-libusb-compat = "[hH][rR][eE][fF]=\"/projects/libusb/files/libusb-compat-0.1/libusb-compat-(?P<pver>((\d+[\.\-_]*)+))/""
+REGEX_pn-libusb-compat = "/projects/libusb/files/libusb-compat-0.1/libusb-compat-(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-libusb1 = "http://sourceforge.net/projects/libusb/files/libusb-1.0/"
-REGEX_pn-libusb1 = "[hH][rR][eE][fF]=\"/projects/libusb/files/libusb-1.0/libusb-(?P<pver>((\d+[\.\-_]*)+))/""
+REGEX_pn-libusb1 = "/projects/libusb/files/libusb-1.0/libusb-(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-libvorbis = "http://downloads.xiph.org/releases/vorbis/"
-REGEX_pn-libvorbis = "[hH][rR][eE][fF]=\"libvorbis-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
-REGEX_pn-libxslt = "[Hh][Rr][Ee][Ff]=\"libxslt\-(?P<pver>((\d+\.*)+))\.tar\.gz\""
+REGEX_pn-libvorbis = "libvorbis-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
+REGEX_pn-libxslt = "libxslt\-(?P<pver>((\d+\.*)+))\.tar\.gz"
REGEX_pn-linux-libc-headers-yocto = "v((?P<pver>((\d+[\.\-_]*)+)))"
REGEX_URI_pn-lrzsz = "http://ohse.de/uwe/software/lrzsz.html"
REGEX_URI_pn-lsb = "http://sourceforge.net/projects/lsb/files/lsb_release/"
-REGEX_pn-lsb = "[hH][rR][eE][fF]=\"/projects/lsb/files/lsb_release/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-lsb = "/projects/lsb/files/lsb_release/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-lsbinitscripts = "http://pkgs.fedoraproject.org/repo/pkgs/initscripts/"
-REGEX_pn-lsbinitscripts = "[hH][rR][eE][fF]=\"initscripts\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.bz2/\""
+REGEX_pn-lsbinitscripts = "initscripts\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.bz2/"
REGEX_URI_pn-ltp = "http://sourceforge.net/projects/ltp/files/LTP%20Source"
-REGEX_pn-ltp = "[hH][rR][eE][fF]=\"/projects/ltp/files/LTP%20Source/ltp\-(?P<pver>(\d+))/\""
+REGEX_pn-ltp = "/projects/ltp/files/LTP%20Source/ltp\-(?P<pver>(\d+))/"
REGEX_URI_pn-lzop = "http://www.lzop.org/download/"
-REGEX_pn-lzop = "[hH][rR][eE][fF]=\"lzop\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-lzop = "lzop\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-lzop-native = "http://www.lzop.org/download/"
-REGEX_pn-lzop-native = "[hH][rR][eE][fF]=\"lzop\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-lzop-native = "lzop\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-nativesdk-lzop = "http://www.lzop.org/download/"
-REGEX_pn-nativesdk-lzop = "[hH][rR][eE][fF]=\"lzop\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
-REGEX_URI_pn-mailx = "http://sourceforge.net/projects/heirloom/files/heirloom-mailx/"
-REGEX_pn-mailx = "[hH][rR][eE][fF]=\"/projects/heirloom/files/heirloom-mailx/(?P<pver>((\d+[\.]*)+))/\""
+REGEX_pn-nativesdk-lzop = "lzop\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-memcached = "http://code.google.com/p/memcached/downloads/list"
REGEX_URI_pn-menu-cache = "http://sourceforge.net/projects/lxde/files/menu-cache/"
-REGEX_pn-menu-cache = "[hH][rR][eE][fF]=\"/projects/lxde/files/menu\-cache/menu-cache%20(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_pn-mesa = "[hH][rR][eE][fF]=\"MesaLib-(?P<pver>((\d+[\.\-_]*)+))(\-rc\d+)*.tar.gz\""
+REGEX_pn-menu-cache = "/projects/lxde/files/menu\-cache/menu-cache%20(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_pn-mesa = "MesaLib-(?P<pver>((\d+[\.\-_]*)+))(\-rc\d+)*.tar.gz"
REGEX_URI_pn-mesa-glsl-native = "ftp://ftp.freedesktop.org/pub/mesa/"
-REGEX_pn-mesa-glsl-native = "[hH][rR][eE][fF]=\"ftp://ftp.freedesktop.org:21/pub/mesa/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-mesa-glsl-native = "ftp://ftp.freedesktop.org:21/pub/mesa/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-minicom = "https://alioth.debian.org/frs/?group_id=30018"
-REGEX_pn-minicom = "[hH][rR][eE][fF]=\"/frs/download.php/file/\d+/minicom\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-minicom = "/frs/download.php/file/\d+/minicom\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-mingetty = "http://sourceforge.net/projects/mingetty/files/mingetty"
-REGEX_pn-mingetty = "[hH][rR][eE][fF]=\"/projects/mingetty/files/mingetty/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-mingetty = "/projects/mingetty/files/mingetty/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-mpeg2dec = "http://libmpeg2.sourceforge.net/downloads.html"
-REGEX_pn-mpeg2dec = "[hH][rR][eE][fF]=\"/files/mpeg2dec-(?P<pver>((\d+[\.\-_]*)+)).tar.gz\""
+REGEX_pn-mpeg2dec = "/files/mpeg2dec-(?P<pver>((\d+[\.\-_]*)+)).tar.gz"
REGEX_URI_pn-mpfr = "http://ftp.gnu.org/gnu/mpfr/"
-REGEX_pn-mpfr = "[hH][rR][eE][fF]=\"mpfr-(?P<pver>((\d+[\.\-_]*)+)).tar.gz""
+REGEX_pn-mpfr = "mpfr-(?P<pver>((\d+[\.\-_]*)+)).tar.gz"
REGEX_URI_pn-mpfr-native = "http://ftp.gnu.org/gnu/mpfr/"
-REGEX_pn-mpfr-native = "[hH][rR][eE][fF]=\"mpfr-(?P<pver>((\d+[\.\-_]*)+)).tar.gz\""
+REGEX_pn-mpfr-native = "mpfr-(?P<pver>((\d+[\.\-_]*)+)).tar.gz"
REGEX_URI_pn-msmtp = "http://sourceforge.net/projects/msmtp/files/msmtp/"
-REGEX_pn-msmtp = "[hH][rR][eE][fF]=\"/projects/msmtp/files/msmtp/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-msmtp = "/projects/msmtp/files/msmtp/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-nativesdk-mpfr = "http://ftp.gnu.org/gnu/mpfr/"
-REGEX_pn-nativesdk-mpfr = "[hH][rR][eE][fF]=\"mpfr-(?P<pver>((\d+[\.\-_]*)+)).tar.gz\""
+REGEX_pn-nativesdk-mpfr = "mpfr-(?P<pver>((\d+[\.\-_]*)+)).tar.gz"
REGEX_URI_pn-net-snmp = "http://sourceforge.net/projects/net-snmp/files/net-snmp/"
-REGEX_pn-net-snmp = "[hH][rR][eE][fF]=\"/projects/net-snmp/files/net-snmp/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-net-snmp = "/projects/net-snmp/files/net-snmp/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-netcat = "http://sourceforge.net/projects/netcat/files/netcat/"
-REGEX_pn-netcat = "[hH][rR][eE][fF]=\"/projects/netcat/files/netcat/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-netcat = "/projects/netcat/files/netcat/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-nfs-utils = "http://sourceforge.net/projects/nfs/files/nfs-utils/"
-REGEX_pn-nfs-utils = "[hH][rR][eE][fF]=\"/projects/nfs/files/nfs-utils/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-nfs-utils = "/projects/nfs/files/nfs-utils/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-ocf-linux = "http://sourceforge.net/projects/ocf-linux/files/ocf-linux/"
-REGEX_pn-ocf-linux = "[hH][rR][eE][fF]=\"/projects/ocf-linux/files/ocf-linux/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-ocf-linux = "/projects/ocf-linux/files/ocf-linux/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-openjade-native = "http://sourceforge.net/projects/openjade/files/openjade/"
-REGEX_pn-openjade-native = "[hH][rR][eE][fF]=\"/projects/openjade/files/openjade/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-openjade-native = "/projects/openjade/files/openjade/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-opensp = "http://sourceforge.net/projects/openjade/files/opensp/"
-REGEX_pn-opensp = "[hH][rR][eE][fF]=\"/projects/openjade/files/opensp/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-opensp = "/projects/openjade/files/opensp/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-opensp-native = "http://sourceforge.net/projects/openjade/files/opensp/"
-REGEX_pn-opensp-native = "[hH][rR][eE][fF]=\"/projects/openjade/files/opensp/(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_URI_pn-opkg = "http://code.google.com/p/opkg/downloads/list"
+REGEX_pn-opensp-native = "/projects/openjade/files/opensp/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-oprofile = "http://sourceforge.net/projects/oprofile/files/oprofile/"
-REGEX_pn-oprofile = "[hH][rR][eE][fF]=\"/projects/oprofile/files/oprofile/oprofile-(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-oprofile = "/projects/oprofile/files/oprofile/oprofile-(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-ossp-uuid = "http://code.google.com/p/gnome-build-stage-1/downloads/list"
REGEX_URI_pn-pcmanfm = "http://sourceforge.net/projects/pcmanfm/files/PCManFM%20%2B%20Libfm%20%28tarball%20release%29/PCManFM/"
-REGEX_pn-pcmanfm = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/pcmanfm/files/PCManFM.20.2B.20Libfm.20.28tarball.20release.29/PCManFM/pcmanfm-(?P<pver>((\d+[\.\-_]*)+)).tar.gz/download\""
-REGEX_URI_pn-procps = "http://procps.sourceforge.net/download.html"
-REGEX_pn-procps = "[hH][rR][eE][fF]=\"procps\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-pcmanfm = "http://sourceforge.net/projects/pcmanfm/files/PCManFM.20.2B.20Libfm.20.28tarball.20release.29/PCManFM/pcmanfm-(?P<pver>((\d+[\.\-_]*)+)).tar.gz/download"
+REGEX_pn-procps = "procps\-ng\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.(g|x)z"
REGEX_URI_pn-powertop = "https://01.org/powertop/downloads"
REGEX_URI_pn-pptp-linux = "http://sourceforge.net/projects/pptpclient/files/pptp/"
-REGEX_pn-pptp-linux = "[hH][rR][eE][fF]=\"/projects/pptpclient/files/pptp/pptp-(?P<pver>((\d+[\.\-_]*)+))/\""
-GIT_REGEX_pn-prelink = "cross_prelink"
+REGEX_pn-pptp-linux = "/projects/pptpclient/files/pptp/pptp-(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_pn-prelink = "(?P<pver>cross_prelink)"
REGEX_URI_pn-psmisc = "http://sourceforge.net/projects/psmisc/files/psmisc/"
-REGEX_pn-psmisc = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/psmisc/files/psmisc/psmisc\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz/download\""
+REGEX_pn-psmisc = "http://sourceforge.net/projects/psmisc/files/psmisc/psmisc\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz/download"
REGEX_URI_pn-ptpd = "http://sourceforge.net/projects/ptpd/files/ptpd/"
-REGEX_pn-ptpd = "[hH][rR][eE][fF]=\"/projects/ptpd/files/ptpd/(?P<pver>((\d+[\.\-_]*)+))/stats/timeline\""
+REGEX_pn-ptpd = "/projects/ptpd/files/ptpd/(?P<pver>((\d+[\.\-_]*)+))/stats/timeline"
REGEX_URI_pn-python-argparse = "https://code.google.com/p/argparse/downloads/list"
REGEX_URI_pn-python-docutils = "http://sourceforge.net/projects/docutils/files/docutils/"
-REGEX_pn-python-docutils = "[hH][rR][eE][fF]=\"/projects/docutils/files/docutils/(?P<pver>((\d+[\.\-_]*)+)).*/\""
+REGEX_pn-python-docutils = "/projects/docutils/files/docutils/(?P<pver>((\d+[\.\-_]*)+)).*/"
REGEX_URI_pn-python-numpy = "http://sourceforge.net/projects/numpy/files/NumPy/"
-REGEX_pn-python-numpy = "[hH][rR][eE][fF]=\"/projects/numpy/files/latest/download\?source=files\" title=\"/NumPy/(\d+\.?)+/numpy-(?P<pver>((\d+[\.\-_]*)+)).tar.gz"
+REGEX_pn-python-numpy = "/projects/numpy/files/NumPy/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-python-pycurl = "http://pycurl.sourceforge.net/download/"
-REGEX_pn-python-pycurl = "[hH][rR][eE][fF]=\"pycurl-(?P<pver>((\d+[\.\-_]*)+)).tar.gz\""
+REGEX_pn-python-pycurl = "pycurl-(?P<pver>((\d+[\.\-_]*)+)).tar.gz"
REGEX_URI_pn-python-scons = "http://sourceforge.net/projects/scons/files/scons/"
-REGEX_pn-python-scons = "[hH][rR][eE][fF]=\"/projects/scons/files/scons/(?P<pver>((\d+[\.\-_]*)+)).*/\""
+REGEX_pn-python-scons = "/projects/scons/files/scons/(?P<pver>((\d+[\.\-_]*)+)).*/"
REGEX_URI_pn-python-scons-native = "http://sourceforge.net/projects/scons/files/scons/"
-REGEX_pn-python-scons-native = "[hH][rR][eE][fF]=\"/projects/scons/files/scons/(?P<pver>((\d+[\.\-_]*)+)).*/\""
-REGEX_pn-python-setuptools = "[hH][rR][eE][fF]=\"setuptools\-(?P<pver>((\d+([a-z]\d+)?[\.\-_]*)+))\.(tar\.gz|\.zip)\""
+REGEX_pn-python-scons-native = "/projects/scons/files/scons/(?P<pver>((\d+[\.\-_]*)+)).*/"
+REGEX_pn-python-setuptools = "setuptools\-(?P<pver>((\d+([a-z]\d+)?[\.\-_]*)+))\.(tar\.gz|\.zip)"
REGEX_URI_pn-quota = "http://sourceforge.net/projects/linuxquota/files/quota-tools/"
-REGEX_pn-quota = "[hH][rR][eE][fF]=\"/projects/linuxquota/files/quota-tools/(?P<pver>((\d+[\.\-_]*)+([-\w\d]+)))/\""
+REGEX_pn-quota = "/projects/linuxquota/files/quota-tools/(?P<pver>((\d+[\.\-_]*)+([-\w\d]+)))/"
REGEX_pn-remake = "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\-\d+(\.\d+)*)*))"
REGEX_URI_pn-rpcbind = "http://sourceforge.net/projects/rpcbind/files/rpcbind/"
-REGEX_pn-rpcbind = "[hH][rR][eE][fF]=\"/projects/rpcbind/files/rpcbind/(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_pn-rpm = "[hH][rR][eE][fF]=\"rpm\-(?P<pver>((\d+[\.\-_]*)+)\-(\d+[\.]*)+)\.src\.rpm\""
-REGEX_pn-rpm-native = "[hH][rR][eE][fF]=\"rpm\-(?P<pver>((\d+[\.\-_]*)+)\-(\d+[\.]*)+)\.src\.rpm\""
+REGEX_pn-rpcbind = "/projects/rpcbind/files/rpcbind/(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_pn-rpm = "rpm\-(?P<pver>((\d+[\.\-_]*)+)\-(\d+[\.]*)+)\.src\.rpm"
+REGEX_pn-rpm-native = "rpm\-(?P<pver>((\d+[\.\-_]*)+)\-(\d+[\.]*)+)\.src\.rpm"
REGEX_URI_pn-setserial = "http://sourceforge.net/projects/setserial/files/setserial/"
-REGEX_pn-setserial = "[hH][rR][eE][fF]=\"/projects/setserial/files/setserial/(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_pn-shared-mime-info = "[hH][rR][eE][fF]=\"shared\-mime\-info\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.(bz2|gz|xz)\""
-REGEX_pn-shared-mime-info-native = "[hH][rR][eE][fF]=\"shared\-mime\-info\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.(bz2|gz|xz)\""
+REGEX_pn-setserial = "/projects/setserial/files/setserial/(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_pn-shared-mime-info = "shared\-mime\-info\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.(bz2|gz|xz)"
+REGEX_pn-shared-mime-info-native = "shared\-mime\-info\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.(bz2|gz|xz)"
REGEX_URI_pn-socat = "http://www.dest-unreach.org/socat/download/"
-REGEX_pn-socat = "[hH][rR][eE][fF]=\"socat\-(?P<pver>((\d+[\.\-]*)+))\.tar\.bz2\""
+REGEX_pn-socat = "socat\-(?P<pver>((\d+[\.\-]*)+))\.tar\.bz2"
REGEX_URI_pn-speex = "http://downloads.us.xiph.org/releases/speex/"
-REGEX_pn-speex = "[hH][rR][eE][fF]=\"speex\-(?P<pver>((\d+\.*)+))\.tar\.gz\""
+REGEX_pn-speex = "speex\-(?P<pver>((\d+\.*)+)(rc\d)?)\.tar\.gz"
REGEX_URI_pn-sqlite3 = "http://www.sqlite.org/download.html"
REGEX_pn-sqlite3 = "sqlite-autoconf-(?P<pver>(\d+)).tar.gz"
REGEX_URI_pn-sqlite3-native = "http://www.sqlite.org/download.html"
-REGEX_pn-sqlite3-native = "[hH][rR][eE][fF]=\"/sqlite-autoconf-(?P<pver>(\d+)).tar.gz\""
+REGEX_pn-sqlite3-native = "/sqlite-autoconf-(?P<pver>(\d+)).tar.gz"
REGEX_URI_pn-nativesdk-sqlite3 = "http://www.sqlite.org/download.html"
-REGEX_pn-nativesdk-sqlite3 = "[hH][rR][eE][fF]=\"/sqlite-autoconf-(?P<pver>(\d+)).tar.gz\""
+REGEX_pn-nativesdk-sqlite3 = "/sqlite-autoconf-(?P<pver>(\d+)).tar.gz"
REGEX_URI_pn-squashfs-tools = "http://sourceforge.net/projects/squashfs/files/squashfs/"
-REGEX_pn-squashfs-tools = "[hH][rR][eE][fF]=\"/projects/squashfs/files/squashfs/squashfs(?P<pver>((\d+[\.\-_]*)+)).*/\""
+REGEX_pn-squashfs-tools = "/projects/squashfs/files/squashfs/squashfs(?P<pver>((\d+[\.\-_]*)+)).*/"
REGEX_URI_pn-strace = "http://sourceforge.net/projects/strace/files/strace/"
-REGEX_pn-strace = "[hH][rR][eE][fF]=\"/projects/strace/files/strace/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-strace = "/projects/strace/files/strace/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-strace-native = "http://sourceforge.net/projects/strace/files/strace/"
-REGEX_pn-strace-native = "[hH][rR][eE][fF]=\"/projects/strace/files/strace/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-strace-native = "/projects/strace/files/strace/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-stunnel = "http://www.stunnel.org/downloads.html"
-REGEX_pn-stunnel = "[hH][rR][eE][fF]=\"downloads/stunnel-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-stunnel = "downloads/stunnel-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-sysfsutils = "http://sourceforge.net/projects/linux-diag/files/sysfsutils/"
-REGEX_pn-sysfsutils = "[hH][rR][eE][fF]=\"/projects/linux-diag/files/sysfsutils/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-sysfsutils = "/projects/linux-diag/files/sysfsutils/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-sysstat = "http://sebastien.godard.pagesperso-orange.fr/download.html"
-REGEX_pn-sysstat = "[hH][rR][eE][fF]=\"http://pagesperso-orange.fr/sebastien.godard/sysstat\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-sysstat = "http://pagesperso-orange.fr/sebastien.godard/sysstat\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-taglib = "http://github.com/taglib/taglib/releases/"
REGEX_URI_pn-tcl = "http://sourceforge.net/projects/tcl/files/Tcl/"
-REGEX_pn-tcl = "[hH][rR][eE][fF]=\"/projects/tcl/files/Tcl/(?P<pver>((\d+(b\d+)?[\.\-_]*)+))/\""
+REGEX_pn-tcl = "/projects/tcl/files/Tcl/(?P<pver>((\d+(b\d+)?[\.\-_]*)+))/"
REGEX_URI_pn-tcl-native = "http://sourceforge.net/projects/tcl/files/Tcl/"
-REGEX_pn-tcl-native = "[hH][rR][eE][fF]=\"/projects/tcl/files/Tcl/(?P<pver>((\d+(b\d+)?[\.\-_]*)+))/\""
+REGEX_pn-tcl-native = "/projects/tcl/files/Tcl/(?P<pver>((\d+(b\d+)?[\.\-_]*)+))/"
REGEX_URI_pn-tcpreplay = "http://sourceforge.net/projects/tcpreplay/files/tcpreplay/"
-REGEX_pn-tcpreplay = "[hH][rR][eE][fF]=\"/projects/tcpreplay/files/tcpreplay/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-tcpreplay = "/projects/tcpreplay/files/tcpreplay/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-tiff-native = "ftp://ftp.remotesensing.org/pub/libtiff/"
-REGEX_pn-tiff-native = "[hH][rR][eE][fF]=\"ftp://ftp.remotesensing.org:21/pub/libtiff/tiff-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
-REGEX_URI_pn-tslib = "http://sourceforge.net/projects/tslib.berlios/files/"
+REGEX_pn-tiff-native = "ftp://ftp.remotesensing.org:21/pub/libtiff/tiff-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
+REGEX_URI_pn-tslib = "https://github.com/kergoth/tslib/releases"
+REGEX_pn-tslib = "/releases/tag/(?P<pver>((\d+[\.\-_]*)+))"
REGEX_URI_pn-traceroute = "http://sourceforge.net/projects/traceroute/files/traceroute/"
-REGEX_pn-traceroute = "[hH][rR][eE][fF]=\"/projects/traceroute/files/traceroute/traceroute-(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_pn-tslib = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/tslib.berlios/files/tslib\-(?P<pver>((\d+[\.\-_]*)+))\.tar\.bz2/download\""
+REGEX_pn-traceroute = "/projects/traceroute/files/traceroute/traceroute-(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-tunctl = "http://sourceforge.net/projects/tunctl/files/tunctl/"
-REGEX_pn-tunctl = "[hH][rR][eE][fF]=\"/projects/tunctl/files/tunctl/(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_pn-tzdata = "[hH][rR][eE][fF]=\"tzdata(?P<pver>((\d+[a-z])+))\.tar\.gz\""
+REGEX_pn-tunctl = "/projects/tunctl/files/tunctl/(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_pn-tzdata = "tzdata(?P<pver>((\d+[a-z])+))\.tar\.gz"
REGEX_URI_pn-unzip = "http://sourceforge.net/projects/infozip/files/UnZip%206.x%20%28latest%29/UnZip%206.0/"
-REGEX_pn-unzip = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/infozip/files/UnZip%206.x%20%28latest%29/UnZip%206.0/unzip(?P<pver>(\d+))\.tar\.gz/download\""
-PRSPV_pn-unzip = "${@d.getVar('PV',1).replace('.','')}"
+REGEX_pn-unzip = "http://sourceforge.net/projects/infozip/files/UnZip%206.x%20%28latest%29/UnZip%206.0/unzip(?P<pver>(\d+))\.tar\.gz/download"
REGEX_URI_pn-unzip-native = "http://sourceforge.net/projects/infozip/files/UnZip%206.x%20%28latest%29/UnZip%206.0/"
-REGEX_pn-unzip-native = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/infozip/files/UnZip%206.x%20%28latest%29/UnZip%206.0/unzip(?P<pver>(\d+))\.tar\.gz/download\""
-PRSPV_pn-unzip-native = "${@d.getVar('PV',1).replace('.','')}"
+REGEX_pn-unzip-native = "http://sourceforge.net/projects/infozip/files/UnZip%206.x%20%28latest%29/UnZip%206.0/unzip(?P<pver>(\d+))\.tar\.gz/download"
#REGEX_URI_pn-v86d = "http://dev.gentoo.org/~spock/projects/uvesafb/archive/"
-REGEX_pn-v86d = "[hH][rR][eE][fF]=\"v86d\-(?P<pver>((\d+[\.]*)+))\.tar\.bz2\""
+REGEX_pn-v86d = "v86d\-(?P<pver>((\d+[\.]*)+))\.tar\.bz2"
REGEX_URI_pn-vblade = "http://sourceforge.net/projects/aoetools/files/vblade/"
-REGEX_pn-vblade = "[hH][rR][eE][fF]=\"/projects/aoetools/files/vblade/vblade-(?P<pver>((\d+[\.\-_]*)+))\.tgz/stats/timeline\""
+REGEX_pn-vblade = "/projects/aoetools/files/vblade/vblade-(?P<pver>((\d+[\.\-_]*)+))\.tgz/stats/timeline"
REGEX_URI_pn-vsftpd = "https://security.appspot.com/vsftpd.html"
-REGEX_pn-vsftpd = "[hH][rR][eE][fF]=\"https://security.appspot.com/downloads/vsftpd-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz\""
+REGEX_pn-vsftpd = "https://security.appspot.com/downloads/vsftpd-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
REGEX_URI_pn-watchdog = "http://sourceforge.net/projects/watchdog/files/watchdog/"
-REGEX_pn-watchdog = "[hH][rR][eE][fF]=\"/projects/watchdog/files/watchdog/(?P<pver>((\d+[\.\-_]*)+))/\""
+REGEX_pn-watchdog = "/projects/watchdog/files/watchdog/(?P<pver>((\d+[\.\-_]*)+))/"
REGEX_URI_pn-wireless-tools = "http://www.hpl.hp.com/personal/Jean_Tourrilhes/Linux/Tools.html"
-REGEX_pn-wireless-tools = "[hH][rR][eE][fF]=\"wireless_tools\.(?P<pver>(\d+))\.tar\.gz\""
+REGEX_pn-wireless-tools = "wireless_tools\.(?P<pver>(\d+)(\..*|))\.tar\.gz"
REGEX_URI_pn-x11vnc = "http://sourceforge.net/projects/libvncserver/files/x11vnc/"
-REGEX_pn-x11vnc = "[hH][rR][eE][fF]=\"/projects/libvncserver/files/x11vnc/(?P<pver>((\d+[\.\-_]*)+))/\""
-REGEX_pn-xdg-utils = "[hH][rR][eE][fF]=\"xdg\-utils\-(?P<pver>((\d+[\.\-_]*)+))\.(tar\.gz|tgz)\""
+REGEX_pn-x11vnc = "/projects/libvncserver/files/x11vnc/(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_pn-xdg-utils = "xdg\-utils\-(?P<pver>((\d+[\.\-_]*)+))\.(tar\.gz|tgz)"
REGEX_pn-xf86-video-omap = "(?P<pver>(\d+\.(\d\.?)*))"
REGEX_URI_pn-zip = "http://sourceforge.net/projects/infozip/files/Zip%203.x%20%28latest%29/3.0/"
-REGEX_pn-zip = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/infozip/files/Zip%203.x%20%28latest%29/3.0/zip(?P<pver>(\d+))\.tar\.gz/download\""
-PRSPV_pn-zip = "${@d.getVar('PV',1).replace('.','')}"
+REGEX_pn-zip = "http://sourceforge.net/projects/infozip/files/Zip%203.x%20%28latest%29/3.0/zip(?P<pver>(\d+))\.tar\.gz/download"
REGEX_URI_pn-zip-native = "http://sourceforge.net/projects/infozip/files/Zip%203.x%20%28latest%29/3.0/"
-REGEX_pn-zip-native = "[hH][rR][eE][fF]=\"http://sourceforge.net/projects/infozip/files/Zip%203.x%20%28latest%29/3.0/zip(?P<pver>(\d+))\.tar\.gz/download\""
-PRSPV_pn-zip-native = "${@d.getVar('PV',1).replace('.','')}"
+REGEX_pn-zip-native = "http://sourceforge.net/projects/infozip/files/Zip%203.x%20%28latest%29/3.0/zip(?P<pver>(\d+))\.tar\.gz/download"
REGEX_URI_pn-zisofs-tools-native ="http://pkgs.fedoraproject.org/repo/pkgs/zisofs-tools/"
+REGEX_pn-qt-mobility-embedded="qt-mobility-opensource-src-(?P<pver>((\d+[\.\-_]*)+)).*\.tar\.xz"
+REGEX_URI_pn-qt-mobility-embedded="http://pkgs.fedoraproject.org/repo/pkgs/qt-mobility/"
+REGEX_pn-qt-mobility-x11="qt-mobility-opensource-src-(?P<pver>((\d+[\.\-_]*)+)).*\.tar\.xz"
+REGEX_URI_pn-qt-mobility-x11="http://pkgs.fedoraproject.org/repo/pkgs/qt-mobility/"
+REGEX_pn-expect="/projects/expect/files/Expect/(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_URI_pn-expect="http://sourceforge.net/projects/expect/files/Expect/"
+REGEX_pn-gnu-efi="/projects/gnu-efi/files/gnu-efi_(?P<pver>((\d+[\.\-_]*)+).)\.orig\.tar\.gz/"
+REGEX_URI_pn-gnu-efi="http://sourceforge.net/projects/gnu-efi/files/"
+REGEX_pn-python-smartpm="/smart/\+milestone/(?P<pver>((\d+[\.\-_]*)+))"
+REGEX_URI_pn-python-smartpm="https://launchpad.net/smart/trunk/"
+REGEX_URI_pn-libatomics-ops="http://www.hpl.hp.com/research/linux/atomic_ops/download.php4"
+REGEX_URI_pn-sudo="http://www.sudo.ws"
+REGEX_pn-sudo="sudo-(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz"
+REGEX_URI_pn-docbook-xsl-stylesheets="http://sourceforge.net/projects/docbook/files/docbook-xsl/"
+REGEX_pn-docbook-xsl-stylesheets="/projects/docbook/files/docbook-xsl/(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_URI_pn-waffle="http://www.waffle-gl.org/releases.html"
+REGEX_URI_pn-qt4-x11-free="http://download.qt-project.org/official_releases/qt/4.8/"
+REGEX_pn-qt4-x11-free="(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_URI_pn-qt4-embedded="http://download.qt-project.org/official_releases/qt/4.8/"
+REGEX_pn-qt4-embedded="(?P<pver>((\d+[\.\-_]*)+))/"
+REGEX_URI_pn-dosfstools="http://daniel-baumann.ch/software/dosfstools/"
+REGEX_pn-midori="midori_(?P<pver>((\d+[\.\-_]*)+))_all_\.tar\.bz2"
+REGEX_URI_pn-midori="http://midori-browser.org/download/source/"
+REGEX_pn-icu="(?P<pver>((\d+[\.\-_]*)+))"
+REGEX_URI_pn-icu="http://download.icu-project.org/files/icu4c/"
+REGEX_URI_pn-pcmciautils="http://mirror.linux.org.au/linux/utils/kernel/pcmcia/"
+REGEX_URI_pn-man="http://www.ibiblio.org/pub/Linux/apps/doctools/man/"
# Regex used to parse tags and extract version
GITTAGREGEX_pn-build-appliance-image = "(?P<pver>(([0-9][\.|_]?)+[0-9]))"
GITTAGREGEX_pn-chkconfig-alternatives-native = "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))"
GITTAGREGEX_pn-remake = "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))"
GITTAGREGEX_pn-xf86-video-omap = "(?P<pver>(\d+\.(\d\.?)*))"
+GITTAGREGEX_pn-linux-yocto = "(?P<pver>(\d+\.(\d\.?)*))"
+GITTAGREGEX_pn-linux-yocto-dev = "(?P<pver>(\d+\.(\d\.?)*))"
+GITTAGREGEX_pn-uclibc = "(?P<pver>(\d+\.(\d\.?)*))"
+GITTAGREGEX_pn-uclibc-initial = "(?P<pver>(\d+\.(\d\.?)*))"
+GITTAGREGEX_pn-mesa = "(?P<pver>(\d+\.(\d\.?)*))"
+GITTAGREGEX_pn-glibc = "(?P<pver>(\d+\.(\d\.?)*))"
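The GITTAGREGEX entries above are ordinary Python named-group patterns; as a minimal illustration (not part of the patch, assuming nothing beyond the standard re module), this is how the pver group pulls a version out of a tag name:

import re

# Hypothetical tag name; the pattern mirrors the linux-yocto entry above.
GITTAGREGEX = r"(?P<pver>(\d+\.(\d\.?)*))"
match = re.search(GITTAGREGEX, "v3.17.8")
if match:
    print(match.group("pver"))  # prints "3.17.8"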
diff --git a/meta-yocto/conf/distro/include/poky-floating-revisions.inc b/meta-yocto/conf/distro/include/poky-floating-revisions.inc
index 41dd71fb0f..5b8c3a0ea8 100644
--- a/meta-yocto/conf/distro/include/poky-floating-revisions.inc
+++ b/meta-yocto/conf/distro/include/poky-floating-revisions.inc
@@ -50,23 +50,23 @@ SRCREV_pn-tasks ?= "${AUTOREV}"
SRCREV_pn-ofono ?= "${AUTOREV}"
SRCREV_pn-dri2proto = "${AUTOREV}"
-#PREFERRED_VERSION_dri2proto ?= "1.99.1+git${SRCREV}"
+#PREFERRED_VERSION_dri2proto ?= "1.99.1+git%"
SRCREV_pn-libdrm = "${AUTOREV}"
-#PREFERRED_VERSION_libdrm ?= "2.4.0+git${SRCREV}"
+#PREFERRED_VERSION_libdrm ?= "2.4.0+git%"
SRCREV_pn-libxcb = "${AUTOREV}"
-#PREFERRED_VERSION_libxcb ?= "1.1.90.1+gitr${SRCREV}"
+#PREFERRED_VERSION_libxcb ?= "1.1.90.1+gitr%"
SRCREV_pn-lib-proto = "${AUTOREV}"
-#PREFERRED_VERSION_xcb-proto ?= "1.2+gitr${SRCREV}"
+#PREFERRED_VERSION_xcb-proto ?= "1.2+gitr%"
SRCREV_pn-libxcb-sdk = "${AUTOREV}"
-#PREFERRED_VERSION_libxcb-sdk ?= "1.1.90.1+gitr${SRCREV}"
+#PREFERRED_VERSION_libxcb-sdk ?= "1.1.90.1+gitr%"
SRCREV_pn-xf86-input-evdev = "${AUTOREV}"
#PREFERRED_VERSION_xf86-input-evdev ?= "2.0.4"
SRCREV_pn-xf86-input-mouse = "${AUTOREV}"
-#PREFERRED_VERSION_xf86-input-mouse ?= "1.3.0+git${SRCREV}"
+#PREFERRED_VERSION_xf86-input-mouse ?= "1.3.0+git%"
SRCREV_pn-xf86-input-keyboard = "${AUTOREV}"
-#PREFERRED_VERSION_xf86-input-keyboard ?= "1.3.1+git${SRCREV}"
+#PREFERRED_VERSION_xf86-input-keyboard ?= "1.3.1+git%"
SRCREV_pn-xf86-input-synaptics = "${AUTOREV}"
-#PREFERRED_VERSION_xf86-input-synaptics ?= "0.15.2+git${SRCREV}"
+#PREFERRED_VERSION_xf86-input-synaptics ?= "0.15.2+git%"
#SRCDATE_oprofile ?= "${DATE}"
diff --git a/meta-yocto/conf/distro/include/upstream_tracking.inc b/meta-yocto/conf/distro/include/upstream_tracking.inc
index a018803c7d..923fa1b20a 100644
--- a/meta-yocto/conf/distro/include/upstream_tracking.inc
+++ b/meta-yocto/conf/distro/include/upstream_tracking.inc
@@ -24,14 +24,6 @@
#
RECIPE_UPSTREAM_VERSION_pn-adt-installer = "1.1"
CHECK_DATE_pn-adt-installer = "Sep 11, 2012"
-RECIPE_UPSTREAM_DATE_pn-avahi-ui = "Oct 01, 2010"
-RECIPE_UPSTREAM_VERSION_pn-avahi = "0.6.31"
-RECIPE_UPSTREAM_DATE_pn-avahi = "Oct 01, 2010"
-RECIPE_UPSTREAM_VERSION_pn-bash = "4.2"
-RECIPE_UPSTREAM_DATE_pn-bash = "Dec 01, 2009"
-RECIPE_UPSTREAM_VERSION_pn-bc = "1.06"
-CHECK_DATA_pn-bc = "Jul 27, 2012"
-RECIPE_UPSTREAM_DATE_pn-bc = "Nov 01, 2000"
RECIPE_UPSTREAM_VERSION_pn-bdwgc = "gc-7.2d"
CHECK_DATE_pn-bdwgc = "Aug 27, 2012"
RECIPE_NO_UPDATE_REASON_pn-bluez4 = "BlueZ 5.x is not backward-compatible; components that interact with bluez not updated"
@@ -40,34 +32,10 @@ RECIPE_NO_UPDATE_REASON_pn-cdrtools = "v3.x uses incompatible CDDL license"
RECIPE_UPSTREAM_VERSION_pn-clutter = "1.10.0"
RECIPE_UPSTREAM_DATE_pn-clutter = "Mar 22, 2012"
CHECK_DATE_pn-clutter = "Aug 28, 2012"
-RECIPE_UPSTREAM_VERSION_pn-core-image-minimal = "1.0"
RECIPE_NO_UPDATE_REASON_pn-createrepo = "Versions after 0.9.* use YUM, so we hold at 0.4.11"
-RECIPE_UPSTREAM_VERSION_pn-cups = "1.6.2"
-RECIPE_UPSTREAM_DATE_pn-cups = "Mar 18, 2013"
-CHECK_DATE_pn-cups = "Jun 11, 2013"
RECIPE_NO_UPDATE_REASON_pn-db= "API compatibility issue"
-RECIPE_NO_UPDATE_REASON_pn-dbus = "D-BUS 1.7.x is the development version, not stable."
-RECIPE_NO_UPDATE_REASON_pn-docbook-sgml-dtd-3.1-native = "PRS Reports Incorrectly"
-RECIPE_NO_UPDATE_REASON_pn-docbook-sgml-dtd-4.1-native = "PRS Reports Incorrectly"
-RECIPE_NO_UPDATE_REASON_pn-docbook-sgml-dtd-4.5-native = "PRS Reports Incorrectly"
-RECIPE_UPSTREAM_VERSION_pn-eee-acpi-scripts = "1.1.12+9d4cdedca25b396405f8587f9c4fbf8229e041c2"
-RECIPE_UPSTREAM_DATE_pn-eee-acpi-scripts = "Oct 3, 2011"
-CHECK_DATE_pn-eee-acpi-scripts = "Aug 28, 2012"
-RECIPE_UPSTREAM_VERSION_pn-formfactor = "0.0"
-RECIPE_UPSTREAM_DATE_pn-formfactor = "n/a"
-RECIPE_UPSTREAM_VERSION_pn-fstests="0.0+svnr426"
-CHECK_DATE_pn-fstests = "Jul 06, 2011"
-RECIPE_UPSTREAM_VERSION_pn-gaku="0.0+svnr399"
+RECIPE_NO_UPDATE_REASON_pn-dbus = "D-BUS 1.9.x is the development version, not stable."
RECIPE_NO_UPDATE_REASON_pn-gawk = "Version 4.1.0 requires Automake 1.13, but have 1.12.6"
-RECIPE_UPSTREAM_DATE_pn-gaku="Jul 03, 2008"
-CHECK_DATE_pn-gaku = "Aug 29, 2012"
-RECIPE_UPSTREAM_VERSION_pn-gdk-pixbuf = "2.27.2"
-RECIPE_UPSTREAM_DATE_pn-gdk-pixbuf = "Mar 3, 2013"
-CHECK_DATE_pn-gdk-pixbuf = "Mar 5, 2013"
-RECIPE_NO_UPDATE_REASON_pn-gdk-pixbuf = "2.27.2 is unstable"
-RECIPE_UPSTREAM_VERSION_pn-glibc = "2.11.2"
-RECIPE_UPSTREAM_DATE_pn-glibc = "May 01, 2010"
-RECIPE_NO_UPDATE_REASON_pn-glibc = "two glibc plugins are based on 2.10.1"
RECIPE_NO_UPDATE_REASON_pn-glib-networking = "Pending Glib-2.0 Update"
RECIPE_UPSTREAM_VERSION_pn-gnome-desktop = "3.7.90"
RECIPE_UPSTREAM_DATE_pn-gnome-desktop = "Feb 19, 2013"
@@ -83,66 +51,35 @@ CHECK_DATE_pn-gnome-keyring = "Mar 5, 2013"
RECIPE_NO_UPDATE_REASON_pn-gnome-keyring = "waiting for the sato gtk3 port"
RECIPE_NO_UPDATE_REASON_pn-gobject_introspection = "Does not cross-compile"
RECIPE_NO_UPDATE_REASON_pn-groff = "1.18.1.4 is latest GPLv2 Version no 1.21"
-RECIPE_NO_UPDATE_REASON_pn-grub="grub2 is a different thing"
RECIPE_UPSTREAM_DATE_pn-gst-meta-base="n/a"
RECIPE_NO_UPDATE_REASON_pn-gst-plugins-base = "not compatible with gst-fluendo 0.10.x"
-RECIPE_NO_UPDATE_REASON_pn-gst-ffmpeg = "not compatible with gst-fluendo 0.10.x"
RECIPE_NO_UPDATE_REASON_pn-gst-plugins-bad = "not compatible with gst-fluendo 0.10.x"
RECIPE_NO_UPDATE_REASON_pn-gst-plugins-good = "not compatible with gst-fluendo 0.10.x"
-RECIPE_NO_UPDATE_REASON_pn-gst-plugins-ugly = "not compatible with gst-fluendo 0.10.x"
RECIPE_NO_UPDATE_REASON_pn-gstreamer=" not compatible with gst-fluendo 0.10.x "
RECIPE_NO_UPDATE_REASON_pn-gtk+ = "Do not upgrade to version: 2.99.2 because prefer stable tree"
-RECIPE_NO_UPDATE_REASON_pn-gtk-doc-stub = "PRS Reports Incorrectly"
RECIPE_UPSTREAM_VERSION_pn-gtk-engines = "2.91.1"
RECIPE_UPSTREAM_DATE_pn-gtk-engines = "Oct 9, 2010"
CHECK_DATE_pn-gtk-engines = "Mar 5, 2013"
RECIPE_NO_UPDATE_REASON_pn-gtk-engines = "2.91.1 is a 3.0 beta release"
-RECIPE_UPSTREAM_VERSION_pn-icu = "4.8.1.1"
-RECIPE_UPSTREAM_DATE_pn-icu = "Oct 10, 2011"
-CHECK_DATE_pn-icu = "Aug 29, 2012"
RECIPE_UPSTREAM_VERSION_pn-initscripts = "9.40"
RECIPE_UPSTREAM_DATE_pn-initscripts = "Aug 06, 2012"
CHECK_DATE_pn-initscripts = "Aug 29, 2012"
-RECIPE_UPSTREAM_VERSION_pn-inputproto = "2.2.99.1"
-RECIPE_UPSTREAM_DATE_pn-inputproto = "Jan 5, 2013"
-CHECK_DATE_pn-inputproto = "Mar 5, 2013"
-RECIPE_NO_UPDATE_REASON_pn-inputproto = "2.2.99.1 is unstable"
RECIPE_UPSTREAM_VERSION_pn-jpeg = "9"
RECIPE_UPSTREAM_DATE_pn-jpeg = "Jan 13, 2013"
CHECK_DATE_pn-jpeg = "Mar 5, 2013"
RECIPE_NO_UPDATE_REASON_pn-jpeg = "webkit-gtk 1.8.3 doesn't work with jpeg 9"
-RECIPE_NO_UPDATE_REASON_pn-kconfig-frontends = "PRS Reports Incorrectly"
RECIPE_NO_UPDATE_REASON_pn-kernelshark = "0.2 is the latest version."
-RECIPE_UPSTREAM_VERSION_pn-keymaps = "1.0"
-RECIPE_UPSTREAM_DATE_pn-keymaps = "n/a"
-RECIPE_UPSTREAM_VERSION_pn-kmod = "9"
-RECIPE_UPSTREAM_DATE_pn-kmod = "Jun 19, 2012"
-CHECK_DATE_pn-kmod = "Jul 27, 2012"
-CHECK_DATE_pn-latencytop = "Jul 30, 2014"
RECIPE_NO_UPDATE_REASON_pn-liberation-fonts = "2.00.0 - fontforge package required "
RECIPE_UPSTREAM_VERSION_pn-libgnome-keyring = "3.7.91"
RECIPE_UPSTREAM_DATE_pn-libgnome-keyring = "Mar 4, 2013"
CHECK_DATE_pn-libgnome-keyring = "Mar 5, 2013"
RECIPE_NO_UPDATE_REASON_pn-libgnome-keyring = "waiting for the sato gtk3 port"
-RECIPE_UPSTREAM_VERSION_pn-libiconv = "1.14"
-RECIPE_UPSTREAM_DATE_pn-libiconv = "Aug 07, 2011"
-CHECK_DATE_pn-libiconv = "Aug 30, 2012"
RECIPE_NO_UPDATE_REASON_pn-libnl = "libnl-3.2.2 is incompatible with libnl2, so no Upgrade"
RECIPE_UPSTREAM_VERSION_pn-libsoup = "2.41.90"
-RECIPE_UPSTREAM_VERSION_pn-libpng = "1.6.6"
-RECIPE_UPSTREAM_DATE_pn-libpng = "Sep 16, 2013"
-CHECK_DATE_pn-libpng = "Oct 02, 2013"
-RECIPE_UPSTREAM_VERSION_pn-libpng12 = "1.2.50"
-RECIPE_UPSTREAM_DATE_pn-libpng12 = "Jul 10, 2012"
-CHECK_DATE_pn-libpng12 = "Oct 02, 2013"
RECIPE_UPSTREAM_DATE_pn-libsoup = "Feb 19, 2013"
CHECK_DATE_pn-libsoup = "Mar 5, 2013"
RECIPE_NO_UPDATE_REASON_pn-libsoup = "2.41.90 is unstable"
-RECIPE_NO_UPDATE_REASON_pn-libtheora = "1.1.1 is latest stable release not 1.2.0alpha1"
RECIPE_NO_UPDATE_REASON_pn-libunique = "Do not upgrade to version: 2.91.4 because it requires gtk+ >= 2.90.0 and the stable version of gtk+ used by poky is 2.20.1"
-RECIPE_UPSTREAM_VERSION_pn-liburcu = "0.7.0"
-RECIPE_UPSTREAM_DATE_pn-liburcu = "May 25, 2012"
-CHECK_DATE_pn-liburcu = "Aug 30, 2012"
RECIPE_UPSTREAM_VERSION_pn-libusb = "1.0.9"
RECIPE_UPSTREAM_DATE_pn-libusb = "Apr 20, 2012"
CHECK_DATE_pn-libusb = "Aug 30, 2012"
@@ -150,68 +87,35 @@ RECIPE_UPSTREAM_DATE_pn-linuxdoc-tools-native = "Nov 22, 2009"
RECIPE_UPSTREAM_VERSION_pn-linuxdoc-tools-native = "0.9.66"
CHECK_DATE_pn-linuxdoc-tools-native = "Aug 31, 2012"
RECIPE_NO_UPDATE_REASON-libxml2 = "only release candidates available "
-RECIPE_UPSTREAM_VERSION_pn-lsbtest = "1.0"
-CHECK_DATE_pn-lsof = "Jul 30, 2014"
-RECIPE_UPSTREAM_VERSION_pn-meta-ide-support = "check"
-RECIPE_UPSTREAM_VERSION_pn-minicom = "2.6.2"
-RECIPE_UPSTREAM_DATE_pn-minicom = "Feb 06, 2013"
-CHECK_DATE_pn-minicom = "Oct 22, 2013"
+CHECK_DATE_pn-lsof = "Dec 16, 2014"
+RECIPE_UPSTREAM_VERSION_pn-lz4 = "r123"
+RECIPE_UPSTREAM_DATE_pn-lz4 = "Aug 28, 2014"
+CHECK_DATE_pn-lz4 = "Aug 30, 2014"
RECIPE_UPSTREAM_DATE_pn-module-init-tools = "Jun 02, 2011"
RECIPE_UPSTREAM_VERSION_pn-module-init-tools = "3.15"
CHECK_DATE_pn-module-init-tools = "Aug 31, 2012"
-RECIPE_UPSTREAM_DATE_pn-modutils-initscripts = "n/a"
-RECIPE_NO_UPDATE_REASON_pn-mpeg2dec = "Why are we currently at 0.4.1?"
RECIPE_NO_UPDATE_REASON_pn-mx-1.0 = "PRS 1.99 is dev version"
-RECIPE_UPSTREAM_VERSION_pn-ofono = "1.6"
-RECIPE_UPSTREAM_DATE_pn-ofono="Apr 20, 2012"
-CHECK_DATE_pn-ofono = "Aug 31, 2012"
-RECIPE_NO_UPDATE_REASON_pn-openssl = "1.0.1 is beta 1.0.0c is latest"
-RECIPE_UPSTREAM_DATE_pn-opkg-collateral = "n/a"
-RECIPE_UPSTREAM_VERSION_pn-opkg = "0.1.8"
-RECIPE_NO_UPDATE_REASON_pn-oprofileui-server = "PRS Reports Incorrectly"
-CHECK_DATE_pn-opkg = "Aug 31, 2012"
-RECIPE_UPSTREAM_DATE_pn-opkg = "Feb 01, 2010"
+CHECK_DATE_pn-nettle = "Aug 21, 2014"
+RECIPE_NO_UPDATE_REASON_pn-nettle = "3.0.0 breaks gnutls, api changes"
RECIPE_NO_UPDATE_REASON_pn-pkgconfig = "removes glib-conf, adds circular depends"
-RECIPE_UPSTREAM_VERSION_pn-pointercal = "0.0"
-RECIPE_UPSTREAM_DATE_pn-pointercal = "n/a"
RECIPE_UPSTREAM_DATE_pn-poky-feed-config-opkg = "n/a"
RECIPE_UPSTREAM_DATE_pn-polkit = "Jan 09, 2013"
RECIPE_UPSTREAM_VERSION_pn-polkit = "0.110"
CHECK_DATE_pn-polkit = "Mar 14, 2013"
RECIPE_NO_UPDATE_REASON_pn-polkit = "requires spidermonkey 1.8.5, not available in poky"
-RECIPE_UPSTREAM_VERSION_pn-pong-clock = "1.0"
-RECIPE_NO_UPDATE_REASON_pn-powertop = "Do not upgrade to version 1.97 since it's a beta release of the comming PowerTOP 2.0"
-RECIPE_UPSTREAM_VERSION_pn-ppp-dialin = "check"
-RECIPE_UPSTREAM_VERSION_pn-prelink="1.0+git1+9552f214c7faa816fdb55b289a457f0cab034f00"
RECIPE_NO_UPDATE_REASON_pn-python-distribute = "0.7.3 only upgrades 0.6.32, can't install on its own, also obsolete"
RECIPE_UPSTREAM_VERSION_pn-python-distribute = "0.7.3"
RECIPE_NO_UPDATE_REASON_pn-python-pygobject = "Newer versions of python-pygobject depend on gobject-introspection which doesn't cross-compile"
RECIPE_UPSTREAM_VERSION_pn-python-pygobject = "3.13.3"
-CHECK_DATE_pn-prelink = "Aug 31, 2012"
RECIPE_UPSTREAM_DATE_pn-qemu-helper-native = "n/a"
RECIPE_NO_UPDATE_REASON_pn-qemugl = "Do not upgrade to version: 33466cd8 because the upstream is broken in x86_64 build"
-RECIPE_UPSTREAM_VERSION_pn-rpm = "5.4.10"
-RECIPE_UPSTREAM_DATE_pn-rpm = "Jul 06, 2012"
-CHECK_DATE_pn-rpm = "Sep 03, 2012"
-RECIPE_UPSTREAM_VERSION_pn-run-postinsts = "check"
-RECIPE_NO_UPDATE_REASON_pn-sato-screenshot = "PRS Reports Incorrectly"
-RECIPE_NO_UPDATE_REASON_pn-socat = "2.0.0 is beta"
-RECIPE_UPSTREAM_VERSION_pn-sqlite3 = "3.8.0.2"
-RECIPE_UPSTREAM_DATE_pn-sqlite3 = "Sep 03, 2013"
-CHECK_DATE_pn-sqlite3= "Oct 02, 2013"
-RECIPE_UPSTREAM_VERSION_pn-squashfs-tools = "4.2"
-RECIPE_UPSTREAM_DATE_pn-squashfs-tools = "Feb 28, 2011"
-CHECK_DATE_pn-squashfs-tools = "Nov 09, 2012"
RECIPE_NO_UPDATE_REASON_pn-syslinux="Waiting for 4.06 for 3.3 kernel headers support"
RECIPE_NO_UPDATE_REASON_pn-texinfo = "Checking script parses directory wrong"
RECIPE_UPSTREAM_VERSION_pn-tinylogin = "1.4"
CHECK_DATE_pn-tinylogin = "Sep 03, 2012"
RECIPE_UPSTREAM_DATE_pn-tinylogin = "Jan 03, 2003"
RECIPE_NO_UPDATE_REASON_pn-tinylogin = "TinyLogin was merged into BusyBox."
-RECIPE_UPSTREAM_VERSION_pn-tremor="20120122"
-RECIPE_UPSTREAM_DATE_pn-tremor="Jan 22, 2012"
-CHECK_DATE_pn-tremor = "Mar 11, 2013"
-RECIPE_NO_UPDATE_REASON_pn-tremor = "Same revision on different SVN modules"
+
RECIPE_UPSTREAM_VERSION_pn-ubootchart = "0.0+r12"
CHECK_DATE_pn-ubootchart = "Sep 11, 2012"
RECIPE_UPSTREAM_DATE_pn-udev-extraconf = "n/a"
@@ -221,10 +125,6 @@ CHECK_DATE_pn-unifdef-native = "Sep 03, 2012"
RECIPE_UPSTREAM_VERSION_pn-unzip = "6.0"
RECIPE_UPSTREAM_DATE_pn-unzip = "Apr 25, 2009"
CHECK_DATE_pn-unzip = "Sep 03, 2012"
-RECIPE_UPSTREAM_VERSION_pn-usbinit = "n/a"
-RECIPE_UPSTREAM_VERSION_pn-usbutils = "006"
-RECIPE_UPSTREAM_DATE_pn-usbutils = "Jul 06, 2012"
-CHECK_DATE_pn-usbutils = "Sep 03, 2012"
RECIPE_NO_UPDATE_REASON_pn-vte = "Pending Glib-2.0 Update"
RECIPE_UPSTREAM_VERSION_pn-webkit-gtk = "1.10.2"
RECIPE_UPSTREAM_DATE_pn-webkit-gtk = "Dec 12, 2012"
@@ -232,18 +132,39 @@ CHECK_DATE_pn-webkit-gtk = "Mar 5, 2013"
RECIPE_NO_UPDATE_REASON_pn-webkit-gtk = ">= 1.10.2 needs ruby"
RECIPE_UPSTREAM_DATE_pn-x11-common="Sep 22, 2008"
RECIPE_UPSTREAM_VERSION_pn-x11-common = "1:7.6"
-CHECK_DAte_pn-x11-common = "Sep 03, 2012"
+CHECK_DATE_pn-x11-common = "Sep 03, 2012"
RECIPE_NO_UPDATE_REASON_pn-xdg-utils = "only release candidates available "
-RECIPE_UPSTREAM_VERSION_pn-xev = "1.0.4"
-RECIPE_UPSTREAM_DATE_pn-xev = "Oct 13, 2009"
-CHECK_DATE_pn-xev = "Sep 04, 2012"
-CHECK_DATE_pn-xinetd = "Jul 30, 2014"
RECIPE_UPSTREAM_DATE_pn-xorg-minimal-fonts = "n/a"
RECIPE_UPSTREAM_DATE_pn-xserver-nodm-init=""
-RECIPE_UPSTREAM_VERSION_pn-xz = "5.0.4"
-RECIPE_UPSTREAM_DATE_pn-xz = "unknown"
-CHECK_DATE_pn-xz = "Sep 04, 2012"
RECIPE_NO_UPDATE_REASON_pn-zaurusd = "SVN rev 426 is last Stable release"
RECIPE_UPSTREAM_VERSION_pn-zip = "3.0"
RECIPE_UPSTREAM_DATE_pn-zip = "Sep 01, 2009"
CHECK_DATE_pn-zip = "Sep 04, 2012"
+
+# Git repositories don't have tags
+RECIPE_UPSTREAM_VERSION_pn-x264 = "r2491"
+RECIPE_UPSTREAM_DATE_pn-x264 = "Nov 13, 2014"
+CHECK_DATE_pn-x264 = "Dec 17, 2014"
+
+RECIPE_UPSTREAM_VERSION_pn-mobile-broadband-provider-info = "20140618"
+RECIPE_UPSTREAM_DATE_pn-mobile-broadband-provider-info = "Jun 18, 2014"
+CHECK_DATE_pn-mobile-broadband-provider-info = "Dec 17, 2014"
+
+# SVN support isn't implemented
+RECIPE_UPSTREAM_VERSION_pn-tremor="20120314"
+RECIPE_UPSTREAM_DATE_pn-tremor="Jan 22, 2012"
+CHECK_DATE_pn-tremor = "Mar 11, 2013"
+RECIPE_NO_UPDATE_REASON_pn-tremor = "Same revision on different SVN modules"
+
+RECIPE_UPSTREAM_VERSION_pn-puzzles="r10286"
+RECIPE_UPSTREAM_DATE_pn-puzzles="Oct 20, 2014"
+CHECK_DATE_pn-puzzles = "Dec 18, 2014"
+
+RECIPE_UPSTREAM_VERSION_pn-unfs3="0.9.22.r494"
+RECIPE_UPSTREAM_DATE_pn-unfs3="Mar 14, 2014"
+CHECK_DATE_pn-unfs3 = "Dec 18, 2014"
+
+# Incompatible version format
+RECIPE_UPSTREAM_VERSION_pn-sqlite3="3.8.7.4"
+RECIPE_UPSTREAM_DATE_pn-sqlite3="Jan 22, 2012"
+CHECK_DATE_pn-sqlite3 = "Mar 11, 2013"
diff --git a/meta-yocto/conf/distro/poky-lsb.conf b/meta-yocto/conf/distro/poky-lsb.conf
index b0c2cf87f5..a9030a262e 100644
--- a/meta-yocto/conf/distro/poky-lsb.conf
+++ b/meta-yocto/conf/distro/poky-lsb.conf
@@ -11,4 +11,5 @@ PREFERRED_PROVIDER_virtual/libx11 = "libx11"
# Ensure the kernel nfs server is enabled
KERNEL_FEATURES_append_pn-linux-yocto = " features/nfsd/nfsd-enable.scc"
-
+# Use the LTSI Kernel for LSB Testing
+PREFERRED_VERSION_linux-yocto_linuxstdbase ?= "3.10%"
diff --git a/meta-yocto/conf/distro/poky-tiny.conf b/meta-yocto/conf/distro/poky-tiny.conf
index 335049103f..75508c107f 100644
--- a/meta-yocto/conf/distro/poky-tiny.conf
+++ b/meta-yocto/conf/distro/poky-tiny.conf
@@ -49,7 +49,9 @@ TCLIBCAPPEND = ""
# Disable wide char support for ncurses as we don't include it in
# the LIBC features below.
-ENABLE_WIDEC="false"
+# Leave it enabled for native builds to avoid build failures
+ENABLE_WIDEC = "false"
+ENABLE_WIDEC_class-native = "true"
# Drop native language support. This removes the
# eglibc->bash->gettext->libc-posix-clang-wchar dependency.
@@ -91,6 +93,8 @@ DISTRO_FEATURES = "${DISTRO_FEATURES_TINY} \
# Enable LFS - see bug YOCTO #5865
DISTRO_FEATURES_append_libc-uclibc = " largefile"
+DISTRO_FEATURES_class-native = "${DISTRO_FEATURES_DEFAULT} ${DISTRO_FEATURES_LIBC} ${POKY_DEFAULT_DISTRO_FEATURES}"
+
# Use tmpdevfs and the busybox runtime services
VIRTUAL-RUNTIME_dev_manager = ""
VIRTUAL-RUNTIME_login_manager = ""
@@ -119,7 +123,6 @@ MACHINE_ESSENTIAL_EXTRA_RDEPENDS = ""
# this script for the purposes of tiny, remove the dependency from here.
RDEPENDS_${PN}-mtrace_pn-eglibc = ""
-INHERIT += "blacklist"
PNBLACKLIST[build-appliance-image] = "not buildable with poky-tiny"
PNBLACKLIST[core-image-base] = "not buildable with poky-tiny"
PNBLACKLIST[core-image-clutter] = "not buildable with poky-tiny"
diff --git a/meta-yocto/conf/distro/poky.conf b/meta-yocto/conf/distro/poky.conf
index ec251f9b83..4d143e257d 100644
--- a/meta-yocto/conf/distro/poky.conf
+++ b/meta-yocto/conf/distro/poky.conf
@@ -1,7 +1,7 @@
DISTRO = "poky"
DISTRO_NAME = "Poky (Yocto Project Reference Distro)"
-DISTRO_VERSION = "1.6+snapshot-${DATE}"
-DISTRO_CODENAME = "next"
+DISTRO_VERSION = "1.7"
+DISTRO_CODENAME = "dizzy"
SDK_VENDOR = "-pokysdk"
SDK_VERSION := "${@'${DISTRO_VERSION}'.replace('snapshot-${DATE}','snapshot')}"
@@ -37,6 +37,7 @@ DISTRO_EXTRA_RRECOMMENDS += " ${POKY_DEFAULT_EXTRA_RRECOMMENDS}"
POKYQEMUDEPS = "${@bb.utils.contains("INCOMPATIBLE_LICENSE", "GPLv3", "", "packagegroup-core-device-devel",d)}"
DISTRO_EXTRA_RDEPENDS_append_qemuarm = " ${POKYQEMUDEPS}"
+DISTRO_EXTRA_RDEPENDS_append_qemuarm64 = " ${POKYQEMUDEPS}"
DISTRO_EXTRA_RDEPENDS_append_qemumips = " ${POKYQEMUDEPS}"
DISTRO_EXTRA_RDEPENDS_append_qemuppc = " ${POKYQEMUDEPS}"
DISTRO_EXTRA_RDEPENDS_append_qemux86 = " ${POKYQEMUDEPS}"
@@ -44,7 +45,7 @@ DISTRO_EXTRA_RDEPENDS_append_qemux86-64 = " ${POKYQEMUDEPS}"
TCLIBCAPPEND = ""
-QEMU_TARGETS ?= "arm i386 mips mipsel ppc x86_64"
+QEMU_TARGETS ?= "arm aarch64 i386 mips mipsel ppc x86_64"
# Other QEMU_TARGETS "mips64 mips64el sh4"
PREMIRRORS ??= "\
@@ -73,22 +74,15 @@ CONNECTIVITY_CHECK_URIS ?= " \
http://bugzilla.yoctoproject.org/report.cgi"
SANITY_TESTED_DISTROS ?= " \
- Poky-1.4 \n \
- Poky-1.5 \n \
Poky-1.6 \n \
+ Poky-1.7 \n \
Ubuntu-12.04 \n \
- Ubuntu-13.10 \n \
Ubuntu-14.04 \n \
- Fedora-19 \n \
+ Ubuntu-14.10 \n \
Fedora-20 \n \
- CentOS-6.4 \n \
CentOS-6.5 \n \
- Debian-7.0 \n \
- Debian-7.1 \n \
- Debian-7.2 \n \
- Debian-7.3 \n \
- Debian-7.4 \n \
- SUSE-LINUX-12.2 \n \
+ Debian-7.* \n \
+ Debian-8.* \n \
openSUSE-project-12.3 \n \
openSUSE-project-13.1 \n \
"
@@ -101,7 +95,7 @@ BB_SIGNATURE_HANDLER ?= 'OEBasicHash'
# that breaks the format and have been previously discussed on the mailing list
# with general agreement from the core team.
#
-OELAYOUT_ABI = "8"
+OELAYOUT_ABI = "10"
# add poky sanity bbclass
INHERIT += "poky-sanity"
diff --git a/meta-yocto/conf/local.conf.sample b/meta-yocto/conf/local.conf.sample
index 93c85c222e..a1d99f913e 100644
--- a/meta-yocto/conf/local.conf.sample
+++ b/meta-yocto/conf/local.conf.sample
@@ -18,6 +18,7 @@
# of emulated machines available which can boot and run in the QEMU emulator:
#
#MACHINE ?= "qemuarm"
+#MACHINE ?= "qemuarm64"
#MACHINE ?= "qemumips"
#MACHINE ?= "qemuppc"
#MACHINE ?= "qemux86"
diff --git a/meta-yocto/conf/local.conf.sample.extended b/meta-yocto/conf/local.conf.sample.extended
index f7d8798e7f..ccdd326827 100644
--- a/meta-yocto/conf/local.conf.sample.extended
+++ b/meta-yocto/conf/local.conf.sample.extended
@@ -25,8 +25,8 @@
# be appropriate for example.
-# eglibc configurability is used to reduce minimal image's size.
-# the all supported eglibc options are listed in DISTRO_FEATURES_LIBC
+# glibc configurability is used to reduce the minimal image's size.
+# All the supported glibc options are listed in DISTRO_FEATURES_LIBC
# and disabled by default. Uncomment and copy the DISTRO_FEATURES_LIBC
# and DISTRO_FEATURES definitions to local.conf to enable the options.
#DISTRO_FEATURES_LIBC = "ipv6 libc-backtrace libc-big-macros libc-bsd libc-cxx-tests libc-catgets libc-charsets libc-crypt \
@@ -145,12 +145,12 @@
#MULTILIBS = "multilib:lib32"
#DEFAULTTUNE_virtclass-multilib-lib32 = "x86"
-# Set RPM_PREFER_COLOR to configure preferred ABI when using rpm packaging
+# Set RPM_PREFER_ELF_ARCH to configure preferred ABI when using rpm packaging
# backend to generate a rootfs, choices are:
# 1: ELF32 wins
# 2: ELF64 wins
-# 3: ELF64 N32 wins (for mips64 or mips64el only)
-#RPM_PREFER_COLOR ?= "2"
+# 4: ELF64 N32 wins (for mips64 or mips64el only)
+#RPM_PREFER_ELF_ARCH ?= "2"
# The network based PR service host and port
# Uncomment the following lines to enable PRservice.
@@ -361,3 +361,14 @@
# feed layout is used where package files are placed in <outdir>/<arch>/.
#
#IPK_HIERARCHICAL_FEED = "1"
+#
+
+# Using RPM4
+#
+# Currently the rootfs_rpm code has a hard dependency on rpmresolve:do_populate_sysroot;
+# when using rpm4 the rpmresolve code will not compile due to a missing header file.
+# That dependency needs to be removed when using RPM4, and PREFERRED_VERSION needs
+# to be set. This example shows how to enable rpm4:
+# PREFERRED_VERSION_rpm = "4.11.2"
+# PREFERRED_VERSION_rpm-native = "4.11.2"
+# RPMROOTFSDEPENDS_remove = "rpmresolve-native:do_populate_sysroot"
diff --git a/meta-yocto/conf/toasterconf.json b/meta-yocto/conf/toasterconf.json
new file mode 100644
index 0000000000..14d62b1dcf
--- /dev/null
+++ b/meta-yocto/conf/toasterconf.json
@@ -0,0 +1,97 @@
+{
+ "config": {"MACHINE": "qemux86", "DISTRO": "poky"},
+ "layersources": [
+ {
+ "name": "Local Yocto Project",
+ "sourcetype": "local",
+ "apiurl": "../../",
+ "branches": ["HEAD", "master", "dizzy"],
+ "layers": [
+ {
+ "name": "openembedded-core",
+ "local_path": "meta",
+ "vcs_url": "remote:origin",
+ "dirpath": "meta"
+ },
+ {
+ "name": "meta-yocto",
+ "local_path": "meta-yocto",
+ "vcs_url": "remote:origin",
+ "dirpath": "meta-yocto"
+ },
+ {
+ "name": "meta-yocto-bsp",
+ "local_path": "meta-yocto-bsp",
+ "vcs_url": "remote:origin",
+ "dirpath": "meta-yocto-bsp"
+ }
+
+ ]
+ },
+ {
+ "name": "OpenEmbedded",
+ "sourcetype": "layerindex",
+ "apiurl": "http://layers.openembedded.org/layerindex/api/",
+ "branches": ["master", "dizzy"]
+ },
+ {
+ "name": "Imported layers",
+ "sourcetype": "imported",
+ "apiurl": "",
+ "branches": ["master", "dizzy", "HEAD"]
+
+ }
+ ],
+ "bitbake" : [
+ {
+ "name": "master",
+ "giturl": "remote:origin",
+ "branch": "master",
+ "dirpath": "bitbake"
+ },
+ {
+ "name": "dizzy",
+ "giturl": "remote:origin",
+ "branch": "dizzy",
+ "dirpath": "bitbake"
+ },
+ {
+ "name": "HEAD",
+ "giturl": "remote:origin",
+ "branch": "HEAD",
+ "dirpath": "bitbake"
+ }
+ ],
+
+ "defaultrelease": "master",
+
+ "releases": [
+ {
+ "name": "master",
+ "description": "Yocto Project master",
+ "bitbake": "master",
+ "branch": "master",
+ "defaultlayers": [ "openembedded-core", "meta-yocto", "meta-yocto-bsp"],
+ "layersourcepriority": { "Imported layers": 99, "Local Yocto Project" : 10, "OpenEmbedded" : 0 },
+ "helptext": "Toaster will run your builds using the <a href=\"http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/\">Yocto Project master branch</a>, where active development takes place. This is not a stable branch, so your builds might not work as expected."
+ },
+ {
+ "name": "dizzy",
+ "description": "Yocto Project 1.7 Dizzy",
+ "bitbake": "dizzy",
+ "branch": "dizzy",
+ "defaultlayers": [ "openembedded-core", "meta-yocto", "meta-yocto-bsp"],
+ "layersourcepriority": { "Imported layers": 99, "Local Yocto Project" : 10, "OpenEmbedded" : 0 },
+ "helptext": "Toaster will run your builds with the <a href=\"https://www.yoctoproject.org/downloads/core/dizzy1\">Yocto Project 1.7 \"Dizzy\"</a> release"
+ },
+ {
+ "name": "local",
+ "description": "Local Yocto Project",
+ "bitbake": "HEAD",
+ "branch": "HEAD",
+ "defaultlayers": [ "openembedded-core", "meta-yocto", "meta-yocto-bsp"],
+ "layersourcepriority": { "Imported layers": 99, "Local Yocto Project" : 10, "OpenEmbedded" : 0 },
+ "helptext": "Toaster will run your builds with the version of the Yocto Project you have cloned or downloaded to your computer."
+ }
+ ]
+}
diff --git a/meta/classes/allarch.bbclass b/meta/classes/allarch.bbclass
index c953e7c7d7..4bc99272c4 100644
--- a/meta/classes/allarch.bbclass
+++ b/meta/classes/allarch.bbclass
@@ -37,5 +37,7 @@ python () {
d.setVar("EXCLUDE_FROM_SHLIBS", "1")
d.setVar("INHIBIT_PACKAGE_DEBUG_SPLIT", "1")
d.setVar("INHIBIT_PACKAGE_STRIP", "1")
+ elif bb.data.inherits_class('packagegroup', d) and not bb.data.inherits_class('nativesdk', d):
+ bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE", True))
}
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index efd413bdc4..b598aa3ad6 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -136,7 +136,7 @@ python do_ar_original() {
bb.note('Archiving the original source...')
fetch = bb.fetch2.Fetch([], d)
for url in fetch.urls:
- local = fetch.localpath(url)
+ local = fetch.localpath(url).rstrip("/");
if os.path.isfile(local):
shutil.copy(local, ar_outdir)
elif os.path.isdir(local):
@@ -146,7 +146,12 @@ python do_ar_original() {
fetch.unpack(tmpdir, (url,))
os.chdir(tmpdir)
- tarname = os.path.join(ar_outdir, basename + '.tar.gz')
+ # We eliminate any AUTOINC+ in the revision.
+ try:
+ src_rev = bb.fetch2.get_srcrev(d).replace('AUTOINC+','')
+ except:
+ src_rev = 'NOREV'
+ tarname = os.path.join(ar_outdir, basename + '.' + src_rev + '.tar.gz')
tar = tarfile.open(tarname, 'w:gz')
tar.add('.')
tar.close()
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index c49f0631ab..ca04e7976e 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
@@ -109,7 +109,11 @@ autotools_preconfigure() {
else
# At least remove the .la files since automake won't automatically
# regenerate them even if CFLAGS/LDFLAGS are different
- cd ${S}; find ${S} -name \*.la -delete
+ cd ${S}
+ if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
+ ${MAKE} clean
+ fi
+ find ${S} -name \*.la -delete
fi
fi
fi
@@ -148,7 +152,7 @@ python autotools_copy_aclocals () {
if data[1] == "do_configure" and data[0] == pn:
start = dep
break
- if not start:
+ if start is None:
bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
# We need to find configure tasks which are either from <target> -> <target>
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index c0d61fe7aa..b8f61f3955 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -97,6 +97,7 @@ PATH_prepend = "${@extra_path_elements(d)}"
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
+do_fetch[vardeps] += "SRCREV"
python base_do_fetch() {
src_uri = (d.getVar('SRC_URI', True) or "").split()
@@ -215,11 +216,25 @@ python base_eventhandler() {
}
+CONFIGURESTAMPFILE = "${WORKDIR}/configure.sstate"
+CLEANBROKEN = "0"
+
addtask configure after do_patch
do_configure[dirs] = "${S} ${B}"
do_configure[deptask] = "do_populate_sysroot"
base_do_configure() {
- :
+ if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
+ if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then
+ cd ${B}
+ if [ "${CLEANBROKEN}" != "1" -a \( -e Makefile -o -e makefile -o -e GNUmakefile \) ]; then
+ ${MAKE} clean
+ fi
+ find ${B} -name \*.la -delete
+ fi
+ fi
+ if [ -n "${CONFIGURESTAMPFILE}" ]; then
+ echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
+ fi
}
addtask compile after do_configure
@@ -332,8 +347,6 @@ python () {
extrardeps = []
extraconf = []
for flag, flagval in sorted(pkgconfigflags.items()):
- if flag == "defaultval":
- continue
items = flagval.split(",")
num = len(items)
if num > 4:
@@ -361,7 +374,7 @@ python () {
# obsolete. Return a warning to the user.
princ = d.getVar('PRINC', True)
if princ and princ != "0":
- bb.warn("Use of PRINC %s was detected in the recipe %s (or one of its .bbappends)\nUse of PRINC is deprecated. The PR server should be used to automatically increment the PR. See: https://wiki.yoctoproject.org/wiki/PR_Service." % (princ, d.getVar("FILE", True)))
+ bb.error("Use of PRINC %s was detected in the recipe %s (or one of its .bbappends)\nUse of PRINC is deprecated. The PR server should be used to automatically increment the PR. See: https://wiki.yoctoproject.org/wiki/PR_Service." % (princ, d.getVar("FILE", True)))
pr = d.getVar('PR', True)
pr_prefix = re.search("\D+",pr)
prval = re.search("\d+",pr)
@@ -377,6 +390,7 @@ python () {
bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn)
if bb.data.inherits_class('license', d):
+ check_license_format(d)
unmatched_license_flag = check_license_flags(d)
if unmatched_license_flag:
bb.debug(1, "Skipping %s because it has a restricted license not"
@@ -430,7 +444,7 @@ python () {
check_license = False
if check_license and bad_licenses:
- bad_licenses = map(lambda l: canonical_license(d, l), bad_licenses)
+ bad_licenses = expand_wildcard_licenses(d, bad_licenses)
whitelist = []
for lic in bad_licenses:
diff --git a/meta/classes/boot-directdisk.bbclass b/meta/classes/boot-directdisk.bbclass
index 09da032049..44f738b02e 100644
--- a/meta/classes/boot-directdisk.bbclass
+++ b/meta/classes/boot-directdisk.bbclass
@@ -20,6 +20,7 @@
# ${ROOTFS} - the rootfs image to incorporate
do_bootdirectdisk[depends] += "dosfstools-native:do_populate_sysroot \
+ virtual/kernel:do_deploy \
syslinux:do_populate_sysroot \
syslinux-native:do_populate_sysroot \
parted-native:do_populate_sysroot \
@@ -69,7 +70,7 @@ boot_direct_populate() {
install -d $dest
# Install bzImage, initrd, and rootfs.img in DEST for all loaders to use.
- install -m 0644 ${STAGING_KERNEL_DIR}/bzImage $dest/vmlinuz
+ install -m 0644 ${DEPLOY_DIR_IMAGE}/bzImage $dest/vmlinuz
# initrd is made of concatenation of multiple filesystem images
if [ -n "${INITRD}" ]; then
diff --git a/meta/classes/bootimg.bbclass b/meta/classes/bootimg.bbclass
index 859d517dbd..b1c03ba068 100644
--- a/meta/classes/bootimg.bbclass
+++ b/meta/classes/bootimg.bbclass
@@ -28,6 +28,7 @@
do_bootimg[depends] += "dosfstools-native:do_populate_sysroot \
mtools-native:do_populate_sysroot \
cdrtools-native:do_populate_sysroot \
+ virtual/kernel:do_deploy \
${@oe.utils.ifelse(d.getVar('COMPRESSISO'),'zisofs-tools-native:do_populate_sysroot','')}"
PACKAGES = " "
@@ -66,7 +67,7 @@ populate() {
install -d ${DEST}
# Install bzImage, initrd, and rootfs.img in DEST for all loaders to use.
- install -m 0644 ${STAGING_KERNEL_DIR}/bzImage ${DEST}/vmlinuz
+ install -m 0644 ${DEPLOY_DIR_IMAGE}/bzImage ${DEST}/vmlinuz
# initrd is made of concatenation of multiple filesystem images
if [ -n "${INITRD}" ]; then
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index 479e460c23..2b5f84a87a 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -17,7 +17,7 @@ BUILDHISTORY_COMMIT ?= "0"
BUILDHISTORY_COMMIT_AUTHOR ?= "buildhistory <buildhistory@${DISTRO}>"
BUILDHISTORY_PUSH_REPO ?= ""
-SSTATEPOSTINSTFUNCS += "buildhistory_emit_pkghistory"
+SSTATEPOSTINSTFUNCS_append = " buildhistory_emit_pkghistory"
# We want to avoid influence the signatures of sstate tasks - first the function itself:
sstate_install[vardepsexclude] += "buildhistory_emit_pkghistory"
# then the value added to SSTATEPOSTINSTFUNCS:
@@ -155,7 +155,7 @@ python buildhistory_emit_pkghistory() {
with open(os.path.join(pkgdata_dir, pn)) as f:
for line in f.readlines():
if line.startswith('PACKAGES: '):
- packages = squashspaces(line.split(': ', 1)[1])
+ packages = oe.utils.squashspaces(line.split(': ', 1)[1])
break
except IOError as e:
if e.errno == errno.ENOENT:
@@ -181,7 +181,7 @@ python buildhistory_emit_pkghistory() {
rcpinfo.pe = pe
rcpinfo.pv = pv
rcpinfo.pr = pr
- rcpinfo.depends = sortlist(squashspaces(d.getVar('DEPENDS', True) or ""))
+ rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS', True) or ""))
rcpinfo.packages = packages
write_recipehistory(rcpinfo, d)
@@ -222,13 +222,13 @@ python buildhistory_emit_pkghistory() {
pkginfo.pkge = pkge
pkginfo.pkgv = pkgv
pkginfo.pkgr = pkgr
- pkginfo.rprovides = sortpkglist(squashspaces(pkgdata.get('RPROVIDES', "")))
- pkginfo.rdepends = sortpkglist(squashspaces(pkgdata.get('RDEPENDS', "")))
- pkginfo.rrecommends = sortpkglist(squashspaces(pkgdata.get('RRECOMMENDS', "")))
- pkginfo.rsuggests = sortpkglist(squashspaces(pkgdata.get('RSUGGESTS', "")))
- pkginfo.rreplaces = sortpkglist(squashspaces(pkgdata.get('RREPLACES', "")))
- pkginfo.rconflicts = sortpkglist(squashspaces(pkgdata.get('RCONFLICTS', "")))
- pkginfo.files = squashspaces(pkgdata.get('FILES', ""))
+ pkginfo.rprovides = sortpkglist(oe.utils.squashspaces(pkgdata.get('RPROVIDES', "")))
+ pkginfo.rdepends = sortpkglist(oe.utils.squashspaces(pkgdata.get('RDEPENDS', "")))
+ pkginfo.rrecommends = sortpkglist(oe.utils.squashspaces(pkgdata.get('RRECOMMENDS', "")))
+ pkginfo.rsuggests = sortpkglist(oe.utils.squashspaces(pkgdata.get('RSUGGESTS', "")))
+ pkginfo.rreplaces = sortpkglist(oe.utils.squashspaces(pkgdata.get('RREPLACES', "")))
+ pkginfo.rconflicts = sortpkglist(oe.utils.squashspaces(pkgdata.get('RCONFLICTS', "")))
+ pkginfo.files = oe.utils.squashspaces(pkgdata.get('FILES', ""))
for filevar in pkginfo.filevars:
pkginfo.filevars[filevar] = pkgdata.get(filevar, "")
@@ -499,6 +499,8 @@ POPULATE_SDK_POST_HOST_COMMAND_append = " buildhistory_list_installed_sdk_host ;
SDK_POSTPROCESS_COMMAND += "buildhistory_get_sdkinfo ; "
def buildhistory_get_build_id(d):
+ if d.getVar('BB_WORKERCONTEXT', True) != '1':
+ return ""
localdata = bb.data.createCopy(d)
bb.data.update_data(localdata)
statuslines = []
@@ -523,11 +525,6 @@ def buildhistory_get_metadata_revs(d):
for i in layers]
return '\n'.join(medadata_revs)
-
-def squashspaces(string):
- import re
- return re.sub("\s+", " ", string).strip()
-
def outputvars(vars, listvars, d):
vars = vars.split()
listvars = listvars.split()
@@ -536,7 +533,7 @@ def outputvars(vars, listvars, d):
value = d.getVar(var, True) or ""
if var in listvars:
# Squash out spaces
- value = squashspaces(value)
+ value = oe.utils.squashspaces(value)
ret += "%s = %s\n" % (var, value)
return ret.rstrip('\n')
diff --git a/meta/classes/cmake.bbclass b/meta/classes/cmake.bbclass
index c51b1cf9b2..3549c38f15 100644
--- a/meta/classes/cmake.bbclass
+++ b/meta/classes/cmake.bbclass
@@ -13,6 +13,7 @@ inherit autotools
# C/C++ Compiler (without cpu arch/tune arguments)
OECMAKE_C_COMPILER ?= "`echo ${CC} | sed 's/^\([^ ]*\).*/\1/'`"
OECMAKE_CXX_COMPILER ?= "`echo ${CXX} | sed 's/^\([^ ]*\).*/\1/'`"
+OECMAKE_AR ?= "${AR}"
# Compiler flags
OECMAKE_C_FLAGS ?= "${HOST_CC_ARCH} ${TOOLCHAIN_OPTIONS} ${CFLAGS}"
@@ -34,10 +35,14 @@ set( CMAKE_SYSTEM_NAME `echo ${TARGET_OS} | sed -e 's/^./\u&/' -e 's/^\(Linux\).
set( CMAKE_SYSTEM_PROCESSOR ${TARGET_ARCH} )
set( CMAKE_C_COMPILER ${OECMAKE_C_COMPILER} )
set( CMAKE_CXX_COMPILER ${OECMAKE_CXX_COMPILER} )
+set( CMAKE_ASM_COMPILER ${OECMAKE_C_COMPILER} )
+set( CMAKE_AR ${OECMAKE_AR} CACHE FILEPATH "Archiver" )
set( CMAKE_C_FLAGS "${OECMAKE_C_FLAGS}" CACHE STRING "CFLAGS" )
set( CMAKE_CXX_FLAGS "${OECMAKE_CXX_FLAGS}" CACHE STRING "CXXFLAGS" )
+set( CMAKE_ASM_FLAGS "${OECMAKE_C_FLAGS}" CACHE STRING "ASM FLAGS" )
set( CMAKE_C_FLAGS_RELEASE "${OECMAKE_C_FLAGS_RELEASE}" CACHE STRING "CFLAGS for release" )
set( CMAKE_CXX_FLAGS_RELEASE "${OECMAKE_CXX_FLAGS_RELEASE}" CACHE STRING "CXXFLAGS for release" )
+set( CMAKE_ASM_FLAGS_RELEASE "${OECMAKE_C_FLAGS_RELEASE}" CACHE STRING "ASM FLAGS for release" )
set( CMAKE_C_LINK_FLAGS "${OECMAKE_C_LINK_FLAGS}" CACHE STRING "LDFLAGS" )
set( CMAKE_CXX_LINK_FLAGS "${OECMAKE_CXX_LINK_FLAGS}" CACHE STRING "LDFLAGS" )
@@ -76,6 +81,8 @@ cmake_do_configure() {
rm -rf ${B}
mkdir -p ${B}
cd ${B}
+ else
+ find ${B} -name CMakeFiles -or -name Makefile -or -name cmake_install.cmake -or -name CMakeCache.txt -delete
fi
# Just like autotools cmake can use a site file to cache result that need generated binaries to run
diff --git a/meta/classes/cml1.bbclass b/meta/classes/cml1.bbclass
index b5adc47716..43acfd531b 100644
--- a/meta/classes/cml1.bbclass
+++ b/meta/classes/cml1.bbclass
@@ -9,10 +9,11 @@ addtask configure after do_unpack do_patch before do_compile
inherit terminal
-OE_TERMINAL_EXPORTS += "HOST_EXTRACFLAGS HOSTLDFLAGS HOST_LOADLIBES TERMINFO"
+OE_TERMINAL_EXPORTS += "HOST_EXTRACFLAGS HOSTLDFLAGS TERMINFO CROSS_CURSES_LIB CROSS_CURSES_INC"
HOST_EXTRACFLAGS = "${BUILD_CFLAGS} ${BUILD_LDFLAGS}"
HOSTLDFLAGS = "${BUILD_LDFLAGS}"
-HOST_LOADLIBES = "-lncurses"
+CROSS_CURSES_LIB = "-lncurses -ltinfo"
+CROSS_CURSES_INC = '-DCURSES_LOC="<curses.h>"'
TERMINFO = "${STAGING_DATADIR_NATIVE}/terminfo"
python do_menuconfig() {
diff --git a/meta/classes/compress_doc.bbclass b/meta/classes/compress_doc.bbclass
new file mode 100644
index 0000000000..9b58d82ce5
--- /dev/null
+++ b/meta/classes/compress_doc.bbclass
@@ -0,0 +1,260 @@
+# Compress man pages in ${mandir} and info pages in ${infodir}
+#
+# 1. The doc will be compressed to gz format by default.
+#
+# 2. Docs already compressed in a format that is listed in
+#    ${DOC_COMPRESS_LIST} but differs from ${DOC_COMPRESS} are
+#    automatically re-compressed to the ${DOC_COMPRESS} format
+#
+# 3. A new compression type can easily be added by editing
+# local.conf, such as:
+# DOC_COMPRESS_LIST_append = ' abc'
+# DOC_COMPRESS = 'abc'
+# DOC_COMPRESS_CMD[abc] = 'abc compress cmd ***'
+# DOC_DECOMPRESS_CMD[abc] = 'abc decompress cmd ***'
+
+# All supported compression policies
+DOC_COMPRESS_LIST ?= "gz xz bz2"
+
+# Compression policy, must be one of ${DOC_COMPRESS_LIST}
+DOC_COMPRESS ?= "gz"
+
+# Compression shell command
+DOC_COMPRESS_CMD[gz] ?= 'gzip -v -9 -n'
+DOC_COMPRESS_CMD[bz2] ?= "bzip2 -v -9"
+DOC_COMPRESS_CMD[xz] ?= "xz -v"
+
+# Decompression shell command
+DOC_DECOMPRESS_CMD[gz] ?= 'gunzip -v'
+DOC_DECOMPRESS_CMD[bz2] ?= "bunzip2 -v"
+DOC_DECOMPRESS_CMD[xz] ?= "unxz -v"
+
+PACKAGE_PREPROCESS_FUNCS += "package_do_compress_doc compress_doc_updatealternatives"
+python package_do_compress_doc() {
+ compress_mode = d.getVar('DOC_COMPRESS', True)
+ compress_list = (d.getVar('DOC_COMPRESS_LIST', True) or '').split()
+ if compress_mode not in compress_list:
+ bb.fatal('Compression policy %s not supported (not listed in %s)\n' % (compress_mode, compress_list))
+
+ dvar = d.getVar('PKGD', True)
+ compress_cmds = {}
+ decompress_cmds = {}
+ for mode in compress_list:
+ compress_cmds[mode] = d.getVarFlag('DOC_COMPRESS_CMD', mode)
+ decompress_cmds[mode] = d.getVarFlag('DOC_DECOMPRESS_CMD', mode)
+
+ mandir = os.path.abspath(dvar + os.sep + d.getVar("mandir", True))
+ if os.path.exists(mandir):
+        # Decompress doc files whose format is not compress_mode
+ decompress_doc(mandir, compress_mode, decompress_cmds)
+ compress_doc(mandir, compress_mode, compress_cmds)
+
+ infodir = os.path.abspath(dvar + os.sep + d.getVar("infodir", True))
+ if os.path.exists(infodir):
+        # Decompress doc files whose format is not compress_mode
+ decompress_doc(infodir, compress_mode, decompress_cmds)
+ compress_doc(infodir, compress_mode, compress_cmds)
+}
+
+def _get_compress_format(file, compress_format_list):
+ for compress_format in compress_format_list:
+ compress_suffix = '.' + compress_format
+ if file.endswith(compress_suffix):
+ return compress_format
+
+ return ''
+
+# Collect hardlinks into a dict; each entry lists the hardlinks
+# which point to the same doc file, e.g.:
+# {hardlink10: [hardlink11, hardlink12], ...}
+# where hardlink10, hardlink11 and hardlink12 are all the same file.
+def _collect_hardlink(hardlink_dict, file):
+ for hardlink in hardlink_dict:
+        # Add to the existing hardlink entry
+ if os.path.samefile(hardlink, file):
+ hardlink_dict[hardlink].append(file)
+ return hardlink_dict
+
+ hardlink_dict[file] = []
+ return hardlink_dict
+
+def _process_hardlink(hardlink_dict, compress_mode, shell_cmds, decompress=False):
+ for target in hardlink_dict:
+ if decompress:
+ compress_format = _get_compress_format(target, shell_cmds.keys())
+ cmd = "%s -f %s" % (shell_cmds[compress_format], target)
+ bb.note('decompress hardlink %s' % target)
+ else:
+ cmd = "%s -f %s" % (shell_cmds[compress_mode], target)
+ bb.note('compress hardlink %s' % target)
+ (retval, output) = oe.utils.getstatusoutput(cmd)
+ if retval:
+ bb.warn("de/compress file failed %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))
+ return
+
+ for hardlink_dup in hardlink_dict[target]:
+ if decompress:
+ # Remove compress suffix
+ compress_suffix = '.' + compress_format
+ new_hardlink = hardlink_dup[:-len(compress_suffix)]
+ new_target = target[:-len(compress_suffix)]
+ else:
+ # Append compress suffix
+ compress_suffix = '.' + compress_mode
+ new_hardlink = hardlink_dup + compress_suffix
+ new_target = target + compress_suffix
+
+ bb.note('hardlink %s-->%s' % (new_hardlink, new_target))
+ if not os.path.exists(new_hardlink):
+ os.link(new_target, new_hardlink)
+ if os.path.exists(hardlink_dup):
+ os.unlink(hardlink_dup)
+
+def _process_symlink(file, compress_format, decompress=False):
+ compress_suffix = '.' + compress_format
+ if decompress:
+ # Remove compress suffix
+ new_linkname = file[:-len(compress_suffix)]
+ new_source = os.readlink(file)[:-len(compress_suffix)]
+ else:
+ # Append compress suffix
+ new_linkname = file + compress_suffix
+ new_source = os.readlink(file) + compress_suffix
+
+ bb.note('symlink %s-->%s' % (new_linkname, new_source))
+ if not os.path.exists(new_linkname):
+ os.symlink(new_source, new_linkname)
+
+ os.unlink(file)
+
+def _is_info(file):
+ flags = '.info .info-'.split()
+ for flag in flags:
+ if flag in os.path.basename(file):
+ return True
+
+ return False
+
+def _is_man(file):
+ import re
+
+    # This follows the MANSECT variable in man(1.6g)'s man.config:
+    # ".1:.1p:.8:.2:.3:.3p:.4:.5:.6:.7:.9:.0p:.tcl:.n:.l:.p:.o"
+    # The name must not start with '.' and must contain one of the above colon-separated suffixes
+ p = re.compile(r'[^\.]+\.([1-9lnop]|0p|tcl)')
+ if p.search(file):
+ return True
+
+ return False
+
+def _is_compress_doc(file, compress_format_list):
+ compress_format = _get_compress_format(file, compress_format_list)
+ compress_suffix = '.' + compress_format
+ if file.endswith(compress_suffix):
+ # Remove the compress suffix
+ uncompress_file = file[:-len(compress_suffix)]
+ if _is_info(uncompress_file) or _is_man(uncompress_file):
+ return True, compress_format
+
+ return False, ''
+
+def compress_doc(topdir, compress_mode, compress_cmds):
+ hardlink_dict = {}
+ for root, dirs, files in os.walk(topdir):
+ for f in files:
+ file = os.path.join(root, f)
+ if os.path.isdir(file):
+ continue
+
+ if _is_info(file) or _is_man(file):
+ # Symlink
+ if os.path.islink(file):
+ _process_symlink(file, compress_mode)
+ # Hardlink
+ elif os.lstat(file).st_nlink > 1:
+ _collect_hardlink(hardlink_dict, file)
+ # Normal file
+ elif os.path.isfile(file):
+ cmd = "%s %s" % (compress_cmds[compress_mode], file)
+ (retval, output) = oe.utils.getstatusoutput(cmd)
+ if retval:
+ bb.warn("compress failed %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))
+ continue
+ bb.note('compress file %s' % file)
+
+ _process_hardlink(hardlink_dict, compress_mode, compress_cmds)
+
+# Decompress doc files whose format is not compress_mode
+def decompress_doc(topdir, compress_mode, decompress_cmds):
+ hardlink_dict = {}
+ decompress = True
+ for root, dirs, files in os.walk(topdir):
+ for f in files:
+ file = os.path.join(root, f)
+ if os.path.isdir(file):
+ continue
+
+ res, compress_format = _is_compress_doc(file, decompress_cmds.keys())
+            # Decompress files whose format is not compress_mode
+ if res and compress_mode!=compress_format:
+ # Symlink
+ if os.path.islink(file):
+ _process_symlink(file, compress_format, decompress)
+ # Hardlink
+ elif os.lstat(file).st_nlink > 1:
+ _collect_hardlink(hardlink_dict, file)
+ # Normal file
+ elif os.path.isfile(file):
+ cmd = "%s %s" % (decompress_cmds[compress_format], file)
+ (retval, output) = oe.utils.getstatusoutput(cmd)
+ if retval:
+ bb.warn("decompress failed %s (cmd was %s)%s" % (retval, cmd, ":\n%s" % output if output else ""))
+ continue
+ bb.note('decompress file %s' % file)
+
+ _process_hardlink(hardlink_dict, compress_mode, decompress_cmds, decompress)
+
+python compress_doc_updatealternatives () {
+ if not bb.data.inherits_class('update-alternatives', d):
+ return
+
+ mandir = d.getVar("mandir", True)
+ infodir = d.getVar("infodir", True)
+ compress_mode = d.getVar('DOC_COMPRESS', True)
+ for pkg in (d.getVar('PACKAGES', True) or "").split():
+ old_names = (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split()
+ new_names = []
+ for old_name in old_names:
+ old_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', old_name, True)
+ old_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name, True) or \
+ d.getVarFlag('ALTERNATIVE_TARGET', old_name, True) or \
+ d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or \
+ d.getVar('ALTERNATIVE_TARGET', True) or \
+ old_link
+ # Sometimes old_target is specified as relative to the link name.
+ old_target = os.path.join(os.path.dirname(old_link), old_target)
+
+            # Only adjust the alternatives that point into the compressed doc directories
+ if mandir in old_target or infodir in old_target:
+ new_name = old_name + '.' + compress_mode
+ new_link = old_link + '.' + compress_mode
+ new_target = old_target + '.' + compress_mode
+ d.delVarFlag('ALTERNATIVE_LINK_NAME', old_name)
+ d.setVarFlag('ALTERNATIVE_LINK_NAME', new_name, new_link)
+ if d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name, True):
+ d.delVarFlag('ALTERNATIVE_TARGET_%s' % pkg, old_name)
+ d.setVarFlag('ALTERNATIVE_TARGET_%s' % pkg, new_name, new_target)
+ elif d.getVarFlag('ALTERNATIVE_TARGET', old_name, True):
+ d.delVarFlag('ALTERNATIVE_TARGET', old_name)
+ d.setVarFlag('ALTERNATIVE_TARGET', new_name, new_target)
+ elif d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True):
+ d.setVar('ALTERNATIVE_TARGET_%s' % pkg, new_target)
+ elif d.getVar('ALTERNATIVE_TARGET', old_name, True):
+ d.setVar('ALTERNATIVE_TARGET', new_target)
+
+ new_names.append(new_name)
+
+ if new_names:
+ d.setVar('ALTERNATIVE_%s' % pkg, ' '.join(new_names))
+}
+
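
As a rough sketch of the helper logic in the new class above, the snippet below shows how a man page name is recognised and how the compress suffix is appended or stripped. It reuses the same regular expression and naming convention, but it is a standalone illustration, not part of the patch:

    import re

    MAN_RE = re.compile(r'[^\.]+\.([1-9lnop]|0p|tcl)')

    def is_man(name):
        # "ls.1" or "perlre.3p" match; "README" does not
        return bool(MAN_RE.search(name))

    def add_suffix(path, mode):
        # compressing "ls.1" with gz yields "ls.1.gz"
        return path + '.' + mode

    def strip_suffix(path, mode):
        suffix = '.' + mode
        return path[:-len(suffix)] if path.endswith(suffix) else path

    print(is_man("ls.1"), add_suffix("ls.1", "gz"), strip_suffix("ls.1.gz", "gz"))
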
diff --git a/meta/classes/cpan.bbclass b/meta/classes/cpan.bbclass
index 7088039fa0..e2bbd2f63a 100644
--- a/meta/classes/cpan.bbclass
+++ b/meta/classes/cpan.bbclass
@@ -47,8 +47,8 @@ cpan_do_compile () {
cpan_do_install () {
oe_runmake DESTDIR="${D}" install_vendor
- for PERLSCRIPT in `grep -rIEl '#!${bindir}/perl-native.*/perl' ${D}`; do
- sed -i -e 's|^#!${bindir}/perl-native.*/perl|#!/usr/bin/env nativeperl|' $PERLSCRIPT
+ for PERLSCRIPT in `grep -rIEl '#! *${bindir}/perl-native.*/perl' ${D}`; do
+ sed -i -e 's|${bindir}/perl-native.*/perl|/usr/bin/env nativeperl|' $PERLSCRIPT
done
}
diff --git a/meta/classes/cpan_build.bbclass b/meta/classes/cpan_build.bbclass
index 5b0ad61b4c..2eb8162314 100644
--- a/meta/classes/cpan_build.bbclass
+++ b/meta/classes/cpan_build.bbclass
@@ -3,6 +3,8 @@
#
inherit cpan-base perlnative
+EXTRA_CPAN_BUILD_FLAGS ?= ""
+
# Env var which tells perl if it should use host (no) or target (yes) settings
export PERLCONFIGTARGET = "${@is_target(d)}"
export PERL_ARCHLIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}"
@@ -36,7 +38,8 @@ cpan_build_do_configure () {
--install_path script=${bindir} \
--install_path bin=${bindir} \
--install_path bindoc=${mandir}/man1 \
- --install_path libdoc=${mandir}/man3
+ --install_path libdoc=${mandir}/man3 \
+ ${EXTRA_CPAN_BUILD_FLAGS}
}
cpan_build_do_compile () {
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass
index 6da43fe445..fec6438c60 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes/cross-canadian.bbclass
@@ -111,6 +111,7 @@ export PKG_CONFIG_DIR = "${STAGING_DIR_HOST}${layout_libdir}/pkgconfig"
export PKG_CONFIG_SYSROOT_DIR = "${STAGING_DIR_HOST}"
do_populate_sysroot[stamp-extra-info] = ""
+do_packagedata[stamp-extra-info] = ""
USE_NLS = "${SDKUSE_NLS}"
diff --git a/meta/classes/debian.bbclass b/meta/classes/debian.bbclass
index 1ddb56fbb9..c859703669 100644
--- a/meta/classes/debian.bbclass
+++ b/meta/classes/debian.bbclass
@@ -9,6 +9,8 @@
# Better expressed as ensure all RDEPENDS package before we package
# This means we can't have circular RDEPENDS/RRECOMMENDS
+AUTO_LIBNAME_PKGS = "${PACKAGES}"
+
inherit package
DEBIANRDEP = "do_packagedata"
@@ -50,6 +52,13 @@ python debian_package_name_hook () {
return 0
return (s[stat.ST_MODE] & stat.S_IEXEC)
+ def add_rprovides(pkg, d):
+ newpkg = d.getVar('PKG_' + pkg)
+ if newpkg and newpkg != pkg:
+ provs = (d.getVar('RPROVIDES_' + pkg, True) or "").split()
+ if pkg not in provs:
+ d.appendVar('RPROVIDES_' + pkg, " " + pkg)
+
def auto_libname(packages, orig_pkg):
sonames = []
has_bins = 0
@@ -97,6 +106,7 @@ python debian_package_name_hook () {
(pkgname, devname) = soname_result
for pkg in packages.split():
if (d.getVar('PKG_' + pkg) or d.getVar('DEBIAN_NOAUTONAME_' + pkg)):
+ add_rprovides(pkg, d)
continue
debian_pn = d.getVar('DEBIANNAME_' + pkg)
if debian_pn:
@@ -111,6 +121,9 @@ python debian_package_name_hook () {
newpkg = mlpre + newpkg
if newpkg != pkg:
d.setVar('PKG_' + pkg, newpkg)
+ add_rprovides(pkg, d)
+ else:
+ add_rprovides(orig_pkg, d)
# reversed sort is needed when some package is substring of another
# ie in ncurses we get without reverse sort:
diff --git a/meta/classes/distrodata.bbclass b/meta/classes/distrodata.bbclass
index a890de7911..83aa381fe7 100644
--- a/meta/classes/distrodata.bbclass
+++ b/meta/classes/distrodata.bbclass
@@ -268,240 +268,6 @@ python do_checkpkg() {
import tempfile
import subprocess
- """
- sanity check to ensure same name and type. Match as many patterns as possible
- such as:
- gnome-common-2.20.0.tar.gz (most common format)
- gtk+-2.90.1.tar.gz
- xf86-input-synaptics-12.6.9.tar.gz
- dri2proto-2.3.tar.gz
- blktool_4.orig.tar.gz
- libid3tag-0.15.1b.tar.gz
- unzip552.tar.gz
- icu4c-3_6-src.tgz
- genext2fs_1.3.orig.tar.gz
- gst-fluendo-mp3
- """
- prefix1 = "[a-zA-Z][a-zA-Z0-9]*([\-_][a-zA-Z]\w+)*\+?[\-_]" # match most patterns which uses "-" as separator to version digits
- prefix2 = "[a-zA-Z]+" # a loose pattern such as for unzip552.tar.gz
- prefix3 = "[0-9]+[\-]?[a-zA-Z]+" # a loose pattern such as for 80325-quicky-0.4.tar.gz
- prefix = "(%s|%s|%s)" % (prefix1, prefix2, prefix3)
- ver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"#"((\d+[\.\-_[a-z]])+)"
- # src.rpm extension was added only for rpm package. Can be removed if the rpm
- # packaged will always be considered as having to be manually upgraded
- suffix = "(tar\.gz|tgz|tar\.bz2|tar\.lz4|zip|xz|rpm|bz2|lz4|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
-
- suffixtuple = ("tar.gz", "tgz", "zip", "tar.bz2", "tar.xz", "tar.lz4", "bz2", "lz4", "orig.tar.gz", "src.tar.gz", "src.rpm", "src.tgz", "svnr\d+.tar.bz2", "stable.tar.gz", "src.rpm")
- sinterstr = "(?P<name>%s?)v?(?P<ver>%s)(\-source)?" % (prefix, ver_regex)
- sdirstr = "(?P<name>%s)\.?v?(?P<ver>%s)(\-source)?[\.\-](?P<type>%s$)" % (prefix, ver_regex, suffix)
-
- def parse_inter(s):
- m = re.search(sinterstr, s)
- if not m:
- return None
- else:
- return (m.group('name'), m.group('ver'), "")
-
- def parse_dir(s):
- m = re.search(sdirstr, s)
- if not m:
- return None
- else:
- return (m.group('name'), m.group('ver'), m.group('type'))
-
- def modelate_version(version):
- if version[0] in ['.', '-']:
- if version[1].isdigit():
- version = version[1] + version[0] + version[2:len(version)]
- else:
- version = version[1:len(version)]
-
- version = re.sub('\-', '.', version)
- version = re.sub('_', '.', version)
- version = re.sub('(rc)+', '.-1.', version)
- version = re.sub('(alpha)+', '.-3.', version)
- version = re.sub('(beta)+', '.-2.', version)
- if version[0] == 'v':
- version = version[1:len(version)]
- return version
-
- """
- Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
- purpose. PE is cleared in comparison as it's not for build, and PV is cleared too
- for simplicity as it's somehow difficult to get from various upstream format
- """
- def __vercmp(old, new):
- (on, ov, ot) = old
- (en, ev, et) = new
- if on != en or (et and et not in suffixtuple):
- return False
- ov = modelate_version(ov)
- ev = modelate_version(ev)
-
- result = bb.utils.vercmp(("0", ov, ""), ("0", ev, ""))
- if result < 0:
- return True
- else:
- return False
-
- """
- wrapper for fetch upstream directory info
- 'url' - upstream link customized by regular expression
- 'd' - database
- 'tmpf' - tmpfile for fetcher output
- We don't want to exit whole build due to one recipe error. So handle all exceptions
- gracefully w/o leaking to outer.
- """
- def internal_fetch_wget(url, ud, d, tmpf):
- status = "ErrFetchUnknown"
-
- agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
- fetchcmd = "/usr/bin/env wget -t 1 --passive-ftp -O %s --user-agent=\"%s\" '%s'" % (tmpf.name, agent, url)
- try:
- fetcher = bb.fetch2.wget.Wget(d)
- fetcher._runwget(ud, d, fetchcmd, True)
- status = "SUCC"
- except bb.fetch2.BBFetchException, e:
- status = "ErrFetch"
-
- return status
-
- """
- Check on middle version directory such as "2.4/" in "http://xxx/2.4/pkg-2.4.1.tar.gz",
- 'url' - upstream link customized by regular expression
- 'd' - database
- 'curver' - current version
- Return new version if success, or else error in "Errxxxx" style
- """
- def check_new_dir(url, curver, ud, d):
- pn = d.getVar('PN', True)
- f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-1-" % pn)
- status = internal_fetch_wget(url, ud, d, f)
- fhtml = f.read()
- if status == "SUCC" and len(fhtml):
- newver = parse_inter(curver)
-
- """
- match "*4.1/">*4.1/ where '*' matches chars
- N.B. add package name, only match for digits
- """
- regex = d.getVar('REGEX', True)
- if regex == '':
- regex = "^%s" %prefix
- m = re.search("^%s" % regex, curver)
- if m:
- s = "%s[^\d\"]*?(\d+[\.\-_])+\d+/?" % m.group()
- else:
- s = "(\d+[\.\-_])+\d+/?"
-
- searchstr = "[hH][rR][eE][fF]=\"%s\">" % s
-
- reg = re.compile(searchstr)
- valid = 0
- for line in fhtml.split("\n"):
- if line.find(curver) >= 0:
- valid = 1
- m = reg.search(line)
- if m:
- ver = m.group().split("\"")[1]
- ver = ver.strip("/")
- ver = parse_inter(ver)
- if ver and __vercmp(newver, ver) == True:
- newver = ver
-
- """Expect a match for curver in directory list, or else it indicates unknown format"""
- if not valid:
- status = "ErrParseInterDir"
- else:
- """rejoin the path name"""
- status = newver[0] + newver[1]
- elif not len(fhtml):
- status = "ErrHostNoDir"
-
- f.close()
- if status != "ErrHostNoDir" and re.match("Err", status):
- logpath = d.getVar('LOG_DIR', True)
- subprocess.call("cp %s %s/" % (f.name, logpath), shell=True)
- os.unlink(f.name)
- return status
-
- """
- Check on the last directory to search '2.4.1' in "http://xxx/2.4/pkg-2.4.1.tar.gz",
- 'url' - upstream link customized by regular expression
- 'd' - database
- 'curname' - current package name
- Return new version if success, or else error in "Errxxxx" style
- """
- def check_new_version(url, curname, ud, d):
- """possible to have no version in pkg name, such as spectrum-fw"""
- if not re.search("\d+", curname):
- return pcurver
- pn = d.getVar('PN', True)
- newver_regex = d.getVar('REGEX', True)
- f = tempfile.NamedTemporaryFile(delete=False, prefix="%s-2-" % pn)
- status = internal_fetch_wget(url, ud, d, f)
- fhtml = f.read()
-
- if status == "SUCC" and len(fhtml):
- newver = parse_dir(curname)
-
- if not newver_regex:
- """this is the default matching pattern, if recipe does not """
- """provide a regex expression """
- """match "{PN}-5.21.1.tar.gz">{PN}-5.21.1.tar.gz """
- pn1 = re.search("^%s" % prefix, curname).group()
- s = "[^\"]*%s[^\d\"]*?(\d+[\.\-_])+[^\"]*" % pn1
- searchstr = "[hH][rR][eE][fF]=\"%s\".*[>\"]" % s
- reg = searchstr
- else:
- reg = newver_regex
- valid = 0
- count = 0
- for line in fhtml.split("\n"):
- if pn == 'kconfig-frontends':
- m = re.findall(reg, line)
- if m:
- valid = 1
- for match in m:
- (on, ov, oe) = newver
- ver = (on, match[0], oe)
- if ver and __vercmp(newver, ver) == True:
- newver = ver
- continue
- count += 1
- m = re.search(reg, line)
- if m:
- valid = 1
- if not newver_regex:
- ver = m.group().split("\"")[1].split("/")[-1]
- if ver == "download":
- ver = m.group().split("\"")[1].split("/")[-2]
- ver = parse_dir(ver)
- else:
- """ we cheat a little here, but we assume that the
- regular expression in the recipe will extract exacly
- the version """
- (on, ov, oe) = newver
- ver = (on, m.group('pver'), oe)
- if ver and __vercmp(newver, ver) == True:
- newver = ver
- """Expect a match for curver in directory list, or else it indicates unknown format"""
- if not valid:
- status = "ErrParseDir"
- else:
- """newver still contains a full package name string"""
- status = re.sub('_', '.', newver[1])
- elif not len(fhtml):
- status = "ErrHostNoDir"
-
- f.close()
- """if host hasn't directory information, no need to save tmp file"""
- if status != "ErrHostNoDir" and re.match("Err", status):
- logpath = d.getVar('LOG_DIR', True)
- subprocess.call("cp %s %s/" % (f.name, logpath), shell=True)
- os.unlink(f.name)
- return status
-
"""first check whether a uri is provided"""
src_uri = d.getVar('SRC_URI', True)
if not src_uri:
@@ -543,9 +309,6 @@ python do_checkpkg() {
localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
bb.data.update_data(localdata)
- chk_uri = d.getVar('REGEX_URI', True)
- if not chk_uri:
- chk_uri = src_uri
pdesc = localdata.getVar('DESCRIPTION', True)
pgrp = localdata.getVar('SECTION', True)
if localdata.getVar('PRSPV', True):
@@ -562,232 +325,63 @@ python do_checkpkg() {
psrcuri = localdata.getVar('SRC_URI', True)
maintainer = localdata.getVar('RECIPE_MAINTAINER', True)
+    """ Get the upstream version """
+ pupver = None
+ pstatus = "ErrUnknown"
found = 0
+
for uri in src_uri.split():
- m = re.compile('(?P<type>[^:]*)').match(uri)
- if not m:
- raise MalformedUrl(uri)
- elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
- found = 1
- pproto = m.group('type')
- break
+ m = re.compile('(?P<type>[^:]*)').match(uri)
+ if not m:
+ raise MalformedUrl(uri)
+ elif m.group('type') in ('http', 'https', 'ftp', 'cvs', 'svn', 'git'):
+ found = 1
+ psrcuri = uri
+ pproto = m.group('type')
+ break
if not found:
pproto = "file"
- pupver = "N/A"
- pstatus = "ErrUnknown"
-
- (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(uri)
- if type in ['http', 'https', 'ftp']:
- if d.getVar('PRSPV', True):
- pcurver = d.getVar('PRSPV', True)
- else:
- pcurver = d.getVar('PV', True)
- else:
- if d.getVar('PRSPV', True):
- pcurver = d.getVar('PRSPV', True)
- else:
- pcurver = d.getVar("SRCREV", True)
-
-
- if type in ['http', 'https', 'ftp']:
- ud = bb.fetch2.FetchData(uri, d)
- newver = pcurver
- altpath = path
- dirver = "-"
- curname = "-"
-
- """
- match version number amid the path, such as "5.7" in:
- http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
- N.B. how about sth. like "../5.7/5.8/..."? Not find such example so far :-P
- """
- m = re.search(r"[^/]*(\d+\.)+\d+([\-_]r\d+)*/", path)
- if m:
- altpath = path.split(m.group())[0]
- dirver = m.group().strip("/")
-
- """use new path and remove param. for wget only param is md5sum"""
- alturi = bb.fetch.encodeurl([type, host, altpath, user, pswd, {}])
- my_uri = d.getVar('REGEX_URI', True)
- if my_uri:
- if d.getVar('PRSPV', True):
- newver = d.getVar('PRSPV', True)
- else:
- newver = d.getVar('PV', True)
- else:
- newver = check_new_dir(alturi, dirver, ud, d)
- altpath = path
- if not re.match("Err", newver) and dirver != newver:
- altpath = altpath.replace(dirver, newver, True)
- # For folder in folder cases - try to enter the folder again and then try parsing
- """Now try to acquire all remote files in current directory"""
- if not re.match("Err", newver):
- curname = altpath.split("/")[-1]
-
- """get remote name by skipping pacakge name"""
- m = re.search(r"/.*/", altpath)
- if not m:
- altpath = "/"
- else:
- altpath = m.group()
-
- chk_uri = d.getVar('REGEX_URI', True)
- if not chk_uri:
- alturi = bb.fetch.encodeurl([type, host, altpath, user, pswd, {}])
- else:
- alturi = chk_uri
- newver = check_new_version(alturi, curname, ud, d)
- while(newver == "ErrHostNoDir"):
- if alturi == "/download":
- break
- else:
- alturi = "/".join(alturi.split("/")[0:-2]) + "/download"
- newver = check_new_version(alturi, curname, ud, d)
- if not re.match("Err", newver):
- pupver = newver
- if pupver != pcurver:
- pstatus = "UPDATE"
- else:
- pstatus = "MATCH"
-
- if re.match("Err", newver):
- pstatus = newver + ":" + altpath + ":" + dirver + ":" + curname
- elif type == 'git':
- if user:
- gituser = user + '@'
- else:
- gituser = ""
-
- if 'protocol' in parm:
- gitproto = parm['protocol']
- else:
- gitproto = "git"
- # Get all tags and HEAD
- if d.getVar('GIT_REGEX', True):
- gitcmd = "git ls-remote %s://%s%s%s %s 2>&1" % (gitproto, gituser, host, path, d.getVar('GIT_REGEX', True))
- else:
- gitcmd = "git ls-remote %s://%s%s%s *tag* 2>&1" % (gitproto, gituser, host, path)
- gitcmd2 = "git ls-remote %s://%s%s%s HEAD 2>&1" % (gitproto, gituser, host, path)
-
- tmp = os.popen(gitcmd).read()
- if 'unable to connect' in tmp:
- tmp = None
- tmp2 = os.popen(gitcmd2).read()
- if 'unable to connect' in tmp2:
- tmp2 = None
- #This is for those repos have tag like: refs/tags/1.2.2
- phash = pversion.rsplit("+")[-1]
- if tmp:
- tmpline = tmp.split("\n")
- verflag = 0
- pupver = pversion
- for line in tmpline:
- if len(line)==0:
- break;
- puptag = line.split("/")[-1]
- upstr_regex = d.getVar('REGEX', True)
- if upstr_regex:
- puptag = re.search(upstr_regex, puptag)
- else:
- puptag = re.search("(?P<pver>([0-9][\.|_]?)+)", puptag)
- if puptag == None:
- continue
- puptag = puptag.group('pver')
- puptag = re.sub("_",".",puptag)
- plocaltag = pupver.split("+git")[0]
- if "git" in plocaltag:
- plocaltag = plocaltag.split("-")[0]
- result = bb.utils.vercmp(("0", puptag, ""), ("0", plocaltag, ""))
-
- if result > 0:
- verflag = 1
- pupver = puptag
- elif verflag == 0 :
- pupver = plocaltag
- #This is for those no tag repo
- elif tmp2:
+ if pproto in ['http', 'https', 'ftp', 'git']:
+ try:
+ ud = bb.fetch2.FetchData(psrcuri, d)
+ pupver = ud.method.latest_versionstring(ud, d)
+ if pproto == 'git':
+ if pupver == "":
pupver = pversion.rsplit("+")[0]
- phash = pupver
- else:
- pstatus = "ErrGitAccess"
- if not ('ErrGitAccess' in pstatus):
-
- latest_head = tmp2.rsplit("\t")[0][:7]
- tmp3 = re.search('(?P<git_ver>(\d+[\.-]?)+)(?P<git_prefix>(\+git[r|\-|]?)AUTOINC\+)(?P<head_md5>([\w|_]+))', pversion)
- tmp4 = re.search('(?P<git_ver>(\d+[\.-]?)+)(?P<git_prefix>(\+git[r|\-|]?)AUTOINC\+)(?P<head_md5>([\w|_]+))', pupver)
- if not tmp4:
- tmp4 = re.search('(?P<git_ver>(\d+[\.-]?)+)', pupver)
-
- if tmp3:
- # Get status of the package - MATCH/UPDATE
- result = bb.utils.vercmp(("0", tmp3.group('git_ver'), ""), ("0",tmp3.group('git_ver') , ""))
- # Get the latest tag
- pstatus = 'MATCH'
- if result < 0:
- latest_pv = tmp3.group('git_ver')
- else:
- latest_pv = pupver
- if not(tmp3.group('head_md5')[:7] in latest_head) or not(latest_head in tmp3.group('head_md5')[:7]):
- pstatus = 'UPDATE'
-
- git_prefix = tmp3.group('git_prefix')
- pupver = latest_pv + tmp3.group('git_prefix') + latest_head
- else:
- if not tmp3:
- bb.plain("#DEBUG# Package %s: current version (%s) doesn't match the usual pattern" %(pname, pversion))
- elif type == 'svn':
- ud = bb.fetch2.FetchData(uri, d)
-
- svnFetcher = bb.fetch2.svn.Svn(d)
- svnFetcher.urldata_init(ud, d)
- try:
- pupver = svnFetcher.latest_revision(ud, d, ud.names[0])
- except bb.fetch2.FetchError:
- pstatus = "ErrSvnAccess"
-
- if pupver:
- if pupver in pversion:
- pstatus = "MATCH"
- else:
- pstatus = "UPDATE"
- else:
- pstatus = "ErrSvnAccess"
-
- if 'rev' in ud.parm:
- pcurver = ud.parm['rev']
-
- if pstatus != "ErrSvnAccess":
- tag = pversion.rsplit("+svn")[0]
- svn_prefix = re.search('(\+svn[r|\-]?)', pversion)
- if tag and svn_prefix:
- pupver = tag + svn_prefix.group() + pupver
-
- elif type == 'cvs':
- pupver = "HEAD"
- pstatus = "UPDATE"
- elif type == 'file':
- """local file is always up-to-date"""
- pupver = pcurver
- pstatus = "MATCH"
+                if re.search("gitrAUTOINC", pversion):
+ pupver += "+gitrAUTOINC+"
+ else:
+ pupver += "+gitAUTOINC+"
+ latest_revision = ud.method.latest_revision(ud, d, ud.names[0])
+ pupver += latest_revision[:10]
+ except Exception as inst:
+ bb.warn("%s: unexpected error: %s" % (pname, repr(inst)))
+ pstatus = "ErrAccess"
+ elif pproto == "file":
+        """Local files are always up-to-date"""
+ pupver = pversion
else:
- pstatus = "ErrUnsupportedProto"
+ pstatus = "ErrUnsupportedProto"
+ bb.note("do_checkpkg, protocol %s isn't implemented" % pproto)
- if re.match("Err", pstatus):
- pstatus += ":%s%s" % (host, path)
+ if not pupver:
+ pupver = "N/A"
+ elif pupver == pversion:
+ pstatus = "MATCH"
+ else:
+ pstatus = "UPDATE"
"""Read from manual distro tracking fields as alternative"""
pmver = d.getVar("RECIPE_UPSTREAM_VERSION", True)
if not pmver:
- pmver = "N/A"
- pmstatus = "ErrNoRecipeData"
+ pmver = "N/A"
+ pmstatus = "ErrNoRecipeData"
+ elif pmver == pupver:
+ pmstatus = "MATCH"
else:
- if pmver == pcurver:
- pmstatus = "MATCH"
- else:
- pmstatus = "UPDATE"
+ pmstatus = "UPDATE"
- psrcuri = psrcuri.split()[0]
pdepends = "".join(pdepends.split("\t"))
pdesc = "".join(pdesc.split("\t"))
no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON', True)
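
The rewritten do_checkpkg above delegates upstream-version discovery to the fetcher's latest_versionstring() and then only compares strings. A condensed sketch of that decision logic, with the fetcher stubbed out purely for illustration:

    def classify(pversion, fetch_latest):
        """fetch_latest is assumed to return the upstream version string or raise."""
        try:
            pupver = fetch_latest()
        except Exception:
            return "N/A", "ErrAccess"
        if not pupver:
            return "N/A", "ErrUnknown"
        return pupver, ("MATCH" if pupver == pversion else "UPDATE")

    # classify("1.2.3", lambda: "1.2.4") -> ("1.2.4", "UPDATE")
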
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index 2ac62747a2..4e429d78d8 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
@@ -49,5 +49,8 @@ python () {
for task in d.getVar("SRCTREECOVEREDTASKS", True).split():
bb.build.deltask(task, d)
+
+ # Ensure compilation happens every time
+ d.setVarFlag('do_compile', 'nostamp', '1')
}
diff --git a/meta/classes/gnomebase.bbclass b/meta/classes/gnomebase.bbclass
index 739bf60649..6ca13cb1e0 100644
--- a/meta/classes/gnomebase.bbclass
+++ b/meta/classes/gnomebase.bbclass
@@ -9,13 +9,13 @@ SRC_URI = "${GNOME_MIRROR}/${GNOMEBN}/${@gnome_verdir("${PV}")}/${GNOMEBN}-${PV}
DEPENDS += "gnome-common-native"
FILES_${PN} += "${datadir}/application-registry \
- ${datadir}/mime-info \
- ${datadir}/mime/packages \
- ${datadir}/mime/application \
- ${datadir}/gnome-2.0 \
- ${datadir}/polkit* \
- ${datadir}/GConf \
- ${datadir}/glib-2.0/schemas \
+ ${datadir}/mime-info \
+ ${datadir}/mime/packages \
+ ${datadir}/mime/application \
+ ${datadir}/gnome-2.0 \
+ ${datadir}/polkit* \
+ ${datadir}/GConf \
+ ${datadir}/glib-2.0/schemas \
"
FILES_${PN}-doc += "${datadir}/devhelp"
diff --git a/meta/classes/image-buildinfo.bbclass b/meta/classes/image-buildinfo.bbclass
new file mode 100644
index 0000000000..aa17cc8f9e
--- /dev/null
+++ b/meta/classes/image-buildinfo.bbclass
@@ -0,0 +1,69 @@
+#
+# Writes build information to target filesystem on /etc/build
+#
+# Copyright (C) 2014 Intel Corporation
+# Author: Alejandro Enedino Hernandez Samaniego <alejandro.hernandez@intel.com>
+#
+# Licensed under the MIT license, see COPYING.MIT for details
+#
+# Usage: add INHERIT += "image-buildinfo" to your conf file
+#
+
+# Desired variables to display
+IMAGE_BUILDINFO_VARS ?= "DISTRO DISTRO_VERSION"
+
+# From buildhistory.bbclass
+def image_buildinfo_outputvars(vars, listvars, d):
+ vars = vars.split()
+ listvars = listvars.split()
+ ret = ""
+ for var in vars:
+ value = d.getVar(var, True) or ""
+ if (d.getVarFlag(var, 'type') == "list"):
+ value = oe.utils.squashspaces(value)
+ ret += "%s = %s\n" % (var, value)
+ return ret.rstrip('\n')
+
+# Gets git branch's status (clean or dirty)
+def get_layer_git_status(path):
+ f = os.popen("cd %s; git diff --stat 2>&1 | tail -n 1" % path)
+ data = f.read()
+ if f.close() is None:
+ if len(data) != 0:
+ return "-- modified"
+ return ""
+
+# Returns layer revisions along with their respective status
+def get_layer_revs(d):
+ layers = (d.getVar("BBLAYERS", True) or "").split()
+ medadata_revs = ["%-17s = %s:%s %s" % (os.path.basename(i), \
+ base_get_metadata_git_branch(i, None).strip(), \
+ base_get_metadata_git_revision(i, None), \
+ get_layer_git_status(i)) \
+ for i in layers]
+ return '\n'.join(medadata_revs)
+
+def buildinfo_target(d):
+ # Get context
+ if d.getVar('BB_WORKERCONTEXT', True) != '1':
+ return ""
+ # Single and list variables to be read
+ vars = (d.getVar("IMAGE_BUILDINFO_VARS", True) or "")
+ listvars = (d.getVar("IMAGE_BUILDINFO_LVARS", True) or "")
+ return image_buildinfo_outputvars(vars, listvars, d)
+
+# Write build information to target filesystem
+buildinfo () {
+cat > ${IMAGE_ROOTFS}${sysconfdir}/build << END
+-----------------------
+Build Configuration: |
+-----------------------
+${@buildinfo_target(d)}
+-----------------------
+Layer Revisions: |
+-----------------------
+${@get_layer_revs(d)}
+END
+}
+
+IMAGE_PREPROCESS_COMMAND += "buildinfo;"
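
image_buildinfo_outputvars() above prints each requested variable, squashing whitespace for list-type values. The sketch below loosely mirrors that formatting with a plain dict in place of the datastore; note the class itself checks the variable's 'type' flag rather than a listvars argument, so this is only an illustration:

    def outputvars(vars, listvars, values):
        ret = ""
        for var in vars.split():
            value = values.get(var, "")
            if var in listvars.split():
                value = " ".join(value.split())  # squash runs of whitespace
            ret += "%s = %s\n" % (var, value)
        return ret.rstrip("\n")

    print(outputvars("DISTRO DISTRO_VERSION", "", {"DISTRO": "poky", "DISTRO_VERSION": "1.7"}))
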
diff --git a/meta/classes/image-mklibs.bbclass b/meta/classes/image-mklibs.bbclass
index 11f082b373..c455a8e2d4 100644
--- a/meta/classes/image-mklibs.bbclass
+++ b/meta/classes/image-mklibs.bbclass
@@ -16,7 +16,7 @@ mklibs_optimize_image_doit() {
> ${WORKDIR}/mklibs/executables.list
case ${TARGET_ARCH} in
- powerpc | mips | microblaze )
+ powerpc | mips | mipsel | microblaze )
dynamic_loader="${base_libdir}/ld.so.1"
;;
powerpc64)
diff --git a/meta/classes/image-swab.bbclass b/meta/classes/image-swab.bbclass
index 124a090605..89318560db 100644
--- a/meta/classes/image-swab.bbclass
+++ b/meta/classes/image-swab.bbclass
@@ -11,7 +11,7 @@ BB_DEFAULT_TASK = "generate_swabber_report"
# Ideally these should be fixed but as a temporary measure disable parallel
# builds for troublesome recipes
PARALLEL_MAKE_pn-openssl = ""
-PARALLEL_MAKE_pn-eglibc = ""
+PARALLEL_MAKE_pn-glibc = ""
PARALLEL_MAKE_pn-glib-2.0 = ""
PARALLEL_MAKE_pn-libxml2 = ""
PARALLEL_MAKE_pn-readline = ""
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index 940bdb6bc0..34e9f4cba5 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -145,17 +145,6 @@ python () {
d.setVar('IMAGE_FEATURES', ' '.join(list(remain_features)))
- # Ensure we have the vendor list for complementary package handling
- ml_vendor_list = ""
- multilibs = d.getVar('MULTILIBS', True) or ""
- for ext in multilibs.split():
- eext = ext.split(':')
- if len(eext) > 1 and eext[0] == 'multilib':
- localdata = bb.data.createCopy(d)
- vendor = localdata.getVar("TARGET_VENDOR_virtclass-multilib-" + eext[1], False)
- ml_vendor_list += " " + vendor
- d.setVar('MULTILIB_VENDORS', ml_vendor_list)
-
check_image_features(d)
initramfs_image = d.getVar('INITRAMFS_IMAGE', True) or ""
if initramfs_image != "":
@@ -186,7 +175,9 @@ IMAGE_LINGUAS ?= "de-de fr-fr en-gb"
LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', True).split()))}"
-PSEUDO_PASSWD = "${IMAGE_ROOTFS}"
+# Prefer image, but use the fallback files for lookups if the image ones
+# aren't yet available.
+PSEUDO_PASSWD = "${IMAGE_ROOTFS}:${STAGING_DIR_NATIVE}"
do_rootfs[dirs] = "${TOPDIR}"
do_rootfs[lockfiles] += "${IMAGE_ROOTFS}.lock"
@@ -197,18 +188,17 @@ do_rootfs[cleandirs] += "${S}"
do_rootfs[umask] = "022"
# A hook function to support read-only-rootfs IMAGE_FEATURES
-# Currently, it only supports sysvinit system.
read_only_rootfs_hook () {
# Tweak the mount option and fs_passno for rootfs in fstab
sed -i -e '/^[#[:space:]]*\/dev\/root/{s/defaults/ro/;s/\([[:space:]]*[[:digit:]]\)\([[:space:]]*\)[[:digit:]]$/\1\20/}' ${IMAGE_ROOTFS}/etc/fstab
if ${@bb.utils.contains("DISTRO_FEATURES", "sysvinit", "true", "false", d)}; then
- # Change the value of ROOTFS_READ_ONLY in /etc/default/rcS to yes
+ # Change the value of ROOTFS_READ_ONLY in /etc/default/rcS to yes
if [ -e ${IMAGE_ROOTFS}/etc/default/rcS ]; then
sed -i 's/ROOTFS_READ_ONLY=no/ROOTFS_READ_ONLY=yes/' ${IMAGE_ROOTFS}/etc/default/rcS
fi
- # Run populate-volatile.sh at rootfs time to set up basic files
- # and directories to support read-only rootfs.
+ # Run populate-volatile.sh at rootfs time to set up basic files
+ # and directories to support read-only rootfs.
if [ -x ${IMAGE_ROOTFS}/etc/init.d/populate-volatile.sh ]; then
${IMAGE_ROOTFS}/etc/init.d/populate-volatile.sh
fi
@@ -225,6 +215,27 @@ read_only_rootfs_hook () {
fi
fi
fi
+
+ if ${@bb.utils.contains("DISTRO_FEATURES", "systemd", "true", "false", d)}; then
+ # Update user database files so that services don't fail for a read-only systemd system
+ for conffile in ${IMAGE_ROOTFS}/usr/lib/sysusers.d/systemd.conf ${IMAGE_ROOTFS}/usr/lib/sysusers.d/systemd-remote.conf; do
+ [ -e $conffile ] || continue
+ grep -v "^#" $conffile | sed -e '/^$/d' | while read type name id comment; do
+ if [ "$type" = "u" ]; then
+ useradd_params=""
+ [ "$id" != "-" ] && useradd_params="$useradd_params --uid $id"
+ [ "$comment" != "-" ] && useradd_params="$useradd_params --comment $comment"
+ useradd_params="$useradd_params --system $name"
+ eval useradd --root ${IMAGE_ROOTFS} $useradd_params || true
+ elif [ "$type" = "g" ]; then
+ groupadd_params=""
+ [ "$id" != "-" ] && groupadd_params="$groupadd_params --gid $id"
+ groupadd_params="$groupadd_params --system $name"
+ eval groupadd --root ${IMAGE_ROOTFS} $groupadd_params || true
+ fi
+ done
+ done
+ fi
}
PACKAGE_EXCLUDE ??= ""
@@ -419,6 +430,7 @@ do_compile[noexec] = "1"
do_install[noexec] = "1"
do_populate_sysroot[noexec] = "1"
do_package[noexec] = "1"
+do_package_qa[noexec] = "1"
do_packagedata[noexec] = "1"
do_package_write_ipk[noexec] = "1"
do_package_write_deb[noexec] = "1"
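
The read-only-rootfs hook added earlier in this file translates sysusers.d entries into useradd/groupadd invocations so services keep working on a read-only systemd system. An equivalent Python sketch of that line parsing (illustrative only; field order per systemd-sysusers, quoted multi-word comments are not handled here):

    def sysusers_to_cmd(line, rootfs):
        # Translate one sysusers.d 'u'/'g' entry into an argument list
        fields = line.split()
        if not fields or fields[0].startswith('#'):
            return None
        type_, name = fields[0], fields[1]
        uid = fields[2] if len(fields) > 2 else '-'
        comment = fields[3] if len(fields) > 3 else '-'
        if type_ == 'u':
            cmd = ['useradd', '--root', rootfs, '--system']
            if uid != '-':
                cmd += ['--uid', uid]
            if comment != '-':
                cmd += ['--comment', comment]
            return cmd + [name]
        if type_ == 'g':
            cmd = ['groupadd', '--root', rootfs, '--system']
            if uid != '-':
                cmd += ['--gid', uid]
            return cmd + [name]
        return None

    print(sysusers_to_cmd("u messagebus 998 -", "/path/to/rootfs"))
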
diff --git a/meta/classes/image_types.bbclass b/meta/classes/image_types.bbclass
index d298897268..02b107c7c1 100644
--- a/meta/classes/image_types.bbclass
+++ b/meta/classes/image_types.bbclass
@@ -13,7 +13,7 @@ def imagetypes_getdepends(d):
deps = []
ctypes = d.getVar('COMPRESSIONTYPES', True).split()
for type in (d.getVar('IMAGE_FSTYPES', True) or "").split():
- if type == "vmdk" or type == "live" or type == "iso" or type == "hddimg":
+ if type in ["vmdk", "live", "iso", "hddimg"]:
type = "ext3"
basetype = type
for ctype in ctypes:
@@ -21,6 +21,8 @@ def imagetypes_getdepends(d):
basetype = type[:-len("." + ctype)]
adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype, True), deps)
break
+ for typedepends in (d.getVar("IMAGE_TYPEDEP_%s" % basetype, True) or "").split():
+ adddep(d.getVar('IMAGE_DEPENDS_%s' % typedepends, True) , deps)
adddep(d.getVar('IMAGE_DEPENDS_%s' % basetype, True) , deps)
depstr = ""
@@ -70,7 +72,11 @@ IMAGE_CMD_cpio () {
(cd ${IMAGE_ROOTFS} && find . | cpio -o -H newc >${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.cpio)
if [ ! -e ${IMAGE_ROOTFS}/init ]; then
mkdir -p ${WORKDIR}/cpio_append
- touch ${WORKDIR}/cpio_append/init
+ if [ -e ${IMAGE_ROOTFS}/sbin/init -o -L ${IMAGE_ROOTFS}/sbin/init ]; then
+ ln -sf /sbin/init ${WORKDIR}/cpio_append/init
+ else
+ touch ${WORKDIR}/cpio_append/init
+ fi
(cd ${WORKDIR}/cpio_append && echo ./init | cpio -oA -H newc -F ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.cpio)
fi
}
@@ -97,6 +103,8 @@ IMAGE_CMD_ubi () {
mkfs.ubifs -r ${IMAGE_ROOTFS} -o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.ubifs ${MKUBIFS_ARGS}
ubinize -o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.ubi ${UBINIZE_ARGS} ubinize.cfg
}
+IMAGE_TYPEDEP_ubi = "ubifs"
+
IMAGE_CMD_ubifs = "mkfs.ubifs -r ${IMAGE_ROOTFS} -o ${DEPLOY_DIR_IMAGE}/${IMAGE_NAME}.rootfs.ubifs ${MKUBIFS_ARGS}"
EXTRA_IMAGECMD = ""
@@ -128,7 +136,21 @@ IMAGE_DEPENDS_ubi = "mtd-utils-native"
IMAGE_DEPENDS_ubifs = "mtd-utils-native"
# This variable is available to request which values are suitable for IMAGE_FSTYPES
-IMAGE_TYPES = "jffs2 jffs2.sum cramfs ext2 ext2.gz ext2.bz2 ext3 ext3.gz ext2.lzma btrfs iso hddimg squashfs squashfs-xz squashfs-lzo ubi ubifs tar tar.gz tar.bz2 tar.xz tar.lz4 cpio cpio.gz cpio.xz cpio.lzma cpio.lz4 vmdk elf"
+IMAGE_TYPES = " \
+ jffs2 jffs2.sum \
+ cramfs \
+ ext2 ext2.gz ext2.bz2 ext2.lzma \
+ ext3 ext3.gz \
+ btrfs \
+ iso \
+ hddimg \
+ squashfs squashfs-xz squashfs-lzo \
+ ubi ubifs \
+ tar tar.gz tar.bz2 tar.xz tar.lz4 \
+ cpio cpio.gz cpio.xz cpio.lzma cpio.lz4 \
+ vmdk \
+ elf \
+"
COMPRESSIONTYPES = "gz bz2 lzma xz lz4 sum"
COMPRESS_CMD_lzma = "lzma -k -f -7 ${IMAGE_NAME}.rootfs.${type}"
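
The IMAGE_TYPEDEP_* handling added above lets one image type pull in another type's build dependencies (here ubi depends on ubifs). A toy sketch of that dependency expansion, with illustrative names only:

    def image_type_depends(fstype, typedeps, deps):
        # deps maps an image type to its native tool dependencies;
        # typedeps maps a type to the sub-types it is built from (IMAGE_TYPEDEP_*)
        result = set(deps.get(fstype, []))
        for sub in typedeps.get(fstype, []):
            result |= set(deps.get(sub, []))
        return sorted(result)

    typedeps = {"ubi": ["ubifs"]}
    deps = {"ubi": ["mtd-utils-native"], "ubifs": ["mtd-utils-native"]}
    print(image_type_depends("ubi", typedeps, deps))  # ['mtd-utils-native']
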
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 3dd2e7fb6a..0b45374a28 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -29,7 +29,7 @@ QA_SANE = "True"
WARN_QA ?= "ldflags useless-rpaths rpaths staticdev libdir xorg-driver-abi \
textrel already-stripped incompatible-license files-invalid \
installed-vs-shipped compile-host-path install-host-path \
- pn-overrides infodir build-deps \
+ pn-overrides infodir build-deps file-rdeps \
"
ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \
perms dep-cmp pkgvarcheck perm-config perm-line perm-link \
@@ -771,31 +771,102 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d):
rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS', True) or "")
# Now do the sanity check!!!
- for rdepend in rdepends:
- if "-dbg" in rdepend and "debug-deps" not in skip:
- error_msg = "%s rdepends on %s" % (pkg,rdepend)
- sane = package_qa_handle_error("debug-deps", error_msg, d)
- if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip:
- error_msg = "%s rdepends on %s" % (pkg, rdepend)
- sane = package_qa_handle_error("dev-deps", error_msg, d)
- if rdepend not in packages:
- rdep_data = oe.packagedata.read_subpkgdata(rdepend, d)
- if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
- continue
- if not rdep_data or not 'PN' in rdep_data:
- pkgdata_dir = d.getVar("PKGDATA_DIR", True)
- try:
- possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
- except OSError:
- possibles = []
- for p in possibles:
- rdep_data = oe.packagedata.read_subpkgdata(p, d)
- if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
- break
- if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
- continue
- error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend)
- sane = package_qa_handle_error("build-deps", error_msg, d)
+ if "build-deps" not in skip:
+ for rdepend in rdepends:
+ if "-dbg" in rdepend and "debug-deps" not in skip:
+ error_msg = "%s rdepends on %s" % (pkg,rdepend)
+ sane = package_qa_handle_error("debug-deps", error_msg, d)
+ if (not "-dev" in pkg and not "-staticdev" in pkg) and rdepend.endswith("-dev") and "dev-deps" not in skip:
+ error_msg = "%s rdepends on %s" % (pkg, rdepend)
+ sane = package_qa_handle_error("dev-deps", error_msg, d)
+ if rdepend not in packages:
+ rdep_data = oe.packagedata.read_subpkgdata(rdepend, d)
+ if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
+ continue
+ if not rdep_data or not 'PN' in rdep_data:
+ pkgdata_dir = d.getVar("PKGDATA_DIR", True)
+ try:
+ possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend))
+ except OSError:
+ possibles = []
+ for p in possibles:
+ rdep_data = oe.packagedata.read_subpkgdata(p, d)
+ if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
+ break
+ if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps:
+ continue
+ error_msg = "%s rdepends on %s, but it isn't a build dependency?" % (pkg, rdepend)
+ sane = package_qa_handle_error("build-deps", error_msg, d)
+
+ if "file-rdeps" not in skip:
+ ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)'])
+ if bb.data.inherits_class('nativesdk', d):
+ ignored_file_rdeps |= set(['/bin/bash', '/usr/bin/perl'])
+ # For Saving the FILERDEPENDS
+ filerdepends = set()
+ rdep_data = oe.packagedata.read_subpkgdata(pkg, d)
+ for key in rdep_data:
+ if key.startswith("FILERDEPENDS_"):
+ for subkey in rdep_data[key].split():
+ filerdepends.add(subkey)
+ filerdepends -= ignored_file_rdeps
+
+ if filerdepends:
+ next = rdepends
+ done = rdepends[:]
+ # Find all the rdepends on the dependency chain
+ while next:
+ new = []
+ for rdep in next:
+ rdep_data = oe.packagedata.read_subpkgdata(rdep, d)
+ sub_rdeps = rdep_data.get("RDEPENDS_" + rdep)
+ if not sub_rdeps:
+ continue
+ for sub_rdep in sub_rdeps.split():
+ if sub_rdep in done:
+ continue
+ if not sub_rdep.startswith('(') and \
+ oe.packagedata.has_subpkgdata(sub_rdep, d):
+ # It's a new rdep
+ done.append(sub_rdep)
+ new.append(sub_rdep)
+ next = new
+
+ # Add the rprovides of itself
+ if pkg not in done:
+ done.insert(0, pkg)
+
+            # python itself is not a package, but python-core provides it, so
+            # skip checking /usr/bin/python if python is in the rdeps, in
+            # case there is an RDEPENDS_pkg = "python" in the recipe.
+ for py in [ d.getVar('MLPREFIX', True) + "python", "python" ]:
+ if py in done:
+ filerdepends.discard("/usr/bin/python")
+ done.remove(py)
+ for rdep in done:
+ # For Saving the FILERPROVIDES, RPROVIDES and FILES_INFO
+ rdep_rprovides = set()
+ rdep_data = oe.packagedata.read_subpkgdata(rdep, d)
+ for key in rdep_data:
+ if key.startswith("FILERPROVIDES_") or key.startswith("RPROVIDES_"):
+ for subkey in rdep_data[key].split():
+ rdep_rprovides.add(subkey)
+ # Add the files list to the rprovides
+ if key == "FILES_INFO":
+ # Use eval() to make it as a dict
+ for subkey in eval(rdep_data[key]):
+ rdep_rprovides.add(subkey)
+ filerdepends -= rdep_rprovides
+ if not filerdepends:
+ # Break if all the file rdepends are met
+ break
+ else:
+ # Clear it for the next loop
+ rdep_rprovides.clear()
+ if filerdepends:
+ error_msg = "%s requires %s, but no providers in its RDEPENDS" % \
+ (pkg, ', '.join(str(e) for e in filerdepends))
+ sane = package_qa_handle_error("file-rdeps", error_msg, d)
return sane
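
The new file-rdeps check above walks the runtime dependency chain and removes every file-level requirement that some dependency RPROVIDES or ships. Reduced to its core set arithmetic with plain dicts (a sketch, not the packagedata API):

    def unmet_file_rdeps(filerdepends, rdeps, provides):
        # provides maps a package name to the set of files/rprovides it satisfies
        missing = set(filerdepends)
        for dep in rdeps:
            missing -= provides.get(dep, set())
            if not missing:
                break
        return missing

    print(unmet_file_rdeps({"/usr/bin/python", "libfoo.so.1"},
                           ["python-core", "libfoo"],
                           {"python-core": {"/usr/bin/python"},
                            "libfoo": {"libfoo.so.1"}}))  # -> set()
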
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass
index 6010dc94e0..3b59d85f4e 100644
--- a/meta/classes/kernel-yocto.bbclass
+++ b/meta/classes/kernel-yocto.bbclass
@@ -1,5 +1,3 @@
-S = "${WORKDIR}/linux"
-
# remove tasks that modify the source tree in case externalsrc is inherited
SRCTREECOVEREDTASKS += "do_kernel_link_vmlinux do_kernel_configme do_validate_branches do_kernel_configcheck do_kernel_checkout do_patch"
@@ -71,6 +69,11 @@ do_patch() {
fi
machine_branch="${@ get_machine_branch(d, "${KBRANCH}" )}"
+ machine_srcrev="${SRCREV_machine}"
+ if [ -z "${machine_srcrev}" ]; then
+		# fall back to SRCREV if a non-machine_meta tree is being built
+ machine_srcrev="${SRCREV}"
+ fi
# if we have a defined/set meta branch we should not be generating
# any meta data. The passed branch has what we need.
@@ -80,8 +83,7 @@ do_patch() {
createme ${createme_flags} ${ARCH} ${machine_branch}
if [ $? -ne 0 ]; then
- echo "ERROR. Could not create ${machine_branch}"
- exit 1
+ bbfatal "Could not create ${machine_branch}"
fi
sccs="${@" ".join(find_sccs(d))}"
@@ -110,36 +112,27 @@ do_patch() {
done
fi
- if [ "${machine_branch}" != "${KBRANCH_DEFAULT}" ]; then
- updateme_flags="--branch ${machine_branch}"
- fi
-
# updates or generates the target description
updateme ${updateme_flags} -DKDESC=${KMACHINE}:${LINUX_KERNEL_TYPE} \
${includes} ${addon_features} ${ARCH} ${KMACHINE} ${sccs} ${patches}
if [ $? -ne 0 ]; then
- echo "ERROR. Could not update ${machine_branch}"
- exit 1
+ bbfatal "Could not update ${machine_branch}"
fi
# executes and modifies the source tree as required
patchme ${KMACHINE}
if [ $? -ne 0 ]; then
- echo "ERROR. Could not apply patches for ${KMACHINE}."
- echo " Patch failures can be resolved in the devshell (bitbake -c devshell ${PN})"
- exit 1
+ bberror "Could not apply patches for ${KMACHINE}."
+ bbfatal "Patch failures can be resolved in the devshell (bitbake -c devshell ${PN})"
fi
- # Perform a final check. If something other than the default kernel
- # branch was requested, and that's not where we ended up, then we
- # should thrown an error, since we aren't building what was expected
- final_branch="$(git symbolic-ref HEAD 2>/dev/null)"
- final_branch=${final_branch##refs/heads/}
- if [ "${machine_branch}" != "${KBRANCH_DEFAULT}" ] &&
- [ "${final_branch}" != "${machine_branch}" ]; then
- echo "ERROR: branch ${machine_branch} was requested, but was not properly"
- echo " configured to be built. The current branch is ${final_branch}"
- exit 1
+ # check to see if the specified SRCREV is reachable from the final branch.
+	# if it isn't, something has gone wrong and we should error.
+ if [ "${machine_srcrev}" != "AUTOINC" ]; then
+ if ! [ "$(git rev-parse --verify ${machine_srcrev}~0)" = "$(git merge-base ${machine_srcrev} HEAD)" ]; then
+ bberror "SRCREV ${machine_srcrev} was specified, but is not reachable"
+ bbfatal "Check the BSP description for incorrect branch selection, or other errors."
+ fi
fi
}
@@ -183,36 +176,17 @@ do_kernel_checkout() {
cd ${S}
if [ ! -f "Makefile" ]; then
- echo "[ERROR]: S is not set to the linux source directory. Check "
- echo " the recipe and set S to the proper extracted subdirectory"
- exit 1
+ bberror "S is not set to the linux source directory. Check "
+ bbfatal "the recipe and set S to the proper extracted subdirectory"
fi
+ rm -f .gitignore
git init
git add .
git commit -q -m "baseline commit: creating repo for ${PN}-${PV}"
+ git clean -d -f
fi
# end debare
- # If KMETA is defined, the branch must exist, but a machine branch
- # can be missing since it may be created later by the tools.
- if [ -n "${KMETA}" ]; then
- git branch -a --no-color | grep -q ${KMETA}
- if [ $? -ne 0 ]; then
- echo "ERROR. The branch '${KMETA}' is required and was not"
- echo "found. Ensure that the SRC_URI points to a valid linux-yocto"
- echo "kernel repository"
- exit 1
- fi
- fi
-
- machine_branch="${@ get_machine_branch(d, "${KBRANCH}" )}"
-
- if [ "${KBRANCH}" != "${machine_branch}" ]; then
- echo "WARNING: The SRC_URI machine branch and KBRANCH are not the same."
- echo " KBRANCH will be adjusted to match, but this typically is a"
- echo " misconfiguration and should be checked."
- fi
-
# convert any remote branches to local tracking ones
for i in `git branch -a --no-color | grep remotes | grep -v HEAD`; do
b=`echo $i | cut -d' ' -f2 | sed 's%remotes/origin/%%'`;
@@ -222,15 +196,27 @@ do_kernel_checkout() {
fi
done
+ # If KMETA is defined, the branch must exist, but a machine branch
+ # can be missing since it may be created later by the tools.
+ if [ -n "${KMETA}" ]; then
+ git show-ref --quiet --verify -- "refs/heads/${KMETA}"
+ if [ $? -eq 1 ]; then
+ bberror "The branch '${KMETA}' is required and was not found"
+ bberror "Ensure that the SRC_URI points to a valid linux-yocto"
+ bbfatal "kernel repository"
+ fi
+ fi
+
+
# Create a working tree copy of the kernel by checking out a branch
+ machine_branch="${@ get_machine_branch(d, "${KBRANCH}" )}"
git show-ref --quiet --verify -- "refs/heads/${machine_branch}"
if [ $? -eq 0 ]; then
- # checkout and clobber any unimportant files
- git checkout -f ${machine_branch}
- else
- echo "Not checking out ${machine_branch}, it will be created later"
- git checkout -f master
+ machine_branch="master"
fi
+
+ # checkout and clobber any unimportant files
+ git checkout -f ${machine_branch}
}
do_kernel_checkout[dirs] = "${S}"
@@ -238,7 +224,7 @@ addtask kernel_checkout before do_patch after do_unpack
do_kernel_configme[dirs] += "${S} ${B}"
do_kernel_configme() {
- echo "[INFO] doing kernel configme"
+ bbnote "kernel configme"
export KMETA=${KMETA}
if [ -n "${KCONFIG_MODE}" ]; then
@@ -255,8 +241,7 @@ do_kernel_configme() {
PATH=${PATH}:${S}/scripts/util
configme ${configmeflags} --reconfig --output ${B} ${LINUX_KERNEL_TYPE} ${KMACHINE}
if [ $? -ne 0 ]; then
- echo "ERROR. Could not configure ${KMACHINE}-${LINUX_KERNEL_TYPE}"
- exit 1
+ bbfatal "Could not configure ${KMACHINE}-${LINUX_KERNEL_TYPE}"
fi
echo "# Global settings from linux recipe" >> ${B}/.config
@@ -292,104 +277,66 @@ python do_kernel_configcheck() {
# their SRCREV values. If they are NOT on the right commits, the branches
# are corrected to the proper commit.
do_validate_branches() {
+ set +e
cd ${S}
export KMETA=${KMETA}
machine_branch="${@ get_machine_branch(d, "${KBRANCH}" )}"
+ machine_srcrev="${SRCREV_machine}"
- set +e
# if SRCREV is AUTOREV it shows up as AUTOINC there's nothing to
# check and we can exit early
- if [ "${SRCREV_machine}" = "AUTOINC" ] || [ "${SRCREV_machine}" = "INVALID" ] ||
- [ "${SRCREV_machine}" = "" ]; then
- return
- fi
-
- # If something other than the default branch was requested, it must
- # exist in the tree, and it's a hard error if it wasn't
- git show-ref --quiet --verify -- "refs/heads/${machine_branch}"
- if [ $? -eq 1 ]; then
- if [ -n "${KBRANCH_DEFAULT}" ] &&
- [ "${machine_branch}" != "${KBRANCH_DEFAULT}" ]; then
- echo "ERROR: branch ${machine_branch} was set for kernel compilation, "
- echo " but it does not exist in the kernel repository."
- echo " Check the value of KBRANCH and ensure that it describes"
- echo " a valid banch in the source kernel repository"
- exit 1
+ if [ "${machine_srcrev}" = "AUTOINC" ]; then
+ bbnote "SRCREV validation is not required for AUTOREV"
+ elif [ "${machine_srcrev}" = "" ]; then
+ if [ "${SRCREV}" != "AUTOINC" ] && [ "${SRCREV}" != "INVALID" ]; then
+ # SRCREV_machine_<MACHINE> was not set. This means that a custom recipe
+ # that doesn't use the SRCREV_FORMAT "machine_meta" is being built. In
+			# this case, we need to reset to the given SRCREV before heading to patching
+ bbnote "custom recipe is being built, forcing SRCREV to ${SRCREV}"
+ force_srcrev="${SRCREV}"
fi
- fi
-
- if [ -z "${SRCREV_machine}" ]; then
- target_branch_head="${SRCREV}"
else
- target_branch_head="${SRCREV_machine}"
- fi
-
- # $SRCREV could have also been AUTOINC, so check again
- if [ "${target_branch_head}" = "AUTOINC" ]; then
- return
- fi
-
- ref=`git show ${target_branch_head} 2>&1 | head -n1 || true`
- if [ "$ref" = "fatal: bad object ${target_meta_head}" ]; then
- echo "ERROR ${target_branch_head} is not a valid commit ID."
- echo "The kernel source tree may be out of sync"
- exit 1
+ git cat-file -t ${machine_srcrev} > /dev/null
+ if [ $? -ne 0 ]; then
+ bberror "${machine_srcrev} is not a valid commit ID."
+ bbfatal "The kernel source tree may be out of sync"
+ fi
+ force_srcrev=${machine_srcrev}
fi
- containing_branches=`git branch --contains $target_branch_head | sed 's/^..//'`
- if [ -z "$containing_branches" ]; then
- echo "ERROR: SRCREV was set to \"$target_branch_head\", but no branches"
- echo " contain this commit"
- exit 1
- fi
+ ## KMETA branch validation.
+ target_meta_head="${SRCREV_meta}"
+ if [ "${target_meta_head}" = "AUTOINC" ] || [ "${target_meta_head}" = "" ]; then
+ bbnote "SRCREV validation skipped for AUTOREV or empty meta branch"
+ else
+ meta_head=`git show-ref -s --heads ${KMETA}`
- # force the SRCREV in each branch that contains the specified
- # SRCREV (if it isn't the current HEAD of that branch)
- git checkout -q master
- for b in $containing_branches; do
- branch_head=`git show-ref -s --heads ${b}`
- if [ "$branch_head" != "$target_branch_head" ]; then
- echo "[INFO] Setting branch $b to ${target_branch_head}"
- if [ "$b" = "master" ]; then
- git reset --hard $target_branch_head > /dev/null
- else
- git branch -D $b > /dev/null
- git branch $b $target_branch_head > /dev/null
- fi
+ git cat-file -t ${target_meta_head} > /dev/null
+ if [ $? -ne 0 ]; then
+ bberror "${target_meta_head} is not a valid commit ID"
+ bbfatal "The kernel source tree may be out of sync"
fi
- done
-
- ## KMETA branch validation.
- ## We do validation if the meta branch exists, and AUTOREV hasn't been set
- meta_head=`git show-ref -s --heads ${KMETA}`
- target_meta_head="${SRCREV_meta}"
- git show-ref --quiet --verify -- "refs/heads/${KMETA}"
- if [ $? -eq 0 ] && [ "${target_meta_head}" != "AUTOINC" ]; then
if [ "$meta_head" != "$target_meta_head" ]; then
- ref=`git show ${target_meta_head} 2>&1 | head -n1 || true`
- if [ "$ref" = "fatal: bad object ${target_meta_head}" ]; then
- echo "ERROR ${target_meta_head} is not a valid commit ID"
- echo "The kernel source tree may be out of sync"
- exit 1
- else
- echo "[INFO] Setting branch ${KMETA} to ${target_meta_head}"
- git branch -m ${KMETA} ${KMETA}-orig
- git checkout -q -b ${KMETA} ${target_meta_head}
- if [ $? -ne 0 ];then
- echo "ERROR: could not checkout ${KMETA} branch from known hash ${target_meta_head}"
- exit 1
- fi
+ bbnote "Setting branch ${KMETA} to ${target_meta_head}"
+ git branch -m ${KMETA} ${KMETA}-orig
+ git checkout -q -b ${KMETA} ${target_meta_head}
+ if [ $? -ne 0 ];then
+ bbfatal "Could not checkout ${KMETA} branch from known hash ${target_meta_head}"
fi
fi
fi
- git show-ref --quiet --verify -- "refs/heads/${machine_branch}"
- if [ $? -eq 0 ]; then
- # restore the branch for builds
- git checkout -q -f ${machine_branch}
- else
- git checkout -q master
+ git checkout -q -f ${machine_branch}
+ if [ -n "${force_srcrev}" ]; then
+ # see if the branch we are about to patch has been properly reset to the defined
+ # SRCREV; if not, we reset it.
+ branch_head=`git rev-parse HEAD`
+ if [ "${force_srcrev}" != "${branch_head}" ]; then
+ current_branch=`git rev-parse --abbrev-ref HEAD`
+ git branch "$current_branch-orig"
+ git reset --hard ${force_srcrev}
+ fi
fi
}
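A minimal standalone sketch (not part of the patch) of the revision check the shell above performs: probe a commit with git cat-file before hard-resetting a branch to it. The repository path and revision arguments here are hypothetical inputs, not variables from the class.

    import subprocess

    def validate_and_force_srcrev(repo_dir, srcrev):
        """Return True if srcrev is a valid commit in repo_dir and HEAD was reset to it."""
        if srcrev in ("", "AUTOINC", "INVALID"):
            # AUTOREV / unset revisions need no validation, mirroring the shell logic above
            return False
        # 'git cat-file -t <rev>' fails for unknown objects, just like in the class
        probe = subprocess.run(["git", "-C", repo_dir, "cat-file", "-t", srcrev],
                               capture_output=True, text=True)
        if probe.returncode != 0:
            raise RuntimeError("%s is not a valid commit ID; the kernel tree may be out of sync" % srcrev)
        head = subprocess.run(["git", "-C", repo_dir, "rev-parse", "HEAD"],
                              capture_output=True, text=True).stdout.strip()
        if head != srcrev:
            # keep a backup branch, then hard-reset, as the class does
            subprocess.run(["git", "-C", repo_dir, "branch", "backup-orig"], check=False)
            subprocess.run(["git", "-C", repo_dir, "reset", "--hard", srcrev], check=True)
        return True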
diff --git a/meta/classes/kernel.bbclass b/meta/classes/kernel.bbclass
index 1289873032..5cabc2ce1c 100644
--- a/meta/classes/kernel.bbclass
+++ b/meta/classes/kernel.bbclass
@@ -1,7 +1,12 @@
inherit linux-kernel-base kernel-module-split
PROVIDES += "virtual/kernel"
-DEPENDS += "virtual/${TARGET_PREFIX}binutils virtual/${TARGET_PREFIX}gcc kmod-native depmodwrapper-cross"
+DEPENDS += "virtual/${TARGET_PREFIX}binutils virtual/${TARGET_PREFIX}gcc kmod-native depmodwrapper-cross bc-native"
+
+S = "${STAGING_KERNEL_DIR}"
+B = "${WORKDIR}/build"
+KBUILD_OUTPUT = "${B}"
+OE_TERMINAL_EXPORTS += "KBUILD_OUTPUT"
# we include gcc above, we dont need virtual/libc
INHIBIT_DEFAULT_DEPS = "1"
@@ -12,7 +17,7 @@ INITRAMFS_TASK ?= ""
INITRAMFS_IMAGE_BUNDLE ?= ""
python __anonymous () {
- kerneltype = d.getVar('KERNEL_IMAGETYPE', True) or ''
+ kerneltype = d.getVar('KERNEL_IMAGETYPE', True)
if kerneltype == 'uImage':
depends = d.getVar("DEPENDS", True)
depends = "%s u-boot-mkimage-native" % depends
@@ -31,6 +36,22 @@ python __anonymous () {
d.appendVarFlag('do_configure', 'depends', ' ${INITRAMFS_TASK}')
}
+# Old style kernels may set ${S} = ${WORKDIR}/git for example
+# We need to move these over to STAGING_KERNEL_DIR. We can't just
+# create the symlink in advance as the git fetcher can't cope with
+# the symlink.
+do_unpack[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B}"
+base_do_unpack_append () {
+ s = d.getVar("S", True)
+ kernsrc = d.getVar("STAGING_KERNEL_DIR", True)
+ if s != kernsrc:
+ bb.utils.mkdirhier(kernsrc)
+ bb.utils.remove(kernsrc, recurse=True)
+ import subprocess
+ subprocess.call(d.expand("mv ${S} ${STAGING_KERNEL_DIR}"), shell=True)
+ os.symlink(kernsrc, s)
+}
+
inherit kernel-arch deploy
PACKAGES_DYNAMIC += "^kernel-module-.*"
@@ -55,7 +76,7 @@ KERNEL_IMAGEDEST = "boot"
#
export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE",1) or "ttyS0"}"
-KERNEL_VERSION = "${@get_kernelversion('${B}')}"
+KERNEL_VERSION = "${@get_kernelversion_headers('${B}')}"
KERNEL_LOCALVERSION ?= ""
@@ -213,98 +234,36 @@ kernel_do_install() {
#
kerneldir=${D}${KERNEL_SRC_PATH}
install -d $kerneldir
+ mkdir -p ${D}/lib/modules/${KERNEL_VERSION}
+ ln -sf ${KERNEL_SRC_PATH} "${D}/lib/modules/${KERNEL_VERSION}/build"
#
# Store the kernel version in sysroots for module-base.bbclass
#
echo "${KERNEL_VERSION}" > $kerneldir/kernel-abiversion
-
- #
- # Store kernel image name to allow use during image generation
- #
-
- echo "${KERNEL_IMAGE_BASE_NAME}" >$kerneldir/kernel-image-name
-
- #
- # Copy the entire source tree. In case an external build directory is
- # used, copy the build directory over first, then copy over the source
- # dir. This ensures the original Makefiles are used and not the
- # redirecting Makefiles in the build directory.
- #
- find . -depth -not -name "*.cmd" -not -name "*.o" -not -name "*.so.dbg" -not -path "./Documentation*" -not -path "./source*" -not -path "./.*" -print0 | cpio --null -pdlu $kerneldir
- cp .config $kerneldir
- if [ "${S}" != "${B}" ]; then
- pwd="$PWD"
- cd "${S}"
- find . -depth -not -path "./Documentation*" -not -path "./.*" -print0 | cpio --null -pdlu $kerneldir
- cd "$pwd"
- fi
-
- # Test to ensure that the output file and image type are not actually
- # the same file. If hardlinking is used, they will be the same, and there's
- # no need to install.
- ! [ ${KERNEL_OUTPUT} -ef $kerneldir/${KERNEL_IMAGETYPE} ] && install -m 0644 ${KERNEL_OUTPUT} $kerneldir/${KERNEL_IMAGETYPE}
- install -m 0644 System.map $kerneldir/System.map-${KERNEL_VERSION}
-
- # Dummy Makefile so the clean below works
- mkdir $kerneldir/Documentation
- touch $kerneldir/Documentation/Makefile
-
- #
- # Clean and remove files not needed for building modules.
- # Some distributions go through a lot more trouble to strip out
- # unecessary headers, for now, we just prune the obvious bits.
- #
- # We don't want to leave host-arch binaries in /sysroots, so
- # we clean the scripts dir while leaving the generated config
- # and include files.
- #
- oe_runmake -C $kerneldir CC="${KERNEL_CC}" LD="${KERNEL_LD}" clean _mrproper_scripts
-
- # hide directories that shouldn't have their .c, s and S files deleted
- for d in tools scripts lib; do
- mv $kerneldir/$d $kerneldir/.$d
- done
-
- # delete .c, .s and .S files, unless we hid a directory as .<dir>. This technique is
- # much faster than find -prune and -exec
- find $kerneldir -not -path '*/\.*' -type f -name "*.[csS]" -delete
-
- # put the hidden dirs back
- for d in tools scripts lib; do
- mv $kerneldir/.$d $kerneldir/$d
- done
+
+ # Copy files required for module builds
+ cp System.map $kerneldir/System.map-${KERNEL_VERSION}
+ cp Module.symvers $kerneldir/
+ cp .config $kerneldir/
+ mkdir -p $kerneldir/include/config
+ cp include/config/kernel.release $kerneldir/include/config/kernel.release
# As of Linux kernel version 3.0.1, the clean target removes
# arch/powerpc/lib/crtsavres.o which is present in
# KBUILD_LDFLAGS_MODULE, making it required to build external modules.
if [ ${ARCH} = "powerpc" ]; then
- cp -l arch/powerpc/lib/crtsavres.o $kerneldir/arch/powerpc/lib/crtsavres.o
+ mkdir -p $kerneldir/arch/powerpc/lib/
+ cp arch/powerpc/lib/crtsavres.o $kerneldir/arch/powerpc/lib/crtsavres.o
fi
- # Necessary for building modules like compat-wireless.
- if [ -f include/generated/bounds.h ]; then
- cp -l include/generated/bounds.h $kerneldir/include/generated/bounds.h
- fi
+ mkdir -p $kerneldir/include/generated/
+ cp -fR include/generated/* $kerneldir/include/generated/
+
if [ -d arch/${ARCH}/include/generated ]; then
mkdir -p $kerneldir/arch/${ARCH}/include/generated/
- cp -flR arch/${ARCH}/include/generated/* $kerneldir/arch/${ARCH}/include/generated/
- fi
-
- # Remove the following binaries which cause strip or arch QA errors
- # during do_package for cross-compiled platforms
- bin_files="arch/powerpc/boot/addnote arch/powerpc/boot/hack-coff \
- arch/powerpc/boot/mktree scripts/kconfig/zconf.tab.o \
- scripts/kconfig/conf.o scripts/kconfig/kxgettext.o"
- for entry in $bin_files; do
- rm -f $kerneldir/$entry
- done
-
- # kernels <2.6.30 don't have $kerneldir/tools directory so we check if it exists before calling sed
- if [ -f $kerneldir/tools/perf/Makefile ]; then
- # Fix SLANG_INC for slang.h
- sed -i 's#-I/usr/include/slang#-I=/usr/include/slang#g' $kerneldir/tools/perf/Makefile
+ cp -fR arch/${ARCH}/include/generated/* $kerneldir/arch/${ARCH}/include/generated/
fi
}
do_install[prefuncs] += "package_get_auto_pr"
@@ -313,7 +272,7 @@ python sysroot_stage_all () {
oe.path.copyhardlinktree(d.expand("${D}${KERNEL_SRC_PATH}"), d.expand("${SYSROOT_DESTDIR}${KERNEL_SRC_PATH}"))
}
-KERNEL_CONFIG_COMMAND ?= "oe_runmake_call oldnoconfig || yes '' | oe_runmake oldconfig"
+KERNEL_CONFIG_COMMAND ?= "oe_runmake_call -C ${S} O=${B} oldnoconfig || yes '' | oe_runmake -C ${S} O=${B} oldconfig"
kernel_do_configure() {
# fixes extra + in /lib/modules/2.6.37+
@@ -322,6 +281,10 @@ kernel_do_configure() {
# $ make kernelrelease => 2.6.37+
touch ${B}/.scmversion ${S}/.scmversion
+ if [ "${S}" != "${B}" ] && [ -f "${S}/.config" ] && [ ! -f "${B}/.config" ]; then
+ mv "${S}/.config" "${B}/.config"
+ fi
+
# Copy defconfig to .config if .config does not exist. This allows
# recipes to manage the .config themselves in do_configure_prepend().
if [ -f "${WORKDIR}/defconfig" ] && [ ! -f "${B}/.config" ]; then
@@ -346,7 +309,7 @@ PACKAGES = "kernel kernel-base kernel-vmlinux kernel-image kernel-dev kernel-mod
FILES_${PN} = ""
FILES_kernel-base = "/lib/modules/${KERNEL_VERSION}/modules.order /lib/modules/${KERNEL_VERSION}/modules.builtin"
FILES_kernel-image = "/boot/${KERNEL_IMAGETYPE}*"
-FILES_kernel-dev = "/boot/System.map* /boot/Module.symvers* /boot/config* ${KERNEL_SRC_PATH}"
+FILES_kernel-dev = "/boot/System.map* /boot/Module.symvers* /boot/config* ${KERNEL_SRC_PATH} /lib/modules/${KERNEL_VERSION}/build"
FILES_kernel-vmlinux = "/boot/vmlinux*"
FILES_kernel-modules = ""
RDEPENDS_kernel = "kernel-base"
@@ -384,14 +347,12 @@ pkg_postrm_kernel-image () {
PACKAGESPLITFUNCS_prepend = "split_kernel_packages "
python split_kernel_packages () {
- do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.bin$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
- do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.fw$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
- do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.cis$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
+ do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.(bin|fw|cis|dsp)$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='')
}
do_strip() {
if [ -n "${KERNEL_IMAGE_STRIP_EXTRA_SECTIONS}" ]; then
- if [[ "${KERNEL_IMAGETYPE}" != "vmlinux" ]]; then
+ if [ "${KERNEL_IMAGETYPE}" != "vmlinux" ]; then
bbwarn "image type will not be stripped (not supported): ${KERNEL_IMAGETYPE}"
return
fi
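The base_do_unpack_append hunk above boils down to relocating the unpacked tree into the shared staging directory and leaving a symlink at the old ${S}. A rough sketch under assumed paths, outside of BitBake:

    import os
    import shutil

    def relocate_kernel_source(src_dir, staging_kernel_dir):
        """Move src_dir to staging_kernel_dir and symlink the old path to the new one."""
        if os.path.realpath(src_dir) == os.path.realpath(staging_kernel_dir):
            return  # already in place, nothing to do
        if os.path.lexists(staging_kernel_dir):
            shutil.rmtree(staging_kernel_dir, ignore_errors=True)
        os.makedirs(os.path.dirname(staging_kernel_dir) or ".", exist_ok=True)
        shutil.move(src_dir, staging_kernel_dir)
        # recipes still referring to the old ${S} keep working through the symlink
        os.symlink(staging_kernel_dir, src_dir)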
diff --git a/meta/classes/kernelsrc.bbclass b/meta/classes/kernelsrc.bbclass
new file mode 100644
index 0000000000..4208eff6fc
--- /dev/null
+++ b/meta/classes/kernelsrc.bbclass
@@ -0,0 +1,10 @@
+S = "${STAGING_KERNEL_DIR}"
+do_fetch[noexec] = "1"
+do_unpack[depends] += "virtual/kernel:do_patch"
+do_unpack[noexec] = "1"
+do_patch[noexec] = "1"
+do_package[depends] += "virtual/kernel:do_populate_sysroot"
+KERNEL_VERSION = "${@get_kernelversion_file("${S}")}"
+
+inherit linux-kernel-base
+
diff --git a/meta/classes/libc-common.bbclass b/meta/classes/libc-common.bbclass
index daf499d3eb..bbc80167dd 100644
--- a/meta/classes/libc-common.bbclass
+++ b/meta/classes/libc-common.bbclass
@@ -25,12 +25,19 @@ def get_libc_fpu_setting(bb, d):
python populate_packages_prepend () {
if d.getVar('DEBIAN_NAMES', True):
+ pkgs = d.getVar('PACKAGES', True).split()
bpn = d.getVar('BPN', True)
- d.setVar('PKG_'+bpn, 'libc6')
- d.setVar('PKG_'+bpn+'-dev', 'libc6-dev')
- d.setVar('PKG_'+bpn+'-dbg', 'libc6-dbg')
+ prefix = d.getVar('MLPREFIX', True) or ""
+ # Set the base package...
+ d.setVar('PKG_' + prefix + bpn, prefix + 'libc6')
+ libcprefix = prefix + bpn + '-'
+ for p in pkgs:
+ # And all the subpackages.
+ if p.startswith(libcprefix):
+ renamed = p.replace(bpn, 'libc6', 1)
+ d.setVar('PKG_' + p, renamed)
# For backward compatibility with old -dbg package
- d.appendVar('RPROVIDES_' + bpn + '-dbg', ' libc-dbg')
- d.appendVar('RCONFLICTS_' + bpn + '-dbg', ' libc-dbg')
- d.appendVar('RREPLACES_' + bpn + '-dbg', ' libc-dbg')
+ d.appendVar('RPROVIDES_' + libcprefix + 'dbg', ' ' + prefix + 'libc-dbg')
+ d.appendVar('RCONFLICTS_' + libcprefix + 'dbg', ' ' + prefix + 'libc-dbg')
+ d.appendVar('RREPLACES_' + libcprefix + 'dbg', ' ' + prefix + 'libc-dbg')
}
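The multilib-aware Debian renaming above can be read as a plain mapping from recipe package names to libc6-style names. A sketch with a hypothetical package list, not the class itself:

    def debian_rename_libc_packages(packages, bpn, mlprefix=""):
        """Map glibc subpackage names onto Debian-style libc6 names, multilib-aware."""
        renamed = {}
        libcprefix = mlprefix + bpn + "-"
        renamed[mlprefix + bpn] = mlprefix + "libc6"          # the base package
        for pkg in packages:
            if pkg.startswith(libcprefix):
                renamed[pkg] = pkg.replace(bpn, "libc6", 1)   # e.g. glibc-dev -> libc6-dev
        return renamed

    # Hypothetical usage:
    # debian_rename_libc_packages(["glibc", "glibc-dev", "glibc-dbg"], "glibc")
    # -> {"glibc": "libc6", "glibc-dev": "libc6-dev", "glibc-dbg": "libc6-dbg"}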
diff --git a/meta/classes/libc-package.bbclass b/meta/classes/libc-package.bbclass
index c1bc399c18..793936e10b 100644
--- a/meta/classes/libc-package.bbclass
+++ b/meta/classes/libc-package.bbclass
@@ -268,6 +268,7 @@ python package_do_split_gconvs () {
locale_arch_options = { \
"arm": " --uint32-align=4 --little-endian ", \
"armeb": " --uint32-align=4 --big-endian ", \
+ "aarch64": " --uint32-align=4 --little-endian ", \
"aarch64_be": " --uint32-align=4 --big-endian ", \
"sh4": " --uint32-align=4 --big-endian ", \
"powerpc": " --uint32-align=4 --big-endian ", \
@@ -298,9 +299,7 @@ python package_do_split_gconvs () {
--inputfile=%s/i18n/locales/%s --charmap=%s %s" \
% (treedir, datadir, locale, encoding, name)
- qemu_options = d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True)
- if not qemu_options:
- qemu_options = d.getVar('QEMU_OPTIONS', True)
+ qemu_options = d.getVar('QEMU_OPTIONS', True)
cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \
-E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass
index 601f5611cc..d659b767c5 100644
--- a/meta/classes/license.bbclass
+++ b/meta/classes/license.bbclass
@@ -49,24 +49,25 @@ license_create_manifest() {
pkged_pv="$(sed -n 's/^PV: //p' ${filename})"
pkged_name="$(basename $(readlink ${filename}))"
- pkged_lic="$(sed -n "/^LICENSE_${pkged_name}: /{ s/^LICENSE_${pkged_name}: //; s/[|&()*]/ /g; s/ */ /g; p }" ${filename})"
+ pkged_lic="$(sed -n "/^LICENSE_${pkged_name}: /{ s/^LICENSE_${pkged_name}: //; p }" ${filename})"
if [ -z ${pkged_lic} ]; then
# fallback checking value of LICENSE
- pkged_lic="$(sed -n "/^LICENSE: /{ s/^LICENSE: //; s/[|&()*]/ /g; s/ */ /g; p }" ${filename})"
+ pkged_lic="$(sed -n "/^LICENSE: /{ s/^LICENSE: //; p }" ${filename})"
fi
echo "PACKAGE NAME:" ${pkg} >> ${LICENSE_MANIFEST}
echo "PACKAGE VERSION:" ${pkged_pv} >> ${LICENSE_MANIFEST}
echo "RECIPE NAME:" ${pkged_pn} >> ${LICENSE_MANIFEST}
- printf "LICENSE:" >> ${LICENSE_MANIFEST}
- for lic in ${pkged_lic}; do
+ echo "LICENSE:" ${pkged_lic} >> ${LICENSE_MANIFEST}
+ echo "" >> ${LICENSE_MANIFEST}
+
+ lics="$(echo ${pkged_lic} | sed "s/[|&()*]/ /g" | sed "s/ */ /g" )"
+ for lic in ${lics}; do
# to reference a license file trim trailing + symbol
if ! [ -e "${LICENSE_DIRECTORY}/${pkged_pn}/generic_${lic%+}" ]; then
bbwarn "The license listed ${lic} was not in the licenses collected for ${pkged_pn}"
fi
- printf " ${lic}" >> ${LICENSE_MANIFEST}
done
- printf "\n\n" >> ${LICENSE_MANIFEST}
done
# Two options here:
@@ -145,9 +146,16 @@ def copy_license_files(lic_files_paths, destdir):
bb.utils.mkdirhier(destdir)
for (basename, path) in lic_files_paths:
try:
- ret = shutil.copyfile(path, os.path.join(destdir, basename))
+ src = path
+ dst = os.path.join(destdir, basename)
+ if os.path.exists(dst):
+ os.remove(dst)
+ if os.access(src, os.W_OK) and (os.stat(src).st_dev == os.stat(destdir).st_dev):
+ os.link(src, dst)
+ else:
+ shutil.copyfile(src, dst)
except Exception as e:
- bb.warn("Could not copy license file %s: %s" % (basename, e))
+ bb.warn("Could not copy license file %s to %s: %s" % (src, dst, e))
def find_license_files(d):
"""
@@ -267,9 +275,41 @@ def return_spdx(d, license):
def canonical_license(d, license):
"""
Return the canonical (SPDX) form of the license if available (so GPLv3
- becomes GPL-3.0), or the passed license if there is no canonical form.
+ becomes GPL-3.0); for a license named 'X+', return the canonical form of
+ 'X' if available plus the trailing '+' (so GPLv3+ becomes GPL-3.0+),
+ or the passed license if there is no canonical form.
+ """
+ lic = d.getVarFlag('SPDXLICENSEMAP', license, True) or ""
+ if not lic and license.endswith('+'):
+ lic = d.getVarFlag('SPDXLICENSEMAP', license.rstrip('+'), True)
+ if lic:
+ lic += '+'
+ return lic or license
+
+def expand_wildcard_licenses(d, wildcard_licenses):
"""
- return d.getVarFlag('SPDXLICENSEMAP', license, True) or license
+ Return actual SPDX-format license names when wildcards are used. We expand
+ wildcards from SPDXLICENSEMAP flags and SRC_DISTRIBUTE_LICENSES values.
+ """
+ import fnmatch
+ licenses = []
+ spdxmapkeys = d.getVarFlags('SPDXLICENSEMAP').keys()
+ for wld_lic in wildcard_licenses:
+ spdxflags = fnmatch.filter(spdxmapkeys, wld_lic)
+ licenses += [d.getVarFlag('SPDXLICENSEMAP', flag) for flag in spdxflags]
+
+ spdx_lics = (d.getVar('SRC_DISTRIBUTE_LICENSES') or '').split()
+ for wld_lic in wildcard_licenses:
+ licenses += fnmatch.filter(spdx_lics, wld_lic)
+
+ licenses = list(set(licenses))
+ return licenses
+
+def incompatible_license_contains(license, truevalue, falsevalue, d):
+ license = canonical_license(d, license)
+ bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split()
+ bad_licenses = expand_wildcard_licenses(d, bad_licenses)
+ return truevalue if license in bad_licenses else falsevalue
def incompatible_license(d, dont_want_licenses, package=None):
"""
@@ -369,6 +409,28 @@ def check_license_flags(d):
return unmatched_flag
return None
+def check_license_format(d):
+ """
+ This function checks that LICENSE is well defined and validates
+ the operators between licenses: license names separated only by
+ spaces, with no operator between them, are reported.
+ """
+ pn = d.getVar('PN', True)
+ licenses = d.getVar('LICENSE', True)
+ from oe.license import license_operator
+ from oe.license import license_pattern
+
+ elements = filter(lambda x: x.strip(), license_operator.split(licenses))
+ for pos, element in enumerate(elements):
+ if license_pattern.match(element):
+ if pos > 0 and license_pattern.match(elements[pos - 1]):
+ bb.warn("Recipe %s, LICENSE (%s) has invalid format, " \
+ "LICENSES must have operator \"%s\" between them." %
+ (pn, licenses, license_operator.pattern))
+ elif not license_operator.match(element):
+ bb.warn("Recipe %s, LICENSE (%s) has invalid operator (%s) not in" \
+ " \"%s\"." % (pn, licenses, element, license_operator.pattern))
+
SSTATETASKS += "do_populate_lic"
do_populate_lic[sstate-inputdirs] = "${LICSSTATEDIR}"
do_populate_lic[sstate-outputdirs] = "${LICENSE_DIRECTORY}/"
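The canonical_license and expand_wildcard_licenses helpers above map recipe license names to SPDX names and expand wildcard patterns. A self-contained sketch against a small, hypothetical SPDXLICENSEMAP-style mapping:

    import fnmatch

    # Hypothetical subset of an SPDXLICENSEMAP-style mapping
    SPDX_MAP = {"GPLv2": "GPL-2.0", "GPLv3": "GPL-3.0", "LGPLv2.1": "LGPL-2.1"}

    def canonical_license(license_name, spdx_map=SPDX_MAP):
        """Canonicalize one license, preserving a trailing '+' as the class does."""
        lic = spdx_map.get(license_name, "")
        if not lic and license_name.endswith("+"):
            base = spdx_map.get(license_name.rstrip("+"))
            if base:
                lic = base + "+"          # GPLv3+ -> GPL-3.0+
        return lic or license_name

    def expand_wildcard_licenses(wildcards, spdx_map=SPDX_MAP, known_spdx=()):
        """Expand patterns such as 'GPLv*' against the map keys and known SPDX names."""
        licenses = []
        for pattern in wildcards:
            licenses += [spdx_map[k] for k in fnmatch.filter(spdx_map.keys(), pattern)]
            licenses += fnmatch.filter(known_spdx, pattern)
        return sorted(set(licenses))

    # expand_wildcard_licenses(["GPLv*"]) -> ['GPL-2.0', 'GPL-3.0']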
diff --git a/meta/classes/linux-kernel-base.bbclass b/meta/classes/linux-kernel-base.bbclass
index 4f2b0a4a98..89ce71605c 100644
--- a/meta/classes/linux-kernel-base.bbclass
+++ b/meta/classes/linux-kernel-base.bbclass
@@ -1,5 +1,5 @@
# parse kernel ABI version out of <linux/version.h>
-def get_kernelversion(p):
+def get_kernelversion_headers(p):
import re
fn = p + '/include/linux/utsrelease.h'
@@ -9,7 +9,6 @@ def get_kernelversion(p):
if not os.path.isfile(fn):
fn = p + '/include/linux/version.h'
- import re
try:
f = open(fn, 'r')
except IOError:
@@ -24,6 +23,16 @@ def get_kernelversion(p):
return m.group(1)
return None
+
+def get_kernelversion_file(p):
+ fn = p + '/kernel-abiversion'
+
+ try:
+ with open(fn, 'r') as f:
+ return f.readlines()[0].strip()
+ except IOError:
+ return None
+
def linux_module_packages(s, d):
suffix = ""
return " ".join(map(lambda s: "kernel-module-%s%s" % (s.lower().replace('_', '-').replace('@', '+'), suffix), s.split()))
diff --git a/meta/classes/metadata_scm.bbclass b/meta/classes/metadata_scm.bbclass
index ba0edf9486..237e61821d 100644
--- a/meta/classes/metadata_scm.bbclass
+++ b/meta/classes/metadata_scm.bbclass
@@ -60,7 +60,7 @@ def base_get_metadata_svn_revision(path, d):
try:
with open("%s/.svn/entries" % path) as f:
revision = f.readlines()[3].strip()
- except IOError, IndexError:
+ except (IOError, IndexError):
pass
return revision
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index 6e143dd70d..eea2fd59a1 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -21,6 +21,9 @@ python multilib_virtclass_handler () {
if bb.data.inherits_class('image', e.data):
e.data.setVar("MLPREFIX", variant + "-")
e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False))
+ target_vendor = e.data.getVar("TARGET_VENDOR_" + "virtclass-multilib-" + variant, False)
+ if target_vendor:
+ e.data.setVar("TARGET_VENDOR", target_vendor)
return
if bb.data.inherits_class('cross-canadian', e.data):
@@ -60,6 +63,7 @@ python multilib_virtclass_handler () {
newtune = e.data.getVar("DEFAULTTUNE_" + "virtclass-multilib-" + variant, False)
if newtune:
e.data.setVar("DEFAULTTUNE", newtune)
+ e.data.setVar('DEFAULTTUNE_ML_%s' % variant, newtune)
}
addhandler multilib_virtclass_handler
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass
index 9dec318aa8..4acc936ccd 100644
--- a/meta/classes/native.bbclass
+++ b/meta/classes/native.bbclass
@@ -61,16 +61,17 @@ PTEST_ENABLED = "0"
export CONFIG_SITE = "${COREBASE}/meta/site/native"
# set the compiler as well. It could have been set to something else
-export CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_ARCH}"
-export CXX = "${CCACHE}${HOST_PREFIX}g++ ${HOST_CC_ARCH}"
-export FC = "${CCACHE}${HOST_PREFIX}gfortran ${HOST_CC_ARCH}"
-export CPP = "${HOST_PREFIX}gcc ${HOST_CC_ARCH} -E"
-export LD = "${HOST_PREFIX}ld ${HOST_LD_ARCH} "
-export CCLD = "${CC}"
-export AR = "${HOST_PREFIX}ar"
-export AS = "${HOST_PREFIX}as ${HOST_AS_ARCH}"
-export RANLIB = "${HOST_PREFIX}ranlib"
-export STRIP = "${HOST_PREFIX}strip"
+export CC = "${BUILD_CC}"
+export CXX = "${BUILD_CXX}"
+export FC = "${BUILD_FC}"
+export CPP = "${BUILD_CPP}"
+export LD = "${BUILD_LD}"
+export CCLD = "${BUILD_CCLD}"
+export AR = "${BUILD_AR}"
+export AS = "${BUILD_AS}"
+export RANLIB = "${BUILD_RANLIB}"
+export STRIP = "${BUILD_STRIP}"
+export NM = "${BUILD_NM}"
# Path prefixes
base_prefix = "${STAGING_DIR_NATIVE}"
@@ -109,6 +110,7 @@ PKG_CONFIG_SYSROOT_DIR = ""
# we dont want libc-uclibc or libc-glibc to kick in for native recipes
LIBCOVERRIDE = ""
CLASSOVERRIDE = "class-native"
+MACHINEOVERRIDES = ""
PATH_prepend = "${COREBASE}/scripts/native-intercept:"
@@ -162,6 +164,7 @@ native_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise"
deltask package
deltask packagedata
+deltask package_qa
deltask package_write_ipk
deltask package_write_deb
deltask package_write_rpm
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes/nativesdk.bbclass
index 37c9f79598..5e78116ab8 100644
--- a/meta/classes/nativesdk.bbclass
+++ b/meta/classes/nativesdk.bbclass
@@ -8,6 +8,7 @@ STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${SDK_ARCH}${S
NATIVESDKLIBC ?= "libc-glibc"
LIBCOVERRIDE = ":${NATIVESDKLIBC}"
CLASSOVERRIDE = "class-nativesdk"
+MACHINEOVERRIDES = ""
#
# Update PACKAGE_ARCH and PACKAGE_ARCHS
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 6a552d904e..fc501fcdea 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -208,16 +208,18 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst
else:
the_files.append(aux_files_pattern_verbatim % m.group(1))
d.setVar('FILES_' + pkg, " ".join(the_files))
- if extra_depends != '':
- d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
- d.setVar('DESCRIPTION_' + pkg, description % on)
- d.setVar('SUMMARY_' + pkg, summary % on)
- if postinst:
- d.setVar('pkg_postinst_' + pkg, postinst)
- if postrm:
- d.setVar('pkg_postrm_' + pkg, postrm)
else:
d.setVar('FILES_' + pkg, oldfiles + " " + newfile)
+ if extra_depends != '':
+ d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends)
+ if not d.getVar('DESCRIPTION_' + pkg, True):
+ d.setVar('DESCRIPTION_' + pkg, description % on)
+ if not d.getVar('SUMMARY_' + pkg, True):
+ d.setVar('SUMMARY_' + pkg, summary % on)
+ if postinst:
+ d.setVar('pkg_postinst_' + pkg, postinst)
+ if postrm:
+ d.setVar('pkg_postrm_' + pkg, postrm)
if callable(hook):
hook(f, pkg, file_regex, output_pattern, m.group(1))
@@ -374,6 +376,9 @@ def get_package_additional_metadata (pkg_type, d):
def runtime_mapping_rename (varname, pkg, d):
#bb.note("%s before: %s" % (varname, d.getVar(varname, True)))
+ if bb.data.inherits_class('packagegroup', d):
+ return
+
new_depends = {}
deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "")
for depend in deps:
@@ -389,28 +394,52 @@ def runtime_mapping_rename (varname, pkg, d):
#
python package_get_auto_pr() {
- # per recipe PRSERV_HOST
+ import oe.prservice
+ import re
+
+ # Support per recipe PRSERV_HOST
pn = d.getVar('PN', True)
host = d.getVar("PRSERV_HOST_" + pn, True)
if not (host is None):
d.setVar("PRSERV_HOST", host)
- if d.getVar('PRSERV_HOST', True):
- try:
- auto_pr=prserv_get_pr_auto(d)
- except Exception as e:
- bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
- if auto_pr is None:
- if d.getVar('PRSERV_LOCKDOWN', True):
- bb.fatal("Can NOT get PRAUTO from lockdown exported file")
- else:
- bb.fatal("Can NOT get PRAUTO from remote PR service")
- return
- d.setVar('PRAUTO',str(auto_pr))
- else:
+ # PR Server not active, handle AUTOINC
+ if not d.getVar('PRSERV_HOST', True):
pkgv = d.getVar("PKGV", True)
if 'AUTOINC' in pkgv:
d.setVar("PKGV", pkgv.replace("AUTOINC", "0"))
+ return
+
+ auto_pr = None
+ pv = d.getVar("PV", True)
+ version = d.getVar("PRAUTOINX", True)
+ pkgarch = d.getVar("PACKAGE_ARCH", True)
+ checksum = d.getVar("BB_TASKHASH", True)
+
+ if d.getVar('PRSERV_LOCKDOWN', True):
+ auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, True) or None
+ if auto_pr is None:
+ bb.fatal("Can NOT get PRAUTO from lockdown exported file")
+ d.setVar('PRAUTO',str(auto_pr))
+ return
+
+ try:
+ conn = d.getVar("__PRSERV_CONN", True)
+ if conn is None:
+ conn = oe.prservice.prserv_make_conn(d)
+ if conn is not None:
+ if "AUTOINC" in pv:
+ srcpv = bb.fetch2.get_srcrev(d)
+ base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
+ value = conn.getPR(base_ver, pkgarch, srcpv)
+ d.setVar("PKGV", pv.replace("AUTOINC", str(value)))
+
+ auto_pr = conn.getPR(version, pkgarch, checksum)
+ except Exception as e:
+ bb.fatal("Can NOT get PRAUTO, exception %s" % str(e))
+ if auto_pr is None:
+ bb.fatal("Can NOT get PRAUTO from remote PR service")
+ d.setVar('PRAUTO',str(auto_pr))
}
LOCALEBASEPN ??= "${PN}"
@@ -928,13 +957,7 @@ python split_and_strip_files () {
for f in kernmods:
sfiles.append((f, 16, strip))
-
- import multiprocessing
- nproc = multiprocessing.cpu_count()
- pool = bb.utils.multiprocessingpool(nproc)
- processed = list(pool.imap(oe.package.runstrip, sfiles))
- pool.close()
- pool.join()
+ oe.utils.multiprocess_exec(sfiles, oe.package.runstrip)
#
# End of strip
@@ -1244,10 +1267,6 @@ python emit_pkgdata() {
sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size))
sf.close()
- # Symlinks needed for reverse lookups (from the final package name)
- subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
- oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)
-
# Symlinks needed for rprovides lookup
if rprov:
for p in rprov.strip().split():
@@ -1262,6 +1281,10 @@ python emit_pkgdata() {
os.chdir(root)
g = glob('*')
if g or allow_empty == "1":
+ # Symlinks needed for reverse lookups (from the final package name)
+ subdata_sym = pkgdatadir + "/runtime-reverse/%s" % pkgval
+ oe.path.symlink("../runtime/%s" % pkg, subdata_sym, True)
+
packagedfile = pkgdatadir + '/runtime/%s.packaged' % pkg
open(packagedfile, 'w').close()
@@ -1311,12 +1334,7 @@ python package_do_filedeps() {
for files in chunks(pkgfiles[pkg], 100):
pkglist.append((pkg, files, rpmdeps, pkgdest))
- import multiprocessing
- nproc = multiprocessing.cpu_count()
- pool = bb.utils.multiprocessingpool(nproc)
- processed = list(pool.imap(oe.package.filedeprunner, pkglist))
- pool.close()
- pool.join()
+ processed = oe.utils.multiprocess_exec( pkglist, oe.package.filedeprunner)
provides_files = {}
requires_files = {}
@@ -1373,32 +1391,11 @@ python package_do_shlibs() {
pkgdest = d.getVar('PKGDEST', True)
- shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
shlibswork_dir = d.getVar('SHLIBSWORKDIR', True)
# Take shared lock since we're only reading, not writing
lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"))
- def read_shlib_providers():
- list_re = re.compile('^(.*)\.list$')
- # Go from least to most specific since the last one found wins
- for dir in reversed(shlibs_dirs):
- bb.debug(2, "Reading shlib providers in %s" % (dir))
- if not os.path.exists(dir):
- continue
- for file in os.listdir(dir):
- m = list_re.match(file)
- if m:
- dep_pkg = m.group(1)
- fd = open(os.path.join(dir, file))
- lines = fd.readlines()
- fd.close()
- for l in lines:
- s = l.strip().split(":")
- if s[0] not in shlib_provider:
- shlib_provider[s[0]] = {}
- shlib_provider[s[0]][s[1]] = (dep_pkg, s[2])
-
def linux_so(file, needed, sonames, renames, pkgver):
needs_ldconfig = False
ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '')
@@ -1495,8 +1492,7 @@ python package_do_shlibs() {
use_ldconfig = False
needed = {}
- shlib_provider = {}
- read_shlib_providers()
+ shlib_provider = oe.package.read_shlib_providers(d)
for pkg in packages.split():
private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or ""
@@ -1562,6 +1558,8 @@ python package_do_shlibs() {
if len(dep_pkg) == 2:
lib_ver = dep_pkg[1]
dep_pkg = dep_pkg[0]
+ if l not in shlib_provider:
+ shlib_provider[l] = {}
shlib_provider[l][libdir] = (dep_pkg, lib_ver)
libsearchpath = [d.getVar('libdir', True), d.getVar('base_libdir', True)]
@@ -1580,8 +1578,11 @@ python package_do_shlibs() {
bb.debug(2, '%s: Dependency %s covered by PRIVATE_LIBS' % (pkg, n[0]))
continue
if n[0] in shlib_provider.keys():
+ shlib_provider_path = list()
+ for k in shlib_provider[n[0]].keys():
+ shlib_provider_path.append(k)
match = None
- for p in n[2] + libsearchpath:
+ for p in n[2] + shlib_provider_path + libsearchpath:
if p in shlib_provider[n[0]]:
match = p
break
@@ -1938,9 +1939,9 @@ python do_package () {
# Optimisations
###########################################################################
- # Contunually rexpanding complex expressions is inefficient, particularly when
- # we write to the datastore and invalidate the expansion cache. This code
- # pre-expands some frequently used variables
+ # Continually expanding complex expressions is inefficient, particularly
+ # when we write to the datastore and invalidate the expansion cache. This
+ # code pre-expands some frequently used variables
def expandVar(x, d):
d.setVar(x, d.getVar(x, True))
@@ -2029,7 +2030,4 @@ def mapping_rename_hook(d):
runtime_mapping_rename("RDEPENDS", pkg, d)
runtime_mapping_rename("RRECOMMENDS", pkg, d)
runtime_mapping_rename("RSUGGESTS", pkg, d)
- runtime_mapping_rename("RPROVIDES", pkg, d)
- runtime_mapping_rename("RREPLACES", pkg, d)
- runtime_mapping_rename("RCONFLICTS", pkg, d)
diff --git a/meta/classes/package_deb.bbclass b/meta/classes/package_deb.bbclass
index 7bc29df165..5b5f7e2c9a 100644
--- a/meta/classes/package_deb.bbclass
+++ b/meta/classes/package_deb.bbclass
@@ -239,13 +239,26 @@ python do_package_deb () {
scriptvar = localdata.getVar('pkg_%s' % script, True)
if not scriptvar:
continue
+ scriptvar = scriptvar.strip()
try:
scriptfile = open(os.path.join(controldir, script), 'w')
except OSError:
bb.utils.unlockfile(lf)
raise bb.build.FuncFailed("unable to open %s script file for writing." % script)
- scriptfile.write("#!/bin/sh\n")
- scriptfile.write(scriptvar)
+
+ if scriptvar.startswith("#!"):
+ pos = scriptvar.find("\n") + 1
+ scriptfile.write(scriptvar[:pos])
+ else:
+ pos = 0
+ scriptfile.write("#!/bin/sh\n")
+
+ # Prevent the prerm/postrm scripts from being run during an upgrade
+ if script in ('prerm', 'postrm'):
+ scriptfile.write('[ "$1" != "upgrade" ] || exit 0\n')
+
+ scriptfile.write(scriptvar[pos:])
+ scriptfile.write('\n')
scriptfile.close()
os.chmod(os.path.join(controldir, script), 0755)
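The maintainer-script change above keeps any shebang a recipe supplies and guards prerm/postrm against running during upgrades. A standalone sketch of that logic with a hypothetical output path:

    import os

    def write_maintainer_script(path, scriptvar, script_name):
        """Write a Debian maintainer script, preserving a recipe-provided shebang."""
        scriptvar = scriptvar.strip()
        with open(path, "w") as f:
            if scriptvar.startswith("#!"):
                pos = scriptvar.find("\n") + 1
                f.write(scriptvar[:pos])       # reuse the recipe's interpreter line
            else:
                pos = 0
                f.write("#!/bin/sh\n")
            if script_name in ("prerm", "postrm"):
                # dpkg passes "upgrade" as $1; bail out so upgrades skip removal hooks
                f.write('[ "$1" != "upgrade" ] || exit 0\n')
            f.write(scriptvar[pos:])
            f.write("\n")
        os.chmod(path, 0o755)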
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index 9586e06b2e..44fd3eb29c 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -207,7 +207,7 @@ python do_package_ipk () {
ctrlfile.write("Replaces: %s\n" % bb.utils.join_deps(rreplaces))
if rconflicts:
ctrlfile.write("Conflicts: %s\n" % bb.utils.join_deps(rconflicts))
- src_uri = localdata.getVar("SRC_URI", True) or "None"
+ src_uri = localdata.getVar("SRC_URI", True).strip() or "None"
if src_uri:
src_uri = re.sub("\s+", " ", src_uri)
ctrlfile.write("Source: %s\n" % " ".join(src_uri.split()))
diff --git a/meta/classes/package_rpm.bbclass b/meta/classes/package_rpm.bbclass
index 0a32b3e5dc..92ddf7a30f 100644
--- a/meta/classes/package_rpm.bbclass
+++ b/meta/classes/package_rpm.bbclass
@@ -185,7 +185,7 @@ python write_specfile () {
if not len(depends_dict[dep]):
array.append("%s: %s" % (tag, dep))
- def walk_files(walkpath, target, conffiles):
+ def walk_files(walkpath, target, conffiles, dirfiles):
# We can race against the ipk/deb backends which create CONTROL or DEBIAN directories
# when packaging. We just ignore these files which are created in
# packages-split/ and not package/
@@ -196,11 +196,24 @@ python write_specfile () {
path = rootpath.replace(walkpath, "")
if path.endswith("DEBIAN") or path.endswith("CONTROL"):
continue
- for dir in dirs:
- if dir == "CONTROL" or dir == "DEBIAN":
- continue
- # All packages own the directories their files are in...
- target.append('%dir "' + path + '/' + dir + '"')
+
+ # Directory handling can happen in two ways: either DIRFILES is not set at all,
+ # in which case we fall back to the older behaviour of packages owning all their
+ # directories,
+ if dirfiles is None:
+ for dir in dirs:
+ if dir == "CONTROL" or dir == "DEBIAN":
+ continue
+ # All packages own the directories their files are in...
+ target.append('%dir "' + path + '/' + dir + '"')
+ else:
+ # or packages own only empty directories and explicitly listed directories.
+ # This prevents overlapping directory ownership and conflicting permissions.
+ if path and not files and not dirs:
+ target.append('%dir "' + path + '"')
+ elif path and path in dirfiles:
+ target.append('%dir "' + path + '"')
+
for file in files:
if file == "CONTROL" or file == "DEBIAN":
continue
@@ -293,6 +306,7 @@ python write_specfile () {
spec_files_bottom = []
perfiledeps = (d.getVar("MERGEPERFILEDEPS", True) or "0") == "0"
+ extra_pkgdata = (d.getVar("RPM_EXTRA_PKGDATA", True) or "0") == "1"
for pkg in packages.split():
localdata = bb.data.createCopy(d)
@@ -311,6 +325,9 @@ python write_specfile () {
bb.data.update_data(localdata)
conffiles = (localdata.getVar('CONFFILES', True) or "").split()
+ dirfiles = localdata.getVar('DIRFILES', True)
+ if dirfiles is not None:
+ dirfiles = dirfiles.split()
splitname = strip_multilib(pkgname, d)
@@ -367,12 +384,14 @@ python write_specfile () {
srcrpostrm = splitrpostrm
file_list = []
- walk_files(root, file_list, conffiles)
+ walk_files(root, file_list, conffiles, dirfiles)
if not file_list and localdata.getVar('ALLOW_EMPTY') != "1":
bb.note("Not creating empty RPM package for %s" % splitname)
else:
bb.note("Creating RPM package for %s" % splitname)
spec_files_top.append('%files')
+ if extra_pkgdata:
+ package_rpm_extra_pkgdata(splitname, spec_files_top, localdata)
spec_files_top.append('%defattr(-,-,-,-)')
if file_list:
bb.note("Creating RPM package for %s" % splitname)
@@ -474,11 +493,13 @@ python write_specfile () {
# Now process files
file_list = []
- walk_files(root, file_list, conffiles)
+ walk_files(root, file_list, conffiles, dirfiles)
if not file_list and localdata.getVar('ALLOW_EMPTY') != "1":
bb.note("Not creating empty RPM package for %s" % splitname)
else:
spec_files_bottom.append('%%files -n %s' % splitname)
+ if extra_pkgdata:
+ package_rpm_extra_pkgdata(splitname, spec_files_bottom, localdata)
spec_files_bottom.append('%defattr(-,-,-,-)')
if file_list:
bb.note("Creating RPM package for %s" % splitname)
@@ -669,6 +690,7 @@ python do_package_rpm () {
cmd = rpmbuild
cmd = cmd + " --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd
cmd = cmd + " --define '_topdir " + workdir + "' --define '_rpmdir " + pkgwritedir + "'"
+ cmd = cmd + " --define '_builddir " + d.getVar('S', True) + "'"
cmd = cmd + " --define '_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm'"
cmd = cmd + " --define '_use_internal_dependency_generator 0'"
if perfiledeps:
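The walk_files/DIRFILES change above decides which directories a package claims in its RPM %files section. A sketch of that decision outside the spec-writing code:

    import os

    def collect_spec_entries(walkpath, dirfiles=None):
        """Collect %dir and file entries for an RPM spec, honouring an optional DIRFILES list."""
        entries = []
        for rootpath, dirs, files in os.walk(walkpath):
            path = rootpath.replace(walkpath, "")
            if dirfiles is None:
                # old behaviour: the package owns every directory it populates
                for d in dirs:
                    entries.append('%dir "' + path + '/' + d + '"')
            else:
                if path and not files and not dirs:
                    entries.append('%dir "' + path + '"')      # empty directory
                elif path and path in dirfiles:
                    entries.append('%dir "' + path + '"')      # explicitly requested
            for f in files:
                entries.append('"' + path + '/' + f + '"')
        return entries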
diff --git a/meta/classes/packagegroup.bbclass b/meta/classes/packagegroup.bbclass
index 6606bc6f71..56cfead82a 100644
--- a/meta/classes/packagegroup.bbclass
+++ b/meta/classes/packagegroup.bbclass
@@ -9,7 +9,12 @@ PACKAGES = "${PN}"
# By default, packagegroup packages do not depend on a certain architecture.
# Only if dependencies are modified by MACHINE_FEATURES, packages
# need to be set to MACHINE_ARCH after inheriting packagegroup.bbclass
-inherit allarch
+PACKAGE_ARCH ?= "all"
+
+# Fully expanded - so it applies the overrides as well
+PACKAGE_ARCH_EXPANDED := "${PACKAGE_ARCH}"
+
+inherit ${@oe.utils.ifelse(d.getVar('PACKAGE_ARCH_EXPANDED', True) == 'all', 'allarch', '')}
# This automatically adds -dbg and -dev flavours of all PACKAGES
# to the list. Their dependencies (RRECOMMENDS) are handled as usual
diff --git a/meta/classes/populate_sdk_base.bbclass b/meta/classes/populate_sdk_base.bbclass
index 9e3bd61925..de72e32ed8 100644
--- a/meta/classes/populate_sdk_base.bbclass
+++ b/meta/classes/populate_sdk_base.bbclass
@@ -51,6 +51,19 @@ PID = "${@os.getpid()}"
EXCLUDE_FROM_WORLD = "1"
SDK_PACKAGING_FUNC ?= "create_shar"
+SDK_POST_INSTALL_COMMAND ?= ""
+
+SDK_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.manifest"
+python write_target_sdk_manifest () {
+ from oe.sdk import sdk_list_installed_packages
+ sdkmanifestdir = os.path.dirname(d.getVar("SDK_MANIFEST", True))
+ if not os.path.exists(sdkmanifestdir):
+ bb.utils.mkdirhier(sdkmanifestdir)
+ with open(d.getVar('SDK_MANIFEST', True), 'w') as output:
+ output.write(sdk_list_installed_packages(d, True, 'ver'))
+}
+
+POPULATE_SDK_POST_TARGET_COMMAND_append = " write_target_sdk_manifest ; "
fakeroot python do_populate_sdk() {
from oe.sdk import populate_sdk
@@ -60,6 +73,13 @@ fakeroot python do_populate_sdk() {
runtime_mapping_rename("TOOLCHAIN_TARGET_TASK", pn, d)
runtime_mapping_rename("TOOLCHAIN_TARGET_TASK_ATTEMPTONLY", pn, d)
+ ld = bb.data.createCopy(d)
+ ld.setVar("PKGDATA_DIR", "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}/pkgdata")
+ runtime_mapping_rename("TOOLCHAIN_HOST_TASK", pn, ld)
+ runtime_mapping_rename("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", pn, ld)
+ d.setVar("TOOLCHAIN_HOST_TASK", ld.getVar("TOOLCHAIN_HOST_TASK", True))
+ d.setVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", ld.getVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", True))
+
# create target/host SDK manifests
create_manifest(d, manifest_dir=d.getVar('SDK_DIR', True),
manifest_type=Manifest.MANIFEST_TYPE_SDK_HOST)
@@ -68,25 +88,8 @@ fakeroot python do_populate_sdk() {
populate_sdk(d)
- # Handle multilibs in the SDK environment, siteconfig, etc files...
- localdata = bb.data.createCopy(d)
-
- # make sure we only use the WORKDIR value from 'd', or it can change
- localdata.setVar('WORKDIR', d.getVar('WORKDIR', True))
-
- # make sure we only use the SDKTARGETSYSROOT value from 'd'
- localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT', True))
-
# Process DEFAULTTUNE
- bb.build.exec_func("create_sdk_files", localdata)
-
- variants = d.getVar("MULTILIB_VARIANTS", True) or ""
- for item in variants.split():
- # Load overrides from 'd' to avoid having to reset the value...
- overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
- localdata.setVar("OVERRIDES", overrides)
- bb.data.update_data(localdata)
- bb.build.exec_func("create_sdk_files", localdata)
+ bb.build.exec_func("create_sdk_files", d)
bb.build.exec_func("tar_sdk", d)
@@ -94,14 +97,6 @@ fakeroot python do_populate_sdk() {
}
fakeroot create_sdk_files() {
- # Setup site file for external use
- toolchain_create_sdk_siteconfig ${SDK_OUTPUT}/${SDKPATH}/site-config-${REAL_MULTIMACH_TARGET_SYS}
-
- toolchain_create_sdk_env_script ${SDK_OUTPUT}/${SDKPATH}/environment-setup-${REAL_MULTIMACH_TARGET_SYS}
-
- # Add version information
- toolchain_create_sdk_version ${SDK_OUTPUT}/${SDKPATH}/version-${REAL_MULTIMACH_TARGET_SYS}
-
cp ${COREBASE}/scripts/relocate_sdk.py ${SDK_OUTPUT}/${SDKPATH}/
# Replace the ##DEFAULT_INSTALL_DIR## with the correct pattern.
@@ -123,10 +118,16 @@ fakeroot create_shar() {
# copy in the template shar extractor script
cp ${COREBASE}/meta/files/toolchain-shar-template.sh ${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh
+ cat << "EOF" > ${T}/post_install_command
+${SDK_POST_INSTALL_COMMAND}
+EOF
+ sed -i -e '/@SDK_POST_INSTALL_COMMAND@/r ${T}/post_install_command' ${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh
+
# substitute variables
sed -i -e 's#@SDK_ARCH@#${SDK_ARCH}#g' \
-e 's#@SDKPATH@#${SDKPATH}#g' \
-e 's#@REAL_MULTIMACH_TARGET_SYS@#${REAL_MULTIMACH_TARGET_SYS}#g' \
+ -e '/@SDK_POST_INSTALL_COMMAND@/d' \
${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh
# add execution permission
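The write_target_sdk_manifest function added above records what was installed into the SDK image. A rough sketch of the same idea, with installed_packages assumed to be (name, version) pairs rather than the oe.sdk helper's output:

    import os

    def write_sdk_manifest(manifest_path, installed_packages):
        """Write a '<name> <version>' line per package installed into the SDK."""
        os.makedirs(os.path.dirname(manifest_path), exist_ok=True)
        with open(manifest_path, "w") as output:
            for name, version in sorted(installed_packages):
                output.write("%s %s\n" % (name, version))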
diff --git a/meta/classes/prserv.bbclass b/meta/classes/prserv.bbclass
index b440d863ef..139597f9cb 100644
--- a/meta/classes/prserv.bbclass
+++ b/meta/classes/prserv.bbclass
@@ -1,33 +1,2 @@
-def prserv_get_pr_auto(d):
- import oe.prservice
- import re
- pv = d.getVar("PV", True)
- if not d.getVar('PRSERV_HOST', True):
- if 'AUTOINC' in pv:
- d.setVar("PKGV", pv.replace("AUTOINC", "0"))
- bb.warn("Not using network based PR service")
- return None
- version = d.getVar("PRAUTOINX", True)
- pkgarch = d.getVar("PACKAGE_ARCH", True)
- checksum = d.getVar("BB_TASKHASH", True)
-
- conn = d.getVar("__PRSERV_CONN", True)
- if conn is None:
- conn = oe.prservice.prserv_make_conn(d)
- if conn is None:
- return None
-
- if "AUTOINC" in pv:
- srcpv = bb.fetch2.get_srcrev(d)
- base_ver = "AUTOINC-%s" % version[:version.find(srcpv)]
- value = conn.getPR(base_ver, pkgarch, srcpv)
- d.setVar("PKGV", pv.replace("AUTOINC", str(value)))
-
- if d.getVar('PRSERV_LOCKDOWN', True):
- auto_rev = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, True) or None
- else:
- auto_rev = conn.getPR(version, pkgarch, checksum)
-
- return auto_rev
diff --git a/meta/classes/pythonnative.bbclass b/meta/classes/pythonnative.bbclass
index fdd22bbc86..97029dc525 100644
--- a/meta/classes/pythonnative.bbclass
+++ b/meta/classes/pythonnative.bbclass
@@ -2,5 +2,7 @@
inherit python-dir
PYTHON="${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN}"
+# PYTHON_EXECUTABLE is used by cmake
+PYTHON_EXECUTABLE="${PYTHON}"
EXTRANATIVEPATH += "${PYTHON_PN}-native"
DEPENDS += " ${PYTHON_PN}-native "
diff --git a/meta/classes/qemu.bbclass b/meta/classes/qemu.bbclass
index abee8aaa7c..601f587534 100644
--- a/meta/classes/qemu.bbclass
+++ b/meta/classes/qemu.bbclass
@@ -29,19 +29,23 @@ def qemu_run_binary(data, rootfs_path, binary):
libdir = rootfs_path + data.getVar("libdir", False)
base_libdir = rootfs_path + data.getVar("base_libdir", False)
+ qemu_options = data.getVar("QEMU_OPTIONS", True)
- return "PSEUDO_UNLOAD=1 " + qemu_binary + " -L " + rootfs_path\
+ return "PSEUDO_UNLOAD=1 " + qemu_binary + " " + qemu_options + " -L " + rootfs_path\
+ " -E LD_LIBRARY_PATH=" + libdir + ":" + base_libdir + " "\
+ rootfs_path + binary
-QEMU_OPTIONS = ""
-QEMU_OPTIONS_iwmmxt = "-cpu pxa270-c5"
-QEMU_OPTIONS_armv6 = "-cpu arm1136"
-QEMU_OPTIONS_armv7a = "-cpu cortex-a8"
-QEMU_OPTIONS_e500v2 = "-cpu e500v2"
-QEMU_OPTIONS_e500mc = "-cpu e500mc"
-QEMU_OPTIONS_e5500 = "-cpu e5500"
-QEMU_OPTIONS_e5500-64b = "-cpu e5500"
-QEMU_OPTIONS_e6500 = "-cpu e6500"
-QEMU_OPTIONS_e6500-64b = "-cpu e6500"
-QEMU_OPTIONS_ppc7400 = "-cpu 7400"
+# QEMU_EXTRAOPTIONS is not meant to be used directly; the suffixes are
+# PACKAGE_ARCH values, not overrides, hence this lookup dance. Simply being
+# arch specific isn't good enough.
+QEMU_OPTIONS = "-r ${OLDEST_KERNEL} ${@d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True) or ""}"
+QEMU_EXTRAOPTIONS_iwmmxt = " -cpu pxa270-c5"
+QEMU_EXTRAOPTIONS_armv6 = " -cpu arm1136"
+QEMU_EXTRAOPTIONS_armv7a = " -cpu cortex-a8"
+QEMU_EXTRAOPTIONS_e500v2 = " -cpu e500v2"
+QEMU_EXTRAOPTIONS_e500mc = " -cpu e500mc"
+QEMU_EXTRAOPTIONS_e5500 = " -cpu e5500"
+QEMU_EXTRAOPTIONS_e5500-64b = " -cpu e5500"
+QEMU_EXTRAOPTIONS_e6500 = " -cpu e6500"
+QEMU_EXTRAOPTIONS_e6500-64b = " -cpu e6500"
+QEMU_EXTRAOPTIONS_ppc7400 = " -cpu 7400"
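The new QEMU_OPTIONS expression above performs a keyed lookup on PACKAGE_ARCH rather than relying on overrides. A small sketch of that lookup, where extra_options stands in for the QEMU_EXTRAOPTIONS_<arch> variables:

    def qemu_user_options(oldest_kernel, package_arch, extra_options):
        """Compose user-mode QEMU options the way the new QEMU_OPTIONS expression does."""
        return "-r %s%s" % (oldest_kernel, extra_options.get(package_arch, ""))

    # Hypothetical usage:
    # qemu_user_options("3.2.0", "armv7a", {"armv7a": " -cpu cortex-a8"})
    # -> '-r 3.2.0 -cpu cortex-a8'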
diff --git a/meta/classes/qt4e.bbclass b/meta/classes/qt4e.bbclass
index 850bb6a717..13b1050aac 100644
--- a/meta/classes/qt4e.bbclass
+++ b/meta/classes/qt4e.bbclass
@@ -19,6 +19,3 @@ EXTRA_QMAKEVARS_PRE += " QT_LIBINFIX=${QT_LIBINFIX} "
# Qt4 uses atomic instructions not supported in thumb mode
ARM_INSTRUCTION_SET = "arm"
-
-# Qt4 could NOT be built on MIPS64 with 64 bits userspace
-COMPATIBLE_HOST_mips64 = "mips64.*-linux-gnun32"
diff --git a/meta/classes/qt4x11.bbclass b/meta/classes/qt4x11.bbclass
index 65d196afc6..b06e15d29e 100644
--- a/meta/classes/qt4x11.bbclass
+++ b/meta/classes/qt4x11.bbclass
@@ -9,6 +9,3 @@ QT_LIBINFIX = ""
# Qt4 uses atomic instructions not supported in thumb mode
ARM_INSTRUCTION_SET = "arm"
-
-# Qt4 could NOT be built on MIPS64 with 64 bits userspace
-COMPATIBLE_HOST_mips64 = "mips64.*-linux-gnun32"
diff --git a/meta/classes/report-error.bbclass b/meta/classes/report-error.bbclass
index 5fe2355842..8b30422edf 100644
--- a/meta/classes/report-error.bbclass
+++ b/meta/classes/report-error.bbclass
@@ -44,11 +44,14 @@ python errorreport_handler () {
task = e.task
taskdata={}
log = e.data.getVar('BB_LOGFILE', True)
- logFile = open(log, 'r')
taskdata['package'] = e.data.expand("${PF}")
taskdata['task'] = task
- taskdata['log'] = logFile.read()
- logFile.close()
+ if log:
+ logFile = open(log, 'r')
+ taskdata['log'] = logFile.read()
+ logFile.close()
+ else:
+ taskdata['log'] = "No Log"
jsondata = json.loads(errorreport_getdata(e))
jsondata['failures'].append(taskdata)
errorreport_savedata(e, jsondata, "error-report.txt")
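The guard added above handles failures that occur before any task log exists. The same fallback in isolation, as a small sketch:

    def read_task_log(logfile):
        """Return the task log contents, or a placeholder when no log path is set."""
        if not logfile:
            # BB_LOGFILE can legitimately be unset, e.g. for very early failures
            return "No Log"
        with open(logfile, "r") as f:
            return f.read()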
diff --git a/meta/classes/rm_work.bbclass b/meta/classes/rm_work.bbclass
index f0f6d18249..7b1ec17205 100644
--- a/meta/classes/rm_work.bbclass
+++ b/meta/classes/rm_work.bbclass
@@ -97,3 +97,16 @@ do_rm_work_all () {
}
do_rm_work_all[recrdeptask] = "do_rm_work"
addtask rm_work_all after do_rm_work
+
+do_populate_sdk[postfuncs] += "rm_work_populatesdk"
+rm_work_populatesdk () {
+ :
+}
+rm_work_populatesdk[cleandirs] = "${WORKDIR}/sdk"
+
+do_rootfs[postfuncs] += "rm_work_rootfs"
+rm_work_rootfs () {
+ :
+}
+rm_work_rootfs[cleandirs] = "${WORKDIR}/rootfs"
+
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass
index b5fab6a668..2f75558104 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes/sanity.bbclass
@@ -150,10 +150,9 @@ def check_toolchain_tune(data, tune, multilib):
bb.debug(2, " %s: %s" % (feature, valid_tunes[feature]))
else:
tune_errors.append("Feature '%s' is not defined." % feature)
- whitelist = localdata.getVar("TUNEABI_WHITELIST", True) or ''
- override = localdata.getVar("TUNEABI_OVERRIDE", True) or ''
+ whitelist = localdata.getVar("TUNEABI_WHITELIST", True)
if whitelist:
- tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune, True) or ''
+ tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune, True)
if not tuneabi:
tuneabi = tune
if True not in [x in whitelist.split() for x in tuneabi.split()]:
@@ -265,7 +264,7 @@ def check_connectivity(d):
# CONNECTIVITY_CHECK_URIS are set
network_enabled = not d.getVar('BB_NO_NETWORK', True)
check_enabled = len(test_uris)
- # Take a copy of the data store and unset MIRRORS and PREMIRROS
+ # Take a copy of the data store and unset MIRRORS and PREMIRRORS
data = bb.data.createCopy(d)
data.delVar('PREMIRRORS')
data.delVar('MIRRORS')
@@ -520,6 +519,16 @@ def sanity_handle_abichanges(status, d):
status.addresult("Your configuration is using stamp files including the sstate hash but your build directory was built with stamp files that do not include this.\nTo continue, either rebuild or switch back to the OEBasic signature handler with BB_SIGNATURE_HANDLER = 'OEBasic'.\n")
elif (abi != current_abi and current_abi == "9"):
status.addresult("The layout of the TMPDIR STAMPS directory has changed. Please clean out TMPDIR and rebuild (sstate will be still be valid and reused)\n")
+ elif (abi != current_abi and current_abi == "10" and (abi == "8" or abi == "9")):
+ bb.note("Converting staging layout from version 8/9 to layout version 10")
+ cmd = d.expand("grep -r -l sysroot-providers/virtual_kernel ${SSTATE_MANIFESTS}")
+ ret, result = oe.utils.getstatusoutput(cmd)
+ result = result.split()
+ for f in result:
+ bb.note("Uninstalling manifest file %s" % f)
+ sstate_clean_manifest(f, d)
+ with open(abifile, "w") as f:
+ f.write(current_abi)
elif (abi != current_abi):
# Code to convert from one ABI to another could go here if possible.
status.addresult("Error, TMPDIR has changed its layout version number (%s to %s) and you need to either rebuild, revert or adjust it at your own risk.\n" % (abi, current_abi))
@@ -754,6 +763,44 @@ def check_sanity_everybuild(status, d):
if oeroot.find(' ') != -1:
status.addresult("Error, you have a space in your COREBASE directory path. Please move the installation to a directory which doesn't include a space since autotools doesn't support this.")
+ # Check the format of MIRRORS, PREMIRRORS and SSTATE_MIRRORS
+ import re
+ mirror_vars = ['MIRRORS', 'PREMIRRORS', 'SSTATE_MIRRORS']
+ protocols = ['http', 'ftp', 'file', 'https', \
+ 'git', 'gitsm', 'hg', 'osc', 'p4', 'svk', 'svn', \
+ 'bzr', 'cvs']
+ for mirror_var in mirror_vars:
+ mirrors = (d.getVar(mirror_var, True) or '').replace('\\n', '\n').split('\n')
+ for mirror_entry in mirrors:
+ mirror_entry = mirror_entry.strip()
+ if not mirror_entry:
+ # ignore blank lines
+ continue
+
+ try:
+ pattern, mirror = mirror_entry.split()
+ except ValueError:
+ bb.warn('Invalid %s: %s, should be 2 members.' % (mirror_var, mirror_entry.strip()))
+ continue
+
+ decoded = bb.fetch2.decodeurl(pattern)
+ try:
+ pattern_scheme = re.compile(decoded[0])
+ except re.error as exc:
+ bb.warn('Invalid scheme regex (%s) in %s; %s' % (pattern, mirror_var, mirror_entry))
+ continue
+
+ if not any(pattern_scheme.match(protocol) for protocol in protocols):
+ bb.warn('Invalid protocol (%s) in %s: %s' % (decoded[0], mirror_var, mirror_entry))
+ continue
+
+ if not any(mirror.startswith(protocol + '://') for protocol in protocols):
+ bb.warn('Invalid protocol in %s: %s' % (mirror_var, mirror_entry))
+ continue
+
+ if mirror.startswith('file://') and not mirror.startswith('file:///'):
+ bb.warn('Invalid file url in %s: %s, must be absolute path (file:///)' % (mirror_var, mirror_entry))
+
# Check that TMPDIR hasn't changed location since the last time we were run
tmpdir = d.getVar('TMPDIR', True)
checkfile = os.path.join(tmpdir, "saved_tmpdir")
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
index 9a4d03b887..45dce489de 100644
--- a/meta/classes/siteconfig.bbclass
+++ b/meta/classes/siteconfig.bbclass
@@ -18,13 +18,13 @@ siteconfig_do_siteconfig_gencache () {
>${WORKDIR}/site_config_${MACHINE}/configure.ac
cd ${WORKDIR}/site_config_${MACHINE}
autoconf
- rm -f ${PN}_cache
- CONFIG_SITE="" ${EXTRASITECONFIG} ./configure ${CONFIGUREOPTS} --cache-file ${PN}_cache
+ rm -f ${BPN}_cache
+ CONFIG_SITE="" ${EXTRASITECONFIG} ./configure ${CONFIGUREOPTS} --cache-file ${BPN}_cache
sed -n -e "/ac_cv_c_bigendian/p" -e "/ac_cv_sizeof_/p" \
-e "/ac_cv_type_/p" -e "/ac_cv_header_/p" -e "/ac_cv_func_/p" \
- < ${PN}_cache > ${PN}_config
+ < ${BPN}_cache > ${BPN}_config
mkdir -p ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d
- cp ${PN}_config ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d
+ cp ${BPN}_config ${SYSROOT_DESTDIR}${datadir}/${TARGET_SYS}_config_site.d
}
diff --git a/meta/classes/siteinfo.bbclass b/meta/classes/siteinfo.bbclass
index e90632aeef..b41db46bc0 100644
--- a/meta/classes/siteinfo.bbclass
+++ b/meta/classes/siteinfo.bbclass
@@ -18,10 +18,10 @@
def siteinfo_data(d):
archinfo = {
"allarch": "endian-little bit-32", # bogus, but better than special-casing the checks below for allarch
- "aarch64": "endian-little bit-64 arm-common",
- "aarch64_be": "endian-big bit-64 arm-common",
- "arm": "endian-little bit-32 arm-common",
- "armeb": "endian-big bit-32 arm-common",
+ "aarch64": "endian-little bit-64 arm-common arm-64",
+ "aarch64_be": "endian-big bit-64 arm-common arm-64",
+ "arm": "endian-little bit-32 arm-common arm-32",
+ "armeb": "endian-big bit-32 arm-common arm-32",
"avr32": "endian-big bit-32 avr32-common",
"bfin": "endian-little bit-32 bfin-common",
"i386": "endian-little bit-32 ix86-common",
diff --git a/meta/classes/spdx.bbclass b/meta/classes/spdx.bbclass
index 55ce3aff4f..454c53e96f 100644
--- a/meta/classes/spdx.bbclass
+++ b/meta/classes/spdx.bbclass
@@ -15,252 +15,291 @@
# SPDX file will be output to the path which is defined as[SPDX_MANIFEST_DIR]
# in ./meta/conf/licenses.conf.
-SPDXOUTPUTDIR = "${WORKDIR}/spdx_output_dir"
SPDXSSTATEDIR = "${WORKDIR}/spdx_sstate_dir"
+# If ${S} isn't actually the top-level source directory, set SPDX_S to point at
+# the real top-level directory.
+SPDX_S ?= "${S}"
+
python do_spdx () {
import os, sys
- import json
+ import json, shutil
info = {}
- info['workdir'] = (d.getVar('WORKDIR', True) or "")
- info['sourcedir'] = (d.getVar('S', True) or "")
- info['pn'] = (d.getVar( 'PN', True ) or "")
- info['pv'] = (d.getVar( 'PV', True ) or "")
- info['src_uri'] = (d.getVar( 'SRC_URI', True ) or "")
- info['spdx_version'] = (d.getVar('SPDX_VERSION', True) or '')
- info['data_license'] = (d.getVar('DATA_LICENSE', True) or '')
-
- spdx_sstate_dir = (d.getVar('SPDXSSTATEDIR', True) or "")
- manifest_dir = (d.getVar('SPDX_MANIFEST_DIR', True) or "")
+ info['workdir'] = d.getVar('WORKDIR', True)
+ info['sourcedir'] = d.getVar('SPDX_S', True)
+ info['pn'] = d.getVar('PN', True)
+ info['pv'] = d.getVar('PV', True)
+ info['spdx_version'] = d.getVar('SPDX_VERSION', True)
+ info['data_license'] = d.getVar('DATA_LICENSE', True)
+
+ sstatedir = d.getVar('SPDXSSTATEDIR', True)
+ sstatefile = os.path.join(sstatedir, info['pn'] + info['pv'] + ".spdx")
+
+ manifest_dir = d.getVar('SPDX_MANIFEST_DIR', True)
info['outfile'] = os.path.join(manifest_dir, info['pn'] + ".spdx" )
- sstatefile = os.path.join(spdx_sstate_dir,
- info['pn'] + info['pv'] + ".spdx" )
- info['spdx_temp_dir'] = (d.getVar('SPDX_TEMP_DIR', True) or "")
- info['tar_file'] = os.path.join( info['workdir'], info['pn'] + ".tar.gz" )
+ info['spdx_temp_dir'] = d.getVar('SPDX_TEMP_DIR', True)
+ info['tar_file'] = os.path.join(info['workdir'], info['pn'] + ".tar.gz" )
+
+ # Make sure important dirs exist
+ try:
+ bb.utils.mkdirhier(manifest_dir)
+ bb.utils.mkdirhier(sstatedir)
+ bb.utils.mkdirhier(info['spdx_temp_dir'])
+ except OSError as e:
+ bb.error("SPDX: Could not set up required directories: " + str(e))
+ return
## get everything from cache. use it to decide if
## something needs to be rerun
- cur_ver_code = get_ver_code( info['sourcedir'] )
+ cur_ver_code = get_ver_code(info['sourcedir'])
cache_cur = False
- if not os.path.exists( spdx_sstate_dir ):
- bb.utils.mkdirhier( spdx_sstate_dir )
- if not os.path.exists( info['spdx_temp_dir'] ):
- bb.utils.mkdirhier( info['spdx_temp_dir'] )
- if os.path.exists( sstatefile ):
+ if os.path.exists(sstatefile):
## cache for this package exists. read it in
- cached_spdx = get_cached_spdx( sstatefile )
+ cached_spdx = get_cached_spdx(sstatefile)
if cached_spdx['PackageVerificationCode'] == cur_ver_code:
- bb.warn(info['pn'] + "'s ver code same as cache's. do nothing")
+ bb.warn("SPDX: Verification code for " + info['pn']
+ + "is same as cache's. do nothing")
cache_cur = True
else:
- local_file_info = setup_foss_scan( info,
- True, cached_spdx['Files'] )
+ local_file_info = setup_foss_scan(info, True, cached_spdx['Files'])
else:
- local_file_info = setup_foss_scan( info, False, None )
+ local_file_info = setup_foss_scan(info, False, None)
if cache_cur:
spdx_file_info = cached_spdx['Files']
+ foss_package_info = cached_spdx['Package']
+ foss_license_info = cached_spdx['Licenses']
else:
## setup fossology command
- foss_server = (d.getVar('FOSS_SERVER', True) or "")
- foss_flags = (d.getVar('FOSS_WGET_FLAGS', True) or "")
+ foss_server = d.getVar('FOSS_SERVER', True)
+ foss_flags = d.getVar('FOSS_WGET_FLAGS', True)
+ foss_full_spdx = d.getVar('FOSS_FULL_SPDX', True) == "true" or False
foss_command = "wget %s --post-file=%s %s"\
- % (foss_flags,info['tar_file'],foss_server)
+ % (foss_flags, info['tar_file'], foss_server)
- #bb.warn(info['pn'] + json.dumps(local_file_info))
- foss_file_info = run_fossology( foss_command )
- spdx_file_info = create_spdx_doc( local_file_info, foss_file_info )
- ## write to cache
- write_cached_spdx(sstatefile,cur_ver_code,spdx_file_info)
+ foss_result = run_fossology(foss_command, foss_full_spdx)
+ if foss_result is not None:
+ (foss_package_info, foss_file_info, foss_license_info) = foss_result
+ spdx_file_info = create_spdx_doc(local_file_info, foss_file_info)
+ ## write to cache
+ write_cached_spdx(sstatefile, cur_ver_code, foss_package_info,
+ spdx_file_info, foss_license_info)
+ else:
+ bb.error("SPDX: Could not communicate with FOSSology server. Command was: " + foss_command)
+ return
## Get document and package level information
- spdx_header_info = get_header_info(info, cur_ver_code, spdx_file_info)
+ spdx_header_info = get_header_info(info, cur_ver_code, foss_package_info)
## CREATE MANIFEST
- create_manifest(info,spdx_header_info,spdx_file_info)
+ create_manifest(info, spdx_header_info, spdx_file_info, foss_license_info)
## clean up the temp stuff
- remove_dir_tree( info['spdx_temp_dir'] )
+ shutil.rmtree(info['spdx_temp_dir'], ignore_errors=True)
if os.path.exists(info['tar_file']):
- remove_file( info['tar_file'] )
+ remove_file(info['tar_file'])
}
addtask spdx after do_patch before do_configure
-def create_manifest(info,header,files):
- with open(info['outfile'], 'w') as f:
+def create_manifest(info, header, files, licenses):
+ import codecs
+ with codecs.open(info['outfile'], mode='w', encoding='utf-8') as f:
+ # Write header
f.write(header + '\n')
+
+ # Write file data
for chksum, block in files.iteritems():
+ f.write("FileName: " + block['FileName'] + '\n')
+ for key, value in block.iteritems():
+ if not key == 'FileName':
+ f.write(key + ": " + value + '\n')
+ f.write('\n')
+
+ # Write license data
+ for id, block in licenses.iteritems():
+ f.write("LicenseID: " + id + '\n')
for key, value in block.iteritems():
- f.write(key + ": " + value)
- f.write('\n')
+ f.write(key + ": " + value + '\n')
f.write('\n')
-def get_cached_spdx( sstatefile ):
+def get_cached_spdx(sstatefile):
import json
+ import codecs
cached_spdx_info = {}
- with open( sstatefile, 'r' ) as f:
+ with codecs.open(sstatefile, mode='r', encoding='utf-8') as f:
try:
cached_spdx_info = json.load(f)
except ValueError as e:
cached_spdx_info = None
return cached_spdx_info
-def write_cached_spdx( sstatefile, ver_code, files ):
+def write_cached_spdx(sstatefile, ver_code, package_info, files, license_info):
import json
+ import codecs
spdx_doc = {}
spdx_doc['PackageVerificationCode'] = ver_code
spdx_doc['Files'] = {}
spdx_doc['Files'] = files
- with open( sstatefile, 'w' ) as f:
+ spdx_doc['Package'] = {}
+ spdx_doc['Package'] = package_info
+ spdx_doc['Licenses'] = {}
+ spdx_doc['Licenses'] = license_info
+ with codecs.open(sstatefile, mode='w', encoding='utf-8') as f:
f.write(json.dumps(spdx_doc))
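
As a point of reference, here is a minimal standalone sketch of the JSON layout these two helpers agree on. The keys come from write_cached_spdx() above; the file path and the sample values are hypothetical, not real SPDX data.

    import json

    # Hypothetical cache entry mirroring the keys used by write_cached_spdx():
    # PackageVerificationCode, Files, Package, Licenses.
    cache = {
        "PackageVerificationCode": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
        "Files": {
            "3f786850e387550fdab836ed7e6dc881de23001b": {
                "FileName": "src/hello.c",
                "LicenseConcluded": "NOASSERTION",
            }
        },
        "Package": {"PackageLicenseDeclared": "NOASSERTION"},
        "Licenses": {"LicenseRef-1": {"LicenseName": "NOASSERTION"}},
    }

    # Throwaway file standing in for the sstatefile path used by the class.
    with open("example.spdx", "w") as f:
        f.write(json.dumps(cache))

    # What get_cached_spdx() reads back before comparing verification codes.
    with open("example.spdx") as f:
        assert json.load(f)["PackageVerificationCode"] == cache["PackageVerificationCode"]
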
-def setup_foss_scan( info, cache, cached_files ):
+def setup_foss_scan(info, cache, cached_files):
import errno, shutil
import tarfile
file_info = {}
cache_dict = {}
- for f_dir, f in list_files( info['sourcedir'] ):
- full_path = os.path.join( f_dir, f )
+ for f_dir, f in list_files(info['sourcedir']):
+ full_path = os.path.join(f_dir, f)
abs_path = os.path.join(info['sourcedir'], full_path)
- dest_dir = os.path.join( info['spdx_temp_dir'], f_dir )
- dest_path = os.path.join( info['spdx_temp_dir'], full_path )
- try:
- stats = os.stat(abs_path)
- except OSError as e:
- bb.warn( "Stat failed" + str(e) + "\n")
- continue
-
- checksum = hash_file( abs_path )
- mtime = time.asctime(time.localtime(stats.st_mtime))
-
- ## retain cache information if it exists
- file_info[checksum] = {}
- if cache and checksum in cached_files:
- file_info[checksum] = cached_files[checksum]
- else:
- file_info[checksum]['FileName'] = full_path
-
- try:
- os.makedirs( dest_dir )
- except OSError as e:
- if e.errno == errno.EEXIST and os.path.isdir(dest_dir):
- pass
+ dest_dir = os.path.join(info['spdx_temp_dir'], f_dir)
+ dest_path = os.path.join(info['spdx_temp_dir'], full_path)
+
+ checksum = hash_file(abs_path)
+ if checksum is not None:
+ file_info[checksum] = {}
+ ## retain cache information if it exists
+ if cache and checksum in cached_files:
+ file_info[checksum] = cached_files[checksum]
+ ## have the file included in what's sent to the FOSSology server
else:
- bb.warn( "mkdir failed " + str(e) + "\n" )
- continue
-
- if(cache and checksum not in cached_files) or not cache:
- try:
- shutil.copyfile( abs_path, dest_path )
- except shutil.Error as e:
- bb.warn( str(e) + "\n" )
- except IOError as e:
- bb.warn( str(e) + "\n" )
+ file_info[checksum]['FileName'] = full_path
+ try:
+ bb.utils.mkdirhier(dest_dir)
+ shutil.copyfile(abs_path, dest_path)
+ except OSError as e:
+ bb.warn("SPDX: mkdirhier failed: " + str(e))
+ except shutil.Error as e:
+ bb.warn("SPDX: copyfile failed: " + str(e))
+ except IOError as e:
+ bb.warn("SPDX: copyfile failed: " + str(e))
+ else:
+ bb.warn("SPDX: Could not get checksum for file: " + f)
- with tarfile.open( info['tar_file'], "w:gz" ) as tar:
- tar.add( info['spdx_temp_dir'], arcname=os.path.basename(info['spdx_temp_dir']) )
- tar.close()
+ with tarfile.open(info['tar_file'], "w:gz") as tar:
+ tar.add(info['spdx_temp_dir'], arcname=os.path.basename(info['spdx_temp_dir']))
return file_info
-
-def remove_dir_tree( dir_name ):
- import shutil
+def remove_file(file_name):
try:
- shutil.rmtree( dir_name )
- except:
- pass
-
-def remove_file( file_name ):
- try:
- os.remove( file_name )
+ os.remove(file_name)
except OSError as e:
pass
-def list_files( dir ):
- for root, subFolders, files in os.walk( dir ):
+def list_files(dir):
+ for root, subFolders, files in os.walk(dir):
for f in files:
- rel_root = os.path.relpath( root, dir )
+ rel_root = os.path.relpath(root, dir)
yield rel_root, f
return
-def hash_file( file_name ):
+def hash_file(file_name):
try:
- f = open( file_name, 'rb' )
- data_string = f.read()
+ with open(file_name, 'rb') as f:
+ data_string = f.read()
+ sha1 = hash_string(data_string)
+ return sha1
except:
- return None
- finally:
- f.close()
- sha1 = hash_string( data_string )
- return sha1
+ return None
-def hash_string( data ):
+def hash_string(data):
import hashlib
sha1 = hashlib.sha1()
- sha1.update( data )
+ sha1.update(data)
return sha1.hexdigest()
-def run_fossology( foss_command ):
+def run_fossology(foss_command, full_spdx):
import string, re
import subprocess
p = subprocess.Popen(foss_command.split(),
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
foss_output, foss_error = p.communicate()
-
- records = []
- records = re.findall('FileName:.*?</text>', foss_output, re.S)
+ if p.returncode != 0:
+ return None
+
+ foss_output = unicode(foss_output, "utf-8")
+ foss_output = string.replace(foss_output, '\r', '')
+
+ # Package info
+ package_info = {}
+ if full_spdx:
+ # All mandatory, only one occurrence
+ package_info['PackageCopyrightText'] = re.findall('PackageCopyrightText: (.*?</text>)', foss_output, re.S)[0]
+ package_info['PackageLicenseDeclared'] = re.findall('PackageLicenseDeclared: (.*)', foss_output)[0]
+ package_info['PackageLicenseConcluded'] = re.findall('PackageLicenseConcluded: (.*)', foss_output)[0]
+ # There may be more than one of these
+ package_info['PackageLicenseInfoFromFiles'] = re.findall('PackageLicenseInfoFromFiles: (.*)', foss_output)
+ else:
+ DEFAULT = "NOASSERTION"
+ package_info['PackageCopyrightText'] = "<text>" + DEFAULT + "</text>"
+ package_info['PackageLicenseDeclared'] = DEFAULT
+ package_info['PackageLicenseConcluded'] = DEFAULT
+ package_info['PackageLicenseInfoFromFiles'] = []
+ # File info
file_info = {}
+ records = []
+ # FileName is also in PackageFileName, so we match on FileType as well.
+ records = re.findall('FileName:.*?FileType:.*?</text>', foss_output, re.S)
for rec in records:
- rec = string.replace( rec, '\r', '' )
- chksum = re.findall( 'FileChecksum: SHA1: (.*)\n', rec)[0]
+ chksum = re.findall('FileChecksum: SHA1: (.*)\n', rec)[0]
file_info[chksum] = {}
- file_info[chksum]['FileCopyrightText'] = re.findall( 'FileCopyrightText: '
+ file_info[chksum]['FileCopyrightText'] = re.findall('FileCopyrightText: '
+ '(.*?</text>)', rec, re.S )[0]
- fields = ['FileType','LicenseConcluded',
- 'LicenseInfoInFile','FileName']
+ fields = ['FileName', 'FileType', 'LicenseConcluded', 'LicenseInfoInFile']
for field in fields:
file_info[chksum][field] = re.findall(field + ': (.*)', rec)[0]
- return file_info
+ # Licenses
+ license_info = {}
+ licenses = []
+ licenses = re.findall('LicenseID:.*?LicenseName:.*?\n', foss_output, re.S)
+ for lic in licenses:
+ license_id = re.findall('LicenseID: (.*)\n', lic)[0]
+ license_info[license_id] = {}
+ license_info[license_id]['ExtractedText'] = re.findall('ExtractedText: (.*?</text>)', lic, re.S)[0]
+ license_info[license_id]['LicenseName'] = re.findall('LicenseName: (.*)', lic)[0]
+
+ return (package_info, file_info, license_info)
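
To make the regex-based extraction above easier to follow, here is a small self-contained sketch that pulls the same kind of tag/value records out of a fabricated fragment of tag-value output. The sample text is invented for illustration, not real FOSSology output.

    import re

    sample = (
        "FileName: ./src/hello.c\n"
        "FileType: SOURCE\n"
        "FileChecksum: SHA1: 3f786850e387550fdab836ed7e6dc881de23001b\n"
        "LicenseConcluded: MIT\n"
        "LicenseInfoInFile: MIT\n"
        "FileCopyrightText: <text>Copyright (C) 2014 Example</text>\n"
    )

    # Same idea as run_fossology(): grab each record, then pick fields out of it.
    records = re.findall('FileName:.*?FileType:.*?</text>', sample, re.S)
    for rec in records:
        chksum = re.findall('FileChecksum: SHA1: (.*)\n', rec)[0]
        copyright_text = re.findall('FileCopyrightText: (.*?</text>)', rec, re.S)[0]
        fields = dict((k, re.findall(k + ': (.*)', rec)[0])
                      for k in ('FileName', 'FileType', 'LicenseConcluded', 'LicenseInfoInFile'))
        print(chksum + " " + fields['FileName'] + " " + copyright_text)
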
-def create_spdx_doc( file_info, scanned_files ):
+def create_spdx_doc(file_info, scanned_files):
import json
## push foss changes back into cache
for chksum, lic_info in scanned_files.iteritems():
if chksum in file_info:
- file_info[chksum]['FileName'] = file_info[chksum]['FileName']
file_info[chksum]['FileType'] = lic_info['FileType']
file_info[chksum]['FileChecksum: SHA1'] = chksum
file_info[chksum]['LicenseInfoInFile'] = lic_info['LicenseInfoInFile']
file_info[chksum]['LicenseConcluded'] = lic_info['LicenseConcluded']
file_info[chksum]['FileCopyrightText'] = lic_info['FileCopyrightText']
else:
- bb.warn(lic_info['FileName'] + " : " + chksum
+ bb.warn("SPDX: " + lic_info['FileName'] + " : " + chksum
+ " : is not in the local file info: "
- + json.dumps(lic_info,indent=1))
+ + json.dumps(lic_info, indent=1))
return file_info
-def get_ver_code( dirname ):
+def get_ver_code(dirname):
chksums = []
- for f_dir, f in list_files( dirname ):
- try:
- stats = os.stat(os.path.join(dirname,f_dir,f))
- except OSError as e:
- bb.warn( "Stat failed" + str(e) + "\n")
- continue
- chksums.append(hash_file(os.path.join(dirname,f_dir,f)))
- ver_code_string = ''.join( chksums ).lower()
- ver_code = hash_string( ver_code_string )
+ for f_dir, f in list_files(dirname):
+ path = os.path.join(dirname, f_dir, f)
+ file_hash = hash_file(path)
+ if file_hash is not None:
+ chksums.append(file_hash)
+ else:
+ bb.warn("SPDX: Could not hash file: " + path)
+ ver_code_string = ''.join(chksums).lower()
+ ver_code = hash_string(ver_code_string)
return ver_code
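
As a rough illustration of how the verification code above is derived, this standalone sketch follows the same scheme: per-file SHA-1s concatenated in walk order, lower-cased, then hashed again. The directory walked is hypothetical; note that get_ver_code() does not sort the checksums, whereas the SPDX package verification code definition does.

    import hashlib
    import os

    def sha1_of_file(path):
        # Mirrors hash_file()/hash_string(): SHA-1 of the raw file contents.
        with open(path, "rb") as f:
            return hashlib.sha1(f.read()).hexdigest()

    def verification_code(sourcedir):
        chksums = []
        for root, dirs, files in os.walk(sourcedir):
            for name in files:
                chksums.append(sha1_of_file(os.path.join(root, name)))
        # Like get_ver_code(), hash the checksums in walk order; the SPDX
        # spec sorts them first, so this is only an approximation.
        return hashlib.sha1("".join(chksums).lower().encode()).hexdigest()

    print(verification_code("."))  # e.g. run against the current directory
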
-def get_header_info( info, spdx_verification_code, spdx_files ):
+def get_header_info(info, spdx_verification_code, package_info):
"""
Put together the header SPDX information.
Eventually this needs to become a lot less
@@ -271,14 +310,12 @@ def get_header_info( info, spdx_verification_code, spdx_files ):
head = []
DEFAULT = "NOASSERTION"
- #spdx_verification_code = get_ver_code( info['sourcedir'] )
- package_checksum = ''
- if os.path.exists(info['tar_file']):
- package_checksum = hash_file( info['tar_file'] )
- else:
+ package_checksum = hash_file(info['tar_file'])
+ if package_checksum is None:
package_checksum = DEFAULT
## document level information
+ head.append("## SPDX Document Information")
head.append("SPDXVersion: " + info['spdx_version'])
head.append("DataLicense: " + info['data_license'])
head.append("DocumentComment: <text>SPDX for "
@@ -286,9 +323,11 @@ def get_header_info( info, spdx_verification_code, spdx_files ):
head.append("")
## Creator information
- now = datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
+ ## Note: datetime.now() gives local time, even though the trailing 'Z' denotes UTC.
+ now = datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')
head.append("## Creation Information")
- head.append("Creator: fossology-spdx")
+ ## Tools are supposed to have a version, but FOSSology+SPDX provides none.
+ head.append("Creator: Tool: FOSSology+SPDX")
head.append("Created: " + now)
head.append("CreatorComment: <text>UNO</text>")
head.append("")
@@ -297,21 +336,26 @@ def get_header_info( info, spdx_verification_code, spdx_files ):
head.append("## Package Information")
head.append("PackageName: " + info['pn'])
head.append("PackageVersion: " + info['pv'])
- head.append("PackageDownloadLocation: " + DEFAULT)
- head.append("PackageSummary: <text></text>")
head.append("PackageFileName: " + os.path.basename(info['tar_file']))
head.append("PackageSupplier: Person:" + DEFAULT)
+ head.append("PackageDownloadLocation: " + DEFAULT)
+ head.append("PackageSummary: <text></text>")
head.append("PackageOriginator: Person:" + DEFAULT)
head.append("PackageChecksum: SHA1: " + package_checksum)
head.append("PackageVerificationCode: " + spdx_verification_code)
head.append("PackageDescription: <text>" + info['pn']
+ " version " + info['pv'] + "</text>")
head.append("")
- head.append("PackageCopyrightText: <text>" + DEFAULT + "</text>")
+ head.append("PackageCopyrightText: "
+ + package_info['PackageCopyrightText'])
head.append("")
- head.append("PackageLicenseDeclared: " + DEFAULT)
- head.append("PackageLicenseConcluded: " + DEFAULT)
- head.append("PackageLicenseInfoFromFiles: " + DEFAULT)
+ head.append("PackageLicenseDeclared: "
+ + package_info['PackageLicenseDeclared'])
+ head.append("PackageLicenseConcluded: "
+ + package_info['PackageLicenseConcluded'])
+
+ for licref in package_info['PackageLicenseInfoFromFiles']:
+ head.append("PackageLicenseInfoFromFiles: " + licref)
head.append("")
## header for file level
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index 4eec6bd0f7..ace6bdb57a 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -21,7 +21,8 @@ SSTATE_PATHSPEC = "${SSTATE_DIR}/${SSTATE_EXTRAPATHWILDCARD}*/${SSTATE_PKGSPEC
# of the system, we let the sstate paths take care of this.
SSTATE_EXTRAPATH[vardepvalue] = ""
-SSTATE_DUPWHITELIST = "${DEPLOY_DIR_IMAGE}/ ${DEPLOY_DIR}/licenses/"
+# For multilib rpm the allarch packagegroup files can overwrite each other (in theory they're identical)
+SSTATE_DUPWHITELIST = "${DEPLOY_DIR_IMAGE}/ ${DEPLOY_DIR}/licenses/ ${DEPLOY_DIR_RPM}/all/"
# Avoid docbook/sgml catalog warnings for now
SSTATE_DUPWHITELIST += "${STAGING_ETCDIR_NATIVE}/sgml ${STAGING_DATADIR_NATIVE}/sgml"
@@ -32,15 +33,25 @@ BB_HASHFILENAME = "${SSTATE_EXTRAPATH} ${SSTATE_PKGSPEC} ${SSTATE_SWSPEC}"
SSTATE_MANMACH ?= "${SSTATE_PKGARCH}"
-SSTATEPREINSTFUNCS ?= ""
-SSTATEPOSTINSTFUNCS ?= ""
+SSTATECREATEFUNCS = "sstate_hardcode_path"
+SSTATEPOSTCREATEFUNCS = ""
+SSTATEPREINSTFUNCS = ""
+SSTATEPOSTUNPACKFUNCS = "sstate_hardcode_path_unpack"
+SSTATEPOSTINSTFUNCS = ""
EXTRA_STAGING_FIXMES ?= ""
+SIGGEN_LOCKEDSIGS_CHECK_LEVEL ?= 'error'
+
# Specify dirs in which the shell function is executed and don't use ${B}
# as default dirs to avoid possible race about ${B} with other task.
sstate_create_package[dirs] = "${SSTATE_BUILDDIR}"
sstate_unpack_package[dirs] = "${SSTATE_INSTDIR}"
+# Do not run sstate_hardcode_path() in ${B}:
+# ${B} may be removed by cmake_do_configure() while
+# sstate_hardcode_path() is running.
+sstate_hardcode_path[dirs] = "${SSTATE_BUILDDIR}"
+
python () {
if bb.data.inherits_class('native', d):
d.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH'))
@@ -182,7 +193,29 @@ def sstate_install(ss, d):
if search_output != "":
match.append("Matched in %s" % search_output.rstrip())
if match:
- bb.warn("The recipe %s is trying to install files into a shared area when those files already exist. Those files and their manifest location are:\n %s\nPlease verify which package should provide the above files." % (d.getVar('PN', True), "\n ".join(match)))
+ bb.error("The recipe %s is trying to install files into a shared " \
+ "area when those files already exist. Those files and their manifest " \
+ "location are:\n %s\nPlease verify which recipe should provide the " \
+ "above files.\nThe build has stopped as continuing in this scenario WILL " \
+ "break things, if not now, possibly in the future (we've seen builds fail " \
+ "several months later). If the system knew how to recover from this " \
+ "automatically it would however there are several different scenarios " \
+ "which can result in this and we don't know which one this is. It may be " \
+ "you have switched providers of something like virtual/kernel (e.g. from " \
+ "linux-yocto to linux-yocto-dev), in that case you need to execute the " \
+ "clean task for both recipes and it will resolve this error. It may be " \
+ "you changed DISTRO_FEATURES from systemd to udev or vice versa. Cleaning " \
+ "those recipes should again resolve this error however switching " \
+ "DISTRO_FEATURES on an existing build directory is not supported, you " \
+ "should really clean out tmp and rebuild (reusing sstate should be safe). " \
+ "It could be the overlapping files detected are harmless in which case " \
+ "adding them to SSTATE_DUPWHITELIST may be the correct solution. It could " \
+ "also be your build is including two different conflicting versions of " \
+ "things (e.g. bluez 4 and bluez 5 and the correct solution for that would " \
+ "be to resolve the conflict. If in doubt, please ask on the mailing list, " \
+ "sharing the error and filelist above." % \
+ (d.getVar('PN', True), "\n ".join(match)))
+ bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.")
# Write out the manifest
f = open(manifest, "w")
@@ -239,16 +272,32 @@ def sstate_installpkg(ss, d):
d.setVar('SSTATE_INSTDIR', sstateinst)
d.setVar('SSTATE_PKG', sstatepkg)
- for preinst in (d.getVar('SSTATEPREINSTFUNCS', True) or '').split():
- bb.build.exec_func(preinst, d)
+ for f in (d.getVar('SSTATEPREINSTFUNCS', True) or '').split() + ['sstate_unpack_package'] + (d.getVar('SSTATEPOSTUNPACKFUNCS', True) or '').split():
+ bb.build.exec_func(f, d)
- bb.build.exec_func('sstate_unpack_package', d)
+ for state in ss['dirs']:
+ prepdir(state[1])
+ os.rename(sstateinst + state[0], state[1])
+ sstate_install(ss, d)
+ for plain in ss['plaindirs']:
+ workdir = d.getVar('WORKDIR', True)
+ src = sstateinst + "/" + plain.replace(workdir, '')
+ dest = plain
+ bb.utils.mkdirhier(src)
+ prepdir(dest)
+ os.rename(src, dest)
+
+ return True
+
+python sstate_hardcode_path_unpack () {
# Fixup hardcoded paths
#
# Note: The logic below must match the reverse logic in
# sstate_hardcode_path(d)
+ import subprocess
+ sstateinst = d.getVar('SSTATE_INSTDIR', True)
fixmefn = sstateinst + "fixmepath"
if os.path.isfile(fixmefn):
staging = d.getVar('STAGING_DIR', True)
@@ -276,21 +325,7 @@ def sstate_installpkg(ss, d):
# Need to remove this or we'd copy it into the target directory and may
# conflict with another writer
os.remove(fixmefn)
-
- for state in ss['dirs']:
- prepdir(state[1])
- os.rename(sstateinst + state[0], state[1])
- sstate_install(ss, d)
-
- for plain in ss['plaindirs']:
- workdir = d.getVar('WORKDIR', True)
- src = sstateinst + "/" + plain.replace(workdir, '')
- dest = plain
- bb.utils.mkdirhier(src)
- prepdir(dest)
- os.rename(src, dest)
-
- return True
+}
def sstate_clean_cachefile(ss, d):
import oe.path
@@ -395,7 +430,7 @@ python sstate_cleanall() {
sstate_clean(shared_state, ld)
}
-def sstate_hardcode_path(d):
+python sstate_hardcode_path () {
import subprocess, platform
# Need to remove hardcoded paths and fix these when we install the
@@ -413,7 +448,7 @@ def sstate_hardcode_path(d):
sstate_grep_cmd = "grep -l -e '%s'" % (staging)
sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIR:g'" % (staging)
elif bb.data.inherits_class('cross', d):
- sstate_grep_cmd = "grep -l -e '(%s|%s)'" % (staging_target, staging)
+ sstate_grep_cmd = "grep -l -e '%s' -e '%s'" % (staging_target, staging)
sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIR:g'" % (staging_target, staging)
else:
sstate_grep_cmd = "grep -l -e '%s'" % (staging_host)
@@ -449,6 +484,7 @@ def sstate_hardcode_path(d):
else:
bb.note("Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd))
subprocess.call(sstate_filelist_relative_cmd, shell=True)
+}
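
The change from a single "(%s|%s)" pattern to two -e options fixes the search for cross recipes: grep's default basic regexes treat plain parentheses and '|' literally, so the old pattern never matched. A hedged sketch of the same idea follows; the sysroot paths and the generated file are made up for the example.

    import subprocess
    import tempfile

    staging_target = "/builddir/tmp/sysroots/qemux86"       # hypothetical path
    staging_host = "/builddir/tmp/sysroots/x86_64-linux"    # hypothetical path

    # A throwaway file containing one of the two paths, standing in for a
    # staged .la/.pc file that hardcodes a sysroot location.
    with tempfile.NamedTemporaryFile("w", suffix=".la", delete=False) as tf:
        tf.write("libdir='%s/usr/lib'\n" % staging_target)

    # Two -e options: the file matches if it contains either path, with no
    # reliance on regex alternation.  grep -l prints the matching file name.
    cmd = ["grep", "-l", "-e", staging_target, "-e", staging_host, tf.name]
    print(subprocess.call(cmd))  # exit code 0 when a match is found
    # (tf.name is left on disk; remove it when done)
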
def sstate_package(ss, d):
import oe.path
@@ -506,9 +542,11 @@ def sstate_package(ss, d):
d.setVar('SSTATE_BUILDDIR', sstatebuild)
d.setVar('SSTATE_PKG', sstatepkg)
- sstate_hardcode_path(d)
- bb.build.exec_func('sstate_create_package', d)
-
+
+ for f in (d.getVar('SSTATECREATEFUNCS', True) or '').split() + ['sstate_create_package'] + \
+ (d.getVar('SSTATEPOSTCREATEFUNCS', True) or '').split():
+ bb.build.exec_func(f, d)
+
bb.siggen.dump_this_task(sstatepkg + ".siginfo", d)
return
@@ -591,7 +629,8 @@ sstate_create_package () {
if [ "$(ls -A)" ]; then
set +e
tar -czf $TFILE *
- if [ $? -ne 0 ] && [ $? -ne 1 ]; then
+ ret=$?
+ if [ $ret -ne 0 ] && [ $ret -ne 1 ]; then
exit 1
fi
set -e
@@ -612,6 +651,8 @@ sstate_unpack_package () {
mkdir -p ${SSTATE_INSTDIR}
cd ${SSTATE_INSTDIR}
tar -xmvzf ${SSTATE_PKG}
+ # Use "! -w ||" to return true for read only files
+ [ ! -w ${SSTATE_PKG} ] || touch --no-dereference ${SSTATE_PKG}
}
BB_HASHCHECK_FUNCTION = "sstate_checkhashes"
@@ -704,6 +745,9 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d):
evdata['found'].append( (sq_fn[task], sq_task[task], sq_hash[task], sstatefile ) )
bb.event.fire(bb.event.MetadataEvent("MissedSstate", evdata), d)
+ if hasattr(bb.parse.siggen, "checkhashes"):
+ bb.parse.siggen.checkhashes(missed, ret, sq_fn, sq_task, sq_hash, sq_hashfn, d)
+
return ret
BB_SETSCENE_DEPVALID = "setscene_depvalid"
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass
index 7c43e7618d..57b2743196 100644
--- a/meta/classes/staging.bbclass
+++ b/meta/classes/staging.bbclass
@@ -106,6 +106,7 @@ python do_populate_sysroot () {
}
do_populate_sysroot[vardeps] += "${SYSROOT_PREPROCESS_FUNCS}"
+do_populate_sysroot[vardepsexclude] += "MULTI_PROVIDER_WHITELIST"
SSTATETASKS += "do_populate_sysroot"
do_populate_sysroot[cleandirs] = "${SYSROOT_DESTDIR}"
diff --git a/meta/classes/systemd.bbclass b/meta/classes/systemd.bbclass
index a6ad723dfd..c34884bd38 100644
--- a/meta/classes/systemd.bbclass
+++ b/meta/classes/systemd.bbclass
@@ -55,6 +55,8 @@ fi
systemd_populate_packages[vardeps] += "systemd_prerm systemd_postinst"
+systemd_populate_packages[vardepsexclude] += "OVERRIDES"
+
python systemd_populate_packages() {
if not bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d):
@@ -141,10 +143,7 @@ python systemd_populate_packages() {
if has_exactly_one_service:
has_exactly_one_service = len(get_package_var(d, 'SYSTEMD_SERVICE', systemd_packages).split()) == 1
- keys = 'Also' # Conflicts??
- if has_exactly_one_service:
- # single service gets also the /dev/null dummies
- keys = 'Also Conflicts'
+ keys = 'Also'
# scan for all in SYSTEMD_SERVICE[]
for pkg_systemd in systemd_packages.split():
for service in get_package_var(d, 'SYSTEMD_SERVICE', pkg_systemd).split():
diff --git a/meta/classes/testimage.bbclass b/meta/classes/testimage.bbclass
index 97d0380153..683173854d 100644
--- a/meta/classes/testimage.bbclass
+++ b/meta/classes/testimage.bbclass
@@ -30,8 +30,8 @@ TEST_EXPORT_ONLY ?= "0"
DEFAULT_TEST_SUITES = "ping auto"
DEFAULT_TEST_SUITES_pn-core-image-minimal = "ping"
-DEFAULT_TEST_SUITES_pn-core-image-sato = "ping ssh df connman syslog xorg scp vnc date rpm smart dmesg python"
-DEFAULT_TEST_SUITES_pn-core-image-sato-sdk = "ping ssh df connman syslog xorg scp vnc date perl ldd gcc rpm smart kernelmodule dmesg python"
+DEFAULT_TEST_SUITES_pn-core-image-sato = "ping ssh df connman syslog xorg scp vnc date rpm smart dmesg python parselogs"
+DEFAULT_TEST_SUITES_pn-core-image-sato-sdk = "ping ssh df connman syslog xorg scp vnc date perl ldd gcc rpm smart kernelmodule dmesg python parselogs"
DEFAULT_TEST_SUITES_pn-meta-toolchain = "auto"
TEST_SUITES ?= "${DEFAULT_TEST_SUITES}"
@@ -281,6 +281,14 @@ def testsdk_main(d):
self.filesdir = os.path.join(os.path.dirname(os.path.abspath(oeqa.runtime.__file__)),"files")
self.sdktestdir = sdktestdir
self.sdkenv = sdkenv
+ self.imagefeatures = d.getVar("IMAGE_FEATURES", True).split()
+ self.distrofeatures = d.getVar("DISTRO_FEATURES", True).split()
+ manifest = os.path.join(d.getVar("SDK_MANIFEST", True))
+ try:
+ with open(manifest) as f:
+ self.pkgmanifest = f.read()
+ except IOError as e:
+ bb.fatal("No package manifest file found. Did you build the sdk image?\n%s" % e)
# test context
tc = TestContext()
diff --git a/meta/classes/toaster.bbclass b/meta/classes/toaster.bbclass
index 95499a5cdd..55d0d28157 100644
--- a/meta/classes/toaster.bbclass
+++ b/meta/classes/toaster.bbclass
@@ -71,7 +71,7 @@ python toaster_layerinfo_dumpdata() {
layer_url = 'http://layers.openembedded.org/layerindex/layer/{layer}/'
layer_url_name = _get_url_map_name(layer_name)
- layer_info['name'] = layer_name
+ layer_info['name'] = layer_url_name
layer_info['local_path'] = layer_path
layer_info['layer_index_url'] = layer_url.format(layer=layer_url_name)
layer_info['version'] = _get_layer_version_information(layer_path)
@@ -149,14 +149,26 @@ python toaster_image_dumpdata() {
image_name = d.getVar('IMAGE_NAME', True);
image_info_data = {}
+ artifact_info_data = {}
+ # collect all artifacts
for dirpath, dirnames, filenames in os.walk(deploy_dir_image):
for fn in filenames:
- if fn.startswith(image_name):
- image_output = os.path.join(dirpath, fn)
- image_info_data[image_output] = os.stat(image_output).st_size
+ try:
+ if fn.startswith(image_name):
+ image_output = os.path.join(dirpath, fn)
+ image_info_data[image_output] = os.stat(image_output).st_size
+ else:
+ import stat
+ artifact_path = os.path.join(dirpath, fn)
+ filestat = os.stat(artifact_path)
+ if stat.S_ISREG(filestat.st_mode):
+ artifact_info_data[artifact_path] = filestat.st_size
+ except OSError as e:
+ bb.event.fire(bb.event.MetadataEvent("OSErrorException", e), d)
bb.event.fire(bb.event.MetadataEvent("ImageFileSize",image_info_data), d)
+ bb.event.fire(bb.event.MetadataEvent("ArtifactFileSize",artifact_info_data), d)
}
@@ -187,8 +199,10 @@ python toaster_collect_task_stats() {
def _read_stats(filename):
cpu_usage = 0
disk_io = 0
- startio = ''
- endio = ''
+ startio = '0'
+ endio = '0'
+ started = '0'
+ ended = '0'
pn = ''
taskname = ''
statinfo = {}
@@ -198,20 +212,28 @@ python toaster_collect_task_stats() {
k,v = line.strip().split(": ", 1)
statinfo[k] = v
- try:
- cpu_usage = statinfo["CPU usage"]
- endio = statinfo["EndTimeIO"]
- startio = statinfo["StartTimeIO"]
- except KeyError:
- pass # we may have incomplete data here
+ if "CPU usage" in statinfo:
+ cpu_usage = str(statinfo["CPU usage"]).strip('% \n\r')
+
+ if "EndTimeIO" in statinfo:
+ endio = str(statinfo["EndTimeIO"]).strip('% \n\r')
+
+ if "StartTimeIO" in statinfo:
+ startio = str(statinfo["StartTimeIO"]).strip('% \n\r')
+
+ if "Started" in statinfo:
+ started = str(statinfo["Started"]).strip('% \n\r')
+
+ if "Ended" in statinfo:
+ ended = str(statinfo["Ended"]).strip('% \n\r')
+
+ disk_io = int(endio) - int(startio)
- if startio and endio:
- disk_io = int(endio.strip('\n ')) - int(startio.strip('\n '))
+ elapsed_time = float(ended) - float(started)
- if cpu_usage:
- cpu_usage = float(cpu_usage.strip('% \n'))
+ cpu_usage = float(cpu_usage)
- return {'cpu_usage': cpu_usage, 'disk_io': disk_io}
+ return {'cpu_usage': cpu_usage, 'disk_io': disk_io, 'elapsed_time': elapsed_time}
if isinstance(e, (bb.build.TaskSucceeded, bb.build.TaskFailed)):
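
A rough standalone sketch of the parsing the new code performs: read "Key: value" lines from a buildstats-style file, strip the decoration, and derive disk_io and elapsed_time the same way the updated _read_stats() does. The sample content is fabricated; real task stat files carry more fields.

    def read_stats(lines):
        # Parse "Key: value" pairs, then compute the derived figures.
        statinfo = {}
        for line in lines:
            if ": " in line:
                k, v = line.strip().split(": ", 1)
                statinfo[k] = v

        def num(key, default='0'):
            # Strip the '%' and whitespace decoration, defaulting to '0'.
            return str(statinfo.get(key, default)).strip('% \n\r')

        cpu_usage = float(num("CPU usage"))
        disk_io = int(num("EndTimeIO")) - int(num("StartTimeIO"))
        elapsed_time = float(num("Ended")) - float(num("Started"))
        return {'cpu_usage': cpu_usage, 'disk_io': disk_io, 'elapsed_time': elapsed_time}

    sample = ["Started: 1000.0", "Ended: 1012.5", "CPU usage: 42.0%",
              "StartTimeIO: 100", "EndTimeIO: 164"]
    print(read_stats(sample))  # cpu_usage 42.0, disk_io 64, elapsed_time 12.5
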
diff --git a/meta/classes/toolchain-scripts.bbclass b/meta/classes/toolchain-scripts.bbclass
index 6cc8eba900..9836db4a4c 100644
--- a/meta/classes/toolchain-scripts.bbclass
+++ b/meta/classes/toolchain-scripts.bbclass
@@ -19,6 +19,7 @@ toolchain_create_sdk_env_script () {
EXTRAPATH="$EXTRAPATH:${SDKPATHNATIVE}${bindir_nativesdk}/${TARGET_ARCH}${TARGET_VENDOR}-$i"
done
echo 'export PATH=${SDKPATHNATIVE}${bindir_nativesdk}:${SDKPATHNATIVE}${bindir_nativesdk}/${TARGET_SYS}'$EXTRAPATH':$PATH' >> $script
+ echo 'export CCACHE_PATH=${SDKPATHNATIVE}${bindir_nativesdk}:${SDKPATHNATIVE}${bindir_nativesdk}/${TARGET_SYS}'$EXTRAPATH':$CCACHE_PATH' >> $script
echo 'export PKG_CONFIG_SYSROOT_DIR=$SDKTARGETSYSROOT' >> $script
echo 'export PKG_CONFIG_PATH=$SDKTARGETSYSROOT'"$libdir"'/pkgconfig' >> $script
echo 'export CONFIG_SITE=${SDKPATH}/site-config-'"${multimach_target_sys}" >> $script
@@ -37,6 +38,7 @@ toolchain_create_tree_env_script () {
rm -f $script
touch $script
echo 'export PATH=${STAGING_DIR_NATIVE}/usr/bin:${PATH}' >> $script
+ echo 'export CCACHE_PATH=${STAGING_DIR_NATIVE}/usr/bin:${CCACHE_PATH}' >> $script
echo 'export PKG_CONFIG_SYSROOT_DIR=${PKG_CONFIG_SYSROOT_DIR}' >> $script
echo 'export PKG_CONFIG_PATH=${PKG_CONFIG_PATH}' >> $script
echo 'export CONFIG_SITE="${@siteinfo_get_files(d)}"' >> $script
@@ -68,15 +70,31 @@ toolchain_shared_env_script () {
echo 'export CXXFLAGS="${TARGET_CXXFLAGS}"' >> $script
echo 'export LDFLAGS="${TARGET_LDFLAGS}"' >> $script
echo 'export CPPFLAGS="${TARGET_CPPFLAGS}"' >> $script
+ echo 'export KCFLAGS="--sysroot=$SDKTARGETSYSROOT"' >> $script
echo 'export OECORE_DISTRO_VERSION="${DISTRO_VERSION}"' >> $script
echo 'export OECORE_SDK_VERSION="${SDK_VERSION}"' >> $script
echo 'export ARCH=${ARCH}' >> $script
echo 'export CROSS_COMPILE=${TARGET_PREFIX}' >> $script
+
+ cat >> $script <<EOF
+
+# Append environment subscripts
+if [ -d "\$OECORE_TARGET_SYSROOT/environment-setup.d" ]; then
+ for envfile in \$OECORE_TARGET_SYSROOT/environment-setup.d/*.sh; do
+ source \$envfile
+ done
+fi
+if [ -d "\$OECORE_NATIVE_SYSROOT/environment-setup.d" ]; then
+ for envfile in \$OECORE_NATIVE_SYSROOT/environment-setup.d/*.sh; do
+ source \$envfile
+ done
+fi
+EOF
}
#we get the cached site config in the runtime
TOOLCHAIN_CONFIGSITE_NOCACHE = "${@siteinfo_get_files(d, True)}"
-TOOLCHAIN_CONFIGSITE_SYSROOTCACHE = "${STAGING_DATADIR}/${TARGET_SYS}_config_site.d"
+TOOLCHAIN_CONFIGSITE_SYSROOTCACHE = "${STAGING_DIR}/${MLPREFIX}${MACHINE}/${target_datadir}/${TARGET_SYS}_config_site.d"
TOOLCHAIN_NEED_CONFIGSITE_CACHE = "${TCLIBC} ncurses"
#This function create a site config file
@@ -116,5 +134,7 @@ python __anonymous () {
deps = ""
for dep in (d.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE', True) or "").split():
deps += " %s:do_populate_sysroot" % dep
+ for variant in (d.getVar('MULTILIB_VARIANTS', True) or "").split():
+ deps += " %s-%s:do_populate_sysroot" % (variant, dep)
d.appendVarFlag('do_configure', 'depends', deps)
}
diff --git a/meta/classes/uboot-config.bbclass b/meta/classes/uboot-config.bbclass
index 8ac1b71bc2..b467659cbb 100644
--- a/meta/classes/uboot-config.bbclass
+++ b/meta/classes/uboot-config.bbclass
@@ -38,9 +38,6 @@ python () {
ubootconfig = ubootconfig[0]
for f, v in ubootconfigflags.items():
- if f == 'defaultval':
- continue
-
items = v.split(',')
if items[0] and len(items) > 2:
raise bb.parse.SkipPackage('Only config,images can be specified!')
diff --git a/meta/classes/uninative.bbclass b/meta/classes/uninative.bbclass
new file mode 100644
index 0000000000..51391dbc4a
--- /dev/null
+++ b/meta/classes/uninative.bbclass
@@ -0,0 +1,44 @@
+NATIVELSBSTRING = "universal"
+
+UNINATIVE_LOADER = "${STAGING_DIR_NATIVE}/lib/ld-linux-x86-64.so.2"
+
+addhandler uninative_eventhandler
+uninative_eventhandler[eventmask] = "bb.event.BuildStarted"
+
+python uninative_eventhandler() {
+ loader = e.data.getVar("UNINATIVE_LOADER", True)
+ if not os.path.exists(loader):
+ import subprocess
+ cmd = e.data.expand("mkdir -p ${STAGING_DIR}; cd ${STAGING_DIR}; tar -xjf ${COREBASE}/${BUILD_ARCH}-nativesdk-libc.tar.bz2; ${STAGING_DIR}/relocate_sdk.py ${STAGING_DIR_NATIVE} ${UNINATIVE_LOADER} ${UNINATIVE_LOADER} ${STAGING_BINDIR_NATIVE}/patchelf-uninative")
+ #bb.warn("nativesdk lib extraction: " + cmd)
+ subprocess.check_call(cmd, shell=True)
+}
+
+SSTATEPOSTUNPACKFUNCS_append = " uninative_changeinterp"
+
+python uninative_changeinterp () {
+ import subprocess
+ import stat
+ import oe.qa
+
+ if not (bb.data.inherits_class('native', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross', d)):
+ return
+
+ sstateinst = d.getVar('SSTATE_INSTDIR', True)
+ for walkroot, dirs, files in os.walk(sstateinst):
+ for file in files:
+ f = os.path.join(walkroot, file)
+ if os.path.islink(f):
+ continue
+ s = os.stat(f)
+ if not ((s[stat.ST_MODE] & stat.S_IXUSR) or (s[stat.ST_MODE] & stat.S_IXGRP) or (s[stat.ST_MODE] & stat.S_IXOTH)):
+ continue
+ elf = oe.qa.ELFFile(f)
+ try:
+ elf.open()
+ except:
+ continue
+
+ #bb.warn("patchelf-uninative --set-interpreter %s %s" % (d.getVar("UNINATIVE_LOADER", True), f))
+ subprocess.call("patchelf-uninative --set-interpreter %s %s" % (d.getVar("UNINATIVE_LOADER", True), f), shell=True)
+}
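
A minimal standalone sketch of the file-selection step above: walk a tree, skip symlinks, and keep only regular files with any execute bit set. The root directory here is hypothetical; the class itself walks SSTATE_INSTDIR and additionally verifies each candidate is a valid ELF before patching its interpreter.

    import os
    import stat

    def candidate_binaries(rootdir):
        # Yield files that pass the same permission test as uninative_changeinterp().
        for walkroot, dirs, files in os.walk(rootdir):
            for name in files:
                path = os.path.join(walkroot, name)
                if os.path.islink(path):
                    continue
                mode = os.stat(path).st_mode
                if mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
                    yield path

    for p in candidate_binaries("."):   # hypothetical root directory
        print(p)
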
diff --git a/meta/classes/update-rc.d.bbclass b/meta/classes/update-rc.d.bbclass
index 19b081d235..bc1aa7dad6 100644
--- a/meta/classes/update-rc.d.bbclass
+++ b/meta/classes/update-rc.d.bbclass
@@ -72,6 +72,7 @@ PACKAGESPLITFUNCS_prepend = "populate_packages_updatercd "
PACKAGESPLITFUNCS_remove_class-nativesdk = "populate_packages_updatercd "
populate_packages_updatercd[vardeps] += "updatercd_prerm updatercd_postrm updatercd_preinst updatercd_postinst"
+populate_packages_updatercd[vardepsexclude] += "OVERRIDES"
python populate_packages_updatercd () {
def update_rcd_auto_depend(pkg):
@@ -121,7 +122,7 @@ python populate_packages_updatercd () {
# Check that this class isn't being inhibited (generally, by
# systemd.bbclass) before doing any work.
- if bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d) and \
+ if bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d) or \
not d.getVar("INHIBIT_UPDATERCD_BBCLASS", True):
pkgs = d.getVar('INITSCRIPT_PACKAGES', True)
if pkgs == None:
diff --git a/meta/classes/useradd-staticids.bbclass b/meta/classes/useradd-staticids.bbclass
index a89cb10a4a..421a70a6ab 100644
--- a/meta/classes/useradd-staticids.bbclass
+++ b/meta/classes/useradd-staticids.bbclass
@@ -58,7 +58,9 @@ def update_useradd_static_config(d):
newparams = []
for param in re.split('''[ \t]*;[ \t]*(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', params):
- param=param.strip()
+ param = param.strip()
+ if not param:
+ continue
try:
uaargs = parser.parse_args(re.split('''[ \t]*(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', param))
except:
@@ -194,7 +196,9 @@ def update_useradd_static_config(d):
newparams = []
for param in re.split('''[ \t]*;[ \t]*(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', params):
- param=param.strip()
+ param = param.strip()
+ if not param:
+ continue
try:
# If we're processing multiple lines, we could have left over values here...
gaargs = parser.parse_args(re.split('''[ \t]*(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', param))
diff --git a/meta/classes/useradd.bbclass b/meta/classes/useradd.bbclass
index ea15dabb84..0b9a843b24 100644
--- a/meta/classes/useradd.bbclass
+++ b/meta/classes/useradd.bbclass
@@ -24,6 +24,8 @@ if test "x$D" != "x"; then
# Installing into a sysroot
SYSROOT="$D"
OPT="--root $D"
+ # user/group lookups should match useradd/groupadd --root
+ export PSEUDO_PASSWD="$SYSROOT:${STAGING_DIR_NATIVE}"
fi
# If we're not doing a special SSTATE/SYSROOT install
@@ -126,7 +128,7 @@ SYSROOTPOSTFUNC_class-cross = ""
SYSROOTPOSTFUNC_class-native = ""
SYSROOTPOSTFUNC_class-nativesdk = ""
-USERADDSETSCENEDEPS = "${MLPREFIX}base-passwd:do_populate_sysroot_setscene shadow-native:do_populate_sysroot_setscene ${MLPREFIX}shadow-sysroot:do_populate_sysroot_setscene"
+USERADDSETSCENEDEPS = "${MLPREFIX}base-passwd:do_populate_sysroot_setscene pseudo-native:do_populate_sysroot_setscene shadow-native:do_populate_sysroot_setscene ${MLPREFIX}shadow-sysroot:do_populate_sysroot_setscene"
USERADDSETSCENEDEPS_class-cross = ""
USERADDSETSCENEDEPS_class-native = ""
USERADDSETSCENEDEPS_class-nativesdk = ""
diff --git a/meta/conf/abi_version.conf b/meta/conf/abi_version.conf
index e8cf9a3127..2867850688 100644
--- a/meta/conf/abi_version.conf
+++ b/meta/conf/abi_version.conf
@@ -4,4 +4,4 @@
# that breaks the format and have been previously discussed on the mailing list
# with general agreement from the core team.
#
-OELAYOUT_ABI = "9"
+OELAYOUT_ABI = "10"
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf
index 5872d1d12c..7902bd7d01 100644
--- a/meta/conf/bitbake.conf
+++ b/meta/conf/bitbake.conf
@@ -133,7 +133,7 @@ SDK_LD_ARCH = "${BUILD_LD_ARCH}"
SDK_AS_ARCH = "${BUILD_AS_ARCH}"
TUNE_PKGARCH ??= ""
-PACKAGE_ARCH = "${TUNE_PKGARCH}"
+PACKAGE_ARCH ??= "${TUNE_PKGARCH}"
MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH', True), d.getVar('MACHINE', True)][bool(d.getVar('MACHINE', True))].replace('-', '_')}"
PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}"
PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}"
@@ -169,7 +169,8 @@ ASSUME_PROVIDED = "\
python-native-runtime \
tar-native \
virtual/libintl-native \
- texinfo-native\
+ texinfo-native \
+ bash-native \
"
# gzip-native should be listed above?
@@ -393,7 +394,7 @@ SDKPATHNATIVE = "${SDKPATH}/sysroots/${SDK_SYS}"
# Kernel info.
##################################################################
-OLDEST_KERNEL = "2.6.16"
+OLDEST_KERNEL = "2.6.32"
STAGING_KERNEL_DIR = "${STAGING_DIR_HOST}/usr/src/kernel"
##################################################################
@@ -458,7 +459,7 @@ PYTHON = "${@sys.executable}"
export BUILD_CC = "${CCACHE}${BUILD_PREFIX}gcc ${BUILD_CC_ARCH}"
export BUILD_CXX = "${CCACHE}${BUILD_PREFIX}g++ ${BUILD_CC_ARCH}"
export BUILD_FC = "${CCACHE}${BUILD_PREFIX}gfortran ${BUILD_CC_ARCH}"
-export BUILD_CPP = "${BUILD_PREFIX}cpp ${BUILD_CC_ARCH}"
+export BUILD_CPP = "${BUILD_PREFIX}gcc ${BUILD_CC_ARCH} -E"
export BUILD_LD = "${BUILD_PREFIX}ld ${BUILD_LD_ARCH}"
export BUILD_CCLD = "${BUILD_PREFIX}gcc ${BUILD_CC_ARCH}"
export BUILD_AR = "${BUILD_PREFIX}ar"
@@ -585,7 +586,7 @@ SRC_URI = ""
# Use pseudo as the fakeroot implementation
PSEUDO_LOCALSTATEDIR ?= "${WORKDIR}/pseudo/"
-PSEUDO_PASSWD ?= "${STAGING_DIR_TARGET}"
+PSEUDO_PASSWD ?= "${STAGING_DIR_TARGET}:${STAGING_DIR_NATIVE}"
export PSEUDO_DISABLED = "1"
#export PSEUDO_PREFIX = "${STAGING_DIR_NATIVE}${prefix_native}"
#export PSEUDO_BINDIR = "${STAGING_DIR_NATIVE}${bindir_native}"
@@ -601,9 +602,6 @@ PREFERRED_PROVIDER_virtual/fakeroot-native ?= "pseudo-native"
# Miscellaneous utilities.
##################################################################
-MKTEMPDIRCMD = "mktemp -d -q ${TMPBASE}"
-MKTEMPCMD = "mktemp -q ${TMPBASE}"
-
# GNU patch tries to be intelligent about checking out read-only files from
# a RCS, which freaks out those special folks with active Perforce clients
# the following makes patch ignore RCS:
@@ -629,9 +627,6 @@ export PKG_CONFIG_LIBDIR = "${PKG_CONFIG_DIR}"
export PKG_CONFIG_SYSROOT_DIR = "${STAGING_DIR_HOST}"
export PKG_CONFIG_DISABLE_UNINSTALLED = "yes"
-# library package naming
-AUTO_LIBNAME_PKGS = "${PACKAGES}"
-
###
### Config file processing
###
@@ -694,7 +689,7 @@ MACHINE_TASK_PROVIDER ?= "${DEFAULT_TASK_PROVIDER}"
# The size in Kbytes for the generated image if it is larger than
# the required size (du -ks IMAGE_ROOTFS * IMAGE_OVERHEAD_FACTOR),
# and no effect if less than it.
-IMAGE_ROOTFS_SIZE ?= "65536"
+IMAGE_ROOTFS_SIZE ??= "65536"
# Forcefully set CACHE now so future changes to things like
# MACHINE don't change the path to the cache
@@ -780,7 +775,7 @@ BB_HASHBASE_WHITELIST ?= "TMPDIR FILE PATH PWD BB_TASKHASH BBPATH DL_DIR \
BB_HASHCONFIG_WHITELIST ?= "${BB_HASHBASE_WHITELIST} DATE TIME SSH_AGENT_PID \
SSH_AUTH_SOCK PSEUDO_BUILD BB_ENV_EXTRAWHITE DISABLE_SANITY_CHECKS \
PARALLEL_MAKE BB_NUMBER_THREADS BB_ORIGENV BB_INVALIDCONF BBINCLUDED"
-BB_SIGNATURE_EXCLUDE_FLAGS ?= "doc defaultval _append _prepend deps depends \
+BB_SIGNATURE_EXCLUDE_FLAGS ?= "doc deps depends \
lockfiles type vardepsexclude vardeps vardepvalue vardepvalueexclude \
file-checksums python func task export unexport noexec nostamp dirs cleandirs \
sstate-lockfile-shared prefuncs postfuncs export_func deptask rdeptask \
diff --git a/meta/conf/distro/defaultsetup.conf b/meta/conf/distro/defaultsetup.conf
index 4437267ff3..71c65b1462 100644
--- a/meta/conf/distro/defaultsetup.conf
+++ b/meta/conf/distro/defaultsetup.conf
@@ -6,7 +6,7 @@ include conf/distro/include/world-broken.inc
TCMODE ?= "default"
require conf/distro/include/tcmode-${TCMODE}.inc
-TCLIBC ?= "eglibc"
+TCLIBC ?= "glibc"
require conf/distro/include/tclibc-${TCLIBC}.inc
# Allow single libc distros to disable this code
diff --git a/meta/conf/distro/include/default-distrovars.inc b/meta/conf/distro/include/default-distrovars.inc
index 8d24d66fc1..04037c3399 100644
--- a/meta/conf/distro/include/default-distrovars.inc
+++ b/meta/conf/distro/include/default-distrovars.inc
@@ -25,7 +25,7 @@ IMAGE_FEATURES ?= ""
# directly part of the distribution.
HOSTTOOLS_WHITELIST_GPL-3.0 ?= ""
WHITELIST_GPL-3.0 ?= "less"
-LGPLv2_WHITELIST_GPL-3.0 ?= "libassuan gnutls libtasn1 libidn libgcc libgcc-initial gcc-runtime"
+LGPLv2_WHITELIST_GPL-3.0 ?= "libassuan gnutls libtasn1 libidn gcc-source libgcc libgcc-initial gcc-runtime"
COMMERCIAL_AUDIO_PLUGINS ?= ""
# COMMERCIAL_AUDIO_PLUGINS ?= "gst-plugins-ugly-mad gst-plugins-ugly-mpegaudioparse"
diff --git a/meta/conf/distro/include/default-providers.inc b/meta/conf/distro/include/default-providers.inc
index a1167fdfbf..4dc42a7add 100644
--- a/meta/conf/distro/include/default-providers.inc
+++ b/meta/conf/distro/include/default-providers.inc
@@ -21,6 +21,7 @@ PREFERRED_PROVIDER_xf86-video-intel ?= "xf86-video-intel"
VIRTUAL-RUNTIME_update-alternatives ?= "update-alternatives-opkg"
VIRTUAL-RUNTIME_apm ?= "apm"
VIRTUAL-RUNTIME_alsa-state ?= "alsa-state"
+VIRTUAL-RUNTIME_getopt ?= "util-linux-getopt"
#
# Default recipe providers
@@ -43,5 +44,7 @@ PREFERRED_PROVIDER_udev ?= "${@bb.utils.contains('DISTRO_FEATURES','systemd','sy
# There are issues with runtime packages and PREFERRED_PROVIDER, see YOCTO #5044 for details
# on this rather strange entry.
PREFERRED_PROVIDER_bluez4 ?= "bluez4"
+PREFERRED_PROVIDER_bluez-hcidump ?= "bluez-hcidump"
# Alternative is ltp-ddt in meta-oe: meta-oe/recipes-devtools/ltp-ddt/ltp-ddt_0.0.4.bb
PREFERRED_PROVIDER_ltp ?= "ltp"
+PREFERRED_PROVIDER_getopt ?= "util-linux-getopt"
diff --git a/meta/conf/distro/include/default-versions.inc b/meta/conf/distro/include/default-versions.inc
index 53ec2e7caf..6e92aa5499 100644
--- a/meta/conf/distro/include/default-versions.inc
+++ b/meta/conf/distro/include/default-versions.inc
@@ -9,3 +9,7 @@ PREFERRED_VERSION_python-native ?= "2.7.3"
# Force the older version of liberation-fonts until we fix the fontforge issue
PREFERRED_VERSION_liberation-fonts ?= "1.04"
+
+# Force db-native's version to stay in sync with db while
+# 'AGPL-3.0' is in the ${INCOMPATIBLE_LICENSE} blacklist
+PREFERRED_VERSION_db-native = "${@incompatible_license_contains('AGPL-3.0', '5.%', '6.%', d)}"
diff --git a/meta/conf/distro/include/security_flags.inc b/meta/conf/distro/include/security_flags.inc
index fa604fd5a4..840f68a8eb 100644
--- a/meta/conf/distro/include/security_flags.inc
+++ b/meta/conf/distro/include/security_flags.inc
@@ -19,12 +19,12 @@ SECURITY_CFLAGS_pn-beecrypt = "${SECURITY_NO_PIE_CFLAGS}"
# Curl seems to check for FORTIFY_SOURCE in CFLAGS, but even assigned
# to CPPFLAGS it gets picked into CFLAGS in bitbake.
#TARGET_CPPFLAGS_pn-curl += "-D_FORTIFY_SOURCE=2"
-SECURITY_CFLAGS_pn-cups = "${SECURITY_NO_PIE_CLAGS}"
+SECURITY_CFLAGS_pn-cups = "${SECURITY_NO_PIE_CFLAGS}"
SECURITY_CFLAGS_pn-curl = "-fstack-protector-all -pie -fpie"
SECURITY_CFLAGS_pn-db = "${SECURITY_NO_PIE_CFLAGS}"
SECURITY_CFLAGS_pn-directfb = "${SECURITY_NO_PIE_CFLAGS}"
-SECURITY_CFLAGS_pn-eglibc = ""
-SECURITY_CFLAGS_pn-eglibc-initial = ""
+SECURITY_CFLAGS_pn-glibc = ""
+SECURITY_CFLAGS_pn-glibc-initial = ""
SECURITY_CFLAGS_pn-enchant = "${SECURITY_NO_PIE_CFLAGS}"
SECURITY_CFLAGS_pn-flac = "${SECURITY_NO_PIE_CFLAGS}"
SECURITY_CFLAGS_pn-gcc-runtime = "${SECURITY_NO_PIE_CFLAGS}"
diff --git a/meta/conf/distro/include/tclibc-eglibc.inc b/meta/conf/distro/include/tclibc-eglibc.inc
deleted file mode 100644
index 15f5ee5a30..0000000000
--- a/meta/conf/distro/include/tclibc-eglibc.inc
+++ /dev/null
@@ -1,40 +0,0 @@
-#
-# eglibc specific configuration
-#
-
-LIBCEXTENSION = "${@['', '-gnu'][(d.getVar('ABIEXTENSION', True) or '') != '']}"
-
-# Add glibc overrides to the overrides for eglibc.
-LIBCOVERRIDE = ":libc-glibc"
-OVERRIDES .= "${LIBCOVERRIDE}"
-
-PREFERRED_PROVIDER_virtual/libiconv ?= "eglibc"
-PREFERRED_PROVIDER_virtual/nativesdk-libiconv ?= "nativesdk-eglibc"
-PREFERRED_PROVIDER_virtual/nativesdk-libintl ?= "nativesdk-eglibc"
-PREFERRED_PROVIDER_virtual/libintl ?= "eglibc"
-PREFERRED_PROVIDER_virtual/libc ?= "eglibc"
-PREFERRED_PROVIDER_virtual/nativesdk-libc ?= "nativesdk-eglibc"
-PREFERRED_PROVIDER_virtual/libc-locale ?= "eglibc-locale"
-
-CXXFLAGS += "-fvisibility-inlines-hidden"
-
-LIBC_DEPENDENCIES = "libsegfault \
- eglibc \
- eglibc-dbg \
- eglibc-dev \
- eglibc-utils \
- eglibc-thread-db \
- ${@get_libc_locales_dependencies(d)}"
-
-LIBC_LOCALE_DEPENDENCIES = "\
- eglibc-localedata-i18n \
- eglibc-gconv-ibm850 \
- eglibc-gconv-cp1252 \
- eglibc-gconv-iso8859-1 \
- eglibc-gconv-iso8859-15"
-
-def get_libc_locales_dependencies(d):
- if 'libc-locales' in (d.getVar('DISTRO_FEATURES', True) or '').split() :
- return d.getVar('LIBC_LOCALE_DEPENDENCIES', True) or ''
- else:
- return ''
diff --git a/meta/conf/distro/include/tclibc-glibc.inc b/meta/conf/distro/include/tclibc-glibc.inc
new file mode 100644
index 0000000000..649918fd2b
--- /dev/null
+++ b/meta/conf/distro/include/tclibc-glibc.inc
@@ -0,0 +1,40 @@
+#
+# glibc specific configuration
+#
+
+LIBCEXTENSION = "${@['', '-gnu'][(d.getVar('ABIEXTENSION', True) or '') != '']}"
+
+# Add glibc overrides to the overrides for glibc.
+LIBCOVERRIDE = ":libc-glibc"
+OVERRIDES .= "${LIBCOVERRIDE}"
+
+PREFERRED_PROVIDER_virtual/libiconv ?= "glibc"
+PREFERRED_PROVIDER_virtual/nativesdk-libiconv ?= "nativesdk-glibc"
+PREFERRED_PROVIDER_virtual/nativesdk-libintl ?= "nativesdk-glibc"
+PREFERRED_PROVIDER_virtual/libintl ?= "glibc"
+PREFERRED_PROVIDER_virtual/libc ?= "glibc"
+PREFERRED_PROVIDER_virtual/nativesdk-libc ?= "nativesdk-glibc"
+PREFERRED_PROVIDER_virtual/libc-locale ?= "glibc-locale"
+
+CXXFLAGS += "-fvisibility-inlines-hidden"
+
+LIBC_DEPENDENCIES = "libsegfault \
+ glibc \
+ glibc-dbg \
+ glibc-dev \
+ glibc-utils \
+ glibc-thread-db \
+ ${@get_libc_locales_dependencies(d)}"
+
+LIBC_LOCALE_DEPENDENCIES = "\
+ glibc-localedata-i18n \
+ glibc-gconv-ibm850 \
+ glibc-gconv-cp1252 \
+ glibc-gconv-iso8859-1 \
+ glibc-gconv-iso8859-15"
+
+def get_libc_locales_dependencies(d):
+ if 'libc-locales' in (d.getVar('DISTRO_FEATURES', True) or '').split() :
+ return d.getVar('LIBC_LOCALE_DEPENDENCIES', True) or ''
+ else:
+ return ''
diff --git a/meta/conf/distro/include/tclibc-musl.inc b/meta/conf/distro/include/tclibc-musl.inc
index d375d8d31d..d1272a39a0 100644
--- a/meta/conf/distro/include/tclibc-musl.inc
+++ b/meta/conf/distro/include/tclibc-musl.inc
@@ -11,8 +11,8 @@ OVERRIDES .= "${LIBCOVERRIDE}"
PREFERRED_PROVIDER_virtual/libc ?= "musl"
PREFERRED_PROVIDER_virtual/libiconv ?= "musl"
PREFERRED_PROVIDER_virtual/libintl ?= "gettext"
-PREFERRED_PROVIDER_virtual/nativesdk-libintl ?= "nativesdk-eglibc"
-PREFERRED_PROVIDER_virtual/nativesdk-libiconv ?= "nativesdk-eglibc"
+PREFERRED_PROVIDER_virtual/nativesdk-libintl ?= "nativesdk-glibc"
+PREFERRED_PROVIDER_virtual/nativesdk-libiconv ?= "nativesdk-glibc"
USE_NLS ?= "no"
diff --git a/meta/conf/distro/include/tclibc-uclibc.inc b/meta/conf/distro/include/tclibc-uclibc.inc
index a4d8c3920d..14c3cdbb8a 100644
--- a/meta/conf/distro/include/tclibc-uclibc.inc
+++ b/meta/conf/distro/include/tclibc-uclibc.inc
@@ -11,8 +11,8 @@ OVERRIDES .= "${LIBCOVERRIDE}"
PREFERRED_PROVIDER_virtual/libc ?= "uclibc"
PREFERRED_PROVIDER_virtual/libiconv ?= "libiconv"
PREFERRED_PROVIDER_virtual/libintl ?= "gettext"
-PREFERRED_PROVIDER_virtual/nativesdk-libintl ?= "nativesdk-eglibc"
-PREFERRED_PROVIDER_virtual/nativesdk-libiconv ?= "nativesdk-eglibc"
+PREFERRED_PROVIDER_virtual/nativesdk-libintl ?= "nativesdk-glibc"
+PREFERRED_PROVIDER_virtual/nativesdk-libiconv ?= "nativesdk-glibc"
USE_NLS ?= "no"
diff --git a/meta/conf/distro/include/tcmode-default.inc b/meta/conf/distro/include/tcmode-default.inc
index 41a014721d..2b4109053b 100644
--- a/meta/conf/distro/include/tcmode-default.inc
+++ b/meta/conf/distro/include/tcmode-default.inc
@@ -25,9 +25,9 @@ PREFERRED_PROVIDER_virtual/gettext ??= "gettext"
GCCVERSION ?= "4.9%"
SDKGCCVERSION ?= "${GCCVERSION}"
BINUVERSION ?= "2.24"
-EGLIBCVERSION ?= "2.19"
+GLIBCVERSION ?= "2.20"
UCLIBCVERSION ?= "0.9.33+git%"
-LINUXLIBCVERSION ?= "3.14"
+LINUXLIBCVERSION ?= "3.17.7"
PREFERRED_VERSION_gcc ?= "${GCCVERSION}"
PREFERRED_VERSION_gcc-cross-${TARGET_ARCH} ?= "${GCCVERSION}"
@@ -36,28 +36,30 @@ PREFERRED_VERSION_gcc-crosssdk-${SDK_ARCH} ?= "${SDKGCCVERSION}"
PREFERRED_VERSION_gcc-crosssdk-initial-${SDK_ARCH} ?= "${SDKGCCVERSION}"
PREFERRED_VERSION_gcc-cross-canadian-${TRANSLATED_TARGET_ARCH} ?= "${GCCVERSION}"
PREFERRED_VERSION_gcc-runtime ?= "${GCCVERSION}"
+PREFERRED_VERSION_gcc-source ?= "${GCCVERSION}"
PREFERRED_VERSION_nativesdk-gcc-runtime ?= "${SDKGCCVERSION}"
PREFERRED_VERSION_libgcc ?= "${GCCVERSION}"
PREFERRED_VERSION_libgcc-initial ?= "${GCCVERSION}"
PREFERRED_VERSION_nativesdk-libgcc ?= "${SDKGCCVERSION}"
PREFERRED_VERSION_nativesdk-libgcc-initial ?= "${SDKGCCVERSION}"
PREFERRED_VERSION_binutils ?= "${BINUVERSION}"
+PREFERRED_VERSION_binutils-native ?= "${BINUVERSION}"
PREFERRED_VERSION_binutils-cross-${TARGET_ARCH} ?= "${BINUVERSION}"
PREFERRED_VERSION_binutils-crosssdk-${SDK_ARCH} ?= "${BINUVERSION}"
PREFERRED_VERSION_binutils-cross-canadian-${TRANSLATED_TARGET_ARCH} ?= "${BINUVERSION}"
PREFERRED_VERSION_linux-libc-headers ?= "${LINUXLIBCVERSION}"
PREFERRED_VERSION_nativesdk-linux-libc-headers ?= "${LINUXLIBCVERSION}"
-PREFERRED_VERSION_eglibc ?= "${EGLIBCVERSION}"
-PREFERRED_VERSION_eglibc-locale ?= "${EGLIBCVERSION}"
-PREFERRED_VERSION_eglibc-mtrace ?= "${EGLIBCVERSION}"
-PREFERRED_VERSION_eglibc-scripts ?= "${EGLIBCVERSION}"
-PREFERRED_VERSION_nativesdk-eglibc ?= "${EGLIBCVERSION}"
-PREFERRED_VERSION_eglibc-initial ?= "${EGLIBCVERSION}"
-PREFERRED_VERSION_nativesdk-eglibc-initial ?= "${EGLIBCVERSION}"
-PREFERRED_VERSION_cross-localedef-native ?= "${EGLIBCVERSION}"
+PREFERRED_VERSION_glibc ?= "${GLIBCVERSION}"
+PREFERRED_VERSION_glibc-locale ?= "${GLIBCVERSION}"
+PREFERRED_VERSION_glibc-mtrace ?= "${GLIBCVERSION}"
+PREFERRED_VERSION_glibc-scripts ?= "${GLIBCVERSION}"
+PREFERRED_VERSION_nativesdk-glibc ?= "${GLIBCVERSION}"
+PREFERRED_VERSION_glibc-initial ?= "${GLIBCVERSION}"
+PREFERRED_VERSION_nativesdk-glibc-initial ?= "${GLIBCVERSION}"
+PREFERRED_VERSION_cross-localedef-native ?= "${GLIBCVERSION}"
PREFERRED_VERSION_uclibc ?= "${UCLIBCVERSION}"
PREFERRED_VERSION_uclibc-initial ?= "${UCLIBCVERSION}"
-PREFERRED_VERSION_elfutils ?= "0.148"
+PREFERRED_VERSION_elfutils ?= "0.160"
# don't use version earlier than 1.4 for gzip-native, as it's necessary for
# some packages using an archive format incompatible with earlier gzip
PREFERRED_VERSION_gzip-native ?= "1.6"
diff --git a/meta/conf/documentation.conf b/meta/conf/documentation.conf
index 7cf4476a14..2ab86e1487 100644
--- a/meta/conf/documentation.conf
+++ b/meta/conf/documentation.conf
@@ -18,6 +18,7 @@ do_compile_ptest_base[doc] = "Compiles the runtime test suite included in the so
do_configure[doc] = "Configures the source by enabling and disabling any build-time and configuration options for the software being built"
do_configure_ptest_base[doc] = "Configures the runtime test suite included in the software being built"
do_deploy[doc] = "Writes deployable output files to the deploy directory"
+do_devpyshell[doc] = "Starts an interactive Python shell for development/debugging"
do_devshell[doc] = "Starts a shell with the environment set up for development/debugging"
do_diffconfig[doc] = "Compares the old and new config files after running do_menuconfig for the kernel"
do_fetch[doc] = "Fetches the source code"
@@ -33,6 +34,7 @@ do_listtasks[doc] = "Lists all defined tasks for a target"
do_menuconfig[doc] = "Runs 'make menuconfig' for the kernel"
do_package[doc] = "Analyzes the content of the holding area and splits it into subsets based on available packages and files"
do_package_index[doc] = "Creates or updates the index in the Package Feed area"
+do_package_qa[doc] = "Runs QA checks on packaged files"
do_package_write_deb[doc] = "Creates the actual DEB packages and places them in the Package Feed area"
do_package_write_ipk[doc] = "Creates the actual IPK packages and places them in the Package Feed area"
do_package_write_rpm[doc] = "Creates the actual RPM packages and places them in the Package Feed area"
@@ -51,6 +53,7 @@ do_spdx[doc] = "A build stage that takes the source code and scans it on a remot
do_strip[doc] = "Strips unneeded sections out of the Linux kernel image"
do_testimage[doc] = "Boots an image and performs runtime tests within the image"
do_testimage_auto[doc] = "Boots an image and performs runtime tests within the image immediately after it has been built"
+do_testsdk[doc] = "Installs an SDK and performs runtime tests on the tools installed by it"
do_uboot_mkimage[doc] = "Creates a uImage file from the kernel for the U-Boot bootloader"
do_unpack[doc] = "Unpacks the source code into a working directory"
do_validate_branches[doc] = "Ensures that the source/meta branches are on the locations specified by their SRCREV values for a linux-yocto style kernel"
@@ -107,6 +110,7 @@ BUSYBOX_SPLIT_SUID[doc] = "For the BusyBox recipe, specifies whether to split th
CACHE[doc] = "The directory holding the cache of the metadata."
CFLAGS[doc] = "Flags passed to the C compiler for the target system. This variable evaluates to the same as TARGET_CFLAGS."
CLASSOVERRIDE[doc] = "An internal variable specifying the special class override that should currently apply (e.g. "class-target", "class-native", and so forth)."
+CLEANBROKEN[doc] = "Specifies if 'make clean' does not work for a recipe (and therefore the build system should not try to use it during do_configure)"
COMBINED_FEATURES[doc] = "A set of features common between MACHINE_FEATURES and DISTRO_FEATURES."
COMMON_LICENSE_DIR[doc] = "Points to meta/files/common-licenses in the Source Directory, which is where generic license files reside."
COMPATIBLE_HOST[doc] = "A regular expression that resolves to one or more hosts (when the recipe is native) or one or more targets (when the recipe is non-native) with which a recipe is compatible."
@@ -147,7 +151,7 @@ DL_DIR[doc] = "The central download directory used by the build process to store
#E
-ENABLE_BINARY_LOCALE_GENERATION[doc] = "Controls which locales for eglibc are generated during the build. The variable is useful if the target device has 64Mbytes of RAM or less."
+ENABLE_BINARY_LOCALE_GENERATION[doc] = "Controls which locales for glibc are generated during the build. The variable is useful if the target device has 64Mbytes of RAM or less."
ERROR_QA[doc] = "Specifies the quality assurance checks whose failures are reported as errors by the OpenEmbedded build system."
EXCLUDE_FROM_WORLD[doc] = "Directs BitBake to exclude a recipe from world builds (i.e. bitbake world)."
EXTENDPE[doc] = "Used with file and pathnames to create a prefix for a recipe's version based on the recipe's PE value. If PE is set and greater than zero for a recipe, EXTENDPE becomes that value."
@@ -204,6 +208,7 @@ ICECC_USER_CLASS_BL[doc] = "Identifies user classes that you do not want the Ice
ICECC_USER_PACKAGE_BL[doc] = "Identifies user recipes that you do not want the Icecream distributed compile support to consider."
ICECC_USER_PACKAGE_WL[doc] = "Identifies user recipes that use an empty PARALLEL_MAKE variable that you want to force remote distributed compilation on using the Icecream distributed compile support."
IMAGE_BASENAME[doc] = "The base name of image output files."
+IMAGE_BOOT_FILES[doc] = "Whitespace separated list of files from ${DEPLOY_DIR_IMAGE} to place in boot partition. Entries will be installed under a same name as the source file. To change the destination file name, pass a desired name after a semicolon (eg. u-boot.img;uboot)."
IMAGE_CLASSES[doc] = "A list of classes that all images should inherit."
IMAGE_FEATURES[doc] = "The primary list of features to include in an image. Configure this variable in an image recipe."
IMAGE_FSTYPES[doc] = "Formats of root filesystem images that you want to have created."
@@ -218,7 +223,7 @@ IMAGE_ROOTFS_EXTRA_SPACE[doc] = "Defines additional free disk space created in t
IMAGE_ROOTFS_SIZE[doc] = "Defines the size in Kbytes for the generated image."
IMAGE_TYPES[doc] = "Specifies the complete list of supported image types by default."
INC_PR[doc] = "Helps define the recipe revision for recipes that share a common include file."
-INCOMPATIBLE_LICENSE[doc] = "Specifies a space-separated list of license names (as they would appear in LICENSE) that should be excluded from the build."
+INCOMPATIBLE_LICENSE[doc] = "Specifies a space-separated list of license names (as they would appear in LICENSE) that should be excluded from the build. Wildcard is supported, such as '*GPLv3'"
INHIBIT_DEFAULT_DEPS[doc] = "Prevents the default dependencies, namely the C compiler and standard C library (libc), from being added to DEPENDS."
INHIBIT_PACKAGE_STRIP[doc] = "If set to "1", causes the build to not strip binaries in resulting packages."
INHERIT[doc] = "Causes the named class to be inherited at this point during parsing. The variable is only valid in configuration files."
@@ -239,6 +244,8 @@ KBRANCH_DEFAULT[doc] = "Defines the Linux kernel source repository's default bra
KERNEL_EXTRA_ARGS[doc] = "Specifies additional make command-line arguments the OpenEmbedded build system passes on when compiling the kernel."
KERNEL_FEATURES[doc] = "Includes additional metadata from the Yocto Project kernel Git repository. The metadata you add through this variable includes config fragments and features descriptions."
KERNEL_IMAGETYPE[doc] = "The type of kernel to build for a device, usually set by the machine configuration files and defaults to 'zImage'."
+KERNEL_MODULE_AUTOLOAD[doc] = "Lists kernel modules that need to be auto-loaded during boot."
+KERNEL_MODULE_PROBECONF[doc] = "Lists kernel modules for which the build system expects to find module_conf_* values that specify configuration for each of the modules."
KERNEL_PATH[doc] = "The location of the kernel sources. This variable is set to the value of the STAGING_KERNEL_DIR within the module class (module.bbclass)."
KERNEL_SRC[doc] = "The location of the kernel sources. This variable is set to the value of the STAGING_KERNEL_DIR within the module class (module.bbclass)."
KFEATURE_DESCRIPTION[doc] = "Provides a short description of a configuration fragment. You use this variable in the .scc file that describes a configuration fragment file."
@@ -406,11 +413,16 @@ TARGET_FPU[doc] = "Specifies the method for handling FPU code. For FPU-less targ
TARGET_OS[doc] = "Specifies the target's operating system."
TARGET_PREFIX[doc] = "The prefix for the cross-compile toolchain (e.g. arm-linux-)."
TARGET_SYS[doc] = "The target system is composed of TARGET_ARCH, TARGET_VENDOR and TARGET_OS."
-TCLIBC[doc] = "Specifies GNU standard C library (libc) variant to use during the build process. You can select 'eglibc' or 'uclibc'."
+TCLIBC[doc] = "Specifies GNU standard C library (libc) variant to use during the build process. You can select 'glibc' or 'uclibc'."
TCMODE[doc] = "Enables an external toolchain (where provided by an additional layer) if set to a value other than 'default'."
TEST_IMAGE[doc] = "Enables test booting of virtual machine images under the QEMU emulator after any root filesystems are created and runs tests against those images."
TEST_QEMUBOOT_TIMEOUT[doc] = "The time in seconds allowed for an image to boot before automated runtime tests begin to run against an image."
TEST_SUITES[doc] = "An ordered list of tests (modules) to run against an image when performing automated runtime testing."
+TEST_POWERCONTROL_CMD[doc] = "For automated hardware testing, specifies the command to use to control the power of the target machine under test."
+TEST_POWERCONTROL_EXTRA_ARGS[doc] = "For automated hardware testing, specifies additional arguments to pass through to the command specified in TEST_POWERCONTROL_CMD."
+TEST_SERIALCONTROL_CMD[doc] = "For automated hardware testing, specifies the command to use to connect to the serial console of the target machine under test."
+TEST_SERIALCONTROL_EXTRA_ARGS[doc] = "For automated hardware testing, specifies additional arguments to pass through to the command specified in TEST_SERIALCONTROL_CMD."
+TEST_TARGET[doc] = "For automated runtime testing, specifies the method of deploying the image and running tests on the target machine."
THISDIR[doc] = "The directory in which the file BitBake is currently parsing is located."
TIME[doc] = "The time the build was started using HMS format."
TMPDIR[doc] = "The temporary directory the OpenEmbedded build system uses when it does its work building images. By default, the TMPDIR variable is named tmp within the Build Directory."
diff --git a/meta/conf/layer.conf b/meta/conf/layer.conf
index 1f7d5c7a84..de96548bd2 100644
--- a/meta/conf/layer.conf
+++ b/meta/conf/layer.conf
@@ -14,6 +14,7 @@ LAYERVERSION_core = "4"
# Set a variable to get to the top of the metadata location
COREBASE = '${@os.path.normpath("${LAYERDIR}/../")}'
+# opkg-utils is for update-alternatives :(
SIGGEN_EXCLUDERECIPES_ABISAFE += " \
sysvinit-inittab \
shadow-securetty \
@@ -31,10 +32,14 @@ SIGGEN_EXCLUDERECIPES_ABISAFE += " \
packagegroup-x11-xserver \
systemd-serialgetty \
initscripts \
+ shadow \
+ shadow-sysroot \
+ base-passwd \
+ opkg-utils \
"
SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS += " \
- gcc-cross-${TARGET_ARCH}->eglibc \
+ gcc-cross-${TARGET_ARCH}->glibc \
gcc-cross-${TARGET_ARCH}->musl \
gcc-cross-${TARGET_ARCH}->uclibc \
gcc-cross-${TARGET_ARCH}->linux-libc-headers \
diff --git a/meta/conf/licenses.conf b/meta/conf/licenses.conf
index b1d5480b58..629916b6a5 100644
--- a/meta/conf/licenses.conf
+++ b/meta/conf/licenses.conf
@@ -49,7 +49,6 @@ SRC_DISTRIBUTE_LICENSES += "Zimbra-1.3 Zlib ZPL-1.1 ZPL-2.0 ZPL-2.1"
#
# We should NEVER have a GPL/LGPL without a version!!!!
# Any mapping to MPL/LGPL/GPL should be fixed
-# see: https://wiki.yoctoproject.org/wiki/License_Audit
# AGPL variations
SPDXLICENSEMAP[AGPL-3] = "AGPL-3.0"
@@ -144,7 +143,7 @@ DATA_LICENSE = "CC0-1.0"
# information.
#
-FOSS_COPYRIGHT = "true"
+FOSS_NO_COPYRIGHT = "true"
# An option defined as [FOSS_RECURSIVE_UNPACK] in ./meta/conf/licenses.conf is
# used to control whether the FOSSology server needs to recursively unpack tar.gz files, which
@@ -160,12 +159,30 @@ FOSS_COPYRIGHT = "true"
FOSS_RECURSIVE_UNPACK = "false"
-# FOSSologySPDX instance server.
+# An option defined as [FOSS_FULL_SPDX] in ./meta/conf/licenses.conf is used to
+# control what kind of SPDX output to get from the FOSSology server.
+#
+# FOSS_FULL_SPDX = "true":
+# Tell the FOSSology server to return full SPDX output, as if the program were
+# run from the command line. This is needed in order to get license refs for
+# the full package rather than for individual files only.
+#
+# FOSS_FULL_SPDX = "false":
+# Tell FOSSology to only process license information for files. All package
+# license tags in the report will be "NOASSERTION".
+#
+
+FOSS_FULL_SPDX = "true"
+
+# FOSSologySPDX instance server. http://localhost/repo is the default
+# installation location for FOSSology.
+#
# For more information on FOSSologySPDX commandline:
# https://github.com/spdx-tools/fossology-spdx/wiki/Fossology-SPDX-Web-API
#
-FOSS_SERVER = "http://localhost//?mod=spdx_license_once&noCopyright=${FOSS_COPYRIGHT}&recursiveUnpack=${FOSS_RECURSIVE_UNPACK}"
+FOSS_BASE_URL = "http://localhost/repo/?mod=spdx_license_once"
+FOSS_SERVER = "${FOSS_BASE_URL}&fullSPDXFlag=${FOSS_FULL_SPDX}&noCopyright=${FOSS_NO_COPYRIGHT}&recursiveUnpack=${FOSS_RECURSIVE_UNPACK}"
FOSS_WGET_FLAGS = "-qO - --no-check-certificate --timeout=0"
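
For reference, the expanded FOSS_SERVER value can be previewed outside BitBake with a small Python sketch; the dictionary below simply mirrors the defaults set in this file and is only illustrative.

    # Preview how FOSS_SERVER expands from the defaults above (illustrative only).
    settings = {
        "FOSS_BASE_URL": "http://localhost/repo/?mod=spdx_license_once",
        "FOSS_FULL_SPDX": "true",
        "FOSS_NO_COPYRIGHT": "true",
        "FOSS_RECURSIVE_UNPACK": "false",
    }
    foss_server = ("%(FOSS_BASE_URL)s&fullSPDXFlag=%(FOSS_FULL_SPDX)s"
                   "&noCopyright=%(FOSS_NO_COPYRIGHT)s"
                   "&recursiveUnpack=%(FOSS_RECURSIVE_UNPACK)s" % settings)
    print(foss_server)
    # http://localhost/repo/?mod=spdx_license_once&fullSPDXFlag=true&noCopyright=true&recursiveUnpack=false
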
diff --git a/meta/conf/machine/include/arm/arch-arm64.inc b/meta/conf/machine/include/arm/arch-arm64.inc
new file mode 100644
index 0000000000..5376b87dd6
--- /dev/null
+++ b/meta/conf/machine/include/arm/arch-arm64.inc
@@ -0,0 +1,36 @@
+DEFAULTTUNE ?= "aarch64"
+
+require conf/machine/include/arm/arch-armv7a.inc
+
+TUNEVALID[aarch64] = "Enable instructions for aarch64"
+
+MACHINEOVERRIDES .= "${@bb.utils.contains("TUNE_FEATURES", "aarch64", ":aarch64", "" ,d)}"
+
+# Little Endian base configs
+AVAILTUNES += "aarch64 aarch64_be"
+ARMPKGARCH_tune-aarch64 ?= "aarch64"
+ARMPKGARCH_tune-aarch64_be ?= "aarch64_be"
+TUNE_FEATURES_tune-aarch64 ?= "aarch64"
+TUNE_FEATURES_tune-aarch64_be ?= "${TUNE_FEATURES_tune-aarch64} bigendian"
+BASE_LIB_tune-aarch64 = "lib64"
+BASE_LIB_tune-aarch64_be = "lib64"
+
+PACKAGE_EXTRA_ARCHS_tune-aarch64 = "aarch64"
+PACKAGE_EXTRA_ARCHS_tune-aarch64_be = "aarch64_be"
+
+ARMPKGSFX_ENDIAN_64 = "${@bb.utils.contains("TUNE_FEATURES", "bigendian", "_be", "", d)}"
+TUNE_ARCH_64 = "aarch64${ARMPKGSFX_ENDIAN_64}"
+TUNE_PKGARCH_64 = "aarch64${ARMPKGSFX_ENDIAN_64}"
+ABIEXTENSION_64 = ""
+TARGET_FPU_64 = ""
+
+# Duplicated from arch-arm.inc
+TUNE_ARCH_32 = "${@bb.utils.contains("TUNE_FEATURES", "bigendian", "armeb", "arm", d)}"
+TUNE_PKGARCH_32 = "${ARMPKGARCH}${ARMPKGSFX_THUMB}${ARMPKGSFX_DSP}${ARMPKGSFX_EABI}${ARMPKGSFX_ENDIAN}${ARMPKGSFX_FPU}"
+ABIEXTENSION_32 = "eabi"
+TARGET_FPU_32 = "${@d.getVar('ARMPKGSFX_FPU', True).strip('-') or 'soft'}"
+
+TUNE_ARCH = "${@bb.utils.contains("TUNE_FEATURES", "aarch64", "${TUNE_ARCH_64}", "${TUNE_ARCH_32}" ,d)}"
+TUNE_PKGARCH = "${@bb.utils.contains("TUNE_FEATURES", "aarch64", "${TUNE_PKGARCH_64}", "${TUNE_PKGARCH_32}" ,d)}"
+ABIEXTENSION = "${@bb.utils.contains("TUNE_FEATURES", "aarch64", "${ABIEXTENSION_64}", "${ABIEXTENSION_32}" ,d)}"
+TARGET_FPU = "${@bb.utils.contains("TUNE_FEATURES", "aarch64", "${TARGET_FPU_64}", "${TARGET_FPU_32}" ,d)}"
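
The TUNE_ARCH, TUNE_PKGARCH, ABIEXTENSION and TARGET_FPU assignments above all hinge on bb.utils.contains(), which picks one of two values depending on whether every word of the check string appears in the named variable. A minimal standalone sketch of that selection logic (the real helper looks the variable up in the BitBake datastore d; here the value is passed in directly):

    # Standalone sketch of the bb.utils.contains() selection used above:
    # return truevalue when every word of checkvalues is present, else falsevalue.
    def contains(value, checkvalues, truevalue, falsevalue):
        return truevalue if set(checkvalues.split()).issubset(value.split()) else falsevalue

    tune_features = "aarch64 bigendian"                    # illustrative TUNE_FEATURES
    endian_sfx = contains(tune_features, "bigendian", "_be", "")
    tune_arch_64 = "aarch64" + endian_sfx
    print(contains(tune_features, "aarch64", tune_arch_64, "arm"))   # aarch64_be
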
diff --git a/meta/conf/machine/include/arm/arch-armv8.inc b/meta/conf/machine/include/arm/arch-armv8.inc
new file mode 100644
index 0000000000..5e832fae6d
--- /dev/null
+++ b/meta/conf/machine/include/arm/arch-armv8.inc
@@ -0,0 +1 @@
+require conf/machine/include/arm/arch-arm64.inc
diff --git a/meta/conf/machine/include/arm/feature-arm-thumb.inc b/meta/conf/machine/include/arm/feature-arm-thumb.inc
index a94386f577..8e6619d1b5 100644
--- a/meta/conf/machine/include/arm/feature-arm-thumb.inc
+++ b/meta/conf/machine/include/arm/feature-arm-thumb.inc
@@ -18,6 +18,8 @@ ARM_THUMB_SUFFIX .= "${@bb.utils.contains('TUNE_FEATURES', 'armv7m', 't2', '', d
# some recipe explicitly sets ARM_INSTRUCTION_SET to "arm"
ARM_M_OPT = "${@bb.utils.contains('TUNE_FEATURES', 'arm', '${ARM_THUMB_OPT}', 'thumb', d)}"
python () {
+ if bb.utils.contains('TUNE_FEATURES', 'thumb', 'False', 'True', d):
+ return
selected = d.getVar('ARM_INSTRUCTION_SET', True)
if selected == None:
return
@@ -37,8 +39,10 @@ ARMPKGSFX_THUMB .= "${@bb.utils.contains('TUNE_FEATURES', 'thumb', '${ARM_THUMB_
# arm system and vice versa. It is strongly recommended that DISTROs not
# turn this off - the actual cost is very small.
TUNEVALID[no-thumb-interwork] = "Disable mixing of thumb and ARM functions"
-TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'no-thumb-interwork', ' -mno-thumb-interwork', ' -mthumb-interwork', d)}"
-OVERRIDES .= "${@bb.utils.contains('TUNE_FEATURES', 'no-thumb-interwork', ':thumb-interwork', '', d)}"
+THUMB_TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'no-thumb-interwork', ' -mno-thumb-interwork', ' -mthumb-interwork', d)}"
+THUMB_OVERRIDES .= "${@bb.utils.contains('TUNE_FEATURES', 'no-thumb-interwork', ':thumb-interwork', '', d)}"
+TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'thumb', ' ${THUMB_TUNE_CCARGS}', '', d)}"
+OVERRIDES .= "${@bb.utils.contains('TUNE_FEATURES', 'thumb', ' ${THUMB_OVERRIDES}', '', d)}"
# what about armv7m devices which don't support -marm (e.g. Cortex-M3)?
-TARGET_CC_KERNEL_ARCH += "-mno-thumb-interwork -marm"
+TARGET_CC_KERNEL_ARCH += "${@bb.utils.contains('TUNE_FEATURES', 'thumb', '-mno-thumb-interwork -marm', '', d)}"
diff --git a/meta/conf/machine/include/tune-power5.inc b/meta/conf/machine/include/tune-power5.inc
new file mode 100644
index 0000000000..30be86941a
--- /dev/null
+++ b/meta/conf/machine/include/tune-power5.inc
@@ -0,0 +1,21 @@
+DEFAULTTUNE ?= "ppc64p5"
+
+require conf/machine/include/powerpc/arch-powerpc64.inc
+
+TUNEVALID[power5] = "Enable IBM Power5 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains("TUNE_FEATURES", "power5", " -mcpu=power5", "", d)}"
+
+AVAILTUNES += "ppcp5 ppc64p5"
+TUNE_FEATURES_tune-ppcp5 = "m32 fpu-hard power5 altivec"
+BASE_LIB_tune-ppcp5 = "lib"
+TUNE_PKGARCH_tune-ppcp5 = "ppcp5"
+PACKAGE_EXTRA_ARCHS_tune-ppcp5 = "${PACKAGE_EXTRA_ARCHS_tune-powerpc} ppcp5"
+
+TUNE_FEATURES_tune-ppc64p5 = "m64 fpu-hard power5 altivec"
+BASE_LIB_tune-ppc64p5 = "lib64"
+TUNE_PKGARCH_tune-ppc64p5 = "ppc64p5"
+PACKAGE_EXTRA_ARCHS_tune-ppc64p5 = "${PACKAGE_EXTRA_ARCHS_tune-powerpc64} ppc64p5"
+
+# glibc configure options to get power5 specific library
+GLIBC_EXTRA_OECONF_powerpc64 += "${@bb.utils.contains("TUNE_FEATURES", "power5", "--with-cpu=power5", "", d)}"
+GLIBC_EXTRA_OECONF_powerpc += "${@bb.utils.contains("TUNE_FEATURES", "power5", "--with-cpu=power5", "", d)}"
diff --git a/meta/conf/machine/include/tune-power6.inc b/meta/conf/machine/include/tune-power6.inc
new file mode 100644
index 0000000000..7563798120
--- /dev/null
+++ b/meta/conf/machine/include/tune-power6.inc
@@ -0,0 +1,21 @@
+DEFAULTTUNE ?= "ppc64p6"
+
+require conf/machine/include/powerpc/arch-powerpc64.inc
+
+TUNEVALID[power6] = "Enable IBM Power6 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains("TUNE_FEATURES", "power6", " -mcpu=power6", "", d)}"
+
+AVAILTUNES += "ppcp6 ppc64p6"
+TUNE_FEATURES_tune-ppcp6 = "m32 fpu-hard power6 altivec"
+BASE_LIB_tune-ppcp6 = "lib"
+TUNE_PKGARCH_tune-ppcp6 = "ppcp6"
+PACKAGE_EXTRA_ARCHS_tune-ppcp6 = "${PACKAGE_EXTRA_ARCHS_tune-powerpc} ppcp6"
+
+TUNE_FEATURES_tune-ppc64p6 = "m64 fpu-hard power6 altivec"
+BASE_LIB_tune-ppc64p6 = "lib64"
+TUNE_PKGARCH_tune-ppc64p6 = "ppc64p6"
+PACKAGE_EXTRA_ARCHS_tune-ppc64p6 = "${PACKAGE_EXTRA_ARCHS_tune-powerpc64} ppc64p6"
+
+# glibc configure options to get power6 specific library
+GLIBC_EXTRA_OECONF_powerpc64 += "${@bb.utils.contains("TUNE_FEATURES", "power6", "--with-cpu=power6", "", d)}"
+GLIBC_EXTRA_OECONF_powerpc += "${@bb.utils.contains("TUNE_FEATURES", "power6", "--with-cpu=power6", "", d)}"
diff --git a/meta/conf/machine/include/tune-power7.inc b/meta/conf/machine/include/tune-power7.inc
new file mode 100644
index 0000000000..7069e75026
--- /dev/null
+++ b/meta/conf/machine/include/tune-power7.inc
@@ -0,0 +1,21 @@
+DEFAULTTUNE ?= "ppc64p7"
+
+require conf/machine/include/powerpc/arch-powerpc64.inc
+
+TUNEVALID[power7] = "Enable IBM Power7 specific processor optimizations"
+TUNE_CCARGS .= "${@bb.utils.contains("TUNE_FEATURES", "power7", " -mcpu=power7", "", d)}"
+
+AVAILTUNES += "ppcp7 ppc64p7"
+TUNE_FEATURES_tune-ppcp7 = "m32 fpu-hard power7 altivec"
+BASE_LIB_tune-ppcp7 = "lib"
+TUNE_PKGARCH_tune-ppcp7 = "ppcp7"
+PACKAGE_EXTRA_ARCHS_tune-ppcp7 = "${PACKAGE_EXTRA_ARCHS_tune-powerpc} ppcp7"
+
+TUNE_FEATURES_tune-ppc64p7 = "m64 fpu-hard power7 altivec"
+BASE_LIB_tune-ppc64p7 = "lib64"
+TUNE_PKGARCH_tune-ppc64p7 = "ppc64p7"
+PACKAGE_EXTRA_ARCHS_tune-ppc64p7 = "${PACKAGE_EXTRA_ARCHS_tune-powerpc64} ppc64p7"
+
+# glibc configure options to get power7 specific library
+GLIBC_EXTRA_OECONF_powerpc64 += "${@bb.utils.contains("TUNE_FEATURES", "power7", "--with-cpu=power7", "", d)}"
+GLIBC_EXTRA_OECONF_powerpc += "${@bb.utils.contains("TUNE_FEATURES", "power7", "--with-cpu=power7", "", d)}"
diff --git a/meta/conf/machine/qemuarm64.conf b/meta/conf/machine/qemuarm64.conf
new file mode 100644
index 0000000000..20bcfbac99
--- /dev/null
+++ b/meta/conf/machine/qemuarm64.conf
@@ -0,0 +1,12 @@
+#@TYPE: Machine
+#@NAME: generic armv8 machine
+#@DESCRIPTION: Machine configuration for running a generic armv8 machine
+
+require conf/machine/include/arm/arch-armv8.inc
+require conf/machine/include/qemu.inc
+
+MACHINE_FEATURES = ""
+
+KERNEL_IMAGETYPE = "Image"
+
+SERIAL_CONSOLE = "38400 ttyAMA0"
diff --git a/meta/conf/machine/qemumips.conf b/meta/conf/machine/qemumips.conf
index ce1c419d1e..d9d2421616 100644
--- a/meta/conf/machine/qemumips.conf
+++ b/meta/conf/machine/qemumips.conf
@@ -3,7 +3,7 @@
#@DESCRIPTION: mti_malta32_be
require conf/machine/include/qemu.inc
-require conf/machine/include/tune-mips32.inc
+require conf/machine/include/tune-mips32r2.inc
KERNEL_IMAGETYPE = "vmlinux"
KERNEL_ALT_IMAGETYPE = "vmlinux.bin"
diff --git a/meta/conf/machine/qemux86-64.conf b/meta/conf/machine/qemux86-64.conf
index 642a2d9b52..837f9f4ab0 100644
--- a/meta/conf/machine/qemux86-64.conf
+++ b/meta/conf/machine/qemux86-64.conf
@@ -20,6 +20,8 @@ XSERVER = "xserver-xorg \
xf86-input-vmmouse \
xf86-input-keyboard \
xf86-input-evdev \
+ xf86-video-cirrus \
+ xf86-video-fbdev \
xf86-video-vmware"
MACHINE_FEATURES += "x86"
diff --git a/meta/conf/machine/qemux86.conf b/meta/conf/machine/qemux86.conf
index ebc27dba47..3562276932 100644
--- a/meta/conf/machine/qemux86.conf
+++ b/meta/conf/machine/qemux86.conf
@@ -19,6 +19,8 @@ XSERVER = "xserver-xorg \
xf86-input-vmmouse \
xf86-input-keyboard \
xf86-input-evdev \
+ xf86-video-cirrus \
+ xf86-video-fbdev \
xf86-video-vmware"
MACHINE_FEATURES += "x86"
diff --git a/meta/conf/multilib.conf b/meta/conf/multilib.conf
index 733236d6bb..37e8481112 100644
--- a/meta/conf/multilib.conf
+++ b/meta/conf/multilib.conf
@@ -14,6 +14,6 @@ INHERIT += "multilib_global"
BBCLASSEXTEND_append = " ${MULTILIBS}"
-MULTILIB_GLOBAL_VARIANTS = "lib32 lib64 libx32"
+MULTILIB_GLOBAL_VARIANTS ?= "lib32 lib64 libx32"
OPKG_ARGS_append = " --force-maintainer --force-overwrite"
diff --git a/meta/conf/sanity.conf b/meta/conf/sanity.conf
index e518b84146..1d55fa71d9 100644
--- a/meta/conf/sanity.conf
+++ b/meta/conf/sanity.conf
@@ -3,7 +3,7 @@
# See sanity.bbclass
#
# Expert users can confirm their sanity with "touch conf/sanity.conf"
-BB_MIN_VERSION = "1.23.1"
+BB_MIN_VERSION = "1.25.0"
SANITY_ABIFILE = "${TMPDIR}/abi_version"
diff --git a/meta/conf/toasterconf.json b/meta/conf/toasterconf.json
new file mode 100644
index 0000000000..fe1564ec26
--- /dev/null
+++ b/meta/conf/toasterconf.json
@@ -0,0 +1,84 @@
+{
+ "config": {"MACHINE": "qemux86", "DISTRO": "poky"},
+ "layersources": [
+ {
+ "name": "Local OpenEmbedded",
+ "sourcetype": "local",
+ "apiurl": "../../",
+ "branches": ["HEAD", "master", "dizzy"],
+ "layers": [
+ {
+ "name": "openembedded-core",
+ "local_path": "meta",
+ "vcs_url": "remote:origin",
+ "dirpath": "meta"
+ }
+ ]
+ },
+ {
+ "name": "OpenEmbedded",
+ "sourcetype": "layerindex",
+ "apiurl": "http://layers.openembedded.org/layerindex/api/",
+ "branches": ["master", "dizzy"]
+ },
+ {
+ "name": "Imported layers",
+ "sourcetype": "imported",
+ "apiurl": "",
+ "branches": ["master", "dizzy", "HEAD"]
+
+ }
+ ],
+ "bitbake" : [
+ {
+ "name": "master",
+ "giturl": "git://git.openembedded.org/bitbake",
+ "branch": "master",
+ "dirpath": ""
+ },
+ {
+ "name": "dizzy",
+ "giturl": "git://git.openembedded.org/bitbake",
+ "branch": "1.24",
+ "dirpath": ""
+ },
+ {
+ "name": "HEAD",
+ "giturl": "git://git.openembedded.org/bitbake",
+ "branch": "HEAD",
+ "dirpath": ""
+ }
+ ],
+
+ "defaultrelease": "master",
+
+ "releases": [
+ {
+ "name": "master",
+ "description": "OpenEmbedded master",
+ "bitbake": "master",
+ "branch": "master",
+ "defaultlayers": [ "openembedded-core" ],
+ "layersourcepriority": { "Imported layers": 99, "Local OpenEmbedded" : 10, "OpenEmbedded" : 0 },
+ "helptext": "Toaster will run your builds using the OpenEmbedded master branch, where active development takes place. This is not a stable branch, so your builds might not work as expected."
+ },
+ {
+ "name": "dizzy",
+ "description": "OpenEmbedded Dizzy",
+ "bitbake": "dizzy",
+ "branch": "dizzy",
+ "defaultlayers": [ "openembedded-core" ],
+ "layersourcepriority": { "Imported layers": 99, "Local OpenEmbedded" : 10, "OpenEmbedded" : 0 },
+ "helptext": "Toaster will run your builds with the OpenEmbedded Dizzy release"
+ },
+ {
+ "name": "local",
+ "description": "Local OpenEmbedded",
+ "bitbake": "HEAD",
+ "branch": "HEAD",
+ "defaultlayers": [ "openembedded-core" ],
+ "layersourcepriority": { "Imported layers": 99, "Local OpenEmbedded" : 10, "OpenEmbedded" : 0 },
+ "helptext": "Toaster will run your builds with the version of OpenEmbedded that you have cloned or downloaded to your computer."
+ }
+ ]
+}
diff --git a/meta/files/common-licenses/SMAIL_GPL b/meta/files/common-licenses/SMAIL_GPL
new file mode 100644
index 0000000000..dfc3fd16cf
--- /dev/null
+++ b/meta/files/common-licenses/SMAIL_GPL
@@ -0,0 +1,164 @@
+This is the Debian GNU/Linux package debianutils.
+
+It is an original Debian package. Programs in it were maintained by
+Guy Maor <maor@debian.org>, and are now maintained by Clint Adams
+<schizo@debian.org>.
+
+All its programs except readlink, savelog, and which may be
+redistributed under the terms of the GNU GPL, Version 2 or later,
+found on Debian systems in the file /usr/share/common-licenses/GPL.
+
+which is in the public domain.
+
+readlink is Copyright (c) 1997 Kenneth Stailey, and may also be
+distributed under the terms of the BSD copyright.
+
+savelog may be redistributed under the following terms: (The rest of
+this file consists of savelog's distribution terms.)
+
+#ident "@(#)smail:RELEASE-3_2:COPYING,v 1.2 1996/06/14 18:59:10 woods Exp"
+
+ SMAIL GENERAL PUBLIC LICENSE
+ (Clarified 11 Feb 1988)
+
+ Copyright (C) 1988 Landon Curt Noll & Ronald S. Karr
+ Copyright (C) 1992 Ronald S. Karr
+ Copyleft (GNU) 1988 Landon Curt Noll & Ronald S. Karr
+
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license, but changing it is not allowed. You can also
+ use this wording to make the terms for other programs.
+
+ The license agreements of most software companies keep you at the
+mercy of those companies. By contrast, our general public license is
+intended to give everyone the right to share SMAIL. To make sure that
+you get the rights we want you to have, we need to make restrictions
+that forbid anyone to deny you these rights or to ask you to surrender
+the rights. Hence this license agreement.
+
+ Specifically, we want to make sure that you have the right to give
+away copies of SMAIL, that you receive source code or else can get it
+if you want it, that you can change SMAIL or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To make sure that everyone has such rights, we have to forbid you to
+deprive anyone else of these rights. For example, if you distribute
+copies of SMAIL, you must give the recipients all the rights that you
+have. You must make sure that they, too, receive or can get the
+source code. And you must tell them their rights.
+
+ Also, for our own protection, we must make certain that everyone
+finds out that there is no warranty for SMAIL. If SMAIL is modified by
+someone else and passed on, we want its recipients to know that what
+they have is not what we distributed, so that any problems introduced
+by others will not reflect on our reputation.
+
+ Therefore we (Landon Curt Noll and Ronald S. Karr) make the following
+terms which say what you must do to be allowed to distribute or change
+SMAIL.
+
+
+ COPYING POLICIES
+
+ 1. You may copy and distribute verbatim copies of SMAIL source code
+as you receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy a valid copyright notice "Copyright
+(C) 1988 Landon Curt Noll & Ronald S. Karr" (or with whatever year is
+appropriate); keep intact the notices on all files that refer to this
+License Agreement and to the absence of any warranty; and give any
+other recipients of the SMAIL program a copy of this License
+Agreement along with the program. You may charge a distribution fee
+for the physical act of transferring a copy.
+
+ 2. You may modify your copy or copies of SMAIL or any portion of it,
+and copy and distribute such modifications under the terms of
+Paragraph 1 above, provided that you also do the following:
+
+ a) cause the modified files to carry prominent notices stating
+ that you changed the files and the date of any change; and
+
+ b) cause the whole of any work that you distribute or publish,
+ that in whole or in part contains or is a derivative of SMAIL or
+ any part thereof, to be licensed at no charge to all third
+ parties on terms identical to those contained in this License
+ Agreement (except that you may choose to grant more extensive
+ warranty protection to some or all third parties, at your option).
+
+ c) You may charge a distribution fee for the physical act of
+ transferring a copy, and you may at your option offer warranty
+ protection in exchange for a fee.
+
+Mere aggregation of another unrelated program with this program (or its
+derivative) on a volume of a storage or distribution medium does not bring
+the other program under the scope of these terms.
+
+ 3. You may copy and distribute SMAIL (or a portion or derivative of it,
+under Paragraph 2) in object code or executable form under the terms of
+Paragraphs 1 and 2 above provided that you also do one of the following:
+
+ a) accompany it with the complete corresponding machine-readable
+ source code, which must be distributed under the terms of
+ Paragraphs 1 and 2 above; or,
+
+ b) accompany it with a written offer, valid for at least three
+ years, to give any third party free (except for a nominal
+ shipping charge) a complete machine-readable copy of the
+ corresponding source code, to be distributed under the terms of
+ Paragraphs 1 and 2 above; or,
+
+ c) accompany it with the information you received as to where the
+ corresponding source code may be obtained. (This alternative is
+ allowed only for non-commercial distribution and only if you
+ received the program in object code or executable form alone.)
+
+For an executable file, complete source code means all the source code for
+all modules it contains; but, as a special exception, it need not include
+source code for modules which are standard libraries that accompany the
+operating system on which the executable file runs.
+
+ 4. You may not copy, sublicense, distribute or transfer SMAIL
+except as expressly provided under this License Agreement. Any attempt
+otherwise to copy, sublicense, distribute or transfer SMAIL is void and
+your rights to use the program under this License agreement shall be
+automatically terminated. However, parties who have received computer
+software programs from you with this License Agreement will not have
+their licenses terminated so long as such parties remain in full compliance.
+
+ 5. If you wish to incorporate parts of SMAIL into other free
+programs whose distribution conditions are different, write to Landon
+Curt Noll & Ronald S. Karr via the Free Software Foundation at 51
+Franklin St, Fifth Floor, Boston, MA 02110-1301, USA. We have not yet
+worked out a simple rule that can be stated here, but we will often
+permit this. We will be guided by the two goals of preserving the
+free status of all derivatives of our free software and of promoting
+the sharing and reuse of software.
+
+Your comments and suggestions about our licensing policies and our
+software are welcome! This contract was based on the contract made by
+the Free Software Foundation. Please contact the Free Software
+Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301,
+USA, or call (617) 542-5942 for details on copylefted material in
+general.
+
+ NO WARRANTY
+
+ BECAUSE SMAIL IS LICENSED FREE OF CHARGE, WE PROVIDE ABSOLUTELY NO
+WARRANTY, TO THE EXTENT PERMITTED BY APPLICABLE STATE LAW. EXCEPT WHEN
+OTHERWISE STATED IN WRITING, LANDON CURT NOLL & RONALD S. KARR AND/OR
+OTHER PARTIES PROVIDE SMAIL "AS IS" WITHOUT WARRANTY OF ANY KIND,
+EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
+THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF SMAIL IS WITH
+YOU. SHOULD SMAIL PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL
+NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW WILL LANDON CURT NOLL &
+RONALD S. KARR AND/OR ANY OTHER PARTY WHO MAY MODIFY AND REDISTRIBUTE
+SMAIL AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+LOST PROFITS, LOST MONIES, OR OTHER SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE
+(INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED
+INACCURATE OR LOSSES SUSTAINED BY THIRD PARTIES OR A FAILURE OF THE
+PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS) SMAIL, EVEN IF YOU HAVE
+BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES, OR FOR ANY CLAIM BY
+ANY OTHER PARTY.
diff --git a/meta/files/toolchain-shar-template.sh b/meta/files/toolchain-shar-template.sh
index a37d6ff786..0d679a66db 100644
--- a/meta/files/toolchain-shar-template.sh
+++ b/meta/files/toolchain-shar-template.sh
@@ -132,7 +132,8 @@ if [ "$dl_path" = "" ] ; then
echo "SDK could not be set up. Relocate script unable to find ld-linux.so. Abort!"
exit 1
fi
-executable_files=$($SUDO_EXEC find $native_sysroot -type f -perm /111 -printf "'%h/%f' ")
+executable_files=$($SUDO_EXEC find $native_sysroot -type f \
+ \( -perm -0100 -o -perm -0010 -o -perm -0001 \) -printf "'%h/%f' ")
tdir=`mktemp -d`
if [ x$tdir = x ] ; then
@@ -153,7 +154,12 @@ if [ $relocate = 1 ] ; then
fi
# replace @SDKPATH@ with the new prefix in all text files: configs/scripts/etc
-$SUDO_EXEC find $native_sysroot -type f -exec file '{}' \;|grep ":.*\(ASCII\|script\|source\).*text"|awk -F':' '{printf "\"%s\"\n", $1}'|$SUDO_EXEC xargs -n32 sed -i -e "s:$DEFAULT_INSTALL_DIR:$target_sdk_dir:g"
+for replace in "$target_sdk_dir -maxdepth 1" "$native_sysroot"; do
+ $SUDO_EXEC find $replace -type f -exec file '{}' \; | \
+ grep ":.*\(ASCII\|script\|source\).*text" | \
+ awk -F':' '{printf "\"%s\"\n", $1}' | \
+ $SUDO_EXEC xargs -n32 sed -i -e "s:$DEFAULT_INSTALL_DIR:$target_sdk_dir:g"
+done
# change all symlinks pointing to @SDKPATH@
for l in $($SUDO_EXEC find $native_sysroot -type l); do
@@ -162,13 +168,15 @@ done
# find out all perl scripts in $native_sysroot and modify them replacing the
# host perl with SDK perl.
-for perl_script in $($SUDO_EXEC find $native_sysroot -type f -exec grep "^#!.*perl" -l '{}' \;); do
+for perl_script in $($SUDO_EXEC find $native_sysroot -type f -exec grep -l "^#!.*perl" '{}' \;); do
$SUDO_EXEC sed -i -e "s:^#! */usr/bin/perl.*:#! /usr/bin/env perl:g" -e \
"s: /usr/bin/perl: /usr/bin/env perl:g" $perl_script
done
echo done
+@SDK_POST_INSTALL_COMMAND@
+
# delete the relocating script, so that user is forced to re-run the installer
# if he/she wants another location for the sdk
if [ $savescripts = 0 ] ; then
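
The find change above avoids the GNU-only -perm /111 form: the replacement matches files that have any of the owner, group or other execute bits set. A small Python sketch of the same test, under the assumption that only regular files are of interest (the root directory is illustrative):

    # List files with any execute bit set, mirroring
    # find ... \( -perm -0100 -o -perm -0010 -o -perm -0001 \)
    import os
    import stat

    def executable_files(root):
        exec_bits = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        for dirpath, _, filenames in os.walk(root):
            for name in filenames:
                path = os.path.join(dirpath, name)
                if os.path.isfile(path) and os.stat(path).st_mode & exec_bits:
                    yield path

    for path in executable_files("/tmp"):    # illustrative root directory
        print(path)
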
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py
index 68efca32d0..8da87b771a 100644
--- a/meta/lib/oe/classextend.py
+++ b/meta/lib/oe/classextend.py
@@ -57,6 +57,13 @@ class ClassExtender(object):
if dep.endswith(("-native", "-native-runtime")) or ('nativesdk-' in dep) or ('cross-canadian' in dep) or ('-crosssdk-' in dep):
return dep
else:
+            # Do not extend dependencies that already have a multilib prefix
+ var = self.d.getVar("MULTILIB_VARIANTS", True)
+ if var:
+ var = var.split()
+ for v in var:
+ if dep.startswith(v):
+ return dep
return self.extend_name(dep)
def map_depends_variable(self, varname, suffix = ""):
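
The new guard above returns a dependency untouched when it already starts with one of the configured multilib variants, so e.g. lib32-glibc is not turned into lib32-lib32-glibc. A standalone sketch of that behaviour (the variant list and names are illustrative, and the real extend_name() does considerably more):

    # Sketch of the multilib-prefix guard: dependencies that already carry a
    # variant prefix are passed through unchanged.
    def map_depends(dep, multilib_variants, prefix):
        for variant in multilib_variants.split():
            if dep.startswith(variant):
                return dep
        return "%s-%s" % (prefix, dep)

    print(map_depends("glibc", "lib32 lib64", "lib32"))        # lib32-glibc
    print(map_depends("lib32-glibc", "lib32 lib64", "lib32"))  # lib32-glibc (unchanged)
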
diff --git a/meta/lib/oe/image.py b/meta/lib/oe/image.py
index 354a676f42..f9c8f84cf8 100644
--- a/meta/lib/oe/image.py
+++ b/meta/lib/oe/image.py
@@ -48,11 +48,13 @@ class ImageDepGraph(object):
graph = dict()
def add_node(node):
+ base_type = self._image_base_type(node)
deps = (self.d.getVar('IMAGE_TYPEDEP_' + node, True) or "")
- if deps != "":
+ base_deps = (self.d.getVar('IMAGE_TYPEDEP_' + base_type, True) or "")
+ if deps != "" or base_deps != "":
graph[node] = deps
- for dep in deps.split():
+ for dep in deps.split() + base_deps.split():
if not dep in graph:
add_node(dep)
else:
@@ -72,6 +74,18 @@ class ImageDepGraph(object):
for item in remove_list:
self.graph.pop(item, None)
+ def _image_base_type(self, type):
+ ctypes = self.d.getVar('COMPRESSIONTYPES', True).split()
+ if type in ["vmdk", "live", "iso", "hddimg"]:
+ type = "ext3"
+ basetype = type
+ for ctype in ctypes:
+ if type.endswith("." + ctype):
+ basetype = type[:-len("." + ctype)]
+ break
+
+ return basetype
+
def _compute_dependencies(self):
"""
returns dict object of nodes with [no_of_depends_on, no_of_depended_by]
@@ -109,7 +123,7 @@ class ImageDepGraph(object):
# remove added nodes from deps_array
for item in group:
for node in self.graph:
- if item in self.graph[node]:
+ if item in self.graph[node].split():
self.deps_array[node][0] -= 1
self.deps_array.pop(item, None)
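
The new _image_base_type() helper strips a known compression suffix (and maps vmdk/live/iso/hddimg to ext3) so that the dependency lookup for e.g. 'ext4.gz' also picks up IMAGE_TYPEDEP_ext4. A standalone sketch of that derivation (the COMPRESSIONTYPES value is illustrative):

    # Sketch of the base-type derivation added above.
    def image_base_type(img_type, compression_types):
        if img_type in ["vmdk", "live", "iso", "hddimg"]:
            img_type = "ext3"
        for ctype in compression_types.split():
            if img_type.endswith("." + ctype):
                return img_type[:-len("." + ctype)]
        return img_type

    print(image_base_type("ext4.gz", "gz bz2 xz"))   # ext4
    print(image_base_type("live", "gz bz2 xz"))      # ext3
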
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py
index f8b532220a..ea6feaaea4 100644
--- a/meta/lib/oe/package.py
+++ b/meta/lib/oe/package.py
@@ -97,3 +97,29 @@ def filedeprunner(arg):
raise e
return (pkg, provides, requires)
+
+
+def read_shlib_providers(d):
+ import re
+
+ shlib_provider = {}
+ shlibs_dirs = d.getVar('SHLIBSDIRS', True).split()
+ list_re = re.compile('^(.*)\.list$')
+ # Go from least to most specific since the last one found wins
+ for dir in reversed(shlibs_dirs):
+ bb.debug(2, "Reading shlib providers in %s" % (dir))
+ if not os.path.exists(dir):
+ continue
+ for file in os.listdir(dir):
+ m = list_re.match(file)
+ if m:
+ dep_pkg = m.group(1)
+ fd = open(os.path.join(dir, file))
+ lines = fd.readlines()
+ fd.close()
+ for l in lines:
+ s = l.strip().split(":")
+ if s[0] not in shlib_provider:
+ shlib_provider[s[0]] = {}
+ shlib_provider[s[0]][s[1]] = (dep_pkg, s[2])
+ return shlib_provider
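
read_shlib_providers() folds the *.list files found under SHLIBSDIRS into a nested dict keyed by shared-library name. The field layout assumed below (soname:path:version per colon-separated line) is an illustration of the shape of the data, not a specification of the real files written by package.bbclass:

    # Sketch: parse shlib ".list" lines into the nested dict shape used above.
    # The sample line and its field meaning are assumptions for illustration.
    def parse_shlib_lines(dep_pkg, lines):
        shlib_provider = {}
        for l in lines:
            s = l.strip().split(":")
            shlib_provider.setdefault(s[0], {})[s[1]] = (dep_pkg, s[2])
        return shlib_provider

    print(parse_shlib_lines("libz", ["libz.so.1:/lib:1.2.8\n"]))
    # {'libz.so.1': {'/lib': ('libz', '1.2.8')}}
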
diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py
index 8be3d41706..69100f16c1 100644
--- a/meta/lib/oe/package_manager.py
+++ b/meta/lib/oe/package_manager.py
@@ -7,6 +7,7 @@ import multiprocessing
import re
import bb
import tempfile
+import oe.utils
# this can be used by all PM backends to create the index files in parallel
@@ -15,11 +16,14 @@ def create_index(arg):
try:
bb.note("Executing '%s' ..." % index_cmd)
- subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True)
+ result = subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
return("Index creation command '%s' failed with return code %d:\n%s" %
(e.cmd, e.returncode, e.output))
+ if result:
+ bb.note(result)
+
return None
@@ -62,6 +66,9 @@ class RpmIndexer(Indexer):
localdata = bb.data.createCopy(self.d)
default_tune_key = "DEFAULTTUNE_virtclass-multilib-" + eext[1]
default_tune = localdata.getVar(default_tune_key, False)
+ if default_tune is None:
+ default_tune_key = "DEFAULTTUNE_ML_" + eext[1]
+ default_tune = localdata.getVar(default_tune_key, False)
if default_tune:
localdata.setVar("DEFAULTTUNE", default_tune)
bb.data.update_data(localdata)
@@ -116,15 +123,9 @@ class RpmIndexer(Indexer):
bb.note("There are no packages in %s" % self.deploy_dir)
return
- nproc = multiprocessing.cpu_count()
- pool = bb.utils.multiprocessingpool(nproc)
- results = list(pool.imap(create_index, index_cmds))
- pool.close()
- pool.join()
-
- for result in results:
- if result is not None:
- return(result)
+ result = oe.utils.multiprocess_exec(index_cmds, create_index)
+ if result:
+ bb.fatal('%s' % ('\n'.join(result)))
class OpkgIndexer(Indexer):
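
The duplicated pool.imap() boilerplate in the indexers above is collapsed into oe.utils.multiprocess_exec(commands, function). A rough sketch of what such a helper might look like, assuming it runs the function over the items in a process pool and returns the non-None error strings (this is an illustration, not the actual oe.utils implementation):

    # Rough sketch of a multiprocess_exec-style helper (illustrative only).
    import multiprocessing

    def multiprocess_exec_sketch(commands, function):
        pool = multiprocessing.Pool(multiprocessing.cpu_count())
        try:
            results = pool.map(function, commands)
        finally:
            pool.close()
            pool.join()
        return [r for r in results if r is not None]
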
@@ -161,15 +162,10 @@ class OpkgIndexer(Indexer):
bb.note("There are no packages in %s!" % self.deploy_dir)
return
- nproc = multiprocessing.cpu_count()
- pool = bb.utils.multiprocessingpool(nproc)
- results = list(pool.imap(create_index, index_cmds))
- pool.close()
- pool.join()
+ result = oe.utils.multiprocess_exec(index_cmds, create_index)
+ if result:
+ bb.fatal('%s' % ('\n'.join(result)))
- for result in results:
- if result is not None:
- return(result)
class DpkgIndexer(Indexer):
@@ -183,6 +179,9 @@ class DpkgIndexer(Indexer):
if a not in pkg_archs:
arch_list.append(a)
+ all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
+ arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list)
+
apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive")
gzip = bb.utils.which(os.getenv('PATH'), "gzip")
@@ -210,15 +209,10 @@ class DpkgIndexer(Indexer):
bb.note("There are no packages in %s" % self.deploy_dir)
return
- nproc = multiprocessing.cpu_count()
- pool = bb.utils.multiprocessingpool(nproc)
- results = list(pool.imap(create_index, index_cmds))
- pool.close()
- pool.join()
+ result = oe.utils.multiprocess_exec(index_cmds, create_index)
+ if result:
+ bb.fatal('%s' % ('\n'.join(result)))
- for result in results:
- if result is not None:
- return(result)
class PkgsList(object):
@@ -243,6 +237,15 @@ class RpmPkgsList(PkgsList):
self.ml_prefix_list, self.ml_os_list = \
RpmIndexer(d, rootfs_dir).get_ml_prefix_and_os_list(arch_var, os_var)
+ # Determine rpm version
+ cmd = "%s --version" % self.rpm_cmd
+ try:
+ output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.fatal("Getting rpm version failed. Command '%s' "
+ "returned %d:\n%s" % (cmd, e.returncode, e.output))
+ self.rpm_version = int(output.split()[-1].split('.')[0])
+
'''
Translate the RPM/Smart format names to the OE multilib format names
'''
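
The version detection above keeps only the major number from the last word of the `rpm --version` output, for example:

    # Example of the parsing used above for `rpm --version` output.
    output = "RPM version 5.4.14"    # rpm5; an rpm4 build prints e.g. "RPM version 4.11.2"
    rpm_version = int(output.split()[-1].split('.')[0])
    print(rpm_version)               # 5
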
@@ -291,11 +294,16 @@ class RpmPkgsList(PkgsList):
def list(self, format=None):
if format == "deps":
+ if self.rpm_version == 4:
+ bb.fatal("'deps' format dependency listings are not supported with rpm 4 since rpmresolve does not work")
return self._list_pkg_deps()
cmd = self.rpm_cmd + ' --root ' + self.rootfs_dir
cmd += ' -D "_dbpath /var/lib/rpm" -qa'
- cmd += " --qf '[%{NAME} %{ARCH} %{VERSION} %{PACKAGEORIGIN}\n]'"
+ if self.rpm_version == 4:
+ cmd += " --qf '[%{NAME} %{ARCH} %{VERSION}\n]'"
+ else:
+ cmd += " --qf '[%{NAME} %{ARCH} %{VERSION} %{PACKAGEORIGIN}\n]'"
try:
# bb.note(cmd)
@@ -312,7 +320,10 @@ class RpmPkgsList(PkgsList):
pkg = line.split()[0]
arch = line.split()[1]
ver = line.split()[2]
- pkgorigin = line.split()[3]
+ if self.rpm_version == 4:
+ pkgorigin = "unknown"
+ else:
+ pkgorigin = line.split()[3]
new_pkg, new_arch = self._pkg_translate_smart_to_oe(pkg, arch)
if format == "arch":
@@ -567,7 +578,7 @@ class RpmPM(PackageManager):
self.install_dir = os.path.join(self.target_rootfs, "install")
self.rpm_cmd = bb.utils.which(os.getenv('PATH'), "rpm")
self.smart_cmd = bb.utils.which(os.getenv('PATH'), "smart")
- self.smart_opt = "--data-dir=" + os.path.join(target_rootfs,
+ self.smart_opt = "--quiet --data-dir=" + os.path.join(target_rootfs,
'var/lib/smart')
self.scriptlet_wrapper = self.d.expand('${WORKDIR}/scriptlet_wrapper')
self.solution_manifest = self.d.expand('${T}/saved/%s_solution' %
@@ -580,6 +591,7 @@ class RpmPM(PackageManager):
self.indexer = RpmIndexer(self.d, self.deploy_dir)
self.pkgs_list = RpmPkgsList(self.d, self.target_rootfs, arch_var, os_var)
+ self.rpm_version = self.pkgs_list.rpm_version
self.ml_prefix_list, self.ml_os_list = self.indexer.get_ml_prefix_and_os_list(arch_var, os_var)
@@ -769,43 +781,46 @@ class RpmPM(PackageManager):
# After change the __db.* cache size, log file will not be
# generated automatically, that will raise some warnings,
# so touch a bare log for rpm write into it.
- rpmlib_log = os.path.join(self.image_rpmlib, 'log', 'log.0000000001')
- if not os.path.exists(rpmlib_log):
- bb.utils.mkdirhier(os.path.join(self.image_rpmlib, 'log'))
- open(rpmlib_log, 'w+').close()
-
- DB_CONFIG_CONTENT = "# ================ Environment\n" \
- "set_data_dir .\n" \
- "set_create_dir .\n" \
- "set_lg_dir ./log\n" \
- "set_tmp_dir ./tmp\n" \
- "set_flags db_log_autoremove on\n" \
- "\n" \
- "# -- thread_count must be >= 8\n" \
- "set_thread_count 64\n" \
- "\n" \
- "# ================ Logging\n" \
- "\n" \
- "# ================ Memory Pool\n" \
- "set_cachesize 0 1048576 0\n" \
- "set_mp_mmapsize 268435456\n" \
- "\n" \
- "# ================ Locking\n" \
- "set_lk_max_locks 16384\n" \
- "set_lk_max_lockers 16384\n" \
- "set_lk_max_objects 16384\n" \
- "mutex_set_max 163840\n" \
- "\n" \
- "# ================ Replication\n"
-
- db_config_dir = os.path.join(self.image_rpmlib, 'DB_CONFIG')
- if not os.path.exists(db_config_dir):
- open(db_config_dir, 'w+').write(DB_CONFIG_CONTENT)
+ if self.rpm_version == 5:
+ rpmlib_log = os.path.join(self.image_rpmlib, 'log', 'log.0000000001')
+ if not os.path.exists(rpmlib_log):
+ bb.utils.mkdirhier(os.path.join(self.image_rpmlib, 'log'))
+ open(rpmlib_log, 'w+').close()
+
+ DB_CONFIG_CONTENT = "# ================ Environment\n" \
+ "set_data_dir .\n" \
+ "set_create_dir .\n" \
+ "set_lg_dir ./log\n" \
+ "set_tmp_dir ./tmp\n" \
+ "set_flags db_log_autoremove on\n" \
+ "\n" \
+ "# -- thread_count must be >= 8\n" \
+ "set_thread_count 64\n" \
+ "\n" \
+ "# ================ Logging\n" \
+ "\n" \
+ "# ================ Memory Pool\n" \
+ "set_cachesize 0 1048576 0\n" \
+ "set_mp_mmapsize 268435456\n" \
+ "\n" \
+ "# ================ Locking\n" \
+ "set_lk_max_locks 16384\n" \
+ "set_lk_max_lockers 16384\n" \
+ "set_lk_max_objects 16384\n" \
+ "mutex_set_max 163840\n" \
+ "\n" \
+ "# ================ Replication\n"
+
+ db_config_dir = os.path.join(self.image_rpmlib, 'DB_CONFIG')
+ if not os.path.exists(db_config_dir):
+ open(db_config_dir, 'w+').write(DB_CONFIG_CONTENT)
# Create database so that smart doesn't complain (lazy init)
- cmd = "%s --root %s --dbpath /var/lib/rpm -qa > /dev/null" % (
- self.rpm_cmd,
- self.target_rootfs)
+ opt = "-qa"
+ if self.rpm_version == 4:
+ opt = "--initdb"
+ cmd = "%s --root %s --dbpath /var/lib/rpm %s > /dev/null" % (
+ self.rpm_cmd, self.target_rootfs, opt)
try:
subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
@@ -822,26 +837,30 @@ class RpmPM(PackageManager):
self.d.getVar('localstatedir', True))
cmd = 'config --set rpm-extra-macros._tmppath=/install/tmp'
- prefer_color = self.d.getVar('RPM_PREFER_COLOR', True)
+ prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH', True)
if prefer_color:
- if prefer_color not in ['0', '1', '2', '3']:
- bb.fatal("Invalid RPM_PREFER_COLOR: %s, it should be one of:\n"
+ if prefer_color not in ['0', '1', '2', '4']:
+ bb.fatal("Invalid RPM_PREFER_ELF_ARCH: %s, it should be one of:\n"
"\t1: ELF32 wins\n"
"\t2: ELF64 wins\n"
- "\t3: ELF64 N32 wins (mips64 or mips64el only)" %
+ "\t4: ELF64 N32 wins (mips64 or mips64el only)" %
prefer_color)
- if prefer_color == "3" and self.d.getVar("TUNE_ARCH", True) not in \
+ if prefer_color == "4" and self.d.getVar("TUNE_ARCH", True) not in \
['mips64', 'mips64el']:
- bb.fatal("RPM_PREFER_COLOR = \"3\" is for mips64 or mips64el "
+ bb.fatal("RPM_PREFER_ELF_ARCH = \"4\" is for mips64 or mips64el "
"only.")
self._invoke_smart('config --set rpm-extra-macros._prefer_color=%s'
% prefer_color)
self._invoke_smart(cmd)
+ self._invoke_smart('config --set rpm-ignoresize=1')
# Write common configuration for host and target usage
self._invoke_smart('config --set rpm-nolinktos=1')
self._invoke_smart('config --set rpm-noparentdirs=1')
+ check_signature = self.d.getVar('RPM_CHECK_SIGNATURES', True)
+ if check_signature and check_signature.strip() == "0":
+ self._invoke_smart('config --set rpm-check-signatures=false')
for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split():
self._invoke_smart('flag --set ignore-recommends %s' % i)
@@ -880,6 +899,11 @@ class RpmPM(PackageManager):
        # If we ever run into needing more than 899 scripts, we'll have to
# change num to start with 1000.
#
+ if self.rpm_version == 4:
+ scriptletcmd = "$2 $3 $4\n"
+ else:
+ scriptletcmd = "$2 $1/$3 $4\n"
+
SCRIPTLET_FORMAT = "#!/bin/bash\n" \
"\n" \
"export PATH=%s\n" \
@@ -890,7 +914,7 @@ class RpmPM(PackageManager):
"export INTERCEPT_DIR=%s\n" \
"export NATIVE_ROOT=%s\n" \
"\n" \
- "$2 $1/$3 $4\n" \
+ + scriptletcmd + \
"if [ $? -ne 0 ]; then\n" \
" if [ $4 -eq 1 ]; then\n" \
" mkdir -p $1/etc/rpm-postinsts\n" \
@@ -1250,9 +1274,9 @@ class OpkgPM(PackageManager):
with open(cfg_file_name, "w+") as cfg_file:
cfg_file.write("src/gz local-%s %s/%s" %
- arch,
- self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True),
- arch)
+ (arch,
+ self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True),
+ arch))
def _create_config(self):
with open(self.config_file, "w+") as config_file:
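
The parentheses added above matter because % binds more tightly than the argument commas: in the old form only arch was applied to the format string (raising TypeError for the unfilled placeholders) while the remaining values became extra arguments to write(). A minimal illustration:

    # Why the tuple is needed: '%' applies only to the expression on its left.
    arch, uri = "armv5te", "http://example.com/feed"    # illustrative values

    print("src/gz local-%s %s/%s" % (arch, uri, arch))  # correct: one tuple
    try:
        "src/gz local-%s %s/%s" % arch                  # what the old code effectively did
    except TypeError as exc:
        print("old form fails:", exc)
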
@@ -1399,6 +1423,10 @@ class OpkgPM(PackageManager):
else:
status.write(line + "\n")
+ # Append a blank line after each package entry to ensure that it
+ # is separated from the following entry
+ status.write("\n")
+
'''
The following function dummy installs pkgs and returns the log of output.
'''
@@ -1469,6 +1497,10 @@ class DpkgPM(PackageManager):
self.apt_args = d.getVar("APT_ARGS", True)
+ self.all_arch_list = archs.split()
+ all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
+ self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list)
+
self._create_configs(archs, base_archs)
self.indexer = DpkgIndexer(self.d, self.deploy_dir)
@@ -1626,8 +1658,8 @@ class DpkgPM(PackageManager):
sources_conf = os.path.join("%s/etc/apt/sources.list"
% self.target_rootfs)
arch_list = []
- archs = self.d.getVar('PACKAGE_ARCHS', True)
- for arch in archs.split():
+
+ for arch in self.all_arch_list:
if not os.path.exists(os.path.join(self.deploy_dir, arch)):
continue
arch_list.append(arch)
@@ -1650,7 +1682,7 @@ class DpkgPM(PackageManager):
bb.utils.mkdirhier(self.apt_conf_dir + "/apt.conf.d/")
arch_list = []
- for arch in archs.split():
+ for arch in self.all_arch_list:
if not os.path.exists(os.path.join(self.deploy_dir, arch)):
continue
arch_list.append(arch)
@@ -1679,15 +1711,25 @@ class DpkgPM(PackageManager):
sources_file.write("deb file:%s/ ./\n" %
os.path.join(self.deploy_dir, arch))
+ base_arch_list = base_archs.split()
+ multilib_variants = self.d.getVar("MULTILIB_VARIANTS", True);
+ for variant in multilib_variants.split():
+ if variant == "lib32":
+ base_arch_list.append("i386")
+ elif variant == "lib64":
+ base_arch_list.append("amd64")
+
with open(self.apt_conf_file, "w+") as apt_conf:
with open(self.d.expand("${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample")) as apt_conf_sample:
for line in apt_conf_sample.read().split("\n"):
- line = re.sub("Architecture \".*\";",
- "Architecture \"%s\";" % base_archs, line)
- line = re.sub("#ROOTFS#", self.target_rootfs, line)
- line = re.sub("#APTCONF#", self.apt_conf_dir, line)
-
- apt_conf.write(line + "\n")
+ match_arch = re.match(" Architecture \".*\";$", line)
+ if match_arch:
+ for base_arch in base_arch_list:
+ apt_conf.write(" Architecture \"%s\";\n" % base_arch)
+ else:
+ line = re.sub("#ROOTFS#", self.target_rootfs, line)
+ line = re.sub("#APTCONF#", self.apt_conf_dir, line)
+ apt_conf.write(line + "\n")
target_dpkg_dir = "%s/var/lib/dpkg" % self.target_rootfs
bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "info"))
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index b085c9d6b5..b838be80be 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -199,10 +199,108 @@ class PatchTree(PatchSet):
self.Pop(all=True)
class GitApplyTree(PatchTree):
+ patch_line_prefix = '%% original patch'
+
def __init__(self, dir, d):
PatchTree.__init__(self, dir, d)
+ @staticmethod
+ def extractPatchHeader(patchfile):
+ """
+ Extract just the header lines from the top of a patch file
+ """
+ lines = []
+ with open(patchfile, 'r') as f:
+ for line in f.readlines():
+ if line.startswith('Index: ') or line.startswith('diff -') or line.startswith('---'):
+ break
+ lines.append(line)
+ return lines
+
+ @staticmethod
+ def prepareCommit(patchfile):
+ """
+ Prepare a git commit command line based on the header from a patch file
+ (typically this is useful for patches that cannot be applied with "git am" due to formatting)
+ """
+ import tempfile
+ import re
+ author_re = re.compile('[\S ]+ <\S+@\S+\.\S+>')
+ # Process patch header and extract useful information
+ lines = GitApplyTree.extractPatchHeader(patchfile)
+ outlines = []
+ author = None
+ date = None
+ for line in lines:
+ if line.startswith('Subject: '):
+ subject = line.split(':', 1)[1]
+ # Remove any [PATCH][oe-core] etc.
+ subject = re.sub(r'\[.+?\]\s*', '', subject)
+ outlines.insert(0, '%s\n\n' % subject.strip())
+ continue
+ if line.startswith('From: ') or line.startswith('Author: '):
+ authorval = line.split(':', 1)[1].strip().replace('"', '')
+ # git is fussy about author formatting i.e. it must be Name <email@domain>
+ if author_re.match(authorval):
+ author = authorval
+ continue
+ if line.startswith('Date: '):
+ if date is None:
+ dateval = line.split(':', 1)[1].strip()
+ # Very crude check for date format, since git will blow up if it's not in the right
+ # format. Without e.g. a python-dateutils dependency we can't do a whole lot more
+ if len(dateval) > 12:
+ date = dateval
+ continue
+ if line.startswith('Signed-off-by: '):
+ authorval = line.split(':', 1)[1].strip().replace('"', '')
+ # git is fussy about author formatting i.e. it must be Name <email@domain>
+ if author_re.match(authorval):
+ author = authorval
+ outlines.append(line)
+ # Write out commit message to a file
+ with tempfile.NamedTemporaryFile('w', delete=False) as tf:
+ tmpfile = tf.name
+ for line in outlines:
+ tf.write(line)
+ # Prepare git command
+ cmd = ["git", "commit", "-F", tmpfile]
+ # git doesn't like plain email addresses as authors
+ if author and '<' in author:
+ cmd.append('--author="%s"' % author)
+ if date:
+ cmd.append('--date="%s"' % date)
+ return (tmpfile, cmd)
+
+ @staticmethod
+ def extractPatches(tree, startcommit, outdir):
+ import tempfile
+ import shutil
+ tempdir = tempfile.mkdtemp(prefix='oepatch')
+ try:
+ shellcmd = ["git", "format-patch", startcommit, "-o", tempdir]
+ out = runcmd(["sh", "-c", " ".join(shellcmd)], tree)
+ if out:
+ for srcfile in out.split():
+ patchlines = []
+ outfile = None
+ with open(srcfile, 'r') as f:
+ for line in f:
+ if line.startswith(GitApplyTree.patch_line_prefix):
+ outfile = line.split()[-1].strip()
+ continue
+ patchlines.append(line)
+ if not outfile:
+ outfile = os.path.basename(srcfile)
+ with open(os.path.join(outdir, outfile), 'w') as of:
+ for line in patchlines:
+ of.write(line)
+ finally:
+ shutil.rmtree(tempdir)
+
def _applypatch(self, patch, force = False, reverse = False, run = True):
+ import shutil
+
def _applypatchhelper(shellcmd, patch, force = False, reverse = False, run = True):
if reverse:
shellcmd.append('-R')
@@ -214,12 +312,62 @@ class GitApplyTree(PatchTree):
return runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+ # Add hooks which add a pointer to the original patch file name in the commit message
+ commithook = os.path.join(self.dir, '.git', 'hooks', 'commit-msg')
+ commithook_backup = commithook + '.devtool-orig'
+ applyhook = os.path.join(self.dir, '.git', 'hooks', 'applypatch-msg')
+ applyhook_backup = applyhook + '.devtool-orig'
+ if os.path.exists(commithook):
+ shutil.move(commithook, commithook_backup)
+ if os.path.exists(applyhook):
+ shutil.move(applyhook, applyhook_backup)
+ with open(commithook, 'w') as f:
+ # NOTE: the formatting here is significant; if you change it you'll also need to
+ # change other places which read it back
+ f.write('echo >> $1\n')
+ f.write('echo "%s: $PATCHFILE" >> $1\n' % GitApplyTree.patch_line_prefix)
+ os.chmod(commithook, 0755)
+ shutil.copy2(commithook, applyhook)
try:
- shellcmd = ["git", "--work-tree=.", "am", "-3", "-p%s" % patch['strippath']]
- return _applypatchhelper(shellcmd, patch, force, reverse, run)
- except CmdError:
- shellcmd = ["git", "--git-dir=.", "apply", "-p%s" % patch['strippath']]
- return _applypatchhelper(shellcmd, patch, force, reverse, run)
+ patchfilevar = 'PATCHFILE="%s"' % os.path.basename(patch['file'])
+ try:
+ shellcmd = [patchfilevar, "git", "--work-tree=.", "am", "-3", "--keep-cr", "-p%s" % patch['strippath']]
+ return _applypatchhelper(shellcmd, patch, force, reverse, run)
+ except CmdError:
+ # Need to abort the git am, or we'll still be within it at the end
+ try:
+ shellcmd = ["git", "--work-tree=.", "am", "--abort"]
+ runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+ except CmdError:
+ pass
+ # Fall back to git apply
+ shellcmd = ["git", "--git-dir=.", "apply", "-p%s" % patch['strippath']]
+ try:
+ output = _applypatchhelper(shellcmd, patch, force, reverse, run)
+ except CmdError:
+ # Fall back to patch
+ output = PatchTree._applypatch(self, patch, force, reverse, run)
+ # Add all files
+ shellcmd = ["git", "add", "-f", "."]
+ output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+ # Exclude the patches directory
+ shellcmd = ["git", "reset", "HEAD", self.patchdir]
+ output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+ # Commit the result
+ (tmpfile, shellcmd) = self.prepareCommit(patch['file'])
+ try:
+ shellcmd.insert(0, patchfilevar)
+ output += runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
+ finally:
+ os.remove(tmpfile)
+ return output
+ finally:
+ os.remove(commithook)
+ os.remove(applyhook)
+ if os.path.exists(commithook_backup):
+ shutil.move(commithook_backup, commithook)
+ if os.path.exists(applyhook_backup):
+ shutil.move(applyhook_backup, applyhook)
class QuiltTree(PatchSet):
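
prepareCommit() above turns a patch header into a git commit invocation, pulling the subject, author and date out of the header lines. A trimmed standalone sketch of that extraction; the sample header is made up for illustration:

    # Trimmed sketch of the header extraction performed by prepareCommit() above.
    import re

    header_lines = [                                        # made-up patch header
        "From: Jane Doe <jane@example.com>",
        "Date: Mon, 1 Sep 2014 10:00:00 +0200",
        "Subject: [PATCH] fix widget initialisation",
    ]

    author = date = subject = None
    author_re = re.compile(r'[\S ]+ <\S+@\S+\.\S+>')
    for line in header_lines:
        if line.startswith('Subject: '):
            # Strip any [PATCH][oe-core] style tags from the subject
            subject = re.sub(r'\[.+?\]\s*', '', line.split(':', 1)[1]).strip()
        elif line.startswith('From: '):
            value = line.split(':', 1)[1].strip()
            if author_re.match(value):                      # git needs Name <email>
                author = value
        elif line.startswith('Date: '):
            date = line.split(':', 1)[1].strip()

    cmd = ["git", "commit", "-m", subject]
    if author:
        cmd.append('--author="%s"' % author)
    if date:
        cmd.append('--date="%s"' % date)
    print(cmd)
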
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py
new file mode 100644
index 0000000000..1758dcecba
--- /dev/null
+++ b/meta/lib/oe/recipeutils.py
@@ -0,0 +1,279 @@
+# Utility functions for reading and modifying recipes
+#
+# Some code borrowed from the OE layer index
+#
+# Copyright (C) 2013-2014 Intel Corporation
+#
+
+import sys
+import os
+import os.path
+import tempfile
+import textwrap
+import difflib
+import utils
+import shutil
+import re
+from collections import OrderedDict, defaultdict
+
+
+# Help us to find places to insert values
+recipe_progression = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRC_URI', 'do_fetch', 'do_unpack', 'do_patch', 'EXTRA_OECONF', 'do_configure', 'EXTRA_OEMAKE', 'do_compile', 'do_install', 'do_populate_sysroot', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'do_package', 'do_deploy']
+# Variables that sometimes are a bit long but shouldn't be wrapped
+nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER']
+list_vars = ['SRC_URI', 'LIC_FILES_CHKSUM']
+meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION']
+
+
+def pn_to_recipe(cooker, pn):
+ """Convert a recipe name (PN) to the path to the recipe file"""
+ import bb.providers
+
+ if pn in cooker.recipecache.pkg_pn:
+ filenames = cooker.recipecache.pkg_pn[pn]
+ best = bb.providers.findBestProvider(pn, cooker.data, cooker.recipecache, cooker.recipecache.pkg_pn)
+ return best[3]
+ else:
+ return None
+
+
+def get_unavailable_reasons(cooker, pn):
+ """If a recipe could not be found, find out why if possible"""
+ import bb.taskdata
+ taskdata = bb.taskdata.TaskData(None, skiplist=cooker.skiplist)
+ return taskdata.get_reasons(pn)
+
+
+def parse_recipe(fn, d):
+ """Parse an individual recipe"""
+ import bb.cache
+ envdata = bb.cache.Cache.loadDataFull(fn, [], d)
+ return envdata
+
+
+def get_var_files(fn, varlist, d):
+ """Find the file in which each of a list of variables is set.
+ Note: requires variable history to be enabled when parsing.
+ """
+ envdata = parse_recipe(fn, d)
+ varfiles = {}
+ for v in varlist:
+ history = envdata.varhistory.variable(v)
+ files = []
+ for event in history:
+ if 'file' in event and not 'flag' in event:
+ files.append(event['file'])
+ if files:
+ actualfile = files[-1]
+ else:
+ actualfile = None
+ varfiles[v] = actualfile
+
+ return varfiles
+
+
+def patch_recipe_file(fn, values, patch=False, relpath=''):
+ """Update or insert variable values into a recipe file (assuming you
+ have already identified the exact file you want to update.)
+ Note that some manual inspection/intervention may be required
+ since this cannot handle all situations.
+ """
+ remainingnames = {}
+ for k in values.keys():
+ remainingnames[k] = recipe_progression.index(k) if k in recipe_progression else -1
+ remainingnames = OrderedDict(sorted(remainingnames.iteritems(), key=lambda x: x[1]))
+
+ with tempfile.NamedTemporaryFile('w', delete=False) as tf:
+ def outputvalue(name):
+ rawtext = '%s = "%s"\n' % (name, values[name])
+ if name in nowrap_vars:
+ tf.write(rawtext)
+ elif name in list_vars:
+ splitvalue = values[name].split()
+ if len(splitvalue) > 1:
+ linesplit = ' \\\n' + (' ' * (len(name) + 4))
+ tf.write('%s = "%s%s"\n' % (name, linesplit.join(splitvalue), linesplit))
+ else:
+ tf.write(rawtext)
+ else:
+ wrapped = textwrap.wrap(rawtext)
+ for wrapline in wrapped[:-1]:
+ tf.write('%s \\\n' % wrapline)
+ tf.write('%s\n' % wrapped[-1])
+
+ tfn = tf.name
+ with open(fn, 'r') as f:
+ # First runthrough - find existing names (so we know not to insert based on recipe_progression)
+ # Second runthrough - make the changes
+ existingnames = []
+ for runthrough in [1, 2]:
+ currname = None
+ for line in f:
+ if not currname:
+ insert = False
+ for k in remainingnames.keys():
+ for p in recipe_progression:
+ if re.match('^%s[ ?:=]' % p, line):
+ if remainingnames[k] > -1 and recipe_progression.index(p) > remainingnames[k] and runthrough > 1 and not k in existingnames:
+ outputvalue(k)
+ del remainingnames[k]
+ break
+ for k in remainingnames.keys():
+ if re.match('^%s[ ?:=]' % k, line):
+ currname = k
+ if runthrough == 1:
+ existingnames.append(k)
+ else:
+ del remainingnames[k]
+ break
+ if currname and runthrough > 1:
+ outputvalue(currname)
+
+ if currname:
+ sline = line.rstrip()
+ if not sline.endswith('\\'):
+ currname = None
+ continue
+ if runthrough > 1:
+ tf.write(line)
+ f.seek(0)
+ if remainingnames:
+ tf.write('\n')
+ for k in remainingnames.keys():
+ outputvalue(k)
+
+ with open(tfn, 'U') as f:
+ tolines = f.readlines()
+ if patch:
+ with open(fn, 'U') as f:
+ fromlines = f.readlines()
+ relfn = os.path.relpath(fn, relpath)
+ diff = difflib.unified_diff(fromlines, tolines, 'a/%s' % relfn, 'b/%s' % relfn)
+ os.remove(tfn)
+ return diff
+ else:
+ with open(fn, 'w') as f:
+ f.writelines(tolines)
+ os.remove(tfn)
+ return None
+
+def localise_file_vars(fn, varfiles, varlist):
+ """Given a list of variables and variable history (fetched with get_var_files())
+ find where each variable should be set/changed. This handles for example where a
+ recipe includes an inc file where variables might be changed - in most cases
+ we want to update the inc file when changing the variable value rather than adding
+ it to the recipe itself.
+ """
+ fndir = os.path.dirname(fn) + os.sep
+
+ first_meta_file = None
+ for v in meta_vars:
+ f = varfiles.get(v, None)
+ if f:
+ actualdir = os.path.dirname(f) + os.sep
+ if actualdir.startswith(fndir):
+ first_meta_file = f
+ break
+
+ filevars = defaultdict(list)
+ for v in varlist:
+ f = varfiles[v]
+ # Only return files that are in the same directory as the recipe or in some directory below there
+ # (this excludes bbclass files and common inc files that wouldn't be appropriate to set the variable
+ # in if we were going to set a value specific to this recipe)
+ if f:
+ actualfile = f
+ else:
+ # Variable isn't set in a file; if it's one of the "meta" vars, use the first file with a meta var in it
+ if first_meta_file:
+ actualfile = first_meta_file
+ else:
+ actualfile = fn
+
+ actualdir = os.path.dirname(actualfile) + os.sep
+ if not actualdir.startswith(fndir):
+ actualfile = fn
+ filevars[actualfile].append(v)
+
+ return filevars
+
+def patch_recipe(d, fn, varvalues, patch=False, relpath=''):
+ """Modify a list of variable values in the specified recipe. Handles inc files if
+ used by the recipe.
+ """
+ varlist = varvalues.keys()
+ varfiles = get_var_files(fn, varlist, d)
+ locs = localise_file_vars(fn, varfiles, varlist)
+ patches = []
+ for f,v in locs.iteritems():
+ vals = {k: varvalues[k] for k in v}
+ patchdata = patch_recipe_file(f, vals, patch, relpath)
+ if patch:
+ patches.append(patchdata)
+
+ if patch:
+ return patches
+ else:
+ return None
+
+
+
+def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True):
+ """Copy (local) recipe files, including both files included via include/require,
+ and files referred to in the SRC_URI variable."""
+ import bb.fetch2
+ import oe.path
+
+ # FIXME need a warning if the unexpanded SRC_URI value contains variable references
+
+ uris = (d.getVar('SRC_URI', True) or "").split()
+ fetch = bb.fetch2.Fetch(uris, d)
+ if download:
+ fetch.download()
+
+ # Copy local files to target directory and gather any remote files
+ bb_dir = os.path.dirname(d.getVar('FILE', True)) + os.sep
+ remotes = []
+ includes = [path for path in d.getVar('BBINCLUDED', True).split() if
+ path.startswith(bb_dir) and os.path.exists(path)]
+ for path in fetch.localpaths() + includes:
+ # Only import files that are under the meta directory
+ if path.startswith(bb_dir):
+ if not whole_dir:
+ relpath = os.path.relpath(path, bb_dir)
+ subdir = os.path.join(tgt_dir, os.path.dirname(relpath))
+ if not os.path.exists(subdir):
+ os.makedirs(subdir)
+ shutil.copy2(path, os.path.join(tgt_dir, relpath))
+ else:
+ remotes.append(path)
+ # Simply copy whole meta dir, if requested
+ if whole_dir:
+ shutil.copytree(bb_dir, tgt_dir)
+
+ return remotes
+
+
+def get_recipe_patches(d):
+ """Get a list of the patches included in SRC_URI within a recipe."""
+ patchfiles = []
+ # Execute src_patches() defined in patch.bbclass - this works since that class
+ # is inherited globally
+ patches = bb.utils.exec_flat_python_func('src_patches', d)
+ for patch in patches:
+ _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
+ patchfiles.append(local)
+ return patchfiles
+
+
+def validate_pn(pn):
+ """Perform validation on a recipe name (PN) for a new recipe."""
+ reserved_names = ['forcevariable', 'append', 'prepend', 'remove']
+ if not re.match('^[0-9a-z-]+$', pn):
+ return 'Recipe name "%s" is invalid: only characters 0-9, a-z and - are allowed' % pn
+ elif pn in reserved_names:
+ return 'Recipe name "%s" is invalid: is a reserved keyword' % pn
+ elif pn.startswith('pn-'):
+ return 'Recipe name "%s" is invalid: names starting with "pn-" are reserved' % pn
+ return ''
+
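
The helpers above (get_var_files(), localise_file_vars(), patch_recipe_file() and patch_recipe()) form a small API for rewriting variable assignments in a recipe and the .inc files it pulls in. A rough usage sketch follows; it assumes these functions land in a module importable as oe.recipeutils and that `d` is a datastore parsed with variable history enabled. The recipe path and variable values are invented for illustration, not taken from the patch.

    # Hypothetical driver for the recipe-editing helpers above (not part of the patch).
    # `d` is assumed to be an already-parsed BitBake datastore (e.g. via tinfoil).
    import oe.recipeutils

    recipe = '/path/to/meta/recipes-example/example/example_1.0.bb'   # placeholder path

    # Reject invalid names up front
    err = oe.recipeutils.validate_pn('example')
    if err:
        raise ValueError(err)

    # Update two variables; with patch=True the files are left untouched and
    # unified diffs are returned instead, one per file that would change.
    changes = {'SUMMARY': 'An example recipe', 'LICENSE': 'MIT'}
    diffs = oe.recipeutils.patch_recipe(d, recipe, changes, patch=True, relpath='/path/to/meta')
    for diff in diffs:
        print(''.join(diff))
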
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py
index 0424a01aa5..f99626ccfa 100644
--- a/meta/lib/oe/rootfs.py
+++ b/meta/lib/oe/rootfs.py
@@ -330,7 +330,7 @@ class RpmRootfs(Rootfs):
@staticmethod
def _depends_list():
return ['DEPLOY_DIR_RPM', 'INC_RPM_IMAGE_GEN', 'RPM_PREPROCESS_COMMANDS',
- 'RPM_POSTPROCESS_COMMANDS', 'RPM_PREFER_COLOR']
+ 'RPM_POSTPROCESS_COMMANDS', 'RPM_PREFER_ELF_ARCH']
def _get_delayed_postinsts(self):
postinst_dir = self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/rpm-postinsts")
@@ -347,7 +347,21 @@ class RpmRootfs(Rootfs):
# already saved in /etc/rpm-postinsts
pass
- def _log_check(self):
+ def _log_check_warn(self):
+ r = re.compile('^(warn|Warn|NOTE: warn|NOTE: Warn)')
+ log_path = self.d.expand("${T}/log.do_rootfs")
+ with open(log_path, 'r') as log:
+ for line in log.read().split('\n'):
+ if 'log_check' in line:
+ continue
+
+ m = r.search(line)
+ if m:
+ bb.warn('log_check: There is a warn message in the logfile')
+ bb.warn('log_check: Matched keyword: [%s]' % m.group())
+ bb.warn('log_check: %s\n' % line)
+
+ def _log_check_error(self):
r = re.compile('(unpacking of archive failed|Cannot find package|exit 1|ERR|Fail)')
log_path = self.d.expand("${T}/log.do_rootfs")
with open(log_path, 'r') as log:
@@ -370,6 +384,10 @@ class RpmRootfs(Rootfs):
if found_error == 6:
bb.fatal(message)
+ def _log_check(self):
+ self._log_check_warn()
+ self._log_check_error()
+
def _handle_intercept_failure(self, registered_pkgs):
rpm_postinsts_dir = self.image_rootfs + self.d.expand('${sysconfdir}/rpm-postinsts/')
bb.utils.mkdirhier(rpm_postinsts_dir)
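
With this change _log_check() makes two passes over ${T}/log.do_rootfs: _log_check_warn() surfaces warning lines via bb.warn() and _log_check_error() keeps the existing fatal-error scan. A minimal standalone sketch of the warning match, reusing the regex from the hunk above on invented log lines:

    import re

    r = re.compile('^(warn|Warn|NOTE: warn|NOTE: Warn)')
    sample_log = [
        "NOTE: Warning: foo postinstall script returned 1",  # reported
        "log_check: warn something",                         # skipped: contains 'log_check'
        "Installing bar (1.0) to the image",                 # no match
    ]
    for line in sample_log:
        if 'log_check' in line:
            continue
        m = r.search(line)
        if m:
            print('log_check: matched keyword [%s] in: %s' % (m.group(), line))
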
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py
index ca349c433c..c57a441941 100644
--- a/meta/lib/oe/sdk.py
+++ b/meta/lib/oe/sdk.py
@@ -303,7 +303,7 @@ def sdk_list_installed_packages(d, target, format=None, rootfs_dir=None):
os_var = ["SDK_OS", None][target is True]
return RpmPkgsList(d, rootfs_dir, arch_var, os_var).list(format)
elif img_type == "ipk":
- conf_file_var = ["IPKGCONF_SDK", "IPKGCONF_Target"][target is True]
+ conf_file_var = ["IPKGCONF_SDK", "IPKGCONF_TARGET"][target is True]
return OpkgPkgsList(d, rootfs_dir, d.getVar(conf_file_var, True)).list(format)
elif img_type == "deb":
return DpkgPkgsList(d, rootfs_dir).list(format)
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index d58147f78f..af7617ee61 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -14,6 +14,9 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
def isPackageGroup(fn):
inherits = " ".join(dataCache.inherits[fn])
return "/packagegroup.bbclass" in inherits
+ def isAllArch(fn):
+ inherits = " ".join(dataCache.inherits[fn])
+ return "/allarch.bbclass" in inherits
def isImage(fn):
return "/image.bbclass" in " ".join(dataCache.inherits[fn])
@@ -36,8 +39,8 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
# Only target packages beyond here
- # packagegroups are assumed to have well behaved names which don't change between architecures/tunes
- if isPackageGroup(fn):
+ # allarch packagegroups are assumed to have well-behaved names which don't change between architectures/tunes
+ if isPackageGroup(fn) and isAllArch(fn):
return False
# Exclude well defined machine specific configurations which don't change ABI
@@ -58,6 +61,18 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
# Default to keep dependencies
return True
+def sstate_lockedsigs(d):
+ sigs = {}
+ types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES", True) or "").split()
+ for t in types:
+ lockedsigs = (d.getVar("SIGGEN_LOCKEDSIGS_%s" % t, True) or "").split()
+ for ls in lockedsigs:
+ pn, task, h = ls.split(":", 2)
+ if pn not in sigs:
+ sigs[pn] = {}
+ sigs[pn][task] = h
+ return sigs
+
class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
name = "OEBasic"
def init_rundepcheck(self, data):
@@ -72,10 +87,99 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
def init_rundepcheck(self, data):
self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split()
self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split()
+ self.lockedsigs = sstate_lockedsigs(data)
+ self.lockedhashes = {}
+ self.lockedpnmap = {}
+ self.lockedhashfn = {}
+ self.machine = data.getVar("MACHINE", True)
+ self.mismatch_msgs = []
pass
def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)
+ def get_taskdata(self):
+ data = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskdata()
+ return (data, self.lockedpnmap, self.lockedhashfn)
+
+ def set_taskdata(self, data):
+ coredata, self.lockedpnmap, self.lockedhashfn = data
+ super(bb.siggen.SignatureGeneratorBasicHash, self).set_taskdata(coredata)
+
+ def dump_sigs(self, dataCache, options):
+ self.dump_lockedsigs()
+ return super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigs(dataCache, options)
+
+ def get_taskhash(self, fn, task, deps, dataCache):
+ h = super(bb.siggen.SignatureGeneratorBasicHash, self).get_taskhash(fn, task, deps, dataCache)
+
+ recipename = dataCache.pkg_fn[fn]
+ self.lockedpnmap[fn] = recipename
+ self.lockedhashfn[fn] = dataCache.hashfn[fn]
+ if recipename in self.lockedsigs:
+ if task in self.lockedsigs[recipename]:
+ k = fn + "." + task
+ h_locked = self.lockedsigs[recipename][task]
+ self.lockedhashes[k] = h_locked
+ self.taskhash[k] = h_locked
+ #bb.warn("Using %s %s %s" % (recipename, task, h))
+
+ if h != h_locked:
+ self.mismatch_msgs.append('The %s:%s sig (%s) changed, use locked sig %s instead'
+ % (recipename, task, h, h_locked))
+
+ return h_locked
+ #bb.warn("%s %s %s" % (recipename, task, h))
+ return h
+
+ def dump_sigtask(self, fn, task, stampbase, runtime):
+ k = fn + "." + task
+ if k in self.lockedhashes:
+ return
+ super(bb.siggen.SignatureGeneratorBasicHash, self).dump_sigtask(fn, task, stampbase, runtime)
+
+ def dump_lockedsigs(self, sigfile=None):
+ if not sigfile:
+ sigfile = os.getcwd() + "/locked-sigs.inc"
+
+ bb.plain("Writing locked sigs to %s" % sigfile)
+ types = {}
+ for k in self.runtaskdeps:
+ fn = k.rsplit(".",1)[0]
+ t = self.lockedhashfn[fn].split(" ")[1].split(":")[5]
+ t = 't-' + t.replace('_', '-')
+ if t not in types:
+ types[t] = []
+ types[t].append(k)
+
+ with open(sigfile, "w") as f:
+ for t in types:
+ f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t)
+ types[t].sort()
+ sortedk = sorted(types[t], key=lambda k: self.lockedpnmap[k.rsplit(".",1)[0]])
+ for k in sortedk:
+ fn = k.rsplit(".",1)[0]
+ task = k.rsplit(".",1)[1]
+ if k not in self.taskhash:
+ continue
+ f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.taskhash[k] + " \\\n")
+ f.write(' "\n')
+ f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(types.keys())))
+
+ def checkhashes(self, missed, ret, sq_fn, sq_task, sq_hash, sq_hashfn, d):
+ checklevel = d.getVar("SIGGEN_LOCKEDSIGS_CHECK_LEVEL", True)
+ for task in range(len(sq_fn)):
+ if task not in ret:
+ for pn in self.lockedsigs:
+ if sq_hash[task] in self.lockedsigs[pn].itervalues():
+ self.mismatch_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?"
+ % (pn, sq_task[task], sq_hash[task]))
+
+ if self.mismatch_msgs and checklevel == 'warn':
+ bb.warn("\n".join(self.mismatch_msgs))
+ elif self.mismatch_msgs and checklevel == 'error':
+ bb.fatal("\n".join(self.mismatch_msgs))
+
+
# Insert these classes into siggen's namespace so it can see and select them
bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
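
The locked-signature support hinges on two variable conventions: SIGGEN_LOCKEDSIGS_TYPES names one or more type suffixes, and each SIGGEN_LOCKEDSIGS_<type> holds whitespace-separated pn:task:hash triples, which is also the format dump_lockedsigs() writes to locked-sigs.inc. A small self-contained sketch of what sstate_lockedsigs() builds from such values; the datastore stub and the hashes are invented:

    # Stand-alone illustration of the pn:task:hash parsing done by sstate_lockedsigs().
    class FakeData(object):
        """Minimal stand-in for a BitBake datastore (getVar() only)."""
        def __init__(self, values):
            self.values = values
        def getVar(self, name, expand=True):
            return self.values.get(name)

    d = FakeData({
        "SIGGEN_LOCKEDSIGS_TYPES": "t-core2-64",
        "SIGGEN_LOCKEDSIGS_t-core2-64": "m4:do_fetch:aaaa1111 m4:do_compile:bbbb2222",
    })

    sigs = {}
    for t in (d.getVar("SIGGEN_LOCKEDSIGS_TYPES", True) or "").split():
        for ls in (d.getVar("SIGGEN_LOCKEDSIGS_%s" % t, True) or "").split():
            pn, task, h = ls.split(":", 2)
            sigs.setdefault(pn, {})[task] = h

    print(sigs)   # {'m4': {'do_fetch': 'aaaa1111', 'do_compile': 'bbbb2222'}}
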
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
index 0a1d1080c9..1f84ba4b25 100644
--- a/meta/lib/oe/utils.py
+++ b/meta/lib/oe/utils.py
@@ -151,3 +151,36 @@ def execute_pre_post_process(d, cmds):
if cmd != '':
bb.note("Executing %s ..." % cmd)
bb.build.exec_func(cmd, d)
+
+def multiprocess_exec(commands, function):
+ import signal
+ import multiprocessing
+
+ if not commands:
+ return []
+
+ def init_worker():
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ nproc = min(multiprocessing.cpu_count(), len(commands))
+ pool = bb.utils.multiprocessingpool(nproc, init_worker)
+ imap = pool.imap(function, commands)
+
+ try:
+ res = list(imap)
+ pool.close()
+ pool.join()
+ results = []
+ for result in res:
+ if result is not None:
+ results.append(result)
+ return results
+
+ except KeyboardInterrupt:
+ pool.terminate()
+ pool.join()
+ raise
+
+def squashspaces(string):
+ import re
+ return re.sub("\s+", " ", string).strip()
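
multiprocess_exec() fans a list of work items out over a pool sized min(cpu_count, len(commands)), drops None results, and tears the pool down cleanly on Ctrl-C; workers ignore SIGINT so only the parent handles the interrupt. The sketch below mirrors that pattern with the stock multiprocessing.Pool, since bb.utils.multiprocessingpool is only available inside a build; the md5 worker is purely illustrative.

    import hashlib
    import multiprocessing
    import signal

    def file_md5(path):
        """Worker: return (path, md5) or None if the file is unreadable."""
        try:
            with open(path, 'rb') as f:
                return (path, hashlib.md5(f.read()).hexdigest())
        except IOError:
            return None

    def init_worker():
        # Same trick as multiprocess_exec(): let only the parent handle Ctrl-C.
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    if __name__ == '__main__':
        paths = ['/etc/hostname', '/etc/hosts', '/does/not/exist']
        nproc = min(multiprocessing.cpu_count(), len(paths))
        pool = multiprocessing.Pool(nproc, init_worker)
        try:
            results = [r for r in pool.imap(file_md5, paths) if r is not None]
            pool.close()
            pool.join()
            print(results)
        except KeyboardInterrupt:
            pool.terminate()
            pool.join()
            raise
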
diff --git a/meta/lib/oeqa/oetest.py b/meta/lib/oeqa/oetest.py
index 5552c4322e..0b7e7dc42d 100644
--- a/meta/lib/oeqa/oetest.py
+++ b/meta/lib/oeqa/oetest.py
@@ -23,7 +23,7 @@ def loadTests(tc, type="runtime"):
suite = unittest.TestSuite()
elif type == "sdk":
# set the context object passed from the test class
- setattr(oeSDKTest, "tc", tc)
+ setattr(oeTest, "tc", tc)
testloader = unittest.TestLoader()
testloader.sortTestMethodsUsing = None
suite = testloader.loadTestsFromNames(tc.testslist)
@@ -66,7 +66,14 @@ class oeRuntimeTest(oeTest):
self.target = oeRuntimeTest.tc.target
super(oeRuntimeTest, self).__init__(methodName)
-class oeSDKTest(unittest.TestCase):
+ #TODO: use package_manager.py to install packages on any type of image
+ def install_packages(self, packagelist):
+ for package in packagelist:
+ (status, result) = self.target.run("smart install -y "+package)
+ if status != 0:
+ return status
+
+class oeSDKTest(oeTest):
def __init__(self, methodName='runTest'):
self.sdktestdir = oeSDKTest.tc.sdktestdir
super(oeSDKTest, self).__init__(methodName)
diff --git a/meta/lib/oeqa/runtime/_ptest.py b/meta/lib/oeqa/runtime/_ptest.py
new file mode 100644
index 0000000000..4c58dc1d7f
--- /dev/null
+++ b/meta/lib/oeqa/runtime/_ptest.py
@@ -0,0 +1,124 @@
+import unittest, os, shutil
+from oeqa.oetest import oeRuntimeTest, skipModule
+from oeqa.utils.decorators import *
+from oeqa.utils.logparser import *
+from oeqa.utils.httpserver import HTTPService
+import bb
+import glob
+from oe.package_manager import RpmPkgsList
+import subprocess
+
+def setUpModule():
+ if not oeRuntimeTest.hasFeature("package-management"):
+ skipModule("Image doesn't have package management feature")
+ if not oeRuntimeTest.hasPackage("smart"):
+ skipModule("Image doesn't have smart installed")
+ if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]:
+ skipModule("Rpm is not the primary package manager")
+
+class PtestRunnerTest(oeRuntimeTest):
+
+ # a ptest log parser
+ def parse_ptest(self, logfile):
+ parser = Lparser(test_0_pass_regex="^PASS:(.+)", test_0_fail_regex="^FAIL:(.+)", section_0_begin_regex="^BEGIN: .*/(.+)/ptest", section_0_end_regex="^END: .*/(.+)/ptest")
+ parser.init()
+ result = Result()
+
+ with open(logfile) as f:
+ for line in f:
+ result_tuple = parser.parse_line(line)
+ if not result_tuple:
+ continue
+ line_type, category, status, name = result_tuple
+
+ if line_type == 'section' and status == 'begin':
+ current_section = name
+ continue
+
+ if line_type == 'section' and status == 'end':
+ current_section = None
+ continue
+
+ if line_type == 'test' and status == 'pass':
+ result.store(current_section, name, status)
+ continue
+
+ if line_type == 'test' and status == 'fail':
+ result.store(current_section, name, status)
+ continue
+
+ result.sort_tests()
+ return result
+
+ @classmethod
+ def setUpClass(self):
+ #note the existing channels that are on the board before creating new ones
+# self.existingchannels = set()
+# (status, result) = oeRuntimeTest.tc.target.run('smart channel --show | grep "\["', 0)
+# for x in result.split("\n"):
+# self.existingchannels.add(x)
+ self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR', True), oeRuntimeTest.tc.target.server_ip)
+ self.repo_server.start()
+
+ @classmethod
+ def tearDownClass(self):
+ self.repo_server.stop()
+ #remove created channels to be able to repeat the tests on same image
+# (status, result) = oeRuntimeTest.tc.target.run('smart channel --show | grep "\["', 0)
+# for x in result.split("\n"):
+# if x not in self.existingchannels:
+# oeRuntimeTest.tc.target.run('smart channel --remove '+x[1:-1]+' -y', 0)
+
+ def add_smart_channel(self):
+ image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE', True)
+ deploy_url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, image_pkgtype)
+ pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS', True).replace("-","_").split()
+ for arch in os.listdir('%s/%s' % (self.repo_server.root_dir, image_pkgtype)):
+ if arch in pkgarchs:
+ self.target.run('smart channel -y --add {a} type=rpm-md baseurl={u}/{a}'.format(a=arch, u=deploy_url), 0)
+ self.target.run('smart update', 0)
+
+ def install_complementary(self, globs=None):
+ installed_pkgs_file = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR', True),
+ "installed_pkgs.txt")
+ self.pkgs_list = RpmPkgsList(oeRuntimeTest.tc.d, oeRuntimeTest.tc.d.getVar('IMAGE_ROOTFS', True), oeRuntimeTest.tc.d.getVar('arch_var', True), oeRuntimeTest.tc.d.getVar('os_var', True))
+ with open(installed_pkgs_file, "w+") as installed_pkgs:
+ installed_pkgs.write(self.pkgs_list.list("arch"))
+
+ cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"),
+ "glob", oeRuntimeTest.tc.d.getVar('PKGDATA_DIR', True), installed_pkgs_file,
+ globs]
+ try:
+ bb.note("Installing complementary packages ...")
+ complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ bb.fatal("Could not compute complementary packages list. Command "
+ "'%s' returned %d:\n%s" %
+ (' '.join(cmd), e.returncode, e.output))
+
+ return complementary_pkgs.split()
+
+ def setUp(self):
+ self.buildhist_dir = oeRuntimeTest.tc.d.getVar("BUILDHISTORY_DIR_IMAGE", True)
+ self.assertTrue(os.path.exists(self.buildhist_dir))
+ self.ptest_log = os.path.join(oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR",True), "ptest-%s.log" % oeRuntimeTest.tc.d.getVar('DATETIME', True))
+
+ @skipUnlessPassed('test_ssh')
+ def test_ptestrunner(self):
+ self.add_smart_channel()
+ cond = oeRuntimeTest.hasPackage("ptest-runner") and oeRuntimeTest.hasFeature("ptest") and oeRuntimeTest.hasPackage("-ptest")
+ if not cond:
+ self.install_packages(self.install_complementary("*-ptest"))
+ self.install_packages(['ptest-runner'])
+
+ self.target.run('/usr/bin/ptest-runner > /tmp/ptest.log 2>&1', 0)
+ self.target.copy_from('/tmp/ptest.log', self.ptest_log)
+ shutil.copyfile(self.ptest_log, os.path.join(self.buildhist_dir, "ptest.log"))
+
+ result = self.parse_ptest(os.path.join(self.buildhist_dir, "ptest.log"))
+ log_results_to_location = "./results"
+ if os.path.exists(log_results_to_location):
+ shutil.rmtree(log_results_to_location)
+ os.makedirs(log_results_to_location)
+
+ result.log_as_files(log_results_to_location, test_status = ['fail'])
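
parse_ptest() above drives the generic Lparser/Result classes (added to meta/lib/oeqa/utils/logparser.py later in this patch) with ptest-specific regexes: BEGIN/END lines open and close a per-package section, and PASS:/FAIL: lines are recorded against the current section. A sketch of the log shape it expects, with invented content:

    # Hypothetical excerpt of the ptest-runner output that parse_ptest() consumes.
    sample = [
        "BEGIN: /usr/lib/bash/ptest",
        "PASS: run-builtins",
        "FAIL: run-heredoc",
        "END: /usr/lib/bash/ptest",
    ]
    # Fed through the regexes above, the captured section name is 'bash' and the
    # Result object ends up holding roughly:
    #   {'bash': [(' run-builtins', 'pass'), (' run-heredoc', 'fail')]}
    # (the leading space in each test name comes from the '^PASS:(.+)' capture group).
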
diff --git a/meta/lib/oeqa/runtime/dmesg.py b/meta/lib/oeqa/runtime/dmesg.py
index 43e16c3f79..5831471e50 100644
--- a/meta/lib/oeqa/runtime/dmesg.py
+++ b/meta/lib/oeqa/runtime/dmesg.py
@@ -8,5 +8,5 @@ class DmesgTest(oeRuntimeTest):
@testcase(215)
@skipUnlessPassed('test_ssh')
def test_dmesg(self):
- (status, output) = self.target.run('dmesg | grep -v mmci-pl18x | grep -v "error changing net interface name" | grep -iv "dma timeout" | grep -i error')
+ (status, output) = self.target.run('dmesg | grep -v mmci-pl18x | grep -v "error changing net interface name" | grep -iv "dma timeout" | grep -v usbhid | grep -i error')
self.assertEqual(status, 1, msg = "Error messages in dmesg log: %s" % output)
diff --git a/meta/lib/oeqa/runtime/parselogs.py b/meta/lib/oeqa/runtime/parselogs.py
new file mode 100644
index 0000000000..19a4f1bf79
--- /dev/null
+++ b/meta/lib/oeqa/runtime/parselogs.py
@@ -0,0 +1,200 @@
+import os
+import unittest
+from oeqa.oetest import oeRuntimeTest
+from oeqa.utils.decorators import *
+
+# In the future these lists could be moved outside of this module
+errors = ["error", "cannot", "can\'t", "failed"]
+
+common_errors = [
+ "(WW) warning, (EE) error, (NI) not implemented, (??) unknown.",
+ "dma timeout",
+ "can\'t add hid device:",
+ "usbhid: probe of ",
+ "_OSC failed (AE_ERROR)",
+ "_OSC failed (AE_SUPPORT)",
+ "AE_ALREADY_EXISTS"
+ "ACPI _OSC request failed (AE_SUPPORT)"
+ "can\'t disable ASPM",
+ "Failed to load module \"vesa\"",
+ "Failed to load module vesa",
+ "Failed to load module \"modesetting\"",
+ "Failed to load module modesetting",
+ "Failed to load module \"glx\"",
+ "Failed to load module glx",
+ "[drm] Cannot find any crtc or sizes - going 1024x768"
+ ]
+
+x86_common = [
+ '[drm:psb_do_init] *ERROR* Debug is',
+ 'wrong ELF class',
+ 'Could not enable PowerButton event',
+ 'probe of LNXPWRBN:00 failed with error -22',
+] + common_errors
+
+qemux86_common = [
+ 'Fast TSC calibration',
+ '_OSC failed (AE_NOT_FOUND); disabling ASPM',
+ 'Open ACPI failed (/var/run/acpid.socket) (No such file or directory)',
+ 'wrong ELF class',
+] + common_errors
+
+ignore_errors = {
+ 'default' : common_errors,
+ 'qemux86' : [
+ 'Failed to access perfctr msr (MSR c1 is 0)',
+ "fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge.",
+ ] + qemux86_common,
+ 'qemux86-64' : qemux86_common,
+ 'qemumips' : [
+ 'Failed to load module "glx"',
+ ] + common_errors,
+ 'qemuppc' : [
+ 'PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]',
+ 'mode "640x480" test failed',
+ 'Failed to load module "glx"',
+ ] + common_errors,
+ 'qemuarm' : [
+ 'mmci-pl18x: probe of fpga:05 failed with error -22',
+ 'mmci-pl18x: probe of fpga:0b failed with error -22',
+ 'Failed to load module "glx"'
+ ] + common_errors,
+ 'emenlow' : x86_common,
+ 'crownbay' : x86_common,
+ 'genericx86' : x86_common,
+ 'genericx86-64' : x86_common,
+ 'edgerouter' : [
+ 'Fatal server error:',
+ ] + common_errors,
+ 'minnow' : [
+ 'netlink init failed',
+ 'NETLINK INITIALIZATION FAILED',
+ ] + common_errors,
+ 'jasperforest' : [
+ 'Activated service \'org.bluez\' failed:',
+ 'Unable to find NFC netlink family',
+ 'netlink init failed',
+ 'NETLINK INITIALIZATION FAILED',
+ ] + common_errors,
+}
+
+log_locations = ["/var/log/","/var/log/dmesg", "/tmp/dmesg_output.log"]
+
+class ParseLogsTest(oeRuntimeTest):
+
+ @classmethod
+ def setUpClass(self):
+ self.errors = errors
+ self.ignore_errors = ignore_errors
+ self.log_locations = log_locations
+ self.msg = ""
+
+ def getMachine(self):
+ (status, output) = self.target.run("uname -n")
+ return output
+
+ #get some information on the CPU of the machine to display at the beginning of the output. This info might be useful in some cases.
+ def getHardwareInfo(self):
+ hwi = ""
+ (status, cpu_name) = self.target.run("cat /proc/cpuinfo | grep \"model name\" | head -n1 | awk 'BEGIN{FS=\":\"}{print $2}'")
+ (status, cpu_physical_cores) = self.target.run("cat /proc/cpuinfo | grep \"cpu cores\" | head -n1 | awk {'print $4'}")
+ (status, cpu_logical_cores) = self.target.run("cat /proc/cpuinfo | grep \"processor\" | wc -l")
+ (status, cpu_arch) = self.target.run("uname -m")
+ hwi += "Machine information: \n"
+ hwi += "*******************************\n"
+ hwi += "Machine name: "+self.getMachine()+"\n"
+ hwi += "CPU: "+str(cpu_name)+"\n"
+ hwi += "Arch: "+str(cpu_arch)+"\n"
+ hwi += "Physical cores: "+str(cpu_physical_cores)+"\n"
+ hwi += "Logical cores: "+str(cpu_logical_cores)+"\n"
+ hwi += "*******************************\n"
+ return hwi
+
+ #go through the provided log locations; if a location is a directory, create a list of all the .log files in it,
+ #and if it is a file, just add it to that list
+ def getLogList(self, log_locations):
+ logs = []
+ for location in log_locations:
+ (status, output) = self.target.run("test -f "+str(location))
+ if (status == 0):
+ logs.append(str(location))
+ else:
+ (status, output) = self.target.run("test -d "+str(location))
+ if (status == 0):
+ (status, output) = self.target.run("find "+str(location)+"/*.log -maxdepth 1 -type f")
+ output = output.splitlines()
+ for logfile in output:
+ logs.append(os.path.join(location,str(logfile)))
+ return logs
+
+ #build the grep command to be used with filters and exclusions
+ def build_grepcmd(self, errors, ignore_errors, log):
+ grepcmd = "grep "
+ grepcmd +="-Ei \""
+ for error in errors:
+ grepcmd += error+"|"
+ grepcmd = grepcmd[:-1]
+ grepcmd += "\" "+str(log)+" | grep -Eiv \'"
+ try:
+ errorlist = ignore_errors[self.getMachine()]
+ except KeyError:
+ self.msg += "No ignore list found for this machine, using default\n"
+ errorlist = ignore_errors['default']
+ for ignore_error in errorlist:
+ ignore_error = ignore_error.replace("(", "\(")
+ ignore_error = ignore_error.replace(")", "\)")
+ ignore_error = ignore_error.replace("'", ".")
+ ignore_error = ignore_error.replace("?", "\?")
+ ignore_error = ignore_error.replace("[", "\[")
+ ignore_error = ignore_error.replace("]", "\]")
+ ignore_error = ignore_error.replace("*", "\*")
+ grepcmd += ignore_error+"|"
+ grepcmd = grepcmd[:-1]
+ grepcmd += "\'"
+ return grepcmd
+
+ #grep only the errors so that their context can be collected. Default context is 10 lines before and after the error itself
+ def parse_logs(self, errors, ignore_errors, logs, lines_before = 10, lines_after = 10):
+ results = {}
+ rez = []
+ for log in logs:
+ thegrep = self.build_grepcmd(errors, ignore_errors, log)
+ try:
+ (status, result) = self.target.run(thegrep)
+ except:
+ pass
+ if result:
+ results[log] = {}
+ rez = result.splitlines()
+ for xrez in rez:
+ command = "grep \"\\"+str(xrez)+"\" -B "+str(lines_before)+" -A "+str(lines_after)+" "+str(log)
+ try:
+ (status, yrez) = self.target.run(command)
+ except:
+ pass
+ results[log][xrez]=yrez
+ return results
+
+ #get the output of dmesg and write it in a file. This file is added to log_locations.
+ def write_dmesg(self):
+ (status, dmesg) = self.target.run("dmesg")
+ (status, dmesg2) = self.target.run("echo \""+str(dmesg)+"\" > /tmp/dmesg_output.log")
+
+ @skipUnlessPassed('test_ssh')
+ def test_parselogs(self):
+ self.write_dmesg()
+ log_list = self.getLogList(self.log_locations)
+ result = self.parse_logs(self.errors, self.ignore_errors, log_list)
+ print self.getHardwareInfo()
+ errcount = 0
+ for log in result:
+ self.msg += "Log: "+log+"\n"
+ self.msg += "-----------------------\n"
+ for error in result[log]:
+ errcount += 1
+ self.msg += "Central error: "+str(error)+"\n"
+ self.msg += "***********************\n"
+ self.msg += result[str(log)][str(error)]+"\n"
+ self.msg += "***********************\n"
+ self.msg += "%s errors found in logs." % errcount
+ self.assertEqual(errcount, 0, msg=self.msg)
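
build_grepcmd() assembles one remote shell pipeline per log: a case-insensitive extended grep for the error keywords, piped into grep -Eiv with the machine-specific ignore list (after escaping parentheses, brackets, *, ? and quotes). A simplified illustration of the command shape it produces, with shortened lists and no escaping:

    errors = ["error", "cannot", "can't", "failed"]
    ignore = ["dma timeout", "usbhid: probe of "]
    log = "/var/log/messages"

    grepcmd = 'grep -Ei "' + "|".join(errors) + '" ' + log
    grepcmd += " | grep -Eiv '" + "|".join(ignore) + "'"
    print(grepcmd)
    # grep -Ei "error|cannot|can't|failed" /var/log/messages | grep -Eiv 'dma timeout|usbhid: probe of '
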
diff --git a/meta/lib/oeqa/runtime/xorg.py b/meta/lib/oeqa/runtime/xorg.py
index 7aa61ad6ab..a07031e5c8 100644
--- a/meta/lib/oeqa/runtime/xorg.py
+++ b/meta/lib/oeqa/runtime/xorg.py
@@ -14,9 +14,4 @@ class XorgTest(oeRuntimeTest):
(status, output) = self.target.run(oeRuntimeTest.pscmd + ' | grep -v xinit | grep [X]org')
self.assertEqual(status, 0, msg="Xorg does not appear to be running %s" % self.target.run(oeRuntimeTest.pscmd)[1])
- @testcase(972)
- @skipUnlessPassed('test_ssh')
- def test_xorg_error(self):
- (status, output) = self.target.run('cat /var/log/Xorg.0.log | grep -v "(EE) error," | grep -v "PreInit" | grep -v "evdev:" | grep -v "glx" | grep "(EE)"')
- self.assertEqual(status, 1, msg="Errors in Xorg log: %s" % output)
diff --git a/meta/lib/oeqa/sdk/buildsudoku.py b/meta/lib/oeqa/sdk/buildsudoku.py
index 6a60c76be8..dea77c6599 100644
--- a/meta/lib/oeqa/sdk/buildsudoku.py
+++ b/meta/lib/oeqa/sdk/buildsudoku.py
@@ -2,6 +2,10 @@ from oeqa.oetest import oeSDKTest, skipModule
from oeqa.utils.decorators import *
from oeqa.utils.targetbuild import SDKBuildProject
+def setUpModule():
+ if not oeSDKTest.hasPackage("gtk\+"):
+ skipModule("Image doesn't have gtk+ in manifest")
+
class SudokuTest(oeSDKTest):
@classmethod
diff --git a/meta/lib/oeqa/selftest/bbtests.py b/meta/lib/oeqa/selftest/bbtests.py
index 68f97bd8e3..5708d3dc9b 100644
--- a/meta/lib/oeqa/selftest/bbtests.py
+++ b/meta/lib/oeqa/selftest/bbtests.py
@@ -27,7 +27,7 @@ class BitbakeTests(oeSelfTest):
def test_event_handler(self):
self.write_config("INHERIT += \"test_events\"")
result = bitbake('m4-native')
- find_build_started = re.search("NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Preparing runqueue", result.output)
+ find_build_started = re.search("NOTE: Test for bb\.event\.BuildStarted(\n.*)*NOTE: Preparing RunQueue", result.output)
find_build_completed = re.search("Tasks Summary:.*(\n.*)*NOTE: Test for bb\.event\.BuildCompleted", result.output)
self.assertTrue(find_build_started, msg = "Match failed in:\n%s" % result.output)
self.assertTrue(find_build_completed, msg = "Match failed in:\n%s" % result.output)
@@ -71,6 +71,7 @@ class BitbakeTests(oeSelfTest):
@testcase(163)
def test_force_task(self):
bitbake('m4-native')
+ self.add_command_to_tearDown('bitbake -c clean m4-native')
result = bitbake('-C compile m4-native')
look_for_tasks = ['do_compile', 'do_install', 'do_populate_sysroot']
for task in look_for_tasks:
diff --git a/meta/lib/oeqa/selftest/buildoptions.py b/meta/lib/oeqa/selftest/buildoptions.py
index ec541e5be6..381741ef0d 100644
--- a/meta/lib/oeqa/selftest/buildoptions.py
+++ b/meta/lib/oeqa/selftest/buildoptions.py
@@ -7,10 +7,15 @@ from oeqa.selftest.base import oeSelfTest
from oeqa.selftest.buildhistory import BuildhistoryBase
from oeqa.utils.commands import runCmd, bitbake, get_bb_var
import oeqa.utils.ftools as ftools
+from oeqa.utils.decorators import testcase
class ImageOptionsTests(oeSelfTest):
+ @testcase(761)
def test_incremental_image_generation(self):
+ image_pkgtype = get_bb_var("IMAGE_PKGTYPE")
+ if image_pkgtype != 'rpm':
+ self.skipTest('Not using RPM as main package format')
bitbake("-c cleanall core-image-minimal")
self.write_config('INC_RPM_IMAGE_GEN = "1"')
self.append_config('IMAGE_FEATURES += "ssh-server-openssh"')
@@ -22,6 +27,7 @@ class ImageOptionsTests(oeSelfTest):
res = runCmd("grep 'Removing openssh-sshd' %s" %(os.path.join(get_bb_var("WORKDIR", "core-image-minimal"), "temp/log.do_rootfs")),ignore_status=True)
self.assertEqual(0, res.status, msg="openssh-sshd was not removed from image")
+ @testcase(925)
def test_rm_old_image(self):
bitbake("core-image-minimal")
deploydir = get_bb_var("DEPLOY_DIR_IMAGE", target="core-image-minimal")
@@ -37,6 +43,7 @@ class ImageOptionsTests(oeSelfTest):
remaining_not_expected = [path for path in track_original_files if os.path.basename(path) in deploydir_files]
self.assertFalse(remaining_not_expected, msg="\nThe following image files ware not removed: %s" % ', '.join(map(str, remaining_not_expected)))
+ @testcase(286)
def test_ccache_tool(self):
bitbake("ccache-native")
self.assertTrue(os.path.isfile(os.path.join(get_bb_var('STAGING_BINDIR_NATIVE', 'ccache-native'), "ccache")))
@@ -50,6 +57,7 @@ class ImageOptionsTests(oeSelfTest):
class DiskMonTest(oeSelfTest):
+ @testcase(277)
def test_stoptask_behavior(self):
self.write_config('BB_DISKMON_DIRS = "STOPTASKS,${TMPDIR},100000G,100K"')
res = bitbake("m4", ignore_status = True)
@@ -65,6 +73,7 @@ class DiskMonTest(oeSelfTest):
class SanityOptionsTest(oeSelfTest):
+ @testcase(927)
def test_options_warnqa_errorqa_switch(self):
bitbake("xcursor-transparent-theme -ccleansstate")
@@ -84,6 +93,7 @@ class SanityOptionsTest(oeSelfTest):
self.delete_recipeinc('xcursor-transparent-theme')
self.assertTrue("WARNING: QA Issue: xcursor-transparent-theme-dbg is listed in PACKAGES multiple times, this leads to packaging errors." in res.output, msg=res.output)
+ @testcase(278)
def test_sanity_userspace_dependency(self):
self.append_config('WARN_QA_append = " unsafe-references-in-binaries unsafe-references-in-scripts"')
bitbake("-ccleansstate gzip nfs-utils")
@@ -93,10 +103,12 @@ class SanityOptionsTest(oeSelfTest):
class BuildhistoryTests(BuildhistoryBase):
+ @testcase(293)
def test_buildhistory_basic(self):
self.run_buildhistory_operation('xcursor-transparent-theme')
self.assertTrue(os.path.isdir(get_bb_var('BUILDHISTORY_DIR')))
+ @testcase(294)
def test_buildhistory_buildtime_pr_backwards(self):
self.add_command_to_tearDown('cleanup-workdir')
target = 'xcursor-transparent-theme'
diff --git a/meta/lib/oeqa/selftest/devtool.py b/meta/lib/oeqa/selftest/devtool.py
new file mode 100644
index 0000000000..74fb325803
--- /dev/null
+++ b/meta/lib/oeqa/selftest/devtool.py
@@ -0,0 +1,241 @@
+import unittest
+import os
+import logging
+import re
+import shutil
+import tempfile
+import glob
+
+import oeqa.utils.ftools as ftools
+from oeqa.selftest.base import oeSelfTest
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.decorators import testcase
+
+class DevtoolTests(oeSelfTest):
+
+ def test_create_workspace(self):
+ # Check preconditions
+ workspacedir = os.path.join(self.builddir, 'workspace')
+ self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ result = runCmd('bitbake-layers show-layers')
+ self.assertTrue('/workspace' not in result.output, 'This test cannot be run with a workspace layer in bblayers.conf')
+ # Try creating a workspace layer with a specific path
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ result = runCmd('devtool create-workspace %s' % tempdir)
+ self.assertTrue(os.path.isfile(os.path.join(tempdir, 'conf', 'layer.conf')))
+ result = runCmd('bitbake-layers show-layers')
+ self.assertIn(tempdir, result.output)
+ # Try creating a workspace layer with the default path
+ self.track_for_cleanup(workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool create-workspace')
+ self.assertTrue(os.path.isfile(os.path.join(workspacedir, 'conf', 'layer.conf')))
+ result = runCmd('bitbake-layers show-layers')
+ self.assertNotIn(tempdir, result.output)
+ self.assertIn(workspacedir, result.output)
+
+ def test_recipetool_create(self):
+ # Try adding a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ tempsrc = os.path.join(tempdir, 'srctree')
+ os.makedirs(tempsrc)
+ recipefile = os.path.join(tempdir, 'logrotate_3.8.7.bb')
+ srcuri = 'https://fedorahosted.org/releases/l/o/logrotate/logrotate-3.8.7.tar.gz'
+ result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc))
+ self.assertTrue(os.path.isfile(recipefile))
+ checkvars = {}
+ checkvars['LICENSE'] = 'GPLv2'
+ checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=18810669f13b87348459e611d31ab760'
+ checkvars['SRC_URI'] = 'https://fedorahosted.org/releases/l/o/logrotate/logrotate-${PV}.tar.gz'
+ checkvars['SRC_URI[md5sum]'] = '99e08503ef24c3e2e3ff74cc5f3be213'
+ checkvars['SRC_URI[sha256sum]'] = 'f6ba691f40e30e640efa2752c1f9499a3f9738257660994de70a45fe00d12b64'
+ with open(recipefile, 'r') as f:
+ for line in f:
+ if '=' in line:
+ splitline = line.split('=', 1)
+ var = splitline[0].rstrip()
+ value = splitline[1].strip().strip('"')
+ if var in checkvars:
+ needvalue = checkvars.pop(var)
+ self.assertEqual(value, needvalue)
+ if line.startswith('inherit '):
+ inherits = line.split()[1:]
+
+ self.assertEqual(checkvars, {}, 'Some variables not found')
+
+ def test_recipetool_create_git(self):
+ # Ensure we have the right data in shlibs/pkgdata
+ bitbake('libpng pango libx11 libxext')
+ # Try adding a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ tempsrc = os.path.join(tempdir, 'srctree')
+ os.makedirs(tempsrc)
+ recipefile = os.path.join(tempdir, 'libmatchbox.bb')
+ srcuri = 'git://git.yoctoproject.org/libmatchbox'
+ result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc))
+ self.assertTrue(os.path.isfile(recipefile), 'recipetool did not create recipe file; output:\n%s' % result.output)
+ checkvars = {}
+ checkvars['LICENSE'] = 'LGPLv2.1'
+ checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=7fbc338309ac38fefcd64b04bb903e34'
+ checkvars['S'] = '${WORKDIR}/git'
+ checkvars['PV'] = '1.0+git${SRCPV}'
+ checkvars['SRC_URI'] = srcuri
+ checkvars['DEPENDS'] = 'libpng pango libx11 libxext'
+ inherits = []
+ with open(recipefile, 'r') as f:
+ for line in f:
+ if '=' in line:
+ splitline = line.split('=', 1)
+ var = splitline[0].rstrip()
+ value = splitline[1].strip().strip('"')
+ if var in checkvars:
+ needvalue = checkvars.pop(var)
+ self.assertEqual(value, needvalue)
+ if line.startswith('inherit '):
+ inherits = line.split()[1:]
+
+ self.assertEqual(checkvars, {}, 'Some variables not found')
+
+ self.assertIn('autotools', inherits, 'Missing inherit of autotools')
+ self.assertIn('pkgconfig', inherits, 'Missing inherit of pkgconfig')
+
+ def test_devtool_add(self):
+ # Check preconditions
+ workspacedir = os.path.join(self.builddir, 'workspace')
+ self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ # Fetch source
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ url = 'http://www.ivarch.com/programs/sources/pv-1.5.3.tar.bz2'
+ result = runCmd('wget %s' % url, cwd=tempdir)
+ result = runCmd('tar xfv pv-1.5.3.tar.bz2', cwd=tempdir)
+ srcdir = os.path.join(tempdir, 'pv-1.5.3')
+ self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure')), 'Unable to find configure script in source directory')
+ # Test devtool add
+ self.track_for_cleanup(workspacedir)
+ self.add_command_to_tearDown('bitbake -c cleansstate pv')
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool add pv %s' % srcdir)
+ self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn('pv', result.output)
+ self.assertIn(srcdir, result.output)
+ # Clean up anything in the workdir/sysroot/sstate cache (have to do this *after* devtool add since the recipe only exists then)
+ bitbake('pv -c cleansstate')
+ # Test devtool build
+ result = runCmd('devtool build pv')
+ installdir = get_bb_var('D', 'pv')
+ self.assertTrue(installdir, 'Could not query installdir variable')
+ bindir = get_bb_var('bindir', 'pv')
+ self.assertTrue(bindir, 'Could not query bindir variable')
+ if bindir[0] == '/':
+ bindir = bindir[1:]
+ self.assertTrue(os.path.isfile(os.path.join(installdir, bindir, 'pv')), 'pv binary not found in D')
+
+ def test_devtool_modify(self):
+ # Check preconditions
+ workspacedir = os.path.join(self.builddir, 'workspace')
+ self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ # Clean up anything in the workdir/sysroot/sstate cache
+ bitbake('mdadm -c cleansstate')
+ # Try modifying a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ self.add_command_to_tearDown('bitbake -c clean mdadm')
+ result = runCmd('devtool modify mdadm -x %s' % tempdir)
+ self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile')), 'Extracted source could not be found')
+ self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
+ self.assertTrue(os.path.exists(os.path.join(workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created')
+ matches = glob.glob(os.path.join(workspacedir, 'appends', 'mdadm_*.bbappend'))
+ self.assertTrue(matches, 'bbappend not created')
+ # Test devtool status
+ result = runCmd('devtool status')
+ self.assertIn('mdadm', result.output)
+ self.assertIn(tempdir, result.output)
+ # Check git repo
+ result = runCmd('git status --porcelain', cwd=tempdir)
+ self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
+ result = runCmd('git symbolic-ref HEAD', cwd=tempdir)
+ self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+ # Try building
+ bitbake('mdadm')
+ # Try making (minor) modifications to the source
+ result = runCmd("sed -i 's!^\.TH.*!.TH MDADM 8 \"\" v9.999-custom!' %s" % os.path.join(tempdir, 'mdadm.8.in'))
+ bitbake('mdadm -c package')
+ pkgd = get_bb_var('PKGD', 'mdadm')
+ self.assertTrue(pkgd, 'Could not query PKGD variable')
+ mandir = get_bb_var('mandir', 'mdadm')
+ self.assertTrue(mandir, 'Could not query mandir variable')
+ if mandir[0] == '/':
+ mandir = mandir[1:]
+ with open(os.path.join(pkgd, mandir, 'man8', 'mdadm.8'), 'r') as f:
+ for line in f:
+ if line.startswith('.TH'):
+ self.assertEqual(line.rstrip(), '.TH MDADM 8 "" v9.999-custom', 'man file not modified')
+ # Test devtool reset
+ result = runCmd('devtool reset mdadm')
+ result = runCmd('devtool status')
+ self.assertNotIn('mdadm', result.output)
+
+ def test_devtool_update_recipe(self):
+ # Check preconditions
+ workspacedir = os.path.join(self.builddir, 'workspace')
+ self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ testrecipe = 'minicom'
+ recipefile = get_bb_var('FILE', testrecipe)
+ result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
+ self.assertEqual(result.output.strip(), "", '%s recipe is not clean' % testrecipe)
+ # First, modify a recipe
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ # (don't bother with cleaning the recipe on teardown, we won't be building it)
+ result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir))
+ # Check git repo
+ self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
+ result = runCmd('git status --porcelain', cwd=tempdir)
+ self.assertEqual(result.output.strip(), "", 'Created git repo is not clean')
+ result = runCmd('git symbolic-ref HEAD', cwd=tempdir)
+ self.assertEqual(result.output.strip(), "refs/heads/devtool", 'Wrong branch in git repo')
+ # Add a couple of commits
+ # FIXME: this only tests adding, need to also test update and remove
+ result = runCmd('echo "Additional line" >> README', cwd=tempdir)
+ result = runCmd('git commit -a -m "Change the README"', cwd=tempdir)
+ result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir)
+ result = runCmd('git add devtool-new-file', cwd=tempdir)
+ result = runCmd('git commit -m "Add a new file"', cwd=tempdir)
+ self.add_command_to_tearDown('cd %s; rm %s/*.patch; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile)))
+ result = runCmd('devtool update-recipe %s' % testrecipe)
+ result = runCmd('git status . --porcelain', cwd=os.path.dirname(recipefile))
+ self.assertNotEqual(result.output.strip(), "", '%s recipe should be modified' % testrecipe)
+ status = result.output.splitlines()
+ self.assertEqual(len(status), 3, 'Less/more files modified than expected. Entire status:\n%s' % result.output)
+ for line in status:
+ if line.endswith('0001-Change-the-README.patch'):
+ self.assertEqual(line[:3], '?? ', 'Unexpected status in line: %s' % line)
+ elif line.endswith('0002-Add-a-new-file.patch'):
+ self.assertEqual(line[:3], '?? ', 'Unexpected status in line: %s' % line)
+ elif re.search('minicom_[^_]*.bb$', line):
+ self.assertEqual(line[:3], ' M ', 'Unexpected status in line: %s' % line)
+ else:
+ raise AssertionError('Unexpected modified file in status: %s' % line)
+
+ def test_devtool_extract(self):
+ # Check preconditions
+ workspacedir = os.path.join(self.builddir, 'workspace')
+ self.assertTrue(not os.path.exists(workspacedir), 'This test cannot be run with a workspace directory under the build directory')
+ tempdir = tempfile.mkdtemp(prefix='devtoolqa')
+ # Try devtool extract
+ self.track_for_cleanup(tempdir)
+ self.track_for_cleanup(workspacedir)
+ self.add_command_to_tearDown('bitbake-layers remove-layer */workspace')
+ result = runCmd('devtool extract remake %s' % tempdir)
+ self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile.am')), 'Extracted source could not be found')
+ self.assertTrue(os.path.isdir(os.path.join(tempdir, '.git')), 'git repository for external source tree not found')
diff --git a/meta/lib/oeqa/selftest/sstatetests.py b/meta/lib/oeqa/selftest/sstatetests.py
index 3789426797..d578ddd489 100644
--- a/meta/lib/oeqa/selftest/sstatetests.py
+++ b/meta/lib/oeqa/selftest/sstatetests.py
@@ -40,11 +40,11 @@ class SStateTests(SStateBase):
@testcase(976)
def test_sstate_creation_distro_nonspecific_pass(self):
- self.run_test_sstate_creation(['eglibc-initial'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+ self.run_test_sstate_creation(['glibc-initial'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
@testcase(976)
def test_sstate_creation_distro_nonspecific_fail(self):
- self.run_test_sstate_creation(['eglibc-initial'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
+ self.run_test_sstate_creation(['glibc-initial'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False)
# Test the sstate files deletion part of the do_cleansstate task
@@ -67,16 +67,16 @@ class SStateTests(SStateBase):
@testcase(977)
def test_cleansstate_task_distro_specific_nonspecific(self):
targetarch = get_bb_var('TUNE_ARCH')
- self.run_test_cleansstate_task(['binutils-cross-' + targetarch, 'binutils-native', 'eglibc-initial'], distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
+ self.run_test_cleansstate_task(['binutils-cross-' + targetarch, 'binutils-native', 'glibc-initial'], distro_specific=True, distro_nonspecific=True, temp_sstate_location=True)
@testcase(977)
def test_cleansstate_task_distro_nonspecific(self):
- self.run_test_cleansstate_task(['eglibc-initial'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
+ self.run_test_cleansstate_task(['glibc-initial'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True)
@testcase(977)
def test_cleansstate_task_distro_specific(self):
targetarch = get_bb_var('TUNE_ARCH')
- self.run_test_cleansstate_task(['binutils-cross-'+ targetarch, 'binutils-native', 'eglibc-initial'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
+ self.run_test_cleansstate_task(['binutils-cross-'+ targetarch, 'binutils-native', 'glibc-initial'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True)
# Test rebuilding of distro-specific sstate files
diff --git a/meta/lib/oeqa/utils/commands.py b/meta/lib/oeqa/utils/commands.py
index 802bc2f208..5b601d9806 100644
--- a/meta/lib/oeqa/utils/commands.py
+++ b/meta/lib/oeqa/utils/commands.py
@@ -110,11 +110,11 @@ def runCmd(command, ignore_status=False, timeout=None, assert_error=True, **opti
def bitbake(command, ignore_status=False, timeout=None, postconfig=None, **options):
if postconfig:
- postconfig_file = os.path.join(os.environ.get('BUILDDIR'), 'oeqa-post.conf')
- ftools.write_file(postconfig_file, postconfig)
- extra_args = "-R %s" % postconfig_file
+ postconfig_file = os.path.join(os.environ.get('BUILDDIR'), 'oeqa-post.conf')
+ ftools.write_file(postconfig_file, postconfig)
+ extra_args = "-R %s" % postconfig_file
else:
- extra_args = ""
+ extra_args = ""
if isinstance(command, basestring):
cmd = "bitbake " + extra_args + " " + command
@@ -122,7 +122,7 @@ def bitbake(command, ignore_status=False, timeout=None, postconfig=None, **optio
cmd = [ "bitbake" ] + [a for a in (command + extra_args.split(" ")) if a not in [""]]
try:
- return runCmd(cmd, ignore_status, timeout, **options)
+ return runCmd(cmd, ignore_status, timeout, **options)
finally:
if postconfig:
os.remove(postconfig_file)
@@ -138,9 +138,9 @@ def get_bb_var(var, target=None, postconfig=None):
val = None
bbenv = get_bb_env(target, postconfig=postconfig)
for line in bbenv.splitlines():
- if line.startswith(var + "="):
+ if line.startswith(var + "=") or line.startswith("export " + var + "="):
val = line.split('=')[1]
- val = val.replace('\"','')
+ val = val.strip('\"')
break
return val
diff --git a/meta/lib/oeqa/utils/decorators.py b/meta/lib/oeqa/utils/decorators.py
index a9e67ed863..2d5db2474a 100644
--- a/meta/lib/oeqa/utils/decorators.py
+++ b/meta/lib/oeqa/utils/decorators.py
@@ -86,10 +86,15 @@ class testcase(object):
self.test_case = test_case
def __call__(self, func):
- def wrapped_f(*args):
- return func(*args)
- wrapped_f.test_case = self.test_case
- return wrapped_f
+ def wrapped_f(*args):
+ return func(*args)
+ wrapped_f.test_case = self.test_case
+ wrapped_f.__name__ = func.__name__
+ return wrapped_f
+
+class NoParsingFilter(logging.Filter):
+ def filter(self, record):
+ return record.levelno == 100
def LogResults(original_class):
orig_method = original_class.run
@@ -97,49 +102,51 @@ def LogResults(original_class):
#rewrite the run method of unittest.TestCase to add testcase logging
def run(self, result, *args, **kws):
orig_method(self, result, *args, **kws)
- passed = True
- testMethod = getattr(self, self._testMethodName)
-
- #if test case is decorated then use it's number, else use it's name
- try:
- test_case = testMethod.test_case
- except AttributeError:
- test_case = self._testMethodName
-
- #create custom logging level for filtering.
- custom_log_level = 100
- logging.addLevelName(custom_log_level, 'RESULTS')
- caller = os.path.basename(sys.argv[0])
-
- def results(self, message, *args, **kws):
- if self.isEnabledFor(custom_log_level):
- self.log(custom_log_level, message, *args, **kws)
- logging.Logger.results = results
-
- logging.basicConfig(filename=os.path.join(os.getcwd(),'results-'+caller+'.log'),
+ passed = True
+ testMethod = getattr(self, self._testMethodName)
+
+ #if test case is decorated then use its number, else use its name
+ try:
+ test_case = testMethod.test_case
+ except AttributeError:
+ test_case = self._testMethodName
+
+ #create custom logging level for filtering.
+ custom_log_level = 100
+ logging.addLevelName(custom_log_level, 'RESULTS')
+ caller = os.path.basename(sys.argv[0])
+
+ def results(self, message, *args, **kws):
+ if self.isEnabledFor(custom_log_level):
+ self.log(custom_log_level, message, *args, **kws)
+ logging.Logger.results = results
+
+ logging.basicConfig(filename=os.path.join(os.getcwd(),'results-'+caller+'.log'),
filemode='w',
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
datefmt='%H:%M:%S',
level=custom_log_level)
- local_log = logging.getLogger(caller)
+ for handler in logging.root.handlers:
+ handler.addFilter(NoParsingFilter())
+ local_log = logging.getLogger(caller)
- #check status of tests and record it
+ #check status of tests and record it
for (name, msg) in result.errors:
- if self._testMethodName == str(name).split(' ')[0]:
- local_log.results("Testcase "+str(test_case)+": ERROR")
- local_log.results("Testcase "+str(test_case)+":\n"+msg)
- passed = False
+ if self._testMethodName == str(name).split(' ')[0]:
+ local_log.results("Testcase "+str(test_case)+": ERROR")
+ local_log.results("Testcase "+str(test_case)+":\n"+msg)
+ passed = False
for (name, msg) in result.failures:
- if self._testMethodName == str(name).split(' ')[0]:
- local_log.results("Testcase "+str(test_case)+": FAILED")
- local_log.results("Testcase "+str(test_case)+":\n"+msg)
- passed = False
+ if self._testMethodName == str(name).split(' ')[0]:
+ local_log.results("Testcase "+str(test_case)+": FAILED")
+ local_log.results("Testcase "+str(test_case)+":\n"+msg)
+ passed = False
for (name, msg) in result.skipped:
- if self._testMethodName == str(name).split(' ')[0]:
- local_log.results("Testcase "+str(test_case)+": SKIPPED")
- passed = False
- if passed:
- local_log.results("Testcase "+str(test_case)+": PASSED")
+ if self._testMethodName == str(name).split(' ')[0]:
+ local_log.results("Testcase "+str(test_case)+": SKIPPED")
+ passed = False
+ if passed:
+ local_log.results("Testcase "+str(test_case)+": PASSED")
original_class.run = run
return original_class
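
The one functional change in the @testcase decorator above is wrapped_f.__name__ = func.__name__: LogResults() matches result.errors/failures/skipped entries against self._testMethodName, and without the name copy every decorated test reported itself as 'wrapped_f'. A minimal sketch of the difference, with the decorator reproduced from the hunk and the test framework omitted:

    class testcase(object):
        def __init__(self, test_case):
            self.test_case = test_case
        def __call__(self, func):
            def wrapped_f(*args):
                return func(*args)
            wrapped_f.test_case = self.test_case
            wrapped_f.__name__ = func.__name__   # keep the original method name
            return wrapped_f

    @testcase(123)
    def test_example():
        pass

    print(test_example.__name__)   # 'test_example' ('wrapped_f' without the fix)
    print(test_example.test_case)  # 123
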
diff --git a/meta/lib/oeqa/utils/httpserver.py b/meta/lib/oeqa/utils/httpserver.py
index f161a1bddd..76518d8ef9 100644
--- a/meta/lib/oeqa/utils/httpserver.py
+++ b/meta/lib/oeqa/utils/httpserver.py
@@ -5,6 +5,8 @@ import os
class HTTPServer(SimpleHTTPServer.BaseHTTPServer.HTTPServer):
def server_start(self, root_dir):
+ import signal
+ signal.signal(signal.SIGTERM, signal.SIG_DFL)
os.chdir(root_dir)
self.serve_forever()
diff --git a/meta/lib/oeqa/utils/logparser.py b/meta/lib/oeqa/utils/logparser.py
new file mode 100644
index 0000000000..87b50354cd
--- /dev/null
+++ b/meta/lib/oeqa/utils/logparser.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+
+import sys
+import os
+import re
+import ftools
+
+
+# A parser that can be used to identify whether a line is a test result or a section statement.
+class Lparser(object):
+
+ def __init__(self, test_0_pass_regex, test_0_fail_regex, section_0_begin_regex=None, section_0_end_regex=None, **kwargs):
+ # Initialize the arguments dictionary
+ if kwargs:
+ self.args = kwargs
+ else:
+ self.args = {}
+
+ # Add the default args to the dictionary
+ self.args['test_0_pass_regex'] = test_0_pass_regex
+ self.args['test_0_fail_regex'] = test_0_fail_regex
+ if section_0_begin_regex:
+ self.args['section_0_begin_regex'] = section_0_begin_regex
+ if section_0_end_regex:
+ self.args['section_0_end_regex'] = section_0_end_regex
+
+ self.test_possible_status = ['pass', 'fail', 'error']
+ self.section_possible_status = ['begin', 'end']
+
+ self.initialized = False
+
+
+ # Initialize the parser with the current configuration
+ def init(self):
+
+ # extra arguments can be added by the user to define new test and section categories. They must follow a pre-defined pattern: <type>_<category_name>_<status>_regex
+ self.test_argument_pattern = "^test_(.+?)_(%s)_regex" % '|'.join(map(str, self.test_possible_status))
+ self.section_argument_pattern = "^section_(.+?)_(%s)_regex" % '|'.join(map(str, self.section_possible_status))
+
+ # Initialize the test and section regex dictionaries
+ self.test_regex = {}
+ self.section_regex ={}
+
+ for arg, value in self.args.items():
+ if not value:
+ raise Exception('The value of provided argument %s is %s. Should have a valid value.' % (arg, value))
+ is_test = re.search(self.test_argument_pattern, arg)
+ is_section = re.search(self.section_argument_pattern, arg)
+ if is_test:
+ if not is_test.group(1) in self.test_regex:
+ self.test_regex[is_test.group(1)] = {}
+ self.test_regex[is_test.group(1)][is_test.group(2)] = re.compile(value)
+ elif is_section:
+ if not is_section.group(1) in self.section_regex:
+ self.section_regex[is_section.group(1)] = {}
+ self.section_regex[is_section.group(1)][is_section.group(2)] = re.compile(value)
+ else:
+ # TODO: Make these call a traceback instead of a simple exception..
+ raise Exception("The provided argument name does not correspond to any valid type. Please give one of the following types:\nfor tests: %s\nfor sections: %s" % (self.test_argument_pattern, self.section_argument_pattern))
+
+ self.initialized = True
+
+ # Parse a line and return a tuple containing the type of result (test/section) and its category, status and name
+ def parse_line(self, line):
+ if not self.initialized:
+ raise Exception("The parser is not initialized..")
+
+ for test_category, test_status_list in self.test_regex.items():
+ for test_status, status_regex in test_status_list.items():
+ test_name = status_regex.search(line)
+ if test_name:
+ return ['test', test_category, test_status, test_name.group(1)]
+
+ for section_category, section_status_list in self.section_regex.items():
+ for section_status, status_regex in section_status_list.items():
+ section_name = status_regex.search(line)
+ if section_name:
+ return ['section', section_category, section_status, section_name.group(1)]
+ return None
+
+
+class Result(object):
+
+ def __init__(self):
+ self.result_dict = {}
+
+ def store(self, section, test, status):
+ if not section in self.result_dict:
+ self.result_dict[section] = []
+
+ self.result_dict[section].append((test, status))
+
+ # Sort tests by test name (the first element of the tuple) for each section. Keeping the order stable is helpful when using git to diff for changes.
+ def sort_tests(self):
+ for package in self.result_dict:
+ sorted_results = sorted(self.result_dict[package], key=lambda tup: tup[0])
+ self.result_dict[package] = sorted_results
+
+ # Log the results as files. The file name is the section name and the contents are the tests in that section.
+ def log_as_files(self, target_dir, test_status):
+ if not isinstance(test_status, list):
+ raise Exception("test_status should be a list. Got " + str(test_status) + " instead.")
+ status_regex = re.compile('|'.join(map(str, test_status)))
+ if not os.path.exists(target_dir):
+ raise Exception("Target directory does not exist: %s" % target_dir)
+
+ for section, test_results in self.result_dict.items():
+ prefix = ''
+ for x in test_status:
+                prefix += x + '.'
+            if section != '':
+ prefix += section
+ section_file = os.path.join(target_dir, prefix)
+            # truncate the file, creating it if it does not exist yet
+ open(section_file, 'w').close()
+ for test_result in test_results:
+ (test_name, status) = test_result
+ # we log only the tests with status in the test_status list
+ match_status = status_regex.search(status)
+ if match_status:
+ ftools.append_file(section_file, status + ": " + test_name)
+
+ # Not yet implemented!
+ def log_to_lava(self):
+ pass
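A minimal usage sketch of the Parser and Result classes introduced above, separate from the patch itself. The import path, log file name, output directory and all constructor keywords except section_0_end_regex are assumptions following the <type>_<category>_<status>_regex pattern the parser expects; only the methods shown in the hunk are relied on.

    import os
    # Assumed module path for the new file; adjust to wherever the patch installs it.
    from oeqa.utils.logparser import Parser, Result

    # Assumed constructor keywords, built to match the <type>_<category>_<status>_regex pattern.
    log_parser = Parser(test_0_pass_regex="^PASS: (.+)",
                        test_0_fail_regex="^FAIL: (.+)",
                        section_0_begin_regex="^BEGIN: (.+)",
                        section_0_end_regex="^END: (.+)")
    log_parser.init()

    results = Result()
    current_section = ''
    with open("ptest.log") as log:                 # assumed log file name
        for line in log:
            parsed = log_parser.parse_line(line)
            if not parsed:
                continue
            kind, category, status, name = parsed
            if kind == 'section':
                current_section = name if status == 'begin' else ''
            else:
                results.store(current_section, name, status)

    results.sort_tests()
    target = "/tmp/test-results"                   # assumed output directory
    if not os.path.isdir(target):
        os.makedirs(target)                        # log_as_files expects an existing directory
    results.log_as_files(target, test_status=['pass', 'fail'])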
diff --git a/meta/recipes-bsp/acpid/acpid.inc b/meta/recipes-bsp/acpid/acpid.inc
index fad7afdf26..e0156192a0 100644
--- a/meta/recipes-bsp/acpid/acpid.inc
+++ b/meta/recipes-bsp/acpid/acpid.inc
@@ -4,30 +4,27 @@ BUGTRACKER = "http://sourceforge.net/tracker/?group_id=33140&atid=407341"
SECTION = "base"
LICENSE = "GPLv2+"
-SRC_URI = "${SOURCEFORGE_MIRROR}/acpid/acpid-${PV}.tar.gz \
+SRC_URI = "${SOURCEFORGE_MIRROR}/acpid2/acpid-${PV}.tar.xz \
file://init \
- file://set_socket_noblock.patch "
+ file://acpid.service \
+ "
-inherit update-rc.d
+inherit autotools update-rc.d systemd
INITSCRIPT_NAME = "acpid"
INITSCRIPT_PARAMS = "defaults"
-# Makefile ignores our CFLAGS, so override it.
-#
-EXTRA_OEMAKE = "CFLAGS='-W -Wall -Werror -Wundef -Wshadow ${CFLAGS} $(DEFS)'"
+SYSTEMD_SERVICE_${PN} = "acpid.service"
-do_compile () {
- oe_runmake 'CC=${CC} -D_GNU_SOURCE' 'CROSS=${HOST_PREFIX}'
-}
-
-do_install () {
- install -d ${D}${bindir}
- oe_runmake 'INSTPREFIX=${D}' install
+do_install_append () {
install -d ${D}${sysconfdir}/init.d
sed -e 's,/usr/sbin,${sbindir},g' ${WORKDIR}/init > ${D}${sysconfdir}/init.d/acpid
chmod 755 ${D}${sysconfdir}/init.d/acpid
install -d ${D}${sysconfdir}/acpi
install -d ${D}${sysconfdir}/acpi/events
+
+ install -d ${D}${systemd_unitdir}/system
+ install -m 0644 ${WORKDIR}/acpid.service ${D}${systemd_unitdir}/system
+ sed -i -e 's,@SBINDIR@,${sbindir},g' ${D}${systemd_unitdir}/system/acpid.service
}
diff --git a/meta/recipes-bsp/acpid/acpid/acpid.service b/meta/recipes-bsp/acpid/acpid/acpid.service
new file mode 100644
index 0000000000..f70e740965
--- /dev/null
+++ b/meta/recipes-bsp/acpid/acpid/acpid.service
@@ -0,0 +1,10 @@
+[Unit]
+Description=ACPI Event Daemon
+After=syslog.target
+
+[Service]
+Type=forking
+ExecStart=@SBINDIR@/acpid
+
+[Install]
+WantedBy=multi-user.target
diff --git a/meta/recipes-bsp/acpid/acpid/set_socket_noblock.patch b/meta/recipes-bsp/acpid/acpid/set_socket_noblock.patch
deleted file mode 100644
index 54d64351d7..0000000000
--- a/meta/recipes-bsp/acpid/acpid/set_socket_noblock.patch
+++ /dev/null
@@ -1,10 +0,0 @@
---- a/acpid.c
-+++ b/acpid.c
-@@ -307,6 +307,7 @@ main(int argc, char **argv)
- non_root_clients++;
- }
- fcntl(cli_fd, F_SETFD, FD_CLOEXEC);
-+ fcntl(cli_fd, F_SETFL, O_NONBLOCK);
- snprintf(buf, sizeof(buf)-1, "%d[%d:%d]",
- creds.pid, creds.uid, creds.gid);
- acpid_add_client(cli_fd, buf);
diff --git a/meta/recipes-bsp/acpid/acpid_1.0.10.bb b/meta/recipes-bsp/acpid/acpid_2.0.23.bb
index df55badbd4..a5317a4daa 100644
--- a/meta/recipes-bsp/acpid/acpid_1.0.10.bb
+++ b/meta/recipes-bsp/acpid/acpid_2.0.23.bb
@@ -3,6 +3,5 @@ require acpid.inc
LIC_FILES_CHKSUM = "file://COPYING;md5=8ca43cbc842c2336e835926c2166c28b \
file://acpid.h;endline=24;md5=324a9cf225ae69ddaad1bf9d942115b5"
-
-SRC_URI[md5sum] = "61156ef32015c56dc0f2e3317f4ae09e"
-SRC_URI[sha256sum] = "22703ce0dd7305aca01bc9ac741659c32b1593f1d6fde492df7f01067a534760"
+SRC_URI[md5sum] = "d7bcdcdefcd53b03730e50ba842554ea"
+SRC_URI[sha256sum] = "4396aaec13510c3a1faa941a15a4b5335b6ae4fbec8438b9249b88c3b66187ee"
diff --git a/meta/recipes-bsp/alsa-state/alsa-state.bb b/meta/recipes-bsp/alsa-state/alsa-state.bb
index d2f8ea2be1..17c2d002d5 100644
--- a/meta/recipes-bsp/alsa-state/alsa-state.bb
+++ b/meta/recipes-bsp/alsa-state/alsa-state.bb
@@ -18,6 +18,13 @@ SRC_URI = "\
file://alsa-state-init \
"
+# As the recipe doesn't inherit systemd.bbclass, we need to set this variable
+# manually to avoid generating unnecessary postinst/preinst scripts.
+python __anonymous() {
+ if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d):
+ d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1")
+}
+
inherit update-rc.d
INITSCRIPT_NAME = "alsa-state"
@@ -54,9 +61,5 @@ pkg_postinst_${PN}() {
then
${sbindir}/alsactl -f ${localstatedir}/lib/alsa/asound.state restore
fi
- # INITSCRIPT_PARAMS changed, so remove the old and
- # install the new setting.
- update-rc.d -f ${INITSCRIPT_NAME} remove
- update-rc.d ${INITSCRIPT_NAME} ${INITSCRIPT_PARAMS}
fi
}
diff --git a/meta/recipes-bsp/grub/files/0001-Fix-build-with-glibc-2.20.patch b/meta/recipes-bsp/grub/files/0001-Fix-build-with-glibc-2.20.patch
new file mode 100644
index 0000000000..46aaa97cf2
--- /dev/null
+++ b/meta/recipes-bsp/grub/files/0001-Fix-build-with-glibc-2.20.patch
@@ -0,0 +1,32 @@
+From 275fe0269ecc00af4788afaf9dc151a50b25645b Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Thu, 28 Aug 2014 19:26:14 -0700
+Subject: [PATCH] Fix build with glibc 2.20
+
+* grub-core/kern/emu/hostfs.c: squashes the below warning
+ warning: #warning "_BSD_SOURCE and _SVID_SOURCE are deprecated, use _DEFAULT_SOURCE"
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+Upstream-Status: Submitted
+
+---
+ grub-core/kern/emu/hostfs.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/grub-core/kern/emu/hostfs.c b/grub-core/kern/emu/hostfs.c
+index 7e725f6..8dd8e65 100644
+--- a/grub-core/kern/emu/hostfs.c
++++ b/grub-core/kern/emu/hostfs.c
+@@ -19,7 +19,7 @@
+
+ #include <config-util.h>
+
+-#define _BSD_SOURCE
++#define _DEFAULT_SOURCE
+ #include <grub/fs.h>
+ #include <grub/file.h>
+ #include <grub/disk.h>
+--
+2.1.0
+
diff --git a/meta/recipes-bsp/grub/files/cfg b/meta/recipes-bsp/grub/files/cfg
index ffffe47638..8ca53d24d7 100644
--- a/meta/recipes-bsp/grub/files/cfg
+++ b/meta/recipes-bsp/grub/files/cfg
@@ -1,2 +1,2 @@
-search.file /EFI/BOOT/grub.cfg root
+search.file ($cmdpath)/EFI/BOOT/grub.cfg root
set prefix=($root)/EFI/BOOT
diff --git a/meta/recipes-bsp/grub/files/grub-efi-fix-with-glibc-2.20.patch b/meta/recipes-bsp/grub/files/grub-efi-fix-with-glibc-2.20.patch
new file mode 100644
index 0000000000..4f12628a4b
--- /dev/null
+++ b/meta/recipes-bsp/grub/files/grub-efi-fix-with-glibc-2.20.patch
@@ -0,0 +1,32 @@
+From eb6368e65f6988eebad26cebdec057f797bceb40 Mon Sep 17 00:00:00 2001
+From: Robert Yang <liezhi.yang@windriver.com>
+Date: Tue, 9 Sep 2014 00:02:30 -0700
+Subject: [PATCH] Fix build with glibc 2.20
+
+* grub-core/kern/emu/hostfs.c: squahes below warning
+ warning: #warning "_BSD_SOURCE and _SVID_SOURCE are deprecated, use _DEFAULT_SOURCE"
+
+Upstream-Status: Submitted
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
+---
+ grub-core/kern/emu/hostfs.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/grub-core/kern/emu/hostfs.c b/grub-core/kern/emu/hostfs.c
+index 3cb089c..a51ee32 100644
+--- a/grub-core/kern/emu/hostfs.c
++++ b/grub-core/kern/emu/hostfs.c
+@@ -16,7 +16,7 @@
+ * You should have received a copy of the GNU General Public License
+ * along with GRUB. If not, see <http://www.gnu.org/licenses/>.
+ */
+-#define _BSD_SOURCE
++#define _DEFAULT_SOURCE
+ #include <grub/fs.h>
+ #include <grub/file.h>
+ #include <grub/disk.h>
+--
+1.7.9.5
+
diff --git a/meta/recipes-bsp/grub/grub-2.00/grub2-remove-sparc64-setup-from-x86-builds.patch b/meta/recipes-bsp/grub/grub-2.00/grub2-remove-sparc64-setup-from-x86-builds.patch
new file mode 100644
index 0000000000..6259a1abe3
--- /dev/null
+++ b/meta/recipes-bsp/grub/grub-2.00/grub2-remove-sparc64-setup-from-x86-builds.patch
@@ -0,0 +1,104 @@
+Upstream-Status: Inappropriate [embedded specific]
+
+remove the grub-sparc64-setup files from the x86 builds.
+
+Signed-Off-By: Nitin A Kamble <nitin.a.kamble@intel.com>
+
+--- a/Makefile.util.am 2014-08-15 11:41:02.441011278 -0700
++++ b/Makefile.util.am 2014-08-15 11:44:26.245021142 -0700
+@@ -4692,9 +4692,9 @@
+ endif
+
+ if COND_emu
+-sbin_PROGRAMS += grub-sparc64-setup
++sbin_PROGRAMS +=
+ if COND_MAN_PAGES
+-man_MANS += grub-sparc64-setup.8
++man_MANS +=
+
+ grub-sparc64-setup.8: grub-sparc64-setup
+ chmod a+x grub-sparc64-setup
+@@ -4715,9 +4715,9 @@
+ endif
+
+ if COND_i386_pc
+-sbin_PROGRAMS += grub-sparc64-setup
++sbin_PROGRAMS +=
+ if COND_MAN_PAGES
+-man_MANS += grub-sparc64-setup.8
++man_MANS +=
+
+ grub-sparc64-setup.8: grub-sparc64-setup
+ chmod a+x grub-sparc64-setup
+@@ -4738,9 +4738,9 @@
+ endif
+
+ if COND_i386_efi
+-sbin_PROGRAMS += grub-sparc64-setup
++sbin_PROGRAMS +=
+ if COND_MAN_PAGES
+-man_MANS += grub-sparc64-setup.8
++man_MANS +=
+
+ grub-sparc64-setup.8: grub-sparc64-setup
+ chmod a+x grub-sparc64-setup
+@@ -4761,9 +4761,9 @@
+ endif
+
+ if COND_i386_qemu
+-sbin_PROGRAMS += grub-sparc64-setup
++sbin_PROGRAMS +=
+ if COND_MAN_PAGES
+-man_MANS += grub-sparc64-setup.8
++man_MANS +=
+
+ grub-sparc64-setup.8: grub-sparc64-setup
+ chmod a+x grub-sparc64-setup
+@@ -4784,9 +4784,9 @@
+ endif
+
+ if COND_i386_coreboot
+-sbin_PROGRAMS += grub-sparc64-setup
++sbin_PROGRAMS +=
+ if COND_MAN_PAGES
+-man_MANS += grub-sparc64-setup.8
++man_MANS +=
+
+ grub-sparc64-setup.8: grub-sparc64-setup
+ chmod a+x grub-sparc64-setup
+@@ -4807,9 +4807,9 @@
+ endif
+
+ if COND_i386_multiboot
+-sbin_PROGRAMS += grub-sparc64-setup
++sbin_PROGRAMS +=
+ if COND_MAN_PAGES
+-man_MANS += grub-sparc64-setup.8
++man_MANS +=
+
+ grub-sparc64-setup.8: grub-sparc64-setup
+ chmod a+x grub-sparc64-setup
+@@ -4830,9 +4830,9 @@
+ endif
+
+ if COND_i386_ieee1275
+-sbin_PROGRAMS += grub-sparc64-setup
++sbin_PROGRAMS +=
+ if COND_MAN_PAGES
+-man_MANS += grub-sparc64-setup.8
++man_MANS +=
+
+ grub-sparc64-setup.8: grub-sparc64-setup
+ chmod a+x grub-sparc64-setup
+@@ -4853,9 +4853,9 @@
+ endif
+
+ if COND_x86_64_efi
+-sbin_PROGRAMS += grub-sparc64-setup
++sbin_PROGRAMS +=
+ if COND_MAN_PAGES
+-man_MANS += grub-sparc64-setup.8
++man_MANS +=
+
+ grub-sparc64-setup.8: grub-sparc64-setup
+ chmod a+x grub-sparc64-setup
diff --git a/meta/recipes-bsp/grub/grub-efi_2.00.bb b/meta/recipes-bsp/grub/grub-efi_2.00.bb
index 7b49493e3e..d213f31a20 100644
--- a/meta/recipes-bsp/grub/grub-efi_2.00.bb
+++ b/meta/recipes-bsp/grub/grub-efi_2.00.bb
@@ -28,6 +28,7 @@ SRC_URI = "ftp://ftp.gnu.org/gnu/grub/grub-${PV}.tar.gz \
file://fix-issue-with-flex-2.5.37.patch \
file://grub-efi-allow-a-compilation-without-mcmodel-large.patch \
file://grub-2.00-add-oe-kernel.patch \
+ file://grub-efi-fix-with-glibc-2.20.patch \
"
SRC_URI[md5sum] = "e927540b6eda8b024fb0391eeaa4091c"
SRC_URI[sha256sum] = "65b39a0558f8c802209c574f4d02ca263a804e8a564bc6caf1cd0fd3b3cc11e3"
diff --git a/meta/recipes-bsp/grub/grub_2.00.bb b/meta/recipes-bsp/grub/grub_2.00.bb
index 3c1ec0b651..e2fcbb98f5 100644
--- a/meta/recipes-bsp/grub/grub_2.00.bb
+++ b/meta/recipes-bsp/grub/grub_2.00.bb
@@ -23,6 +23,7 @@ SRC_URI = "ftp://ftp.gnu.org/gnu/grub/grub-${PV}.tar.gz \
file://fix-issue-with-flex-2.5.37.patch \
file://grub-2.00-add-oe-kernel.patch \
file://fix-endianness-problem.patch \
+ file://grub2-remove-sparc64-setup-from-x86-builds.patch \
"
SRC_URI[md5sum] = "e927540b6eda8b024fb0391eeaa4091c"
diff --git a/meta/recipes-bsp/grub/grub_git.bb b/meta/recipes-bsp/grub/grub_git.bb
index f74bae8eb6..2a6392b82e 100644
--- a/meta/recipes-bsp/grub/grub_git.bb
+++ b/meta/recipes-bsp/grub/grub_git.bb
@@ -21,6 +21,7 @@ SRCREV = "87de66d9d83446ecddb29cfbdf7369102c8e209e"
SRC_URI = "git://git.savannah.gnu.org/grub.git \
file://autogen.sh-exclude-pc.patch \
file://grub-2.00-add-oe-kernel.patch \
+ file://0001-Fix-build-with-glibc-2.20.patch \
"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-bsp/hostap/hostap-utils-0.4.7/ldflags.patch b/meta/recipes-bsp/hostap/hostap-utils-0.4.7/ldflags.patch
new file mode 100644
index 0000000000..c4655cf092
--- /dev/null
+++ b/meta/recipes-bsp/hostap/hostap-utils-0.4.7/ldflags.patch
@@ -0,0 +1,27 @@
+Obey LDFLAGS
+
+Signed-off-by: Christopher Larson <chris_larson@mentor.com>
+Upstream-Status: Pending
+
+--- hostap-utils-0.4.7.orig/Makefile
++++ hostap-utils-0.4.7/Makefile
+@@ -14,17 +14,17 @@ prism2_srec.o: prism2_srec.c util.h
+ hostap_crypt_conf: hostap_crypt_conf.c
+
+ hostap_diag: hostap_diag.o util.o
+- $(CC) -o hostap_diag $(CFLAGS) hostap_diag.o util.o
++ $(CC) -o hostap_diag $(CFLAGS) $(LDFLAGS) hostap_diag.o util.o
+
+ hostap_diag.o: hostap_diag.c util.h
+
+ util.o: util.c util.h
+
+ hostap_io_debug: hostap_io_debug.c
+- $(CC) -o hostap_io_debug $(CFLAGS) hostap_io_debug.c
++ $(CC) -o hostap_io_debug $(CFLAGS) $(LDFLAGS) hostap_io_debug.c
+
+ hostap_rid: hostap_rid.o util.o
+- $(CC) -o hostap_rid $(CFLAGS) hostap_rid.o util.o
++ $(CC) -o hostap_rid $(CFLAGS) $(LDFLAGS) hostap_rid.o util.o
+
+ hostap_rid.o: hostap_rid.c util.h
diff --git a/meta/recipes-bsp/hostap/hostap-utils_0.4.7.bb b/meta/recipes-bsp/hostap/hostap-utils_0.4.7.bb
index 7b224a8a4d..be5481f6f7 100644
--- a/meta/recipes-bsp/hostap/hostap-utils_0.4.7.bb
+++ b/meta/recipes-bsp/hostap/hostap-utils_0.4.7.bb
@@ -1,4 +1,5 @@
require hostap-utils.inc
+SRC_URI += "file://ldflags.patch"
SRC_URI[md5sum] = "afe041581b8f01666e353bec20917c85"
SRC_URI[sha256sum] = "c6f598d8e356c1620fa009eca0a700bf1105e16817eefd77d891994261009355"
diff --git a/meta/recipes-bsp/keymaps/keymaps_1.0.bb b/meta/recipes-bsp/keymaps/keymaps_1.0.bb
index 123db3e0f2..925161b5d2 100644
--- a/meta/recipes-bsp/keymaps/keymaps_1.0.bb
+++ b/meta/recipes-bsp/keymaps/keymaps_1.0.bb
@@ -2,10 +2,7 @@ SUMMARY = "Keyboard maps"
DESCRIPTION = "Keymaps and initscript to set the keymap on bootup."
SECTION = "base"
-# Distro can override initscripts provider
-VIRTUAL-RUNTIME_initscripts ?= "initscripts"
-
-RDEPENDS_${PN} = "${VIRTUAL-RUNTIME_initscripts} kbd"
+RDEPENDS_${PN} = "kbd"
LICENSE = "GPLv2"
LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
@@ -14,6 +11,13 @@ PR = "r31"
INHIBIT_DEFAULT_DEPS = "1"
+# As the recipe doesn't inherit systemd.bbclass, we need to set this variable
+# manually to avoid generating unnecessary postinst/preinst scripts.
+python __anonymous() {
+ if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d):
+ d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1")
+}
+
inherit update-rc.d
SRC_URI = "file://keymap.sh \
@@ -23,6 +27,22 @@ INITSCRIPT_NAME = "keymap.sh"
INITSCRIPT_PARAMS = "start 01 S ."
do_install () {
- install -d ${D}${sysconfdir}/init.d/
- install -m 0755 ${WORKDIR}/keymap.sh ${D}${sysconfdir}/init.d/
+ # Only install the script if 'sysvinit' is in DISTRO_FEATURES
+	# The functionality this script provides can also be achieved by systemd-vconsole-setup.service
+ if ${@bb.utils.contains('DISTRO_FEATURES','sysvinit','true','false',d)}; then
+ install -d ${D}${sysconfdir}/init.d/
+ install -m 0755 ${WORKDIR}/keymap.sh ${D}${sysconfdir}/init.d/
+ fi
}
+
+DEPENDS_append = " ${@bb.utils.contains('DISTRO_FEATURES','systemd','systemd-systemctl-native','',d)}"
+pkg_postinst_${PN} () {
+ if ${@bb.utils.contains('DISTRO_FEATURES','systemd sysvinit','true','false',d)}; then
+ if [ -n "$D" ]; then
+ OPTS="--root=$D"
+ fi
+ systemctl $OPTS mask keymap.service
+ fi
+}
+
+ALLOW_EMPTY_${PN} = "1"
diff --git a/meta/recipes-bsp/pciutils/pciutils/lib-build-fix.patch b/meta/recipes-bsp/pciutils/pciutils/lib-build-fix.patch
deleted file mode 100644
index 413ef0e1e5..0000000000
--- a/meta/recipes-bsp/pciutils/pciutils/lib-build-fix.patch
+++ /dev/null
@@ -1,92 +0,0 @@
-Link directly to lib/libpci.so.3.1.5 will hard code this versioned
-string into lspci and other utilities, which is not desirable and
-won't work. In the other hand, linking to a symbolic link lib/libpci.so
-doesn't have this issue.
-
-Upstream-Status: Pending
-
-7/30/2010 - created by Qing He <qing.he@intel.com>
-
-diff --git a/Makefile b/Makefile
-index 74c570a..31337e0 100644
---- a/Makefile
-+++ b/Makefile
-@@ -56,9 +56,9 @@ PCIINC_INS=lib/config.h lib/header.h lib/pci.h lib/types.h
-
- export
-
--all: lib/$(PCILIB) lspci setpci example lspci.8 setpci.8 pcilib.7 update-pciids update-pciids.8 $(PCI_IDS)
-+all: lib/$(PCILIB_DEV) lspci setpci example lspci.8 setpci.8 pcilib.7 update-pciids update-pciids.8 $(PCI_IDS)
-
--lib/$(PCILIB): $(PCIINC) force
-+lib/$(PCILIB) lib/$(PCILIB_DEV): $(PCIINC) force
- $(MAKE) -C lib all
-
- force:
-@@ -66,8 +66,8 @@ force:
- lib/config.h lib/config.mk:
- cd lib && ./configure
-
--lspci: lspci.o ls-vpd.o ls-caps.o ls-ecaps.o ls-kernel.o ls-tree.o ls-map.o common.o lib/$(PCILIB)
--setpci: setpci.o common.o lib/$(PCILIB)
-+lspci: lspci.o ls-vpd.o ls-caps.o ls-ecaps.o ls-kernel.o ls-tree.o ls-map.o common.o lib/$(PCILIB_DEV)
-+setpci: setpci.o common.o lib/$(PCILIB_DEV)
-
- LSPCIINC=lspci.h pciutils.h $(PCIINC)
- lspci.o: lspci.c $(LSPCIINC)
-@@ -89,7 +89,7 @@ update-pciids: update-pciids.sh
- chmod +x $@
-
- # The example of use of libpci
--example: example.o lib/$(PCILIB)
-+example: example.o lib/$(PCILIB_DEV)
- example.o: example.c $(PCIINC)
-
- %: %.o
-diff --git a/lib/Makefile b/lib/Makefile
-index 1eb06a5..a8dab56 100644
---- a/lib/Makefile
-+++ b/lib/Makefile
-@@ -42,7 +42,7 @@ ifdef PCI_HAVE_PM_NBSD_LIBPCI
- OBJS += nbsd-libpci
- endif
-
--all: $(PCILIB) $(PCILIBPC)
-+all: $(PCILIB_DEV) $(PCILIBPC)
-
- ifeq ($(SHARED),no)
- $(PCILIB): $(addsuffix .o,$(OBJS))
-@@ -53,6 +53,9 @@ else
- CFLAGS += -fPIC -fvisibility=hidden
- $(PCILIB): $(addsuffix .o,$(OBJS))
- $(CC) -shared $(LDFLAGS) $(SONAME) -Wl,--version-script=libpci.ver -o $@ $^ $(LIB_LDLIBS)
-+
-+$(PCILIB_DEV): $(PCILIB)
-+ ln -s $< $@
- endif
-
- $(PCILIBPC): libpci.pc.in
-diff --git a/lib/configure b/lib/configure
-index 27388bc..6c508cf 100755
---- a/lib/configure
-+++ b/lib/configure
-@@ -216,16 +216,18 @@ fi
- echo "Checking whether to build a shared library... $SHARED (set manually)"
- if [ "$SHARED" = no ] ; then
- echo >>$m 'PCILIB=$(LIBNAME).a'
-+ echo >>$m 'PCILIB_DEV=$(LIBNAME).a'
- echo >>$m 'LDLIBS=$(WITH_LIBS)'
- echo >>$m 'LIB_LDLIBS='
- else
- echo >>$m 'PCILIB=$(LIBNAME).so.$(VERSION)'
-+ echo >>$m 'PCILIB_DEV=$(LIBNAME).so'
- # We link the dependencies _to_ the library, so we do not need explicit deps in .pc
- echo >>$m 'LDLIBS='
- echo >>$m 'LIB_LDLIBS=$(WITH_LIBS)'
- echo >>$c '#define PCI_SHARED_LIB'
- if [ "$SHARED" = yes ] ; then
-- echo >>$m 'SONAME=-Wl,-soname,$(LIBNAME).so$(ABI_VERSION)'
-+ echo >>$m 'SONAME=-Wl,-soname -Wl,$(LIBNAME).so$(ABI_VERSION)'
- fi
- fi
- echo >>$m 'PCILIBPC=$(LIBNAME).pc'
diff --git a/meta/recipes-bsp/pciutils/pciutils_3.2.1.bb b/meta/recipes-bsp/pciutils/pciutils_3.3.0.bb
index 852a93e1b1..b713512dc6 100644
--- a/meta/recipes-bsp/pciutils/pciutils_3.2.1.bb
+++ b/meta/recipes-bsp/pciutils/pciutils_3.3.0.bb
@@ -8,16 +8,18 @@ LICENSE = "GPLv2+"
LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
DEPENDS = "zlib kmod"
-SRC_URI = "${KERNELORG_MIRROR}/software/utils/pciutils/pciutils-${PV}.tar.bz2 \
+SRC_URI = "${KERNELORG_MIRROR}/software/utils/pciutils/pciutils-${PV}.tar.xz \
file://configure.patch \
- file://lib-build-fix.patch \
file://guess-fix.patch \
file://makefile.patch"
-SRC_URI[md5sum] = "425b1acad6854cc2bbb06ac8e48e76fc"
-SRC_URI[sha256sum] = "12d52b19042e2fd058af12e7d877bbbce72213cb3a0b5ec7ff0703ac09e3dcde"
+SRC_URI[md5sum] = "3c19adf32a8457983b71ff376ef7dafe"
+SRC_URI[sha256sum] = "413395d4bdc66fdedd6c993ed9083d1dd73812bf2a679d320f73de35c7801301"
-PARALLEL_MAKE = ""
+inherit multilib_header
+
+PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'hwdb', '', d)}"
+PACKAGECONFIG[hwdb] = "HWDB=yes,HWDB=no,udev"
PCI_CONF_FLAG = "ZLIB=yes DNS=yes SHARED=yes"
@@ -25,15 +27,12 @@ PCI_CONF_FLAG = "ZLIB=yes DNS=yes SHARED=yes"
do_configure () {
(
cd lib && \
- ${PCI_CONF_FLAG} ./configure ${PV} ${datadir} ${TARGET_OS} ${TARGET_ARCH}
+	# EXTRA_OECONF for this recipe can only contain 'HWDB=yes/no', so we put it
+	# before ./configure
+ ${PCI_CONF_FLAG} ${EXTRA_OECONF} ./configure ${PV} ${datadir} ${TARGET_OS} ${TARGET_ARCH}
)
}
-do_compile_prepend () {
- # Avoid this error: ln: failed to create symbolic link `libpci.so': File exists
- rm -f ${S}/lib/libpci.so
-}
-
export PREFIX = "${prefix}"
export SBINDIR = "${sbindir}"
export SHAREDIR = "${datadir}"
@@ -49,6 +48,8 @@ do_install () {
install -d ${D}${bindir}
ln -s ../sbin/lspci ${D}${bindir}/lspci
+
+ oe_multilib_header pci/config.h
}
PACKAGES =+ "${PN}-ids libpci libpci-dev libpci-dbg"
diff --git a/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb b/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb
index 21d270442c..8d35187d63 100644
--- a/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb
+++ b/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb
@@ -15,7 +15,7 @@ SRC_URI[sha256sum] = "8ed899032866d88b2933a1d34cc75e8ae42dcde20e1cc21836baaae3d4
inherit pkgconfig autotools
-RDEPENDS_${PN} = "grep"
+RDEPENDS_${PN} = "grep bash"
do_configure_prepend () {
( cd ${S}; autoreconf -f -i -s )
diff --git a/meta/recipes-bsp/setserial/setserial/ldflags.patch b/meta/recipes-bsp/setserial/setserial/ldflags.patch
new file mode 100644
index 0000000000..4313d2c797
--- /dev/null
+++ b/meta/recipes-bsp/setserial/setserial/ldflags.patch
@@ -0,0 +1,24 @@
+Obey LDFLAGS
+
+Signed-off-by: Christopher Larson <chris_larson@mentor.com>
+Upstream-Status: Pending
+
+--- setserial-2.17.orig/Makefile.in
++++ setserial-2.17/Makefile.in
+@@ -13,6 +13,7 @@ STRIP = @STRIP@
+ CC = @CC@
+ RM = rm -f
+ CFLAGS = @CFLAGS@
++LDFLAGS = @LDFLAGS@
+ DEFS = @DEFS@
+ INCS = -I.
+ TAR = tar
+@@ -20,7 +21,7 @@ TAR = tar
+ all: setserial setserial.cat
+
+ setserial: setserial.c
+- $(CC) $(CFLAGS) $(DEFS) $(INCS) setserial.c -o setserial
++ $(CC) $(CFLAGS) $(LDFLAGS) $(DEFS) $(INCS) setserial.c -o setserial
+
+ setserial.cat: setserial.8
+ nroff -man setserial.8 > setserial.cat
diff --git a/meta/recipes-bsp/setserial/setserial_2.17.bb b/meta/recipes-bsp/setserial/setserial_2.17.bb
index f1e8cd3392..8dbddd7dc5 100644
--- a/meta/recipes-bsp/setserial/setserial_2.17.bb
+++ b/meta/recipes-bsp/setserial/setserial_2.17.bb
@@ -14,6 +14,7 @@ inherit autotools-brokensep
SRC_URI = "${SOURCEFORGE_MIRROR}/setserial/${BPN}-${PV}.tar.gz \
file://add_stdlib.patch \
+ file://ldflags.patch \
"
SRC_URI[md5sum] = "c4867d72c41564318e0107745eb7a0f2"
diff --git a/meta/recipes-bsp/u-boot/files/0001-am335x_evm.h-Add-use-DEFAULT_LINUX_BOOT_ENV-environm.patch b/meta/recipes-bsp/u-boot/files/0001-am335x_evm.h-Add-use-DEFAULT_LINUX_BOOT_ENV-environm.patch
deleted file mode 100644
index 77e35bbc7c..0000000000
--- a/meta/recipes-bsp/u-boot/files/0001-am335x_evm.h-Add-use-DEFAULT_LINUX_BOOT_ENV-environm.patch
+++ /dev/null
@@ -1,74 +0,0 @@
-From 5701384cea4a829b772bf7a96a74825b58c22385 Mon Sep 17 00:00:00 2001
-From: Denys Dmytriyenko <denys@ti.com>
-Date: Thu, 17 Apr 2014 12:25:40 -0400
-Subject: [PATCH] am335x_evm.h: Add, use DEFAULT_LINUX_BOOT_ENV environment
- string
-
-Modified version of the patch currently being reviewed for mainline:
-http://patchwork.ozlabs.org/patch/334861/
-
-To deal with a reoccurring problem properly we need to specify addresses
-for the Linux kernel, Flatted Device Tree and ramdisk that obey the
-constraints within the kernel's Documentation/arm/Booting file but also
-make sure that we relocate things within a valid address range.
-
-Signed-off-by: Denys Dmytriyenko <denys@ti.com>
-Signed-off-by: Tom Rini <trini@ti.com>
-
-Upstream-Status: Pending
----
- include/configs/am335x_evm.h | 31 ++++++++++++++++++++++++++-----
- 1 file changed, 26 insertions(+), 5 deletions(-)
-
-diff --git a/include/configs/am335x_evm.h b/include/configs/am335x_evm.h
-index c5a6d4b..01e32b3 100644
---- a/include/configs/am335x_evm.h
-+++ b/include/configs/am335x_evm.h
-@@ -54,10 +54,7 @@
- #define CONFIG_ENV_VARS_UBOOT_RUNTIME_CONFIG
- #ifndef CONFIG_SPL_BUILD
- #define CONFIG_EXTRA_ENV_SETTINGS \
-- "loadaddr=0x80200000\0" \
-- "fdtaddr=0x80F80000\0" \
-- "fdt_high=0xffffffff\0" \
-- "rdaddr=0x81000000\0" \
-+ DEFAULT_LINUX_BOOT_ENV \
- "bootdir=/boot\0" \
- "bootfile=uImage\0" \
- "fdtfile=undefined\0" \
-@@ -197,7 +194,31 @@
- #define CONFIG_SYS_MEMTEST_END (CONFIG_SYS_MEMTEST_START \
- + (8 * 1024 * 1024))
-
--#define CONFIG_SYS_LOAD_ADDR 0x81000000 /* Default load address */
-+/*
-+ * Our DDR memory always starts at 0x80000000 and U-Boot shall have
-+ * relocated itself to higher in memory by the time this value is used.
-+ * However, set this to a 32MB offset to allow for easier Linux kernel
-+ * booting as the default is often used as the kernel load address.
-+ */
-+#define CONFIG_SYS_LOAD_ADDR 0x82000000 /* Default load address */
-+
-+/*
-+ * We setup defaults based on constraints from the Linux kernel, which should
-+ * also be safe elsewhere. We have the default load at 32MB into DDR (for
-+ * the kernel), FDT above 128MB (the maximum location for the end of the
-+ * kernel), and the ramdisk 512KB above that (allowing for hopefully never
-+ * seen large trees). We say all of this must be within the first 256MB
-+ * as that will normally be within the kernel lowmem and thus visible via
-+ * bootm_size and we only run on platforms with 256MB or more of memory.
-+ */
-+#define DEFAULT_LINUX_BOOT_ENV \
-+ "loadaddr=0x82000000\0" \
-+ "kernel_addr_r=0x82000000\0" \
-+ "fdtaddr=0x88000000\0" \
-+ "fdt_addr_r=0x88000000\0" \
-+ "rdaddr=0x88080000\0" \
-+ "ramdisk_addr_r=0x88080000\0" \
-+ "bootm_size=0x10000000\0"
-
- #define CONFIG_MMC
- #define CONFIG_GENERIC_MMC
---
-1.9.2
-
diff --git a/meta/recipes-bsp/u-boot/u-boot-fw-utils-cross_2013.07.bb b/meta/recipes-bsp/u-boot/u-boot-fw-utils-cross_2013.07.bb
deleted file mode 100644
index e6163972dc..0000000000
--- a/meta/recipes-bsp/u-boot/u-boot-fw-utils-cross_2013.07.bb
+++ /dev/null
@@ -1,38 +0,0 @@
-SUMMARY = "U-Boot bootloader fw_printenv/setenv utilities"
-LICENSE = "GPLv2+"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb"
-SECTION = "bootloader"
-DEPENDS = "mtd-utils"
-
-# This revision corresponds to the tag "v2013.07"
-# We use the revision in order to avoid having to fetch it from the
-# repo during parse
-SRCREV = "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c"
-
-PV = "v2013.07+git${SRCPV}"
-
-SRC_URI = "git://git.denx.de/u-boot.git;branch=master;protocol=git"
-
-S = "${WORKDIR}/git"
-
-inherit uboot-config cross
-
-EXTRA_OEMAKE = 'CROSS_COMPILE=${TARGET_PREFIX} CC="${TARGET_PREFIX}gcc ${TOOLCHAIN_OPTIONS}"'
-
-do_compile () {
- oe_runmake ${UBOOT_MACHINE}
- oe_runmake env
-}
-
-do_install () {
- install -d ${D}${bindir_cross}
- install -m 755 ${S}/tools/env/fw_printenv ${D}${bindir_cross}/fw_printenv
- install -m 755 ${S}/tools/env/fw_printenv ${D}${bindir_cross}/fw_setenv
-}
-
-SYSROOT_PREPROCESS_FUNCS = "uboot_fw_utils_cross"
-uboot_fw_utils_cross() {
- sysroot_stage_dir ${D}${bindir_cross} ${SYSROOT_DESTDIR}${bindir_cross}
-}
-
-PACKAGE_ARCH = "${MACHINE_ARCH}"
diff --git a/meta/recipes-bsp/u-boot/u-boot-fw-utils-cross_2014.07.bb b/meta/recipes-bsp/u-boot/u-boot-fw-utils-cross_2014.07.bb
new file mode 100644
index 0000000000..d1f1f9a38b
--- /dev/null
+++ b/meta/recipes-bsp/u-boot/u-boot-fw-utils-cross_2014.07.bb
@@ -0,0 +1,38 @@
+SUMMARY = "U-Boot bootloader fw_printenv/setenv utilities"
+LICENSE = "GPLv2+"
+LIC_FILES_CHKSUM = "file://Licenses/README;md5=025bf9f768cbcb1a165dbe1a110babfb"
+SECTION = "bootloader"
+DEPENDS = "mtd-utils"
+
+# This revision corresponds to the tag "v2014.07"
+# We use the revision in order to avoid having to fetch it from the
+# repo during parse
+SRCREV = "524123a70761110c5cf3ccc5f52f6d4da071b959"
+
+PV = "v2014.07+git${SRCPV}"
+
+SRC_URI = "git://git.denx.de/u-boot.git;branch=master;protocol=git"
+
+S = "${WORKDIR}/git"
+
+inherit uboot-config cross
+
+EXTRA_OEMAKE = 'CROSS_COMPILE=${TARGET_PREFIX} CC="${TARGET_PREFIX}gcc ${TARGET_CC_ARCH} ${TOOLCHAIN_OPTIONS}"'
+
+do_compile () {
+ oe_runmake ${UBOOT_MACHINE}
+ oe_runmake env
+}
+
+do_install () {
+ install -d ${D}${bindir_cross}
+ install -m 755 ${S}/tools/env/fw_printenv ${D}${bindir_cross}/fw_printenv
+ install -m 755 ${S}/tools/env/fw_printenv ${D}${bindir_cross}/fw_setenv
+}
+
+SYSROOT_PREPROCESS_FUNCS = "uboot_fw_utils_cross"
+uboot_fw_utils_cross() {
+ sysroot_stage_dir ${D}${bindir_cross} ${SYSROOT_DESTDIR}${bindir_cross}
+}
+
+PACKAGE_ARCH = "${MACHINE_ARCH}"
diff --git a/meta/recipes-bsp/u-boot/u-boot-fw-utils_2013.07.bb b/meta/recipes-bsp/u-boot/u-boot-fw-utils_2013.07.bb
deleted file mode 100644
index 89fed8bee4..0000000000
--- a/meta/recipes-bsp/u-boot/u-boot-fw-utils_2013.07.bb
+++ /dev/null
@@ -1,35 +0,0 @@
-SUMMARY = "U-Boot bootloader fw_printenv/setenv utilities"
-LICENSE = "GPLv2+"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb"
-SECTION = "bootloader"
-DEPENDS = "mtd-utils"
-
-# This revision corresponds to the tag "v2013.07"
-# We use the revision in order to avoid having to fetch it from the
-# repo during parse
-SRCREV = "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c"
-
-PV = "v2013.07+git${SRCPV}"
-
-SRC_URI = "git://git.denx.de/u-boot.git;branch=master;protocol=git"
-
-S = "${WORKDIR}/git"
-
-EXTRA_OEMAKE = 'HOSTCC="${CC}" HOSTSTRIP="true"'
-
-inherit uboot-config
-
-do_compile () {
- oe_runmake ${UBOOT_MACHINE}
- oe_runmake env
-}
-
-do_install () {
- install -d ${D}${base_sbindir}
- install -d ${D}${sysconfdir}
- install -m 755 ${S}/tools/env/fw_printenv ${D}${base_sbindir}/fw_printenv
- install -m 755 ${S}/tools/env/fw_printenv ${D}${base_sbindir}/fw_setenv
- install -m 0644 ${S}/tools/env/fw_env.config ${D}${sysconfdir}/fw_env.config
-}
-
-PACKAGE_ARCH = "${MACHINE_ARCH}"
diff --git a/meta/recipes-bsp/u-boot/u-boot-fw-utils_2014.07.bb b/meta/recipes-bsp/u-boot/u-boot-fw-utils_2014.07.bb
new file mode 100644
index 0000000000..a626c9599a
--- /dev/null
+++ b/meta/recipes-bsp/u-boot/u-boot-fw-utils_2014.07.bb
@@ -0,0 +1,35 @@
+SUMMARY = "U-Boot bootloader fw_printenv/setenv utilities"
+LICENSE = "GPLv2+"
+LIC_FILES_CHKSUM = "file://Licenses/README;md5=025bf9f768cbcb1a165dbe1a110babfb"
+SECTION = "bootloader"
+DEPENDS = "mtd-utils"
+
+# This revision corresponds to the tag "v2014.07"
+# We use the revision in order to avoid having to fetch it from the
+# repo during parse
+SRCREV = "524123a70761110c5cf3ccc5f52f6d4da071b959"
+
+PV = "v2014.07+git${SRCPV}"
+
+SRC_URI = "git://git.denx.de/u-boot.git;branch=master;protocol=git"
+
+S = "${WORKDIR}/git"
+
+INSANE_SKIP_${PN} = "already-stripped"
+
+inherit uboot-config
+
+do_compile () {
+ oe_runmake ${UBOOT_MACHINE}
+ oe_runmake env
+}
+
+do_install () {
+ install -d ${D}${base_sbindir}
+ install -d ${D}${sysconfdir}
+ install -m 755 ${S}/tools/env/fw_printenv ${D}${base_sbindir}/fw_printenv
+ install -m 755 ${S}/tools/env/fw_printenv ${D}${base_sbindir}/fw_setenv
+ install -m 0644 ${S}/tools/env/fw_env.config ${D}${sysconfdir}/fw_env.config
+}
+
+PACKAGE_ARCH = "${MACHINE_ARCH}"
diff --git a/meta/recipes-bsp/u-boot/u-boot-mkimage_2013.07.bb b/meta/recipes-bsp/u-boot/u-boot-mkimage_2013.07.bb
deleted file mode 100644
index 93c8102e73..0000000000
--- a/meta/recipes-bsp/u-boot/u-boot-mkimage_2013.07.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-SUMMARY = "U-Boot bootloader image creation tool"
-LICENSE = "GPLv2+"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb"
-SECTION = "bootloader"
-
-# This revision corresponds to the tag "v2013.07"
-# We use the revision in order to avoid having to fetch it from the
-# repo during parse
-SRCREV = "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c"
-
-PV = "v2013.07+git${SRCPV}"
-
-SRC_URI = "git://git.denx.de/u-boot.git;branch=master;protocol=git"
-
-S = "${WORKDIR}/git"
-
-EXTRA_OEMAKE = 'HOSTCC="${CC}" HOSTLD="${LD}" HOSTLDFLAGS="${LDFLAGS}" HOSTSTRIP=true'
-
-do_compile () {
- # Make sure the recompile is OK
- rm -f ${B}/tools/.depend
-
- oe_runmake tools
-}
-
-do_install () {
- install -d ${D}${bindir}
- install -m 0755 tools/mkimage ${D}${bindir}/uboot-mkimage
- ln -sf uboot-mkimage ${D}${bindir}/mkimage
-}
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-bsp/u-boot/u-boot-mkimage_2014.07.bb b/meta/recipes-bsp/u-boot/u-boot-mkimage_2014.07.bb
new file mode 100644
index 0000000000..eabf680ec6
--- /dev/null
+++ b/meta/recipes-bsp/u-boot/u-boot-mkimage_2014.07.bb
@@ -0,0 +1,34 @@
+SUMMARY = "U-Boot bootloader image creation tool"
+LICENSE = "GPLv2+"
+LIC_FILES_CHKSUM = "file://Licenses/README;md5=025bf9f768cbcb1a165dbe1a110babfb"
+SECTION = "bootloader"
+
+# This revision corresponds to the tag "v2014.07"
+# We use the revision in order to avoid having to fetch it from the
+# repo during parse
+SRCREV = "524123a70761110c5cf3ccc5f52f6d4da071b959"
+
+PV = "v2014.07+git${SRCPV}"
+
+SRC_URI = "git://git.denx.de/u-boot.git;branch=master;protocol=git"
+
+S = "${WORKDIR}/git"
+
+EXTRA_OEMAKE = 'HOSTCC="${CC}" HOSTLD="${LD}" HOSTLDFLAGS="${LDFLAGS}" HOSTSTRIP=true'
+
+do_compile () {
+ # Make sure the recompile is OK
+ rm -f ${B}/tools/.depend
+
+ make HOSTCC="${BUILD_CC}" HOSTLD="${BUILD_LD}" HOSTLDFLAGS="${BUILD_LDFLAGS}" HOSTSTRIP=true dot-config=0 scripts_basic
+ sed 's/^tools-only: scripts_basic /tools-only: /' -i Makefile
+ oe_runmake tools-only
+}
+
+do_install () {
+ install -d ${D}${bindir}
+ install -m 0755 tools/mkimage ${D}${bindir}/uboot-mkimage
+ ln -sf uboot-mkimage ${D}${bindir}/mkimage
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-bsp/u-boot/u-boot.inc b/meta/recipes-bsp/u-boot/u-boot.inc
index 53c87c28f0..c695b73b48 100644
--- a/meta/recipes-bsp/u-boot/u-boot.inc
+++ b/meta/recipes-bsp/u-boot/u-boot.inc
@@ -3,6 +3,15 @@ HOMEPAGE = "http://www.denx.de/wiki/U-Boot/WebHome"
SECTION = "bootloaders"
PROVIDES = "virtual/bootloader"
+LICENSE = "GPLv2+"
+LIC_FILES_CHKSUM = "file://Licenses/README;md5=025bf9f768cbcb1a165dbe1a110babfb"
+
+SRC_URI = "git://git.denx.de/u-boot.git;branch=master"
+
+S = "${WORKDIR}/git"
+
+PACKAGE_ARCH = "${MACHINE_ARCH}"
+
inherit uboot-config deploy
EXTRA_OEMAKE = 'CROSS_COMPILE=${TARGET_PREFIX} CC="${TARGET_PREFIX}gcc ${TOOLCHAIN_OPTIONS}"'
@@ -14,7 +23,7 @@ UBOOT_LOCALVERSION ?= ""
# Some versions of u-boot use .bin and others use .img. By default use .bin
# but enable individual recipes to change this value.
-UBOOT_SUFFIX ?= "bin"
+UBOOT_SUFFIX ??= "bin"
UBOOT_IMAGE ?= "u-boot-${MACHINE}-${PV}-${PR}.${UBOOT_SUFFIX}"
UBOOT_BINARY ?= "u-boot.${UBOOT_SUFFIX}"
UBOOT_SYMLINK ?= "u-boot-${MACHINE}.${UBOOT_SUFFIX}"
diff --git a/meta/recipes-bsp/u-boot/u-boot_2013.07.bb b/meta/recipes-bsp/u-boot/u-boot_2013.07.bb
deleted file mode 100644
index f8a88568d2..0000000000
--- a/meta/recipes-bsp/u-boot/u-boot_2013.07.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-require u-boot.inc
-
-# To build u-boot for your machine, provide the following lines in your machine
-# config, replacing the assignments as appropriate for your machine.
-# UBOOT_MACHINE = "omap3_beagle_config"
-# UBOOT_ENTRYPOINT = "0x80008000"
-# UBOOT_LOADADDRESS = "0x80008000"
-
-LICENSE = "GPLv2+"
-LIC_FILES_CHKSUM = "file://COPYING;md5=1707d6db1d42237583f50183a5651ecb \
- file://README;beginline=1;endline=22;md5=78b195c11cb6ef63e6985140db7d7bab"
-
-# This revision corresponds to the tag "v2013.07"
-# We use the revision in order to avoid having to fetch it from the repo during parse
-SRCREV = "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c"
-
-PV = "v2013.07+git${SRCPV}"
-
-SRC_URI = "git://git.denx.de/u-boot.git;branch=master \
- file://0001-am335x_evm.h-Add-use-DEFAULT_LINUX_BOOT_ENV-environm.patch \
-"
-
-S = "${WORKDIR}/git"
-
-PACKAGE_ARCH = "${MACHINE_ARCH}"
diff --git a/meta/recipes-bsp/u-boot/u-boot_2014.07.bb b/meta/recipes-bsp/u-boot/u-boot_2014.07.bb
new file mode 100644
index 0000000000..44cc52eb6c
--- /dev/null
+++ b/meta/recipes-bsp/u-boot/u-boot_2014.07.bb
@@ -0,0 +1,9 @@
+require u-boot.inc
+
+DEPENDS += "dtc-native"
+
+# This revision corresponds to the tag "v2014.07"
+# We use the revision in order to avoid having to fetch it from the repo during parse
+SRCREV = "524123a70761110c5cf3ccc5f52f6d4da071b959"
+
+PV = "v2014.07+git${SRCPV}"
diff --git a/meta/recipes-bsp/v86d/v86d_0.1.10.bb b/meta/recipes-bsp/v86d/v86d_0.1.10.bb
index 08438bea47..f50207212f 100644
--- a/meta/recipes-bsp/v86d/v86d_0.1.10.bb
+++ b/meta/recipes-bsp/v86d/v86d_0.1.10.bb
@@ -49,4 +49,21 @@ do_install () {
fi
}
+# As the recipe doesn't inherit systemd.bbclass, we need to set this variable
+# manually to avoid generating unnecessary postinst/preinst scripts.
+python __anonymous() {
+ if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d):
+ d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1")
+}
+
inherit update-rc.d
+
+DEPENDS_append = " ${@bb.utils.contains('DISTRO_FEATURES','systemd','systemd-systemctl-native','',d)}"
+pkg_postinst_${PN} () {
+ if ${@bb.utils.contains('DISTRO_FEATURES','systemd sysvinit','true','false',d)}; then
+ if [ -n "$D" ]; then
+ OPTS="--root=$D"
+ fi
+ systemctl $OPTS mask fbsetup.service
+ fi
+}
diff --git a/meta/recipes-connectivity/avahi/avahi-ui_0.6.31.bb b/meta/recipes-connectivity/avahi/avahi-ui_0.6.31.bb
index 070530ec3f..eea4d70fab 100644
--- a/meta/recipes-connectivity/avahi/avahi-ui_0.6.31.bb
+++ b/meta/recipes-connectivity/avahi/avahi-ui_0.6.31.bb
@@ -45,7 +45,11 @@ RDEPENDS_python-avahi = "python-core python-dbus"
do_install_append () {
rm ${D}${sysconfdir} -rf
rm ${D}${base_libdir} -rf
- rm ${D}${base_libdir} -rf
+ rm ${D}${systemd_unitdir} -rf
+	# ${systemd_unitdir} is /lib/systemd, so we need to rmdir /lib here,
+	# not ${base_libdir}. Note that /lib may
+	# not exist without systemd.
+ [ ! -d ${D}/lib ] || rmdir ${D}/lib --ignore-fail-on-non-empty
rm ${D}${bindir}/avahi-b*
rm ${D}${bindir}/avahi-p*
rm ${D}${bindir}/avahi-r*
diff --git a/meta/recipes-connectivity/bind/bind/bind9 b/meta/recipes-connectivity/bind/bind/bind9
new file mode 100644
index 0000000000..968679ff7f
--- /dev/null
+++ b/meta/recipes-connectivity/bind/bind/bind9
@@ -0,0 +1,2 @@
+# startup options for the server
+OPTIONS="-u bind"
diff --git a/meta/recipes-connectivity/bind/bind/bind9_9_5-CVE-2014-8500.patch b/meta/recipes-connectivity/bind/bind/bind9_9_5-CVE-2014-8500.patch
new file mode 100644
index 0000000000..62142d2313
--- /dev/null
+++ b/meta/recipes-connectivity/bind/bind/bind9_9_5-CVE-2014-8500.patch
@@ -0,0 +1,990 @@
+From 603a0e2637b35a2da820bc807f69bcf09c682dce Mon Sep 17 00:00:00 2001
+From: Evan Hunt <each@isc.org>
+Date: Mon, 17 Nov 2014 23:49:07 -0800
+Subject: [PATCH] [v9_9] limit recursion depth and iterative queries
+
+4006. [security] A flaw in delegation handling could be exploited
+ to put named into an infinite loop. This has
+ been addressed by placing limits on the number
+ of levels of recursion named will allow (default 7),
+ and the number of iterative queries that it will
+ send (default 50) before terminating a recursive
+ query (CVE-2014-8500).
+
+ The recursion depth limit is configured via the
+ "max-recursion-depth" option. [RT #35780]
+
+Upstream-Status: Backport
+
+Signed-off-by: Sona Sarmadi <sona.sarmadi@enea.com>
+---
+ bin/named/config.c | 3 +-
+ bin/named/include/named/query.h | 2 -
+ bin/named/query.c | 7 ++-
+ bin/named/server.c | 5 ++
+ bin/tests/system/many/clean.sh | 7 +++
+ bin/tests/system/many/ns1/named.conf | 33 +++++++++++++
+ bin/tests/system/many/ns2/named.conf | 30 ++++++++++++
+ bin/tests/system/many/ns3/named.conf | 32 +++++++++++++
+ bin/tests/system/many/ns4/named.conf | 30 ++++++++++++
+ bin/tests/system/many/ns5/hints.db | 2 +
+ bin/tests/system/many/ns5/named.conf | 29 ++++++++++++
+ bin/tests/system/many/setup.sh | 75 ++++++++++++++++++++++++++++++
+ bin/tests/system/many/tests.sh | 48 +++++++++++++++++++
+ doc/arm/Bv9ARM-book.xml | 12 +++++
+ lib/dns/adb.c | 58 ++++++++++++++++-------
+ lib/dns/include/dns/adb.h | 8 ++++
+ lib/dns/include/dns/resolver.h | 25 ++++++++++
+ lib/dns/resolver.c | 90 ++++++++++++++++++++++++++++++------
+ lib/isccfg/namedconf.c | 1 +
+ 20 files changed, 471 insertions(+), 37 deletions(-)
+ create mode 100644 bin/tests/system/many/clean.sh
+ create mode 100644 bin/tests/system/many/ns1/named.conf
+ create mode 100644 bin/tests/system/many/ns2/named.conf
+ create mode 100644 bin/tests/system/many/ns3/named.conf
+ create mode 100644 bin/tests/system/many/ns4/named.conf
+ create mode 100644 bin/tests/system/many/ns5/hints.db
+ create mode 100644 bin/tests/system/many/ns5/named.conf
+ create mode 100644 bin/tests/system/many/setup.sh
+ create mode 100644 bin/tests/system/many/tests.sh
+
+diff --git a/bin/named/config.c b/bin/named/config.c
+index 2782720..5ee8c4e 100644
+--- a/bin/named/config.c
++++ b/bin/named/config.c
+@@ -15,8 +15,6 @@
+ * PERFORMANCE OF THIS SOFTWARE.
+ */
+
+-/* $Id: config.c,v 1.123 2012/01/06 23:46:41 tbox Exp $ */
+-
+ /*! \file */
+
+ #include <config.h>
+@@ -160,6 +158,7 @@ options {\n\
+ dnssec-accept-expired no;\n\
+ clients-per-query 10;\n\
+ max-clients-per-query 100;\n\
++ max-recursion-depth 7;\n\
+ zero-no-soa-ttl-cache no;\n\
+ nsec3-test-zone no;\n\
+ allow-new-zones no;\n\
+diff --git a/bin/named/include/named/query.h b/bin/named/include/named/query.h
+index 3beabb8..b5e3900 100644
+--- a/bin/named/include/named/query.h
++++ b/bin/named/include/named/query.h
+@@ -15,8 +15,6 @@
+ * PERFORMANCE OF THIS SOFTWARE.
+ */
+
+-/* $Id: query.h,v 1.45 2011/01/13 04:59:24 tbox Exp $ */
+-
+ #ifndef NAMED_QUERY_H
+ #define NAMED_QUERY_H 1
+
+diff --git a/bin/named/query.c b/bin/named/query.c
+index 982f76d..47bfc6a 100644
+--- a/bin/named/query.c
++++ b/bin/named/query.c
+@@ -3877,12 +3877,11 @@ query_recurse(ns_client_t *client, dns_rdatatype_t qtype, dns_name_t *qname,
+ peeraddr = &client->peeraddr;
+ else
+ peeraddr = NULL;
+- result = dns_resolver_createfetch2(client->view->resolver,
++ result = dns_resolver_createfetch3(client->view->resolver,
+ qname, qtype, qdomain, nameservers,
+ NULL, peeraddr, client->message->id,
+- client->query.fetchoptions,
+- client->task,
+- query_resume, client,
++ client->query.fetchoptions, 0,
++ client->task, query_resume, client,
+ rdataset, sigrdataset,
+ &client->query.fetch);
+
+diff --git a/bin/named/server.c b/bin/named/server.c
+index ac015a4..0559977 100644
+--- a/bin/named/server.c
++++ b/bin/named/server.c
+@@ -3161,6 +3161,11 @@ configure_view(dns_view_t *view, cfg_obj_t *config, cfg_obj_t *vconfig,
+ cfg_obj_asuint32(obj),
+ max_clients_per_query);
+
++ obj = NULL;
++ result = ns_config_get(maps, "max-recursion-depth", &obj);
++ INSIST(result == ISC_R_SUCCESS);
++ dns_resolver_setmaxdepth(view->resolver, cfg_obj_asuint32(obj));
++
+ #ifdef ALLOW_FILTER_AAAA_ON_V4
+ obj = NULL;
+ result = ns_config_get(maps, "filter-aaaa-on-v4", &obj);
+diff --git a/bin/tests/system/many/clean.sh b/bin/tests/system/many/clean.sh
+new file mode 100644
+index 0000000..119b1f5
+--- /dev/null
++++ b/bin/tests/system/many/clean.sh
+@@ -0,0 +1,7 @@
++rm -f ns1/[1-9]*example.tld?.db
++rm -f ns2/[1-9]*example.tld?.db
++rm -f ns1/zones.conf
++rm -f ns2/zones.conf
++rm -f */root.db
++rm -f ns3/tld1.db
++rm -f ns4/tld2.db
+diff --git a/bin/tests/system/many/ns1/named.conf b/bin/tests/system/many/ns1/named.conf
+new file mode 100644
+index 0000000..abc9dca
+--- /dev/null
++++ b/bin/tests/system/many/ns1/named.conf
+@@ -0,0 +1,33 @@
++/*
++ * Copyright (C) 2014 Internet Systems Consortium, Inc. ("ISC")
++ *
++ * Permission to use, copy, modify, and/or distribute this software for any
++ * purpose with or without fee is hereby granted, provided that the above
++ * copyright notice and this permission notice appear in all copies.
++ *
++ * THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
++ * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
++ * AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
++ * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
++ * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
++ * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
++ * PERFORMANCE OF THIS SOFTWARE.
++ */
++
++controls { /* empty */ };
++
++options {
++ query-source address 10.53.0.1;
++ notify-source 10.53.0.1;
++ transfer-source 10.53.0.1;
++ port 5300;
++ pid-file "named.pid";
++ listen-on { 10.53.0.1; };
++ listen-on-v6 { none; };
++ recursion no;
++};
++
++include "zones.conf";
++
++// zone "tld1" { type master; file "tld1.db"; };
++// zone "tld2" { type master; file "tld2.db"; };
+diff --git a/bin/tests/system/many/ns2/named.conf b/bin/tests/system/many/ns2/named.conf
+new file mode 100644
+index 0000000..16266e2
+--- /dev/null
++++ b/bin/tests/system/many/ns2/named.conf
+@@ -0,0 +1,30 @@
++/*
++ * Copyright (C) 2014 Internet Systems Consortium, Inc. ("ISC")
++ *
++ * Permission to use, copy, modify, and/or distribute this software for any
++ * purpose with or without fee is hereby granted, provided that the above
++ * copyright notice and this permission notice appear in all copies.
++ *
++ * THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
++ * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
++ * AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
++ * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
++ * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
++ * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
++ * PERFORMANCE OF THIS SOFTWARE.
++ */
++
++controls { /* empty */ };
++
++options {
++ query-source address 10.53.0.2;
++ notify-source 10.53.0.2;
++ transfer-source 10.53.0.2;
++ port 5300;
++ pid-file "named.pid";
++ listen-on { 10.53.0.2; };
++ listen-on-v6 { none; };
++ recursion no;
++};
++
++include "zones.conf";
+diff --git a/bin/tests/system/many/ns3/named.conf b/bin/tests/system/many/ns3/named.conf
+new file mode 100644
+index 0000000..b950afe
+--- /dev/null
++++ b/bin/tests/system/many/ns3/named.conf
+@@ -0,0 +1,32 @@
++/*
++ * Copyright (C) 2014 Internet Systems Consortium, Inc. ("ISC")
++ *
++ * Permission to use, copy, modify, and/or distribute this software for any
++ * purpose with or without fee is hereby granted, provided that the above
++ * copyright notice and this permission notice appear in all copies.
++ *
++ * THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
++ * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
++ * AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
++ * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
++ * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
++ * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
++ * PERFORMANCE OF THIS SOFTWARE.
++ */
++
++controls { /* empty */ };
++
++options {
++ query-source address 10.53.0.3;
++ notify-source 10.53.0.3;
++ transfer-source 10.53.0.3;
++ port 5300;
++ pid-file "named.pid";
++ listen-on { 10.53.0.3; };
++ listen-on-v6 { none; };
++ recursion no;
++};
++
++zone "." { type master; file "root.db"; };
++
++zone "tld1" { type master; file "tld1.db"; };
+diff --git a/bin/tests/system/many/ns4/named.conf b/bin/tests/system/many/ns4/named.conf
+new file mode 100644
+index 0000000..ca9aa6a
+--- /dev/null
++++ b/bin/tests/system/many/ns4/named.conf
+@@ -0,0 +1,30 @@
++/*
++ * Copyright (C) 2014 Internet Systems Consortium, Inc. ("ISC")
++ *
++ * Permission to use, copy, modify, and/or distribute this software for any
++ * purpose with or without fee is hereby granted, provided that the above
++ * copyright notice and this permission notice appear in all copies.
++ *
++ * THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
++ * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
++ * AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
++ * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
++ * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
++ * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
++ * PERFORMANCE OF THIS SOFTWARE.
++ */
++
++controls { /* empty */ };
++
++options {
++ query-source address 10.53.0.4;
++ notify-source 10.53.0.4;
++ transfer-source 10.53.0.4;
++ port 5300;
++ pid-file "named.pid";
++ listen-on { 10.53.0.4; };
++ listen-on-v6 { none; };
++ recursion no;
++};
++
++zone "tld2" { type master; file "tld2.db"; };
+diff --git a/bin/tests/system/many/ns5/hints.db b/bin/tests/system/many/ns5/hints.db
+new file mode 100644
+index 0000000..c05809b
+--- /dev/null
++++ b/bin/tests/system/many/ns5/hints.db
+@@ -0,0 +1,2 @@
++. 60 in ns ns.nil.
++ns.nil. 60 in A 10.53.0.3
+diff --git a/bin/tests/system/many/ns5/named.conf b/bin/tests/system/many/ns5/named.conf
+new file mode 100644
+index 0000000..fce7d59
+--- /dev/null
++++ b/bin/tests/system/many/ns5/named.conf
+@@ -0,0 +1,29 @@
++/*
++ * Copyright (C) 2014 Internet Systems Consortium, Inc. ("ISC")
++ *
++ * Permission to use, copy, modify, and/or distribute this software for any
++ * purpose with or without fee is hereby granted, provided that the above
++ * copyright notice and this permission notice appear in all copies.
++ *
++ * THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
++ * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
++ * AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
++ * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
++ * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
++ * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
++ * PERFORMANCE OF THIS SOFTWARE.
++ */
++
++controls { /* empty */ };
++
++options {
++ query-source address 10.53.0.5;
++ notify-source 10.53.0.5;
++ transfer-source 10.53.0.5;
++ port 5300;
++ pid-file "named.pid";
++ listen-on { 10.53.0.5; };
++ listen-on-v6 { none; };
++};
++
++zone "." { type hint; file "hints.db"; };
+diff --git a/bin/tests/system/many/setup.sh b/bin/tests/system/many/setup.sh
+new file mode 100644
+index 0000000..80695b5
+--- /dev/null
++++ b/bin/tests/system/many/setup.sh
+@@ -0,0 +1,75 @@
++i=1
++
++cat > ns3/root.db << EOF
++. 60 in soa ns.nil. hostmaster.ns.nil. 1 0 0 0 0
++. 60 in ns ns.nil.
++ns.nil. 60 in a 10.53.0.3
++tld1. 60 in ns ns.tld1.
++ns.tld1. 60 in a 10.53.0.3
++tld2. 60 in ns ns.tld2.
++ns.tld2. 60 in a 10.53.0.4
++EOF
++
++cat > ns3/tld1.db << EOF
++tld1. 60 in soa ns.tld1. hostmaster.ns.tld1. 1 0 0 0 0
++tld1. 60 in ns ns.tld1.
++ns.tld1. 60 in a 10.53.0.1
++EOF
++
++cat > ns4/tld2.db << EOF
++tld2. 60 in soa ns.tld2. hostmaster.ns.tld4. 1 0 0 0 0
++tld2. 60 in ns ns.tld2.
++ns.tld2. 60 in a 10.53.0.1
++EOF
++
++: > ns1/zones.conf
++: > ns2/zones.conf
++
++while [ $i -lt 1000 ]
++do
++j=`expr $i + 1`
++s=`expr $j % 2 + 1`
++n=`expr $i % 2 + 1`
++t=`expr $s + 2`
++
++# i=1 j=2 s=1 n=2
++# i=2 j=3 s=1 n=2
++# i=3 j=4 s=1 n=2
++
++cat > ns1/${i}example.tld${s}.db << EOF
++${i}example.tld${s}. 60 in soa ns.${j}example.tld${n}. hostmaster 1 0 0 0 0
++${i}example.tld${s}. 60 in ns ns.${j}example.tld${n}.
++ns.${i}example.tld${s}. 60 in a 10.53.0.1
++EOF
++
++cat >> ns1/zones.conf << EOF
++zone "${i}example.tld${s}" { type master; file "${i}example.tld${s}.db"; };
++EOF
++
++cat >> ns${t}/tld${s}.db << EOF
++${i}example.tld${s}. 60 in ns ns.${j}example.tld${n}.
++EOF
++
++i=$j
++
++done
++
++j=`expr $i + 1`
++s=`expr $j % 2 + 1`
++n=`expr $s % 2 + 1`
++t=`expr $s + 2`
++
++cat > ns1/${i}example.tld${s}.db << EOF
++${i}example.tld${s}. 60 in soa ns.${i}example.tld${s}. hostmaster 1 0 0 0 0
++${i}example.tld${s}. 60 in ns ns.${i}example.tld${s}.
++ns.${i}example.tld${s}. 60 in a 10.53.0.1
++EOF
++
++cat >> ns1/zones.conf << EOF
++zone "${i}example.tld${s}" { type master; file "${i}example.tld${s}.db"; };
++EOF
++
++cat >> ns${t}/tld${s}.db << EOF
++${i}example.tld${s}. 60 in ns ns.${i}example.tld${s}.
++ns.${i}example.tld${s}. 60 in a 10.53.0.1
++EOF
+diff --git a/bin/tests/system/many/tests.sh b/bin/tests/system/many/tests.sh
+new file mode 100644
+index 0000000..37964e2
+--- /dev/null
++++ b/bin/tests/system/many/tests.sh
+@@ -0,0 +1,48 @@
++#!/bin/sh
++#
++# Copyright (C) 2014 Internet Systems Consortium, Inc. ("ISC")
++#
++# Permission to use, copy, modify, and/or distribute this software for any
++# purpose with or without fee is hereby granted, provided that the above
++# copyright notice and this permission notice appear in all copies.
++#
++# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
++# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
++# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
++# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
++# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
++# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
++# PERFORMANCE OF THIS SOFTWARE.
++
++SYSTEMTESTTOP=..
++. $SYSTEMTESTTOP/conf.sh
++
++status=0
++n=0
++
++n=`expr $n + 1`
++echo "I: attempt lookup 1example.tld2 soa ($n)"
++ret=0
++$DIG +tcp 1example.tld1 soa @10.53.0.5 -p 5300 > dig.out.test$n
++grep "status: SERVFAIL" dig.out.test$n > /dev/null || ret=1
++if [ $ret != 0 ]; then echo "I:failed"; fi
++status=`expr $status + $ret`
++
++n=`expr $n + 1`
++echo "I: attempt lookup 992example.tld2 soa ($n)"
++ret=0
++$DIG +tcp 992example.tld2 soa @10.53.0.5 -p 5300 > dig.out.test$n
++grep "status: SERVFAIL" dig.out.test$n > /dev/null || ret=1
++if [ $ret != 0 ]; then echo "I:failed"; fi
++status=`expr $status + $ret`
++
++n=`expr $n + 1`
++echo "I: attempt lookup 993example.tld1 soa ($n)"
++ret=0
++$DIG +tcp 993example.tld1 soa @10.53.0.5 -p 5300 > dig.out.test$n
++grep "status: NOERROR" dig.out.test$n > /dev/null || ret=1
++if [ $ret != 0 ]; then echo "I:failed"; fi
++status=`expr $status + $ret`
++
++echo "I:exit status: $status"
++exit $status
+diff --git a/doc/arm/Bv9ARM-book.xml b/doc/arm/Bv9ARM-book.xml
+index 9f7bd38..fff4249 100644
+--- a/doc/arm/Bv9ARM-book.xml
++++ b/doc/arm/Bv9ARM-book.xml
+@@ -4861,6 +4861,7 @@ badresp:1,adberr:0,findfail:0,valfail:0]
+ <optional> max-acache-size <replaceable>size_spec</replaceable> ; </optional>
+ <optional> clients-per-query <replaceable>number</replaceable> ; </optional>
+ <optional> max-clients-per-query <replaceable>number</replaceable> ; </optional>
++ <optional> max-recursion-depth <replaceable>number</replaceable> ; </optional>
+ <optional> masterfile-format (<constant>text</constant>|<constant>raw</constant>) ; </optional>
+ <optional> empty-server <replaceable>name</replaceable> ; </optional>
+ <optional> empty-contact <replaceable>name</replaceable> ; </optional>
+@@ -8680,6 +8681,17 @@ avoid-v6-udp-ports { 40000; range 50000 60000; };
+ </listitem>
+ </varlistentry>
+
++ <varlistentry id="max-recursion-depth">
++ <term><command>max-recursion-depth</command></term>
++ <listitem>
++ <para>
++ Sets the maximum number of levels of recursion
++ permitted at any one time while resolving a name.
++ The default is 7.
++ </para>
++ </listitem>
++ </varlistentry>
++
+ <varlistentry>
+ <term><command>notify-delay</command></term>
+ <listitem>
+diff --git a/lib/dns/adb.c b/lib/dns/adb.c
+index 2ccb51e..fe9b3f7 100644
+--- a/lib/dns/adb.c
++++ b/lib/dns/adb.c
+@@ -199,6 +199,7 @@ struct dns_adbfetch {
+ unsigned int magic;
+ dns_fetch_t *fetch;
+ dns_rdataset_t rdataset;
++ unsigned int depth;
+ };
+
+ /*%
+@@ -300,7 +301,7 @@ static inline void violate_locking_hierarchy(isc_mutex_t *, isc_mutex_t *);
+ static isc_boolean_t clean_namehooks(dns_adb_t *, dns_adbnamehooklist_t *);
+ static void clean_target(dns_adb_t *, dns_name_t *);
+ static void clean_finds_at_name(dns_adbname_t *, isc_eventtype_t,
+- unsigned int);
++ isc_uint32_t, unsigned int);
+ static isc_boolean_t check_expire_namehooks(dns_adbname_t *, isc_stdtime_t);
+ static isc_boolean_t check_expire_entry(dns_adb_t *, dns_adbentry_t **,
+ isc_stdtime_t);
+@@ -308,7 +309,7 @@ static void cancel_fetches_at_name(dns_adbname_t *);
+ static isc_result_t dbfind_name(dns_adbname_t *, isc_stdtime_t,
+ dns_rdatatype_t);
+ static isc_result_t fetch_name(dns_adbname_t *, isc_boolean_t,
+- dns_rdatatype_t);
++ unsigned int, dns_rdatatype_t);
+ static inline void check_exit(dns_adb_t *);
+ static void destroy(dns_adb_t *);
+ static isc_boolean_t shutdown_names(dns_adb_t *);
+@@ -984,7 +985,7 @@ kill_name(dns_adbname_t **n, isc_eventtype_t ev) {
+ * Clean up the name's various lists. These two are destructive
+ * in that they will always empty the list.
+ */
+- clean_finds_at_name(name, ev, DNS_ADBFIND_ADDRESSMASK);
++ clean_finds_at_name(name, ev, 0, DNS_ADBFIND_ADDRESSMASK);
+ result4 = clean_namehooks(adb, &name->v4);
+ result6 = clean_namehooks(adb, &name->v6);
+ clean_target(adb, &name->target);
+@@ -1409,7 +1410,7 @@ event_free(isc_event_t *event) {
+ */
+ static void
+ clean_finds_at_name(dns_adbname_t *name, isc_eventtype_t evtype,
+- unsigned int addrs)
++ isc_uint32_t qtotal, unsigned int addrs)
+ {
+ isc_event_t *ev;
+ isc_task_t *task;
+@@ -1469,6 +1470,7 @@ clean_finds_at_name(dns_adbname_t *name, isc_eventtype_t evtype,
+ ev->ev_sender = find;
+ find->result_v4 = find_err_map[name->fetch_err];
+ find->result_v6 = find_err_map[name->fetch6_err];
++ find->qtotal += qtotal;
+ ev->ev_type = evtype;
+ ev->ev_destroy = event_free;
+ ev->ev_destroy_arg = find;
+@@ -1827,6 +1829,7 @@ new_adbfind(dns_adb_t *adb) {
+ h->flags = 0;
+ h->result_v4 = ISC_R_UNEXPECTED;
+ h->result_v6 = ISC_R_UNEXPECTED;
++ h->qtotal = 0;
+ ISC_LINK_INIT(h, publink);
+ ISC_LINK_INIT(h, plink);
+ ISC_LIST_INIT(h->list);
+@@ -2799,6 +2802,19 @@ dns_adb_createfind(dns_adb_t *adb, isc_task_t *task, isc_taskaction_t action,
+ isc_stdtime_t now, dns_name_t *target,
+ in_port_t port, dns_adbfind_t **findp)
+ {
++ return (dns_adb_createfind2(adb, task, action, arg, name,
++ qname, qtype, options, now,
++ target, port, 0, findp));
++}
++
++isc_result_t
++dns_adb_createfind2(dns_adb_t *adb, isc_task_t *task, isc_taskaction_t action,
++ void *arg, dns_name_t *name, dns_name_t *qname,
++ dns_rdatatype_t qtype, unsigned int options,
++ isc_stdtime_t now, dns_name_t *target,
++ in_port_t port, unsigned int depth,
++ dns_adbfind_t **findp)
++{
+ dns_adbfind_t *find;
+ dns_adbname_t *adbname;
+ int bucket;
+@@ -3029,7 +3045,7 @@ dns_adb_createfind(dns_adb_t *adb, isc_task_t *task, isc_taskaction_t action,
+ * Start V4.
+ */
+ if (WANT_INET(wanted_fetches) &&
+- fetch_name(adbname, start_at_zone,
++ fetch_name(adbname, start_at_zone, depth,
+ dns_rdatatype_a) == ISC_R_SUCCESS) {
+ DP(DEF_LEVEL,
+ "dns_adb_createfind: started A fetch for name %p",
+@@ -3040,7 +3056,7 @@ dns_adb_createfind(dns_adb_t *adb, isc_task_t *task, isc_taskaction_t action,
+ * Start V6.
+ */
+ if (WANT_INET6(wanted_fetches) &&
+- fetch_name(adbname, start_at_zone,
++ fetch_name(adbname, start_at_zone, depth,
+ dns_rdatatype_aaaa) == ISC_R_SUCCESS) {
+ DP(DEF_LEVEL,
+ "dns_adb_createfind: "
+@@ -3656,6 +3672,7 @@ fetch_callback(isc_task_t *task, isc_event_t *ev) {
+ isc_result_t result;
+ unsigned int address_type;
+ isc_boolean_t want_check_exit = ISC_FALSE;
++ isc_uint32_t qtotal = 0;
+
+ UNUSED(task);
+
+@@ -3666,6 +3683,8 @@ fetch_callback(isc_task_t *task, isc_event_t *ev) {
+ adb = name->adb;
+ INSIST(DNS_ADB_VALID(adb));
+
++ qtotal = dev->qtotal;
++
+ bucket = name->lock_bucket;
+ LOCK(&adb->namelocks[bucket]);
+
+@@ -3783,6 +3802,12 @@ fetch_callback(isc_task_t *task, isc_event_t *ev) {
+ DP(DEF_LEVEL, "adb: fetch of '%s' %s failed: %s",
+ buf, address_type == DNS_ADBFIND_INET ? "A" : "AAAA",
+ dns_result_totext(dev->result));
++ /*
++ * Don't record a failure unless this is the initial
++ * fetch of a chain.
++ */
++ if (fetch->depth > 1)
++ goto out;
+ /* XXXMLG Don't pound on bad servers. */
+ if (address_type == DNS_ADBFIND_INET) {
+ name->expire_v4 = ISC_MIN(name->expire_v4, now + 300);
+@@ -3814,15 +3839,14 @@ fetch_callback(isc_task_t *task, isc_event_t *ev) {
+ free_adbfetch(adb, &fetch);
+ isc_event_free(&ev);
+
+- clean_finds_at_name(name, ev_status, address_type);
++ clean_finds_at_name(name, ev_status, qtotal, address_type);
+
+ UNLOCK(&adb->namelocks[bucket]);
+ }
+
+ static isc_result_t
+-fetch_name(dns_adbname_t *adbname,
+- isc_boolean_t start_at_zone,
+- dns_rdatatype_t type)
++fetch_name(dns_adbname_t *adbname, isc_boolean_t start_at_zone,
++ unsigned int depth, dns_rdatatype_t type)
+ {
+ isc_result_t result;
+ dns_adbfetch_t *fetch = NULL;
+@@ -3867,12 +3891,14 @@ fetch_name(dns_adbname_t *adbname,
+ result = ISC_R_NOMEMORY;
+ goto cleanup;
+ }
+-
+- result = dns_resolver_createfetch(adb->view->resolver, &adbname->name,
+- type, name, nameservers, NULL,
+- options, adb->task, fetch_callback,
+- adbname, &fetch->rdataset, NULL,
+- &fetch->fetch);
++ fetch->depth = depth;
++
++ result = dns_resolver_createfetch3(adb->view->resolver, &adbname->name,
++ type, name, nameservers, NULL,
++ NULL, 0, options, depth, adb->task,
++ fetch_callback, adbname,
++ &fetch->rdataset, NULL,
++ &fetch->fetch);
+ if (result != ISC_R_SUCCESS)
+ goto cleanup;
+
+diff --git a/lib/dns/include/dns/adb.h b/lib/dns/include/dns/adb.h
+index 35350ff..7501f01 100644
+--- a/lib/dns/include/dns/adb.h
++++ b/lib/dns/include/dns/adb.h
+@@ -118,6 +118,8 @@ struct dns_adbfind {
+ isc_result_t result_v6; /*%< RO: v6 result */
+ ISC_LINK(dns_adbfind_t) publink; /*%< RW: client use */
+
++ isc_uint32_t qtotal;
++
+ /* Private */
+ isc_mutex_t lock; /* locks all below */
+ in_port_t port;
+@@ -334,6 +336,12 @@ dns_adb_createfind(dns_adb_t *adb, isc_task_t *task, isc_taskaction_t action,
+ dns_rdatatype_t qtype, unsigned int options,
+ isc_stdtime_t now, dns_name_t *target,
+ in_port_t port, dns_adbfind_t **find);
++isc_result_t
++dns_adb_createfind2(dns_adb_t *adb, isc_task_t *task, isc_taskaction_t action,
++ void *arg, dns_name_t *name, dns_name_t *qname,
++ dns_rdatatype_t qtype, unsigned int options,
++ isc_stdtime_t now, dns_name_t *target, in_port_t port,
++ unsigned int depth, dns_adbfind_t **find);
+ /*%<
+ * Main interface for clients. The adb will look up the name given in
+ * "name" and will build up a list of found addresses, and perhaps start
+diff --git a/lib/dns/include/dns/resolver.h b/lib/dns/include/dns/resolver.h
+index 4e20eb6..c256049 100644
+--- a/lib/dns/include/dns/resolver.h
++++ b/lib/dns/include/dns/resolver.h
+@@ -82,6 +82,7 @@ typedef struct dns_fetchevent {
+ isc_sockaddr_t * client;
+ dns_messageid_t id;
+ isc_result_t vresult;
++ isc_uint32_t qtotal;
+ } dns_fetchevent_t;
+
+ /*
+@@ -275,6 +276,18 @@ dns_resolver_createfetch2(dns_resolver_t *res, dns_name_t *name,
+ dns_rdataset_t *rdataset,
+ dns_rdataset_t *sigrdataset,
+ dns_fetch_t **fetchp);
++isc_result_t
++dns_resolver_createfetch3(dns_resolver_t *res, dns_name_t *name,
++ dns_rdatatype_t type,
++ dns_name_t *domain, dns_rdataset_t *nameservers,
++ dns_forwarders_t *forwarders,
++ isc_sockaddr_t *client, isc_uint16_t id,
++ unsigned int options, unsigned int depth,
++ isc_task_t *task,
++ isc_taskaction_t action, void *arg,
++ dns_rdataset_t *rdataset,
++ dns_rdataset_t *sigrdataset,
++ dns_fetch_t **fetchp);
+ /*%<
+ * Recurse to answer a question.
+ *
+@@ -576,6 +589,18 @@ dns_resolver_printbadcache(dns_resolver_t *resolver, FILE *fp);
+ * \li resolver to be valid.
+ */
+
++void
++dns_resolver_setmaxdepth(dns_resolver_t *resolver, unsigned int maxdepth);
++unsigned int
++dns_resolver_getmaxdepth(dns_resolver_t *resolver);
++/*%
++ * Get and set how many NS indirections will be followed when looking for
++ * nameserver addresses.
++ *
++ * Requires:
++ * \li resolver to be valid.
++ */
++
+ ISC_LANG_ENDDECLS
+
+ #endif /* DNS_RESOLVER_H */
+diff --git a/lib/dns/resolver.c b/lib/dns/resolver.c
+index e517dad..6a635b2 100644
+--- a/lib/dns/resolver.c
++++ b/lib/dns/resolver.c
+@@ -131,6 +131,16 @@
+ #define MAXIMUM_QUERY_TIMEOUT 30 /* The maximum time in seconds for the whole query to live. */
+ #endif
+
++/* The default maximum number of recursions to follow before giving up. */
++#ifndef DEFAULT_RECURSION_DEPTH
++#define DEFAULT_RECURSION_DEPTH 7
++#endif
++
++/* The default maximum number of iterative queries to allow before giving up. */
++#ifndef DEFAULT_MAX_QUERIES
++#define DEFAULT_MAX_QUERIES 50
++#endif
++
+ /*%
+ * Maximum EDNS0 input packet size.
+ */
+@@ -297,6 +307,7 @@ struct fetchctx {
+ isc_uint64_t duration;
+ isc_boolean_t logged;
+ unsigned int querysent;
++ unsigned int totalqueries;
+ unsigned int referrals;
+ unsigned int lamecount;
+ unsigned int neterr;
+@@ -307,6 +318,7 @@ struct fetchctx {
+ isc_boolean_t timeout;
+ dns_adbaddrinfo_t *addrinfo;
+ isc_sockaddr_t *client;
++ unsigned int depth;
+ };
+
+ #define FCTX_MAGIC ISC_MAGIC('F', '!', '!', '!')
+@@ -419,6 +431,7 @@ struct dns_resolver {
+ isc_timer_t * spillattimer;
+ isc_boolean_t zero_no_soa_ttl;
+ unsigned int query_timeout;
++ unsigned int maxdepth;
+
+ /* Locked by lock. */
+ unsigned int references;
+@@ -1097,6 +1110,7 @@ fctx_sendevents(fetchctx_t *fctx, isc_result_t result, int line) {
+ event->result == DNS_R_NCACHENXRRSET);
+ }
+
++ event->qtotal = fctx->totalqueries;
+ isc_task_sendanddetach(&task, ISC_EVENT_PTR(&event));
+ count++;
+ }
+@@ -1537,7 +1551,9 @@ fctx_query(fetchctx_t *fctx, dns_adbaddrinfo_t *addrinfo,
+ if (result != ISC_R_SUCCESS)
+ goto cleanup_dispatch;
+ }
++
+ fctx->querysent++;
++ fctx->totalqueries++;
+
+ ISC_LIST_APPEND(fctx->queries, query, link);
+ query->fctx->nqueries++;
+@@ -2194,9 +2210,10 @@ fctx_finddone(isc_task_t *task, isc_event_t *event) {
+ */
+ INSIST(!SHUTTINGDOWN(fctx));
+ fctx->attributes &= ~FCTX_ATTR_ADDRWAIT;
+- if (event->ev_type == DNS_EVENT_ADBMOREADDRESSES)
++ if (event->ev_type == DNS_EVENT_ADBMOREADDRESSES) {
+ want_try = ISC_TRUE;
+- else {
++ fctx->totalqueries += find->qtotal;
++ } else {
+ fctx->findfail++;
+ if (fctx->pending == 0) {
+ /*
+@@ -2479,12 +2496,13 @@ findname(fetchctx_t *fctx, dns_name_t *name, in_port_t port,
+ * See what we know about this address.
+ */
+ find = NULL;
+- result = dns_adb_createfind(fctx->adb,
+- res->buckets[fctx->bucketnum].task,
+- fctx_finddone, fctx, name,
+- &fctx->name, fctx->type,
+- options, now, NULL,
+- res->view->dstport, &find);
++ result = dns_adb_createfind2(fctx->adb,
++ res->buckets[fctx->bucketnum].task,
++ fctx_finddone, fctx, name,
++ &fctx->name, fctx->type,
++ options, now, NULL,
++ res->view->dstport,
++ fctx->depth + 1, &find);
+ if (result != ISC_R_SUCCESS) {
+ if (result == DNS_R_ALIAS) {
+ /*
+@@ -2592,6 +2610,11 @@ fctx_getaddresses(fetchctx_t *fctx, isc_boolean_t badcache) {
+
+ res = fctx->res;
+
++ if (fctx->depth > res->maxdepth) {
++ FCTXTRACE("too much NS indirection");
++ return (DNS_R_SERVFAIL);
++ }
++
+ /*
+ * Forwarders.
+ */
+@@ -3030,6 +3053,9 @@ fctx_try(fetchctx_t *fctx, isc_boolean_t retrying, isc_boolean_t badcache) {
+
+ REQUIRE(!ADDRWAIT(fctx));
+
++ if (fctx->totalqueries > DEFAULT_MAX_QUERIES)
++ fctx_done(fctx, DNS_R_SERVFAIL, __LINE__);
++
+ addrinfo = fctx_nextaddress(fctx);
+ if (addrinfo == NULL) {
+ /*
+@@ -3388,6 +3414,7 @@ fctx_start(isc_task_t *task, isc_event_t *event) {
+ * Normal fctx startup.
+ */
+ fctx->state = fetchstate_active;
++ fctx->totalqueries = 0;
+ /*
+ * Reset the control event for later use in shutting down
+ * the fctx.
+@@ -3457,6 +3484,7 @@ fctx_join(fetchctx_t *fctx, isc_task_t *task, isc_sockaddr_t *client,
+ event->fetch = fetch;
+ event->client = client;
+ event->id = id;
++ event->qtotal = 0;
+ dns_fixedname_init(&event->foundname);
+
+ /*
+@@ -3493,7 +3521,8 @@ log_ns_ttl(fetchctx_t *fctx, const char *where) {
+ static isc_result_t
+ fctx_create(dns_resolver_t *res, dns_name_t *name, dns_rdatatype_t type,
+ dns_name_t *domain, dns_rdataset_t *nameservers,
+- unsigned int options, unsigned int bucketnum, fetchctx_t **fctxp)
++ unsigned int options, unsigned int bucketnum, unsigned int depth,
++ fetchctx_t **fctxp)
+ {
+ fetchctx_t *fctx;
+ isc_result_t result;
+@@ -3545,6 +3574,7 @@ fctx_create(dns_resolver_t *res, dns_name_t *name, dns_rdatatype_t type,
+ fctx->state = fetchstate_init;
+ fctx->want_shutdown = ISC_FALSE;
+ fctx->cloned = ISC_FALSE;
++ fctx->depth = depth;
+ ISC_LIST_INIT(fctx->queries);
+ ISC_LIST_INIT(fctx->finds);
+ ISC_LIST_INIT(fctx->altfinds);
+@@ -3563,6 +3593,7 @@ fctx_create(dns_resolver_t *res, dns_name_t *name, dns_rdatatype_t type,
+ fctx->pending = 0;
+ fctx->restarts = 0;
+ fctx->querysent = 0;
++ fctx->totalqueries = 0;
+ fctx->referrals = 0;
+ TIME_NOW(&fctx->start);
+ fctx->timeouts = 0;
+@@ -7781,6 +7812,7 @@ dns_resolver_create(dns_view_t *view,
+ res->spillattimer = NULL;
+ res->zero_no_soa_ttl = ISC_FALSE;
+ res->query_timeout = DEFAULT_QUERY_TIMEOUT;
++ res->maxdepth = DEFAULT_RECURSION_DEPTH;
+ res->nbuckets = ntasks;
+ res->activebuckets = ntasks;
+ res->buckets = isc_mem_get(view->mctx,
+@@ -8219,9 +8251,9 @@ dns_resolver_createfetch(dns_resolver_t *res, dns_name_t *name,
+ dns_rdataset_t *sigrdataset,
+ dns_fetch_t **fetchp)
+ {
+- return (dns_resolver_createfetch2(res, name, type, domain,
++ return (dns_resolver_createfetch3(res, name, type, domain,
+ nameservers, forwarders, NULL, 0,
+- options, task, action, arg,
++ options, 0, task, action, arg,
+ rdataset, sigrdataset, fetchp));
+ }
+
+@@ -8237,6 +8269,25 @@ dns_resolver_createfetch2(dns_resolver_t *res, dns_name_t *name,
+ dns_rdataset_t *sigrdataset,
+ dns_fetch_t **fetchp)
+ {
++ return (dns_resolver_createfetch3(res, name, type, domain,
++ nameservers, forwarders, client, id,
++ options, 0, task, action, arg,
++ rdataset, sigrdataset, fetchp));
++}
++
++isc_result_t
++dns_resolver_createfetch3(dns_resolver_t *res, dns_name_t *name,
++ dns_rdatatype_t type,
++ dns_name_t *domain, dns_rdataset_t *nameservers,
++ dns_forwarders_t *forwarders,
++ isc_sockaddr_t *client, dns_messageid_t id,
++ unsigned int options, unsigned int depth,
++ isc_task_t *task,
++ isc_taskaction_t action, void *arg,
++ dns_rdataset_t *rdataset,
++ dns_rdataset_t *sigrdataset,
++ dns_fetch_t **fetchp)
++{
+ dns_fetch_t *fetch;
+ fetchctx_t *fctx = NULL;
+ isc_result_t result = ISC_R_SUCCESS;
+@@ -8325,11 +8376,12 @@ dns_resolver_createfetch2(dns_resolver_t *res, dns_name_t *name,
+
+ if (fctx == NULL) {
+ result = fctx_create(res, name, type, domain, nameservers,
+- options, bucketnum, &fctx);
++ options, bucketnum, depth, &fctx);
+ if (result != ISC_R_SUCCESS)
+ goto unlock;
+ new_fctx = ISC_TRUE;
+- }
++ } else if (fctx->depth > depth)
++ fctx->depth = depth;
+
+ result = fctx_join(fctx, task, client, id, action, arg,
+ rdataset, sigrdataset, fetch);
+@@ -9101,3 +9153,15 @@ dns_resolver_settimeout(dns_resolver_t *resolver, unsigned int seconds) {
+
+ resolver->query_timeout = seconds;
+ }
++
++void
++dns_resolver_setmaxdepth(dns_resolver_t *resolver, unsigned int maxdepth) {
++ REQUIRE(VALID_RESOLVER(resolver));
++ resolver->maxdepth = maxdepth;
++}
++
++unsigned int
++dns_resolver_getmaxdepth(dns_resolver_t *resolver) {
++ REQUIRE(VALID_RESOLVER(resolver));
++ return (resolver->maxdepth);
++}
+diff --git a/lib/isccfg/namedconf.c b/lib/isccfg/namedconf.c
+index bfd4bab..5f8b037 100644
+--- a/lib/isccfg/namedconf.c
++++ b/lib/isccfg/namedconf.c
+@@ -1393,6 +1393,7 @@ view_clauses[] = {
+ { "max-cache-ttl", &cfg_type_uint32, 0 },
+ { "max-clients-per-query", &cfg_type_uint32, 0 },
+ { "max-ncache-ttl", &cfg_type_uint32, 0 },
++ { "max-recursion-depth", &cfg_type_uint32, 0 },
+ { "max-udp-size", &cfg_type_uint32, 0 },
+ { "min-roots", &cfg_type_uint32, CFG_CLAUSEFLAG_NOTIMP },
+ { "minimal-responses", &cfg_type_boolean, 0 },
+--
+1.9.1
+
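The backported fix above combines a hard cap on iterative queries (DEFAULT_MAX_QUERIES, 50) with a configurable limit on NS indirection (max-recursion-depth, default 7 via DEFAULT_RECURSION_DEPTH). A minimal sketch of exercising it, assuming a local named and a deliberately deep delegation chain under the hypothetical name toodeep.example.test, with "max-recursion-depth 5;" added to the existing options block of named.conf:

    # Mirrors the regression test shipped with the patch: a lookup needing more
    # NS indirection than max-recursion-depth allows should now come back
    # SERVFAIL instead of chasing the chain indefinitely.
    dig +tcp toodeep.example.test soa @127.0.0.1 | grep "status: SERVFAIL"
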
diff --git a/meta/recipes-connectivity/bind/bind/conf.patch b/meta/recipes-connectivity/bind/bind/conf.patch
index 2785c6a22f..57dbd4e155 100644
--- a/meta/recipes-connectivity/bind/bind/conf.patch
+++ b/meta/recipes-connectivity/bind/bind/conf.patch
@@ -55,6 +55,22 @@ diff -urN bind-9.3.1.orig/conf/db.empty bind-9.3.1/conf/db.empty
+ 86400 ) ; Negative Cache TTL
+;
+@ IN NS localhost.
+diff -urN bind-9.3.1.orig/conf/db.255 bind-9.3.1/conf/db.255
+--- bind-9.3.1.orig/conf/db.255 1970-01-01 01:00:00.000000000 +0100
++++ bind-9.3.1/conf/db.255 2005-07-10 22:14:00.000000000 +0200
+@@ -0,0 +1,12 @@
++;
++; BIND reserve data file for broadcast zone
++;
++$TTL 604800
++@ IN SOA localhost. root.localhost. (
++ 1 ; Serial
++ 604800 ; Refresh
++ 86400 ; Retry
++ 2419200 ; Expire
++ 604800 ) ; Negative Cache TTL
++;
++@ IN NS localhost.
diff -urN bind-9.3.1.orig/conf/db.local bind-9.3.1/conf/db.local
--- bind-9.3.1.orig/conf/db.local 1970-01-01 01:00:00.000000000 +0100
+++ bind-9.3.1/conf/db.local 2005-07-10 22:14:00.000000000 +0200
diff --git a/meta/recipes-connectivity/bind/bind/named.service b/meta/recipes-connectivity/bind/bind/named.service
index 1792e414ab..cda56ef015 100644
--- a/meta/recipes-connectivity/bind/bind/named.service
+++ b/meta/recipes-connectivity/bind/bind/named.service
@@ -6,7 +6,7 @@ After=network.target
[Service]
Type=forking
-EnvironmentFile=-/etc/sysconfig/named
+EnvironmentFile=-/etc/default/bind9
PIDFile=/run/named/named.pid
ExecStartPre=@SBINDIR@/generate-rndc-key.sh
diff --git a/meta/recipes-connectivity/bind/bind_9.9.5.bb b/meta/recipes-connectivity/bind/bind_9.9.5.bb
index de10eb8029..8e04f8a040 100644
--- a/meta/recipes-connectivity/bind/bind_9.9.5.bb
+++ b/meta/recipes-connectivity/bind/bind_9.9.5.bb
@@ -15,7 +15,9 @@ SRC_URI = "ftp://ftp.isc.org/isc/bind9/${PV}/${BPN}-${PV}.tar.gz \
file://dont-test-on-host.patch \
file://generate-rndc-key.sh \
file://named.service \
+ file://bind9 \
file://init.d-add-support-for-read-only-rootfs.patch \
+ file://bind9_9_5-CVE-2014-8500.patch \
"
SRC_URI[md5sum] = "e676c65cad5234617ee22f48e328c24e"
@@ -30,7 +32,11 @@ EXTRA_OECONF = " ${ENABLE_IPV6} --with-randomdev=/dev/random --disable-threads \
--with-openssl=${STAGING_LIBDIR}/.. --with-libxml2=${STAGING_LIBDIR}/.. \
--enable-exportlib --with-export-includedir=${includedir} --with-export-libdir=${libdir} \
"
-inherit autotools-brokensep update-rc.d systemd
+inherit autotools-brokensep update-rc.d systemd useradd
+
+USERADD_PACKAGES = "${PN}"
+USERADD_PARAM_${PN} = "--system --home /var/cache/bind --no-create-home \
+ --user-group bind"
INITSCRIPT_NAME = "bind"
INITSCRIPT_PARAMS = "defaults"
@@ -41,11 +47,18 @@ PARALLEL_MAKE = ""
RDEPENDS_${PN} = "python-core"
-PACKAGES_prepend = " ${PN}-utils "
+PACKAGE_BEFORE_PN += "${PN}-utils"
FILES_${PN}-utils = "${bindir}/host ${bindir}/dig"
FILES_${PN}-dev += "${bindir}/isc-config.h"
FILES_${PN} += "${sbindir}/generate-rndc-key.sh"
+do_install_prepend() {
+ # clean host path in isc-config.sh before the hardlink created
+ # by "make install":
+ # bind9-config -> isc-config.sh
+ sed -i -e "s,${STAGING_LIBDIR},${libdir}," ${S}/isc-config.sh
+}
+
do_install_append() {
rm "${D}${bindir}/nslookup"
rm "${D}${mandir}/man1/nslookup.1"
@@ -67,6 +80,9 @@ do_install_append() {
sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \
-e 's,@SBINDIR@,${sbindir},g' \
${D}${systemd_unitdir}/system/named.service
+
+ install -d ${D}${sysconfdir}/default
+ install -m 0644 ${WORKDIR}/bind9 ${D}${sysconfdir}/default
}
CONFFILES_${PN} = " \
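The recipe now pulls in useradd.bbclass, and USERADD_PARAM_${PN} is the literal argument string handed to useradd. At package-install or rootfs-construction time the class therefore runs roughly the following (a sketch; the real call is generated by the class and executed under pseudo against the target root):

    # Create the unprivileged system account the packaged named is meant to run as.
    useradd --system --home /var/cache/bind --no-create-home --user-group bind

The /etc/default/bind9 file installed by do_install_append above is the EnvironmentFile the reworked named.service now references.
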
diff --git a/meta/recipes-connectivity/bluez/bluez-hcidump_2.5.bb b/meta/recipes-connectivity/bluez/bluez-hcidump_2.5.bb
index 5c1f4760a8..3950630a86 100644
--- a/meta/recipes-connectivity/bluez/bluez-hcidump_2.5.bb
+++ b/meta/recipes-connectivity/bluez/bluez-hcidump_2.5.bb
@@ -3,7 +3,9 @@ DESCRIPTION = "The hcidump tool reads raw HCI data coming from and going to a Bl
and displays the commands, events and data in a human-readable form."
SECTION = "console"
+# hcidump was integrated into bluez5
DEPENDS = "bluez4"
+RCONFLICTS_${PN} = "bluez5"
LICENSE = "GPLv2+"
LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a \
file://src/hcidump.c;beginline=1;endline=23;md5=3bee3a162dff43a5be7470710b99fbcf"
diff --git a/meta/recipes-connectivity/bluez/gst-plugin-bluetooth_4.101.bb b/meta/recipes-connectivity/bluez/gst-plugin-bluetooth_4.101.bb
index f2dc0da06c..c71d61253a 100644
--- a/meta/recipes-connectivity/bluez/gst-plugin-bluetooth_4.101.bb
+++ b/meta/recipes-connectivity/bluez/gst-plugin-bluetooth_4.101.bb
@@ -25,6 +25,7 @@ do_install_append() {
rm -rf ${D}${libdir}/alsa-lib
rm -rf ${D}${datadir}
rm -rf ${D}${includedir}
+ rm -rf ${D}${nonarch_base_libdir}
}
FILES_${PN} = "${libdir}/gstreamer-0.10/lib*.so"
diff --git a/meta/recipes-connectivity/bluez5/bluez5.inc b/meta/recipes-connectivity/bluez5/bluez5.inc
index c3dd946517..67aafbbd6f 100644
--- a/meta/recipes-connectivity/bluez5/bluez5.inc
+++ b/meta/recipes-connectivity/bluez5/bluez5.inc
@@ -7,11 +7,14 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e \
file://COPYING.LIB;md5=fb504b67c50331fc78734fed90fb0e09 \
file://src/main.c;beginline=1;endline=24;md5=9bc54b93cd7e17bf03f52513f39f926e"
DEPENDS = "udev libusb dbus-glib glib-2.0 libcheck readline"
+PROVIDES += "bluez-hcidump"
+RPROVIDES_${PN} += "bluez-hcidump"
RCONFLICTS_${PN} = "bluez4"
PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'alsa', 'alsa', '', d)} obex-profiles"
PACKAGECONFIG[obex-profiles] = "--enable-obex,--disable-obex,libical"
+PACKAGECONFIG[experimental] = "--enable-experimental,--disable-experimental,"
SRC_URI = "\
${KERNELORG_MIRROR}/linux/bluetooth/bluez-${PV}.tar.xz \
@@ -26,10 +29,19 @@ EXTRA_OECONF = "\
--disable-cups \
--enable-test \
--enable-datafiles \
- ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', '--with-systemdunitdir=${systemd_unitdir}/system/', '--disable-systemd', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', '--enable-systemd', '--disable-systemd', d)} \
--enable-library \
"
+# bluez5 builds a large number of useful utilities but does not
+# install them. Specify which ones we want put into ${PN}-noinst-tools.
+NOINST_TOOLS_READLINE ??= ""
+NOINST_TOOLS_EXPERIMENTAL ??= ""
+NOINST_TOOLS = " \
+ ${NOINST_TOOLS_READLINE} \
+ ${@bb.utils.contains('PACKAGECONFIG', 'experimental', '${NOINST_TOOLS_EXPERIMENTAL}', '', d)} \
+"
+
do_install_append() {
install -d ${D}${sysconfdir}/bluetooth/
if [ -f ${S}/profiles/audio/audio.conf ]; then
@@ -43,10 +55,15 @@ do_install_append() {
fi
# at_console doesn't really work with the current state of OE, so punch some more holes so people can actually use BT
install -m 0644 ${WORKDIR}/bluetooth.conf ${D}/${sysconfdir}/dbus-1/system.d/
+
+ # Install desired tools that upstream leaves in build area
+ for f in ${NOINST_TOOLS} ; do
+ install -m 755 ${B}/$f ${D}/${bindir}
+ done
}
ALLOW_EMPTY_libasound-module-bluez = "1"
-PACKAGES =+ "libasound-module-bluez ${PN}-testtools ${PN}-obex"
+PACKAGES =+ "libasound-module-bluez ${PN}-testtools ${PN}-obex ${PN}-noinst-tools"
FILES_libasound-module-bluez = "${libdir}/alsa-lib/lib*.so ${datadir}/alsa"
FILES_${PN} += "${libdir}/bluetooth/plugins ${libdir}/bluetooth/plugins/*.so ${base_libdir}/udev/ ${nonarch_base_libdir}/udev/ ${systemd_unitdir}/ ${datadir}/dbus-1"
@@ -63,6 +80,16 @@ SYSTEMD_SERVICE_${PN}-obex = "obex.service"
FILES_${PN}-testtools = "${libdir}/bluez/test/*"
+def get_noinst_tools_paths (d, bb, tools):
+ s = list()
+ bindir = d.getVar("bindir", True)
+ for bdp in tools.split():
+ f = os.path.basename(bdp)
+ s.append("%s/%s" % (bindir, f))
+ return "\n".join(s)
+
+FILES_${PN}-noinst-tools = "${@get_noinst_tools_paths(d, bb, d.getVar('NOINST_TOOLS', True))}"
+
FILES_${PN}-dbg += "\
${libdir}/${BPN}/bluetooth/.debug \
${libdir}/bluetooth/plugins/.debug \
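To make the NOINST_TOOLS machinery concrete: with only attrib/gatttool selected (it is part of NOINST_TOOLS_READLINE in the 5.25 recipe below), the install loop in do_install_append reduces to roughly

    # Copy a tool that bluez5 builds but never installs into the package bindir.
    install -m 755 ${B}/attrib/gatttool ${D}/${bindir}

and get_noinst_tools_paths() maps the same list entry to ${bindir}/gatttool, which is how FILES_${PN}-noinst-tools picks it up. This is a sketch for a single entry; the loop handles every tool listed in NOINST_TOOLS.
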
diff --git a/meta/recipes-connectivity/bluez5/bluez5/bluetooth.conf b/meta/recipes-connectivity/bluez5/bluez5/bluetooth.conf
index e21e72e3ec..26845bb73c 100644
--- a/meta/recipes-connectivity/bluez5/bluez5/bluetooth.conf
+++ b/meta/recipes-connectivity/bluez5/bluez5/bluetooth.conf
@@ -11,6 +11,7 @@
<allow own="org.bluez"/>
<allow send_destination="org.bluez"/>
<allow send_interface="org.bluez.Agent1"/>
+ <allow send_type="method_call"/>
</policy>
</busconfig>
diff --git a/meta/recipes-connectivity/bluez5/bluez5_5.21.bb b/meta/recipes-connectivity/bluez5/bluez5_5.21.bb
deleted file mode 100644
index 5b0102e3f1..0000000000
--- a/meta/recipes-connectivity/bluez5/bluez5_5.21.bb
+++ /dev/null
@@ -1,3 +0,0 @@
-require bluez5.inc
-SRC_URI[md5sum] = "d5a0d05520e013f2394bf1d0bac12889"
-SRC_URI[sha256sum] = "81eb073516f39ff1fa02fa40120e834f40fdb0b772f8f4153a9ab566cc0a1f4f"
diff --git a/meta/recipes-connectivity/bluez5/bluez5_5.25.bb b/meta/recipes-connectivity/bluez5/bluez5_5.25.bb
new file mode 100644
index 0000000000..7d06b16549
--- /dev/null
+++ b/meta/recipes-connectivity/bluez5/bluez5_5.25.bb
@@ -0,0 +1,50 @@
+require bluez5.inc
+SRC_URI[md5sum] = "41bd0c915abde255622150ce6dcae67b"
+SRC_URI[sha256sum] = "5ca62f3f45e2638a0f7a81658d6c8813ee01487436ae8e53e9fe395e23d1fd30"
+
+# noinst programs in Makefile.tools that are conditional on READLINE
+# support
+NOINST_TOOLS_READLINE ?= " \
+ attrib/gatttool \
+ tools/obex-client-tool \
+ tools/obex-server-tool \
+ tools/bluetooth-player \
+ tools/obexctl \
+"
+
+# noinst programs in Makefile.tools that are conditional on EXPERIMENTAL
+# support
+NOINST_TOOLS_EXPERIMENTAL ?= " \
+ emulator/btvirt \
+ emulator/b1ee \
+ emulator/hfp \
+ tools/3dsp \
+ tools/mgmt-tester \
+ tools/gap-tester \
+ tools/l2cap-tester \
+ tools/sco-tester \
+ tools/smp-tester \
+ tools/hci-tester \
+ tools/rfcomm-tester \
+ tools/bdaddr \
+ tools/avinfo \
+ tools/avtest \
+ tools/scotest \
+ tools/amptest \
+ tools/hwdb \
+ tools/hcieventmask \
+ tools/hcisecfilter \
+ tools/btmgmt \
+ tools/btinfo \
+ tools/btattach \
+ tools/btsnoop \
+ tools/btproxy \
+ tools/btiotest \
+ tools/cltest \
+ tools/seq2bseq \
+ tools/hex2hcd \
+ tools/ibeacon \
+ tools/btgatt-client \
+ tools/gatt-service \
+ profiles/iap/iapd \
+"
diff --git a/meta/recipes-connectivity/connman/connman-gnome/connman-gnome-fix-dbus-interface-name.patch b/meta/recipes-connectivity/connman/connman-gnome/connman-gnome-fix-dbus-interface-name.patch
new file mode 100644
index 0000000000..f4049fa3e2
--- /dev/null
+++ b/meta/recipes-connectivity/connman/connman-gnome/connman-gnome-fix-dbus-interface-name.patch
@@ -0,0 +1,187 @@
+connman-gnome: fix dbus interface name
+
+This patch resolves the following error:
+
+"connman-dbus.xml": "connman" is not a valid D-Bus interface name
+
+https://502552.bugs.gentoo.org/attachment.cgi?id=380652
+
+Upstream-Status: Backport
+
+Signed-off-by: Chong Lu <Chong.Lu@windriver.com>
+---
+ common/connman-client.c | 24 ++++++++++++------------
+ common/connman-client.h | 4 ++--
+ common/connman-dbus.c | 6 +++---
+ common/connman-dbus.xml | 2 +-
+ 4 files changed, 18 insertions(+), 18 deletions(-)
+
+diff --git a/common/connman-client.c b/common/connman-client.c
+index c55e25c..9d818b2 100644
+--- a/common/connman-client.c
++++ b/common/connman-client.c
+@@ -289,7 +289,7 @@ gboolean connman_client_set_ipv4(ConnmanClient *client, const gchar *device,
+
+ g_value_init(&value, DBUS_TYPE_G_DICTIONARY);
+ g_value_set_boxed(&value, ipv4);
+- ret = connman_set_property(proxy, "IPv4.Configuration", &value, NULL);
++ ret = net_connman_set_property(proxy, "IPv4.Configuration", &value, NULL);
+
+ g_object_unref(proxy);
+
+@@ -317,7 +317,7 @@ void connman_client_set_powered(ConnmanClient *client, const gchar *device,
+ g_value_set_boolean(&value, powered);
+
+ error = NULL;
+- connman_set_property(proxy, "Powered", &value, &error);
++ net_connman_set_property(proxy, "Powered", &value, &error);
+ if( error )
+ fprintf (stderr, "error: %s\n", error->message);
+
+@@ -325,7 +325,7 @@ void connman_client_set_powered(ConnmanClient *client, const gchar *device,
+ }
+
+ void connman_client_scan(ConnmanClient *client, const gchar *device,
+- connman_scan_reply callback, gpointer user_data)
++ net_connman_scan_reply callback, gpointer user_data)
+ {
+ ConnmanClientPrivate *priv = CONNMAN_CLIENT_GET_PRIVATE(client);
+ DBusGProxy *proxy;
+@@ -339,7 +339,7 @@ void connman_client_scan(ConnmanClient *client, const gchar *device,
+ if (proxy == NULL)
+ return;
+
+- connman_scan_async(proxy, callback, user_data);
++ net_connman_scan_async(proxy, callback, user_data);
+
+ g_object_unref(proxy);
+ }
+@@ -353,7 +353,7 @@ gboolean connman_client_get_offline_status(ConnmanClient *client)
+
+ DBG("client %p", client);
+
+- ret = connman_get_properties(priv->manager, &hash, NULL);
++ ret = net_connman_get_properties(priv->manager, &hash, NULL);
+
+ if (ret == FALSE)
+ goto done;
+@@ -375,7 +375,7 @@ void connman_client_set_offlinemode(ConnmanClient *client, gboolean status)
+ g_value_init(&value, G_TYPE_BOOLEAN);
+ g_value_set_boolean(&value, status);
+
+- connman_set_property(priv->manager, "OfflineMode", &value, NULL);
++ net_connman_set_property(priv->manager, "OfflineMode", &value, NULL);
+ }
+
+ static gboolean network_disconnect(GtkTreeModel *model, GtkTreePath *path,
+@@ -398,7 +398,7 @@ static gboolean network_disconnect(GtkTreeModel *model, GtkTreePath *path,
+ return TRUE;
+
+ if (type == CONNMAN_TYPE_WIFI)
+- connman_disconnect(proxy, NULL);
++ net_connman_disconnect(proxy, NULL);
+
+ g_object_unref(proxy);
+
+@@ -422,13 +422,13 @@ void connman_client_connect(ConnmanClient *client, const gchar *network)
+ if (proxy == NULL)
+ return;
+
+- connman_connect(proxy, NULL);
++ net_connman_connect(proxy, NULL);
+
+ g_object_unref(proxy);
+ }
+
+ void connman_client_connect_async(ConnmanClient *client, const gchar *network,
+- connman_connect_reply callback, gpointer userdata)
++ net_connman_connect_reply callback, gpointer userdata)
+ {
+ ConnmanClientPrivate *priv = CONNMAN_CLIENT_GET_PRIVATE(client);
+ DBusGProxy *proxy;
+@@ -446,7 +446,7 @@ void connman_client_connect_async(ConnmanClient *client, const gchar *network,
+ if (proxy == NULL)
+ goto done;
+
+- connman_connect_async(proxy, callback, userdata);
++ net_connman_connect_async(proxy, callback, userdata);
+
+ done:
+ return;
+@@ -476,7 +476,7 @@ void connman_client_disconnect(ConnmanClient *client, const gchar *network)
+ if (proxy == NULL)
+ return;
+
+- connman_disconnect(proxy, NULL);
++ net_connman_disconnect(proxy, NULL);
+
+ g_object_unref(proxy);
+ }
+@@ -532,7 +532,7 @@ void connman_client_remove(ConnmanClient *client, const gchar *network)
+ if (proxy == NULL)
+ return;
+
+- connman_remove(proxy, NULL);
++ net_connman_remove(proxy, NULL);
+
+ g_object_unref(proxy);
+ }
+diff --git a/common/connman-client.h b/common/connman-client.h
+index 9e2e6d5..98241de 100644
+--- a/common/connman-client.h
++++ b/common/connman-client.h
+@@ -70,13 +70,13 @@ void connman_client_set_powered(ConnmanClient *client, const gchar *device,
+ gboolean connman_client_set_ipv4(ConnmanClient *client, const gchar *device,
+ struct ipv4_config *ipv4_config);
+ void connman_client_scan(ConnmanClient *client, const gchar *device,
+- connman_scan_reply callback, gpointer user_data);
++ net_connman_scan_reply callback, gpointer user_data);
+
+ void connman_client_connect(ConnmanClient *client, const gchar *network);
+ void connman_client_disconnect(ConnmanClient *client, const gchar *network);
+ gchar *connman_client_get_security(ConnmanClient *client, const gchar *network);
+ void connman_client_connect_async(ConnmanClient *client, const gchar *network,
+- connman_connect_reply callback, gpointer userdata);
++ net_connman_connect_reply callback, gpointer userdata);
+ void connman_client_set_remember(ConnmanClient *client, const gchar *network,
+ gboolean remember);
+
+diff --git a/common/connman-dbus.c b/common/connman-dbus.c
+index b82b3e1..543eb43 100644
+--- a/common/connman-dbus.c
++++ b/common/connman-dbus.c
+@@ -655,15 +655,15 @@ DBusGProxy *connman_dbus_create_manager(DBusGConnection *conn,
+
+ DBG("getting manager properties");
+
+- connman_get_properties_async(proxy, manager_properties, store);
++ net_connman_get_properties_async(proxy, manager_properties, store);
+
+ DBG("getting technologies");
+
+- connman_get_technologies_async(proxy, manager_technologies, store);
++ net_connman_get_technologies_async(proxy, manager_technologies, store);
+
+ DBG("getting services");
+
+- connman_get_services_async(proxy, manager_services, store);
++ net_connman_get_services_async(proxy, manager_services, store);
+
+ return proxy;
+ }
+diff --git a/common/connman-dbus.xml b/common/connman-dbus.xml
+index 56b9582..0199d52 100644
+--- a/common/connman-dbus.xml
++++ b/common/connman-dbus.xml
+@@ -1,7 +1,7 @@
+ <?xml version="1.0" encoding="UTF-8" ?>
+
+ <node name="/">
+- <interface name="connman">
++ <interface name="net.connman">
+ <method name="GetProperties">
+ <arg type="a{sv}" direction="out"/>
+ </method>
+--
+1.9.1
+
diff --git a/meta/recipes-connectivity/connman/connman-gnome_0.7.bb b/meta/recipes-connectivity/connman/connman-gnome_0.7.bb
index 6688739766..f5575d2938 100644
--- a/meta/recipes-connectivity/connman/connman-gnome_0.7.bb
+++ b/meta/recipes-connectivity/connman/connman-gnome_0.7.bb
@@ -11,9 +11,10 @@ DEPENDS = "gtk+ dbus-glib intltool-native"
# 0.7 tag
SRCREV = "cf3c325b23dae843c5499a113591cfbc98acb143"
SRC_URI = "git://github.com/connectivity/connman-gnome.git \
- file://0001-Removed-icon-from-connman-gnome-about-applet.patch \
- file://null_check_for_ipv4_config.patch \
- file://images/* \
+ file://0001-Removed-icon-from-connman-gnome-about-applet.patch \
+ file://null_check_for_ipv4_config.patch \
+ file://images/* \
+ file://connman-gnome-fix-dbus-interface-name.patch \
"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-connectivity/connman/connman.inc b/meta/recipes-connectivity/connman/connman.inc
index f121a81f1e..ddabdb96f5 100644
--- a/meta/recipes-connectivity/connman/connman.inc
+++ b/meta/recipes-connectivity/connman/connman.inc
@@ -27,10 +27,10 @@ EXTRA_OECONF += "\
--enable-test \
--disable-polkit \
--enable-client \
- ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', '--with-systemdunitdir=${systemd_unitdir}/system/', '--with-systemdunitdir=', d)} \
"
PACKAGECONFIG ??= "wispr \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'systemd','systemd', '', d)} \
${@bb.utils.contains('DISTRO_FEATURES', 'wifi','wifi', '', d)} \
${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth','bluetooth', '', d)} \
${@bb.utils.contains('DISTRO_FEATURES', '3g','3g', '', d)} \
@@ -40,6 +40,7 @@ PACKAGECONFIG ??= "wispr \
# local.conf or distro config
# PACKAGECONFIG_append_pn-connman = " openvpn vpnc l2tp pptp"
+PACKAGECONFIG[systemd] = "--with-systemdunitdir=${systemd_unitdir}/system/,--with-systemdunitdir="
PACKAGECONFIG[wifi] = "--enable-wifi, --disable-wifi, wpa-supplicant"
PACKAGECONFIG[bluetooth] = "--enable-bluetooth, --disable-bluetooth, bluez4"
PACKAGECONFIG[3g] = "--enable-ofono, --disable-ofono, ofono"
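Moving the unit-directory handling into PACKAGECONFIG[systemd] makes it selectable instead of hard-wired: the option is enabled automatically when the distro carries the systemd DISTRO_FEATURE, and a configuration without that feature could still opt in explicitly, for example (a sketch mirroring the PACKAGECONFIG comment already in the recipe):

    PACKAGECONFIG_append_pn-connman = " systemd"

When the option is on, configure receives --with-systemdunitdir=${systemd_unitdir}/system/; when it is off, it receives the empty --with-systemdunitdir= exactly as before.
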
diff --git a/meta/recipes-connectivity/connman/connman/build-libppp-plugin-without-versioning-info.patch b/meta/recipes-connectivity/connman/connman/build-libppp-plugin-without-versioning-info.patch
deleted file mode 100644
index 8e81f1bc52..0000000000
--- a/meta/recipes-connectivity/connman/connman/build-libppp-plugin-without-versioning-info.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-Upstream-Status: Submitted
-Signed-off-by: Jukka Rissanen <jukka.rissanen@linux.intel.com>
-
-From dd8044ed6ccb468558bab037257e27a409903d86 Mon Sep 17 00:00:00 2001
-From: Jukka Rissanen <jukka.rissanen@linux.intel.com>
-Date: Mon, 21 Jul 2014 15:01:45 +0300
-Subject: [PATCH] scripts: Build libppp-plugin as a plain .so file
-
-There is no need to have module versioning for libppp-plugin.so
-file so build the plugin same way as other vpn plugins.
-
-This issue was seen when building ConnMan for Yocto.
----
- Makefile.plugins | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/Makefile.plugins b/Makefile.plugins
-index 294cae0..8139967 100644
---- a/Makefile.plugins
-+++ b/Makefile.plugins
-@@ -4,7 +4,7 @@ plugin_cflags = -fvisibility=hidden -I$(srcdir)/gdbus \
- plugin_ldflags = -no-undefined -module -avoid-version
-
- script_cflags = -fvisibility=hidden -I$(srcdir)/gdbus \
-- @DBUS_CFLAGS@
-+ @DBUS_CFLAGS@ $(plugin_ldflags) -shared
-
- if LOOPBACK
- builtin_modules += loopback
---
-1.8.3.1
-
diff --git a/meta/recipes-connectivity/connman/connman_1.24.bb b/meta/recipes-connectivity/connman/connman_1.26.bb
index 29e239c671..b93f1cd6c3 100644
--- a/meta/recipes-connectivity/connman/connman_1.24.bb
+++ b/meta/recipes-connectivity/connman/connman_1.26.bb
@@ -4,9 +4,8 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \
file://0001-plugin.h-Change-visibility-to-default-for-debug-symb.patch \
file://add_xuser_dbus_permission.patch \
file://connman \
- file://build-libppp-plugin-without-versioning-info.patch \
"
-SRC_URI[md5sum] = "dd6e1b4d9b9a28d127edb9f9b58bdec1"
-SRC_URI[sha256sum] = "551df7a5f0c6e4d69523dd2b3aa2c54525b323457d5135f64816215bad3dc24c"
+SRC_URI[md5sum] = "ba05b110b7c81e5fa14e8b402ef37a9e"
+SRC_URI[sha256sum] = "7184e4b6d954449ee00a30e188924b3e37a20ad2fd9a0b76a2bdd82c863dcf8a"
RRECOMMENDS_${PN} = "connman-conf"
diff --git a/meta/recipes-connectivity/dhcp/dhcp.inc b/meta/recipes-connectivity/dhcp/dhcp.inc
index 512127cd4c..4949e0201e 100644
--- a/meta/recipes-connectivity/dhcp/dhcp.inc
+++ b/meta/recipes-connectivity/dhcp/dhcp.inc
@@ -14,11 +14,19 @@ DEPENDS = "openssl bind"
SRC_URI = "ftp://ftp.isc.org/isc/dhcp/${PV}/dhcp-${PV}.tar.gz \
file://site.h \
- file://init-relay file://default-relay \
- file://init-server file://default-server \
- file://dhclient.conf file://dhcpd.conf"
+ file://init-relay file://default-relay \
+ file://init-server file://default-server \
+ file://dhclient.conf file://dhcpd.conf \
+ file://dhcpd.service file://dhcrelay.service"
-inherit autotools
+inherit autotools systemd
+
+SYSTEMD_PACKAGES = "${PN}-server ${PN}-relay"
+SYSTEMD_SERVICE_${PN}-server = "dhcpd.service"
+SYSTEMD_AUTO_ENABLE_${PN}-server = "disable"
+
+SYSTEMD_SERVICE_${PN}-relay = "dhcrelay.service"
+SYSTEMD_AUTO_ENABLE_${PN}-relay = "disable"
TARGET_CFLAGS += "-D_GNU_SOURCE"
EXTRA_OECONF = "--with-srv-lease-file=${localstatedir}/lib/dhcp/dhcpd.leases \
@@ -51,6 +59,13 @@ do_install_append () {
mv ${D}${sbindir}/dhclient ${D}${base_sbindir}/
fi
install -m 0755 ${S}/client/scripts/linux ${D}${base_sbindir}/dhclient-script
+
+ # Install systemd unit files
+ install -d ${D}${systemd_unitdir}/system
+ install -m 0644 ${WORKDIR}/dhcpd.service ${D}${systemd_unitdir}/system
+ install -m 0644 ${WORKDIR}/dhcrelay.service ${D}${systemd_unitdir}/system
+ sed -i -e 's,@SBINDIR@,${sbindir},g' ${D}${systemd_unitdir}/system/dhcpd.service ${D}${systemd_unitdir}/system/dhcrelay.service
+ sed -i -e 's,@SYSCONFDIR@,${sysconfdir},g' ${D}${systemd_unitdir}/system/dhcpd.service
}
PACKAGES += "dhcp-server dhcp-server-config dhcp-client dhcp-relay dhcp-omshell"
diff --git a/meta/recipes-connectivity/dhcp/dhcp/replace-ifconfig-route.patch b/meta/recipes-connectivity/dhcp/dhcp/replace-ifconfig-route.patch
new file mode 100644
index 0000000000..61dd6a7186
--- /dev/null
+++ b/meta/recipes-connectivity/dhcp/dhcp/replace-ifconfig-route.patch
@@ -0,0 +1,176 @@
+Found this patch here:
+https://lists.isc.org/pipermail/dhcp-users/2011-January/012910.html
+
+and made some adjustments/updates to make it work with this version.
+It is not clear why this patch was not accepted by the ISC DHCP developers.
+
+Upstream-Status: Pending
+
+Signed-off-by: Muhammad Shakeel <muhammad_shakeel@mentor.com>
+
+--- dhcp-4.2.5-P1/client/scripts/linux.orig 2013-09-04 12:22:55.000000000 +0500
++++ dhcp-4.2.5-P1/client/scripts/linux 2013-09-04 12:52:19.068761518 +0500
+@@ -103,17 +103,11 @@
+ if [ x$old_broadcast_address != x ]; then
+ old_broadcast_arg="broadcast $old_broadcast_address"
+ fi
+-if [ x$new_subnet_mask != x ]; then
+- new_subnet_arg="netmask $new_subnet_mask"
+-fi
+-if [ x$old_subnet_mask != x ]; then
+- old_subnet_arg="netmask $old_subnet_mask"
+-fi
+-if [ x$alias_subnet_mask != x ]; then
+- alias_subnet_arg="netmask $alias_subnet_mask"
++if [ -n "$new_subnet_mask" ]; then
++ new_mask="/$new_subnet_mask"
+ fi
+-if [ x$new_interface_mtu != x ]; then
+- mtu_arg="mtu $new_interface_mtu"
++if [ -n "$alias_subnet_mask" ]; then
++ alias_mask="/$alias_subnet_mask"
+ fi
+ if [ x$IF_METRIC != x ]; then
+ metric_arg="metric $IF_METRIC"
+@@ -127,9 +121,9 @@
+ if [ x$reason = xPREINIT ]; then
+ if [ x$alias_ip_address != x ]; then
+ # Bring down alias interface. Its routes will disappear too.
+- ifconfig $interface:0- inet 0
++ ${ip} -4 addr flush dev ${interface} label ${interface}:0
+ fi
+- ifconfig $interface 0 up
++ ${ip} link set dev ${interface} up
+
+ # We need to give the kernel some time to get the interface up.
+ sleep 1
+@@ -156,25 +150,30 @@
+ if [ x$old_ip_address != x ] && [ x$alias_ip_address != x ] && \
+ [ x$alias_ip_address != x$old_ip_address ]; then
+ # Possible new alias. Remove old alias.
+- ifconfig $interface:0- inet 0
++ ${ip} -4 addr flush dev ${interface} label ${interface}:0
+ fi
+ if [ x$old_ip_address != x ] && [ x$old_ip_address != x$new_ip_address ]; then
+ # IP address changed. Bringing down the interface will delete all routes,
+ # and clear the ARP cache.
+- ifconfig $interface inet 0 down
++ ${ip} -4 addr flush dev ${interface} label ${interface}
+
+ fi
+ if [ x$old_ip_address = x ] || [ x$old_ip_address != x$new_ip_address ] || \
+ [ x$reason = xBOUND ] || [ x$reason = xREBOOT ]; then
+
+- ifconfig $interface inet $new_ip_address $new_subnet_arg \
+- $new_broadcast_arg $mtu_arg
++ ${ip} -4 addr add ${new_ip_address}${new_mask} ${new_broadcast_arg} \
++ dev ${interface} label ${interface}
++ if [ -n "$new_interface_mtu" ]; then
++ # set MTU
++ ${ip} link set dev ${interface} mtu ${new_interface_mtu}
++ fi
+ # Add a network route to the computed network address.
+ for router in $new_routers; do
+ if [ "x$new_subnet_mask" = "x255.255.255.255" ] ; then
+- route add -host $router dev $interface
++ ${ip} -4 route add ${router} dev $interface >/dev/null 2>&1
+ fi
+- route add default gw $router $metric_arg dev $interface
++ ${ip} -4 route add default via ${router} dev ${interface} \
++ ${metric_arg} >/dev/null 2>&1
+ done
+ else
+ # we haven't changed the address, have we changed other options
+@@ -182,21 +181,23 @@
+ if [ x$new_routers != x ] && [ x$new_routers != x$old_routers ] ; then
+ # if we've changed routers delete the old and add the new.
+ for router in $old_routers; do
+- route del default gw $router
++ ${ip} -4 route delete default via ${router}
+ done
+ for router in $new_routers; do
+ if [ "x$new_subnet_mask" = "x255.255.255.255" ] ; then
+- route add -host $router dev $interface
+- fi
+- route add default gw $router $metric_arg dev $interface
++ ${ip} -4 route add ${router} dev $interface >/dev/null 2>&1
++ fi
++ ${ip} -4 route add default via ${router} dev ${interface} \
++ ${metric_arg} >/dev/null 2>&1
+ done
+ fi
+ fi
+ if [ x$new_ip_address != x$alias_ip_address ] && [ x$alias_ip_address != x ];
+ then
+- ifconfig $interface:0- inet 0
+- ifconfig $interface:0 inet $alias_ip_address $alias_subnet_arg
+- route add -host $alias_ip_address $interface:0
++ ${ip} -4 addr flush dev ${interface} label ${interface}:0
++ ${ip} -4 addr add ${alias_ip_address}${alias_mask} \
++ dev ${interface} label ${interface}:0
++ ${ip} -4 route add ${alias_ip_address} dev ${interface} >/dev/null 2>&1
+ fi
+ make_resolv_conf
+ exit_with_hooks 0
+@@ -206,42 +207,49 @@
+ || [ x$reason = xSTOP ]; then
+ if [ x$alias_ip_address != x ]; then
+ # Turn off alias interface.
+- ifconfig $interface:0- inet 0
++ ${ip} -4 addr flush dev ${interface} label ${interface}:0
+ fi
+ if [ x$old_ip_address != x ]; then
+ # Shut down interface, which will delete routes and clear arp cache.
+- ifconfig $interface inet 0 down
++ ${ip} -4 addr flush dev ${interface} label ${interface}
+ fi
+ if [ x$alias_ip_address != x ]; then
+- ifconfig $interface:0 inet $alias_ip_address $alias_subnet_arg
+- route add -host $alias_ip_address $interface:0
++ ${ip} -4 addr add ${alias_ip_address}${alias_network_arg} \
++ dev ${interface} label ${interface}:0
++ ${ip} -4 route add ${alias_ip_address} dev ${interface} >/dev/null 2>&1
+ fi
+ exit_with_hooks 0
+ fi
+
+ if [ x$reason = xTIMEOUT ]; then
+ if [ x$alias_ip_address != x ]; then
+- ifconfig $interface:0- inet 0
++ ${ip} -4 addr flush dev ${interface} label ${interface}:0
++ fi
++ ${ip} -4 addr add ${new_ip_address}${new_mask} ${new_broadcast_arg} \
++ dev ${interface} label ${interface}
++ if [ -n "$new_interface_mtu" ]; then
++ # set MTU
++ ip link set dev ${interface} mtu ${new_interface_mtu}
+ fi
+- ifconfig $interface inet $new_ip_address $new_subnet_arg \
+- $new_broadcast_arg $mtu_arg
+ set $new_routers
+ if ping -q -c 1 $1; then
+ if [ x$new_ip_address != x$alias_ip_address ] && \
+ [ x$alias_ip_address != x ]; then
+- ifconfig $interface:0 inet $alias_ip_address $alias_subnet_arg
+- route add -host $alias_ip_address dev $interface:0
++ ${ip} -4 addr add ${alias_ip_address}${alias_mask} \
++ dev ${interface} label ${interface}:0
++ ${ip} -4 route add ${alias_ip_address} dev ${interface} >/dev/null 2>&1
+ fi
+ for router in $new_routers; do
+ if [ "x$new_subnet_mask" = "x255.255.255.255" ] ; then
+- route add -host $router dev $interface
++ ${ip} -4 route add ${router} dev $interface >/dev/null 2>&1
+ fi
+- route add default gw $router $metric_arg dev $interface
++ ${ip} -4 route add default via ${router} dev ${interface} \
++ ${metric_arg} >/dev/null 2>&1
+ done
+ make_resolv_conf
+ exit_with_hooks 0
+ fi
+- ifconfig $interface inet 0 down
++ ${ip} -4 addr flush dev ${interface}
+ exit_with_hooks 1
+ fi
+
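The substitution applied throughout dhclient-script follows one pattern; with illustrative values (hypothetical interface and addresses) the removed ifconfig/route pair and its iproute2 replacement look like this:

    # old style, removed by the patch:
    #   ifconfig eth0 inet 192.168.1.10 netmask 255.255.255.0 broadcast 192.168.1.255
    #   route add default gw 192.168.1.1 dev eth0
    # new style, as used by the patched script:
    ip -4 addr add 192.168.1.10/255.255.255.0 broadcast 192.168.1.255 dev eth0 label eth0
    ip -4 route add default via 192.168.1.1 dev eth0

iproute2 accepts a dotted-quad netmask after the slash, which is why the script can append /$new_subnet_mask directly instead of converting it to a prefix length.
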
diff --git a/meta/recipes-connectivity/dhcp/dhcp_4.3.0.bb b/meta/recipes-connectivity/dhcp/dhcp_4.3.0.bb
index 6da28faf50..13bcceb5a8 100644
--- a/meta/recipes-connectivity/dhcp/dhcp_4.3.0.bb
+++ b/meta/recipes-connectivity/dhcp/dhcp_4.3.0.bb
@@ -5,6 +5,7 @@ SRC_URI += "file://dhcp-3.0.3-dhclient-dbus.patch;striplevel=0 \
file://link-with-lcrypto.patch \
file://fixsepbuild.patch \
file://dhclient-script-drop-resolv.conf.dhclient.patch \
+ file://replace-ifconfig-route.patch \
"
SRC_URI[md5sum] = "1020d77e1a4c1f01b76279caff9beb80"
diff --git a/meta/recipes-connectivity/dhcp/files/dhcpd.service b/meta/recipes-connectivity/dhcp/files/dhcpd.service
new file mode 100644
index 0000000000..8648f1a253
--- /dev/null
+++ b/meta/recipes-connectivity/dhcp/files/dhcpd.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=DHCP Server Daemon
+After=network.target
+After=time-sync.target
+
+[Service]
+EnvironmentFile=@SYSCONFDIR@/default/dhcp-server
+ExecStart=-@SBINDIR@/dhcpd -q $INTERFACES
+
+[Install]
+WantedBy=multi-user.target
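dhcpd.service expands $INTERFACES from @SYSCONFDIR@/default/dhcp-server, a file that is not part of this change. A minimal example of what it is assumed to contain is a single shell-style assignment:

    # /etc/default/dhcp-server -- assumed content, not shipped by this diff
    INTERFACES="eth0"

Note that the EnvironmentFile= line carries no leading '-', so the file has to exist for the unit to start, while the '-' on ExecStart only ignores the daemon's exit status.
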
diff --git a/meta/recipes-connectivity/dhcp/files/dhcrelay.service b/meta/recipes-connectivity/dhcp/files/dhcrelay.service
new file mode 100644
index 0000000000..a2d818917d
--- /dev/null
+++ b/meta/recipes-connectivity/dhcp/files/dhcrelay.service
@@ -0,0 +1,9 @@
+[Unit]
+Description=DHCP Relay Agent Daemon
+After=network.target
+
+[Service]
+ExecStart=@SBINDIR@/dhcrelay -d --no-pid
+
+[Install]
+WantedBy=multi-user.target
diff --git a/meta/recipes-connectivity/iproute2/iproute2_3.15.0.bb b/meta/recipes-connectivity/iproute2/iproute2_3.17.0.bb
index 7cf9115b9c..d12b33fbd1 100644
--- a/meta/recipes-connectivity/iproute2/iproute2_3.15.0.bb
+++ b/meta/recipes-connectivity/iproute2/iproute2_3.17.0.bb
@@ -4,8 +4,8 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/utils/net/${BPN}/${BP}.tar.xz \
file://configure-cross.patch \
file://0001-iproute2-de-bash-scripts.patch \
"
-SRC_URI[md5sum] = "5b1711c9d16071959052e369a2682d77"
-SRC_URI[sha256sum] = "5359ed1f31839d8542a057c0c4233131ab9c28d8c41fc9c8484579d9c0b99af4"
+SRC_URI[md5sum] = "b741a02c6dda5818d18011d572874493"
+SRC_URI[sha256sum] = "09e406636e7598e46d5d4f7b928bf5db57049d65dbeb9a496005957ee16f6000"
# CFLAGS are computed in Makefile and reference CCOPTS
#
diff --git a/meta/recipes-connectivity/irda-utils/irda-utils-0.9.18/ldflags.patch b/meta/recipes-connectivity/irda-utils/irda-utils-0.9.18/ldflags.patch
new file mode 100644
index 0000000000..2cdd1ac08b
--- /dev/null
+++ b/meta/recipes-connectivity/irda-utils/irda-utils-0.9.18/ldflags.patch
@@ -0,0 +1,75 @@
+Obey LDFLAGS
+
+Signed-off-by: Christopher Larson <chris_larson@mentor.com>
+Upstream-Status: Pending
+
+--- irda-utils-0.9.18.orig/findchip/Makefile
++++ irda-utils-0.9.18/findchip/Makefile
+@@ -65,5 +65,5 @@ install: findchip
+
+ gfindchip: gfindchip.c
+ $(prn_cc)
+- $(ECMD))$(CC) $(CFLAGS) `gtk-config --cflags` $< -o $@ `gtk-config --libs`
++ $(ECMD)$(CC) $(CFLAGS) $(LDFLAGS) `gtk-config --cflags` $< -o $@ `gtk-config --libs`
+
+--- irda-utils-0.9.18.orig/irattach/Makefile
++++ irda-utils-0.9.18/irattach/Makefile
+@@ -49,13 +49,13 @@ all: $(TARGETS)
+
+ irattach: irattach.o util.o
+ $(prn_cc_o)
+- $(ECMD)$(CC) $(CFLAGS) irattach.o util.o -o $@
++ $(ECMD)$(CC) $(CFLAGS) $(LDFLAGS) irattach.o util.o -o $@
+
+
+
+ dongle_attach: dongle_attach.o
+ $(prn_cc_o)
+- $(ECMD)$(CC) $(CFLAGS) dongle_attach.o -o $@
++ $(ECMD)$(CC) $(CFLAGS) $(LDFLAGS) dongle_attach.o -o $@
+
+
+ install: $(TARGETS)
+--- irda-utils-0.9.18.orig/irdadump/Makefile
++++ irda-utils-0.9.18/irdadump/Makefile
+@@ -40,7 +40,7 @@ lib_irdadump.a: $(LIBIRDADUMP_OBJS)
+
+ irdadump: $(IRDADUMP_OBJS) $(LIBIRDADUMP_TARGET)
+ $(prn_cc_o)
+- $(ECMD)$(CC) $(CFLAGS) `pkg-config --libs glib-2.0` -o $(IRDADUMP_TARGET) $< $(LIBIRDADUMP_TARGET)
++ $(ECMD)$(CC) $(CFLAGS) $(LDFLAGS) `pkg-config --libs glib-2.0` -o $(IRDADUMP_TARGET) $< $(LIBIRDADUMP_TARGET)
+
+
+ .c.o:
+--- irda-utils-0.9.18.orig/irdaping/Makefile
++++ irda-utils-0.9.18/irdaping/Makefile
+@@ -56,7 +56,7 @@ all: $(TARGETS)
+
+ irdaping: $(OBJS)
+ $(prn_cc_o)
+- $(ECMD)$(CC) $(CFLAGS) $(OBJS) -o $@
++ $(ECMD)$(CC) $(CFLAGS) $(LDFLAGS) $(OBJS) -o $@
+
+
+ .c.o:
+--- irda-utils-0.9.18.orig/irnetd/Makefile
++++ irda-utils-0.9.18/irnetd/Makefile
+@@ -50,7 +50,7 @@ all: $(TARGETS)
+
+ irnetd: $(OBJS)
+ $(prn_cc_o)
+- $(ECMD)$(CC) $(CFLAGS) $(OBJS) -o $@
++ $(ECMD)$(CC) $(CFLAGS) $(LDFLAGS) $(OBJS) -o $@
+
+
+ install: irnetd
+--- irda-utils-0.9.18.orig/psion/Makefile
++++ irda-utils-0.9.18/psion/Makefile
+@@ -25,4 +25,4 @@ install: $(PSION_TARGETS)
+ CFLAGS += -g -I../include -Wall -Wstrict-prototypes $(RPM_OPT_FLAGS)
+ irpsion5:
+ $(prn_cc_o)
+- $(ECMD)$(CC) $(CFLAGS) $(PSION_SRC) -o $@
+\ No newline at end of file
++ $(ECMD)$(CC) $(CFLAGS) $(LDFLAGS) $(PSION_SRC) -o $@
+\ No newline at end of file
diff --git a/meta/recipes-connectivity/irda-utils/irda-utils_0.9.18.bb b/meta/recipes-connectivity/irda-utils/irda-utils_0.9.18.bb
index 7403dc95da..bd60b9f1e6 100644
--- a/meta/recipes-connectivity/irda-utils/irda-utils_0.9.18.bb
+++ b/meta/recipes-connectivity/irda-utils/irda-utils_0.9.18.bb
@@ -12,21 +12,29 @@ LIC_FILES_CHKSUM = "file://irdadump/COPYING;md5=94d55d512a9ba36caa9b7df079bae19f
file://irdadump/irdadump.c;beginline=1;endline=24;md5=d78b9dce3cd78c2220250c9c7a2be178"
SRC_URI = "${SOURCEFORGE_MIRROR}/irda/irda-utils-${PV}.tar.gz \
- file://init"
+ file://ldflags.patch \
+ file://init"
SRC_URI[md5sum] = "84dc12aa4c3f61fccb8d8919bf4079bb"
SRC_URI[sha256sum] = "61980551e46b2eaa9e17ad31cbc1a638074611fc33bff34163d10c7a67a9fdc6"
-export SYS_INCLUDES="-I${STAGING_INCDIR}"
+inherit update-rc.d
-inherit autotools-brokensep update-rc.d
+EXTRA_OEMAKE = "\
+ 'CC=${CC}' \
+ 'LD=${LD}' \
+ 'CFLAGS=${CFLAGS}' \
+ 'LDFLAGS=${LDFLAGS}' \
+ 'SYS_INCLUDES=' \
+ 'V=1' \
+"
INITSCRIPT_NAME = "irattach"
INITSCRIPT_PARAMS = "defaults 20"
do_compile () {
- oe_runmake -e -C irattach
- oe_runmake -e -C irdaping
+ oe_runmake -C irattach
+ oe_runmake -C irdaping
}
do_install () {
diff --git a/meta/recipes-connectivity/libpcap/libpcap.inc b/meta/recipes-connectivity/libpcap/libpcap.inc
index 92d6eff628..a12eb16615 100644
--- a/meta/recipes-connectivity/libpcap/libpcap.inc
+++ b/meta/recipes-connectivity/libpcap/libpcap.inc
@@ -7,7 +7,7 @@ BUGTRACKER = "http://sourceforge.net/tracker/?group_id=53067&atid=469577"
SECTION = "libs/network"
LICENSE = "BSD"
LIC_FILES_CHKSUM = "file://LICENSE;md5=1d4b0366557951c84a94fabe3529f867 \
- file://pcap.h;beginline=1;endline=34;md5=8d6cf7e17d5745010d633e30bc529ea9"
+ file://pcap.h;beginline=1;endline=32;md5=39af3510e011f34b8872f120b1dc31d2"
DEPENDS = "flex-native bison-native"
PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluetooth', '', d)}"
@@ -17,7 +17,7 @@ PACKAGECONFIG[libnl] = "--with-libnl,--without-libnl,libnl"
INC_PR = "r5"
-SRC_URI = "http://www.tcpdump.org/release/libpcap-${PV}.tar.gz"
+SRC_URI = "http://www.tcpdump.org/release/${BP}.tar.gz"
BINCONFIG = "${bindir}/pcap-config"
diff --git a/meta/recipes-connectivity/libpcap/libpcap/ieee80215-arphrd.patch b/meta/recipes-connectivity/libpcap/libpcap/ieee80215-arphrd.patch
deleted file mode 100644
index 2f5cd213bc..0000000000
--- a/meta/recipes-connectivity/libpcap/libpcap/ieee80215-arphrd.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-Upstream-Status: Pending
-
-Index: libpcap-1.0.0/pcap-linux.c
-===================================================================
---- libpcap-1.0.0.orig/pcap-linux.c 2009-01-28 11:58:54.000000000 +0300
-+++ libpcap-1.0.0/pcap-linux.c 2009-01-28 11:59:04.000000000 +0300
-@@ -1616,6 +1616,17 @@
- * so let's use "Linux-cooked" mode. Jean II */
- //handle->md.cooked = 1;
- break;
-+#ifndef ARPHRD_IEEE80215
-+#define ARPHRD_IEEE80215 805
-+#endif
-+#ifndef ARPHRD_IEEE80215_PHY
-+#define ARPHRD_IEEE80215_PHY 806
-+#endif
-+
-+ case ARPHRD_IEEE80215:
-+ case ARPHRD_IEEE80215_PHY:
-+ handle->linktype = DLT_IEEE802_15_4;
-+ break;
-
- /* ARPHRD_LAPD is unofficial and randomly allocated, if reallocation
- * is needed, please report it to <daniele@orlandi.com> */
diff --git a/meta/recipes-connectivity/libpcap/libpcap_1.5.3.bb b/meta/recipes-connectivity/libpcap/libpcap_1.6.2.bb
index 3ba3caa3bc..a2d5ef46d7 100644
--- a/meta/recipes-connectivity/libpcap/libpcap_1.5.3.bb
+++ b/meta/recipes-connectivity/libpcap/libpcap_1.6.2.bb
@@ -1,10 +1,8 @@
require libpcap.inc
-SRC_URI += "file://aclocal.patch \
- file://ieee80215-arphrd.patch \
- "
-SRC_URI[md5sum] = "7e7321fb3aff2f2bb05c8229f3795d4a"
-SRC_URI[sha256sum] = "9ae92159c1060f15e6a90f2c4ad227268b6aaa382c316fa49a31c496b9979e93"
+SRC_URI += "file://aclocal.patch"
+SRC_URI[md5sum] = "5f14191c1a684a75532c739c2c4059fa"
+SRC_URI[sha256sum] = "5db3e2998f1eeba2c76da55da5d474248fe19c44f49e15cac8a796a2c7e19690"
#
# make install doesn't cover the shared lib
diff --git a/meta/recipes-connectivity/neard/neard/neard.service.in b/meta/recipes-connectivity/neard/neard/neard.service.in
index 90e5302662..a6f8a84180 100644
--- a/meta/recipes-connectivity/neard/neard/neard.service.in
+++ b/meta/recipes-connectivity/neard/neard/neard.service.in
@@ -1,13 +1,13 @@
[Unit]
-Description=NFC service
+Description=neard service
+Documentation=man:neard(8)
After=syslog.target
[Service]
Type=dbus
BusName=org.neard
-Restart=on-failure
ExecStart=@installpath@/neard -n
-StandardOutput=null
[Install]
+Alias=dbus-org.neard.service
WantedBy=multi-user.target
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-statd-fixed-the-with-statdpath-flag.patch b/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-statd-fixed-the-with-statdpath-flag.patch
deleted file mode 100644
index 2ce824cf9e..0000000000
--- a/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-statd-fixed-the-with-statdpath-flag.patch
+++ /dev/null
@@ -1,41 +0,0 @@
-From 3b1457d219ceb1058d44bacc657581f13437ae40 Mon Sep 17 00:00:00 2001
-From: Steve Dickson <steved@redhat.com>
-Date: Tue, 17 Jun 2014 13:28:53 -0400
-Subject: [PATCH] statd: fixed the --with-statdpath= flag
-
-Create the given path set with --with-statdpath
-
-Signed-off-by: chendt.fnst@cn.fujitsu.com
-Reported-by: yaoxp@cn.fujitsu.com
-Signed-off-by: Steve Dickson <steved@redhat.com>
-Upstream-Status: Backport
----
- Makefile.am | 10 +++++-----
- 1 file changed, 5 insertions(+), 5 deletions(-)
-
-diff --git a/Makefile.am b/Makefile.am
-index ae7cd16..5824adc 100644
---- a/Makefile.am
-+++ b/Makefile.am
-@@ -54,13 +54,13 @@ install-data-hook:
- touch $(DESTDIR)$(statedir)/xtab; chmod 644 $(DESTDIR)$(statedir)/xtab
- touch $(DESTDIR)$(statedir)/etab; chmod 644 $(DESTDIR)$(statedir)/etab
- touch $(DESTDIR)$(statedir)/rmtab; chmod 644 $(DESTDIR)$(statedir)/rmtab
-- mkdir -p $(DESTDIR)$(statedir)/sm $(DESTDIR)$(statedir)/sm.bak
-- touch $(DESTDIR)$(statedir)/state
-- chmod go-rwx $(DESTDIR)$(statedir)/sm $(DESTDIR)$(statedir)/sm.bak $(DESTDIR)$(statedir)/state
-- -chown $(statduser) $(DESTDIR)$(statedir)/sm $(DESTDIR)$(statedir)/sm.bak $(DESTDIR)$(statedir)/state
-+ mkdir -p $(DESTDIR)$(statdpath)/sm $(DESTDIR)$(statdpath)/sm.bak
-+ touch $(DESTDIR)$(statdpath)/state
-+ chmod go-rwx $(DESTDIR)$(statdpath)/sm $(DESTDIR)$(statdpath)/sm.bak $(DESTDIR)$(statdpath)/state
-+ -chown $(statduser) $(DESTDIR)$(statdpath)/sm $(DESTDIR)$(statdpath)/sm.bak $(DESTDIR)$(statdpath)/state
-
- uninstall-hook:
- rm $(DESTDIR)$(statedir)/xtab
- rm $(DESTDIR)$(statedir)/etab
- rm $(DESTDIR)$(statedir)/rmtab
-- rm $(DESTDIR)$(statedir)/state
-+ rm $(DESTDIR)$(statdpath)/state
---
-1.8.4.2
-
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-utils-1.0.6-uclibc.patch b/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-utils-1.0.6-uclibc.patch
deleted file mode 100644
index c3442380e1..0000000000
--- a/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-utils-1.0.6-uclibc.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
-Upstream-Status: Inappropriate [embedded specific]
-
-Index: nfs-utils-1.2.6/support/nfs/svc_socket.c
-===================================================================
---- nfs-utils-1.2.6.orig/support/nfs/svc_socket.c 2012-05-14 07:40:52.000000000 -0700
-+++ nfs-utils-1.2.6/support/nfs/svc_socket.c 2012-10-28 02:42:50.179222457 -0700
-@@ -40,8 +40,9 @@
- char rpcdata[1024], servdata[1024];
- struct rpcent rpcbuf, *rpcp;
- struct servent servbuf, *servp = NULL;
-- int ret;
-+ int ret = 0;
-
-+#ifndef __UCLIBC__ /* neither getrpcbynumber() nor getrpcbynumber_r() is SuSv3 */
- ret = getrpcbynumber_r(number, &rpcbuf, rpcdata, sizeof rpcdata,
- &rpcp);
- if (ret == 0 && rpcp != NULL) {
-@@ -60,6 +61,7 @@
- }
- }
- }
-+#endif /* __UCLIBC__ */
-
- if (ret == 0 && servp != NULL)
- return ntohs(servp->s_port);
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils/nfscommon b/meta/recipes-connectivity/nfs-utils/nfs-utils/nfscommon
index 65fdd1dd4f..992267d5a1 100644
--- a/meta/recipes-connectivity/nfs-utils/nfs-utils/nfscommon
+++ b/meta/recipes-connectivity/nfs-utils/nfs-utils/nfscommon
@@ -28,33 +28,7 @@ test -n "$NFS_STATEDIR" || NFS_STATEDIR=/var/lib/nfs
#----------------------------------------------------------------------
# Startup and shutdown functions.
# Actual startup/shutdown is at the end of this file.
-#directories
-create_directories(){
- echo -n 'creating NFS state directory: '
- mkdir -p "$NFS_STATEDIR"
- ( cd "$NFS_STATEDIR"
- umask 077
- mkdir -p rpc_pipefs
- mkdir -p sm sm.bak statd
- chown rpcuser:rpcuser sm sm.bak statd
- test -w statd/state || {
- rm -f statd/state
- :>statd/state
- }
- umask 022
- for file in xtab etab smtab rmtab
- do
- test -w "$file" || {
- rm -f "$file"
- :>"$file"
- }
- done
- )
- chown rpcuser:rpcuser "$NFS_STATEDIR"
- echo done
-}
-#statd
start_statd(){
echo -n "starting statd: "
start-stop-daemon --start --exec "$NFS_STATD" --pidfile "$STATD_PID"
@@ -74,7 +48,6 @@ stop_statd(){
#FIXME: need to create the /var/lib/nfs/... directories
case "$1" in
start)
- create_directories
start_statd;;
stop)
stop_statd;;
diff --git a/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.0.bb b/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.1.bb
index c813d7f61e..d312349fe4 100644
--- a/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.0.bb
+++ b/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.1.bb
@@ -21,7 +21,6 @@ USERADD_PARAM_${PN}-client = "--system --home-dir /var/lib/nfs \
SRC_URI = "${KERNELORG_MIRROR}/linux/utils/nfs-utils/${PV}/nfs-utils-${PV}.tar.xz \
file://0001-configure-Allow-to-explicitly-disable-nfsidmap.patch \
- file://nfs-utils-1.0.6-uclibc.patch \
file://nfs-utils-1.2.3-sm-notify-res_init.patch \
file://nfsserver \
file://nfscommon \
@@ -30,11 +29,10 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/utils/nfs-utils/${PV}/nfs-utils-${PV}.tar.x
file://nfs-mountd.service \
file://nfs-statd.service \
file://nfs-utils-Do-not-pass-CFLAGS-to-gcc-while-building.patch \
- file://0001-statd-fixed-the-with-statdpath-flag.patch \
"
-SRC_URI[md5sum] = "6e93a7997ca3a1eac56bf219adab72a8"
-SRC_URI[sha256sum] = "ab8384d0e487ed6a18c5380d5df28015f7dd98680bf08f3247c97d9f7d99e56f"
+SRC_URI[md5sum] = "8de676b9ff34b8f9addc1d0800fabdf8"
+SRC_URI[sha256sum] = "ff79d70b7b58b2c8f9b798c58721127e82bb96022adc04a5c4cb251630e696b8"
PARALLEL_MAKE = ""
@@ -54,7 +52,7 @@ SYSTEMD_SERVICE_${PN}-client = "nfs-statd.service"
SYSTEMD_AUTO_ENABLE = "disable"
# --enable-uuid is need for cross-compiling
-EXTRA_OECONF = "--with-statduser=nobody \
+EXTRA_OECONF = "--with-statduser=rpcuser \
--enable-mountconfig \
--enable-libmount-mount \
--disable-nfsv41 \
@@ -90,7 +88,6 @@ do_compile_prepend() {
do_install_append () {
install -d ${D}${sysconfdir}/init.d
- install -d ${D}${localstatedir}/lib/nfs/statd
install -m 0755 ${WORKDIR}/nfsserver ${D}${sysconfdir}/init.d/nfsserver
install -m 0755 ${WORKDIR}/nfscommon ${D}${sysconfdir}/init.d/nfscommon
@@ -106,6 +103,10 @@ do_install_append () {
# kernel code as of 3.8 hard-codes this path as a default
install -d ${D}/var/lib/nfs/v4recovery
+ # chown the directories and files
+ chown -R rpcuser:rpcuser ${D}${localstatedir}/lib/nfs/statd
+ chmod 0644 ${D}${localstatedir}/lib/nfs/statd/state
+
# the following are built by CC_FOR_BUILD
rm -f ${D}${sbindir}/rpcdebug
rm -f ${D}${sbindir}/rpcgen
diff --git a/meta/recipes-connectivity/ofono/ofono.inc b/meta/recipes-connectivity/ofono/ofono.inc
index 9f65f4f144..bf44fcae08 100644
--- a/meta/recipes-connectivity/ofono/ofono.inc
+++ b/meta/recipes-connectivity/ofono/ofono.inc
@@ -7,16 +7,20 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a \
inherit autotools pkgconfig update-rc.d systemd
-DEPENDS = "dbus glib-2.0 udev mobile-broadband-provider-info ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth','bluez4', '', d)}"
+DEPENDS = "dbus glib-2.0 udev mobile-broadband-provider-info"
INITSCRIPT_NAME = "ofono"
INITSCRIPT_PARAMS = "defaults 22"
-EXTRA_OECONF += "\
- ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', '--with-systemdunitdir=${systemd_unitdir}/system/', '--with-systemdunitdir=', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth','--enable-bluetooth', '--disable-bluetooth', d)} \
- --enable-test \
-"
+PACKAGECONFIG ??= "\
+ ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'systemd', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth','bluetooth', '', d)} \
+ "
+PACKAGECONFIG[systemd] = "--with-systemdunitdir=${systemd_unitdir}/system/,--with-systemdunitdir="
+PACKAGECONFIG[bluetooth] = "--enable-bluetooth,--disable-bluetooth,bluez4"
+
+EXTRA_OECONF += "--enable-test"
+
SYSTEMD_SERVICE_${PN} = "ofono.service"
do_install_append() {
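
The EXTRA_OECONF conditionals above become PACKAGECONFIG entries, whose value
is a comma-separated list: configure arguments when the feature is enabled,
arguments when it is disabled, build-time dependencies, and optionally runtime
dependencies. A minimal, hypothetical illustration (the "foo" feature, its
flags and the libfoo dependency are made up, not part of ofono):

    PACKAGECONFIG ??= "foo"
    PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,libfoo"

A distro or local configuration can then toggle the feature by adjusting
PACKAGECONFIG instead of overriding EXTRA_OECONF.
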
diff --git a/meta/recipes-connectivity/ofono/ofono/Revert-test-Convert-to-Python-3.patch b/meta/recipes-connectivity/ofono/ofono/Revert-test-Convert-to-Python-3.patch
new file mode 100644
index 0000000000..5f8ca77101
--- /dev/null
+++ b/meta/recipes-connectivity/ofono/ofono/Revert-test-Convert-to-Python-3.patch
@@ -0,0 +1,1270 @@
+Upstream-Status: Inappropriate [configuration]
+
+From 572fc23f6efd65a2ef9e6c957b2506108738672b Mon Sep 17 00:00:00 2001
+From: Cristian Iorga <cristian.iorga@intel.com>
+Date: Mon, 25 Aug 2014 16:59:39 +0300
+Subject: [PATCH] Revert "test: Convert to Python 3"
+
+This reverts commit c027ab9fbc1a8e8c9e76bcd123df1ad7696307c2.
+---
+ test/activate-context | 2 +-
+ test/answer-calls | 2 +-
+ test/backtrace | 2 +-
+ test/cancel-ussd | 2 +-
+ test/cdma-connman-disable | 2 +-
+ test/cdma-connman-enable | 2 +-
+ test/cdma-dial-number | 2 +-
+ test/cdma-hangup | 2 +-
+ test/cdma-list-call | 2 +-
+ test/cdma-set-credentials | 2 +-
+ test/change-pin | 2 +-
+ test/create-internet-context | 2 +-
+ test/create-mms-context | 2 +-
+ test/create-multiparty | 2 +-
+ test/deactivate-all | 2 +-
+ test/deactivate-context | 2 +-
+ test/dial-number | 2 +-
+ test/disable-call-forwarding | 2 +-
+ test/disable-gprs | 2 +-
+ test/disable-modem | 2 +-
+ test/display-icon | 2 +-
+ test/enable-cbs | 2 +-
+ test/enable-gprs | 2 +-
+ test/enable-modem | 2 +-
+ test/enter-pin | 2 +-
+ test/get-icon | 2 +-
+ test/get-operators | 2 +-
+ test/get-tech-preference | 2 +-
+ test/hangup-active | 2 +-
+ test/hangup-all | 2 +-
+ test/hangup-call | 2 +-
+ test/hangup-multiparty | 2 +-
+ test/hold-and-answer | 2 +-
+ test/initiate-ussd | 4 ++--
+ test/list-calls | 2 +-
+ test/list-contexts | 2 +-
+ test/list-messages | 2 +-
+ test/list-modems | 2 +-
+ test/list-operators | 2 +-
+ test/lock-pin | 2 +-
+ test/lockdown-modem | 2 +-
+ test/monitor-ofono | 4 ++--
+ test/offline-modem | 2 +-
+ test/online-modem | 2 +-
+ test/private-chat | 2 +-
+ test/process-context-settings | 2 +-
+ test/receive-sms | 2 +-
+ test/reject-calls | 2 +-
+ test/release-and-answer | 2 +-
+ test/release-and-swap | 2 +-
+ test/remove-contexts | 2 +-
+ test/reset-pin | 2 +-
+ test/scan-for-operators | 2 +-
+ test/send-sms | 2 +-
+ test/send-ussd | 4 ++--
+ test/send-vcal | 2 +-
+ test/send-vcard | 2 +-
+ test/set-call-forwarding | 2 +-
+ test/set-cbs-topics | 2 +-
+ test/set-context-property | 2 +-
+ test/set-fast-dormancy | 2 +-
+ test/set-gsm-band | 2 +-
+ test/set-mic-volume | 2 +-
+ test/set-mms-details | 2 +-
+ test/set-msisdn | 2 +-
+ test/set-roaming-allowed | 2 +-
+ test/set-speaker-volume | 2 +-
+ test/set-tech-preference | 2 +-
+ test/set-tty | 2 +-
+ test/set-umts-band | 2 +-
+ test/set-use-sms-reports | 2 +-
+ test/swap-calls | 2 +-
+ test/test-advice-of-charge | 2 +-
+ test/test-call-barring | 2 +-
+ test/test-call-forwarding | 2 +-
+ test/test-call-settings | 2 +-
+ test/test-cbs | 4 ++--
+ test/test-gnss | 4 ++--
+ test/test-message-waiting | 2 +-
+ test/test-modem | 2 +-
+ test/test-network-registration | 2 +-
+ test/test-phonebook | 2 +-
+ test/test-push-notification | 2 +-
+ test/test-smart-messaging | 2 +-
+ test/test-sms | 18 +++++++++---------
+ test/test-ss | 2 +-
+ test/test-ss-control-cb | 2 +-
+ test/test-ss-control-cf | 2 +-
+ test/test-ss-control-cs | 2 +-
+ test/test-stk-menu | 34 +++++++++++++++++-----------------
+ test/unlock-pin | 2 +-
+ 94 files changed, 124 insertions(+), 124 deletions(-)
+
+diff --git a/test/activate-context b/test/activate-context
+index e4fc702..4241396 100755
+--- a/test/activate-context
++++ b/test/activate-context
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/answer-calls b/test/answer-calls
+index daa794b..45ff08f 100755
+--- a/test/answer-calls
++++ b/test/answer-calls
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+
+diff --git a/test/backtrace b/test/backtrace
+index 03c7632..c624709 100755
+--- a/test/backtrace
++++ b/test/backtrace
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import os
+ import re
+diff --git a/test/cancel-ussd b/test/cancel-ussd
+index e7559ba..1797f26 100755
+--- a/test/cancel-ussd
++++ b/test/cancel-ussd
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/cdma-connman-disable b/test/cdma-connman-disable
+index 3adc14d..0ddc0cd 100755
+--- a/test/cdma-connman-disable
++++ b/test/cdma-connman-disable
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/cdma-connman-enable b/test/cdma-connman-enable
+index ac16a2d..a3cca01 100755
+--- a/test/cdma-connman-enable
++++ b/test/cdma-connman-enable
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/cdma-dial-number b/test/cdma-dial-number
+index 683431e..9cdfb24 100755
+--- a/test/cdma-dial-number
++++ b/test/cdma-dial-number
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/cdma-hangup b/test/cdma-hangup
+index 41ffa60..493ece4 100755
+--- a/test/cdma-hangup
++++ b/test/cdma-hangup
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/cdma-list-call b/test/cdma-list-call
+index b132353..5d36a69 100755
+--- a/test/cdma-list-call
++++ b/test/cdma-list-call
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+
+diff --git a/test/cdma-set-credentials b/test/cdma-set-credentials
+index a60c86e..a286b0e 100755
+--- a/test/cdma-set-credentials
++++ b/test/cdma-set-credentials
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/change-pin b/test/change-pin
+index 301c6ce..000ce53 100755
+--- a/test/change-pin
++++ b/test/change-pin
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/create-internet-context b/test/create-internet-context
+index 1089053..efd0998 100755
+--- a/test/create-internet-context
++++ b/test/create-internet-context
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/create-mms-context b/test/create-mms-context
+index 598336f..e5be08d 100755
+--- a/test/create-mms-context
++++ b/test/create-mms-context
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/create-multiparty b/test/create-multiparty
+index 1b76010..97047c3 100755
+--- a/test/create-multiparty
++++ b/test/create-multiparty
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/deactivate-all b/test/deactivate-all
+index 5aa8587..427009e 100755
+--- a/test/deactivate-all
++++ b/test/deactivate-all
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/deactivate-context b/test/deactivate-context
+index 5c86a71..df47d2e 100755
+--- a/test/deactivate-context
++++ b/test/deactivate-context
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/dial-number b/test/dial-number
+index fe5adad..ee674d9 100755
+--- a/test/dial-number
++++ b/test/dial-number
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/disable-call-forwarding b/test/disable-call-forwarding
+index 811e4fa..3609816 100755
+--- a/test/disable-call-forwarding
++++ b/test/disable-call-forwarding
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ from gi.repository import GLib
+diff --git a/test/disable-gprs b/test/disable-gprs
+index 61ce216..c6c40a5 100755
+--- a/test/disable-gprs
++++ b/test/disable-gprs
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/disable-modem b/test/disable-modem
+index 6fba857..ca8c8d8 100755
+--- a/test/disable-modem
++++ b/test/disable-modem
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/display-icon b/test/display-icon
+index ac40818..753d14d 100755
+--- a/test/display-icon
++++ b/test/display-icon
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/enable-cbs b/test/enable-cbs
+index 4a8bf66..c08bf2b 100755
+--- a/test/enable-cbs
++++ b/test/enable-cbs
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/enable-gprs b/test/enable-gprs
+index 68d5ef0..8664891 100755
+--- a/test/enable-gprs
++++ b/test/enable-gprs
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/enable-modem b/test/enable-modem
+index fc5958a..dfaaaa8 100755
+--- a/test/enable-modem
++++ b/test/enable-modem
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/enter-pin b/test/enter-pin
+index 9556363..c6ee669 100755
+--- a/test/enter-pin
++++ b/test/enter-pin
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/get-icon b/test/get-icon
+index 5569a33..fdaaee7 100755
+--- a/test/get-icon
++++ b/test/get-icon
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/get-operators b/test/get-operators
+index 0f35c80..62354c5 100755
+--- a/test/get-operators
++++ b/test/get-operators
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/get-tech-preference b/test/get-tech-preference
+index 7ba6365..77d20d0 100755
+--- a/test/get-tech-preference
++++ b/test/get-tech-preference
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus, sys
+
+diff --git a/test/hangup-active b/test/hangup-active
+index 82e0eb0..5af62ab 100755
+--- a/test/hangup-active
++++ b/test/hangup-active
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/hangup-all b/test/hangup-all
+index 3a0138d..32933db 100755
+--- a/test/hangup-all
++++ b/test/hangup-all
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/hangup-call b/test/hangup-call
+index 5a2de20..447020c 100755
+--- a/test/hangup-call
++++ b/test/hangup-call
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/hangup-multiparty b/test/hangup-multiparty
+index 24751c3..48fe342 100755
+--- a/test/hangup-multiparty
++++ b/test/hangup-multiparty
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/hold-and-answer b/test/hold-and-answer
+index da3be57..2c47e27 100755
+--- a/test/hold-and-answer
++++ b/test/hold-and-answer
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/initiate-ussd b/test/initiate-ussd
+index faf50d0..d7022f1 100755
+--- a/test/initiate-ussd
++++ b/test/initiate-ussd
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+@@ -45,7 +45,7 @@ if state == "idle":
+ print("State: %s" % (state))
+
+ while state == "user-response":
+- response = input("Enter response: ")
++ response = raw_input("Enter response: ")
+
+ result = ussd.Respond(response, timeout=100)
+
+diff --git a/test/list-calls b/test/list-calls
+index f3ee991..08668c6 100755
+--- a/test/list-calls
++++ b/test/list-calls
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+
+diff --git a/test/list-contexts b/test/list-contexts
+index 78278ca..f0d4094 100755
+--- a/test/list-contexts
++++ b/test/list-contexts
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+
+diff --git a/test/list-messages b/test/list-messages
+index 9f5bce3..cfccbea 100755
+--- a/test/list-messages
++++ b/test/list-messages
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+
+diff --git a/test/list-modems b/test/list-modems
+index b9f510a..ed66124 100755
+--- a/test/list-modems
++++ b/test/list-modems
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+
+diff --git a/test/list-operators b/test/list-operators
+index 064c4e3..349bf41 100755
+--- a/test/list-operators
++++ b/test/list-operators
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/lock-pin b/test/lock-pin
+index 96ea9c2..5579735 100755
+--- a/test/lock-pin
++++ b/test/lock-pin
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/lockdown-modem b/test/lockdown-modem
+index 4e04205..781abb6 100755
+--- a/test/lockdown-modem
++++ b/test/lockdown-modem
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/monitor-ofono b/test/monitor-ofono
+index 8830757..bd31617 100755
+--- a/test/monitor-ofono
++++ b/test/monitor-ofono
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+
+@@ -6,7 +6,7 @@ import dbus
+ import dbus.mainloop.glib
+
+ _dbus2py = {
+- dbus.String : str,
++ dbus.String : unicode,
+ dbus.UInt32 : int,
+ dbus.Int32 : int,
+ dbus.Int16 : int,
+diff --git a/test/offline-modem b/test/offline-modem
+index e8c043a..ea1f522 100755
+--- a/test/offline-modem
++++ b/test/offline-modem
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus, sys
+
+diff --git a/test/online-modem b/test/online-modem
+index 029c4a5..310ed7d 100755
+--- a/test/online-modem
++++ b/test/online-modem
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus, sys
+
+diff --git a/test/private-chat b/test/private-chat
+index e7e5406..ef2ef6c 100755
+--- a/test/private-chat
++++ b/test/private-chat
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/process-context-settings b/test/process-context-settings
+index 8a3ecfa..0f058b2 100755
+--- a/test/process-context-settings
++++ b/test/process-context-settings
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import os
+ import dbus
+diff --git a/test/receive-sms b/test/receive-sms
+index a0c6915..c23eb14 100755
+--- a/test/receive-sms
++++ b/test/receive-sms
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+
+diff --git a/test/reject-calls b/test/reject-calls
+index 71b243e..9edf1ff 100755
+--- a/test/reject-calls
++++ b/test/reject-calls
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+
+diff --git a/test/release-and-answer b/test/release-and-answer
+index dec8e17..25fd818 100755
+--- a/test/release-and-answer
++++ b/test/release-and-answer
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/release-and-swap b/test/release-and-swap
+index cb8c84e..7b3569f 100755
+--- a/test/release-and-swap
++++ b/test/release-and-swap
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/remove-contexts b/test/remove-contexts
+index b54184e..c5082cb 100755
+--- a/test/remove-contexts
++++ b/test/remove-contexts
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+
+diff --git a/test/reset-pin b/test/reset-pin
+index 3fbd126..b429254 100755
+--- a/test/reset-pin
++++ b/test/reset-pin
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/scan-for-operators b/test/scan-for-operators
+index b4fc05e..749c710 100755
+--- a/test/scan-for-operators
++++ b/test/scan-for-operators
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/send-sms b/test/send-sms
+index 98808aa..e06444d 100755
+--- a/test/send-sms
++++ b/test/send-sms
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/send-ussd b/test/send-ussd
+index a20e098..e585883 100755
+--- a/test/send-ussd
++++ b/test/send-ussd
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+@@ -46,7 +46,7 @@ if state == "idle":
+ print("State: %s" % (state))
+
+ while state == "user-response":
+- response = input("Enter response: ")
++ response = raw_input("Enter response: ")
+
+ print(ussd.Respond(response, timeout=100))
+
+diff --git a/test/send-vcal b/test/send-vcal
+index 566daef..7f8272b 100755
+--- a/test/send-vcal
++++ b/test/send-vcal
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/send-vcard b/test/send-vcard
+index 4dedf51..250b36f 100755
+--- a/test/send-vcard
++++ b/test/send-vcard
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/set-call-forwarding b/test/set-call-forwarding
+index 49d1ce0..9fd358b 100755
+--- a/test/set-call-forwarding
++++ b/test/set-call-forwarding
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ from gi.repository import GLib
+diff --git a/test/set-cbs-topics b/test/set-cbs-topics
+index db95e16..78d6d44 100755
+--- a/test/set-cbs-topics
++++ b/test/set-cbs-topics
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/set-context-property b/test/set-context-property
+index 5ff7a67..64a6fb8 100755
+--- a/test/set-context-property
++++ b/test/set-context-property
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/set-fast-dormancy b/test/set-fast-dormancy
+index ef77bcd..7bf7715 100755
+--- a/test/set-fast-dormancy
++++ b/test/set-fast-dormancy
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/set-gsm-band b/test/set-gsm-band
+index b37bcb5..3c17c10 100755
+--- a/test/set-gsm-band
++++ b/test/set-gsm-band
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/set-mic-volume b/test/set-mic-volume
+index cd6c73f..e0bff49 100755
+--- a/test/set-mic-volume
++++ b/test/set-mic-volume
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/set-mms-details b/test/set-mms-details
+index 6ee59fa..d2d0838 100755
+--- a/test/set-mms-details
++++ b/test/set-mms-details
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/set-msisdn b/test/set-msisdn
+index b5fe819..01f284d 100755
+--- a/test/set-msisdn
++++ b/test/set-msisdn
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/set-roaming-allowed b/test/set-roaming-allowed
+index 698c8b6..9e3e058 100755
+--- a/test/set-roaming-allowed
++++ b/test/set-roaming-allowed
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/set-speaker-volume b/test/set-speaker-volume
+index 6d4e301..7962f39 100755
+--- a/test/set-speaker-volume
++++ b/test/set-speaker-volume
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/set-tech-preference b/test/set-tech-preference
+index b549abc..2666cbd 100755
+--- a/test/set-tech-preference
++++ b/test/set-tech-preference
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/set-tty b/test/set-tty
+index eed1fba..53d6b99 100755
+--- a/test/set-tty
++++ b/test/set-tty
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/set-umts-band b/test/set-umts-band
+index 0bae5c4..c1e6448 100755
+--- a/test/set-umts-band
++++ b/test/set-umts-band
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/set-use-sms-reports b/test/set-use-sms-reports
+index 288d4e1..a4efe4f 100755
+--- a/test/set-use-sms-reports
++++ b/test/set-use-sms-reports
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+diff --git a/test/swap-calls b/test/swap-calls
+index 018a8d3..eeb257b 100755
+--- a/test/swap-calls
++++ b/test/swap-calls
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/test-advice-of-charge b/test/test-advice-of-charge
+index 6e87e61..0f1f57f 100755
+--- a/test/test-advice-of-charge
++++ b/test/test-advice-of-charge
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+ import sys
+diff --git a/test/test-call-barring b/test/test-call-barring
+index eedb69f..be4ab57 100755
+--- a/test/test-call-barring
++++ b/test/test-call-barring
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+ import sys
+diff --git a/test/test-call-forwarding b/test/test-call-forwarding
+index 5db84d7..01a7294 100755
+--- a/test/test-call-forwarding
++++ b/test/test-call-forwarding
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+
+diff --git a/test/test-call-settings b/test/test-call-settings
+index 435594c..5d7ee49 100755
+--- a/test/test-call-settings
++++ b/test/test-call-settings
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+
+diff --git a/test/test-cbs b/test/test-cbs
+index a5cec06..13cdd80 100755
+--- a/test/test-cbs
++++ b/test/test-cbs
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import dbus.mainloop.glib
+@@ -78,7 +78,7 @@ def set_topics(cbs):
+ invalidData = False;
+ index = 0
+
+- topics = input('Enter the topic ID(s) you want to register to: ')
++ topics = raw_input('Enter the topic ID(s) you want to register to: ')
+
+ while index < len(topics):
+ if topics[index] == ',' or topics[index] == '-':
+diff --git a/test/test-gnss b/test/test-gnss
+index 6ae64db..aa0b160 100755
+--- a/test/test-gnss
++++ b/test/test-gnss
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+ import sys
+@@ -40,7 +40,7 @@ def print_menu():
+ def stdin_handler(channel, condition, gnss, path):
+ in_key = os.read(channel.unix_get_fd(), 160).rstrip().decode('UTF-8')
+ if in_key == '0':
+- xml = input('type the element and press enter: ')
++ xml = raw_input('type the element and press enter: ')
+ try:
+ gnss.SendPositioningElement(dbus.String(xml))
+ print("ok")
+diff --git a/test/test-message-waiting b/test/test-message-waiting
+index 432862e..b93fbf3 100755
+--- a/test/test-message-waiting
++++ b/test/test-message-waiting
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+ import sys
+diff --git a/test/test-modem b/test/test-modem
+index aa38b1f..29dbf14 100755
+--- a/test/test-modem
++++ b/test/test-modem
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+
+diff --git a/test/test-network-registration b/test/test-network-registration
+index 68b4347..c5ad586 100755
+--- a/test/test-network-registration
++++ b/test/test-network-registration
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+ import sys
+diff --git a/test/test-phonebook b/test/test-phonebook
+index 42646d3..116fd4f 100755
+--- a/test/test-phonebook
++++ b/test/test-phonebook
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus, sys
+
+diff --git a/test/test-push-notification b/test/test-push-notification
+index d972ad3..ecc6afb 100755
+--- a/test/test-push-notification
++++ b/test/test-push-notification
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+
+diff --git a/test/test-smart-messaging b/test/test-smart-messaging
+index f22efd2..188ac1e 100755
+--- a/test/test-smart-messaging
++++ b/test/test-smart-messaging
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+
+diff --git a/test/test-sms b/test/test-sms
+index 30ac651..49935e1 100755
+--- a/test/test-sms
++++ b/test/test-sms
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+ # -*- coding: utf-8 -*-
+
+ from gi.repository import GLib
+@@ -132,7 +132,7 @@ def stdin_handler(channel, condition, sms, value, number):
+ lock = "on"
+ if in_key == '0':
+ print_send_sms_menu()
+- sms_type = input('Select SMS type: ')
++ sms_type = raw_input('Select SMS type: ')
+
+ if sms_type == '1':
+ message_send(sms, number, value)
+@@ -150,49 +150,49 @@ def stdin_handler(channel, condition, sms, value, number):
+
+ elif in_key == '1':
+ message_delivery_report(sms, 1)
+- send_msg = input('Send test message[y/n]?: ')
++ send_msg = raw_input('Send test message[y/n]?: ')
+ if send_msg == 'y':
+ message_send(sms, number, ("(1)" + value +
+ ": UseDeliveryReports[TRUE]"))
+
+ elif in_key == '2':
+ message_delivery_report(sms, 0)
+- send_msg = input('Send test message[y/n]?: ')
++ send_msg = raw_input('Send test message[y/n]?: ')
+ if send_msg == 'y':
+ message_send(sms, number, ("(2) " + value +
+ ": UseDeliveryReports[FALSE]"))
+
+ elif in_key == '3':
+ message_service_center_address(sms, SCA)
+- send_msg = input('Send test message[y/n]?: ')
++ send_msg = raw_input('Send test message[y/n]?: ')
+ if send_msg == 'y':
+ message_send(sms, number, ("(3) " + value +
+ ": ServiceCenterAddress"))
+
+ elif in_key == '4':
+ message_bearer(sms, "ps-only")
+- send_msg = input('Send test message[y/n]?: ')
++ send_msg = raw_input('Send test message[y/n]?: ')
+ if send_msg == 'y':
+ message_send(sms, number, ("(4) " + value +
+ ": Bearer[ps-only]"))
+
+ elif in_key == '5':
+ message_bearer(sms, "cs-only")
+- send_msg = input('Send test message[y/n]?: ')
++ send_msg = raw_input('Send test message[y/n]?: ')
+ if send_msg == 'y':
+ message_send(sms, number, ("(5) " + value +
+ ": Bearer[cs-only]"))
+
+ elif in_key == '6':
+ message_bearer(sms, "ps-preferred")
+- send_msg = input('Send test message[y/n]?: ')
++ send_msg = raw_input('Send test message[y/n]?: ')
+ if send_msg == 'y':
+ message_send(sms, number, ("(6) " + value +
+ ": Bearer[ps-preferred]"))
+
+ elif in_key == '7':
+ message_bearer(sms, "cs-preferred")
+- send_msg = input('Send test message[y/n]?: ')
++ send_msg = raw_input('Send test message[y/n]?: ')
+ if send_msg == 'y':
+ message_send(sms,number, ("(7) " + value +
+ ": Bearer[cs-preferred]"))
+diff --git a/test/test-ss b/test/test-ss
+index 4cd8732..2c80806 100755
+--- a/test/test-ss
++++ b/test/test-ss
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import sys
+ import dbus
+diff --git a/test/test-ss-control-cb b/test/test-ss-control-cb
+index ddae6d3..86bac9b 100755
+--- a/test/test-ss-control-cb
++++ b/test/test-ss-control-cb
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+
+diff --git a/test/test-ss-control-cf b/test/test-ss-control-cf
+index 095eb5d..d30bf4f 100755
+--- a/test/test-ss-control-cf
++++ b/test/test-ss-control-cf
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+
+diff --git a/test/test-ss-control-cs b/test/test-ss-control-cs
+index 8180474..e0ed1d1 100755
+--- a/test/test-ss-control-cs
++++ b/test/test-ss-control-cs
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+
+diff --git a/test/test-stk-menu b/test/test-stk-menu
+index 0cf8fa2..ac0a5bd 100755
+--- a/test/test-stk-menu
++++ b/test/test-stk-menu
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ from gi.repository import GLib
+
+@@ -58,7 +58,7 @@ class StkAgent(dbus.service.Object):
+ index += 1
+
+ print("\nDefault: %d" % (default))
+- select = input("Enter Selection (t, b):")
++ select = raw_input("Enter Selection (t, b):")
+
+ if select == 'b':
+ raise GoBack("User wishes to go back")
+@@ -75,7 +75,7 @@ class StkAgent(dbus.service.Object):
+ print("DisplayText (%s)" % (title))
+ print("Icon: (%d)" % (int(icon)))
+ print("Urgent: (%d)" % (urgent))
+- key = input("Press return to clear ('t' terminates, "
++ key = raw_input("Press return to clear ('t' terminates, "
+ "'b' goes back, 'n' busy, "
+ "'w' return and wait):")
+
+@@ -108,7 +108,7 @@ class StkAgent(dbus.service.Object):
+ print("Hide typing: (%s)" % (hide_typing))
+ print("Enter characters, min: %d, max: %d:" % (min_chars,
+ max_chars))
+- userin = input("")
++ userin = raw_input("")
+
+ return userin
+
+@@ -122,7 +122,7 @@ class StkAgent(dbus.service.Object):
+ print("Hide typing: (%s)" % (hide_typing))
+ print("Enter digits, min: %d, max: %d:" % (min_chars,
+ max_chars))
+- userin = input("'t' terminates, 'b' goes back:")
++ userin = raw_input("'t' terminates, 'b' goes back:")
+
+ if userin == 'b':
+ raise GoBack("User wishes to go back")
+@@ -136,7 +136,7 @@ class StkAgent(dbus.service.Object):
+ def RequestKey(self, title, icon):
+ print("Title: (%s)" % (title))
+ print("Icon: (%d)" % (int(icon)))
+- key = input("Enter Key (t, b):")
++ key = raw_input("Enter Key (t, b):")
+
+ if key == 'b':
+ raise GoBack("User wishes to go back")
+@@ -150,7 +150,7 @@ class StkAgent(dbus.service.Object):
+ def RequestDigit(self, title, icon):
+ print("Title: (%s)" % (title))
+ print("Icon: (%d)" % (int(icon)))
+- key = input("Enter Digit (t, b):")
++ key = raw_input("Enter Digit (t, b):")
+
+ if key == 'b':
+ raise GoBack("User wishes to go back")
+@@ -164,7 +164,7 @@ class StkAgent(dbus.service.Object):
+ def RequestQuickDigit(self, title, icon):
+ print("Title: (%s)" % (title))
+ print("Icon: (%d)" % (int(icon)))
+- key = input("Quick digit (0-9, *, #, t, b):")
++ key = raw_input("Quick digit (0-9, *, #, t, b):")
+
+ if key == 'b':
+ raise GoBack("User wishes to go back")
+@@ -178,7 +178,7 @@ class StkAgent(dbus.service.Object):
+ def RequestConfirmation(self, title, icon):
+ print("Title: (%s)" % (title))
+ print("Icon: (%d)" % (int(icon)))
+- key = input("Enter Confirmation (t, b, y, n):")
++ key = raw_input("Enter Confirmation (t, b, y, n):")
+
+ if key == 'b':
+ raise GoBack("User wishes to go back")
+@@ -194,7 +194,7 @@ class StkAgent(dbus.service.Object):
+ def ConfirmCallSetup(self, info, icon):
+ print("Information: (%s)" % (info))
+ print("Icon: (%d)" % (int(icon)))
+- key = input("Enter Confirmation (t, y, n):")
++ key = raw_input("Enter Confirmation (t, y, n):")
+
+ if key == 't':
+ raise EndSession("User wishes to terminate session")
+@@ -209,7 +209,7 @@ class StkAgent(dbus.service.Object):
+ print("Information: (%s)" % (info))
+ print("Icon: (%d)" % (int(icon)))
+ print("URL (%s)" % (url))
+- key = input("Enter Confirmation (y, n):")
++ key = raw_input("Enter Confirmation (y, n):")
+
+ if key == 'y':
+ return True
+@@ -232,7 +232,7 @@ class StkAgent(dbus.service.Object):
+ signal.alarm(5)
+
+ try:
+- key = input("Press return to end before end of"
++ key = raw_input("Press return to end before end of"
+ " single tone (t):")
+ signal.alarm(0)
+
+@@ -250,7 +250,7 @@ class StkAgent(dbus.service.Object):
+ print("LoopTone: %s" % (tone))
+ print("Text: %s" % (text))
+ print("Icon: %d" % (int(icon)))
+- key = input("Press return to end before timeout "
++ key = raw_input("Press return to end before timeout "
+ "('t' terminates, 'w' return and wait):")
+
+ if key == 'w':
+@@ -279,7 +279,7 @@ class StkAgent(dbus.service.Object):
+ def DisplayAction(self, text, icon):
+ print("Text: (%s)" % (text))
+ print("Icon: (%d)" % (int(icon)))
+- key = input("Press 't' to terminate the session ")
++ key = raw_input("Press 't' to terminate the session ")
+
+ if key == 't':
+ raise EndSession("User wishes to terminate session")
+@@ -289,7 +289,7 @@ class StkAgent(dbus.service.Object):
+ def ConfirmOpenChannel(self, info, icon):
+ print("Open channel confirmation: (%s)" % (info))
+ print("Icon: (%d)" % (int(icon)))
+- key = input("Enter Confirmation (t, y, n):")
++ key = raw_input("Enter Confirmation (t, y, n):")
+
+ if key == 't':
+ raise EndSession("User wishes to terminate session")
+@@ -299,7 +299,7 @@ class StkAgent(dbus.service.Object):
+ return False
+
+ _dbus2py = {
+- dbus.String : str,
++ dbus.String : unicode,
+ dbus.UInt32 : int,
+ dbus.Int32 : int,
+ dbus.Int16 : int,
+@@ -396,7 +396,7 @@ if __name__ == '__main__':
+ except:
+ pass
+
+- select = int(input("Enter Selection: "))
++ select = int(raw_input("Enter Selection: "))
+ stk.SelectItem(select, path)
+ elif mode == 'agent':
+ path = "/test/agent"
+diff --git a/test/unlock-pin b/test/unlock-pin
+index 61f4765..10b6626 100755
+--- a/test/unlock-pin
++++ b/test/unlock-pin
+@@ -1,4 +1,4 @@
+-#!/usr/bin/python3
++#!/usr/bin/python
+
+ import dbus
+ import sys
+--
+1.9.1
+
diff --git a/meta/recipes-connectivity/ofono/ofono_1.14.bb b/meta/recipes-connectivity/ofono/ofono_1.14.bb
deleted file mode 100644
index dea5d8808f..0000000000
--- a/meta/recipes-connectivity/ofono/ofono_1.14.bb
+++ /dev/null
@@ -1,13 +0,0 @@
-require ofono.inc
-
-PR = "r1"
-
-SRC_URI = "\
- ${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \
- file://ofono \
-"
-
-SRC_URI[md5sum] = "8bc398d86642408cc71d039f59c61538"
-SRC_URI[sha256sum] = "84d28d37cbc47129628a78bf3e17323af1636dceb2494511dd44caa829fb277f"
-
-CFLAGS_append_libc-uclibc = " -D_GNU_SOURCE"
diff --git a/meta/recipes-connectivity/ofono/ofono_1.15.bb b/meta/recipes-connectivity/ofono/ofono_1.15.bb
new file mode 100644
index 0000000000..bee2b4745c
--- /dev/null
+++ b/meta/recipes-connectivity/ofono/ofono_1.15.bb
@@ -0,0 +1,12 @@
+require ofono.inc
+
+SRC_URI = "\
+ ${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \
+ file://ofono \
+ file://Revert-test-Convert-to-Python-3.patch \
+"
+
+SRC_URI[md5sum] = "4d03de85239d8100dc7721bf0dad2bd2"
+SRC_URI[sha256sum] = "978807a05e8904eb4e57d6533ed71e75676a55fa3819a39fe2c878f45dbf7af6"
+
+CFLAGS_append_libc-uclibc = " -D_GNU_SOURCE"
diff --git a/meta/recipes-connectivity/openssh/openssh/openssh-CVE-2014-2532.patch b/meta/recipes-connectivity/openssh/openssh/openssh-CVE-2014-2532.patch
deleted file mode 100644
index 3deaf3f0e9..0000000000
--- a/meta/recipes-connectivity/openssh/openssh/openssh-CVE-2014-2532.patch
+++ /dev/null
@@ -1,22 +0,0 @@
-Upstream-Status: Backport
-
-Fix for CVE-2014-2532
-
-Backported from openssh-6.6p1.tar.gz
-
-Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
----
---- a/session.c
-+++ b/session.c
-@@ -955,6 +955,11 @@
- u_int envsize;
- u_int i, namelen;
-
-+ if (strchr(name, '=') != NULL) {
-+ error("Invalid environment variable \"%.100s\"", name);
-+ return;
-+ }
-+
- /*
- * If we're passed an uninitialized list, allocate a single null
- * entry before continuing.
diff --git a/meta/recipes-connectivity/openssh/openssh/sshd@.service b/meta/recipes-connectivity/openssh/openssh/sshd@.service
index 4eda6592f5..bb2d68e96a 100644
--- a/meta/recipes-connectivity/openssh/openssh/sshd@.service
+++ b/meta/recipes-connectivity/openssh/openssh/sshd@.service
@@ -8,3 +8,4 @@ ExecStart=-@SBINDIR@/sshd -i
ExecReload=@BASE_BINDIR@/kill -HUP $MAINPID
StandardInput=socket
StandardError=syslog
+KillMode=process
diff --git a/meta/recipes-connectivity/openssh/openssh_6.6p1.bb b/meta/recipes-connectivity/openssh/openssh_6.6p1.bb
index 047a895aae..abc302b90f 100644
--- a/meta/recipes-connectivity/openssh/openssh_6.6p1.bb
+++ b/meta/recipes-connectivity/openssh/openssh_6.6p1.bb
@@ -23,7 +23,6 @@ SRC_URI = "ftp://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/openssh-${PV}.tar.
file://volatiles.99_sshd \
file://add-test-support-for-busybox.patch \
file://run-ptest \
- file://openssh-CVE-2014-2532.patch \
file://openssh-CVE-2014-2653.patch \
file://auth2-none.c-avoid-authenticate-empty-passwords-to-m.patch"
@@ -50,7 +49,6 @@ inherit autotools-brokensep ptest
# LFS support:
CFLAGS += "-D__FILE_OFFSET_BITS=64"
-export LD = "${CC}"
# login path is hardcoded in sshd
EXTRA_OECONF = "'LOGIN_PROGRAM=${base_bindir}/login' \
@@ -75,16 +73,14 @@ CACHED_CONFIGUREVARS += "ac_cv_path_PATH_PASSWD_PROG=${bindir}/passwd"
EXTRA_OECONF_append_libc-uclibc=" --without-pam"
do_configure_prepend () {
+ export LD="${CC}"
+ install -m 0644 ${WORKDIR}/sshd_config ${B}/
+ install -m 0644 ${WORKDIR}/ssh_config ${B}/
if [ ! -e acinclude.m4 -a -e aclocal.m4 ]; then
cp aclocal.m4 acinclude.m4
fi
}
-do_compile_append () {
- install -m 0644 ${WORKDIR}/sshd_config ${S}/
- install -m 0644 ${WORKDIR}/ssh_config ${S}/
-}
-
do_install_append () {
if [ "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}" = "pam" ]; then
install -D -m 0755 ${WORKDIR}/sshd ${D}${sysconfdir}/pam.d/sshd
diff --git a/meta/recipes-connectivity/openssl/openssl.inc b/meta/recipes-connectivity/openssl/openssl.inc
index a99953e4bc..9ec884f332 100644
--- a/meta/recipes-connectivity/openssl/openssl.inc
+++ b/meta/recipes-connectivity/openssl/openssl.inc
@@ -9,6 +9,7 @@ LICENSE = "openssl"
LIC_FILES_CHKSUM = "file://LICENSE;md5=f9a8f968107345e0b75aa8c2ecaa7ec8"
DEPENDS = "perl-native-runtime"
+DEPENDS_append_class-target = " openssl-native"
SRC_URI = "http://www.openssl.org/source/openssl-${PV}.tar.gz \
"
@@ -61,6 +62,7 @@ do_configure () {
os=${HOST_OS}
case $os in
+ linux-uclibc |\
linux-uclibceabi |\
linux-gnueabi |\
linux-uclibcspe |\
@@ -137,6 +139,10 @@ do_configure () {
perl ./Configure ${EXTRA_OECONF} shared --prefix=$useprefix --openssldir=${libdir}/ssl --libdir=`basename ${libdir}` $target
}
+do_compile_prepend_class-target () {
+ sed -i 's/\((OPENSSL=\)".*"/\1"openssl"/' Makefile
+}
+
do_compile () {
oe_runmake
}
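
The do_compile_prepend_class-target added above makes the target build invoke
the newly added openssl-native dependency from PATH rather than an absolute
host path recorded in the Makefile at configure time. A hedged before/after of
the line the sed rewrites (the original value shown here is illustrative, not
taken from this diff):

    # before (illustrative):  (OPENSSL="/some/build-host/path/openssl"; ...
    # after the sed:          (OPENSSL="openssl"; ...
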
diff --git a/meta/recipes-connectivity/openssl/openssl_1.0.1i.bb b/meta/recipes-connectivity/openssl/openssl_1.0.1j.bb
index 4f88784056..2da18aead7 100644
--- a/meta/recipes-connectivity/openssl/openssl_1.0.1i.bb
+++ b/meta/recipes-connectivity/openssl/openssl_1.0.1j.bb
@@ -38,8 +38,8 @@ SRC_URI += "file://configure-targets.patch \
file://run-ptest \
"
-SRC_URI[md5sum] = "c8dc151a671b9b92ff3e4c118b174972"
-SRC_URI[sha256sum] = "3c179f46ca77069a6a0bac70212a9b3b838b2f66129cb52d568837fc79d8fcc7"
+SRC_URI[md5sum] = "f7175c9cd3c39bb1907ac8bba9df8ed3"
+SRC_URI[sha256sum] = "1b60ca8789ba6f03e8ef20da2293b8dc131c39d83814e775069f02d26354edf3"
PACKAGES =+ " \
${PN}-engines \
diff --git a/meta/recipes-connectivity/portmap/portmap.inc b/meta/recipes-connectivity/portmap/portmap.inc
index 7588341c8e..f5f7fde8be 100644
--- a/meta/recipes-connectivity/portmap/portmap.inc
+++ b/meta/recipes-connectivity/portmap/portmap.inc
@@ -10,12 +10,15 @@ SRC_URI = "${DEBIAN_MIRROR}/main/p/portmap/portmap_5.orig.tar.gz \
${DEBIAN_MIRROR}/main/p/portmap/portmap_${PV}.diff.gz \
file://portmap.init \
file://make.patch;apply=yes"
+
S = "${WORKDIR}/portmap_5beta"
INITSCRIPT_NAME = "portmap"
-INITSCRIPT_PARAMS = "start 43 S . start 32 0 6 . stop 81 1 ."
+INITSCRIPT_PARAMS = "start 10 2 3 4 5 . stop 32 0 1 6 ."
+
+inherit update-rc.d systemd
-inherit update-rc.d
+SYSTEMD_SERVICE_${PN} = "portmap.service"
sbindir = "/sbin"
diff --git a/meta/recipes-connectivity/portmap/portmap/portmap.service b/meta/recipes-connectivity/portmap/portmap/portmap.service
new file mode 100644
index 0000000000..7ef9d7b02e
--- /dev/null
+++ b/meta/recipes-connectivity/portmap/portmap/portmap.service
@@ -0,0 +1,10 @@
+[Unit]
+Description=The RPC portmapper
+After=network.target
+
+[Service]
+Type=forking
+ExecStart=@BASE_SBINDIR@/portmap
+
+[Install]
+WantedBy=multi-user.target
diff --git a/meta/recipes-connectivity/portmap/portmap_6.0.bb b/meta/recipes-connectivity/portmap/portmap_6.0.bb
index b0a9454c48..8b65a03346 100644
--- a/meta/recipes-connectivity/portmap/portmap_6.0.bb
+++ b/meta/recipes-connectivity/portmap/portmap_6.0.bb
@@ -5,7 +5,8 @@ PR = "r9"
SRC_URI = "http://www.sourcefiles.org/Networking/Tools/Miscellanenous/portmap-6.0.tgz \
file://destdir-no-strip.patch \
file://tcpd-config.patch \
- file://portmap.init"
+ file://portmap.init \
+ file://portmap.service"
SRC_URI[md5sum] = "ac108ab68bf0f34477f8317791aaf1ff"
SRC_URI[sha256sum] = "02c820d39f3e6e729d1bea3287a2d8a6c684f1006fb9612f97dcad4a281d41de"
@@ -23,4 +24,8 @@ fakeroot do_install() {
install -d ${D}${mandir}/man8/ ${D}${base_sbindir} ${D}${sysconfdir}/init.d
install -m 0755 ${WORKDIR}/portmap.init ${D}${sysconfdir}/init.d/portmap
oe_runmake install DESTDIR=${D}
+
+ install -d ${D}${systemd_unitdir}/system
+ install -m 0644 ${WORKDIR}/portmap.service ${D}${systemd_unitdir}/system
+ sed -i -e 's,@BASE_SBINDIR@,${base_sbindir},g' ${D}${systemd_unitdir}/system/portmap.service
}
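
After the sed substitution above, the installed unit refers to the real
base_sbindir. Assuming the default value of /sbin, the ExecStart line in the
installed portmap.service becomes:

    ExecStart=/sbin/portmap
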
diff --git a/meta/recipes-connectivity/ppp/ppp_2.4.6.bb b/meta/recipes-connectivity/ppp/ppp_2.4.7.bb
index 8bc3672600..92ca0c92c6 100644
--- a/meta/recipes-connectivity/ppp/ppp_2.4.6.bb
+++ b/meta/recipes-connectivity/ppp/ppp_2.4.7.bb
@@ -31,8 +31,8 @@ SRC_URI = "http://ppp.samba.org/ftp/ppp/ppp-${PV}.tar.gz \
file://ppp@.service \
"
-SRC_URI[md5sum] = "3434d2cc9327167a0723aaaa8670083b"
-SRC_URI[sha256sum] = "1b33181a03962c8a092c055fb9980e9722728a8d98a4bb7ec7acda17c1b1b49d"
+SRC_URI[md5sum] = "78818f40e6d33a1d1de68a1551f6595a"
+SRC_URI[sha256sum] = "02e0a3dd3e4799e33103f70ec7df75348c8540966ee7c948e4ed8a42bbccfb30"
inherit autotools-brokensep systemd
diff --git a/meta/recipes-connectivity/resolvconf/resolvconf/99_resolvconf b/meta/recipes-connectivity/resolvconf/resolvconf/99_resolvconf
new file mode 100644
index 0000000000..3790d774a7
--- /dev/null
+++ b/meta/recipes-connectivity/resolvconf/resolvconf/99_resolvconf
@@ -0,0 +1,4 @@
+d root root 0755 /var/run/resolvconf/interface none
+f root root 0644 /etc/resolvconf/run/resolv.conf none
+f root root 0644 /etc/resolvconf/run/enable-updates none
+l root root 0644 /etc/resolv.conf /etc/resolvconf/run/resolv.conf
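
The new 99_resolvconf file uses the volatiles format read by the initscripts
populate-volatile.sh helper: <type> <owner> <group> <mode> <path> <link
source|none>, where "d" creates a directory, "f" an empty file and "l" a
symlink. A hypothetical extra entry would look like:

    d root root 0755 /var/run/example none
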
diff --git a/meta/recipes-connectivity/resolvconf/resolvconf/fix-path-for-busybox.patch b/meta/recipes-connectivity/resolvconf/resolvconf/fix-path-for-busybox.patch
new file mode 100644
index 0000000000..1aead07869
--- /dev/null
+++ b/meta/recipes-connectivity/resolvconf/resolvconf/fix-path-for-busybox.patch
@@ -0,0 +1,20 @@
+
+busybox installs readlink into /usr/bin, so ensure /usr/bin
+is in the path.
+
+Upstream-Status: Submitted
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+
+Index: resolvconf-1.76/etc/resolvconf/update.d/libc
+===================================================================
+--- resolvconf-1.76.orig/etc/resolvconf/update.d/libc
++++ resolvconf-1.76/etc/resolvconf/update.d/libc
+@@ -16,7 +16,7 @@
+ #
+
+ set -e
+-PATH=/sbin:/bin
++PATH=/sbin:/bin:/usr/bin
+
+ [ -x /lib/resolvconf/list-records ] || exit 1
+
diff --git a/meta/recipes-connectivity/resolvconf/resolvconf_1.75.bb b/meta/recipes-connectivity/resolvconf/resolvconf_1.76.bb
index 7310c837e9..20a2c19328 100644
--- a/meta/recipes-connectivity/resolvconf/resolvconf_1.75.bb
+++ b/meta/recipes-connectivity/resolvconf/resolvconf_1.76.bb
@@ -11,10 +11,14 @@ AUTHOR = "Thomas Hood"
HOMEPAGE = "http://packages.debian.org/resolvconf"
RDEPENDS_${PN} = "bash"
-SRC_URI = "${DEBIAN_MIRROR}/main/r/resolvconf/resolvconf_${PV}.tar.xz"
+SRC_URI = "${DEBIAN_MIRROR}/main/r/resolvconf/resolvconf_${PV}.tar.xz \
+ file://fix-path-for-busybox.patch \
+ file://99_resolvconf \
+ "
-SRC_URI[md5sum] = "4b8bc86a3cf070e3fd0e9aff7eaaba56"
-SRC_URI[sha256sum] = "16167f37a77ef4bc4596dcbefece269b6a10d10fa448594ec55ed3303193086e"
+
+SRC_URI[md5sum] = "d78ce30ea068999cd3e0523300b27255"
+SRC_URI[sha256sum] = "c9f40f7405b37399ddbf29ca4205b4911ee35cb9ffd9be7671faa2385b1fa573"
inherit allarch
@@ -24,13 +28,13 @@ do_compile () {
do_install () {
install -d ${D}${sysconfdir}/default/volatiles
- echo "d root root 0755 ${localstatedir}/run/${BPN}/interface none" \
- > ${D}${sysconfdir}/default/volatiles/99_resolvconf
+ install -m 0644 ${WORKDIR}/99_resolvconf ${D}${sysconfdir}/default/volatiles
if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
install -d ${D}${sysconfdir}/tmpfiles.d
echo "d /run/${BPN}/interface - - - -" \
> ${D}${sysconfdir}/tmpfiles.d/resolvconf.conf
fi
+ install -d ${D}${base_libdir}/${BPN}
install -d ${D}${sysconfdir}/${BPN}
ln -snf ${localstatedir}/run/${BPN} ${D}${sysconfdir}/${BPN}/run
install -d ${D}${sysconfdir} ${D}${base_sbindir}
@@ -38,6 +42,11 @@ do_install () {
cp -pPR etc/* ${D}${sysconfdir}/
chown -R root:root ${D}${sysconfdir}/
install -m 0755 bin/resolvconf ${D}${base_sbindir}/
+ install -m 0755 bin/list-records ${D}${base_libdir}/${BPN}
+ install -d ${D}/${sysconfdir}/network/if-up.d
+ install -m 0755 debian/resolvconf.000resolvconf.if-up ${D}/${sysconfdir}/network/if-up.d/000resolvconf
+ install -d ${D}/${sysconfdir}/network/if-down.d
+ install -m 0755 debian/resolvconf.resolvconf.if-down ${D}/${sysconfdir}/network/if-down.d/resolvconf
install -m 0644 README ${D}${docdir}/${P}/
install -m 0644 man/resolvconf.8 ${D}${mandir}/man8/
}
@@ -51,3 +60,5 @@ pkg_postinst_${PN} () {
fi
fi
}
+
+FILES_${PN} += "${base_libdir}/${BPN}"
diff --git a/meta/recipes-connectivity/socat/socat/socat-1.7.2.4-linux-3.17.patch b/meta/recipes-connectivity/socat/socat/socat-1.7.2.4-linux-3.17.patch
new file mode 100644
index 0000000000..9152df533f
--- /dev/null
+++ b/meta/recipes-connectivity/socat/socat/socat-1.7.2.4-linux-3.17.patch
@@ -0,0 +1,29 @@
+socat: fix compile error against 3.17+ kernel headers
+
+With the linux 3.17 kernel socat's configure detects the wrong include for errqueue.h
+which results in a compilation error.
+
+Backporting the gentoo patch from:
+
+ http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/net-misc/socat/files/socat-1.7.2.4-linux-3.17.patch
+
+fixes the build issue.
+
+Upstream-Status: Backport
+
+Signed-off-by: Bruce Ashfield <bruce.ashfield@windriver.com>
+
+--- a/configure.in
++++ b/configure.in
+@@ -80,7 +80,10 @@
+ AC_CHECK_HEADERS(termios.h linux/if_tun.h)
+ AC_CHECK_HEADERS(net/if_dl.h)
+ AC_CHECK_HEADERS(linux/types.h)
+-AC_CHECK_HEADER(linux/errqueue.h, AC_DEFINE(HAVE_LINUX_ERRQUEUE_H), [], [#include <linux/types.h>])
++AC_CHECK_HEADER(linux/errqueue.h, AC_DEFINE(HAVE_LINUX_ERRQUEUE_H), [], [AC_INCLUDES_DEFAULT
++ #if HAVE_LINUX_TYPES_H
++ #include <linux/types.h>
++ #endif])
+ AC_CHECK_HEADERS(sys/utsname.h sys/select.h sys/file.h)
+ AC_CHECK_HEADERS(util.h bsd/libutil.h libutil.h sys/stropts.h regex.h)
+ AC_CHECK_HEADERS(linux/fs.h linux/ext2_fs.h)
diff --git a/meta/recipes-connectivity/socat/socat_1.7.2.4.bb b/meta/recipes-connectivity/socat/socat_1.7.2.4.bb
index efa3b91c16..f4461de49e 100644
--- a/meta/recipes-connectivity/socat/socat_1.7.2.4.bb
+++ b/meta/recipes-connectivity/socat/socat_1.7.2.4.bb
@@ -14,6 +14,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
SRC_URI = "http://www.dest-unreach.org/socat/download/socat-${PV}.tar.bz2 \
file://fix-cross-compiling-failed.patch \
+ file://socat-1.7.2.4-linux-3.17.patch \
"
SRC_URI[md5sum] = "69b8155dd442a6f24e28ef5407d868eb"
diff --git a/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant.inc b/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant.inc
index 3a1cce0657..6f0de3cab8 100644
--- a/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant.inc
+++ b/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant.inc
@@ -26,8 +26,8 @@ SRC_URI = "http://hostap.epitest.fi/releases/wpa_supplicant-${PV}.tar.gz \
file://99_wpa_supplicant \
file://fix-libnl3-host-contamination.patch \
"
-SRC_URI[md5sum] = "238e8e888bbd558e1a57e3eb28d1dd07"
-SRC_URI[sha256sum] = "e0d8b8fd68a659636eaba246bb2caacbf53d22d53b2b6b90eb4b4fef0993c8ed"
+SRC_URI[md5sum] = "f2ed8fef72cf63d8d446a2d0a6da630a"
+SRC_URI[sha256sum] = "eaaa5bf3055270e521b2dff64f2d203ec8040f71958b8588269a82c00c9d7b6a"
S = "${WORKDIR}/wpa_supplicant-${PV}"
@@ -38,8 +38,10 @@ FILES_${PN} += "${datadir}/dbus-1/system-services/*"
CONFFILES_${PN} += "${sysconfdir}/wpa_supplicant.conf"
do_configure () {
+ ${MAKE} -C wpa_supplicant clean
install -m 0755 ${WORKDIR}/defconfig wpa_supplicant/.config
echo "CFLAGS +=\"-I${STAGING_INCDIR}/libnl3\"" >> wpa_supplicant/.config
+ echo "DRV_CFLAGS +=\"-I${STAGING_INCDIR}/libnl3\"" >> wpa_supplicant/.config
if echo "${PACKAGECONFIG}" | grep -qw "openssl"; then
ssl=openssl
@@ -50,6 +52,8 @@ do_configure () {
sed -i "s/%ssl%/$ssl/" wpa_supplicant/.config
fi
+ # For rebuild
+ rm -f wpa_supplicant/*.d wpa_supplicant/dbus/*.d
}
export EXTRA_CFLAGS = "${CFLAGS}"
diff --git a/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/fix-libnl3-host-contamination.patch b/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/fix-libnl3-host-contamination.patch
index eb8036f50c..e899c1655d 100644
--- a/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/fix-libnl3-host-contamination.patch
+++ b/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/fix-libnl3-host-contamination.patch
@@ -15,15 +15,15 @@ diff --git a/src/drivers/drivers.mak b/src/drivers/drivers.mak
index 68ff910..1f38f57 100644
--- a/src/drivers/drivers.mak
+++ b/src/drivers/drivers.mak
-@@ -30,7 +30,7 @@ NEED_RFKILL=y
+@@ -35,7 +35,7 @@ NEED_RFKILL=y
ifdef CONFIG_LIBNL32
DRV_LIBS += -lnl-3
DRV_LIBS += -lnl-genl-3
- DRV_CFLAGS += -DCONFIG_LIBNL20 -I/usr/include/libnl3
+ DRV_CFLAGS += -DCONFIG_LIBNL20
- else
- ifdef CONFIG_LIBNL_TINY
- DRV_LIBS += -lnl-tiny
+ ifdef CONFIG_LIBNL3_ROUTE
+ DRV_LIBS += -lnl-route-3
+ DRV_CFLAGS += -DCONFIG_LIBNL3_ROUTE
diff --git a/src/drivers/drivers.mk b/src/drivers/drivers.mk
index db8561a..c93e88d 100644
--- a/src/drivers/drivers.mk
@@ -34,9 +34,9 @@ index db8561a..c93e88d 100644
DRV_LIBS += -lnl-genl-3
- DRV_CFLAGS += -DCONFIG_LIBNL20 -I/usr/include/libnl3
+ DRV_CFLAGS += -DCONFIG_LIBNL20
- else
- ifdef CONFIG_LIBNL_TINY
- DRV_LIBS += -lnl-tiny
+ ifdef CONFIG_LIBNL3_ROUTE
+ DRV_LIBS += -lnl-route-3
+ DRV_CFLAGS += -DCONFIG_LIBNL3_ROUTE
--
1.7.10.4
diff --git a/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.2.bb b/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.3.bb
index afd0654016..afd0654016 100644
--- a/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.2.bb
+++ b/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.3.bb
diff --git a/meta/recipes-core/base-files/base-files_3.0.14.bb b/meta/recipes-core/base-files/base-files_3.0.14.bb
index 07f5c54c97..6157ff381a 100644
--- a/meta/recipes-core/base-files/base-files_3.0.14.bb
+++ b/meta/recipes-core/base-files/base-files_3.0.14.bb
@@ -151,4 +151,5 @@ FILES_${PN}-doc = "${docdir} ${datadir}/common-licenses"
PACKAGE_ARCH = "${MACHINE_ARCH}"
CONFFILES_${PN} = "${sysconfdir}/fstab ${@['', '${sysconfdir}/hostname'][(d.getVar('hostname', True) != '')]} ${sysconfdir}/shells"
+CONFFILES_${PN} += "${sysconfdir}/motd ${sysconfdir}/nsswitch.conf ${sysconfdir}/profile"
diff --git a/meta/recipes-core/busybox/busybox.inc b/meta/recipes-core/busybox/busybox.inc
index bd66e4f148..0769d92c50 100644
--- a/meta/recipes-core/busybox/busybox.inc
+++ b/meta/recipes-core/busybox/busybox.inc
@@ -23,7 +23,7 @@ PACKAGES =+ "${PN}-httpd ${PN}-udhcpd ${PN}-udhcpc ${PN}-syslog ${PN}-mdev ${PN}
FILES_${PN}-httpd = "${sysconfdir}/init.d/busybox-httpd /srv/www"
FILES_${PN}-syslog = "${sysconfdir}/init.d/syslog* ${sysconfdir}/syslog-startup.conf* ${sysconfdir}/syslog.conf* ${systemd_unitdir}/system/syslog.service ${sysconfdir}/default/busybox-syslog"
-FILES_${PN}-mdev = "${sysconfdir}/init.d/mdev ${sysconfdir}/mdev.conf"
+FILES_${PN}-mdev = "${sysconfdir}/init.d/mdev ${sysconfdir}/mdev.conf ${sysconfdir}/mdev/*"
FILES_${PN}-udhcpd = "${sysconfdir}/init.d/busybox-udhcpd"
FILES_${PN}-udhcpc = "${sysconfdir}/udhcpc.d ${datadir}/udhcpc"
FILES_${PN}-hwclock = "${sysconfdir}/init.d/hwclock.sh"
@@ -268,6 +268,9 @@ do_install () {
install -m 0755 ${WORKDIR}/mdev ${D}${sysconfdir}/init.d/mdev
if grep "CONFIG_FEATURE_MDEV_CONF=y" ${B}/.config; then
install -m 644 ${WORKDIR}/mdev.conf ${D}${sysconfdir}/mdev.conf
+ install -d ${D}${sysconfdir}/mdev
+ install -m 0755 ${WORKDIR}/find-touchscreen.sh ${D}${sysconfdir}/mdev
+ install -m 0755 ${WORKDIR}/mdev-mount.sh ${D}${sysconfdir}/mdev
fi
fi
diff --git a/meta/recipes-core/busybox/busybox/busybox-cross-menuconfig.patch b/meta/recipes-core/busybox/busybox/busybox-cross-menuconfig.patch
new file mode 100644
index 0000000000..781f5af14a
--- /dev/null
+++ b/meta/recipes-core/busybox/busybox/busybox-cross-menuconfig.patch
@@ -0,0 +1,71 @@
+From: Jason Wessel <jason.wessel@windriver.com>
+Date: Sun, 3 Mar 2013 12:31:40 -0600
+Subject: [PATCH] menuconfig,check-lxdialog.sh: Allow specification of ncurses location
+
+Upstream-Status: Submitted
+
+[ based on: https://lkml.org/lkml/2013/3/3/103 ]
+
+This patch syncs up with the way menuconfig detects ncurses / curses
+and the way HOST_EXTRACFLAGS works in the Linux kernel, and it
+allows menuconfig to work with a sysroot version of the curses
+libraries.
+
+---
+
+Some cross build environments, such as the Yocto Project build
+environment, provide an ncurses library that is compiled
+differently than the host's version. This causes display corruption
+problems when the host's curses includes are used instead of the
+includes from the provided compiler. There is a second case where
+there are no curses libraries at all on the host system and
+menuconfig will just fail entirely.
+
+The solution is simply to allow an override variable in
+check-lxdialog.sh for environments such as the Yocto Project. Adding
+CROSS_CURSES_LIB and CROSS_CURSES_INC solves the issue and allows
+compiling and linking against the right headers and libraries.
+
+Signed-off-by: Jason Wessel <jason.wessel@windriver.com>
+cc: Michal Marek <mmarek@suse.cz>
+cc: linux-kbuild@vger.kernel.org
+---
+ scripts/kconfig/lxdialog/Makefile | 2 +-
+ scripts/kconfig/lxdialog/check-lxdialog.sh | 8 ++++++++
+ 2 files changed, 9 insertions(+), 1 deletion(-)
+
+--- a/scripts/kconfig/lxdialog/check-lxdialog.sh
++++ b/scripts/kconfig/lxdialog/check-lxdialog.sh
+@@ -4,6 +4,10 @@
+ # What library to link
+ ldflags()
+ {
++ if [ x"$CROSS_CURSES_LIB" != x ]; then
++ echo "$CROSS_CURSES_LIB"
++ exit
++ fi
+ for ext in so a dylib ; do
+ for lib in ncursesw ncurses curses ; do
+ $cc -print-file-name=lib${lib}.${ext} | grep -q /
+@@ -19,6 +23,10 @@ ldflags()
+ # Where is ncurses.h?
+ ccflags()
+ {
++ if [ x"$CROSS_CURSES_INC" != x ]; then
++ echo "$CROSS_CURSES_INC"
++ exit
++ fi
+ if [ -f /usr/include/ncursesw/ncurses.h ]; then
+ echo '-I/usr/include/ncursesw -DCURSES_LOC="<ncurses.h>"'
+ elif [ -f /usr/include/ncursesw/curses.h ]; then
+--- a/scripts/kconfig/lxdialog/Makefile
++++ b/scripts/kconfig/lxdialog/Makefile
+@@ -5,7 +5,7 @@ check-lxdialog := $(srctree)/$(src)/che
+
+ # Use reursively expanded variables so we do not call gcc unless
+ # we really need to do so. (Do not call gcc as part of make mrproper)
+-HOST_EXTRACFLAGS = $(shell $(CONFIG_SHELL) $(check-lxdialog) -ccflags)
++HOST_EXTRACFLAGS += $(shell $(CONFIG_SHELL) $(check-lxdialog) -ccflags)
+ HOST_LOADLIBES = $(shell $(CONFIG_SHELL) $(check-lxdialog) -ldflags $(HOSTCC))
+
+ HOST_EXTRACFLAGS += -DLOCALE
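The override works because either variable, when set, is simply echoed back
and short-circuits the corresponding probe in check-lxdialog.sh, so the
host's curses headers and libraries are never consulted. A minimal usage
sketch, assuming a cross sysroot at $SYSROOT that ships ncurses (the paths
and flags below are illustrative only, not part of this commit):

    # Hypothetical invocation: point the kconfig curses check at the sysroot
    CROSS_CURSES_LIB="-lncursesw" \
    CROSS_CURSES_INC="-I$SYSROOT/usr/include -DCURSES_LOC=\"<curses.h>\"" \
    make menuconfig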
diff --git a/meta/recipes-core/busybox/busybox/defconfig b/meta/recipes-core/busybox/busybox/defconfig
index 35f1026d65..8394067bed 100644
--- a/meta/recipes-core/busybox/busybox/defconfig
+++ b/meta/recipes-core/busybox/busybox/defconfig
@@ -159,7 +159,7 @@ CONFIG_FEATURE_TAR_GNU_EXTENSIONS=y
# CONFIG_FEATURE_TAR_LONG_OPTIONS is not set
# CONFIG_FEATURE_TAR_TO_COMMAND is not set
# CONFIG_FEATURE_TAR_UNAME_GNAME is not set
-# CONFIG_FEATURE_TAR_NOPRESERVE_TIME is not set
+CONFIG_FEATURE_TAR_NOPRESERVE_TIME=y
# CONFIG_FEATURE_TAR_SELINUX is not set
# CONFIG_UNCOMPRESS is not set
# CONFIG_UNLZMA is not set
diff --git a/meta/recipes-core/busybox/busybox_1.22.1.bb b/meta/recipes-core/busybox/busybox_1.22.1.bb
index edee4a9963..dd61a2680c 100644
--- a/meta/recipes-core/busybox/busybox_1.22.1.bb
+++ b/meta/recipes-core/busybox/busybox_1.22.1.bb
@@ -20,6 +20,7 @@ SRC_URI = "http://www.busybox.net/downloads/busybox-${PV}.tar.bz2;name=tarball \
file://busybox-syslog.default \
file://mdev \
file://mdev.conf \
+ file://mdev-mount.sh \
file://umount.busybox \
file://defconfig \
file://busybox-syslog.service.in \
@@ -31,6 +32,7 @@ SRC_URI = "http://www.busybox.net/downloads/busybox-${PV}.tar.bz2;name=tarball \
file://login-utilities.cfg \
file://0001-build-system-Specify-nostldlib-when-linking-to-.o-fi.patch \
file://recognize_connmand.patch \
+ file://busybox-cross-menuconfig.patch \
"
SRC_URI[tarball.md5sum] = "337d1a15ab1cb1d4ed423168b1eb7d7e"
diff --git a/meta/recipes-core/busybox/busybox_git.bb b/meta/recipes-core/busybox/busybox_git.bb
index f2cc119400..f91b552f78 100644
--- a/meta/recipes-core/busybox/busybox_git.bb
+++ b/meta/recipes-core/busybox/busybox_git.bb
@@ -24,6 +24,7 @@ SRC_URI = "git://busybox.net/busybox.git \
file://busybox-syslog.default \
file://mdev \
file://mdev.conf \
+ file://mdev-mount.sh \
file://umount.busybox \
file://defconfig \
file://busybox-syslog.service.in \
diff --git a/meta/recipes-core/busybox/files/find-touchscreen.sh b/meta/recipes-core/busybox/files/find-touchscreen.sh
index 1582ea891c..52c5e7a096 100644
--- a/meta/recipes-core/busybox/files/find-touchscreen.sh
+++ b/meta/recipes-core/busybox/files/find-touchscreen.sh
@@ -1,9 +1,6 @@
#!/bin/sh
-if [ `egrep "input:.*-e0.*,3,.*a0,1,.*18,.*" /sys/class/input/$MDEV/device/modalias|wc -l` -gt 0 ]; then
- ln -sf /dev/input/$MDEV /dev/input/touchscreen0
+if grep -q "input:.*-e0.*,3,.*a0,1,\|ads7846" /sys/class/$MDEV/device/modalias ; then
+ ln -sf /dev/$MDEV /dev/input/touchscreen0
fi
-if [ `egrep "ads7846" /sys/class/input/$MDEV/device/modalias|wc -l` -gt 0 ]; then
- ln -sf /dev/input/$MDEV /dev/input/touchscreen0
-fi
diff --git a/meta/recipes-core/busybox/files/mdev-mount.sh b/meta/recipes-core/busybox/files/mdev-mount.sh
new file mode 100644
index 0000000000..d5d66d66fb
--- /dev/null
+++ b/meta/recipes-core/busybox/files/mdev-mount.sh
@@ -0,0 +1,63 @@
+#!/bin/sh
+MDEV_AUTOMOUNT=y
+MDEV_AUTOMOUNT_ROOT=/run/media
+[ -f /etc/default/mdev ] && . /etc/default/mdev
+if [ "${MDEV_AUTOMOUNT}" = "n" ] ; then
+ exit 0
+fi
+
+case "$ACTION" in
+ add|"")
+ ACTION="add"
+ # check if already mounted
+ if grep -q "^/dev/${MDEV} " /proc/mounts ; then
+ # Already mounted
+ exit 0
+ fi
+ DEVBASE=`expr substr $MDEV 1 3`
+ if [ "${DEVBASE}" == "mmc" ] ; then
+ DEVBASE=`expr substr $MDEV 1 7`
+ fi
+ # check for "please don't mount it" file
+ if [ -f "/dev/nomount.${DEVBASE}" ] ; then
+ # blocked
+ exit 0
+ fi
+ # check for full-disk partition
+ if [ "${DEVBASE}" == "${MDEV}" ] ; then
+ if [ -d /sys/block/${DEVBASE}/${DEVBASE}*1 ] ; then
+ # Partition detected, just quit
+ exit 0
+ fi
+ if [ ! -f /sys/block/${DEVBASE}/size ] ; then
+ # No size at all
+ exit 0
+ fi
+ if [ `cat /sys/block/${DEVBASE}/size` == 0 ] ; then
+ # empty device, bail out
+ exit 0
+ fi
+ fi
+ # first allow fstab to determine the mountpoint
+ if ! mount /dev/$MDEV > /dev/null 2>&1
+ then
+ MOUNTPOINT="${MDEV_AUTOMOUNT_ROOT}/$MDEV"
+ mkdir "$MOUNTPOINT"
+ mount -t auto /dev/$MDEV "$MOUNTPOINT"
+ fi
+ ;;
+ remove)
+ MOUNTPOINT=`grep "^/dev/$MDEV\s" /proc/mounts | cut -d' ' -f 2`
+ if [ ! -z "$MOUNTPOINT" ]
+ then
+ umount "$MOUNTPOINT"
+ rmdir "$MOUNTPOINT"
+ else
+ umount /dev/$MDEV
+ fi
+ ;;
+ *)
+ # Unexpected keyword
+ exit 1
+ ;;
+esac
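Because the script sources /etc/default/mdev before doing anything else,
automounting can be tuned or switched off per image without patching the
script itself. A minimal sketch of such an override file (the file name and
variable names come from the script above; the values shown are only an
example, not part of this commit):

    # /etc/default/mdev -- hypothetical local override, sourced as shell
    MDEV_AUTOMOUNT=y            # set to "n" to disable automounting entirely
    MDEV_AUTOMOUNT_ROOT=/media  # mount removable media under /media instead of /run/media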
diff --git a/meta/recipes-core/busybox/files/mdev.conf b/meta/recipes-core/busybox/files/mdev.conf
index e688911ff1..17e93da7c3 100644
--- a/meta/recipes-core/busybox/files/mdev.conf
+++ b/meta/recipes-core/busybox/files/mdev.conf
@@ -1,3 +1,5 @@
+$MODALIAS=.* 0:0 660 @modprobe "$MODALIAS"
+
console 0:0 0600
cpu_dma_latency 0:0 0660
fb0:0 44 0660
@@ -35,3 +37,6 @@ input/mice 0:0 0660
input/mouse.* 0:0 0660
tun[0-9]* 0:0 0660 =net/
+
+[hs]d[a-z][0-9]? 0:0 660 */etc/mdev/mdev-mount.sh
+mmcblk[0-9].* 0:0 660 */etc/mdev/mdev-mount.sh
diff --git a/meta/recipes-core/busybox/files/simple.script b/meta/recipes-core/busybox/files/simple.script
index 78ac4242a8..757e487b97 100644
--- a/meta/recipes-core/busybox/files/simple.script
+++ b/meta/recipes-core/busybox/files/simple.script
@@ -5,7 +5,6 @@
[ -z "$1" ] && echo "Error: should be called from udhcpc" && exit 1
RESOLV_CONF="/etc/resolv.conf"
-[ -n "$broadcast" ] && BROADCAST="broadcast $broadcast"
[ -n "$subnet" ] && NETMASK="netmask $subnet"
# return 0 if root is mounted on a network filesystem
@@ -17,8 +16,11 @@ root_is_nfs() {
have_bin_ip=0
if [ -x /sbin/ip ]; then
have_bin_ip=1
+ BROADCAST="broadcast +"
fi
+[ -n "$broadcast" ] && BROADCAST="broadcast $broadcast"
+
case "$1" in
deconfig)
if [ -x /sbin/resolvconf ]; then
diff --git a/meta/recipes-core/busybox/files/syslog b/meta/recipes-core/busybox/files/syslog
index 2944d3d06c..89c4d12e9c 100644
--- a/meta/recipes-core/busybox/files/syslog
+++ b/meta/recipes-core/busybox/files/syslog
@@ -28,7 +28,7 @@ if [ -f /etc/syslog-startup.conf ]; then
if [ -n "$ROTATEGENS" ]; then
SYSLOG_ARGS="$SYSLOG_ARGS -b $ROTATEGENS"
fi
- LOCAL=0
+ LOG_LOCAL=1
elif [ "$D" = "remote" ]; then
SYSLOG_ARGS="$SYSLOG_ARGS -R $REMOTE"
LOG_REMOTE=1
diff --git a/meta/recipes-core/coreutils/coreutils-8.22/fix-selinux-flask.patch b/meta/recipes-core/coreutils/coreutils-8.22/fix-selinux-flask.patch
new file mode 100644
index 0000000000..9d1ae55d47
--- /dev/null
+++ b/meta/recipes-core/coreutils/coreutils-8.22/fix-selinux-flask.patch
@@ -0,0 +1,39 @@
+From a1d360509fa3a4aff57eedcd528cc0347a87531d Mon Sep 17 00:00:00 2001
+From: Robert Yang <liezhi.yang@windriver.com>
+Date: Tue, 16 Sep 2014 01:59:08 -0700
+Subject: [PATCH] gnulib-comp.m4: selinux/flask.h should respect to
+ with_selinux
+
+Fixes the following error when building with meta-selinux, even when --without-selinux is used:
+runcon.c:49:28: fatal error: selinux/flask.h: No such file or directory
+ # include <selinux/flask.h>
+ ^
+compilation terminated.
+
+Upstream-Status: Pending
+
+Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
+---
+ m4/gnulib-comp.m4 | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/m4/gnulib-comp.m4 b/m4/gnulib-comp.m4
+index 472d3a0..5f09734 100644
+--- a/m4/gnulib-comp.m4
++++ b/m4/gnulib-comp.m4
+@@ -1730,11 +1730,11 @@ AC_DEFUN([gl_INIT],
+ AC_LIBOBJ([select])
+ fi
+ gl_SYS_SELECT_MODULE_INDICATOR([select])
+- AC_CHECK_HEADERS([selinux/flask.h])
+ AC_LIBOBJ([selinux-at])
+ gl_HEADERS_SELINUX_SELINUX_H
+ gl_HEADERS_SELINUX_CONTEXT_H
+ if test "$with_selinux" != no && test "$ac_cv_header_selinux_selinux_h" = yes; then
++ AC_CHECK_HEADERS([selinux/flask.h])
+ AC_LIBOBJ([getfilecon])
+ fi
+ gl_SERVENT
+--
+1.7.9.5
+
diff --git a/meta/recipes-core/coreutils/coreutils_8.22.bb b/meta/recipes-core/coreutils/coreutils_8.22.bb
index 83f786129b..f85bacabd3 100644
--- a/meta/recipes-core/coreutils/coreutils_8.22.bb
+++ b/meta/recipes-core/coreutils/coreutils_8.22.bb
@@ -16,6 +16,7 @@ SRC_URI = "${GNU_MIRROR}/coreutils/${BP}.tar.xz \
file://remove-usr-local-lib-from-m4.patch \
file://dummy_help2man.patch \
file://fix-for-dummy-man-usage.patch \
+ file://fix-selinux-flask.patch \
"
SRC_URI[md5sum] = "8fb0ae2267aa6e728958adc38f8163a2"
@@ -73,6 +74,11 @@ do_install_append() {
mv ${D}${bindir}/[ ${D}${bindir}/lbracket.${BPN}
}
+do_install_append_class-native(){
+ # remove groups to fix conflict with shadow-native
+ rm -f ${D}${STAGING_BINDIR_NATIVE}/groups
+}
+
inherit update-alternatives
ALTERNATIVE_PRIORITY = "100"
diff --git a/meta/recipes-core/dbus/dbus-glib-0.100.2/obsolete_automake_macros.patch b/meta/recipes-core/dbus/dbus-glib-0.100.2/obsolete_automake_macros.patch
deleted file mode 100644
index 40e3b12b5d..0000000000
--- a/meta/recipes-core/dbus/dbus-glib-0.100.2/obsolete_automake_macros.patch
+++ /dev/null
@@ -1,15 +0,0 @@
-Upstream-Status: Submitted [https://bugs.freedesktop.org/show_bug.cgi?id=59003]
-
-Signed-off-by: Marko Lindqvist <cazfi74@gmail.com>
-diff -Nurd dbus-glib-0.100/configure.ac dbus-glib-0.100/configure.ac
---- dbus-glib-0.100/configure.ac 2012-06-25 19:26:39.000000000 +0300
-+++ dbus-glib-0.100/configure.ac 2013-01-03 04:53:22.314976758 +0200
-@@ -8,7 +8,7 @@
-
- AM_INIT_AUTOMAKE([1.9])
-
--AM_CONFIG_HEADER(config.h)
-+AC_CONFIG_HEADERS(config.h)
-
- # Honor aclocal flags
- ACLOCAL="$ACLOCAL $ACLOCAL_FLAGS"
diff --git a/meta/recipes-core/dbus/dbus-glib.inc b/meta/recipes-core/dbus/dbus-glib.inc
index 6c60d9e408..abd013247c 100644
--- a/meta/recipes-core/dbus/dbus-glib.inc
+++ b/meta/recipes-core/dbus/dbus-glib.inc
@@ -13,7 +13,6 @@ DEPENDS_class-native = "glib-2.0-native dbus-native"
SRC_URI = "http://dbus.freedesktop.org/releases/dbus-glib/dbus-glib-${PV}.tar.gz \
file://no-examples.patch \
file://test-install-makefile.patch \
- file://obsolete_automake_macros.patch \
"
inherit autotools pkgconfig gettext
diff --git a/meta/recipes-core/dbus/dbus-glib-0.100.2/no-examples.patch b/meta/recipes-core/dbus/dbus-glib/no-examples.patch
index fbb4967828..fbb4967828 100644
--- a/meta/recipes-core/dbus/dbus-glib-0.100.2/no-examples.patch
+++ b/meta/recipes-core/dbus/dbus-glib/no-examples.patch
diff --git a/meta/recipes-core/dbus/dbus-glib-0.100.2/test-install-makefile.patch b/meta/recipes-core/dbus/dbus-glib/test-install-makefile.patch
index 2e8e178bb7..027c82b27e 100644
--- a/meta/recipes-core/dbus/dbus-glib-0.100.2/test-install-makefile.patch
+++ b/meta/recipes-core/dbus/dbus-glib/test-install-makefile.patch
@@ -3,26 +3,46 @@ Change Makefile.am to install regression tests for test package purpose.
Upstream-Status: Inappropriate [test not install is for purpose from upstream]
Signed-off-by: Yao Zhao <yao.zhao@windriver.com>
+Signed-off-by: Chong Lu <Chong.Lu@windriver.com>
+---
+ test/Makefile.am | 3 ++-
+ test/core/Makefile.am | 3 ++-
+ test/interfaces/Makefile.am | 3 ++-
+ 3 files changed, 6 insertions(+), 3 deletions(-)
diff --git a/test/Makefile.am b/test/Makefile.am
-index 7ba11a8..249f0e7 100644
+index 379c8c3..6580927 100644
--- a/test/Makefile.am
+++ b/test/Makefile.am
-@@ -13,7 +13,8 @@ else
- TEST_BINARIES=
- endif
+@@ -48,7 +48,8 @@ TESTS = \
+ $(test_scripts) \
+ $(NULL)
--noinst_PROGRAMS= $(TEST_BINARIES)
+-noinst_PROGRAMS = \
+testdir = $(datadir)/@PACKAGE@/tests
-+test_PROGRAMS= $(TEST_BINARIES)
++test_PROGRAMS = \
+ $(test_programs) \
+ $(test_related_programs) \
+ $(NULL)
+diff --git a/test/core/Makefile.am b/test/core/Makefile.am
+index e4bceb4..54b077d 100644
+--- a/test/core/Makefile.am
++++ b/test/core/Makefile.am
+@@ -64,7 +64,8 @@ endif
- test_service_SOURCES= \
- test-service.c
+ ## we use noinst_PROGRAMS not check_PROGRAMS for TESTS so that we
+ ## build even when not doing "make check"
+-noinst_PROGRAMS = \
++testdir = $(datadir)/@PACKAGE@/tests/core
++test_PROGRAMS = \
+ test-dbus-glib \
+ test-error-mapping \
+ test-service-glib \
diff --git a/test/interfaces/Makefile.am b/test/interfaces/Makefile.am
-index 3cb2c39..e6de67d 100644
+index a94d08d..707dcac 100644
--- a/test/interfaces/Makefile.am
+++ b/test/interfaces/Makefile.am
-@@ -39,7 +39,8 @@ if DBUS_BUILD_TESTS
+@@ -57,7 +57,8 @@ if DBUS_BUILD_TESTS
## we use noinst_PROGRAMS not check_PROGRAMS for TESTS so that we
## build even when not doing "make check"
@@ -32,17 +52,6 @@ index 3cb2c39..e6de67d 100644
test_service_SOURCES = \
test-interfaces.c \
-diff --git a/test/core/Makefile.am b/test/core/Makefile.am
-index ef6cb26..9786c3a 100644
---- a/test/core/Makefile.am
-+++ b/test/core/Makefile.am
-@@ -46,7 +46,8 @@ endif
-
- ## we use noinst_PROGRAMS not check_PROGRAMS for TESTS so that we
- ## build even when not doing "make check"
--noinst_PROGRAMS = \
-+testdir = $(datadir)/@PACKAGE@/tests/core
-+test_PROGRAMS = \
- test-dbus-glib \
- test-service-glib \
- $(THREAD_APPS) \
+--
+1.9.1
+
diff --git a/meta/recipes-core/dbus/dbus-glib_0.100.2.bb b/meta/recipes-core/dbus/dbus-glib_0.100.2.bb
deleted file mode 100644
index c1bb06a1b4..0000000000
--- a/meta/recipes-core/dbus/dbus-glib_0.100.2.bb
+++ /dev/null
@@ -1,5 +0,0 @@
-require dbus-glib.inc
-
-
-SRC_URI[md5sum] = "ad0920c7e3aad669163bb59171cf138e"
-SRC_URI[sha256sum] = "a5bb42da921f51c28161e0e54a5a8241d94a1c0499a14007150e9ce743da6ac5"
diff --git a/meta/recipes-core/dbus/dbus-glib_0.102.bb b/meta/recipes-core/dbus/dbus-glib_0.102.bb
new file mode 100644
index 0000000000..d798ebfac0
--- /dev/null
+++ b/meta/recipes-core/dbus/dbus-glib_0.102.bb
@@ -0,0 +1,5 @@
+require dbus-glib.inc
+
+
+SRC_URI[md5sum] = "f76b8558fd575d0106c3a556eaa49184"
+SRC_URI[sha256sum] = "6964ed585bb8149a14ab744b5ded5e77cf71ec5446e6dcc5fcf5eebcc52df29c"
diff --git a/meta/recipes-core/dbus/dbus-test_1.8.2.bb b/meta/recipes-core/dbus/dbus-test_1.8.10.bb
index e19d2e9d1e..fb796fb692 100644
--- a/meta/recipes-core/dbus/dbus-test_1.8.2.bb
+++ b/meta/recipes-core/dbus/dbus-test_1.8.10.bb
@@ -18,8 +18,8 @@ SRC_URI = "http://dbus.freedesktop.org/releases/dbus/dbus-${PV}.tar.gz \
file://clear-guid_from_server-if-send_negotiate_unix_f.patch \
"
-SRC_URI[md5sum] = "d6f709bbec0a022a1847c7caec9d6068"
-SRC_URI[sha256sum] = "5689f7411165adc953f37974e276a3028db94447c76e8dd92efe910c6d3bae08"
+SRC_URI[md5sum] = "6be5ef99ae784de9d04589eb067fe038"
+SRC_URI[sha256sum] = "10bf87fdb68815edd01d53885101dbcdd80dacad7198912cca61a4fa22dfaf8e"
S="${WORKDIR}/dbus-${PV}"
FILESEXTRAPATHS =. "${FILE_DIRNAME}/dbus:"
diff --git a/meta/recipes-core/dbus/dbus.inc b/meta/recipes-core/dbus/dbus.inc
index d38ba7e1d2..fb5d017856 100644
--- a/meta/recipes-core/dbus/dbus.inc
+++ b/meta/recipes-core/dbus/dbus.inc
@@ -8,7 +8,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=10dded3b58148f3f1fd804b26354af3e \
DEPENDS = "expat virtual/libintl"
RDEPENDS_dbus_class-native = ""
RDEPENDS_dbus_class-nativesdk = ""
-PACKAGES += "${@bb.utils.contains('PTEST_ENABLED', '1', 'dbus-ptest', '', d)}"
+PACKAGES += "${@bb.utils.contains('DISTRO_FEATURES', 'ptest', '${PN}-ptest', '', d)}"
ALLOW_EMPTY_dbus-ptest = "1"
RDEPENDS_dbus-ptest_class-target = "dbus-test-ptest"
diff --git a/meta/recipes-core/dbus/dbus_1.8.10.bb b/meta/recipes-core/dbus/dbus_1.8.10.bb
new file mode 100644
index 0000000000..250ea21ddc
--- /dev/null
+++ b/meta/recipes-core/dbus/dbus_1.8.10.bb
@@ -0,0 +1,4 @@
+include dbus.inc
+
+SRC_URI[md5sum] = "6be5ef99ae784de9d04589eb067fe038"
+SRC_URI[sha256sum] = "10bf87fdb68815edd01d53885101dbcdd80dacad7198912cca61a4fa22dfaf8e"
diff --git a/meta/recipes-core/dbus/dbus_1.8.2.bb b/meta/recipes-core/dbus/dbus_1.8.2.bb
deleted file mode 100644
index 37ebed2bd6..0000000000
--- a/meta/recipes-core/dbus/dbus_1.8.2.bb
+++ /dev/null
@@ -1,4 +0,0 @@
-include dbus.inc
-
-SRC_URI[md5sum] = "d6f709bbec0a022a1847c7caec9d6068"
-SRC_URI[sha256sum] = "5689f7411165adc953f37974e276a3028db94447c76e8dd92efe910c6d3bae08"
diff --git a/meta/recipes-core/dropbear/dropbear.inc b/meta/recipes-core/dropbear/dropbear.inc
index 9fec09e919..947a491679 100644
--- a/meta/recipes-core/dropbear/dropbear.inc
+++ b/meta/recipes-core/dropbear/dropbear.inc
@@ -27,6 +27,13 @@ PAM_SRC_URI = "file://0005-dropbear-enable-pam.patch \
file://0006-dropbear-configuration-file.patch \
file://dropbear"
+PAM_PLUGINS = "libpam-runtime \
+ pam-plugin-deny \
+ pam-plugin-permit \
+ pam-plugin-unix \
+ "
+RDEPENDS_${PN} += "${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_PLUGINS}', '', d)}"
+
inherit autotools update-rc.d systemd
INITSCRIPT_NAME = "dropbear"
diff --git a/meta/recipes-core/eglibc/cross-localedef-native_2.19.bb b/meta/recipes-core/eglibc/cross-localedef-native_2.19.bb
deleted file mode 100644
index 1c13eb824a..0000000000
--- a/meta/recipes-core/eglibc/cross-localedef-native_2.19.bb
+++ /dev/null
@@ -1,48 +0,0 @@
-SUMMARY = "Cross locale generation tool for eglibc"
-HOMEPAGE = "http://www.eglibc.org/home"
-SECTION = "libs"
-LICENSE = "LGPL-2.1"
-
-LIC_DIR = "${WORKDIR}/eglibc-${PV}/libc"
-LIC_FILES_CHKSUM = "file://${LIC_DIR}/LICENSES;md5=e9a558e243b36d3209f380deb394b213 \
- file://${LIC_DIR}/COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://${LIC_DIR}/posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \
- file://${LIC_DIR}/COPYING.LIB;md5=4fbd65380cdd255951079008b364516c"
-
-
-inherit native
-inherit autotools
-
-FILESEXTRAPATHS =. "${FILE_DIRNAME}/${P}:"
-
-SRC_URI = "http://downloads.yoctoproject.org/releases/eglibc/eglibc-${PV}-svnr25243.tar.bz2 \
- file://fix_for_centos_5.8.patch;patchdir=.. \
- "
-SRC_URI[md5sum] = "197836c2ba42fb146e971222647198dd"
-SRC_URI[sha256sum] = "baaa030531fc308f7820c46acdf8e1b2f8e3c1f40bcd28b6e440d1c95d170d4c"
-
-S = "${WORKDIR}/eglibc-${PV}/localedef"
-
-do_unpack_append() {
- bb.build.exec_func('do_move_ports', d)
-}
-
-do_move_ports() {
- if test -d ${WORKDIR}/eglibc-${PV}/ports ; then
- rm -rf ${WORKDIR}/libc/ports
- mv ${WORKDIR}/eglibc-${PV}/ports ${WORKDIR}/libc/
- fi
-}
-
-EXTRA_OECONF = "--with-glibc=${WORKDIR}/eglibc-${PV}/libc"
-CFLAGS += "-DNOT_IN_libc=1"
-
-do_configure () {
- ${S}/configure ${EXTRA_OECONF}
-}
-
-
-do_install() {
- install -d ${D}${bindir}
- install -m 0755 ${B}/localedef ${D}${bindir}/cross-localedef
-}
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/0001-eglibc-menuconfig-support.patch b/meta/recipes-core/eglibc/eglibc-2.19/0001-eglibc-menuconfig-support.patch
deleted file mode 100644
index 4559a110fb..0000000000
--- a/meta/recipes-core/eglibc/eglibc-2.19/0001-eglibc-menuconfig-support.patch
+++ /dev/null
@@ -1,912 +0,0 @@
-Pulled from
-http://www.eglibc.org/archives/patches/msg01042.html
-
-Upstream-Status: Pending
-Signed-off-by: Khem
-
-Hi,
-
-This patch adds 'make menuconfig' support to EGLIBC.
-
-
-EGLIBC can re-use the Linux kernel kconfig host tools ('conf' and 'mconf') unmodified, by passing appropriate environment variables and with some pre- and post-processing on the input/output config files.
-
-There are three new make targets supported, which all are defined in the new libc/options-config/Makefile, which is included by the top-level libc/Makefile:
-
-- 'make defconfig'. This passes 'libc/option-groups.defaults' to 'conf' as a default config, and outputs 'option-groups.config' to the top-level build directory, which will be the same as the default config.
-
-- 'make config'. This is the same line-oriented interface as in the Linux kernel. Input and output is 'option-groups.config' in the top-level build directory.
-
-- 'make menuconfig'. This is the same menu-based interface as in the Linux kernel. Input and output is 'option-groups.config' in the top-level build directory.
-
-
-Pre-Processing:
-
-
-The Linux kernel kconfig tools expect a prefix of "CONFIG_" on all config option names, but EGLIBC expects a prefix of "OPTION_". The pre-processing script, libc/options-config/config-preproc.pl, simply replaces "CONFIG_ with "OPTION_" in the given config file. The libc/options-config/Makefile passes the script output to a temporary config file, which is then passed to 'conf' or 'mconf'.
-
-Post-Processing (libc/options-config/config-postproc.pl):
-
-
-- Disabled options are output as a comment line of the form "# CONFIG_FOO is not set". This needs to be changed to an explicit "CONFIG_FOO=n" in order to be compatible with 'option-groups.awk' which generates the option-groups.h header.
-
-- "CONFIG_" prefix is changed back to "OPTION_".
-
-
-- The kconfig tools will not output anything for options that depend on a parent option, when the parent option is disabled. This implicit disable must be converted to an explicit "CONFIG_FOO=n" in order to be compatible with the way EGLIBC overrides the default option settings in 'libc/option-groups.defaults' with those in 'option-groups.config'.
-
-
-A new configure option, '--with-kconfig=<PATH>', tells EGLIBC where to find the pre-built 'conf' and 'mconf' host tools from Linux kernel builds.
-
-libc/EGLIBC.cross-building is updated to include instructions for using '--with-kconfig' for the final EGLIBC build, and shows how and when to run 'make *config'.
-
-libc/EGLIBC.option-groups is updated to include new information on the menuconfig support.
-
-Thanks,
-
-attached is the updated patch to address above issues.
-
-Steve
-
---
-Steve Longerbeam | Senior Embedded Engineer, ESD Services
-Mentor Embedded(tm) | 46871 Bayside Parkway, Fremont, CA 94538
-P 510.354.5838 | M 408.410.2735
-Nucleus(r) | Linux(r) | Android(tm) | Services | UI | Multi-OS
-
-
-Index: libc/EGLIBC.cross-building
-===================================================================
---- libc.orig/EGLIBC.cross-building
-+++ libc/EGLIBC.cross-building
-@@ -243,9 +243,29 @@ full EGLIBC build:
- > $src/libc/configure \
- > --prefix=/usr \
- > --with-headers=$sysroot/usr/include \
-+ > --with-kconfig=$obj/linux/scripts/kconfig \
- > --build=$build \
- > --host=$target \
- > --disable-profile --without-gd --without-cvs --enable-add-ons
-+
-+Note the additional '--with-kconfig' option. This tells EGLIBC where to
-+find the host config tools used by the kernel 'make config' and 'make
-+menuconfig'. These tools can be re-used by EGLIBC for its own 'make
-+*config' support, which will create 'option-groups.config' for you.
-+But first make sure those tools have been built by running some
-+dummy 'make *config' calls in the kernel directory:
-+
-+ $ cd $obj/linux
-+ $ PATH=$tools/bin:$PATH make config \
-+ > ARCH=$linux_arch CROSS_COMPILE=$target- \
-+ $ PATH=$tools/bin:$PATH make menuconfig \
-+ > ARCH=$linux_arch CROSS_COMPILE=$target- \
-+
-+Now we can configure and build the full EGLIBC:
-+
-+ $ cd $obj/eglibc
-+ $ PATH=$tools/bin:$PATH make defconfig
-+ $ PATH=$tools/bin:$PATH make menuconfig
- $ PATH=$tools/bin:$PATH make
- $ PATH=$tools/bin:$PATH make install install_root=$sysroot
-
-Index: libc/configure.ac
-===================================================================
---- libc.orig/configure.ac
-+++ libc/configure.ac
-@@ -127,6 +127,16 @@ AC_ARG_WITH([headers],
- [sysheaders=''])
- AC_SUBST(sysheaders)
-
-+AC_ARG_WITH([kconfig],
-+ AC_HELP_STRING([--with-kconfig=PATH],
-+ [location of kconfig tools to use (from Linux
-+ kernel builds) to re-use for configuring EGLIBC
-+ option groups]),
-+ [KCONFIG_TOOLS=$withval],
-+ [KCONFIG_TOOLS=''])
-+AC_SUBST(KCONFIG_TOOLS)
-+
-+
- AC_SUBST(use_default_link)
- AC_ARG_WITH([default-link],
- AC_HELP_STRING([--with-default-link],
-Index: libc/config.make.in
-===================================================================
---- libc.orig/config.make.in
-+++ libc/config.make.in
-@@ -45,6 +45,8 @@ sysincludes = @SYSINCLUDES@
- c++-sysincludes = @CXX_SYSINCLUDES@
- all-warnings = @all_warnings@
-
-+kconfig_tools = @KCONFIG_TOOLS@
-+
- have-z-combreloc = @libc_cv_z_combreloc@
- have-z-execstack = @libc_cv_z_execstack@
- have-Bgroup = @libc_cv_Bgroup@
-Index: libc/options-config/config-postproc.pl
-===================================================================
---- /dev/null
-+++ libc/options-config/config-postproc.pl
-@@ -0,0 +1,54 @@
-+#!/usr/bin/perl
-+
-+$usage = "usage: $0 <default config file> <config file>\n";
-+
-+die "$usage" unless @ARGV;
-+$defaults = shift @ARGV;
-+die "$usage" unless @ARGV;
-+die "Could not open $ARGV[0]" unless -T $ARGV[0];
-+
-+sub yank {
-+ @option = grep($_ ne $_[0], @option);
-+}
-+
-+open(DEFAULTS, $defaults) || die "Could not open $defaults\n";
-+
-+# get the full list of available options using the default config file
-+$i = 0;
-+while (<DEFAULTS>) {
-+ if (/^\s*OPTION_(\w+)\s*=/) {
-+ $option[$i++] = $1;
-+ }
-+}
-+
-+# now go through the config file, making the necessary changes
-+while (<>) {
-+ if (/Linux Kernel Configuration/) {
-+ # change title
-+ s/Linux Kernel/Option Groups/;
-+ print;
-+ } elsif (/^\s*CONFIG_(\w+)\s*=/) {
-+ # this is an explicit option set line, change CONFIG_ to OPTION_
-+ # before printing and remove this option from option list
-+ $opt = $1;
-+ yank($opt);
-+ s/CONFIG_/OPTION_/g;
-+ print;
-+ } elsif (/^\s*#\s+CONFIG_(\w+) is not set/) {
-+ # this is a comment line, change CONFIG_ to OPTION_, remove this
-+ # option from option list, and convert to explicit OPTION_FOO=n
-+ $opt = $1;
-+ yank($opt);
-+ s/CONFIG_/OPTION_/g;
-+ print "OPTION_$opt=n\n";
-+ } else {
-+ print;
-+ }
-+}
-+
-+# any options left in @options, are options that were not mentioned in
-+# the config file, and implicitly that means the option must be set =n,
-+# so do that here.
-+foreach $opt (@option) {
-+ print "OPTION_$opt=n\n";
-+}
-Index: libc/options-config/config-preproc.pl
-===================================================================
---- /dev/null
-+++ libc/options-config/config-preproc.pl
-@@ -0,0 +1,8 @@
-+#!/usr/bin/perl
-+
-+if (@ARGV) {
-+ while (<>) {
-+ s/OPTION_/CONFIG_/g;
-+ print;
-+ }
-+}
-Index: libc/options-config/Makefile
-===================================================================
---- /dev/null
-+++ libc/options-config/Makefile
-@@ -0,0 +1,55 @@
-+# ===========================================================================
-+# EGLIBC option-groups configuration targets
-+# These targets are included from top-level makefile
-+
-+ifneq ($(kconfig_tools),)
-+ifneq (no,$(PERL))
-+
-+ocdir := options-config
-+
-+OconfigDefaults := option-groups.defaults
-+OconfigDefaults_tmp := $(common-objpfx).tmp.defconfig
-+OconfigDef := option-groups.def
-+Oconfig := $(common-objpfx)option-groups.config
-+Oconfig_tmp := $(common-objpfx).tmp.config
-+
-+conf := $(kconfig_tools)/conf
-+mconf := $(kconfig_tools)/mconf
-+
-+preproc := $(PERL) $(ocdir)/config-preproc.pl
-+postproc := $(PERL) $(ocdir)/config-postproc.pl
-+
-+PHONY += defconfig config menuconfig
-+
-+defconfig: $(conf) $(OconfigDefaults) $(OconfigDef)
-+ rm -f $(OconfigDefaults_tmp)
-+ rm -f $(Oconfig_tmp)
-+ $(preproc) $(OconfigDefaults) > $(OconfigDefaults_tmp)
-+ KCONFIG_CONFIG=$(Oconfig_tmp) $< --defconfig=$(OconfigDefaults_tmp) \
-+ $(OconfigDef)
-+ $(postproc) $(OconfigDefaults) $(Oconfig_tmp) > $(Oconfig)
-+ rm $(Oconfig_tmp)
-+ rm $(OconfigDefaults_tmp)
-+
-+config: $(conf) $(OconfigDefaults) $(OconfigDef)
-+ rm -f $(Oconfig_tmp)
-+ $(preproc) $(wildcard $(Oconfig)) > $(Oconfig_tmp)
-+ KCONFIG_CONFIG=$(Oconfig_tmp) $< --oldaskconfig $(OconfigDef)
-+ $(postproc) $(OconfigDefaults) $(Oconfig_tmp) > $(Oconfig)
-+ rm $(Oconfig_tmp)
-+
-+menuconfig: $(mconf) $(OconfigDefaults) $(OconfigDef)
-+ rm -f $(Oconfig_tmp)
-+ $(preproc) $(wildcard $(Oconfig)) > $(Oconfig_tmp)
-+ KCONFIG_CONFIG=$(Oconfig_tmp) $< $(OconfigDef)
-+ $(postproc) $(OconfigDefaults) $(Oconfig_tmp) > $(Oconfig)
-+ rm $(Oconfig_tmp)
-+
-+# Help text used by make help
-+help:
-+ @echo ' defconfig - New config with default from default config'
-+ @echo ' config - Update current config utilising a line-oriented program'
-+ @echo ' menuconfig - Update current config utilising a menu based program'
-+
-+endif
-+endif
-Index: libc/option-groups.def
-===================================================================
---- libc.orig/option-groups.def
-+++ libc/option-groups.def
-@@ -4,19 +4,19 @@
- #
- # An entry of the form:
- #
--# config OPTION_GROUP_NAME
-+# config GROUP_NAME
- # bool "one-line explanation of what this option group controls"
- # help
- # Multi-line help explaining the option group's meaning in
- # some detail, terminated by indentation level.
- #
--# defines an option group whose variable is OPTION_GROUP_NAME, with
-+# defines an option group whose variable is GROUP_NAME, with
- # meaningful values 'y' (enabled) and 'n' (disabled). The
- # documentation is formatted to be consumed by some sort of
- # interactive configuration interface, but EGLIBC doesn't have such an
- # interface yet.
- #
--# An option may have a 'depends' line, indicating which other options
-+# An option may have a 'depends on' line, indicating which other options
- # must also be enabled if this option is. At present, EGLIBC doesn't
- # check that these dependencies are satisfied.
- #
-@@ -41,9 +41,9 @@
- # although this simply reestablishes the value already set by
- # 'option-groups.defaults'.
-
--config OPTION_EGLIBC_ADVANCED_INET6
-+config EGLIBC_ADVANCED_INET6
- bool "IPv6 Advanced Sockets API support (RFC3542)"
-- depends OPTION_EGLIBC_INET
-+ depends on EGLIBC_INET
- help
- This option group includes the functions specified by RFC 3542,
- "Advanced Sockets Application Program Interface (API) for
-@@ -71,7 +71,7 @@ config OPTION_EGLIBC_ADVANCED_INET6
- inet6_rth_segments
- inet6_rth_space
-
--config OPTION_EGLIBC_BACKTRACE
-+config EGLIBC_BACKTRACE
- bool "Functions for producing backtraces"
- help
- This option group includes functions for producing a list of
-@@ -85,7 +85,7 @@ config OPTION_EGLIBC_BACKTRACE
- backtrace_symbols
- backtrace_symbols_fd
-
--config OPTION_EGLIBC_BIG_MACROS
-+config EGLIBC_BIG_MACROS
- bool "Use extensive inline code"
- help
- This option group specifies whether certain pieces of code
-@@ -93,7 +93,7 @@ config OPTION_EGLIBC_BIG_MACROS
- group is not selected, function calls will be used instead,
- hence reducing the library footprint.
-
--config OPTION_EGLIBC_BSD
-+config EGLIBC_BSD
- bool "BSD-specific functions, and their compatibility stubs"
- help
- This option group includes functions specific to BSD kernels.
-@@ -109,10 +109,9 @@ config OPTION_EGLIBC_BSD
- revoke
- setlogin
-
--config OPTION_EGLIBC_CXX_TESTS
-+config EGLIBC_CXX_TESTS
- bool "Tests that link against the standard C++ library."
-- depends OPTION_POSIX_WIDE_CHAR_DEVICE_IO
-- depends OPTION_EGLIBC_LIBM
-+ depends on POSIX_WIDE_CHAR_DEVICE_IO && EGLIBC_LIBM
- help
- This option group does not include any C library functions;
- instead, it controls which EGLIBC tests an ordinary 'make
-@@ -121,23 +120,22 @@ config OPTION_EGLIBC_CXX_TESTS
- run.
-
- The standard C++ library depends on the math library 'libm' and
-- the wide character I/O functions included in EGLIBC. If those
-- option groups are disabled, this test must also be disabled.
-+ the wide character I/O functions included in EGLIBC. So those
-+ option groups must be enabled if this test is enabled.
-
--config OPTION_EGLIBC_CATGETS
-+config EGLIBC_CATGETS
- bool "Functions for accessing message catalogs"
-- depends OPTION_EGLIBC_LOCALE_CODE
-+ depends on EGLIBC_LOCALE_CODE
- help
- This option group includes functions for accessing message
- catalogs: catopen, catclose, and catgets.
-
-- This option group depends on the OPTION_EGLIBC_LOCALE_CODE
-- option group; if you disable that, you must also disable this.
-+ This option group depends on the EGLIBC_LOCALE_CODE
-+ option group.
-
--config OPTION_EGLIBC_CHARSETS
-+config EGLIBC_CHARSETS
- bool "iconv/gconv character set conversion libraries"
- help
--
- This option group includes support for character sets other
- than ASCII (ANSI_X3.4-1968) and Unicode and ISO-10646 in their
- various encodings. This affects both the character sets
-@@ -198,16 +196,16 @@ config OPTION_EGLIBC_CHARSETS
- WCHAR_T - EGLIBC's internal form (target-endian,
- 32-bit ISO 10646)
-
--config OPTION_EGLIBC_CRYPT
-+config EGLIBC_CRYPT
- bool "Encryption library"
- help
- This option group includes the `libcrypt' library which
- provides functions for one-way encryption. Supported
- encryption algorithms include MD5, SHA-256, SHA-512 and DES.
-
--config OPTION_EGLIBC_CRYPT_UFC
-+config EGLIBC_CRYPT_UFC
- bool "Ultra fast `crypt' implementation"
-- depends OPTION_EGLIBC_CRYPT
-+ depends on EGLIBC_CRYPT
- help
- This option group provides ultra fast DES-based implementation of
- the `crypt' function. When this option group is disabled,
-@@ -216,7 +214,7 @@ config OPTION_EGLIBC_CRYPT_UFC
- errno to ENOSYS if /salt/ passed does not correspond to either MD5,
- SHA-256 or SHA-512 algorithm.
-
--config OPTION_EGLIBC_DB_ALIASES
-+config EGLIBC_DB_ALIASES
- bool "Functions for accessing the mail aliases database"
- help
- This option group includues functions for looking up mail
-@@ -233,7 +231,7 @@ config OPTION_EGLIBC_DB_ALIASES
- When this option group is disabled, the NSS service libraries
- also lack support for querying their mail alias tables.
-
--config OPTION_EGLIBC_ENVZ
-+config EGLIBC_ENVZ
- bool "Functions for handling envz-style environment vectors."
- help
- This option group contains functions for creating and operating
-@@ -248,7 +246,7 @@ config OPTION_EGLIBC_ENVZ
- envz_entry envz_remove
- envz_get envz_strip
-
--config OPTION_EGLIBC_FCVT
-+config EGLIBC_FCVT
- bool "Functions for converting floating-point numbers to strings"
- help
- This option group includes functions for converting
-@@ -262,14 +260,14 @@ config OPTION_EGLIBC_FCVT
- fcvt_r qfcvt_r
- gcvt qgcvt
-
--config OPTION_EGLIBC_FMTMSG
-+config EGLIBC_FMTMSG
- bool "Functions for formatting messages"
- help
- This option group includes the following functions:
-
- addseverity fmtmsg
-
--config OPTION_EGLIBC_FSTAB
-+config EGLIBC_FSTAB
- bool "Access functions for 'fstab'"
- help
- This option group includes functions for reading the mount
-@@ -283,7 +281,7 @@ config OPTION_EGLIBC_FSTAB
- getfsent setfsent
- getfsfile
-
--config OPTION_EGLIBC_FTRAVERSE
-+config EGLIBC_FTRAVERSE
- bool "Functions for traversing file hierarchies"
- help
- This option group includes functions for traversing file
-@@ -297,9 +295,9 @@ config OPTION_EGLIBC_FTRAVERSE
- fts_set nftw64
- fts_close
-
--config OPTION_EGLIBC_GETLOGIN
-+config EGLIBC_GETLOGIN
- bool "The getlogin function"
-- depends OPTION_EGLIBC_UTMP
-+ depends on EGLIBC_UTMP
- help
- This function group includes the 'getlogin' and 'getlogin_r'
- functions, which return the user name associated by the login
-@@ -309,17 +307,17 @@ config OPTION_EGLIBC_GETLOGIN
- fall back on 'getlogin' to find the user's login name for tilde
- expansion when the 'HOME' environment variable is not set.
-
--config OPTION_EGLIBC_IDN
-+config EGLIBC_IDN
- bool "International domain names support"
- help
- This option group includes the `libcidn' library which
- provides support for international domain names.
-
--config OPTION_EGLIBC_INET
-+config EGLIBC_INET
- bool "Networking support"
- help
- This option group includes networking-specific functions and
-- data. With OPTION_EGLIBC_INET disabled, the EGLIBC
-+ data. With EGLIBC_INET disabled, the EGLIBC
- installation and API changes as follows:
-
- - The following libraries are not installed:
-@@ -439,14 +437,14 @@ config OPTION_EGLIBC_INET
- use Unix-domain sockets to communicate with the syslog daemon;
- syslog is valuable in non-networked contexts.
-
--config OPTION_EGLIBC_INET_ANL
-+config EGLIBC_INET_ANL
- bool "Asynchronous name lookup"
-- depends OPTION_EGLIBC_INET
-+ depends on EGLIBC_INET
- help
- This option group includes the `libanl' library which
- provides support for asynchronous name lookup.
-
--config OPTION_EGLIBC_LIBM
-+config EGLIBC_LIBM
- bool "libm (math library)"
- help
- This option group includes the 'libm' library, containing
-@@ -464,7 +462,7 @@ config OPTION_EGLIBC_LIBM
- group, you will not be able to build 'libstdc++' against the
- resulting EGLIBC installation.
-
--config OPTION_EGLIBC_LOCALES
-+config EGLIBC_LOCALES
- bool "Locale definitions"
- help
- This option group includes all locale definitions other than
-@@ -472,17 +470,17 @@ config OPTION_EGLIBC_LOCALES
- only the "C" locale is supported.
-
-
--config OPTION_EGLIBC_LOCALE_CODE
-+config EGLIBC_LOCALE_CODE
- bool "Locale functions"
-- depends OPTION_POSIX_C_LANG_WIDE_CHAR
-+ depends on POSIX_C_LANG_WIDE_CHAR
- help
- This option group includes locale support functions, programs,
-- and libraries. With OPTION_EGLIBC_LOCALE_FUNCTIONS disabled,
-+ and libraries. With EGLIBC_LOCALE_CODE disabled,
- EGLIBC supports only the 'C' locale (also known as 'POSIX'),
- and ignores the settings of the 'LANG' and 'LC_*' environment
- variables.
-
-- With OPTION_EGLIBC_LOCALE_CODE disabled, the following
-+ With EGLIBC_LOCALE_CODE disabled, the following
- functions are omitted from libc:
-
- duplocale localeconv nl_langinfo rpmatch strfmon_l
-@@ -491,46 +489,43 @@ config OPTION_EGLIBC_LOCALE_CODE
- Furthermore, only the LC_CTYPE and LC_TIME categories of the
- standard "C" locale are available.
-
-- The OPTION_EGLIBC_CATGETS option group depends on this option
-- group; if you disable OPTION_EGLIBC_LOCALE_CODE, you must also
-- disable OPTION_EGLIBC_CATGETS.
-+ The EGLIBC_CATGETS option group depends on this option group.
-+
-
--config OPTION_EGLIBC_MEMUSAGE
-+config EGLIBC_MEMUSAGE
- bool "Memory profiling library"
- help
- This option group includes the `libmemusage' library and
- the `memusage' and `memusagestat' utilities.
- These components provide memory profiling functions.
-
-- OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE
-+ EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE
-
- Libmemusage library buffers the profiling data in memory
- before writing it out to disk. By default, the library
- allocates 1.5M buffer, which can be substantial for some
-- systems. OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE option
-+ systems. EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE option
- allows to change the default buffer size. It specifies
- the number of entries the buffer should have.
- On most architectures one buffer entry amounts to 48 bytes,
- so setting this option to the value of 512 will reduce the size of
- the memory buffer to 24K.
-
--config OPTION_EGLIBC_NIS
-+config EGLIBC_NIS
- bool "Support for NIS, NIS+, and the special 'compat' services."
-- depends OPTION_EGLIBC_INET
-- depends OPTION_EGLIBC_SUNRPC
-+ depends on EGLIBC_INET && EGLIBC_SUNRPC
- help
- This option group includes the NIS, NIS+, and 'compat' Name
- Service Switch service libraries. When it is disabled, those
- services libraries are not installed; you should remove any
- references to them from your 'nsswitch.conf' file.
-
-- This option group depends on the OPTION_EGLIBC_INET option
-+ This option group depends on the EGLIBC_INET option
- group; you must enable that to enable this option group.
-
--config OPTION_EGLIBC_NSSWITCH
-+config EGLIBC_NSSWITCH
- bool "Name service switch (nsswitch) support"
- help
--
- This option group includes support for the 'nsswitch' facility.
- With this option group enabled, all EGLIBC functions for
- accessing various system databases (passwords and groups;
-@@ -544,12 +539,12 @@ config OPTION_EGLIBC_NSSWITCH
- 'option-groups.config' file must set the following two
- variables:
-
-- OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG
-+ EGLIBC_NSSWITCH_FIXED_CONFIG
-
- Set this to the name of a file whose contents observe the
- same syntax as an ordinary '/etc/nsswitch.conf' file. The
- EGLIBC build process parses this file just as EGLIBC would
-- at run time if OPTION_EGLIBC_NSSWITCH were enabled, and
-+ at run time if EGLIBC_NSSWITCH were enabled, and
- produces a C library that uses the nsswitch service
- libraries to search for database entries as this file
- specifies, instead of consulting '/etc/nsswitch.conf' at run
-@@ -567,7 +562,7 @@ config OPTION_EGLIBC_NSSWITCH
- you will probably want to delete references to databases not
- needed on your system.
-
-- OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS
-+ EGLIBC_NSSWITCH_FIXED_FUNCTIONS
-
- The EGLIBC build process uses this file to decide which
- functions to make available from which service libraries.
-@@ -585,28 +580,28 @@ config OPTION_EGLIBC_NSSWITCH
- Be sure to mention each function in each service you wish to
- use. If you do not mention a service's function here, the
- EGLIBC database access functions will not find it, even if
-- it is listed in the OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG
-+ it is listed in the EGLIBC_NSSWITCH_FIXED_CONFIG
- file.
-
-- In this arrangement, EGLIBC will not use the 'dlopen' and
-- 'dlsym' functions to find database access functions. Instead,
-- libc hard-codes references to the service libraries' database
-- access functions. You must explicitly link your program
-- against the name service libraries (those whose names start
-- with 'libnss_', in the sysroot's '/lib' directory) whose
-- functions you intend to use. This arrangement helps
-- system-wide static analysis tools decide which functions a
-- system actually uses.
--
-- Note that some nsswitch service libraries require other option
-- groups to be enabled; for example, the OPTION_EGLIBC_INET
-- option group must be enabled to use the 'libnss_dns.so.2'
-- service library, which uses the Domain Name System network
-- protocol to answer queries.
-+ In this arrangement, EGLIBC will not use the 'dlopen' and
-+ 'dlsym' functions to find database access functions. Instead,
-+ libc hard-codes references to the service libraries' database
-+ access functions. You must explicitly link your program
-+ against the name service libraries (those whose names start
-+ with 'libnss_', in the sysroot's '/lib' directory) whose
-+ functions you intend to use. This arrangement helps
-+ system-wide static analysis tools decide which functions a
-+ system actually uses.
-+
-+ Note that some nsswitch service libraries require other option
-+ groups to be enabled; for example, the EGLIBC_INET
-+ option group must be enabled to use the 'libnss_dns.so.2'
-+ service library, which uses the Domain Name System network
-+ protocol to answer queries.
-
--config OPTION_EGLIBC_RCMD
-+config EGLIBC_RCMD
- bool "Support for 'rcmd' and related library functions"
-- depends OPTION_EGLIBC_INET
-+ depends on EGLIBC_INET
- help
- This option group includes functions for running commands on
- remote machines via the 'rsh' protocol, and doing authentication
-@@ -622,7 +617,7 @@ config OPTION_EGLIBC_RCMD
- rresvport ruserpass
- rresvport_af
-
--config OPTION_EGLIBC_RTLD_DEBUG
-+config EGLIBC_RTLD_DEBUG
- bool "Runtime linker debug print outs"
- help
- This option group enables debug output of the runtime linker
-@@ -633,7 +628,7 @@ config OPTION_EGLIBC_RTLD_DEBUG
- the `ldd' utility which may also be used by the prelinker.
- In particular, the `--unused' ldd option will not work correctly.
-
--config OPTION_EGLIBC_SPAWN
-+config EGLIBC_SPAWN
- bool "Support for POSIX posix_spawn functions"
- help
- This option group includes the POSIX functions for executing
-@@ -669,7 +664,7 @@ config OPTION_EGLIBC_SPAWN
- disabled, those programs will only operate on uncompressed
- charmap files.
-
--config OPTION_EGLIBC_STREAMS
-+config EGLIBC_STREAMS
- bool "Support for accessing STREAMS."
- help
- This option group includes functions for reading and writing
-@@ -685,14 +680,14 @@ config OPTION_EGLIBC_STREAMS
- isastream fdetach
- putmsg
-
--config OPTION_EGLIBC_SUNRPC
-+config EGLIBC_SUNRPC
- bool "Support for the Sun 'RPC' protocol."
-- depends OPTION_EGLIBC_INET
-+ depends on EGLIBC_INET
- help
- This option group includes support for the Sun RPC protocols,
- including the 'rpcgen' and 'rpcinfo' programs.
-
--config OPTION_EGLIBC_UTMP
-+config EGLIBC_UTMP
- bool "Older access functions for 'utmp' login records"
- help
- This option group includes the older 'utent' family of
-@@ -719,9 +714,9 @@ config OPTION_EGLIBC_UTMP
-
- libutil.so (and libutil.a)
-
--config OPTION_EGLIBC_UTMPX
-+config EGLIBC_UTMPX
- bool "POSIX access functions for 'utmp' login records"
-- depends OPTION_EGLIBC_UTMP
-+ depends on EGLIBC_UTMP
- help
- This option group includes the POSIX functions for reading and
- writing user login records in the 'utmp' file (usually
-@@ -742,21 +737,21 @@ config OPTION_EGLIBC_UTMPX
- updwtmpx
- utmpxname
-
--config OPTION_EGLIBC_WORDEXP
-+config EGLIBC_WORDEXP
- bool "Shell-style word expansion"
- help
- This option group includes the 'wordexp' function for
- performing word expansion in the manner of the shell, and the
- accompanying 'wordfree' function.
-
--config OPTION_POSIX_C_LANG_WIDE_CHAR
-+config POSIX_C_LANG_WIDE_CHAR
- bool "ISO C library wide character functions, excluding I/O"
- help
- This option group includes the functions defined by the ISO C
- standard for working with wide and multibyte characters in
- memory. Functions for reading and writing wide and multibyte
- characters from and to files call in the
-- OPTION_POSIX_WIDE_CHAR_DEVICE_IO option group.
-+ POSIX_WIDE_CHAR_DEVICE_IO option group.
-
- This option group includes the following functions:
-
-@@ -778,14 +773,14 @@ config OPTION_POSIX_C_LANG_WIDE_CHAR
- mbrlen wcscoll wcstol
- mbrtowc wcscpy wcstold
-
--config OPTION_POSIX_REGEXP
-+config POSIX_REGEXP
- bool "Regular expressions"
- help
- This option group includes the POSIX regular expression
- functions, and the associated non-POSIX extensions and
- compatibility functions.
-
-- With OPTION_POSIX_REGEXP disabled, the following functions are
-+ With POSIX_REGEXP disabled, the following functions are
- omitted from libc:
-
- re_comp re_max_failures regcomp
-@@ -799,9 +794,9 @@ config OPTION_POSIX_REGEXP
- <regexp.h> header file, 'compile', 'step', and 'advance', is
- omitted.
-
--config OPTION_POSIX_REGEXP_GLIBC
-+config POSIX_REGEXP_GLIBC
- bool "Regular expressions from GLIBC"
-- depends OPTION_POSIX_REGEXP
-+ depends on POSIX_REGEXP
- help
- This option group specifies which regular expression
- library to use. The choice is between regex
-@@ -810,9 +805,9 @@ config OPTION_POSIX_REGEXP_GLIBC
- optimized for speed; regex from libiberty is more than twice
- as small while still is enough for most practical purposes.
-
--config OPTION_POSIX_WIDE_CHAR_DEVICE_IO
-+config POSIX_WIDE_CHAR_DEVICE_IO
- bool "Input and output functions for wide characters"
-- depends OPTION_POSIX_C_LANG_WIDE_CHAR
-+ depends on POSIX_C_LANG_WIDE_CHAR
- help
- This option group includes functions for reading and writing
- wide characters to and from <stdio.h> streams.
-Index: libc/Makefile
-===================================================================
---- libc.orig/Makefile
-+++ libc/Makefile
-@@ -24,6 +24,7 @@ endif
-
- include Makeconfig
-
-+include options-config/Makefile
-
- # This is the default target; it makes everything except the tests.
- .PHONY: all
-Index: libc/configure
-===================================================================
---- libc.orig/configure
-+++ libc/configure
-@@ -621,6 +621,7 @@ KSH
- libc_cv_have_bash2
- BASH_SHELL
- libc_cv_gcc_static_libgcc
-+KCONFIG_TOOLS
- CXX_SYSINCLUDES
- SYSINCLUDES
- AUTOCONF
-@@ -734,6 +735,7 @@ with_fp
- with_binutils
- with_selinux
- with_headers
-+with_kconfig
- with_default_link
- enable_sanity_checks
- enable_shared
-@@ -1438,6 +1440,9 @@ Optional Packages:
- --with-selinux if building with SELinux support
- --with-headers=PATH location of system headers to use (for example
- /usr/src/linux/include) [default=compiler default]
-+ --with-kconfig=PATH location of kconfig tools to use (from Linux kernel
-+ builds) to re-use for configuring EGLIBC option
-+ groups
- --with-default-link do not use explicit linker scripts
- --with-cpu=CPU select code for CPU variant
-
-@@ -3401,6 +3406,14 @@ fi
-
-
-
-+# Check whether --with-kconfig was given.
-+if test "${with_kconfig+set}" = set; then
-+ withval=$with_kconfig; KCONFIG_TOOLS=$withval
-+else
-+ KCONFIG_TOOLS=''
-+fi
-+
-+
-
- # Check whether --with-default-link was given.
- if test "${with_default_link+set}" = set; then :
-Index: libc/EGLIBC.option-groups
-===================================================================
---- libc.orig/EGLIBC.option-groups
-+++ libc/EGLIBC.option-groups
-@@ -56,33 +56,9 @@ disable option groups one by one, until
-
- The Option Groups
-
--EGLIBC currently implements the following option groups, also
--documented in the file 'option-groups.def':
--
--OPTION_EGLIBC_CATGETS
-- This option group includes functions for accessing message
-- catalogs: catopen, catclose, and catgets.
--
--OPTION_EGLIBC_LOCALES
-- This option group includes all locale definitions other than
-- those for the "C" locale. If this option group is omitted, then
-- only the "C" locale is supported.
--
--OPTION_EGLIBC_LIBM
-- This option group includes the 'libm' library, containing
-- mathematical functions. If this option group is omitted, then
-- an EGLIBC installation does not include shared or unshared versions
-- of the math library.
--
-- Note that this does not remove all floating-point related
-- functionality from EGLIBC; for example, 'printf' and 'scanf'
-- can still print and read floating-point values with this option
-- group disabled.
--
-- Note that the ISO Standard C++ library 'libstdc++' depends on
-- EGLIBC's math library 'libm'. If you disable this option
-- group, you will not be able to build 'libstdc++' against the
-- resulting EGLIBC installation.
-+To see the current full list of implemented option groups, refer to the
-+file 'option-groups.def' at the top of the source tree, or run
-+'make menuconfig' from the top-level build directory.
-
- The POSIX.1-2001 specification includes a suggested partition of all
- the functions in the POSIX C API into option groups: math functions
-@@ -110,6 +86,18 @@ data, but include mathematical functions
- OPTION_EGLIBC_LOCALES = n
- OPTION_EGLIBC_LIBM = y
-
-+Like the Linux kernel, EGLIBC supports a similar set of '*config' make
-+targets to make it easier to create 'option-groups.config', with all
-+dependencies between option groups automatically satisfied. Run
-+'make help' to see the list of supported make config targets. For
-+example, 'make menuconfig' will update the current config utilising a
-+menu based program.
-+
-+The option group names and their type (boolean, int, hex, string), help
-+description, and dependencies with other option groups, are described by
-+'option-groups.def' at the top of the source tree, analogous to the
-+'Kconfig' files in the Linux kernel.
-+
- In general, each option group variable controls whether a given set of
- object files in EGLIBC is compiled and included in the final
- libraries, or omitted from the build.
-@@ -132,22 +120,3 @@ under development.
-
- We have used the system to subset some portions of EGLIBC's
- functionality. It needs to be extended to cover more of the library.
--
--At the moment, EGLIBC performs no sanity checks on the contents of
--'option-groups.config'; if an option group's name is mistyped, the
--option group is silently included in the build. EGLIBC should check
--that all variables set in 'option-groups.config' are proper option
--group names, and that their values are appropriate.
--
--Some portions of EGLIBC depend on others; for example, the Sun Remote
--Procedure Call functions in 'sunrpc' depend on the networking
--functions in 'inet'. The sanity checking described above should check
--that the selection configuration satisfies dependencies within EGLIBC,
--and produce a legible error message if it does not. At the moment,
--inconsistent configurations produce link errors late in the build
--process.
--
--The Linux kernel's configuration system provides interactive
--interfaces for creating and modifying configuration files (which also
--perform the sanity checking and dependency tracking described above).
--EGLIBC should provide similar interfaces.
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/0002-eglibc-menuconfig-hex-string-options.patch b/meta/recipes-core/eglibc/eglibc-2.19/0002-eglibc-menuconfig-hex-string-options.patch
deleted file mode 100644
index 7caba48112..0000000000
--- a/meta/recipes-core/eglibc/eglibc-2.19/0002-eglibc-menuconfig-hex-string-options.patch
+++ /dev/null
@@ -1,169 +0,0 @@
-pulled from
-
-http://www.eglibc.org/archives/patches/msg01043.html
-
-
-Upstream-Status: Pending
-Signed-off-by: Khem
-
-
-This patch builds on the menuconfig patch for EGLIBC.
-
-
-There are a few options that have non-boolean types, that would benefit from the new 'make *config' support:
-
-EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE (int)
-EGLIBC_NSSWITCH_FIXED_CONFIG (string)
-EGLIBC_NSSWITCH_FIXED_FUNCTIONS (string)
-
-
-The patch converts these to real options in libc/option-groups.def. Also, libc/scripts/option-groups.awk is modified to output a '#define' line for int, hex, or string options encountered in the config file.
-
-In the post-processing script config-postproc.pl, a small change is needed: for any boolean option FOO that is implicitly disabled in the kconfig output, make sure that option is indeed a boolean before printing the explicit OPTION_FOO=n.
-
-Finally, libc/malloc/Makefile passes __OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE as a CPPFLAGS, which is not necessary anymore because this macro will now be present in the generated header.
-
-attached is the updated patch to address above issues.
-
-Steve
-
---
-Steve Longerbeam | Senior Embedded Engineer, ESD Services
-Mentor Embedded(tm) | 46871 Bayside Parkway, Fremont, CA 94538
-P 510.354.5838 | M 408.410.2735
-Nucleus(r) | Linux(r) | Android(tm) | Services | UI | Multi-OS
-
-
-Index: libc/malloc/Makefile
-===================================================================
---- libc.orig/malloc/Makefile 2012-01-04 22:06:18.000000000 -0800
-+++ libc/malloc/Makefile 2012-05-09 19:35:28.598682105 -0700
-@@ -48,10 +48,6 @@
- ifeq ($(OPTION_EGLIBC_MEMUSAGE),y)
- extra-libs = libmemusage
- extra-libs-others = $(extra-libs)
--
--ifdef OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE
--CPPFLAGS-memusage += -D__OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE=$(OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE)
--endif
- endif
-
- libmemusage-routines = memusage
-Index: libc/option-groups.def
-===================================================================
---- libc.orig/option-groups.def 2012-05-09 19:33:48.398677256 -0700
-+++ libc/option-groups.def 2012-05-09 19:35:28.610682107 -0700
-@@ -513,8 +513,11 @@
- the `memusage' and `memusagestat' utilities.
- These components provide memory profiling functions.
-
-- EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE
--
-+config EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE
-+ int "Memory profiling library buffer size"
-+ depends on EGLIBC_MEMUSAGE
-+ default "32768"
-+ help
- Libmemusage library buffers the profiling data in memory
- before writing it out to disk. By default, the library
- allocates 1.5M buffer, which can be substantial for some
-@@ -553,8 +556,11 @@
- 'option-groups.config' file must set the following two
- variables:
-
-- EGLIBC_NSSWITCH_FIXED_CONFIG
--
-+config EGLIBC_NSSWITCH_FIXED_CONFIG
-+ string "Nsswitch fixed config filename"
-+ depends on !EGLIBC_NSSWITCH
-+ default ""
-+ help
- Set this to the name of a file whose contents observe the
- same syntax as an ordinary '/etc/nsswitch.conf' file. The
- EGLIBC build process parses this file just as EGLIBC would
-@@ -576,8 +582,11 @@
- you will probably want to delete references to databases not
- needed on your system.
-
-- EGLIBC_NSSWITCH_FIXED_FUNCTIONS
--
-+config EGLIBC_NSSWITCH_FIXED_FUNCTIONS
-+ string "Nsswitch fixed functions filename"
-+ depends on !EGLIBC_NSSWITCH
-+ default ""
-+ help
- The EGLIBC build process uses this file to decide which
- functions to make available from which service libraries.
- The file 'nss/fixed-nsswitch.functions' serves as a sample
-Index: libc/options-config/config-postproc.pl
-===================================================================
---- libc.orig/options-config/config-postproc.pl 2012-05-09 19:33:36.530676681 -0700
-+++ libc/options-config/config-postproc.pl 2012-05-09 19:35:28.610682107 -0700
-@@ -8,7 +8,7 @@
- die "Could not open $ARGV[0]" unless -T $ARGV[0];
-
- sub yank {
-- @option = grep($_ ne $_[0], @option);
-+ @option = grep(!($_ =~ /$_[0]\s*=/), @option);
- }
-
- open(DEFAULTS, $defaults) || die "Could not open $defaults\n";
-@@ -16,7 +16,7 @@
- # get the full list of available options using the default config file
- $i = 0;
- while (<DEFAULTS>) {
-- if (/^\s*OPTION_(\w+)\s*=/) {
-+ if (/^\s*OPTION_(\w+\s*=.*$)/) {
- $option[$i++] = $1;
- }
- }
-@@ -35,8 +35,9 @@
- s/CONFIG_/OPTION_/g;
- print;
- } elsif (/^\s*#\s+CONFIG_(\w+) is not set/) {
-- # this is a comment line, change CONFIG_ to OPTION_, remove this
-- # option from option list, and convert to explicit OPTION_FOO=n
-+ # this is a comment line for an unset boolean option, change CONFIG_
-+ # to OPTION_, remove this option from option list, and convert to
-+ # explicit OPTION_FOO=n
- $opt = $1;
- yank($opt);
- s/CONFIG_/OPTION_/g;
-@@ -46,9 +47,12 @@
- }
- }
-
--# any options left in @options, are options that were not mentioned in
-+# any boolean options left in @options, are options that were not mentioned in
- # the config file, and implicitly that means the option must be set =n,
- # so do that here.
- foreach $opt (@option) {
-- print "OPTION_$opt=n\n";
-+ if ($opt =~ /=\s*[yn]/) {
-+ $opt =~ s/=\s*[yn]/=n/;
-+ print "OPTION_$opt\n";
-+ }
- }
-Index: libc/scripts/option-groups.awk
-===================================================================
---- libc.orig/scripts/option-groups.awk 2012-01-04 22:06:00.000000000 -0800
-+++ libc/scripts/option-groups.awk 2012-05-09 19:35:28.610682107 -0700
-@@ -46,9 +46,15 @@
- print "#define __" var " 1"
- else if (vars[var] == "n")
- print "/* #undef __" var " */"
-- # Ignore variables that don't have boolean values.
-- # Ideally, this would be driven by the types given in
-- # option-groups.def.
-+ else if (vars[var] ~ /^[0-9]+/ ||
-+ vars[var] ~ /^0x[0-9aAbBcCdDeEfF]+/ ||
-+ vars[var] ~ /^\"/)
-+ print "#define __" var " " vars[var]
-+ else
-+ print "/* #undef __" var " */"
-+ # Ignore variables that don't have boolean, int, hex, or
-+ # string values. Ideally, this would be driven by the types
-+ # given in option-groups.def.
- }
- }
-
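A rough Python restatement of what the deleted 0002 patch teaches option-groups.awk and config-postproc.pl to do, assuming the OPTION_* naming shown in the hunks above: boolean values become #define/#undef pairs, while int, hex, and string values are copied into the generated header as-is.

import re

def emit_defines(config_lines):
    out = []
    for line in config_lines:
        m = re.match(r'\s*(OPTION_\w+)\s*=\s*(.*)$', line)
        if not m:
            continue
        name, value = m.group(1), m.group(2).strip()
        if value == "y":
            out.append("#define __%s 1" % name)
        elif value == "n":
            out.append("/* #undef __%s */" % name)
        elif re.match(r'^(\d+|0x[0-9a-fA-F]+|".*")$', value):
            out.append("#define __%s %s" % (name, value))
        # anything else is ignored, as in the awk script
    return out

print("\n".join(emit_defines([
    "OPTION_EGLIBC_MEMUSAGE=y",
    "OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE=32768",
    "OPTION_EGLIBC_NSSWITCH=n",
])))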
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/0003-eglibc-menuconfig-build-instructions.patch b/meta/recipes-core/eglibc/eglibc-2.19/0003-eglibc-menuconfig-build-instructions.patch
deleted file mode 100644
index d137f5b318..0000000000
--- a/meta/recipes-core/eglibc/eglibc-2.19/0003-eglibc-menuconfig-build-instructions.patch
+++ /dev/null
@@ -1,176 +0,0 @@
-Pulled from
-
-http://www.eglibc.org/archives/patches/msg01035.html
-
-Upstream-Status: Pending
-Signed-off-by: Khem
-
-As part of the menuconfig development, I encountered some outdated information in the cross-build instructions, libc/EGLIBC.cross-building. This patch updates the file with new (and tested) instructions. It is unrelated to the menuconfig support, but applies after.
-
-My testing was done with an ARM target, and an x86_64 Linux host, so I converted the instructions to use those host/target types from the original i686/powerpc. Hope that's ok.
-
-
-Thanks,
-
---
-Steve Longerbeam | Senior Embedded Engineer, ESD Services
-Mentor Embedded(tm) | 46871 Bayside Parkway, Fremont, CA 94538
-P 510.354.5838 | M 408.410.2735
-Nucleus(r) | Linux(r) | Android(tm) | Services | UI | Multi-OS
-
-
- EGLIBC.cross-building | 59 +++++++++++++++++++++++++++++---------------------
- 1 file changed, 35 insertions(+), 24 deletions(-)
-
-Index: libc/EGLIBC.cross-building
-===================================================================
---- libc.orig/EGLIBC.cross-building 2012-05-09 19:33:36.522676681 -0700
-+++ libc/EGLIBC.cross-building 2012-05-09 19:36:13.918684298 -0700
-@@ -47,31 +47,34 @@
- EGLIBC requires recent versions of the GNU binutils, GCC, and the
- Linux kernel. The web page <http://www.eglibc.org/prerequisites>
- documents the current requirements, and lists patches needed for
--certain target architectures. As of this writing, EGLIBC required
--binutils 2.17, GCC 4.1, and Linux 2.6.19.1.
-+certain target architectures. As of this writing, these build
-+instructions have been tested with binutils 2.22.51, GCC 4.6.2,
-+and Linux 3.1.
-
- First, let's set some variables, to simplify later commands. We'll
--build EGLIBC and GCC for a PowerPC target, known to the Linux kernel
--as 'powerpc', and we'll do the build on an Intel Linux box:
-+build EGLIBC and GCC for an ARM target, known to the Linux kernel
-+as 'arm', and we'll do the build on an Intel x86_64 Linux box:
-
-- $ build=i686-pc-linux-gnu
-+ $ build=x86_64-pc-linux-gnu
- $ host=$build
-- $ target=powerpc-none-linux-gnu
-- $ linux_arch=powerpc
-+ $ target=arm-none-linux-gnueabi
-+ $ linux_arch=arm
-
- We're using the aforementioned versions of Binutils, GCC, and Linux:
-
-- $ binutilsv=binutils-2.17
-- $ gccv=gcc-4.1.1
-- $ linuxv=linux-2.6.20
-+ $ binutilsv=binutils-2.22.51
-+ $ gccv=gcc-4.6.2
-+ $ linuxv=linux-3.1
-
- We're carrying out the entire process under '~/cross-build', which
--contains unpacked source trees:
-+contains unpacked source trees for binutils, gcc, and linux kernel,
-+along with EGLIBC svn trunk (which can be checked-out with
-+'svn co http://www.eglibc.org/svn/trunk eglibc'):
-
-- $ top=$HOME/cross-build/ppc
-+ $ top=$HOME/cross-build/$target
- $ src=$HOME/cross-build/src
- $ ls $src
-- binutils-2.17 gcc-4.1.1 libc linux-2.6.20
-+ binutils-2.22.51 eglibc gcc-4.6.2 linux-3.1
-
- We're going to place our build directories in a subdirectory 'obj',
- we'll install the cross-development toolchain in 'tools', and we'll
-@@ -99,7 +102,7 @@
-
- The First GCC
-
--For our work, we need a cross-compiler targeting a PowerPC Linux
-+For our work, we need a cross-compiler targeting an ARM Linux
- system. However, that configuration includes the shared library
- 'libgcc_s.so', which is compiled against the EGLIBC headers (which we
- haven't installed yet) and linked against 'libc.so' (which we haven't
-@@ -125,7 +128,8 @@
- > --prefix=$tools \
- > --without-headers --with-newlib \
- > --disable-shared --disable-threads --disable-libssp \
-- > --disable-libgomp --disable-libmudflap \
-+ > --disable-libgomp --disable-libmudflap --disable-libquadmath \
-+ > --disable-decimal-float --disable-libffi \
- > --enable-languages=c
- $ PATH=$tools/bin:$PATH make
- $ PATH=$tools/bin:$PATH make install
-@@ -162,12 +166,13 @@
- > CXX=$tools/bin/$target-g++ \
- > AR=$tools/bin/$target-ar \
- > RANLIB=$tools/bin/$target-ranlib \
-- > $src/libc/configure \
-+ > $src/eglibc/libc/configure \
- > --prefix=/usr \
- > --with-headers=$sysroot/usr/include \
- > --build=$build \
- > --host=$target \
-- > --disable-profile --without-gd --without-cvs --enable-add-ons
-+ > --disable-profile --without-gd --without-cvs \
-+ > --enable-add-ons=nptl,libidn,../ports
-
- The option '--prefix=/usr' may look strange, but you should never
- configure EGLIBC with a prefix other than '/usr': in various places,
-@@ -181,6 +186,11 @@
- The '--with-headers' option tells EGLIBC where the Linux headers have
- been installed.
-
-+The '--enable-add-ons=nptl,libidn,../ports' option tells EGLIBC to look
-+for the listed glibc add-ons. Most notably the ports add-on (located
-+just above the libc sources in the EGLIBC svn tree) is required to
-+support ARM targets.
-+
- We can now use the 'install-headers' makefile target to install the
- headers:
-
-@@ -223,6 +233,7 @@
- > --prefix=$tools \
- > --with-sysroot=$sysroot \
- > --disable-libssp --disable-libgomp --disable-libmudflap \
-+ > --disable-libffi --disable-libquadmath \
- > --enable-languages=c
- $ PATH=$tools/bin:$PATH make
- $ PATH=$tools/bin:$PATH make install
-@@ -240,13 +251,14 @@
- > CXX=$tools/bin/$target-g++ \
- > AR=$tools/bin/$target-ar \
- > RANLIB=$tools/bin/$target-ranlib \
-- > $src/libc/configure \
-+ > $src/eglibc/libc/configure \
- > --prefix=/usr \
- > --with-headers=$sysroot/usr/include \
- > --with-kconfig=$obj/linux/scripts/kconfig \
- > --build=$build \
- > --host=$target \
-- > --disable-profile --without-gd --without-cvs --enable-add-ons
-+ > --disable-profile --without-gd --without-cvs \
-+ > --enable-add-ons=nptl,libidn,../ports
-
- Note the additional '--with-kconfig' option. This tells EGLIBC where to
- find the host config tools used by the kernel 'make config' and 'make
-@@ -337,15 +349,15 @@
- ELF Header:
- ...
- Type: EXEC (Executable file)
-- Machine: PowerPC
-+ Machine: ARM
-
- ...
- Program Headers:
- Type Offset VirtAddr PhysAddr FileSiz MemSiz Flg Align
- PHDR 0x000034 0x10000034 0x10000034 0x00100 0x00100 R E 0x4
-- INTERP 0x000134 0x10000134 0x10000134 0x0000d 0x0000d R 0x1
-- [Requesting program interpreter: /lib/ld.so.1]
-- LOAD 0x000000 0x10000000 0x10000000 0x008f0 0x008f0 R E 0x10000
-+ INTERP 0x000134 0x00008134 0x00008134 0x00013 0x00013 R 0x1
-+ [Requesting program interpreter: /lib/ld-linux.so.3]
-+ LOAD 0x000000 0x00008000 0x00008000 0x0042c 0x0042c R E 0x8000
- ...
-
- Looking at the dynamic section of the installed 'libgcc_s.so', we see
-@@ -357,7 +369,6 @@
- Dynamic section at offset 0x1083c contains 24 entries:
- Tag Type Name/Value
- 0x00000001 (NEEDED) Shared library: [libc.so.6]
-- 0x00000001 (NEEDED) Shared library: [ld.so.1]
- 0x0000000e (SONAME) Library soname: [libgcc_s.so.1]
- ...
-
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/GLRO_dl_debug_mask.patch b/meta/recipes-core/eglibc/eglibc-2.19/GLRO_dl_debug_mask.patch
deleted file mode 100644
index 7258c82418..0000000000
--- a/meta/recipes-core/eglibc/eglibc-2.19/GLRO_dl_debug_mask.patch
+++ /dev/null
@@ -1,143 +0,0 @@
-It's controlled by __OPTION_EGLIBC_RTLD_DEBUG
-so we should use GLRO_dl_debug_mask
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
-Upstream-Status: Pending
-Index: libc/elf/dl-open.c
-===================================================================
---- libc.orig/elf/dl-open.c 2012-10-25 10:18:12.000000000 -0700
-+++ libc/elf/dl-open.c 2013-01-09 11:49:02.635577870 -0800
-@@ -155,7 +155,7 @@
- ns->_ns_main_searchlist->r_list[new_nlist++] = map;
-
- /* We modify the global scope. Report this. */
-- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_SCOPES, 0))
-+ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_SCOPES, 0))
- _dl_debug_printf ("\nadd %s [%lu] to global scope\n",
- map->l_name, map->l_ns);
- }
-@@ -298,7 +298,7 @@
- LIBC_PROBE (map_complete, 3, args->nsid, r, new);
-
- /* Print scope information. */
-- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_SCOPES, 0))
-+ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_SCOPES, 0))
- _dl_show_scope (new, 0);
-
- /* Only do lazy relocation if `LD_BIND_NOW' is not set. */
-@@ -515,7 +515,7 @@
- }
-
- /* Print scope information. */
-- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_SCOPES, 0))
-+ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_SCOPES, 0))
- _dl_show_scope (imap, from_scope);
- }
-
-Index: libc/ports/sysdeps/mips/dl-lookup.c
-===================================================================
---- libc.orig/ports/sysdeps/mips/dl-lookup.c 2012-08-17 12:39:53.000000000 -0700
-+++ libc/ports/sysdeps/mips/dl-lookup.c 2013-01-09 11:49:02.635577870 -0800
-@@ -111,7 +111,7 @@
- continue;
-
- /* Print some debugging info if wanted. */
-- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_SYMBOLS, 0))
-+ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_SYMBOLS, 0))
- _dl_debug_printf ("symbol=%s; lookup in file=%s [%lu]\n",
- undef_name,
- map->l_name[0] ? map->l_name : rtld_progname,
-@@ -432,7 +432,7 @@
- hash table. */
- if (__builtin_expect (tab->size, 0))
- {
-- assert (GLRO(dl_debug_mask) & DL_DEBUG_PRELINK);
-+ assert (GLRO_dl_debug_mask & DL_DEBUG_PRELINK);
- __rtld_lock_unlock_recursive (tab->lock);
- goto success;
- }
-@@ -681,7 +681,7 @@
- }
-
- /* Display information if we are debugging. */
-- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_FILES, 0))
-+ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_FILES, 0))
- _dl_debug_printf ("\
- \nfile=%s [%lu]; needed by %s [%lu] (relocation dependency)\n\n",
- map->l_name[0] ? map->l_name : rtld_progname,
-@@ -788,7 +788,7 @@
- {
- if ((*ref == NULL || ELFW(ST_BIND) ((*ref)->st_info) != STB_WEAK)
- && skip_map == NULL
-- && !(GLRO(dl_debug_mask) & DL_DEBUG_UNUSED))
-+ && !(GLRO_dl_debug_mask & DL_DEBUG_UNUSED))
- {
- /* We could find no value for a strong reference. */
- const char *reference_name = undef_map ? undef_map->l_name : "";
-@@ -861,7 +861,7 @@
- if (__builtin_expect (current_value.m->l_used == 0, 0))
- current_value.m->l_used = 1;
-
-- if (__builtin_expect (GLRO(dl_debug_mask)
-+ if (__builtin_expect (GLRO_dl_debug_mask
- & (DL_DEBUG_BINDINGS|DL_DEBUG_PRELINK), 0))
- _dl_debug_bindings (undef_name, undef_map, ref,
- &current_value, version, type_class, protected);
-@@ -926,7 +926,7 @@
- {
- const char *reference_name = undef_map->l_name;
-
-- if (GLRO(dl_debug_mask) & DL_DEBUG_BINDINGS)
-+ if (GLRO_dl_debug_mask & DL_DEBUG_BINDINGS)
- {
- _dl_debug_printf ("binding file %s [%lu] to %s [%lu]: %s symbol `%s'",
- (reference_name[0]
-@@ -942,7 +942,7 @@
- _dl_debug_printf_c ("\n");
- }
- #ifdef SHARED
-- if (GLRO(dl_debug_mask) & DL_DEBUG_PRELINK)
-+ if (GLRO_dl_debug_mask & DL_DEBUG_PRELINK)
- {
- int conflict = 0;
- struct sym_val val = { NULL, NULL };
-Index: libc/elf/rtld.c
-===================================================================
---- libc.orig/elf/rtld.c 2012-10-10 08:35:46.000000000 -0700
-+++ libc/elf/rtld.c 2013-01-09 11:49:02.635577870 -0800
-@@ -2118,7 +2118,7 @@
- GLRO(dl_init_all_dirs) = GL(dl_all_dirs);
-
- /* Print scope information. */
-- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_SCOPES, 0))
-+ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_SCOPES, 0))
- {
- _dl_debug_printf ("\nInitial object scopes\n");
-
-Index: libc/elf/dl-lookup.c
-===================================================================
---- libc.orig/elf/dl-lookup.c 2012-08-17 12:39:53.000000000 -0700
-+++ libc/elf/dl-lookup.c 2013-01-09 11:49:02.635577870 -0800
-@@ -771,7 +771,7 @@
- {
- if ((*ref == NULL || ELFW(ST_BIND) ((*ref)->st_info) != STB_WEAK)
- && skip_map == NULL
-- && !(GLRO(dl_debug_mask) & DL_DEBUG_UNUSED))
-+ && !(GLRO_dl_debug_mask & DL_DEBUG_UNUSED))
- {
- /* We could find no value for a strong reference. */
- const char *reference_name = undef_map ? undef_map->l_name : "";
-Index: libc/elf/get-dynamic-info.h
-===================================================================
---- libc.orig/elf/get-dynamic-info.h 2012-12-02 13:11:45.000000000 -0800
-+++ libc/elf/get-dynamic-info.h 2013-01-09 12:53:51.015657653 -0800
-@@ -157,7 +157,7 @@
- them. Therefore to avoid breaking existing applications the
- best we can do is add a warning during debugging with the
- intent of notifying the user of the problem. */
-- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_FILES, 0)
-+ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_FILES, 0)
- && l->l_flags_1 & ~DT_1_SUPPORTED_MASK)
- _dl_debug_printf ("\nWARNING: Unsupported flag value(s) of 0x%x in DT_FLAGS_1.\n",
- l->l_flags_1 & ~DT_1_SUPPORTED_MASK);
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/eglibc-svn-arm-lowlevellock-include-tls.patch b/meta/recipes-core/eglibc/eglibc-2.19/eglibc-svn-arm-lowlevellock-include-tls.patch
deleted file mode 100644
index 4313aa5197..0000000000
--- a/meta/recipes-core/eglibc/eglibc-2.19/eglibc-svn-arm-lowlevellock-include-tls.patch
+++ /dev/null
@@ -1,21 +0,0 @@
-In file included from ../nptl/sysdeps/unix/sysv/linux/libc-lowlevellock.c:21:
-../nptl/sysdeps/unix/sysv/linux/lowlevellock.c: In function '__lll_lock_wait_private':
-../nptl/sysdeps/unix/sysv/linux/lowlevellock.c:34: warning: implicit declaration of function 'THREAD_GETMEM'
-../nptl/sysdeps/unix/sysv/linux/lowlevellock.c:34: error: 'THREAD_SELF' undeclared (first use in this function)
-../nptl/sysdeps/unix/sysv/linux/lowlevellock.c:34: error: (Each undeclared identifier is reported only once
-../nptl/sysdeps/unix/sysv/linux/lowlevellock.c:34: error: for each function it appears in.)
-../nptl/sysdeps/unix/sysv/linux/lowlevellock.c:34: error: 'header' undeclared (first use in this function)
-make[4]: *** [/var/tmp/portage/sys-libs/glibc-2.7-r1/work/build-default-armv4l-unknown-linux-gnu-nptl/nptl/rtld-libc-lowlevellock.os] Error 1
-
-Upstream-Status: Pending
-
---- libc/ports/sysdeps/unix/sysv/linux/arm/nptl/lowlevellock.h.orig
-+++ libc/ports/sysdeps/unix/sysv/linux/arm/nptl/lowlevellock.h
-@@ -25,6 +25,7 @@
- #include <atomic.h>
- #include <sysdep.h>
- #include <kernel-features.h>
-+#include <tls.h>
-
- #define FUTEX_WAIT 0
- #define FUTEX_WAKE 1
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/eglibc_fix_findidx_parameters.patch b/meta/recipes-core/eglibc/eglibc-2.19/eglibc_fix_findidx_parameters.patch
deleted file mode 100644
index bbf4605505..0000000000
--- a/meta/recipes-core/eglibc/eglibc-2.19/eglibc_fix_findidx_parameters.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-Upstream-Status: Backport
-
-Imported patch from: http://www.eglibc.org/archives/patches/msg01124.html
-
-Signed-Off-By: Nitin A Kamble <nitin.a.kamble@intel.com>
-2012/05/09
-
-Index: libc/posix/xregex.c
-===================================================================
---- libc.orig/posix/xregex.c
-+++ libc/posix/xregex.c
-@@ -2943,7 +2943,7 @@ PREFIX(regex_compile) (const char *ARG_P
- _NL_CURRENT (LC_COLLATE,
- _NL_COLLATE_INDIRECTWC);
-
-- idx = findidx ((const wint_t**)&cp);
-+ idx = findidx ((const wint_t**)&cp, -1);
- if (idx == 0 || cp < (wint_t*) str + c1)
- /* This is no valid character. */
- FREE_STACK_RETURN (REG_ECOLLATE);
-@@ -3392,7 +3392,7 @@ PREFIX(regex_compile) (const char *ARG_P
- indirect = (const int32_t *)
- _NL_CURRENT (LC_COLLATE, _NL_COLLATE_INDIRECTMB);
-
-- idx = findidx (&cp);
-+ idx = findidx (&cp, -1);
- if (idx == 0 || cp < str + c1)
- /* This is no valid character. */
- FREE_STACK_RETURN (REG_ECOLLATE);
-@@ -6363,7 +6363,7 @@ byte_re_match_2_internal (struct re_patt
- }
- str_buf[i] = TRANSLATE(*(d+i));
- str_buf[i+1] = '\0'; /* sentinel */
-- idx2 = findidx ((const wint_t**)&cp);
-+ idx2 = findidx ((const wint_t**)&cp, -1);
- }
-
- /* Update d, however d will be incremented at
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/fileops-without-wchar-io.patch b/meta/recipes-core/eglibc/eglibc-2.19/fileops-without-wchar-io.patch
deleted file mode 100644
index 2ca0bca248..0000000000
--- a/meta/recipes-core/eglibc/eglibc-2.19/fileops-without-wchar-io.patch
+++ /dev/null
@@ -1,22 +0,0 @@
-Fix error like
-
-/home/kraj/work/angstrom/sources/openembedded-core/build/tmp-eglibc/work/mips64-oe-linux/eglibc-2.16-r2+svnr19383/build-mips64-oe-linux/libc_pic.os: In function `_IO_new_file_fopen':
-/home/kraj/work/angstrom/sources/openembedded-core/build/tmp-eglibc/work/mips64-oe-linux/eglibc-2.16-r2+svnr19383/eglibc-2_16/libc/libio/fileops.c:431: undefined reference to `_IO_file_close_it_internal'
-collect2: error: ld returned 1 exit status
-
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Upstream-Status: Pending
-Index: libc/libio/fileops.c
-===================================================================
---- libc.orig/libio/fileops.c 2012-07-04 18:25:47.000000000 -0700
-+++ libc/libio/fileops.c 2012-07-24 00:21:17.220322557 -0700
-@@ -428,7 +428,7 @@
- result->_mode = 1;
- #else
- /* Treat this as if we couldn't find the given character set. */
-- (void) INTUSE(_IO_file_close_it) (fp);
-+ (void) _IO_file_close_it (fp);
- __set_errno (EINVAL);
- return NULL;
- #endif
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/fix_am_rootsbindir.patch b/meta/recipes-core/eglibc/eglibc-2.19/fix_am_rootsbindir.patch
deleted file mode 100644
index ac1e2e82f9..0000000000
--- a/meta/recipes-core/eglibc/eglibc-2.19/fix_am_rootsbindir.patch
+++ /dev/null
@@ -1,32 +0,0 @@
-sysdeps/gnu/configure.ac: handle correctly $libc_cv_rootsbindir
-
-Upstream-Status: Pending
-Signed-off-by: Matthieu Crapet <Matthieu.Crapet@ingenico.com>
-
-diff --git a/libc/sysdeps/gnu/configure b/libc/sysdeps/gnu/configure
-index e7f0e43..687beab 100644
---- a/libc/sysdeps/gnu/configure
-+++ b/libc/sysdeps/gnu/configure
-@@ -42,6 +42,6 @@ case "$prefix" in
- else
- libc_cv_localstatedir=$localstatedir
- fi
-- libc_cv_rootsbindir=/sbin
-+ test -n "$libc_cv_rootsbindir" || libc_cv_rootsbindir=/sbin
- ;;
- esac
-diff --git a/libc/sysdeps/gnu/configure.ac b/libc/sysdeps/gnu/configure.ac
-index ce251df..57c1e77 100644
---- a/libc/sysdeps/gnu/configure.ac
-+++ b/libc/sysdeps/gnu/configure.ac
-@@ -42,6 +42,6 @@ case "$prefix" in
- else
- libc_cv_localstatedir=$localstatedir
- fi
-- libc_cv_rootsbindir=/sbin
-+ test -n "$libc_cv_rootsbindir" || libc_cv_rootsbindir=/sbin
- ;;
- esac
---
-2.0.0
-
diff --git a/meta/recipes-core/eglibc/eglibc-common.inc b/meta/recipes-core/eglibc/eglibc-common.inc
deleted file mode 100644
index d18786ab93..0000000000
--- a/meta/recipes-core/eglibc/eglibc-common.inc
+++ /dev/null
@@ -1,9 +0,0 @@
-SUMMARY = "Embedded GLIBC (GNU C Library)"
-DESCRIPTION = "Embedded GLIBC (EGLIBC) is a variant of the GNU C Library (GLIBC) that is designed to work well on embedded systems. EGLIBC strives to be source and binary compatible with GLIBC. EGLIBC's goals include reduced footprint, configurable components, better support for cross-compilation and cross-testing."
-HOMEPAGE = "http://www.eglibc.org/home"
-SECTION = "libs"
-LICENSE = "GPLv2 & LGPLv2.1"
-LIC_FILES_CHKSUM ?= "file://LICENSES;md5=07a394b26e0902b9ffdec03765209770 \
- file://COPYING;md5=393a5ca445f6965873eca0259a17f833 \
- file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \
- file://COPYING.LIB;md5=bbb461211a33b134d42ed5ee802b37ff "
diff --git a/meta/recipes-core/eglibc/eglibc-initial_2.19.bb b/meta/recipes-core/eglibc/eglibc-initial_2.19.bb
deleted file mode 100644
index de45079603..0000000000
--- a/meta/recipes-core/eglibc/eglibc-initial_2.19.bb
+++ /dev/null
@@ -1,11 +0,0 @@
-require eglibc_${PV}.bb
-require eglibc-initial.inc
-
-DEPENDS += "kconfig-frontends-native"
-
-# main eglibc recipes muck with TARGET_CPPFLAGS to point into
-# final target sysroot but we
-# are not there when building eglibc-initial
-# so reset it here
-
-TARGET_CPPFLAGS = ""
diff --git a/meta/recipes-core/eglibc/eglibc-locale_2.19.bb b/meta/recipes-core/eglibc/eglibc-locale_2.19.bb
deleted file mode 100644
index ce6c1d2320..0000000000
--- a/meta/recipes-core/eglibc/eglibc-locale_2.19.bb
+++ /dev/null
@@ -1 +0,0 @@
-require eglibc-locale.inc
diff --git a/meta/recipes-core/eglibc/eglibc-mtrace.inc b/meta/recipes-core/eglibc/eglibc-mtrace.inc
deleted file mode 100644
index d68783e439..0000000000
--- a/meta/recipes-core/eglibc/eglibc-mtrace.inc
+++ /dev/null
@@ -1,13 +0,0 @@
-include eglibc-collateral.inc
-
-SUMMARY = "mtrace utility provided by eglibc"
-DESCRIPTION = "mtrace utility provided by eglibc"
-RDEPENDS_${PN} = "perl"
-RPROVIDES_${PN} = "libc-mtrace"
-
-SRC = "${STAGING_INCDIR}/eglibc-scripts-internal-${MULTIMACH_TARGET_SYS}"
-
-do_install() {
- install -d -m 0755 ${D}${bindir}
- install -m 0755 ${SRC}/mtrace ${D}${bindir}/
-}
diff --git a/meta/recipes-core/eglibc/eglibc-mtrace_2.19.bb b/meta/recipes-core/eglibc/eglibc-mtrace_2.19.bb
deleted file mode 100644
index 6fa2be9cc9..0000000000
--- a/meta/recipes-core/eglibc/eglibc-mtrace_2.19.bb
+++ /dev/null
@@ -1 +0,0 @@
-require eglibc-mtrace.inc
diff --git a/meta/recipes-core/eglibc/eglibc-scripts.inc b/meta/recipes-core/eglibc/eglibc-scripts.inc
deleted file mode 100644
index 9ca6673404..0000000000
--- a/meta/recipes-core/eglibc/eglibc-scripts.inc
+++ /dev/null
@@ -1,16 +0,0 @@
-include eglibc-collateral.inc
-
-SUMMARY = "utility scripts provided by eglibc"
-DESCRIPTION = "utility scripts provided by eglibc"
-RDEPENDS_${PN} = "bash eglibc-mtrace"
-
-SRC = "${STAGING_INCDIR}/eglibc-scripts-internal-${MULTIMACH_TARGET_SYS}"
-
-bashscripts = "sotruss xtrace"
-
-do_install() {
- install -d -m 0755 ${D}${bindir}
- for i in ${bashscripts}; do
- install -m 0755 ${SRC}/$i ${D}${bindir}/
- done
-}
diff --git a/meta/recipes-core/eglibc/eglibc-scripts_2.19.bb b/meta/recipes-core/eglibc/eglibc-scripts_2.19.bb
deleted file mode 100644
index 31133621d1..0000000000
--- a/meta/recipes-core/eglibc/eglibc-scripts_2.19.bb
+++ /dev/null
@@ -1 +0,0 @@
-require eglibc-scripts.inc
diff --git a/meta/recipes-core/glib-2.0/glib-2.0/allow-run-media-sdX-drive-mount-if-username-root.patch b/meta/recipes-core/glib-2.0/glib-2.0/allow-run-media-sdX-drive-mount-if-username-root.patch
new file mode 100644
index 0000000000..3d0c008bbe
--- /dev/null
+++ b/meta/recipes-core/glib-2.0/glib-2.0/allow-run-media-sdX-drive-mount-if-username-root.patch
@@ -0,0 +1,39 @@
+From c53e94a520b573aa0dcf12903e9563fe8badc34c Mon Sep 17 00:00:00 2001
+From: Marius Avram <marius.avram@intel.com>
+Date: Wed, 27 Aug 2014 12:10:41 +0300
+Subject: [PATCH] Allow /run/media/sdX drive mount if username root
+
+When the user logged into the system is root, drives are mounted
+directly under /run/media/sdX rather than /run/media/<username>/sdX,
+which is the layout the function g_unix_mount_guess_should_display()
+expects.
+
+Without this change USB stick mounts are not accessible from
+graphical applications such as the File Manager (pcmanfm).
+
+Upstream-Status: Inappropriate
+
+Signed-off-by: Marius Avram <marius.avram@intel.com>
+---
+ gio/gunixmounts.c | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/gio/gunixmounts.c b/gio/gunixmounts.c
+index 4999354..f6c1472 100644
+--- a/gio/gunixmounts.c
++++ b/gio/gunixmounts.c
+@@ -2136,6 +2136,11 @@ g_unix_mount_guess_should_display (GUnixMountEntry *mount_entry)
+ mount_path[sizeof ("/run/media/") - 1 + user_name_len] == '/')
+ is_in_runtime_dir = TRUE;
+
++ /* Allow no username in path in /run/media if current user is root */
++ if (strcmp(user_name, "root") == 0 &&
++ strncmp (mount_path, "/run/media/", sizeof("run/media")) == 0)
++ is_in_runtime_dir = TRUE;
++
+ if (is_in_runtime_dir || g_str_has_prefix (mount_path, "/media/"))
+ {
+ char *path;
+--
+1.7.9.5
+
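The gunixmounts.c change above can be summarised by a small Python equivalent of the path test (a sketch only; the real check operates on GUnixMountEntry objects and C string lengths):

def is_in_runtime_dir(mount_path, user_name):
    # /run/media/<user>/... is always treated as a runtime-dir mount
    if mount_path.startswith("/run/media/%s/" % user_name):
        return True
    # the patch adds: root's drives are mounted directly under /run/media/
    return user_name == "root" and mount_path.startswith("/run/media/")

assert is_in_runtime_dir("/run/media/alice/sdb1", "alice")
assert is_in_runtime_dir("/run/media/sdb1", "root")
assert not is_in_runtime_dir("/run/media/sdb1", "alice")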
diff --git a/meta/recipes-core/glib-2.0/glib-2.0_2.40.0.bb b/meta/recipes-core/glib-2.0/glib-2.0_2.40.0.bb
index ff1fb87a38..4b1bcf85c5 100644
--- a/meta/recipes-core/glib-2.0/glib-2.0_2.40.0.bb
+++ b/meta/recipes-core/glib-2.0/glib-2.0_2.40.0.bb
@@ -13,6 +13,7 @@ SRC_URI = "${GNOME_MIRROR}/glib/${SHRT_VER}/glib-${PV}.tar.xz \
file://ptest-paths.patch \
file://uclibc.patch \
file://0001-configure.ac-Do-not-use-readlink-when-cross-compilin.patch \
+ file://allow-run-media-sdX-drive-mount-if-username-root.patch \
"
SRC_URI_append_class-native = " file://glib-gettextize-dir.patch"
diff --git a/meta/recipes-core/glib-2.0/glib.inc b/meta/recipes-core/glib-2.0/glib.inc
index bd8d74b3aa..2d81afc8f2 100644
--- a/meta/recipes-core/glib-2.0/glib.inc
+++ b/meta/recipes-core/glib-2.0/glib.inc
@@ -86,17 +86,17 @@ RDEPENDS_${PN}-ptest += "\
"
RDEPENDS_${PN}-ptest_append_libc-glibc = "\
- eglibc-gconv-utf-16 \
- eglibc-charmap-utf-8 \
- eglibc-gconv-cp1255 \
- eglibc-charmap-cp1255 \
- eglibc-gconv-utf-32 \
- eglibc-gconv-utf-7 \
- eglibc-gconv-euc-jp \
- eglibc-gconv-iso8859-1 \
- eglibc-gconv-iso8859-15 \
- eglibc-charmap-invariant \
- eglibc-localedata-translit-cjk-variants \
+ glibc-gconv-utf-16 \
+ glibc-charmap-utf-8 \
+ glibc-gconv-cp1255 \
+ glibc-charmap-cp1255 \
+ glibc-gconv-utf-32 \
+ glibc-gconv-utf-7 \
+ glibc-gconv-euc-jp \
+ glibc-gconv-iso8859-1 \
+ glibc-gconv-iso8859-15 \
+ glibc-charmap-invariant \
+ glibc-localedata-translit-cjk-variants \
"
INSANE_SKIP_${PN}-ptest += "libdir"
diff --git a/meta/recipes-core/eglibc/cross-localedef-native-2.19/fix_for_centos_5.8.patch b/meta/recipes-core/glibc/cross-localedef-native/fix_for_centos_5.8.patch
index 7618c99534..186a480458 100644
--- a/meta/recipes-core/eglibc/cross-localedef-native-2.19/fix_for_centos_5.8.patch
+++ b/meta/recipes-core/glibc/cross-localedef-native/fix_for_centos_5.8.patch
@@ -3,10 +3,10 @@ Upstream-Status: Inappropriate [other]
This is a hack to fix building the locale bits on an older
CentOs 5.X machine
-Index: eglibc-2_16/libc/locale/programs/config.h
+Index: git/locale/programs/config.h
===================================================================
---- eglibc-2_16.orig/libc/locale/programs/config.h
-+++ eglibc-2_16/libc/locale/programs/config.h
+--- git/locale/programs/config.h
++++ git.orig/locale/programs/config.h
@@ -19,6 +19,8 @@
#ifndef _LD_CONFIG_H
#define _LD_CONFIG_H 1
diff --git a/meta/recipes-core/glibc/cross-localedef-native_2.20.bb b/meta/recipes-core/glibc/cross-localedef-native_2.20.bb
new file mode 100644
index 0000000000..8aaa225706
--- /dev/null
+++ b/meta/recipes-core/glibc/cross-localedef-native_2.20.bb
@@ -0,0 +1,56 @@
+SUMMARY = "Cross locale generation tool for glibc"
+HOMEPAGE = "http://www.gnu.org/software/libc/libc.html"
+SECTION = "libs"
+LICENSE = "LGPL-2.1"
+
+LIC_FILES_CHKSUM = "file://LICENSES;md5=e9a558e243b36d3209f380deb394b213 \
+ file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \
+ file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c"
+
+
+inherit native
+inherit autotools
+
+FILESEXTRAPATHS =. "${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/glibc:"
+
+PV = "2.20"
+
+SRC_URI = "git://sourceware.org/git/glibc.git;branch=release/${PV}/master;name=glibc \
+ git://github.com/kraj/localedef;branch=master;name=localedef;destsuffix=git/localedef \
+ file://fix_for_centos_5.8.patch \
+ ${EGLIBCPATCHES} \
+ "
+EGLIBCPATCHES = "\
+ file://timezone-re-written-tzselect-as-posix-sh.patch \
+ file://eglibc.patch \
+ file://option-groups.patch \
+ file://GLRO_dl_debug_mask.patch \
+ file://eglibc-header-bootstrap.patch \
+ file://eglibc-resolv-dynamic.patch \
+ file://eglibc-ppc8xx-cache-line-workaround.patch \
+ file://eglibc-sh4-fpscr_values.patch \
+ file://eglibc-use-option-groups.patch \
+ "
+
+SRCREV_glibc = "b8079dd0d360648e4e8de48656c5c38972621072"
+SRCREV_localedef = "c833367348d39dad7ba018990bfdaffaec8e9ed3"
+
+# Makes for a rather long rev (22 characters), but...
+#
+SRCREV_FORMAT = "glibc__localedef"
+
+S = "${WORKDIR}/git"
+
+EXTRA_OECONF = "--with-glibc=${S}"
+CFLAGS += "-DNOT_IN_libc=1"
+
+do_configure () {
+ ${S}/localedef/configure ${EXTRA_OECONF}
+}
+
+
+do_install() {
+ install -d ${D}${bindir}
+ install -m 0755 ${B}/localedef ${D}${bindir}/cross-localedef
+}
diff --git a/meta/recipes-core/eglibc/eglibc-collateral.inc b/meta/recipes-core/glibc/glibc-collateral.inc
index 8feca09f2e..85e06da816 100644
--- a/meta/recipes-core/eglibc/eglibc-collateral.inc
+++ b/meta/recipes-core/glibc/glibc-collateral.inc
@@ -1,6 +1,6 @@
INHIBIT_DEFAULT_DEPS = "1"
LICENSE = "GPLv2 & LGPLv2.1"
-HOMEPAGE = "http://www.eglibc.org/"
+HOMEPAGE = "http://www.gnu.org/software/libc/index.html"
do_fetch[noexec] = "1"
do_unpack[noexec] = "1"
diff --git a/meta/recipes-core/glibc/glibc-common.inc b/meta/recipes-core/glibc/glibc-common.inc
new file mode 100644
index 0000000000..bba1568baf
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc-common.inc
@@ -0,0 +1,9 @@
+SUMMARY = "GLIBC (GNU C Library)"
+DESCRIPTION = "The GNU C Library is used as the system C library in most systems with the Linux kernel."
+HOMEPAGE = "http://www.gnu.org/software/libc/libc.html"
+SECTION = "libs"
+LICENSE = "GPLv2 & LGPLv2.1"
+LIC_FILES_CHKSUM ?= "file://LICENSES;md5=07a394b26e0902b9ffdec03765209770 \
+ file://COPYING;md5=393a5ca445f6965873eca0259a17f833 \
+ file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \
+ file://COPYING.LIB;md5=bbb461211a33b134d42ed5ee802b37ff "
diff --git a/meta/recipes-core/eglibc/eglibc-initial.inc b/meta/recipes-core/glibc/glibc-initial.inc
index 92402f8ff5..7db3deac1b 100644
--- a/meta/recipes-core/eglibc/eglibc-initial.inc
+++ b/meta/recipes-core/glibc/glibc-initial.inc
@@ -11,8 +11,8 @@ TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TCBOOTSTRAP}"
do_configure () {
sed -ie 's,{ (exit 1); exit 1; }; },{ (exit 0); }; },g' ${S}/configure
chmod +x ${S}/configure
- (cd ${S} && gnu-configize) || die "failure in running gnu-configize"
- find ${S} -name "configure" | xargs touch
+ (cd ${S} && gnu-configize) || die "failure in running gnu-configize"
+ find ${S} -name "configure" | xargs touch
${S}/configure --host=${TARGET_SYS} --build=${BUILD_SYS} \
--prefix=/usr \
--without-cvs --disable-sanity-checks \
@@ -26,28 +26,28 @@ do_compile () {
}
do_install () {
- oe_runmake cross-compiling=yes install_root=${D} \
- includedir='${includedir}' prefix='${prefix}' \
- install-bootstrap-headers=yes install-headers
+ oe_runmake cross-compiling=yes install_root=${D} \
+ includedir='${includedir}' prefix='${prefix}' \
+ install-bootstrap-headers=yes install-headers
- oe_runmake csu/subdir_lib
- mkdir -p ${D}${libdir}/
- install -m 644 csu/crt[1in].o ${D}${libdir}
+ oe_runmake csu/subdir_lib
+ mkdir -p ${D}${libdir}/
+ install -m 644 csu/crt[1in].o ${D}${libdir}
- # Two headers -- stubs.h and features.h -- aren't installed by install-headers,
- # so do them by hand. We can tolerate an empty stubs.h for the moment.
- # See e.g. http://gcc.gnu.org/ml/gcc/2002-01/msg00900.html
- mkdir -p ${D}${includedir}/gnu/
- touch ${D}${includedir}/gnu/stubs.h
- cp ${S}/include/features.h ${D}${includedir}/features.h
+ # Two headers -- stubs.h and features.h -- aren't installed by install-headers,
+ # so do them by hand. We can tolerate an empty stubs.h for the moment.
+ # See e.g. http://gcc.gnu.org/ml/gcc/2002-01/msg00900.html
+ mkdir -p ${D}${includedir}/gnu/
+ touch ${D}${includedir}/gnu/stubs.h
+ cp ${S}/include/features.h ${D}${includedir}/features.h
- if [ -e ${B}/bits/stdio_lim.h ]; then
- cp ${B}/bits/stdio_lim.h ${D}${includedir}/bits/
- fi
- # add links to linux-libc-headers: final eglibc build need this.
- for t in linux asm asm-generic; do
- ln -s ${STAGING_DIR_TARGET}${includedir}/$t ${D}${includedir}/
- done
+ if [ -e ${B}/bits/stdio_lim.h ]; then
+ cp ${B}/bits/stdio_lim.h ${D}${includedir}/bits/
+ fi
+	# add links to linux-libc-headers: final glibc build needs this.
+ for t in linux asm asm-generic; do
+ ln -s ${STAGING_DIR_TARGET}${includedir}/$t ${D}${includedir}/
+ done
}
do_install_locale() {
@@ -58,8 +58,8 @@ do_siteconfig () {
:
}
-SSTATEPOSTINSTFUNCS += "eglibcinitial_sstate_postinst"
-eglibcinitial_sstate_postinst() {
+SSTATEPOSTINSTFUNCS += "glibcinitial_sstate_postinst"
+glibcinitial_sstate_postinst() {
if [ "${BB_CURRENTTASK}" = "populate_sysroot" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ]
then
# Recreate the symlinks to ensure they point to the correct location
diff --git a/meta/recipes-core/glibc/glibc-initial_2.20.bb b/meta/recipes-core/glibc/glibc-initial_2.20.bb
new file mode 100644
index 0000000000..8ab01dc79d
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc-initial_2.20.bb
@@ -0,0 +1,11 @@
+require glibc_${PV}.bb
+require glibc-initial.inc
+
+DEPENDS += "kconfig-frontends-native"
+
+# The main glibc recipe mucks with TARGET_CPPFLAGS to point into the
+# final target sysroot, but we are not there yet when building
+# glibc-initial, so reset it here.
+
+TARGET_CPPFLAGS = ""
diff --git a/meta/recipes-core/eglibc/eglibc-ld.inc b/meta/recipes-core/glibc/glibc-ld.inc
index 6261ae3419..962d666885 100644
--- a/meta/recipes-core/eglibc/eglibc-ld.inc
+++ b/meta/recipes-core/glibc/glibc-ld.inc
@@ -5,7 +5,7 @@ def ld_append_if_tune_exists(d, infos, dict):
infos['ldconfig'].add('{"' + libdir + '/' + dict[tune][0] + '",' + dict[tune][1] + ' }')
infos['lddrewrite'].add(libdir+'/'+dict[tune][0])
-def eglibc_dl_info(d):
+def glibc_dl_info(d):
ld_info_all = {
"mips": ["ld.so.1", "FLAG_ELF_LIBC6"],
"mips64-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
@@ -52,5 +52,5 @@ def eglibc_dl_info(d):
infos['lddrewrite'] = ' '.join(infos['lddrewrite'])
return infos
-EGLIBC_KNOWN_INTERPRETER_NAMES = "${@eglibc_dl_info(d)['ldconfig']}"
-RTLDLIST = "${@eglibc_dl_info(d)['lddrewrite']}"
+EGLIBC_KNOWN_INTERPRETER_NAMES = "${@glibc_dl_info(d)['ldconfig']}"
+RTLDLIST = "${@glibc_dl_info(d)['lddrewrite']}"
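glibc_dl_info() collects, for every tune in use, the dynamic linker name and its ldconfig flag; the two joined sets become EGLIBC_KNOWN_INTERPRETER_NAMES and RTLDLIST. A standalone sketch of that flow, using a trimmed table (only the mips entries appear in the hunk above; the other rows are assumptions) and plain arguments instead of the BitBake datastore:

ld_info_all = {
    "mips":       ["ld.so.1", "FLAG_ELF_LIBC6"],
    "mips64-n32": ["ld.so.1", "FLAG_ELF_LIBC6"],
    "powerpc":    ["ld.so.1", "FLAG_ELF_LIBC6"],
    "x86-64":     ["ld-linux-x86-64.so.2", "FLAG_ELF_LIBC6"],
}

def glibc_dl_info(tunes, libdir="/lib"):
    infos = {"ldconfig": set(), "lddrewrite": set()}
    for tune in tunes:
        if tune not in ld_info_all:
            continue
        loader, flag = ld_info_all[tune]
        # ldconfig entries are C initialisers, lddrewrite entries are plain paths
        infos["ldconfig"].add('{"%s/%s",%s }' % (libdir, loader, flag))
        infos["lddrewrite"].add("%s/%s" % (libdir, loader))
    return infos

info = glibc_dl_info(["x86-64"])
print(" ".join(info["ldconfig"]))    # feeds EGLIBC_KNOWN_INTERPRETER_NAMES
print(" ".join(info["lddrewrite"]))  # feeds RTLDLIST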
diff --git a/meta/recipes-core/eglibc/eglibc-locale.inc b/meta/recipes-core/glibc/glibc-locale.inc
index 4da5abd778..df6d073436 100644
--- a/meta/recipes-core/eglibc/eglibc-locale.inc
+++ b/meta/recipes-core/glibc/glibc-locale.inc
@@ -1,12 +1,12 @@
-include eglibc-collateral.inc
+include glibc-collateral.inc
-SUMMARY = "Locale data from eglibc"
+SUMMARY = "Locale data from glibc"
-BPN = "eglibc"
-LOCALEBASEPN = "${MLPREFIX}eglibc"
+BPN = "glibc"
+LOCALEBASEPN = "${MLPREFIX}glibc"
-# eglibc-collateral.inc inhibits all default deps, but do_package needs objcopy
-# ERROR: objcopy failed with exit code 127 (cmd was 'i586-webos-linux-objcopy' --only-keep-debug 'eglibc-locale/2.17-r0/package/usr/lib/gconv/IBM1166.so' 'eglibc-locale/2.17-r0/package/usr/lib/gconv/.debug/IBM1166.so')
+# glibc-collateral.inc inhibits all default deps, but do_package needs objcopy
+# ERROR: objcopy failed with exit code 127 (cmd was 'i586-webos-linux-objcopy' --only-keep-debug 'glibc-locale/2.17-r0/package/usr/lib/gconv/IBM1166.so' 'glibc-locale/2.17-r0/package/usr/lib/gconv/.debug/IBM1166.so')
# ERROR: Function failed: split_and_strip_files
BINUTILSDEP = "virtual/${MLPREFIX}${TARGET_PREFIX}binutils:do_populate_sysroot"
BINUTILSDEP_class-nativesdk = "virtual/${TARGET_PREFIX}binutils-crosssdk:do_populate_sysroot"
@@ -19,11 +19,11 @@ do_package[depends] += "${BINUTILSDEP}"
# default to disabled
ENABLE_BINARY_LOCALE_GENERATION ?= "0"
-ENABLE_BINARY_LOCALE_GENERATION_pn-nativesdk-eglibc-locale = "0"
+ENABLE_BINARY_LOCALE_GENERATION_pn-nativesdk-glibc-locale = "0"
#enable locale generation on these arches
# BINARY_LOCALE_ARCHES is a space separated list of regular expressions
-BINARY_LOCALE_ARCHES ?= "arm.* i[3-6]86 x86_64 powerpc mips mips64"
+BINARY_LOCALE_ARCHES ?= "arm.* aarch64 i[3-6]86 x86_64 powerpc mips mips64"
# set "1" to use cross-localedef for locale generation
# set "0" for qemu emulation of native localedef for locale generation
@@ -34,40 +34,40 @@ PROVIDES = "virtual/libc-locale"
PACKAGES = "localedef ${PN}-dbg"
PACKAGES_DYNAMIC = "^locale-base-.* \
- ^eglibc-gconv-.* ^eglibc-charmap-.* ^eglibc-localedata-.* ^eglibc-binary-localedata-.* \
+ ^glibc-gconv-.* ^glibc-charmap-.* ^glibc-localedata-.* ^glibc-binary-localedata-.* \
^glibc-gconv-.* ^glibc-charmap-.* ^glibc-localedata-.* ^glibc-binary-localedata-.* \
- ^${MLPREFIX}eglibc-gconv$"
+ ^${MLPREFIX}glibc-gconv$"
-# Create a eglibc-binaries package
+# Create a glibc-binaries package
ALLOW_EMPTY_${BPN}-binaries = "1"
PACKAGES += "${BPN}-binaries"
-RRECOMMENDS_${BPN}-binaries = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("eglibc-binary") != -1])}"
+RRECOMMENDS_${BPN}-binaries = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-binary") != -1])}"
-# Create a eglibc-charmaps package
+# Create a glibc-charmaps package
ALLOW_EMPTY_${BPN}-charmaps = "1"
PACKAGES += "${BPN}-charmaps"
-RRECOMMENDS_${BPN}-charmaps = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("eglibc-charmap") != -1])}"
+RRECOMMENDS_${BPN}-charmaps = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-charmap") != -1])}"
-# Create a eglibc-gconvs package
+# Create a glibc-gconvs package
ALLOW_EMPTY_${BPN}-gconvs = "1"
PACKAGES += "${BPN}-gconvs"
-RRECOMMENDS_${BPN}-gconvs = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("eglibc-gconv") != -1])}"
+RRECOMMENDS_${BPN}-gconvs = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-gconv") != -1])}"
-# Create a eglibc-localedatas package
+# Create a glibc-localedatas package
ALLOW_EMPTY_${BPN}-localedatas = "1"
PACKAGES += "${BPN}-localedatas"
-RRECOMMENDS_${BPN}-localedatas = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("eglibc-localedata") != -1])}"
+RRECOMMENDS_${BPN}-localedatas = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-localedata") != -1])}"
-DESCRIPTION_localedef = "eglibc: compile locale definition files"
+DESCRIPTION_localedef = "glibc: compile locale definition files"
-# eglibc-gconv is dynamically added into PACKAGES, thus
-# FILES_eglibc-gconv will not be automatically extended in multilib.
-# Explicitly add ${MLPREFIX} for FILES_eglibc-gconv.
-FILES_${MLPREFIX}eglibc-gconv = "${libdir}/gconv/*"
+# glibc-gconv is dynamically added into PACKAGES, thus
+# FILES_glibc-gconv will not be automatically extended in multilib.
+# Explicitly add ${MLPREFIX} for FILES_glibc-gconv.
+FILES_${MLPREFIX}glibc-gconv = "${libdir}/gconv/*"
FILES_${PN}-dbg += "${libdir}/gconv/.debug/*"
FILES_localedef = "${bindir}/localedef"
-LOCALETREESRC = "${STAGING_INCDIR}/eglibc-locale-internal-${MULTIMACH_TARGET_SYS}"
+LOCALETREESRC = "${STAGING_INCDIR}/glibc-locale-internal-${MULTIMACH_TARGET_SYS}"
do_install () {
mkdir -p ${D}${bindir} ${D}${datadir} ${D}${libdir}
diff --git a/meta/recipes-core/glibc/glibc-locale_2.20.bb b/meta/recipes-core/glibc/glibc-locale_2.20.bb
new file mode 100644
index 0000000000..f7702e0358
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc-locale_2.20.bb
@@ -0,0 +1 @@
+require glibc-locale.inc
diff --git a/meta/recipes-core/glibc/glibc-mtrace.inc b/meta/recipes-core/glibc/glibc-mtrace.inc
new file mode 100644
index 0000000000..e12b079e06
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc-mtrace.inc
@@ -0,0 +1,13 @@
+include glibc-collateral.inc
+
+SUMMARY = "mtrace utility provided by glibc"
+DESCRIPTION = "mtrace utility provided by glibc"
+RDEPENDS_${PN} = "perl"
+RPROVIDES_${PN} = "libc-mtrace"
+
+SRC = "${STAGING_INCDIR}/glibc-scripts-internal-${MULTIMACH_TARGET_SYS}"
+
+do_install() {
+ install -d -m 0755 ${D}${bindir}
+ install -m 0755 ${SRC}/mtrace ${D}${bindir}/
+}
diff --git a/meta/recipes-core/glibc/glibc-mtrace_2.20.bb b/meta/recipes-core/glibc/glibc-mtrace_2.20.bb
new file mode 100644
index 0000000000..0b69bad46a
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc-mtrace_2.20.bb
@@ -0,0 +1 @@
+require glibc-mtrace.inc
diff --git a/meta/recipes-core/eglibc/eglibc-options.inc b/meta/recipes-core/glibc/glibc-options.inc
index 259e752858..9fd27f32f9 100644
--- a/meta/recipes-core/eglibc/eglibc-options.inc
+++ b/meta/recipes-core/glibc/glibc-options.inc
@@ -1,4 +1,4 @@
-def eglibc_cfg(feature, tokens, cnf):
+def glibc_cfg(feature, tokens, cnf):
if type(tokens) == type(""):
tokens = [tokens]
if feature:
@@ -10,8 +10,8 @@ def eglibc_cfg(feature, tokens, cnf):
cnf.extend(["OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG=\"${S}/nss/nsswitch.conf\""])
cnf.extend(["OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS=\"${S}/nss/fixed-nsswitch.functions\""])
-# Map distro features to eglibc options settings
-def features_to_eglibc_settings(d):
+# Map distro features to glibc options settings
+def features_to_glibc_settings(d):
cnf = ([])
ipv4 = bb.utils.contains('DISTRO_FEATURES', 'ipv4', True, False, d)
@@ -52,7 +52,7 @@ def features_to_eglibc_settings(d):
libc_posix_regexp_glibc = bb.utils.contains('DISTRO_FEATURES', 'libc-posix-regexp-glibc', True, False, d)
libc_posix_wchar_io = bb.utils.contains('DISTRO_FEATURES', 'libc-posix-wchar-io', True, False, d)
- # arrange the dependencies among eglibc configuable options according to file option-groups.def from eglibc source code
+    # arrange the dependencies among glibc configurable options according to file option-groups.def from glibc source code
new_dep = True
while new_dep:
new_dep = False
@@ -121,42 +121,42 @@ def features_to_eglibc_settings(d):
new_dep = True
libc_posix_clang_wchar = True
- eglibc_cfg(ipv6, 'OPTION_EGLIBC_ADVANCED_INET6', cnf)
- eglibc_cfg(libc_backtrace, 'OPTION_EGLIBC_BACKTRACE', cnf)
- eglibc_cfg(libc_big_macros, 'OPTION_EGLIBC_BIG_MACROS', cnf)
- eglibc_cfg(libc_bsd, 'OPTION_EGLIBC_BSD', cnf)
- eglibc_cfg(libc_cxx_tests, 'OPTION_EGLIBC_CXX_TESTS', cnf)
- eglibc_cfg(libc_catgets, 'OPTION_EGLIBC_CATGETS', cnf)
- eglibc_cfg(libc_charsets, 'OPTION_EGLIBC_CHARSETS', cnf)
- eglibc_cfg(libc_crypt, 'OPTION_EGLIBC_CRYPT', cnf)
- eglibc_cfg(libc_crypt_ufc, 'OPTION_EGLIBC_CRYPT_UFC', cnf)
- eglibc_cfg(libc_db_aliases, 'OPTION_EGLIBC_DB_ALIASES', cnf)
- eglibc_cfg(libc_envz, 'OPTION_EGLIBC_ENVZ', cnf)
- eglibc_cfg(libc_fcvt, 'OPTION_EGLIBC_FCVT', cnf)
- eglibc_cfg(libc_fmtmsg, 'OPTION_EGLIBC_FMTMSG', cnf)
- eglibc_cfg(libc_fstab, 'OPTION_EGLIBC_FSTAB', cnf)
- eglibc_cfg(libc_ftraverse, 'OPTION_EGLIBC_FTRAVERSE', cnf)
- eglibc_cfg(libc_getlogin, 'OPTION_EGLIBC_GETLOGIN', cnf)
- eglibc_cfg(libc_idn, 'OPTION_EGLIBC_IDN', cnf)
- eglibc_cfg(ipv4, 'OPTION_EGLIBC_INET', cnf)
- eglibc_cfg(libc_inet_anl, 'OPTION_EGLIBC_INET_ANL', cnf)
- eglibc_cfg(libc_libm, 'OPTION_EGLIBC_LIBM', cnf)
- eglibc_cfg(libc_locales, 'OPTION_EGLIBC_LOCALES', cnf)
- eglibc_cfg(libc_locale_code, 'OPTION_EGLIBC_LOCALE_CODE', cnf)
- eglibc_cfg(libc_memusage, 'OPTION_EGLIBC_MEMUSAGE', cnf)
- eglibc_cfg(libc_nis, 'OPTION_EGLIBC_NIS', cnf)
- eglibc_cfg(libc_nsswitch, 'OPTION_EGLIBC_NSSWITCH', cnf)
- eglibc_cfg(libc_rcmd, 'OPTION_EGLIBC_RCMD', cnf)
- eglibc_cfg(libc_rtld_debug, 'OPTION_EGLIBC_RTLD_DEBUG', cnf)
- eglibc_cfg(libc_spawn, 'OPTION_EGLIBC_SPAWN', cnf)
- eglibc_cfg(libc_streams, 'OPTION_EGLIBC_STREAMS', cnf)
- eglibc_cfg(libc_sunrpc, 'OPTION_EGLIBC_SUNRPC', cnf)
- eglibc_cfg(libc_utmp, 'OPTION_EGLIBC_UTMP', cnf)
- eglibc_cfg(libc_utmpx, 'OPTION_EGLIBC_UTMPX', cnf)
- eglibc_cfg(libc_wordexp, 'OPTION_EGLIBC_WORDEXP', cnf)
- eglibc_cfg(libc_posix_clang_wchar, 'OPTION_POSIX_C_LANG_WIDE_CHAR', cnf)
- eglibc_cfg(libc_posix_regexp, 'OPTION_POSIX_REGEXP', cnf)
- eglibc_cfg(libc_posix_regexp_glibc, 'OPTION_POSIX_REGEXP_GLIBC', cnf)
- eglibc_cfg(libc_posix_wchar_io, 'OPTION_POSIX_WIDE_CHAR_DEVICE_IO', cnf)
+ glibc_cfg(ipv6, 'OPTION_EGLIBC_ADVANCED_INET6', cnf)
+ glibc_cfg(libc_backtrace, 'OPTION_EGLIBC_BACKTRACE', cnf)
+ glibc_cfg(libc_big_macros, 'OPTION_EGLIBC_BIG_MACROS', cnf)
+ glibc_cfg(libc_bsd, 'OPTION_EGLIBC_BSD', cnf)
+ glibc_cfg(libc_cxx_tests, 'OPTION_EGLIBC_CXX_TESTS', cnf)
+ glibc_cfg(libc_catgets, 'OPTION_EGLIBC_CATGETS', cnf)
+ glibc_cfg(libc_charsets, 'OPTION_EGLIBC_CHARSETS', cnf)
+ glibc_cfg(libc_crypt, 'OPTION_EGLIBC_CRYPT', cnf)
+ glibc_cfg(libc_crypt_ufc, 'OPTION_EGLIBC_CRYPT_UFC', cnf)
+ glibc_cfg(libc_db_aliases, 'OPTION_EGLIBC_DB_ALIASES', cnf)
+ glibc_cfg(libc_envz, 'OPTION_EGLIBC_ENVZ', cnf)
+ glibc_cfg(libc_fcvt, 'OPTION_EGLIBC_FCVT', cnf)
+ glibc_cfg(libc_fmtmsg, 'OPTION_EGLIBC_FMTMSG', cnf)
+ glibc_cfg(libc_fstab, 'OPTION_EGLIBC_FSTAB', cnf)
+ glibc_cfg(libc_ftraverse, 'OPTION_EGLIBC_FTRAVERSE', cnf)
+ glibc_cfg(libc_getlogin, 'OPTION_EGLIBC_GETLOGIN', cnf)
+ glibc_cfg(libc_idn, 'OPTION_EGLIBC_IDN', cnf)
+ glibc_cfg(ipv4, 'OPTION_EGLIBC_INET', cnf)
+ glibc_cfg(libc_inet_anl, 'OPTION_EGLIBC_INET_ANL', cnf)
+ glibc_cfg(libc_libm, 'OPTION_EGLIBC_LIBM', cnf)
+ glibc_cfg(libc_locales, 'OPTION_EGLIBC_LOCALES', cnf)
+ glibc_cfg(libc_locale_code, 'OPTION_EGLIBC_LOCALE_CODE', cnf)
+ glibc_cfg(libc_memusage, 'OPTION_EGLIBC_MEMUSAGE', cnf)
+ glibc_cfg(libc_nis, 'OPTION_EGLIBC_NIS', cnf)
+ glibc_cfg(libc_nsswitch, 'OPTION_EGLIBC_NSSWITCH', cnf)
+ glibc_cfg(libc_rcmd, 'OPTION_EGLIBC_RCMD', cnf)
+ glibc_cfg(libc_rtld_debug, 'OPTION_EGLIBC_RTLD_DEBUG', cnf)
+ glibc_cfg(libc_spawn, 'OPTION_EGLIBC_SPAWN', cnf)
+ glibc_cfg(libc_streams, 'OPTION_EGLIBC_STREAMS', cnf)
+ glibc_cfg(libc_sunrpc, 'OPTION_EGLIBC_SUNRPC', cnf)
+ glibc_cfg(libc_utmp, 'OPTION_EGLIBC_UTMP', cnf)
+ glibc_cfg(libc_utmpx, 'OPTION_EGLIBC_UTMPX', cnf)
+ glibc_cfg(libc_wordexp, 'OPTION_EGLIBC_WORDEXP', cnf)
+ glibc_cfg(libc_posix_clang_wchar, 'OPTION_POSIX_C_LANG_WIDE_CHAR', cnf)
+ glibc_cfg(libc_posix_regexp, 'OPTION_POSIX_REGEXP', cnf)
+ glibc_cfg(libc_posix_regexp_glibc, 'OPTION_POSIX_REGEXP_GLIBC', cnf)
+ glibc_cfg(libc_posix_wchar_io, 'OPTION_POSIX_WIDE_CHAR_DEVICE_IO', cnf)
return "\n".join(cnf)
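glibc-options.inc turns DISTRO_FEATURES into OPTION_EGLIBC_* settings for option-groups.config. Only the glibc_cfg signature is visible in the hunk above, so the body below is a plausible reconstruction rather than the real one, and the feature names are a small illustrative subset:

def glibc_cfg(feature, tokens, cnf):
    # append TOKEN=y or TOKEN=n for every token controlled by this feature
    if isinstance(tokens, str):
        tokens = [tokens]
    for token in tokens:
        cnf.append("%s=%s" % (token, "y" if feature else "n"))

def features_to_glibc_settings(distro_features):
    cnf = []
    features = distro_features.split()
    glibc_cfg("ipv4" in features, "OPTION_EGLIBC_INET", cnf)
    glibc_cfg("ipv6" in features, "OPTION_EGLIBC_ADVANCED_INET6", cnf)
    glibc_cfg("libc-backtrace" in features, "OPTION_EGLIBC_BACKTRACE", cnf)
    return "\n".join(cnf)

print(features_to_glibc_settings("ipv4 ipv6 largefile"))
# OPTION_EGLIBC_INET=y
# OPTION_EGLIBC_ADVANCED_INET6=y
# OPTION_EGLIBC_BACKTRACE=n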
diff --git a/meta/recipes-core/eglibc/eglibc-package.inc b/meta/recipes-core/glibc/glibc-package.inc
index c357a1376b..984362e3ce 100644
--- a/meta/recipes-core/eglibc/eglibc-package.inc
+++ b/meta/recipes-core/glibc/glibc-package.inc
@@ -1,6 +1,6 @@
#
# For now, we will skip building of a gcc package if it is a uclibc one
-# and our build is not a uclibc one, and we skip a eglibc one if our build
+# and our build is not a uclibc one, and we skip a glibc one if our build
# is a uclibc build.
#
# See the note in gcc/gcc_3.4.0.oe
@@ -17,28 +17,30 @@ python __anonymous () {
# Set this to zero if you don't want ldconfig in the output package
USE_LDCONFIG ?= "1"
-PACKAGES = "${PN}-dbg catchsegv sln nscd ldd tzcode ${PN}-utils eglibc-thread-db ${PN}-pic libcidn libmemusage libsegfault ${PN}-pcprofile libsotruss ${PN} eglibc-extra-nss ${PN}-dev ${PN}-staticdev ${PN}-doc"
-
-# The ld.so in this eglibc supports the GNU_HASH
-RPROVIDES_${PN} = "glibc rtld(GNU_HASH)"
-RPROVIDES_${PN}-utils = "glibc-utils"
-RPROVIDES_${PN}-mtrace = "glibc-mtrace libc-mtrace"
-RPROVIDES_${PN}-pic = "glibc-pic"
-RPROVIDES_${PN}-dev = "glibc-dev libc6-dev virtual-libc-dev"
-RPROVIDES_${PN}-staticdev = "glibc-staticdev"
-RPROVIDES_${PN}-doc = "glibc-doc"
-RPROVIDES_eglibc-extra-nss = "glibc-extra-nss"
-RPROVIDES_eglibc-thread-db = "glibc-thread-db"
-RPROVIDES_${PN}-pcprofile = "glibc-pcprofile"
-RPROVIDES_${PN}-dbg = "glibc-dbg"
+PACKAGES = "${PN}-dbg catchsegv sln nscd ldd tzcode ${PN}-utils glibc-thread-db ${PN}-pic libcidn libmemusage libsegfault ${PN}-pcprofile libsotruss ${PN} glibc-extra-nss ${PN}-dev ${PN}-staticdev ${PN}-doc"
+
+# The ld.so in this glibc supports the GNU_HASH
+RPROVIDES_${PN} = "eglibc rtld(GNU_HASH)"
+RPROVIDES_${PN}-utils = "eglibc-utils"
+RPROVIDES_${PN}-mtrace = "eglibc-mtrace libc-mtrace"
+RPROVIDES_${PN}-pic = "eglibc-pic"
+RPROVIDES_${PN}-dev = "eglibc-dev libc6-dev virtual-libc-dev"
+RPROVIDES_${PN}-staticdev = "eglibc-staticdev"
+RPROVIDES_${PN}-doc = "eglibc-doc"
+RPROVIDES_glibc-extra-nss = "eglibc-extra-nss"
+RPROVIDES_glibc-thread-db = "eglibc-thread-db"
+RPROVIDES_${PN}-pcprofile = "eglibc-pcprofile"
+RPROVIDES_${PN}-dbg = "eglibc-dbg"
libc_baselibs = "${base_libdir}/libcrypt*.so.* ${base_libdir}/libcrypt-*.so ${base_libdir}/libc.so.* ${base_libdir}/libc-*.so ${base_libdir}/libm*.so.* ${base_libdir}/libm-*.so ${base_libdir}/ld*.so.* ${base_libdir}/ld-*.so ${base_libdir}/libpthread*.so.* ${base_libdir}/libpthread-*.so ${base_libdir}/libresolv*.so.* ${base_libdir}/libresolv-*.so ${base_libdir}/librt*.so.* ${base_libdir}/librt-*.so ${base_libdir}/libutil*.so.* ${base_libdir}/libutil-*.so ${base_libdir}/libnsl*.so.* ${base_libdir}/libnsl-*.so ${base_libdir}/libnss_files*.so.* ${base_libdir}/libnss_files-*.so ${base_libdir}/libnss_compat*.so.* ${base_libdir}/libnss_compat-*.so ${base_libdir}/libnss_dns*.so.* ${base_libdir}/libnss_dns-*.so ${base_libdir}/libdl*.so.* ${base_libdir}/libdl-*.so ${base_libdir}/libanl*.so.* ${base_libdir}/libanl-*.so ${base_libdir}/libBrokenLocale*.so.* ${base_libdir}/libBrokenLocale-*.so"
+libc_baselibs_append_aarch64 = " /lib/ld-linux-aarch64*.so.1"
+INSANE_SKIP_glibc_append_aarch64 = " libdir"
FILES_${PN} = "${libc_baselibs} ${libexecdir}/* ${@base_conditional('USE_LDCONFIG', '1', '${base_sbindir}/ldconfig ${sysconfdir}/ld.so.conf', '', d)}"
FILES_ldd = "${bindir}/ldd"
FILES_libsegfault = "${base_libdir}/libSegFault*"
FILES_libcidn = "${base_libdir}/libcidn-*.so ${base_libdir}/libcidn.so.*"
FILES_libmemusage = "${base_libdir}/libmemusage.so"
-FILES_eglibc-extra-nss = "${base_libdir}/libnss_*-*.so ${base_libdir}/libnss_*.so.*"
+FILES_glibc-extra-nss = "${base_libdir}/libnss_*-*.so ${base_libdir}/libnss_*.so.*"
FILES_sln = "${base_sbindir}/sln"
FILES_${PN}-pic = "${libdir}/*_pic.a ${libdir}/*_pic.map ${libdir}/libc_pic/*.o"
FILES_libsotruss = "${libdir}/audit/sotruss-lib.so"
@@ -53,7 +55,7 @@ FILES_${PN}-dbg += "${libexecdir}/*/.debug ${libdir}/audit/.debug"
FILES_catchsegv = "${bindir}/catchsegv"
RDEPENDS_catchsegv = "libsegfault"
FILES_${PN}-pcprofile = "${base_libdir}/libpcprofile.so"
-FILES_eglibc-thread-db = "${base_libdir}/libthread_db.so.* ${base_libdir}/libthread_db-*.so"
+FILES_glibc-thread-db = "${base_libdir}/libthread_db.so.* ${base_libdir}/libthread_db-*.so"
RPROVIDES_${PN}-dev += "libc-dev"
RPROVIDES_${PN}-staticdev += "libc-staticdev"
@@ -61,12 +63,12 @@ SUMMARY_sln = "The static ln"
DESCRIPTION_sln = "Similar to the 'ln' utility, but statically linked. sln is useful to make symbolic links to dynamic libraries if the dynamic linking system, for some reason, is not functional."
SUMMARY_nscd = "Name service cache daemon"
DESCRIPTION_nscd = "nscd, name service cache daemon, caches name service lookups for the passwd, group and hosts information. It can dramatically improve performance with remote name services such as NIS or NIS+."
-SUMMARY_eglibc-extra-nss = "hesiod, NIS and NIS+ nss libraries"
-DESCRIPTION_eglibc-extra-nss = "eglibc: nis, nisplus and hesiod search services."
+SUMMARY_glibc-extra-nss = "hesiod, NIS and NIS+ nss libraries"
+DESCRIPTION_glibc-extra-nss = "glibc: nis, nisplus and hesiod search services."
SUMMARY_ldd = "print shared library dependencies"
DESCRIPTION_ldd = "${bindir}/ldd prints shared library dependencies for each program or shared library specified on the command line."
-SUMMARY_${PN}-utils = "Miscellaneous utilities provided by eglibc"
-DESCRIPTION_${PN}-utils = "Miscellaneous utilities including getconf, iconf, locale, gencat, rpcinfo, ..."
+SUMMARY_${PN}-utils = "Miscellaneous utilities provided by glibc"
+DESCRIPTION_${PN}-utils = "Miscellaneous utilities including getconf, iconv, locale, gencat, rpcgen, ..."
DESCRIPTION_libsotruss = "Library to support sotruss which traces calls through PLTs"
DESCRIPTION_tzcode = "tzcode, timezone zoneinfo utils -- zic, zdump, tzselect"
@@ -76,9 +78,9 @@ do_install_append () {
rm -f ${D}${sysconfdir}/localtime
rm -rf ${D}${localstatedir}
- # remove empty eglibc dir
- if [ -d ${D}${libdir}/eglibc -a ! -e ${D}${libdir}/eglibc/pt_chown ]; then
- rmdir ${D}${libdir}/eglibc
+ # remove empty glibc dir
+ if [ -d ${D}${libdir}/glibc -a ! -e ${D}${libdir}/glibc/pt_chown ]; then
+ rmdir ${D}${libdir}/glibc
fi
oe_multilib_header bits/syscall.h
@@ -92,18 +94,33 @@ do_install_append () {
fi
if [ "${USE_LDCONFIG}" != "1" ]; then
- # We won't ship this file (see FILES above) so let's not install it
+ # We won't ship these files (see FILES above) so let's not install them
rm -f ${D}${sysconfdir}/ld.so.conf
+ rm -f ${D}${base_sbindir}/ldconfig
# This directory will be empty now so remove it too.
- # But check whether it exists first, since it won't for eglibc-initial.
+ # But check whether it exists first, since it won't for glibc-initial.
if [ -d ${D}${sysconfdir} ]; then
rmdir ${D}${sysconfdir}
fi
fi
}
+do_install_append_aarch64 () {
+ if [ "${base_libdir}" != "/lib" ] ; then
+ # The aarch64 ABI says the dynamic linker -must- be /lib/ld-linux-aarch64[_be].so.1
+ install -d ${D}/lib
+ if [ -e ${D}${base_libdir}/ld-linux-aarch64.so.1 ]; then
+ ln -s ${@base_path_relative('/lib', '${base_libdir}')}/ld-linux-aarch64.so.1 \
+ ${D}/lib/ld-linux-aarch64.so.1
+ elif [ -e ${D}${base_libdir}/ld-linux-aarch64_be.so.1 ]; then
+ ln -s ${@base_path_relative('/lib', '${base_libdir}')}/ld-linux-aarch64_be.so.1 \
+ ${D}/lib/ld-linux-aarch64_be.so.1
+ fi
+ fi
+}
+
do_install_locale () {
- dest=${D}/${includedir}/eglibc-locale-internal-${MULTIMACH_TARGET_SYS}
+ dest=${D}/${includedir}/glibc-locale-internal-${MULTIMACH_TARGET_SYS}
install -d ${dest}${base_libdir} ${dest}${bindir} ${dest}${libdir} ${dest}${datadir}
if [ "${base_libdir}" != "${libdir}" ]; then
cp -fpPR ${D}${base_libdir}/* ${dest}${base_libdir}
@@ -130,7 +147,7 @@ addtask do_install_locale after do_install before do_populate_sysroot do_package
bashscripts = "mtrace sotruss xtrace"
do_evacuate_scripts () {
- target=${D}${includedir}/eglibc-scripts-internal-${MULTIMACH_TARGET_SYS}
+ target=${D}${includedir}/glibc-scripts-internal-${MULTIMACH_TARGET_SYS}
mkdir -p $target
for i in ${bashscripts}; do
if [ -f ${D}${bindir}/$i ]; then
@@ -141,11 +158,11 @@ do_evacuate_scripts () {
addtask evacuate_scripts after do_install before do_populate_sysroot do_package
-PACKAGE_PREPROCESS_FUNCS += "eglibc_package_preprocess"
+PACKAGE_PREPROCESS_FUNCS += "glibc_package_preprocess"
-eglibc_package_preprocess () {
- rm -rf ${PKGD}/${includedir}/eglibc-locale-internal-${MULTIMACH_TARGET_SYS}
- rm -rf ${PKGD}/${includedir}/eglibc-scripts-internal-${MULTIMACH_TARGET_SYS}
+glibc_package_preprocess () {
+ rm -rf ${PKGD}/${includedir}/glibc-locale-internal-${MULTIMACH_TARGET_SYS}
+ rm -rf ${PKGD}/${includedir}/glibc-scripts-internal-${MULTIMACH_TARGET_SYS}
for i in ${bashscripts}; do
rm -f ${PKGD}${bindir}/$i
done
diff --git a/meta/recipes-core/glibc/glibc-scripts.inc b/meta/recipes-core/glibc/glibc-scripts.inc
new file mode 100644
index 0000000000..3a06773d66
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc-scripts.inc
@@ -0,0 +1,16 @@
+include glibc-collateral.inc
+
+SUMMARY = "utility scripts provided by glibc"
+DESCRIPTION = "utility scripts provided by glibc"
+RDEPENDS_${PN} = "bash glibc-mtrace"
+
+SRC = "${STAGING_INCDIR}/glibc-scripts-internal-${MULTIMACH_TARGET_SYS}"
+
+bashscripts = "sotruss xtrace"
+
+do_install() {
+ install -d -m 0755 ${D}${bindir}
+ for i in ${bashscripts}; do
+ install -m 0755 ${SRC}/$i ${D}${bindir}/
+ done
+}
diff --git a/meta/recipes-core/glibc/glibc-scripts_2.20.bb b/meta/recipes-core/glibc/glibc-scripts_2.20.bb
new file mode 100644
index 0000000000..5a89bd8022
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc-scripts_2.20.bb
@@ -0,0 +1 @@
+require glibc-scripts.inc
diff --git a/meta/recipes-core/eglibc/eglibc-testing.inc b/meta/recipes-core/glibc/glibc-testing.inc
index ab3ec1555f..ec16fe1fde 100644
--- a/meta/recipes-core/eglibc/eglibc-testing.inc
+++ b/meta/recipes-core/glibc/glibc-testing.inc
@@ -1,46 +1,46 @@
do_compile_append () {
# now generate script to drive testing
- echo "#!/usr/bin/env sh" >${B}/${HOST_PREFIX}testeglibc
- set >> ${B}/${HOST_PREFIX}testeglibc
+ echo "#!/usr/bin/env sh" >${B}/${HOST_PREFIX}testglibc
+ set >> ${B}/${HOST_PREFIX}testglibc
# prune out the unneeded vars
- sed -i -e "/^BASH/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^USER/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^OPT/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^DIRSTACK/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^EUID/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^FUNCNAME/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^GROUPS/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^HOST/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^HOME/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^IFS/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^LC_ALL/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^LOGNAME/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^MACHTYPE/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^OSTYPE/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^PIPE/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^SHELL/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^'/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^UID/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^TERM/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^PATCH_GET/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^PKG_/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^POSIXLY_/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^PPID/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^PS4/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^Q/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^SHLVL/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^STAGING/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^LD_LIBRARY_PATH/d" ${B}/${HOST_PREFIX}testeglibc
- sed -i -e "/^PSEUDO/d" ${B}/${HOST_PREFIX}testeglibc
+ sed -i -e "/^BASH/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^USER/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^OPT/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^DIRSTACK/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^EUID/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^FUNCNAME/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^GROUPS/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^HOST/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^HOME/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^IFS/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^LC_ALL/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^LOGNAME/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^MACHTYPE/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^OSTYPE/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^PIPE/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^SHELL/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^'/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^UID/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^TERM/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^PATCH_GET/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^PKG_/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^POSIXLY_/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^PPID/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^PS4/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^Q/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^SHLVL/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^STAGING/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^LD_LIBRARY_PATH/d" ${B}/${HOST_PREFIX}testglibc
+ sed -i -e "/^PSEUDO/d" ${B}/${HOST_PREFIX}testglibc
# point to real sysroot not the toolchain bootstrap sysroot
- sed -i -e "s/\-tcbootstrap//g" ${B}/${HOST_PREFIX}testeglibc
+ sed -i -e "s/\-tcbootstrap//g" ${B}/${HOST_PREFIX}testglibc
# use the final cross-gcc to test since some tests need libstdc++
- sed -i -e "s/^PATH=.*\.gcc-cross-initial\:/PATH=/g" ${B}/${HOST_PREFIX}testeglibc
+ sed -i -e "s/^PATH=.*\.gcc-cross-initial\:/PATH=/g" ${B}/${HOST_PREFIX}testglibc
# append execution part script
-cat >> ${B}/${HOST_PREFIX}testeglibc << STOP
+cat >> ${B}/${HOST_PREFIX}testglibc << STOP
target="\$1"
if [ "x\$target" = "x" ]
then
@@ -59,7 +59,7 @@ then
echo " Please make sure that you have 'no_root_squash' added in /etc/exports if you want"
echo " to test as root user on target (usually its recommended to create a non"
echo " root user."
- echo " As a sanity check make sure that target can read/write to the eglibc build tree"
+ echo " As a sanity check make sure that target can read/write to the glibc build tree"
echo " Please refer to ${S}/EGLIBC.cross-testing for further instructions on setup"
exit 1
fi
@@ -75,5 +75,5 @@ make cross-localedef="\$localedef" cross-test-wrapper="\$wrapper" -k check
rm -rf ${B}/configparms
STOP
- chmod +x ${B}/${HOST_PREFIX}testeglibc
+ chmod +x ${B}/${HOST_PREFIX}testglibc
}
diff --git a/meta/recipes-core/eglibc/eglibc.inc b/meta/recipes-core/glibc/glibc.inc
index af7f6adefc..8aa78dfb34 100644
--- a/meta/recipes-core/eglibc/eglibc.inc
+++ b/meta/recipes-core/glibc/glibc.inc
@@ -1,6 +1,6 @@
-require eglibc-common.inc
-require eglibc-ld.inc
-require eglibc-testing.inc
+require glibc-common.inc
+require glibc-ld.inc
+require glibc-testing.inc
STAGINGCC = "gcc-cross-initial-${TARGET_ARCH}"
STAGINGCC_class-nativesdk = "gcc-crosssdk-initial-${TARGET_ARCH}"
@@ -8,12 +8,12 @@ PATH_prepend = "${STAGING_BINDIR_TOOLCHAIN}.${STAGINGCC}:"
TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TCBOOTSTRAP}"
-# eglibc can't be built without optimization, if someone tries to compile an
+# glibc can't be built without optimization, if someone tries to compile an
# entire image as -O0, we override it with -O2 here and give a note about it.
def get_optimization(d):
selected_optimization = d.getVar("SELECTED_OPTIMIZATION", True)
if bb.utils.contains("SELECTED_OPTIMIZATION", "-O0", "x", "", d) == "x":
- bb.note("eglibc can't be built with -O0, -O2 will be used instead.")
+ bb.note("glibc can't be built with -O0, -O2 will be used instead.")
return selected_optimization.replace("-O0", "-O2")
return selected_optimization
@@ -39,7 +39,7 @@ DEPENDS = "virtual/${TARGET_PREFIX}gcc-initial libgcc-initial linux-libc-headers
PROVIDES = "virtual/libc virtual/${TARGET_PREFIX}libc-for-gcc"
PROVIDES += "virtual/libintl virtual/libiconv"
inherit autotools texinfo distro_features_check
-require eglibc-options.inc
+require glibc-options.inc
# The main purpose of setting this variable is to prevent users from accidently
# overriding DISTRO_FEATRUES, causing obscure build failures because of lack
@@ -63,19 +63,19 @@ INHIBIT_DEFAULT_DEPS = "1"
ARM_INSTRUCTION_SET = "arm"
-# eglibc uses PARALLELMFLAGS variable to pass parallel build info so transfer
+# glibc uses PARALLELMFLAGS variable to pass parallel build info so transfer
# PARALLEL_MAKE into PARALLELMFLAGS and empty out PARALLEL_MAKE
EGLIBCPARALLELISM := "PARALLELMFLAGS="${PARALLEL_MAKE}""
EXTRA_OEMAKE[vardepsexclude] += "EGLIBCPARALLELISM"
EXTRA_OEMAKE += "${EGLIBCPARALLELISM}"
PARALLEL_MAKE = ""
-# eglibc make-syscalls.sh has a number of issues with /bin/dash and
+# glibc make-syscalls.sh has a number of issues with /bin/dash and
# it's output which make calls via the SHELL also has issues, so
# ensure make uses /bin/bash
EXTRA_OEMAKE += "SHELL=/bin/bash"
-OE_FEATURES = "${@features_to_eglibc_settings(d)}"
+OE_FEATURES = "${@features_to_glibc_settings(d)}"
do_configure_prepend() {
sed -e "s#@BASH@#/bin/sh#" -i ${S}/elf/ldd.bash.in
echo '${OE_FEATURES}' > ${B}/option-groups.config
@@ -89,4 +89,4 @@ do_configure_append() {
sed -i 's/^OPTION_EGLIBC_NSSWITCH_FIXED_\(.*\)="\(.*\)"$/OPTION_EGLIBC_NSSWITCH_FIXED_\1=\2/' option-groups.config
}
-GLIBC_ADDONS ?= "ports,nptl,libidn"
+GLIBC_ADDONS ?= "nptl,libidn"
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/0001-R_ARM_TLS_DTPOFF32.patch b/meta/recipes-core/glibc/glibc/0001-R_ARM_TLS_DTPOFF32.patch
index b4489e9ae9..3922cb818f 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/0001-R_ARM_TLS_DTPOFF32.patch
+++ b/meta/recipes-core/glibc/glibc/0001-R_ARM_TLS_DTPOFF32.patch
@@ -41,9 +41,11 @@ Signed-off-by: Andrei Dinu <andrei.adrianx.dinu@intel.com>
1 file changed, 1 insertion(+), 1 deletion(-)
ndex 8d905e8..dcfa71e 100644
---- libc.orig/ports/sysdeps/arm/dl-machine.h
-+++ libc/ports/sysdeps/arm/dl-machine.h
-@@ -503,7 +503,7 @@ elf_machine_rel (struct link_map *map, const Elf32_Rel *reloc,
+Index: git/sysdeps/arm/dl-machine.h
+===================================================================
+--- git.orig/sysdeps/arm/dl-machine.h 2014-08-27 05:30:47.748070587 +0000
++++ git/sysdeps/arm/dl-machine.h 2014-08-27 05:30:47.740070587 +0000
+@@ -495,7 +495,7 @@
case R_ARM_TLS_DTPOFF32:
if (sym != NULL)
@@ -52,5 +54,3 @@ ndex 8d905e8..dcfa71e 100644
break;
case R_ARM_TLS_TPOFF32:
---
-
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/0001-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch b/meta/recipes-core/glibc/glibc/0001-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch
index a8463ea915..f341282ffb 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/0001-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch
+++ b/meta/recipes-core/glibc/glibc/0001-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch
@@ -17,11 +17,11 @@ Signed-off-by: Ting Liu <b28495@freescale.com>
manual/Makefile | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
-diff --git a/manual/Makefile b/manual/Makefile
-index 6fddff0..7af242e 100644
---- a/manual/Makefile
-+++ b/manual/Makefile
-@@ -109,7 +109,8 @@ $(objpfx)libm-err.texi: $(objpfx)stamp-libm-err
+Index: git/manual/Makefile
+===================================================================
+--- git.orig/manual/Makefile 2014-08-29 10:35:18.728070587 -0700
++++ git/manual/Makefile 2014-08-29 10:35:18.720070587 -0700
+@@ -105,7 +105,8 @@
$(objpfx)stamp-libm-err: libm-err-tab.pl $(wildcard $(foreach dir,$(sysdirs),\
$(dir)/libm-test-ulps))
pwd=`pwd`; \
@@ -31,6 +31,3 @@ index 6fddff0..7af242e 100644
$(move-if-change) $(objpfx)libm-err-tmp $(objpfx)libm-err.texi
touch $@
---
-1.7.9.7
-
diff --git a/meta/recipes-core/glibc/glibc/CVE-2012-3406-Stack-overflow-in-vfprintf-BZ-16617.patch b/meta/recipes-core/glibc/glibc/CVE-2012-3406-Stack-overflow-in-vfprintf-BZ-16617.patch
new file mode 100644
index 0000000000..8cfdbeaa60
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/CVE-2012-3406-Stack-overflow-in-vfprintf-BZ-16617.patch
@@ -0,0 +1,339 @@
+From a5357b7ce2a2982c5778435704bcdb55ce3667a0 Mon Sep 17 00:00:00 2001
+From: Jeff Law <law@redhat.com>
+Date: Mon, 15 Dec 2014 10:09:32 +0100
+Subject: [PATCH] CVE-2012-3406: Stack overflow in vfprintf [BZ #16617]
+
+A larger number of format specifiers could cause a stack overflow,
+potentially allowing an attacker to bypass _FORTIFY_SOURCE format
+string protection.
+---
+ ChangeLog | 9 +++++++
+ NEWS | 13 +++++----
+ stdio-common/Makefile | 2 +-
+ stdio-common/bug23-2.c | 70 +++++++++++++++++++++++++++++++++++++++++++++++++
+ stdio-common/bug23-3.c | 50 +++++++++++++++++++++++++++++++++++
+ stdio-common/bug23-4.c | 31 ++++++++++++++++++++++
+ stdio-common/vfprintf.c | 40 ++++++++++++++++++++++++++--
+ 7 files changed, 207 insertions(+), 8 deletions(-)
+ create mode 100644 stdio-common/bug23-2.c
+ create mode 100644 stdio-common/bug23-3.c
+ create mode 100644 stdio-common/bug23-4.c
+
+Index: git/ChangeLog
+===================================================================
+--- git.orig/ChangeLog
++++ git/ChangeLog
+@@ -1,3 +1,12 @@
++2014-12-15 Jeff Law <law@redhat.com>
++
++ [BZ #16617]
++ * stdio-common/vfprintf.c (vfprintf): Allocate large specs array
++ on the heap. (CVE-2012-3406)
++ * stdio-common/bug23-2.c, stdio-common/bug23-3.c: New file.
++ * stdio-common/bug23-4.c: New file. Test case by Joseph Myers.
++ * stdio-common/Makefile (tests): Add bug23-2, bug23-3, bug23-4.
++
+ 2014-11-19 Carlos O'Donell <carlos@redhat.com>
+ Florian Weimer <fweimer@redhat.com>
+ Joseph Myers <joseph@codesourcery.com>
+Index: git/NEWS
+===================================================================
+--- git.orig/NEWS
++++ git/NEWS
+@@ -13,24 +13,28 @@ Version 2.20
+ 15698, 15804, 15894, 15946, 16002, 16064, 16095, 16194, 16198, 16275,
+ 16284, 16287, 16315, 16348, 16349, 16354, 16357, 16362, 16447, 16516,
+ 16532, 16539, 16545, 16561, 16562, 16564, 16574, 16599, 16600, 16609,
+- 16610, 16611, 16613, 16619, 16623, 16629, 16632, 16634, 16639, 16642,
+- 16648, 16649, 16670, 16674, 16677, 16680, 16681, 16683, 16689, 16695,
+- 16701, 16706, 16707, 16712, 16713, 16714, 16724, 16731, 16739, 16740,
+- 16743, 16754, 16758, 16759, 16760, 16770, 16786, 16789, 16791, 16796,
+- 16799, 16800, 16815, 16823, 16824, 16831, 16838, 16839, 16849, 16854,
+- 16876, 16877, 16878, 16882, 16885, 16888, 16890, 16892, 16912, 16915,
+- 16916, 16917, 16918, 16922, 16927, 16928, 16932, 16943, 16958, 16965,
+- 16966, 16967, 16977, 16978, 16984, 16990, 16996, 17009, 17022, 17031,
+- 17042, 17048, 17050, 17058, 17061, 17062, 17069, 17075, 17078, 17079,
+- 17084, 17086, 17088, 17092, 17097, 17125, 17135, 17137, 17150, 17153,
+- 17187, 17213, 17259, 17261, 17262, 17263, 17319, 17325, 17354, 17625.
+-
++ 16610, 16611, 16613, 16617, 16619, 16623, 16629, 16632, 16634, 16639,
++ 16642, 16648, 16649, 16670, 16674, 16677, 16680, 16681, 16683, 16689,
++ 16695, 16701, 16706, 16707, 16712, 16713, 16714, 16724, 16731, 16739,
++ 16740, 16743, 16754, 16758, 16759, 16760, 16770, 16786, 16789, 16791,
++ 16796, 16799, 16800, 16815, 16823, 16824, 16831, 16838, 16839, 16849,
++ 16854, 16876, 16877, 16878, 16882, 16885, 16888, 16890, 16892, 16912,
++ 16915, 16916, 16917, 16918, 16922, 16927, 16928, 16932, 16943, 16958,
++ 16965, 16966, 16967, 16977, 16978, 16984, 16990, 16996, 17009, 17022,
++ 17031, 17042, 17048, 17050, 17058, 17061, 17062, 17069, 17075, 17078,
++ 17079, 17084, 17086, 17088, 17092, 17097, 17125, 17135, 17137, 17150,
++ 17153, 17187, 17213, 17259, 17261, 17262, 17263, 17319, 17325, 17354,
++ 17625.
++
+ * CVE-2014-7817 The wordexp function could ignore the WRDE_NOCMD flag
+ under certain input conditions resulting in the execution of a shell for
+ command substitution when the application did not request it. The
+ implementation now checks WRDE_NOCMD immediately before executing the
+ shell and returns the error WRDE_CMDSUB as expected.
+
++* CVE-2012-3406 printf-style functions could run into a stack overflow when
++ processing format strings with a large number of format specifiers.
++
+ * Reverted change of ABI data structures for s390 and s390x:
+ On s390 and s390x the size of struct ucontext and jmp_buf was increased in
+ 2.19. This change is reverted in 2.20. The introduced 2.19 symbol versions
+Index: git/stdio-common/bug23-2.c
+===================================================================
+--- /dev/null
++++ git/stdio-common/bug23-2.c
+@@ -0,0 +1,70 @@
++#include <stdio.h>
++#include <string.h>
++#include <stdlib.h>
++
++static const char expected[] = "\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55\
++\n\
++a\n\
++abbcd55%%%%%%%%%%%%%%%%%%%%%%%%%%\n";
++
++static int
++do_test (void)
++{
++ char *buf = malloc (strlen (expected) + 1);
++ snprintf (buf, strlen (expected) + 1,
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n",
++ "a", "b", "c", "d", 5);
++ return strcmp (buf, expected) != 0;
++}
++
++#define TEST_FUNCTION do_test ()
++#include "../test-skeleton.c"
+Index: git/stdio-common/bug23-3.c
+===================================================================
+--- /dev/null
++++ git/stdio-common/bug23-3.c
+@@ -0,0 +1,50 @@
++#include <stdio.h>
++#include <string.h>
++#include <stdlib.h>
++
++int
++do_test (void)
++{
++ size_t instances = 16384;
++#define X0 "\n%1$s\n" "%1$s" "%2$s" "%2$s" "%3$s" "%4$s" "%5$d" "%5$d"
++ const char *item = "\na\nabbcd55";
++#define X3 X0 X0 X0 X0 X0 X0 X0 X0
++#define X6 X3 X3 X3 X3 X3 X3 X3 X3
++#define X9 X6 X6 X6 X6 X6 X6 X6 X6
++#define X12 X9 X9 X9 X9 X9 X9 X9 X9
++#define X14 X12 X12 X12 X12
++#define TRAILER "%%%%%%%%%%%%%%%%%%%%%%%%%%"
++#define TRAILER2 TRAILER TRAILER
++ size_t length = instances * strlen (item) + strlen (TRAILER) + 1;
++
++ char *buf = malloc (length + 1);
++ snprintf (buf, length + 1,
++ X14 TRAILER2 "\n",
++ "a", "b", "c", "d", 5);
++
++ const char *p = buf;
++ size_t i;
++ for (i = 0; i < instances; ++i)
++ {
++ const char *expected;
++ for (expected = item; *expected; ++expected)
++ {
++ if (*p != *expected)
++ {
++ printf ("mismatch at offset %zu (%zu): expected %d, got %d\n",
++ (size_t) (p - buf), i, *expected & 0xFF, *p & 0xFF);
++ return 1;
++ }
++ ++p;
++ }
++ }
++ if (strcmp (p, TRAILER "\n") != 0)
++ {
++ printf ("mismatch at trailer: [%s]\n", p);
++ return 1;
++ }
++ free (buf);
++ return 0;
++}
++#define TEST_FUNCTION do_test ()
++#include "../test-skeleton.c"
+Index: git/stdio-common/bug23-4.c
+===================================================================
+--- /dev/null
++++ git/stdio-common/bug23-4.c
+@@ -0,0 +1,31 @@
++#include <stdio.h>
++#include <stdlib.h>
++#include <string.h>
++#include <sys/resource.h>
++
++#define LIMIT 1000000
++
++int
++main (void)
++{
++ struct rlimit lim;
++ getrlimit (RLIMIT_STACK, &lim);
++ lim.rlim_cur = 1048576;
++ setrlimit (RLIMIT_STACK, &lim);
++ char *fmtstr = malloc (4 * LIMIT + 1);
++ if (fmtstr == NULL)
++ abort ();
++ char *output = malloc (LIMIT + 1);
++ if (output == NULL)
++ abort ();
++ for (size_t i = 0; i < LIMIT; i++)
++ memcpy (fmtstr + 4 * i, "%1$d", 4);
++ fmtstr[4 * LIMIT] = '\0';
++ int ret = snprintf (output, LIMIT + 1, fmtstr, 0);
++ if (ret != LIMIT)
++ abort ();
++ for (size_t i = 0; i < LIMIT; i++)
++ if (output[i] != '0')
++ abort ();
++ return 0;
++}
+Index: git/stdio-common/vfprintf.c
+===================================================================
+--- git.orig/stdio-common/vfprintf.c
++++ git/stdio-common/vfprintf.c
+@@ -276,6 +276,12 @@ vfprintf (FILE *s, const CHAR_T *format,
+ /* For the argument descriptions, which may be allocated on the heap. */
+ void *args_malloced = NULL;
+
++ /* For positional argument handling. */
++ struct printf_spec *specs;
++
++ /* Track if we malloced the SPECS array and thus must free it. */
++ bool specs_malloced = false;
++
+ /* This table maps a character into a number representing a
+ class. In each step there is a destination label for each
+ class. */
+@@ -1699,8 +1705,8 @@ do_positional:
+ size_t nspecs = 0;
+ /* A more or less arbitrary start value. */
+ size_t nspecs_size = 32 * sizeof (struct printf_spec);
+- struct printf_spec *specs = alloca (nspecs_size);
+
++ specs = alloca (nspecs_size);
+ /* The number of arguments the format string requests. This will
+ determine the size of the array needed to store the argument
+ attributes. */
+@@ -1743,11 +1749,39 @@ do_positional:
+ if (nspecs * sizeof (*specs) >= nspecs_size)
+ {
+ /* Extend the array of format specifiers. */
++ if (nspecs_size * 2 < nspecs_size)
++ {
++ __set_errno (ENOMEM);
++ done = -1;
++ goto all_done;
++ }
+ struct printf_spec *old = specs;
+- specs = extend_alloca (specs, nspecs_size, 2 * nspecs_size);
++ if (__libc_use_alloca (2 * nspecs_size))
++ specs = extend_alloca (specs, nspecs_size, 2 * nspecs_size);
++ else
++ {
++ nspecs_size *= 2;
++ specs = malloc (nspecs_size);
++ if (specs == NULL)
++ {
++ __set_errno (ENOMEM);
++ specs = old;
++ done = -1;
++ goto all_done;
++ }
++ }
+
+ /* Copy the old array's elements to the new space. */
+ memmove (specs, old, nspecs * sizeof (*specs));
++
++ /* If we had previously malloc'd space for SPECS, then
++ release it after the copy is complete. */
++ if (specs_malloced)
++ free (old);
++
++ /* Now set SPECS_MALLOCED if needed. */
++ if (!__libc_use_alloca (nspecs_size))
++ specs_malloced = true;
+ }
+
+ /* Parse the format specifier. */
+@@ -2068,6 +2102,8 @@ do_positional:
+ }
+
+ all_done:
++ if (specs_malloced)
++ free (specs);
+ if (__glibc_unlikely (args_malloced != NULL))
+ free (args_malloced);
+ if (__glibc_unlikely (workstart != NULL))
+Index: git/stdio-common/Makefile
+===================================================================
+--- git.orig/stdio-common/Makefile
++++ git/stdio-common/Makefile
+@@ -66,7 +66,7 @@ tests := tstscanf test_rdwr test-popen t
+ tst-fwrite bug16 bug17 tst-sprintf2 bug18 \
+ bug19 tst-popen2 scanf14 scanf15 bug21 bug22 scanf16 scanf17 \
+ tst-setvbuf1 bug23 bug24 bug-vfprintf-nargs tst-sprintf3 bug25 \
+- tst-printf-round bug26
++ tst-printf-round bug23-2 bug23-3 bug23-4
+
+ tests-$(OPTION_EGLIBC_LOCALE_CODE) \
+ += tst-sscanf tst-swprintf test-vfprintf bug14 scanf13 tst-grouping
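The vfprintf change above keeps the format-specifier array on the stack for the common case and moves it to the heap once it would grow large, instead of extending the alloca allocation indefinitely. A minimal standalone sketch of that pattern follows; it is illustrative only (the count_specs helper, the 32-entry starting size, and the growth policy are invented for the example and are not the glibc code):

/* Sketch: track specifiers in a small stack array, fall back to the heap
   when the format string is unusually large, so hostile input costs a
   failed malloc (ENOMEM) at worst rather than a stack overflow. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct spec { int arg_index; char conversion; };

static int count_specs(const char *fmt)
{
    struct spec stack_specs[32];            /* small, fast common case */
    struct spec *specs = stack_specs;
    size_t capacity = 32, nspecs = 0;
    int result;

    for (const char *p = fmt; (p = strchr(p, '%')) != NULL; ++p) {
        if (p[1] == '%') { ++p; continue; } /* skip literal "%%" */
        if (nspecs == capacity) {           /* grow on the heap, never alloca */
            struct spec *bigger = malloc(2 * capacity * sizeof *bigger);
            if (bigger == NULL) {
                if (specs != stack_specs)
                    free(specs);
                return -1;                  /* report the failure, do not overflow */
            }
            memcpy(bigger, specs, nspecs * sizeof *specs);
            if (specs != stack_specs)
                free(specs);
            specs = bigger;
            capacity *= 2;
        }
        specs[nspecs].arg_index = (int) nspecs;
        specs[nspecs].conversion = p[1];
        ++nspecs;
    }
    result = (int) nspecs;
    if (specs != stack_specs)
        free(specs);
    return result;
}

int main(void)
{
    printf("%d specifiers\n", count_specs("%1$d %2$s %% %3$f"));
    return 0;
}

The point mirrored from the patch is the alloca-to-malloc fallback; the new bug23-2/-3/-4 tests above exercise exactly the large-specifier-count inputs that used to exhaust the stack.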
diff --git a/meta/recipes-core/glibc/glibc/CVE-2014-7817-wordexp-fails-to-honour-WRDE_NOCMD.patch b/meta/recipes-core/glibc/glibc/CVE-2014-7817-wordexp-fails-to-honour-WRDE_NOCMD.patch
new file mode 100644
index 0000000000..d95d182dc4
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/CVE-2014-7817-wordexp-fails-to-honour-WRDE_NOCMD.patch
@@ -0,0 +1,215 @@
+From a39208bd7fb76c1b01c127b4c61f9bfd915bfe7c Mon Sep 17 00:00:00 2001
+From: Carlos O'Donell <carlos@redhat.com>
+Date: Wed, 19 Nov 2014 11:44:12 -0500
+Subject: [PATCH] CVE-2014-7817: wordexp fails to honour WRDE_NOCMD.
+
+The function wordexp() fails to properly handle the WRDE_NOCMD
+flag when processing arithmetic inputs in the form of "$((... ``))"
+where "..." can be anything valid. The backticks in the arithmetic
+expression are evaluated in a shell even if WRDE_NOCMD forbade
+command substitution. This allows an attacker to attempt to pass
+dangerous commands via constructs of the above form, and bypass
+the WRDE_NOCMD flag. This patch fixes this by checking for WRDE_NOCMD
+in exec_comm(), the only place that can execute a shell. All other
+checks for WRDE_NOCMD are superfluous and removed.
+
+We expand the testsuite and add 3 new regression tests of roughly
+the same form but with a couple of nested levels.
+
+On top of the 3 new tests we add fork validation to the WRDE_NOCMD
+testing. If any forks are detected during the execution of a wordexp()
+call with WRDE_NOCMD, the test is marked as failed. This is slightly
+heuristic since vfork might be used in the future, but it provides a
+higher level of assurance that no shells were executed as part of
+command substitution with WRDE_NOCMD in effect. In addition it doesn't
+require libpthread or libdl, instead we use the public implementation
+namespace function __register_atfork (already part of the public ABI
+for libpthread).
+
+Tested on x86_64 with no regressions.
+---
+ ChangeLog | 22 ++++++++++++++++++++++
+ NEWS | 8 +++++++-
+ posix/wordexp-test.c | 44 ++++++++++++++++++++++++++++++++++++++++++++
+ posix/wordexp.c | 16 ++++------------
+ 4 files changed, 77 insertions(+), 13 deletions(-)
+
+Index: git/ChangeLog
+===================================================================
+--- git.orig/ChangeLog
++++ git/ChangeLog
+@@ -1,3 +1,24 @@
++2014-11-19 Carlos O'Donell <carlos@redhat.com>
++ Florian Weimer <fweimer@redhat.com>
++ Joseph Myers <joseph@codesourcery.com>
++ Adam Conrad <adconrad@0c3.net>
++ Andreas Schwab <schwab@suse.de>
++ Brooks <bmoses@google.com>
++
++ [BZ #17625]
++ * wordexp-test.c (__dso_handle): Add prototype.
++ (__register_atfork): Likewise.
++ (__app_register_atfork): New function.
++ (registered_forks): New global.
++ (register_fork): New function.
++ (test_case): Add 3 new tests for WRDE_CMDSUB.
++ (main): Call __app_register_atfork.
++ (testit): If WRDE_NOCMD set registered_forks to zero, run test, and if
++ fork count is non-zero fail the test.
++ * posix/wordexp.c (exec_comm): Return WRDE_CMDSUB if WRDE_NOCMD flag
++ is set.
++ (parse_dollars): Remove check for WRDE_NOCMD.
++
+ 2014-09-07 Allan McRae <allan@archlinux.org
+
+ * version.h (RELEASE): Set to "stable".
+Index: git/NEWS
+===================================================================
+--- git.orig/NEWS
++++ git/NEWS
+@@ -23,7 +23,13 @@ Version 2.20
+ 16966, 16967, 16977, 16978, 16984, 16990, 16996, 17009, 17022, 17031,
+ 17042, 17048, 17050, 17058, 17061, 17062, 17069, 17075, 17078, 17079,
+ 17084, 17086, 17088, 17092, 17097, 17125, 17135, 17137, 17150, 17153,
+- 17187, 17213, 17259, 17261, 17262, 17263, 17319, 17325, 17354.
++ 17187, 17213, 17259, 17261, 17262, 17263, 17319, 17325, 17354, 17625.
++
++* CVE-2014-7817 The wordexp function could ignore the WRDE_NOCMD flag
++ under certain input conditions resulting in the execution of a shell for
++ command substitution when the application did not request it. The
++ implementation now checks WRDE_NOCMD immediately before executing the
++ shell and returns the error WRDE_CMDSUB as expected.
+
+ * Reverted change of ABI data structures for s390 and s390x:
+ On s390 and s390x the size of struct ucontext and jmp_buf was increased in
+Index: git/posix/wordexp-test.c
+===================================================================
+--- git.orig/posix/wordexp-test.c
++++ git/posix/wordexp-test.c
+@@ -27,6 +27,25 @@
+
+ #define IFS " \n\t"
+
++extern void *__dso_handle __attribute__ ((__weak__, __visibility__ ("hidden")));
++extern int __register_atfork (void (*) (void), void (*) (void), void (*) (void), void *);
++
++static int __app_register_atfork (void (*prepare) (void), void (*parent) (void), void (*child) (void))
++{
++ return __register_atfork (prepare, parent, child,
++ &__dso_handle == NULL ? NULL : __dso_handle);
++}
++
++/* Number of forks seen. */
++static int registered_forks;
++
++/* For each fork increment the fork count. */
++static void
++register_fork (void)
++{
++ registered_forks++;
++}
++
+ struct test_case_struct
+ {
+ int retval;
+@@ -206,6 +225,12 @@ struct test_case_struct
+ { WRDE_SYNTAX, NULL, "$((2+))", 0, 0, { NULL, }, IFS },
+ { WRDE_SYNTAX, NULL, "`", 0, 0, { NULL, }, IFS },
+ { WRDE_SYNTAX, NULL, "$((010+4+))", 0, 0, { NULL }, IFS },
++ /* Test for CVE-2014-7817. We test 3 combinations of command
++ substitution inside an arithmetic expression to make sure that
++ no commands are executed and error is returned. */
++ { WRDE_CMDSUB, NULL, "$((`echo 1`))", WRDE_NOCMD, 0, { NULL, }, IFS },
++ { WRDE_CMDSUB, NULL, "$((1+`echo 1`))", WRDE_NOCMD, 0, { NULL, }, IFS },
++ { WRDE_CMDSUB, NULL, "$((1+$((`echo 1`))))", WRDE_NOCMD, 0, { NULL, }, IFS },
+
+ { -1, NULL, NULL, 0, 0, { NULL, }, IFS },
+ };
+@@ -258,6 +283,15 @@ main (int argc, char *argv[])
+ return -1;
+ }
+
++ /* If we are not allowed to do command substitution, we install
++ fork handlers to verify that no forks happened. No forks should
++ happen at all if command substitution is disabled. */
++ if (__app_register_atfork (register_fork, NULL, NULL) != 0)
++ {
++ printf ("Failed to register fork handler.\n");
++ return -1;
++ }
++
+ for (test = 0; test_case[test].retval != -1; test++)
+ if (testit (&test_case[test]))
+ ++fail;
+@@ -367,6 +401,9 @@ testit (struct test_case_struct *tc)
+
+ printf ("Test %d (%s): ", ++tests, tc->words);
+
++ if (tc->flags & WRDE_NOCMD)
++ registered_forks = 0;
++
+ if (tc->flags & WRDE_APPEND)
+ {
+ /* initial wordexp() call, to be appended to */
+@@ -378,6 +415,13 @@ testit (struct test_case_struct *tc)
+ }
+ retval = wordexp (tc->words, &we, tc->flags);
+
++ if ((tc->flags & WRDE_NOCMD)
++ && (registered_forks > 0))
++ {
++ printf ("FAILED fork called for WRDE_NOCMD\n");
++ return 1;
++ }
++
+ if (tc->flags & WRDE_DOOFFS)
+ start_offs = sav_we.we_offs;
+
+Index: git/posix/wordexp.c
+===================================================================
+--- git.orig/posix/wordexp.c
++++ git/posix/wordexp.c
+@@ -893,6 +893,10 @@ exec_comm (char *comm, char **word, size
+ pid_t pid;
+ int noexec = 0;
+
++ /* Do nothing if command substitution should not succeed. */
++ if (flags & WRDE_NOCMD)
++ return WRDE_CMDSUB;
++
+ /* Don't fork() unless necessary */
+ if (!comm || !*comm)
+ return 0;
+@@ -2082,9 +2086,6 @@ parse_dollars (char **word, size_t *word
+ }
+ }
+
+- if (flags & WRDE_NOCMD)
+- return WRDE_CMDSUB;
+-
+ (*offset) += 2;
+ return parse_comm (word, word_length, max_length, words, offset, flags,
+ quoted? NULL : pwordexp, ifs, ifs_white);
+@@ -2196,9 +2197,6 @@ parse_dquote (char **word, size_t *word_
+ break;
+
+ case '`':
+- if (flags & WRDE_NOCMD)
+- return WRDE_CMDSUB;
+-
+ ++(*offset);
+ error = parse_backtick (word, word_length, max_length, words,
+ offset, flags, NULL, NULL, NULL);
+@@ -2357,12 +2355,6 @@ wordexp (const char *words, wordexp_t *p
+ break;
+
+ case '`':
+- if (flags & WRDE_NOCMD)
+- {
+- error = WRDE_CMDSUB;
+- goto do_error;
+- }
+-
+ ++words_offset;
+ error = parse_backtick (&word, &word_length, &max_length, words,
+ &words_offset, flags, pwordexp, ifs,
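The test additions above drive wordexp() directly; for application code the observable change is that command substitution smuggled into an arithmetic expansion now fails cleanly with WRDE_CMDSUB. A small caller-side sketch (illustrative, not part of the patch):

#include <stdio.h>
#include <wordexp.h>

int main(void)
{
    wordexp_t we;
    /* Backticks hidden inside $((...)) -- the CVE-2014-7817 vector. */
    int rc = wordexp("$((`echo 1`))", &we, WRDE_NOCMD);

    if (rc == WRDE_CMDSUB) {
        printf("command substitution refused, as expected\n");
        return 0;
    }
    if (rc == 0)
        wordfree(&we);   /* free results only on success */
    return 1;
}

With the patched libc this prints the "refused" message; on an unpatched libc the same call could execute the embedded command, which is what the fork-counting check added to wordexp-test.c is designed to catch.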
diff --git a/meta/recipes-core/glibc/glibc/GLRO_dl_debug_mask.patch b/meta/recipes-core/glibc/glibc/GLRO_dl_debug_mask.patch
new file mode 100644
index 0000000000..e858bfaeda
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/GLRO_dl_debug_mask.patch
@@ -0,0 +1,529 @@
+It's controlled by __OPTION_EGLIBC_RTLD_DEBUG
+so we should use GLRO_dl_debug_mask
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+Upstream-Status: Pending
+Index: git/elf/dl-open.c
+===================================================================
+--- git.orig/elf/dl-open.c 2014-08-27 05:03:59.732070587 +0000
++++ git/elf/dl-open.c 2014-08-27 05:05:25.656070587 +0000
+@@ -147,7 +147,7 @@
+ ns->_ns_main_searchlist->r_list[new_nlist++] = map;
+
+ /* We modify the global scope. Report this. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_SCOPES))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_SCOPES))
+ _dl_debug_printf ("\nadd %s [%lu] to global scope\n",
+ map->l_name, map->l_ns);
+ }
+@@ -243,7 +243,7 @@
+ if (__glibc_unlikely (new->l_searchlist.r_list != NULL))
+ {
+ /* Let the user know about the opencount. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_FILES))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_FILES))
+ _dl_debug_printf ("opening file=%s [%lu]; direct_opencount=%u\n\n",
+ new->l_name, new->l_ns, new->l_direct_opencount);
+
+@@ -294,7 +294,7 @@
+ LIBC_PROBE (map_complete, 3, args->nsid, r, new);
+
+ /* Print scope information. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_SCOPES))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_SCOPES))
+ _dl_show_scope (new, 0);
+
+ /* Only do lazy relocation if `LD_BIND_NOW' is not set. */
+@@ -511,7 +511,7 @@
+ }
+
+ /* Print scope information. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_SCOPES))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_SCOPES))
+ _dl_show_scope (imap, from_scope);
+ }
+
+@@ -584,7 +584,7 @@
+ #endif
+
+ /* Let the user know about the opencount. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_FILES))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_FILES))
+ _dl_debug_printf ("opening file=%s [%lu]; direct_opencount=%u\n\n",
+ new->l_name, new->l_ns, new->l_direct_opencount);
+ }
+Index: git/elf/rtld.c
+===================================================================
+--- git.orig/elf/rtld.c 2014-08-27 05:03:59.732070587 +0000
++++ git/elf/rtld.c 2014-08-27 05:12:33.812070587 +0000
+@@ -321,7 +321,7 @@
+ }
+ #endif
+
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_STATISTICS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_STATISTICS))
+ {
+ #ifndef HP_TIMING_NONAVAIL
+ print_statistics (&rtld_total_time);
+@@ -1699,7 +1699,7 @@
+ after relocation. */
+ struct link_map *l;
+
+- if (GLRO(dl_debug_mask) & DL_DEBUG_PRELINK)
++ if (GLRO_dl_debug_mask & DL_DEBUG_PRELINK)
+ {
+ struct r_scope_elem *scope = &main_map->l_searchlist;
+
+@@ -1729,7 +1729,7 @@
+ _dl_printf ("\n");
+ }
+ }
+- else if (GLRO(dl_debug_mask) & DL_DEBUG_UNUSED)
++ else if (GLRO_dl_debug_mask & DL_DEBUG_UNUSED)
+ {
+ /* Look through the dependencies of the main executable
+ and determine which of them is not actually
+@@ -1837,7 +1837,7 @@
+ }
+ }
+
+- if ((GLRO(dl_debug_mask) & DL_DEBUG_PRELINK)
++ if ((GLRO_dl_debug_mask & DL_DEBUG_PRELINK)
+ && rtld_multiple_ref)
+ {
+ /* Mark the link map as not yet relocated again. */
+@@ -1970,7 +1970,7 @@
+ if (r_list == r_listend && liblist == liblistend)
+ prelinked = true;
+
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_LIBS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_LIBS))
+ _dl_debug_printf ("\nprelink checking: %s\n",
+ prelinked ? "ok" : "failed");
+ }
+@@ -1988,7 +1988,7 @@
+ GLRO(dl_init_all_dirs) = GL(dl_all_dirs);
+
+ /* Print scope information. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_SCOPES))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_SCOPES))
+ {
+ _dl_debug_printf ("\nInitial object scopes\n");
+
+@@ -2262,7 +2262,7 @@
+ if (debopts[cnt].len == len
+ && memcmp (dl_debug, debopts[cnt].name, len) == 0)
+ {
+- GLRO(dl_debug_mask) |= debopts[cnt].mask;
++ GLRO_dl_debug_mask |= debopts[cnt].mask;
+ any_debug = 1;
+ break;
+ }
+@@ -2283,7 +2283,7 @@
+ ++dl_debug;
+ }
+
+- if (GLRO(dl_debug_mask) & DL_DEBUG_UNUSED)
++ if (GLRO_dl_debug_mask & DL_DEBUG_UNUSED)
+ {
+ /* In order to get an accurate picture of whether a particular
+ DT_NEEDED entry is actually used we have to process both
+@@ -2291,7 +2291,7 @@
+ GLRO(dl_lazy) = 0;
+ }
+
+- if (GLRO(dl_debug_mask) & DL_DEBUG_HELP)
++ if (GLRO_dl_debug_mask & DL_DEBUG_HELP)
+ {
+ size_t cnt;
+
+@@ -2490,7 +2490,7 @@
+ {
+ mode = trace;
+ GLRO(dl_verbose) = 1;
+- GLRO(dl_debug_mask) |= DL_DEBUG_PRELINK;
++ GLRO_dl_debug_mask |= DL_DEBUG_PRELINK;
+ GLRO(dl_trace_prelink) = &envline[17];
+ }
+ break;
+@@ -2537,7 +2537,7 @@
+ if (__access ("/etc/suid-debug", F_OK) != 0)
+ {
+ unsetenv ("MALLOC_CHECK_");
+- GLRO(dl_debug_mask) = 0;
++ GLRO_dl_debug_mask = 0;
+ }
+
+ if (mode != normal)
+Index: git/elf/dl-lookup.c
+===================================================================
+--- git.orig/elf/dl-lookup.c 2014-08-27 05:03:59.732070587 +0000
++++ git/elf/dl-lookup.c 2014-08-27 05:13:07.644070587 +0000
+@@ -300,7 +300,7 @@
+ hash table. */
+ if (__glibc_unlikely (tab->size))
+ {
+- assert (GLRO(dl_debug_mask) & DL_DEBUG_PRELINK);
++ assert (GLRO_dl_debug_mask & DL_DEBUG_PRELINK);
+ goto success;
+ }
+ #endif
+@@ -375,7 +375,7 @@
+ continue;
+
+ /* Print some debugging info if wanted. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_SYMBOLS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_SYMBOLS))
+ _dl_debug_printf ("symbol=%s; lookup in file=%s [%lu]\n",
+ undef_name, DSO_FILENAME (map->l_name),
+ map->l_ns);
+@@ -698,7 +698,7 @@
+ }
+
+ /* Display information if we are debugging. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_FILES))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_FILES))
+ _dl_debug_printf ("\
+ \nfile=%s [%lu]; needed by %s [%lu] (relocation dependency)\n\n",
+ DSO_FILENAME (map->l_name),
+@@ -802,7 +802,7 @@
+ {
+ if ((*ref == NULL || ELFW(ST_BIND) ((*ref)->st_info) != STB_WEAK)
+ && skip_map == NULL
+- && !(GLRO(dl_debug_mask) & DL_DEBUG_UNUSED))
++ && !(GLRO_dl_debug_mask & DL_DEBUG_UNUSED))
+ {
+ /* We could find no value for a strong reference. */
+ const char *reference_name = undef_map ? undef_map->l_name : "";
+@@ -873,7 +873,7 @@
+ if (__glibc_unlikely (current_value.m->l_used == 0))
+ current_value.m->l_used = 1;
+
+- if (__glibc_unlikely (GLRO(dl_debug_mask)
++ if (__glibc_unlikely (GLRO_dl_debug_mask
+ & (DL_DEBUG_BINDINGS|DL_DEBUG_PRELINK)))
+ _dl_debug_bindings (undef_name, undef_map, ref,
+ &current_value, version, type_class, protected);
+@@ -938,7 +938,7 @@
+ {
+ const char *reference_name = undef_map->l_name;
+
+- if (GLRO(dl_debug_mask) & DL_DEBUG_BINDINGS)
++ if (GLRO_dl_debug_mask & DL_DEBUG_BINDINGS)
+ {
+ _dl_debug_printf ("binding file %s [%lu] to %s [%lu]: %s symbol `%s'",
+ DSO_FILENAME (reference_name),
+@@ -952,7 +952,7 @@
+ _dl_debug_printf_c ("\n");
+ }
+ #ifdef SHARED
+- if (GLRO(dl_debug_mask) & DL_DEBUG_PRELINK)
++ if (GLRO_dl_debug_mask & DL_DEBUG_PRELINK)
+ {
+ int conflict = 0;
+ struct sym_val val = { NULL, NULL };
+Index: git/elf/get-dynamic-info.h
+===================================================================
+--- git.orig/elf/get-dynamic-info.h 2014-08-27 05:03:59.732070587 +0000
++++ git/elf/get-dynamic-info.h 2014-08-27 05:03:59.728070587 +0000
+@@ -157,7 +157,7 @@
+ them. Therefore to avoid breaking existing applications the
+ best we can do is add a warning during debugging with the
+ intent of notifying the user of the problem. */
+- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_FILES, 0)
++ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_FILES, 0)
+ && l->l_flags_1 & ~DT_1_SUPPORTED_MASK)
+ _dl_debug_printf ("\nWARNING: Unsupported flag value(s) of 0x%x in DT_FLAGS_1.\n",
+ l->l_flags_1 & ~DT_1_SUPPORTED_MASK);
+Index: git/csu/libc-start.c
+===================================================================
+--- git.orig/csu/libc-start.c 2014-08-27 04:59:01.412070587 +0000
++++ git/csu/libc-start.c 2014-08-27 05:09:28.936070587 +0000
+@@ -238,7 +238,7 @@
+
+ /* Call the initializer of the program, if any. */
+ #ifdef SHARED
+- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_IMPCALLS, 0))
++ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_IMPCALLS, 0))
+ GLRO(dl_debug_printf) ("\ninitialize program: %s\n\n", argv[0]);
+ #endif
+ if (init)
+@@ -261,7 +261,7 @@
+ #endif
+
+ #ifdef SHARED
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_IMPCALLS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_IMPCALLS))
+ GLRO(dl_debug_printf) ("\ntransferring control: %s\n\n", argv[0]);
+ #endif
+
+Index: git/elf/dl-cache.c
+===================================================================
+--- git.orig/elf/dl-cache.c 2014-08-27 04:59:01.568070587 +0000
++++ git/elf/dl-cache.c 2014-08-27 05:10:14.384070587 +0000
+@@ -187,7 +187,7 @@
+ const char *best;
+
+ /* Print a message if the loading of libs is traced. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_LIBS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_LIBS))
+ _dl_debug_printf (" search cache=%s\n", LD_SO_CACHE);
+
+ if (cache == NULL)
+@@ -285,7 +285,7 @@
+ }
+
+ /* Print our result if wanted. */
+- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_LIBS, 0)
++ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_LIBS, 0)
+ && best != NULL)
+ _dl_debug_printf (" trying file=%s\n", best);
+
+Index: git/elf/dl-close.c
+===================================================================
+--- git.orig/elf/dl-close.c 2014-08-27 04:59:01.568070587 +0000
++++ git/elf/dl-close.c 2014-08-27 05:10:26.456070587 +0000
+@@ -125,7 +125,7 @@
+ dl_close_state = rerun;
+
+ /* There are still references to this object. Do nothing more. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_FILES))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_FILES))
+ _dl_debug_printf ("\nclosing file=%s; direct_opencount=%u\n",
+ map->l_name, map->l_direct_opencount);
+
+@@ -257,7 +257,7 @@
+ if (imap->l_init_called)
+ {
+ /* When debugging print a message first. */
+- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_IMPCALLS,
++ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_IMPCALLS,
+ 0))
+ _dl_debug_printf ("\ncalling fini: %s [%lu]\n\n",
+ imap->l_name, nsid);
+@@ -664,7 +664,7 @@
+ free (imap->l_reldeps);
+
+ /* Print debugging message. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_FILES))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_FILES))
+ _dl_debug_printf ("\nfile=%s [%lu]; destroying link map\n",
+ imap->l_name, imap->l_ns);
+
+Index: git/elf/dl-conflict.c
+===================================================================
+--- git.orig/elf/dl-conflict.c 2014-08-27 04:59:01.568070587 +0000
++++ git/elf/dl-conflict.c 2014-08-27 05:10:37.652070587 +0000
+@@ -32,7 +32,7 @@
+ ElfW(Rela) *conflictend)
+ {
+ #if ! ELF_MACHINE_NO_RELA
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_RELOC))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_RELOC))
+ _dl_debug_printf ("\nconflict processing: %s\n", DSO_FILENAME (l->l_name));
+
+ {
+Index: git/elf/dl-deps.c
+===================================================================
+--- git.orig/elf/dl-deps.c 2014-08-27 04:59:01.568070587 +0000
++++ git/elf/dl-deps.c 2014-08-27 05:10:48.260070587 +0000
+@@ -127,7 +127,7 @@
+ else \
+ { \
+ /* This is for DT_AUXILIARY. */ \
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_LIBS)) \
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_LIBS)) \
+ _dl_debug_printf (N_("\
+ cannot load auxiliary `%s' because of empty dynamic string token " \
+ "substitution\n"), __str); \
+@@ -303,7 +303,7 @@
+ args.name = name;
+
+ /* Say that we are about to load an auxiliary library. */
+- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_LIBS,
++ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_LIBS,
+ 0))
+ _dl_debug_printf ("load auxiliary object=%s"
+ " requested by file=%s\n",
+@@ -520,7 +520,7 @@
+ runp->map->l_reserved = 0;
+ }
+
+- if (__builtin_expect (GLRO(dl_debug_mask) & DL_DEBUG_PRELINK, 0) != 0
++ if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_PRELINK, 0) != 0
+ && map == GL(dl_ns)[LM_ID_BASE]._ns_loaded)
+ {
+ /* If we are to compute conflicts, we have to build local scope
+Index: git/elf/dl-error.c
+===================================================================
+--- git.orig/elf/dl-error.c 2014-08-27 04:59:01.568070587 +0000
++++ git/elf/dl-error.c 2014-08-27 05:11:06.752070587 +0000
+@@ -139,7 +139,7 @@
+ _dl_signal_cerror (int errcode, const char *objname, const char *occation,
+ const char *errstring)
+ {
+- if (__builtin_expect (GLRO(dl_debug_mask)
++ if (__builtin_expect (GLRO_dl_debug_mask
+ & ~(DL_DEBUG_STATISTICS|DL_DEBUG_PRELINK), 0))
+ _dl_debug_printf ("%s: error: %s: %s (%s)\n", objname, occation,
+ errstring, receiver ? "continued" : "fatal");
+Index: git/elf/dl-fini.c
+===================================================================
+--- git.orig/elf/dl-fini.c 2014-08-27 04:59:01.568070587 +0000
++++ git/elf/dl-fini.c 2014-08-27 05:11:17.544070587 +0000
+@@ -234,7 +234,7 @@
+ || l->l_info[DT_FINI] != NULL)
+ {
+ /* When debugging print a message first. */
+- if (__builtin_expect (GLRO(dl_debug_mask)
++ if (__builtin_expect (GLRO_dl_debug_mask
+ & DL_DEBUG_IMPCALLS, 0))
+ _dl_debug_printf ("\ncalling fini: %s [%lu]\n\n",
+ DSO_FILENAME (l->l_name),
+@@ -286,7 +286,7 @@
+ goto again;
+ }
+
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_STATISTICS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_STATISTICS))
+ _dl_debug_printf ("\nruntime linker statistics:\n"
+ " final number of relocations: %lu\n"
+ "final number of relocations from cache: %lu\n",
+Index: git/elf/dl-init.c
+===================================================================
+--- git.orig/elf/dl-init.c 2014-08-27 04:59:01.568070587 +0000
++++ git/elf/dl-init.c 2014-08-27 05:11:28.372070587 +0000
+@@ -52,7 +52,7 @@
+ return;
+
+ /* Print a debug message if wanted. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_IMPCALLS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_IMPCALLS))
+ _dl_debug_printf ("\ncalling init: %s\n\n",
+ DSO_FILENAME (l->l_name));
+
+@@ -102,7 +102,7 @@
+ ElfW(Addr) *addrs;
+ unsigned int cnt;
+
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_IMPCALLS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_IMPCALLS))
+ _dl_debug_printf ("\ncalling preinit: %s\n\n",
+ DSO_FILENAME (main_map->l_name));
+
+Index: git/elf/dl-load.c
+===================================================================
+--- git.orig/elf/dl-load.c 2014-08-27 04:59:01.572070587 +0000
++++ git/elf/dl-load.c 2014-08-27 05:11:41.156070587 +0000
+@@ -957,7 +957,7 @@
+ }
+
+ /* Print debugging message. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_FILES))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_FILES))
+ _dl_debug_printf ("file=%s [%lu]; generating link map\n", name, nsid);
+
+ /* This is the ELF header. We read it in `open_verify'. */
+@@ -1361,7 +1361,7 @@
+
+ l->l_entry += l->l_addr;
+
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_FILES))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_FILES))
+ _dl_debug_printf ("\
+ dynamic: 0x%0*lx base: 0x%0*lx size: 0x%0*Zx\n\
+ entry: 0x%0*lx phdr: 0x%0*lx phnum: %*u\n\n",
+@@ -1787,7 +1787,7 @@
+
+ /* If we are debugging the search for libraries print the path
+ now if it hasn't happened now. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_LIBS)
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_LIBS)
+ && current_what != this_dir->what)
+ {
+ current_what = this_dir->what;
+@@ -1808,7 +1808,7 @@
+ - buf);
+
+ /* Print name we try if this is wanted. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_LIBS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_LIBS))
+ _dl_debug_printf (" trying file=%s\n", buf);
+
+ fd = open_verify (buf, fbp, loader, whatcode, mode,
+@@ -1953,7 +1953,7 @@
+ }
+
+ /* Display information if we are debugging. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_FILES)
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_FILES)
+ && loader != NULL)
+ _dl_debug_printf ((mode & __RTLD_CALLMAP) == 0
+ ? "\nfile=%s [%lu]; needed by %s [%lu]\n"
+@@ -1995,7 +1995,7 @@
+
+ size_t namelen = strlen (name) + 1;
+
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_LIBS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_LIBS))
+ _dl_debug_printf ("find library=%s [%lu]; searching\n", name, nsid);
+
+ fd = -1;
+@@ -2122,7 +2122,7 @@
+ &realname, &fb, l, LA_SER_DEFAULT, &found_other_class);
+
+ /* Add another newline when we are tracing the library loading. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_LIBS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_LIBS))
+ _dl_debug_printf ("\n");
+ }
+ else
+@@ -2155,7 +2155,7 @@
+ if (__glibc_unlikely (fd == -1))
+ {
+ if (trace_mode
+- && __glibc_likely ((GLRO(dl_debug_mask) & DL_DEBUG_PRELINK) == 0))
++ && __glibc_likely ((GLRO_dl_debug_mask & DL_DEBUG_PRELINK) == 0))
+ {
+ /* We haven't found an appropriate library. But since we
+ are only interested in the list of libraries this isn't
+Index: git/elf/dl-object.c
+===================================================================
+--- git.orig/elf/dl-object.c 2014-08-27 04:59:01.572070587 +0000
++++ git/elf/dl-object.c 2014-08-27 05:11:51.756070587 +0000
+@@ -98,7 +98,7 @@
+ new->l_type = type;
+ /* If we set the bit now since we know it is never used we avoid
+ dirtying the cache line later. */
+- if ((GLRO(dl_debug_mask) & DL_DEBUG_UNUSED) == 0)
++ if ((GLRO_dl_debug_mask & DL_DEBUG_UNUSED) == 0)
+ new->l_used = 1;
+ new->l_loader = loader;
+ #if NO_TLS_OFFSET != 0
+Index: git/elf/dl-reloc.c
+===================================================================
+--- git.orig/elf/dl-reloc.c 2014-08-27 04:59:01.572070587 +0000
++++ git/elf/dl-reloc.c 2014-08-27 05:12:07.056070587 +0000
+@@ -183,7 +183,7 @@
+ && __builtin_expect (l->l_info[DT_BIND_NOW] != NULL, 0))
+ lazy = 0;
+
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_RELOC))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_RELOC))
+ _dl_debug_printf ("\nrelocation processing: %s%s\n",
+ DSO_FILENAME (l->l_name), lazy ? " (lazy)" : "");
+
+Index: git/elf/dl-version.c
+===================================================================
+--- git.orig/elf/dl-version.c 2014-08-27 04:59:01.608070587 +0000
++++ git/elf/dl-version.c 2014-08-27 05:12:19.568070587 +0000
+@@ -82,7 +82,7 @@
+ int result = 0;
+
+ /* Display information about what we are doing while debugging. */
+- if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_VERSIONS))
++ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_VERSIONS))
+ _dl_debug_printf ("\
+ checking for version `%s' in file %s [%lu] required by file %s [%lu]\n",
+ string, DSO_FILENAME (map->l_name),
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/IO-acquire-lock-fix.patch b/meta/recipes-core/glibc/glibc/IO-acquire-lock-fix.patch
index cf5803585c..ffbaba14a2 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/IO-acquire-lock-fix.patch
+++ b/meta/recipes-core/glibc/glibc/IO-acquire-lock-fix.patch
@@ -2,11 +2,11 @@ import http://sourceware.org/ml/libc-ports/2007-12/msg00000.html
Upstream-Status: Pending
-Index: libc/bits/stdio-lock.h
+Index: git/bits/stdio-lock.h
===================================================================
---- libc.orig/bits/stdio-lock.h 2009-10-28 14:34:19.000000000 -0700
-+++ libc/bits/stdio-lock.h 2009-10-28 14:34:54.000000000 -0700
-@@ -50,6 +50,8 @@ __libc_lock_define_recursive (typedef, _
+--- git.orig/bits/stdio-lock.h 2014-08-29 10:33:57.960070587 -0700
++++ git/bits/stdio-lock.h 2014-08-29 10:33:57.952070587 -0700
+@@ -49,6 +49,8 @@
_IO_cleanup_region_start ((void (*) (void *)) _IO_funlockfile, (_fp)); \
_IO_flockfile (_fp)
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/add_resource_h_to_wait_h.patch b/meta/recipes-core/glibc/glibc/add_resource_h_to_wait_h.patch
index f5023c08d4..4559de7bc1 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/add_resource_h_to_wait_h.patch
+++ b/meta/recipes-core/glibc/glibc/add_resource_h_to_wait_h.patch
@@ -6,11 +6,11 @@ Upstream-Status: Inapproriate [older kernel/perf specific]
Signed-off-by: Saul Wold <sgw@linux.intel.com>
-Index: libc/posix/sys/wait.h
+Index: git/posix/sys/wait.h
===================================================================
---- libc.orig/posix/sys/wait.h
-+++ libc/posix/sys/wait.h
-@@ -28,6 +28,7 @@
+--- git.orig/posix/sys/wait.h 2014-08-29 10:35:10.432070587 -0700
++++ git/posix/sys/wait.h 2014-08-29 10:35:10.424070587 -0700
+@@ -27,6 +27,7 @@
__BEGIN_DECLS
#include <signal.h>
diff --git a/meta/recipes-core/glibc/glibc/eglibc-header-bootstrap.patch b/meta/recipes-core/glibc/glibc/eglibc-header-bootstrap.patch
new file mode 100644
index 0000000000..e1aa13926a
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/eglibc-header-bootstrap.patch
@@ -0,0 +1,85 @@
+Taken from EGLIBC, r1484 + r1525
+
+ 2007-02-20 Jim Blandy <jimb@codesourcery.com>
+
+ * Makefile (install-headers): Preserve old behavior: depend on
+ $(inst_includedir)/gnu/stubs.h only if install-bootstrap-headers
+ is set; otherwise, place gnu/stubs.h on the 'install-others' list.
+
+ 2007-02-16 Jim Blandy <jimb@codesourcery.com>
+
+ * Makefile: Amend make install-headers to install everything
+ necessary for building a cross-compiler. Install gnu/stubs.h as
+ part of 'install-headers', not 'install-others'.
+ If install-bootstrap-headers is 'yes', install a dummy copy of
+ gnu/stubs.h, instead of computing the real thing.
+ * include/stubs-bootstrap.h: New file.
+
+Upstream-Status: Pending
+
+Index: git/Makefile
+===================================================================
+--- git.orig/Makefile 2014-08-27 18:35:18.908070587 +0000
++++ git/Makefile 2014-08-27 18:35:19.340070587 +0000
+@@ -69,9 +69,18 @@
+ vpath %.h $(subdir-dirs)
+
+ # What to install.
+-install-others = $(inst_includedir)/gnu/stubs.h
+ install-bin-script =
+
++# If we're bootstrapping, install a dummy gnu/stubs.h along with the
++# other headers, so 'make install-headers' produces a useable include
++# tree. Otherwise, install gnu/stubs.h later, after the rest of the
++# build is done.
++ifeq ($(install-bootstrap-headers),yes)
++install-headers: $(inst_includedir)/gnu/stubs.h
++else
++install-others = $(inst_includedir)/gnu/stubs.h
++endif
++
+ ifeq (yes,$(build-shared))
+ headers += gnu/lib-names.h
+ endif
+@@ -151,6 +160,16 @@
+
+ subdir-stubs := $(foreach dir,$(subdirs),$(common-objpfx)$(dir)/stubs)
+
++# gnu/stubs.h depends (via the subdir 'stubs' targets) on all the .o
++# files in EGLIBC. For bootstrapping a GCC/EGLIBC pair, an empty
++# gnu/stubs.h is good enough.
++ifeq ($(install-bootstrap-headers),yes)
++$(inst_includedir)/gnu/stubs.h: include/stubs-bootstrap.h $(+force)
++ $(make-target-directory)
++ $(INSTALL_DATA) $< $@
++
++installed-stubs =
++else
+ ifndef abi-variants
+ installed-stubs = $(inst_includedir)/gnu/stubs.h
+ else
+@@ -177,6 +196,7 @@
+
+ install-others-nosubdir: $(installed-stubs)
+ endif
++endif
+
+
+ # Since stubs.h is never needed when building the library, we simplify the
+Index: git/include/stubs-bootstrap.h
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/include/stubs-bootstrap.h 2014-08-27 18:35:19.340070587 +0000
+@@ -0,0 +1,12 @@
++/* Placeholder stubs.h file for bootstrapping.
++
++ When bootstrapping a GCC/EGLIBC pair, GCC requires that the EGLIBC
++ headers be installed, but we can't fully build EGLIBC without that
++ GCC. So we run the command:
++
++ make install-headers install-bootstrap-headers=yes
++
++ to install the headers GCC needs, but avoid building certain
++ difficult headers. The <gnu/stubs.h> header depends, via the
++ EGLIBC subdir 'stubs' make targets, on every .o file in EGLIBC, but
++ an empty stubs.h like this will do fine for GCC. */
diff --git a/meta/recipes-core/glibc/glibc/eglibc-install-pic-archives.patch b/meta/recipes-core/glibc/glibc/eglibc-install-pic-archives.patch
new file mode 100644
index 0000000000..9a31255b09
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/eglibc-install-pic-archives.patch
@@ -0,0 +1,109 @@
+2008-02-07 Joseph Myers <joseph@codesourcery.com>
+
+ * Makerules (install-extras, install-map): New variables.
+ (installed-libcs): Add libc_pic.a.
+ (install-lib): Include _pic.a files for versioned shared
+ libraries.
+ (install-map-nosubdir, install-extras-nosubdir): Add rules for
+ installing extra files.
+ (install-no-libc.a-nosubdir): Depend on install-map-nosubdir and
+ install-extras-nosubdir.
+
+
+2008-04-01 Maxim Kuvyrkov <maxim@codesourcery.com>
+
+ * Makerules (install-lib): Don't install libpthread_pic.a.
+ (install-map): Don't install libpthread_pic.map.
+
+Upstream-Status: Pending
+
+Index: git/Makerules
+===================================================================
+--- git.orig/Makerules 2014-08-27 18:49:22.552070587 +0000
++++ git/Makerules 2014-08-27 18:49:27.308070587 +0000
+@@ -612,6 +631,9 @@
+ $(common-objpfx)libc.so: $(common-objpfx)libc.map
+ endif
+ common-generated += libc.so libc_pic.os
++ifndef subdir
++install-extras := soinit.o sofini.o
++endif
+ ifdef libc.so-version
+ $(common-objpfx)libc.so$(libc.so-version): $(common-objpfx)libc.so
+ $(make-link)
+@@ -834,6 +856,7 @@
+ installed-libcs := $(foreach o,$(filter-out .os,$(object-suffixes-for-libc)),\
+ $(inst_libdir)/$(patsubst %,$(libtype$o),\
+ $(libprefix)$(libc-name)))
++installed-libcs := $(installed-libcs) $(inst_libdir)/libc_pic.a
+ install: $(installed-libcs)
+ $(installed-libcs): $(inst_libdir)/lib$(libprefix)%: lib $(+force)
+ $(make-target-directory)
+@@ -862,6 +885,22 @@
+ install-lib.so-versioned := $(filter $(versioned), $(install-lib.so))
+ install-lib.so-unversioned := $(filter-out $(versioned), $(install-lib.so))
+
++# Install the _pic.a files for versioned libraries, and corresponding
++# .map files.
++# libpthread_pic.a breaks mklibs, so don't install it and its map.
++install-lib := $(install-lib) $(install-lib.so-versioned:%.so=%_pic.a)
++install-lib := $(filter-out libpthread_pic.a,$(install-lib))
++# Despite having a soname libhurduser and libmachuser do not use symbol
++# versioning, so don't install the corresponding .map files.
++ifeq ($(build-shared),yes)
++install-map := $(patsubst %.so,%.map,\
++ $(foreach L,$(install-lib.so-versioned),$(notdir $L)))
++install-map := $(filter-out libhurduser.map libmachuser.map libpthread.map,$(install-map))
++ifndef subdir
++install-map := $(install-map) libc.map
++endif
++endif
++
+ # For versioned libraries, we install three files:
+ # $(inst_libdir)/libfoo.so -- for linking, symlink or ld script
+ # $(inst_slibdir)/libfoo.so.NN -- for loading by SONAME, symlink
+@@ -1103,9 +1142,22 @@
+ endif # headers-nonh
+ endif # headers
+
++ifdef install-map
++$(addprefix $(inst_libdir)/,$(patsubst lib%.map,lib%_pic.map,$(install-map))): \
++ $(inst_libdir)/lib%_pic.map: $(common-objpfx)lib%.map $(+force)
++ $(do-install)
++endif
++
++ifdef install-extras
++$(addprefix $(inst_libdir)/libc_pic/,$(install-extras)): \
++ $(inst_libdir)/libc_pic/%.o: $(elfobjdir)/%.os $(+force)
++ $(do-install)
++endif
++
+ .PHONY: install-bin-nosubdir install-bin-script-nosubdir \
+ install-rootsbin-nosubdir install-sbin-nosubdir install-lib-nosubdir \
+- install-data-nosubdir install-headers-nosubdir
++ install-data-nosubdir install-headers-nosubdir install-map-nosubdir \
++ install-extras-nosubdir
+ install-bin-nosubdir: $(addprefix $(inst_bindir)/,$(install-bin))
+ install-bin-script-nosubdir: $(addprefix $(inst_bindir)/,$(install-bin-script))
+ install-rootsbin-nosubdir: \
+@@ -1118,6 +1170,10 @@
+ install-headers-nosubdir: $(addprefix $(inst_includedir)/,$(headers))
+ install-others-nosubdir: $(install-others)
+ install-others-programs-nosubdir: $(install-others-programs)
++install-map-nosubdir: $(addprefix $(inst_libdir)/,\
++ $(patsubst lib%.map,lib%_pic.map,$(install-map)))
++install-extras-nosubdir: $(addprefix $(inst_libdir)/libc_pic/,\
++ $(install-extras))
+
+ # We need all the `-nosubdir' targets so that `install' in the parent
+ # doesn't depend on several things which each iterate over the subdirs.
+@@ -1127,7 +1183,8 @@
+
+ .PHONY: install install-no-libc.a-nosubdir
+ install-no-libc.a-nosubdir: install-headers-nosubdir install-data-nosubdir \
+- install-lib-nosubdir install-others-nosubdir
++ install-lib-nosubdir install-others-nosubdir \
++ install-map-nosubdir install-extras-nosubdir
+ ifeq ($(build-programs),yes)
+ install-no-libc.a-nosubdir: install-bin-nosubdir install-bin-script-nosubdir \
+ install-rootsbin-nosubdir install-sbin-nosubdir \
diff --git a/meta/recipes-core/glibc/glibc/eglibc-ppc8xx-cache-line-workaround.patch b/meta/recipes-core/glibc/glibc/eglibc-ppc8xx-cache-line-workaround.patch
new file mode 100644
index 0000000000..bb83d6d36e
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/eglibc-ppc8xx-cache-line-workaround.patch
@@ -0,0 +1,68 @@
+2007-06-13 Nathan Sidwell <nathan@codesourcery.com>
+ Mark Shinwell <shinwell@codesourcery.com>
+
+ * sysdeps/unix/sysv/linux/powerpc/libc-start.c
+ (__libc_start_main): Detect 8xx parts and clear
+ __cache_line_size if detected.
+ * sysdeps/unix/sysv/linux/powerpc/dl-sysdep.c
+ (DL_PLATFORM_AUXV): Likewise.
+
+Upstream-Status: Pending
+
+Index: git/sysdeps/unix/sysv/linux/powerpc/dl-sysdep.c
+===================================================================
+--- git.orig/sysdeps/unix/sysv/linux/powerpc/dl-sysdep.c 2014-08-27 18:49:23.996070587 +0000
++++ git/sysdeps/unix/sysv/linux/powerpc/dl-sysdep.c 2014-08-27 18:49:27.332070587 +0000
+@@ -24,9 +24,21 @@
+ /* Scan the Aux Vector for the "Data Cache Block Size" entry. If found
+ verify that the static extern __cache_line_size is defined by checking
+ for not NULL. If it is defined then assign the cache block size
+- value to __cache_line_size. */
++ value to __cache_line_size. This is used by memset to
++ optimize setting to zero. We have to detect 8xx processors, which
++ have buggy dcbz implementations that cannot report page faults
++ correctly. That requires reading SPR, which is a privileged
++ operation. Fortunately 2.2.18 and later emulates PowerPC mfspr
++ reads from the PVR register. */
+ #define DL_PLATFORM_AUXV \
+ case AT_DCACHEBSIZE: \
++ if (__LINUX_KERNEL_VERSION >= 0x020218) \
++ { \
++ unsigned pvr = 0; \
++ asm ("mfspr %0, 287" : "=r" (pvr)); \
++ if ((pvr & 0xffff0000) == 0x00500000) \
++ break; \
++ } \
+ __cache_line_size = av->a_un.a_val; \
+ break;
+
+Index: git/sysdeps/unix/sysv/linux/powerpc/libc-start.c
+===================================================================
+--- git.orig/sysdeps/unix/sysv/linux/powerpc/libc-start.c 2014-08-27 18:49:23.996070587 +0000
++++ git/sysdeps/unix/sysv/linux/powerpc/libc-start.c 2014-08-27 18:49:27.332070587 +0000
+@@ -68,11 +68,24 @@
+ rtld_fini = NULL;
+ }
+
+- /* Initialize the __cache_line_size variable from the aux vector. */
++ /* Initialize the __cache_line_size variable from the aux vector.
++ This is used by memset to optimize setting to zero. We have to
++ detect 8xx processors, which have buggy dcbz implementations that
++ cannot report page faults correctly. That requires reading SPR,
++ which is a privileged operation. Fortunately 2.2.18 and later
++ emulates PowerPC mfspr reads from the PVR register. */
+ for (ElfW (auxv_t) * av = auxvec; av->a_type != AT_NULL; ++av)
+ switch (av->a_type)
+ {
+ case AT_DCACHEBSIZE:
++ if (__LINUX_KERNEL_VERSION >= 0x020218)
++ {
++ unsigned pvr = 0;
++
++ asm ("mfspr %0, 287" : "=r" (pvr) :);
++ if ((pvr & 0xffff0000) == 0x00500000)
++ break;
++ }
+ __cache_line_size = av->a_un.a_val;
+ break;
+ }
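Both hunks above add the same test: read the PowerPC Processor Version Register (SPR 287) and skip the cache-line-size assignment when the upper halfword identifies an 8xx part (0x0050). A standalone sketch of that check, assuming a PowerPC build and a kernel that emulates user-space mfspr reads of the PVR (Linux >= 2.2.18, per the comment in the patch):

/* Minimal sketch of the 8xx detection the patch adds (PowerPC only). */
static int
is_ppc_8xx (void)
{
  unsigned int pvr = 0;

  /* SPR 287 is the Processor Version Register; the kernel emulates
     this read from user space on 2.2.18 and later. */
  asm ("mfspr %0, 287" : "=r" (pvr));

  /* The high 16 bits hold the processor family; 0x0050 marks the
     8xx parts with the buggy dcbz behaviour. */
  return (pvr & 0xffff0000) == 0x00500000;
}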
diff --git a/meta/recipes-core/glibc/glibc/eglibc-resolv-dynamic.patch b/meta/recipes-core/glibc/glibc/eglibc-resolv-dynamic.patch
new file mode 100644
index 0000000000..a73bcebe34
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/eglibc-resolv-dynamic.patch
@@ -0,0 +1,54 @@
+cherry-picked from http://www.eglibc.org/archives/patches/msg00772.html
+
+It hasn't yet been merged into glibc
+
+Signed-off-by: Khem Raj
+
+Upstream-Status: Pending
+
+Index: git/resolv/res_libc.c
+===================================================================
+--- git.orig/resolv/res_libc.c 2014-08-27 18:35:15.492070587 +0000
++++ git/resolv/res_libc.c 2014-08-27 18:35:19.204070587 +0000
+@@ -22,12 +22,13 @@
+ #include <arpa/nameser.h>
+ #include <resolv.h>
+ #include <bits/libc-lock.h>
+-
++#include <sys/stat.h>
+
+ /* The following bit is copied from res_data.c (where it is #ifdef'ed
+ out) since res_init() should go into libc.so but the rest of that
+ file should not. */
+
++__libc_lock_define_initialized (static, lock);
+ extern unsigned long long int __res_initstamp attribute_hidden;
+ /* We have atomic increment operations on 64-bit platforms. */
+ #if __WORDSIZE == 64
+@@ -35,7 +36,6 @@
+ # define atomicincunlock(lock) (void) 0
+ # define atomicinc(var) catomic_increment (&(var))
+ #else
+-__libc_lock_define_initialized (static, lock);
+ # define atomicinclock(lock) __libc_lock_lock (lock)
+ # define atomicincunlock(lock) __libc_lock_unlock (lock)
+ # define atomicinc(var) ++var
+@@ -94,7 +94,18 @@
+ int
+ __res_maybe_init (res_state resp, int preinit)
+ {
++ static time_t last_mtime;
++ struct stat statbuf;
++ int ret;
++
+ if (resp->options & RES_INIT) {
++ ret = stat (_PATH_RESCONF, &statbuf);
++ __libc_lock_lock (lock);
++ if ((ret == 0) && (last_mtime != statbuf.st_mtime)) {
++ last_mtime = statbuf.st_mtime;
++ atomicinc (__res_initstamp);
++ }
++ __libc_lock_unlock (lock);
+ if (__res_initstamp != resp->_u._ext.initstamp) {
+ if (resp->nscount > 0)
+ __res_iclose (resp, true);
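The heart of the patch is a cheap "has /etc/resolv.conf changed since we last looked?" test: stat the file, compare st_mtime against a cached value under the lock, and bump __res_initstamp when it differs so per-thread resolver state gets re-initialized. A freestanding sketch of that mtime test, outside libc internals and without the locking:

/* Standalone sketch of the mtime-change detection added to
   __res_maybe_init (illustration only; no libc-internal locking). */
#include <stdbool.h>
#include <sys/stat.h>
#include <time.h>

#define PATH_RESCONF "/etc/resolv.conf"

static time_t last_mtime;

/* Return true when resolv.conf carries a different mtime than the one
   remembered from the previous call, and remember the new value. */
static bool
resolv_conf_changed (void)
{
  struct stat st;

  if (stat (PATH_RESCONF, &st) != 0)
    return false;             /* unreadable: keep the cached state */

  if (st.st_mtime != last_mtime)
    {
      last_mtime = st.st_mtime;
      return true;
    }
  return false;
}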
diff --git a/meta/recipes-core/glibc/glibc/eglibc-sh4-fpscr_values.patch b/meta/recipes-core/glibc/glibc/eglibc-sh4-fpscr_values.patch
new file mode 100644
index 0000000000..bfb813eb7c
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/eglibc-sh4-fpscr_values.patch
@@ -0,0 +1,42 @@
+2010-09-29 Nobuhiro Iwamatsu <iwamatsu@nigauri.org>
+ Andrew Stubbs <ams@codesourcery.com>
+
+ Resolve SH's __fpscr_values to a symbol in libc.so.
+
+ * sysdeps/sh/sh4/fpu/fpu_control.h: Add C++ __set_fpscr prototype.
+ * sysdeps/unix/sysv/linux/sh/Versions (GLIBC_2.2): Add __fpscr_values.
+ * sysdeps/unix/sysv/linux/sh/sysdep.S (___fpscr_values): New constant.
+
+Upstream-Status: Pending
+
+Index: git/sysdeps/unix/sysv/linux/sh/sysdep.S
+===================================================================
+--- git.orig/sysdeps/unix/sysv/linux/sh/sysdep.S 2014-08-27 18:49:24.036070587 +0000
++++ git/sysdeps/unix/sysv/linux/sh/sysdep.S 2014-08-27 18:49:27.332070587 +0000
+@@ -30,3 +30,14 @@
+
+ #define __syscall_error __syscall_error_1
+ #include <sysdeps/unix/sh/sysdep.S>
++
++ .data
++ .align 3
++ .globl ___fpscr_values
++ .type ___fpscr_values, @object
++ .size ___fpscr_values, 8
++___fpscr_values:
++ .long 0
++ .long 0x80000
++weak_alias (___fpscr_values, __fpscr_values)
++
+Index: git/sysdeps/unix/sysv/linux/sh/Versions
+===================================================================
+--- git.orig/sysdeps/unix/sysv/linux/sh/Versions 2014-08-27 18:49:24.028070587 +0000
++++ git/sysdeps/unix/sysv/linux/sh/Versions 2014-08-27 18:49:27.332070587 +0000
+@@ -2,6 +2,7 @@
+ GLIBC_2.2 {
+ # functions used in other libraries
+ __xstat64; __fxstat64; __lxstat64;
++ __fpscr_values;
+
+ # a*
+ alphasort64;
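The two words stored in ___fpscr_values above are the FPSCR settings for SH-4's two precision modes: 0 for single precision and 0x80000, i.e. the PR bit, for double precision. As a hedged sketch (not part of the patch) of what the exported alias makes visible to callers once libc.so provides it:

/* Hedged illustration: the table exported as __fpscr_values holds two
   FPSCR values, index 0 without and index 1 with the PR
   (double-precision) bit, 0x80000, as initialised in the patch. */
extern unsigned long __fpscr_values[2];

unsigned long
fpscr_for_double_precision (void)
{
  return __fpscr_values[1];   /* expected to be 0x80000 per the patch */
}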
diff --git a/meta/recipes-core/glibc/glibc/eglibc-use-option-groups.patch b/meta/recipes-core/glibc/glibc/eglibc-use-option-groups.patch
new file mode 100644
index 0000000000..7136253c8b
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/eglibc-use-option-groups.patch
@@ -0,0 +1,16577 @@
+Forward port of the eglibc option groups support
+
+Upstream-Status: Pending
+
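The forward-ported support boils down to one recurring pattern: each subsystem includes the generated <gnu/option-groups.h> header and wraps optional code in __OPTION_EGLIBC_* / __OPTION_POSIX_* preprocessor conditionals, with matching routines-$(OPTION_...) and tests-$(OPTION_...) variables on the Makefile side. A minimal C-side sketch of that pattern (the guarded feature and function name here are made up; the macro and header names are the ones used throughout the hunks below):

/* Sketch of the option-group guard pattern the patch applies.  The
   function is a made-up example; the disabled branch mirrors what the
   crypt-entry.c hunk below does (fail with ENOSYS). */
#include <gnu/option-groups.h>  /* generated; defines __OPTION_* to 0 or 1 */
#include <errno.h>
#include <string.h>
#include <stdlib.h>

char *
example_optional_feature (const char *input)
{
#if __OPTION_EGLIBC_LOCALE_CODE
  /* Option group enabled: the real code is compiled in. */
  return strdup (input);
#else
  /* Option group disabled: the code is compiled out of libc and
     callers see a clean ENOSYS failure. */
  errno = ENOSYS;
  return NULL;
#endif
}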
+Index: git/argp/argp-fmtstream.c
+===================================================================
+--- git.orig/argp/argp-fmtstream.c 2014-08-29 20:00:42.976070587 -0700
++++ git/argp/argp-fmtstream.c 2014-08-29 20:01:15.188070587 -0700
+@@ -42,6 +42,7 @@
+ #ifdef _LIBC
+ # include <wchar.h>
+ # include <libio/libioP.h>
++# include <gnu/option-groups.h>
+ # define __vsnprintf(s, l, f, a) _IO_vsnprintf (s, l, f, a)
+ #endif
+
+@@ -100,7 +101,11 @@
+ __argp_fmtstream_update (fs);
+ if (fs->p > fs->buf)
+ {
++#ifdef _LIBC
+ __fxprintf (fs->stream, "%.*s", (int) (fs->p - fs->buf), fs->buf);
++#else
++ fwrite_unlocked (fs->buf, 1, fs->p - fs->buf, fs->stream);
++#endif
+ }
+ free (fs->buf);
+ free (fs);
+@@ -145,9 +150,17 @@
+ size_t i;
+ for (i = 0; i < pad; i++)
+ {
++#ifdef _LIBC
+ if (_IO_fwide (fs->stream, 0) > 0)
+- putwc_unlocked (L' ', fs->stream);
++ {
++#if ! _LIBC || __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
++ putwc_unlocked (L' ', fs->stream);
++#else
++ abort ();
++#endif
++ }
+ else
++#endif
+ putc_unlocked (' ', fs->stream);
+ }
+ }
+@@ -308,9 +321,17 @@
+ *nl++ = ' ';
+ else
+ for (i = 0; i < fs->wmargin; ++i)
++#ifdef _LIBC
+ if (_IO_fwide (fs->stream, 0) > 0)
+- putwc_unlocked (L' ', fs->stream);
++ {
++#ifdef OPTION_POSIX_WIDE_CHAR_DEVICE_IO
++ putwc_unlocked (L' ', fs->stream);
++#else
++ abort ();
++#endif
++ }
+ else
++#endif
+ putc_unlocked (' ', fs->stream);
+
+ /* Copy the tail of the original buffer into the current buffer
+Index: git/argp/argp-help.c
+===================================================================
+--- git.orig/argp/argp-help.c 2014-08-29 20:00:42.976070587 -0700
++++ git/argp/argp-help.c 2014-08-29 20:01:15.188070587 -0700
+@@ -51,6 +51,7 @@
+ #ifdef _LIBC
+ # include <../libio/libioP.h>
+ # include <wchar.h>
++# include <gnu/option-groups.h>
+ #endif
+
+ #ifndef _
+@@ -1702,7 +1703,7 @@
+ }
+
+ char *
+-__argp_short_program_name (void)
++(__argp_short_program_name) (void)
+ {
+ # if HAVE_DECL_PROGRAM_INVOCATION_SHORT_NAME
+ return program_invocation_short_name;
+@@ -1873,9 +1874,17 @@
+ #endif
+ }
+
++#ifdef _LIBC
+ if (_IO_fwide (stream, 0) > 0)
+- putwc_unlocked (L'\n', stream);
++ {
++#if ! _LIBC || __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
++ putwc_unlocked (L'\n', stream);
++#else
++ abort ();
++#endif
++ }
+ else
++#endif
+ putc_unlocked ('\n', stream);
+
+ #if _LIBC || (HAVE_FLOCKFILE && HAVE_FUNLOCKFILE)
+Index: git/argp/argp-namefrob.h
+===================================================================
+--- git.orig/argp/argp-namefrob.h 2014-08-29 20:00:42.976070587 -0700
++++ git/argp/argp-namefrob.h 2014-08-29 20:01:15.192070587 -0700
+@@ -76,10 +76,12 @@
+ #undef __argp_fmtstream_wmargin
+ #define __argp_fmtstream_wmargin argp_fmtstream_wmargin
+
++#if 0
+ #include "mempcpy.h"
+ #include "strcase.h"
+ #include "strchrnul.h"
+ #include "strndup.h"
++#endif
+
+ /* normal libc functions we call */
+ #undef __flockfile
+Index: git/argp/Makefile
+===================================================================
+--- git.orig/argp/Makefile 2014-08-29 20:00:42.976070587 -0700
++++ git/argp/Makefile 2014-08-29 20:01:15.192070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Makefile for argp.
+ #
++include ../option-groups.mak
++
+ subdir := argp
+
+ include ../Makeconfig
+Index: git/catgets/Makefile
+===================================================================
+--- git.orig/catgets/Makefile 2014-08-29 20:00:43.008070587 -0700
++++ git/catgets/Makefile 2014-08-29 20:01:15.192070587 -0700
+@@ -22,20 +22,23 @@
+
+ include ../Makeconfig
+
++include ../option-groups.mak
++
+ headers = nl_types.h
+-routines = catgets open_catalog
+-others = gencat
+-install-bin = gencat
+-extra-objs = $(gencat-modules:=.o)
++routines-$(OPTION_EGLIBC_CATGETS) := catgets open_catalog
++others-$(OPTION_EGLIBC_CATGETS) := gencat
++install-bin-$(OPTION_EGLIBC_CATGETS) := gencat
++extra-objs-$(OPTION_EGLIBC_CATGETS) := $(gencat-modules:=.o)
+
+-tests = tst-catgets
+-test-srcs = test-gencat
++tests-$(OPTION_EGLIBC_CATGETS) := tst-catgets
++test-srcs-$(OPTION_EGLIBC_CATGETS) := test-gencat
+
++ifeq (y,$(OPTION_EGLIBC_CATGETS))
+ ifeq ($(run-built-tests),yes)
+ tests-special += $(objpfx)de/libc.cat $(objpfx)test1.cat $(objpfx)test2.cat \
+ $(objpfx)sample.SJIS.cat $(objpfx)test-gencat.out
+ endif
+-
++endif
+ gencat-modules = xmalloc
+
+ # To find xmalloc.c
+Index: git/crypt/crypt-entry.c
+===================================================================
+--- git.orig/crypt/crypt-entry.c 2014-08-29 20:00:43.028070587 -0700
++++ git/crypt/crypt-entry.c 2014-08-29 20:01:15.192070587 -0700
+@@ -27,6 +27,7 @@
+ #include <stdio.h>
+ #endif
+ #include <string.h>
++#include <gnu/option-groups.h>
+ #include <errno.h>
+ #include <fips-private.h>
+
+@@ -76,9 +77,11 @@
+ const char *salt;
+ struct crypt_data * __restrict data;
+ {
++#if __OPTION_EGLIBC_CRYPT_UFC
+ ufc_long res[4];
+ char ktab[9];
+ ufc_long xx = 25; /* to cope with GCC long long compiler bugs */
++#endif /*__OPTION_EGLIBC_CRYPT_UFC*/
+
+ #ifdef _LIBC
+ /* Try to find out whether we have to use MD5 encryption replacement. */
+@@ -105,6 +108,7 @@
+ sizeof (struct crypt_data));
+ #endif
+
++#if __OPTION_EGLIBC_CRYPT_UFC
+ /*
+ * Hack DES tables according to salt
+ */
+@@ -144,6 +148,10 @@
+ */
+ _ufc_output_conversion_r (res[0], res[1], salt, data);
+ return data->crypt_3_buf;
++#else /* __OPTION_EGLIBC_CRYPT_UFC */
++ __set_errno (ENOSYS);
++ return NULL;
++#endif /* __OPTION_EGLIBC_CRYPT_UFC */
+ }
+ weak_alias (__crypt_r, crypt_r)
+
+@@ -168,7 +176,12 @@
+ return __sha512_crypt (key, salt);
+ #endif
+
++#if __OPTION_EGLIBC_CRYPT_UFC
+ return __crypt_r (key, salt, &_ufc_foobar);
++#else /* __OPTION_EGLIBC_CRYPT_UFC */
++ __set_errno (ENOSYS);
++ return NULL;
++#endif /* __OPTION_EGLIBC_CRYPT_UFC */
+ }
+
+
+Index: git/crypt/Makefile
+===================================================================
+--- git.orig/crypt/Makefile 2014-08-29 20:00:43.024070587 -0700
++++ git/crypt/Makefile 2014-08-29 20:01:15.192070587 -0700
+@@ -18,21 +18,25 @@
+ #
+ # Sub-makefile for crypt() portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := crypt
+
+ include ../Makeconfig
+
+ headers := crypt.h
+
+-extra-libs := libcrypt
+-extra-libs-others := $(extra-libs)
++extra-libs-$(OPTION_EGLIBC_CRYPT) := libcrypt
++extra-libs-others-y := $(extra-libs-y)
+
+-libcrypt-routines := crypt-entry md5-crypt sha256-crypt sha512-crypt crypt \
+- crypt_util
++libcrypt-routines :=crypt-entry md5-crypt sha256-crypt sha512-crypt crypt_common
++libcrypt-routines-$(OPTION_EGLIBC_CRYPT_UFC) := crypt crypt_util
++libcrypt-routines += $(libcrypt-routines-y)
+
+-tests := cert md5c-test sha256c-test sha512c-test badsalttest
++tests-$(OPTION_EGLIBC_CRYPT) := md5c-test sha256c-test sha512c-test badsalttest
++tests-$(OPTION_EGLIBC_CRYPT_UFC) += cert
+
+-ifeq ($(crypt-in-libc),yes)
++ifeq ($(crypt-in-libc)$(OPTION_EGLIBC_CRYPT),yesy)
+ routines += $(libcrypt-routines)
+ endif
+
+@@ -44,7 +48,7 @@
+ else
+ libcrypt-routines += md5 sha256 sha512
+
+-tests += md5test sha256test sha512test
++tests-$(OPTION_EGLIBC_CRYPT) += md5test sha256test sha512test
+
+ # The test md5test-giant uses up to 400 MB of RSS and runs on a fast
+ # machine over a minute.
+@@ -64,8 +68,10 @@
+ $(objpfx)sha512test: $(patsubst %, $(objpfx)%.o,$(sha512-routines))
+ endif
+
++ifeq ($(OPTION_EGLIBC_CRYPT),y)
+ ifeq (yes,$(build-shared))
+ $(addprefix $(objpfx),$(tests)): $(objpfx)libcrypt.so
+ else
+ $(addprefix $(objpfx),$(tests)): $(objpfx)libcrypt.a
+ endif
++endif # eglibc: OPTION_EGLIBC_CRYPT
+Index: git/csu/Makefile
+===================================================================
+--- git.orig/csu/Makefile 2014-08-29 20:00:43.032070587 -0700
++++ git/csu/Makefile 2014-08-29 20:01:15.192070587 -0700
+@@ -22,6 +22,8 @@
+ # crtn.o, special "initializer" and "finalizer" files used in the link
+ # to make the .init and .fini sections work right.
+
++include ../option-groups.mak
++
+ subdir := csu
+
+ include ../Makeconfig
+Index: git/debug/Makefile
+===================================================================
+--- git.orig/debug/Makefile 2014-08-29 20:00:43.036070587 -0700
++++ git/debug/Makefile 2014-08-29 20:01:15.192070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Sub-makefile for debug portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := debug
+
+ include ../Makeconfig
+@@ -27,7 +29,7 @@
+ # Note that ptsname_r_chk and getlogin_r are not here, but in
+ # login/Makefile instead. If that subdir is omitted from the
+ # build, its _FORTIFY_SOURCE support will be too.
+-routines = backtrace backtracesyms backtracesymsfd noophooks \
++routines = noophooks \
+ memcpy_chk memmove_chk mempcpy_chk memset_chk stpcpy_chk \
+ strcat_chk strcpy_chk strncat_chk strncpy_chk stpncpy_chk \
+ sprintf_chk vsprintf_chk snprintf_chk vsnprintf_chk \
+@@ -36,20 +38,27 @@
+ read_chk pread_chk pread64_chk recv_chk recvfrom_chk \
+ readlink_chk readlinkat_chk getwd_chk getcwd_chk \
+ realpath_chk fread_chk fread_u_chk \
+- wctomb_chk wcscpy_chk wmemcpy_chk wmemmove_chk wmempcpy_chk \
+- wcpcpy_chk wcsncpy_chk wcscat_chk wcsncat_chk wmemset_chk \
+- wcpncpy_chk \
+- swprintf_chk vswprintf_chk wprintf_chk fwprintf_chk \
+- vwprintf_chk vfwprintf_chk fgetws_chk fgetws_u_chk \
+ confstr_chk getgroups_chk ttyname_r_chk \
+- gethostname_chk getdomainname_chk wcrtomb_chk mbsnrtowcs_chk \
+- wcsnrtombs_chk mbsrtowcs_chk wcsrtombs_chk mbstowcs_chk \
+- wcstombs_chk asprintf_chk vasprintf_chk dprintf_chk \
++ gethostname_chk getdomainname_chk \
++ asprintf_chk vasprintf_chk dprintf_chk \
+ vdprintf_chk obprintf_chk \
+ longjmp_chk ____longjmp_chk \
+ fdelt_chk poll_chk ppoll_chk \
+ stack_chk_fail fortify_fail \
+ $(static-only-routines)
++routines-$(OPTION_EGLIBC_BACKTRACE) += backtrace backtracesyms backtracesymsfd
++routines-$(OPTION_POSIX_WIDE_CHAR_DEVICE_IO) \
++ += wprintf_chk fwprintf_chk \
++ vwprintf_chk vfwprintf_chk fgetws_chk fgetws_u_chk
++routines-$(OPTION_POSIX_C_LANG_WIDE_CHAR) \
++ += wctomb_chk wcscpy_chk wmemcpy_chk wmemmove_chk wmempcpy_chk \
++ wcpcpy_chk wcsncpy_chk wcscat_chk wcsncat_chk wmemset_chk \
++ wcpncpy_chk \
++ swprintf_chk vswprintf_chk \
++ wcrtomb_chk mbsnrtowcs_chk \
++ wcsnrtombs_chk mbsrtowcs_chk wcsrtombs_chk mbstowcs_chk \
++ wcstombs_chk
++
+ static-only-routines := warning-nop stack_chk_fail_local
+
+ CFLAGS-backtrace.c = -fno-omit-frame-pointer
+@@ -129,11 +138,15 @@
+ LDFLAGS-tst-backtrace5 = -rdynamic
+ LDFLAGS-tst-backtrace6 = -rdynamic
+
+-tests = backtrace-tst tst-longjmp_chk tst-chk1 tst-chk2 tst-chk3 \
+- tst-lfschk1 tst-lfschk2 tst-lfschk3 test-strcpy_chk test-stpcpy_chk \
+- tst-chk4 tst-chk5 tst-chk6 tst-lfschk4 tst-lfschk5 tst-lfschk6 \
+- tst-longjmp_chk2 tst-backtrace2 tst-backtrace3 tst-backtrace4 \
+- tst-backtrace5 tst-backtrace6
++tests = tst-longjmp_chk test-strcpy_chk test-stpcpy_chk tst-longjmp_chk2
++tests-$(OPTION_EGLIBC_LOCALE_CODE) \
++ += tst-chk1 tst-chk2 tst-chk3 tst-lfschk1 tst-lfschk2 tst-lfschk3
++tests-$(OPTION_EGLIBC_BACKTRACE) \
++ += backtrace-tst tst-backtrace2 tst-backtrace3 tst-backtrace4 \
++ tst-backtrace5 tst-backtrace6
++ifeq (yy,$(OPTION_EGLIBC_LOCALE_CODE)$(OPTION_EGLIBC_CXX_TESTS))
++tests += tst-chk4 tst-chk5 tst-chk6 tst-lfschk4 tst-lfschk5 tst-lfschk6
++endif
+
+ tests-ifunc := $(stpcpy_chk strcpy_chk:%=test-%-ifunc)
+ tests += $(tests-ifunc)
+Index: git/debug/segfault.c
+===================================================================
+--- git.orig/debug/segfault.c 2014-08-29 20:00:46.280070587 -0700
++++ git/debug/segfault.c 2014-08-29 20:01:15.192070587 -0700
+@@ -30,6 +30,7 @@
+ #include <unistd.h>
+ #include <_itoa.h>
+ #include <ldsodefs.h>
++#include <gnu/option-groups.h>
+
+ /* This file defines macros to access the content of the sigcontext element
+ passed up by the signal handler. */
+@@ -91,6 +92,7 @@
+ REGISTER_DUMP;
+ #endif
+
++#if __OPTION_EGLIBC_BACKTRACE
+ WRITE_STRING ("\nBacktrace:\n");
+
+ /* Get the backtrace. */
+@@ -113,6 +115,7 @@
+
+ /* Now generate nicely formatted output. */
+ __backtrace_symbols_fd (arr + i, cnt - i, fd);
++#endif
+
+ #ifdef HAVE_PROC_SELF
+ /* Now the link map. */
+Index: git/debug/tst-chk1.c
+===================================================================
+--- git.orig/debug/tst-chk1.c 2014-08-29 20:00:46.288070587 -0700
++++ git/debug/tst-chk1.c 2014-08-29 20:01:15.192070587 -0700
+@@ -31,6 +31,7 @@
+ #include <sys/select.h>
+ #include <sys/socket.h>
+ #include <sys/un.h>
++#include <gnu/option-groups.h>
+
+
+ #define obstack_chunk_alloc malloc
+@@ -307,6 +308,7 @@
+ snprintf (buf + 8, l0 + 3, "%d", num2);
+ CHK_FAIL_END
+
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
+ CHK_FAIL_START
+ swprintf (wbuf + 8, 3, L"%d", num1);
+ CHK_FAIL_END
+@@ -314,6 +316,7 @@
+ CHK_FAIL_START
+ swprintf (wbuf + 8, l0 + 3, L"%d", num1);
+ CHK_FAIL_END
++#endif /* __OPTION_POSIX_C_LANG_WIDE_CHAR */
+ # endif
+
+ memcpy (buf, str1 + 2, l0 + 9);
+@@ -381,6 +384,7 @@
+ CHK_FAIL_END
+ #endif
+
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
+
+ /* These ops can be done without runtime checking of object size. */
+ wmemcpy (wbuf, L"abcdefghij", 10);
+@@ -605,6 +609,7 @@
+ CHK_FAIL_END
+ #endif
+
++#endif /* __OPTION_POSIX_C_LANG_WIDE_CHAR */
+
+ /* Now checks for %n protection. */
+
+@@ -1192,6 +1197,7 @@
+ # endif
+ #endif
+
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
+ if (setlocale (LC_ALL, "de_DE.UTF-8") != NULL)
+ {
+ assert (MB_CUR_MAX <= 10);
+@@ -1348,6 +1354,7 @@
+ puts ("cannot set locale");
+ ret = 1;
+ }
++#endif /* __OPTION_POSIX_C_LANG_WIDE_CHAR */
+
+ int fd = posix_openpt (O_RDWR);
+ if (fd != -1)
+Index: git/dlfcn/Makefile
+===================================================================
+--- git.orig/dlfcn/Makefile 2014-08-29 20:00:46.312070587 -0700
++++ git/dlfcn/Makefile 2014-08-29 20:01:15.192070587 -0700
+@@ -15,6 +15,8 @@
+ # License along with the GNU C Library; if not, see
+ # <http://www.gnu.org/licenses/>.
+
++include ../option-groups.mak
++
+ subdir := dlfcn
+
+ include ../Makeconfig
+@@ -36,7 +38,9 @@
+ ifeq (yes,$(build-shared))
+ tests = glrefmain failtest tst-dladdr default errmsg1 tstcxaatexit \
+ bug-dlopen1 bug-dlsym1 tst-dlinfo bug-atexit1 bug-atexit2 \
+- bug-atexit3 tstatexit bug-dl-leaf
++ tstatexit bug-dl-leaf
++
++tests-$(OPTION_EGLIBC_CXX_TESTS) += bug-atexit3
+ endif
+ modules-names = glreflib1 glreflib2 glreflib3 failtestmod defaultmod1 \
+ defaultmod2 errmsg1mod modatexit modcxaatexit \
+Index: git/elf/dl-support.c
+===================================================================
+--- git.orig/elf/dl-support.c 2014-08-29 20:00:46.384070587 -0700
++++ git/elf/dl-support.c 2014-08-29 20:01:15.192070587 -0700
+@@ -19,6 +19,7 @@
+ /* This file defines some things that for the dynamic linker are defined in
+ rtld.c and dl-sysdep.c in ways appropriate to bootstrap dynamic linking. */
+
++#include <gnu/option-groups.h>
+ #include <errno.h>
+ #include <libintl.h>
+ #include <stdlib.h>
+@@ -42,7 +43,9 @@
+ const char *_dl_platform;
+ size_t _dl_platformlen;
+
++#if __OPTION_EGLIBC_RTLD_DEBUG
+ int _dl_debug_mask;
++#endif
+ int _dl_lazy;
+ ElfW(Addr) _dl_use_load_bias = -2;
+ int _dl_dynamic_weak;
+Index: git/elf/rtld.c
+===================================================================
+--- git.orig/elf/rtld.c 2014-08-29 20:01:14.708070587 -0700
++++ git/elf/rtld.c 2014-08-29 20:01:15.196070587 -0700
+@@ -16,6 +16,7 @@
+ License along with the GNU C Library; if not, see
+ <http://www.gnu.org/licenses/>. */
+
++#include <gnu/option-groups.h>
+ #include <errno.h>
+ #include <dlfcn.h>
+ #include <fcntl.h>
+@@ -2200,6 +2201,7 @@
+ objname, errstring);
+ }
+
++#if __OPTION_EGLIBC_RTLD_DEBUG
+ /* Nonzero if any of the debugging options is enabled. */
+ static int any_debug attribute_relro;
+
+@@ -2309,6 +2311,7 @@
+ _exit (0);
+ }
+ }
++#endif /* __OPTION_EGLIBC_RTLD_DEBUG */
+
+ static void
+ process_dl_audit (char *str)
+@@ -2376,12 +2379,14 @@
+ break;
+
+ case 5:
++#if __OPTION_EGLIBC_RTLD_DEBUG
+ /* Debugging of the dynamic linker? */
+ if (memcmp (envline, "DEBUG", 5) == 0)
+ {
+ process_dl_debug (&envline[6]);
+ break;
+ }
++#endif
+ if (memcmp (envline, "AUDIT", 5) == 0)
+ process_dl_audit (&envline[6]);
+ break;
+@@ -2490,7 +2495,9 @@
+ {
+ mode = trace;
+ GLRO(dl_verbose) = 1;
++#if __OPTION_EGLIBC_RTLD_DEBUG
+ GLRO_dl_debug_mask |= DL_DEBUG_PRELINK;
++#endif
+ GLRO(dl_trace_prelink) = &envline[17];
+ }
+ break;
+@@ -2537,12 +2544,15 @@
+ if (__access ("/etc/suid-debug", F_OK) != 0)
+ {
+ unsetenv ("MALLOC_CHECK_");
++#if __OPTION_EGLIBC_RTLD_DEBUG
+ GLRO_dl_debug_mask = 0;
++#endif
+ }
+
+ if (mode != normal)
+ _exit (5);
+ }
++#if __OPTION_EGLIBC_RTLD_DEBUG
+ /* If we have to run the dynamic linker in debugging mode and the
+ LD_DEBUG_OUTPUT environment variable is given, we write the debug
+ messages to this file. */
+@@ -2567,6 +2577,7 @@
+ /* We use standard output if opening the file failed. */
+ GLRO(dl_debug_fd) = STDOUT_FILENO;
+ }
++#endif /* __OPTION_EGLIBC_RTLD_DEBUG */
+ }
+
+
+Index: git/extra-lib.mk
+===================================================================
+--- git.orig/extra-lib.mk 2014-08-29 20:00:46.544070587 -0700
++++ git/extra-lib.mk 2014-08-29 20:01:15.196070587 -0700
+@@ -25,7 +25,9 @@
+ extra-objs := $(extra-objs)
+
+ # The modules that go in $(lib).
+-all-$(lib)-routines := $($(lib)-routines) $($(lib)-sysdep_routines)
++all-$(lib)-routines := $($(lib)-routines) \
++ $($(lib)-routines-y) \
++ $($(lib)-sysdep_routines)
+
+ # Add each flavor of library to the lists of things to build and install.
+ install-lib += $(foreach o,$(object-suffixes-$(lib)),$(lib:lib%=$(libtype$o)))
+@@ -101,7 +103,7 @@
+ endif
+
+ # This will define `libof-ROUTINE := LIB' for each of the routines.
+-cpp-srcs-left := $($(lib)-routines) $($(lib)-sysdep_routines)
++cpp-srcs-left := $(all-$(lib)-routines)
+ ifneq (,$(cpp-srcs-left))
+ include $(patsubst %,$(..)cppflags-iterator.mk,$(cpp-srcs-left))
+ endif
+Index: git/grp/Makefile
+===================================================================
+--- git.orig/grp/Makefile 2014-08-29 20:00:46.556070587 -0700
++++ git/grp/Makefile 2014-08-29 20:01:15.196070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Sub-makefile for grp portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := grp
+
+ include ../Makeconfig
+@@ -29,6 +31,9 @@
+ getgrent_r getgrgid_r getgrnam_r fgetgrent_r
+
+ tests := testgrp
++ifneq (y,$(OPTION_EGLIBC_NSSWITCH))
++LDLIBS-testgrp += $(shell cat $(common-objpfx)nss/fixed-nsswitch-libs)
++endif
+
+ ifeq (yes,$(build-shared))
+ test-srcs := tst_fgetgrent
+Index: git/hesiod/Makefile
+===================================================================
+--- git.orig/hesiod/Makefile 2014-08-29 20:00:46.580070587 -0700
++++ git/hesiod/Makefile 2014-08-29 20:01:15.196070587 -0700
+@@ -18,12 +18,14 @@
+ #
+ # Sub-makefile for hesiod portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := hesiod
+
+ include ../Makeconfig
+
+-extra-libs := libnss_hesiod
+-extra-libs-others = $(extra-libs)
++extra-libs-$(OPTION_EGLIBC_INET) += libnss_hesiod
++extra-libs-others-y += $(extra-libs-y)
+
+ subdir-dirs = nss_hesiod
+ vpath %.c nss_hesiod
+Index: git/iconv/gconv_db.c
+===================================================================
+--- git.orig/iconv/gconv_db.c 2014-08-29 20:00:46.604070587 -0700
++++ git/iconv/gconv_db.c 2014-08-29 20:01:15.196070587 -0700
+@@ -25,6 +25,7 @@
+ #include <sys/param.h>
+ #include <bits/libc-lock.h>
+ #include <locale/localeinfo.h>
++#include <gnu/option-groups.h>
+
+ #include <dlfcn.h>
+ #include <gconv_int.h>
+@@ -828,9 +829,11 @@
+ /* Free all resources if necessary. */
+ libc_freeres_fn (free_mem)
+ {
++#if __OPTION_EGLIBC_LOCALE_CODE
+ /* First free locale memory. This needs to be done before freeing derivations,
+ as ctype cleanup functions dereference steps arrays which we free below. */
+ _nl_locale_subfreeres ();
++#endif
+
+ /* finddomain.c has similar problem. */
+ extern void _nl_finddomain_subfreeres (void) attribute_hidden;
+Index: git/iconv/gconv_trans.c
+===================================================================
+--- git.orig/iconv/gconv_trans.c 2014-08-29 20:00:46.612070587 -0700
++++ git/iconv/gconv_trans.c 2014-08-29 20:01:15.196070587 -0700
+@@ -23,6 +23,7 @@
+ #include <stdint.h>
+ #include <string.h>
+ #include <stdlib.h>
++#include <gnu/option-groups.h>
+
+ #include <bits/libc-lock.h>
+ #include "gconv_int.h"
+@@ -59,6 +60,7 @@
+ PTR_DEMANGLE (fct);
+ #endif
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ /* If there is no transliteration information in the locale don't do
+ anything and return the error. */
+ size = _NL_CURRENT_WORD (LC_CTYPE, _NL_CTYPE_TRANSLIT_TAB_SIZE);
+@@ -194,6 +196,7 @@
+ sorted. */
+ break;
+ }
++#endif
+
+ /* One last chance: use the default replacement. */
+ if (_NL_CURRENT_WORD (LC_CTYPE, _NL_CTYPE_TRANSLIT_DEFAULT_MISSING_LEN) != 0)
+Index: git/iconv/iconv_prog.c
+===================================================================
+--- git.orig/iconv/iconv_prog.c 2014-08-29 20:00:46.612070587 -0700
++++ git/iconv/iconv_prog.c 2014-08-29 20:01:15.196070587 -0700
+@@ -35,6 +35,7 @@
+ #ifdef _POSIX_MAPPED_FILES
+ # include <sys/mman.h>
+ #endif
++#include <gnu/option-groups.h>
+ #include <charmap.h>
+ #include <gconv_int.h>
+ #include "iconv_prog.h"
+@@ -221,10 +222,17 @@
+ bool to_wrong =
+ (iconv_open (to_code, "UTF-8") == (iconv_t) -1
+ && errno == EINVAL);
++#if __OPTION_EGLIBC_LOCALE_CODE
+ const char *from_pretty =
+ (from_code[0] ? from_code : nl_langinfo (CODESET));
+ const char *to_pretty =
+ (orig_to_code[0] ? orig_to_code : nl_langinfo (CODESET));
++#else
++ const char *from_pretty =
++ (from_code[0] ? from_code : "ANSI_X3.4-1968");
++ const char *to_pretty =
++ (orig_to_code[0] ? orig_to_code : "ANSI_X3.4-1968");
++#endif
+
+ if (from_wrong)
+ {
+Index: git/iconv/Makefile
+===================================================================
+--- git.orig/iconv/Makefile 2014-08-29 20:00:46.600070587 -0700
++++ git/iconv/Makefile 2014-08-29 20:01:15.196070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Makefile for iconv.
+ #
++include ../option-groups.mak
++
+ subdir := iconv
+
+ include ../Makeconfig
+@@ -57,6 +59,9 @@
+ CPPFLAGS-strtab = -DNOT_IN_libc
+ CPPFLAGS-charmap = -DNOT_IN_libc
+ CPPFLAGS-charmap-dir = -DNOT_IN_libc
++ifneq (y,$(OPTION_EGLIBC_SPAWN))
++CPPFLAGS-charmap-dir.c += -DNO_UNCOMPRESS
++endif
+
+ ifeq ($(run-built-tests),yes)
+ xtests-special += $(objpfx)test-iconvconfig.out
+Index: git/iconvdata/Makefile
+===================================================================
+--- git.orig/iconvdata/Makefile 2014-08-29 20:00:46.628070587 -0700
++++ git/iconvdata/Makefile 2014-08-29 20:01:15.196070587 -0700
+@@ -18,12 +18,15 @@
+ #
+ # Makefile for iconv data and code.
+ #
++include ../option-groups.mak
++
+ subdir := iconvdata
+
+ include ../Makeconfig
+
+ # Names of all the shared objects which implement the transformations.
+-modules := ISO8859-1 ISO8859-2 ISO8859-3 ISO8859-4 ISO8859-5 \
++modules-$(OPTION_EGLIBC_CHARSETS) \
++ := ISO8859-1 ISO8859-2 ISO8859-3 ISO8859-4 ISO8859-5 \
+ ISO8859-6 ISO8859-7 ISO8859-8 ISO8859-9 ISO8859-10 \
+ ISO8859-11 ISO8859-13 ISO8859-14 ISO8859-15 ISO8859-16 \
+ T.61 ISO_6937 SJIS KOI-8 HP-ROMAN8 HP-ROMAN9 EBCDIC-AT-DE \
+@@ -63,11 +66,13 @@
+ MAC-CENTRALEUROPE KOI8-RU ISO8859-9E \
+ CP770 CP771 CP772 CP773 CP774
+
+-modules.so := $(addsuffix .so, $(modules))
++modules.so := $(addsuffix .so, $(modules-y))
+
+ ifeq (yes,$(build-shared))
+ tests = bug-iconv1 bug-iconv2 tst-loading tst-e2big tst-iconv4 bug-iconv4 \
+- tst-iconv6 bug-iconv5 bug-iconv6 tst-iconv7 bug-iconv8 bug-iconv9
++ tst-iconv6 bug-iconv5 bug-iconv8 bug-iconv9
++tests-$(OPTION_EGLIBC_LOCALE_CODE) += bug-iconv6 tst-iconv7
++
+ ifeq ($(have-thread-library),yes)
+ tests += bug-iconv3
+ endif
+@@ -130,13 +135,13 @@
+ # Rule to generate the shared objects.
+ charmaps = ../localedata/charmaps
+ -include $(objpfx)iconv-rules
+-extra-modules-left := $(modules)
++extra-modules-left := $(modules-y)
+ include extra-module.mk
+
+
+ extra-objs += $(modules.so)
+-install-others = $(addprefix $(inst_gconvdir)/, $(modules.so)) \
+- $(inst_gconvdir)/gconv-modules
++install-others-y += $(addprefix $(inst_gconvdir)/, $(modules.so))
++install-others-$(OPTION_EGLIBC_CHARSETS) += $(inst_gconvdir)/gconv-modules
+
+ # We can build the conversion tables for numerous charsets automatically.
+
+@@ -204,7 +209,7 @@
+ ifndef avoid-generated
+ $(objpfx)iconv-rules: Makefile
+ $(make-target-directory)
+- { echo $(filter-out lib%, $(modules)); \
++ { echo $(filter-out lib%, $(modules-y)); \
+ echo 8bit $(gen-8bit-modules); \
+ echo 8bit-gap $(gen-8bit-gap-modules); } | \
+ LC_ALL=C \
+@@ -247,7 +252,7 @@
+ $(do-install-program)
+ $(inst_gconvdir)/gconv-modules: gconv-modules $(+force)
+ $(do-install)
+-ifeq (no,$(cross-compiling))
++# eglibc: ifeq (no,$(cross-compiling))
+ # Update the $(prefix)/lib/gconv/gconv-modules.cache file. This is necessary
+ # if this libc has more gconv modules than the previously installed one.
+ if test -f "$(inst_gconvdir)/gconv-modules.cache"; then \
+@@ -256,9 +261,9 @@
+ $(common-objpfx)iconv/iconvconfig \
+ $(addprefix --prefix=,$(install_root)); \
+ fi
+-else
+- @echo '*@*@*@ You should recreate $(inst_gconvdir)/gconv-modules.cache'
+-endif
++# eglibc: else
++# eglibc: @echo '*@*@*@ You should recreate $(inst_gconvdir)/gconv-modules.cache'
++# eglibc: endif
+
+ endif # build-shared = yes
+
+Index: git/include/netdb.h
+===================================================================
+--- git.orig/include/netdb.h 2014-08-29 20:00:47.152070587 -0700
++++ git/include/netdb.h 2014-08-29 20:01:15.196070587 -0700
+@@ -232,6 +232,10 @@
+ (const char *name, int af, struct hostent *host, \
+ char *buffer, size_t buflen, int *errnop, \
+ int *h_errnop); \
++extern enum nss_status _nss_ ## service ## _gethostbyname3_r \
++ (const char *name, int af, struct hostent *result, \
++ char *buffer, size_t buflen, int *errnop, \
++ int *h_errnop, int32_t *ttlp, char **canonp); \
+ extern enum nss_status _nss_ ## service ## _gethostbyname_r \
+ (const char *name, struct hostent *host, char *buffer, \
+ size_t buflen, int *errnop, int *h_errnop); \
+Index: git/inet/Makefile
+===================================================================
+--- git.orig/inet/Makefile 2014-08-29 20:00:47.176070587 -0700
++++ git/inet/Makefile 2014-08-29 20:01:15.200070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Sub-makefile for inet portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := inet
+
+ include ../Makeconfig
+@@ -27,7 +29,8 @@
+ netinet/tcp.h netinet/ip.h $(wildcard arpa/*.h protocols/*.h) \
+ aliases.h ifaddrs.h netinet/ip6.h netinet/icmp6.h bits/in.h
+
+-routines := htonl htons \
++routines-$(OPTION_EGLIBC_INET) \
++ += htonl htons \
+ inet_lnaof inet_mkadr \
+ inet_netof inet_ntoa inet_net herrno herrno-loc \
+ gethstbyad gethstbyad_r gethstbynm gethstbynm2 gethstbynm2_r \
+@@ -41,18 +44,23 @@
+ getrpcent_r getrpcbyname_r getrpcbynumber_r \
+ ether_aton ether_aton_r ether_hton ether_line \
+ ether_ntoa ether_ntoa_r ether_ntoh \
+- rcmd rexec ruserpass \
+ getnetgrent_r getnetgrent \
+- getaliasent_r getaliasent getaliasname getaliasname_r \
+- in6_addr getnameinfo if_index ifaddrs inet6_option \
++ in6_addr getnameinfo if_index ifaddrs \
+ getipv4sourcefilter setipv4sourcefilter \
+- getsourcefilter setsourcefilter inet6_opt inet6_rth
++ getsourcefilter setsourcefilter
++routines-$(OPTION_EGLIBC_RCMD) \
++ += rcmd rexec ruserpass
++routines-$(OPTION_EGLIBC_DB_ALIASES) \
++ += getaliasent_r getaliasent getaliasname getaliasname_r
++routines-$(OPTION_EGLIBC_ADVANCED_INET6) \
++ += inet6_option inet6_opt inet6_rth
+
+-aux := check_pf check_native ifreq
++aux-$(OPTION_EGLIBC_INET) += check_pf check_native ifreq
+
+ tests := htontest test_ifindex tst-ntoa tst-ether_aton tst-network \
+- tst-gethnm test-ifaddrs bug-if1 test-inet6_opt tst-ether_line \
++ tst-gethnm test-ifaddrs bug-if1 tst-ether_line \
+ tst-getni1 tst-getni2 tst-inet6_rth tst-checks
++tests-$(OPTION_EGLIBC_ADVANCED_INET6) += test-inet6_opt
+
+ include ../Rules
+
+Index: git/intl/dcigettext.c
+===================================================================
+--- git.orig/intl/dcigettext.c 2014-08-29 20:00:47.224070587 -0700
++++ git/intl/dcigettext.c 2014-08-29 20:01:15.200070587 -0700
+@@ -77,6 +77,10 @@
+ #endif
+ #include "hash-string.h"
+
++#ifdef _LIBC
++# include <gnu/option-groups.h>
++#endif
++
+ /* Thread safetyness. */
+ #ifdef _LIBC
+ # include <bits/libc-lock.h>
+@@ -449,9 +453,11 @@
+ #endif
+
+ #ifdef _LIBC
++#if __OPTION_EGLIBC_LOCALE_CODE
+ __libc_rwlock_define (extern, __libc_setlocale_lock attribute_hidden)
+ __libc_rwlock_rdlock (__libc_setlocale_lock);
+ #endif
++#endif
+
+ __libc_rwlock_rdlock (_nl_state_lock);
+
+@@ -470,7 +476,11 @@
+ search.category = category;
+ # ifdef HAVE_PER_THREAD_LOCALE
+ # ifdef _LIBC
++# if __OPTION_EGLIBC_LOCALE_CODE
+ localename = strdupa (__current_locale_name (category));
++# else
++ localename = "C";
++# endif
+ # endif
+ search.localename = localename;
+ # endif
+@@ -494,7 +504,9 @@
+ retval = (char *) (*foundp)->translation;
+
+ # ifdef _LIBC
++#if __OPTION_EGLIBC_LOCALE_CODE
+ __libc_rwlock_unlock (__libc_setlocale_lock);
++#endif
+ # endif
+ __libc_rwlock_unlock (_nl_state_lock);
+ return retval;
+@@ -611,7 +623,9 @@
+ {
+ no_translation:
+ FREE_BLOCKS (block_list);
++#if __OPTION_EGLIBC_LOCALE_CODE
+ __libc_rwlock_unlock (__libc_setlocale_lock);
++#endif
+ __libc_rwlock_unlock (_nl_state_lock);
+ __set_errno (saved_errno);
+ return (plural == 0
+@@ -730,7 +744,9 @@
+ if (plural)
+ retval = plural_lookup (domain, n, retval, retlen);
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ __libc_rwlock_unlock (__libc_setlocale_lock);
++#endif
+ __libc_rwlock_unlock (_nl_state_lock);
+ return retval;
+ }
+@@ -1361,7 +1377,11 @@
+ `LC_xxx', and `LANG'. On some systems this can be done by the
+ `setlocale' function itself. */
+ #ifdef _LIBC
++# if __OPTION_EGLIBC_LOCALE_CODE
+ retval = __current_locale_name (category);
++# else
++ retval = "C";
++# endif
+ #else
+ retval = _nl_locale_name (category, categoryname);
+ #endif
+Index: git/intl/Makefile
+===================================================================
+--- git.orig/intl/Makefile 2014-08-29 20:00:47.220070587 -0700
++++ git/intl/Makefile 2014-08-29 20:01:15.200070587 -0700
+@@ -16,6 +16,7 @@
+ # <http://www.gnu.org/licenses/>.
+
+ # Makefile for intl subdirectory: message handling code from GNU gettext.
++include ../option-groups.mak
+
+ subdir = intl
+
+@@ -48,7 +49,7 @@
+ $(objpfx)plural.o: plural.c
+
+ ifeq ($(run-built-tests),yes)
+-ifeq (yes,$(build-shared))
++ifeq (yyyes,$(OPTION_EGLIBC_LOCALES)$(OPTION_EGLIBC_LOCALE_CODE)$(build-shared))
+ ifneq ($(strip $(MSGFMT)),:)
+ tests-special += $(objpfx)tst-translit.out $(objpfx)tst-gettext.out \
+ $(objpfx)tst-gettext2.out $(objpfx)tst-codeset.out \
+Index: git/io/Makefile
+===================================================================
+--- git.orig/io/Makefile 2014-08-29 20:00:47.244070587 -0700
++++ git/io/Makefile 2014-08-29 20:01:15.200070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Sub-makefile for I/O portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := io
+
+ include ../Makeconfig
+@@ -36,7 +38,7 @@
+ fxstatat fxstatat64 \
+ statfs fstatfs statfs64 fstatfs64 \
+ statvfs fstatvfs statvfs64 fstatvfs64 \
+- umask chmod fchmod lchmod fchmodat \
++ umask chmod fchmod fchmodat \
+ mkdir mkdirat \
+ open open_2 open64 open64_2 openat openat_2 openat64 openat64_2 \
+ read write lseek lseek64 access euidaccess faccessat \
+@@ -49,11 +51,13 @@
+ ttyname ttyname_r isatty \
+ link linkat symlink symlinkat readlink readlinkat \
+ unlink unlinkat rmdir \
+- ftw ftw64 fts poll ppoll \
++ poll ppoll \
+ posix_fadvise posix_fadvise64 \
+ posix_fallocate posix_fallocate64 \
+ sendfile sendfile64 \
+ utimensat futimens
++routines-$(OPTION_EGLIBC_BSD) += lchmod
++routines-$(OPTION_EGLIBC_FTRAVERSE) += ftw ftw64 fts
+
+ aux := have_o_cloexec
+
+@@ -64,18 +68,22 @@
+ fstatat fstatat64 mknod mknodat
+
+ others := pwd
+-test-srcs := ftwtest
++test-srcs-$(OPTION_EGLIBC_FTRAVERSE) := ftwtest
+ tests := test-utime test-stat test-stat2 test-lfs tst-getcwd \
+- tst-fcntl bug-ftw1 bug-ftw2 bug-ftw3 bug-ftw4 tst-statvfs \
++ tst-fcntl tst-statvfs \
+ tst-openat tst-unlinkat tst-fstatat tst-futimesat \
+ tst-renameat tst-fchownat tst-fchmodat tst-faccessat \
+ tst-symlinkat tst-linkat tst-readlinkat tst-mkdirat \
+- tst-mknodat tst-mkfifoat tst-ttyname_r bug-ftw5 \
++ tst-mknodat tst-mkfifoat tst-ttyname_r \
+ tst-posix_fallocate
++tests-$(OPTION_EGLIBC_FTRAVERSE) += bug-ftw1 bug-ftw2 bug-ftw3 bug-ftw4 \
++ bug-ftw5
+
+ ifeq ($(run-built-tests),yes)
++ifeq (y,$(OPTION_EGLIBC_FTRAVERSE))
+ tests-special += $(objpfx)ftwtest.out
+ endif
++endif
+
+ include ../Rules
+
+Index: git/libidn/Makefile
+===================================================================
+--- git.orig/libidn/Makefile 2014-08-29 20:00:47.316070587 -0700
++++ git/libidn/Makefile 2014-08-29 20:01:15.200070587 -0700
+@@ -16,6 +16,7 @@
+ # <http://www.gnu.org/licenses/>.
+
+ # Makefile for libidn subdirectory of GNU C Library.
++include ../option-groups.mak
+
+ subdir := libidn
+
+@@ -23,8 +24,8 @@
+
+ routines = idn-stub
+
+-extra-libs = libcidn
+-extra-libs-others = $(extra-libs)
++extra-libs-$(OPTION_EGLIBC_IDN) = libcidn
++extra-libs-others-y = $(extra-libs-y)
+
+ libcidn-routines := punycode toutf8 nfkc stringprep rfc3454 profiles idna \
+ iconvme
+Index: git/libidn/toutf8.c
+===================================================================
+--- git.orig/libidn/toutf8.c 2014-08-29 20:00:47.332070587 -0700
++++ git/libidn/toutf8.c 2014-08-29 20:01:15.200070587 -0700
+@@ -33,6 +33,11 @@
+ /* Get strlen. */
+ #include <string.h>
+
++/* Get __OPTION_EGLIBC_LOCALE_CODE. */
++#ifdef _LIBC
++# include <gnu/option-groups.h>
++#endif
++
+ /* Get iconv_string. */
+ #include "iconvme.h"
+
+@@ -47,7 +52,11 @@
+ #endif
+
+ #ifdef _LIBC
+-# define stringprep_locale_charset() nl_langinfo (CODESET)
++# if __OPTION_EGLIBC_LOCALE_CODE
++# define stringprep_locale_charset() nl_langinfo (CODESET)
++# else
++# define stringprep_locale_charset() "ANSI_X3.4-1968"
++# endif
+ #else
+ /**
+ * stringprep_locale_charset - return charset used in current locale
+Index: git/libio/fileops.c
+===================================================================
+--- git.orig/libio/fileops.c 2014-08-29 20:00:47.352070587 -0700
++++ git/libio/fileops.c 2014-08-29 20:01:15.200070587 -0700
+@@ -38,6 +38,7 @@
+ #include <string.h>
+ #include <errno.h>
+ #include <unistd.h>
++#include <gnu/option-groups.h>
+ #include <stdlib.h>
+ #if _LIBC
+ # include "../wcsmbs/wcsmbsload.h"
+@@ -174,7 +175,7 @@
+
+ /* Free buffer. */
+ #if defined _LIBC || defined _GLIBCPP_USE_WCHAR_T
+- if (fp->_mode > 0)
++ if (_IO_is_wide (fp))
+ {
+ if (_IO_have_wbackup (fp))
+ _IO_free_wbackup_area (fp);
+@@ -359,6 +360,7 @@
+ cs = strstr (last_recognized + 1, ",ccs=");
+ if (cs != NULL)
+ {
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ /* Yep. Load the appropriate conversions and set the orientation
+ to wide. */
+ struct gconv_fcts fcts;
+@@ -423,6 +425,12 @@
+
+ /* Set the mode now. */
+ result->_mode = 1;
++#else
++ /* Treat this as if we couldn't find the given character set. */
++ (void) _IO_file_close_it (fp);
++ __set_errno (EINVAL);
++ return NULL;
++#endif
+ }
+ }
+
+Index: git/libio/__fpurge.c
+===================================================================
+--- git.orig/libio/__fpurge.c 2014-08-29 20:00:47.336070587 -0700
++++ git/libio/__fpurge.c 2014-08-29 20:01:15.200070587 -0700
+@@ -21,7 +21,7 @@
+ void
+ __fpurge (FILE *fp)
+ {
+- if (fp->_mode > 0)
++ if (_IO_is_wide (fp))
+ {
+ /* Wide-char stream. */
+ if (_IO_in_backup (fp))
+Index: git/libio/iofwide.c
+===================================================================
+--- git.orig/libio/iofwide.c 2014-08-29 20:00:47.360070587 -0700
++++ git/libio/iofwide.c 2014-08-29 20:01:15.200070587 -0700
+@@ -26,6 +26,7 @@
+
+ #include <libioP.h>
+ #ifdef _LIBC
++# include <gnu/option-groups.h>
+ # include <dlfcn.h>
+ # include <wchar.h>
+ #endif
+@@ -43,6 +44,8 @@
+ #endif
+
+
++#if ! defined _LIBC || __OPTION_POSIX_C_LANG_WIDE_CHAR
++
+ /* Prototypes of libio's codecvt functions. */
+ static enum __codecvt_result do_out (struct _IO_codecvt *codecvt,
+ __mbstate_t *statep,
+@@ -513,3 +516,26 @@
+ return MB_CUR_MAX;
+ #endif
+ }
++
++#else
++/* OPTION_POSIX_C_LANG_WIDE_CHAR is disabled. */
++
++#undef _IO_fwide
++int
++_IO_fwide (fp, mode)
++ _IO_FILE *fp;
++ int mode;
++{
++ /* Die helpfully if the user tries to create a wide stream; I
++ disbelieve that most users check the return value from
++ 'fwide (fp, 1)'. */
++ assert (mode <= 0);
++
++ /* We can only make streams byte-oriented, which is trivial. */
++ if (mode < 0)
++ fp->_mode = -1;
++
++ return fp->_mode;
++}
++
++#endif
+Index: git/libio/ioseekoff.c
+===================================================================
+--- git.orig/libio/ioseekoff.c 2014-08-29 20:00:47.364070587 -0700
++++ git/libio/ioseekoff.c 2014-08-29 20:01:15.200070587 -0700
+@@ -60,7 +60,7 @@
+ else
+ abort ();
+ }
+- if (_IO_fwide (fp, 0) < 0)
++ if (! _IO_is_wide (fp))
+ _IO_free_backup_area (fp);
+ else
+ _IO_free_wbackup_area (fp);
+Index: git/libio/ioseekpos.c
+===================================================================
+--- git.orig/libio/ioseekpos.c 2014-08-29 20:00:47.364070587 -0700
++++ git/libio/ioseekpos.c 2014-08-29 20:01:15.200070587 -0700
+@@ -35,7 +35,7 @@
+ /* If we have a backup buffer, get rid of it, since the __seekoff
+ callback may not know to do the right thing about it.
+ This may be over-kill, but it'll do for now. TODO */
+- if (_IO_fwide (fp, 0) <= 0)
++ if (! _IO_is_wide (fp))
+ {
+ if (_IO_have_backup (fp))
+ _IO_free_backup_area (fp);
+Index: git/libio/iosetbuffer.c
+===================================================================
+--- git.orig/libio/iosetbuffer.c 2014-08-29 20:00:47.364070587 -0700
++++ git/libio/iosetbuffer.c 2014-08-29 20:01:15.204070587 -0700
+@@ -24,6 +24,8 @@
+ This exception applies to code released by its copyright holders
+ in files containing the exception. */
+
++#include <gnu/option-groups.h>
++
+ #include "libioP.h"
+
+ void
+@@ -38,9 +40,11 @@
+ if (!buf)
+ size = 0;
+ (void) _IO_SETBUF (fp, buf, size);
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
+ if (_IO_vtable_offset (fp) == 0 && fp->_mode == 0 && _IO_CHECK_WIDE (fp))
+ /* We also have to set the buffer using the wide char function. */
+ (void) _IO_WSETBUF (fp, buf, size);
++#endif /* __OPTION_POSIX_C_LANG_WIDE_CHAR */
+ _IO_release_lock (fp);
+ }
+ libc_hidden_def (_IO_setbuffer)
+Index: git/libio/libioP.h
+===================================================================
+--- git.orig/libio/libioP.h 2014-08-29 20:00:47.372070587 -0700
++++ git/libio/libioP.h 2014-08-29 20:01:15.204070587 -0700
+@@ -42,6 +42,10 @@
+ /*# include <comthread.h>*/
+ #endif
+
++#if defined _LIBC
++# include <gnu/option-groups.h>
++#endif
++
+ #include <math_ldbl_opt.h>
+
+ #include "iolibio.h"
+@@ -508,8 +512,20 @@
+
+
+ #if defined _LIBC || defined _GLIBCPP_USE_WCHAR_T
++
++/* _IO_is_wide (fp) is roughly equivalent to '_IO_fwide (fp, 0) > 0',
++ except that when OPTION_POSIX_C_LANG_WIDE_CHAR is disabled, it
++ expands to a constant, allowing the compiler to realize that it can
++ eliminate code that references wide stream handling functions.
++ This, in turn, allows us to omit them. */
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
++# define _IO_is_wide(_f) ((_f)->_mode > 0)
++#else
++# define _IO_is_wide(_f) (0)
++#endif
++
+ # define _IO_do_flush(_f) \
+- ((_f)->_mode <= 0 \
++ (! _IO_is_wide (_f) \
+ ? _IO_do_write(_f, (_f)->_IO_write_base, \
+ (_f)->_IO_write_ptr-(_f)->_IO_write_base) \
+ : _IO_wdo_write(_f, (_f)->_wide_data->_IO_write_base, \
+Index: git/libio/Makefile
+===================================================================
+--- git.orig/libio/Makefile 2014-08-29 20:00:47.332070587 -0700
++++ git/libio/Makefile 2014-08-29 20:01:15.204070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Specific makefile for libio.
+ #
++include ../option-groups.mak
++
+ subdir := libio
+
+ include ../Makeconfig
+@@ -27,16 +29,13 @@
+
+ routines := \
+ filedoalloc iofclose iofdopen iofflush iofgetpos iofgets iofopen \
+- iofopncook iofputs iofread iofsetpos ioftell wfiledoalloc \
++ iofopncook iofputs iofread iofsetpos ioftell \
+ iofwrite iogetdelim iogetline iogets iopadn iopopen ioputs \
+ ioseekoff ioseekpos iosetbuffer iosetvbuf ioungetc \
+ iovsprintf iovsscanf \
+ iofgetpos64 iofopen64 iofsetpos64 \
+- fputwc fputwc_u getwc getwc_u getwchar getwchar_u iofgetws iofgetws_u \
+- iofputws iofputws_u iogetwline iowpadn ioungetwc putwc putwc_u \
+- putwchar putwchar_u putchar putchar_u fwprintf swprintf vwprintf \
+- wprintf wscanf fwscanf vwscanf vswprintf iovswscanf swscanf wgenops \
+- wstrops wfileops iofwide fwide wmemstream \
++ putchar putchar_u \
++ iofwide \
+ \
+ clearerr feof ferror fileno fputc freopen fseek getc getchar \
+ memstream pclose putc putchar rewind setbuf setlinebuf vasprintf \
+@@ -47,25 +46,48 @@
+ __fpurge __fpending __fsetlocking \
+ \
+ libc_fatal fmemopen
+-
+-tests = tst_swprintf tst_wprintf tst_swscanf tst_wscanf tst_getwc tst_putwc \
+- tst_wprintf2 tst-widetext test-fmemopen tst-ext tst-ext2 \
+- tst-fgetws tst-ungetwc1 tst-ungetwc2 tst-swscanf tst-sscanf \
+- tst-mmap-setvbuf bug-ungetwc1 bug-ungetwc2 tst-atime tst-eof \
+- tst-freopen bug-rewind bug-rewind2 bug-ungetc bug-fseek \
+- tst-mmap-eofsync tst-mmap-fflushsync bug-mmap-fflush \
+- tst-mmap2-eofsync tst-mmap-offend bug-fopena+ bug-wfflush \
+- bug-ungetc2 bug-ftell bug-ungetc3 bug-ungetc4 tst-fopenloc2 \
+- tst-memstream1 tst-memstream2 \
+- tst-wmemstream1 tst-wmemstream2 \
+- bug-memstream1 bug-wmemstream1 \
+- tst-setvbuf1 tst-popen1 tst-fgetwc bug-wsetpos tst-fseek \
+- tst-fwrite-error tst-ftell-partial-wide tst-ftell-active-handler \
+- tst-ftell-append
++routines-$(OPTION_POSIX_C_LANG_WIDE_CHAR) += \
++ wfiledoalloc \
++ iowpadn \
++ swprintf \
++ vswprintf iovswscanf swscanf wgenops \
++ wstrops wfileops wmemstream
++routines-$(call option-disabled, OPTION_POSIX_C_LANG_WIDE_CHAR) += \
++ wdummyfileops
++routines-$(OPTION_POSIX_WIDE_CHAR_DEVICE_IO) += \
++ fputwc fputwc_u getwc getwc_u getwchar getwchar_u iofgetws iofgetws_u \
++ iofputws iofputws_u iogetwline ioungetwc putwc putwc_u \
++ putwchar putwchar_u fwprintf vwprintf \
++ wprintf wscanf fwscanf vwscanf \
++ fwide
++
++tests = test-fmemopen tst-ext tst-ext2 \
++ tst-mmap-setvbuf tst-atime tst-eof \
++ tst-freopen bug-ungetc bug-fseek \
++ tst-mmap-eofsync tst-mmap-fflushsync bug-mmap-fflush \
++ tst-mmap2-eofsync tst-mmap-offend bug-fopena+ \
++ bug-ungetc2 bug-ungetc3 bug-ungetc4 \
++ tst-memstream1 tst-memstream2 \
++ bug-memstream1 tst-popen1 tst-fwrite-error \
++ tst-ftell-active-handler tst-ftell-append
++tests-$(OPTION_EGLIBC_LOCALE_CODE) \
++ += tst-swscanf tst-fgetws tst-setvbuf1 \
++ tst-ungetwc1 tst-ungetwc2 bug-ftell bug-ungetwc2 \
++ tst-widetext
++tests-$(OPTION_POSIX_WIDE_CHAR_DEVICE_IO) \
++ += bug-rewind bug-rewind2 bug-ungetwc1 \
++ bug-wfflush bug-wmemstream1 tst-fopenloc2 \
++ tst_getwc \
++ tst_putwc tst_wprintf tst_wprintf2 tst_wscanf \
++ tst-fgetwc bug-wsetpos tst-fseek tst-ftell-partial-wide
++tests-$(OPTION_POSIX_C_LANG_WIDE_CHAR) \
++ += tst_swprintf tst_swscanf \
++ tst-sscanf \
++ tst-wmemstream1 tst-wmemstream2
+ ifeq (yes,$(build-shared))
+ # Add test-fopenloc only if shared library is enabled since it depends on
+ # shared localedata objects.
+-tests += tst-fopenloc
++tests-$(OPTION_EGLIBC_LOCALE_CODE) += tst-fopenloc
+ endif
+ test-srcs = test-freopen
+
+@@ -164,13 +186,17 @@
+ oldiofsetpos64
+
+ ifeq ($(run-built-tests),yes)
++ifeq (y,$(OPTION_POSIX_WIDE_CHAR_DEVICE_IO))
+ tests-special += $(objpfx)test-freopen.out
++endif
++ifeq (y,$(OPTION_EGLIBC_LOCALE_CODE))
+ ifeq (yes,$(build-shared))
+ # Run tst-fopenloc-cmp.out and tst-openloc-mem.out only if shared
+ # library is enabled since they depend on tst-fopenloc.out.
+ tests-special += $(objpfx)tst-fopenloc-cmp.out $(objpfx)tst-fopenloc-mem.out
+ endif
+ endif
++endif
+
+ include ../Rules
+
+Index: git/libio/wdummyfileops.c
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/libio/wdummyfileops.c 2014-08-29 20:01:15.204070587 -0700
+@@ -0,0 +1,161 @@
++/* Copyright (C) 2007 Free Software Foundation, Inc.
++ This file is part of the GNU C Library.
++
++ The GNU C Library is free software; you can redistribute it and/or
++ modify it under the terms of the GNU Lesser General Public
++ License as published by the Free Software Foundation; either
++ version 2.1 of the License, or (at your option) any later version.
++
++ The GNU C Library is distributed in the hope that it will be useful,
++ but WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ Lesser General Public License for more details.
++
++ You should have received a copy of the GNU Lesser General Public
++ License along with the GNU C Library; if not, write to the Free
++ Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
++ 02111-1307 USA.
++
++ As a special exception, if you link the code in this file with
++ files compiled with a GNU compiler to produce an executable,
++ that does not cause the resulting executable to be covered by
++ the GNU Lesser General Public License. This exception does not
++ however invalidate any other reasons why the executable file
++ might be covered by the GNU Lesser General Public License.
++ This exception applies to code released by its copyright holders
++ in files containing the exception. */
++
++#include <assert.h>
++#include <stdio.h>
++#include <stdlib.h>
++#include <libioP.h>
++
++static void __THROW __attribute__ ((__noreturn__))
++_IO_wfile_wide_char_support_disabled (void)
++{
++ static const char errstr[]
++ = ("The application tried to use wide character I/O, but libc.so"
++ " was compiled\n"
++ "with the OPTION_POSIX_C_LANG_WIDE_CHAR option group disabled.\n");
++ __libc_write (STDERR_FILENO, errstr, sizeof (errstr) - 1);
++ abort ();
++}
++
++static void
++_IO_wfile_disabled_void_int (_IO_FILE *fp, int x)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static int
++_IO_wfile_disabled_int_int (_IO_FILE *fp, int x)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static int
++_IO_wfile_disabled_int_none (_IO_FILE *fp)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static _IO_size_t
++_IO_wfile_disabled_xsputn (_IO_FILE *fp, const void *data, _IO_size_t n)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static _IO_size_t
++_IO_wfile_disabled_xsgetn (_IO_FILE *fp, void *data, _IO_size_t n)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static _IO_off64_t
++_IO_wfile_disabled_seekoff (_IO_FILE *fp, _IO_off64_t off, int dir, int mode)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static _IO_off64_t
++_IO_wfile_disabled_seekpos (_IO_FILE *fp, _IO_off64_t pos, int flags)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static _IO_FILE *
++_IO_wfile_disabled_setbuf (_IO_FILE *fp, char *buffer, _IO_ssize_t length)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static _IO_ssize_t
++_IO_wfile_disabled_read (_IO_FILE *fp, void *buffer, _IO_ssize_t length)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static _IO_ssize_t
++_IO_wfile_disabled_write (_IO_FILE *fp, const void *buffer, _IO_ssize_t length)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static _IO_off64_t
++_IO_wfile_disabled_seek (_IO_FILE *fp, _IO_off64_t offset, int mode)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static int
++_IO_wfile_disabled_close (_IO_FILE *fp)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static int
++_IO_wfile_disabled_stat (_IO_FILE *fp, void *buf)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static int
++_IO_wfile_disabled_showmanyc (_IO_FILE *fp)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static void
++_IO_wfile_disabled_imbue (_IO_FILE *fp, void *locale)
++{
++ _IO_wfile_wide_char_support_disabled ();
++}
++
++static const struct _IO_jump_t _IO_wfile_jumps_disabled =
++{
++ JUMP_INIT_DUMMY,
++ JUMP_INIT(finish, _IO_wfile_disabled_void_int),
++ JUMP_INIT(overflow, _IO_wfile_disabled_int_int),
++ JUMP_INIT(underflow, _IO_wfile_disabled_int_none),
++ JUMP_INIT(uflow, _IO_wfile_disabled_int_none),
++ JUMP_INIT(pbackfail, _IO_wfile_disabled_int_int),
++ JUMP_INIT(xsputn, _IO_wfile_disabled_xsputn),
++ JUMP_INIT(xsgetn, _IO_wfile_disabled_xsgetn),
++ JUMP_INIT(seekoff, _IO_wfile_disabled_seekoff),
++ JUMP_INIT(seekpos, _IO_wfile_disabled_seekpos),
++ JUMP_INIT(setbuf, _IO_wfile_disabled_setbuf),
++ JUMP_INIT(sync, _IO_wfile_disabled_int_none),
++ JUMP_INIT(doallocate, _IO_wfile_disabled_int_none),
++ JUMP_INIT(read, _IO_wfile_disabled_read),
++ JUMP_INIT(write, _IO_wfile_disabled_write),
++ JUMP_INIT(seek, _IO_wfile_disabled_seek),
++ JUMP_INIT(close, _IO_wfile_disabled_close),
++ JUMP_INIT(stat, _IO_wfile_disabled_stat),
++ JUMP_INIT(showmanyc, _IO_wfile_disabled_showmanyc),
++ JUMP_INIT(imbue, _IO_wfile_disabled_imbue)
++};
++
++strong_alias (_IO_wfile_jumps_disabled, _IO_wfile_jumps)
++libc_hidden_data_def (_IO_wfile_jumps)
++strong_alias (_IO_wfile_jumps_disabled, _IO_wfile_jumps_mmap)
++strong_alias (_IO_wfile_jumps_disabled, _IO_wfile_jumps_maybe_mmap)
+Index: git/locale/catnames.c
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/locale/catnames.c 2014-08-29 20:01:15.204070587 -0700
+@@ -0,0 +1,48 @@
++/* Copyright (C) 2006 Free Software Foundation, Inc.
++ This file is part of the GNU C Library.
++
++ The GNU C Library is free software; you can redistribute it and/or
++ modify it under the terms of the GNU Lesser General Public
++ License as published by the Free Software Foundation; either
++ version 2.1 of the License, or (at your option) any later version.
++
++ The GNU C Library is distributed in the hope that it will be useful,
++ but WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ Lesser General Public License for more details.
++
++ You should have received a copy of the GNU Lesser General Public
++ License along with the GNU C Library; if not, write to the Free
++ Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
++ 02111-1307 USA. */
++
++#include "localeinfo.h"
++
++/* Define an array of category names (also the environment variable names). */
++const union catnamestr_t _nl_category_names attribute_hidden =
++ {
++ {
++#define DEFINE_CATEGORY(category, category_name, items, a) \
++ category_name,
++#include "categories.def"
++#undef DEFINE_CATEGORY
++ }
++ };
++
++const uint8_t _nl_category_name_idxs[__LC_LAST] attribute_hidden =
++ {
++#define DEFINE_CATEGORY(category, category_name, items, a) \
++ [category] = offsetof (union catnamestr_t, CATNAMEMF (__LINE__)),
++#include "categories.def"
++#undef DEFINE_CATEGORY
++ };
++
++/* An array of their lengths, for convenience. */
++const uint8_t _nl_category_name_sizes[] attribute_hidden =
++ {
++#define DEFINE_CATEGORY(category, category_name, items, a) \
++ [category] = sizeof (category_name) - 1,
++#include "categories.def"
++#undef DEFINE_CATEGORY
++ [LC_ALL] = sizeof ("LC_ALL") - 1
++ };
+Index: git/locale/C-ctype.c
+===================================================================
+--- git.orig/locale/C-ctype.c 2014-08-29 20:00:47.396070587 -0700
++++ git/locale/C-ctype.c 2014-08-29 20:01:15.204070587 -0700
+@@ -19,8 +19,11 @@
+ #include "localeinfo.h"
+ #include <endian.h>
+ #include <stdint.h>
++#include <gnu/option-groups.h>
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ #include "C-translit.h"
++#endif
+
+ /* This table's entries are taken from POSIX.2 Table 2-6
+ ``LC_CTYPE Category Definition in the POSIX Locale''.
+@@ -647,6 +650,7 @@
+ { .word = L'7' },
+ { .word = L'8' },
+ { .word = L'9' },
++#if __OPTION_EGLIBC_LOCALE_CODE
+ /* _NL_CTYPE_TRANSLIT_TAB_SIZE */
+ { .word = NTRANSLIT },
+ /* _NL_CTYPE_TRANSLIT_FROM_IDX */
+@@ -657,6 +661,22 @@
+ { .wstr = translit_to_idx },
+ /* _NL_CTYPE_TRANSLIT_TO_TBL */
+ { .wstr = (uint32_t *) translit_to_tbl },
++#else
++ /* If the locale code isn't enabled, we don't have the
++ transliteration code in iconv/gconv_trans.c anyway, so there's
++ no need for the transliteration tables here. We'll fall back
++ on the default missing replacement, '?'. */
++ /* _NL_CTYPE_TRANSLIT_TAB_SIZE */
++ { .word = 0 },
++ /* _NL_CTYPE_TRANSLIT_FROM_IDX */
++ { .wstr = NULL },
++ /* _NL_CTYPE_TRANSLIT_FROM_TBL */
++ { .wstr = NULL },
++ /* _NL_CTYPE_TRANSLIT_TO_IDX */
++ { .wstr = NULL },
++ /* _NL_CTYPE_TRANSLIT_TO_TBL */
++ { .wstr = NULL },
++#endif
+ /* _NL_CTYPE_TRANSLIT_DEFAULT_MISSING_LEN */
+ { .word = 1 },
+ /* _NL_CTYPE_TRANSLIT_DEFAULT_MISSING */
+Index: git/locale/dummy-setlocale.c
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/locale/dummy-setlocale.c 2014-08-29 20:01:15.204070587 -0700
+@@ -0,0 +1,33 @@
++/* Copyright (C) 2006 Free Software Foundation, Inc.
++ This file is part of the GNU C Library.
++
++ The GNU C Library is free software; you can redistribute it and/or
++ modify it under the terms of the GNU Lesser General Public
++ License as published by the Free Software Foundation; either
++ version 2.1 of the License, or (at your option) any later version.
++
++ The GNU C Library is distributed in the hope that it will be useful,
++ but WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ Lesser General Public License for more details.
++
++ You should have received a copy of the GNU Lesser General Public
++ License along with the GNU C Library; if not, write to the Free
++ Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
++ 02111-1307 USA. */
++
++#include <string.h>
++#include <locale.h>
++
++char *
++setlocale (int category, const char *locale)
++{
++ if (! locale
++ || locale[0] == '\0'
++ || strcmp (locale, "C") == 0
++ || strcmp (locale, "POSIX") == 0)
++ return (char *) "C";
++ else
++ return NULL;
++}
++libc_hidden_def (setlocale)
+Index: git/locale/localeinfo.h
+===================================================================
+--- git.orig/locale/localeinfo.h 2014-08-29 20:00:47.404070587 -0700
++++ git/locale/localeinfo.h 2014-08-29 20:01:15.204070587 -0700
+@@ -224,7 +224,7 @@
+ unused. We can manage this playing some tricks with weak references.
+ But with thread-local locale settings, it becomes quite ungainly unless
+ we can use __thread variables. So only in that case do we attempt this. */
+-#ifndef SHARED
++#if !defined SHARED && !defined IN_GLIBC_LOCALEDEF
+ # include <tls.h>
+ # define NL_CURRENT_INDIRECT 1
+ #endif
+Index: git/locale/Makefile
+===================================================================
+--- git.orig/locale/Makefile 2014-08-29 20:00:47.400070587 -0700
++++ git/locale/Makefile 2014-08-29 20:01:15.204070587 -0700
+@@ -18,27 +18,43 @@
+ #
+ # Makefile for locales.
+ #
++include ../option-groups.mak
++
+ subdir := locale
+
+ include ../Makeconfig
+
+ headers = locale.h bits/locale.h langinfo.h xlocale.h
+-routines = setlocale findlocale loadlocale loadarchive \
+- localeconv nl_langinfo nl_langinfo_l mb_cur_max \
+- newlocale duplocale freelocale uselocale
+-tests = tst-C-locale tst-locname tst-duplocale
++# catnames is needed by OPTION_EGLIBC_LOCALE_CODE and by the 'intl' code.
++# If we put the latter in an option group, too, we can omit catnames
++# when both option groups are disabled. libstdc++-v3 needs mb_cur_max.
++routines-y := catnames mb_cur_max
++routines-$(OPTION_EGLIBC_LOCALE_CODE) \
++ += setlocale findlocale loadlocale loadarchive \
++ localeconv nl_langinfo nl_langinfo_l \
++ newlocale duplocale freelocale uselocale
++ifneq (y,$(OPTION_EGLIBC_LOCALE_CODE))
++routines-y += dummy-setlocale
++endif
++tests-$(OPTION_EGLIBC_LOCALE_CODE) += tst-C-locale tst-locname tst-duplocale
+ categories = ctype messages monetary numeric time paper name \
+ address telephone measurement identification collate
+-aux = $(categories:%=lc-%) $(categories:%=C-%) SYS_libc C_name \
+- xlocale localename global-locale coll-lookup
+-others = localedef locale
++# C-messages belongs in an intl option group.
++aux-y := C-ctype C-time \
++ SYS_libc C_name xlocale global-locale coll-lookup
++aux-$(OPTION_EGLIBC_LOCALE_CODE) \
++ += $(filter-out $(aux-y), \
++ $(categories:%=lc-%) $(categories:%=C-%)) \
++ localename
++others-$(OPTION_EGLIBC_LOCALE_CODE) = localedef locale
+ #others-static = localedef locale
+-install-bin = localedef locale
+-extra-objs = $(localedef-modules:=.o) $(localedef-aux:=.o) \
++install-bin = $(others-y)
++extra-objs-$(OPTION_EGLIBC_LOCALE_CODE) \
++ = $(localedef-modules:=.o) $(localedef-aux:=.o) \
+ $(locale-modules:=.o) $(lib-modules:=.o)
+
+-extra-libs = libBrokenLocale
+-extra-libs-others = $(extra-libs)
++extra-libs-$(OPTION_EGLIBC_LOCALE_CODE) = libBrokenLocale
++extra-libs-others = $(extra-libs-y)
+
+ libBrokenLocale-routines = broken_cur_max
+
+@@ -94,6 +110,9 @@
+ CFLAGS-charmap.c = -Wno-write-strings -Wno-char-subscripts
+ CFLAGS-locfile.c = -Wno-write-strings -Wno-char-subscripts
+ CFLAGS-charmap-dir.c = -Wno-write-strings
++ifneq (y,$(OPTION_EGLIBC_SPAWN))
++CFLAGS-charmap-dir.c += -DNO_UNCOMPRESS
++endif
+
+ # This makes sure -DNOT_IN_libc et al are passed for all these modules.
+ cpp-srcs-left := $(addsuffix .c,$(localedef-modules) $(localedef-aux) \
+Index: git/locale/programs/charmap-dir.c
+===================================================================
+--- git.orig/locale/programs/charmap-dir.c 2014-08-29 20:00:47.408070587 -0700
++++ git/locale/programs/charmap-dir.c 2014-08-29 20:01:15.204070587 -0700
+@@ -19,7 +19,9 @@
+ #include <error.h>
+ #include <fcntl.h>
+ #include <libintl.h>
++#ifndef NO_UNCOMPRESS
+ #include <spawn.h>
++#endif
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <string.h>
+@@ -156,6 +158,7 @@
+ return closedir (dir);
+ }
+
++#ifndef NO_UNCOMPRESS
+ /* Creates a subprocess decompressing the given pathname, and returns
+ a stream reading its output (the decompressed data). */
+ static
+@@ -204,6 +207,7 @@
+ }
+ return NULL;
+ }
++#endif
+
+ /* Opens a charmap for reading, given its name (not an alias name). */
+ FILE *
+@@ -226,6 +230,7 @@
+ if (stream != NULL)
+ return stream;
+
++#ifndef NO_UNCOMPRESS
+ memcpy (p, ".gz", 4);
+ stream = fopen_uncompressed (pathname, "gzip");
+ if (stream != NULL)
+@@ -235,6 +240,7 @@
+ stream = fopen_uncompressed (pathname, "bzip2");
+ if (stream != NULL)
+ return stream;
++#endif
+
+ return NULL;
+ }
+@@ -263,8 +269,8 @@
+ char *alias = NULL;
+ char junk[BUFSIZ];
+
+- if (fscanf (stream, " <code_set_name> %ms", &alias) == 1
+- || fscanf (stream, "%% alias %ms", &alias) == 1)
++ if (fscanf (stream, " <code_set_name> %as", &alias) == 1
++ || fscanf (stream, "%% alias %as", &alias) == 1)
+ {
+ aliases = (char **) xrealloc (aliases,
+ (naliases + 2) * sizeof (char *));
+Index: git/locale/programs/ld-collate.c
+===================================================================
+--- git.orig/locale/programs/ld-collate.c 2014-08-29 20:00:47.408070587 -0700
++++ git/locale/programs/ld-collate.c 2014-08-29 20:01:15.208070587 -0700
+@@ -350,7 +350,7 @@
+ }
+ if (wcs != NULL)
+ {
+- size_t nwcs = wcslen ((wchar_t *) wcs);
++ size_t nwcs = wcslen_uint32 (wcs);
+ uint32_t zero = 0;
+ /* Handle <U0000> as a single character. */
+ if (nwcs == 0)
+@@ -1776,8 +1776,7 @@
+
+ if ((*eptr)->nwcs == runp->nwcs)
+ {
+- int c = wmemcmp ((wchar_t *) (*eptr)->wcs,
+- (wchar_t *) runp->wcs, runp->nwcs);
++ int c = wmemcmp_uint32 ((*eptr)->wcs, runp->wcs, runp->nwcs);
+
+ if (c == 0)
+ {
+@@ -2010,9 +2009,9 @@
+ one consecutive entry. */
+ if (runp->wcnext != NULL
+ && runp->nwcs == runp->wcnext->nwcs
+- && wmemcmp ((wchar_t *) runp->wcs,
+- (wchar_t *)runp->wcnext->wcs,
+- runp->nwcs - 1) == 0
++ && wmemcmp_uint32 (runp->wcs,
++ runp->wcnext->wcs,
++ runp->nwcs - 1) == 0
+ && (runp->wcs[runp->nwcs - 1]
+ == runp->wcnext->wcs[runp->nwcs - 1] + 1))
+ {
+@@ -2036,9 +2035,9 @@
+ runp = runp->wcnext;
+ while (runp->wcnext != NULL
+ && runp->nwcs == runp->wcnext->nwcs
+- && wmemcmp ((wchar_t *) runp->wcs,
+- (wchar_t *)runp->wcnext->wcs,
+- runp->nwcs - 1) == 0
++ && wmemcmp_uint32 (runp->wcs,
++ runp->wcnext->wcs,
++ runp->nwcs - 1) == 0
+ && (runp->wcs[runp->nwcs - 1]
+ == runp->wcnext->wcs[runp->nwcs - 1] + 1));
+
+Index: git/locale/programs/ld-ctype.c
+===================================================================
+--- git.orig/locale/programs/ld-ctype.c 2014-08-29 20:00:47.408070587 -0700
++++ git/locale/programs/ld-ctype.c 2014-08-29 20:01:15.208070587 -0700
+@@ -957,7 +957,7 @@
+ allocate_arrays (ctype, charmap, ctype->repertoire);
+
+ default_missing_len = (ctype->default_missing
+- ? wcslen ((wchar_t *) ctype->default_missing)
++ ? wcslen_uint32 (ctype->default_missing)
+ : 0);
+
+ init_locale_data (&file, nelems);
+@@ -1968,7 +1968,7 @@
+ ignore = 1;
+ else
+ /* This value is usable. */
+- obstack_grow (ob, to_wstr, wcslen ((wchar_t *) to_wstr) * 4);
++ obstack_grow (ob, to_wstr, wcslen_uint32 (to_wstr) * 4);
+
+ first = 0;
+ }
+@@ -2516,8 +2516,8 @@
+ }
+
+ handle_tok_digit:
+- class_bit = _ISwdigit;
+- class256_bit = _ISdigit;
++ class_bit = BITw (tok_digit);
++ class256_bit = BIT (tok_digit);
+ handle_digits = 1;
+ goto read_charclass;
+
+@@ -4001,8 +4001,7 @@
+
+ while (idx < number)
+ {
+- int res = wcscmp ((const wchar_t *) sorted[idx]->from,
+- (const wchar_t *) runp->from);
++ int res = wcscmp_uint32 (sorted[idx]->from, runp->from);
+ if (res == 0)
+ {
+ replace = 1;
+@@ -4039,11 +4038,11 @@
+ for (cnt = 0; cnt < number; ++cnt)
+ {
+ struct translit_to_t *srunp;
+- from_len += wcslen ((const wchar_t *) sorted[cnt]->from) + 1;
++ from_len += wcslen_uint32 (sorted[cnt]->from) + 1;
+ srunp = sorted[cnt]->to;
+ while (srunp != NULL)
+ {
+- to_len += wcslen ((const wchar_t *) srunp->str) + 1;
++ to_len += wcslen_uint32 (srunp->str) + 1;
+ srunp = srunp->next;
+ }
+ /* Plus one for the extra NUL character marking the end of
+@@ -4067,18 +4066,18 @@
+ ctype->translit_from_idx[cnt] = from_len;
+ ctype->translit_to_idx[cnt] = to_len;
+
+- len = wcslen ((const wchar_t *) sorted[cnt]->from) + 1;
+- wmemcpy ((wchar_t *) &ctype->translit_from_tbl[from_len],
+- (const wchar_t *) sorted[cnt]->from, len);
++ len = wcslen_uint32 (sorted[cnt]->from) + 1;
++ wmemcpy_uint32 (&ctype->translit_from_tbl[from_len],
++ sorted[cnt]->from, len);
+ from_len += len;
+
+ ctype->translit_to_idx[cnt] = to_len;
+ srunp = sorted[cnt]->to;
+ while (srunp != NULL)
+ {
+- len = wcslen ((const wchar_t *) srunp->str) + 1;
+- wmemcpy ((wchar_t *) &ctype->translit_to_tbl[to_len],
+- (const wchar_t *) srunp->str, len);
++ len = wcslen_uint32 (srunp->str) + 1;
++ wmemcpy_uint32 (&ctype->translit_to_tbl[to_len],
++ srunp->str, len);
+ to_len += len;
+ srunp = srunp->next;
+ }
+Index: git/locale/programs/ld-messages.c
+===================================================================
+--- git.orig/locale/programs/ld-messages.c 2014-08-29 20:00:47.412070587 -0700
++++ git/locale/programs/ld-messages.c 2014-08-29 20:01:15.208070587 -0700
+@@ -25,6 +25,7 @@
+ #include <string.h>
+ #include <stdint.h>
+ #include <sys/uio.h>
++#include <gnu/option-groups.h>
+
+ #include <assert.h>
+
+@@ -124,6 +125,7 @@
+ }
+ else
+ {
++#if __OPTION_POSIX_REGEXP
+ int result;
+ regex_t re;
+
+@@ -140,6 +142,7 @@
+ }
+ else if (result != 0)
+ regfree (&re);
++#endif
+ }
+
+ if (messages->noexpr == NULL)
+@@ -158,6 +161,7 @@
+ }
+ else
+ {
++#if __OPTION_POSIX_REGEXP
+ int result;
+ regex_t re;
+
+@@ -174,6 +178,7 @@
+ }
+ else if (result != 0)
+ regfree (&re);
++#endif
+ }
+ }
+
+Index: git/locale/programs/ld-time.c
+===================================================================
+--- git.orig/locale/programs/ld-time.c 2014-08-29 20:00:47.412070587 -0700
++++ git/locale/programs/ld-time.c 2014-08-29 20:01:15.208070587 -0700
+@@ -215,8 +215,10 @@
+ }
+ else
+ {
++ static const uint32_t wt_fmt_ampm[]
++ = { '%','I',':','%','M',':','%','S',' ','%','p',0 };
+ time->t_fmt_ampm = "%I:%M:%S %p";
+- time->wt_fmt_ampm = (const uint32_t *) L"%I:%M:%S %p";
++ time->wt_fmt_ampm = wt_fmt_ampm;
+ }
+ }
+
+@@ -226,7 +228,7 @@
+ const int days_per_month[12] = { 31, 29, 31, 30, 31, 30,
+ 31, 31, 30, 31 ,30, 31 };
+ size_t idx;
+- wchar_t *wstr;
++ uint32_t *wstr;
+
+ time->era_entries =
+ (struct era_data *) xmalloc (time->num_era
+@@ -464,18 +466,18 @@
+ }
+
+ /* Now generate the wide character name and format. */
+- wstr = wcschr ((wchar_t *) time->wera[idx], L':');/* end direction */
+- wstr = wstr ? wcschr (wstr + 1, L':') : NULL; /* end offset */
+- wstr = wstr ? wcschr (wstr + 1, L':') : NULL; /* end start */
+- wstr = wstr ? wcschr (wstr + 1, L':') : NULL; /* end end */
++ wstr = wcschr_uint32 (time->wera[idx], L':'); /* end direction */
++ wstr = wstr ? wcschr_uint32 (wstr + 1, L':') : NULL; /* end offset */
++ wstr = wstr ? wcschr_uint32 (wstr + 1, L':') : NULL; /* end start */
++ wstr = wstr ? wcschr_uint32 (wstr + 1, L':') : NULL; /* end end */
+ if (wstr != NULL)
+ {
+- time->era_entries[idx].wname = (uint32_t *) wstr + 1;
+- wstr = wcschr (wstr + 1, L':'); /* end name */
++ time->era_entries[idx].wname = wstr + 1;
++ wstr = wcschr_uint32 (wstr + 1, L':'); /* end name */
+ if (wstr != NULL)
+ {
+ *wstr = L'\0';
+- time->era_entries[idx].wformat = (uint32_t *) wstr + 1;
++ time->era_entries[idx].wformat = wstr + 1;
+ }
+ else
+ time->era_entries[idx].wname =
+@@ -530,7 +532,16 @@
+ if (time->date_fmt == NULL)
+ time->date_fmt = "%a %b %e %H:%M:%S %Z %Y";
+ if (time->wdate_fmt == NULL)
+- time->wdate_fmt = (const uint32_t *) L"%a %b %e %H:%M:%S %Z %Y";
++ {
++ static const uint32_t wdate_fmt[] =
++ { '%','a',' ',
++ '%','b',' ',
++ '%','e',' ',
++ '%','H',':','%','M',':','%','S',' ',
++ '%','Z',' ',
++ '%','Y',0 };
++ time->wdate_fmt = wdate_fmt;
++ }
+ }
+
+
+Index: git/locale/programs/linereader.c
+===================================================================
+--- git.orig/locale/programs/linereader.c 2014-08-29 20:00:47.412070587 -0700
++++ git/locale/programs/linereader.c 2014-08-29 20:01:15.208070587 -0700
+@@ -595,7 +595,7 @@
+ {
+ int return_widestr = lr->return_widestr;
+ char *buf;
+- wchar_t *buf2 = NULL;
++ uint32_t *buf2 = NULL;
+ size_t bufact;
+ size_t bufmax = 56;
+
+Index: git/locale/programs/localedef.c
+===================================================================
+--- git.orig/locale/programs/localedef.c 2014-08-29 20:00:47.416070587 -0700
++++ git/locale/programs/localedef.c 2014-08-29 20:01:15.208070587 -0700
+@@ -114,6 +114,7 @@
+ #define OPT_LIST_ARCHIVE 309
+ #define OPT_LITTLE_ENDIAN 400
+ #define OPT_BIG_ENDIAN 401
++#define OPT_UINT32_ALIGN 402
+
+ /* Definitions of arguments for argp functions. */
+ static const struct argp_option options[] =
+@@ -150,6 +151,8 @@
+ N_("Generate little-endian output") },
+ { "big-endian", OPT_BIG_ENDIAN, NULL, 0,
+ N_("Generate big-endian output") },
++ { "uint32-align", OPT_UINT32_ALIGN, "ALIGNMENT", 0,
++ N_("Set the target's uint32_t alignment in bytes (default 4)") },
+ { NULL, 0, NULL, 0, NULL }
+ };
+
+@@ -239,12 +242,14 @@
+ ctype locale. (P1003.2 4.35.5.2) */
+ setlocale (LC_CTYPE, "POSIX");
+
++#ifndef NO_SYSCONF
+ /* Look whether the system really allows locale definitions. POSIX
+ defines error code 3 for this situation so I think it must be
+ a fatal error (see P1003.2 4.35.8). */
+ if (sysconf (_SC_2_LOCALEDEF) < 0)
+ WITH_CUR_LOCALE (error (3, 0, _("\
+ FATAL: system does not define `_POSIX2_LOCALEDEF'")));
++#endif
+
+ /* Process charmap file. */
+ charmap = charmap_read (charmap_file, verbose, 1, be_quiet, 1);
+@@ -338,6 +343,9 @@
+ case OPT_BIG_ENDIAN:
+ set_big_endian (true);
+ break;
++ case OPT_UINT32_ALIGN:
++ uint32_align_mask = strtol (arg, NULL, 0) - 1;
++ break;
+ case 'c':
+ force_output = 1;
+ break;
+Index: git/locale/programs/locfile.c
+===================================================================
+--- git.orig/locale/programs/locfile.c 2014-08-29 20:00:47.432070587 -0700
++++ git/locale/programs/locfile.c 2014-08-29 20:01:15.208070587 -0700
+@@ -544,6 +544,9 @@
+ machine running localedef. */
+ bool swap_endianness_p;
+
++/* The target's value of __align__(uint32_t) - 1. */
++unsigned int uint32_align_mask = 3;
++
+ /* When called outside a start_locale_structure/end_locale_structure
+ or start_locale_prelude/end_locale_prelude block, record that the
+ next byte in FILE's obstack will be the first byte of a new element.
+@@ -621,7 +624,7 @@
+ void
+ add_locale_wstring (struct locale_file *file, const uint32_t *string)
+ {
+- add_locale_uint32_array (file, string, wcslen ((const wchar_t *) string) + 1);
++ add_locale_uint32_array (file, string, wcslen_uint32 (string) + 1);
+ }
+
+ /* Record that FILE's next element is the 32-bit integer VALUE. */
+Index: git/locale/programs/locfile.h
+===================================================================
+--- git.orig/locale/programs/locfile.h 2014-08-29 20:00:47.432070587 -0700
++++ git/locale/programs/locfile.h 2014-08-29 20:01:15.208070587 -0700
+@@ -71,6 +71,8 @@
+
+ extern bool swap_endianness_p;
+
++extern unsigned int uint32_align_mask;
++
+ /* Change the output to be big-endian if BIG_ENDIAN is true and
+ little-endian otherwise. */
+ static inline void
+@@ -275,4 +277,49 @@
+ const struct charmap_t *charmap,
+ const char *output_path);
+
++static inline size_t
++wcslen_uint32 (const uint32_t *str)
++{
++ size_t len = 0;
++ while (str[len] != 0)
++ len++;
++ return len;
++}
++
++static inline int
++wmemcmp_uint32 (const uint32_t *s1, const uint32_t *s2, size_t n)
++{
++ while (n-- != 0)
++ {
++ int diff = *s1++ - *s2++;
++ if (diff != 0)
++ return diff;
++ }
++ return 0;
++}
++
++static inline int
++wcscmp_uint32 (const uint32_t *s1, const uint32_t *s2)
++{
++ while (*s1 != 0 && *s1 == *s2)
++ s1++, s2++;
++ return *s1 - *s2;
++}
++
++static inline uint32_t *
++wmemcpy_uint32 (uint32_t *s1, const uint32_t *s2, size_t n)
++{
++ return memcpy (s1, s2, n * sizeof (uint32_t));
++}
++
++static inline uint32_t *
++wcschr_uint32 (const uint32_t *s, uint32_t ch)
++{
++ do
++ if (*s == ch)
++ return (uint32_t *) s;
++ while (*s++ != 0);
++ return 0;
++}
++
+ #endif /* locfile.h */
+Index: git/locale/setlocale.c
+===================================================================
+--- git.orig/locale/setlocale.c 2014-08-29 20:00:47.432070587 -0700
++++ git/locale/setlocale.c 2014-08-29 20:01:15.208070587 -0700
+@@ -64,36 +64,6 @@
+ #endif
+
+
+-/* Define an array of category names (also the environment variable names). */
+-const union catnamestr_t _nl_category_names attribute_hidden =
+- {
+- {
+-#define DEFINE_CATEGORY(category, category_name, items, a) \
+- category_name,
+-#include "categories.def"
+-#undef DEFINE_CATEGORY
+- }
+- };
+-
+-const uint8_t _nl_category_name_idxs[__LC_LAST] attribute_hidden =
+- {
+-#define DEFINE_CATEGORY(category, category_name, items, a) \
+- [category] = offsetof (union catnamestr_t, CATNAMEMF (__LINE__)),
+-#include "categories.def"
+-#undef DEFINE_CATEGORY
+- };
+-
+-/* An array of their lengths, for convenience. */
+-const uint8_t _nl_category_name_sizes[] attribute_hidden =
+- {
+-#define DEFINE_CATEGORY(category, category_name, items, a) \
+- [category] = sizeof (category_name) - 1,
+-#include "categories.def"
+-#undef DEFINE_CATEGORY
+- [LC_ALL] = sizeof ("LC_ALL") - 1
+- };
+-
+-
+ #ifdef NL_CURRENT_INDIRECT
+ # define WEAK_POSTLOAD(postload) weak_extern (postload)
+ #else
+Index: git/locale/xlocale.c
+===================================================================
+--- git.orig/locale/xlocale.c 2014-08-29 20:00:47.436070587 -0700
++++ git/locale/xlocale.c 2014-08-29 20:01:15.208070587 -0700
+@@ -18,6 +18,7 @@
+ <http://www.gnu.org/licenses/>. */
+
+ #include <locale.h>
++#include <gnu/option-groups.h>
+ #include "localeinfo.h"
+
+ #define DEFINE_CATEGORY(category, category_name, items, a) \
+@@ -25,6 +26,19 @@
+ #include "categories.def"
+ #undef DEFINE_CATEGORY
+
++/* If the locale support code isn't enabled, don't generate strong
++ reference to the C locale_data structures here; let the Makefile
++ decide which ones to include. (In the static linking case, the
++ strong reference to the 'class', 'toupper', and 'tolower' tables
++ will cause C-ctype.o to be brought in, as it should be, even when
++ the reference to _nl_C_LC_CTYPE will be weak.) */
++#if ! __OPTION_EGLIBC_LOCALE_CODE
++# define DEFINE_CATEGORY(category, category_name, items, a) \
++ weak_extern (_nl_C_##category)
++# include "categories.def"
++# undef DEFINE_CATEGORY
++#endif
++
+ /* Defined in locale/C-ctype.c. */
+ extern const char _nl_C_LC_CTYPE_class[] attribute_hidden;
+ extern const char _nl_C_LC_CTYPE_toupper[] attribute_hidden;
+@@ -52,3 +66,26 @@
+ .__ctype_tolower = (const int *) _nl_C_LC_CTYPE_tolower + 128,
+ .__ctype_toupper = (const int *) _nl_C_LC_CTYPE_toupper + 128
+ };
++
++
++#if ! __OPTION_EGLIBC_LOCALE_CODE
++/* When locale code is enabled, these are each defined in the
++ appropriate lc-CATEGORY.c file, so that static links (when __thread
++ is supported) bring in only those lc-CATEGORY.o files for
++ categories the program actually uses; look for NL_CURRENT_INDIRECT
++ in localeinfo.h.
++
++ When locale code is disabled, the _nl_C_CATEGORY objects are the
++ only possible referents. At the moment, there isn't a way to get
++ __OPTION_EGLIBC_LOCALE_CODE defined in every compilation unit that
++ #includes localeinfo.h, so we can't just turn off
++ NL_CURRENT_INDIRECT. So we'll define the _nl_current_CATEGORY
++ pointers here. */
++#if defined (NL_CURRENT_INDIRECT)
++#define DEFINE_CATEGORY(category, category_name, items, a) \
++ __thread struct __locale_data * const *_nl_current_##category \
++ attribute_hidden = &_nl_C_locobj.__locales[category];
++#include "categories.def"
++#undef DEFINE_CATEGORY
++#endif
++#endif /* __OPTION_EGLIBC_LOCALE_CODE */
+Index: git/localedata/Makefile
+===================================================================
+--- git.orig/localedata/Makefile 2014-08-29 20:00:47.444070587 -0700
++++ git/localedata/Makefile 2014-08-29 20:01:15.212070587 -0700
+@@ -21,12 +21,22 @@
+
+ include ../Makeconfig
+
+-# List with all available character set descriptions.
+-charmaps := $(wildcard charmaps/[A-I]*) $(wildcard charmaps/[J-Z]*)
++include ../option-groups.mak
+
+ # List with all available character set descriptions.
+-locales := $(wildcard locales/*)
++all-charmaps := $(wildcard charmaps/[A-I]*) $(wildcard charmaps/[J-Z]*)
++
++all-locales := $(wildcard locales/*)
+
++# If the EGLIBC_LOCALES option group is not enabled, trim the
++# list of charmap and locale source files.
++ifeq ($(OPTION_EGLIBC_LOCALES),y)
++charmaps := $(all-charmaps)
++locales := $(all-locales)
++else
++charmaps :=
++locales := locales/POSIX
++endif
+
+ subdir-dirs = tests-mbwc
+ vpath %.c tests-mbwc
+@@ -71,14 +81,20 @@
+ tst_wcsxfrm tst_wctob tst_wctomb tst_wctrans \
+ tst_wctype tst_wcwidth
+
+-tests = $(locale_test_suite) tst-digits tst-setlocale bug-iconv-trans \
++# Since these tests build their own locale files, they're not
++# dependent on the OPTION_EGLIBC_LOCALES option group. But they do
++# need the locale functions to be present.
++tests-$(OPTION_EGLIBC_LOCALE_CODE) \
++ += $(locale_test_suite) tst-digits tst-setlocale bug-iconv-trans \
+ tst-leaks tst-mbswcs1 tst-mbswcs2 tst-mbswcs3 tst-mbswcs4 tst-mbswcs5 \
+ tst-mbswcs6 tst-xlocale1 tst-xlocale2 bug-usesetlocale \
+ tst-strfmon1 tst-sscanf bug-setlocale1 tst-setlocale2 tst-setlocale3 \
+ tst-wctype
++ifeq (y,$(OPTION_EGLIBC_LOCALE_CODE))
+ tests-static = bug-setlocale1-static
+ tests += $(tests-static)
+-ifeq (yes,$(build-shared))
++endif
++ifeq (yesy,$(build-shared)$(OPTION_EGLIBC_LOCALE_CODE))
+ ifneq (no,$(PERL))
+ tests-special += $(objpfx)mtrace-tst-leaks.out
+ endif
+@@ -92,12 +108,14 @@
+
+ tests: $(objdir)/iconvdata/gconv-modules
+
++ifeq (y,$(OPTION_EGLIBC_LOCALE_CODE))
+ tests-special += $(objpfx)sort-test.out $(objpfx)tst-fmon.out \
+ $(objpfx)tst-locale.out $(objpfx)tst-rpmatch.out \
+ $(objpfx)tst-trans.out $(objpfx)tst-ctype.out \
+ $(objpfx)tst-langinfo.out $(objpfx)tst-langinfo-static.out \
+ $(objpfx)tst-numeric.out
+ tests-static += tst-langinfo-static
++endif
+
+ ifeq ($(run-built-tests),yes)
+ # We have to generate locales
+@@ -143,9 +161,13 @@
+ $(addprefix $(objpfx),$(CTYPE_FILES)): %: \
+ gen-locale.sh $(common-objpfx)locale/localedef Makefile \
+ $(addprefix charmaps/,$(CHARMAPS)) $(addprefix locales/,$(LOCALE_SRCS))
+- @$(SHELL) gen-locale.sh $(common-objpfx) \
+- '$(built-program-cmd-before-env)' '$(run-program-env)' \
+- '$(built-program-cmd-after-env)' $@; \
++ @$(SHELL) gen-locale.sh $(common-objpfx) \
++ '$(if $(cross-localedef), \
++ $(cross-localedef), \
++ $(built-program-cmd-before-env) \
++ $(run-program-env) \
++ $(built-program-cmd-after-env))' \
++ $@; \
+ $(evaluate-test)
+
+ $(addsuffix .out,$(addprefix $(objpfx),$(tests))): %: \
+@@ -213,6 +235,11 @@
+
+ include SUPPORTED
+
++# Only install locale data if OPTION_EGLIBC_LOCALES is selected.
++ifneq ($(OPTION_EGLIBC_LOCALES),y)
++SUPPORTED-LOCALES :=
++endif
++
+ INSTALL-SUPPORTED-LOCALES=$(addprefix install-, $(SUPPORTED-LOCALES))
+
+ # Sometimes the whole collection of locale files should be installed.
+Index: git/login/Makefile
+===================================================================
+--- git.orig/login/Makefile 2014-08-29 20:00:47.736070587 -0700
++++ git/login/Makefile 2014-08-29 20:01:15.212070587 -0700
+@@ -18,6 +18,7 @@
+ #
+ # Sub-makefile for login portion of the library.
+ #
++include ../option-groups.mak
+
+ subdir := login
+
+@@ -25,14 +26,16 @@
+
+ headers := utmp.h bits/utmp.h lastlog.h pty.h
+
+-routines := getlogin getlogin_r setlogin getlogin_r_chk \
+- getutent getutent_r getutid getutline getutid_r getutline_r \
+- utmp_file utmpname updwtmp getpt grantpt unlockpt ptsname \
+- ptsname_r_chk
++routines := getpt grantpt unlockpt ptsname ptsname_r_chk
++routines-$(OPTION_EGLIBC_UTMP) \
++ += getutent getutent_r getutid getutline getutid_r getutline_r \
++ utmp_file utmpname updwtmp
++routines-$(OPTION_EGLIBC_GETLOGIN) += getlogin getlogin_r getlogin_r_chk
++routines-$(OPTION_EGLIBC_BSD) += setlogin
+
+ CFLAGS-grantpt.c = -DLIBEXECDIR='"$(libexecdir)"'
+
+-others = utmpdump
++others-$(OPTION_EGLIBC_UTMP) += utmpdump
+
+ ifeq (yes,$(build-pt-chown))
+ others += pt_chown
+@@ -46,8 +49,8 @@
+ tests := tst-utmp tst-utmpx tst-grantpt tst-ptsname
+
+ # Build the -lutil library with these extra functions.
+-extra-libs := libutil
+-extra-libs-others := $(extra-libs)
++extra-libs-$(OPTION_EGLIBC_UTMP) := libutil
++extra-libs-others := $(extra-libs-y)
+
+ libutil-routines:= login login_tty logout logwtmp openpty forkpty
+
+Index: git/Makeconfig
+===================================================================
+--- git.orig/Makeconfig 2014-08-29 20:00:42.956070587 -0700
++++ git/Makeconfig 2014-08-29 20:01:15.212070587 -0700
+@@ -582,7 +582,7 @@
+ # and run on the build system, causes that program with those
+ # arguments to be run on the host for which the library is built.
+ ifndef test-wrapper
+-test-wrapper =
++test-wrapper = $(cross-test-wrapper)
+ endif
+ # Likewise, but the name of the program is preceded by
+ # <variable>=<value> assignments for environment variables.
+@@ -1057,6 +1057,24 @@
+ libm = $(common-objpfx)math/libm.a
+ endif
+
++# Generate a header file that #defines preprocessor symbols indicating
++# which option groups are enabled. Note that the option-groups.config file
++# may not exist at all.
++before-compile += $(common-objpfx)gnu/option-groups.h
++common-generated += gnu/option-groups.h gnu/option-groups.stmp
++headers += gnu/option-groups.h
++$(common-objpfx)gnu/option-groups.h: $(common-objpfx)gnu/option-groups.stmp; @:
++$(common-objpfx)gnu/option-groups.stmp: \
++ $(..)scripts/option-groups.awk \
++ $(..)option-groups.defaults \
++ $(wildcard $(common-objpfx)option-groups.config)
++ $(make-target-directory)
++ @rm -f ${@:stmp=T} $@
++ LC_ALL=C $(AWK) -f $^ > ${@:stmp=T}
++ $(move-if-change) ${@:stmp=T} ${@:stmp=h}
++ touch $@
++
++
+ # These are the subdirectories containing the library source. The order
+ # is more or less arbitrary. The sorting step will take care of the
+ # dependencies.
+Index: git/Makerules
+===================================================================
+--- git.orig/Makerules 2014-08-29 20:00:42.960070587 -0700
++++ git/Makerules 2014-08-29 20:01:15.212070587 -0700
+@@ -379,6 +379,25 @@
+ endef
+ endif
+
++# Include targets in the selected option groups.
++aux += $(aux-y)
++extra-libs += $(extra-libs-y)
++extra-libs-others += $(extra-libs-others-y)
++extra-objs += $(extra-objs-y)
++install-bin += $(install-bin-y)
++install-others += $(install-others-y)
++install-sbin += $(install-sbin-y)
++modules += $(modules-y)
++others += $(others-y)
++others-pie += $(others-pie-y)
++routines += $(routines-y)
++static-only-routines += $(static-only-routines-y)
++sysdep_routines += $(sysdep_routines-y)
++test-srcs += $(test-srcs-y)
++tests += $(tests-y)
++xtests += $(xtests-y)
++
++
+ # Modify the list of routines we build for different targets
+
+ ifeq (yes,$(build-shared))
+Index: git/malloc/Makefile
+===================================================================
+--- git.orig/malloc/Makefile 2014-08-29 20:00:47.760070587 -0700
++++ git/malloc/Makefile 2014-08-29 20:01:15.212070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Makefile for malloc routines
+ #
++include ../option-groups.mak
++
+ subdir := malloc
+
+ include ../Makeconfig
+@@ -36,9 +38,15 @@
+ non-lib.a := libmcheck.a
+
+ # Additional library.
++ifeq ($(OPTION_EGLIBC_MEMUSAGE),y)
+ extra-libs = libmemusage
+ extra-libs-others = $(extra-libs)
+
++ifdef OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE
++CPPFLAGS-memusage += -D__OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE=$(OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE)
++endif
++endif
++
+ libmemusage-routines = memusage
+ libmemusage-inhibit-o = $(filter-out .os,$(object-suffixes))
+
+@@ -67,7 +75,7 @@
+ # Unless we get a test for the availability of libgd which also works
+ # for cross-compiling we disable the memusagestat generation in this
+ # situation.
+-ifneq ($(cross-compiling),yes)
++ifeq ($(cross-compiling)$(OPTION_EGLIBC_MEMUSAGE),noy)
+ # If the gd library is available we build the `memusagestat' program.
+ ifneq ($(LIBGD),no)
+ others: $(objpfx)memusage
+Index: git/malloc/memusage.c
+===================================================================
+--- git.orig/malloc/memusage.c 2014-08-29 20:00:47.768070587 -0700
++++ git/malloc/memusage.c 2014-08-29 20:01:15.212070587 -0700
+@@ -33,6 +33,7 @@
+ #include <stdint.h>
+ #include <sys/mman.h>
+ #include <sys/time.h>
++#include <gnu/option-groups.h>
+
+ #include <memusage.h>
+
+@@ -93,7 +94,11 @@
+ #define peak_stack peak_use[1]
+ #define peak_total peak_use[2]
+
+-#define DEFAULT_BUFFER_SIZE 32768
++#ifndef __OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE
++# define DEFAULT_BUFFER_SIZE 32768
++#else
++# define DEFAULT_BUFFER_SIZE __OPTION_EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE
++#endif
+ static size_t buffer_size;
+
+ static int fd = -1;
+Index: git/malloc/memusage.sh
+===================================================================
+--- git.orig/malloc/memusage.sh 2014-08-29 20:00:47.768070587 -0700
++++ git/malloc/memusage.sh 2014-08-29 20:01:15.212070587 -0700
+@@ -35,7 +35,7 @@
+
+ # Print help message
+ do_help() {
+- echo $"Usage: memusage [OPTION]... PROGRAM [PROGRAMOPTION]...
++ printf $"Usage: memusage [OPTION]... PROGRAM [PROGRAMOPTION]...
+ Profile memory usage of PROGRAM.
+
+ -n,--progname=NAME Name of the program file to profile
+Index: git/math/Makefile
+===================================================================
+--- git.orig/math/Makefile 2014-08-29 20:00:47.836070587 -0700
++++ git/math/Makefile 2014-08-29 20:01:15.212070587 -0700
+@@ -21,6 +21,8 @@
+
+ include ../Makeconfig
+
++include ../option-groups.mak
++
+ # Installed header files.
+ headers := math.h bits/mathcalls.h bits/mathinline.h bits/huge_val.h \
+ bits/huge_valf.h bits/huge_vall.h bits/inf.h bits/nan.h \
+@@ -33,8 +35,8 @@
+
+ # Build the -lm library.
+
+-extra-libs := libm
+-extra-libs-others = $(extra-libs)
++extra-libs-$(OPTION_EGLIBC_LIBM) := libm
++extra-libs-others-$(OPTION_EGLIBC_LIBM) = $(extra-libs-$(OPTION_EGLIBC_LIBM))
+
+ libm-support = k_standard s_lib_version s_matherr s_signgam \
+ fclrexcpt fgetexcptflg fraiseexcpt fsetexcptflg \
+Index: git/misc/err.c
+===================================================================
+--- git.orig/misc/err.c 2014-08-29 20:00:48.232070587 -0700
++++ git/misc/err.c 2014-08-29 20:01:15.212070587 -0700
+@@ -22,6 +22,7 @@
+ #include <errno.h>
+ #include <string.h>
+ #include <stdio.h>
++#include <gnu/option-groups.h>
+
+ #include <wchar.h>
+ #define flockfile(s) _IO_flockfile (s)
+@@ -37,6 +38,7 @@
+ va_end (ap); \
+ }
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ static void
+ convert_and_print (const char *format, __gnuc_va_list ap)
+ {
+@@ -81,6 +83,7 @@
+
+ __vfwprintf (stderr, wformat, ap);
+ }
++#endif
+
+ void
+ vwarnx (const char *format, __gnuc_va_list ap)
+@@ -88,9 +91,13 @@
+ flockfile (stderr);
+ if (_IO_fwide (stderr, 0) > 0)
+ {
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ __fwprintf (stderr, L"%s: ", __progname);
+ convert_and_print (format, ap);
+ putwc_unlocked (L'\n', stderr);
++#else
++ abort ();
++#endif
+ }
+ else
+ {
+@@ -111,6 +118,7 @@
+ flockfile (stderr);
+ if (_IO_fwide (stderr, 0) > 0)
+ {
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ __fwprintf (stderr, L"%s: ", __progname);
+ if (format)
+ {
+@@ -119,6 +127,9 @@
+ }
+ __set_errno (error);
+ __fwprintf (stderr, L"%m\n");
++#else
++ abort ();
++#endif
+ }
+ else
+ {
+Index: git/misc/error.c
+===================================================================
+--- git.orig/misc/error.c 2014-08-29 20:00:48.232070587 -0700
++++ git/misc/error.c 2014-08-29 20:01:15.212070587 -0700
+@@ -35,6 +35,7 @@
+ #endif
+
+ #ifdef _LIBC
++# include <gnu/option-groups.h>
+ # include <libintl.h>
+ # include <stdbool.h>
+ # include <stdint.h>
+@@ -205,6 +206,7 @@
+ #if _LIBC
+ if (_IO_fwide (stderr, 0) > 0)
+ {
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ size_t len = strlen (message) + 1;
+ wchar_t *wmessage = NULL;
+ mbstate_t st;
+@@ -265,6 +267,9 @@
+
+ if (use_malloc)
+ free (wmessage);
++#else
++ abort ();
++#endif
+ }
+ else
+ #endif
+Index: git/misc/Makefile
+===================================================================
+--- git.orig/misc/Makefile 2014-08-29 20:00:48.232070587 -0700
++++ git/misc/Makefile 2014-08-29 20:01:15.212070587 -0700
+@@ -19,6 +19,10 @@
+ # Sub-makefile for misc portion of the library.
+ #
+
++# Some system-dependent implementations of these functions use option
++# groups (see sysdeps/unix/sysv/linux/Makefile, for example).
++include ../option-groups.mak
++
+ subdir := misc
+
+ include ../Makeconfig
+@@ -46,40 +50,47 @@
+ select pselect \
+ acct chroot fsync sync fdatasync syncfs reboot \
+ gethostid sethostid \
+- revoke vhangup \
++ vhangup \
+ swapon swapoff mktemp mkstemp mkstemp64 mkdtemp \
+ mkostemp mkostemp64 mkstemps mkstemps64 mkostemps mkostemps64 \
+ ualarm usleep \
+ gtty stty \
+ ptrace \
+- fstab mntent mntent_r \
++ mntent mntent_r \
+ utimes lutimes futimes futimesat \
+ truncate ftruncate truncate64 ftruncate64 \
+- chflags fchflags \
+ insremque getttyent getusershell getpass ttyslot \
+ syslog syscall daemon \
+ mmap mmap64 munmap mprotect msync madvise mincore remap_file_pages\
+ mlock munlock mlockall munlockall \
+- efgcvt efgcvt_r qefgcvt qefgcvt_r \
+ hsearch hsearch_r tsearch lsearch \
+ err error ustat \
+- getsysstats dirname regexp \
++ getsysstats dirname \
+ getloadavg getclktck \
+ fgetxattr flistxattr fremovexattr fsetxattr getxattr \
+ listxattr lgetxattr llistxattr lremovexattr lsetxattr \
+ removexattr setxattr getauxval ifunc-impl-list
+
++routines-$(OPTION_POSIX_REGEXP) += regexp
++routines-$(OPTION_EGLIBC_FSTAB) += fstab
++routines-$(OPTION_EGLIBC_BSD) += chflags fchflags revoke
++routines-$(OPTION_EGLIBC_FCVT) += efgcvt efgcvt_r qefgcvt qefgcvt_r
++
+ generated += tst-error1.mtrace tst-error1-mem.out
+
+ aux := init-misc
+ install-lib := libg.a
+ gpl2lgpl := error.c error.h
+
+-tests := tst-dirname tst-tsearch tst-fdset tst-efgcvt tst-mntent tst-hsearch \
+- tst-error1 tst-pselect tst-insremque tst-mntent2 bug-hsearch1
++tests := tst-dirname tst-tsearch tst-fdset tst-mntent tst-hsearch \
++ tst-pselect tst-insremque tst-mntent2 bug-hsearch1
++tests-$(OPTION_POSIX_WIDE_CHAR_DEVICE_IO) += tst-error1
++tests-$(OPTION_EGLIBC_FCVT) += tst-efgcvt
+ ifeq ($(run-built-tests),yes)
++ifeq (y,$(OPTION_POSIX_WIDE_CHAR_DEVICE_IO))
+ tests-special += $(objpfx)tst-error1-mem.out
+ endif
++endif
+
+ CFLAGS-select.c = -fexceptions -fasynchronous-unwind-tables
+ CFLAGS-tsearch.c = $(uses-callbacks)
+Index: git/misc/sys/xattr.h
+===================================================================
+--- git.orig/misc/sys/xattr.h 2014-08-29 20:00:52.644070587 -0700
++++ git/misc/sys/xattr.h 2014-08-29 20:01:15.216070587 -0700
+@@ -26,7 +26,6 @@
+
+ /* The following constants should be used for the fifth parameter of
+ `*setxattr'. */
+-#ifndef __USE_KERNEL_XATTR_DEFS
+ enum
+ {
+ XATTR_CREATE = 1, /* set value, fail if attr already exists. */
+@@ -34,7 +33,6 @@
+ XATTR_REPLACE = 2 /* set value, fail if attr does not exist. */
+ #define XATTR_REPLACE XATTR_REPLACE
+ };
+-#endif
+
+ /* Set the attribute NAME of the file pointed to by PATH to VALUE (which
+ is SIZE bytes long). Return 0 on success, -1 for errors. */
+Index: git/misc/tst-efgcvt.c
+===================================================================
+--- git.orig/misc/tst-efgcvt.c 2014-08-29 20:00:52.652070587 -0700
++++ git/misc/tst-efgcvt.c 2014-08-29 20:01:15.216070587 -0700
+@@ -59,7 +59,7 @@
+ { 123.01, -4, 3, "" },
+ { 126.71, -4, 3, "" },
+ { 0.0, 4, 1, "0000" },
+-#if DBL_MANT_DIG == 53
++#if DBL_MANT_DIG == 53 && !(defined __powerpc__ && defined __NO_FPRS__ && !defined _SOFT_FLOAT && !defined _SOFT_DOUBLE)
+ { 0x1p-1074, 3, -323, "494" },
+ { -0x1p-1074, 3, -323, "494" },
+ #endif
+Index: git/nis/Makefile
+===================================================================
+--- git.orig/nis/Makefile 2014-08-29 20:00:52.660070587 -0700
++++ git/nis/Makefile 2014-08-29 20:01:15.216070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Makefile for NIS/NIS+ part.
+ #
++include ../option-groups.mak
++
+ subdir := nis
+
+ include ../Makeconfig
+@@ -30,19 +32,26 @@
+
+ # These are the databases available for the nis (and perhaps later nisplus)
+ # service. This must be a superset of the services in nss.
+-databases = proto service hosts network grp pwd rpc ethers \
+- spwd netgrp alias publickey
++databases-y := proto service hosts network grp pwd rpc ethers \
++ spwd netgrp publickey
++databases-$(OPTION_EGLIBC_DB_ALIASES) += alias
+
+ # Specify rules for the nss_* modules.
+-services := nis nisplus compat
++# The 'compat' module includes nis support, and the 'nss' directory
++# includes a bare-bones "files" library, so we'll include 'compat' in
++# OPTION_EGLIBC_NIS.
++services-y :=
++services-$(OPTION_EGLIBC_NIS) += nis nisplus compat
++
++extra-libs-$(OPTION_EGLIBC_NIS) += libnsl
++extra-libs-y += $(services-y:%=libnss_%)
+
+-extra-libs = libnsl $(services:%=libnss_%)
+ # These libraries will be built in the `others' pass rather than
+ # the `lib' pass, because they depend on libc.so being built already.
+-extra-libs-others = $(extra-libs)
++extra-libs-others-y += $(extra-libs-y)
+
+ # The sources are found in the appropriate subdir.
+-subdir-dirs = $(services:%=nss_%)
++subdir-dirs = $(services-y:%=nss_%)
+ vpath %.c $(subdir-dirs)
+
+ libnsl-routines = yp_xdr ypclnt ypupdate_xdr \
+@@ -60,11 +69,11 @@
+ libnss_compat-routines := $(addprefix compat-,grp pwd spwd initgroups)
+ libnss_compat-inhibit-o = $(filter-out .os,$(object-suffixes))
+
+-libnss_nis-routines := $(addprefix nis-,$(databases)) nis-initgroups \
++libnss_nis-routines := $(addprefix nis-,$(databases-y)) nis-initgroups \
+ nss-nis
+ libnss_nis-inhibit-o = $(filter-out .os,$(object-suffixes))
+
+-libnss_nisplus-routines := $(addprefix nisplus-,$(databases)) nisplus-parser \
++libnss_nisplus-routines := $(addprefix nisplus-,$(databases-y)) nisplus-parser \
+ nss-nisplus nisplus-initgroups
+ libnss_nisplus-inhibit-o = $(filter-out .os,$(object-suffixes))
+
+@@ -80,12 +89,12 @@
+ # Target-specific variable setting to link objects using deprecated
+ # RPC interfaces with the version of libc.so that makes them available
+ # for new links:
+-$(services:%=$(objpfx)libnss_%.so) $(objpfx)libnsl.so: \
++$(services-y:%=$(objpfx)libnss_%.so) $(objpfx)libnsl.so: \
+ libc-for-link = $(libnsl-libc)
+
+
+ ifeq ($(build-shared),yes)
+-$(others:%=$(objpfx)%): $(objpfx)libnsl.so$(libnsl.so-version)
++$(others-y:%=$(objpfx)%): $(objpfx)libnsl.so$(libnsl.so-version)
+ else
+-$(others:%=$(objpfx)%): $(objpfx)libnsl.a
++$(others-y:%=$(objpfx)%): $(objpfx)libnsl.a
+ endif
+Index: git/nptl/Makefile
+===================================================================
+--- git.orig/nptl/Makefile 2014-08-29 20:00:52.704070587 -0700
++++ git/nptl/Makefile 2014-08-29 20:01:15.216070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Sub-makefile for NPTL portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := nptl
+
+ include ../Makeconfig
+@@ -116,7 +118,7 @@
+ pt-raise pt-system \
+ flockfile ftrylockfile funlockfile \
+ sigaction \
+- herrno res pt-allocrtsig \
++ pt-allocrtsig \
+ pthread_kill_other_threads \
+ pthread_getaffinity pthread_setaffinity \
+ pthread_attr_getaffinity pthread_attr_setaffinity \
+@@ -136,6 +138,8 @@
+ # pthread_setgid pthread_setegid pthread_setregid \
+ # pthread_setresgid
+
++libpthread-routines-$(OPTION_EGLIBC_INET) := herrno res
++
+ libpthread-shared-only-routines = version pt-allocrtsig unwind-forcedunwind
+ libpthread-static-only-routines = pthread_atfork
+
+@@ -210,7 +214,7 @@
+ tst-mutexpi1 tst-mutexpi2 tst-mutexpi3 tst-mutexpi4 tst-mutexpi5 \
+ tst-mutexpi5a tst-mutexpi6 tst-mutexpi7 tst-mutexpi7a tst-mutexpi8 \
+ tst-mutexpi9 \
+- tst-spin1 tst-spin2 tst-spin3 tst-spin4 \
++ tst-spin1 tst-spin2 tst-spin3 \
+ tst-cond1 tst-cond2 tst-cond3 tst-cond4 tst-cond5 tst-cond6 tst-cond7 \
+ tst-cond8 tst-cond9 tst-cond10 tst-cond11 tst-cond12 tst-cond13 \
+ tst-cond14 tst-cond15 tst-cond16 tst-cond17 tst-cond18 tst-cond19 \
+@@ -244,14 +248,14 @@
+ tst-cancel6 tst-cancel7 tst-cancel8 tst-cancel9 tst-cancel10 \
+ tst-cancel11 tst-cancel12 tst-cancel13 tst-cancel14 tst-cancel15 \
+ tst-cancel16 tst-cancel17 tst-cancel18 tst-cancel19 tst-cancel20 \
+- tst-cancel21 tst-cancel22 tst-cancel23 tst-cancel24 tst-cancel25 \
++ tst-cancel21 tst-cancel22 tst-cancel23 tst-cancel25 \
+ tst-cancel-self tst-cancel-self-cancelstate \
+ tst-cancel-self-canceltype tst-cancel-self-testcancel \
+ tst-cleanup0 tst-cleanup1 tst-cleanup2 tst-cleanup3 tst-cleanup4 \
+ tst-flock1 tst-flock2 \
+ tst-signal1 tst-signal2 tst-signal3 tst-signal4 tst-signal5 \
+ tst-signal6 tst-signal7 \
+- tst-exec1 tst-exec2 tst-exec3 tst-exec4 \
++ tst-exec2 tst-exec3 tst-exec4 \
+ tst-exit1 tst-exit2 tst-exit3 \
+ tst-stdio1 tst-stdio2 \
+ tst-stack1 tst-stack2 tst-stack3 tst-pthread-getattr \
+@@ -259,13 +263,12 @@
+ tst-unload \
+ tst-dlsym1 \
+ tst-sysconf \
+- tst-locale1 tst-locale2 \
++ tst-locale2 \
+ tst-umask1 \
+ tst-popen1 \
+ tst-clock1 \
+ tst-context1 \
+ tst-sched1 \
+- tst-backtrace1 \
+ tst-abstime \
+ tst-vfork1 tst-vfork2 tst-vfork1x tst-vfork2x \
+ tst-getpid1 tst-getpid2 tst-getpid3 \
+@@ -275,6 +278,17 @@
+ tst-mutexpp1 tst-mutexpp6 tst-mutexpp10
+ test-srcs = tst-oddstacklimit
+
++# This test uses the posix_spawn functions.
++tests-$(OPTION_EGLIBC_SPAWN) += tst-exec1
++
++# This test uses the 'backtrace' functions.
++tests-$(OPTION_EGLIBC_BACKTRACE) += tst-backtrace1
++
++# This test is written in C++.
++tests-$(OPTION_EGLIBC_CXX_TESTS) += tst-cancel24
++
++tests-$(OPTION_EGLIBC_LOCALE_CODE) += tst-locale1
++
+ # Files which must not be linked with libpthread.
+ tests-nolibpthread = tst-unload
+
+Index: git/nptl/pthread_create.c
+===================================================================
+--- git.orig/nptl/pthread_create.c 2014-08-29 20:00:52.764070587 -0700
++++ git/nptl/pthread_create.c 2014-08-29 20:01:15.216070587 -0700
+@@ -31,6 +31,7 @@
+ #include <kernel-features.h>
+ #include <exit-thread.h>
+
++#include <gnu/option-groups.h>
+ #include <shlib-compat.h>
+
+ #include <stap-probe.h>
+@@ -240,8 +241,10 @@
+ THREAD_SETMEM (pd, cpuclock_offset, now);
+ #endif
+
++#if __OPTION_EGLIBC_INET
+ /* Initialize resolver state pointer. */
+ __resp = &pd->res;
++#endif
+
+ /* Initialize pointers to locale data. */
+ __ctype_init ();
+@@ -322,8 +325,10 @@
+ /* Run the destructor for the thread-local data. */
+ __nptl_deallocate_tsd ();
+
++#if __OPTION_EGLIBC_INET
+ /* Clean up any state libc stored in thread-local variables. */
+ __libc_thread_freeres ();
++#endif
+
+ /* If this is the last thread we terminate the process now. We
+ do not notify the debugger, it might just irritate it if there
+Index: git/nscd/Makefile
+===================================================================
+--- git.orig/nscd/Makefile 2014-08-29 20:00:52.948070587 -0700
++++ git/nscd/Makefile 2014-08-29 20:01:15.216070587 -0700
+@@ -18,14 +18,17 @@
+ #
+ # Sub-makefile for nscd portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := nscd
+
+ include ../Makeconfig
+
+ ifneq ($(use-nscd),no)
+-routines := nscd_getpw_r nscd_getgr_r nscd_gethst_r nscd_getai \
++routines-$(OPTION_EGLIBC_INET) += \
++ nscd_getpw_r nscd_getgr_r nscd_gethst_r nscd_getai \
+ nscd_initgroups nscd_getserv_r nscd_netgroup
+-aux := nscd_helper
++aux-$(OPTION_EGLIBC_INET) += nscd_helper
+ endif
+
+ # To find xmalloc.c
+@@ -37,14 +40,18 @@
+ dbg_log nscd_conf nscd_stat cache mem nscd_setup_thread \
+ xmalloc xstrdup aicache initgrcache gai res_hconf \
+ netgroupcache
+-
++ifneq (y,$(OPTION_EGLIBC_NIS))
++# If we haven't build libnsl.so, then we'll need to include our
++# own copy of nis_hash.
++nscd-modules += nis_hash
++endif
+ ifeq ($(build-nscd)$(have-thread-library),yesyes)
+
+-others += nscd
+-others-pie += nscd
+-install-sbin := nscd
++others-$(OPTION_EGLIBC_INET) += nscd
++others-pie-$(OPTION_EGLIBC_INET) += nscd
++install-sbin-$(OPTION_EGLIBC_INET) += nscd
+
+-extra-objs = $(nscd-modules:=.o)
++extra-objs-$(OPTION_EGLIBC_INET) += $(nscd-modules:=.o)
+
+ endif
+
+@@ -101,7 +108,15 @@
+ $(objpfx)nscd: $(nscd-modules:%=$(objpfx)%.o)
+
+ ifeq ($(build-shared),yes)
+-$(objpfx)nscd: $(shared-thread-library) $(common-objpfx)nis/libnsl.so
++$(objpfx)nscd: $(shared-thread-library)
++else
++$(objpfx)nscd: $(static-thread-library)
++endif
++
++ifeq (y,$(OPTION_EGLIBC_NIS))
++ifeq ($(build-shared),yes)
++$(objpfx)nscd: $(common-objpfx)nis/libnsl.so
+ else
+-$(objpfx)nscd: $(static-thread-library) $(common-objpfx)nis/libnsl.a
++$(objpfx)nscd: $(common-objpfx)nis/libnsl.a
++endif
+ endif
+Index: git/nscd/nis_hash.c
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/nscd/nis_hash.c 2014-08-29 20:01:15.216070587 -0700
+@@ -0,0 +1,3 @@
++/* If OPTION_EGLIBC_NIS is disabled, nscd can't get this from libnsl.so;
++ we need our own copy. */
++#include "../nis/nis_hash.c"
+Index: git/nss/fixed-nsswitch.conf
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/nss/fixed-nsswitch.conf 2014-08-29 20:01:15.216070587 -0700
+@@ -0,0 +1,22 @@
++# /etc/nsswitch.conf
++#
++# Example configuration for fixed name service.
++# See the description of OPTION_EGLIBC_NSSWITCH in option-groups.def
++# for details.
++#
++
++aliases: files
++
++passwd: files
++group: files
++shadow: files
++
++hosts: files dns
++networks: files dns
++
++protocols: files
++services: files
++ethers: files
++rpc: files
++
++netgroup: files
+Index: git/nss/fixed-nsswitch.functions
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/nss/fixed-nsswitch.functions 2014-08-29 20:01:15.216070587 -0700
+@@ -0,0 +1,121 @@
++/* List of functions defined for fixed NSS in GNU C Library.
++ Copyright (C) 1996, 1997, 1998, 2005 Free Software Foundation, Inc.
++ This file is part of the GNU C Library.
++
++ The GNU C Library is free software; you can redistribute it and/or
++ modify it under the terms of the GNU Lesser General Public
++ License as published by the Free Software Foundation; either
++ version 2.1 of the License, or (at your option) any later version.
++
++ The GNU C Library is distributed in the hope that it will be useful,
++ but WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ Lesser General Public License for more details.
++
++ You should have received a copy of the GNU Lesser General Public
++ License along with the GNU C Library; if not, write to the Free
++ Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
++ 02111-1307 USA. */
++
++/* When OPTION_EGLIBC_NSSWITCH is disabled (see option-groups.def),
++ EGLIBC does not use the 'dlopen' and 'dlsym' functions to look for
++ database query functions in the individual name service libraries.
++ Instead, it uses a set of functions chosen at compile time, as
++ directed by the OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS file. This
++ file is a sample of what you might use there.
++
++ This file is C source code; it should only contain invocations of
++ the following macros:
++
++ - DEFINE_ENT (DATABASE, SERVICE, X)
++
++ Declare the 'setXent', 'getXent_r', and 'endXent' functions that
++ query DATABASE using the service library 'libnss_SERVICE.so.2'.
++ DATABASE should be the full name of the database as it appears in
++ 'nsswitch.conf', like 'passwd' or 'aliases'.
++
++ (The non-reentrant 'getXent' functions are implemented in terms
++ of the reentrant 'getXent_r' functions, so there is no need to
++ refer to them explicitly here.)
++
++ - DEFINE_GETBY (DATABASE, SERVICE, X, KEY)
++
++ Declare the 'getXbyKEY_r' functions that query DATABASE using
++ SERVICE. DATABASE and SERVICE are as described above.
++
++ (The non-reentrant 'getXbyKEY' functions are implemented in terms
++ of the reentrant 'getXbyKEY_r' functions, so there is no need to
++ refer to them explicitly here.)
++
++ Use the special key 'name3' for the service library function that
++ implements the 'getaddrinfo' function.
++
++ - DEFINE_GET (DATABASE, SERVICE, QUERY)
++
++ Declare the 'getQUERY_r' functions that query DATABASE using
++ SERVICE. This is used for functions like 'getpwnam'.
++
++ (The non-reentrant 'getQUERY' functions are implemented in terms
++ of the reentrant 'getQUERY_r' functions, so there is no need to
++ refer to them explicitly here.)
++
++ This sample file only includes functions that consult the files in
++ '/etc', and the Domain Name System (DNS). */
++
++/* aliases */
++DEFINE_ENT (aliases, files, alias)
++DEFINE_GETBY (aliases, files, alias, name)
++
++/* ethers */
++DEFINE_ENT (ethers, files, ether)
++
++/* group */
++DEFINE_ENT (group, files, gr)
++DEFINE_GET (group, files, grgid)
++DEFINE_GET (group, files, grnam)
++
++/* hosts */
++DEFINE_ENT (hosts, files, host)
++DEFINE_GETBY (hosts, files, host, addr)
++DEFINE_GETBY (hosts, files, host, name)
++DEFINE_GETBY (hosts, files, host, name2)
++DEFINE_GET (hosts, files, hostton)
++DEFINE_GET (hosts, files, ntohost)
++DEFINE_GETBY (hosts, dns, host, addr)
++DEFINE_GETBY (hosts, dns, host, name)
++DEFINE_GETBY (hosts, dns, host, name2)
++DEFINE_GETBY (hosts, dns, host, name3)
++
++/* netgroup */
++DEFINE_ENT (netgroup, files, netgr)
++
++/* networks */
++DEFINE_ENT (networks, files, net)
++DEFINE_GETBY (networks, files, net, name)
++DEFINE_GETBY (networks, files, net, addr)
++DEFINE_GETBY (networks, dns, net, name)
++DEFINE_GETBY (networks, dns, net, addr)
++
++/* protocols */
++DEFINE_ENT (protocols, files, proto)
++DEFINE_GETBY (protocols, files, proto, name)
++DEFINE_GETBY (protocols, files, proto, number)
++
++/* passwd */
++DEFINE_ENT (passwd, files, pw)
++DEFINE_GET (passwd, files, pwnam)
++DEFINE_GET (passwd, files, pwuid)
++
++/* rpc */
++DEFINE_ENT (rpc, files, rpc)
++DEFINE_GETBY (rpc, files, rpc, name)
++DEFINE_GETBY (rpc, files, rpc, number)
++
++/* services */
++DEFINE_ENT (services, files, serv)
++DEFINE_GETBY (services, files, serv, name)
++DEFINE_GETBY (services, files, serv, port)
++
++/* shadow */
++DEFINE_ENT (shadow, files, sp)
++DEFINE_GET (shadow, files, spnam)
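For orientation, here is a rough sketch (not part of the patch) of what gen-fixed-nsswitch.c, added next, emits for two of the entries above. The _nss_<service>_<suffix> names follow the generator's known_function_suffix scheme, the known_function type is the one from the modified nsswitch.h, and the exact table indices are hypothetical.

/* Hypothetical excerpt of a generated fixed-nsswitch.h, derived from
   DEFINE_GET (passwd, files, pwnam) and
   DEFINE_GETBY (hosts, dns, host, name) above.  */

/* Weak references, so libc does not depend on the service libraries.  */
typeof (_nss_files_getpwnam_r) _nss_files_getpwnam_r __attribute__ ((weak));
typeof (_nss_dns_gethostbyname_r) _nss_dns_gethostbyname_r __attribute__ ((weak));

/* Name-to-pointer table later scanned by __nss_lookup_function.  */
static const known_function fixed_known_functions[] = {
  /*  0 */ { "getpwnam_r",      _nss_files_getpwnam_r },
  /*  1 */ { "gethostbyname_r", _nss_dns_gethostbyname_r },
};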
+Index: git/nss/gen-fixed-nsswitch.c
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/nss/gen-fixed-nsswitch.c 2014-08-29 20:01:15.216070587 -0700
+@@ -0,0 +1,803 @@
++/* gen-fixed-nsswitch.c --- generate fixed name service data structures
++ Copyright (C) 1996-1999, 2001-2006, 2007 Free Software Foundation, Inc.
++ This file is part of the GNU C Library.
++
++ The GNU C Library is free software; you can redistribute it and/or
++ modify it under the terms of the GNU Lesser General Public
++ License as published by the Free Software Foundation; either
++ version 2.1 of the License, or (at your option) any later version.
++
++ The GNU C Library is distributed in the hope that it will be useful,
++ but WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ Lesser General Public License for more details.
++
++ You should have received a copy of the GNU Lesser General Public
++ License along with the GNU C Library; if not, write to the Free
++ Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
++ 02111-1307 USA. */
++
++#define _GNU_SOURCE
++
++#include <stdlib.h>
++#include <stdio.h>
++#include <errno.h>
++#include <string.h>
++#include <stdarg.h>
++#include <assert.h>
++#include <ctype.h>
++
++#include "gnu/lib-names.h"
++#include "nss.h"
++
++/* Provide a fallback definition to allow this file to be compiled outside
++ libc. */
++#ifndef internal_function
++# define internal_function
++#endif
++
++
++/* Simple utilities. */
++
++void __attribute__ ((noreturn))
++error (const char *message)
++{
++ fprintf (stderr, "%s\n", message);
++ exit (1);
++}
++
++
++void *
++check_alloc (void *p)
++{
++ if (p)
++ return p;
++ else
++ error ("out of memory");
++}
++
++void *
++xmalloc (size_t size)
++{
++ return check_alloc (malloc (size));
++}
++
++
++/* Format ARGS according to FORMAT, and return the result as a
++ malloc'ed string. */
++char *
++saprintf (const char *format, ...)
++{
++ va_list args;
++ size_t len;
++ char *buf;
++
++ va_start (args, format);
++ len = vsnprintf (NULL, 0, format, args);
++ va_end (args);
++
++ buf = xmalloc (len + 1);
++ va_start (args, format);
++ assert (len == vsnprintf (buf, len + 1, format, args));
++ va_end (args);
++
++ return buf;
++}
++
++
++
++/* Data structures representing the configuration file in memory. */
++
++/* These are copied from nsswitch.h.
++
++ We could simply #include that file, but this program runs on the
++ build machine and links against the build machine's libraries,
++ whereas that header is meant for use by target code; it uses
++ 'libc_hidden_proto', 'internal_function', and related hair. Since
++ we've copied the parsing code, we might as well copy the data
++ structure definitions as well. */
++
++/* Actions performed after lookup finished. */
++typedef enum
++{
++ NSS_ACTION_CONTINUE,
++ NSS_ACTION_RETURN
++} lookup_actions;
++
++
++typedef struct service_library
++{
++ /* Name of service (`files', `dns', `nis', ...). */
++ const char *name;
++ /* Pointer to the loaded shared library. */
++ void *lib_handle;
++ /* And the link to the next entry. */
++ struct service_library *next;
++} service_library;
++
++
++/* For mapping a function name to a function pointer. It is known in
++ nsswitch.c:nss_lookup_function that a string pointer for the lookup key
++ is the first member. */
++typedef struct
++{
++ const char *fct_name;
++ void *fct_ptr;
++} known_function;
++
++
++typedef struct service_user
++{
++ /* And the link to the next entry. */
++ struct service_user *next;
++ /* Action according to result. */
++ lookup_actions actions[5];
++ /* Link to the underlying library object. */
++ service_library *library;
++ /* Collection of known functions.
++
++ With OPTION_EGLIBC_NSSWITCH enabled, this is the root of a
++ 'tsearch'-style tree.
++
++ With OPTION_EGLIBC_NSSWITCH disabled, this is an array of
++ pointers to known_function structures, NULL-terminated. */
++ union
++ {
++ void *tree;
++ const known_function **array;
++ } known;
++ /* Name of the service (`files', `dns', `nis', ...). */
++ const char *name;
++} service_user;
++
++/* To access the action based on the status value use this macro. */
++#define nss_next_action(ni, status) ((ni)->actions[2 + status])
++
++
++typedef struct name_database_entry
++{
++ /* And the link to the next entry. */
++ struct name_database_entry *next;
++ /* List of service to be used. */
++ service_user *service;
++ /* Name of the database. */
++ const char *name;
++} name_database_entry;
++
++
++typedef struct name_database
++{
++ /* List of all known databases. */
++ name_database_entry *entry;
++ /* List of libraries with service implementation. */
++ service_library *library;
++} name_database;
++
++
++
++/* Gathering the contents of the FIXED_FUNCTIONS file. */
++
++/* It should be possible to generate this list automatically by
++ looking at the services and databases used in the nsswitch.conf
++ file, and having a hard-coded set of queries supported on each
++ database. */
++
++/* We #include the FIXED_FUNCTIONS file several times to build an
++ array of function structures holding its data. */
++enum function_kind {
++ fk_end = 0, /* Last entry. */
++ fk_setent, /* Like setpwent. */
++ fk_getent, /* Like getpwent. */
++ fk_endent, /* Like endpwent. */
++ fk_getby, /* Like gethostbyname. */
++ fk_get /* Like getpwnam. */
++};
++
++
++struct function {
++ /* What kind of function this is. */
++ enum function_kind kind;
++
++ /* The database and service of the function being hardwired in. */
++ char *database, *service;
++
++ /* The kind of entry being queried, for 'fk_setent', 'fk_getent',
++ 'fk_endent', and 'fk_getby' functions. */
++ char *entry;
++
++ /* The key, for 'fk_getby' entries. */
++ char *key;
++
++ /* The value and key, for 'fk_get' entries. */
++ char *value_and_key;
++};
++
++
++const struct function functions[] =
++ {
++
++#define DEFINE_ENT(database, service, entry) \
++ { fk_setent, #database, #service, #entry }, \
++ { fk_getent, #database, #service, #entry }, \
++ { fk_endent, #database, #service, #entry },
++#define DEFINE_GETBY(database, service, entry, key) \
++ { fk_getby, #database, #service, #entry, #key },
++#define DEFINE_GET(database, service, value_and_key) \
++ { fk_get, #database, #service, NULL, NULL, #value_and_key },
++
++#include FIXED_FUNCTIONS
++
++#undef DEFINE_ENT
++#undef DEFINE_GETBY
++#undef DEFINE_GET
++
++ { fk_end }
++ };
++
++
++/* Parsing the config file. Functions copied from nsswitch.c. */
++
++#define __strchrnul strchrnul
++#define __getline getline
++#define __strncasecmp strncasecmp
++
++/* Prototypes for the local functions. */
++static name_database *nss_parse_file (const char *fname) internal_function;
++static name_database_entry *nss_getline (char *line) internal_function;
++static service_user *nss_parse_service_list (const char *line)
++ internal_function;
++
++static name_database *
++internal_function
++nss_parse_file (const char *fname)
++{
++ FILE *fp;
++ name_database *result;
++ name_database_entry *last;
++ char *line;
++ size_t len;
++
++ /* Open the configuration file. */
++ fp = fopen (fname, "rc");
++ if (fp == NULL)
++ return NULL;
++
++ // /* No threads use this stream. */
++ // __fsetlocking (fp, FSETLOCKING_BYCALLER);
++
++ result = (name_database *) xmalloc (sizeof (name_database));
++
++ result->entry = NULL;
++ result->library = NULL;
++ last = NULL;
++ line = NULL;
++ len = 0;
++ do
++ {
++ name_database_entry *this;
++ ssize_t n;
++
++ n = __getline (&line, &len, fp);
++ if (n < 0)
++ break;
++ if (line[n - 1] == '\n')
++ line[n - 1] = '\0';
++
++ /* Because the file format does not know any form of quoting we
++ can search forward for the next '#' character and if found
++ make it terminating the line. */
++ *__strchrnul (line, '#') = '\0';
++
++ /* If the line is blank it is ignored. */
++ if (line[0] == '\0')
++ continue;
++
++ /* Each line completely specifies the actions for a database. */
++ this = nss_getline (line);
++ if (this != NULL)
++ {
++ if (last != NULL)
++ last->next = this;
++ else
++ result->entry = this;
++
++ last = this;
++ }
++ }
++ while (!feof_unlocked (fp));
++
++ /* Free the buffer. */
++ free (line);
++ /* Close configuration file. */
++ fclose (fp);
++
++ return result;
++}
++
++
++/* Read the source names:
++ `( <source> ( "[" "!"? (<status> "=" <action> )+ "]" )? )*'
++ */
++static service_user *
++internal_function
++nss_parse_service_list (const char *line)
++{
++ service_user *result = NULL, **nextp = &result;
++
++ while (1)
++ {
++ service_user *new_service;
++ const char *name;
++
++ while (isspace (line[0]))
++ ++line;
++ if (line[0] == '\0')
++ /* No source specified. */
++ return result;
++
++ /* Read <source> identifier. */
++ name = line;
++ while (line[0] != '\0' && !isspace (line[0]) && line[0] != '[')
++ ++line;
++ if (name == line)
++ return result;
++
++
++ new_service = (service_user *) xmalloc (sizeof (*new_service));
++ new_service->name = (char *) xmalloc (line - name + 1);
++
++ *((char *) __mempcpy ((char *) new_service->name, name, line - name))
++ = '\0';
++
++ /* Set default actions. */
++ new_service->actions[2 + NSS_STATUS_TRYAGAIN] = NSS_ACTION_CONTINUE;
++ new_service->actions[2 + NSS_STATUS_UNAVAIL] = NSS_ACTION_CONTINUE;
++ new_service->actions[2 + NSS_STATUS_NOTFOUND] = NSS_ACTION_CONTINUE;
++ new_service->actions[2 + NSS_STATUS_SUCCESS] = NSS_ACTION_RETURN;
++ new_service->actions[2 + NSS_STATUS_RETURN] = NSS_ACTION_RETURN;
++ new_service->library = NULL;
++ new_service->known.tree = NULL;
++ new_service->next = NULL;
++
++ while (isspace (line[0]))
++ ++line;
++
++ if (line[0] == '[')
++ {
++ /* Read criterions. */
++ do
++ ++line;
++ while (line[0] != '\0' && isspace (line[0]));
++
++ do
++ {
++ int not;
++ enum nss_status status;
++ lookup_actions action;
++
++ /* Grok ! before name to mean all statii but that one. */
++ not = line[0] == '!';
++ if (not)
++ ++line;
++
++ /* Read status name. */
++ name = line;
++ while (line[0] != '\0' && !isspace (line[0]) && line[0] != '='
++ && line[0] != ']')
++ ++line;
++
++ /* Compare with known statii. */
++ if (line - name == 7)
++ {
++ if (__strncasecmp (name, "SUCCESS", 7) == 0)
++ status = NSS_STATUS_SUCCESS;
++ else if (__strncasecmp (name, "UNAVAIL", 7) == 0)
++ status = NSS_STATUS_UNAVAIL;
++ else
++ return result;
++ }
++ else if (line - name == 8)
++ {
++ if (__strncasecmp (name, "NOTFOUND", 8) == 0)
++ status = NSS_STATUS_NOTFOUND;
++ else if (__strncasecmp (name, "TRYAGAIN", 8) == 0)
++ status = NSS_STATUS_TRYAGAIN;
++ else
++ return result;
++ }
++ else
++ return result;
++
++ while (isspace (line[0]))
++ ++line;
++ if (line[0] != '=')
++ return result;
++ do
++ ++line;
++ while (isspace (line[0]));
++
++ name = line;
++ while (line[0] != '\0' && !isspace (line[0]) && line[0] != '='
++ && line[0] != ']')
++ ++line;
++
++ if (line - name == 6 && __strncasecmp (name, "RETURN", 6) == 0)
++ action = NSS_ACTION_RETURN;
++ else if (line - name == 8
++ && __strncasecmp (name, "CONTINUE", 8) == 0)
++ action = NSS_ACTION_CONTINUE;
++ else
++ return result;
++
++ if (not)
++ {
++ /* Save the current action setting for this status,
++ set them all to the given action, and reset this one. */
++ const lookup_actions save = new_service->actions[2 + status];
++ new_service->actions[2 + NSS_STATUS_TRYAGAIN] = action;
++ new_service->actions[2 + NSS_STATUS_UNAVAIL] = action;
++ new_service->actions[2 + NSS_STATUS_NOTFOUND] = action;
++ new_service->actions[2 + NSS_STATUS_SUCCESS] = action;
++ new_service->actions[2 + status] = save;
++ }
++ else
++ new_service->actions[2 + status] = action;
++
++ /* Skip white spaces. */
++ while (isspace (line[0]))
++ ++line;
++ }
++ while (line[0] != ']');
++
++ /* Skip the ']'. */
++ ++line;
++ }
++
++ *nextp = new_service;
++ nextp = &new_service->next;
++ }
++}
++
++static name_database_entry *
++internal_function
++nss_getline (char *line)
++{
++ const char *name;
++ name_database_entry *result;
++ size_t len;
++
++ /* Ignore leading white spaces. ATTENTION: this is different from
++ what is implemented in Solaris. The Solaris man page says a line
++ beginning with a white space character is ignored. We regard
++ this as just another misfeature in Solaris. */
++ while (isspace (line[0]))
++ ++line;
++
++ /* Recognize `<database> ":"'. */
++ name = line;
++ while (line[0] != '\0' && !isspace (line[0]) && line[0] != ':')
++ ++line;
++ if (line[0] == '\0' || name == line)
++ /* Syntax error. */
++ return NULL;
++ *line++ = '\0';
++
++ len = strlen (name) + 1;
++
++ result = (name_database_entry *) xmalloc (sizeof (*result));
++ result->name = (char *) xmalloc (len);
++
++ /* Save the database name. */
++ memcpy ((char *) result->name, name, len);
++
++ /* Parse the list of services. */
++ result->service = nss_parse_service_list (line);
++
++ result->next = NULL;
++ return result;
++}
++
++
++
++/* Generating code for statically initialized nsswitch structures. */
++
++
++/* Return the service-neutral suffix of the name of the service
++ library function referred to by the function F. The result is
++ allocated with malloc. */
++char *
++known_function_suffix (const struct function *f)
++{
++ switch (f->kind)
++ {
++ case fk_setent:
++ return saprintf ("set%sent", f->entry);
++
++ case fk_getent:
++ return saprintf ("get%sent_r", f->entry);
++
++ case fk_endent:
++ return saprintf ("end%sent", f->entry);
++
++ case fk_getby:
++ return saprintf ("get%sby%s_r", f->entry, f->key);
++
++ case fk_get:
++ return saprintf ("get%s_r", f->value_and_key);
++
++ default:
++ abort ();
++ }
++}
++
++
++/* Return the name of the service library function referred to by the
++ function F. The result is allocated with malloc. */
++char *
++known_function_name (const struct function *f)
++{
++ return saprintf ("_nss_%s_%s", f->service, known_function_suffix (f));
++}
++
++
++/* Write initialized known_function structures to OUT for
++ all the functions we'll use. */
++void
++generate_known_functions (FILE *out)
++{
++ int i;
++
++ /* First, generate weak references to the functions. The service
++ libraries depend on libc, and if these references weren't weak,
++ we'd be making libc depend circularly on the service
++ libraries. */
++ for (i = 0; functions[i].kind; i++)
++ {
++ char *name = known_function_name (&functions[i]);
++ fprintf (out, "typeof (%s) %s __attribute__ ((weak));\n",
++ name, name);
++ }
++ fputs ("\n", out);
++
++ /* Then, a table mapping names to functions. */
++ fputs ("static const known_function fixed_known_functions[] = {\n",
++ out);
++ for (i = 0; functions[i].kind; i++)
++ {
++ const struct function *f = &functions[i];
++ char *suffix = known_function_suffix (f);
++
++ fprintf (out, " /* %2d */ { \"%s\", _nss_%s_%s },\n",
++ i, suffix, f->service, suffix);
++ }
++ fputs ("};\n", out);
++ fputs ("\n", out);
++}
++
++
++/* Print code to OUT for an initialized array of pointers to the
++ 'known_function' structures needed for USER, which is for
++ DATABASE. Return its name, allocated with malloc. */
++char *
++generate_known_function_list (FILE *out,
++ const name_database_entry *database,
++ const service_user *user)
++{
++ char *list_name = saprintf ("fixed_%s_%s_known_funcs",
++ database->name, user->name);
++ fprintf (out, "static const known_function *%s[] = {\n",
++ list_name);
++ int i;
++ for (i = 0; functions[i].kind; i++)
++ if (strcmp (functions[i].database, database->name) == 0
++ && strcmp (functions[i].service, user->name) == 0)
++ fprintf (out, " &fixed_known_functions[%d], /* %s */\n",
++ i, known_function_name (&functions[i]));
++ fputs (" NULL\n", out);
++ fputs ("};\n", out);
++ fputs ("\n", out);
++
++ return list_name;
++}
++
++
++/* Return the name of the status value STATUS, as a statically
++ allocated string. */
++const char *
++lookup_status_name (enum nss_status status)
++{
++ switch (status)
++ {
++ case NSS_STATUS_TRYAGAIN: return "NSS_STATUS_TRYAGAIN";
++ case NSS_STATUS_UNAVAIL: return "NSS_STATUS_UNAVAIL";
++ case NSS_STATUS_NOTFOUND: return "NSS_STATUS_NOTFOUND";
++ case NSS_STATUS_SUCCESS: return "NSS_STATUS_SUCCESS";
++ case NSS_STATUS_RETURN: return "NSS_STATUS_RETURN";
++ default: abort ();
++ };
++}
++
++
++/* Return the name of ACTION as a statically allocated string. */
++const char *
++lookup_action_name (lookup_actions action)
++{
++ switch (action)
++ {
++ case NSS_ACTION_CONTINUE: return "NSS_ACTION_CONTINUE";
++ case NSS_ACTION_RETURN: return "NSS_ACTION_RETURN";
++ default: abort ();
++ }
++}
++
++
++/* Print code to OUT for the list of service_user structures starting
++ with USER, which are all for DATABASE. Return the name of the
++ first structure in that list, or zero if USER is NULL. */
++char *
++generate_service_user_list (FILE *out,
++ name_database_entry *database,
++ service_user *user)
++{
++ if (user)
++ {
++ /* Generate the tail of the list. */
++ char *next_name = generate_service_user_list (out, database, user->next);
++ /* Generate our known function list. */
++ char *known_function_list_name =
++ generate_known_function_list (out, database, user);
++
++ char *name = saprintf ("fixed_%s_%s_user", database->name, user->name);
++
++ fprintf (out, "static const service_user %s = {\n", name);
++ if (next_name)
++ fprintf (out, " (service_user *) &%s,\n", next_name);
++ else
++ fprintf (out, " NULL, /* no next entry */\n");
++ fputs (" {\n", out);
++ int i;
++ for (i = 0; i < sizeof (user->actions) / sizeof (user->actions[0]); i++)
++ fprintf (out, " %s, /* %s */\n",
++ lookup_action_name (user->actions[i]),
++ lookup_status_name (i - 2));
++ fputs (" },\n", out);
++ fprintf (out, " NULL, /* we never need the service library */\n");
++ fprintf (out, " { .array = %s },\n", known_function_list_name);
++ fprintf (out, " \"%s\"\n", user->name);
++ fputs ("};\n", out);
++ fputs ("\n", out);
++
++ return name;
++ }
++ else
++ return NULL;
++}
++
++
++/* Print code to OUT for the list of name_database_entry structures
++ starting with DATABASE. Return the name of the first structure
++ in that list, or zero if DATABASE is NULL. */
++char *
++generate_name_database_entries (FILE *out, name_database_entry *database)
++{
++ if (database)
++ {
++ char *next_name = generate_name_database_entries (out, database->next);
++ char *service_user_name
++ = generate_service_user_list (out, database, database->service);
++ char *name = saprintf ("fixed_%s_name_database", database->name);
++
++ fprintf (out, "static const name_database_entry %s = {\n", name);
++
++ if (next_name)
++ fprintf (out, " (name_database_entry *) &%s,\n", next_name);
++ else
++ fprintf (out, " NULL,\n");
++
++ if (service_user_name)
++ fprintf (out, " (service_user *) &%s,\n", service_user_name);
++ else
++ fprintf (out, " NULL,\n");
++
++ fprintf (out, " \"%s\"\n", database->name);
++ fprintf (out, "};\n");
++ fputs ("\n", out);
++
++ return name;
++ }
++ else
++ return NULL;
++}
++
++
++void
++generate_name_database (FILE *out, name_database *service_table)
++{
++ /* Produce a linked list of the known name_database_entry
++ structures. */
++ char *entries = generate_name_database_entries (out, service_table->entry);
++
++ /* Now produce the main structure that points to them all. */
++ fprintf (out, "static const name_database fixed_name_database = {\n");
++ if (entries)
++ fprintf (out, " (name_database_entry *) &%s,\n", entries);
++ else
++ fprintf (out, " NULL,\n");
++ fputs (" NULL /* we don't need the libraries */\n"
++ "};\n",
++ out);
++}
++
++
++
++/* Generating the list of service libraries we generate references to. */
++
++/* String with revision number of the shared object files. */
++static const char *const nss_shlib_revision = LIBNSS_FILES_SO + 15;
++
++void
++generate_service_lib_list (FILE *out, name_database *service_table)
++{
++ int i, j;
++ int printed_any = 0;
++
++ for (i = 0; functions[i].kind; i++)
++ {
++ /* Mention each service library only once. */
++ for (j = 0; j < i; j++)
++ if (strcmp (functions[i].service, functions[j].service) == 0)
++ break;
++
++ if (j >= i)
++ {
++ if (printed_any)
++ putc (' ', out);
++ fprintf (out, "-lnss_%s",
++ functions[i].service,
++ nss_shlib_revision);
++ printed_any = 1;
++ }
++ }
++}
++
++
++/* Main. */
++
++int
++main (int argc, char **argv)
++{
++ if (argc != 4)
++ {
++ fprintf (stderr, "usage: gen-fixed-nsswitch HEADER SERVLIBS CONFIG\n");
++ exit (1);
++ }
++
++ name_database *service_table = nss_parse_file (argv[3]);
++
++ FILE *header = fopen (argv[1], "w");
++ if (! header)
++ {
++ fprintf (stderr,
++ "gen-fixed-nsswitch: couldn't open output file %s: %s\n",
++ argv[1], strerror (errno));
++ exit (1);
++ }
++ fputs ("/* Generated by nss/gen-fixed-nsswitch.c. */\n", header);
++ fputs ("\n", header);
++ generate_known_functions (header);
++ generate_name_database (header, service_table);
++ fclose (header);
++
++ FILE *service_lib_list = fopen (argv[2], "w");
++ if (! service_lib_list)
++ {
++ fprintf (stderr,
++ "gen-fixed-nsswitch: couldn't open output file %s: %s\n",
++ argv[2], strerror (errno));
++ exit (1);
++ }
++ generate_service_lib_list (service_lib_list, service_table);
++ fclose (service_lib_list);
++
++ return 0;
++}
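Continuing that sketch, the fragment below is roughly what the rest of a generated fixed-nsswitch.h would contain for a "hosts: files dns" line in the fixed configuration (the parser also accepts "[NOTFOUND=return]"-style action specifiers, although the sample file does not use them). Everything here is illustrative output, assuming the fixed_known_functions sketch above and the nsswitch.h types as modified by this patch; the names are hypothetical.

/* Hypothetical generated output for "hosts: files dns".  */
static const known_function *fixed_hosts_dns_known_funcs[] = {
  &fixed_known_functions[1],	/* "gethostbyname_r" via libnss_dns */
  NULL
};

static const service_user fixed_hosts_dns_user = {
  NULL,				/* no next entry */
  {
    NSS_ACTION_CONTINUE,	/* NSS_STATUS_TRYAGAIN */
    NSS_ACTION_CONTINUE,	/* NSS_STATUS_UNAVAIL */
    NSS_ACTION_CONTINUE,	/* NSS_STATUS_NOTFOUND */
    NSS_ACTION_RETURN,		/* NSS_STATUS_SUCCESS */
    NSS_ACTION_RETURN,		/* NSS_STATUS_RETURN */
  },
  NULL,				/* we never need the service library */
  { .array = fixed_hosts_dns_known_funcs },
  "dns"
};

static const known_function *fixed_hosts_files_known_funcs[] = {
  /* The libnss_files host functions would be listed here.  */
  NULL
};

static const service_user fixed_hosts_files_user = {
  (service_user *) &fixed_hosts_dns_user,	/* "files", then "dns" */
  {					/* same default actions as above */
    NSS_ACTION_CONTINUE, NSS_ACTION_CONTINUE, NSS_ACTION_CONTINUE,
    NSS_ACTION_RETURN, NSS_ACTION_RETURN,
  },
  NULL,
  { .array = fixed_hosts_files_known_funcs },
  "files"
};

static const name_database_entry fixed_hosts_name_database = {
  NULL,					/* no next database entry */
  (service_user *) &fixed_hosts_files_user,
  "hosts"
};

static const name_database fixed_name_database = {
  (name_database_entry *) &fixed_hosts_name_database,
  NULL		/* we don't need the libraries */
};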
+Index: git/nss/getent.c
+===================================================================
+--- git.orig/nss/getent.c 2014-08-29 20:00:52.976070587 -0700
++++ git/nss/getent.c 2014-08-29 20:01:15.216070587 -0700
+@@ -39,6 +39,7 @@
+ #include <netinet/ether.h>
+ #include <netinet/in.h>
+ #include <sys/socket.h>
++#include <gnu/option-groups.h>
+
+ /* Get libc version number. */
+ #include <version.h>
+@@ -91,6 +92,7 @@
+ fprintf (stream, gettext ("Written by %s.\n"), "Thorsten Kukuk");
+ }
+
++#if __OPTION_EGLIBC_DB_ALIASES
+ /* This is for aliases */
+ static void
+ print_aliases (struct aliasent *alias)
+@@ -135,7 +137,9 @@
+
+ return result;
+ }
++#endif /* __OPTION_EGLIBC_DB_ALIASES */
+
++#if __OPTION_EGLIBC_INET
+ /* This is for ethers */
+ static int
+ ethers_keys (int number, char *key[])
+@@ -179,6 +183,7 @@
+
+ return result;
+ }
++#endif /* __OPTION_EGLIBC_INET */
+
+ /* This is for group */
+ static void
+@@ -301,6 +306,7 @@
+ return result;
+ }
+
++#if __OPTION_EGLIBC_INET
+ /* This is for hosts */
+ static void
+ print_hosts (struct hostent *host)
+@@ -598,6 +604,7 @@
+
+ return result;
+ }
++#endif /* __OPTION_EGLIBC_INET */
+
+ /* Now is all for passwd */
+ static void
+@@ -650,6 +657,7 @@
+ return result;
+ }
+
++#if __OPTION_EGLIBC_INET
+ /* This is for protocols */
+ static void
+ print_protocols (struct protoent *proto)
+@@ -805,6 +813,7 @@
+
+ return result;
+ }
++#endif /* __OPTION_EGLIBC_INET */
+
+ /* This is for shadow */
+ static void
+@@ -871,21 +880,34 @@
+ } databases[] =
+ {
+ #define D(name) { #name, name ## _keys },
+-D(ahosts)
+-D(ahostsv4)
+-D(ahostsv6)
+-D(aliases)
+-D(ethers)
++
++#if __OPTION_EGLIBC_INET
++#define DN(name) D(name)
++#else
++#define DN(name)
++#endif
++
++#if __OPTION_EGLIBC_DB_ALIASES
++#define DA(name) D(name)
++#else
++#define DA(name)
++#endif
++
++DN(ahosts)
++DN(ahostsv4)
++DN(ahostsv6)
++DA(aliases)
++DN(ethers)
+ D(group)
+ D(gshadow)
+-D(hosts)
++DN(hosts)
+ D(initgroups)
+-D(netgroup)
+-D(networks)
++DN(netgroup)
++DN(networks)
+ D(passwd)
+-D(protocols)
+-D(rpc)
+-D(services)
++DN(protocols)
++DN(rpc)
++DN(services)
+ D(shadow)
+ #undef D
+ { NULL, NULL }
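As a rough illustration (not from the patch) of the D/DN/DA trick above: with __OPTION_EGLIBC_INET and __OPTION_EGLIBC_DB_ALIASES both disabled, DN() and DA() expand to nothing, so after preprocessing the table keeps only the unconditional databases. The struct shape is assumed from the surrounding *_keys functions.

/* Approximate preprocessed result when both option groups are off.  */
static const struct
{
  const char *name;
  int (*func) (int number, char *key[]);
} databases[] =
{
  { "group",      group_keys },
  { "gshadow",    gshadow_keys },
  { "initgroups", initgroups_keys },
  { "passwd",     passwd_keys },
  { "shadow",     shadow_keys },
  { NULL, NULL }
};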
+Index: git/nss/getnssent_r.c
+===================================================================
+--- git.orig/nss/getnssent_r.c 2014-08-29 20:00:52.976070587 -0700
++++ git/nss/getnssent_r.c 2014-08-29 20:01:15.220070587 -0700
+@@ -16,6 +16,7 @@
+ <http://www.gnu.org/licenses/>. */
+
+ #include <errno.h>
++#include <gnu/option-groups.h>
+ #include <netdb.h>
+ #include "nsswitch.h"
+
+@@ -59,11 +60,13 @@
+ } fct;
+ int no_more;
+
++#if __OPTION_EGLIBC_INET
+ if (res && __res_maybe_init (&_res, 0) == -1)
+ {
+ __set_h_errno (NETDB_INTERNAL);
+ return;
+ }
++#endif /* __OPTION_EGLIBC_INET */
+
+ /* Cycle through the services and run their `setXXent' functions until
+ we find an available service. */
+@@ -101,11 +104,13 @@
+ } fct;
+ int no_more;
+
++#if __OPTION_EGLIBC_INET
+ if (res && __res_maybe_init (&_res, 0) == -1)
+ {
+ __set_h_errno (NETDB_INTERNAL);
+ return;
+ }
++#endif /* __OPTION_EGLIBC_INET */
+
+ /* Cycle through all the services and run their endXXent functions. */
+ no_more = setup (func_name, lookup_fct, &fct.ptr, nip, startp, 1);
+@@ -141,12 +146,14 @@
+ int no_more;
+ enum nss_status status;
+
++#if __OPTION_EGLIBC_INET
+ if (res && __res_maybe_init (&_res, 0) == -1)
+ {
+ *h_errnop = NETDB_INTERNAL;
+ *result = NULL;
+ return errno;
+ }
++#endif /* __OPTION_EGLIBC_INET */
+
+ /* Initialize status to return if no more functions are found. */
+ status = NSS_STATUS_NOTFOUND;
+@@ -161,7 +168,7 @@
+ int is_last_nip = *nip == *last_nip;
+
+ status = DL_CALL_FCT (fct.f,
+- (resbuf, buffer, buflen, &errno, &h_errno));
++ (resbuf, buffer, buflen, &errno, h_errnop));
+
+ /* The status is NSS_STATUS_TRYAGAIN and errno is ERANGE the
+ provided buffer is too small. In this case we should give
+Index: git/nss/Makefile
+===================================================================
+--- git.orig/nss/Makefile 2014-08-29 20:00:52.972070587 -0700
++++ git/nss/Makefile 2014-08-29 20:01:15.220070587 -0700
+@@ -18,29 +18,36 @@
+ #
+ # Makefile for name service switch.
+ #
++include ../option-groups.mak
++
+ subdir := nss
+
+ include ../Makeconfig
+
+ headers := nss.h
+
+-# This is the trivial part which goes into libc itself.
+-routines = nsswitch getnssent getnssent_r digits_dots \
+- $(addsuffix -lookup,$(databases))
+-
+ # These are the databases that go through nss dispatch.
+ # Caution: if you add a database here, you must add its real name
+ # in databases.def, too.
+-databases = proto service hosts network grp pwd rpc ethers \
+- spwd netgrp key alias sgrp
++databases-y = grp pwd spwd sgrp
++databases-$(OPTION_EGLIBC_INET) \
++ += proto service hosts network rpc ethers \
++ netgrp key
++databases-$(OPTION_EGLIBC_DB_ALIASES) += alias
++
++# This is the trivial part which goes into libc itself.
++routines-y += nsswitch getnssent getnssent_r \
++ $(addsuffix -lookup,$(databases-y))
++routines-$(OPTION_EGLIBC_INET) += digits_dots
+
+ others := getent makedb
+ install-bin := getent makedb
+ makedb-modules = xmalloc hash-string
+ extra-objs += $(makedb-modules:=.o)
+
+-tests = test-netdb tst-nss-test1 test-digits-dots
+-xtests = bug-erange
++tests = tst-nss-test1
++tests-$(OPTION_EGLIBC_INET) += test-netdb test-digits-dots
++xtests-$(OPTION_EGLIBC_INET) += bug-erange
+
+ # Specify rules for the nss_* modules. We have some services.
+ services := files db
+@@ -55,7 +62,7 @@
+ vpath %.c $(subdir-dirs) ../locale/programs ../intl
+
+
+-libnss_files-routines := $(addprefix files-,$(databases)) \
++libnss_files-routines := $(addprefix files-,$(databases-y)) \
+ files-initgroups files-have_o_cloexec files-init
+
+ libnss_db-dbs := $(addprefix db-,\
+@@ -78,6 +85,45 @@
+ tests += $(tests-static)
+ endif
+
++ifneq ($(OPTION_EGLIBC_NSSWITCH),y)
++
++ifndef OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG
++$(error OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG variable left unset)
++endif
++
++ifndef OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS
++$(error OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS variable left unset)
++endif
++
++ifeq (,$(wildcard $(OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG)))
++$(warning OPTION_EGLIBC_NSSWITCH is disabled, but fixed config file)
++$(error does not exist: $(OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG))
++endif
++
++ifeq (,$(wildcard $(OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS)))
++$(warning OPTION_EGLIBC_NSSWITCH is disabled, but fixed functions file)
++$(error does not exist: $(OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS))
++endif
++
++before-compile := $(objpfx)fixed-nsswitch.h
++generated := fixed-nsswitch.h
+$(objpfx)fixed-nsswitch.h $(objpfx)fixed-nsswitch-libs: \
++ $(objpfx)gen-fixed-nsswitch \
++ $(OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG)
++ $< $(objpfx)fixed-nsswitch.h \
++ $(objpfx)fixed-nsswitch-libs \
++ $(OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG)
++
++$(objpfx)gen-fixed-nsswitch: gen-fixed-nsswitch.c \
++ $(common-objpfx)option-groups.config \
++ $(OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS)
++ $(native-compile)
++gen-fixed-nsswitch-CFLAGS = \
++ -g3 -O -Wall \
++ -I $(objpfx) \
++ -DFIXED_FUNCTIONS='"$(OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS)"'
++endif
++
+ include ../Rules
+
+ ifeq (yes,$(have-selinux))
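To make the new conditional concrete, a build with the dynamic switch removed might be configured roughly as in the fragment below. The variable names come straight from the checks above; the paths and the exact option-groups.config syntax are assumptions for illustration only.

# Assumed option-groups.config fragment (illustrative only)
OPTION_EGLIBC_NSSWITCH = n
OPTION_EGLIBC_NSSWITCH_FIXED_CONFIG = /path/to/fixed-nsswitch.conf
OPTION_EGLIBC_NSSWITCH_FIXED_FUNCTIONS = /path/to/fixed-nsswitch.functions

With something like that in place, the rule above runs gen-fixed-nsswitch on the fixed configuration and drops fixed-nsswitch.h and fixed-nsswitch-libs into the build directory before anything in nss/ is compiled.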
+Index: git/nss/nsswitch.c
+===================================================================
+--- git.orig/nss/nsswitch.c 2014-08-29 20:00:53.004070587 -0700
++++ git/nss/nsswitch.c 2014-08-29 20:01:15.220070587 -0700
+@@ -26,6 +26,7 @@
+ #include <stdio_ext.h>
+ #include <stdlib.h>
+ #include <string.h>
++#include <gnu/option-groups.h>
+
+ #include <aliases.h>
+ #include <grp.h>
+@@ -41,6 +42,15 @@
+ #include "../nscd/nscd_proto.h"
+ #include <sysdep.h>
+
++/* When OPTION_EGLIBC_NSSWITCH is disabled, we use fixed tables of
++ databases and services, generated at library build time. Thus:
++ - We can't reconfigure individual databases, so we don't need a
++ name-to-database map.
++ - We never add databases or service libraries, or look up functions
++ at runtime, so there's no need for a lock to protect our tables.
++ See ../option-groups.def for the details. */
++#if __OPTION_EGLIBC_NSSWITCH
++
+ /* Prototypes for the local functions. */
+ static name_database *nss_parse_file (const char *fname) internal_function;
+ static name_database_entry *nss_getline (char *line) internal_function;
+@@ -79,6 +89,9 @@
+
+ __libc_lock_define_initialized (static, lock)
+
++#define lock_nsswitch __libc_lock_lock (lock)
++#define unlock_nsswitch __libc_lock_unlock (lock)
++
+ #if !defined DO_STATIC_NSS || defined SHARED
+ /* String with revision number of the shared object files. */
+ static const char *const __nss_shlib_revision = LIBNSS_FILES_SO + 15;
+@@ -93,6 +106,20 @@
+ __libc_freeres. */
+ static name_database_entry *defconfig_entries;
+
++#else /* __OPTION_EGLIBC_NSSWITCH */
++
++/* Bring in the statically initialized service table we generated at
++ build time. */
++#include "fixed-nsswitch.h"
++
++const static name_database *service_table = &fixed_name_database;
++
++/* Nothing ever changes, so there's no need to lock anything. */
++#define lock_nsswitch (0)
++#define unlock_nsswitch (0)
++
++#endif /* __OPTION_EGLIBC_NSSWITCH */
++
+
+ #ifdef USE_NSCD
+ /* Nonzero if this is the nscd process. */
+@@ -109,20 +136,22 @@
+ const char *defconfig, service_user **ni)
+ {
+ /* Prevent multiple threads to change the service table. */
+- __libc_lock_lock (lock);
++ lock_nsswitch;
+
+ /* Reconsider database variable in case some other thread called
+ `__nss_configure_lookup' while we waited for the lock. */
+ if (*ni != NULL)
+ {
+- __libc_lock_unlock (lock);
++ unlock_nsswitch;
+ return 0;
+ }
+
++#if __OPTION_EGLIBC_NSSWITCH
+ /* Are we initialized yet? */
+ if (service_table == NULL)
+ /* Read config file. */
+ service_table = nss_parse_file (_PATH_NSSWITCH_CONF);
++#endif
+
+ /* Test whether configuration data is available. */
+ if (service_table != NULL)
+@@ -144,6 +173,7 @@
+ *ni = entry->service;
+ }
+
++#if __OPTION_EGLIBC_NSSWITCH
+ /* No configuration data is available, either because nsswitch.conf
+ doesn't exist or because it doesn't have a line for this database.
+
+@@ -166,13 +196,23 @@
+ {
+ entry->next = defconfig_entries;
+ entry->service = *ni;
+- entry->name[0] = '\0';
++ entry->name = "";
+ defconfig_entries = entry;
+ }
+ }
+ }
++#else
++ /* Without the dynamic behavior, we can't process defconfig. The
++ databases the user specified at library build time are all you
++ get. */
++ if (*ni == NULL)
++ {
++ unlock_nsswitch;
++ return -1;
++ }
++#endif
+
+- __libc_lock_unlock (lock);
++ unlock_nsswitch;
+
+ return *ni != NULL ? 0 : -1;
+ }
+@@ -252,6 +292,7 @@
+ libc_hidden_def (__nss_next2)
+
+
++#if __OPTION_EGLIBC_NSSWITCH
+ int
+ attribute_compat_text_section
+ __nss_next (service_user **ni, const char *fct_name, void **fctp, int status,
+@@ -300,13 +341,13 @@
+ }
+
+ /* Prevent multiple threads to change the service table. */
+- __libc_lock_lock (lock);
++ lock_nsswitch;
+
+ /* Install new rules. */
+ *databases[cnt].dbp = new_db;
+ __nss_database_custom[cnt] = true;
+
+- __libc_lock_unlock (lock);
++ unlock_nsswitch;
+
+ return 0;
+ }
+@@ -402,7 +443,7 @@
+ void **found, *result;
+
+ /* We now modify global data. Protect it. */
+- __libc_lock_lock (lock);
++ lock_nsswitch;
+
+ /* Search the tree of functions previously requested. Data in the
+ tree are `known_function' structures, whose first member is a
+@@ -413,7 +454,7 @@
+ enough to a pointer to our structure to use as a lookup key that
+ will be passed to `known_compare' (above). */
+
+- found = __tsearch (&fct_name, &ni->known, &known_compare);
++ found = __tsearch (&fct_name, &ni->known.tree, &known_compare);
+ if (found == NULL)
+ /* This means out-of-memory. */
+ result = NULL;
+@@ -440,7 +481,7 @@
+ #endif
+ /* Oops. We can't instantiate this node properly.
+ Remove it from the tree. */
+- __tdelete (&fct_name, &ni->known, &known_compare);
++ __tdelete (&fct_name, &ni->known.tree, &known_compare);
+ free (known);
+ result = NULL;
+ }
+@@ -520,13 +561,43 @@
+ }
+
+ /* Remove the lock. */
+- __libc_lock_unlock (lock);
++ unlock_nsswitch;
+
+ return result;
+ }
+ libc_hidden_def (__nss_lookup_function)
+
+
++#else /* below if ! __OPTION_EGLIBC_NSSWITCH */
++
++
++int
++__nss_configure_lookup (const char *dbname, const char *service_line)
++{
++ /* We can't dynamically configure lookup without
++ OPTION_EGLIBC_NSSWITCH. */
++ __set_errno (EINVAL);
++ return -1;
++}
++
++
++void *
++__nss_lookup_function (service_user *ni, const char *fct_name)
++{
++ int i;
++ const known_function **known = ni->known.array;
++
++ for (i = 0; known[i]; i++)
++ if (strcmp (fct_name, known[i]->fct_name) == 0)
++ return known[i]->fct_ptr;
++
++ return NULL;
++}
++libc_hidden_def (__nss_lookup_function)
++#endif
++
++
++#if __OPTION_EGLIBC_NSSWITCH
+ static name_database *
+ internal_function
+ nss_parse_file (const char *fname)
+@@ -632,8 +703,10 @@
+ + (line - name + 1));
+ if (new_service == NULL)
+ return result;
++ new_service->name = (char *) (new_service + 1);
+
+- *((char *) __mempcpy (new_service->name, name, line - name)) = '\0';
++ *((char *) __mempcpy ((char *) new_service->name, name, line - name))
++ = '\0';
+
+ /* Set default actions. */
+ new_service->actions[2 + NSS_STATUS_TRYAGAIN] = NSS_ACTION_CONTINUE;
+@@ -642,7 +715,7 @@
+ new_service->actions[2 + NSS_STATUS_SUCCESS] = NSS_ACTION_RETURN;
+ new_service->actions[2 + NSS_STATUS_RETURN] = NSS_ACTION_RETURN;
+ new_service->library = NULL;
+- new_service->known = NULL;
++ new_service->known.tree = NULL;
+ new_service->next = NULL;
+
+ while (isspace (line[0]))
+@@ -778,9 +851,10 @@
+ result = (name_database_entry *) malloc (sizeof (name_database_entry) + len);
+ if (result == NULL)
+ return NULL;
++ result->name = (char *) (result + 1);
+
+ /* Save the database name. */
+- memcpy (result->name, name, len);
++ memcpy ((char *) result->name, name, len);
+
+ /* Parse the list of services. */
+ result->service = nss_parse_service_list (line);
+@@ -816,6 +890,7 @@
+ return *currentp;
+ }
+ #endif
++#endif /* __OPTION_EGLIBC_NSSWITCH */
+
+
+ #if defined SHARED && defined USE_NSCD
+@@ -834,6 +909,7 @@
+ }
+
+
++#if __OPTION_EGLIBC_INET
+ /* Called by nscd and nscd alone. */
+ void
+ __nss_disable_nscd (void (*cb) (size_t, struct traced_file *))
+@@ -857,8 +933,10 @@
+ __nss_not_use_nscd_services = -1;
+ __nss_not_use_nscd_netgroup = -1;
+ }
++#endif /* __OPTION_EGLIBC_INET */
+ #endif
+
++#if __OPTION_EGLIBC_NSSWITCH
+ static void
+ free_database_entries (name_database_entry *entry)
+ {
+@@ -871,8 +949,8 @@
+ {
+ service_user *olds = service;
+
+- if (service->known != NULL)
+- __tdestroy (service->known, free);
++ if (service->known.tree != NULL)
++ __tdestroy (service->known.tree, free);
+
+ service = service->next;
+ free (olds);
+@@ -926,3 +1004,4 @@
+
+ free (top);
+ }
++#endif /* __OPTION_EGLIBC_NSSWITCH */
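A small walk-through (hypothetical, not libc code) of how a lookup behaves in the fixed configuration, assuming the generated tables sketched earlier and the usual four-argument internal __nss_database_lookup signature: the database is found in the static fixed_name_database, and __nss_lookup_function is the simple array scan above instead of a tsearch tree walk.

/* Hypothetical caller inside libc, fixed (non-dynamic) configuration.  */
service_user *ni = NULL;

/* Finds the "hosts" entry in fixed_name_database; in fixed mode the
   defconfig fallback is not processed, so an unknown database fails.  */
if (__nss_database_lookup ("hosts", NULL, "files dns", &ni) == 0)
  {
    /* Linear scan of ni->known.array for the matching function name.  */
    void *fct = __nss_lookup_function (ni, "gethostbyname_r");
    if (fct != NULL)
      {
	/* fct points at, e.g., _nss_files_gethostbyname_r.  */
      }
  }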
+Index: git/nss/nsswitch.h
+===================================================================
+--- git.orig/nss/nsswitch.h 2014-08-29 20:00:53.012070587 -0700
++++ git/nss/nsswitch.h 2014-08-29 20:01:15.220070587 -0700
+@@ -65,10 +65,20 @@
+ lookup_actions actions[5];
+ /* Link to the underlying library object. */
+ service_library *library;
+- /* Collection of known functions. */
+- void *known;
++ /* Collection of known functions.
++
++ With OPTION_EGLIBC_NSSWITCH enabled, this is the root of a
++ 'tsearch'-style tree.
++
++ With OPTION_EGLIBC_NSSWITCH disabled, this is an array of
++ pointers to known_function structures, NULL-terminated. */
++ union
++ {
++ void *tree;
++ const known_function **array;
++ } known;
+ /* Name of the service (`files', `dns', `nis', ...). */
+- char name[0];
++ const char *name;
+ } service_user;
+
+ /* To access the action based on the status value use this macro. */
+@@ -82,7 +92,7 @@
+ /* List of service to be used. */
+ service_user *service;
+ /* Name of the database. */
+- char name[0];
++ const char *name;
+ } name_database_entry;
+
+
+Index: git/posix/bug-regex1.c
+===================================================================
+--- git.orig/posix/bug-regex1.c 2014-08-29 20:00:53.184070587 -0700
++++ git/posix/bug-regex1.c 2014-08-29 20:01:15.220070587 -0700
+@@ -4,6 +4,7 @@
+ #include <string.h>
+ #include <regex.h>
+ #include <wchar.h>
++#include <gnu/option-groups.h>
+
+ int
+ main (void)
+@@ -17,7 +18,9 @@
+ memset (&regex, '\0', sizeof (regex));
+
+ setlocale (LC_ALL, "de_DE.ISO-8859-1");
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ fwide (stdout, -1);
++#endif
+
+ re_set_syntax (RE_SYNTAX_POSIX_EGREP | RE_DEBUG);
+
+Index: git/posix/bug-regex6.c
+===================================================================
+--- git.orig/posix/bug-regex6.c 2014-08-29 20:00:53.204070587 -0700
++++ git/posix/bug-regex6.c 2014-08-29 20:01:15.220070587 -0700
+@@ -22,6 +22,7 @@
+ #include <string.h>
+ #include <sys/types.h>
+ #include <regex.h>
++#include <gnu/option-groups.h>
+
+
+ int
+@@ -30,7 +31,12 @@
+ regex_t re;
+ regmatch_t mat[10];
+ int i, j, ret = 0;
+- const char *locales[] = { "C", "de_DE.UTF-8" };
++ const char *locales[] = {
++ "C",
++#if __OPTION_EGLIBC_LOCALE_CODE
++ "de_DE.UTF-8"
++#endif
++ };
+ const char *string = "http://www.regex.com/pattern/matching.html#intro";
+ regmatch_t expect[10] = {
+ { 0, 48 }, { 0, 5 }, { 0, 4 }, { 5, 20 }, { 7, 20 }, { 20, 42 },
+Index: git/posix/fnmatch.c
+===================================================================
+--- git.orig/posix/fnmatch.c 2014-08-29 20:00:53.208070587 -0700
++++ git/posix/fnmatch.c 2014-08-29 20:01:15.220070587 -0700
+@@ -30,6 +30,10 @@
+ #include <ctype.h>
+ #include <string.h>
+
++#if defined _LIBC
++# include <gnu/option-groups.h>
++#endif
++
+ #if defined STDC_HEADERS || defined _LIBC
+ # include <stdlib.h>
+ #endif
+@@ -131,7 +135,7 @@
+ # define ISWCTYPE(WC, WT) iswctype (WC, WT)
+ # endif
+
+-# if (HAVE_MBSTATE_T && HAVE_MBSRTOWCS) || _LIBC
++# if (HAVE_MBSTATE_T && HAVE_MBSRTOWCS) || (_LIBC && __OPTION_EGLIBC_LOCALE_CODE)
+ /* In this case we are implementing the multibyte character handling. */
+ # define HANDLE_MULTIBYTE 1
+ # endif
+Index: git/posix/fnmatch_loop.c
+===================================================================
+--- git.orig/posix/fnmatch_loop.c 2014-08-29 20:00:53.220070587 -0700
++++ git/posix/fnmatch_loop.c 2014-08-29 20:01:15.220070587 -0700
+@@ -15,6 +15,8 @@
+ License along with the GNU C Library; if not, see
+ <http://www.gnu.org/licenses/>. */
+
++#include <gnu/option-groups.h>
++
+ #include <stdint.h>
+
+ struct STRUCT
+@@ -54,10 +56,15 @@
+ const char *collseq = (const char *)
+ _NL_CURRENT(LC_COLLATE, _NL_COLLATE_COLLSEQWC);
+ # else
++# if __OPTION_EGLIBC_LOCALE_CODE
+ const UCHAR *collseq = (const UCHAR *)
+ _NL_CURRENT(LC_COLLATE, _NL_COLLATE_COLLSEQMB);
+-# endif
+-#endif
++# define COLLSEQ_BYTE_LOOKUP(ix) (collseq[(ix)])
++# else
++# define COLLSEQ_BYTE_LOOKUP(ix) (ix)
++# endif /* __OPTION_EGLIBC_LOCALE_CODE */
++# endif /* WIDE_CHAR_VERSION */
++#endif /* _LIBC */
+
+ while ((c = *p++) != L('\0'))
+ {
+@@ -277,7 +284,7 @@
+ /* Leave room for the null. */
+ CHAR str[CHAR_CLASS_MAX_LENGTH + 1];
+ size_t c1 = 0;
+-#if defined _LIBC || (defined HAVE_WCTYPE_H && defined HAVE_WCHAR_H)
++#if defined _LIBC ? __OPTION_POSIX_C_LANG_WIDE_CHAR : (defined HAVE_WCTYPE_H && defined HAVE_WCHAR_H)
+ wctype_t wt;
+ #endif
+ const CHAR *startp = p;
+@@ -307,7 +314,7 @@
+ }
+ str[c1] = L('\0');
+
+-#if defined _LIBC || (defined HAVE_WCTYPE_H && defined HAVE_WCHAR_H)
++#if defined _LIBC ? __OPTION_POSIX_C_LANG_WIDE_CHAR : (defined HAVE_WCTYPE_H && defined HAVE_WCHAR_H)
+ wt = IS_CHAR_CLASS (str);
+ if (wt == 0)
+ /* Invalid character class name. */
+@@ -681,8 +688,10 @@
+ else
+ lcollseq = __collseq_table_lookup (collseq, cold);
+ # else
+- fcollseq = collseq[fn];
+- lcollseq = is_seqval ? cold : collseq[(UCHAR) cold];
++ fcollseq = COLLSEQ_BYTE_LOOKUP (fn);
++ lcollseq = (is_seqval
++ ? cold
++ : COLLSEQ_BYTE_LOOKUP ((UCHAR) cold));
+ # endif
+
+ is_seqval = 0;
+@@ -858,7 +867,7 @@
+ goto matched;
+ }
+ # else
+- hcollseq = collseq[cend];
++ hcollseq = COLLSEQ_BYTE_LOOKUP (cend);
+ # endif
+ }
+
+Index: git/posix/glob.c
+===================================================================
+--- git.orig/posix/glob.c 2014-08-29 20:00:53.232070587 -0700
++++ git/posix/glob.c 2014-08-29 20:01:15.220070587 -0700
+@@ -25,6 +25,9 @@
+ #include <sys/types.h>
+ #include <sys/stat.h>
+ #include <stddef.h>
++#ifdef _LIBC
++# include <gnu/option-groups.h>
++#endif
+
+ /* Outcomment the following line for production quality code. */
+ /* #define NDEBUG 1 */
+@@ -607,6 +610,7 @@
+ if (home_dir == NULL || home_dir[0] == '\0')
+ home_dir = "c:/users/default"; /* poor default */
+ # else
++# if ! _LIBC || __OPTION_EGLIBC_GETLOGIN
+ if (home_dir == NULL || home_dir[0] == '\0')
+ {
+ int success;
+@@ -623,19 +627,19 @@
+ if (success)
+ {
+ struct passwd *p;
+-# if defined HAVE_GETPWNAM_R || defined _LIBC
++# if defined HAVE_GETPWNAM_R || defined _LIBC
+ long int pwbuflen = GETPW_R_SIZE_MAX ();
+ char *pwtmpbuf;
+ struct passwd pwbuf;
+ int malloc_pwtmpbuf = 0;
+ int save = errno;
+
+-# ifndef _LIBC
++# ifndef _LIBC
+ if (pwbuflen == -1)
+ /* `sysconf' does not support _SC_GETPW_R_SIZE_MAX.
+ Try a moderate value. */
+ pwbuflen = 1024;
+-# endif
++# endif
+ if (__libc_use_alloca (alloca_used + pwbuflen))
+ pwtmpbuf = alloca_account (pwbuflen, alloca_used);
+ else
+@@ -682,9 +686,9 @@
+ }
+ __set_errno (save);
+ }
+-# else
++# else
+ p = getpwnam (name);
+-# endif
++# endif
+ if (p != NULL)
+ {
+ if (!malloc_pwtmpbuf)
+@@ -713,6 +717,7 @@
+ }
+ }
+ }
++# endif /* ! _LIBC || __OPTION_EGLIBC_GETLOGIN */
+ if (home_dir == NULL || home_dir[0] == '\0')
+ {
+ if (flags & GLOB_TILDE_CHECK)
+Index: git/posix/Makefile
+===================================================================
+--- git.orig/posix/Makefile 2014-08-29 20:00:53.160070587 -0700
++++ git/posix/Makefile 2014-08-29 20:01:15.220070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Sub-makefile for POSIX portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := posix
+
+ include ../Makeconfig
+@@ -43,13 +45,24 @@
+ getpgid setpgid getpgrp bsd-getpgrp setpgrp getsid setsid \
+ getresuid getresgid setresuid setresgid \
+ pathconf sysconf fpathconf \
+- glob glob64 fnmatch regex \
++ glob glob64 fnmatch \
+ confstr \
+ getopt getopt1 getopt_init \
+ sched_setp sched_getp sched_sets sched_gets sched_yield sched_primax \
+ sched_primin sched_rr_gi sched_getaffinity sched_setaffinity \
+- getaddrinfo gai_strerror wordexp \
+ pread pwrite pread64 pwrite64 \
++ posix_madvise \
++ get_child_max sched_cpucount sched_cpualloc sched_cpufree
++
++routines-$(OPTION_EGLIBC_INET) += getaddrinfo gai_strerror
++
++ifeq (y,$(OPTION_POSIX_REGEXP_GLIBC))
++routines-$(OPTION_POSIX_REGEXP) += regex
++else
++routines-$(OPTION_POSIX_REGEXP) += xregex
++endif
++
++routines-$(OPTION_EGLIBC_SPAWN) += \
+ spawn_faction_init spawn_faction_destroy spawn_faction_addclose \
+ spawn_faction_addopen spawn_faction_adddup2 \
+ spawnattr_init spawnattr_destroy \
+@@ -57,41 +70,53 @@
+ spawnattr_getflags spawnattr_setflags \
+ spawnattr_getpgroup spawnattr_setpgroup spawn spawnp spawni \
+ spawnattr_getsigmask spawnattr_getschedpolicy spawnattr_getschedparam \
+- spawnattr_setsigmask spawnattr_setschedpolicy spawnattr_setschedparam \
+- posix_madvise \
+- get_child_max sched_cpucount sched_cpualloc sched_cpufree
++ spawnattr_setsigmask spawnattr_setschedpolicy spawnattr_setschedparam
++routines-$(OPTION_EGLIBC_WORDEXP) += wordexp
+
+ aux := init-posix environ
+-tests := tstgetopt testfnm runtests runptests \
++tests := tstgetopt testfnm runtests \
+ tst-preadwrite tst-preadwrite64 test-vfork regexbug1 \
+- tst-getlogin tst-mmap tst-getaddrinfo tst-truncate \
+- tst-truncate64 tst-fork tst-fnmatch tst-regexloc tst-dir \
+- tst-chmod bug-regex1 bug-regex2 bug-regex3 bug-regex4 \
+- tst-gnuglob tst-regex bug-regex5 bug-regex6 bug-regex7 \
+- bug-regex8 bug-regex9 bug-regex10 bug-regex11 bug-regex12 \
+- bug-regex13 bug-regex14 bug-regex15 bug-regex16 \
+- bug-regex17 bug-regex18 bug-regex19 bug-regex20 \
+- bug-regex21 bug-regex22 bug-regex23 bug-regex24 \
+- bug-regex25 bug-regex26 bug-regex27 bug-regex28 \
+- bug-regex29 bug-regex30 bug-regex31 bug-regex32 \
+- bug-regex33 tst-nice tst-nanosleep tst-regex2 \
+- transbug tst-rxspencer tst-pcre tst-boost \
+- bug-ga1 tst-vfork1 tst-vfork2 tst-vfork3 tst-waitid \
+- tst-getaddrinfo2 bug-glob1 bug-glob2 bug-glob3 tst-sysconf \
++ tst-getlogin tst-mmap tst-truncate \
++ tst-truncate64 tst-fork tst-dir \
++ tst-chmod bug-regex2 bug-regex3 bug-regex4 \
++ tst-gnuglob bug-regex6 bug-regex7 \
++ bug-regex8 bug-regex9 bug-regex10 bug-regex12 \
++ bug-regex14 bug-regex15 \
++ bug-regex21 bug-regex24 \
++ bug-regex27 bug-regex28 bug-regex29 bug-regex30 \
++ bug-regex31 \
++ tst-nice tst-nanosleep \
++ transbug \
++ tst-vfork1 tst-vfork2 tst-vfork3 tst-waitid \
++ bug-glob1 bug-glob2 bug-glob3 tst-sysconf \
+ tst-execvp1 tst-execvp2 tst-execlp1 tst-execlp2 \
+ tst-execv1 tst-execv2 tst-execl1 tst-execl2 \
+ tst-execve1 tst-execve2 tst-execle1 tst-execle2 \
+- tst-execvp3 tst-execvp4 tst-rfc3484 tst-rfc3484-2 \
+- tst-rfc3484-3 \
+- tst-getaddrinfo3 tst-fnmatch2 tst-cpucount tst-cpuset \
++ tst-execvp3 tst-execvp4 \
++ tst-fnmatch2 tst-cpucount tst-cpuset \
+ bug-getopt1 bug-getopt2 bug-getopt3 bug-getopt4 \
+ bug-getopt5 tst-getopt_long1 bug-regex34 bug-regex35 \
+ tst-pathconf tst-getaddrinfo4 tst-rxspencer-no-utf8 \
+ tst-fnmatch3 bug-regex36
+-xtests := bug-ga2
++tests-$(OPTION_EGLIBC_LOCALE_CODE) \
++ += tst-fnmatch tst-regexloc bug-regex1 bug-regex5 \
++ bug-regex23 bug-regex25 bug-regex32 bug-regex33
++tests-$(OPTION_EGLIBC_INET) \
++ += tst-getaddrinfo bug-ga1 tst-getaddrinfo2 \
++ tst-rfc3484 tst-rfc3484-2 tst-rfc3484-3 tst-getaddrinfo3
++tests-$(OPTION_POSIX_REGEXP_GLIBC) \
++ += runptests bug-regex11 bug-regex13 bug-regex16 \
++ tst-regex2 tst-rxspencer tst-pcre tst-boost
++ifeq (yy,$(OPTION_EGLIBC_LOCALE_CODE)$(OPTION_POSIX_REGEXP_GLIBC))
++tests += tst-regex bug-regex17 bug-regex18 bug-regex19 bug-regex20 \
++ bug-regex22 bug-regex26
++endif
++xtests-$(OPTION_EGLIBC_INET) += bug-ga2
+ ifeq (yes,$(build-shared))
+ test-srcs := globtest
+-tests += wordexp-test tst-exec tst-spawn
++tests += tst-exec
++tests-$(OPTION_EGLIBC_SPAWN) += tst-spawn
++tests-$(OPTION_EGLIBC_WORDEXP) += wordexp-test
+ endif
+ tests-static = tst-exec-static tst-spawn-static
+ tests += $(tests-static)
+@@ -117,7 +142,10 @@
+
+ ifeq ($(run-built-tests),yes)
+ ifeq (yes,$(build-shared))
+-tests-special += $(objpfx)globtest.out $(objpfx)wordexp-tst.out
++tests-special += $(objpfx)globtest.out
++ifeq (y,$(OPTION_EGLIBC_WORDEXP))
++tests-special += $(objpfx)wordexp-tst.out
++endif
+ endif
+ endif
+
+@@ -125,12 +153,16 @@
+ # XXX Please note that for now we ignore the result of this test.
+ tests-special += $(objpfx)annexc.out
+ ifeq ($(run-built-tests),yes)
+-tests-special += $(objpfx)bug-regex2-mem.out $(objpfx)bug-regex14-mem.out \
++tests-special += $(objpfx)bug-regex2-mem.out \
+ $(objpfx)bug-regex21-mem.out $(objpfx)bug-regex31-mem.out \
+- $(objpfx)tst-rxspencer-no-utf8-mem.out $(objpfx)tst-pcre-mem.out \
+- $(objpfx)tst-boost-mem.out $(objpfx)tst-getconf.out \
++ $(objpfx)tst-getconf.out \
+ $(objpfx)bug-glob2-mem.out $(objpfx)tst-vfork3-mem.out \
+ $(objpfx)tst-fnmatch-mem.out $(objpfx)bug-regex36-mem.out
++ifeq (y,$(OPTION_POSIX_REGEXP_GLIBC))
++tests-special += $(objpfx)bug-regex14-mem.out $(objpfx)tst-rxspencer-no-utf8-mem.out \
++	       $(objpfx)tst-pcre-mem.out $(objpfx)tst-boost-mem.out
++endif
++
+ xtests-special += $(objpfx)bug-ga2-mem.out
+ endif
+
+@@ -143,6 +175,8 @@
+ $(SHELL) $< $(common-objpfx) '$(test-via-rtld-prefix)' \
+ '$(test-program-prefix)' '$(test-wrapper-env)'; \
+ $(evaluate-test)
++LDLIBS-globtest += $(shell cat $(common-objpfx)nss/fixed-nsswitch-libs)
++
+ $(objpfx)wordexp-tst.out: wordexp-tst.sh $(objpfx)wordexp-test
+ $(SHELL) $< $(common-objpfx) '$(test-program-prefix-before-env)' \
+ '$(run-program-env)' '$(test-program-prefix-after-env)'; \
+@@ -205,7 +239,10 @@
+ tst-chmod-ARGS = $(objdir)
+ tst-vfork3-ARGS = --test-dir=$(objpfx)
+
+-tst-rxspencer-ARGS = --utf8 rxspencer/tests
++tst-rxspencer-ARGS = rxspencer/tests
++ifeq (y,$(OPTION_EGLIBC_LOCALE_CODE))
++tst-rxspencer-ARGS += --utf8
++endif
+ tst-rxspencer-no-utf8-ARGS = rxspencer/tests
+ tst-pcre-ARGS = PCRE.tests
+ tst-boost-ARGS = BOOST.tests
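The routines-y / routines-$(OPTION_...) lists used throughout this Makefile rely on each option-group variable being either "y" or "n"; appends guarded by a disabled group land in a *-n variable that nothing reads. The sketch below illustrates the idea; the actual merge of the -y lists into the plain variables happens in option-groups.mak and the shared Makerules, which are not part of this hunk, so treat the last line as an assumption.

# Illustration only: how the "-y" list idiom selects sources.
OPTION_EGLIBC_SPAWN = n                        # option group disabled

routines-y                      += glob fnmatch   # always built
routines-$(OPTION_EGLIBC_SPAWN) += spawn spawnp   # expands to routines-n, ignored

routines += $(routines-y)                      # assumed merge step (not shown in this patch)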
+Index: git/posix/regcomp.c
+===================================================================
+--- git.orig/posix/regcomp.c 2014-08-29 20:00:53.264070587 -0700
++++ git/posix/regcomp.c 2014-08-29 20:01:15.224070587 -0700
+@@ -18,6 +18,7 @@
+ <http://www.gnu.org/licenses/>. */
+
+ #include <stdint.h>
++#include <gnu/option-groups.h>
+
+ static reg_errcode_t re_compile_internal (regex_t *preg, const char * pattern,
+ size_t length, reg_syntax_t syntax);
+@@ -305,7 +306,7 @@
+ {
+ re_dfa_t *dfa = (re_dfa_t *) bufp->buffer;
+ int node_cnt;
+- int icase = (dfa->mb_cur_max == 1 && (bufp->syntax & RE_ICASE));
++ int icase = (dfa_mb_cur_max (dfa) == 1 && (bufp->syntax & RE_ICASE));
+ for (node_cnt = 0; node_cnt < init_state->nodes.nelem; ++node_cnt)
+ {
+ int node = init_state->nodes.elems[node_cnt];
+@@ -315,9 +316,9 @@
+ {
+ re_set_fastmap (fastmap, icase, dfa->nodes[node].opr.c);
+ #ifdef RE_ENABLE_I18N
+- if ((bufp->syntax & RE_ICASE) && dfa->mb_cur_max > 1)
++ if ((bufp->syntax & RE_ICASE) && dfa_mb_cur_max (dfa) > 1)
+ {
+- unsigned char *buf = alloca (dfa->mb_cur_max), *p;
++ unsigned char *buf = alloca (dfa_mb_cur_max (dfa)), *p;
+ wchar_t wc;
+ mbstate_t state;
+
+@@ -348,7 +349,11 @@
+ re_set_fastmap (fastmap, icase, ch);
+ }
+ }
+-#ifdef RE_ENABLE_I18N
++
++ /* When OPTION_EGLIBC_LOCALE_CODE is disabled, the current
++ locale is always C, which has no rules and no multi-byte
++ characters. */
++#if defined RE_ENABLE_I18N && __OPTION_EGLIBC_LOCALE_CODE
+ else if (type == COMPLEX_BRACKET)
+ {
+ re_charset_t *cset = dfa->nodes[node].opr.mbcset;
+@@ -376,7 +381,7 @@
+ i.e. where we would not find an invalid sequence. This only
+ applies to multibyte character sets; for single byte character
+ sets, the SIMPLE_BRACKET again suffices. */
+- if (dfa->mb_cur_max > 1
++ if (dfa_mb_cur_max (dfa) > 1
+ && (cset->nchar_classes || cset->non_match || cset->nranges
+ # ifdef _LIBC
+ || cset->nequiv_classes
+@@ -404,7 +409,7 @@
+ memset (&state, '\0', sizeof (state));
+ if (__wcrtomb (buf, cset->mbchars[i], &state) != (size_t) -1)
+ re_set_fastmap (fastmap, icase, *(unsigned char *) buf);
+- if ((bufp->syntax & RE_ICASE) && dfa->mb_cur_max > 1)
++ if ((bufp->syntax & RE_ICASE) && dfa_mb_cur_max (dfa) > 1)
+ {
+ if (__wcrtomb (buf, towlower (cset->mbchars[i]), &state)
+ != (size_t) -1)
+@@ -413,7 +418,7 @@
+ }
+ }
+ }
+-#endif /* RE_ENABLE_I18N */
++#endif /* RE_ENABLE_I18N && __OPTION_EGLIBC_LOCALE_CODE */
+ else if (type == OP_PERIOD
+ #ifdef RE_ENABLE_I18N
+ || type == OP_UTF8_PERIOD
+@@ -856,11 +861,15 @@
+
+ dfa->mb_cur_max = MB_CUR_MAX;
+ #ifdef _LIBC
+- if (dfa->mb_cur_max == 6
++ if (dfa_mb_cur_max (dfa) == 6
+ && strcmp (_NL_CURRENT (LC_CTYPE, _NL_CTYPE_CODESET_NAME), "UTF-8") == 0)
+ dfa->is_utf8 = 1;
++# if __OPTION_EGLIBC_LOCALE_CODE
+ dfa->map_notascii = (_NL_CURRENT_WORD (LC_CTYPE, _NL_CTYPE_MAP_TO_NONASCII)
+ != 0);
++# else
++ dfa->map_notascii = 0;
++# endif
+ #else
+ # ifdef HAVE_LANGINFO_CODESET
+ codeset_name = nl_langinfo (CODESET);
+@@ -886,7 +895,7 @@
+ #endif
+
+ #ifdef RE_ENABLE_I18N
+- if (dfa->mb_cur_max > 1)
++ if (dfa_mb_cur_max (dfa) > 1)
+ {
+ if (dfa->is_utf8)
+ dfa->sb_char = (re_bitset_ptr_t) utf8_sb_map;
+@@ -1784,7 +1793,7 @@
+ token->word_char = 0;
+ #ifdef RE_ENABLE_I18N
+ token->mb_partial = 0;
+- if (input->mb_cur_max > 1 &&
++ if (string_mb_cur_max (input) > 1 &&
+ !re_string_first_byte (input, re_string_cur_idx (input)))
+ {
+ token->type = CHARACTER;
+@@ -1805,7 +1814,7 @@
+ token->opr.c = c2;
+ token->type = CHARACTER;
+ #ifdef RE_ENABLE_I18N
+- if (input->mb_cur_max > 1)
++ if (string_mb_cur_max (input) > 1)
+ {
+ wint_t wc = re_string_wchar_at (input,
+ re_string_cur_idx (input) + 1);
+@@ -1919,7 +1928,7 @@
+
+ token->type = CHARACTER;
+ #ifdef RE_ENABLE_I18N
+- if (input->mb_cur_max > 1)
++ if (string_mb_cur_max (input) > 1)
+ {
+ wint_t wc = re_string_wchar_at (input, re_string_cur_idx (input));
+ token->word_char = IS_WIDE_WORD_CHAR (wc) != 0;
+@@ -2019,7 +2028,7 @@
+ token->opr.c = c;
+
+ #ifdef RE_ENABLE_I18N
+- if (input->mb_cur_max > 1 &&
++ if (string_mb_cur_max (input) > 1 &&
+ !re_string_first_byte (input, re_string_cur_idx (input)))
+ {
+ token->type = CHARACTER;
+@@ -2242,7 +2251,7 @@
+ return NULL;
+ }
+ #ifdef RE_ENABLE_I18N
+- if (dfa->mb_cur_max > 1)
++ if (dfa_mb_cur_max (dfa) > 1)
+ {
+ while (!re_string_eoi (regexp)
+ && !re_string_first_byte (regexp, re_string_cur_idx (regexp)))
+@@ -2380,7 +2389,7 @@
+ *err = REG_ESPACE;
+ return NULL;
+ }
+- if (dfa->mb_cur_max > 1)
++ if (dfa_mb_cur_max (dfa) > 1)
+ dfa->has_mb_node = 1;
+ break;
+ case OP_WORD:
+@@ -2686,7 +2695,7 @@
+ However, for !_LIBC we have no collation elements: if the
+ character set is single byte, the single byte character set
+ that we build below suffices. parse_bracket_exp passes
+- no MBCSET if dfa->mb_cur_max == 1. */
++ no MBCSET if dfa_mb_cur_max (dfa) == 1. */
+ if (mbcset)
+ {
+ /* Check the space of the arrays. */
+@@ -2782,7 +2791,13 @@
+ reg_syntax_t syntax, reg_errcode_t *err)
+ {
+ #ifdef _LIBC
++#if __OPTION_EGLIBC_LOCALE_CODE
+ const unsigned char *collseqmb;
++# define COLLSEQMB_LOOKUP(ix) (collseqmb[(ix)])
++#else
++# define COLLSEQMB_LOOKUP(ix) (ix)
++#endif
++
+ const char *collseqwc;
+ uint32_t nrules;
+ int32_t table_size;
+@@ -2830,18 +2845,20 @@
+ if (MB_CUR_MAX == 1)
+ */
+ if (nrules == 0)
+- return collseqmb[br_elem->opr.ch];
++ return COLLSEQMB_LOOKUP (br_elem->opr.ch);
+ else
+ {
+ wint_t wc = __btowc (br_elem->opr.ch);
+ return __collseq_table_lookup (collseqwc, wc);
+ }
+ }
++#if __OPTION_EGLIBC_LOCALE_CODE
+ else if (br_elem->type == MB_CHAR)
+ {
+ if (nrules != 0)
+ return __collseq_table_lookup (collseqwc, br_elem->opr.wch);
+ }
++#endif
+ else if (br_elem->type == COLL_SYM)
+ {
+ size_t sym_name_len = strlen ((char *) br_elem->opr.name);
+@@ -2872,11 +2889,11 @@
+ {
+ /* No valid character. Match it as a single byte
+ character. */
+- return collseqmb[br_elem->opr.name[0]];
++ return COLLSEQMB_LOOKUP (br_elem->opr.name[0]);
+ }
+ }
+ else if (sym_name_len == 1)
+- return collseqmb[br_elem->opr.name[0]];
++ return COLLSEQMB_LOOKUP (br_elem->opr.name[0]);
+ }
+ return UINT_MAX;
+ }
+@@ -2916,7 +2933,7 @@
+ However, if we have no collation elements, and the character set
+ is single byte, the single byte character set that we
+ build below suffices. */
+- if (nrules > 0 || dfa->mb_cur_max > 1)
++ if (nrules > 0 || dfa_mb_cur_max (dfa) > 1)
+ {
+ /* Check the space of the arrays. */
+ if (BE (*range_alloc == mbcset->nranges, 0))
+@@ -2953,7 +2970,7 @@
+ if (MB_CUR_MAX == 1)
+ */
+ if (nrules == 0)
+- ch_collseq = collseqmb[ch];
++ ch_collseq = COLLSEQMB_LOOKUP (ch);
+ else
+ ch_collseq = __collseq_table_lookup (collseqwc, __btowc (ch));
+ if (start_collseq <= ch_collseq && ch_collseq <= end_collseq)
+@@ -3031,7 +3048,10 @@
+ re_bitset_ptr_t sbcset;
+ #ifdef RE_ENABLE_I18N
+ re_charset_t *mbcset;
+- int coll_sym_alloc = 0, range_alloc = 0, mbchar_alloc = 0;
++ int coll_sym_alloc = 0, range_alloc = 0;
++#if __OPTION_EGLIBC_LOCALE_CODE
++ int mbchar_alloc = 0;
++#endif
+ int equiv_class_alloc = 0, char_class_alloc = 0;
+ #endif /* not RE_ENABLE_I18N */
+ int non_match = 0;
+@@ -3039,9 +3059,15 @@
+ int token_len;
+ int first_round = 1;
+ #ifdef _LIBC
++#if __OPTION_EGLIBC_LOCALE_CODE
+ collseqmb = (const unsigned char *)
+ _NL_CURRENT (LC_COLLATE, _NL_COLLATE_COLLSEQMB);
+ nrules = _NL_CURRENT_WORD (LC_COLLATE, _NL_COLLATE_NRULES);
++#else
++ /* This is true when OPTION_EGLIBC_LOCALE_CODE is disabled, but the
++ compiler can't figure that out. */
++ nrules = 0;
++#endif
+ if (nrules)
+ {
+ /*
+@@ -3169,7 +3195,7 @@
+ #else
+ # ifdef RE_ENABLE_I18N
+ *err = build_range_exp (sbcset,
+- dfa->mb_cur_max > 1 ? mbcset : NULL,
++ dfa_mb_cur_max (dfa) > 1 ? mbcset : NULL,
+ &range_alloc, &start_elem, &end_elem);
+ # else
+ *err = build_range_exp (sbcset, &start_elem, &end_elem);
+@@ -3185,7 +3211,7 @@
+ case SB_CHAR:
+ bitset_set (sbcset, start_elem.opr.ch);
+ break;
+-#ifdef RE_ENABLE_I18N
++#if defined RE_ENABLE_I18N && __OPTION_EGLIBC_LOCALE_CODE
+ case MB_CHAR:
+ /* Check whether the array has enough space. */
+ if (BE (mbchar_alloc == mbcset->nmbchars, 0))
+@@ -3203,7 +3229,7 @@
+ }
+ mbcset->mbchars[mbcset->nmbchars++] = start_elem.opr.wch;
+ break;
+-#endif /* RE_ENABLE_I18N */
++#endif /* RE_ENABLE_I18N && __OPTION_EGLIBC_LOCALE_CODE */
+ case EQUIV_CLASS:
+ *err = build_equiv_class (sbcset,
+ #ifdef RE_ENABLE_I18N
+@@ -3253,11 +3279,11 @@
+
+ #ifdef RE_ENABLE_I18N
+ /* Ensure only single byte characters are set. */
+- if (dfa->mb_cur_max > 1)
++ if (dfa_mb_cur_max (dfa) > 1)
+ bitset_mask (sbcset, dfa->sb_char);
+
+ if (mbcset->nmbchars || mbcset->ncoll_syms || mbcset->nequiv_classes
+- || mbcset->nranges || (dfa->mb_cur_max > 1 && (mbcset->nchar_classes
++ || mbcset->nranges || (dfa_mb_cur_max (dfa) > 1 && (mbcset->nchar_classes
+ || mbcset->non_match)))
+ {
+ bin_tree_t *mbc_tree;
+@@ -3326,7 +3352,7 @@
+ re_token_t *token, int token_len, re_dfa_t *dfa,
+ reg_syntax_t syntax, int accept_hyphen)
+ {
+-#ifdef RE_ENABLE_I18N
++#if defined RE_ENABLE_I18N && __OPTION_EGLIBC_LOCALE_CODE
+ int cur_char_size;
+ cur_char_size = re_string_char_size_at (regexp, re_string_cur_idx (regexp));
+ if (cur_char_size > 1)
+@@ -3336,7 +3362,7 @@
+ re_string_skip_bytes (regexp, cur_char_size);
+ return REG_NOERROR;
+ }
+-#endif /* RE_ENABLE_I18N */
++#endif /* RE_ENABLE_I18N && __OPTION_EGLIBC_LOCALE_CODE */
+ re_string_skip_bytes (regexp, token_len); /* Skip a token. */
+ if (token->type == OP_OPEN_COLL_ELEM || token->type == OP_OPEN_CHAR_CLASS
+ || token->type == OP_OPEN_EQUIV_CLASS)
+@@ -3416,7 +3442,9 @@
+ build_equiv_class (bitset_t sbcset, const unsigned char *name)
+ #endif /* not RE_ENABLE_I18N */
+ {
+-#ifdef _LIBC
++ /* When __OPTION_EGLIBC_LOCALE_CODE is disabled, only the C locale
++ is supported; it has no collation rules. */
++#if defined _LIBC && __OPTION_EGLIBC_LOCALE_CODE
+ uint32_t nrules = _NL_CURRENT_WORD (LC_COLLATE, _NL_COLLATE_NRULES);
+ if (nrules != 0)
+ {
+@@ -3488,7 +3516,7 @@
+ mbcset->equiv_classes[mbcset->nequiv_classes++] = idx1;
+ }
+ else
+-#endif /* _LIBC */
++#endif /* _LIBC && __OPTION_EGLIBC_LOCALE_CODE */
+ {
+ if (BE (strlen ((const char *) name) != 1, 0))
+ return REG_ECOLLATE;
+@@ -3522,7 +3550,7 @@
+ && (strcmp (name, "upper") == 0 || strcmp (name, "lower") == 0))
+ name = "alpha";
+
+-#ifdef RE_ENABLE_I18N
++#if defined RE_ENABLE_I18N && __OPTION_EGLIBC_LOCALE_CODE
+ /* Check the space of the arrays. */
+ if (BE (*char_class_alloc == mbcset->nchar_classes, 0))
+ {
+@@ -3538,7 +3566,7 @@
+ *char_class_alloc = new_char_class_alloc;
+ }
+ mbcset->char_classes[mbcset->nchar_classes++] = __wctype (name);
+-#endif /* RE_ENABLE_I18N */
++#endif /* RE_ENABLE_I18N && __OPTION_EGLIBC_LOCALE_CODE */
+
+ #define BUILD_CHARCLASS_LOOP(ctype_func) \
+ do { \
+@@ -3649,7 +3677,7 @@
+
+ #ifdef RE_ENABLE_I18N
+ /* Ensure only single byte characters are set. */
+- if (dfa->mb_cur_max > 1)
++ if (dfa_mb_cur_max (dfa) > 1)
+ bitset_mask (sbcset, dfa->sb_char);
+ #endif
+
+@@ -3661,7 +3689,7 @@
+ goto build_word_op_espace;
+
+ #ifdef RE_ENABLE_I18N
+- if (dfa->mb_cur_max > 1)
++ if (dfa_mb_cur_max (dfa) > 1)
+ {
+ bin_tree_t *mbc_tree;
+ /* Build a tree for complex bracket. */
+Index: git/posix/regexec.c
+===================================================================
+--- git.orig/posix/regexec.c 2014-08-29 20:00:53.268070587 -0700
++++ git/posix/regexec.c 2014-08-29 20:01:15.224070587 -0700
+@@ -18,6 +18,7 @@
+ <http://www.gnu.org/licenses/>. */
+
+ #include <stdint.h>
++#include <gnu/option-groups.h>
+
+ static reg_errcode_t match_ctx_init (re_match_context_t *cache, int eflags,
+ int n) internal_function;
+@@ -186,11 +187,11 @@
+ static int check_node_accept_bytes (const re_dfa_t *dfa, int node_idx,
+ const re_string_t *input, int idx)
+ internal_function;
+-# ifdef _LIBC
++# if defined _LIBC && __OPTION_EGLIBC_LOCALE_CODE
+ static unsigned int find_collation_sequence_value (const unsigned char *mbs,
+ size_t name_len)
+ internal_function;
+-# endif /* _LIBC */
++# endif /* _LIBC && __OPTION_EGLIBC_LOCALE_CODE */
+ #endif /* RE_ENABLE_I18N */
+ static int group_nodes_into_DFAstates (const re_dfa_t *dfa,
+ const re_dfastate_t *state,
+@@ -255,25 +256,9 @@
+ return err != REG_NOERROR;
+ }
+
+-#ifdef _LIBC
+-# include <shlib-compat.h>
+-versioned_symbol (libc, __regexec, regexec, GLIBC_2_3_4);
+-
+-# if SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)
+-__typeof__ (__regexec) __compat_regexec;
+-
+-int
+-attribute_compat_text_section
+-__compat_regexec (const regex_t *__restrict preg,
+- const char *__restrict string, size_t nmatch,
+- regmatch_t pmatch[], int eflags)
+-{
+- return regexec (preg, string, nmatch, pmatch,
+- eflags & (REG_NOTBOL | REG_NOTEOL));
+-}
+-compat_symbol (libc, __compat_regexec, regexec, GLIBC_2_0);
+-# endif
+-#endif
++/* EGLIBC: The code that used to be here was moved to a separate file
++ so that it can be shared with xregex.c. */
++#include "regexec-compat.c"
+
+ /* Entry points for GNU code. */
+
+@@ -728,7 +713,7 @@
+ incr = (range < 0) ? -1 : 1;
+ left_lim = (range < 0) ? start + range : start;
+ right_lim = (range < 0) ? start : start + range;
+- sb = dfa->mb_cur_max == 1;
++ sb = dfa_mb_cur_max (dfa) == 1;
+ match_kind =
+ (fastmap
+ ? ((sb || !(preg->syntax & RE_ICASE || t) ? 4 : 0)
+@@ -3448,7 +3433,7 @@
+ if (BE (dest_states_word[i] == NULL && err != REG_NOERROR, 0))
+ goto out_free;
+
+- if (dest_states[i] != dest_states_word[i] && dfa->mb_cur_max > 1)
++ if (dest_states[i] != dest_states_word[i] && dfa_mb_cur_max (dfa) > 1)
+ need_word_trtable = 1;
+
+ dest_states_nl[i] = re_acquire_state_context (&err, dfa, &follows,
+@@ -3590,7 +3575,7 @@
+ else if (type == OP_PERIOD)
+ {
+ #ifdef RE_ENABLE_I18N
+- if (dfa->mb_cur_max > 1)
++ if (dfa_mb_cur_max (dfa) > 1)
+ bitset_merge (accepts, dfa->sb_char);
+ else
+ #endif
+@@ -3641,7 +3626,7 @@
+ continue;
+ }
+ #ifdef RE_ENABLE_I18N
+- if (dfa->mb_cur_max > 1)
++ if (dfa_mb_cur_max (dfa) > 1)
+ for (j = 0; j < BITSET_WORDS; ++j)
+ any_set |= (accepts[j] &= (dfa->word_char[j] | ~dfa->sb_char[j]));
+ else
+@@ -3660,7 +3645,7 @@
+ continue;
+ }
+ #ifdef RE_ENABLE_I18N
+- if (dfa->mb_cur_max > 1)
++ if (dfa_mb_cur_max (dfa) > 1)
+ for (j = 0; j < BITSET_WORDS; ++j)
+ any_set |= (accepts[j] &= ~(dfa->word_char[j] & dfa->sb_char[j]));
+ else
+@@ -3832,12 +3817,6 @@
+ if (node->type == COMPLEX_BRACKET)
+ {
+ const re_charset_t *cset = node->opr.mbcset;
+-# ifdef _LIBC
+- const unsigned char *pin
+- = ((const unsigned char *) re_string_get_buffer (input) + str_idx);
+- int j;
+- uint32_t nrules;
+-# endif /* _LIBC */
+ int match_len = 0;
+ wchar_t wc = ((cset->nranges || cset->nchar_classes || cset->nmbchars)
+ ? re_string_wchar_at (input, str_idx) : 0);
+@@ -3849,6 +3828,7 @@
+ match_len = char_len;
+ goto check_node_accept_bytes_match;
+ }
++#if __OPTION_EGLIBC_LOCALE_CODE
+ /* match with character_class? */
+ for (i = 0; i < cset->nchar_classes; ++i)
+ {
+@@ -3859,8 +3839,16 @@
+ goto check_node_accept_bytes_match;
+ }
+ }
++#endif
++
++ /* When __OPTION_EGLIBC_LOCALE_CODE is disabled, only the C
++ locale is supported; it has no collation rules. */
++# if defined _LIBC && __OPTION_EGLIBC_LOCALE_CODE
++ const unsigned char *pin
++ = ((const unsigned char *) re_string_get_buffer (input) + str_idx);
++ int j;
++ uint32_t nrules;
+
+-# ifdef _LIBC
+ nrules = _NL_CURRENT_WORD (LC_COLLATE, _NL_COLLATE_NRULES);
+ if (nrules != 0)
+ {
+@@ -3953,8 +3941,12 @@
+ }
+ }
+ else
+-# endif /* _LIBC */
++# endif /* _LIBC && __OPTION_EGLIBC_LOCALE_CODE */
+ {
++ /* In the _LIBC version, if OPTION_EGLIBC_LOCALE_CODE is
++ disabled, there can be no multibyte range endpoints, and
++ cset->nranges is always zero. */
++#if __OPTION_EGLIBC_LOCALE_CODE
+ /* match with range expression? */
+ #if __GNUC__ >= 2
+ wchar_t cmp_buf[] = {L'\0', L'\0', wc, L'\0', L'\0', L'\0'};
+@@ -3973,6 +3965,7 @@
+ goto check_node_accept_bytes_match;
+ }
+ }
++#endif /* __OPTION_EGLIBC_LOCALE_CODE */
+ }
+ check_node_accept_bytes_match:
+ if (!cset->non_match)
+@@ -3988,7 +3981,7 @@
+ return 0;
+ }
+
+-# ifdef _LIBC
++# if defined _LIBC && __OPTION_EGLIBC_LOCALE_CODE
+ static unsigned int
+ internal_function
+ find_collation_sequence_value (const unsigned char *mbs, size_t mbs_len)
+@@ -4046,7 +4039,7 @@
+ return UINT_MAX;
+ }
+ }
+-# endif /* _LIBC */
++# endif /* _LIBC && __OPTION_EGLIBC_LOCALE_CODE */
+ #endif /* RE_ENABLE_I18N */
+
+ /* Check whether the node accepts the byte which is IDX-th
+@@ -4137,7 +4130,7 @@
+ if (pstr->icase)
+ {
+ #ifdef RE_ENABLE_I18N
+- if (pstr->mb_cur_max > 1)
++ if (string_mb_cur_max (pstr) > 1)
+ {
+ ret = build_wcs_upper_buffer (pstr);
+ if (BE (ret != REG_NOERROR, 0))
+@@ -4150,7 +4143,7 @@
+ else
+ {
+ #ifdef RE_ENABLE_I18N
+- if (pstr->mb_cur_max > 1)
++ if (string_mb_cur_max (pstr) > 1)
+ build_wcs_buffer (pstr);
+ else
+ #endif /* RE_ENABLE_I18N */
+Index: git/posix/regexec-compat.c
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/posix/regexec-compat.c 2014-08-29 20:01:15.224070587 -0700
+@@ -0,0 +1,39 @@
++/* Extended regular expression matching and search library.
++ Copyright (C) 2008 Free Software Foundation, Inc.
++ This file is part of the GNU C Library.
++ Contributed by Isamu Hasegawa <isamu@yamato.ibm.com>.
++
++ The GNU C Library is free software; you can redistribute it and/or
++ modify it under the terms of the GNU Lesser General Public
++ License as published by the Free Software Foundation; either
++ version 2.1 of the License, or (at your option) any later version.
++
++ The GNU C Library is distributed in the hope that it will be useful,
++ but WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ Lesser General Public License for more details.
++
++ You should have received a copy of the GNU Lesser General Public
++ License along with the GNU C Library; if not, write to the Free
++ Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
++ 02111-1307 USA. */
++
++#ifdef _LIBC
++# include <shlib-compat.h>
++versioned_symbol (libc, __regexec, regexec, GLIBC_2_3_4);
++
++# if SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3_4)
++__typeof__ (__regexec) __compat_regexec;
++
++int
++attribute_compat_text_section
++__compat_regexec (const regex_t *__restrict preg,
++ const char *__restrict string, size_t nmatch,
++ regmatch_t pmatch[], int eflags)
++{
++ return regexec (preg, string, nmatch, pmatch,
++ eflags & (REG_NOTBOL | REG_NOTEOL));
++}
++compat_symbol (libc, __compat_regexec, regexec, GLIBC_2_0);
++# endif
++#endif
+Index: git/posix/regex.h
+===================================================================
+--- git.orig/posix/regex.h 2014-08-29 20:00:53.264070587 -0700
++++ git/posix/regex.h 2014-08-29 20:01:15.224070587 -0700
+@@ -21,6 +21,7 @@
+ #define _REGEX_H 1
+
+ #include <sys/types.h>
++#include <gnu/option-groups.h>
+
+ /* Allow the use in C++ code. */
+ #ifdef __cplusplus
+@@ -156,6 +157,8 @@
+ treated as 'a\{1'. */
+ # define RE_INVALID_INTERVAL_ORD (RE_DEBUG << 1)
+
++/* EGLIBC: Old regex implementation does not support these. */
++# if __OPTION_POSIX_REGEXP_GLIBC
+ /* If this bit is set, then ignore case when matching.
+ If not set, then case is significant. */
+ # define RE_ICASE (RE_INVALID_INTERVAL_ORD << 1)
+@@ -172,6 +175,7 @@
+ /* If this bit is set, then no_sub will be set to 1 during
+ re_compile_pattern. */
+ # define RE_NO_SUB (RE_CONTEXT_INVALID_DUP << 1)
++# endif /* __OPTION_POSIX_REGEXP_GLIBC */
+ #endif
+
+ /* This global variable defines the particular regexp syntax to use (for
+@@ -231,8 +235,13 @@
+ (RE_CHAR_CLASSES | RE_DOT_NEWLINE | RE_DOT_NOT_NULL \
+ | RE_INTERVALS | RE_NO_EMPTY_RANGES)
+
++#if __OPTION_POSIX_REGEXP_GLIBC
+ #define RE_SYNTAX_POSIX_BASIC \
+ (_RE_SYNTAX_POSIX_COMMON | RE_BK_PLUS_QM | RE_CONTEXT_INVALID_DUP)
++#else
++#define RE_SYNTAX_POSIX_BASIC \
++ (_RE_SYNTAX_POSIX_COMMON | RE_BK_PLUS_QM)
++#endif
+
+ /* Differs from ..._POSIX_BASIC only in that RE_BK_PLUS_QM becomes
+ RE_LIMITED_OPS, i.e., \? \+ \| are not recognized. Actually, this
+@@ -298,9 +307,11 @@
+ /* Like REG_NOTBOL, except for the end-of-line. */
+ #define REG_NOTEOL (1 << 1)
+
++#if __OPTION_POSIX_REGEXP_GLIBC
+ /* Use PMATCH[0] to delimit the start and end of the search in the
+ buffer. */
+ #define REG_STARTEND (1 << 2)
++#endif
+
+
+ /* If any error codes are removed, changed, or added, update the
+Index: git/posix/regex_internal.c
+===================================================================
+--- git.orig/posix/regex_internal.c 2014-08-29 20:00:53.264070587 -0700
++++ git/posix/regex_internal.c 2014-08-29 20:01:15.224070587 -0700
+@@ -43,8 +43,8 @@
+ int init_buf_len;
+
+ /* Ensure at least one character fits into the buffers. */
+- if (init_len < dfa->mb_cur_max)
+- init_len = dfa->mb_cur_max;
++ if (init_len < dfa_mb_cur_max (dfa))
++ init_len = dfa_mb_cur_max (dfa);
+ init_buf_len = (len + 1 < init_len) ? len + 1: init_len;
+ re_string_construct_common (str, len, pstr, trans, icase, dfa);
+
+@@ -55,7 +55,7 @@
+ pstr->word_char = dfa->word_char;
+ pstr->word_ops_used = dfa->word_ops_used;
+ pstr->mbs = pstr->mbs_allocated ? pstr->mbs : (unsigned char *) str;
+- pstr->valid_len = (pstr->mbs_allocated || dfa->mb_cur_max > 1) ? 0 : len;
++ pstr->valid_len = (pstr->mbs_allocated || dfa_mb_cur_max (dfa) > 1) ? 0 : len;
+ pstr->valid_raw_len = pstr->valid_len;
+ return REG_NOERROR;
+ }
+@@ -82,7 +82,7 @@
+ if (icase)
+ {
+ #ifdef RE_ENABLE_I18N
+- if (dfa->mb_cur_max > 1)
++ if (dfa_mb_cur_max (dfa) > 1)
+ {
+ while (1)
+ {
+@@ -91,7 +91,7 @@
+ return ret;
+ if (pstr->valid_raw_len >= len)
+ break;
+- if (pstr->bufs_len > pstr->valid_len + dfa->mb_cur_max)
++ if (pstr->bufs_len > pstr->valid_len + dfa_mb_cur_max (dfa))
+ break;
+ ret = re_string_realloc_buffers (pstr, pstr->bufs_len * 2);
+ if (BE (ret != REG_NOERROR, 0))
+@@ -105,7 +105,7 @@
+ else
+ {
+ #ifdef RE_ENABLE_I18N
+- if (dfa->mb_cur_max > 1)
++ if (dfa_mb_cur_max (dfa) > 1)
+ build_wcs_buffer (pstr);
+ else
+ #endif /* RE_ENABLE_I18N */
+@@ -130,7 +130,7 @@
+ re_string_realloc_buffers (re_string_t *pstr, int new_buf_len)
+ {
+ #ifdef RE_ENABLE_I18N
+- if (pstr->mb_cur_max > 1)
++ if (string_mb_cur_max (pstr) > 1)
+ {
+ wint_t *new_wcs;
+
+@@ -177,7 +177,7 @@
+ pstr->trans = trans;
+ pstr->icase = icase ? 1 : 0;
+ pstr->mbs_allocated = (trans != NULL || icase);
+- pstr->mb_cur_max = dfa->mb_cur_max;
++ pstr->mb_cur_max = dfa_mb_cur_max (dfa);
+ pstr->is_utf8 = dfa->is_utf8;
+ pstr->map_notascii = dfa->map_notascii;
+ pstr->stop = pstr->len;
+@@ -203,7 +203,7 @@
+ {
+ #ifdef _LIBC
+ unsigned char buf[MB_LEN_MAX];
+- assert (MB_LEN_MAX >= pstr->mb_cur_max);
++ assert (MB_LEN_MAX >= string_mb_cur_max (pstr));
+ #else
+ unsigned char buf[64];
+ #endif
+@@ -226,7 +226,7 @@
+ {
+ int i, ch;
+
+- for (i = 0; i < pstr->mb_cur_max && i < remain_len; ++i)
++ for (i = 0; i < string_mb_cur_max (pstr) && i < remain_len; ++i)
+ {
+ ch = pstr->raw_mbs [pstr->raw_mbs_idx + byte_idx + i];
+ buf[i] = pstr->mbs[byte_idx + i] = pstr->trans[ch];
+@@ -275,7 +275,7 @@
+ size_t mbclen;
+ #ifdef _LIBC
+ char buf[MB_LEN_MAX];
+- assert (MB_LEN_MAX >= pstr->mb_cur_max);
++ assert (MB_LEN_MAX >= string_mb_cur_max (pstr));
+ #else
+ char buf[64];
+ #endif
+@@ -369,7 +369,7 @@
+ {
+ int i, ch;
+
+- for (i = 0; i < pstr->mb_cur_max && i < remain_len; ++i)
++ for (i = 0; i < string_mb_cur_max (pstr) && i < remain_len; ++i)
+ {
+ ch = pstr->raw_mbs [pstr->raw_mbs_idx + src_idx + i];
+ buf[i] = pstr->trans[ch];
+@@ -567,8 +567,9 @@
+ }
+
+ /* This function re-construct the buffers.
+- Concretely, convert to wide character in case of pstr->mb_cur_max > 1,
+- convert to upper case in case of REG_ICASE, apply translation. */
++ Concretely, convert to wide character in case of
++ string_mb_cur_max (pstr) > 1, convert to upper case in case of
++ REG_ICASE, apply translation. */
+
+ static reg_errcode_t
+ internal_function __attribute_warn_unused_result__
+@@ -579,7 +580,7 @@
+ {
+ /* Reset buffer. */
+ #ifdef RE_ENABLE_I18N
+- if (pstr->mb_cur_max > 1)
++ if (string_mb_cur_max (pstr) > 1)
+ memset (&pstr->cur_state, '\0', sizeof (mbstate_t));
+ #endif /* RE_ENABLE_I18N */
+ pstr->len = pstr->raw_len;
+@@ -670,7 +671,7 @@
+ pstr->tip_context = re_string_context_at (pstr, offset - 1,
+ eflags);
+ #ifdef RE_ENABLE_I18N
+- if (pstr->mb_cur_max > 1)
++ if (string_mb_cur_max (pstr) > 1)
+ memmove (pstr->wcs, pstr->wcs + offset,
+ (pstr->valid_len - offset) * sizeof (wint_t));
+ #endif /* RE_ENABLE_I18N */
+@@ -699,7 +700,7 @@
+ #endif
+ pstr->valid_len = 0;
+ #ifdef RE_ENABLE_I18N
+- if (pstr->mb_cur_max > 1)
++ if (string_mb_cur_max (pstr) > 1)
+ {
+ int wcs_idx;
+ wint_t wc = WEOF;
+@@ -711,7 +712,7 @@
+ /* Special case UTF-8. Multi-byte chars start with any
+ byte other than 0x80 - 0xbf. */
+ raw = pstr->raw_mbs + pstr->raw_mbs_idx;
+- end = raw + (offset - pstr->mb_cur_max);
++ end = raw + (offset - string_mb_cur_max (pstr));
+ if (end < pstr->raw_mbs)
+ end = pstr->raw_mbs;
+ p = raw + offset - 1;
+@@ -803,7 +804,7 @@
+
+ /* Then build the buffers. */
+ #ifdef RE_ENABLE_I18N
+- if (pstr->mb_cur_max > 1)
++ if (string_mb_cur_max (pstr) > 1)
+ {
+ if (pstr->icase)
+ {
+@@ -841,7 +842,7 @@
+ return re_string_peek_byte (pstr, idx);
+
+ #ifdef RE_ENABLE_I18N
+- if (pstr->mb_cur_max > 1
++ if (string_mb_cur_max (pstr) > 1
+ && ! re_string_is_single_byte_char (pstr, pstr->cur_idx + idx))
+ return re_string_peek_byte (pstr, idx);
+ #endif
+@@ -930,7 +931,7 @@
+ return ((eflags & REG_NOTEOL) ? CONTEXT_ENDBUF
+ : CONTEXT_NEWLINE | CONTEXT_ENDBUF);
+ #ifdef RE_ENABLE_I18N
+- if (input->mb_cur_max > 1)
++ if (string_mb_cur_max (input) > 1)
+ {
+ wint_t wc;
+ int wc_idx = idx;
+@@ -1444,7 +1445,7 @@
+ dfa->nodes[dfa->nodes_len].constraint = 0;
+ #ifdef RE_ENABLE_I18N
+ dfa->nodes[dfa->nodes_len].accept_mb =
+- (type == OP_PERIOD && dfa->mb_cur_max > 1) || type == COMPLEX_BRACKET;
++ (type == OP_PERIOD && dfa_mb_cur_max (dfa) > 1) || type == COMPLEX_BRACKET;
+ #endif
+ dfa->nexts[dfa->nodes_len] = -1;
+ re_node_set_init_empty (dfa->edests + dfa->nodes_len);
+Index: git/posix/regex_internal.h
+===================================================================
+--- git.orig/posix/regex_internal.h 2014-08-29 20:00:53.264070587 -0700
++++ git/posix/regex_internal.h 2014-08-29 20:01:15.224070587 -0700
+@@ -26,6 +26,10 @@
+ #include <stdlib.h>
+ #include <string.h>
+
++#if defined _LIBC
++# include <gnu/option-groups.h>
++#endif
++
+ #if defined HAVE_LANGINFO_H || defined HAVE_LANGINFO_CODESET || defined _LIBC
+ # include <langinfo.h>
+ #endif
+@@ -370,6 +374,13 @@
+ };
+ typedef struct re_string_t re_string_t;
+
++/* When OPTION_EGLIBC_LOCALE_CODE is disabled, this is always 1;
++ help the compiler make use of that fact. */
++#if __OPTION_EGLIBC_LOCALE_CODE
++# define string_mb_cur_max(str) ((str)->mb_cur_max + 0)
++#else
++# define string_mb_cur_max(str) (1)
++#endif
+
+ struct re_dfa_t;
+ typedef struct re_dfa_t re_dfa_t;
+@@ -655,6 +666,14 @@
+ __libc_lock_define (, lock)
+ };
+
++/* When OPTION_EGLIBC_LOCALE_CODE is disabled, this is always 1;
++ help the compiler make use of that fact. */
++#if __OPTION_EGLIBC_LOCALE_CODE
++# define dfa_mb_cur_max(dfa) ((dfa)->mb_cur_max + 0)
++#else
++# define dfa_mb_cur_max(dfa) (1)
++#endif
++
+ #define re_node_set_init_empty(set) memset (set, '\0', sizeof (re_node_set))
+ #define re_node_set_remove(set,id) \
+ (re_node_set_remove_at (set, re_node_set_contains (set, id) - 1))
+@@ -715,7 +734,7 @@
+ re_string_char_size_at (const re_string_t *pstr, int idx)
+ {
+ int byte_idx;
+- if (pstr->mb_cur_max == 1)
++ if (string_mb_cur_max (pstr) == 1)
+ return 1;
+ for (byte_idx = 1; idx + byte_idx < pstr->valid_len; ++byte_idx)
+ if (pstr->wcs[idx + byte_idx] != WEOF)
+@@ -727,7 +746,7 @@
+ internal_function __attribute__ ((pure, unused))
+ re_string_wchar_at (const re_string_t *pstr, int idx)
+ {
+- if (pstr->mb_cur_max == 1)
++ if (string_mb_cur_max (pstr) == 1)
+ return (wint_t) pstr->mbs[idx];
+ return (wint_t) pstr->wcs[idx];
+ }
+Index: git/posix/xregex.c
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/posix/xregex.c 2014-08-29 20:01:15.228070587 -0700
+@@ -0,0 +1,8212 @@
++/* Extended regular expression matching and search library,
++ version 0.12.
++ (Implements POSIX draft P1003.2/D11.2, except for some of the
++ internationalization features.)
++
++ Copyright (C) 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001,
++ 2002, 2005 Free Software Foundation, Inc.
++ This file is part of the GNU C Library.
++
++ The GNU C Library is free software; you can redistribute it and/or
++ modify it under the terms of the GNU Lesser General Public
++ License as published by the Free Software Foundation; either
++ version 2.1 of the License, or (at your option) any later version.
++
++ The GNU C Library is distributed in the hope that it will be useful,
++ but WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ Lesser General Public License for more details.
++
++ You should have received a copy of the GNU Lesser General Public
++ License along with the GNU C Library; if not, write to the Free
++ Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
++ 02110-1301 USA. */
++
++/* AIX requires this to be the first thing in the file. */
++#if defined _AIX && !defined __GNUC__ && !defined REGEX_MALLOC
++ #pragma alloca
++#endif
++
++#undef _GNU_SOURCE
++#define _GNU_SOURCE
++
++#ifndef INSIDE_RECURSION
++# ifdef HAVE_CONFIG_H
++# include <config.h>
++# endif
++#endif
++
++/*#include <ansidecl.h>*/
++
++#ifndef INSIDE_RECURSION
++
++# if defined STDC_HEADERS && !defined emacs
++# include <stddef.h>
++# else
++/* We need this for `regex.h', and perhaps for the Emacs include files. */
++# include <sys/types.h>
++# endif
++
++# define WIDE_CHAR_SUPPORT (HAVE_WCTYPE_H && HAVE_WCHAR_H && HAVE_BTOWC)
++
++/* For platforms which support the ISO C amendment 1 functionality we
++ support user defined character classes. */
++# if WIDE_CHAR_SUPPORT
++/* Solaris 2.5 has a bug: <wchar.h> must be included before <wctype.h>. */
++# include <wchar.h>
++# include <wctype.h>
++# endif
++
++# ifdef _LIBC
++/* We have to keep the namespace clean. */
++# define regfree(preg) __regfree (preg)
++# define regexec(pr, st, nm, pm, ef) __regexec (pr, st, nm, pm, ef)
++# define regcomp(preg, pattern, cflags) __regcomp (preg, pattern, cflags)
++# define regerror(errcode, preg, errbuf, errbuf_size) \
++ __regerror(errcode, preg, errbuf, errbuf_size)
++# define re_set_registers(bu, re, nu, st, en) \
++ __re_set_registers (bu, re, nu, st, en)
++# define re_match_2(bufp, string1, size1, string2, size2, pos, regs, stop) \
++ __re_match_2 (bufp, string1, size1, string2, size2, pos, regs, stop)
++# define re_match(bufp, string, size, pos, regs) \
++ __re_match (bufp, string, size, pos, regs)
++# define re_search(bufp, string, size, startpos, range, regs) \
++ __re_search (bufp, string, size, startpos, range, regs)
++# define re_compile_pattern(pattern, length, bufp) \
++ __re_compile_pattern (pattern, length, bufp)
++# define re_set_syntax(syntax) __re_set_syntax (syntax)
++# define re_search_2(bufp, st1, s1, st2, s2, startpos, range, regs, stop) \
++ __re_search_2 (bufp, st1, s1, st2, s2, startpos, range, regs, stop)
++# define re_compile_fastmap(bufp) __re_compile_fastmap (bufp)
++
++# define btowc __btowc
++
++/* We are also using some library internals. */
++# include <locale/localeinfo.h>
++# include <locale/elem-hash.h>
++# include <langinfo.h>
++# include <locale/coll-lookup.h>
++# endif
++
++/* This is for other GNU distributions with internationalized messages. */
++# if (HAVE_LIBINTL_H && ENABLE_NLS) || defined _LIBC
++# include <libintl.h>
++# ifdef _LIBC
++# undef gettext
++# define gettext(msgid) __dcgettext ("libc", msgid, LC_MESSAGES)
++# endif
++# else
++# define gettext(msgid) (msgid)
++# endif
++
++# ifndef gettext_noop
++/* This define is so xgettext can find the internationalizable
++ strings. */
++# define gettext_noop(String) String
++# endif
++
++/* The `emacs' switch turns on certain matching commands
++ that make sense only in Emacs. */
++# ifdef emacs
++
++# include "lisp.h"
++# include "buffer.h"
++# include "syntax.h"
++
++# else /* not emacs */
++
++/* If we are not linking with Emacs proper,
++ we can't use the relocating allocator
++ even if config.h says that we can. */
++# undef REL_ALLOC
++
++# if defined STDC_HEADERS || defined _LIBC
++# include <stdlib.h>
++# else
++char *malloc ();
++char *realloc ();
++# endif
++
++/* When used in Emacs's lib-src, we need to get bzero and bcopy somehow.
++ If nothing else has been done, use the method below. */
++# ifdef INHIBIT_STRING_HEADER
++# if !(defined HAVE_BZERO && defined HAVE_BCOPY)
++# if !defined bzero && !defined bcopy
++# undef INHIBIT_STRING_HEADER
++# endif
++# endif
++# endif
++
++/* This is the normal way of making sure we have a bcopy and a bzero.
++ This is used in most programs--a few other programs avoid this
++ by defining INHIBIT_STRING_HEADER. */
++# ifndef INHIBIT_STRING_HEADER
++# if defined HAVE_STRING_H || defined STDC_HEADERS || defined _LIBC
++# include <string.h>
++# ifndef bzero
++# ifndef _LIBC
++# define bzero(s, n) (memset (s, '\0', n), (s))
++# else
++# define bzero(s, n) __bzero (s, n)
++# endif
++# endif
++# else
++# include <strings.h>
++# ifndef memcmp
++# define memcmp(s1, s2, n) bcmp (s1, s2, n)
++# endif
++# ifndef memcpy
++# define memcpy(d, s, n) (bcopy (s, d, n), (d))
++# endif
++# endif
++# endif
++
++/* Define the syntax stuff for \<, \>, etc. */
++
++/* This must be nonzero for the wordchar and notwordchar pattern
++ commands in re_match_2. */
++# ifndef Sword
++# define Sword 1
++# endif
++
++# ifdef SWITCH_ENUM_BUG
++# define SWITCH_ENUM_CAST(x) ((int)(x))
++# else
++# define SWITCH_ENUM_CAST(x) (x)
++# endif
++
++# endif /* not emacs */
++
++# if defined _LIBC || HAVE_LIMITS_H
++# include <limits.h>
++# endif
++
++# ifndef MB_LEN_MAX
++# define MB_LEN_MAX 1
++# endif
++
++/* Get the interface, including the syntax bits. */
++# include "regex.h"
++
++/* isalpha etc. are used for the character classes. */
++# include <ctype.h>
++
++/* Jim Meyering writes:
++
++ "... Some ctype macros are valid only for character codes that
++ isascii says are ASCII (SGI's IRIX-4.0.5 is one such system --when
++ using /bin/cc or gcc but without giving an ansi option). So, all
++ ctype uses should be through macros like ISPRINT... If
++ STDC_HEADERS is defined, then autoconf has verified that the ctype
++ macros don't need to be guarded with references to isascii. ...
++ Defining isascii to 1 should let any compiler worth its salt
++ eliminate the && through constant folding."
++ Solaris defines some of these symbols so we must undefine them first. */
++
++# undef ISASCII
++# if defined STDC_HEADERS || (!defined isascii && !defined HAVE_ISASCII)
++# define ISASCII(c) 1
++# else
++# define ISASCII(c) isascii(c)
++# endif
++
++# ifdef isblank
++# define ISBLANK(c) (ISASCII (c) && isblank (c))
++# else
++# define ISBLANK(c) ((c) == ' ' || (c) == '\t')
++# endif
++# ifdef isgraph
++# define ISGRAPH(c) (ISASCII (c) && isgraph (c))
++# else
++# define ISGRAPH(c) (ISASCII (c) && isprint (c) && !isspace (c))
++# endif
++
++# undef ISPRINT
++# define ISPRINT(c) (ISASCII (c) && isprint (c))
++# define ISDIGIT(c) (ISASCII (c) && isdigit (c))
++# define ISALNUM(c) (ISASCII (c) && isalnum (c))
++# define ISALPHA(c) (ISASCII (c) && isalpha (c))
++# define ISCNTRL(c) (ISASCII (c) && iscntrl (c))
++# define ISLOWER(c) (ISASCII (c) && islower (c))
++# define ISPUNCT(c) (ISASCII (c) && ispunct (c))
++# define ISSPACE(c) (ISASCII (c) && isspace (c))
++# define ISUPPER(c) (ISASCII (c) && isupper (c))
++# define ISXDIGIT(c) (ISASCII (c) && isxdigit (c))
++
++# ifdef _tolower
++# define TOLOWER(c) _tolower(c)
++# else
++# define TOLOWER(c) tolower(c)
++# endif
++
++# ifndef NULL
++# define NULL (void *)0
++# endif
++
++/* We remove any previous definition of `SIGN_EXTEND_CHAR',
++ since ours (we hope) works properly with all combinations of
++ machines, compilers, `char' and `unsigned char' argument types.
++ (Per Bothner suggested the basic approach.) */
++# undef SIGN_EXTEND_CHAR
++# if __STDC__
++# define SIGN_EXTEND_CHAR(c) ((signed char) (c))
++# else /* not __STDC__ */
++/* As in Harbison and Steele. */
++# define SIGN_EXTEND_CHAR(c) ((((unsigned char) (c)) ^ 128) - 128)
++# endif
++
++# ifndef emacs
++/* How many characters in the character set. */
++# define CHAR_SET_SIZE 256
++
++# ifdef SYNTAX_TABLE
++
++extern char *re_syntax_table;
++
++# else /* not SYNTAX_TABLE */
++
++static char re_syntax_table[CHAR_SET_SIZE];
++
++static void init_syntax_once (void);
++
++static void
++init_syntax_once (void)
++{
++ register int c;
++ static int done = 0;
++
++ if (done)
++ return;
++ bzero (re_syntax_table, sizeof re_syntax_table);
++
++ for (c = 0; c < CHAR_SET_SIZE; ++c)
++ if (ISALNUM (c))
++ re_syntax_table[c] = Sword;
++
++ re_syntax_table['_'] = Sword;
++
++ done = 1;
++}
++
++# endif /* not SYNTAX_TABLE */
++
++# define SYNTAX(c) re_syntax_table[(unsigned char) (c)]
++
++# endif /* emacs */
++
++/* Integer type for pointers. */
++# if !defined _LIBC && !defined HAVE_UINTPTR_T
++typedef unsigned long int uintptr_t;
++# endif
++
++/* Should we use malloc or alloca? If REGEX_MALLOC is not defined, we
++ use `alloca' instead of `malloc'. This is because using malloc in
++ re_search* or re_match* could cause memory leaks when C-g is used in
++ Emacs; also, malloc is slower and causes storage fragmentation. On
++ the other hand, malloc is more portable, and easier to debug.
++
++ Because we sometimes use alloca, some routines have to be macros,
++ not functions -- `alloca'-allocated space disappears at the end of the
++ function it is called in. */
++
++# ifdef REGEX_MALLOC
++
++# define REGEX_ALLOCATE malloc
++# define REGEX_REALLOCATE(source, osize, nsize) realloc (source, nsize)
++# define REGEX_FREE free
++
++# else /* not REGEX_MALLOC */
++
++/* Emacs already defines alloca, sometimes. */
++# ifndef alloca
++
++/* Make alloca work the best possible way. */
++# ifdef __GNUC__
++# define alloca __builtin_alloca
++# else /* not __GNUC__ */
++# if HAVE_ALLOCA_H
++# include <alloca.h>
++# endif /* HAVE_ALLOCA_H */
++# endif /* not __GNUC__ */
++
++# endif /* not alloca */
++
++# define REGEX_ALLOCATE alloca
++
++/* Assumes a `char *destination' variable. */
++# define REGEX_REALLOCATE(source, osize, nsize) \
++ (destination = (char *) alloca (nsize), \
++ memcpy (destination, source, osize))
++
++/* No need to do anything to free, after alloca. */
++# define REGEX_FREE(arg) ((void)0) /* Do nothing! But inhibit gcc warning. */
++
++# endif /* not REGEX_MALLOC */
++
++/* Define how to allocate the failure stack. */
++
++# if defined REL_ALLOC && defined REGEX_MALLOC
++
++# define REGEX_ALLOCATE_STACK(size) \
++ r_alloc (&failure_stack_ptr, (size))
++# define REGEX_REALLOCATE_STACK(source, osize, nsize) \
++ r_re_alloc (&failure_stack_ptr, (nsize))
++# define REGEX_FREE_STACK(ptr) \
++ r_alloc_free (&failure_stack_ptr)
++
++# else /* not using relocating allocator */
++
++# ifdef REGEX_MALLOC
++
++# define REGEX_ALLOCATE_STACK malloc
++# define REGEX_REALLOCATE_STACK(source, osize, nsize) realloc (source, nsize)
++# define REGEX_FREE_STACK free
++
++# else /* not REGEX_MALLOC */
++
++# define REGEX_ALLOCATE_STACK alloca
++
++# define REGEX_REALLOCATE_STACK(source, osize, nsize) \
++ REGEX_REALLOCATE (source, osize, nsize)
++/* No need to explicitly free anything. */
++# define REGEX_FREE_STACK(arg)
++
++# endif /* not REGEX_MALLOC */
++# endif /* not using relocating allocator */
++
++
++/* True if `size1' is non-NULL and PTR is pointing anywhere inside
++ `string1' or just past its end. This works if PTR is NULL, which is
++ a good thing. */
++# define FIRST_STRING_P(ptr) \
++ (size1 && string1 <= (ptr) && (ptr) <= string1 + size1)
++
++/* (Re)Allocate N items of type T using malloc, or fail. */
++# define TALLOC(n, t) ((t *) malloc ((n) * sizeof (t)))
++# define RETALLOC(addr, n, t) ((addr) = (t *) realloc (addr, (n) * sizeof (t)))
++# define RETALLOC_IF(addr, n, t) \
++ if (addr) RETALLOC((addr), (n), t); else (addr) = TALLOC ((n), t)
++# define REGEX_TALLOC(n, t) ((t *) REGEX_ALLOCATE ((n) * sizeof (t)))
++
++# define BYTEWIDTH 8 /* In bits. */
++
++# define STREQ(s1, s2) ((strcmp (s1, s2) == 0))
++
++# undef MAX
++# undef MIN
++# define MAX(a, b) ((a) > (b) ? (a) : (b))
++# define MIN(a, b) ((a) < (b) ? (a) : (b))
++
++typedef char boolean;
++# define false 0
++# define true 1
++
++static reg_errcode_t byte_regex_compile (const char *pattern, size_t size,
++ reg_syntax_t syntax,
++ struct re_pattern_buffer *bufp);
++
++static int byte_re_match_2_internal (struct re_pattern_buffer *bufp,
++ const char *string1, int size1,
++ const char *string2, int size2,
++ int pos,
++ struct re_registers *regs,
++ int stop);
++static int byte_re_search_2 (struct re_pattern_buffer *bufp,
++ const char *string1, int size1,
++ const char *string2, int size2,
++ int startpos, int range,
++ struct re_registers *regs, int stop);
++static int byte_re_compile_fastmap (struct re_pattern_buffer *bufp);
++
++#ifdef MBS_SUPPORT
++static reg_errcode_t wcs_regex_compile (const char *pattern, size_t size,
++ reg_syntax_t syntax,
++ struct re_pattern_buffer *bufp);
++
++
++static int wcs_re_match_2_internal (struct re_pattern_buffer *bufp,
++ const char *cstring1, int csize1,
++ const char *cstring2, int csize2,
++ int pos,
++ struct re_registers *regs,
++ int stop,
++ wchar_t *string1, int size1,
++ wchar_t *string2, int size2,
++ int *mbs_offset1, int *mbs_offset2);
++static int wcs_re_search_2 (struct re_pattern_buffer *bufp,
++ const char *string1, int size1,
++ const char *string2, int size2,
++ int startpos, int range,
++ struct re_registers *regs, int stop);
++static int wcs_re_compile_fastmap (struct re_pattern_buffer *bufp);
++#endif
++
++/* These are the command codes that appear in compiled regular
++ expressions. Some opcodes are followed by argument bytes. A
++ command code can specify any interpretation whatsoever for its
++ arguments. Zero bytes may appear in the compiled regular expression. */
++
++typedef enum
++{
++ no_op = 0,
++
++ /* Succeed right away--no more backtracking. */
++ succeed,
++
++ /* Followed by one byte giving n, then by n literal bytes. */
++ exactn,
++
++# ifdef MBS_SUPPORT
++ /* Same as exactn, but contains binary data. */
++ exactn_bin,
++# endif
++
++ /* Matches any (more or less) character. */
++ anychar,
++
++ /* Matches any one char belonging to specified set. First
++ following byte is number of bitmap bytes. Then come bytes
++ for a bitmap saying which chars are in. Bits in each byte
++ are ordered low-bit-first. A character is in the set if its
++ bit is 1. A character too large to have a bit in the map is
++ automatically not in the set. */
++ /* ifdef MBS_SUPPORT, following element is length of character
++ classes, length of collating symbols, length of equivalence
++ classes, length of character ranges, and length of characters.
++ Next, character class element, collating symbols elements,
++ equivalence class elements, range elements, and character
++ elements follow.
++ See regex_compile function. */
++ charset,
++
++ /* Same parameters as charset, but match any character that is
++ not one of those specified. */
++ charset_not,
++
++ /* Start remembering the text that is matched, for storing in a
++ register. Followed by one byte with the register number, in
++ the range 0 to one less than the pattern buffer's re_nsub
++ field. Then followed by one byte with the number of groups
++ inner to this one. (This last has to be part of the
++ start_memory only because we need it in the on_failure_jump
++ of re_match_2.) */
++ start_memory,
++
++ /* Stop remembering the text that is matched and store it in a
++ memory register. Followed by one byte with the register
++ number, in the range 0 to one less than `re_nsub' in the
++ pattern buffer, and one byte with the number of inner groups,
++ just like `start_memory'. (We need the number of inner
++ groups here because we don't have any easy way of finding the
++ corresponding start_memory when we're at a stop_memory.) */
++ stop_memory,
++
++ /* Match a duplicate of something remembered. Followed by one
++ byte containing the register number. */
++ duplicate,
++
++ /* Fail unless at beginning of line. */
++ begline,
++
++ /* Fail unless at end of line. */
++ endline,
++
++ /* Succeeds if at beginning of buffer (if emacs) or at beginning
++ of string to be matched (if not). */
++ begbuf,
++
++ /* Analogously, for end of buffer/string. */
++ endbuf,
++
++ /* Followed by two byte relative address to which to jump. */
++ jump,
++
++ /* Same as jump, but marks the end of an alternative. */
++ jump_past_alt,
++
++ /* Followed by two-byte relative address of place to resume at
++ in case of failure. */
++ /* ifdef MBS_SUPPORT, the size of address is 1. */
++ on_failure_jump,
++
++ /* Like on_failure_jump, but pushes a placeholder instead of the
++ current string position when executed. */
++ on_failure_keep_string_jump,
++
++ /* Throw away latest failure point and then jump to following
++ two-byte relative address. */
++ /* ifdef MBS_SUPPORT, the size of address is 1. */
++ pop_failure_jump,
++
++ /* Change to pop_failure_jump if know won't have to backtrack to
++ match; otherwise change to jump. This is used to jump
++ back to the beginning of a repeat. If what follows this jump
++ clearly won't match what the repeat does, such that we can be
++ sure that there is no use backtracking out of repetitions
++ already matched, then we change it to a pop_failure_jump.
++ Followed by two-byte address. */
++ /* ifdef MBS_SUPPORT, the size of address is 1. */
++ maybe_pop_jump,
++
++ /* Jump to following two-byte address, and push a dummy failure
++ point. This failure point will be thrown away if an attempt
++ is made to use it for a failure. A `+' construct makes this
++ before the first repeat. Also used as an intermediary kind
++ of jump when compiling an alternative. */
++ /* ifdef MBS_SUPPORT, the size of address is 1. */
++ dummy_failure_jump,
++
++ /* Push a dummy failure point and continue. Used at the end of
++ alternatives. */
++ push_dummy_failure,
++
++ /* Followed by two-byte relative address and two-byte number n.
++ After matching N times, jump to the address upon failure. */
++ /* ifdef MBS_SUPPORT, the size of address is 1. */
++ succeed_n,
++
++ /* Followed by two-byte relative address, and two-byte number n.
++ Jump to the address N times, then fail. */
++ /* ifdef MBS_SUPPORT, the size of address is 1. */
++ jump_n,
++
++ /* Set the following two-byte relative address to the
++ subsequent two-byte number. The address *includes* the two
++ bytes of number. */
++ /* ifdef MBS_SUPPORT, the size of address is 1. */
++ set_number_at,
++
++ wordchar, /* Matches any word-constituent character. */
++ notwordchar, /* Matches any char that is not a word-constituent. */
++
++ wordbeg, /* Succeeds if at word beginning. */
++ wordend, /* Succeeds if at word end. */
++
++ wordbound, /* Succeeds if at a word boundary. */
++ notwordbound /* Succeeds if not at a word boundary. */
++
++# ifdef emacs
++ ,before_dot, /* Succeeds if before point. */
++ at_dot, /* Succeeds if at point. */
++ after_dot, /* Succeeds if after point. */
++
++ /* Matches any character whose syntax is specified. Followed by
++ a byte which contains a syntax code, e.g., Sword. */
++ syntaxspec,
++
++ /* Matches any character whose syntax is not that specified. */
++ notsyntaxspec
++# endif /* emacs */
++} re_opcode_t;
++#endif /* not INSIDE_RECURSION */
++
++
++#ifdef BYTE
++# define CHAR_T char
++# define UCHAR_T unsigned char
++# define COMPILED_BUFFER_VAR bufp->buffer
++# define OFFSET_ADDRESS_SIZE 2
++# define PREFIX(name) byte_##name
++# define ARG_PREFIX(name) name
++# define PUT_CHAR(c) putchar (c)
++#else
++# ifdef WCHAR
++# define CHAR_T wchar_t
++# define UCHAR_T wchar_t
++# define COMPILED_BUFFER_VAR wc_buffer
++# define OFFSET_ADDRESS_SIZE 1 /* the size which STORE_NUMBER macro use */
++# define CHAR_CLASS_SIZE ((__alignof__(wctype_t)+sizeof(wctype_t))/sizeof(CHAR_T)+1)
++# define PREFIX(name) wcs_##name
++# define ARG_PREFIX(name) c##name
++/* Should we use wide stream?? */
++# define PUT_CHAR(c) printf ("%C", c);
++# define TRUE 1
++# define FALSE 0
++# else
++# ifdef MBS_SUPPORT
++# define WCHAR
++# define INSIDE_RECURSION
++# include "xregex.c"
++# undef INSIDE_RECURSION
++# endif
++# define BYTE
++# define INSIDE_RECURSION
++# include "xregex.c"
++# undef INSIDE_RECURSION
++# endif
++#endif
++
++#ifdef INSIDE_RECURSION
++/* Common operations on the compiled pattern. */
++
++/* Store NUMBER in two contiguous bytes starting at DESTINATION. */
++/* ifdef MBS_SUPPORT, we store NUMBER in 1 element. */
++
++# ifdef WCHAR
++# define STORE_NUMBER(destination, number) \
++ do { \
++ *(destination) = (UCHAR_T)(number); \
++ } while (0)
++# else /* BYTE */
++# define STORE_NUMBER(destination, number) \
++ do { \
++ (destination)[0] = (number) & 0377; \
++ (destination)[1] = (number) >> 8; \
++ } while (0)
++# endif /* WCHAR */
++
++/* Same as STORE_NUMBER, except increment DESTINATION to
++ the byte after where the number is stored. Therefore, DESTINATION
++ must be an lvalue. */
++/* ifdef MBS_SUPPORT, we store NUMBER in 1 element. */
++
++# define STORE_NUMBER_AND_INCR(destination, number) \
++ do { \
++ STORE_NUMBER (destination, number); \
++ (destination) += OFFSET_ADDRESS_SIZE; \
++ } while (0)
++
++/* Put into DESTINATION a number stored in two contiguous bytes starting
++ at SOURCE. */
++/* ifdef MBS_SUPPORT, we store NUMBER in 1 element. */
++
++# ifdef WCHAR
++# define EXTRACT_NUMBER(destination, source) \
++ do { \
++ (destination) = *(source); \
++ } while (0)
++# else /* BYTE */
++# define EXTRACT_NUMBER(destination, source) \
++ do { \
++ (destination) = *(source) & 0377; \
++ (destination) += SIGN_EXTEND_CHAR (*((source) + 1)) << 8; \
++ } while (0)
++# endif
++
++# ifdef DEBUG
++static void PREFIX(extract_number) (int *dest, UCHAR_T *source);
++static void
++PREFIX(extract_number) (int *dest, UCHAR_T *source)
++{
++# ifdef WCHAR
++ *dest = *source;
++# else /* BYTE */
++ int temp = SIGN_EXTEND_CHAR (*(source + 1));
++ *dest = *source & 0377;
++ *dest += temp << 8;
++# endif
++}
++
++# ifndef EXTRACT_MACROS /* To debug the macros. */
++# undef EXTRACT_NUMBER
++# define EXTRACT_NUMBER(dest, src) PREFIX(extract_number) (&dest, src)
++# endif /* not EXTRACT_MACROS */
++
++# endif /* DEBUG */
++
++/* Same as EXTRACT_NUMBER, except increment SOURCE to after the number.
++ SOURCE must be an lvalue. */
++
++# define EXTRACT_NUMBER_AND_INCR(destination, source) \
++ do { \
++ EXTRACT_NUMBER (destination, source); \
++ (source) += OFFSET_ADDRESS_SIZE; \
++ } while (0)
++
++# ifdef DEBUG
++static void PREFIX(extract_number_and_incr) (int *destination,
++ UCHAR_T **source);
++static void
++PREFIX(extract_number_and_incr) (int *destination, UCHAR_T **source)
++{
++ PREFIX(extract_number) (destination, *source);
++ *source += OFFSET_ADDRESS_SIZE;
++}
++
++# ifndef EXTRACT_MACROS
++# undef EXTRACT_NUMBER_AND_INCR
++# define EXTRACT_NUMBER_AND_INCR(dest, src) \
++ PREFIX(extract_number_and_incr) (&dest, &src)
++# endif /* not EXTRACT_MACROS */
++
++# endif /* DEBUG */
++
++
++
++/* If DEBUG is defined, Regex prints many voluminous messages about what
++ it is doing (if the variable `debug' is nonzero). If linked with the
++ main program in `iregex.c', you can enter patterns and strings
++ interactively. And if linked with the main program in `main.c' and
++ the other test files, you can run the already-written tests. */
++
++# ifdef DEBUG
++
++# ifndef DEFINED_ONCE
++
++/* We use standard I/O for debugging. */
++# include <stdio.h>
++
++/* It is useful to test things that ``must'' be true when debugging. */
++# include <assert.h>
++
++static int debug;
++
++# define DEBUG_STATEMENT(e) e
++# define DEBUG_PRINT1(x) if (debug) printf (x)
++# define DEBUG_PRINT2(x1, x2) if (debug) printf (x1, x2)
++# define DEBUG_PRINT3(x1, x2, x3) if (debug) printf (x1, x2, x3)
++# define DEBUG_PRINT4(x1, x2, x3, x4) if (debug) printf (x1, x2, x3, x4)
++# endif /* not DEFINED_ONCE */
++
++# define DEBUG_PRINT_COMPILED_PATTERN(p, s, e) \
++ if (debug) PREFIX(print_partial_compiled_pattern) (s, e)
++# define DEBUG_PRINT_DOUBLE_STRING(w, s1, sz1, s2, sz2) \
++ if (debug) PREFIX(print_double_string) (w, s1, sz1, s2, sz2)
++
++
++/* Print the fastmap in human-readable form. */
++
++# ifndef DEFINED_ONCE
++void
++print_fastmap (char *fastmap)
++{
++ unsigned was_a_range = 0;
++ unsigned i = 0;
++
++ while (i < (1 << BYTEWIDTH))
++ {
++ if (fastmap[i++])
++ {
++ was_a_range = 0;
++ putchar (i - 1);
++ while (i < (1 << BYTEWIDTH) && fastmap[i])
++ {
++ was_a_range = 1;
++ i++;
++ }
++ if (was_a_range)
++ {
++ printf ("-");
++ putchar (i - 1);
++ }
++ }
++ }
++ putchar ('\n');
++}
++# endif /* not DEFINED_ONCE */
++
++
++/* Print a compiled pattern string in human-readable form, starting at
++ the START pointer into it and ending just before the pointer END. */
++
++void
++PREFIX(print_partial_compiled_pattern) (UCHAR_T *start, UCHAR_T *end)
++{
++ int mcnt, mcnt2;
++ UCHAR_T *p1;
++ UCHAR_T *p = start;
++ UCHAR_T *pend = end;
++
++ if (start == NULL)
++ {
++ printf ("(null)\n");
++ return;
++ }
++
++ /* Loop over pattern commands. */
++ while (p < pend)
++ {
++# ifdef _LIBC
++ printf ("%td:\t", p - start);
++# else
++ printf ("%ld:\t", (long int) (p - start));
++# endif
++
++ switch ((re_opcode_t) *p++)
++ {
++ case no_op:
++ printf ("/no_op");
++ break;
++
++ case exactn:
++ mcnt = *p++;
++ printf ("/exactn/%d", mcnt);
++ do
++ {
++ putchar ('/');
++ PUT_CHAR (*p++);
++ }
++ while (--mcnt);
++ break;
++
++# ifdef MBS_SUPPORT
++ case exactn_bin:
++ mcnt = *p++;
++ printf ("/exactn_bin/%d", mcnt);
++ do
++ {
++ printf("/%lx", (long int) *p++);
++ }
++ while (--mcnt);
++ break;
++# endif /* MBS_SUPPORT */
++
++ case start_memory:
++ mcnt = *p++;
++ printf ("/start_memory/%d/%ld", mcnt, (long int) *p++);
++ break;
++
++ case stop_memory:
++ mcnt = *p++;
++ printf ("/stop_memory/%d/%ld", mcnt, (long int) *p++);
++ break;
++
++ case duplicate:
++ printf ("/duplicate/%ld", (long int) *p++);
++ break;
++
++ case anychar:
++ printf ("/anychar");
++ break;
++
++ case charset:
++ case charset_not:
++ {
++# ifdef WCHAR
++ int i, length;
++ wchar_t *workp = p;
++ printf ("/charset [%s",
++ (re_opcode_t) *(workp - 1) == charset_not ? "^" : "");
++ p += 5;
++ length = *workp++; /* the length of char_classes */
++ for (i=0 ; i<length ; i++)
++ printf("[:%lx:]", (long int) *p++);
++ length = *workp++; /* the length of collating_symbol */
++ for (i=0 ; i<length ;)
++ {
++ printf("[.");
++ while(*p != 0)
++ PUT_CHAR((i++,*p++));
++ i++,p++;
++ printf(".]");
++ }
++ length = *workp++; /* the length of equivalence_class */
++ for (i=0 ; i<length ;)
++ {
++ printf("[=");
++ while(*p != 0)
++ PUT_CHAR((i++,*p++));
++ i++,p++;
++ printf("=]");
++ }
++ length = *workp++; /* the length of char_range */
++ for (i=0 ; i<length ; i++)
++ {
++ wchar_t range_start = *p++;
++ wchar_t range_end = *p++;
++ printf("%C-%C", range_start, range_end);
++ }
++ length = *workp++; /* the length of char */
++ for (i=0 ; i<length ; i++)
++ printf("%C", *p++);
++ putchar (']');
++# else
++ register int c, last = -100;
++ register int in_range = 0;
++
++ printf ("/charset [%s",
++ (re_opcode_t) *(p - 1) == charset_not ? "^" : "");
++
++ assert (p + *p < pend);
++
++ for (c = 0; c < 256; c++)
++ if (c / 8 < *p
++ && (p[1 + (c/8)] & (1 << (c % 8))))
++ {
++ /* Are we starting a range? */
++ if (last + 1 == c && ! in_range)
++ {
++ putchar ('-');
++ in_range = 1;
++ }
++ /* Have we broken a range? */
++ else if (last + 1 != c && in_range)
++ {
++ putchar (last);
++ in_range = 0;
++ }
++
++ if (! in_range)
++ putchar (c);
++
++ last = c;
++ }
++
++ if (in_range)
++ putchar (last);
++
++ putchar (']');
++
++ p += 1 + *p;
++# endif /* WCHAR */
++ }
++ break;
++
++ case begline:
++ printf ("/begline");
++ break;
++
++ case endline:
++ printf ("/endline");
++ break;
++
++ case on_failure_jump:
++ PREFIX(extract_number_and_incr) (&mcnt, &p);
++# ifdef _LIBC
++ printf ("/on_failure_jump to %td", p + mcnt - start);
++# else
++ printf ("/on_failure_jump to %ld", (long int) (p + mcnt - start));
++# endif
++ break;
++
++ case on_failure_keep_string_jump:
++ PREFIX(extract_number_and_incr) (&mcnt, &p);
++# ifdef _LIBC
++ printf ("/on_failure_keep_string_jump to %td", p + mcnt - start);
++# else
++ printf ("/on_failure_keep_string_jump to %ld",
++ (long int) (p + mcnt - start));
++# endif
++ break;
++
++ case dummy_failure_jump:
++ PREFIX(extract_number_and_incr) (&mcnt, &p);
++# ifdef _LIBC
++ printf ("/dummy_failure_jump to %td", p + mcnt - start);
++# else
++ printf ("/dummy_failure_jump to %ld", (long int) (p + mcnt - start));
++# endif
++ break;
++
++ case push_dummy_failure:
++ printf ("/push_dummy_failure");
++ break;
++
++ case maybe_pop_jump:
++ PREFIX(extract_number_and_incr) (&mcnt, &p);
++# ifdef _LIBC
++ printf ("/maybe_pop_jump to %td", p + mcnt - start);
++# else
++ printf ("/maybe_pop_jump to %ld", (long int) (p + mcnt - start));
++# endif
++ break;
++
++ case pop_failure_jump:
++ PREFIX(extract_number_and_incr) (&mcnt, &p);
++# ifdef _LIBC
++ printf ("/pop_failure_jump to %td", p + mcnt - start);
++# else
++ printf ("/pop_failure_jump to %ld", (long int) (p + mcnt - start));
++# endif
++ break;
++
++ case jump_past_alt:
++ PREFIX(extract_number_and_incr) (&mcnt, &p);
++# ifdef _LIBC
++ printf ("/jump_past_alt to %td", p + mcnt - start);
++# else
++ printf ("/jump_past_alt to %ld", (long int) (p + mcnt - start));
++# endif
++ break;
++
++ case jump:
++ PREFIX(extract_number_and_incr) (&mcnt, &p);
++# ifdef _LIBC
++ printf ("/jump to %td", p + mcnt - start);
++# else
++ printf ("/jump to %ld", (long int) (p + mcnt - start));
++# endif
++ break;
++
++ case succeed_n:
++ PREFIX(extract_number_and_incr) (&mcnt, &p);
++ p1 = p + mcnt;
++ PREFIX(extract_number_and_incr) (&mcnt2, &p);
++# ifdef _LIBC
++ printf ("/succeed_n to %td, %d times", p1 - start, mcnt2);
++# else
++ printf ("/succeed_n to %ld, %d times",
++ (long int) (p1 - start), mcnt2);
++# endif
++ break;
++
++ case jump_n:
++ PREFIX(extract_number_and_incr) (&mcnt, &p);
++ p1 = p + mcnt;
++ PREFIX(extract_number_and_incr) (&mcnt2, &p);
++ printf ("/jump_n to %d, %d times", p1 - start, mcnt2);
++ break;
++
++ case set_number_at:
++ PREFIX(extract_number_and_incr) (&mcnt, &p);
++ p1 = p + mcnt;
++ PREFIX(extract_number_and_incr) (&mcnt2, &p);
++# ifdef _LIBC
++ printf ("/set_number_at location %td to %d", p1 - start, mcnt2);
++# else
++ printf ("/set_number_at location %ld to %d",
++ (long int) (p1 - start), mcnt2);
++# endif
++ break;
++
++ case wordbound:
++ printf ("/wordbound");
++ break;
++
++ case notwordbound:
++ printf ("/notwordbound");
++ break;
++
++ case wordbeg:
++ printf ("/wordbeg");
++ break;
++
++ case wordend:
++ printf ("/wordend");
++ break;
++
++# ifdef emacs
++ case before_dot:
++ printf ("/before_dot");
++ break;
++
++ case at_dot:
++ printf ("/at_dot");
++ break;
++
++ case after_dot:
++ printf ("/after_dot");
++ break;
++
++ case syntaxspec:
++ printf ("/syntaxspec");
++ mcnt = *p++;
++ printf ("/%d", mcnt);
++ break;
++
++ case notsyntaxspec:
++ printf ("/notsyntaxspec");
++ mcnt = *p++;
++ printf ("/%d", mcnt);
++ break;
++# endif /* emacs */
++
++ case wordchar:
++ printf ("/wordchar");
++ break;
++
++ case notwordchar:
++ printf ("/notwordchar");
++ break;
++
++ case begbuf:
++ printf ("/begbuf");
++ break;
++
++ case endbuf:
++ printf ("/endbuf");
++ break;
++
++ default:
++ printf ("?%ld", (long int) *(p-1));
++ }
++
++ putchar ('\n');
++ }
++
++# ifdef _LIBC
++ printf ("%td:\tend of pattern.\n", p - start);
++# else
++ printf ("%ld:\tend of pattern.\n", (long int) (p - start));
++# endif
++}
++
++
++void
++PREFIX(print_compiled_pattern) (struct re_pattern_buffer *bufp)
++{
++ UCHAR_T *buffer = (UCHAR_T*) bufp->buffer;
++
++ PREFIX(print_partial_compiled_pattern) (buffer, buffer
++ + bufp->used / sizeof(UCHAR_T));
++ printf ("%ld bytes used/%ld bytes allocated.\n",
++ bufp->used, bufp->allocated);
++
++ if (bufp->fastmap_accurate && bufp->fastmap)
++ {
++ printf ("fastmap: ");
++ print_fastmap (bufp->fastmap);
++ }
++
++# ifdef _LIBC
++ printf ("re_nsub: %Zd\t", bufp->re_nsub);
++# else
++ printf ("re_nsub: %ld\t", (long int) bufp->re_nsub);
++# endif
++ printf ("regs_alloc: %d\t", bufp->regs_allocated);
++ printf ("can_be_null: %d\t", bufp->can_be_null);
++ printf ("newline_anchor: %d\n", bufp->newline_anchor);
++ printf ("no_sub: %d\t", bufp->no_sub);
++ printf ("not_bol: %d\t", bufp->not_bol);
++ printf ("not_eol: %d\t", bufp->not_eol);
++ printf ("syntax: %lx\n", bufp->syntax);
++ /* Perhaps we should print the translate table? */
++}
++
++
++void
++PREFIX(print_double_string) (const CHAR_T *where, const CHAR_T *string1,
++ int size1, const CHAR_T *string2, int size2)
++{
++ int this_char;
++
++ if (where == NULL)
++ printf ("(null)");
++ else
++ {
++ int cnt;
++
++ if (FIRST_STRING_P (where))
++ {
++ for (this_char = where - string1; this_char < size1; this_char++)
++ PUT_CHAR (string1[this_char]);
++
++ where = string2;
++ }
++
++ cnt = 0;
++ for (this_char = where - string2; this_char < size2; this_char++)
++ {
++ PUT_CHAR (string2[this_char]);
++ if (++cnt > 100)
++ {
++ fputs ("...", stdout);
++ break;
++ }
++ }
++ }
++}
++
++# ifndef DEFINED_ONCE
++void
++printchar (int c)
++{
++ putc (c, stderr);
++}
++# endif
++
++# else /* not DEBUG */
++
++# ifndef DEFINED_ONCE
++# undef assert
++# define assert(e)
++
++# define DEBUG_STATEMENT(e)
++# define DEBUG_PRINT1(x)
++# define DEBUG_PRINT2(x1, x2)
++# define DEBUG_PRINT3(x1, x2, x3)
++# define DEBUG_PRINT4(x1, x2, x3, x4)
++# endif /* not DEFINED_ONCE */
++# define DEBUG_PRINT_COMPILED_PATTERN(p, s, e)
++# define DEBUG_PRINT_DOUBLE_STRING(w, s1, sz1, s2, sz2)
++
++# endif /* not DEBUG */
++
++
++
++# ifdef WCHAR
++/* Convert a multibyte string to a wide character string, recording
++   the byte/character correspondences in offset_buffer (see below)
++   and recording in is_binary whether each wchar_t is binary data.
++   Invalid multibyte sequences are treated as binary data.
++   offset_buffer and is_binary are assumed to already have enough
++   space allocated. */
++
++static size_t convert_mbs_to_wcs (CHAR_T *dest, const unsigned char* src,
++ size_t len, int *offset_buffer,
++ char *is_binary);
++static size_t
++convert_mbs_to_wcs (CHAR_T *dest, const unsigned char*src, size_t len,
++ int *offset_buffer, char *is_binary)
++ /* offset_buffer holds the correspondences between src (the char
++    string) and dest (the wchar_t string), for optimization.
++    e.g. src = "xxxyzz"
++         dest = {'X', 'Y', 'Z'}
++          (each of "xxx", "y" and "zz" represents one multibyte
++           character, corresponding to 'X', 'Y' and 'Z'.)
++    offset_buffer = {0, 0+3("xxx"), 0+3+1("y"), 0+3+1+2("zz")}
++                  = {0, 3, 4, 6}
++  */
++{
++ wchar_t *pdest = dest;
++ const unsigned char *psrc = src;
++ size_t wc_count = 0;
++
++ mbstate_t mbs;
++ int i, consumed;
++ size_t mb_remain = len;
++ size_t mb_count = 0;
++
++ /* Initialize the conversion state. */
++ memset (&mbs, 0, sizeof (mbstate_t));
++
++ offset_buffer[0] = 0;
++ for( ; mb_remain > 0 ; ++wc_count, ++pdest, mb_remain -= consumed,
++ psrc += consumed)
++ {
++#ifdef _LIBC
++ consumed = __mbrtowc (pdest, psrc, mb_remain, &mbs);
++#else
++ consumed = mbrtowc (pdest, psrc, mb_remain, &mbs);
++#endif
++
++ if (consumed <= 0)
++	/* Failed to convert; src may contain binary data,
++	   so consume one byte manually. */
++ {
++ *pdest = *psrc;
++ consumed = 1;
++ is_binary[wc_count] = TRUE;
++ }
++ else
++ is_binary[wc_count] = FALSE;
++      /* In SJIS encoding, the yen sign is used as the escape character
++	 in place of the reverse solidus.  So convert 0x5c (yen sign in
++	 SJIS) to 0x5c (reverse solidus in UCS2) rather than 0xa5
++	 (yen sign in UCS2). */
++ if (consumed == 1 && (int) *psrc == 0x5c && (int) *pdest == 0xa5)
++ *pdest = (wchar_t) *psrc;
++
++ offset_buffer[wc_count + 1] = mb_count += consumed;
++ }
++
++  /* Fill the remainder of the buffer with the sentinel. */
++ for (i = wc_count + 1 ; i <= len ; i++)
++ offset_buffer[i] = mb_count + 1;
++
++ return wc_count;
++}
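++
++/* A worked example of the offset bookkeeping above, assuming a plain
++   ASCII input: with src = "abc" and len = 3 every byte converts to a
++   single wide character, so the call returns wc_count = 3 with
++   dest = {'a', 'b', 'c'}, offset_buffer = {0, 1, 2, 3}, and every
++   is_binary entry FALSE.  Callers size offset_buffer and is_binary
++   with len + 1 elements (see regex_compile below). */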
++
++# endif /* WCHAR */
++
++#else /* not INSIDE_RECURSION */
++
++/* Set by `re_set_syntax' to the current regexp syntax to recognize. Can
++ also be assigned to arbitrarily: each pattern buffer stores its own
++ syntax, so it can be changed between regex compilations. */
++/* This has no initializer because initialized variables in Emacs
++ become read-only after dumping. */
++reg_syntax_t re_syntax_options;
++
++
++/* Specify the precise syntax of regexps for compilation. This provides
++ for compatibility for various utilities which historically have
++ different, incompatible syntaxes.
++
++ The argument SYNTAX is a bit mask comprised of the various bits
++ defined in regex.h. We return the old syntax. */
++
++reg_syntax_t
++re_set_syntax (reg_syntax_t syntax)
++{
++ reg_syntax_t ret = re_syntax_options;
++
++ re_syntax_options = syntax;
++# ifdef DEBUG
++ if (syntax & RE_DEBUG)
++ debug = 1;
++ else if (debug) /* was on but now is not */
++ debug = 0;
++# endif /* DEBUG */
++ return ret;
++}
++# ifdef _LIBC
++weak_alias (__re_set_syntax, re_set_syntax)
++# endif
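++
++/* A minimal usage sketch (illustrative only): the return value lets a
++   caller save and later restore the global syntax, e.g.
++
++     reg_syntax_t old = re_set_syntax (RE_SYNTAX_POSIX_EXTENDED);
++     ... compile patterns with POSIX extended syntax ...
++     re_set_syntax (old);
++
++   RE_SYNTAX_POSIX_EXTENDED is one of the syntax bit masks defined in
++   regex.h. */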
++
++/* This table gives an error message for each of the error codes listed
++   in regex.h.  Obviously the order here has to be the same as there.
++ POSIX doesn't require that we do anything for REG_NOERROR,
++ but why not be nice? */
++
++static const char *re_error_msgid[] =
++ {
++ gettext_noop ("Success"), /* REG_NOERROR */
++ gettext_noop ("No match"), /* REG_NOMATCH */
++ gettext_noop ("Invalid regular expression"), /* REG_BADPAT */
++ gettext_noop ("Invalid collation character"), /* REG_ECOLLATE */
++ gettext_noop ("Invalid character class name"), /* REG_ECTYPE */
++ gettext_noop ("Trailing backslash"), /* REG_EESCAPE */
++ gettext_noop ("Invalid back reference"), /* REG_ESUBREG */
++ gettext_noop ("Unmatched [ or [^"), /* REG_EBRACK */
++ gettext_noop ("Unmatched ( or \\("), /* REG_EPAREN */
++ gettext_noop ("Unmatched \\{"), /* REG_EBRACE */
++ gettext_noop ("Invalid content of \\{\\}"), /* REG_BADBR */
++ gettext_noop ("Invalid range end"), /* REG_ERANGE */
++ gettext_noop ("Memory exhausted"), /* REG_ESPACE */
++ gettext_noop ("Invalid preceding regular expression"), /* REG_BADRPT */
++ gettext_noop ("Premature end of regular expression"), /* REG_EEND */
++ gettext_noop ("Regular expression too big"), /* REG_ESIZE */
++ gettext_noop ("Unmatched ) or \\)") /* REG_ERPAREN */
++ };
++
++#endif /* INSIDE_RECURSION */
++
++#ifndef DEFINED_ONCE
++/* Avoiding alloca during matching, to placate r_alloc. */
++
++/* Define MATCH_MAY_ALLOCATE unless we need to make sure that the
++ searching and matching functions should not call alloca. On some
++ systems, alloca is implemented in terms of malloc, and if we're
++ using the relocating allocator routines, then malloc could cause a
++ relocation, which might (if the strings being searched are in the
++ ralloc heap) shift the data out from underneath the regexp
++ routines.
++
++ Here's another reason to avoid allocation: Emacs
++ processes input from X in a signal handler; processing X input may
++ call malloc; if input arrives while a matching routine is calling
++ malloc, then we're scrod. But Emacs can't just block input while
++ calling matching routines; then we don't notice interrupts when
++ they come in. So, Emacs blocks input around all regexp calls
++ except the matching calls, which it leaves unprotected, in the
++ faith that they will not malloc. */
++
++/* Normally, this is fine. */
++# define MATCH_MAY_ALLOCATE
++
++/* When using GNU C, we are not REALLY using the C alloca, no matter
++ what config.h may say. So don't take precautions for it. */
++# ifdef __GNUC__
++# undef C_ALLOCA
++# endif
++
++/* The match routines may not allocate if (1) they would do it with malloc
++ and (2) it's not safe for them to use malloc.
++ Note that if REL_ALLOC is defined, matching would not use malloc for the
++ failure stack, but we would still use it for the register vectors;
++ so REL_ALLOC should not affect this. */
++# if (defined C_ALLOCA || defined REGEX_MALLOC) && defined emacs
++# undef MATCH_MAY_ALLOCATE
++# endif
++#endif /* not DEFINED_ONCE */
++
++#ifdef INSIDE_RECURSION
++/* Failure stack declarations and macros; both re_compile_fastmap and
++ re_match_2 use a failure stack. These have to be macros because of
++ REGEX_ALLOCATE_STACK. */
++
++
++/* Number of failure points for which to initially allocate space
++ when matching. If this number is exceeded, we allocate more
++ space, so it is not a hard limit. */
++# ifndef INIT_FAILURE_ALLOC
++# define INIT_FAILURE_ALLOC 5
++# endif
++
++/* Roughly the maximum number of failure points on the stack. Would be
++   exactly that if we always used MAX_FAILURE_ITEMS items each time we failed.
++ This is a variable only so users of regex can assign to it; we never
++ change it ourselves. */
++
++
++# ifndef DEFINED_ONCE
++
++# ifdef INT_IS_16BIT
++# define RE_M_F_TYPE long int
++# else
++# define RE_M_F_TYPE int
++# endif /* INT_IS_16BIT */
++
++# ifdef MATCH_MAY_ALLOCATE
++/* 4400 was enough to cause a crash on Alpha OSF/1,
++ whose default stack limit is 2mb. */
++# define RE_M_F_DEFAULT 4000
++# else
++# define RE_M_F_DEFAULT 2000
++# endif /* MATCH_MAY_ALLOCATE */
++
++# include <shlib-compat.h>
++
++# if SHLIB_COMPAT (libc, GLIBC_2_0, GLIBC_2_3)
++link_warning (re_max_failures, "the 're_max_failures' variable is obsolete and will go away.")
++RE_M_F_TYPE re_max_failures = RE_M_F_DEFAULT;
++# else
++RE_M_F_TYPE re_max_failures attribute_hidden = RE_M_F_DEFAULT;
++# endif /* SHLIB_COMPAT */
++
++# undef RE_M_F_TYPE
++# undef RE_M_F_DEFAULT
++
++# endif /* DEFINED_ONCE */
++
++# ifdef INT_IS_16BIT
++
++union PREFIX(fail_stack_elt)
++{
++ UCHAR_T *pointer;
++ long int integer;
++};
++
++typedef union PREFIX(fail_stack_elt) PREFIX(fail_stack_elt_t);
++
++typedef struct
++{
++ PREFIX(fail_stack_elt_t) *stack;
++ unsigned long int size;
++ unsigned long int avail; /* Offset of next open position. */
++} PREFIX(fail_stack_type);
++
++# else /* not INT_IS_16BIT */
++
++union PREFIX(fail_stack_elt)
++{
++ UCHAR_T *pointer;
++ int integer;
++};
++
++typedef union PREFIX(fail_stack_elt) PREFIX(fail_stack_elt_t);
++
++typedef struct
++{
++ PREFIX(fail_stack_elt_t) *stack;
++ unsigned size;
++ unsigned avail; /* Offset of next open position. */
++} PREFIX(fail_stack_type);
++
++# endif /* INT_IS_16BIT */
++
++# ifndef DEFINED_ONCE
++# define FAIL_STACK_EMPTY() (fail_stack.avail == 0)
++# define FAIL_STACK_PTR_EMPTY() (fail_stack_ptr->avail == 0)
++# define FAIL_STACK_FULL() (fail_stack.avail == fail_stack.size)
++# endif
++
++
++/* Define macros to initialize and free the failure stack.
++ Do `return -2' if the alloc fails. */
++
++# ifdef MATCH_MAY_ALLOCATE
++# define INIT_FAIL_STACK() \
++ do { \
++ fail_stack.stack = (PREFIX(fail_stack_elt_t) *) \
++ REGEX_ALLOCATE_STACK (INIT_FAILURE_ALLOC * sizeof (PREFIX(fail_stack_elt_t))); \
++ \
++ if (fail_stack.stack == NULL) \
++ return -2; \
++ \
++ fail_stack.size = INIT_FAILURE_ALLOC; \
++ fail_stack.avail = 0; \
++ } while (0)
++
++# define RESET_FAIL_STACK() REGEX_FREE_STACK (fail_stack.stack)
++# else
++# define INIT_FAIL_STACK() \
++ do { \
++ fail_stack.avail = 0; \
++ } while (0)
++
++# define RESET_FAIL_STACK()
++# endif
++
++
++/* Double the size of FAIL_STACK, up to approximately `re_max_failures' items.
++
++   Return 1 if it succeeds, and 0 if it either ran out of memory
++   allocating space or the stack was already too large.
++
++ REGEX_REALLOCATE_STACK requires `destination' be declared. */
++
++# define DOUBLE_FAIL_STACK(fail_stack) \
++ ((fail_stack).size > (unsigned) (re_max_failures * MAX_FAILURE_ITEMS) \
++ ? 0 \
++ : ((fail_stack).stack = (PREFIX(fail_stack_elt_t) *) \
++ REGEX_REALLOCATE_STACK ((fail_stack).stack, \
++ (fail_stack).size * sizeof (PREFIX(fail_stack_elt_t)), \
++ ((fail_stack).size << 1) * sizeof (PREFIX(fail_stack_elt_t))),\
++ \
++ (fail_stack).stack == NULL \
++ ? 0 \
++ : ((fail_stack).size <<= 1, \
++ 1)))
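++
++/* Growth sketch, under the definitions above: a stack that starts at
++   INIT_FAILURE_ALLOC (5) entries doubles to 10, 20, 40, ... on each
++   call, until either REGEX_REALLOCATE_STACK fails (yielding 0) or the
++   size already exceeds re_max_failures * MAX_FAILURE_ITEMS, in which
++   case the macro also yields 0 and the caller treats the push as
++   failed. */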
++
++
++/* Push pointer POINTER on FAIL_STACK.
++   Return 1 if we were able to do so and 0 if we ran out of memory
++   allocating space to do so. */
++# define PUSH_PATTERN_OP(POINTER, FAIL_STACK) \
++ ((FAIL_STACK_FULL () \
++ && !DOUBLE_FAIL_STACK (FAIL_STACK)) \
++ ? 0 \
++ : ((FAIL_STACK).stack[(FAIL_STACK).avail++].pointer = POINTER, \
++ 1))
++
++/* Push a pointer value onto the failure stack.
++ Assumes the variable `fail_stack'. Probably should only
++ be called from within `PUSH_FAILURE_POINT'. */
++# define PUSH_FAILURE_POINTER(item) \
++ fail_stack.stack[fail_stack.avail++].pointer = (UCHAR_T *) (item)
++
++/* This pushes an integer-valued item onto the failure stack.
++ Assumes the variable `fail_stack'. Probably should only
++ be called from within `PUSH_FAILURE_POINT'. */
++# define PUSH_FAILURE_INT(item) \
++ fail_stack.stack[fail_stack.avail++].integer = (item)
++
++/* Push a fail_stack_elt_t value onto the failure stack.
++ Assumes the variable `fail_stack'. Probably should only
++ be called from within `PUSH_FAILURE_POINT'. */
++# define PUSH_FAILURE_ELT(item) \
++ fail_stack.stack[fail_stack.avail++] = (item)
++
++/* These three POP... operations complement the three PUSH... operations.
++ All assume that `fail_stack' is nonempty. */
++# define POP_FAILURE_POINTER() fail_stack.stack[--fail_stack.avail].pointer
++# define POP_FAILURE_INT() fail_stack.stack[--fail_stack.avail].integer
++# define POP_FAILURE_ELT() fail_stack.stack[--fail_stack.avail]
++
++/* Used to omit pushing failure point id's when we're not debugging. */
++# ifdef DEBUG
++# define DEBUG_PUSH PUSH_FAILURE_INT
++# define DEBUG_POP(item_addr) *(item_addr) = POP_FAILURE_INT ()
++# else
++# define DEBUG_PUSH(item)
++# define DEBUG_POP(item_addr)
++# endif
++
++
++/* Push the information about the state we will need
++ if we ever fail back to it.
++
++   Requires that the variables fail_stack, regstart, regend, reg_info, and
++   num_regs_pushed be declared.  DOUBLE_FAIL_STACK requires `destination'
++   to be declared.
++
++   Does `return FAILURE_CODE' if it runs out of memory.  */
++
++# define PUSH_FAILURE_POINT(pattern_place, string_place, failure_code) \
++ do { \
++ char *destination; \
++ /* Must be int, so when we don't save any registers, the arithmetic \
++ of 0 + -1 isn't done as unsigned. */ \
++ /* Can't be int, since there is not a shred of a guarantee that int \
++ is wide enough to hold a value of something to which pointer can \
++ be assigned */ \
++ active_reg_t this_reg; \
++ \
++ DEBUG_STATEMENT (failure_id++); \
++ DEBUG_STATEMENT (nfailure_points_pushed++); \
++ DEBUG_PRINT2 ("\nPUSH_FAILURE_POINT #%u:\n", failure_id); \
++ DEBUG_PRINT2 (" Before push, next avail: %d\n", (fail_stack).avail);\
++ DEBUG_PRINT2 (" size: %d\n", (fail_stack).size);\
++ \
++ DEBUG_PRINT2 (" slots needed: %ld\n", NUM_FAILURE_ITEMS); \
++ DEBUG_PRINT2 (" available: %d\n", REMAINING_AVAIL_SLOTS); \
++ \
++ /* Ensure we have enough space allocated for what we will push. */ \
++ while (REMAINING_AVAIL_SLOTS < NUM_FAILURE_ITEMS) \
++ { \
++ if (!DOUBLE_FAIL_STACK (fail_stack)) \
++ return failure_code; \
++ \
++ DEBUG_PRINT2 ("\n Doubled stack; size now: %d\n", \
++ (fail_stack).size); \
++ DEBUG_PRINT2 (" slots available: %d\n", REMAINING_AVAIL_SLOTS);\
++ } \
++ \
++ /* Push the info, starting with the registers. */ \
++ DEBUG_PRINT1 ("\n"); \
++ \
++ if (1) \
++ for (this_reg = lowest_active_reg; this_reg <= highest_active_reg; \
++ this_reg++) \
++ { \
++ DEBUG_PRINT2 (" Pushing reg: %lu\n", this_reg); \
++ DEBUG_STATEMENT (num_regs_pushed++); \
++ \
++ DEBUG_PRINT2 (" start: %p\n", regstart[this_reg]); \
++ PUSH_FAILURE_POINTER (regstart[this_reg]); \
++ \
++ DEBUG_PRINT2 (" end: %p\n", regend[this_reg]); \
++ PUSH_FAILURE_POINTER (regend[this_reg]); \
++ \
++ DEBUG_PRINT2 (" info: %p\n ", \
++ reg_info[this_reg].word.pointer); \
++ DEBUG_PRINT2 (" match_null=%d", \
++ REG_MATCH_NULL_STRING_P (reg_info[this_reg])); \
++ DEBUG_PRINT2 (" active=%d", IS_ACTIVE (reg_info[this_reg])); \
++ DEBUG_PRINT2 (" matched_something=%d", \
++ MATCHED_SOMETHING (reg_info[this_reg])); \
++ DEBUG_PRINT2 (" ever_matched=%d", \
++ EVER_MATCHED_SOMETHING (reg_info[this_reg])); \
++ DEBUG_PRINT1 ("\n"); \
++ PUSH_FAILURE_ELT (reg_info[this_reg].word); \
++ } \
++ \
++ DEBUG_PRINT2 (" Pushing low active reg: %ld\n", lowest_active_reg);\
++ PUSH_FAILURE_INT (lowest_active_reg); \
++ \
++ DEBUG_PRINT2 (" Pushing high active reg: %ld\n", highest_active_reg);\
++ PUSH_FAILURE_INT (highest_active_reg); \
++ \
++ DEBUG_PRINT2 (" Pushing pattern %p:\n", pattern_place); \
++ DEBUG_PRINT_COMPILED_PATTERN (bufp, pattern_place, pend); \
++ PUSH_FAILURE_POINTER (pattern_place); \
++ \
++ DEBUG_PRINT2 (" Pushing string %p: `", string_place); \
++ DEBUG_PRINT_DOUBLE_STRING (string_place, string1, size1, string2, \
++ size2); \
++ DEBUG_PRINT1 ("'\n"); \
++ PUSH_FAILURE_POINTER (string_place); \
++ \
++ DEBUG_PRINT2 (" Pushing failure id: %u\n", failure_id); \
++ DEBUG_PUSH (failure_id); \
++ } while (0)
++
++# ifndef DEFINED_ONCE
++/* This is the number of items that are pushed and popped on the stack
++ for each register. */
++# define NUM_REG_ITEMS 3
++
++/* Individual items aside from the registers. */
++# ifdef DEBUG
++# define NUM_NONREG_ITEMS 5 /* Includes failure point id. */
++# else
++# define NUM_NONREG_ITEMS 4
++# endif
++
++/* We push at most this many items on the stack. */
++/* We used to use (num_regs - 1), which is the number of registers
++ this regexp will save; but that was changed to 5
++ to avoid stack overflow for a regexp with lots of parens. */
++# define MAX_FAILURE_ITEMS (5 * NUM_REG_ITEMS + NUM_NONREG_ITEMS)
++
++/* We actually push this many items. */
++# define NUM_FAILURE_ITEMS \
++ (((0 \
++ ? 0 : highest_active_reg - lowest_active_reg + 1) \
++ * NUM_REG_ITEMS) \
++ + NUM_NONREG_ITEMS)
++
++/* How many items can still be added to the stack without overflowing it. */
++# define REMAINING_AVAIL_SLOTS ((fail_stack).size - (fail_stack).avail)
++# endif /* not DEFINED_ONCE */
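++
++/* Worked numbers for a non-DEBUG build, using the definitions above:
++   NUM_REG_ITEMS is 3 and NUM_NONREG_ITEMS is 4, so MAX_FAILURE_ITEMS
++   is 5 * 3 + 4 = 19.  With, say, two active registers,
++   NUM_FAILURE_ITEMS evaluates to 2 * 3 + 4 = 10 stack slots per
++   failure point. */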
++
++
++/* Pops what PUSH_FAILURE_POINT pushes.
++
++ We restore into the parameters, all of which should be lvalues:
++ STR -- the saved data position.
++ PAT -- the saved pattern position.
++     LOW_REG, HIGH_REG -- the lowest and highest active registers.
++ REGSTART, REGEND -- arrays of string positions.
++ REG_INFO -- array of information about each subexpression.
++
++ Also assumes the variables `fail_stack' and (if debugging), `bufp',
++ `pend', `string1', `size1', `string2', and `size2'. */
++# define POP_FAILURE_POINT(str, pat, low_reg, high_reg, regstart, regend, reg_info)\
++{ \
++ DEBUG_STATEMENT (unsigned failure_id;) \
++ active_reg_t this_reg; \
++ const UCHAR_T *string_temp; \
++ \
++ assert (!FAIL_STACK_EMPTY ()); \
++ \
++ /* Remove failure points and point to how many regs pushed. */ \
++ DEBUG_PRINT1 ("POP_FAILURE_POINT:\n"); \
++ DEBUG_PRINT2 (" Before pop, next avail: %d\n", fail_stack.avail); \
++ DEBUG_PRINT2 (" size: %d\n", fail_stack.size); \
++ \
++ assert (fail_stack.avail >= NUM_NONREG_ITEMS); \
++ \
++ DEBUG_POP (&failure_id); \
++ DEBUG_PRINT2 (" Popping failure id: %u\n", failure_id); \
++ \
++ /* If the saved string location is NULL, it came from an \
++ on_failure_keep_string_jump opcode, and we want to throw away the \
++ saved NULL, thus retaining our current position in the string. */ \
++ string_temp = POP_FAILURE_POINTER (); \
++ if (string_temp != NULL) \
++ str = (const CHAR_T *) string_temp; \
++ \
++ DEBUG_PRINT2 (" Popping string %p: `", str); \
++ DEBUG_PRINT_DOUBLE_STRING (str, string1, size1, string2, size2); \
++ DEBUG_PRINT1 ("'\n"); \
++ \
++ pat = (UCHAR_T *) POP_FAILURE_POINTER (); \
++ DEBUG_PRINT2 (" Popping pattern %p:\n", pat); \
++ DEBUG_PRINT_COMPILED_PATTERN (bufp, pat, pend); \
++ \
++ /* Restore register info. */ \
++ high_reg = (active_reg_t) POP_FAILURE_INT (); \
++ DEBUG_PRINT2 (" Popping high active reg: %ld\n", high_reg); \
++ \
++ low_reg = (active_reg_t) POP_FAILURE_INT (); \
++ DEBUG_PRINT2 (" Popping low active reg: %ld\n", low_reg); \
++ \
++ if (1) \
++ for (this_reg = high_reg; this_reg >= low_reg; this_reg--) \
++ { \
++ DEBUG_PRINT2 (" Popping reg: %ld\n", this_reg); \
++ \
++ reg_info[this_reg].word = POP_FAILURE_ELT (); \
++ DEBUG_PRINT2 (" info: %p\n", \
++ reg_info[this_reg].word.pointer); \
++ \
++ regend[this_reg] = (const CHAR_T *) POP_FAILURE_POINTER (); \
++ DEBUG_PRINT2 (" end: %p\n", regend[this_reg]); \
++ \
++ regstart[this_reg] = (const CHAR_T *) POP_FAILURE_POINTER (); \
++ DEBUG_PRINT2 (" start: %p\n", regstart[this_reg]); \
++ } \
++ else \
++ { \
++ for (this_reg = highest_active_reg; this_reg > high_reg; this_reg--) \
++ { \
++ reg_info[this_reg].word.integer = 0; \
++ regend[this_reg] = 0; \
++ regstart[this_reg] = 0; \
++ } \
++ highest_active_reg = high_reg; \
++ } \
++ \
++ set_regs_matched_done = 0; \
++ DEBUG_STATEMENT (nfailure_points_popped++); \
++} /* POP_FAILURE_POINT */
++
++/* Structure for per-register (a.k.a. per-group) information.
++   Other register information, such as the
++   starting and ending positions (which are addresses) and the list of
++   inner groups (which is a bits list), is maintained in separate
++   variables.
++
++ We are making a (strictly speaking) nonportable assumption here: that
++ the compiler will pack our bit fields into something that fits into
++ the type of `word', i.e., is something that fits into one item on the
++ failure stack. */
++
++
++/* Declarations and macros for re_match_2. */
++
++typedef union
++{
++ PREFIX(fail_stack_elt_t) word;
++ struct
++ {
++ /* This field is one if this group can match the empty string,
++ zero if not. If not yet determined, `MATCH_NULL_UNSET_VALUE'. */
++# define MATCH_NULL_UNSET_VALUE 3
++ unsigned match_null_string_p : 2;
++ unsigned is_active : 1;
++ unsigned matched_something : 1;
++ unsigned ever_matched_something : 1;
++ } bits;
++} PREFIX(register_info_type);
++
++# ifndef DEFINED_ONCE
++# define REG_MATCH_NULL_STRING_P(R) ((R).bits.match_null_string_p)
++# define IS_ACTIVE(R) ((R).bits.is_active)
++# define MATCHED_SOMETHING(R) ((R).bits.matched_something)
++# define EVER_MATCHED_SOMETHING(R) ((R).bits.ever_matched_something)
++
++
++/* Call this when we have matched a real character; it sets `matched' flags
++ for the subexpressions which we are currently inside. Also records
++ that those subexprs have matched. */
++# define SET_REGS_MATCHED() \
++ do \
++ { \
++ if (!set_regs_matched_done) \
++ { \
++ active_reg_t r; \
++ set_regs_matched_done = 1; \
++ for (r = lowest_active_reg; r <= highest_active_reg; r++) \
++ { \
++ MATCHED_SOMETHING (reg_info[r]) \
++ = EVER_MATCHED_SOMETHING (reg_info[r]) \
++ = 1; \
++ } \
++ } \
++ } \
++ while (0)
++# endif /* not DEFINED_ONCE */
++
++/* Registers are set to a sentinel when they haven't yet matched. */
++static CHAR_T PREFIX(reg_unset_dummy);
++# define REG_UNSET_VALUE (&PREFIX(reg_unset_dummy))
++# define REG_UNSET(e) ((e) == REG_UNSET_VALUE)
++
++/* Subroutine declarations and macros for regex_compile. */
++static void PREFIX(store_op1) (re_opcode_t op, UCHAR_T *loc, int arg);
++static void PREFIX(store_op2) (re_opcode_t op, UCHAR_T *loc,
++ int arg1, int arg2);
++static void PREFIX(insert_op1) (re_opcode_t op, UCHAR_T *loc,
++ int arg, UCHAR_T *end);
++static void PREFIX(insert_op2) (re_opcode_t op, UCHAR_T *loc,
++ int arg1, int arg2, UCHAR_T *end);
++static boolean PREFIX(at_begline_loc_p) (const CHAR_T *pattern,
++ const CHAR_T *p,
++ reg_syntax_t syntax);
++static boolean PREFIX(at_endline_loc_p) (const CHAR_T *p,
++ const CHAR_T *pend,
++ reg_syntax_t syntax);
++# ifdef WCHAR
++static reg_errcode_t wcs_compile_range (CHAR_T range_start,
++ const CHAR_T **p_ptr,
++ const CHAR_T *pend,
++ char *translate,
++ reg_syntax_t syntax,
++ UCHAR_T *b,
++ CHAR_T *char_set);
++static void insert_space (int num, CHAR_T *loc, CHAR_T *end);
++# else /* BYTE */
++static reg_errcode_t byte_compile_range (unsigned int range_start,
++ const char **p_ptr,
++ const char *pend,
++ RE_TRANSLATE_TYPE translate,
++ reg_syntax_t syntax,
++ unsigned char *b);
++# endif /* WCHAR */
++
++/* Fetch the next character in the uncompiled pattern---translating it
++ if necessary. Also cast from a signed character in the constant
++ string passed to us by the user to an unsigned char that we can use
++ as an array index (in, e.g., `translate'). */
++/* If MBS_SUPPORT is defined, we translate only if the character is <= 0xff,
++   because it is impossible to allocate a 4GB array for encodings
++   with a 4-byte character set such as UCS4. */
++# ifndef PATFETCH
++# ifdef WCHAR
++# define PATFETCH(c) \
++ do {if (p == pend) return REG_EEND; \
++ c = (UCHAR_T) *p++; \
++ if (translate && (c <= 0xff)) c = (UCHAR_T) translate[c]; \
++ } while (0)
++# else /* BYTE */
++# define PATFETCH(c) \
++ do {if (p == pend) return REG_EEND; \
++ c = (unsigned char) *p++; \
++ if (translate) c = (unsigned char) translate[c]; \
++ } while (0)
++# endif /* WCHAR */
++# endif
++
++/* Fetch the next character in the uncompiled pattern, with no
++ translation. */
++# define PATFETCH_RAW(c) \
++ do {if (p == pend) return REG_EEND; \
++ c = (UCHAR_T) *p++; \
++ } while (0)
++
++/* Go backwards one character in the pattern. */
++# define PATUNFETCH p--
++
++
++/* If `translate' is non-null, return translate[D], else just D. We
++ cast the subscript to translate because some data is declared as
++ `char *', to avoid warnings when a string constant is passed. But
++ when we use a character as a subscript we must make it unsigned. */
++/* If MBS_SUPPORT is defined, we translate only if the character is <= 0xff,
++   because it is impossible to allocate a 4GB array for encodings
++   with a 4-byte character set such as UCS4. */
++
++# ifndef TRANSLATE
++# ifdef WCHAR
++# define TRANSLATE(d) \
++ ((translate && ((UCHAR_T) (d)) <= 0xff) \
++ ? (char) translate[(unsigned char) (d)] : (d))
++# else /* BYTE */
++# define TRANSLATE(d) \
++ (translate ? (char) translate[(unsigned char) (d)] : (char) (d))
++# endif /* WCHAR */
++# endif
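++
++/* Behaviour sketch, assuming the caller installed a case-folding
++   translate table (lowercase for every uppercase letter): TRANSLATE
++   ('A') would then yield 'a', while with translate == NULL the
++   character is returned unchanged.  In the WCHAR build, characters
++   above 0xff bypass the table entirely, as noted above. */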
++
++
++/* Macros for outputting the compiled pattern into `buffer'. */
++
++/* If the buffer isn't allocated when it comes in, use this. */
++# define INIT_BUF_SIZE (32 * sizeof(UCHAR_T))
++
++/* Make sure we have at least N more bytes of space in buffer. */
++# ifdef WCHAR
++# define GET_BUFFER_SPACE(n) \
++ while (((unsigned long)b - (unsigned long)COMPILED_BUFFER_VAR \
++ + (n)*sizeof(CHAR_T)) > bufp->allocated) \
++ EXTEND_BUFFER ()
++# else /* BYTE */
++# define GET_BUFFER_SPACE(n) \
++ while ((unsigned long) (b - bufp->buffer + (n)) > bufp->allocated) \
++ EXTEND_BUFFER ()
++# endif /* WCHAR */
++
++/* Make sure we have one more byte of buffer space and then add C to it. */
++# define BUF_PUSH(c) \
++ do { \
++ GET_BUFFER_SPACE (1); \
++ *b++ = (UCHAR_T) (c); \
++ } while (0)
++
++
++/* Ensure we have two more bytes of buffer space and then append C1 and C2. */
++# define BUF_PUSH_2(c1, c2) \
++ do { \
++ GET_BUFFER_SPACE (2); \
++ *b++ = (UCHAR_T) (c1); \
++ *b++ = (UCHAR_T) (c2); \
++ } while (0)
++
++
++/* As with BUF_PUSH_2, except for three bytes. */
++# define BUF_PUSH_3(c1, c2, c3) \
++ do { \
++ GET_BUFFER_SPACE (3); \
++ *b++ = (UCHAR_T) (c1); \
++ *b++ = (UCHAR_T) (c2); \
++ *b++ = (UCHAR_T) (c3); \
++ } while (0)
++
++/* Store a jump with opcode OP at LOC to location TO. We store a
++ relative address offset by the three bytes the jump itself occupies. */
++# define STORE_JUMP(op, loc, to) \
++ PREFIX(store_op1) (op, loc, (int) ((to) - (loc) - (1 + OFFSET_ADDRESS_SIZE)))
++
++/* Likewise, for a two-argument jump. */
++# define STORE_JUMP2(op, loc, to, arg) \
++ PREFIX(store_op2) (op, loc, (int) ((to) - (loc) - (1 + OFFSET_ADDRESS_SIZE)), arg)
++
++/* Like `STORE_JUMP', but for inserting. Assume `b' is the buffer end. */
++# define INSERT_JUMP(op, loc, to) \
++ PREFIX(insert_op1) (op, loc, (int) ((to) - (loc) - (1 + OFFSET_ADDRESS_SIZE)), b)
++
++/* Like `STORE_JUMP2', but for inserting. Assume `b' is the buffer end. */
++# define INSERT_JUMP2(op, loc, to, arg) \
++ PREFIX(insert_op2) (op, loc, (int) ((to) - (loc) - (1 + OFFSET_ADDRESS_SIZE)),\
++ arg, b)
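++
++/* Offset arithmetic sketch: the stored argument is the displacement
++   measured from the position just after the jump instruction itself.
++   For example, if a jump opcode sits at buffer offset 10 and its
++   target is offset 20, the stored value is
++   20 - 10 - (1 + OFFSET_ADDRESS_SIZE); with two-byte offsets (the
++   byte-based build, per the comment below) that is 7. */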
++
++/* This is not an arbitrary limit: the arguments which represent offsets
++ into the pattern are two bytes long. So if 2^16 bytes turns out to
++ be too small, many things would have to change. */
++/* Any other compiler which, like MSC, has an allocation limit below 2^16
++   bytes will have to use an approach similar to what was done below for
++   MSC and drop MAX_BUF_SIZE a bit.  Otherwise you may end up
++   reallocating to 0 bytes.  Such a thing is not going to work too well.
++   You have been warned!! */
++# ifndef DEFINED_ONCE
++# if defined _MSC_VER && !defined WIN32
++/* Microsoft C 16-bit versions limit malloc to approx 65512 bytes.
++ The REALLOC define eliminates a flurry of conversion warnings,
++ but is not required. */
++# define MAX_BUF_SIZE 65500L
++# define REALLOC(p,s) realloc ((p), (size_t) (s))
++# else
++# define MAX_BUF_SIZE (1L << 16)
++# define REALLOC(p,s) realloc ((p), (s))
++# endif
++
++/* Extend the buffer by twice its current size via realloc and
++ reset the pointers that pointed into the old block to point to the
++ correct places in the new one. If extending the buffer results in it
++ being larger than MAX_BUF_SIZE, then flag memory exhausted. */
++# if __BOUNDED_POINTERS__
++# define SET_HIGH_BOUND(P) (__ptrhigh (P) = __ptrlow (P) + bufp->allocated)
++# define MOVE_BUFFER_POINTER(P) \
++ (__ptrlow (P) += incr, SET_HIGH_BOUND (P), __ptrvalue (P) += incr)
++# define ELSE_EXTEND_BUFFER_HIGH_BOUND \
++ else \
++ { \
++ SET_HIGH_BOUND (b); \
++ SET_HIGH_BOUND (begalt); \
++ if (fixup_alt_jump) \
++ SET_HIGH_BOUND (fixup_alt_jump); \
++ if (laststart) \
++ SET_HIGH_BOUND (laststart); \
++ if (pending_exact) \
++ SET_HIGH_BOUND (pending_exact); \
++ }
++# else
++# define MOVE_BUFFER_POINTER(P) (P) += incr
++# define ELSE_EXTEND_BUFFER_HIGH_BOUND
++# endif
++# endif /* not DEFINED_ONCE */
++
++# ifdef WCHAR
++# define EXTEND_BUFFER() \
++ do { \
++ UCHAR_T *old_buffer = COMPILED_BUFFER_VAR; \
++ int wchar_count; \
++ if (bufp->allocated + sizeof(UCHAR_T) > MAX_BUF_SIZE) \
++ return REG_ESIZE; \
++ bufp->allocated <<= 1; \
++ if (bufp->allocated > MAX_BUF_SIZE) \
++ bufp->allocated = MAX_BUF_SIZE; \
++    /* How many characters can the new buffer hold?  */ \
++ wchar_count = bufp->allocated / sizeof(UCHAR_T); \
++ if (wchar_count == 0) wchar_count = 1; \
++    /* Truncate the buffer to a whole number of CHAR_T elements.  */ \
++ bufp->allocated = wchar_count * sizeof(UCHAR_T); \
++ RETALLOC (COMPILED_BUFFER_VAR, wchar_count, UCHAR_T); \
++ bufp->buffer = (char*)COMPILED_BUFFER_VAR; \
++ if (COMPILED_BUFFER_VAR == NULL) \
++ return REG_ESPACE; \
++ /* If the buffer moved, move all the pointers into it. */ \
++ if (old_buffer != COMPILED_BUFFER_VAR) \
++ { \
++ int incr = COMPILED_BUFFER_VAR - old_buffer; \
++ MOVE_BUFFER_POINTER (b); \
++ MOVE_BUFFER_POINTER (begalt); \
++ if (fixup_alt_jump) \
++ MOVE_BUFFER_POINTER (fixup_alt_jump); \
++ if (laststart) \
++ MOVE_BUFFER_POINTER (laststart); \
++ if (pending_exact) \
++ MOVE_BUFFER_POINTER (pending_exact); \
++ } \
++ ELSE_EXTEND_BUFFER_HIGH_BOUND \
++ } while (0)
++# else /* BYTE */
++# define EXTEND_BUFFER() \
++ do { \
++ UCHAR_T *old_buffer = COMPILED_BUFFER_VAR; \
++ if (bufp->allocated == MAX_BUF_SIZE) \
++ return REG_ESIZE; \
++ bufp->allocated <<= 1; \
++ if (bufp->allocated > MAX_BUF_SIZE) \
++ bufp->allocated = MAX_BUF_SIZE; \
++ bufp->buffer = (UCHAR_T *) REALLOC (COMPILED_BUFFER_VAR, \
++ bufp->allocated); \
++ if (COMPILED_BUFFER_VAR == NULL) \
++ return REG_ESPACE; \
++ /* If the buffer moved, move all the pointers into it. */ \
++ if (old_buffer != COMPILED_BUFFER_VAR) \
++ { \
++ int incr = COMPILED_BUFFER_VAR - old_buffer; \
++ MOVE_BUFFER_POINTER (b); \
++ MOVE_BUFFER_POINTER (begalt); \
++ if (fixup_alt_jump) \
++ MOVE_BUFFER_POINTER (fixup_alt_jump); \
++ if (laststart) \
++ MOVE_BUFFER_POINTER (laststart); \
++ if (pending_exact) \
++ MOVE_BUFFER_POINTER (pending_exact); \
++ } \
++ ELSE_EXTEND_BUFFER_HIGH_BOUND \
++ } while (0)
++# endif /* WCHAR */
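++
++/* Growth sketch for the byte build, assuming the defaults above: a
++   buffer that starts at INIT_BUF_SIZE doubles on each EXTEND_BUFFER
++   call (32, 64, 128, ...) until it would pass MAX_BUF_SIZE, at which
++   point it is clamped to MAX_BUF_SIZE and a further request returns
++   REG_ESIZE.  If realloc moves the block, every pointer into it
++   (b, begalt, and, when set, fixup_alt_jump, laststart and
++   pending_exact) is shifted by the same incr. */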
++
++# ifndef DEFINED_ONCE
++/* Since we have one byte reserved for the register number argument to
++ {start,stop}_memory, the maximum number of groups we can report
++ things about is what fits in that byte. */
++# define MAX_REGNUM 255
++
++/* But patterns can have more than `MAX_REGNUM' registers. We just
++ ignore the excess. */
++typedef unsigned regnum_t;
++
++
++/* Macros for the compile stack. */
++
++/* Since offsets can go either forwards or backwards, this type needs to
++ be able to hold values from -(MAX_BUF_SIZE - 1) to MAX_BUF_SIZE - 1. */
++/* int may not be enough when sizeof(int) == 2. */
++typedef long pattern_offset_t;
++
++typedef struct
++{
++ pattern_offset_t begalt_offset;
++ pattern_offset_t fixup_alt_jump;
++ pattern_offset_t inner_group_offset;
++ pattern_offset_t laststart_offset;
++ regnum_t regnum;
++} compile_stack_elt_t;
++
++
++typedef struct
++{
++ compile_stack_elt_t *stack;
++ unsigned size;
++ unsigned avail; /* Offset of next open position. */
++} compile_stack_type;
++
++
++# define INIT_COMPILE_STACK_SIZE 32
++
++# define COMPILE_STACK_EMPTY (compile_stack.avail == 0)
++# define COMPILE_STACK_FULL (compile_stack.avail == compile_stack.size)
++
++/* The next available element. */
++# define COMPILE_STACK_TOP (compile_stack.stack[compile_stack.avail])
++
++# endif /* not DEFINED_ONCE */
++
++/* Set the bit for character C in a list. */
++# ifndef DEFINED_ONCE
++# define SET_LIST_BIT(c) \
++ (b[((unsigned char) (c)) / BYTEWIDTH] \
++ |= 1 << (((unsigned char) c) % BYTEWIDTH))
++# endif /* DEFINED_ONCE */
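++
++/* Worked example, assuming BYTEWIDTH is 8: SET_LIST_BIT ('A') with
++   'A' == 65 sets bit 65 % 8 == 1 in byte 65 / 8 == 8 of the bitmap,
++   i.e. b[8] |= 0x02. */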
++
++/* Get the next unsigned number in the uncompiled pattern. */
++# define GET_UNSIGNED_NUMBER(num) \
++ { \
++ while (p != pend) \
++ { \
++ PATFETCH (c); \
++ if (c < '0' || c > '9') \
++ break; \
++ if (num <= RE_DUP_MAX) \
++ { \
++ if (num < 0) \
++ num = 0; \
++ num = num * 10 + c - '0'; \
++ } \
++ } \
++ }
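++
++/* Parsing sketch: scanning the interval `{12}' with this macro fetches
++   '1' and then '2', accumulating num = 1 and then num = 12 (a negative
++   starting num is first reset to 0), and stops with c holding the
++   non-digit '}' for the caller to examine.  Digits are only
++   accumulated while num <= RE_DUP_MAX; once the value grows past
++   that, further digits are consumed but ignored. */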
++
++# ifndef DEFINED_ONCE
++# if WIDE_CHAR_SUPPORT
++/* The GNU C library provides support for user-defined character classes
++   and the functions from ISO C Amendment 1. */
++# ifdef CHARCLASS_NAME_MAX
++# define CHAR_CLASS_MAX_LENGTH CHARCLASS_NAME_MAX
++# else
++/* This shouldn't happen, but some implementations might still have this
++ problem. Use a reasonable default value. */
++# define CHAR_CLASS_MAX_LENGTH 256
++# endif
++
++# ifdef _LIBC
++# define IS_CHAR_CLASS(string) __wctype (string)
++# else
++# define IS_CHAR_CLASS(string) wctype (string)
++# endif
++# else
++# define CHAR_CLASS_MAX_LENGTH 6 /* Namely, `xdigit'. */
++
++# define IS_CHAR_CLASS(string) \
++ (STREQ (string, "alpha") || STREQ (string, "upper") \
++ || STREQ (string, "lower") || STREQ (string, "digit") \
++ || STREQ (string, "alnum") || STREQ (string, "xdigit") \
++ || STREQ (string, "space") || STREQ (string, "print") \
++ || STREQ (string, "punct") || STREQ (string, "graph") \
++ || STREQ (string, "cntrl") || STREQ (string, "blank"))
++# endif
++# endif /* DEFINED_ONCE */
++
++# ifndef MATCH_MAY_ALLOCATE
++
++/* If we cannot allocate large objects within re_match_2_internal,
++ we make the fail stack and register vectors global.
++ The fail stack, we grow to the maximum size when a regexp
++ is compiled.
++ The register vectors, we adjust in size each time we
++ compile a regexp, according to the number of registers it needs. */
++
++static PREFIX(fail_stack_type) fail_stack;
++
++/* Size with which the following vectors are currently allocated.
++ That is so we can make them bigger as needed,
++ but never make them smaller. */
++# ifdef DEFINED_ONCE
++static int regs_allocated_size;
++
++static const char ** regstart, ** regend;
++static const char ** old_regstart, ** old_regend;
++static const char **best_regstart, **best_regend;
++static const char **reg_dummy;
++# endif /* DEFINED_ONCE */
++
++static PREFIX(register_info_type) *PREFIX(reg_info);
++static PREFIX(register_info_type) *PREFIX(reg_info_dummy);
++
++/* Make the register vectors big enough for NUM_REGS registers,
++ but don't make them smaller. */
++
++static void
++PREFIX(regex_grow_registers) (int num_regs)
++{
++ if (num_regs > regs_allocated_size)
++ {
++ RETALLOC_IF (regstart, num_regs, const char *);
++ RETALLOC_IF (regend, num_regs, const char *);
++ RETALLOC_IF (old_regstart, num_regs, const char *);
++ RETALLOC_IF (old_regend, num_regs, const char *);
++ RETALLOC_IF (best_regstart, num_regs, const char *);
++ RETALLOC_IF (best_regend, num_regs, const char *);
++ RETALLOC_IF (PREFIX(reg_info), num_regs, PREFIX(register_info_type));
++ RETALLOC_IF (reg_dummy, num_regs, const char *);
++ RETALLOC_IF (PREFIX(reg_info_dummy), num_regs, PREFIX(register_info_type));
++
++ regs_allocated_size = num_regs;
++ }
++}
++
++# endif /* not MATCH_MAY_ALLOCATE */
++
++# ifndef DEFINED_ONCE
++static boolean group_in_compile_stack (compile_stack_type compile_stack,
++ regnum_t regnum);
++# endif /* not DEFINED_ONCE */
++
++/* `regex_compile' compiles PATTERN (of length SIZE) according to SYNTAX.
++ Returns one of error codes defined in `regex.h', or zero for success.
++
++ Assumes the `allocated' (and perhaps `buffer') and `translate'
++ fields are set in BUFP on entry.
++
++ If it succeeds, results are put in BUFP (if it returns an error, the
++ contents of BUFP are undefined):
++ `buffer' is the compiled pattern;
++ `syntax' is set to SYNTAX;
++ `used' is set to the length of the compiled pattern;
++ `fastmap_accurate' is zero;
++ `re_nsub' is the number of subexpressions in PATTERN;
++ `not_bol' and `not_eol' are zero;
++
++ The `fastmap' and `newline_anchor' fields are neither
++ examined nor set. */
++
++/* Return, freeing storage we allocated. */
++# ifdef WCHAR
++# define FREE_STACK_RETURN(value) \
++ return (free(pattern), free(mbs_offset), free(is_binary), free (compile_stack.stack), value)
++# else
++# define FREE_STACK_RETURN(value) \
++ return (free (compile_stack.stack), value)
++# endif /* WCHAR */
++
++static reg_errcode_t
++PREFIX(regex_compile) (const char *ARG_PREFIX(pattern),
++ size_t ARG_PREFIX(size), reg_syntax_t syntax,
++ struct re_pattern_buffer *bufp)
++{
++ /* We fetch characters from PATTERN here. Even though PATTERN is
++ `char *' (i.e., signed), we declare these variables as unsigned, so
++ they can be reliably used as array indices. */
++ register UCHAR_T c, c1;
++
++#ifdef WCHAR
++  /* Temporary space to keep the wchar_t pattern and the compiled pattern. */
++  CHAR_T *pattern, *COMPILED_BUFFER_VAR;
++  size_t size;
++  /* Offset buffer for optimization.  See convert_mbs_to_wcs. */
++  int *mbs_offset = NULL;
++  /* Records whether each wchar_t is binary data or not. */
++  char *is_binary = NULL;
++  /* A flag indicating whether exactn is handling binary data. */
++  char is_exactn_bin = FALSE;
++#endif /* WCHAR */
++
++ /* A random temporary spot in PATTERN. */
++ const CHAR_T *p1;
++
++ /* Points to the end of the buffer, where we should append. */
++ register UCHAR_T *b;
++
++ /* Keeps track of unclosed groups. */
++ compile_stack_type compile_stack;
++
++ /* Points to the current (ending) position in the pattern. */
++#ifdef WCHAR
++ const CHAR_T *p;
++ const CHAR_T *pend;
++#else /* BYTE */
++ const CHAR_T *p = pattern;
++ const CHAR_T *pend = pattern + size;
++#endif /* WCHAR */
++
++ /* How to translate the characters in the pattern. */
++ RE_TRANSLATE_TYPE translate = bufp->translate;
++
++ /* Address of the count-byte of the most recently inserted `exactn'
++ command. This makes it possible to tell if a new exact-match
++ character can be added to that command or if the character requires
++ a new `exactn' command. */
++ UCHAR_T *pending_exact = 0;
++
++ /* Address of start of the most recently finished expression.
++ This tells, e.g., postfix * where to find the start of its
++ operand. Reset at the beginning of groups and alternatives. */
++ UCHAR_T *laststart = 0;
++
++ /* Address of beginning of regexp, or inside of last group. */
++ UCHAR_T *begalt;
++
++ /* Address of the place where a forward jump should go to the end of
++ the containing expression. Each alternative of an `or' -- except the
++ last -- ends with a forward jump of this sort. */
++ UCHAR_T *fixup_alt_jump = 0;
++
++ /* Counts open-groups as they are encountered. Remembered for the
++ matching close-group on the compile stack, so the same register
++ number is put in the stop_memory as the start_memory. */
++ regnum_t regnum = 0;
++
++#ifdef WCHAR
++ /* Initialize the wchar_t PATTERN and offset_buffer. */
++ p = pend = pattern = TALLOC(csize + 1, CHAR_T);
++ mbs_offset = TALLOC(csize + 1, int);
++ is_binary = TALLOC(csize + 1, char);
++ if (pattern == NULL || mbs_offset == NULL || is_binary == NULL)
++ {
++ free(pattern);
++ free(mbs_offset);
++ free(is_binary);
++ return REG_ESPACE;
++ }
++ pattern[csize] = L'\0'; /* sentinel */
++ size = convert_mbs_to_wcs(pattern, cpattern, csize, mbs_offset, is_binary);
++ pend = p + size;
++ if (size < 0)
++ {
++ free(pattern);
++ free(mbs_offset);
++ free(is_binary);
++ return REG_BADPAT;
++ }
++#endif
++
++#ifdef DEBUG
++ DEBUG_PRINT1 ("\nCompiling pattern: ");
++ if (debug)
++ {
++ unsigned debug_count;
++
++ for (debug_count = 0; debug_count < size; debug_count++)
++ PUT_CHAR (pattern[debug_count]);
++ putchar ('\n');
++ }
++#endif /* DEBUG */
++
++ /* Initialize the compile stack. */
++ compile_stack.stack = TALLOC (INIT_COMPILE_STACK_SIZE, compile_stack_elt_t);
++ if (compile_stack.stack == NULL)
++ {
++#ifdef WCHAR
++ free(pattern);
++ free(mbs_offset);
++ free(is_binary);
++#endif
++ return REG_ESPACE;
++ }
++
++ compile_stack.size = INIT_COMPILE_STACK_SIZE;
++ compile_stack.avail = 0;
++
++ /* Initialize the pattern buffer. */
++ bufp->syntax = syntax;
++ bufp->fastmap_accurate = 0;
++ bufp->not_bol = bufp->not_eol = 0;
++
++ /* Set `used' to zero, so that if we return an error, the pattern
++ printer (for debugging) will think there's no pattern. We reset it
++ at the end. */
++ bufp->used = 0;
++
++ /* Always count groups, whether or not bufp->no_sub is set. */
++ bufp->re_nsub = 0;
++
++#if !defined emacs && !defined SYNTAX_TABLE
++ /* Initialize the syntax table. */
++ init_syntax_once ();
++#endif
++
++ if (bufp->allocated == 0)
++ {
++ if (bufp->buffer)
++ { /* If zero allocated, but buffer is non-null, try to realloc
++ enough space. This loses if buffer's address is bogus, but
++ that is the user's responsibility. */
++#ifdef WCHAR
++ /* Free bufp->buffer and allocate an array for wchar_t pattern
++ buffer. */
++ free(bufp->buffer);
++ COMPILED_BUFFER_VAR = TALLOC (INIT_BUF_SIZE/sizeof(UCHAR_T),
++ UCHAR_T);
++#else
++ RETALLOC (COMPILED_BUFFER_VAR, INIT_BUF_SIZE, UCHAR_T);
++#endif /* WCHAR */
++ }
++ else
++ { /* Caller did not allocate a buffer. Do it for them. */
++ COMPILED_BUFFER_VAR = TALLOC (INIT_BUF_SIZE / sizeof(UCHAR_T),
++ UCHAR_T);
++ }
++
++ if (!COMPILED_BUFFER_VAR) FREE_STACK_RETURN (REG_ESPACE);
++#ifdef WCHAR
++ bufp->buffer = (char*)COMPILED_BUFFER_VAR;
++#endif /* WCHAR */
++ bufp->allocated = INIT_BUF_SIZE;
++ }
++#ifdef WCHAR
++ else
++ COMPILED_BUFFER_VAR = (UCHAR_T*) bufp->buffer;
++#endif
++
++ begalt = b = COMPILED_BUFFER_VAR;
++
++ /* Loop through the uncompiled pattern until we're at the end. */
++ while (p != pend)
++ {
++ PATFETCH (c);
++
++ switch (c)
++ {
++ case '^':
++ {
++ if ( /* If at start of pattern, it's an operator. */
++ p == pattern + 1
++ /* If context independent, it's an operator. */
++ || syntax & RE_CONTEXT_INDEP_ANCHORS
++ /* Otherwise, depends on what's come before. */
++ || PREFIX(at_begline_loc_p) (pattern, p, syntax))
++ BUF_PUSH (begline);
++ else
++ goto normal_char;
++ }
++ break;
++
++
++ case '$':
++ {
++ if ( /* If at end of pattern, it's an operator. */
++ p == pend
++ /* If context independent, it's an operator. */
++ || syntax & RE_CONTEXT_INDEP_ANCHORS
++ /* Otherwise, depends on what's next. */
++ || PREFIX(at_endline_loc_p) (p, pend, syntax))
++ BUF_PUSH (endline);
++ else
++ goto normal_char;
++ }
++ break;
++
++
++ case '+':
++ case '?':
++ if ((syntax & RE_BK_PLUS_QM)
++ || (syntax & RE_LIMITED_OPS))
++ goto normal_char;
++ handle_plus:
++ case '*':
++ /* If there is no previous pattern... */
++ if (!laststart)
++ {
++ if (syntax & RE_CONTEXT_INVALID_OPS)
++ FREE_STACK_RETURN (REG_BADRPT);
++ else if (!(syntax & RE_CONTEXT_INDEP_OPS))
++ goto normal_char;
++ }
++
++ {
++ /* Are we optimizing this jump? */
++ boolean keep_string_p = false;
++
++ /* 1 means zero (many) matches is allowed. */
++ char zero_times_ok = 0, many_times_ok = 0;
++
++ /* If there is a sequence of repetition chars, collapse it
++ down to just one (the right one). We can't combine
++ interval operators with these because of, e.g., `a{2}*',
++ which should only match an even number of `a's. */
++
++ for (;;)
++ {
++ zero_times_ok |= c != '+';
++ many_times_ok |= c != '?';
++
++ if (p == pend)
++ break;
++
++ PATFETCH (c);
++
++ if (c == '*'
++ || (!(syntax & RE_BK_PLUS_QM) && (c == '+' || c == '?')))
++ ;
++
++ else if (syntax & RE_BK_PLUS_QM && c == '\\')
++ {
++ if (p == pend) FREE_STACK_RETURN (REG_EESCAPE);
++
++ PATFETCH (c1);
++ if (!(c1 == '+' || c1 == '?'))
++ {
++ PATUNFETCH;
++ PATUNFETCH;
++ break;
++ }
++
++ c = c1;
++ }
++ else
++ {
++ PATUNFETCH;
++ break;
++ }
++
++ /* If we get here, we found another repeat character. */
++ }
++
++ /* Star, etc. applied to an empty pattern is equivalent
++ to an empty pattern. */
++ if (!laststart)
++ break;
++
++ /* Now we know whether or not zero matches is allowed
++ and also whether or not two or more matches is allowed. */
++ if (many_times_ok)
++ { /* More than one repetition is allowed, so put in at the
++ end a backward relative jump from `b' to before the next
++ jump we're going to put in below (which jumps from
++ laststart to after this jump).
++
++ But if we are at the `*' in the exact sequence `.*\n',
++ insert an unconditional jump backwards to the .,
++ instead of the beginning of the loop. This way we only
++ push a failure point once, instead of every time
++ through the loop. */
++ assert (p - 1 > pattern);
++
++ /* Allocate the space for the jump. */
++ GET_BUFFER_SPACE (1 + OFFSET_ADDRESS_SIZE);
++
++ /* We know we are not at the first character of the pattern,
++ because laststart was nonzero. And we've already
++ incremented `p', by the way, to be the character after
++ the `*'. Do we have to do something analogous here
++ for null bytes, because of RE_DOT_NOT_NULL? */
++ if (TRANSLATE (*(p - 2)) == TRANSLATE ('.')
++ && zero_times_ok
++ && p < pend && TRANSLATE (*p) == TRANSLATE ('\n')
++ && !(syntax & RE_DOT_NEWLINE))
++ { /* We have .*\n. */
++ STORE_JUMP (jump, b, laststart);
++ keep_string_p = true;
++ }
++ else
++ /* Anything else. */
++ STORE_JUMP (maybe_pop_jump, b, laststart -
++ (1 + OFFSET_ADDRESS_SIZE));
++
++ /* We've added more stuff to the buffer. */
++ b += 1 + OFFSET_ADDRESS_SIZE;
++ }
++
++ /* On failure, jump from laststart to b + 3, which will be the
++ end of the buffer after this jump is inserted. */
++ /* ifdef WCHAR, 'b + 1 + OFFSET_ADDRESS_SIZE' instead of
++ 'b + 3'. */
++ GET_BUFFER_SPACE (1 + OFFSET_ADDRESS_SIZE);
++ INSERT_JUMP (keep_string_p ? on_failure_keep_string_jump
++ : on_failure_jump,
++ laststart, b + 1 + OFFSET_ADDRESS_SIZE);
++ pending_exact = 0;
++ b += 1 + OFFSET_ADDRESS_SIZE;
++
++ if (!zero_times_ok)
++ {
++ /* At least one repetition is required, so insert a
++ `dummy_failure_jump' before the initial
++ `on_failure_jump' instruction of the loop. This
++ effects a skip over that instruction the first time
++ we hit that loop. */
++ GET_BUFFER_SPACE (1 + OFFSET_ADDRESS_SIZE);
++ INSERT_JUMP (dummy_failure_jump, laststart, laststart +
++ 2 + 2 * OFFSET_ADDRESS_SIZE);
++ b += 1 + OFFSET_ADDRESS_SIZE;
++ }
++ }
++ break;
++
++
++ case '.':
++ laststart = b;
++ BUF_PUSH (anychar);
++ break;
++
++
++ case '[':
++ {
++ boolean had_char_class = false;
++#ifdef WCHAR
++ CHAR_T range_start = 0xffffffff;
++#else
++ unsigned int range_start = 0xffffffff;
++#endif
++ if (p == pend) FREE_STACK_RETURN (REG_EBRACK);
++
++#ifdef WCHAR
++ /* We assume a charset(_not) structure as a wchar_t array.
++ charset[0] = (re_opcode_t) charset(_not)
++ charset[1] = l (= length of char_classes)
++ charset[2] = m (= length of collating_symbols)
++ charset[3] = n (= length of equivalence_classes)
++ charset[4] = o (= length of char_ranges)
++ charset[5] = p (= length of chars)
++
++ charset[6] = char_class (wctype_t)
++ charset[6+CHAR_CLASS_SIZE] = char_class (wctype_t)
++ ...
++ charset[l+5] = char_class (wctype_t)
++
++ charset[l+6] = collating_symbol (wchar_t)
++ ...
++ charset[l+m+5] = collating_symbol (wchar_t)
++ ifdef _LIBC we use the index if
++ _NL_COLLATE_SYMB_EXTRAMB instead of
++ wchar_t string.
++
++ charset[l+m+6] = equivalence_classes (wchar_t)
++ ...
++ charset[l+m+n+5] = equivalence_classes (wchar_t)
++ ifdef _LIBC we use the index in
++ _NL_COLLATE_WEIGHT instead of
++ wchar_t string.
++
++ charset[l+m+n+6] = range_start
++ charset[l+m+n+7] = range_end
++ ...
++ charset[l+m+n+2o+4] = range_start
++ charset[l+m+n+2o+5] = range_end
++ ifdef _LIBC we use the value looked up
++ in _NL_COLLATE_COLLSEQ instead of
++ wchar_t character.
++
++ charset[l+m+n+2o+6] = char
++ ...
++ charset[l+m+n+2o+p+5] = char
++
++ */
++
++ /* We need at least 6 spaces: the opcode, the length of
++ char_classes, the length of collating_symbols, the length of
++ equivalence_classes, the length of char_ranges, the length of
++ chars. */
++ GET_BUFFER_SPACE (6);
++
++	    /* Save b as laststart.  We use laststart as the pointer
++	       to the first element of the charset here.
++	       In other words, laststart[i] indicates charset[i]. */
++ laststart = b;
++
++ /* We test `*p == '^' twice, instead of using an if
++ statement, so we only need one BUF_PUSH. */
++ BUF_PUSH (*p == '^' ? charset_not : charset);
++ if (*p == '^')
++ p++;
++
++ /* Push the length of char_classes, the length of
++ collating_symbols, the length of equivalence_classes, the
++ length of char_ranges and the length of chars. */
++ BUF_PUSH_3 (0, 0, 0);
++ BUF_PUSH_2 (0, 0);
++
++ /* Remember the first position in the bracket expression. */
++ p1 = p;
++
++ /* charset_not matches newline according to a syntax bit. */
++ if ((re_opcode_t) b[-6] == charset_not
++ && (syntax & RE_HAT_LISTS_NOT_NEWLINE))
++ {
++ BUF_PUSH('\n');
++ laststart[5]++; /* Update the length of characters */
++ }
++
++ /* Read in characters and ranges, setting map bits. */
++ for (;;)
++ {
++ if (p == pend) FREE_STACK_RETURN (REG_EBRACK);
++
++ PATFETCH (c);
++
++ /* \ might escape characters inside [...] and [^...]. */
++ if ((syntax & RE_BACKSLASH_ESCAPE_IN_LISTS) && c == '\\')
++ {
++ if (p == pend) FREE_STACK_RETURN (REG_EESCAPE);
++
++ PATFETCH (c1);
++ BUF_PUSH(c1);
++ laststart[5]++; /* Update the length of chars */
++ range_start = c1;
++ continue;
++ }
++
++ /* Could be the end of the bracket expression. If it's
++ not (i.e., when the bracket expression is `[]' so
++ far), the ']' character bit gets set way below. */
++ if (c == ']' && p != p1 + 1)
++ break;
++
++ /* Look ahead to see if it's a range when the last thing
++ was a character class. */
++ if (had_char_class && c == '-' && *p != ']')
++ FREE_STACK_RETURN (REG_ERANGE);
++
++ /* Look ahead to see if it's a range when the last thing
++ was a character: if this is a hyphen not at the
++ beginning or the end of a list, then it's the range
++ operator. */
++ if (c == '-'
++ && !(p - 2 >= pattern && p[-2] == '[')
++ && !(p - 3 >= pattern && p[-3] == '[' && p[-2] == '^')
++ && *p != ']')
++ {
++ reg_errcode_t ret;
++ /* Allocate the space for range_start and range_end. */
++ GET_BUFFER_SPACE (2);
++ /* Update the pointer to indicate end of buffer. */
++ b += 2;
++ ret = wcs_compile_range (range_start, &p, pend, translate,
++ syntax, b, laststart);
++ if (ret != REG_NOERROR) FREE_STACK_RETURN (ret);
++ range_start = 0xffffffff;
++ }
++ else if (p[0] == '-' && p[1] != ']')
++ { /* This handles ranges made up of characters only. */
++ reg_errcode_t ret;
++
++ /* Move past the `-'. */
++ PATFETCH (c1);
++ /* Allocate the space for range_start and range_end. */
++ GET_BUFFER_SPACE (2);
++ /* Update the pointer to indicate end of buffer. */
++ b += 2;
++ ret = wcs_compile_range (c, &p, pend, translate, syntax, b,
++ laststart);
++ if (ret != REG_NOERROR) FREE_STACK_RETURN (ret);
++ range_start = 0xffffffff;
++ }
++
++ /* See if we're at the beginning of a possible character
++ class. */
++ else if (syntax & RE_CHAR_CLASSES && c == '[' && *p == ':')
++ { /* Leave room for the null. */
++ char str[CHAR_CLASS_MAX_LENGTH + 1];
++
++ PATFETCH (c);
++ c1 = 0;
++
++ /* If pattern is `[[:'. */
++ if (p == pend) FREE_STACK_RETURN (REG_EBRACK);
++
++ for (;;)
++ {
++ PATFETCH (c);
++ if ((c == ':' && *p == ']') || p == pend)
++ break;
++ if (c1 < CHAR_CLASS_MAX_LENGTH)
++ str[c1++] = c;
++ else
++ /* This is in any case an invalid class name. */
++ str[0] = '\0';
++ }
++ str[c1] = '\0';
++
++		  /* If it isn't a word bracketed by `[:' and `:]':
++		     undo the ending character, the letters, and leave
++		     the leading `:' and `[' (but store them as characters). */
++ if (c == ':' && *p == ']')
++ {
++ wctype_t wt;
++ uintptr_t alignedp;
++
++ /* Query the character class as wctype_t. */
++ wt = IS_CHAR_CLASS (str);
++ if (wt == 0)
++ FREE_STACK_RETURN (REG_ECTYPE);
++
++ /* Throw away the ] at the end of the character
++ class. */
++ PATFETCH (c);
++
++ if (p == pend) FREE_STACK_RETURN (REG_EBRACK);
++
++ /* Allocate the space for character class. */
++ GET_BUFFER_SPACE(CHAR_CLASS_SIZE);
++ /* Update the pointer to indicate end of buffer. */
++ b += CHAR_CLASS_SIZE;
++		      /* Move the data that follows the character classes
++			 so that it is not overwritten. */
++ insert_space(CHAR_CLASS_SIZE,
++ laststart + 6 + laststart[1],
++ b - 1);
++ alignedp = ((uintptr_t)(laststart + 6 + laststart[1])
++ + __alignof__(wctype_t) - 1)
++ & ~(uintptr_t)(__alignof__(wctype_t) - 1);
++ /* Store the character class. */
++ *((wctype_t*)alignedp) = wt;
++ /* Update length of char_classes */
++ laststart[1] += CHAR_CLASS_SIZE;
++
++ had_char_class = true;
++ }
++ else
++ {
++ c1++;
++ while (c1--)
++ PATUNFETCH;
++ BUF_PUSH ('[');
++ BUF_PUSH (':');
++ laststart[5] += 2; /* Update the length of characters */
++ range_start = ':';
++ had_char_class = false;
++ }
++ }
++ else if (syntax & RE_CHAR_CLASSES && c == '[' && (*p == '='
++ || *p == '.'))
++ {
++ CHAR_T str[128]; /* Should be large enough. */
++ CHAR_T delim = *p; /* '=' or '.' */
++# ifdef _LIBC
++ uint32_t nrules =
++ _NL_CURRENT_WORD (LC_COLLATE, _NL_COLLATE_NRULES);
++# endif
++ PATFETCH (c);
++ c1 = 0;
++
++ /* If pattern is `[[=' or '[[.'. */
++ if (p == pend) FREE_STACK_RETURN (REG_EBRACK);
++
++ for (;;)
++ {
++ PATFETCH (c);
++ if ((c == delim && *p == ']') || p == pend)
++ break;
++ if (c1 < sizeof (str) - 1)
++ str[c1++] = c;
++ else
++ /* This is in any case an invalid class name. */
++ str[0] = '\0';
++ }
++ str[c1] = '\0';
++
++ if (c == delim && *p == ']' && str[0] != '\0')
++ {
++ unsigned int i, offset;
++ /* If we have no collation data we use the default
++ collation in which each character is in a class
++ by itself. It also means that ASCII is the
++			 character set and therefore we cannot have characters
++ with more than one byte in the multibyte
++ representation. */
++
++		      /* If _LIBC is not defined, we push the name and
++			 `\0' for the sake of matching performance. */
++ int datasize = c1 + 1;
++
++# ifdef _LIBC
++ int32_t idx = 0;
++ if (nrules == 0)
++# endif
++ {
++ if (c1 != 1)
++ FREE_STACK_RETURN (REG_ECOLLATE);
++ }
++# ifdef _LIBC
++ else
++ {
++ const int32_t *table;
++ const int32_t *weights;
++ const int32_t *extra;
++ const int32_t *indirect;
++ wint_t *cp;
++
++ /* This #include defines a local function! */
++# include <locale/weightwc.h>
++
++ if(delim == '=')
++ {
++ /* We push the index for equivalence class. */
++ cp = (wint_t*)str;
++
++ table = (const int32_t *)
++ _NL_CURRENT (LC_COLLATE,
++ _NL_COLLATE_TABLEWC);
++ weights = (const int32_t *)
++ _NL_CURRENT (LC_COLLATE,
++ _NL_COLLATE_WEIGHTWC);
++ extra = (const int32_t *)
++ _NL_CURRENT (LC_COLLATE,
++ _NL_COLLATE_EXTRAWC);
++ indirect = (const int32_t *)
++ _NL_CURRENT (LC_COLLATE,
++ _NL_COLLATE_INDIRECTWC);
++
++ idx = findidx ((const wint_t**)&cp, c1);
++ if (idx == 0 || cp < (wint_t*) str + c1)
++ /* This is no valid character. */
++ FREE_STACK_RETURN (REG_ECOLLATE);
++
++ str[0] = (wchar_t)idx;
++ }
++ else /* delim == '.' */
++ {
++ /* We push collation sequence value
++ for collating symbol. */
++ int32_t table_size;
++ const int32_t *symb_table;
++ const unsigned char *extra;
++ int32_t idx;
++ int32_t elem;
++ int32_t second;
++ int32_t hash;
++ char char_str[c1];
++
++ /* We have to convert the name to a single-byte
++ string. This is possible since the names
++ consist of ASCII characters and the internal
++ representation is UCS4. */
++ for (i = 0; i < c1; ++i)
++ char_str[i] = str[i];
++
++ table_size =
++ _NL_CURRENT_WORD (LC_COLLATE,
++ _NL_COLLATE_SYMB_HASH_SIZEMB);
++ symb_table = (const int32_t *)
++ _NL_CURRENT (LC_COLLATE,
++ _NL_COLLATE_SYMB_TABLEMB);
++ extra = (const unsigned char *)
++ _NL_CURRENT (LC_COLLATE,
++ _NL_COLLATE_SYMB_EXTRAMB);
++
++ /* Locate the character in the hashing table. */
++ hash = elem_hash (char_str, c1);
++
++ idx = 0;
++ elem = hash % table_size;
++ second = hash % (table_size - 2);
++ while (symb_table[2 * elem] != 0)
++ {
++ /* First compare the hashing value. */
++ if (symb_table[2 * elem] == hash
++ && c1 == extra[symb_table[2 * elem + 1]]
++ && memcmp (char_str,
++ &extra[symb_table[2 * elem + 1]
++ + 1], c1) == 0)
++ {
++ /* Yep, this is the entry. */
++ idx = symb_table[2 * elem + 1];
++ idx += 1 + extra[idx];
++ break;
++ }
++
++ /* Next entry. */
++ elem += second;
++ }
++
++ if (symb_table[2 * elem] != 0)
++ {
++ /* Compute the index of the byte sequence
++ in the table. */
++ idx += 1 + extra[idx];
++ /* Adjust for the alignment. */
++ idx = (idx + 3) & ~3;
++
++ str[0] = (wchar_t) idx + 4;
++ }
++ else if (symb_table[2 * elem] == 0 && c1 == 1)
++ {
++ /* No valid character. Match it as a
++ single byte character. */
++ had_char_class = false;
++ BUF_PUSH(str[0]);
++ /* Update the length of characters */
++ laststart[5]++;
++ range_start = str[0];
++
++ /* Throw away the ] at the end of the
++ collating symbol. */
++ PATFETCH (c);
++ /* exit from the switch block. */
++ continue;
++ }
++ else
++ FREE_STACK_RETURN (REG_ECOLLATE);
++ }
++ datasize = 1;
++ }
++# endif
++ /* Throw away the ] at the end of the equivalence
++ class (or collating symbol). */
++ PATFETCH (c);
++
++ /* Allocate the space for the equivalence class
++ (or collating symbol) (and '\0' if needed). */
++ GET_BUFFER_SPACE(datasize);
++ /* Update the pointer to indicate end of buffer. */
++ b += datasize;
++
++ if (delim == '=')
++ { /* equivalence class */
++ /* Calculate the offset of char_ranges,
++ which is next to equivalence_classes. */
++ offset = laststart[1] + laststart[2]
++ + laststart[3] +6;
++ /* Insert space. */
++ insert_space(datasize, laststart + offset, b - 1);
++
++ /* Write the equivalence_class and \0. */
++ for (i = 0 ; i < datasize ; i++)
++ laststart[offset + i] = str[i];
++
++ /* Update the length of equivalence_classes. */
++ laststart[3] += datasize;
++ had_char_class = true;
++ }
++ else /* delim == '.' */
++ { /* collating symbol */
++ /* Calculate the offset of the equivalence_classes,
++ which is next to collating_symbols. */
++ offset = laststart[1] + laststart[2] + 6;
++		      /* Insert space and write the collating_symbol
++			 and \0. */
++ insert_space(datasize, laststart + offset, b-1);
++ for (i = 0 ; i < datasize ; i++)
++ laststart[offset + i] = str[i];
++
++ /* In re_match_2_internal if range_start < -1, we
++ assume -range_start is the offset of the
++ collating symbol which is specified as
++ the character of the range start. So we assign
++ -(laststart[1] + laststart[2] + 6) to
++ range_start. */
++ range_start = -(laststart[1] + laststart[2] + 6);
++ /* Update the length of collating_symbol. */
++ laststart[2] += datasize;
++ had_char_class = false;
++ }
++ }
++ else
++ {
++ c1++;
++ while (c1--)
++ PATUNFETCH;
++ BUF_PUSH ('[');
++ BUF_PUSH (delim);
++ laststart[5] += 2; /* Update the length of characters */
++ range_start = delim;
++ had_char_class = false;
++ }
++ }
++ else
++ {
++ had_char_class = false;
++ BUF_PUSH(c);
++ laststart[5]++; /* Update the length of characters */
++ range_start = c;
++ }
++ }
++
++#else /* BYTE */
++ /* Ensure that we have enough space to push a charset: the
++ opcode, the length count, and the bitset; 34 bytes in all. */
++ GET_BUFFER_SPACE (34);
++
++ laststart = b;
++
++ /* We test `*p == '^' twice, instead of using an if
++ statement, so we only need one BUF_PUSH. */
++ BUF_PUSH (*p == '^' ? charset_not : charset);
++ if (*p == '^')
++ p++;
++
++ /* Remember the first position in the bracket expression. */
++ p1 = p;
++
++ /* Push the number of bytes in the bitmap. */
++ BUF_PUSH ((1 << BYTEWIDTH) / BYTEWIDTH);
++
++ /* Clear the whole map. */
++ bzero (b, (1 << BYTEWIDTH) / BYTEWIDTH);
++
++ /* charset_not matches newline according to a syntax bit. */
++ if ((re_opcode_t) b[-2] == charset_not
++ && (syntax & RE_HAT_LISTS_NOT_NEWLINE))
++ SET_LIST_BIT ('\n');
++
++ /* Read in characters and ranges, setting map bits. */
++ for (;;)
++ {
++ if (p == pend) FREE_STACK_RETURN (REG_EBRACK);
++
++ PATFETCH (c);
++
++ /* \ might escape characters inside [...] and [^...]. */
++ if ((syntax & RE_BACKSLASH_ESCAPE_IN_LISTS) && c == '\\')
++ {
++ if (p == pend) FREE_STACK_RETURN (REG_EESCAPE);
++
++ PATFETCH (c1);
++ SET_LIST_BIT (c1);
++ range_start = c1;
++ continue;
++ }
++
++ /* Could be the end of the bracket expression. If it's
++ not (i.e., when the bracket expression is `[]' so
++ far), the ']' character bit gets set way below. */
++ if (c == ']' && p != p1 + 1)
++ break;
++
++ /* Look ahead to see if it's a range when the last thing
++ was a character class. */
++ if (had_char_class && c == '-' && *p != ']')
++ FREE_STACK_RETURN (REG_ERANGE);
++
++ /* Look ahead to see if it's a range when the last thing
++ was a character: if this is a hyphen not at the
++ beginning or the end of a list, then it's the range
++ operator. */
++ if (c == '-'
++ && !(p - 2 >= pattern && p[-2] == '[')
++ && !(p - 3 >= pattern && p[-3] == '[' && p[-2] == '^')
++ && *p != ']')
++ {
++ reg_errcode_t ret
++ = byte_compile_range (range_start, &p, pend, translate,
++ syntax, b);
++ if (ret != REG_NOERROR) FREE_STACK_RETURN (ret);
++ range_start = 0xffffffff;
++ }
++
++ else if (p[0] == '-' && p[1] != ']')
++ { /* This handles ranges made up of characters only. */
++ reg_errcode_t ret;
++
++ /* Move past the `-'. */
++ PATFETCH (c1);
++
++ ret = byte_compile_range (c, &p, pend, translate, syntax, b);
++ if (ret != REG_NOERROR) FREE_STACK_RETURN (ret);
++ range_start = 0xffffffff;
++ }
++
++ /* See if we're at the beginning of a possible character
++ class. */
++
++ else if (syntax & RE_CHAR_CLASSES && c == '[' && *p == ':')
++ { /* Leave room for the null. */
++ char str[CHAR_CLASS_MAX_LENGTH + 1];
++
++ PATFETCH (c);
++ c1 = 0;
++
++ /* If pattern is `[[:'. */
++ if (p == pend) FREE_STACK_RETURN (REG_EBRACK);
++
++ for (;;)
++ {
++ PATFETCH (c);
++ if ((c == ':' && *p == ']') || p == pend)
++ break;
++ if (((int) c1) < CHAR_CLASS_MAX_LENGTH)
++ str[c1++] = c;
++ else
++ /* This is in any case an invalid class name. */
++ str[0] = '\0';
++ }
++ str[c1] = '\0';
++
++ /* If isn't a word bracketed by `[:' and `:]':
++ undo the ending character, the letters, and leave
++ the leading `:' and `[' (but set bits for them). */
++ if (c == ':' && *p == ']')
++ {
++# if WIDE_CHAR_SUPPORT
++ boolean is_lower = STREQ (str, "lower");
++ boolean is_upper = STREQ (str, "upper");
++ wctype_t wt;
++ int ch;
++
++ wt = IS_CHAR_CLASS (str);
++ if (wt == 0)
++ FREE_STACK_RETURN (REG_ECTYPE);
++
++ /* Throw away the ] at the end of the character
++ class. */
++ PATFETCH (c);
++
++ if (p == pend) FREE_STACK_RETURN (REG_EBRACK);
++
++ for (ch = 0; ch < 1 << BYTEWIDTH; ++ch)
++ {
++# ifdef _LIBC
++ if (__iswctype (__btowc (ch), wt))
++ SET_LIST_BIT (ch);
++# else
++ if (iswctype (btowc (ch), wt))
++ SET_LIST_BIT (ch);
++# endif
++
++ if (translate && (is_upper || is_lower)
++ && (ISUPPER (ch) || ISLOWER (ch)))
++ SET_LIST_BIT (ch);
++ }
++
++ had_char_class = true;
++# else
++ int ch;
++ boolean is_alnum = STREQ (str, "alnum");
++ boolean is_alpha = STREQ (str, "alpha");
++ boolean is_blank = STREQ (str, "blank");
++ boolean is_cntrl = STREQ (str, "cntrl");
++ boolean is_digit = STREQ (str, "digit");
++ boolean is_graph = STREQ (str, "graph");
++ boolean is_lower = STREQ (str, "lower");
++ boolean is_print = STREQ (str, "print");
++ boolean is_punct = STREQ (str, "punct");
++ boolean is_space = STREQ (str, "space");
++ boolean is_upper = STREQ (str, "upper");
++ boolean is_xdigit = STREQ (str, "xdigit");
++
++ if (!IS_CHAR_CLASS (str))
++ FREE_STACK_RETURN (REG_ECTYPE);
++
++ /* Throw away the ] at the end of the character
++ class. */
++ PATFETCH (c);
++
++ if (p == pend) FREE_STACK_RETURN (REG_EBRACK);
++
++ for (ch = 0; ch < 1 << BYTEWIDTH; ch++)
++ {
++ /* This was split into 3 if's to
++ avoid an arbitrary limit in some compiler. */
++ if ( (is_alnum && ISALNUM (ch))
++ || (is_alpha && ISALPHA (ch))
++ || (is_blank && ISBLANK (ch))
++ || (is_cntrl && ISCNTRL (ch)))
++ SET_LIST_BIT (ch);
++ if ( (is_digit && ISDIGIT (ch))
++ || (is_graph && ISGRAPH (ch))
++ || (is_lower && ISLOWER (ch))
++ || (is_print && ISPRINT (ch)))
++ SET_LIST_BIT (ch);
++ if ( (is_punct && ISPUNCT (ch))
++ || (is_space && ISSPACE (ch))
++ || (is_upper && ISUPPER (ch))
++ || (is_xdigit && ISXDIGIT (ch)))
++ SET_LIST_BIT (ch);
++ if ( translate && (is_upper || is_lower)
++ && (ISUPPER (ch) || ISLOWER (ch)))
++ SET_LIST_BIT (ch);
++ }
++ had_char_class = true;
++# endif /* libc || wctype.h */
++ }
++ else
++ {
++ c1++;
++ while (c1--)
++ PATUNFETCH;
++ SET_LIST_BIT ('[');
++ SET_LIST_BIT (':');
++ range_start = ':';
++ had_char_class = false;
++ }
++ }
++ else if (syntax & RE_CHAR_CLASSES && c == '[' && *p == '=')
++ {
++ unsigned char str[MB_LEN_MAX + 1];
++# ifdef _LIBC
++ uint32_t nrules =
++ _NL_CURRENT_WORD (LC_COLLATE, _NL_COLLATE_NRULES);
++# endif
++
++ PATFETCH (c);
++ c1 = 0;
++
++ /* If pattern is `[[='. */
++ if (p == pend) FREE_STACK_RETURN (REG_EBRACK);
++
++ for (;;)
++ {
++ PATFETCH (c);
++ if ((c == '=' && *p == ']') || p == pend)
++ break;
++ if (c1 < MB_LEN_MAX)
++ str[c1++] = c;
++ else
++ /* This is in any case an invalid class name. */
++ str[0] = '\0';
++ }
++ str[c1] = '\0';
++
++ if (c == '=' && *p == ']' && str[0] != '\0')
++ {
++ /* If we have no collation data we use the default
++ collation in which each character is in a class
++ by itself. It also means that ASCII is the
++		 character set and therefore we cannot have characters
++ with more than one byte in the multibyte
++ representation. */
++# ifdef _LIBC
++ if (nrules == 0)
++# endif
++ {
++ if (c1 != 1)
++ FREE_STACK_RETURN (REG_ECOLLATE);
++
++ /* Throw away the ] at the end of the equivalence
++ class. */
++ PATFETCH (c);
++
++ /* Set the bit for the character. */
++ SET_LIST_BIT (str[0]);
++ }
++# ifdef _LIBC
++ else
++ {
++ /* Try to match the byte sequence in `str' against
++ those known to the collate implementation.
++ First find out whether the bytes in `str' are
++ actually from exactly one character. */
++ const int32_t *table;
++ const unsigned char *weights;
++ const unsigned char *extra;
++ const int32_t *indirect;
++ int32_t idx;
++ const unsigned char *cp = str;
++ int ch;
++
++ /* This #include defines a local function! */
++# include <locale/weight.h>
++
++ table = (const int32_t *)
++ _NL_CURRENT (LC_COLLATE, _NL_COLLATE_TABLEMB);
++ weights = (const unsigned char *)
++ _NL_CURRENT (LC_COLLATE, _NL_COLLATE_WEIGHTMB);
++ extra = (const unsigned char *)
++ _NL_CURRENT (LC_COLLATE, _NL_COLLATE_EXTRAMB);
++ indirect = (const int32_t *)
++ _NL_CURRENT (LC_COLLATE, _NL_COLLATE_INDIRECTMB);
++
++ idx = findidx (&cp, c1);
++ if (idx == 0 || cp < str + c1)
++ /* This is no valid character. */
++ FREE_STACK_RETURN (REG_ECOLLATE);
++
++ /* Throw away the ] at the end of the equivalence
++ class. */
++ PATFETCH (c);
++
++		      /* Now we have to go through the whole table
++ and find all characters which have the same
++ first level weight.
++
++ XXX Note that this is not entirely correct.
++ we would have to match multibyte sequences
++ but this is not possible with the current
++ implementation. */
++ for (ch = 1; ch < 256; ++ch)
++ /* XXX This test would have to be changed if we
++ would allow matching multibyte sequences. */
++ if (table[ch] > 0)
++ {
++ int32_t idx2 = table[ch];
++ size_t len = weights[idx2];
++
++			  /* Test whether the lengths match. */
++ if (weights[idx] == len)
++ {
++			      /* They do.  Now compare the bytes of
++ the weight. */
++ size_t cnt = 0;
++
++ while (cnt < len
++ && (weights[idx + 1 + cnt]
++ == weights[idx2 + 1 + cnt]))
++ ++cnt;
++
++ if (cnt == len)
++ /* They match. Mark the character as
++ acceptable. */
++ SET_LIST_BIT (ch);
++ }
++ }
++ }
++# endif
++ had_char_class = true;
++ }
++ else
++ {
++ c1++;
++ while (c1--)
++ PATUNFETCH;
++ SET_LIST_BIT ('[');
++ SET_LIST_BIT ('=');
++ range_start = '=';
++ had_char_class = false;
++ }
++ }
++ else if (syntax & RE_CHAR_CLASSES && c == '[' && *p == '.')
++ {
++ unsigned char str[128]; /* Should be large enough. */
++# ifdef _LIBC
++ uint32_t nrules =
++ _NL_CURRENT_WORD (LC_COLLATE, _NL_COLLATE_NRULES);
++# endif
++
++ PATFETCH (c);
++ c1 = 0;
++
++ /* If pattern is `[[.'. */
++ if (p == pend) FREE_STACK_RETURN (REG_EBRACK);
++
++ for (;;)
++ {
++ PATFETCH (c);
++ if ((c == '.' && *p == ']') || p == pend)
++ break;
++ if (c1 < sizeof (str))
++ str[c1++] = c;
++ else
++ /* This is in any case an invalid class name. */
++ str[0] = '\0';
++ }
++ str[c1] = '\0';
++
++ if (c == '.' && *p == ']' && str[0] != '\0')
++ {
++ /* If we have no collation data we use the default
++ collation in which each character is the name
++ for its own class which contains only the one
++ character. It also means that ASCII is the
++		 character set and therefore we cannot have characters
++ with more than one byte in the multibyte
++ representation. */
++# ifdef _LIBC
++ if (nrules == 0)
++# endif
++ {
++ if (c1 != 1)
++ FREE_STACK_RETURN (REG_ECOLLATE);
++
++ /* Throw away the ] at the end of the equivalence
++ class. */
++ PATFETCH (c);
++
++ /* Set the bit for the character. */
++ SET_LIST_BIT (str[0]);
++ range_start = ((const unsigned char *) str)[0];
++ }
++# ifdef _LIBC
++ else
++ {
++ /* Try to match the byte sequence in `str' against
++ those known to the collate implementation.
++ First find out whether the bytes in `str' are
++ actually from exactly one character. */
++ int32_t table_size;
++ const int32_t *symb_table;
++ const unsigned char *extra;
++ int32_t idx;
++ int32_t elem;
++ int32_t second;
++ int32_t hash;
++
++ table_size =
++ _NL_CURRENT_WORD (LC_COLLATE,
++ _NL_COLLATE_SYMB_HASH_SIZEMB);
++ symb_table = (const int32_t *)
++ _NL_CURRENT (LC_COLLATE,
++ _NL_COLLATE_SYMB_TABLEMB);
++ extra = (const unsigned char *)
++ _NL_CURRENT (LC_COLLATE,
++ _NL_COLLATE_SYMB_EXTRAMB);
++
++ /* Locate the character in the hashing table. */
++ hash = elem_hash ((const char *) str, c1);
++
++ idx = 0;
++ elem = hash % table_size;
++ second = hash % (table_size - 2);
++ while (symb_table[2 * elem] != 0)
++ {
++ /* First compare the hashing value. */
++ if (symb_table[2 * elem] == hash
++ && c1 == extra[symb_table[2 * elem + 1]]
++ && memcmp (str,
++ &extra[symb_table[2 * elem + 1]
++ + 1],
++ c1) == 0)
++ {
++ /* Yep, this is the entry. */
++ idx = symb_table[2 * elem + 1];
++ idx += 1 + extra[idx];
++ break;
++ }
++
++ /* Next entry. */
++ elem += second;
++ }
++
++ if (symb_table[2 * elem] == 0)
++ /* This is no valid character. */
++ FREE_STACK_RETURN (REG_ECOLLATE);
++
++ /* Throw away the ] at the end of the equivalence
++ class. */
++ PATFETCH (c);
++
++ /* Now add the multibyte character(s) we found
++ to the accept list.
++
++ XXX Note that this is not entirely correct.
++ we would have to match multibyte sequences
++ but this is not possible with the current
++ implementation. Also, we have to match
++ collating symbols, which expand to more than
++		 one character, as a whole and not allow the
++ individual bytes. */
++ c1 = extra[idx++];
++ if (c1 == 1)
++ range_start = extra[idx];
++ while (c1-- > 0)
++ {
++ SET_LIST_BIT (extra[idx]);
++ ++idx;
++ }
++ }
++# endif
++ had_char_class = false;
++ }
++ else
++ {
++ c1++;
++ while (c1--)
++ PATUNFETCH;
++ SET_LIST_BIT ('[');
++ SET_LIST_BIT ('.');
++ range_start = '.';
++ had_char_class = false;
++ }
++ }
++ else
++ {
++ had_char_class = false;
++ SET_LIST_BIT (c);
++ range_start = c;
++ }
++ }
++
++ /* Discard any (non)matching list bytes that are all 0 at the
++ end of the map. Decrease the map-length byte too. */
++ while ((int) b[-1] > 0 && b[b[-1] - 1] == 0)
++ b[-1]--;
++ b += b[-1];
++#endif /* WCHAR */
++ }
++ break;
++
++
++ case '(':
++ if (syntax & RE_NO_BK_PARENS)
++ goto handle_open;
++ else
++ goto normal_char;
++
++
++ case ')':
++ if (syntax & RE_NO_BK_PARENS)
++ goto handle_close;
++ else
++ goto normal_char;
++
++
++ case '\n':
++ if (syntax & RE_NEWLINE_ALT)
++ goto handle_alt;
++ else
++ goto normal_char;
++
++
++ case '|':
++ if (syntax & RE_NO_BK_VBAR)
++ goto handle_alt;
++ else
++ goto normal_char;
++
++
++ case '{':
++ if (syntax & RE_INTERVALS && syntax & RE_NO_BK_BRACES)
++ goto handle_interval;
++ else
++ goto normal_char;
++
++
++ case '\\':
++ if (p == pend) FREE_STACK_RETURN (REG_EESCAPE);
++
++ /* Do not translate the character after the \, so that we can
++ distinguish, e.g., \B from \b, even if we normally would
++ translate, e.g., B to b. */
++ PATFETCH_RAW (c);
++
++ switch (c)
++ {
++ case '(':
++ if (syntax & RE_NO_BK_PARENS)
++ goto normal_backslash;
++
++ handle_open:
++ bufp->re_nsub++;
++ regnum++;
++
++ if (COMPILE_STACK_FULL)
++ {
++ RETALLOC (compile_stack.stack, compile_stack.size << 1,
++ compile_stack_elt_t);
++ if (compile_stack.stack == NULL) return REG_ESPACE;
++
++ compile_stack.size <<= 1;
++ }
++
++ /* These are the values to restore when we hit end of this
++ group. They are all relative offsets, so that if the
++ whole pattern moves because of realloc, they will still
++ be valid. */
++ COMPILE_STACK_TOP.begalt_offset = begalt - COMPILED_BUFFER_VAR;
++ COMPILE_STACK_TOP.fixup_alt_jump
++ = fixup_alt_jump ? fixup_alt_jump - COMPILED_BUFFER_VAR + 1 : 0;
++ COMPILE_STACK_TOP.laststart_offset = b - COMPILED_BUFFER_VAR;
++ COMPILE_STACK_TOP.regnum = regnum;
++
++ /* We will eventually replace the 0 with the number of
++ groups inner to this one. But do not push a
++ start_memory for groups beyond the last one we can
++ represent in the compiled pattern. */
++ if (regnum <= MAX_REGNUM)
++ {
++ COMPILE_STACK_TOP.inner_group_offset = b
++ - COMPILED_BUFFER_VAR + 2;
++ BUF_PUSH_3 (start_memory, regnum, 0);
++ }
++
++ compile_stack.avail++;
++
++ fixup_alt_jump = 0;
++ laststart = 0;
++ begalt = b;
++ /* If we've reached MAX_REGNUM groups, then this open
++ won't actually generate any code, so we'll have to
++ clear pending_exact explicitly. */
++ pending_exact = 0;
++ break;
++
++
++ case ')':
++ if (syntax & RE_NO_BK_PARENS) goto normal_backslash;
++
++ if (COMPILE_STACK_EMPTY)
++ {
++ if (syntax & RE_UNMATCHED_RIGHT_PAREN_ORD)
++ goto normal_backslash;
++ else
++ FREE_STACK_RETURN (REG_ERPAREN);
++ }
++
++ handle_close:
++ if (fixup_alt_jump)
++ { /* Push a dummy failure point at the end of the
++ alternative for a possible future
++ `pop_failure_jump' to pop. See comments at
++ `push_dummy_failure' in `re_match_2'. */
++ BUF_PUSH (push_dummy_failure);
++
++ /* We allocated space for this jump when we assigned
++ to `fixup_alt_jump', in the `handle_alt' case below. */
++ STORE_JUMP (jump_past_alt, fixup_alt_jump, b - 1);
++ }
++
++ /* See similar code for backslashed left paren above. */
++ if (COMPILE_STACK_EMPTY)
++ {
++ if (syntax & RE_UNMATCHED_RIGHT_PAREN_ORD)
++ goto normal_char;
++ else
++ FREE_STACK_RETURN (REG_ERPAREN);
++ }
++
++ /* Since we just checked for an empty stack above, this
++ ``can't happen''. */
++ assert (compile_stack.avail != 0);
++ {
++ /* We don't just want to restore into `regnum', because
++ later groups should continue to be numbered higher,
++ as in `(ab)c(de)' -- the second group is #2. */
++ regnum_t this_group_regnum;
++
++ compile_stack.avail--;
++ begalt = COMPILED_BUFFER_VAR + COMPILE_STACK_TOP.begalt_offset;
++ fixup_alt_jump
++ = COMPILE_STACK_TOP.fixup_alt_jump
++ ? COMPILED_BUFFER_VAR + COMPILE_STACK_TOP.fixup_alt_jump - 1
++ : 0;
++ laststart = COMPILED_BUFFER_VAR + COMPILE_STACK_TOP.laststart_offset;
++ this_group_regnum = COMPILE_STACK_TOP.regnum;
++ /* If we've reached MAX_REGNUM groups, then this open
++ won't actually generate any code, so we'll have to
++ clear pending_exact explicitly. */
++ pending_exact = 0;
++
++ /* We're at the end of the group, so now we know how many
++ groups were inside this one. */
++ if (this_group_regnum <= MAX_REGNUM)
++ {
++ UCHAR_T *inner_group_loc
++ = COMPILED_BUFFER_VAR + COMPILE_STACK_TOP.inner_group_offset;
++
++ *inner_group_loc = regnum - this_group_regnum;
++ BUF_PUSH_3 (stop_memory, this_group_regnum,
++ regnum - this_group_regnum);
++ }
++ }
++ break;
++
++
++ case '|': /* `\|'. */
++ if (syntax & RE_LIMITED_OPS || syntax & RE_NO_BK_VBAR)
++ goto normal_backslash;
++ handle_alt:
++ if (syntax & RE_LIMITED_OPS)
++ goto normal_char;
++
++ /* Insert before the previous alternative a jump which
++ jumps to this alternative if the former fails. */
++ GET_BUFFER_SPACE (1 + OFFSET_ADDRESS_SIZE);
++ INSERT_JUMP (on_failure_jump, begalt,
++ b + 2 + 2 * OFFSET_ADDRESS_SIZE);
++ pending_exact = 0;
++ b += 1 + OFFSET_ADDRESS_SIZE;
++
++ /* The alternative before this one has a jump after it
++ which gets executed if it gets matched. Adjust that
++ jump so it will jump to this alternative's analogous
++ jump (put in below, which in turn will jump to the next
++ (if any) alternative's such jump, etc.). The last such
++ jump jumps to the correct final destination. A picture:
++ _____ _____
++ | | | |
++ | v | v
++ a | b | c
++
++ If we are at `b', then fixup_alt_jump right now points to a
++ three-byte space after `a'. We'll put in the jump, set
++ fixup_alt_jump to right after `b', and leave behind three
++ bytes which we'll fill in when we get to after `c'. */
++
++ if (fixup_alt_jump)
++ STORE_JUMP (jump_past_alt, fixup_alt_jump, b);
++
++ /* Mark and leave space for a jump after this alternative,
++	     to be filled in later either by the next alternative or
++	     when we know we're at the end of a series of alternatives. */
++ fixup_alt_jump = b;
++ GET_BUFFER_SPACE (1 + OFFSET_ADDRESS_SIZE);
++ b += 1 + OFFSET_ADDRESS_SIZE;
++
++ laststart = 0;
++ begalt = b;
++ break;
++
++
++ case '{':
++ /* If \{ is a literal. */
++ if (!(syntax & RE_INTERVALS)
++ /* If we're at `\{' and it's not the open-interval
++ operator. */
++ || (syntax & RE_NO_BK_BRACES))
++ goto normal_backslash;
++
++ handle_interval:
++ {
++	    /* If we got here, then the syntax allows intervals. */
++
++ /* At least (most) this many matches must be made. */
++ int lower_bound = -1, upper_bound = -1;
++
++ /* Place in the uncompiled pattern (i.e., just after
++ the '{') to go back to if the interval is invalid. */
++ const CHAR_T *beg_interval = p;
++
++ if (p == pend)
++ goto invalid_interval;
++
++ GET_UNSIGNED_NUMBER (lower_bound);
++
++ if (c == ',')
++ {
++ GET_UNSIGNED_NUMBER (upper_bound);
++ if (upper_bound < 0)
++ upper_bound = RE_DUP_MAX;
++ }
++ else
++ /* Interval such as `{1}' => match exactly once. */
++ upper_bound = lower_bound;
++
++ if (! (0 <= lower_bound && lower_bound <= upper_bound))
++ goto invalid_interval;
++
++ if (!(syntax & RE_NO_BK_BRACES))
++ {
++ if (c != '\\' || p == pend)
++ goto invalid_interval;
++ PATFETCH (c);
++ }
++
++ if (c != '}')
++ goto invalid_interval;
++
++ /* If it's invalid to have no preceding re. */
++ if (!laststart)
++ {
++ if (syntax & RE_CONTEXT_INVALID_OPS
++ && !(syntax & RE_INVALID_INTERVAL_ORD))
++ FREE_STACK_RETURN (REG_BADRPT);
++ else if (syntax & RE_CONTEXT_INDEP_OPS)
++ laststart = b;
++ else
++ goto unfetch_interval;
++ }
++
++ /* We just parsed a valid interval. */
++
++ if (RE_DUP_MAX < upper_bound)
++ FREE_STACK_RETURN (REG_BADBR);
++
++ /* If the upper bound is zero, don't want to succeed at
++ all; jump from `laststart' to `b + 3', which will be
++ the end of the buffer after we insert the jump. */
++ /* ifdef WCHAR, 'b + 1 + OFFSET_ADDRESS_SIZE'
++ instead of 'b + 3'. */
++ if (upper_bound == 0)
++ {
++ GET_BUFFER_SPACE (1 + OFFSET_ADDRESS_SIZE);
++ INSERT_JUMP (jump, laststart, b + 1
++ + OFFSET_ADDRESS_SIZE);
++ b += 1 + OFFSET_ADDRESS_SIZE;
++ }
++
++ /* Otherwise, we have a nontrivial interval. When
++ we're all done, the pattern will look like:
++ set_number_at <jump count> <upper bound>
++ set_number_at <succeed_n count> <lower bound>
++ succeed_n <after jump addr> <succeed_n count>
++ <body of loop>
++ jump_n <succeed_n addr> <jump count>
++ (The upper bound and `jump_n' are omitted if
++ `upper_bound' is 1, though.) */
++ else
++ { /* If the upper bound is > 1, we need to insert
++ more at the end of the loop. */
++ unsigned nbytes = 2 + 4 * OFFSET_ADDRESS_SIZE +
++ (upper_bound > 1) * (2 + 4 * OFFSET_ADDRESS_SIZE);
++
++ GET_BUFFER_SPACE (nbytes);
++
++ /* Initialize lower bound of the `succeed_n', even
++ though it will be set during matching by its
++ attendant `set_number_at' (inserted next),
++ because `re_compile_fastmap' needs to know.
++ Jump to the `jump_n' we might insert below. */
++ INSERT_JUMP2 (succeed_n, laststart,
++ b + 1 + 2 * OFFSET_ADDRESS_SIZE
++ + (upper_bound > 1) * (1 + 2 * OFFSET_ADDRESS_SIZE)
++ , lower_bound);
++ b += 1 + 2 * OFFSET_ADDRESS_SIZE;
++
++ /* Code to initialize the lower bound. Insert
++ before the `succeed_n'. The `5' is the last two
++ bytes of this `set_number_at', plus 3 bytes of
++ the following `succeed_n'. */
++ /* ifdef WCHAR, The '1+2*OFFSET_ADDRESS_SIZE'
++ is the 'set_number_at', plus '1+OFFSET_ADDRESS_SIZE'
++ of the following `succeed_n'. */
++ PREFIX(insert_op2) (set_number_at, laststart, 1
++ + 2 * OFFSET_ADDRESS_SIZE, lower_bound, b);
++ b += 1 + 2 * OFFSET_ADDRESS_SIZE;
++
++ if (upper_bound > 1)
++ { /* More than one repetition is allowed, so
++ append a backward jump to the `succeed_n'
++ that starts this interval.
++
++ When we've reached this during matching,
++ we'll have matched the interval once, so
++ jump back only `upper_bound - 1' times. */
++ STORE_JUMP2 (jump_n, b, laststart
++ + 2 * OFFSET_ADDRESS_SIZE + 1,
++ upper_bound - 1);
++ b += 1 + 2 * OFFSET_ADDRESS_SIZE;
++
++ /* The location we want to set is the second
++ parameter of the `jump_n'; that is `b-2' as
++ an absolute address. `laststart' will be
++ the `set_number_at' we're about to insert;
++ `laststart+3' the number to set, the source
++ for the relative address. But we are
++ inserting into the middle of the pattern --
++ so everything is getting moved up by 5.
++ Conclusion: (b - 2) - (laststart + 3) + 5,
++ i.e., b - laststart.
++
++ We insert this at the beginning of the loop
++ so that if we fail during matching, we'll
++ reinitialize the bounds. */
++ PREFIX(insert_op2) (set_number_at, laststart,
++ b - laststart,
++ upper_bound - 1, b);
++ b += 1 + 2 * OFFSET_ADDRESS_SIZE;
++ }
++ }
++ pending_exact = 0;
++ break;
++
++ invalid_interval:
++ if (!(syntax & RE_INVALID_INTERVAL_ORD))
++ FREE_STACK_RETURN (p == pend ? REG_EBRACE : REG_BADBR);
++ unfetch_interval:
++ /* Match the characters as literals. */
++ p = beg_interval;
++ c = '{';
++ if (syntax & RE_NO_BK_BRACES)
++ goto normal_char;
++ else
++ goto normal_backslash;
++ }
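++
++	/* A rough illustration of the layout described above, with byte
++	   offsets and OFFSET_ADDRESS_SIZE omitted: a pattern such as
++	   `a{2,4}' would compile to approximately
++
++	       set_number_at <jump_n count slot> 3      (upper_bound - 1)
++	       set_number_at <succeed_n count slot> 2   (lower_bound)
++	       succeed_n <past the jump_n> 2
++	       exactn 1 'a'
++	       jump_n <back to the succeed_n> 3
++
++	   so the loop body must match at least 2 and at most 4 times. */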
++
++#ifdef emacs
++ /* There is no way to specify the before_dot and after_dot
++ operators. rms says this is ok. --karl */
++ case '=':
++ BUF_PUSH (at_dot);
++ break;
++
++ case 's':
++ laststart = b;
++ PATFETCH (c);
++ BUF_PUSH_2 (syntaxspec, syntax_spec_code[c]);
++ break;
++
++ case 'S':
++ laststart = b;
++ PATFETCH (c);
++ BUF_PUSH_2 (notsyntaxspec, syntax_spec_code[c]);
++ break;
++#endif /* emacs */
++
++
++ case 'w':
++ if (syntax & RE_NO_GNU_OPS)
++ goto normal_char;
++ laststart = b;
++ BUF_PUSH (wordchar);
++ break;
++
++
++ case 'W':
++ if (syntax & RE_NO_GNU_OPS)
++ goto normal_char;
++ laststart = b;
++ BUF_PUSH (notwordchar);
++ break;
++
++
++ case '<':
++ if (syntax & RE_NO_GNU_OPS)
++ goto normal_char;
++ BUF_PUSH (wordbeg);
++ break;
++
++ case '>':
++ if (syntax & RE_NO_GNU_OPS)
++ goto normal_char;
++ BUF_PUSH (wordend);
++ break;
++
++ case 'b':
++ if (syntax & RE_NO_GNU_OPS)
++ goto normal_char;
++ BUF_PUSH (wordbound);
++ break;
++
++ case 'B':
++ if (syntax & RE_NO_GNU_OPS)
++ goto normal_char;
++ BUF_PUSH (notwordbound);
++ break;
++
++ case '`':
++ if (syntax & RE_NO_GNU_OPS)
++ goto normal_char;
++ BUF_PUSH (begbuf);
++ break;
++
++ case '\'':
++ if (syntax & RE_NO_GNU_OPS)
++ goto normal_char;
++ BUF_PUSH (endbuf);
++ break;
++
++ case '1': case '2': case '3': case '4': case '5':
++ case '6': case '7': case '8': case '9':
++ if (syntax & RE_NO_BK_REFS)
++ goto normal_char;
++
++ c1 = c - '0';
++
++ if (c1 > regnum)
++ FREE_STACK_RETURN (REG_ESUBREG);
++
++ /* Can't back reference to a subexpression if inside of it. */
++ if (group_in_compile_stack (compile_stack, (regnum_t) c1))
++ goto normal_char;
++
++ laststart = b;
++ BUF_PUSH_2 (duplicate, c1);
++ break;
++
++
++ case '+':
++ case '?':
++ if (syntax & RE_BK_PLUS_QM)
++ goto handle_plus;
++ else
++ goto normal_backslash;
++
++ default:
++ normal_backslash:
++ /* You might think it would be useful for \ to mean
++ not to translate; but if we don't translate it
++ it will never match anything. */
++ c = TRANSLATE (c);
++ goto normal_char;
++ }
++ break;
++
++
++ default:
++ /* Expects the character in `c'. */
++ normal_char:
++ /* If no exactn currently being built. */
++ if (!pending_exact
++#ifdef WCHAR
++	      /* If the last exactn handled binary (or character) data and
++		 the new exactn handles character (or binary) data. */
++ || is_exactn_bin != is_binary[p - 1 - pattern]
++#endif /* WCHAR */
++
++ /* If last exactn not at current position. */
++ || pending_exact + *pending_exact + 1 != b
++
++ /* We have only one byte following the exactn for the count. */
++ || *pending_exact == (1 << BYTEWIDTH) - 1
++
++ /* If followed by a repetition operator. */
++ || *p == '*' || *p == '^'
++ || ((syntax & RE_BK_PLUS_QM)
++ ? *p == '\\' && (p[1] == '+' || p[1] == '?')
++ : (*p == '+' || *p == '?'))
++ || ((syntax & RE_INTERVALS)
++ && ((syntax & RE_NO_BK_BRACES)
++ ? *p == '{'
++ : (p[0] == '\\' && p[1] == '{'))))
++ {
++ /* Start building a new exactn. */
++
++ laststart = b;
++
++#ifdef WCHAR
++ /* Is this exactn binary data or character? */
++ is_exactn_bin = is_binary[p - 1 - pattern];
++ if (is_exactn_bin)
++ BUF_PUSH_2 (exactn_bin, 0);
++ else
++ BUF_PUSH_2 (exactn, 0);
++#else
++ BUF_PUSH_2 (exactn, 0);
++#endif /* WCHAR */
++ pending_exact = b - 1;
++ }
++
++ BUF_PUSH (c);
++ (*pending_exact)++;
++ break;
++ } /* switch (c) */
++ } /* while p != pend */
++
++
++ /* Through the pattern now. */
++
++ if (fixup_alt_jump)
++ STORE_JUMP (jump_past_alt, fixup_alt_jump, b);
++
++ if (!COMPILE_STACK_EMPTY)
++ FREE_STACK_RETURN (REG_EPAREN);
++
++ /* If we don't want backtracking, force success
++ the first time we reach the end of the compiled pattern. */
++ if (syntax & RE_NO_POSIX_BACKTRACKING)
++ BUF_PUSH (succeed);
++
++#ifdef WCHAR
++ free (pattern);
++ free (mbs_offset);
++ free (is_binary);
++#endif
++ free (compile_stack.stack);
++
++ /* We have succeeded; set the length of the buffer. */
++#ifdef WCHAR
++ bufp->used = (uintptr_t) b - (uintptr_t) COMPILED_BUFFER_VAR;
++#else
++ bufp->used = b - bufp->buffer;
++#endif
++
++#ifdef DEBUG
++ if (debug)
++ {
++ DEBUG_PRINT1 ("\nCompiled pattern: \n");
++ PREFIX(print_compiled_pattern) (bufp);
++ }
++#endif /* DEBUG */
++
++#ifndef MATCH_MAY_ALLOCATE
++ /* Initialize the failure stack to the largest possible stack. This
++ isn't necessary unless we're trying to avoid calling alloca in
++ the search and match routines. */
++ {
++ int num_regs = bufp->re_nsub + 1;
++
++ /* Since DOUBLE_FAIL_STACK refuses to double only if the current size
++ is strictly greater than re_max_failures, the largest possible stack
++ is 2 * re_max_failures failure points. */
++ if (fail_stack.size < (2 * re_max_failures * MAX_FAILURE_ITEMS))
++ {
++ fail_stack.size = (2 * re_max_failures * MAX_FAILURE_ITEMS);
++
++# ifdef emacs
++ if (! fail_stack.stack)
++ fail_stack.stack
++ = (PREFIX(fail_stack_elt_t) *) xmalloc (fail_stack.size
++ * sizeof (PREFIX(fail_stack_elt_t)));
++ else
++ fail_stack.stack
++ = (PREFIX(fail_stack_elt_t) *) xrealloc (fail_stack.stack,
++ (fail_stack.size
++ * sizeof (PREFIX(fail_stack_elt_t))));
++# else /* not emacs */
++ if (! fail_stack.stack)
++ fail_stack.stack
++ = (PREFIX(fail_stack_elt_t) *) malloc (fail_stack.size
++ * sizeof (PREFIX(fail_stack_elt_t)));
++ else
++ fail_stack.stack
++ = (PREFIX(fail_stack_elt_t) *) realloc (fail_stack.stack,
++ (fail_stack.size
++ * sizeof (PREFIX(fail_stack_elt_t))));
++# endif /* not emacs */
++ }
++
++ PREFIX(regex_grow_registers) (num_regs);
++ }
++#endif /* not MATCH_MAY_ALLOCATE */
++
++ return REG_NOERROR;
++} /* regex_compile */
++
++/* Subroutines for `regex_compile'. */
++
++/* Store OP at LOC followed by two-byte integer parameter ARG. */
++/* ifdef WCHAR, integer parameter is 1 wchar_t. */
++
++static void
++PREFIX(store_op1) (re_opcode_t op, UCHAR_T *loc, int arg)
++{
++ *loc = (UCHAR_T) op;
++ STORE_NUMBER (loc + 1, arg);
++}
++
++
++/* Like `store_op1', but for two two-byte parameters ARG1 and ARG2. */
++/* ifdef WCHAR, integer parameter is 1 wchar_t. */
++
++static void
++PREFIX(store_op2) (re_opcode_t op, UCHAR_T *loc, int arg1, int arg2)
++{
++ *loc = (UCHAR_T) op;
++ STORE_NUMBER (loc + 1, arg1);
++ STORE_NUMBER (loc + 1 + OFFSET_ADDRESS_SIZE, arg2);
++}
++
++
++/* Copy the bytes from LOC to END to open up three bytes of space at LOC
++ for OP followed by two-byte integer parameter ARG. */
++/* ifdef WCHAR, integer parameter is 1 wchar_t. */
++
++static void
++PREFIX(insert_op1) (re_opcode_t op, UCHAR_T *loc, int arg, UCHAR_T *end)
++{
++ register UCHAR_T *pfrom = end;
++ register UCHAR_T *pto = end + 1 + OFFSET_ADDRESS_SIZE;
++
++ while (pfrom != loc)
++ *--pto = *--pfrom;
++
++ PREFIX(store_op1) (op, loc, arg);
++}
++
++
++/* Like `insert_op1', but for two two-byte parameters ARG1 and ARG2. */
++/* ifdef WCHAR, integer parameter is 1 wchar_t. */
++
++static void
++PREFIX(insert_op2) (re_opcode_t op, UCHAR_T *loc, int arg1,
++ int arg2, UCHAR_T *end)
++{
++ register UCHAR_T *pfrom = end;
++ register UCHAR_T *pto = end + 1 + 2 * OFFSET_ADDRESS_SIZE;
++
++ while (pfrom != loc)
++ *--pto = *--pfrom;
++
++ PREFIX(store_op2) (op, loc, arg1, arg2);
++}
++
++
++/* P points to just after a ^ in PATTERN. Return true if that ^ comes
++ after an alternative or a begin-subexpression. We assume there is at
++ least one character before the ^. */
++
++static boolean
++PREFIX(at_begline_loc_p) (const CHAR_T *pattern, const CHAR_T *p,
++ reg_syntax_t syntax)
++{
++ const CHAR_T *prev = p - 2;
++ boolean prev_prev_backslash = prev > pattern && prev[-1] == '\\';
++
++ return
++ /* After a subexpression? */
++ (*prev == '(' && (syntax & RE_NO_BK_PARENS || prev_prev_backslash))
++ /* After an alternative? */
++ || (*prev == '|' && (syntax & RE_NO_BK_VBAR || prev_prev_backslash));
++}
++
++
++/* The dual of at_begline_loc_p. This one is for $. We assume there is
++ at least one character after the $, i.e., `P < PEND'. */
++
++static boolean
++PREFIX(at_endline_loc_p) (const CHAR_T *p, const CHAR_T *pend,
++ reg_syntax_t syntax)
++{
++ const CHAR_T *next = p;
++ boolean next_backslash = *next == '\\';
++ const CHAR_T *next_next = p + 1 < pend ? p + 1 : 0;
++
++ return
++ /* Before a subexpression? */
++ (syntax & RE_NO_BK_PARENS ? *next == ')'
++ : next_backslash && next_next && *next_next == ')')
++ /* Before an alternative? */
++ || (syntax & RE_NO_BK_VBAR ? *next == '|'
++ : next_backslash && next_next && *next_next == '|');
++}
++
++#else /* not INSIDE_RECURSION */
++
++/* Returns true if REGNUM is in one of COMPILE_STACK's elements and
++ false if it's not. */
++
++static boolean
++group_in_compile_stack (compile_stack_type compile_stack, regnum_t regnum)
++{
++ int this_element;
++
++ for (this_element = compile_stack.avail - 1;
++ this_element >= 0;
++ this_element--)
++ if (compile_stack.stack[this_element].regnum == regnum)
++ return true;
++
++ return false;
++}
++#endif /* not INSIDE_RECURSION */
++
++#ifdef INSIDE_RECURSION
++
++#ifdef WCHAR
++/* This inserts space of size "num" into the pattern at "loc".
++   "end" must point to the end of the allocated buffer. */
++static void
++insert_space (int num, CHAR_T *loc, CHAR_T *end)
++{
++ register CHAR_T *pto = end;
++ register CHAR_T *pfrom = end - num;
++
++ while (pfrom >= loc)
++ *pto-- = *pfrom--;
++}
++#endif /* WCHAR */
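++
++/* Illustrative sketch of insert_space: given six slots holding
++     A B C D ? ?
++   with `loc' at A and `end' at the final slot, insert_space (2, loc, end)
++   shifts A..D up by two, leaving
++     ? ? A B C D
++   so the two freed positions at `loc' can be filled by the caller
++   (e.g. with the range start/end pair in wcs_compile_range below). */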
++
++#ifdef WCHAR
++static reg_errcode_t
++wcs_compile_range (CHAR_T range_start_char, const CHAR_T **p_ptr,
++ const CHAR_T *pend, RE_TRANSLATE_TYPE translate,
++ reg_syntax_t syntax, CHAR_T *b, CHAR_T *char_set)
++{
++ const CHAR_T *p = *p_ptr;
++ CHAR_T range_start, range_end;
++ reg_errcode_t ret;
++# ifdef _LIBC
++ uint32_t nrules;
++ uint32_t start_val, end_val;
++# endif
++ if (p == pend)
++ return REG_ERANGE;
++
++# ifdef _LIBC
++ nrules = _NL_CURRENT_WORD (LC_COLLATE, _NL_COLLATE_NRULES);
++ if (nrules != 0)
++ {
++ const char *collseq = (const char *) _NL_CURRENT(LC_COLLATE,
++ _NL_COLLATE_COLLSEQWC);
++ const unsigned char *extra = (const unsigned char *)
++ _NL_CURRENT (LC_COLLATE, _NL_COLLATE_SYMB_EXTRAMB);
++
++ if (range_start_char < -1)
++ {
++ /* range_start is a collating symbol. */
++ int32_t *wextra;
++	  /* Retrieve the index and get the collation sequence value. */
++ wextra = (int32_t*)(extra + char_set[-range_start_char]);
++ start_val = wextra[1 + *wextra];
++ }
++ else
++ start_val = collseq_table_lookup(collseq, TRANSLATE(range_start_char));
++
++ end_val = collseq_table_lookup (collseq, TRANSLATE (p[0]));
++
++ /* Report an error if the range is empty and the syntax prohibits
++ this. */
++ ret = ((syntax & RE_NO_EMPTY_RANGES)
++ && (start_val > end_val))? REG_ERANGE : REG_NOERROR;
++
++ /* Insert space to the end of the char_ranges. */
++ insert_space(2, b - char_set[5] - 2, b - 1);
++ *(b - char_set[5] - 2) = (wchar_t)start_val;
++ *(b - char_set[5] - 1) = (wchar_t)end_val;
++ char_set[4]++; /* ranges_index */
++ }
++ else
++# endif
++ {
++ range_start = (range_start_char >= 0)? TRANSLATE (range_start_char):
++ range_start_char;
++ range_end = TRANSLATE (p[0]);
++ /* Report an error if the range is empty and the syntax prohibits
++ this. */
++ ret = ((syntax & RE_NO_EMPTY_RANGES)
++ && (range_start > range_end))? REG_ERANGE : REG_NOERROR;
++
++ /* Insert space to the end of the char_ranges. */
++ insert_space(2, b - char_set[5] - 2, b - 1);
++ *(b - char_set[5] - 2) = range_start;
++ *(b - char_set[5] - 1) = range_end;
++ char_set[4]++; /* ranges_index */
++ }
++ /* Have to increment the pointer into the pattern string, so the
++ caller isn't still at the ending character. */
++ (*p_ptr)++;
++
++ return ret;
++}
++#else /* BYTE */
++/* Read the ending character of a range (in a bracket expression) from the
++ uncompiled pattern *P_PTR (which ends at PEND). We assume the
++ starting character is in `P[-2]'. (`P[-1]' is the character `-'.)
++ Then we set the translation of all bits between the starting and
++ ending characters (inclusive) in the compiled pattern B.
++
++ Return an error code.
++
++ We use these short variable names so we can use the same macros as
++ `regex_compile' itself. */
++
++static reg_errcode_t
++byte_compile_range (unsigned int range_start_char, const char **p_ptr,
++ const char *pend, RE_TRANSLATE_TYPE translate,
++ reg_syntax_t syntax, unsigned char *b)
++{
++ unsigned this_char;
++ const char *p = *p_ptr;
++ reg_errcode_t ret;
++# if _LIBC
++ const unsigned char *collseq;
++ unsigned int start_colseq;
++ unsigned int end_colseq;
++# else
++ unsigned end_char;
++# endif
++
++ if (p == pend)
++ return REG_ERANGE;
++
++ /* Have to increment the pointer into the pattern string, so the
++ caller isn't still at the ending character. */
++ (*p_ptr)++;
++
++ /* Report an error if the range is empty and the syntax prohibits this. */
++ ret = syntax & RE_NO_EMPTY_RANGES ? REG_ERANGE : REG_NOERROR;
++
++# if _LIBC
++ collseq = (const unsigned char *) _NL_CURRENT (LC_COLLATE,
++ _NL_COLLATE_COLLSEQMB);
++
++ start_colseq = collseq[(unsigned char) TRANSLATE (range_start_char)];
++ end_colseq = collseq[(unsigned char) TRANSLATE (p[0])];
++ for (this_char = 0; this_char <= (unsigned char) -1; ++this_char)
++ {
++ unsigned int this_colseq = collseq[(unsigned char) TRANSLATE (this_char)];
++
++ if (start_colseq <= this_colseq && this_colseq <= end_colseq)
++ {
++ SET_LIST_BIT (TRANSLATE (this_char));
++ ret = REG_NOERROR;
++ }
++ }
++# else
++ /* Here we see why `this_char' has to be larger than an `unsigned
++ char' -- we would otherwise go into an infinite loop, since all
++ characters <= 0xff. */
++ range_start_char = TRANSLATE (range_start_char);
++  /* TRANSLATE(p[0]) is cast to char (not unsigned char) in TRANSLATE,
++     and some compilers implicitly promote it to int, so the following
++     for loop may fall into an (almost) infinite loop.
++     E.g. if translate[p[0]] == 0xff, end_char may equal 0xffffffff.
++     To avoid this, we cast p[0] to unsigned int and truncate it. */
++ end_char = ((unsigned)TRANSLATE(p[0]) & ((1 << BYTEWIDTH) - 1));
++
++ for (this_char = range_start_char; this_char <= end_char; ++this_char)
++ {
++ SET_LIST_BIT (TRANSLATE (this_char));
++ ret = REG_NOERROR;
++ }
++# endif
++
++ return ret;
++}
++#endif /* WCHAR */
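++
++/* A minimal example of the non-_LIBC byte path above: for the bracket
++   range `[b-d]', byte_compile_range is entered with range_start_char
++   == 'b' and *p_ptr pointing at 'd', and the final loop runs
++   SET_LIST_BIT over TRANSLATE ('b') .. TRANSLATE ('d'), so the bitmap
++   bits for b, c and d end up set.  The _LIBC path walks the same range
++   in collation-sequence order instead of raw byte order. */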
++
++/* re_compile_fastmap computes a ``fastmap'' for the compiled pattern in
++ BUFP. A fastmap records which of the (1 << BYTEWIDTH) possible
++ characters can start a string that matches the pattern. This fastmap
++ is used by re_search to skip quickly over impossible starting points.
++
++ The caller must supply the address of a (1 << BYTEWIDTH)-byte data
++ area as BUFP->fastmap.
++
++ We set the `fastmap', `fastmap_accurate', and `can_be_null' fields in
++ the pattern buffer.
++
++ Returns 0 if we succeed, -2 if an internal error. */
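++
++/* A rough caller-side sketch (not part of the interface above):
++
++       static char fastmap[1 << BYTEWIDTH];      (256 bytes)
++       bufp->fastmap = fastmap;
++       if (re_compile_fastmap (bufp) == -2)
++         handle the internal error;
++
++   re_search/re_search_2 then consult fastmap[] to skip starting
++   positions that cannot begin a match, unless `can_be_null' is set. */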
++
++#ifdef WCHAR
++/* local function for re_compile_fastmap.
++ truncate wchar_t character to char. */
++static unsigned char truncate_wchar (CHAR_T c);
++
++static unsigned char
++truncate_wchar (CHAR_T c)
++{
++ unsigned char buf[MB_CUR_MAX];
++ mbstate_t state;
++ int retval;
++ memset (&state, '\0', sizeof (state));
++# ifdef _LIBC
++ retval = __wcrtomb (buf, c, &state);
++# else
++ retval = wcrtomb (buf, c, &state);
++# endif
++ return retval > 0 ? buf[0] : (unsigned char) c;
++}
++#endif /* WCHAR */
++
++static int
++PREFIX(re_compile_fastmap) (struct re_pattern_buffer *bufp)
++{
++ int j, k;
++#ifdef MATCH_MAY_ALLOCATE
++ PREFIX(fail_stack_type) fail_stack;
++#endif
++#ifndef REGEX_MALLOC
++ char *destination;
++#endif
++
++ register char *fastmap = bufp->fastmap;
++
++#ifdef WCHAR
++ /* We need to cast pattern to (wchar_t*), because we casted this compiled
++ pattern to (char*) in regex_compile. */
++ UCHAR_T *pattern = (UCHAR_T*)bufp->buffer;
++ register UCHAR_T *pend = (UCHAR_T*) (bufp->buffer + bufp->used);
++#else /* BYTE */
++ UCHAR_T *pattern = bufp->buffer;
++ register UCHAR_T *pend = pattern + bufp->used;
++#endif /* WCHAR */
++ UCHAR_T *p = pattern;
++
++#ifdef REL_ALLOC
++ /* This holds the pointer to the failure stack, when
++ it is allocated relocatably. */
++ fail_stack_elt_t *failure_stack_ptr;
++#endif
++
++ /* Assume that each path through the pattern can be null until
++   proven otherwise.  We set this false at the bottom of the switch
++   statement, which we reach only if a particular path doesn't
++ match the empty string. */
++ boolean path_can_be_null = true;
++
++ /* We aren't doing a `succeed_n' to begin with. */
++ boolean succeed_n_p = false;
++
++ assert (fastmap != NULL && p != NULL);
++
++ INIT_FAIL_STACK ();
++ bzero (fastmap, 1 << BYTEWIDTH); /* Assume nothing's valid. */
++ bufp->fastmap_accurate = 1; /* It will be when we're done. */
++ bufp->can_be_null = 0;
++
++ while (1)
++ {
++ if (p == pend || *p == (UCHAR_T) succeed)
++ {
++ /* We have reached the (effective) end of pattern. */
++ if (!FAIL_STACK_EMPTY ())
++ {
++ bufp->can_be_null |= path_can_be_null;
++
++ /* Reset for next path. */
++ path_can_be_null = true;
++
++ p = fail_stack.stack[--fail_stack.avail].pointer;
++
++ continue;
++ }
++ else
++ break;
++ }
++
++ /* We should never be about to go beyond the end of the pattern. */
++ assert (p < pend);
++
++ switch (SWITCH_ENUM_CAST ((re_opcode_t) *p++))
++ {
++
++ /* I guess the idea here is to simply not bother with a fastmap
++ if a backreference is used, since it's too hard to figure out
++ the fastmap for the corresponding group. Setting
++ `can_be_null' stops `re_search_2' from using the fastmap, so
++ that is all we do. */
++ case duplicate:
++ bufp->can_be_null = 1;
++ goto done;
++
++
++ /* Following are the cases which match a character. These end
++ with `break'. */
++
++#ifdef WCHAR
++ case exactn:
++ fastmap[truncate_wchar(p[1])] = 1;
++ break;
++#else /* BYTE */
++ case exactn:
++ fastmap[p[1]] = 1;
++ break;
++#endif /* WCHAR */
++#ifdef MBS_SUPPORT
++ case exactn_bin:
++ fastmap[p[1]] = 1;
++ break;
++#endif
++
++#ifdef WCHAR
++      /* It is hard to compute a fastmap for (multibyte) characters,
++	 which depend on the current locale. */
++ case charset:
++ case charset_not:
++ case wordchar:
++ case notwordchar:
++ bufp->can_be_null = 1;
++ goto done;
++#else /* BYTE */
++ case charset:
++ for (j = *p++ * BYTEWIDTH - 1; j >= 0; j--)
++ if (p[j / BYTEWIDTH] & (1 << (j % BYTEWIDTH)))
++ fastmap[j] = 1;
++ break;
++
++
++ case charset_not:
++ /* Chars beyond end of map must be allowed. */
++ for (j = *p * BYTEWIDTH; j < (1 << BYTEWIDTH); j++)
++ fastmap[j] = 1;
++
++ for (j = *p++ * BYTEWIDTH - 1; j >= 0; j--)
++ if (!(p[j / BYTEWIDTH] & (1 << (j % BYTEWIDTH))))
++ fastmap[j] = 1;
++ break;
++
++
++ case wordchar:
++ for (j = 0; j < (1 << BYTEWIDTH); j++)
++ if (SYNTAX (j) == Sword)
++ fastmap[j] = 1;
++ break;
++
++
++ case notwordchar:
++ for (j = 0; j < (1 << BYTEWIDTH); j++)
++ if (SYNTAX (j) != Sword)
++ fastmap[j] = 1;
++ break;
++#endif /* WCHAR */
++
++ case anychar:
++ {
++ int fastmap_newline = fastmap['\n'];
++
++ /* `.' matches anything ... */
++ for (j = 0; j < (1 << BYTEWIDTH); j++)
++ fastmap[j] = 1;
++
++ /* ... except perhaps newline. */
++ if (!(bufp->syntax & RE_DOT_NEWLINE))
++ fastmap['\n'] = fastmap_newline;
++
++ /* Return if we have already set `can_be_null'; if we have,
++ then the fastmap is irrelevant. Something's wrong here. */
++ else if (bufp->can_be_null)
++ goto done;
++
++ /* Otherwise, have to check alternative paths. */
++ break;
++ }
++
++#ifdef emacs
++ case syntaxspec:
++ k = *p++;
++ for (j = 0; j < (1 << BYTEWIDTH); j++)
++ if (SYNTAX (j) == (enum syntaxcode) k)
++ fastmap[j] = 1;
++ break;
++
++
++ case notsyntaxspec:
++ k = *p++;
++ for (j = 0; j < (1 << BYTEWIDTH); j++)
++ if (SYNTAX (j) != (enum syntaxcode) k)
++ fastmap[j] = 1;
++ break;
++
++
++ /* All cases after this match the empty string. These end with
++ `continue'. */
++
++
++ case before_dot:
++ case at_dot:
++ case after_dot:
++ continue;
++#endif /* emacs */
++
++
++ case no_op:
++ case begline:
++ case endline:
++ case begbuf:
++ case endbuf:
++ case wordbound:
++ case notwordbound:
++ case wordbeg:
++ case wordend:
++ case push_dummy_failure:
++ continue;
++
++
++ case jump_n:
++ case pop_failure_jump:
++ case maybe_pop_jump:
++ case jump:
++ case jump_past_alt:
++ case dummy_failure_jump:
++ EXTRACT_NUMBER_AND_INCR (j, p);
++ p += j;
++ if (j > 0)
++ continue;
++
++ /* Jump backward implies we just went through the body of a
++ loop and matched nothing. Opcode jumped to should be
++ `on_failure_jump' or `succeed_n'. Just treat it like an
++ ordinary jump. For a * loop, it has pushed its failure
++ point already; if so, discard that as redundant. */
++ if ((re_opcode_t) *p != on_failure_jump
++ && (re_opcode_t) *p != succeed_n)
++ continue;
++
++ p++;
++ EXTRACT_NUMBER_AND_INCR (j, p);
++ p += j;
++
++ /* If what's on the stack is where we are now, pop it. */
++ if (!FAIL_STACK_EMPTY ()
++ && fail_stack.stack[fail_stack.avail - 1].pointer == p)
++ fail_stack.avail--;
++
++ continue;
++
++
++ case on_failure_jump:
++ case on_failure_keep_string_jump:
++ handle_on_failure_jump:
++ EXTRACT_NUMBER_AND_INCR (j, p);
++
++ /* For some patterns, e.g., `(a?)?', `p+j' here points to the
++ end of the pattern. We don't want to push such a point,
++ since when we restore it above, entering the switch will
++ increment `p' past the end of the pattern. We don't need
++ to push such a point since we obviously won't find any more
++ fastmap entries beyond `pend'. Such a pattern can match
++ the null string, though. */
++ if (p + j < pend)
++ {
++ if (!PUSH_PATTERN_OP (p + j, fail_stack))
++ {
++ RESET_FAIL_STACK ();
++ return -2;
++ }
++ }
++ else
++ bufp->can_be_null = 1;
++
++ if (succeed_n_p)
++ {
++ EXTRACT_NUMBER_AND_INCR (k, p); /* Skip the n. */
++ succeed_n_p = false;
++ }
++
++ continue;
++
++
++ case succeed_n:
++ /* Get to the number of times to succeed. */
++ p += OFFSET_ADDRESS_SIZE;
++
++ /* Increment p past the n for when k != 0. */
++ EXTRACT_NUMBER_AND_INCR (k, p);
++ if (k == 0)
++ {
++ p -= 2 * OFFSET_ADDRESS_SIZE;
++ succeed_n_p = true; /* Spaghetti code alert. */
++ goto handle_on_failure_jump;
++ }
++ continue;
++
++
++ case set_number_at:
++ p += 2 * OFFSET_ADDRESS_SIZE;
++ continue;
++
++
++ case start_memory:
++ case stop_memory:
++ p += 2;
++ continue;
++
++
++ default:
++ abort (); /* We have listed all the cases. */
++ } /* switch *p++ */
++
++ /* Getting here means we have found the possible starting
++ characters for one path of the pattern -- and that the empty
++ string does not match. We need not follow this path further.
++ Instead, look at the next alternative (remembered on the
++ stack), or quit if no more. The test at the top of the loop
++ does these things. */
++ path_can_be_null = false;
++ p = pend;
++ } /* while p */
++
++ /* Set `can_be_null' for the last path (also the first path, if the
++ pattern is empty). */
++ bufp->can_be_null |= path_can_be_null;
++
++ done:
++ RESET_FAIL_STACK ();
++ return 0;
++}
++
++#else /* not INSIDE_RECURSION */
++
++int
++re_compile_fastmap (struct re_pattern_buffer *bufp)
++{
++# ifdef MBS_SUPPORT
++ if (MB_CUR_MAX != 1)
++ return wcs_re_compile_fastmap(bufp);
++ else
++# endif
++ return byte_re_compile_fastmap(bufp);
++} /* re_compile_fastmap */
++#ifdef _LIBC
++weak_alias (__re_compile_fastmap, re_compile_fastmap)
++#endif
++
++
++/* Set REGS to hold NUM_REGS registers, storing them in STARTS and
++ ENDS. Subsequent matches using PATTERN_BUFFER and REGS will use
++ this memory for recording register information. STARTS and ENDS
++ must be allocated using the malloc library routine, and must each
++ be at least NUM_REGS * sizeof (regoff_t) bytes long.
++
++ If NUM_REGS == 0, then subsequent matches should allocate their own
++ register data.
++
++ Unless this function is called, the first search or match using
++ PATTERN_BUFFER will allocate its own register data, without
++ freeing the old data. */
++
++void
++re_set_registers (struct re_pattern_buffer *bufp,
++ struct re_registers *regs, unsigned num_regs,
++ regoff_t *starts, regoff_t *ends)
++{
++ if (num_regs)
++ {
++ bufp->regs_allocated = REGS_REALLOCATE;
++ regs->num_regs = num_regs;
++ regs->start = starts;
++ regs->end = ends;
++ }
++ else
++ {
++ bufp->regs_allocated = REGS_UNALLOCATED;
++ regs->num_regs = 0;
++ regs->start = regs->end = (regoff_t *) 0;
++ }
++}
++#ifdef _LIBC
++weak_alias (__re_set_registers, re_set_registers)
++#endif
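++
++/* A minimal caller-side sketch, assuming a pattern with two groups
++   (so three registers counting the whole-match register 0):
++
++       regoff_t *starts = malloc (3 * sizeof (regoff_t));
++       regoff_t *ends = malloc (3 * sizeof (regoff_t));
++       re_set_registers (bufp, &regs, 3, starts, ends);
++
++   Later searches with `bufp' and `regs' then record group positions in
++   these arrays instead of allocating their own. */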
++
++/* Searching routines. */
++
++/* Like re_search_2, below, but only one string is specified, and
++ doesn't let you say where to stop matching. */
++
++int
++re_search (struct re_pattern_buffer *bufp, const char *string, int size,
++ int startpos, int range, struct re_registers *regs)
++{
++ return re_search_2 (bufp, NULL, 0, string, size, startpos, range,
++ regs, size);
++}
++#ifdef _LIBC
++weak_alias (__re_search, re_search)
++#endif
++
++
++/* Using the compiled pattern in BUFP->buffer, first tries to match the
++ virtual concatenation of STRING1 and STRING2, starting first at index
++ STARTPOS, then at STARTPOS + 1, and so on.
++
++ STRING1 and STRING2 have length SIZE1 and SIZE2, respectively.
++
++ RANGE is how far to scan while trying to match. RANGE = 0 means try
++ only at STARTPOS; in general, the last start tried is STARTPOS +
++ RANGE.
++
++ In REGS, return the indices of the virtual concatenation of STRING1
++ and STRING2 that matched the entire BUFP->buffer and its contained
++ subexpressions.
++
++ Do not consider matching one past the index STOP in the virtual
++ concatenation of STRING1 and STRING2.
++
++ We return either the position in the strings at which the match was
++ found, -1 if no match, or -2 if error (such as failure
++ stack overflow). */
++
++int
++re_search_2 (struct re_pattern_buffer *bufp, const char *string1, int size1,
++ const char *string2, int size2, int startpos, int range,
++ struct re_registers *regs, int stop)
++{
++# ifdef MBS_SUPPORT
++ if (MB_CUR_MAX != 1)
++ return wcs_re_search_2 (bufp, string1, size1, string2, size2, startpos,
++ range, regs, stop);
++ else
++# endif
++ return byte_re_search_2 (bufp, string1, size1, string2, size2, startpos,
++ range, regs, stop);
++} /* re_search_2 */
++#ifdef _LIBC
++weak_alias (__re_search_2, re_search_2)
++#endif
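++
++/* An illustrative call, assuming `bufp' holds a compiled pattern and the
++   text is split into "abc" (size 3) and "defg" (size 4):
++
++       pos = re_search_2 (bufp, "abc", 3, "defg", 4, 0, 7, &regs, 7);
++
++   Here startpos is 0, range and stop are 7 (the total size), and `pos'
++   is the match position within the virtual string "abcdefg", -1 if
++   nothing matches, or -2 on internal error. */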
++
++#endif /* not INSIDE_RECURSION */
++
++#ifdef INSIDE_RECURSION
++
++#ifdef MATCH_MAY_ALLOCATE
++# define FREE_VAR(var) if (var) REGEX_FREE (var); var = NULL
++#else
++# define FREE_VAR(var) if (var) free (var); var = NULL
++#endif
++
++#ifdef WCHAR
++# define MAX_ALLOCA_SIZE 2000
++
++# define FREE_WCS_BUFFERS() \
++ do { \
++ if (size1 > MAX_ALLOCA_SIZE) \
++ { \
++ free (wcs_string1); \
++ free (mbs_offset1); \
++ } \
++ else \
++ { \
++ FREE_VAR (wcs_string1); \
++ FREE_VAR (mbs_offset1); \
++ } \
++ if (size2 > MAX_ALLOCA_SIZE) \
++ { \
++ free (wcs_string2); \
++ free (mbs_offset2); \
++ } \
++ else \
++ { \
++ FREE_VAR (wcs_string2); \
++ FREE_VAR (mbs_offset2); \
++ } \
++ } while (0)
++
++#endif
++
++
++static int
++PREFIX(re_search_2) (struct re_pattern_buffer *bufp, const char *string1,
++ int size1, const char *string2, int size2,
++ int startpos, int range,
++ struct re_registers *regs, int stop)
++{
++ int val;
++ register char *fastmap = bufp->fastmap;
++ register RE_TRANSLATE_TYPE translate = bufp->translate;
++ int total_size = size1 + size2;
++ int endpos = startpos + range;
++#ifdef WCHAR
++  /* We need wchar_t* buffers corresponding to cstring1, cstring2. */
++  wchar_t *wcs_string1 = NULL, *wcs_string2 = NULL;
++  /* We need the sizes of the wchar_t buffers corresponding to csize1, csize2. */
++  int wcs_size1 = 0, wcs_size2 = 0;
++  /* Offset buffers for optimization.  See convert_mbs_to_wcs. */
++ int *mbs_offset1 = NULL, *mbs_offset2 = NULL;
++ /* They hold whether each wchar_t is binary data or not. */
++ char *is_binary = NULL;
++#endif /* WCHAR */
++
++ /* Check for out-of-range STARTPOS. */
++ if (startpos < 0 || startpos > total_size)
++ return -1;
++
++ /* Fix up RANGE if it might eventually take us outside
++ the virtual concatenation of STRING1 and STRING2.
++ Make sure we won't move STARTPOS below 0 or above TOTAL_SIZE. */
++ if (endpos < 0)
++ range = 0 - startpos;
++ else if (endpos > total_size)
++ range = total_size - startpos;
++
++ /* If the search isn't to be a backwards one, don't waste time in a
++ search for a pattern that must be anchored. */
++ if (bufp->used > 0 && range > 0
++ && ((re_opcode_t) bufp->buffer[0] == begbuf
++ /* `begline' is like `begbuf' if it cannot match at newlines. */
++ || ((re_opcode_t) bufp->buffer[0] == begline
++ && !bufp->newline_anchor)))
++ {
++ if (startpos > 0)
++ return -1;
++ else
++ range = 1;
++ }
++
++#ifdef emacs
++ /* In a forward search for something that starts with \=.
++ don't keep searching past point. */
++ if (bufp->used > 0 && (re_opcode_t) bufp->buffer[0] == at_dot && range > 0)
++ {
++ range = PT - startpos;
++ if (range <= 0)
++ return -1;
++ }
++#endif /* emacs */
++
++ /* Update the fastmap now if not correct already. */
++ if (fastmap && !bufp->fastmap_accurate)
++ if (re_compile_fastmap (bufp) == -2)
++ return -2;
++
++#ifdef WCHAR
++ /* Allocate wchar_t array for wcs_string1 and wcs_string2 and
++ fill them with converted string. */
++ if (size1 != 0)
++ {
++ if (size1 > MAX_ALLOCA_SIZE)
++ {
++ wcs_string1 = TALLOC (size1 + 1, CHAR_T);
++ mbs_offset1 = TALLOC (size1 + 1, int);
++ is_binary = TALLOC (size1 + 1, char);
++ }
++ else
++ {
++ wcs_string1 = REGEX_TALLOC (size1 + 1, CHAR_T);
++ mbs_offset1 = REGEX_TALLOC (size1 + 1, int);
++ is_binary = REGEX_TALLOC (size1 + 1, char);
++ }
++ if (!wcs_string1 || !mbs_offset1 || !is_binary)
++ {
++ if (size1 > MAX_ALLOCA_SIZE)
++ {
++ free (wcs_string1);
++ free (mbs_offset1);
++ free (is_binary);
++ }
++ else
++ {
++ FREE_VAR (wcs_string1);
++ FREE_VAR (mbs_offset1);
++ FREE_VAR (is_binary);
++ }
++ return -2;
++ }
++ wcs_size1 = convert_mbs_to_wcs(wcs_string1, string1, size1,
++ mbs_offset1, is_binary);
++ wcs_string1[wcs_size1] = L'\0'; /* for a sentinel */
++ if (size1 > MAX_ALLOCA_SIZE)
++ free (is_binary);
++ else
++ FREE_VAR (is_binary);
++ }
++ if (size2 != 0)
++ {
++ if (size2 > MAX_ALLOCA_SIZE)
++ {
++ wcs_string2 = TALLOC (size2 + 1, CHAR_T);
++ mbs_offset2 = TALLOC (size2 + 1, int);
++ is_binary = TALLOC (size2 + 1, char);
++ }
++ else
++ {
++ wcs_string2 = REGEX_TALLOC (size2 + 1, CHAR_T);
++ mbs_offset2 = REGEX_TALLOC (size2 + 1, int);
++ is_binary = REGEX_TALLOC (size2 + 1, char);
++ }
++ if (!wcs_string2 || !mbs_offset2 || !is_binary)
++ {
++ FREE_WCS_BUFFERS ();
++ if (size2 > MAX_ALLOCA_SIZE)
++ free (is_binary);
++ else
++ FREE_VAR (is_binary);
++ return -2;
++ }
++ wcs_size2 = convert_mbs_to_wcs(wcs_string2, string2, size2,
++ mbs_offset2, is_binary);
++ wcs_string2[wcs_size2] = L'\0'; /* for a sentinel */
++ if (size2 > MAX_ALLOCA_SIZE)
++ free (is_binary);
++ else
++ FREE_VAR (is_binary);
++ }
++#endif /* WCHAR */
++
++
++ /* Loop through the string, looking for a place to start matching. */
++ for (;;)
++ {
++ /* If a fastmap is supplied, skip quickly over characters that
++ cannot be the start of a match. If the pattern can match the
++ null string, however, we don't need to skip characters; we want
++ the first null string. */
++ if (fastmap && startpos < total_size && !bufp->can_be_null)
++ {
++ if (range > 0) /* Searching forwards. */
++ {
++ register const char *d;
++ register int lim = 0;
++ int irange = range;
++
++ if (startpos < size1 && startpos + range >= size1)
++ lim = range - (size1 - startpos);
++
++ d = (startpos >= size1 ? string2 - size1 : string1) + startpos;
++
++ /* Written out as an if-else to avoid testing `translate'
++ inside the loop. */
++ if (translate)
++ while (range > lim
++ && !fastmap[(unsigned char)
++ translate[(unsigned char) *d++]])
++ range--;
++ else
++ while (range > lim && !fastmap[(unsigned char) *d++])
++ range--;
++
++ startpos += irange - range;
++ }
++ else /* Searching backwards. */
++ {
++ register CHAR_T c = (size1 == 0 || startpos >= size1
++ ? string2[startpos - size1]
++ : string1[startpos]);
++
++ if (!fastmap[(unsigned char) TRANSLATE (c)])
++ goto advance;
++ }
++ }
++
++ /* If can't match the null string, and that's all we have left, fail. */
++ if (range >= 0 && startpos == total_size && fastmap
++ && !bufp->can_be_null)
++ {
++#ifdef WCHAR
++ FREE_WCS_BUFFERS ();
++#endif
++ return -1;
++ }
++
++#ifdef WCHAR
++ val = wcs_re_match_2_internal (bufp, string1, size1, string2,
++ size2, startpos, regs, stop,
++ wcs_string1, wcs_size1,
++ wcs_string2, wcs_size2,
++ mbs_offset1, mbs_offset2);
++#else /* BYTE */
++ val = byte_re_match_2_internal (bufp, string1, size1, string2,
++ size2, startpos, regs, stop);
++#endif /* BYTE */
++
++#ifndef REGEX_MALLOC
++# ifdef C_ALLOCA
++ alloca (0);
++# endif
++#endif
++
++ if (val >= 0)
++ {
++#ifdef WCHAR
++ FREE_WCS_BUFFERS ();
++#endif
++ return startpos;
++ }
++
++ if (val == -2)
++ {
++#ifdef WCHAR
++ FREE_WCS_BUFFERS ();
++#endif
++ return -2;
++ }
++
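++      /* No match starting here: move on to the next candidate starting
++         position, in the direction given by the sign of RANGE.  */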
++ advance:
++ if (!range)
++ break;
++ else if (range > 0)
++ {
++ range--;
++ startpos++;
++ }
++ else
++ {
++ range++;
++ startpos--;
++ }
++ }
++#ifdef WCHAR
++ FREE_WCS_BUFFERS ();
++#endif
++ return -1;
++}
++
++#ifdef WCHAR
++/* This converts PTR, a pointer into one of the search wchar_t strings
++   `string1' and `string2', into a multibyte string offset from the
++ beginning of that string. We use mbs_offset to optimize.
++ See convert_mbs_to_wcs. */
++# define POINTER_TO_OFFSET(ptr) \
++ (FIRST_STRING_P (ptr) \
++ ? ((regoff_t)(mbs_offset1 != NULL? mbs_offset1[(ptr)-string1] : 0)) \
++ : ((regoff_t)((mbs_offset2 != NULL? mbs_offset2[(ptr)-string2] : 0) \
++ + csize1)))
++#else /* BYTE */
++/* This converts PTR, a pointer into one of the search strings `string1'
++ and `string2' into an offset from the beginning of that string. */
++# define POINTER_TO_OFFSET(ptr) \
++ (FIRST_STRING_P (ptr) \
++ ? ((regoff_t) ((ptr) - string1)) \
++ : ((regoff_t) ((ptr) - string2 + size1)))
++#endif /* WCHAR */
++
++/* Macros for dealing with the split strings in re_match_2. */
++
++#define MATCHING_IN_FIRST_STRING (dend == end_match_1)
++
++/* Call before fetching a character with *d. This switches over to
++ string2 if necessary. */
++#define PREFETCH() \
++ while (d == dend) \
++ { \
++ /* End of string2 => fail. */ \
++ if (dend == end_match_2) \
++ goto fail; \
++ /* End of string1 => advance to string2. */ \
++ d = string2; \
++ dend = end_match_2; \
++ }
++
++/* Test if at very beginning or at very end of the virtual concatenation
++ of `string1' and `string2'. If only one string, it's `string2'. */
++#define AT_STRINGS_BEG(d) ((d) == (size1 ? string1 : string2) || !size2)
++#define AT_STRINGS_END(d) ((d) == end2)
++
++
++/* Test if D points to a character which is word-constituent. We have
++ two special cases to check for: if past the end of string1, look at
++ the first character in string2; and if before the beginning of
++ string2, look at the last character in string1. */
++#ifdef WCHAR
++/* Use internationalized API instead of SYNTAX. */
++# define WORDCHAR_P(d) \
++ (iswalnum ((wint_t)((d) == end1 ? *string2 \
++ : (d) == string2 - 1 ? *(end1 - 1) : *(d))) != 0 \
++ || ((d) == end1 ? *string2 \
++ : (d) == string2 - 1 ? *(end1 - 1) : *(d)) == L'_')
++#else /* BYTE */
++# define WORDCHAR_P(d) \
++ (SYNTAX ((d) == end1 ? *string2 \
++ : (d) == string2 - 1 ? *(end1 - 1) : *(d)) \
++ == Sword)
++#endif /* WCHAR */
++
++/* Disabled due to a compiler bug -- see comment at case wordbound */
++#if 0
++/* Test if the character before D and the one at D differ with respect
++ to being word-constituent. */
++#define AT_WORD_BOUNDARY(d) \
++ (AT_STRINGS_BEG (d) || AT_STRINGS_END (d) \
++ || WORDCHAR_P (d - 1) != WORDCHAR_P (d))
++#endif
++
++/* Free everything we malloc. */
++#ifdef MATCH_MAY_ALLOCATE
++# ifdef WCHAR
++# define FREE_VARIABLES() \
++ do { \
++ REGEX_FREE_STACK (fail_stack.stack); \
++ FREE_VAR (regstart); \
++ FREE_VAR (regend); \
++ FREE_VAR (old_regstart); \
++ FREE_VAR (old_regend); \
++ FREE_VAR (best_regstart); \
++ FREE_VAR (best_regend); \
++ FREE_VAR (reg_info); \
++ FREE_VAR (reg_dummy); \
++ FREE_VAR (reg_info_dummy); \
++ if (!cant_free_wcs_buf) \
++ { \
++ FREE_VAR (string1); \
++ FREE_VAR (string2); \
++ FREE_VAR (mbs_offset1); \
++ FREE_VAR (mbs_offset2); \
++ } \
++ } while (0)
++# else /* BYTE */
++# define FREE_VARIABLES() \
++ do { \
++ REGEX_FREE_STACK (fail_stack.stack); \
++ FREE_VAR (regstart); \
++ FREE_VAR (regend); \
++ FREE_VAR (old_regstart); \
++ FREE_VAR (old_regend); \
++ FREE_VAR (best_regstart); \
++ FREE_VAR (best_regend); \
++ FREE_VAR (reg_info); \
++ FREE_VAR (reg_dummy); \
++ FREE_VAR (reg_info_dummy); \
++ } while (0)
++# endif /* WCHAR */
++#else
++# ifdef WCHAR
++# define FREE_VARIABLES() \
++ do { \
++ if (!cant_free_wcs_buf) \
++ { \
++ FREE_VAR (string1); \
++ FREE_VAR (string2); \
++ FREE_VAR (mbs_offset1); \
++ FREE_VAR (mbs_offset2); \
++ } \
++ } while (0)
++# else /* BYTE */
++# define FREE_VARIABLES() ((void)0) /* Do nothing! But inhibit gcc warning. */
++# endif /* WCHAR */
++#endif /* not MATCH_MAY_ALLOCATE */
++
++/* These values must meet several constraints. They must not be valid
++ register values; since we have a limit of 255 registers (because
++ we use only one byte in the pattern for the register number), we can
++ use numbers larger than 255. They must differ by 1, because of
++ NUM_FAILURE_ITEMS above. And the value for the lowest register must
++ be larger than the value for the highest register, so we do not try
++ to actually save any registers when none are active. */
++#define NO_HIGHEST_ACTIVE_REG (1 << BYTEWIDTH)
++#define NO_LOWEST_ACTIVE_REG (NO_HIGHEST_ACTIVE_REG + 1)
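++/* With BYTEWIDTH == 8 these work out to 256 and 257: both above the
++   largest valid register number (255), consecutive, and with the
++   "lowest" value greater than the "highest" one, as required above.  */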
++
++#else /* not INSIDE_RECURSION */
++/* Matching routines. */
++
++#ifndef emacs /* Emacs never uses this. */
++/* re_match is like re_match_2 except it takes only a single string. */
++
++int
++re_match (struct re_pattern_buffer *bufp, const char *string,
++ int size, int pos, struct re_registers *regs)
++{
++ int result;
++# ifdef MBS_SUPPORT
++ if (MB_CUR_MAX != 1)
++ result = wcs_re_match_2_internal (bufp, NULL, 0, string, size,
++ pos, regs, size,
++ NULL, 0, NULL, 0, NULL, NULL);
++ else
++# endif
++ result = byte_re_match_2_internal (bufp, NULL, 0, string, size,
++ pos, regs, size);
++# ifndef REGEX_MALLOC
++# ifdef C_ALLOCA
++ alloca (0);
++# endif
++# endif
++ return result;
++}
++# ifdef _LIBC
++weak_alias (__re_match, re_match)
++# endif
++#endif /* not emacs */
++
++#endif /* not INSIDE_RECURSION */
++
++#ifdef INSIDE_RECURSION
++static boolean PREFIX(group_match_null_string_p) (UCHAR_T **p,
++ UCHAR_T *end,
++ PREFIX(register_info_type) *reg_info);
++static boolean PREFIX(alt_match_null_string_p) (UCHAR_T *p,
++ UCHAR_T *end,
++ PREFIX(register_info_type) *reg_info);
++static boolean PREFIX(common_op_match_null_string_p) (UCHAR_T **p,
++ UCHAR_T *end,
++ PREFIX(register_info_type) *reg_info);
++static int PREFIX(bcmp_translate) (const CHAR_T *s1, const CHAR_T *s2,
++ register int len,
++ RE_TRANSLATE_TYPE translate);
++#else /* not INSIDE_RECURSION */
++
++/* re_match_2 matches the compiled pattern in BUFP against the
++   (virtual) concatenation of STRING1 and STRING2 (of length SIZE1
++ and SIZE2, respectively). We start matching at POS, and stop
++ matching at STOP.
++
++   If REGS is non-null and the `no_sub' field of BUFP is zero, we
++ store offsets for the substring each group matched in REGS. See the
++ documentation for exactly how many groups we fill.
++
++ We return -1 if no match, -2 if an internal error (such as the
++ failure stack overflowing). Otherwise, we return the length of the
++ matched substring. */
++
++int
++re_match_2 (struct re_pattern_buffer *bufp, const char *string1, int size1,
++ const char *string2, int size2, int pos,
++ struct re_registers *regs, int stop)
++{
++ int result;
++# ifdef MBS_SUPPORT
++ if (MB_CUR_MAX != 1)
++ result = wcs_re_match_2_internal (bufp, string1, size1, string2, size2,
++ pos, regs, stop,
++ NULL, 0, NULL, 0, NULL, NULL);
++ else
++# endif
++ result = byte_re_match_2_internal (bufp, string1, size1, string2, size2,
++ pos, regs, stop);
++
++#ifndef REGEX_MALLOC
++# ifdef C_ALLOCA
++ alloca (0);
++# endif
++#endif
++ return result;
++}
++#ifdef _LIBC
++weak_alias (__re_match_2, re_match_2)
++#endif
++
++#endif /* not INSIDE_RECURSION */
++
++#ifdef INSIDE_RECURSION
++
++#ifdef WCHAR
++static int count_mbs_length (int *, int);
++
++/* This checks the substring (from 0 to length) of the multibyte string
++   to which offset_buffer corresponds, and counts how many wchar_t
++   characters the substring occupies.  We use offset_buffer as an
++   optimization.  See convert_mbs_to_wcs.  */
++
++static int
++count_mbs_length(int *offset_buffer, int length)
++{
++ int upper, lower;
++
++ /* Check whether the size is valid. */
++ if (length < 0)
++ return -1;
++
++ if (offset_buffer == NULL)
++ return 0;
++
++  /* If there are no multibyte characters, offset_buffer[i] == i.
++     Optimize for this case.  */
++ if (offset_buffer[length] == length)
++ return length;
++
++  /* Set upper to length (because for all i, offset_buffer[i] >= i).  */
++ upper = length;
++ lower = 0;
++
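++  /* Binary search: offset_buffer[] is non-decreasing, so narrow
++     [lower, upper] until we find an index whose multibyte offset equals
++     length, or until the interval can no longer shrink.  */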
++ while (true)
++ {
++ int middle = (lower + upper) / 2;
++ if (middle == lower || middle == upper)
++ break;
++ if (offset_buffer[middle] > length)
++ upper = middle;
++ else if (offset_buffer[middle] < length)
++ lower = middle;
++ else
++ return middle;
++ }
++
++ return -1;
++}
++#endif /* WCHAR */
++
++/* This is a separate function so that we can force an alloca cleanup
++ afterwards. */
++#ifdef WCHAR
++static int
++wcs_re_match_2_internal (struct re_pattern_buffer *bufp,
++ const char *cstring1, int csize1,
++ const char *cstring2, int csize2,
++ int pos,
++ struct re_registers *regs,
++ int stop,
++			 /* string1 == string2 == NULL means string1/2, size1/2 and
++			    mbs_offset1/2 need setting up in this function.  */
++			 /* We need wchar_t* buffers corresponding to cstring1, cstring2.  */
++ wchar_t *string1, int size1,
++ wchar_t *string2, int size2,
++			 /* offset buffers for optimization.  See convert_mbs_to_wcs.  */
++ int *mbs_offset1, int *mbs_offset2)
++#else /* BYTE */
++static int
++byte_re_match_2_internal (struct re_pattern_buffer *bufp,
++ const char *string1, int size1,
++ const char *string2, int size2,
++ int pos,
++ struct re_registers *regs, int stop)
++#endif /* BYTE */
++{
++ /* General temporaries. */
++ int mcnt;
++ UCHAR_T *p1;
++#ifdef WCHAR
++ /* They hold whether each wchar_t is binary data or not. */
++ char *is_binary = NULL;
++ /* If true, we can't free string1/2, mbs_offset1/2. */
++ int cant_free_wcs_buf = 1;
++#endif /* WCHAR */
++
++ /* Just past the end of the corresponding string. */
++ const CHAR_T *end1, *end2;
++
++ /* Pointers into string1 and string2, just past the last characters in
++ each to consider matching. */
++ const CHAR_T *end_match_1, *end_match_2;
++
++ /* Where we are in the data, and the end of the current string. */
++ const CHAR_T *d, *dend;
++
++ /* Where we are in the pattern, and the end of the pattern. */
++#ifdef WCHAR
++ UCHAR_T *pattern, *p;
++ register UCHAR_T *pend;
++#else /* BYTE */
++ UCHAR_T *p = bufp->buffer;
++ register UCHAR_T *pend = p + bufp->used;
++#endif /* WCHAR */
++
++ /* Mark the opcode just after a start_memory, so we can test for an
++ empty subpattern when we get to the stop_memory. */
++ UCHAR_T *just_past_start_mem = 0;
++
++ /* We use this to map every character in the string. */
++ RE_TRANSLATE_TYPE translate = bufp->translate;
++
++ /* Failure point stack. Each place that can handle a failure further
++ down the line pushes a failure point on this stack. It consists of
++ restart, regend, and reg_info for all registers corresponding to
++ the subexpressions we're currently inside, plus the number of such
++ registers, and, finally, two char *'s. The first char * is where
++ to resume scanning the pattern; the second one is where to resume
++ scanning the strings. If the latter is zero, the failure point is
++ a ``dummy''; if a failure happens and the failure point is a dummy,
++     it gets discarded and the next one is tried.  */
++#ifdef MATCH_MAY_ALLOCATE /* otherwise, this is global. */
++ PREFIX(fail_stack_type) fail_stack;
++#endif
++#ifdef DEBUG
++ static unsigned failure_id;
++ unsigned nfailure_points_pushed = 0, nfailure_points_popped = 0;
++#endif
++
++#ifdef REL_ALLOC
++ /* This holds the pointer to the failure stack, when
++ it is allocated relocatably. */
++ fail_stack_elt_t *failure_stack_ptr;
++#endif
++
++ /* We fill all the registers internally, independent of what we
++ return, for use in backreferences. The number here includes
++ an element for register zero. */
++ size_t num_regs = bufp->re_nsub + 1;
++
++ /* The currently active registers. */
++ active_reg_t lowest_active_reg = NO_LOWEST_ACTIVE_REG;
++ active_reg_t highest_active_reg = NO_HIGHEST_ACTIVE_REG;
++
++ /* Information on the contents of registers. These are pointers into
++ the input strings; they record just what was matched (on this
++ attempt) by a subexpression part of the pattern, that is, the
++     regnum-th regstart pointer points to where in the string we began
++ matching and the regnum-th regend points to right after where we
++ stopped matching the regnum-th subexpression. (The zeroth register
++ keeps track of what the whole pattern matches.) */
++#ifdef MATCH_MAY_ALLOCATE /* otherwise, these are global. */
++ const CHAR_T **regstart, **regend;
++#endif
++
++ /* If a group that's operated upon by a repetition operator fails to
++ match anything, then the register for its start will need to be
++ restored because it will have been set to wherever in the string we
++ are when we last see its open-group operator. Similarly for a
++ register's end. */
++#ifdef MATCH_MAY_ALLOCATE /* otherwise, these are global. */
++ const CHAR_T **old_regstart, **old_regend;
++#endif
++
++ /* The is_active field of reg_info helps us keep track of which (possibly
++ nested) subexpressions we are currently in. The matched_something
++ field of reg_info[reg_num] helps us tell whether or not we have
++ matched any of the pattern so far this time through the reg_num-th
++ subexpression. These two fields get reset each time through any
++ loop their register is in. */
++#ifdef MATCH_MAY_ALLOCATE /* otherwise, this is global. */
++ PREFIX(register_info_type) *reg_info;
++#endif
++
++ /* The following record the register info as found in the above
++ variables when we find a match better than any we've seen before.
++ This happens as we backtrack through the failure points, which in
++ turn happens only if we have not yet matched the entire string. */
++ unsigned best_regs_set = false;
++#ifdef MATCH_MAY_ALLOCATE /* otherwise, these are global. */
++ const CHAR_T **best_regstart, **best_regend;
++#endif
++
++ /* Logically, this is `best_regend[0]'. But we don't want to have to
++ allocate space for that if we're not allocating space for anything
++ else (see below). Also, we never need info about register 0 for
++ any of the other register vectors, and it seems rather a kludge to
++ treat `best_regend' differently than the rest. So we keep track of
++ the end of the best match so far in a separate variable. We
++ initialize this to NULL so that when we backtrack the first time
++ and need to test it, it's not garbage. */
++ const CHAR_T *match_end = NULL;
++
++ /* This helps SET_REGS_MATCHED avoid doing redundant work. */
++ int set_regs_matched_done = 0;
++
++ /* Used when we pop values we don't care about. */
++#ifdef MATCH_MAY_ALLOCATE /* otherwise, these are global. */
++ const CHAR_T **reg_dummy;
++ PREFIX(register_info_type) *reg_info_dummy;
++#endif
++
++#ifdef DEBUG
++ /* Counts the total number of registers pushed. */
++ unsigned num_regs_pushed = 0;
++#endif
++
++ DEBUG_PRINT1 ("\n\nEntering re_match_2.\n");
++
++ INIT_FAIL_STACK ();
++
++#ifdef MATCH_MAY_ALLOCATE
++ /* Do not bother to initialize all the register variables if there are
++ no groups in the pattern, as it takes a fair amount of time. If
++ there are groups, we include space for register 0 (the whole
++ pattern), even though we never use it, since it simplifies the
++ array indexing. We should fix this. */
++ if (bufp->re_nsub)
++ {
++ regstart = REGEX_TALLOC (num_regs, const CHAR_T *);
++ regend = REGEX_TALLOC (num_regs, const CHAR_T *);
++ old_regstart = REGEX_TALLOC (num_regs, const CHAR_T *);
++ old_regend = REGEX_TALLOC (num_regs, const CHAR_T *);
++ best_regstart = REGEX_TALLOC (num_regs, const CHAR_T *);
++ best_regend = REGEX_TALLOC (num_regs, const CHAR_T *);
++ reg_info = REGEX_TALLOC (num_regs, PREFIX(register_info_type));
++ reg_dummy = REGEX_TALLOC (num_regs, const CHAR_T *);
++ reg_info_dummy = REGEX_TALLOC (num_regs, PREFIX(register_info_type));
++
++ if (!(regstart && regend && old_regstart && old_regend && reg_info
++ && best_regstart && best_regend && reg_dummy && reg_info_dummy))
++ {
++ FREE_VARIABLES ();
++ return -2;
++ }
++ }
++ else
++ {
++ /* We must initialize all our variables to NULL, so that
++ `FREE_VARIABLES' doesn't try to free them. */
++ regstart = regend = old_regstart = old_regend = best_regstart
++ = best_regend = reg_dummy = NULL;
++ reg_info = reg_info_dummy = (PREFIX(register_info_type) *) NULL;
++ }
++#endif /* MATCH_MAY_ALLOCATE */
++
++ /* The starting position is bogus. */
++#ifdef WCHAR
++ if (pos < 0 || pos > csize1 + csize2)
++#else /* BYTE */
++ if (pos < 0 || pos > size1 + size2)
++#endif
++ {
++ FREE_VARIABLES ();
++ return -1;
++ }
++
++#ifdef WCHAR
++  /* Allocate wchar_t arrays for string1 and string2 and
++     fill them with the converted strings.  */
++ if (string1 == NULL && string2 == NULL)
++ {
++      /* We need to set up the buffers here.  */
++
++ /* We must free wcs buffers in this function. */
++ cant_free_wcs_buf = 0;
++
++ if (csize1 != 0)
++ {
++ string1 = REGEX_TALLOC (csize1 + 1, CHAR_T);
++ mbs_offset1 = REGEX_TALLOC (csize1 + 1, int);
++ is_binary = REGEX_TALLOC (csize1 + 1, char);
++ if (!string1 || !mbs_offset1 || !is_binary)
++ {
++ FREE_VAR (string1);
++ FREE_VAR (mbs_offset1);
++ FREE_VAR (is_binary);
++ return -2;
++ }
++	  size1 = convert_mbs_to_wcs(string1, cstring1, csize1,
++				     mbs_offset1, is_binary);
++	  string1[size1] = L'\0'; /* for a sentinel */
++	  FREE_VAR (is_binary);
++	}
++ if (csize2 != 0)
++ {
++ string2 = REGEX_TALLOC (csize2 + 1, CHAR_T);
++ mbs_offset2 = REGEX_TALLOC (csize2 + 1, int);
++ is_binary = REGEX_TALLOC (csize2 + 1, char);
++ if (!string2 || !mbs_offset2 || !is_binary)
++ {
++ FREE_VAR (string1);
++ FREE_VAR (mbs_offset1);
++ FREE_VAR (string2);
++ FREE_VAR (mbs_offset2);
++ FREE_VAR (is_binary);
++ return -2;
++ }
++ size2 = convert_mbs_to_wcs(string2, cstring2, csize2,
++ mbs_offset2, is_binary);
++ string2[size2] = L'\0'; /* for a sentinel */
++ FREE_VAR (is_binary);
++ }
++ }
++
++  /* We need to cast pattern to (wchar_t*), because we cast this compiled
++ pattern to (char*) in regex_compile. */
++ p = pattern = (CHAR_T*)bufp->buffer;
++ pend = (CHAR_T*)(bufp->buffer + bufp->used);
++
++#endif /* WCHAR */
++
++ /* Initialize subexpression text positions to -1 to mark ones that no
++ start_memory/stop_memory has been seen for. Also initialize the
++ register information struct. */
++ for (mcnt = 1; (unsigned) mcnt < num_regs; mcnt++)
++ {
++ regstart[mcnt] = regend[mcnt]
++ = old_regstart[mcnt] = old_regend[mcnt] = REG_UNSET_VALUE;
++
++ REG_MATCH_NULL_STRING_P (reg_info[mcnt]) = MATCH_NULL_UNSET_VALUE;
++ IS_ACTIVE (reg_info[mcnt]) = 0;
++ MATCHED_SOMETHING (reg_info[mcnt]) = 0;
++ EVER_MATCHED_SOMETHING (reg_info[mcnt]) = 0;
++ }
++
++ /* We move `string1' into `string2' if the latter's empty -- but not if
++ `string1' is null. */
++ if (size2 == 0 && string1 != NULL)
++ {
++ string2 = string1;
++ size2 = size1;
++ string1 = 0;
++ size1 = 0;
++#ifdef WCHAR
++ mbs_offset2 = mbs_offset1;
++ csize2 = csize1;
++ mbs_offset1 = NULL;
++ csize1 = 0;
++#endif
++ }
++ end1 = string1 + size1;
++ end2 = string2 + size2;
++
++ /* Compute where to stop matching, within the two strings. */
++#ifdef WCHAR
++ if (stop <= csize1)
++ {
++ mcnt = count_mbs_length(mbs_offset1, stop);
++ end_match_1 = string1 + mcnt;
++ end_match_2 = string2;
++ }
++ else
++ {
++ if (stop > csize1 + csize2)
++ stop = csize1 + csize2;
++ end_match_1 = end1;
++ mcnt = count_mbs_length(mbs_offset2, stop-csize1);
++ end_match_2 = string2 + mcnt;
++ }
++ if (mcnt < 0)
++    { /* count_mbs_length returned an error.  */
++ FREE_VARIABLES ();
++ return -1;
++ }
++#else
++ if (stop <= size1)
++ {
++ end_match_1 = string1 + stop;
++ end_match_2 = string2;
++ }
++ else
++ {
++ end_match_1 = end1;
++ end_match_2 = string2 + stop - size1;
++ }
++#endif /* WCHAR */
++
++ /* `p' scans through the pattern as `d' scans through the data.
++ `dend' is the end of the input string that `d' points within. `d'
++ is advanced into the following input string whenever necessary, but
++ this happens before fetching; therefore, at the beginning of the
++ loop, `d' can be pointing at the end of a string, but it cannot
++ equal `string2'. */
++#ifdef WCHAR
++ if (size1 > 0 && pos <= csize1)
++ {
++ mcnt = count_mbs_length(mbs_offset1, pos);
++ d = string1 + mcnt;
++ dend = end_match_1;
++ }
++ else
++ {
++ mcnt = count_mbs_length(mbs_offset2, pos-csize1);
++ d = string2 + mcnt;
++ dend = end_match_2;
++ }
++
++ if (mcnt < 0)
++    { /* count_mbs_length returned an error.  */
++ FREE_VARIABLES ();
++ return -1;
++ }
++#else
++ if (size1 > 0 && pos <= size1)
++ {
++ d = string1 + pos;
++ dend = end_match_1;
++ }
++ else
++ {
++ d = string2 + pos - size1;
++ dend = end_match_2;
++ }
++#endif /* WCHAR */
++
++ DEBUG_PRINT1 ("The compiled pattern is:\n");
++ DEBUG_PRINT_COMPILED_PATTERN (bufp, p, pend);
++ DEBUG_PRINT1 ("The string to match is: `");
++ DEBUG_PRINT_DOUBLE_STRING (d, string1, size1, string2, size2);
++ DEBUG_PRINT1 ("'\n");
++
++ /* This loops over pattern commands. It exits by returning from the
++ function if the match is complete, or it drops through if the match
++ fails at this starting point in the input data. */
++ for (;;)
++ {
++#ifdef _LIBC
++ DEBUG_PRINT2 ("\n%p: ", p);
++#else
++ DEBUG_PRINT2 ("\n0x%x: ", p);
++#endif
++
++ if (p == pend)
++ { /* End of pattern means we might have succeeded. */
++ DEBUG_PRINT1 ("end of pattern ... ");
++
++ /* If we haven't matched the entire string, and we want the
++ longest match, try backtracking. */
++ if (d != end_match_2)
++ {
++ /* 1 if this match ends in the same string (string1 or string2)
++ as the best previous match. */
++ boolean same_str_p = (FIRST_STRING_P (match_end)
++ == MATCHING_IN_FIRST_STRING);
++ /* 1 if this match is the best seen so far. */
++ boolean best_match_p;
++
++ /* AIX compiler got confused when this was combined
++ with the previous declaration. */
++ if (same_str_p)
++ best_match_p = d > match_end;
++ else
++ best_match_p = !MATCHING_IN_FIRST_STRING;
++
++ DEBUG_PRINT1 ("backtracking.\n");
++
++ if (!FAIL_STACK_EMPTY ())
++ { /* More failure points to try. */
++
++ /* If exceeds best match so far, save it. */
++ if (!best_regs_set || best_match_p)
++ {
++ best_regs_set = true;
++ match_end = d;
++
++ DEBUG_PRINT1 ("\nSAVING match as best so far.\n");
++
++ for (mcnt = 1; (unsigned) mcnt < num_regs; mcnt++)
++ {
++ best_regstart[mcnt] = regstart[mcnt];
++ best_regend[mcnt] = regend[mcnt];
++ }
++ }
++ goto fail;
++ }
++
++ /* If no failure points, don't restore garbage. And if
++ last match is real best match, don't restore second
++ best one. */
++ else if (best_regs_set && !best_match_p)
++ {
++ restore_best_regs:
++ /* Restore best match. It may happen that `dend ==
++ end_match_1' while the restored d is in string2.
++ For example, the pattern `x.*y.*z' against the
++ strings `x-' and `y-z-', if the two strings are
++ not consecutive in memory. */
++ DEBUG_PRINT1 ("Restoring best registers.\n");
++
++ d = match_end;
++ dend = ((d >= string1 && d <= end1)
++ ? end_match_1 : end_match_2);
++
++ for (mcnt = 1; (unsigned) mcnt < num_regs; mcnt++)
++ {
++ regstart[mcnt] = best_regstart[mcnt];
++ regend[mcnt] = best_regend[mcnt];
++ }
++ }
++ } /* d != end_match_2 */
++
++ succeed_label:
++ DEBUG_PRINT1 ("Accepting match.\n");
++ /* If caller wants register contents data back, do it. */
++ if (regs && !bufp->no_sub)
++ {
++ /* Have the register data arrays been allocated? */
++ if (bufp->regs_allocated == REGS_UNALLOCATED)
++ { /* No. So allocate them with malloc. We need one
++ extra element beyond `num_regs' for the `-1' marker
++ GNU code uses. */
++ regs->num_regs = MAX (RE_NREGS, num_regs + 1);
++ regs->start = TALLOC (regs->num_regs, regoff_t);
++ regs->end = TALLOC (regs->num_regs, regoff_t);
++ if (regs->start == NULL || regs->end == NULL)
++ {
++ FREE_VARIABLES ();
++ return -2;
++ }
++ bufp->regs_allocated = REGS_REALLOCATE;
++ }
++ else if (bufp->regs_allocated == REGS_REALLOCATE)
++ { /* Yes. If we need more elements than were already
++ allocated, reallocate them. If we need fewer, just
++ leave it alone. */
++ if (regs->num_regs < num_regs + 1)
++ {
++ regs->num_regs = num_regs + 1;
++ RETALLOC (regs->start, regs->num_regs, regoff_t);
++ RETALLOC (regs->end, regs->num_regs, regoff_t);
++ if (regs->start == NULL || regs->end == NULL)
++ {
++ FREE_VARIABLES ();
++ return -2;
++ }
++ }
++ }
++ else
++ {
++	      /* These braces fend off an "empty body in an else-statement"
++ warning under GCC when assert expands to nothing. */
++ assert (bufp->regs_allocated == REGS_FIXED);
++ }
++
++ /* Convert the pointer data in `regstart' and `regend' to
++ indices. Register zero has to be set differently,
++ since we haven't kept track of any info for it. */
++ if (regs->num_regs > 0)
++ {
++ regs->start[0] = pos;
++#ifdef WCHAR
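++	      /* Map the wide-character position d back to a multibyte
++		 offset using mbs_offset1/2 (see convert_mbs_to_wcs).  */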
++ if (MATCHING_IN_FIRST_STRING)
++ regs->end[0] = mbs_offset1 != NULL ?
++ mbs_offset1[d-string1] : 0;
++ else
++ regs->end[0] = csize1 + (mbs_offset2 != NULL ?
++ mbs_offset2[d-string2] : 0);
++#else
++ regs->end[0] = (MATCHING_IN_FIRST_STRING
++ ? ((regoff_t) (d - string1))
++ : ((regoff_t) (d - string2 + size1)));
++#endif /* WCHAR */
++ }
++
++ /* Go through the first `min (num_regs, regs->num_regs)'
++ registers, since that is all we initialized. */
++ for (mcnt = 1; (unsigned) mcnt < MIN (num_regs, regs->num_regs);
++ mcnt++)
++ {
++ if (REG_UNSET (regstart[mcnt]) || REG_UNSET (regend[mcnt]))
++ regs->start[mcnt] = regs->end[mcnt] = -1;
++ else
++ {
++ regs->start[mcnt]
++ = (regoff_t) POINTER_TO_OFFSET (regstart[mcnt]);
++ regs->end[mcnt]
++ = (regoff_t) POINTER_TO_OFFSET (regend[mcnt]);
++ }
++ }
++
++ /* If the regs structure we return has more elements than
++ were in the pattern, set the extra elements to -1. If
++ we (re)allocated the registers, this is the case,
++ because we always allocate enough to have at least one
++ -1 at the end. */
++ for (mcnt = num_regs; (unsigned) mcnt < regs->num_regs; mcnt++)
++ regs->start[mcnt] = regs->end[mcnt] = -1;
++ } /* regs && !bufp->no_sub */
++
++ DEBUG_PRINT4 ("%u failure points pushed, %u popped (%u remain).\n",
++ nfailure_points_pushed, nfailure_points_popped,
++ nfailure_points_pushed - nfailure_points_popped);
++ DEBUG_PRINT2 ("%u registers pushed.\n", num_regs_pushed);
++
++#ifdef WCHAR
++ if (MATCHING_IN_FIRST_STRING)
++ mcnt = mbs_offset1 != NULL ? mbs_offset1[d-string1] : 0;
++ else
++ mcnt = (mbs_offset2 != NULL ? mbs_offset2[d-string2] : 0) +
++ csize1;
++ mcnt -= pos;
++#else
++ mcnt = d - pos - (MATCHING_IN_FIRST_STRING
++ ? string1
++ : string2 - size1);
++#endif /* WCHAR */
++
++ DEBUG_PRINT2 ("Returning %d from re_match_2.\n", mcnt);
++
++ FREE_VARIABLES ();
++ return mcnt;
++ }
++
++ /* Otherwise match next pattern command. */
++ switch (SWITCH_ENUM_CAST ((re_opcode_t) *p++))
++ {
++ /* Ignore these. Used to ignore the n of succeed_n's which
++ currently have n == 0. */
++ case no_op:
++ DEBUG_PRINT1 ("EXECUTING no_op.\n");
++ break;
++
++ case succeed:
++ DEBUG_PRINT1 ("EXECUTING succeed.\n");
++ goto succeed_label;
++
++ /* Match the next n pattern characters exactly. The following
++ byte in the pattern defines n, and the n bytes after that
++ are the characters to match. */
++ case exactn:
++#ifdef MBS_SUPPORT
++ case exactn_bin:
++#endif
++ mcnt = *p++;
++ DEBUG_PRINT2 ("EXECUTING exactn %d.\n", mcnt);
++
++ /* This is written out as an if-else so we don't waste time
++ testing `translate' inside the loop. */
++ if (translate)
++ {
++ do
++ {
++ PREFETCH ();
++#ifdef WCHAR
++ if (*d <= 0xff)
++ {
++ if ((UCHAR_T) translate[(unsigned char) *d++]
++ != (UCHAR_T) *p++)
++ goto fail;
++ }
++ else
++ {
++ if (*d++ != (CHAR_T) *p++)
++ goto fail;
++ }
++#else
++ if ((UCHAR_T) translate[(unsigned char) *d++]
++ != (UCHAR_T) *p++)
++ goto fail;
++#endif /* WCHAR */
++ }
++ while (--mcnt);
++ }
++ else
++ {
++ do
++ {
++ PREFETCH ();
++ if (*d++ != (CHAR_T) *p++) goto fail;
++ }
++ while (--mcnt);
++ }
++ SET_REGS_MATCHED ();
++ break;
++
++
++ /* Match any character except possibly a newline or a null. */
++ case anychar:
++ DEBUG_PRINT1 ("EXECUTING anychar.\n");
++
++ PREFETCH ();
++
++ if ((!(bufp->syntax & RE_DOT_NEWLINE) && TRANSLATE (*d) == '\n')
++ || (bufp->syntax & RE_DOT_NOT_NULL && TRANSLATE (*d) == '\000'))
++ goto fail;
++
++ SET_REGS_MATCHED ();
++ DEBUG_PRINT2 (" Matched `%ld'.\n", (long int) *d);
++ d++;
++ break;
++
++
++ case charset:
++ case charset_not:
++ {
++ register UCHAR_T c;
++#ifdef WCHAR
++ unsigned int i, char_class_length, coll_symbol_length,
++ equiv_class_length, ranges_length, chars_length, length;
++ CHAR_T *workp, *workp2, *charset_top;
++#define WORK_BUFFER_SIZE 128
++ CHAR_T str_buf[WORK_BUFFER_SIZE];
++# ifdef _LIBC
++ uint32_t nrules;
++# endif /* _LIBC */
++#endif /* WCHAR */
++ boolean negate = (re_opcode_t) *(p - 1) == charset_not;
++
++ DEBUG_PRINT2 ("EXECUTING charset%s.\n", negate ? "_not" : "");
++ PREFETCH ();
++ c = TRANSLATE (*d); /* The character to match. */
++#ifdef WCHAR
++# ifdef _LIBC
++ nrules = _NL_CURRENT_WORD (LC_COLLATE, _NL_COLLATE_NRULES);
++# endif /* _LIBC */
++ charset_top = p - 1;
++ char_class_length = *p++;
++ coll_symbol_length = *p++;
++ equiv_class_length = *p++;
++ ranges_length = *p++;
++ chars_length = *p++;
++	  /* p points to charset[6], so the address of the next instruction
++ (charset[l+m+n+2o+k+p']) equals p[l+m+n+2*o+p'],
++ where l=length of char_classes, m=length of collating_symbol,
++ n=equivalence_class, o=length of char_range,
++ p'=length of character. */
++ workp = p;
++ /* Update p to indicate the next instruction. */
++ p += char_class_length + coll_symbol_length+ equiv_class_length +
++ 2*ranges_length + chars_length;
++
++ /* match with char_class? */
++ for (i = 0; i < char_class_length ; i += CHAR_CLASS_SIZE)
++ {
++ wctype_t wctype;
++ uintptr_t alignedp = ((uintptr_t)workp
++ + __alignof__(wctype_t) - 1)
++ & ~(uintptr_t)(__alignof__(wctype_t) - 1);
++ wctype = *((wctype_t*)alignedp);
++ workp += CHAR_CLASS_SIZE;
++# ifdef _LIBC
++ if (__iswctype((wint_t)c, wctype))
++ goto char_set_matched;
++# else
++ if (iswctype((wint_t)c, wctype))
++ goto char_set_matched;
++# endif
++ }
++
++ /* match with collating_symbol? */
++# ifdef _LIBC
++ if (nrules != 0)
++ {
++ const unsigned char *extra = (const unsigned char *)
++ _NL_CURRENT (LC_COLLATE, _NL_COLLATE_SYMB_EXTRAMB);
++
++ for (workp2 = workp + coll_symbol_length ; workp < workp2 ;
++ workp++)
++ {
++ int32_t *wextra;
++ wextra = (int32_t*)(extra + *workp++);
++ for (i = 0; i < *wextra; ++i)
++ if (TRANSLATE(d[i]) != wextra[1 + i])
++ break;
++
++ if (i == *wextra)
++ {
++		      /* Update d; since d will be incremented at
++			 char_set_matched:, we leave it one short here.  */
++ d += i - 1;
++ goto char_set_matched;
++ }
++ }
++ }
++ else /* (nrules == 0) */
++# endif
++ /* If we can't look up collation data, we use wcscoll
++ instead. */
++ {
++ for (workp2 = workp + coll_symbol_length ; workp < workp2 ;)
++ {
++ const CHAR_T *backup_d = d, *backup_dend = dend;
++# ifdef _LIBC
++ length = __wcslen (workp);
++# else
++ length = wcslen (workp);
++# endif
++
++ /* If wcscoll(the collating symbol, whole string) > 0,
++		       no substring of the string can match the
++ collating symbol. */
++# ifdef _LIBC
++ if (__wcscoll (workp, d) > 0)
++# else
++ if (wcscoll (workp, d) > 0)
++# endif
++ {
++ workp += length + 1;
++ continue;
++ }
++
++ /* First, we compare the collating symbol with
++ the first character of the string.
++		       If it doesn't match, we add the next character to
++ the compare buffer in turn. */
++ for (i = 0 ; i < WORK_BUFFER_SIZE-1 ; i++, d++)
++ {
++ int match;
++ if (d == dend)
++ {
++ if (dend == end_match_2)
++ break;
++ d = string2;
++ dend = end_match_2;
++ }
++
++ /* add next character to the compare buffer. */
++ str_buf[i] = TRANSLATE(*d);
++ str_buf[i+1] = '\0';
++
++# ifdef _LIBC
++ match = __wcscoll (workp, str_buf);
++# else
++ match = wcscoll (workp, str_buf);
++# endif
++ if (match == 0)
++ goto char_set_matched;
++
++ if (match < 0)
++			  /* (str_buf > workp) indicates (str_buf + X > workp),
++			     because for all X (str_buf + X > str_buf).
++			     So we don't need to continue this loop.  */
++ break;
++
++			  /* Otherwise (str_buf < workp),
++			     (str_buf+next_character) may equal (workp).
++ So we continue this loop. */
++ }
++ /* not matched */
++ d = backup_d;
++ dend = backup_dend;
++ workp += length + 1;
++ }
++ }
++ /* match with equivalence_class? */
++# ifdef _LIBC
++ if (nrules != 0)
++ {
++ const CHAR_T *backup_d = d, *backup_dend = dend;
++ /* Try to match the equivalence class against
++ those known to the collate implementation. */
++ const int32_t *table;
++ const int32_t *weights;
++ const int32_t *extra;
++ const int32_t *indirect;
++ int32_t idx, idx2;
++ wint_t *cp;
++ size_t len;
++
++ /* This #include defines a local function! */
++# include <locale/weightwc.h>
++
++ table = (const int32_t *)
++ _NL_CURRENT (LC_COLLATE, _NL_COLLATE_TABLEWC);
++ weights = (const wint_t *)
++ _NL_CURRENT (LC_COLLATE, _NL_COLLATE_WEIGHTWC);
++ extra = (const wint_t *)
++ _NL_CURRENT (LC_COLLATE, _NL_COLLATE_EXTRAWC);
++ indirect = (const int32_t *)
++ _NL_CURRENT (LC_COLLATE, _NL_COLLATE_INDIRECTWC);
++
++ /* Write 1 collating element to str_buf, and
++ get its index. */
++ idx2 = 0;
++
++ for (i = 0 ; idx2 == 0 && i < WORK_BUFFER_SIZE - 1; i++)
++ {
++ cp = (wint_t*)str_buf;
++ if (d == dend)
++ {
++ if (dend == end_match_2)
++ break;
++ d = string2;
++ dend = end_match_2;
++ }
++ str_buf[i] = TRANSLATE(*(d+i));
++ str_buf[i+1] = '\0'; /* sentinel */
++ idx2 = findidx ((const wint_t**)&cp, i);
++ }
++
++		/* Update d; since d will be incremented at
++		   char_set_matched:, we leave it one short here.  */
++ d = backup_d + ((wchar_t*)cp - (wchar_t*)str_buf - 1);
++ if (d >= dend)
++ {
++ if (dend == end_match_2)
++ d = dend;
++ else
++ {
++ d = string2;
++ dend = end_match_2;
++ }
++ }
++
++ len = weights[idx2];
++
++ for (workp2 = workp + equiv_class_length ; workp < workp2 ;
++ workp++)
++ {
++ idx = (int32_t)*workp;
++ /* We already checked idx != 0 in regex_compile. */
++
++ if (idx2 != 0 && len == weights[idx])
++ {
++ int cnt = 0;
++ while (cnt < len && (weights[idx + 1 + cnt]
++ == weights[idx2 + 1 + cnt]))
++ ++cnt;
++
++ if (cnt == len)
++ goto char_set_matched;
++ }
++ }
++ /* not matched */
++ d = backup_d;
++ dend = backup_dend;
++ }
++ else /* (nrules == 0) */
++# endif
++ /* If we can't look up collation data, we use wcscoll
++ instead. */
++ {
++ for (workp2 = workp + equiv_class_length ; workp < workp2 ;)
++ {
++ const CHAR_T *backup_d = d, *backup_dend = dend;
++# ifdef _LIBC
++ length = __wcslen (workp);
++# else
++ length = wcslen (workp);
++# endif
++
++ /* If wcscoll(the collating symbol, whole string) > 0,
++		       no substring of the string can match the
++ collating symbol. */
++# ifdef _LIBC
++ if (__wcscoll (workp, d) > 0)
++# else
++ if (wcscoll (workp, d) > 0)
++# endif
++ {
++ workp += length + 1;
++ break;
++ }
++
++ /* First, we compare the equivalence class with
++ the first character of the string.
++		       If it doesn't match, we add the next character to
++ the compare buffer in turn. */
++ for (i = 0 ; i < WORK_BUFFER_SIZE - 1 ; i++, d++)
++ {
++ int match;
++ if (d == dend)
++ {
++ if (dend == end_match_2)
++ break;
++ d = string2;
++ dend = end_match_2;
++ }
++
++ /* add next character to the compare buffer. */
++ str_buf[i] = TRANSLATE(*d);
++ str_buf[i+1] = '\0';
++
++# ifdef _LIBC
++ match = __wcscoll (workp, str_buf);
++# else
++ match = wcscoll (workp, str_buf);
++# endif
++
++ if (match == 0)
++ goto char_set_matched;
++
++ if (match < 0)
++			  /* (str_buf > workp) indicates (str_buf + X > workp),
++			     because for all X (str_buf + X > str_buf).
++			     So we don't need to continue this loop.  */
++ break;
++
++			  /* Otherwise (str_buf < workp),
++			     (str_buf+next_character) may equal (workp).
++ So we continue this loop. */
++ }
++ /* not matched */
++ d = backup_d;
++ dend = backup_dend;
++ workp += length + 1;
++ }
++ }
++
++ /* match with char_range? */
++# ifdef _LIBC
++ if (nrules != 0)
++ {
++ uint32_t collseqval;
++ const char *collseq = (const char *)
++ _NL_CURRENT(LC_COLLATE, _NL_COLLATE_COLLSEQWC);
++
++ collseqval = collseq_table_lookup (collseq, c);
++
++ for (; workp < p - chars_length ;)
++ {
++ uint32_t start_val, end_val;
++
++		/* We have already computed the collation sequence values
++		   of the characters (or collating symbols).  */
++ start_val = (uint32_t) *workp++; /* range_start */
++ end_val = (uint32_t) *workp++; /* range_end */
++
++ if (start_val <= collseqval && collseqval <= end_val)
++ goto char_set_matched;
++ }
++ }
++ else
++# endif
++ {
++ /* We set range_start_char at str_buf[0], range_end_char
++ at str_buf[4], and compared char at str_buf[2]. */
++ str_buf[1] = 0;
++ str_buf[2] = c;
++ str_buf[3] = 0;
++ str_buf[5] = 0;
++ for (; workp < p - chars_length ;)
++ {
++ wchar_t *range_start_char, *range_end_char;
++
++ /* match if (range_start_char <= c <= range_end_char). */
++
++ /* If range_start(or end) < 0, we assume -range_start(end)
++ is the offset of the collating symbol which is specified
++ as the character of the range start(end). */
++
++ /* range_start */
++ if (*workp < 0)
++ range_start_char = charset_top - (*workp++);
++ else
++ {
++ str_buf[0] = *workp++;
++ range_start_char = str_buf;
++ }
++
++ /* range_end */
++ if (*workp < 0)
++ range_end_char = charset_top - (*workp++);
++ else
++ {
++ str_buf[4] = *workp++;
++ range_end_char = str_buf + 4;
++ }
++
++# ifdef _LIBC
++ if (__wcscoll (range_start_char, str_buf+2) <= 0
++ && __wcscoll (str_buf+2, range_end_char) <= 0)
++# else
++ if (wcscoll (range_start_char, str_buf+2) <= 0
++ && wcscoll (str_buf+2, range_end_char) <= 0)
++# endif
++ goto char_set_matched;
++ }
++ }
++
++ /* match with char? */
++ for (; workp < p ; workp++)
++ if (c == *workp)
++ goto char_set_matched;
++
++ negate = !negate;
++
++ char_set_matched:
++ if (negate) goto fail;
++#else
++ /* Cast to `unsigned' instead of `unsigned char' in case the
++ bit list is a full 32 bytes long. */
++ if (c < (unsigned) (*p * BYTEWIDTH)
++ && p[1 + c / BYTEWIDTH] & (1 << (c % BYTEWIDTH)))
++ negate = !negate;
++
++ p += 1 + *p;
++
++ if (!negate) goto fail;
++#undef WORK_BUFFER_SIZE
++#endif /* WCHAR */
++ SET_REGS_MATCHED ();
++ d++;
++ break;
++ }
++
++
++ /* The beginning of a group is represented by start_memory.
++ The arguments are the register number in the next byte, and the
++ number of groups inner to this one in the next. The text
++ matched within the group is recorded (in the internal
++ registers data structure) under the register number. */
++ case start_memory:
++ DEBUG_PRINT3 ("EXECUTING start_memory %ld (%ld):\n",
++ (long int) *p, (long int) p[1]);
++
++ /* Find out if this group can match the empty string. */
++ p1 = p; /* To send to group_match_null_string_p. */
++
++ if (REG_MATCH_NULL_STRING_P (reg_info[*p]) == MATCH_NULL_UNSET_VALUE)
++ REG_MATCH_NULL_STRING_P (reg_info[*p])
++ = PREFIX(group_match_null_string_p) (&p1, pend, reg_info);
++
++ /* Save the position in the string where we were the last time
++ we were at this open-group operator in case the group is
++ operated upon by a repetition operator, e.g., with `(a*)*b'
++ against `ab'; then we want to ignore where we are now in
++ the string in case this attempt to match fails. */
++ old_regstart[*p] = REG_MATCH_NULL_STRING_P (reg_info[*p])
++ ? REG_UNSET (regstart[*p]) ? d : regstart[*p]
++ : regstart[*p];
++ DEBUG_PRINT2 (" old_regstart: %d\n",
++ POINTER_TO_OFFSET (old_regstart[*p]));
++
++ regstart[*p] = d;
++ DEBUG_PRINT2 (" regstart: %d\n", POINTER_TO_OFFSET (regstart[*p]));
++
++ IS_ACTIVE (reg_info[*p]) = 1;
++ MATCHED_SOMETHING (reg_info[*p]) = 0;
++
++ /* Clear this whenever we change the register activity status. */
++ set_regs_matched_done = 0;
++
++ /* This is the new highest active register. */
++ highest_active_reg = *p;
++
++ /* If nothing was active before, this is the new lowest active
++ register. */
++ if (lowest_active_reg == NO_LOWEST_ACTIVE_REG)
++ lowest_active_reg = *p;
++
++ /* Move past the register number and inner group count. */
++ p += 2;
++ just_past_start_mem = p;
++
++ break;
++
++
++ /* The stop_memory opcode represents the end of a group. Its
++ arguments are the same as start_memory's: the register
++ number, and the number of inner groups. */
++ case stop_memory:
++ DEBUG_PRINT3 ("EXECUTING stop_memory %ld (%ld):\n",
++ (long int) *p, (long int) p[1]);
++
++ /* We need to save the string position the last time we were at
++ this close-group operator in case the group is operated
++ upon by a repetition operator, e.g., with `((a*)*(b*)*)*'
++ against `aba'; then we want to ignore where we are now in
++ the string in case this attempt to match fails. */
++ old_regend[*p] = REG_MATCH_NULL_STRING_P (reg_info[*p])
++ ? REG_UNSET (regend[*p]) ? d : regend[*p]
++ : regend[*p];
++ DEBUG_PRINT2 (" old_regend: %d\n",
++ POINTER_TO_OFFSET (old_regend[*p]));
++
++ regend[*p] = d;
++ DEBUG_PRINT2 (" regend: %d\n", POINTER_TO_OFFSET (regend[*p]));
++
++ /* This register isn't active anymore. */
++ IS_ACTIVE (reg_info[*p]) = 0;
++
++ /* Clear this whenever we change the register activity status. */
++ set_regs_matched_done = 0;
++
++ /* If this was the only register active, nothing is active
++ anymore. */
++ if (lowest_active_reg == highest_active_reg)
++ {
++ lowest_active_reg = NO_LOWEST_ACTIVE_REG;
++ highest_active_reg = NO_HIGHEST_ACTIVE_REG;
++ }
++ else
++ { /* We must scan for the new highest active register, since
++ it isn't necessarily one less than now: consider
++ (a(b)c(d(e)f)g). When group 3 ends, after the f), the
++ new highest active register is 1. */
++ UCHAR_T r = *p - 1;
++ while (r > 0 && !IS_ACTIVE (reg_info[r]))
++ r--;
++
++ /* If we end up at register zero, that means that we saved
++ the registers as the result of an `on_failure_jump', not
++ a `start_memory', and we jumped to past the innermost
++ `stop_memory'. For example, in ((.)*) we save
++ registers 1 and 2 as a result of the *, but when we pop
++ back to the second ), we are at the stop_memory 1.
++ Thus, nothing is active. */
++ if (r == 0)
++ {
++ lowest_active_reg = NO_LOWEST_ACTIVE_REG;
++ highest_active_reg = NO_HIGHEST_ACTIVE_REG;
++ }
++ else
++ highest_active_reg = r;
++ }
++
++ /* If just failed to match something this time around with a
++ group that's operated on by a repetition operator, try to
++ force exit from the ``loop'', and restore the register
++ information for this group that we had before trying this
++ last match. */
++ if ((!MATCHED_SOMETHING (reg_info[*p])
++ || just_past_start_mem == p - 1)
++ && (p + 2) < pend)
++ {
++ boolean is_a_jump_n = false;
++
++ p1 = p + 2;
++ mcnt = 0;
++ switch ((re_opcode_t) *p1++)
++ {
++ case jump_n:
++ is_a_jump_n = true;
++ case pop_failure_jump:
++ case maybe_pop_jump:
++ case jump:
++ case dummy_failure_jump:
++ EXTRACT_NUMBER_AND_INCR (mcnt, p1);
++ if (is_a_jump_n)
++ p1 += OFFSET_ADDRESS_SIZE;
++ break;
++
++ default:
++ /* do nothing */ ;
++ }
++ p1 += mcnt;
++
++ /* If the next operation is a jump backwards in the pattern
++ to an on_failure_jump right before the start_memory
++ corresponding to this stop_memory, exit from the loop
++ by forcing a failure after pushing on the stack the
++ on_failure_jump's jump in the pattern, and d. */
++ if (mcnt < 0 && (re_opcode_t) *p1 == on_failure_jump
++ && (re_opcode_t) p1[1+OFFSET_ADDRESS_SIZE] == start_memory
++ && p1[2+OFFSET_ADDRESS_SIZE] == *p)
++ {
++ /* If this group ever matched anything, then restore
++ what its registers were before trying this last
++ failed match, e.g., with `(a*)*b' against `ab' for
++ regstart[1], and, e.g., with `((a*)*(b*)*)*'
++ against `aba' for regend[3].
++
++ Also restore the registers for inner groups for,
++ e.g., `((a*)(b*))*' against `aba' (register 3 would
++ otherwise get trashed). */
++
++ if (EVER_MATCHED_SOMETHING (reg_info[*p]))
++ {
++ unsigned r;
++
++ EVER_MATCHED_SOMETHING (reg_info[*p]) = 0;
++
++ /* Restore this and inner groups' (if any) registers. */
++ for (r = *p; r < (unsigned) *p + (unsigned) *(p + 1);
++ r++)
++ {
++ regstart[r] = old_regstart[r];
++
++ /* xx why this test? */
++ if (old_regend[r] >= regstart[r])
++ regend[r] = old_regend[r];
++ }
++ }
++ p1++;
++ EXTRACT_NUMBER_AND_INCR (mcnt, p1);
++ PUSH_FAILURE_POINT (p1 + mcnt, d, -2);
++
++ goto fail;
++ }
++ }
++
++ /* Move past the register number and the inner group count. */
++ p += 2;
++ break;
++
++
++ /* \<digit> has been turned into a `duplicate' command which is
++ followed by the numeric value of <digit> as the register number. */
++ case duplicate:
++ {
++ register const CHAR_T *d2, *dend2;
++ int regno = *p++; /* Get which register to match against. */
++ DEBUG_PRINT2 ("EXECUTING duplicate %d.\n", regno);
++
++ /* Can't back reference a group which we've never matched. */
++ if (REG_UNSET (regstart[regno]) || REG_UNSET (regend[regno]))
++ goto fail;
++
++ /* Where in input to try to start matching. */
++ d2 = regstart[regno];
++
++ /* Where to stop matching; if both the place to start and
++ the place to stop matching are in the same string, then
++ set to the place to stop, otherwise, for now have to use
++ the end of the first string. */
++
++ dend2 = ((FIRST_STRING_P (regstart[regno])
++ == FIRST_STRING_P (regend[regno]))
++ ? regend[regno] : end_match_1);
++ for (;;)
++ {
++ /* If necessary, advance to next segment in register
++ contents. */
++ while (d2 == dend2)
++ {
++ if (dend2 == end_match_2) break;
++ if (dend2 == regend[regno]) break;
++
++ /* End of string1 => advance to string2. */
++ d2 = string2;
++ dend2 = regend[regno];
++ }
++ /* At end of register contents => success */
++ if (d2 == dend2) break;
++
++ /* If necessary, advance to next segment in data. */
++ PREFETCH ();
++
++ /* How many characters left in this segment to match. */
++ mcnt = dend - d;
++
++ /* Want how many consecutive characters we can match in
++ one shot, so, if necessary, adjust the count. */
++ if (mcnt > dend2 - d2)
++ mcnt = dend2 - d2;
++
++ /* Compare that many; failure if mismatch, else move
++ past them. */
++ if (translate
++ ? PREFIX(bcmp_translate) (d, d2, mcnt, translate)
++ : memcmp (d, d2, mcnt*sizeof(UCHAR_T)))
++ goto fail;
++ d += mcnt, d2 += mcnt;
++
++	      /* Do this because we've matched some characters.  */
++ SET_REGS_MATCHED ();
++ }
++ }
++ break;
++
++
++ /* begline matches the empty string at the beginning of the string
++ (unless `not_bol' is set in `bufp'), and, if
++ `newline_anchor' is set, after newlines. */
++ case begline:
++ DEBUG_PRINT1 ("EXECUTING begline.\n");
++
++ if (AT_STRINGS_BEG (d))
++ {
++ if (!bufp->not_bol) break;
++ }
++ else if (d[-1] == '\n' && bufp->newline_anchor)
++ {
++ break;
++ }
++ /* In all other cases, we fail. */
++ goto fail;
++
++
++ /* endline is the dual of begline. */
++ case endline:
++ DEBUG_PRINT1 ("EXECUTING endline.\n");
++
++ if (AT_STRINGS_END (d))
++ {
++ if (!bufp->not_eol) break;
++ }
++
++ /* We have to ``prefetch'' the next character. */
++ else if ((d == end1 ? *string2 : *d) == '\n'
++ && bufp->newline_anchor)
++ {
++ break;
++ }
++ goto fail;
++
++
++ /* Match at the very beginning of the data. */
++ case begbuf:
++ DEBUG_PRINT1 ("EXECUTING begbuf.\n");
++ if (AT_STRINGS_BEG (d))
++ break;
++ goto fail;
++
++
++ /* Match at the very end of the data. */
++ case endbuf:
++ DEBUG_PRINT1 ("EXECUTING endbuf.\n");
++ if (AT_STRINGS_END (d))
++ break;
++ goto fail;
++
++
++ /* on_failure_keep_string_jump is used to optimize `.*\n'. It
++ pushes NULL as the value for the string on the stack. Then
++ `pop_failure_point' will keep the current value for the
++ string, instead of restoring it. To see why, consider
++ matching `foo\nbar' against `.*\n'. The .* matches the foo;
++ then the . fails against the \n. But the next thing we want
++ to do is match the \n against the \n; if we restored the
++ string value, we would be back at the foo.
++
++ Because this is used only in specific cases, we don't need to
++ check all the things that `on_failure_jump' does, to make
++ sure the right things get saved on the stack. Hence we don't
++ share its code. The only reason to push anything on the
++ stack at all is that otherwise we would have to change
++ `anychar's code to do something besides goto fail in this
++ case; that seems worse than this. */
++ case on_failure_keep_string_jump:
++ DEBUG_PRINT1 ("EXECUTING on_failure_keep_string_jump");
++
++ EXTRACT_NUMBER_AND_INCR (mcnt, p);
++#ifdef _LIBC
++ DEBUG_PRINT3 (" %d (to %p):\n", mcnt, p + mcnt);
++#else
++ DEBUG_PRINT3 (" %d (to 0x%x):\n", mcnt, p + mcnt);
++#endif
++
++ PUSH_FAILURE_POINT (p + mcnt, NULL, -2);
++ break;
++
++
++ /* Uses of on_failure_jump:
++
++ Each alternative starts with an on_failure_jump that points
++ to the beginning of the next alternative. Each alternative
++ except the last ends with a jump that in effect jumps past
++ the rest of the alternatives. (They really jump to the
++ ending jump of the following alternative, because tensioning
++ these jumps is a hassle.)
++
++ Repeats start with an on_failure_jump that points past both
++ the repetition text and either the following jump or
++ pop_failure_jump back to this on_failure_jump. */
++ case on_failure_jump:
++ on_failure:
++ DEBUG_PRINT1 ("EXECUTING on_failure_jump");
++
++ EXTRACT_NUMBER_AND_INCR (mcnt, p);
++#ifdef _LIBC
++ DEBUG_PRINT3 (" %d (to %p)", mcnt, p + mcnt);
++#else
++ DEBUG_PRINT3 (" %d (to 0x%x)", mcnt, p + mcnt);
++#endif
++
++ /* If this on_failure_jump comes right before a group (i.e.,
++ the original * applied to a group), save the information
++ for that group and all inner ones, so that if we fail back
++ to this point, the group's information will be correct.
++ For example, in \(a*\)*\1, we need the preceding group,
++ and in \(zz\(a*\)b*\)\2, we need the inner group. */
++
++ /* We can't use `p' to check ahead because we push
++ a failure point to `p + mcnt' after we do this. */
++ p1 = p;
++
++ /* We need to skip no_op's before we look for the
++ start_memory in case this on_failure_jump is happening as
++ the result of a completed succeed_n, as in \(a\)\{1,3\}b\1
++ against aba. */
++ while (p1 < pend && (re_opcode_t) *p1 == no_op)
++ p1++;
++
++ if (p1 < pend && (re_opcode_t) *p1 == start_memory)
++ {
++ /* We have a new highest active register now. This will
++ get reset at the start_memory we are about to get to,
++ but we will have saved all the registers relevant to
++ this repetition op, as described above. */
++ highest_active_reg = *(p1 + 1) + *(p1 + 2);
++ if (lowest_active_reg == NO_LOWEST_ACTIVE_REG)
++ lowest_active_reg = *(p1 + 1);
++ }
++
++ DEBUG_PRINT1 (":\n");
++ PUSH_FAILURE_POINT (p + mcnt, d, -2);
++ break;
++
++
++ /* A smart repeat ends with `maybe_pop_jump'.
++ We change it to either `pop_failure_jump' or `jump'. */
++ case maybe_pop_jump:
++ EXTRACT_NUMBER_AND_INCR (mcnt, p);
++ DEBUG_PRINT2 ("EXECUTING maybe_pop_jump %d.\n", mcnt);
++ {
++ register UCHAR_T *p2 = p;
++
++ /* Compare the beginning of the repeat with what in the
++ pattern follows its end. If we can establish that there
++ is nothing that they would both match, i.e., that we
++ would have to backtrack because of (as in, e.g., `a*a')
++ then we can change to pop_failure_jump, because we'll
++ never have to backtrack.
++
++ This is not true in the case of alternatives: in
++ `(a|ab)*' we do need to backtrack to the `ab' alternative
++ (e.g., if the string was `ab'). But instead of trying to
++ detect that here, the alternative has put on a dummy
++ failure point which is what we will end up popping. */
++
++ /* Skip over open/close-group commands.
++ If what follows this loop is a ...+ construct,
++ look at what begins its body, since we will have to
++ match at least one of that. */
++ while (1)
++ {
++ if (p2 + 2 < pend
++ && ((re_opcode_t) *p2 == stop_memory
++ || (re_opcode_t) *p2 == start_memory))
++ p2 += 3;
++ else if (p2 + 2 + 2 * OFFSET_ADDRESS_SIZE < pend
++ && (re_opcode_t) *p2 == dummy_failure_jump)
++ p2 += 2 + 2 * OFFSET_ADDRESS_SIZE;
++ else
++ break;
++ }
++
++ p1 = p + mcnt;
++ /* p1[0] ... p1[2] are the `on_failure_jump' corresponding
++ to the `maybe_finalize_jump' of this case. Examine what
++ follows. */
++
++ /* If we're at the end of the pattern, we can change. */
++ if (p2 == pend)
++ {
++ /* Consider what happens when matching ":\(.*\)"
++ against ":/". I don't really understand this code
++ yet. */
++ p[-(1+OFFSET_ADDRESS_SIZE)] = (UCHAR_T)
++ pop_failure_jump;
++ DEBUG_PRINT1
++ (" End of pattern: change to `pop_failure_jump'.\n");
++ }
++
++ else if ((re_opcode_t) *p2 == exactn
++#ifdef MBS_SUPPORT
++ || (re_opcode_t) *p2 == exactn_bin
++#endif
++ || (bufp->newline_anchor && (re_opcode_t) *p2 == endline))
++ {
++ register UCHAR_T c
++ = *p2 == (UCHAR_T) endline ? '\n' : p2[2];
++
++ if (((re_opcode_t) p1[1+OFFSET_ADDRESS_SIZE] == exactn
++#ifdef MBS_SUPPORT
++ || (re_opcode_t) p1[1+OFFSET_ADDRESS_SIZE] == exactn_bin
++#endif
++ ) && p1[3+OFFSET_ADDRESS_SIZE] != c)
++ {
++ p[-(1+OFFSET_ADDRESS_SIZE)] = (UCHAR_T)
++ pop_failure_jump;
++#ifdef WCHAR
++ DEBUG_PRINT3 (" %C != %C => pop_failure_jump.\n",
++ (wint_t) c,
++ (wint_t) p1[3+OFFSET_ADDRESS_SIZE]);
++#else
++ DEBUG_PRINT3 (" %c != %c => pop_failure_jump.\n",
++ (char) c,
++ (char) p1[3+OFFSET_ADDRESS_SIZE]);
++#endif
++ }
++
++#ifndef WCHAR
++ else if ((re_opcode_t) p1[3] == charset
++ || (re_opcode_t) p1[3] == charset_not)
++ {
++ int negate = (re_opcode_t) p1[3] == charset_not;
++
++ if (c < (unsigned) (p1[4] * BYTEWIDTH)
++ && p1[5 + c / BYTEWIDTH] & (1 << (c % BYTEWIDTH)))
++ negate = !negate;
++
++ /* `negate' is equal to 1 if c would match, which means
++ that we can't change to pop_failure_jump. */
++ if (!negate)
++ {
++ p[-3] = (unsigned char) pop_failure_jump;
++ DEBUG_PRINT1 (" No match => pop_failure_jump.\n");
++ }
++ }
++#endif /* not WCHAR */
++ }
++#ifndef WCHAR
++ else if ((re_opcode_t) *p2 == charset)
++ {
++ /* We win if the first character of the loop is not part
++ of the charset. */
++ if ((re_opcode_t) p1[3] == exactn
++ && ! ((int) p2[1] * BYTEWIDTH > (int) p1[5]
++ && (p2[2 + p1[5] / BYTEWIDTH]
++ & (1 << (p1[5] % BYTEWIDTH)))))
++ {
++ p[-3] = (unsigned char) pop_failure_jump;
++ DEBUG_PRINT1 (" No match => pop_failure_jump.\n");
++ }
++
++ else if ((re_opcode_t) p1[3] == charset_not)
++ {
++ int idx;
++ /* We win if the charset_not inside the loop
++ lists every character listed in the charset after. */
++ for (idx = 0; idx < (int) p2[1]; idx++)
++ if (! (p2[2 + idx] == 0
++ || (idx < (int) p1[4]
++ && ((p2[2 + idx] & ~ p1[5 + idx]) == 0))))
++ break;
++
++ if (idx == p2[1])
++ {
++ p[-3] = (unsigned char) pop_failure_jump;
++ DEBUG_PRINT1 (" No match => pop_failure_jump.\n");
++ }
++ }
++ else if ((re_opcode_t) p1[3] == charset)
++ {
++ int idx;
++ /* We win if the charset inside the loop
++ has no overlap with the one after the loop. */
++ for (idx = 0;
++ idx < (int) p2[1] && idx < (int) p1[4];
++ idx++)
++ if ((p2[2 + idx] & p1[5 + idx]) != 0)
++ break;
++
++ if (idx == p2[1] || idx == p1[4])
++ {
++ p[-3] = (unsigned char) pop_failure_jump;
++ DEBUG_PRINT1 (" No match => pop_failure_jump.\n");
++ }
++ }
++ }
++#endif /* not WCHAR */
++ }
++ p -= OFFSET_ADDRESS_SIZE; /* Point at relative address again. */
++ if ((re_opcode_t) p[-1] != pop_failure_jump)
++ {
++ p[-1] = (UCHAR_T) jump;
++ DEBUG_PRINT1 (" Match => jump.\n");
++ goto unconditional_jump;
++ }
++ /* Note fall through. */
++
++
++ /* The end of a simple repeat has a pop_failure_jump back to
++ its matching on_failure_jump, where the latter will push a
++ failure point. The pop_failure_jump takes off failure
++ points put on by this pop_failure_jump's matching
++ on_failure_jump; we got through the pattern to here from the
++ matching on_failure_jump, so didn't fail. */
++ case pop_failure_jump:
++ {
++ /* We need to pass separate storage for the lowest and
++ highest registers, even though we don't care about the
++ actual values. Otherwise, we will restore only one
++ register from the stack, since lowest will == highest in
++ `pop_failure_point'. */
++ active_reg_t dummy_low_reg, dummy_high_reg;
++ UCHAR_T *pdummy = NULL;
++ const CHAR_T *sdummy = NULL;
++
++ DEBUG_PRINT1 ("EXECUTING pop_failure_jump.\n");
++ POP_FAILURE_POINT (sdummy, pdummy,
++ dummy_low_reg, dummy_high_reg,
++ reg_dummy, reg_dummy, reg_info_dummy);
++ }
++ /* Note fall through. */
++
++ unconditional_jump:
++#ifdef _LIBC
++ DEBUG_PRINT2 ("\n%p: ", p);
++#else
++ DEBUG_PRINT2 ("\n0x%x: ", p);
++#endif
++ /* Note fall through. */
++
++ /* Unconditionally jump (without popping any failure points). */
++ case jump:
++ EXTRACT_NUMBER_AND_INCR (mcnt, p); /* Get the amount to jump. */
++ DEBUG_PRINT2 ("EXECUTING jump %d ", mcnt);
++ p += mcnt; /* Do the jump. */
++#ifdef _LIBC
++ DEBUG_PRINT2 ("(to %p).\n", p);
++#else
++ DEBUG_PRINT2 ("(to 0x%x).\n", p);
++#endif
++ break;
++
++
++ /* We need this opcode so we can detect where alternatives end
++ in `group_match_null_string_p' et al. */
++ case jump_past_alt:
++ DEBUG_PRINT1 ("EXECUTING jump_past_alt.\n");
++ goto unconditional_jump;
++
++
++ /* Normally, the on_failure_jump pushes a failure point, which
++ then gets popped at pop_failure_jump. We will end up at
++ pop_failure_jump, also, and with a pattern of, say, `a+', we
++ are skipping over the on_failure_jump, so we have to push
++ something meaningless for pop_failure_jump to pop. */
++ case dummy_failure_jump:
++ DEBUG_PRINT1 ("EXECUTING dummy_failure_jump.\n");
++ /* It doesn't matter what we push for the string here. What
++ the code at `fail' tests is the value for the pattern. */
++ PUSH_FAILURE_POINT (NULL, NULL, -2);
++ goto unconditional_jump;
++
++
++ /* At the end of an alternative, we need to push a dummy failure
++ point in case we are followed by a `pop_failure_jump', because
++ we don't want the failure point for the alternative to be
++ popped. For example, matching `(a|ab)*' against `aab'
++ requires that we match the `ab' alternative. */
++ case push_dummy_failure:
++ DEBUG_PRINT1 ("EXECUTING push_dummy_failure.\n");
++ /* See comments just above at `dummy_failure_jump' about the
++ two zeroes. */
++ PUSH_FAILURE_POINT (NULL, NULL, -2);
++ break;
++
++ /* Have to succeed matching what follows at least n times.
++ After that, handle like `on_failure_jump'. */
++ case succeed_n:
++ EXTRACT_NUMBER (mcnt, p + OFFSET_ADDRESS_SIZE);
++ DEBUG_PRINT2 ("EXECUTING succeed_n %d.\n", mcnt);
++
++ assert (mcnt >= 0);
++ /* Originally, this is how many times we HAVE to succeed. */
++ if (mcnt > 0)
++ {
++ mcnt--;
++ p += OFFSET_ADDRESS_SIZE;
++ STORE_NUMBER_AND_INCR (p, mcnt);
++#ifdef _LIBC
++ DEBUG_PRINT3 (" Setting %p to %d.\n", p - OFFSET_ADDRESS_SIZE
++ , mcnt);
++#else
++ DEBUG_PRINT3 (" Setting 0x%x to %d.\n", p - OFFSET_ADDRESS_SIZE
++ , mcnt);
++#endif
++ }
++ else if (mcnt == 0)
++ {
++#ifdef _LIBC
++ DEBUG_PRINT2 (" Setting two bytes from %p to no_op.\n",
++ p + OFFSET_ADDRESS_SIZE);
++#else
++ DEBUG_PRINT2 (" Setting two bytes from 0x%x to no_op.\n",
++ p + OFFSET_ADDRESS_SIZE);
++#endif /* _LIBC */
++
++#ifdef WCHAR
++ p[1] = (UCHAR_T) no_op;
++#else
++ p[2] = (UCHAR_T) no_op;
++ p[3] = (UCHAR_T) no_op;
++#endif /* WCHAR */
++ goto on_failure;
++ }
++ break;
++
++ case jump_n:
++ EXTRACT_NUMBER (mcnt, p + OFFSET_ADDRESS_SIZE);
++ DEBUG_PRINT2 ("EXECUTING jump_n %d.\n", mcnt);
++
++ /* Originally, this is how many times we CAN jump. */
++ if (mcnt)
++ {
++ mcnt--;
++ STORE_NUMBER (p + OFFSET_ADDRESS_SIZE, mcnt);
++
++#ifdef _LIBC
++ DEBUG_PRINT3 (" Setting %p to %d.\n", p + OFFSET_ADDRESS_SIZE,
++ mcnt);
++#else
++ DEBUG_PRINT3 (" Setting 0x%x to %d.\n", p + OFFSET_ADDRESS_SIZE,
++ mcnt);
++#endif /* _LIBC */
++ goto unconditional_jump;
++ }
++ /* If don't have to jump any more, skip over the rest of command. */
++ else
++ p += 2 * OFFSET_ADDRESS_SIZE;
++ break;
++
++ case set_number_at:
++ {
++ DEBUG_PRINT1 ("EXECUTING set_number_at.\n");
++
++ EXTRACT_NUMBER_AND_INCR (mcnt, p);
++ p1 = p + mcnt;
++ EXTRACT_NUMBER_AND_INCR (mcnt, p);
++#ifdef _LIBC
++ DEBUG_PRINT3 (" Setting %p to %d.\n", p1, mcnt);
++#else
++ DEBUG_PRINT3 (" Setting 0x%x to %d.\n", p1, mcnt);
++#endif
++ STORE_NUMBER (p1, mcnt);
++ break;
++ }
++
++#if 0
++ /* The DEC Alpha C compiler 3.x generates incorrect code for the
++ test WORDCHAR_P (d - 1) != WORDCHAR_P (d) in the expansion of
++ AT_WORD_BOUNDARY, so this code is disabled. Expanding the
++ macro and introducing temporary variables works around the bug. */
++
++ case wordbound:
++ DEBUG_PRINT1 ("EXECUTING wordbound.\n");
++ if (AT_WORD_BOUNDARY (d))
++ break;
++ goto fail;
++
++ case notwordbound:
++ DEBUG_PRINT1 ("EXECUTING notwordbound.\n");
++ if (AT_WORD_BOUNDARY (d))
++ goto fail;
++ break;
++#else
++ case wordbound:
++ {
++ boolean prevchar, thischar;
++
++ DEBUG_PRINT1 ("EXECUTING wordbound.\n");
++ if (AT_STRINGS_BEG (d) || AT_STRINGS_END (d))
++ break;
++
++ prevchar = WORDCHAR_P (d - 1);
++ thischar = WORDCHAR_P (d);
++ if (prevchar != thischar)
++ break;
++ goto fail;
++ }
++
++ case notwordbound:
++ {
++ boolean prevchar, thischar;
++
++ DEBUG_PRINT1 ("EXECUTING notwordbound.\n");
++ if (AT_STRINGS_BEG (d) || AT_STRINGS_END (d))
++ goto fail;
++
++ prevchar = WORDCHAR_P (d - 1);
++ thischar = WORDCHAR_P (d);
++ if (prevchar != thischar)
++ goto fail;
++ break;
++ }
++#endif
++
++ case wordbeg:
++ DEBUG_PRINT1 ("EXECUTING wordbeg.\n");
++ if (!AT_STRINGS_END (d) && WORDCHAR_P (d)
++ && (AT_STRINGS_BEG (d) || !WORDCHAR_P (d - 1)))
++ break;
++ goto fail;
++
++ case wordend:
++ DEBUG_PRINT1 ("EXECUTING wordend.\n");
++ if (!AT_STRINGS_BEG (d) && WORDCHAR_P (d - 1)
++ && (AT_STRINGS_END (d) || !WORDCHAR_P (d)))
++ break;
++ goto fail;
++
++#ifdef emacs
++ case before_dot:
++ DEBUG_PRINT1 ("EXECUTING before_dot.\n");
++ if (PTR_CHAR_POS ((unsigned char *) d) >= point)
++ goto fail;
++ break;
++
++ case at_dot:
++ DEBUG_PRINT1 ("EXECUTING at_dot.\n");
++ if (PTR_CHAR_POS ((unsigned char *) d) != point)
++ goto fail;
++ break;
++
++ case after_dot:
++ DEBUG_PRINT1 ("EXECUTING after_dot.\n");
++ if (PTR_CHAR_POS ((unsigned char *) d) <= point)
++ goto fail;
++ break;
++
++ case syntaxspec:
++ DEBUG_PRINT2 ("EXECUTING syntaxspec %d.\n", mcnt);
++ mcnt = *p++;
++ goto matchsyntax;
++
++ case wordchar:
++ DEBUG_PRINT1 ("EXECUTING Emacs wordchar.\n");
++ mcnt = (int) Sword;
++ matchsyntax:
++ PREFETCH ();
++ /* Can't use *d++ here; SYNTAX may be an unsafe macro. */
++ d++;
++ if (SYNTAX (d[-1]) != (enum syntaxcode) mcnt)
++ goto fail;
++ SET_REGS_MATCHED ();
++ break;
++
++ case notsyntaxspec:
++ DEBUG_PRINT2 ("EXECUTING notsyntaxspec %d.\n", mcnt);
++ mcnt = *p++;
++ goto matchnotsyntax;
++
++ case notwordchar:
++ DEBUG_PRINT1 ("EXECUTING Emacs notwordchar.\n");
++ mcnt = (int) Sword;
++ matchnotsyntax:
++ PREFETCH ();
++ /* Can't use *d++ here; SYNTAX may be an unsafe macro. */
++ d++;
++ if (SYNTAX (d[-1]) == (enum syntaxcode) mcnt)
++ goto fail;
++ SET_REGS_MATCHED ();
++ break;
++
++#else /* not emacs */
++ case wordchar:
++ DEBUG_PRINT1 ("EXECUTING non-Emacs wordchar.\n");
++ PREFETCH ();
++ if (!WORDCHAR_P (d))
++ goto fail;
++ SET_REGS_MATCHED ();
++ d++;
++ break;
++
++ case notwordchar:
++ DEBUG_PRINT1 ("EXECUTING non-Emacs notwordchar.\n");
++ PREFETCH ();
++ if (WORDCHAR_P (d))
++ goto fail;
++ SET_REGS_MATCHED ();
++ d++;
++ break;
++#endif /* not emacs */
++
++ default:
++ abort ();
++ }
++ continue; /* Successfully executed one pattern command; keep going. */
++
++
++ /* We goto here if a matching operation fails. */
++ fail:
++ if (!FAIL_STACK_EMPTY ())
++ { /* A restart point is known. Restore to that state. */
++ DEBUG_PRINT1 ("\nFAIL:\n");
++ POP_FAILURE_POINT (d, p,
++ lowest_active_reg, highest_active_reg,
++ regstart, regend, reg_info);
++
++ /* If this failure point is a dummy, try the next one. */
++ if (!p)
++ goto fail;
++
++ /* If we failed to the end of the pattern, don't examine *p. */
++ assert (p <= pend);
++ if (p < pend)
++ {
++ boolean is_a_jump_n = false;
++
++ /* If failed to a backwards jump that's part of a repetition
++ loop, need to pop this failure point and use the next one. */
++ switch ((re_opcode_t) *p)
++ {
++ case jump_n:
++ is_a_jump_n = true;
++ case maybe_pop_jump:
++ case pop_failure_jump:
++ case jump:
++ p1 = p + 1;
++ EXTRACT_NUMBER_AND_INCR (mcnt, p1);
++ p1 += mcnt;
++
++ if ((is_a_jump_n && (re_opcode_t) *p1 == succeed_n)
++ || (!is_a_jump_n
++ && (re_opcode_t) *p1 == on_failure_jump))
++ goto fail;
++ break;
++ default:
++ /* do nothing */ ;
++ }
++ }
++
++ if (d >= string1 && d <= end1)
++ dend = end_match_1;
++ }
++ else
++ break; /* Matching at this starting point really fails. */
++ } /* for (;;) */
++
++ if (best_regs_set)
++ goto restore_best_regs;
++
++ FREE_VARIABLES ();
++
++ return -1; /* Failure to match. */
++} /* re_match_2 */
++
++/* Subroutine definitions for re_match_2. */
++
++
++/* We are passed P pointing to a register number after a start_memory.
++
++ Return true if the pattern up to the corresponding stop_memory can
++ match the empty string, and false otherwise.
++
++ If we find the matching stop_memory, sets P to point to one past its number.
++ Otherwise, sets P to an undefined byte less than or equal to END.
++
++ We don't handle duplicates properly (yet). */
++
++static boolean
++PREFIX(group_match_null_string_p) (UCHAR_T **p, UCHAR_T *end,
++ PREFIX(register_info_type) *reg_info)
++{
++ int mcnt;
++ /* Point to after the args to the start_memory. */
++ UCHAR_T *p1 = *p + 2;
++
++ while (p1 < end)
++ {
++ /* Skip over opcodes that can match nothing, and return true or
++ false, as appropriate, when we get to one that can't, or to the
++ matching stop_memory. */
++
++ switch ((re_opcode_t) *p1)
++ {
++ /* Could be either a loop or a series of alternatives. */
++ case on_failure_jump:
++ p1++;
++ EXTRACT_NUMBER_AND_INCR (mcnt, p1);
++
++ /* If the next operation is not a jump backwards in the
++ pattern. */
++
++ if (mcnt >= 0)
++ {
++ /* Go through the on_failure_jumps of the alternatives,
++ seeing if any of the alternatives cannot match nothing.
++ The last alternative starts with only a jump,
++ whereas the rest start with on_failure_jump and end
++ with a jump, e.g., here is the pattern for `a|b|c':
++
++ /on_failure_jump/0/6/exactn/1/a/jump_past_alt/0/6
++ /on_failure_jump/0/6/exactn/1/b/jump_past_alt/0/3
++ /exactn/1/c
++
++ So, we have to first go through the first (n-1)
++ alternatives and then deal with the last one separately. */
++
++
++ /* Deal with the first (n-1) alternatives, which start
++ with an on_failure_jump (see above) that jumps to right
++ past a jump_past_alt. */
++
++ while ((re_opcode_t) p1[mcnt-(1+OFFSET_ADDRESS_SIZE)] ==
++ jump_past_alt)
++ {
++ /* `mcnt' holds how many bytes long the alternative
++ is, including the ending `jump_past_alt' and
++ its number. */
++
++ if (!PREFIX(alt_match_null_string_p) (p1, p1 + mcnt -
++ (1 + OFFSET_ADDRESS_SIZE),
++ reg_info))
++ return false;
++
++ /* Move to right after this alternative, including the
++ jump_past_alt. */
++ p1 += mcnt;
++
++ /* Break if it's the beginning of an n-th alternative
++ that doesn't begin with an on_failure_jump. */
++ if ((re_opcode_t) *p1 != on_failure_jump)
++ break;
++
++ /* Still have to check that it's not an n-th
++ alternative that starts with an on_failure_jump. */
++ p1++;
++ EXTRACT_NUMBER_AND_INCR (mcnt, p1);
++ if ((re_opcode_t) p1[mcnt-(1+OFFSET_ADDRESS_SIZE)] !=
++ jump_past_alt)
++ {
++ /* Get to the beginning of the n-th alternative. */
++ p1 -= 1 + OFFSET_ADDRESS_SIZE;
++ break;
++ }
++ }
++
++ /* Deal with the last alternative: go back and get number
++ of the `jump_past_alt' just before it. `mcnt' contains
++ the length of the alternative. */
++ EXTRACT_NUMBER (mcnt, p1 - OFFSET_ADDRESS_SIZE);
++
++ if (!PREFIX(alt_match_null_string_p) (p1, p1 + mcnt, reg_info))
++ return false;
++
++ p1 += mcnt; /* Get past the n-th alternative. */
++ } /* if mcnt > 0 */
++ break;
++
++
++ case stop_memory:
++ assert (p1[1] == **p);
++ *p = p1 + 2;
++ return true;
++
++
++ default:
++ if (!PREFIX(common_op_match_null_string_p) (&p1, end, reg_info))
++ return false;
++ }
++ } /* while p1 < end */
++
++ return false;
++} /* group_match_null_string_p */
++
++
++/* Similar to group_match_null_string_p, but doesn't deal with alternatives:
++ It expects P to be the first byte of a single alternative and END one
++ byte past the last. The alternative can contain groups. */
++
++static boolean
++PREFIX(alt_match_null_string_p) (UCHAR_T *p, UCHAR_T *end,
++ PREFIX(register_info_type) *reg_info)
++{
++ int mcnt;
++ UCHAR_T *p1 = p;
++
++ while (p1 < end)
++ {
++ /* Skip over opcodes that can match nothing, and break when we get
++ to one that can't. */
++
++ switch ((re_opcode_t) *p1)
++ {
++ /* It's a loop. */
++ case on_failure_jump:
++ p1++;
++ EXTRACT_NUMBER_AND_INCR (mcnt, p1);
++ p1 += mcnt;
++ break;
++
++ default:
++ if (!PREFIX(common_op_match_null_string_p) (&p1, end, reg_info))
++ return false;
++ }
++ } /* while p1 < end */
++
++ return true;
++} /* alt_match_null_string_p */
++
++
++/* Deals with the ops common to group_match_null_string_p and
++ alt_match_null_string_p.
++
++ Sets P to one after the op and its arguments, if any. */
++
++static boolean
++PREFIX(common_op_match_null_string_p) (UCHAR_T **p, UCHAR_T *end,
++ PREFIX(register_info_type) *reg_info)
++{
++ int mcnt;
++ boolean ret;
++ int reg_no;
++ UCHAR_T *p1 = *p;
++
++ switch ((re_opcode_t) *p1++)
++ {
++ case no_op:
++ case begline:
++ case endline:
++ case begbuf:
++ case endbuf:
++ case wordbeg:
++ case wordend:
++ case wordbound:
++ case notwordbound:
++#ifdef emacs
++ case before_dot:
++ case at_dot:
++ case after_dot:
++#endif
++ break;
++
++ case start_memory:
++ reg_no = *p1;
++ assert (reg_no > 0 && reg_no <= MAX_REGNUM);
++ ret = PREFIX(group_match_null_string_p) (&p1, end, reg_info);
++
++ /* Have to set this here in case we're checking a group which
++ contains a group and a back reference to it. */
++
++ if (REG_MATCH_NULL_STRING_P (reg_info[reg_no]) == MATCH_NULL_UNSET_VALUE)
++ REG_MATCH_NULL_STRING_P (reg_info[reg_no]) = ret;
++
++ if (!ret)
++ return false;
++ break;
++
++ /* If this is an optimized succeed_n for zero times, make the jump. */
++ case jump:
++ EXTRACT_NUMBER_AND_INCR (mcnt, p1);
++ if (mcnt >= 0)
++ p1 += mcnt;
++ else
++ return false;
++ break;
++
++ case succeed_n:
++ /* Get to the number of times to succeed. */
++ p1 += OFFSET_ADDRESS_SIZE;
++ EXTRACT_NUMBER_AND_INCR (mcnt, p1);
++
++ if (mcnt == 0)
++ {
++ p1 -= 2 * OFFSET_ADDRESS_SIZE;
++ EXTRACT_NUMBER_AND_INCR (mcnt, p1);
++ p1 += mcnt;
++ }
++ else
++ return false;
++ break;
++
++ case duplicate:
++ if (!REG_MATCH_NULL_STRING_P (reg_info[*p1]))
++ return false;
++ break;
++
++ case set_number_at:
++ p1 += 2 * OFFSET_ADDRESS_SIZE;
++
++ default:
++ /* All other opcodes mean we cannot match the empty string. */
++ return false;
++ }
++
++ *p = p1;
++ return true;
++} /* common_op_match_null_string_p */
++
++
++/* Return zero if TRANSLATE[S1] and TRANSLATE[S2] are identical for LEN
++ bytes; nonzero otherwise. */
++
++static int
++PREFIX(bcmp_translate) (const CHAR_T *s1, const CHAR_T *s2, register int len,
++ RE_TRANSLATE_TYPE translate)
++{
++ register const UCHAR_T *p1 = (const UCHAR_T *) s1;
++ register const UCHAR_T *p2 = (const UCHAR_T *) s2;
++ while (len)
++ {
++#ifdef WCHAR
++ if (((*p1<=0xff)?translate[*p1++]:*p1++)
++ != ((*p2<=0xff)?translate[*p2++]:*p2++))
++ return 1;
++#else /* BYTE */
++ if (translate[*p1++] != translate[*p2++]) return 1;
++#endif /* WCHAR */
++ len--;
++ }
++ return 0;
++}
++
++
++#else /* not INSIDE_RECURSION */
++
++/* Entry points for GNU code. */
++
++/* re_compile_pattern is the GNU regular expression compiler: it
++ compiles PATTERN (of length SIZE) and puts the result in BUFP.
++ Returns 0 if the pattern was valid, otherwise an error string.
++
++ Assumes the `allocated' (and perhaps `buffer') and `translate' fields
++ are set in BUFP on entry.
++
++ We call regex_compile to do the actual compilation. */
++
++const char *
++re_compile_pattern (const char *pattern, size_t length,
++ struct re_pattern_buffer *bufp)
++{
++ reg_errcode_t ret;
++
++ /* GNU code is written to assume at least RE_NREGS registers will be set
++ (and at least one extra will be -1). */
++ bufp->regs_allocated = REGS_UNALLOCATED;
++
++ /* And GNU code determines whether or not to get register information
++ by passing null for the REGS argument to re_match, etc., not by
++ setting no_sub. */
++ bufp->no_sub = 0;
++
++ /* Match anchors at newline. */
++ bufp->newline_anchor = 1;
++
++# ifdef MBS_SUPPORT
++ if (MB_CUR_MAX != 1)
++ ret = wcs_regex_compile (pattern, length, re_syntax_options, bufp);
++ else
++# endif
++ ret = byte_regex_compile (pattern, length, re_syntax_options, bufp);
++
++ if (!ret)
++ return NULL;
++ return gettext (re_error_msgid[(int) ret]);
++}
++#ifdef _LIBC
++weak_alias (__re_compile_pattern, re_compile_pattern)
++#endif
++
++/* Entry points compatible with 4.2 BSD regex library. We don't define
++ them unless specifically requested. */
++
++#if defined _REGEX_RE_COMP || defined _LIBC
++
++/* BSD has one and only one pattern buffer. */
++static struct re_pattern_buffer re_comp_buf;
++
++char *
++#ifdef _LIBC
++/* Make these definitions weak in libc, so POSIX programs can redefine
++ these names if they don't use our functions, and still use
++ regcomp/regexec below without link errors. */
++weak_function
++#endif
++re_comp (const char *s)
++{
++ reg_errcode_t ret;
++
++ if (!s)
++ {
++ if (!re_comp_buf.buffer)
++ return (char *) gettext ("No previous regular expression");
++ return 0;
++ }
++
++ if (!re_comp_buf.buffer)
++ {
++ re_comp_buf.buffer = (unsigned char *) malloc (200);
++ if (re_comp_buf.buffer == NULL)
++ return (char *) gettext (re_error_msgid[(int) REG_ESPACE]);
++ re_comp_buf.allocated = 200;
++
++ re_comp_buf.fastmap = (char *) malloc (1 << BYTEWIDTH);
++ if (re_comp_buf.fastmap == NULL)
++ return (char *) gettext (re_error_msgid[(int) REG_ESPACE]);
++ }
++
++ /* Since `re_exec' always passes NULL for the `regs' argument, we
++ don't need to initialize the pattern buffer fields which affect it. */
++
++ /* Match anchors at newlines. */
++ re_comp_buf.newline_anchor = 1;
++
++# ifdef MBS_SUPPORT
++ if (MB_CUR_MAX != 1)
++ ret = wcs_regex_compile (s, strlen (s), re_syntax_options, &re_comp_buf);
++ else
++# endif
++ ret = byte_regex_compile (s, strlen (s), re_syntax_options, &re_comp_buf);
++
++ if (!ret)
++ return NULL;
++
++ /* Yes, we're discarding `const' here if !HAVE_LIBINTL. */
++ return (char *) gettext (re_error_msgid[(int) ret]);
++}
++
++
++int
++#ifdef _LIBC
++weak_function
++#endif
++re_exec (const char *s)
++{
++ const int len = strlen (s);
++ return
++ 0 <= re_search (&re_comp_buf, s, len, 0, len, (struct re_registers *) 0);
++}
++
++#endif /* _REGEX_RE_COMP */
++
++/* POSIX.2 functions. Don't define these for Emacs. */
++
++#ifndef emacs
++
++/* regcomp takes a regular expression as a string and compiles it.
++
++ PREG is a regex_t *. We do not expect any fields to be initialized,
++ since POSIX says we shouldn't. Thus, we set
++
++ `buffer' to the compiled pattern;
++ `used' to the length of the compiled pattern;
++ `syntax' to RE_SYNTAX_POSIX_EXTENDED if the
++ REG_EXTENDED bit in CFLAGS is set; otherwise, to
++ RE_SYNTAX_POSIX_BASIC;
++ `newline_anchor' to REG_NEWLINE being set in CFLAGS;
++ `fastmap' to an allocated space for the fastmap;
++ `fastmap_accurate' to zero;
++ `re_nsub' to the number of subexpressions in PATTERN.
++
++ PATTERN is the address of the pattern string.
++
++ CFLAGS is a series of bits which affect compilation.
++
++ If REG_EXTENDED is set, we use POSIX extended syntax; otherwise, we
++ use POSIX basic syntax.
++
++ If REG_NEWLINE is set, then . and [^...] don't match newline.
++ Also, regexec will try a match beginning after every newline.
++
++ If REG_ICASE is set, then we considers upper- and lowercase
++ versions of letters to be equivalent when matching.
++
++ If REG_NOSUB is set, then when PREG is passed to regexec, that
++ routine will report only success or failure, and nothing about the
++ registers.
++
++ It returns 0 if it succeeds, nonzero if it doesn't. (See regex.h for
++ the return codes and their meanings.) */
++
++int
++regcomp (regex_t *preg, const char *pattern, int cflags)
++{
++ reg_errcode_t ret;
++ reg_syntax_t syntax
++ = (cflags & REG_EXTENDED) ?
++ RE_SYNTAX_POSIX_EXTENDED : RE_SYNTAX_POSIX_BASIC;
++
++ /* regex_compile will allocate the space for the compiled pattern. */
++ preg->buffer = 0;
++ preg->allocated = 0;
++ preg->used = 0;
++
++ /* Try to allocate space for the fastmap. */
++ preg->fastmap = (char *) malloc (1 << BYTEWIDTH);
++
++ if (cflags & REG_ICASE)
++ {
++ int i;
++
++ preg->translate
++ = (RE_TRANSLATE_TYPE) malloc (CHAR_SET_SIZE
++ * sizeof (*(RE_TRANSLATE_TYPE)0));
++ if (preg->translate == NULL)
++ return (int) REG_ESPACE;
++
++ /* Map uppercase characters to corresponding lowercase ones. */
++ for (i = 0; i < CHAR_SET_SIZE; i++)
++ preg->translate[i] = ISUPPER (i) ? TOLOWER (i) : i;
++ }
++ else
++ preg->translate = NULL;
++
++ /* If REG_NEWLINE is set, newlines are treated differently. */
++ if (cflags & REG_NEWLINE)
++ { /* REG_NEWLINE implies neither . nor [^...] match newline. */
++ syntax &= ~RE_DOT_NEWLINE;
++ syntax |= RE_HAT_LISTS_NOT_NEWLINE;
++ /* It also changes the matching behavior. */
++ preg->newline_anchor = 1;
++ }
++ else
++ preg->newline_anchor = 0;
++
++ preg->no_sub = !!(cflags & REG_NOSUB);
++
++ /* POSIX says a null character in the pattern terminates it, so we
++ can use strlen here in compiling the pattern. */
++# ifdef MBS_SUPPORT
++ if (MB_CUR_MAX != 1)
++ ret = wcs_regex_compile (pattern, strlen (pattern), syntax, preg);
++ else
++# endif
++ ret = byte_regex_compile (pattern, strlen (pattern), syntax, preg);
++
++ /* POSIX doesn't distinguish between an unmatched open-group and an
++ unmatched close-group: both are REG_EPAREN. */
++ if (ret == REG_ERPAREN) ret = REG_EPAREN;
++
++ if (ret == REG_NOERROR && preg->fastmap)
++ {
++ /* Compute the fastmap now, since regexec cannot modify the pattern
++ buffer. */
++ if (re_compile_fastmap (preg) == -2)
++ {
++ /* Some error occurred while computing the fastmap, just forget
++ about it. */
++ free (preg->fastmap);
++ preg->fastmap = NULL;
++ }
++ }
++
++ return (int) ret;
++}
++#ifdef _LIBC
++weak_alias (__regcomp, regcomp)
++#endif
++
++
++/* regexec searches for a given pattern, specified by PREG, in the
++ string STRING.
++
++ If NMATCH is zero or REG_NOSUB was set in the cflags argument to
++ `regcomp', we ignore PMATCH. Otherwise, we assume PMATCH has at
++ least NMATCH elements, and we set them to the offsets of the
++ corresponding matched substrings.
++
++ EFLAGS specifies `execution flags' which affect matching: if
++ REG_NOTBOL is set, then ^ does not match at the beginning of the
++ string; if REG_NOTEOL is set, then $ does not match at the end.
++
++ We return 0 if we find a match and REG_NOMATCH if not. */
++
++int
++regexec (const regex_t *preg, const char *string, size_t nmatch,
++ regmatch_t pmatch[], int eflags)
++{
++ int ret;
++ struct re_registers regs;
++ regex_t private_preg;
++ int len = strlen (string);
++ boolean want_reg_info = !preg->no_sub && nmatch > 0;
++
++ private_preg = *preg;
++
++ private_preg.not_bol = !!(eflags & REG_NOTBOL);
++ private_preg.not_eol = !!(eflags & REG_NOTEOL);
++
++ /* The user has told us exactly how many registers to return
++ information about, via `nmatch'. We have to pass that on to the
++ matching routines. */
++ private_preg.regs_allocated = REGS_FIXED;
++
++ if (want_reg_info)
++ {
++ regs.num_regs = nmatch;
++ regs.start = TALLOC (nmatch * 2, regoff_t);
++ if (regs.start == NULL)
++ return (int) REG_NOMATCH;
++ regs.end = regs.start + nmatch;
++ }
++
++ /* Perform the searching operation. */
++ ret = re_search (&private_preg, string, len,
++ /* start: */ 0, /* range: */ len,
++ want_reg_info ? &regs : (struct re_registers *) 0);
++
++ /* Copy the register information to the POSIX structure. */
++ if (want_reg_info)
++ {
++ if (ret >= 0)
++ {
++ unsigned r;
++
++ for (r = 0; r < nmatch; r++)
++ {
++ pmatch[r].rm_so = regs.start[r];
++ pmatch[r].rm_eo = regs.end[r];
++ }
++ }
++
++ /* If we needed the temporary register info, free the space now. */
++ free (regs.start);
++ }
++
++ /* We want zero return to mean success, unlike `re_search'. */
++ return ret >= 0 ? (int) REG_NOERROR : (int) REG_NOMATCH;
++}
++#ifdef _LIBC
++/* EGLIBC: This is handled in regexec-compat.c. */
++/*weak_alias (__regexec, regexec)*/
++#include "regexec-compat.c"
++#endif
++
++
++/* Returns a message corresponding to an error code, ERRCODE, returned
++ from either regcomp or regexec. We don't use PREG here. */
++
++size_t
++regerror (int errcode, const regex_t *preg __attribute__ ((unused)),
++ char *errbuf, size_t errbuf_size)
++{
++ const char *msg;
++ size_t msg_size;
++
++ if (errcode < 0
++ || errcode >= (int) (sizeof (re_error_msgid)
++ / sizeof (re_error_msgid[0])))
++ /* Only error codes returned by the rest of the code should be passed
++ to this routine. If we are given anything else, or if other regex
++ code generates an invalid error code, then the program has a bug.
++ Dump core so we can fix it. */
++ abort ();
++
++ msg = gettext (re_error_msgid[errcode]);
++
++ msg_size = strlen (msg) + 1; /* Includes the null. */
++
++ if (errbuf_size != 0)
++ {
++ if (msg_size > errbuf_size)
++ {
++#if defined HAVE_MEMPCPY || defined _LIBC
++ *((char *) mempcpy (errbuf, msg, errbuf_size - 1)) = '\0';
++#else
++ memcpy (errbuf, msg, errbuf_size - 1);
++ errbuf[errbuf_size - 1] = 0;
++#endif
++ }
++ else
++ memcpy (errbuf, msg, msg_size);
++ }
++
++ return msg_size;
++}
++#ifdef _LIBC
++weak_alias (__regerror, regerror)
++#endif
++
++
++/* Free dynamically allocated space used by PREG. */
++
++void
++regfree (regex_t *preg)
++{
++ if (preg->buffer != NULL)
++ free (preg->buffer);
++ preg->buffer = NULL;
++
++ preg->allocated = 0;
++ preg->used = 0;
++
++ if (preg->fastmap != NULL)
++ free (preg->fastmap);
++ preg->fastmap = NULL;
++ preg->fastmap_accurate = 0;
++
++ if (preg->translate != NULL)
++ free (preg->translate);
++ preg->translate = NULL;
++}
++#ifdef _LIBC
++weak_alias (__regfree, regfree)
++#endif
++
++#endif /* not emacs */
++
++#endif /* not INSIDE_RECURSION */
++
++
++#undef STORE_NUMBER
++#undef STORE_NUMBER_AND_INCR
++#undef EXTRACT_NUMBER
++#undef EXTRACT_NUMBER_AND_INCR
++
++#undef DEBUG_PRINT_COMPILED_PATTERN
++#undef DEBUG_PRINT_DOUBLE_STRING
++
++#undef INIT_FAIL_STACK
++#undef RESET_FAIL_STACK
++#undef DOUBLE_FAIL_STACK
++#undef PUSH_PATTERN_OP
++#undef PUSH_FAILURE_POINTER
++#undef PUSH_FAILURE_INT
++#undef PUSH_FAILURE_ELT
++#undef POP_FAILURE_POINTER
++#undef POP_FAILURE_INT
++#undef POP_FAILURE_ELT
++#undef DEBUG_PUSH
++#undef DEBUG_POP
++#undef PUSH_FAILURE_POINT
++#undef POP_FAILURE_POINT
++
++#undef REG_UNSET_VALUE
++#undef REG_UNSET
++
++#undef PATFETCH
++#undef PATFETCH_RAW
++#undef PATUNFETCH
++#undef TRANSLATE
++
++#undef INIT_BUF_SIZE
++#undef GET_BUFFER_SPACE
++#undef BUF_PUSH
++#undef BUF_PUSH_2
++#undef BUF_PUSH_3
++#undef STORE_JUMP
++#undef STORE_JUMP2
++#undef INSERT_JUMP
++#undef INSERT_JUMP2
++#undef EXTEND_BUFFER
++#undef GET_UNSIGNED_NUMBER
++#undef FREE_STACK_RETURN
++
++# undef POINTER_TO_OFFSET
++# undef MATCHING_IN_FRST_STRING
++# undef PREFETCH
++# undef AT_STRINGS_BEG
++# undef AT_STRINGS_END
++# undef WORDCHAR_P
++# undef FREE_VAR
++# undef FREE_VARIABLES
++# undef NO_HIGHEST_ACTIVE_REG
++# undef NO_LOWEST_ACTIVE_REG
++
++# undef CHAR_T
++# undef UCHAR_T
++# undef COMPILED_BUFFER_VAR
++# undef OFFSET_ADDRESS_SIZE
++# undef CHAR_CLASS_SIZE
++# undef PREFIX
++# undef ARG_PREFIX
++# undef PUT_CHAR
++# undef BYTE
++# undef WCHAR
++
++# define DEFINED_ONCE
+Index: git/pwd/Makefile
+===================================================================
+--- git.orig/pwd/Makefile 2014-08-29 20:00:53.316070587 -0700
++++ git/pwd/Makefile 2014-08-29 20:01:15.232070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Sub-makefile for pwd portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := pwd
+
+ include ../Makeconfig
+Index: git/resolv/Makefile
+===================================================================
+--- git.orig/resolv/Makefile 2014-08-29 20:00:53.320070587 -0700
++++ git/resolv/Makefile 2014-08-29 20:01:15.232070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Sub-makefile for resolv portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := resolv
+
+ include ../Makeconfig
+@@ -27,20 +29,21 @@
+ arpa/nameser.h arpa/nameser_compat.h \
+ sys/bitypes.h
+
+-routines := herror inet_addr inet_ntop inet_pton nsap_addr res_init \
+- res_hconf res_libc res-state
++routines-$(OPTION_EGLIBC_INET) \
++ += herror inet_addr inet_ntop inet_pton nsap_addr res_init \
++ res_hconf res_libc res-state
+
+-tests = tst-aton tst-leaks tst-inet_ntop
+-xtests = tst-leaks2
++tests-$(OPTION_EGLIBC_INET) += tst-aton tst-leaks tst-inet_ntop
++xtests-$(OPTION_EGLIBC_INET) += tst-leaks2
+
+ generate := mtrace-tst-leaks.out tst-leaks.mtrace tst-leaks2.mtrace
+
+-extra-libs := libresolv libnss_dns
++extra-libs-$(OPTION_EGLIBC_INET) += libresolv libnss_dns
+ ifeq ($(have-thread-library),yes)
+-extra-libs += libanl
+-routines += gai_sigqueue
++extra-libs-$(OPTION_EGLIBC_INET_ANL) += libanl
++routines-$(OPTION_EGLIBC_INET) += gai_sigqueue
+ endif
+-extra-libs-others = $(extra-libs)
++extra-libs-others-y += $(extra-libs-y)
+ libresolv-routines := gethnamaddr res_comp res_debug \
+ res_data res_mkquery res_query res_send \
+ inet_net_ntop inet_net_pton inet_neta base64 \
+@@ -60,7 +63,7 @@
+ static-only-routines += $(libnss_dns-routines) $(libresolv-routines)
+ endif
+
+-ifeq (yesyes,$(build-shared)$(have-thread-library))
++ifeq (yesyesy,$(build-shared)$(have-thread-library)$(OPTION_EGLIBC_INET_ANL))
+ tests: $(objpfx)ga_test
+ endif
+
+Index: git/stdio-common/fxprintf.c
+===================================================================
+--- git.orig/stdio-common/fxprintf.c 2014-08-29 20:00:53.544070587 -0700
++++ git/stdio-common/fxprintf.c 2014-08-29 20:01:15.232070587 -0700
+@@ -23,6 +23,7 @@
+ #include <wchar.h>
+ #include <string.h>
+ #include <libioP.h>
++#include <gnu/option-groups.h>
+
+
+ int
+@@ -37,6 +38,7 @@
+ int res;
+ if (_IO_fwide (fp, 0) > 0)
+ {
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ size_t len = strlen (fmt) + 1;
+ wchar_t wfmt[len];
+ for (size_t i = 0; i < len; ++i)
+@@ -45,6 +47,9 @@
+ wfmt[i] = fmt[i];
+ }
+ res = __vfwprintf (fp, wfmt, ap);
++#else
++ abort();
++#endif
+ }
+ else
+ res = _IO_vfprintf (fp, fmt, ap);
+Index: git/stdio-common/_i18n_number.h
+===================================================================
+--- git.orig/stdio-common/_i18n_number.h 2014-08-29 20:00:53.500070587 -0700
++++ git/stdio-common/_i18n_number.h 2014-08-29 20:01:15.232070587 -0700
+@@ -19,10 +19,13 @@
+ #include <stdbool.h>
+ #include <wchar.h>
+ #include <wctype.h>
++#include <gnu/option-groups.h>
+
+ #include "../locale/outdigits.h"
+ #include "../locale/outdigitswc.h"
+
++#if __OPTION_EGLIBC_LOCALE_CODE
++
+ static CHAR_T *
+ _i18n_number_rewrite (CHAR_T *w, CHAR_T *rear_ptr, CHAR_T *end)
+ {
+@@ -115,3 +118,13 @@
+
+ return w;
+ }
++
++#else
++
++static CHAR_T *
++_i18n_number_rewrite (CHAR_T *w, CHAR_T *rear_ptr, CHAR_T *end)
++{
++ return w;
++}
++
++#endif
+Index: git/stdio-common/Makefile
+===================================================================
+--- git.orig/stdio-common/Makefile 2014-08-29 20:00:53.500070587 -0700
++++ git/stdio-common/Makefile 2014-08-29 20:01:15.232070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Specific makefile for stdio-common.
+ #
++include ../option-groups.mak
++
+ subdir := stdio-common
+
+ include ../Makeconfig
+@@ -30,7 +32,7 @@
+ vfprintf vprintf printf_fp reg-printf printf-prs printf_fphex \
+ reg-modifier reg-type \
+ printf_size fprintf printf snprintf sprintf asprintf dprintf \
+- vfwprintf vfscanf vfwscanf \
++ vfscanf \
+ fscanf scanf sscanf \
+ perror psignal \
+ tmpfile tmpfile64 tmpnam tmpnam_r tempnam tempname \
+@@ -41,23 +43,37 @@
+ isoc99_vsscanf \
+ psiginfo
+
+-aux := errlist siglist printf-parsemb printf-parsewc fxprintf
++# Ideally, _itowa and itowa-digits would be in this option group as
++# well, but it is used unconditionally by printf_fp and printf_fphex,
++# and it didn't seem straightforward to disentangle it.
++routines-$(OPTION_POSIX_C_LANG_WIDE_CHAR) += \
++ vfwprintf vfwscanf
++
++aux := errlist siglist printf-parsemb fxprintf
++aux-$(OPTION_POSIX_C_LANG_WIDE_CHAR) += printf-parsewc
+
+ tests := tstscanf test_rdwr test-popen tstgetln test-fseek \
+ temptest tst-fileno test-fwrite tst-ungetc tst-ferror \
+ xbug errnobug \
+ bug1 bug2 bug3 bug4 bug5 bug6 bug7 bug8 bug9 bug10 bug11 bug12 bug13 \
+- tfformat tiformat tllformat tstdiomisc tst-printfsz tst-wc-printf \
++ tfformat tiformat tllformat tstdiomisc tst-printfsz \
+ scanf1 scanf2 scanf3 scanf4 scanf5 scanf7 scanf8 scanf9 scanf10 \
+- scanf11 scanf12 tst-tmpnam tst-cookie tst-obprintf tst-sscanf \
+- tst-swprintf tst-fseek tst-fmemopen test-vfprintf tst-gets \
+- tst-perror tst-sprintf tst-rndseek tst-fdopen tst-fphex bug14 \
++ scanf11 scanf12 tst-tmpnam tst-cookie tst-obprintf \
++ scanf11 scanf12 tst-tmpnam tst-cookie tst-obprintf \
++ tst-fseek tst-fmemopen tst-gets \
++ tst-sprintf tst-rndseek tst-fdopen tst-fphex \
+ tst-popen tst-unlockedio tst-fmemopen2 tst-put-error tst-fgets \
+- tst-fwrite bug16 bug17 tst-swscanf tst-sprintf2 bug18 bug18a \
+- bug19 bug19a tst-popen2 scanf13 scanf14 scanf15 bug20 bug21 bug22 \
+- scanf16 scanf17 tst-setvbuf1 tst-grouping bug23 bug24 \
+- bug-vfprintf-nargs tst-long-dbl-fphex tst-fphex-wide tst-sprintf3 \
+- bug25 tst-printf-round bug26
++ tst-fwrite bug16 bug17 tst-sprintf2 bug18 \
++ bug19 tst-popen2 scanf14 scanf15 bug21 bug22 scanf16 scanf17 \
++ tst-setvbuf1 bug23 bug24 bug-vfprintf-nargs tst-sprintf3 bug25 \
++ tst-printf-round bug26
++
++tests-$(OPTION_EGLIBC_LOCALE_CODE) \
++ += tst-sscanf tst-swprintf test-vfprintf bug14 scanf13 tst-grouping
++tests-$(OPTION_POSIX_WIDE_CHAR_DEVICE_IO) \
++ += tst-perror bug19a bug20 tst-long-dbl-fphex tst-fphex-wide
++tests-$(OPTION_POSIX_C_LANG_WIDE_CHAR) \
++ += bug18a tst-swscanf tst-wc-printf
+
+ test-srcs = tst-unbputc tst-printf
+
+Index: git/stdio-common/printf_fp.c
+===================================================================
+--- git.orig/stdio-common/printf_fp.c 2014-08-29 20:00:53.548070587 -0700
++++ git/stdio-common/printf_fp.c 2014-08-29 20:01:15.232070587 -0700
+@@ -39,6 +39,7 @@
+ #include <unistd.h>
+ #include <stdlib.h>
+ #include <wchar.h>
++#include <gnu/option-groups.h>
+ #include <stdbool.h>
+ #include <rounding-mode.h>
+
+@@ -148,6 +149,10 @@
+ wchar_t thousands_sep, int ngroups)
+ internal_function;
+
++/* Ideally, when OPTION_EGLIBC_LOCALE_CODE is disabled, this should do
++ all its work in ordinary characters, rather than doing it in wide
++ characters and then converting at the end. But that is a challenge
++ for another day. */
+
+ int
+ ___printf_fp (FILE *fp,
+@@ -206,7 +211,14 @@
+ mp_limb_t cy;
+
+ /* Nonzero if this is output on a wide character stream. */
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
+ int wide = info->wide;
++#else
++ /* This should never be called on a wide-oriented stream when
++ OPTION_POSIX_C_LANG_WIDE_CHAR is disabled, but the compiler can't
++ be trusted to figure that out. */
++ const int wide = 0;
++#endif
+
+ /* Buffer in which we produce the output. */
+ wchar_t *wbuffer = NULL;
+@@ -258,6 +270,7 @@
+
+
+ /* Figure out the decimal point character. */
++#if __OPTION_EGLIBC_LOCALE_CODE
+ if (info->extra == 0)
+ {
+ decimal = _NL_CURRENT (LC_NUMERIC, DECIMAL_POINT);
+@@ -277,7 +290,13 @@
+ /* The decimal point character must not be zero. */
+ assert (*decimal != '\0');
+ assert (decimalwc != L'\0');
++#else
++ /* Hard-code values from 'C' locale. */
++ decimal = ".";
++ decimalwc = L'.';
++#endif
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ if (info->group)
+ {
+ if (info->extra == 0)
+@@ -321,6 +340,9 @@
+ }
+ else
+ grouping = NULL;
++#else
++ grouping = NULL;
++#endif
+
+ /* Fetch the argument value. */
+ #ifndef __NO_LONG_DOUBLE_MATH
+Index: git/stdio-common/printf_fphex.c
+===================================================================
+--- git.orig/stdio-common/printf_fphex.c 2014-08-29 20:00:53.548070587 -0700
++++ git/stdio-common/printf_fphex.c 2014-08-29 20:01:15.232070587 -0700
+@@ -28,6 +28,7 @@
+ #include <_itoa.h>
+ #include <_itowa.h>
+ #include <locale/localeinfo.h>
++#include <gnu/option-groups.h>
+ #include <stdbool.h>
+ #include <rounding-mode.h>
+
+@@ -139,10 +140,18 @@
+ int done = 0;
+
+ /* Nonzero if this is output on a wide character stream. */
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
+ int wide = info->wide;
++#else
++ /* This should never be called on a wide-oriented stream when
++ OPTION_POSIX_C_LANG_WIDE_CHAR is disabled, but the compiler can't
++ be trusted to figure that out. */
++ const int wide = 0;
++#endif
+
+
+ /* Figure out the decimal point character. */
++#if __OPTION_EGLIBC_LOCALE_CODE
+ if (info->extra == 0)
+ {
+ decimal = _NL_CURRENT (LC_NUMERIC, DECIMAL_POINT);
+@@ -156,6 +165,10 @@
+ }
+ /* The decimal point character must never be zero. */
+ assert (*decimal != '\0' && decimalwc != L'\0');
++#else
++ decimal = ".";
++ decimalwc = L'.';
++#endif
+
+
+ /* Fetch the argument value. */
+Index: git/stdio-common/printf_size.c
+===================================================================
+--- git.orig/stdio-common/printf_size.c 2014-08-29 20:00:53.548070587 -0700
++++ git/stdio-common/printf_size.c 2014-08-29 20:01:15.232070587 -0700
+@@ -23,6 +23,7 @@
+ #include <math.h>
+ #include <printf.h>
+ #include <libioP.h>
++#include <gnu/option-groups.h>
+
+
+ /* This defines make it possible to use the same code for GNU C library and
+@@ -116,7 +117,14 @@
+
+ struct printf_info fp_info;
+ int done = 0;
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
+ int wide = info->wide;
++#else
++ /* This should never be called on a wide-oriented stream when
++ OPTION_POSIX_C_LANG_WIDE_CHAR is disabled, but the compiler can't
++ be trusted to figure that out. */
++ const int wide = 0;
++#endif
+ int res;
+
+ /* Fetch the argument value. */
+Index: git/stdio-common/scanf14.c
+===================================================================
+--- git.orig/stdio-common/scanf14.c 2014-08-29 20:00:53.548070587 -0700
++++ git/stdio-common/scanf14.c 2014-08-29 20:01:15.232070587 -0700
+@@ -2,6 +2,7 @@
+ #include <stdlib.h>
+ #include <string.h>
+ #include <wchar.h>
++#include <gnu/option-groups.h>
+
+ #define FAIL() \
+ do { \
+@@ -36,6 +37,7 @@
+ FAIL ();
+ else if (d != 2.25 || memcmp (c, " x", 2) != 0)
+ FAIL ();
++#if __OPTION_EGLIBC_LOCALE_CODE
+ if (sscanf (" 3.25S x", "%4aS%3c", &lsp, c) != 2)
+ FAIL ();
+ else
+@@ -45,6 +47,7 @@
+ memset (lsp, 'x', sizeof L"3.25");
+ free (lsp);
+ }
++#endif
+ if (sscanf ("4.25[0-9.] x", "%a[0-9.]%8c", &sp, c) != 2)
+ FAIL ();
+ else
+Index: git/stdio-common/tstdiomisc.c
+===================================================================
+--- git.orig/stdio-common/tstdiomisc.c 2014-08-29 20:00:53.584070587 -0700
++++ git/stdio-common/tstdiomisc.c 2014-08-29 20:01:15.232070587 -0700
+@@ -3,6 +3,7 @@
+ #include <stdio.h>
+ #include <string.h>
+ #include <wchar.h>
++#include <gnu/option-groups.h>
+
+ static int
+ t1 (void)
+@@ -125,6 +126,7 @@
+ printf ("expected \"-inf -INF -inf -INF -inf -INF -inf -INF\", got \"%s\"\n",
+ buf);
+
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
+ swprintf (wbuf, sizeof wbuf / sizeof (wbuf[0]), L"%a %A %e %E %f %F %g %G",
+ qnanval, qnanval, qnanval, qnanval,
+ qnanval, qnanval, qnanval, qnanval);
+@@ -162,6 +164,7 @@
+ result |= wcscmp (wbuf, L"-inf -INF -inf -INF -inf -INF -inf -INF") != 0;
+ printf ("expected L\"-inf -INF -inf -INF -inf -INF -inf -INF\", got L\"%S\"\n",
+ wbuf);
++#endif /* __OPTION_POSIX_C_LANG_WIDE_CHAR */
+
+ lqnanval = NAN;
+
+@@ -206,6 +209,7 @@
+ printf ("expected \"-inf -INF -inf -INF -inf -INF -inf -INF\", got \"%s\"\n",
+ buf);
+
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
+ swprintf (wbuf, sizeof wbuf / sizeof (wbuf[0]),
+ L"%La %LA %Le %LE %Lf %LF %Lg %LG",
+ lqnanval, lqnanval, lqnanval, lqnanval,
+@@ -250,6 +254,7 @@
+ result |= wcscmp (wbuf, L"-inf -INF -inf -INF -inf -INF -inf -INF") != 0;
+ printf ("expected L\"-inf -INF -inf -INF -inf -INF -inf -INF\", got L\"%S\"\n",
+ wbuf);
++#endif /* __OPTION_POSIX_C_LANG_WIDE_CHAR */
+
+ return result;
+ }
+Index: git/stdio-common/tst-popen.c
+===================================================================
+--- git.orig/stdio-common/tst-popen.c 2014-08-29 20:00:53.576070587 -0700
++++ git/stdio-common/tst-popen.c 2014-08-29 20:01:15.232070587 -0700
+@@ -19,6 +19,7 @@
+ #include <stdio.h>
+ #include <string.h>
+ #include <wchar.h>
++#include <gnu/option-groups.h>
+
+ static int
+ do_test (void)
+@@ -34,12 +35,14 @@
+ return 1;
+ }
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ /* POSIX says that pipe streams are byte-oriented. */
+ if (fwide (f, 0) >= 0)
+ {
+ puts ("popen did not return byte-oriented stream");
+ result = 1;
+ }
++#endif
+
+ if (getline (&line, &len, f) != 5)
+ {
+Index: git/stdio-common/tst-sprintf.c
+===================================================================
+--- git.orig/stdio-common/tst-sprintf.c 2014-08-29 20:00:53.580070587 -0700
++++ git/stdio-common/tst-sprintf.c 2014-08-29 20:01:15.236070587 -0700
+@@ -2,6 +2,7 @@
+ #include <stdlib.h>
+ #include <locale.h>
+ #include <string.h>
++#include <gnu/option-groups.h>
+
+
+ int
+@@ -10,12 +11,14 @@
+ char buf[100];
+ int result = 0;
+
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
+ if (sprintf (buf, "%.0ls", L"foo") != 0
+ || strlen (buf) != 0)
+ {
+ puts ("sprintf (buf, \"%.0ls\", L\"foo\") produced some output");
+ result = 1;
+ }
++#endif /* __OPTION_POSIX_C_LANG_WIDE_CHAR */
+
+ #define SIZE (1024*70000)
+ #define STR(x) #x
+Index: git/stdio-common/vfprintf.c
+===================================================================
+--- git.orig/stdio-common/vfprintf.c 2014-08-29 20:00:53.588070587 -0700
++++ git/stdio-common/vfprintf.c 2014-08-29 20:01:15.236070587 -0700
+@@ -29,6 +29,7 @@
+ #include <_itoa.h>
+ #include <locale/localeinfo.h>
+ #include <stdio.h>
++#include <gnu/option-groups.h>
+
+ /* This code is shared between the standard stdio implementation found
+ in GNU C library and the libio implementation originally found in
+@@ -138,6 +139,18 @@
+ # define EOF WEOF
+ #endif
+
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
++# define MULTIBYTE_SUPPORT (1)
++#else
++# define MULTIBYTE_SUPPORT (0)
++#endif
++
++#if __OPTION_EGLIBC_LOCALE_CODE
++# define LOCALE_SUPPORT (1)
++#else
++# define LOCALE_SUPPORT (0)
++#endif
++
+ #include "_i18n_number.h"
+
+ /* Include the shared code for parsing the format string. */
+@@ -1123,8 +1136,11 @@
+ # define process_string_arg(fspec) \
+ LABEL (form_character): \
+ /* Character. */ \
+- if (is_long) \
+- goto LABEL (form_wcharacter); \
++ if (is_long) \
++ { \
++ assert (MULTIBYTE_SUPPORT); \
++ goto LABEL (form_wcharacter); \
++ } \
+ --width; /* Account for the character itself. */ \
+ if (!left) \
+ PAD (' '); \
+@@ -1137,6 +1153,7 @@
+ break; \
+ \
+ LABEL (form_wcharacter): \
++ assert (MULTIBYTE_SUPPORT); \
+ { \
+ /* Wide character. */ \
+ char buf[MB_CUR_MAX]; \
+@@ -1203,6 +1220,7 @@
+ } \
+ else \
+ { \
++ assert (MULTIBYTE_SUPPORT); \
+ const wchar_t *s2 = (const wchar_t *) string; \
+ mbstate_t mbstate; \
+ \
+@@ -1403,7 +1421,9 @@
+ LABEL (flag_quote):
+ group = 1;
+
+- if (grouping == (const char *) -1)
++ if (! LOCALE_SUPPORT)
++ grouping = NULL;
++ else if (grouping == (const char *) -1)
+ {
+ #ifdef COMPILE_WPRINTF
+ thousands_sep = _NL_CURRENT_WORD (LC_NUMERIC,
+@@ -1702,7 +1722,9 @@
+ free (workstart);
+ workstart = NULL;
+
+- if (grouping == (const char *) -1)
++ if (! LOCALE_SUPPORT)
++ grouping = NULL;
++ else if (grouping == (const char *) -1)
+ {
+ #ifdef COMPILE_WPRINTF
+ thousands_sep = _NL_CURRENT_WORD (LC_NUMERIC,
+Index: git/stdio-common/vfscanf.c
+===================================================================
+--- git.orig/stdio-common/vfscanf.c 2014-08-29 20:00:53.588070587 -0700
++++ git/stdio-common/vfscanf.c 2014-08-29 20:01:15.236070587 -0700
+@@ -29,6 +29,7 @@
+ #include <wctype.h>
+ #include <bits/libc-lock.h>
+ #include <locale/localeinfo.h>
++#include <gnu/option-groups.h>
+
+ #ifdef __GNUC__
+ # define HAVE_LONGLONG
+@@ -133,6 +134,12 @@
+ # define WINT_T int
+ #endif
+
++#if __OPTION_POSIX_C_LANG_WIDE_CHAR
++# define MULTIBYTE_SUPPORT (1)
++#else
++# define MULTIBYTE_SUPPORT (0)
++#endif
++
+ #define encode_error() do { \
+ errval = 4; \
+ __set_errno (EILSEQ); \
+@@ -316,24 +323,35 @@
+ ARGCHECK (s, format);
+
+ {
+-#ifndef COMPILE_WSCANF
++#if __OPTION_EGLIBC_LOCALE_CODE && !defined (COMPILE_WSCANF)
+ struct __locale_data *const curnumeric = loc->__locales[LC_NUMERIC];
+ #endif
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ /* Figure out the decimal point character. */
+-#ifdef COMPILE_WSCANF
++# ifdef COMPILE_WSCANF
+ decimal = _NL_CURRENT_WORD (LC_NUMERIC, _NL_NUMERIC_DECIMAL_POINT_WC);
+-#else
++# else
+ decimal = curnumeric->values[_NL_ITEM_INDEX (DECIMAL_POINT)].string;
+-#endif
++# endif
+ /* Figure out the thousands separator character. */
+-#ifdef COMPILE_WSCANF
++# ifdef COMPILE_WSCANF
+ thousands = _NL_CURRENT_WORD (LC_NUMERIC, _NL_NUMERIC_THOUSANDS_SEP_WC);
+-#else
++# else
+ thousands = curnumeric->values[_NL_ITEM_INDEX (THOUSANDS_SEP)].string;
+ if (*thousands == '\0')
+ thousands = NULL;
+-#endif
++# endif
++#else /* if ! __OPTION_EGLIBC_LOCALE_CODE */
++ /* Hard-code values from the C locale. */
++# ifdef COMPILE_WSCANF
++ decimal = L'.';
++ thousands = L'\0';
++# else
++ decimal = ".";
++ thousands = NULL;
++# endif
++#endif /* __OPTION_EGLIBC_LOCALE_CODE */
+ }
+
+ /* Lock the stream. */
+@@ -385,6 +403,8 @@
+ #ifndef COMPILE_WSCANF
+ if (!isascii ((unsigned char) *f))
+ {
++ assert (MULTIBYTE_SUPPORT);
++
+ /* Non-ASCII, may be a multibyte. */
+ int len = __mbrlen (f, strlen (f), &state);
+ if (len > 0)
+@@ -830,6 +850,8 @@
+ }
+ /* FALLTHROUGH */
+ case L_('C'):
++ assert (MULTIBYTE_SUPPORT);
++
+ if (width == -1)
+ width = 1;
+
+@@ -1172,6 +1194,8 @@
+ /* FALLTHROUGH */
+
+ case L_('S'):
++ assert (MULTIBYTE_SUPPORT);
++
+ {
+ #ifndef COMPILE_WSCANF
+ mbstate_t cstate;
+@@ -1419,10 +1443,17 @@
+ const char *mbdigits[10];
+ const char *mbdigits_extended[10];
+ #endif
++#if __OPTION_EGLIBC_LOCALE_CODE
+ /* "to_inpunct" is a map from ASCII digits to their
+ equivalent in locale. This is defined for locales
+ which use an extra digits set. */
+ wctrans_t map = __wctrans ("to_inpunct");
++#else
++ /* This will always be the case when
++ OPTION_EGLIBC_LOCALE_CODE is disabled, but the
++ compiler can't figure that out. */
++ wctrans_t map = NULL;
++#endif
+ int n;
+
+ from_level = 0;
+@@ -2088,6 +2119,7 @@
+ --width;
+ }
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ wctrans_t map;
+ if (__builtin_expect ((flags & I18N) != 0, 0)
+ /* Hexadecimal floats make no sense, fixing localized
+@@ -2304,6 +2336,7 @@
+ ;
+ #endif
+ }
++#endif /* __OPTION_EGLIBC_LOCALE_CODE */
+
+ /* Have we read any character? If we try to read a number
+ in hexadecimal notation and we have read only the `0x'
+@@ -2343,7 +2376,10 @@
+
+ case L_('['): /* Character class. */
+ if (flags & LONG)
+- STRING_ARG (wstr, wchar_t, 100);
++ {
++ assert (MULTIBYTE_SUPPORT);
++ STRING_ARG (wstr, wchar_t, 100);
++ }
+ else
+ STRING_ARG (str, char, 100);
+
+@@ -2417,6 +2453,7 @@
+ if (flags & LONG)
+ {
+ size_t now = read_in;
++ assert (MULTIBYTE_SUPPORT);
+ #ifdef COMPILE_WSCANF
+ if (__glibc_unlikely (inchar () == WEOF))
+ input_error ();
+Index: git/stdlib/Makefile
+===================================================================
+--- git.orig/stdlib/Makefile 2014-08-29 20:00:53.588070587 -0700
++++ git/stdlib/Makefile 2014-08-29 20:01:15.236070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Makefile for stdlib routines
+ #
++include ../option-groups.mak
++
+ subdir := stdlib
+
+ include ../Makeconfig
+@@ -30,7 +32,7 @@
+ alloca.h fmtmsg.h \
+ bits/stdlib-bsearch.h
+
+-routines := \
++routines-y := \
+ atof atoi atol atoll \
+ abort \
+ bsearch qsort msort \
+@@ -39,7 +41,6 @@
+ quick_exit at_quick_exit cxa_at_quick_exit cxa_thread_atexit_impl \
+ abs labs llabs \
+ div ldiv lldiv \
+- mblen mbstowcs mbtowc wcstombs wctomb \
+ random random_r rand rand_r \
+ drand48 erand48 lrand48 nrand48 mrand48 jrand48 \
+ srand48 seed48 lcong48 \
+@@ -52,9 +53,18 @@
+ strtof_l strtod_l strtold_l \
+ system canonicalize \
+ a64l l64a \
+- rpmatch strfmon strfmon_l getsubopt xpg_basename fmtmsg \
+- strtoimax strtoumax wcstoimax wcstoumax \
++ getsubopt xpg_basename \
++ strtoimax strtoumax \
+ getcontext setcontext makecontext swapcontext
++routines-$(OPTION_EGLIBC_LOCALE_CODE) += \
++ strfmon strfmon_l
++routines-$(OPTION_POSIX_C_LANG_WIDE_CHAR) += \
++ mblen mbstowcs mbtowc wcstombs wctomb \
++ wcstoimax wcstoumax
++ifeq (yy,$(OPTION_EGLIBC_LOCALE_CODE)$(OPTION_POSIX_REGEXP))
++routines-y += rpmatch
++endif
++routines-$(OPTION_EGLIBC_FMTMSG) += fmtmsg
+ aux = grouping groupingwc tens_in_limb
+
+ # These routines will be omitted from the libc shared object.
+@@ -62,20 +72,22 @@
+ # linked against when the shared library will be used.
+ static-only-routines = atexit at_quick_exit
+
+-test-srcs := tst-fmtmsg
+-tests := tst-strtol tst-strtod testmb testrand testsort testdiv \
++test-srcs-$(OPTION_EGLIBC_FMTMSG) := tst-fmtmsg
++tests := tst-strtol tst-strtod testrand testsort testdiv \
+ test-canon test-canon2 tst-strtoll tst-environ \
+ tst-xpg-basename tst-random tst-random2 tst-bsearch \
+ tst-limits tst-rand48 bug-strtod tst-setcontext \
+- test-a64l tst-qsort tst-system testmb2 bug-strtod2 \
+- tst-atof1 tst-atof2 tst-strtod2 tst-strtod3 tst-rand48-2 \
+- tst-makecontext tst-strtod4 tst-strtod5 tst-qsort2 \
+- tst-makecontext2 tst-strtod6 tst-unsetenv1 \
+- tst-makecontext3 bug-getcontext bug-fmtmsg1 \
++ test-a64l tst-qsort tst-system bug-strtod2 \
++ tst-atof1 tst-atof2 tst-strtod2 tst-rand48-2 \
++ tst-makecontext tst-qsort2 tst-makecontext2 tst-strtod6 \
++ tst-unsetenv1 tst-makecontext3 bug-getcontext bug-fmtmsg1 \
+ tst-secure-getenv tst-strtod-overflow tst-strtod-round \
+ tst-tininess tst-strtod-underflow tst-tls-atexit
+ tests-static := tst-secure-getenv
+-
++tests-$(OPTION_EGLIBC_LOCALE_CODE) \
++ += tst-strtod3 tst-strtod4 tst-strtod5 testmb2
++tests-$(OPTION_POSIX_C_LANG_WIDE_CHAR) \
++ += testmb
+ modules-names = tst-tls-atexit-lib
+
+ ifeq ($(build-shared),yes)
+@@ -115,8 +127,10 @@
+ tests-special += $(objpfx)isomac.out
+
+ ifeq ($(run-built-tests),yes)
++ifeq (y,$(OPTION_EGLIBC_FMTMSG))
+ tests-special += $(objpfx)tst-fmtmsg.out
+ endif
++endif
+
+ include ../Rules
+
+Index: git/stdlib/strtod_l.c
+===================================================================
+--- git.orig/stdlib/strtod_l.c 2014-08-29 20:00:53.648070587 -0700
++++ git/stdlib/strtod_l.c 2014-08-29 20:01:15.236070587 -0700
+@@ -17,6 +17,7 @@
+ License along with the GNU C Library; if not, see
+ <http://www.gnu.org/licenses/>. */
+
++#include <gnu/option-groups.h>
+ #include <xlocale.h>
+
+ extern double ____strtod_l_internal (const char *, char **, int, __locale_t);
+@@ -548,6 +549,7 @@
+ /* Used in several places. */
+ int cnt;
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ struct __locale_data *current = loc->__locales[LC_NUMERIC];
+
+ if (__glibc_unlikely (group))
+@@ -586,6 +588,17 @@
+ decimal_len = strlen (decimal);
+ assert (decimal_len > 0);
+ #endif
++#else /* if ! __OPTION_EGLIBC_LOCALE_CODE */
++ /* Hard-code values from the 'C' locale. */
++ grouping = NULL;
++#ifdef USE_WIDE_CHAR
++ decimal = L'.';
++# define decimal_len 1
++#else
++ decimal = ".";
++ decimal_len = 1;
++#endif
++#endif /* __OPTION_EGLIBC_LOCALE_CODE */
+
+ /* Prepare number representation. */
+ exponent = 0;
+Index: git/stdlib/tst-strtod.c
+===================================================================
+--- git.orig/stdlib/tst-strtod.c 2014-08-29 20:00:53.700070587 -0700
++++ git/stdlib/tst-strtod.c 2014-08-29 20:01:15.236070587 -0700
+@@ -23,6 +23,7 @@
+ #include <errno.h>
+ #include <string.h>
+ #include <math.h>
++#include <gnu/option-groups.h>
+
+ struct ltest
+ {
+@@ -176,7 +177,9 @@
+
+ status |= long_dbl ();
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ status |= locale_test ();
++#endif
+
+ return status ? EXIT_FAILURE : EXIT_SUCCESS;
+ }
+@@ -219,6 +222,7 @@
+ return 0;
+ }
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ /* Perform a few tests in a locale with thousands separators. */
+ static int
+ locale_test (void)
+@@ -276,3 +280,4 @@
+
+ return result;
+ }
++#endif /* __OPTION_EGLIBC_LOCALE_CODE */
+Index: git/streams/Makefile
+===================================================================
+--- git.orig/streams/Makefile 2014-08-29 20:00:53.712070587 -0700
++++ git/streams/Makefile 2014-08-29 20:01:15.236070587 -0700
+@@ -18,11 +18,14 @@
+ #
+ # Makefile for streams.
+ #
++include ../option-groups.mak
++
+ subdir := streams
+
+ include ../Makeconfig
+
+ headers = stropts.h sys/stropts.h bits/stropts.h bits/xtitypes.h
+-routines = isastream getmsg getpmsg putmsg putpmsg fattach fdetach
++routines-$(OPTION_EGLIBC_STREAMS) \
++ += isastream getmsg getpmsg putmsg putpmsg fattach fdetach
+
+ include ../Rules
+Index: git/string/Makefile
+===================================================================
+--- git.orig/string/Makefile 2014-08-29 20:00:53.716070587 -0700
++++ git/string/Makefile 2014-08-29 20:01:15.236070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Sub-makefile for string portion of library.
+ #
++include ../option-groups.mak
++
+ subdir := string
+
+ include ../Makeconfig
+@@ -39,10 +41,12 @@
+ $(addprefix argz-,append count create ctsep next \
+ delete extract insert stringify \
+ addsep replace) \
+- envz basename \
++ basename \
+ strcoll_l strxfrm_l string-inlines memrchr \
+ xpg-strerror strerror_l
+
++routines-$(OPTION_EGLIBC_ENVZ) += envz
++
+ strop-tests := memchr memcmp memcpy memmove mempcpy memset memccpy \
+ stpcpy stpncpy strcat strchr strcmp strcpy strcspn \
+ strlen strncmp strncpy strpbrk strrchr strspn memmem \
+@@ -51,10 +55,12 @@
+ tests := tester inl-tester noinl-tester testcopy test-ffs \
+ tst-strlen stratcliff tst-svc tst-inlcall \
+ bug-strncat1 bug-strspn1 bug-strpbrk1 tst-bswap \
+- tst-strtok tst-strxfrm bug-strcoll1 tst-strfry \
++ tst-strtok tst-strfry \
+ bug-strtok1 $(addprefix test-,$(strop-tests)) \
+- bug-envz1 tst-strxfrm2 tst-endian tst-svc2 \
+- tst-strtok_r
++ tst-strxfrm2 tst-endian tst-svc2 tst-strtok_r
++tests-$(OPTION_EGLIBC_ENVZ) += bug-envz1
++tests-$(OPTION_EGLIBC_LOCALE_CODE) \
++ += tst-strxfrm bug-strcoll1
+
+ xtests = tst-strcoll-overflow
+
+Index: git/string/strcoll_l.c
+===================================================================
+--- git.orig/string/strcoll_l.c 2014-08-29 20:00:53.744070587 -0700
++++ git/string/strcoll_l.c 2014-08-29 20:01:15.240070587 -0700
+@@ -25,6 +25,7 @@
+ #include <stdlib.h>
+ #include <string.h>
+ #include <sys/param.h>
++#include <gnu/option-groups.h>
+
+ #ifndef STRING_TYPE
+ # define STRING_TYPE char
+@@ -472,7 +473,11 @@
+ STRCOLL (const STRING_TYPE *s1, const STRING_TYPE *s2, __locale_t l)
+ {
+ struct __locale_data *current = l->__locales[LC_COLLATE];
++#if __OPTION_EGLIBC_LOCALE_CODE
+ uint_fast32_t nrules = current->values[_NL_ITEM_INDEX (_NL_COLLATE_NRULES)].word;
++#else
++ const uint_fast32_t nrules = 0;
++#endif
+ /* We don't assign the following values right away since it might be
+ unnecessary in case there are no rules. */
+ const unsigned char *rulesets;
+Index: git/string/strerror_l.c
+===================================================================
+--- git.orig/string/strerror_l.c 2014-08-29 20:00:53.744070587 -0700
++++ git/string/strerror_l.c 2014-08-29 20:01:15.240070587 -0700
+@@ -21,6 +21,7 @@
+ #include <stdlib.h>
+ #include <string.h>
+ #include <sys/param.h>
++#include <gnu/option-groups.h>
+
+
+ static __thread char *last_value;
+@@ -29,10 +30,14 @@
+ static const char *
+ translate (const char *str, locale_t loc)
+ {
++#if __OPTION_EGLIBC_LOCALE_CODE
+ locale_t oldloc = __uselocale (loc);
+ const char *res = _(str);
+ __uselocale (oldloc);
+ return res;
++#else
++ return str;
++#endif
+ }
+
+
+Index: git/string/strxfrm_l.c
+===================================================================
+--- git.orig/string/strxfrm_l.c 2014-08-29 20:00:53.748070587 -0700
++++ git/string/strxfrm_l.c 2014-08-29 20:01:15.240070587 -0700
+@@ -24,6 +24,7 @@
+ #include <stdlib.h>
+ #include <string.h>
+ #include <sys/param.h>
++#include <gnu/option-groups.h>
+
+ #ifndef STRING_TYPE
+ # define STRING_TYPE char
+@@ -85,7 +86,11 @@
+ STRXFRM (STRING_TYPE *dest, const STRING_TYPE *src, size_t n, __locale_t l)
+ {
+ struct __locale_data *current = l->__locales[LC_COLLATE];
++#if __OPTION_EGLIBC_LOCALE_CODE
+ uint_fast32_t nrules = current->values[_NL_ITEM_INDEX (_NL_COLLATE_NRULES)].word;
++#else
++ const uint_fast32_t nrules = 0;
++#endif
+ /* We don't assign the following values right away since it might be
+ unnecessary in case there are no rules. */
+ const unsigned char *rulesets;
+Index: git/string/test-strcmp.c
+===================================================================
+--- git.orig/string/test-strcmp.c 2014-08-29 20:00:53.752070587 -0700
++++ git/string/test-strcmp.c 2014-08-29 20:01:15.240070587 -0700
+@@ -329,34 +329,6 @@
+ FOR_EACH_IMPL (impl, 0)
+ check_result (impl, s1 + i1, s2 + i2, exp_result);
+ }
+-
+- /* Test cases where there are multiple zero bytes after the first. */
+-
+- for (size_t i = 0; i < 16 + 1; i++)
+- {
+- s1[i] = 0x00;
+- s2[i] = 0x00;
+- }
+-
+- for (size_t i = 0; i < 16; i++)
+- {
+- int exp_result;
+-
+- for (int val = 0x01; val < 0x100; val++)
+- {
+- for (size_t j = 0; j < i; j++)
+- {
+- s1[j] = val;
+- s2[j] = val;
+- }
+-
+- s2[i] = val;
+-
+- exp_result = SIMPLE_STRCMP (s1, s2);
+- FOR_EACH_IMPL (impl, 0)
+- check_result (impl, s1, s2, exp_result);
+- }
+- }
+ }
+
+
+Index: git/string/tst-strxfrm2.c
+===================================================================
+--- git.orig/string/tst-strxfrm2.c 2014-08-29 20:00:53.756070587 -0700
++++ git/string/tst-strxfrm2.c 2014-08-29 20:01:15.240070587 -0700
+@@ -1,6 +1,7 @@
+ #include <locale.h>
+ #include <stdio.h>
+ #include <string.h>
++#include <gnu/option-groups.h>
+
+ static int
+ do_test (void)
+@@ -38,6 +39,7 @@
+ res = 1;
+ }
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ if (setlocale (LC_ALL, "de_DE.UTF-8") == NULL)
+ {
+ puts ("setlocale failed");
+@@ -75,6 +77,7 @@
+ res = 1;
+ }
+ }
++#endif
+
+ return res;
+ }
+Index: git/string/tst-strxfrm.c
+===================================================================
+--- git.orig/string/tst-strxfrm.c 2014-08-29 20:00:53.756070587 -0700
++++ git/string/tst-strxfrm.c 2014-08-29 20:01:15.240070587 -0700
+@@ -3,6 +3,7 @@
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <string.h>
++#include <gnu/option-groups.h>
+
+
+ char const string[] = "";
+@@ -64,8 +65,10 @@
+ int result = 0;
+
+ result |= test ("C");
++#if __OPTION_EGLIBC_LOCALE_CODE
+ result |= test ("en_US.ISO-8859-1");
+ result |= test ("de_DE.UTF-8");
++#endif
+
+ return result;
+ }
+Index: git/sunrpc/Makefile
+===================================================================
+--- git.orig/sunrpc/Makefile 2014-08-29 20:00:53.760070587 -0700
++++ git/sunrpc/Makefile 2014-08-29 20:01:15.240070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Sub-makefile for sunrpc portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := sunrpc
+
+ include ../Makeconfig
+@@ -55,7 +57,6 @@
+ headers-not-in-tirpc = $(addprefix rpc/,key_prot.h rpc_des.h) \
+ $(rpcsvc:%=rpcsvc/%) rpcsvc/bootparam.h
+ headers = rpc/netdb.h
+-install-others = $(inst_sysconfdir)/rpc
+ generated += $(rpcsvc:%.x=rpcsvc/%.h) $(rpcsvc:%.x=x%.c) $(rpcsvc:%.x=x%.stmp) \
+ $(rpcsvc:%.x=rpcsvc/%.stmp) rpcgen
+ generated-dirs += rpcsvc
+@@ -65,18 +66,28 @@
+ endif
+
+ ifeq ($(build-shared),yes)
+-need-export-routines := auth_des auth_unix clnt_gen clnt_perr clnt_tcp \
++need-export-routines-$(OPTION_EGLIBC_SUNRPC) += \
++ auth_des auth_unix clnt_gen clnt_perr clnt_tcp \
+ clnt_udp get_myaddr key_call netname pm_getport \
+- rpc_thread svc svc_tcp svc_udp xcrypt xdr_array xdr \
++ rpc_thread svc svc_tcp svc_udp xdr_array xdr \
+ xdr_intXX_t xdr_mem xdr_ref xdr_sizeof xdr_stdio \
+ svc_run
+
+-routines := auth_none authuxprot bindrsvprt clnt_raw clnt_simp \
++need-export-routines-y += xcrypt
++
++need-export-routines := $(need-export-routines-y)
++
++routines-$(OPTION_EGLIBC_SUNRPC) \
++ += auth_none authuxprot bindrsvprt clnt_raw clnt_simp \
+ rpc_dtable getrpcport pmap_clnt pm_getmaps pmap_prot pmap_prot2 \
+ pmap_rmt rpc_prot rpc_common rpc_cmsg svc_auth svc_authux svc_raw \
+ svc_simple xdr_float xdr_rec publickey authdes_prot \
+- des_crypt des_impl des_soft key_prot openchild rtime svcauth_des \
+- clnt_unix svc_unix create_xid $(need-export-routines)
++ key_prot openchild rtime svcauth_des \
++ clnt_unix svc_unix create_xid
++
++# xdecrypt is also used by nss/nss_files/files-key.c.
++routines-y += des_crypt des_impl des_soft $(need-export-routines)
++
+ ifneq ($(link-obsolete-rpc),yes)
+ # We only add the RPC for compatibility to libc.so.
+ shared-only-routines = $(routines)
+@@ -85,25 +96,28 @@
+
+ # We do not build rpcinfo anymore. It is not needed for a bootstrap
+ # and not wanted on complete systems.
+-# others := rpcinfo
+-# install-sbin := rpcinfo
+-install-bin := rpcgen
++# others-$(OPTION_EGLIBC_SUNRPC) += rpcinfo
++# install-sbin-$(OPTION_EGLIBC_SUNRPC) += rpcinfo
++install-bin-$(OPTION_EGLIBC_SUNRPC) += rpcgen
+ rpcgen-objs = rpc_main.o rpc_hout.o rpc_cout.o rpc_parse.o \
+ rpc_scan.o rpc_util.o rpc_svcout.o rpc_clntout.o \
+ rpc_tblout.o rpc_sample.o
+-extra-objs = $(rpcgen-objs) $(addprefix cross-,$(rpcgen-objs))
+-others += rpcgen
++extra-objs-$(OPTION_EGLIBC_SUNRPC) = $(rpcgen-objs) $(addprefix cross-,$(rpcgen-objs))
++others-$(OPTION_EGLIBC_SUNRPC) += rpcgen
++
++install-others-$(OPTION_EGLIBC_SUNRPC) += $(inst_sysconfdir)/rpc
+
+-tests = tst-xdrmem tst-xdrmem2
+-xtests := tst-getmyaddr
++tests-$(OPTION_EGLIBC_SUNRPC) = tst-xdrmem tst-xdrmem2
++xtests-$(OPTION_EGLIBC_SUNRPC) := tst-getmyaddr
+
+ ifeq ($(have-thread-library),yes)
+-xtests += thrsvc
++xtests-$(OPTION_EGLIBC_SUNRPC) += thrsvc
+ endif
+
+ headers += $(rpcsvc:%.x=rpcsvc/%.h)
+-extra-libs := librpcsvc
+-extra-libs-others := librpcsvc # Make it in `others' pass, not `lib' pass.
++extra-libs-$(OPTION_EGLIBC_SUNRPC) += librpcsvc
++# Make it in `others' pass, not `lib' pass.
++extra-libs-others-y += $(extra-libs-y)
+ librpcsvc-routines = $(rpcsvc:%.x=x%)
+ librpcsvc-inhibit-o = .os # Build no shared rpcsvc library.
+ omit-deps = $(librpcsvc-routines)
+Index: git/sysdeps/generic/ldsodefs.h
+===================================================================
+--- git.orig/sysdeps/generic/ldsodefs.h 2014-08-29 20:00:53.904070587 -0700
++++ git/sysdeps/generic/ldsodefs.h 2014-08-29 20:01:15.240070587 -0700
+@@ -425,6 +425,12 @@
+ # undef __rtld_global_attribute__
+ #endif
+
++#if __OPTION_EGLIBC_RTLD_DEBUG
++# define GLRO_dl_debug_mask GLRO(dl_debug_mask)
++#else
++# define GLRO_dl_debug_mask 0
++#endif
++
+ #ifndef SHARED
+ # define GLRO(name) _##name
+ #else
+@@ -437,8 +443,10 @@
+ {
+ #endif
+
++#if __OPTION_EGLIBC_RTLD_DEBUG
+ /* If nonzero the appropriate debug information is printed. */
+ EXTERN int _dl_debug_mask;
++#endif
+ #define DL_DEBUG_LIBS (1 << 0)
+ #define DL_DEBUG_IMPCALLS (1 << 1)
+ #define DL_DEBUG_BINDINGS (1 << 2)
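The ldsodefs.h change above shows the idiom most of these option-group patches rely on: when a group is disabled, a macro such as GLRO_dl_debug_mask collapses to the constant 0, so call sites keep a single unconditional code path and the compiler discards the dead branches. Below is a minimal standalone sketch of that idiom; OPTION_EXAMPLE_DEBUG, DEBUG_MASK and load_library are invented names for illustration and are not part of the patch.

  /* option-sketch.c: the "constant when disabled" option-group idiom.
     OPTION_EXAMPLE_DEBUG stands in for a group such as
     __OPTION_EGLIBC_RTLD_DEBUG.  */
  #include <stdio.h>

  #define OPTION_EXAMPLE_DEBUG 0        /* set to 1 to build the debug code in */

  #if OPTION_EXAMPLE_DEBUG
  static int debug_mask = 1;            /* real state exists only when enabled */
  # define DEBUG_MASK debug_mask
  #else
  # define DEBUG_MASK 0                 /* constant 0: uses of it become dead code */
  #endif

  #define DEBUG_LIBS (1 << 0)

  static void
  load_library (const char *name)
  {
    /* No #ifdef at the call site: when DEBUG_MASK is the constant 0,
       the compiler can fold this whole branch away.  */
    if (DEBUG_MASK & DEBUG_LIBS)
      fprintf (stderr, "loading %s\n", name);
    /* ... the real loading work would happen here ... */
  }

  int
  main (void)
  {
    load_library ("libexample.so");
    return 0;
  }

With OPTION_EXAMPLE_DEBUG left at 0 the condition is a compile-time constant and the logging branch can be optimized away, which is the effect the patch obtains for GLRO(dl_debug_mask) when __OPTION_EGLIBC_RTLD_DEBUG is off.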
+Index: git/sysdeps/gnu/Makefile
+===================================================================
+--- git.orig/sysdeps/gnu/Makefile 2014-08-29 20:00:53.924070587 -0700
++++ git/sysdeps/gnu/Makefile 2014-08-29 20:01:15.240070587 -0700
+@@ -57,7 +57,8 @@
+ endif
+
+ ifeq ($(subdir),login)
+-sysdep_routines += setutxent getutxent endutxent getutxid getutxline \
++sysdep_routines-$(OPTION_EGLIBC_UTMPX) \
++ += setutxent getutxent endutxent getutxid getutxline \
+ pututxline utmpxname updwtmpx getutmpx getutmp
+
+ sysdep_headers += utmpx.h bits/utmpx.h
+Index: git/sysdeps/ieee754/ldbl-opt/Makefile
+===================================================================
+--- git.orig/sysdeps/ieee754/ldbl-opt/Makefile 2014-08-29 20:00:54.452070587 -0700
++++ git/sysdeps/ieee754/ldbl-opt/Makefile 2014-08-29 20:01:15.244070587 -0700
+@@ -11,19 +11,18 @@
+ routines += math_ldbl_opt nldbl-compat
+
+ extra-libs += libnldbl
+-libnldbl-calls = asprintf dprintf fprintf fscanf fwprintf fwscanf iovfscanf \
++libnldbl-calls = asprintf dprintf fprintf fscanf iovfscanf \
+ obstack_printf obstack_vprintf printf scanf snprintf \
+- sprintf sscanf swprintf swscanf vasprintf vdprintf vfprintf \
+- vfscanf vfwprintf vfwscanf vprintf vscanf vsnprintf \
+- vsprintf vsscanf vswprintf vswscanf vwprintf vwscanf \
+- wprintf wscanf printf_fp printf_size \
+- fprintf_chk fwprintf_chk printf_chk snprintf_chk sprintf_chk \
+- swprintf_chk vfprintf_chk vfwprintf_chk vprintf_chk \
+- vsnprintf_chk vsprintf_chk vswprintf_chk vwprintf_chk \
+- wprintf_chk asprintf_chk vasprintf_chk dprintf_chk \
++ sprintf sscanf vasprintf vdprintf vfprintf \
++ vfscanf vprintf vscanf vsnprintf \
++ vsprintf vsscanf \
++ printf_fp printf_size \
++ fprintf_chk printf_chk snprintf_chk sprintf_chk \
++ vfprintf_chk vprintf_chk \
++ vsnprintf_chk vsprintf_chk \
++ asprintf_chk vasprintf_chk dprintf_chk \
+ vdprintf_chk obstack_printf_chk obstack_vprintf_chk \
+ syslog syslog_chk vsyslog vsyslog_chk \
+- strfmon strfmon_l \
+ strtold strtold_l strtoldint wcstold wcstold_l wcstoldint \
+ qecvt qfcvt qgcvt qecvt_r qfcvt_r \
+ isinf isnan finite signbit scalb log2 lgamma_r ceil \
+@@ -38,9 +37,15 @@
+ casinh cexp clog cproj csin csinh csqrt ctan ctanh cpow \
+ cabs carg cimag creal clog10 \
+ isoc99_scanf isoc99_fscanf isoc99_sscanf \
+- isoc99_vscanf isoc99_vfscanf isoc99_vsscanf \
++ isoc99_vscanf isoc99_vfscanf isoc99_vsscanf
++libnldbl-calls-$(OPTION_EGLIBC_LOCALE_CODE) += strfmon strfmon_l
++libnldbl-calls-$(OPTION_POSIX_WIDE_CHAR_DEVICE_IO) += fwprintf fwscanf \
++ swprintf swscanf vfwprintf vfwscanf vswprintf vswscanf \
++ vwprintf vwscanf wprintf wscanf fwprintf_chk swprintf_chk \
++ vfwprintf_chk vswprintf_chk vwprintf_chk wprintf_chk \
+ isoc99_wscanf isoc99_fwscanf isoc99_swscanf \
+ isoc99_vwscanf isoc99_vfwscanf isoc99_vswscanf
++libnldbl-calls += $(libnldbl-calls-y)
+ libnldbl-routines = $(libnldbl-calls:%=nldbl-%)
+ libnldbl-inhibit-o = $(object-suffixes)
+ libnldbl-static-only-routines = $(libnldbl-routines)
+Index: git/sysdeps/ieee754/ldbl-opt/nldbl-compat.c
+===================================================================
+--- git.orig/sysdeps/ieee754/ldbl-opt/nldbl-compat.c 2014-08-29 20:00:54.468070587 -0700
++++ git/sysdeps/ieee754/ldbl-opt/nldbl-compat.c 2014-08-29 20:01:15.244070587 -0700
+@@ -26,6 +26,7 @@
+ #include <locale/localeinfo.h>
+ #include <sys/syslog.h>
+ #include <bits/libc-lock.h>
++#include <gnu/option-groups.h>
+
+ #include "nldbl-compat.h"
+
+@@ -33,20 +34,14 @@
+ libc_hidden_proto (__nldbl_vsscanf)
+ libc_hidden_proto (__nldbl_vsprintf)
+ libc_hidden_proto (__nldbl_vfscanf)
+-libc_hidden_proto (__nldbl_vfwscanf)
+ libc_hidden_proto (__nldbl_vdprintf)
+-libc_hidden_proto (__nldbl_vswscanf)
+-libc_hidden_proto (__nldbl_vfwprintf)
+-libc_hidden_proto (__nldbl_vswprintf)
+ libc_hidden_proto (__nldbl_vsnprintf)
+ libc_hidden_proto (__nldbl_vasprintf)
+ libc_hidden_proto (__nldbl_obstack_vprintf)
+-libc_hidden_proto (__nldbl___vfwprintf_chk)
+ libc_hidden_proto (__nldbl___vsnprintf_chk)
+ libc_hidden_proto (__nldbl___vfprintf_chk)
+ libc_hidden_proto (__nldbl___vsyslog_chk)
+ libc_hidden_proto (__nldbl___vsprintf_chk)
+-libc_hidden_proto (__nldbl___vswprintf_chk)
+ libc_hidden_proto (__nldbl___vasprintf_chk)
+ libc_hidden_proto (__nldbl___vdprintf_chk)
+ libc_hidden_proto (__nldbl___obstack_vprintf_chk)
+@@ -54,8 +49,17 @@
+ libc_hidden_proto (__nldbl___vstrfmon_l)
+ libc_hidden_proto (__nldbl___isoc99_vsscanf)
+ libc_hidden_proto (__nldbl___isoc99_vfscanf)
++
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
++libc_hidden_proto (__nldbl_vfwscanf)
++libc_hidden_proto (__nldbl_vswscanf)
++libc_hidden_proto (__nldbl_vfwprintf)
++libc_hidden_proto (__nldbl_vswprintf)
++libc_hidden_proto (__nldbl___vfwprintf_chk)
++libc_hidden_proto (__nldbl___vswprintf_chk)
+ libc_hidden_proto (__nldbl___isoc99_vswscanf)
+ libc_hidden_proto (__nldbl___isoc99_vfwscanf)
++#endif
+
+ static void
+ __nldbl_cleanup (void *arg)
+@@ -117,6 +121,7 @@
+ }
+ weak_alias (__nldbl_fprintf, __nldbl__IO_fprintf)
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ int
+ attribute_compat_text_section weak_function
+ __nldbl_fwprintf (FILE *stream, const wchar_t *fmt, ...)
+@@ -130,6 +135,7 @@
+
+ return done;
+ }
++#endif
+
+ int
+ attribute_compat_text_section
+@@ -226,6 +232,7 @@
+ return done;
+ }
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ int
+ attribute_compat_text_section
+ __nldbl_swprintf (wchar_t *s, size_t n, const wchar_t *fmt, ...)
+@@ -239,6 +246,7 @@
+
+ return done;
+ }
++#endif
+
+ int
+ attribute_compat_text_section weak_function
+@@ -264,6 +272,7 @@
+ }
+ libc_hidden_def (__nldbl_vdprintf)
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ int
+ attribute_compat_text_section weak_function
+ __nldbl_vfwprintf (FILE *s, const wchar_t *fmt, va_list ap)
+@@ -275,6 +284,7 @@
+ return res;
+ }
+ libc_hidden_def (__nldbl_vfwprintf)
++#endif
+
+ int
+ attribute_compat_text_section
+@@ -297,6 +307,7 @@
+ libc_hidden_def (__nldbl_vsnprintf)
+ weak_alias (__nldbl_vsnprintf, __nldbl___vsnprintf)
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ int
+ attribute_compat_text_section weak_function
+ __nldbl_vswprintf (wchar_t *string, size_t maxlen, const wchar_t *fmt,
+@@ -330,6 +341,7 @@
+
+ return done;
+ }
++#endif
+
+ int
+ attribute_compat_text_section
+@@ -419,6 +431,7 @@
+ return done;
+ }
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ int
+ attribute_compat_text_section
+ __nldbl_vfwscanf (FILE *s, const wchar_t *fmt, va_list ap)
+@@ -491,6 +504,7 @@
+
+ return done;
+ }
++#endif
+
+ int
+ attribute_compat_text_section
+@@ -506,6 +520,7 @@
+ return done;
+ }
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ int
+ attribute_compat_text_section
+ __nldbl___fwprintf_chk (FILE *stream, int flag, const wchar_t *fmt, ...)
+@@ -519,6 +534,7 @@
+
+ return done;
+ }
++#endif
+
+ int
+ attribute_compat_text_section
+@@ -563,6 +579,7 @@
+ return done;
+ }
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ int
+ attribute_compat_text_section
+ __nldbl___swprintf_chk (wchar_t *s, size_t n, int flag, size_t slen,
+@@ -577,6 +594,7 @@
+
+ return done;
+ }
++#endif
+
+ int
+ attribute_compat_text_section
+@@ -590,6 +608,7 @@
+ }
+ libc_hidden_def (__nldbl___vfprintf_chk)
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ int
+ attribute_compat_text_section
+ __nldbl___vfwprintf_chk (FILE *s, int flag, const wchar_t *fmt, va_list ap)
+@@ -601,6 +620,7 @@
+ return res;
+ }
+ libc_hidden_def (__nldbl___vfwprintf_chk)
++#endif
+
+ int
+ attribute_compat_text_section
+@@ -635,6 +655,7 @@
+ }
+ libc_hidden_def (__nldbl___vsprintf_chk)
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ int
+ attribute_compat_text_section
+ __nldbl___vswprintf_chk (wchar_t *string, size_t maxlen, int flag, size_t slen,
+@@ -668,6 +689,7 @@
+
+ return done;
+ }
++#endif
+
+ int
+ attribute_compat_text_section
+@@ -775,6 +797,7 @@
+ return ___printf_fp (fp, &info_no_ldbl, args);
+ }
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ ssize_t
+ attribute_compat_text_section
+ __nldbl_strfmon (char *s, size_t maxsize, const char *format, ...)
+@@ -829,6 +852,7 @@
+ return res;
+ }
+ libc_hidden_def (__nldbl___vstrfmon_l)
++#endif
+
+ void
+ attribute_compat_text_section
+@@ -941,6 +965,7 @@
+ return done;
+ }
+
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ int
+ attribute_compat_text_section
+ __nldbl___isoc99_vfwscanf (FILE *s, const wchar_t *fmt, va_list ap)
+@@ -1014,6 +1039,7 @@
+
+ return done;
+ }
++#endif
+
+ #if LONG_DOUBLE_COMPAT(libc, GLIBC_2_0)
+ compat_symbol (libc, __nldbl__IO_printf, _IO_printf, GLIBC_2_0);
+@@ -1057,6 +1083,7 @@
+ compat_symbol (libc, __nldbl___strfmon_l, __strfmon_l, GLIBC_2_1);
+ #endif
+ #if LONG_DOUBLE_COMPAT(libc, GLIBC_2_2)
++# if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
+ compat_symbol (libc, __nldbl_swprintf, swprintf, GLIBC_2_2);
+ compat_symbol (libc, __nldbl_vwprintf, vwprintf, GLIBC_2_2);
+ compat_symbol (libc, __nldbl_wprintf, wprintf, GLIBC_2_2);
+@@ -1069,6 +1096,7 @@
+ compat_symbol (libc, __nldbl_vswscanf, vswscanf, GLIBC_2_2);
+ compat_symbol (libc, __nldbl_vwscanf, vwscanf, GLIBC_2_2);
+ compat_symbol (libc, __nldbl_wscanf, wscanf, GLIBC_2_2);
++# endif
+ #endif
+ #if LONG_DOUBLE_COMPAT(libc, GLIBC_2_3)
+ compat_symbol (libc, __nldbl_strfmon_l, strfmon_l, GLIBC_2_3);
+Index: git/sysdeps/ieee754/ldbl-opt/nldbl-compat.h
+===================================================================
+--- git.orig/sysdeps/ieee754/ldbl-opt/nldbl-compat.h 2014-08-29 20:00:54.468070587 -0700
++++ git/sysdeps/ieee754/ldbl-opt/nldbl-compat.h 2014-08-29 20:01:15.244070587 -0700
+@@ -30,6 +30,7 @@
+ #include <math.h>
+ #include <monetary.h>
+ #include <sys/syslog.h>
++#include <gnu/option-groups.h>
+
+
+ /* Declare the __nldbl_NAME function the wrappers call that's in libc.so. */
+@@ -37,19 +38,15 @@
+
+ NLDBL_DECL (_IO_vfscanf);
+ NLDBL_DECL (vfscanf);
+-NLDBL_DECL (vfwscanf);
+ NLDBL_DECL (obstack_vprintf);
+ NLDBL_DECL (vasprintf);
+ NLDBL_DECL (dprintf);
+ NLDBL_DECL (vdprintf);
+ NLDBL_DECL (fprintf);
+ NLDBL_DECL (vfprintf);
+-NLDBL_DECL (vfwprintf);
+ NLDBL_DECL (vsnprintf);
+ NLDBL_DECL (vsprintf);
+ NLDBL_DECL (vsscanf);
+-NLDBL_DECL (vswprintf);
+-NLDBL_DECL (vswscanf);
+ NLDBL_DECL (__asprintf);
+ NLDBL_DECL (asprintf);
+ NLDBL_DECL (__printf_fp);
+@@ -66,12 +63,18 @@
+ NLDBL_DECL (__isoc99_vscanf);
+ NLDBL_DECL (__isoc99_vfscanf);
+ NLDBL_DECL (__isoc99_vsscanf);
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
++NLDBL_DECL (vfwscanf);
++NLDBL_DECL (vfwprintf);
++NLDBL_DECL (vswprintf);
++NLDBL_DECL (vswscanf);
+ NLDBL_DECL (__isoc99_wscanf);
+ NLDBL_DECL (__isoc99_fwscanf);
+ NLDBL_DECL (__isoc99_swscanf);
+ NLDBL_DECL (__isoc99_vwscanf);
+ NLDBL_DECL (__isoc99_vfwscanf);
+ NLDBL_DECL (__isoc99_vswscanf);
++#endif
+
+ /* This one does not exist in the normal interface, only
+ __nldbl___vstrfmon really exists. */
+@@ -82,22 +85,23 @@
+ since we don't compile with _FORTIFY_SOURCE. */
+ extern int __nldbl___vfprintf_chk (FILE *__restrict, int,
+ const char *__restrict, _G_va_list);
+-extern int __nldbl___vfwprintf_chk (FILE *__restrict, int,
+- const wchar_t *__restrict, __gnuc_va_list);
+ extern int __nldbl___vsprintf_chk (char *__restrict, int, size_t,
+ const char *__restrict, _G_va_list) __THROW;
+ extern int __nldbl___vsnprintf_chk (char *__restrict, size_t, int, size_t,
+ const char *__restrict, _G_va_list)
+ __THROW;
+-extern int __nldbl___vswprintf_chk (wchar_t *__restrict, size_t, int, size_t,
+- const wchar_t *__restrict, __gnuc_va_list)
+- __THROW;
+ extern int __nldbl___vasprintf_chk (char **, int, const char *, _G_va_list)
+ __THROW;
+ extern int __nldbl___vdprintf_chk (int, int, const char *, _G_va_list);
+ extern int __nldbl___obstack_vprintf_chk (struct obstack *, int, const char *,
+ _G_va_list) __THROW;
+ extern void __nldbl___vsyslog_chk (int, int, const char *, va_list);
+-
++#if __OPTION_POSIX_WIDE_CHAR_DEVICE_IO
++extern int __nldbl___vfwprintf_chk (FILE *__restrict, int,
++ const wchar_t *__restrict, __gnuc_va_list);
++extern int __nldbl___vswprintf_chk (wchar_t *__restrict, size_t, int, size_t,
++ const wchar_t *__restrict, __gnuc_va_list)
++ __THROW;
++#endif
+
+ #endif /* __NLDBL_COMPAT_H */
+Index: git/sysdeps/unix/sysv/linux/gethostid.c
+===================================================================
+--- git.orig/sysdeps/unix/sysv/linux/gethostid.c 2014-08-29 20:00:58.840070587 -0700
++++ git/sysdeps/unix/sysv/linux/gethostid.c 2014-08-29 20:01:15.244070587 -0700
+@@ -21,6 +21,7 @@
+ #include <unistd.h>
+ #include <netdb.h>
+ #include <not-cancel.h>
++#include <gnu/option-groups.h>
+
+ #define HOSTIDFILE "/etc/hostid"
+
+@@ -89,6 +90,7 @@
+ return id;
+ }
+
++#if __OPTION_EGLIBC_INET
+ /* Getting from the file was not successful. An intelligent guess for
+ a unique number of a host is its IP address. Return this. */
+ if (__gethostname (hostname, MAXHOSTNAMELEN) < 0 || hostname[0] == '\0')
+@@ -115,5 +117,9 @@
+ /* For the return value to be not exactly the IP address we do some
+ bit fiddling. */
+ return (int32_t) (in.s_addr << 16 | in.s_addr >> 16);
++#else
++ /* Return an arbitrary value. */
++ return 0;
++#endif
+ }
+ #endif
+Index: git/sysdeps/unix/sysv/linux/libc_fatal.c
+===================================================================
+--- git.orig/sysdeps/unix/sysv/linux/libc_fatal.c 2014-08-29 20:00:58.980070587 -0700
++++ git/sysdeps/unix/sysv/linux/libc_fatal.c 2014-08-29 20:01:15.244070587 -0700
+@@ -23,6 +23,7 @@
+ #include <string.h>
+ #include <sys/mman.h>
+ #include <sys/uio.h>
++#include <gnu/option-groups.h>
+
+ static bool
+ writev_for_fatal (int fd, const struct iovec *iov, size_t niov, size_t total)
+@@ -40,6 +41,7 @@
+ static void
+ backtrace_and_maps (int do_abort, bool written, int fd)
+ {
++#if __OPTION_EGLIBC_BACKTRACE
+ if (do_abort > 1 && written)
+ {
+ void *addrs[64];
+@@ -62,6 +64,7 @@
+ close_not_cancel_no_status (fd2);
+ }
+ }
++#endif /* __OPTION_EGLIBC_BACKTRACE */
+ }
+ #define BEFORE_ABORT backtrace_and_maps
+
+Index: git/time/Makefile
+===================================================================
+--- git.orig/time/Makefile 2014-08-29 20:00:59.504070587 -0700
++++ git/time/Makefile 2014-08-29 20:01:15.244070587 -0700
+@@ -18,6 +18,8 @@
+ #
+ # Makefile for time routines
+ #
++include ../option-groups.mak
++
+ subdir := time
+
+ include ../Makeconfig
+@@ -30,14 +32,20 @@
+ tzfile getitimer setitimer \
+ stime dysize timegm ftime \
+ getdate strptime strptime_l \
+- strftime wcsftime strftime_l wcsftime_l \
++ strftime strftime_l \
+ timespec_get
+-aux := era alt_digit lc-time-cleanup
++routines-$(OPTION_POSIX_C_LANG_WIDE_CHAR) \
++ := wcsftime wcsftime_l
++aux-$(OPTION_EGLIBC_LOCALE_CODE) += alt_digit era lc-time-cleanup
+
+-tests := test_time clocktest tst-posixtz tst-strptime tst_wcsftime \
+- tst-getdate tst-mktime tst-mktime2 tst-ftime_l tst-strftime \
++tests := test_time clocktest tst-posixtz \
++ tst-getdate tst-mktime tst-mktime2 tst-strftime \
+ tst-mktime3 tst-strptime2 bug-asctime bug-asctime_r bug-mktime1 \
+ tst-strptime3 bug-getdate1 tst-strptime-whitespace
++tests-$(OPTION_EGLIBC_LOCALE_CODE) \
++ += tst-strptime tst-ftime_l
++tests-$(OPTION_POSIX_WIDE_CHAR_DEVICE_IO) \
++ += tst_wcsftime
+
+ include ../Rules
+
+Index: git/time/strftime_l.c
+===================================================================
+--- git.orig/time/strftime_l.c 2014-08-29 20:00:59.528070587 -0700
++++ git/time/strftime_l.c 2014-08-29 20:01:15.244070587 -0700
+@@ -35,6 +35,10 @@
+ # include "../locale/localeinfo.h"
+ #endif
+
++#ifdef _LIBC
++# include <gnu/option-groups.h>
++#endif
++
+ #if defined emacs && !defined HAVE_BCOPY
+ # define HAVE_MEMCPY 1
+ #endif
+@@ -882,7 +886,7 @@
+ case L_('C'):
+ if (modifier == L_('E'))
+ {
+-#if HAVE_STRUCT_ERA_ENTRY
++#if (! _LIBC || __OPTION_EGLIBC_LOCALE_CODE) && HAVE_STRUCT_ERA_ENTRY
+ struct era_entry *era = _nl_get_era_entry (tp HELPER_LOCALE_ARG);
+ if (era)
+ {
+@@ -955,7 +959,7 @@
+
+ if (modifier == L_('O') && 0 <= number_value)
+ {
+-#ifdef _NL_CURRENT
++#if (! _LIBC || __OPTION_EGLIBC_LOCALE_CODE) && defined (_NL_CURRENT)
+ /* Get the locale specific alternate representation of
+ the number NUMBER_VALUE. If none exist NULL is returned. */
+ const CHAR_T *cp = nl_get_alt_digit (number_value
+@@ -1260,7 +1264,7 @@
+ case L_('Y'):
+ if (modifier == 'E')
+ {
+-#if HAVE_STRUCT_ERA_ENTRY
++#if (! _LIBC || __OPTION_EGLIBC_LOCALE_CODE) && HAVE_STRUCT_ERA_ENTRY
+ struct era_entry *era = _nl_get_era_entry (tp HELPER_LOCALE_ARG);
+ if (era)
+ {
+@@ -1285,7 +1289,7 @@
+ case L_('y'):
+ if (modifier == L_('E'))
+ {
+-#if HAVE_STRUCT_ERA_ENTRY
++#if (! _LIBC || __OPTION_EGLIBC_LOCALE_CODE) && HAVE_STRUCT_ERA_ENTRY
+ struct era_entry *era = _nl_get_era_entry (tp HELPER_LOCALE_ARG);
+ if (era)
+ {
+Index: git/time/strptime_l.c
+===================================================================
+--- git.orig/time/strptime_l.c 2014-08-29 20:00:59.528070587 -0700
++++ git/time/strptime_l.c 2014-08-29 20:01:15.244070587 -0700
+@@ -29,6 +29,7 @@
+
+ #ifdef _LIBC
+ # define HAVE_LOCALTIME_R 0
++# include <gnu/option-groups.h>
+ # include "../locale/localeinfo.h"
+ #endif
+
+@@ -84,7 +85,7 @@
+ if (val < from || val > to) \
+ return NULL; \
+ } while (0)
+-#ifdef _NL_CURRENT
++#if (! _LIBC || __OPTION_EGLIBC_LOCALE_CODE) && defined (_NL_CURRENT)
+ # define get_alt_number(from, to, n) \
+ ({ \
+ __label__ do_normal; \
+@@ -820,6 +821,7 @@
+ s.want_xday = 1;
+ break;
+ case 'C':
++#if ! _LIBC || __OPTION_EGLIBC_LOCALE_CODE
+ if (s.decided != raw)
+ {
+ if (s.era_cnt >= 0)
+@@ -856,10 +858,12 @@
+
+ s.decided = raw;
+ }
++#endif
+ /* The C locale has no era information, so use the
+ normal representation. */
+ goto match_century;
+ case 'y':
++#if ! _LIBC || __OPTION_EGLIBC_LOCALE_CODE
+ if (s.decided != raw)
+ {
+ get_number(0, 9999, 4);
+@@ -918,9 +922,10 @@
+
+ s.decided = raw;
+ }
+-
++#endif
+ goto match_year_in_century;
+ case 'Y':
++#if ! _LIBC || __OPTION_EGLIBC_LOCALE_CODE
+ if (s.decided != raw)
+ {
+ num_eras = _NL_CURRENT_WORD (LC_TIME,
+@@ -948,6 +953,7 @@
+
+ s.decided = raw;
+ }
++#endif
+ get_number (0, 9999, 4);
+ tm->tm_year = val - 1900;
+ s.want_century = 0;
+@@ -1118,6 +1124,7 @@
+ tm->tm_year = (s.century - 19) * 100;
+ }
+
++#if ! _LIBC || __OPTION_EGLIBC_LOCALE_CODE
+ if (s.era_cnt != -1)
+ {
+ era = _nl_select_era_entry (s.era_cnt HELPER_LOCALE_ARG);
+@@ -1132,6 +1139,7 @@
+ tm->tm_year = era->start_date[0];
+ }
+ else
++#endif
+ if (s.want_era)
+ {
+ /* No era found but we have seen an E modifier. Rectify some
+Index: git/timezone/Makefile
+===================================================================
+--- git.orig/timezone/Makefile 2014-08-29 20:01:14.044070587 -0700
++++ git/timezone/Makefile 2014-08-29 20:01:15.244070587 -0700
+@@ -115,7 +115,7 @@
+
+ $(objpfx)tzselect: tzselect.ksh $(common-objpfx)config.make
+ sed -e 's|/bin/bash|/bin/sh|' \
+- -e 's|TZDIR=[^}]*|TZDIR=$(zonedir)|' \
++ -e '/TZDIR=/s|\$$(pwd)|$(zonedir)|' \
+ -e '/TZVERSION=/s|see_Makefile|"$(version)"|' \
+ -e '/PKGVERSION=/s|=.*|="$(PKGVERSION)"|' \
+ -e '/REPORT_BUGS_TO=/s|=.*|="$(REPORT_BUGS_TO)"|' \
+Index: git/wcsmbs/Makefile
+===================================================================
+--- git.orig/wcsmbs/Makefile 2014-08-29 20:00:59.548070587 -0700
++++ git/wcsmbs/Makefile 2014-08-29 20:01:15.244070587 -0700
+@@ -18,15 +18,21 @@
+ #
+ # Sub-makefile for wcsmbs portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := wcsmbs
+
+ include ../Makeconfig
+
+ headers := wchar.h bits/wchar.h bits/wchar2.h bits/wchar-ldbl.h uchar.h
+
+-routines := wcscat wcschr wcscmp wcscpy wcscspn wcsdup wcslen wcsncat \
++# These functions are used by printf_fp.c, even in the plain case; see
++# comments there for OPTION_EGLIBC_LOCALE_CODE.
++routines := wmemcpy wmemset
++routines-$(OPTION_POSIX_C_LANG_WIDE_CHAR) \
++ := wcscat wcschr wcscmp wcscpy wcscspn wcsdup wcslen wcsncat \
+ wcsncmp wcsncpy wcspbrk wcsrchr wcsspn wcstok wcsstr wmemchr \
+- wmemcmp wmemcpy wmemmove wmemset wcpcpy wcpncpy wmempcpy \
++ wmemcmp wmemmove wcpcpy wcpncpy wmempcpy \
+ btowc wctob mbsinit \
+ mbrlen mbrtowc wcrtomb mbsrtowcs wcsrtombs \
+ mbsnrtowcs wcsnrtombs wcsnlen wcschrnul \
+@@ -38,14 +44,19 @@
+ wcscoll_l wcsxfrm_l \
+ wcscasecmp wcsncase wcscasecmp_l wcsncase_l \
+ wcsmbsload mbsrtowcs_l \
+- isoc99_wscanf isoc99_vwscanf isoc99_fwscanf isoc99_vfwscanf \
+ isoc99_swscanf isoc99_vswscanf \
+ mbrtoc16 c16rtomb
++routines-$(OPTION_POSIX_WIDE_CHAR_DEVICE_IO) \
++ += isoc99_wscanf isoc99_vwscanf isoc99_fwscanf isoc99_vfwscanf
+
+ strop-tests := wcscmp wmemcmp wcslen wcschr wcsrchr wcscpy
+-tests := tst-wcstof wcsmbs-tst1 tst-wcsnlen tst-btowc tst-mbrtowc \
+- tst-wcrtomb tst-wcpncpy tst-mbsrtowcs tst-wchar-h tst-mbrtowc2 \
+- tst-c16c32-1 wcsatcliff $(addprefix test-,$(strop-tests))
++tests := tst-wchar-h
++tests-$(OPTION_EGLIBC_LOCALE_CODE) \
++ += tst-btowc tst-mbrtowc tst-mbrtowc2 tst-wcrtomb tst-c16c32-1
++tests-$(OPTION_POSIX_C_LANG_WIDE_CHAR) \
++ += tst-wcstof wcsmbs-tst1 tst-wcsnlen \
++ tst-wcpncpy tst-mbsrtowcs \
++ wcsatcliff $(addprefix test-,$(strop-tests))
+ tests-ifunc := $(strop-tests:%=test-%-ifunc)
+ tests += $(tests-ifunc)
+
+Index: git/wcsmbs/wcsmbsload.c
+===================================================================
+--- git.orig/wcsmbs/wcsmbsload.c 2014-08-29 20:00:59.580070587 -0700
++++ git/wcsmbs/wcsmbsload.c 2014-08-29 20:01:15.248070587 -0700
+@@ -21,6 +21,7 @@
+ #include <limits.h>
+ #include <stdlib.h>
+ #include <string.h>
++#include <gnu/option-groups.h>
+
+ #include <locale/localeinfo.h>
+ #include <wcsmbsload.h>
+@@ -143,6 +144,7 @@
+ })
+
+
++#if __OPTION_EGLIBC_LOCALE_CODE
+ /* Some of the functions here must not be used while setlocale is called. */
+ __libc_rwlock_define (extern, __libc_setlocale_lock attribute_hidden)
+
+@@ -211,6 +213,17 @@
+
+ __libc_rwlock_unlock (__libc_setlocale_lock);
+ }
++#else
++void
++internal_function
++__wcsmbs_load_conv (struct __locale_data *new_category)
++{
++ /* When OPTION_EGLIBC_LOCALE_CODE is disabled, we should never reach
++ this point: there is no way to change locales, so every locale
++ passed to get_gconv_fcts should be _nl_C_LC_CTYPE. */
++ abort ();
++}
++#endif
+
+
+ /* Clone the current conversion function set. */
+Index: git/wctype/Makefile
+===================================================================
+--- git.orig/wctype/Makefile 2014-08-29 20:00:59.584070587 -0700
++++ git/wctype/Makefile 2014-08-29 20:01:15.248070587 -0700
+@@ -18,14 +18,20 @@
+ #
+ # Sub-makefile for wctype portion of the library.
+ #
++include ../option-groups.mak
++
+ subdir := wctype
+
+ include ../Makeconfig
+
+ headers := wctype.h
+-routines := wcfuncs wctype iswctype wctrans towctrans \
+- wcfuncs_l wctype_l iswctype_l wctrans_l towctrans_l
++routines := wctrans towctrans towctrans_l
++routines-$(OPTION_POSIX_C_LANG_WIDE_CHAR) \
++ := wcfuncs wctype iswctype \
++ wcfuncs_l wctype_l iswctype_l wctrans_l
+
+-tests := test_wctype test_wcfuncs bug-wctypeh
++tests :=
++tests-$(OPTION_POSIX_C_LANG_WIDE_CHAR) \
++ += test_wctype test_wcfuncs bug-wctypeh
+
+ include ../Rules
+Index: git/sysdeps/nptl/Makefile
+===================================================================
+--- git.orig/sysdeps/nptl/Makefile 2014-08-29 20:00:58.036070587 -0700
++++ git/sysdeps/nptl/Makefile 2014-08-29 20:01:15.248070587 -0700
+@@ -18,6 +18,9 @@
+
+ ifeq ($(subdir),nptl)
+ libpthread-sysdep_routines += errno-loc
++ifeq ($(OPTION_EGLIBC_BIG_MACROS),n)
++sysdep_routines += small-macros-fns
++endif
+ endif
+
+ ifeq ($(subdir),rt)
+Index: git/sysdeps/nptl/bits/libc-lock.h
+===================================================================
+--- git.orig/sysdeps/nptl/bits/libc-lock.h 2014-08-29 20:00:58.036070587 -0700
++++ git/sysdeps/nptl/bits/libc-lock.h 2014-08-29 20:01:15.248070587 -0700
+@@ -24,6 +24,14 @@
+ #include <stddef.h>
+
+
++#ifdef _LIBC
++# include <lowlevellock.h>
++# include <tls.h>
++# include <pthread-functions.h>
++# include <errno.h> /* For EBUSY. */
++# include <gnu/option-groups.h> /* For __OPTION_EGLIBC_BIG_MACROS. */
++#endif
++
+ /* Mutex type. */
+ #if defined _LIBC || defined _IO_MTSAFE_IO
+ # if (defined NOT_IN_libc && !defined IS_IN_libpthread) || !defined _LIBC
+@@ -87,6 +95,14 @@
+
+ /* Lock the recursive named lock variable. */
+ #if defined _LIBC && (!defined NOT_IN_libc || defined IS_IN_libpthread)
++# if __OPTION_EGLIBC_BIG_MACROS != 1
++/* EGLIBC: Declare wrapper function for a big macro if either
++ !__OPTION_EGLIBC_BIG_MACROS or we are using a back door from
++ small-macros-fns.c (__OPTION_EGLIBC_BIG_MACROS == 2). */
++extern void __libc_lock_lock_recursive_fn (__libc_lock_recursive_t *);
++libc_hidden_proto (__libc_lock_lock_recursive_fn);
++# endif /* __OPTION_EGLIBC_BIG_MACROS != 1 */
++# if __OPTION_EGLIBC_BIG_MACROS
+ # define __libc_lock_lock_recursive(NAME) \
+ do { \
+ void *self = THREAD_SELF; \
+@@ -97,6 +113,10 @@
+ } \
+ ++(NAME).cnt; \
+ } while (0)
++# else
++# define __libc_lock_lock_recursive(NAME) \
++ __libc_lock_lock_recursive_fn (&(NAME))
++# endif /* __OPTION_EGLIBC_BIG_MACROS */
+ #else
+ # define __libc_lock_lock_recursive(NAME) \
+ __libc_maybe_call (__pthread_mutex_lock, (&(NAME).mutex), 0)
+@@ -104,6 +124,14 @@
+
+ /* Try to lock the recursive named lock variable. */
+ #if defined _LIBC && (!defined NOT_IN_libc || defined IS_IN_libpthread)
++# if __OPTION_EGLIBC_BIG_MACROS != 1
++/* EGLIBC: Declare wrapper function for a big macro if either
++ !__OPTION_EGLIBC_BIG_MACROS or we are using a back door from
++ small-macros-fns.c (__OPTION_EGLIBC_BIG_MACROS == 2). */
++extern int __libc_lock_trylock_recursive_fn (__libc_lock_recursive_t *);
++libc_hidden_proto (__libc_lock_trylock_recursive_fn);
++# endif /* __OPTION_EGLIBC_BIG_MACROS != 1 */
++# if __OPTION_EGLIBC_BIG_MACROS
+ # define __libc_lock_trylock_recursive(NAME) \
+ ({ \
+ int result = 0; \
+@@ -122,6 +150,10 @@
+ ++(NAME).cnt; \
+ result; \
+ })
++# else
++# define __libc_lock_trylock_recursive(NAME) \
++ __libc_lock_trylock_recursive_fn (&(NAME))
++# endif /* __OPTION_EGLIBC_BIG_MACROS */
+ #else
+ # define __libc_lock_trylock_recursive(NAME) \
+ __libc_maybe_call (__pthread_mutex_trylock, (&(NAME).mutex), 0)
+@@ -129,6 +161,14 @@
+
+ /* Unlock the recursive named lock variable. */
+ #if defined _LIBC && (!defined NOT_IN_libc || defined IS_IN_libpthread)
++# if __OPTION_EGLIBC_BIG_MACROS != 1
++/* EGLIBC: Declare wrapper function for a big macro if either
++ !__OPTION_EGLIBC_BIG_MACROS, or we are using a back door from
++ small-macros-fns.c (__OPTION_EGLIBC_BIG_MACROS == 2). */
++extern void __libc_lock_unlock_recursive_fn (__libc_lock_recursive_t *);
++libc_hidden_proto (__libc_lock_unlock_recursive_fn);
++# endif /* __OPTION_EGLIBC_BIG_MACROS != 1 */
++# if __OPTION_EGLIBC_BIG_MACROS
+ /* We do no error checking here. */
+ # define __libc_lock_unlock_recursive(NAME) \
+ do { \
+@@ -138,6 +178,10 @@
+ lll_unlock ((NAME).lock, LLL_PRIVATE); \
+ } \
+ } while (0)
++# else
++# define __libc_lock_unlock_recursive(NAME) \
++ __libc_lock_unlock_recursive_fn (&(NAME))
++# endif /* __OPTION_EGLIBC_BIG_MACROS */
+ #else
+ # define __libc_lock_unlock_recursive(NAME) \
+ __libc_maybe_call (__pthread_mutex_unlock, (&(NAME).mutex), 0)
+Index: git/sysdeps/nptl/bits/libc-lockP.h
+===================================================================
+--- git.orig/sysdeps/nptl/bits/libc-lockP.h 2014-08-29 20:00:58.044070587 -0700
++++ git/sysdeps/nptl/bits/libc-lockP.h 2014-08-29 20:01:15.248070587 -0700
+@@ -33,6 +33,8 @@
+ #include <lowlevellock.h>
+ #include <tls.h>
+ #include <pthread-functions.h>
++#include <errno.h> /* For EBUSY. */
++#include <gnu/option-groups.h> /* For __OPTION_EGLIBC_BIG_MACROS. */
+
+ /* Mutex type. */
+ #if defined NOT_IN_libc && !defined IS_IN_libpthread
+@@ -159,10 +161,22 @@
+
+ /* Lock the named lock variable. */
+ #if !defined NOT_IN_libc || defined IS_IN_libpthread
+-# ifndef __libc_lock_lock
+-# define __libc_lock_lock(NAME) \
++# if __OPTION_EGLIBC_BIG_MACROS != 1
++/* EGLIBC: Declare wrapper function for a big macro if either
++ !__OPTION_EGLIBC_BIG_MACROS or we are using a back door from
++ small-macros-fns.c (__OPTION_EGLIBC_BIG_MACROS == 2). */
++extern void __libc_lock_lock_fn (__libc_lock_t *);
++libc_hidden_proto (__libc_lock_lock_fn);
++# endif /* __OPTION_EGLIBC_BIG_MACROS != 1 */
++# if __OPTION_EGLIBC_BIG_MACROS
++# ifndef __libc_lock_lock
++# define __libc_lock_lock(NAME) \
+ ({ lll_lock (NAME, LLL_PRIVATE); 0; })
+-# endif
++# endif
++# else
++# define __libc_lock_lock(NAME) \
++ __libc_lock_lock_fn (&(NAME))
++# endif /* __OPTION_EGLIBC_BIG_MACROS */
+ #else
+ # undef __libc_lock_lock
+ # define __libc_lock_lock(NAME) \
+@@ -175,10 +189,22 @@
+
+ /* Try to lock the named lock variable. */
+ #if !defined NOT_IN_libc || defined IS_IN_libpthread
+-# ifndef __libc_lock_trylock
+-# define __libc_lock_trylock(NAME) \
++# if __OPTION_EGLIBC_BIG_MACROS != 1
++/* EGLIBC: Declare wrapper function for a big macro if either
++ !__OPTION_EGLIBC_BIG_MACROS or we are using a back door from
++ small-macros-fns.c (__OPTION_EGLIBC_BIG_MACROS == 2). */
++extern int __libc_lock_trylock_fn (__libc_lock_t *);
++libc_hidden_proto (__libc_lock_trylock_fn);
++# endif /* __OPTION_EGLIBC_BIG_MACROS != 1 */
++# if __OPTION_EGLIBC_BIG_MACROS
++# ifndef __libc_lock_trylock
++# define __libc_lock_trylock(NAME) \
+ lll_trylock (NAME)
+-# endif
++# endif
++# else
++# define __libc_lock_trylock(NAME) \
++ __libc_lock_trylock_fn (&(NAME))
++# endif /* __OPTION_EGLIBC_BIG_MACROS */
+ #else
+ # undef __libc_lock_trylock
+ # define __libc_lock_trylock(NAME) \
+@@ -194,8 +220,20 @@
+
+ /* Unlock the named lock variable. */
+ #if !defined NOT_IN_libc || defined IS_IN_libpthread
++# if __OPTION_EGLIBC_BIG_MACROS != 1
++/* EGLIBC: Declare wrapper function for a big macro if either
++ !__OPTION_EGLIBC_BIG_MACROS, or we are using a back door from
++ small-macros-fns.c (__OPTION_EGLIBC_BIG_MACROS == 2). */
++extern void __libc_lock_unlock_fn (__libc_lock_t *);
++libc_hidden_proto (__libc_lock_unlock_fn);
++# endif /* __OPTION_EGLIBC_BIG_MACROS != 1 */
++# if __OPTION_EGLIBC_BIG_MACROS
+ # define __libc_lock_unlock(NAME) \
+ lll_unlock (NAME, LLL_PRIVATE)
++# else
++# define __libc_lock_unlock(NAME) \
++ __libc_lock_unlock_fn (&(NAME))
++# endif /* __OPTION_EGLIBC_BIG_MACROS */
+ #else
+ # define __libc_lock_unlock(NAME) \
+ __libc_maybe_call (__pthread_mutex_unlock, (&(NAME)), 0)
+Index: git/sysdeps/nptl/small-macros-fns.c
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/nptl/small-macros-fns.c 2014-08-29 20:01:15.248070587 -0700
+@@ -0,0 +1,72 @@
++/* EGLIBC: function wrappers for big macros.
++ Copyright (C) 2009 Free Software Foundation, Inc.
++ This file is part of the GNU C Library.
++
++ The GNU C Library is free software; you can redistribute it and/or
++ modify it under the terms of the GNU Lesser General Public License as
++ published by the Free Software Foundation; either version 2.1 of the
++ License, or (at your option) any later version.
++
++ The GNU C Library is distributed in the hope that it will be useful,
++ but WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ Lesser General Public License for more details.
++
++ You should have received a copy of the GNU Lesser General Public
++ License along with the GNU C Library; see the file COPYING.LIB. If not,
++ write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
++ Boston, MA 02111-1307, USA. */
++
++#include <gnu/option-groups.h>
++
++/* Handle macros from ./bits/libc-lock.h. */
++#if defined _LIBC && (!defined NOT_IN_libc || defined IS_IN_libpthread)
++
++/* Get the macros for function bodies through a back door. */
++# undef __OPTION_EGLIBC_BIG_MACROS
++# define __OPTION_EGLIBC_BIG_MACROS 2
++# include <bits/libc-lock.h>
++
++void
++__libc_lock_lock_fn (__libc_lock_t *name)
++{
++ __libc_lock_lock (*name);
++}
++libc_hidden_def (__libc_lock_lock_fn);
++
++void
++__libc_lock_lock_recursive_fn (__libc_lock_recursive_t *name)
++{
++ __libc_lock_lock_recursive (*name);
++}
++libc_hidden_def (__libc_lock_lock_recursive_fn);
++
++int
++__libc_lock_trylock_fn (__libc_lock_t *name)
++{
++ return __libc_lock_trylock (*name);
++}
++libc_hidden_def (__libc_lock_trylock_fn);
++
++int
++__libc_lock_trylock_recursive_fn (__libc_lock_recursive_t *name)
++{
++ return __libc_lock_trylock_recursive (*name);
++}
++libc_hidden_def (__libc_lock_trylock_recursive_fn);
++
++void
++__libc_lock_unlock_fn (__libc_lock_t *name)
++{
++ __libc_lock_unlock (*name);
++}
++libc_hidden_def (__libc_lock_unlock_fn);
++
++void
++__libc_lock_unlock_recursive_fn (__libc_lock_recursive_t *name)
++{
++ __libc_lock_unlock_recursive (*name);
++}
++libc_hidden_def (__libc_lock_unlock_recursive_fn);
++
++#endif /*defined _LIBC && (!defined NOT_IN_libc || defined IS_IN_libpthread)*/
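The new small-macros-fns.c above relies on a back door: it redefines __OPTION_EGLIBC_BIG_MACROS to 2 before including bits/libc-lock.h, so the header both declares the *_fn wrappers and still exposes the macro bodies needed to define them, while ordinary small-macro builds only ever see out-of-line calls. The two-file sketch below reproduces the trick with invented names (mini-lock.h, BIG_MACROS, do_lock, do_lock_fn); it illustrates the mechanism and is not code from the patch.

  /* mini-lock.h: a "big macro" with an optional out-of-line wrapper.  */
  #ifndef MINI_LOCK_H
  #define MINI_LOCK_H

  #ifndef BIG_MACROS
  # define BIG_MACROS 0           /* default in this sketch: small macros */
  #endif

  #if BIG_MACROS != 1
  /* Declare the wrapper for small-macro builds (BIG_MACROS == 0) and for
     the back door used by the file that defines it (BIG_MACROS == 2).  */
  void do_lock_fn (int *lock);
  #endif

  #if BIG_MACROS
  /* The full inline body; a trivial stand-in for something like lll_lock.  */
  # define do_lock(l) do { (l) = 1; } while (0)
  #else
  /* Small-macro build: every call site becomes one out-of-line call.  */
  # define do_lock(l) do_lock_fn (&(l))
  #endif

  #endif /* MINI_LOCK_H */

  /* mini-lock-fns.c: define the wrapper by reusing the big macro body.  */
  #undef BIG_MACROS
  #define BIG_MACROS 2            /* back door: expose the inline body here only */
  #include "mini-lock.h"

  void
  do_lock_fn (int *lock)
  {
    do_lock (*lock);              /* expands to the inline body, not to itself */
  }

A client that includes mini-lock.h without defining BIG_MACROS compiles do_lock (my_lock) into a call to do_lock_fn (&my_lock), just as __libc_lock_lock becomes a call to __libc_lock_lock_fn in the patched headers; building with BIG_MACROS defined to 1 restores the fully inlined behaviour.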
+Index: git/include/libc-symbols.h
+===================================================================
+--- git.orig/include/libc-symbols.h 2014-08-29 20:00:47.144070587 -0700
++++ git/include/libc-symbols.h 2014-08-29 20:01:15.248070587 -0700
+@@ -60,8 +60,11 @@
+ /* Define these macros for the benefit of portable GNU code that wants to check
+ them. Of course, STDC_HEADERS is never false when building libc! */
+ #define STDC_HEADERS 1
+-#define HAVE_MBSTATE_T 1
+-#define HAVE_MBSRTOWCS 1
++
++#if __OPTION_EGLIBC_LOCALE_CODE
++# define HAVE_MBSTATE_T 1
++# define HAVE_MBSRTOWCS 1
++#endif
+ #define HAVE_LIBINTL_H 1
+ #define HAVE_WCTYPE_H 1
+ #define HAVE_ISWCTYPE 1
+Index: git/crypt/crypt_common.c
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/crypt/crypt_common.c 2014-08-29 20:01:15.248070587 -0700
+@@ -0,0 +1,42 @@
++/*
++ * crypt: crypt(3) implementation
++ *
++ * Copyright (C) 1991-2014 Free Software Foundation, Inc.
++ *
++ * This library is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Lesser General Public
++ * License as published by the Free Software Foundation; either
++ * version 2.1 of the License, or (at your option) any later version.
++ *
++ * This library is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Lesser General Public License for more details.
++ *
++ * You should have received a copy of the GNU Lesser General Public
++ * License along with this library; see the file COPYING.LIB. If not,
++ * see <http://www.gnu.org/licenses/>.
++ *
++ * General Support routines
++ *
++ */
++
++#include "crypt-private.h"
++
++/* Table with characters for base64 transformation. */
++static const char b64t[64] =
++"./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
++
++void
++__b64_from_24bit (char **cp, int *buflen,
++ unsigned int b2, unsigned int b1, unsigned int b0,
++ int n)
++{
++ unsigned int w = (b2 << 16) | (b1 << 8) | b0;
++ while (n-- > 0 && (*buflen) > 0)
++ {
++ *(*cp)++ = b64t[w & 0x3f];
++ --(*buflen);
++ w >>= 6;
++ }
++}
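The routine above packs a 24-bit value into base64-style characters, emitting the least-significant six bits first and using crypt's './0-9A-Za-z' alphabet rather than the RFC 4648 one. A self-contained driver is sketched below; the table and function body are copied from crypt_common.c above (renamed without the leading underscores), and the buffer size and sample bytes in main() are arbitrary.

  /* b64-demo.c: standalone demo of the encoder added in crypt_common.c.  */
  #include <stdio.h>

  static const char b64t[64] =
  "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";

  static void
  b64_from_24bit (char **cp, int *buflen,
                  unsigned int b2, unsigned int b1, unsigned int b0,
                  int n)
  {
    unsigned int w = (b2 << 16) | (b1 << 8) | b0;
    while (n-- > 0 && (*buflen) > 0)
      {
        *(*cp)++ = b64t[w & 0x3f];
        --(*buflen);
        w >>= 6;
      }
  }

  int
  main (void)
  {
    char buf[5];
    char *cp = buf;
    int buflen = sizeof buf - 1;

    /* Encode the three bytes 'M', 'a', 'n' as four 6-bit characters,
       least-significant group first; prints "i3KH" with this alphabet.  */
    b64_from_24bit (&cp, &buflen, 'M', 'a', 'n', 4);
    *cp = '\0';
    printf ("%s\n", buf);
    return 0;
  }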
+Index: git/crypt/crypt_util.c
+===================================================================
+--- git.orig/crypt/crypt_util.c 2014-08-29 20:00:43.028070587 -0700
++++ git/crypt/crypt_util.c 2014-08-29 20:01:15.248070587 -0700
+@@ -242,10 +242,6 @@
+ */
+ static ufc_long efp[16][64][2];
+
+-/* Table with characters for base64 transformation. */
+-static const char b64t[64] =
+-"./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
+-
+ /*
+ * For use by the old, non-reentrant routines
+ * (crypt/encrypt/setkey)
+@@ -949,17 +945,3 @@
+ {
+ __setkey_r(__key, &_ufc_foobar);
+ }
+-
+-void
+-__b64_from_24bit (char **cp, int *buflen,
+- unsigned int b2, unsigned int b1, unsigned int b0,
+- int n)
+-{
+- unsigned int w = (b2 << 16) | (b1 << 8) | b0;
+- while (n-- > 0 && (*buflen) > 0)
+- {
+- *(*cp)++ = b64t[w & 0x3f];
+- --(*buflen);
+- w >>= 6;
+- }
+-}
+Index: git/sysdeps/arm/Makefile
+===================================================================
+--- git.orig/sysdeps/arm/Makefile 2014-08-29 20:29:37.000000000 -0700
++++ git/sysdeps/arm/Makefile 2014-08-29 20:31:09.904070587 -0700
+@@ -37,10 +37,13 @@
+ # get offset to rtld_global._dl_hwcap
+ gen-as-const-headers += rtld-global-offsets.sym tlsdesc.sym
+ aeabi_constants = aeabi_lcsts aeabi_sighandlers aeabi_math
+-aeabi_routines = aeabi_assert aeabi_localeconv aeabi_errno_addr \
++aeabi_routines = aeabi_assert aeabi_errno_addr \
+ aeabi_mb_cur_max aeabi_atexit aeabi_memclr aeabi_memcpy \
+ aeabi_memmove aeabi_memset \
+ aeabi_read_tp libc-aeabi_read_tp
++ifeq (y,$(OPTION_EGLIBC_LOCALE_CODE))
++aeabi_routines += aeabi_localeconv
++endif
+
+ sysdep_routines += $(aeabi_constants) $(aeabi_routines)
+ static-only-routines += $(aeabi_constants) aeabi_read_tp
diff --git a/meta/recipes-core/glibc/glibc/eglibc.patch b/meta/recipes-core/glibc/glibc/eglibc.patch
new file mode 100644
index 0000000000..fdfabc3a06
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/eglibc.patch
@@ -0,0 +1,602 @@
+Instructional documents imported from eglibc, covering cross-building and cross-testing.
+
+Upstream-Status: Pending
+
+Index: git/EGLIBC.cross-building
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/EGLIBC.cross-building 2014-08-27 07:27:25.580070587 +0000
+@@ -0,0 +1,383 @@
++ -*- mode: text -*-
++
++ Cross-Compiling EGLIBC
++ Jim Blandy <jimb@codesourcery.com>
++
++
++Introduction
++
++Most GNU tools have a simple build procedure: you run their
++'configure' script, and then you run 'make'. Unfortunately, the
++process of cross-compiling the GNU C library is quite a bit more
++involved:
++
++1) Build a cross-compiler, with certain facilities disabled.
++
++2) Configure the C library using the compiler you built in step 1).
++ Build a few of the C run-time object files, but not the rest of the
++ library. Install the library's header files and the run-time
++ object files, and create a dummy libc.so.
++
++3) Build a second cross-compiler, using the header files and object
++ files you installed in step 2.
++
++4) Configure, build, and install a fresh C library, using the compiler
++ built in step 3.
++
++5) Build a third cross-compiler, based on the C library built in step 4.
++
++The reason for this complexity is that, although GCC and the GNU C
++library are distributed separately, they are not actually independent
++of each other: GCC requires the C library's headers and some object
++files to compile its own libraries, while the C library depends on
++GCC's libraries. EGLIBC includes features and bug fixes to the stock
++GNU C library that simplify this process, but the fundamental
++interdependency stands.
++
++In this document, we explain how to cross-compile an EGLIBC/GCC pair
++from source. Our intended audience is developers who are already
++familiar with the GNU toolchain and comfortable working with
++cross-development tools. While we do present a worked example to
++accompany the explanation, for clarity's sake we do not cover many of
++the options available to cross-toolchain users.
++
++
++Preparation
++
++EGLIBC requires recent versions of the GNU binutils, GCC, and the
++Linux kernel. The web page <http://www.eglibc.org/prerequisites>
++documents the current requirements, and lists patches needed for
++certain target architectures. As of this writing, these build
++instructions have been tested with binutils 2.22.51, GCC 4.6.2,
++and Linux 3.1.
++
++First, let's set some variables, to simplify later commands. We'll
++build EGLIBC and GCC for an ARM target, known to the Linux kernel
++as 'arm', and we'll do the build on an Intel x86_64 Linux box:
++
++ $ build=x86_64-pc-linux-gnu
++ $ host=$build
++ $ target=arm-none-linux-gnueabi
++ $ linux_arch=arm
++
++We're using the aforementioned versions of Binutils, GCC, and Linux:
++
++ $ binutilsv=binutils-2.22.51
++ $ gccv=gcc-4.6.2
++ $ linuxv=linux-3.1
++
++We're carrying out the entire process under '~/cross-build', which
++contains unpacked source trees for binutils, gcc, and linux kernel,
++along with EGLIBC svn trunk (which can be checked-out with
++'svn co http://www.eglibc.org/svn/trunk eglibc'):
++
++ $ top=$HOME/cross-build/$target
++ $ src=$HOME/cross-build/src
++ $ ls $src
++ binutils-2.22.51 eglibc gcc-4.6.2 linux-3.1
++
++We're going to place our build directories in a subdirectory 'obj',
++we'll install the cross-development toolchain in 'tools', and we'll
++place our sysroot (containing files to be installed on the target
++system) in 'sysroot':
++
++ $ obj=$top/obj
++ $ tools=$top/tools
++ $ sysroot=$top/sysroot
++
++
++Binutils
++
++Configuring and building binutils for the target is straightforward:
++
++ $ mkdir -p $obj/binutils
++ $ cd $obj/binutils
++ $ $src/$binutilsv/configure \
++ > --target=$target \
++ > --prefix=$tools \
++ > --with-sysroot=$sysroot
++ $ make
++ $ make install
++
++
++The First GCC
++
++For our work, we need a cross-compiler targeting an ARM Linux
++system. However, that configuration includes the shared library
++'libgcc_s.so', which is compiled against the EGLIBC headers (which we
++haven't installed yet) and linked against 'libc.so' (which we haven't
++built yet).
++
++Fortunately, there are configuration options for GCC which tell it not
++to build 'libgcc_s.so'. The '--without-headers' option is supposed to
++take care of this, but its implementation is incomplete, so you must
++also configure with the '--with-newlib' option. While '--with-newlib'
++appears to mean "Use the Newlib C library", its effect is to tell the
++GCC build machinery, "Don't assume there is a C library available."
++
++We also need to disable some of the libraries that would normally be
++built along with GCC, and specify that only the compiler for the C
++language is needed.
++
++So, we create a build directory, configure, make, and install.
++
++ $ mkdir -p $obj/gcc1
++ $ cd $obj/gcc1
++ $ $src/$gccv/configure \
++ > --target=$target \
++ > --prefix=$tools \
++ > --without-headers --with-newlib \
++ > --disable-shared --disable-threads --disable-libssp \
++ > --disable-libgomp --disable-libmudflap --disable-libquadmath \
++ > --disable-decimal-float --disable-libffi \
++ > --enable-languages=c
++ $ PATH=$tools/bin:$PATH make
++ $ PATH=$tools/bin:$PATH make install
++
++
++Linux Kernel Headers
++
++To configure EGLIBC, we also need Linux kernel headers in place.
++Fortunately, the Linux makefiles have a target that installs them for
++us. Since the process does modify the source tree a bit, we make a
++copy first:
++
++ $ cp -r $src/$linuxv $obj/linux
++ $ cd $obj/linux
++
++Now we're ready to install the headers into the sysroot:
++
++ $ PATH=$tools/bin:$PATH \
++ > make headers_install \
++ > ARCH=$linux_arch CROSS_COMPILE=$target- \
++ > INSTALL_HDR_PATH=$sysroot/usr
++
++
++EGLIBC Headers and Preliminary Objects
++
++Using the cross-compiler we've just built, we can now configure EGLIBC
++well enough to install the headers and build the object files that the
++full cross-compiler will need:
++
++ $ mkdir -p $obj/eglibc-headers
++ $ cd $obj/eglibc-headers
++ $ BUILD_CC=gcc \
++ > CC=$tools/bin/$target-gcc \
++ > CXX=$tools/bin/$target-g++ \
++ > AR=$tools/bin/$target-ar \
++ > RANLIB=$tools/bin/$target-ranlib \
++ > $src/eglibc/libc/configure \
++ > --prefix=/usr \
++ > --with-headers=$sysroot/usr/include \
++ > --build=$build \
++ > --host=$target \
++ > --disable-profile --without-gd --without-cvs \
++ > --enable-add-ons=nptl,libidn,../ports
++
++The option '--prefix=/usr' may look strange, but you should never
++configure EGLIBC with a prefix other than '/usr': in various places,
++EGLIBC's build system checks whether the prefix is '/usr', and does
++special handling only if that is the case. Unless you use this
++prefix, you will get a sysroot that does not use the standard Linux
++directory layout, and it cannot serve as the basis for a root
++filesystem on your target system that is compatible with normal
++GLIBC installations.
++
++The '--with-headers' option tells EGLIBC where the Linux headers have
++been installed.
++
++The '--enable-add-ons=nptl,libidn,../ports' option tells EGLIBC to look
++for the listed glibc add-ons. Most notably the ports add-on (located
++just above the libc sources in the EGLIBC svn tree) is required to
++support ARM targets.
++
++We can now use the 'install-headers' makefile target to install the
++headers:
++
++ $ make install-headers install_root=$sysroot \
++ > install-bootstrap-headers=yes
++
++The 'install_root' variable indicates where the files should actually
++be installed; its value is treated as the parent of the '--prefix'
++directory we passed to the configure script, so the headers will go in
++'$sysroot/usr/include'. The 'install-bootstrap-headers' variable
++requests special handling for certain tricky header files.
++
++Next, there are a few object files needed to link shared libraries,
++which we build and install by hand:
++
++ $ mkdir -p $sysroot/usr/lib
++ $ make csu/subdir_lib
++ $ cp csu/crt1.o csu/crti.o csu/crtn.o $sysroot/usr/lib
++
++Finally, 'libgcc_s.so' requires a 'libc.so' to link against. However,
++since we will never actually execute its code, it doesn't matter what
++it contains. So, treating '/dev/null' as a C source file, we produce
++a dummy 'libc.so' in one step:
++
++ $ $tools/bin/$target-gcc -nostdlib -nostartfiles -shared -x c /dev/null \
++ > -o $sysroot/usr/lib/libc.so
++
++
++The Second GCC
++
++With the EGLIBC headers and selected object files installed, we can
++now build a GCC that is capable of compiling EGLIBC. We configure,
++build, and install the second GCC, again building only the C compiler,
++and avoiding libraries we won't use:
++
++ $ mkdir -p $obj/gcc2
++ $ cd $obj/gcc2
++ $ $src/$gccv/configure \
++ > --target=$target \
++ > --prefix=$tools \
++ > --with-sysroot=$sysroot \
++ > --disable-libssp --disable-libgomp --disable-libmudflap \
++ > --disable-libffi --disable-libquadmath \
++ > --enable-languages=c
++ $ PATH=$tools/bin:$PATH make
++ $ PATH=$tools/bin:$PATH make install
++
++
++EGLIBC, Complete
++
++With the second compiler built and installed, we're now ready for the
++full EGLIBC build:
++
++ $ mkdir -p $obj/eglibc
++ $ cd $obj/eglibc
++ $ BUILD_CC=gcc \
++ > CC=$tools/bin/$target-gcc \
++ > CXX=$tools/bin/$target-g++ \
++ > AR=$tools/bin/$target-ar \
++ > RANLIB=$tools/bin/$target-ranlib \
++ > $src/eglibc/libc/configure \
++ > --prefix=/usr \
++ > --with-headers=$sysroot/usr/include \
++ > --with-kconfig=$obj/linux/scripts/kconfig \
++ > --build=$build \
++ > --host=$target \
++ > --disable-profile --without-gd --without-cvs \
++ > --enable-add-ons=nptl,libidn,../ports
++
++Note the additional '--with-kconfig' option. This tells EGLIBC where to
++find the host config tools used by the kernel 'make config' and 'make
++menuconfig'. These tools can be re-used by EGLIBC for its own 'make
++*config' support, which will create 'option-groups.config' for you.
++But first make sure those tools have been built by running some
++dummy 'make *config' calls in the kernel directory:
++
++ $ cd $obj/linux
++ $ PATH=$tools/bin:$PATH make config \
++ > ARCH=$linux_arch CROSS_COMPILE=$target-
++ $ PATH=$tools/bin:$PATH make menuconfig \
++ > ARCH=$linux_arch CROSS_COMPILE=$target-
++
++Now we can configure and build the full EGLIBC:
++
++ $ cd $obj/eglibc
++ $ PATH=$tools/bin:$PATH make defconfig
++ $ PATH=$tools/bin:$PATH make menuconfig
++ $ PATH=$tools/bin:$PATH make
++ $ PATH=$tools/bin:$PATH make install install_root=$sysroot
++
++At this point, we have a complete EGLIBC installation in '$sysroot',
++with header files, library files, and most of the C runtime startup
++files in place.
++
++
++The Third GCC
++
++Finally, we recompile GCC against this full installation, enabling
++whatever languages and libraries we would like to use:
++
++ $ mkdir -p $obj/gcc3
++ $ cd $obj/gcc3
++ $ $src/$gccv/configure \
++ > --target=$target \
++ > --prefix=$tools \
++ > --with-sysroot=$sysroot \
++ > --enable-__cxa_atexit \
++ > --disable-libssp --disable-libgomp --disable-libmudflap \
++ > --enable-languages=c,c++
++ $ PATH=$tools/bin:$PATH make
++ $ PATH=$tools/bin:$PATH make install
++
++The '--enable-__cxa_atexit' option tells GCC what sort of C++
++destructor support to expect from the C library; it's required with
++EGLIBC.
++
++And since GCC's installation process isn't designed to help construct
++sysroot trees, we must manually copy certain libraries into place in
++the sysroot.
++
++ $ cp -d $tools/$target/lib/libgcc_s.so* $sysroot/lib
++ $ cp -d $tools/$target/lib/libstdc++.so* $sysroot/usr/lib
++
++
++Trying Things Out
++
++At this point, '$tools' contains a cross toolchain ready to use
++the EGLIBC installation in '$sysroot':
++
++ $ cat > hello.c <<EOF
++ > #include <stdio.h>
++ > int
++ > main (int argc, char **argv)
++ > {
++ > puts ("Hello, world!");
++ > return 0;
++ > }
++ > EOF
++ $ $tools/bin/$target-gcc -Wall hello.c -o hello
++ $ cat > c++-hello.cc <<EOF
++ > #include <iostream>
++ > int
++ > main (int argc, char **argv)
++ > {
++ > std::cout << "Hello, C++ world!" << std::endl;
++ > return 0;
++ > }
++ > EOF
++ $ $tools/bin/$target-g++ -Wall c++-hello.cc -o c++-hello
++
++
++We can use 'readelf' to verify that these are indeed executables for
++our target, using our dynamic linker:
++
++ $ $tools/bin/$target-readelf -hl hello
++ ELF Header:
++ ...
++ Type: EXEC (Executable file)
++ Machine: ARM
++
++ ...
++ Program Headers:
++ Type Offset VirtAddr PhysAddr FileSiz MemSiz Flg Align
++ PHDR 0x000034 0x10000034 0x10000034 0x00100 0x00100 R E 0x4
++ INTERP 0x000134 0x00008134 0x00008134 0x00013 0x00013 R 0x1
++ [Requesting program interpreter: /lib/ld-linux.so.3]
++ LOAD 0x000000 0x00008000 0x00008000 0x0042c 0x0042c R E 0x8000
++ ...
++
++Looking at the dynamic section of the installed 'libgcc_s.so', we see
++that the 'NEEDED' entry for the C library does include the '.6'
++suffix, indicating that it was linked against our fully built EGLIBC, and
++not our dummy 'libc.so':
++
++ $ $tools/bin/$target-readelf -d $sysroot/lib/libgcc_s.so.1
++ Dynamic section at offset 0x1083c contains 24 entries:
++ Tag Type Name/Value
++ 0x00000001 (NEEDED) Shared library: [libc.so.6]
++ 0x0000000e (SONAME) Library soname: [libgcc_s.so.1]
++ ...
++
++
++And on the target machine, we can run our programs:
++
++ $ $sysroot/lib/ld-linux.so.3 --library-path $sysroot/lib:$sysroot/usr/lib \
++ > ./hello
++ Hello, world!
++ $ $sysroot/lib/ld-linux.so.3 --library-path $sysroot/lib:$sysroot/usr/lib \
++ > ./c++-hello
++ Hello, C++ world!
+Index: git/EGLIBC.cross-testing
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/EGLIBC.cross-testing 2014-08-27 07:24:41.532070587 +0000
+@@ -0,0 +1,205 @@
++ -*- mode: text -*-
++
++ Cross-Testing With EGLIBC
++ Jim Blandy <jimb@codesourcery.com>
++
++
++Introduction
++
++Developers writing software for embedded systems often use a desktop
++or other similarly capable computer for development, but need to run
++tests on the embedded system, or perhaps on a simulator. When
++configured for cross-compilation, the stock GNU C library simply
++disables running tests altogether: the command 'make tests' builds
++test programs, but does not run them. EGLIBC, however, provides
++facilities for compiling tests and generating data files on the build
++system, while running the test programs themselves on a remote system or
++simulator.
++
++
++Test environment requirements
++
++The test environment must meet certain conditions for EGLIBC's
++cross-testing facilities to work (one possible arrangement is sketched
++after the list):
++
++- Shared filesystems. The 'build' system, on which you configure and
++ compile EGLIBC, and the 'host' system, on which you intend to run
++ EGLIBC, must share a filesystem containing the EGLIBC build and
++ source trees. Files must appear at the same paths on both systems.
++
++- Remote-shell like invocation. There must be a way to run a program
++ on the host system from the build system, passing it properly quoted
++ command-line arguments, setting environment variables, and
++ inheriting the caller's standard input and output.
++
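++For example, one common arrangement (host names and paths here are
++purely illustrative) is to export the EGLIBC source and build area
++from the build system over NFS, mount it at the same path on the
++host, and use 'ssh' for remote invocation:
++
++ # On the build system, export the work area:
++ $ cat /etc/exports
++ /home/me/eglibc   target-board(rw,sync,no_root_squash)
++
++ # On the host system, mount it at the *same* path:
++ $ mount -t nfs build-box:/home/me/eglibc /home/me/eglibc
++
++ # Back on the build system, check that remote invocation works:
++ $ ssh target-board true && echo ok
++ ok
++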
++
++Usage
++
++To use EGLIBC's cross-testing support, provide values for the
++following Make variables when you invoke 'make':
++
++- cross-test-wrapper
++
++ This should be the name of the cross-testing wrapper command, along
++ with any arguments.
++
++- cross-localedef
++
++ This should be the name of a cross-capable localedef program, like
++ that included in the EGLIBC 'localedef' module, along with any
++ arguments needed.
++
++These are each explained in detail below.
++
++
++The Cross-Testing Wrapper
++
++To run test programs reliably, the stock GNU C library takes care to
++ensure that test programs use the newly compiled dynamic linker and
++shared libraries, and never the host system's installed libraries. To
++accomplish this, it runs the tests by explicitly invoking the dynamic
++linker from the build tree, passing it a list of build tree
++directories to search for shared libraries, followed by the name of
++the executable to run and its arguments.
++
++For example, where one might normally run a test program like this:
++
++ $ ./tst-foo arg1 arg2
++
++the GNU C library might run that program like this:
++
++ $ $objdir/elf/ld-linux.so.3 --library-path $objdir \
++ ./tst-foo arg1 arg2
++
++(where $objdir is the path to the top of the build tree, and the
++trailing backslash indicates a continuation of the command). In other
++words, each test program invocation is 'wrapped up' inside an explicit
++invocation of the dynamic linker, which must itself execute the test
++program, having loaded shared libraries from the appropriate
++directories.
++
++To support cross-testing, EGLIBC allows the developer to optionally
++set the 'cross-test-wrapper' Make variable to another wrapper command,
++to which it passes the entire dynamic linker invocation shown above as
++arguments. For example, if the developer supplies a wrapper of
++'my-wrapper hostname', then EGLIBC would run the test above as
++follows:
++
++ $ my-wrapper hostname \
++ $objdir/elf/ld-linux.so.3 --library-path $objdir \
++ ./tst-foo arg1 arg2
++
++The 'my-wrapper' command is responsible for executing the command
++given on the host system.
++
++Since tests are run in varying directories, the wrapper should either
++be in your command search path, or 'cross-test-wrapper' should give an
++absolute path for the wrapper.
++
++The wrapper must meet several requirements (a minimal example sketch
++follows this list):
++
++- It must preserve the current directory. As explained above, the
++ build directory tree must be visible on both the build and host
++ systems, at the same path. The test wrapper must ensure that the
++ current directory it inherits is also inherited by the dynamic
++ linker (and thus the test program itself).
++
++- It must preserve environment variables' values. Many EGLIBC tests
++  set environment variables for test runs; in native testing, the
++  test harness invokes programs like this:
++
++ $ GCONV_PATH=$objdir/iconvdata \
++ $objdir/elf/ld-linux.so.3 --library-path $objdir \
++ ./tst-foo arg1 arg2
++
++ With the cross-testing wrapper, that invocation becomes:
++
++ $ GCONV_PATH=$objdir/iconvdata \
++ my-wrapper hostname \
++ $objdir/elf/ld-linux.so.3 --library-path $objdir \
++ ./tst-foo arg1 arg2
++
++ Here, 'my-wrapper' must ensure that the value it sees for
++ 'GCONV_PATH' will be seen by the dynamic linker, and thus 'tst-foo'
++  itself. (The wrapper supplied with EGLIBC simply preserves the
++  values of *all* environment variables, with a fixed set of
++ exceptions.)
++
++ If your wrapper is a shell script, take care to correctly propagate
++ environment variables whose values contain spaces and shell
++ metacharacters.
++
++- It must pass the command's arguments, unmodified. The arguments
++ seen by the test program should be exactly those seen by the wrapper
++ (after whatever arguments are given to the wrapper itself). The
++ EGLIBC test framework performs all needed shell word splitting and
++ expansion (wildcard expansion, parameter substitution, and so on)
++ before invoking the wrapper; further expansion may break the tests.
++
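++To make these requirements concrete, here is a minimal sketch of an
++'ssh'-based wrapper (the 'my-wrapper' name, the short list of
++forwarded variables, and the naive quoting are all simplifications;
++the 'cross-test-ssh.sh' script described below handles these details
++properly):
++
++ #!/bin/sh
++ # my-wrapper HOST COMMAND [ARG...]  -- illustrative sketch only.
++ # It forwards just a few variables and uses naive quoting, so it
++ # breaks on values containing spaces or shell metacharacters.
++ host="$1"; shift
++
++ # Collect a few variables the tests commonly rely on.
++ vars=
++ for v in GCONV_PATH LOCPATH LC_ALL; do
++   eval "val=\${$v}"
++   test -n "$val" && vars="$vars $v=$val"
++ done
++
++ # Re-run the command on the host, in the same directory, with those
++ # variables set; ssh carries our standard input and output.
++ exec ssh "$host" "cd $(pwd) && env $vars $*"
++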
++
++The 'cross-test-ssh.sh' script
++
++If you want to use 'ssh' (or something sufficiently similar) to run
++test programs on your host system, EGLIBC includes a shell script,
++'scripts/cross-test-ssh.sh', which you can use as your wrapper
++command. This script takes care of setting the test command's current
++directory, propagating environment variable values, and carrying
++command-line arguments, all across an 'ssh' connection. You may even
++supply an alternative to 'ssh' on the command line, if needed.
++
++For more details, pass 'cross-test-ssh.sh' the '--help' option.
++
++
++The Cross-Compiling Locale Definition Command
++
++Some EGLIBC tests rely on locales generated especially for the test
++process. In a native configuration, these tests simply run the
++'localedef' command built by the normal EGLIBC build process,
++'locale/localedef', to process and install their locales. However, in
++a cross-compiling configuration, this 'localedef' is built for the
++host system, not the build system, and since it requires quite a bit
++of memory to run (we have seen it fail on systems with 64MiB of
++memory), it may not be practical to run it on the host system.
++
++If set, EGLIBC uses the 'cross-localedef' Make variable as the command
++to run on the build system to process and install locales. The
++localedef program built from the EGLIBC 'localedef' module is
++suitable.
++
++The value of 'cross-localedef' may also include command-line arguments
++to be passed to the program; if you are using EGLIBC's 'localedef',
++you may include endianness and 'uint32_t' alignment arguments here.
++
++
++Example
++
++In developing EGLIBC's cross-testing facility, we invoked 'make' with
++the following script:
++
++ #!/bin/sh
++
++ srcdir=...
++ test_hostname=...
++ localedefdir=...
++ cross_gxx=...-g++
++
++ wrapper="$srcdir/scripts/cross-test-ssh.sh $test_hostname"
++ localedef="$localedefdir/localedef --little-endian --uint32-align=4"
++
++ make cross-test-wrapper="$wrapper" \
++ cross-localedef="$localedef" \
++ CXX="$cross_gxx" \
++ "$@"
++
++
++Other Cross-Testing Concerns
++
++Here are notes on some other issues which you may encounter in running
++the EGLIBC tests in a cross-compiling environment:
++
++- Some tests require a C++ cross-compiler; you should set the 'CXX'
++ Make variable to the name of an appropriate cross-compiler.
++
++- Some tests require access to libstdc++.so.6 and libgcc_s.so.1; we
++ simply place copies of these libraries in the top EGLIBC build
++  directory, as sketched below.
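++
++  For example (the '$tools', '$target', and '$obj' variables are
++  those used in the companion cross-building instructions; your
++  compiler's library directory may differ):
++
++     $ cp $tools/$target/lib/libgcc_s.so.1 $obj/eglibc
++     $ cp $tools/$target/lib/libstdc++.so.6 $obj/eglibc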
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc b/meta/recipes-core/glibc/glibc/etc/ld.so.conf
index e69de29bb2..e69de29bb2 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc
+++ b/meta/recipes-core/glibc/glibc/etc/ld.so.conf
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/fix-tibetian-locales.patch b/meta/recipes-core/glibc/glibc/fix-tibetian-locales.patch
index 25c43a9fe6..9ab9fdcf12 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/fix-tibetian-locales.patch
+++ b/meta/recipes-core/glibc/glibc/fix-tibetian-locales.patch
@@ -10,11 +10,11 @@ Upstream-Status: Pending
Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Index: libc/localedata/locales/bo_CN
+Index: git/localedata/locales/bo_CN
===================================================================
---- libc.orig/localedata/locales/bo_CN 2012-11-17 09:50:14.000000000 -0800
-+++ libc/localedata/locales/bo_CN 2013-01-04 08:55:15.593612288 -0800
-@@ -145,7 +145,7 @@
+--- git.orig/localedata/locales/bo_CN 2014-08-29 10:35:22.464070587 -0700
++++ git/localedata/locales/bo_CN 2014-08-29 10:35:22.456070587 -0700
+@@ -146,7 +146,7 @@
LC_NAME
% FIXME
@@ -23,11 +23,11 @@ Index: libc/localedata/locales/bo_CN
% name_gen "FIXME"
% name_miss "FIXME"
% name_mr "FIXME"
-Index: libc/localedata/locales/bo_IN
+Index: git/localedata/locales/bo_IN
===================================================================
---- libc.orig/localedata/locales/bo_IN 2012-11-17 09:50:14.000000000 -0800
-+++ libc/localedata/locales/bo_IN 2013-01-04 08:54:12.345609028 -0800
-@@ -70,7 +70,7 @@
+--- git.orig/localedata/locales/bo_IN 2014-08-29 10:35:22.464070587 -0700
++++ git/localedata/locales/bo_IN 2014-08-29 10:35:22.456070587 -0700
+@@ -71,7 +71,7 @@
LC_NAME
% FIXME
diff --git a/meta/recipes-core/glibc/glibc/fix_am_rootsbindir.patch b/meta/recipes-core/glibc/glibc/fix_am_rootsbindir.patch
new file mode 100644
index 0000000000..668e8bf678
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/fix_am_rootsbindir.patch
@@ -0,0 +1,29 @@
+sysdeps/gnu/configure.ac: handle correctly $libc_cv_rootsbindir
+
+Upstream-Status: Pending
+Signed-off-by: Matthieu Crapet <Matthieu.Crapet@ingenico.com>
+
+Index: git/sysdeps/gnu/configure
+===================================================================
+--- git.orig/sysdeps/gnu/configure 2014-08-27 07:24:38.572070587 +0000
++++ git/sysdeps/gnu/configure 2014-08-27 07:24:41.308070587 +0000
+@@ -32,6 +32,6 @@
+ else
+ libc_cv_localstatedir=$localstatedir
+ fi
+- libc_cv_rootsbindir=/sbin
++ test -n "$libc_cv_rootsbindir" || libc_cv_rootsbindir=/sbin
+ ;;
+ esac
+Index: git/sysdeps/gnu/configure.ac
+===================================================================
+--- git.orig/sysdeps/gnu/configure.ac 2014-08-27 07:24:38.572070587 +0000
++++ git/sysdeps/gnu/configure.ac 2014-08-27 07:24:41.308070587 +0000
+@@ -21,6 +21,6 @@
+ else
+ libc_cv_localstatedir=$localstatedir
+ fi
+- libc_cv_rootsbindir=/sbin
++ test -n "$libc_cv_rootsbindir" || libc_cv_rootsbindir=/sbin
+ ;;
+ esac
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/fsl-ppc-no-fsqrt.patch b/meta/recipes-core/glibc/glibc/fsl-ppc-no-fsqrt.patch
index 511ee9d19e..f88eaf444e 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/fsl-ppc-no-fsqrt.patch
+++ b/meta/recipes-core/glibc/glibc/fsl-ppc-no-fsqrt.patch
@@ -7,12 +7,12 @@ emit fsqrt intructions
Upstream-Status: Pending
-Index: libc/sysdeps/powerpc/fpu/math_private.h
+Index: git/sysdeps/powerpc/fpu/math_private.h
===================================================================
---- libc.orig/sysdeps/powerpc/fpu/math_private.h
-+++ libc/sysdeps/powerpc/fpu/math_private.h
+--- git.orig/sysdeps/powerpc/fpu/math_private.h 2014-08-29 10:31:30.224070587 -0700
++++ git/sysdeps/powerpc/fpu/math_private.h 2014-08-29 10:31:30.212070587 -0700
@@ -25,10 +25,12 @@
- #include <dl-procinfo.h>
+ #include <fenv_private.h>
#include_next <math_private.h>
-# if __WORDSIZE == 64 || defined _ARCH_PWR4
@@ -28,10 +28,10 @@ Index: libc/sysdeps/powerpc/fpu/math_private.h
# endif
extern double __slow_ieee754_sqrt (double);
-Index: libc/ports/sysdeps/powerpc/powerpc64/e5500/fpu/math_private.h
+Index: git/sysdeps/powerpc/powerpc64/e5500/fpu/math_private.h
===================================================================
---- /dev/null
-+++ libc/ports/sysdeps/powerpc/powerpc64/e5500/fpu/math_private.h
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc64/e5500/fpu/math_private.h 2014-08-29 10:31:30.212070587 -0700
@@ -0,0 +1,9 @@
+#ifndef _E5500_MATH_PRIVATE_H_
+#define _E5500_MATH_PRIVATE_H_ 1
@@ -42,10 +42,10 @@ Index: libc/ports/sysdeps/powerpc/powerpc64/e5500/fpu/math_private.h
+#include_next <math_private.h>
+
+#endif /* _E5500_MATH_PRIVATE_H_ */
-Index: libc/ports/sysdeps/powerpc/powerpc64/e6500/fpu/math_private.h
+Index: git/sysdeps/powerpc/powerpc64/e6500/fpu/math_private.h
===================================================================
---- /dev/null
-+++ libc/ports/sysdeps/powerpc/powerpc64/e6500/fpu/math_private.h
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc64/e6500/fpu/math_private.h 2014-08-29 10:31:30.212070587 -0700
@@ -0,0 +1,9 @@
+#ifndef _E6500_MATH_PRIVATE_H_
+#define _E6500_MATH_PRIVATE_H_ 1
@@ -56,10 +56,10 @@ Index: libc/ports/sysdeps/powerpc/powerpc64/e6500/fpu/math_private.h
+#include_next <math_private.h>
+
+#endif /* _E6500_MATH_PRIVATE_H_ */
-Index: libc/ports/sysdeps/powerpc/powerpc32/e500mc/fpu/math_private.h
+Index: git/sysdeps/powerpc/powerpc32/e500mc/fpu/math_private.h
===================================================================
---- /dev/null
-+++ libc/ports/sysdeps/powerpc/powerpc32/e500mc/fpu/math_private.h
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc32/e500mc/fpu/math_private.h 2014-08-29 10:31:30.212070587 -0700
@@ -0,0 +1,9 @@
+#ifndef _E500MC_MATH_PRIVATE_H_
+#define _E500MC_MATH_PRIVATE_H_ 1
@@ -70,10 +70,10 @@ Index: libc/ports/sysdeps/powerpc/powerpc32/e500mc/fpu/math_private.h
+#include_next <math_private.h>
+
+#endif /* _E500MC_MATH_PRIVATE_H_ */
-Index: libc/ports/sysdeps/powerpc/powerpc32/e5500/fpu/math_private.h
+Index: git/sysdeps/powerpc/powerpc32/e5500/fpu/math_private.h
===================================================================
---- /dev/null
-+++ libc/ports/sysdeps/powerpc/powerpc32/e5500/fpu/math_private.h
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc32/e5500/fpu/math_private.h 2014-08-29 10:31:30.216070587 -0700
@@ -0,0 +1,9 @@
+#ifndef _E5500_MATH_PRIVATE_H_
+#define _E5500_MATH_PRIVATE_H_ 1
@@ -84,10 +84,10 @@ Index: libc/ports/sysdeps/powerpc/powerpc32/e5500/fpu/math_private.h
+#include_next <math_private.h>
+
+#endif /* _E5500_MATH_PRIVATE_H_ */
-Index: libc/ports/sysdeps/powerpc/powerpc32/e6500/fpu/math_private.h
+Index: git/sysdeps/powerpc/powerpc32/e6500/fpu/math_private.h
===================================================================
---- /dev/null
-+++ libc/ports/sysdeps/powerpc/powerpc32/e6500/fpu/math_private.h
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc32/e6500/fpu/math_private.h 2014-08-29 10:31:30.216070587 -0700
@@ -0,0 +1,9 @@
+#ifndef _E6500_MATH_PRIVATE_H_
+#define _E6500_MATH_PRIVATE_H_ 1
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/generate-supported.mk b/meta/recipes-core/glibc/glibc/generate-supported.mk
index d2a28c2dc6..d2a28c2dc6 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/generate-supported.mk
+++ b/meta/recipes-core/glibc/glibc/generate-supported.mk
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/glibc.fix_sqrt2.patch b/meta/recipes-core/glibc/glibc/glibc.fix_sqrt2.patch
index 689b79c61c..f5ed1bfeef 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/glibc.fix_sqrt2.patch
+++ b/meta/recipes-core/glibc/glibc/glibc.fix_sqrt2.patch
@@ -1,10 +1,10 @@
Signed-of-by: Edmar Wienskoski <edmar@freescale.com>
Upstream-Status: Pending
-Index: libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c 2014-08-29 10:34:07.768070587 -0700
@@ -0,0 +1,134 @@
+/* Double-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -140,10 +140,10 @@ Index: libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c
+ }
+ return f_wash (b);
+}
-Index: libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c 2014-08-29 10:34:07.768070587 -0700
@@ -0,0 +1,101 @@
+/* Single-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -246,10 +246,10 @@ Index: libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c
+ }
+ return f_washf (b);
+}
-Index: libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c 2014-08-29 10:34:07.768070587 -0700
@@ -0,0 +1,134 @@
+/* Double-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -385,10 +385,10 @@ Index: libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c
+ }
+ return f_wash (b);
+}
-Index: libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c 2014-08-29 10:34:07.772070587 -0700
@@ -0,0 +1,101 @@
+/* Single-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -491,10 +491,10 @@ Index: libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c
+ }
+ return f_washf (b);
+}
-Index: libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c 2014-08-29 10:34:07.772070587 -0700
@@ -0,0 +1,134 @@
+/* Double-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -630,10 +630,10 @@ Index: libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c
+ }
+ return f_wash (b);
+}
-Index: libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c 2014-08-29 10:34:07.772070587 -0700
@@ -0,0 +1,101 @@
+/* Single-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -736,10 +736,10 @@ Index: libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c
+ }
+ return f_washf (b);
+}
-Index: libc/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c 2014-08-29 10:34:07.772070587 -0700
@@ -0,0 +1,134 @@
+/* Double-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -875,10 +875,10 @@ Index: libc/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c
+ }
+ return f_wash (b);
+}
-Index: libc/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrtf.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrtf.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrtf.c 2014-08-29 10:34:07.772070587 -0700
@@ -0,0 +1,101 @@
+/* Single-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -981,10 +981,10 @@ Index: libc/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrtf.c
+ }
+ return f_washf (b);
+}
-Index: libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c 2014-08-29 10:34:07.772070587 -0700
@@ -0,0 +1,134 @@
+/* Double-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -1120,10 +1120,10 @@ Index: libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c
+ }
+ return f_wash (b);
+}
-Index: libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c 2014-08-29 10:34:07.772070587 -0700
@@ -0,0 +1,101 @@
+/* Single-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -1226,10 +1226,10 @@ Index: libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c
+ }
+ return f_washf (b);
+}
-Index: libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c 2014-08-29 10:34:07.772070587 -0700
@@ -0,0 +1,134 @@
+/* Double-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -1365,10 +1365,10 @@ Index: libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c
+ }
+ return f_wash (b);
+}
-Index: libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c
===================================================================
---- /dev/null
-+++ libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c 2014-08-29 10:34:07.776070587 -0700
@@ -0,0 +1,101 @@
+/* Single-precision floating point square root.
+ Copyright (C) 2010 Free Software Foundation, Inc.
@@ -1471,46 +1471,46 @@ Index: libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c
+ }
+ return f_washf (b);
+}
-Index: libc/sysdeps/unix/sysv/linux/powerpc/powerpc32/603e/fpu/Implies
+Index: git/sysdeps/unix/sysv/linux/powerpc/powerpc32/603e/fpu/Implies
===================================================================
---- /dev/null
-+++ libc/sysdeps/unix/sysv/linux/powerpc/powerpc32/603e/fpu/Implies
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/unix/sysv/linux/powerpc/powerpc32/603e/fpu/Implies 2014-08-29 10:34:07.776070587 -0700
@@ -0,0 +1 @@
+powerpc/powerpc32/603e/fpu
-Index: libc/sysdeps/unix/sysv/linux/powerpc/powerpc32/e300c3/fpu/Implies
+Index: git/sysdeps/unix/sysv/linux/powerpc/powerpc32/e300c3/fpu/Implies
===================================================================
---- /dev/null
-+++ libc/sysdeps/unix/sysv/linux/powerpc/powerpc32/e300c3/fpu/Implies
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/unix/sysv/linux/powerpc/powerpc32/e300c3/fpu/Implies 2014-08-29 10:34:07.776070587 -0700
@@ -0,0 +1,2 @@
+# e300c3 is a variant of 603e so use the same optimizations for sqrt
+powerpc/powerpc32/603e/fpu
-Index: libc/sysdeps/unix/sysv/linux/powerpc/powerpc32/e500mc/fpu/Implies
+Index: git/sysdeps/unix/sysv/linux/powerpc/powerpc32/e500mc/fpu/Implies
===================================================================
---- /dev/null
-+++ libc/sysdeps/unix/sysv/linux/powerpc/powerpc32/e500mc/fpu/Implies
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/unix/sysv/linux/powerpc/powerpc32/e500mc/fpu/Implies 2014-08-29 10:34:07.776070587 -0700
@@ -0,0 +1 @@
+powerpc/powerpc32/e500mc/fpu
-Index: libc/sysdeps/unix/sysv/linux/powerpc/powerpc32/e5500/fpu/Implies
+Index: git/sysdeps/unix/sysv/linux/powerpc/powerpc32/e5500/fpu/Implies
===================================================================
---- /dev/null
-+++ libc/sysdeps/unix/sysv/linux/powerpc/powerpc32/e5500/fpu/Implies
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/unix/sysv/linux/powerpc/powerpc32/e5500/fpu/Implies 2014-08-29 10:34:07.776070587 -0700
@@ -0,0 +1 @@
+powerpc/powerpc32/e5500/fpu
-Index: libc/sysdeps/unix/sysv/linux/powerpc/powerpc32/e6500/fpu/Implies
+Index: git/sysdeps/unix/sysv/linux/powerpc/powerpc32/e6500/fpu/Implies
===================================================================
---- /dev/null
-+++ libc/sysdeps/unix/sysv/linux/powerpc/powerpc32/e6500/fpu/Implies
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/unix/sysv/linux/powerpc/powerpc32/e6500/fpu/Implies 2014-08-29 10:34:07.776070587 -0700
@@ -0,0 +1 @@
+powerpc/powerpc32/e6500/fpu
-Index: libc/sysdeps/unix/sysv/linux/powerpc/powerpc64/e5500/fpu/Implies
+Index: git/sysdeps/unix/sysv/linux/powerpc/powerpc64/e5500/fpu/Implies
===================================================================
---- /dev/null
-+++ libc/sysdeps/unix/sysv/linux/powerpc/powerpc64/e5500/fpu/Implies
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/unix/sysv/linux/powerpc/powerpc64/e5500/fpu/Implies 2014-08-29 10:34:07.780070587 -0700
@@ -0,0 +1 @@
+powerpc/powerpc64/e5500/fpu
-Index: libc/sysdeps/unix/sysv/linux/powerpc/powerpc64/e6500/fpu/Implies
+Index: git/sysdeps/unix/sysv/linux/powerpc/powerpc64/e6500/fpu/Implies
===================================================================
---- /dev/null
-+++ libc/sysdeps/unix/sysv/linux/powerpc/powerpc64/e6500/fpu/Implies
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/sysdeps/unix/sysv/linux/powerpc/powerpc64/e6500/fpu/Implies 2014-08-29 10:34:07.780070587 -0700
@@ -0,0 +1 @@
+powerpc/powerpc64/e6500/fpu
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/grok_gold.patch b/meta/recipes-core/glibc/glibc/grok_gold.patch
index d46737af98..26875c79d4 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/grok_gold.patch
+++ b/meta/recipes-core/glibc/glibc/grok_gold.patch
@@ -6,11 +6,11 @@ Signed-off-by: Khem Raj <raj.khem@gmail.com>
Upstream-Status: Backport
-Index: libc/configure
+Index: git/configure
===================================================================
---- libc.orig/configure
-+++ libc/configure
-@@ -4654,7 +4654,7 @@ else
+--- git.orig/configure 2014-08-29 10:32:34.464070587 -0700
++++ git/configure 2014-08-29 10:32:34.456070587 -0700
+@@ -4592,7 +4592,7 @@
# Found it, now check the version.
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking version of $LD" >&5
$as_echo_n "checking version of $LD... " >&6; }
@@ -19,11 +19,11 @@ Index: libc/configure
case $ac_prog_version in
'') ac_prog_version="v. ?.??, bad"; ac_verc_fail=yes;;
2.1[0-9][0-9]*|2.[2-9][0-9]*|[3-9].*|[1-9][0-9]*)
-Index: libc/configure.ac
+Index: git/configure.ac
===================================================================
---- libc.orig/configure.ac
-+++ libc/configure.ac
-@@ -990,7 +990,7 @@ AC_CHECK_PROG_VER(AS, $AS, --version,
+--- git.orig/configure.ac 2014-08-29 10:32:34.464070587 -0700
++++ git/configure.ac 2014-08-29 10:32:34.460070587 -0700
+@@ -930,7 +930,7 @@
[GNU assembler.* \([0-9]*\.[0-9.]*\)],
[2.1[0-9][0-9]*|2.[2-9][0-9]*|[3-9].*|[1-9][0-9]*], AS=: critic_missing="$critic_missing as")
AC_CHECK_PROG_VER(LD, $LD, --version,
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/initgroups_keys.patch b/meta/recipes-core/glibc/glibc/initgroups_keys.patch
index be29856b03..32aa15a533 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/initgroups_keys.patch
+++ b/meta/recipes-core/glibc/glibc/initgroups_keys.patch
@@ -5,16 +5,16 @@ as undefined symbol
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Upstream-Status: Pending
-Index: libc/nss/getent.c
+Index: git/nss/getent.c
===================================================================
---- libc.orig/nss/getent.c 2012-03-09 09:41:57.099581559 -0800
-+++ libc/nss/getent.c 2012-03-09 09:42:13.095582334 -0800
-@@ -898,7 +898,7 @@
+--- git.orig/nss/getent.c 2014-08-27 05:15:25.996070587 +0000
++++ git/nss/getent.c 2014-08-27 05:16:00.048070587 +0000
+@@ -879,7 +879,7 @@
D(group)
D(gshadow)
- DN(hosts)
+ D(hosts)
-D(initgroups)
+DN(initgroups)
- DN(netgroup)
- DN(networks)
+ D(netgroup)
+ D(networks)
D(passwd)
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/ld-search-order.patch b/meta/recipes-core/glibc/glibc/ld-search-order.patch
index e83a0ad6d2..f518bc7642 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/ld-search-order.patch
+++ b/meta/recipes-core/glibc/glibc/ld-search-order.patch
@@ -19,12 +19,12 @@ means we need to switch the order of 3 and 4 above to make this work effectively
RP 14/10/2010
-Index: libc/elf/dl-load.c
+Index: git/elf/dl-load.c
===================================================================
---- libc.orig/elf/dl-load.c 2012-12-02 13:11:45.000000000 -0800
-+++ libc/elf/dl-load.c 2013-01-09 07:00:59.135223084 -0800
-@@ -2215,7 +2215,14 @@
- fd = open_path (name, namelen, mode & __RTLD_SECURE,
+--- git.orig/elf/dl-load.c 2014-08-28 17:32:46.292070587 -0700
++++ git/elf/dl-load.c 2014-08-28 17:33:56.048070587 -0700
+@@ -2050,7 +2050,14 @@
+ fd = open_path (name, namelen, mode,
&loader->l_runpath_dirs, &realname, &fb, loader,
LA_SER_RUNPATH, &found_other_class);
-
@@ -38,19 +38,19 @@ Index: libc/elf/dl-load.c
+ /* Finally try ld.so.cache */
#ifdef USE_LDCONFIG
if (fd == -1
- && (__builtin_expect (! (mode & __RTLD_SECURE), 1)
-@@ -2283,14 +2290,6 @@
+ && (__glibc_likely ((mode & __RTLD_SECURE) == 0)
+@@ -2113,14 +2120,6 @@
}
#endif
- /* Finally, try the default path. */
- if (fd == -1
- && ((l = loader ?: GL(dl_ns)[nsid]._ns_loaded) == NULL
-- || __builtin_expect (!(l->l_flags_1 & DF_1_NODEFLIB), 1))
+- || __glibc_likely (!(l->l_flags_1 & DF_1_NODEFLIB)))
- && rtld_search_dirs.dirs != (void *) -1)
-- fd = open_path (name, namelen, mode & __RTLD_SECURE, &rtld_search_dirs,
+- fd = open_path (name, namelen, mode, &rtld_search_dirs,
- &realname, &fb, l, LA_SER_DEFAULT, &found_other_class);
-
/* Add another newline when we are tracing the library loading. */
- if (__builtin_expect (GLRO_dl_debug_mask & DL_DEBUG_LIBS, 0))
+ if (__glibc_unlikely (GLRO_dl_debug_mask & DL_DEBUG_LIBS))
_dl_debug_printf ("\n");
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/mips-rld-map-check.patch b/meta/recipes-core/glibc/glibc/mips-rld-map-check.patch
index 9b646fea95..9f593d6359 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/mips-rld-map-check.patch
+++ b/meta/recipes-core/glibc/glibc/mips-rld-map-check.patch
@@ -11,9 +11,10 @@ Upstream-Status: Pending
---
-diff -ru glibc-2.10.1.orig/ports/sysdeps/mips/dl-machine.h glibc-2.10.1/ports/sysdeps/mips/dl-machine.h
---- glibc-2.10.1.orig/ports/sysdeps/mips/dl-machine.h 2009-05-16 16:36:20.000000000 +0800
-+++ glibc-2.10.1/ports/sysdeps/mips/dl-machine.h 2010-09-19 09:11:53.000000000 +0800
+Index: git/sysdeps/mips/dl-machine.h
+===================================================================
+--- git.orig/sysdeps/mips/dl-machine.h 2014-08-27 04:58:11.840070587 +0000
++++ git/sysdeps/mips/dl-machine.h 2014-08-27 04:58:11.832070587 +0000
@@ -70,7 +70,8 @@
/* If there is a DT_MIPS_RLD_MAP entry in the dynamic section, fill it in
with the run-time address of the r_debug structure */
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/multilib_readlib.patch b/meta/recipes-core/glibc/glibc/multilib_readlib.patch
index 1542b1b519..13ffc46f91 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/multilib_readlib.patch
+++ b/meta/recipes-core/glibc/glibc/multilib_readlib.patch
@@ -5,9 +5,11 @@ variable EGLIBC_KNOWN_INTERPRETER_NAMES.
Lianhao Lu, 08/01/2011
---- libc/elf/readlib.c.orig 2011-08-12 17:05:51.864470837 +0800
-+++ libc/elf/readlib.c 2011-08-12 17:06:39.346942074 +0800
-@@ -52,6 +52,7 @@
+Index: git/elf/readlib.c
+===================================================================
+--- git.orig/elf/readlib.c 2014-08-29 10:34:16.824070587 -0700
++++ git/elf/readlib.c 2014-08-29 10:34:16.816070587 -0700
+@@ -51,6 +51,7 @@
#ifdef SYSDEP_KNOWN_INTERPRETER_NAMES
SYSDEP_KNOWN_INTERPRETER_NAMES
#endif
diff --git a/meta/recipes-core/glibc/glibc/option-groups.patch b/meta/recipes-core/glibc/glibc/option-groups.patch
new file mode 100644
index 0000000000..198be73524
--- /dev/null
+++ b/meta/recipes-core/glibc/glibc/option-groups.patch
@@ -0,0 +1,1397 @@
+Eglibc option group infrastructure
+
+Upstream-Status: Pending
+
+Index: git/option-groups.def
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/option-groups.def 2014-08-27 07:26:51.652070587 +0000
+@@ -0,0 +1,868 @@
++# This file documents the option groups EGLIBC currently supports, in
++# a format akin to the Linux Kconfig system's. The syntax may change
++# over time.
++#
++# An entry of the form:
++#
++# config GROUP_NAME
++# bool "one-line explanation of what this option group controls"
++# help
++# Multi-line help explaining the option group's meaning in
++# some detail, terminated by indentation level.
++#
++# defines an option group whose variable is GROUP_NAME, with
++# meaningful values 'y' (enabled) and 'n' (disabled). The
++# documentation is formatted to be consumed by some sort of
++# interactive configuration interface, but EGLIBC doesn't have such an
++# interface yet.
++#
++# An option may have a 'depends on' line, indicating which other options
++# must also be enabled if this option is. At present, EGLIBC doesn't
++# check that these dependencies are satisfied.
++#
++# Option group variables get their default values from the file
++# 'option-groups.defaults', in the top directory of the EGLIBC source
++# tree. By default, all EGLIBC option groups are enabled --- their
++# variables are set to 'y'.
++#
++# After including 'option-groups.defaults', the EGLIBC make machinery
++# includes the file 'option-groups.config' from the top of the build
++# tree, if it is present. Developers can place assignments to option
++# group variables in that file to override the defaults. For example,
++# to disable an option group, place a line of the form:
++#
++# OPTION_GROUP_NAME = n
++#
++# in 'option-groups.config' at the top of your build tree. To
++# explicitly enable an option group, you may also write:
++#
++# OPTION_GROUP_NAME = y
++#
++# although this simply reestablishes the value already set by
++# 'option-groups.defaults'.
++
++config EGLIBC_ADVANCED_INET6
++ bool "IPv6 Advanced Sockets API support (RFC3542)"
++ depends on EGLIBC_INET
++ help
++ This option group includes the functions specified by RFC 3542,
++ "Advanced Sockets Application Program Interface (API) for
++ IPv6".
++
++ This option group includes the following functions:
++
++ inet6_opt_append
++ inet6_opt_find
++ inet6_opt_finish
++ inet6_opt_get_val
++ inet6_opt_init
++ inet6_option_alloc
++ inet6_option_append
++ inet6_option_find
++ inet6_option_init
++ inet6_option_next
++ inet6_option_space
++ inet6_opt_next
++ inet6_opt_set_val
++ inet6_rth_add
++ inet6_rth_getaddr
++ inet6_rth_init
++ inet6_rth_reverse
++ inet6_rth_segments
++ inet6_rth_space
++
++config EGLIBC_BACKTRACE
++ bool "Functions for producing backtraces"
++ help
++ This option group includes functions for producing a list of
++ the function calls that are currently active in a thread, from
++ within the thread itself. These functions are often used
++ within signal handlers, to produce diagnostic output.
++
++ This option group includes the following functions:
++
++ backtrace
++ backtrace_symbols
++ backtrace_symbols_fd
++
++config EGLIBC_BIG_MACROS
++ bool "Use extensive inline code"
++ help
++ This option group specifies whether certain pieces of code
++ should be inlined to achieve maximum speed. If this option
++ group is not selected, function calls will be used instead,
++ hence reducing the library footprint.
++
++config EGLIBC_BSD
++ bool "BSD-specific functions, and their compatibility stubs"
++ help
++ This option group includes functions specific to BSD kernels.
++ A number of these functions have stub versions that are also
++ included in libraries built for non-BSD systems for
++ compatibility.
++
++ This option group includes the following functions:
++
++ chflags
++ fchflags
++ lchmod
++ revoke
++ setlogin
++
++config EGLIBC_CXX_TESTS
++ bool "Tests that link against the standard C++ library."
++ depends on POSIX_WIDE_CHAR_DEVICE_IO && EGLIBC_LIBM
++ help
++ This option group does not include any C library functions;
++ instead, it controls which EGLIBC tests an ordinary 'make
++ tests' runs. With this group disabled, tests that would
++ normally link against the standard C++ library are not
++ run.
++
++ The standard C++ library depends on the math library 'libm' and
++ the wide character I/O functions included in EGLIBC. So those
++ option groups must be enabled if this test is enabled.
++
++config EGLIBC_CATGETS
++ bool "Functions for accessing message catalogs"
++ depends on EGLIBC_LOCALE_CODE
++ help
++ This option group includes functions for accessing message
++ catalogs: catopen, catclose, and catgets.
++
++ This option group depends on the EGLIBC_LOCALE_CODE
++ option group.
++
++config EGLIBC_CHARSETS
++ bool "iconv/gconv character set conversion libraries"
++ help
++ This option group includes support for character sets other
++ than ASCII (ANSI_X3.4-1968) and Unicode and ISO-10646 in their
++ various encodings. This affects both the character sets
++ supported by the wide and multibyte character functions, and
++ those supported by the 'iconv' functions.
++
++ With this option group disabled, EGLIBC supports only the
++ following character sets:
++
++ ANSI_X3.4 - ASCII
++ ANSI_X3.4-1968
++ ANSI_X3.4-1986
++ ASCII
++ CP367
++ CSASCII
++ IBM367
++ ISO-IR-6
++ ISO646-US
++ ISO_646.IRV:1991
++ OSF00010020
++ US
++ US-ASCII
++
++ 10646-1:1993 - ISO 10646, in big-endian UCS4 form
++ 10646-1:1993/UCS4
++ CSUCS4
++ ISO-10646
++ ISO-10646/UCS4
++ OSF00010104
++ OSF00010105
++ OSF00010106
++ UCS-4
++ UCS-4BE
++ UCS4
++
++ UCS-4LE - ISO 10646, in little-endian UCS4 form
++
++ ISO-10646/UTF-8 - ISO 10646, in UTF-8 form
++ ISO-10646/UTF8
++ ISO-IR-193
++ OSF05010001
++ UTF-8
++ UTF8
++
++ ISO-10646/UCS2 - ISO 10646, in target-endian UCS2 form
++ OSF00010100
++ OSF00010101
++ OSF00010102
++ UCS-2
++ UCS2
++
++ UCS-2BE - ISO 10646, in big-endian UCS2 form
++ UNICODEBIG
++
++ UCS-2LE - ISO 10646, in little-endian UCS2 form
++ UNICODELITTLE
++
++ WCHAR_T - EGLIBC's internal form (target-endian,
++ 32-bit ISO 10646)
++
++config EGLIBC_CRYPT
++ bool "Encryption library"
++ help
++ This option group includes the `libcrypt' library which
++ provides functions for one-way encryption. Supported
++ encryption algorithms include MD5, SHA-256, SHA-512 and DES.
++
++config EGLIBC_CRYPT_UFC
++ bool "Ultra fast `crypt' implementation"
++ depends on EGLIBC_CRYPT
++ help
++ This option group provides an ultra fast DES-based implementation of
++ the `crypt' function. When this option group is disabled,
++ (a) the library will not provide the setkey[_r] and encrypt[_r]
++ functions and (b) the crypt[_r] function will return NULL and set the
++ errno to ENOSYS if the /salt/ passed does not correspond to either MD5,
++ SHA-256 or SHA-512 algorithm.
++
++config EGLIBC_DB_ALIASES
++ bool "Functions for accessing the mail aliases database"
++ help
++ This option group includes functions for looking up mail
++ aliases in '/etc/aliases' or using nsswitch. It includes the
++ following functions:
++
++ endaliasent
++ getaliasbyname
++ getaliasbyname_r
++ getaliasent
++ getaliasent_r
++ setaliasent
++
++ When this option group is disabled, the NSS service libraries
++ also lack support for querying their mail alias tables.
++
++config EGLIBC_ENVZ
++ bool "Functions for handling envz-style environment vectors."
++ help
++ This option group contains functions for creating and operating
++ on envz vectors. An "envz vector" is a vector of strings in a
++ contiguous block of memory, where each element is a name-value
++ pair, and elements are separated from their neighbors by null
++ characters.
++
++ This option group includes the following functions:
++
++ envz_add envz_merge
++ envz_entry envz_remove
++ envz_get envz_strip
++
++config EGLIBC_FCVT
++ bool "Functions for converting floating-point numbers to strings"
++ help
++ This option group includes functions for converting
++ floating-point numbers to strings.
++
++ This option group includes the following functions:
++
++ ecvt qecvt
++ ecvt_r qecvt_r
++ fcvt qfcvt
++ fcvt_r qfcvt_r
++ gcvt qgcvt
++
++config EGLIBC_FMTMSG
++ bool "Functions for formatting messages"
++ help
++ This option group includes the following functions:
++
++ addseverity fmtmsg
++
++config EGLIBC_FSTAB
++ bool "Access functions for 'fstab'"
++ help
++ This option group includes functions for reading the mount
++ point specification table, '/etc/fstab'. These functions are
++ not included in the POSIX standard, which provides the
++ 'getmntent' family of functions instead.
++
++ This option group includes the following functions:
++
++ endfsent getfsspec
++ getfsent setfsent
++ getfsfile
++
++config EGLIBC_FTRAVERSE
++ bool "Functions for traversing file hierarchies"
++ help
++ This option group includes functions for traversing UNIX file
++ hierarchies.
++
++ This option group includes the following functions:
++
++ fts_open ftw
++ fts_read nftw
++ fts_children ftw64
++ fts_set nftw64
++ fts_close
++
++config EGLIBC_GETLOGIN
++ bool "The getlogin function"
++ depends on EGLIBC_UTMP
++ help
++ This function group includes the 'getlogin' and 'getlogin_r'
++ functions, which return the user name associated by the login
++ activity with the current process's controlling terminal.
++
++ With this option group disabled, the 'glob' function will not
++ fall back on 'getlogin' to find the user's login name for tilde
++ expansion when the 'HOME' environment variable is not set.
++
++config EGLIBC_IDN
++ bool "International domain names support"
++ help
++ This option group includes the `libcidn' library which
++ provides support for international domain names.
++
++config EGLIBC_INET
++ bool "Networking support"
++ help
++ This option group includes networking-specific functions and
++ data. With EGLIBC_INET disabled, the EGLIBC
++ installation and API change as follows:
++
++ - The following libraries are not installed:
++
++ libnsl
++ libnss_compat
++ libnss_dns
++ libnss_hesiod
++ libnss_nis
++ libnss_nisplus
++ libresolv
++
++ - The following functions and variables are omitted from libc:
++
++ authdes_create hstrerror svc_fdset
++ authdes_getucred htonl svc_getreq
++ authdes_pk_create htons svc_getreq_common
++ authnone_create if_freenameindex svc_getreq_poll
++ authunix_create if_indextoname svc_getreqset
++ authunix_create_default if_nameindex svc_max_pollfd
++ bindresvport if_nametoindex svc_pollfd
++ callrpc in6addr_any svcraw_create
++ cbc_crypt in6addr_loopback svc_register
++ clnt_broadcast inet6_opt_append svc_run
++ clnt_create inet6_opt_find svc_sendreply
++ clnt_pcreateerror inet6_opt_finish svctcp_create
++ clnt_perrno inet6_opt_get_val svcudp_bufcreate
++ clnt_perror inet6_opt_init svcudp_create
++ clntraw_create inet6_option_alloc svcudp_enablecache
++ clnt_spcreateerror inet6_option_append svcunix_create
++ clnt_sperrno inet6_option_find svcunixfd_create
++ clnt_sperror inet6_option_init svc_unregister
++ clnttcp_create inet6_option_next user2netname
++ clntudp_bufcreate inet6_option_space xdecrypt
++ clntudp_create inet6_opt_next xdr_accepted_reply
++ clntunix_create inet6_opt_set_val xdr_array
++ des_setparity inet6_rth_add xdr_authdes_cred
++ ecb_crypt inet6_rth_getaddr xdr_authdes_verf
++ endaliasent inet6_rth_init xdr_authunix_parms
++ endhostent inet6_rth_reverse xdr_bool
++ endnetent inet6_rth_segments xdr_bytes
++ endnetgrent inet6_rth_space xdr_callhdr
++ endprotoent inet_addr xdr_callmsg
++ endrpcent inet_aton xdr_char
++ endservent inet_lnaof xdr_cryptkeyarg
++ ether_aton inet_makeaddr xdr_cryptkeyarg2
++ ether_aton_r inet_netof xdr_cryptkeyres
++ ether_hostton inet_network xdr_des_block
++ ether_line inet_nsap_addr xdr_double
++ ether_ntoa inet_nsap_ntoa xdr_enum
++ ether_ntoa_r inet_ntoa xdr_float
++ ether_ntohost inet_ntop xdr_free
++ freeaddrinfo inet_pton xdr_getcredres
++ freeifaddrs innetgr xdr_hyper
++ gai_strerror iruserok xdr_int
++ getaddrinfo iruserok_af xdr_int16_t
++ getaliasbyname key_decryptsession xdr_int32_t
++ getaliasbyname_r key_decryptsession_pk xdr_int64_t
++ getaliasent key_encryptsession xdr_int8_t
++ getaliasent_r key_encryptsession_pk xdr_keybuf
++ gethostbyaddr key_gendes xdr_key_netstarg
++ gethostbyaddr_r key_get_conv xdr_key_netstres
++ gethostbyname key_secretkey_is_set xdr_keystatus
++ gethostbyname2 key_setnet xdr_long
++ gethostbyname2_r key_setsecret xdr_longlong_t
++ gethostbyname_r netname2host xdrmem_create
++ gethostent netname2user xdr_netnamestr
++ gethostent_r ntohl xdr_netobj
++ getifaddrs ntohs xdr_opaque
++ getipv4sourcefilter passwd2des xdr_opaque_auth
++ get_myaddress pmap_getmaps xdr_pmap
++ getnameinfo pmap_getport xdr_pmaplist
++ getnetbyaddr pmap_rmtcall xdr_pointer
++ getnetbyaddr_r pmap_set xdr_quad_t
++ getnetbyname pmap_unset xdrrec_create
++ getnetbyname_r rcmd xdrrec_endofrecord
++ getnetent rcmd_af xdrrec_eof
++ getnetent_r registerrpc xdrrec_skiprecord
++ getnetgrent res_init xdr_reference
++ getnetgrent_r rexec xdr_rejected_reply
++ getnetname rexec_af xdr_replymsg
++ getprotobyname rexecoptions xdr_rmtcall_args
++ getprotobyname_r rpc_createerr xdr_rmtcallres
++ getprotobynumber rresvport xdr_short
++ getprotobynumber_r rresvport_af xdr_sizeof
++ getprotoent rtime xdrstdio_create
++ getprotoent_r ruserok xdr_string
++ getpublickey ruserok_af xdr_u_char
++ getrpcbyname ruserpass xdr_u_hyper
++ getrpcbyname_r setaliasent xdr_u_int
++ getrpcbynumber sethostent xdr_uint16_t
++ getrpcbynumber_r setipv4sourcefilter xdr_uint32_t
++ getrpcent setnetent xdr_uint64_t
++ getrpcent_r setnetgrent xdr_uint8_t
++ getrpcport setprotoent xdr_u_long
++ getsecretkey setrpcent xdr_u_longlong_t
++ getservbyname setservent xdr_union
++ getservbyname_r setsourcefilter xdr_unixcred
++ getservbyport svcauthdes_stats xdr_u_quad_t
++ getservbyport_r svcerr_auth xdr_u_short
++ getservent svcerr_decode xdr_vector
++ getservent_r svcerr_noproc xdr_void
++ getsourcefilter svcerr_noprog xdr_wrapstring
++ h_errlist svcerr_progvers xencrypt
++ h_errno svcerr_systemerr xprt_register
++ herror svcerr_weakauth xprt_unregister
++ h_nerr svc_exit
++ host2netname svcfd_create
++
++ - The rpcgen, nscd, and rpcinfo commands are not installed.
++
++ - The 'rpc' file (a text file listing RPC services) is not installed.
++
++ Socket-related system calls do not fall in this option group,
++ because many are also used for other inter-process
++ communication mechanisms. For example, the 'syslog' routines
++ use Unix-domain sockets to communicate with the syslog daemon;
++ syslog is valuable in non-networked contexts.
++
++config EGLIBC_INET_ANL
++ bool "Asynchronous name lookup"
++ depends on EGLIBC_INET
++ help
++ This option group includes the `libanl' library which
++ provides support for asynchronous name lookup.
++
++config EGLIBC_LIBM
++ bool "libm (math library)"
++ help
++ This option group includes the 'libm' library, containing
++ mathematical functions. If this option group is omitted, then
++ an EGLIBC installation does not include shared or unshared versions
++ of the math library.
++
++ Note that this does not remove all floating-point related
++ functionality from EGLIBC; for example, 'printf' and 'scanf'
++ can still print and read floating-point values with this option
++ group disabled.
++
++ Note that the ISO Standard C++ library 'libstdc++' depends on
++ EGLIBC's math library 'libm'. If you disable this option
++ group, you will not be able to build 'libstdc++' against the
++ resulting EGLIBC installation.
++
++config EGLIBC_LOCALES
++ bool "Locale definitions"
++ help
++ This option group includes all locale definitions other than
++ that for the "C" locale. If this option group is omitted, then
++ only the "C" locale is supported.
++
++
++config EGLIBC_LOCALE_CODE
++ bool "Locale functions"
++ depends on POSIX_C_LANG_WIDE_CHAR
++ help
++ This option group includes locale support functions, programs,
++ and libraries. With EGLIBC_LOCALE_CODE disabled,
++ EGLIBC supports only the 'C' locale (also known as 'POSIX'),
++ and ignores the settings of the 'LANG' and 'LC_*' environment
++ variables.
++
++ With EGLIBC_LOCALE_CODE disabled, the following
++ functions are omitted from libc:
++
++ duplocale localeconv nl_langinfo rpmatch strfmon_l
++ freelocale newlocale nl_langinfo_l strfmon uselocale
++
++ Furthermore, only the LC_CTYPE and LC_TIME categories of the
++ standard "C" locale are available.
++
++ The EGLIBC_CATGETS option group depends on this option group.
++
++
++config EGLIBC_MEMUSAGE
++ bool "Memory profiling library"
++ help
++ This option group includes the `libmemusage' library and
++ the `memusage' and `memusagestat' utilities.
++ These components provide memory profiling functions.
++
++config EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE
++ int "Memory profiling library buffer size"
++ depends on EGLIBC_MEMUSAGE
++ default "32768"
++ help
++ The libmemusage library buffers the profiling data in memory
++ before writing it out to disk. By default, the library
++ allocates a 1.5M buffer, which can be substantial for some
++ systems. The EGLIBC_MEMUSAGE_DEFAULT_BUFFER_SIZE option
++ allows you to change the default buffer size. It specifies
++ the number of entries the buffer should have.
++ On most architectures one buffer entry amounts to 48 bytes,
++ so setting this option to the value of 512 will reduce the size of
++ the memory buffer to 24K.
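++ (For reference: 48 bytes x 32768 entries, the default, is about
++ 1.5M, while 48 bytes x 512 entries is 24K.)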
++
++config EGLIBC_NIS
++ bool "Support for NIS, NIS+, and the special 'compat' services."
++ depends on EGLIBC_INET && EGLIBC_SUNRPC
++ help
++ This option group includes the NIS, NIS+, and 'compat' Name
++ Service Switch service libraries. When it is disabled, those
++ services libraries are not installed; you should remove any
++ references to them from your 'nsswitch.conf' file.
++
++ This option group depends on the EGLIBC_INET option
++ group; you must enable that to enable this option group.
++
++config EGLIBC_NSSWITCH
++ bool "Name service switch (nsswitch) support"
++ help
++ This option group includes support for the 'nsswitch' facility.
++ With this option group enabled, all EGLIBC functions for
++ accessing various system databases (passwords and groups;
++ networking; aliases; public keys; and so on) consult the
++ '/etc/nsswitch.conf' configuration file to decide how to handle
++ queries.
++
++ With this option group disabled, EGLIBC uses a fixed list of
++ services to satisfy queries on each database, as requested by
++ configuration files specified when EGLIBC is built. Your
++ 'option-groups.config' file must set the following two
++ variables:
++
++config EGLIBC_NSSWITCH_FIXED_CONFIG
++ string "Nsswitch fixed config filename"
++ depends on !EGLIBC_NSSWITCH
++ default ""
++ help
++ Set this to the name of a file whose contents observe the
++ same syntax as an ordinary '/etc/nsswitch.conf' file. The
++ EGLIBC build process parses this file just as EGLIBC would
++ at run time if EGLIBC_NSSWITCH were enabled, and
++ produces a C library that uses the nsswitch service
++ libraries to search for database entries as this file
++ specifies, instead of consulting '/etc/nsswitch.conf' at run
++ time.
++
++ This should be an absolute filename. The EGLIBC build
++ process may use it from several different working
++ directories. It may include references to Makefile
++ variables like 'common-objpfx' (the top of the build tree,
++ with a trailing slash), or '..' (the top of the source tree,
++ with a trailing slash).
++
++ The EGLIBC source tree includes a sample configuration file
++ named 'nss/fixed-nsswitch.conf'; for simple configurations,
++ you will probably want to delete references to databases not
++ needed on your system.
++
++config EGLIBC_NSSWITCH_FIXED_FUNCTIONS
++ string "Nsswitch fixed functions filename"
++ depends on !EGLIBC_NSSWITCH
++ default ""
++ help
++ The EGLIBC build process uses this file to decide which
++ functions to make available from which service libraries.
++ The file 'nss/fixed-nsswitch.functions' serves as a sample
++ configuration file for this setting, and explains its syntax
++ and meaning in more detail.
++
++ This should be an absolute file name. The EGLIBC build
++ process may use it from several different working
++ directories. It may include references to Makefile
++ variables like 'common-objpfx' (the top of the build tree,
++ with a trailing slash), or '..' (the top of the source tree,
++ with a trailing slash).
++
++ Be sure to mention each function in each service you wish to
++ use. If you do not mention a service's function here, the
++ EGLIBC database access functions will not find it, even if
++ it is listed in the EGLIBC_NSSWITCH_FIXED_CONFIG
++ file.
++
++ In this arrangement, EGLIBC will not use the 'dlopen' and
++ 'dlsym' functions to find database access functions. Instead,
++ libc hard-codes references to the service libraries' database
++ access functions. You must explicitly link your program
++ against the name service libraries (those whose names start
++ with 'libnss_', in the sysroot's '/lib' directory) whose
++ functions you intend to use. This arrangement helps
++ system-wide static analysis tools decide which functions a
++ system actually uses.
++
++ Note that some nsswitch service libraries require other option
++ groups to be enabled; for example, the EGLIBC_INET
++ option group must be enabled to use the 'libnss_dns.so.2'
++ service library, which uses the Domain Name System network
++ protocol to answer queries.
++
++config EGLIBC_RCMD
++ bool "Support for 'rcmd' and related library functions"
++ depends on EGLIBC_INET
++ help
++ This option group includes functions for running commands on
++ remote machines via the 'rsh' protocol, and doing authentication
++ related to those functions. This also includes functions that
++ use the 'rexec' protocol.
++
++ This option group includes the following functions:
++
++ rcmd ruserok
++ rcmd_af ruserok_af
++ rexec iruserok
++ rexec_af iruserok_af
++ rresvport ruserpass
++ rresvport_af
++
++config EGLIBC_RTLD_DEBUG
++ bool "Runtime linker debug print outs"
++ help
++ This option group enables debug output of the runtime linker
++ which is activated via LD_DEBUG and LD_TRACE_PRELINKING
++ environment variables. Disabling this option group yields
++ a smaller runtime linker binary.
++ BEWARE: Disabling this option group is likely to break
++ the `ldd' utility which may also be used by the prelinker.
++ In particular, the `--unused' ldd option will not work correctly.
++
++config EGLIBC_SPAWN
++ bool "Support for POSIX posix_spawn functions"
++ help
++ This option group includes the POSIX functions for executing
++ programs in child processes without using 'fork' or 'vfork'.
++
++ This option group includes the following functions:
++
++ posix_spawn
++ posix_spawnattr_destroy
++ posix_spawnattr_getflags
++ posix_spawnattr_getpgroup
++ posix_spawnattr_getschedparam
++ posix_spawnattr_getschedpolicy
++ posix_spawnattr_getsigdefault
++ posix_spawnattr_getsigmask
++ posix_spawnattr_init
++ posix_spawnattr_setflags
++ posix_spawnattr_setpgroup
++ posix_spawnattr_setschedparam
++ posix_spawnattr_setschedpolicy
++ posix_spawnattr_setsigdefault
++ posix_spawnattr_setsigmask
++ posix_spawn_file_actions_addclose
++ posix_spawn_file_actions_adddup2
++ posix_spawn_file_actions_addopen
++ posix_spawn_file_actions_destroy
++ posix_spawn_file_actions_init
++ posix_spawnp
++
++ This option group also provides the ability for the iconv,
++ localedef, and locale programs to operate transparently on
++ compressed charset definitions. When this option group is
++ disabled, those programs will only operate on uncompressed
++ charmap files.
++
++config EGLIBC_STREAMS
++ bool "Support for accessing STREAMS."
++ help
++ This option group includes functions for reading and writing
++ messages to and from STREAMS. The STREAMS interface provides a
++ uniform mechanism for implementing networking services and other
++ character-based I/O. (STREAMS are not to be confused with
++ <stdio.h> FILE objects, also called 'streams'.)
++
++ This option group includes the following functions:
++
++ getmsg putpmsg
++ getpmsg fattach
++ isastream fdetach
++ putmsg
++
++config EGLIBC_SUNRPC
++ bool "Support for the Sun 'RPC' protocol."
++ depends on EGLIBC_INET
++ help
++ This option group includes support for the Sun RPC protocols,
++ including the 'rpcgen' and 'rpcinfo' programs.
++
++config EGLIBC_UTMP
++ bool "Older access functions for 'utmp' login records"
++ help
++ This option group includes the older 'utent' family of
++ functions for accessing user login records in the 'utmp' file.
++ POSIX omits these functions in favor of the 'utxent' family,
++ and they are obsolete on systems other than Linux.
++
++ This option group includes the following functions:
++
++ endutent
++ getutent
++ getutent_r
++ getutid
++ getutid_r
++ getutline
++ getutline_r
++ logwtmp
++ pututline
++ setutent
++ updwtmp
++ utmpname
++
++ This option group includes the following libraries:
++
++ libutil.so (and libutil.a)
++
++config EGLIBC_UTMPX
++ bool "POSIX access functions for 'utmp' login records"
++ depends on EGLIBC_UTMP
++ help
++ This option group includes the POSIX functions for reading and
++ writing user login records in the 'utmp' file (usually
++ '/var/run/utmp'). The POSIX functions operate on 'struct
++ utmpx' structures, as opposed to the family of older 'utent'
++ functions, which operate on 'struct utmp' structures.
++
++ This option group includes the following functions:
++
++ endutxent
++ getutmp
++ getutmpx
++ getutxent
++ getutxid
++ getutxline
++ pututxline
++ setutxent
++ updwtmpx
++ utmpxname
++
++config EGLIBC_WORDEXP
++ bool "Shell-style word expansion"
++ help
++ This option group includes the 'wordexp' function for
++ performing word expansion in the manner of the shell, and the
++ accompanying 'wordfree' function.
++
++config POSIX_C_LANG_WIDE_CHAR
++ bool "ISO C library wide character functions, excluding I/O"
++ help
++ This option group includes the functions defined by the ISO C
++ standard for working with wide and multibyte characters in
++ memory. Functions for reading and writing wide and multibyte
++    characters from and to files fall in the
++ POSIX_WIDE_CHAR_DEVICE_IO option group.
++
++ This option group includes the following functions:
++
++ btowc mbsinit wcscspn wcstoll
++ iswalnum mbsrtowcs wcsftime wcstombs
++ iswalpha mbstowcs wcslen wcstoul
++ iswblank mbtowc wcsncat wcstoull
++ iswcntrl swprintf wcsncmp wcstoumax
++ iswctype swscanf wcsncpy wcsxfrm
++ iswdigit towctrans wcspbrk wctob
++ iswgraph towlower wcsrchr wctomb
++ iswlower towupper wcsrtombs wctrans
++ iswprint vswprintf wcsspn wctype
++ iswpunct vswscanf wcsstr wmemchr
++ iswspace wcrtomb wcstod wmemcmp
++ iswupper wcscat wcstof wmemcpy
++ iswxdigit wcschr wcstoimax wmemmove
++ mblen wcscmp wcstok wmemset
++ mbrlen wcscoll wcstol
++ mbrtowc wcscpy wcstold
++
++config POSIX_REGEXP
++ bool "Regular expressions"
++ help
++ This option group includes the POSIX regular expression
++ functions, and the associated non-POSIX extensions and
++ compatibility functions.
++
++ With POSIX_REGEXP disabled, the following functions are
++ omitted from libc:
++
++ re_comp re_max_failures regcomp
++ re_compile_fastmap re_search regerror
++ re_compile_pattern re_search_2 regexec
++ re_exec re_set_registers regfree
++ re_match re_set_syntax rpmatch
++ re_match_2 re_syntax_options
++
++ Furthermore, the compatibility regexp interface defined in the
++ <regexp.h> header file, 'compile', 'step', and 'advance', is
++ omitted.
++
++config POSIX_REGEXP_GLIBC
++ bool "Regular expressions from GLIBC"
++ depends on POSIX_REGEXP
++ help
++ This option group specifies which regular expression
++ library to use. The choice is between regex
++ implementation from GLIBC and regex implementation from
++ libiberty. The GLIBC variant is fully POSIX conformant and
++ optimized for speed; regex from libiberty is more than twice
++    as small while still being sufficient for most practical purposes.
++
++config POSIX_WIDE_CHAR_DEVICE_IO
++ bool "Input and output functions for wide characters"
++ depends on POSIX_C_LANG_WIDE_CHAR
++ help
++ This option group includes functions for reading and writing
++ wide characters to and from <stdio.h> streams.
++
++ This option group includes the following functions:
++
++ fgetwc fwprintf putwchar vwscanf
++ fgetws fwscanf ungetwc wprintf
++ fputwc getwc vfwprintf wscanf
++ fputws getwchar vfwscanf
++ fwide putwc vwprintf
++
++ This option group further includes the following unlocked
++ variants of the above functions:
++
++ fgetwc_unlocked getwc_unlocked
++ fgetws_unlocked getwchar_unlocked
++ fputwc_unlocked putwc_unlocked
++ fputws_unlocked putwchar_unlocked
++
++ Note that the GNU standard C++ library, 'libstdc++.so', uses
++ some of these functions; you will not be able to link or run
++ C++ programs if you disable this option group.
++
++ This option group also affects the behavior of the following
++ functions:
++
++ fdopen
++ fopen
++ fopen64
++ freopen
++ freopen64
++
++ These functions all take an OPENTYPE parameter which may
++ contain a string of the form ",ccs=CHARSET", indicating that
++ the underlying file uses the character set named CHARSET.
++ This produces a wide-oriented stream, which is only useful
++ when the functions included in this option group are present.
++ If the user attempts to open a file specifying a character set
++ in the OPENTYPE parameter, and EGLIBC was built with this
++ option group disabled, the function returns NULL, and sets
++ errno to EINVAL.
++
++
++# This helps Emacs users browse this file using the page motion commands
++# and commands like 'pages-directory'.
++# Local Variables:
++# page-delimiter: "^config\\s-"
++# End:
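To make the ",ccs=" behaviour described in the POSIX_WIDE_CHAR_DEVICE_IO
help text above concrete, here is a minimal C sketch that opens a
wide-oriented stream and handles the EINVAL failure the help text describes
for builds with that option group disabled. The file path and charset name
are placeholders for illustration only; this sketch is not part of the patch.

    #include <errno.h>
    #include <stdio.h>
    #include <wchar.h>

    int main(void)
    {
        /* ",ccs=UTF-8" requests a wide-oriented stream; the path and
           charset here are placeholders. */
        FILE *f = fopen("/tmp/example.txt", "r,ccs=UTF-8");
        if (f == NULL) {
            if (errno == EINVAL)
                fprintf(stderr, "wide character device I/O not built in\n");
            else
                perror("fopen");
            return 1;
        }

        wint_t wc;
        while ((wc = fgetwc(f)) != WEOF)   /* read wide characters */
            fputwc(wc, stdout);

        fclose(f);
        return 0;
    }

With the option group enabled this copies the file to stdout as wide
characters; with it disabled, fopen fails with EINVAL as described above.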
+Index: git/option-groups.mak
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/option-groups.mak 2014-08-27 07:26:51.652070587 +0000
+@@ -0,0 +1,41 @@
++# Setup file for subdirectory Makefiles that define EGLIBC option groups.
++
++# EGLIBC shouldn't need to override this. However, the
++# cross-build-friendly localedef includes this makefile to get option
++# group variable definitions; it uses a single build tree for all the
++# multilibs, and needs to be able to specify a different option group
++# configuration file for each multilib.
++option_group_config_file ?= $(objdir)/option-groups.config
++
++# Read the default settings for all options.
++# We're included before ../Rules, so we can't assume $(..) is set.
++include $(firstword $(..) ../)option-groups.defaults
++
++# Read the developer's option group selections, overriding the
++# defaults from option-groups.defaults.
++-include $(option_group_config_file)
++
++# $(call option-disabled, VAR) is 'y' if VAR is not 'y', or 'n' otherwise.
++# VAR should be a variable name, not a variable reference; this is
++# less general, but more terse for the intended use.
++# You can use it to add a file to a list if an option group is
++# disabled, like this:
++# routines-$(call option-disabled, OPTION_POSIX_C_LANG_WIDE_CHAR) += ...
++define option-disabled
++$(firstword $(subst y,n,$(filter y,$($(strip $(1))))) y)
++endef
++
++# Establish 'routines-y', etc. as simply-expanded variables.
++aux-y :=
++extra-libs-others-y :=
++extra-libs-y :=
++extra-objs-y :=
++install-bin-y :=
++install-others-y :=
++install-sbin-y :=
++others-y :=
++others-pie-y :=
++routines-y :=
++test-srcs-y :=
++tests-y :=
++xtests-y :=
+Index: git/option-groups.defaults
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/option-groups.defaults 2014-08-27 07:24:41.652070587 +0000
+@@ -0,0 +1,47 @@
++# This file sets default values for all option group variables
++# mentioned in option-groups.def; see that file for a description of
++# each option group.
++#
++# Subdirectory makefiles include this file before including the user's
++# settings from option-groups.config at the top of the build tree;
++# that file need only refer to those options whose default settings
++# are to be changed.
++#
++# By default, all option groups are enabled.
++OPTION_EGLIBC_ADVANCED_INET6 = y
++OPTION_EGLIBC_BACKTRACE = y
++OPTION_EGLIBC_BIG_MACROS = y
++OPTION_EGLIBC_BSD = y
++OPTION_EGLIBC_CXX_TESTS = y
++OPTION_EGLIBC_CATGETS = y
++OPTION_EGLIBC_CHARSETS = y
++OPTION_EGLIBC_CRYPT = y
++OPTION_EGLIBC_CRYPT_UFC = y
++OPTION_EGLIBC_DB_ALIASES = y
++OPTION_EGLIBC_ENVZ = y
++OPTION_EGLIBC_FCVT = y
++OPTION_EGLIBC_FMTMSG = y
++OPTION_EGLIBC_FSTAB = y
++OPTION_EGLIBC_FTRAVERSE = y
++OPTION_EGLIBC_GETLOGIN = y
++OPTION_EGLIBC_IDN = y
++OPTION_EGLIBC_INET = y
++OPTION_EGLIBC_INET_ANL = y
++OPTION_EGLIBC_LIBM = y
++OPTION_EGLIBC_LOCALES = y
++OPTION_EGLIBC_LOCALE_CODE = y
++OPTION_EGLIBC_MEMUSAGE = y
++OPTION_EGLIBC_NIS = y
++OPTION_EGLIBC_NSSWITCH = y
++OPTION_EGLIBC_RCMD = y
++OPTION_EGLIBC_RTLD_DEBUG = y
++OPTION_EGLIBC_SPAWN = y
++OPTION_EGLIBC_STREAMS = y
++OPTION_EGLIBC_SUNRPC = y
++OPTION_EGLIBC_UTMP = y
++OPTION_EGLIBC_UTMPX = y
++OPTION_EGLIBC_WORDEXP = y
++OPTION_POSIX_C_LANG_WIDE_CHAR = y
++OPTION_POSIX_REGEXP = y
++OPTION_POSIX_REGEXP_GLIBC = y
++OPTION_POSIX_WIDE_CHAR_DEVICE_IO = y
+Index: git/Makefile
+===================================================================
+--- git.orig/Makefile 2014-08-27 07:24:37.540070587 +0000
++++ git/Makefile 2014-08-27 07:24:41.656070587 +0000
+@@ -24,6 +24,7 @@
+
+ include Makeconfig
+
++include options-config/Makefile
+
+ # This is the default target; it makes everything except the tests.
+ .PHONY: all
+Index: git/EGLIBC.option-groups
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/EGLIBC.option-groups 2014-08-27 07:24:41.656070587 +0000
+@@ -0,0 +1,122 @@
++ -*- mode: text -*-
++
++ The EGLIBC Component Configuration System
++ Jim Blandy <jimb@codesourcery.com>
++
++Introduction
++
++The GNU C library (GLIBC) provides a broad range of functionality,
++ranging from internationalization support to transcendental
++mathematical functions. Its website boasts that "nearly all known and
++useful functions from any other C library are available." This
++exhaustive approach has been one of GLIBC's strengths on desktop and
++server systems, but it has also given GLIBC a large footprint, both in
++memory and on disk, making it a challenge to use in embedded systems
++with limited resources.
++
++The Embedded GNU C library (EGLIBC) is a variant of the GNU C library
++designed to work well on embedded systems. In particular, EGLIBC's
++component configuration system allows embedded developers to build
++customized versions of the library that include only the features
++their application uses, reducing its space requirements.
++
++EGLIBC's component configuration system categorizes the library's
++functions into "option groups", and allows you to include or exclude
++option groups individually. Some option groups depend on others;
++EGLIBC tracks these relationships, and ensures that the selected
++configuration yields a functioning library.
++
++
++Consistent and Predictable Behavior
++
++A flexible configuration system is a mixed blessing: if the options
++offered are poorly designed, it can be hard to see which choices will
++have the desired effects, and choices with obscure consequences can
++make debugging difficult. EGLIBC's configuration follows some general
++principles to reduce these risks:
++
++- EGLIBC has a single default configuration for each target
++ architecture.
++
++- In the default configuration, all option groups are enabled, and
++ EGLIBC is upwardly API- and ABI-compatible with GLIBC.
++
++- As much as possible, configurations only affect what functions are
++ present, not how they behave. If the system works with an option
++ group disabled, it will still work with it enabled.
++
++- As much as possible, configurations only select option groups ---
++ they do not describe characteristics of the target architecture.
++
++These rules mean that you have a simple debugging strategy available
++if you suspect that your EGLIBC configuration might be the source of a
++problem: fall back to the default configuration, re-test, and then
++disable option groups one by one, until the problem reappears.
++
++
++The Option Groups
++
++To see the current full list of implemented option groups, refer to the
++file 'option-groups.def' at the top of the source tree, or run
++'make menuconfig' from the top-level build directory.
++
++The POSIX.1-2001 specification includes a suggested partition of all
++the functions in the POSIX C API into option groups: math functions
++like 'sin' and 'cos'; networking functions like 'socket' and
++'connect'; and so on. EGLIBC could use this partitioning as the basis
++for future option groups.
++
++
++Implementation
++
++The EGLIBC component configuration system resembles the approach used
++by the Linux kernel to select device drivers, network protocols, and
++other features. A file named 'option-groups.config' in the top-level
++build directory contains assignments to Make variables, each of which
++enables or disables a particular option group. If the variable's
++value is set to 'y', then the option group is enabled; if it is set to
++anything else, the option group is omitted. The file
++'option-groups.defaults', at the top of the source tree, establishes
++default values for all variables; all option groups are enabled by
++default.
++
++For example, the following 'option-groups.config' would omit locale
++data, but include mathematical functions, and everything else:
++
++ OPTION_EGLIBC_LOCALES = n
++ OPTION_EGLIBC_LIBM = y
++
++Like the Linux kernel, EGLIBC supports a similar set of '*config' make
++targets to make it easier to create 'option-groups.config', with all
++dependencies between option groups automatically satisfied. Run
++'make help' to see the list of supported make config targets. For
++example, 'make menuconfig' will update the current config utilising a
++menu based program.
++
++Each option group's name, type (boolean, int, hex, string), help
++text, and dependencies on other option groups are described by
++'option-groups.def' at the top of the source tree, analogous to the
++'Kconfig' files in the Linux kernel.
++
++In general, each option group variable controls whether a given set of
++object files in EGLIBC is compiled and included in the final
++libraries, or omitted from the build.
++
++Each subdirectory's Makefile categorizes its routines, libraries, and
++executables by option group. For example, EGLIBC's 'math/Makefile'
++places the 'libm' library in the OPTION_EGLIBC_LIBM group as follows:
++
++ extra-libs-$(OPTION_EGLIBC_LIBM) := libm
++
++Finally, common code in 'Makerules' consults the value of the variable
++'extra-libs-y', selecting only those libraries that belong to enabled
++option groups to be built.
++
++
++Current Status and Future Directions
++
++The EGLIBC component configuration system described here is still
++under development.
++
++We have used the system to subset some portions of EGLIBC's
++Index: libc/configure.ac
+Index: git/configure.ac
+===================================================================
+--- git.orig/configure.ac 2014-08-27 07:24:41.196070587 +0000
++++ git/configure.ac 2014-08-27 07:24:41.656070587 +0000
+@@ -127,6 +127,16 @@
+ [sysheaders=''])
+ AC_SUBST(sysheaders)
+
++AC_ARG_WITH([kconfig],
++ AC_HELP_STRING([--with-kconfig=PATH],
++ [location of kconfig tools to use (from Linux
++ kernel builds) to re-use for configuring EGLIBC
++ option groups]),
++ [KCONFIG_TOOLS=$withval],
++ [KCONFIG_TOOLS=''])
++AC_SUBST(KCONFIG_TOOLS)
++
++
+ AC_SUBST(use_default_link)
+ AC_ARG_WITH([default-link],
+ AC_HELP_STRING([--with-default-link],
+Index: git/config.make.in
+===================================================================
+--- git.orig/config.make.in 2014-08-27 07:24:37.560070587 +0000
++++ git/config.make.in 2014-08-27 07:24:41.656070587 +0000
+@@ -46,6 +46,8 @@
+ c++-sysincludes = @CXX_SYSINCLUDES@
+ all-warnings = @all_warnings@
+
++kconfig_tools = @KCONFIG_TOOLS@
++
+ have-z-combreloc = @libc_cv_z_combreloc@
+ have-z-execstack = @libc_cv_z_execstack@
+ have-Bgroup = @libc_cv_Bgroup@
+Index: git/configure
+===================================================================
+--- git.orig/configure 2014-08-27 07:24:41.192070587 +0000
++++ git/configure 2014-08-27 07:24:41.660070587 +0000
+@@ -619,6 +619,7 @@
+ PERL
+ BASH_SHELL
+ libc_cv_gcc_static_libgcc
++KCONFIG_TOOLS
+ CXX_SYSINCLUDES
+ SYSINCLUDES
+ AUTOCONF
+@@ -733,6 +734,7 @@
+ with_binutils
+ with_selinux
+ with_headers
++with_kconfig
+ with_default_link
+ enable_sanity_checks
+ enable_shared
+@@ -1437,6 +1439,9 @@
+ --with-selinux if building with SELinux support
+ --with-headers=PATH location of system headers to use (for example
+ /usr/src/linux/include) [default=compiler default]
++ --with-kconfig=PATH location of kconfig tools to use (from Linux kernel
++ builds) to re-use for configuring EGLIBC option
++ groups
+ --with-default-link do not use explicit linker scripts
+ --with-cpu=CPU select code for CPU variant
+
+@@ -3400,6 +3405,14 @@
+
+
+
++# Check whether --with-kconfig was given.
++if test "${with_kconfig+set}" = set; then
++ withval=$with_kconfig; KCONFIG_TOOLS=$withval
++else
++ KCONFIG_TOOLS=''
++fi
++
++
+
+ # Check whether --with-default-link was given.
+ if test "${with_default_link+set}" = set; then :
+Index: git/options-config/Makefile
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/options-config/Makefile 2014-08-27 07:24:41.652070587 +0000
+@@ -0,0 +1,55 @@
++# ===========================================================================
++# EGLIBC option-groups configuration targets
++# These targets are included from top-level makefile
++
++ifneq ($(kconfig_tools),)
++ifneq (no,$(PERL))
++
++ocdir := options-config
++
++OconfigDefaults := option-groups.defaults
++OconfigDefaults_tmp := $(common-objpfx).tmp.defconfig
++OconfigDef := option-groups.def
++Oconfig := $(common-objpfx)option-groups.config
++Oconfig_tmp := $(common-objpfx).tmp.config
++
++conf := $(kconfig_tools)/conf
++mconf := $(kconfig_tools)/mconf
++
++preproc := $(PERL) $(ocdir)/config-preproc.pl
++postproc := $(PERL) $(ocdir)/config-postproc.pl
++
++PHONY += defconfig config menuconfig
++
++defconfig: $(conf) $(OconfigDefaults) $(OconfigDef)
++ rm -f $(OconfigDefaults_tmp)
++ rm -f $(Oconfig_tmp)
++ $(preproc) $(OconfigDefaults) > $(OconfigDefaults_tmp)
++ KCONFIG_CONFIG=$(Oconfig_tmp) $< --defconfig=$(OconfigDefaults_tmp) \
++ $(OconfigDef)
++ $(postproc) $(OconfigDefaults) $(Oconfig_tmp) > $(Oconfig)
++ rm $(Oconfig_tmp)
++ rm $(OconfigDefaults_tmp)
++
++config: $(conf) $(OconfigDefaults) $(OconfigDef)
++ rm -f $(Oconfig_tmp)
++ $(preproc) $(wildcard $(Oconfig)) > $(Oconfig_tmp)
++ KCONFIG_CONFIG=$(Oconfig_tmp) $< --oldaskconfig $(OconfigDef)
++ $(postproc) $(OconfigDefaults) $(Oconfig_tmp) > $(Oconfig)
++ rm $(Oconfig_tmp)
++
++menuconfig: $(mconf) $(OconfigDefaults) $(OconfigDef)
++ rm -f $(Oconfig_tmp)
++ $(preproc) $(wildcard $(Oconfig)) > $(Oconfig_tmp)
++ KCONFIG_CONFIG=$(Oconfig_tmp) $< $(OconfigDef)
++ $(postproc) $(OconfigDefaults) $(Oconfig_tmp) > $(Oconfig)
++ rm $(Oconfig_tmp)
++
++# Help text used by make help
++help:
++ @echo ' defconfig - New config with default from default config'
++ @echo ' config - Update current config utilising a line-oriented program'
++ @echo ' menuconfig - Update current config utilising a menu based program'
++
++endif
++endif
+Index: git/options-config/config-postproc.pl
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/options-config/config-postproc.pl 2014-08-27 07:24:41.652070587 +0000
+@@ -0,0 +1,58 @@
++#!/usr/bin/perl
++
++$usage = "usage: $0 <default config file> <config file>\n";
++
++die "$usage" unless @ARGV;
++$defaults = shift @ARGV;
++die "$usage" unless @ARGV;
++die "Could not open $ARGV[0]" unless -T $ARGV[0];
++
++sub yank {
++ @option = grep(!($_ =~ /$_[0]\s*=/), @option);
++}
++
++open(DEFAULTS, $defaults) || die "Could not open $defaults\n";
++
++# get the full list of available options using the default config file
++$i = 0;
++while (<DEFAULTS>) {
++ if (/^\s*OPTION_(\w+\s*=.*$)/) {
++ $option[$i++] = $1;
++ }
++}
++
++# now go through the config file, making the necessary changes
++while (<>) {
++ if (/Linux Kernel Configuration/) {
++ # change title
++ s/Linux Kernel/Option Groups/;
++ print;
++ } elsif (/^\s*CONFIG_(\w+)\s*=/) {
++ # this is an explicit option set line, change CONFIG_ to OPTION_
++ # before printing and remove this option from option list
++ $opt = $1;
++ yank($opt);
++ s/CONFIG_/OPTION_/g;
++ print;
++ } elsif (/^\s*#\s+CONFIG_(\w+) is not set/) {
++ # this is a comment line for an unset boolean option, change CONFIG_
++ # to OPTION_, remove this option from option list, and convert to
++ # explicit OPTION_FOO=n
++ $opt = $1;
++ yank($opt);
++ s/CONFIG_/OPTION_/g;
++ print "OPTION_$opt=n\n";
++ } else {
++ print;
++ }
++}
++
++# any boolean options left in @option are options that were not mentioned in
++# the config file, which implicitly means they must be set to =n,
++# so do that here.
++foreach $opt (@option) {
++ if ($opt =~ /=\s*[yn]/) {
++ $opt =~ s/=\s*[yn]/=n/;
++ print "OPTION_$opt\n";
++ }
++}
+Index: git/options-config/config-preproc.pl
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/options-config/config-preproc.pl 2014-08-27 07:24:41.652070587 +0000
+@@ -0,0 +1,8 @@
++#!/usr/bin/perl
++
++if (@ARGV) {
++ while (<>) {
++ s/OPTION_/CONFIG_/g;
++ print;
++ }
++}
+Index: git/scripts/option-groups.awk
+===================================================================
+--- /dev/null 1970-01-01 00:00:00.000000000 +0000
++++ git/scripts/option-groups.awk 2014-08-27 07:26:51.652070587 +0000
+@@ -0,0 +1,63 @@
++# option-groups.awk --- generate option group header file
++# Given input files containing makefile-style assignments to variables,
++# print out a header file that #defines an appropriate preprocessor
++# symbol for each variable left set to 'y'.
++
++BEGIN { FS="=" }
++
++# Trim spaces.
++{ gsub (/[[:blank:]]/, "") }
++
++# Skip comments.
++/^#/ { next }
++
++# Process assignments.
++NF == 2 {
++ vars[$1] = $2
++}
++
++# Print final values.
++END {
++ print "/* This file is automatically generated by scripts/option-groups.awk"
++ print " in the EGLIBC source tree."
++ print ""
++ print " It defines macros that indicate which EGLIBC option groups were"
++ print " configured in 'option-groups.config' when this C library was"
++ print " built. For each option group named OPTION_foo, it #defines"
++ print " __OPTION_foo to be 1 if the group is enabled, or #defines that"
++ print " symbol to be 0 if the group is disabled. */"
++ print ""
++ print "#ifndef __GNU_OPTION_GROUPS_H"
++ print "#define __GNU_OPTION_GROUPS_H"
++ print ""
++
++ # Produce a sorted list of variable names.
++ i=0
++ for (var in vars)
++ names[i++] = var
++ n = asort (names)
++
++ for (i = 1; i <= n; i++)
++ {
++ var = names[i]
++ if (var ~ /^OPTION_/)
++ {
++ if (vars[var] == "y")
++ print "#define __" var " 1"
++ else if (vars[var] == "n")
++ print "#define __" var " 0"
++ else if (vars[var] ~ /^[0-9]+/ ||
++ vars[var] ~ /^0x[0-9aAbBcCdDeEfF]+/ ||
++ vars[var] ~ /^\"/)
++ print "#define __" var " " vars[var]
++ else
++ print "/* #undef __" var " */"
++ # Ignore variables that don't have boolean, int, hex, or
++ # string values. Ideally, this would be driven by the types
++ # given in option-groups.def.
++ }
++ }
++
++ print ""
++ print "#endif /* __GNU_OPTION_GROUPS_H */"
++}
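For illustration, the following small, self-contained C sketch shows how
library code consumes the macros that scripts/option-groups.awk generates.
The #define below stands in for the generated header (a real EGLIBC tree
would include the generated option-groups header instead), and the option
name is only an example.

    #include <stdio.h>

    /* Stand-in for the generated option-groups header; a real build would
       include the header produced by scripts/option-groups.awk instead. */
    #define __OPTION_EGLIBC_LOCALES 1

    int main(void)
    {
    #if __OPTION_EGLIBC_LOCALES
        puts("locale support compiled in");
    #else
        puts("option group disabled: C/POSIX locale only");
    #endif
        return 0;
    }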
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/ppc-sqrt_finite.patch b/meta/recipes-core/glibc/glibc/ppc-sqrt_finite.patch
index 6ea666b1d6..6ea666b1d6 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/ppc-sqrt_finite.patch
+++ b/meta/recipes-core/glibc/glibc/ppc-sqrt_finite.patch
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/ppc_slow_ieee754_sqrt.patch b/meta/recipes-core/glibc/glibc/ppc_slow_ieee754_sqrt.patch
index 60532cbd03..5b819bc458 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/ppc_slow_ieee754_sqrt.patch
+++ b/meta/recipes-core/glibc/glibc/ppc_slow_ieee754_sqrt.patch
@@ -3,11 +3,11 @@
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Upstream-Status: Pending
-Index: libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c
-+++ libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c
-@@ -40,7 +40,7 @@ static const float half = 0.5;
+--- git.orig/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c 2014-08-29 10:35:02.604070587 -0700
+@@ -40,7 +40,7 @@
simultaneously. */
double
@@ -16,8 +16,8 @@ Index: libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c
{
if (__builtin_expect (b > 0, 1))
{
-@@ -77,7 +77,7 @@ __ieee754_sqrt (double b)
-
+@@ -77,7 +77,7 @@
+
/* Handle small numbers by scaling. */
if (__builtin_expect ((u.parts.msw & 0x7ff00000) <= 0x02000000, 0))
- return __ieee754_sqrt (b * two108) * twom54;
@@ -25,7 +25,7 @@ Index: libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c
#define FMADD(a_, c_, b_) \
({ double __r; \
-@@ -126,4 +126,12 @@ __ieee754_sqrt (double b)
+@@ -126,4 +126,12 @@
}
return f_wash (b);
}
@@ -38,11 +38,11 @@ Index: libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrt.c
+}
+
strong_alias (__ieee754_sqrt, __sqrt_finite)
-Index: libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c
-+++ libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c
-@@ -38,7 +38,7 @@ static const float threehalf = 1.5;
+--- git.orig/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c 2014-08-29 10:35:02.604070587 -0700
+@@ -38,7 +38,7 @@
square root. */
float
@@ -51,7 +51,7 @@ Index: libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c
{
if (__builtin_expect (b > 0, 1))
{
-@@ -93,4 +93,10 @@ __ieee754_sqrtf (float b)
+@@ -93,4 +93,10 @@
}
return f_washf (b);
}
@@ -62,11 +62,11 @@ Index: libc/sysdeps/powerpc/powerpc32/603e/fpu/e_sqrtf.c
+ return __slow_ieee754_sqrtf (x);
+}
strong_alias (__ieee754_sqrtf, __sqrtf_finite)
-Index: libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c
-+++ libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c
-@@ -40,7 +40,7 @@ static const float half = 0.5;
+--- git.orig/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c 2014-08-29 10:35:02.604070587 -0700
+@@ -40,7 +40,7 @@
simultaneously. */
double
@@ -75,8 +75,8 @@ Index: libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c
{
if (__builtin_expect (b > 0, 1))
{
-@@ -77,7 +77,7 @@ __ieee754_sqrt (double b)
-
+@@ -77,7 +77,7 @@
+
/* Handle small numbers by scaling. */
if (__builtin_expect ((u.parts.msw & 0x7ff00000) <= 0x02000000, 0))
- return __ieee754_sqrt (b * two108) * twom54;
@@ -84,7 +84,7 @@ Index: libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c
#define FMADD(a_, c_, b_) \
({ double __r; \
-@@ -126,4 +126,12 @@ __ieee754_sqrt (double b)
+@@ -126,4 +126,12 @@
}
return f_wash (b);
}
@@ -97,11 +97,11 @@ Index: libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrt.c
+}
+
strong_alias (__ieee754_sqrt, __sqrt_finite)
-Index: libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c
-+++ libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c
-@@ -38,7 +38,7 @@ static const float threehalf = 1.5;
+--- git.orig/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c 2014-08-29 10:35:02.604070587 -0700
+@@ -38,7 +38,7 @@
square root. */
float
@@ -110,7 +110,7 @@ Index: libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c
{
if (__builtin_expect (b > 0, 1))
{
-@@ -93,4 +93,11 @@ __ieee754_sqrtf (float b)
+@@ -93,4 +93,11 @@
}
return f_washf (b);
}
@@ -122,11 +122,11 @@ Index: libc/sysdeps/powerpc/powerpc64/e5500/fpu/e_sqrtf.c
+}
+
strong_alias (__ieee754_sqrtf, __sqrtf_finite)
-Index: libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c
-+++ libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c
-@@ -41,10 +41,10 @@ static const float half = 0.5;
+--- git.orig/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c 2014-08-29 10:35:02.604070587 -0700
+@@ -41,10 +41,10 @@
#ifdef __STDC__
double
@@ -139,8 +139,8 @@ Index: libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c
double b;
#endif
{
-@@ -83,7 +83,7 @@ __ieee754_sqrt (b)
-
+@@ -83,7 +83,7 @@
+
/* Handle small numbers by scaling. */
if (__builtin_expect ((u.parts.msw & 0x7ff00000) <= 0x02000000, 0))
- return __ieee754_sqrt (b * two108) * twom54;
@@ -148,7 +148,7 @@ Index: libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c
#define FMADD(a_, c_, b_) \
({ double __r; \
-@@ -132,4 +132,12 @@ __ieee754_sqrt (b)
+@@ -132,4 +132,12 @@
}
return f_wash (b);
}
@@ -161,11 +161,11 @@ Index: libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrt.c
+}
+
strong_alias (__ieee754_sqrt, __sqrt_finite)
-Index: libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c
-+++ libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c
-@@ -39,10 +39,10 @@ static const float threehalf = 1.5;
+--- git.orig/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c 2014-08-29 10:35:02.604070587 -0700
+@@ -39,10 +39,10 @@
#ifdef __STDC__
float
@@ -178,7 +178,7 @@ Index: libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c
float b;
#endif
{
-@@ -99,4 +99,12 @@ __ieee754_sqrtf (b)
+@@ -99,4 +99,12 @@
}
return f_washf (b);
}
@@ -191,11 +191,11 @@ Index: libc/sysdeps/powerpc/powerpc64/e6500/fpu/e_sqrtf.c
+}
+
strong_alias (__ieee754_sqrtf, __sqrtf_finite)
-Index: libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c
-+++ libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c
-@@ -41,10 +41,10 @@ static const float half = 0.5;
+--- git.orig/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c 2014-08-29 10:35:02.608070587 -0700
+@@ -41,10 +41,10 @@
#ifdef __STDC__
double
@@ -208,8 +208,8 @@ Index: libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c
double b;
#endif
{
-@@ -83,7 +83,7 @@ __ieee754_sqrt (b)
-
+@@ -83,7 +83,7 @@
+
/* Handle small numbers by scaling. */
if (__builtin_expect ((u.parts.msw & 0x7ff00000) <= 0x02000000, 0))
- return __ieee754_sqrt (b * two108) * twom54;
@@ -217,7 +217,7 @@ Index: libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c
#define FMADD(a_, c_, b_) \
({ double __r; \
-@@ -132,4 +132,12 @@ __ieee754_sqrt (b)
+@@ -132,4 +132,12 @@
}
return f_wash (b);
}
@@ -230,11 +230,11 @@ Index: libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrt.c
+}
+
strong_alias (__ieee754_sqrt, __sqrt_finite)
-Index: libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c
-+++ libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c
-@@ -39,10 +39,10 @@ static const float threehalf = 1.5;
+--- git.orig/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c 2014-08-29 10:35:02.608070587 -0700
+@@ -39,10 +39,10 @@
#ifdef __STDC__
float
@@ -247,7 +247,7 @@ Index: libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c
float b;
#endif
{
-@@ -99,4 +99,12 @@ __ieee754_sqrtf (b)
+@@ -99,4 +99,12 @@
}
return f_washf (b);
}
@@ -260,11 +260,11 @@ Index: libc/sysdeps/powerpc/powerpc32/e500mc/fpu/e_sqrtf.c
+}
+
strong_alias (__ieee754_sqrtf, __sqrtf_finite)
-Index: libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c
-+++ libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c
-@@ -41,10 +41,10 @@ static const float half = 0.5;
+--- git.orig/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c 2014-08-29 10:35:02.608070587 -0700
+@@ -41,10 +41,10 @@
#ifdef __STDC__
double
@@ -277,8 +277,8 @@ Index: libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c
double b;
#endif
{
-@@ -83,7 +83,7 @@ __ieee754_sqrt (b)
-
+@@ -83,7 +83,7 @@
+
/* Handle small numbers by scaling. */
if (__builtin_expect ((u.parts.msw & 0x7ff00000) <= 0x02000000, 0))
- return __ieee754_sqrt (b * two108) * twom54;
@@ -286,7 +286,7 @@ Index: libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c
#define FMADD(a_, c_, b_) \
({ double __r; \
-@@ -132,4 +132,12 @@ __ieee754_sqrt (b)
+@@ -132,4 +132,12 @@
}
return f_wash (b);
}
@@ -299,11 +299,11 @@ Index: libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrt.c
+}
+
strong_alias (__ieee754_sqrt, __sqrt_finite)
-Index: libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c
-+++ libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c
-@@ -39,10 +39,10 @@ static const float threehalf = 1.5;
+--- git.orig/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c 2014-08-29 10:35:02.608070587 -0700
+@@ -39,10 +39,10 @@
#ifdef __STDC__
float
@@ -316,7 +316,7 @@ Index: libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c
float b;
#endif
{
-@@ -99,4 +99,12 @@ __ieee754_sqrtf (b)
+@@ -99,4 +99,12 @@
}
return f_washf (b);
}
@@ -329,11 +329,11 @@ Index: libc/sysdeps/powerpc/powerpc32/e5500/fpu/e_sqrtf.c
+}
+
strong_alias (__ieee754_sqrtf, __sqrtf_finite)
-Index: libc/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c
+Index: git/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c
-+++ libc/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c
-@@ -132,4 +132,12 @@ __ieee754_sqrt (b)
+--- git.orig/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c 2014-08-29 10:35:02.608070587 -0700
+@@ -132,4 +132,12 @@
}
return f_wash (b);
}
@@ -346,11 +346,11 @@ Index: libc/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrt.c
+}
+
strong_alias (__ieee754_sqrt, __sqrt_finite)
-Index: libc/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrtf.c
+Index: git/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrtf.c
===================================================================
---- libc.orig/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrtf.c
-+++ libc/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrtf.c
-@@ -99,4 +99,12 @@ __ieee754_sqrtf (b)
+--- git.orig/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrtf.c 2014-08-29 10:35:02.616070587 -0700
++++ git/sysdeps/powerpc/powerpc32/e6500/fpu/e_sqrtf.c 2014-08-29 10:35:02.608070587 -0700
+@@ -99,4 +99,12 @@
}
return f_washf (b);
}
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/ppce6500-32b_slow_ieee754_sqrt.patch b/meta/recipes-core/glibc/glibc/ppce6500-32b_slow_ieee754_sqrt.patch
index 4c6c1070c3..4c6c1070c3 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/ppce6500-32b_slow_ieee754_sqrt.patch
+++ b/meta/recipes-core/glibc/glibc/ppce6500-32b_slow_ieee754_sqrt.patch
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/relocatable_sdk.patch b/meta/recipes-core/glibc/glibc/relocatable_sdk.patch
index ca5f17ba58..ca5f17ba58 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/relocatable_sdk.patch
+++ b/meta/recipes-core/glibc/glibc/relocatable_sdk.patch
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/relocatable_sdk_fix_openpath.patch b/meta/recipes-core/glibc/glibc/relocatable_sdk_fix_openpath.patch
index f164f8f9ae..f164f8f9ae 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/relocatable_sdk_fix_openpath.patch
+++ b/meta/recipes-core/glibc/glibc/relocatable_sdk_fix_openpath.patch
diff --git a/meta/recipes-core/eglibc/eglibc-2.19/timezone-re-written-tzselect-as-posix-sh.patch b/meta/recipes-core/glibc/glibc/timezone-re-written-tzselect-as-posix-sh.patch
index 1477ea2cc5..55547deae7 100644
--- a/meta/recipes-core/eglibc/eglibc-2.19/timezone-re-written-tzselect-as-posix-sh.patch
+++ b/meta/recipes-core/glibc/glibc/timezone-re-written-tzselect-as-posix-sh.patch
@@ -10,23 +10,24 @@ Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
timezone/tzselect.ksh | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
-diff --git a/timezone/Makefile b/timezone/Makefile
---- a/timezone/Makefile
-+++ b/timezone/Makefile
-@@ -113,7 +113,7 @@ $(testdata)/Asia/Tokyo: asia $(zic-deps)
+Index: git/timezone/Makefile
+===================================================================
+--- git.orig/timezone/Makefile 2014-08-27 05:35:58.008070587 +0000
++++ git/timezone/Makefile 2014-08-27 05:36:37.908070587 +0000
+@@ -114,7 +114,7 @@
$(objpfx)tzselect: tzselect.ksh $(common-objpfx)config.make
-- sed -e 's|/bin/bash|$(KSH)|g' \
-+ sed -e 's|/bin/bash|/bin/sh|g' \
- -e '/TZDIR=/s|\$$(pwd)|$(zonedir)|' \
+- sed -e 's|/bin/bash|$(BASH)|' \
++ sed -e 's|/bin/bash|/bin/sh|' \
+ -e 's|TZDIR=[^}]*|TZDIR=$(zonedir)|' \
-e '/TZVERSION=/s|see_Makefile|"$(version)"|' \
-e '/PKGVERSION=/s|=.*|="$(PKGVERSION)"|' \
-diff --git a/timezone/tzselect.ksh b/timezone/tzselect.ksh
-index 9d70691..25f45a8 100644
---- a/timezone/tzselect.ksh
-+++ b/timezone/tzselect.ksh
-@@ -35,7 +35,7 @@ REPORT_BUGS_TO=tz@iana.org
+Index: git/timezone/tzselect.ksh
+===================================================================
+--- git.orig/timezone/tzselect.ksh 2014-08-27 05:35:58.008070587 +0000
++++ git/timezone/tzselect.ksh 2014-08-27 05:35:58.000070587 +0000
+@@ -35,7 +35,7 @@
# Specify default values for environment variables if they are unset.
: ${AWK=awk}
@@ -35,6 +36,3 @@ index 9d70691..25f45a8 100644
# Check for awk Posix compliance.
($AWK -v x=y 'BEGIN { exit 123 }') </dev/null >/dev/null 2>&1
---
-1.8.1.2
-
diff --git a/meta/recipes-core/eglibc/eglibc_2.19.bb b/meta/recipes-core/glibc/glibc_2.20.bb
index 19bd52d5d7..9dd5e67ce8 100644
--- a/meta/recipes-core/eglibc/eglibc_2.19.bb
+++ b/meta/recipes-core/glibc/glibc_2.20.bb
@@ -1,9 +1,12 @@
-require eglibc.inc
+require glibc.inc
DEPENDS += "gperf-native kconfig-frontends-native"
-SRC_URI = "http://downloads.yoctoproject.org/releases/eglibc/eglibc-${PV}-svnr25243.tar.bz2 \
- file://eglibc-svn-arm-lowlevellock-include-tls.patch \
+PV = "2.20"
+
+SRCREV = "b8079dd0d360648e4e8de48656c5c38972621072"
+
+SRC_URI = "git://sourceware.org/git/glibc.git;branch=release/${PV}/master \
file://IO-acquire-lock-fix.patch \
file://mips-rld-map-check.patch \
file://etc/ld.so.conf \
@@ -11,27 +14,37 @@ SRC_URI = "http://downloads.yoctoproject.org/releases/eglibc/eglibc-${PV}-svnr25
file://glibc.fix_sqrt2.patch \
file://multilib_readlib.patch \
file://ppc-sqrt_finite.patch \
- file://GLRO_dl_debug_mask.patch \
- file://initgroups_keys.patch \
- file://eglibc_fix_findidx_parameters.patch \
file://ppc_slow_ieee754_sqrt.patch \
- file://fileops-without-wchar-io.patch \
file://add_resource_h_to_wait_h.patch \
- file://0001-eglibc-menuconfig-support.patch \
- file://0002-eglibc-menuconfig-hex-string-options.patch \
- file://0003-eglibc-menuconfig-build-instructions.patch \
file://fsl-ppc-no-fsqrt.patch \
file://0001-R_ARM_TLS_DTPOFF32.patch \
file://0001-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch \
file://fix-tibetian-locales.patch \
file://ppce6500-32b_slow_ieee754_sqrt.patch \
file://grok_gold.patch \
- file://fix_am_rootsbindir.patch;striplevel=2 \
+ file://fix_am_rootsbindir.patch \
+ ${EGLIBCPATCHES} \
+ ${CVEPATCHES} \
+ "
+EGLIBCPATCHES = "\
file://timezone-re-written-tzselect-as-posix-sh.patch \
+ file://eglibc.patch \
+ file://option-groups.patch \
+ file://GLRO_dl_debug_mask.patch \
+ file://eglibc-header-bootstrap.patch \
+ file://eglibc-resolv-dynamic.patch \
+ file://eglibc-ppc8xx-cache-line-workaround.patch \
+ file://eglibc-sh4-fpscr_values.patch \
+ file://eglibc-use-option-groups.patch \
"
-SRC_URI[md5sum] = "197836c2ba42fb146e971222647198dd"
-SRC_URI[sha256sum] = "baaa030531fc308f7820c46acdf8e1b2f8e3c1f40bcd28b6e440d1c95d170d4c"
+# file://eglibc-install-pic-archives.patch \
+# file://initgroups_keys.patch \
+#
+CVEPATCHES = "\
+ file://CVE-2014-7817-wordexp-fails-to-honour-WRDE_NOCMD.patch \
+ file://CVE-2012-3406-Stack-overflow-in-vfprintf-BZ-16617.patch \
+ "
LIC_FILES_CHKSUM = "file://LICENSES;md5=e9a558e243b36d3209f380deb394b213 \
file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \
@@ -41,7 +54,7 @@ SRC_URI_append_class-nativesdk = " file://ld-search-order.patch \
file://relocatable_sdk.patch \
file://relocatable_sdk_fix_openpath.patch \
"
-S = "${WORKDIR}/eglibc-${PV}/libc"
+S = "${WORKDIR}/git"
B = "${WORKDIR}/build-${TARGET_SYS}"
PACKAGES_DYNAMIC = ""
@@ -85,7 +98,7 @@ do_patch_append() {
bb.build.exec_func('do_fix_readlib_c', d)
}
-# for mips eglibc now builds syscall tables for all abi's
+# for mips glibc now builds syscall tables for all abi's
# so we make sure that we choose right march option which is
# compatible with o32,n32 and n64 abi's
# e.g. -march=mips32 is not compatible with n32 and n64 therefore
@@ -140,6 +153,6 @@ do_compile () {
}
-require eglibc-package.inc
+require glibc-package.inc
BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/32and64bit.patch b/meta/recipes-core/glibc/ldconfig-native-2.12.1/32and64bit.patch
index cdfeaeadd8..cdfeaeadd8 100644
--- a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/32and64bit.patch
+++ b/meta/recipes-core/glibc/ldconfig-native-2.12.1/32and64bit.patch
diff --git a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/README b/meta/recipes-core/glibc/ldconfig-native-2.12.1/README
index 43fb983729..43fb983729 100644
--- a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/README
+++ b/meta/recipes-core/glibc/ldconfig-native-2.12.1/README
diff --git a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/endian-ness_handling.patch b/meta/recipes-core/glibc/ldconfig-native-2.12.1/endian-ness_handling.patch
index 7f8e4db78a..7f8e4db78a 100644
--- a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/endian-ness_handling.patch
+++ b/meta/recipes-core/glibc/ldconfig-native-2.12.1/endian-ness_handling.patch
diff --git a/meta/recipes-core/glibc/ldconfig-native-2.12.1/endian-ness_handling_fix.patch b/meta/recipes-core/glibc/ldconfig-native-2.12.1/endian-ness_handling_fix.patch
new file mode 100644
index 0000000000..6aecfe5268
--- /dev/null
+++ b/meta/recipes-core/glibc/ldconfig-native-2.12.1/endian-ness_handling_fix.patch
@@ -0,0 +1,47 @@
+Upstream-Status: Inappropriate [embedded specific]
+
+Fix a problem when parsing 64-bit ELF headers on big-endian targets:
+some header fields were read with the wrong size.
+
+2014/10/24
+Par Olsson <Par.Olsson@windriver.com>
+Shan Hai <shan.hai@windriver.com>
+
+diff --git a/readelflib.c b/readelflib.c
+index 3f5b25b..0bf0de3 100644
+--- a/readelflib.c
++++ b/readelflib.c
+@@ -261,8 +261,8 @@ process_elf_file64 (const char *file_name, const char *lib, int *flag,
+ int i;
+ unsigned int j;
+ Elf64_Addr loadaddr;
+- unsigned int dynamic_addr;
+- size_t dynamic_size;
++ Elf64_Addr dynamic_addr;
++ Elf64_Xword dynamic_size;
+ char *program_interpreter;
+
+ Elf64_Ehdr *elf_header;
+@@ -311,7 +311,7 @@ process_elf_file64 (const char *file_name, const char *lib, int *flag,
+ error (0, 0, _("more than one dynamic segment\n"));
+
+ dynamic_addr = read64(segment->p_offset, be);
+- dynamic_size = read32(segment->p_filesz, be);
++ dynamic_size = read64(segment->p_filesz, be);
+ break;
+
+ case PT_INTERP:
+@@ -329,11 +329,11 @@ process_elf_file64 (const char *file_name, const char *lib, int *flag,
+ break;
+
+ case PT_NOTE:
+- if (!*osversion && read32(segment->p_filesz, be) >= 32 && read32(segment->p_align, be) >= 4)
++ if (!*osversion && read64(segment->p_filesz, be) >= 32 && read64(segment->p_align, be) >= 4)
+ {
+ Elf64_Word *abi_note = (Elf64_Word *) (file_contents
+ + read64(segment->p_offset, be));
+- Elf64_Addr size = read32(segment->p_filesz, be);
++ Elf64_Xword size = read64(segment->p_filesz, be);
+
+ while (read32(abi_note [0], be) != 4 || read32(abi_note [1], be) != 16
+ || read32(abi_note [2], be) != 1
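As a rough, self-contained illustration of the bug class this patch
addresses (this is not the ldconfig code itself), reading a big-endian
64-bit ELF field such as p_filesz with a 32-bit accessor picks up only the
high half of the value, which is zero for any realistically sized segment:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        /* p_filesz = 0x1234 stored as a big-endian Elf64_Xword. */
        const unsigned char field[8] = {0, 0, 0, 0, 0, 0, 0x12, 0x34};

        uint32_t half;
        memcpy(&half, field, sizeof half);          /* 32-bit read: high half only */

        uint64_t full = 0;
        for (size_t i = 0; i < sizeof field; i++)   /* correct 64-bit big-endian read */
            full = (full << 8) | field[i];

        printf("32-bit read sees %#x, 64-bit read sees %#llx\n",
               (unsigned)half, (unsigned long long)full);
        return 0;
    }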
diff --git a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/endianess-header.patch b/meta/recipes-core/glibc/ldconfig-native-2.12.1/endianess-header.patch
index a18b2c20de..a18b2c20de 100644
--- a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/endianess-header.patch
+++ b/meta/recipes-core/glibc/ldconfig-native-2.12.1/endianess-header.patch
diff --git a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/flag_fix.patch b/meta/recipes-core/glibc/ldconfig-native-2.12.1/flag_fix.patch
index 4e9aab9416..4e9aab9416 100644
--- a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/flag_fix.patch
+++ b/meta/recipes-core/glibc/ldconfig-native-2.12.1/flag_fix.patch
diff --git a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig-default-to-all-multilib-dirs.patch b/meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig-default-to-all-multilib-dirs.patch
index 5ed4f6ff69..5ed4f6ff69 100644
--- a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig-default-to-all-multilib-dirs.patch
+++ b/meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig-default-to-all-multilib-dirs.patch
diff --git a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig-native-2.12.1.tar.bz2 b/meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig-native-2.12.1.tar.bz2
index dc1e79888e..dc1e79888e 100644
--- a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig-native-2.12.1.tar.bz2
+++ b/meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig-native-2.12.1.tar.bz2
Binary files differ
diff --git a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig.patch b/meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig.patch
index 52986e61c7..52986e61c7 100644
--- a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig.patch
+++ b/meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig.patch
diff --git a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig_aux-cache_path_fix.patch b/meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig_aux-cache_path_fix.patch
index 27bc411078..27bc411078 100644
--- a/meta/recipes-core/eglibc/ldconfig-native-2.12.1/ldconfig_aux-cache_path_fix.patch
+++ b/meta/recipes-core/glibc/ldconfig-native-2.12.1/ldconfig_aux-cache_path_fix.patch
diff --git a/meta/recipes-core/eglibc/ldconfig-native_2.12.1.bb b/meta/recipes-core/glibc/ldconfig-native_2.12.1.bb
index 7c3463589b..1debf8ee2f 100644
--- a/meta/recipes-core/eglibc/ldconfig-native_2.12.1.bb
+++ b/meta/recipes-core/glibc/ldconfig-native_2.12.1.bb
@@ -12,6 +12,7 @@ SRC_URI = "file://ldconfig-native-2.12.1.tar.bz2 \
file://flag_fix.patch \
file://endianess-header.patch \
file://ldconfig-default-to-all-multilib-dirs.patch \
+ file://endian-ness_handling_fix.patch \
"
PR = "r2"
diff --git a/meta/recipes-core/eglibc/site_config/funcs b/meta/recipes-core/glibc/site_config/funcs
index ccc85392d7..ccc85392d7 100644
--- a/meta/recipes-core/eglibc/site_config/funcs
+++ b/meta/recipes-core/glibc/site_config/funcs
diff --git a/meta/recipes-core/eglibc/site_config/headers b/meta/recipes-core/glibc/site_config/headers
index 609ab53797..609ab53797 100644
--- a/meta/recipes-core/eglibc/site_config/headers
+++ b/meta/recipes-core/glibc/site_config/headers
diff --git a/meta/recipes-core/eglibc/site_config/types b/meta/recipes-core/glibc/site_config/types
index 178bd85a00..178bd85a00 100644
--- a/meta/recipes-core/eglibc/site_config/types
+++ b/meta/recipes-core/glibc/site_config/types
diff --git a/meta/recipes-core/images/build-appliance-image/Yocto_Build_Appliance.vmx b/meta/recipes-core/images/build-appliance-image/Yocto_Build_Appliance.vmx
index b04a1b6a73..6472e87509 100644
--- a/meta/recipes-core/images/build-appliance-image/Yocto_Build_Appliance.vmx
+++ b/meta/recipes-core/images/build-appliance-image/Yocto_Build_Appliance.vmx
@@ -1,17 +1,23 @@
.encoding = "UTF-8"
-displayname = "Yocto Build Appliance"
-guestos = "other"
-tools.syncTime = "FALSE"
-virtualhw.version = "8"
config.version = "8"
+virtualHW.version = "10"
numvcpus = "2"
-cpuid.coresPerSocket = "1"
vcpu.hotadd = "TRUE"
-mem.hotadd = "TRUE"
memsize = "4096"
-svga.autodetect = "TRUE"
+mem.hotadd = "TRUE"
+sata0.present = "TRUE"
+sata0:0.present = "TRUE"
+sata0:0.fileName = "Yocto_Build_Appliance.vmdk"
+ethernet0.present = "TRUE"
+ethernet0.virtualDev = "e1000"
+ethernet0.wakeOnPcktRcv = "FALSE"
+ethernet0.addressType = "generated"
+usb.present = "TRUE"
+ehci.pciSlotNumber = "0"
+sound.present = "TRUE"
+sound.fileName = "-1"
+sound.autodetect = "TRUE"
pciBridge0.present = "TRUE"
-mks.enable3d = "TRUE"
pciBridge4.present = "TRUE"
pciBridge4.virtualDev = "pcieRootPort"
pciBridge4.functions = "8"
@@ -25,28 +31,16 @@ pciBridge7.present = "TRUE"
pciBridge7.virtualDev = "pcieRootPort"
pciBridge7.functions = "8"
vmci0.present = "TRUE"
-floppy0.present = "TRUE"
-floppy0.fileType = "device"
-floppy0.autodetect = "FALSE"
-floppy0.startConnected = "FALSE"
-ide1:0.present = "TRUE"
-ide1:0.deviceType = "atapi-cdrom"
-ide1:0.autodetect = "TRUE"
-ide1:0.startConnected = "FALSE"
-ide0:0.present = "TRUE"
-ide0:0.deviceType = "disk"
-ide0:0.fileName = "Yocto_Build_Appliance.vmdk"
-usb.present = "TRUE"
-scsi0.virtualDev = "lsilogic"
-scsi0.present = "TRUE"
-ethernet0.present = "TRUE"
-ethernet0.virtualDev = "e1000"
-ethernet0.connectionType = "bridged"
-ethernet0.startConnected = "TRUE"
-ethernet0.addressType = "generated"
-sound.present = "TRUE"
-sound.virtualDev = "es1371"
-sound.autodetect = "TRUE"
-extendedConfigFile = "Yocto_Build_Appliance.vmxf"
-sound.fileName = "-1"
+hpet0.present = "TRUE"
+usb.vbluetooth.startConnected = "TRUE"
+displayName = "Yocto Build Appliance"
+guestOS = "other3xlinux-64"
virtualHW.productCompatibility = "hosted"
+gui.exitOnCLIHLT = "FALSE"
+powerType.powerOff = "soft"
+powerType.powerOn = "soft"
+powerType.suspend = "soft"
+powerType.reset = "soft"
+extendedConfigFile = "Yocto_Build_Appliance.vmxf"
+scsi0:0.present = "FALSE"
+floppy0.present = "FALSE"
diff --git a/meta/recipes-core/images/build-appliance-image/Yocto_Build_Appliance.vmxf b/meta/recipes-core/images/build-appliance-image/Yocto_Build_Appliance.vmxf
index ca3f0264d6..9e941ff2f3 100644
--- a/meta/recipes-core/images/build-appliance-image/Yocto_Build_Appliance.vmxf
+++ b/meta/recipes-core/images/build-appliance-image/Yocto_Build_Appliance.vmxf
@@ -1,7 +1,7 @@
<?xml version="1.0"?>
<Foundry>
<VM>
-<VMId type="string">52 e4 0b df 7b 70 21 f8-88 56 a7 26 47 43 95 93</VMId>
+<VMId type="string">52 a5 d8 cb ed 6c 85 48-cd 99 68 af cc 30 a0 98</VMId>
<ClientMetaData>
<clientMetaDataAttributes/>
<HistoryEventList/></ClientMetaData>
diff --git a/meta/recipes-core/images/build-appliance-image_8.0.bb b/meta/recipes-core/images/build-appliance-image_8.0.bb
index 1c07156cdd..7651ff8537 100644
--- a/meta/recipes-core/images/build-appliance-image_8.0.bb
+++ b/meta/recipes-core/images/build-appliance-image_8.0.bb
@@ -21,8 +21,8 @@ IMAGE_FSTYPES = "vmdk"
inherit core-image
-SRCREV ?= "cc7d457392133ad2ecf7335447a4a01a0d8a8e4e"
-SRC_URI = "git://git.yoctoproject.org/poky \
+SRCREV ?= "f0a2a2f44587f02fa7f434e82f91a6de2231f6b6"
+SRC_URI = "git://git.yoctoproject.org/poky;branch=dizzy \
file://Yocto_Build_Appliance.vmx \
file://Yocto_Build_Appliance.vmxf \
"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-core/init-ifupdown/init-ifupdown/{{=machine}}/interfaces b/meta/recipes-core/init-ifupdown/init-ifupdown-1.0/qemuarm64/interfaces
index 16967763e5..16967763e5 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-core/init-ifupdown/init-ifupdown/{{=machine}}/interfaces
+++ b/meta/recipes-core/init-ifupdown/init-ifupdown-1.0/qemuarm64/interfaces
diff --git a/meta/recipes-core/initscripts/initscripts_1.0.bb b/meta/recipes-core/initscripts/initscripts_1.0.bb
index 7273a82367..afdc53830a 100644
--- a/meta/recipes-core/initscripts/initscripts_1.0.bb
+++ b/meta/recipes-core/initscripts/initscripts_1.0.bb
@@ -139,11 +139,15 @@ do_install () {
MASKED_SCRIPTS = " \
banner \
bootmisc \
+ checkfs \
checkroot \
devpts \
+ dmesg \
hostname \
mountall \
mountnfs \
+ populate-volatile \
+ read-only-rootfs-hook \
rmnologin \
sysfs \
urandom"
@@ -158,3 +162,5 @@ pkg_postinst_${PN} () {
done
fi
}
+
+CONFFILES_${PN} += "${sysconfdir}/init.d/checkroot.sh"
diff --git a/meta/recipes-core/libxml/libxml2.inc b/meta/recipes-core/libxml/libxml2.inc
index 2dffc5ac9f..1314bbfb8d 100644
--- a/meta/recipes-core/libxml/libxml2.inc
+++ b/meta/recipes-core/libxml/libxml2.inc
@@ -21,17 +21,20 @@ SRC_URI = "ftp://xmlsoft.org/libxml2/libxml2-${PV}.tar.gz;name=libtar \
file://libxml2-CVE-2014-0191-fix.patch \
file://python-sitepackages-dir.patch \
file://libxml-m4-use-pkgconfig.patch \
+ file://configure.ac-fix-cross-compiling-warning.patch \
"
BINCONFIG = "${bindir}/xml2-config"
inherit autotools pkgconfig binconfig-disabled pythonnative ptest
-RDEPENDS_${PN}-ptest_append_libc-glibc += "eglibc-gconv-ebcdic-us eglibc-gconv-ibm1141"
+RDEPENDS_${PN}-ptest += "python-core"
+
+RDEPENDS_${PN}-ptest_append_libc-glibc += "glibc-gconv-ebcdic-us glibc-gconv-ibm1141"
# We don't DEPEND on binutils for ansidecl.h so ensure we don't use the header
do_configure_prepend () {
- sed -i -e '/.*ansidecl.h.*/d' ${S}/configure.in
+ sed -i -e '/.*ansidecl.h.*/d' ${S}/configure.ac
}
do_configure_prepend_class-nativesdk () {
@@ -39,10 +42,11 @@ do_configure_prepend_class-nativesdk () {
export PYTHON_SITE_PACKAGES="${PYTHON_SITEPACKAGES_DIR}"
}
+# WARNING: zlib is required for RPM use
EXTRA_OECONF = "--without-python --without-debug --without-legacy --with-catalog --without-docbook --with-c14n --without-lzma --with-fexceptions"
-EXTRA_OECONF_class-native = "--with-python=${STAGING_BINDIR}/python --without-legacy --without-docbook --with-c14n --without-lzma"
-EXTRA_OECONF_class-nativesdk = "--with-python=${STAGING_BINDIR}/python --without-legacy --without-docbook --with-c14n --without-lzma"
-EXTRA_OECONF_linuxstdbase = "--without-python --with-debug --with-legacy --with-docbook --with-c14n --without-lzma"
+EXTRA_OECONF_class-native = "--with-python=${STAGING_BINDIR}/python --without-legacy --without-docbook --with-c14n --without-lzma --with-zlib"
+EXTRA_OECONF_class-nativesdk = "--with-python=${STAGING_BINDIR}/python --without-legacy --without-docbook --with-c14n --without-lzma --with-zlib"
+EXTRA_OECONF_linuxstdbase = "--without-python --with-debug --with-legacy --with-docbook --with-c14n --without-lzma --with-zlib"
# required for python binding
export HOST_SYS
@@ -62,7 +66,7 @@ PACKAGES += "${PN}-utils ${PN}-python"
FILES_${PN}-dbg += "${PYTHON_SITEPACKAGES_DIR}/.debug"
FILES_${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/*.a"
-FILES_${PN}-dev += "${libdir}/xml2Conf.sh"
+FILES_${PN}-dev += "${libdir}/xml2Conf.sh ${libdir}/cmake/*"
FILES_${PN}-utils += "${bindir}/*"
FILES_${PN}-python += "${PYTHON_SITEPACKAGES_DIR}"
diff --git a/meta/recipes-core/libxml/libxml2/configure.ac-fix-cross-compiling-warning.patch b/meta/recipes-core/libxml/libxml2/configure.ac-fix-cross-compiling-warning.patch
new file mode 100644
index 0000000000..2f8079b052
--- /dev/null
+++ b/meta/recipes-core/libxml/libxml2/configure.ac-fix-cross-compiling-warning.patch
@@ -0,0 +1,45 @@
+configure.ac: fix cross compiling warning
+
+There is a warning while cross compiling which triggers a
+failure in do_qa_configure:
+...
+|configure:12652: checking for gzread in -lz
+|configure:12677: mips-poky-linux-gcc -meb -mabi=32 -mhard-float
+ -L/lib conftest.c -lz >&5
+|ld: warning: library search path "/lib" is unsafe for cross-compilation
+...
+
+Not adding '-L${Z_DIR}/lib' to LDFLAGS while doing the library check fixes it.
+
+Upstream-Status: Inappropriate [oe specific]
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+---
+ configure.ac | 5 +----
+ 1 file changed, 1 insertion(+), 4 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index 9a90600..0bac8a4 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -391,8 +391,6 @@ if test "$with_zlib" = "no"; then
+ echo "Disabling compression support"
+ else
+ AC_CHECK_HEADERS(zlib.h,
+- [SAVE_LDFLAGS="${LDFLAGS}"
+- LDFLAGS="-L${Z_DIR}/lib"
+ AC_CHECK_LIB(z, gzread,[
+ AC_DEFINE([HAVE_LIBZ], [1], [Have compression library])
+ WITH_ZLIB=1
+@@ -406,8 +404,7 @@ else
+ esac]
+ else
+ Z_LIBS="-lz"
+- fi])
+- LDFLAGS="${SAVE_LDFLAGS}"])
++ fi]))
+ fi
+
+ AC_SUBST(Z_CFLAGS)
+--
+1.9.1
+
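
For context, the failure the patch above avoids comes from the configure-stage QA step flagging cross-compilation-unsafe library search paths, such as the ld warning quoted in the patch description. The following is a simplified, illustrative sketch of that kind of scan, not the actual do_qa_configure implementation; the log file name and the warning pattern are assumptions.

    # Illustrative sketch only -- NOT the real do_qa_configure code.
    # It scans a hypothetical config.log for the ld warning quoted in the
    # patch description above and reports a QA issue if one is found.
    import re
    import sys

    UNSAFE = re.compile(r"unsafe for cross-compilation")

    def check_configure_log(path="config.log"):   # path is an assumption
        try:
            with open(path) as log:
                for lineno, line in enumerate(log, 1):
                    if UNSAFE.search(line):
                        print("QA issue: %s:%d: %s" % (path, lineno, line.strip()))
                        return False
        except IOError:
            pass    # no log produced; nothing to check in this sketch
        return True

    if __name__ == "__main__":
        sys.exit(0 if check_configure_log() else 1)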
diff --git a/meta/recipes-core/libxml/libxml2/libxml2-CVE-2014-3660.patch b/meta/recipes-core/libxml/libxml2/libxml2-CVE-2014-3660.patch
new file mode 100644
index 0000000000..b9621c93eb
--- /dev/null
+++ b/meta/recipes-core/libxml/libxml2/libxml2-CVE-2014-3660.patch
@@ -0,0 +1,147 @@
+From be2a7edaf289c5da74a4f9ed3a0b6c733e775230 Mon Sep 17 00:00:00 2001
+From: Daniel Veillard <veillard@redhat.com>
+Date: Thu, 16 Oct 2014 13:59:47 +0800
+Subject: Fix for CVE-2014-3660
+
+Issues related to the billion laughs entity expansion which happened to
+escape the initial set of fixes
+
+Upstream-Status: Backport
+Reference: https://git.gnome.org/browse/libxml2/commit/?id=be2a7edaf289c5da74a4f9ed3a0b6c733e775230&context=3&ignorews=0&ss=0
+
+Signed-off-by: Joe MacDonald <joe_macdonald@mentor.com>
+
+diff --git a/parser.c b/parser.c
+index f51e8d2..1d93967 100644
+--- a/parser.c
++++ b/parser.c
+@@ -130,6 +130,29 @@ xmlParserEntityCheck(xmlParserCtxtPtr ctxt, size_t size,
+ return (0);
+ if (ctxt->lastError.code == XML_ERR_ENTITY_LOOP)
+ return (1);
++
++ /*
++ * This may look absurd but is needed to detect
++ * entities problems
++ */
++ if ((ent != NULL) && (ent->etype != XML_INTERNAL_PREDEFINED_ENTITY) &&
++ (ent->content != NULL) && (ent->checked == 0)) {
++ unsigned long oldnbent = ctxt->nbentities;
++ xmlChar *rep;
++
++ ent->checked = 1;
++
++ rep = xmlStringDecodeEntities(ctxt, ent->content,
++ XML_SUBSTITUTE_REF, 0, 0, 0);
++
++ ent->checked = (ctxt->nbentities - oldnbent + 1) * 2;
++ if (rep != NULL) {
++ if (xmlStrchr(rep, '<'))
++ ent->checked |= 1;
++ xmlFree(rep);
++ rep = NULL;
++ }
++ }
+ if (replacement != 0) {
+ if (replacement < XML_MAX_TEXT_LENGTH)
+ return(0);
+@@ -189,9 +212,12 @@ xmlParserEntityCheck(xmlParserCtxtPtr ctxt, size_t size,
+ return (0);
+ } else {
+ /*
+- * strange we got no data for checking just return
++ * strange we got no data for checking
+ */
+- return (0);
++ if (((ctxt->lastError.code != XML_ERR_UNDECLARED_ENTITY) &&
++ (ctxt->lastError.code != XML_WAR_UNDECLARED_ENTITY)) ||
++ (ctxt->nbentities <= 10000))
++ return (0);
+ }
+ xmlFatalErr(ctxt, XML_ERR_ENTITY_LOOP, NULL);
+ return (1);
+@@ -2589,6 +2615,7 @@ xmlParserHandlePEReference(xmlParserCtxtPtr ctxt) {
+ name, NULL);
+ ctxt->valid = 0;
+ }
++ xmlParserEntityCheck(ctxt, 0, NULL, 0);
+ } else if (ctxt->input->free != deallocblankswrapper) {
+ input = xmlNewBlanksWrapperInputStream(ctxt, entity);
+ if (xmlPushInput(ctxt, input) < 0)
+@@ -2759,6 +2786,7 @@ xmlStringLenDecodeEntities(xmlParserCtxtPtr ctxt, const xmlChar *str, int len,
+ if ((ctxt->lastError.code == XML_ERR_ENTITY_LOOP) ||
+ (ctxt->lastError.code == XML_ERR_INTERNAL_ERROR))
+ goto int_error;
++ xmlParserEntityCheck(ctxt, 0, ent, 0);
+ if (ent != NULL)
+ ctxt->nbentities += ent->checked / 2;
+ if ((ent != NULL) &&
+@@ -2810,6 +2838,7 @@ xmlStringLenDecodeEntities(xmlParserCtxtPtr ctxt, const xmlChar *str, int len,
+ ent = xmlParseStringPEReference(ctxt, &str);
+ if (ctxt->lastError.code == XML_ERR_ENTITY_LOOP)
+ goto int_error;
++ xmlParserEntityCheck(ctxt, 0, ent, 0);
+ if (ent != NULL)
+ ctxt->nbentities += ent->checked / 2;
+ if (ent != NULL) {
+@@ -7312,6 +7341,7 @@ xmlParseReference(xmlParserCtxtPtr ctxt) {
+ (ret != XML_WAR_UNDECLARED_ENTITY)) {
+ xmlFatalErrMsgStr(ctxt, XML_ERR_UNDECLARED_ENTITY,
+ "Entity '%s' failed to parse\n", ent->name);
++ xmlParserEntityCheck(ctxt, 0, ent, 0);
+ } else if (list != NULL) {
+ xmlFreeNodeList(list);
+ list = NULL;
+@@ -7418,7 +7448,7 @@ xmlParseReference(xmlParserCtxtPtr ctxt) {
+ /*
+ * We are copying here, make sure there is no abuse
+ */
+- ctxt->sizeentcopy += ent->length;
++ ctxt->sizeentcopy += ent->length + 5;
+ if (xmlParserEntityCheck(ctxt, 0, ent, ctxt->sizeentcopy))
+ return;
+
+@@ -7466,7 +7496,7 @@ xmlParseReference(xmlParserCtxtPtr ctxt) {
+ /*
+ * We are copying here, make sure there is no abuse
+ */
+- ctxt->sizeentcopy += ent->length;
++ ctxt->sizeentcopy += ent->length + 5;
+ if (xmlParserEntityCheck(ctxt, 0, ent, ctxt->sizeentcopy))
+ return;
+
+@@ -7652,6 +7682,7 @@ xmlParseEntityRef(xmlParserCtxtPtr ctxt) {
+ ctxt->sax->reference(ctxt->userData, name);
+ }
+ }
++ xmlParserEntityCheck(ctxt, 0, ent, 0);
+ ctxt->valid = 0;
+ }
+
+@@ -7845,6 +7876,7 @@ xmlParseStringEntityRef(xmlParserCtxtPtr ctxt, const xmlChar ** str) {
+ "Entity '%s' not defined\n",
+ name);
+ }
++ xmlParserEntityCheck(ctxt, 0, ent, 0);
+ /* TODO ? check regressions ctxt->valid = 0; */
+ }
+
+@@ -8004,6 +8036,7 @@ xmlParsePEReference(xmlParserCtxtPtr ctxt)
+ name, NULL);
+ ctxt->valid = 0;
+ }
++ xmlParserEntityCheck(ctxt, 0, NULL, 0);
+ } else {
+ /*
+ * Internal checking in case the entity quest barfed
+@@ -8243,6 +8276,7 @@ xmlParseStringPEReference(xmlParserCtxtPtr ctxt, const xmlChar **str) {
+ name, NULL);
+ ctxt->valid = 0;
+ }
++ xmlParserEntityCheck(ctxt, 0, NULL, 0);
+ } else {
+ /*
+ * Internal checking in case the entity quest barfed
+--
+cgit v0.10.1
+
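
The "billion laughs" expansion that the backported fix above hardens libxml2 against is a small DTD whose nested internal entities multiply into a huge document. Below is a deliberately tiny, illustrative sketch of that shape, kept to three levels of five so it is safe to build; real attacks nest around ten levels of ten references each.

    # Illustrative sketch only: builds (but does not parse) a miniature
    # entity-expansion document of the kind CVE-2014-3660 guards against.
    levels = 3
    fanout = 5
    entities = ['<!ENTITY lol0 "lol">']
    for i in range(1, levels + 1):
        refs = "".join("&lol%d;" % (i - 1) for _ in range(fanout))
        entities.append('<!ENTITY lol%d "%s">' % (i, refs))
    doc = "<?xml version='1.0'?>\n<!DOCTYPE bomb [\n%s\n]>\n<bomb>&lol%d;</bomb>\n" % (
        "\n".join(entities), levels)
    print(doc)
    # The parsed text would expand to roughly 3 * fanout**levels bytes.
    print("expanded size would be ~%d bytes" % (3 * fanout ** levels))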
diff --git a/meta/recipes-core/libxml/libxml2/python-sitepackages-dir.patch b/meta/recipes-core/libxml/libxml2/python-sitepackages-dir.patch
index a697ddf873..e83c8325e5 100644
--- a/meta/recipes-core/libxml/libxml2/python-sitepackages-dir.patch
+++ b/meta/recipes-core/libxml/libxml2/python-sitepackages-dir.patch
@@ -4,12 +4,18 @@ The python binary used when building for nativesdk doesn't give us the
correct path here so we need to be able to specify it ourselves.
Upstream-Status: Inappropriate [config]
-
Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
---- a/configure.in
-+++ b/configure.in
-@@ -743,7 +743,8 @@ dnl
+Rebase to 2.9.2
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+---
+ configure.ac | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/configure.ac b/configure.ac
+--- a/configure.ac
++++ b/configure.ac
+@@ -813,7 +813,8 @@ dnl
PYTHON_VERSION=
PYTHON_INCLUDES=
@@ -19,3 +25,6 @@ Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
PYTHON_TESTS=
pythondir=
if test "$with_python" != "no" ; then
+--
+1.9.1
+
diff --git a/meta/recipes-core/libxml/libxml2_2.9.1.bb b/meta/recipes-core/libxml/libxml2_2.9.2.bb
index 0b6ac5d5c6..f0cfa59309 100644
--- a/meta/recipes-core/libxml/libxml2_2.9.1.bb
+++ b/meta/recipes-core/libxml/libxml2_2.9.2.bb
@@ -2,7 +2,7 @@ require libxml2.inc
SRC_URI += "http://www.w3.org/XML/Test/xmlts20080827.tar.gz;name=testtar"
-SRC_URI[libtar.md5sum] = "9c0cfef285d5c4a5c80d00904ddab380"
-SRC_URI[libtar.sha256sum] = "fd3c64cb66f2c4ea27e934d275904d92cec494a8e8405613780cbc8a71680fdb"
+SRC_URI[libtar.md5sum] = "9e6a9aca9d155737868b3dc5fd82f788"
+SRC_URI[libtar.sha256sum] = "5178c30b151d044aefb1b08bf54c3003a0ac55c59c866763997529d60770d5bc"
SRC_URI[testtar.md5sum] = "ae3d1ebe000a3972afa104ca7f0e1b4a"
SRC_URI[testtar.sha256sum] = "96151685cec997e1f9f3387e3626d61e6284d4d6e66e0e440c209286c03e9cc7"
diff --git a/meta/recipes-core/meta/buildtools-tarball.bb b/meta/recipes-core/meta/buildtools-tarball.bb
index c54d9e8699..302510c798 100644
--- a/meta/recipes-core/meta/buildtools-tarball.bb
+++ b/meta/recipes-core/meta/buildtools-tarball.bb
@@ -9,41 +9,23 @@ TOOLCHAIN_TARGET_TASK ?= ""
TOOLCHAIN_HOST_TASK ?= "\
nativesdk-python-core \
- nativesdk-python-textutils \
- nativesdk-python-sqlite3 \
- nativesdk-python-pickle \
- nativesdk-python-logging \
- nativesdk-python-elementtree \
- nativesdk-python-curses \
- nativesdk-python-compile \
- nativesdk-python-compiler \
- nativesdk-python-fcntl \
- nativesdk-python-shell \
+ nativesdk-python-modules \
nativesdk-python-misc \
- nativesdk-python-multiprocessing \
- nativesdk-python-subprocess \
- nativesdk-python-xmlrpc \
- nativesdk-python-netclient \
- nativesdk-python-netserver \
- nativesdk-python-distutils \
- nativesdk-python-unixadmin \
- nativesdk-python-compression \
- nativesdk-python-json \
- nativesdk-python-unittest \
- nativesdk-python-mmap \
- nativesdk-python-difflib \
- nativesdk-python-pprint \
nativesdk-python-git \
- nativesdk-python-pkgutil \
nativesdk-ncurses-terminfo-base \
nativesdk-chrpath \
nativesdk-tar \
+ nativesdk-buildtools-perl-dummy \
nativesdk-git \
+ nativesdk-git-perltools \
nativesdk-pigz \
nativesdk-make \
nativesdk-wget \
+ nativesdk-ca-certificates \
"
+SDK_PACKAGE_ARCHS =+ "buildtools-dummy-${SDKPKGSUFFIX}"
+
TOOLCHAIN_OUTPUTNAME ?= "${SDK_NAME}-buildtools-nativesdk-standalone-${DISTRO_VERSION}"
RDEPENDS = "${TOOLCHAIN_HOST_TASK}"
@@ -68,4 +50,6 @@ create_sdk_files_append () {
# so instead of exporting the variable, we use a comment here.
echo '#OECORE_NATIVE_SYSROOT="${SDKPATHNATIVE}"' >> $script
toolchain_create_sdk_version ${SDK_OUTPUT}/${SDKPATH}/version-${SDK_SYS}
+
+ echo 'export GIT_SSL_CAINFO="${SDKPATHNATIVE}${sysconfdir}/ssl/certs/ca-certificates.crt"' >>$script
}
diff --git a/meta/recipes-core/meta/meta-environment.bb b/meta/recipes-core/meta/meta-environment.bb
index cf142ef5ca..b3737bb5cf 100644
--- a/meta/recipes-core/meta/meta-environment.bb
+++ b/meta/recipes-core/meta/meta-environment.bb
@@ -6,32 +6,57 @@ PR = "r8"
EXCLUDE_FROM_WORLD = "1"
+ORIGOS := "${TARGET_OS}"
+
+REAL_MULTIMACH_TARGET_SYS = "${TUNE_PKGARCH}${TARGET_VENDOR}-${ORIGOS}"
+
inherit toolchain-scripts
TOOLCHAIN_NEED_CONFIGSITE_CACHE += "zlib"
-REAL_MULTIMACH_TARGET_SYS = "${TUNE_PKGARCH}${TARGET_VENDOR}-${TARGET_OS}"
SDK_DIR = "${WORKDIR}/sdk"
SDK_OUTPUT = "${SDK_DIR}/image"
-SDKTARGETSYSROOT = "${SDKPATH}/sysroots/${TARGET_SYS}"
+SDKTARGETSYSROOT = "${SDKPATH}/sysroots/${REAL_MULTIMACH_TARGET_SYS}"
inherit cross-canadian
-do_generate_content[nostamp] = "1"
-do_generate_content() {
+do_generate_content[cleandirs] = "${SDK_OUTPUT}"
+do_generate_content[dirs] = "${SDK_OUTPUT}/${SDKPATH}"
+python do_generate_content() {
+ # Handle multilibs in the SDK environment, siteconfig, etc files...
+ localdata = bb.data.createCopy(d)
- rm -rf ${SDK_OUTPUT}
- mkdir -p ${SDK_OUTPUT}/${SDKPATH}
+ # make sure we only use the WORKDIR value from 'd', or it can change
+ localdata.setVar('WORKDIR', d.getVar('WORKDIR', True))
- toolchain_create_sdk_siteconfig ${SDK_OUTPUT}/${SDKPATH}/site-config-${REAL_MULTIMACH_TARGET_SYS}
+ # make sure we only use the SDKTARGETSYSROOT value from 'd'
+ localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT', True))
+ localdata.setVar('TARGET_OS', d.getVar('ORIGOS', True))
+ localdata.setVar('libdir', d.getVar('target_libdir', False))
- toolchain_create_sdk_env_script ${SDK_OUTPUT}/${SDKPATH}/environment-setup-${REAL_MULTIMACH_TARGET_SYS} ${REAL_MULTIMACH_TARGET_SYS} '##SDKTARGETSYSROOT##' ${target_libdir}
+ # Process DEFAULTTUNE
+ bb.build.exec_func("create_sdk_files", localdata)
- # Add version information
- toolchain_create_sdk_version ${SDK_OUTPUT}/${SDKPATH}/version-${REAL_MULTIMACH_TARGET_SYS}
+ variants = d.getVar("MULTILIB_VARIANTS", True) or ""
+ for item in variants.split():
+ # Load overrides from 'd' to avoid having to reset the value...
+ overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item
+ localdata.setVar("OVERRIDES", overrides)
+ localdata.setVar("MLPREFIX", item + "-")
+ bb.data.update_data(localdata)
+ bb.build.exec_func("create_sdk_files", localdata)
}
addtask generate_content before do_install after do_compile
-do_install[nostamp] = "1"
+create_sdk_files() {
+ # Setup site file for external use
+ toolchain_create_sdk_siteconfig ${SDK_OUTPUT}/${SDKPATH}/site-config-${REAL_MULTIMACH_TARGET_SYS}
+
+ toolchain_create_sdk_env_script ${SDK_OUTPUT}/${SDKPATH}/environment-setup-${REAL_MULTIMACH_TARGET_SYS}
+
+ # Add version information
+ toolchain_create_sdk_version ${SDK_OUTPUT}/${SDKPATH}/version-${REAL_MULTIMACH_TARGET_SYS}
+}
+
do_install() {
install -d ${D}/${SDKPATH}
install -m 0644 -t ${D}/${SDKPATH} ${SDK_OUTPUT}/${SDKPATH}/*
diff --git a/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb b/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb
new file mode 100644
index 0000000000..d971c3ca3d
--- /dev/null
+++ b/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb
@@ -0,0 +1,19 @@
+SUMMARY = "Dummy package which ensures perl is excluded from buildtools"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302"
+
+inherit nativesdk
+
+# Put it somewhere separate to ensure it's never used except when we want it
+PACKAGE_ARCH = "buildtools-dummy-${SDKPKGSUFFIX}"
+
+PERLPACKAGES = "nativesdk-perl \
+ nativesdk-perl-module-file-path"
+
+ALLOW_EMPTY_${PN} = "1"
+
+python populate_packages_prepend() {
+ d.appendVar('RPROVIDES_${PN}', '${PERLPACKAGES}')
+ d.appendVar('RCONFLICTS_${PN}', '${PERLPACKAGES}')
+}
+
diff --git a/meta/recipes-core/meta/uninative-tarball.bb b/meta/recipes-core/meta/uninative-tarball.bb
new file mode 100644
index 0000000000..e17685a117
--- /dev/null
+++ b/meta/recipes-core/meta/uninative-tarball.bb
@@ -0,0 +1,48 @@
+SUMMARY = "libc and patchelf tarball for use with uninative.bbclass"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \
+ file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
+
+TOOLCHAIN_TARGET_TASK = ""
+
+TOOLCHAIN_HOST_TASK = "\
+ nativesdk-glibc \
+ nativesdk-patchelf \
+ "
+
+INHIBIT_DEFAULT_DEPS = "1"
+DEPENDS += "patchelf-native"
+
+TOOLCHAIN_OUTPUTNAME ?= "${BUILD_ARCH}-nativesdk-libc"
+
+RDEPENDS = "${TOOLCHAIN_HOST_TASK}"
+
+EXCLUDE_FROM_WORLD = "1"
+
+inherit meta
+inherit populate_sdk
+
+deltask install
+deltask package
+
+SDK_PACKAGING_FUNC = ""
+
+fakeroot create_sdk_files() {
+ cp ${COREBASE}/scripts/relocate_sdk.py ${SDK_OUTPUT}/${SDKPATH}/
+
+ # Replace the ##DEFAULT_INSTALL_DIR## with the correct pattern.
+ # Escape special characters like '+' and '.' in the SDKPATH
+ escaped_sdkpath=$(echo ${SDKPATH}/sysroots/${SDK_SYS} |sed -e "s:[\+\.]:\\\\\\\\\0:g")
+ sed -i -e "s:##DEFAULT_INSTALL_DIR##:$escaped_sdkpath:" ${SDK_OUTPUT}/${SDKPATH}/relocate_sdk.py
+}
+
+
+fakeroot tar_sdk() {
+ mkdir -p ${SDK_DEPLOY}
+ cd ${SDK_OUTPUT}/${SDKPATH}
+ mv sysroots/${SDK_SYS} ./${BUILD_SYS}
+ rm sysroots -rf
+ patchelf --set-interpreter ${@''.join('a' for n in xrange(1024))} ./${BUILD_SYS}/usr/bin/patchelf
+ mv ./${BUILD_SYS}/usr/bin/patchelf ./${BUILD_SYS}/usr/bin/patchelf-uninative
+ tar ${SDKTAROPTS} -c --file=${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.tar.bz2 .
+}
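
Two details in the new recipe above are easy to miss: the 1024-character dummy interpreter written into patchelf reserves header space so the relocated loader path can later be patched in without growing the binary, and the sed expression escapes the regex metacharacters '+' and '.' in ${SDKPATH} before substituting it into relocate_sdk.py. A standalone sketch of that escaping step follows; the example path is hypothetical and this is not the recipe's own code.

    # Illustrative sketch of the escaping done by the sed line above:
    # '+' and '.' in the SDK path must be escaped before the path is used
    # as the ##DEFAULT_INSTALL_DIR## replacement inside relocate_sdk.py.
    import re

    sdkpath = "/opt/poky+eSDK/1.7/sysroots/x86_64-pokysdk-linux"   # hypothetical value
    escaped = re.sub(r"([+.])", r"\\\1", sdkpath)
    print(escaped)   # -> /opt/poky\+eSDK/1\.7/sysroots/x86_64-pokysdk-linux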
diff --git a/meta/recipes-core/ncurses/ncurses.inc b/meta/recipes-core/ncurses/ncurses.inc
index 521d0e435c..10f7dd175d 100644
--- a/meta/recipes-core/ncurses/ncurses.inc
+++ b/meta/recipes-core/ncurses/ncurses.inc
@@ -98,6 +98,8 @@ do_configure() {
# not the case for /dev/null redirections)
export cf_cv_working_poll=yes
+ # The --enable-pc-files option requires PKG_CONFIG_LIBDIR to exist
+ mkdir -p ${PKG_CONFIG_LIBDIR}
( cd ${S}; gnu-configize --force )
ncurses_configure "narrowc" || \
return 1
diff --git a/meta/recipes-core/netbase/netbase_5.2.bb b/meta/recipes-core/netbase/netbase_5.3.bb
index ab7ae6527c..4ac0221c9e 100644
--- a/meta/recipes-core/netbase/netbase_5.2.bb
+++ b/meta/recipes-core/netbase/netbase_5.3.bb
@@ -6,11 +6,11 @@ LICENSE = "GPLv2"
LIC_FILES_CHKSUM = "file://debian/copyright;md5=3dd6192d306f582dee7687da3d8748ab"
PE = "1"
-SRC_URI = "${DEBIAN_MIRROR}/main/n/netbase/netbase_${PV}.tar.gz \
+SRC_URI = "${DEBIAN_MIRROR}/main/n/netbase/netbase_${PV}.tar.xz \
file://hosts"
-SRC_URI[md5sum] = "3a01bfce6a28e1743412198abd241262"
-SRC_URI[sha256sum] = "d62ba56d62b9b121664828175c2a237a7014ef32df8a4116ea33cff332de3307"
+SRC_URI[md5sum] = "2637a27fd3de02a278d2b5be7e6558c1"
+SRC_URI[sha256sum] = "81f6c69795044d62b8ad959cf9daf049d0545fd466c52860ad3f933b1e97b88b"
do_install () {
install -d ${D}/${mandir}/man8 ${D}${sysconfdir}
diff --git a/meta/recipes-core/os-release/os-release.bb b/meta/recipes-core/os-release/os-release.bb
new file mode 100644
index 0000000000..33e958137f
--- /dev/null
+++ b/meta/recipes-core/os-release/os-release.bb
@@ -0,0 +1,36 @@
+inherit allarch
+
+SUMMARY = "Operating system identification"
+DESCRIPTION = "The /etc/os-release file contains operating system identification data."
+LICENSE = "MIT"
+INHIBIT_DEFAULT_DEPS = "1"
+
+do_fetch[noexec] = "1"
+do_unpack[noexec] = "1"
+do_patch[noexec] = "1"
+do_configure[noexec] = "1"
+
+# Other valid fields: BUILD_ID ID_LIKE ANSI_COLOR CPE_NAME
+# HOME_URL SUPPORT_URL BUG_REPORT_URL
+OS_RELEASE_FIELDS = "ID ID_LIKE NAME VERSION VERSION_ID PRETTY_NAME"
+
+ID = "${DISTRO}"
+NAME = "${DISTRO_NAME}"
+VERSION = "${DISTRO_VERSION}${@' (%s)' % DISTRO_CODENAME if 'DISTRO_CODENAME' in d else ''}"
+VERSION_ID = "${DISTRO_VERSION}"
+PRETTY_NAME = "${DISTRO_NAME} ${VERSION}"
+BUILD_ID ?= "${DATETIME}"
+
+python do_compile () {
+ with open(d.expand('${B}/os-release'), 'w') as f:
+ for field in d.getVar('OS_RELEASE_FIELDS', True).split():
+ value = d.getVar(field, True)
+ if value:
+ f.write('{0}={1}\n'.format(field, value))
+}
+do_compile[vardeps] += "${OS_RELEASE_FIELDS}"
+
+do_install () {
+ install -d ${D}${sysconfdir}
+ install -m 0644 os-release ${D}${sysconfdir}/
+}
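
As a reference for what the new do_compile task above emits, here is a minimal standalone sketch that mirrors its loop, with hypothetical distro values standing in for the BitBake variables (the real values come from DISTRO, DISTRO_NAME, DISTRO_VERSION and DISTRO_CODENAME at build time):

    # Standalone sketch of the os-release do_compile loop above.
    # The values below are placeholders, not guaranteed defaults.
    fields = ["ID", "NAME", "VERSION", "VERSION_ID", "PRETTY_NAME"]
    values = {
        "ID": "poky",                                              # ${DISTRO}
        "NAME": "Poky (Yocto Project Reference Distro)",           # ${DISTRO_NAME}
        "VERSION": "1.7 (dizzy)",                                  # ${DISTRO_VERSION} (${DISTRO_CODENAME})
        "VERSION_ID": "1.7",
        "PRETTY_NAME": "Poky (Yocto Project Reference Distro) 1.7 (dizzy)",
    }
    with open("os-release", "w") as f:
        for field in fields:
            value = values.get(field)
            if value:                       # skip unset fields, as the recipe does
                f.write("{0}={1}\n".format(field, value))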
diff --git a/meta/recipes-core/packagegroups/packagegroup-base.bb b/meta/recipes-core/packagegroups/packagegroup-base.bb
index 13a4fe4f0c..f4b2cd574d 100644
--- a/meta/recipes-core/packagegroups/packagegroup-base.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-base.bb
@@ -2,6 +2,11 @@ SUMMARY = "Merge machine and distro options to create a basic machine task/packa
LICENSE = "MIT"
PR = "r83"
+#
+# packages which content depend on MACHINE_FEATURES need to be MACHINE_ARCH
+#
+PACKAGE_ARCH = "${MACHINE_ARCH}"
+
inherit packagegroup
PROVIDES = "${PACKAGES}"
@@ -39,11 +44,6 @@ PACKAGES = ' \
\
'
-#
-# packages which content depend on MACHINE_FEATURES need to be MACHINE_ARCH
-#
-PACKAGE_ARCH = "${MACHINE_ARCH}"
-
# Override by distro if needed
VIRTUAL-RUNTIME_keymaps ?= "keymaps"
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-boot.bb b/meta/recipes-core/packagegroups/packagegroup-core-boot.bb
index bdc4a1d413..09f537372e 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-boot.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-boot.bb
@@ -7,10 +7,10 @@ DESCRIPTION = "The minimal set of packages required to boot the system"
LICENSE = "MIT"
PR = "r17"
-inherit packagegroup
-
PACKAGE_ARCH = "${MACHINE_ARCH}"
+inherit packagegroup
+
#
# Set by the machine configuration with packages essential for device bootup
#
@@ -27,6 +27,7 @@ VIRTUAL-RUNTIME_keymaps ?= "keymaps"
SYSVINIT_SCRIPTS = "${@bb.utils.contains('MACHINE_FEATURES', 'rtc', 'busybox-hwclock', '', d)} \
modutils-initscripts \
init-ifupdown \
+ ${VIRTUAL-RUNTIME_initscripts} \
"
RDEPENDS_${PN} = "\
@@ -38,7 +39,6 @@ RDEPENDS_${PN} = "\
netbase \
${VIRTUAL-RUNTIME_login_manager} \
${VIRTUAL-RUNTIME_init_manager} \
- ${VIRTUAL-RUNTIME_initscripts} \
${VIRTUAL-RUNTIME_dev_manager} \
${VIRTUAL-RUNTIME_update-alternatives} \
${MACHINE_ESSENTIAL_EXTRA_RDEPENDS}"
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb b/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb
index 5d1ce97c2c..154a55cea2 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-standalone-sdk-target.bb
@@ -7,6 +7,7 @@ inherit packagegroup
RDEPENDS_${PN} = "\
libgcc \
libgcc-dev \
+ libgcov-dev \
libstdc++ \
libstdc++-dev \
${LIBC_DEPENDENCIES} \
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-tools-debug.bb b/meta/recipes-core/packagegroups/packagegroup-core-tools-debug.bb
index 79bedfb02f..82347b975d 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-tools-debug.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-tools-debug.bb
@@ -7,9 +7,7 @@ LICENSE = "MIT"
inherit packagegroup
-PR = "r2"
-
-PACKAGE_ARCH = "${MACHINE_ARCH}"
+PR = "r3"
MTRACE = ""
MTRACE_libc-glibc = "libc-mtrace"
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb b/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb
index 8c9dd8b6f9..f0ba8b90fa 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb
@@ -7,11 +7,13 @@ LICENSE = "MIT"
PR = "r3"
-inherit packagegroup
-
PACKAGE_ARCH = "${MACHINE_ARCH}"
+inherit packagegroup
+
PROFILE_TOOLS_X = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'sysprof', '', d)}"
+# sysprof doesn't support aarch64
+PROFILE_TOOLS_X_aarch64 = ""
PROFILE_TOOLS_SYSTEMD = "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'systemd-analyze', '', d)}"
RRECOMMENDS_${PN} = "\
@@ -53,7 +55,6 @@ LTTNGTOOLS_aarch64 = ""
LTTNGMODULES = "lttng-modules"
LTTNGMODULES_aarch64 = ""
-LTTNGMODULES_arm = ""
BABELTRACE = "babeltrace"
BABELTRACE_aarch64 = ""
diff --git a/meta/recipes-core/packagegroups/packagegroup-core-tools-testapps.bb b/meta/recipes-core/packagegroups/packagegroup-core-tools-testapps.bb
index 15ed7f92a5..33a3eee258 100644
--- a/meta/recipes-core/packagegroups/packagegroup-core-tools-testapps.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-core-tools-testapps.bb
@@ -7,10 +7,10 @@ LICENSE = "MIT"
PR = "r2"
-inherit packagegroup
-
PACKAGE_ARCH = "${MACHINE_ARCH}"
+inherit packagegroup
+
# kexec-tools doesn't work on Mips
KEXECTOOLS ?= "kexec"
KEXECTOOLS_mips ?= ""
diff --git a/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb b/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb
index 8e3b917c75..f95ce77fd2 100644
--- a/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb
+++ b/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb
@@ -41,19 +41,14 @@ RDEPENDS_packagegroup-self-hosted-host-tools = "\
hdparm \
iptables \
lsb \
- xdg-utils \
mc \
mc-fish \
mc-helpers \
mc-helpers-perl \
mc-helpers-python \
- leafpad \
- ${MIDORI} \
- pcmanfm \
parted \
pseudo \
screen \
- vte \
"
RRECOMMENDS_packagegroup-self-hosted-host-tools = "\
@@ -64,7 +59,7 @@ RRECOMMENDS_packagegroup-self-hosted-host-tools = "\
kernel-module-iptable-filter \
"
-# eglibc-utils: for rpcgen
+# glibc-utils: for rpcgen
RDEPENDS_packagegroup-self-hosted-sdk = "\
autoconf \
automake \
@@ -75,8 +70,8 @@ RDEPENDS_packagegroup-self-hosted-sdk = "\
cpp \
cpp-symlinks \
distcc \
- eglibc-utils \
- eglibc-gconv-ibm850 \
+ glibc-utils \
+ glibc-gconv-ibm850 \
file \
findutils \
g++ \
@@ -125,6 +120,7 @@ RDEPENDS_packagegroup-self-hosted-extended = "\
gettext \
gettext-runtime \
git \
+ git-perltools \
grep \
groff \
gzip \
@@ -224,6 +220,11 @@ RDEPENDS_packagegroup-self-hosted-graphics = "\
libx11-dev \
python-pygtk \
gtk-theme-clearlooks \
+ xdg-utils \
+ ${MIDORI} \
+ leafpad \
+ pcmanfm \
+ vte \
"
PTH = "pth"
PTH_libc-uclibc = ""
diff --git a/meta/recipes-core/psplash/psplash_git.bb b/meta/recipes-core/psplash/psplash_git.bb
index 628ced4d49..7f0dc7859f 100644
--- a/meta/recipes-core/psplash/psplash_git.bb
+++ b/meta/recipes-core/psplash/psplash_git.bb
@@ -5,7 +5,7 @@ SECTION = "base"
LICENSE = "GPLv2+"
LIC_FILES_CHKSUM = "file://psplash.h;beginline=1;endline=16;md5=840fb2356b10a85bed78dd09dc7745c6"
-SRCREV = "afd4e228c606a9998feae44a3fed4474803240b7"
+SRCREV = "14c8f7b705de944beb4de3f296506d80871e410f"
PV = "0.1+git${SRCPV}"
PR = "r15"
@@ -109,3 +109,13 @@ FILES_${PN} += "/mnt/.psplash"
INITSCRIPT_NAME = "psplash.sh"
INITSCRIPT_PARAMS = "start 0 S . stop 20 0 1 6 ."
+
+DEPENDS_append = " ${@bb.utils.contains('DISTRO_FEATURES','systemd','systemd-systemctl-native','',d)}"
+pkg_postinst_${PN} () {
+ if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then
+ if [ -n "$D" ]; then
+ OPTS="--root=$D"
+ fi
+ systemctl $OPTS mask psplash.service
+ fi
+}
diff --git a/meta/recipes-core/readline/readline-6.3/readline-dispatch-multikey.patch b/meta/recipes-core/readline/readline-6.3/readline-dispatch-multikey.patch
new file mode 100644
index 0000000000..54d1ac6eb9
--- /dev/null
+++ b/meta/recipes-core/readline/readline-6.3/readline-dispatch-multikey.patch
@@ -0,0 +1,32 @@
+From 8ef852a5be72c75e17f2510bea52455f809b56ce Mon Sep 17 00:00:00 2001
+From: Chet Ramey <chet.ramey@case.edu>
+Date: Fri, 28 Mar 2014 14:07:42 -0400
+Subject: [PATCH 04/10] Readline-6.3 patch 2
+
+Fixes multi-key issue identified in this thread:
+http://lists.gnu.org/archive/html/bug-readline/2014-03/msg00010.html
+
+Upstream-Status: Backport
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+---
+ readline.c | 3 ++-
+ 1 files changed, 2 insertions(+), 1 deletions(-)
+
+diff --git a/readline.c b/readline.c
+index eb4eae3..abb29a0 100644
+--- a/readline.c
++++ b/readline.c
+@@ -744,7 +744,8 @@ _rl_dispatch_callback (cxt)
+ r = _rl_subseq_result (r, cxt->oldmap, cxt->okey, (cxt->flags & KSEQ_SUBSEQ));
+
+ RL_CHECK_SIGNALS ();
+- if (r == 0) /* success! */
++ /* We only treat values < 0 specially to simulate recursion. */
++ if (r >= 0 || (r == -1 && (cxt->flags & KSEQ_SUBSEQ) == 0)) /* success! or failure! */
+ {
+ _rl_keyseq_chain_dispose ();
+ RL_UNSETSTATE (RL_STATE_MULTIKEY);
+--
+1.8.3.1
+
diff --git a/meta/recipes-core/readline/readline-6.3/readline63-003 b/meta/recipes-core/readline/readline-6.3/readline63-003
new file mode 100644
index 0000000000..98a9d810b6
--- /dev/null
+++ b/meta/recipes-core/readline/readline-6.3/readline63-003
@@ -0,0 +1,43 @@
+readline: Security Advisory - readline - CVE-2014-2524
+
+Upstream-Status: Backport
+
+Signed-off-by: Yue Tao <yue.tao@windriver.com>
+
+ READLINE PATCH REPORT
+ =====================
+
+Readline-Release: 6.3
+Patch-ID: readline63-003
+
+Bug-Reported-by:
+Bug-Reference-ID:
+Bug-Reference-URL:
+
+Bug-Description:
+
+There are debugging functions in the readline release that are theoretically
+exploitable as security problems. They are not public functions, but have
+global linkage.
+
+Patch (apply with `patch -p0'):
+
+*** ../readline-6.3/util.c 2013-09-02 13:36:12.000000000 -0400
+--- util.c 2014-03-20 10:25:53.000000000 -0400
+***************
+*** 477,480 ****
+--- 479,483 ----
+ }
+
++ #if defined (DEBUG)
+ #if defined (USE_VARARGS)
+ static FILE *_rl_tracefp;
+***************
+*** 539,542 ****
+--- 542,546 ----
+ }
+ #endif
++ #endif /* DEBUG */
+
+
+
diff --git a/meta/recipes-core/readline/readline_6.3.bb b/meta/recipes-core/readline/readline_6.3.bb
index aa30f668b4..55964a6cf8 100644
--- a/meta/recipes-core/readline/readline_6.3.bb
+++ b/meta/recipes-core/readline/readline_6.3.bb
@@ -1,5 +1,8 @@
require readline.inc
+SRC_URI += "file://readline63-003 \
+ file://readline-dispatch-multikey.patch"
+
SRC_URI[archive.md5sum] = "33c8fb279e981274f485fd91da77e94a"
SRC_URI[archive.sha256sum] = "56ba6071b9462f980c5a72ab0023893b65ba6debb4eeb475d7a563dc65cafd43"
diff --git a/meta/recipes-core/systemd/systemd-serialgetty/serial-getty@.service b/meta/recipes-core/systemd/systemd-serialgetty/serial-getty@.service
index 6dd335c0bc..182167befe 100644
--- a/meta/recipes-core/systemd/systemd-serialgetty/serial-getty@.service
+++ b/meta/recipes-core/systemd/systemd-serialgetty/serial-getty@.service
@@ -21,7 +21,7 @@ IgnoreOnIsolate=yes
[Service]
Environment="TERM=xterm"
-ExecStart=-/sbin/agetty -8 --keep-baud %I @BAUDRATE@ $TERM
+ExecStart=-/sbin/agetty -8 -L --keep-baud %I @BAUDRATE@ $TERM
Type=idle
Restart=always
RestartSec=0
diff --git a/meta/recipes-core/systemd/systemd-systemctl/systemctl b/meta/recipes-core/systemd/systemd-systemctl/systemctl
index b37f27abfb..2e632b00bc 100755
--- a/meta/recipes-core/systemd/systemd-systemctl/systemctl
+++ b/meta/recipes-core/systemd/systemd-systemctl/systemctl
@@ -77,18 +77,31 @@ for service in $services; do
exit 0
fi
- echo "Try to find location of $service..."
+ service_base_file=`echo $service | sed 's/\(@\).*\(\.[^.]\+\)/\1\2/'`
+ if [ -z `echo $service | sed '/@/p;d'` ]; then
+ echo "Try to find location of $service..."
+ service_template=false
+ else
+ echo "Try to find location of template $service_base_file of instance $service..."
+ service_template=true
+ if [ -z `echo $service | sed 's/^.\+@\(.*\)\.[^.]\+/\1/'` ]; then
+ instance_specified=false
+ else
+ instance_specified=true
+ fi
+ fi
+
# find service file
for p in $ROOT/etc/systemd/system \
$ROOT/lib/systemd/system \
$ROOT/usr/lib/systemd/system; do
- if [ -e $p/$service ]; then
- service_file=$p/$service
+ if [ -e $p/$service_base_file ]; then
+ service_file=$p/$service_base_file
service_file=${service_file##$ROOT}
fi
done
if [ -z "$service_file" ]; then
- echo "'$service' couldn't be found; exiting with error"
+ echo "'$service_base_file' couldn't be found; exiting with error"
exit 1
fi
echo "Found $service in $service_file"
@@ -115,13 +128,29 @@ for service in $services; do
for r in $wanted_by; do
echo "WantedBy=$r found in $service"
if [ "$action" = "enable" ]; then
+ enable_service=$service
+ if [ "$service_template" = true -a "$instance_specified" = false ]; then
+ default_instance=$(sed '/^DefaultInstance[[:space:]]*=/s,[^=]*=,,p;d' "$ROOT/$service_file")
+ if [ -z $default_instance ]; then
+ echo "Template unit without instance or DefaultInstance directive, nothing to enable"
+ continue
+ else
+ echo "Found DefaultInstance $default_instance, enabling it"
+ enable_service=$(echo $service | sed "s/@/@$default_instance/")
+ fi
+ fi
mkdir -p $ROOT/etc/systemd/system/$r.wants
- ln -s $service_file $ROOT/etc/systemd/system/$r.wants
- echo "Enabled $service for $wanted_by."
+ ln -s $service_file $ROOT/etc/systemd/system/$r.wants/$enable_service
+ echo "Enabled $enable_service for $wanted_by."
else
- rm -f $ROOT/etc/systemd/system/$r.wants/$service
+ if [ "$service_template" = true -a "$instance_specified" = false ]; then
+ disable_service="$ROOT/etc/systemd/system/$r.wants/`echo $service | sed 's/@/@*/'`"
+ else
+ disable_service="$ROOT/etc/systemd/system/$r.wants/$service"
+ fi
+ rm -f $disable_service
rmdir --ignore-fail-on-non-empty -p $ROOT/etc/systemd/system/$r.wants
- echo "Disabled $service for $wanted_by."
+ echo "Disabled ${disable_service##$ROOT/etc/systemd/system/$r.wants/} for $wanted_by."
fi
done
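
The sed expressions in the hunk above derive a template's unit file name (e.g. serial-getty@.service) and the instance to enable (e.g. ttyS0) from a name like serial-getty@ttyS0.service. Below is a compact, illustrative re-implementation of just that parsing, separate from the script itself.

    # Illustrative sketch of the template/instance name handling added above.
    import re

    def split_unit(service):
        m = re.match(r"^(?P<prefix>[^@]+)@(?P<instance>[^.]*)(?P<suffix>\.[^.]+)$", service)
        if not m:
            return service, None            # not a template unit
        base_file = m.group("prefix") + "@" + m.group("suffix")
        instance = m.group("instance") or None
        return base_file, instance

    print(split_unit("serial-getty@ttyS0.service"))  # ('serial-getty@.service', 'ttyS0')
    print(split_unit("serial-getty@.service"))       # ('serial-getty@.service', None)
    print(split_unit("psplash.service"))             # ('psplash.service', None)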
diff --git a/meta/recipes-core/systemd/systemd/0001-Make-root-s-home-directory-configurable.patch b/meta/recipes-core/systemd/systemd/0001-Make-root-s-home-directory-configurable.patch
new file mode 100644
index 0000000000..c5ad29174c
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0001-Make-root-s-home-directory-configurable.patch
@@ -0,0 +1,180 @@
+From 4dc8dee0435bb63cbe474004b0a8df0363fe94dd Mon Sep 17 00:00:00 2001
+From: Dan McGregor <dan.mcgregor@usask.ca>
+Date: Fri, 5 Sep 2014 06:28:58 -0600
+Subject: [PATCH] Make root's home directory configurable.
+
+OpenEmbedded has a configurable home directory for root. Allow
+systemd to be built using its idea of what root's home directory
+should be.
+
+Upstream-Status: Pending
+
+Signed-off-by: Dan McGregor <dan.mcgregor@usask.ca>
+---
+ Makefile.am | 2 ++
+ configure.ac | 7 +++++++
+ src/core/unit-printf.c | 2 +-
+ src/nspawn/nspawn.c | 4 ++--
+ src/shared/util.c | 4 ++--
+ units/console-shell.service.m4.in | 4 ++--
+ units/emergency.service.in | 4 ++--
+ units/rescue.service.m4.in | 4 ++--
+ 8 files changed, 20 insertions(+), 11 deletions(-)
+
+diff --git a/Makefile.am b/Makefile.am
+index 4028112..5d18f5c 100644
+--- a/Makefile.am
++++ b/Makefile.am
+@@ -191,6 +191,7 @@ AM_CPPFLAGS = \
+ -DKEXEC=\"$(KEXEC)\" \
+ -DLIBDIR=\"$(libdir)\" \
+ -DROOTLIBDIR=\"$(rootlibdir)\" \
++ -DROOTHOMEDIR=\"$(roothomedir)\" \
+ -DTEST_DIR=\"$(abs_top_srcdir)/test\" \
+ -I $(top_srcdir)/src \
+ -I $(top_builddir)/src/shared \
+@@ -5584,6 +5585,7 @@ EXTRA_DIST += \
+ substitutions = \
+ '|rootlibexecdir=$(rootlibexecdir)|' \
+ '|rootbindir=$(rootbindir)|' \
++ '|roothomedir=$(roothomedir)|' \
+ '|bindir=$(bindir)|' \
+ '|SYSTEMCTL=$(rootbindir)/systemctl|' \
+ '|SYSTEMD_NOTIFY=$(rootbindir)/systemd-notify|' \
+diff --git a/configure.ac b/configure.ac
+index 18b7198..365bc73 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -1272,6 +1272,11 @@ AC_ARG_WITH([rootlibdir],
+ [],
+ [with_rootlibdir=${libdir}])
+
++AC_ARG_WITH([roothomedir],
++ AS_HELP_STRING([--with-roothomedir=DIR], [Home directory for the root user]),
++ [],
++ [with_roothomedir=/root])
++
+ AC_ARG_WITH([pamlibdir],
+ AS_HELP_STRING([--with-pamlibdir=DIR], [Directory for PAM modules]),
+ [],
+@@ -1317,6 +1322,7 @@ AC_SUBST([pamlibdir], [$with_pamlibdir])
+ AC_SUBST([pamconfdir], [$with_pamconfdir])
+ AC_SUBST([rootprefix], [$with_rootprefix])
+ AC_SUBST([rootlibdir], [$with_rootlibdir])
++AC_SUBST([roothomedir], [$with_roothomedir])
+
+ AC_CONFIG_FILES([
+ Makefile po/Makefile.in
+@@ -1400,6 +1406,7 @@ AC_MSG_RESULT([
+ include_prefix: ${INCLUDE_PREFIX}
+ lib dir: ${libdir}
+ rootlib dir: ${with_rootlibdir}
++ root home dir: ${with_roothomedir}
+ SysV init scripts: ${SYSTEM_SYSVINIT_PATH}
+ SysV rc?.d directories: ${SYSTEM_SYSVRCND_PATH}
+ Build Python: ${PYTHON}
+diff --git a/src/core/unit-printf.c b/src/core/unit-printf.c
+index 62599d0..852d34c 100644
+--- a/src/core/unit-printf.c
++++ b/src/core/unit-printf.c
+@@ -259,7 +259,7 @@ static int specifier_user_home(char specifier, void *data, void *userdata, char
+ * best of it if we can, but fail if we can't */
+
+ if (!c->user || streq(c->user, "root") || streq(c->user, "0"))
+- n = strdup("/root");
++ n = strdup(ROOTHOMEDIR);
+ else
+ return -ENOTSUP;
+
+diff --git a/src/nspawn/nspawn.c b/src/nspawn/nspawn.c
+index d01da45..3e876d5 100644
+--- a/src/nspawn/nspawn.c
++++ b/src/nspawn/nspawn.c
+@@ -3312,7 +3312,7 @@ int main(int argc, char *argv[]) {
+ if (r < 0)
+ _exit(EXIT_FAILURE);
+
+- if ((asprintf((char**)(envp + n_env++), "HOME=%s", home ? home: "/root") < 0) ||
++ if ((asprintf((char**)(envp + n_env++), "HOME=%s", home ? home: ROOTHOMEDIR) < 0) ||
+ (asprintf((char**)(envp + n_env++), "USER=%s", arg_user ? arg_user : "root") < 0) ||
+ (asprintf((char**)(envp + n_env++), "LOGNAME=%s", arg_user ? arg_user : "root") < 0)) {
+ log_oom();
+@@ -3402,7 +3402,7 @@ int main(int argc, char *argv[]) {
+ } else if (argc > optind)
+ execvpe(argv[optind], argv + optind, env_use);
+ else {
+- chdir(home ? home : "/root");
++ chdir(home ? home : ROOTHOMEDIR);
+ execle("/bin/bash", "-bash", NULL, env_use);
+ execle("/bin/sh", "-sh", NULL, env_use);
+ }
+diff --git a/src/shared/util.c b/src/shared/util.c
+index 85a570a..aef6033 100644
+--- a/src/shared/util.c
++++ b/src/shared/util.c
+@@ -4377,7 +4377,7 @@ int get_user_creds(
+ *gid = 0;
+
+ if (home)
+- *home = "/root";
++ *home = ROOTHOMEDIR;
+
+ if (shell)
+ *shell = "/bin/sh";
+@@ -5363,7 +5363,7 @@ int get_home_dir(char **_h) {
+ /* Hardcode home directory for root to avoid NSS */
+ u = getuid();
+ if (u == 0) {
+- h = strdup("/root");
++ h = strdup(ROOTHOMEDIR);
+ if (!h)
+ return -ENOMEM;
+
+diff --git a/units/console-shell.service.m4.in b/units/console-shell.service.m4.in
+index 3f4904a..e2af652 100644
+--- a/units/console-shell.service.m4.in
++++ b/units/console-shell.service.m4.in
+@@ -15,8 +15,8 @@ After=rc-local.service
+ Before=getty.target
+
+ [Service]
+-Environment=HOME=/root
+-WorkingDirectory=/root
++Environment=HOME=@roothomedir@
++WorkingDirectory=@roothomedir@
+ ExecStart=-/sbin/sulogin
+ ExecStopPost=-@SYSTEMCTL@ poweroff
+ Type=idle
+diff --git a/units/emergency.service.in b/units/emergency.service.in
+index 91fc1bb..659547e 100644
+--- a/units/emergency.service.in
++++ b/units/emergency.service.in
+@@ -13,8 +13,8 @@ Conflicts=shutdown.target
+ Before=shutdown.target
+
+ [Service]
+-Environment=HOME=/root
+-WorkingDirectory=/root
++Environment=HOME=@roothomedir@
++WorkingDirectory=@roothomedir@
+ ExecStartPre=-/bin/plymouth quit
+ ExecStartPre=-/bin/echo -e 'Welcome to emergency mode! After logging in, type "journalctl -xb" to view\\nsystem logs, "systemctl reboot" to reboot, "systemctl default" to try again\\nto boot into default mode.'
+ ExecStart=-/bin/sh -c "/sbin/sulogin; @SYSTEMCTL@ --fail --no-block default"
+diff --git a/units/rescue.service.m4.in b/units/rescue.service.m4.in
+index ef54369..7aad86f 100644
+--- a/units/rescue.service.m4.in
++++ b/units/rescue.service.m4.in
+@@ -14,8 +14,8 @@ After=sysinit.target plymouth-start.service
+ Before=shutdown.target
+
+ [Service]
+-Environment=HOME=/root
+-WorkingDirectory=/root
++Environment=HOME=@roothomedir@
++WorkingDirectory=@roothomedir@
+ ExecStartPre=-/bin/plymouth quit
+ ExecStartPre=-/bin/echo -e 'Welcome to rescue mode! Type "systemctl default" or ^D to enter default mode.\\nType "journalctl -xb" to view system logs. Type "systemctl reboot" to reboot.'
+ ExecStart=-/bin/sh -c "/sbin/sulogin; @SYSTEMCTL@ --fail --no-block default"
+--
+1.9.3
+
diff --git a/meta/recipes-core/systemd/systemd/0001-add-support-for-executing-scripts-under-etc-rcS.d.patch b/meta/recipes-core/systemd/systemd/0001-add-support-for-executing-scripts-under-etc-rcS.d.patch
new file mode 100644
index 0000000000..9aa07c1b10
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0001-add-support-for-executing-scripts-under-etc-rcS.d.patch
@@ -0,0 +1,138 @@
+Upstream-Status: Inappropriate [OE specific]
+
+Subject: add support for executing scripts under /etc/rcS.d/
+
+To be compatible, all services translated from scripts under /etc/rcS.d would
+run before services translated from scripts under /etc/rcN.d.
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ src/sysv-generator/sysv-generator.c | 50 ++++++++++++++++++++++++++++---------
+ 1 file changed, 38 insertions(+), 12 deletions(-)
+
+diff --git a/src/sysv-generator/sysv-generator.c b/src/sysv-generator/sysv-generator.c
+index 9a869ba..10c55c0 100644
+--- a/src/sysv-generator/sysv-generator.c
++++ b/src/sysv-generator/sysv-generator.c
+@@ -43,7 +43,8 @@
+
+ typedef enum RunlevelType {
+ RUNLEVEL_UP,
+- RUNLEVEL_DOWN
++ RUNLEVEL_DOWN,
++ RUNLEVEL_SYSINIT
+ } RunlevelType;
+
+ static const struct {
+@@ -58,6 +59,9 @@ static const struct {
+ { "rc4.d", SPECIAL_RUNLEVEL4_TARGET, RUNLEVEL_UP },
+ { "rc5.d", SPECIAL_RUNLEVEL5_TARGET, RUNLEVEL_UP },
+
++ /* Debian style rcS.d, also adopted by OE */
++ { "rcS.d", SPECIAL_SYSINIT_TARGET, RUNLEVEL_SYSINIT},
++
+ /* Standard SysV runlevels for shutdown */
+ { "rc0.d", SPECIAL_POWEROFF_TARGET, RUNLEVEL_DOWN },
+ { "rc6.d", SPECIAL_REBOOT_TARGET, RUNLEVEL_DOWN }
+@@ -66,7 +70,7 @@ static const struct {
+ directories in this order, and we want to make sure that
+ sysv_start_priority is known when we first load the
+ unit. And that value we only know from S links. Hence
+- UP must be read before DOWN */
++ UP/SYSINIT must be read before DOWN */
+ };
+
+ typedef struct SysvStub {
+@@ -82,6 +86,8 @@ typedef struct SysvStub {
+ char **conflicts;
+ bool has_lsb;
+ bool reload;
++ bool default_dependencies;
++ bool from_rcsd;
+ } SysvStub;
+
+ const char *arg_dest = "/tmp";
+@@ -156,6 +162,9 @@ static int generate_unit_file(SysvStub *s) {
+ "Description=%s\n",
+ s->path, s->description);
+
++ if (!s->default_dependencies)
++ fprintf(f, "DefaultDependencies=no\n");
++
+ if (!isempty(before))
+ fprintf(f, "Before=%s\n", before);
+ if (!isempty(after))
+@@ -661,18 +670,30 @@ static int fix_order(SysvStub *s, Hashmap *all_services) {
+ if (s->has_lsb && other->has_lsb)
+ continue;
+
+- if (other->sysv_start_priority < s->sysv_start_priority) {
+- r = strv_extend(&s->after, other->name);
++ /* All scripts under /etc/rcS.d should execute before scripts under
++ * /etc/rcN.d */
++ if (!other->from_rcsd && s->from_rcsd) {
++ r = strv_extend(&s->before, other->name);
+ if (r < 0)
+ return log_oom();
+- }
+- else if (other->sysv_start_priority > s->sysv_start_priority) {
+- r = strv_extend(&s->before, other->name);
++ } else if (other->from_rcsd && !s->from_rcsd) {
++ r = strv_extend(&s->after, other->name);
+ if (r < 0)
+ return log_oom();
+- }
+- else
+- continue;
++ } else {
++ if (other->sysv_start_priority < s->sysv_start_priority) {
++ r = strv_extend(&s->after, other->name);
++ if (r < 0)
++ return log_oom();
++ }
++ else if (other->sysv_start_priority > s->sysv_start_priority) {
++ r = strv_extend(&s->before, other->name);
++ if (r < 0)
++ return log_oom();
++ }
++ else
++ continue;
++ }
+
+ /* FIXME: Maybe we should compare the name here lexicographically? */
+ }
+@@ -725,6 +746,8 @@ static int enumerate_sysv(LookupPaths lp, Hashmap *all_services) {
+ return log_oom();
+
+ service->sysv_start_priority = -1;
++ service->default_dependencies = true;
++ service->from_rcsd = false;
+ service->name = name;
+ service->path = fpath;
+
+@@ -810,9 +833,11 @@ static int set_dependencies_from_rcnd(LookupPaths lp, Hashmap *all_services) {
+
+ if (de->d_name[0] == 'S') {
+
+- if (rcnd_table[i].type == RUNLEVEL_UP) {
++ if (rcnd_table[i].type == RUNLEVEL_UP || rcnd_table[i].type == RUNLEVEL_SYSINIT) {
+ service->sysv_start_priority =
+ MAX(a*10 + b, service->sysv_start_priority);
++ service->default_dependencies = (rcnd_table[i].type == RUNLEVEL_SYSINIT)?false:true;
++ service->from_rcsd = (rcnd_table[i].type == RUNLEVEL_SYSINIT)?true:false;
+ }
+
+ r = set_ensure_allocated(&runlevel_services[i],
+@@ -825,7 +850,8 @@ static int set_dependencies_from_rcnd(LookupPaths lp, Hashmap *all_services) {
+ goto finish;
+
+ } else if (de->d_name[0] == 'K' &&
+- (rcnd_table[i].type == RUNLEVEL_DOWN)) {
++ (rcnd_table[i].type == RUNLEVEL_DOWN ||
++ rcnd_table[i].type == RUNLEVEL_SYSINIT)) {
+
+ r = set_ensure_allocated(&shutdown_services,
+ trivial_hash_func, trivial_compare_func);
+--
+1.9.1
+
diff --git a/meta/recipes-core/systemd/systemd/0001-journal-Fix-navigating-backwards-missing-entries.patch b/meta/recipes-core/systemd/systemd/0001-journal-Fix-navigating-backwards-missing-entries.patch
new file mode 100644
index 0000000000..621a0da87c
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0001-journal-Fix-navigating-backwards-missing-entries.patch
@@ -0,0 +1,34 @@
+From 2173cbf847fc53ca24950e77958c902edecfc207 Mon Sep 17 00:00:00 2001
+From: Olivier Brunel <jjk@jjacky.com>
+Date: Fri, 5 Dec 2014 16:06:45 +0100
+Subject: [PATCH] journal: Fix navigating backwards missing entries
+
+With DIRECTION_UP (i.e. navigating backwards) in generic_array_bisect(), when the
+needle was found as the last item in the array it wasn't actually processed as a
+match, resulting in entries being missed.
+
+https://bugs.freedesktop.org/show_bug.cgi?id=86855
+
+Upstream-Status: Backport
+
+Signed-off-by: Jonathan Liu <net147@gmail.com>
+---
+ src/journal/journal-file.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/journal/journal-file.c b/src/journal/journal-file.c
+index 7858435..c5d2d19 100644
+--- a/src/journal/journal-file.c
++++ b/src/journal/journal-file.c
+@@ -1657,7 +1657,7 @@ static int generic_array_bisect(
+ }
+ }
+
+- if (k > n) {
++ if (k >= n) {
+ if (direction == DIRECTION_UP) {
+ i = n;
+ subtract_one = true;
+--
+2.1.3
+
diff --git a/meta/recipes-core/systemd/systemd/0001-missing.h-add-fake-__NR_memfd_create-for-MIPS.patch b/meta/recipes-core/systemd/systemd/0001-missing.h-add-fake-__NR_memfd_create-for-MIPS.patch
new file mode 100644
index 0000000000..448ef1a917
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0001-missing.h-add-fake-__NR_memfd_create-for-MIPS.patch
@@ -0,0 +1,29 @@
+Upstream-Status: Backport
+
+Subject: missing.h: add fake __NR_memfd_create for MIPS
+
+We don't have the correct __NR_memfd_create syscall number yet, so set it to
+0xffffffff for now to prevent compile time errors.
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ src/shared/missing.h | 3 +++
+ 1 file changed, 3 insertions(+)
+
+diff --git a/src/shared/missing.h b/src/shared/missing.h
+index 3ff1a21..3051cb5 100644
+--- a/src/shared/missing.h
++++ b/src/shared/missing.h
+@@ -167,6 +167,9 @@ static inline int pivot_root(const char *new_root, const char *put_old) {
+ # define __NR_fanotify_mark 5296
+ # endif
+ # endif
++# ifndef __NR_memfd_create
++# define __NR_memfd_create 0xffffffff /* FIXME */
++# endif
+ #else
+ # ifndef __NR_fanotify_init
+ # define __NR_fanotify_init 338
+--
+1.9.1
+
diff --git a/meta/recipes-core/systemd/systemd/0001-systemd-user-avoid-using-system-auth.patch b/meta/recipes-core/systemd/systemd/0001-systemd-user-avoid-using-system-auth.patch
new file mode 100644
index 0000000000..97701f707c
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0001-systemd-user-avoid-using-system-auth.patch
@@ -0,0 +1,27 @@
+Upstream-Status: Inappropriate [oe specific]
+
+Subject: systemd-user: avoid using system-auth
+
+In OE, we don't provide system-auth; instead, we use common-* files.
+So modify the systemd-user file to use common-* files.
+
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
+---
+ src/login/systemd-user | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/src/login/systemd-user b/src/login/systemd-user
+index 8112d74..99635af 100644
+--- a/src/login/systemd-user
++++ b/src/login/systemd-user
+@@ -2,5 +2,5 @@
+ #
+ # Used by systemd --user instances.
+
+-account include system-auth
+-session include system-auth
++account include common-account
++session include common-session
+--
+1.9.1
+
diff --git a/meta/recipes-core/systemd/systemd/0001-tmpfiles.d-etc.conf-disable-resolv.conf-symlink.patch b/meta/recipes-core/systemd/systemd/0001-tmpfiles.d-etc.conf-disable-resolv.conf-symlink.patch
new file mode 100644
index 0000000000..7218322d4c
--- /dev/null
+++ b/meta/recipes-core/systemd/systemd/0001-tmpfiles.d-etc.conf-disable-resolv.conf-symlink.patch
@@ -0,0 +1,35 @@
+From f0ab1600fb56d680e6aba3d0d51dfb9ffa3d9403 Mon Sep 17 00:00:00 2001
+From: "Peter A. Bigot" <pab@pabigot.com>
+Date: Thu, 18 Sep 2014 08:36:54 -0500
+Subject: [PATCH] tmpfiles.d/etc.conf: disable resolv.conf symlink
+
+This link is valid only if ENABLE_RESOLVED is configured for systemd.
+If left unconditional, the symlink is created, preventing connman from
+storing the configuration it received from DHCP or other sources.
+
+Upstream has a TODO to fix this, but has not done so as of this date.
+Provide a temporary workaround for OE until this is done properly
+upstream.
+
+Upstream-Status: Inappropriate [OE-specific]
+Signed-off-by: Peter A. Bigot <pab@pabigot.com>
+---
+ tmpfiles.d/etc.conf | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/tmpfiles.d/etc.conf b/tmpfiles.d/etc.conf
+index b23272c..5364dd8 100644
+--- a/tmpfiles.d/etc.conf
++++ b/tmpfiles.d/etc.conf
+@@ -10,6 +10,7 @@
+ L /etc/os-release - - - - ../usr/lib/os-release
+ L /etc/localtime - - - - ../usr/share/zoneinfo/UTC
+ L+ /etc/mtab - - - - ../proc/self/mounts
+-L /etc/resolv.conf - - - - ../run/systemd/resolve/resolv.conf
++# TODO: conditional on ENABLE_RESOLVED
++#L /etc/resolv.conf - - - - ../run/systemd/resolve/resolv.conf
+ C /etc/nsswitch.conf - - - -
+ C /etc/pam.d - - - -
+--
+1.8.5.5
+
diff --git a/meta/recipes-core/systemd/systemd/0001-uClibc-doesn-t-implement-pwritev-preadv.patch b/meta/recipes-core/systemd/systemd/0001-uClibc-doesn-t-implement-pwritev-preadv.patch
deleted file mode 100644
index 9fdb3c9ab3..0000000000
--- a/meta/recipes-core/systemd/systemd/0001-uClibc-doesn-t-implement-pwritev-preadv.patch
+++ /dev/null
@@ -1,34 +0,0 @@
-Upstream-Status: Inappropriate [uclibc specific]
-
-From 7be9273548bcb1f57d011fc252965e45dd2a058c Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Wed, 21 Aug 2013 19:09:27 -0700
-Subject: [PATCH] uClibc doesn't implement pwritev/preadv
-
-Lets stub out the testcase for building.
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- src/libsystemd-bus/test-bus-memfd.c | 2 ++
- 1 file changed, 2 insertions(+)
-
-Index: systemd-209/src/libsystemd/sd-bus/test-bus-memfd.c
-===================================================================
---- systemd-209.orig/src/libsystemd/sd-bus/test-bus-memfd.c 2014-02-19 15:03:09.983254602 -0800
-+++ systemd-209/src/libsystemd/sd-bus/test-bus-memfd.c 2014-02-19 23:42:10.636652864 -0800
-@@ -151,6 +151,7 @@
- /* check content */
- assert_se(memcmp(buf, "ll", 2) == 0);
-
-+#ifndef __UCLIBC__
- /* writev it out*/
- iov[0].iov_base = (char *)"ABC";
- iov[0].iov_len = 3;
-@@ -173,6 +174,7 @@
- assert_se(memcmp(bufv[0], "ABC", 3) == 0);
- assert_se(memcmp(bufv[1], "DEF", 3) == 0);
- assert_se(memcmp(bufv[2], "GHI", 3) == 0);
-+#endif /* __UCLIBC__ */
-
- sd_memfd_free(m);
-
diff --git a/meta/recipes-core/systemd/systemd/run-ptest b/meta/recipes-core/systemd/systemd/run-ptest
index 2f6bd93a5b..a2d61c2894 100644
--- a/meta/recipes-core/systemd/systemd/run-ptest
+++ b/meta/recipes-core/systemd/systemd/run-ptest
@@ -1,3 +1,5 @@
+#!/bin/sh
+
tar -C test -xJf test/sys.tar.xz
make test/rules-test.sh.log
make test/udev-test.pl.log
diff --git a/meta/recipes-core/systemd/systemd/systemd-older-kernel.patch b/meta/recipes-core/systemd/systemd/systemd-older-kernel.patch
deleted file mode 100644
index 18b50e797a..0000000000
--- a/meta/recipes-core/systemd/systemd/systemd-older-kernel.patch
+++ /dev/null
@@ -1,56 +0,0 @@
-From 6109f02dcc4f2d7a461c5772bab494f5753a2203 Mon Sep 17 00:00:00 2001
-From: Robert Yang <liezhi.yang@windriver.com>
-Date: Thu, 29 May 2014 08:09:07 +0000
-Subject: [PATCH] rtnl-types.c: check IFLA_VLAN_PROTOCOL
-
-The older kernel's linux/if_link.h doesn't have IFLA_VLAN_PROTOCOL, we need
-check whether it has been defined or not.
-
-The maintainer said that he would fix it:
-
-http://thread.gmane.org/gmane.comp.sysutils.systemd.devel/18200
-
-Also we need redefine IFLA_MAX from 34 to 35 when define IFLA_CARRIER,
-otherwise there would be error:
-
-| src/libsystemd/sd-rtnl/rtnl-types.c:233:9: error: array index in initializer exceeds array bounds
-| [IFLA_CARRIER] = { .type = NLA_U8 },
-
-Upstream-Status: Pending
-
-Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
----
- src/libsystemd/sd-rtnl/rtnl-types.c | 2 ++
- src/shared/missing.h | 2 ++
- 2 files changed, 4 insertions(+)
-
-diff --git a/src/libsystemd/sd-rtnl/rtnl-types.c b/src/libsystemd/sd-rtnl/rtnl-types.c
-index 44ac5ec..ab6161f 100644
---- a/src/libsystemd/sd-rtnl/rtnl-types.c
-+++ b/src/libsystemd/sd-rtnl/rtnl-types.c
-@@ -67,7 +67,9 @@ static const NLType rtnl_link_info_data_vlan_types[IFLA_VLAN_MAX + 1] = {
- [IFLA_VLAN_EGRESS_QOS] = { .type = NLA_NESTED },
- [IFLA_VLAN_INGRESS_QOS] = { .type = NLA_NESTED },
- */
-+#ifdef IFLA_VLAN_PROTOCOL
- [IFLA_VLAN_PROTOCOL] = { .type = NLA_U16 },
-+#endif
- };
-
- static const NLType rtnl_link_info_data_bond_types[IFLA_BOND_MAX + 1] = {
-diff --git a/src/shared/missing.h b/src/shared/missing.h
-index d5ec2f8..732853f 100644
---- a/src/shared/missing.h
-+++ b/src/shared/missing.h
-@@ -94,6 +94,8 @@
-
- #ifndef IFLA_CARRIER
- #define IFLA_CARRIER 33
-+ #undef IFLA_MAX
-+ #define IFLA_MAX 35
- #ifndef IFLA_NUM_RX_QUEUES
- #define IFLA_NUM_RX_QUEUES 32
- #ifndef IFLA_NUM_TX_QUEUES
---
-1.8.3.4
-
diff --git a/meta/recipes-core/systemd/systemd_213.bb b/meta/recipes-core/systemd/systemd_216.bb
index ccdc10b873..eceead9e2c 100644
--- a/meta/recipes-core/systemd/systemd_213.bb
+++ b/meta/recipes-core/systemd/systemd_216.bb
@@ -17,9 +17,9 @@ SECTION = "base/shell"
inherit gtk-doc useradd pkgconfig autotools perlnative update-rc.d update-alternatives qemu systemd ptest gettext
-SRCREV = "c9679c652b3c31f2510e8805d81630680ebc7e95"
+SRCREV = "5d0ae62c665262c4c55536457e84e278c252cc0b"
-PV = "213+git${SRCPV}"
+PV = "216+git${SRCPV}"
SRC_URI = "git://anongit.freedesktop.org/systemd/systemd;branch=master;protocol=git \
file://binfmt-install.patch \
@@ -28,14 +28,18 @@ SRC_URI = "git://anongit.freedesktop.org/systemd/systemd;branch=master;protocol=
file://systemd-pam-fix-fallocate.patch \
file://systemd-pam-fix-mkostemp.patch \
file://optional_secure_getenv.patch \
- file://0001-uClibc-doesn-t-implement-pwritev-preadv.patch \
file://uclibc-sysinfo_h.patch \
file://uclibc-get-physmem.patch \
+ file://0001-add-support-for-executing-scripts-under-etc-rcS.d.patch \
+ file://0001-missing.h-add-fake-__NR_memfd_create-for-MIPS.patch \
+ file://0001-Make-root-s-home-directory-configurable.patch \
+ file://0001-systemd-user-avoid-using-system-auth.patch \
+ file://0001-journal-Fix-navigating-backwards-missing-entries.patch \
file://touchscreen.rules \
file://00-create-volatile.conf \
file://init \
file://run-ptest \
- file://systemd-older-kernel.patch \
+ ${@bb.utils.contains('PACKAGECONFIG', 'resolved', '', 'file://0001-tmpfiles.d-etc.conf-disable-resolv.conf-symlink.patch', d)} \
"
S = "${WORKDIR}/git"
@@ -48,6 +52,7 @@ LDFLAGS_append_libc-uclibc = " -lrt"
GTKDOC_DOCDIR = "${S}/docs/"
PACKAGECONFIG ??= "xz"
+PACKAGECONFIG[journal-upload] = "--enable-libcurl,--disable-libcurl,curl"
# Sign the journal for anti-tampering
PACKAGECONFIG[gcrypt] = "--enable-gcrypt,--disable-gcrypt,libgcrypt"
# regardless of PACKAGECONFIG, libgcrypt is always required to expand
@@ -57,6 +62,12 @@ DEPENDS += "libgcrypt"
PACKAGECONFIG[xz] = "--enable-xz,--disable-xz,xz"
PACKAGECONFIG[cryptsetup] = "--enable-libcryptsetup,--disable-libcryptsetup,cryptsetup"
PACKAGECONFIG[microhttpd] = "--enable-microhttpd,--disable-microhttpd,libmicrohttpd"
+PACKAGECONFIG[elfutils] = "--enable-elfutils,--disable-elfutils,elfutils"
+PACKAGECONFIG[resolved] = "--enable-resolved,--disable-resolved"
+PACKAGECONFIG[networkd] = "--enable-networkd,--disable-networkd"
+PACKAGECONFIG[libidn] = "--enable-libidn,--disable-libidn,libidn"
+PACKAGECONFIG[audit] = "--enable-audit,--disable-audit,audit"
+PACKAGECONFIG[manpages] = "--enable-manpages,--disable-manpages,libxslt-native xmlto-native docbook-xml-dtd4-native docbook-xsl-stylesheets-native"
CACHED_CONFIGUREVARS = "ac_cv_path_KILL=${base_bindir}/kill"
@@ -69,8 +80,8 @@ rootlibexecdir = "${rootprefix}/lib"
# The gtk+ tools should get built as a separate recipe e.g. systemd-tools
EXTRA_OECONF = " --with-rootprefix=${rootprefix} \
--with-rootlibdir=${rootlibdir} \
+ --with-roothomedir=${ROOT_HOME} \
${@bb.utils.contains('DISTRO_FEATURES', 'pam', '--enable-pam', '--disable-pam', d)} \
- --disable-manpages \
--disable-coredump \
--disable-introspection \
--disable-kdbus \
@@ -94,7 +105,6 @@ do_configure_prepend() {
else
cp -r ${S}/units ${S}/units.pre_sed
fi
- sed -i -e 's:=/root:=${ROOT_HOME}:g' ${S}/units/*.service*
sed -i '/ln --relative --help/d' ${S}/configure.ac
sed -i -e 's:\$(LN_S) --relative -f:lnr:g' ${S}/Makefile.am
sed -i -e 's:\$(LN_S) --relative:lnr:g' ${S}/Makefile.am
@@ -126,6 +136,21 @@ do_install() {
# Delete journal README, as log can be symlinked inside volatile.
rm -f ${D}/${localstatedir}/log/README
+
+ # Create symlinks for systemd-update-utmp-runlevel.service
+ install -d ${D}${systemd_unitdir}/system/graphical.target.wants
+ install -d ${D}${systemd_unitdir}/system/multi-user.target.wants
+ install -d ${D}${systemd_unitdir}/system/poweroff.target.wants
+ install -d ${D}${systemd_unitdir}/system/reboot.target.wants
+ install -d ${D}${systemd_unitdir}/system/rescue.target.wants
+ ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_unitdir}/system/graphical.target.wants/systemd-update-utmp-runlevel.service
+ ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_unitdir}/system/multi-user.target.wants/systemd-update-utmp-runlevel.service
+ ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_unitdir}/system/poweroff.target.wants/systemd-update-utmp-runlevel.service
+ ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_unitdir}/system/reboot.target.wants/systemd-update-utmp-runlevel.service
+ ln -sf ../systemd-update-utmp-runlevel.service ${D}${systemd_unitdir}/system/rescue.target.wants/systemd-update-utmp-runlevel.service
+
+ # Enable journal to forward message to syslog daemon
+ sed -i -e 's/.*ForwardToSyslog.*/ForwardToSyslog=yes/' ${D}${sysconfdir}/systemd/journald.conf
}
do_install_ptest () {
@@ -148,10 +173,10 @@ python populate_packages_prepend (){
systemdlibdir = d.getVar("rootlibdir", True)
do_split_packages(d, systemdlibdir, '^lib(.*)\.so\.*', 'lib%s', 'Systemd %s library', extra_depends='', allow_links=True)
}
-PACKAGES_DYNAMIC += "^lib(udev|gudev|systemd).*"
+PACKAGES_DYNAMIC += "^lib(udev|systemd).*"
PACKAGES =+ "${PN}-gui ${PN}-vconsole-setup ${PN}-initramfs ${PN}-analyze ${PN}-kernel-install \
- ${PN}-rpm-macros ${PN}-binfmt ${PN}-pam ${PN}-zsh"
+ ${PN}-rpm-macros ${PN}-binfmt ${PN}-pam ${PN}-zsh libgudev"
SYSTEMD_PACKAGES = "${PN}-binfmt"
SYSTEMD_SERVICE_${PN}-binfmt = "systemd-binfmt.service"
@@ -165,6 +190,8 @@ FILES_${PN}-analyze = "${bindir}/systemd-analyze"
FILES_${PN}-initramfs = "/init"
RDEPENDS_${PN}-initramfs = "${PN}"
+FILES_libgudev = "${libdir}/libgudev*${SOLIBS}"
+
# The test cases need perl and bash to run correctly.
RDEPENDS_${PN}-ptest += "perl bash"
FILES_${PN}-ptest += "${libdir}/udev/rules.d"
@@ -193,7 +220,7 @@ FILES_${PN}-binfmt = "${sysconfdir}/binfmt.d/ \
${systemd_unitdir}/system/systemd-binfmt.service"
RRECOMMENDS_${PN}-binfmt = "kernel-module-binfmt-misc"
-RRECOMMENDS_${PN}-vconsole-setup = "kbd kbd-consolefonts"
+RRECOMMENDS_${PN}-vconsole-setup = "kbd kbd-consolefonts kbd-keymaps"
CONFFILES_${PN} = "${sysconfdir}/systemd/journald.conf \
${sysconfdir}/systemd/logind.conf \
@@ -206,6 +233,7 @@ FILES_${PN} = " ${base_bindir}/* \
${datadir}/dbus-1/system-services \
${datadir}/polkit-1 \
${datadir}/${BPN} \
+ ${datadir}/factory \
${sysconfdir}/bash_completion.d/ \
${sysconfdir}/dbus-1/ \
${sysconfdir}/machine-id \
@@ -218,7 +246,7 @@ FILES_${PN} = " ${base_bindir}/* \
${rootlibexecdir}/systemd/* \
${systemd_unitdir}/* \
${base_libdir}/security/*.so \
- ${libdir}/libnss_myhostname.so.2 \
+ ${libdir}/libnss_* \
/cgroup \
${bindir}/systemd* \
${bindir}/busctl \
@@ -231,6 +259,7 @@ FILES_${PN} = " ${base_bindir}/* \
${exec_prefix}/lib/systemd \
${exec_prefix}/lib/modules-load.d \
${exec_prefix}/lib/sysctl.d \
+ ${exec_prefix}/lib/sysusers.d \
${localstatedir} \
/lib/udev/rules.d/70-uaccess.rules \
/lib/udev/rules.d/71-seat.rules \
@@ -248,7 +277,7 @@ RDEPENDS_${PN} += "volatile-binds"
RRECOMMENDS_${PN} += "systemd-serialgetty systemd-compat-units udev-hwdb\
util-linux-agetty \
util-linux-fsck e2fsprogs-e2fsck \
- kernel-module-autofs4 kernel-module-unix kernel-module-ipv6 \
+ kernel-module-autofs4 kernel-module-unix kernel-module-ipv6 os-release \
"
PACKAGES =+ "udev-dbg udev udev-hwdb"
diff --git a/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb b/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb
index c3244b4602..657ef02204 100644
--- a/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb
+++ b/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb
@@ -23,7 +23,7 @@ do_install() {
do
j=`echo ${i} | sed s/\;/\ /g`
label=`echo ${i} | sed -e 's/^.*;tty//' -e 's/;.*//'`
- echo "$label:12345:respawn:${base_sbindir}/getty ${j}" >> ${D}${sysconfdir}/inittab
+ echo "$label:12345:respawn:${base_sbindir}/getty -L ${j}" >> ${D}${sysconfdir}/inittab
done
if [ "${USE_VT}" = "1" ]; then
diff --git a/meta/recipes-core/uclibc/uclibc-git.inc b/meta/recipes-core/uclibc/uclibc-git.inc
index 69e834d6d1..a2b2353c5a 100644
--- a/meta/recipes-core/uclibc/uclibc-git.inc
+++ b/meta/recipes-core/uclibc/uclibc-git.inc
@@ -1,4 +1,4 @@
-SRCREV="5eddde8f094ef52dca06695cc598e3b2556dcccb"
+SRCREV = "48a0006012679ff0eda6f256da958d73a924fb57"
PV = "0.9.33+git${SRCPV}"
@@ -23,7 +23,7 @@ SRC_URI = "git://uclibc.org/uClibc.git;branch=master \
file://0001-Revert-utent.c-wtent.c-move-functions-from-utxent.c.patch \
file://0001-Add-eventfd_read-and-eventfd_write.patch \
file://0002-wire-setns-syscall.patch \
- file://0003-fcntl.h-Define-F_SETPIPE_SZ-and-F_GETPIPE_SZ.patch \
- file://0004-Add-clock_adjtime-syscall.patch \
+ file://0001-Define-IPTOS_CLASS_-macros-according-to-RFC-2474.patch \
+ file://0001-timex-Sync-with-glibc.patch \
"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-core/uclibc/uclibc-git/0001-Define-IPTOS_CLASS_-macros-according-to-RFC-2474.patch b/meta/recipes-core/uclibc/uclibc-git/0001-Define-IPTOS_CLASS_-macros-according-to-RFC-2474.patch
new file mode 100644
index 0000000000..d613823695
--- /dev/null
+++ b/meta/recipes-core/uclibc/uclibc-git/0001-Define-IPTOS_CLASS_-macros-according-to-RFC-2474.patch
@@ -0,0 +1,75 @@
+From be8ed13a90c528adfbe3c8543946bb2c5a2ad713 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 25 Aug 2014 15:50:36 -0700
+Subject: [PATCH] Define IPTOS_CLASS_* macros according to RFC 2474
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Upstream-Status: Pending
+---
+ include/netinet/ip.h | 42 ++++++++++++++++++++++++++++++------------
+ 1 file changed, 30 insertions(+), 12 deletions(-)
+
+diff --git a/include/netinet/ip.h b/include/netinet/ip.h
+index 19e1249..3fe58b9 100644
+--- a/include/netinet/ip.h
++++ b/include/netinet/ip.h
+@@ -188,7 +188,25 @@ struct ip_timestamp
+ #define IPTOS_DSCP_EF 0xb8
+
+ /*
+- * Definitions for IP type of service (ip_tos)
++ * In RFC 2474, Section 4.2.2.1, the Class Selector Codepoints subsume
++ * the old ToS Precedence values.
++ */
++#define IPTOS_CLASS_MASK 0xe0
++#define IPTOS_CLASS(class) ((class) & IPTOS_CLASS_MASK)
++#define IPTOS_CLASS_CS0 0x00
++#define IPTOS_CLASS_CS1 0x20
++#define IPTOS_CLASS_CS2 0x40
++#define IPTOS_CLASS_CS3 0x60
++#define IPTOS_CLASS_CS4 0x80
++#define IPTOS_CLASS_CS5 0xa0
++#define IPTOS_CLASS_CS6 0xc0
++#define IPTOS_CLASS_CS7 0xe0
++
++#define IPTOS_CLASS_DEFAULT IPTOS_CLASS_CS0
++
++/*
++ * Definitions for IP type of service (ip_tos) [deprecated; use DSCP
++ * and CS definitions above instead.]
+ */
+ #define IPTOS_TOS_MASK 0x1E
+ #define IPTOS_TOS(tos) ((tos) & IPTOS_TOS_MASK)
+@@ -199,18 +217,18 @@ struct ip_timestamp
+ #define IPTOS_MINCOST IPTOS_LOWCOST
+
+ /*
+- * Definitions for IP precedence (also in ip_tos) (hopefully unused)
++ * Definitions for IP precedence (also in ip_tos) [also deprecated.]
+ */
+-#define IPTOS_PREC_MASK 0xe0
+-#define IPTOS_PREC(tos) ((tos) & IPTOS_PREC_MASK)
+-#define IPTOS_PREC_NETCONTROL 0xe0
+-#define IPTOS_PREC_INTERNETCONTROL 0xc0
+-#define IPTOS_PREC_CRITIC_ECP 0xa0
+-#define IPTOS_PREC_FLASHOVERRIDE 0x80
+-#define IPTOS_PREC_FLASH 0x60
+-#define IPTOS_PREC_IMMEDIATE 0x40
+-#define IPTOS_PREC_PRIORITY 0x20
+-#define IPTOS_PREC_ROUTINE 0x00
++#define IPTOS_PREC_MASK IPTOS_CLASS_MASK
++#define IPTOS_PREC(tos) IPTOS_CLASS(tos)
++#define IPTOS_PREC_NETCONTROL IPTOS_CLASS_CS7
++#define IPTOS_PREC_INTERNETCONTROL IPTOS_CLASS_CS6
++#define IPTOS_PREC_CRITIC_ECP IPTOS_CLASS_CS5
++#define IPTOS_PREC_FLASHOVERRIDE IPTOS_CLASS_CS4
++#define IPTOS_PREC_FLASH IPTOS_CLASS_CS3
++#define IPTOS_PREC_IMMEDIATE IPTOS_CLASS_CS2
++#define IPTOS_PREC_PRIORITY IPTOS_CLASS_CS1
++#define IPTOS_PREC_ROUTINE IPTOS_CLASS_CS0
+
+ /*
+ * Definitions for options.
+--
+2.1.0
+
diff --git a/meta/recipes-core/uclibc/uclibc-git/0001-timex-Sync-with-glibc.patch b/meta/recipes-core/uclibc/uclibc-git/0001-timex-Sync-with-glibc.patch
new file mode 100644
index 0000000000..5d6a0a5366
--- /dev/null
+++ b/meta/recipes-core/uclibc/uclibc-git/0001-timex-Sync-with-glibc.patch
@@ -0,0 +1,33 @@
+From f489cc44a209f6c4370e94c9c788fc9cc4820be1 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Mon, 25 Aug 2014 16:22:57 -0700
+Subject: [PATCH] timex: Sync with glibc
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Upstream-Status: Pending
+---
+ include/sys/timex.h | 8 ++++++--
+ 1 file changed, 6 insertions(+), 2 deletions(-)
+
+diff --git a/include/sys/timex.h b/include/sys/timex.h
+index 9082a28..57059bd 100644
+--- a/include/sys/timex.h
++++ b/include/sys/timex.h
+@@ -67,8 +67,12 @@ struct timex
+ #define ADJ_ESTERROR 0x0008 /* estimated time error */
+ #define ADJ_STATUS 0x0010 /* clock status */
+ #define ADJ_TIMECONST 0x0020 /* pll time constant */
+-#define ADJ_TICK 0x4000 /* tick value */
+-#define ADJ_OFFSET_SINGLESHOT 0x8001 /* old-fashioned adjtime */
++#define ADJ_TAI 0x0080 /* set TAI offset */
++#define ADJ_MICRO 0x1000 /* select microsecond resolution */
++#define ADJ_NANO 0x2000 /* select nanosecond resolution */
++#define ADJ_TICK 0x4000 /* tick value */
++#define ADJ_OFFSET_SINGLESHOT 0x8001 /* old-fashioned adjtime */
++#define ADJ_OFFSET_SS_READ 0xa001 /* read-only adjtime */
+
+ /* xntp 3.4 compatibility names */
+ #define MOD_OFFSET ADJ_OFFSET
+--
+2.1.0
+
diff --git a/meta/recipes-core/uclibc/uclibc-git/0003-fcntl.h-Define-F_SETPIPE_SZ-and-F_GETPIPE_SZ.patch b/meta/recipes-core/uclibc/uclibc-git/0003-fcntl.h-Define-F_SETPIPE_SZ-and-F_GETPIPE_SZ.patch
deleted file mode 100644
index f4387db9c7..0000000000
--- a/meta/recipes-core/uclibc/uclibc-git/0003-fcntl.h-Define-F_SETPIPE_SZ-and-F_GETPIPE_SZ.patch
+++ /dev/null
@@ -1,377 +0,0 @@
-From 7791d129d777e481a1e429815edcd05978438840 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Thu, 20 Feb 2014 01:12:14 -0800
-Subject: [PATCH 3/3] fcntl.h: Define F_SETPIPE_SZ and F_GETPIPE_SZ
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Upstream-Status: Pending
----
- libc/sysdeps/linux/alpha/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/arc/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/arm/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/bfin/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/cris/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/e1/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/frv/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/h8300/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/hppa/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/i386/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/i960/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/ia64/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/m68k/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/metag/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/microblaze/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/mips/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/nios/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/nios2/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/powerpc/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/sh/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/sh64/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/sparc/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/v850/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/vax/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/x86_64/bits/fcntl.h | 2 ++
- libc/sysdeps/linux/xtensa/bits/fcntl.h | 2 ++
- 26 files changed, 52 insertions(+)
-
-diff --git a/libc/sysdeps/linux/alpha/bits/fcntl.h b/libc/sysdeps/linux/alpha/bits/fcntl.h
-index dd32529..a44be9e 100644
---- a/libc/sysdeps/linux/alpha/bits/fcntl.h
-+++ b/libc/sysdeps/linux/alpha/bits/fcntl.h
-@@ -94,6 +94,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* for F_[GET|SET]FD */
-diff --git a/libc/sysdeps/linux/arc/bits/fcntl.h b/libc/sysdeps/linux/arc/bits/fcntl.h
-index 71136da..1cb9600 100755
---- a/libc/sysdeps/linux/arc/bits/fcntl.h
-+++ b/libc/sysdeps/linux/arc/bits/fcntl.h
-@@ -87,6 +87,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/arm/bits/fcntl.h b/libc/sysdeps/linux/arm/bits/fcntl.h
-index f1a54f0..aedc154 100644
---- a/libc/sysdeps/linux/arm/bits/fcntl.h
-+++ b/libc/sysdeps/linux/arm/bits/fcntl.h
-@@ -99,6 +99,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FD. */
-diff --git a/libc/sysdeps/linux/bfin/bits/fcntl.h b/libc/sysdeps/linux/bfin/bits/fcntl.h
-index c6cba56..e987824 100644
---- a/libc/sysdeps/linux/bfin/bits/fcntl.h
-+++ b/libc/sysdeps/linux/bfin/bits/fcntl.h
-@@ -98,6 +98,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/cris/bits/fcntl.h b/libc/sysdeps/linux/cris/bits/fcntl.h
-index acc5e25..029bb80 100644
---- a/libc/sysdeps/linux/cris/bits/fcntl.h
-+++ b/libc/sysdeps/linux/cris/bits/fcntl.h
-@@ -99,6 +99,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/e1/bits/fcntl.h b/libc/sysdeps/linux/e1/bits/fcntl.h
-index da699c8..2e0e6ba 100644
---- a/libc/sysdeps/linux/e1/bits/fcntl.h
-+++ b/libc/sysdeps/linux/e1/bits/fcntl.h
-@@ -93,6 +93,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/frv/bits/fcntl.h b/libc/sysdeps/linux/frv/bits/fcntl.h
-index 3aacc9d..5a7d9ef 100644
---- a/libc/sysdeps/linux/frv/bits/fcntl.h
-+++ b/libc/sysdeps/linux/frv/bits/fcntl.h
-@@ -95,6 +95,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/h8300/bits/fcntl.h b/libc/sysdeps/linux/h8300/bits/fcntl.h
-index d0b8310..45deec4 100644
---- a/libc/sysdeps/linux/h8300/bits/fcntl.h
-+++ b/libc/sysdeps/linux/h8300/bits/fcntl.h
-@@ -93,6 +93,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/hppa/bits/fcntl.h b/libc/sysdeps/linux/hppa/bits/fcntl.h
-index 1bb41ce..abb3372 100644
---- a/libc/sysdeps/linux/hppa/bits/fcntl.h
-+++ b/libc/sysdeps/linux/hppa/bits/fcntl.h
-@@ -96,6 +96,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* for F_[GET|SET]FL */
-diff --git a/libc/sysdeps/linux/i386/bits/fcntl.h b/libc/sysdeps/linux/i386/bits/fcntl.h
-index d48e62a..79b69d4 100644
---- a/libc/sysdeps/linux/i386/bits/fcntl.h
-+++ b/libc/sysdeps/linux/i386/bits/fcntl.h
-@@ -99,6 +99,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FD. */
-diff --git a/libc/sysdeps/linux/i960/bits/fcntl.h b/libc/sysdeps/linux/i960/bits/fcntl.h
-index e2fcbe6..f6e145d 100644
---- a/libc/sysdeps/linux/i960/bits/fcntl.h
-+++ b/libc/sysdeps/linux/i960/bits/fcntl.h
-@@ -93,6 +93,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/ia64/bits/fcntl.h b/libc/sysdeps/linux/ia64/bits/fcntl.h
-index 1ff0ed5..fedefb6 100644
---- a/libc/sysdeps/linux/ia64/bits/fcntl.h
-+++ b/libc/sysdeps/linux/ia64/bits/fcntl.h
-@@ -95,6 +95,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FD. */
-diff --git a/libc/sysdeps/linux/m68k/bits/fcntl.h b/libc/sysdeps/linux/m68k/bits/fcntl.h
-index d7beb6c..66df337 100644
---- a/libc/sysdeps/linux/m68k/bits/fcntl.h
-+++ b/libc/sysdeps/linux/m68k/bits/fcntl.h
-@@ -98,6 +98,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/metag/bits/fcntl.h b/libc/sysdeps/linux/metag/bits/fcntl.h
-index c4f641b..e10abd7 100644
---- a/libc/sysdeps/linux/metag/bits/fcntl.h
-+++ b/libc/sysdeps/linux/metag/bits/fcntl.h
-@@ -100,6 +100,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FD. */
-diff --git a/libc/sysdeps/linux/microblaze/bits/fcntl.h b/libc/sysdeps/linux/microblaze/bits/fcntl.h
-index a2e3573..20b7597 100644
---- a/libc/sysdeps/linux/microblaze/bits/fcntl.h
-+++ b/libc/sysdeps/linux/microblaze/bits/fcntl.h
-@@ -98,6 +98,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- # define F_SETPIPE_SZ 1031 /* Set of pipe page size array */
- # define F_GETPIPE_SZ 1032 /* Get of pipe page size array */
- #endif
-diff --git a/libc/sysdeps/linux/mips/bits/fcntl.h b/libc/sysdeps/linux/mips/bits/fcntl.h
-index 4291f6e..8c4c115 100644
---- a/libc/sysdeps/linux/mips/bits/fcntl.h
-+++ b/libc/sysdeps/linux/mips/bits/fcntl.h
-@@ -111,6 +111,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/nios/bits/fcntl.h b/libc/sysdeps/linux/nios/bits/fcntl.h
-index 5854c18..36ca766 100644
---- a/libc/sysdeps/linux/nios/bits/fcntl.h
-+++ b/libc/sysdeps/linux/nios/bits/fcntl.h
-@@ -96,6 +96,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/nios2/bits/fcntl.h b/libc/sysdeps/linux/nios2/bits/fcntl.h
-index d7beb6c..66df337 100644
---- a/libc/sysdeps/linux/nios2/bits/fcntl.h
-+++ b/libc/sysdeps/linux/nios2/bits/fcntl.h
-@@ -98,6 +98,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/powerpc/bits/fcntl.h b/libc/sysdeps/linux/powerpc/bits/fcntl.h
-index 217f54a..d150a31 100644
---- a/libc/sysdeps/linux/powerpc/bits/fcntl.h
-+++ b/libc/sysdeps/linux/powerpc/bits/fcntl.h
-@@ -99,6 +99,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FD. */
-diff --git a/libc/sysdeps/linux/sh/bits/fcntl.h b/libc/sysdeps/linux/sh/bits/fcntl.h
-index 5c9f047..aceaec6 100644
---- a/libc/sysdeps/linux/sh/bits/fcntl.h
-+++ b/libc/sysdeps/linux/sh/bits/fcntl.h
-@@ -99,6 +99,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FD. */
-diff --git a/libc/sysdeps/linux/sh64/bits/fcntl.h b/libc/sysdeps/linux/sh64/bits/fcntl.h
-index ff741cb..b319e8b 100644
---- a/libc/sysdeps/linux/sh64/bits/fcntl.h
-+++ b/libc/sysdeps/linux/sh64/bits/fcntl.h
-@@ -95,6 +95,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/sparc/bits/fcntl.h b/libc/sysdeps/linux/sparc/bits/fcntl.h
-index 235d2ad..7e80d9e 100644
---- a/libc/sysdeps/linux/sparc/bits/fcntl.h
-+++ b/libc/sysdeps/linux/sparc/bits/fcntl.h
-@@ -106,6 +106,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- #if __WORDSIZE == 64
-diff --git a/libc/sysdeps/linux/v850/bits/fcntl.h b/libc/sysdeps/linux/v850/bits/fcntl.h
-index d0b8310..45deec4 100644
---- a/libc/sysdeps/linux/v850/bits/fcntl.h
-+++ b/libc/sysdeps/linux/v850/bits/fcntl.h
-@@ -93,6 +93,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/vax/bits/fcntl.h b/libc/sysdeps/linux/vax/bits/fcntl.h
-index ff5bff3..a30d5e1 100644
---- a/libc/sysdeps/linux/vax/bits/fcntl.h
-+++ b/libc/sysdeps/linux/vax/bits/fcntl.h
-@@ -92,6 +92,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FL. */
-diff --git a/libc/sysdeps/linux/x86_64/bits/fcntl.h b/libc/sysdeps/linux/x86_64/bits/fcntl.h
-index a899dcf..02e011d 100644
---- a/libc/sysdeps/linux/x86_64/bits/fcntl.h
-+++ b/libc/sysdeps/linux/x86_64/bits/fcntl.h
-@@ -113,6 +113,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FD. */
-diff --git a/libc/sysdeps/linux/xtensa/bits/fcntl.h b/libc/sysdeps/linux/xtensa/bits/fcntl.h
-index 5d28547..4e9aa7e 100644
---- a/libc/sysdeps/linux/xtensa/bits/fcntl.h
-+++ b/libc/sysdeps/linux/xtensa/bits/fcntl.h
-@@ -99,6 +99,8 @@
- # define F_NOTIFY 1026 /* Request notfications on a directory. */
- # define F_DUPFD_CLOEXEC 1030 /* Duplicate file descriptor with
- close-on-exit set on new fd. */
-+# define F_SETPIPE_SZ 1031 /* Set pipe page size array. */
-+# define F_GETPIPE_SZ 1032 /* Get pipe page size array. */
- #endif
-
- /* For F_[GET|SET]FD. */
---
-1.9.0
-
diff --git a/meta/recipes-core/uclibc/uclibc-git/0004-Add-clock_adjtime-syscall.patch b/meta/recipes-core/uclibc/uclibc-git/0004-Add-clock_adjtime-syscall.patch
deleted file mode 100644
index c596db67b1..0000000000
--- a/meta/recipes-core/uclibc/uclibc-git/0004-Add-clock_adjtime-syscall.patch
+++ /dev/null
@@ -1,75 +0,0 @@
-Upstream-Status: Pending
-
-From 8e19e651145554fbcb90179f3dfbc7ea8a07c900 Mon Sep 17 00:00:00 2001
-From: Khem Raj <raj.khem@gmail.com>
-Date: Sat, 15 Mar 2014 09:32:20 -0700
-Subject: [PATCH 4/4] Add clock_adjtime syscall
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
----
- include/sys/timex.h | 5 ++++-
- libc/sysdeps/linux/common/Makefile.in | 2 +-
- libc/sysdeps/linux/common/clock_adjtime.c | 15 +++++++++++++++
- 3 files changed, 20 insertions(+), 2 deletions(-)
- create mode 100644 libc/sysdeps/linux/common/clock_adjtime.c
-
-diff --git a/include/sys/timex.h b/include/sys/timex.h
-index 621afce..9082a28 100644
---- a/include/sys/timex.h
-+++ b/include/sys/timex.h
-@@ -20,6 +20,7 @@
-
- #include <features.h>
- #include <sys/time.h>
-+#include <time.h>
-
- /* These definitions from linux/timex.h as of 2.2.0. */
-
-@@ -125,7 +126,9 @@ libc_hidden_proto(adjtimex)
- extern int ntp_gettime (struct ntptimeval *__ntv) __THROW;
- extern int ntp_adjtime (struct timex *__tntx) __THROW;
- #endif
--
-+#if defined __UCLIBC_HAS_REALTIME__
-+extern int clock_adjtime (clockid_t __clock_id, struct timex *__ntx) __THROW;
-+#endif
- __END_DECLS
-
- #endif /* sys/timex.h */
-diff --git a/libc/sysdeps/linux/common/Makefile.in b/libc/sysdeps/linux/common/Makefile.in
-index 10d9884..bb985b6 100644
---- a/libc/sysdeps/linux/common/Makefile.in
-+++ b/libc/sysdeps/linux/common/Makefile.in
-@@ -84,7 +84,7 @@ CSRC-$(ARCH_USE_MMU) += msync.c
- CSRC-$(UCLIBC_BSD_SPECIFIC) += mincore.c setdomainname.c
- CSRC-$(UCLIBC_NTP_LEGACY) += ntp_gettime.c
- # aio_cancel|aio_error|aio_fsync|aio_read|aio_return|aio_suspend|aio_write|clock_getres|clock_gettime|clock_settime|clock_settime|fdatasync|lio_listio|mlockall|munlockall|mlock|munlock|mq_close|mq_getattr|mq_notify|mq_open|mq_receive|mq_timedreceive|mq_send|mq_timedsend|mq_setattr|mq_unlink|nanosleep|sched_getparam|sched_get_priority_max|sched_get_priority_min|sched_getscheduler|sched_rr_get_interval|sched_setparam|sched_setscheduler|sem_close|sem_destroy|sem_getvalue|sem_init|sem_open|sem_post|sem_trywait|sem_wait|sem_unlink|sem_wait|shm_open|shm_unlink|sigqueue|sigtimedwait|sigwaitinfo|timer_create|timer_delete|timer_getoverrun|timer_gettime|timer_settime
--CSRC-$(UCLIBC_HAS_REALTIME) += clock_getres.c clock_gettime.c clock_settime.c \
-+CSRC-$(UCLIBC_HAS_REALTIME) += clock_adjtime.c clock_getres.c clock_gettime.c clock_settime.c \
- fdatasync.c mlockall.c mlock.c munlockall.c munlock.c \
- nanosleep.c __rt_sigtimedwait.c __rt_sigwaitinfo.c sched_getparam.c \
- sched_get_priority_max.c sched_get_priority_min.c sched_getscheduler.c \
-diff --git a/libc/sysdeps/linux/common/clock_adjtime.c b/libc/sysdeps/linux/common/clock_adjtime.c
-new file mode 100644
-index 0000000..968ec27
---- /dev/null
-+++ b/libc/sysdeps/linux/common/clock_adjtime.c
-@@ -0,0 +1,15 @@
-+/*
-+ * clock_adjtime() for uClibc
-+ *
-+ * Copyright (C) 2005 by Peter Kjellerstedt <pkj@axis.com>
-+ * Copyright (C) 2000-2006 Erik Andersen <andersen@uclibc.org>
-+ *
-+ * Licensed under the LGPL v2.1, see the file COPYING.LIB in this tarball.
-+ */
-+
-+#include <sys/syscall.h>
-+#include <sys/timex.h>
-+
-+#ifdef __NR_clock_adjtime
-+_syscall2(int, clock_adjtime, clockid_t, clock_id, struct timex*, ntx)
-+#endif
---
-1.9.0
-
diff --git a/meta/recipes-core/uclibc/uclibc-git/remove_attribute_optimize_Os.patch b/meta/recipes-core/uclibc/uclibc-git/remove_attribute_optimize_Os.patch
index d2a8a81b94..fb52faa7a0 100644
--- a/meta/recipes-core/uclibc/uclibc-git/remove_attribute_optimize_Os.patch
+++ b/meta/recipes-core/uclibc/uclibc-git/remove_attribute_optimize_Os.patch
@@ -5,8 +5,8 @@ Upstream-Status: Pending
Index: git/libpthread/nptl/pthread_mutex_timedlock.c
===================================================================
---- git.orig/libpthread/nptl/pthread_mutex_timedlock.c 2013-01-21 16:22:20.000000000 -0800
-+++ git/libpthread/nptl/pthread_mutex_timedlock.c 2013-01-21 16:24:12.275557670 -0800
+--- git.orig/libpthread/nptl/pthread_mutex_timedlock.c 2014-08-25 10:58:06.000000000 -0700
++++ git/libpthread/nptl/pthread_mutex_timedlock.c 2014-08-25 11:00:42.078242266 -0700
@@ -28,7 +28,9 @@
* error: can't find a register in class ‘GENERAL_REGS’ while reloading ‘asm’
*/
@@ -17,124 +17,3 @@ Index: git/libpthread/nptl/pthread_mutex_timedlock.c
pthread_mutex_timedlock (
pthread_mutex_t *mutex,
const struct timespec *abstime)
-Index: git/libc/sysdeps/linux/powerpc/bits/mathinline.h
-===================================================================
---- git.orig/libc/sysdeps/linux/powerpc/bits/mathinline.h 2013-01-21 16:22:20.000000000 -0800
-+++ git/libc/sysdeps/linux/powerpc/bits/mathinline.h 2013-01-21 16:24:12.275557670 -0800
-@@ -26,7 +26,7 @@
- #ifdef __cplusplus
- # define __MATH_INLINE __inline
- #else
--# define __MATH_INLINE extern __inline
-+# define __MATH_INLINE __extern_inline
- #endif /* __cplusplus */
-
- #if defined __GNUC__ && !defined _SOFT_FLOAT
-Index: git/libc/sysdeps/linux/alpha/bits/mathinline.h
-===================================================================
---- git.orig/libc/sysdeps/linux/alpha/bits/mathinline.h 2013-01-21 16:22:20.000000000 -0800
-+++ git/libc/sysdeps/linux/alpha/bits/mathinline.h 2013-01-21 16:24:12.275557670 -0800
-@@ -24,7 +24,7 @@
- #ifdef __cplusplus
- # define __MATH_INLINE __inline
- #else
--# define __MATH_INLINE extern __inline
-+# define __MATH_INLINE __extern_inline
- #endif
-
- #if defined __USE_ISOC99 && defined __GNUC__ && !__GNUC_PREREQ(3,0)
-Index: git/libc/sysdeps/linux/i386/bits/mathinline.h
-===================================================================
---- git.orig/libc/sysdeps/linux/i386/bits/mathinline.h 2013-01-21 16:22:20.000000000 -0800
-+++ git/libc/sysdeps/linux/i386/bits/mathinline.h 2013-01-21 16:24:12.275557670 -0800
-@@ -25,7 +25,7 @@
- #ifdef __cplusplus
- # define __MATH_INLINE __inline
- #else
--# define __MATH_INLINE extern __inline
-+# define __MATH_INLINE __extern_inline
- #endif
-
-
-Index: git/libc/sysdeps/linux/ia64/bits/mathinline.h
-===================================================================
---- git.orig/libc/sysdeps/linux/ia64/bits/mathinline.h 2013-01-21 16:22:20.000000000 -0800
-+++ git/libc/sysdeps/linux/ia64/bits/mathinline.h 2013-01-21 16:24:12.275557670 -0800
-@@ -23,7 +23,7 @@
- #ifdef __cplusplus
- # define __MATH_INLINE __inline
- #else
--# define __MATH_INLINE extern __inline
-+# define __MATH_INLINE __extern_inline
- #endif
-
- #if defined __USE_ISOC99 && defined __GNUC__ && __GNUC__ >= 2
-Index: git/libc/sysdeps/linux/m68k/bits/mathinline.h
-===================================================================
---- git.orig/libc/sysdeps/linux/m68k/bits/mathinline.h 2013-01-21 16:22:20.000000000 -0800
-+++ git/libc/sysdeps/linux/m68k/bits/mathinline.h 2013-01-21 16:24:12.275557670 -0800
-@@ -91,7 +91,7 @@
- # ifdef __cplusplus
- # define __m81_inline __inline
- # else
--# define __m81_inline extern __inline
-+# define __m81_inline __extern_inline
- # endif
- # define __M81_MATH_INLINES 1
- #endif
-@@ -350,14 +350,14 @@
- /* Note that there must be no whitespace before the argument passed for
- NAME, to make token pasting work correctly with -traditional. */
- # define __inline_forward_c(rettype, name, args1, args2) \
--extern __inline rettype __attribute__((__const__)) \
-+__extern_inline rettype __attribute__((__const__)) \
- name args1 \
- { \
- return __CONCAT(__,name) args2; \
- }
-
- # define __inline_forward(rettype, name, args1, args2) \
--extern __inline rettype name args1 \
-+__extern_inline rettype name args1 \
- { \
- return __CONCAT(__,name) args2; \
- }
-Index: git/libc/sysdeps/linux/mips/sys/tas.h
-===================================================================
---- git.orig/libc/sysdeps/linux/mips/sys/tas.h 2013-01-21 16:22:20.000000000 -0800
-+++ git/libc/sysdeps/linux/mips/sys/tas.h 2013-01-21 16:24:12.275557670 -0800
-@@ -29,7 +29,7 @@
- #ifdef __USE_EXTERN_INLINES
-
- # ifndef _EXTERN_INLINE
--# define _EXTERN_INLINE extern __inline
-+# define _EXTERN_INLINE __extern_inline
- # endif
-
- _EXTERN_INLINE int
-Index: git/libc/sysdeps/linux/sparc/bits/mathinline.h
-===================================================================
---- git.orig/libc/sysdeps/linux/sparc/bits/mathinline.h 2013-01-21 16:22:20.000000000 -0800
-+++ git/libc/sysdeps/linux/sparc/bits/mathinline.h 2013-01-21 16:24:12.279557671 -0800
-@@ -130,7 +130,7 @@
- # ifdef __cplusplus
- # define __MATH_INLINE __inline
- # else
--# define __MATH_INLINE extern __inline
-+# define __MATH_INLINE __extern_inline
- # endif /* __cplusplus */
-
- /* The gcc, version 2.7 or below, has problems with all this inlining
-Index: git/libc/sysdeps/linux/x86_64/bits/mathinline.h
-===================================================================
---- git.orig/libc/sysdeps/linux/x86_64/bits/mathinline.h 2013-01-21 16:22:20.000000000 -0800
-+++ git/libc/sysdeps/linux/x86_64/bits/mathinline.h 2013-01-21 16:24:12.279557671 -0800
-@@ -24,7 +24,7 @@
- #ifdef __cplusplus
- # define __MATH_INLINE __inline
- #else
--# define __MATH_INLINE extern __inline
-+# define __MATH_INLINE __extern_inline
- #endif
-
-
diff --git a/meta/recipes-core/uclibc/uclibc.inc b/meta/recipes-core/uclibc/uclibc.inc
index a86ac3ddf7..18587ca273 100644
--- a/meta/recipes-core/uclibc/uclibc.inc
+++ b/meta/recipes-core/uclibc/uclibc.inc
@@ -93,10 +93,10 @@ python () {
# if we use TARGET_CC_ARCH="-march=mips32" we end up
# with conflicting march options to gcc. Here we
# ask for MIPS32 ISA to match the chosen arch
-
- if "mips32" in d.getVar("TUNE_FEATURES",True):
+ tune = d.getVar("DEFAULTTUNE", True)
+ if tune in ['mips32', 'mips32r2']:
d.setVar('configmangle_append',
- "/^### MIPS32_CHECK$/a\\\nCONFIG_MIPS_ISA_MIPS32=y\n\n")
+ "/^### MIPS32_CHECK$/a\\\nCONFIG_MIPS_ISA_%s=y\n\n" % (tune.upper()))
if "${OE_FEATURES}":
d.setVar('configmangle_append',
"/^### DISTRO FEATURES$/a\\\n%s\n\n" %
diff --git a/meta/recipes-core/udev/udev.inc b/meta/recipes-core/udev/udev.inc
index 11204aaa41..280da10b48 100644
--- a/meta/recipes-core/udev/udev.inc
+++ b/meta/recipes-core/udev/udev.inc
@@ -31,7 +31,7 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/utils/kernel/hotplug/udev-${PV}.tar.gz \
file://init"
inherit autotools pkgconfig update-rc.d ptest
-RDEPENDS_${PN}-ptest += "make"
+RDEPENDS_${PN}-ptest += "make perl"
libexecdir = "${base_libdir}"
EXTRA_OECONF = "--disable-introspection \
diff --git a/meta/recipes-core/udev/udev/init b/meta/recipes-core/udev/udev/init
index f2c84d5d3e..94dbba37de 100644
--- a/meta/recipes-core/udev/udev/init
+++ b/meta/recipes-core/udev/udev/init
@@ -14,21 +14,28 @@ export TZ=/etc/localtime
[ -d /sys/class ] || exit 1
[ -r /proc/mounts ] || exit 1
[ -x @UDEVD@ ] || exit 1
+SYSCONF_CACHED="/etc/udev/cache.data"
+SYSCONF_TMP="/dev/shm/udev.cache"
+DEVCACHE_REGEN="/dev/shm/udev-regen" # create to request cache regen
+
+# A list of files which are used as a criteria to judge whether the udev cache could be reused.
+CMP_FILE_LIST="/proc/version /proc/cmdline /proc/devices"
+[ -f /proc/atags ] && CMP_FILE_LIST="$CMP_FILE_LIST /proc/atags"
+
+# List of files whose metadata (size/mtime/name) will be included in cached
+# system state.
+META_FILE_LIST="lib/udev/rules.d/* etc/udev/rules.d/*"
+
+# Command to compute system configuration.
+sysconf_cmd () {
+ cat -- $CMP_FILE_LIST
+ stat -L -c '%s %Y %n' -- $META_FILE_LIST | awk -F/ '{print $1 " " $NF;}'
+}
+
[ -f /etc/default/udev-cache ] && . /etc/default/udev-cache
[ -f /etc/udev/udev.conf ] && . /etc/udev/udev.conf
[ -f /etc/default/rcS ] && . /etc/default/rcS
-readfiles () {
- READDATA=""
- for filename in $@; do
- if [ -r $filename ]; then
- while read line; do
- READDATA="$READDATA$line"
- done < $filename
- fi
- done
-}
-
kill_udevd () {
pid=`pidof -x udevd`
[ -n "$pid" ] && kill $pid
@@ -57,38 +64,35 @@ case "$1" in
# the automount rule for udev needs /tmp directory available, as /tmp is a symlink
# to /var/tmp which in turn is a symlink to /var/volatile/tmp, we need to make sure
# /var/volatile/tmp directory to be available.
- mkdir -p /var/volatile/tmp
+ mkdir -m 1777 -p /var/volatile/tmp
# Cache handling.
- # A list of files which are used as a criteria to judge whether the udev cache could be reused.
- CMP_FILE_LIST="/proc/version /proc/cmdline /proc/devices /proc/atags"
if [ "$DEVCACHE" != "" ]; then
if [ -e $DEVCACHE ]; then
- readfiles $CMP_FILE_LIST
- NEWDATA="$READDATA"
- readfiles /etc/udev/cache.data
- OLDDATA="$READDATA"
- if [ "$OLDDATA" = "$NEWDATA" ]; then
- (cd /; tar xf $DEVCACHE > /dev/null 2>&1)
+ sysconf_cmd > "$SYSCONF_TMP"
+ if cmp $SYSCONF_CACHED $SYSCONF_TMP >/dev/null; then
+ tar xmf $DEVCACHE -C / -m
not_first_boot=1
[ "$VERBOSE" != "no" ] && echo "udev: using cache file $DEVCACHE"
- [ -e /dev/shm/udev.cache ] && rm -f /dev/shm/udev.cache
+ [ -e $SYSCONF_TMP ] && rm -f "$SYSCONF_TMP"
+ [ -e "$DEVCACHE_REGEN" ] && rm -f "$DEVCACHE_REGEN"
else
# Output detailed reason why the cached /dev is not used
- if [ "$VERBOSE" != "no" ]; then
- echo "udev: udev cache not used"
- echo "udev: we use $CMP_FILE_LIST as criteria to judge whether the cache /dev could be resued"
- echo "udev: olddata: $OLDDATA"
- echo "udev: newdata: $NEWDATA"
- fi
- echo "$NEWDATA" > /dev/shm/udev.cache
+ cat <<EOF
+udev: Not using udev cache because of changes detected in the following files:
+udev: $CMP_FILE_LIST
+udev: $META_FILE_LIST
+udev: The udev cache will be regenerated. To identify the detected changes,
+udev: compare the cached sysconf at $SYSCONF_CACHED
+udev: against the current sysconf at $SYSCONF_TMP
+EOF
+ touch "$DEVCACHE_REGEN"
fi
else
if [ "$ROOTFS_READ_ONLY" != "yes" ]; then
# If rootfs is not read-only, it's possible that a new udev cache would be generated;
# otherwise, we do not bother to read files.
- readfiles $CMP_FILE_LIST
- echo "$READDATA" > /dev/shm/udev.cache
+ touch "$DEVCACHE_REGEN"
fi
fi
fi
@@ -97,7 +101,7 @@ case "$1" in
kill_udevd > "/dev/null" 2>&1
# trigger the sorted events
- echo -e '\000\000\000\000' > /proc/sys/kernel/hotplug
+ [ -e /proc/sys/kernel/hotplug ] && echo -e '\000' >/proc/sys/kernel/hotplug
@UDEVD@ -d
udevadm control --env=STARTUP=1
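The rewritten cache handling above replaces the old readfiles()/string-compare logic with a single fingerprint: sysconf_cmd concatenates the contents of the CMP_FILE_LIST files plus a size/mtime stamp for every udev rules file, and the cached /dev is reused only while that fingerprint matches the saved copy. A condensed sketch of the decision, using the paths defined earlier in this script (a simplified illustration of the hunk above, not a drop-in replacement):

    sysconf_cmd > "$SYSCONF_TMP"
    if cmp "$SYSCONF_CACHED" "$SYSCONF_TMP" >/dev/null 2>&1; then
        tar xmf "$DEVCACHE" -C /          # nothing changed: restore cached /dev
        rm -f "$SYSCONF_TMP" "$DEVCACHE_REGEN"
    else
        touch "$DEVCACHE_REGEN"           # changed: ask udev-cache to rebuild it
    fi

Each stat line in the fingerprint looks roughly like "1960 1409300425 lib 60-persistent-storage.rules" (size, mtime, top-level directory, rule file name); the numbers here are made up for illustration.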
diff --git a/meta/recipes-core/udev/udev/udev-cache b/meta/recipes-core/udev/udev/udev-cache
index db5a513e14..895b1971c4 100644
--- a/meta/recipes-core/udev/udev/udev-cache
+++ b/meta/recipes-core/udev/udev/udev-cache
@@ -16,6 +16,25 @@ export TZ=/etc/localtime
[ -d /sys/class ] || exit 1
[ -f /etc/default/rcS ] && . /etc/default/rcS
+DEVCACHE_TMP="/dev/shm/udev-cache-tmp.tar"
+SYSCONF_CACHED="/etc/udev/cache.data"
+SYSCONF_TMP="/dev/shm/udev.cache"
+DEVCACHE_REGEN="/dev/shm/udev-regen" # create to request cache regen
+
+# A list of files which are used as a criteria to judge whether the udev cache could be reused.
+CMP_FILE_LIST="/proc/version /proc/cmdline /proc/devices"
+[ -f /proc/atags ] && CMP_FILE_LIST="$CMP_FILE_LIST /proc/atags"
+
+# List of files whose metadata (size/mtime/name) will be included in cached
+# system state.
+META_FILE_LIST="lib/udev/rules.d/* etc/udev/rules.d/*"
+
+# Command to compute system configuration.
+sysconf_cmd () {
+ cat -- $CMP_FILE_LIST
+ stat -L -c '%s %Y %n' -- $META_FILE_LIST | awk -F/ '{print $1 " " $NF;}'
+}
+
[ -f /etc/default/udev-cache ] && . /etc/default/udev-cache
if [ "$ROOTFS_READ_ONLY" = "yes" ]; then
@@ -23,10 +42,28 @@ if [ "$ROOTFS_READ_ONLY" = "yes" ]; then
exit 0
fi
-if [ "$DEVCACHE" != "" -a -e /dev/shm/udev.cache ]; then
- echo "Populating dev cache"
- (cd /; tar cf "$DEVCACHE" dev)
- mv /dev/shm/udev.cache /etc/udev/cache.data
+[ "$DEVCACHE" != "" ] || exit 0
+[ "${VERBOSE}" == "no" ] || echo -n "udev-cache: checking for ${DEVCACHE_REGEN}... "
+if ! [ -e "$DEVCACHE_REGEN" ]; then
+ [ "${VERBOSE}" == "no" ] || echo "not found."
+ exit 0
fi
+[ "${VERBOSE}" == "no" ] || echo "found."
+echo "Populating dev cache"
+
+(
+ set -e
+ trap 'echo "udev-cache: update failed!"' EXIT
+ udevadm control --stop-exec-queue
+ sysconf_cmd > "$SYSCONF_TMP"
+ find /dev -xdev \( -type b -o -type c -o -type l \) | cut -c 2- \
+ | xargs tar cf "${DEVCACHE_TMP}" -T-
+ gzip < "${DEVCACHE_TMP}" > "$DEVCACHE"
+ rm -f "${DEVCACHE_TMP}"
+ mv "$SYSCONF_TMP" "$SYSCONF_CACHED"
+ udevadm control --start-exec-queue
+ rm -f "$DEVCACHE_REGEN"
+ trap - EXIT
+) &
exit 0
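One practical consequence of the flag-file handshake: a cache rebuild can be requested by hand rather than waiting for a configuration change to be detected. A hedged example, assuming the flag path defined above and the usual init-script install location (the script path is not shown in this diff):

    touch /dev/shm/udev-regen       # create the regen request flag
    sh /etc/init.d/udev-cache       # or just let it run at its normal point late in boot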
diff --git a/meta/recipes-core/udev/udev/udev-cache.default b/meta/recipes-core/udev/udev/udev-cache.default
index 20933361a9..a3b732698d 100644
--- a/meta/recipes-core/udev/udev/udev-cache.default
+++ b/meta/recipes-core/udev/udev/udev-cache.default
@@ -1,5 +1,5 @@
# Default for /etc/init.d/udev
# Comment this out to disable device cache
-DEVCACHE="/etc/dev.tar"
+DEVCACHE="/etc/udev-cache.tar.gz"
PROBE_PLATFORM_BUS="yes"
diff --git a/meta/recipes-core/util-linux/util-linux.inc b/meta/recipes-core/util-linux/util-linux.inc
index ffb84c4b64..e6c00e437e 100644
--- a/meta/recipes-core/util-linux/util-linux.inc
+++ b/meta/recipes-core/util-linux/util-linux.inc
@@ -37,7 +37,7 @@ PACKAGES =+ "util-linux-agetty util-linux-fdisk util-linux-cfdisk util-linux-sfd
util-linux-mkfs util-linux-mcookie util-linux-reset \
util-linux-mkfs.cramfs util-linux-fsck.cramfs util-linux-fstrim \
util-linux-partx ${PN}-bash-completion util-linux-hwclock \
- util-linux-findfs"
+ util-linux-findfs util-linux-getopt"
SHARED_EXTRA_OECONF = "--disable-use-tty-group \
--disable-makeinstall-chown \
@@ -81,6 +81,7 @@ FILES_util-linux-reset = "${base_bindir}/reset"
FILES_util-linux-partx = "${sbindir}/partx"
FILES_util-linux-hwclock = "${base_sbindir}/hwclock.${BPN}"
FILES_util-linux-findfs = "${sbindir}/findfs"
+FILES_util-linux-getopt = "${bindir}/getopt.${BPN}"
FILES_util-linux-libblkid = "${base_libdir}/libblkid.so.*"
FILES_util-linux-libmount = "${base_libdir}/libmount.so.*"
@@ -135,7 +136,7 @@ do_install () {
mkdir -p ${D}${base_bindir}
sbinprogs="agetty ctrlaltdel cfdisk vipw vigr"
- sbinprogs_a="pivot_root hwclock mkswap mkfs.minix fsck.minix losetup swapon swapoff fdisk readprofile fsck blkid blockdev fstrim sulogin"
+ sbinprogs_a="pivot_root hwclock mkswap mkfs.minix fsck.minix losetup swapon swapoff fdisk readprofile fsck blkid blockdev fstrim sulogin switch_root"
usrbinprogs_a="chfn chsh hexdump last logger mesg newgrp renice utmpdump wall setsid chrt flock getopt"
binprogs_a="dmesg kill more umount mount login reset su"
@@ -167,9 +168,9 @@ inherit update-alternatives
ALTERNATIVE_PRIORITY = "100"
-ALTERNATIVE_${PN} = "dmesg kill more mkswap blockdev pivot_root"
+ALTERNATIVE_${PN} = "dmesg kill more mkswap blockdev pivot_root switch_root"
ALTERNATIVE_${PN} += "mkfs.minix hexdump last logger mesg renice wall"
-ALTERNATIVE_${PN} += "setsid chrt flock utmpdump eject getopt sulogin"
+ALTERNATIVE_${PN} += "setsid chrt flock utmpdump eject sulogin"
ALTERNATIVE_LINK_NAME[dmesg] = "${base_bindir}/dmesg"
ALTERNATIVE_LINK_NAME[kill] = "${base_bindir}/kill"
@@ -177,11 +178,10 @@ ALTERNATIVE_LINK_NAME[more] = "${base_bindir}/more"
ALTERNATIVE_LINK_NAME[mkswap] = "${base_sbindir}/mkswap"
ALTERNATIVE_LINK_NAME[blockdev] = "${base_sbindir}/blockdev"
ALTERNATIVE_LINK_NAME[pivot_root] = "${base_sbindir}/pivot_root"
+ALTERNATIVE_LINK_NAME[switch_root] = "${base_sbindir}/switch_root"
ALTERNATIVE_LINK_NAME[mkfs.minix] = "${base_sbindir}/mkfs.minix"
ALTERNATIVE_LINK_NAME[eject] = "${bindir}/eject"
-ALTERNATIVE_LINK_NAME[getopt] = "${base_bindir}/getopt"
ALTERNATIVE_LINK_NAME[sulogin] = "${base_sbindir}/sulogin"
-ALTERNATIVE_TARGET[getopt] = "${bindir}/getopt"
ALTERNATIVE_${PN}-doc = "mountpoint.1 last.1 mesg.1 wall.1 sulogin.8 utmpdump.1 reset.1"
@@ -233,6 +233,8 @@ ALTERNATIVE_util-linux-reset = "reset"
ALTERNATIVE_LINK_NAME[reset] = "${bindir}/reset"
ALTERNATIVE_TARGET[reset] = "${base_bindir}/reset"
+ALTERNATIVE_util-linux-getopt = "getopt"
+
BBCLASSEXTEND = "native nativesdk"
python do_package_prepend () {
diff --git a/meta/recipes-core/volatile-binds/volatile-binds.bb b/meta/recipes-core/volatile-binds/volatile-binds.bb
index 4080ff7ef2..694ba1e487 100644
--- a/meta/recipes-core/volatile-binds/volatile-binds.bb
+++ b/meta/recipes-core/volatile-binds/volatile-binds.bb
@@ -28,7 +28,7 @@ def volatile_systemd_services(d):
services.append("%s.service" % what[1:].replace("/", "-"))
return " ".join(services)
-SYSTEMD_SERVICE_volatile-binds = "${@volatile_systemd_services(d)}"
+SYSTEMD_SERVICE_${PN} = "${@volatile_systemd_services(d)}"
FILES_${PN} += "${systemd_unitdir}/system/*.service"
diff --git a/meta/recipes-core/zlib/zlib-1.2.8/ldflags-tests.patch b/meta/recipes-core/zlib/zlib-1.2.8/ldflags-tests.patch
new file mode 100644
index 0000000000..650794f504
--- /dev/null
+++ b/meta/recipes-core/zlib/zlib-1.2.8/ldflags-tests.patch
@@ -0,0 +1,45 @@
+Obey LDFLAGS for tests
+
+Signed-off-by: Christopher Larson <chris_larson@mentor.com>
+Upstream-Status: Pending
+
+--- zlib-1.2.8.orig/Makefile.in
++++ zlib-1.2.8/Makefile.in
+@@ -26,7 +26,7 @@ CFLAGS=-O
+
+ SFLAGS=-O
+ LDFLAGS=
+-TEST_LDFLAGS=-L. libz.a
++TEST_LDFLAGS=-L. $(LDFLAGS)
+ LDSHARED=$(CC)
+ CPP=$(CC) -E
+
+@@ -176,22 +176,22 @@ placebo $(SHAREDLIBV): $(PIC_OBJS) libz.
+ -@rmdir objs
+
+ example$(EXE): example.o $(STATICLIB)
+- $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS)
++ $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS) $(STATICLIB)
+
+ minigzip$(EXE): minigzip.o $(STATICLIB)
+- $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS)
++ $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS) $(STATICLIB)
+
+ examplesh$(EXE): example.o $(SHAREDLIBV)
+- $(CC) $(CFLAGS) -o $@ example.o -L. $(SHAREDLIBV)
++ $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS) $(SHAREDLIBV)
+
+ minigzipsh$(EXE): minigzip.o $(SHAREDLIBV)
+- $(CC) $(CFLAGS) -o $@ minigzip.o -L. $(SHAREDLIBV)
++ $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS) $(SHAREDLIBV)
+
+ example64$(EXE): example64.o $(STATICLIB)
+- $(CC) $(CFLAGS) -o $@ example64.o $(TEST_LDFLAGS)
++ $(CC) $(CFLAGS) -o $@ example64.o $(TEST_LDFLAGS) $(STATICLIB)
+
+ minigzip64$(EXE): minigzip64.o $(STATICLIB)
+- $(CC) $(CFLAGS) -o $@ minigzip64.o $(TEST_LDFLAGS)
++ $(CC) $(CFLAGS) -o $@ minigzip64.o $(TEST_LDFLAGS) $(STATICLIB)
+
+ install-libs: $(LIBS)
+ -@if [ ! -d $(DESTDIR)$(exec_prefix) ]; then mkdir -p $(DESTDIR)$(exec_prefix); fi
diff --git a/meta/recipes-core/zlib/zlib_1.2.8.bb b/meta/recipes-core/zlib/zlib_1.2.8.bb
index bdfa045b7b..ca0ba3ff6e 100644
--- a/meta/recipes-core/zlib/zlib_1.2.8.bb
+++ b/meta/recipes-core/zlib/zlib_1.2.8.bb
@@ -10,6 +10,7 @@ LIC_FILES_CHKSUM = "file://zlib.h;beginline=4;endline=23;md5=fde612df1e5933c428b
SRC_URI = "http://www.zlib.net/${BPN}-${PV}.tar.xz \
file://remove.ldconfig.call.patch \
file://Makefile-runtests.patch \
+ file://ldflags-tests.patch \
file://run-ptest \
"
diff --git a/meta/recipes-devtools/apt/apt-0.9.9.4/apt-0.9.9.4-CVE-2014-0478.patch b/meta/recipes-devtools/apt/apt-0.9.9.4/apt-0.9.9.4-CVE-2014-0478.patch
new file mode 100644
index 0000000000..79a6897572
--- /dev/null
+++ b/meta/recipes-devtools/apt/apt-0.9.9.4/apt-0.9.9.4-CVE-2014-0478.patch
@@ -0,0 +1,193 @@
+This patch comes from:
+https://bugs.debian.org/cgi-bin/bugreport.cgi?msg=73;filename=apt_0.9.7.9%2Bdeb7u2.debdiff;att=1;bug=749795
+
+Upstream-Status: Backport
+
+Signed-off-by: Wenlin Kang <wenlin.kang@windriver.com>
+Signed-off-by: Chong Lu <Chong.Lu@windriver.com>
+
+diff -uarN apt-0.9.9.4-org/cmdline/apt-get.cc apt-0.9.9.4/cmdline/apt-get.cc
+--- apt-0.9.9.4-org/cmdline/apt-get.cc 2014-08-29 15:37:42.587156134 +0800
++++ apt-0.9.9.4/cmdline/apt-get.cc 2014-08-29 15:51:16.672334086 +0800
+@@ -1046,25 +1046,8 @@
+ return true;
+ }
+ /*}}}*/
+-// CheckAuth - check if each download comes form a trusted source /*{{{*/
+-// ---------------------------------------------------------------------
+-/* */
+-static bool CheckAuth(pkgAcquire& Fetcher)
++static bool AuthPrompt(std::string UntrustedList, bool const PromptUser)
+ {
+- string UntrustedList;
+- for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I < Fetcher.ItemsEnd(); ++I)
+- {
+- if (!(*I)->IsTrusted())
+- {
+- UntrustedList += string((*I)->ShortDesc()) + " ";
+- }
+- }
+-
+- if (UntrustedList == "")
+- {
+- return true;
+- }
+-
+ ShowList(c2out,_("WARNING: The following packages cannot be authenticated!"),UntrustedList,"");
+
+ if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true)
+@@ -1073,6 +1056,9 @@
+ return true;
+ }
+
++ if (PromptUser == false)
++ return _error->Error(_("Some packages could not be authenticated"));
++
+ if (_config->FindI("quiet",0) < 2
+ && _config->FindB("APT::Get::Assume-Yes",false) == false)
+ {
+@@ -1090,6 +1076,28 @@
+ return _error->Error(_("There are problems and -y was used without --force-yes"));
+ }
+ /*}}}*/
++// CheckAuth - check if each download comes form a trusted source /*{{{*/
++// ---------------------------------------------------------------------
++/* */
++static bool CheckAuth(pkgAcquire& Fetcher, bool PromptUser=true)
++{
++ string UntrustedList;
++ for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I < Fetcher.ItemsEnd(); ++I)
++ {
++ if (!(*I)->IsTrusted())
++ {
++ UntrustedList += string((*I)->ShortDesc()) + " ";
++ }
++ }
++
++ if (UntrustedList == "")
++ {
++ return true;
++ }
++
++ return AuthPrompt(UntrustedList, PromptUser);
++}
++
+ // InstallPackages - Actually download and install the packages /*{{{*/
+ // ---------------------------------------------------------------------
+ /* This displays the informative messages describing what is going to
+@@ -2482,6 +2490,7 @@
+
+ // Load the requestd sources into the fetcher
+ unsigned J = 0;
++ std::string UntrustedList;
+ for (const char **I = CmdL.FileList + 1; *I != 0; I++, J++)
+ {
+ string Src;
+@@ -2491,7 +2500,10 @@
+ delete[] Dsc;
+ return _error->Error(_("Unable to find a source package for %s"),Src.c_str());
+ }
+-
++
++ if (Last->Index().IsTrusted() == false)
++ UntrustedList += Src + " ";
++
+ string srec = Last->AsStr();
+ string::size_type pos = srec.find("\nVcs-");
+ while (pos != string::npos)
+@@ -2575,7 +2587,11 @@
+ Last->Index().SourceInfo(*Last,*I),Src);
+ }
+ }
+-
++
++ // check authentication status of the source as well
++ if (UntrustedList != "" && !AuthPrompt(UntrustedList, false))
++ return false;
++
+ // Display statistics
+ unsigned long long FetchBytes = Fetcher.FetchNeeded();
+ unsigned long long FetchPBytes = Fetcher.PartialPresent();
+diff -uarN apt-0.9.9.4-org/test/integration/framework apt-0.9.9.4/test/integration/framework
+--- apt-0.9.9.4-org/test/integration/framework 2014-08-29 15:37:42.623156154 +0800
++++ apt-0.9.9.4/test/integration/framework 2014-08-29 15:55:23.592197940 +0800
+@@ -151,7 +151,7 @@
+ mkdir rootdir aptarchive keys
+ cd rootdir
+ mkdir -p etc/apt/apt.conf.d etc/apt/sources.list.d etc/apt/trusted.gpg.d etc/apt/preferences.d
+- mkdir -p var/cache var/lib var/log
++ mkdir -p var/cache var/lib var/log tmp
+ mkdir -p var/lib/dpkg/info var/lib/dpkg/updates var/lib/dpkg/triggers
+ touch var/lib/dpkg/available
+ mkdir -p usr/lib/apt
+@@ -910,3 +910,35 @@
+ local IGNORE
+ read IGNORE
+ }
++
++testsuccess() {
++ if [ "$1" = '--nomsg' ]; then
++ shift
++ else
++ msgtest 'Test for successful execution of' "$*"
++ fi
++ local OUTPUT="${TMPWORKINGDIRECTORY}/rootdir/tmp/testsuccess.output"
++ if $@ >${OUTPUT} 2>&1; then
++ msgpass
++ else
++ echo >&2
++ cat >&2 $OUTPUT
++ msgfail
++ fi
++}
++
++testfailure() {
++ if [ "$1" = '--nomsg' ]; then
++ shift
++ else
++ msgtest 'Test for failure in execution of' "$*"
++ fi
++ local OUTPUT="${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output"
++ if $@ >${OUTPUT} 2>&1; then
++ echo >&2
++ cat >&2 $OUTPUT
++ msgfail
++ else
++ msgpass
++ fi
++}
+diff -uarN apt-0.9.9.4-org/test/integration/test-apt-get-source-authenticated apt-0.9.9.4/test/integration/test-apt-get-source-authenticated
+--- apt-0.9.9.4-org/test/integration/test-apt-get-source-authenticated 1970-01-01 08:00:00.000000000 +0800
++++ apt-0.9.9.4/test/integration/test-apt-get-source-authenticated 2014-08-29 15:58:06.137156796 +0800
+@@ -0,0 +1,31 @@
++#!/bin/sh
++#
++# Regression test for debian bug #749795. Ensure that we fail with
++# an error if apt-get source foo will download a source that comes
++# from an unauthenticated repository
++#
++set -e
++
++TESTDIR=$(readlink -f $(dirname $0))
++. $TESTDIR/framework
++
++setupenvironment
++configarchitecture "i386"
++
++# a "normal" package with source and binary
++buildsimplenativepackage 'foo' 'all' '2.0'
++
++setupaptarchive --no-update
++
++APTARCHIVE=$(readlink -f ./aptarchive)
++rm -f $APTARCHIVE/dists/unstable/*Release*
++
++# update without authenticated InRelease file
++testsuccess aptget update
++
++# this all should fail
++testfailure aptget install -y foo
++testfailure aptget source foo
++
++# allow overriding the warning
++testsuccess aptget source --allow-unauthenticated foo
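For context, the behaviour this backport enforces looks roughly as follows on a target whose repository lacks signed Release/InRelease files (commands are illustrative; the error text is the string added in the apt-get.cc hunk above):

    apt-get update                                  # still succeeds, sources are marked untrusted
    apt-get source foo
    # E: Some packages could not be authenticated
    apt-get source --allow-unauthenticated foo      # the explicit opt-out keeps working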
diff --git a/meta/recipes-devtools/apt/apt-package.inc b/meta/recipes-devtools/apt/apt-package.inc
index 0897d3a9fc..a553aa21fe 100644
--- a/meta/recipes-devtools/apt/apt-package.inc
+++ b/meta/recipes-devtools/apt/apt-package.inc
@@ -42,6 +42,7 @@ FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}"
do_install () {
set -x
install -d ${D}${bindir}
+ install -m 0755 bin/apt-key ${D}${bindir}/
install -m 0755 bin/apt-cdrom ${D}${bindir}/
install -m 0755 bin/apt-get ${D}${bindir}/
install -m 0755 bin/apt-config ${D}${bindir}/
diff --git a/meta/recipes-devtools/apt/apt.inc b/meta/recipes-devtools/apt/apt.inc
index b528c00fd8..378021a327 100644
--- a/meta/recipes-devtools/apt/apt.inc
+++ b/meta/recipes-devtools/apt/apt.inc
@@ -11,6 +11,7 @@ SRC_URI = "${DEBIAN_MIRROR}/main/a/apt/apt_${PV}.tar.gz \
file://truncate-filename.patch \
file://nodoc.patch \
file://disable-configure-in-makefile.patch \
+ file://apt-0.9.9.4-CVE-2014-0478.patch \
"
inherit autotools gettext
diff --git a/meta/recipes-devtools/apt/apt_0.9.9.4.bb b/meta/recipes-devtools/apt/apt_0.9.9.4.bb
index ae9ed6cb66..77cbff1c0c 100644
--- a/meta/recipes-devtools/apt/apt_0.9.9.4.bb
+++ b/meta/recipes-devtools/apt/apt_0.9.9.4.bb
@@ -1,5 +1,5 @@
DEPENDS = "curl db"
-RDEPENDS_${PN} = "dpkg"
+RDEPENDS_${PN} = "dpkg bash debianutils"
LIC_FILES_CHKSUM = "file://COPYING.GPL;md5=0636e73ff0215e8d672dc4c32c317bb3"
require apt.inc
diff --git a/meta/recipes-devtools/autoconf/autoconf_2.69.bb b/meta/recipes-devtools/autoconf/autoconf_2.69.bb
index 6614a7b780..41a17d2913 100644
--- a/meta/recipes-devtools/autoconf/autoconf_2.69.bb
+++ b/meta/recipes-devtools/autoconf/autoconf_2.69.bb
@@ -22,6 +22,6 @@ SRC_URI[sha256sum] = "954bd69b391edc12d6a4a51a2dd1476543da5c6bbf05a95b59dc0dd6fd
SRC_URI_append_class-native = " file://fix_path_xtra.patch"
-EXTRA_OECONF += "ac_cv_path_M4=m4"
+EXTRA_OECONF += "ac_cv_path_M4=m4 ac_cv_prog_TEST_EMACS=no"
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/autogen/autogen-native_5.18.3.bb b/meta/recipes-devtools/autogen/autogen-native_5.18.3.bb
index 61bd21fc38..abc0a42102 100644
--- a/meta/recipes-devtools/autogen/autogen-native_5.18.3.bb
+++ b/meta/recipes-devtools/autogen/autogen-native_5.18.3.bb
@@ -18,7 +18,7 @@ SRC_URI[sha256sum] = "73d05a689105eb9b8be54f32498c99ddbd360776fc61cf45be6a2a4eb4
DEPENDS = "guile-native libtool-native libxml2-native"
-inherit autotools texinfo native
+inherit autotools texinfo native pkgconfig
# autogen-native links against libguile which may have been relocated with sstate
# these environment variables ensure there isn't a relocation issue
diff --git a/meta/recipes-devtools/binutils/binutils-2.24.inc b/meta/recipes-devtools/binutils/binutils-2.24.inc
index 8f3216f2bf..63c928712e 100644
--- a/meta/recipes-devtools/binutils/binutils-2.24.inc
+++ b/meta/recipes-devtools/binutils/binutils-2.24.inc
@@ -32,6 +32,14 @@ SRC_URI = "\
file://replace_macros_with_static_inline.patch \
file://0001-Fix-MMIX-build-breakage-from-bfd_set_section_vma-cha.patch \
file://binutils-uninitialised-warning.patch \
+ file://binutils_CVE-2014-8484.patch \
+ file://binutils_CVE-2014-8485.patch \
+ file://binutils_CVE-2014-8501.patch \
+ file://binutils_CVE-2014-8502_1.patch \
+ file://binutils_CVE-2014-8502.patch \
+ file://binutils_CVE-2014-8503.patch \
+ file://binutils_CVE-2014-8504.patch \
+ file://binutils_CVE-2014-8737.patch \
"
SRC_URI[md5sum] = "e0f71a7b2ddab0f8612336ac81d9636b"
diff --git a/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8484.patch b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8484.patch
new file mode 100644
index 0000000000..e789499477
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8484.patch
@@ -0,0 +1,67 @@
+Upstream-Status: Backport
+
+CVE-2014-8484 fix.
+
+[YOCTO #7084]
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+From bd25671c6f202c4a5108883caa2adb24ff6f361f Mon Sep 17 00:00:00 2001
+From: Alan Modra <amodra@gmail.com>
+Date: Fri, 29 Aug 2014 10:36:29 +0930
+Subject: [PATCH] Report an error for S-records with less than the minimum
+ size
+
+ * srec.c (srec_scan): Revert last change. Report an error for
+ S-records with less than the minimum byte count.
+---
+ bfd/ChangeLog | 5 +++++
+ bfd/srec.c | 18 +++++++++++++++---
+ 2 files changed, 20 insertions(+), 3 deletions(-)
+
+Index: binutils-2.24/bfd/srec.c
+===================================================================
+--- binutils-2.24.orig/bfd/srec.c
++++ binutils-2.24/bfd/srec.c
+@@ -455,7 +455,7 @@ srec_scan (bfd *abfd)
+ {
+ file_ptr pos;
+ char hdr[3];
+- unsigned int bytes;
++ unsigned int bytes, min_bytes;
+ bfd_vma address;
+ bfd_byte *data;
+ unsigned char check_sum;
+@@ -478,6 +478,19 @@ srec_scan (bfd *abfd)
+ }
+
+ check_sum = bytes = HEX (hdr + 1);
++ min_bytes = 3;
++ if (hdr[0] == '2' || hdr[0] == '8')
++ min_bytes = 4;
++ else if (hdr[0] == '3' || hdr[0] == '7')
++ min_bytes = 5;
++ if (bytes < min_bytes)
++ {
++ (*_bfd_error_handler) (_("%B:%d: byte count %d too small\n"),
++ abfd, lineno, bytes);
++ bfd_set_error (bfd_error_bad_value);
++ goto error_return;
++ }
++
+ if (bytes * 2 > bufsize)
+ {
+ if (buf != NULL)
+Index: binutils-2.24/bfd/ChangeLog
+===================================================================
+--- binutils-2.24.orig/bfd/ChangeLog
++++ binutils-2.24/bfd/ChangeLog
+@@ -1,3 +1,8 @@
++2014-08-29 Alan Modra <amodra@gmail.com>
++
++ * srec.c (srec_scan): Revert last change. Report an error for
++ S-records with less than the minimum byte count.
++
+ 2013-12-02 Tristan Gingold <gingold@adacore.com>
+
+ * configure.in: Bump version to 2.24
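
As background for the min_bytes values introduced above (an aside, not part of the patch): the S-record byte count covers the address, data and checksum fields, and the address is 2 bytes wide for S1/S9 records, 3 bytes for S2/S8 and 4 bytes for S3/S7, so the smallest legal counts are 3, 4 and 5 respectively. A rough way to watch the new check fire with a patched objcopy (the exact diagnostic wording is not guaranteed):

    # 'S9' termination record claiming a byte count of 0x01, below the
    # 3-byte minimum of two address bytes plus the checksum
    printf 'S901FE\n' > bad.srec
    objcopy -I srec -O binary bad.srec bad.bin \
        || echo "rejected: byte count too small"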
diff --git a/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8485.patch b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8485.patch
new file mode 100644
index 0000000000..ec3308b4f4
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8485.patch
@@ -0,0 +1,102 @@
+Upstream-Status: Backport
+
+CVE-2014-8485 fix.
+
+[YOCTO #7084]
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+From 493a33860c71cac998f1a56d6d87d6faa801fbaa Mon Sep 17 00:00:00 2001
+From: Nick Clifton <nickc@redhat.com>
+Date: Mon, 27 Oct 2014 12:43:16 +0000
+Subject: [PATCH] This patch closes a potential security hole in applications
+ that use the bfd library to parse binaries containing maliciously corrupt
+ section group headers.
+
+ PR binutils/17510
+ * elf.c (setup_group): Improve handling of corrupt group
+ sections.
+---
+ bfd/ChangeLog | 6 ++++++
+ bfd/elf.c | 34 ++++++++++++++++++++++++++++++----
+ 2 files changed, 36 insertions(+), 4 deletions(-)
+
+Index: binutils-2.24/bfd/elf.c
+===================================================================
+--- binutils-2.24.orig/bfd/elf.c
++++ binutils-2.24/bfd/elf.c
+@@ -608,9 +608,10 @@ setup_group (bfd *abfd, Elf_Internal_Shd
+ if (shdr->contents == NULL)
+ {
+ _bfd_error_handler
+- (_("%B: Corrupt size field in group section header: 0x%lx"), abfd, shdr->sh_size);
++ (_("%B: corrupt size field in group section header: 0x%lx"), abfd, shdr->sh_size);
+ bfd_set_error (bfd_error_bad_value);
+- return FALSE;
++ -- num_group;
++ continue;
+ }
+
+ memset (shdr->contents, 0, amt);
+@@ -618,7 +619,16 @@ setup_group (bfd *abfd, Elf_Internal_Shd
+ if (bfd_seek (abfd, shdr->sh_offset, SEEK_SET) != 0
+ || (bfd_bread (shdr->contents, shdr->sh_size, abfd)
+ != shdr->sh_size))
+- return FALSE;
++ {
++ _bfd_error_handler
++ (_("%B: invalid size field in group section header: 0x%lx"), abfd, shdr->sh_size);
++ bfd_set_error (bfd_error_bad_value);
++ -- num_group;
++ /* PR 17510: If the group contents are even partially
++ corrupt, do not allow any of the contents to be used. */
++ memset (shdr->contents, 0, amt);
++ continue;
++ }
+
+ /* Translate raw contents, a flag word followed by an
+ array of elf section indices all in target byte order,
+@@ -651,6 +661,21 @@ setup_group (bfd *abfd, Elf_Internal_Shd
+ }
+ }
+ }
++
++ /* PR 17510: Corrupt binaries might contain invalid groups. */
++ if (num_group != (unsigned) elf_tdata (abfd)->num_group)
++ {
++ elf_tdata (abfd)->num_group = num_group;
++
++ /* If all groups are invalid then fail. */
++ if (num_group == 0)
++ {
++ elf_tdata (abfd)->group_sect_ptr = NULL;
++ elf_tdata (abfd)->num_group = num_group = -1;
++ (*_bfd_error_handler) (_("%B: no valid group sections found"), abfd);
++ bfd_set_error (bfd_error_bad_value);
++ }
++ }
+ }
+ }
+
+@@ -716,6 +741,7 @@ setup_group (bfd *abfd, Elf_Internal_Shd
+ {
+ (*_bfd_error_handler) (_("%B: no group info for section %A"),
+ abfd, newsect);
++ return FALSE;
+ }
+ return TRUE;
+ }
+Index: binutils-2.24/bfd/ChangeLog
+===================================================================
+--- binutils-2.24.orig/bfd/ChangeLog
++++ binutils-2.24/bfd/ChangeLog
+@@ -1,3 +1,9 @@
++2014-10-27 Nick Clifton <nickc@redhat.com>
++
++ PR binutils/17510
++ * elf.c (setup_group): Improve handling of corrupt group
++ sections.
++
+ 2014-08-29 Alan Modra <amodra@gmail.com>
+
+ * srec.c (srec_scan): Revert last change. Report an error for
diff --git a/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8501.patch b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8501.patch
new file mode 100644
index 0000000000..a48fe9b23b
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8501.patch
@@ -0,0 +1,60 @@
+Upstream-Status: Backport
+
+CVE-2014-8501 fix.
+
+[YOCTO #7084]
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+From 7e1e19887abd24aeb15066b141cdff5541e0ec8e Mon Sep 17 00:00:00 2001
+From: Nick Clifton <nickc@redhat.com>
+Date: Mon, 27 Oct 2014 14:45:06 +0000
+Subject: [PATCH] Fix a seg-fault in strings and other binutils when parsing a
+ corrupt PE executable with an invalid value in the NumberOfRvaAndSizes field
+ of the AOUT header.
+
+ PR binutils/17512
+ * peXXigen.c (_bfd_XXi_swap_aouthdr_in): Handle corrupt binaries
+ with an invalid value for NumberOfRvaAndSizes.
+---
+ bfd/ChangeLog | 4 ++++
+ bfd/peXXigen.c | 12 ++++++++++++
+ 2 files changed, 16 insertions(+)
+
+Index: binutils-2.24/bfd/peXXigen.c
+===================================================================
+--- binutils-2.24.orig/bfd/peXXigen.c
++++ binutils-2.24/bfd/peXXigen.c
+@@ -460,6 +460,18 @@ _bfd_XXi_swap_aouthdr_in (bfd * abfd,
+ {
+ int idx;
+
++ /* PR 17512: Corrupt PE binaries can cause seg-faults. */
++ if (a->NumberOfRvaAndSizes > 16)
++ {
++ (*_bfd_error_handler)
++ (_("%B: aout header specifies an invalid number of data-directory entries: %d"),
++ abfd, a->NumberOfRvaAndSizes);
++ /* Paranoia: If the number is corrupt, then assume that the
++ actual entries themselves might be corrupt as well. */
++ a->NumberOfRvaAndSizes = 0;
++ }
++
++
+ for (idx = 0; idx < a->NumberOfRvaAndSizes; idx++)
+ {
+ /* If data directory is empty, rva also should be 0. */
+Index: binutils-2.24/bfd/ChangeLog
+===================================================================
+--- binutils-2.24.orig/bfd/ChangeLog
++++ binutils-2.24/bfd/ChangeLog
+@@ -1,5 +1,9 @@
+ 2014-10-27 Nick Clifton <nickc@redhat.com>
+
++ PR binutils/17512
++ * peXXigen.c (_bfd_XXi_swap_aouthdr_in): Handle corrupt binaries
++ with an invalid value for NumberOfRvaAndSizes.
++
+ PR binutils/17510
+ * elf.c (setup_group): Improve handling of corrupt group
+ sections.
diff --git a/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8502.patch b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8502.patch
new file mode 100644
index 0000000000..05af65bad1
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8502.patch
@@ -0,0 +1,89 @@
+Upstream-Status: Backport
+
+CVE-2014-8502 fix.
+
+[YOCTO #7084]
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+From 5a4b0ccc20ba30caef53b01bee2c0aaa5b855339 Mon Sep 17 00:00:00 2001
+From: Nick Clifton <nickc@redhat.com>
+Date: Tue, 28 Oct 2014 15:42:56 +0000
+Subject: [PATCH] More fixes for corrupt binaries crashing the binutils.
+
+ PR binutils/17512
+ * elf.c (bfd_section_from_shdr): Allocate and free the recursion
+ detection table on a per-bfd basis.
+ * peXXigen.c (pe_print_edata): Handle binaries with a truncated
+ export table.
+---
+ bfd/ChangeLog | 8 ++++++++
+ bfd/elf.c | 16 +++++++++++++---
+ bfd/peXXigen.c | 9 +++++++++
+ 3 files changed, 30 insertions(+), 3 deletions(-)
+
+Index: binutils-2.24/bfd/peXXigen.c
+===================================================================
+--- binutils-2.24.orig/bfd/peXXigen.c
++++ binutils-2.24/bfd/peXXigen.c
+@@ -1438,6 +1438,15 @@ pe_print_edata (bfd * abfd, void * vfile
+ }
+ }
+
++ /* PR 17512: Handle corrupt PE binaries. */
++ if (datasize < 36)
++ {
++ fprintf (file,
++ _("\nThere is an export table in %s, but it is too small (%d)\n"),
++ section->name, (int) datasize);
++ return TRUE;
++ }
++
+ fprintf (file, _("\nThere is an export table in %s at 0x%lx\n"),
+ section->name, (unsigned long) addr);
+
+Index: binutils-2.24/bfd/elf.c
+===================================================================
+--- binutils-2.24.orig/bfd/elf.c
++++ binutils-2.24/bfd/elf.c
+@@ -1576,6 +1576,7 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ const char *name;
+ bfd_boolean ret = TRUE;
+ static bfd_boolean * sections_being_created = NULL;
++ static bfd * sections_being_created_abfd = NULL;
+ static unsigned int nesting = 0;
+
+ if (shindex >= elf_numsections (abfd))
+@@ -1588,13 +1589,20 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ loop. Detect this here, by refusing to load a section that we are
+ already in the process of loading. We only trigger this test if
+ we have nested at least three sections deep as normal ELF binaries
+- can expect to recurse at least once. */
++ can expect to recurse at least once.
++
++ FIXME: It would be better if this array was attached to the bfd,
++ rather than being held in a static pointer. */
++
++ if (sections_being_created_abfd != abfd)
++ sections_being_created = NULL;
+
+ if (sections_being_created == NULL)
+ {
+ /* FIXME: It would be more efficient to attach this array to the bfd somehow. */
+ sections_being_created = (bfd_boolean *)
+ bfd_zalloc (abfd, elf_numsections (abfd) * sizeof (bfd_boolean));
++ sections_being_created_abfd = abfd;
+ }
+ if (sections_being_created [shindex])
+ {
+@@ -2098,7 +2106,10 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ if (sections_being_created)
+ sections_being_created [shindex] = FALSE;
+ if (-- nesting == 0)
++ {
+ sections_being_created = NULL;
++ sections_being_created_abfd = abfd;
++ }
+ return ret;
+ }
+
diff --git a/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8502_1.patch b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8502_1.patch
new file mode 100644
index 0000000000..9e0c9c8b3c
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8502_1.patch
@@ -0,0 +1,523 @@
+Upstream-Status: Backport
+
+CVE-2014-8502 supporting patch.
+
+[YOCTO #7084]
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+From bf67003b4567600ed3022a439207ac8f26454f91 Mon Sep 17 00:00:00 2001
+From: Nick Clifton <nickc@redhat.com>
+Date: Mon, 27 Oct 2014 18:05:37 +0000
+Subject: [PATCH] This fixes more seg-faults in tools like "strings" and
+ "objdump" when presented with corrupt binaries.
+
+ PR binutils/17512
+ * elf.c (bfd_section_from_shdr): Detect and warn about ELF
+ binaries with a group of sections linked by the string table
+ indices.
+ * peXXigen.c (pe_print_edata): Detect out of range rvas and
+ entry counts for the Export Address table, Name Pointer table
+ and Ordinal table.
+---
+ bfd/ChangeLog | 5 ++
+ bfd/elf.c | 194 ++++++++++++++++++++++++++++++++++++++-------------------
+ bfd/peXXigen.c | 18 +++++-
+ 3 files changed, 150 insertions(+), 67 deletions(-)
+
+Index: binutils-2.24/bfd/elf.c
+===================================================================
+--- binutils-2.24.orig/bfd/elf.c
++++ binutils-2.24/bfd/elf.c
+@@ -1574,38 +1574,67 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ Elf_Internal_Ehdr *ehdr;
+ const struct elf_backend_data *bed;
+ const char *name;
++ bfd_boolean ret = TRUE;
++ static bfd_boolean * sections_being_created = NULL;
++ static unsigned int nesting = 0;
+
+ if (shindex >= elf_numsections (abfd))
+ return FALSE;
+
++ if (++ nesting > 3)
++ {
++ /* PR17512: A corrupt ELF binary might contain a recursive group of
++ sections, each with string indices pointing to the next in the
++ loop. Detect this here, by refusing to load a section that we are
++ already in the process of loading. We only trigger this test if
++ we have nested at least three sections deep as normal ELF binaries
++ can expect to recurse at least once. */
++
++ if (sections_being_created == NULL)
++ {
++ /* FIXME: It would be more efficient to attach this array to the bfd somehow. */
++ sections_being_created = (bfd_boolean *)
++ bfd_zalloc (abfd, elf_numsections (abfd) * sizeof (bfd_boolean));
++ }
++ if (sections_being_created [shindex])
++ {
++ (*_bfd_error_handler)
++ (_("%B: warning: loop in section dependencies detected"), abfd);
++ return FALSE;
++ }
++ sections_being_created [shindex] = TRUE;
++ }
++
+ hdr = elf_elfsections (abfd)[shindex];
+ ehdr = elf_elfheader (abfd);
+ name = bfd_elf_string_from_elf_section (abfd, ehdr->e_shstrndx,
+ hdr->sh_name);
+ if (name == NULL)
+- return FALSE;
++ goto fail;
+
+ bed = get_elf_backend_data (abfd);
+ switch (hdr->sh_type)
+ {
+ case SHT_NULL:
+ /* Inactive section. Throw it away. */
+- return TRUE;
++ goto success;
+
+- case SHT_PROGBITS: /* Normal section with contents. */
+- case SHT_NOBITS: /* .bss section. */
+- case SHT_HASH: /* .hash section. */
+- case SHT_NOTE: /* .note section. */
++ case SHT_PROGBITS: /* Normal section with contents. */
++ case SHT_NOBITS: /* .bss section. */
++ case SHT_HASH: /* .hash section. */
++ case SHT_NOTE: /* .note section. */
+ case SHT_INIT_ARRAY: /* .init_array section. */
+ case SHT_FINI_ARRAY: /* .fini_array section. */
+ case SHT_PREINIT_ARRAY: /* .preinit_array section. */
+ case SHT_GNU_LIBLIST: /* .gnu.liblist section. */
+ case SHT_GNU_HASH: /* .gnu.hash section. */
+- return _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ ret = _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ goto success;
+
+ case SHT_DYNAMIC: /* Dynamic linking information. */
+ if (! _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex))
+- return FALSE;
++ goto fail;
++
+ if (hdr->sh_link > elf_numsections (abfd))
+ {
+ /* PR 10478: Accept Solaris binaries with a sh_link
+@@ -1619,11 +1648,11 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ break;
+ /* Otherwise fall through. */
+ default:
+- return FALSE;
++ goto fail;
+ }
+ }
+ else if (elf_elfsections (abfd)[hdr->sh_link] == NULL)
+- return FALSE;
++ goto fail;
+ else if (elf_elfsections (abfd)[hdr->sh_link]->sh_type != SHT_STRTAB)
+ {
+ Elf_Internal_Shdr *dynsymhdr;
+@@ -1652,24 +1681,26 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ }
+ }
+ }
+- break;
++ goto success;
+
+- case SHT_SYMTAB: /* A symbol table */
++ case SHT_SYMTAB: /* A symbol table. */
+ if (elf_onesymtab (abfd) == shindex)
+- return TRUE;
++ goto success;
+
+ if (hdr->sh_entsize != bed->s->sizeof_sym)
+- return FALSE;
++ goto fail;
++
+ if (hdr->sh_info * hdr->sh_entsize > hdr->sh_size)
+ {
+ if (hdr->sh_size != 0)
+- return FALSE;
++ goto fail;
+ /* Some assemblers erroneously set sh_info to one with a
+ zero sh_size. ld sees this as a global symbol count
+ of (unsigned) -1. Fix it here. */
+ hdr->sh_info = 0;
+- return TRUE;
++ goto success;
+ }
++
+ BFD_ASSERT (elf_onesymtab (abfd) == 0);
+ elf_onesymtab (abfd) = shindex;
+ elf_tdata (abfd)->symtab_hdr = *hdr;
+@@ -1686,7 +1717,7 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ && (abfd->flags & DYNAMIC) != 0
+ && ! _bfd_elf_make_section_from_shdr (abfd, hdr, name,
+ shindex))
+- return FALSE;
++ goto fail;
+
+ /* Go looking for SHT_SYMTAB_SHNDX too, since if there is one we
+ can't read symbols without that section loaded as well. It
+@@ -1712,26 +1743,29 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ break;
+ }
+ if (i != shindex)
+- return bfd_section_from_shdr (abfd, i);
++ ret = bfd_section_from_shdr (abfd, i);
+ }
+- return TRUE;
++ goto success;
+
+- case SHT_DYNSYM: /* A dynamic symbol table */
++ case SHT_DYNSYM: /* A dynamic symbol table. */
+ if (elf_dynsymtab (abfd) == shindex)
+- return TRUE;
++ goto success;
+
+ if (hdr->sh_entsize != bed->s->sizeof_sym)
+- return FALSE;
++ goto fail;
++
+ if (hdr->sh_info * hdr->sh_entsize > hdr->sh_size)
+ {
+ if (hdr->sh_size != 0)
+- return FALSE;
++ goto fail;
++
+ /* Some linkers erroneously set sh_info to one with a
+ zero sh_size. ld sees this as a global symbol count
+ of (unsigned) -1. Fix it here. */
+ hdr->sh_info = 0;
+- return TRUE;
++ goto success;
+ }
++
+ BFD_ASSERT (elf_dynsymtab (abfd) == 0);
+ elf_dynsymtab (abfd) = shindex;
+ elf_tdata (abfd)->dynsymtab_hdr = *hdr;
+@@ -1740,34 +1774,38 @@ bfd_section_from_shdr (bfd *abfd, unsign
+
+ /* Besides being a symbol table, we also treat this as a regular
+ section, so that objcopy can handle it. */
+- return _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ ret = _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ goto success;
+
+- case SHT_SYMTAB_SHNDX: /* Symbol section indices when >64k sections */
++ case SHT_SYMTAB_SHNDX: /* Symbol section indices when >64k sections. */
+ if (elf_symtab_shndx (abfd) == shindex)
+- return TRUE;
++ goto success;
+
+ BFD_ASSERT (elf_symtab_shndx (abfd) == 0);
+ elf_symtab_shndx (abfd) = shindex;
+ elf_tdata (abfd)->symtab_shndx_hdr = *hdr;
+ elf_elfsections (abfd)[shindex] = &elf_tdata (abfd)->symtab_shndx_hdr;
+- return TRUE;
++ goto success;
+
+- case SHT_STRTAB: /* A string table */
++ case SHT_STRTAB: /* A string table. */
+ if (hdr->bfd_section != NULL)
+- return TRUE;
++ goto success;
++
+ if (ehdr->e_shstrndx == shindex)
+ {
+ elf_tdata (abfd)->shstrtab_hdr = *hdr;
+ elf_elfsections (abfd)[shindex] = &elf_tdata (abfd)->shstrtab_hdr;
+- return TRUE;
++ goto success;
+ }
++
+ if (elf_elfsections (abfd)[elf_onesymtab (abfd)]->sh_link == shindex)
+ {
+ symtab_strtab:
+ elf_tdata (abfd)->strtab_hdr = *hdr;
+ elf_elfsections (abfd)[shindex] = &elf_tdata (abfd)->strtab_hdr;
+- return TRUE;
++ goto success;
+ }
++
+ if (elf_elfsections (abfd)[elf_dynsymtab (abfd)]->sh_link == shindex)
+ {
+ dynsymtab_strtab:
+@@ -1776,8 +1814,9 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ elf_elfsections (abfd)[shindex] = hdr;
+ /* We also treat this as a regular section, so that objcopy
+ can handle it. */
+- return _bfd_elf_make_section_from_shdr (abfd, hdr, name,
+- shindex);
++ ret = _bfd_elf_make_section_from_shdr (abfd, hdr, name,
++ shindex);
++ goto success;
+ }
+
+ /* If the string table isn't one of the above, then treat it as a
+@@ -1795,9 +1834,9 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ {
+ /* Prevent endless recursion on broken objects. */
+ if (i == shindex)
+- return FALSE;
++ goto fail;
+ if (! bfd_section_from_shdr (abfd, i))
+- return FALSE;
++ goto fail;
+ if (elf_onesymtab (abfd) == i)
+ goto symtab_strtab;
+ if (elf_dynsymtab (abfd) == i)
+@@ -1805,7 +1844,8 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ }
+ }
+ }
+- return _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ ret = _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ goto success;
+
+ case SHT_REL:
+ case SHT_RELA:
+@@ -1820,7 +1860,7 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ if (hdr->sh_entsize
+ != (bfd_size_type) (hdr->sh_type == SHT_REL
+ ? bed->s->sizeof_rel : bed->s->sizeof_rela))
+- return FALSE;
++ goto fail;
+
+ /* Check for a bogus link to avoid crashing. */
+ if (hdr->sh_link >= num_sec)
+@@ -1828,8 +1868,9 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ ((*_bfd_error_handler)
+ (_("%B: invalid link %lu for reloc section %s (index %u)"),
+ abfd, hdr->sh_link, name, shindex));
+- return _bfd_elf_make_section_from_shdr (abfd, hdr, name,
+- shindex);
++ ret = _bfd_elf_make_section_from_shdr (abfd, hdr, name,
++ shindex);
++ goto success;
+ }
+
+ /* For some incomprehensible reason Oracle distributes
+@@ -1870,7 +1911,7 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ if ((elf_elfsections (abfd)[hdr->sh_link]->sh_type == SHT_SYMTAB
+ || elf_elfsections (abfd)[hdr->sh_link]->sh_type == SHT_DYNSYM)
+ && ! bfd_section_from_shdr (abfd, hdr->sh_link))
+- return FALSE;
++ goto fail;
+
+ /* If this reloc section does not use the main symbol table we
+ don't treat it as a reloc section. BFD can't adequately
+@@ -1885,14 +1926,18 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ || hdr->sh_info >= num_sec
+ || elf_elfsections (abfd)[hdr->sh_info]->sh_type == SHT_REL
+ || elf_elfsections (abfd)[hdr->sh_info]->sh_type == SHT_RELA)
+- return _bfd_elf_make_section_from_shdr (abfd, hdr, name,
+- shindex);
++ {
++ ret = _bfd_elf_make_section_from_shdr (abfd, hdr, name,
++ shindex);
++ goto success;
++ }
+
+ if (! bfd_section_from_shdr (abfd, hdr->sh_info))
+- return FALSE;
++ goto fail;
++
+ target_sect = bfd_section_from_elf_index (abfd, hdr->sh_info);
+ if (target_sect == NULL)
+- return FALSE;
++ goto fail;
+
+ esdt = elf_section_data (target_sect);
+ if (hdr->sh_type == SHT_RELA)
+@@ -1904,7 +1949,7 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ amt = sizeof (*hdr2);
+ hdr2 = (Elf_Internal_Shdr *) bfd_alloc (abfd, amt);
+ if (hdr2 == NULL)
+- return FALSE;
++ goto fail;
+ *hdr2 = *hdr;
+ *p_hdr = hdr2;
+ elf_elfsections (abfd)[shindex] = hdr2;
+@@ -1920,34 +1965,40 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ target_sect->use_rela_p = 1;
+ }
+ abfd->flags |= HAS_RELOC;
+- return TRUE;
++ goto success;
+ }
+
+ case SHT_GNU_verdef:
+ elf_dynverdef (abfd) = shindex;
+ elf_tdata (abfd)->dynverdef_hdr = *hdr;
+- return _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ ret = _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ goto success;
+
+ case SHT_GNU_versym:
+ if (hdr->sh_entsize != sizeof (Elf_External_Versym))
+- return FALSE;
++ goto fail;
++
+ elf_dynversym (abfd) = shindex;
+ elf_tdata (abfd)->dynversym_hdr = *hdr;
+- return _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ ret = _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ goto success;
+
+ case SHT_GNU_verneed:
+ elf_dynverref (abfd) = shindex;
+ elf_tdata (abfd)->dynverref_hdr = *hdr;
+- return _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ ret = _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ goto success;
+
+ case SHT_SHLIB:
+- return TRUE;
++ goto success;
+
+ case SHT_GROUP:
+ if (! IS_VALID_GROUP_SECTION_HEADER (hdr, GRP_ENTRY_SIZE))
+- return FALSE;
++ goto fail;
++
+ if (!_bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex))
+- return FALSE;
++ goto fail;
++
+ if (hdr->contents != NULL)
+ {
+ Elf_Internal_Group *idx = (Elf_Internal_Group *) hdr->contents;
+@@ -1973,7 +2024,7 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ }
+ }
+ }
+- break;
++ goto success;
+
+ default:
+ /* Possibly an attributes section. */
+@@ -1981,14 +2032,14 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ || hdr->sh_type == bed->obj_attrs_section_type)
+ {
+ if (! _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex))
+- return FALSE;
++ goto fail;
+ _bfd_elf_parse_attributes (abfd, hdr);
+- return TRUE;
++ goto success;
+ }
+
+ /* Check for any processor-specific section types. */
+ if (bed->elf_backend_section_from_shdr (abfd, hdr, name, shindex))
+- return TRUE;
++ goto success;
+
+ if (hdr->sh_type >= SHT_LOUSER && hdr->sh_type <= SHT_HIUSER)
+ {
+@@ -2000,9 +2051,12 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ "specific section `%s' [0x%8x]"),
+ abfd, name, hdr->sh_type);
+ else
+- /* Allow sections reserved for applications. */
+- return _bfd_elf_make_section_from_shdr (abfd, hdr, name,
+- shindex);
++ {
++ /* Allow sections reserved for applications. */
++ ret = _bfd_elf_make_section_from_shdr (abfd, hdr, name,
++ shindex);
++ goto success;
++ }
+ }
+ else if (hdr->sh_type >= SHT_LOPROC
+ && hdr->sh_type <= SHT_HIPROC)
+@@ -2023,8 +2077,11 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ "`%s' [0x%8x]"),
+ abfd, name, hdr->sh_type);
+ else
+- /* Otherwise it should be processed. */
+- return _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ {
++ /* Otherwise it should be processed. */
++ ret = _bfd_elf_make_section_from_shdr (abfd, hdr, name, shindex);
++ goto success;
++ }
+ }
+ else
+ /* FIXME: We should handle this section. */
+@@ -2032,10 +2089,17 @@ bfd_section_from_shdr (bfd *abfd, unsign
+ (_("%B: don't know how to handle section `%s' [0x%8x]"),
+ abfd, name, hdr->sh_type);
+
+- return FALSE;
++ goto fail;
+ }
+
+- return TRUE;
++ fail:
++ ret = FALSE;
++ success:
++ if (sections_being_created)
++ sections_being_created [shindex] = FALSE;
++ if (-- nesting == 0)
++ sections_being_created = NULL;
++ return ret;
+ }
+
+ /* Return the local symbol specified by ABFD, R_SYMNDX. */
+Index: binutils-2.24/bfd/peXXigen.c
+===================================================================
+--- binutils-2.24.orig/bfd/peXXigen.c
++++ binutils-2.24/bfd/peXXigen.c
+@@ -1528,7 +1528,12 @@ pe_print_edata (bfd * abfd, void * vfile
+ _("\nExport Address Table -- Ordinal Base %ld\n"),
+ edt.base);
+
+- for (i = 0; i < edt.num_functions; ++i)
++ /* PR 17512: Handle corrupt PE binaries. */
++ if (edt.eat_addr + (edt.num_functions * 4) - adj >= datasize)
++ fprintf (file, _("\tInvalid Export Address Table rva (0x%lx) or entry count (0x%lx)\n"),
++ (long) edt.eat_addr,
++ (long) edt.num_functions);
++ else for (i = 0; i < edt.num_functions; ++i)
+ {
+ bfd_vma eat_member = bfd_get_32 (abfd,
+ data + edt.eat_addr + (i * 4) - adj);
+@@ -1564,7 +1569,16 @@ pe_print_edata (bfd * abfd, void * vfile
+ fprintf (file,
+ _("\n[Ordinal/Name Pointer] Table\n"));
+
+- for (i = 0; i < edt.num_names; ++i)
++ /* PR 17512: Handle corrupt PE binaries. */
++ if (edt.npt_addr + (edt.num_names * 4) - adj >= datasize)
++ fprintf (file, _("\tInvalid Name Pointer Table rva (0x%lx) or entry count (0x%lx)\n"),
++ (long) edt.npt_addr,
++ (long) edt.num_names);
++ else if (edt.ot_addr + (edt.num_names * 2) - adj >= datasize)
++ fprintf (file, _("\tInvalid Ordinal Table rva (0x%lx) or entry count (0x%lx)\n"),
++ (long) edt.ot_addr,
++ (long) edt.num_names);
++ else for (i = 0; i < edt.num_names; ++i)
+ {
+ bfd_vma name_ptr = bfd_get_32 (abfd,
+ data +
+Index: binutils-2.24/bfd/ChangeLog
+===================================================================
+--- binutils-2.24.orig/bfd/ChangeLog
++++ binutils-2.24/bfd/ChangeLog
+@@ -1,8 +1,13 @@
+ 2014-10-27 Nick Clifton <nickc@redhat.com>
+
+ PR binutils/17512
++ * elf.c (bfd_section_from_shdr): Detect and warn about ELF
++ binaries with a group of sections linked by the string table
++ indices.
+ * peXXigen.c (_bfd_XXi_swap_aouthdr_in): Handle corrupt binaries
+ with an invalid value for NumberOfRvaAndSizes.
++ (pe_print_edata): Detect out of range rvas and entry counts for
++ the Export Address table, Name Pointer table and Ordinal table.
+
+ PR binutils/17510
+ * elf.c (setup_group): Improve handling of corrupt group
diff --git a/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8503.patch b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8503.patch
new file mode 100644
index 0000000000..2dd3354fc1
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8503.patch
@@ -0,0 +1,47 @@
+Upstream-Status: Backport
+
+CVE-2014-8503 fix.
+
+[YOCTO #7084]
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+From 0102ea8cec5fc509bba6c91df61b7ce23a799d32 Mon Sep 17 00:00:00 2001
+From: Nick Clifton <nickc@redhat.com>
+Date: Thu, 30 Oct 2014 17:16:17 +0000
+Subject: [PATCH] Fixes a seg-fault in the ihex parser when it encounters a
+ malformed ihex file.
+
+ PR binutils/17512
+ * ihex.c (ihex_scan): Fix typo in invocation of ihex_bad_byte.
+---
+ bfd/ChangeLog | 1 +
+ bfd/ihex.c | 2 +-
+ 2 files changed, 2 insertions(+), 1 deletion(-)
+
+Index: binutils-2.24/bfd/ihex.c
+===================================================================
+--- binutils-2.24.orig/bfd/ihex.c
++++ binutils-2.24/bfd/ihex.c
+@@ -322,7 +322,7 @@ ihex_scan (bfd *abfd)
+ {
+ if (! ISHEX (buf[i]))
+ {
+- ihex_bad_byte (abfd, lineno, hdr[i], error);
++ ihex_bad_byte (abfd, lineno, buf[i], error);
+ goto error_return;
+ }
+ }
+Index: binutils-2.24/bfd/ChangeLog
+===================================================================
+--- binutils-2.24.orig/bfd/ChangeLog
++++ binutils-2.24/bfd/ChangeLog
+@@ -1,3 +1,8 @@
++2014-10-30 Nick Clifton <nickc@redhat.com>
++
++ PR binutils/17512
++ * ihex.c (ihex_scan): Fix typo in invocation of ihex_bad_byte.
++
+ 2014-10-27 Nick Clifton <nickc@redhat.com>
+
+ PR binutils/17512
diff --git a/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8504.patch b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8504.patch
new file mode 100644
index 0000000000..b4d1d1ff61
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8504.patch
@@ -0,0 +1,75 @@
+Upstream-Status: Backport
+
+CVE-2014-8504 fix.
+
+[YOCTO #7084]
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+From 708d7d0d11f0f2d776171979aa3479e8e12a38a0 Mon Sep 17 00:00:00 2001
+From: Nick Clifton <nickc@redhat.com>
+Date: Tue, 28 Oct 2014 10:48:14 +0000
+Subject: [PATCH] This patch fixes a flaw in the SREC parser which could cause
+ a stack overflow and potential security breach.
+
+ PR binutils/17510
+ * srec.c (srec_bad_byte): Increase size of buf to allow for
+ negative values.
+ (srec_scan): Use an unsigned char buffer to hold header bytes.
+---
+ bfd/ChangeLog | 8 ++++++++
+ bfd/elf.c | 2 +-
+ bfd/peXXigen.c | 1 -
+ bfd/srec.c | 4 ++--
+ 4 files changed, 11 insertions(+), 4 deletions(-)
+
+Index: binutils-2.24/bfd/ChangeLog
+===================================================================
+--- binutils-2.24.orig/bfd/ChangeLog
++++ binutils-2.24/bfd/ChangeLog
+@@ -1,3 +1,11 @@
++2014-10-28 Andreas Schwab <schwab@suse.de>
++ Nick Clifton <nickc@redhat.com>
++
++ PR binutils/17510
++ * srec.c (srec_bad_byte): Increase size of buf to allow for
++ negative values.
++ (srec_scan): Use an unsigned char buffer to hold header bytes.
++
+ 2014-10-30 Nick Clifton <nickc@redhat.com>
+
+ PR binutils/17512
+Index: binutils-2.24/bfd/peXXigen.c
+===================================================================
+--- binutils-2.24.orig/bfd/peXXigen.c
++++ binutils-2.24/bfd/peXXigen.c
+@@ -471,7 +471,6 @@ _bfd_XXi_swap_aouthdr_in (bfd * abfd,
+ a->NumberOfRvaAndSizes = 0;
+ }
+
+-
+ for (idx = 0; idx < a->NumberOfRvaAndSizes; idx++)
+ {
+ /* If data directory is empty, rva also should be 0. */
+Index: binutils-2.24/bfd/srec.c
+===================================================================
+--- binutils-2.24.orig/bfd/srec.c
++++ binutils-2.24/bfd/srec.c
+@@ -248,7 +248,7 @@ srec_bad_byte (bfd *abfd,
+ }
+ else
+ {
+- char buf[10];
++ char buf[40];
+
+ if (! ISPRINT (c))
+ sprintf (buf, "\\%03o", (unsigned int) c);
+@@ -454,7 +454,7 @@ srec_scan (bfd *abfd)
+ case 'S':
+ {
+ file_ptr pos;
+- char hdr[3];
++ unsigned char hdr[3];
+ unsigned int bytes, min_bytes;
+ bfd_vma address;
+ bfd_byte *data;
diff --git a/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8737.patch b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8737.patch
new file mode 100644
index 0000000000..4a84562201
--- /dev/null
+++ b/meta/recipes-devtools/binutils/binutils/binutils_CVE-2014-8737.patch
@@ -0,0 +1,177 @@
+Upstream-Status: Backport
+
+CVE-2014-8737 fix.
+
+[YOCTO #7084]
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+From dd9b91de2149ee81d47f708e7b0bbf57da10ad42 Mon Sep 17 00:00:00 2001
+From: Nick Clifton <nickc@redhat.com>
+Date: Thu, 6 Nov 2014 14:49:10 +0000
+Subject: [PATCH] Prevent archive members with illegal pathnames from being
+ extracted from an archive.
+
+ PR binutils/17552, binutils/17533
+ * bucomm.c (is_valid_archive_path): New function. Returns false
+ for absolute pathnames and pathnames that include /../.
+ * bucomm.h (is_valid_archive_path): Add prototype.
+ * ar.c (extract_file): Use new function to check for valid
+ pathnames when extracting files from an archive.
+ * objcopy.c (copy_archive): Likewise.
+ * doc/binutils.texi: Update documentation to mention the
+ limitation on pathname of archive members.
+---
+ binutils/ChangeLog | 16 ++++++++++++++--
+ binutils/ar.c | 9 +++++++++
+ binutils/bucomm.c | 26 ++++++++++++++++++++++++++
+ binutils/bucomm.h | 12 ++++++++----
+ binutils/doc/binutils.texi | 3 ++-
+ binutils/objcopy.c | 6 ++++++
+ 6 files changed, 65 insertions(+), 7 deletions(-)
+
+Index: binutils-2.24/binutils/ar.c
+===================================================================
+--- binutils-2.24.orig/binutils/ar.c
++++ binutils-2.24/binutils/ar.c
+@@ -1031,6 +1031,15 @@ extract_file (bfd *abfd)
+ bfd_size_type size;
+ struct stat buf;
+
++ /* PR binutils/17533: Do not allow directory traversal
++ outside of the current directory tree. */
++ if (! is_valid_archive_path (bfd_get_filename (abfd)))
++ {
++ non_fatal (_("illegal pathname found in archive member: %s"),
++ bfd_get_filename (abfd));
++ return;
++ }
++
+ if (bfd_stat_arch_elt (abfd, &buf) != 0)
+ /* xgettext:c-format */
+ fatal (_("internal stat error on %s"), bfd_get_filename (abfd));
+Index: binutils-2.24/binutils/bucomm.c
+===================================================================
+--- binutils-2.24.orig/binutils/bucomm.c
++++ binutils-2.24/binutils/bucomm.c
+@@ -624,3 +624,29 @@ bfd_get_archive_filename (const bfd *abf
+ bfd_get_filename (abfd));
+ return buf;
+ }
++
++/* Returns TRUE iff PATHNAME, a filename of an archive member,
++ is valid for writing. For security reasons absolute paths
++ and paths containing /../ are not allowed. See PR 17533. */
++
++bfd_boolean
++is_valid_archive_path (char const * pathname)
++{
++ const char * n = pathname;
++
++ if (IS_ABSOLUTE_PATH (n))
++ return FALSE;
++
++ while (*n)
++ {
++ if (*n == '.' && *++n == '.' && ( ! *++n || IS_DIR_SEPARATOR (*n)))
++ return FALSE;
++
++ while (*n && ! IS_DIR_SEPARATOR (*n))
++ n++;
++ while (IS_DIR_SEPARATOR (*n))
++ n++;
++ }
++
++ return TRUE;
++}
+Index: binutils-2.24/binutils/bucomm.h
+===================================================================
+--- binutils-2.24.orig/binutils/bucomm.h
++++ binutils-2.24/binutils/bucomm.h
+@@ -23,6 +23,8 @@
+ #ifndef _BUCOMM_H
+ #define _BUCOMM_H
+
++/* In bucomm.c. */
++
+ /* Return the filename in a static buffer. */
+ const char *bfd_get_archive_filename (const bfd *);
+
+@@ -58,20 +60,22 @@ bfd_vma parse_vma (const char *, const c
+
+ off_t get_file_size (const char *);
+
++bfd_boolean is_valid_archive_path (char const *);
++
+ extern char *program_name;
+
+-/* filemode.c */
++/* In filemode.c. */
+ void mode_string (unsigned long, char *);
+
+-/* version.c */
++/* In version.c. */
+ extern void print_version (const char *);
+
+-/* rename.c */
++/* In rename.c. */
+ extern void set_times (const char *, const struct stat *);
+
+ extern int smart_rename (const char *, const char *, int);
+
+-/* libiberty. */
++/* In libiberty. */
+ void *xmalloc (size_t);
+
+ void *xrealloc (void *, size_t);
+Index: binutils-2.24/binutils/doc/binutils.texi
+===================================================================
+--- binutils-2.24.orig/binutils/doc/binutils.texi
++++ binutils-2.24/binutils/doc/binutils.texi
+@@ -234,7 +234,8 @@ a normal archive. Instead the elements
+ individually to the second archive.
+
+ The paths to the elements of the archive are stored relative to the
+-archive itself.
++archive itself. For security reasons absolute paths and paths with a
++@code{/../} component are not allowed.
+
+ @cindex compatibility, @command{ar}
+ @cindex @command{ar} compatibility
+Index: binutils-2.24/binutils/objcopy.c
+===================================================================
+--- binutils-2.24.orig/binutils/objcopy.c
++++ binutils-2.24/binutils/objcopy.c
+@@ -2206,6 +2206,12 @@ copy_archive (bfd *ibfd, bfd *obfd, cons
+ bfd_boolean del = TRUE;
+ bfd_boolean ok_object;
+
++ /* PR binutils/17533: Do not allow directory traversal
++ outside of the current directory tree by archive members. */
++ if (! is_valid_archive_path (bfd_get_filename (this_element)))
++ fatal (_("illegal pathname found in archive member: %s"),
++ bfd_get_filename (this_element));
++
+ /* Create an output file for this member. */
+ output_name = concat (dir, "/",
+ bfd_get_filename (this_element), (char *) 0);
+Index: binutils-2.24/binutils/ChangeLog
+===================================================================
+--- binutils-2.24.orig/binutils/ChangeLog
++++ binutils-2.24/binutils/ChangeLog
+@@ -1,3 +1,15 @@
++2014-11-06 Nick Clifton <nickc@redhat.com>
++
++ PR binutils/17552, binutils/17533
++ * bucomm.c (is_valid_archive_path): New function. Returns false
++ for absolute pathnames and pathnames that include /../.
++ * bucomm.h (is_valid_archive_path): Add prototype.
++ * ar.c (extract_file): Use new function to check for valid
++ pathnames when extracting files from an archive.
++ * objcopy.c (copy_archive): Likewise.
++ * doc/binutils.texi: Update documentation to mention the
++ limitation on pathname of archive members.
++
+ 2013-11-22 Cory Fields <cory@coryfields.com>
+
+ * windres.c (define_resource): Use zero for timestamp, making
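
For readers who only want the gist of the new check: a rough shell approximation of the rule that is_valid_archive_path() enforces (illustration only; the authoritative logic is the C function added to bucomm.c above, and the helper name below is made up for the example):

    # Reject absolute member names and any name containing a ".." path component.
    member_name_ok() {
        case "$1" in
            /*)                   return 1 ;;   # absolute path
            ..|../*|*/..|*/../*)  return 1 ;;   # ".." component
            *)                    return 0 ;;
        esac
    }

    member_name_ok "usr/bin/tool"     && echo "kept:     usr/bin/tool"
    member_name_ok "../../etc/passwd" || echo "rejected: ../../etc/passwd"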
diff --git a/meta/recipes-devtools/bootchart2/bootchart2/bootchartd-no-bashism.patch b/meta/recipes-devtools/bootchart2/bootchart2/bootchartd-no-bashism.patch
new file mode 100644
index 0000000000..b013995a0d
--- /dev/null
+++ b/meta/recipes-devtools/bootchart2/bootchart2/bootchartd-no-bashism.patch
@@ -0,0 +1,27 @@
+From ffb5bae5d588aaea6325623462a8070a04767196 Mon Sep 17 00:00:00 2001
+From: Robert Yang <liezhi.yang@windriver.com>
+Date: Thu, 14 Aug 2014 08:33:28 -0700
+Subject: [PATCH] bootchartd.in: no bashism
+
+No bashism in bootchartd.in, so use /bin/sh.
+
+Upstream-Status: Pending
+
+Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
+---
+ bootchartd.in | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/bootchartd.in b/bootchartd.in
+index a408bab..064f6a4 100755
+--- a/bootchartd.in
++++ b/bootchartd.in
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+ #
+ # Bootchart logger script
+ # Ziga Mahkovec <ziga.mahkovec@klika.si>
+--
+1.7.9.5
+
diff --git a/meta/recipes-devtools/bootchart2/bootchart2_git.bb b/meta/recipes-devtools/bootchart2/bootchart2_git.bb
index 411630491f..5fc7211f79 100644
--- a/meta/recipes-devtools/bootchart2/bootchart2_git.bb
+++ b/meta/recipes-devtools/bootchart2/bootchart2_git.bb
@@ -91,8 +91,10 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=44ac4678311254db62edf8fd39cb8124"
# one commit beyond 1.14.6 for a systemd-related bugfix
PV = "0.14.6+git${SRCPV}"
-SRC_URI = "git://github.com/mmeeks/bootchart.git"
-SRC_URI += "file://bootchartd_stop.sh"
+SRC_URI = "git://github.com/mmeeks/bootchart.git \
+ file://bootchartd_stop.sh \
+ file://bootchartd-no-bashism.patch \
+ "
SRCREV = "b65ed43b0ae832080fb728245de9ef1a4b48d8b5"
@@ -118,6 +120,12 @@ do_compile_append_class-native () {
chmod +x ${S}/pybootchartgui
}
+do_compile_prepend () {
+ export PY_LIBDIR="${libdir}/${PYTHON_DIR}"
+ export BINDIR="${bindir}"
+ export LIBDIR="${base_libdir}"
+}
+
do_install () {
install -d ${D}${sysconfdir} # needed for -native
export PY_LIBDIR="${libdir}/${PYTHON_DIR}"
diff --git a/meta/recipes-devtools/btrfs-tools/btrfs-tools_git.bb b/meta/recipes-devtools/btrfs-tools/btrfs-tools_git.bb
index 0eed13bfb1..ef850570fb 100644
--- a/meta/recipes-devtools/btrfs-tools/btrfs-tools_git.bb
+++ b/meta/recipes-devtools/btrfs-tools/btrfs-tools_git.bb
@@ -12,13 +12,13 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=fcb02dc552a041dee27e4b85c7396067"
SECTION = "base"
DEPENDS = "util-linux attr e2fsprogs lzo acl"
-SRCREV = "24cf4d8c3ee924b474f68514e0167cc2e602a48d"
-SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/mason/btrfs-progs.git \
- file://nodocs.patch"
+SRCREV = "44cdb62d3478c834f41c87ea79d261b9c8982dce"
+SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/kdave/btrfs-progs.git"
S = "${WORKDIR}/git"
PV = "3.14.2+git${SRCPV}"
+EXTRA_OEMAKE += "DISABLE_DOCUMENTATION=1"
do_install () {
oe_runmake 'DESTDIR=${D}' install
diff --git a/meta/recipes-devtools/cmake/cmake.inc b/meta/recipes-devtools/cmake/cmake.inc
index 8592a23046..fb451fde6f 100644
--- a/meta/recipes-devtools/cmake/cmake.inc
+++ b/meta/recipes-devtools/cmake/cmake.inc
@@ -13,8 +13,6 @@ CMAKE_MAJOR_VERSION = "${@'.'.join(d.getVar('PV',1).split('.')[0:2])}"
SRC_URI = "http://www.cmake.org/files/v${CMAKE_MAJOR_VERSION}/cmake-${PV}.tar.gz \
file://support-oe-qt4-tools-names.patch \
- file://aarch64-cmake.patch \
- file://aarch64-kwsys.patch \
file://qt4-fail-silent.patch \
file://cmake-2.8.11.2-FindFreetype.patch \
"
diff --git a/meta/recipes-devtools/cmake/cmake/OEToolchainConfig.cmake b/meta/recipes-devtools/cmake/cmake/OEToolchainConfig.cmake
new file mode 100644
index 0000000000..60014bbf2b
--- /dev/null
+++ b/meta/recipes-devtools/cmake/cmake/OEToolchainConfig.cmake
@@ -0,0 +1,18 @@
+set( CMAKE_SYSTEM_NAME Linux )
+set( CMAKE_C_FLAGS $ENV{CFLAGS} CACHE STRING "" FORCE )
+set( CMAKE_CXX_FLAGS $ENV{CXXFLAGS} CACHE STRING "" FORCE )
+set( CMAKE_ASM_FLAGS ${CMAKE_C_FLAGS} CACHE STRING "" FORCE )
+set( CMAKE_LDFLAGS_FLAGS ${CMAKE_CXX_FLAGS} CACHE STRING "" FORCE )
+set( CMAKE_FIND_ROOT_PATH $ENV{OECORE_TARGET_SYSROOT} $ENV{OECORE_NATIVE_SYSROOT} )
+set( CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER )
+set( CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY )
+set( CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY )
+
+string(REGEX MATCH "sysroots/([a-zA-Z0-9]+)" CMAKE_SYSTEM_PROCESSOR $ENV{SDKTARGETSYSROOT})
+string(REGEX REPLACE "sysroots/" "" CMAKE_SYSTEM_PROCESSOR ${CMAKE_SYSTEM_PROCESSOR})
+
+# Include the toolchain configuration subscripts
+file( GLOB toolchain_config_files "${CMAKE_TOOLCHAIN_FILE}.d/*.cmake" )
+foreach(config ${toolchain_config_files})
+ include(${config})
+endforeach()
diff --git a/meta/recipes-devtools/cmake/cmake/aarch64-cmake.patch b/meta/recipes-devtools/cmake/cmake/aarch64-cmake.patch
deleted file mode 100644
index 3e26d58267..0000000000
--- a/meta/recipes-devtools/cmake/cmake/aarch64-cmake.patch
+++ /dev/null
@@ -1,37 +0,0 @@
-From: Riku Voipio <riku.voipio@linaro.org>
-Date: Fri, 21 Dec 2012 11:20:02 +0000 (+0200)
-Subject: KWIML: Teach ABI.h about Aarch64
-X-Git-Url: http://cmake.org/gitweb?p=cmake.git;a=commitdiff_plain;h=34916522
-
-KWIML: Teach ABI.h about Aarch64
-
-The __aarch64__ defines Aarch64, while __AARCH64EB__ defines bigendian
-and __AARCH64EL__ little endian. Only little endian tested, no big
-endian toolchain exists yet.
-
-Signed-off-by: Riku Voipio <riku.voipio@linaro.org>
-
-Hand edited by: Marcin Juszkiewicz <marcin.juszkiewicz@linaro.org> to get it applied in OE
-
-Upstream-Status: Backport
----
-
-diff --git a/ABI.h.in b/ABI.h.in
-index f93ddba..7f4772a 100644
---- a/Utilities/KWIML/ABI.h.in
-+++ b/Utilities/KWIML/ABI.h.in
-@@ -418,6 +418,14 @@ suppression macro @KWIML@_ABI_NO_VERIFY was defined.
- #elif defined(__vax__)
- # define @KWIML@_ABI_ENDIAN_ID @KWIML@_ABI_ENDIAN_ID_BIG
-
-+/* Aarch64 */
-+#elif defined(__aarch64__)
-+# if !defined(__AARCH64EB__)
-+# define @KWIML@_ABI_ENDIAN_ID @KWIML@_ABI_ENDIAN_ID_LITTLE
-+# else
-+# define @KWIML@_ABI_ENDIAN_ID @KWIML@_ABI_ENDIAN_ID_BIG
-+# endif
-+
- /* Unknown CPU */
- #elif !defined(@KWIML@_ABI_NO_ERROR_ENDIAN)
- # error "Byte order of target CPU unknown."
diff --git a/meta/recipes-devtools/cmake/cmake/aarch64-kwsys.patch b/meta/recipes-devtools/cmake/cmake/aarch64-kwsys.patch
deleted file mode 100644
index 2b68eae6c2..0000000000
--- a/meta/recipes-devtools/cmake/cmake/aarch64-kwsys.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-From: KWSys Robot <kwrobot@kitware.com>
-Date: Fri, 21 Dec 2012 13:29:37 +0000 (-0500)
-Subject: KWSys 2012-12-21 (8ce09af5)
-X-Git-Url: http://cmake.org/gitweb?p=cmake.git;a=commitdiff_plain;h=567e7d94
-
-KWSys 2012-12-21 (8ce09af5)
-
-Extract upstream KWSys using the following shell commands.
-
-$ git archive --prefix=upstream-kwsys/ 8ce09af5 | tar x
-$ git shortlog --no-merges --abbrev=8 --format='%h %s' 933eb822..8ce09af5
-Riku Voipio (1):
- 8ce09af5 CPU: Add Aarch64 support
-
-Change-Id: I4bd0a97abaa0f958e2679afe2d4ad4bcc37434a0
-
-Hand edited by: Marcin Juszkiewicz <marcin.juszkiewicz@linaro.org> to get it applied in OE
-
-Upstream-Status: Backport
----
-
-diff --git a/CPU.h.in b/CPU.h.in
-index ecd29d1..2e1a584 100644
---- a/Source/kwsys/CPU.h.in
-+++ b/Source/kwsys/CPU.h.in
-@@ -98,6 +98,14 @@
- #elif defined(__SYSC_ZARCH__)
- # define @KWSYS_NAMESPACE@_CPU_ENDIAN_ID @KWSYS_NAMESPACE@_CPU_ENDIAN_ID_BIG
-
-+/* Aarch64 */
-+#elif defined(__aarch64__)
-+# if !defined(__AARCH64EB__)
-+# define @KWSYS_NAMESPACE@_CPU_ENDIAN_ID @KWSYS_NAMESPACE@_CPU_ENDIAN_ID_LITTLE
-+# else
-+# define @KWSYS_NAMESPACE@_CPU_ENDIAN_ID @KWSYS_NAMESPACE@_CPU_ENDIAN_ID_BIG
-+# endif
-+
- /* Unknown CPU */
- #else
- # define @KWSYS_NAMESPACE@_CPU_ENDIAN_ID 0
diff --git a/meta/recipes-devtools/cmake/cmake/environment.d-cmake.sh b/meta/recipes-devtools/cmake/cmake/environment.d-cmake.sh
new file mode 100644
index 0000000000..0eb56b66fa
--- /dev/null
+++ b/meta/recipes-devtools/cmake/cmake/environment.d-cmake.sh
@@ -0,0 +1 @@
+alias cmake="cmake -DCMAKE_TOOLCHAIN_FILE=$OECORE_NATIVE_SYSROOT/usr/share/cmake/OEToolchainConfig.cmake"
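
Taken together, the OEToolchainConfig.cmake file added above and this one-line alias let an SDK user configure a CMake project without passing the toolchain file by hand. A rough sketch of the intended interactive workflow (the SDK install prefix and environment-setup script name are illustrative assumptions, not taken from this change):

    # source the SDK environment so CFLAGS/CXXFLAGS/OECORE_* and the cmake alias are set
    . /opt/poky/1.7/environment-setup-core2-64-poky-linux

    # optional: project-specific drop-ins are picked up from the toolchain file's .d directory
    mkdir -p "$OECORE_NATIVE_SYSROOT/usr/share/cmake/OEToolchainConfig.cmake.d"

    # configure and build against the target sysroot
    cd ~/src/myproject && cmake . && make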
diff --git a/meta/recipes-devtools/cmake/cmake_2.8.12.2.bb b/meta/recipes-devtools/cmake/cmake_2.8.12.2.bb
index de2ac6b219..66a6af629c 100644
--- a/meta/recipes-devtools/cmake/cmake_2.8.12.2.bb
+++ b/meta/recipes-devtools/cmake/cmake_2.8.12.2.bb
@@ -6,6 +6,10 @@ DEPENDS += "curl expat zlib libarchive"
SRC_URI += "file://dont-run-cross-binaries.patch"
+SRC_URI_append_class-nativesdk = " \
+ file://OEToolchainConfig.cmake \
+ file://environment.d-cmake.sh"
+
SRC_URI[md5sum] = "17c6513483d23590cbce6957ec6d1e66"
SRC_URI[sha256sum] = "8c6574e9afabcb9fc66f463bb1f2f051958d86c85c37fccf067eb1a44a120e5e"
@@ -32,6 +36,16 @@ EXTRA_OECMAKE=" \
${@bb.utils.contains('DISTRO_FEATURES', 'largefile', '-DKWSYS_LFS_WORKS=1', '-DKWSYS_LFS_DISABLE=1', d)} \
"
+do_install_append_class-nativesdk() {
+ mkdir -p ${D}${datadir}/cmake
+ install -m 644 ${WORKDIR}/OEToolchainConfig.cmake ${D}${datadir}/cmake/
+
+ mkdir -p ${D}${SDKPATHNATIVE}/environment-setup.d
+ install -m 644 ${WORKDIR}/environment.d-cmake.sh ${D}${SDKPATHNATIVE}/environment-setup.d/cmake.sh
+}
+
+FILES_${PN}_append_class-nativesdk = " ${SDKPATHNATIVE}"
+
FILES_${PN} += "${datadir}/cmake-${CMAKE_MAJOR_VERSION}"
FILES_${PN}-doc += "${docdir}/cmake-${CMAKE_MAJOR_VERSION}"
diff --git a/meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets/docbook-xsl-stylesheets-no-bashism-in-docbook-xsl-up.patch b/meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets/docbook-xsl-stylesheets-no-bashism-in-docbook-xsl-up.patch
new file mode 100644
index 0000000000..0559a5c0d2
--- /dev/null
+++ b/meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets/docbook-xsl-stylesheets-no-bashism-in-docbook-xsl-up.patch
@@ -0,0 +1,24 @@
+docbook-xsl-stylesheets: no bashism in docbook-xsl-update
+
+The checkbashisms tool shows there is no bashism, so use /bin/sh.
+
+Upstream-Status: Pending
+
+Signed-off-by: Chong Lu <Chong.Lu@windriver.com>
+---
+ tools/bin/docbook-xsl-update | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/tools/bin/docbook-xsl-update b/tools/bin/docbook-xsl-update
+index aba0016..cc73aa2 100755
+--- a/tools/bin/docbook-xsl-update
++++ b/tools/bin/docbook-xsl-update
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+ # vim: number
+
+ # docbook-xsl-update - Update environment to latest docbook-xsl snapshot
+--
+1.9.1
+
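
The claim in the patch header is easy to re-check locally; checkbashisms ships in Debian's devscripts package (its availability on the build host is an assumption):

    # lists any offending construct and exits non-zero if a bashism is found;
    # a quiet, successful run backs up the switch to #!/bin/sh
    checkbashisms tools/bin/docbook-xsl-update && echo "no bashisms found"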
diff --git a/meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets_1.78.1.bb b/meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets_1.78.1.bb
index 39528c89c4..2a1bdc47f7 100644
--- a/meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets_1.78.1.bb
+++ b/meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets_1.78.1.bb
@@ -5,6 +5,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=a6eeeed43d498c22a835382533356462"
SRC_URI = "${SOURCEFORGE_MIRROR}/docbook/docbook-xsl-${PV}.tar.bz2 \
file://docbook-xsl.xml \
+ file://docbook-xsl-stylesheets-no-bashism-in-docbook-xsl-up.patch \
"
SRC_URI[md5sum] = "6dd0f89131cc35bf4f2ed105a1c17771"
@@ -18,7 +19,7 @@ BBCLASSEXTEND = "native"
SSTATEPOSTINSTFUNCS_append_class-native = " docbook_xsl_stylesheets_sstate_postinst"
SYSROOT_PREPROCESS_FUNCS_append_class-native = " docbook_xsl_stylesheets_sysroot_preprocess"
-do_configre (){
+do_configure (){
:
}
@@ -63,5 +64,6 @@ docbook_xsl_stylesheets_sysroot_preprocess () {
sed -i -e "s|file:///usr/share/xml|file://${datadir}/xml|g" ${SYSROOT_DESTDIR}${sysconfdir}/xml/docbook-xsl.xml
}
+RDEPENDS_${PN} += "perl"
FILES_${PN} = "${datadir}/xml/* ${sysconfdir}/xml/docbook-xsl.xml"
FILES_${PN}-doc = "${datadir}/doc/*"
diff --git a/meta/recipes-devtools/dosfstools/dosfstools_2.11.bb b/meta/recipes-devtools/dosfstools/dosfstools_2.11.bb
index a6a7ddcbd9..b59b25019c 100644
--- a/meta/recipes-devtools/dosfstools/dosfstools_2.11.bb
+++ b/meta/recipes-devtools/dosfstools/dosfstools_2.11.bb
@@ -27,7 +27,7 @@ SRC_URI[sha256sum] = "0eac6d12388b3d9ed78684529c1b0d9346fa2abbe406c4d4a3eb5a023c
CFLAGS_append = " -D_FILE_OFFSET_BITS=64"
do_install () {
- oe_runmake "PREFIX=${D}" "SBINDIR=${D}${sbindir}" \
+ oe_runmake "PREFIX=${D}" "SBINDIR=${D}${base_sbindir}" \
"MANDIR=${D}${mandir}/man8" install
}
diff --git a/meta/recipes-devtools/dpkg/dpkg.inc b/meta/recipes-devtools/dpkg/dpkg.inc
index 929906dfd8..c3c51ebe80 100644
--- a/meta/recipes-devtools/dpkg/dpkg.inc
+++ b/meta/recipes-devtools/dpkg/dpkg.inc
@@ -2,8 +2,7 @@ SUMMARY = "Package maintenance system from Debian"
LICENSE = "GPLv2.0+"
SECTION = "base"
-SRC_URI = "${DEBIAN_MIRROR}/main/d/dpkg/dpkg_${PV}.tar.xz \
- file://ignore_extra_fields.patch"
+SRC_URI = "${DEBIAN_MIRROR}/main/d/dpkg/dpkg_${PV}.tar.xz"
DEPENDS = "zlib bzip2 perl ncurses"
DEPENDS_class-native = "bzip2-replacement-native zlib-native virtual/update-alternatives-native gettext-native perl-native"
diff --git a/meta/recipes-devtools/dpkg/dpkg/dpkg-1.17.4-CVE-2014-0471-CVE-2014-3127.patch b/meta/recipes-devtools/dpkg/dpkg/dpkg-1.17.4-CVE-2014-0471-CVE-2014-3127.patch
deleted file mode 100644
index e59c6661ea..0000000000
--- a/meta/recipes-devtools/dpkg/dpkg/dpkg-1.17.4-CVE-2014-0471-CVE-2014-3127.patch
+++ /dev/null
@@ -1,68 +0,0 @@
-dpkg: Security Advisory - CVE-2014-3127
-
-commit a12eb58959d0a10584a428f4a3103a49204c410f upstream
-
-dpkg 1.15.9 on Debian squeeze introduces support for the "C-style
-encoded filenames" feature without recognizing that the squeeze patch
-program lacks this feature, which triggers an interaction error that
-allows remote attackers to conduct directory traversal attacks and
-modify files outside of the intended directories via a crafted source
-package.
-
-NOTE: this can be considered a release engineering problem in the
-effort to fix CVE-2014-0471.
-
-Upstream-Status: Backport
-
-Signed-off-by: Wenlin Kang <wenlin.kang@windriver.com>
-Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
-=====================================================
-diff -uarN dpkg-1.17.1-org/scripts/Dpkg/Source/Patch.pm dpkg-1.17.1/scripts/Dpkg/Source/Patch.pm
---- dpkg-1.17.1-org/scripts/Dpkg/Source/Patch.pm 2014-06-05 16:32:41.765446564 +0800
-+++ dpkg-1.17.1/scripts/Dpkg/Source/Patch.pm 2014-06-05 16:37:21.461446359 +0800
-@@ -324,31 +324,6 @@
- return $line;
- }
-
--my %ESCAPE = ((
-- 'a' => "\a",
-- 'b' => "\b",
-- 'f' => "\f",
-- 'n' => "\n",
-- 'r' => "\r",
-- 't' => "\t",
-- 'v' => "\cK",
-- '\\' => '\\',
-- '"' => '"',
--), (
-- map { sprintf('%03o', $_) => chr($_) } (0..255)
--));
--
--sub _unescape {
-- my ($diff, $str) = @_;
--
-- if (exists $ESCAPE{$str}) {
-- return $ESCAPE{$str};
-- } else {
-- error(_g('diff %s patches file with unknown escape sequence \\%s'),
-- $diff, $str);
-- }
--}
--
- # Fetch the header filename ignoring the optional timestamp
- sub _fetch_filename {
- my ($diff, $header) = @_;
-@@ -358,12 +333,7 @@
-
- # Is it a C-style string?
- if ($header =~ m/^"/) {
-- $header =~ m/^"((?:[^\\"]|\\.)*)"/;
-- error(_g('diff %s patches file with unbalanced quote'), $diff)
-- unless defined $1;
--
-- $header = $1;
-- $header =~ s/\\([0-3][0-7]{2}|.)/_unescape($diff, $1)/eg;
-+ error(_g('diff %s patches file with C-style encoded filename'), $diff);
- } else {
- # Tab is the official separator, it's always used when
- # filename contain spaces. Try it first, otherwise strip on space
diff --git a/meta/recipes-devtools/dpkg/dpkg/dpkg-1.17.4-CVE-2014-0471.patch b/meta/recipes-devtools/dpkg/dpkg/dpkg-1.17.4-CVE-2014-0471.patch
deleted file mode 100644
index 195d309506..0000000000
--- a/meta/recipes-devtools/dpkg/dpkg/dpkg-1.17.4-CVE-2014-0471.patch
+++ /dev/null
@@ -1,97 +0,0 @@
-dpkg: Security Advisory - CVE-2014-0471
-
-commit a82651188476841d190c58693f95827d61959b51 upstream
-
-Directory traversal vulnerability in the unpacking functionality in
-dpkg before 1.15.9, 1.16.x before 1.16.13, and 1.17.x before 1.17.8
-allows remote attackers to write arbitrary files via a crafted source
-package, related to "C-style filename quoting."
-
-Upstream-Status: Backport
-
-Signed-off-by: Wenlin Kang <wenlin.kang@windriver.com>
-Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
-===================================================
-diff -uarN dpkg-1.17.1-org/scripts/Dpkg/Source/Patch.pm dpkg-1.17.1/scripts/Dpkg/Source/Patch.pm
---- dpkg-1.17.1-org/scripts/Dpkg/Source/Patch.pm 2014-06-05 15:24:07.422446284 +0800
-+++ dpkg-1.17.1/scripts/Dpkg/Source/Patch.pm 2014-06-05 15:41:37.746446314 +0800
-@@ -324,14 +324,53 @@
- return $line;
- }
-
--# Strip timestamp
--sub _strip_ts {
-- my $header = shift;
--
-- # Tab is the official separator, it's always used when
-- # filename contain spaces. Try it first, otherwise strip on space
-- # if there's no tab
-- $header =~ s/\s.*// unless ($header =~ s/\t.*//);
-+my %ESCAPE = ((
-+ 'a' => "\a",
-+ 'b' => "\b",
-+ 'f' => "\f",
-+ 'n' => "\n",
-+ 'r' => "\r",
-+ 't' => "\t",
-+ 'v' => "\cK",
-+ '\\' => '\\',
-+ '"' => '"',
-+), (
-+ map { sprintf('%03o', $_) => chr($_) } (0..255)
-+));
-+
-+sub _unescape {
-+ my ($diff, $str) = @_;
-+
-+ if (exists $ESCAPE{$str}) {
-+ return $ESCAPE{$str};
-+ } else {
-+ error(_g('diff %s patches file with unknown escape sequence \\%s'),
-+ $diff, $str);
-+ }
-+}
-+
-+# Fetch the header filename ignoring the optional timestamp
-+sub _fetch_filename {
-+ my ($diff, $header) = @_;
-+
-+ # Strip any leading spaces.
-+ $header =~ s/^\s+//;
-+
-+ # Is it a C-style string?
-+ if ($header =~ m/^"/) {
-+ $header =~ m/^"((?:[^\\"]|\\.)*)"/;
-+ error(_g('diff %s patches file with unbalanced quote'), $diff)
-+ unless defined $1;
-+
-+ $header = $1;
-+ $header =~ s/\\([0-3][0-7]{2}|.)/_unescape($diff, $1)/eg;
-+ } else {
-+ # Tab is the official separator, it's always used when
-+ # filename contain spaces. Try it first, otherwise strip on space
-+ # if there's no tab
-+ $header =~ s/\s.*// unless $header =~ s/\t.*//;
-+ }
-+
- return $header;
- }
-
-@@ -400,7 +439,7 @@
- unless(s/^--- //) {
- error(_g("expected ^--- in line %d of diff `%s'"), $., $diff);
- }
-- $path{old} = $_ = _strip_ts($_);
-+ $path{old} = $_ = _fetch_filename($diff, $_);
- $fn{old} = $_ if $_ ne '/dev/null' and s{^[^/]*/+}{$destdir/};
- if (/\.dpkg-orig$/) {
- error(_g("diff `%s' patches file with name ending .dpkg-orig"), $diff);
-@@ -412,7 +451,7 @@
- unless (s/^\+\+\+ //) {
- error(_g("line after --- isn't as expected in diff `%s' (line %d)"), $diff, $.);
- }
-- $path{new} = $_ = _strip_ts($_);
-+ $path{new} = $_ = _fetch_filename($diff, $_);
- $fn{new} = $_ if $_ ne '/dev/null' and s{^[^/]*/+}{$destdir/};
-
- unless (defined $fn{old} or defined $fn{new}) {
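
For context on the change above: the backported _fetch_filename()/_unescape() pair decodes C-style quoted filenames in diff headers (a handful of named escapes plus three-digit octal sequences) and rejects anything it does not recognise, rather than blindly stripping a timestamp, which is what the CVE-2014-0471 fix is about. A minimal C sketch of the same decoding idea follows; it is purely illustrative, not taken from dpkg, and the function name and buffer handling are assumptions.

#include <stddef.h>

/* Illustrative only: decode a C-style quoted header name the way dpkg's
 * _unescape() does -- a few named escapes plus \NNN octal -- and reject
 * anything unknown instead of guessing.  Not the dpkg implementation. */
static int unescape_header_name(const char *in, char *out, size_t outlen)
{
    size_t o = 0;

    if (outlen == 0)
        return -1;

    while (*in != '\0' && o + 1 < outlen) {
        if (*in != '\\') {
            out[o++] = *in++;
            continue;
        }
        ++in;
        if (in[0] >= '0' && in[0] <= '3' &&
            in[1] >= '0' && in[1] <= '7' &&
            in[2] >= '0' && in[2] <= '7') {
            /* \NNN: three-digit octal escape, as in the Perl regex above */
            out[o++] = (char)(((in[0] - '0') << 6) |
                              ((in[1] - '0') << 3) |
                               (in[2] - '0'));
            in += 3;
        } else if (*in == 'n') { out[o++] = '\n'; ++in; }
        else if (*in == 't')   { out[o++] = '\t'; ++in; }
        else if (*in == '\\' || *in == '"') { out[o++] = *in++; }
        else {
            return -1;  /* unknown escape: treat the header as malformed */
        }
    }
    out[o] = '\0';
    return 0;
}
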
diff --git a/meta/recipes-devtools/dpkg/dpkg/ignore_extra_fields.patch b/meta/recipes-devtools/dpkg/dpkg/ignore_extra_fields.patch
deleted file mode 100644
index 4e3eb97ed8..0000000000
--- a/meta/recipes-devtools/dpkg/dpkg/ignore_extra_fields.patch
+++ /dev/null
@@ -1,21 +0,0 @@
-Upstream-Status: Inappropriate [workaround]
-
-Index: dpkg-1.16.8/dpkg-deb/build.c
-===================================================================
---- dpkg-1.16.8.orig/dpkg-deb/build.c
-+++ dpkg-1.16.8/dpkg-deb/build.c
-@@ -340,13 +340,13 @@ check_new_pkg(const char *dir)
- if (pkg->priority == pri_other)
- warning(_("'%s' contains user-defined Priority value '%s'"),
- controlfile, pkg->otherpriority);
-- for (field = pkg->available.arbs; field; field = field->next) {
-+ /*for (field = pkg->available.arbs; field; field = field->next) {
- if (known_arbitrary_field(field))
- continue;
-
- warning(_("'%s' contains user-defined field '%s'"), controlfile,
- field->name);
-- }
-+ }*/
-
- free(controlfile);
diff --git a/meta/recipes-devtools/dpkg/dpkg/no-vla-warning.patch b/meta/recipes-devtools/dpkg/dpkg/no-vla-warning.patch
index 0e57dbc7fc..f660b18646 100644
--- a/meta/recipes-devtools/dpkg/dpkg/no-vla-warning.patch
+++ b/meta/recipes-devtools/dpkg/dpkg/no-vla-warning.patch
@@ -11,22 +11,22 @@ Upstream-Status: Pending
Signed-off-by: Donn Seeley <donn.seeley@windriver.com>
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
+Signed-off-by: Aníbal Limón <anibal.limon@linux.intel.com>
---
m4/dpkg-compiler.m4 | 1 -
1 file changed, 1 deletion(-)
diff --git a/m4/dpkg-compiler.m4 b/m4/dpkg-compiler.m4
-index f1c760b..500b5ad 100644
+index 53f67c6..6e66a43 100644
--- a/m4/dpkg-compiler.m4
+++ b/m4/dpkg-compiler.m4
-@@ -72,7 +72,6 @@ if test "x$enable_compiler_warnings" = "xyes"; then
- DPKG_WARNING_ALL([-Wformat-security])
- DPKG_WARNING_ALL([-Wpointer-arith])
- DPKG_WARNING_ALL([-Wlogical-op])
-- DPKG_WARNING_ALL([-Wvla])
- DPKG_WARNING_ALL([-Winit-self])
- DPKG_WARNING_ALL([-Wwrite-strings])
- DPKG_WARNING_ALL([-Wcast-align])
---
-1.7.10.4
-
+@@ -49,7 +49,6 @@ AC_DEFUN([DPKG_CHECK_COMPILER_WARNINGS], [
+ DPKG_CHECK_COMPILER_FLAG([-Wformat-security])
+ DPKG_CHECK_COMPILER_FLAG([-Wpointer-arith])
+ DPKG_CHECK_COMPILER_FLAG([-Wlogical-op])
+- DPKG_CHECK_COMPILER_FLAG([-Wvla])
+ DPKG_CHECK_COMPILER_FLAG([-Winit-self])
+ DPKG_CHECK_COMPILER_FLAG([-Wwrite-strings])
+ DPKG_CHECK_COMPILER_FLAG([-Wcast-align])
+---
+1.17.21
diff --git a/meta/recipes-devtools/dpkg/dpkg_1.17.4.bb b/meta/recipes-devtools/dpkg/dpkg_1.17.21.bb
index 83526f3c96..15c0277015 100644
--- a/meta/recipes-devtools/dpkg/dpkg_1.17.4.bb
+++ b/meta/recipes-devtools/dpkg/dpkg_1.17.21.bb
@@ -12,10 +12,8 @@ SRC_URI += "file://noman.patch \
file://dpkg-configure.service \
file://glibc2.5-sync_file_range.patch \
file://no-vla-warning.patch \
- file://dpkg-1.17.4-CVE-2014-0471.patch \
- file://dpkg-1.17.4-CVE-2014-0471-CVE-2014-3127.patch \
"
-SRC_URI[md5sum] = "cc25086e1e3bd9512a95f14cfe9002e1"
-SRC_URI[sha256sum] = "01cdc81c33e77c3d7c40df17e19171794542be7cf12e411381ffcaa8f87b1854"
+SRC_URI[md5sum] = "765a96fd0180196613bbfa3c4aef0775"
+SRC_URI[sha256sum] = "3ed776627181cb9c1c9ba33f94a6319084be2e9ec9c23dd61ce784c4f602cf05"
diff --git a/meta/recipes-devtools/e2fsprogs/e2fsprogs/0012-Fix-musl-build-failures.patch b/meta/recipes-devtools/e2fsprogs/e2fsprogs/0012-Fix-musl-build-failures.patch
new file mode 100644
index 0000000000..2624f90e72
--- /dev/null
+++ b/meta/recipes-devtools/e2fsprogs/e2fsprogs/0012-Fix-musl-build-failures.patch
@@ -0,0 +1,54 @@
+From c6ff7feb9038d6e8aaffe2e69b205ad5fa33df2f Mon Sep 17 00:00:00 2001
+From: Paul Barker <paul@paulbarker.me.uk>
+Date: Mon, 18 Aug 2014 21:02:56 +0200
+Subject: [PATCH] Fix musl build failures
+
+In lib/ext2fs/unix_io.c, __u64 should be used instead of __uint64_t. This type
+is guaranteed by the e2fsprogs build system.
+(795c02def3681a99cc792a5ebc162d06f8a1eeb7)
+
+In misc/create_inode.c, <limits.h> is needed for the definition of PATH_MAX.
+(bbccc6f3c6a106721fb6f1ef4df6bc32c7986235)
+
+Both of these fixes have been made upstream with the git commit IDs given but
+those are larger commits containing other changes not needed here.
+
+Signed-off-by: Paul Barker <paul@paulbarker.me.uk>
+
+Upstream-status: Backport
+---
+ lib/ext2fs/unix_io.c | 6 +++---
+ misc/create_inode.c | 1 +
+ 2 files changed, 4 insertions(+), 3 deletions(-)
+
+diff --git a/lib/ext2fs/unix_io.c b/lib/ext2fs/unix_io.c
+index 19be630..0cc0f52 100644
+--- a/lib/ext2fs/unix_io.c
++++ b/lib/ext2fs/unix_io.c
+@@ -931,10 +931,10 @@ static errcode_t unix_discard(io_channel channel, unsigned long long block,
+
+ if (channel->flags & CHANNEL_FLAGS_BLOCK_DEVICE) {
+ #ifdef BLKDISCARD
+- __uint64_t range[2];
++ __u64 range[2];
+
+- range[0] = (__uint64_t)(block) * channel->block_size;
+- range[1] = (__uint64_t)(count) * channel->block_size;
++ range[0] = (__u64)(block) * channel->block_size;
++ range[1] = (__u64)(count) * channel->block_size;
+
+ ret = ioctl(data->dev, BLKDISCARD, &range);
+ #else
+diff --git a/misc/create_inode.c b/misc/create_inode.c
+index 6d8de04..fcec5aa 100644
+--- a/misc/create_inode.c
++++ b/misc/create_inode.c
+@@ -1,4 +1,5 @@
+ #include "create_inode.h"
++#include <limits.h>
+
+ #if __STDC_VERSION__ < 199901L
+ # if __GNUC__ >= 2
+--
+1.9.1
+
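
A side note on the fix just added: __uint64_t is a glibc-internal typedef, so it is not available on musl, whereas the kernel's __u64 (or plain uint64_t from <stdint.h>) works on both. A minimal sketch of the same BLKDISCARD call using the portable type; it is illustrative only, not code from e2fsprogs, and assumes fd refers to a block device.

#include <stdint.h>
#include <sys/ioctl.h>
#include <linux/fs.h>        /* BLKDISCARD */

/* Illustrative only: discard a byte range on a block device using a
 * fixed-width type that exists on both glibc and musl. */
static int discard_range(int fd, uint64_t start, uint64_t length)
{
    uint64_t range[2] = { start, length };

    return ioctl(fd, BLKDISCARD, &range);
}
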
diff --git a/meta/recipes-devtools/e2fsprogs/e2fsprogs/acinclude.m4 b/meta/recipes-devtools/e2fsprogs/e2fsprogs/acinclude.m4
index 4b00668476..c0bd7dbdee 100644
--- a/meta/recipes-devtools/e2fsprogs/e2fsprogs/acinclude.m4
+++ b/meta/recipes-devtools/e2fsprogs/e2fsprogs/acinclude.m4
@@ -1,7 +1,56 @@
-# Extracted from the package's shipped aclocal.m4. Custom macros should be in
+# Extracted from the package's shipped aclocal.m4. Custom macros should be in
# acinclude.m4 so running aclocal doesn't blow them away.
#
-# RP 1/6/2010
+# Signed-off-by: Ross Burton <ross.burton@intel.com>
+
+# from http://autoconf-archive.cryp.to/ax_tls.html
+#
+# This was licensed under the GPL with the following exception:
+#
+# As a special exception, the respective Autoconf Macro's copyright
+# owner gives unlimited permission to copy, distribute and modify the
+# configure scripts that are the output of Autoconf when processing
+# the Macro. You need not follow the terms of the GNU General Public
+# License when using or distributing such scripts, even though
+# portions of the text of the Macro appear in them. The GNU General
+# Public License (GPL) does govern all other use of the material that
+# constitutes the Autoconf Macro.
+#
+# This special exception to the GPL applies to versions of the
+# Autoconf Macro released by the Autoconf Macro Archive. When you make
+# and distribute a modified version of the Autoconf Macro, you may
+# extend this special exception to the GPL to apply to your modified
+# version as well.
+#
+AC_DEFUN([AX_TLS], [
+ AC_MSG_CHECKING(for thread local storage (TLS) class)
+ AC_CACHE_VAL(ac_cv_tls, [
+ ax_tls_keywords="__thread __declspec(thread) none"
+ for ax_tls_keyword in $ax_tls_keywords; do
+ case $ax_tls_keyword in
+ none) ac_cv_tls=none ; break ;;
+ *)
+ AC_TRY_COMPILE(
+ [#include <stdlib.h>
+ static void
+ foo(void) {
+ static ] $ax_tls_keyword [ int bar;
+ exit(1);
+ }],
+ [],
+ [ac_cv_tls=$ax_tls_keyword ; break],
+ ac_cv_tls=none
+ )
+ esac
+ done
+])
+
+ if test "$ac_cv_tls" != "none"; then
+ dnl AC_DEFINE([TLS], [], [If the compiler supports a TLS storage class define it to that here])
+ AC_DEFINE_UNQUOTED([TLS], $ac_cv_tls, [If the compiler supports a TLS storage class define it to that here])
+ fi
+ AC_MSG_RESULT($ac_cv_tls)
+])
# ===========================================================================
# http://www.nongnu.org/autoconf-archive/check_gnu_make.html
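
For reference, the AX_TLS macro added above probes which thread-local storage keyword the compiler accepts and defines TLS to it (typically __thread), leaving TLS undefined when none works. The short C sketch below shows how such a config.h result is typically consumed; it is illustrative, not code from e2fsprogs, and the identifier names are assumptions.

#include "config.h"            /* may define TLS to __thread or similar */

#ifdef TLS
static TLS int per_thread_count;   /* one instance per thread */
#else
static int per_thread_count;       /* fallback: a single shared counter */
#endif

int bump_count(void)
{
    return ++per_thread_count;
}
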
diff --git a/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.42.9.bb b/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.42.9.bb
index 530255474d..1fb4a6cd85 100644
--- a/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.42.9.bb
+++ b/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.42.9.bb
@@ -16,6 +16,7 @@ SRC_URI += "file://acinclude.m4 \
file://0009-misc-create_inode.c-handle-hardlinks.patch \
file://0010-debugfs-use-the-functions-in-misc-create_inode.c.patch \
file://0011-mke2fs.8.in-update-the-manual-for-the-d-option.patch \
+ file://0012-Fix-musl-build-failures.patch \
file://0001-e2fsprogs-fix-cross-compilation-problem.patch \
file://misc-mke2fs.c-return-error-when-failed-to-populate-fs.patch \
"
@@ -23,18 +24,13 @@ SRC_URI += "file://acinclude.m4 \
SRC_URI[md5sum] = "3f8e41e63b432ba114b33f58674563f7"
SRC_URI[sha256sum] = "2f92ac06e92fa00f2ada3ee67dad012d74d685537527ad1241d82f2d041f2802"
-EXTRA_OECONF += "--libdir=${base_libdir} --sbindir=${base_sbindir} --enable-elf-shlibs --disable-libuuid --disable-uuidd"
+EXTRA_OECONF += "--libdir=${base_libdir} --sbindir=${base_sbindir} --enable-elf-shlibs --disable-libuuid --disable-uuidd --enable-verbose-makecmds"
EXTRA_OECONF_darwin = "--libdir=${base_libdir} --sbindir=${base_sbindir} --enable-bsd-shlibs"
do_configure_prepend () {
cp ${WORKDIR}/acinclude.m4 ${S}/
}
-do_compile_prepend () {
- find ./ -print | grep -v ./patches | xargs chmod u=rwX
- ( cd ${S}/util; ${BUILD_CC} subst.c -o ${B}/util/subst )
-}
-
do_install () {
oe_runmake 'DESTDIR=${D}' install
oe_runmake 'DESTDIR=${D}' install-libs
@@ -58,6 +54,12 @@ do_install () {
mv ${D}${bindir}/chattr ${D}${base_bindir}/chattr.e2fsprogs
}
+do_install_append_class-target() {
+ # Clean host path in compile_et, mk_cmds
+ sed -i -e "s,ET_DIR=\"${S}/lib/et\",ET_DIR=\"${datadir}/et\",g" ${D}${bindir}/compile_et
+ sed -i -e "s,SS_DIR=\"${S}/lib/ss\",SS_DIR=\"${datadir}/ss\",g" ${D}${bindir}/mk_cmds
+}
+
RDEPENDS_e2fsprogs = "e2fsprogs-badblocks"
RRECOMMENDS_e2fsprogs = "e2fsprogs-mke2fs e2fsprogs-e2fsck"
diff --git a/meta/recipes-devtools/elfutils/elfutils/arm_backend.diff b/meta/recipes-devtools/elfutils/elfutils-0.148/arm_backend.diff
index d4e4675ad5..d4e4675ad5 100644
--- a/meta/recipes-devtools/elfutils/elfutils/arm_backend.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/arm_backend.diff
diff --git a/meta/recipes-devtools/elfutils/elfutils/elf_additions.diff b/meta/recipes-devtools/elfutils/elfutils-0.148/elf_additions.diff
index 5baa709000..5baa709000 100644
--- a/meta/recipes-devtools/elfutils/elfutils/elf_additions.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/elf_additions.diff
diff --git a/meta/recipes-devtools/elfutils/elfutils/elfutils-ar-c-fix-num-passed-to-memset.patch b/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-ar-c-fix-num-passed-to-memset.patch
index b619619ec0..b619619ec0 100644
--- a/meta/recipes-devtools/elfutils/elfutils/elfutils-ar-c-fix-num-passed-to-memset.patch
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-ar-c-fix-num-passed-to-memset.patch
diff --git a/meta/recipes-devtools/elfutils/elfutils/elfutils-fsize.patch b/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-fsize.patch
index 0ff353d0de..0ff353d0de 100644
--- a/meta/recipes-devtools/elfutils/elfutils/elfutils-fsize.patch
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-fsize.patch
diff --git a/meta/recipes-devtools/elfutils/elfutils/fix-build-gcc-4.8.patch b/meta/recipes-devtools/elfutils/elfutils-0.148/fix-build-gcc-4.8.patch
index 0e28690207..0e28690207 100644
--- a/meta/recipes-devtools/elfutils/elfutils/fix-build-gcc-4.8.patch
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/fix-build-gcc-4.8.patch
diff --git a/meta/recipes-devtools/elfutils/elfutils/fix_for_gcc-4.7.patch b/meta/recipes-devtools/elfutils/elfutils-0.148/fix_for_gcc-4.7.patch
index bd634b4418..bd634b4418 100644
--- a/meta/recipes-devtools/elfutils/elfutils/fix_for_gcc-4.7.patch
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/fix_for_gcc-4.7.patch
diff --git a/meta/recipes-devtools/elfutils/elfutils/hppa_backend.diff b/meta/recipes-devtools/elfutils/elfutils-0.148/hppa_backend.diff
index a86b97c683..a86b97c683 100644
--- a/meta/recipes-devtools/elfutils/elfutils/hppa_backend.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/hppa_backend.diff
diff --git a/meta/recipes-devtools/elfutils/elfutils/m68k_backend.diff b/meta/recipes-devtools/elfutils/elfutils-0.148/m68k_backend.diff
index 5b621f92ff..5b621f92ff 100644
--- a/meta/recipes-devtools/elfutils/elfutils/m68k_backend.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/m68k_backend.diff
diff --git a/meta/recipes-devtools/elfutils/elfutils/mips_backend.diff b/meta/recipes-devtools/elfutils/elfutils-0.148/mips_backend.diff
index 3f81a75b1a..3f81a75b1a 100644
--- a/meta/recipes-devtools/elfutils/elfutils/mips_backend.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/mips_backend.diff
diff --git a/meta/recipes-devtools/elfutils/elfutils/nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch b/meta/recipes-devtools/elfutils/elfutils-0.148/nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch
index 2b5dad368a..2b5dad368a 100644
--- a/meta/recipes-devtools/elfutils/elfutils/nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch
diff --git a/meta/recipes-devtools/elfutils/elfutils/redhat-portability.diff b/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-portability.diff
index b8a912c412..b8a912c412 100644
--- a/meta/recipes-devtools/elfutils/elfutils/redhat-portability.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-portability.diff
diff --git a/meta/recipes-devtools/elfutils/elfutils/redhat-robustify.diff b/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-robustify.diff
index cd398549df..cd398549df 100644
--- a/meta/recipes-devtools/elfutils/elfutils/redhat-robustify.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-robustify.diff
diff --git a/meta/recipes-devtools/elfutils/elfutils/remove-unused.patch b/meta/recipes-devtools/elfutils/elfutils-0.148/remove-unused.patch
index 6a19791480..6a19791480 100644
--- a/meta/recipes-devtools/elfutils/elfutils/remove-unused.patch
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/remove-unused.patch
diff --git a/meta/recipes-devtools/elfutils/elfutils/testsuite-ignore-elflint.diff b/meta/recipes-devtools/elfutils/elfutils-0.148/testsuite-ignore-elflint.diff
index d792d5fd73..d792d5fd73 100644
--- a/meta/recipes-devtools/elfutils/elfutils/testsuite-ignore-elflint.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.148/testsuite-ignore-elflint.diff
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/CVE-2014-0172.patch b/meta/recipes-devtools/elfutils/elfutils-0.158/CVE-2014-0172.patch
deleted file mode 100644
index 6c44314589..0000000000
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/CVE-2014-0172.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-From 7f1eec317db79627b473c5b149a22a1b20d1f68f Mon Sep 17 00:00:00 2001
-From: Mark Wielaard <mjw@redhat.com>
-Date: Wed, 9 Apr 2014 11:33:23 +0200
-Subject: [PATCH] CVE-2014-0172 Check for overflow before calling malloc to
- uncompress data.
-
-https://bugzilla.redhat.com/show_bug.cgi?id=1085663
-
-Reported-by: Florian Weimer <fweimer@redhat.com>
-Signed-off-by: Mark Wielaard <mjw@redhat.com>
-
-Index: elfutils-0.158/libdw/dwarf_begin_elf.c
-===================================================================
---- elfutils-0.158.orig/libdw/dwarf_begin_elf.c 2014-05-01 17:10:01.928213292 +0000
-+++ elfutils-0.158/libdw/dwarf_begin_elf.c 2014-05-01 17:10:01.924213375 +0000
-@@ -1,5 +1,5 @@
- /* Create descriptor from ELF descriptor for processing file.
-- Copyright (C) 2002-2011 Red Hat, Inc.
-+ Copyright (C) 2002-2011, 2014 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2002.
-
-@@ -289,6 +289,12 @@
- memcpy (&size, data->d_buf + 4, sizeof size);
- size = be64toh (size);
-
-+ /* Check for unsigned overflow so malloc always allocated
-+ enough memory for both the Elf_Data header and the
-+ uncompressed section data. */
-+ if (unlikely (sizeof (Elf_Data) + size < size))
-+ break;
-+
- Elf_Data *zdata = malloc (sizeof (Elf_Data) + size);
- if (unlikely (zdata == NULL))
- break;
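
The patch removed above carried a simple but important pattern: before calling malloc on sizeof (Elf_Data) + size, check that the addition cannot wrap around, otherwise a crafted 64-bit size produces a short allocation and a heap overflow. A minimal sketch of that guard in isolation, not the elfutils code:

#include <stdlib.h>

/* Illustrative only: refuse a header-plus-payload allocation whose size
 * computation would wrap around, mirroring the CVE-2014-0172 check. */
static void *alloc_with_header(size_t header_size, size_t payload_size)
{
    if (header_size + payload_size < payload_size)
        return NULL;             /* unsigned wrap-around detected */

    return malloc(header_size + payload_size);
}
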
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/core_filename.patch b/meta/recipes-devtools/elfutils/elfutils-0.158/core_filename.patch
deleted file mode 100644
index e2f0576d7b..0000000000
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/core_filename.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From: Matthias Klose <doko@ubuntu.com>
-Date: Tue, 7 Jan 2014 10:25:29 +0100
-Subject: [PATCH] tests: backtrace-subr.sh (check_native_core) should check
- core file name.
-
-Needed when /proc/sys/kernel/core_uses_pid is set to 0. Try to rename
-the core file, and if it does still fail, skip the test.
-
-diff --git a/tests/backtrace-subr.sh b/tests/backtrace-subr.sh
-index e7ece91..62b873c 100644
---- a/tests/backtrace-subr.sh
-+++ b/tests/backtrace-subr.sh
-@@ -111,6 +111,11 @@ check_native_core()
-
- # Skip the test if we cannot adjust core ulimit.
- core="core.`ulimit -c unlimited || exit 77; set +ex; testrun ${abs_builddir}/$child --gencore; true`"
-+ # see if /proc/sys/kernel/core_uses_pid is set to 0
-+ if [ -f core ]; then
-+ mv core "$core"
-+ fi
-+ if [ ! -f "$core" ]; then exit 77; fi
-
- if [ "x$SAVED_VALGRIND_CMD" != "x" ]; then
- VALGRIND_CMD="$SAVED_VALGRIND_CMD"
---
-1.9.2
-
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/elf_additions.diff b/meta/recipes-devtools/elfutils/elfutils-0.158/elf_additions.diff
deleted file mode 100644
index 671c8ee1ae..0000000000
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/elf_additions.diff
+++ /dev/null
@@ -1,77 +0,0 @@
-Upstream-Status: Backport
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- libelf/elf.h | 27 +++++++++++++++++++++++++--
- 1 file changed, 25 insertions(+), 2 deletions(-)
-
-diff --git a/libelf/elf.h b/libelf/elf.h
---- a/libelf/elf.h
-+++ b/libelf/elf.h
-@@ -142,6 +142,7 @@ typedef struct
- #define ELFOSABI_NETBSD 2 /* NetBSD. */
- #define ELFOSABI_GNU 3 /* Object uses GNU ELF extensions. */
- #define ELFOSABI_LINUX ELFOSABI_GNU /* Compatibility alias. */
-+#define ELFOSABI_HURD 4 /* GNU/Hurd */
- #define ELFOSABI_SOLARIS 6 /* Sun Solaris. */
- #define ELFOSABI_AIX 7 /* IBM AIX. */
- #define ELFOSABI_IRIX 8 /* SGI Irix. */
-@@ -149,8 +150,13 @@ typedef struct
- #define ELFOSABI_TRU64 10 /* Compaq TRU64 UNIX. */
- #define ELFOSABI_MODESTO 11 /* Novell Modesto. */
- #define ELFOSABI_OPENBSD 12 /* OpenBSD. */
-+#define ELFOSABI_OPENVMS 13 /* OpenVMS */
-+#define ELFOSABI_NSK 14 /* Hewlett-Packard Non-Stop Kernel */
-+#define ELFOSABI_AROS 15 /* Amiga Research OS */
-+/* 64-255 Architecture-specific value range */
- #define ELFOSABI_ARM_AEABI 64 /* ARM EABI */
- #define ELFOSABI_ARM 97 /* ARM */
-+/* This is deprecated? It's not in the latest version anymore. */
- #define ELFOSABI_STANDALONE 255 /* Standalone (embedded) application */
-
- #define EI_ABIVERSION 8 /* ABI version */
-@@ -205,7 +211,7 @@ typedef struct
- #define EM_H8_300H 47 /* Hitachi H8/300H */
- #define EM_H8S 48 /* Hitachi H8S */
- #define EM_H8_500 49 /* Hitachi H8/500 */
--#define EM_IA_64 50 /* Intel Merced */
-+#define EM_IA_64 50 /* Intel IA64 */
- #define EM_MIPS_X 51 /* Stanford MIPS-X */
- #define EM_COLDFIRE 52 /* Motorola Coldfire */
- #define EM_68HC12 53 /* Motorola M68HC12 */
-@@ -219,7 +225,8 @@ typedef struct
- #define EM_TINYJ 61 /* Advanced Logic Corp. Tinyj emb.fam*/
- #define EM_X86_64 62 /* AMD x86-64 architecture */
- #define EM_PDSP 63 /* Sony DSP Processor */
--
-+#define EM_PDP10 64 /* Digital Equipment Corp. PDP-10 */
-+#define EM_PDP11 65 /* Digital Equipment Corp. PDP-11 */
- #define EM_FX66 66 /* Siemens FX66 microcontroller */
- #define EM_ST9PLUS 67 /* STMicroelectronics ST9+ 8/16 mc */
- #define EM_ST7 68 /* STmicroelectronics ST7 8 bit mc */
-@@ -249,6 +256,22 @@ typedef struct
- #define EM_OPENRISC 92 /* OpenRISC 32-bit embedded processor */
- #define EM_ARC_A5 93 /* ARC Cores Tangent-A5 */
- #define EM_XTENSA 94 /* Tensilica Xtensa Architecture */
-+#define EM_VIDEOCORE 95 /* Alphamosaic VideoCore processor */
-+#define EM_TMM_GPP 96 /* Thompson Multimedia General Purpose Processor */
-+#define EM_NS32K 97 /* National Semiconductor 32000 series */
-+#define EM_TPC 98 /* Tenor Network TPC processor */
-+#define EM_SNP1K 99 /* Trebia SNP 1000 processor */
-+#define EM_ST200 100 /* STMicroelectronics (www.st.com) ST200 microcontroller */
-+#define EM_IP2K 101 /* Ubicom IP2XXX microcontroller family */
-+#define EM_MAX 102 /* MAX Processor */
-+#define EM_CR 103 /* National Semiconductor CompactRISC */
-+#define EM_F2MC16 104 /* Fujitsu F2MC16 */
-+#define EM_MSP430 105 /* TI msp430 micro controller */
-+#define EM_BLACKFIN 106 /* Analog Devices Blackfin (DSP) processor */
-+#define EM_SE_C33 107 /* S1C33 Family of Seiko Epson processors */
-+#define EM_SEP 108 /* Sharp embedded microprocessor */
-+#define EM_ARCA 109 /* Arca RISC Microprocessor */
-+
- #define EM_AARCH64 183 /* ARM AARCH64 */
- #define EM_TILEPRO 188 /* Tilera TILEPro */
- #define EM_MICROBLAZE 189 /* Xilinx MicroBlaze */
---
-1.8.1.2
-
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/m4-biarch.m4-tweak-AC_RUN_IFELSE-for-cross-compiling.patch b/meta/recipes-devtools/elfutils/elfutils-0.158/m4-biarch.m4-tweak-AC_RUN_IFELSE-for-cross-compiling.patch
deleted file mode 100644
index 1dbd52d557..0000000000
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/m4-biarch.m4-tweak-AC_RUN_IFELSE-for-cross-compiling.patch
+++ /dev/null
@@ -1,34 +0,0 @@
-m4/biarch.m4: tweak AC_RUN_IFELSE for cross-compiling
-
-Macro: AC_RUN_IFELSE (input,
- [action-if-true],
- [action-if-false],
- [action-if-cross-compiling])
-
-Add the missing [action-if-cross-compiling] part to support
-cross-compiling.
-
-Upstream-Status: inappropriate [oe specific]
-
-Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
----
- m4/biarch.m4 | 4 +++-
- 1 file changed, 3 insertions(+), 1 deletion(-)
-
-diff --git a/m4/biarch.m4 b/m4/biarch.m4
---- a/m4/biarch.m4
-+++ b/m4/biarch.m4
-@@ -40,7 +40,9 @@ AC_CACHE_CHECK([whether $biarch_CC makes executables we can run],
- save_CC="$CC"
- CC="$biarch_CC"
- AC_RUN_IFELSE([AC_LANG_PROGRAM([], [])],
-- utrace_cv_cc_biarch=yes, utrace_cv_cc_biarch=no)
-+ utrace_cv_cc_biarch=yes,
-+ utrace_cv_cc_biarch=no,
-+ utrace_cv_cc_biarch=yes)
- CC="$save_CC"])], [utrace_cv_cc_biarch=no])
- AS_IF([test $utrace_cv_cc_biarch != yes], [dnl
- AC_MSG_WARN([not running biarch tests, $biarch_CC does not work])])])
---
-1.8.1.2
-
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/redhat-robustify.diff b/meta/recipes-devtools/elfutils/elfutils-0.158/redhat-robustify.diff
deleted file mode 100644
index f358a85466..0000000000
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/redhat-robustify.diff
+++ /dev/null
@@ -1,1756 +0,0 @@
---- elfutils/libdwfl/ChangeLog
-+++ elfutils/libdwfl/ChangeLog
-@@ -680,6 +680,11 @@
- * dwfl_module_getdwarf.c (open_elf): Clear errno before CBFAIL.
- Reported by Kurt Roeckx <kurt@roeckx.be>.
-
-+2011-03-23 Petr Machata <pmachata@redhat.com>
-+
-+ * relocate.c (relocate_section): Use gelf_fsize instead of relying
-+ on shdr->sh_entsize.
-+
- 2011-02-11 Roland McGrath <roland@redhat.com>
-
- * linux-kernel-modules.c (try_kernel_name): Try .gz, .bz2, .xz
---- elfutils/libdwfl/relocate.c
-+++ elfutils/libdwfl/relocate.c
-@@ -1,5 +1,5 @@
- /* Relocate debug information.
-- Copyright (C) 2005-2010 Red Hat, Inc.
-+ Copyright (C) 2005-2011 Red Hat, Inc.
- This file is part of elfutils.
-
- This file is free software; you can redistribute it and/or modify
-@@ -456,7 +456,10 @@ relocate_section (Dwfl_Module *mod, Elf
- }
- }
-
-- size_t nrels = shdr->sh_size / shdr->sh_entsize;
-+ size_t sh_entsize
-+ = gelf_fsize (relocated, shdr->sh_type == SHT_REL ? ELF_T_REL : ELF_T_RELA,
-+ 1, EV_CURRENT);
-+ size_t nrels = shdr->sh_size / sh_entsize;
- size_t complete = 0;
- if (shdr->sh_type == SHT_REL)
- for (size_t relidx = 0; !result && relidx < nrels; ++relidx)
-@@ -558,7 +561,7 @@ relocate_section (Dwfl_Module *mod, Elf
- nrels = next;
- }
-
-- shdr->sh_size = reldata->d_size = nrels * shdr->sh_entsize;
-+ shdr->sh_size = reldata->d_size = nrels * sh_entsize;
- gelf_update_shdr (scn, shdr);
- }
-
---- elfutils/libelf/ChangeLog
-+++ elfutils/libelf/ChangeLog
-@@ -754,10 +754,53 @@
- If section content hasn't been read yet, do it before looking for the
- block size. If no section data present, infer size of section header.
-
-+2005-05-14 Jakub Jelinek <jakub@redhat.com>
-+
-+ * libelfP.h (INVALID_NDX): Define.
-+ * gelf_getdyn.c (gelf_getdyn): Use it. Remove ndx < 0 test if any.
-+ * gelf_getlib.c (gelf_getlib): Likewise.
-+ * gelf_getmove.c (gelf_getmove): Likewise.
-+ * gelf_getrel.c (gelf_getrel): Likewise.
-+ * gelf_getrela.c (gelf_getrela): Likewise.
-+ * gelf_getsym.c (gelf_getsym): Likewise.
-+ * gelf_getsyminfo.c (gelf_getsyminfo): Likewise.
-+ * gelf_getsymshndx.c (gelf_getsymshndx): Likewise.
-+ * gelf_getversym.c (gelf_getversym): Likewise.
-+ * gelf_update_dyn.c (gelf_update_dyn): Likewise.
-+ * gelf_update_lib.c (gelf_update_lib): Likewise.
-+ * gelf_update_move.c (gelf_update_move): Likewise.
-+ * gelf_update_rel.c (gelf_update_rel): Likewise.
-+ * gelf_update_rela.c (gelf_update_rela): Likewise.
-+ * gelf_update_sym.c (gelf_update_sym): Likewise.
-+ * gelf_update_syminfo.c (gelf_update_syminfo): Likewise.
-+ * gelf_update_symshndx.c (gelf_update_symshndx): Likewise.
-+ * gelf_update_versym.c (gelf_update_versym): Likewise.
-+ * elf_newscn.c (elf_newscn): Check for overflow.
-+ * elf32_updatefile.c (__elfw2(LIBELFBITS,updatemmap)): Likewise.
-+ (__elfw2(LIBELFBITS,updatefile)): Likewise.
-+ * elf_begin.c (file_read_elf): Likewise.
-+ * elf32_newphdr.c (elfw2(LIBELFBITS,newphdr)): Likewise.
-+ * elf_getarsym.c (elf_getarsym): Likewise.
-+ * elf32_getshdr.c (elfw2(LIBELFBITS,getshdr)): Likewise.
- 2005-05-11 Ulrich Drepper <drepper@redhat.com>
-
- * elf.h: Update again.
-
-+2005-05-17 Jakub Jelinek <jakub@redhat.com>
-+
-+ * elf32_getphdr.c (elfw2(LIBELFBITS,getphdr)): Check if program header
-+ table fits into object's bounds.
-+ * elf_getshstrndx.c (elf_getshstrndx): Add elf->start_offset to
-+ elf->map_address. Check if first section header fits into object's
-+ bounds.
-+ * elf32_getshdr.c (elfw2(LIBELFBITS,getshdr)):
-+ Check if section header table fits into object's bounds.
-+ * elf_begin.c (get_shnum): Ensure section headers fits into
-+ object's bounds.
-+ (file_read_elf): Make sure scncnt is small enough to allocate both
-+ ElfXX_Shdr and Elf_Scn array. Make sure section and program header
-+ tables fit into object's bounds. Avoid memory leak on failure.
-+
- 2005-05-09 Ulrich Drepper <drepper@redhat.com>
-
- * elf.h: Update from glibc.
---- elfutils/libelf/elf32_getphdr.c
-+++ elfutils/libelf/elf32_getphdr.c
-@@ -93,6 +93,16 @@ __elfw2(LIBELFBITS,getphdr_wrlock) (elf)
-
- if (elf->map_address != NULL)
- {
-+ /* First see whether the information in the ELF header is
-+ valid and it does not ask for too much. */
-+ if (unlikely (ehdr->e_phoff >= elf->maximum_size)
-+ || unlikely (elf->maximum_size - ehdr->e_phoff < size))
-+ {
-+ /* Something is wrong. */
-+ __libelf_seterrno (ELF_E_INVALID_PHDR);
-+ goto out;
-+ }
-+
- /* All the data is already mapped. Use it. */
- void *file_phdr = ((char *) elf->map_address
- + elf->start_offset + ehdr->e_phoff);
---- elfutils/libelf/elf32_getshdr.c
-+++ elfutils/libelf/elf32_getshdr.c
-@@ -60,7 +60,8 @@ load_shdr_wrlock (Elf_Scn *scn)
- goto out;
-
- size_t shnum;
-- if (__elf_getshdrnum_rdlock (elf, &shnum) != 0)
-+ if (__elf_getshdrnum_rdlock (elf, &shnum) != 0
-+ || shnum > SIZE_MAX / sizeof (ElfW2(LIBELFBITS,Shdr)))
- goto out;
- size_t size = shnum * sizeof (ElfW2(LIBELFBITS,Shdr));
-
-@@ -77,6 +78,16 @@ load_shdr_wrlock (Elf_Scn *scn)
-
- if (elf->map_address != NULL)
- {
-+ /* First see whether the information in the ELF header is
-+ valid and it does not ask for too much. */
-+ if (unlikely (ehdr->e_shoff >= elf->maximum_size)
-+ || unlikely (elf->maximum_size - ehdr->e_shoff < size))
-+ {
-+ /* Something is wrong. */
-+ __libelf_seterrno (ELF_E_INVALID_SECTION_HEADER);
-+ goto free_and_out;
-+ }
-+
- ElfW2(LIBELFBITS,Shdr) *notcvt;
-
- /* All the data is already mapped. If we could use it
---- elfutils/libelf/elf32_newphdr.c
-+++ elfutils/libelf/elf32_newphdr.c
-@@ -114,6 +114,12 @@ elfw2(LIBELFBITS,newphdr) (elf, count)
- || count == PN_XNUM
- || elf->state.ELFW(elf,LIBELFBITS).phdr == NULL)
- {
-+ if (unlikely (count > SIZE_MAX / sizeof (ElfW2(LIBELFBITS,Phdr))))
-+ {
-+ result = NULL;
-+ goto out;
-+ }
-+
- /* Allocate a new program header with the appropriate number of
- elements. */
- result = (ElfW2(LIBELFBITS,Phdr) *)
---- elfutils/libelf/elf32_updatefile.c
-+++ elfutils/libelf/elf32_updatefile.c
-@@ -202,6 +202,9 @@ __elfw2(LIBELFBITS,updatemmap) (Elf *elf
- /* Write all the sections. Well, only those which are modified. */
- if (shnum > 0)
- {
-+ if (unlikely (shnum > SIZE_MAX / sizeof (Elf_Scn *)))
-+ return 1;
-+
- Elf_ScnList *list = &elf->state.ELFW(elf,LIBELFBITS).scns;
- Elf_Scn **scns = (Elf_Scn **) alloca (shnum * sizeof (Elf_Scn *));
- char *const shdr_start = ((char *) elf->map_address + elf->start_offset
-@@ -624,6 +627,10 @@ __elfw2(LIBELFBITS,updatefile) (Elf *elf
- /* Write all the sections. Well, only those which are modified. */
- if (shnum > 0)
- {
-+ if (unlikely (shnum > SIZE_MAX / (sizeof (Elf_Scn *)
-+ + sizeof (ElfW2(LIBELFBITS,Shdr)))))
-+ return 1;
-+
- off_t shdr_offset = elf->start_offset + ehdr->e_shoff;
- #if EV_NUM != 2
- xfct_t shdr_fctp = __elf_xfctstom[__libelf_version - 1][EV_CURRENT - 1][ELFW(ELFCLASS, LIBELFBITS) - 1][ELF_T_SHDR];
---- elfutils/libelf/elf_begin.c
-+++ elfutils/libelf/elf_begin.c
-@@ -144,7 +144,8 @@ get_shnum (void *map_address, unsigned c
-
- if (unlikely (result == 0) && ehdr.e32->e_shoff != 0)
- {
-- if (ehdr.e32->e_shoff + sizeof (Elf32_Shdr) > maxsize)
-+ if (unlikely (ehdr.e32->e_shoff >= maxsize)
-+ || unlikely (maxsize - ehdr.e32->e_shoff < sizeof (Elf32_Shdr)))
- /* Cannot read the first section header. */
- return 0;
-
-@@ -192,7 +193,8 @@ get_shnum (void *map_address, unsigned c
-
- if (unlikely (result == 0) && ehdr.e64->e_shoff != 0)
- {
-- if (ehdr.e64->e_shoff + sizeof (Elf64_Shdr) > maxsize)
-+ if (unlikely (ehdr.e64->e_shoff >= maxsize)
-+ || unlikely (ehdr.e64->e_shoff + sizeof (Elf64_Shdr) > maxsize))
- /* Cannot read the first section header. */
- return 0;
-
-@@ -264,6 +266,15 @@ file_read_elf (int fildes, void *map_add
- /* Could not determine the number of sections. */
- return NULL;
-
-+ /* Check for too many sections. */
-+ if (e_ident[EI_CLASS] == ELFCLASS32)
-+ {
-+ if (scncnt > SIZE_MAX / (sizeof (Elf_Scn) + sizeof (Elf32_Shdr)))
-+ return NULL;
-+ }
-+ else if (scncnt > SIZE_MAX / (sizeof (Elf_Scn) + sizeof (Elf64_Shdr)))
-+ return NULL;
-+
- /* We can now allocate the memory. Even if there are no section headers,
- we allocate space for a zeroth section in case we need it later. */
- const size_t scnmax = (scncnt ?: (cmd == ELF_C_RDWR || cmd == ELF_C_RDWR_MMAP)
-@@ -303,6 +314,16 @@ file_read_elf (int fildes, void *map_add
- {
- /* We can use the mmapped memory. */
- elf->state.elf32.ehdr = ehdr;
-+
-+ if (unlikely (ehdr->e_shoff >= maxsize)
-+ || unlikely (maxsize - ehdr->e_shoff
-+ < scncnt * sizeof (Elf32_Shdr)))
-+ {
-+ free_and_out:
-+ free (elf);
-+ __libelf_seterrno (ELF_E_INVALID_FILE);
-+ return NULL;
-+ }
- elf->state.elf32.shdr
- = (Elf32_Shdr *) ((char *) ehdr + ehdr->e_shoff);
-
-@@ -389,6 +410,11 @@ file_read_elf (int fildes, void *map_add
- {
- /* We can use the mmapped memory. */
- elf->state.elf64.ehdr = ehdr;
-+
-+ if (unlikely (ehdr->e_shoff >= maxsize)
-+ || unlikely (ehdr->e_shoff
-+ + scncnt * sizeof (Elf32_Shdr) > maxsize))
-+ goto free_and_out;
- elf->state.elf64.shdr
- = (Elf64_Shdr *) ((char *) ehdr + ehdr->e_shoff);
-
---- elfutils/libelf/elf_getarsym.c
-+++ elfutils/libelf/elf_getarsym.c
-@@ -183,6 +183,9 @@ elf_getarsym (elf, ptr)
- size_t index_size = atol (tmpbuf);
-
- if (SARMAG + sizeof (struct ar_hdr) + index_size > elf->maximum_size
-+#if SIZE_MAX <= 4294967295U
-+ || n >= SIZE_MAX / sizeof (Elf_Arsym)
-+#endif
- || n * w > index_size)
- {
- /* This index table cannot be right since it does not fit into
---- elfutils/libelf/elf_getshdrstrndx.c
-+++ elfutils/libelf/elf_getshdrstrndx.c
-@@ -104,10 +104,25 @@ elf_getshdrstrndx (elf, dst)
- if (elf->map_address != NULL
- && elf->state.elf32.ehdr->e_ident[EI_DATA] == MY_ELFDATA
- && (ALLOW_UNALIGNED
-- || (((size_t) ((char *) elf->map_address + offset))
-+ || (((size_t) ((char *) elf->map_address
-+ + elf->start_offset + offset))
- & (__alignof__ (Elf32_Shdr) - 1)) == 0))
-- /* We can directly access the memory. */
-- num = ((Elf32_Shdr *) (elf->map_address + offset))->sh_link;
-+ {
-+ /* First see whether the information in the ELF header is
-+ valid and it does not ask for too much. */
-+ if (unlikely (elf->maximum_size - offset
-+ < sizeof (Elf32_Shdr)))
-+ {
-+ /* Something is wrong. */
-+ __libelf_seterrno (ELF_E_INVALID_SECTION_HEADER);
-+ result = -1;
-+ goto out;
-+ }
-+
-+ /* We can directly access the memory. */
-+ num = ((Elf32_Shdr *) (elf->map_address + elf->start_offset
-+ + offset))->sh_link;
-+ }
- else
- {
- /* We avoid reading in all the section headers. Just read
-@@ -142,10 +157,25 @@ elf_getshdrstrndx (elf, dst)
- if (elf->map_address != NULL
- && elf->state.elf64.ehdr->e_ident[EI_DATA] == MY_ELFDATA
- && (ALLOW_UNALIGNED
-- || (((size_t) ((char *) elf->map_address + offset))
-+ || (((size_t) ((char *) elf->map_address
-+ + elf->start_offset + offset))
- & (__alignof__ (Elf64_Shdr) - 1)) == 0))
-- /* We can directly access the memory. */
-- num = ((Elf64_Shdr *) (elf->map_address + offset))->sh_link;
-+ {
-+ /* First see whether the information in the ELF header is
-+ valid and it does not ask for too much. */
-+ if (unlikely (elf->maximum_size - offset
-+ < sizeof (Elf64_Shdr)))
-+ {
-+ /* Something is wrong. */
-+ __libelf_seterrno (ELF_E_INVALID_SECTION_HEADER);
-+ result = -1;
-+ goto out;
-+ }
-+
-+ /* We can directly access the memory. */
-+ num = ((Elf64_Shdr *) (elf->map_address + elf->start_offset
-+ + offset))->sh_link;
-+ }
- else
- {
- /* We avoid reading in all the section headers. Just read
---- elfutils/libelf/elf_newscn.c
-+++ elfutils/libelf/elf_newscn.c
-@@ -83,10 +83,18 @@ elf_newscn (elf)
- else
- {
- /* We must allocate a new element. */
-- Elf_ScnList *newp;
-+ Elf_ScnList *newp = NULL;
-
- assert (elf->state.elf.scnincr > 0);
-
-+ if (
-+#if SIZE_MAX <= 4294967295U
-+ likely (elf->state.elf.scnincr
-+ < SIZE_MAX / 2 / sizeof (Elf_Scn) - sizeof (Elf_ScnList))
-+#else
-+ 1
-+#endif
-+ )
- newp = (Elf_ScnList *) calloc (sizeof (Elf_ScnList)
- + ((elf->state.elf.scnincr *= 2)
- * sizeof (Elf_Scn)), 1);
---- elfutils/libelf/gelf_getdyn.c
-+++ elfutils/libelf/gelf_getdyn.c
-@@ -1,5 +1,5 @@
- /* Get information from dynamic table at the given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -72,7 +72,7 @@ gelf_getdyn (data, ndx, dst)
- table entries has to be adopted. The user better has provided
- a buffer where we can store the information. While copying the
- data we are converting the format. */
-- if (unlikely ((ndx + 1) * sizeof (Elf32_Dyn) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf32_Dyn, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
-@@ -93,7 +93,7 @@ gelf_getdyn (data, ndx, dst)
-
- /* The data is already in the correct form. Just make sure the
- index is OK. */
-- if (unlikely ((ndx + 1) * sizeof (GElf_Dyn) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, GElf_Dyn, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
---- elfutils/libelf/gelf_getlib.c
-+++ elfutils/libelf/gelf_getlib.c
-@@ -1,5 +1,5 @@
- /* Get library from table at the given index.
-- Copyright (C) 2004 Red Hat, Inc.
-+ Copyright (C) 2004-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2004.
-
-@@ -65,7 +65,7 @@ gelf_getlib (data, ndx, dst)
- /* The data is already in the correct form. Just make sure the
- index is OK. */
- GElf_Lib *result = NULL;
-- if (unlikely ((ndx + 1) * sizeof (GElf_Lib) > data->d_size))
-+ if (INVALID_NDX (ndx, GElf_Lib, data))
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- else
- {
---- elfutils/libelf/gelf_getmove.c
-+++ elfutils/libelf/gelf_getmove.c
-@@ -1,5 +1,5 @@
- /* Get move structure at the given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -62,7 +62,7 @@ gelf_getmove (data, ndx, dst)
-
- /* The data is already in the correct form. Just make sure the
- index is OK. */
-- if (unlikely ((ndx + 1) * sizeof (GElf_Move) > data->d_size))
-+ if (INVALID_NDX (ndx, GElf_Move, data))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
---- elfutils/libelf/gelf_getrela.c
-+++ elfutils/libelf/gelf_getrela.c
-@@ -1,5 +1,5 @@
- /* Get RELA relocation information at given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -50,12 +50,6 @@ gelf_getrela (data, ndx, dst)
- if (data_scn == NULL)
- return NULL;
-
-- if (unlikely (ndx < 0))
-- {
-- __libelf_seterrno (ELF_E_INVALID_INDEX);
-- return NULL;
-- }
--
- if (unlikely (data_scn->d.d_type != ELF_T_RELA))
- {
- __libelf_seterrno (ELF_E_INVALID_HANDLE);
-@@ -72,7 +66,7 @@ gelf_getrela (data, ndx, dst)
- if (scn->elf->class == ELFCLASS32)
- {
- /* We have to convert the data. */
-- if (unlikely ((ndx + 1) * sizeof (Elf32_Rela) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf32_Rela, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- result = NULL;
-@@ -93,7 +87,7 @@ gelf_getrela (data, ndx, dst)
- {
- /* Simply copy the data after we made sure we are actually getting
- correct data. */
-- if (unlikely ((ndx + 1) * sizeof (Elf64_Rela) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf64_Rela, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- result = NULL;
---- elfutils/libelf/gelf_getrel.c
-+++ elfutils/libelf/gelf_getrel.c
-@@ -1,5 +1,5 @@
- /* Get REL relocation information at given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -50,12 +50,6 @@ gelf_getrel (data, ndx, dst)
- if (data_scn == NULL)
- return NULL;
-
-- if (unlikely (ndx < 0))
-- {
-- __libelf_seterrno (ELF_E_INVALID_INDEX);
-- return NULL;
-- }
--
- if (unlikely (data_scn->d.d_type != ELF_T_REL))
- {
- __libelf_seterrno (ELF_E_INVALID_HANDLE);
-@@ -72,7 +66,7 @@ gelf_getrel (data, ndx, dst)
- if (scn->elf->class == ELFCLASS32)
- {
- /* We have to convert the data. */
-- if (unlikely ((ndx + 1) * sizeof (Elf32_Rel) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf32_Rel, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- result = NULL;
-@@ -92,7 +86,7 @@ gelf_getrel (data, ndx, dst)
- {
- /* Simply copy the data after we made sure we are actually getting
- correct data. */
-- if (unlikely ((ndx + 1) * sizeof (Elf64_Rel) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf64_Rel, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- result = NULL;
---- elfutils/libelf/gelf_getsym.c
-+++ elfutils/libelf/gelf_getsym.c
-@@ -1,5 +1,5 @@
- /* Get symbol information from symbol table at the given index.
-- Copyright (C) 1999, 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 1999-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 1999.
-
-@@ -69,7 +69,7 @@ gelf_getsym (data, ndx, dst)
- table entries has to be adopted. The user better has provided
- a buffer where we can store the information. While copying the
- data we are converting the format. */
-- if (unlikely ((ndx + 1) * sizeof (Elf32_Sym) > data->d_size))
-+ if (INVALID_NDX (ndx, Elf32_Sym, data))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
-@@ -98,7 +98,7 @@ gelf_getsym (data, ndx, dst)
-
- /* The data is already in the correct form. Just make sure the
- index is OK. */
-- if (unlikely ((ndx + 1) * sizeof (GElf_Sym) > data->d_size))
-+ if (INVALID_NDX (ndx, GElf_Sym, data))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
---- elfutils/libelf/gelf_getsyminfo.c
-+++ elfutils/libelf/gelf_getsyminfo.c
-@@ -1,5 +1,5 @@
- /* Get additional symbol information from symbol table at the given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -63,7 +63,7 @@ gelf_getsyminfo (data, ndx, dst)
-
- /* The data is already in the correct form. Just make sure the
- index is OK. */
-- if (unlikely ((ndx + 1) * sizeof (GElf_Syminfo) > data->d_size))
-+ if (INVALID_NDX (ndx, GElf_Syminfo, data))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
---- elfutils/libelf/gelf_getsymshndx.c
-+++ elfutils/libelf/gelf_getsymshndx.c
-@@ -1,6 +1,6 @@
- /* Get symbol information and separate section index from symbol table
- at the given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -69,7 +69,7 @@ gelf_getsymshndx (symdata, shndxdata, nd
- section index table. */
- if (likely (shndxdata_scn != NULL))
- {
-- if (unlikely ((ndx + 1) * sizeof (Elf32_Word) > shndxdata_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf32_Word, &shndxdata_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
-@@ -89,7 +89,7 @@ gelf_getsymshndx (symdata, shndxdata, nd
- table entries has to be adopted. The user better has provided
- a buffer where we can store the information. While copying the
- data we are converting the format. */
-- if (unlikely ((ndx + 1) * sizeof (Elf32_Sym) > symdata->d_size))
-+ if (INVALID_NDX (ndx, Elf32_Sym, symdata))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
-@@ -118,7 +118,7 @@ gelf_getsymshndx (symdata, shndxdata, nd
-
- /* The data is already in the correct form. Just make sure the
- index is OK. */
-- if (unlikely ((ndx + 1) * sizeof (GElf_Sym) > symdata->d_size))
-+ if (INVALID_NDX (ndx, GElf_Sym, symdata))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
---- elfutils/libelf/gelf_getversym.c
-+++ elfutils/libelf/gelf_getversym.c
-@@ -1,5 +1,5 @@
- /* Get symbol version information at the given index.
-- Copyright (C) 1999, 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 1999-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 1999.
-
-@@ -71,7 +71,7 @@ gelf_getversym (data, ndx, dst)
-
- /* The data is already in the correct form. Just make sure the
- index is OK. */
-- if (unlikely ((ndx + 1) * sizeof (GElf_Versym) > data->d_size))
-+ if (INVALID_NDX (ndx, GElf_Versym, data))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- result = NULL;
---- elfutils/libelf/gelf_update_dyn.c
-+++ elfutils/libelf/gelf_update_dyn.c
-@@ -1,5 +1,5 @@
- /* Update information in dynamic table at the given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -50,12 +50,6 @@ gelf_update_dyn (data, ndx, src)
- if (data == NULL)
- return 0;
-
-- if (unlikely (ndx < 0))
-- {
-- __libelf_seterrno (ELF_E_INVALID_INDEX);
-- return 0;
-- }
--
- if (unlikely (data_scn->d.d_type != ELF_T_DYN))
- {
- /* The type of the data better should match. */
-@@ -81,7 +75,7 @@ gelf_update_dyn (data, ndx, src)
- }
-
- /* Check whether we have to resize the data buffer. */
-- if (unlikely ((ndx + 1) * sizeof (Elf32_Dyn) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf32_Dyn, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
-@@ -95,7 +89,7 @@ gelf_update_dyn (data, ndx, src)
- else
- {
- /* Check whether we have to resize the data buffer. */
-- if (unlikely ((ndx + 1) * sizeof (Elf64_Dyn) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf64_Dyn, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
---- elfutils/libelf/gelf_update_lib.c
-+++ elfutils/libelf/gelf_update_lib.c
-@@ -1,5 +1,5 @@
- /* Update library in table at the given index.
-- Copyright (C) 2004 Red Hat, Inc.
-+ Copyright (C) 2004-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2004.
-
-@@ -47,12 +47,6 @@ gelf_update_lib (data, ndx, src)
- if (data == NULL)
- return 0;
-
-- if (unlikely (ndx < 0))
-- {
-- __libelf_seterrno (ELF_E_INVALID_INDEX);
-- return 0;
-- }
--
- Elf_Data_Scn *data_scn = (Elf_Data_Scn *) data;
- if (unlikely (data_scn->d.d_type != ELF_T_LIB))
- {
-@@ -66,7 +60,7 @@ gelf_update_lib (data, ndx, src)
-
- /* Check whether we have to resize the data buffer. */
- int result = 0;
-- if (unlikely ((ndx + 1) * sizeof (Elf64_Lib) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf64_Lib, &data_scn->d))
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- else
- {
---- elfutils/libelf/gelf_update_move.c
-+++ elfutils/libelf/gelf_update_move.c
-@@ -1,5 +1,5 @@
- /* Update move structure at the given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -54,8 +54,7 @@ gelf_update_move (data, ndx, src)
- assert (sizeof (GElf_Move) == sizeof (Elf64_Move));
-
- /* Check whether we have to resize the data buffer. */
-- if (unlikely (ndx < 0)
-- || unlikely ((ndx + 1) * sizeof (GElf_Move) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, GElf_Move, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- return 0;
---- elfutils/libelf/gelf_update_rela.c
-+++ elfutils/libelf/gelf_update_rela.c
-@@ -1,5 +1,5 @@
- /* Update RELA relocation information at given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -47,12 +47,6 @@ gelf_update_rela (Elf_Data *dst, int ndx
- if (dst == NULL)
- return 0;
-
-- if (unlikely (ndx < 0))
-- {
-- __libelf_seterrno (ELF_E_INVALID_INDEX);
-- return 0;
-- }
--
- if (unlikely (data_scn->d.d_type != ELF_T_RELA))
- {
- /* The type of the data better should match. */
-@@ -80,7 +74,7 @@ gelf_update_rela (Elf_Data *dst, int ndx
- }
-
- /* Check whether we have to resize the data buffer. */
-- if (unlikely ((ndx + 1) * sizeof (Elf32_Rela) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf32_Rela, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
-@@ -96,7 +90,7 @@ gelf_update_rela (Elf_Data *dst, int ndx
- else
- {
- /* Check whether we have to resize the data buffer. */
-- if (unlikely ((ndx + 1) * sizeof (Elf64_Rela) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf64_Rela, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
---- elfutils/libelf/gelf_update_rel.c
-+++ elfutils/libelf/gelf_update_rel.c
-@@ -1,5 +1,5 @@
- /* Update REL relocation information at given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -47,12 +47,6 @@ gelf_update_rel (Elf_Data *dst, int ndx,
- if (dst == NULL)
- return 0;
-
-- if (unlikely (ndx < 0))
-- {
-- __libelf_seterrno (ELF_E_INVALID_INDEX);
-- return 0;
-- }
--
- if (unlikely (data_scn->d.d_type != ELF_T_REL))
- {
- /* The type of the data better should match. */
-@@ -78,7 +72,7 @@ gelf_update_rel (Elf_Data *dst, int ndx,
- }
-
- /* Check whether we have to resize the data buffer. */
-- if (unlikely ((ndx + 1) * sizeof (Elf32_Rel) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf32_Rel, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
-@@ -93,7 +87,7 @@ gelf_update_rel (Elf_Data *dst, int ndx,
- else
- {
- /* Check whether we have to resize the data buffer. */
-- if (unlikely ((ndx + 1) * sizeof (Elf64_Rel) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf64_Rel, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
---- elfutils/libelf/gelf_update_sym.c
-+++ elfutils/libelf/gelf_update_sym.c
-@@ -1,5 +1,5 @@
- /* Update symbol information in symbol table at the given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -51,12 +51,6 @@ gelf_update_sym (data, ndx, src)
- if (data == NULL)
- return 0;
-
-- if (unlikely (ndx < 0))
-- {
-- __libelf_seterrno (ELF_E_INVALID_INDEX);
-- return 0;
-- }
--
- if (unlikely (data_scn->d.d_type != ELF_T_SYM))
- {
- /* The type of the data better should match. */
-@@ -81,7 +75,7 @@ gelf_update_sym (data, ndx, src)
- }
-
- /* Check whether we have to resize the data buffer. */
-- if (unlikely ((ndx + 1) * sizeof (Elf32_Sym) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf32_Sym, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
-@@ -104,7 +98,7 @@ gelf_update_sym (data, ndx, src)
- else
- {
- /* Check whether we have to resize the data buffer. */
-- if (unlikely ((ndx + 1) * sizeof (Elf64_Sym) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf64_Sym, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
---- elfutils/libelf/gelf_update_syminfo.c
-+++ elfutils/libelf/gelf_update_syminfo.c
-@@ -1,5 +1,5 @@
- /* Update additional symbol information in symbol table at the given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -51,12 +51,6 @@ gelf_update_syminfo (data, ndx, src)
- if (data == NULL)
- return 0;
-
-- if (unlikely (ndx < 0))
-- {
-- __libelf_seterrno (ELF_E_INVALID_INDEX);
-- return 0;
-- }
--
- if (unlikely (data_scn->d.d_type != ELF_T_SYMINFO))
- {
- /* The type of the data better should match. */
-@@ -72,7 +66,7 @@ gelf_update_syminfo (data, ndx, src)
- rwlock_wrlock (scn->elf->lock);
-
- /* Check whether we have to resize the data buffer. */
-- if (unlikely ((ndx + 1) * sizeof (GElf_Syminfo) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, GElf_Syminfo, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
---- elfutils/libelf/gelf_update_symshndx.c
-+++ elfutils/libelf/gelf_update_symshndx.c
-@@ -1,6 +1,6 @@
- /* Update symbol information and section index in symbol table at the
- given index.
-- Copyright (C) 2000, 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2000-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2000.
-
-@@ -56,12 +56,6 @@ gelf_update_symshndx (symdata, shndxdata
- if (symdata == NULL)
- return 0;
-
-- if (unlikely (ndx < 0))
-- {
-- __libelf_seterrno (ELF_E_INVALID_INDEX);
-- return 0;
-- }
--
- if (unlikely (symdata_scn->d.d_type != ELF_T_SYM))
- {
- /* The type of the data better should match. */
-@@ -107,7 +101,7 @@ gelf_update_symshndx (symdata, shndxdata
- }
-
- /* Check whether we have to resize the data buffer. */
-- if (unlikely ((ndx + 1) * sizeof (Elf32_Sym) > symdata_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf32_Sym, &symdata_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
-@@ -130,7 +124,7 @@ gelf_update_symshndx (symdata, shndxdata
- else
- {
- /* Check whether we have to resize the data buffer. */
-- if (unlikely ((ndx + 1) * sizeof (Elf64_Sym) > symdata_scn->d.d_size))
-+ if (INVALID_NDX (ndx, Elf64_Sym, &symdata_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- goto out;
---- elfutils/libelf/gelf_update_versym.c
-+++ elfutils/libelf/gelf_update_versym.c
-@@ -1,5 +1,5 @@
- /* Update symbol version information.
-- Copyright (C) 2001, 2002 Red Hat, Inc.
-+ Copyright (C) 2001-2009 Red Hat, Inc.
- This file is part of elfutils.
- Written by Ulrich Drepper <drepper@redhat.com>, 2001.
-
-@@ -54,8 +54,7 @@ gelf_update_versym (data, ndx, src)
- assert (sizeof (GElf_Versym) == sizeof (Elf64_Versym));
-
- /* Check whether we have to resize the data buffer. */
-- if (unlikely (ndx < 0)
-- || unlikely ((ndx + 1) * sizeof (GElf_Versym) > data_scn->d.d_size))
-+ if (INVALID_NDX (ndx, GElf_Versym, &data_scn->d))
- {
- __libelf_seterrno (ELF_E_INVALID_INDEX);
- return 0;
---- elfutils/libelf/libelfP.h
-+++ elfutils/libelf/libelfP.h
-@@ -587,4 +587,8 @@ extern uint32_t __libelf_crc32 (uint32_t
- /* Align offset to 4 bytes as needed for note name and descriptor data. */
- #define NOTE_ALIGN(n) (((n) + 3) & -4U)
-
-+/* Convenience macro. */
-+#define INVALID_NDX(ndx, type, data) \
-+ unlikely ((data)->d_size / sizeof (type) <= (unsigned int) (ndx))
-+
- #endif /* libelfP.h */
---- elfutils/src/ChangeLog
-+++ elfutils/src/ChangeLog
-@@ -702,6 +702,12 @@
-
- * readelf.c (dwarf_attr_string): Grok DW_AT_GNU_odr_signature.
-
-+2011-03-23 Petr Machata <pmachata@redhat.com>
-+
-+ * readelf.c (handle_dynamic, handle_relocs_rel)
-+ (handle_relocs_rela, handle_versym, print_liblist):
-+ Use gelf_fsize instead of relying on shdr->sh_entsize.
-+
- 2011-02-11 Roland McGrath <roland@redhat.com>
-
- * elfcmp.c (verbose): New variable.
-@@ -2414,6 +2420,16 @@
- object symbols or symbols with unknown type.
- (check_rel): Likewise.
-
-+2005-06-09 Roland McGrath <roland@redhat.com>
-+
-+ * readelf.c (handle_dynamic, handle_symtab): Check for bogus sh_link.
-+ (handle_verneed, handle_verdef, handle_versym, handle_hash): Likewise.
-+ (handle_scngrp): Check for bogus sh_info.
-+
-+ * strip.c (handle_elf): Check for bogus values in sh_link, sh_info,
-+ st_shndx, e_shstrndx, and SHT_GROUP or SHT_SYMTAB_SHNDX data.
-+ Don't use assert on input values, instead bail with "illformed" error.
-+
- 2005-06-08 Roland McGrath <roland@redhat.com>
-
- * readelf.c (print_ops): Add consts.
-@@ -2459,6 +2475,19 @@
-
- * readelf.c (dwarf_tag_string): Add new tags.
-
-+2005-05-17 Jakub Jelinek <jakub@redhat.com>
-+
-+ * elflint.c (check_hash): Don't check entries beyond end of section.
-+ (check_note): Don't crash if gelf_rawchunk fails.
-+ (section_name): Return <invalid> if gelf_getshdr returns NULL.
-+
-+2005-05-14 Jakub Jelinek <jakub@redhat.com>
-+
-+ * elflint.c (section_name): Return "<invalid>" instead of
-+ crashing on invalid section name.
-+ (check_symtab, is_rel_dyn, check_rela, check_rel, check_dynamic,
-+ check_symtab_shndx, check_hash, check_versym): Robustify.
-+
- 2005-05-08 Roland McGrath <roland@redhat.com>
-
- * strip.c (handle_elf): Don't translate hash and versym data formats,
---- elfutils/src/elflint.c
-+++ elfutils/src/elflint.c
-@@ -123,6 +123,10 @@ static uint32_t shstrndx;
- /* Array to count references in section groups. */
- static int *scnref;
-
-+/* Numbers of sections and program headers. */
-+static unsigned int shnum;
-+static unsigned int phnum;
-+
-
- int
- main (int argc, char *argv[])
-@@ -311,10 +315,19 @@ section_name (Ebl *ebl, int idx)
- {
- GElf_Shdr shdr_mem;
- GElf_Shdr *shdr;
-+ const char *ret;
-+
-+ if ((unsigned int) idx > shnum)
-+ return "<invalid>";
-
- shdr = gelf_getshdr (elf_getscn (ebl->elf, idx), &shdr_mem);
-+ if (shdr == NULL)
-+ return "<invalid>";
-
-- return elf_strptr (ebl->elf, shstrndx, shdr->sh_name);
-+ ret = elf_strptr (ebl->elf, shstrndx, shdr->sh_name);
-+ if (ret == NULL)
-+ return "<invalid>";
-+ return ret;
- }
-
-
-@@ -337,11 +350,6 @@ static const int valid_e_machine[] =
- (sizeof (valid_e_machine) / sizeof (valid_e_machine[0]))
-
-
--/* Numbers of sections and program headers. */
--static unsigned int shnum;
--static unsigned int phnum;
--
--
- static void
- check_elf_header (Ebl *ebl, GElf_Ehdr *ehdr, size_t size)
- {
-@@ -625,7 +633,8 @@ section [%2d] '%s': symbol table cannot
- }
- }
-
-- if (shdr->sh_entsize != gelf_fsize (ebl->elf, ELF_T_SYM, 1, EV_CURRENT))
-+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_SYM, 1, EV_CURRENT);
-+ if (shdr->sh_entsize != sh_entsize)
- ERROR (gettext ("\
- section [%2u] '%s': entry size is does not match ElfXX_Sym\n"),
- idx, section_name (ebl, idx));
-@@ -663,7 +672,7 @@ section [%2d] '%s': XINDEX for zeroth en
- xndxscnidx, section_name (ebl, xndxscnidx));
- }
-
-- for (size_t cnt = 1; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt)
-+ for (size_t cnt = 1; cnt < shdr->sh_size / sh_entsize; ++cnt)
- {
- sym = gelf_getsymshndx (data, xndxdata, cnt, &sym_mem, &xndx);
- if (sym == NULL)
-@@ -683,7 +692,8 @@ section [%2d] '%s': symbol %zu: invalid
- else
- {
- name = elf_strptr (ebl->elf, shdr->sh_link, sym->st_name);
-- assert (name != NULL);
-+ assert (name != NULL
-+ || strshdr->sh_type != SHT_STRTAB);
- }
-
- if (sym->st_shndx == SHN_XINDEX)
-@@ -1040,9 +1050,11 @@ is_rel_dyn (Ebl *ebl, const GElf_Ehdr *e
- {
- GElf_Shdr rcshdr_mem;
- const GElf_Shdr *rcshdr = gelf_getshdr (scn, &rcshdr_mem);
-- assert (rcshdr != NULL);
-
-- if (rcshdr->sh_type == SHT_DYNAMIC)
-+ if (rcshdr == NULL)
-+ break;
-+
-+ if (rcshdr->sh_type == SHT_DYNAMIC && rcshdr->sh_entsize)
- {
- /* Found the dynamic section. Look through it. */
- Elf_Data *d = elf_getdata (scn, NULL);
-@@ -1052,7 +1064,9 @@ is_rel_dyn (Ebl *ebl, const GElf_Ehdr *e
- {
- GElf_Dyn dyn_mem;
- GElf_Dyn *dyn = gelf_getdyn (d, cnt, &dyn_mem);
-- assert (dyn != NULL);
-+
-+ if (dyn == NULL)
-+ break;
-
- if (dyn->d_tag == DT_RELCOUNT)
- {
-@@ -1066,7 +1080,9 @@ section [%2d] '%s': DT_RELCOUNT used for
- /* Does the number specified number of relative
- relocations exceed the total number of
- relocations? */
-- if (dyn->d_un.d_val > shdr->sh_size / shdr->sh_entsize)
-+ if (shdr->sh_entsize != 0
-+ && dyn->d_un.d_val > (shdr->sh_size
-+ / shdr->sh_entsize))
- ERROR (gettext ("\
- section [%2d] '%s': DT_RELCOUNT value %d too high for this section\n"),
- idx, section_name (ebl, idx),
-@@ -1226,7 +1242,8 @@ section [%2d] '%s': no relocations for m
- }
- }
-
-- if (shdr->sh_entsize != gelf_fsize (ebl->elf, reltype, 1, EV_CURRENT))
-+ size_t sh_entsize = gelf_fsize (ebl->elf, reltype, 1, EV_CURRENT);
-+ if (shdr->sh_entsize != sh_entsize)
- ERROR (gettext (reltype == ELF_T_RELA ? "\
- section [%2d] '%s': section entry size does not match ElfXX_Rela\n" : "\
- section [%2d] '%s': section entry size does not match ElfXX_Rel\n"),
-@@ -1449,7 +1466,8 @@ check_rela (Ebl *ebl, GElf_Ehdr *ehdr, G
- Elf_Data *symdata = elf_getdata (symscn, NULL);
- enum load_state state = state_undecided;
-
-- for (size_t cnt = 0; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt)
-+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_RELA, 1, EV_CURRENT);
-+ for (size_t cnt = 0; cnt < shdr->sh_size / sh_entsize; ++cnt)
- {
- GElf_Rela rela_mem;
- GElf_Rela *rela = gelf_getrela (data, cnt, &rela_mem);
-@@ -1499,7 +1517,8 @@ check_rel (Ebl *ebl, GElf_Ehdr *ehdr, GE
- Elf_Data *symdata = elf_getdata (symscn, NULL);
- enum load_state state = state_undecided;
-
-- for (size_t cnt = 0; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt)
-+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_REL, 1, EV_CURRENT);
-+ for (size_t cnt = 0; cnt < shdr->sh_size / sh_entsize; ++cnt)
- {
- GElf_Rel rel_mem;
- GElf_Rel *rel = gelf_getrel (data, cnt, &rel_mem);
-@@ -1598,7 +1617,8 @@ section [%2d] '%s': referenced as string
- shdr->sh_link, section_name (ebl, shdr->sh_link),
- idx, section_name (ebl, idx));
-
-- if (shdr->sh_entsize != gelf_fsize (ebl->elf, ELF_T_DYN, 1, EV_CURRENT))
-+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_DYN, 1, EV_CURRENT);
-+ if (shdr->sh_entsize != sh_entsize)
- ERROR (gettext ("\
- section [%2d] '%s': section entry size does not match ElfXX_Dyn\n"),
- idx, section_name (ebl, idx));
-@@ -1608,7 +1628,7 @@ section [%2d] '%s': section entry size d
- idx, section_name (ebl, idx));
-
- bool non_null_warned = false;
-- for (cnt = 0; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt)
-+ for (cnt = 0; cnt < shdr->sh_size / sh_entsize; ++cnt)
- {
- GElf_Dyn dyn_mem;
- GElf_Dyn *dyn = gelf_getdyn (data, cnt, &dyn_mem);
-@@ -1880,6 +1900,8 @@ section [%2d] '%s': entry size does not
- idx, section_name (ebl, idx));
-
- if (symshdr != NULL
-+ && shdr->sh_entsize
-+ && symshdr->sh_entsize
- && (shdr->sh_size / shdr->sh_entsize
- < symshdr->sh_size / symshdr->sh_entsize))
- ERROR (gettext ("\
-@@ -1906,6 +1928,12 @@ section [%2d] '%s': extended section ind
- }
-
- Elf_Data *data = elf_getdata (elf_getscn (ebl->elf, idx), NULL);
-+ if (data == NULL)
-+ {
-+ ERROR (gettext ("section [%2d] '%s': cannot get section data\n"),
-+ idx, section_name (ebl, idx));
-+ return;
-+ }
-
- if (*((Elf32_Word *) data->d_buf) != 0)
- ERROR (gettext ("symbol 0 should have zero extended section index\n"));
-@@ -1948,7 +1976,7 @@ section [%2d] '%s': hash table section i
-
- size_t maxidx = nchain;
-
-- if (symshdr != NULL)
-+ if (symshdr != NULL && symshdr->sh_entsize != 0)
- {
- size_t symsize = symshdr->sh_size / symshdr->sh_entsize;
-
-@@ -1959,18 +1987,28 @@ section [%2d] '%s': hash table section i
- maxidx = symsize;
- }
-
-+ Elf32_Word *buf = (Elf32_Word *) data->d_buf;
-+ Elf32_Word *end = (Elf32_Word *) ((char *) data->d_buf + shdr->sh_size);
- size_t cnt;
- for (cnt = 2; cnt < 2 + nbucket; ++cnt)
-- if (((Elf32_Word *) data->d_buf)[cnt] >= maxidx)
-+ {
-+ if (buf + cnt >= end)
-+ break;
-+ else if (buf[cnt] >= maxidx)
- ERROR (gettext ("\
- section [%2d] '%s': hash bucket reference %zu out of bounds\n"),
- idx, section_name (ebl, idx), cnt - 2);
-+ }
-
- for (; cnt < 2 + nbucket + nchain; ++cnt)
-- if (((Elf32_Word *) data->d_buf)[cnt] >= maxidx)
-+ {
-+ if (buf + cnt >= end)
-+ break;
-+ else if (buf[cnt] >= maxidx)
- ERROR (gettext ("\
- section [%2d] '%s': hash chain reference %zu out of bounds\n"),
- idx, section_name (ebl, idx), cnt - 2 - nbucket);
-+ }
- }
-
-
-@@ -2000,18 +2038,28 @@ section [%2d] '%s': hash table section i
- maxidx = symsize;
- }
-
-+ Elf64_Xword *buf = (Elf64_Xword *) data->d_buf;
-+ Elf64_Xword *end = (Elf64_Xword *) ((char *) data->d_buf + shdr->sh_size);
- size_t cnt;
- for (cnt = 2; cnt < 2 + nbucket; ++cnt)
-- if (((Elf64_Xword *) data->d_buf)[cnt] >= maxidx)
-+ {
-+ if (buf + cnt >= end)
-+ break;
-+ else if (buf[cnt] >= maxidx)
- ERROR (gettext ("\
- section [%2d] '%s': hash bucket reference %zu out of bounds\n"),
- idx, section_name (ebl, idx), cnt - 2);
-+ }
-
- for (; cnt < 2 + nbucket + nchain; ++cnt)
-- if (((Elf64_Xword *) data->d_buf)[cnt] >= maxidx)
-+ {
-+ if (buf + cnt >= end)
-+ break;
-+ else if (buf[cnt] >= maxidx)
- ERROR (gettext ("\
- section [%2d] '%s': hash chain reference %" PRIu64 " out of bounds\n"),
-- idx, section_name (ebl, idx), (uint64_t) (cnt - 2 - nbucket));
-+ idx, section_name (ebl, idx), (uint64_t) cnt - 2 - nbucket);
-+ }
- }
-
-
-@@ -2036,7 +2084,7 @@ section [%2d] '%s': bitmask size not pow
- if (shdr->sh_size < (4 + bitmask_words + nbuckets) * sizeof (Elf32_Word))
- {
- ERROR (gettext ("\
--section [%2d] '%s': hash table section is too small (is %ld, expected at least%ld)\n"),
-+section [%2d] '%s': hash table section is too small (is %ld, expected at least %ld)\n"),
- idx, section_name (ebl, idx), (long int) shdr->sh_size,
- (long int) ((4 + bitmask_words + nbuckets) * sizeof (Elf32_Word)));
- return;
-@@ -2708,8 +2756,9 @@ section [%2d] '%s' refers in sh_link to
-
- /* The number of elements in the version symbol table must be the
- same as the number of symbols. */
-- if (shdr->sh_size / shdr->sh_entsize
-- != symshdr->sh_size / symshdr->sh_entsize)
-+ if (shdr->sh_entsize && symshdr->sh_entsize
-+ && (shdr->sh_size / shdr->sh_entsize
-+ != symshdr->sh_size / symshdr->sh_entsize))
- ERROR (gettext ("\
- section [%2d] '%s' has different number of entries than symbol table [%2d] '%s'\n"),
- idx, section_name (ebl, idx),
---- elfutils/src/readelf.c
-+++ elfutils/src/readelf.c
-@@ -1364,6 +1364,8 @@ handle_scngrp (Ebl *ebl, Elf_Scn *scn, G
- Elf32_Word *grpref = (Elf32_Word *) data->d_buf;
-
- GElf_Sym sym_mem;
-+ GElf_Sym *sym = gelf_getsym (symdata, shdr->sh_info, &sym_mem);
-+
- printf ((grpref[0] & GRP_COMDAT)
- ? ngettext ("\
- \nCOMDAT section group [%2zu] '%s' with signature '%s' contains %zu entry:\n",
-@@ -1376,8 +1378,8 @@ handle_scngrp (Ebl *ebl, Elf_Scn *scn, G
- data->d_size / sizeof (Elf32_Word) - 1),
- elf_ndxscn (scn),
- elf_strptr (ebl->elf, shstrndx, shdr->sh_name),
-- elf_strptr (ebl->elf, symshdr->sh_link,
-- gelf_getsym (symdata, shdr->sh_info, &sym_mem)->st_name)
-+ (sym == NULL ? NULL
-+ : elf_strptr (ebl->elf, symshdr->sh_link, sym->st_name))
- ?: gettext ("<INVALID SYMBOL>"),
- data->d_size / sizeof (Elf32_Word) - 1);
-
-@@ -1528,10 +1530,12 @@ static void
- handle_dynamic (Ebl *ebl, Elf_Scn *scn, GElf_Shdr *shdr)
- {
- int class = gelf_getclass (ebl->elf);
-- GElf_Shdr glink;
-+ GElf_Shdr glink_mem;
-+ GElf_Shdr *glink;
- Elf_Data *data;
- size_t cnt;
- size_t shstrndx;
-+ size_t sh_entsize;
-
- /* Get the data of the section. */
- data = elf_getdata (scn, NULL);
-@@ -1543,21 +1547,26 @@ handle_dynamic (Ebl *ebl, Elf_Scn *scn,
- error (EXIT_FAILURE, 0,
- gettext ("cannot get section header string table index"));
-
-+ sh_entsize = gelf_fsize (ebl->elf, ELF_T_DYN, 1, EV_CURRENT);
-+
-+ glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), &glink_mem);
-+ if (glink == NULL)
-+ error (EXIT_FAILURE, 0, gettext ("invalid sh_link value in section %Zu"),
-+ elf_ndxscn (scn));
-+
- printf (ngettext ("\
- \nDynamic segment contains %lu entry:\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'\n",
- "\
- \nDynamic segment contains %lu entries:\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'\n",
-- shdr->sh_size / shdr->sh_entsize),
-- (unsigned long int) (shdr->sh_size / shdr->sh_entsize),
-+ shdr->sh_size / sh_entsize),
-+ (unsigned long int) (shdr->sh_size / sh_entsize),
- class == ELFCLASS32 ? 10 : 18, shdr->sh_addr,
- shdr->sh_offset,
- (int) shdr->sh_link,
-- elf_strptr (ebl->elf, shstrndx,
-- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link),
-- &glink)->sh_name));
-+ elf_strptr (ebl->elf, shstrndx, glink->sh_name));
- fputs_unlocked (gettext (" Type Value\n"), stdout);
-
-- for (cnt = 0; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt)
-+ for (cnt = 0; cnt < shdr->sh_size / sh_entsize; ++cnt)
- {
- GElf_Dyn dynmem;
- GElf_Dyn *dyn = gelf_getdyn (data, cnt, &dynmem);
-@@ -1706,7 +1715,8 @@ static void
- handle_relocs_rel (Ebl *ebl, GElf_Ehdr *ehdr, Elf_Scn *scn, GElf_Shdr *shdr)
- {
- int class = gelf_getclass (ebl->elf);
-- int nentries = shdr->sh_size / shdr->sh_entsize;
-+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_REL, 1, EV_CURRENT);
-+ int nentries = shdr->sh_size / sh_entsize;
-
- /* Get the data of the section. */
- Elf_Data *data = elf_getdata (scn, NULL);
-@@ -1892,7 +1902,8 @@ static void
- handle_relocs_rela (Ebl *ebl, GElf_Ehdr *ehdr, Elf_Scn *scn, GElf_Shdr *shdr)
- {
- int class = gelf_getclass (ebl->elf);
-- int nentries = shdr->sh_size / shdr->sh_entsize;
-+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_RELA, 1, EV_CURRENT);
-+ int nentries = shdr->sh_size / sh_entsize;
-
- /* Get the data of the section. */
- Elf_Data *data = elf_getdata (scn, NULL);
-@@ -2139,6 +2150,13 @@ handle_symtab (Ebl *ebl, Elf_Scn *scn, G
- error (EXIT_FAILURE, 0,
- gettext ("cannot get section header string table index"));
-
-+ GElf_Shdr glink_mem;
-+ GElf_Shdr *glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link),
-+ &glink_mem);
-+ if (glink == NULL)
-+ error (EXIT_FAILURE, 0, gettext ("invalid sh_link value in section %Zu"),
-+ elf_ndxscn (scn));
-+
- /* Now we can compute the number of entries in the section. */
- unsigned int nsyms = data->d_size / (class == ELFCLASS32
- ? sizeof (Elf32_Sym)
-@@ -2149,15 +2167,12 @@ handle_symtab (Ebl *ebl, Elf_Scn *scn, G
- nsyms),
- (unsigned int) elf_ndxscn (scn),
- elf_strptr (ebl->elf, shstrndx, shdr->sh_name), nsyms);
-- GElf_Shdr glink;
- printf (ngettext (" %lu local symbol String table: [%2u] '%s'\n",
- " %lu local symbols String table: [%2u] '%s'\n",
- shdr->sh_info),
- (unsigned long int) shdr->sh_info,
- (unsigned int) shdr->sh_link,
-- elf_strptr (ebl->elf, shstrndx,
-- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link),
-- &glink)->sh_name));
-+ elf_strptr (ebl->elf, shstrndx, glink->sh_name));
-
- fputs_unlocked (class == ELFCLASS32
- ? gettext ("\
-@@ -2393,7 +2408,13 @@ handle_verneed (Ebl *ebl, Elf_Scn *scn,
- error (EXIT_FAILURE, 0,
- gettext ("cannot get section header string table index"));
-
-- GElf_Shdr glink;
-+ GElf_Shdr glink_mem;
-+ GElf_Shdr *glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link),
-+ &glink_mem);
-+ if (glink == NULL)
-+ error (EXIT_FAILURE, 0, gettext ("invalid sh_link value in section %Zu"),
-+ elf_ndxscn (scn));
-+
- printf (ngettext ("\
- \nVersion needs section [%2u] '%s' contains %d entry:\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'\n",
- "\
-@@ -2404,9 +2425,7 @@ handle_verneed (Ebl *ebl, Elf_Scn *scn,
- class == ELFCLASS32 ? 10 : 18, shdr->sh_addr,
- shdr->sh_offset,
- (unsigned int) shdr->sh_link,
-- elf_strptr (ebl->elf, shstrndx,
-- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link),
-- &glink)->sh_name));
-+ elf_strptr (ebl->elf, shstrndx, glink->sh_name));
-
- unsigned int offset = 0;
- for (int cnt = shdr->sh_info; --cnt >= 0; )
-@@ -2459,8 +2478,14 @@ handle_verdef (Ebl *ebl, Elf_Scn *scn, G
- error (EXIT_FAILURE, 0,
- gettext ("cannot get section header string table index"));
-
-+ GElf_Shdr glink_mem;
-+ GElf_Shdr *glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link),
-+ &glink_mem);
-+ if (glink == NULL)
-+ error (EXIT_FAILURE, 0, gettext ("invalid sh_link value in section %Zu"),
-+ elf_ndxscn (scn));
-+
- int class = gelf_getclass (ebl->elf);
-- GElf_Shdr glink;
- printf (ngettext ("\
- \nVersion definition section [%2u] '%s' contains %d entry:\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'\n",
- "\
-@@ -2472,9 +2497,7 @@ handle_verdef (Ebl *ebl, Elf_Scn *scn, G
- class == ELFCLASS32 ? 10 : 18, shdr->sh_addr,
- shdr->sh_offset,
- (unsigned int) shdr->sh_link,
-- elf_strptr (ebl->elf, shstrndx,
-- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link),
-- &glink)->sh_name));
-+ elf_strptr (ebl->elf, shstrndx, glink->sh_name));
-
- unsigned int offset = 0;
- for (int cnt = shdr->sh_info; --cnt >= 0; )
-@@ -2736,25 +2759,30 @@ handle_versym (Ebl *ebl, Elf_Scn *scn, G
- filename = NULL;
- }
-
-+ GElf_Shdr glink_mem;
-+ GElf_Shdr *glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link),
-+ &glink_mem);
-+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_HALF, 1, EV_CURRENT);
-+ if (glink == NULL)
-+ error (EXIT_FAILURE, 0, gettext ("invalid sh_link value in section %Zu"),
-+ elf_ndxscn (scn));
-+
- /* Print the header. */
-- GElf_Shdr glink;
- printf (ngettext ("\
- \nVersion symbols section [%2u] '%s' contains %d entry:\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'",
- "\
- \nVersion symbols section [%2u] '%s' contains %d entries:\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'",
-- shdr->sh_size / shdr->sh_entsize),
-+ shdr->sh_size / sh_entsize),
- (unsigned int) elf_ndxscn (scn),
- elf_strptr (ebl->elf, shstrndx, shdr->sh_name),
-- (int) (shdr->sh_size / shdr->sh_entsize),
-+ (int) (shdr->sh_size / sh_entsize),
- class == ELFCLASS32 ? 10 : 18, shdr->sh_addr,
- shdr->sh_offset,
- (unsigned int) shdr->sh_link,
-- elf_strptr (ebl->elf, shstrndx,
-- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link),
-- &glink)->sh_name));
-+ elf_strptr (ebl->elf, shstrndx, glink->sh_name));
-
- /* Now we can finally look at the actual contents of this section. */
-- for (unsigned int cnt = 0; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt)
-+ for (unsigned int cnt = 0; cnt < shdr->sh_size / sh_entsize; ++cnt)
- {
- if (cnt % 2 == 0)
- printf ("\n %4d:", cnt);
-@@ -2803,7 +2831,17 @@ print_hash_info (Ebl *ebl, Elf_Scn *scn,
- for (Elf32_Word cnt = 0; cnt < nbucket; ++cnt)
- ++counts[lengths[cnt]];
-
-- GElf_Shdr glink;
-+ GElf_Shdr glink_mem;
-+ GElf_Shdr *glink = gelf_getshdr (elf_getscn (ebl->elf,
-+ shdr->sh_link),
-+ &glink_mem);
-+ if (glink == NULL)
-+ {
-+ error (0, 0, gettext ("invalid sh_link value in section %Zu"),
-+ elf_ndxscn (scn));
-+ return;
-+ }
-+
- printf (ngettext ("\
- \nHistogram for bucket list length in section [%2u] '%s' (total of %d bucket):\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'\n",
- "\
-@@ -2816,9 +2854,7 @@ print_hash_info (Ebl *ebl, Elf_Scn *scn,
- shdr->sh_addr,
- shdr->sh_offset,
- (unsigned int) shdr->sh_link,
-- elf_strptr (ebl->elf, shstrndx,
-- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link),
-- &glink)->sh_name));
-+ elf_strptr (ebl->elf, shstrndx, glink->sh_name));
-
- if (extrastr != NULL)
- fputs (extrastr, stdout);
-@@ -3078,7 +3114,8 @@ print_liblist (Ebl *ebl)
-
- if (shdr != NULL && shdr->sh_type == SHT_GNU_LIBLIST)
- {
-- int nentries = shdr->sh_size / shdr->sh_entsize;
-+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_LIB, 1, EV_CURRENT);
-+ int nentries = shdr->sh_size / sh_entsize;
- printf (ngettext ("\
- \nLibrary list section [%2zu] '%s' at offset %#0" PRIx64 " contains %d entry:\n",
- "\
-@@ -4398,6 +4435,16 @@ print_decoded_aranges_section (Ebl *ebl,
- return;
- }
-
-+ GElf_Shdr glink_mem;
-+ GElf_Shdr *glink;
-+ glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), &glink_mem);
-+ if (glink == NULL)
-+ {
-+ error (0, 0, gettext ("invalid sh_link value in section %Zu"),
-+ elf_ndxscn (scn));
-+ return;
-+ }
-+
- printf (ngettext ("\
- \nDWARF section [%2zu] '%s' at offset %#" PRIx64 " contains %zu entry:\n",
- "\
---- elfutils/src/strip.c
-+++ elfutils/src/strip.c
-@@ -565,6 +565,11 @@ handle_elf (int fd, Elf *elf, const char
- goto fail_close;
- }
-
-+ if (shstrndx >= shnum)
-+ goto illformed;
-+
-+#define elf_assert(test) do { if (!(test)) goto illformed; } while (0)
-+
- /* Storage for section information. We leave room for two more
- entries since we unconditionally create a section header string
- table. Maybe some weird tool created an ELF file without one.
-@@ -586,7 +591,7 @@ handle_elf (int fd, Elf *elf, const char
- {
- /* This should always be true (i.e., there should not be any
- holes in the numbering). */
-- assert (elf_ndxscn (scn) == cnt);
-+ elf_assert (elf_ndxscn (scn) == cnt);
-
- shdr_info[cnt].scn = scn;
-
-@@ -599,6 +604,7 @@ handle_elf (int fd, Elf *elf, const char
- shdr_info[cnt].shdr.sh_name);
- if (shdr_info[cnt].name == NULL)
- {
-+ illformed:
- error (0, 0, gettext ("illformed file '%s'"), fname);
- goto fail_close;
- }
-@@ -608,6 +614,8 @@ handle_elf (int fd, Elf *elf, const char
-
- /* Remember the shdr.sh_link value. */
- shdr_info[cnt].old_sh_link = shdr_info[cnt].shdr.sh_link;
-+ if (shdr_info[cnt].old_sh_link >= shnum)
-+ goto illformed;
-
- /* Sections in files other than relocatable object files which
- are not loaded can be freely moved by us. In relocatable
-@@ -620,7 +628,7 @@ handle_elf (int fd, Elf *elf, const char
- appropriate reference. */
- if (unlikely (shdr_info[cnt].shdr.sh_type == SHT_SYMTAB_SHNDX))
- {
-- assert (shdr_info[shdr_info[cnt].shdr.sh_link].symtab_idx == 0);
-+ elf_assert (shdr_info[shdr_info[cnt].shdr.sh_link].symtab_idx == 0);
- shdr_info[shdr_info[cnt].shdr.sh_link].symtab_idx = cnt;
- }
- else if (unlikely (shdr_info[cnt].shdr.sh_type == SHT_GROUP))
-@@ -637,7 +645,12 @@ handle_elf (int fd, Elf *elf, const char
- for (inner = 1;
- inner < shdr_info[cnt].data->d_size / sizeof (Elf32_Word);
- ++inner)
-+ {
-+ if (grpref[inner] < shnum)
- shdr_info[grpref[inner]].group_idx = cnt;
-+ else
-+ goto illformed;
-+ }
-
- if (inner == 1 || (inner == 2 && (grpref[0] & GRP_COMDAT) == 0))
- /* If the section group contains only one element and this
-@@ -648,7 +661,7 @@ handle_elf (int fd, Elf *elf, const char
- }
- else if (unlikely (shdr_info[cnt].shdr.sh_type == SHT_GNU_versym))
- {
-- assert (shdr_info[shdr_info[cnt].shdr.sh_link].version_idx == 0);
-+ elf_assert (shdr_info[shdr_info[cnt].shdr.sh_link].version_idx == 0);
- shdr_info[shdr_info[cnt].shdr.sh_link].version_idx = cnt;
- }
-
-@@ -656,7 +669,7 @@ handle_elf (int fd, Elf *elf, const char
- discarded right away. */
- if ((shdr_info[cnt].shdr.sh_flags & SHF_GROUP) != 0)
- {
-- assert (shdr_info[cnt].group_idx != 0);
-+ elf_assert (shdr_info[cnt].group_idx != 0);
-
- if (shdr_info[shdr_info[cnt].group_idx].idx == 0)
- {
-@@ -732,10 +745,14 @@ handle_elf (int fd, Elf *elf, const char
- {
- /* If a relocation section is marked as being removed make
- sure the section it is relocating is removed, too. */
-- if ((shdr_info[cnt].shdr.sh_type == SHT_REL
-+ if (shdr_info[cnt].shdr.sh_type == SHT_REL
- || shdr_info[cnt].shdr.sh_type == SHT_RELA)
-- && shdr_info[shdr_info[cnt].shdr.sh_info].idx != 0)
-- shdr_info[cnt].idx = 1;
-+ {
-+ if (shdr_info[cnt].shdr.sh_info >= shnum)
-+ goto illformed;
-+ else if (shdr_info[shdr_info[cnt].shdr.sh_info].idx != 0)
-+ shdr_info[cnt].idx = 1;
-+ }
-
- /* If a group section is marked as being removed make
- sure all the sections it contains are being removed, too. */
-@@ -779,7 +796,7 @@ handle_elf (int fd, Elf *elf, const char
- if (shdr_info[cnt].symtab_idx != 0
- && shdr_info[shdr_info[cnt].symtab_idx].data == NULL)
- {
-- assert (shdr_info[cnt].shdr.sh_type == SHT_SYMTAB);
-+ elf_assert (shdr_info[cnt].shdr.sh_type == SHT_SYMTAB);
-
- shdr_info[shdr_info[cnt].symtab_idx].data
- = elf_getdata (shdr_info[shdr_info[cnt].symtab_idx].scn,
-@@ -819,6 +836,9 @@ handle_elf (int fd, Elf *elf, const char
- else if (scnidx == SHN_XINDEX)
- scnidx = xndx;
-
-+ if (scnidx >= shnum)
-+ goto illformed;
-+
- if (shdr_info[scnidx].idx == 0)
- /* This symbol table has a real symbol in
- a discarded section. So preserve the
-@@ -849,12 +869,16 @@ handle_elf (int fd, Elf *elf, const char
- }
-
- /* Handle references through sh_info. */
-- if (SH_INFO_LINK_P (&shdr_info[cnt].shdr)
-- && shdr_info[shdr_info[cnt].shdr.sh_info].idx == 0)
-+ if (SH_INFO_LINK_P (&shdr_info[cnt].shdr))
-+ {
-+ if (shdr_info[cnt].shdr.sh_info >= shnum)
-+ goto illformed;
-+ else if ( shdr_info[shdr_info[cnt].shdr.sh_info].idx == 0)
- {
- shdr_info[shdr_info[cnt].shdr.sh_info].idx = 1;
- changes |= shdr_info[cnt].shdr.sh_info < cnt;
- }
-+ }
-
- /* Mark the section as investigated. */
- shdr_info[cnt].idx = 2;
-@@ -995,7 +1019,7 @@ handle_elf (int fd, Elf *elf, const char
- error (EXIT_FAILURE, 0, gettext ("while generating output file: %s"),
- elf_errmsg (-1));
-
-- assert (elf_ndxscn (shdr_info[cnt].newscn) == shdr_info[cnt].idx);
-+ elf_assert (elf_ndxscn (shdr_info[cnt].newscn) == shdr_info[cnt].idx);
-
- /* Add this name to the section header string table. */
- shdr_info[cnt].se = ebl_strtabadd (shst, shdr_info[cnt].name, 0);
-@@ -1032,7 +1056,7 @@ handle_elf (int fd, Elf *elf, const char
- error (EXIT_FAILURE, 0,
- gettext ("while create section header section: %s"),
- elf_errmsg (-1));
-- assert (elf_ndxscn (shdr_info[cnt].newscn) == shdr_info[cnt].idx);
-+ elf_assert (elf_ndxscn (shdr_info[cnt].newscn) == shdr_info[cnt].idx);
-
- shdr_info[cnt].data = elf_newdata (shdr_info[cnt].newscn);
- if (shdr_info[cnt].data == NULL)
-@@ -1089,7 +1113,7 @@ handle_elf (int fd, Elf *elf, const char
- error (EXIT_FAILURE, 0,
- gettext ("while create section header section: %s"),
- elf_errmsg (-1));
-- assert (elf_ndxscn (shdr_info[cnt].newscn) == idx);
-+ elf_assert (elf_ndxscn (shdr_info[cnt].newscn) == idx);
-
- /* Finalize the string table and fill in the correct indices in the
- section headers. */
-@@ -1179,20 +1203,20 @@ handle_elf (int fd, Elf *elf, const char
- shndxdata = elf_getdata (shdr_info[shdr_info[cnt].symtab_idx].scn,
- NULL);
-
-- assert ((versiondata->d_size / sizeof (Elf32_Word))
-+ elf_assert ((versiondata->d_size / sizeof (Elf32_Word))
- >= shdr_info[cnt].data->d_size / elsize);
- }
-
- if (shdr_info[cnt].version_idx != 0)
- {
-- assert (shdr_info[cnt].shdr.sh_type == SHT_DYNSYM);
-+ elf_assert (shdr_info[cnt].shdr.sh_type == SHT_DYNSYM);
- /* This section has associated version
- information. We have to modify that
- information, too. */
- versiondata = elf_getdata (shdr_info[shdr_info[cnt].version_idx].scn,
- NULL);
-
-- assert ((versiondata->d_size / sizeof (GElf_Versym))
-+ elf_assert ((versiondata->d_size / sizeof (GElf_Versym))
- >= shdr_info[cnt].data->d_size / elsize);
- }
-
-@@ -1247,7 +1271,7 @@ handle_elf (int fd, Elf *elf, const char
- sec = shdr_info[sym->st_shndx].idx;
- else
- {
-- assert (shndxdata != NULL);
-+ elf_assert (shndxdata != NULL);
-
- sec = shdr_info[xshndx].idx;
- }
-@@ -1268,7 +1292,7 @@ handle_elf (int fd, Elf *elf, const char
- nxshndx = sec;
- }
-
-- assert (sec < SHN_LORESERVE || shndxdata != NULL);
-+ elf_assert (sec < SHN_LORESERVE || shndxdata != NULL);
-
- if ((inner != destidx || nshndx != sym->st_shndx
- || (shndxdata != NULL && nxshndx != xshndx))
-@@ -1295,9 +1319,11 @@ handle_elf (int fd, Elf *elf, const char
- {
- size_t sidx = (sym->st_shndx != SHN_XINDEX
- ? sym->st_shndx : xshndx);
-- assert (GELF_ST_TYPE (sym->st_info) == STT_SECTION
-- || (shdr_info[sidx].shdr.sh_type == SHT_GROUP
-- && shdr_info[sidx].shdr.sh_info == inner));
-+ elf_assert (GELF_ST_TYPE (sym->st_info) == STT_SECTION
-+ || ((shdr_info[sidx].shdr.sh_type
-+ == SHT_GROUP)
-+ && (shdr_info[sidx].shdr.sh_info
-+ == inner)));
- }
- }
-
-@@ -1485,11 +1511,11 @@ handle_elf (int fd, Elf *elf, const char
- {
- GElf_Sym sym_mem;
- GElf_Sym *sym = gelf_getsym (symd, inner, &sym_mem);
-- assert (sym != NULL);
-+ elf_assert (sym != NULL);
-
- const char *name = elf_strptr (elf, strshndx,
- sym->st_name);
-- assert (name != NULL);
-+ elf_assert (name != NULL);
- size_t hidx = elf_hash (name) % nbucket;
-
- if (bucket[hidx] == 0)
-@@ -1508,8 +1534,8 @@ handle_elf (int fd, Elf *elf, const char
- else
- {
- /* Alpha and S390 64-bit use 64-bit SHT_HASH entries. */
-- assert (shdr_info[cnt].shdr.sh_entsize
-- == sizeof (Elf64_Xword));
-+ elf_assert (shdr_info[cnt].shdr.sh_entsize
-+ == sizeof (Elf64_Xword));
-
- Elf64_Xword *bucket = (Elf64_Xword *) hashd->d_buf;
-
-@@ -1539,11 +1565,11 @@ handle_elf (int fd, Elf *elf, const char
- {
- GElf_Sym sym_mem;
- GElf_Sym *sym = gelf_getsym (symd, inner, &sym_mem);
-- assert (sym != NULL);
-+ elf_assert (sym != NULL);
-
- const char *name = elf_strptr (elf, strshndx,
- sym->st_name);
-- assert (name != NULL);
-+ elf_assert (name != NULL);
- size_t hidx = elf_hash (name) % nbucket;
-
- if (bucket[hidx] == 0)
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/unwind_non_linux.patch b/meta/recipes-devtools/elfutils/elfutils-0.158/unwind_non_linux.patch
deleted file mode 100644
index 870ec23703..0000000000
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/unwind_non_linux.patch
+++ /dev/null
@@ -1,256 +0,0 @@
-From 02cefdaa6429e620d6457fdb3ad9934f194c5a93 Mon Sep 17 00:00:00 2001
-From: Kurt Roeckx <kurt@roeckx.be>
-Date: Tue, 22 Apr 2014 21:46:22 +0200
-Subject: [PATCH] Unwinding is only supported on Linux
-
-Index: elfutils-0.158/backends/i386_initreg.c
-===================================================================
---- elfutils-0.158.orig/backends/i386_initreg.c 2014-05-01 17:11:18.870616302 +0000
-+++ elfutils-0.158/backends/i386_initreg.c 2014-05-01 17:11:18.866616384 +0000
-@@ -44,7 +44,7 @@
- ebl_tid_registers_t *setfunc __attribute__ ((unused)),
- void *arg __attribute__ ((unused)))
- {
--#if !defined __i386__ && !defined __x86_64__
-+#if (!defined __i386__ && !defined __x86_64__) || !defined(__linux__)
- return false;
- #else /* __i386__ || __x86_64__ */
- struct user_regs_struct user_regs;
-Index: elfutils-0.158/backends/x86_64_initreg.c
-===================================================================
---- elfutils-0.158.orig/backends/x86_64_initreg.c 2014-05-01 17:11:18.870616302 +0000
-+++ elfutils-0.158/backends/x86_64_initreg.c 2014-05-01 17:11:18.866616384 +0000
-@@ -44,7 +44,7 @@
- ebl_tid_registers_t *setfunc __attribute__ ((unused)),
- void *arg __attribute__ ((unused)))
- {
--#ifndef __x86_64__
-+#if !defined(__x86_64__) || !defined(__linux__)
- return false;
- #else /* __x86_64__ */
- struct user_regs_struct user_regs;
-Index: elfutils-0.158/libdwfl/linux-pid-attach.c
-===================================================================
---- elfutils-0.158.orig/libdwfl/linux-pid-attach.c 2014-05-01 17:11:18.870616302 +0000
-+++ elfutils-0.158/libdwfl/linux-pid-attach.c 2014-05-01 17:12:47.980766442 +0000
-@@ -37,6 +37,8 @@
- # define MAX(a, b) ((a) > (b) ? (a) : (b))
- #endif
-
-+#ifdef __linux__
-+
- struct pid_arg
- {
- DIR *dir;
-@@ -358,3 +360,87 @@
- return 0;
- }
- INTDEF (dwfl_linux_proc_attach)
-+
-+#else /* __linux__ */
-+
-+static pid_t
-+pid_next_thread (Dwfl *dwfl __attribute__ ((unused)),
-+ void *dwfl_arg __attribute__ ((unused)),
-+ void **thread_argp __attribute__ ((unused)))
-+{
-+ errno = ENOSYS;
-+ __libdwfl_seterrno (DWFL_E_ERRNO);
-+ return -1;
-+}
-+
-+static bool
-+pid_getthread (Dwfl *dwfl __attribute__ ((unused)),
-+ pid_t tid __attribute__ ((unused)),
-+ void *dwfl_arg __attribute__ ((unused)),
-+ void **thread_argp __attribute__ ((unused)))
-+{
-+ errno = ENOSYS;
-+ __libdwfl_seterrno (DWFL_E_ERRNO);
-+ return false;
-+}
-+
-+static bool
-+pid_memory_read (Dwfl *dwfl __attribute__ ((unused)),
-+ Dwarf_Addr addr __attribute__ ((unused)),
-+ Dwarf_Word *result __attribute__ ((unused)),
-+ void *arg __attribute__ ((unused)))
-+{
-+ errno = ENOSYS;
-+ __libdwfl_seterrno (DWFL_E_ERRNO);
-+ return false;
-+}
-+
-+static bool
-+pid_set_initial_registers (Dwfl_Thread *thread __attribute__ ((unused)),
-+ void *thread_arg __attribute__ ((unused)))
-+{
-+ errno = ENOSYS;
-+ __libdwfl_seterrno (DWFL_E_ERRNO);
-+ return false;
-+}
-+
-+static void
-+pid_detach (Dwfl *dwfl __attribute__ ((unused)),
-+ void *dwfl_arg __attribute__ ((unused)))
-+{
-+}
-+
-+static void
-+pid_thread_detach (Dwfl_Thread *thread __attribute__ ((unused)),
-+ void *thread_arg __attribute__ ((unused)))
-+{
-+}
-+
-+static const Dwfl_Thread_Callbacks pid_thread_callbacks =
-+{
-+ pid_next_thread,
-+ pid_getthread,
-+ pid_memory_read,
-+ pid_set_initial_registers,
-+ pid_detach,
-+ pid_thread_detach,
-+};
-+
-+int
-+dwfl_linux_proc_attach (Dwfl *dwfl __attribute__ ((unused)),
-+ pid_t pid __attribute__ ((unused)),
-+ bool assume_ptrace_stopped __attribute__ ((unused)))
-+{
-+ return ENOSYS;
-+}
-+INTDEF (dwfl_linux_proc_attach)
-+
-+struct __libdwfl_pid_arg *
-+internal_function
-+__libdwfl_get_pid_arg (Dwfl *dwfl __attribute__ ((unused)))
-+{
-+ return NULL;
-+}
-+
-+#endif /* ! __linux __ */
-+
-Index: elfutils-0.158/tests/backtrace-child.c
-===================================================================
---- elfutils-0.158.orig/tests/backtrace-child.c 2014-05-01 17:11:18.870616302 +0000
-+++ elfutils-0.158/tests/backtrace-child.c 2014-05-01 17:11:18.866616384 +0000
-@@ -79,6 +79,18 @@
- #include <stdio.h>
- #include <unistd.h>
-
-+#ifndef __linux__
-+
-+int
-+main (int argc __attribute__ ((unused)), char **argv)
-+{
-+ fprintf (stderr, "%s: Unwinding not supported for this architecture\n",
-+ argv[0]);
-+ return 77;
-+}
-+
-+#else /* __linux__ */
-+
- #if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)
- #define NOINLINE_NOCLONE __attribute__ ((noinline, noclone))
- #else
-@@ -221,3 +233,6 @@
- /* Not reached. */
- abort ();
- }
-+
-+#endif /* ! __linux__ */
-+
-Index: elfutils-0.158/tests/backtrace-data.c
-===================================================================
---- elfutils-0.158.orig/tests/backtrace-data.c 2014-05-01 17:11:18.870616302 +0000
-+++ elfutils-0.158/tests/backtrace-data.c 2014-05-01 17:11:18.866616384 +0000
-@@ -40,7 +40,7 @@
- #include <string.h>
- #include ELFUTILS_HEADER(dwfl)
-
--#ifndef __x86_64__
-+#if !defined(__x86_64__) || !defined(__linux__)
-
- int
- main (int argc __attribute__ ((unused)), char **argv)
-@@ -50,7 +50,7 @@
- return 77;
- }
-
--#else /* __x86_64__ */
-+#else /* __x86_64__ && __linux__ */
-
- /* The only arch specific code is set_initial_registers. */
-
-Index: elfutils-0.158/tests/backtrace-dwarf.c
-===================================================================
---- elfutils-0.158.orig/tests/backtrace-dwarf.c 2014-05-01 17:11:18.870616302 +0000
-+++ elfutils-0.158/tests/backtrace-dwarf.c 2014-05-01 17:11:18.866616384 +0000
-@@ -25,6 +25,18 @@
- #include <sys/ptrace.h>
- #include ELFUTILS_HEADER(dwfl)
-
-+#ifndef __linux__
-+
-+int
-+main (int argc __attribute__ ((unused)), char **argv)
-+{
-+ fprintf (stderr, "%s: Unwinding not supported for this architecture\n",
-+ argv[0]);
-+ return 77;
-+}
-+
-+#else /* __linux__ */
-+
- static void cleanup_13_abort (void);
- #define main cleanup_13_main
- #include "cleanup-13.c"
-@@ -148,3 +160,6 @@
- /* There is an exit (0) call if we find the "main" frame, */
- error (1, 0, "dwfl_getthreads: %s", dwfl_errmsg (-1));
- }
-+
-+#endif /* ! __linux__ */
-+
-Index: elfutils-0.158/tests/backtrace-subr.sh
-===================================================================
---- elfutils-0.158.orig/tests/backtrace-subr.sh 2014-05-01 17:11:18.870616302 +0000
-+++ elfutils-0.158/tests/backtrace-subr.sh 2014-05-01 17:11:18.866616384 +0000
-@@ -84,6 +84,7 @@
- echo ./backtrace ./backtrace.$arch.{exec,core}
- testrun ${abs_builddir}/backtrace -e ./backtrace.$arch.exec --core=./backtrace.$arch.core 1>backtrace.$arch.bt 2>backtrace.$arch.err || true
- cat backtrace.$arch.{bt,err}
-+ check_unsupported backtrace.$arch.err backtrace.$arch.core
- check_all backtrace.$arch.{bt,err} backtrace.$arch.core
- }
-
-Index: elfutils-0.158/tests/backtrace.c
-===================================================================
---- elfutils-0.158.orig/tests/backtrace.c 2014-05-01 17:11:18.870616302 +0000
-+++ elfutils-0.158/tests/backtrace.c 2014-05-01 17:11:18.866616384 +0000
-@@ -39,6 +39,18 @@
- #include <sys/syscall.h>
- #include ELFUTILS_HEADER(dwfl)
-
-+#ifndef __linux__
-+
-+int
-+main (int argc __attribute__ ((unused)), char **argv)
-+{
-+ fprintf (stderr, "%s: Unwinding not supported for this architecture\n",
-+ argv[0]);
-+ return 77;
-+}
-+
-+#else /* __linux__ */
-+
- static int
- dump_modules (Dwfl_Module *mod, void **userdata __attribute__ ((unused)),
- const char *name, Dwarf_Addr start,
-@@ -452,3 +464,6 @@
- dwfl_end (dwfl);
- return 0;
- }
-+
-+#endif /* ! __linux__ */
-+
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/arm_backend.diff b/meta/recipes-devtools/elfutils/elfutils-0.160/arm_backend.diff
index fc3d6dc3f4..c97c4e470d 100644
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/arm_backend.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/arm_backend.diff
@@ -1,7 +1,25 @@
-Index: elfutils-0.158/backends/arm_init.c
-===================================================================
---- elfutils-0.158.orig/backends/arm_init.c 2014-04-21 11:13:24.378519252 +0000
-+++ elfutils-0.158/backends/arm_init.c 2014-04-21 11:13:24.374519343 +0000
+From 0db1687eee0b4d16ccbc40db5a06b574fca6614c Mon Sep 17 00:00:00 2001
+From: Hongxu Jia <hongxu.jia@windriver.com>
+Date: Fri, 14 Nov 2014 15:25:42 +0800
+Subject: [PATCH] Rebase arm_backend.diff from 0.159 to 0.160
+
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+---
+ backends/arm_init.c | 18 ++++-
+ backends/arm_regs.c | 132 ++++++++++++++++++++++++++++++++++++
+ backends/arm_retval.c | 44 +++++++++++-
+ backends/libebl_arm.h | 9 +++
+ libelf/elf.h | 11 +++
+ tests/run-addrcfi.sh | 93 ++++++++++++++++++++++++-
+ tests/run-allregs.sh | 95 +++++++++++++++++++++++++-
+ tests/run-readelf-mixed-corenote.sh | 11 ++-
+ 8 files changed, 401 insertions(+), 12 deletions(-)
+ create mode 100644 backends/libebl_arm.h
+
+diff --git a/backends/arm_init.c b/backends/arm_init.c
+index 3283c97..8b57d3f 100644
+--- a/backends/arm_init.c
++++ b/backends/arm_init.c
@@ -35,21 +35,32 @@
#define RELOC_PREFIX R_ARM_
#include "libebl_CPU.h"
@@ -36,7 +54,7 @@ Index: elfutils-0.158/backends/arm_init.c
/* We handle it. */
eh->name = "ARM";
arm_init_reloc (eh);
-@@ -61,7 +72,10 @@
+@@ -61,7 +72,10 @@ arm_init (elf, machine, eh, ehlen)
HOOK (eh, core_note);
HOOK (eh, auxv_info);
HOOK (eh, check_object_attribute);
@@ -46,12 +64,12 @@ Index: elfutils-0.158/backends/arm_init.c
+ else
+ eh->return_value_location = arm_return_value_location_hard;
HOOK (eh, abi_cfi);
+ HOOK (eh, check_reloc_target_type);
- return MODVERSION;
-Index: elfutils-0.158/backends/arm_regs.c
-===================================================================
---- elfutils-0.158.orig/backends/arm_regs.c 2014-04-21 11:13:24.378519252 +0000
-+++ elfutils-0.158/backends/arm_regs.c 2014-04-21 11:13:24.374519343 +0000
+diff --git a/backends/arm_regs.c b/backends/arm_regs.c
+index 21c5ad3..4ee1039 100644
+--- a/backends/arm_regs.c
++++ b/backends/arm_regs.c
@@ -31,6 +31,7 @@
#endif
@@ -60,7 +78,7 @@ Index: elfutils-0.158/backends/arm_regs.c
#include <dwarf.h>
#define BACKEND arm_
-@@ -76,6 +77,9 @@
+@@ -76,6 +77,9 @@ arm_register_info (Ebl *ebl __attribute__ ((unused)),
break;
case 16 + 0 ... 16 + 7:
@@ -70,7 +88,7 @@ Index: elfutils-0.158/backends/arm_regs.c
regno += 96 - 16;
/* Fall through. */
case 96 + 0 ... 96 + 7:
-@@ -87,11 +91,139 @@
+@@ -87,11 +91,139 @@ arm_register_info (Ebl *ebl __attribute__ ((unused)),
namelen = 2;
break;
@@ -210,11 +228,11 @@ Index: elfutils-0.158/backends/arm_regs.c
*setname = "VFP";
*type = DW_ATE_float;
*bits = 64;
-Index: elfutils-0.158/backends/arm_retval.c
-===================================================================
---- elfutils-0.158.orig/backends/arm_retval.c 2014-04-21 11:13:24.378519252 +0000
-+++ elfutils-0.158/backends/arm_retval.c 2014-04-21 11:13:24.374519343 +0000
-@@ -48,6 +48,13 @@
+diff --git a/backends/arm_retval.c b/backends/arm_retval.c
+index 7aced74..052132e 100644
+--- a/backends/arm_retval.c
++++ b/backends/arm_retval.c
+@@ -48,6 +48,13 @@ static const Dwarf_Op loc_intreg[] =
#define nloc_intreg 1
#define nloc_intregs(n) (2 * (n))
@@ -228,7 +246,7 @@ Index: elfutils-0.158/backends/arm_retval.c
/* The return value is a structure and is actually stored in stack space
passed in a hidden argument by the caller. But, the compiler
helpfully returns the address of that space in r0. */
-@@ -58,8 +65,9 @@
+@@ -58,8 +65,9 @@ static const Dwarf_Op loc_aggregate[] =
#define nloc_aggregate 1
@@ -240,11 +258,11 @@ Index: elfutils-0.158/backends/arm_retval.c
{
/* Start with the function's type, and get the DW_AT_type attribute,
which is the type of the return value. */
-@@ -112,14 +120,31 @@
- else
- return -1;
- }
-+ if (tag == DW_TAG_base_type)
+@@ -98,14 +106,31 @@ arm_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp)
+ else
+ return -1;
+ }
++ if (tag == DW_TAG_base_type)
+ {
+ Dwarf_Word encoding;
+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_encoding,
@@ -259,20 +277,20 @@ Index: elfutils-0.158/backends/arm_retval.c
+ goto aggregate;
+ }
+ }
- if (size <= 16)
- {
- intreg:
- *locp = loc_intreg;
- return size <= 4 ? nloc_intreg : nloc_intregs ((size + 3) / 4);
- }
-+ /* fall through. */
+ if (size <= 16)
+ {
+ intreg:
+ *locp = loc_intreg;
+ return size <= 4 ? nloc_intreg : nloc_intregs ((size + 3) / 4);
+ }
++ /* fall through. */
- aggregate:
-+ /* XXX TODO sometimes aggregates are returned in r0 (-mabi=aapcs) */
- *locp = loc_aggregate;
- return nloc_aggregate;
-
-@@ -138,3 +163,18 @@
+ aggregate:
++ /* XXX TODO sometimes aggregates are returned in r0 (-mabi=aapcs) */
+ *locp = loc_aggregate;
+ return nloc_aggregate;
+ }
+@@ -125,3 +150,18 @@ arm_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp)
DWARF and might be valid. */
return -2;
}
@@ -291,11 +309,26 @@ Index: elfutils-0.158/backends/arm_retval.c
+ return arm_return_value_location_ (functypedie, locp, 0);
+}
+
-Index: elfutils-0.158/libelf/elf.h
-===================================================================
---- elfutils-0.158.orig/libelf/elf.h 2014-04-21 11:13:24.378519252 +0000
-+++ elfutils-0.158/libelf/elf.h 2014-04-21 11:13:24.374519343 +0000
-@@ -2318,6 +2318,9 @@
+diff --git a/backends/libebl_arm.h b/backends/libebl_arm.h
+new file mode 100644
+index 0000000..c00770c
+--- /dev/null
++++ b/backends/libebl_arm.h
+@@ -0,0 +1,9 @@
++#ifndef _LIBEBL_ARM_H
++#define _LIBEBL_ARM_H 1
++
++#include <libdw.h>
++
++extern int arm_return_value_location_soft(Dwarf_Die *, const Dwarf_Op **locp);
++extern int arm_return_value_location_hard(Dwarf_Die *, const Dwarf_Op **locp);
++
++#endif
+diff --git a/libelf/elf.h b/libelf/elf.h
+index a3cce3e..0891674 100644
+--- a/libelf/elf.h
++++ b/libelf/elf.h
+@@ -2346,6 +2346,9 @@ typedef Elf32_Addr Elf32_Conflict;
#define EF_ARM_EABI_VER4 0x04000000
#define EF_ARM_EABI_VER5 0x05000000
@@ -305,7 +338,7 @@ Index: elfutils-0.158/libelf/elf.h
/* Additional symbol types for Thumb. */
#define STT_ARM_TFUNC STT_LOPROC /* A Thumb function. */
#define STT_ARM_16BIT STT_HIPROC /* A Thumb label. */
-@@ -2335,12 +2338,19 @@
+@@ -2363,12 +2366,19 @@ typedef Elf32_Addr Elf32_Conflict;
/* Processor specific values for the Phdr p_type field. */
#define PT_ARM_EXIDX (PT_LOPROC + 1) /* ARM unwind segment. */
@@ -325,7 +358,7 @@ Index: elfutils-0.158/libelf/elf.h
/* AArch64 relocs. */
-@@ -2619,6 +2629,7 @@
+@@ -2647,6 +2657,7 @@ typedef Elf32_Addr Elf32_Conflict;
TLS block (LDR, STR). */
#define R_ARM_TLS_IE12GP 111 /* 12 bit GOT entry relative
to GOT origin (LDR). */
@@ -333,25 +366,122 @@ Index: elfutils-0.158/libelf/elf.h
#define R_ARM_ME_TOO 128 /* Obsolete. */
#define R_ARM_THM_TLS_DESCSEQ 129
#define R_ARM_THM_TLS_DESCSEQ16 129
-Index: elfutils-0.158/backends/libebl_arm.h
-===================================================================
---- /dev/null 1970-01-01 00:00:00.000000000 +0000
-+++ elfutils-0.158/backends/libebl_arm.h 2014-04-21 11:13:24.374519343 +0000
-@@ -0,0 +1,9 @@
-+#ifndef _LIBEBL_ARM_H
-+#define _LIBEBL_ARM_H 1
-+
-+#include <libdw.h>
-+
-+extern int arm_return_value_location_soft(Dwarf_Die *, const Dwarf_Op **locp);
-+extern int arm_return_value_location_hard(Dwarf_Die *, const Dwarf_Op **locp);
-+
-+#endif
-Index: elfutils-0.158/tests/run-allregs.sh
-===================================================================
---- elfutils-0.158.orig/tests/run-allregs.sh 2014-04-21 11:13:24.378519252 +0000
-+++ elfutils-0.158/tests/run-allregs.sh 2014-04-21 11:13:24.378519252 +0000
-@@ -2671,7 +2671,28 @@
+diff --git a/tests/run-addrcfi.sh b/tests/run-addrcfi.sh
+index 5d33246..78464a8 100755
+--- a/tests/run-addrcfi.sh
++++ b/tests/run-addrcfi.sh
+@@ -2530,6 +2530,38 @@ dwarf_cfi_addrframe (.eh_frame): no matching address range
+ FPA reg21 (f5): undefined
+ FPA reg22 (f6): undefined
+ FPA reg23 (f7): undefined
++ VFP reg64 (s0): undefined
++ VFP reg65 (s1): undefined
++ VFP reg66 (s2): undefined
++ VFP reg67 (s3): undefined
++ VFP reg68 (s4): undefined
++ VFP reg69 (s5): undefined
++ VFP reg70 (s6): undefined
++ VFP reg71 (s7): undefined
++ VFP reg72 (s8): undefined
++ VFP reg73 (s9): undefined
++ VFP reg74 (s10): undefined
++ VFP reg75 (s11): undefined
++ VFP reg76 (s12): undefined
++ VFP reg77 (s13): undefined
++ VFP reg78 (s14): undefined
++ VFP reg79 (s15): undefined
++ VFP reg80 (s16): undefined
++ VFP reg81 (s17): undefined
++ VFP reg82 (s18): undefined
++ VFP reg83 (s19): undefined
++ VFP reg84 (s20): undefined
++ VFP reg85 (s21): undefined
++ VFP reg86 (s22): undefined
++ VFP reg87 (s23): undefined
++ VFP reg88 (s24): undefined
++ VFP reg89 (s25): undefined
++ VFP reg90 (s26): undefined
++ VFP reg91 (s27): undefined
++ VFP reg92 (s28): undefined
++ VFP reg93 (s29): undefined
++ VFP reg94 (s30): undefined
++ VFP reg95 (s31): undefined
+ FPA reg96 (f0): undefined
+ FPA reg97 (f1): undefined
+ FPA reg98 (f2): undefined
+@@ -2538,7 +2570,66 @@ dwarf_cfi_addrframe (.eh_frame): no matching address range
+ FPA reg101 (f5): undefined
+ FPA reg102 (f6): undefined
+ FPA reg103 (f7): undefined
+- integer reg128 (spsr): undefined
++ MMX reg104 (wcgr0): undefined
++ MMX reg105 (wcgr1): undefined
++ MMX reg106 (wcgr2): undefined
++ MMX reg107 (wcgr3): undefined
++ MMX reg108 (wcgr4): undefined
++ MMX reg109 (wcgr5): undefined
++ MMX reg110 (wcgr6): undefined
++ MMX reg111 (wcgr7): undefined
++ MMX reg112 (wr0): undefined
++ MMX reg113 (wr1): undefined
++ MMX reg114 (wr2): undefined
++ MMX reg115 (wr3): undefined
++ MMX reg116 (wr4): undefined
++ MMX reg117 (wr5): undefined
++ MMX reg118 (wr6): undefined
++ MMX reg119 (wr7): undefined
++ MMX reg120 (wr8): undefined
++ MMX reg121 (wr9): undefined
++ MMX reg122 (wr10): undefined
++ MMX reg123 (wr11): undefined
++ MMX reg124 (wr12): undefined
++ MMX reg125 (wr13): undefined
++ MMX reg126 (wr14): undefined
++ MMX reg127 (wr15): undefined
++ state reg128 (spsr): undefined
++ state reg129 (spsr_fiq): undefined
++ state reg130 (spsr_irq): undefined
++ state reg131 (spsr_abt): undefined
++ state reg132 (spsr_und): undefined
++ state reg133 (spsr_svc): undefined
++ integer reg144 (r8_usr): undefined
++ integer reg145 (r9_usr): undefined
++ integer reg146 (r10_usr): undefined
++ integer reg147 (r11_usr): undefined
++ integer reg148 (r12_usr): undefined
++ integer reg149 (r13_usr): undefined
++ integer reg150 (r14_usr): undefined
++ integer reg151 (r8_fiq): undefined
++ integer reg152 (r9_fiq): undefined
++ integer reg153 (r10_fiq): undefined
++ integer reg154 (r11_fiq): undefined
++ integer reg155 (r12_fiq): undefined
++ integer reg156 (r13_fiq): undefined
++ integer reg157 (r14_fiq): undefined
++ integer reg158 (r13_irq): undefined
++ integer reg159 (r14_irq): undefined
++ integer reg160 (r13_abt): undefined
++ integer reg161 (r14_abt): undefined
++ integer reg162 (r13_und): undefined
++ integer reg163 (r14_und): undefined
++ integer reg164 (r13_svc): undefined
++ integer reg165 (r14_svc): undefined
++ MMX reg192 (wc0): undefined
++ MMX reg193 (wc1): undefined
++ MMX reg194 (wc2): undefined
++ MMX reg195 (wc3): undefined
++ MMX reg196 (wc4): undefined
++ MMX reg197 (wc5): undefined
++ MMX reg198 (wc6): undefined
++ MMX reg199 (wc7): undefined
+ VFP reg256 (d0): undefined
+ VFP reg257 (d1): undefined
+ VFP reg258 (d2): undefined
+diff --git a/tests/run-allregs.sh b/tests/run-allregs.sh
+index 6f3862e..13557d5 100755
+--- a/tests/run-allregs.sh
++++ b/tests/run-allregs.sh
+@@ -2671,7 +2671,28 @@ integer registers:
13: sp (sp), address 32 bits
14: lr (lr), address 32 bits
15: pc (pc), address 32 bits
@@ -381,7 +511,7 @@ Index: elfutils-0.158/tests/run-allregs.sh
FPA registers:
16: f0 (f0), float 96 bits
17: f1 (f1), float 96 bits
-@@ -2689,7 +2710,72 @@
+@@ -2689,7 +2710,72 @@ FPA registers:
101: f5 (f5), float 96 bits
102: f6 (f6), float 96 bits
103: f7 (f7), float 96 bits
@@ -454,7 +584,7 @@ Index: elfutils-0.158/tests/run-allregs.sh
256: d0 (d0), float 64 bits
257: d1 (d1), float 64 bits
258: d2 (d2), float 64 bits
-@@ -2722,6 +2808,13 @@
+@@ -2722,6 +2808,13 @@ VFP registers:
285: d29 (d29), float 64 bits
286: d30 (d30), float 64 bits
287: d31 (d31), float 64 bits
@@ -468,11 +598,11 @@ Index: elfutils-0.158/tests/run-allregs.sh
EOF
# See run-readelf-mixed-corenote.sh for instructions to regenerate
-Index: elfutils-0.158/tests/run-readelf-mixed-corenote.sh
-===================================================================
---- elfutils-0.158.orig/tests/run-readelf-mixed-corenote.sh 2014-04-21 11:13:24.378519252 +0000
-+++ elfutils-0.158/tests/run-readelf-mixed-corenote.sh 2014-04-21 11:13:24.378519252 +0000
-@@ -30,12 +30,11 @@
+diff --git a/tests/run-readelf-mixed-corenote.sh b/tests/run-readelf-mixed-corenote.sh
+index 01e4594..9a8a380 100755
+--- a/tests/run-readelf-mixed-corenote.sh
++++ b/tests/run-readelf-mixed-corenote.sh
+@@ -30,12 +30,11 @@ Note segment of 892 bytes at offset 0x274:
pid: 11087, ppid: 11063, pgrp: 11087, sid: 11063
utime: 0.000000, stime: 0.010000, cutime: 0.000000, cstime: 0.000000
orig_r0: -1, fpvalid: 1
@@ -490,114 +620,6 @@ Index: elfutils-0.158/tests/run-readelf-mixed-corenote.sh
CORE 124 PRPSINFO
state: 0, sname: R, zomb: 0, nice: 0, flag: 0x00400500
uid: 0, gid: 0, pid: 11087, ppid: 11063, pgrp: 11087, sid: 11063
-Index: elfutils-0.158/tests/run-addrcfi.sh
-===================================================================
---- elfutils-0.158.orig/tests/run-addrcfi.sh 2014-04-21 11:13:24.378519252 +0000
-+++ elfutils-0.158/tests/run-addrcfi.sh 2014-04-21 11:13:24.378519252 +0000
-@@ -2530,6 +2530,38 @@
- FPA reg21 (f5): undefined
- FPA reg22 (f6): undefined
- FPA reg23 (f7): undefined
-+ VFP reg64 (s0): undefined
-+ VFP reg65 (s1): undefined
-+ VFP reg66 (s2): undefined
-+ VFP reg67 (s3): undefined
-+ VFP reg68 (s4): undefined
-+ VFP reg69 (s5): undefined
-+ VFP reg70 (s6): undefined
-+ VFP reg71 (s7): undefined
-+ VFP reg72 (s8): undefined
-+ VFP reg73 (s9): undefined
-+ VFP reg74 (s10): undefined
-+ VFP reg75 (s11): undefined
-+ VFP reg76 (s12): undefined
-+ VFP reg77 (s13): undefined
-+ VFP reg78 (s14): undefined
-+ VFP reg79 (s15): undefined
-+ VFP reg80 (s16): undefined
-+ VFP reg81 (s17): undefined
-+ VFP reg82 (s18): undefined
-+ VFP reg83 (s19): undefined
-+ VFP reg84 (s20): undefined
-+ VFP reg85 (s21): undefined
-+ VFP reg86 (s22): undefined
-+ VFP reg87 (s23): undefined
-+ VFP reg88 (s24): undefined
-+ VFP reg89 (s25): undefined
-+ VFP reg90 (s26): undefined
-+ VFP reg91 (s27): undefined
-+ VFP reg92 (s28): undefined
-+ VFP reg93 (s29): undefined
-+ VFP reg94 (s30): undefined
-+ VFP reg95 (s31): undefined
- FPA reg96 (f0): undefined
- FPA reg97 (f1): undefined
- FPA reg98 (f2): undefined
-@@ -2538,7 +2570,66 @@
- FPA reg101 (f5): undefined
- FPA reg102 (f6): undefined
- FPA reg103 (f7): undefined
-- integer reg128 (spsr): undefined
-+ MMX reg104 (wcgr0): undefined
-+ MMX reg105 (wcgr1): undefined
-+ MMX reg106 (wcgr2): undefined
-+ MMX reg107 (wcgr3): undefined
-+ MMX reg108 (wcgr4): undefined
-+ MMX reg109 (wcgr5): undefined
-+ MMX reg110 (wcgr6): undefined
-+ MMX reg111 (wcgr7): undefined
-+ MMX reg112 (wr0): undefined
-+ MMX reg113 (wr1): undefined
-+ MMX reg114 (wr2): undefined
-+ MMX reg115 (wr3): undefined
-+ MMX reg116 (wr4): undefined
-+ MMX reg117 (wr5): undefined
-+ MMX reg118 (wr6): undefined
-+ MMX reg119 (wr7): undefined
-+ MMX reg120 (wr8): undefined
-+ MMX reg121 (wr9): undefined
-+ MMX reg122 (wr10): undefined
-+ MMX reg123 (wr11): undefined
-+ MMX reg124 (wr12): undefined
-+ MMX reg125 (wr13): undefined
-+ MMX reg126 (wr14): undefined
-+ MMX reg127 (wr15): undefined
-+ state reg128 (spsr): undefined
-+ state reg129 (spsr_fiq): undefined
-+ state reg130 (spsr_irq): undefined
-+ state reg131 (spsr_abt): undefined
-+ state reg132 (spsr_und): undefined
-+ state reg133 (spsr_svc): undefined
-+ integer reg144 (r8_usr): undefined
-+ integer reg145 (r9_usr): undefined
-+ integer reg146 (r10_usr): undefined
-+ integer reg147 (r11_usr): undefined
-+ integer reg148 (r12_usr): undefined
-+ integer reg149 (r13_usr): undefined
-+ integer reg150 (r14_usr): undefined
-+ integer reg151 (r8_fiq): undefined
-+ integer reg152 (r9_fiq): undefined
-+ integer reg153 (r10_fiq): undefined
-+ integer reg154 (r11_fiq): undefined
-+ integer reg155 (r12_fiq): undefined
-+ integer reg156 (r13_fiq): undefined
-+ integer reg157 (r14_fiq): undefined
-+ integer reg158 (r13_irq): undefined
-+ integer reg159 (r14_irq): undefined
-+ integer reg160 (r13_abt): undefined
-+ integer reg161 (r14_abt): undefined
-+ integer reg162 (r13_und): undefined
-+ integer reg163 (r14_und): undefined
-+ integer reg164 (r13_svc): undefined
-+ integer reg165 (r14_svc): undefined
-+ MMX reg192 (wc0): undefined
-+ MMX reg193 (wc1): undefined
-+ MMX reg194 (wc2): undefined
-+ MMX reg195 (wc3): undefined
-+ MMX reg196 (wc4): undefined
-+ MMX reg197 (wc5): undefined
-+ MMX reg198 (wc6): undefined
-+ MMX reg199 (wc7): undefined
- VFP reg256 (d0): undefined
- VFP reg257 (d1): undefined
- VFP reg258 (d2): undefined
+--
+1.9.1
+
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.160/arm_func_value.patch b/meta/recipes-devtools/elfutils/elfutils-0.160/arm_func_value.patch
new file mode 100644
index 0000000000..eeb2063807
--- /dev/null
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/arm_func_value.patch
@@ -0,0 +1,166 @@
+From: Mark Wielaard <mjw@redhat.com>
+Date: Sun, 15 Jun 2014 11:30:35 +0200
+Subject: libebl: Add sym_func_value hook.
+
+The ARM EABI says that the zero bit of function symbol st_value indicates
+whether the symbol points to a THUMB or ARM function. Add a new ebl hook
+to adjust the st_value in such a case so that we get the actual value that
+the symbol points to. It isn't easily possible to reuse the existing
+resolve_sym_value for this purpose, so we end up with another hook that
+can be used from dwfl_module_getsym and elflint.
+
+Rebase arm_func_value.patch from 0.159 to 0.160
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+---
+ backends/arm_init.c | 1 +
+ backends/arm_symbol.c | 8 ++++++++
+ libdwfl/dwfl_module_getsym.c | 2 +-
+ libebl/Makefile.am | 3 ++-
+ libebl/ebl-hooks.h | 3 +++
+ libebl/eblsymfuncval.c | 43 +++++++++++++++++++++++++++++++++++++++++++
+ libebl/libebl.h | 11 +++++++++++
+ 7 files changed, 69 insertions(+), 2 deletions(-)
+ create mode 100644 libebl/eblsymfuncval.c
+
+diff --git a/backends/arm_init.c b/backends/arm_init.c
+index 8b57d3f..2266829 100644
+--- a/backends/arm_init.c
++++ b/backends/arm_init.c
+@@ -78,6 +78,7 @@ arm_init (elf, machine, eh, ehlen)
+ eh->return_value_location = arm_return_value_location_hard;
+ HOOK (eh, abi_cfi);
+ HOOK (eh, check_reloc_target_type);
++ HOOK (eh, sym_func_value);
+
+ /* We only unwind the core integer registers. */
+ eh->frame_nregs = 16;
+diff --git a/backends/arm_symbol.c b/backends/arm_symbol.c
+index cd467ff..49fca55 100644
+--- a/backends/arm_symbol.c
++++ b/backends/arm_symbol.c
+@@ -129,3 +129,11 @@ arm_check_reloc_target_type (Ebl *ebl __attribute__ ((unused)), Elf64_Word sh_ty
+ {
+ return sh_type == SHT_ARM_EXIDX;
+ }
++
++/* ARM EABI says that the low bit indicates whether the function
++ symbol value is a THUMB function or not. Mask it off. */
++GElf_Addr
++arm_sym_func_value (Ebl *ebl __attribute__ ((unused)), GElf_Addr val)
++{
++ return val & ~(GElf_Addr)1;
++}
+diff --git a/libdwfl/dwfl_module_getsym.c b/libdwfl/dwfl_module_getsym.c
+index 42d2b67..fb192d7 100644
+--- a/libdwfl/dwfl_module_getsym.c
++++ b/libdwfl/dwfl_module_getsym.c
+@@ -119,7 +119,7 @@ __libdwfl_getsym (Dwfl_Module *mod, int ndx, GElf_Sym *sym, GElf_Addr *addr,
+ descriptors). */
+
+ char *ident;
+- GElf_Addr st_value = sym->st_value & ebl_func_addr_mask (mod->ebl);
++ GElf_Addr st_value = ebl_sym_func_value (mod->ebl, sym->st_value);
+ *resolved = false;
+ if (! adjust_st_value && mod->e_type != ET_REL && alloc
+ && (GELF_ST_TYPE (sym->st_info) == STT_FUNC
+diff --git a/libebl/Makefile.am b/libebl/Makefile.am
+index ec4477b..889c21b 100644
+--- a/libebl/Makefile.am
++++ b/libebl/Makefile.am
+@@ -55,7 +55,8 @@ gen_SOURCES = eblopenbackend.c eblclosebackend.c eblstrtab.c \
+ eblsysvhashentrysize.c eblauxvinfo.c eblcheckobjattr.c \
+ ebl_check_special_section.c ebl_syscall_abi.c eblabicfi.c \
+ eblstother.c eblinitreg.c ebldwarftoregno.c eblnormalizepc.c \
+- eblunwind.c eblresolvesym.c eblcheckreloctargettype.c
++ eblunwind.c eblresolvesym.c eblcheckreloctargettype.c \
++ eblsymfuncval.c
+
+ libebl_a_SOURCES = $(gen_SOURCES)
+
+diff --git a/libebl/ebl-hooks.h b/libebl/ebl-hooks.h
+index e1186f8..160a821 100644
+--- a/libebl/ebl-hooks.h
++++ b/libebl/ebl-hooks.h
+@@ -191,5 +191,8 @@ bool EBLHOOK(unwind) (Ebl *ebl, Dwarf_Addr pc, ebl_tid_registers_t *setfunc,
+ (e.g. function descriptor resolving) */
+ bool EBLHOOK(resolve_sym_value) (Ebl *ebl, GElf_Addr *addr);
+
++/* Returns the real value of a symbol function address or offset. */
++GElf_Addr EBLHOOK(sym_func_value) (Ebl *ebl, GElf_Addr val);
++
+ /* Destructor for ELF backend handle. */
+ void EBLHOOK(destr) (struct ebl *);
+diff --git a/libebl/eblsymfuncval.c b/libebl/eblsymfuncval.c
+new file mode 100644
+index 0000000..c0b322f
+--- /dev/null
++++ b/libebl/eblsymfuncval.c
+@@ -0,0 +1,43 @@
++/* Turn a symbol function value into a real function address or offset.
++ Copyright (C) 2014 Red Hat, Inc.
++ This file is part of elfutils.
++
++ This file is free software; you can redistribute it and/or modify
++ it under the terms of either
++
++ * the GNU Lesser General Public License as published by the Free
++ Software Foundation; either version 3 of the License, or (at
++ your option) any later version
++
++ or
++
++ * the GNU General Public License as published by the Free
++ Software Foundation; either version 2 of the License, or (at
++ your option) any later version
++
++ or both in parallel, as here.
++
++ elfutils is distributed in the hope that it will be useful, but
++ WITHOUT ANY WARRANTY; without even the implied warranty of
++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ General Public License for more details.
++
++ You should have received copies of the GNU General Public License and
++ the GNU Lesser General Public License along with this program. If
++ not, see <http://www.gnu.org/licenses/>. */
++
++#ifdef HAVE_CONFIG_H
++# include <config.h>
++#endif
++
++#include <libeblP.h>
++#include <assert.h>
++
++GElf_Addr
++ebl_sym_func_value (Ebl *ebl, GElf_Addr val)
++{
++ if (ebl == NULL || ebl->sym_func_value == NULL)
++ return val;
++
++ return ebl->sym_func_value (ebl, val);
++}
+diff --git a/libebl/libebl.h b/libebl/libebl.h
+index bb993bf..40cf635 100644
+--- a/libebl/libebl.h
++++ b/libebl/libebl.h
+@@ -459,6 +459,17 @@ extern bool ebl_unwind (Ebl *ebl, Dwarf_Addr pc, ebl_tid_registers_t *setfunc,
+ extern bool ebl_resolve_sym_value (Ebl *ebl, GElf_Addr *addr)
+ __nonnull_attribute__ (2);
+
++/* Returns the real value of a symbol function address or offset
++ (e.g. when the st_value contains some flag bits that need to be
++ masked off). This is different from ebl_resolve_sym_value which
++ only works for actual symbol addresses (in non-ET_REL files) that
++ might resolve to an address in a different section.
++ ebl_sym_func_value is called to turn the given value into a
++ real address or offset (the original value might not be a real
++ address). This works for both ET_REL when the value is a section
++ offset or ET_EXEC or ET_DYN symbol values, which are addresses. */
++extern GElf_Addr ebl_sym_func_value (Ebl *ebl, GElf_Addr val);
++
+ #ifdef __cplusplus
+ }
+ #endif
+--
+1.9.1
+
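As a minimal illustration of what the new hook does (a sketch only, not part of
the patch): the ARM EABI stores a Thumb flag in bit zero of a function symbol's
st_value, so the bit must be cleared before the value is used as an address.
The helper name below is hypothetical; only GElf_Addr (from gelf.h) is real.

    #include <gelf.h>   /* GElf_Addr, from elfutils' libelf */

    /* Hypothetical helper mirroring arm_sym_func_value(): clear the ARM
       EABI Thumb bit so the caller gets the actual code address.  */
    static GElf_Addr
    strip_thumb_bit (GElf_Addr st_value)
    {
      return st_value & ~(GElf_Addr) 1;
    }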
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.160/arm_unwind_ret_mask.patch b/meta/recipes-devtools/elfutils/elfutils-0.160/arm_unwind_ret_mask.patch
new file mode 100644
index 0000000000..8abb36bb49
--- /dev/null
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/arm_unwind_ret_mask.patch
@@ -0,0 +1,83 @@
+From: Mark Wielaard <mjw@redhat.com>
+Date: Sun, 15 Jun 2014 12:30:02 +0200
+Subject: libebl: Add ebl_unwind_ret_mask.
+
+Another ARM oddity. A return value address in an unwind will contain an
+extra bit to indicate whether to return to a regular ARM or THUMB function.
+Add a new ebl function, ebl_unwind_ret_mask, that returns the mask to apply
+to get the actual return address during an unwind.
+
+Rebase arm_unwind_ret_mask.patch from 0.159 to 0.160
+
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+---
+ backends/arm_init.c | 3 +++
+ libebl/eblinitreg.c | 8 ++++++++
+ libebl/libebl.h | 4 ++++
+ libebl/libeblP.h | 6 ++++++
+ 4 files changed, 21 insertions(+)
+
+diff --git a/backends/arm_init.c b/backends/arm_init.c
+index 2266829..f8df042 100644
+--- a/backends/arm_init.c
++++ b/backends/arm_init.c
+@@ -87,5 +87,8 @@ arm_init (elf, machine, eh, ehlen)
+ /* Bit zero encodes whether an function address is THUMB or ARM. */
+ eh->func_addr_mask = ~(GElf_Addr)1;
+
++ /* Bit zero encodes whether to return to a THUMB or ARM function. */
++ eh->unwind_ret_mask = ~(GElf_Addr)1;
++
+ return MODVERSION;
+ }
+diff --git a/libebl/eblinitreg.c b/libebl/eblinitreg.c
+index 5729b3c..ca681c0 100644
+--- a/libebl/eblinitreg.c
++++ b/libebl/eblinitreg.c
+@@ -56,3 +56,11 @@ ebl_func_addr_mask (Ebl *ebl)
+ return ((ebl == NULL || ebl->func_addr_mask == 0)
+ ? ~(GElf_Addr)0 : ebl->func_addr_mask);
+ }
++
++GElf_Addr
++ebl_unwind_ret_mask (Ebl *ebl)
++{
++ return ((ebl == NULL || ebl->unwind_ret_mask == 0)
++ ? ~(GElf_Addr)0 : ebl->unwind_ret_mask);
++}
++
+diff --git a/libebl/libebl.h b/libebl/libebl.h
+index 40cf635..be70027 100644
+--- a/libebl/libebl.h
++++ b/libebl/libebl.h
+@@ -420,6 +420,10 @@ extern size_t ebl_frame_nregs (Ebl *ebl)
+ tables) is needed. */
+ extern GElf_Addr ebl_func_addr_mask (Ebl *ebl);
+
++/* Mask to use for unwind return address in case the architecture adds
++ some extra non-address bits to it. */
++extern GElf_Addr ebl_unwind_ret_mask (Ebl *ebl);
++
+ /* Convert *REGNO as is in DWARF to a lower range suitable for
+ Dwarf_Frame->REGS indexing. */
+ extern bool ebl_dwarf_to_regno (Ebl *ebl, unsigned *regno)
+diff --git a/libebl/libeblP.h b/libebl/libeblP.h
+index dbd67f3..e18ace6 100644
+--- a/libebl/libeblP.h
++++ b/libebl/libeblP.h
+@@ -70,6 +70,12 @@ struct ebl
+ otherwise it should be the actual mask to use. */
+ GElf_Addr func_addr_mask;
+
++ /* Mask to use to get the return address from an unwind in case the
++ architecture adds some extra non-address bits to it. When not
++ initialized (0) then ebl_unwind_ret_mask will return ~0, otherwise
++ it should be the actual mask to use. */
++ GElf_Addr unwind_ret_mask;
++
+ /* Function descriptor load address and table as used by
+ ebl_resolve_sym_value if available for this arch. */
+ GElf_Addr fd_addr;
+--
+1.9.1
+
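A similarly hedged sketch of how an unwinder consumes the mask added above: the
raw return address recovered from a frame is ANDed with the backend mask before
being treated as a PC. The function below is illustrative; its zero check
mirrors the ~0 fallback in ebl_unwind_ret_mask for backends that leave the
field uninitialized.

    #include <gelf.h>

    /* Illustrative only: strip non-address bits (e.g. the ARM/Thumb return
       bit) from a raw unwind return address using a backend mask.  */
    static GElf_Addr
    clean_return_address (GElf_Addr raw_ra, GElf_Addr unwind_ret_mask)
    {
      GElf_Addr mask = unwind_ret_mask == 0 ? ~(GElf_Addr) 0 : unwind_ret_mask;
      return raw_ra & mask;
    }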
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/fixheadercheck.patch b/meta/recipes-devtools/elfutils/elfutils-0.160/fixheadercheck.patch
index 8796e9a394..8796e9a394 100644
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/fixheadercheck.patch
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/fixheadercheck.patch
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/hppa_backend.diff b/meta/recipes-devtools/elfutils/elfutils-0.160/hppa_backend.diff
index 6c19d176e0..d51a720073 100644
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/hppa_backend.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/hppa_backend.diff
@@ -478,7 +478,7 @@ Index: elfutils-0.158/backends/parisc_retval.c
+ /* Follow typedefs and qualifiers to get to the actual type. */
+ while (tag == DW_TAG_typedef
+ || tag == DW_TAG_const_type || tag == DW_TAG_volatile_type
-+ || tag == DW_TAG_restrict_type || tag == DW_TAG_mutable_type)
++ || tag == DW_TAG_restrict_type)
+ {
+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem);
+ typedie = dwarf_formref_die (attr, &die_mem);
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/m68k_backend.diff b/meta/recipes-devtools/elfutils/elfutils-0.160/m68k_backend.diff
index 48bffdad0c..f5b566f2d5 100644
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/m68k_backend.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/m68k_backend.diff
@@ -409,7 +409,7 @@ Index: elfutils-0.158/backends/m68k_retval.c
+ /* Follow typedefs and qualifiers to get to the actual type. */
+ while (tag == DW_TAG_typedef
+ || tag == DW_TAG_const_type || tag == DW_TAG_volatile_type
-+ || tag == DW_TAG_restrict_type || tag == DW_TAG_mutable_type)
++ || tag == DW_TAG_restrict_type)
+ {
+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem);
+ typedie = dwarf_formref_die (attr, &die_mem);
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/mips_backend.diff b/meta/recipes-devtools/elfutils/elfutils-0.160/mips_backend.diff
index b188927a0b..d73a3f2869 100644
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/mips_backend.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/mips_backend.diff
@@ -496,7 +496,7 @@ Index: elfutils-0.158/backends/mips_retval.c
+ /* Follow typedefs and qualifiers to get to the actual type. */
+ while (tag == DW_TAG_typedef
+ || tag == DW_TAG_const_type || tag == DW_TAG_volatile_type
-+ || tag == DW_TAG_restrict_type || tag == DW_TAG_mutable_type)
++ || tag == DW_TAG_restrict_type)
+ {
+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem);
+ typedie = dwarf_formref_die (attr, &die_mem);
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/mips_readelf_w.patch b/meta/recipes-devtools/elfutils/elfutils-0.160/mips_readelf_w.patch
index 8e669e7199..8e669e7199 100644
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/mips_readelf_w.patch
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/mips_readelf_w.patch
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.160/non_linux.patch b/meta/recipes-devtools/elfutils/elfutils-0.160/non_linux.patch
new file mode 100644
index 0000000000..35b1b389fb
--- /dev/null
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/non_linux.patch
@@ -0,0 +1,35 @@
+Index: elfutils-0.159/libdwfl/linux-pid-attach.c
+===================================================================
+--- elfutils-0.159.orig/libdwfl/linux-pid-attach.c
++++ elfutils-0.159/libdwfl/linux-pid-attach.c
+@@ -393,6 +393,16 @@ pid_getthread (Dwfl *dwfl __attribute__
+ return false;
+ }
+
++bool
++internal_function
++__libdwfl_ptrace_attach (pid_t tid __attribute__ ((unused)),
++ bool *tid_was_stoppedp __attribute__ ((unused)))
++{
++ errno = ENOSYS;
++ __libdwfl_seterrno (DWFL_E_ERRNO);
++ return false;
++}
++
+ static bool
+ pid_memory_read (Dwfl *dwfl __attribute__ ((unused)),
+ Dwarf_Addr addr __attribute__ ((unused)),
+@@ -419,6 +429,13 @@ pid_detach (Dwfl *dwfl __attribute__ ((u
+ {
+ }
+
++void
++internal_function
++__libdwfl_ptrace_detach (pid_t tid __attribute__ ((unused)),
++ bool tid_was_stopped __attribute__ ((unused)))
++{
++}
++
+ static void
+ pid_thread_detach (Dwfl_Thread *thread __attribute__ ((unused)),
+ void *thread_arg __attribute__ ((unused)))
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/redhat-portability.diff b/meta/recipes-devtools/elfutils/elfutils-0.160/redhat-portability.diff
index 5a75375690..a358b11fe6 100644
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/redhat-portability.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/redhat-portability.diff
@@ -1,6 +1,8 @@
---- elfutils/backends/ChangeLog
-+++ elfutils/backends/ChangeLog
-@@ -292,6 +292,10 @@
+Index: elfutils-0.159/backends/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/backends/ChangeLog
++++ elfutils-0.159/backends/ChangeLog
+@@ -364,6 +364,10 @@
* ppc_attrs.c (ppc_check_object_attribute): Handle tag
GNU_Power_ABI_Struct_Return.
@@ -11,7 +13,7 @@
2008-10-04 Ulrich Drepper <drepper@redhat.com>
* i386_reloc.def: Fix entries for TLS_GOTDESC, TLS_DESC_CALL, and
-@@ -619,6 +623,11 @@
+@@ -691,6 +695,11 @@
* sparc_init.c: Likewise.
* x86_64_init.c: Likewise.
@@ -23,7 +25,7 @@
2005-11-19 Roland McGrath <roland@redhat.com>
* ppc64_reloc.def: REL30 -> ADDR30.
-@@ -641,6 +650,9 @@
+@@ -713,6 +722,9 @@
* Makefile.am (uninstall): Don't try to remove $(pkgincludedir).
(CLEANFILES): Add libebl_$(m).so.
@@ -33,20 +35,24 @@
* ppc_reloc.def: Update bits per Alan Modra <amodra@bigpond.net.au>.
* ppc64_reloc.def: Likewise.
---- elfutils/backends/Makefile.am
-+++ elfutils/backends/Makefile.am
-@@ -124,7 +124,7 @@ libebl_%.so libebl_%.map: libebl_%_pic.a
+Index: elfutils-0.159/backends/Makefile.am
+===================================================================
+--- elfutils-0.159.orig/backends/Makefile.am
++++ elfutils-0.159/backends/Makefile.am
+@@ -119,7 +119,7 @@ libebl_%.so libebl_%.map: libebl_%_pic.a
$(LINK) -shared -o $(@:.map=.so) \
-Wl,--whole-archive $< $(cpu_$*) -Wl,--no-whole-archive \
-Wl,--version-script,$(@:.so=.map) \
-- -Wl,-z,defs -Wl,--as-needed $(libelf) $(libdw) $(libmudflap)
-+ -Wl,-z,defs $(LD_AS_NEEDED) $(libelf) $(libdw) $(libmudflap)
+- -Wl,-z,defs -Wl,--as-needed $(libelf) $(libdw)
++ -Wl,-z,defs $(LD_AS_NEEDED) $(libelf) $(libdw)
$(textrel_check)
libebl_i386.so: $(cpu_i386)
---- elfutils/ChangeLog
-+++ elfutils/ChangeLog
-@@ -118,6 +118,8 @@
+Index: elfutils-0.159/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/ChangeLog
++++ elfutils-0.159/ChangeLog
+@@ -148,6 +148,8 @@
2012-01-24 Mark Wielaard <mjw@redhat.com>
@@ -55,7 +61,7 @@
* COPYING: Fix address. Updated version from gnulib.
2012-01-23 Mark Wielaard <mjw@redhat.com>
-@@ -136,6 +138,9 @@
+@@ -166,6 +168,9 @@
2011-10-08 Mike Frysinger <vapier@gentoo.org>
@@ -65,7 +71,7 @@
* configure.ac: Fix use of AC_ARG_ENABLE to handle $enableval correctly.
2011-10-02 Ulrich Drepper <drepper@gmail.com>
-@@ -157,6 +162,10 @@
+@@ -187,6 +192,10 @@
* configure.ac (LOCALEDIR, DATADIRNAME): Removed.
@@ -76,7 +82,7 @@
2009-09-21 Ulrich Drepper <drepper@redhat.com>
* configure.ac: Update for more modern autoconf.
-@@ -165,6 +174,10 @@
+@@ -195,6 +204,10 @@
* configure.ac (zip_LIBS): Check for liblzma too.
@@ -87,7 +93,7 @@
2009-04-19 Roland McGrath <roland@redhat.com>
* configure.ac (eu_version): Round down here, not in version.h macros.
-@@ -176,6 +189,8 @@
+@@ -206,6 +219,8 @@
2009-01-23 Roland McGrath <roland@redhat.com>
@@ -96,7 +102,7 @@
* configure.ac (zlib check): Check for gzdirect, need zlib >= 1.2.2.3.
* configure.ac (__thread check): Use AC_LINK_IFELSE, in case of
-@@ -256,6 +271,10 @@
+@@ -286,6 +301,10 @@
* configure.ac: Add dummy automake conditional to get dependencies
for non-generic linker right. See src/Makefile.am.
@@ -107,7 +113,7 @@
2005-11-18 Roland McGrath <roland@redhat.com>
* Makefile.am (DISTCHECK_CONFIGURE_FLAGS): New variable.
-@@ -303,6 +322,17 @@
+@@ -333,6 +352,17 @@
* Makefile.am (all_SUBDIRS): Add libdwfl.
* configure.ac: Write libdwfl/Makefile.
@@ -125,9 +131,11 @@
2005-05-19 Roland McGrath <roland@redhat.com>
* configure.ac [AH_BOTTOM] (INTDECL, _INTDECL): New macros.
---- elfutils/config/ChangeLog
-+++ elfutils/config/ChangeLog
-@@ -44,6 +44,10 @@
+Index: elfutils-0.159/config/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/config/ChangeLog
++++ elfutils-0.159/config/ChangeLog
+@@ -58,6 +58,10 @@
* known-dwarf.awk: Use gawk.
@@ -138,17 +146,19 @@
2010-07-02 Ulrich Drepper <drepper@redhat.com>
* elfutils.spec.in: Add more BuildRequires.
---- elfutils/config/eu.am
-+++ elfutils/config/eu.am
+Index: elfutils-0.159/config/eu.am
+===================================================================
+--- elfutils-0.159.orig/config/eu.am
++++ elfutils-0.159/config/eu.am
@@ -1,6 +1,6 @@
## Common automake fragments for elfutils subdirectory makefiles.
##
--## Copyright (C) 2010 Red Hat, Inc.
-+## Copyright (C) 2010-2011 Red Hat, Inc.
+-## Copyright (C) 2010, 2014 Red Hat, Inc.
++## Copyright (C) 2010-2011, 2014 Red Hat, Inc.
##
## This file is part of elfutils.
##
-@@ -29,14 +29,20 @@
+@@ -29,13 +29,21 @@
## not, see <http://www.gnu.org/licenses/>.
##
@@ -157,43 +167,26 @@
+
DEFS = -D_GNU_SOURCE -DHAVE_CONFIG_H -DLOCALEDIR='"${localedir}"'
AM_CPPFLAGS = -I. -I$(srcdir) -I$(top_srcdir)/lib -I..
- AM_CFLAGS = -std=gnu99 -Wall -Wshadow \
-- $(if $($(*F)_no_Werror),,-Werror) \
+-AM_CFLAGS = -std=gnu99 -Wall -Wshadow -Wformat=2 \
++AM_CFLAGS = -std=gnu99 -Wall -Wshadow \
+ $(if $($(*F)_no_Werror),,-Werror) \
- $(if $($(*F)_no_Wunused),,-Wunused -Wextra) \
+ $(if $($(*F)_no_Wunused),,-Wunused $(WEXTRA)) \
- $(if $($(*F)_no_Wformat),-Wno-format,-Wformat=2) \
++ $(if $($(*F)_no_Wformat),-Wno-format,-Wformat=2) \
$($(*F)_CFLAGS)
+if BUILD_WERROR
+AM_CFLAGS += $(if $($(*F)_no_Werror),,-Werror)
+endif
+
- if MUDFLAP
- AM_CFLAGS += -fmudflap
- libmudflap = -lmudflap
---- elfutils/config.h.in
-+++ elfutils/config.h.in
-@@ -6,6 +6,9 @@
- /* Defined if libdw should support GNU ref_alt FORM, dwz multi files. */
- #undef ENABLE_DWZ
+ COMPILE.os = $(filter-out -fprofile-arcs -ftest-coverage, $(COMPILE))
-+/* Have __builtin_popcount. */
-+#undef HAVE_BUILTIN_POPCOUNT
-+
- /* Define to 1 if you have the <inttypes.h> header file. */
- #undef HAVE_INTTYPES_H
-
-@@ -102,4 +105,7 @@
- /* Define for large files, on AIX-style hosts. */
- #undef _LARGE_FILES
-
-+/* Stubbed out if missing compiler support. */
-+#undef __thread
-+
- #include <eu-config.h>
---- elfutils/configure.ac
-+++ elfutils/configure.ac
-@@ -99,6 +99,54 @@ CFLAGS="$old_CFLAGS"])
+ %.os: %.c %.o
+Index: elfutils-0.159/configure.ac
+===================================================================
+--- elfutils-0.159.orig/configure.ac
++++ elfutils-0.159/configure.ac
+@@ -89,6 +89,54 @@ CFLAGS="$old_CFLAGS"])
AS_IF([test "x$ac_cv_c99" != xyes],
AC_MSG_ERROR([gcc with C99 support required]))
@@ -248,7 +241,7 @@
AC_CACHE_CHECK([for __thread support], ac_cv_tls, [dnl
# Use the same flags that we use for our DSOs, so the test is representative.
# Some old compiler/linker/libc combinations fail some ways and not others.
-@@ -114,7 +162,10 @@ static __thread int a; int foo (int b) {
+@@ -104,7 +152,10 @@ static __thread int a; int foo (int b) {
CFLAGS="$save_CFLAGS"
LDFLAGS="$save_LDFLAGS"])
AS_IF([test "x$ac_cv_tls" != xyes],
@@ -260,9 +253,9 @@
dnl This test must come as early as possible after the compiler configuration
dnl tests, because the choice of the file model can (in principle) affect
-@@ -213,6 +264,11 @@ AM_CONDITIONAL(USE_VALGRIND, test "$use_
+@@ -183,6 +234,11 @@ AM_CONDITIONAL(USE_VALGRIND, test "$use_
AM_CONDITIONAL(BUILD_STATIC, [dnl
- test "$use_mudflap" = yes -o "$use_gprof" = yes -o "$use_gcov" = yes])
+ test "$use_gprof" = yes -o "$use_gcov" = yes])
+AC_ARG_ENABLE([werror],
+AS_HELP_STRING([--disable-werror],[do not build with -Werror]),
@@ -272,7 +265,7 @@
AC_ARG_ENABLE([tests-rpath],
AS_HELP_STRING([--enable-tests-rpath],[build $ORIGIN-using rpath into tests]),
[tests_use_rpath=$enableval], [tests_use_rpath=no])
-@@ -324,7 +380,7 @@ case "$eu_version" in
+@@ -297,7 +353,7 @@ case "$eu_version" in
esac
# Round up to the next release API (x.y) version.
@@ -281,9 +274,11 @@
AC_CHECK_SIZEOF(long)
---- elfutils/lib/ChangeLog
-+++ elfutils/lib/ChangeLog
-@@ -61,6 +61,9 @@
+Index: elfutils-0.159/lib/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/lib/ChangeLog
++++ elfutils-0.159/lib/ChangeLog
+@@ -65,6 +65,9 @@
2009-01-23 Roland McGrath <roland@redhat.com>
@@ -293,7 +288,7 @@
* eu-config.h: Add multiple inclusion protection.
2009-01-17 Ulrich Drepper <drepper@redhat.com>
-@@ -117,6 +120,11 @@
+@@ -121,6 +124,11 @@
* Makefile.am (libeu_a_SOURCES): Add it.
* system.h: Declare crc32_file.
@@ -305,8 +300,10 @@
2005-04-30 Ulrich Drepper <drepper@redhat.com>
* Makefile.am: Use -ffunction-sections for xmalloc.c.
---- elfutils/lib/eu-config.h
-+++ elfutils/lib/eu-config.h
+Index: elfutils-0.159/lib/eu-config.h
+===================================================================
+--- elfutils-0.159.orig/lib/eu-config.h
++++ elfutils-0.159/lib/eu-config.h
@@ -162,6 +162,17 @@ asm (".section predict_data, \"aw\"; .pr
/* This macro is used by the tests conditionalize for standalone building. */
#define ELFUTILS_HEADER(name) <lib##name.h>
@@ -325,9 +322,11 @@
#ifdef SHARED
# define OLD_VERSION(name, version) \
---- elfutils/libasm/ChangeLog
-+++ elfutils/libasm/ChangeLog
-@@ -75,6 +75,11 @@
+Index: elfutils-0.159/libasm/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/libasm/ChangeLog
++++ elfutils-0.159/libasm/ChangeLog
+@@ -79,6 +79,11 @@
* asm_error.c: Add new error ASM_E_IOERROR.
* libasmP.h: Add ASM_E_IOERROR definition.
@@ -339,9 +338,11 @@
2005-02-15 Ulrich Drepper <drepper@redhat.com>
* Makefile.am (AM_CFLAGS): Add -Wunused -Wextra -Wformat=2.
---- elfutils/libcpu/ChangeLog
-+++ elfutils/libcpu/ChangeLog
-@@ -47,6 +47,9 @@
+Index: elfutils-0.159/libcpu/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/libcpu/ChangeLog
++++ elfutils-0.159/libcpu/ChangeLog
+@@ -51,6 +51,9 @@
2009-01-23 Roland McGrath <roland@redhat.com>
@@ -351,7 +352,7 @@
* Makefile.am (i386_parse_CFLAGS): Use quotes around command
substitution that can produce leading whitespace.
-@@ -376,6 +379,11 @@
+@@ -380,6 +383,11 @@
* defs/i386.doc: New file.
* defs/x86_64: New file.
@@ -363,8 +364,10 @@
2005-02-15 Ulrich Drepper <drepper@redhat.com>
* Makefile (AM_CFLAGS): Add -Wunused -Wextra -Wformat=2.
---- elfutils/libcpu/i386_disasm.c
-+++ elfutils/libcpu/i386_disasm.c
+Index: elfutils-0.159/libcpu/i386_disasm.c
+===================================================================
+--- elfutils-0.159.orig/libcpu/i386_disasm.c
++++ elfutils-0.159/libcpu/i386_disasm.c
@@ -822,6 +822,7 @@ i386_disasm (const uint8_t **startp, con
default:
@@ -373,9 +376,11 @@
}
}
else
---- elfutils/libdw/ChangeLog
-+++ elfutils/libdw/ChangeLog
-@@ -346,6 +346,10 @@
+Index: elfutils-0.159/libdw/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/libdw/ChangeLog
++++ elfutils-0.159/libdw/ChangeLog
+@@ -420,6 +420,10 @@
* Makefile.am (known-dwarf.h): Run gawk on config/known-dwarf.awk.
@@ -386,7 +391,7 @@
2011-07-14 Mark Wielaard <mjw@redhat.com>
* libdw.h (dwarf_offdie): Fix documentation to mention .debug_info.
-@@ -705,6 +709,10 @@
+@@ -779,6 +783,10 @@
* dwarf_hasattr_integrate.c: Integrate DW_AT_specification too.
@@ -397,7 +402,7 @@
2009-08-10 Roland McGrath <roland@redhat.com>
* dwarf_getscopevar.c: Use dwarf_diename.
-@@ -1473,6 +1481,11 @@
+@@ -1547,6 +1555,11 @@
2005-05-31 Roland McGrath <roland@redhat.com>
@@ -409,9 +414,11 @@
* dwarf_formref_die.c (dwarf_formref_die): Add CU header offset to
formref offset.
---- elfutils/libdw/dwarf_begin_elf.c
-+++ elfutils/libdw/dwarf_begin_elf.c
-@@ -48,6 +48,14 @@
+Index: elfutils-0.159/libdw/dwarf_begin_elf.c
+===================================================================
+--- elfutils-0.159.orig/libdw/dwarf_begin_elf.c
++++ elfutils-0.159/libdw/dwarf_begin_elf.c
+@@ -47,6 +47,14 @@
#if USE_ZLIB
# include <endian.h>
# define crc32 loser_crc32
@@ -426,9 +433,11 @@
# include <zlib.h>
# undef crc32
#endif
---- elfutils/libdw/libdw.h
-+++ elfutils/libdw/libdw.h
-@@ -879,7 +879,7 @@ extern Dwarf_OOM dwarf_new_oom_handler (
+Index: elfutils-0.159/libdw/libdw.h
+===================================================================
+--- elfutils-0.159.orig/libdw/libdw.h
++++ elfutils-0.159/libdw/libdw.h
+@@ -891,7 +891,7 @@ extern Dwarf_OOM dwarf_new_oom_handler (
/* Inline optimizations. */
@@ -437,9 +446,11 @@
/* Return attribute code of given attribute. */
__libdw_extern_inline unsigned int
dwarf_whatattr (Dwarf_Attribute *attr)
---- elfutils/libdwfl/ChangeLog
-+++ elfutils/libdwfl/ChangeLog
-@@ -283,6 +283,21 @@
+Index: elfutils-0.159/libdwfl/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/libdwfl/ChangeLog
++++ elfutils-0.159/libdwfl/ChangeLog
+@@ -421,6 +421,21 @@
(dwfl_module_addrsym) (i_to_symfile): New function.
(dwfl_module_addrsym) (search_table): Use it.
@@ -461,7 +472,7 @@
2013-11-07 Jan Kratochvil <jan.kratochvil@redhat.com>
Mark Wielaard <mjw@redhat.com>
-@@ -2048,6 +2063,11 @@
+@@ -2186,6 +2201,11 @@
2005-07-21 Roland McGrath <roland@redhat.com>
@@ -473,8 +484,10 @@
* Makefile.am (noinst_HEADERS): Add loc2c.c.
* test2.c (main): Check sscanf result to quiet warning.
---- elfutils/libdwfl/linux-core-attach.c
-+++ elfutils/libdwfl/linux-core-attach.c
+Index: elfutils-0.159/libdwfl/linux-core-attach.c
+===================================================================
+--- elfutils-0.159.orig/libdwfl/linux-core-attach.c
++++ elfutils-0.159/libdwfl/linux-core-attach.c
@@ -29,6 +29,35 @@
#include "libdwflP.h"
#include <fcntl.h>
@@ -511,40 +524,43 @@
#ifndef MIN
# define MIN(a, b) ((a) < (b) ? (a) : (b))
---- elfutils/libdwfl/linux-pid-attach.c
-+++ elfutils/libdwfl/linux-pid-attach.c
-@@ -268,13 +268,24 @@ pid_thread_detach (Dwfl_Thread *thread,
- pid_arg->tid_attached = 0;
- if (! pid_arg->assume_ptrace_stopped)
- {
-+ // Older kernels (tested kernel-2.6.18-348.12.1.el5.x86_64) need special
-+ // handling of the detachment to keep the process State: T (stopped).
-+ if (pid_arg->tid_was_stopped)
-+ syscall (__NR_tkill, tid, SIGSTOP);
- /* This handling is needed only on older Linux kernels such as
-- 2.6.32-358.23.2.el6.ppc64. Later kernels such as
-- 3.11.7-200.fc19.x86_64 remember the T (stopped) state
-- themselves and no longer need to pass SIGSTOP during
-- PTRACE_DETACH. */
-+ 2.6.32-358.23.2.el6.ppc64. Later kernels such as 3.11.7-200.fc19.x86_64
-+ remember the T (stopped) state themselves and no longer need to pass
-+ SIGSTOP during PTRACE_DETACH. */
- ptrace (PTRACE_DETACH, tid, NULL,
- (void *) (intptr_t) (pid_arg->tid_was_stopped ? SIGSTOP : 0));
-+ if (pid_arg->tid_was_stopped)
-+ {
-+ // Wait till the SIGSTOP settles down.
-+ int i;
-+ for (i = 0; i < 100000; i++)
-+ if (linux_proc_pid_is_stopped (tid))
-+ break;
-+ }
- }
+Index: elfutils-0.159/libdwfl/linux-pid-attach.c
+===================================================================
+--- elfutils-0.159.orig/libdwfl/linux-pid-attach.c
++++ elfutils-0.159/libdwfl/linux-pid-attach.c
+@@ -255,6 +255,11 @@ void
+ internal_function
+ __libdwfl_ptrace_detach (pid_t tid, bool tid_was_stopped)
+ {
++ // Older kernels (tested kernel-2.6.18-348.12.1.el5.x86_64) need special
++ // handling of the detachment to keep the process State: T (stopped).
++ if (tid_was_stopped)
++ syscall (__NR_tkill, tid, SIGSTOP);
++
+ /* This handling is needed only on older Linux kernels such as
+ 2.6.32-358.23.2.el6.ppc64. Later kernels such as
+ 3.11.7-200.fc19.x86_64 remember the T (stopped) state
+@@ -262,6 +267,15 @@ __libdwfl_ptrace_detach (pid_t tid, bool
+ PTRACE_DETACH. */
+ ptrace (PTRACE_DETACH, tid, NULL,
+ (void *) (intptr_t) (tid_was_stopped ? SIGSTOP : 0));
++
++ if (tid_was_stopped)
++ {
++ // Wait till the SIGSTOP settles down.
++ int i;
++ for (i = 0; i < 100000; i++)
++ if (linux_proc_pid_is_stopped (tid))
++ break;
++ }
}
---- elfutils/libebl/ChangeLog
-+++ elfutils/libebl/ChangeLog
-@@ -738,6 +738,11 @@
+ static void
+Index: elfutils-0.159/libebl/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/libebl/ChangeLog
++++ elfutils-0.159/libebl/ChangeLog
+@@ -748,6 +748,11 @@
* Makefile.am (libebl_*_so_SOURCES): Set to $(*_SRCS) so dependency
tracking works right.
@@ -556,9 +572,11 @@
2005-05-21 Ulrich Drepper <drepper@redhat.com>
* libebl_x86_64.map: Add x86_64_core_note.
---- elfutils/libelf/ChangeLog
-+++ elfutils/libelf/ChangeLog
-@@ -85,6 +85,11 @@
+Index: elfutils-0.159/libelf/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/libelf/ChangeLog
++++ elfutils-0.159/libelf/ChangeLog
+@@ -135,6 +135,11 @@
* elf-knowledge.h (SECTION_STRIP_P): Remove < SHT_NUM check.
@@ -570,7 +588,7 @@
2011-02-26 Mark Wielaard <mjw@redhat.com>
* elf_end.c (elf_end): Call rwlock_unlock before rwlock_fini.
-@@ -762,6 +767,11 @@
+@@ -812,6 +817,11 @@
* elf.h: Update from glibc.
@@ -582,8 +600,10 @@
2005-05-08 Roland McGrath <roland@redhat.com>
* elf_begin.c (read_file) [_MUDFLAP]: Don't use mmap for now.
---- elfutils/libelf/common.h
-+++ elfutils/libelf/common.h
+Index: elfutils-0.159/libelf/common.h
+===================================================================
+--- elfutils-0.159.orig/libelf/common.h
++++ elfutils-0.159/libelf/common.h
@@ -139,7 +139,7 @@ libelf_release_all (Elf *elf)
(Var) = (sizeof (Var) == 1 \
? (unsigned char) (Var) \
@@ -602,8 +622,10 @@
: (sizeof (Var) == 4 \
? bswap_32 (Var) \
: bswap_64 (Var))))
---- elfutils/libelf/gnuhash_xlate.h
-+++ elfutils/libelf/gnuhash_xlate.h
+Index: elfutils-0.159/libelf/gnuhash_xlate.h
+===================================================================
+--- elfutils-0.159.orig/libelf/gnuhash_xlate.h
++++ elfutils-0.159/libelf/gnuhash_xlate.h
@@ -1,5 +1,5 @@
/* Conversion functions for versioning information.
- Copyright (C) 2006, 2007 Red Hat, Inc.
@@ -622,8 +644,10 @@
len -= 4;
}
}
---- elfutils/src/addr2line.c
-+++ elfutils/src/addr2line.c
+Index: elfutils-0.159/src/addr2line.c
+===================================================================
+--- elfutils-0.159.orig/src/addr2line.c
++++ elfutils-0.159/src/addr2line.c
@@ -540,10 +540,10 @@ handle_address (const char *string, Dwfl
bool parsed = false;
int i, j;
@@ -637,9 +661,11 @@
{
default:
break;
---- elfutils/src/ChangeLog
-+++ elfutils/src/ChangeLog
-@@ -964,8 +964,16 @@
+Index: elfutils-0.159/src/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/src/ChangeLog
++++ elfutils-0.159/src/ChangeLog
+@@ -1112,8 +1112,16 @@
* readelf.c (attr_callback): Use print_block only when we don't use
print_ops.
@@ -656,7 +682,7 @@
* ar.c (do_oper_extract): Use pathconf instead of statfs.
2009-08-01 Ulrich Drepper <drepper@redhat.com>
-@@ -1129,6 +1137,8 @@
+@@ -1277,6 +1285,8 @@
* readelf.c (print_debug_frame_section): Use t instead of j formats
for ptrdiff_t OFFSET.
@@ -665,7 +691,7 @@
2009-01-21 Ulrich Drepper <drepper@redhat.com>
* elflint.c (check_program_header): Fix typo in .eh_frame_hdr section
-@@ -1312,6 +1322,11 @@
+@@ -1460,6 +1470,11 @@
that matches its PT_LOAD's p_flags &~ PF_W. On sparc, PF_X really
is valid in RELRO.
@@ -677,7 +703,7 @@
2008-02-29 Roland McGrath <roland@redhat.com>
* readelf.c (print_attributes): Add a cast.
-@@ -1563,6 +1578,8 @@
+@@ -1711,6 +1726,8 @@
* readelf.c (hex_dump): Fix rounding error in whitespace calculation.
@@ -686,7 +712,7 @@
2007-10-15 Roland McGrath <roland@redhat.com>
* make-debug-archive.in: New file.
-@@ -2002,6 +2019,10 @@
+@@ -2150,6 +2167,10 @@
* elflint.c (valid_e_machine): Add EM_ALPHA.
Reported by Christian Aichinger <Greek0@gmx.net>.
@@ -697,7 +723,7 @@
2006-08-08 Ulrich Drepper <drepper@redhat.com>
* elflint.c (check_dynamic): Don't require DT_HASH for DT_SYMTAB.
-@@ -2078,6 +2099,10 @@
+@@ -2226,6 +2247,10 @@
* Makefile.am: Add hacks to create dependency files for non-generic
linker.
@@ -708,7 +734,7 @@
2006-06-12 Ulrich Drepper <drepper@redhat.com>
* ldgeneric.c (ld_generic_generate_sections): Don't create .interp
-@@ -2426,6 +2451,11 @@
+@@ -2574,6 +2599,11 @@
* readelf.c (print_debug_loc_section): Fix indentation for larger
address size.
@@ -720,8 +746,10 @@
2005-05-30 Roland McGrath <roland@redhat.com>
* readelf.c (print_debug_line_section): Print section offset of each
---- elfutils/src/findtextrel.c
-+++ elfutils/src/findtextrel.c
+Index: elfutils-0.159/src/findtextrel.c
+===================================================================
+--- elfutils-0.159.orig/src/findtextrel.c
++++ elfutils-0.159/src/findtextrel.c
@@ -496,7 +496,11 @@ ptrcompare (const void *p1, const void *
@@ -735,8 +763,10 @@
GElf_Addr addr, Elf *elf, Elf_Scn *symscn, Dwarf *dw,
const char *fname, bool more_than_one, void **knownsrcs)
{
---- elfutils/src/ld.h
-+++ elfutils/src/ld.h
+Index: elfutils-0.159/src/ld.h
+===================================================================
+--- elfutils-0.159.orig/src/ld.h
++++ elfutils-0.159/src/ld.h
@@ -1114,6 +1114,7 @@ extern bool dynamically_linked_p (void);
/* Checked whether the symbol is undefined and referenced from a DSO. */
@@ -752,21 +782,27 @@
+#endif /* Optimizing and not GCC 4.2. */
#endif /* ld.h */
---- elfutils/src/Makefile.am
-+++ elfutils/src/Makefile.am
-@@ -95,6 +95,9 @@ addr2line_no_Wformat = yes
+Index: elfutils-0.159/src/Makefile.am
+===================================================================
+--- elfutils-0.159.orig/src/Makefile.am
++++ elfutils-0.159/src/Makefile.am
+@@ -89,6 +89,11 @@ endif
# XXX While the file is not finished, don't warn about this
ldgeneric_no_Wunused = yes
-+# Buggy old compilers.
++# Buggy old compilers or libc headers.
+readelf_no_Werror = yes
++strings_no_Werror = yes
++addr2line_no_Wformat = yes
+
- readelf_LDADD = $(libdw) $(libebl) $(libelf) $(libeu) $(libmudflap) -ldl
- nm_LDADD = $(libdw) $(libebl) $(libelf) $(libeu) $(libmudflap) -ldl \
+ readelf_LDADD = $(libdw) $(libebl) $(libelf) $(libeu) -ldl
+ nm_LDADD = $(libdw) $(libebl) $(libelf) $(libeu) -ldl \
$(demanglelib)
---- elfutils/src/readelf.c
-+++ elfutils/src/readelf.c
-@@ -4171,10 +4171,12 @@ listptr_base (struct listptr *p)
+Index: elfutils-0.159/src/readelf.c
+===================================================================
+--- elfutils-0.159.orig/src/readelf.c
++++ elfutils-0.159/src/readelf.c
+@@ -4239,10 +4239,12 @@ listptr_base (struct listptr *p)
return base;
}
@@ -781,7 +817,7 @@
struct listptr *p1 = (void *) a;
struct listptr *p2 = (void *) b;
-@@ -4263,8 +4265,11 @@ static void
+@@ -4331,8 +4333,11 @@ static void
sort_listptr (struct listptr_table *table, const char *name)
{
if (table->n > 0)
@@ -795,7 +831,7 @@
}
static bool
-@@ -9151,7 +9156,7 @@ dump_archive_index (Elf *elf, const char
+@@ -9252,7 +9257,7 @@ dump_archive_index (Elf *elf, const char
if (unlikely (elf_rand (elf, as_off) == 0)
|| unlikely ((subelf = elf_begin (-1, ELF_C_READ_MMAP, elf))
== NULL))
@@ -804,8 +840,10 @@
while (1)
#endif
error (EXIT_FAILURE, 0,
---- elfutils/src/strings.c
-+++ elfutils/src/strings.c
+Index: elfutils-0.159/src/strings.c
+===================================================================
+--- elfutils-0.159.orig/src/strings.c
++++ elfutils-0.159/src/strings.c
@@ -43,6 +43,10 @@
#include <system.h>
@@ -817,7 +855,7 @@
/* Prototypes of local functions. */
static int read_fd (int fd, const char *fname, off64_t fdlen);
-@@ -483,8 +487,13 @@ map_file (int fd, off64_t start_off, off
+@@ -489,8 +493,13 @@ map_file (int fd, off64_t start_off, off
fd, start_off);
if (mem != MAP_FAILED)
{
@@ -831,7 +869,7 @@
break;
}
if (errno != EINVAL && errno != ENOMEM)
-@@ -576,9 +585,11 @@ read_block (int fd, const char *fname, o
+@@ -581,9 +590,11 @@ read_block (int fd, const char *fname, o
elfmap_off = from & ~(ps - 1);
elfmap_base = elfmap = map_file (fd, elfmap_off, fdlen, &elfmap_size);
@@ -843,8 +881,10 @@
}
if (unlikely (elfmap == MAP_FAILED))
---- elfutils/src/strip.c
-+++ elfutils/src/strip.c
+Index: elfutils-0.159/src/strip.c
+===================================================================
+--- elfutils-0.159.orig/src/strip.c
++++ elfutils-0.159/src/strip.c
@@ -45,6 +45,12 @@
#include <libebl.h>
#include <system.h>
@@ -877,7 +917,7 @@
}
/* Open the file. */
-@@ -2060,7 +2076,7 @@ while computing checksum for debug infor
+@@ -2086,7 +2102,7 @@ while computing checksum for debug infor
/* If requested, preserve the timestamp. */
if (tvp != NULL)
{
@@ -886,7 +926,7 @@
{
error (0, errno, gettext ("\
cannot set access and modification date of '%s'"),
-@@ -2117,7 +2133,7 @@ handle_ar (int fd, Elf *elf, const char
+@@ -2143,7 +2159,7 @@ handle_ar (int fd, Elf *elf, const char
if (tvp != NULL)
{
@@ -895,8 +935,10 @@
{
error (0, errno, gettext ("\
cannot set access and modification date of '%s'"), fname);
---- elfutils/tests/backtrace.c
-+++ elfutils/tests/backtrace.c
+Index: elfutils-0.159/tests/backtrace.c
+===================================================================
+--- elfutils-0.159.orig/tests/backtrace.c
++++ elfutils-0.159/tests/backtrace.c
@@ -36,6 +36,7 @@
#include <fcntl.h>
#include <string.h>
@@ -904,10 +946,12 @@
+#include <sys/syscall.h>
#include ELFUTILS_HEADER(dwfl)
- static int
---- elfutils/tests/ChangeLog
-+++ elfutils/tests/ChangeLog
-@@ -123,6 +123,13 @@
+ #ifndef __linux__
+Index: elfutils-0.159/tests/ChangeLog
+===================================================================
+--- elfutils-0.159.orig/tests/ChangeLog
++++ elfutils-0.159/tests/ChangeLog
+@@ -283,6 +283,13 @@
2013-12-02 Jan Kratochvil <jan.kratochvil@redhat.com>
@@ -921,7 +965,7 @@
* Makefile.am (check_PROGRAMS): Add backtrace, backtrace-child,
backtrace-data and backtrace-dwarf.
(BUILT_SOURCES, clean-local, backtrace-child-biarch): New.
-@@ -987,6 +994,8 @@
+@@ -1147,6 +1154,8 @@
2008-01-21 Roland McGrath <roland@redhat.com>
@@ -930,7 +974,7 @@
* testfile45.S.bz2: Add tests for cltq, cqto.
* testfile45.expect.bz2: Adjust.
-@@ -1695,6 +1704,11 @@
+@@ -1855,6 +1864,11 @@
* Makefile.am (TESTS): Add run-elflint-test.sh.
(EXTRA_DIST): Add run-elflint-test.sh and testfile18.bz2.
@@ -942,8 +986,10 @@
2005-05-24 Ulrich Drepper <drepper@redhat.com>
* get-files.c (main): Use correct format specifier.
---- elfutils/tests/line2addr.c
-+++ elfutils/tests/line2addr.c
+Index: elfutils-0.159/tests/line2addr.c
+===================================================================
+--- elfutils-0.159.orig/tests/line2addr.c
++++ elfutils-0.159/tests/line2addr.c
@@ -124,7 +124,7 @@ main (int argc, char *argv[])
{
struct args a = { .arg = argv[cnt] };
@@ -953,3 +999,15 @@
{
default:
case 0:
+Index: elfutils-0.159/tests/Makefile.am
+===================================================================
+--- elfutils-0.159.orig/tests/Makefile.am
++++ elfutils-0.159/tests/Makefile.am
+@@ -356,6 +356,7 @@ get_lines_LDADD = $(libdw) $(libelf)
+ get_files_LDADD = $(libdw) $(libelf)
+ get_aranges_LDADD = $(libdw) $(libelf)
+ allfcts_LDADD = $(libdw) $(libelf)
++line2addr_no_Wformat = yes
+ line2addr_LDADD = $(libdw)
+ addrscopes_LDADD = $(libdw)
+ funcscopes_LDADD = $(libdw)
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/scanf-format.patch b/meta/recipes-devtools/elfutils/elfutils-0.160/scanf-format.patch
index c08519cf53..c08519cf53 100644
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/scanf-format.patch
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/scanf-format.patch
diff --git a/meta/recipes-devtools/elfutils/elfutils-0.158/testsuite-ignore-elflint.diff b/meta/recipes-devtools/elfutils/elfutils-0.160/testsuite-ignore-elflint.diff
index eae5796de3..eae5796de3 100644
--- a/meta/recipes-devtools/elfutils/elfutils-0.158/testsuite-ignore-elflint.diff
+++ b/meta/recipes-devtools/elfutils/elfutils-0.160/testsuite-ignore-elflint.diff
diff --git a/meta/recipes-devtools/elfutils/elfutils/Fix_elf_cvt_gunhash.patch b/meta/recipes-devtools/elfutils/elfutils/Fix_elf_cvt_gunhash.patch
new file mode 100644
index 0000000000..374cba555e
--- /dev/null
+++ b/meta/recipes-devtools/elfutils/elfutils/Fix_elf_cvt_gunhash.patch
@@ -0,0 +1,29 @@
+Fix elf_cvt_gnuhash if dest and src are the same.
+
+Upstream-Status: Pending
+
+The 'dest' and 'src' can be the same; we need to save the value of src32[2]
+before swapping it.
+
+Signed-off-by: Baoshan Pang <BaoShan.Pang@windriver.com>
+diff --git a/libelf/gnuhash_xlate.h b/libelf/gnuhash_xlate.h
+index 6faf113..04d9ca1 100644
+--- a/libelf/gnuhash_xlate.h
++++ b/libelf/gnuhash_xlate.h
+@@ -40,6 +40,7 @@ elf_cvt_gnuhash (void *dest, const void *src, size_t len, int encode)
+ words. We must detangle them here. */
+ Elf32_Word *dest32 = dest;
+ const Elf32_Word *src32 = src;
++ Elf32_Word save_src32_2 = src32[2]; // dest could be equal to src
+
+ /* First four control words, 32 bits. */
+ for (unsigned int cnt = 0; cnt < 4; ++cnt)
+@@ -50,7 +51,7 @@ elf_cvt_gnuhash (void *dest, const void *src, size_t len, int encode)
+ len -= 4;
+ }
+
+- Elf32_Word bitmask_words = encode ? src32[2] : dest32[2];
++ Elf32_Word bitmask_words = encode ? save_src32_2 : dest32[2];
+
+ /* Now the 64 bit words. */
+ Elf64_Xword *dest64 = (Elf64_Xword *) &dest32[4];
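The bug fixed above is a classic in-place conversion hazard: when dest == src,
the first loop byte-swaps word 2 before it is read again, so the bitmask count
is taken from already-converted data. A small self-contained illustration of
the pattern in plain C (the function and names are illustrative, not elfutils
code):

    #include <byteswap.h>
    #include <stdint.h>

    /* Convert four 32-bit header words in place; any word needed after the
       loop must be cached *before* swapping, since dest may alias src.  */
    static uint32_t
    convert_header (uint32_t *dest, const uint32_t *src)
    {
      uint32_t saved_word2 = src[2];      /* read before it may be clobbered */
      for (unsigned int i = 0; i < 4; ++i)
        dest[i] = bswap_32 (src[i]);      /* element-wise, safe if dest == src */
      return saved_word2;                 /* original, unswapped value */
    }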
diff --git a/meta/recipes-devtools/elfutils/elfutils_0.148.bb b/meta/recipes-devtools/elfutils/elfutils_0.148.bb
index 110624297e..ab956390b3 100644
--- a/meta/recipes-devtools/elfutils/elfutils_0.148.bb
+++ b/meta/recipes-devtools/elfutils/elfutils_0.148.bb
@@ -32,6 +32,7 @@ SRC_URI += "\
file://dso-link-change.patch \
file://nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch \
file://elfutils-ar-c-fix-num-passed-to-memset.patch \
+ file://Fix_elf_cvt_gunhash.patch \
"
# Only apply when building uclibc based target recipe
SRC_URI_append_libc-uclibc = " file://uclibc-support.patch"
diff --git a/meta/recipes-devtools/elfutils/elfutils_0.158.bb b/meta/recipes-devtools/elfutils/elfutils_0.160.bb
index ef3dd0bb40..e02918e94a 100644
--- a/meta/recipes-devtools/elfutils/elfutils_0.158.bb
+++ b/meta/recipes-devtools/elfutils/elfutils_0.160.bb
@@ -7,14 +7,20 @@ DEPENDS = "libtool bzip2 zlib virtual/libintl"
SRC_URI = "https://fedorahosted.org/releases/e/l/elfutils/${PV}/elfutils-${PV}.tar.bz2"
-SRC_URI[md5sum] = "050a4909e452d01ab4747fd69d4036e0"
-SRC_URI[sha256sum] = "be27af5c21352f53e010342bf1c68e0b9e18232dbf3adec7e2f9b41f6bbe397d"
+SRC_URI[md5sum] = "7527f22dff8b1ac8c122cfc4d3d3bb1e"
+SRC_URI[sha256sum] = "741b556863c069ceab2d81eb54aeda8c34f46728859704eaf9baef8503e9a9d1"
-# Pick patches from debian
-# http://ftp.de.debian.org/debian/pool/main/e/elfutils/elfutils_0.158-2.debian.tar.xz
+SRC_URI += "\
+ file://mempcpy.patch \
+ file://dso-link-change.patch \
+ file://Fix_elf_cvt_gunhash.patch \
+ file://fixheadercheck.patch \
+"
+
+# pick the patch from debian
+# http://ftp.de.debian.org/debian/pool/main/e/elfutils/elfutils_0.159-4.debian.tar.xz
SRC_URI += "\
file://redhat-portability.diff \
- file://redhat-robustify.diff \
file://hppa_backend.diff \
file://arm_backend.diff \
file://mips_backend.diff \
@@ -22,14 +28,9 @@ SRC_URI += "\
file://testsuite-ignore-elflint.diff \
file://scanf-format.patch \
file://mips_readelf_w.patch \
- file://core_filename.patch \
- file://CVE-2014-0172.patch \
- file://unwind_non_linux.patch \
- file://elf_additions.diff \
- file://mempcpy.patch \
- file://dso-link-change.patch \
- file://m4-biarch.m4-tweak-AC_RUN_IFELSE-for-cross-compiling.patch \
- file://fixheadercheck.patch \
+ file://arm_func_value.patch \
+ file://arm_unwind_ret_mask.patch \
+ file://non_linux.patch \
"
# Only apply when building uclibc based target recipe
diff --git a/meta/recipes-devtools/expect/expect_5.45.bb b/meta/recipes-devtools/expect/expect_5.45.bb
index 970f4a5ccc..3e839a2fe3 100644
--- a/meta/recipes-devtools/expect/expect_5.45.bb
+++ b/meta/recipes-devtools/expect/expect_5.45.bb
@@ -35,6 +35,7 @@ do_install_append() {
install -m 0755 ${S}/fixline1 ${D}${libdir}/expect${PV}/
install -m 0755 ${S}/example/* ${D}${libdir}/expect${PV}/
rm ${D}${libdir}/expect${PV}/libexpect*.so
+ sed -e 's|$dir|${libdir}|' -i ${D}${libdir}/expect${PV}/pkgIndex.tcl
}
EXTRA_OECONF += "--includedir=${STAGING_INCDIR} \
diff --git a/meta/recipes-devtools/fdisk/gptfdisk_git.bb b/meta/recipes-devtools/fdisk/gptfdisk_git.bb
new file mode 100644
index 0000000000..477c1d986f
--- /dev/null
+++ b/meta/recipes-devtools/fdisk/gptfdisk_git.bb
@@ -0,0 +1,22 @@
+DESCRIPTION = "GPT fdisk is a disk partitioning tool loosely modeled on Linux fdisk, but used for modifying GUID Partition Table (GPT) disks. The related FixParts utility fixes some common problems on Master Boot Record (MBR) disks."
+
+LICENSE = "GPLv2"
+LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552"
+
+DEPENDS = "util-linux popt ncurses"
+
+PV = "0.8.10+git${SRCPV}"
+SRCREV = "a920398fa393f9d6301b32b191bc01e086ab8bc8"
+SRC_URI = "git://git.code.sf.net/p/gptfdisk/code"
+
+S = "${WORKDIR}/git"
+
+do_install() {
+ install -d ${D}${sbindir}
+ install -m 0755 cgdisk ${D}${sbindir}
+ install -m 0755 gdisk ${D}${sbindir}
+ install -m 0755 sgdisk ${D}${sbindir}
+ install -m 0755 fixparts ${D}${sbindir}
+}
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/file/file/debian-742262.patch b/meta/recipes-devtools/file/file/debian-742262.patch
index d57258c56b..1ef485e93c 100644
--- a/meta/recipes-devtools/file/file/debian-742262.patch
+++ b/meta/recipes-devtools/file/file/debian-742262.patch
@@ -9,10 +9,10 @@ Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
diff --git a/magic/Magdir/commands b/magic/Magdir/commands
--- a/magic/Magdir/commands
+++ b/magic/Magdir/commands
-@@ -50,6 +50,7 @@
+@@ -57,6 +57,7 @@
0 string/wt #!\ /usr/bin/awk awk script text executable
!:mime text/x-awk
- 0 regex =^\\s{0,100}BEGIN\\s{0,100}[{] awk script text
+ 0 regex/4096 =^\\s{0,100}BEGIN\\s{0,100}[{] awk or perl script text
+!:strength - 12
# AT&T Bell Labs' Plan 9 shell
diff --git a/meta/recipes-devtools/file/file_5.18.bb b/meta/recipes-devtools/file/file_5.21.bb
index f2f8d032e2..702ea87343 100644
--- a/meta/recipes-devtools/file/file_5.18.bb
+++ b/meta/recipes-devtools/file/file_5.21.bb
@@ -15,8 +15,8 @@ SRC_URI = "ftp://ftp.astron.com/pub/file/file-${PV}.tar.gz \
file://debian-742262.patch \
"
-SRC_URI[md5sum] = "d420d8f2990cd344673acfbf8d76ff5a"
-SRC_URI[sha256sum] = "6519fb706d583231c2419592ebecdbb21d33c62eaf7a1a0b24ddfcb80c08bf07"
+SRC_URI[md5sum] = "549fe96e09041eabece9de2bb28ef923"
+SRC_URI[sha256sum] = "1a48741d3923c4cc73267109b8a396c0ce3aebe004181f3efb1b0a228d230bb6"
inherit autotools
@@ -27,4 +27,9 @@ do_install_append_class-native() {
--magic-file ${datadir}/misc/magic.mgc
}
+do_install_append_class-nativesdk() {
+ create_cmdline_wrapper ${D}/${bindir}/file \
+ --magic-file ${datadir}/misc/magic.mgc
+}
+
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/flex/flex.inc b/meta/recipes-devtools/flex/flex.inc
index 54e4ec3b25..a4a26e2787 100644
--- a/meta/recipes-devtools/flex/flex.inc
+++ b/meta/recipes-devtools/flex/flex.inc
@@ -5,6 +5,7 @@ HOMEPAGE = "http://sourceforge.net/projects/flex/"
SECTION = "devel"
LICENSE = "BSD"
+DEPENDS += "${@'bison-native flex-native' if '${PTEST_ENABLED}' == '1' else ''}"
SRC_URI = "${SOURCEFORGE_MIRROR}/flex/flex-${PV}.tar.bz2 \
file://run-ptest \
@@ -28,7 +29,6 @@ do_install_append_class-nativesdk() {
}
RDEPENDS_${PN} += "m4"
-DEPENDS_${PN}-ptest += "bison-native flex-native"
do_compile_ptest() {
for i in `find ${S}/tests/ -type d |grep -Ev "concatenated-options|reject|table-opts" | awk -F/ '{print $NF}'`; \
diff --git a/meta/recipes-devtools/gcc/gcc-4.8/0051-fix-unwind-race.patch b/meta/recipes-devtools/gcc/gcc-4.8/0051-fix-unwind-race.patch
deleted file mode 100644
index e4fff127b3..0000000000
--- a/meta/recipes-devtools/gcc/gcc-4.8/0051-fix-unwind-race.patch
+++ /dev/null
@@ -1,33 +0,0 @@
-These is a race over the installation of files into the include/ directory between:
-
-| (cd `${PWDCMD-pwd}`/include ; \| tar -cf - .; exit 0) | (cd /home/pokybuild/yocto-autobuilder/yocto-slave/nightly-oecore/build/build/tmp-eglibc/work/armv5te-oe-linux-gnueabi/gcc-cross-initial/4.8.2-r0/image/home/pokybuild/yocto-autobuilder/yocto-slave/nightly-oecore/build/build/tmp-eglibc/sysroots/x86_64-linux/usr/lib/armv5te-oe-linux-gnueabi.gcc-cross-initial/gcc/arm-oe-linux-gnueabi/4.8.2/include; tar xpf - )
-
-and
-
-| /bin/install -c -m 644 unwind.h /home/pokybuild/yocto-autobuilder/yocto-slave/nightly-oecore/build/build/tmp-eglibc/work/armv5te-oe-linux-gnueabi/gcc-cross-initial/4.8.2-r0/image/home/pokybuild/yocto-autobuilder/yocto-slave/nightly-oecore/build/build/tmp-eglibc/sysroots/x86_64-linux/usr/lib/armv5te-oe-linux-gnueabi.gcc-cross-initial/gcc/arm-oe-linux-gnueabi/4.8.2/include
-| /bin/install: cannot create regular file '/home/pokybuild/yocto-autobuilder/yocto-slave/nightly-oecore/build/build/tmp-eglibc/work/armv5te-oe-linux-gnueabi/gcc-cross-initial/4.8.2-r0/image/home/pokybuild/yocto-autobuilder/yocto-slave/nightly-oecore/build/build/tmp-eglibc/sysroots/x86_64-linux/usr/lib/armv5te-oe-linux-gnueabi.gcc-cross-initial/gcc/arm-oe-linux-gnueabi/4.8.2/include/unwind.h': File exists
-| make[1]: *** [install-unwind_h] Error 1
-
-which under the right circumstances leads to the above build failure. Since we don't
-need two copies of this file and we don't use install-no-fixincludes, we can disable
-the libgcc installation.
-
-RP 2014/04/10
-
-Upstream-Status: Pending [would need a rewrite into an acceptable patch form]
-
-Index: gcc-4.8.2/libgcc/Makefile.in
-===================================================================
---- gcc-4.8.2.orig/libgcc/Makefile.in 2013-02-04 19:06:20.000000000 +0000
-+++ gcc-4.8.2/libgcc/Makefile.in 2014-04-10 09:58:33.018748787 +0000
-@@ -1020,8 +1020,8 @@
- # This is however useful for "install-no-fixincludes" case, when only the gcc
- # internal headers are copied by gcc's install.
- install-unwind_h:
-- $(mkinstalldirs) $(DESTDIR)$(libsubdir)/include
-- $(INSTALL_DATA) unwind.h $(DESTDIR)$(libsubdir)/include
-+# $(mkinstalldirs) $(DESTDIR)$(libsubdir)/include
-+# $(INSTALL_DATA) unwind.h $(DESTDIR)$(libsubdir)/include
-
- all: install-unwind_h-forbuild
-
diff --git a/meta/recipes-devtools/gcc/gcc-4.9.inc b/meta/recipes-devtools/gcc/gcc-4.9.inc
index 25778bd272..2568e995ac 100644
--- a/meta/recipes-devtools/gcc/gcc-4.9.inc
+++ b/meta/recipes-devtools/gcc/gcc-4.9.inc
@@ -70,6 +70,10 @@ SRC_URI = "\
file://0053-gcc-fix-segfault-from-calling-free-on-non-malloc-d-a.patch \
file://0054-gcc-Makefile.in-fix-parallel-building-failure.patch \
file://0055-PR-rtl-optimization-61801.patch \
+ file://0056-top-level-reorder_gcc-bug-61144.patch \
+ file://0057-aarch64-config.patch \
+ file://0058-gcc-r212171.patch \
+ file://0059-gcc-PR-rtl-optimization-63348.patch \
"
SRC_URI[md5sum] = "fddf71348546af523353bd43d34919c1"
SRC_URI[sha256sum] = "d334781a124ada6f38e63b545e2a3b8c2183049515a1abab6d513f109f1d717e"
@@ -121,8 +125,8 @@ EXTRA_OECONF_INTERMEDIATE = "\
EXTRA_OECONF_append_libc-uclibc = " --disable-decimal-float "
EXTRA_OECONF_PATHS = "\
- --with-gxx-include-dir=${STAGING_DIR_TARGET}${target_includedir}/c++/${BINV} \
- --with-sysroot=${STAGING_DIR_TARGET} \
+ --with-gxx-include-dir=/not/exist{target_includedir}/c++/${BINV} \
+ --with-sysroot=/not/exist \
--with-build-sysroot=${STAGING_DIR_TARGET} \
"
diff --git a/meta/recipes-devtools/gcc/gcc-4.9/0023-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch b/meta/recipes-devtools/gcc/gcc-4.9/0023-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch
index 9e4435cf59..fddfe9e5e8 100644
--- a/meta/recipes-devtools/gcc/gcc-4.9/0023-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch
+++ b/meta/recipes-devtools/gcc/gcc-4.9/0023-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch
@@ -1,4 +1,4 @@
-From 46d52439052f0876a92dcf8a0ab9c60d75c8030b Mon Sep 17 00:00:00 2001
+From f7d49ca445e60faa1b5256c6b4f96c1ee5c0e353 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Fri, 29 Mar 2013 09:17:25 +0400
Subject: [PATCH 23/35] Use the defaults.h in ${B} instead of ${S}, and t-oe
@@ -11,6 +11,14 @@ gcc-cross-intermediate, gcc-cross, gcc-runtime, and also the sdk build.
Signed-off-by: Khem Raj <raj.khem@gmail.com>
Upstream-Status: Pending
+
+While compiling gcc-crosssdk-initial-x86_64 on some hosts, the test for the
+existence of defaults.h occasionally fails; the reason is that
+tm_include_list='** defaults.h' rather than
+tm_include_list='** ./defaults.h'
+
+So we add a test condition for this situation.
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
---
gcc/Makefile.in | 2 +-
gcc/configure | 4 ++--
@@ -19,10 +27,10 @@ Upstream-Status: Pending
4 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/gcc/Makefile.in b/gcc/Makefile.in
-index 90a2bba..2320497 100644
+index d1ab22f..15fe4b6 100644
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
-@@ -469,7 +469,7 @@ TARGET_SYSTEM_ROOT = @TARGET_SYSTEM_ROOT@
+@@ -483,7 +483,7 @@ TARGET_SYSTEM_ROOT = @TARGET_SYSTEM_ROOT@
TARGET_SYSTEM_ROOT_DEFINE = @TARGET_SYSTEM_ROOT_DEFINE@
xmake_file=@xmake_file@
@@ -32,10 +40,10 @@ index 90a2bba..2320497 100644
TM_MULTILIB_CONFIG=@TM_MULTILIB_CONFIG@
TM_MULTILIB_EXCEPTIONS_CONFIG=@TM_MULTILIB_EXCEPTIONS_CONFIG@
diff --git a/gcc/configure b/gcc/configure
-index bdab45a..d587993 100755
+index 5399b2b..60a04bd 100755
--- a/gcc/configure
+++ b/gcc/configure
-@@ -11539,8 +11539,8 @@ for f in $tm_file; do
+@@ -11631,8 +11631,8 @@ for f in $tm_file; do
tm_include_list="${tm_include_list} $f"
;;
defaults.h )
@@ -47,10 +55,10 @@ index bdab45a..d587993 100755
* )
tm_file_list="${tm_file_list} \$(srcdir)/config/$f"
diff --git a/gcc/configure.ac b/gcc/configure.ac
-index 5f5c909..5e5e84f 100644
+index f87c3b6..460e0d9 100644
--- a/gcc/configure.ac
+++ b/gcc/configure.ac
-@@ -1720,8 +1720,8 @@ for f in $tm_file; do
+@@ -1740,8 +1740,8 @@ for f in $tm_file; do
tm_include_list="${tm_include_list} $f"
;;
defaults.h )
@@ -62,7 +70,7 @@ index 5f5c909..5e5e84f 100644
* )
tm_file_list="${tm_file_list} \$(srcdir)/config/$f"
diff --git a/gcc/mkconfig.sh b/gcc/mkconfig.sh
-index 29fdfc7..e048dce 100644
+index c7146ed..b153f45 100644
--- a/gcc/mkconfig.sh
+++ b/gcc/mkconfig.sh
@@ -77,7 +77,7 @@ if [ -n "$HEADERS" ]; then
@@ -70,11 +78,11 @@ index 29fdfc7..e048dce 100644
echo '#ifdef IN_GCC' >> ${output}T
for file in "$@"; do
- if test x"$file" = x"defaults.h"; then
-+ if test x"$file" = x"./defaults.h"; then
++ if test x"$file" = x"./defaults.h" -o x"$file" = x"defaults.h"; then
postpone_defaults_h="yes"
else
echo "# include \"$file\"" >> ${output}T
-@@ -103,7 +103,7 @@ esac
+@@ -106,7 +106,7 @@ esac
# If we postponed including defaults.h, add the #include now.
if test x"$postpone_defaults_h" = x"yes"; then
@@ -84,5 +92,5 @@ index 29fdfc7..e048dce 100644
# Add multiple inclusion protection guard, part two.
--
-1.7.10.4
+1.9.1
diff --git a/meta/recipes-devtools/gcc/gcc-4.9/0056-top-level-reorder_gcc-bug-61144.patch b/meta/recipes-devtools/gcc/gcc-4.9/0056-top-level-reorder_gcc-bug-61144.patch
new file mode 100644
index 0000000000..f44893251c
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc-4.9/0056-top-level-reorder_gcc-bug-61144.patch
@@ -0,0 +1,31 @@
+
+Upstream-Status: Backport
+
+Originally-submitted-by: Peter Urbanec <openembedded-devel@urbanec.net>
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+
+--- /dev/null
++++ b/meta/recipes-devtools/gcc/gcc-4.9/0056-top-level-reorder_gcc-bug-61144.patch
+@@ -0,0 +1,21 @@
++--- a/gcc/varpool.c 2014/10/05 02:50:01 215895
+++++ b/gcc/varpool.c 2014/10/05 04:52:19 215896
++@@ -329,8 +329,16 @@
++
++ /* Variables declared 'const' without an initializer
++ have zero as the initializer if they may not be
++- overridden at link or run time. */
++- if (!DECL_INITIAL (real_decl)
+++ overridden at link or run time.
+++
+++ It is actually requirement for C++ compiler to optimize const variables
+++ consistently. As a GNU extension, do not enfore this rule for user defined
+++ weak variables, so we support interposition on:
+++ static const int dummy = 0;
+++ extern const int foo __attribute__((__weak__, __alias__("dummy")));
+++ */
+++ if ((!DECL_INITIAL (real_decl)
+++ || (DECL_WEAK (decl) && !DECL_COMDAT (decl)))
++ && (DECL_EXTERNAL (decl) || decl_replaceable_p (decl)))
++ return error_mark_node;
++
+
diff --git a/meta/recipes-devtools/gcc/gcc-4.9/0057-aarch64-config.patch b/meta/recipes-devtools/gcc/gcc-4.9/0057-aarch64-config.patch
new file mode 100644
index 0000000000..f29559698a
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc-4.9/0057-aarch64-config.patch
@@ -0,0 +1,32 @@
+Disable the MULTILIB_OSDIRNAMES and other multilib options.
+
+Hard coding the MULTILIB_OSDIRNAMES with ../lib64 is causing problems on
+systems where the libdir is NOT set to /lib64. This is allowed by the ABI, as
+long as the dynamic loader is present in /lib.
+
+We simply want to use the default rules in gcc to find and configure the
+normal libdir.
+
+Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
+
+Index: gcc-4.9.1/gcc/config/aarch64/t-aarch64-linux
+===================================================================
+--- gcc-4.9.1.orig/gcc/config/aarch64/t-aarch64-linux
++++ gcc-4.9.1/gcc/config/aarch64/t-aarch64-linux
+@@ -21,11 +21,11 @@
+ LIB1ASMSRC = aarch64/lib1funcs.asm
+ LIB1ASMFUNCS = _aarch64_sync_cache_range
+
+-AARCH_BE = $(if $(findstring TARGET_BIG_ENDIAN_DEFAULT=1, $(tm_defines)),_be)
+-MULTILIB_OSDIRNAMES = .=../lib64$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu)
+-MULTIARCH_DIRNAME = $(call if_multiarch,aarch64$(AARCH_BE)-linux-gnu)
++#AARCH_BE = $(if $(findstring TARGET_BIG_ENDIAN_DEFAULT=1, $(tm_defines)),_be)
++#MULTILIB_OSDIRNAMES = .=../lib64$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu)
++#MULTIARCH_DIRNAME = $(call if_multiarch,aarch64$(AARCH_BE)-linux-gnu)
+
+ # Disable the multilib for linux-gnu targets for the time being; focus
+ # on the baremetal targets.
+-MULTILIB_OPTIONS =
+-MULTILIB_DIRNAMES =
++#MULTILIB_OPTIONS =
++#MULTILIB_DIRNAMES =
diff --git a/meta/recipes-devtools/gcc/gcc-4.9/0058-gcc-r212171.patch b/meta/recipes-devtools/gcc/gcc-4.9/0058-gcc-r212171.patch
new file mode 100644
index 0000000000..4b312d4fa9
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc-4.9/0058-gcc-r212171.patch
@@ -0,0 +1,113 @@
+From ca03cf1b133d66eb978c68f6dbc345e9aabcba88 Mon Sep 17 00:00:00 2001
+From: uros <uros@138bc75d-0d04-0410-961f-82ee72b054a4>
+Date: Mon, 30 Jun 2014 19:30:52 +0000
+Subject: [PATCH] r212171
+
+* except.c (emit_note_eh_region_end): New helper
+ function. (convert_to_eh_region_ranges): Use
+ emit_note_eh_region_end to emit EH_REGION_END note.
+ * jump.c (cleanup_barriers): Do not split a call and its
+ corresponding CALL_ARG_LOCATION note.
+
+git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@212171 138bc75d-0d04-0410-961f-82ee72b054a4
+
+Upstream-status: Backport [https://gcc.gnu.org/viewcvs/gcc?view=revision&revision=212171]
+Signed-off-by: Baoshan Pang <baoshan.pang@windriver.com>
+---
+ gcc/except.c | 23 ++++++++++++++++++-----
+ gcc/jump.c | 19 +++++++++++++++----
+ 2 files changed, 33 insertions(+), 9 deletions(-)
+
+diff --git a/gcc/except.c b/gcc/except.c
+index dc5c1d2..7ac114f 100644
+--- a/gcc/except.c
++++ b/gcc/except.c
+@@ -2466,6 +2466,20 @@ add_call_site (rtx landing_pad, int action, int section)
+ return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
+ }
+
++static rtx
++emit_note_eh_region_end (rtx insn)
++{
++ rtx next = NEXT_INSN (insn);
++
++ /* Make sure we do not split a call and its corresponding
++ CALL_ARG_LOCATION note. */
++ if (next && NOTE_P (next)
++ && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
++ insn = next;
++
++ return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
++}
++
+ /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
+ The new note numbers will not refer to region numbers, but
+ instead to call site entries. */
+@@ -2544,8 +2558,8 @@ convert_to_eh_region_ranges (void)
+ note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
+ first_no_action_insn_before_switch);
+ NOTE_EH_HANDLER (note) = call_site;
+- note = emit_note_after (NOTE_INSN_EH_REGION_END,
+- last_no_action_insn_before_switch);
++ note
++ = emit_note_eh_region_end (last_no_action_insn_before_switch);
+ NOTE_EH_HANDLER (note) = call_site;
+ gcc_assert (last_action != -3
+ || (last_action_insn
+@@ -2569,8 +2583,7 @@ convert_to_eh_region_ranges (void)
+ first_no_action_insn = NULL_RTX;
+ }
+
+- note = emit_note_after (NOTE_INSN_EH_REGION_END,
+- last_action_insn);
++ note = emit_note_eh_region_end (last_action_insn);
+ NOTE_EH_HANDLER (note) = call_site;
+ }
+
+@@ -2617,7 +2630,7 @@ convert_to_eh_region_ranges (void)
+
+ if (last_action >= -1 && ! first_no_action_insn)
+ {
+- note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
++ note = emit_note_eh_region_end (last_action_insn);
+ NOTE_EH_HANDLER (note) = call_site;
+ }
+
+diff --git a/gcc/jump.c b/gcc/jump.c
+index 9418f65..a5e5f52 100644
+--- a/gcc/jump.c
++++ b/gcc/jump.c
+@@ -121,15 +121,26 @@ rebuild_jump_labels_chain (rtx chain)
+ static unsigned int
+ cleanup_barriers (void)
+ {
+- rtx insn, next, prev;
+- for (insn = get_insns (); insn; insn = next)
++ rtx insn;
++ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+- next = NEXT_INSN (insn);
+ if (BARRIER_P (insn))
+ {
+- prev = prev_nonnote_insn (insn);
++ rtx prev = prev_nonnote_insn (insn);
+ if (!prev)
+ continue;
++
++ if (CALL_P (prev))
++ {
++ /* Make sure we do not split a call and its corresponding
++ CALL_ARG_LOCATION note. */
++ rtx next = NEXT_INSN (prev);
++
++ if (NOTE_P (next)
++ && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
++ prev = next;
++ }
++
+ if (BARRIER_P (prev))
+ delete_insn (insn);
+ else if (prev != PREV_INSN (insn))
+--
+1.7.9.5
+
diff --git a/meta/recipes-devtools/gcc/gcc-4.9/0059-gcc-PR-rtl-optimization-63348.patch b/meta/recipes-devtools/gcc/gcc-4.9/0059-gcc-PR-rtl-optimization-63348.patch
new file mode 100644
index 0000000000..6d24aa4572
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc-4.9/0059-gcc-PR-rtl-optimization-63348.patch
@@ -0,0 +1,59 @@
+From 6eae3e637fcc22d21b51d44d61e3a9cb4825e776 Mon Sep 17 00:00:00 2001
+From: Jackie Huang <jackie.huang@windriver.com>
+Date: Thu, 30 Oct 2014 20:37:14 -0700
+Subject: [PATCH] PR rtl-optimization/63348
+
+PR rtl-optimization/63348
+* emit-rtl.c (try_split): Do not emit extra barrier.
+
+Note: this patch is to fix the side effect introduced by r212171 which was reported at:
+https://gcc.gnu.org/bugzilla/show_bug.cgi?id=63348
+
+git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@215613 138bc75d-0d04-0410-961f-82ee72b054a4
+
+Upstream-status: Backport [https://gcc.gnu.org/viewcvs/gcc?view=revision&revision=215613]
+Signed-off-by: Baoshan Pang <baoshan.pang@windriver.com>
+Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
+---
+ gcc/emit-rtl.c | 11 -----------
+ 1 files changed, 0 insertions(+), 11 deletions(-)
+
+diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c
+index 4736f8d..ae69dbd 100644
+--- a/gcc/emit-rtl.c
++++ b/gcc/emit-rtl.c
+@@ -3422,7 +3422,6 @@ try_split (rtx pat, rtx trial, int last)
+ {
+ rtx before = PREV_INSN (trial);
+ rtx after = NEXT_INSN (trial);
+- int has_barrier = 0;
+ rtx note, seq, tem;
+ int probability;
+ rtx insn_last, insn;
+@@ -3441,14 +3440,6 @@ try_split (rtx pat, rtx trial, int last)
+
+ split_branch_probability = -1;
+
+- /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
+- We may need to handle this specially. */
+- if (after && BARRIER_P (after))
+- {
+- has_barrier = 1;
+- after = NEXT_INSN (after);
+- }
+-
+ if (!seq)
+ return trial;
+
+@@ -3594,8 +3585,6 @@ try_split (rtx pat, rtx trial, int last)
+ tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
+
+ delete_insn (trial);
+- if (has_barrier)
+- emit_barrier_after (tem);
+
+ /* Recursively call try_split for each new insn created; by the
+ time control returns here that insn will be fully split, so
+--
+1.7.1
+
diff --git a/meta/recipes-devtools/gcc/gcc-common.inc b/meta/recipes-devtools/gcc/gcc-common.inc
index 497d06ae8b..04c701a974 100644
--- a/meta/recipes-devtools/gcc/gcc-common.inc
+++ b/meta/recipes-devtools/gcc/gcc-common.inc
@@ -26,7 +26,7 @@ def get_gcc_mips_plt_setting(bb, d):
return ""
def get_long_double_setting(bb, d):
- if d.getVar('TRANSLATED_TARGET_ARCH', True) in [ 'powerpc', 'powerpc64' ] and d.getVar('TCLIBC', True) in [ 'uclibc', 'eglibc' ]:
+ if d.getVar('TRANSLATED_TARGET_ARCH', True) in [ 'powerpc', 'powerpc64' ] and d.getVar('TCLIBC', True) in [ 'uclibc', 'glibc' ]:
return "--with-long-double-128"
return ""
@@ -79,7 +79,6 @@ ${GNU_MIRROR}/gcc http://mirrors.rcn.net/pub/sourceware/gcc/releases/ \n \
${GNU_MIRROR}/gcc http://gcc.get-software.com/releases/ \n \
${GNU_MIRROR}/gcc http://gcc.get-software.com/releases/ \n \
"
-
#
# Set some default values
#
@@ -89,48 +88,12 @@ BINV = "${PV}"
S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/gcc-${PV}"
B = "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}"
-# SS means Shared Stamps directory
-SS = "${TMPDIR}/stamps/work-shared/gcc-${PV}-${PR}"
-do_fetch[stamp-base] = "${SS}"
-do_unpack[stamp-base] = "${SS}"
-do_patch[stamp-base] = "${SS}"
-do_preconfigure[stamp-base] = "${SS}"
-SSCLEAN = "${TMPDIR}/stamps/work-shared/gcc-[0-9]*-*"
-do_fetch[stamp-base-clean] = "${SSCLEAN}"
-do_unpack[stamp-base-clean] = "${SSCLEAN}"
-do_unpack[umask] = "022"
-do_patch[stamp-base-clean] = "${SSCLEAN}"
-do_preconfigure[stamp-base-clean] = "${SSCLEAN}"
-
-# SW means Shared Work directory
-SW = "${TMPDIR}/work-shared/gcc-${PV}-${PR}"
-SSTATE_SWSPEC = "sstate:gcc::${PV}:${PR}::${SSTATE_VERSION}:"
-WORKDIR_task-unpack = "${SW}"
-WORKDIR_task-patch = "${SW}"
-WORKDIR_task-preconfigure = "${SW}"
-
target_includedir ?= "${includedir}"
target_libdir ?= "${libdir}"
target_base_libdir ?= "${base_libdir}"
target_prefix ?= "${prefix}"
-CLEANFUNCS += "workshared_clean"
-# The do_clean should be exclusive since share ${S}
-do_clean[lockfiles] = "${SW}.clean.lock"
-
-python workshared_clean () {
- """clear the source directory"""
- dir = d.expand("${SW}")
- bb.note("Removing " + dir)
- oe.path.remove(dir)
-
- """clear the the stamps in work-shared"""
- dir = "%s.*" % bb.data.expand(d.getVarFlag('do_fetch', 'stamp-base', True), d)
- bb.note("Removing " + dir)
- oe.path.remove(dir)
-}
-
-# We need to ensure that for the shared work directory, the do_patch singatures match
+# We need to ensure that for the shared work directory, the do_patch signatures match
# The real WORKDIR location isn't a dependency for the shared workdir.
src_patches[vardepsexclude] = "WORKDIR"
should_apply[vardepsexclude] += "PN"
diff --git a/meta/recipes-devtools/gcc/gcc-configure-common.inc b/meta/recipes-devtools/gcc/gcc-configure-common.inc
index 48fb7995ca..0693118992 100644
--- a/meta/recipes-devtools/gcc/gcc-configure-common.inc
+++ b/meta/recipes-devtools/gcc/gcc-configure-common.inc
@@ -1,4 +1,5 @@
require gcc-multilib-config.inc
+require gcc-shared-source.inc
#
# Build the list of lanaguages to build.
#
@@ -93,16 +94,6 @@ _EOF
mv ${B}/gcc/defaults.h.new ${B}/gcc/defaults.h
}
-python do_preconfigure () {
- import subprocess
- cmd = d.expand('PATH=${PATH} cd ${S} && gnu-configize')
- subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
- # See 0044-gengtypes.patch, we need to regenerate this file
- bb.utils.remove(d.expand("${S}/gcc/gengtype-lex.c"))
-}
-addtask do_preconfigure after do_patch before do_configure
-do_preconfigure[depends] += "gnu-config-native:do_populate_sysroot autoconf-native:do_populate_sysroot"
-
do_configure () {
# Setup these vars for cross building only
# ... because foo_FOR_TARGET apparently gets misinterpreted inside the
diff --git a/meta/recipes-devtools/gcc/gcc-cross-canadian.inc b/meta/recipes-devtools/gcc/gcc-cross-canadian.inc
index 0b1f37b908..0f79533771 100644
--- a/meta/recipes-devtools/gcc/gcc-cross-canadian.inc
+++ b/meta/recipes-devtools/gcc/gcc-cross-canadian.inc
@@ -10,9 +10,9 @@ GCCMULTILIB = "--enable-multilib"
require gcc-configure-common.inc
EXTRA_OECONF_PATHS = "\
- --with-gxx-include-dir=${SDKPATH}/sysroots/${TUNE_PKGARCH}${TARGET_VENDOR}-${TARGET_OS}${target_includedir}/c++/${BINV} \
+ --with-gxx-include-dir=/not/exist${target_includedir}/c++/${BINV} \
--with-build-time-tools=${STAGING_DIR_NATIVE}${prefix_native}/${TARGET_SYS}/bin \
- --with-sysroot=${SDKPATH}/sysroots/${TUNE_PKGARCH}${TARGET_VENDOR}-${TARGET_OS} \
+ --with-sysroot=/not/exist \
--with-build-sysroot=${STAGING_DIR_TARGET} \
"
# We have to point gcc at a sysroot but we don't need to rebuild if this changes
@@ -55,7 +55,6 @@ do_configure () {
export CPPFLAGS_FOR_TARGET="${TARGET_CPPFLAGS}"
export CXXFLAGS_FOR_TARGET="${TARGET_CXXFLAGS}"
export LDFLAGS_FOR_TARGET="${TARGET_LDFLAGS}"
- (cd ${S} && gnu-configize) || die "failure running gnu-configize"
oe_runconf
}
diff --git a/meta/recipes-devtools/gcc/gcc-cross-initial.inc b/meta/recipes-devtools/gcc/gcc-cross-initial.inc
index 6e2f25b562..7197447080 100644
--- a/meta/recipes-devtools/gcc/gcc-cross-initial.inc
+++ b/meta/recipes-devtools/gcc/gcc-cross-initial.inc
@@ -21,7 +21,7 @@ EXTRA_OECONF = "\
--enable-languages=c \
${OPTSPACE} \
--program-prefix=${TARGET_PREFIX} \
- --with-sysroot=${STAGING_DIR_TARGET} \
+ --with-sysroot=/not/exist \
--with-build-sysroot=${GCCCROSS_BUILDSYSROOT} \
${EXTRA_OECONF_INITIAL} \
${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', '--with-ld=${STAGING_BINDIR_TOOLCHAIN}/${TARGET_PREFIX}ld.bfd', '', d)} \
diff --git a/meta/recipes-devtools/gcc/gcc-cross.inc b/meta/recipes-devtools/gcc/gcc-cross.inc
index 4cdb55f3ee..97929501e8 100644
--- a/meta/recipes-devtools/gcc/gcc-cross.inc
+++ b/meta/recipes-devtools/gcc/gcc-cross.inc
@@ -25,17 +25,13 @@ EXTRA_OECONF += "\
"
EXTRA_OECONF_PATHS = "\
- --with-gxx-include-dir=${STAGING_DIR_TARGET}${target_includedir}/c++/${BINV} \
- --with-sysroot=${STAGING_DIR_TARGET} \
+ --with-gxx-include-dir=/not/exist${target_includedir}/c++/${BINV} \
+ --with-sysroot=/not/exist \
--with-build-sysroot=${STAGING_DIR_TARGET} \
"
ARCH_FLAGS_FOR_TARGET += "-isystem${STAGING_DIR_TARGET}${target_includedir}"
-do_configure_prepend () {
- sed -i 's/BUILD_INFO=info/BUILD_INFO=/' ${S}/gcc/configure
-}
-
do_compile () {
export CC="${BUILD_CC}"
export AR_FOR_TARGET="${TARGET_SYS}-ar"
diff --git a/meta/recipes-devtools/gcc/gcc-runtime.inc b/meta/recipes-devtools/gcc/gcc-runtime.inc
index 7ce84f14d9..bc36b186a0 100644
--- a/meta/recipes-devtools/gcc/gcc-runtime.inc
+++ b/meta/recipes-devtools/gcc/gcc-runtime.inc
@@ -4,7 +4,7 @@ CXXFLAGS := "${@oe_filter_out('-fvisibility-inlines-hidden', '${CXXFLAGS}', d)}"
EXTRA_OECONF_PATHS = "\
--with-gxx-include-dir=${includedir}/c++/${BINV} \
- --with-sysroot=${STAGING_DIR_TARGET} \
+ --with-sysroot=/not/exist \
--with-build-sysroot=${STAGING_DIR_TARGET} \
"
@@ -53,6 +53,9 @@ do_install () {
if [ -d ${D}${infodir} ]; then
rmdir --ignore-fail-on-non-empty -p ${D}${infodir}
fi
+ if [ "${TARGET_OS}" = "linux-gnuspe" ]; then
+ ln -s ${TARGET_SYS} ${D}${includedir}/c++/${BINV}/${TARGET_ARCH}${TARGET_VENDOR}-linux
+ fi
chown -R root:root ${D}
}
@@ -89,6 +92,37 @@ PACKAGES = "\
libatomic-dev \
libatomic-staticdev \
"
+
+# Most libraries are licensed with the exception, but
+# one library is really GPLv3.
+#
+LICENSE_${PN}-dbg = "GPL-3.0-with-GCC-exception & GPLv3"
+LICENSE_libstdc++ = "GPL-3.0-with-GCC-exception"
+LICENSE_libstdc++-precompile-dev = "GPL-3.0-with-GCC-exception"
+LICENSE_libstdc++-dev = "GPL-3.0-with-GCC-exception"
+LICENSE_libstdc++-staticdev = "GPL-3.0-with-GCC-exception"
+LICENSE_libg2c = "GPL-3.0-with-GCC-exception"
+LICENSE_libg2c-dev = "GPL-3.0-with-GCC-exception"
+LICENSE_libssp = "GPL-3.0-with-GCC-exception"
+LICENSE_libssp-dev = "GPL-3.0-with-GCC-exception"
+LICENSE_libssp-staticdev = "GPL-3.0-with-GCC-exception"
+LICENSE_libgfortran = "GPL-3.0-with-GCC-exception"
+LICENSE_libgfortran-dev = "GPL-3.0-with-GCC-exception"
+LICENSE_libgfortran-staticdev = "GPL-3.0-with-GCC-exception"
+LICENSE_libmudflap = "GPL-3.0-with-GCC-exception"
+LICENSE_libmudflap-dev = "GPL-3.0-with-GCC-exception"
+LICENSE_libmudflap-staticdev = "GPL-3.0-with-GCC-exception"
+LICENSE_libquadmath = "GPL-3.0-with-GCC-exception"
+LICENSE_libquadmath-dev = "GPL-3.0-with-GCC-exception"
+LICENSE_libquadmath-staticdev = "GPL-3.0-with-GCC-exception"
+LICENSE_libatomic = "GPL-3.0-with-GCC-exception"
+LICENSE_libatomic-dev = "GPL-3.0-with-GCC-exception"
+LICENSE_libatomic-staticdev = "GPL-3.0-with-GCC-exception"
+
+LICENSE_libgomp = "GPLv3"
+LICENSE_libgomp-dev = "GPLv3"
+LICENSE_libgomp-staticdev = "GPLv3"
+
# The base package doesn't exist, so we clear the recommends.
RRECOMMENDS_${PN}-dbg = ""
diff --git a/meta/recipes-devtools/gcc/gcc-shared-source.inc b/meta/recipes-devtools/gcc/gcc-shared-source.inc
new file mode 100644
index 0000000000..cb5d9071b6
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc-shared-source.inc
@@ -0,0 +1,9 @@
+do_fetch() {
+ :
+}
+do_fetch[noexec] = "1"
+deltask do_unpack
+deltask do_patch
+
+do_configure[depends] += "gcc-source:do_preconfigure"
+do_populate_lic[depends] += "gcc-source:do_unpack"
diff --git a/meta/recipes-devtools/gcc/gcc-source.inc b/meta/recipes-devtools/gcc/gcc-source.inc
new file mode 100644
index 0000000000..968830aa35
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc-source.inc
@@ -0,0 +1,34 @@
+deltask do_configure
+deltask do_compile
+deltask do_package
+deltask do_package_write_rpm
+deltask do_package_write_ipk
+deltask do_package_write_deb
+deltask do_install
+deltask do_populate_sysroot
+deltask do_populate_lic
+deltask do_package_qa
+deltask do_packagedata
+deltask do_rm_work
+
+WORKDIR = "${TMPDIR}/work-shared/gcc-${PV}-${PR}"
+SSTATE_SWSPEC = "sstate:gcc::${PV}:${PR}::${SSTATE_VERSION}:"
+
+STAMP = "${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}"
+STAMPCLEAN = "${STAMPS_DIR}/work-shared/gcc-[0-9]*-*"
+
+INHIBIT_DEFAULT_DEPS = "1"
+DEPENDS = ""
+
+python do_preconfigure () {
+ import subprocess
+ cmd = d.expand('PATH=${PATH} cd ${S} && gnu-configize')
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
+ # See 0044-gengtypes.patch, we need to regenerate this file
+ bb.utils.remove(d.expand("${S}/gcc/gengtype-lex.c"))
+ cmd = d.expand("sed -i 's/BUILD_INFO=info/BUILD_INFO=/' ${S}/gcc/configure")
+ subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
+}
+addtask do_preconfigure after do_patch
+do_preconfigure[depends] += "gnu-config-native:do_populate_sysroot autoconf-native:do_populate_sysroot"
+
diff --git a/meta/recipes-devtools/gcc/gcc-source_4.8.bb b/meta/recipes-devtools/gcc/gcc-source_4.8.bb
new file mode 100644
index 0000000000..234b82efc6
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc-source_4.8.bb
@@ -0,0 +1,2 @@
+require recipes-devtools/gcc/gcc-${PV}.inc
+require recipes-devtools/gcc/gcc-source.inc
diff --git a/meta/recipes-devtools/gcc/gcc-source_4.9.bb b/meta/recipes-devtools/gcc/gcc-source_4.9.bb
new file mode 100644
index 0000000000..234b82efc6
--- /dev/null
+++ b/meta/recipes-devtools/gcc/gcc-source_4.9.bb
@@ -0,0 +1,2 @@
+require recipes-devtools/gcc/gcc-${PV}.inc
+require recipes-devtools/gcc/gcc-source.inc
diff --git a/meta/recipes-devtools/gcc/libgcc-common.inc b/meta/recipes-devtools/gcc/libgcc-common.inc
index c81a80c6de..1e1e1c11fd 100644
--- a/meta/recipes-devtools/gcc/libgcc-common.inc
+++ b/meta/recipes-devtools/gcc/libgcc-common.inc
@@ -1,5 +1,7 @@
BPN = "libgcc"
+require gcc-shared-source.inc
+
INHIBIT_DEFAULT_DEPS = "1"
do_configure () {
diff --git a/meta/recipes-devtools/gcc/libgcc.inc b/meta/recipes-devtools/gcc/libgcc.inc
index 22c42d5eca..21cb8c1084 100644
--- a/meta/recipes-devtools/gcc/libgcc.inc
+++ b/meta/recipes-devtools/gcc/libgcc.inc
@@ -9,6 +9,13 @@ PACKAGES = "\
libgcov-dev \
"
+# All libgcc source is marked with the exception.
+#
+LICENSE_${PN} = "GPL-3.0-with-GCC-exception"
+LICENSE_${PN}-dev = "GPL-3.0-with-GCC-exception"
+LICENSE_${PN}-dbg = "GPL-3.0-with-GCC-exception"
+
+
FILES_${PN} = "${base_libdir}/libgcc*.so.*"
FILES_${PN}-dev = "\
${base_libdir}/libgcc*.so \
diff --git a/meta/recipes-devtools/gcc/libgfortran.inc b/meta/recipes-devtools/gcc/libgfortran.inc
index cf7942f41d..e42843d2f1 100644
--- a/meta/recipes-devtools/gcc/libgfortran.inc
+++ b/meta/recipes-devtools/gcc/libgfortran.inc
@@ -1,7 +1,7 @@
require gcc-configure-common.inc
EXTRA_OECONF_PATHS = "\
- --with-sysroot=${STAGING_DIR_TARGET} \
+ --with-sysroot=/not/exist \
--with-build-sysroot=${STAGING_DIR_TARGET} \
"
diff --git a/meta/recipes-devtools/gdb/gdb-7.7.1.inc b/meta/recipes-devtools/gdb/gdb-7.8.1.inc
index aa16d5a09f..6fa13bc582 100644
--- a/meta/recipes-devtools/gdb/gdb-7.7.1.inc
+++ b/meta/recipes-devtools/gdb/gdb-7.8.1.inc
@@ -6,5 +6,6 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \
S = "${WORKDIR}/${BPN}-${PV}"
-SRC_URI[md5sum] = "45b07b53d81832d32ccd4829465d4886"
-SRC_URI[sha256sum] = "eefadb9831e3695d1eaef34e98b8f1fb441df6fe5071317ea49c6bd6ba213eff"
+SRC_URI[md5sum] = "997492cc3475c96f35ecc8775248c9b1"
+SRC_URI[sha256sum] = "d7a923d876ecfa1cec4c1c79c014c9d8e58783a25855a95cf260275f61990647"
+
diff --git a/meta/recipes-devtools/gdb/gdb-common.inc b/meta/recipes-devtools/gdb/gdb-common.inc
index 599b405e03..a902f9a4ab 100644
--- a/meta/recipes-devtools/gdb/gdb-common.inc
+++ b/meta/recipes-devtools/gdb/gdb-common.inc
@@ -38,7 +38,7 @@ EXPAT = "--with-expat --with-libexpat-prefix=${STAGING_DIR_HOST}"
EXTRA_OECONF = "--disable-gdbtk --disable-tui --disable-x --disable-werror \
--with-curses --disable-multilib --with-system-readline --disable-sim \
- --without-lzma \
+ --without-lzma --without-guile \
${GDBPROPREFIX} ${EXPAT} \
${@bb.utils.contains('DISTRO_FEATURES', 'multiarch', '--enable-64-bit-bfd', '', d)} \
--disable-rpath \
diff --git a/meta/recipes-devtools/gdb/gdb-cross-canadian.inc b/meta/recipes-devtools/gdb/gdb-cross-canadian.inc
index e594bb12f5..844dce9451 100644
--- a/meta/recipes-devtools/gdb/gdb-cross-canadian.inc
+++ b/meta/recipes-devtools/gdb/gdb-cross-canadian.inc
@@ -13,6 +13,8 @@ GDBPROPREFIX = "--program-prefix='${TARGET_PREFIX}'"
EXTRA_OECONF_append = "--with-python=${WORKDIR}/python"
+SSTATE_DUPWHITELIST += "${STAGING_DATADIR}/gdb"
+
do_configure_prepend() {
cat > ${WORKDIR}/python << EOF
#! /bin/sh
diff --git a/meta/recipes-devtools/gdb/gdb-cross-canadian_7.7.1.bb b/meta/recipes-devtools/gdb/gdb-cross-canadian_7.8.1.bb
index 301035940c..301035940c 100644
--- a/meta/recipes-devtools/gdb/gdb-cross-canadian_7.7.1.bb
+++ b/meta/recipes-devtools/gdb/gdb-cross-canadian_7.8.1.bb
diff --git a/meta/recipes-devtools/gdb/gdb-cross.inc b/meta/recipes-devtools/gdb/gdb-cross.inc
index cb99b06c5b..6e44778cd3 100644
--- a/meta/recipes-devtools/gdb/gdb-cross.inc
+++ b/meta/recipes-devtools/gdb/gdb-cross.inc
@@ -1,8 +1,17 @@
require gdb-common.inc
-DEPENDS = "expat-native ncurses-native readline-native"
+DEPENDS = "expat-native ncurses-native readline-native python-native"
-EXTRA_OECONF += "--without-python"
+inherit pythonnative
+
+EXTRA_OECONF += "--with-python=${STAGING_BINDIR_NATIVE}/python-native/python"
+
+do_compile_prepend() {
+ export BUILD_SYS="${BUILD_SYS}"
+ export HOST_SYS="${HOST_SYS}"
+ export STAGING_LIBDIR="${STAGING_LIBDIR_NATIVE}"
+ export STAGING_INCDIR="${STAGING_INCDIR_NATIVE}"
+}
#EXTRA_OEMAKE += "LDFLAGS='${BUILD_LDFLAGS}'"
diff --git a/meta/recipes-devtools/gdb/gdb-cross_7.7.1.bb b/meta/recipes-devtools/gdb/gdb-cross_7.8.1.bb
index f9da486d7d..f9da486d7d 100644
--- a/meta/recipes-devtools/gdb/gdb-cross_7.7.1.bb
+++ b/meta/recipes-devtools/gdb/gdb-cross_7.8.1.bb
diff --git a/meta/recipes-devtools/gdb/gdb.inc b/meta/recipes-devtools/gdb/gdb.inc
index 3321a244e8..2c95e03b1b 100644
--- a/meta/recipes-devtools/gdb/gdb.inc
+++ b/meta/recipes-devtools/gdb/gdb.inc
@@ -2,8 +2,7 @@ require gdb-common.inc
inherit gettext
-SRC_URI += "file://kill_arm_map_symbols.patch \
- file://gdbserver-cflags-last.diff;striplevel=0 \
+SRC_URI += "file://gdbserver-cflags-last.diff;striplevel=0 \
file://renesas-sh-native-support.patch \
"
#LDFLAGS_append = " -s"
diff --git a/meta/recipes-devtools/gdb/gdb/kill_arm_map_symbols.patch b/meta/recipes-devtools/gdb/gdb/kill_arm_map_symbols.patch
deleted file mode 100644
index 9fc45b9f2a..0000000000
--- a/meta/recipes-devtools/gdb/gdb/kill_arm_map_symbols.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-Upstream-Status: Inappropriate [embedded specific]
-
-Index: gdb-7.3/gdb/arm-tdep.c
-===================================================================
---- gdb-7.3.orig/gdb/arm-tdep.c 2011-05-17 14:27:01.000000000 -0700
-+++ gdb-7.3/gdb/arm-tdep.c 2011-08-05 22:29:58.784201850 -0700
-@@ -7806,6 +7806,19 @@
- static void
- arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
- {
-+
-+ /* FIXME: We want gdb to ignore the ARM ELF mapping symbols when
-+ displaying disassembly so we use this horrible hack here to
-+ artifically set their address to the highest possible value.
-+ This is wrong of course, and it prevents the symbols from being
-+ used for their intended purpose - to distinguish between ARM
-+ and THUMB code. So we ought to find a better way to do this. */
-+ if (bfd_asymbol_name (sym)
-+ && bfd_asymbol_name (sym)[0] == '$'
-+ && bfd_asymbol_name (sym)[1] != 0
-+ && bfd_asymbol_name (sym)[2] == 0)
-+ SYMBOL_VALUE_ADDRESS(msym) = (CORE_ADDR) 0x7ffffffc;
-+
- if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
- == ST_BRANCH_TO_THUMB)
- MSYMBOL_SET_SPECIAL (msym);
diff --git a/meta/recipes-devtools/gdb/gdb_7.7.1.bb b/meta/recipes-devtools/gdb/gdb_7.8.1.bb
index 1abc9d8e76..1abc9d8e76 100644
--- a/meta/recipes-devtools/gdb/gdb_7.7.1.bb
+++ b/meta/recipes-devtools/gdb/gdb_7.8.1.bb
diff --git a/meta/recipes-devtools/git/git.inc b/meta/recipes-devtools/git/git.inc
index 7633577f67..5396628790 100644
--- a/meta/recipes-devtools/git/git.inc
+++ b/meta/recipes-devtools/git/git.inc
@@ -30,24 +30,24 @@ do_install () {
oe_runmake install DESTDIR="${D}" bindir=${bindir} \
template_dir=${datadir}/git-core/templates \
GIT_PYTHON_DIR=${D}${datadir}/git-core/python
-
- # ${libdir} is not applicable here, perl-native files are always
- # installed to /usr/lib on both 32/64 bits targets.
- rm -rf ${D}${exec_prefix}/lib/perl-native
- rmdir ${D}${exec_prefix}/lib || true
}
-PERLSEDFIXUP = " \
+perl_native_fixup () {
sed -i -e 's#${STAGING_BINDIR_NATIVE}/perl-native/#${bindir}/#' \
-e 's#${libdir}/perl-native/#${libdir}/#' \
- ${@d.getVar("PERLTOOLS", True).replace(' /',d.getVar('D', True) + '/')} \
-"
+ ${@d.getVar("PERLTOOLS", True).replace(' /',d.getVar('D', True) + '/')}
+
+ # ${libdir} is not applicable here, perl-native files are always
+ # installed to /usr/lib on both 32/64 bits targets.
+ mv ${D}${exec_prefix}/lib/perl-native/perl ${D}${libdir}
+ rmdir -p ${D}${exec_prefix}/lib/perl-native || true
+}
REL_GIT_EXEC_PATH = "${@os.path.relpath(libexecdir, bindir)}/git-core"
REL_GIT_TEMPLATE_DIR = "${@os.path.relpath(datadir, bindir)}/git-core/templates"
do_install_append_class-target () {
- ${PERLSEDFIXUP}
+ perl_native_fixup
}
do_install_append_class-native() {
@@ -60,7 +60,7 @@ do_install_append_class-nativesdk() {
create_wrapper ${D}${bindir}/git \
GIT_EXEC_PATH='`dirname $''realpath`'/${REL_GIT_EXEC_PATH} \
GIT_TEMPLATE_DIR='`dirname $''realpath`'/${REL_GIT_TEMPLATE_DIR}
- ${PERLSEDFIXUP}
+ perl_native_fixup
}
FILES_${PN} += "${datadir}/git-core ${libexecdir}/git-core/"
@@ -90,6 +90,7 @@ PERLTOOLS = " \
PACKAGES =+ "${PN}-perltools"
FILES_${PN}-perltools += " \
${PERLTOOLS} \
+ ${libdir}/perl \
${datadir}/perl \
"
RDEPENDS_${PN}-perltools = "${PN} perl perl-module-file-path findutils"
diff --git a/meta/recipes-devtools/git/git_2.0.1.bb b/meta/recipes-devtools/git/git_2.2.0.bb
index dbf32e1a03..7341d989b8 100644
--- a/meta/recipes-devtools/git/git_2.0.1.bb
+++ b/meta/recipes-devtools/git/git_2.2.0.bb
@@ -1,7 +1,7 @@
require git.inc
-SRC_URI[md5sum] = "981f5937840716cb563be1cc6292c8d7"
-SRC_URI[sha256sum] = "02609a06fb40db1f6a968867c0e82bcb959b85902747830de0fda53228712daf"
+SRC_URI[md5sum] = "2d5bbcc3e887cc4ba499f80420e2d5f7"
+SRC_URI[sha256sum] = "bea9548f5a39daaf7c3873b6a5be47d7f92cbf42d32957e1be955a2e0e7b83b4"
EXTRA_OECONF += "ac_cv_snprintf_returns_bogus=no ac_cv_c_c99_format=yes \
ac_cv_fread_reads_directories=${ac_cv_fread_reads_directories=yes} \
diff --git a/meta/recipes-devtools/gnu-config/gnu-config_20120814.bb b/meta/recipes-devtools/gnu-config/gnu-config_20120814.bb
index d67466ae79..f5fce6f020 100644
--- a/meta/recipes-devtools/gnu-config/gnu-config_20120814.bb
+++ b/meta/recipes-devtools/gnu-config/gnu-config_20120814.bb
@@ -17,6 +17,8 @@ SRC_URI = "http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-${PV
SRC_URI[md5sum] = "bcfca5a2bb39edad4aae5a65efc84094"
SRC_URI[sha256sum] = "44f99a8e76f3e8e4fec0bb5ad4762f8e44366168554ce66cb85afbe2ed3efd8b"
+CLEANBROKEN = "1"
+
do_compile() {
:
}
diff --git a/meta/recipes-devtools/gnu-config/gnu-config_git.bb b/meta/recipes-devtools/gnu-config/gnu-config_git.bb
index 00fa759176..754a99a4b0 100644
--- a/meta/recipes-devtools/gnu-config/gnu-config_git.bb
+++ b/meta/recipes-devtools/gnu-config/gnu-config_git.bb
@@ -17,6 +17,8 @@ SRC_URI = "git://git.sv.gnu.org/config.git \
S = "${WORKDIR}/git"
+CLEANBROKEN = "1"
+
do_compile() {
:
}
diff --git a/meta/recipes-devtools/guile/files/arm_aarch64.patch b/meta/recipes-devtools/guile/files/arm_aarch64.patch
new file mode 100644
index 0000000000..f1788b62fb
--- /dev/null
+++ b/meta/recipes-devtools/guile/files/arm_aarch64.patch
@@ -0,0 +1,19 @@
+guile: add aarch64 recognition
+
+Assume little-endian.
+
+Upstream-Status: Pending
+
+Signed-off-by: joe.slater@windriver.com
+
+--- a/module/system/base/target.scm
++++ b/module/system/base/target.scm
+@@ -70,6 +70,8 @@
+ ((member cpu '("sparc" "sparc64" "powerpc" "powerpc64" "spu"
+ "mips" "mips64"))
+ (endianness big))
++ ((string-match "^aarch64" cpu)
++ (endianness little))
+ ((string-match "^arm.*eb" cpu)
+ (endianness big))
+ ((string-match "^arm.*" cpu)
diff --git a/meta/recipes-devtools/guile/files/workaround-ice-ssa-corruption.patch b/meta/recipes-devtools/guile/files/workaround-ice-ssa-corruption.patch
new file mode 100644
index 0000000000..6c348384a5
--- /dev/null
+++ b/meta/recipes-devtools/guile/files/workaround-ice-ssa-corruption.patch
@@ -0,0 +1,60 @@
+libguile/vm-i-system.c: work around an ICE (SSA corruption) when compiling with -g -O
+
+While compiling with -g -O, there was an SSA corruption:
+..
+Unable to coalesce ssa_names 48 and 3476 which are marked as MUST COALESCE.
+sp_48(ab) and sp_3476(ab)
+guile-2.0.11/libguile/vm-engine.c: In function 'vm_debug_engine':
+guile-2.0.11/libguile/vm.c:673:19: internal compiler error: SSA corruption
+ #define VM_NAME vm_debug_engine
+ ^
+guile-2.0.11/libguile/vm-engine.c:39:1: note: in expansion of macro 'VM_NAME'
+ VM_NAME (SCM vm, SCM program, SCM *argv, int nargs)
+ ^
+Please submit a full bug report,
+with preprocessed source if appropriate.
+See <http://gcc.gnu.org/bugs.html> for instructions.
+...
+
+Tweak libguile/vm-i-system.c to add a boundary-value check to work around it.
+
+Upstream-Status: Pending
+
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+---
+ libguile/vm-i-system.c | 20 ++++++++++++++++----
+ 1 file changed, 16 insertions(+), 4 deletions(-)
+
+diff --git a/libguile/vm-i-system.c b/libguile/vm-i-system.c
+--- a/libguile/vm-i-system.c
++++ b/libguile/vm-i-system.c
+@@ -625,10 +625,22 @@ VM_DEFINE_INSTRUCTION (47, bind_optionals_shuffle, "bind-optionals/shuffle", 6,
+ /* now shuffle up, from walk to ntotal */
+ {
+ scm_t_ptrdiff nshuf = sp - walk + 1, i;
+- sp = (fp - 1) + ntotal + nshuf;
+- CHECK_OVERFLOW ();
+- for (i = 0; i < nshuf; i++)
+- sp[-i] = walk[nshuf-i-1];
++ /* check the value of nshuf to workaround ice ssa corruption */
++ /* while compiling with -O -g */
++ if (nshuf > 0)
++ {
++ sp = (fp - 1) + ntotal + nshuf;
++ CHECK_OVERFLOW ();
++ for (i = 0; i < nshuf; i++)
++ sp[-i] = walk[nshuf-i-1];
++ }
++ else
++ {
++ sp = (fp - 1) + ntotal + nshuf;
++ CHECK_OVERFLOW ();
++ for (i = 0; i < nshuf; i++)
++ sp[-i] = walk[nshuf-i-1];
++ }
+ }
+ /* and fill optionals & keyword args with SCM_UNDEFINED */
+ while (walk <= (fp - 1) + ntotal)
+--
+1.9.1
+
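The workaround above duplicates the loop body on both sides of an added range check so the compiler builds different SSA names and avoids the coalescing ICE. A reduced C sketch of the same shape (hypothetical names, not guile code):

    /* Identical bodies under an explicit range check; only the added
       branch matters for sidestepping the compiler bug. */
    void shuffle(long *sp, const long *walk, long nshuf)
    {
        long i;
        if (nshuf > 0) {
            for (i = 0; i < nshuf; i++)
                sp[-i] = walk[nshuf - i - 1];
        } else {
            for (i = 0; i < nshuf; i++)   /* never entered when nshuf <= 0 */
                sp[-i] = walk[nshuf - i - 1];
        }
    }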
diff --git a/meta/recipes-devtools/guile/guile_2.0.11.bb b/meta/recipes-devtools/guile/guile_2.0.11.bb
index 5b1e061af1..f2c07599ec 100644
--- a/meta/recipes-devtools/guile/guile_2.0.11.bb
+++ b/meta/recipes-devtools/guile/guile_2.0.11.bb
@@ -19,6 +19,8 @@ SRC_URI = "${GNU_MIRROR}/guile/guile-${PV}.tar.xz \
file://opensuse/guile-64bit.patch \
file://guile_2.0.6_fix_sed_error.patch \
file://arm_endianness.patch \
+ file://arm_aarch64.patch \
+ file://workaround-ice-ssa-corruption.patch \
"
# file://debian/0001-Change-guile-to-guile-X.Y-for-info-pages.patch
diff --git a/meta/recipes-devtools/help2man/help2man-native_1.46.1.bb b/meta/recipes-devtools/help2man/help2man-native_1.46.4.bb
index a51c81f99c..5b90da9afa 100644
--- a/meta/recipes-devtools/help2man/help2man-native_1.46.1.bb
+++ b/meta/recipes-devtools/help2man/help2man-native_1.46.4.bb
@@ -6,8 +6,8 @@ DEPENDS = "autoconf-native automake-native"
SRC_URI = "${GNU_MIRROR}/${BPN}/${BPN}-${PV}.tar.xz"
-SRC_URI[md5sum] = "63b8f14a81ce28ba119d588d00c63f43"
-SRC_URI[sha256sum] = "3dfd02a026149aad06887c1cb6062471779c100e00aecb79b8f9d01cf1581c47"
+SRC_URI[md5sum] = "a1b7fe49eddae8a2537ed74ee9ef11cb"
+SRC_URI[sha256sum] = "1ae7f15f53b0cc55b070ae49df2ee5caa942c71529054e157599427bba3c5633"
inherit autotools native
diff --git a/meta/recipes-devtools/i2c-tools/i2c-tools_3.1.1.bb b/meta/recipes-devtools/i2c-tools/i2c-tools_3.1.1.bb
index 460541440b..71c98a4fd1 100644
--- a/meta/recipes-devtools/i2c-tools/i2c-tools_3.1.1.bb
+++ b/meta/recipes-devtools/i2c-tools/i2c-tools_3.1.1.bb
@@ -3,8 +3,6 @@ SECTION = "base"
LICENSE = "GPLv2+"
LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
-RDEPENDS_${PN} += "perl"
-
SRC_URI = "http://dl.lm-sensors.org/i2c-tools/releases/${BP}.tar.bz2 \
file://Module.mk \
"
@@ -24,3 +22,13 @@ do_install_append() {
install -m 0644 include/linux/i2c-dev.h ${D}${includedir}/linux/i2c-dev-user.h
rm -f ${D}${includedir}/linux/i2c-dev.h
}
+
+PACKAGES =+ "${PN}-misc"
+FILES_${PN}-misc = "${sbindir}/i2c-stub-from-dump \
+ ${bindir}/ddcmon \
+ ${bindir}/decode-edid \
+ ${bindir}/decode-dimms \
+ ${bindir}/decode-vaio \
+ "
+RDEPENDS_${PN} += "${PN}-misc"
+RDEPENDS_${PN}-misc += "perl"
diff --git a/meta/recipes-devtools/insserv/files/run-ptest b/meta/recipes-devtools/insserv/files/run-ptest
index 495d1551c2..4a6da3030a 100644
--- a/meta/recipes-devtools/insserv/files/run-ptest
+++ b/meta/recipes-devtools/insserv/files/run-ptest
@@ -11,7 +11,7 @@ output() {
}
for i in test_simple_sequence test_undetected_loop; \
- do $i &>/dev/null ; output; \
+ do $i >/dev/null 2>&1; output; \
done
rm -rf ${tmpdir}
diff --git a/meta/recipes-devtools/installer/adt-installer/scripts/adt_installer_internal b/meta/recipes-devtools/installer/adt-installer/scripts/adt_installer_internal
index 00db301df0..2a8a30ccdc 100755
--- a/meta/recipes-devtools/installer/adt-installer/scripts/adt_installer_internal
+++ b/meta/recipes-devtools/installer/adt-installer/scripts/adt_installer_internal
@@ -167,7 +167,7 @@ for target_type in $YOCTOADT_TARGETS; do
[ -e "$env_script_original" ] && env_script=$env_script_original
[ -e "$env_script_relocated" ] && env_script=$env_script_relocated
- $SUDO sed -i -e "s%##SDKTARGETSYSROOT##%$target_sysroot%g" $env_script
+ $SUDO sed -i -e "s%SDKTARGETSYSROOT=.*%SDKTARGETSYSROOT=$target_sysroot%g" $env_script
done
if [ "$YOCTOADT_QEMU" == "Y" ] || [ "$YOCTOADT_QEMU" == "y" ]; then
diff --git a/meta/recipes-devtools/installer/adt-installer_1.0.bb b/meta/recipes-devtools/installer/adt-installer_1.0.bb
index a90a2b4afd..07070b54cf 100644
--- a/meta/recipes-devtools/installer/adt-installer_1.0.bb
+++ b/meta/recipes-devtools/installer/adt-installer_1.0.bb
@@ -29,6 +29,7 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d
LICENSE = "MIT"
PACKAGES = ""
+INHIBIT_DEFAULT_DEPS = "1"
PR = "r11"
diff --git a/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb b/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb
index 2f57527e43..ef0019fc7d 100644
--- a/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb
+++ b/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb
@@ -12,7 +12,7 @@ LICENSE = "GPL-2.0"
LIC_FILES_CHKSUM = "file://COPYING;md5=9b8cf60ff39767ff04b671fca8302408"
SECTION = "devel"
DEPENDS += "ncurses flex bison gperf-native pkgconfig-native"
-RDEPENDS_${PN} += "python"
+RDEPENDS_${PN} += "python bash"
SRC_URI = "http://ymorin.is-a-geek.org/download/${BPN}/${BP}.tar.xz"
SRC_URI[md5sum] = "b939280dcc83f8feabd87a1d5f9b00c2"
diff --git a/meta/recipes-devtools/libtool/libtool-2.4.2.inc b/meta/recipes-devtools/libtool/libtool-2.4.2.inc
index 9c50d46fe2..0f1964b57b 100644
--- a/meta/recipes-devtools/libtool/libtool-2.4.2.inc
+++ b/meta/recipes-devtools/libtool/libtool-2.4.2.inc
@@ -49,3 +49,5 @@ FILES_libltdl = "${libdir}/libltdl${SOLIBS}"
FILES_libltdl-dev = "${libdir}/libltdl${SOLIBSDEV} ${includedir}"
FILES_libltdl-staticdev = "${libdir}/libltdl.a"
FILES_libltdl-dbg = "${libdir}/.debug/"
+
+export CONFIG_SHELL="/bin/bash"
diff --git a/meta/recipes-devtools/libtool/libtool-cross_2.4.2.bb b/meta/recipes-devtools/libtool/libtool-cross_2.4.2.bb
index 72fad37eaf..34aae0bf13 100644
--- a/meta/recipes-devtools/libtool/libtool-cross_2.4.2.bb
+++ b/meta/recipes-devtools/libtool/libtool-cross_2.4.2.bb
@@ -39,5 +39,3 @@ libtoolcross_sysroot_preprocess () {
}
SSTATE_SCAN_FILES += "libtoolize *-libtool"
-
-export CONFIG_SHELL="/bin/bash"
diff --git a/meta/recipes-devtools/libtool/libtool-native_2.4.2.bb b/meta/recipes-devtools/libtool/libtool-native_2.4.2.bb
index f1051d84f3..f03859e061 100644
--- a/meta/recipes-devtools/libtool/libtool-native_2.4.2.bb
+++ b/meta/recipes-devtools/libtool/libtool-native_2.4.2.bb
@@ -21,5 +21,3 @@ do_install () {
install -d ${D}${bindir}/
install -m 0755 ${HOST_SYS}-libtool ${D}${bindir}/${HOST_SYS}-libtool
}
-
-export CONFIG_SHELL="/bin/bash"
diff --git a/meta/recipes-devtools/libtool/libtool/fix-final-rpath.patch b/meta/recipes-devtools/libtool/libtool/fix-final-rpath.patch
index 5c275ffd32..6f9d321eb3 100644
--- a/meta/recipes-devtools/libtool/libtool/fix-final-rpath.patch
+++ b/meta/recipes-devtools/libtool/libtool/fix-final-rpath.patch
@@ -6,12 +6,13 @@ This works around the issue until it gets sorted out upstream.
Fix suggested by Richard Purdie <richard.purdie@intel.com>
Signed-off-by: Scott Garman <scott.a.garman@intel.com>
+Signed-off-by: Randy Witt <randy.e.witt@linux.intel.com>
Index: libtool-2.4.2/libltdl/config/ltmain.m4sh
===================================================================
--- libtool-2.4.2.orig/libltdl/config/ltmain.m4sh
+++ libtool-2.4.2/libltdl/config/ltmain.m4sh
-@@ -7268,9 +7268,11 @@ EOF
+@@ -7268,9 +7268,11 @@
test "$opt_mode" != relink && rpath="$compile_rpath$rpath"
for libdir in $rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
@@ -25,7 +26,7 @@ Index: libtool-2.4.2/libltdl/config/ltmain.m4sh
if test -z "$hardcode_libdirs"; then
hardcode_libdirs="$libdir"
else
-@@ -7999,6 +8001,10 @@ EOF
+@@ -7999,6 +8001,10 @@
hardcode_libdirs=
for libdir in $compile_rpath $finalize_rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
@@ -36,3 +37,14 @@ Index: libtool-2.4.2/libltdl/config/ltmain.m4sh
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs="$libdir"
+@@ -8050,6 +8056,10 @@
+ hardcode_libdirs=
+ for libdir in $finalize_rpath; do
+ if test -n "$hardcode_libdir_flag_spec"; then
++ func_replace_sysroot "$libdir"
++ libdir=$func_replace_sysroot_result
++ func_stripname '=' '' "$libdir"
++ libdir=$func_stripname_result
+ if test -n "$hardcode_libdir_separator"; then
+ if test -z "$hardcode_libdirs"; then
+ hardcode_libdirs="$libdir"
diff --git a/meta/recipes-devtools/libtool/libtool_2.4.2.bb b/meta/recipes-devtools/libtool/libtool_2.4.2.bb
index a2eb4ea437..60643129be 100644
--- a/meta/recipes-devtools/libtool/libtool_2.4.2.bb
+++ b/meta/recipes-devtools/libtool/libtool_2.4.2.bb
@@ -2,11 +2,24 @@ require libtool-${PV}.inc
PR = "${INC_PR}.0"
+RDEPENDS_${PN} += "bash"
+
#
# We want the results of libtool-cross preserved - don't stage anything ourselves.
#
SYSROOT_PREPROCESS_FUNCS += "libtool_sysroot_preprocess"
+do_install_append () {
+ sed -e 's@--sysroot=${STAGING_DIR_HOST}@@g' \
+ -e 's@${STAGING_DIR_HOST}@@g' \
+ -e 's@^\(sys_lib_search_path_spec="\).*@\1${libdir} ${base_libdir}"@' \
+ -e 's@^\(compiler_lib_search_dirs="\).*@\1${libdir} ${base_libdir}"@' \
+ -e 's@^\(compiler_lib_search_path="\).*@\1${libdir} ${base_libdir}"@' \
+ -e 's@^\(predep_objects="\).*@\1"@' \
+ -e 's@^\(postdep_objects="\).*@\1"@' \
+ -i ${D}${bindir}/libtool
+}
+
libtool_sysroot_preprocess () {
rm -rf ${SYSROOT_DESTDIR}${bindir}/*
rm -rf ${SYSROOT_DESTDIR}${datadir}/aclocal/*
diff --git a/meta/recipes-devtools/libtool/nativesdk-libtool_2.4.2.bb b/meta/recipes-devtools/libtool/nativesdk-libtool_2.4.2.bb
index fff15e916d..7b5c97a97a 100644
--- a/meta/recipes-devtools/libtool/nativesdk-libtool_2.4.2.bb
+++ b/meta/recipes-devtools/libtool/nativesdk-libtool_2.4.2.bb
@@ -31,5 +31,3 @@ libtoolnativesdk_sysroot_preprocess () {
install -d ${SYSROOT_DESTDIR}${bindir_crossscripts}/
install -m 755 ${D}${bindir}/${HOST_SYS}-libtool ${SYSROOT_DESTDIR}${bindir_crossscripts}/${HOST_SYS}-libtool
}
-
-export CONFIG_SHELL="/bin/bash"
diff --git a/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools-native_0.9.69.bb b/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools-native_0.9.69.bb
index ed6ab738a8..a8a90fc82e 100644
--- a/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools-native_0.9.69.bb
+++ b/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools-native_0.9.69.bb
@@ -19,3 +19,7 @@ inherit autotools-brokensep native
do_configure () {
oe_runconf
}
+
+do_install() {
+ oe_runmake 'DESTDIR=${D}' 'TMPDIR=${T}' install
+}
diff --git a/meta/recipes-devtools/m4/m4.inc b/meta/recipes-devtools/m4/m4.inc
index 4a83929f0c..e473e23824 100644
--- a/meta/recipes-devtools/m4/m4.inc
+++ b/meta/recipes-devtools/m4/m4.inc
@@ -5,6 +5,7 @@ GNU M4 also has built-in functions for including files, running shell commands,
inherit autotools texinfo
+EXTRA_OECONF += "--without-libsigsegv-prefix"
EXTRA_OEMAKE += "'infodir=${infodir}'"
LDFLAGS_prepend_libc-uclibc = " -lrt "
SRC_URI = "${GNU_MIRROR}/m4/m4-${PV}.tar.gz"
diff --git a/meta/recipes-devtools/make/make_4.0.bb b/meta/recipes-devtools/make/make_4.1.bb
index 38d328c8a5..a1b0d7c45f 100644
--- a/meta/recipes-devtools/make/make_4.0.bb
+++ b/meta/recipes-devtools/make/make_4.1.bb
@@ -6,7 +6,7 @@ require make.inc
EXTRA_OECONF += "--without-guile"
-SRC_URI[md5sum] = "571d470a7647b455e3af3f92d79f1c18"
-SRC_URI[sha256sum] = "e60686c7afede62cc8c86ad3012cf081ea4887daf9d223ce7115703b2bb2dbdb"
+SRC_URI[md5sum] = "57a7a224a822f94789a587ccbcedff69"
+SRC_URI[sha256sum] = "0bc7613389650ee6a24554b52572a272f7356164fd2c4132b0bcf13123e4fca5"
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/mklibs/files/sysrooted-ldso.patch b/meta/recipes-devtools/mklibs/files/sysrooted-ldso.patch
new file mode 100644
index 0000000000..75500a029a
--- /dev/null
+++ b/meta/recipes-devtools/mklibs/files/sysrooted-ldso.patch
@@ -0,0 +1,18 @@
+In cross builds we will have to respect the sysroot.
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+Upstream-Status: Pending
+
+Index: mklibs-0.1.39/src/mklibs
+===================================================================
+--- mklibs-0.1.39.orig/src/mklibs 2014-03-01 18:25:36.000000000 +0000
++++ mklibs-0.1.39/src/mklibs 2014-10-19 00:51:46.813534596 +0000
+@@ -495,7 +495,7 @@
+ present_symbols = {}
+ checked_libs = small_libs
+ checked_libs.extend(available_libs)
+- checked_libs.append(ldlib)
++ checked_libs.append(sysroot + ldlib)
+ for lib in checked_libs:
+ for symbol in provided_symbols(lib):
+ debug(DEBUG_SPAM, "present_symbols adding %s" % symbol)
diff --git a/meta/recipes-devtools/mklibs/mklibs-native_0.1.39.bb b/meta/recipes-devtools/mklibs/mklibs-native_0.1.39.bb
index ff8d5003ee..9885561fe3 100644
--- a/meta/recipes-devtools/mklibs/mklibs-native_0.1.39.bb
+++ b/meta/recipes-devtools/mklibs/mklibs-native_0.1.39.bb
@@ -9,6 +9,7 @@ DEPENDS = "python-native dpkg-native"
SRC_URI = "http://ftp.de.debian.org/debian/pool/main/m/mklibs/${BPN}_${PV}.tar.xz \
file://ac_init_fix.patch\
file://fix_STT_GNU_IFUNC.patch\
+ file://sysrooted-ldso.patch \
"
SRC_URI[md5sum] = "38a579a531401eb76f4bab4ccfb774a2"
diff --git a/meta/recipes-devtools/mtd/mtd-utils/fix-armv7-neon-alignment.patch b/meta/recipes-devtools/mtd/mtd-utils/fix-armv7-neon-alignment.patch
new file mode 100644
index 0000000000..05f1629d58
--- /dev/null
+++ b/meta/recipes-devtools/mtd/mtd-utils/fix-armv7-neon-alignment.patch
@@ -0,0 +1,44 @@
+Upstream-Status: Pending
+
+The NEON instruction VLD1.64 was used to copy 64-bit data after type
+casting, which can trigger an alignment trap.
+This patch uses memcpy to avoid the alignment problem.
+
+Signed-off-by: Yuanjie Huang <Yuanjie.Huang@windriver.com>
+
+diff --git a/mkfs.ubifs/key.h b/mkfs.ubifs/key.h
+index d3a02d4..e7e9218 100644
+--- a/mkfs.ubifs/key.h
++++ b/mkfs.ubifs/key.h
+@@ -141,10 +141,12 @@ static inline void data_key_init(union ubifs_key *key, ino_t inum,
+ */
+ static inline void key_write(const union ubifs_key *from, void *to)
+ {
+- union ubifs_key *t = to;
++ __le32 x[2];
+
+- t->j32[0] = cpu_to_le32(from->u32[0]);
+- t->j32[1] = cpu_to_le32(from->u32[1]);
++ x[0] = cpu_to_le32(from->u32[0]);
++ x[1] = cpu_to_le32(from->u32[1]);
++
++ memcpy(to, &x, 8);
+ memset(to + 8, 0, UBIFS_MAX_KEY_LEN - 8);
+ }
+
+@@ -156,10 +158,12 @@ static inline void key_write(const union ubifs_key *from, void *to)
+ */
+ static inline void key_write_idx(const union ubifs_key *from, void *to)
+ {
+- union ubifs_key *t = to;
++ __le32 x[2];
++
++ x[0] = cpu_to_le32(from->u32[0]);
++ x[1] = cpu_to_le32(from->u32[1]);
+
+- t->j32[0] = cpu_to_le32(from->u32[0]);
+- t->j32[1] = cpu_to_le32(from->u32[1]);
++ memcpy(to, &x, 8);
+ }
+
+ /**
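The fix works because memcpy makes no alignment assumptions, while storing through a cast pointer lets the compiler emit 64-bit NEON stores that do. A small C sketch of the safe pattern (hypothetical helper, not mtd-utils code):

    #include <stdint.h>
    #include <string.h>

    /* Write two 32-bit words to an arbitrarily aligned buffer.  The local
       array is naturally aligned; memcpy copies byte-wise (or with
       unaligned-safe instructions), so no alignment trap occurs. */
    static void store_u32_pair(void *to, uint32_t a, uint32_t b)
    {
        uint32_t x[2] = { a, b };
        memcpy(to, x, sizeof(x));
    }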
diff --git a/meta/recipes-devtools/mtd/mtd-utils_git.bb b/meta/recipes-devtools/mtd/mtd-utils_git.bb
index f78bc7e7f9..52297c63ba 100644
--- a/meta/recipes-devtools/mtd/mtd-utils_git.bb
+++ b/meta/recipes-devtools/mtd/mtd-utils_git.bb
@@ -10,6 +10,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3 \
SRCREV = "9f107132a6a073cce37434ca9cda6917dd8d866b"
SRC_URI = "git://git.infradead.org/mtd-utils.git \
file://add-exclusion-to-mkfs-jffs2-git-2.patch \
+ file://fix-armv7-neon-alignment.patch \
"
PV = "1.5.1+git${SRCPV}"
diff --git a/meta/recipes-devtools/mtools/mtools/fix-broken-lz.patch b/meta/recipes-devtools/mtools/mtools/fix-broken-lz.patch
new file mode 100644
index 0000000000..cb454917ff
--- /dev/null
+++ b/meta/recipes-devtools/mtools/mtools/fix-broken-lz.patch
@@ -0,0 +1,23 @@
+Upstream-Status: Backport
+
+Signed-off-by: Wenlin Kang <wenlin.kang@windriver.com>
+Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
+---
+ Makefile.in | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/Makefile.in b/Makefile.in
+index 8f9305a..694e837 100644
+--- a/Makefile.in
++++ b/Makefile.in
+@@ -251,6 +251,7 @@ install-scripts: ${DESTDIR}$(bindir)/mtools
+ @$(top_srcdir)/mkinstalldirs ${DESTDIR}$(bindir)
+ @for j in $(SCRIPTS) ; do \
+ $(INSTALL_SCRIPT) $(srcdir)/scripts/$$j ${DESTDIR}$(bindir)/$$j ; \
++ $(INSTALL_PROGRAM) $(srcdir)/scripts/$$j ${DESTDIR}$(bindir)/$$j ; \
+ echo ${DESTDIR}$(bindir)/$$j ; \
+ done
+ rm -f ${DESTDIR}$(bindir)/lz
+--
+2.0.0
+
diff --git a/meta/recipes-devtools/mtools/mtools_3.9.9.bb b/meta/recipes-devtools/mtools/mtools_3.9.9.bb
index d995c8f042..f6dd3361c9 100644
--- a/meta/recipes-devtools/mtools/mtools_3.9.9.bb
+++ b/meta/recipes-devtools/mtools/mtools_3.9.9.bb
@@ -31,7 +31,9 @@ RRECOMMENDS_${PN} = "\
SRC_URI = "http://downloads.yoctoproject.org/mirror/sources/mtools-${PV}.tar.gz \
file://mtools-makeinfo.patch \
file://mtools.patch \
- file://no-x11.patch"
+ file://no-x11.patch \
+ file://fix-broken-lz.patch \
+"
SRC_URI[md5sum] = "3e68b857b4e1f3a6521d1dfefbd30a36"
SRC_URI[sha256sum] = "af083a73425d664d4607ef6c6564fd9319a0e47ee7c105259a45356cb834690e"
diff --git a/meta/recipes-devtools/nasm/nasm_2.11.05.bb b/meta/recipes-devtools/nasm/nasm_2.11.06.bb
index fbfd5853b8..4a69a6a92e 100644
--- a/meta/recipes-devtools/nasm/nasm_2.11.05.bb
+++ b/meta/recipes-devtools/nasm/nasm_2.11.06.bb
@@ -6,8 +6,8 @@ COMPATIBLE_HOST = '(x86_64|i.86).*-(linux|freebsd.*)'
SRC_URI = "http://www.nasm.us/pub/nasm/releasebuilds/${PV}/nasm-${PV}.tar.bz2 "
-SRC_URI[md5sum] = "f6b1db2858cad82bbb0c8c6f3e2b0fb2"
-SRC_URI[sha256sum] = "4a417bcf5cde5d203e228feea3f428a6ff4d700d558e4d79d50a30a695bdfae9"
+SRC_URI[md5sum] = "9d93e8df5f5c005567f47ed6ca3a93d4"
+SRC_URI[sha256sum] = "87b6d97d6981ca468f9dafda44cf1bb1ba122cec329d4d389af5db04fa0c3e6c"
inherit autotools-brokensep
diff --git a/meta/recipes-devtools/opkg-utils/opkg-utils_git.bb b/meta/recipes-devtools/opkg-utils/opkg-utils_git.bb
index 693c216ce4..2800a5deb7 100644
--- a/meta/recipes-devtools/opkg-utils/opkg-utils_git.bb
+++ b/meta/recipes-devtools/opkg-utils/opkg-utils_git.bb
@@ -7,7 +7,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
file://opkg.py;beginline=1;endline=18;md5=15917491ad6bf7acc666ca5f7cc1e083"
PROVIDES += "virtual/update-alternatives"
-SRCREV = "eae0d8fa44e8594aa90eadf06e5f4fbeef314509"
+SRCREV = "53274f087565fd45d8452c5367997ba6a682a37a"
PV = "0.1.8+git${SRCPV}"
SRC_URI = "git://git.yoctoproject.org/opkg-utils"
diff --git a/meta/recipes-devtools/opkg/opkg-collateral.bb b/meta/recipes-devtools/opkg/opkg-collateral.bb
index b121827ac7..3825ffb064 100644
--- a/meta/recipes-devtools/opkg/opkg-collateral.bb
+++ b/meta/recipes-devtools/opkg/opkg-collateral.bb
@@ -13,7 +13,7 @@ do_compile () {
cat ${WORKDIR}/opkg.conf.comments >${WORKDIR}/opkg.conf
cat ${WORKDIR}/src >>${WORKDIR}/opkg.conf
cat ${WORKDIR}/dest >>${WORKDIR}/opkg.conf
- echo "lists_dir ext ${OPKGLIBDIR}/opkg" >>${WORKDIR}/opkg.conf
+ echo "option lists_dir ${OPKGLIBDIR}/opkg" >>${WORKDIR}/opkg.conf
}
do_install () {
diff --git a/meta/recipes-devtools/opkg/opkg.inc b/meta/recipes-devtools/opkg/opkg.inc
index ba21d84960..56c54b6ab7 100644
--- a/meta/recipes-devtools/opkg/opkg.inc
+++ b/meta/recipes-devtools/opkg/opkg.inc
@@ -27,7 +27,7 @@ OPKGLIBDIR = "${target_localstatedir}/lib"
PACKAGECONFIG ??= ""
-PACKAGECONFIG[gpg] = "--enable-gpg,--disable-gpg,gpgme libgpg-error"
+PACKAGECONFIG[gpg] = "--enable-gpg,--disable-gpg,gpgme libgpg-error,gnupg"
PACKAGECONFIG[curl] = "--enable-curl,--disable-curl,curl"
PACKAGECONFIG[ssl-curl] = "--enable-ssl-curl,--disable-ssl-curl,curl openssl"
PACKAGECONFIG[openssl] = "--enable-openssl,--disable-openssl,openssl"
diff --git a/meta/recipes-devtools/opkg/opkg/add-exclude.patch b/meta/recipes-devtools/opkg/opkg/add-exclude.patch
index c684534efb..48de923590 100644
--- a/meta/recipes-devtools/opkg/opkg/add-exclude.patch
+++ b/meta/recipes-devtools/opkg/opkg/add-exclude.patch
@@ -1,4 +1,4 @@
-From 5d707bbfcafd88b8b5b5821972c8c958fc3b2039 Mon Sep 17 00:00:00 2001
+From 60c3f93e95a3ca54ef0a7eebc5ef29a5d92d3110 Mon Sep 17 00:00:00 2001
From: Paul Barker <paul@paulbarker.me.uk>
Date: Fri, 28 Mar 2014 15:20:22 +0000
Subject: [PATCH 2/2] opkg-0.2.x: add-exclude
@@ -15,7 +15,7 @@ so there is no need to free the data.
v2: Use xmalloc instead of malloc and xrealloc instead of realloc. In opkg,
these functions are guaranteed not to return NULL.
-Upstream-Status: Pending
+Upstream-Status: Accepted for v0.3.0 release with modifications
Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
Signed-off-by: Jonathan Liu <net147@gmail.com>
@@ -28,10 +28,10 @@ Signed-off-by: Paul Barker <paul@paulbarker.me.uk>
4 files changed, 34 insertions(+)
diff --git a/libopkg/opkg_conf.c b/libopkg/opkg_conf.c
-index 1e65bad..9c4c854 100644
+index 4eee37b..1ab63fb 100644
--- a/libopkg/opkg_conf.c
+++ b/libopkg/opkg_conf.c
-@@ -442,6 +442,7 @@ opkg_conf_init(void)
+@@ -447,6 +447,7 @@ opkg_conf_init(void)
pkg_dest_list_init(&conf->pkg_dest_list);
pkg_dest_list_init(&conf->tmp_dest_list);
nv_pair_list_init(&conf->arch_list);
@@ -40,7 +40,7 @@ index 1e65bad..9c4c854 100644
return 0;
}
diff --git a/libopkg/opkg_conf.h b/libopkg/opkg_conf.h
-index 6045a58..ad58849 100644
+index 2f189e0..6d6e613 100644
--- a/libopkg/opkg_conf.h
+++ b/libopkg/opkg_conf.h
@@ -51,6 +51,8 @@ struct opkg_conf
@@ -53,10 +53,10 @@ index 6045a58..ad58849 100644
int restrict_to_default_dest;
pkg_dest_t *default_dest;
diff --git a/libopkg/pkg_depends.c b/libopkg/pkg_depends.c
-index d2d279e..b572e18 100644
+index 41bf206..eb630d1 100644
--- a/libopkg/pkg_depends.c
+++ b/libopkg/pkg_depends.c
-@@ -212,6 +212,22 @@ pkg_hash_fetch_unsatisfied_dependencies(pkg_t * pkg, pkg_vec_t *unsatisfied,
+@@ -204,6 +204,22 @@ pkg_hash_fetch_unsatisfied_dependencies(pkg_t * pkg, pkg_vec_t *unsatisfied,
continue;
}
@@ -80,7 +80,7 @@ index d2d279e..b572e18 100644
if (satisfying_pkg != NULL) {
satisfier_entry_pkg = satisfying_pkg;
diff --git a/src/opkg-cl.c b/src/opkg-cl.c
-index 0315d41..67366b9 100644
+index 6378380..f10d10b 100644
--- a/src/opkg-cl.c
+++ b/src/opkg-cl.c
@@ -45,6 +45,7 @@ enum {
@@ -91,7 +91,7 @@ index 0315d41..67366b9 100644
ARGS_OPT_NOACTION,
ARGS_OPT_DOWNLOAD_ONLY,
ARGS_OPT_NODEPS,
-@@ -95,6 +96,7 @@ static struct option long_options[] = {
+@@ -97,6 +98,7 @@ static struct option long_options[] = {
{"offline-root", 1, 0, 'o'},
{"add-arch", 1, 0, ARGS_OPT_ADD_ARCH},
{"add-dest", 1, 0, ARGS_OPT_ADD_DEST},
@@ -99,7 +99,7 @@ index 0315d41..67366b9 100644
{"test", 0, 0, ARGS_OPT_NOACTION},
{"tmp-dir", 1, 0, 't'},
{"tmp_dir", 1, 0, 't'},
-@@ -198,6 +200,18 @@ args_parse(int argc, char *argv[])
+@@ -200,6 +202,18 @@ args_parse(int argc, char *argv[])
}
free(tuple);
break;
@@ -118,7 +118,7 @@ index 0315d41..67366b9 100644
case ARGS_OPT_NOACTION:
conf->noaction = 1;
break;
-@@ -282,6 +296,7 @@ usage()
+@@ -287,6 +301,7 @@ usage()
printf("\t--offline-root <dir> offline installation of packages.\n");
printf("\t--add-arch <arch>:<prio> Register architecture with given priority\n");
printf("\t--add-dest <name>:<path> Register destination with given path\n");
@@ -127,5 +127,5 @@ index 0315d41..67366b9 100644
printf("\t than the higher version one if more\n");
printf("\t than one candidate is found.\n");
--
-2.0.4
+2.1.3
diff --git a/meta/recipes-devtools/opkg/opkg/libopkg-opkg_remove.c-avoid-remove-pkg-repeatly-with.patch b/meta/recipes-devtools/opkg/opkg/libopkg-opkg_remove.c-avoid-remove-pkg-repeatly-with.patch
new file mode 100644
index 0000000000..5e5eafcd97
--- /dev/null
+++ b/meta/recipes-devtools/opkg/opkg/libopkg-opkg_remove.c-avoid-remove-pkg-repeatly-with.patch
@@ -0,0 +1,39 @@
+From 41425d67d3589b1912416a17f740d6407c7834f2 Mon Sep 17 00:00:00 2001
+From: Hongxu Jia <hongxu.jia@windriver.com>
+Date: Wed, 8 Oct 2014 19:53:13 +0800
+Subject: [PATCH] libopkg/opkg_remove.c: avoid remove pkg repeatly with option
+ --force-removal-of-dependent-packages
+
+When removing a pkg with '--force-removal-of-dependent-packages',
+the pkg may be added to the remove list multiple times; add a status
+check to make sure the pkg is only removed once.
+
+Upstream-Status: Backport
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+
+---
+ libopkg/opkg_remove.c | 8 ++++++++
+ 1 file changed, 8 insertions(+)
+
+diff --git a/libopkg/opkg_remove.c b/libopkg/opkg_remove.c
+index 34f9154..a225e41 100644
+--- a/libopkg/opkg_remove.c
++++ b/libopkg/opkg_remove.c
+@@ -250,6 +250,14 @@ opkg_remove_pkg(pkg_t *pkg, int from_upgrade)
+ if ((parent_pkg = pkg->parent) == NULL)
+ return 0;
+
++ /* While remove pkg with '--force-removal-of-dependent-packages',
++ pkg may be added to remove list multiple times, add status
++ check to make sure pkg only be removed once. */
++ if (conf->force_removal_of_dependent_packages &&
++ pkg->state_flag & SF_FILELIST_CHANGED &&
++ pkg->state_status == SS_NOT_INSTALLED)
++ return 0;
++
+ /* only attempt to remove dependent installed packages if
+ * force_depends is not specified or the package is being
+ * replaced.
+--
+1.9.1
+
diff --git a/meta/recipes-devtools/opkg/opkg/no-install-recommends.patch b/meta/recipes-devtools/opkg/opkg/no-install-recommends.patch
index bcca56c6ce..c2d244d26e 100644
--- a/meta/recipes-devtools/opkg/opkg/no-install-recommends.patch
+++ b/meta/recipes-devtools/opkg/opkg/no-install-recommends.patch
@@ -1,11 +1,11 @@
-From 610207c9bc82f20c77d6f234465e36857c997ea0 Mon Sep 17 00:00:00 2001
+From 2e2ccc7e7fc81a7eee2d004d3644efbc1be1ad73 Mon Sep 17 00:00:00 2001
From: Paul Barker <paul@paulbarker.me.uk>
Date: Fri, 28 Mar 2014 15:19:08 +0000
Subject: [PATCH 1/2] opkg-0.2.x: no-install-recommends
Add the ability to not install ANY recommended packages.
-Upstream-Status: Pending
+Upstream-Status: Accepted for v0.3.0 release with modifications
Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
---
@@ -15,7 +15,7 @@ Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
3 files changed, 10 insertions(+), 1 deletion(-)
diff --git a/libopkg/opkg_conf.h b/libopkg/opkg_conf.h
-index 38fa375..6045a58 100644
+index e0e045a..2f189e0 100644
--- a/libopkg/opkg_conf.h
+++ b/libopkg/opkg_conf.h
@@ -82,6 +82,7 @@ struct opkg_conf
@@ -27,10 +27,10 @@ index 38fa375..6045a58 100644
char *overlay_root;
int query_all;
diff --git a/libopkg/pkg_depends.c b/libopkg/pkg_depends.c
-index a4df7de..d2d279e 100644
+index 8ab6508..41bf206 100644
--- a/libopkg/pkg_depends.c
+++ b/libopkg/pkg_depends.c
-@@ -19,6 +19,7 @@
+@@ -21,6 +21,7 @@
#include <ctype.h>
#include "pkg.h"
@@ -38,7 +38,7 @@ index a4df7de..d2d279e 100644
#include "opkg_utils.h"
#include "pkg_hash.h"
#include "opkg_message.h"
-@@ -204,7 +205,7 @@ pkg_hash_fetch_unsatisfied_dependencies(pkg_t * pkg, pkg_vec_t *unsatisfied,
+@@ -196,7 +197,7 @@ pkg_hash_fetch_unsatisfied_dependencies(pkg_t * pkg, pkg_vec_t *unsatisfied,
/* user request overrides package recommendation */
if (satisfying_pkg != NULL
&& (compound_depend->type == RECOMMEND || compound_depend->type == SUGGEST)
@@ -48,18 +48,18 @@ index a4df7de..d2d279e 100644
"%s at user request\n",
pkg->name, satisfying_pkg->name);
diff --git a/src/opkg-cl.c b/src/opkg-cl.c
-index b711511..0315d41 100644
+index a1d121f..6378380 100644
--- a/src/opkg-cl.c
+++ b/src/opkg-cl.c
-@@ -50,6 +50,7 @@ enum {
- ARGS_OPT_NODEPS,
+@@ -51,6 +51,7 @@ enum {
ARGS_OPT_AUTOREMOVE,
ARGS_OPT_CACHE,
+ ARGS_OPT_COMBINE,
+ ARGS_OPT_NOINSTALL_RECOMMENDS,
};
static struct option long_options[] = {
-@@ -89,6 +90,7 @@ static struct option long_options[] = {
+@@ -91,6 +92,7 @@ static struct option long_options[] = {
{"noaction", 0, 0, ARGS_OPT_NOACTION},
{"download-only", 0, 0, ARGS_OPT_DOWNLOAD_ONLY},
{"nodeps", 0, 0, ARGS_OPT_NODEPS},
@@ -67,7 +67,7 @@ index b711511..0315d41 100644
{"offline", 1, 0, 'o'},
{"offline-root", 1, 0, 'o'},
{"add-arch", 1, 0, ARGS_OPT_ADD_ARCH},
-@@ -199,6 +201,9 @@ args_parse(int argc, char *argv[])
+@@ -201,6 +203,9 @@ args_parse(int argc, char *argv[])
case ARGS_OPT_NOACTION:
conf->noaction = 1;
break;
@@ -77,7 +77,7 @@ index b711511..0315d41 100644
case ARGS_OPT_DOWNLOAD_ONLY:
conf->download_only = 1;
break;
-@@ -293,6 +298,8 @@ usage()
+@@ -300,6 +305,8 @@ usage()
printf("\t--noaction No action -- test only\n");
printf("\t--download-only No action -- download only\n");
printf("\t--nodeps Do not follow dependencies\n");
@@ -87,5 +87,5 @@ index b711511..0315d41 100644
printf("\t Remove package and all dependencies\n");
printf("\t--autoremove Remove packages that were installed\n");
--
-1.9.1
+2.1.3
diff --git a/meta/recipes-devtools/opkg/opkg_0.2.2.bb b/meta/recipes-devtools/opkg/opkg_0.2.4.bb
index 3dd74898b3..2cca63c560 100644
--- a/meta/recipes-devtools/opkg/opkg_0.2.2.bb
+++ b/meta/recipes-devtools/opkg/opkg_0.2.4.bb
@@ -4,9 +4,10 @@ SRC_URI = "http://downloads.yoctoproject.org/releases/${BPN}/${BPN}-${PV}.tar.gz
file://no-install-recommends.patch \
file://add-exclude.patch \
file://opkg-configure.service \
+ file://libopkg-opkg_remove.c-avoid-remove-pkg-repeatly-with.patch \
"
S = "${WORKDIR}/${BPN}-${PV}"
-SRC_URI[md5sum] = "b3ecef90d67d2aed2a14c2116a027482"
-SRC_URI[sha256sum] = "aa554ce7538544aac4f69e8274a0f9b8b433b8c3b1d00704bd393f713303a12b"
+SRC_URI[md5sum] = "40ed2aee15abc8d550539449630091bd"
+SRC_URI[sha256sum] = "0f40c7e457d81edf9aedc07c778f4697111ab163a38ef95999faece015453086"
diff --git a/meta/recipes-devtools/ossp-uuid/ossp-uuid/ldflags.patch b/meta/recipes-devtools/ossp-uuid/ossp-uuid/ldflags.patch
new file mode 100644
index 0000000000..f4f0ef870e
--- /dev/null
+++ b/meta/recipes-devtools/ossp-uuid/ossp-uuid/ldflags.patch
@@ -0,0 +1,26 @@
+Obey LDFLAGS
+
+Signed-off-by: Christopher Larson <chris_larson@mentor.com>
+Upstream-Status: Pending
+
+--- uuid-1.6.2.orig/Makefile.in
++++ uuid-1.6.2/Makefile.in
+@@ -113,15 +113,15 @@ all: $(TARGETS)
+ @$(LIBTOOL) --mode=compile $(CXX) $(CPPFLAGS) $(CXXFLAGS) -c $<
+
+ $(LIB_NAME): $(LIB_OBJS)
+- @$(LIBTOOL) --mode=link $(CC) -o $(LIB_NAME) $(LIB_OBJS) -rpath $(libdir) \
++ @$(LIBTOOL) --mode=link $(CC) $(LDFLAGS) -o $(LIB_NAME) $(LIB_OBJS) -rpath $(libdir) \
+ -version-info `$(SHTOOL) version -l c -d libtool $(S)/uuid_vers.h`
+
+ $(DCE_NAME): $(DCE_OBJS)
+- @$(LIBTOOL) --mode=link $(CC) -o $(DCE_NAME) $(DCE_OBJS) -rpath $(libdir) \
++ @$(LIBTOOL) --mode=link $(CC) $(LDFLAGS) -o $(DCE_NAME) $(DCE_OBJS) -rpath $(libdir) \
+ -version-info `$(SHTOOL) version -l c -d libtool $(S)/uuid_vers.h`
+
+ $(CXX_NAME): $(CXX_OBJS)
+- @$(LIBTOOL) --mode=link $(CXX) -o $(CXX_NAME) $(CXX_OBJS) -rpath $(libdir) \
++ @$(LIBTOOL) --mode=link $(CXX) $(LDFLAGS) -o $(CXX_NAME) $(CXX_OBJS) -rpath $(libdir) \
+ -version-info `$(SHTOOL) version -l c -d libtool $(S)/uuid_vers.h`
+
+ $(PRG_NAME): $(PRG_OBJS) $(LIB_NAME)
diff --git a/meta/recipes-devtools/ossp-uuid/ossp-uuid_1.6.2.bb b/meta/recipes-devtools/ossp-uuid/ossp-uuid_1.6.2.bb
index f524854a2d..160990c142 100644
--- a/meta/recipes-devtools/ossp-uuid/ossp-uuid_1.6.2.bb
+++ b/meta/recipes-devtools/ossp-uuid/ossp-uuid_1.6.2.bb
@@ -25,6 +25,7 @@ SRC_URI = "http://gnome-build-stage-1.googlecode.com/files/uuid-1.6.2.tar.gz \
file://uuid-libtool.patch \
file://uuid-nostrip.patch \
file://install-pc.patch \
+ file://ldflags.patch \
"
SRC_URI[md5sum] = "5db0d43a9022a6ebbbc25337ae28942f"
SRC_URI[sha256sum] = "11a615225baa5f8bb686824423f50e4427acd3f70d394765bdff32801f0fd5b0"
diff --git a/meta/recipes-devtools/patchelf/patchelf_0.8.bb b/meta/recipes-devtools/patchelf/patchelf_0.8.bb
new file mode 100644
index 0000000000..c1b87f5539
--- /dev/null
+++ b/meta/recipes-devtools/patchelf/patchelf_0.8.bb
@@ -0,0 +1,12 @@
+SRC_URI = "http://nixos.org/releases/${BPN}/${BPN}-${PV}/${BPN}-${PV}.tar.bz2"
+LICENSE = "GPLv3"
+SUMMARY = "Tool to allow editing of RPATH and interpreter fields in ELF binaries"
+
+SRC_URI[md5sum] = "5b151e3c83b31f5931b4a9fc01635bfd"
+SRC_URI[sha256sum] = "c99f84d124347340c36707089ec8f70530abd56e7827c54d506eb4cc097a17e7"
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
+
+inherit autotools
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-devtools/pax-utils/pax-utils_0.8.1.bb b/meta/recipes-devtools/pax-utils/pax-utils_0.9.2.bb
index e79347f679..ac14306e10 100644
--- a/meta/recipes-devtools/pax-utils/pax-utils_0.8.1.bb
+++ b/meta/recipes-devtools/pax-utils/pax-utils_0.9.2.bb
@@ -9,8 +9,10 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a"
SRC_URI = "http://gentoo.osuosl.org/distfiles/pax-utils-${PV}.tar.xz"
-SRC_URI[md5sum] = "bc42279c5aff3682c12be40f72805cec"
-SRC_URI[sha256sum] = "844ff25b1a11bcef92ef34b22f576f226a772b67196818656f8874513438f5b9"
+SRC_URI[md5sum] = "34c41888cec67759c21333bef13e950c"
+SRC_URI[sha256sum] = "578801df0661b1b7b8fed0ce4a9859239f919fd37529907681e51091a1bcb4de"
+
+RDEPENDS_${PN} += "bash"
do_install() {
oe_runmake PREFIX=${D}${prefix} DESTDIR=${D} install
diff --git a/meta/recipes-devtools/perl/perl-5.20.0/config.sh b/meta/recipes-devtools/perl/perl-5.20.0/config.sh
index 2532c8d413..1ac1355829 100644
--- a/meta/recipes-devtools/perl/perl-5.20.0/config.sh
+++ b/meta/recipes-devtools/perl/perl-5.20.0/config.sh
@@ -1104,3 +1104,10 @@ usekernprocpathname='undef'
usensgetexecutablepath='undef'
st_ino_sign='1'
st_ino_size='4'
+
+# for Time-HiRes
+d_clock_nanosleep='define'
+d_clock_gettime='define'
+d_clock_getres='define'
+d_clock='define'
+d_nanosleep='define'
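
These added d_* switches simply tell perl's generated config.sh that the target C library provides the POSIX clock and sleep interfaces that Time::HiRes wants to use. For reference only, a tiny C probe of the calls those flags advertise (an illustration, not something the recipe runs) is:

    #include <stdio.h>
    #include <time.h>

    int main(void)
    {
        struct timespec res, now, pause = { 0, 50 * 1000 * 1000 };  /* 50 ms */

        clock_getres(CLOCK_MONOTONIC, &res);                /* d_clock_getres    */
        clock_gettime(CLOCK_MONOTONIC, &now);               /* d_clock_gettime   */
        clock_nanosleep(CLOCK_MONOTONIC, 0, &pause, NULL);  /* d_clock_nanosleep */
        nanosleep(&pause, NULL);                            /* d_nanosleep       */
        (void)clock();                                      /* d_clock           */

        printf("clock resolution %ld ns, now %ld.%09ld s\n",
               res.tv_nsec, (long)now.tv_sec, now.tv_nsec);
        return 0;
    }
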
diff --git a/meta/recipes-devtools/perl/perl-5.20.0/run-ptest b/meta/recipes-devtools/perl/perl-5.20.0/run-ptest
index ed59b4b145..1e2dd1b66d 100644
--- a/meta/recipes-devtools/perl/perl-5.20.0/run-ptest
+++ b/meta/recipes-devtools/perl/perl-5.20.0/run-ptest
@@ -1,2 +1,2 @@
#!/bin/sh
-cd t && ./TEST | sed -u -e 's/^\([^. \t]*\)\.\.\.\+ok/PASS: \1/' -e 's/^\([^. \t]*\)\.\.\.\+skipped/SKIP: \1/' -e 's/^\([^. \t]*\)\.\.\.\+\(.*\)/FAIL: \1\n\2/'
+cd t && ./TEST | sed -u -e 's|\(.*\) .* ok$|PASS: \1|' -e 's|\(.*\) .* skipped|SKIP: \1|' -e 's|\(.*\) \.\(.*\)|FAIL: \1|'
diff --git a/meta/recipes-devtools/perl/perl-native_5.20.0.bb b/meta/recipes-devtools/perl/perl-native_5.20.0.bb
index 4364d41b35..01d40fcff2 100644
--- a/meta/recipes-devtools/perl/perl-native_5.20.0.bb
+++ b/meta/recipes-devtools/perl/perl-native_5.20.0.bb
@@ -103,8 +103,13 @@ do_install () {
install $i ${D}${libdir}/perl/${PV}/CORE
done
- create_wrapper ${D}${bindir}/perl PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/${PV}:${STAGING_LIBDIR}/perl/'
- create_wrapper ${D}${bindir}/perl${PV} PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/${PV}:${STAGING_LIBDIR}/perl/'
+ create_wrapper ${D}${bindir}/perl PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/${PV}:${STAGING_LIBDIR}/perl:${STAGING_LIBDIR}/perl/site_perl/${PV}:${STAGING_LIBDIR}/perl/vendor_perl/${PV}'
+ create_wrapper ${D}${bindir}/perl${PV} PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/${PV}:${STAGING_LIBDIR}/perl:${STAGING_LIBDIR}/perl/site_perl/${PV}:${STAGING_LIBDIR}/perl/vendor_perl/${PV}'
+
+ # Use /usr/bin/env nativeperl for the perl script.
+ for f in `grep -Il '#! *${bindir}/perl' ${D}/${bindir}/*`; do
+ sed -i -e 's|${bindir}/perl|/usr/bin/env nativeperl|' $f
+ done
}
SYSROOT_PREPROCESS_FUNCS += "perl_sysroot_create_wrapper"
diff --git a/meta/recipes-devtools/perl/perl-ptest.inc b/meta/recipes-devtools/perl/perl-ptest.inc
index 6a9df4ad4a..948ea7cddf 100644
--- a/meta/recipes-devtools/perl/perl-ptest.inc
+++ b/meta/recipes-devtools/perl/perl-ptest.inc
@@ -8,7 +8,7 @@ do_install_ptest () {
sed -e "s:\/opt:\/usr:" -i Porting/add-package.pl
sed -e "s:\/local\/gnu\/:\/:" -i hints/cxux.sh
tar -cf - * --exclude \*.o --exclude libperl.so --exclude Makefile --exclude makefile --exclude hostperl \
- --exclude miniperl --exclude generate_uudmap | ( cd ${D}${PTEST_PATH} && tar -xf - )
+ --exclude miniperl --exclude generate_uudmap --exclude patches | ( cd ${D}${PTEST_PATH} && tar -xf - )
sed -i -e "s,${D},,g" \
-e "s,--sysroot=${STAGING_DIR_HOST},,g" \
diff --git a/meta/recipes-devtools/perl/perl_5.20.0.bb b/meta/recipes-devtools/perl/perl_5.20.0.bb
index e984c906de..3ca0f53964 100644
--- a/meta/recipes-devtools/perl/perl_5.20.0.bb
+++ b/meta/recipes-devtools/perl/perl_5.20.0.bb
@@ -94,6 +94,11 @@ HOSTPERL = "${STAGING_BINDIR_NATIVE}/perl-native/perl${PV}"
# Where to find .so files - use the -native versions not those from the target build
export PERLHOSTLIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${PV}/"
+# Where to find perl @INC/#include files
+# - use the -native versions not those from the target build
+export PERL_LIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${PV}/"
+export PERL_ARCHLIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${PV}/"
+
# LDFLAGS for shared libraries
export LDDLFLAGS = "${LDFLAGS} -shared"
@@ -117,6 +122,16 @@ do_configure() {
# Make hostperl in build directory be the native perl
ln -sf ${HOSTPERL} hostperl
+ if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then
+ if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" -a -e Makefile ]; then
+ ${MAKE} clean
+ fi
+ find ${S} -name "*.so" -delete
+ fi
+ if [ -n "${CONFIGURESTAMPFILE}" ]; then
+ echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE}
+ fi
+
# Do our work in the cross subdir
cd Cross
@@ -189,7 +204,7 @@ do_compile() {
sed -i -e "s|\([ \"\']\+\)/usr/include|\1${STAGING_INCDIR}|g" ext/Errno/Errno_pm.PL
sed -i -e "s|\([ \"\']\+\)/usr/include|\1${STAGING_INCDIR}|g" cpan/Compress-Raw-Zlib/config.in
sed -i -e 's|/usr/lib|""|g' cpan/Compress-Raw-Zlib/config.in
- sed -i -e 's|SYSROOTLIB|${STAGING_LIBDIR}|g' cpan/ExtUtils-MakeMaker/lib/ExtUtils/Liblist/Kid.pm
+ sed -i -e 's|(@libpath, ".*"|(@libpath, "${STAGING_LIBDIR}"|g' cpan/ExtUtils-MakeMaker/lib/ExtUtils/Liblist/Kid.pm
cd Cross
oe_runmake perl LD="${CCLD}"
@@ -221,7 +236,7 @@ do_install() {
do_install_append_class-nativesdk () {
create_wrapper ${D}${bindir}/perl \
- PERL5LIB='$PERL5LIB:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/${PV}'
+ PERL5LIB='$PERL5LIB:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/${PV}:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/site_perl/${PV}:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/vendor_perl/${PV}'
}
PACKAGE_PREPROCESS_FUNCS += "perl_package_preprocess"
diff --git a/meta/recipes-devtools/postinst-intercept/postinst-intercept_1.0.bb b/meta/recipes-devtools/postinst-intercept/nativesdk-postinst-intercept_1.0.bb
index 41b9a6e49e..7dc45c68f2 100644
--- a/meta/recipes-devtools/postinst-intercept/postinst-intercept_1.0.bb
+++ b/meta/recipes-devtools/postinst-intercept/nativesdk-postinst-intercept_1.0.bb
@@ -2,12 +2,12 @@ SUMMARY = "Postinstall scriptlets"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
-FILES_${PN}_append_class-nativesdk = " ${datadir}/postinst-intercepts/*"
+FILES_${PN}_append = " ${datadir}/postinst-intercepts/*"
do_configure[noexec] = "1"
do_compile[noexec] = "1"
-do_install_append_class-nativesdk() {
+do_install() {
install -d ${D}${datadir}/postinst-intercepts
install -m 755 ${COREBASE}/scripts/postinst-intercepts/postinst_intercept ${D}${datadir}/postinst-intercepts/
install -m 755 ${COREBASE}/scripts/postinst-intercepts/update_font_cache ${D}${datadir}/postinst-intercepts/
@@ -15,5 +15,5 @@ do_install_append_class-nativesdk() {
install -m 755 ${COREBASE}/scripts/postinst-intercepts/update_pixbuf_cache ${D}${datadir}/postinst-intercepts/
}
-BBCLASSEXTEND = "nativesdk"
+inherit nativesdk
INHIBIT_DEFAULT_DEPS = "1"
diff --git a/meta/recipes-devtools/pseudo/files/0001-pseudo_has_unload-add-function.patch b/meta/recipes-devtools/pseudo/files/0001-pseudo_has_unload-add-function.patch
deleted file mode 100644
index b5c81c9d3e..0000000000
--- a/meta/recipes-devtools/pseudo/files/0001-pseudo_has_unload-add-function.patch
+++ /dev/null
@@ -1,190 +0,0 @@
-From be97cb958f2934fa398fc8e344b25b84ebd4e90c Mon Sep 17 00:00:00 2001
-From: "Peter A. Bigot" <pab@pabigot.com>
-Date: Sun, 25 Aug 2013 19:22:09 -0500
-Subject: [PATCH] pseudo_has_unload: add function
-
-Various wrappers checked for a non-null pseudo_get_value("PSEUDO_UNLOAD") to
-determine whether the environment should include the pseudo variables. None
-of those checks freed the returned value when it was not null. The new
-check function does.
-
-The new check function also sees whether PSEUDO_UNLOAD was defined in the
-environment that should be used in the wrapped system call. This allows
-pkg_postinst scripts to strip out the LD_PRELOAD setting, for example before
-invoking qemu to execute commands in an environment that does not have
-libpseudo.so.
-
-[YOCTO #4843]
-
-Upstream-Status: Pending
-Signed-off-by: Peter A. Bigot <pab@pabigot.com>
----
- ports/common/guts/execv.c | 2 +-
- ports/common/guts/execve.c | 2 +-
- ports/common/guts/execvp.c | 2 +-
- ports/common/guts/fork.c | 2 +-
- ports/linux/newclone/pseudo_wrappers.c | 2 +-
- ports/linux/oldclone/pseudo_wrappers.c | 2 +-
- ports/unix/guts/popen.c | 2 +-
- ports/unix/guts/system.c | 2 +-
- pseudo.h | 1 +
- pseudo_util.c | 27 +++++++++++++++++++++++++++
- 10 files changed, 36 insertions(+), 8 deletions(-)
-
-diff --git a/ports/common/guts/execv.c b/ports/common/guts/execv.c
-index 763e1f9..3e1f820 100644
---- a/ports/common/guts/execv.c
-+++ b/ports/common/guts/execv.c
-@@ -19,7 +19,7 @@
- }
-
- pseudo_setupenv();
-- if (pseudo_get_value("PSEUDO_UNLOAD"))
-+ if (pseudo_has_unload(NULL))
- pseudo_dropenv();
-
- /* if exec() fails, we may end up taking signals unexpectedly...
-diff --git a/ports/common/guts/execve.c b/ports/common/guts/execve.c
-index a003657..ff6a44e 100644
---- a/ports/common/guts/execve.c
-+++ b/ports/common/guts/execve.c
-@@ -20,7 +20,7 @@
- }
-
- new_environ = pseudo_setupenvp(envp);
-- if (pseudo_get_value("PSEUDO_UNLOAD"))
-+ if (pseudo_has_unload(new_environ))
- new_environ = pseudo_dropenvp(new_environ);
-
- /* if exec() fails, we may end up taking signals unexpectedly...
-diff --git a/ports/common/guts/execvp.c b/ports/common/guts/execvp.c
-index 5e75be7..04253c3 100644
---- a/ports/common/guts/execvp.c
-+++ b/ports/common/guts/execvp.c
-@@ -20,7 +20,7 @@
- }
-
- pseudo_setupenv();
-- if (pseudo_get_value("PSEUDO_UNLOAD"))
-+ if (pseudo_has_unload(NULL))
- pseudo_dropenv();
-
- /* if exec() fails, we may end up taking signals unexpectedly...
-diff --git a/ports/common/guts/fork.c b/ports/common/guts/fork.c
-index df8abd7..bebe3b0 100644
---- a/ports/common/guts/fork.c
-+++ b/ports/common/guts/fork.c
-@@ -12,7 +12,7 @@
- */
- if (rc == 0) {
- pseudo_setupenv();
-- if (!pseudo_get_value("PSEUDO_UNLOAD")) {
-+ if (!pseudo_has_unload(NULL)) {
- pseudo_reinit_libpseudo();
- } else {
- pseudo_dropenv();
-diff --git a/ports/linux/newclone/pseudo_wrappers.c b/ports/linux/newclone/pseudo_wrappers.c
-index 9dbac42..257e8bb 100644
---- a/ports/linux/newclone/pseudo_wrappers.c
-+++ b/ports/linux/newclone/pseudo_wrappers.c
-@@ -28,7 +28,7 @@ int wrap_clone_child(void *args) {
-
- if (!(flags & CLONE_VM)) {
- pseudo_setupenv();
-- if (!pseudo_get_value("PSEUDO_UNLOAD")) {
-+ if (!pseudo_has_unload(NULL)) {
- pseudo_reinit_libpseudo();
- } else {
- pseudo_dropenv();
-diff --git a/ports/linux/oldclone/pseudo_wrappers.c b/ports/linux/oldclone/pseudo_wrappers.c
-index c0ce5dd..598d966 100644
---- a/ports/linux/oldclone/pseudo_wrappers.c
-+++ b/ports/linux/oldclone/pseudo_wrappers.c
-@@ -22,7 +22,7 @@ int wrap_clone_child(void *args) {
-
- if (!(flags & CLONE_VM)) {
- pseudo_setupenv();
-- if (!pseudo_get_value("PSEUDO_UNLOAD")) {
-+ if (!pseudo_has_unload(NULL)) {
- pseudo_reinit_libpseudo();
- } else {
- pseudo_dropenv();
-diff --git a/ports/unix/guts/popen.c b/ports/unix/guts/popen.c
-index 0ca16b0..5d44c0e 100644
---- a/ports/unix/guts/popen.c
-+++ b/ports/unix/guts/popen.c
-@@ -9,7 +9,7 @@
- * in ways that avoid our usual enforcement of the environment.
- */
- pseudo_setupenv();
-- if (pseudo_get_value("PSEUDO_UNLOAD"))
-+ if (pseudo_has_unload(NULL))
- pseudo_dropenv();
-
- rc = real_popen(command, mode);
-diff --git a/ports/unix/guts/system.c b/ports/unix/guts/system.c
-index 028b372..6351592 100644
---- a/ports/unix/guts/system.c
-+++ b/ports/unix/guts/system.c
-@@ -9,7 +9,7 @@
- return 1;
-
- pseudo_setupenv();
-- if (pseudo_get_value("PSEUDO_UNLOAD"))
-+ if (pseudo_has_unload(NULL))
- pseudo_dropenv();
-
- rc = real_system(command);
-diff --git a/pseudo.h b/pseudo.h
-index 56760a4..f600793 100644
---- a/pseudo.h
-+++ b/pseudo.h
-@@ -28,6 +28,7 @@ extern void pseudo_init_client(void);
- void pseudo_dump_env(char **envp);
- int pseudo_set_value(const char *key, const char *value);
- char *pseudo_get_value(const char *key);
-+int pseudo_has_unload(char * const *envp);
-
- #include "pseudo_tables.h"
-
-diff --git a/pseudo_util.c b/pseudo_util.c
-index 8d0969e..16c70e0 100644
---- a/pseudo_util.c
-+++ b/pseudo_util.c
-@@ -95,6 +95,33 @@ dump_env(char **envp) {
- }
- #endif
-
-+int
-+pseudo_has_unload(char * const *envp) {
-+ static const char unload[] = "PSEUDO_UNLOAD";
-+ static size_t unload_len = strlen(unload);
-+ size_t i = 0;
-+
-+ /* Is it in the caller environment? */
-+ if (NULL != getenv(unload))
-+ return 1;
-+
-+ /* Is it in the environment cache? */
-+ if (pseudo_util_initted == -1)
-+ pseudo_init_util();
-+ while (pseudo_env[i].key && strcmp(pseudo_env[i].key, unload))
-+ ++i;
-+ if (pseudo_env[i].key && pseudo_env[i].value)
-+ return 1;
-+
-+ /* Is it in the operational environment? */
-+ while (envp && *envp) {
-+ if ((!strncmp(*envp, unload, unload_len)) && ('=' == (*envp)[unload_len]))
-+ return 1;
-+ ++envp;
-+ }
-+ return 0;
-+}
-+
- /* Caller must free memory! */
- char *
- pseudo_get_value(const char *key) {
---
-1.7.9.5
-
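
The patch dropped above (presumably because the fix is carried in the newer pseudo source packaged later in this commit) replaced the leaky pseudo_get_value("PSEUDO_UNLOAD") checks with one helper that looks for the variable in the caller's environment, in pseudo's cached environment, and in an explicit envp destined for the wrapped call. A self-contained sketch of the same "is this variable set here or in that envp" check, without pseudo's internals, might be:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Return 1 if "name" is set in the caller's environment, or appears as
     * "name=..." in the given envp array (envp may be NULL). */
    static int env_has(const char *name, char *const *envp)
    {
        size_t len = strlen(name);

        if (getenv(name) != NULL)
            return 1;

        for (; envp && *envp; ++envp) {
            if (strncmp(*envp, name, len) == 0 && (*envp)[len] == '=')
                return 1;
        }
        return 0;
    }

    int main(void)
    {
        char *fake_envp[] = { "PSEUDO_UNLOAD=1", NULL };

        printf("caller env: %d, explicit envp: %d\n",
               env_has("PSEUDO_UNLOAD", NULL),
               env_has("PSEUDO_UNLOAD", fake_envp));
        return 0;
    }
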
diff --git a/meta/recipes-devtools/pseudo/files/fallback-group b/meta/recipes-devtools/pseudo/files/fallback-group
new file mode 100644
index 0000000000..81bf732312
--- /dev/null
+++ b/meta/recipes-devtools/pseudo/files/fallback-group
@@ -0,0 +1,2 @@
+root:*:0:
+mail:*:8:
diff --git a/meta/recipes-devtools/pseudo/files/fallback-passwd b/meta/recipes-devtools/pseudo/files/fallback-passwd
new file mode 100644
index 0000000000..c1458dc0f6
--- /dev/null
+++ b/meta/recipes-devtools/pseudo/files/fallback-passwd
@@ -0,0 +1 @@
+root::0:0:root:/home/root:/bin/sh
diff --git a/meta/recipes-devtools/pseudo/files/pseudo-1.5.1-install-directory-mode.patch b/meta/recipes-devtools/pseudo/files/pseudo-1.5.1-install-directory-mode.patch
deleted file mode 100644
index e8eaf13f74..0000000000
--- a/meta/recipes-devtools/pseudo/files/pseudo-1.5.1-install-directory-mode.patch
+++ /dev/null
@@ -1,18 +0,0 @@
-Upstream-Status: Backport
-
-When the install command sets the created directory mode, pseudo incorrectly
-changes the mode of the directory to 0700.
-
-Signed-off-by: yanjun.zhu <yanjun.zhu@windriver.com>
-Signed-off-by: Kai Kang <kai.kang@windriver.com>
-
---- a/ports/unix/guts/mkdirat.c
-+++ b/ports/unix/guts/mkdirat.c
-@@ -25,6 +25,7 @@
- stat_rc = base_fstatat(dirfd, path, &buf, AT_SYMLINK_NOFOLLOW);
- #endif
- if (stat_rc != -1) {
-+ buf.st_mode = PSEUDO_DB_MODE(buf.st_mode, mode);
- pseudo_client_op(OP_MKDIR, 0, -1, dirfd, path, &buf);
- } else {
- pseudo_debug(1, "mkdir of %s succeeded, but stat failed: %s\n",
diff --git a/meta/recipes-devtools/pseudo/files/pseudo-fchmodat-permissions.patch b/meta/recipes-devtools/pseudo/files/pseudo-fchmodat-permissions.patch
deleted file mode 100644
index 7b1f82d577..0000000000
--- a/meta/recipes-devtools/pseudo/files/pseudo-fchmodat-permissions.patch
+++ /dev/null
@@ -1,264 +0,0 @@
-commit 7e67d082737b3df4788caf85fedd607b3acd9786
-Author: Peter Seebach <peter.seebach@windriver.com>
-Date: Fri May 16 15:53:06 2014 -0500
-
- permissions updates: improve fchmodat, mask out write bits
-
- Upstream-Status: Backport of several patches from 1.6 branch,
- combined.
-
- Backport from pseudo 1.6 of improvements to fchmodat (handle
- AT_SYMLINK_NOFOLLOW by rejecting it if the host system does,
- to make GNU tar happier), also mask out write bits from filesystem
- modes to avoid security problems.
-
- Also start tracking umask so we can use the right modes for
- open, mkdir, and mknod.
-
- The 1.6 patches are:
-
- 87c53ea58befef48677846693aab445df1850e16
- 3c716e0bab4f0cfe4be84caa9ce5fd5e3f5e2a23
- c98e4f43b5d6499748a5057134408f4ba4854fb4
- 2f71a021b725c1aa415439209a89327f0b997d02
- 14925786b55202d8147b0af719038e8a23ef73c0
-
-diff --git a/ChangeLog.txt b/ChangeLog.txt
-index 113f675..cc966ce 100644
---- a/ChangeLog.txt
-+++ b/ChangeLog.txt
-@@ -1,3 +1,18 @@
-+2014-05-27:
-+ * (seebs) start noticing umask, mask it out from open or mkdir
-+ calls rather than relying on underlying open/mkdir to do it.
-+
-+2014-05-16:
-+ * (seebs) fchmodat: don't drop flags, report failures, to improve
-+ compatibility/consistency. Cache the knowledge that
-+ AT_SYMLINK_NOFOLLOW gets ENOTSUP.
-+ * (seebs) mask out group/other write bits in real filesystem to
-+ reduce risks when assembling a rootfs including world-writeable
-+ directories.
-+
-+2014-05-15:
-+ * (seebs) drop flags when calling fchmodat() to appease GNU tar.
-+
- 2013-02-27:
- * (seebs) Oh, hey, what if I took out my debug messages?
- * (seebs) update docs a bit to reduce bitrot
-diff --git a/makewrappers b/makewrappers
-index e87cc56..0127766 100755
---- a/makewrappers
-+++ b/makewrappers
-@@ -204,6 +204,7 @@ class Function:
- 'uid_t': '0',
- 'int': '-1',
- 'long': '-1',
-+ 'mode_t': '0',
- 'ssize_t': '-1'
- }
-
-diff --git a/ports/darwin/guts/open.c b/ports/darwin/guts/open.c
-index c66cc15..520bb70 100644
---- a/ports/darwin/guts/open.c
-+++ b/ports/darwin/guts/open.c
-@@ -9,6 +9,9 @@
- struct stat buf = { };
- int existed = 1;
- int save_errno;
-+
-+ /* mask out mode bits appropriately */
-+ mode = mode & ~pseudo_umask;
- #ifdef PSEUDO_FORCE_ASYNCH
- flags &= ~O_SYNC;
- #endif
-diff --git a/ports/linux/guts/__xmknodat.c b/ports/linux/guts/__xmknodat.c
-index 59b4f2f..0888b8a 100644
---- a/ports/linux/guts/__xmknodat.c
-+++ b/ports/linux/guts/__xmknodat.c
-@@ -9,6 +9,9 @@
- pseudo_msg_t *msg;
- struct stat64 buf;
-
-+ /* mask out mode bits appropriately */
-+ mode = mode & ~pseudo_umask;
-+
- /* we don't use underlying call, so _ver is irrelevant to us */
- (void) ver;
-
-diff --git a/ports/linux/guts/openat.c b/ports/linux/guts/openat.c
-index 8460073..4053549 100644
---- a/ports/linux/guts/openat.c
-+++ b/ports/linux/guts/openat.c
-@@ -10,6 +10,9 @@
- int existed = 1;
- int save_errno;
-
-+ /* mask out mode bits appropriately */
-+ mode = mode & ~pseudo_umask;
-+
- #ifdef PSEUDO_NO_REAL_AT_FUNCTIONS
- if (dirfd != AT_FDCWD) {
- errno = ENOSYS;
-diff --git a/ports/unix/guts/fchmodat.c b/ports/unix/guts/fchmodat.c
-index 59a92ce..69a953c 100644
---- a/ports/unix/guts/fchmodat.c
-+++ b/ports/unix/guts/fchmodat.c
-@@ -8,6 +8,7 @@
- */
- PSEUDO_STATBUF buf;
- int save_errno = errno;
-+ static int picky_fchmodat = 0;
-
- #ifdef PSEUDO_NO_REAL_AT_FUNCTIONS
- if (dirfd != AT_FDCWD) {
-@@ -15,6 +16,16 @@
- return -1;
- }
- if (flags & AT_SYMLINK_NOFOLLOW) {
-+ /* Linux, as of this writing, will always reject this.
-+ * GNU tar relies on getting the rejection. To cut down
-+ * on traffic, we check for the failure, and if we saw
-+ * a failure previously, we reject it right away and tell
-+ * the caller to retry.
-+ */
-+ if (picky_fchmodat) {
-+ errno = ENOTSUP;
-+ return -1;
-+ }
- rc = base_lstat(path, &buf);
- } else {
- rc = base_stat(path, &buf);
-@@ -50,13 +61,22 @@
-
- /* user bits added so "root" can always access files. */
- #ifdef PSEUDO_NO_REAL_AT_FUNCTIONS
-- /* note: if path was a symlink, and AT_NOFOLLOW_SYMLINKS was
-+ /* note: if path was a symlink, and AT_SYMLINK_NOFOLLOW was
- * specified, we already bailed previously. */
- real_chmod(path, PSEUDO_FS_MODE(mode, S_ISDIR(buf.st_mode)));
- #else
-- real_fchmodat(dirfd, path, PSEUDO_FS_MODE(mode, S_ISDIR(buf.st_mode)), flags);
-+ rc = real_fchmodat(dirfd, path, PSEUDO_FS_MODE(mode, S_ISDIR(buf.st_mode)), flags);
-+ /* AT_SYMLINK_NOFOLLOW isn't supported by fchmodat. GNU tar
-+ * tries to use it anyway, figuring it can just retry if that
-+ * fails. So we want to report that *particular* failure instead
-+ * of doing the fallback.
-+ */
-+ if (rc == -1 && errno == ENOTSUP && (flags & AT_SYMLINK_NOFOLLOW)) {
-+ picky_fchmodat = 1;
-+ return -1;
-+ }
- #endif
-- /* we ignore a failure from underlying fchmod, because pseudo
-+ /* we otherwise ignore failures from underlying fchmod, because pseudo
- * may believe you are permitted to change modes that the filesystem
- * doesn't. Note that we also don't need to know whether the
- * file might be a (pseudo) block device or some such; pseudo
-diff --git a/ports/unix/guts/mkdirat.c b/ports/unix/guts/mkdirat.c
-index e846b70..e0b6af9 100644
---- a/ports/unix/guts/mkdirat.c
-+++ b/ports/unix/guts/mkdirat.c
-@@ -6,11 +6,14 @@
- * wrap_mkdirat(int dirfd, const char *path, mode_t mode) {
- * int rc = -1;
- */
-+ /* mask out mode bits appropriately */
-+ mode = mode & ~pseudo_umask;
- #ifdef PSEUDO_NO_REAL_AT_FUNCTIONS
- if (dirfd != AT_FDCWD) {
- errno = ENOSYS;
- return -1;
- }
-+
- rc = real_mkdir(path, PSEUDO_FS_MODE(mode, 1));
- #else
- rc = real_mkdirat(dirfd, path, PSEUDO_FS_MODE(mode, 1));
-diff --git a/ports/unix/guts/mknodat.c b/ports/unix/guts/mknodat.c
-index 6fd5b42..5d8d47c 100644
---- a/ports/unix/guts/mknodat.c
-+++ b/ports/unix/guts/mknodat.c
-@@ -10,6 +10,9 @@
- PSEUDO_STATBUF buf;
- int save_errno = errno;
-
-+ /* mask out mode bits appropriately */
-+ mode = mode & ~pseudo_umask;
-+
- #ifdef PSEUDO_NO_REAL_AT_FUNCTIONS
- if (dirfd != AT_FDCWD) {
- errno = ENOSYS;
-diff --git a/ports/unix/guts/umask.c b/ports/unix/guts/umask.c
-new file mode 100644
-index 0000000..6b060d3
---- /dev/null
-+++ b/ports/unix/guts/umask.c
-@@ -0,0 +1,14 @@
-+/*
-+ * Copyright (c) 2014 Wind River Systems; see
-+ * guts/COPYRIGHT for information.
-+ *
-+ * mode_t umask(mode_t mask)
-+ * mode_t rc = 0;
-+ */
-+
-+ pseudo_umask = mask;
-+ rc = real_umask(mask);
-+
-+/* return rc;
-+ * }
-+ */
-diff --git a/ports/unix/wrapfuncs.in b/ports/unix/wrapfuncs.in
-index 8460a65..e0e9739 100644
---- a/ports/unix/wrapfuncs.in
-+++ b/ports/unix/wrapfuncs.in
-@@ -67,3 +67,4 @@ void sync(void); /* async_skip= */
- int syncfs(int fd); /* async_skip=0 */
- int sync_file_range(int fd, off64_t offset, off64_t nbytes, unsigned int flags); /* async_skip=0 */
- int msync(void *addr, size_t length, int flags); /* async_skip=0 */
-+mode_t umask(mode_t mask);
-diff --git a/pseudo_client.c b/pseudo_client.c
-index b6d11a6..535c810 100644
---- a/pseudo_client.c
-+++ b/pseudo_client.c
-@@ -71,6 +71,8 @@ int pseudo_disabled = 0;
- int pseudo_allow_fsync = 0;
- static int pseudo_local_only = 0;
-
-+int pseudo_umask = 022;
-+
- static char **fd_paths = NULL;
- static int nfds = 0;
- static int messages = 0;
-@@ -219,6 +221,9 @@ pseudo_init_client(void) {
- if (!pseudo_disabled && !pseudo_inited) {
- char *pseudo_path = 0;
-
-+ pseudo_umask = umask(022);
-+ umask(pseudo_umask);
-+
- pseudo_path = pseudo_prefix_path(NULL);
- if (pseudo_prefix_dir_fd == -1) {
- if (pseudo_path) {
-diff --git a/pseudo_client.h b/pseudo_client.h
-index f36a772..5bf820e 100644
---- a/pseudo_client.h
-+++ b/pseudo_client.h
-@@ -72,6 +72,8 @@ extern char *pseudo_passwd;
- extern size_t pseudo_chroot_len;
- extern int pseudo_nosymlinkexp;
-
-+extern int pseudo_umask;
-+
- /* Root can read and write files, and enter directories which have no
- * read, write, or execute permissions. (But can't execute files without
- * execute permissions!)
-@@ -85,6 +87,6 @@ extern int pseudo_nosymlinkexp;
- * None of this will behave very sensibly if umask has 0700 bits in it;
- * this is a known limitation.
- */
--#define PSEUDO_FS_MODE(mode, isdir) ((mode) | S_IRUSR | S_IWUSR | ((isdir) ? S_IXUSR : 0))
--#define PSEUDO_DB_MODE(fs_mode, user_mode) (((fs_mode) & ~0700) | ((user_mode & 0700)))
-+#define PSEUDO_FS_MODE(mode, isdir) (((mode) | S_IRUSR | S_IWUSR | ((isdir) ? S_IXUSR : 0)) & ~(S_IWGRP | S_IWOTH))
-+#define PSEUDO_DB_MODE(fs_mode, user_mode) (((fs_mode) & ~0722) | ((user_mode & 0722)))
-
diff --git a/meta/recipes-devtools/pseudo/files/shutdownping.patch b/meta/recipes-devtools/pseudo/files/shutdownping.patch
deleted file mode 100644
index 8af8e0b594..0000000000
--- a/meta/recipes-devtools/pseudo/files/shutdownping.patch
+++ /dev/null
@@ -1,53 +0,0 @@
-There is a potential issue with the fastop code in pseudo since a process may
-exit and allow some other function to run before the server has processed
-the commands run by the process. Issues have been seen with unpredictable
-file permissions.
-
-To avoid this, we ping the server before exiting, which guarantees it has
-processed the current command queue.
-
-Debugged-by: RP
-Fix written by peter.seebach@windriver.com
-
-Upstream-Status: Submitted
-
-[YOCTO #5132]
-
-diff --git a/pseudo_client.c b/pseudo_client.c
-index f58ce4c..20943b6 100644
---- a/pseudo_client.c
-+++ b/pseudo_client.c
-@@ -75,6 +75,9 @@ static int nfds = 0;
- static int messages = 0;
- static struct timeval message_time = { .tv_sec = 0 };
- static int pseudo_inited = 0;
-+
-+static int sent_messages = 0;
-+
- int pseudo_nosymlinkexp = 0;
-
- /* note: these are int, not uid_t/gid_t, so I can use 'em with scanf */
-@@ -711,6 +714,11 @@ client_ping(void) {
- return 0;
- }
-
-+static void
-+void_client_ping(void) {
-+ client_ping();
-+}
-+
- int
- pseudo_fd(int fd, int how) {
- int newfd;
-@@ -1043,6 +1051,11 @@ pseudo_client_op(pseudo_op_t op, int access, int fd, int dirfd, const char *path
- /* disable wrappers */
- pseudo_antimagic();
-
-+ if (!sent_messages) {
-+ sent_messages = 1;
-+ atexit(void_client_ping);
-+ }
-+
- if (op == OP_RENAME) {
- va_list ap;
- va_start(ap, buf);
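
The shutdownping.patch removed above addresses an ordering problem: with asynchronous "fastop" messages, a client could exit before the server had processed its queue, so the first time a message is sent the client registers an atexit() handler that performs one synchronous ping, guaranteeing the queue is drained before the process goes away. The same register-a-flush-on-first-use pattern, stripped of pseudo's specifics, looks roughly like this:

    #include <stdio.h>
    #include <stdlib.h>

    /* Stand-ins for pseudo's asynchronous message send and synchronous ping;
     * the real versions talk to the pseudo server over its socket. */
    static int pending;

    static void send_async(const char *msg)
    {
        printf("queued: %s\n", msg);
        ++pending;
    }

    static void sync_ping(void)
    {
        /* A real implementation blocks until the server has acknowledged. */
        printf("ping: %d queued message(s) now known to be processed\n", pending);
        pending = 0;
    }

    static void op(const char *msg)
    {
        static int sent_messages;      /* set exactly once, as in the patch */

        if (!sent_messages) {
            sent_messages = 1;
            atexit(sync_ping);         /* drain the queue before the process exits */
        }
        send_async(msg);
    }

    int main(void)
    {
        op("chmod foo 0755");
        op("chown foo 0:0");
        return 0;                      /* sync_ping runs from the atexit handler */
    }
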
diff --git a/meta/recipes-devtools/pseudo/files/symver.patch b/meta/recipes-devtools/pseudo/files/symver.patch
deleted file mode 100644
index 8d1b377ec0..0000000000
--- a/meta/recipes-devtools/pseudo/files/symver.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-When running as pseudo-nativesdk, we might need to run host binaries
-linked against the host libc. Having a 2.14 libc dependency from memcpy is
-problematic, so we instruct the linker to use older symbols.
-
-Upstream-Status: Pending
-
-RP 2012/4/22
-
-Index: pseudo-1.3/pseudo.h
-===================================================================
---- pseudo-1.3.orig/pseudo.h 2012-04-22 12:17:59.078909060 +0000
-+++ pseudo-1.3/pseudo.h 2012-04-22 12:32:42.954888587 +0000
-@@ -29,6 +29,13 @@
- int pseudo_set_value(const char *key, const char *value);
- char *pseudo_get_value(const char *key);
-
-+#ifdef __amd64__
-+#define GLIBC_COMPAT_SYMBOL(SYM) __asm__(".symver " #SYM "," #SYM "@GLIBC_2.2.5")
-+#else
-+#define GLIBC_COMPAT_SYMBOL(SYM) __asm__(".symver " #SYM "," #SYM "@GLIBC_2.0")
-+#endif
-+GLIBC_COMPAT_SYMBOL(memcpy);
-+
- #include "pseudo_tables.h"
-
- extern void pseudo_debug_verbose(void);
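
The symver.patch removed above uses the GNU assembler's .symver directive to pin memcpy to an old glibc version node, so a binary built against a newer glibc does not pick up the GLIBC_2.14 default and can still run against older host libraries. A standalone sketch of the same trick:

    #include <stdio.h>
    #include <string.h>

    /* Bind our memcpy references to the old GLIBC_2.2.5 version node instead of
     * the newer default, so the binary also runs against older host glibc.
     * x86-64 only in this sketch; other architectures need a different node,
     * which is what the removed patch's GLIBC_COMPAT_SYMBOL macro handled. */
    __asm__(".symver memcpy,memcpy@GLIBC_2.2.5");

    int main(void)
    {
        char dst[16];

        memcpy(dst, "hello", 6);
        printf("%s\n", dst);
        return 0;
    }
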
diff --git a/meta/recipes-devtools/pseudo/pseudo-1.6.2/0001-pseudo_client.c-protect-pwd_lck-against-magic.patch b/meta/recipes-devtools/pseudo/pseudo-1.6.2/0001-pseudo_client.c-protect-pwd_lck-against-magic.patch
new file mode 100644
index 0000000000..d0c0a27e54
--- /dev/null
+++ b/meta/recipes-devtools/pseudo/pseudo-1.6.2/0001-pseudo_client.c-protect-pwd_lck-against-magic.patch
@@ -0,0 +1,56 @@
+From e11468a47369596f57c5e99bd0a3dd58b2c6d5e0 Mon Sep 17 00:00:00 2001
+From: "Peter A. Bigot" <pab@pabigot.com>
+Date: Sun, 12 Oct 2014 08:27:14 -0500
+Subject: [PATCH 1/3] pseudo_client.c: protect pwd_lck against magic
+
+While attempting to diagnose unexpected uid/gid assignment I added
+--without-passwd-fallback to the pseudo build. This caused build
+failures due to inability to lock /etc/passwd.
+
+Instrumentation revealed that attempts to create the lock file ended up
+with pseudo_etc_file() creating the correct lock name, but the
+subsequent open had an extra PSEUDO_PASSWD directory prefix causing
+it to fail.
+
+Inspection of pseudo_client shows the only other use of PSEUDO_ETC_FILE
+to be protected against magic. Applying the same shield to the
+unprotected calls in pseudo_pwd_lck_{open,close} fixes the issue.
+
+Upstream-Status: Pending
+Signed-off-by: Peter A. Bigot <pab@pabigot.com>
+---
+ pseudo_client.c | 4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/pseudo_client.c b/pseudo_client.c
+index 8deaa1b..442dd19 100644
+--- a/pseudo_client.c
++++ b/pseudo_client.c
+@@ -416,20 +416,24 @@ pseudo_pwd_lck_open(void) {
+ }
+ }
+ pseudo_pwd_lck_close();
++ pseudo_antimagic();
+ pseudo_pwd_lck_fd = PSEUDO_ETC_FILE(".pwd.lock",
+ pseudo_pwd_lck_name, O_RDWR | O_CREAT);
++ pseudo_magic();
+ return pseudo_pwd_lck_fd;
+ }
+
+ int
+ pseudo_pwd_lck_close(void) {
+ if (pseudo_pwd_lck_fd != -1) {
++ pseudo_antimagic();
+ close(pseudo_pwd_lck_fd);
+ if (pseudo_pwd_lck_name) {
+ unlink(pseudo_pwd_lck_name);
+ free(pseudo_pwd_lck_name);
+ pseudo_pwd_lck_name = 0;
+ }
++ pseudo_magic();
+ pseudo_pwd_lck_fd = -1;
+ return 0;
+ } else {
+--
+1.8.5.5
+
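
The fix above is an instance of a recurring pattern in pseudo_client.c: any filesystem access pseudo makes on its own behalf (here, opening and unlinking the .pwd.lock file) has to be bracketed with pseudo_antimagic()/pseudo_magic() so pseudo's own wrappers do not intercept and rewrite the path a second time. As a hedged illustration of the shape of that pattern (the bracket names come from the patch; the body below is a simplified stand-in, not pseudo's implementation):

    #include <stdio.h>

    /* Simplified analogue of pseudo's interception switch: the real
     * pseudo_antimagic()/pseudo_magic() adjust state that the libc wrappers
     * consult before rewriting paths.  This is not pseudo's implementation. */
    static int magic_enabled = 1;

    static void antimagic(void) { magic_enabled = 0; }
    static void magic(void)     { magic_enabled = 1; }

    static void open_lock_file(const char *path)
    {
        /* Internal bookkeeping I/O runs with interception disabled so the
         * wrappers cannot remap the path a second time -- the failure mode
         * the patch above fixes for .pwd.lock. */
        antimagic();
        printf("open(%s) with interception %s\n",
               path, magic_enabled ? "ON" : "OFF");
        magic();
    }

    int main(void)
    {
        open_lock_file("/etc/.pwd.lock");
        return 0;
    }
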
diff --git a/meta/recipes-devtools/pseudo/pseudo-1.6.2/0002-pseudo_util-modify-interface-to-pseudo_etc_file.patch b/meta/recipes-devtools/pseudo/pseudo-1.6.2/0002-pseudo_util-modify-interface-to-pseudo_etc_file.patch
new file mode 100644
index 0000000000..c7006ef6df
--- /dev/null
+++ b/meta/recipes-devtools/pseudo/pseudo-1.6.2/0002-pseudo_util-modify-interface-to-pseudo_etc_file.patch
@@ -0,0 +1,70 @@
+From f05def2bbd5507084672bc9072ffe0e5101e9b47 Mon Sep 17 00:00:00 2001
+From: "Peter A. Bigot" <pab@pabigot.com>
+Date: Sun, 12 Oct 2014 11:35:57 -0500
+Subject: [PATCH 2/3] pseudo_util: modify interface to pseudo_etc_file
+
+* Make the search directory pointers const: there is no reason why this
+ function should be allowed to mutate the directories.
+
+* Change the search directory argument from an array of pointers to a
+ pointer-to-pointers to prepare for an upcoming enhancement.
+
+Upstream-Status: Pending
+Signed-off-by: Peter A. Bigot <pab@pabigot.com>
+---
+ pseudo.h | 2 +-
+ pseudo_client.c | 2 +-
+ pseudo_util.c | 4 ++--
+ 3 files changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/pseudo.h b/pseudo.h
+index 92020e4..05813c1 100644
+--- a/pseudo.h
++++ b/pseudo.h
+@@ -86,7 +86,7 @@ extern int pseudo_logfile(char *defname);
+ extern ssize_t pseudo_sys_path_max(void);
+ extern ssize_t pseudo_path_max(void);
+ #define PSEUDO_PWD_MAX 4096
+-extern int pseudo_etc_file(const char *filename, char *realname, int flags, char *path[], int dircount);
++extern int pseudo_etc_file(const char *filename, char *realname, int flags, const char **search_dirs, int dircount);
+ extern void pseudo_stat32_from64(struct stat *, const struct stat64 *);
+ extern void pseudo_stat64_from32(struct stat64 *, const struct stat *);
+
+diff --git a/pseudo_client.c b/pseudo_client.c
+index 442dd19..7a4d7fa 100644
+--- a/pseudo_client.c
++++ b/pseudo_client.c
+@@ -93,7 +93,7 @@ gid_t pseudo_egid;
+ gid_t pseudo_sgid;
+ gid_t pseudo_fgid;
+
+-#define PSEUDO_ETC_FILE(filename, realname, flags) pseudo_etc_file(filename, realname, flags, (char *[]) { pseudo_chroot, pseudo_passwd, PSEUDO_PASSWD_FALLBACK }, PSEUDO_PASSWD_FALLBACK ? 3 : 2)
++#define PSEUDO_ETC_FILE(filename, realname, flags) pseudo_etc_file(filename, realname, flags, (const char *[]) { pseudo_chroot, pseudo_passwd, PSEUDO_PASSWD_FALLBACK }, PSEUDO_PASSWD_FALLBACK ? 3 : 2)
+
+ /* helper function to make a directory, just like mkdir -p.
+ * Can't use system() because the child shell would end up trying
+diff --git a/pseudo_util.c b/pseudo_util.c
+index e4e1fc8..647d3ad 100644
+--- a/pseudo_util.c
++++ b/pseudo_util.c
+@@ -1264,7 +1264,7 @@ FILE *pseudo_host_etc_group_file = &pseudo_fake_group_file;
+ #endif
+
+ int
+-pseudo_etc_file(const char *file, char *realname, int flags, char *search_dirs[], int dircount) {
++pseudo_etc_file(const char *file, char *realname, int flags, const char **search_dirs, int dircount) {
+ char filename[pseudo_path_max()];
+ int rc = -1;
+
+@@ -1280,7 +1280,7 @@ pseudo_etc_file(const char *file, char *realname, int flags, char *search_dirs[]
+ return -1;
+ }
+ for (i = 0; i < dircount; ++i) {
+- char *s = search_dirs[i];
++ const char *s = search_dirs[i];
+ if (!s)
+ continue;
+ #if PSEUDO_PORT_DARWIN
+--
+1.8.5.5
+
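
The interface change above is plain const-correctness: a lookup routine that only reads its search directories can take them as const, which in turn lets callers pass a throwaway compound literal the way the PSEUDO_ETC_FILE macro does after this patch. A small sketch of that calling convention (generic names, not pseudo's code):

    #include <stdio.h>

    /* A read-only search over caller-supplied directories; taking them as
     * const documents that the function never mutates them. */
    static const char *first_usable(const char **dirs, int count)
    {
        for (int i = 0; i < count; ++i) {
            if (dirs[i] && dirs[i][0] != '\0')
                return dirs[i];
        }
        return NULL;
    }

    int main(void)
    {
        const char *hit = first_usable(
            (const char *[]) { NULL, "/work/rootfs/etc", "/etc" }, 3);

        printf("searching in: %s\n", hit ? hit : "(nothing found)");
        return 0;
    }
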
diff --git a/meta/recipes-devtools/pseudo/pseudo-1.6.2/0003-pseudo_client.c-support-multiple-directories-in-PSEU.patch b/meta/recipes-devtools/pseudo/pseudo-1.6.2/0003-pseudo_client.c-support-multiple-directories-in-PSEU.patch
new file mode 100644
index 0000000000..e6c6284a24
--- /dev/null
+++ b/meta/recipes-devtools/pseudo/pseudo-1.6.2/0003-pseudo_client.c-support-multiple-directories-in-PSEU.patch
@@ -0,0 +1,116 @@
+From 09f04dc36f21c179235109b3dcddce9dda9a8ba8 Mon Sep 17 00:00:00 2001
+From: "Peter A. Bigot" <pab@pabigot.com>
+Date: Sun, 12 Oct 2014 12:17:48 -0500
+Subject: [PATCH 3/3] pseudo_client.c: support multiple directories in
+ PSEUDO_PASSWD
+
+For OpenEmbedded it is highly unlikely that using the build host passwd
+file is the right approach. Most packages can be built with a pseudo
+that was configured --without-passwd-fallback, since
+PSEUDO_PASSWD=${STAGING_DIR_TARGET} suffices.
+
+This fails when building images, because image.bbclass (correctly)
+overrides to PSEUDO_PASSWD=${IMAGE_ROOTFS}. However, the rootfs
+/etc/passwd is not created until the post-install phase of base-passwd,
+which is long after a passwd file is required. For example, the smart
+RPM interface wants to look up uid 0 right away. The right solution
+here is to look first in ${IMAGE_ROOTFS}, then fallback to
+a location holding immutable files with the minimum user/group settings
+necessary to successfully get base-passwd onto the target.
+
+Rather than rework pseudo to change PSEUDO_PASSWD_FALLBACK to be a
+run-time rather than compile-time specification, rework the handling of
+PSEUDO_PASSWD so that it is a colon-separated list of directories that
+are processed in order.
+
+Upstream-Status: Pending
+Signed-off-by: Peter A. Bigot <pab@pabigot.com>
+---
+ pseudo_client.c | 50 +++++++++++++++++++++++++++++++++++++++++++++++++-
+ 1 file changed, 49 insertions(+), 1 deletion(-)
+
+diff --git a/pseudo_client.c b/pseudo_client.c
+index 7a4d7fa..b52b86a 100644
+--- a/pseudo_client.c
++++ b/pseudo_client.c
+@@ -75,6 +75,8 @@ int pseudo_umask = 022;
+
+ static char **fd_paths = NULL;
+ static int nfds = 0;
++static const char **passwd_paths = NULL;
++static int npasswd_paths = 0;
+ static int messages = 0;
+ static struct timeval message_time = { .tv_sec = 0 };
+ static int pseudo_inited = 0;
+@@ -93,7 +95,7 @@ gid_t pseudo_egid;
+ gid_t pseudo_sgid;
+ gid_t pseudo_fgid;
+
+-#define PSEUDO_ETC_FILE(filename, realname, flags) pseudo_etc_file(filename, realname, flags, (const char *[]) { pseudo_chroot, pseudo_passwd, PSEUDO_PASSWD_FALLBACK }, PSEUDO_PASSWD_FALLBACK ? 3 : 2)
++#define PSEUDO_ETC_FILE(filename, realname, flags) pseudo_etc_file(filename, realname, flags, passwd_paths, npasswd_paths)
+
+ /* helper function to make a directory, just like mkdir -p.
+ * Can't use system() because the child shell would end up trying
+@@ -117,6 +119,42 @@ mkdir_p(char *path) {
+ (void) mkdir(path, 0755);
+ }
+
++static int
++build_passwd_paths(const char **paths)
++{
++ int np = 0;
++
++ if (pseudo_chroot) {
++ if (paths) {
++ paths[np] = pseudo_chroot;
++ }
++ ++np;
++ }
++ if (pseudo_passwd) {
++ const char *cp = pseudo_passwd;
++ const char *next = strchr(cp, ':');
++ while (next) {
++ if (paths) {
++ paths[np] = strndup(cp, next-cp);
++ }
++ ++np;
++ cp = next+1;
++ next = strchr(cp, ':');
++ }
++ if (paths) {
++ paths[np] = strdup(cp);
++ }
++ ++np;
++ }
++ if (PSEUDO_PASSWD_FALLBACK) {
++ if (paths) {
++ paths[np] = PSEUDO_PASSWD_FALLBACK;
++ }
++ ++np;
++ }
++ return np;
++}
++
+ void
+ pseudo_init_client(void) {
+ char *env;
+@@ -329,6 +367,16 @@ pseudo_init_client(void) {
+ }
+ free(env);
+
++ npasswd_paths = build_passwd_paths(NULL);
++ if (npasswd_paths) {
++ passwd_paths = malloc(npasswd_paths * sizeof(*passwd_paths));
++ if (!passwd_paths) {
++ pseudo_diag("couldn't allocate space for passwd paths.\n");
++ exit(1);
++ }
++ build_passwd_paths(passwd_paths);
++ }
++
+ pseudo_inited = 1;
+ }
+ if (!pseudo_disabled)
+--
+1.8.5.5
+
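
The patch above makes PSEUDO_PASSWD a PATH-style, colon-separated list and builds the resulting search array in two passes over one helper: a counting pass with a NULL output array, then a filling pass once the array has been allocated. The same two-pass split, reduced to just the list handling (hypothetical names, no pseudo internals):

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Split a colon-separated list.  With out == NULL this only counts the
     * entries; with a buffer it also fills it, mirroring the two calls to
     * build_passwd_paths() in the patch above. */
    static int split_colon_list(const char *list, char **out)
    {
        int n = 0;
        const char *cp = list;
        const char *next;

        while ((next = strchr(cp, ':')) != NULL) {
            if (out)
                out[n] = strndup(cp, (size_t)(next - cp));
            ++n;
            cp = next + 1;
        }
        if (out)
            out[n] = strdup(cp);
        return n + 1;
    }

    int main(void)
    {
        const char *passwd_path = "/work/rootfs/etc:/fallback/etc";
        int n = split_colon_list(passwd_path, NULL);      /* counting pass */
        char **dirs = malloc((size_t)n * sizeof(*dirs));

        if (!dirs)
            return 1;
        split_colon_list(passwd_path, dirs);              /* filling pass  */

        for (int i = 0; i < n; ++i) {
            printf("search dir %d: %s\n", i, dirs[i]);
            free(dirs[i]);
        }
        free(dirs);
        return 0;
    }
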
diff --git a/meta/recipes-devtools/pseudo/pseudo_1.5.1.bb b/meta/recipes-devtools/pseudo/pseudo_1.5.1.bb
deleted file mode 100644
index 8d562ecbf7..0000000000
--- a/meta/recipes-devtools/pseudo/pseudo_1.5.1.bb
+++ /dev/null
@@ -1,20 +0,0 @@
-require pseudo.inc
-
-PR = "r5"
-
-SRC_URI = " \
- http://www.yoctoproject.org/downloads/${BPN}/${BPN}-${PV}.tar.bz2 \
- file://0001-pseudo_has_unload-add-function.patch \
- file://shutdownping.patch \
- file://pseudo-1.5.1-install-directory-mode.patch \
- file://pseudo-fchmodat-permissions.patch \
-"
-
-SRC_URI_append_class-nativesdk = " file://symver.patch"
-
-SRC_URI_append_class-native = " file://symver.patch"
-
-SRC_URI[md5sum] = "5ec67c7bff5fe68c56de500859c19172"
-SRC_URI[sha256sum] = "3b896f592f4d568569bd02323fad2d6b8c398e16ca36ee5a8947d2ff6c1d3d52"
-
-PSEUDO_EXTRA_OPTS ?= "--enable-force-async"
diff --git a/meta/recipes-devtools/pseudo/pseudo_1.6.1.bb b/meta/recipes-devtools/pseudo/pseudo_1.6.1.bb
deleted file mode 100644
index 2dff823b14..0000000000
--- a/meta/recipes-devtools/pseudo/pseudo_1.6.1.bb
+++ /dev/null
@@ -1,10 +0,0 @@
-require pseudo.inc
-
-SRC_URI = " \
- http://www.yoctoproject.org/downloads/${BPN}/${BPN}-${PV}.tar.bz2 \
-"
-
-SRC_URI[md5sum] = "c19e4811635d12f2a923e47061c7d336"
-SRC_URI[sha256sum] = "de9cc755b786bfbe2d416d35fab340f2b8ef9904cb2801be3092eeb7446a7c37"
-
-PSEUDO_EXTRA_OPTS ?= "--enable-force-async"
diff --git a/meta/recipes-devtools/pseudo/pseudo_1.6.2.bb b/meta/recipes-devtools/pseudo/pseudo_1.6.2.bb
new file mode 100644
index 0000000000..261c8d3abd
--- /dev/null
+++ b/meta/recipes-devtools/pseudo/pseudo_1.6.2.bb
@@ -0,0 +1,22 @@
+require pseudo.inc
+
+SRC_URI = " \
+ http://www.yoctoproject.org/downloads/${BPN}/${BPN}-${PV}.tar.bz2 \
+ file://0001-pseudo_client.c-protect-pwd_lck-against-magic.patch \
+ file://0002-pseudo_util-modify-interface-to-pseudo_etc_file.patch \
+ file://0003-pseudo_client.c-support-multiple-directories-in-PSEU.patch \
+ file://fallback-passwd \
+ file://fallback-group \
+"
+
+SRC_URI[md5sum] = "4d7b4f9d1b4aafa680ce94a5a9a52f1f"
+SRC_URI[sha256sum] = "c72be92689511ced7c419149c6aaa1b1a9e4dfc6409d1f16ab72cc35bc1e376a"
+
+PSEUDO_EXTRA_OPTS ?= "--enable-force-async --without-passwd-fallback"
+
+do_install_append_class-native () {
+ install -d ${D}${sysconfdir}
+ # The fallback files should never be modified
+ install -m 444 ${WORKDIR}/fallback-passwd ${D}${sysconfdir}/passwd
+ install -m 444 ${WORKDIR}/fallback-group ${D}${sysconfdir}/group
+}
diff --git a/meta/recipes-devtools/pseudo/pseudo_git.bb b/meta/recipes-devtools/pseudo/pseudo_git.bb
index d599cfb2fd..8c6f06ab46 100644
--- a/meta/recipes-devtools/pseudo/pseudo_git.bb
+++ b/meta/recipes-devtools/pseudo/pseudo_git.bb
@@ -1,7 +1,7 @@
require pseudo.inc
-SRCREV = "2765805641f09109029b88b38f27256dc1b52ba3"
-PV = "1.6.1+git${SRCPV}"
+SRCREV = "0dc29e7c67f25d0978a7da5cd8965514912c5b36"
+PV = "1.6.2+git${SRCPV}"
DEFAULT_PREFERENCE = "-1"
diff --git a/meta/recipes-devtools/python/fix-path.inc b/meta/recipes-devtools/python/fix-path.inc
deleted file mode 100644
index 0738c59ada..0000000000
--- a/meta/recipes-devtools/python/fix-path.inc
+++ /dev/null
@@ -1,22 +0,0 @@
-do_install_append() {
- for i in `find ${D} -name "*.py"` ; do \
- sed -i -e s:${D}::g $i
- done
-
- for i in `find ${D} -name "*.la"` ; do \
- sed -i -e s:${STAGING_LIBDIR}:${libdir}:g $i
- done
-
- if test -e ${D}${bindir} ; then
- for i in ${D}${bindir}/* ; do \
- sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i
- done
- fi
-
- if test -e ${D}${sbindir} ; then
- for i in ${D}${sbindir}/* ; do \
- sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i
- done
- fi
-}
-
diff --git a/meta/recipes-devtools/python/python-2.7-manifest.inc b/meta/recipes-devtools/python/python-2.7-manifest.inc
index a8afe13a79..26e2b2b22c 100644
--- a/meta/recipes-devtools/python/python-2.7-manifest.inc
+++ b/meta/recipes-devtools/python/python-2.7-manifest.inc
@@ -5,14 +5,18 @@
-PROVIDES+="${PN}-2to3 ${PN}-audio ${PN}-bsddb ${PN}-codecs ${PN}-compile ${PN}-compiler ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils ${PN}-distutils-staticdev ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-hotshot ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc ${PN}-zlib "
+PROVIDES+="${PN}-2to3 ${PN}-argparse ${PN}-audio ${PN}-bsddb ${PN}-codecs ${PN}-compile ${PN}-compiler ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils ${PN}-distutils-staticdev ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-hotshot ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc ${PN}-zlib "
-PACKAGES="${PN}-dbg ${PN}-2to3 ${PN}-audio ${PN}-bsddb ${PN}-codecs ${PN}-compile ${PN}-compiler ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils-staticdev ${PN}-distutils ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-hotshot ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc ${PN}-zlib ${PN}-modules"
+PACKAGES="${PN}-dbg ${PN}-2to3 ${PN}-argparse ${PN}-audio ${PN}-bsddb ${PN}-codecs ${PN}-compile ${PN}-compiler ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils-staticdev ${PN}-distutils ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-hotshot ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc ${PN}-zlib ${PN}-modules"
SUMMARY_${PN}-2to3="Python automated Python 2 to 3 code translator"
RDEPENDS_${PN}-2to3="${PN}-core"
FILES_${PN}-2to3="${bindir}/2to3 ${libdir}/python2.7/lib2to3 "
+SUMMARY_${PN}-argparse="Python command line argument parser"
+RDEPENDS_${PN}-argparse="${PN}-core ${PN}-codecs ${PN}-textutils"
+FILES_${PN}-argparse="${libdir}/python2.7/argparse.* "
+
SUMMARY_${PN}-audio="Python Audio Handling"
RDEPENDS_${PN}-audio="${PN}-core"
FILES_${PN}-audio="${libdir}/python2.7/wave.* ${libdir}/python2.7/chunk.* ${libdir}/python2.7/sndhdr.* ${libdir}/python2.7/lib-dynload/ossaudiodev.so ${libdir}/python2.7/lib-dynload/audioop.so ${libdir}/python2.7/audiodev.* ${libdir}/python2.7/sunaudio.* ${libdir}/python2.7/sunau.* ${libdir}/python2.7/toaiff.* "
@@ -122,11 +126,11 @@ RDEPENDS_${PN}-importlib="${PN}-core"
FILES_${PN}-importlib="${libdir}/python2.7/importlib "
SUMMARY_${PN}-io="Python low-level I/O"
-RDEPENDS_${PN}-io="${PN}-core ${PN}-math ${PN}-textutils"
+RDEPENDS_${PN}-io="${PN}-core ${PN}-math ${PN}-textutils ${PN}-netclient"
FILES_${PN}-io="${libdir}/python2.7/lib-dynload/_socket.so ${libdir}/python2.7/lib-dynload/_io.so ${libdir}/python2.7/lib-dynload/_ssl.so ${libdir}/python2.7/lib-dynload/select.so ${libdir}/python2.7/lib-dynload/termios.so ${libdir}/python2.7/lib-dynload/cStringIO.so ${libdir}/python2.7/pipes.* ${libdir}/python2.7/socket.* ${libdir}/python2.7/ssl.* ${libdir}/python2.7/tempfile.* ${libdir}/python2.7/StringIO.* ${libdir}/python2.7/io.* ${libdir}/python2.7/_pyio.* "
SUMMARY_${PN}-json="Python JSON support"
-RDEPENDS_${PN}-json="${PN}-core ${PN}-math ${PN}-re"
+RDEPENDS_${PN}-json="${PN}-core ${PN}-math ${PN}-re ${PN}-codecs"
FILES_${PN}-json="${libdir}/python2.7/json ${libdir}/python2.7/lib-dynload/_json.so "
SUMMARY_${PN}-lang="Python low-level language support"
@@ -274,7 +278,7 @@ RDEPENDS_${PN}-zlib="${PN}-core"
FILES_${PN}-zlib="${libdir}/python2.7/lib-dynload/zlib.so "
SUMMARY_${PN}-modules="All Python modules"
-RDEPENDS_${PN}-modules="${PN}-2to3 ${PN}-audio ${PN}-bsddb ${PN}-codecs ${PN}-compile ${PN}-compiler ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-difflib ${PN}-distutils ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-hotshot ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc ${PN}-zlib "
+RDEPENDS_${PN}-modules="${PN}-2to3 ${PN}-argparse ${PN}-audio ${PN}-bsddb ${PN}-codecs ${PN}-compile ${PN}-compiler ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-difflib ${PN}-distutils ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-hotshot ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc ${PN}-zlib "
ALLOW_EMPTY_${PN}-modules = "1"
diff --git a/meta/recipes-devtools/python/python-3.3-manifest.inc b/meta/recipes-devtools/python/python-3.3-manifest.inc
index 46a2d441af..61119c687c 100644
--- a/meta/recipes-devtools/python/python-3.3-manifest.inc
+++ b/meta/recipes-devtools/python/python-3.3-manifest.inc
@@ -5,9 +5,9 @@
-PROVIDES+="${PN}-2to3 ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils ${PN}-distutils-staticdev ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc "
+PROVIDES+="${PN}-2to3 ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils ${PN}-distutils-staticdev ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-reprlib ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc "
-PACKAGES="${PN}-dbg ${PN}-2to3 ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils-staticdev ${PN}-distutils ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc ${PN}-modules"
+PACKAGES="${PN}-dbg ${PN}-2to3 ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils-staticdev ${PN}-distutils ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-reprlib ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc ${PN}-modules"
SUMMARY_${PN}-2to3="Python automated Python 2 to 3 code translator"
RDEPENDS_${PN}-2to3="${PN}-core"
@@ -30,8 +30,8 @@ RDEPENDS_${PN}-compression="${PN}-core ${PN}-codecs"
FILES_${PN}-compression="${libdir}/python3.3/gzip.* ${libdir}/python3.3/zipfile.* ${libdir}/python3.3/tarfile.* ${libdir}/python3.3/lib-dynload/bz2.*.so "
SUMMARY_${PN}-core="Python interpreter and core modules"
-RDEPENDS_${PN}-core="${PN}-lang ${PN}-re"
-FILES_${PN}-core="${libdir}/python3.3/__future__.* ${libdir}/python3.3/_abcoll.* ${libdir}/python3.3/abc.* ${libdir}/python3.3/copy.* ${libdir}/python3.3/copy_reg.* ${libdir}/python3.3/ConfigParser.* ${libdir}/python3.3/genericpath.* ${libdir}/python3.3/getopt.* ${libdir}/python3.3/linecache.* ${libdir}/python3.3/new.* ${libdir}/python3.3/os.* ${libdir}/python3.3/posixpath.* ${libdir}/python3.3/struct.* ${libdir}/python3.3/warnings.* ${libdir}/python3.3/site.* ${libdir}/python3.3/stat.* ${libdir}/python3.3/UserDict.* ${libdir}/python3.3/UserList.* ${libdir}/python3.3/UserString.* ${libdir}/python3.3/lib-dynload/binascii.*.so ${libdir}/python3.3/lib-dynload/_struct.*.so ${libdir}/python3.3/lib-dynload/time.*.so ${libdir}/python3.3/lib-dynload/xreadlines.*.so ${libdir}/python3.3/types.* ${libdir}/python3.3/platform.* ${bindir}/python* ${libdir}/python3.3/_weakrefset.* ${libdir}/python3.3/sysconfig.* ${libdir}/python3.3/config/Makefile ${includedir}/python${PYTHON_MAJMIN}/pyconfig*.h ${libdir}/python${PYTHON_MAJMIN}/collections ${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py "
+RDEPENDS_${PN}-core="${PN}-lang ${PN}-re ${PN}-reprlib ${PN}-codecs ${PN}-io ${PN}-math"
+FILES_${PN}-core="${libdir}/python3.3/__future__.* ${libdir}/python3.3/_abcoll.* ${libdir}/python3.3/abc.* ${libdir}/python3.3/copy.* ${libdir}/python3.3/copyreg.* ${libdir}/python3.3/ConfigParser.* ${libdir}/python3.3/genericpath.* ${libdir}/python3.3/getopt.* ${libdir}/python3.3/linecache.* ${libdir}/python3.3/new.* ${libdir}/python3.3/os.* ${libdir}/python3.3/posixpath.* ${libdir}/python3.3/struct.* ${libdir}/python3.3/warnings.* ${libdir}/python3.3/site.* ${libdir}/python3.3/stat.* ${libdir}/python3.3/UserDict.* ${libdir}/python3.3/UserList.* ${libdir}/python3.3/UserString.* ${libdir}/python3.3/lib-dynload/binascii.*.so ${libdir}/python3.3/lib-dynload/_struct.*.so ${libdir}/python3.3/lib-dynload/time.*.so ${libdir}/python3.3/lib-dynload/xreadlines.*.so ${libdir}/python3.3/types.* ${libdir}/python3.3/platform.* ${bindir}/python* ${libdir}/python3.3/_weakrefset.* ${libdir}/python3.3/sysconfig.* ${libdir}/python3.3/_sysconfigdata.* ${libdir}/python3.3/config/Makefile ${includedir}/python${PYTHON_MAJMIN}/pyconfig*.h ${libdir}/python${PYTHON_MAJMIN}/collections ${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py "
SUMMARY_${PN}-crypt="Python basic cryptographic and hashing support"
RDEPENDS_${PN}-crypt="${PN}-core"
@@ -185,6 +185,10 @@ SUMMARY_${PN}-readline="Python readline support"
RDEPENDS_${PN}-readline="${PN}-core"
FILES_${PN}-readline="${libdir}/python3.3/lib-dynload/readline.*.so ${libdir}/python3.3/rlcompleter.* "
+SUMMARY_${PN}-reprlib="Python alternate repr() implementation"
+RDEPENDS_${PN}-reprlib="${PN}-core"
+FILES_${PN}-reprlib="${libdir}/python3.3/reprlib.py "
+
SUMMARY_${PN}-resource="Python resource control interface"
RDEPENDS_${PN}-resource="${PN}-core"
FILES_${PN}-resource="${libdir}/python3.3/lib-dynload/resource.*.so "
@@ -258,7 +262,7 @@ RDEPENDS_${PN}-xmlrpc="${PN}-core ${PN}-xml ${PN}-netserver ${PN}-lang"
FILES_${PN}-xmlrpc="${libdir}/python3.3/xmlrpclib.* ${libdir}/python3.3/SimpleXMLRPCServer.* ${libdir}/python3.3/DocXMLRPCServer.* ${libdir}/python3.3/xmlrpc "
SUMMARY_${PN}-modules="All Python modules"
-RDEPENDS_${PN}-modules="${PN}-2to3 ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-difflib ${PN}-distutils ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc "
+RDEPENDS_${PN}-modules="${PN}-2to3 ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-difflib ${PN}-distutils ${PN}-doctest ${PN}-elementtree ${PN}-email ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-reprlib ${PN}-resource ${PN}-robotparser ${PN}-shell ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc "
ALLOW_EMPTY_${PN}-modules = "1"
diff --git a/meta/recipes-devtools/python/python-argparse_1.2.1.bb b/meta/recipes-devtools/python/python-argparse_1.2.1.bb
deleted file mode 100644
index b2850a3831..0000000000
--- a/meta/recipes-devtools/python/python-argparse_1.2.1.bb
+++ /dev/null
@@ -1,17 +0,0 @@
-SUMMARY = "Python command-line parsing library"
-SECTION = "devel/python"
-LICENSE = "PSF"
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=09d08bb5b7047e2688ea3faad6408aa8"
-SRCNAME = "argparse"
-PR = "r4"
-RDEPENDS_${PN} += "python-codecs python-textutils"
-
-SRC_URI = "http://argparse.googlecode.com/files/${SRCNAME}-${PV}.tar.gz"
-SRC_URI[md5sum] = "2fbef8cb61e506c706957ab6e135840c"
-SRC_URI[sha256sum] = "ddaf4b0a618335a32b6664d4ae038a1de8fbada3b25033f9021510ed2b3941a4"
-
-S = "${WORKDIR}/${SRCNAME}-${PV}"
-
-inherit setuptools
-
-BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/python/python-native_2.7.3.bb b/meta/recipes-devtools/python/python-native_2.7.3.bb
index 827654dfa2..e55f5feac2 100644
--- a/meta/recipes-devtools/python/python-native_2.7.3.bb
+++ b/meta/recipes-devtools/python/python-native_2.7.3.bb
@@ -58,4 +58,8 @@ do_install() {
# (these often end up too long for the #! parser in the kernel as the
# buffer is 128 bytes long).
ln -s python-native/python ${D}${bindir}/nativepython
+
+ # We don't want modules in ~/.local being used in preference to those
+ # installed in the native sysroot, so disable user site support.
+ sed -i -e 's,^\(ENABLE_USER_SITE = \).*,\1False,' ${D}${libdir}/python${PYTHON_MAJMIN}/site.py
}
diff --git a/meta/recipes-devtools/python/python-numpy/mips64/_numpyconfig.h b/meta/recipes-devtools/python/python-numpy/mips64/_numpyconfig.h
new file mode 100644
index 0000000000..be57ac27bf
--- /dev/null
+++ b/meta/recipes-devtools/python/python-numpy/mips64/_numpyconfig.h
@@ -0,0 +1,30 @@
+#define NPY_HAVE_ENDIAN_H 1
+#define NPY_SIZEOF_SHORT SIZEOF_SHORT
+#define NPY_SIZEOF_INT SIZEOF_INT
+#define NPY_SIZEOF_LONG SIZEOF_LONG
+#define NPY_SIZEOF_FLOAT 4
+#define NPY_SIZEOF_COMPLEX_FLOAT 8
+#define NPY_SIZEOF_DOUBLE 8
+#define NPY_SIZEOF_COMPLEX_DOUBLE 16
+#define NPY_SIZEOF_LONGDOUBLE 16
+#define NPY_SIZEOF_COMPLEX_LONGDOUBLE 32
+#define NPY_SIZEOF_PY_INTPTR_T 8
+#define NPY_SIZEOF_PY_LONG_LONG 8
+#define NPY_SIZEOF_LONGLONG 8
+#define NPY_NO_SMP 0
+#define NPY_HAVE_DECL_ISNAN
+#define NPY_HAVE_DECL_ISINF
+#define NPY_HAVE_DECL_ISFINITE
+#define NPY_HAVE_DECL_SIGNBIT
+#define NPY_USE_C99_COMPLEX 1
+#define NPY_HAVE_COMPLEX_DOUBLE 1
+#define NPY_HAVE_COMPLEX_FLOAT 1
+#define NPY_HAVE_COMPLEX_LONG_DOUBLE 1
+#define NPY_USE_C99_FORMATS 1
+#define NPY_VISIBILITY_HIDDEN __attribute__((visibility("hidden")))
+#define NPY_ABI_VERSION 0x01000009
+#define NPY_API_VERSION 0x00000007
+
+#ifndef __STDC_FORMAT_MACROS
+#define __STDC_FORMAT_MACROS 1
+#endif
diff --git a/meta/recipes-devtools/python/python-numpy/mips64/config.h b/meta/recipes-devtools/python/python-numpy/mips64/config.h
new file mode 100644
index 0000000000..c30b868f2f
--- /dev/null
+++ b/meta/recipes-devtools/python/python-numpy/mips64/config.h
@@ -0,0 +1,139 @@
+#define HAVE_ENDIAN_H 1
+#define SIZEOF_PY_INTPTR_T 8
+#define SIZEOF_PY_LONG_LONG 8
+#define MATHLIB m
+#define HAVE_SIN 1
+#define HAVE_COS 1
+#define HAVE_TAN 1
+#define HAVE_SINH 1
+#define HAVE_COSH 1
+#define HAVE_TANH 1
+#define HAVE_FABS 1
+#define HAVE_FLOOR 1
+#define HAVE_CEIL 1
+#define HAVE_SQRT 1
+#define HAVE_LOG10 1
+#define HAVE_LOG 1
+#define HAVE_EXP 1
+#define HAVE_ASIN 1
+#define HAVE_ACOS 1
+#define HAVE_ATAN 1
+#define HAVE_FMOD 1
+#define HAVE_MODF 1
+#define HAVE_FREXP 1
+#define HAVE_LDEXP 1
+#define HAVE_RINT 1
+#define HAVE_TRUNC 1
+#define HAVE_EXP2 1
+#define HAVE_LOG2 1
+#define HAVE_ATAN2 1
+#define HAVE_POW 1
+#define HAVE_NEXTAFTER 1
+#define HAVE_SINF 1
+#define HAVE_COSF 1
+#define HAVE_TANF 1
+#define HAVE_SINHF 1
+#define HAVE_COSHF 1
+#define HAVE_TANHF 1
+#define HAVE_FABSF 1
+#define HAVE_FLOORF 1
+#define HAVE_CEILF 1
+#define HAVE_RINTF 1
+#define HAVE_TRUNCF 1
+#define HAVE_SQRTF 1
+#define HAVE_LOG10F 1
+#define HAVE_LOGF 1
+#define HAVE_LOG1PF 1
+#define HAVE_EXPF 1
+#define HAVE_EXPM1F 1
+#define HAVE_ASINF 1
+#define HAVE_ACOSF 1
+#define HAVE_ATANF 1
+#define HAVE_ASINHF 1
+#define HAVE_ACOSHF 1
+#define HAVE_ATANHF 1
+#define HAVE_HYPOTF 1
+#define HAVE_ATAN2F 1
+#define HAVE_POWF 1
+#define HAVE_FMODF 1
+#define HAVE_MODFF 1
+#define HAVE_FREXPF 1
+#define HAVE_LDEXPF 1
+#define HAVE_EXP2F 1
+#define HAVE_LOG2F 1
+#define HAVE_COPYSIGNF 1
+#define HAVE_NEXTAFTERF 1
+#define HAVE_SINL 1
+#define HAVE_COSL 1
+#define HAVE_TANL 1
+#define HAVE_SINHL 1
+#define HAVE_COSHL 1
+#define HAVE_TANHL 1
+#define HAVE_FABSL 1
+#define HAVE_FLOORL 1
+#define HAVE_CEILL 1
+#define HAVE_RINTL 1
+#define HAVE_TRUNCL 1
+#define HAVE_SQRTL 1
+#define HAVE_LOG10L 1
+#define HAVE_LOGL 1
+#define HAVE_LOG1PL 1
+#define HAVE_EXPL 1
+#define HAVE_EXPM1L 1
+#define HAVE_ASINL 1
+#define HAVE_ACOSL 1
+#define HAVE_ATANL 1
+#define HAVE_ASINHL 1
+#define HAVE_ACOSHL 1
+#define HAVE_ATANHL 1
+#define HAVE_HYPOTL 1
+#define HAVE_ATAN2L 1
+#define HAVE_POWL 1
+#define HAVE_FMODL 1
+#define HAVE_MODFL 1
+#define HAVE_FREXPL 1
+#define HAVE_LDEXPL 1
+#define HAVE_EXP2L 1
+#define HAVE_LOG2L 1
+#define HAVE_COPYSIGNL 1
+#define HAVE_NEXTAFTERL 1
+#define HAVE_DECL_SIGNBIT
+#define HAVE_COMPLEX_H 1
+#define HAVE_CREAL 1
+#define HAVE_CIMAG 1
+#define HAVE_CABS 1
+#define HAVE_CARG 1
+#define HAVE_CEXP 1
+#define HAVE_CSQRT 1
+#define HAVE_CLOG 1
+#define HAVE_CCOS 1
+#define HAVE_CSIN 1
+#define HAVE_CPOW 1
+#define HAVE_CREALF 1
+#define HAVE_CIMAGF 1
+#define HAVE_CABSF 1
+#define HAVE_CARGF 1
+#define HAVE_CEXPF 1
+#define HAVE_CSQRTF 1
+#define HAVE_CLOGF 1
+#define HAVE_CCOSF 1
+#define HAVE_CSINF 1
+#define HAVE_CPOWF 1
+#define HAVE_CREALL 1
+#define HAVE_CIMAGL 1
+#define HAVE_CABSL 1
+#define HAVE_CARGL 1
+#define HAVE_CEXPL 1
+#define HAVE_CSQRTL 1
+#define HAVE_CLOGL 1
+#define HAVE_CCOSL 1
+#define HAVE_CSINL 1
+#define HAVE_CPOWL 1
+#define HAVE_LDOUBLE_IEEE_QUAD_LE 1
+#ifndef __cplusplus
+/* #undef inline */
+#endif
+
+#ifndef _NPY_NPY_CONFIG_H_
+#error config.h should never be included directly, include npy_config.h instead
+#endif
diff --git a/meta/recipes-devtools/python/python-numpy_1.7.0.bb b/meta/recipes-devtools/python/python-numpy_1.7.0.bb
index 903ec7ba9c..6cb14c9432 100644
--- a/meta/recipes-devtools/python/python-numpy_1.7.0.bb
+++ b/meta/recipes-devtools/python/python-numpy_1.7.0.bb
@@ -46,6 +46,10 @@ CONFIGFILESURI_powerpc64 = " \
file://config.h \
file://_numpyconfig.h \
"
+CONFIGFILESURI_mips64 = " \
+ file://config.h \
+ file://_numpyconfig.h \
+"
S = "${WORKDIR}/numpy-${PV}"
diff --git a/meta/recipes-devtools/python/python-pycurl_7.19.3.1.bb b/meta/recipes-devtools/python/python-pycurl_7.19.5.bb
index 02d0ae9be0..e4619c740e 100644
--- a/meta/recipes-devtools/python/python-pycurl_7.19.3.1.bb
+++ b/meta/recipes-devtools/python/python-pycurl_7.19.5.bb
@@ -2,7 +2,7 @@ SUMMARY = "Python bindings for libcurl"
HOMEPAGE = "http://pycurl.sourceforge.net/"
SECTION = "devel/python"
LICENSE = "LGPLv2.1+ | MIT"
-LIC_FILES_CHKSUM = "file://README.rst;beginline=148;endline=163;md5=57e5ab0c0f964533fc59d93dec5695bb \
+LIC_FILES_CHKSUM = "file://README.rst;beginline=166;endline=181;md5=57e5ab0c0f964533fc59d93dec5695bb \
file://COPYING-LGPL;md5=3579a9fd0221d49a237aaa33492f988c \
file://COPYING-MIT;md5=e8200955c773b2a0fd6cea36ea5e87be"
@@ -15,8 +15,8 @@ SRC_URI = "\
file://no-static-link.patch \
"
-SRC_URI[archive.md5sum] = "6df8fa7fe8b680d93248da1f8d4fcd12"
-SRC_URI[archive.sha256sum] = "c0d673fe99a9de07239eabe77c798f1b043f60c02afaec1430ceaf59d7501a4f"
+SRC_URI[archive.md5sum] = "47b4eac84118e2606658122104e62072"
+SRC_URI[archive.sha256sum] = "69a0aa7c9dddbfe4cebf4d1f674c490faccf739fc930d85d8990ce2fd0551a43"
S = "${WORKDIR}/${SRCNAME}-${PV}"
inherit distutils
@@ -29,6 +29,11 @@ export STAGING_LIBDIR
BBCLASSEXTEND = "native"
+# Ensure the docstrings are generated, as make clean will remove them
+do_compile_prepend() {
+ ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py docstrings
+}
+
do_install_append() {
rm -rf ${D}${datadir}/share
}
diff --git a/meta/recipes-devtools/python/python-pygobject_2.28.3.bb b/meta/recipes-devtools/python/python-pygobject_2.28.3.bb
index 2e65b3808a..45a0603eb7 100644
--- a/meta/recipes-devtools/python/python-pygobject_2.28.3.bb
+++ b/meta/recipes-devtools/python/python-pygobject_2.28.3.bb
@@ -2,8 +2,8 @@ SUMMARY = "Python GObject bindings"
SECTION = "devel/python"
LICENSE = "LGPLv2.1"
LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7"
-DEPENDS = "python python-pygobject-native glib-2.0"
-DEPENDS_class-native = "python-native glib-2.0-native"
+DEPENDS = "python python-pygobject-native libffi glib-2.0"
+DEPENDS_class-native = "python-native libffi-native glib-2.0-native"
RDEPENDS_class-native = ""
MAJ_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
diff --git a/meta/recipes-devtools/python/python-pygtk/fix-pygtk-2.0.pc.patch b/meta/recipes-devtools/python/python-pygtk/fix-pygtk-2.0.pc.patch
new file mode 100644
index 0000000000..b6156540bd
--- /dev/null
+++ b/meta/recipes-devtools/python/python-pygtk/fix-pygtk-2.0.pc.patch
@@ -0,0 +1,13 @@
+Upstream-Status: Inappropriate [configuration]
+
+Index: pygtk-2.24.0/pygtk-2.0.pc.in
+===================================================================
+--- pygtk-2.24.0.orig/pygtk-2.0.pc.in
++++ pygtk-2.24.0/pygtk-2.0.pc.in
+@@ -1,5 +1,6 @@
+ prefix=@prefix@
+ exec_prefix=@exec_prefix@
++libdir=@libdir@
+ includedir=@includedir@
+ datarootdir=@datarootdir@
+ datadir=@datadir@
diff --git a/meta/recipes-devtools/python/python-pygtk_2.24.0.bb b/meta/recipes-devtools/python/python-pygtk_2.24.0.bb
index 8fc194f5e6..053f37fcbe 100644
--- a/meta/recipes-devtools/python/python-pygtk_2.24.0.bb
+++ b/meta/recipes-devtools/python/python-pygtk_2.24.0.bb
@@ -15,6 +15,7 @@ SRC_URI = "ftp://ftp.gnome.org/pub/gnome/sources/pygtk/2.24/${SRCNAME}-${PV}.tar
file://fix-gtkunixprint.patch \
file://prevent_to_get_display_during_import.patch \
file://nodocs.patch \
+ file://fix-pygtk-2.0.pc.patch \
file://acinclude.m4 \
file://update-dependences-of-defs.c.patch"
@@ -39,17 +40,31 @@ do_configure_prepend() {
sed -i 's:tests docs:tests:' ${S}/Makefile.am
}
-# dirty fix #1: remove dependency on python-pygobject-dev
+# dirty fix #2: fix build system paths leaking in
do_install_append() {
- find ${D} -name "*.la"|xargs rm -f
- rm -f ${D}/${bindir}/pygtk-codegen-2.0
- rm -rf ${D}/${libdir}/pkgconfig
+ for i in `find ${D} -name "*.py"` ; do \
+ sed -i -e s:${D}::g $i
+ done
+
+ for i in `find ${D} -name "*.la"` ; do \
+ sed -i -e s:${STAGING_LIBDIR}:${libdir}:g $i
+ done
+
+ if test -e ${D}${bindir} ; then
+ for i in ${D}${bindir}/* ; do \
+ sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i
+ done
+ fi
+
+ if test -e ${D}${sbindir} ; then
+ for i in ${D}${sbindir}/* ; do \
+ sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i
+ done
+ fi
+
sed -i -e '1s|^#!.*python|#!/usr/bin/env python|' ${D}${bindir}/pygtk-demo
}
-# dirty fix #2: fix build system paths leaking in
-require fix-path.inc
-
PACKAGES =+ "${PN}-demo"
FILES_${PN}-demo = " ${bindir}/pygtk-demo ${libdir}/pygtk "
RDEPENDS_${PN}-demo = "python-pygtk python-stringold python-lang"
diff --git a/meta/recipes-devtools/python/python-smartpm/smart-add-for-rpm-ignoresize-check.patch b/meta/recipes-devtools/python/python-smartpm/smart-add-for-rpm-ignoresize-check.patch
new file mode 100644
index 0000000000..8a27f2583a
--- /dev/null
+++ b/meta/recipes-devtools/python/python-smartpm/smart-add-for-rpm-ignoresize-check.patch
@@ -0,0 +1,37 @@
+python-smartpm: Add a check for the "rpm-ignoresize" option
+
+The do_rootfs task takes a very long time when the build host has many NFS
+mounts, because lstat() is called on every filesystem mounted on the build
+host during the build.
+The reason for the lstat() is that rpm verifies that enough free disk space
+is available to do the install. However, since the install goes into the
+target rootfs, the amount of free space in the host mounts should not matter.
+Add a check for the "rpm-ignoresize" option; when it is set, smart makes RPM
+skip the disk space check when installing a package.
+
+Upstream-Status: Pending
+
+Signed-off-by: wenlin.kang <wenlin.kang@windriver.com>
+Signed-off-by: Chong Lu <Chong.Lu@windriver.com>
+---
+ smart/backends/rpm/pm.py | 4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/smart/backends/rpm/pm.py b/smart/backends/rpm/pm.py
+index 5da9ee6..f0488ec 100644
+--- a/smart/backends/rpm/pm.py
++++ b/smart/backends/rpm/pm.py
+@@ -241,6 +241,10 @@ class RPMPackageManager(PackageManager):
+ except AttributeError:
+ probfilter |= rpm.RPMPROB_FILTER_IGNOREARCH
+
++ if sysconf.get("rpm-ignoresize", False):
++ probfilter |= rpm.RPMPROB_FILTER_DISKNODES
++ probfilter |= rpm.RPMPROB_FILTER_DISKSPACE
++
+ if force or reinstall:
+ probfilter |= rpm.RPMPROB_FILTER_REPLACEPKG
+ probfilter |= rpm.RPMPROB_FILTER_REPLACEOLDFILES
+--
+1.9.1
+
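A minimal sketch of the mapping the hunk above introduces: smart's "rpm-ignoresize" sysconf option is translated into RPM problem-filter bits. This is illustrative only; build_probfilter and sysconf_get are hypothetical names, while the rpm constants are the ones used in the patch.

    import rpm

    def build_probfilter(sysconf_get, force=False, reinstall=False):
        # Mirrors the logic added to smart/backends/rpm/pm.py above.
        probfilter = rpm.RPMPROB_FILTER_OLDPACKAGE
        if sysconf_get("rpm-ignoresize", False):
            # Installs go into the target rootfs, so free space on the build
            # host's own mounts is irrelevant; skip the disk checks entirely.
            probfilter |= rpm.RPMPROB_FILTER_DISKNODES
            probfilter |= rpm.RPMPROB_FILTER_DISKSPACE
        if force or reinstall:
            probfilter |= rpm.RPMPROB_FILTER_REPLACEPKG
            probfilter |= rpm.RPMPROB_FILTER_REPLACEOLDFILES
        return probfilter
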
diff --git a/meta/recipes-devtools/python/python-smartpm/smart-attempt.patch b/meta/recipes-devtools/python/python-smartpm/smart-attempt.patch
index 113618255f..45f794787c 100644
--- a/meta/recipes-devtools/python/python-smartpm/smart-attempt.patch
+++ b/meta/recipes-devtools/python/python-smartpm/smart-attempt.patch
@@ -1,4 +1,4 @@
-From 7ee23804a06f81476cc2b31a6db11b52d7af764e Mon Sep 17 00:00:00 2001
+From b105e7fe812da3ccaf7155c0fe14c8728b0d39a5 Mon Sep 17 00:00:00 2001
From: Mark Hatle <mark.hatle@windriver.com>
Date: Mon, 20 Jan 2014 14:30:52 +0000
Subject: [PATCH] Add mechanism to attempt install without failing
@@ -13,11 +13,32 @@ Upstream-Status: Pending
Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
+
+For complementary and 'attemptonly' package processing, we should
+make sure a warning rather than an error is reported.
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
---
+ smart.py | 5 +++-
smart/commands/install.py | 5 ++++
smart/transaction.py | 65 +++++++++++++++++++++++++++++++++++------------
- 2 files changed, 54 insertions(+), 16 deletions(-)
+ 3 files changed, 58 insertions(+), 17 deletions(-)
+diff --git a/smart.py b/smart.py
+index c5c7a02..7e7fd34 100755
+--- a/smart.py
++++ b/smart.py
+@@ -179,7 +179,10 @@ def main(argv):
+ if opts and opts.log_level == "debug":
+ import traceback
+ traceback.print_exc()
+- if iface.object:
++ if iface.object and sysconf.has("attempt-install", soft=True):
++ iface.warning(unicode(e))
++ exitcode = 0
++ elif iface.object:
+ iface.error(unicode(e))
+ else:
+ sys.stderr.write(_("error: %s\n") % e)
diff --git a/smart/commands/install.py b/smart/commands/install.py
index 590222c..6ef9682 100644
--- a/smart/commands/install.py
@@ -163,5 +184,5 @@ index 5730a42..e3e61c6 100644
self._remove(pkg, changeset, locked, pending)
elif op is UPGRADE:
--
-1.8.4.2
+1.9.1
diff --git a/meta/recipes-devtools/python/python-smartpm/smart-dflags.patch b/meta/recipes-devtools/python/python-smartpm/smart-dflags.patch
index 3f27262156..531ea51cad 100644
--- a/meta/recipes-devtools/python/python-smartpm/smart-dflags.patch
+++ b/meta/recipes-devtools/python/python-smartpm/smart-dflags.patch
@@ -7,34 +7,39 @@ Upstream-Status: Pending
Signed-off-by: Paul Eggleton <paul.eggleton@linux.intel.com>
-diff --git a/smart/backends/rpm/pm.py b/smart/backends/rpm/pm.py
-index 707a146..aec82e7 100644
---- a/smart/backends/rpm/pm.py
-+++ b/smart/backends/rpm/pm.py
-@@ -106,6 +106,23 @@ class RPMPackageManager(PackageManager):
+[sgw - Added try/catch for rpm4 since it does not have setDFlags() API]
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+
+
+Index: smart-1.4.1/smart/backends/rpm/pm.py
+===================================================================
+--- smart-1.4.1.orig/smart/backends/rpm/pm.py
++++ smart-1.4.1/smart/backends/rpm/pm.py
+@@ -106,6 +106,26 @@ class RPMPackageManager(PackageManager):
flags |= rpm.RPMTRANS_FLAG_TEST
ts.setFlags(flags)
-+ dflags = ts.setDFlags(0)
-+ if sysconf.get("rpm-noupgrade", False):
-+ dflags |= rpm.RPMDEPS_FLAG_NOUPGRADE
-+ if sysconf.get("rpm-norequires", False):
-+ dflags |= rpm.RPMDEPS_FLAG_NOREQUIRES
-+ if sysconf.get("rpm-noconflicts", False):
-+ dflags |= rpm.RPMDEPS_FLAG_NOCONFLICTS
-+ if sysconf.get("rpm-noobsoletes", False):
-+ dflags |= rpm.RPMDEPS_FLAG_NOOBSOLETES
-+ if sysconf.get("rpm-noparentdirs", False):
-+ dflags |= rpm.RPMDEPS_FLAG_NOPARENTDIRS
-+ if sysconf.get("rpm-nolinktos", False):
-+ dflags |= rpm.RPMDEPS_FLAG_NOLINKTOS
-+ if sysconf.get("rpm-nosuggest", False):
-+ dflags |= rpm.RPMDEPS_FLAG_NOSUGGEST
-+ ts.setDFlags(dflags)
++ try:
++ dflags = ts.setDFlags(0)
++ if sysconf.get("rpm-noupgrade", False):
++ dflags |= rpm.RPMDEPS_FLAG_NOUPGRADE
++ if sysconf.get("rpm-norequires", False):
++ dflags |= rpm.RPMDEPS_FLAG_NOREQUIRES
++ if sysconf.get("rpm-noconflicts", False):
++ dflags |= rpm.RPMDEPS_FLAG_NOCONFLICTS
++ if sysconf.get("rpm-noobsoletes", False):
++ dflags |= rpm.RPMDEPS_FLAG_NOOBSOLETES
++ if sysconf.get("rpm-noparentdirs", False):
++ dflags |= rpm.RPMDEPS_FLAG_NOPARENTDIRS
++ if sysconf.get("rpm-nolinktos", False):
++ dflags |= rpm.RPMDEPS_FLAG_NOLINKTOS
++ if sysconf.get("rpm-nosuggest", False):
++ dflags |= rpm.RPMDEPS_FLAG_NOSUGGEST
++ ts.setDFlags(dflags)
++ except AttributeError, ae:
++ pass
+
# Set rpm verbosity level.
levelname = sysconf.get('rpm-log-level')
level = {
---
-1.7.9.5
-
diff --git a/meta/recipes-devtools/python/python-smartpm/smart-rpm4-fixes.patch b/meta/recipes-devtools/python/python-smartpm/smart-rpm4-fixes.patch
new file mode 100644
index 0000000000..708ffe67d3
--- /dev/null
+++ b/meta/recipes-devtools/python/python-smartpm/smart-rpm4-fixes.patch
@@ -0,0 +1,49 @@
+
+This patch checks for rpm5-related functions in order to allow rpm4
+to work correctly. Currently the rpm4 archscore and filter behave
+differently enough that they need to be handled separately.
+
+Upstream-Status: Inappropriate [OE-Core Specific]
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+
+Index: smart-1.4.1/smart/backends/rpm/base.py
+===================================================================
+--- smart-1.4.1.orig/smart/backends/rpm/base.py
++++ smart-1.4.1/smart/backends/rpm/base.py
+@@ -338,10 +338,14 @@ class RPMObsoletes(Depends):
+
+ _SCOREMAP = {}
+ def getArchScore(arch, _sm=_SCOREMAP):
+- if arch not in _sm:
+- score = rpm.archscore(arch)
+- _sm[arch] = score
+- return _sm.get(arch, 0)
++ try:
++ rpm.platformscore(arch)
++ if arch not in _sm:
++ score = rpm.archscore(arch)
++ _sm[arch] = score
++ return _sm.get(arch, 0)
++ except AttributeError:
++ return 1
+
+ # TODO: Embed color into nameprovides and obsoletes relations.
+ _COLORMAP = {"noarch": 0, "x86_64": 2, "ppc64": 2, "s390x": 2, "sparc64": 2}
+Index: smart-1.4.1/smart/backends/rpm/pm.py
+===================================================================
+--- smart-1.4.1.orig/smart/backends/rpm/pm.py
++++ smart-1.4.1/smart/backends/rpm/pm.py
+@@ -235,6 +235,12 @@ class RPMPackageManager(PackageManager):
+ if sysconf.get("rpm-order"):
+ ts.order()
+ probfilter = rpm.RPMPROB_FILTER_OLDPACKAGE
++ try:
++ # Test for RPM5 function
++ rpm.platformscore("")
++ except AttributeError:
++ probfilter |= rpm.RPMPROB_FILTER_IGNOREARCH
++
+ if force or reinstall:
+ probfilter |= rpm.RPMPROB_FILTER_REPLACEPKG
+ probfilter |= rpm.RPMPROB_FILTER_REPLACEOLDFILES
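Both hunks above rely on the same probe: rpm5's Python bindings expose rpm.platformscore(), rpm4's do not, so an AttributeError selects the rpm4 code path. A condensed sketch of the archscore side follows; get_arch_score is a hypothetical standalone name for the getArchScore logic in the patch.

    import rpm

    def get_arch_score(arch, _cache={}):
        try:
            rpm.platformscore(arch)      # present only with rpm5
        except AttributeError:
            return 1                     # rpm4: treat every arch as acceptable
        if arch not in _cache:
            _cache[arch] = rpm.archscore(arch)
        return _cache.get(arch, 0)
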
diff --git a/meta/recipes-devtools/python/python-smartpm_1.4.1.bb b/meta/recipes-devtools/python/python-smartpm_1.4.1.bb
index 09715794b3..c75f10fbf3 100644
--- a/meta/recipes-devtools/python/python-smartpm_1.4.1.bb
+++ b/meta/recipes-devtools/python/python-smartpm_1.4.1.bb
@@ -8,7 +8,7 @@ SECTION = "devel/python"
LICENSE = "GPLv2"
LIC_FILES_CHKSUM = "file://LICENSE;md5=393a5ca445f6965873eca0259a17f833"
-DEPENDS = "python rpm gettext-native"
+DEPENDS = "python rpm gettext-native python-rpm"
PR = "r9"
SRCNAME = "smart"
@@ -32,7 +32,9 @@ SRC_URI = "\
file://smart-config-ignore-all-recommends.patch \
file://smart-attempt.patch \
file://smart-filename-NAME_MAX.patch \
- "
+ file://smart-rpm4-fixes.patch \
+ file://smart-add-for-rpm-ignoresize-check.patch \
+ "
SRC_URI[md5sum] = "573ef32ba177a6b3c4bf7ef04873fcb6"
SRC_URI[sha256sum] = "b1d519ddb43d60f293b065c28870a5d9e8b591cd49e8c68caea48ace91085eba"
diff --git a/meta/recipes-devtools/python/python/python-2.7.3-CVE-2014-7185.patch b/meta/recipes-devtools/python/python/python-2.7.3-CVE-2014-7185.patch
new file mode 100644
index 0000000000..60ef145c7c
--- /dev/null
+++ b/meta/recipes-devtools/python/python/python-2.7.3-CVE-2014-7185.patch
@@ -0,0 +1,75 @@
+From 104eb318283dde5203aa6cf7384287bef181e308 Mon Sep 17 00:00:00 2001
+From: Wenzong Fan <wenzong.fan@windriver.com>
+Date: Wed, 12 Nov 2014 01:58:02 -0500
+Subject: [PATCH] python: fix CVE-2014-7185
+
+Reference: http://bugs.python.org/issue21831
+
+CVE-2014-7185: Integer overflow in bufferobject.c in Python before
+2.7.8 allows context-dependent attackers to obtain sensitive
+information from process memory via a large size and offset in a
+"buffer" function.
+
+Upstream-Status: Backport
+
+Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
+---
+ Lib/test/test_buffer.py | 6 ++++++
+ Misc/NEWS | 3 +++
+ Objects/bufferobject.c | 2 +-
+ 3 files changed, 10 insertions(+), 1 deletion(-)
+
+diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py
+index 6bdc34d..3ac1f8c 100644
+--- a/Lib/test/test_buffer.py
++++ b/Lib/test/test_buffer.py
+@@ -4,6 +4,7 @@ For now, tests just new or changed functionality.
+
+ """
+
++import sys
+ import unittest
+ from test import test_support
+
+@@ -21,6 +22,11 @@ class BufferTests(unittest.TestCase):
+ self.assertEqual(b[start:stop:step],
+ s[start:stop:step])
+
++ def test_large_buffer_size_and_offset(self):
++ data = bytearray('hola mundo')
++ buf = buffer(data, sys.maxsize, sys.maxsize)
++ self.assertEqual(buf[:4096], "")
++
+
+ def test_main():
+ with test_support.check_py3k_warnings(("buffer.. not supported",
+diff --git a/Misc/NEWS b/Misc/NEWS
+index e8778ad..77396c5 100644
+--- a/Misc/NEWS
++++ b/Misc/NEWS
+@@ -1896,6 +1896,9 @@ What's New in Python 2.7 Release Candidate 1?
+ Core and Builtins
+ -----------------
+
++- Issue #21831: Avoid integer overflow when large sizes and offsets are given to
++ the buffer type. CVE-2014-7185.
++
+ - Issue #8271: during the decoding of an invalid UTF-8 byte sequence, only the
+ start byte and the continuation byte(s) are now considered invalid, instead
+ of the number of bytes specified by the start byte.
+diff --git a/Objects/bufferobject.c b/Objects/bufferobject.c
+index c52f0bc..c542506 100644
+--- a/Objects/bufferobject.c
++++ b/Objects/bufferobject.c
+@@ -88,7 +88,7 @@ get_buf(PyBufferObject *self, void **ptr, Py_ssize_t *size,
+ *size = count;
+ else
+ *size = self->b_size;
+- if (offset + *size > count)
++ if (*size > count - offset)
+ *size = count - offset;
+ }
+ return 1;
+--
+1.7.9.5
+
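The fix replaces "offset + *size > count" with "*size > count - offset" because the addition can wrap around when both operands are near PY_SSIZE_T_MAX. An illustrative arithmetic sketch of that wraparound (pure Python standing in for 64-bit signed C arithmetic; as_ssize64 is a hypothetical helper, and the numbers match the test case added above):

    # Pure-Python model of 64-bit signed (Py_ssize_t) arithmetic.
    def as_ssize64(x):
        x &= (1 << 64) - 1
        return x - (1 << 64) if x >= (1 << 63) else x

    maxsize = 2**63 - 1   # sys.maxsize on a 64-bit build
    count, offset, size = 10, maxsize, maxsize   # buffer(data, sys.maxsize, sys.maxsize)

    # Old check: the addition wraps to a small negative number, the clamp never
    # fires, and the buffer ends up describing memory outside the underlying
    # object (the information leak in the CVE).
    print(as_ssize64(offset + size) > count)     # False -> no clamp

    # New check: no overflow on this path, so the size is clamped as intended.
    print(size > as_ssize64(count - offset))     # True  -> size gets clamped
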
diff --git a/meta/recipes-devtools/python/python/remove-BOM-insection-code.patch b/meta/recipes-devtools/python/python/remove-BOM-insection-code.patch
new file mode 100644
index 0000000000..3462620917
--- /dev/null
+++ b/meta/recipes-devtools/python/python/remove-BOM-insection-code.patch
@@ -0,0 +1,24 @@
+# HG changeset patch
+# User Vinay Sajip <vinay_sajip@yahoo.co.uk>
+# Date 1334583503 -3600
+# Node ID af46a001d5eca99a7ff44ed18256c235c5c3f1d0
+# Parent 33bc53e0aa9ea42c6a6bb077d182e62c8c569aa1
+Issue #14452: remove BOM insertion code.
+
+Backport from https://hg.python.org/cpython/rev/af46a001d5ec.
+
+Upstream-Status: Backport
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+---
+diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py
+--- a/Lib/logging/handlers.py
++++ b/Lib/logging/handlers.py
+@@ -821,8 +821,6 @@ class SysLogHandler(logging.Handler):
+ # Message is a string. Convert to bytes as required by RFC 5424
+ if type(msg) is unicode:
+ msg = msg.encode('utf-8')
+- if codecs:
+- msg = codecs.BOM_UTF8 + msg
+ msg = prio + msg
+ try:
+ if self.unixsocket:
diff --git a/meta/recipes-devtools/python/python3-distribute_0.6.32.bb b/meta/recipes-devtools/python/python3-distribute_0.6.32.bb
index 573096cb1c..0ef3a8762f 100644
--- a/meta/recipes-devtools/python/python3-distribute_0.6.32.bb
+++ b/meta/recipes-devtools/python/python3-distribute_0.6.32.bb
@@ -37,6 +37,8 @@ do_install_append() {
rm ${D}${PYTHON_SITEPACKAGES_DIR}/setuptools.pth
mv ${D}${bindir}/easy_install ${D}${bindir}/easy3_install
echo "./${SRCNAME}-${PV}-py${PYTHON_BASEVERSION}.egg" > ${D}${PYTHON_SITEPACKAGES_DIR}/setuptools.pth
+ sed -i -e '1s|^#!.*python|#!/usr/bin/env python3|' \
+ ${D}${PYTHON_SITEPACKAGES_DIR}/distribute-${PV}-py${PYTHON_BASEVERSION}.egg/setuptools/tests/test_resources.py
}
RDEPENDS_${PN} = "\
diff --git a/meta/recipes-devtools/python/python3/python3-setup.py-no-host-headers-libs.patch b/meta/recipes-devtools/python/python3/python3-setup.py-no-host-headers-libs.patch
new file mode 100644
index 0000000000..2bc8b8c460
--- /dev/null
+++ b/meta/recipes-devtools/python/python3/python3-setup.py-no-host-headers-libs.patch
@@ -0,0 +1,33 @@
+From 43238e1ac13e32984d015c92a5841f3de1fe1d15 Mon Sep 17 00:00:00 2001
+From: Jackie Huang <jackie.huang@windriver.com>
+Date: Tue, 18 Nov 2014 00:07:07 -0500
+Subject: [PATCH] setup.py: no host headers libs
+
+When we are cross-compiling, setup.py should never look in /usr
+or /usr/local to find headers or libraries.
+
+Upstream-Status: Inappropriate [Cross compile specific]
+
+Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
+---
+ setup.py | 3 ---
+ 1 file changed, 3 deletions(-)
+
+diff --git a/setup.py b/setup.py
+index f020b28..e8339cd 100644
+--- a/setup.py
++++ b/setup.py
+@@ -444,10 +444,7 @@ class PyBuildExt(build_ext):
+ if not cross_compiling:
+ add_dir_to_list(self.compiler.library_dirs, os.path.join('/usr/local', sys.lib))
+ add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
+- # only change this for cross builds for 3.3, issues on Mageia
+- if cross_compiling:
+ self.add_gcc_paths()
+- if not cross_compiling:
+ self.add_multiarch_paths()
+
+ # Add paths specified in the environment variables LDFLAGS and
+--
+2.0.0
+
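The intent of the hunk is simply that host header and library directories are only ever added when not cross-compiling. A hedged sketch of that guard; add_host_paths is a hypothetical wrapper, while add_dir_to_list and the directory names come from the setup.py context above.

    import os

    def add_host_paths(compiler, cross_compiling, add_dir_to_list, sys_lib="lib"):
        if cross_compiling:
            return    # never look in /usr or /usr/local for a target build
        add_dir_to_list(compiler.library_dirs, os.path.join("/usr/local", sys_lib))
        add_dir_to_list(compiler.include_dirs, "/usr/local/include")
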
diff --git a/meta/recipes-devtools/python/python3/python3-use-CROSSPYTHONPATH-for-PYTHON_FOR_BUILD.patch b/meta/recipes-devtools/python/python3/python3-use-CROSSPYTHONPATH-for-PYTHON_FOR_BUILD.patch
new file mode 100644
index 0000000000..74490d93a5
--- /dev/null
+++ b/meta/recipes-devtools/python/python3/python3-use-CROSSPYTHONPATH-for-PYTHON_FOR_BUILD.patch
@@ -0,0 +1,28 @@
+From 53ed216d7bf70dd2a925432b6805a701e5fc3e0e Mon Sep 17 00:00:00 2001
+From: Jackie Huang <jackie.huang@windriver.com>
+Date: Mon, 17 Nov 2014 06:44:47 +0000
+Subject: [PATCH] python3 use CROSSPYTHONPATH for PYTHON_FOR_BUILD
+
+Upstream-Status: Inappropriate [Cross compile specific]
+
+Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
+---
+ configure.ac | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/configure.ac b/configure.ac
+index 989baf9..2890c96 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -67,7 +67,7 @@ if test "$cross_compiling" = yes; then
+ AC_MSG_ERROR([python$PACKAGE_VERSION interpreter not found])
+ fi
+ AC_MSG_RESULT($interp)
+- PYTHON_FOR_BUILD='_PYTHON_PROJECT_BASE=$(abs_builddir) _PYTHON_HOST_PLATFORM=$(_PYTHON_HOST_PLATFORM) PYTHONPATH=$(shell test -f pybuilddir.txt && echo $(abs_builddir)/`cat pybuilddir.txt`:)$(srcdir)/Lib:$(srcdir)/Lib/$(PLATDIR) '$interp
++ PYTHON_FOR_BUILD='_PYTHON_PROJECT_BASE=$(abs_builddir) _PYTHON_HOST_PLATFORM=$(_PYTHON_HOST_PLATFORM) PYTHONPATH=$(CROSSPYTHONPATH) '$interp
+ fi
+ elif test "$cross_compiling" = maybe; then
+ AC_MSG_ERROR([Cross compiling required --host=HOST-TUPLE and --build=ARCH])
+--
+2.0.1
+
diff --git a/meta/recipes-devtools/python/python3_3.3.3.bb b/meta/recipes-devtools/python/python3_3.3.3.bb
index a1cb12078f..2c1f63f5b8 100644
--- a/meta/recipes-devtools/python/python3_3.3.3.bb
+++ b/meta/recipes-devtools/python/python3_3.3.3.bb
@@ -37,6 +37,8 @@ SRC_URI += "\
file://sysroot-include-headers.patch \
file://unixccompiler.patch \
file://avoid-ncursesw-include-path.patch \
+ file://python3-use-CROSSPYTHONPATH-for-PYTHON_FOR_BUILD.patch \
+ file://python3-setup.py-no-host-headers-libs.patch \
"
SRC_URI[md5sum] = "f3ebe34d4d8695bf889279b54673e10c"
SRC_URI[sha256sum] = "e526e9b612f623888364d30cc9f3dfc34dcef39065c713bdbcddf47df84d8dcb"
@@ -62,6 +64,7 @@ TARGET_CC_ARCH_append_armv6 = " -D__SOFTFP__"
TARGET_CC_ARCH_append_armv7a = " -D__SOFTFP__"
TARGET_CC_ARCH += "-DNDEBUG -fno-inline"
EXTRA_OEMAKE += "CROSS_COMPILE=yes"
+EXTRA_OECONF += "CROSSPYTHONPATH=${STAGING_LIBDIR_NATIVE}/python${PYTHON_MAJMIN}/lib-dynload/"
# No ctypes option for python 3
PYTHONLSBOPTS = ""
@@ -84,7 +87,7 @@ do_compile() {
cd -
# remove hardcoded ccache, see http://bugs.openembedded.net/show_bug.cgi?id=4144
- sed -i -e s,ccache,'$(CCACHE)', Makefile
+ sed -i -e s,ccache\ ,'$(CCACHE) ', Makefile
# remove any bogus LD_LIBRARY_PATH
sed -i -e s,RUNSHARED=.*,RUNSHARED=, Makefile
@@ -116,8 +119,8 @@ do_compile() {
ARCH=${TARGET_ARCH} \
OPT="${CFLAGS}" libpython3.so
- oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python-native3/pgen \
- HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python-native3/python3 \
+ oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python3-native/pgen \
+ HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python3-native/python3 \
STAGING_LIBDIR=${STAGING_LIBDIR} \
STAGING_INCDIR=${STAGING_INCDIR} \
STAGING_BASELIBDIR=${STAGING_BASELIBDIR} \
@@ -139,9 +142,8 @@ do_install() {
# rerun the build once again with original makefile this time
# run install in a separate step to avoid compile/install race
- oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python-native3/pgen \
- HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python-native3/python3 \
- CROSSPYTHONPATH=${STAGING_LIBDIR_NATIVE}/python${PYTHON_MAJMIN}/lib-dynload/ \
+ oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python3-native/pgen \
+ HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python3-native/python3 \
STAGING_LIBDIR=${STAGING_LIBDIR} \
STAGING_INCDIR=${STAGING_INCDIR} \
STAGING_BASELIBDIR=${STAGING_BASELIBDIR} \
@@ -150,9 +152,8 @@ do_install() {
ARCH=${TARGET_ARCH} \
DESTDIR=${D} LIBDIR=${libdir}
- oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python-native3/pgen \
- HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python-native3/python3 \
- CROSSPYTHONPATH=${STAGING_LIBDIR_NATIVE}/python${PYTHON_MAJMIN}/lib-dynload/ \
+ oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python3-native/pgen \
+ HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python3-native/python3 \
STAGING_LIBDIR=${STAGING_LIBDIR} \
STAGING_INCDIR=${STAGING_INCDIR} \
STAGING_BASELIBDIR=${STAGING_BASELIBDIR} \
diff --git a/meta/recipes-devtools/python/python_2.7.3.bb b/meta/recipes-devtools/python/python_2.7.3.bb
index 58bccedb8f..9ded75b373 100644
--- a/meta/recipes-devtools/python/python_2.7.3.bb
+++ b/meta/recipes-devtools/python/python_2.7.3.bb
@@ -38,6 +38,8 @@ SRC_URI += "\
file://python-2.7.3-CVE-2014-1912.patch \
file://json-flaw-fix.patch \
file://posix_close.patch \
+ file://remove-BOM-insection-code.patch \
+ file://python-2.7.3-CVE-2014-7185.patch \
"
S = "${WORKDIR}/Python-${PV}"
@@ -49,6 +51,9 @@ inherit autotools multilib_header python-dir pythonnative
TARGET_CC_ARCH_append_armv6 = " -D__SOFTFP__"
TARGET_CC_ARCH_append_armv7a = " -D__SOFTFP__"
+# The following is a hack until we drop ac_cv_sizeof_off_t from site files
+EXTRA_OECONF += "${@bb.utils.contains('DISTRO_FEATURES', 'largefile', 'ac_cv_sizeof_off_t=8', '', d)}"
+
do_configure_prepend() {
rm -f ${S}/Makefile.orig
autoreconf -Wcross --verbose --install --force --exclude=autopoint Modules/_ctypes/libffi || bbnote "_ctypes failed to autoreconf"
@@ -75,7 +80,7 @@ do_compile() {
if [ ! -f Makefile.orig ]; then
install -m 0644 Makefile Makefile.orig
fi
- sed -i -e 's,^LDFLAGS=.*,LDFLAGS=-L. -L${STAGING_LIBDIR},g' \
+ sed -i -e 's#^LDFLAGS=.*#LDFLAGS=${LDFLAGS} -L. -L${STAGING_LIBDIR}#g' \
-e 's,libdir=${libdir},libdir=${STAGING_LIBDIR},g' \
-e 's,libexecdir=${libexecdir},libexecdir=${STAGING_DIR_HOST}${libexecdir},g' \
-e 's,^LIBDIR=.*,LIBDIR=${STAGING_LIBDIR},g' \
@@ -156,8 +161,8 @@ require python-${PYTHON_MAJMIN}-manifest.inc
# manual dependency additions
RPROVIDES_${PN}-core = "${PN}"
RRECOMMENDS_${PN}-core = "${PN}-readline"
+RRECOMMENDS_${PN}-core_append_class-nativesdk = " nativesdk-python-modules"
RRECOMMENDS_${PN}-crypt = "openssl"
-RRECOMMENDS_${PN}-crypt_class-nativesdk = "nativesdk-openssl"
# package libpython2
PACKAGES =+ "lib${BPN}2"
diff --git a/meta/recipes-devtools/qemu/qemu.inc b/meta/recipes-devtools/qemu/qemu.inc
index d47a44e222..c9a5d328f9 100644
--- a/meta/recipes-devtools/qemu/qemu.inc
+++ b/meta/recipes-devtools/qemu/qemu.inc
@@ -16,13 +16,14 @@ SRC_URI = "\
file://larger_default_ram_size.patch \
file://disable-grabs.patch \
file://exclude-some-arm-EABI-obsolete-syscalls.patch \
+ file://wacom.patch \
"
SRC_URI_append_class-native = "\
file://fix-libcap-header-issue-on-some-distro.patch \
"
-EXTRA_OECONF += "--target-list=${@get_qemu_target_list(d)} --disable-werror --disable-bluez --with-system-pixman --extra-cflags='${CFLAGS}'"
+EXTRA_OECONF += "--target-list=${@get_qemu_target_list(d)} --disable-werror --disable-bluez --disable-libiscsi --with-system-pixman --extra-cflags='${CFLAGS}'"
EXTRA_OECONF_class-nativesdk = "--target-list=${@get_qemu_target_list(d)} --disable-werror \
"
@@ -86,6 +87,9 @@ do_install_append() {
PACKAGECONFIG ??= "fdt sdl alsa"
PACKAGECONFIG_class-native ??= "fdt alsa"
PACKAGECONFIG_class-nativesdk ??= "fdt sdl"
+NATIVEDEPS = ""
+NATIVEDEPS_class-native = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'libxext-native', '',d)}"
+PACKAGECONFIG[sdl] = "--enable-sdl,--disable-sdl,libsdl ${NATIVEDEPS},"
PACKAGECONFIG[virtfs] = "--enable-virtfs --enable-attr,--disable-virtfs,libcap attr,"
PACKAGECONFIG[aio] = "--enable-linux-aio,--disable-linux-aio,libaio,"
PACKAGECONFIG[xfs] = "--enable-xfsctl,--disable-xfsctl,xfsprogs,"
@@ -100,15 +104,15 @@ PACKAGECONFIG[libcurl] = "--enable-curl,--disable-curl,libcurl,"
PACKAGECONFIG[nss] = "--enable-smartcard-nss,--disable-smartcard-nss,nss,"
PACKAGECONFIG[uuid] = "--enable-uuid,--disable-uuid,util-linux,"
PACKAGECONFIG[curses] = "--enable-curses,--disable-curses,ncurses,"
-PACKAGECONFIG[gtk+] = "--enable-gtk,--disable-gtk,gtk+ libvte,"
+PACKAGECONFIG[gtk+] = "--enable-gtk --enable-vte,--disable-gtk --disable-vte,gtk+ libvte,"
PACKAGECONFIG[libcap-ng] = "--enable-cap-ng,--disable-cap-ng,libcap-ng,"
-PACKAGECONFIG[sdl] = "--enable-sdl,--disable-sdl,libsdl,"
PACKAGECONFIG[ssh2] = "--enable-libssh2,--disable-libssh2,libssh2,"
PACKAGECONFIG[libusb] = "--enable-libusb,--disable-libusb,libusb1"
PACKAGECONFIG[fdt] = "--enable-fdt,--disable-fdt,dtc"
PACKAGECONFIG[alsa] = ",,alsa-lib"
PACKAGECONFIG[glx] = "--enable-glx,--disable-glx,mesa"
PACKAGECONFIG[lzo] = "--enable-lzo,--disable-lzo,lzo"
+PACKAGECONFIG[numa] = "--enable-numa,--disable-numa,numactl"
EXTRA_OECONF += "${@bb.utils.contains('PACKAGECONFIG', 'alsa', '--audio-drv-list=oss,alsa', '', d)}"
diff --git a/meta/recipes-devtools/qemu/qemu/wacom.patch b/meta/recipes-devtools/qemu/qemu/wacom.patch
new file mode 100644
index 0000000000..cd06aa4ac6
--- /dev/null
+++ b/meta/recipes-devtools/qemu/qemu/wacom.patch
@@ -0,0 +1,130 @@
+The USB wacom device is missing a HID descriptor, which causes it
+to fail to operate with recent kernels (e.g. 3.17).
+
+This patch adds a HID descriptor to the device, based upon one from
+a real Wacom device.
+
+Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
+
+Upstream-Status: Submitted
+2014/11/27
+
+Index: qemu-2.1.0/hw/usb/dev-wacom.c
+===================================================================
+--- qemu-2.1.0.orig/hw/usb/dev-wacom.c 2014-08-01 15:12:17.000000000 +0100
++++ qemu-2.1.0/hw/usb/dev-wacom.c 2014-10-12 12:13:30.540306042 +0100
+@@ -68,6 +68,89 @@
+ [STR_SERIALNUMBER] = "1",
+ };
+
++static const uint8_t qemu_tablet_hid_report_descriptor[] = {
++ 0x05, 0x01, /* Usage Page (Generic Desktop) */
++ 0x09, 0x02, /* Usage (Mouse) */
++ 0xa1, 0x01, /* Collection (Application) */
++ 0x85, 0x01, /* Report ID (1) */
++ 0x09, 0x01, /* Usage (Pointer) */
++ 0xa1, 0x00, /* Collection (Physical) */
++ 0x05, 0x09, /* Usage Page (Button) */
++ 0x19, 0x01, /* Usage Minimum (1) */
++ 0x29, 0x05, /* Usage Maximum (5) */
++ 0x15, 0x00, /* Logical Minimum (0) */
++ 0x25, 0x01, /* Logical Maximum (1) */
++ 0x95, 0x05, /* Report Count (5) */
++ 0x75, 0x01, /* Report Size (1) */
++ 0x81, 0x02, /* Input (Data, Variable, Absolute) */
++ 0x95, 0x01, /* Report Count (1) */
++ 0x75, 0x03, /* Report Size (3) */
++ 0x81, 0x01, /* Input (Constant) */
++ 0x05, 0x01, /* Usage Page (Generic Desktop) */
++ 0x09, 0x30, /* Usage (X) */
++ 0x09, 0x31, /* Usage (Y) */
++ 0x15, 0x81, /* Logical Minimum (-127) */
++ 0x25, 0x7f, /* Logical Maximum (127) */
++ 0x75, 0x08, /* Report Size (8) */
++ 0x95, 0x02, /* Report Count (2) */
++ 0x81, 0x06, /* Input (Data, Variable, Relative) */
++ 0xc0, /* End Collection */
++ 0xc0, /* End Collection */
++ 0x05, 0x0d, /* Usage Page (Digitizer) */
++ 0x09, 0x01, /* Usage (Digitizer) */
++ 0xa1, 0x01, /* Collection (Application) */
++ 0x85, 0x02, /* Report ID (2) */
++ 0xa1, 0x00, /* Collection (Physical) */
++ 0x06, 0x00, 0xff, /* Usage Page (Vendor 0xff00) */
++ 0x09, 0x01, /* Usage (Digitizer) */
++ 0x15, 0x00, /* Logical Minimum (0) */
++ 0x26, 0xff, 0x00, /* Logical Maximum (255) */
++ 0x75, 0x08, /* Report Size (8) */
++ 0x95, 0x08, /* Report Count (8) */
++ 0x81, 0x02, /* Input (Data, Variable, Absolute) */
++ 0xc0, /* End Collection */
++ 0x09, 0x01, /* Usage (Digitizer) */
++ 0x85, 0x02, /* Report ID (2) */
++ 0x95, 0x01, /* Report Count (1) */
++ 0xb1, 0x02, /* FEATURE (2) */
++ 0xc0, /* End Collection */
++ 0x06, 0x00, 0xff, /* Usage Page (Vendor 0xff00) */
++ 0x09, 0x01, /* Usage (Digitizer) */
++ 0xa1, 0x01, /* Collection (Application) */
++ 0x85, 0x02, /* Report ID (2) */
++ 0x05, 0x0d, /* Usage Page (Digitizer) */
++ 0x09, 0x22, /* Usage (Finger) */
++ 0xa1, 0x00, /* Collection (Physical) */
++ 0x06, 0x00, 0xff, /* Usage Page (Vendor 0xff00) */
++ 0x09, 0x01, /* Usage (Digitizer) */
++ 0x15, 0x00, /* Logical Minimum (0) */
++ 0x26, 0xff, 0x00, /* Logical Maximum */
++ 0x75, 0x08, /* Report Size (8) */
++ 0x95, 0x02, /* Report Count (2) */
++ 0x81, 0x02, /* Input (Data, Variable, Absolute) */
++ 0x05, 0x01, /* Usage Page (Generic Desktop) */
++ 0x09, 0x30, /* Usage (X) */
++ 0x35, 0x00, /* Physical Minimum */
++ 0x46, 0xe0, 0x2e, /* Physical Maximum */
++ 0x26, 0xe0, 0x01, /* Logical Maximum */
++ 0x75, 0x10, /* Report Size (16) */
++ 0x95, 0x01, /* Report Count (1) */
++ 0x81, 0x02, /* Input (Data, Variable, Absolute) */
++ 0x09, 0x31, /* Usage (Y) */
++ 0x46, 0x40, 0x1f, /* Physical Maximum */
++ 0x26, 0x40, 0x01, /* Logical Maximum */
++ 0x81, 0x02, /* Input (Data, Variable, Absolute) */
++ 0x06, 0x00, 0xff, /* Usage Page (Vendor 0xff00) */
++ 0x09, 0x01, /* Usage (Digitizer) */
++ 0x26, 0xff, 0x00, /* Logical Maximum */
++ 0x75, 0x08, /* Report Size (8) */
++ 0x95, 0x0d, /* Report Count (13) */
++ 0x81, 0x02, /* Input (Data, Variable, Absolute) */
++ 0xc0, /* End Collection */
++ 0xc0, /* End Collection */
++};
++
++
+ static const USBDescIface desc_iface_wacom = {
+ .bInterfaceNumber = 0,
+ .bNumEndpoints = 1,
+@@ -85,7 +168,7 @@
+ 0x00, /* u8 country_code */
+ 0x01, /* u8 num_descriptors */
+ 0x22, /* u8 type: Report */
+- 0x6e, 0, /* u16 len */
++ sizeof(qemu_tablet_hid_report_descriptor), 0, /* u16 len */
+ },
+ },
+ },
+@@ -265,6 +350,15 @@
+ }
+
+ switch (request) {
++ case InterfaceRequest | USB_REQ_GET_DESCRIPTOR:
++ switch (value >> 8) {
++ case 0x22:
++ memcpy(data, qemu_tablet_hid_report_descriptor,
++ sizeof(qemu_tablet_hid_report_descriptor));
++ p->actual_length = sizeof(qemu_tablet_hid_report_descriptor);
++ break;
++ }
++ break;
+ case WACOM_SET_REPORT:
+ if (s->mouse_grabbed) {
+ qemu_remove_mouse_event_handler(s->eh_entry);
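For reference, the descriptor added above is a sequence of standard HID "short items": each item starts with a prefix byte encoding its size, type and tag, followed by the data bytes noted in the comments. A small decoder sketch (not part of QEMU; standard HID short-item encoding is assumed, and hid_items is a hypothetical helper):

    TYPES = {0: "Main", 1: "Global", 2: "Local"}

    def hid_items(data):
        i = 0
        while i < len(data):
            prefix = data[i]
            size = prefix & 0x03
            size = 4 if size == 3 else size      # size code 3 means 4 bytes
            tag, typ = (prefix >> 4) & 0x0F, TYPES[(prefix >> 2) & 0x03]
            yield tag, typ, data[i + 1:i + 1 + size]
            i += 1 + size

    # First items of the descriptor: Usage Page (Generic Desktop),
    # Usage (Mouse), Collection (Application), ..., End Collection.
    sample = bytes([0x05, 0x01, 0x09, 0x02, 0xA1, 0x01, 0xC0])
    for tag, typ, payload in hid_items(sample):
        print(hex(tag), typ, payload.hex())
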
diff --git a/meta/recipes-devtools/qemu/qemu_2.1.0.bb b/meta/recipes-devtools/qemu/qemu_2.1.2.bb
index 9b8337a73d..55ae7b60a6 100644
--- a/meta/recipes-devtools/qemu/qemu_2.1.0.bb
+++ b/meta/recipes-devtools/qemu/qemu_2.1.2.bb
@@ -8,19 +8,11 @@ SRC_URI += "file://configure-fix-Darwin-target-detection.patch \
file://Qemu-Arm-versatilepb-Add-memory-size-checking.patch \
"
SRC_URI_prepend = "http://wiki.qemu-project.org/download/${BP}.tar.bz2"
-SRC_URI[md5sum] = "6726977292b448cbc7f89998fac6983b"
-SRC_URI[sha256sum] = "397e23184f4bf613589a8fe0c6542461dc2afdf17ed337e97e6fd2f31e8f8802"
+SRC_URI[md5sum] = "0ff197c4ed4b695620bc4734e77c888f"
+SRC_URI[sha256sum] = "fd10f5e45cf5a736fa5a3e1c279ae9821534e700beb7d1aab88a07648a394885"
COMPATIBLE_HOST_class-target_mips64 = "null"
-do_sanitize_sources() {
- # These .git files point to a nonexistent path "../.git/modules" and will confuse git
- # if it tries to recurse into those directories.
- rm -f ${S}/dtc/.git ${S}/pixman/.git
-}
-
-addtask sanitize_sources after do_unpack before do_patch
-
do_install_append() {
# Prevent QA warnings about installed ${localstatedir}/run
if [ -d ${D}${localstatedir}/run ]; then rmdir ${D}${localstatedir}/run; fi
diff --git a/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb b/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb
index d2981b5575..959cd6fba0 100644
--- a/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb
+++ b/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb
@@ -9,7 +9,7 @@ do_install () {
echo "#!/bin/sh" > ${D}${bindir_crossscripts}/qemuwrapper
qemu_binary=${@qemu_target_binary(d)}
- qemu_options='${@d.getVar("QEMU_OPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True) or d.getVar('QEMU_OPTIONS', True) or ""}'
+ qemu_options='${QEMU_OPTIONS}'
echo "$qemu_binary $qemu_options \"\$@\"" >> ${D}${bindir_crossscripts}/qemuwrapper
fallback_qemu_bin=
case $qemu_binary in
diff --git a/meta/recipes-devtools/quilt/quilt-0.63.inc b/meta/recipes-devtools/quilt/quilt-0.63.inc
index f3757859ff..433d6b4089 100644
--- a/meta/recipes-devtools/quilt/quilt-0.63.inc
+++ b/meta/recipes-devtools/quilt/quilt-0.63.inc
@@ -55,5 +55,5 @@ do_install_ptest() {
RDEPENDS_${PN}-ptest = "make file sed gawk diffutils findutils ed perl \
perl-module-filehandle perl-module-getopt-std \
perl-module-posix perl-module-file-temp \
- perl-module-text-parsewords \
+ perl-module-text-parsewords bash \
"
diff --git a/meta/recipes-devtools/quilt/quilt/run-ptest b/meta/recipes-devtools/quilt/quilt/run-ptest
index 1ea0dae128..958a9df5b6 100755
--- a/meta/recipes-devtools/quilt/quilt/run-ptest
+++ b/meta/recipes-devtools/quilt/quilt/run-ptest
@@ -1,5 +1,5 @@
#!/bin/sh
ln -sf /bin/ed /usr/bin/ed
-for i in `ls test/*.test |awk -F. '{print $1}' |awk -F/ '{print $2}'`; do make check-$i; if [ $? == 0 ]; then echo PASS: $i.test; else echo FAIL: $i.test; fi; done
+for i in `ls test/*.test |awk -F. '{print $1}' |awk -F/ '{print $2}'`; do make check-$i; if [ $? -eq 0 ]; then echo PASS: $i.test; else echo FAIL: $i.test; fi; done
rm -f /usr/bin/ed
diff --git a/meta/recipes-devtools/rpm/rpm-4.11.2/add_RPMSENSE_MISSINGOK_to_rpmmodule.patch b/meta/recipes-devtools/rpm/rpm-4.11.2/add_RPMSENSE_MISSINGOK_to_rpmmodule.patch
new file mode 100644
index 0000000000..b877870411
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm-4.11.2/add_RPMSENSE_MISSINGOK_to_rpmmodule.patch
@@ -0,0 +1,20 @@
+Upstream-Status: Inappropriate [OE-Specific]
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+Signed-off-by: Ronan Le Martret <ronan.lemartret@open.eurogiciel.org>
+
+diff --git a/python/rpmmodule.c b/python/rpmmodule.c
+index a4fe217..728c66c 100644
+--- a/python/rpmmodule.c
++++ b/python/rpmmodule.c
+@@ -396,6 +396,10 @@ static int initModule(PyObject *m)
+ REGISTER_ENUM(RPMSENSE_STRONG);
+ REGISTER_ENUM(RPMSENSE_CONFIG);
+
++#if defined(RPM_VENDOR_OE)
++ REGISTER_ENUM(RPMSENSE_MISSINGOK);
++#endif
++
+ REGISTER_ENUM(RPMTRANS_FLAG_TEST);
+ REGISTER_ENUM(RPMTRANS_FLAG_BUILD_PROBS);
+ REGISTER_ENUM(RPMTRANS_FLAG_NOSCRIPTS);
diff --git a/meta/recipes-devtools/rpm/rpm-4.11.2/disable_shortcircuited.patch b/meta/recipes-devtools/rpm/rpm-4.11.2/disable_shortcircuited.patch
new file mode 100644
index 0000000000..7a646de373
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm-4.11.2/disable_shortcircuited.patch
@@ -0,0 +1,23 @@
+Upstream-Status: Pending
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+Signed-off-by: Ronan Le Martret <ronan.lemartret@open.eurogiciel.org>
+
+
+Index: rpm-4.11.2/build/pack.c
+===================================================================
+--- rpm-4.11.2.orig/build/pack.c
++++ rpm-4.11.2/build/pack.c
+@@ -571,9 +571,9 @@ rpmRC packageBinaries(rpmSpec spec, cons
+ headerPutBin(pkg->header, RPMTAG_SOURCEPKGID, spec->sourcePkgId,16);
+ }
+
+- if (cheating) {
+- (void) rpmlibNeedsFeature(pkg, "ShortCircuited", "4.9.0-1");
+- }
++// if (cheating) {
++// (void) rpmlibNeedsFeature(pkg, "ShortCircuited", "4.9.0-1");
++// }
+
+ { char *binFormat = rpmGetPath("%{_rpmfilename}", NULL);
+ char *binRpm, *binDir;
diff --git a/meta/recipes-devtools/rpm/rpm-4.11.2/fix_libdir.patch b/meta/recipes-devtools/rpm/rpm-4.11.2/fix_libdir.patch
new file mode 100644
index 0000000000..be0626c8b3
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm-4.11.2/fix_libdir.patch
@@ -0,0 +1,19 @@
+Upstream-Status: Inappropriate [OE-Core specific]
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+Signed-off-by: Ronan Le Martret <ronan.lemartret@open.eurogiciel.org>
+
+
+diff --git a/installplatform b/installplatform
+index 8c3eba0..fa15e91 100755
+--- a/installplatform
++++ b/installplatform
+@@ -112,7 +112,7 @@ for ARCH in noarch `grep ^arch_canon $RPMRC | cut -d: -f2`; do
+ [ -z "$CANONARCH" ] && continue
+
+ if [ "$OS" = "linux" ] && [ "$CANONCOLOR" = 3 ]; then
+- LIB=${LIB}64
++ LIB=${LIB}
+ fi
+
+ PPD="${DESTDIR}/${platformdir}/${ARCH}-${OS}"
diff --git a/meta/recipes-devtools/rpm/rpm-4.11.2/pythondeps.sh b/meta/recipes-devtools/rpm/rpm-4.11.2/pythondeps.sh
new file mode 100755
index 0000000000..083b174f17
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm-4.11.2/pythondeps.sh
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+[ $# -ge 1 ] || {
+ cat > /dev/null
+ exit 0
+}
+
+case $1 in
+-R|--requires)
+ shift
+ grep "/usr/\(lib[^/]*\|share\)/python[^/]*/" >/dev/null && echo "python"
+ exit 0
+ ;;
+esac
+
+exit 0
diff --git a/meta/recipes-devtools/rpm/rpm-4.11.2/remove-db3-from-configure.patch b/meta/recipes-devtools/rpm/rpm-4.11.2/remove-db3-from-configure.patch
new file mode 100644
index 0000000000..2640e54b5b
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm-4.11.2/remove-db3-from-configure.patch
@@ -0,0 +1,26 @@
+
+Disable configuring the db3 directory, since we will be using the
+external DB already provided as part of OE-Core; there is no need
+to carry duplicate database code.
+
+Upstream-Status: Inappropriate [OE-Core Specific]
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+
+Index: rpm-4.11.2/configure.ac
+===================================================================
+--- rpm-4.11.2.orig/configure.ac
++++ rpm-4.11.2/configure.ac
+@@ -825,9 +825,9 @@ AC_SUBST(RPMCONFIGDIR)
+
+ AC_SUBST(OBJDUMP)
+
+-if test "$with_external_db" = no; then
+- AC_CONFIG_SUBDIRS(db3)
+-fi
++#if test "$with_external_db" = no; then
++# AC_CONFIG_SUBDIRS(db3)
++#fi
+
+ AM_CONDITIONAL([WITH_INTERNAL_DB],[test "$with_external_db" = no])
+ AM_CONDITIONAL([DOXYGEN],[test "$DOXYGEN" != no])
diff --git a/meta/recipes-devtools/rpm/rpm-4.11.2/remove-dir-check.patch b/meta/recipes-devtools/rpm/rpm-4.11.2/remove-dir-check.patch
new file mode 100644
index 0000000000..f1ecab7b0e
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm-4.11.2/remove-dir-check.patch
@@ -0,0 +1,23 @@
+Upstream-Status: Pending
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+Signed-off-by: Ronan Le Martret <ronan.lemartret@open.eurogiciel.org>
+
+
+Index: rpm-4.11.2/build/files.c
+===================================================================
+--- rpm-4.11.2.orig/build/files.c
++++ rpm-4.11.2/build/files.c
+@@ -1321,12 +1321,6 @@ static rpmRC addFile(FileList fl, const
+ }
+ }
+
+- /* Error out when a non-directory is specified as one in spec */
+- if (fl->cur.isDir && (statp == &statbuf) && !S_ISDIR(statp->st_mode)) {
+- rpmlog(RPMLOG_ERR, _("Not a directory: %s\n"), diskPath);
+- goto exit;
+- }
+-
+ /* Don't recurse into explicit %dir, don't double-recurse from fts */
+ if ((fl->cur.isDir != 1) && (statp == &statbuf) && S_ISDIR(statp->st_mode)) {
+ return recurseDir(fl, diskPath);
diff --git a/meta/recipes-devtools/rpm/rpm-4.11.2/rpm-scriptetexechelp.patch b/meta/recipes-devtools/rpm/rpm-4.11.2/rpm-scriptetexechelp.patch
new file mode 100644
index 0000000000..9333dea441
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm-4.11.2/rpm-scriptetexechelp.patch
@@ -0,0 +1,194 @@
+Upstream-Status: Inappropriate [OE-Core]
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+Signed-off-by: Ronan Le Martret <ronan.lemartret@open.eurogiciel.org>
+
+
+Index: rpm-4.11.2/lib/psm.c
+===================================================================
+--- rpm-4.11.2.orig/lib/psm.c
++++ rpm-4.11.2/lib/psm.c
+@@ -421,7 +421,8 @@ static rpmRC runScript(rpmpsm psm, ARGV_
+
+ rpmswEnter(rpmtsOp(psm->ts, RPMTS_OP_SCRIPTLETS), 0);
+ rc = rpmScriptRun(script, arg1, arg2, sfd,
+- prefixes, warn_only, selinux);
++ prefixes, warn_only, selinux, rpmtsRootDir(psm->ts) );
++
+ rpmswExit(rpmtsOp(psm->ts, RPMTS_OP_SCRIPTLETS), 0);
+
+ /* Map warn-only errors to "notfound" for script stop callback */
+@@ -958,15 +959,49 @@ static rpmRC rpmpsmStage(rpmpsm psm, pkg
+ case PSM_DESTROY:
+ break;
+ case PSM_SCRIPT: /* Run current package scriptlets. */
++#ifdef RPM_VENDOR_OE
++ {
++ const char * scriptletWrapper = rpmExpand("%{?_cross_scriptlet_wrapper}", NULL);
++ if (scriptletWrapper && *scriptletWrapper)
++ (void) rpmChrootOut();
++#endif
++
+ rc = runInstScript(psm);
++#ifdef RPM_VENDOR_OE
++ if (scriptletWrapper && *scriptletWrapper)
++ (void) rpmChrootIn();
++ }
++#endif
+ break;
+ case PSM_TRIGGERS:
+ /* Run triggers in other package(s) this package sets off. */
++#ifdef RPM_VENDOR_OE
++ {
++ const char * scriptletWrapper = rpmExpand("%{?_cross_scriptlet_wrapper}", NULL);
++ if (scriptletWrapper && *scriptletWrapper)
++ (void) rpmChrootOut();
++#endif
+ rc = runTriggers(psm);
++#ifdef RPM_VENDOR_OE
++ if (scriptletWrapper && *scriptletWrapper)
++ (void) rpmChrootIn();
++ }
++#endif
+ break;
+ case PSM_IMMED_TRIGGERS:
+ /* Run triggers in this package other package(s) set off. */
++#ifdef RPM_VENDOR_OE
++ {
++ const char * scriptletWrapper = rpmExpand("%{?_cross_scriptlet_wrapper}", NULL);
++ if (scriptletWrapper && *scriptletWrapper)
++ (void) rpmChrootOut();
++#endif
+ rc = runImmedTriggers(psm);
++#ifdef RPM_VENDOR_OE
++ if (scriptletWrapper && *scriptletWrapper)
++ (void) rpmChrootIn();
++ }
++#endif
+ break;
+
+ case PSM_RPMDB_ADD: {
+Index: rpm-4.11.2/lib/rpmscript.c
+===================================================================
+--- rpm-4.11.2.orig/lib/rpmscript.c
++++ rpm-4.11.2/lib/rpmscript.c
+@@ -92,7 +92,7 @@ static rpmRC runLuaScript(int selinux, A
+ static const char * const SCRIPT_PATH = "PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/X11R6/bin";
+
+ static void doScriptExec(int selinux, ARGV_const_t argv, ARGV_const_t prefixes,
+- FD_t scriptFd, FD_t out)
++ FD_t scriptFd, FD_t out, char * rootDir )
+ {
+ int pipes[2];
+ int flag;
+@@ -158,13 +158,12 @@ static void doScriptExec(int selinux, AR
+ setenv("RPM_INSTALL_PREFIX", *pf, 1);
+ }
+ }
+-
+- if (chdir("/") == 0) {
++ if (chdir(rootDir) == 0) {
+ /* XXX Don't mtrace into children. */
+ unsetenv("MALLOC_CHECK_");
+
+ /* Permit libselinux to do the scriptlet exec. */
+- if (selinux == 1) {
++ if (selinux == 1) {
+ xx = rpm_execcon(0, argv[0], argv, environ);
+ }
+
+@@ -175,12 +174,12 @@ static void doScriptExec(int selinux, AR
+ _exit(127); /* exit 127 for compatibility with bash(1) */
+ }
+
+-static char * writeScript(const char *cmd, const char *script)
++static char * writeScript(const char *cmd, const char *script, char * rootDir)
+ {
+ char *fn = NULL;
+ size_t slen = strlen(script);
+ int ok = 0;
+- FD_t fd = rpmMkTempFile("/", &fn);
++ FD_t fd = rpmMkTempFile(rootDir, &fn);
+
+ if (Ferror(fd))
+ goto exit;
+@@ -204,7 +203,7 @@ exit:
+ */
+ static rpmRC runExtScript(int selinux, ARGV_const_t prefixes,
+ const char *sname, rpmlogLvl lvl, FD_t scriptFd,
+- ARGV_t * argvp, const char *script, int arg1, int arg2)
++ ARGV_t * argvp, const char *script, int arg1, int arg2,char * rootDir)
+ {
+ FD_t out = NULL;
+ char * fn = NULL;
+@@ -215,7 +214,7 @@ static rpmRC runExtScript(int selinux, A
+ rpmlog(RPMLOG_DEBUG, "%s: scriptlet start\n", sname);
+
+ if (script) {
+- fn = writeScript(*argvp[0], script);
++ fn = writeScript(*argvp[0], script, rootDir);
+ if (fn == NULL) {
+ rpmlog(RPMLOG_ERR,
+ _("Couldn't create temporary file for %s: %s\n"),
+@@ -258,7 +257,7 @@ static rpmRC runExtScript(int selinux, A
+ } else if (pid == 0) {/* Child */
+ rpmlog(RPMLOG_DEBUG, "%s: execv(%s) pid %d\n",
+ sname, *argvp[0], (unsigned)getpid());
+- doScriptExec(selinux, *argvp, prefixes, scriptFd, out);
++ doScriptExec(selinux, *argvp, prefixes, scriptFd, out, rootDir);
+ }
+
+ do {
+@@ -297,13 +296,27 @@ exit:
+ }
+
+ rpmRC rpmScriptRun(rpmScript script, int arg1, int arg2, FD_t scriptFd,
+- ARGV_const_t prefixes, int warn_only, int selinux)
++ ARGV_const_t prefixes, int warn_only, int selinux, char * rootDir)
+ {
+ ARGV_t args = NULL;
+ rpmlogLvl lvl = warn_only ? RPMLOG_WARNING : RPMLOG_ERR;
+ rpmRC rc;
+-
+- if (script == NULL) return RPMRC_OK;
++#ifdef RPM_VENDOR_OE
++ const char * scriptletWrapper = rpmExpand("%{?_cross_scriptlet_wrapper}", NULL);
++#endif
++
++ if (script == NULL) return RPMRC_OK;
++#ifdef RPM_VENDOR_OE
++ if (scriptletWrapper && *scriptletWrapper) {
++ argvAdd(&args, scriptletWrapper);
++
++ if ( rootDir ) {
++ argvAdd(&args, rootDir);
++ } else {
++ argvAdd(&args, "/");
++ }
++ }
++#endif
+
+ /* construct a new argv as we can't modify the one from header */
+ if (script->args) {
+@@ -315,7 +328,7 @@ rpmRC rpmScriptRun(rpmScript script, int
+ if (rstreq(args[0], "<lua>")) {
+ rc = runLuaScript(selinux, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2);
+ } else {
+- rc = runExtScript(selinux, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2);
++ rc = runExtScript(selinux, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, rootDir);
+ }
+ argvFree(args);
+
+Index: rpm-4.11.2/lib/rpmscript.h
+===================================================================
+--- rpm-4.11.2.orig/lib/rpmscript.h
++++ rpm-4.11.2/lib/rpmscript.h
+@@ -29,7 +29,7 @@ rpmScript rpmScriptFree(rpmScript script
+
+ RPM_GNUC_INTERNAL
+ rpmRC rpmScriptRun(rpmScript script, int arg1, int arg2, FD_t scriptFd,
+- ARGV_const_t prefixes, int warn_only, int selinux);
++ ARGV_const_t prefixes, int warn_only, int selinux, char * rootDir);
+
+ RPM_GNUC_INTERNAL
+ rpmTagVal rpmScriptTag(rpmScript script);
diff --git a/meta/recipes-devtools/rpm/rpm-4.11.2/support-suggests-tag.patch b/meta/recipes-devtools/rpm/rpm-4.11.2/support-suggests-tag.patch
new file mode 100644
index 0000000000..3da608573a
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm-4.11.2/support-suggests-tag.patch
@@ -0,0 +1,384 @@
+
+Upstream-Status: Pending
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+Signed-off-by: Ronan Le Martret <ronan.lemartret@open.eurogiciel.org>
+
+diff --git a/build/pack.c b/build/pack.c
+index b6b2bea..5c3d8df 100644
+--- a/build/pack.c
++++ b/build/pack.c
+@@ -273,8 +273,10 @@ static rpmTagVal depevrtags[] = {
+ RPMTAG_CONFLICTVERSION,
+ RPMTAG_ORDERVERSION,
+ RPMTAG_TRIGGERVERSION,
+- RPMTAG_SUGGESTSVERSION,
+- RPMTAG_ENHANCESVERSION,
++ RPMTAG_SUGGESTVERSION,
++ RPMTAG_ENHANCEVERSION,
++ RPMTAG_RECOMMENDVERSION,
++ RPMTAG_SUPPLEMENTVERSION,
+ 0
+ };
+
+diff --git a/build/parsePreamble.c b/build/parsePreamble.c
+index 5772bef..cbbbee3 100644
+--- a/build/parsePreamble.c
++++ b/build/parsePreamble.c
+@@ -821,6 +821,10 @@ static rpmRC handlePreambleTag(rpmSpec spec, Package pkg, rpmTagVal tag,
+ }
+ /* fallthrough */
+ case RPMTAG_PREREQ:
++ case RPMTAG_RECOMMENDFLAGS:
++ case RPMTAG_SUGGESTFLAGS:
++ case RPMTAG_SUPPLEMENTFLAGS:
++ case RPMTAG_ENHANCEFLAGS:
+ case RPMTAG_CONFLICTFLAGS:
+ case RPMTAG_OBSOLETEFLAGS:
+ case RPMTAG_PROVIDEFLAGS:
+@@ -922,6 +926,10 @@ static struct PreambleRec_s const preambleList[] = {
+ {RPMTAG_ICON, 0, 0, LEN_AND_STR("icon")},
+ {RPMTAG_PROVIDEFLAGS, 0, 0, LEN_AND_STR("provides")},
+ {RPMTAG_REQUIREFLAGS, 2, 0, LEN_AND_STR("requires")},
++ {RPMTAG_RECOMMENDFLAGS, 0, 0, LEN_AND_STR("recommends")},
++ {RPMTAG_SUGGESTFLAGS, 0, 0, LEN_AND_STR("suggests")},
++ {RPMTAG_SUPPLEMENTFLAGS, 0, 0, LEN_AND_STR("supplements")},
++ {RPMTAG_ENHANCEFLAGS, 0, 0, LEN_AND_STR("enhances")},
+ {RPMTAG_PREREQ, 2, 1, LEN_AND_STR("prereq")},
+ {RPMTAG_CONFLICTFLAGS, 0, 0, LEN_AND_STR("conflicts")},
+ {RPMTAG_OBSOLETEFLAGS, 0, 0, LEN_AND_STR("obsoletes")},
+diff --git a/build/parseReqs.c b/build/parseReqs.c
+index ba080a1..1427111 100644
+--- a/build/parseReqs.c
++++ b/build/parseReqs.c
+@@ -61,6 +61,18 @@ rpmRC parseRCPOT(rpmSpec spec, Package pkg, const char *field, rpmTagVal tagN,
+ nametag = RPMTAG_REQUIRENAME;
+ tagflags |= RPMSENSE_ANY;
+ break;
++ case RPMTAG_RECOMMENDFLAGS:
++ nametag = RPMTAG_RECOMMENDNAME;
++ break;
++ case RPMTAG_SUGGESTFLAGS:
++ nametag = RPMTAG_SUGGESTNAME;
++ break;
++ case RPMTAG_SUPPLEMENTFLAGS:
++ nametag = RPMTAG_SUPPLEMENTNAME;
++ break;
++ case RPMTAG_ENHANCEFLAGS:
++ nametag = RPMTAG_ENHANCENAME;
++ break;
+ case RPMTAG_PROVIDEFLAGS:
+ nametag = RPMTAG_PROVIDENAME;
+ break;
+diff --git a/build/reqprov.c b/build/reqprov.c
+index a368f42..c270af6 100644
+--- a/build/reqprov.c
++++ b/build/reqprov.c
+@@ -81,6 +81,30 @@ int addReqProv(Package pkg, rpmTagVal tagN,
+ extra = Flags & RPMSENSE_TRIGGER;
+ dsp = &pkg->triggers;
+ break;
++ case RPMTAG_RECOMMENDNAME:
++ versiontag = RPMTAG_RECOMMENDVERSION;
++ flagtag = RPMTAG_RECOMMENDFLAGS;
++ extra = Flags & _ALL_REQUIRES_MASK;
++ dsp = &pkg->recommends;
++ break;
++ case RPMTAG_SUGGESTNAME:
++ versiontag = RPMTAG_SUGGESTVERSION;
++ flagtag = RPMTAG_SUGGESTFLAGS;
++ extra = Flags & _ALL_REQUIRES_MASK;
++ dsp = &pkg->suggests;
++ break;
++ case RPMTAG_SUPPLEMENTNAME:
++ versiontag = RPMTAG_SUPPLEMENTVERSION;
++ flagtag = RPMTAG_SUPPLEMENTFLAGS;
++ extra = Flags & _ALL_REQUIRES_MASK;
++ dsp = &pkg->supplements;
++ break;
++ case RPMTAG_ENHANCENAME:
++ versiontag = RPMTAG_ENHANCEVERSION;
++ flagtag = RPMTAG_ENHANCEFLAGS;
++ extra = Flags & _ALL_REQUIRES_MASK;
++ dsp = &pkg->enhances;
++ break;
+ case RPMTAG_REQUIRENAME:
+ default:
+ tagN = RPMTAG_REQUIRENAME;
+diff --git a/build/rpmbuild_internal.h b/build/rpmbuild_internal.h
+index a9e4c7c..0a1977f 100644
+--- a/build/rpmbuild_internal.h
++++ b/build/rpmbuild_internal.h
+@@ -93,6 +93,10 @@ struct Package_s {
+ rpmds ds; /*!< Requires: N = EVR */
+ rpmds requires;
+ rpmds provides;
++ rpmds recommends;
++ rpmds suggests;
++ rpmds supplements;
++ rpmds enhances;
+ rpmds conflicts;
+ rpmds obsoletes;
+ rpmds triggers;
+diff --git a/build/spec.c b/build/spec.c
+index 703ec78..7ae2120 100644
+--- a/build/spec.c
++++ b/build/spec.c
+@@ -139,6 +139,11 @@ static Package freePackage(Package pkg)
+ pkg->ds = rpmdsFree(pkg->ds);
+ pkg->requires = rpmdsFree(pkg->requires);
+ pkg->provides = rpmdsFree(pkg->provides);
++ pkg->recommends = rpmdsFree(pkg->recommends);
++ pkg->suggests = rpmdsFree(pkg->suggests);
++ pkg->supplements = rpmdsFree(pkg->supplements);
++ pkg->enhances = rpmdsFree(pkg->enhances);
++
+ pkg->conflicts = rpmdsFree(pkg->conflicts);
+ pkg->obsoletes = rpmdsFree(pkg->obsoletes);
+ pkg->triggers = rpmdsFree(pkg->triggers);
+diff --git a/lib/rpmds.c b/lib/rpmds.c
+index 7a51167..1e5dda0 100644
+--- a/lib/rpmds.c
++++ b/lib/rpmds.c
+@@ -52,6 +52,22 @@ static int dsType(rpmTagVal tag,
+ t = "Requires";
+ evr = RPMTAG_REQUIREVERSION;
+ f = RPMTAG_REQUIREFLAGS;
++ } else if (tag == RPMTAG_SUPPLEMENTNAME) {
++ t = "Supplements";
++ evr = RPMTAG_SUPPLEMENTVERSION;
++ f = RPMTAG_SUPPLEMENTFLAGS;
++ } else if (tag == RPMTAG_ENHANCENAME) {
++ t = "Enhances";
++ evr = RPMTAG_ENHANCEVERSION;
++ f = RPMTAG_ENHANCEFLAGS;
++ } else if (tag == RPMTAG_RECOMMENDNAME) {
++ t = "Recommends";
++ evr = RPMTAG_RECOMMENDVERSION;
++ f = RPMTAG_RECOMMENDFLAGS;
++ } else if (tag == RPMTAG_SUGGESTNAME) {
++ t = "Suggests";
++ evr = RPMTAG_SUGGESTVERSION;
++ f = RPMTAG_SUGGESTFLAGS;
+ } else if (tag == RPMTAG_CONFLICTNAME) {
+ t = "Conflicts";
+ evr = RPMTAG_CONFLICTVERSION;
+diff --git a/lib/rpmtag.h b/lib/rpmtag.h
+index 64b03f1..b943229 100644
+--- a/lib/rpmtag.h
++++ b/lib/rpmtag.h
+@@ -217,14 +217,14 @@ typedef enum rpmTag_e {
+ RPMTAG_PRETRANSPROG = 1153, /* s[] */
+ RPMTAG_POSTTRANSPROG = 1154, /* s[] */
+ RPMTAG_DISTTAG = 1155, /* s */
+- RPMTAG_SUGGESTSNAME = 1156, /* s[] extension (unimplemented) */
+-#define RPMTAG_SUGGESTS RPMTAG_SUGGESTSNAME /* s[] (unimplemented) */
+- RPMTAG_SUGGESTSVERSION = 1157, /* s[] extension (unimplemented) */
+- RPMTAG_SUGGESTSFLAGS = 1158, /* i[] extension (unimplemented) */
+- RPMTAG_ENHANCESNAME = 1159, /* s[] extension placeholder (unimplemented) */
+-#define RPMTAG_ENHANCES RPMTAG_ENHANCESNAME /* s[] (unimplemented) */
+- RPMTAG_ENHANCESVERSION = 1160, /* s[] extension placeholder (unimplemented) */
+- RPMTAG_ENHANCESFLAGS = 1161, /* i[] extension placeholder (unimplemented) */
++ RPMTAG_OLDSUGGESTSNAME = 1156, /* s[] (unimplemented) */
++#define RPMTAG_OLDSUGGESTS RPMTAG_OLDSUGGESTSNAME /* s[] (unimplemented) */
++ RPMTAG_OLDSUGGESTSVERSION = 1157, /* s[] (unimplemented) */
++ RPMTAG_OLDSUGGESTSFLAGS = 1158, /* i[] (unimplemented) */
++ RPMTAG_OLDENHANCESNAME = 1159, /* s[] (unimplemented) */
++#define RPMTAG_OLDENHANCES RPMTAG_OLDENHANCESNAME /* s[] (unimplemented) */
++ RPMTAG_OLDENHANCESVERSION = 1160, /* s[] (unimplemented) */
++ RPMTAG_OLDENHANCESFLAGS = 1161, /* i[] (unimplemented) */
+ RPMTAG_PRIORITY = 1162, /* i[] extension placeholder (unimplemented) */
+ RPMTAG_CVSID = 1163, /* s (unimplemented) */
+ #define RPMTAG_SVNID RPMTAG_CVSID /* s (unimplemented) */
+@@ -261,6 +261,7 @@ typedef enum rpmTag_e {
+ RPMTAG_BUILDOBSOLETES = 1194, /* internal (unimplemented) */
+ RPMTAG_DBINSTANCE = 1195, /* i extension */
+ RPMTAG_NVRA = 1196, /* s extension */
++
+ /* tags 1997-4999 reserved */
+ RPMTAG_FILENAMES = 5000, /* s[] extension */
+ RPMTAG_FILEPROVIDE = 5001, /* s[] extension */
+@@ -307,6 +308,26 @@ typedef enum rpmTag_e {
+ RPMTAG_OBSOLETENEVRS = 5043, /* s[] extension */
+ RPMTAG_CONFLICTNEVRS = 5044, /* s[] extension */
+ RPMTAG_FILENLINKS = 5045, /* i[] extension */
++ RPMTAG_RECOMMENDNAME = 5046, /* s[] */
++#define RPMTAG_RECOMMENDS RPMTAG_RECOMMENDNAME /* s[] */
++ RPMTAG_RECOMMENDVERSION = 5047, /* s[] */
++ RPMTAG_RECOMMENDFLAGS = 5048, /* i[] */
++ RPMTAG_SUGGESTNAME = 5049, /* s[] */
++#define RPMTAG_SUGGESTS RPMTAG_SUGGESTNAME /* s[] */
++ RPMTAG_SUGGESTVERSION = 5050, /* s[] extension */
++ RPMTAG_SUGGESTFLAGS = 5051, /* i[] extension */
++ RPMTAG_SUPPLEMENTNAME = 5052, /* s[] */
++#define RPMTAG_SUPPLEMENTS RPMTAG_SUPPLEMENTNAME /* s[] */
++ RPMTAG_SUPPLEMENTVERSION = 5053, /* s[] */
++ RPMTAG_SUPPLEMENTFLAGS = 5054, /* i[] */
++ RPMTAG_ENHANCENAME = 5055, /* s[] */
++#define RPMTAG_ENHANCES RPMTAG_ENHANCENAME /* s[] */
++ RPMTAG_ENHANCEVERSION = 5056, /* s[] */
++ RPMTAG_ENHANCEFLAGS = 5057, /* i[] */
++ RPMTAG_RECOMMENDNEVRS = 5058, /* s[] extension */
++ RPMTAG_SUGGESTNEVRS = 5059, /* s[] extension */
++ RPMTAG_SUPPLEMENTNEVRS = 5060, /* s[] extension */
++ RPMTAG_ENHANCENEVRS = 5061, /* s[] extension */
+
+ RPMTAG_FIRSTFREE_TAG /*!< internal */
+ } rpmTag;
+diff --git a/lib/tagexts.c b/lib/tagexts.c
+index 29b2bae..e940310 100644
+--- a/lib/tagexts.c
++++ b/lib/tagexts.c
+@@ -761,6 +761,26 @@ static int requirenevrsTag(Header h, rpmtd td, headerGetFlags hgflags)
+ return depnevrsTag(h, td, hgflags, RPMTAG_REQUIRENAME);
+ }
+
++static int recommendnevrsTag(Header h, rpmtd td, headerGetFlags hgflags)
++{
++ return depnevrsTag(h, td, hgflags, RPMTAG_RECOMMENDNAME);
++}
++
++static int suggestnevrsTag(Header h, rpmtd td, headerGetFlags hgflags)
++{
++ return depnevrsTag(h, td, hgflags, RPMTAG_SUGGESTNAME);
++}
++
++static int supplementnevrsTag(Header h, rpmtd td, headerGetFlags hgflags)
++{
++ return depnevrsTag(h, td, hgflags, RPMTAG_SUPPLEMENTNAME);
++}
++
++static int enhancenevrsTag(Header h, rpmtd td, headerGetFlags hgflags)
++{
++ return depnevrsTag(h, td, hgflags, RPMTAG_ENHANCENAME);
++}
++
+ static int providenevrsTag(Header h, rpmtd td, headerGetFlags hgflags)
+ {
+ return depnevrsTag(h, td, hgflags, RPMTAG_PROVIDENAME);
+@@ -823,6 +843,10 @@ static const struct headerTagFunc_s rpmHeaderTagExtensions[] = {
+ { RPMTAG_EPOCHNUM, epochnumTag },
+ { RPMTAG_INSTFILENAMES, instfilenamesTag },
+ { RPMTAG_REQUIRENEVRS, requirenevrsTag },
++ { RPMTAG_RECOMMENDNEVRS, recommendnevrsTag},
++ { RPMTAG_SUGGESTNEVRS, suggestnevrsTag},
++ { RPMTAG_SUPPLEMENTNEVRS, supplementnevrsTag},
++ { RPMTAG_ENHANCENEVRS, enhancenevrsTag},
+ { RPMTAG_PROVIDENEVRS, providenevrsTag },
+ { RPMTAG_OBSOLETENEVRS, obsoletenevrsTag },
+ { RPMTAG_CONFLICTNEVRS, conflictnevrsTag },
+diff --git a/rpmpopt.in b/rpmpopt.in
+index 805599e..036ab4e 100644
+--- a/rpmpopt.in
++++ b/rpmpopt.in
+@@ -67,6 +67,19 @@ rpm alias --requires --qf \
+ --POPTdesc=$"list capabilities required by package(s)"
+ rpm alias -R --requires
+
++rpm alias --recommends --qf \
++ "[%|VERBOSE?{%{RECOMMENDFLAGS:deptype}: }:{}|%{RECOMMENDNEVRS}\n]" \
++ --POPTdesc=$"list capabilities recommended by package(s)"
++rpm alias --suggests --qf \
++ "[%|VERBOSE?{%{SUGGESTFLAGS:deptype}: }:{}|%{SUGGESTNEVRS}\n]" \
++ --POPTdesc=$"list capabilities suggested by package(s)"
++rpm alias --supplements --qf \
++ "[%|VERBOSE?{%{SUPPLEMENTFLAGS:deptype}: }:{}|%{SUPPLEMENTNEVRS}\n]" \
++ --POPTdesc=$"list capabilities supplemented by package(s)"
++rpm alias --enhances --qf \
++ "[%|VERBOSE?{%{ENHANCEFLAGS:deptype}: }:{}|%{ENHANCENEVRS}\n]" \
++ --POPTdesc=$"list capabilities enhanced by package(s)"
++
+ rpm alias --info --qf '\
+ Name : %{NAME}\n\
+ %|EPOCH?{Epoch : %{EPOCH}\n}|\
+diff --git a/tests/data/SPECS/deptest.spec b/tests/data/SPECS/deptest.spec
+index cb4cbbd..7c47f6d 100644
+--- a/tests/data/SPECS/deptest.spec
++++ b/tests/data/SPECS/deptest.spec
+@@ -10,6 +10,10 @@ BuildArch: noarch
+ %{?provs:Provides: %{provs}}
+ %{?cfls:Conflicts: %{cfls}}
+ %{?obs:Obsoletes: %{obs}}
++%{?recs:Recommends: %{recs}}
++%{?sugs:Suggests: %{sugs}}
++%{?sups:Supplements: %{sups}}
++%{?ens:Enhances: %{ens}}
+
+ %description
+ %{summary}
+diff --git a/tests/rpmbuild.at b/tests/rpmbuild.at
+index 6230903..c4c954c 100644
+--- a/tests/rpmbuild.at
++++ b/tests/rpmbuild.at
+@@ -185,3 +185,25 @@ lrwxrwxrwx /opt/globtest/linkgood
+ ],
+ [])
+ AT_CLEANUP
++
++# ------------------------------
++# Check if weak and reverse requires can be built
++AT_SETUP([Weak and reverse requires])
++AT_KEYWORDS([build])
++AT_CHECK([
++
++runroot rpmbuild -bb --quiet \
++ --define "pkg weakdeps" \
++ --define "recs foo > 1.2.3" \
++ --define "sugs bar >= 0.1.2" \
++ --define "sups baz" \
++ --define "ens zap = 3" \
++ /data/SPECS/deptest.spec
++
++runroot rpm -qp --qf "[%{supplementname}\n]" /build/RPMS/noarch/deptest-weakdeps-1.0-1.noarch.rpm
++],
++[0],
++[baz
++],
++[ignore])
++AT_CLEANUP
+diff --git a/tests/rpmgeneral.at b/tests/rpmgeneral.at
+index 13131e2..80cca63 100644
+--- a/tests/rpmgeneral.at
++++ b/tests/rpmgeneral.at
+@@ -79,6 +79,11 @@ DISTTAG
+ DISTURL
+ DSAHEADER
+ E
++ENHANCEFLAGS
++ENHANCENAME
++ENHANCENEVRS
++ENHANCES
++ENHANCEVERSION
+ EPOCH
+ EPOCHNUM
+ EVR
+@@ -199,6 +204,11 @@ PROVIDES
+ PROVIDEVERSION
+ PUBKEYS
+ R
++RECOMMENDFLAGS
++RECOMMENDNAME
++RECOMMENDNEVRS
++RECOMMENDS
++RECOMMENDVERSION
+ RECONTEXTS
+ RELEASE
+ REMOVETID
+@@ -219,7 +229,17 @@ SOURCE
+ SOURCEPACKAGE
+ SOURCEPKGID
+ SOURCERPM
++SUGGESTFLAGS
++SUGGESTNAME
++SUGGESTNEVRS
++SUGGESTS
++SUGGESTVERSION
+ SUMMARY
++SUPPLEMENTFLAGS
++SUPPLEMENTNAME
++SUPPLEMENTNEVRS
++SUPPLEMENTS
++SUPPLEMENTVERSION
+ TRIGGERCONDS
+ TRIGGERFLAGS
+ TRIGGERINDEX
diff --git a/meta/recipes-devtools/rpm/rpm-4.11.2/use-pkgconfig-for-python.patch b/meta/recipes-devtools/rpm/rpm-4.11.2/use-pkgconfig-for-python.patch
new file mode 100644
index 0000000000..8d84cf8a11
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm-4.11.2/use-pkgconfig-for-python.patch
@@ -0,0 +1,38 @@
+
+Use pkgconfig to get the correct include paths
+
+Upstream-Status: Pending
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+Signed-off-by: Ronan Le Martret <ronan.lemartret@open.eurogiciel.org>
+
+diff --git a/configure.ac b/configure.ac
+index e97f727..8179f44 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -540,10 +540,10 @@ esac],
+
+ AS_IF([test "$enable_python" = yes],[
+ AM_PATH_PYTHON([2.6],[
+- WITH_PYTHON_INCLUDE=`${PYTHON} -c 'from distutils.sysconfig import *; import sys; sys.stdout.write(get_python_inc())'`
++ WITH_PYTHON_INCLUDE=$(${PKG_CONFIG} --cflags-only-I python)
+ WITH_PYTHON_SUBPACKAGE=1
+ save_CPPFLAGS="$CPPFLAGS"
+- CPPFLAGS="$CPPFLAGS -I$WITH_PYTHON_INCLUDE"
++ CPPFLAGS="$CPPFLAGS $WITH_PYTHON_INCLUDE"
+ AC_CHECK_HEADER([Python.h],[],
+ [AC_MSG_ERROR([missing Python.h])
+ ])
+diff --git a/python/Makefile.am b/python/Makefile.am
+index fff51ae..f37cb9d 100644
+--- a/python/Makefile.am
++++ b/python/Makefile.am
+@@ -4,7 +4,7 @@ EXTRA_DIST = rpm/__init__.py rpm/transaction.py
+
+ AM_CPPFLAGS = -I$(top_builddir)/include/
+ AM_CPPFLAGS += -I$(top_srcdir)/python
+-AM_CPPFLAGS += -I@WITH_PYTHON_INCLUDE@
++AM_CPPFLAGS += @WITH_PYTHON_INCLUDE@
+
+ pkgpyexec_LTLIBRARIES = _rpmmodule.la _rpmbmodule.la _rpmsmodule.la
+ pkgpyexec_DATA = rpm/__init__.py rpm/transaction.py
diff --git a/meta/recipes-devtools/rpm/rpm/0001-using-poptParseArgvString-to-parse-the-_gpg_check_pa.patch b/meta/recipes-devtools/rpm/rpm/0001-using-poptParseArgvString-to-parse-the-_gpg_check_pa.patch
new file mode 100644
index 0000000000..23bc3361d9
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm/0001-using-poptParseArgvString-to-parse-the-_gpg_check_pa.patch
@@ -0,0 +1,49 @@
+From 64851c6622aff64787a9fcea26cccde183b7c743 Mon Sep 17 00:00:00 2001
+From: "Roy.Li" <rongqing.li@windriver.com>
+Date: Tue, 11 Nov 2014 16:28:22 +0800
+Subject: [PATCH] using poptParseArgvString to parse the
+ _gpg_check_password_cmd
+
+Upstream-Status: Pending
+
+Both __gpg_check_password_cmd and __gpg_sign_cmd include "%{_gpg_name}", but
+strace shows that gpg_name keeps its surrounding quotes when __gpg_check_password_cmd
+is run, but not when __gpg_sign_cmd is run; for example, if gpg_name is "tester":
+
+ execve("/usr/bin/gpg", ["gpg", "--batch", "--no-verbose",
+ "--passphrase-fd", "3", "-u", "\"tester\"", "-so", "-"], [/* 20 vars */]) = 0
+
+ execve("/usr/bin/gpg", ["gpg", "--batch", "--no-verbose", "--no-armor",
+ "--passphrase-fd", "3", "--no-secmem-warning", "-u", "tester", "-sbo"..,) = 0
+
+It can be fixed by removing the quotes around %{_gpg_name} when defining
+__gpg_check_password_cmd in macros/macros, like below, but if gpg_name includes
+a space, that will not work.
+
+ %__gpg_check_password_cmd %{__gpg} \
+ gpg --batch --no-verbose --passphrase-fd 3 -u %{_gpg_name} -so -
+
+The poptParseArgvString function is already used to parse __gpg_sign_cmd, so use
+poptParseArgvString to parse __gpg_check_password_cmd as well to fix this issue.
+
+Signed-off-by: Roy.Li <rongqing.li@windriver.com>
+---
+ rpmdb/signature.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/rpmdb/signature.c b/rpmdb/signature.c
+index c35e0ab..016e8d1 100644
+--- a/rpmdb/signature.c
++++ b/rpmdb/signature.c
+@@ -529,7 +529,7 @@ int rpmCheckPassPhrase(const char * passPhrase)
+ (void) setenv("GNUPGHOME", gpg_path, 1);
+
+ cmd = rpmExpand("%{?__gpg_check_password_cmd}", NULL);
+- rc = argvSplit(&av, cmd, NULL);
++ rc = poptParseArgvString(cmd, NULL, (const char ***)&av);
+ if (!rc)
+ rc = execve(av[0], (char *const *)av+1, environ);
+
+--
+1.9.1
+
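For reference, a minimal standalone sketch (not part of the patch above) of how
poptParseArgvString() honours shell-style quoting when splitting a macro-expanded
command; the command string and the value "test user" are made-up examples, not
the real %__gpg_check_password_cmd expansion:

#include <stdio.h>
#include <stdlib.h>
#include <popt.h>

int main(void)
{
    /* A quoted value stays a single argv entry and the quotes are stripped,
     * unlike a naive whitespace split. */
    const char *cmd = "gpg --batch --passphrase-fd 3 -u \"test user\" -so -";
    int ac = 0;
    const char **av = NULL;

    if (poptParseArgvString(cmd, &ac, &av) == 0) {   /* returns 0 on success */
        for (int i = 0; i < ac; i++)
            printf("argv[%d] = %s\n", i, av[i]);
        free(av);                                    /* av is one malloc'd block */
    }
    return 0;
}

Build on a host with popt installed, e.g.: cc example.c -lpopt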
diff --git a/meta/recipes-devtools/rpm/rpm/rpm-realpath.patch b/meta/recipes-devtools/rpm/rpm/rpm-realpath.patch
new file mode 100644
index 0000000000..d2d9b09845
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm/rpm-realpath.patch
@@ -0,0 +1,24 @@
+chroot: realpath is required before expanding _dbpath
+
+_usr turned out to be a relative path to support dynamic config, but it's
+being used somewhere as an indicator to locate substrings, so we must get
+its real path in advance.
+
+Upstream-Status: Inappropriate (OpenEmbedded specific)
+
+Signed-off-by: Ming Liu <ming.liu@windriver.com>
+
+diff -urpN a/rpmio/rpmrpc.c b/rpmio/rpmrpc.c
+--- a/rpmio/rpmrpc.c
++++ b/rpmio/rpmrpc.c
+@@ -257,7 +257,9 @@ int Open(const char * path, int flags, m
+ /* XXX if the open(2) fails, try to strip a possible chroot(2) prefix. */
+ if (fdno < 0 && errno == ENOENT) {
+ const char *dbpath = rpmExpand("%{?_dbpath}/", NULL);
+- const char * fn = strstr(path + 1, dbpath);
++ char resolved_dbpath[PATH_MAX];
++ realpath(dbpath, resolved_dbpath);
++ const char * fn = strstr(path + 1, resolved_dbpath);
+ if (fn)
+ fdno = open(fn, flags, mode);
+ dbpath = _free(dbpath);
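A minimal standalone sketch (illustrative only, not part of the patch) of why a
relative-style _dbpath has to be resolved with realpath(3) before it is used as a
substring marker; the paths are hypothetical, and "/usr/../etc/" is used only
because it exists on a typical host so the call succeeds:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <limits.h>

int main(void)
{
    const char *path   = "/target-root/etc/rpm/macros"; /* hypothetical chroot-prefixed path */
    const char *dbpath = "/usr/../etc/";                 /* stand-in for a relative-style %{_dbpath} */
    char resolved[PATH_MAX];

    if (realpath(dbpath, resolved) == NULL)
        return 1;

    /* strstr() against the unresolved dbpath would never match; against the
     * resolved form it recovers the path with the chroot prefix stripped. */
    const char *fn = strstr(path + 1, resolved);
    printf("resolved=%s fn=%s\n", resolved, fn ? fn : "(none)");
    return 0;
}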
diff --git a/meta/recipes-devtools/rpm/rpm_4.11.2.bb b/meta/recipes-devtools/rpm/rpm_4.11.2.bb
new file mode 100644
index 0000000000..86a14fae50
--- /dev/null
+++ b/meta/recipes-devtools/rpm/rpm_4.11.2.bb
@@ -0,0 +1,133 @@
+SUMMARY = "The RPM package management system"
+DESCRIPTION = "The RPM Package Manager (RPM) is a powerful command line driven \
+package management system capable of installing, uninstalling, \
+verifying, querying, and updating software packages. Each software \
+package consists of an archive of files along with information about \
+the package like its version, a description, etc."
+
+SUMMARY_${PN}-dev = "Development files for manipulating RPM packages"
+DESCRIPTION_${PN}-dev = "This package contains the RPM C library and header files. These \
+development files will simplify the process of writing programs that \
+manipulate RPM packages and databases. These files are intended to \
+simplify the process of creating graphical package managers or any \
+other tools that need an intimate knowledge of RPM packages in order \
+to function."
+
+SUMMARY_python-rpm = "Python bindings for apps which will manipulate RPM packages"
+DESCRIPTION_python-rpm = "The rpm-python package contains a module that permits applications \
+written in the Python programming language to use the interface \
+supplied by the RPM Package Manager libraries."
+
+HOMEPAGE = "http://www.rpm.org"
+LICENSE = "GPL-2.0+"
+LIC_FILES_CHKSUM ??= "file://${COMMON_LICENSE_DIR}/GPL-2.0;md5=801f80980d171dd6425610833a22dbe6"
+
+DEPENDS = "db libxml2 xz findutils file popt nss bzip2 elfutils patch attr zlib acl gzip make binutils python"
+
+SRC_URI += "http://rpm.org/releases/rpm-4.11.x/${BP}.tar.bz2 \
+ file://use-pkgconfig-for-python.patch \
+ file://remove-db3-from-configure.patch \
+ file://add_RPMSENSE_MISSINGOK_to_rpmmodule.patch \
+ file://support-suggests-tag.patch \
+ file://remove-dir-check.patch \
+ file://disable_shortcircuited.patch \
+ file://fix_libdir.patch \
+ file://rpm-scriptetexechelp.patch \
+ file://pythondeps.sh \
+ "
+
+SRC_URI[md5sum] = "876ac9948a88367054f8ddb5c0e87173"
+SRC_URI[sha256sum] = "403f8de632b33846ce5746f429c21a60f40dff9dcb56f1b4118f37a0652a48d4"
+
+inherit autotools-brokensep
+inherit pythonnative
+inherit pkgconfig
+inherit gettext
+
+EXTRA_OECONF += "--host=${HOST_SYS} \
+ --program-prefix= \
+ --prefix=${prefix} \
+ --exec-prefix=${prefix} \
+ --bindir=${prefix}/bin \
+ --sbindir=${prefix}/sbin \
+ --sysconfdir=${sysconfdir} \
+ --datadir=${prefix}/share \
+ --includedir=${prefix}/include \
+ --libdir=${prefix}/lib \
+ --libexecdir=${prefix}/libexec \
+ --localstatedir=${localstatedir} \
+ --sharedstatedir=${prefix}/com \
+ --mandir=${mandir} \
+ --infodir=${infodir} \
+ --disable-dependency-tracking \
+ --with-acl \
+ --without-lua \
+ --without-cap \
+ --enable-shared \
+ --enable-python \
+ --with-external-db \
+ "
+
+CPPFLAGS_append = " `pkg-config --cflags nss`"
+LDFLAGS_append = " -Wl,-Bsymbolic-functions -ffunction-sections"
+CCFLAGS_append = " -fPIC "
+CXXFLAGS_append = " -fPIC "
+CFLAGS_append = " -fPIC -DRPM_VENDOR_WINDRIVER -DRPM_VENDOR_POKY -DRPM_VENDOR_OE "
+
+do_configure_prepend() {
+ rm -rf sqlite
+ rm -f m4/libtool.m4
+ rm -f m4/lt*.m4
+ rm -rf db3/configure*
+}
+
+do_install_append() {
+ mv ${D}/${base_bindir}/rpm ${D}/${bindir}/
+ rmdir ${D}/${base_bindir}
+ rm -f ${D}${prefix}/lib/*.la
+ rm -f ${D}${prefix}/lib/rpm-plugins/*.la
+ rm -f ${D}/${libdir}/python%{with_python_version}/site-packages/*.{a,la}
+ rm -f ${D}/${libdir}/python%{with_python_version}/site-packages/rpm/*.{a,la}
+ rm -fr ${D}/var
+ install -d ${D}${prefix}/lib/rpm/bin
+ ln -s ../debugedit ${D}${prefix}/lib/rpm/bin/debugedit
+ ln -s ../rpmdeps ${D}${prefix}/lib/rpm/bin/rpmdeps-oecore
+ install -m 0755 ${WORKDIR}/pythondeps.sh ${D}/${libdir}/rpm/pythondeps.sh
+}
+
+pkg_postinst_${PN}() {
+
+ [ "x\$D" == "x" ] && ldconfig
+ test -f ${localstatedir}/lib/rpm/Packages || rpm --initdb
+ rm -f ${localstatedir}/lib/rpm/Filemd5s \
+ ${localstatedir}/lib/rpm/Filedigests \
+ ${localstatedir}/lib/rpm/Requireversion \
+ ${localstatedir}/lib/rpm/Provideversion
+
+}
+
+pkg_postrm_${PN}() {
+ [ "x\$D" == "x" ] && ldconfig
+
+}
+
+PACKAGES += "python-${PN}"
+PROVIDES += "python-rpm"
+
+FILES_${PN} += "${libdir}/rpm \
+ ${libdir}/rpm-plugins/exec.so \
+ "
+RDEPENDS_${PN} = "base-files run-postinsts"
+RDEPENDS_${PN}_class-native = "base-files run-postinsts"
+
+FILES_${PN}-dbg += "${libdir}/rpm/.debug/* \
+ ${libdir}/rpm-plugins/.debug/* \
+ ${libdir}/python2.7/site-packages/rpm/.debug/* \
+ "
+
+FILES_${PN}-dev += "${libdir}/python2.7/site-packages/rpm/*.la"
+
+FILES_python-${PN} = "${libdir}/python2.7/site-packages/rpm/*"
+RDEPENDS_python-${PN} = "${PN} python"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/rpm/rpm_5.4+cvs.bb b/meta/recipes-devtools/rpm/rpm_5.4+cvs.bb
index 27d01dd49a..a85e28fa1f 100644
--- a/meta/recipes-devtools/rpm/rpm_5.4+cvs.bb
+++ b/meta/recipes-devtools/rpm/rpm_5.4+cvs.bb
@@ -109,6 +109,7 @@ SRC_URI = "cvs://anonymous@rpm5.org/cvs;tag=rpm-5_4;module=rpm \
file://rpm-db5-or-db6.patch \
file://rpm-rpmpgp-fix.patch \
file://rpm-disable-Wno-override-init.patch \
+ file://rpm-realpath.patch \
"
# Uncomment the following line to enable platform score debugging
@@ -274,8 +275,6 @@ FILES_${PN}-libs = "${libdir}/librpm-*.so \
${libdir}/librpmbuild-*.so \
"
-RDEPENDS_${PN}-build += "bash"
-
FILES_${PN}-build = "${prefix}/src/rpm \
${bindir}/rpmbuild \
${bindir}/rpmbuild.real \
@@ -349,7 +348,7 @@ FILES_${PN}-build = "${prefix}/src/rpm \
RDEPENDS_${PN} = "base-files run-postinsts"
RDEPENDS_${PN}_class-native = ""
RDEPENDS_${PN}_class-nativesdk = ""
-RDEPENDS_${PN}-build = "file"
+RDEPENDS_${PN}-build = "file bash perl"
RDEPENDS_python-rpm = "${PN}"
diff --git a/meta/recipes-devtools/rpm/rpm_5.4.14.bb b/meta/recipes-devtools/rpm/rpm_5.4.14.bb
index 8fec6565c9..d99a300285 100644
--- a/meta/recipes-devtools/rpm/rpm_5.4.14.bb
+++ b/meta/recipes-devtools/rpm/rpm_5.4.14.bb
@@ -93,6 +93,8 @@ SRC_URI = "http://www.rpm5.org/files/rpm/rpm-5.4/rpm-5.4.14-0.20131024.src.rpm;e
file://rpm-db5-or-db6.patch \
file://rpm-disable-Wno-override-init.patch \
file://rpmqv_cc_b_gone.patch \
+ file://rpm-realpath.patch \
+ file://0001-using-poptParseArgvString-to-parse-the-_gpg_check_pa.patch \
"
# Uncomment the following line to enable platform score debugging
@@ -261,8 +263,6 @@ FILES_${PN}-libs = "${libdir}/librpm-*.so \
${libdir}/librpmbuild-*.so \
"
-RDEPENDS_${PN}-build += "bash"
-
FILES_${PN}-build = "${prefix}/src/rpm \
${bindir}/rpmbuild \
${bindir}/rpmbuild.real \
@@ -336,7 +336,7 @@ FILES_${PN}-build = "${prefix}/src/rpm \
RDEPENDS_${PN} = "base-files run-postinsts"
RDEPENDS_${PN}_class-native = ""
RDEPENDS_${PN}_class-nativesdk = ""
-RDEPENDS_${PN}-build = "file"
+RDEPENDS_${PN}-build = "file bash perl"
RDEPENDS_python-rpm = "${PN}"
@@ -344,6 +344,7 @@ FILES_python-rpm-dbg = "${libdir}/python*/site-packages/rpm/.debug/_*"
FILES_python-rpm-dev = "${libdir}/python*/site-packages/rpm/*.la"
FILES_python-rpm-staticdev = "${libdir}/python*/site-packages/rpm/*.a"
FILES_python-rpm = "${libdir}/python*/site-packages/rpm"
+PROVIDES += "python-rpm"
FILES_perl-module-rpm = "${libdir}/perl/*/* \
"
diff --git a/meta/recipes-devtools/rpm/rpmresolve/rpmresolve.c b/meta/recipes-devtools/rpm/rpmresolve/rpmresolve.c
index 4e9d055f24..7f4caf9886 100644
--- a/meta/recipes-devtools/rpm/rpmresolve/rpmresolve.c
+++ b/meta/recipes-devtools/rpm/rpmresolve/rpmresolve.c
@@ -275,7 +275,13 @@ int printDepList(rpmts *ts, int tscount)
char *name = strdup((char *)he->p.ptr);
/* Get its requires */
he->tag = RPMTAG_REQUIRENAME;
- rc = (headerGet(h, he, 0) != 1);
+ if (rc = (headerGet(h, he, 0) != 1)) {
+ if (debugmode) {
+ printf("DEBUG: %s requires null\n", name);
+ }
+ rc = 0;
+ continue;
+ }
ARGV_t reqs = (ARGV_t)he->p.ptr;
/* Get its requireflags */
he->tag = RPMTAG_REQUIREFLAGS;
diff --git a/meta/recipes-devtools/rsync/rsync_3.1.0.bb b/meta/recipes-devtools/rsync/rsync_3.1.0.bb
deleted file mode 100644
index 72c072dae5..0000000000
--- a/meta/recipes-devtools/rsync/rsync_3.1.0.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-require rsync.inc
-
-
-SRC_URI += "file://acinclude.m4"
-
-SRC_URI[md5sum] = "3be148772a33224771a8d4d2a028b132"
-SRC_URI[sha256sum] = "81ca23f77fc9b957eb9845a6024f41af0ff0c619b7f38576887c63fa38e2394e"
-
-EXTRA_OECONF += "--disable-xattr-support --disable-acl-support"
-
-# rsync 3.0 uses configure.sh instead of configure, and
-# makefile checks the existence of configure.sh
-do_configure_prepend () {
- rm -f ${S}/configure ${S}/configure.sh
- cp -f ${WORKDIR}/acinclude.m4 ${S}/
-
- # by default, if crosscompiling, rsync
- # disables a number of capabilities, hardlinking
- # symlinks and special files (ie devices)
- export rsync_cv_can_hardlink_special=yes
- export rsync_cv_can_hardlink_symlink=yes
-}
-
-do_configure_append () {
- cp -f ${S}/configure ${S}/configure.sh
-}
diff --git a/meta/recipes-devtools/rsync/rsync_3.1.1.bb b/meta/recipes-devtools/rsync/rsync_3.1.1.bb
new file mode 100644
index 0000000000..5ff8ae8bb4
--- /dev/null
+++ b/meta/recipes-devtools/rsync/rsync_3.1.1.bb
@@ -0,0 +1,27 @@
+require rsync.inc
+
+
+SRC_URI += "file://acinclude.m4"
+
+SRC_URI[md5sum] = "43bd6676f0b404326eee2d63be3cdcfe"
+SRC_URI[sha256sum] = "7de4364fcf5fe42f3bdb514417f1c40d10bbca896abe7e7f2c581c6ea08a2621"
+
+PACKAGECONFIG ??= "acl attr"
+PACKAGECONFIG[acl] = "--enable-acl-support,--disable-acl-support,acl,"
+PACKAGECONFIG[attr] = "--enable-xattr-support,--disable-xattr-support,attr,"
+
+# rsync 3.0 uses configure.sh instead of configure, and
+# makefile checks the existence of configure.sh
+do_configure_prepend () {
+ rm -f ${S}/configure ${S}/configure.sh
+ cp -f ${WORKDIR}/acinclude.m4 ${S}/
+
+ # By default, if crosscompiling, rsync disables a number of
+ # capabilities, hardlinking symlinks and special files (i.e. devices)
+ export rsync_cv_can_hardlink_special=yes
+ export rsync_cv_can_hardlink_symlink=yes
+}
+
+do_configure_append () {
+ cp -f ${S}/configure ${S}/configure.sh
+}
diff --git a/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts.service b/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts.service
index 822327aadc..85a043949e 100644
--- a/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts.service
+++ b/meta/recipes-devtools/run-postinsts/run-postinsts/run-postinsts.service
@@ -13,5 +13,4 @@ RemainAfterExit=No
TimeoutSec=0
[Install]
-WantedBy=basic.target
WantedBy=sysinit.target
diff --git a/meta/recipes-devtools/squashfs-tools/squashfs-tools_4.3.bb b/meta/recipes-devtools/squashfs-tools/squashfs-tools_4.3.bb
index e3783fd81d..d36f0fea44 100644
--- a/meta/recipes-devtools/squashfs-tools/squashfs-tools_4.3.bb
+++ b/meta/recipes-devtools/squashfs-tools/squashfs-tools_4.3.bb
@@ -19,6 +19,7 @@ SRC_URI[lzma.md5sum] = "29d5ffd03a5a3e51aef6a74e9eafb759"
SRC_URI[lzma.sha256sum] = "c935fd04dd8e0e8c688a3078f3675d699679a90be81c12686837e0880aa0fa1e"
S = "${WORKDIR}/squashfs${PV}/squashfs-tools"
+SPDX_S = "${WORKDIR}/squashfs${PV}"
# EXTRA_OEMAKE is typically: -e MAKEFLAGS=
# the -e causes problems as CFLAGS is modified in the Makefile, so
diff --git a/meta/recipes-devtools/strace/strace-4.8/0001-Work-around-conflict-between-sys-ptrace.h-and-linux-.patch b/meta/recipes-devtools/strace/strace-4.8/0001-Work-around-conflict-between-sys-ptrace.h-and-linux-.patch
deleted file mode 100644
index 5a0090eb6d..0000000000
--- a/meta/recipes-devtools/strace/strace-4.8/0001-Work-around-conflict-between-sys-ptrace.h-and-linux-.patch
+++ /dev/null
@@ -1,108 +0,0 @@
-Upstream-Status: Backport
-
-From 0b4060f61f1bb101b5d8d084714b7d2feacdb199 Mon Sep 17 00:00:00 2001
-From: Ali Polatel <alip@exherbo.org>
-Date: Tue, 24 Sep 2013 20:04:32 +0300
-Subject: [PATCH] Work around conflict between <sys/ptrace.h> and
- <linux/ptrace.h>
-
-Since glibc-2.18~39 <sys/ptrace.h> defines ptrace_peeksiginfo_args
-which collides with <linux/ptrace.h>.
-
-* configure.ac: Check for `struct ptrace_peeksiginfo_args' in
-<sys/ptrace.h>.
-* process.c: Work around potential conflict between <sys/ptrace.h>
-and <linux/ptrace.h> by redefining ptrace_peeksiginfo_args.
-* signal.c: Likewise.
-* syscall.c: Likewise.
-* util.c: Likewise.
-
-Signed-off-by: Ali Polatel <alip@exherbo.org>
----
- configure.ac | 2 +-
- process.c | 4 ++++
- signal.c | 4 ++++
- syscall.c | 4 ++++
- util.c | 4 ++++
- 5 files changed, 17 insertions(+), 1 deletion(-)
-
-diff --git a/configure.ac b/configure.ac
-index f19e4f2..aa4923a 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -257,7 +257,7 @@ AC_CHECK_MEMBERS([struct T_conn_res.QUEUE_ptr,
-
- AC_CHECK_TYPES([struct __old_kernel_stat],,, [#include <asm/stat.h>])
-
--AC_CHECK_TYPES([struct pt_all_user_regs, struct ia64_fpreg],,,
-+AC_CHECK_TYPES([struct pt_all_user_regs, struct ia64_fpreg, struct ptrace_peeksiginfo_args],,,
- [#include <sys/ptrace.h>])
-
- AC_CHECK_TYPES([struct user_desc],,, [#include <asm/ldt.h>])
-diff --git a/process.c b/process.c
-index 1a2181b..59428a4 100644
---- a/process.c
-+++ b/process.c
-@@ -63,7 +63,11 @@
- # ifdef HAVE_STRUCT_PT_ALL_USER_REGS
- # define pt_all_user_regs XXX_pt_all_user_regs
- # endif
-+# ifdef HAVE_STRUCT_PTRACE_PEEKSIGINFO_ARGS
-+# define ptrace_peeksiginfo_args XXX_ptrace_peeksiginfo_args
-+# endif
- # include <linux/ptrace.h>
-+# undef ptrace_peeksiginfo_args
- # undef ia64_fpreg
- # undef pt_all_user_regs
- #endif
-diff --git a/signal.c b/signal.c
-index 7fb9abf..3411ddd 100644
---- a/signal.c
-+++ b/signal.c
-@@ -51,7 +51,11 @@
- # ifdef HAVE_STRUCT_PT_ALL_USER_REGS
- # define pt_all_user_regs XXX_pt_all_user_regs
- # endif
-+# ifdef HAVE_STRUCT_PTRACE_PEEKSIGINFO_ARGS
-+# define ptrace_peeksiginfo_args XXX_ptrace_peeksiginfo_args
-+# endif
- # include <linux/ptrace.h>
-+# undef ptrace_peeksiginfo_args
- # undef ia64_fpreg
- # undef pt_all_user_regs
- #endif
-diff --git a/syscall.c b/syscall.c
-index 83a95bd..3477dcd 100644
---- a/syscall.c
-+++ b/syscall.c
-@@ -48,7 +48,11 @@
- # ifdef HAVE_STRUCT_PT_ALL_USER_REGS
- # define pt_all_user_regs XXX_pt_all_user_regs
- # endif
-+# ifdef HAVE_STRUCT_PTRACE_PEEKSIGINFO_ARGS
-+# define ptrace_peeksiginfo_args XXX_ptrace_peeksiginfo_args
-+# endif
- # include <linux/ptrace.h>
-+# undef ptrace_peeksiginfo_args
- # undef ia64_fpreg
- # undef pt_all_user_regs
- #endif
-diff --git a/util.c b/util.c
-index 0dab902..30a7f19 100644
---- a/util.c
-+++ b/util.c
-@@ -55,7 +55,11 @@
- # ifdef HAVE_STRUCT_PT_ALL_USER_REGS
- # define pt_all_user_regs XXX_pt_all_user_regs
- # endif
-+# ifdef HAVE_STRUCT_PTRACE_PEEKSIGINFO_ARGS
-+# define ptrace_peeksiginfo_args XXX_ptrace_peeksiginfo_args
-+# endif
- # include <linux/ptrace.h>
-+# undef ptrace_peeksiginfo_args
- # undef ia64_fpreg
- # undef pt_all_user_regs
- #endif
---
-1.8.2.1
-
diff --git a/meta/recipes-devtools/strace/strace-4.8/Makefile-ptest.patch b/meta/recipes-devtools/strace/strace-4.8/Makefile-ptest.patch
deleted file mode 100644
index f5556b23e3..0000000000
--- a/meta/recipes-devtools/strace/strace-4.8/Makefile-ptest.patch
+++ /dev/null
@@ -1,28 +0,0 @@
-strace: Add ptest
-
-Signed-off-by: Gabriel Barbu <gabriel.barbu@enea.com>
-Upstream-Status: Pending
-
---- old/tests/Makefile.am 2013-07-23 13:44:24.660481381 +0200
-+++ new/tests/Makefile.am 2013-07-23 16:22:42.937654391 +0200
-@@ -9,3 +9,20 @@
- EXTRA_DIST = init.sh $(TESTS)
-
- CLEANFILES = check.log
-+
-+buildtest-TESTS: $(check_PROGRAMS) $(TESTS)
-+
-+install-ptest:
-+ install $(BUILDDIR)/strace $(DESTDIR)
-+ install "$(srcdir)/.."/strace-log-merge $(DESTDIR)
-+ install -d $(DESTDIR)/$(TESTDIR)
-+ cp $(BUILDDIR)/$(TESTDIR)/Makefile $(DESTDIR)/$(TESTDIR)
-+ sed -i -e 's/^Makefile:/_Makefile:/' $(DESTDIR)/$(TESTDIR)/Makefile
-+ for file in $(check_PROGRAMS); do \
-+ install $(BUILDDIR)/$(TESTDIR)/$$file $(DESTDIR)/$(TESTDIR); \
-+ done
-+ for file in $(EXTRA_DIST); do \
-+ install $(srcdir)/$$file $(DESTDIR)/$(TESTDIR); \
-+ sed -i -e 's/$${srcdir=.}/./g' $(DESTDIR)/$(TESTDIR)/$$file; \
-+ done
-+ sed -i -e 's/$$srcdir/./g' $(DESTDIR)/$(TESTDIR)/net
diff --git a/meta/recipes-devtools/strace/strace-4.8/strace-fix-64-bit-process-detection.patch b/meta/recipes-devtools/strace/strace-4.8/strace-fix-64-bit-process-detection.patch
deleted file mode 100644
index a6579df598..0000000000
--- a/meta/recipes-devtools/strace/strace-4.8/strace-fix-64-bit-process-detection.patch
+++ /dev/null
@@ -1,34 +0,0 @@
-powerpc64: fix 64-bit process detection on embedded
-
-* syscall.c (get_scno) [POWERPC64]: Fix 64-bit process detection
-on embedded powerpc.
-
-Upstream-Status: Backport
-
-Signed-off-by: James Yang <james.yang@freescale.com>
-Signed-off-by: Dmitry V. Levin <ldv@altlinux.org>
-
-diff --git a/syscall.c.orig b/syscall.c
-index 7efee0e..72d9453 100644
---- a/syscall.c.orig
-+++ b/syscall.c
-@@ -1207,11 +1207,14 @@ get_scno(struct tcb *tcp)
- /* Check for 64/32 bit mode. */
- if (upeek(tcp, sizeof(unsigned long)*PT_MSR, &val) < 0)
- return -1;
-- /* SF is bit 0 of MSR */
-- if (val < 0)
-- currpers = 0;
-- else
-- currpers = 1;
-+
-+ /*
-+ * Check for 64/32 bit mode.
-+ * Embedded implementations covered by Book E extension of PPC use
-+ * bit 0 (CM) of 32-bit Machine state register (MSR).
-+ * Other implementations use bit 0 (SF) of 64-bit MSR.
-+ */
-+ currpers = (val & 0x8000000080000000) ? 0 : 1;
- update_personality(tcp, currpers);
- # endif
- #elif defined(AVR32)
diff --git a/meta/recipes-devtools/strace/strace-4.9/Makefile-ptest.patch b/meta/recipes-devtools/strace/strace-4.9/Makefile-ptest.patch
new file mode 100644
index 0000000000..df013102d7
--- /dev/null
+++ b/meta/recipes-devtools/strace/strace-4.9/Makefile-ptest.patch
@@ -0,0 +1,53 @@
+strace: Add ptest
+
+Upstream-Status: Inappropriate
+
+Signed-off-by: Gabriel Barbu <gabriel.barbu@enea.com>
+Signed-off-by: Chong Lu <Chong.Lu@windriver.com>
+---
+ configure.ac | 2 +-
+ tests/Makefile.am | 18 ++++++++++++++++++
+ 2 files changed, 19 insertions(+), 1 deletion(-)
+
+diff --git a/configure.ac b/configure.ac
+index 054f85b..9aec566 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -6,7 +6,7 @@ AC_INIT([strace],
+ AC_CONFIG_SRCDIR([strace.c])
+ AC_CONFIG_AUX_DIR([.])
+ AC_CONFIG_HEADERS([config.h])
+-AM_INIT_AUTOMAKE([foreign dist-xz no-dist-gzip silent-rules parallel-tests])
++AM_INIT_AUTOMAKE([foreign dist-xz no-dist-gzip silent-rules serial-tests])
+ AM_MAINTAINER_MODE
+ AC_CANONICAL_HOST
+
+diff --git a/tests/Makefile.am b/tests/Makefile.am
+index 922452a..b17837f 100644
+--- a/tests/Makefile.am
++++ b/tests/Makefile.am
+@@ -37,3 +37,21 @@ TEST_LOG_COMPILER = $(srcdir)/run.sh
+ EXTRA_DIST = init.sh run.sh sigaction.awk $(TESTS)
+
+ CLEANFILES = $(TESTS:=.tmp)
++
++buildtest-TESTS: $(check_PROGRAMS) $(TESTS)
++
++install-ptest:
++ install $(BUILDDIR)/strace $(DESTDIR)
++ install "$(srcdir)/.."/strace-log-merge $(DESTDIR)
++ install -d $(DESTDIR)/$(TESTDIR)
++ cp $(BUILDDIR)/$(TESTDIR)/Makefile $(DESTDIR)/$(TESTDIR)
++ sed -i -e 's/^Makefile:/_Makefile:/' $(DESTDIR)/$(TESTDIR)/Makefile
++ sed -i -e 's/bash/sh/' $(DESTDIR)/$(TESTDIR)/Makefile
++ for file in $(check_PROGRAMS); do \
++ install $(BUILDDIR)/$(TESTDIR)/$$file $(DESTDIR)/$(TESTDIR); \
++ done
++ for file in $(EXTRA_DIST); do \
++ install $(srcdir)/$$file $(DESTDIR)/$(TESTDIR); \
++ sed -i -e 's/$${srcdir=.}/./g' $(DESTDIR)/$(TESTDIR)/$$file; \
++ done
++ for i in net net-fd scm_rights-fd sigaction; do sed -i -e 's/$$srcdir/./g' $(DESTDIR)/$(TESTDIR)/$$i.test; done
+--
+1.9.1
+
diff --git a/meta/recipes-devtools/strace/strace-4.8/git-version-gen b/meta/recipes-devtools/strace/strace-4.9/git-version-gen
index 8fee74e121..8fee74e121 100755
--- a/meta/recipes-devtools/strace/strace-4.8/git-version-gen
+++ b/meta/recipes-devtools/strace/strace-4.9/git-version-gen
diff --git a/meta/recipes-devtools/strace/strace-4.8/run-ptest b/meta/recipes-devtools/strace/strace-4.9/run-ptest
index 133cf92d02..133cf92d02 100755
--- a/meta/recipes-devtools/strace/strace-4.8/run-ptest
+++ b/meta/recipes-devtools/strace/strace-4.9/run-ptest
diff --git a/meta/recipes-devtools/strace/strace-4.8/strace-add-configure-options.patch b/meta/recipes-devtools/strace/strace-4.9/strace-add-configure-options.patch
index 27266d9052..15e37bce03 100644
--- a/meta/recipes-devtools/strace/strace-4.8/strace-add-configure-options.patch
+++ b/meta/recipes-devtools/strace/strace-4.9/strace-add-configure-options.patch
@@ -3,18 +3,24 @@ Add options "aio" and "acl" to enable/disable libaio and acl support.
Upstream-Status: Pending
Signed-off-by: Kai Kang <kai.kang@windriver.com>
+Signed-off-by: Chong Lu <Chong.Lu@windriver.com>
+---
+ configure.ac | 27 ++++++++++++++++++++++++++-
+ 1 file changed, 26 insertions(+), 1 deletion(-)
---- strace-4.8/configure.ac.orig 2013-06-21 15:37:52.145892182 +0800
-+++ strace-4.8/configure.ac 2013-06-21 15:45:49.029909004 +0800
-@@ -219,7 +219,6 @@ AC_CHECK_HEADERS(m4_normalize([
- netinet/sctp.h
+diff --git a/configure.ac b/configure.ac
+index 054f85b..8ed49f1 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -238,7 +238,6 @@ AC_CHECK_HEADERS(m4_normalize([
poll.h
+ scsi/sg.h
stropts.h
- sys/acl.h
sys/asynch.h
sys/conf.h
sys/epoll.h
-@@ -231,6 +230,19 @@ AC_CHECK_HEADERS(m4_normalize([
+@@ -250,6 +249,18 @@ AC_CHECK_HEADERS(m4_normalize([
sys/uio.h
sys/vfs.h
]))
@@ -22,30 +28,25 @@ Signed-off-by: Kai Kang <kai.kang@windriver.com>
+AC_ARG_ENABLE([acl],
+ [AS_HELP_STRING([--enable-acl], [turn on acl support])],
+ [case $enableval in
-+ yes)
++ yes)
+ AC_CHECK_HEADERS([sys/acl.h])
+ ;;
+ no) ;;
+ *) AC_MSG_ERROR([bad value $enableval for aio option]) ;;
+ esac]
-+
+)
+
AC_CHECK_HEADERS([linux/icmp.h linux/in6.h linux/netlink.h linux/if_packet.h],
[], [], [#include <stddef.h>
#include <sys/socket.h>
-@@ -301,10 +313,19 @@ AC_CHECK_SIZEOF([long])
- AC_CHECK_SIZEOF([long long])
- AC_CHECK_SIZEOF([rlim_t],,[#include <sys/resource.h>])
+@@ -649,6 +660,20 @@ if test "x$st_cv_have___builtin_popcount" = xyes; then
+ [Define to 1 if the system provides __builtin_popcount function])
+ fi
--AC_CHECK_HEADERS([libaio.h], [
-- AC_CHECK_MEMBERS([struct iocb.u.c.flags],,, [#include <libaio.h>])
-- AC_CHECK_DECLS([IO_CMD_PWRITE, IO_CMD_PWRITEV],,, [#include <libaio.h>])
--])
+AC_ARG_ENABLE([aio],
+ [AS_HELP_STRING([--enable-aio], [turn on libaio support])],
+ [case $enableval in
-+ yes)
++ yes)
+ AC_CHECK_HEADERS([libaio.h], [
+ AC_CHECK_MEMBERS([struct iocb.u.c.flags],,, [#include <libaio.h>])
+ AC_CHECK_DECLS([IO_CMD_PWRITE, IO_CMD_PWRITEV],,, [#include <libaio.h>])
@@ -55,6 +56,10 @@ Signed-off-by: Kai Kang <kai.kang@windriver.com>
+ *) AC_MSG_ERROR([bad value $enableval for aio option]) ;;
+ esac]
+)
-
++
AC_PATH_PROG([PERL], [perl])
+ dnl stack trace with libunwind
+--
+1.9.1
+
diff --git a/meta/recipes-devtools/strace/strace_4.8.bb b/meta/recipes-devtools/strace/strace_4.9.bb
index ed738eaad7..cce130d58c 100644
--- a/meta/recipes-devtools/strace/strace_4.8.bb
+++ b/meta/recipes-devtools/strace/strace_4.9.bb
@@ -8,21 +8,20 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/strace/strace-${PV}.tar.xz \
file://git-version-gen \
file://strace-add-configure-options.patch \
file://Makefile-ptest.patch \
- file://strace-fix-64-bit-process-detection.patch \
file://run-ptest \
- file://0001-Work-around-conflict-between-sys-ptrace.h-and-linux-.patch \
"
-SRC_URI[md5sum] = "c575ef43829586801f514fd91bfe7575"
-SRC_URI[sha256sum] = "f492291f07a7c805c07a8395cce1ea054a6401ad414f4cc12185672215e1d7f8"
+SRC_URI[md5sum] = "885eafadb10f6c60464a266d3929a2a4"
+SRC_URI[sha256sum] = "095bfea5c540b91d297ccac73b21b92fd54a24599fd70395db87ff9eb7fd6f65"
inherit autotools ptest
-RDEPENDS_${PN}-ptest += "make"
+RDEPENDS_${PN}-ptest += "make coreutils grep gawk"
PACKAGECONFIG_class-target ?= "libaio ${@bb.utils.contains('DISTRO_FEATURES', 'acl', 'acl', '', d)}"
PACKAGECONFIG[libaio] = "--enable-aio,--disable-aio,libaio"
PACKAGECONFIG[acl] = "--enable-acl,--disable-acl,acl"
+PACKAGECONFIG[libunwind] = "--with-libunwind, --without-libunwind, libunwind"
export INCLUDES = "-I. -I./linux"
diff --git a/meta/recipes-devtools/subversion/subversion-1.8.9/disable_macos.patch b/meta/recipes-devtools/subversion/subversion-1.8.10/disable_macos.patch
index ec3be496f3..ec3be496f3 100644
--- a/meta/recipes-devtools/subversion/subversion-1.8.9/disable_macos.patch
+++ b/meta/recipes-devtools/subversion/subversion-1.8.10/disable_macos.patch
diff --git a/meta/recipes-devtools/subversion/subversion-1.8.9/libtool2.patch b/meta/recipes-devtools/subversion/subversion-1.8.10/libtool2.patch
index 5cd572bfc8..5cd572bfc8 100644
--- a/meta/recipes-devtools/subversion/subversion-1.8.9/libtool2.patch
+++ b/meta/recipes-devtools/subversion/subversion-1.8.10/libtool2.patch
diff --git a/meta/recipes-devtools/subversion/subversion/subversion-CVE-2014-3522.patch b/meta/recipes-devtools/subversion/subversion/subversion-CVE-2014-3522.patch
new file mode 100644
index 0000000000..03d5b9710f
--- /dev/null
+++ b/meta/recipes-devtools/subversion/subversion/subversion-CVE-2014-3522.patch
@@ -0,0 +1,439 @@
+Upstream-Status: Backport
+
+Signed-off-by: Yue Tao <yue.tao@windriver.com>
+
+diff --git a/subversion/libsvn_ra_serf/util.c.old b/subversion/libsvn_ra_serf/util.c
+index b6c0141..8b09770 100644
+--- a/subversion/libsvn_ra_serf/util.c.old
++++ b/subversion/libsvn_ra_serf/util.c
+@@ -21,7 +21,6 @@
+ #define APR_WANT_STRFUNC
+ #include <apr.h>
+ #include <apr_want.h>
+-#include <apr_fnmatch.h>
+
+ #include <serf.h>
+ #include <serf_bucket_types.h>
+@@ -30,6 +29,7 @@
+ #include "svn_private_config.h"
+ #include "svn_xml.h"
+ #include "private/svn_dep_compat.h"
++#include "private/svn_cert.h"
+
+ #include "ra_serf.h"
+
+@@ -113,7 +113,12 @@ ssl_server_cert(void *baton, int failures,
+ apr_uint32_t svn_failures;
+ svn_error_t *err;
+ apr_hash_t *issuer, *subject, *serf_cert;
++ apr_array_header_t *san;
+ void *creds;
++ svn_boolean_t found_matching_hostname = FALSE;
++ svn_boolean_t found_san_entry = FALSE;
++ svn_string_t *actual_hostname =
++ svn_string_create(conn->hostname, scratch_pool);
+
+ /* Implicitly approve any non-server certs. */
+ if (serf_ssl_cert_depth(cert) > 0)
+@@ -129,6 +134,7 @@ ssl_server_cert(void *baton, int failures,
+ serf_cert = serf_ssl_cert_certificate(cert, subpool);
+
+ cert_info.hostname = apr_hash_get(subject, "CN", APR_HASH_KEY_STRING);
++ san = apr_hash_get(serf_cert, "subjectAltName", APR_HASH_KEY_STRING);
+ cert_info.fingerprint = apr_hash_get(serf_cert, "sha1", APR_HASH_KEY_STRING);
+ if (! cert_info.fingerprint)
+ cert_info.fingerprint = apr_pstrdup(subpool, "<unknown>");
+@@ -145,16 +145,43 @@ ssl_server_cert(void *baton, int failures,
+
+ svn_failures = ssl_convert_serf_failures(failures);
+
+- /* Match server certificate CN with the hostname of the server */
+- if (cert_info.hostname)
++ /* Try to find matching server name via subjectAltName first... */
++ if (san)
+ {
+- if (apr_fnmatch(cert_info.hostname, conn->hostinfo,
+- APR_FNM_PERIOD) == APR_FNM_NOMATCH)
++ int i;
++ found_san_entry = san->nelts > 0;
++ for (i = 0; i < san->nelts; i++)
+ {
+- svn_failures |= SVN_AUTH_SSL_CNMISMATCH;
++ char *s = APR_ARRAY_IDX(san, i, char*);
++ svn_string_t *cert_hostname = svn_string_create(s, scratch_pool);
++
++ if (svn_cert__match_dns_identity(cert_hostname, actual_hostname))
++ {
++ found_matching_hostname = TRUE;
++ cert_info.hostname = s;
++ break;
++ }
+ }
+ }
+
++ /* Match server certificate CN with the hostname of the server iff
++ * we didn't find any subjectAltName fields and try to match them.
++ * Per RFC 2818 they are authoritative if present and CommonName
++ * should be ignored. */
++ if (!found_matching_hostname && !found_san_entry && cert_info.hostname)
++ {
++ svn_string_t *cert_hostname = svn_string_create(cert_info.hostname,
++ scratch_pool);
++
++ if (svn_cert__match_dns_identity(cert_hostname, actual_hostname))
++ {
++ found_matching_hostname = TRUE;
++ }
++ }
++
++ if (!found_matching_hostname)
++ svn_failures |= SVN_AUTH_SSL_CNMISMATCH;
++
+ svn_auth_set_parameter(conn->session->wc_callbacks->auth_baton,
+ SVN_AUTH_PARAM_SSL_SERVER_FAILURES,
+ &svn_failures);
+@@ -261,6 +293,10 @@ svn_ra_serf__conn_setup(apr_socket_t *sock,
+ if (!conn->ssl_context)
+ {
+ conn->ssl_context = serf_bucket_ssl_encrypt_context_get(rb);
++
++#if SERF_VERSION_AT_LEAST(1,0,0)
++ serf_ssl_set_hostname(conn->ssl_context, conn->hostinfo);
++#endif
+
+ serf_ssl_client_cert_provider_set(conn->ssl_context,
+ svn_ra_serf__handle_client_cert,
+diff --git a/subversion/libsvn_subr/dirent_uri.c.old b/subversion/libsvn_subr/dirent_uri.c
+index eef99ba..a5f9e7e 100644
+--- a/subversion/libsvn_subr/dirent_uri.c.old
++++ b/subversion/libsvn_subr/dirent_uri.c
+@@ -30,6 +30,7 @@
+ #include "svn_path.h"
+
+ #include "private_uri.h"
++#include "private/svn_cert.h"
+
+ /* The canonical empty path. Can this be changed? Well, change the empty
+ test below and the path library will work, not so sure about the fs/wc
+@@ -1194,3 +1195,81 @@ svn_uri_is_canonical(const char *uri, apr_pool_t *pool)
+
+ return TRUE;
+ }
++
++
++/* -------------- The cert API (see private/svn_cert.h) ------------- */
++
++svn_boolean_t
++svn_cert__match_dns_identity(svn_string_t *pattern, svn_string_t *hostname)
++{
++ apr_size_t pattern_pos = 0, hostname_pos = 0;
++
++ /* support leading wildcards that composed of the only character in the
++ * left-most label. */
++ if (pattern->len >= 2 &&
++ pattern->data[pattern_pos] == '*' &&
++ pattern->data[pattern_pos + 1] == '.')
++ {
++ while (hostname_pos < hostname->len &&
++ hostname->data[hostname_pos] != '.')
++ {
++ hostname_pos++;
++ }
++ /* Assume that the wildcard must match something. Rule 2 says
++ * that *.example.com should not match example.com. If the wildcard
++ * ends up not matching anything then it matches .example.com which
++ * seems to be essentially the same as just example.com */
++ if (hostname_pos == 0)
++ return FALSE;
++
++ pattern_pos++;
++ }
++
++ while (pattern_pos < pattern->len && hostname_pos < hostname->len)
++ {
++ char pattern_c = pattern->data[pattern_pos];
++ char hostname_c = hostname->data[hostname_pos];
++
++ /* fold case as described in RFC 4343.
++ * Note: We actually convert to lowercase, since our URI
++ * canonicalization code converts to lowercase and generally
++ * most certs are issued with lowercase DNS names, meaning
++ * this avoids the fold operation in most cases. The RFC
++ * suggests the opposite transformation, but doesn't require
++ * any specific implementation in any case. It is critical
++ * that this folding be locale independent so you can't use
++ * tolower(). */
++ pattern_c = canonicalize_to_lower(pattern_c);
++ hostname_c = canonicalize_to_lower(hostname_c);
++
++ if (pattern_c != hostname_c)
++ {
++ /* doesn't match */
++ return FALSE;
++ }
++ else
++ {
++ /* characters match so skip both */
++ pattern_pos++;
++ hostname_pos++;
++ }
++ }
++
++ /* ignore a trailing period on the hostname since this has no effect on the
++ * security of the matching. See the following for the long explanation as
++ * to why:
++ * https://bugzilla.mozilla.org/show_bug.cgi?id=134402#c28
++ */
++ if (pattern_pos == pattern->len &&
++ hostname_pos == hostname->len - 1 &&
++ hostname->data[hostname_pos] == '.')
++ hostname_pos++;
++
++ if (pattern_pos != pattern->len || hostname_pos != hostname->len)
++ {
++ /* end didn't match */
++ return FALSE;
++ }
++
++ return TRUE;
++}
+diff --git a/subversion/tests/libsvn_subr/dirent_uri-test.c.old b/subversion/tests/libsvn_subr/dirent_uri-test.c
+index d71d9c1..370b64a 100644
+--- a/subversion/tests/libsvn_subr/dirent_uri-test.c.old
++++ b/subversion/tests/libsvn_subr/dirent_uri-test.c
+@@ -31,6 +31,7 @@
+
+ #include "svn_pools.h"
+ #include "svn_dirent_uri.h"
++#include "private/svn_cert.h"
+
+ #include "../svn_test.h"
+ #include "../../libsvn_subr/private_uri.h"
+@@ -1671,6 +1672,145 @@ test_uri_internal_style(const char **msg,
+ return SVN_NO_ERROR;
+ }
+
++struct cert_match_dns_test {
++ const char *pattern;
++ const char *hostname;
++ svn_boolean_t expected;
++};
++
++static svn_error_t *
++run_cert_match_dns_tests(struct cert_match_dns_test *tests, apr_pool_t *pool)
++{
++ struct cert_match_dns_test *ct;
++ apr_pool_t *iterpool = svn_pool_create(pool);
++
++ for (ct = tests; ct->pattern; ct++)
++ {
++ svn_boolean_t result;
++ svn_string_t *pattern, *hostname;
++
++ svn_pool_clear(iterpool);
++
++ pattern = svn_string_create(ct->pattern, iterpool);
++ hostname = svn_string_create(ct->hostname, iterpool);
++
++ result = svn_cert__match_dns_identity(pattern, hostname);
++ if (result != ct->expected)
++ return svn_error_createf(SVN_ERR_TEST_FAILED, NULL,
++ "Expected %s but got %s for pattern '%s' on "
++ "hostname '%s'",
++ ct->expected ? "match" : "no match",
++ result ? "match" : "no match",
++ pattern->data, hostname->data);
++
++ }
++
++ svn_pool_destroy(iterpool);
++
++ return SVN_NO_ERROR;
++}
++
++static struct cert_match_dns_test cert_match_dns_tests[] = {
++ { "foo.example.com", "foo.example.com", TRUE }, /* exact match */
++ { "foo.example.com", "FOO.EXAMPLE.COM", TRUE }, /* case differences */
++ { "FOO.EXAMPLE.COM", "foo.example.com", TRUE },
++ { "*.example.com", "FoO.ExAmPlE.CoM", TRUE },
++ { "*.ExAmPlE.CoM", "foo.example.com", TRUE },
++ { "ABCDEFGHIJKLMNOPQRSTUVWXYZ", "abcdefghijklmnopqrstuvwxyz", TRUE },
++ { "abcdefghijklmnopqrstuvwxyz", "ABCDEFGHIJKLMNOPQRSTUVWXYZ", TRUE },
++ { "foo.example.com", "bar.example.com", FALSE }, /* difference at start */
++ { "foo.example.com", "foo.example.net", FALSE }, /* difference at end */
++ { "foo.example.com", "foo.example.commercial", FALSE }, /* hostname longer */
++ { "foo.example.commercial", "foo.example.com", FALSE }, /* pattern longer */
++ { "foo.example.comcom", "foo.example.com", FALSE }, /* repeated suffix */
++ { "foo.example.com", "foo.example.comcom", FALSE },
++ { "foo.example.com.com", "foo.example.com", FALSE },
++ { "foo.example.com", "foo.example.com.com", FALSE },
++ { "foofoo.example.com", "foo.example.com", FALSE }, /* repeated prefix */
++ { "foo.example.com", "foofoo.example.com", FALSE },
++ { "foo.foo.example.com", "foo.example.com", FALSE },
++ { "foo.example.com", "foo.foo.example.com", FALSE },
++ { "foo.*.example.com", "foo.bar.example.com", FALSE }, /* RFC 6125 s. 6.4.3
++ Rule 1 */
++ { "*.example.com", "foo.example.com", TRUE }, /* RFC 6125 s. 6.4.3 Rule 2 */
++ { "*.example.com", "bar.foo.example.com", FALSE }, /* Rule 2 */
++ { "*.example.com", "example.com", FALSE }, /* Rule 2 */
++ { "*.example.com", ".example.com", FALSE }, /* RFC doesn't say what to do
++ here and a leading period on
++ a hostname doesn't make sense
++ so we'll just reject this. */
++ { "*", "foo.example.com", FALSE }, /* wildcard must be left-most label,
++ implies that there must be more than
++ one label. */
++ { "*", "example.com", FALSE },
++ { "*", "com", FALSE },
++ { "*.example.com", "foo.example.net", FALSE }, /* difference in literal text
++ with a wildcard. */
++ { "*.com", "example.com", TRUE }, /* See Errata ID 3090 for RFC 6125,
++ probably shouldn't allow this but
++ we do for now. */
++ { "*.", "example.com", FALSE }, /* test some dubious 2 character wildcard
++ patterns */
++ { "*.", "example.", TRUE }, /* This one feels questionable */
++ { "*.", "example", FALSE },
++ { "*.", ".", FALSE },
++ { "a", "a", TRUE }, /* check that single letter exact matches work */
++ { "a", "b", FALSE }, /* and single letter not matches shouldn't */
++ { "*.*.com", "foo.example.com", FALSE }, /* unsupported wildcards */
++ { "*.*.com", "example.com", FALSE },
++ { "**.example.com", "foo.example.com", FALSE },
++ { "**.example.com", "example.com", FALSE },
++ { "f*.example.com", "foo.example.com", FALSE },
++ { "f*.example.com", "bar.example.com", FALSE },
++ { "*o.example.com", "foo.example.com", FALSE },
++ { "*o.example.com", "bar.example.com", FALSE },
++ { "f*o.example.com", "foo.example.com", FALSE },
++ { "f*o.example.com", "bar.example.com", FALSE },
++ { "foo.e*.com", "foo.example.com", FALSE },
++ { "foo.*e.com", "foo.example.com", FALSE },
++ { "foo.e*e.com", "foo.example.com", FALSE },
++ { "foo.example.com", "foo.example.com.", TRUE }, /* trailing dot */
++ { "*.example.com", "foo.example.com.", TRUE },
++ { "foo", "foo.", TRUE },
++ { "foo.example.com.", "foo.example.com", FALSE },
++ { "*.example.com.", "foo.example.com", FALSE },
++ { "foo.", "foo", FALSE },
++ { "foo.example.com", "foo.example.com..", FALSE },
++ { "*.example.com", "foo.example.com..", FALSE },
++ { "foo", "foo..", FALSE },
++ { "foo.example.com..", "foo.example.com", FALSE },
++ { "*.example.com..", "foo.example.com", FALSE },
++ { "foo..", "foo", FALSE },
++ { NULL }
++};
++
++static svn_error_t *
++test_cert_match_dns_identity(apr_pool_t *pool)
++{
++ return run_cert_match_dns_tests(cert_match_dns_tests, pool);
++}
++
++/* This test table implements results that should happen if we supported
++ * RFC 6125 s. 6.4.3 Rule 3. We don't so it's expected to fail for now. */
++static struct cert_match_dns_test rule3_tests[] = {
++ { "baz*.example.net", "baz1.example.net", TRUE },
++ { "*baz.example.net", "foobaz.example.net", TRUE },
++ { "b*z.example.net", "buuz.example.net", TRUE },
++ { "b*z.example.net", "bz.example.net", FALSE }, /* presume wildcard can't
++ match nothing */
++ { "baz*.example.net", "baz.example.net", FALSE },
++ { "*baz.example.net", "baz.example.net", FALSE },
++ { "b*z.example.net", "buuzuuz.example.net", TRUE }, /* presume wildcard
++ should be greedy */
++ { NULL }
++};
++
++static svn_error_t *
++test_rule3(apr_pool_t *pool)
++{
++ return run_cert_match_dns_tests(rule3_tests, pool);
++}
++
+
+ /* The test table. */
+
+@@ -1699,5 +1839,7 @@ struct svn_test_descriptor_t test_funcs[] =
+ SVN_TEST_PASS(test_uri_local_style),
+ SVN_TEST_PASS(test_dirent_internal_style),
+ SVN_TEST_PASS(test_uri_internal_style),
++ SVN_TEST_PASS(test_cert_match_dns_identity),
++ SVN_TEST_XFAIL(test_rule3),
+ SVN_TEST_NULL
+ };
+diff --git a/subversion/include/private/svn_cert.h b/subversion/include/private/svn_cert.h
+new file mode 100644
+index 0000000..32e32a0
+--- /dev/null
++++ b/subversion/include/private/svn_cert.h
+@@ -0,0 +1,68 @@
++/**
++ * @copyright
++ * ====================================================================
++ * Licensed to the Apache Software Foundation (ASF) under one
++ * or more contributor license agreements. See the NOTICE file
++ * distributed with this work for additional information
++ * regarding copyright ownership. The ASF licenses this file
++ * to you under the Apache License, Version 2.0 (the
++ * "License"); you may not use this file except in compliance
++ * with the License. You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing,
++ * software distributed under the License is distributed on an
++ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
++ * KIND, either express or implied. See the License for the
++ * specific language governing permissions and limitations
++ * under the License.
++ * ====================================================================
++ * @endcopyright
++ *
++ * @file svn_cert.h
++ * @brief Implementation of certificate validation functions
++ */
++
++#ifndef SVN_CERT_H
++#define SVN_CERT_H
++
++#include <apr.h>
++
++#include "svn_types.h"
++#include "svn_string.h"
++
++#ifdef __cplusplus
++extern "C" {
++#endif /* __cplusplus */
++
++
++/* Return TRUE iff @a pattern matches @a hostname as defined
++ * by the matching rules of RFC 6125. In the context of RFC
++ * 6125 the pattern is the domain name portion of the presented
++ * identifier (which comes from the Common Name or a DNSName
++ * portion of the subjectAltName of an X.509 certificate) and
++ * the hostname is the source domain (i.e. the host portion
++ * of the URI the user entered).
++ *
++ * @note With respect to wildcards we only support matching
++ * wildcards in the left-most label and as the only character
++ * in the left-most label (i.e. we support RFC 6125 § 6.4.3
++ * Rule 1 and 2 but not the optional Rule 3). This may change
++ * in the future.
++ *
++ * @note Subversion does not currently support internationalized
++ * domain names. Both values are presumed to be in NR-LDH label
++ * or A-label form (see RFC 5890 for the definition).
++ *
++ * @since New in 1.9.
++ */
++svn_boolean_t
++svn_cert__match_dns_identity(svn_string_t *pattern, svn_string_t *hostname);
++
++
++#ifdef __cplusplus
++}
++#endif /* __cplusplus */
++
++#endif /* SVN_CERT_H */
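
For illustration, a minimal usage sketch of the svn_cert__match_dns_identity() API documented in the header above (not part of the patch): check_identity() is a hypothetical helper, svn_string_create() is Subversion's standard string constructor, and the APR pool is assumed to be supplied by the caller.

#include <apr_pools.h>
#include "svn_string.h"
#include "private/svn_cert.h"

/* Hypothetical helper: TRUE when CERT_NAME matches HOSTNAME under the
   RFC 6125 rules 1 and 2 described above (a wildcard is honoured only
   as the entire left-most label). */
static svn_boolean_t
check_identity(const char *cert_name, const char *hostname, apr_pool_t *pool)
{
  svn_string_t *pattern = svn_string_create(cert_name, pool);
  svn_string_t *host = svn_string_create(hostname, pool);

  return svn_cert__match_dns_identity(pattern, host);
}

/* e.g. check_identity("*.example.com", "foo.example.com", pool) -> TRUE,
        check_identity("f*.example.com", "foo.example.com", pool) -> FALSE,
   matching the test table earlier in the patch. */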
diff --git a/meta/recipes-devtools/subversion/subversion/subversion-CVE-2014-3528.patch b/meta/recipes-devtools/subversion/subversion/subversion-CVE-2014-3528.patch
new file mode 100644
index 0000000000..23e738e985
--- /dev/null
+++ b/meta/recipes-devtools/subversion/subversion/subversion-CVE-2014-3528.patch
@@ -0,0 +1,29 @@
+Upstream-Status: Backport
+
+Signed-off-by: Yue Tao <yue.tao@windriver.com>
+
+diff --git a/subversion/libsvn_subr/config_auth.c.old b/subversion/libsvn_subr/config_auth.c
+index ff50270..c511d04 100644
+--- a/subversion/libsvn_subr/config_auth.c.old
++++ b/subversion/libsvn_subr/config_auth.c
+@@ -85,6 +85,7 @@ svn_config_read_auth_data(apr_hash_t **hash,
+ if (kind == svn_node_file)
+ {
+ svn_stream_t *stream;
++ svn_string_t *stored_realm;
+
+ SVN_ERR_W(svn_stream_open_readonly(&stream, auth_path, pool, pool),
+ _("Unable to open auth file for reading"));
+@@ -95,6 +96,12 @@ svn_config_read_auth_data(apr_hash_t **hash,
+ apr_psprintf(pool, _("Error parsing '%s'"),
+ svn_path_local_style(auth_path, pool)));
+
++ stored_realm = apr_hash_get(*hash, SVN_CONFIG_REALMSTRING_KEY,
++ APR_HASH_KEY_STRING);
++
++ if (!stored_realm || strcmp(stored_realm->data, realmstring) != 0)
++ *hash = NULL; /* Hash collision, or somebody tampering with storage */
++
+ SVN_ERR(svn_stream_close(stream));
+ }
+
diff --git a/meta/recipes-devtools/subversion/subversion_1.6.15.bb b/meta/recipes-devtools/subversion/subversion_1.6.15.bb
index 1bc637473b..b135bb7a3f 100644
--- a/meta/recipes-devtools/subversion/subversion_1.6.15.bb
+++ b/meta/recipes-devtools/subversion/subversion_1.6.15.bb
@@ -17,7 +17,10 @@ SRC_URI = "http://subversion.tigris.org/downloads/${BPN}-${PV}.tar.bz2 \
file://subversion-CVE-2013-4505.patch \
file://subversion-CVE-2013-1845.patch \
file://subversion-CVE-2013-1847-CVE-2013-1846.patch \
- file://subversion-CVE-2013-4277.patch"
+ file://subversion-CVE-2013-4277.patch \
+ file://subversion-CVE-2014-3522.patch \
+ file://subversion-CVE-2014-3528.patch \
+"
SRC_URI[md5sum] = "113fca1d9e4aa389d7dc2b210010fa69"
SRC_URI[sha256sum] = "b2919d603a5f3c19f42e3265c4b930e2376c43b3969b90ef9c42b2f72d5aaa45"
diff --git a/meta/recipes-devtools/subversion/subversion_1.8.9.bb b/meta/recipes-devtools/subversion/subversion_1.8.10.bb
index d134a5352c..b223d26b3d 100644
--- a/meta/recipes-devtools/subversion/subversion_1.8.9.bb
+++ b/meta/recipes-devtools/subversion/subversion_1.8.10.bb
@@ -13,8 +13,8 @@ SRC_URI = "${APACHE_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2 \
file://libtool2.patch \
file://disable_macos.patch \
"
-SRC_URI[md5sum] = "bd495517a760ddd764ce449a891971db"
-SRC_URI[sha256sum] = "45d708a5c3ffbef4b2a1044c4716a053e680763743d1f7ba99d0369f6da49e33"
+SRC_URI[md5sum] = "3068256761b40863df96128834d6b71b"
+SRC_URI[sha256sum] = "1cc900c8a7974337c3ed389dc6b5c59012ec48c7d4107ae31fd7c929ded47dcc"
LIC_FILES_CHKSUM = "file://LICENSE;md5=1c2f0119e478700b5428e26386cff923"
diff --git a/meta/recipes-devtools/syslinux/syslinux_6.01.bb b/meta/recipes-devtools/syslinux/syslinux_6.01.bb
index 7885390eb3..20afae95da 100644
--- a/meta/recipes-devtools/syslinux/syslinux_6.01.bb
+++ b/meta/recipes-devtools/syslinux/syslinux_6.01.bb
@@ -66,6 +66,7 @@ do_install() {
PACKAGES += "${PN}-extlinux ${PN}-mbr ${PN}-chain ${PN}-pxelinux ${PN}-isolinux ${PN}-misc"
RDEPENDS_${PN} += "mtools"
+RDEPENDS_${PN}-misc += "perl"
FILES_${PN} = "${bindir}/syslinux"
FILES_${PN}-extlinux = "${sbindir}/extlinux"
diff --git a/meta/recipes-devtools/tcltk/tcl/run-ptest b/meta/recipes-devtools/tcltk/tcl/run-ptest
index a5d6548d2a..dadba655f2 100644
--- a/meta/recipes-devtools/tcltk/tcl/run-ptest
+++ b/meta/recipes-devtools/tcltk/tcl/run-ptest
@@ -1,8 +1,8 @@
#!/bin/sh
for i in `ls tests/*.test |awk -F/ '{print $2}'`; \
- do TCL_LIBRARY=library ./tcltest tests/all.tcl -file $i &>$i.tmp; \
+ do TCL_LIBRARY=library ./tcltest tests/all.tcl -file $i >$i.tmp 2>&1; \
grep -q "^Files with failing tests:" $i.tmp; \
- if [ $? == 0 ]; then echo "FAIL: $i"; \
+ if [ $? -eq 0 ]; then echo "FAIL: $i"; \
else echo "PASS: $i"; rm -f $i.tmp; fi; \
done
diff --git a/meta/recipes-devtools/tcltk/tcl_8.6.1.bb b/meta/recipes-devtools/tcltk/tcl_8.6.3.bb
index 27f73f2ce2..73c0d25567 100644
--- a/meta/recipes-devtools/tcltk/tcl_8.6.1.bb
+++ b/meta/recipes-devtools/tcltk/tcl_8.6.3.bb
@@ -24,8 +24,8 @@ SRC_URI = "${BASE_SRC_URI} \
file://alter-includedir.patch \
file://run-ptest \
"
-SRC_URI[md5sum] = "aae4b701ee527c6e4e1a6f9c7399882e"
-SRC_URI[sha256sum] = "16ee769248e64ba1cae6b4834fcc4e4edd7470d881410e8d58f7dd1434343514"
+SRC_URI[md5sum] = "db382feca91754b7f93da16dc4cdad1f"
+SRC_URI[sha256sum] = "6ce0778de0d50daaa9c345d7c1fd1288fb658f674028812e7eeee992e3051005"
SRC_URI_class-native = "${BASE_SRC_URI}"
@@ -81,6 +81,7 @@ FILES_${PN}-dev += "${libdir}/tclConfig.sh ${libdir}/tclooConfig.sh"
# isn't getting picked up by shlibs code
RDEPENDS_${PN} += "tcl-lib"
RDEPENDS_${PN}_class-native = ""
+RDEPENDS_${PN}-ptest += "libgcc"
BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/unifdef/files/unifdef.c b/meta/recipes-devtools/unifdef/files/unifdef.c
deleted file mode 100644
index 5cbac15608..0000000000
--- a/meta/recipes-devtools/unifdef/files/unifdef.c
+++ /dev/null
@@ -1,1005 +0,0 @@
-/*
- * Copyright (c) 2002 - 2005 Tony Finch <dot@dotat.at>. All rights reserved.
- *
- * This code is derived from software contributed to Berkeley by Dave Yost.
- * It was rewritten to support ANSI C by Tony Finch. The original version of
- * unifdef carried the following copyright notice. None of its code remains
- * in this version (though some of the names remain).
- *
- * Copyright (c) 1985, 1993
- * The Regents of the University of California. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
- * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
- * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
- * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
- * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
- * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
- * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
- * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
- * SUCH DAMAGE.
- */
-
-#include <sys/cdefs.h>
-
-#ifndef lint
-#if 0
-static const char copyright[] =
-"@(#) Copyright (c) 1985, 1993\n\
- The Regents of the University of California. All rights reserved.\n";
-#endif
-#ifdef __IDSTRING
-__IDSTRING(Berkeley, "@(#)unifdef.c 8.1 (Berkeley) 6/6/93");
-__IDSTRING(NetBSD, "$NetBSD: unifdef.c,v 1.8 2000/07/03 02:51:36 matt Exp $");
-__IDSTRING(dotat, "$dotat: things/unifdef.c,v 1.171 2005/03/08 12:38:48 fanf2 Exp $");
-#endif
-#endif /* not lint */
-#ifdef __FBSDID
-__FBSDID("$FreeBSD: /repoman/r/ncvs/src/usr.bin/unifdef/unifdef.c,v 1.20 2005/05/21 09:55:09 ru Exp $");
-#endif
-
-/*
- * unifdef - remove ifdef'ed lines
- *
- * Wishlist:
- * provide an option which will append the name of the
- * appropriate symbol after #else's and #endif's
- * provide an option which will check symbols after
- * #else's and #endif's to see that they match their
- * corresponding #ifdef or #ifndef
- *
- * The first two items above require better buffer handling, which would
- * also make it possible to handle all "dodgy" directives correctly.
- */
-
-#include <ctype.h>
-#include <err.h>
-#include <stdarg.h>
-#include <stdbool.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <unistd.h>
-
-size_t strlcpy(char *dst, const char *src, size_t siz);
-
-/* types of input lines: */
-typedef enum {
- LT_TRUEI, /* a true #if with ignore flag */
- LT_FALSEI, /* a false #if with ignore flag */
- LT_IF, /* an unknown #if */
- LT_TRUE, /* a true #if */
- LT_FALSE, /* a false #if */
- LT_ELIF, /* an unknown #elif */
- LT_ELTRUE, /* a true #elif */
- LT_ELFALSE, /* a false #elif */
- LT_ELSE, /* #else */
- LT_ENDIF, /* #endif */
- LT_DODGY, /* flag: directive is not on one line */
- LT_DODGY_LAST = LT_DODGY + LT_ENDIF,
- LT_PLAIN, /* ordinary line */
- LT_EOF, /* end of file */
- LT_COUNT
-} Linetype;
-
-static char const * const linetype_name[] = {
- "TRUEI", "FALSEI", "IF", "TRUE", "FALSE",
- "ELIF", "ELTRUE", "ELFALSE", "ELSE", "ENDIF",
- "DODGY TRUEI", "DODGY FALSEI",
- "DODGY IF", "DODGY TRUE", "DODGY FALSE",
- "DODGY ELIF", "DODGY ELTRUE", "DODGY ELFALSE",
- "DODGY ELSE", "DODGY ENDIF",
- "PLAIN", "EOF"
-};
-
-/* state of #if processing */
-typedef enum {
- IS_OUTSIDE,
- IS_FALSE_PREFIX, /* false #if followed by false #elifs */
- IS_TRUE_PREFIX, /* first non-false #(el)if is true */
- IS_PASS_MIDDLE, /* first non-false #(el)if is unknown */
- IS_FALSE_MIDDLE, /* a false #elif after a pass state */
- IS_TRUE_MIDDLE, /* a true #elif after a pass state */
- IS_PASS_ELSE, /* an else after a pass state */
- IS_FALSE_ELSE, /* an else after a true state */
- IS_TRUE_ELSE, /* an else after only false states */
- IS_FALSE_TRAILER, /* #elifs after a true are false */
- IS_COUNT
-} Ifstate;
-
-static char const * const ifstate_name[] = {
- "OUTSIDE", "FALSE_PREFIX", "TRUE_PREFIX",
- "PASS_MIDDLE", "FALSE_MIDDLE", "TRUE_MIDDLE",
- "PASS_ELSE", "FALSE_ELSE", "TRUE_ELSE",
- "FALSE_TRAILER"
-};
-
-/* state of comment parser */
-typedef enum {
- NO_COMMENT = false, /* outside a comment */
- C_COMMENT, /* in a comment like this one */
- CXX_COMMENT, /* between // and end of line */
- STARTING_COMMENT, /* just after slash-backslash-newline */
- FINISHING_COMMENT, /* star-backslash-newline in a C comment */
- CHAR_LITERAL, /* inside '' */
- STRING_LITERAL /* inside "" */
-} Comment_state;
-
-static char const * const comment_name[] = {
- "NO", "C", "CXX", "STARTING", "FINISHING", "CHAR", "STRING"
-};
-
-/* state of preprocessor line parser */
-typedef enum {
- LS_START, /* only space and comments on this line */
- LS_HASH, /* only space, comments, and a hash */
- LS_DIRTY /* this line can't be a preprocessor line */
-} Line_state;
-
-static char const * const linestate_name[] = {
- "START", "HASH", "DIRTY"
-};
-
-/*
- * Minimum translation limits from ISO/IEC 9899:1999 5.2.4.1
- */
-#define MAXDEPTH 64 /* maximum #if nesting */
-#define MAXLINE 4096 /* maximum length of line */
-#define MAXSYMS 4096 /* maximum number of symbols */
-
-/*
- * Sometimes when editing a keyword the replacement text is longer, so
- * we leave some space at the end of the tline buffer to accommodate this.
- */
-#define EDITSLOP 10
-
-/*
- * Globals.
- */
-
-static bool complement; /* -c: do the complement */
-static bool debugging; /* -d: debugging reports */
-static bool iocccok; /* -e: fewer IOCCC errors */
-static bool killconsts; /* -k: eval constant #ifs */
-static bool lnblank; /* -l: blank deleted lines */
-static bool lnnum; /* -n: add #line directives */
-static bool symlist; /* -s: output symbol list */
-static bool text; /* -t: this is a text file */
-
-static const char *symname[MAXSYMS]; /* symbol name */
-static const char *value[MAXSYMS]; /* -Dsym=value */
-static bool ignore[MAXSYMS]; /* -iDsym or -iUsym */
-static int nsyms; /* number of symbols */
-
-static FILE *input; /* input file pointer */
-static const char *filename; /* input file name */
-static int linenum; /* current line number */
-
-static char tline[MAXLINE+EDITSLOP];/* input buffer plus space */
-static char *keyword; /* used for editing #elif's */
-
-static Comment_state incomment; /* comment parser state */
-static Line_state linestate; /* #if line parser state */
-static Ifstate ifstate[MAXDEPTH]; /* #if processor state */
-static bool ignoring[MAXDEPTH]; /* ignore comments state */
-static int stifline[MAXDEPTH]; /* start of current #if */
-static int depth; /* current #if nesting */
-static int delcount; /* count of deleted lines */
-static bool keepthis; /* don't delete constant #if */
-
-static int exitstat; /* program exit status */
-
-static void addsym(bool, bool, char *);
-static void debug(const char *, ...);
-static void done(void);
-static void error(const char *);
-static int findsym(const char *);
-static void flushline(bool);
-static Linetype getline34(void);
-static Linetype ifeval(const char **);
-static void ignoreoff(void);
-static void ignoreon(void);
-static void keywordedit(const char *);
-static void nest(void);
-static void process(void);
-static const char *skipcomment(const char *);
-static const char *skipsym(const char *);
-static void state(Ifstate);
-static int strlcmp(const char *, const char *, size_t);
-static void unnest(void);
-static void usage(void);
-
-#define endsym(c) (!isalpha((unsigned char)c) && !isdigit((unsigned char)c) && c != '_')
-
-/*
- * The main program.
- */
-int
-main(int argc, char *argv[])
-{
- int opt;
-
- while ((opt = getopt(argc, argv, "i:D:U:I:cdeklnst")) != -1)
- switch (opt) {
- case 'i': /* treat stuff controlled by these symbols as text */
- /*
- * For strict backwards-compatibility the U or D
- * should be immediately after the -i but it doesn't
- * matter much if we relax that requirement.
- */
- opt = *optarg++;
- if (opt == 'D')
- addsym(true, true, optarg);
- else if (opt == 'U')
- addsym(true, false, optarg);
- else
- usage();
- break;
- case 'D': /* define a symbol */
- addsym(false, true, optarg);
- break;
- case 'U': /* undef a symbol */
- addsym(false, false, optarg);
- break;
- case 'I':
- /* no-op for compatibility with cpp */
- break;
- case 'c': /* treat -D as -U and vice versa */
- complement = true;
- break;
- case 'd':
- debugging = true;
- break;
- case 'e': /* fewer errors from dodgy lines */
- iocccok = true;
- break;
- case 'k': /* process constant #ifs */
- killconsts = true;
- break;
- case 'l': /* blank deleted lines instead of omitting them */
- lnblank = true;
- break;
- case 'n': /* add #line directive after deleted lines */
- lnnum = true;
- break;
- case 's': /* only output list of symbols that control #ifs */
- symlist = true;
- break;
- case 't': /* don't parse C comments */
- text = true;
- break;
- default:
- usage();
- }
- argc -= optind;
- argv += optind;
- if (argc > 1) {
- errx(2, "can only do one file");
- } else if (argc == 1 && strcmp(*argv, "-") != 0) {
- filename = *argv;
- input = fopen(filename, "r");
- if (input == NULL)
- err(2, "can't open %s", filename);
- } else {
- filename = "[stdin]";
- input = stdin;
- }
- process();
- abort(); /* bug */
-}
-
-static void
-usage(void)
-{
- fprintf(stderr, "usage: unifdef [-cdeklnst] [-Ipath]"
- " [-Dsym[=val]] [-Usym] [-iDsym[=val]] [-iUsym] ... [file]\n");
- exit(2);
-}
-
-/*
- * A state transition function alters the global #if processing state
- * in a particular way. The table below is indexed by the current
- * processing state and the type of the current line.
- *
- * Nesting is handled by keeping a stack of states; some transition
- * functions increase or decrease the depth. They also maintain the
- * ignore state on a stack. In some complicated cases they have to
- * alter the preprocessor directive, as follows.
- *
- * When we have processed a group that starts off with a known-false
- * #if/#elif sequence (which has therefore been deleted) followed by a
- * #elif that we don't understand and therefore must keep, we edit the
- * latter into a #if to keep the nesting correct.
- *
- * When we find a true #elif in a group, the following block will
- * always be kept and the rest of the sequence after the next #elif or
- * #else will be discarded. We edit the #elif into a #else and the
- * following directive to #endif since this has the desired behaviour.
- *
- * "Dodgy" directives are split across multiple lines, the most common
- * example being a multi-line comment hanging off the right of the
- * directive. We can handle them correctly only if there is no change
- * from printing to dropping (or vice versa) caused by that directive.
- * If the directive is the first of a group we have a choice between
- * failing with an error, or passing it through unchanged instead of
- * evaluating it. The latter is not the default to avoid questions from
- * users about unifdef unexpectedly leaving behind preprocessor directives.
- */
-typedef void state_fn(void);
-
-/* report an error */
-static void Eelif (void) { error("Inappropriate #elif"); }
-static void Eelse (void) { error("Inappropriate #else"); }
-static void Eendif(void) { error("Inappropriate #endif"); }
-static void Eeof (void) { error("Premature EOF"); }
-static void Eioccc(void) { error("Obfuscated preprocessor control line"); }
-/* plain line handling */
-static void print (void) { flushline(true); }
-static void drop (void) { flushline(false); }
-/* output lacks group's start line */
-static void Strue (void) { drop(); ignoreoff(); state(IS_TRUE_PREFIX); }
-static void Sfalse(void) { drop(); ignoreoff(); state(IS_FALSE_PREFIX); }
-static void Selse (void) { drop(); state(IS_TRUE_ELSE); }
-/* print/pass this block */
-static void Pelif (void) { print(); ignoreoff(); state(IS_PASS_MIDDLE); }
-static void Pelse (void) { print(); state(IS_PASS_ELSE); }
-static void Pendif(void) { print(); unnest(); }
-/* discard this block */
-static void Dfalse(void) { drop(); ignoreoff(); state(IS_FALSE_TRAILER); }
-static void Delif (void) { drop(); ignoreoff(); state(IS_FALSE_MIDDLE); }
-static void Delse (void) { drop(); state(IS_FALSE_ELSE); }
-static void Dendif(void) { drop(); unnest(); }
-/* first line of group */
-static void Fdrop (void) { nest(); Dfalse(); }
-static void Fpass (void) { nest(); Pelif(); }
-static void Ftrue (void) { nest(); Strue(); }
-static void Ffalse(void) { nest(); Sfalse(); }
-/* variable pedantry for obfuscated lines */
-static void Oiffy (void) { if (!iocccok) Eioccc(); Fpass(); ignoreon(); }
-static void Oif (void) { if (!iocccok) Eioccc(); Fpass(); }
-static void Oelif (void) { if (!iocccok) Eioccc(); Pelif(); }
-/* ignore comments in this block */
-static void Idrop (void) { Fdrop(); ignoreon(); }
-static void Itrue (void) { Ftrue(); ignoreon(); }
-static void Ifalse(void) { Ffalse(); ignoreon(); }
-/* edit this line */
-static void Mpass (void) { strncpy(keyword, "if ", 4); Pelif(); }
-static void Mtrue (void) { keywordedit("else\n"); state(IS_TRUE_MIDDLE); }
-static void Melif (void) { keywordedit("endif\n"); state(IS_FALSE_TRAILER); }
-static void Melse (void) { keywordedit("endif\n"); state(IS_FALSE_ELSE); }
-
-static state_fn * const trans_table[IS_COUNT][LT_COUNT] = {
-/* IS_OUTSIDE */
-{ Itrue, Ifalse,Fpass, Ftrue, Ffalse,Eelif, Eelif, Eelif, Eelse, Eendif,
- Oiffy, Oiffy, Fpass, Oif, Oif, Eelif, Eelif, Eelif, Eelse, Eendif,
- print, done },
-/* IS_FALSE_PREFIX */
-{ Idrop, Idrop, Fdrop, Fdrop, Fdrop, Mpass, Strue, Sfalse,Selse, Dendif,
- Idrop, Idrop, Fdrop, Fdrop, Fdrop, Mpass, Eioccc,Eioccc,Eioccc,Eioccc,
- drop, Eeof },
-/* IS_TRUE_PREFIX */
-{ Itrue, Ifalse,Fpass, Ftrue, Ffalse,Dfalse,Dfalse,Dfalse,Delse, Dendif,
- Oiffy, Oiffy, Fpass, Oif, Oif, Eioccc,Eioccc,Eioccc,Eioccc,Eioccc,
- print, Eeof },
-/* IS_PASS_MIDDLE */
-{ Itrue, Ifalse,Fpass, Ftrue, Ffalse,Pelif, Mtrue, Delif, Pelse, Pendif,
- Oiffy, Oiffy, Fpass, Oif, Oif, Pelif, Oelif, Oelif, Pelse, Pendif,
- print, Eeof },
-/* IS_FALSE_MIDDLE */
-{ Idrop, Idrop, Fdrop, Fdrop, Fdrop, Pelif, Mtrue, Delif, Pelse, Pendif,
- Idrop, Idrop, Fdrop, Fdrop, Fdrop, Eioccc,Eioccc,Eioccc,Eioccc,Eioccc,
- drop, Eeof },
-/* IS_TRUE_MIDDLE */
-{ Itrue, Ifalse,Fpass, Ftrue, Ffalse,Melif, Melif, Melif, Melse, Pendif,
- Oiffy, Oiffy, Fpass, Oif, Oif, Eioccc,Eioccc,Eioccc,Eioccc,Pendif,
- print, Eeof },
-/* IS_PASS_ELSE */
-{ Itrue, Ifalse,Fpass, Ftrue, Ffalse,Eelif, Eelif, Eelif, Eelse, Pendif,
- Oiffy, Oiffy, Fpass, Oif, Oif, Eelif, Eelif, Eelif, Eelse, Pendif,
- print, Eeof },
-/* IS_FALSE_ELSE */
-{ Idrop, Idrop, Fdrop, Fdrop, Fdrop, Eelif, Eelif, Eelif, Eelse, Dendif,
- Idrop, Idrop, Fdrop, Fdrop, Fdrop, Eelif, Eelif, Eelif, Eelse, Eioccc,
- drop, Eeof },
-/* IS_TRUE_ELSE */
-{ Itrue, Ifalse,Fpass, Ftrue, Ffalse,Eelif, Eelif, Eelif, Eelse, Dendif,
- Oiffy, Oiffy, Fpass, Oif, Oif, Eelif, Eelif, Eelif, Eelse, Eioccc,
- print, Eeof },
-/* IS_FALSE_TRAILER */
-{ Idrop, Idrop, Fdrop, Fdrop, Fdrop, Dfalse,Dfalse,Dfalse,Delse, Dendif,
- Idrop, Idrop, Fdrop, Fdrop, Fdrop, Dfalse,Dfalse,Dfalse,Delse, Eioccc,
- drop, Eeof }
-/*TRUEI FALSEI IF TRUE FALSE ELIF ELTRUE ELFALSE ELSE ENDIF
- TRUEI FALSEI IF TRUE FALSE ELIF ELTRUE ELFALSE ELSE ENDIF (DODGY)
- PLAIN EOF */
-};
-
-/*
- * State machine utility functions
- */
-static void
-done(void)
-{
- if (incomment)
- error("EOF in comment");
- exit(exitstat);
-}
-static void
-ignoreoff(void)
-{
- if (depth == 0)
- abort(); /* bug */
- ignoring[depth] = ignoring[depth-1];
-}
-static void
-ignoreon(void)
-{
- ignoring[depth] = true;
-}
-static void
-keywordedit(const char *replacement)
-{
- size_t size = tline + sizeof(tline) - keyword;
- char *dst = keyword;
- const char *src = replacement;
- if (size != 0) {
- while ((--size != 0) && (*src != '\0'))
- *dst++ = *src++;
- *dst = '\0';
- }
- print();
-}
-static void
-nest(void)
-{
- depth += 1;
- if (depth >= MAXDEPTH)
- error("Too many levels of nesting");
- stifline[depth] = linenum;
-}
-static void
-unnest(void)
-{
- if (depth == 0)
- abort(); /* bug */
- depth -= 1;
-}
-static void
-state(Ifstate is)
-{
- ifstate[depth] = is;
-}
-
-/*
- * Write a line to the output or not, according to command line options.
- */
-static void
-flushline(bool keep)
-{
- if (symlist)
- return;
- if (keep ^ complement) {
- if (lnnum && delcount > 0)
- printf("#line %d\n", linenum);
- fputs(tline, stdout);
- delcount = 0;
- } else {
- if (lnblank)
- putc('\n', stdout);
- exitstat = 1;
- delcount += 1;
- }
-}
-
-/*
- * The driver for the state machine.
- */
-static void
-process(void)
-{
- Linetype lineval;
-
- for (;;) {
- linenum++;
- lineval = getline34();
- trans_table[ifstate[depth]][lineval]();
- debug("process %s -> %s depth %d",
- linetype_name[lineval],
- ifstate_name[ifstate[depth]], depth);
- }
-}
-
-/*
- * Parse a line and determine its type. We keep the preprocessor line
- * parser state between calls in the global variable linestate, with
- * help from skipcomment().
- */
-static Linetype
-getline34(void)
-{
- const char *cp;
- int cursym;
- int kwlen;
- Linetype retval;
- Comment_state wascomment;
-
- if (fgets(tline, MAXLINE, input) == NULL)
- return (LT_EOF);
- retval = LT_PLAIN;
- wascomment = incomment;
- cp = skipcomment(tline);
- if (linestate == LS_START) {
- if (*cp == '#') {
- linestate = LS_HASH;
- cp = skipcomment(cp + 1);
- } else if (*cp != '\0')
- linestate = LS_DIRTY;
- }
- if (!incomment && linestate == LS_HASH) {
- keyword = tline + (cp - tline);
- cp = skipsym(cp);
- kwlen = cp - keyword;
- /* no way can we deal with a continuation inside a keyword */
- if (strncmp(cp, "\\\n", 2) == 0)
- Eioccc();
- if (strlcmp("ifdef", keyword, kwlen) == 0 ||
- strlcmp("ifndef", keyword, kwlen) == 0) {
- cp = skipcomment(cp);
- if ((cursym = findsym(cp)) < 0)
- retval = LT_IF;
- else {
- retval = (keyword[2] == 'n')
- ? LT_FALSE : LT_TRUE;
- if (value[cursym] == NULL)
- retval = (retval == LT_TRUE)
- ? LT_FALSE : LT_TRUE;
- if (ignore[cursym])
- retval = (retval == LT_TRUE)
- ? LT_TRUEI : LT_FALSEI;
- }
- cp = skipsym(cp);
- } else if (strlcmp("if", keyword, kwlen) == 0)
- retval = ifeval(&cp);
- else if (strlcmp("elif", keyword, kwlen) == 0)
- retval = ifeval(&cp) - LT_IF + LT_ELIF;
- else if (strlcmp("else", keyword, kwlen) == 0)
- retval = LT_ELSE;
- else if (strlcmp("endif", keyword, kwlen) == 0)
- retval = LT_ENDIF;
- else {
- linestate = LS_DIRTY;
- retval = LT_PLAIN;
- }
- cp = skipcomment(cp);
- if (*cp != '\0') {
- linestate = LS_DIRTY;
- if (retval == LT_TRUE || retval == LT_FALSE ||
- retval == LT_TRUEI || retval == LT_FALSEI)
- retval = LT_IF;
- if (retval == LT_ELTRUE || retval == LT_ELFALSE)
- retval = LT_ELIF;
- }
- if (retval != LT_PLAIN && (wascomment || incomment)) {
- retval += LT_DODGY;
- if (incomment)
- linestate = LS_DIRTY;
- }
- /* skipcomment should have changed the state */
- if (linestate == LS_HASH)
- abort(); /* bug */
- }
- if (linestate == LS_DIRTY) {
- while (*cp != '\0')
- cp = skipcomment(cp + 1);
- }
- debug("parser %s comment %s line",
- comment_name[incomment], linestate_name[linestate]);
- return (retval);
-}
-
-/*
- * These are the binary operators that are supported by the expression
- * evaluator. Note that if support for division is added then we also
- * need short-circuiting booleans because of divide-by-zero.
- */
-static int op_lt(int a, int b) { return (a < b); }
-static int op_gt(int a, int b) { return (a > b); }
-static int op_le(int a, int b) { return (a <= b); }
-static int op_ge(int a, int b) { return (a >= b); }
-static int op_eq(int a, int b) { return (a == b); }
-static int op_ne(int a, int b) { return (a != b); }
-static int op_or(int a, int b) { return (a || b); }
-static int op_and(int a, int b) { return (a && b); }
-
-/*
- * An evaluation function takes three arguments, as follows: (1) a pointer to
- * an element of the precedence table which lists the operators at the current
- * level of precedence; (2) a pointer to an integer which will receive the
- * value of the expression; and (3) a pointer to a char* that points to the
- * expression to be evaluated and that is updated to the end of the expression
- * when evaluation is complete. The function returns LT_FALSE if the value of
- * the expression is zero, LT_TRUE if it is non-zero, or LT_IF if the
- * expression could not be evaluated.
- */
-struct ops;
-
-typedef Linetype eval_fn(const struct ops *, int *, const char **);
-
-static eval_fn eval_table, eval_unary;
-
-/*
- * The precedence table. Expressions involving binary operators are evaluated
- * in a table-driven way by eval_table. When it evaluates a subexpression it
- * calls the inner function with its first argument pointing to the next
- * element of the table. Innermost expressions have special non-table-driven
- * handling.
- */
-static const struct ops {
- eval_fn *inner;
- struct op {
- const char *str;
- int (*fn)(int, int);
- } op[5];
-} eval_ops[] = {
- { eval_table, { { "||", op_or } } },
- { eval_table, { { "&&", op_and } } },
- { eval_table, { { "==", op_eq },
- { "!=", op_ne } } },
- { eval_unary, { { "<=", op_le },
- { ">=", op_ge },
- { "<", op_lt },
- { ">", op_gt } } }
-};
-
-/*
- * Function for evaluating the innermost parts of expressions,
- * viz. !expr (expr) defined(symbol) symbol number
- * We reset the keepthis flag when we find a non-constant subexpression.
- */
-static Linetype
-eval_unary(const struct ops *ops, int *valp, const char **cpp)
-{
- const char *cp;
- char *ep;
- int sym;
-
- cp = skipcomment(*cpp);
- if (*cp == '!') {
- debug("eval%d !", ops - eval_ops);
- cp++;
- if (eval_unary(ops, valp, &cp) == LT_IF)
- return (LT_IF);
- *valp = !*valp;
- } else if (*cp == '(') {
- cp++;
- debug("eval%d (", ops - eval_ops);
- if (eval_table(eval_ops, valp, &cp) == LT_IF)
- return (LT_IF);
- cp = skipcomment(cp);
- if (*cp++ != ')')
- return (LT_IF);
- } else if (isdigit((unsigned char)*cp)) {
- debug("eval%d number", ops - eval_ops);
- *valp = strtol(cp, &ep, 0);
- cp = skipsym(cp);
- } else if (strncmp(cp, "defined", 7) == 0 && endsym(cp[7])) {
- cp = skipcomment(cp+7);
- debug("eval%d defined", ops - eval_ops);
- if (*cp++ != '(')
- return (LT_IF);
- cp = skipcomment(cp);
- sym = findsym(cp);
- if (sym < 0)
- return (LT_IF);
- *valp = (value[sym] != NULL);
- cp = skipsym(cp);
- cp = skipcomment(cp);
- if (*cp++ != ')')
- return (LT_IF);
- keepthis = false;
- } else if (!endsym(*cp)) {
- debug("eval%d symbol", ops - eval_ops);
- sym = findsym(cp);
- if (sym < 0)
- return (LT_IF);
- if (value[sym] == NULL)
- *valp = 0;
- else {
- *valp = strtol(value[sym], &ep, 0);
- if (*ep != '\0' || ep == value[sym])
- return (LT_IF);
- }
- cp = skipsym(cp);
- keepthis = false;
- } else {
- debug("eval%d bad expr", ops - eval_ops);
- return (LT_IF);
- }
-
- *cpp = cp;
- debug("eval%d = %d", ops - eval_ops, *valp);
- return (*valp ? LT_TRUE : LT_FALSE);
-}
-
-/*
- * Table-driven evaluation of binary operators.
- */
-static Linetype
-eval_table(const struct ops *ops, int *valp, const char **cpp)
-{
- const struct op *op;
- const char *cp;
- int val;
-
- debug("eval%d", ops - eval_ops);
- cp = *cpp;
- if (ops->inner(ops+1, valp, &cp) == LT_IF)
- return (LT_IF);
- for (;;) {
- cp = skipcomment(cp);
- for (op = ops->op; op->str != NULL; op++)
- if (strncmp(cp, op->str, strlen(op->str)) == 0)
- break;
- if (op->str == NULL)
- break;
- cp += strlen(op->str);
- debug("eval%d %s", ops - eval_ops, op->str);
- if (ops->inner(ops+1, &val, &cp) == LT_IF)
- return (LT_IF);
- *valp = op->fn(*valp, val);
- }
-
- *cpp = cp;
- debug("eval%d = %d", ops - eval_ops, *valp);
- return (*valp ? LT_TRUE : LT_FALSE);
-}
-
-/*
- * Evaluate the expression on a #if or #elif line. If we can work out
- * the result we return LT_TRUE or LT_FALSE accordingly, otherwise we
- * return just a generic LT_IF.
- */
-static Linetype
-ifeval(const char **cpp)
-{
- int ret;
- int val;
-
- debug("eval %s", *cpp);
- keepthis = killconsts ? false : true;
- ret = eval_table(eval_ops, &val, cpp);
- debug("eval = %d", val);
- return (keepthis ? LT_IF : ret);
-}
-
-/*
- * Skip over comments, strings, and character literals and stop at the
- * next character position that is not whitespace. Between calls we keep
- * the comment state in the global variable incomment, and we also adjust
- * the global variable linestate when we see a newline.
- * XXX: doesn't cope with the buffer splitting inside a state transition.
- */
-static const char *
-skipcomment(const char *cp)
-{
- if (text || ignoring[depth]) {
- for (; isspace((unsigned char)*cp); cp++)
- if (*cp == '\n')
- linestate = LS_START;
- return (cp);
- }
- while (*cp != '\0')
- /* don't reset to LS_START after a line continuation */
- if (strncmp(cp, "\\\n", 2) == 0)
- cp += 2;
- else switch (incomment) {
- case NO_COMMENT:
- if (strncmp(cp, "/\\\n", 3) == 0) {
- incomment = STARTING_COMMENT;
- cp += 3;
- } else if (strncmp(cp, "/*", 2) == 0) {
- incomment = C_COMMENT;
- cp += 2;
- } else if (strncmp(cp, "//", 2) == 0) {
- incomment = CXX_COMMENT;
- cp += 2;
- } else if (strncmp(cp, "\'", 1) == 0) {
- incomment = CHAR_LITERAL;
- linestate = LS_DIRTY;
- cp += 1;
- } else if (strncmp(cp, "\"", 1) == 0) {
- incomment = STRING_LITERAL;
- linestate = LS_DIRTY;
- cp += 1;
- } else if (strncmp(cp, "\n", 1) == 0) {
- linestate = LS_START;
- cp += 1;
- } else if (strchr(" \t", *cp) != NULL) {
- cp += 1;
- } else
- return (cp);
- continue;
- case CXX_COMMENT:
- if (strncmp(cp, "\n", 1) == 0) {
- incomment = NO_COMMENT;
- linestate = LS_START;
- }
- cp += 1;
- continue;
- case CHAR_LITERAL:
- case STRING_LITERAL:
- if ((incomment == CHAR_LITERAL && cp[0] == '\'') ||
- (incomment == STRING_LITERAL && cp[0] == '\"')) {
- incomment = NO_COMMENT;
- cp += 1;
- } else if (cp[0] == '\\') {
- if (cp[1] == '\0')
- cp += 1;
- else
- cp += 2;
- } else if (strncmp(cp, "\n", 1) == 0) {
- if (incomment == CHAR_LITERAL)
- error("unterminated char literal");
- else
- error("unterminated string literal");
- } else
- cp += 1;
- continue;
- case C_COMMENT:
- if (strncmp(cp, "*\\\n", 3) == 0) {
- incomment = FINISHING_COMMENT;
- cp += 3;
- } else if (strncmp(cp, "*/", 2) == 0) {
- incomment = NO_COMMENT;
- cp += 2;
- } else
- cp += 1;
- continue;
- case STARTING_COMMENT:
- if (*cp == '*') {
- incomment = C_COMMENT;
- cp += 1;
- } else if (*cp == '/') {
- incomment = CXX_COMMENT;
- cp += 1;
- } else {
- incomment = NO_COMMENT;
- linestate = LS_DIRTY;
- }
- continue;
- case FINISHING_COMMENT:
- if (*cp == '/') {
- incomment = NO_COMMENT;
- cp += 1;
- } else
- incomment = C_COMMENT;
- continue;
- default:
- abort(); /* bug */
- }
- return (cp);
-}
-
-/*
- * Skip over an identifier.
- */
-static const char *
-skipsym(const char *cp)
-{
- while (!endsym(*cp))
- ++cp;
- return (cp);
-}
-
-/*
- * Look for the symbol in the symbol table. If is is found, we return
- * the symbol table index, else we return -1.
- */
-static int
-findsym(const char *str)
-{
- const char *cp;
- int symind;
-
- cp = skipsym(str);
- if (cp == str)
- return (-1);
- if (symlist) {
- printf("%.*s\n", (int)(cp-str), str);
- /* we don't care about the value of the symbol */
- return (0);
- }
- for (symind = 0; symind < nsyms; ++symind) {
- if (strlcmp(symname[symind], str, cp-str) == 0) {
- debug("findsym %s %s", symname[symind],
- value[symind] ? value[symind] : "");
- return (symind);
- }
- }
- return (-1);
-}
-
-/*
- * Add a symbol to the symbol table.
- */
-static void
-addsym(bool ignorethis, bool definethis, char *sym)
-{
- int symind;
- char *val;
-
- symind = findsym(sym);
- if (symind < 0) {
- if (nsyms >= MAXSYMS)
- errx(2, "too many symbols");
- symind = nsyms++;
- }
- symname[symind] = sym;
- ignore[symind] = ignorethis;
- val = sym + (skipsym(sym) - sym);
- if (definethis) {
- if (*val == '=') {
- value[symind] = val+1;
- *val = '\0';
- } else if (*val == '\0')
- value[symind] = "";
- else
- usage();
- } else {
- if (*val != '\0')
- usage();
- value[symind] = NULL;
- }
-}
-
-/*
- * Compare s with n characters of t.
- * The same as strncmp() except that it checks that s[n] == '\0'.
- */
-static int
-strlcmp(const char *s, const char *t, size_t n)
-{
- while (n-- && *t != '\0')
- if (*s != *t)
- return ((unsigned char)*s - (unsigned char)*t);
- else
- ++s, ++t;
- return ((unsigned char)*s);
-}
-
-/*
- * Diagnostics.
- */
-static void
-debug(const char *msg, ...)
-{
- va_list ap;
-
- if (debugging) {
- va_start(ap, msg);
- vwarnx(msg, ap);
- va_end(ap);
- }
-}
-
-static void
-error(const char *msg)
-{
- if (depth == 0)
- warnx("%s: %d: %s", filename, linenum, msg);
- else
- warnx("%s: %d: %s (#if line %d depth %d)",
- filename, linenum, msg, stifline[depth], depth);
- errx(2, "output may be truncated");
-}
diff --git a/meta/recipes-devtools/unifdef/unifdef-native_2.6.18+git.bb b/meta/recipes-devtools/unifdef/unifdef-native_2.6.18+git.bb
deleted file mode 100644
index 9a28684236..0000000000
--- a/meta/recipes-devtools/unifdef/unifdef-native_2.6.18+git.bb
+++ /dev/null
@@ -1,21 +0,0 @@
-SUMMARY = "Kernel header preprocessor"
-SECTION = "devel"
-LICENSE = "BSD-2-Clause"
-
-PR = "r1"
-
-LIC_FILES_CHKSUM = "file://${WORKDIR}/unifdef.c;endline=32;md5=1a33f5c39aa718a89058721df61979bd"
-
-SRC_URI = "file://unifdef.c"
-
-inherit native
-
-do_compile() {
- ${CC} ${CFLAGS} ${LDFLAGS} -o unifdef ${WORKDIR}/unifdef.c
-}
-
-do_install() {
- install -d ${D}${bindir}
- install -m 0755 unifdef ${D}${bindir}
-}
-
diff --git a/meta/recipes-devtools/unifdef/unifdef_2.10.bb b/meta/recipes-devtools/unifdef/unifdef_2.10.bb
new file mode 100644
index 0000000000..b8a0079840
--- /dev/null
+++ b/meta/recipes-devtools/unifdef/unifdef_2.10.bb
@@ -0,0 +1,15 @@
+SUMMARY = "Selectively remove #ifdef statements from sources"
+SECTION = "devel"
+LICENSE = "BSD-2-Clause"
+
+LIC_FILES_CHKSUM = "file://unifdef.c;endline=32;md5=2cc23f0382a6f560f6a9ecf4e040c0da"
+
+SRC_URI = "http://dotat.at/prog/${BPN}/${BP}.tar.xz"
+SRC_URI[md5sum] = "bb5d895e5ebbba5c5cc0c2771cf97ebe"
+SRC_URI[sha256sum] = "3b9b2b6b1952e9b9c1b9f734edec270689a35bdbf33ae66b50e19b2ed0d2df06"
+
+do_install() {
+ oe_runmake install DESTDIR=${D} prefix=${prefix}
+}
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-devtools/vala/vala.inc b/meta/recipes-devtools/vala/vala.inc
index 64e7daffe1..63056fde4c 100644
--- a/meta/recipes-devtools/vala/vala.inc
+++ b/meta/recipes-devtools/vala/vala.inc
@@ -7,14 +7,12 @@ BBCLASSEXTEND = "native"
HOMEPAGE = "http://vala-project.org"
LICENSE = "LGPLv2.1"
LIC_FILES_CHKSUM = "file://COPYING;md5=fbc093901857fcd118f065f900982c24"
-INC_PR = "r1"
SHRT_VER = "${@d.getVar('PV',1).split('.')[0]}.${@d.getVar('PV',1).split('.')[1]}"
SRC_URI = "http://ftp.gnome.org/pub/GNOME/sources/${BPN}/${SHRT_VER}/${BP}.tar.xz"
-inherit autotools-brokensep pkgconfig
-
-EXTRA_OECONF = "--disable-vapigen"
+inherit autotools pkgconfig
+FILES_${PN} += "${datadir}/${BPN}-${SHRT_VER}/vapi ${libdir}/${BPN}-${SHRT_VER}/"
FILES_${PN}-doc += "${datadir}/devhelp"
-FILES_${PN} += "${datadir}/${BPN}-${SHRT_VER}/vapi"
+FILES_${PN}-dbg += "${libdir}/${BPN}-${SHRT_VER}/.debug"
diff --git a/meta/recipes-devtools/vala/vala-0.16.0/0001-git-version-gen-don-t-append-dirty-if-we-re-not-in-g.patch b/meta/recipes-devtools/vala/vala/0001-git-version-gen-don-t-append-dirty-if-we-re-not-in-g.patch
index e6c7941a77..ac9dc8c83a 100644
--- a/meta/recipes-devtools/vala/vala-0.16.0/0001-git-version-gen-don-t-append-dirty-if-we-re-not-in-g.patch
+++ b/meta/recipes-devtools/vala/vala/0001-git-version-gen-don-t-append-dirty-if-we-re-not-in-g.patch
@@ -7,6 +7,8 @@ Subject: [PATCH] git-version-gen: don't append -dirty if we're not in git
* for example if we have some dirty directory and we unpack clean vala tarball in it, then it will append -dirty
Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
+
+Upstream-Status: Inappropriate [OE-Specific]
---
build-aux/git-version-gen | 25 ++++++++++++++-----------
1 files changed, 14 insertions(+), 11 deletions(-)
diff --git a/meta/recipes-devtools/vala/vala_0.16.0.bb b/meta/recipes-devtools/vala/vala_0.16.0.bb
deleted file mode 100644
index 92120b4a36..0000000000
--- a/meta/recipes-devtools/vala/vala_0.16.0.bb
+++ /dev/null
@@ -1,8 +0,0 @@
-require ${BPN}.inc
-
-PR = "${INC_PR}.1"
-
-SRC_URI += " file://0001-git-version-gen-don-t-append-dirty-if-we-re-not-in-g.patch"
-
-SRC_URI[md5sum] = "84b742e9cca4c90fde3026c3793c03c1"
-SRC_URI[sha256sum] = "3adb37aa2b35e2e2daed47552e85dbcbf752c0f7768b269d856993620073a657"
diff --git a/meta/recipes-devtools/vala/vala_0.26.1.bb b/meta/recipes-devtools/vala/vala_0.26.1.bb
new file mode 100644
index 0000000000..3df9cd4bae
--- /dev/null
+++ b/meta/recipes-devtools/vala/vala_0.26.1.bb
@@ -0,0 +1,6 @@
+require ${BPN}.inc
+
+SRC_URI += " file://0001-git-version-gen-don-t-append-dirty-if-we-re-not-in-g.patch"
+
+SRC_URI[md5sum] = "723a03b822d4cc47abc4019685970a3e"
+SRC_URI[sha256sum] = "8407abb19ab3a58bbfc0d288abb47666ef81f76d0540258c03965e7545f59e6b"
diff --git a/meta/recipes-devtools/valgrind/valgrind/add-ptest.patch b/meta/recipes-devtools/valgrind/valgrind/add-ptest.patch
index fdc9cc0e83..225f84305f 100644
--- a/meta/recipes-devtools/valgrind/valgrind/add-ptest.patch
+++ b/meta/recipes-devtools/valgrind/valgrind/add-ptest.patch
@@ -1,6 +1,6 @@
Modify vg_test wrapper to support PTEST formats
-This commit changes the valgrind regression test script vg_regtest to
+Change the valgrind regression test script vg_regtest to
support the yocto ptest stdout reporting format. The commit adds
'--yocto-ptest' as an optional argument to vg_regtest, which alters
the output to use the ptest infrastructure reporting format:
@@ -8,12 +8,16 @@ the output to use the ptest infrastructure reporting format:
instead of valgrind's internal test reporting format. Without the added
option, --yocto-ptest, the valgrind regression test output is unchanged.
+Enforce a 30 second time limit for each test.
+This resumes execution of the remaining tests when valgrind hangs.
+
Upstream-Status: Pending
Signed-off-by: Dave Lerner <dave.lerner@windriver.com>
+Signed-off-by: Tudor Florea <tudor.florea@enea.com>
diff --git a/tests/vg_regtest.in b/tests/vg_regtest.in
-index 224385f..dbbd23d 100755
+index 224385f..3e0383b 100755
--- a/tests/vg_regtest.in
+++ b/tests/vg_regtest.in
@@ -39,11 +39,11 @@
@@ -55,7 +59,41 @@ index 224385f..dbbd23d 100755
} else {
die $usage;
}
-@@ -394,19 +397,21 @@ sub do_diffs($$$$)
+@@ -340,13 +343,28 @@ sub read_vgtest_file($)
+ #----------------------------------------------------------------------------
+ # Since most of the program time is spent in system() calls, need this to
+ # propagate a Ctrl-C enabling us to quit.
+-sub mysystem($)
++# Enforce a 30 second time limit for each test.
++# This resumes execution of the remaining tests if valgrind hangs.
++sub mysystem($)
+ {
+- my $exit_code = system($_[0]);
+- ($exit_code == 2) and exit 1; # 2 is SIGINT
+- return $exit_code;
++ my $exit_code=0;
++ eval {
++ local $SIG{'ALRM'} = sub { die "timed out\n" };
++ alarm(30);
++ $exit_code = system($_[0]);
++ alarm (0);
++ ($exit_code == 2) and die "SIGINT\n"; # 2 is SIGINT
++ };
++ if ($@) {
++ if ($@ eq "timed out\n") {
++ print "timed out\n";
++ return 1;
++ }
++ if ($@ eq "SIGINT\n") {
++ exit 1;
++ }
++ }
+ }
+-
+ # if $keepunfiltered, copies $1 to $1.unfiltered.out
+ # renames $0 tp $1
+ sub filtered_rename($$)
+@@ -394,19 +412,21 @@ sub do_diffs($$$$)
# A match; remove .out and any previously created .diff files.
unlink("$name.$mid.out");
unlink(<$name.$mid.diff*>);
@@ -79,7 +117,7 @@ index 224385f..dbbd23d 100755
$vgtest =~ /^(.*)\.vgtest/;
my $name = $1;
my $fullname = "$dir/$name";
-@@ -425,7 +430,11 @@ sub do_one_test($$)
+@@ -425,7 +445,11 @@ sub do_one_test($$)
} elsif (256 == $prereq_res) {
# Nb: weird Perl-ism -- exit code of '1' is seen by Perl as 256...
# Prereq failed, skip.
@@ -92,7 +130,7 @@ index 224385f..dbbd23d 100755
return;
} else {
# Bad prereq; abort.
-@@ -438,7 +447,7 @@ sub do_one_test($$)
+@@ -438,7 +462,7 @@ sub do_one_test($$)
if (defined $progB) {
# If there is a progB, let's start it in background:
printf("%-16s valgrind $extraopts $vgopts $prog $args (progB: $progB $argsB)\n",
@@ -101,7 +139,7 @@ index 224385f..dbbd23d 100755
# progB.done used to detect child has finished. See below.
# Note: redirection of stdout and stderr is before $progB to allow argsB
# to e.g. redirect stdoutB to stderrB
-@@ -452,7 +461,8 @@ sub do_one_test($$)
+@@ -452,7 +476,8 @@ sub do_one_test($$)
. "touch progB.done) &");
}
} else {
@@ -111,7 +149,7 @@ index 224385f..dbbd23d 100755
}
# Pass the appropriate --tool option for the directory (can be overridden
-@@ -487,7 +497,7 @@ sub do_one_test($$)
+@@ -487,7 +512,7 @@ sub do_one_test($$)
# Find all the .stdout.exp files. If none, use /dev/null.
my @stdout_exps = <$name.stdout.exp*>;
@stdout_exps = ( "/dev/null" ) if (0 == scalar @stdout_exps);
@@ -120,7 +158,7 @@ index 224385f..dbbd23d 100755
# Filter stderr
$stderr_filter_args = $name if (! defined $stderr_filter_args);
-@@ -496,7 +506,7 @@ sub do_one_test($$)
+@@ -496,7 +521,7 @@ sub do_one_test($$)
# Find all the .stderr.exp files. At least one must exist.
my @stderr_exps = <$name.stderr.exp*>;
(0 != scalar @stderr_exps) or die "Could not find `$name.stderr.exp*'\n";
@@ -129,7 +167,7 @@ index 224385f..dbbd23d 100755
if (defined $progB) {
# wait for the child to be finished
-@@ -520,7 +530,7 @@ sub do_one_test($$)
+@@ -520,7 +545,7 @@ sub do_one_test($$)
# Find all the .stdoutB.exp files. If none, use /dev/null.
my @stdoutB_exps = <$name.stdoutB.exp*>;
@stdoutB_exps = ( "/dev/null" ) if (0 == scalar @stdoutB_exps);
@@ -138,7 +176,7 @@ index 224385f..dbbd23d 100755
# Filter stderr
$stderrB_filter_args = $name if (! defined $stderrB_filter_args);
-@@ -529,7 +539,7 @@ sub do_one_test($$)
+@@ -529,7 +554,7 @@ sub do_one_test($$)
# Find all the .stderrB.exp files. At least one must exist.
my @stderrB_exps = <$name.stderrB.exp*>;
(0 != scalar @stderrB_exps) or die "Could not find `$name.stderrB.exp*'\n";
@@ -147,7 +185,7 @@ index 224385f..dbbd23d 100755
}
# Maybe do post-test check
-@@ -541,7 +551,7 @@ sub do_one_test($$)
+@@ -541,7 +566,7 @@ sub do_one_test($$)
# Find all the .post.exp files. If none, use /dev/null.
my @post_exps = <$name.post.exp*>;
@post_exps = ( "/dev/null" ) if (0 == scalar @post_exps);
@@ -156,7 +194,7 @@ index 224385f..dbbd23d 100755
}
}
-@@ -550,6 +560,13 @@ sub do_one_test($$)
+@@ -550,6 +575,13 @@ sub do_one_test($$)
print("(cleanup operation failed: $cleanup)\n");
}
@@ -170,7 +208,7 @@ index 224385f..dbbd23d 100755
$num_tests_done++;
}
-@@ -589,7 +606,7 @@ sub test_one_dir($$)
+@@ -589,7 +621,7 @@ sub test_one_dir($$)
my $found_tests = (0 != (grep { $_ =~ /\.vgtest$/ } @fs));
if ($found_tests) {
@@ -179,7 +217,7 @@ index 224385f..dbbd23d 100755
}
foreach my $f (@fs) {
if (-d $f) {
-@@ -599,7 +616,7 @@ sub test_one_dir($$)
+@@ -599,7 +631,7 @@ sub test_one_dir($$)
}
}
if ($found_tests) {
@@ -188,7 +226,7 @@ index 224385f..dbbd23d 100755
}
chdir("..");
-@@ -625,10 +642,12 @@ sub summarise_results
+@@ -625,10 +657,12 @@ sub summarise_results
$num_failures{"stdout"}, plural($num_failures{"stdout"}),
$num_failures{"stderrB"}, plural($num_failures{"stderrB"}),
$num_failures{"stdoutB"}, plural($num_failures{"stdoutB"}),
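
For context, the alarm-based timeout added to mysystem() above boils down to: run the command, and if it has not finished after 30 seconds, report "timed out" and move on. A rough C equivalent of the same pattern is sketched below (illustrative only; run_with_timeout() and on_alarm() are invented names, and the actual change stays in Perl).

#include <signal.h>
#include <stdio.h>
#include <string.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

static void on_alarm(int sig) { (void)sig; }   /* exists only to interrupt waitpid() */

/* Run ARGV as a child process; if it is still running after TIMEOUT
   seconds, kill it and report "timed out", like the Perl alarm(30) wrapper. */
static int run_with_timeout(char *const argv[], unsigned timeout)
{
    struct sigaction sa;
    int status;
    pid_t pid = fork();

    if (pid < 0)
        return 1;                    /* fork failed */
    if (pid == 0) {                  /* child: exec the test command */
        execvp(argv[0], argv);
        _exit(127);
    }

    memset(&sa, 0, sizeof(sa));
    sa.sa_handler = on_alarm;        /* no SA_RESTART, so waitpid() fails with EINTR */
    sigaction(SIGALRM, &sa, NULL);
    alarm(timeout);

    if (waitpid(pid, &status, 0) < 0) {   /* alarm fired before the child exited */
        kill(pid, SIGKILL);
        waitpid(pid, &status, 0);
        printf("timed out\n");
        return 1;
    }
    alarm(0);
    return WIFEXITED(status) ? WEXITSTATUS(status) : 1;
}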
diff --git a/meta/recipes-devtools/valgrind/valgrind/glibc-2.19.patch b/meta/recipes-devtools/valgrind/valgrind/glibc-2.19.patch
deleted file mode 100644
index 7714b997fe..0000000000
--- a/meta/recipes-devtools/valgrind/valgrind/glibc-2.19.patch
+++ /dev/null
@@ -1,23 +0,0 @@
-Add support for compiling with glibc 2.19
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
-Upstream-Status: Pending
-Index: valgrind-3.9.0/configure.ac
-===================================================================
---- valgrind-3.9.0.orig/configure.ac 2014-02-12 09:03:07.879873700 -0800
-+++ valgrind-3.9.0/configure.ac 2014-02-12 09:07:35.099871774 -0800
-@@ -918,6 +918,13 @@
- DEFAULT_SUPP="$srcdir/glibc-2.34567-NPTL-helgrind.supp ${DEFAULT_SUPP}"
- DEFAULT_SUPP="$srcdir/glibc-2.X-drd.supp ${DEFAULT_SUPP}"
- ;;
-+ 2.19)
-+ AC_MSG_RESULT(2.19 family)
-+ AC_DEFINE([GLIBC_2_19], 1, [Define to 1 if you're using glibc 2.19.x])
-+ DEFAULT_SUPP="glibc-2.X.supp ${DEFAULT_SUPP}"
-+ DEFAULT_SUPP="$srcdir/glibc-2.34567-NPTL-helgrind.supp ${DEFAULT_SUPP}"
-+ DEFAULT_SUPP="$srcdir/glibc-2.X-drd.supp ${DEFAULT_SUPP}"
-+ ;;
- darwin)
- AC_MSG_RESULT(Darwin)
- AC_DEFINE([DARWIN_LIBC], 1, [Define to 1 if you're using Darwin])
diff --git a/meta/recipes-devtools/valgrind/valgrind/glibc-2.20.patch b/meta/recipes-devtools/valgrind/valgrind/glibc-2.20.patch
new file mode 100644
index 0000000000..fab4044d17
--- /dev/null
+++ b/meta/recipes-devtools/valgrind/valgrind/glibc-2.20.patch
@@ -0,0 +1,30 @@
+Backport of glibc 2.19 and 2.20 support.
+
+Upstream-Status: Backport
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
+diff --git a/configure.ac b/configure.ac
+index 229ab98..1c18108 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -918,6 +918,20 @@ case "${GLIBC_VERSION}" in
+ DEFAULT_SUPP="glibc-2.34567-NPTL-helgrind.supp ${DEFAULT_SUPP}"
+ DEFAULT_SUPP="glibc-2.X-drd.supp ${DEFAULT_SUPP}"
+ ;;
++ 2.19)
++ AC_MSG_RESULT(2.19 family)
++ AC_DEFINE([GLIBC_2_19], 1, [Define to 1 if you're using glibc 2.19.x])
++ DEFAULT_SUPP="glibc-2.X.supp ${DEFAULT_SUPP}"
++ DEFAULT_SUPP="$srcdir/glibc-2.34567-NPTL-helgrind.supp ${DEFAULT_SUPP}"
++ DEFAULT_SUPP="$srcdir/glibc-2.X-drd.supp ${DEFAULT_SUPP}"
++ ;;
++ 2.20)
++ AC_MSG_RESULT(2.20 family)
++ AC_DEFINE([GLIBC_2_20], 1, [Define to 1 if you're using glibc 2.20.x])
++ DEFAULT_SUPP="glibc-2.X.supp ${DEFAULT_SUPP}"
++ DEFAULT_SUPP="$srcdir/glibc-2.34567-NPTL-helgrind.supp ${DEFAULT_SUPP}"
++ DEFAULT_SUPP="$srcdir/glibc-2.X-drd.supp ${DEFAULT_SUPP}"
++ ;;
+ darwin)
+ AC_MSG_RESULT(Darwin)
+ AC_DEFINE([DARWIN_LIBC], 1, [Define to 1 if you're using Darwin])
diff --git a/meta/recipes-devtools/valgrind/valgrind/remove-arm-variant-specific.patch b/meta/recipes-devtools/valgrind/valgrind/remove-arm-variant-specific.patch
index 46dea60eb3..f84bf9ae9a 100644
--- a/meta/recipes-devtools/valgrind/valgrind/remove-arm-variant-specific.patch
+++ b/meta/recipes-devtools/valgrind/valgrind/remove-arm-variant-specific.patch
@@ -8,13 +8,13 @@ Upstream-Status: Pending
Signed-off-by: Dave Lerner <dave.lerner@windriver.com>
-diff --git a/none/tests/arm/Makefile.am b/none/tests/arm/Makefile.am
-index 2a19f5b..ccdeb77 100644
---- a/none/tests/arm/Makefile.am
-+++ b/none/tests/arm/Makefile.am
-@@ -16,15 +16,16 @@ EXTRA_DIST = \
- vcvt_fixed_float_VFP.vgtest \
- vfp.stdout.exp vfp.stderr.exp vfp.vgtest
+Index: valgrind-3.10.0/none/tests/arm/Makefile.am
+===================================================================
+--- valgrind-3.10.0.orig/none/tests/arm/Makefile.am
++++ valgrind-3.10.0/none/tests/arm/Makefile.am
+@@ -17,15 +17,16 @@ EXTRA_DIST = \
+ vfp.stdout.exp vfp.stderr.exp vfp.vgtest \
+ vfpv4_fma.stdout.exp vfpv4_fma.stderr.exp vfpv4_fma.vgtest
+# For yocto:
+# Only include tests that don't require Thumb.
@@ -31,8 +31,8 @@ index 2a19f5b..ccdeb77 100644
- v6intThumb \
v6media \
vcvt_fixed_float_VFP \
- vfp
-@@ -34,32 +35,3 @@ AM_CXXFLAGS += @FLAG_M32@
+ vfp \
+@@ -36,34 +37,3 @@ AM_CXXFLAGS += @FLAG_M32@
AM_CCASFLAGS += @FLAG_M32@
allexec_CFLAGS = $(AM_CFLAGS) @FLAG_W_NO_NONNULL@
@@ -65,3 +65,5 @@ index 2a19f5b..ccdeb77 100644
-intdiv_CFLAGS = $(AM_CFLAGS) -g -mcpu=cortex-a15 -mthumb
-ldrt_CFLAGS = $(AM_CFLAGS) -g -mcpu=cortex-a8 -mthumb
-ldrt_arm_CFLAGS = $(AM_CFLAGS) -g -mcpu=cortex-a8 -marm
+-
+-vfpv4_fma_CFLAGS = $(AM_CFLAGS) -g -O0 -mcpu=cortex-a15 -mfpu=vfpv4 -marm
diff --git a/meta/recipes-devtools/valgrind/valgrind/remove-ppc-tests-failing-build.patch b/meta/recipes-devtools/valgrind/valgrind/remove-ppc-tests-failing-build.patch
index 2a14e1852c..1c640ffb71 100644
--- a/meta/recipes-devtools/valgrind/valgrind/remove-ppc-tests-failing-build.patch
+++ b/meta/recipes-devtools/valgrind/valgrind/remove-ppc-tests-failing-build.patch
@@ -36,10 +36,10 @@ Upstream-Status: Pending
Signed-off-by: Dave Lerner <dave.lerner@windriver.com>
-diff --git a/memcheck/tests/ppc32/Makefile.am b/memcheck/tests/ppc32/Makefile.am
-index bd70eea..1436e8e 100644
---- a/memcheck/tests/ppc32/Makefile.am
-+++ b/memcheck/tests/ppc32/Makefile.am
+Index: valgrind-3.10.0/memcheck/tests/ppc32/Makefile.am
+===================================================================
+--- valgrind-3.10.0.orig/memcheck/tests/ppc32/Makefile.am
++++ valgrind-3.10.0/memcheck/tests/ppc32/Makefile.am
@@ -7,8 +7,7 @@ EXTRA_DIST = $(noinst_SCRIPTS) \
power_ISA2_05.stderr.exp power_ISA2_05.stdout.exp power_ISA2_05.vgtest \
power_ISA2_05.stdout.exp_Without_FPPO
@@ -50,11 +50,11 @@ index bd70eea..1436e8e 100644
power_ISA2_05_CFLAGS = $(AM_CFLAGS) $(WERROR) -Winline -Wall -Wshadow -g \
-I$(top_srcdir)/include @FLAG_M32@
-diff --git a/none/tests/ppc32/Makefile.am b/none/tests/ppc32/Makefile.am
-index 4f581b6..91ce7e7 100644
---- a/none/tests/ppc32/Makefile.am
-+++ b/none/tests/ppc32/Makefile.am
-@@ -50,16 +50,8 @@ check_PROGRAMS = \
+Index: valgrind-3.10.0/none/tests/ppc32/Makefile.am
+===================================================================
+--- valgrind-3.10.0.orig/none/tests/ppc32/Makefile.am
++++ valgrind-3.10.0/none/tests/ppc32/Makefile.am
+@@ -52,16 +52,8 @@ check_PROGRAMS = \
allexec \
bug129390-ppc32 \
bug139050-ppc32 \
@@ -67,9 +67,9 @@ index 4f581b6..91ce7e7 100644
- test_isa_2_07_part1 \
- test_isa_2_07_part2 \
- test_tm \
-- test_touch_tm
+- test_touch_tm \
+ ldstrev lsw mftocrf mcrfs test_fx test_gx \
-+ twi tw xlc_dbl_u32 power6_bcmp
++ twi tw xlc_dbl_u32 power6_bcmp \
+ ldst_multiple \
+ data-cache-instructions
- AM_CFLAGS += @FLAG_M32@
- AM_CXXFLAGS += @FLAG_M32@
diff --git a/meta/recipes-devtools/valgrind/valgrind/run-ptest b/meta/recipes-devtools/valgrind/valgrind/run-ptest
index 7d0584ada9..f9a72ec4a9 100755
--- a/meta/recipes-devtools/valgrind/valgrind/run-ptest
+++ b/meta/recipes-devtools/valgrind/valgrind/run-ptest
@@ -1,4 +1,5 @@
-#!/bin/bash
+#!/bin/sh
+
# run-ptest - 'ptest' test infrastructure shell script that
# wraps the valgrind regression script vg_regtest.
# Must be run in the /usr/lib/valgrind/ptest directory.
diff --git a/meta/recipes-devtools/valgrind/valgrind/valgrind-remove-rpath.patch b/meta/recipes-devtools/valgrind/valgrind/valgrind-remove-rpath.patch
new file mode 100644
index 0000000000..07a272633e
--- /dev/null
+++ b/meta/recipes-devtools/valgrind/valgrind/valgrind-remove-rpath.patch
@@ -0,0 +1,25 @@
+
+Upstream-Status: Inappropriate [embedded config]
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+
+Index: valgrind-3.10.0/none/tests/Makefile.am
+===================================================================
+--- valgrind-3.10.0.orig/none/tests/Makefile.am
++++ valgrind-3.10.0/none/tests/Makefile.am
+@@ -280,7 +280,6 @@ threaded_fork_LDADD = -lpthread
+ threadederrno_LDADD = -lpthread
+ tls_SOURCES = tls.c tls2.c
+ tls_DEPENDENCIES = tls.so tls2.so
+-tls_LDFLAGS = -Wl,-rpath,$(abs_top_builddir)/none/tests
+ tls_LDADD = tls.so tls2.so -lpthread
+ tls_so_SOURCES = tls_so.c
+ tls_so_DEPENDENCIES = tls2.so
+@@ -288,7 +287,7 @@ if VGCONF_OS_IS_DARWIN
+ tls_so_LDFLAGS = -dynamic -dynamiclib -all_load -fpic
+ tls_so_LDADD = `pwd`/tls2.so
+ else
+- tls_so_LDFLAGS = -Wl,-rpath,$(abs_top_builddir)/none/tests -shared -fPIC
++ tls_so_LDFLAGS = -shared -fPIC
+ tls_so_LDADD = tls2.so
+ endif
+ tls_so_CFLAGS = $(AM_CFLAGS) -fPIC
diff --git a/meta/recipes-devtools/valgrind/valgrind_3.9.0.bb b/meta/recipes-devtools/valgrind/valgrind_3.10.1.bb
index 3c6aa13364..a25922ae09 100644
--- a/meta/recipes-devtools/valgrind/valgrind_3.9.0.bb
+++ b/meta/recipes-devtools/valgrind/valgrind_3.10.1.bb
@@ -9,22 +9,22 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=c46082167a314d785d012a244748d803 \
X11DEPENDS = "virtual/libx11"
DEPENDS = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', '${X11DEPENDS}', '', d)}"
-PR = "r8"
SRC_URI = "http://www.valgrind.org/downloads/valgrind-${PV}.tar.bz2 \
file://fixed-perl-path.patch \
file://Added-support-for-PPC-instructions-mfatbu-mfatbl.patch \
file://sepbuildfix.patch \
- file://glibc-2.19.patch \
+ file://glibc-2.20.patch \
file://force-nostabs.patch \
file://remove-arm-variant-specific.patch \
file://remove-ppc-tests-failing-build.patch \
+ file://valgrind-remove-rpath.patch \
file://add-ptest.patch \
file://run-ptest \
"
-SRC_URI[md5sum] = "0947de8112f946b9ce64764af7be6df2"
-SRC_URI[sha256sum] = "e6af71a06bc2534541b07743e1d58dc3caf744f38205ca3e5b5a0bdf372ed6f0"
+SRC_URI[md5sum] = "60ddae962bc79e7c95cfc4667245707f"
+SRC_URI[sha256sum] = "fa253dc26ddb661b6269df58144eff607ea3f76a9bcfe574b0c7726e1dfcb997"
COMPATIBLE_HOST = '(i.86|x86_64|powerpc|powerpc64).*-linux'
COMPATIBLE_HOST_armv7a = 'arm.*-linux'
@@ -48,7 +48,7 @@ FILES_${PN}-dbg += "${libdir}/${PN}/*/.debug/*"
# redirect functions like strlen.
RRECOMMENDS_${PN} += "${TCLIBC}-dbg"
-RDEPENDS_${PN}-ptest += " sed perl eglibc-utils"
+RDEPENDS_${PN}-ptest += " sed perl glibc-utils perl-module-file-glob"
do_compile_ptest() {
oe_runmake check
diff --git a/meta/recipes-devtools/xmlto/xmlto_0.0.25.bb b/meta/recipes-devtools/xmlto/xmlto_0.0.25.bb
index 2747d6d2d6..2698b1c258 100644
--- a/meta/recipes-devtools/xmlto/xmlto_0.0.25.bb
+++ b/meta/recipes-devtools/xmlto/xmlto_0.0.25.bb
@@ -15,11 +15,12 @@ SRC_URI[sha256sum] = "c52b56d929e8d20fc19cd3b7ec238f8d039730c56ee311cc352e843147
inherit autotools
-# xmlto needs getopt/xmllint/xsltproc at runtime
+# xmlto needs getopt/xmllint/xsltproc/bash at runtime
RDEPENDS_${PN} = "docbook-xml-dtd4 \
docbook-xsl-stylesheets \
util-linux \
libxml2 \
+ bash \
"
RDEPENDS_${PN}_append_class-target = " \
libxslt-bin \
diff --git a/meta/recipes-extended/at/files/S99at b/meta/recipes-extended/at/at/atd.init
index eca379b3cd..eca379b3cd 100644
--- a/meta/recipes-extended/at/files/S99at
+++ b/meta/recipes-extended/at/at/atd.init
diff --git a/meta/recipes-extended/at/files/atd.service b/meta/recipes-extended/at/at/atd.service
index 6dc844504a..6dc844504a 100644
--- a/meta/recipes-extended/at/files/atd.service
+++ b/meta/recipes-extended/at/at/atd.service
diff --git a/meta/recipes-extended/at/files/configure-add-enable-pam.patch b/meta/recipes-extended/at/at/configure-add-enable-pam.patch
index 1e5efef128..1e5efef128 100644
--- a/meta/recipes-extended/at/files/configure-add-enable-pam.patch
+++ b/meta/recipes-extended/at/at/configure-add-enable-pam.patch
diff --git a/meta/recipes-extended/at/files/file_replacement_with_gplv2.patch b/meta/recipes-extended/at/at/file_replacement_with_gplv2.patch
index d5ef0032be..d5ef0032be 100644
--- a/meta/recipes-extended/at/files/file_replacement_with_gplv2.patch
+++ b/meta/recipes-extended/at/at/file_replacement_with_gplv2.patch
diff --git a/meta/recipes-extended/at/files/fix_parallel_build_error.patch b/meta/recipes-extended/at/at/fix_parallel_build_error.patch
index 66ff3e1c11..66ff3e1c11 100644
--- a/meta/recipes-extended/at/files/fix_parallel_build_error.patch
+++ b/meta/recipes-extended/at/at/fix_parallel_build_error.patch
diff --git a/meta/recipes-extended/at/files/pam.conf.patch b/meta/recipes-extended/at/at/pam.conf.patch
index dfe76d7487..b5ceb9ae82 100644
--- a/meta/recipes-extended/at/files/pam.conf.patch
+++ b/meta/recipes-extended/at/at/pam.conf.patch
@@ -3,19 +3,25 @@ oe doesn't support "@include", use the concrete directive instead.
Upstream-Status: Pending
Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
---- at-3.1.12/pam.conf.orig 2009-11-23 23:11:52.000000000 +0800
-+++ at-3.1.12/pam.conf 2011-07-15 11:14:04.132818950 +0800
-@@ -2,8 +2,8 @@
+Index: at-3.1.15/pam.conf
+===================================================================
+--- at-3.1.15.orig/pam.conf
++++ at-3.1.15/pam.conf
+@@ -1,10 +1,11 @@
+ #
# The PAM configuration file for the at daemon
#
-
--auth required pam_env.so
--@include common-auth
--@include common-account
--@include common-session-noninteractive
--session required pam_limits.so
++
+auth required pam_env.so
+auth include common-auth
+account include common-account
+session include common-session-noninteractive
+session required pam_limits.so
++session required pam_loginuid.so
+
+-auth required pam_env.so
+-@include common-auth
+-@include common-account
+-session required pam_loginuid.so
+-@include common-session-noninteractive
+-session required pam_limits.so
diff --git a/meta/recipes-extended/at/files/posixtm.c b/meta/recipes-extended/at/at/posixtm.c
index 5514ba4fe2..5514ba4fe2 100644
--- a/meta/recipes-extended/at/files/posixtm.c
+++ b/meta/recipes-extended/at/at/posixtm.c
diff --git a/meta/recipes-extended/at/files/posixtm.h b/meta/recipes-extended/at/at/posixtm.h
index e91749d736..e91749d736 100644
--- a/meta/recipes-extended/at/files/posixtm.h
+++ b/meta/recipes-extended/at/at/posixtm.h
diff --git a/meta/recipes-extended/at/at_3.1.14.bb b/meta/recipes-extended/at/at_3.1.16.bb
index 68a24b8cd0..f16edba5eb 100644
--- a/meta/recipes-extended/at/at_3.1.14.bb
+++ b/meta/recipes-extended/at/at_3.1.16.bb
@@ -7,9 +7,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=4325afd396febcb659c36b49533135d4"
DEPENDS = "flex flex-native \
${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}"
-VIRTUAL-RUNTIME_initscripts ?= "initscripts"
RDEPENDS_${PN} = "${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_DEPS}', '', d)} \
- ${VIRTUAL-RUNTIME_initscripts} \
"
PAM_DEPS = "libpam libpam-runtime pam-plugin-env pam-plugin-limits"
@@ -22,15 +20,15 @@ SRC_URI = "${DEBIAN_MIRROR}/main/a/at/at_${PV}.orig.tar.gz \
file://posixtm.c \
file://posixtm.h \
file://file_replacement_with_gplv2.patch \
- file://S99at \
+ file://atd.init \
file://atd.service \
${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)}"
PAM_SRC_URI = "file://pam.conf.patch \
file://configure-add-enable-pam.patch"
-SRC_URI[md5sum] = "d41cfd79033b6e49a8838add59a42ac6"
-SRC_URI[sha256sum] = "cd092bf05d29c25b286f55a960ce8b8c3c5beb571d86ed8eb1dfb3b61291b3ae"
+SRC_URI[md5sum] = "d05da75d9b75d93917ffb16ab48b1e19"
+SRC_URI[sha256sum] = "cb9af59c6a54edce9536ba629841055409d1f89d8ae26494727a97141fb4d5c1"
EXTRA_OECONF += "ac_cv_path_SENDMAIL=/bin/true \
--with-daemon_username=root \
@@ -39,7 +37,10 @@ EXTRA_OECONF += "ac_cv_path_SENDMAIL=/bin/true \
--with-atspool=/var/spool/at/spool \
ac_cv_header_security_pam_appl_h=${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'yes', 'no', d)} "
-inherit autotools-brokensep systemd
+inherit autotools-brokensep systemd update-rc.d
+
+INITSCRIPT_NAME = "atd"
+INITSCRIPT_PARAMS = "defaults"
SYSTEMD_SERVICE_${PN} = "atd.service"
@@ -53,9 +54,7 @@ do_install () {
oe_runmake -e "IROOT=${D}" install
install -d ${D}${sysconfdir}/init.d
- install -d ${D}${sysconfdir}/rcS.d
- install -m 0755 ${WORKDIR}/S99at ${D}${sysconfdir}/init.d/atd
- ln -sf ../init.d/atd ${D}${sysconfdir}/rcS.d/S99at
+ install -m 0755 ${WORKDIR}/atd.init ${D}${sysconfdir}/init.d/atd
# install systemd unit files
install -d ${D}${systemd_unitdir}/system
diff --git a/meta/recipes-extended/bash/bash.inc b/meta/recipes-extended/bash/bash.inc
index 2fe0c6b574..a00db5c821 100644
--- a/meta/recipes-extended/bash/bash.inc
+++ b/meta/recipes-extended/bash/bash.inc
@@ -8,6 +8,7 @@ inherit autotools-brokensep gettext texinfo update-alternatives ptest
PARALLEL_MAKE = ""
+EXTRA_AUTORECONF += "--exclude=autoheader"
EXTRA_OECONF = "--enable-job-control"
ALTERNATIVE_${PN} = "sh"
@@ -15,8 +16,6 @@ ALTERNATIVE_LINK_NAME[sh] = "${base_bindir}/sh"
ALTERNATIVE_TARGET[sh] = "${base_bindir}/bash"
ALTERNATIVE_PRIORITY = "100"
-export AUTOHEADER = "true"
-
RDEPENDS_${PN} += "base-files"
RDEPENDS_${PN}_class-nativesdk = ""
RDEPENDS_${PN}-ptest += "make"
@@ -38,6 +37,10 @@ do_install_append () {
mv ${D}${bindir}/bash ${D}${base_bindir}
fi
}
+do_install_append_class-target () {
+ # Clean host path in bashbug
+ sed -i -e "s,${STAGING_DIR_TARGET},,g" ${D}${bindir}/bashbug
+}
do_install_ptest () {
make INSTALL_TEST_DIR=${D}${PTEST_PATH}/tests install-test
diff --git a/meta/recipes-extended/bash/bash_3.2.48.bb b/meta/recipes-extended/bash/bash_3.2.48.bb
index fe04b28e7c..35e624bb8b 100644
--- a/meta/recipes-extended/bash/bash_3.2.48.bb
+++ b/meta/recipes-extended/bash/bash_3.2.48.bb
@@ -6,9 +6,15 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=fd5d9bcabd8ed5a54a01ce8d183d592a"
PR = "r11"
SRC_URI = "${GNU_MIRROR}/bash/bash-${PV}.tar.gz;name=tarball \
- ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-049;apply=yes;striplevel=0;name=patch001 \
- ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-050;apply=yes;striplevel=0;name=patch002 \
- ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-051;apply=yes;striplevel=0;name=patch003 \
+ ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-049;apply=yes;striplevel=0;name=patch049 \
+ ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-050;apply=yes;striplevel=0;name=patch050 \
+ ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-051;apply=yes;striplevel=0;name=patch051 \
+ ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-052;apply=yes;striplevel=0;name=patch052 \
+ ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-053;apply=yes;striplevel=0;name=patch053 \
+ ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-054;apply=yes;striplevel=0;name=patch054 \
+ ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-055;apply=yes;striplevel=0;name=patch055 \
+ ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-056;apply=yes;striplevel=0;name=patch056 \
+ ${GNU_MIRROR}/bash/bash-3.2-patches/bash32-057;apply=yes;striplevel=0;name=patch057 \
file://mkbuiltins_have_stringize.patch \
file://build-tests.patch \
file://test-output.patch \
@@ -17,12 +23,21 @@ SRC_URI = "${GNU_MIRROR}/bash/bash-${PV}.tar.gz;name=tarball \
SRC_URI[tarball.md5sum] = "338dcf975a93640bb3eaa843ca42e3f8"
SRC_URI[tarball.sha256sum] = "128d281bd5682ba5f6953122915da71976357d7a76490d266c9173b1d0426348"
-SRC_URI[patch001.md5sum] = "af571a2d164d5abdcae4499e94e8892c"
-SRC_URI[patch001.sha256sum] = "b1217ed94bdb95dc878fa5cabbf8a164435eb0d9da23a392198f48566ee34a2f"
-SRC_URI[patch002.md5sum] = "8443d4385d73ec835abe401d90591377"
-SRC_URI[patch002.sha256sum] = "081bb03c580ecee63ba03b40beb3caf509eca29515b2e8dd3c078503609a1642"
-SRC_URI[patch003.md5sum] = "15c6653042e9814aa87120098fc7a849"
-SRC_URI[patch003.sha256sum] = "354886097cd95b4def77028f32ee01e2e088d58a98184fede9d3ce9320e218ef"
-
-SRC_URI[md5sum] = "338dcf975a93640bb3eaa843ca42e3f8"
-SRC_URI[sha256sum] = "128d281bd5682ba5f6953122915da71976357d7a76490d266c9173b1d0426348"
+SRC_URI[patch049.md5sum] = "af571a2d164d5abdcae4499e94e8892c"
+SRC_URI[patch049.sha256sum] = "b1217ed94bdb95dc878fa5cabbf8a164435eb0d9da23a392198f48566ee34a2f"
+SRC_URI[patch050.md5sum] = "8443d4385d73ec835abe401d90591377"
+SRC_URI[patch050.sha256sum] = "081bb03c580ecee63ba03b40beb3caf509eca29515b2e8dd3c078503609a1642"
+SRC_URI[patch051.md5sum] = "15c6653042e9814aa87120098fc7a849"
+SRC_URI[patch051.sha256sum] = "354886097cd95b4def77028f32ee01e2e088d58a98184fede9d3ce9320e218ef"
+SRC_URI[patch052.md5sum] = "691023a944bbb9003cc92ad462d91fa1"
+SRC_URI[patch052.sha256sum] = "a0eccf9ceda50871db10d21efdd74b99e35efbd55c970c400eeade012816bb61"
+SRC_URI[patch053.md5sum] = "eb97d1c9230a55283d9dac69d3de2e46"
+SRC_URI[patch053.sha256sum] = "fe6f0e96e0b966eaed9fb5e930ca12891f4380f30f9e0a773d200ff2063a864e"
+SRC_URI[patch054.md5sum] = "1107744058c43b247f597584b88ba0a6"
+SRC_URI[patch054.sha256sum] = "c6dab911e85688c542ce75afc175dbb4e5011de5102758e19a4a80dac1e79359"
+SRC_URI[patch055.md5sum] = "05d201176d3499e2dfa4a73d09d42f05"
+SRC_URI[patch055.sha256sum] = "c0e816700837942ed548da74e5917f74b70cbbbb10c9f2caf73e8e06a0713d0a"
+SRC_URI[patch056.md5sum] = "222eaa3a2c26f54a15aa5e08817a534a"
+SRC_URI[patch056.sha256sum] = "063a8d8d74e4407bf07a32b965b8ef6d213a66abdb6af26cc3584a437a56bbb4"
+SRC_URI[patch057.md5sum] = "47d98e3e042892495c5efe54ec6e5913"
+SRC_URI[patch057.sha256sum] = "5fc689394d515990f5ea74e2df765fc6e5e42ca44b4591b2c6f9be4b0cadf0f0"
diff --git a/meta/recipes-extended/bash/bash_4.3.bb b/meta/recipes-extended/bash/bash_4.3.bb
index 25b7410c52..54f21017db 100644
--- a/meta/recipes-extended/bash/bash_4.3.bb
+++ b/meta/recipes-extended/bash/bash_4.3.bb
@@ -5,6 +5,36 @@ LICENSE = "GPLv3+"
LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
SRC_URI = "${GNU_MIRROR}/bash/${BPN}-${PV}.tar.gz;name=tarball \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-001;apply=yes;striplevel=0;name=patch001 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-002;apply=yes;striplevel=0;name=patch002 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-003;apply=yes;striplevel=0;name=patch003 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-004;apply=yes;striplevel=0;name=patch004 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-005;apply=yes;striplevel=0;name=patch005 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-006;apply=yes;striplevel=0;name=patch006 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-007;apply=yes;striplevel=0;name=patch007 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-008;apply=yes;striplevel=0;name=patch008 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-009;apply=yes;striplevel=0;name=patch009 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-010;apply=yes;striplevel=0;name=patch010 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-011;apply=yes;striplevel=0;name=patch011 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-012;apply=yes;striplevel=0;name=patch012 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-013;apply=yes;striplevel=0;name=patch013 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-014;apply=yes;striplevel=0;name=patch014 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-015;apply=yes;striplevel=0;name=patch015 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-016;apply=yes;striplevel=0;name=patch016 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-017;apply=yes;striplevel=0;name=patch017 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-018;apply=yes;striplevel=0;name=patch018 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-019;apply=yes;striplevel=0;name=patch019 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-020;apply=yes;striplevel=0;name=patch020 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-021;apply=yes;striplevel=0;name=patch021 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-022;apply=yes;striplevel=0;name=patch022 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-023;apply=yes;striplevel=0;name=patch023 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-024;apply=yes;striplevel=0;name=patch024 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-025;apply=yes;striplevel=0;name=patch025 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-026;apply=yes;striplevel=0;name=patch026 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-027;apply=yes;striplevel=0;name=patch027 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-028;apply=yes;striplevel=0;name=patch028 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-029;apply=yes;striplevel=0;name=patch029 \
+ ${GNU_MIRROR}/bash/bash-4.3-patches/bash43-030;apply=yes;striplevel=0;name=patch030 \
file://execute_cmd.patch;striplevel=0 \
file://mkbuiltins_have_stringize.patch \
file://build-tests.patch \
@@ -15,4 +45,65 @@ SRC_URI = "${GNU_MIRROR}/bash/${BPN}-${PV}.tar.gz;name=tarball \
SRC_URI[tarball.md5sum] = "81348932d5da294953e15d4814c74dd1"
SRC_URI[tarball.sha256sum] = "afc687a28e0e24dc21b988fa159ff9dbcf6b7caa92ade8645cc6d5605cd024d4"
+SRC_URI[patch001.md5sum] = "1ab682b4e36afa4cf1b426aa7ac81c0d"
+SRC_URI[patch001.sha256sum] = "ecb3dff2648667513e31554b3ad054ccd89fce38e33367c9459ac3a285153742"
+SRC_URI[patch002.md5sum] = "8fc22cf50ec85da00f6af3d66f7ddc1b"
+SRC_URI[patch002.sha256sum] = "eee7cd7062ab29a9e4f02924d9c367264dcb8b162703f74ff6eb8f175a91502b"
+SRC_URI[patch003.md5sum] = "a41728eca78858758e26b5dea64ae506"
+SRC_URI[patch003.sha256sum] = "000e6eac50cd9053ce0630db01239dcdead04a2c2c351c47e2b51dac1ac1087d"
+SRC_URI[patch004.md5sum] = "bf8d53d227829d67235927689a03cc7a"
+SRC_URI[patch004.sha256sum] = "5ea0a42c6506720d26e6d3c5c358e9a0d49f6f189d69a8ed34d5935964821338"
+SRC_URI[patch005.md5sum] = "c0c00935c8b8ffff76e8ab77e7be7d15"
+SRC_URI[patch005.sha256sum] = "1ac83044032b9f5f11aeca8a344ae3c524ec2156185d3adbb8ad3e7a165aa3fa"
+SRC_URI[patch006.md5sum] = "6f01e364cd092faa28dd7119f47ddb5f"
+SRC_URI[patch006.sha256sum] = "a0648ee72d15e4a90c8b77a5c6b19f8d89e28c1bc881657d22fe26825f040213"
+SRC_URI[patch007.md5sum] = "dcf471d222bcd83283d3094e6ceeb6f8"
+SRC_URI[patch007.sha256sum] = "1113e321c59cf6a8648a36245bbe4217cf8acf948d71e67886dad7d486f8f3a3"
+SRC_URI[patch008.md5sum] = "f7553416646dc26c266454c78a916d36"
+SRC_URI[patch008.sha256sum] = "9941a98a4987192cc5ce3d45afe879983cad2f0bec96d441a4edd9033767f95e"
+SRC_URI[patch009.md5sum] = "7e73d2151f4064b484a4ba2c4b09960e"
+SRC_URI[patch009.sha256sum] = "c0226d6728946b2f53cdebf090bcd1c01627f01fee03295768605caa80bb40a5"
+SRC_URI[patch010.md5sum] = "a275463d21735bb6d7161f9fbd320d8f"
+SRC_URI[patch010.sha256sum] = "ce05799c0137314c70c7b6ea0477c90e1ac1d52e113344be8e32fa5a55c9f0b7"
+SRC_URI[patch011.md5sum] = "c17103ee20420d77e46b224c8d3fceda"
+SRC_URI[patch011.sha256sum] = "7c63402cdbc004a210f6c1c527b63b13d8bb9ec9c5a43d5c464a9010ff6f7f3b"
+SRC_URI[patch012.md5sum] = "3e2a057a19d02b3f92a3a09eacbc03ae"
+SRC_URI[patch012.sha256sum] = "3e1379030b35fbcf314e9e7954538cf4b43be1507142b29efae39eef997b8c12"
+SRC_URI[patch013.md5sum] = "fb377143a996d4ff087a2771bc8332f9"
+SRC_URI[patch013.sha256sum] = "bfa8ca5336ab1f5ef988434a4bdedf71604aa8a3659636afa2ce7c7446c42c79"
+SRC_URI[patch014.md5sum] = "1a1aaecc99a9d0cbc310e8e247dcc8b6"
+SRC_URI[patch014.sha256sum] = "5a4d6fa2365b6eb725a9d4966248b5edf7630a4aeb3fa8d526b877972658ac13"
+SRC_URI[patch015.md5sum] = "4f04387458a3c1b4d460d199f49991a8"
+SRC_URI[patch015.sha256sum] = "13293e8a24e003a44d7fe928c6b1e07b444511bed2d9406407e006df28355e8d"
+SRC_URI[patch016.md5sum] = "90e759709720c4f877525bebc9d5dc06"
+SRC_URI[patch016.sha256sum] = "92d60bcf49f61bd7f1ccb9602bead6f2c9946d79dea0e5ec0589bb3bfa5e0773"
+SRC_URI[patch017.md5sum] = "11e4046e1b86070f6adbb7ffc89641be"
+SRC_URI[patch017.sha256sum] = "1267c25c6b5ba57042a7bb6c569a6de02ffd0d29530489a16666c3b8a23e7780"
+SRC_URI[patch018.md5sum] = "cd5a9b46f5bea0dc0248c93c7dfac011"
+SRC_URI[patch018.sha256sum] = "7aa8b40a9e973931719d8cc72284a8fb3292b71b522db57a5a79052f021a3d58"
+SRC_URI[patch019.md5sum] = "cff4dc024d9d3456888aaaf8a36ca774"
+SRC_URI[patch019.sha256sum] = "a7a91475228015d676cafa86d2d7aa9c5d2139aa51485b6bbdebfdfbcf0d2d23"
+SRC_URI[patch020.md5sum] = "167839c5f147347f4a03d88ab97ff787"
+SRC_URI[patch020.sha256sum] = "ca5e86d87f178128641fe91f2f094875b8c1eb2de9e0d2e9154f5d5cc0336c98"
+SRC_URI[patch021.md5sum] = "1d350671c48dec30b34d8b81f09cd79d"
+SRC_URI[patch021.sha256sum] = "41439f06883e6bd11c591d9d5e9ae08afbc2abd4b935e1d244b08100076520a9"
+SRC_URI[patch022.md5sum] = "11c349af66a55481a3215ef2520bec36"
+SRC_URI[patch022.sha256sum] = "fd4d47bb95c65863f634c4706c65e1e3bae4ee8460c72045c0a0618689061a88"
+SRC_URI[patch023.md5sum] = "b3cb0d80fd0c47728264405cbb3b23c7"
+SRC_URI[patch023.sha256sum] = "9ac250c7397a8f53dbc84dfe790d2a418fbf1fe090bcece39b4a5c84a2d300d4"
+SRC_URI[patch024.md5sum] = "b5ea5600942acceb4b6f07313d2de74e"
+SRC_URI[patch024.sha256sum] = "3b505882a0a6090667d75824fc919524cd44cc3bd89dd08b7c4e622d3f960f6c"
+SRC_URI[patch025.md5sum] = "193c06f578d38ffdbaebae9c51a7551f"
+SRC_URI[patch025.sha256sum] = "1e5186f5c4a619bb134a1177d9e9de879f3bb85d9c5726832b03a762a2499251"
+SRC_URI[patch026.md5sum] = "922578e2be7ed03729454e92ee8d3f3a"
+SRC_URI[patch026.sha256sum] = "2ecc12201b3ba4273b63af4e9aad2305168cf9babf6d11152796db08724c214d"
+SRC_URI[patch027.md5sum] = "8ff6948b16f2db5c29b1b9ae1085bbe7"
+SRC_URI[patch027.sha256sum] = "1eb76ad28561d27f7403ff3c76a36e932928a4b58a01b868d663c165f076dabe"
+SRC_URI[patch028.md5sum] = "dd51fa67913b5dca45a702b672b3323f"
+SRC_URI[patch028.sha256sum] = "e8b0dbed4724fa7b9bd8ff77d12c7f03da0fbfc5f8251ef5cb8511eb082b469d"
+SRC_URI[patch029.md5sum] = "0729364c977ef4271e9f8dfafadacf67"
+SRC_URI[patch029.sha256sum] = "4cc4a397fe6bc63ecb97d030a4e44258ef2d4e076d0e90c77782968cc43d6292"
+SRC_URI[patch030.md5sum] = "efb709fdb1368945513de23ccbfae053"
+SRC_URI[patch030.sha256sum] = "85434f8a2f379d0c49a3ff6d9ffa12c8b157188dd739e556d638217d2a58385b"
+
BBCLASSEXTEND = "nativesdk"
diff --git a/meta/recipes-extended/byacc/byacc_20140422.bb b/meta/recipes-extended/byacc/byacc_20141128.bb
index cf57738bea..15c14c9733 100644
--- a/meta/recipes-extended/byacc/byacc_20140422.bb
+++ b/meta/recipes-extended/byacc/byacc_20141128.bb
@@ -7,5 +7,5 @@ LICENSE = "PD"
LIC_FILES_CHKSUM = "file://package/debian/copyright;md5=f186cf0d59bac042b75830396ec389a3"
require byacc.inc
-SRC_URI[md5sum] = "e7c13c5e207dc05eab9145cc9972397b"
-SRC_URI[sha256sum] = "2f104c7e200dd86844d5f3521e12cb55fc48a9c3da3480a65fde2ca8c053bdcc"
+SRC_URI[md5sum] = "acb0ff0fb6cc414a6b50c799794b2425"
+SRC_URI[sha256sum] = "f517fc21f08c1a1f010177357df58fc64eb1131011e5dcd48c19fe44c47198d0"
diff --git a/meta/recipes-extended/bzip2/bzip2_1.0.6.bb b/meta/recipes-extended/bzip2/bzip2_1.0.6.bb
index 9dd9e6a1f9..ed12277566 100644
--- a/meta/recipes-extended/bzip2/bzip2_1.0.6.bb
+++ b/meta/recipes-extended/bzip2/bzip2_1.0.6.bb
@@ -8,10 +8,10 @@ LICENSE = "bzip2"
LIC_FILES_CHKSUM = "file://LICENSE;beginline=8;endline=37;md5=40d9d1eb05736d1bfc86cfdd9106e6b2"
PR = "r5"
-SRC_URI = "http://www.bzip.org/${PV}/${BPN}-${PV}.tar.gz \
- file://configure.ac \
- file://run-ptest \
- file://Makefile.am"
+SRC_URI = "http://www.bzip.org/${PV}/${BP}.tar.gz \
+ file://configure.ac;subdir=${BP} \
+ file://Makefile.am;subdir=${BP} \
+ file://run-ptest"
SRC_URI[md5sum] = "00b516f4704d4a7cb50a1d97e6e8e15b"
SRC_URI[sha256sum] = "a2848f34fcd5d6cf47def00461fcb528a0484d8edef8208d6d2e2909dc61d9cd"
@@ -27,12 +27,6 @@ ALTERNATIVE_${PN} = "bunzip2 bzcat"
#install binaries to bzip2-native under sysroot for replacement-native
EXTRA_OECONF_append_class-native = " --bindir=${STAGING_BINDIR_NATIVE}/${PN}"
-do_extraunpack () {
- cp ${WORKDIR}/configure.ac ${S}/
- cp ${WORKDIR}/Makefile.am ${S}/
-}
-
-addtask extraunpack after do_unpack before do_patch
do_install_ptest () {
cp -f ${B}/Makefile ${D}${PTEST_PATH}/Makefile
diff --git a/meta/recipes-extended/cpio/cpio-2.11/fix-memory-overrun.patch b/meta/recipes-extended/cpio/cpio-2.11/fix-memory-overrun.patch
new file mode 100644
index 0000000000..89cd3cfa50
--- /dev/null
+++ b/meta/recipes-extended/cpio/cpio-2.11/fix-memory-overrun.patch
@@ -0,0 +1,220 @@
+cpio: Fix memory overrun on reading improperly created link records
+
+Signed-off-by: Bian Naimeng <biannm@cn.fujitsu.com>
+
+http://git.savannah.gnu.org/cgit/cpio.git/commit/?id=746f3ff670dcfcdd28fcc990e79cd6fccc7ae48d
+
+ * src/copyin.c (get_link_name): New function.
+ (list_file, copyin_link): use get_link_name
+
+ * tests/symlink-bad-length.at: New file.
+ * tests/symlink-long.at: New file.
+ * tests/Makefile.am: Add new files.
+ * tests/testsuite.at: Likewise.
+
+ See http://lists.gnu.org/archive/html/bug-cpio/2014-11/msg00007.html
+
+Upstream-Status: Backport
+
+Signed-off-by: Sergey Poznyakoff <gray@gnu.org.ua>
+
+diff -Nurp cpio-2.11.orig/src/copyin.c cpio-2.11/src/copyin.c
+--- cpio-2.11.orig/src/copyin.c 2010-02-15 18:02:23.000000000 +0800
++++ cpio-2.11/src/copyin.c 2014-12-08 13:14:04.355547508 +0800
+@@ -126,6 +126,28 @@ tape_skip_padding (int in_file_des, off_
+ }
+
+
++static char *
++get_link_name (struct cpio_file_stat *file_hdr, int in_file_des)
++{
++ off_t n = file_hdr->c_filesize + 1;
++ char *link_name;
++
++ if (n == 0 || n > SIZE_MAX)
++ {
++ error (0, 0, _("%s: stored filename length too big"), file_hdr->c_name);
++ link_name = NULL;
++ }
++ else
++ {
++ link_name = xmalloc (n);
++ tape_buffered_read (link_name, in_file_des, file_hdr->c_filesize);
++ link_name[file_hdr->c_filesize] = '\0';
++ tape_skip_padding (in_file_des, file_hdr->c_filesize);
++ }
++ return link_name;
++}
++
++
+ static void
+ list_file(struct cpio_file_stat* file_hdr, int in_file_des)
+ {
+@@ -136,21 +158,16 @@ list_file(struct cpio_file_stat* file_hd
+ {
+ if (archive_format != arf_tar && archive_format != arf_ustar)
+ {
+- char *link_name = NULL; /* Name of hard and symbolic links. */
+-
+- link_name = (char *) xmalloc ((unsigned int) file_hdr->c_filesize + 1);
+- link_name[file_hdr->c_filesize] = '\0';
+- tape_buffered_read (link_name, in_file_des, file_hdr->c_filesize);
+- long_format (file_hdr, link_name);
+- free (link_name);
+- tape_skip_padding (in_file_des, file_hdr->c_filesize);
+- return;
++ char *link_name = get_link_name (file_hdr, in_file_des);
++ if (link_name)
++ {
++ long_format (file_hdr, link_name);
++ free (link_name);
++ }
+ }
+ else
+- {
+ long_format (file_hdr, file_hdr->c_tar_linkname);
+- return;
+- }
++ return;
+ }
+ else
+ #endif
+@@ -650,10 +667,7 @@ copyin_link(struct cpio_file_stat *file_
+
+ if (archive_format != arf_tar && archive_format != arf_ustar)
+ {
+- link_name = (char *) xmalloc ((unsigned int) file_hdr->c_filesize + 1);
+- link_name[file_hdr->c_filesize] = '\0';
+- tape_buffered_read (link_name, in_file_des, file_hdr->c_filesize);
+- tape_skip_padding (in_file_des, file_hdr->c_filesize);
++ link_name = get_link_name (file_hdr, in_file_des);
+ }
+ else
+ {
+diff -Nurp cpio-2.11.orig/tests/Makefile.am cpio-2.11/tests/Makefile.am
+--- cpio-2.11.orig/tests/Makefile.am 2010-02-15 18:02:23.000000000 +0800
++++ cpio-2.11/tests/Makefile.am 2014-12-08 13:14:49.931545727 +0800
+@@ -52,6 +52,8 @@ TESTSUITE_AT = \
+ setstat04.at\
+ setstat05.at\
+ symlink.at\
++ symlink-bad-length.at\
++ symlink-long.at\
+ version.at
+
+ TESTSUITE = $(srcdir)/testsuite
+diff -Nurp cpio-2.11.orig/tests/symlink-bad-length.at cpio-2.11/tests/symlink-bad-length.at
+--- cpio-2.11.orig/tests/symlink-bad-length.at 1970-01-01 08:00:00.000000000 +0800
++++ cpio-2.11/tests/symlink-bad-length.at 2014-12-08 13:17:45.979538847 +0800
+@@ -0,0 +1,49 @@
++# Process this file with autom4te to create testsuite. -*- Autotest -*-
++# Copyright (C) 2014 Free Software Foundation, Inc.
++
++# This program is free software; you can redistribute it and/or modify
++# it under the terms of the GNU General Public License as published by
++# the Free Software Foundation; either version 3, or (at your option)
++# any later version.
++
++# This program is distributed in the hope that it will be useful,
++# but WITHOUT ANY WARRANTY; without even the implied warranty of
++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
++# GNU General Public License for more details.
++
++# You should have received a copy of the GNU General Public License
++# along with this program; if not, write to the Free Software
++# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
++# 02110-1301 USA.
++
++# Cpio v2.11 did segfault with badly set symlink length.
++# References:
++# http://lists.gnu.org/archive/html/bug-cpio/2014-11/msg00007.html
++
++AT_SETUP([symlink-bad-length])
++AT_KEYWORDS([symlink-long copyout])
++
++AT_DATA([ARCHIVE.base64],
++[x3EjAIBAtIEtJy8nAQAAAHRUYW0FAAAADQBGSUxFAABzb21lIGNvbnRlbnQKAMdxIwBgQ/+hLScv
++JwEAAAB0VEhuBQD/////TElOSwAARklMRcdxAAAAAAAAAAAAAAEAAAAAAAAACwAAAAAAVFJBSUxF
++UiEhIQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
++])
++
++AT_CHECK([
++base64 -d ARCHIVE.base64 > ARCHIVE || AT_SKIP_TEST
++cpio -ntv < ARCHIVE
++test $? -eq 2
++],
++[0],
++[-rw-rw-r-- 1 10029 10031 13 Nov 25 13:52 FILE
++],[cpio: LINK: stored filename length too big
++cpio: premature end of file
++])
++
++AT_CLEANUP
+diff -Nurp cpio-2.11.orig/tests/symlink-long.at cpio-2.11/tests/symlink-long.at
+--- cpio-2.11.orig/tests/symlink-long.at 1970-01-01 08:00:00.000000000 +0800
++++ cpio-2.11/tests/symlink-long.at 2014-12-08 13:17:57.219538408 +0800
+@@ -0,0 +1,46 @@
++# Process this file with autom4te to create testsuite. -*- Autotest -*-
++# Copyright (C) 2014 Free Software Foundation, Inc.
++
++# This program is free software; you can redistribute it and/or modify
++# it under the terms of the GNU General Public License as published by
++# the Free Software Foundation; either version 3, or (at your option)
++# any later version.
++
++# This program is distributed in the hope that it will be useful,
++# but WITHOUT ANY WARRANTY; without even the implied warranty of
++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
++# GNU General Public License for more details.
++
++# You should have received a copy of the GNU General Public License
++# along with this program; if not, write to the Free Software
++# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
++# 02110-1301 USA.
++
++# Cpio v2.11.90 changed the way symlink name is read from archive.
++# References:
++# http://lists.gnu.org/archive/html/bug-cpio/2014-11/msg00007.html
++
++AT_SETUP([symlink-long])
++AT_KEYWORDS([symlink-long copyout])
++
++AT_CHECK([
++
++# len(dirname) > READBUFSIZE
++dirname=
++for i in {1..52}; do
++ dirname="xxxxxxxxx/$dirname"
++ mkdir "$dirname"
++done
++ln -s "$dirname" x || AT_SKIP_TEST
++
++echo x | cpio -o > ar
++list=`cpio -tv < ar | sed 's|.*-> ||'`
++test "$list" = "$dirname" && echo success || echo fail
++],
++[0],
++[success
++],[2 blocks
++2 blocks
++])
++
++AT_CLEANUP
+diff -Nurp cpio-2.11.orig/tests/testsuite.at cpio-2.11/tests/testsuite.at
+--- cpio-2.11.orig/tests/testsuite.at 2010-02-15 18:02:23.000000000 +0800
++++ cpio-2.11/tests/testsuite.at 2014-12-08 13:15:13.515544805 +0800
+@@ -31,6 +31,8 @@ m4_include([version.at])
+
+ m4_include([inout.at])
+ m4_include([symlink.at])
++m4_include([symlink-bad-length.at])
++m4_include([symlink-long.at])
+ m4_include([interdir.at])
+
+ m4_include([setstat01.at])
diff --git a/meta/recipes-extended/cpio/cpio-2.8/fix-memory-overrun.patch b/meta/recipes-extended/cpio/cpio-2.8/fix-memory-overrun.patch
new file mode 100644
index 0000000000..0148e70797
--- /dev/null
+++ b/meta/recipes-extended/cpio/cpio-2.8/fix-memory-overrun.patch
@@ -0,0 +1,217 @@
+cpio: Fix memory overrun on reading improperly created link records
+
+Signed-off-by: Bian Naimeng <biannm@cn.fujitsu.com>
+
+http://git.savannah.gnu.org/cgit/cpio.git/commit/?id=746f3ff670dcfcdd28fcc990e79cd6fccc7ae48d
+
+ * src/copyin.c (get_link_name): New function.
+ (list_file, copyin_link): use get_link_name
+
+ * tests/symlink-bad-length.at: New file.
+ * tests/symlink-long.at: New file.
+ * tests/Makefile.am: Add new files.
+ * tests/testsuite.at: Likewise.
+
+ See http://lists.gnu.org/archive/html/bug-cpio/2014-11/msg00007.html
+
+Upstream-Status: Backport
+
+Signed-off-by: Sergey Poznyakoff <gray@gnu.org.ua>
+
+diff -Nurp cpio-2.8.orig/src/copyin.c cpio-2.8/src/copyin.c
+--- cpio-2.8.orig/src/copyin.c 2007-06-07 19:58:03.000000000 +0800
++++ cpio-2.8/src/copyin.c 2014-12-08 11:30:01.159791484 +0800
+@@ -126,6 +126,28 @@ tape_skip_padding (int in_file_des, int
+ }
+
+
++static char *
++get_link_name (struct cpio_file_stat *file_hdr, int in_file_des)
++{
++ off_t n = file_hdr->c_filesize + 1;
++ char *link_name;
++
++ if (n == 0 || n > SIZE_MAX)
++ {
++ error (0, 0, _("%s: stored filename length too big"), file_hdr->c_name);
++ link_name = NULL;
++ }
++ else
++ {
++ link_name = xmalloc (n);
++ tape_buffered_read (link_name, in_file_des, file_hdr->c_filesize);
++ link_name[file_hdr->c_filesize] = '\0';
++ tape_skip_padding (in_file_des, file_hdr->c_filesize);
++ }
++ return link_name;
++}
++
++
+ static void
+ list_file(struct cpio_file_stat* file_hdr, int in_file_des)
+ {
+@@ -136,21 +158,16 @@ list_file(struct cpio_file_stat* file_hd
+ {
+ if (archive_format != arf_tar && archive_format != arf_ustar)
+ {
+- char *link_name = NULL; /* Name of hard and symbolic links. */
+-
+- link_name = (char *) xmalloc ((unsigned int) file_hdr->c_filesize + 1);
+- link_name[file_hdr->c_filesize] = '\0';
+- tape_buffered_read (link_name, in_file_des, file_hdr->c_filesize);
+- long_format (file_hdr, link_name);
+- free (link_name);
+- tape_skip_padding (in_file_des, file_hdr->c_filesize);
+- return;
++ char *link_name = get_link_name (file_hdr, in_file_des);
++ if (link_name)
++ {
++ long_format (file_hdr, link_name);
++ free (link_name);
++ }
+ }
+ else
+- {
+ long_format (file_hdr, file_hdr->c_tar_linkname);
+- return;
+- }
++ return;
+ }
+ else
+ #endif
+@@ -732,10 +749,7 @@ copyin_link(struct cpio_file_stat *file_
+
+ if (archive_format != arf_tar && archive_format != arf_ustar)
+ {
+- link_name = (char *) xmalloc ((unsigned int) file_hdr->c_filesize + 1);
+- link_name[file_hdr->c_filesize] = '\0';
+- tape_buffered_read (link_name, in_file_des, file_hdr->c_filesize);
+- tape_skip_padding (in_file_des, file_hdr->c_filesize);
++ link_name = get_link_name (file_hdr, in_file_des);
+ }
+ else
+ {
+diff -Nurp cpio-2.8.orig/tests/Makefile.am cpio-2.8/tests/Makefile.am
+--- cpio-2.8.orig/tests/Makefile.am 2006-10-24 18:32:13.000000000 +0800
++++ cpio-2.8/tests/Makefile.am 2014-12-08 11:30:52.387789482 +0800
+@@ -45,6 +45,8 @@ TESTSUITE_AT = \
+ testsuite.at\
+ inout.at\
+ symlink.at\
++ symlink-bad-length.at\
++ symlink-long.at\
+ version.at
+
+ TESTSUITE = $(srcdir)/testsuite
+diff -Nurp cpio-2.8.orig/tests/symlink-bad-length.at cpio-2.8/tests/symlink-bad-length.at
+--- cpio-2.8.orig/tests/symlink-bad-length.at 1970-01-01 08:00:00.000000000 +0800
++++ cpio-2.8/tests/symlink-bad-length.at 2014-12-08 11:33:25.283783507 +0800
+@@ -0,0 +1,49 @@
++# Process this file with autom4te to create testsuite. -*- Autotest -*-
++# Copyright (C) 2014 Free Software Foundation, Inc.
++
++# This program is free software; you can redistribute it and/or modify
++# it under the terms of the GNU General Public License as published by
++# the Free Software Foundation; either version 3, or (at your option)
++# any later version.
++
++# This program is distributed in the hope that it will be useful,
++# but WITHOUT ANY WARRANTY; without even the implied warranty of
++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
++# GNU General Public License for more details.
++
++# You should have received a copy of the GNU General Public License
++# along with this program; if not, write to the Free Software
++# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
++# 02110-1301 USA.
++
++# Cpio v2.11 did segfault with badly set symlink length.
++# References:
++# http://lists.gnu.org/archive/html/bug-cpio/2014-11/msg00007.html
++
++AT_SETUP([symlink-bad-length])
++AT_KEYWORDS([symlink-long copyout])
++
++AT_DATA([ARCHIVE.base64],
++[x3EjAIBAtIEtJy8nAQAAAHRUYW0FAAAADQBGSUxFAABzb21lIGNvbnRlbnQKAMdxIwBgQ/+hLScv
++JwEAAAB0VEhuBQD/////TElOSwAARklMRcdxAAAAAAAAAAAAAAEAAAAAAAAACwAAAAAAVFJBSUxF
++UiEhIQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
++AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
++])
++
++AT_CHECK([
++base64 -d ARCHIVE.base64 > ARCHIVE || AT_SKIP_TEST
++cpio -ntv < ARCHIVE
++test $? -eq 2
++],
++[0],
++[-rw-rw-r-- 1 10029 10031 13 Nov 25 13:52 FILE
++],[cpio: LINK: stored filename length too big
++cpio: premature end of file
++])
++
++AT_CLEANUP
+diff -Nurp cpio-2.8.orig/tests/symlink-long.at cpio-2.8/tests/symlink-long.at
+--- cpio-2.8.orig/tests/symlink-long.at 1970-01-01 08:00:00.000000000 +0800
++++ cpio-2.8/tests/symlink-long.at 2014-12-08 11:34:28.807781024 +0800
+@@ -0,0 +1,46 @@
++# Process this file with autom4te to create testsuite. -*- Autotest -*-
++# Copyright (C) 2014 Free Software Foundation, Inc.
++
++# This program is free software; you can redistribute it and/or modify
++# it under the terms of the GNU General Public License as published by
++# the Free Software Foundation; either version 3, or (at your option)
++# any later version.
++
++# This program is distributed in the hope that it will be useful,
++# but WITHOUT ANY WARRANTY; without even the implied warranty of
++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
++# GNU General Public License for more details.
++
++# You should have received a copy of the GNU General Public License
++# along with this program; if not, write to the Free Software
++# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
++# 02110-1301 USA.
++
++# Cpio v2.11.90 changed the way symlink name is read from archive.
++# References:
++# http://lists.gnu.org/archive/html/bug-cpio/2014-11/msg00007.html
++
++AT_SETUP([symlink-long])
++AT_KEYWORDS([symlink-long copyout])
++
++AT_CHECK([
++
++# len(dirname) > READBUFSIZE
++dirname=
++for i in {1..52}; do
++ dirname="xxxxxxxxx/$dirname"
++ mkdir "$dirname"
++done
++ln -s "$dirname" x || AT_SKIP_TEST
++
++echo x | cpio -o > ar
++list=`cpio -tv < ar | sed 's|.*-> ||'`
++test "$list" = "$dirname" && echo success || echo fail
++],
++[0],
++[success
++],[2 blocks
++2 blocks
++])
++
++AT_CLEANUP
+diff -Nurp cpio-2.8.orig/tests/testsuite.at cpio-2.8/tests/testsuite.at
+--- cpio-2.8.orig/tests/testsuite.at 2006-10-24 18:32:13.000000000 +0800
++++ cpio-2.8/tests/testsuite.at 2014-12-08 11:34:56.515779942 +0800
+@@ -31,3 +31,5 @@ m4_include([version.at])
+
+ m4_include([inout.at])
+ m4_include([symlink.at])
++m4_include([symlink-bad-length.at])
++m4_include([symlink-long.at])
diff --git a/meta/recipes-extended/cpio/cpio_2.11.bb b/meta/recipes-extended/cpio/cpio_2.11.bb
index 5f88b30f1e..c42db6f352 100644
--- a/meta/recipes-extended/cpio/cpio_2.11.bb
+++ b/meta/recipes-extended/cpio/cpio_2.11.bb
@@ -3,9 +3,10 @@ include cpio_v2.inc
LICENSE = "GPLv3"
LIC_FILES_CHKSUM = "file://COPYING;md5=f27defe1e96c2e1ecd4e0c9be8967949"
-PR = "r4"
+PR = "r5"
SRC_URI += "file://remove-gets.patch \
+ file://fix-memory-overrun.patch \
"
SRC_URI[md5sum] = "1112bb6c45863468b5496ba128792f6c"
diff --git a/meta/recipes-extended/cpio/cpio_2.8.bb b/meta/recipes-extended/cpio/cpio_2.8.bb
index b6da207b92..3f97dbe2ce 100644
--- a/meta/recipes-extended/cpio/cpio_2.8.bb
+++ b/meta/recipes-extended/cpio/cpio_2.8.bb
@@ -3,11 +3,12 @@ require cpio_v2.inc
LICENSE = "GPLv2"
LIC_FILES_CHKSUM = "file://COPYING;md5=b7f772ea3a2489231cb4872656cac34b"
-PR = "r3"
+PR = "r4"
SRC_URI += "file://m4extensions.patch \
- file://avoid_heap_overflow.patch \
- "
+ file://avoid_heap_overflow.patch \
+ file://fix-memory-overrun.patch \
+ "
SRC_URI[md5sum] = "0caa356e69e149fb49b76bacc64615a1"
SRC_URI[sha256sum] = "1b203248874c3b5a728b351f06513e5282f73e0170b7f207fbf8c39f28f6b4ad"
diff --git a/meta/recipes-extended/cracklib/cracklib_2.9.1.bb b/meta/recipes-extended/cracklib/cracklib_2.9.2.bb
index 99ccde3ba3..b66ca543b6 100644
--- a/meta/recipes-extended/cracklib/cracklib_2.9.1.bb
+++ b/meta/recipes-extended/cracklib/cracklib_2.9.2.bb
@@ -13,8 +13,8 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/cracklib/cracklib-${PV}.tar.gz \
file://0001-packlib.c-support-dictionary-byte-order-dependent.patch \
file://0002-craklib-fix-testnum-and-teststr-failed.patch"
-SRC_URI[md5sum] = "90536219c520add2ceb3c26f0d7da404"
-SRC_URI[sha256sum] = "408905c2539a97dc8cbbb6d7cd2046cb5647a345b4bda399220d9471be16d156"
+SRC_URI[md5sum] = "559072fdfc095cdb763c4de3471a889e"
+SRC_URI[sha256sum] = "c1c899291d443e99d1aecfbc879e4ac9c0cbc265574f47b487842da11e9759f5"
inherit autotools-brokensep gettext
diff --git a/meta/recipes-extended/cronie/cronie/crontab b/meta/recipes-extended/cronie/cronie/crontab
index cc9169eda9..22c4feb2dc 100644
--- a/meta/recipes-extended/cronie/cronie/crontab
+++ b/meta/recipes-extended/cronie/cronie/crontab
@@ -7,4 +7,8 @@
SHELL=/bin/sh
PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
-# m h dom mon dow user command
+# m h dom mon dow user command
+# 1 * * * * root cd / && run-parts /etc/cron.hourly
+# 30 7 * * * root cd / && run-parts /etc/cron.daily
+# 42 7 * * 7 root cd / && run-parts /etc/cron.weekly
+# 55 7 1 * * root cd / && run-parts /etc/cron.monthly
diff --git a/meta/recipes-extended/cronie/cronie/fix-out-of-tree-build.patch b/meta/recipes-extended/cronie/cronie/fix-out-of-tree-build.patch
deleted file mode 100644
index 7384a0bad4..0000000000
--- a/meta/recipes-extended/cronie/cronie/fix-out-of-tree-build.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-For an out of tree build, the cronie-common.h file was not being
-found correctly, so point it to the top_srcdir
-
-Upstream-Status: Submitted
-
-Signed-off-by: Saul Wold <sgw@linux.intel.com>
-
-Index: cronie-1.4.11/anacron/Makefile.am
-===================================================================
---- cronie-1.4.11.orig/anacron/Makefile.am
-+++ cronie-1.4.11/anacron/Makefile.am
-@@ -9,6 +9,7 @@ common_nodist = anacron-paths.h
- nodist_anacron_SOURCES = $(common_nodist)
- BUILT_SOURCES = $(common_nodist)
-
-+AM_CFLAGS = -I$(top_srcdir)
-
- LDADD = $(LIBSELINUX) $(LIBPAM) $(LIBAUDIT)
-
-Index: cronie-1.4.11/src/Makefile.am
-===================================================================
---- cronie-1.4.11.orig/src/Makefile.am
-+++ cronie-1.4.11/src/Makefile.am
-@@ -15,6 +15,7 @@ nodist_crond_SOURCES = $(common_nodist)
- nodist_crontab_SOURCES = $(common_nodist)
- BUILT_SOURCES = $(common_nodist)
-
-+AM_CFLAGS = -I$(top_srcdir)
-
- LDADD = $(LIBSELINUX) $(LIBPAM) $(LIBAUDIT)
-
diff --git a/meta/recipes-extended/cronie/cronie_1.4.11.bb b/meta/recipes-extended/cronie/cronie_1.4.12.bb
index 02234f6a36..977332b3fc 100644
--- a/meta/recipes-extended/cronie/cronie_1.4.11.bb
+++ b/meta/recipes-extended/cronie/cronie_1.4.12.bb
@@ -7,16 +7,14 @@ HOMEPAGE = "https://fedorahosted.org/cronie/"
BUGTRACKER = "mmaslano@redhat.com"
# Internet Systems Consortium License
-LICENSE = "ISC & BSD"
-LIC_FILES_CHKSUM = "file://COPYING;md5=963ea0772a2adbdcd607a9b2ec320c11 \
+LICENSE = "ISC & BSD-3-Clause & BSD-2-Clause & GPLv2+"
+LIC_FILES_CHKSUM = "file://COPYING;md5=dd2a592170760e1386c769e1043b3722 \
file://src/cron.c;endline=20;md5=b425c334265026177128353a142633b4 \
file://src/popen.c;beginline=3;endline=31;md5=edd50742d8def712e9472dba353668a9"
SECTION = "utils"
-
SRC_URI = "https://fedorahosted.org/releases/c/r/cronie/cronie-${PV}.tar.gz \
- file://fix-out-of-tree-build.patch \
file://crond.init \
file://crontab \
file://crond.service \
@@ -25,18 +23,16 @@ SRC_URI = "https://fedorahosted.org/releases/c/r/cronie/cronie-${PV}.tar.gz \
PAM_SRC_URI = "file://crond_pam_config.patch"
PAM_DEPS = "libpam libpam-runtime pam-plugin-access pam-plugin-loginuid"
-SRC_URI[md5sum] = "2ba645cf54de17f138ef70312843862f"
-SRC_URI[sha256sum] = "fd08084cedddbb42499f80ddb7f2158195c3555c2ff40ee11d4ece2f9864d7be"
+SRC_URI[md5sum] = "199db91e514a4d75e3222d69874b132f"
+SRC_URI[sha256sum] = "0f5c9bf32f352599451c4ca0d6bc076d19e73ecfa5a90b34ecfe47c918c8bafd"
inherit autotools update-rc.d useradd systemd
-
PACKAGECONFIG ?= "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}"
PACKAGECONFIG[audit] = "--with-audit,--without-audit,audit,"
PACKAGECONFIG[pam] = "--with-pam,--without-pam,libpam,${PAM_DEPS}"
-
INITSCRIPT_NAME = "crond"
INITSCRIPT_PARAMS = "start 90 2 3 4 5 . stop 60 0 1 6 ."
@@ -67,7 +63,7 @@ do_install_append () {
mkdir -p ${D}${sysconfdir}/cron.weekly
mkdir -p ${D}${sysconfdir}/cron.monthly
touch ${D}${sysconfdir}/cron.deny
-
+
# below setting is necessary to allow normal user using crontab
# setgid for crontab binary
@@ -82,4 +78,4 @@ do_install_append () {
}
FILES_${PN} += "${sysconfdir}/cron*"
-
+CONFFILES_${PN} += "${sysconfdir}/crontab"
diff --git a/meta/recipes-extended/cups/cups.inc b/meta/recipes-extended/cups/cups.inc
index adf21eb17e..16fdd4f363 100644
--- a/meta/recipes-extended/cups/cups.inc
+++ b/meta/recipes-extended/cups/cups.inc
@@ -7,12 +7,21 @@ SRC_URI = "http://www.cups.org/software/${PV}/${BP}-source.tar.bz2 \
file://use_echo_only_in_init.patch \
file://0001-don-t-try-to-run-generated-binaries.patch \
file://cups_serverbin.patch \
- file://cups-no-gcrypt.patch \
+ file://cups.socket \
+ file://cups.path \
+ file://cups.service \
"
LEAD_SONAME = "libcupsdriver.so"
-inherit autotools-brokensep binconfig
+CLEANBROKEN = "1"
+
+inherit autotools-brokensep binconfig useradd systemd
+
+USERADD_PACKAGES = "${PN}"
+GROUPADD_PARAM_${PN} = "--system lpadmin"
+
+SYSTEMD_SERVICE_${PN} = "cups.socket cups.path cups.service"
PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'zeroconf', 'avahi', '', d)} \
${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}"
@@ -24,7 +33,6 @@ EXTRA_OECONF = " \
--enable-gnutls \
--enable-dbus \
--enable-browsing \
- --disable-openssl \
--disable-gssapi \
--enable-debug \
--disable-relro \
@@ -68,6 +76,19 @@ fakeroot do_install () {
# Remove /var/run from package as cupsd will populate it on startup
rm -fr ${D}/${localstatedir}/run
rmdir ${D}/${libdir}/${BPN}/driver
+
+ # Remove sysinit script and symlinks if sysvinit is not in DISTRO_FEATURES
+ if ${@bb.utils.contains('DISTRO_FEATURES','sysvinit','false','true',d)}; then
+ rm -rf ${D}${sysconfdir}/init.d/
+ rm -rf ${D}${sysconfdir}/rc*
+ fi
+
+ # Install systemd unit files
+ install -d ${D}${systemd_unitdir}/system
+ install -m 0644 ${WORKDIR}/cups.socket ${D}${systemd_unitdir}/system
+ install -m 0644 ${WORKDIR}/cups.path ${D}${systemd_unitdir}/system
+ install -m 0644 ${WORKDIR}/cups.service ${D}${systemd_unitdir}/system
+ sed -i -e 's,@SBINDIR@,${sbindir},g' ${D}${systemd_unitdir}/system/cups.service
}
python do_package_append() {
@@ -79,6 +100,7 @@ python do_package_append() {
PACKAGES =+ "${PN}-lib ${PN}-libimage"
+RDEPENDS_${PN} += "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'procps', '', d)}"
FILES_${PN} += "${libdir}/cups/backend \
${libdir}/cups/cgi-bin \
${libdir}/cups/filter \
diff --git a/meta/recipes-extended/cups/cups/0001-don-t-try-to-run-generated-binaries.patch b/meta/recipes-extended/cups/cups/0001-don-t-try-to-run-generated-binaries.patch
index e6544b5607..5379eb6aa2 100644
--- a/meta/recipes-extended/cups/cups/0001-don-t-try-to-run-generated-binaries.patch
+++ b/meta/recipes-extended/cups/cups/0001-don-t-try-to-run-generated-binaries.patch
@@ -10,13 +10,13 @@ Signed-off-by: Koen Kooi <koen@dominion.thruhere.net>
ppdc/Makefile | 30 +++++++++++++++---------------
1 files changed, 15 insertions(+), 15 deletions(-)
-diff --git a/ppdc/Makefile b/ppdc/Makefile
-index 0288d47..fc87f1b 100644
---- a/ppdc/Makefile
-+++ b/ppdc/Makefile
-@@ -243,8 +243,8 @@ genstrings: genstrings.o libcupsppdc.a ../cups/libcups.a \
+Index: cups-2.0.0/ppdc/Makefile
+===================================================================
+--- cups-2.0.0.orig/ppdc/Makefile
++++ cups-2.0.0/ppdc/Makefile
+@@ -242,8 +242,8 @@ genstrings: genstrings.o libcupsppdc.a
$(CXX) $(ARCHFLAGS) $(LDFLAGS) -o genstrings genstrings.o \
- libcupsppdc.a ../cups/libcups.a $(LIBGSSAPI) $(SSLLIBS) \
+ libcupsppdc.a ../cups/$(LIBCUPSSTATIC) $(LIBGSSAPI) $(SSLLIBS) \
$(DNSSDLIBS) $(COMMONLIBS) $(LIBZ)
- echo Generating localization strings...
- ./genstrings >sample.c
@@ -25,9 +25,9 @@ index 0288d47..fc87f1b 100644
#
-@@ -261,9 +261,9 @@ ppdc-static: ppdc.o libcupsppdc.a ../cups/libcups.a foo.drv foo-fr.po
+@@ -260,9 +260,9 @@ ppdc-static: ppdc.o libcupsppdc.a ../cu
$(CXX) $(ARCHFLAGS) $(LDFLAGS) -o ppdc-static ppdc.o libcupsppdc.a \
- ../cups/libcups.a $(LIBGSSAPI) $(SSLLIBS) $(DNSSDLIBS) \
+ ../cups/$(LIBCUPSSTATIC) $(LIBGSSAPI) $(SSLLIBS) $(DNSSDLIBS) \
$(COMMONLIBS) $(LIBZ)
- echo Testing PPD compiler...
- ./ppdc-static -l en,fr -I ../data foo.drv
@@ -38,24 +38,26 @@ index 0288d47..fc87f1b 100644
#
-@@ -290,16 +290,16 @@ ppdi-static: ppdc-static ppdi.o libcupsppdc.a ../cups/libcups.a
- ../cups/libcups.a $(LIBGSSAPI) $(SSLLIBS) $(DNSSDLIBS) \
+@@ -288,17 +288,17 @@ ppdi-static: ppdc-static ppdi.o libcups
+ $(CXX) $(ARCHFLAGS) $(LDFLAGS) -o ppdi-static ppdi.o libcupsppdc.a \
+ ../cups/$(LIBCUPSSTATIC) $(LIBGSSAPI) $(SSLLIBS) $(DNSSDLIBS) \
$(COMMONLIBS) $(LIBZ)
- echo Testing PPD importer...
+- echo Testing PPD importer...
- $(RM) -r ppd ppd2 sample-import.drv
-- ./ppdc-static -I ../data sample.drv
+- ./ppdc-static -l en -I ../data sample.drv
- ./ppdi-static -I ../data -o sample-import.drv ppd/*
-- ./ppdc-static -I ../data -d ppd2 sample-import.drv
+- ./ppdc-static -l en -I ../data -d ppd2 sample-import.drv
- if diff -r ppd ppd2 >/dev/null; then \
- echo PPD import OK; \
- else \
- echo PPD import FAILED; \
- exit 1; \
- fi
++# echo Testing PPD importer...
+# $(RM) -r ppd ppd2 sample-import.drv
-+# ./ppdc-static -I ../data sample.drv
++# ./ppdc-static -l en -I ../data sample.drv
+# ./ppdi-static -I ../data -o sample-import.drv ppd/*
-+# ./ppdc-static -I ../data -d ppd2 sample-import.drv
++# ./ppdc-static -l en -I ../data -d ppd2 sample-import.drv
+# if diff -r ppd ppd2 >/dev/null; then \
+# echo PPD import OK; \
+# else \
@@ -65,6 +67,3 @@ index 0288d47..fc87f1b 100644
#
---
-1.6.6.1
-
diff --git a/meta/recipes-extended/cups/cups/cups-no-gcrypt.patch b/meta/recipes-extended/cups/cups/cups-no-gcrypt.patch
deleted file mode 100644
index 8bbcf39497..0000000000
--- a/meta/recipes-extended/cups/cups/cups-no-gcrypt.patch
+++ /dev/null
@@ -1,49 +0,0 @@
-Description: Don't build-depend on libgcrypt, as nothing is used from it
-Author: Didier Raboud <odyx@debian.org>
-Bug-Debian: https://bugs.debian.org/638416
-Bug: http://www.cups.org/str.php?L????
-
-[CUPS Ticket #4399] -- http://www.cups.org/str.php?L4399
-
-Backported from http://www.cups.org/strfiles.php/3308/cups-no-gcrypt.patch
-
-Upstream-Status: Backport
-
-Signed-off-by: Armin Kuster <akuster@mvista.com>
-
-Last-Update: 2014-04-07
---- a/config-scripts/cups-ssl.m4
-+++ b/config-scripts/cups-ssl.m4
-@@ -66,7 +66,6 @@
- dnl Then look for GNU TLS...
- if test $have_ssl = 0 -a "x$enable_gnutls" != "xno" -a "x$PKGCONFIG" != x; then
- AC_PATH_PROG(LIBGNUTLSCONFIG,libgnutls-config)
-- AC_PATH_PROG(LIBGCRYPTCONFIG,libgcrypt-config)
- if $PKGCONFIG --exists gnutls; then
- have_ssl=1
- SSLLIBS=`$PKGCONFIG --libs gnutls`
-@@ -84,14 +83,6 @@
- if test $have_ssl = 1; then
- CUPS_SERVERCERT="ssl/server.crt"
- CUPS_SERVERKEY="ssl/server.key"
--
-- if $PKGCONFIG --exists gcrypt; then
-- SSLLIBS="$SSLLIBS `$PKGCONFIG --libs gcrypt`"
-- SSLFLAGS="$SSLFLAGS `$PKGCONFIG --cflags gcrypt`"
-- elif test "x$LIBGCRYPTCONFIG" != x; then
-- SSLLIBS="$SSLLIBS `$LIBGCRYPTCONFIG --libs`"
-- SSLFLAGS="$SSLFLAGS `$LIBGCRYPTCONFIG --cflags`"
-- fi
- fi
- fi
-
---- a/cups/http-private.h
-+++ b/cups/http-private.h
-@@ -80,7 +80,6 @@
- # elif defined HAVE_GNUTLS
- # include <gnutls/gnutls.h>
- # include <gnutls/x509.h>
--# include <gcrypt.h>
- # elif defined(HAVE_CDSASSL)
- # include <CoreFoundation/CoreFoundation.h>
- # include <Security/Security.h>
diff --git a/meta/recipes-extended/cups/cups/cups.path b/meta/recipes-extended/cups/cups/cups.path
new file mode 100644
index 0000000000..de8cc57c27
--- /dev/null
+++ b/meta/recipes-extended/cups/cups/cups.path
@@ -0,0 +1,8 @@
+[Unit]
+Description=CUPS Printer Service Spool
+
+[Path]
+PathExistsGlob=/var/spool/cups/d*
+
+[Install]
+WantedBy=multi-user.target
diff --git a/meta/recipes-extended/cups/cups/cups.service b/meta/recipes-extended/cups/cups/cups.service
new file mode 100644
index 0000000000..7d3e839867
--- /dev/null
+++ b/meta/recipes-extended/cups/cups/cups.service
@@ -0,0 +1,10 @@
+[Unit]
+Description=CUPS Printing Service
+
+[Service]
+ExecStart=@SBINDIR@/cupsd -f
+PrivateTmp=true
+
+[Install]
+Also=cups.socket cups.path
+WantedBy=printer.target
diff --git a/meta/recipes-extended/cups/cups/cups.socket b/meta/recipes-extended/cups/cups/cups.socket
new file mode 100644
index 0000000000..33148705d6
--- /dev/null
+++ b/meta/recipes-extended/cups/cups/cups.socket
@@ -0,0 +1,8 @@
+[Unit]
+Description=CUPS Printing Service Sockets
+
+[Socket]
+ListenStream=/var/run/cups/cups.sock
+
+[Install]
+WantedBy=sockets.target
diff --git a/meta/recipes-extended/cups/cups_1.7.5.bb b/meta/recipes-extended/cups/cups_1.7.5.bb
deleted file mode 100644
index 5538cecfd5..0000000000
--- a/meta/recipes-extended/cups/cups_1.7.5.bb
+++ /dev/null
@@ -1,6 +0,0 @@
-require cups.inc
-
-LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=c5e50cb4b8f24b04636b719683a9102d"
-
-SRC_URI[md5sum] = "5d893edc2957005f78e2b2423fdace2e"
-SRC_URI[sha256sum] = "18cb4c6847dbaaaa05c8b35af787f19dd5c7686970b46548e72c711c6f26bd02"
diff --git a/meta/recipes-extended/cups/cups_2.0.1.bb b/meta/recipes-extended/cups/cups_2.0.1.bb
new file mode 100644
index 0000000000..c78b44d71f
--- /dev/null
+++ b/meta/recipes-extended/cups/cups_2.0.1.bb
@@ -0,0 +1,6 @@
+require cups.inc
+
+LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=6c5a350596fba02754bd96eb6df3afd0"
+
+SRC_URI[md5sum] = "7f7c33071035fb20d0879929a42da711"
+SRC_URI[sha256sum] = "60f7f2cca69f3b761a029fb314ee4299e3ea0fc02350f06d192cf31d58215525"
diff --git a/meta/recipes-extended/cwautomacros/cwautomacros_20110201.bb b/meta/recipes-extended/cwautomacros/cwautomacros_20110201.bb
index ee4b8900bb..4e12169302 100644
--- a/meta/recipes-extended/cwautomacros/cwautomacros_20110201.bb
+++ b/meta/recipes-extended/cwautomacros/cwautomacros_20110201.bb
@@ -5,11 +5,13 @@ LICENSE = "GPLv2"
LIC_FILES_CHKSUM = "file://LICENSE;md5=eb723b61539feef013de476e68b5c50a"
SRC_URI = "http://download.berlios.de/cwautomacros/cwautomacros-${PV}.tar.bz2"
-
-
SRC_URI[md5sum] = "074afcb50d0a8bff10786a2954b2b02d"
SRC_URI[sha256sum] = "3115603b891f3a163c0bbb5fea2f3742113a183fa6745ee5e89e5f6d0e9f6121"
+do_configure() {
+ :
+}
+
do_install() {
oe_runmake CWAUTOMACROSPREFIX=${D}${prefix} install
}
diff --git a/meta/recipes-extended/diffutils/diffutils_3.3.bb b/meta/recipes-extended/diffutils/diffutils_3.3.bb
index a467f20aa4..b708962447 100644
--- a/meta/recipes-extended/diffutils/diffutils_3.3.bb
+++ b/meta/recipes-extended/diffutils/diffutils_3.3.bb
@@ -7,6 +7,8 @@ SRC_URI = "${GNU_MIRROR}/diffutils/diffutils-${PV}.tar.xz \
file://run-ptest \
"
+EXTRA_OECONF += "--without-libsigsegv-prefix"
+
do_configure_prepend () {
# Need to remove gettext macros with weird mix of versions
for i in codeset.m4 gettext_gl.m4 intlmacosx.m4 inttypes-pri.m4 lib-ld_gl.m4 lib-prefix_gl.m4 po_gl.m4 ssize_t.m4 wchar_t.m4 wint_t.m4; do
diff --git a/meta/recipes-extended/ethtool/ethtool/ethtool-uint.patch b/meta/recipes-extended/ethtool/ethtool/ethtool-uint.patch
new file mode 100644
index 0000000000..bb96a18d82
--- /dev/null
+++ b/meta/recipes-extended/ethtool/ethtool/ethtool-uint.patch
@@ -0,0 +1,50 @@
+Fix build with musl by using correct uint type names.
+
+This patch is taken from Sabotage Linux, the license statement for patches and
+build scripts in Sabotage Linux says:
+
+ To the extent possible under law, Christian Neukirchen has waived
+ all copyright and related or neighboring rights to this work.
+
+ http://creativecommons.org/publicdomain/zero/1.0/
+
+Therefore this should be good to include in OpenEmbedded.
+
+Signed-off-by: Paul Barker <paul@paulbarker.me.uk>
+
+Upstream-Status: Pending
+
+diff -u ethtool-3.14.org/internal.h ethtool-3.14/internal.h
+--- ethtool-3.14.org/internal.h
++++ ethtool-3.14/internal.h
+@@ -7,6 +7,7 @@
+ #include "ethtool-config.h"
+ #endif
+ #include <stdio.h>
++#include <stdint.h>
+ #include <stdlib.h>
+ #include <string.h>
+ #include <sys/types.h>
+@@ -17,16 +18,16 @@
+
+ /* ethtool.h expects these to be defined by <linux/types.h> */
+ #ifndef HAVE_BE_TYPES
+-typedef __uint16_t __be16;
+-typedef __uint32_t __be32;
++typedef uint16_t __be16;
++typedef uint32_t __be32;
+ typedef unsigned long long __be64;
+ #endif
+
+ typedef unsigned long long u64;
+-typedef __uint32_t u32;
+-typedef __uint16_t u16;
+-typedef __uint8_t u8;
+-typedef __int32_t s32;
++typedef uint32_t u32;
++typedef uint16_t u16;
++typedef uint8_t u8;
++typedef int32_t s32;
+
+ #include "ethtool-copy.h"
+ #include "net_tstamp-copy.h"
diff --git a/meta/recipes-extended/ethtool/ethtool_3.14.bb b/meta/recipes-extended/ethtool/ethtool_3.16.bb
index 49c79b68d7..92a0704800 100644
--- a/meta/recipes-extended/ethtool/ethtool_3.14.bb
+++ b/meta/recipes-extended/ethtool/ethtool_3.16.bb
@@ -9,10 +9,11 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
SRC_URI = "${KERNELORG_MIRROR}/software/network/ethtool/ethtool-${PV}.tar.gz \
file://run-ptest \
file://avoid_parallel_tests.patch \
+ file://ethtool-uint.patch \
"
-SRC_URI[md5sum] = "997e20c25c43ad5800f8ae3204455a8a"
-SRC_URI[sha256sum] = "b1a988d6e397bd9d5d73cfd64329f85121936a050174bbfe90f8701fd893a1df"
+SRC_URI[md5sum] = "3806bae34c153e2b9e2859e85c54788d"
+SRC_URI[sha256sum] = "a6f1433824af7b390ed4e66dfc7cee047d6d1f56dbb8ceef9fe905d63aef5275"
inherit autotools ptest
RDEPENDS_${PN}-ptest += "make"
diff --git a/meta/recipes-extended/findutils/findutils-4.4.2/01-27017.patch b/meta/recipes-extended/findutils/findutils-4.4.2/01-27017.patch
deleted file mode 100644
index 69c1486cf7..0000000000
--- a/meta/recipes-extended/findutils/findutils-4.4.2/01-27017.patch
+++ /dev/null
@@ -1,781 +0,0 @@
-Upstream-Status: Backport
-
-commit af974034b68bf59337c7a384e488a518a77dfecd
-Author: James Youngman <jay@gnu.org>
-Date: Sat Jul 11 19:55:27 2009 +0100
-
- Fix Savannah bug #27017: find -D opt / -fstype ext3 -print , -quit coredump.
-
- Fix Savannah bug #27017: find -D opt / -fstype ext3 -print , -quit
- coredumps.
- * find/tree.c (set_new_parent): Initialise struct
- predicate->arg_text to NULL (instead of leaving it uninitialised).
- (get_new_pred_noarg): Likewise.
- (get_new_pred): Initialise predicate->arg_text to
- "ThisShouldBeSetToSomethingElse" to make it easier to notice
- bugs.
- (get_new_pred_chk_op): Use get_new_pred_noarg.
- (print_predicate): Use an if statement instead of
- two ternary operators.
- * find/util.c (insert_primary_withpred): Accept new argument, arg,
- being the argument (if any) of this predicate. Pass it to
- get_new_pred_chk_op.
- (insert_primary): Likewise (pass arg to insert_primary_withpred).
- (insert_primary_noarg): New function; calls insert_primary with
- arg=NULL.
- * find/parser.c (collect_arg_stat_info): Add an output parameter;
- the filename from which we collected the stat information.
- (parse_closeparen, parse_delete, parse_and, parse_or,
- parse_comma): Use get_new_pred_noarg.
- (parse_cnewer, parse_newer, parse_anewer): Use new
- collect_arg_stat_info and insert_primary interface.
- (parse_print, parse_prune, parse_nouser, parse_empty): Use
- insert_primary_noarg.
- (parse_accesscheck, parse_false): Use insert_primary_noarg.
- (parse_used, parse_iname, parse_fprint, insert_fprint,
- parse_fstype, parse_ilname): Use new collect_arg and
- insert_primary interfaces.
- (parse_ipath, parse_lname, do_parse_xmin, parse_name, parse_path,
- parse_perm, parse_size, parse_user, parse_time): Use new
- collect_arg and insert_primary_withpred interface.
- (parse_negate, parse_openparen): Use new get_new_pred_chk_op interface.
- (parse_newerXY, parse_nogroup): Use new insert_primary interface.
- (insert_regex, parse_samefile): Use new insert_primary_withpred
- interface.
- (insert_type, insert_fprintf, new_insert_exec_ok, insert_num): Use
- new insert_primary_withpred interface.
- * find/defs.h (struct predicate.arg_text): make const.
- Add declarations for new function get_new_pred_noarg and
- insert_primary_noarg. Add 'arg' parameter to get_new_pred_chk_op
- and insert_primary_withpred.
-
-diff --git a/ChangeLog b/ChangeLog
-index 6e346b8..e8ba0f8 100644
---- a/ChangeLog
-+++ b/ChangeLog
-@@ -1,0 +1,45 @@
-+2009-07-11 James Youngman <jay@gnu.org>
-+
-+ Fix Savannah bug #27017: find -D opt / -fstype ext3 -print , -quit
-+ coredumps.
-+ * find/tree.c (set_new_parent): Initialise struct
-+ predicate->arg_text to NULL (instead of leaving it uninitialised).
-+ (get_new_pred_noarg): Likewise.
-+ (get_new_pred): Initialise predicate->arg_text to
-+ "ThisShouldBeSetToSomethingElse" to make it easier to notice
-+ bugs.
-+ (get_new_pred_chk_op): Use get_new_pred_noarg.
-+ (print_predicate): Use an if statement instead of
-+ two ternary operators.
-+ * find/util.c (insert_primary_withpred): Accept new argument, arg,
-+ being the argument (if any) of this predicate. Pass it to
-+ get_new_pred_chk_op.
-+ (insert_primary): Likewise (pass arg to insert_primary_withpred).
-+ (insert_primary_noarg): New function; calls insert_primary with
-+ arg=NULL.
-+ * find/parser.c (collect_arg_stat_info): Add an output parameter;
-+ the filename from which we collected the stat information.
-+ (parse_closeparen, parse_delete, parse_and, parse_or,
-+ parse_comma): Use get_new_pred_noarg.
-+ (parse_cnewer, parse_newer, parse_anewer): Use new
-+ collect_arg_stat_info and insert_primary interface.
-+ (parse_print, parse_prune, parse_nouser, parse_empty): Use
-+ insert_primary_noarg.
-+ (parse_accesscheck, parse_false): Use insert_primary_noarg.
-+ (parse_used, parse_iname, parse_fprint, insert_fprint,
-+ parse_fstype, parse_ilname): Use new collect_arg and
-+ insert_primary interfaces.
-+ (parse_ipath, parse_lname, do_parse_xmin, parse_name, parse_path,
-+ parse_perm, parse_size, parse_user, parse_time): Use new
-+ collect_arg and insert_primary_withpred interface.
-+ (parse_negate, parse_openparen): Use new get_new_pred_chk_op interface.
-+ (parse_newerXY, parse_nogroup): Use new insert_primary interface.
-+ (insert_regex, parse_samefile): Use new insert_primary_withpred
-+ interface.
-+ (insert_type, insert_fprintf, new_insert_exec_ok, insert_num): Use
-+ new insert_primary_withpred interface.
-+ * find/defs.h (struct predicate.arg_text): make const.
-+ Add declarations for new function get_new_pred_noarg and
-+ insert_primary_noarg. Add 'arg' parameter to get_new_pred_chk_op
-+ and insert_primary_withpred.
-+
-diff --git a/find/defs.h b/find/defs.h
-index 1708d83..4539fd9 100644
---- a/find/defs.h
-+++ b/find/defs.h
-@@ -297,7 +297,7 @@ struct predicate
- boolean artificial;
-
- /* The raw text of the argument of this predicate. */
-- char *arg_text;
-+ const char *arg_text;
-
- /* Information needed by the predicate processor.
- Next to each member are listed the predicates that use it. */
-@@ -480,13 +480,16 @@ void show_success_rates(const struct predicate *node);
- /* tree.c */
- struct predicate * build_expression_tree PARAMS((int argc, char *argv[], int end_of_leading_options));
- struct predicate * get_eval_tree PARAMS((void));
-+struct predicate *get_new_pred_noarg (const struct parser_table *entry);
- struct predicate *get_new_pred PARAMS((const struct parser_table *entry));
--struct predicate *get_new_pred_chk_op PARAMS((const struct parser_table *entry));
-+struct predicate *get_new_pred_chk_op PARAMS((const struct parser_table *entry,
-+ const char *arg));
- float calculate_derived_rates PARAMS((struct predicate *p));
-
- /* util.c */
--struct predicate *insert_primary PARAMS((const struct parser_table *entry));
--struct predicate *insert_primary_withpred PARAMS((const struct parser_table *entry, PRED_FUNC fptr));
-+struct predicate *insert_primary PARAMS((const struct parser_table *entry, const char *arg));
-+struct predicate *insert_primary_noarg PARAMS((const struct parser_table *entry));
-+struct predicate *insert_primary_withpred PARAMS((const struct parser_table *entry, PRED_FUNC fptr, const char *arg));
- void usage PARAMS((FILE *fp, int status, char *msg));
- extern boolean check_nofollow(void);
- void complete_pending_execs(struct predicate *p);
-diff --git a/find/parser.c b/find/parser.c
-index 534b670..2e6b989 100644
---- a/find/parser.c
-+++ b/find/parser.c
-@@ -640,11 +640,13 @@ collect_arg(char **argv, int *arg_ptr, const char **collected_arg)
- }
-
- static boolean
--collect_arg_stat_info(char **argv, int *arg_ptr, struct stat *p)
-+collect_arg_stat_info(char **argv, int *arg_ptr, struct stat *p,
-+ const char **argument)
- {
- const char *filename;
- if (collect_arg(argv, arg_ptr, &filename))
- {
-+ *argument = filename;
- if (0 == (options.xstat)(filename, p))
- {
- return true;
-@@ -656,6 +658,7 @@ collect_arg_stat_info(char **argv, int *arg_ptr, struct stat *p)
- }
- else
- {
-+ *argument = NULL;
- return false;
- }
- }
-@@ -679,7 +682,7 @@ parse_and (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) argv;
- (void) arg_ptr;
-
-- our_pred = get_new_pred (entry);
-+ our_pred = get_new_pred_noarg (entry);
- our_pred->pred_func = pred_and;
- our_pred->p_type = BI_OP;
- our_pred->p_prec = AND_PREC;
-@@ -691,11 +694,12 @@ static boolean
- parse_anewer (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- struct stat stat_newer;
-+ const char *arg;
-
- set_stat_placeholders(&stat_newer);
-- if (collect_arg_stat_info(argv, arg_ptr, &stat_newer))
-+ if (collect_arg_stat_info(argv, arg_ptr, &stat_newer, &arg))
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary (entry, arg);
- our_pred->args.reftime.xval = XVAL_ATIME;
- our_pred->args.reftime.ts = get_stat_mtime(&stat_newer);
- our_pred->args.reftime.kind = COMP_GT;
-@@ -713,7 +717,7 @@ parse_closeparen (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) argv;
- (void) arg_ptr;
-
-- our_pred = get_new_pred (entry);
-+ our_pred = get_new_pred_noarg (entry);
- our_pred->pred_func = pred_closeparen;
- our_pred->p_type = CLOSE_PAREN;
- our_pred->p_prec = NO_PREC;
-@@ -725,11 +729,12 @@ static boolean
- parse_cnewer (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- struct stat stat_newer;
-+ const char *arg;
-
- set_stat_placeholders(&stat_newer);
-- if (collect_arg_stat_info(argv, arg_ptr, &stat_newer))
-+ if (collect_arg_stat_info(argv, arg_ptr, &stat_newer, &arg))
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary (entry, arg);
- our_pred->args.reftime.xval = XVAL_CTIME; /* like -newercm */
- our_pred->args.reftime.ts = get_stat_mtime(&stat_newer);
- our_pred->args.reftime.kind = COMP_GT;
-@@ -747,7 +752,7 @@ parse_comma (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) argv;
- (void) arg_ptr;
-
-- our_pred = get_new_pred (entry);
-+ our_pred = get_new_pred_noarg (entry);
- our_pred->pred_func = pred_comma;
- our_pred->p_type = BI_OP;
- our_pred->p_prec = COMMA_PREC;
-@@ -786,7 +791,7 @@ parse_delete (const struct parser_table* entry, char *argv[], int *arg_ptr)
- (void) argv;
- (void) arg_ptr;
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary_noarg (entry);
- our_pred->side_effects = our_pred->no_default_print = true;
- /* -delete implies -depth */
- options.do_dir_first = false;
-@@ -831,7 +836,7 @@ parse_empty (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) argv;
- (void) arg_ptr;
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary_noarg (entry);
- our_pred->est_success_rate = 0.01f; /* assume 1% of files are empty. */
- return true;
- }
-@@ -856,7 +861,7 @@ parse_false (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) argv;
- (void) arg_ptr;
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary_noarg (entry);
- our_pred->need_stat = our_pred->need_type = false;
- our_pred->side_effects = our_pred->no_default_print = false;
- our_pred->est_success_rate = 0.0f;
-@@ -866,7 +871,7 @@ parse_false (const struct parser_table* entry, char **argv, int *arg_ptr)
- static boolean
- insert_fls (const struct parser_table* entry, const char *filename)
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary_noarg (entry);
- if (filename)
- open_output_file (filename, &our_pred->args.printf_vec);
- else
-@@ -899,7 +904,7 @@ parse_fprint (const struct parser_table* entry, char **argv, int *arg_ptr)
- const char *filename;
- if (collect_arg(argv, arg_ptr, &filename))
- {
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary (entry, filename);
- open_output_file (filename, &our_pred->args.printf_vec);
- our_pred->side_effects = our_pred->no_default_print = true;
- our_pred->need_stat = our_pred->need_type = false;
-@@ -915,7 +920,7 @@ parse_fprint (const struct parser_table* entry, char **argv, int *arg_ptr)
- static boolean
- insert_fprint(const struct parser_table* entry, const char *filename)
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary (entry, filename);
- if (filename)
- open_output_file (filename, &our_pred->args.printf_vec);
- else
-@@ -960,7 +965,7 @@ parse_fstype (const struct parser_table* entry, char **argv, int *arg_ptr)
- const char *typename;
- if (collect_arg(argv, arg_ptr, &typename))
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary (entry, typename);
- our_pred->args.str = typename;
-
- /* This is an expensive operation, so although there are
-@@ -1090,7 +1095,7 @@ parse_group (const struct parser_table* entry, char **argv, int *arg_ptr)
- return false;
- }
- }
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary (entry, groupname);
- our_pred->args.gid = gid;
- our_pred->est_success_rate = (our_pred->args.numinfo.l_val < 100) ? 0.99 : 0.2;
- return true;
-@@ -1160,7 +1165,7 @@ parse_ilname (const struct parser_table* entry, char **argv, int *arg_ptr)
- const char *name;
- if (collect_arg(argv, arg_ptr, &name))
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary (entry, name);
- our_pred->args.str = name;
- /* Use the generic glob pattern estimator to figure out how many
- * links will match, but bear in mind that most files won't be links.
-@@ -1227,7 +1232,7 @@ parse_iname (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- if (check_name_arg("-iname", name))
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary (entry, name);
- our_pred->need_stat = our_pred->need_type = false;
- our_pred->args.str = name;
- our_pred->est_success_rate = estimate_pattern_match_rate(name, 0);
-@@ -1268,7 +1273,7 @@ parse_ipath (const struct parser_table* entry, char **argv, int *arg_ptr)
- fnmatch_sanitycheck ();
- if (collect_arg (argv, arg_ptr, &name))
- {
-- struct predicate *our_pred = insert_primary_withpred (entry, pred_ipath);
-+ struct predicate *our_pred = insert_primary_withpred (entry, pred_ipath, name);
- our_pred->need_stat = our_pred->need_type = false;
- our_pred->args.str = name;
- our_pred->est_success_rate = estimate_pattern_match_rate (name, 0);
-@@ -1316,7 +1321,7 @@ parse_lname (const struct parser_table* entry, char **argv, int *arg_ptr)
- fnmatch_sanitycheck();
- if (collect_arg(argv, arg_ptr, &name))
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary (entry, name);
- our_pred->args.str = name;
- our_pred->est_success_rate = 0.1 * estimate_pattern_match_rate(name, 0);
- return true;
-@@ -1391,7 +1396,7 @@ do_parse_xmin (const struct parser_table* entry,
- "arithmetic overflow while converting %s "
- "minutes to a number of seconds"))
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary (entry, minutes);
- our_pred->args.reftime = tval;
- our_pred->est_success_rate = estimate_timestamp_success_rate(tval.ts.tv_sec);
- return true;
-@@ -1427,7 +1432,7 @@ parse_name (const struct parser_table* entry, char **argv, int *arg_ptr)
- fnmatch_sanitycheck();
- if (check_name_arg("-name", name))
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary (entry, name);
- our_pred->need_stat = our_pred->need_type = false;
- our_pred->args.str = name;
- our_pred->est_success_rate = estimate_pattern_match_rate(name, 0);
-@@ -1445,7 +1450,7 @@ parse_negate (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) &argv;
- (void) &arg_ptr;
-
-- our_pred = get_new_pred_chk_op (entry);
-+ our_pred = get_new_pred_chk_op (entry, NULL);
- our_pred->pred_func = pred_negate;
- our_pred->p_type = UNI_OP;
- our_pred->p_prec = NEGATE_PREC;
-@@ -1458,11 +1463,12 @@ parse_newer (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- struct predicate *our_pred;
- struct stat stat_newer;
-+ const char *arg;
-
- set_stat_placeholders(&stat_newer);
-- if (collect_arg_stat_info(argv, arg_ptr, &stat_newer))
-+ if (collect_arg_stat_info(argv, arg_ptr, &stat_newer, &arg))
- {
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary (entry, arg);
- our_pred->args.reftime.ts = get_stat_mtime(&stat_newer);
- our_pred->args.reftime.xval = XVAL_MTIME;
- our_pred->args.reftime.kind = COMP_GT;
-@@ -1530,7 +1536,7 @@ parse_newerXY (const struct parser_table* entry, char **argv, int *arg_ptr)
- (*arg_ptr)++;
- }
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary (entry, argv[*arg_ptr]);
-
-
- switch (x)
-@@ -1623,7 +1629,7 @@ parse_nogroup (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) &argv;
- (void) &arg_ptr;
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary (entry, NULL);
- our_pred->est_success_rate = 1e-4;
- #ifdef CACHE_IDS
- if (gid_unused == NULL)
-@@ -1660,7 +1666,7 @@ parse_nouser (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) arg_ptr;
-
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary_noarg (entry);
- our_pred->est_success_rate = 1e-3;
- #ifdef CACHE_IDS
- if (uid_unused == NULL)
-@@ -1716,7 +1722,7 @@ parse_openparen (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) argv;
- (void) arg_ptr;
-
-- our_pred = get_new_pred_chk_op (entry);
-+ our_pred = get_new_pred_chk_op (entry, NULL);
- our_pred->pred_func = pred_openparen;
- our_pred->p_type = OPEN_PAREN;
- our_pred->p_prec = NO_PREC;
-@@ -1732,7 +1738,7 @@ parse_or (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) argv;
- (void) arg_ptr;
-
-- our_pred = get_new_pred (entry);
-+ our_pred = get_new_pred_noarg (entry);
- our_pred->pred_func = pred_or;
- our_pred->p_type = BI_OP;
- our_pred->p_prec = OR_PREC;
-@@ -1756,7 +1762,7 @@ parse_path (const struct parser_table* entry, char **argv, int *arg_ptr)
- const char *name;
- if (collect_arg(argv, arg_ptr, &name))
- {
-- struct predicate *our_pred = insert_primary_withpred (entry, pred_path);
-+ struct predicate *our_pred = insert_primary_withpred (entry, pred_path, name);
- our_pred->need_stat = our_pred->need_type = false;
- our_pred->args.str = name;
- our_pred->est_success_rate = estimate_pattern_match_rate (name, 0);
-@@ -1894,7 +1900,7 @@ parse_perm (const struct parser_table* entry, char **argv, int *arg_ptr)
- rate = 0.9986; /* probably matches anything but a broken symlink */
- }
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary (entry, perm_expr);
- our_pred->est_success_rate = rate;
- if (havekind)
- {
-@@ -1928,7 +1934,7 @@ parse_print (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) argv;
- (void) arg_ptr;
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary_noarg (entry);
- /* -print has the side effect of printing. This prevents us
- from doing undesired multiple printing when the user has
- already specified -print. */
-@@ -1981,7 +1987,7 @@ parse_prune (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) argv;
- (void) arg_ptr;
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary_noarg (entry);
- if (options.do_dir_first == false)
- our_pred->need_stat = our_pred->need_type = false;
- /* -prune has a side effect that it does not descend into
-@@ -1994,7 +2000,7 @@ parse_prune (const struct parser_table* entry, char **argv, int *arg_ptr)
- static boolean
- parse_quit (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary_noarg (entry);
- (void) argv;
- (void) arg_ptr;
- our_pred->need_stat = our_pred->need_type = false;
-@@ -2036,7 +2042,7 @@ insert_regex (char **argv,
- {
- struct re_pattern_buffer *re;
- const char *error_message;
-- struct predicate *our_pred = insert_primary_withpred (entry, pred_regex);
-+ struct predicate *our_pred = insert_primary_withpred (entry, pred_regex, rx);
- our_pred->need_stat = our_pred->need_type = false;
- re = xmalloc (sizeof (struct re_pattern_buffer));
- our_pred->args.regex = re;
-@@ -2061,6 +2067,7 @@ static boolean
- parse_size (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- struct predicate *our_pred;
-+ char *arg;
- uintmax_t num;
- char suffix;
- enum comparison_type c_type;
-@@ -2073,42 +2080,43 @@ parse_size (const struct parser_table* entry, char **argv, int *arg_ptr)
- */
- if ((argv == NULL) || (argv[*arg_ptr] == NULL))
- return false;
-+ arg = argv[*arg_ptr];
-
-- len = strlen (argv[*arg_ptr]);
-+ len = strlen (arg);
- if (len == 0)
- error (1, 0, _("invalid null argument to -size"));
-
-- suffix = argv[*arg_ptr][len - 1];
-+ suffix = arg[len - 1];
- switch (suffix)
- {
- case 'b':
- blksize = 512;
-- argv[*arg_ptr][len - 1] = '\0';
-+ arg[len - 1] = '\0';
- break;
-
- case 'c':
- blksize = 1;
-- argv[*arg_ptr][len - 1] = '\0';
-+ arg[len - 1] = '\0';
- break;
-
- case 'k':
- blksize = 1024;
-- argv[*arg_ptr][len - 1] = '\0';
-+ arg[len - 1] = '\0';
- break;
-
- case 'M': /* Megabytes */
- blksize = 1024*1024;
-- argv[*arg_ptr][len - 1] = '\0';
-+ arg[len - 1] = '\0';
- break;
-
- case 'G': /* Gigabytes */
- blksize = 1024*1024*1024;
-- argv[*arg_ptr][len - 1] = '\0';
-+ arg[len - 1] = '\0';
- break;
-
- case 'w':
- blksize = 2;
-- argv[*arg_ptr][len - 1] = '\0';
-+ arg[len - 1] = '\0';
- break;
-
- case '0':
-@@ -2127,14 +2135,14 @@ parse_size (const struct parser_table* entry, char **argv, int *arg_ptr)
- error (1, 0, _("invalid -size type `%c'"), argv[*arg_ptr][len - 1]);
- }
- /* TODO: accept fractional megabytes etc. ? */
-- if (!get_num (argv[*arg_ptr], &num, &c_type))
-+ if (!get_num (arg, &num, &c_type))
- {
- error(1, 0,
- _("Invalid argument `%s%c' to -size"),
-- argv[*arg_ptr], (int)suffix);
-+ arg, (int)suffix);
- return false;
- }
-- our_pred = insert_primary (entry);
-+our_pred = insert_primary (entry, arg);
- our_pred->args.size.kind = c_type;
- our_pred->args.size.blocksize = blksize;
- our_pred->args.size.size = num;
-@@ -2162,9 +2170,10 @@ parse_samefile (const struct parser_table* entry, char **argv, int *arg_ptr)
- struct predicate *our_pred;
- struct stat st, fst;
- int fd, openflags;
-+ const char *filename;
-
- set_stat_placeholders(&st);
-- if (!collect_arg_stat_info(argv, arg_ptr, &st))
-+ if (!collect_arg_stat_info(argv, arg_ptr, &st, &filename))
- return false;
-
- set_stat_placeholders(&fst);
-@@ -2289,7 +2298,7 @@ parse_samefile (const struct parser_table* entry, char **argv, int *arg_ptr)
- }
- }
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary (entry, filename);
- our_pred->args.samefileid.ino = st.st_ino;
- our_pred->args.samefileid.dev = st.st_dev;
- our_pred->args.samefileid.fd = fd;
-@@ -2350,7 +2359,7 @@ parse_true (const struct parser_table* entry, char **argv, int *arg_ptr)
- (void) argv;
- (void) arg_ptr;
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary_noarg (entry);
- our_pred->need_stat = our_pred->need_type = false;
- our_pred->est_success_rate = 1.0f;
- return true;
-@@ -2369,7 +2378,7 @@ parse_accesscheck (const struct parser_table* entry, char **argv, int *arg_ptr)
- struct predicate *our_pred;
- (void) argv;
- (void) arg_ptr;
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary_noarg (entry);
- our_pred->need_stat = our_pred->need_type = false;
- our_pred->side_effects = our_pred->no_default_print = false;
- if (pred_is(our_pred, pred_executable))
-@@ -2414,7 +2423,7 @@ parse_used (const struct parser_table* entry, char **argv, int *arg_ptr)
- struct timespec zero = {0,0};
- if (get_relative_timestamp(offset_str, &tval, zero, DAYSECS, errmsg))
- {
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary (entry, offset_str);
- our_pred->args.reftime = tval;
- our_pred->est_success_rate = estimate_file_age_success_rate(tval.ts.tv_sec / DAYSECS);
- return true;
-@@ -2472,7 +2481,7 @@ parse_user (const struct parser_table* entry, char **argv, int *arg_ptr)
- return false;
- }
- }
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary (entry, username);
- our_pred->args.uid = uid;
- our_pred->est_success_rate = (our_pred->args.uid < 100) ? 0.99 : 0.2;
- return true;
-@@ -2650,7 +2659,7 @@ insert_type (char **argv, int *arg_ptr,
- error(1, 0, _("Unknown argument to -type: %c"), (*typeletter));
- return false;
- }
-- our_pred = insert_primary_withpred (entry, which_pred);
-+ our_pred = insert_primary_withpred (entry, which_pred, typeletter);
- our_pred->est_success_rate = rate;
-
- /* Figure out if we will need to stat the file, because if we don't
-@@ -2706,7 +2715,7 @@ insert_fprintf (struct format_val *vec,
- struct segment **segmentp; /* Address of current segment. */
- struct predicate *our_pred;
-
-- our_pred = insert_primary_withpred (entry, func);
-+ our_pred = insert_primary_withpred (entry, func, format_const);
- our_pred->side_effects = our_pred->no_default_print = true;
- our_pred->args.printf_vec = *vec;
- our_pred->need_type = false;
-@@ -3045,7 +3054,7 @@ new_insert_exec_ok (const char *action,
- if ((argv == NULL) || (argv[*arg_ptr] == NULL))
- return false;
-
-- our_pred = insert_primary_withpred (entry, func);
-+ our_pred = insert_primary_withpred (entry, func, "(some -exec* arguments)");
- our_pred->side_effects = our_pred->no_default_print = true;
- our_pred->need_type = our_pred->need_stat = false;
-
-@@ -3374,7 +3383,7 @@ parse_time (const struct parser_table* entry, char *argv[], int *arg_ptr)
- if (!get_relative_timestamp(timearg, &tval, origin, DAYSECS, errmsg))
- return false;
-
-- our_pred = insert_primary (entry);
-+ our_pred = insert_primary (entry, orig_timearg);
- our_pred->args.reftime = tval;
- our_pred->est_success_rate = estimate_timestamp_success_rate(tval.ts.tv_sec);
-
-@@ -3487,7 +3496,7 @@ insert_num (char **argv, int *arg_ptr, const struct parser_table *entry)
-
- if (get_num (numstr, &num, &c_type))
- {
-- struct predicate *our_pred = insert_primary (entry);
-+ struct predicate *our_pred = insert_primary (entry, numstr);
- our_pred->args.numinfo.kind = c_type;
- our_pred->args.numinfo.l_val = num;
-
-diff --git a/find/tree.c b/find/tree.c
-index 7420c60..60a0601 100644
---- a/find/tree.c
-+++ b/find/tree.c
-@@ -269,10 +269,14 @@ predicate_is_cost_free(const struct predicate *p)
- /* Prints a predicate */
- void print_predicate(FILE *fp, const struct predicate *p)
- {
-- fprintf (fp, "%s%s%s",
-- p->p_name,
-- p->arg_text ? " " : "",
-- p->arg_text ? p->arg_text : "");
-+ if (p->arg_text)
-+ {
-+ fprintf (fp, "%s %s", p->p_name, p->arg_text);
-+ }
-+ else
-+ {
-+ fprintf (fp, "%s", p->p_name);
-+ }
- }
-
-
-@@ -832,7 +836,8 @@ set_new_parent (struct predicate *curr, enum predicate_precedence high_prec, str
- new_parent->need_stat = false;
- new_parent->need_type = false;
- new_parent->p_cost = NeedsNothing;
--
-+ new_parent->arg_text = NULL;
-+
- switch (high_prec)
- {
- case COMMA_PREC:
-@@ -1393,6 +1398,18 @@ init_pred_perf(struct predicate *pred)
- p->visits = p->successes = 0;
- }
-
-+
-+struct predicate *
-+get_new_pred_noarg (const struct parser_table *entry)
-+{
-+ struct predicate *p = get_new_pred(entry);
-+ if (p)
-+ {
-+ p->arg_text = NULL;
-+ }
-+ return p;
-+}
-+
-
- /* Return a pointer to a new predicate structure, which has been
- linked in as the last one in the predicates list.
-@@ -1433,6 +1450,8 @@ get_new_pred (const struct parser_table *entry)
- last_pred->no_default_print = false;
- last_pred->need_stat = true;
- last_pred->need_type = true;
-+ last_pred->p_cost = NeedsUnknown;
-+ last_pred->arg_text = "ThisShouldBeSetToSomethingElse";
- last_pred->args.str = NULL;
- last_pred->pred_next = NULL;
- last_pred->pred_left = NULL;
-@@ -1449,7 +1468,8 @@ get_new_pred (const struct parser_table *entry)
- predicate is an operator. If it isn't, the AND operator is inserted. */
-
- struct predicate *
--get_new_pred_chk_op (const struct parser_table *entry)
-+get_new_pred_chk_op (const struct parser_table *entry,
-+ const char *arg)
- {
- struct predicate *new_pred;
- static const struct parser_table *entry_and = NULL;
-@@ -1471,13 +1491,14 @@ get_new_pred_chk_op (const struct parser_table *entry)
- case PRIMARY_TYPE:
- case CLOSE_PAREN:
- /* We need to interpose the and operator. */
-- new_pred = get_new_pred (entry_and);
-+ new_pred = get_new_pred_noarg (entry_and);
- new_pred->pred_func = pred_and;
- new_pred->p_name = "-a";
- new_pred->p_type = BI_OP;
- new_pred->p_prec = AND_PREC;
- new_pred->need_stat = false;
- new_pred->need_type = false;
-+ new_pred->arg_text = NULL;
- new_pred->args.str = NULL;
- new_pred->side_effects = false;
- new_pred->no_default_print = false;
-@@ -1488,6 +1509,7 @@ get_new_pred_chk_op (const struct parser_table *entry)
- }
-
- new_pred = get_new_pred (entry);
-+ new_pred->arg_text = arg;
- new_pred->parser_entry = entry;
- return new_pred;
- }
-diff --git a/find/util.c b/find/util.c
-index a06eada..cc9a3eb 100644
---- a/find/util.c
-+++ b/find/util.c
-@@ -89,11 +89,13 @@ static struct debug_option_assoc debugassoc[] =
- operator. */
-
- struct predicate *
--insert_primary_withpred (const struct parser_table *entry, PRED_FUNC pred_func)
-+insert_primary_withpred (const struct parser_table *entry,
-+ PRED_FUNC pred_func,
-+ const char *arg)
- {
- struct predicate *new_pred;
-
-- new_pred = get_new_pred_chk_op (entry);
-+ new_pred = get_new_pred_chk_op (entry, arg);
- new_pred->pred_func = pred_func;
- new_pred->p_name = entry->parser_name;
- new_pred->args.str = NULL;
-@@ -118,10 +120,16 @@ insert_primary_withpred (const struct parser_table *entry, PRED_FUNC pred_func)
- either not there at all (we are the very first node) or is an
- operator. */
- struct predicate *
--insert_primary (const struct parser_table *entry)
-+insert_primary (const struct parser_table *entry, const char *arg)
- {
- assert (entry->pred_func != NULL);
-- return insert_primary_withpred(entry, entry->pred_func);
-+ return insert_primary_withpred(entry, entry->pred_func, arg);
-+}
-+
-+struct predicate *
-+insert_primary_noarg (const struct parser_table *entry)
-+{
-+ return insert_primary(entry, NULL);
- }
-
-
diff --git a/meta/recipes-extended/findutils/findutils-4.4.2/02-28824.patch b/meta/recipes-extended/findutils/findutils-4.4.2/02-28824.patch
deleted file mode 100644
index c0ff3ff1c0..0000000000
--- a/meta/recipes-extended/findutils/findutils-4.4.2/02-28824.patch
+++ /dev/null
@@ -1,294 +0,0 @@
-Upstream-Status: Backport
-
-commit 76ed377d6d3e4a83a00cabd401f751b37ecd1e7b
-Author: James Youngman <jay@gnu.org>
-Date: Sat Feb 20 13:11:45 2010 +0000
-
- Fix Savannah bug# 28824: "-ctime x" yields "missing argument to `-ctime'".
-
- * find/parser.c (parse_fls): If the argument is invalid, reverse
- the change that collect_arg() made to *arg_ptr (that is, don't
- consume the argument).
- (parse_fprint0): Likewise.
- (parse_gid): Likewise.
- (parse_group): Likewise.
- (parse_inum): Likewise.
- (parse_links): Likewise.
- (do_parse_xmin): Likewise.
- (parse_name): Likewise.
- (parse_printf): Likewise.
- (parse_uid): Likewise.
- (parse_used): Likewise.
- (parse_time): Likewise.
-
- Signed-off-by: James Youngman <jay@gnu.org>
-
-diff --git a/ChangeLog b/ChangeLog
-index d0ce1fe..13539a4 100644
---- a/ChangeLog
-+++ b/ChangeLog
-@@ -1,0 +1,19 @@
-+2010-02-20 James Youngman <jay@gnu.org>
-+
-+ Fix Savannah bug# 28824: "-ctime x" yields "missing argument to
-+ `-ctime'".
-+ * find/parser.c (parse_fls): If the argument is invalid, reverse
-+ the change that collect_arg() made to *arg_ptr (that is, don't
-+ consume the argument).
-+ (parse_fprint0): Likewise.
-+ (parse_gid): Likewise.
-+ (parse_group): Likewise.
-+ (parse_inum): Likewise.
-+ (parse_links): Likewise.
-+ (do_parse_xmin): Likewise.
-+ (parse_name): Likewise.
-+ (parse_printf): Likewise.
-+ (parse_uid): Likewise.
-+ (parse_used): Likewise.
-+ (parse_time): Likewise.
-+
-diff --git a/NEWS b/NEWS
-index 5394311..4e910df 100644
---- a/NEWS
-+++ b/NEWS
-@@ -4,5 +4,8 @@ GNU findutils NEWS - User visible changes. -*- outline -*- (allout)
-
- ** Bug Fixes
-
-+#28824: Corrected error message for "-ctime x".
-+ Likewise for -gid, -inum, -links, -mmin, -cmin, -amin,
-+ -uid, -used, -atime, -mtime, -ctime.
- #26537: find -prune now makes sure it has valid stat() information.
-
-diff --git a/find/parser.c b/find/parser.c
-index 2e6b989..08758ee 100644
---- a/find/parser.c
-+++ b/find/parser.c
-@@ -886,8 +886,14 @@ static boolean
- parse_fls (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- const char *filename;
-- return collect_arg(argv, arg_ptr, &filename)
-- && insert_fls(entry, filename);
-+ if (collect_arg(argv, arg_ptr, &filename))
-+ {
-+ if (insert_fls(entry, filename))
-+ return true;
-+ else
-+ --*arg_ptr; /* don't consume the invalid arg. */
-+ }
-+ return false;
- }
-
- static boolean
-@@ -937,9 +943,13 @@ parse_fprint0 (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- const char *filename;
- if (collect_arg(argv, arg_ptr, &filename))
-- return insert_fprint(entry, filename);
-- else
-- return false;
-+ {
-+ if (insert_fprint(entry, filename))
-+ return true;
-+ else
-+ --*arg_ptr; /* don't consume the bad arg. */
-+ }
-+ return false;
- }
-
- static float estimate_fstype_success_rate(const char *fsname)
-@@ -993,6 +1003,7 @@ parse_gid (const struct parser_table* entry, char **argv, int *arg_ptr)
- }
- else
- {
-+ --*arg_ptr; /* don't consume the invalid argument. */
- return false;
- }
- }
-@@ -1049,6 +1060,7 @@ static boolean
- parse_group (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- const char *groupname;
-+ const int saved_argc = *arg_ptr;
-
- if (collect_arg(argv, arg_ptr, &groupname))
- {
-@@ -1077,6 +1089,7 @@ parse_group (const struct parser_table* entry, char **argv, int *arg_ptr)
- "because it has the unexpected suffix %s"),
- quotearg_n_style(0, options.err_quoting_style, groupname),
- quotearg_n_style(1, options.err_quoting_style, groupname+gid_len));
-+ *arg_ptr = saved_argc; /* don't consume the invalid argument. */
- return false;
- }
- }
-@@ -1092,6 +1105,7 @@ parse_group (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- error(1, 0, _("argument to -group is empty, but should be a group name"));
- }
-+ *arg_ptr = saved_argc; /* don't consume the invalid argument. */
- return false;
- }
- }
-@@ -1256,6 +1270,7 @@ parse_inum (const struct parser_table* entry, char **argv, int *arg_ptr)
- }
- else
- {
-+ --*arg_ptr; /* don't consume the invalid argument. */
- return false;
- }
- }
-@@ -1310,6 +1325,7 @@ parse_links (const struct parser_table* entry, char **argv, int *arg_ptr)
- }
- else
- {
-+ --*arg_ptr; /* don't consume the invalid argument. */
- return false;
- }
- }
-@@ -1358,6 +1374,7 @@ insert_depthspec(const struct parser_table* entry, char **argv, int *arg_ptr,
- error(1, 0, _("Expected a positive decimal integer argument to %s, but got %s"),
- predicate,
- quotearg_n_style(0, options.err_quoting_style, depthstr));
-+ /* NOTREACHED */
- return false;
- }
- /* missing argument */
-@@ -1385,6 +1402,7 @@ do_parse_xmin (const struct parser_table* entry,
- enum xval xv)
- {
- const char *minutes;
-+ const int saved_argc = *arg_ptr;
-
- if (collect_arg(argv, arg_ptr, &minutes))
- {
-@@ -1401,6 +1419,11 @@ do_parse_xmin (const struct parser_table* entry,
- our_pred->est_success_rate = estimate_timestamp_success_rate(tval.ts.tv_sec);
- return true;
- }
-+ else
-+ {
-+ /* Don't consume the invalid argument. */
-+ *arg_ptr = saved_argc;
-+ }
- }
- return false;
- }
-@@ -1427,6 +1450,8 @@ static boolean
- parse_name (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- const char *name;
-+ const int saved_argc = *arg_ptr;
-+
- if (collect_arg(argv, arg_ptr, &name))
- {
- fnmatch_sanitycheck();
-@@ -1438,6 +1463,10 @@ parse_name (const struct parser_table* entry, char **argv, int *arg_ptr)
- our_pred->est_success_rate = estimate_pattern_match_rate(name, 0);
- return true;
- }
-+ else
-+ {
-+ *arg_ptr = saved_argc; /* don't consume the invalid argument. */
-+ }
- }
- return false;
- }
-@@ -1954,11 +1983,21 @@ static boolean
- parse_printf (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- const char *format;
-+ const int saved_argc = *arg_ptr;
-+
- if (collect_arg(argv, arg_ptr, &format))
- {
- struct format_val fmt;
- open_stdout(&fmt);
-- return insert_fprintf (&fmt, entry, pred_fprintf, format);
-+ if (insert_fprintf (&fmt, entry, pred_fprintf, format))
-+ {
-+ return true;
-+ }
-+ else
-+ {
-+ *arg_ptr = saved_argc; /* don't consume the invalid argument. */
-+ return false;
-+ }
- }
- return false;
- }
-@@ -1967,15 +2006,21 @@ static boolean
- parse_fprintf (const struct parser_table* entry, char **argv, int *arg_ptr)
- {
- const char *format, *filename;
-+ int saved_argc = *arg_ptr;
-+
- if (collect_arg(argv, arg_ptr, &filename))
- {
- if (collect_arg(argv, arg_ptr, &format))
- {
- struct format_val fmt;
- open_output_file (filename, &fmt);
-- return insert_fprintf (&fmt, entry, pred_fprintf, format);
-+ saved_argc = *arg_ptr;
-+
-+ if (insert_fprintf (&fmt, entry, pred_fprintf, format))
-+ return true;
- }
- }
-+ *arg_ptr = saved_argc; /* don't consume the invalid argument. */
- return false;
- }
-
-@@ -2405,6 +2450,7 @@ parse_uid (const struct parser_table* entry, char **argv, int *arg_ptr)
- }
- else
- {
-+ --*arg_ptr; /* don't consume the invalid argument. */
- return false;
- }
- }
-@@ -2431,6 +2477,7 @@ parse_used (const struct parser_table* entry, char **argv, int *arg_ptr)
- else
- {
- error(1, 0, _("Invalid argument %s to -used"), offset_str);
-+ /*NOTREACHED*/
- return false;
- }
- }
-@@ -2610,6 +2657,7 @@ insert_type (char **argv, int *arg_ptr,
- if (strlen(typeletter) != 1u)
- {
- error(1, 0, _("Arguments to -type should contain only one letter"));
-+ /*NOTREACHED*/
- return false;
- }
-
-@@ -2657,6 +2705,7 @@ insert_type (char **argv, int *arg_ptr,
- #endif
- default: /* None of the above ... nuke 'em. */
- error(1, 0, _("Unknown argument to -type: %c"), (*typeletter));
-+ /*NOTREACHED*/
- return false;
- }
- our_pred = insert_primary_withpred (entry, which_pred, typeletter);
-@@ -3349,6 +3398,7 @@ parse_time (const struct parser_table* entry, char *argv[], int *arg_ptr)
- const char *errmsg = "arithmetic overflow while converting %s "
- "days to a number of seconds";
- struct timespec origin;
-+ const int saved_argc = *arg_ptr;
-
- if (!collect_arg(argv, arg_ptr, &timearg))
- return false;
-@@ -3381,7 +3431,10 @@ parse_time (const struct parser_table* entry, char *argv[], int *arg_ptr)
- timearg = orig_timearg;
-
- if (!get_relative_timestamp(timearg, &tval, origin, DAYSECS, errmsg))
-- return false;
-+ {
-+ *arg_ptr = saved_argc; /* don't consume the invalid argument */
-+ return false;
-+ }
-
- our_pred = insert_primary (entry, orig_timearg);
- our_pred->args.reftime = tval;
diff --git a/meta/recipes-extended/findutils/findutils-4.4.2/03-28872.patch b/meta/recipes-extended/findutils/findutils-4.4.2/03-28872.patch
deleted file mode 100644
index 940aaf6ff5..0000000000
--- a/meta/recipes-extended/findutils/findutils-4.4.2/03-28872.patch
+++ /dev/null
@@ -1,58 +0,0 @@
-Upstream-Status: Backport
-
-commit 5f5eb921765794e8fc58c4bdffa2daa2ae34800f
-Author: James Youngman <jay@gnu.org>
-Date: Sat Feb 20 19:53:13 2010 +0000
-
- Fix Savannah bug#28872, Mistake in "Problems with -exec and filenames"
-
- * doc/find.texi (Problems with -exec and filenames): Add missing
- $0 argument in example for sh -c 'something "$@" sh ...
- * NEWS: Mention this change.
-
- Signed-off-by: James Youngman <jay@gnu.org>
-
-diff --git a/ChangeLog b/ChangeLog
-index 13539a4..e94ba96 100644
---- a/ChangeLog
-+++ b/ChangeLog
-@@ -1,5 +1,10 @@
- 2010-02-20 James Youngman <jay@gnu.org>
-
-+ Fix Savannah bug#28872, Mistake in "Problems with -exec and filenames"
-+ * doc/find.texi (Problems with -exec and filenames): Add missing
-+ $0 argument in example for sh -c 'something "$@" sh ...
-+ * NEWS: Mention this change.
-+
- Fix Savannah bug# 28824: "-ctime x" yields "missing argument to
- `-ctime'".
- * find/parser.c (parse_fls): If the argument is invalid, reverse
-diff --git a/NEWS b/NEWS
-index 4e910df..4c97be9 100644
---- a/NEWS
-+++ b/NEWS
-@@ -4,6 +4,9 @@ GNU findutils NEWS - User visible changes. -*- outline -*- (allout)
-
- ** Bug Fixes
-
-+#28872: Mistake in "#safer" example in "Problems with -exec and
-+ filenames" section of the Texinfo manual.
-+
- #28824: Corrected error message for "-ctime x".
- Likewise for -gid, -inum, -links, -mmin, -cmin, -amin,
- -uid, -used, -atime, -mtime, -ctime.
-diff --git a/doc/find.texi b/doc/find.texi
-index 2e5958d..391ffa0 100644
---- a/doc/find.texi
-+++ b/doc/find.texi
-@@ -4830,8 +4830,8 @@ problem:
-
- @example
- # safer
--find -exec sh -c 'something "$@@"' @{@} \;
--find -execdir sh -c 'something "$@@"' @{@}\;
-+find -exec sh -c 'something "$@@"' sh @{@} \;
-+find -execdir sh -c 'something "$@@"' sh @{@}\;
- @end example
-
- This approach is not guaranteed to avoid every problem, but it is much
diff --git a/meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_doc.patch b/meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_doc.patch
deleted file mode 100644
index a48cdc221c..0000000000
--- a/meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_doc.patch
+++ /dev/null
@@ -1,84 +0,0 @@
-Fix documentation build errors
-
-This fixes the following errors building the findutils documentation:
-find-maint.texi:45: misplaced {
-find-maint.texi:45: misplaced }
-find-maint.texi:236: warning: node next `Make the Compiler Find the Bugs' in menu `The File System Is Being Modified' and in sectioning `Factor Out Repeated Code' differ
-find-maint.texi:335: warning: node `Debugging is For Users Too' is next for `Factor Out Repeated Code' in sectioning but not in menu
-find-maint.texi:335: warning: node prev `Factor Out Repeated Code' in menu `Debugging is For Users Too' and in sectioning `Make the Compiler Find the Bugs' differ
-find-maint.texi:378: warning: node next `Debugging is For Users Too' in menu `Factor Out Repeated Code' and in sectioning `Don't Trust the File System Contents' differ
-find-maint.texi:378: warning: node prev `Debugging is For Users Too' in menu `Don't Trust the File System Contents' and in sectioning `Factor Out Repeated Code' differ
-find-maint.texi:392: warning: node next `Don't Trust the File System Contents' in menu `Debugging is For Users Too' and in sectioning `The File System Is Being Modified' differ
-find-maint.texi:392: warning: node prev `Don't Trust the File System Contents' in menu `The File System Is Being Modified' and in sectioning `Debugging is For Users Too' differ
-find-maint.texi:417: warning: node `Don't Trust the File System Contents' is next for `The File System Is Being Modified' in menu but not in sectioning
-find-maint.texi:417: warning: node prev `The File System Is Being Modified' in menu `Make the Compiler Find the Bugs' and in sectioning `Don't Trust the File System Contents' differ
-find.texi:53: misplaced {
-find.texi:53: misplaced }
-find.texi:1862: warning: node `Formatting Flags' is next for `Time Directives' in menu but not in sectioning
-find.texi:1975: warning: node `Formatting Flags' is next for `Combined Time Formats' in sectioning but not in menu
-find.texi:2004: warning: node prev `Formatting Flags' in menu `Time Directives' and in sectioning `Combined Time Formats' differ
-find.texi:2004: warning: node up `Formatting Flags' in menu `Format Directives' and in sectioning `Time Formats' differ
-find.texi:1893: node `Time Formats' lacks menu item for `Formatting Flags' despite being its Up target
-
-Upstream-Status: Backport
-Signed-off-by: Jonathan Liu <net147@gmail.com>
-
---- a/doc/find-maint.texi
-+++ b/doc/find-maint.texi
-@@ -42,7 +42,7 @@ Free Documentation License''.
-
- @page
- @vskip 0pt plus 1filll
--@insertcopying{}
-+@insertcopying
- @end titlepage
-
- @contents
-@@ -227,10 +227,10 @@ circumstances.
-
- @menu
- * Make the Compiler Find the Bugs::
-+* Factor Out Repeated Code::
- * The File System Is Being Modified::
- * Don't Trust the File System Contents::
- * Debugging is For Users Too::
--* Factor Out Repeated Code::
- @end menu
-
- @node Make the Compiler Find the Bugs
---- a/doc/find.texi
-+++ b/doc/find.texi
-@@ -50,7 +50,7 @@ Texts. A copy of the license is included in the section entitled
-
- @page
- @vskip 0pt plus 1filll
--@insertcopying{}
-+@insertcopying
- @end titlepage
-
- @contents
-@@ -1665,6 +1665,7 @@ no output is ever sent to it.
- * Escapes::
- * Format Directives::
- * Time Formats::
-+* Formatting Flags::
- @end menu
-
- @node Escapes
-@@ -1733,7 +1734,6 @@ from the novel you are reading.
- * Size Directives::
- * Location Directives::
- * Time Directives::
--* Formatting Flags::
- @end menu
-
- @node Name Directives
-@@ -2002,7 +2002,7 @@ seconds field includes a fractional part.
- @end table
-
- @node Formatting Flags
--@subsubsection Formatting Flags
-+@subsection Formatting Flags
-
- The @samp{%m} and @samp{%d} directives support the @samp{#}, @samp{0}
- and @samp{+} flags, but the other directives do not, even if they
diff --git a/meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_for_automake-1.12.patch b/meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_for_automake-1.12.patch
deleted file mode 100644
index d4a7e95728..0000000000
--- a/meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_for_automake-1.12.patch
+++ /dev/null
@@ -1,22 +0,0 @@
-Upstream-Status: Pending
-
-This patch fixes following issue with automake 1.12
-
-| configure.ac:80: error: automatic de-ANSI-fication support has been removed
-
-Signed-Off-By: Nitin A Kamble <nitin.a.kamble@intel.com>
-2012/05/03
-
-Index: findutils-4.4.2/configure.ac
-===================================================================
---- findutils-4.4.2.orig/configure.ac
-+++ findutils-4.4.2/configure.ac
-@@ -77,8 +77,6 @@ AC_PROG_CPP
- dnl for gnulib
- gl_EARLY
-
--AM_C_PROTOTYPES
--
- AC_PROG_INSTALL
- AC_PROG_RANLIB
- dnl AC_PROG_LIBTOOL
diff --git a/meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_for_x32.patch b/meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_for_x32.patch
deleted file mode 100644
index b78cc7539c..0000000000
--- a/meta/recipes-extended/findutils/findutils-4.4.2/findutils_fix_for_x32.patch
+++ /dev/null
@@ -1,40 +0,0 @@
-Upstream-Status: Pending
-
-Author: H.J. Lu <hjl.tools@gmail.com>
-
-Work around gnulib time_t assumption in findutils for x32
-
-time_t is 64bit and long int is 32bit on x32. But gnulib used in
-findutils assumes time_t values fit into long int. Such assumption is
-invalid for x32 and should be removed.
-
-This patch is a workaround to compile gnulib for x32.
-
-Signed-Off-By: Nitin A Kamble <nitin.a.kamble@intel.com> 2011/12/05
-
-
-Index: findutils-4.4.2/gnulib/lib/getdate.y
-===================================================================
---- findutils-4.4.2.orig/gnulib/lib/getdate.y
-+++ findutils-4.4.2/gnulib/lib/getdate.y
-@@ -114,7 +114,6 @@
- wraps around, but there's no portable way to check for that at
- compile-time. */
- verify (TYPE_IS_INTEGER (time_t));
--verify (LONG_MIN <= TYPE_MINIMUM (time_t) && TYPE_MAXIMUM (time_t) <= LONG_MAX);
-
- /* An integer value, and the number of digits in its textual
- representation. */
-Index: findutils-4.4.2/gnulib/lib/mktime.c
-===================================================================
---- findutils-4.4.2.orig/gnulib/lib/mktime.c
-+++ findutils-4.4.2/gnulib/lib/mktime.c
-@@ -166,7 +166,7 @@ ydhms_diff (long int year1, long int yda
- {
- verify (C99_integer_division, -1 / 2 == 0);
- verify (long_int_year_and_yday_are_wide_enough,
-- INT_MAX <= LONG_MAX / 2 || TIME_T_MAX <= UINT_MAX);
-+ INT_MAX <= TIME_T_MAX / 2 || TIME_T_MAX <= UINT_MAX);
-
- /* Compute intervening leap days correctly even if year is negative.
- Take care to avoid integer overflow here. */
diff --git a/meta/recipes-extended/findutils/findutils.inc b/meta/recipes-extended/findutils/findutils.inc
index 4f4068a091..37c84cc59b 100644
--- a/meta/recipes-extended/findutils/findutils.inc
+++ b/meta/recipes-extended/findutils/findutils.inc
@@ -7,7 +7,7 @@ BUGTRACKER = "http://savannah.gnu.org/bugs/?group=findutils"
SECTION = "console/utils"
-SRC_URI = "${GNU_MIRROR}/findutils/findutils-${PV}.tar.gz"
+SRC_URI = "ftp://alpha.gnu.org/gnu/${BPN}/${BP}.tar.gz"
inherit autotools gettext texinfo update-alternatives
diff --git a/meta/recipes-extended/findutils/findutils_4.4.2.bb b/meta/recipes-extended/findutils/findutils_4.4.2.bb
deleted file mode 100644
index faf2ebe462..0000000000
--- a/meta/recipes-extended/findutils/findutils_4.4.2.bb
+++ /dev/null
@@ -1,25 +0,0 @@
-require findutils.inc
-
-# GPLv2+ (<< 4.2.32), GPLv3+ (>= 4.2.32)
-LICENSE = "GPLv3+"
-LIC_FILES_CHKSUM = "file://COPYING;md5=f27defe1e96c2e1ecd4e0c9be8967949"
-
-PR = "r6"
-
-SRC_URI += "file://01-27017.patch \
- file://02-28824.patch \
- file://03-28872.patch \
- file://findutils_fix_for_x32.patch \
- file://findutils_fix_for_automake-1.12.patch \
- file://findutils_fix_doc.patch \
- "
-
-SRC_URI[md5sum] = "351cc4adb07d54877fa15f75fb77d39f"
-SRC_URI[sha256sum] = "434f32d171cbc0a5e72cfc5372c6fc4cb0e681f8dce566a0de5b6fccd702b62a"
-
-DEPENDS = "bison-native"
-
-# http://savannah.gnu.org/bugs/?27299
-CACHED_CONFIGUREVARS += "${@bb.utils.contains('DISTRO_FEATURES', 'libc-posix-clang-wchar', 'gl_cv_func_wcwidth_works=yes', '', d)}"
-
-EXTRA_OECONF += "ac_cv_path_SORT=${bindir}/sort"
diff --git a/meta/recipes-extended/findutils/findutils_4.5.14.bb b/meta/recipes-extended/findutils/findutils_4.5.14.bb
new file mode 100644
index 0000000000..9253637fc1
--- /dev/null
+++ b/meta/recipes-extended/findutils/findutils_4.5.14.bb
@@ -0,0 +1,15 @@
+require findutils.inc
+
+# GPLv2+ (<< 4.2.32), GPLv3+ (>= 4.2.32)
+LICENSE = "GPLv3+"
+LIC_FILES_CHKSUM = "file://COPYING;md5=f27defe1e96c2e1ecd4e0c9be8967949"
+
+DEPENDS = "bison-native"
+
+SRC_URI[md5sum] = "a8a8176282fd28e8d1234c84d847fa66"
+SRC_URI[sha256sum] = "0de3cf625a5c9f154eee3171e072515ffdde405244dd00502af617ac57b73ae2"
+
+# http://savannah.gnu.org/bugs/?27299
+CACHED_CONFIGUREVARS += "${@bb.utils.contains('DISTRO_FEATURES', 'libc-posix-clang-wchar', 'gl_cv_func_wcwidth_works=yes', '', d)}"
+
+EXTRA_OECONF += "ac_cv_path_SORT=${bindir}/sort"
diff --git a/meta/recipes-extended/gawk/gawk-4.1.1/run-ptest b/meta/recipes-extended/gawk/gawk-4.1.1/run-ptest
index 7d214eafbe..d23f0bf6d7 100644
--- a/meta/recipes-extended/gawk/gawk-4.1.1/run-ptest
+++ b/meta/recipes-extended/gawk/gawk-4.1.1/run-ptest
@@ -3,8 +3,8 @@
cd test
for i in `grep -vE "@|^$|#|Gt-dummy" Maketests |awk -F: '{print $1}'`; \
do LC_ALL=${GAWKLOCALE:-C} LANG=${GAWKLOCALE:-C} srcdir=`pwd` AWK=gawk CMP=cmp \
- make -f Maketests $i &>$i.tmp; \
+ make -f Maketests $i >$i.tmp 2>&1; \
grep -q "Error" $i.tmp; \
- if [ $? == 0 ]; then echo "FAIL: $i"; \
+ if [ $? -eq 0 ]; then echo "FAIL: $i"; \
else echo "PASS: $i"; rm -f $i.tmp; fi; \
done
diff --git a/meta/recipes-extended/ghostscript/ghostscript/cups-no-gcrypt.patch b/meta/recipes-extended/ghostscript/ghostscript/cups-no-gcrypt.patch
new file mode 100644
index 0000000000..a1c9368cc9
--- /dev/null
+++ b/meta/recipes-extended/ghostscript/ghostscript/cups-no-gcrypt.patch
@@ -0,0 +1,31 @@
+Subject: [PATCH] Don't build-depend on libgcrypt, as nothing is used from it
+
+Backported from http://www.cups.org/strfiles.php/3308/cups-no-gcrypt.patch
+
+This addresses the crypto dependency seen during build.
+
+Upstream-Status: Backport
+
+Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
+
+Rebase the patch to ghostscript-9.15
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+---
+ cups/libs/cups/http-private.h | 1 -
+ 1 file changed, 1 deletion(-)
+
+diff --git a/cups/libs/cups/http-private.h b/cups/libs/cups/http-private.h
+index 99a85c3..a674852 100644
+--- a/cups/libs/cups/http-private.h
++++ b/cups/libs/cups/http-private.h
+@@ -80,7 +80,6 @@ typedef int socklen_t;
+ # elif defined HAVE_GNUTLS
+ # include <gnutls/gnutls.h>
+ # include <gnutls/x509.h>
+-# include <gcrypt.h>
+ # elif defined(HAVE_CDSASSL)
+ # include <CoreFoundation/CoreFoundation.h>
+ # include <Security/Security.h>
+--
+1.9.1
+
diff --git a/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.02-parallel-make.patch b/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.02-parallel-make.patch
index 601f5f127e..ae9da6e08e 100644
--- a/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.02-parallel-make.patch
+++ b/meta/recipes-extended/ghostscript/ghostscript/ghostscript-9.02-parallel-make.patch
@@ -12,6 +12,9 @@ RP: Extended || true to all CP_ operations, they all can race e.g.:
Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+Rebase the patch to ghostscript-9.15
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+
diff --git a/base/expat.mak b/base/expat.mak
index 4ee9c8c..2e16a9d 100644
--- a/base/expat.mak
@@ -39,10 +42,10 @@ index 16f2b95..409f75b 100644
# Define the shared version.
$(FTGEN)freetype_1.dev : $(TOP_MAKEFILES) $(FT_MAK) $(ECHOGS_XE)
diff --git a/base/gs.mak b/base/gs.mak
-index 7d75fb0..2a9596c 100644
+index 3fc67df..1fc3e26 100644
--- a/base/gs.mak
+++ b/base/gs.mak
-@@ -433,7 +433,7 @@ $(gconfxx_h) : $(ld_tr)
+@@ -434,7 +434,7 @@ $(gconfxx_h) : $(ld_tr)
$(gconfig_h) : $(gconfxx_h)
$(RM_) $(gconfig_h)
@@ -78,7 +81,7 @@ index bb74630..44617a0 100644
# dev file for shared (separately built) jbig2dec library
$(JBIG2GEN)jbig2dec_1.dev : $(TOP_MAKEFILES) $(JBIG2_MAK) $(ECHOGS_XE)
diff --git a/base/jpeg.mak b/base/jpeg.mak
-index 5310a20..43cee63 100644
+index e300a04..9fdad5a 100644
--- a/base/jpeg.mak
+++ b/base/jpeg.mak
@@ -96,7 +96,7 @@ jconfig_h=$(GLGEN)jconfig.h
@@ -148,7 +151,7 @@ index 5310a20..43cee63 100644
# In order to avoid having to keep the dependency lists for the IJG code
# accurate, we simply make all of them depend on the only files that
-@@ -174,40 +174,40 @@ $(JGEN)jpegc0.dev : $(JPEG_MAK) $(ECHOGS_XE) $(jpegc0_)
+@@ -181,40 +181,40 @@ $(JGEN)jpegc0.dev : $(JPEG_MAK) $(ECHOGS_XE) $(jpegc0_)
$(SETMOD) $(JGEN)jpegc0 $(jpegc0_)
$(JOBJ)jcomapi.$(OBJ) : $(JSRC)jcomapi.c $(JDEP)
@@ -196,7 +199,7 @@ index 5310a20..43cee63 100644
jpege6=$(JOBJ)jcapimin.$(OBJ) $(JOBJ)jcapistd.$(OBJ) $(JOBJ)jcinit.$(OBJ)
-@@ -223,90 +223,90 @@ $(JGEN)jpege6.dev : $(JPEG_MAK) $(ECHOGS_XE) $(JGEN)jpegc0.dev $(jpege6) $(jpege
+@@ -230,90 +230,90 @@ $(JGEN)jpege6.dev : $(JPEG_MAK) $(ECHOGS_XE) $(JGEN)jpegc0.dev $(jpege6) $(jpege
$(ADDMOD) $(JGEN)jpege6 -obj $(jpege_3)
$(JOBJ)jcapimin.$(OBJ) : $(JSRC)jcapimin.c $(JDEP)
@@ -304,7 +307,7 @@ index 5310a20..43cee63 100644
jpegd6=$(JOBJ)jdapimin.$(OBJ) $(JOBJ)jdapistd.$(OBJ) $(JOBJ)jdinput.$(OBJ) $(JOBJ)jdhuff.$(OBJ)
-@@ -322,76 +322,76 @@ $(JGEN)jpegd6.dev : $(JPEG_MAK) $(ECHOGS_XE) $(JGEN)jpegc0.dev $(jpegd6) $(jpegd
+@@ -329,76 +329,76 @@ $(JGEN)jpegd6.dev : $(JPEG_MAK) $(ECHOGS_XE) $(JGEN)jpegc0.dev $(jpegd6) $(jpegd
$(ADDMOD) $(JGEN)jpegd6 -obj $(jpegd_3)
$(JOBJ)jdapimin.$(OBJ) : $(JSRC)jdapimin.c $(JDEP)
@@ -436,10 +439,10 @@ index 52f750c..33a9c16 100644
# dev file for shared (separately built) lcms library
$(LCMS2GEN)lcms2_1.dev : $(TOP_MAKEFILES) $(LCMS2_MAK) $(ECHOGS_XE)
diff --git a/base/lcups.mak b/base/lcups.mak
-index 0bfb3ac..fcdb170 100644
+index 7504c44..7b9b979 100644
--- a/base/lcups.mak
+++ b/base/lcups.mak
-@@ -114,7 +114,7 @@ libcups.config-clean :
+@@ -116,7 +116,7 @@ libcups.config-clean :
# instantiate the requested build option (shared or compiled in)
$(LIBCUPSGEN)lcups.dev : $(TOP_MAKEFILES) $(LIBCUPSGEN)lcups_$(SHARE_LCUPS).dev
@@ -448,7 +451,7 @@ index 0bfb3ac..fcdb170 100644
# Define the shared version.
$(LIBCUPSGEN)lcups_1.dev : $(TOP_MAKEFILES) $(LCUPS_MAK) $(ECHOGS_XE)
-@@ -131,7 +131,7 @@ $(LIBCUPSGEN)lcups_0.dev : $(TOP_MAKEFILES) $(LCUPS_MAK) $(ECHOGS_XE) \
+@@ -133,7 +133,7 @@ $(LIBCUPSGEN)lcups_0.dev : $(TOP_MAKEFILES) $(LCUPS_MAK) $(ECHOGS_XE) \
# for simplicity we have every source file depend on all headers
$(LIBCUPSGEN)$(D)cups$(D)config.h : $(LCUPSSRCDIR)$(D)libs$(D)config$(LCUPSBUILDTYPE).h
@@ -457,7 +460,7 @@ index 0bfb3ac..fcdb170 100644
$(LIBCUPSOBJ)adminutil.$(OBJ) : $(LIBCUPSSRC)adminutil.c $(LIBSCUPSHEADERS) $(LIBCUPSGEN)$(D)cups$(D)config.h
$(LCUPS_CC) $(LCUPSO_)adminutil.$(OBJ) $(C_) $(LIBCUPSSRC)adminutil.c
-@@ -218,7 +218,7 @@ $(LIBCUPSOBJ)mark.$(OBJ) : $(LIBCUPSSRC)mark.c $(LIBSCUPSHEADERS)
+@@ -220,7 +220,7 @@ $(LIBCUPSOBJ)mark.$(OBJ) : $(LIBCUPSSRC)mark.c $(LIBSCUPSHEADERS)
$(LCUPS_CC) $(LCUPSO_)mark.$(OBJ) $(C_) $(LIBCUPSSRC)mark.c
$(LIBCUPSOBJ)cups_md5.$(OBJ) : $(LIBCUPSSRC)md5.c $(LIBSCUPSHEADERS)
@@ -466,7 +469,7 @@ index 0bfb3ac..fcdb170 100644
$(LCUPS_CC) $(LCUPSO_)cups_md5.$(OBJ) $(C_) $(LIBCUPSGEN)cups_md5.c
$(LIBCUPSOBJ)md5passwd.$(OBJ) : $(LIBCUPSSRC)md5passwd.c $(LIBSCUPSHEADERS)
-@@ -255,7 +255,7 @@ $(LIBCUPSOBJ)snmp.$(OBJ) : $(LIBCUPSSRC)snmp.c $(LIBSCUPSHEADERS)
+@@ -257,7 +257,7 @@ $(LIBCUPSOBJ)snmp.$(OBJ) : $(LIBCUPSSRC)snmp.c $(LIBSCUPSHEADERS)
$(LCUPS_CC) $(LCUPSO_)snmp.$(OBJ) $(C_) $(LIBCUPSSRC)snmp.c
$(LIBCUPSOBJ)cups_snpf.$(OBJ) : $(LIBCUPSSRC)snprintf.c $(LIBSCUPSHEADERS)
@@ -475,18 +478,18 @@ index 0bfb3ac..fcdb170 100644
$(LCUPS_CC) $(LCUPSO_)cups_snpf.$(OBJ) $(C_) $(LIBCUPSGEN)cups_snpf.c
$(LIBCUPSOBJ)string.$(OBJ) : $(LIBCUPSSRC)string.c $(LIBSCUPSHEADERS)
-@@ -271,5 +271,5 @@ $(LIBCUPSOBJ)usersys.$(OBJ) : $(LIBCUPSSRC)usersys.c $(LIBSCUPSHEADERS)
- $(LCUPS_CC) $(LCUPSO_)usersys.$(OBJ) $(C_) $(LIBCUPSSRC)usersys.c
+@@ -279,5 +279,5 @@ $(LIBCUPSOBJ)thread.$(OBJ) : $(LIBCUPSSRC)thread.c $(LIBSCUPSHEADERS)
+ $(LCUPS_CC) $(LCUPSO_)thread.$(OBJ) $(C_) $(LIBCUPSSRC)thread.c
$(LIBCUPSOBJ)cups_util.$(OBJ) : $(LIBCUPSSRC)util.c $(LIBSCUPSHEADERS)
- $(CP_) $(LIBCUPSSRC)util.c $(LIBCUPSGEN)cups_util.c
+ $(CP_) $(LIBCUPSSRC)util.c $(LIBCUPSGEN)cups_util.c || true
$(LCUPS_CC) $(LCUPSO_)cups_util.$(OBJ) $(C_) $(LIBCUPSGEN)cups_util.c
diff --git a/base/lcupsi.mak b/base/lcupsi.mak
-index ea4047f..0102d93 100644
+index da47da5..fa7d74c 100644
--- a/base/lcupsi.mak
+++ b/base/lcupsi.mak
-@@ -73,7 +73,7 @@ libcupsi.config-clean :
+@@ -59,7 +59,7 @@ libcupsi.config-clean :
# instantiate the requested build option (shared or compiled in)
$(LIBCUPSIGEN)lcupsi.dev : $(TOP_MAKEFILES) $(LIBCUPSIGEN)lcupsi_$(SHARE_LCUPSI).dev
@@ -509,10 +512,10 @@ index 3f50892..97c76a0 100644
# external link .dev
$(LDF_JB2_GEN)ldf_jb2_1.dev : $(TOP_MAKEFILES) $(LDF_JB2_MAK) $(ECHOGS_XE)
diff --git a/base/lib.mak b/base/lib.mak
-index 3ad7db3..87b4f01 100644
+index 90c9249..fb11aba 100644
--- a/base/lib.mak
+++ b/base/lib.mak
-@@ -343,7 +343,7 @@ md5_=$(GLOBJ)md5.$(OBJ)
+@@ -350,7 +350,7 @@ md5_=$(GLOBJ)md5.$(OBJ)
$(GLOBJ)md5.$(OBJ) : $(GLSRC)md5.c $(AK) $(md5_h) $(std_h) $(MAKEDIRS) $(EXP)$(ECHOGS_XE)
$(EXP)$(ECHOGS_XE) -w $(GLGEN)md5.h -x 23 include -x 2022 memory_.h -x 22
$(EXP)$(ECHOGS_XE) -a $(GLGEN)md5.h -+R $(GLSRC)md5.h
@@ -521,7 +524,7 @@ index 3ad7db3..87b4f01 100644
$(GLCC) $(GLO_)md5.$(OBJ) $(C_) $(GLGEN)md5.c
$(RM_) $(GLGEN)md5.c $(GLGEN)md5.h
-@@ -624,19 +624,19 @@ $(GLOBJ)gconfig.$(OBJ) : $(gconfig_h) $(GLSRC)gconf.c $(AK) $(gx_h)\
+@@ -631,19 +631,19 @@ $(GLOBJ)gconfig.$(OBJ) : $(gconfig_h) $(GLSRC)gconf.c $(AK) $(gx_h)\
$(gxdevice_h) $(gxiclass_h) $(gxiodev_h) $(gxiparam_h) $(TOP_MAKEFILES)\
$(MAKEDDIRS)
$(RM_) $(GLGEN)gconfig.c
@@ -544,7 +547,7 @@ index 3ad7db3..87b4f01 100644
$(GLCCAUX) $(C_) $(AUXO_)gscdefs.$(OBJ) $(AUX)gscdefs.c
$(GLOBJ)gxacpath.$(OBJ) : $(GLSRC)gxacpath.c $(AK) $(gx_h)\
-@@ -1527,7 +1527,7 @@ $(GLOBJ)sjpegc_0.$(OBJ) : $(GLSRC)sjpegc.c $(AK) $(stdio__h) $(string__h)\
+@@ -1535,7 +1535,7 @@ $(GLOBJ)sjpegc_0.$(OBJ) : $(GLSRC)sjpegc.c $(AK) $(stdio__h) $(string__h)\
$(GLJCC) $(GLO_)sjpegc_0.$(OBJ) $(C_) $(GLSRC)sjpegc.c
$(GLOBJ)sjpegc.$(OBJ) : $(GLOBJ)sjpegc_$(SHARE_JPEG).$(OBJ)
@@ -553,7 +556,7 @@ index 3ad7db3..87b4f01 100644
# sdcparam is used by the filter operator and the PS/PDF writer.
# It is not included automatically in sdcte/d.
-@@ -1555,7 +1555,7 @@ $(GLOBJ)sdcte_0.$(OBJ) : $(GLSRC)sdcte.c $(AK)\
+@@ -1563,7 +1563,7 @@ $(GLOBJ)sdcte_0.$(OBJ) : $(GLSRC)sdcte.c $(AK)\
$(GLJCC) $(GLO_)sdcte_0.$(OBJ) $(C_) $(GLSRC)sdcte.c
$(GLOBJ)sdcte.$(OBJ) : $(GLOBJ)sdcte_$(SHARE_JPEG).$(OBJ) $(MAKEDIRS)
@@ -562,7 +565,7 @@ index 3ad7db3..87b4f01 100644
$(GLOBJ)sjpege_1.$(OBJ) : $(GLSRC)sjpege.c $(AK)\
-@@ -1571,7 +1571,7 @@ $(GLOBJ)sjpege_0.$(OBJ) : $(GLSRC)sjpege.c $(AK)\
+@@ -1579,7 +1579,7 @@ $(GLOBJ)sjpege_0.$(OBJ) : $(GLSRC)sjpege.c $(AK)\
$(GLJCC) $(GLO_)sjpege_0.$(OBJ) $(C_) $(GLSRC)sjpege.c
$(GLOBJ)sjpege.$(OBJ) : $(GLOBJ)sjpege_$(SHARE_JPEG).$(OBJ) $(MAKEDIRS)
@@ -571,7 +574,7 @@ index 3ad7db3..87b4f01 100644
# sdeparam is used by the filter operator and the PS/PDF writer.
# It is not included automatically in sdcte.
-@@ -1603,7 +1603,7 @@ $(GLOBJ)sdctd_0.$(OBJ) : $(GLSRC)sdctd.c $(AK)\
+@@ -1611,7 +1611,7 @@ $(GLOBJ)sdctd_0.$(OBJ) : $(GLSRC)sdctd.c $(AK)\
$(GLJCC) $(GLO_)sdctd_0.$(OBJ) $(C_) $(GLSRC)sdctd.c
$(GLOBJ)sdctd.$(OBJ) : $(GLOBJ)sdctd_$(SHARE_JPEG).$(OBJ) $(MAKEDIRS)
@@ -580,7 +583,7 @@ index 3ad7db3..87b4f01 100644
$(GLOBJ)sjpegd_1.$(OBJ) : $(GLSRC)sjpegd.c $(AK)\
-@@ -1620,7 +1620,7 @@ $(GLOBJ)sjpegd_0.$(OBJ) : $(GLSRC)sjpegd.c $(AK)\
+@@ -1628,7 +1628,7 @@ $(GLOBJ)sjpegd_0.$(OBJ) : $(GLSRC)sjpegd.c $(AK)\
$(GLOBJ)sjpegd.$(OBJ) : $(GLOBJ)sjpegd_$(SHARE_JPEG).$(OBJ) $(MAKEDIRS)
@@ -589,7 +592,7 @@ index 3ad7db3..87b4f01 100644
# sddparam is used by the filter operator.
# It is not included automatically in sdctd.
-@@ -1643,7 +1643,7 @@ $(GLD)lzwe.dev : $(LIB_MAK) $(ECHOGS_XE) $(lzwe_)
+@@ -1651,7 +1651,7 @@ $(GLD)lzwe.dev : $(LIB_MAK) $(ECHOGS_XE) $(lzwe_)
# We need slzwe.dev as a synonym for lzwe.dev for BAND_LIST_STORAGE = memory.
$(GLD)slzwe.dev : $(GLD)lzwe.dev
@@ -598,7 +601,7 @@ index 3ad7db3..87b4f01 100644
$(GLOBJ)slzwe.$(OBJ) : $(GLSRC)slzwe.c $(AK) $(stdio__h) $(gdebug_h)\
$(slzwx_h) $(strimpl_h) $(MAKEDIRS)
-@@ -1659,7 +1659,7 @@ $(GLD)lzwd.dev : $(LIB_MAK) $(ECHOGS_XE) $(lzwd_)
+@@ -1667,7 +1667,7 @@ $(GLD)lzwd.dev : $(LIB_MAK) $(ECHOGS_XE) $(lzwd_)
# We need slzwd.dev as a synonym for lzwd.dev for BAND_LIST_STORAGE = memory.
$(GLD)slzwd.dev : $(GLD)lzwd.dev
@@ -607,7 +610,7 @@ index 3ad7db3..87b4f01 100644
$(GLOBJ)slzwd.$(OBJ) : $(GLSRC)slzwd.c $(AK) $(stdio__h) $(gdebug_h)\
$(slzwx_h) $(strimpl_h) $(MAKEDIRS)
-@@ -1709,7 +1709,7 @@ $(GLOBJ)saes.$(OBJ) : $(GLSRC)saes.c $(AK) $(memory__h)\
+@@ -1717,7 +1717,7 @@ $(GLOBJ)saes.$(OBJ) : $(GLSRC)saes.c $(AK) $(memory__h)\
# ---------------- JBIG2 compression filter ---------------- #
$(GLD)sjbig2.dev : $(LIB_MAK) $(ECHOGS_XE) $(GLD)sjbig2_$(JBIG2_LIB).dev
@@ -616,7 +619,7 @@ index 3ad7db3..87b4f01 100644
# jbig2dec version
sjbig2_jbig2dec=$(GLOBJ)sjbig2.$(OBJ)
-@@ -1745,7 +1745,7 @@ $(GLOBJ)sjbig2_luratech.$(OBJ) : $(GLSRC)sjbig2_luratech.c $(AK) \
+@@ -1753,7 +1753,7 @@ $(GLOBJ)sjbig2_luratech.$(OBJ) : $(GLSRC)sjbig2_luratech.c $(AK) \
# ---------------- JPEG 2000 compression filter ---------------- #
$(GLD)sjpx.dev : $(LIB_MAK) $(ECHOGS_XE) $(GLD)sjpx_$(JPX_LIB).dev
@@ -625,7 +628,7 @@ index 3ad7db3..87b4f01 100644
$(GLOBJ)sjpx.$(OBJ) : $(GLSRC)sjpx.c $(AK) \
$(memory__h) $(gsmalloc_h) \
-@@ -1874,7 +1874,7 @@ $(GLOBJ)szlibc_0.$(OBJ) : $(GLSRC)szlibc.c $(AK) $(std_h)\
+@@ -1882,7 +1882,7 @@ $(GLOBJ)szlibc_0.$(OBJ) : $(GLSRC)szlibc.c $(AK) $(std_h)\
$(GLZCC) $(GLO_)szlibc_0.$(OBJ) $(C_) $(GLSRC)szlibc.c
$(GLOBJ)szlibc.$(OBJ) : $(GLOBJ)szlibc_$(SHARE_ZLIB).$(OBJ) $(MAKEDIRS)
@@ -634,7 +637,7 @@ index 3ad7db3..87b4f01 100644
szlibe_=$(szlibc_) $(GLOBJ)szlibe.$(OBJ)
$(GLD)szlibe.dev : $(LIB_MAK) $(ECHOGS_XE) $(ZGENDIR)$(D)zlibe.dev $(szlibe_)
-@@ -1890,7 +1890,7 @@ $(GLOBJ)szlibe_0.$(OBJ) : $(GLSRC)szlibe.c $(AK) $(std_h)\
+@@ -1898,7 +1898,7 @@ $(GLOBJ)szlibe_0.$(OBJ) : $(GLSRC)szlibe.c $(AK) $(std_h)\
$(GLZCC) $(GLO_)szlibe_0.$(OBJ) $(C_) $(GLSRC)szlibe.c
$(GLOBJ)szlibe.$(OBJ) : $(GLOBJ)szlibe_$(SHARE_ZLIB).$(OBJ) $(MAKEDIRS)
@@ -643,7 +646,7 @@ index 3ad7db3..87b4f01 100644
szlibd_=$(szlibc_) $(GLOBJ)szlibd.$(OBJ)
$(GLD)szlibd.dev : $(LIB_MAK) $(ECHOGS_XE) $(ZGENDIR)$(D)zlibd.dev $(szlibd_)
-@@ -1906,7 +1906,7 @@ $(GLOBJ)szlibd_0.$(OBJ) : $(GLSRC)szlibd.c $(AK) $(std_h) $(memory__h)\
+@@ -1914,7 +1914,7 @@ $(GLOBJ)szlibd_0.$(OBJ) : $(GLSRC)szlibd.c $(AK) $(std_h) $(memory__h)\
$(GLZCC) $(GLO_)szlibd_0.$(OBJ) $(C_) $(GLSRC)szlibd.c
$(GLOBJ)szlibd.$(OBJ) : $(GLOBJ)szlibd_$(SHARE_ZLIB).$(OBJ) $(MAKEDIRS)
@@ -652,7 +655,7 @@ index 3ad7db3..87b4f01 100644
# ---------------- Page devices ---------------- #
# We include this here, rather than in devs.mak, because it is more like
-@@ -2796,7 +2796,7 @@ $(GLOBJ)gsicc_lcms_0.$(OBJ) : $(GLSRC)gsicc_lcms.c\
+@@ -2804,7 +2804,7 @@ $(GLOBJ)gsicc_lcms_0.$(OBJ) : $(GLSRC)gsicc_lcms.c\
$(GLLCMSCC) $(GLO_)gsicc_lcms_0.$(OBJ) $(C_) $(GLSRC)gsicc_lcms.c
$(GLOBJ)gsicc_lcms.$(OBJ) : $(GLOBJ)gsicc_lcms_$(SHARE_LCMS).$(OBJ) $(gp_h)
@@ -661,16 +664,16 @@ index 3ad7db3..87b4f01 100644
$(GLOBJ)gsicc_lcms2_1.$(OBJ) : $(GLSRC)gsicc_lcms2.c\
-@@ -2808,7 +2808,7 @@ $(GLOBJ)gsicc_lcms2_0.$(OBJ) : $(GLSRC)gsicc_lcms2.c\
- $(GLLCMS2CC) $(GLO_)gsicc_lcms2_0.$(OBJ) $(C_) $(GLSRC)gsicc_lcms2.c
+@@ -2817,7 +2817,7 @@ $(GLOBJ)gsicc_lcms2_0.$(OBJ) : $(GLSRC)gsicc_lcms2.c\
- $(GLOBJ)gsicc_lcms2.$(OBJ) : $(GLOBJ)gsicc_lcms2_$(SHARE_LCMS).$(OBJ) $(gp_h)
+ $(GLOBJ)gsicc_lcms2.$(OBJ) : $(GLOBJ)gsicc_lcms2_$(SHARE_LCMS).$(OBJ) $(gp_h) \
+ $(gxsync_h)
- $(CP_) $(GLOBJ)gsicc_lcms2_$(SHARE_LCMS).$(OBJ) $(GLOBJ)gsicc_lcms2.$(OBJ)
+ $(CP_) $(GLOBJ)gsicc_lcms2_$(SHARE_LCMS).$(OBJ) $(GLOBJ)gsicc_lcms2.$(OBJ) || true
# Note that gsicc_create requires compile with lcms to obtain icc34.h
# header file that is used for creating ICC structures from PS objects.
-@@ -2828,7 +2828,7 @@ $(GLOBJ)gsicc_create_0.$(OBJ) : $(GLSRC)gsicc_create.c $(AK) $(string__h)\
+@@ -2837,7 +2837,7 @@ $(GLOBJ)gsicc_create_0.$(OBJ) : $(GLSRC)gsicc_create.c $(AK) $(string__h)\
$(GLLCMSCC) $(GLO_)gsicc_create_0.$(OBJ) $(C_) $(GLSRC)gsicc_create.c
$(GLOBJ)gsicc_create.$(OBJ) : $(GLOBJ)gsicc_create_$(SHARE_LCMS).$(OBJ) $(MAKEDIRS)
@@ -679,7 +682,7 @@ index 3ad7db3..87b4f01 100644
#include "icc34.h" /* Note this header is needed even if lcms is not compiled as default CMS */
-@@ -3132,7 +3132,7 @@ $(GLGEN)gsromfs1_1.c : $(MKROMFS_XE) $(PS_ROMFS_DEPS) $(MAKEDIRS)
+@@ -3141,7 +3141,7 @@ $(GLGEN)gsromfs1_1.c : $(MKROMFS_XE) $(PS_ROMFS_DEPS) $(MAKEDIRS)
$(PS_ROMFS_ARGS) $(GL_ROMFS_ARGS)
$(GLGEN)gsromfs1.c : $(GLGEN)gsromfs1_$(UFST_BRIDGE).c $(MAKEDIRS)
@@ -688,7 +691,7 @@ index 3ad7db3..87b4f01 100644
# the following module is only included if the romfs.dev FEATURE is enabled
$(GLOBJ)gsiorom_1.$(OBJ) : $(GLSRC)gsiorom.c $(gsiorom_h) \
-@@ -3148,7 +3148,7 @@ $(GLOBJ)gsiorom_0.$(OBJ) : $(GLSRC)gsiorom.c $(gsiorom_h) \
+@@ -3157,7 +3157,7 @@ $(GLOBJ)gsiorom_0.$(OBJ) : $(GLSRC)gsiorom.c $(gsiorom_h) \
$(GLCC) $(GLO_)gsiorom_0.$(OBJ) $(I_)$(ZI_)$(_I) $(C_) $(GLSRC)gsiorom.c
$(GLOBJ)gsiorom.$(OBJ) : $(GLOBJ)gsiorom_$(SHARE_ZLIB).$(OBJ) $(MAKEDIRS)
@@ -711,7 +714,7 @@ index ca1b7cc..ad6446d 100644
# external link .dev
$(LWF_JP2_GEN)lwf_jp2_1.dev : $(TOP_MAKEFILES) $(LWF_JP2_MAK) $(ECHOGS_XE)
diff --git a/base/macos-mcp.mak b/base/macos-mcp.mak
-index 4ded7f3..3599ed9 100644
+index 38a2937..2ab7cae 100644
--- a/base/macos-mcp.mak
+++ b/base/macos-mcp.mak
@@ -383,11 +383,11 @@ CWPROJ_XML=./ghostscript.mcp.xml
@@ -731,7 +734,7 @@ index 4ded7f3..3599ed9 100644
$(GS_XE): $(ld_tr) $(ECHOGS_XE) $(XE_ALL) $(CWPROJ_XML) $(PSOBJ)gsromfs$(COMPILE_INITS).$(OBJ)
diff --git a/base/openjpeg.mak b/base/openjpeg.mak
-index 32be240..4a6b429 100644
+index b194709..dac7901 100644
--- a/base/openjpeg.mak
+++ b/base/openjpeg.mak
@@ -99,7 +99,7 @@ open_jpeg_HDRS = \
@@ -744,7 +747,7 @@ index 32be240..4a6b429 100644
# external link .dev
$(OPEN_JPEG_GEN)openjpeg_1.dev : $(TOP_MAKEFILES) $(OPEN_JPEG_MAK) $(ECHOGS_XE)
diff --git a/base/openvms.mak b/base/openvms.mak
-index e5ae40b..286ee84 100644
+index 1ea06fe..84378a4 100644
--- a/base/openvms.mak
+++ b/base/openvms.mak
@@ -365,7 +365,7 @@ SH=
@@ -757,7 +760,7 @@ index e5ae40b..286ee84 100644
# Define the command for deleting (a) file(s) (including wild cards)
diff --git a/base/png.mak b/base/png.mak
-index f01c426..8a89884 100644
+index 1725ddb..0d54b80 100644
--- a/base/png.mak
+++ b/base/png.mak
@@ -79,7 +79,7 @@ png.config-clean :
@@ -772,7 +775,7 @@ index f01c426..8a89884 100644
@@ -134,7 +134,7 @@ $(PNGOBJ)pngget.$(OBJ) : $(PNGSRC)pngget.c $(PDEP)
# Define the version of libpng.dev that we are actually using.
- $(PNGGEN)libpng.dev : $(TOP_MAKEFILES) $(PNGGEN)libpng_$(SHARE_LIBPNG).dev
+ $(PNGGEN)libpng.dev : $(TOP_MAKEFILES) $(PNGGEN)libpng_$(SHARE_LIBPNG).dev $(MAKEDIRS)
- $(CP_) $(PNGGEN)libpng_$(SHARE_LIBPNG).dev $(PNGGEN)libpng.dev
+ $(CP_) $(PNGGEN)libpng_$(SHARE_LIBPNG).dev $(PNGGEN)libpng.dev || true
@@ -841,7 +844,7 @@ index 6aa3599..70a61f8 100644
RMN_=rm -f
diff --git a/base/zlib.mak b/base/zlib.mak
-index 6c50605..7fb8c9b 100644
+index 78d398a..3d48145 100644
--- a/base/zlib.mak
+++ b/base/zlib.mak
@@ -83,7 +83,7 @@ $(ZOBJ)zutil.$(OBJ) : $(ZSRC)zutil.c $(ZDEP)
@@ -865,17 +868,17 @@ index 6c50605..7fb8c9b 100644
@@ -127,7 +127,7 @@ $(ZOBJ)crc32.$(OBJ) : $(ZSRC)crc32.c $(ZDEP)
# Decoding (decompression) code.
- $(ZGEN)zlibd.dev : $(TOP_MAKEFILES) $(ZGEN)zlibd_$(SHARE_ZLIB).dev
+ $(ZGEN)zlibd.dev : $(TOP_MAKEFILES) $(ZGEN)zlibd_$(SHARE_ZLIB).dev $(MAKEDIRS)
- $(CP_) $(ZGEN)zlibd_$(SHARE_ZLIB).dev $(ZGEN)zlibd.dev
+ $(CP_) $(ZGEN)zlibd_$(SHARE_ZLIB).dev $(ZGEN)zlibd.dev || true
$(ZGEN)zlibd_1.dev : $(TOP_MAKEFILES) $(ZLIB_MAK) $(ECHOGS_XE)
$(SETMOD) $(ZGEN)zlibd_1 -lib $(ZLIB_NAME)
diff --git a/devices/devs.mak b/devices/devs.mak
-index bc22594..4bd0a11 100644
+index 0a668d4..a19082e 100644
--- a/devices/devs.mak
+++ b/devices/devs.mak
-@@ -1555,7 +1555,7 @@ libpng_dev=$(PNGGENDIR)$(D)libpng.dev
+@@ -1547,7 +1547,7 @@ libpng_dev=$(PNGGENDIR)$(D)libpng.dev
png_i_=-include $(PNGGENDIR)$(D)libpng
$(DEVOBJ)gdevpng.$(OBJ) : $(DEVSRC)gdevpng.c\
@@ -885,7 +888,7 @@ index bc22594..4bd0a11 100644
$(DD)pngmono.dev : $(DEVS_MAK) $(libpng_dev) $(png_) $(GLD)page.dev $(GDEV)
diff --git a/psi/int.mak b/psi/int.mak
-index 62cba0e..a1e963a 100644
+index 7d36e66..71b88f5 100644
--- a/psi/int.mak
+++ b/psi/int.mak
@@ -274,7 +274,7 @@ $(PSOBJ)iconfig.$(OBJ) : $(gconfig_h) $(PSSRC)iconf.c $(stdio__h)\
@@ -925,5 +928,5 @@ index 62cba0e..a1e963a 100644
fjpx_luratech=$(PSOBJ)zfjpx_luratech.$(OBJ)
--
-1.8.1.2
+1.9.1
diff --git a/meta/recipes-extended/ghostscript/ghostscript_9.14.bb b/meta/recipes-extended/ghostscript/ghostscript_9.15.bb
index 2e892f3e10..ee5483c20e 100644
--- a/meta/recipes-extended/ghostscript/ghostscript_9.14.bb
+++ b/meta/recipes-extended/ghostscript/ghostscript_9.15.bb
@@ -23,6 +23,7 @@ SRC_URI = "${SRC_URI_BASE} \
file://ghostscript-9.02-genarch.patch \
file://objarch.h \
file://ghostscript-9.02-parallel-make.patch \
+ file://cups-no-gcrypt.patch \
"
SRC_URI_class-native = "${SRC_URI_BASE} \
@@ -30,8 +31,8 @@ SRC_URI_class-native = "${SRC_URI_BASE} \
file://base-genht.c-add-a-preprocessor-define-to-allow-fope.patch \
"
-SRC_URI[md5sum] = "586494befb443363338c1b6379f13973"
-SRC_URI[sha256sum] = "ab2ba5ce11c8db396c9acf774a497182d7686d04670976cc3e690ada7db9f0d4"
+SRC_URI[md5sum] = "5a78ab0990ff6ec3a103576bc8777c46"
+SRC_URI[sha256sum] = "27f11e4fe5b89857ae745687281d1e4daf9681edc858a3f7e8e77ef09609777a"
EXTRA_OECONF = "--without-x --with-system-libtiff --without-jbig2dec \
--with-fontpath=${datadir}/fonts \
diff --git a/meta/recipes-extended/grep/grep_2.19.bb b/meta/recipes-extended/grep/grep_2.21.bb
index f38a9a6171..2c378d8442 100644
--- a/meta/recipes-extended/grep/grep_2.19.bb
+++ b/meta/recipes-extended/grep/grep_2.21.bb
@@ -7,8 +7,8 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=8006d9c814277c1bfc4ca22af94b59ee"
SRC_URI = "${GNU_MIRROR}/grep/grep-${PV}.tar.xz"
-SRC_URI[md5sum] = "ac732142227d9fe9567d71301e127979"
-SRC_URI[sha256sum] = "6388295be48cfcaf7665d9cd3914e6625ea000e9414132bfefd45cf1d8eec34d"
+SRC_URI[md5sum] = "43c48064d6409862b8a850db83c8038a"
+SRC_URI[sha256sum] = "5244a11c00dee8e7e5e714b9aaa053ac6cbfa27e104abee20d3c778e4bb0e5de"
inherit autotools gettext texinfo
@@ -36,3 +36,4 @@ ALTERNATIVE_LINK_NAME[grep] = "${base_bindir}/grep"
ALTERNATIVE_LINK_NAME[egrep] = "${base_bindir}/egrep"
ALTERNATIVE_LINK_NAME[fgrep] = "${base_bindir}/fgrep"
+export CONFIG_SHELL="/bin/sh"
diff --git a/meta/recipes-extended/grep/grep_2.5.1a.bb b/meta/recipes-extended/grep/grep_2.5.1a.bb
index 79842baac8..1ce112e43d 100644
--- a/meta/recipes-extended/grep/grep_2.5.1a.bb
+++ b/meta/recipes-extended/grep/grep_2.5.1a.bb
@@ -47,3 +47,5 @@ ALTERNATIVE_${PN} = "grep egrep fgrep"
ALTERNATIVE_LINK_NAME[grep] = "${base_bindir}/grep"
ALTERNATIVE_LINK_NAME[egrep] = "${base_bindir}/egrep"
ALTERNATIVE_LINK_NAME[fgrep] = "${base_bindir}/fgrep"
+
+export CONFIG_SHELL="/bin/sh"
diff --git a/meta/recipes-extended/gzip/gzip-1.6/wrong-path-fix.patch b/meta/recipes-extended/gzip/gzip-1.6/wrong-path-fix.patch
new file mode 100644
index 0000000000..92863d6c4b
--- /dev/null
+++ b/meta/recipes-extended/gzip/gzip-1.6/wrong-path-fix.patch
@@ -0,0 +1,31 @@
+fix generated scripts using the wrong SHELL/GREP paths
+
+A set of substitutions is applied to all the generated target scripts with
+sed, replacing certain keywords with the values detected at configure time.
+This is not compatible with cross-compiling and causes missing-path errors
+at run time, for example:
+"/usr/bin/zgrep: line 230: /usr/bin/grep: No such file or directory"
+
+Fixed by removing the unneeded substitution and using the real run-time
+paths instead.
+
+Signed-off-by: Ming Liu <ming.liu@windriver.com>
+
+Upstream-Status: Pending
+
+Index: gzip-1.3.12/Makefile.am
+===================================================================
+diff -urpN a/Makefile.am b/Makefile.am
+--- a/Makefile.am 2013-11-30 10:06:09.402234871 +0800
++++ b/Makefile.am 2013-11-30 10:13:42.952236025 +0800
+@@ -81,9 +81,8 @@ gzip.doc.gz: gzip.doc $(bin_PROGRAMS)
+ SUFFIXES = .in
+ .in:
+ $(AM_V_GEN)sed \
+- -e 's|/bin/sh|$(SHELL)|g' \
+ -e 's|[@]bindir@|'\''$(bindir)'\''|g' \
+- -e 's|[@]GREP@|$(GREP)|g' \
++ -e 's|[@]GREP@|$(base_bindir)/grep|g' \
+ -e 's|[@]VERSION@|$(VERSION)|g' \
+ $(srcdir)/$@.in >$@-t \
+ && chmod a+x $@-t \
diff --git a/meta/recipes-extended/gzip/gzip.inc b/meta/recipes-extended/gzip/gzip.inc
index eeeafe02c3..b90856e796 100644
--- a/meta/recipes-extended/gzip/gzip.inc
+++ b/meta/recipes-extended/gzip/gzip.inc
@@ -9,6 +9,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
file://gzip.h;beginline=8;endline=20;md5=6e47caaa630e0c8bf9f1bc8d94a8ed0e"
SRC_URI = "${GNU_MIRROR}/gzip/${BP}.tar.gz"
+SRC_URI_append_class-target = " file://wrong-path-fix.patch"
inherit autotools texinfo
@@ -30,3 +31,5 @@ ALTERNATIVE_${PN} = "gunzip gzip zcat"
ALTERNATIVE_LINK_NAME[gunzip] = "${base_bindir}/gunzip"
ALTERNATIVE_LINK_NAME[gzip] = "${base_bindir}/gzip"
ALTERNATIVE_LINK_NAME[zcat] = "${base_bindir}/zcat"
+
+export CONFIG_SHELL="/bin/sh"
diff --git a/meta/recipes-extended/hdparm/hdparm_9.43.bb b/meta/recipes-extended/hdparm/hdparm_9.45.bb
index 805f76a6b3..d11412d40c 100644
--- a/meta/recipes-extended/hdparm/hdparm_9.43.bb
+++ b/meta/recipes-extended/hdparm/hdparm_9.45.bb
@@ -18,8 +18,8 @@ RDEPENDS_wiper = "bash gawk stat"
SRC_URI = "${SOURCEFORGE_MIRROR}/hdparm/hdparm-${PV}.tar.gz "
-SRC_URI[md5sum] = "f73233be118d86c779a8463d8b6a3cdb"
-SRC_URI[sha256sum] = "2bbe92274971182192901ab220e94bd2e4896f924fa6b225d0cffd7d8c16b52a"
+SRC_URI[md5sum] = "1c75d0751a44928b6c4bc81fb16d7fe8"
+SRC_URI[sha256sum] = "23b01caa56a995cf0897877b6aff98ea622a5df255bc2894b1a7693387f38669"
EXTRA_OEMAKE += 'STRIP="echo"'
diff --git a/meta/recipes-extended/images/core-image-kernel-dev.bb b/meta/recipes-extended/images/core-image-kernel-dev.bb
new file mode 100644
index 0000000000..d14f658726
--- /dev/null
+++ b/meta/recipes-extended/images/core-image-kernel-dev.bb
@@ -0,0 +1,17 @@
+DESCRIPTION = "A development image that builds the kernel and packages that are \
+sensitive to kernel updates and version changes"
+
+# Could also be core-image-basic, but we'll keep this small for now
+require recipes-core/images/core-image-minimal.bb
+
+KERNEL_DEV_UTILS ?= "dropbear"
+KERNEL_DEV_TOOLS ?= "packagegroup-core-tools-profile packagegroup-core-buildessential kernel-devsrc"
+KERNEL_DEV_MODULE ?= ""
+
+CORE_IMAGE_EXTRA_INSTALL += "${KERNEL_DEV_MODULE} \
+ ${KERNEL_DEV_UTILS} \
+ ${KERNEL_DEV_TOOLS} \
+ "
+
+# We need extra space for things like kernel builds, etc.
+IMAGE_ROOTFS_EXTRA_SPACE_append += "+ 3000000"
diff --git a/meta/recipes-extended/less/less_458.bb b/meta/recipes-extended/less/less_471.bb
index 3e4518a339..81d354ccf0 100644
--- a/meta/recipes-extended/less/less_458.bb
+++ b/meta/recipes-extended/less/less_471.bb
@@ -26,8 +26,8 @@ DEPENDS = "ncurses"
SRC_URI = "http://www.greenwoodsoftware.com/${BPN}/${BPN}-${PV}.tar.gz"
-SRC_URI[md5sum] = "935b38aa2e73c888c210dedf8fd94f49"
-SRC_URI[sha256sum] = "e536c7819ede54b3d487f0ffc4c14b3620bed83734d92a81e89f62346db0fcac"
+SRC_URI[md5sum] = "9a40d29a2d84b41f9f36d7dd90b4f950"
+SRC_URI[sha256sum] = "37f613fa9a526378788d790a92217d59b523574cf7159f6538da8564b3fb27f8"
inherit autotools update-alternatives
diff --git a/meta/recipes-extended/libaio/libaio/libaio_fix_for_mips64.patch b/meta/recipes-extended/libaio/libaio/libaio_fix_for_mips64.patch
new file mode 100644
index 0000000000..0ef9f147be
--- /dev/null
+++ b/meta/recipes-extended/libaio/libaio/libaio_fix_for_mips64.patch
@@ -0,0 +1,42 @@
+From 62fd97fbc5c53835baa18f210fca593fc8b5c636 Mon Sep 17 00:00:00 2001
+From: Jianchuan Wang <jianchuan.wang@windriver.com>
+Date: Wed, 15 Oct 2014 07:04:02 +0800
+Subject: [PATCH] libaio: fix for mips64
+
+Add mips64 support to libaio.h:
+ - add the PADDED/PADDEDptr/PADDEDul macros for mips64 so that the iocb
+   structure layout in userland matches the kernel's
+
+Upstream-Status: Backport
+
+Signed-off-by: Jianchuan Wang <jianchuan.wang@windriver.com>
+---
+ src/libaio.h | 12 ++++++++++++
+ 1 file changed, 12 insertions(+)
+
+diff --git a/src/libaio.h b/src/libaio.h
+index ff99188..64ea8f3 100644
+--- a/src/libaio.h
++++ b/src/libaio.h
+@@ -95,6 +95,18 @@ typedef enum io_iocb_cmd {
+ #define PADDED(x, y) unsigned y; x
+ #define PADDEDptr(x, y) unsigned y; x
+ #define PADDEDul(x, y) unsigned y; unsigned long x
++#elif defined(__mips64)
++# if defined (__MIPSEB__) /* big endian, 64 bits */
++#define PADDED(x, y) unsigned y; x
++#define PADDEDptr(x,y) x
++#define PADDEDul(x, y) unsigned long x
++# elif defined(__MIPSEL__) /* little endian, 64 bits */
++#define PADDED(x, y) x, y
++#define PADDEDptr(x, y) x
++#define PADDEDul(x, y) unsigned long x
++# else
++# error "mips64: neither mipseb nor mipsel?"
++# endif
+ #elif defined(__mips__)
+ # if defined (__MIPSEB__) /* big endian, 32 bits */
+ #define PADDED(x, y) unsigned y; x
+--
+1.8.2.1
+
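As an aside, here is a minimal sketch (not part of the patch) of what these
PADDED-style macros accomplish: a 32-bit field is padded out to a full 64-bit
slot, with the pad placed before or after the field depending on endianness,
so the userland struct layout matches the kernel ABI. The struct and field
names below are made up, and the endianness test uses GCC's generic
__BYTE_ORDER__ macro rather than the MIPS-specific checks the patch adds.

    /* demo only: how a PADDED macro keeps a 32-bit field in a 64-bit slot */
    #include <stdio.h>
    #include <stddef.h>

    #if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__   /* big endian, 64 bits */
    #define PADDED(x, y)    unsigned y; x        /* pad word first */
    #else                                        /* little endian, 64 bits */
    #define PADDED(x, y)    x; unsigned y        /* pad word last */
    #endif

    struct demo_iocb {
            PADDED(int demo_fildes, pad1);       /* 4-byte field + 4-byte pad */
            long long demo_nbytes;               /* plain 64-bit field */
    };

    int main(void)
    {
            printf("sizeof  = %zu\n", sizeof(struct demo_iocb));               /* 16 */
            printf("nbytes @ %zu\n", offsetof(struct demo_iocb, demo_nbytes)); /*  8 */
            return 0;
    }

Either way the structure is 16 bytes with the 64-bit field at offset 8, which
is what keeps the layout in step with the kernel on both byte orders.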
diff --git a/meta/recipes-extended/libaio/libaio_0.3.109.bb b/meta/recipes-extended/libaio/libaio_0.3.109.bb
index 978f5f5fb5..0483c711b9 100644
--- a/meta/recipes-extended/libaio/libaio_0.3.109.bb
+++ b/meta/recipes-extended/libaio/libaio_0.3.109.bb
@@ -15,6 +15,7 @@ SRC_URI = "${DEBIAN_MIRROR}/main/liba/libaio/libaio_${PV}.orig.tar.gz \
file://libaio-generic.patch \
file://libaio-aarch64.patch \
file://libaio_fix_for_mips_syscalls.patch \
+ file://libaio_fix_for_mips64.patch \
"
SRC_URI[md5sum] = "435a5b16ca6198eaf01155263d855756"
diff --git a/meta/recipes-extended/libtirpc/libtirpc_0.2.4.bb b/meta/recipes-extended/libtirpc/libtirpc_0.2.5.bb
index 88e2d820ee..3edf002497 100644
--- a/meta/recipes-extended/libtirpc/libtirpc_0.2.4.bb
+++ b/meta/recipes-extended/libtirpc/libtirpc_0.2.5.bb
@@ -10,15 +10,15 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=f835cce8852481e4b2bbbdd23b5e47f3 \
DEPENDS += "xz-native"
PROVIDES = "virtual/librpc"
-SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BPN}-${PV}.tar.bz2;name=libtirpc \
+SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BP}.tar.bz2;name=libtirpc \
${GENTOO_MIRROR}/${BPN}-glibc-nfs.tar.xz;name=glibc-nfs \
file://libtirpc-0.2.1-fortify.patch \
"
SRC_URI_append_libc-uclibc = " file://remove-des-uclibc.patch"
-SRC_URI[libtirpc.md5sum] = "847995e8d002cbf1387bda05947be086"
-SRC_URI[libtirpc.sha256sum] = "45c3e21dfc23a5ba501f9dfc6671678316fdfdb8355a1ec404ae2aa2f81943a1"
+SRC_URI[libtirpc.md5sum] = "8cd41a5ef5a9b50d0fb6abb98af15368"
+SRC_URI[libtirpc.sha256sum] = "62f9de7c2c8686c568757730e1fef66502a0e00d6cacf33546d0267984e002db"
SRC_URI[glibc-nfs.md5sum] = "5ae500b9d0b6b72cb875bc04944b9445"
SRC_URI[glibc-nfs.sha256sum] = "2677cfedf626f3f5a8f6e507aed5bb8f79a7453b589d684dbbc086e755170d83"
diff --git a/meta/recipes-extended/lighttpd/lighttpd_1.4.35.bb b/meta/recipes-extended/lighttpd/lighttpd_1.4.35.bb
index 0acc37d94f..20d5c1caa0 100644
--- a/meta/recipes-extended/lighttpd/lighttpd_1.4.35.bb
+++ b/meta/recipes-extended/lighttpd/lighttpd_1.4.35.bb
@@ -51,7 +51,7 @@ SYSTEMD_SERVICE_${PN} = "lighttpd.service"
do_install_append() {
install -d ${D}${sysconfdir}/init.d ${D}${sysconfdir}/lighttpd.d ${D}/www/pages/dav
install -m 0755 ${WORKDIR}/lighttpd ${D}${sysconfdir}/init.d
- install -m 0755 ${WORKDIR}/lighttpd.conf ${D}${sysconfdir}
+ install -m 0644 ${WORKDIR}/lighttpd.conf ${D}${sysconfdir}
install -m 0644 ${WORKDIR}/index.html.lighttpd ${D}/www/pages/index.html
install -d ${D}${systemd_unitdir}/system
diff --git a/meta/recipes-extended/logrotate/logrotate/disable-check-different-filesystems.patch b/meta/recipes-extended/logrotate/logrotate/disable-check-different-filesystems.patch
index 43068bdbd7..43ebcffbae 100644
--- a/meta/recipes-extended/logrotate/logrotate/disable-check-different-filesystems.patch
+++ b/meta/recipes-extended/logrotate/logrotate/disable-check-different-filesystems.patch
@@ -7,19 +7,20 @@ Upstream-Status: Submitted
Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
---
- config.c | 8 --------
- 1 files changed, 0 insertions(+), 8 deletions(-)
+ config.c | 9 ---------
+ 1 file changed, 9 deletions(-)
diff --git a/config.c b/config.c
-index a85d1df..24575b3 100644
+index e0eadb7..c23092f 100644
--- a/config.c
+++ b/config.c
-@@ -1453,14 +1453,6 @@ static int readConfigFile(const char *configFile, struct logInfo *defConfig)
+@@ -1515,15 +1515,6 @@ static int readConfigFile(const char *configFile, struct logInfo *defConfig)
dirName, strerror(errno));
goto error;
}
-
-- if (sb.st_dev != sb2.st_dev) {
+- if (sb.st_dev != sb2.st_dev
+- && !(newlog->flags & (LOG_FLAG_COPYTRUNCATE | LOG_FLAG_COPY))) {
- message(MESS_ERROR,
- "%s:%d olddir %s and log file %s "
- "are on different devices\n", configFile,
@@ -30,5 +31,4 @@ index a85d1df..24575b3 100644
}
--
-1.7.4.1
-
+1.7.9.5
diff --git a/meta/recipes-extended/logrotate/logrotate_3.8.7.bb b/meta/recipes-extended/logrotate/logrotate_3.8.8.bb
index 3a6a2287db..133f6d7038 100644
--- a/meta/recipes-extended/logrotate/logrotate_3.8.7.bb
+++ b/meta/recipes-extended/logrotate/logrotate_3.8.8.bb
@@ -9,14 +9,36 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=18810669f13b87348459e611d31ab760"
SRC_URI = "https://fedorahosted.org/releases/l/o/logrotate/logrotate-${PV}.tar.gz \
file://act-as-mv-when-rotate.patch \
- file://disable-check-different-filesystems.patch \
file://update-the-manual.patch \
+ file://disable-check-different-filesystems.patch \
"
-SRC_URI[md5sum] = "99e08503ef24c3e2e3ff74cc5f3be213"
-SRC_URI[sha256sum] = "f6ba691f40e30e640efa2752c1f9499a3f9738257660994de70a45fe00d12b64"
+SRC_URI[md5sum] = "49846e873dddea15964cd0355b9943ca"
+SRC_URI[sha256sum] = "46a1510ef4a1f4359edd5f361112cfd1523942e85ff28e6cbb0c81bad1829d0f"
+
+PACKAGECONFIG ?= "\
+ ${@base_contains('DISTRO_FEATURES', 'acl', 'acl', '', d)} \
+ ${@base_contains('DISTRO_FEATURES', 'selinux', 'selinux', '', d)} \
+"
+
+# If RPM_OPT_FLAGS is unset, it adds -g itself rather than obeying our
+# optimization variables, so use it rather than EXTRA_CFLAGS.
+EXTRA_OEMAKE = "\
+ LFS= \
+ OS_NAME='${OS_NAME}' \
+ \
+ 'CC=${CC}' \
+ 'RPM_OPT_FLAGS=${CFLAGS}' \
+ 'EXTRA_LDFLAGS=${LDFLAGS}' \
+ \
+ ${@base_contains('PACKAGECONFIG', 'acl', 'WITH_ACL=yes', '', d)} \
+ ${@base_contains('PACKAGECONFIG', 'selinux', 'WITH_SELINUX=yes', '', d)} \
+"
-EXTRA_OEMAKE = ""
+# OS_NAME in the makefile defaults to `uname -s`. The behavior for
+# freebsd/netbsd is questionable, so leave it as Linux, which only sets
+# INSTALL=install and BASEDIR=/usr.
+OS_NAME = "Linux"
do_compile_prepend() {
# Make sure the recompile is OK
@@ -24,7 +46,7 @@ do_compile_prepend() {
}
do_install(){
- oe_runmake install DESTDIR=${D} PREFIX=${D} MANDIR=${mandir}
+ oe_runmake install DESTDIR=${D} PREFIX=${D} MANDIR=${mandir} BINDIR=${bindir}
mkdir -p ${D}${sysconfdir}/logrotate.d
mkdir -p ${D}${sysconfdir}/cron.daily
mkdir -p ${D}${localstatedir}/lib
diff --git a/meta/recipes-extended/lsb/lsb_4.1.bb b/meta/recipes-extended/lsb/lsb_4.1.bb
index d265731e04..b4ef98eea7 100644
--- a/meta/recipes-extended/lsb/lsb_4.1.bb
+++ b/meta/recipes-extended/lsb/lsb_4.1.bb
@@ -5,7 +5,7 @@ LICENSE = "GPLv2+"
PR = "r2"
# lsb_release needs getopt
-RDEPENDS_${PN} += "util-linux"
+RDEPENDS_${PN} += "${VIRTUAL-RUNTIME_getopt}"
LIC_FILES_CHKSUM = "file://README;md5=12da544b1a3a5a1795a21160b49471cf"
@@ -21,6 +21,8 @@ SRC_URI[md5sum] = "30537ef5a01e0ca94b7b8eb6a36bb1e4"
SRC_URI[sha256sum] = "99321288f8d62e7a1d485b7c6bdccf06766fb8ca603c6195806e4457fdf17172"
S = "${WORKDIR}/lsb-release-1.4"
+CLEANBROKEN = "1"
+
do_install(){
oe_runmake install prefix=${D} mandir=${D}/${datadir}/man/ DESTDIR=${D}
@@ -119,3 +121,11 @@ FILES_${PN} += "/lib64 \
${base_libdir}/lsb/* \
${libdir}/sendmail \
"
+
+# The sysroot/${libdir}/sendmail conflicts with esmtp's, and it's a
+# symlink to ${sbindir}/sendmail which is meaningless for sysroot, so
+# remove it.
+SYSROOT_PREPROCESS_FUNCS += "remove_sysroot_sendmail"
+remove_sysroot_sendmail() {
+ rm -r "${SYSROOT_DESTDIR}${libdir}/sendmail"
+}
diff --git a/meta/recipes-extended/lsb/lsbinitscripts_9.55.bb b/meta/recipes-extended/lsb/lsbinitscripts_9.56.1.bb
index 6ab52e3f17..cf3a863e1d 100644
--- a/meta/recipes-extended/lsb/lsbinitscripts_9.55.bb
+++ b/meta/recipes-extended/lsb/lsbinitscripts_9.56.1.bb
@@ -6,12 +6,12 @@ DEPENDS = "popt glib-2.0"
LIC_FILES_CHKSUM = "file://COPYING;md5=ebf4e8b49780ab187d51bd26aaa022c6"
S="${WORKDIR}/initscripts-${PV}"
-SRC_URI = "http://pkgs.fedoraproject.org/repo/pkgs/initscripts/initscripts-9.55.tar.bz2/0672f648a9ee8607a2df65835c54f5e5/initscripts-9.55.tar.bz2 \
+SRC_URI = "http://pkgs.fedoraproject.org/repo/pkgs/initscripts/initscripts-9.56.1.tar.bz2/8ca2abb3877e8019a5e726c25501e8e3/initscripts-9.56.1.tar.bz2 \
file://functions.patch \
"
-SRC_URI[md5sum] = "0672f648a9ee8607a2df65835c54f5e5"
-SRC_URI[sha256sum] = "546d4403a4efa3c4fa6de06a195013d4e64738799c2c779e56d900e7b232a9fa"
+SRC_URI[md5sum] = "8ca2abb3877e8019a5e726c25501e8e3"
+SRC_URI[sha256sum] = "e6fbe1daa5cbfc6fab12ccac2955bde0c16ec8d9fbdb9f7c6c33fadc81da6574"
inherit update-alternatives
diff --git a/meta/recipes-extended/lsb/lsbtest/LSB_Test.sh b/meta/recipes-extended/lsb/lsbtest/LSB_Test.sh
index ab79985b1b..b9971a17d1 100644
--- a/meta/recipes-extended/lsb/lsbtest/LSB_Test.sh
+++ b/meta/recipes-extended/lsb/lsbtest/LSB_Test.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
# Copyright (C) 2012 Wind River Systems, Inc.
#
diff --git a/meta/recipes-extended/lsof/lsof_4.87.bb b/meta/recipes-extended/lsof/lsof_4.88.bb
index 5676cd9822..a96b06eae7 100644
--- a/meta/recipes-extended/lsof/lsof_4.87.bb
+++ b/meta/recipes-extended/lsof/lsof_4.88.bb
@@ -6,8 +6,8 @@ LICENSE = "BSD"
SRC_URI = "ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/lsof_${PV}.tar.bz2"
-SRC_URI[md5sum] = "80e2a76d0e05826db910ec88e631296c"
-SRC_URI[sha256sum] = "dfdd3709d82bc79ccdf3e404b84aafa9aede5948642a824ecaefd0aac589da2c"
+SRC_URI[md5sum] = "1b29c10db4aa88afcaeeaabeef6790db"
+SRC_URI[sha256sum] = "fe6f9b0e26b779ccd0ea5a0b6327c2b5c38d207a6db16f61ac01bd6c44e5c99b"
LOCALSRC = "file://${WORKDIR}/lsof_${PV}/lsof_${PV}_src.tar"
S = "${WORKDIR}/lsof_${PV}_src"
@@ -30,8 +30,8 @@ export LSOF_INCLUDE = "${STAGING_INCDIR}"
do_configure () {
export LSOF_AR="${AR} cr"
export LSOF_RANLIB="${RANLIB}"
- if [ "x${EGLIBCVERSION}" != "x" ];then
- LINUX_CLIB=`echo ${EGLIBCVERSION} |sed -e 's,\.,,g'`
+ if [ "x${GLIBCVERSION}" != "x" ];then
+ LINUX_CLIB=`echo ${GLIBCVERSION} |sed -e 's,\.,,g'`
LINUX_CLIB="-DGLIBCV=${LINUX_CLIB}"
export LINUX_CLIB
fi
diff --git a/meta/recipes-extended/ltp/ltp/0001-Realtime-tests-Fix-bad-priority-inheritance-conditio.patch b/meta/recipes-extended/ltp/ltp/0001-Realtime-tests-Fix-bad-priority-inheritance-conditio.patch
new file mode 100644
index 0000000000..631f9265ec
--- /dev/null
+++ b/meta/recipes-extended/ltp/ltp/0001-Realtime-tests-Fix-bad-priority-inheritance-conditio.patch
@@ -0,0 +1,48 @@
+From b601a8d1b39075a5339195fc0a4038f71ec3b49e Mon Sep 17 00:00:00 2001
+From: "Gary S. Robertson" <gary.robertson@linaro.org>
+Date: Wed, 27 Aug 2014 16:23:56 -0500
+Subject: [LTP][PATCH] Realtime tests: Fix bad priority inheritance conditionals
+
+testcases/realtime/lib/librttest.c and
+testcases/realtime/stress/pi-tests/testpi-3.c
+both referenced a non-existent autoconf configuration setting variable.
+Replaced the invalid variable name with the variable actually created
+by autoconf.
+
+Upstream-Status: Backported
+
+Signed-off-by: Gary S. Robertson <gary.robertson@linaro.org>
+---
+ testcases/realtime/lib/librttest.c | 2 +-
+ testcases/realtime/stress/pi-tests/testpi-3.c | 2 +-
+ 2 files changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/testcases/realtime/lib/librttest.c b/testcases/realtime/lib/librttest.c
+index c175148..3679058 100644
+--- a/testcases/realtime/lib/librttest.c
++++ b/testcases/realtime/lib/librttest.c
+@@ -586,7 +586,7 @@ void *busy_work_us(int us)
+
+ void init_pi_mutex(pthread_mutex_t * m)
+ {
+-#if HAVE_DECL_PTHREAD_PRIO_INHERIT
++#if HAS_PRIORITY_INHERIT
+ pthread_mutexattr_t attr;
+ int ret;
+ int protocol;
+diff --git a/testcases/realtime/stress/pi-tests/testpi-3.c b/testcases/realtime/stress/pi-tests/testpi-3.c
+index 30f38f6..e483945 100644
+--- a/testcases/realtime/stress/pi-tests/testpi-3.c
++++ b/testcases/realtime/stress/pi-tests/testpi-3.c
+@@ -365,7 +365,7 @@ int main(int argc, char *argv[])
+
+ printf("Start %s\n", argv[0]);
+
+-#if HAVE_DECL_PTHREAD_PRIO_INHERIT
++#if HAS_PRIORITY_INHERIT
+ if (!nopi) {
+ pthread_mutexattr_t mutexattr;
+ int protocol;
+--
+1.7.9.5
+
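For context, a rough sketch (not LTP's librttest.c) of what the code behind the
fixed conditional does: it creates a mutex that uses the priority-inheritance
protocol. HAS_PRIORITY_INHERIT is assumed here to be supplied by the build
system (autoconf, in LTP's case); it has to come from a configure-time probe
because PTHREAD_PRIO_INHERIT is typically an enum value rather than a macro,
so the preprocessor alone cannot detect it.

    #include <pthread.h>

    /* demo only: initialise a priority-inheritance mutex when configured */
    int init_pi_mutex_demo(pthread_mutex_t *m)
    {
    #if HAS_PRIORITY_INHERIT
            pthread_mutexattr_t attr;

            if (pthread_mutexattr_init(&attr) != 0)
                    return -1;
            /* the owner inherits the priority of the highest-priority waiter */
            if (pthread_mutexattr_setprotocol(&attr, PTHREAD_PRIO_INHERIT) != 0)
                    return -1;
            return pthread_mutex_init(m, &attr);
    #else
            return pthread_mutex_init(m, NULL);  /* PI not configured */
    #endif
    }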
diff --git a/meta/recipes-extended/ltp/ltp/0001-Realtime-tests-Fix-robust-mutex-conditionals.patch b/meta/recipes-extended/ltp/ltp/0001-Realtime-tests-Fix-robust-mutex-conditionals.patch
new file mode 100644
index 0000000000..1fdd281013
--- /dev/null
+++ b/meta/recipes-extended/ltp/ltp/0001-Realtime-tests-Fix-robust-mutex-conditionals.patch
@@ -0,0 +1,62 @@
+From 663a14423baea0e05ba79d90d2497dde5e4594bd Mon Sep 17 00:00:00 2001
+From: "Gary S. Robertson" <gary.robertson@linaro.org>
+Date: Thu, 11 Sep 2014 13:02:47 -0500
+Subject: [LTP][PATCH] Realtime tests: Fix robust mutex conditionals
+
+sbrk_mutex, testpi-5, and testpi-6 realtime tests in subdir
+testcases/realtime/func/pi-tests configured their robust-mutex tests
+with compile-time config variables that autoconf never generates.
+Changed these conditionals to use the config variables that are
+actually generated during the autoconf process.
+
+Upstream-Status: Backported
+
+Signed-off-by: Gary S. Robertson <gary.robertson@linaro.org>
+---
+ testcases/realtime/func/pi-tests/sbrk_mutex.c | 2 +-
+ testcases/realtime/func/pi-tests/testpi-5.c | 2 +-
+ testcases/realtime/func/pi-tests/testpi-6.c | 2 +-
+ 3 files changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/testcases/realtime/func/pi-tests/sbrk_mutex.c b/testcases/realtime/func/pi-tests/sbrk_mutex.c
+index 684021f..5c325b4 100644
+--- a/testcases/realtime/func/pi-tests/sbrk_mutex.c
++++ b/testcases/realtime/func/pi-tests/sbrk_mutex.c
+@@ -45,7 +45,7 @@
+ #include <unistd.h>
+ #include "librttest.h"
+
+-#if defined(HAS_PTHREAD_MUTEXTATTR_ROBUST_APIS) && defined(PTHREAD_MUTEX_ROBUST_NP)
++#if HAS_PTHREAD_MUTEXTATTR_ROBUST_APIS
+
+ #define NUM_MUTEXES 5000
+ #define NUM_THREADS 50
+diff --git a/testcases/realtime/func/pi-tests/testpi-5.c b/testcases/realtime/func/pi-tests/testpi-5.c
+index a1d93cc..70f02fd 100644
+--- a/testcases/realtime/func/pi-tests/testpi-5.c
++++ b/testcases/realtime/func/pi-tests/testpi-5.c
+@@ -69,7 +69,7 @@ int do_test(int argc, char **argv)
+ pthread_mutexattr_t mutexattr;
+ int retc, protocol;
+
+-#if HAS_PTHREAD_MUTEXATTR_PROTOCOL_FUNCTIONS
++#if HAS_PTHREAD_MUTEXTATTR_ROBUST_APIS
+
+ if (pthread_mutexattr_init(&mutexattr) != 0)
+ printf("Failed to init mutexattr\n");
+diff --git a/testcases/realtime/func/pi-tests/testpi-6.c b/testcases/realtime/func/pi-tests/testpi-6.c
+index b3c3e4a..f715eee 100644
+--- a/testcases/realtime/func/pi-tests/testpi-6.c
++++ b/testcases/realtime/func/pi-tests/testpi-6.c
+@@ -41,7 +41,7 @@
+ #include <unistd.h>
+ #include <librttest.h>
+
+-#if defined(PTHREAD_MUTEX_ROBUST_NP)
++#if HAS_PTHREAD_MUTEXTATTR_ROBUST_APIS
+ pthread_mutex_t child_mutex;
+
+ void *child_thread(void *arg)
+--
+1.7.9.5
+
diff --git a/meta/recipes-extended/ltp/ltp/add-knob-for-numa.patch b/meta/recipes-extended/ltp/ltp/add-knob-for-numa.patch
new file mode 100644
index 0000000000..064f00ae3d
--- /dev/null
+++ b/meta/recipes-extended/ltp/ltp/add-knob-for-numa.patch
@@ -0,0 +1,39 @@
+[PATCH] add knob to control whether numa support should be checked
+
+Upstream-Status: Pending
+
+otherwise a nondeterministic dependency (e.g. on numactl) may be generated
+
+Signed-off-by: Roy.Li <rongqing.li@windriver.com>
+---
+ configure.ac | 10 +-
+diff --git a/configure.ac b/configure.ac
+index 9f397e7..1357256 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -142,6 +142,12 @@ else
+ AC_SUBST([WITH_REALTIME_TESTSUITE],["no"])
+ fi
+
++AC_ARG_WITH([numa],
++ AC_HELP_STRING([--without-numa],
++ [without the numa support]),
++ [],[with_numa=yes],
++)
++
+ AC_CONFIG_SUBDIRS([utils/ffsb-6.0-rc2])
+
+ # END testsuites knobs
+@@ -159,7 +165,9 @@ LTP_CHECK_SIGNAL
+ LTP_CHECK_SYSCALL_EVENTFD
+ LTP_CHECK_SYSCALL_KEYCTL
+ LTP_CHECK_SYSCALL_MODIFY_LDT
++if test "x$with_numa" = xyes; then
+ LTP_CHECK_SYSCALL_NUMA
++fi
+ LTP_CHECK_SYSCALL_QUOTACTL
+ LTP_CHECK_SYSCALL_SIGNALFD
+ LTP_CHECK_SYSCALL_UNSHARE
+--
+1.9.1
+
diff --git a/meta/recipes-extended/ltp/ltp/add-knob-for-tirpc.patch b/meta/recipes-extended/ltp/ltp/add-knob-for-tirpc.patch
new file mode 100644
index 0000000000..36ff4c775d
--- /dev/null
+++ b/meta/recipes-extended/ltp/ltp/add-knob-for-tirpc.patch
@@ -0,0 +1,37 @@
+[PATCH] add knob to control whether tirpc support should be checked
+
+Upstream-Status: Pending
+
+tirpc support is broken upstream; in the meantime, allow tirpc to be disabled.
+
+Signed-off-by: Fathi Boudra <fathi.boudra@linaro.org>
+---
+ configure.ac | 9 +++++++++
+ 1 file changed, 9 insertions(+)
+
+--- a/configure.ac
++++ b/configure.ac
+@@ -98,6 +98,13 @@ if test "x$with_python" = xyes; then
+ else
+ AC_SUBST([WITH_PYTHON],["no"])
+ fi
++
++# TI RPC
++AC_ARG_WITH([tirpc],
++ AC_HELP_STRING([--without-tirpc],
++ [without libtirpc support]),
++ [],[with_tirpc=yes],
++)
+ # END tools knobs
+
+ # Testsuites knobs
+@@ -182,7 +189,9 @@ LTP_CHECK_RENAMEAT
+ LTP_CHECK_FALLOCATE
+ LTP_CHECK_SYSCALL_FCNTL
+ LTP_CHECK_SYSCALL_PERF_EVENT_OPEN
++if test "x$with_tirpc" = xyes; then
+ LTP_CHECK_TIRPC
++fi
+ LTP_CHECK_TEE
+ LTP_CHECK_SPLICE
+ LTP_CHECK_VMSPLICE
diff --git a/meta/recipes-extended/ltp/ltp/automake-foreign.patch b/meta/recipes-extended/ltp/ltp/automake-foreign.patch
deleted file mode 100644
index c3dd891395..0000000000
--- a/meta/recipes-extended/ltp/ltp/automake-foreign.patch
+++ /dev/null
@@ -1,20 +0,0 @@
-Use foreign strictness to avoid automake errors.
-
-Upstream-Status: Submitted (https://github.com/linux-test-project/ltp/issues/16)
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
-diff --git a/configure.ac b/configure.ac
-index 9f397e7..fc57957 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -4 +4 @@
--AM_INIT_AUTOMAKE
-+AM_INIT_AUTOMAKE([foreign])
---- a/utils/ffsb-6.0-rc2/configure.in
-+++ b/utils/ffsb-6.0-rc2/configure.in
-@@ -2,2 +2,3 @@ dnl Process this file with autoconf to produce a configure script.
--AC_INIT(main.c)
--AM_INIT_AUTOMAKE(ffsb, 6.0-RC2)
-+AC_INIT([ffsb], [6.0-RC2])
-+AC_CONFIG_SRCDIR([main.c])
-+AM_INIT_AUTOMAKE([foreign])
diff --git a/meta/recipes-extended/ltp/ltp/make-setregid02-work.patch b/meta/recipes-extended/ltp/ltp/make-setregid02-work.patch
new file mode 100644
index 0000000000..4836010bdf
--- /dev/null
+++ b/meta/recipes-extended/ltp/ltp/make-setregid02-work.patch
@@ -0,0 +1,61 @@
+[PATCH] make setregid02 work
+
+Upstream-Status: Inappropriate [configuration]
+
+There is no "nobody" group in oe-core; the user "nobody" belongs to the
+"nogroup" group, so replace nobody with nogroup to make the test pass.
+
+Signed-off-by: Roy.Li <rongqing.li@windriver.com>
+---
+ testcases/kernel/syscalls/setregid/setregid02.c | 16 ++++++++--------
+ 1 file changed, 8 insertions(+), 8 deletions(-)
+
+diff --git a/testcases/kernel/syscalls/setregid/setregid02.c b/testcases/kernel/syscalls/setregid/setregid02.c
+index 8058627..866bee4 100644
+--- a/testcases/kernel/syscalls/setregid/setregid02.c
++++ b/testcases/kernel/syscalls/setregid/setregid02.c
+@@ -41,7 +41,7 @@ static gid_t neg_one = -1;
+
+ static struct passwd *ltpuser;
+
+-static struct group nobody, root, bin;
++static struct group nogroup, root, bin;
+
+ /*
+ * The following structure contains all test data. Each structure in the array
+@@ -57,17 +57,17 @@ struct test_data_t {
+ char *test_msg;
+ } test_data[] = {
+ {
+- &neg_one, &root.gr_gid, EPERM, &nobody, &nobody,
++ &neg_one, &root.gr_gid, EPERM, &nogroup, &nogroup,
+ "After setregid(-1, root),"}, {
+- &neg_one, &bin.gr_gid, EPERM, &nobody, &nobody,
++ &neg_one, &bin.gr_gid, EPERM, &nogroup, &nogroup,
+ "After setregid(-1, bin)"}, {
+- &root.gr_gid, &neg_one, EPERM, &nobody, &nobody,
++ &root.gr_gid, &neg_one, EPERM, &nogroup, &nogroup,
+ "After setregid(root,-1),"}, {
+- &bin.gr_gid, &neg_one, EPERM, &nobody, &nobody,
++ &bin.gr_gid, &neg_one, EPERM, &nogroup, &nogroup,
+ "After setregid(bin, -1),"}, {
+- &root.gr_gid, &bin.gr_gid, EPERM, &nobody, &nobody,
++ &root.gr_gid, &bin.gr_gid, EPERM, &nogroup, &nogroup,
+ "After setregid(root, bin)"}, {
+- &bin.gr_gid, &root.gr_gid, EPERM, &nobody, &nobody,
++ &bin.gr_gid, &root.gr_gid, EPERM, &nogroup, &nogroup,
+ "After setregid(bin, root),"}
+ };
+
+@@ -165,7 +165,7 @@ static void setup(void)
+ } while (0)
+
+ GET_GID(root);
+- GET_GID(nobody);
++ GET_GID(nogroup);
+ GET_GID(bin);
+
+ TEST_PAUSE;
+--
+1.9.1
+
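As an illustration (this is not LTP's GET_GID macro), the test needs to resolve
each group name to a numeric GID at run time, and the lookup fails if the named
group does not exist on the target image; hence the switch to "nogroup":

    #include <grp.h>
    #include <stdio.h>

    /* demo only: look up a group name, as the test setup does for each group */
    static int lookup_gid(const char *name, gid_t *out)
    {
            struct group *gr = getgrnam(name);   /* /etc/group, NSS, ... */

            if (gr == NULL) {
                    fprintf(stderr, "group '%s' not found\n", name);
                    return -1;
            }
            *out = gr->gr_gid;
            return 0;
    }

    int main(void)
    {
            gid_t gid;

            if (lookup_gid("nogroup", &gid) == 0)
                    printf("nogroup has gid %d\n", (int)gid);
            return 0;
    }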
diff --git a/meta/recipes-extended/ltp/ltp_20140422.bb b/meta/recipes-extended/ltp/ltp_20140828.bb
index 30761fd920..0be18a71c3 100644
--- a/meta/recipes-extended/ltp/ltp_20140422.bb
+++ b/meta/recipes-extended/ltp/ltp_20140828.bb
@@ -19,13 +19,17 @@ LIC_FILES_CHKSUM = "\
file://utils/ffsb-6.0-rc2/COPYING;md5=c46082167a314d785d012a244748d803 \
"
-DEPENDS = "attr libaio libcap acl openssl"
-SRCREV = "f4c3bfe1eab51eb72caeb0f3336d2790c9a8bd1b"
+DEPENDS = "attr libaio libcap acl openssl zip-native"
+SRCREV = "2c341ad9177f36d9b953e84dee8cf88498286fe5"
SRC_URI = "git://github.com/linux-test-project/ltp.git \
file://0001-Rename-runtests_noltp.sh-script-so-have-unique-name.patch \
file://ltp-Do-not-link-against-libfl.patch \
- file://automake-foreign.patch \
+ file://make-setregid02-work.patch \
+ file://add-knob-for-numa.patch \
+ file://0001-Realtime-tests-Fix-bad-priority-inheritance-conditio.patch \
+ file://0001-Realtime-tests-Fix-robust-mutex-conditionals.patch \
+ file://add-knob-for-tirpc.patch \
"
S = "${WORKDIR}/git"
@@ -37,7 +41,11 @@ TARGET_CC_ARCH += "${LDFLAGS}"
export prefix = "/opt/ltp"
export exec_prefix = "/opt/ltp"
+PACKAGECONFIG[numa] = "--with-numa, --without-numa, numactl,"
+EXTRA_AUTORECONF += "-I ${S}/testcases/realtime/m4"
EXTRA_OECONF = " --with-power-management-testsuite --with-realtime-testsuite "
+# ltp network/rpc test cases ftbfs when libtirpc is found
+EXTRA_OECONF += " --without-tirpc "
# ltp doesn't regenerate ffsb-6.0-rc2 configure and hardcode configure call.
# we explicitly force regeneration of that directory and pass configure options.
@@ -65,7 +73,7 @@ do_install(){
find ${D} -type f -print | xargs grep "\!.*\/usr\/bin\/expect" | awk -F":" '{print $1}' | xargs rm -f
}
-RDEPENDS_${PN} = "perl e2fsprogs-mke2fs"
+RDEPENDS_${PN} = "perl e2fsprogs-mke2fs python-core libaio bash gawk"
FILES_${PN}-dbg += "\
/opt/ltp/runtest/.debug \
diff --git a/meta/recipes-extended/man-pages/man-pages_3.70.bb b/meta/recipes-extended/man-pages/man-pages_3.75.bb
index e1cda884c5..18e2bca8e4 100644
--- a/meta/recipes-extended/man-pages/man-pages_3.70.bb
+++ b/meta/recipes-extended/man-pages/man-pages_3.75.bb
@@ -7,8 +7,8 @@ LICENSE = "GPLv2+"
LIC_FILES_CHKSUM = "file://README;md5=8f2a3d43057d458e5066714980567a60"
SRC_URI = "${KERNELORG_MIRROR}/linux/docs/${BPN}/Archive/${BP}.tar.gz"
-SRC_URI[md5sum] = "992c773a0d9e3c4a7ba5055eadb58461"
-SRC_URI[sha256sum] = "70298623da10a70e2d124b4059129b8d79e50d4532fa1f8c1d8c67de48f7f965"
+SRC_URI[md5sum] = "74ad4aef657a046fe214bf981a9a2b30"
+SRC_URI[sha256sum] = "ba23f57a98d032a742ff506f32550dfa2c3076c8bf8d5f6b59305c6b015077d3"
RDEPENDS_${PN} = "man"
@@ -27,3 +27,10 @@ do_install() {
# Only deliveres man-pages so FILES_${PN} gets everything
FILES_${PN}-doc = ""
FILES_${PN} = "${mandir}/*"
+
+inherit update-alternatives
+
+ALTERNATIVE_PRIORITY = "100"
+ALTERNATIVE_${PN} = "passwd.5 getspnam.3"
+ALTERNATIVE_LINK_NAME[passwd.5] = "${mandir}/man5/passwd.5"
+ALTERNATIVE_LINK_NAME[getspnam.3] = "${mandir}/man3/getspnam.3"
diff --git a/meta/recipes-extended/man/man/man.conf b/meta/recipes-extended/man/man/man.conf
index bb3c69720e..fc10be2758 100644
--- a/meta/recipes-extended/man/man/man.conf
+++ b/meta/recipes-extended/man/man/man.conf
@@ -133,7 +133,8 @@ MANSECT 1:1p:8:2:3:3p:4:5:6:7:9:0p:tcl:n:l:p:o
# The command given must act as a filter.
#
.gz /bin/gunzip -c
-.bz2 /bin/bzip2 -c -d
+.bz2 /usr/bin/bunzip2 -c
+.xz /usr/bin/unxz -c
.z
.Z /bin/zcat
.F
diff --git a/meta/recipes-extended/man/man_1.6g.bb b/meta/recipes-extended/man/man_1.6g.bb
index a66e01cb9a..03fffd6015 100644
--- a/meta/recipes-extended/man/man_1.6g.bb
+++ b/meta/recipes-extended/man/man_1.6g.bb
@@ -9,6 +9,19 @@ PR = "r1"
DEPENDS = "groff less"
+def compress_pkg(d):
+ if "compress_doc" in (d.getVar("INHERIT", True) or "").split():
+ compress = d.getVar("DOC_COMPRESS", True)
+ if compress == "gz":
+ return "gzip"
+ elif compress == "bz2":
+ return "bzip2"
+ elif compress == "xz":
+ return "xz"
+ return ""
+
+RDEPENDS_${PN} += "${@compress_pkg(d)}"
+
SRC_URI = "http://primates.ximian.com/~flucifredi/${BPN}/${BPN}-${PV}.tar.gz \
file://man-1.5k-confpath.patch;striplevel=0 \
file://man-1.5h1-make.patch \
diff --git a/meta/recipes-extended/mc/mc/mc-CTRL.patch b/meta/recipes-extended/mc/mc/mc-CTRL.patch
new file mode 100644
index 0000000000..e23d9dd81c
--- /dev/null
+++ b/meta/recipes-extended/mc/mc/mc-CTRL.patch
@@ -0,0 +1,31 @@
+Fix build with musl by ensuring CTRL is defined.
+
+musl does not define CTRL in <termios.h>; we could include <sys/ttydefaults.h>
+explicitly, but it's easier just to ensure CTRL is defined.
+
+This patch is taken from Sabotage Linux, the license statement for patches and
+build scripts in Sabotage Linux says:
+
+ To the extent possible under law, Christian Neukirchen has waived
+ all copyright and related or neighboring rights to this work.
+
+ http://creativecommons.org/publicdomain/zero/1.0/
+
+Therefore this should be good to include in OpenEmbedded.
+
+Signed-off-by: Paul Barker <paul@paulbarker.me.uk>
+
+Upstream-status: Accepted (should be included in v4.8.14)
+
+diff -u mc-4.8.1.7.org/lib/tty/tty-ncurses.c mc-4.8.1.7/lib/tty/tty-ncurses.c
+--- mc-4.8.1.7.org/lib/tty/tty-ncurses.c
++++ mc-4.8.1.7/lib/tty/tty-ncurses.c
+@@ -65,7 +65,7 @@
+
+ /*** file scope macro definitions ****************************************************************/
+
+-#if defined(_AIX) && !defined(CTRL)
++#if !defined(CTRL)
+ #define CTRL(x) ((x) & 0x1f)
+ #endif
+
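A tiny standalone illustration (not mc source) of what the CTRL() macro yields:
it masks a character down to its control-code equivalent, which is what
<sys/ttydefaults.h> normally provides.

    #include <stdio.h>

    #if !defined(CTRL)
    #define CTRL(x) ((x) & 0x1f)
    #endif

    int main(void)
    {
            printf("CTRL('c') = 0x%02x\n", CTRL('c'));   /* 0x03, Ctrl-C */
            printf("CTRL('[') = 0x%02x\n", CTRL('['));   /* 0x1b, Escape */
            return 0;
    }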
diff --git a/meta/recipes-extended/mc/mc_4.7.5.2.bb b/meta/recipes-extended/mc/mc_4.7.5.2.bb
index 0e4434c7d9..b47eb02323 100644
--- a/meta/recipes-extended/mc/mc_4.7.5.2.bb
+++ b/meta/recipes-extended/mc/mc_4.7.5.2.bb
@@ -8,7 +8,9 @@ RDEPENDS_${PN} = "ncurses-terminfo"
PR = "r3"
-SRC_URI = "http://www.midnight-commander.org/downloads/${BPN}-${PV}.tar.bz2"
+SRC_URI = "http://www.midnight-commander.org/downloads/${BPN}-${PV}.tar.bz2 \
+ file://mc-CTRL.patch \
+ "
SRC_URI[md5sum] = "bdae966244496cd4f6d282d80c9cf3c6"
SRC_URI[sha256sum] = "a68338862bb30017eb65ed569a58e80ab66ae8cef11c886440c9e9f4d1efc6ab"
diff --git a/meta/recipes-extended/mc/mc_4.8.12.bb b/meta/recipes-extended/mc/mc_4.8.13.bb
index b8971931d9..ef5b1a23e2 100644
--- a/meta/recipes-extended/mc/mc_4.8.12.bb
+++ b/meta/recipes-extended/mc/mc_4.8.13.bb
@@ -6,10 +6,12 @@ SECTION = "console/utils"
DEPENDS = "ncurses glib-2.0"
RDEPENDS_${PN} = "ncurses-terminfo"
-SRC_URI = "http://www.midnight-commander.org/downloads/${BPN}-${PV}.tar.bz2"
+SRC_URI = "http://www.midnight-commander.org/downloads/${BPN}-${PV}.tar.bz2 \
+ file://mc-CTRL.patch \
+ "
-SRC_URI[md5sum] = "a64c426364bfaee56b628f6c0738aade"
-SRC_URI[sha256sum] = "1054fcc27a488771cbe5b85d7c10135fa1cd1b7682a19930d44b51a023e39396"
+SRC_URI[md5sum] = "12a521a50da6a86852177591b9623d5e"
+SRC_URI[sha256sum] = "22e1b809edba957eb9a392138bf87fea3877f7ca0b7463b7cc2eb94afa6f3e49"
inherit autotools gettext pkgconfig
diff --git a/meta/recipes-extended/mdadm/mdadm_3.3.1.bb b/meta/recipes-extended/mdadm/mdadm_3.3.2.bb
index 59bc587ef9..c9392b43f6 100644
--- a/meta/recipes-extended/mdadm/mdadm_3.3.1.bb
+++ b/meta/recipes-extended/mdadm/mdadm_3.3.2.bb
@@ -13,8 +13,8 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/utils/raid/mdadm/${BPN}-${PV}.tar.xz \
file://gcc-4.9.patch \
"
-SRC_URI[md5sum] = "4227d48de62dfb217c92fa0c54171bbe"
-SRC_URI[sha256sum] = "d8c74112cfd77bdc1dbc1291fe8d9243c76d91bfa276fcb95f2a75ca7717ab02"
+SRC_URI[md5sum] = "44698d351501cac6a89072dc877eb220"
+SRC_URI[sha256sum] = "0f3a7e1a76b13892b79f9fceaf99ecb23513260389a768ac644ffa3ae06e5b8c"
CFLAGS += "-fno-strict-aliasing"
diff --git a/meta/recipes-extended/newt/libnewt-python_0.52.17.bb b/meta/recipes-extended/newt/libnewt-python_0.52.18.bb
index d591fbd3d2..d591fbd3d2 100644
--- a/meta/recipes-extended/newt/libnewt-python_0.52.17.bb
+++ b/meta/recipes-extended/newt/libnewt-python_0.52.18.bb
diff --git a/meta/recipes-extended/newt/libnewt_0.52.17.bb b/meta/recipes-extended/newt/libnewt_0.52.18.bb
index 28d5cf1a34..fa2493b554 100644
--- a/meta/recipes-extended/newt/libnewt_0.52.17.bb
+++ b/meta/recipes-extended/newt/libnewt_0.52.18.bb
@@ -17,16 +17,14 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2"
# slang needs to be >= 2.2
DEPENDS = "slang popt"
-PR = "r2"
-
SRC_URI = "https://fedorahosted.org/releases/n/e/newt/newt-${PV}.tar.gz \
file://remove_slang_include.patch \
file://fix_SHAREDDIR.patch \
file://cross_ar.patch \
"
-SRC_URI[md5sum] = "f36d4d908965a0c89fd6fd8b61a6118b"
-SRC_URI[sha256sum] = "69837973ef2ee2fa644426f1c3e48d2b18785ebcd382ef7fd01eb2e67d2d632b"
+SRC_URI[md5sum] = "685721bee1a318570704b19dcf31d268"
+SRC_URI[sha256sum] = "771b0e634ede56ae6a6acd910728bb5832ac13ddb0d1d27919d2498dab70c91e"
S = "${WORKDIR}/newt-${PV}"
@@ -34,6 +32,8 @@ EXTRA_OECONF = "--without-tcl --without-python"
inherit autotools-brokensep
+CLEANBROKEN = "1"
+
export STAGING_INCDIR
export STAGING_LIBDIR
diff --git a/meta/recipes-extended/packagegroups/packagegroup-core-lsb.bb b/meta/recipes-extended/packagegroups/packagegroup-core-lsb.bb
index 30b6e1545f..dd96fbebaa 100644
--- a/meta/recipes-extended/packagegroups/packagegroup-core-lsb.bb
+++ b/meta/recipes-extended/packagegroups/packagegroup-core-lsb.bb
@@ -128,7 +128,7 @@ RDEPENDS_packagegroup-core-lsb-core = "\
cups \
diffutils \
ed \
- eglibc-utils \
+ glibc-utils \
elfutils \
file \
findutils \
@@ -158,7 +158,7 @@ RDEPENDS_packagegroup-core-lsb-core = "\
util-linux \
xdg-utils \
\
- eglibc \
+ glibc \
libgcc \
libpam \
libxml2 \
@@ -245,13 +245,13 @@ RDEPENDS_packagegroup-core-lsb-runtime-add = "\
libxml-parser-perl \
libxml-perl \
libxml-sax-perl \
- eglibc-localedatas \
- eglibc-gconvs \
- eglibc-charmaps \
- eglibc-binaries \
- eglibc-localedata-posix \
- eglibc-extra-nss \
- eglibc-pcprofile \
+ glibc-localedatas \
+ glibc-gconvs \
+ glibc-charmaps \
+ glibc-binaries \
+ glibc-localedata-posix \
+ glibc-extra-nss \
+ glibc-pcprofile \
libclass-isa-perl \
libenv-perl \
libdumpvalue-perl \
diff --git a/meta/recipes-extended/pam/libpam/libpam-xtests-remove-bash-dependency.patch b/meta/recipes-extended/pam/libpam/libpam-xtests-remove-bash-dependency.patch
new file mode 100644
index 0000000000..680029ae0d
--- /dev/null
+++ b/meta/recipes-extended/pam/libpam/libpam-xtests-remove-bash-dependency.patch
@@ -0,0 +1,226 @@
+From 555407ff6e2f742df64ae93859f14a0fc1397829 Mon Sep 17 00:00:00 2001
+From: Wenzong Fan <wenzong.fan@windriver.com>
+Date: Fri, 12 Sep 2014 05:35:05 -0400
+Subject: [PATCH] libpam/xtests: remove bash dependency
+
+There's no bash-specific syntax in the xtest scripts:
+
+ # after below patches applied:
+ $ cd Linux-PAM-1.1.6/xtests
+ $ checkbashisms *.sh
+ No output
+
+Just remove the runtime dependency on bash.
+
+Upstream-Status: Pending
+
+Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
+---
+ xtests/run-xtests.sh | 2 +-
+ xtests/tst-pam_access1.sh | 2 +-
+ xtests/tst-pam_access2.sh | 2 +-
+ xtests/tst-pam_access3.sh | 2 +-
+ xtests/tst-pam_access4.sh | 2 +-
+ xtests/tst-pam_assemble_line1.sh | 2 +-
+ xtests/tst-pam_group1.sh | 2 +-
+ xtests/tst-pam_limits1.sh | 2 +-
+ xtests/tst-pam_pwhistory1.sh | 2 +-
+ xtests/tst-pam_substack1.sh | 2 +-
+ xtests/tst-pam_substack2.sh | 2 +-
+ xtests/tst-pam_substack3.sh | 2 +-
+ xtests/tst-pam_substack4.sh | 2 +-
+ xtests/tst-pam_substack5.sh | 2 +-
+ xtests/tst-pam_succeed_if1.sh | 2 +-
+ xtests/tst-pam_unix1.sh | 2 +-
+ xtests/tst-pam_unix2.sh | 2 +-
+ xtests/tst-pam_unix3.sh | 2 +-
+ xtests/tst-pam_unix4.sh | 2 +-
+ 19 files changed, 19 insertions(+), 19 deletions(-)
+
+diff --git a/xtests/run-xtests.sh b/xtests/run-xtests.sh
+index 3a89057..1cf8684 100755
+--- a/xtests/run-xtests.sh
++++ b/xtests/run-xtests.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ SRCDIR=$1
+ shift 1
+diff --git a/xtests/tst-pam_access1.sh b/xtests/tst-pam_access1.sh
+index 180d256..70521d2 100755
+--- a/xtests/tst-pam_access1.sh
++++ b/xtests/tst-pam_access1.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ /usr/sbin/groupadd tstpamaccess
+ /usr/sbin/useradd -G tstpamaccess -p '!!' tstpamaccess1
+diff --git a/xtests/tst-pam_access2.sh b/xtests/tst-pam_access2.sh
+index 0a30275..7e3e60f 100755
+--- a/xtests/tst-pam_access2.sh
++++ b/xtests/tst-pam_access2.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ /usr/sbin/groupadd tstpamaccess
+ /usr/sbin/useradd -p '!!' tstpamaccess2
+diff --git a/xtests/tst-pam_access3.sh b/xtests/tst-pam_access3.sh
+index 348e0c3..3630e2e 100755
+--- a/xtests/tst-pam_access3.sh
++++ b/xtests/tst-pam_access3.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ /usr/sbin/useradd -p '!!' tstpamaccess3
+ ./tst-pam_access3
+diff --git a/xtests/tst-pam_access4.sh b/xtests/tst-pam_access4.sh
+index 61e7b44..4538df4 100755
+--- a/xtests/tst-pam_access4.sh
++++ b/xtests/tst-pam_access4.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ /usr/sbin/useradd -p '!!' tstpamaccess4
+ ./tst-pam_access4
+diff --git a/xtests/tst-pam_assemble_line1.sh b/xtests/tst-pam_assemble_line1.sh
+index 248d47e..dc2a675 100755
+--- a/xtests/tst-pam_assemble_line1.sh
++++ b/xtests/tst-pam_assemble_line1.sh
+@@ -1,3 +1,3 @@
+-#!/bin/bash
++#!/bin/sh
+
+ exec ./tst-pam_authfail tst-pam_assemble_line1
+diff --git a/xtests/tst-pam_group1.sh b/xtests/tst-pam_group1.sh
+index b76377f..44faca9 100755
+--- a/xtests/tst-pam_group1.sh
++++ b/xtests/tst-pam_group1.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ /usr/sbin/groupadd tstpamgrpg
+ /usr/sbin/useradd -p '!!' tstpamgrp
+diff --git a/xtests/tst-pam_limits1.sh b/xtests/tst-pam_limits1.sh
+index 4faa822..32c021d 100755
+--- a/xtests/tst-pam_limits1.sh
++++ b/xtests/tst-pam_limits1.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ /usr/sbin/useradd -p '!!' tstpamlimits
+ ./tst-pam_limits1
+diff --git a/xtests/tst-pam_pwhistory1.sh b/xtests/tst-pam_pwhistory1.sh
+index ddb3b8b..0f212e2 100644
+--- a/xtests/tst-pam_pwhistory1.sh
++++ b/xtests/tst-pam_pwhistory1.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ /usr/sbin/useradd tstpampwhistory
+ ./tst-pam_pwhistory1
+diff --git a/xtests/tst-pam_substack1.sh b/xtests/tst-pam_substack1.sh
+index 5260175..f1b72a7 100755
+--- a/xtests/tst-pam_substack1.sh
++++ b/xtests/tst-pam_substack1.sh
+@@ -1,3 +1,3 @@
+-#!/bin/bash
++#!/bin/sh
+
+ exec ./tst-pam_authfail tst-pam_substack1
+diff --git a/xtests/tst-pam_substack2.sh b/xtests/tst-pam_substack2.sh
+index c02f597..3804fa7 100755
+--- a/xtests/tst-pam_substack2.sh
++++ b/xtests/tst-pam_substack2.sh
+@@ -1,3 +1,3 @@
+-#!/bin/bash
++#!/bin/sh
+
+ exec ./tst-pam_authsucceed tst-pam_substack2
+diff --git a/xtests/tst-pam_substack3.sh b/xtests/tst-pam_substack3.sh
+index 0e572aa..aa48e8e 100755
+--- a/xtests/tst-pam_substack3.sh
++++ b/xtests/tst-pam_substack3.sh
+@@ -1,3 +1,3 @@
+-#!/bin/bash
++#!/bin/sh
+
+ exec ./tst-pam_authsucceed tst-pam_substack3
+diff --git a/xtests/tst-pam_substack4.sh b/xtests/tst-pam_substack4.sh
+index a3ef08a..958a07a 100755
+--- a/xtests/tst-pam_substack4.sh
++++ b/xtests/tst-pam_substack4.sh
+@@ -1,3 +1,3 @@
+-#!/bin/bash
++#!/bin/sh
+
+ exec ./tst-pam_authsucceed tst-pam_substack4
+diff --git a/xtests/tst-pam_substack5.sh b/xtests/tst-pam_substack5.sh
+index e2714fd..7e0da74 100755
+--- a/xtests/tst-pam_substack5.sh
++++ b/xtests/tst-pam_substack5.sh
+@@ -1,3 +1,3 @@
+-#!/bin/bash
++#!/bin/sh
+
+ exec ./tst-pam_authfail tst-pam_substack5
+diff --git a/xtests/tst-pam_succeed_if1.sh b/xtests/tst-pam_succeed_if1.sh
+index a643b2e..58e57b4 100755
+--- a/xtests/tst-pam_succeed_if1.sh
++++ b/xtests/tst-pam_succeed_if1.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ /usr/sbin/useradd -p '!!' tstpamtest
+ /usr/sbin/useradd -p '!!' pamtest
+diff --git a/xtests/tst-pam_unix1.sh b/xtests/tst-pam_unix1.sh
+index f75bd84..72deac0 100755
+--- a/xtests/tst-pam_unix1.sh
++++ b/xtests/tst-pam_unix1.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ /usr/sbin/useradd -p '!!' tstpamunix
+ ./tst-pam_unix1
+diff --git a/xtests/tst-pam_unix2.sh b/xtests/tst-pam_unix2.sh
+index 7093155..c04d6e6 100755
+--- a/xtests/tst-pam_unix2.sh
++++ b/xtests/tst-pam_unix2.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ # pamunix0 = 0aXKZztA.d1KY
+ /usr/sbin/useradd -p 0aXKZztA.d1KY tstpamunix
+diff --git a/xtests/tst-pam_unix3.sh b/xtests/tst-pam_unix3.sh
+index ef4a07c..b52db2b 100755
+--- a/xtests/tst-pam_unix3.sh
++++ b/xtests/tst-pam_unix3.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ # pamunix01 = 0aXKZztA.d1KYIuFXArmd2jU
+ /usr/sbin/useradd -p 0aXKZztA.d1KYIuFXArmd2jU tstpamunix
+diff --git a/xtests/tst-pam_unix4.sh b/xtests/tst-pam_unix4.sh
+index 787c2f9..e7976fd 100755
+--- a/xtests/tst-pam_unix4.sh
++++ b/xtests/tst-pam_unix4.sh
+@@ -1,4 +1,4 @@
+-#!/bin/bash
++#!/bin/sh
+
+ # pamunix01 = 0aXKZztA.d1KYIuFXArmd2jU
+ /usr/sbin/useradd -p 0aXKZztA.d1KYIuFXArmd2jU tstpamunix
+--
+1.7.9.5
+
diff --git a/meta/recipes-extended/pam/libpam_1.1.6.bb b/meta/recipes-extended/pam/libpam_1.1.6.bb
index c06709f512..b3b39da864 100644
--- a/meta/recipes-extended/pam/libpam_1.1.6.bb
+++ b/meta/recipes-extended/pam/libpam_1.1.6.bb
@@ -26,6 +26,7 @@ SRC_URI = "http://linux-pam.org/library/Linux-PAM-${PV}.tar.bz2 \
file://pam-security-abstract-securetty-handling.patch \
file://pam-unix-nullok-secure.patch \
file://pam_timestamp-fix-potential-directory-traversal-issu.patch \
+ file://libpam-xtests-remove-bash-dependency.patch \
"
SRC_URI[md5sum] = "7b73e58b7ce79ffa321d408de06db2c4"
SRC_URI[sha256sum] = "bab887d6280f47fc3963df3b95735a27a16f0f663636163ddf3acab5f1149fc2"
@@ -72,9 +73,24 @@ libpam_suffix = "suffix${@get_multilib_bit(d)}"
RPROVIDES_${PN} += "${PN}-${libpam_suffix}"
RPROVIDES_${PN}-runtime += "${PN}-runtime-${libpam_suffix}"
-RDEPENDS_${PN}-runtime = "${PN}-${libpam_suffix} pam-plugin-deny-${libpam_suffix} pam-plugin-permit-${libpam_suffix} pam-plugin-warn-${libpam_suffix} pam-plugin-unix-${libpam_suffix}"
-RDEPENDS_${PN}-xtests = "${PN}-${libpam_suffix} pam-plugin-access-${libpam_suffix} pam-plugin-debug-${libpam_suffix} pam-plugin-cracklib-${libpam_suffix} pam-plugin-pwhistory-${libpam_suffix} pam-plugin-succeed-if-${libpam_suffix} pam-plugin-time-${libpam_suffix} coreutils"
-#RRECOMMENDS_${PN} = "${PN}-runtime-${libpam_suffix}"
+RDEPENDS_${PN}-runtime = "${PN}-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-deny-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-permit-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-warn-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-unix-${libpam_suffix} \
+ "
+RDEPENDS_${PN}-xtests = "${PN}-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-access-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-debug-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-cracklib-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-pwhistory-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-succeed-if-${libpam_suffix} \
+ ${MLPREFIX}pam-plugin-time-${libpam_suffix} \
+ coreutils"
+
+# FIXME: Native suffix breaks here, disable it for now
+RRECOMMENDS_${PN} = "${PN}-runtime-${libpam_suffix}"
+RRECOMMENDS_${PN}_class-native = ""
python populate_packages_prepend () {
def pam_plugin_append_file(pn, dir, file):
@@ -87,13 +103,12 @@ python populate_packages_prepend () {
def pam_plugin_hook(file, pkg, pattern, format, basename):
pn = d.getVar('PN', True)
libpam_suffix = d.getVar('libpam_suffix', True)
- mlprefix = d.getVar('MLPREFIX', True) or ''
rdeps = d.getVar('RDEPENDS_' + pkg, True)
if rdeps:
- rdeps = rdeps + " " + mlprefix + pn + "-" + libpam_suffix
+ rdeps = rdeps + " " + pn + "-" + libpam_suffix
else:
- rdeps = mlprefix + pn + "-" + libpam_suffix
+ rdeps = pn + "-" + libpam_suffix
d.setVar('RDEPENDS_' + pkg, rdeps)
provides = d.getVar('RPROVIDES_' + pkg, True)
@@ -103,19 +118,22 @@ python populate_packages_prepend () {
provides = pkg + "-" + libpam_suffix
d.setVar('RPROVIDES_' + pkg, provides)
+ mlprefix = d.getVar('MLPREFIX', True) or ''
dvar = bb.data.expand('${WORKDIR}/package', d, True)
pam_libdir = d.expand('${base_libdir}/security')
pam_sbindir = d.expand('${sbindir}')
pam_filterdir = d.expand('${base_libdir}/security/pam_filter')
+ pam_pkgname = mlprefix + 'pam-plugin%s'
- do_split_packages(d, pam_libdir, '^pam(.*)\.so$', 'pam-plugin%s', 'PAM plugin for %s', hook=pam_plugin_hook, extra_depends='')
- mlprefix = d.getVar('MLPREFIX', True) or ''
+ do_split_packages(d, pam_libdir, '^pam(.*)\.so$', pam_pkgname,
+ 'PAM plugin for %s', hook=pam_plugin_hook, extra_depends='')
pam_plugin_append_file('%spam-plugin-unix' % mlprefix, pam_sbindir, 'unix_chkpwd')
pam_plugin_append_file('%spam-plugin-unix' % mlprefix, pam_sbindir, 'unix_update')
pam_plugin_append_file('%spam-plugin-tally' % mlprefix, pam_sbindir, 'pam_tally')
pam_plugin_append_file('%spam-plugin-tally2' % mlprefix, pam_sbindir, 'pam_tally2')
pam_plugin_append_file('%spam-plugin-timestamp' % mlprefix, pam_sbindir, 'pam_timestamp_check')
pam_plugin_append_file('%spam-plugin-mkhomedir' % mlprefix, pam_sbindir, 'mkhomedir_helper')
+ pam_plugin_append_file('%spam-plugin-console' % mlprefix, pam_sbindir, 'pam_console_apply')
do_split_packages(d, pam_filterdir, '^(.*)$', 'pam-filter-%s', 'PAM filter for %s', extra_depends='')
}
@@ -145,3 +163,9 @@ python do_pam_sanity () {
addtask pam_sanity before do_configure
BBCLASSEXTEND = "nativesdk native"
+
+CONFFILES_${PN}-runtime += "${sysconfdir}/pam.d/common-session"
+CONFFILES_${PN}-runtime += "${sysconfdir}/pam.d/common-auth"
+CONFFILES_${PN}-runtime += "${sysconfdir}/pam.d/common-password"
+CONFFILES_${PN}-runtime += "${sysconfdir}/pam.d/common-session-noninteractive"
+CONFFILES_${PN}-runtime += "${sysconfdir}/pam.d/common-account"
diff --git a/meta/recipes-extended/parted/parted-3.1/Makefile b/meta/recipes-extended/parted/files/Makefile
index ee90be0814..ee90be0814 100644
--- a/meta/recipes-extended/parted/parted-3.1/Makefile
+++ b/meta/recipes-extended/parted/files/Makefile
diff --git a/meta/recipes-extended/parted/files/fix-compile-failure-while-dis.patch b/meta/recipes-extended/parted/files/fix-compile-failure-while-dis.patch
new file mode 100644
index 0000000000..68ab715f32
--- /dev/null
+++ b/meta/recipes-extended/parted/files/fix-compile-failure-while-dis.patch
@@ -0,0 +1,57 @@
+From 060e74354774d36d2c11ef08e3e7ea9b9b6e23fb Mon Sep 17 00:00:00 2001
+From: Hongxu Jia <hongxu.jia@windriver.com>
+Date: Thu, 13 Nov 2014 11:29:33 +0800
+Subject: [PATCH] libparted/arch/linux.c: fix compile failure while
+ --disable-device-mapper
+
+With --disable-device-mapper, the macro ENABLE_DEVICE_MAPPER is
+undefined, but some device mapper functions were not guarded by it.
+
+Upstream-Status: Pending
+
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+---
+ libparted/arch/linux.c | 8 ++++++--
+ 1 file changed, 6 insertions(+), 2 deletions(-)
+
+diff --git a/libparted/arch/linux.c b/libparted/arch/linux.c
+index 6fd73c5..2afa479 100644
+--- a/libparted/arch/linux.c
++++ b/libparted/arch/linux.c
+@@ -2320,6 +2320,7 @@ zasprintf (const char *format, ...)
+ static char *
+ dm_canonical_path (PedDevice const *dev)
+ {
++#ifdef ENABLE_DEVICE_MAPPER
+ LinuxSpecific const *arch_specific = LINUX_SPECIFIC (dev);
+
+ /* Get map name from devicemapper */
+@@ -2337,6 +2338,7 @@ dm_canonical_path (PedDevice const *dev)
+ dm_task_destroy (task);
+ return dev_name;
+ err:
++#endif
+ return NULL;
+ }
+
+@@ -2957,13 +2959,15 @@ _disk_sync_part_table (PedDisk* disk)
+ unsigned long long *start,
+ unsigned long long *length);
+
+-
++#ifdef ENABLE_DEVICE_MAPPER
+ if (disk->dev->type == PED_DEVICE_DM) {
+ add_partition = _dm_add_partition;
+ remove_partition = _dm_remove_partition;
+ resize_partition = _dm_resize_partition;
+ get_partition_start_and_length = _dm_get_partition_start_and_length;
+- } else {
++ } else
++#endif
++ {
+ add_partition = _blkpg_add_partition;
+ remove_partition = _blkpg_remove_partition;
+ #ifdef BLKPG_RESIZE_PARTITION
+--
+1.9.1
+
diff --git a/meta/recipes-extended/parted/parted-3.1/fix-doc-mandir.patch b/meta/recipes-extended/parted/files/fix-doc-mandir.patch
index 0711d4e297..0711d4e297 100644
--- a/meta/recipes-extended/parted/parted-3.1/fix-doc-mandir.patch
+++ b/meta/recipes-extended/parted/files/fix-doc-mandir.patch
diff --git a/meta/recipes-extended/parted/parted-3.1/no_check.patch b/meta/recipes-extended/parted/files/no_check.patch
index 58d8db4426..58d8db4426 100644
--- a/meta/recipes-extended/parted/parted-3.1/no_check.patch
+++ b/meta/recipes-extended/parted/files/no_check.patch
diff --git a/meta/recipes-extended/parted/parted-3.1/run-ptest b/meta/recipes-extended/parted/files/run-ptest
index 695c5e8a7b..695c5e8a7b 100644
--- a/meta/recipes-extended/parted/parted-3.1/run-ptest
+++ b/meta/recipes-extended/parted/files/run-ptest
diff --git a/meta/recipes-extended/parted/parted-3.1/syscalls.patch b/meta/recipes-extended/parted/files/syscalls.patch
index e9bbe9a956..e9bbe9a956 100644
--- a/meta/recipes-extended/parted/parted-3.1/syscalls.patch
+++ b/meta/recipes-extended/parted/files/syscalls.patch
diff --git a/meta/recipes-extended/parted/parted-3.1/fix-deprecated-readline.patch b/meta/recipes-extended/parted/parted-3.1/fix-deprecated-readline.patch
deleted file mode 100644
index c4ca06d407..0000000000
--- a/meta/recipes-extended/parted/parted-3.1/fix-deprecated-readline.patch
+++ /dev/null
@@ -1,34 +0,0 @@
-
-From: Gustavo Zacarias
-Subject: bug#16929: [PATCH] ui: switch to new-style readline typedef
-Date: Mon, 3 Mar 2014 10:40:08 -0300
-
-The CPPFunction typedef (among others) have been deprecated in favour of
-specific prototyped typedefs since readline 4.2 (circa 2001).
-It's been working since because compatibility typedefs have been in
-place until they where removed in the recent readline 6.3 release.
-Switch to the new style to avoid build breakage.
-
-Signed-off-by: Gustavo Zacarias <address@hidden>
-
-Upstream-Status: Backport
-
-Signed-off-by: Saul Wold <sgw@linux.intel.com>
-
----
- parted/ui.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/parted/ui.c b/parted/ui.c
-index 786deed..b33f6fc 100644
---- a/parted/ui.c
-+++ b/parted/ui.c
-@@ -1470,7 +1470,7 @@ init_readline (void)
- #ifdef HAVE_LIBREADLINE
- if (!opt_script_mode) {
- rl_initialize ();
-- rl_attempted_completion_function = (CPPFunction*) complete_function;
-+ rl_attempted_completion_function = (rl_completion_func_t *) complete_function;
- readline_state.in_readline = 0;
- }
- #endif
diff --git a/meta/recipes-extended/parted/parted-3.1/fix-dvh-overflows.patch b/meta/recipes-extended/parted/parted-3.1/fix-dvh-overflows.patch
deleted file mode 100644
index b1eae97988..0000000000
--- a/meta/recipes-extended/parted/parted-3.1/fix-dvh-overflows.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-Upstream-Status: Pending
-
-Signed-off-by: Ming Liu <ming.liu@windriver.com>
----
- dvh.h | 10 +++++-----
- 1 file changed, 5 insertions(+), 5 deletions(-)
-
-Index: parted-3.1/libparted/labels/dvh.h
-===================================================================
---- parted-3.1.orig/libparted/labels/dvh.h 2013-02-25 10:46:13.204477586 +0800
-+++ parted-3.1/libparted/labels/dvh.h 2013-02-25 10:47:20.954477065 +0800
-@@ -112,8 +112,8 @@ struct device_parameters {
-
- struct volume_directory {
- char vd_name[VDNAMESIZE]; /* name */
-- int vd_lbn; /* logical block number */
-- int vd_nbytes; /* file length in bytes */
-+ unsigned int vd_lbn; /* logical block number */
-+ unsigned int vd_nbytes; /* file length in bytes */
- };
-
- /*
-@@ -125,9 +125,9 @@ struct volume_directory {
- * NOTE: pt_firstlbn SHOULD BE CYLINDER ALIGNED
- */
- struct partition_table { /* one per logical partition */
-- int pt_nblks; /* # of logical blks in partition */
-- int pt_firstlbn; /* first lbn of partition */
-- int pt_type; /* use of partition */
-+ unsigned int pt_nblks; /* # of logical blks in partition */
-+ unsigned int pt_firstlbn; /* first lbn of partition */
-+ int pt_type; /* use of partition */
- };
-
- #define PTYPE_VOLHDR 0 /* partition is volume header */
diff --git a/meta/recipes-extended/parted/parted-3.1/fix-git-version-gen.patch b/meta/recipes-extended/parted/parted-3.1/fix-git-version-gen.patch
deleted file mode 100644
index a9f26e9781..0000000000
--- a/meta/recipes-extended/parted/parted-3.1/fix-git-version-gen.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-Upstream-Status: Accepted [Expected to be included in parted versions > 3.1]
-Upstream-URL: http://git.savannah.gnu.org/cgit/gnulib.git/patch/?id=cbc11ff0020eb9c04caea6b3e7dc4e4281dff1f9
-
-From cbc11ff0020eb9c04caea6b3e7dc4e4281dff1f9 Mon Sep 17 00:00:00 2001
-From: Andreas Oberritter <obi@opendreambox.org>
-Date: Sun, 18 Mar 2012 17:39:14 +0000
-Subject: git-version-gen: don't let "prefix" envvar cause trouble
-
-* build-aux/git-version-gen (prefix): Initialize properly,
-so as not to use a value specified via the environment.
-Details here: http://thread.gmane.org/gmane.comp.gnu.parted.bugs/10810
----
-diff --git a/build-aux/git-version-gen b/build-aux/git-version-gen
-index d5542a2..0fa9063 100755
---- a/build-aux/git-version-gen
-+++ b/build-aux/git-version-gen
-@@ -1,6 +1,6 @@
- #!/bin/sh
- # Print a version string.
--scriptversion=2012-01-06.07; # UTC
-+scriptversion=2012-03-18.17; # UTC
-
- # Copyright (C) 2007-2012 Free Software Foundation, Inc.
- #
-@@ -92,6 +92,8 @@ Options:
-
- Running without arguments will suffice in most cases."
-
-+prefix=v
-+
- while test $# -gt 0; do
- case $1 in
- --help) echo "$usage"; exit 0;;
-@@ -120,7 +122,6 @@ if test -z "$tarball_version_file"; then
- fi
-
- tag_sed_script="${tag_sed_script:-s/x/x/}"
--prefix="${prefix:-v}"
-
- nl='
- '
---
-cgit v0.9.0.2
diff --git a/meta/recipes-extended/parted/parted_3.1.bb b/meta/recipes-extended/parted/parted_3.2.bb
index 9057463c7e..655a825703 100644
--- a/meta/recipes-extended/parted/parted_3.1.bb
+++ b/meta/recipes-extended/parted/parted_3.2.bb
@@ -9,16 +9,14 @@ PR = "r1"
SRC_URI = "${GNU_MIRROR}/parted/parted-${PV}.tar.xz \
file://no_check.patch \
file://syscalls.patch \
- file://fix-git-version-gen.patch \
file://fix-doc-mandir.patch \
- file://fix-dvh-overflows.patch \
- file://fix-deprecated-readline.patch \
+ file://fix-compile-failure-while-dis.patch \
file://run-ptest \
file://Makefile \
"
-SRC_URI[md5sum] = "5d89d64d94bcfefa9ce8f59f4b81bdcb"
-SRC_URI[sha256sum] = "5e9cc1f91eaf016e5033d85b9b893fd6d3ffaca532a48de1082df9b94225ca15"
+SRC_URI[md5sum] = "0247b6a7b314f8edeb618159fa95f9cb"
+SRC_URI[sha256sum] = "858b589c22297cacdf437f3baff6f04b333087521ab274f7ab677cb8c6bb78e4"
EXTRA_OECONF = "--disable-device-mapper"
diff --git a/meta/recipes-extended/procps/procps-3.2.8/60_linux_version_init.patch b/meta/recipes-extended/procps/procps-3.2.8/60_linux_version_init.patch
deleted file mode 100644
index 203ccb3347..0000000000
--- a/meta/recipes-extended/procps/procps-3.2.8/60_linux_version_init.patch
+++ /dev/null
@@ -1,54 +0,0 @@
-## 60_linux_init.dpatch by <david.sugar@canonical.com>
-##
-## All lines beginning with `## DP:' are a description of the patch.
-## DP: Fix Linux version detection which relied on elf loader side-effect.
-## DP: This patch also depends on 40_gnu-kbsd-version, which modified
-## DP: init_Linux_version().
-
-@DPATCH@
-
-Upstream-Status: inappropriate [upstream unmaintained]
----
- proc/sysinfo.c | 1 +
- proc/version.c | 5 +++--
- proc/version.h | 1 +
- 3 files changed, 5 insertions(+), 2 deletions(-)
-
-Index: procps-3.2.8/proc/sysinfo.c
-===================================================================
---- procps-3.2.8.orig/proc/sysinfo.c
-+++ procps-3.2.8/proc/sysinfo.c
-@@ -212,6 +212,7 @@ static int check_for_privs(void){
- static void init_libproc(void) __attribute__((constructor));
- static void init_libproc(void){
- have_privs = check_for_privs();
-+ init_Linux_version(); // make sure we have version before continuing...
- // ought to count CPUs in /proc/stat instead of relying
- // on glibc, which foolishly tries to parse /proc/cpuinfo
- //
-Index: procps-3.2.8/proc/version.c
-===================================================================
---- procps-3.2.8.orig/proc/version.c
-+++ procps-3.2.8/proc/version.c
-@@ -33,8 +33,7 @@ void display_version(void) {
-
- int linux_version_code;
-
--static void init_Linux_version(void) __attribute__((constructor));
--static void init_Linux_version(void) {
-+void init_Linux_version(void) {
- int x = 0, y = 0, z = 0; /* cleared in case sscanf() < 2 */
- FILE *fp;
- char buf[256];
-Index: procps-3.2.8/proc/version.h
-===================================================================
---- procps-3.2.8.orig/proc/version.h
-+++ procps-3.2.8/proc/version.h
-@@ -14,6 +14,7 @@
-
- EXTERN_C_BEGIN
-
-+extern void init_Linux_version(void); /* initialize linux version */
- extern void display_version(void); /* display suite version */
- extern const char procps_version[]; /* global buf for suite version */
-
diff --git a/meta/recipes-extended/procps/procps-3.2.8/detect_bitness.patch b/meta/recipes-extended/procps/procps-3.2.8/detect_bitness.patch
deleted file mode 100644
index 1523c3dfb2..0000000000
--- a/meta/recipes-extended/procps/procps-3.2.8/detect_bitness.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-Do not try to detect 64bit/32bit system
-we already feed that information via compiler
-defaults
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Upstream-Status: Inappropriate [OE specific]
-Index: procps-3.2.8/Makefile
-===================================================================
---- procps-3.2.8.orig/Makefile 2014-05-03 01:00:01.707387583 -0700
-+++ procps-3.2.8/Makefile 2014-05-03 08:53:08.087175369 -0700
-@@ -118,15 +118,6 @@
- # until you go looking for a 64-bit curses library.
- check_gcc = $(shell if $(CC) $(ALL_CPPFLAGS) $(ALL_CFLAGS) dummy.c $(ALL_LDFLAGS) $(1) -o /dev/null $(CURSES) > /dev/null 2>&1; then echo "$(1)"; else echo "$(2)"; fi ;)
-
--# Be 64-bit if at all possible. In a cross-compiling situation, one may
--# do "make m64=-m32 lib64=lib" to produce 32-bit executables. DO NOT
--# attempt to use a 32-bit executable on a 64-bit kernel. Packagers MUST
--# produce separate executables for ppc and ppc64, s390 and s390x,
--# i386 and x86-64, mips and mips64, sparc and sparc64, and so on.
--# Failure to do so will cause data corruption.
--m64 := $(call check_gcc,-m64,$(call check_gcc,-mabi=64,))
--ALL_CFLAGS += $(m64)
--
- ALL_CFLAGS += $(call check_gcc,-Wdeclaration-after-statement,)
- ALL_CFLAGS += $(call check_gcc,-Wpadded,)
- ALL_CFLAGS += $(call check_gcc,-Wstrict-aliasing,)
diff --git a/meta/recipes-extended/procps/procps-3.2.8/gnu-kbsd-version.patch b/meta/recipes-extended/procps/procps-3.2.8/gnu-kbsd-version.patch
deleted file mode 100644
index 2582857e25..0000000000
--- a/meta/recipes-extended/procps/procps-3.2.8/gnu-kbsd-version.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-Upstream-Status: Inappropriate [not author, no upstream]
-
-Imported from Debian.
-Source: http://anonscm.debian.org/gitweb/?p=collab-maint/procps.git;a=blob;f=debian/patches/gnu-kbsd-version.patch;h=fe5489fc772a3355ff8c0dcf9b953bf0c05aa9f8;hb=b460cfd726b019f8d918b380f78af4c19c5f3e50
-Bugtracker: http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=632749
-
-Stops procps utilities from printing a warning when used with
-kernels having only two digit versions, e.g. 3.0.
-
-Author: <csmall@debian.org>
-Description: Rework version parsing so its ok with other OSes
---- a/proc/version.c
-+++ b/proc/version.c
-@@ -35,15 +35,23 @@
-
- static void init_Linux_version(void) __attribute__((constructor));
- static void init_Linux_version(void) {
-- static struct utsname uts;
-- int x = 0, y = 0, z = 0; /* cleared in case sscanf() < 3 */
-+ int x = 0, y = 0, z = 0; /* cleared in case sscanf() < 2 */
-+ FILE *fp;
-+ char buf[256];
-
-- if (uname(&uts) == -1) /* failure implies impending death */
-- exit(1);
-- if (sscanf(uts.release, "%d.%d.%d", &x, &y, &z) < 3)
-+ if ( (fp=fopen("/proc/version","r")) == NULL) /* failure implies impending death */
-+ exit(1);
-+ if (fgets(buf, 256, fp) == NULL) {
-+ fprintf(stderr, "Cannot read kernel version from /proc/version\n");
-+ fclose(fp);
-+ exit(1);
-+ }
-+ fclose(fp);
-+ if (sscanf(buf, "Linux version %d.%d.%d", &x, &y, &z) < 2)
- fprintf(stderr, /* *very* unlikely to happen by accident */
- "Non-standard uts for running kernel:\n"
-- "release %s=%d.%d.%d gives version code %d\n",
-- uts.release, x, y, z, LINUX_VERSION(x,y,z));
-+ "release %s=%d.%d.%d gives version code %d\n",
-+ buf,
-+ x, y, z, LINUX_VERSION(x,y,z));
- linux_version_code = LINUX_VERSION(x, y, z);
- }
diff --git a/meta/recipes-extended/procps/procps-3.2.8/install.patch b/meta/recipes-extended/procps/procps-3.2.8/install.patch
deleted file mode 100644
index 2a59a5ff68..0000000000
--- a/meta/recipes-extended/procps/procps-3.2.8/install.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-Upstream-Status: Inappropriate [configuration]
-
-diff -ruN procps-3.2.8-orig//Makefile procps-3.2.8/Makefile
---- procps-3.2.8-orig//Makefile 2011-08-23 22:06:46.471163999 +0800
-+++ procps-3.2.8/Makefile 2011-08-23 22:15:01.091163999 +0800
-@@ -29,9 +29,6 @@
- ln_sf := ln -sf
- install := install -D --owner 0 --group 0
-
--# Lame x86-64 /lib64 and /usr/lib64 abomination:
--lib64 := lib$(shell [ -d /lib64 ] && echo 64)
--
- usr/bin := $(DESTDIR)/usr/bin/
- bin := $(DESTDIR)/bin/
- sbin := $(DESTDIR)/sbin/
-@@ -39,8 +36,8 @@
- man1 := $(DESTDIR)/usr/share/man/man1/
- man5 := $(DESTDIR)/usr/share/man/man5/
- man8 := $(DESTDIR)/usr/share/man/man8/
--lib := $(DESTDIR)/$(lib64)/
--usr/lib := $(DESTDIR)/usr/$(lib64)/
-+lib := $(DESTDIR)/$(base_libdir)/
-+usr/lib := $(DESTDIR)/$(libdir)/
- usr/include := $(DESTDIR)/usr/include/
-
- #SKIP := $(bin)kill $(man1)kill.1
-@@ -222,10 +219,10 @@
- ###### install
-
- $(BINFILES) : all
-- $(install) --mode a=rx $(notdir $@) $@
-+ $(install) -m 555 $(notdir $@) $@
-
- $(MANFILES) : all
-- $(install) --mode a=r $(notdir $@) $@
-+ $(install) -m 444 $(notdir $@) $@
-
- install: $(filter-out $(SKIP) $(addprefix $(DESTDIR),$(SKIP)),$(INSTALL))
- cd $(usr/bin) && $(ln_f) skill snice
diff --git a/meta/recipes-extended/procps/procps-3.2.8/linux-limits.patch b/meta/recipes-extended/procps/procps-3.2.8/linux-limits.patch
deleted file mode 100644
index 2ca972482d..0000000000
--- a/meta/recipes-extended/procps/procps-3.2.8/linux-limits.patch
+++ /dev/null
@@ -1,15 +0,0 @@
-Upstream-Status: Pending
-
-diff --git a/pwdx.c b/pwdx.c
-index cb96a52..29ebce2 100644
---- a/pwdx.c
-+++ b/pwdx.c
-@@ -13,7 +13,7 @@
- #include <stdlib.h>
- #include <sys/types.h>
- #include <regex.h>
--#include <limits.h>
-+#include <linux/limits.h>
- #include <unistd.h>
- #include <errno.h>
-
diff --git a/meta/recipes-extended/procps/procps-3.2.8/procmodule.patch b/meta/recipes-extended/procps/procps-3.2.8/procmodule.patch
deleted file mode 100644
index 2a65c3509f..0000000000
--- a/meta/recipes-extended/procps/procps-3.2.8/procmodule.patch
+++ /dev/null
@@ -1,38 +0,0 @@
-Upstream-Status: Pending
-
-*** procps-3.2.5/proc/module.mk.orig Sun Jul 24 11:53:49 2005
---- procps-3.2.5/proc/module.mk Sun Jul 24 11:54:32 2005
-***************
-*** 96,102 ****
- #################### install rules ###########################
-
- $(lib)$(SOFILE) : proc/$(SONAME)
-! $(install) --mode a=rx $< $@
-
- ifneq ($(SOLINK),$(SOFILE))
- .PHONY: $(lib)$(SOLINK)
---- 96,102 ----
- #################### install rules ###########################
-
- $(lib)$(SOFILE) : proc/$(SONAME)
-! $(install) -m 555 $< $@
-
- ifneq ($(SOLINK),$(SOFILE))
- .PHONY: $(lib)$(SOLINK)
-***************
-*** 115,121 ****
- $(ldconfig)
-
- $(usr/lib)$(ANAME) : proc/$(ANAME)
-! $(install) --mode a=r $< $@
-
- # Junk anyway... supposed to go in /usr/include/$(NAME)
- #INSTALL += $(addprefix $(include),$(HDRFILES))
---- 115,121 ----
- $(ldconfig)
-
- $(usr/lib)$(ANAME) : proc/$(ANAME)
-! $(install) -m 444 $< $@
-
- # Junk anyway... supposed to go in /usr/include/$(NAME)
- #INSTALL += $(addprefix $(include),$(HDRFILES))
diff --git a/meta/recipes-extended/procps/procps-3.2.8/procps-3.2.7-top-remcpu.patch b/meta/recipes-extended/procps/procps-3.2.8/procps-3.2.7-top-remcpu.patch
deleted file mode 100644
index 0306c8d639..0000000000
--- a/meta/recipes-extended/procps/procps-3.2.8/procps-3.2.7-top-remcpu.patch
+++ /dev/null
@@ -1,111 +0,0 @@
-Upstream-Status: Pending
-
-fix that top will quit after cpu offline
-
-top utiliy fails to read /proc/stat after cpu offline, because Cpu_tot
-is still the original cpu numbers when calling cpus_refresh, in which
-it is trying to read and sscanf Cpu_tot times /proc/stat.
-
-The patch is from procps-3.2.8-2.fc12.src.rpm
-
-Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
-
----
---- procps-3.2.7/top.c.remcpu 2006-07-10 10:41:11.000000000 +0200
-+++ procps-3.2.7/top.c 2006-07-10 10:41:35.000000000 +0200
-@@ -912,6 +912,7 @@
- static CPU_t *cpus_refresh (CPU_t *cpus)
- {
- static FILE *fp = NULL;
-+ static int cpu_max;
- int i;
- int num;
- // enough for a /proc/stat CPU line (not the intr line)
-@@ -926,24 +927,29 @@
- can hold tics representing the /proc/stat cpu summary (the first
- line read) -- that slot supports our View_CPUSUM toggle */
- cpus = alloc_c((1 + Cpu_tot) * sizeof(CPU_t));
-+ cpu_max = Cpu_tot;
- }
-+ else if (cpu_max > Cpu_tot)
-+ /* move saved CUPs summary to cpu_max possition */
-+ memcpy(&cpus[cpu_max], &cpus[Cpu_tot], sizeof(CPU_t));
-+
- rewind(fp);
- fflush(fp);
-
- // first value the last slot with the cpu summary line
- if (!fgets(buf, sizeof(buf), fp)) std_err("failed /proc/stat read");
-- cpus[Cpu_tot].x = 0; // FIXME: can't tell by kernel version number
-- cpus[Cpu_tot].y = 0; // FIXME: can't tell by kernel version number
-- cpus[Cpu_tot].z = 0; // FIXME: can't tell by kernel version number
-+ cpus[cpu_max].x = 0; // FIXME: can't tell by kernel version number
-+ cpus[cpu_max].y = 0; // FIXME: can't tell by kernel version number
-+ cpus[cpu_max].z = 0; // FIXME: can't tell by kernel version number
- num = sscanf(buf, "cpu %Lu %Lu %Lu %Lu %Lu %Lu %Lu %Lu",
-- &cpus[Cpu_tot].u,
-- &cpus[Cpu_tot].n,
-- &cpus[Cpu_tot].s,
-- &cpus[Cpu_tot].i,
-- &cpus[Cpu_tot].w,
-- &cpus[Cpu_tot].x,
-- &cpus[Cpu_tot].y,
-- &cpus[Cpu_tot].z
-+ &cpus[cpu_max].u,
-+ &cpus[cpu_max].n,
-+ &cpus[cpu_max].s,
-+ &cpus[cpu_max].i,
-+ &cpus[cpu_max].w,
-+ &cpus[cpu_max].x,
-+ &cpus[cpu_max].y,
-+ &cpus[cpu_max].z
- );
- if (num < 4)
- std_err("failed /proc/stat read");
-@@ -955,7 +961,7 @@
- }
-
- // now value each separate cpu's tics
-- for (i = 0; 1 < Cpu_tot && i < Cpu_tot; i++) {
-+ for (i = 0; ; i++) {
- if (!fgets(buf, sizeof(buf), fp)) std_err("failed /proc/stat read");
- cpus[i].x = 0; // FIXME: can't tell by kernel version number
- cpus[i].y = 0; // FIXME: can't tell by kernel version number
-@@ -964,9 +970,35 @@
- &cpus[i].id,
- &cpus[i].u, &cpus[i].n, &cpus[i].s, &cpus[i].i, &cpus[i].w, &cpus[i].x, &cpus[i].y, &cpus[i].z
- );
-- if (num < 4)
-- std_err("failed /proc/stat read");
-+ if (num < 4) {
-+ Cpu_tot = i;
-+ break;
-+ }
-+ if (i == cpu_max - 1) {
-+ // Bump cpu_max and extend cpus
-+ cpu_max++;
-+ cpus = realloc(cpus, (1 + cpu_max) * sizeof(CPU_t));
-+ if (!cpus) std_err("realloc failed");
-+ memcpy(&cpus[cpu_max], &cpus[cpu_max-1], sizeof(CPU_t));
-+ }
-+ }
-+
-+ if (cpu_max > Cpu_tot)
-+ memcpy(&cpus[Cpu_tot], &cpus[cpu_max], sizeof(CPU_t));
-+
-+ // and just in case we're 2.2.xx compiled without SMP support...
-+ if (Cpu_tot == 1) {
-+ cpus[0].id = cpus[1].id = 0;
-+ cpus[0].u = cpus[1].u;
-+ cpus[0].n = cpus[1].n;
-+ cpus[0].s = cpus[1].s;
-+ cpus[0].i = cpus[1].i;
-+ cpus[0].w = cpus[1].w;
-+ cpus[0].x = cpus[1].x;
-+ cpus[0].y = cpus[1].y;
-+ cpus[0].z = cpus[1].z;
- }
-+
- return cpus;
- }
-
diff --git a/meta/recipes-extended/procps/procps-3.2.8/procps-3.2.8+gmake-3.82.patch b/meta/recipes-extended/procps/procps-3.2.8/procps-3.2.8+gmake-3.82.patch
deleted file mode 100644
index c8cee26eac..0000000000
--- a/meta/recipes-extended/procps/procps-3.2.8/procps-3.2.8+gmake-3.82.patch
+++ /dev/null
@@ -1,19 +0,0 @@
-Upstream-Status: Backport
-
-Fix for stricter Makefile parser in Make 3.82 take from Gentoo bugzilla:
-http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/sys-process/procps/files/procps-3.2.8%2Bgmake-3.82.patch?revision=1.1
-
-Index: procps-3.2.8/Makefile
-===================================================================
---- procps-3.2.8.orig/Makefile
-+++ procps-3.2.8/Makefile
-@@ -174,7 +174,8 @@ INSTALL := $(BINFILES) $(MANFILES)
- # want this rule first, use := on ALL, and ALL not filled in yet
- all: do_all
-
---include */module.mk
-+-include proc/module.mk
-+-include ps/module.mk
-
- do_all: $(ALL)
-
diff --git a/meta/recipes-extended/procps/procps-3.2.8/procps-3.2.8-ps-cgroup.patch b/meta/recipes-extended/procps/procps-3.2.8/procps-3.2.8-ps-cgroup.patch
deleted file mode 100644
index 1a294142f5..0000000000
--- a/meta/recipes-extended/procps/procps-3.2.8/procps-3.2.8-ps-cgroup.patch
+++ /dev/null
@@ -1,82 +0,0 @@
-From e529ce0b53f6b73d8b760cd37b23e0397720cede Mon Sep 17 00:00:00 2001
-From: Daniel Novotny <dnovotny@fedoraproject.org>
-Date: Mon, 16 Feb 2009 12:22:20 +0000
-Subject: add cgroup support
-
-Rebased for 3.2.8: Andrei Gherzan <andrei.gherzan@windriver.com>
-
-Upstream-Status: Pending
-
-The patch was imported from the meta-ivi repository
-(git://git.yoctoproject.org/meta-ivi) as of commit id
-74b9624fe94b2b90810717a13d481b0db9d2d95a
-
-Signed-off-by: Jukka Rissanen <jukka.rissanen@linux.intel.com>
-
-Index: procps-3.2.8/ps/output.c
-===================================================================
---- procps-3.2.8.orig/ps/output.c 2012-11-15 17:44:05.501337741 +0200
-+++ procps-3.2.8/ps/output.c 2012-11-15 17:48:31.585328231 +0200
-@@ -1099,6 +1099,39 @@
- return snprintf(outbuf, COLWID, "*");
- }
-
-+static int pr_cgroup(char *restrict const outbuf, const proc_t *restrict const pp){
-+ char filename[48];
-+ FILE *fd;
-+ int counter = 0;
-+ int c;
-+ int is_cgroup = 0;
-+
-+ outbuf[0]='\0';
-+ snprintf(filename, sizeof filename, "/proc/%d/cgroup", pp->tgid);
-+ fd = fopen(filename, "r");
-+ if (likely(fd == NULL)) goto fail;
-+ while (( (c = fgetc(fd)) != EOF) && (counter<665)) {
-+ if (is_cgroup == 0) {
-+ if (c == ':') {
-+ is_cgroup = 1;
-+ if (counter>0)
-+ outbuf[counter++]=';';
-+ }
-+ }else
-+ if ((c == '\n') || (c == '\0'))
-+ is_cgroup = 0;
-+ else
-+ outbuf[counter++]=c;
-+ }
-+ outbuf[counter]='\0';
-+ close(fd);
-+ if (counter>0)
-+ return counter;
-+fail:
-+ outbuf[0] = '-';
-+ outbuf[1] = '\0';
-+ return 1;
-+}
-
- /****************** FLASK & seLinux security stuff **********************/
- // move the bulk of this to libproc sometime
-@@ -1293,6 +1326,7 @@
- {"bsdtime", "TIME", pr_bsdtime, sr_nop, 6, 0, LNX, ET|RIGHT},
- {"c", "C", pr_c, sr_pcpu, 2, 0, SUN, ET|RIGHT},
- {"caught", "CAUGHT", pr_sigcatch, sr_nop, 9, 0, BSD, TO|SIGNAL}, /*sigcatch*/
-+{"cgroup", "CGROUP", pr_cgroup, sr_nop, 35, 0, LNX, PO|LEFT}, /* cgroups*/
- {"class", "CLS", pr_class, sr_sched, 3, 0, XXX, TO|LEFT},
- {"cls", "CLS", pr_class, sr_sched, 3, 0, HPU, TO|RIGHT}, /*says HPUX or RT*/
- {"cmaj_flt", "-", pr_nop, sr_cmaj_flt, 1, 0, LNX, AN|RIGHT},
-Index: procps-3.2.8/ps/ps.1
-===================================================================
---- procps-3.2.8.orig/ps/ps.1 2012-11-15 17:44:50.845336117 +0200
-+++ procps-3.2.8/ps/ps.1 2012-11-15 17:49:09.621326859 +0200
-@@ -904,6 +904,10 @@
- displayed. (alias\ \fBsig_catch\fR,\ \fBsigcatch\fR).
- T}
-
-+cgroup CGROUP T{
-+display control groups to which the process belonges.
-+t}
-+
- class CLS T{
- scheduling class of the process. (alias\ \fBpolicy\fR,\ \fBcls\fR).
- Field's possible values are:
diff --git a/meta/recipes-extended/procps/procps-3.2.8/psmodule.patch b/meta/recipes-extended/procps/procps-3.2.8/psmodule.patch
deleted file mode 100644
index 0775eaba26..0000000000
--- a/meta/recipes-extended/procps/procps-3.2.8/psmodule.patch
+++ /dev/null
@@ -1,23 +0,0 @@
-Upstream-Status: Pending
-
-*** procps-3.2.5/ps/module.mk.orig Sun Jul 24 11:54:40 2005
---- procps-3.2.5/ps/module.mk Sun Jul 24 11:55:02 2005
-***************
-*** 33,40 ****
-
-
- $(bin)ps: ps/ps
-! $(install) --mode a=rx $< $@
-
- $(man1)ps.1 : ps/ps.1
-! $(install) --mode a=r $< $@
- -rm -f $(DESTDIR)/var/catman/cat1/ps.1.gz $(DESTDIR)/var/man/cat1/ps.1.gz
---- 33,40 ----
-
-
- $(bin)ps: ps/ps
-! $(install) -m 555 $< $@
-
- $(man1)ps.1 : ps/ps.1
-! $(install) -m 444 $< $@
- -rm -f $(DESTDIR)/var/catman/cat1/ps.1.gz $(DESTDIR)/var/man/cat1/ps.1.gz
diff --git a/meta/recipes-extended/procps/procps.inc b/meta/recipes-extended/procps/procps.inc
deleted file mode 100644
index da91da232d..0000000000
--- a/meta/recipes-extended/procps/procps.inc
+++ /dev/null
@@ -1,31 +0,0 @@
-SUMMARY = "System and process monitoring utilities"
-DESCRIPTION = "Procps contains a set of system utilities that provide system information about processes using \
-the /proc filesystem. The package \ includes the programs ps, top, vmstat, w, kill, and skill."
-HOMEPAGE = "http://procps.sf.net"
-SECTION = "base"
-LICENSE = "GPLv2+ & LGPLv2+"
-LIC_FILES_CHKSUM="file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3 \
- file://COPYING.LIB;md5=6e29c688d912da12b66b73e32b03d812 \
- file://ps/COPYING;md5=6e29c688d912da12b66b73e32b03d812 \
- file://proc/COPYING;md5=6e29c688d912da12b66b73e32b03d812"
-DEPENDS = "ncurses"
-
-SRC_URI = "http://procps.sourceforge.net/procps-${PV}.tar.gz \
- file://install.patch"
-
-inherit autotools-brokensep update-alternatives
-
-do_install_append() {
- mv ${D}${bindir}/watch ${D}${bindir}/watch.${BPN}
-}
-
-FILES_${PN} += "${libdir}/*-${PV}.so ${base_libdir}/*-${PV}.so"
-FILES_SOLIBSDEV = ""
-
-ALTERNATIVE_${PN} = "top uptime free pkill pmap kill sysctl ps pgrep pwdx watch"
-ALTERNATIVE_LINK_NAME[kill] = "${base_bindir}/kill"
-ALTERNATIVE_LINK_NAME[sysctl] = "${base_sbindir}/sysctl"
-ALTERNATIVE_LINK_NAME[ps] = "${base_bindir}/ps"
-ALTERNATIVE_LINK_NAME[watch] = "${base_bindir}/watch"
-ALTERNATIVE_TARGET[watch] = "${bindir}/watch.${BPN}"
-ALTERNATIVE_PRIORITY = "110"
diff --git a/meta/recipes-extended/procps/procps/fix-configure.patch b/meta/recipes-extended/procps/procps/fix-configure.patch
new file mode 100644
index 0000000000..934ae80954
--- /dev/null
+++ b/meta/recipes-extended/procps/procps/fix-configure.patch
@@ -0,0 +1,19 @@
+
+exec_prefix defaults to /usr in OE-Core
+
+Upstream-Status: Inappropriate [embedded specific]
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+Index: procps-ng-3.3.10/configure.ac
+===================================================================
+--- procps-ng-3.3.10.orig/configure.ac
++++ procps-ng-3.3.10/configure.ac
+@@ -70,7 +70,7 @@ AC_FUNC_MMAP
+ AC_FUNC_REALLOC
+ AC_FUNC_STRTOD
+
+-usrbin_execdir='${exec_prefix}/usr/bin'
++usrbin_execdir='${exec_prefix}/bin'
+ AC_SUBST([usrbin_execdir])
+
+ AM_GNU_GETTEXT_VERSION([0.14.1])
diff --git a/meta/recipes-extended/procps/procps-3.2.8/sysctl.conf b/meta/recipes-extended/procps/procps/sysctl.conf
index 34e7488bf7..34e7488bf7 100644
--- a/meta/recipes-extended/procps/procps-3.2.8/sysctl.conf
+++ b/meta/recipes-extended/procps/procps/sysctl.conf
diff --git a/meta/recipes-extended/procps/procps_3.2.8.bb b/meta/recipes-extended/procps/procps_3.2.8.bb
deleted file mode 100644
index b314bfe33c..0000000000
--- a/meta/recipes-extended/procps/procps_3.2.8.bb
+++ /dev/null
@@ -1,32 +0,0 @@
-require procps.inc
-
-PR = "r12"
-
-SRC_URI += "file://procmodule.patch \
- file://psmodule.patch \
- file://linux-limits.patch \
- file://sysctl.conf \
- file://procps-3.2.8+gmake-3.82.patch \
- file://gnu-kbsd-version.patch \
- file://60_linux_version_init.patch \
- file://procps-3.2.7-top-remcpu.patch \
- file://procps-3.2.8-ps-cgroup.patch \
- file://detect_bitness.patch \
- "
-
-SRC_URI[md5sum] = "9532714b6846013ca9898984ba4cd7e0"
-SRC_URI[sha256sum] = "11ed68d8a4433b91cd833deb714a3aa849c02aea738c42e6b4557982419c1535"
-
-EXTRA_OEMAKE = 'CFLAGS="${CFLAGS} -I${STAGING_INCDIR}" \
- CPPFLAGS=-I${STAGING_INCDIR} \
- LDFLAGS="${LDFLAGS}" \
- CURSES=-lncurses \
- install="install -D" \
- ldconfig=echo'
-
-do_install_append () {
- install -d ${D}${sysconfdir}
- install -m 0644 ${WORKDIR}/sysctl.conf ${D}${sysconfdir}/sysctl.conf
-}
-
-CONFFILES_${PN} = "${sysconfdir}/sysctl.conf"
diff --git a/meta/recipes-extended/procps/procps_3.3.10.bb b/meta/recipes-extended/procps/procps_3.3.10.bb
new file mode 100644
index 0000000000..d8b04dc204
--- /dev/null
+++ b/meta/recipes-extended/procps/procps_3.3.10.bb
@@ -0,0 +1,62 @@
+SUMMARY = "System and process monitoring utilities"
+DESCRIPTION = "Procps contains a set of system utilities that provide system information about processes using \
+the /proc filesystem. The package includes the programs ps, top, vmstat, w, kill, and skill."
+HOMEPAGE = "https://gitorious.org/procps"
+SECTION = "base"
+LICENSE = "GPLv2+ & LGPLv2+"
+LIC_FILES_CHKSUM="file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
+ file://COPYING.LIB;md5=4cf66a4984120007c9881cc871cf49db \
+ "
+
+DEPENDS = "ncurses"
+
+inherit autotools gettext pkgconfig update-alternatives
+
+SRC_URI = "http://downloads.sourceforge.net/project/procps-ng/Production/procps-ng-${PV}.tar.xz \
+ file://fix-configure.patch \
+ file://sysctl.conf \
+ "
+
+SRC_URI[md5sum] = "1fb7f3f6bf92ce6c5c9ed9949ae858fe"
+SRC_URI[sha256sum] = "a02e6f98974dfceab79884df902ca3df30b0e9bad6d76aee0fb5dce17f267f04"
+
+S = "${WORKDIR}/procps-ng-${PV}"
+
+EXTRA_OECONF = "--enable-skill"
+
+CPPFLAGS += "-I${S}"
+
+do_install_append () {
+ install -d ${D}${base_bindir}
+ [ "${bindir}" != "${base_bindir}" ] && for i in ${base_bindir_progs}; do mv ${D}${bindir}/$i ${D}${base_bindir}/$i; done
+ install -d ${D}${base_sbindir}
+ [ "${sbindir}" != "${base_sbindir}" ] && for i in ${base_sbindir_progs}; do mv ${D}${sbindir}/$i ${D}${base_sbindir}/$i; done
+ # Remove now empty dir
+ rmdir ${D}/${sbindir}
+
+ install -d ${D}${sysconfdir}
+ install -m 0644 ${WORKDIR}/sysctl.conf ${D}${sysconfdir}/sysctl.conf
+ if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then
+ install -d ${D}${sysconfdir}/sysctl.d
+ ln -sf ../sysctl.conf ${D}${sysconfdir}/sysctl.d/99-sysctl.conf
+ fi
+}
+
+CONFFILES_${PN} = "${sysconfdir}/sysctl.conf"
+
+bindir_progs = "free pkill pmap pgrep pwdx skill snice top uptime"
+base_bindir_progs += "kill pidof ps watch"
+base_sbindir_progs += "sysctl"
+
+ALTERNATIVE_PRIORITY = "100"
+
+ALTERNATIVE_${PN} = "${bindir_progs} ${base_bindir_progs} ${base_sbindir_progs}"
+
+python __anonymous() {
+ for prog in d.getVar('base_bindir_progs', True).split():
+ d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir', True), prog))
+
+ for prog in d.getVar('base_sbindir_progs', True).split():
+ d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_sbindir', True), prog))
+}
+
diff --git a/meta/recipes-extended/psmisc/files/0002-Include-limits.h-for-PATH_MAX.patch b/meta/recipes-extended/psmisc/files/0002-Include-limits.h-for-PATH_MAX.patch
new file mode 100644
index 0000000000..c0835e041e
--- /dev/null
+++ b/meta/recipes-extended/psmisc/files/0002-Include-limits.h-for-PATH_MAX.patch
@@ -0,0 +1,29 @@
+From aa66afecd8ba9cc4139f25ab15ec315173413a7d Mon Sep 17 00:00:00 2001
+From: Paul Barker <paul@paulbarker.me.uk>
+Date: Wed, 20 Aug 2014 10:31:37 +0000
+Subject: [PATCH] Include <limits.h> for PATH_MAX
+
+When building against musl libc, PATH_MAX is defined in <limits.h>.
+
+Signed-off-by: Paul Barker <paul@paulbarker.me.uk>
+
+Upstream-Status: Accepted (Should be in next release after 22.21)
+---
+ src/pstree.c | 1 +
+ 1 file changed, 1 insertion(+)
+
+diff --git a/src/pstree.c b/src/pstree.c
+index 071e6c4..0d28260 100644
+--- a/src/pstree.c
++++ b/src/pstree.c
+@@ -41,6 +41,7 @@
+ #include <sys/types.h>
+ #include <sys/stat.h>
+ #include <sys/ioctl.h>
++#include <limits.h>
+
+ #include "i18n.h"
+ #include "comm.h"
+--
+2.0.4
+
diff --git a/meta/recipes-extended/psmisc/psmisc_22.21.bb b/meta/recipes-extended/psmisc/psmisc_22.21.bb
index 9f328bb81b..66aba9ec57 100644
--- a/meta/recipes-extended/psmisc/psmisc_22.21.bb
+++ b/meta/recipes-extended/psmisc/psmisc_22.21.bb
@@ -5,4 +5,6 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3"
SRC_URI[md5sum] = "935c0fd6eb208288262b385fa656f1bf"
SRC_URI[sha256sum] = "97323cad619210845b696d7d722c383852b2acb5c49b5b0852c4f29c77a8145a"
-SRC_URI += "file://0001-Typo-in-fuser-makes-M-on-all-the-time.patch"
+SRC_URI += "file://0001-Typo-in-fuser-makes-M-on-all-the-time.patch \
+ file://0002-Include-limits.h-for-PATH_MAX.patch \
+ "
diff --git a/meta/recipes-extended/rpcbind/rpcbind/rpcbind.service b/meta/recipes-extended/rpcbind/rpcbind/rpcbind.service
index 4de28d4ae1..b3ae2541a0 100644
--- a/meta/recipes-extended/rpcbind/rpcbind/rpcbind.service
+++ b/meta/recipes-extended/rpcbind/rpcbind/rpcbind.service
@@ -1,14 +1,12 @@
[Unit]
-Description=RPC Bind
-After=network.target
-Wants=rpcbind.target
-Before=rpcbind.target
+Description=RPC Bind Service
+Requires=rpcbind.socket
[Service]
Type=forking
EnvironmentFile=-@SYSCONFDIR@/rpcbind.conf
ExecStart=@SBINDIR@/rpcbind -w $RPCBIND_OPTS
-Restart=always
+SuccessExitStatus=2
[Install]
-WantedBy=multi-user.target
+Also=rpcbind.socket
diff --git a/meta/recipes-extended/rpcbind/rpcbind/rpcbind.socket b/meta/recipes-extended/rpcbind/rpcbind/rpcbind.socket
new file mode 100644
index 0000000000..d63c1d9720
--- /dev/null
+++ b/meta/recipes-extended/rpcbind/rpcbind/rpcbind.socket
@@ -0,0 +1,8 @@
+[Unit]
+Description=RPCbind Server Activation Socket
+
+[Socket]
+ListenStream=/var/run/rpcbind.sock
+
+[Install]
+WantedBy=sockets.target
diff --git a/meta/recipes-extended/rpcbind/rpcbind_0.2.1.bb b/meta/recipes-extended/rpcbind/rpcbind_0.2.1.bb
index 89e567bfb2..b7324766f7 100644
--- a/meta/recipes-extended/rpcbind/rpcbind_0.2.1.bb
+++ b/meta/recipes-extended/rpcbind/rpcbind_0.2.1.bb
@@ -15,6 +15,7 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/rpcbind/rpcbind-${PV}.tar.bz2 \
file://init.d \
${UCLIBCPATCHES} \
file://rpcbind.conf \
+ file://rpcbind.socket \
file://rpcbind.service \
"
@@ -34,13 +35,12 @@ PACKAGECONFIG[tcp-wrappers] = "--enable-libwrap,--disable-libwrap,tcp-wrappers"
INITSCRIPT_NAME = "rpcbind"
INITSCRIPT_PARAMS = "start 12 2 3 4 5 . stop 60 0 1 6 ."
-SYSTEMD_SERVICE_${PN} = "rpcbind.service"
-SYSTEMD_AUTO_ENABLE = "disable"
+SYSTEMD_SERVICE_${PN} = "rpcbind.service rpcbind.socket"
inherit useradd
USERADD_PACKAGES = "${PN}"
-USERADD_PARAM_${PN} = "--system --no-create-home \
+USERADD_PARAM_${PN} = "--system --no-create-home --home-dir / \
--shell /bin/false --user-group rpc"
EXTRA_OECONF += " --enable-warmstarts --with-rpcuser=rpc"
@@ -56,6 +56,7 @@ do_install_append () {
install -m 0755 ${WORKDIR}/rpcbind.conf ${D}${sysconfdir}
install -d ${D}${systemd_unitdir}/system
+ install -m 0644 ${WORKDIR}/rpcbind.socket ${D}${systemd_unitdir}/system
install -m 0644 ${WORKDIR}/rpcbind.service ${D}${systemd_unitdir}/system
sed -i -e 's,@SBINDIR@,${sbindir},g' \
-e 's,@SYSCONFDIR@,${sysconfdir},g' \
diff --git a/meta/recipes-extended/sed/sed_4.2.2.bb b/meta/recipes-extended/sed/sed_4.2.2.bb
index 74ac16bea9..ea39dae435 100644
--- a/meta/recipes-extended/sed/sed_4.2.2.bb
+++ b/meta/recipes-extended/sed/sed_4.2.2.bb
@@ -14,7 +14,7 @@ SRC_URI[md5sum] = "4111de4faa3b9848a0686b2f260c5056"
SRC_URI[sha256sum] = "fea0a94d4b605894f3e2d5572e3f96e4413bcad3a085aae7367c2cf07908b2ff"
inherit autotools texinfo update-alternatives gettext ptest
-RDEPENDS_${PN}-ptest += "make locale-base-ru-ru"
+RDEPENDS_${PN}-ptest += "make locale-base-ru-ru ${PN}"
EXTRA_OECONF = "--disable-acl \
${@bb.utils.contains('PTEST_ENABLED', '1', '--enable-regex-tests', '', d)}"
diff --git a/meta/recipes-extended/shadow/files/0001-Do-not-read-login.defs-before-doing-chroot.patch b/meta/recipes-extended/shadow/files/0001-Do-not-read-login.defs-before-doing-chroot.patch
new file mode 100644
index 0000000000..828b95a572
--- /dev/null
+++ b/meta/recipes-extended/shadow/files/0001-Do-not-read-login.defs-before-doing-chroot.patch
@@ -0,0 +1,46 @@
+From 170c25c8e0b5c3dc2615d1db94c8d24a13ff99bf Mon Sep 17 00:00:00 2001
+From: Peter Kjellerstedt <pkj@axis.com>
+Date: Thu, 11 Sep 2014 15:11:23 +0200
+Subject: [PATCH] Do not read login.defs before doing chroot()
+
+If "useradd --root <root> ..." was used, the login.defs file would still
+be read from /etc/login.defs instead of <root>/etc/login.defs. This was
+due to getdef_ulong() being called before process_root_flag().
+
+Upstream-Status: Submitted [http://lists.alioth.debian.org/pipermail/pkg-shadow-devel/2014-September/010446.html]
+
+Signed-off-by: Peter Kjellerstedt <peter.kjellerstedt@axis.com>
+---
+ src/useradd.c | 8 ++++++--
+ 1 file changed, 6 insertions(+), 2 deletions(-)
+
+diff --git a/src/useradd.c b/src/useradd.c
+index a8a1f76..e1ebf50 100644
+--- a/src/useradd.c
++++ b/src/useradd.c
+@@ -1993,9 +1993,11 @@ int main (int argc, char **argv)
+ #endif /* USE_PAM */
+ #endif /* ACCT_TOOLS_SETUID */
+
++#ifdef ENABLE_SUBIDS
+ /* Needed for userns check */
+- uid_t uid_min = (uid_t) getdef_ulong ("UID_MIN", 1000UL);
+- uid_t uid_max = (uid_t) getdef_ulong ("UID_MAX", 60000UL);
++ uid_t uid_min;
++ uid_t uid_max;
++#endif
+
+ /*
+ * Get my name so that I can use it to report errors.
+@@ -2026,6 +2028,8 @@ int main (int argc, char **argv)
+ is_shadow_grp = sgr_file_present ();
+ #endif
+ #ifdef ENABLE_SUBIDS
++ uid_min = (uid_t) getdef_ulong ("UID_MIN", 1000UL);
++ uid_max = (uid_t) getdef_ulong ("UID_MAX", 60000UL);
+ is_sub_uid = sub_uid_file_present () && !rflg &&
+ (!user_id || (user_id <= uid_max && user_id >= uid_min));
+ is_sub_gid = sub_gid_file_present () && !rflg &&
+--
+1.9.0
+
diff --git a/meta/recipes-extended/shadow/files/check_size_of_uid_t_and_gid_t_using_AC_CHECK_SIZEOF.patch b/meta/recipes-extended/shadow/files/check_size_of_uid_t_and_gid_t_using_AC_CHECK_SIZEOF.patch
new file mode 100644
index 0000000000..185590cabd
--- /dev/null
+++ b/meta/recipes-extended/shadow/files/check_size_of_uid_t_and_gid_t_using_AC_CHECK_SIZEOF.patch
@@ -0,0 +1,41 @@
+From 2cb54158b80cdbd97ca3b36df83f9255e923ae3f Mon Sep 17 00:00:00 2001
+From: James Le Cuirot <chewi@aura-online.co.uk>
+Date: Sat, 23 Aug 2014 09:46:39 +0100
+Subject: [PATCH] Check size of uid_t and gid_t using AC_CHECK_SIZEOF
+
+This built-in check is simpler than the previous method and, most
+importantly, works when cross-compiling.
+
+Upstream-Status: Accepted
+[https://github.com/shadow-maint/shadow/commit/2cb54158b80cdbd97ca3b36df83f9255e923ae3f]
+
+Signed-off-by: Serge Hallyn <serge.hallyn@ubuntu.com>
+---
+ configure.in | 14 ++++----------
+ 1 file changed, 4 insertions(+), 10 deletions(-)
+
+diff --git a/configure.in b/configure.in
+index 1a3f841..4a4d6d0 100644
+--- a/configure.in
++++ b/configure.in
+@@ -335,16 +335,10 @@ if test "$enable_subids" != "no"; then
+ dnl
+ dnl FIXME: check if 32 bit UIDs/GIDs are supported by libc
+ dnl
+- AC_RUN_IFELSE([AC_LANG_SOURCE([
+-#include <sys/types.h>
+-int main(void) {
+- uid_t u;
+- gid_t g;
+- return (sizeof u < 4) || (sizeof g < 4);
+-}
+- ])], [id32bit="yes"], [id32bit="no"])
+-
+- if test "x$id32bit" = "xyes"; then
++ AC_CHECK_SIZEOF([uid_t],, [#include "sys/types.h"])
++ AC_CHECK_SIZEOF([gid_t],, [#include "sys/types.h"])
++
++ if test "$ac_cv_sizeof_uid_t" -ge 4 && test "$ac_cv_sizeof_gid_t" -ge 4; then
+ AC_DEFINE(ENABLE_SUBIDS, 1, [Define to support the subordinate IDs.])
+ enable_subids="yes"
+ else
diff --git a/meta/recipes-extended/shadow/files/securetty b/meta/recipes-extended/shadow/files/securetty
index d919993e23..ecc246f799 100644
--- a/meta/recipes-extended/shadow/files/securetty
+++ b/meta/recipes-extended/shadow/files/securetty
@@ -9,17 +9,36 @@ ttyS2
ttyS3
# ARM AMBA SoCs
+ttyAM0
+ttyAM1
+ttyAM2
+ttyAM3
ttyAMA0
ttyAMA1
ttyAMA2
ttyAMA3
+# QCOM Socs
+ttyHSL0
+ttyHSL1
+ttyHSL2
+ttyHSL3
+ttyMSM0
+ttyMSM1
+ttyMSM2
+
# Samsung ARM SoCs
ttySAC0
ttySAC1
ttySAC2
ttySAC3
+# STM SoCs
+ttyAS0
+ttyAS1
+ttyAS2
+ttyAS3
+
# TI OMAP SoCs
ttyO0
ttyO1
diff --git a/meta/recipes-extended/shadow/shadow.inc b/meta/recipes-extended/shadow/shadow.inc
index 84d1f864b7..bb3a927c17 100644
--- a/meta/recipes-extended/shadow/shadow.inc
+++ b/meta/recipes-extended/shadow/shadow.inc
@@ -15,6 +15,8 @@ SRC_URI = "http://pkg-shadow.alioth.debian.org/releases/${BPN}-${PV}.tar.xz \
file://usermod-fix-compilation-failure-with-subids-disabled.patch \
file://fix-installation-failure-with-subids-disabled.patch \
file://0001-su.c-fix-to-exec-command-correctly.patch \
+ file://0001-Do-not-read-login.defs-before-doing-chroot.patch \
+ file://check_size_of_uid_t_and_gid_t_using_AC_CHECK_SIZEOF.patch \
${@bb.utils.contains('PACKAGECONFIG', 'pam', '${PAM_SRC_URI}', '', d)} \
"
@@ -51,13 +53,14 @@ EXTRA_OECONF += "--without-audit \
--without-libcrack \
--without-selinux \
--with-group-name-max-length=24 \
- --enable-subordinate-ids=no \
+ --enable-subordinate-ids=yes \
${NSCDOPT}"
NSCDOPT = ""
NSCDOPT_class-native = "--without-nscd"
NSCDOPT_class-nativesdk = "--without-nscd"
NSCDOPT_libc-uclibc = " --without-nscd"
+NSCDOPT_libc-glibc = "${@bb.utils.contains('DISTRO_FEATURES', 'libc-spawn', '--with-nscd', '--without-nscd', d)}"
PAM_PLUGINS = "libpam-runtime \
pam-plugin-faildelay \
@@ -157,6 +160,10 @@ ALTERNATIVE_LINK_NAME[vipw] = "${base_sbindir}/vipw"
ALTERNATIVE_LINK_NAME[vigr] = "${base_sbindir}/vigr"
ALTERNATIVE_LINK_NAME[su] = "${base_bindir}/su"
+ALTERNATIVE_${PN}-doc = "passwd.5 getspnam.3"
+ALTERNATIVE_LINK_NAME[passwd.5] = "${mandir}/man5/passwd.5"
+ALTERNATIVE_LINK_NAME[getspnam.3] = "${mandir}/man3/getspnam.3"
+
pkg_postinst_${PN} () {
if [ "x$D" != "x" ]; then
rootarg="--root $D"
diff --git a/meta/recipes-extended/sudo/files/volatiles.99_sudo b/meta/recipes-extended/sudo/files/volatiles.99_sudo
deleted file mode 100644
index ecb576eaa3..0000000000
--- a/meta/recipes-extended/sudo/files/volatiles.99_sudo
+++ /dev/null
@@ -1 +0,0 @@
-d root root 0755 /var/run/sudo none
diff --git a/meta/recipes-extended/sudo/sudo.inc b/meta/recipes-extended/sudo/sudo.inc
index 6de984a9f5..6e9aec824b 100644
--- a/meta/recipes-extended/sudo/sudo.inc
+++ b/meta/recipes-extended/sudo/sudo.inc
@@ -6,18 +6,20 @@ SECTION = "admin"
LICENSE = "ISC & BSD & Zlib"
LIC_FILES_CHKSUM = "file://doc/LICENSE;md5=69e337d679950e304953813158595256 \
file://plugins/sudoers/redblack.c;beginline=1;endline=41;md5=e2dbb155fc49beea947515300bab99e0 \
- file://compat/fnmatch.c;beginline=3;endline=27;md5=67f83ee9bd456557397082f8f1be0efd \
- file://compat/getcwd.c;beginline=5;endline=27;md5=449af4cc57fc7d46f42090608ba3e681 \
- file://compat/glob.c;beginline=6;endline=31;md5=5872733146b9eb0deb79e1f664815b85 \
- file://compat/snprintf.c;beginline=6;endline=31;md5=c98b24f02967c095d7a70ae2e4d4d4ea \
- file://include/queue.h;beginline=5;endline=27;md5=449af4cc57fc7d46f42090608ba3e681 \
- file://compat/inet_pton.c;beginline=3;endline=17;md5=3970ab0518ab79cbd0bafb697f10b33a"
+ file://lib/util/fnmatch.c;beginline=3;endline=27;md5=67f83ee9bd456557397082f8f1be0efd \
+ file://lib/util/getcwd.c;beginline=5;endline=27;md5=449af4cc57fc7d46f42090608ba3e681 \
+ file://lib/util/glob.c;beginline=6;endline=31;md5=5872733146b9eb0deb79e1f664815b85 \
+ file://lib/util/snprintf.c;beginline=6;endline=31;md5=c98b24f02967c095d7a70ae2e4d4d4ea \
+ file://include/sudo_queue.h;beginline=5;endline=27;md5=449af4cc57fc7d46f42090608ba3e681 \
+ file://lib/util/inet_pton.c;beginline=3;endline=17;md5=3970ab0518ab79cbd0bafb697f10b33a"
inherit autotools
PACKAGECONFIG ??= ""
PACKAGECONFIG[zlib] = "--enable-zlib,--disable-zlib,zlib"
+CONFFILES_${PN} = "${sysconfdir}/sudoers"
+
EXTRA_OECONF = "--with-editor=/bin/vi --with-env-editor"
do_configure_prepend () {
@@ -28,7 +30,7 @@ do_configure_prepend () {
# mksigname/mksiglist are used on build host to generate source files
do_compile_prepend () {
- oe_runmake SSP_CFLAGS="" SSP_LDFLAGS="" CC="$BUILD_CC" CFLAGS="$BUILD_CFLAGS" CPPFLAGS="$BUILD_CPPFLAGS -I${S}/include -I${S} -I${B}" -C compat mksigname mksiglist
+ oe_runmake SSP_CFLAGS="" SSP_LDFLAGS="" CC="$BUILD_CC" CFLAGS="$BUILD_CFLAGS" CPPFLAGS="$BUILD_CPPFLAGS -I${S}/include -I${S} -I${B}" -C lib/util mksigname mksiglist
}
# Explicitly create ${localstatedir}/lib before do_install to ensure
diff --git a/meta/recipes-extended/sudo/sudo_1.8.10p3.bb b/meta/recipes-extended/sudo/sudo_1.8.11p2.bb
index 46d47817e5..c5e2545c18 100644
--- a/meta/recipes-extended/sudo/sudo_1.8.10p3.bb
+++ b/meta/recipes-extended/sudo/sudo_1.8.11p2.bb
@@ -2,12 +2,12 @@ require sudo.inc
SRC_URI = "http://ftp.sudo.ws/sudo/dist/sudo-${PV}.tar.gz \
${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)} \
- file://volatiles.99_sudo"
+ "
PAM_SRC_URI = "file://sudo.pam"
-SRC_URI[md5sum] = "fcd8d0d9f9f0397d076ee901e242ed39"
-SRC_URI[sha256sum] = "6eda135fa68163108f1c24de6975de5ddb09d75730bb62d6390bda7b04345400"
+SRC_URI[md5sum] = "84012b4871b6c775c957cd310d5bad87"
+SRC_URI[sha256sum] = "8133849418fa18cf6b6bb6893d1855ff7afe21db8923234a00bf045c90fba1ad"
DEPENDS += " ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}"
RDEPENDS_${PN} += " ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam-plugin-limits pam-plugin-keyinit', '', d)}"
@@ -22,9 +22,8 @@ do_install_append () {
chmod 4111 ${D}${bindir}/sudo
chmod 0440 ${D}${sysconfdir}/sudoers
- # Explicitly remove the ${localstatedir}/run directory as we can
- # manage it by a configuration file under ${sysconfdir}/default/volatiles/
+ # Explicitly remove the ${localstatedir}/run directory to avoid QA error
rmdir -p --ignore-fail-on-non-empty ${D}${localstatedir}/run/sudo
- install -d ${D}/${sysconfdir}/default/volatiles
- install -m 644 ${WORKDIR}/volatiles.99_sudo ${D}/${sysconfdir}/default/volatiles/99_sudo
}
+
+FILES_${PN}-dev += "${libdir}/${BPN}/lib*${SOLIBSDEV} ${libdir}/${BPN}/*.la"
diff --git a/meta/recipes-extended/tar/tar-replacement-native_1.27.1.bb b/meta/recipes-extended/tar/tar-replacement-native_1.28.bb
index 071ede765b..071ede765b 100644
--- a/meta/recipes-extended/tar/tar-replacement-native_1.27.1.bb
+++ b/meta/recipes-extended/tar/tar-replacement-native_1.28.bb
diff --git a/meta/recipes-extended/tar/tar_1.27.1.bb b/meta/recipes-extended/tar/tar_1.28.bb
index 439bb500b9..a15b4b60e3 100644
--- a/meta/recipes-extended/tar/tar_1.27.1.bb
+++ b/meta/recipes-extended/tar/tar_1.28.bb
@@ -5,6 +5,5 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
SRC_URI += "file://remove-gets.patch \
"
-
-SRC_URI[md5sum] = "490e074dd7e71f553df8357a7ef9bdcf"
-SRC_URI[sha256sum] = "9b0fb3ce8512059337add0da5f8f0f7d7647f2201f5ece24581d620ea60337c6"
+SRC_URI[md5sum] = "8f32b2bc1ed7ddf4cf4e4a39711341b0"
+SRC_URI[sha256sum] = "60e4bfe0602fef34cd908d91cf638e17eeb09394d7b98c2487217dc4d3147562"
diff --git a/meta/recipes-extended/texinfo-dummy-native/texinfo-dummy/template.py b/meta/recipes-extended/texinfo-dummy-native/texinfo-dummy/template.py
index b0ebf75a15..8b7033eccc 100644
--- a/meta/recipes-extended/texinfo-dummy-native/texinfo-dummy/template.py
+++ b/meta/recipes-extended/texinfo-dummy-native/texinfo-dummy/template.py
@@ -44,7 +44,7 @@ with details on the recipe that failed.
""" % this_binary
# Autotools setups query the version, so this is actually necessary. Some of
-# them (lookin' at you, eglibc) actually look for the substring "GNU texinfo,"
+# them (lookin' at you, glibc) actually look for the substring "GNU texinfo,"
# so we put that substring in there without actually telling a lie.
version_str = """ %s (fake texinfo, emulating GNU texinfo) 5.2
diff --git a/meta/recipes-extended/texinfo/texinfo-4.8/check-locale-h.patch b/meta/recipes-extended/texinfo/texinfo-4.8/check-locale-h.patch
new file mode 100644
index 0000000000..4a8cc57a56
--- /dev/null
+++ b/meta/recipes-extended/texinfo/texinfo-4.8/check-locale-h.patch
@@ -0,0 +1,28 @@
+fix the macro check
+
+Upstream-Status: Pending
+
+configure does not check whether locale.h exists, but it does check for setlocale;
+if setlocale exists, locale.h should exist as well.
+
+Signed-off-by: Roy Li <rongqing.li@windriver.com>
+---
+ lib/system.h | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/lib/system.h b/lib/system.h
+index 946eb3c..c70037b 100644
+--- a/lib/system.h
++++ b/lib/system.h
+@@ -42,7 +42,7 @@ extern char *substring (const char *, const char *);
+ #include <ctype.h>
+
+ /* All systems nowadays probably have these functions, but ... */
+-#ifdef HAVE_LOCALE_H
++#ifdef HAVE_SETLOCALE
+ #include <locale.h>
+ #endif
+ #ifndef HAVE_SETLOCALE
+--
+1.7.10.4
+
diff --git a/meta/recipes-extended/texinfo/texinfo-4.8/do-compile-native-tools.patch b/meta/recipes-extended/texinfo/texinfo-4.8/do-compile-native-tools.patch
new file mode 100644
index 0000000000..2cc5fbb1fc
--- /dev/null
+++ b/meta/recipes-extended/texinfo/texinfo-4.8/do-compile-native-tools.patch
@@ -0,0 +1,49 @@
+From 458e9450cecf703f55536e609365162719585900 Mon Sep 17 00:00:00 2001
+From: "Roy.Li" <rongqing.li@windriver.com>
+Date: Wed, 10 Sep 2014 17:03:29 +0800
+Subject: [PATCH] do not compile host tools, since we have native
+
+Upstream-Status: Pending
+
+Signed-off-by: Roy.Li <rongqing.li@windriver.com>
+---
+ configure.ac | 23 +----------------------
+ 1 file changed, 1 insertion(+), 22 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index b46130d..cf58654 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -90,28 +90,7 @@ AC_CANONICAL_BUILD
+ # $native_tools is also added to SUBDIRS in the main Makefile.am,
+ # so that make compiles the native tools first.
+ #
+-if test "$cross_compiling" = no; then
+- native_tools=
+-else
+- native_tools=tools
+- test -d "$native_tools" || mkdir "$native_tools"
+- confdir=`(cd "$srcdir";pwd)`
+- # Make sure the secondary configure won't fail with
+- # "error: source directory already configured".
+- rm -f config.status
+- AC_MSG_NOTICE([[Doing configure of native tools (${build}).]])
+- cd "$native_tools" || exit 1
+- # Run secondary configure in alternate environment or
+- # it gets the wrong CC etc. env -i gives this build host configure
+- # a clean environment.
+- env -i CC="${BUILD_CC}" AR="${BUILD_AR}" RANLIB="${BUILD_RANLIB}" \
+- PATH="${PATH}" \
+- tools_only=1 \
+- ${confdir}/configure --build=${build} --host=${build} \
+- --disable-rpath --disable-nls
+- cd .. || exit 1
+- AC_MSG_NOTICE([[Continuing with main configure (${host}).]])
+-fi
++native_tools=
+ AC_SUBST(native_tools)
+ AM_CONDITIONAL(TOOLS_ONLY, [[test "x$tools_only" = x1]])
+
+--
+1.9.1
+
diff --git a/meta/recipes-extended/texinfo/texinfo-4.8/using-native-makeinfo.patch b/meta/recipes-extended/texinfo/texinfo-4.8/using-native-makeinfo.patch
new file mode 100644
index 0000000000..d9bed6933d
--- /dev/null
+++ b/meta/recipes-extended/texinfo/texinfo-4.8/using-native-makeinfo.patch
@@ -0,0 +1,24 @@
+From 9b0df7d6d3c18cfac82c291d60a5357d1bc8d9d0 Mon Sep 17 00:00:00 2001
+From: "Roy.Li" <rongqing.li@windriver.com>
+Date: Wed, 10 Sep 2014 17:10:03 +0800
+Subject: [PATCH] using native makeinfo
+
+Upstream-Status: Pending
+
+Signed-off-by: Roy.Li <rongqing.li@windriver.com>
+---
+ doc/Makefile.am | 2 +-
+
+diff --git a/doc/Makefile.am b/doc/Makefile.am
+index 63df818..b6ceb34 100644
+--- a/doc/Makefile.am
++++ b/doc/Makefile.am
+@@ -19,7 +19,7 @@ man_MANS = info.1 infokey.1 install-info.1 makeinfo.1 texindex.1 texi2dvi.1 \
+
+ # Use the programs built in our distribution, taking account of possible
+ # cross-compiling.
+-MAKEINFO = $(top_builddir)/$(native_tools)/makeinfo/makeinfo
++MAKEINFO = makeinfo
+ INSTALL_INFO = $(top_builddir)/$(native_tools)/util/install-info
+
+ TXI_XLATE = txi-cs.tex txi-de.tex txi-en.tex txi-es.tex txi-fr.tex \
diff --git a/meta/recipes-extended/texinfo/texinfo_4.8.bb b/meta/recipes-extended/texinfo/texinfo_4.8.bb
new file mode 100644
index 0000000000..6b36b97603
--- /dev/null
+++ b/meta/recipes-extended/texinfo/texinfo_4.8.bb
@@ -0,0 +1,56 @@
+SUMMARY = "Documentation system for on-line information and printed output"
+DESCRIPTION = "Texinfo is a documentation system that can produce both \
+online information and printed output from a single source file. The \
+GNU Project uses the Texinfo file format for most of its documentation."
+HOMEPAGE = "http://www.gnu.org/software/texinfo/"
+SECTION = "console/utils"
+LICENSE = "GPLv2"
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
+
+PROVIDES_append_class-native = " texinfo-4.8-replacement-native"
+
+DEPENDS = "zlib ncurses texinfo-4.8-replacement-native"
+DEPENDS_class-native = "zlib-native ncurses-native"
+
+TARGET_PATCH = "file://use_host_makedoc.patch \
+ file://using-native-makeinfo.patch \
+"
+TARGET_PATCH_class-native = ""
+
+SRC_URI = "${GNU_MIRROR}/texinfo/${BP}.tar.gz \
+ file://check-locale-h.patch \
+ file://do-compile-native-tools.patch \
+ ${TARGET_PATCH} \
+ "
+
+SRC_URI[md5sum] = "4e9a1a591ed236003d0d4b008bf07eef"
+SRC_URI[sha256sum] = "1f3cdeebe65fdf510f55d765ab1031b54416aa5bc2635b6a54ef9bcb2367c917"
+
+S = "${WORKDIR}/${BP}"
+tex_texinfo = "texmf/tex/texinfo"
+
+inherit gettext autotools
+
+do_install_append() {
+ mkdir -p ${D}${datadir}/${tex_texinfo}
+ install -p -m644 ${S}/doc/texinfo.tex ${S}/doc/txi-??.tex ${D}${datadir}/${tex_texinfo}
+}
+
+do_install_append_class-native() {
+ install -m 755 info/makedoc ${D}${bindir}
+ install -m 755 makeinfo/makeinfo ${D}${bindir}
+}
+
+PACKAGES += "info info-doc"
+
+FILES_info = "${bindir}/info ${bindir}/infokey ${bindir}/install-info"
+FILES_info-doc = "${infodir}/info.info ${infodir}/dir ${infodir}/info-*.info \
+ ${mandir}/man1/info.1* ${mandir}/man5/info.5* \
+ ${mandir}/man1/infokey.1* ${mandir}/man1/install-info.1*"
+
+FILES_${PN} = "${bindir}/makeinfo ${bindir}/texi* ${bindir}/pdftexi2dvi ${bindir}/pod2texi ${datadir}/texinfo"
+FILES_${PN}-doc = "${infodir}/texinfo* \
+ ${datadir}/${tex_texinfo} \
+ ${mandir}/man1 ${mandir}/man5"
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-extended/texinfo/texinfo_5.2.bb b/meta/recipes-extended/texinfo/texinfo_5.2.bb
index be90aba1d2..cf9dcfd5de 100644
--- a/meta/recipes-extended/texinfo/texinfo_5.2.bb
+++ b/meta/recipes-extended/texinfo/texinfo_5.2.bb
@@ -9,6 +9,19 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504"
PROVIDES_append_class-native = " texinfo-replacement-native"
+def compress_pkg(d):
+ if "compress_doc" in (d.getVar("INHERIT", True) or "").split():
+ compress = d.getVar("DOC_COMPRESS", True)
+ if compress == "gz":
+ return "gzip"
+ elif compress == "bz2":
+ return "bzip2"
+ elif compress == "xz":
+ return "xz"
+ return ""
+
+RDEPENDS_info += "${@compress_pkg(d)}"
+
DEPENDS = "zlib ncurses texinfo-replacement-native"
DEPENDS_class-native = "zlib-native ncurses-native"
@@ -51,7 +64,7 @@ do_install_append_class-native() {
PACKAGES += "info info-doc"
FILES_info = "${bindir}/info ${bindir}/infokey ${bindir}/install-info"
-FILES_info-doc = "${infodir}/info.info ${infodir}/dir ${infodir}/info-*.info \
+FILES_info-doc = "${infodir}/info.info* ${infodir}/dir ${infodir}/info-*.info* \
${mandir}/man1/info.1* ${mandir}/man5/info.5* \
${mandir}/man1/infokey.1* ${mandir}/man1/install-info.1*"
diff --git a/meta/recipes-extended/tzcode/tzcode-native.inc b/meta/recipes-extended/tzcode/tzcode-native.inc
index f0343332ae..4af1e20739 100644
--- a/meta/recipes-extended/tzcode/tzcode-native.inc
+++ b/meta/recipes-extended/tzcode/tzcode-native.inc
@@ -1,7 +1,7 @@
DESCRIPTION = "tzcode, timezone zoneinfo utils -- zic, zdump, tzselect"
LICENSE = "PD & BSD"
-LIC_FILES_CHKSUM = "file://${WORKDIR}/README;md5=0b7570113550eb5d30aa4bd220964b8f"
+LIC_FILES_CHKSUM = "file://${WORKDIR}/README;md5=d0ff93a73dd5bc3c6e724bb4343760f6"
S = "${WORKDIR}"
diff --git a/meta/recipes-extended/tzcode/tzcode-native_2014f.bb b/meta/recipes-extended/tzcode/tzcode-native_2014f.bb
deleted file mode 100644
index d9557ea916..0000000000
--- a/meta/recipes-extended/tzcode/tzcode-native_2014f.bb
+++ /dev/null
@@ -1,11 +0,0 @@
-# note that we allow for us to use data later than our code version
-#
-SRC_URI =" ftp://ftp.iana.org/tz/releases/tzcode${PV}.tar.gz;name=tzcode \
- ftp://ftp.iana.org/tz/releases/tzdata2014f.tar.gz;name=tzdata"
-
-SRC_URI[tzcode.md5sum] = "1e15be52900cd49e93f093d4731fec96"
-SRC_URI[tzcode.sha256sum] = "8c12b56abf614722f0ab7cfc502492063b2c7c5de19563540132b81709ac2555"
-SRC_URI[tzdata.md5sum] = "f333b2e8f876221a97871cae0c188aa5"
-SRC_URI[tzdata.sha256sum] = "eed690a72124f380bcb14947d398a7a482acb9ab792ae78bd4554e52c5ca2001"
-
-require tzcode-native.inc
diff --git a/meta/recipes-extended/tzcode/tzcode-native_2014j.bb b/meta/recipes-extended/tzcode/tzcode-native_2014j.bb
new file mode 100644
index 0000000000..b76aa33494
--- /dev/null
+++ b/meta/recipes-extended/tzcode/tzcode-native_2014j.bb
@@ -0,0 +1,10 @@
+# note that we allow the timezone data to be newer than our code version
+#
+SRC_URI =" ftp://ftp.iana.org/tz/releases/tzcode${PV}.tar.gz;name=tzcode \
+ ftp://ftp.iana.org/tz/releases/tzdata2014j.tar.gz;name=tzdata"
+
+SRC_URI[tzcode.md5sum] = "970119e9765bc5a9320368851c91ecb6"
+SRC_URI[tzcode.sha256sum] = "7fd46125464856309fc81fe85a67a61de862b8ab884ce8ca82051f5fa308ede2"
+SRC_URI[tzdata.md5sum] = "2d7ea9c309f0d4e162e426e568290ca3"
+SRC_URI[tzdata.sha256sum] = "a2d870320694d40535df822ac8074dc629a90e92abafa5d3373314f78ddc0e0d"
+require tzcode-native.inc
diff --git a/meta/recipes-extended/tzdata/tzdata_2014f.bb b/meta/recipes-extended/tzdata/tzdata_2014f.bb
deleted file mode 100644
index caad383671..0000000000
--- a/meta/recipes-extended/tzdata/tzdata_2014f.bb
+++ /dev/null
@@ -1,6 +0,0 @@
-SRC_URI = "ftp://ftp.iana.org/tz/releases/tzdata${PV}.tar.gz;name=tzdata"
-
-SRC_URI[tzdata.md5sum] = "f333b2e8f876221a97871cae0c188aa5"
-SRC_URI[tzdata.sha256sum] = "eed690a72124f380bcb14947d398a7a482acb9ab792ae78bd4554e52c5ca2001"
-
-require tzdata.inc
diff --git a/meta/recipes-extended/tzdata/tzdata_2014j.bb b/meta/recipes-extended/tzdata/tzdata_2014j.bb
new file mode 100644
index 0000000000..f0388c2fd2
--- /dev/null
+++ b/meta/recipes-extended/tzdata/tzdata_2014j.bb
@@ -0,0 +1,6 @@
+SRC_URI = "ftp://ftp.iana.org/tz/releases/tzdata${PV}.tar.gz;name=tzdata"
+
+SRC_URI[tzdata.md5sum] = "2d7ea9c309f0d4e162e426e568290ca3"
+SRC_URI[tzdata.sha256sum] = "a2d870320694d40535df822ac8074dc629a90e92abafa5d3373314f78ddc0e0d"
+
+require tzdata.inc
diff --git a/meta/recipes-extended/watchdog/files/fix-ping-failure.patch b/meta/recipes-extended/watchdog/watchdog/fix-ping-failure.patch
index f5976eb5cf..14ab9c56ab 100644
--- a/meta/recipes-extended/watchdog/files/fix-ping-failure.patch
+++ b/meta/recipes-extended/watchdog/watchdog/fix-ping-failure.patch
@@ -33,11 +33,11 @@ Signed-off-by: Roy.Li <rongqing.li@windriver.com>
src/watchdog.c | 5 ++++-
2 files changed, 5 insertions(+), 2 deletions(-)
-Index: watchdog-5.13/src/watchdog.c
+Index: watchdog-5.14/src/watchdog.c
===================================================================
---- watchdog-5.13.orig/src/watchdog.c 2013-02-01 03:15:44.000000000 -0800
-+++ watchdog-5.13/src/watchdog.c 2013-03-11 22:27:48.741657881 -0700
-@@ -28,6 +28,7 @@
+--- watchdog-5.14.orig/src/watchdog.c
++++ watchdog-5.14/src/watchdog.c
+@@ -24,6 +24,7 @@
#include <sys/types.h>
#include <sys/ioctl.h>
#include <linux/oom.h>
@@ -45,21 +45,35 @@ Index: watchdog-5.13/src/watchdog.c
#include <linux/watchdog.h>
#include <string.h>
-@@ -567,6 +568,8 @@
- pid_t child_pid;
- int oom_adjusted = 0;
- struct stat s;
-+ struct icmp_filter filt;
-+ filt.data = ~(1<<ICMP_ECHOREPLY);
+Index: watchdog-5.14/src/net.c
+===================================================================
+--- watchdog-5.14.orig/src/net.c
++++ watchdog-5.14/src/net.c
+@@ -11,7 +11,8 @@
+ #include <errno.h>
+ #include <sys/time.h>
+ #include <netinet/ip.h>
+-#include <netinet/ip_icmp.h>
++#include <linux/icmp.h>
++//#include <netinet/ip_icmp.h>
+ #include <fcntl.h>
+ #include <string.h>
+ #include <unistd.h> /* for gethostname() etc */
+@@ -179,6 +180,9 @@ int open_netcheck(struct list *tlist)
+ {
+ struct list *act;
+ int hold = 0;
++ struct icmp_filter filt;
++ filt.data = ~(1<<ICMP_ECHOREPLY);
++
+
+ if (tlist != NULL) {
+ for (act = tlist; act != NULL; act = act->next) {
+@@ -202,6 +206,7 @@ int open_netcheck(struct list *tlist)
+ fatal_error(EX_SYSERR, "error opening socket (%s)", strerror(errno));
+ }
+
++ setsockopt(net->sock_fp, SOL_RAW, ICMP_FILTER, (char*)&filt, sizeof(filt));
+ /* this is necessary for broadcast pings to work */
+ (void)setsockopt(net->sock_fp, SOL_SOCKET, SO_BROADCAST, (char *)&hold, sizeof(hold));
- #if USE_SYSLOG
- char *opts = "d:i:n:Ffsvbql:p:t:c:r:m:a:";
-@@ -703,7 +706,7 @@
- perror(progname);
- exit(1);
- }
--
-+ setsockopt(net->sock_fp, SOL_RAW, ICMP_FILTER, (char*)&filt, sizeof(filt));
- /* this is necessary for broadcast pings to work */
- (void) setsockopt(net->sock_fp, SOL_SOCKET, SO_BROADCAST, (char *)&hold, sizeof(hold));
-
diff --git a/meta/recipes-extended/watchdog/files/fixsepbuild.patch b/meta/recipes-extended/watchdog/watchdog/fixsepbuild.patch
index 2fad3a112f..2fad3a112f 100644
--- a/meta/recipes-extended/watchdog/files/fixsepbuild.patch
+++ b/meta/recipes-extended/watchdog/watchdog/fixsepbuild.patch
diff --git a/meta/recipes-extended/watchdog/watchdog_5.13.bb b/meta/recipes-extended/watchdog/watchdog_5.14.bb
index b9e29d22fc..c77d91dcdd 100644
--- a/meta/recipes-extended/watchdog/watchdog_5.13.bb
+++ b/meta/recipes-extended/watchdog/watchdog_5.14.bb
@@ -11,8 +11,9 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=ecc0551bf54ad97f6b541720f84d6569"
SRC_URI = "${SOURCEFORGE_MIRROR}/watchdog/watchdog-${PV}.tar.gz \
file://fixsepbuild.patch \
file://fix-ping-failure.patch"
-SRC_URI[md5sum] = "153455f008f1cf8f65f6ad9586a21ff1"
-SRC_URI[sha256sum] = "141e0faf3ee4d8187a6ff4e00b18ef7b7a4ce432a2d4c8a6e6fdc62507fc6eb0"
+
+SRC_URI[md5sum] = "5b2dba0c593942f4acc100bca0d560c4"
+SRC_URI[sha256sum] = "620b2f49e9879f2e85c73d4c1f422f9101e6b38e824fea2414befd8bb6866ad1"
inherit autotools
diff --git a/meta/recipes-extended/wget/wget.inc b/meta/recipes-extended/wget/wget.inc
index dc46841537..049b898d39 100644
--- a/meta/recipes-extended/wget/wget.inc
+++ b/meta/recipes-extended/wget/wget.inc
@@ -7,7 +7,7 @@ DEPENDS = "gnutls zlib libpcre"
INC_PR = "r16"
-inherit autotools gettext texinfo update-alternatives
+inherit autotools gettext texinfo update-alternatives pkgconfig
EXTRA_OECONF = "--enable-ipv6 --with-ssl=gnutls --disable-rpath --disable-iri \
--without-libgnutls-prefix ac_cv_header_uuid_uuid_h=no"
@@ -19,3 +19,6 @@ ALTERNATIVE_PRIORITY = "100"
RRECOMMENDS_${PN} += "ca-certificates"
BBCLASSEXTEND += "nativesdk"
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[libuuid] = "--with-libuuid, --without-libuuid,util-linux"
diff --git a/meta/recipes-extended/wget/wget_1.15.bb b/meta/recipes-extended/wget/wget_1.15.bb
deleted file mode 100644
index c2fcca740c..0000000000
--- a/meta/recipes-extended/wget/wget_1.15.bb
+++ /dev/null
@@ -1,7 +0,0 @@
-SRC_URI = "${GNU_MIRROR}/wget/wget-${PV}.tar.gz \
- file://fix_makefile.patch \
- "
-SRC_URI[md5sum] = "506df41295afc6486662cc47470b4618"
-SRC_URI[sha256sum] = "52126be8cf1bddd7536886e74c053ad7d0ed2aa89b4b630f76785bac21695fcd"
-
-require wget.inc
diff --git a/meta/recipes-extended/wget/wget_1.16.1.bb b/meta/recipes-extended/wget/wget_1.16.1.bb
new file mode 100644
index 0000000000..ca6f0fee25
--- /dev/null
+++ b/meta/recipes-extended/wget/wget_1.16.1.bb
@@ -0,0 +1,8 @@
+SRC_URI = "${GNU_MIRROR}/wget/wget-${PV}.tar.gz \
+ file://fix_makefile.patch \
+ "
+
+SRC_URI[md5sum] = "5d12410a398ec9907e105e8734561ba0"
+SRC_URI[sha256sum] = "3b834ce69366d4681f295307fce36ee14e122c4ee68a4d1291b62b0b26755a77"
+
+require wget.inc
diff --git a/meta/recipes-extended/which/which-2.18/automake-foreign.patch b/meta/recipes-extended/which/which-2.18/automake-foreign.patch
new file mode 100644
index 0000000000..495cdc6cf4
--- /dev/null
+++ b/meta/recipes-extended/which/which-2.18/automake-foreign.patch
@@ -0,0 +1,28 @@
+Subject: [PATCH] automake foreign strictness
+
+Use foreign strictness to avoid automake errors.
+
+Upstream-Status: Inappropriate [upstream no longer active]
+
+Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
+---
+ configure.ac | 5 +++--
+ 1 files changed, 3 insertions(+), 2 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index b30b6f5..bd3222c 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -1,6 +1,7 @@
+ dnl Process this file with autoconf to produce a configure script.
+-AC_INIT(which.c)
+-AM_INIT_AUTOMAKE(which, 2.18)
++AC_INIT([which],[2.18])
++AC_CONFIG_SRCDIR(which.c)
++AM_INIT_AUTOMAKE([foreign])
+ AM_CONFIG_HEADER(config.h)
+ AM_MAINTAINER_MODE
+
+--
+1.7.1
+
diff --git a/meta/recipes-extended/which/which-2.20/automake.patch b/meta/recipes-extended/which/which-2.20/automake.patch
index 92365dd854..ef00bc9f23 100644
--- a/meta/recipes-extended/which/which-2.20/automake.patch
+++ b/meta/recipes-extended/which/which-2.20/automake.patch
@@ -1,3 +1,8 @@
+Update autoconf prologue to use "foreign" strictness.
+
+Upstream-Status: Pending
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
diff --git a/configure.ac b/configure.ac
index d974461..a20dfa8 100644
--- a/configure.ac
diff --git a/meta/recipes-extended/which/which_2.18.bb b/meta/recipes-extended/which/which_2.18.bb
index 5fa5d9e8eb..fd68520d97 100644
--- a/meta/recipes-extended/which/which_2.18.bb
+++ b/meta/recipes-extended/which/which_2.18.bb
@@ -12,7 +12,9 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe"
PR = "r2"
SRC_URI = "http://www.xs4all.nl/~carlo17/which/which-${PV}.tar.gz \
- file://fix_name_conflict_group_member.patch"
+ file://fix_name_conflict_group_member.patch \
+ file://automake-foreign.patch \
+"
SRC_URI[md5sum] = "42d51938e48b91f6e19fabf216f5c3e9"
SRC_URI[sha256sum] = "9445cd7e02ec0c26a44fd56098464ded064ba5d93dd2e15ec12410ba56b2e544"
diff --git a/meta/recipes-extended/xinetd/xinetd/xinetd.service b/meta/recipes-extended/xinetd/xinetd/xinetd.service
new file mode 100644
index 0000000000..d5fdc5bc29
--- /dev/null
+++ b/meta/recipes-extended/xinetd/xinetd/xinetd.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Xinetd, a powerful replacement for inetd
+After=syslog.target network.target
+
+[Service]
+Type=forking
+PIDFile=/var/run/xinetd.pid
+EnvironmentFile=-/etc/sysconfig/xinetd
+ExecStart=@SBINDIR@/xinetd -stayalive -pidfile /var/run/xinetd.pid "$EXTRAOPTIONS"
+ExecReload=@BASE_BINDIR@/kill -HUP $MAINPID
+
+[Install]
+WantedBy=multi-user.target
diff --git a/meta/recipes-extended/xinetd/xinetd_2.3.15.bb b/meta/recipes-extended/xinetd/xinetd_2.3.15.bb
index 1928949e94..288186e1ba 100644
--- a/meta/recipes-extended/xinetd/xinetd_2.3.15.bb
+++ b/meta/recipes-extended/xinetd/xinetd_2.3.15.bb
@@ -17,12 +17,15 @@ SRC_URI = "http://www.xinetd.org/xinetd-${PV}.tar.gz \
file://Disable-services-from-inetd.conf-if-a-service-with-t.patch \
file://xinetd-should-be-able-to-listen-on-IPv6-even-in-ine.patch \
file://xinetd-CVE-2013-4342.patch \
+ file://xinetd.service \
"
SRC_URI[md5sum] = "77358478fd58efa6366accae99b8b04c"
SRC_URI[sha256sum] = "bf4e060411c75605e4dcbdf2ac57c6bd9e1904470a2f91e01ba31b50a80a5be3"
-inherit autotools update-rc.d
+inherit autotools update-rc.d systemd
+
+SYSTEMD_SERVICE_${PN} = "xinetd.service"
INITSCRIPT_NAME = "xinetd"
INITSCRIPT_PARAMS = "defaults"
@@ -52,6 +55,13 @@ do_install() {
install -m 644 "${WORKDIR}/xinetd.default" "${D}${sysconfdir}/default/xinetd"
install -m 755 "${B}/xinetd/xinetd" "${D}${sbindir}"
install -m 755 "${B}/xinetd/itox" "${D}${sbindir}"
+
+ # Install systemd unit files
+ install -d ${D}${systemd_unitdir}/system
+ install -m 0644 ${WORKDIR}/xinetd.service ${D}${systemd_unitdir}/system
+ sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \
+ -e 's,@SBINDIR@,${sbindir},g' \
+ ${D}${systemd_unitdir}/system/xinetd.service
}
CONFFILES_${PN} = "${sysconfdir}/xinetd.conf"
diff --git a/meta/recipes-extended/xz/xz_5.1.3alpha.bb b/meta/recipes-extended/xz/xz_5.2.0.bb
index b8501a5a39..3439ec911f 100644
--- a/meta/recipes-extended/xz/xz_5.1.3alpha.bb
+++ b/meta/recipes-extended/xz/xz_5.2.0.bb
@@ -25,8 +25,8 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=c475b6c7dca236740ace4bba553e8e1c \
file://lib/getopt.c;endline=23;md5=2069b0ee710572c03bb3114e4532cd84 "
SRC_URI = "http://tukaani.org/xz/xz-${PV}.tar.gz"
-SRC_URI[md5sum] = "bbb2daa876c87fb2cf9fe4590af9694e"
-SRC_URI[sha256sum] = "9f94506e301d5b6863921bba861a99ba00de384dafb4e5f409679a93e41613d4"
+SRC_URI[md5sum] = "be585bdf8672e4406632eda3d819e284"
+SRC_URI[sha256sum] = "231ef369982240bb20ed7cffa52bb12a4a297ce6871f480ab85e8a7ba98bf3d6"
inherit autotools gettext
@@ -38,3 +38,5 @@ FILES_liblzma-staticdev = "${libdir}/liblzma.a"
FILES_liblzma-dbg = "${libdir}/.debug/liblzma*"
BBCLASSEXTEND = "native nativesdk"
+
+export CONFIG_SHELL="/bin/sh"
diff --git a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.30.8.bb b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.30.8.bb
index 3d08146d4a..a63d4546f6 100644
--- a/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.30.8.bb
+++ b/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.30.8.bb
@@ -23,7 +23,7 @@ SRC_URI = "${GNOME_MIRROR}/${BPN}/${MAJ_VER}/${BPN}-${PV}.tar.xz \
SRC_URI[md5sum] = "4fed0d54432f1b69fc6e66e608bd5542"
SRC_URI[sha256sum] = "4853830616113db4435837992c0aebd94cbb993c44dc55063cee7f72a7bef8be"
-inherit autotools pkgconfig gettext pixbufcache ptest
+inherit autotools pkgconfig gettext pixbufcache ptest-gnome
LIBV = "2.10.0"
@@ -42,10 +42,7 @@ PACKAGECONFIG[jpeg2000] = "--with-libjasper,--without-libjasper,jasper"
PACKAGECONFIG[gio-sniff] = "--enable-gio-sniffing,--disable-gio-sniffing,,shared-mime-info"
PACKAGECONFIG[x11] = "--with-x11,--without-x11,virtual/libx11"
-EXTRA_OECONF = "\
- --disable-introspection \
- ${@bb.utils.contains('DISTRO_FEATURES', 'ptest', '--enable-installed-tests', '--disable-installed-tests', d)} \
-"
+EXTRA_OECONF = "--disable-introspection"
PACKAGES =+ "${PN}-xlib"
@@ -67,11 +64,6 @@ FILES_${PN}-dbg += " \
${libdir}/gdk-pixbuf-2.0/${LIBV}/loaders/.debug/* \
"
-FILES_${PN}-ptest += "${libexecdir}/installed-tests \
- ${datadir}/installed-tests/gdk-pixbuf"
-
-RDEPENDS_${PN}-ptest += "gnome-desktop-testing"
-
PACKAGES_DYNAMIC += "^gdk-pixbuf-loader-.*"
PACKAGES_DYNAMIC_class-native = ""
diff --git a/meta/recipes-gnome/gnome/gnome-common_3.12.0.bb b/meta/recipes-gnome/gnome/gnome-common_3.14.0.bb
index d3b6683090..1f9fe8bf5c 100644
--- a/meta/recipes-gnome/gnome/gnome-common_3.12.0.bb
+++ b/meta/recipes-gnome/gnome/gnome-common_3.14.0.bb
@@ -10,8 +10,8 @@ inherit gnomebase allarch
GNOME_COMPRESS_TYPE = "xz"
-SRC_URI[archive.md5sum] = "da903a1c89b0a24e062227fa97d42fe7"
-SRC_URI[archive.sha256sum] = "18712bc2df6b2dd88a11b9f7f874096d1c0c6e7ebc9cfc0686ef963bd590e1d8"
+SRC_URI[archive.md5sum] = "ba58c61d0d81b7c3ff8452c620513a9d"
+SRC_URI[archive.sha256sum] = "4c00242f781bb441289f49dd80ed1d895d84de0c94bfc2c6818a104c9e39262c"
EXTRA_AUTORECONF = ""
DEPENDS = ""
diff --git a/meta/recipes-gnome/gnome/gnome-desktop.inc b/meta/recipes-gnome/gnome/gnome-desktop.inc
deleted file mode 100644
index 3853022710..0000000000
--- a/meta/recipes-gnome/gnome/gnome-desktop.inc
+++ /dev/null
@@ -1,23 +0,0 @@
-SUMMARY = "GNOME library for reading .desktop files"
-SECTION = "x11/gnome"
-LICENSE = "GPLv2 & LGPLv2"
-DEPENDS = "gconf libxrandr virtual/libx11 gtk+ glib-2.0 gnome-doc-utils startup-notification"
-
-EXTRA_OECONF = "--disable-scrollkeeper --disable-desktop-docs"
-
-do_configure_prepend () {
- cp ${STAGING_DATADIR_NATIVE}/gnome-common/data/omf.make ${S}
-}
-
-FILES_${PN} += "${datadir}/gnome-about ${datadir}/libgnome-desktop/pnp.ids"
-
-PR = "r6"
-
-inherit gnomebase
-
-do_install_append () {
- sed -i -e's,${STAGING_BINDIR_NATIVE},${bindir},g' ${D}${bindir}/gnome-about
- sed -i -e '1s,#!.*python,#! ${USRBINPATH}/env python,' ${D}${bindir}/gnome-about
-}
-
-
diff --git a/meta/recipes-gnome/gnome/gnome-desktop_2.32.1.bb b/meta/recipes-gnome/gnome/gnome-desktop_2.32.1.bb
deleted file mode 100644
index 424eafeaf0..0000000000
--- a/meta/recipes-gnome/gnome/gnome-desktop_2.32.1.bb
+++ /dev/null
@@ -1,7 +0,0 @@
-require gnome-desktop.inc
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
- file://COPYING.LIB;md5=5f30f0716dfdd0d91eb439ebec522ec2"
-
-SRC_URI[archive.md5sum] = "5c80d628a240eb9d9ff78913b31f2f67"
-SRC_URI[archive.sha256sum] = "55cbecf67efe1fa1e57ac966520a7c46d799c8ba3c652a1219f60cafccb3739d"
diff --git a/meta/recipes-gnome/gnome/gnome-doc-utils.inc b/meta/recipes-gnome/gnome/gnome-doc-utils.inc
index e148ce3350..958750690a 100644
--- a/meta/recipes-gnome/gnome/gnome-doc-utils.inc
+++ b/meta/recipes-gnome/gnome/gnome-doc-utils.inc
@@ -10,6 +10,8 @@ DEPENDS_class-native = "libxml2-native libxslt-native intltool-native glib-2.0-n
inherit gnomebase gettext python-dir pythonnative autotools-brokensep
+CLEANBROKEN = "1"
+
EXTRA_OECONF += "--disable-scrollkeeper"
do_install_append() {
diff --git a/meta/recipes-gnome/gnome/gsettings-desktop-schemas_3.10.1.bb b/meta/recipes-gnome/gnome/gsettings-desktop-schemas_3.10.1.bb
deleted file mode 100644
index a0123d9005..0000000000
--- a/meta/recipes-gnome/gnome/gsettings-desktop-schemas_3.10.1.bb
+++ /dev/null
@@ -1,16 +0,0 @@
-SUMMARY = "GNOME desktop-wide GSettings schemas"
-HOMEPAGE = "http://live.gnome.org/gsettings-desktop-schemas"
-BUGTRACKER = "https://bugzilla.gnome.org/"
-
-LICENSE = "LGPLv2.1"
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-PR = "r1"
-
-DEPENDS = "glib-2.0 intltool-native gobject-introspection-stub-native"
-
-inherit gnomebase gsettings gettext
-
-GNOME_COMPRESS_TYPE = "xz"
-
-SRC_URI[archive.md5sum] = "f9ffca591a984f19a1dd9caeb96b5f23"
-SRC_URI[archive.sha256sum] = "452378c4960a145747ec69f8c6a874e5b7715454df3e2452d1ff1a0a82e76811"
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.22/hardcoded_libtool.patch b/meta/recipes-gnome/gtk+/gtk+-2.24.22/hardcoded_libtool.patch
deleted file mode 100644
index 13ff318768..0000000000
--- a/meta/recipes-gnome/gtk+/gtk+-2.24.22/hardcoded_libtool.patch
+++ /dev/null
@@ -1,1814 +0,0 @@
-Upstream-Status: Inappropriate [embedded specific]
-
-Updated to apply to gtk+-2.24.15
-
-Signed-off-by: Marko Lindqvist <cazfi74@gmail.com>
-diff -Nurd gtk+-2.24.15/configure.ac gtk+-2.24.15/configure.ac
---- gtk+-2.24.15/configure.ac 2013-01-12 20:52:54.000000000 +0200
-+++ gtk+-2.24.15/configure.ac 2013-02-12 21:33:30.689925967 +0200
-@@ -415,7 +415,7 @@
- case $enable_explicit_deps in
- auto)
- export SED
-- deplibs_check_method=`(./libtool --config; echo 'eval echo \"$deplibs_check_method\"') | sh`
-+ deplibs_check_method=`(./$host_alias-libtool --config; echo 'eval echo \"$deplibs_check_method\"') | sh`
- if test "x$deplibs_check_method" '!=' xpass_all || test "x$enable_static" = xyes ; then
- enable_explicit_deps=yes
- else
-@@ -774,7 +774,7 @@
- dnl Now we check to see if our libtool supports shared lib deps
- dnl (in a rather ugly way even)
- if $dynworks; then
-- module_libtool_config="${CONFIG_SHELL-/bin/sh} ./libtool --config"
-+ module_libtool_config="${CONFIG_SHELL-/bin/sh} $host_alias-libtool --config"
- module_deplibs_check=`$module_libtool_config | \
- grep '^[[a-z_]]*check[[a-z_]]*_method=[['\''"]]' | \
- sed 's/.*[['\''"]]\(.*\)[['\''"]]$/\1/'`
-@@ -1574,7 +1574,7 @@
- # We are using gmodule-no-export now, but I'm leaving the stripping
- # code in place for now, since pango and atk still require gmodule.
- export SED
--export_dynamic=`(./libtool --config; echo eval echo \\$export_dynamic_flag_spec) | sh`
-+export_dynamic=`($host_alias-libtool --config; echo eval echo \\$export_dynamic_flag_spec) | sh`
- if test -n "$export_dynamic"; then
- GDK_DEP_LIBS=`echo $GDK_DEP_LIBS | sed -e "s/$export_dynamic//"`
- GTK_DEP_LIBS=`echo $GTK_DEP_LIBS | sed -e "s/$export_dynamic//"`
-diff -Nurd gtk+-2.24.15/configure.ac.orig gtk+-2.24.15/configure.ac.orig
---- gtk+-2.24.15/configure.ac.orig 1970-01-01 02:00:00.000000000 +0200
-+++ gtk+-2.24.15/configure.ac.orig 2013-02-12 21:33:21.821926163 +0200
-@@ -0,0 +1,1775 @@
-+# Process this file with autoconf to produce a configure script.
-+# Process this file with autoconf to produce a configure script.
-+# require autoconf 2.54
-+AC_PREREQ(2.62)
-+
-+# Making releases:
-+# GTK_MICRO_VERSION += 1;
-+# GTK_INTERFACE_AGE += 1;
-+# GTK_BINARY_AGE += 1;
-+# if any functions have been added, set GTK_INTERFACE_AGE to 0.
-+# if backwards compatibility has been broken,
-+# set GTK_BINARY_AGE and GTK_INTERFACE_AGE to 0.
-+
-+m4_define([gtk_major_version], [2])
-+m4_define([gtk_minor_version], [24])
-+m4_define([gtk_micro_version], [15])
-+m4_define([gtk_interface_age], [15])
-+m4_define([gtk_binary_age],
-+ [m4_eval(100 * gtk_minor_version + gtk_micro_version)])
-+m4_define([gtk_version],
-+ [gtk_major_version.gtk_minor_version.gtk_micro_version])
-+# This is the X.Y used in -lgtk-FOO-X.Y
-+m4_define([gtk_api_version], [2.0])
-+
-+# Define a string for the earliest version that this release has
-+# backwards binary compatibility with for all interfaces a module
-+# might. Unless we add module-only API with lower stability
-+# guarantees, this should be unchanged until we break binary compat
-+# for GTK+.
-+#
-+#GTK_BINARY_VERSION=$GTK_MAJOR_VERSION.$GTK_MINOR_VERSION.$LT_CURRENT
-+m4_define([gtk_binary_version], [2.10.0])
-+
-+# required versions of other packages
-+m4_define([glib_required_version], [2.28.0])
-+m4_define([pango_required_version], [1.20])
-+m4_define([atk_required_version], [1.29.2])
-+m4_define([cairo_required_version], [1.6])
-+m4_define([gdk_pixbuf_required_version], [2.21.0])
-+
-+
-+AC_INIT([gtk+], [gtk_version],
-+ [http://bugzilla.gnome.org/enter_bug.cgi?product=gtk%2B],
-+ [gtk+])
-+
-+AC_CONFIG_SRCDIR([gdk/gdktypes.h])
-+AC_CONFIG_HEADERS([config.h])
-+AC_CONFIG_MACRO_DIR([m4])
-+
-+# Save this value here, since automake will set cflags later
-+cflags_set=${CFLAGS+set}
-+
-+AM_INIT_AUTOMAKE([no-define -Wno-portability dist-bzip2])
-+
-+# Support silent build rules, requires at least automake-1.11. Enable
-+# by either passing --enable-silent-rules to configure or passing V=0
-+# to make
-+m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES([no])])
-+
-+#
-+# For each of the libraries we build, we define the following
-+
-+# substituted variables:
-+#
-+# foo_PACKAGES: pkg-config packages this library requires
-+# foo_EXTRA_LIBS: Libraries this module requires not pulled in by pkg-config
-+# foo_EXTRA_CFLAGS: cflags this module requires not pulled in by pkg-config
-+# foo_DEP_LIBS: All libraries this module requires
-+# foo_DEP_CFLAGS: All cflags this module requires
-+
-+
-+GTK_MAJOR_VERSION=gtk_major_version
-+GTK_MINOR_VERSION=gtk_minor_version
-+GTK_MICRO_VERSION=gtk_micro_version
-+GTK_INTERFACE_AGE=gtk_interface_age
-+GTK_BINARY_AGE=gtk_binary_age
-+GTK_VERSION=gtk_version
-+GTK_API_VERSION=gtk_api_version
-+GTK_BINARY_VERSION=gtk_binary_version
-+AC_SUBST(GTK_MAJOR_VERSION)
-+AC_SUBST(GTK_MINOR_VERSION)
-+AC_SUBST(GTK_MICRO_VERSION)
-+AC_SUBST(GTK_INTERFACE_AGE)
-+AC_SUBST(GTK_BINARY_AGE)
-+AC_SUBST(GTK_API_VERSION)
-+AC_SUBST(GTK_VERSION)
-+AC_SUBST(GTK_BINARY_VERSION)
-+
-+# libtool versioning
-+#LT_RELEASE=$GTK_MAJOR_VERSION.$GTK_MINOR_VERSION
-+#LT_CURRENT=`expr $GTK_MICRO_VERSION - $GTK_INTERFACE_AGE`
-+#LT_REVISION=$GTK_INTERFACE_AGE
-+#LT_AGE=`expr $GTK_BINARY_AGE - $GTK_INTERFACE_AGE`
-+#LT_CURRENT_MINUS_AGE=`expr $LT_CURRENT - $LT_AGE`
-+
-+m4_define([lt_current], [m4_eval(100 * gtk_minor_version + gtk_micro_version - gtk_interface_age)])
-+m4_define([lt_revision], [gtk_interface_age])
-+m4_define([lt_age], [m4_eval(gtk_binary_age - gtk_interface_age)])
-+LT_VERSION_INFO="lt_current:lt_revision:lt_age"
-+LT_CURRENT_MINUS_AGE=m4_eval(lt_current - lt_age)
-+AC_SUBST(LT_VERSION_INFO)
-+AC_SUBST(LT_CURRENT_MINUS_AGE)
-+
-+m4_define([gail_lt_current],[18])
-+m4_define([gail_lt_revision],[1])
-+m4_define([gail_lt_age],[0])
-+m4_define([gail_lt_version_info],[gail_lt_current:gail_lt_revision:gail_lt_age])
-+m4_define([gail_lt_current_minus_age],[m4_eval(gail_lt_current - gail_lt_age)])
-+AC_SUBST([GAIL_LT_VERSION_INFO],[gail_lt_version_info])
-+AC_SUBST([GAIL_LT_CURRENT_MINUS_AGE],[gail_lt_current_minus_age])
-+
-+GETTEXT_PACKAGE=gtk20
-+AC_SUBST(GETTEXT_PACKAGE)
-+AC_DEFINE_UNQUOTED(GETTEXT_PACKAGE, "$GETTEXT_PACKAGE",
-+ [The prefix for our gettext translation domains.])
-+
-+AC_CANONICAL_HOST
-+
-+MATH_LIB=-lm
-+AC_MSG_CHECKING([for native Win32])
-+LIB_EXE_MACHINE_FLAG=X86
-+EXE_MANIFEST_ARCHITECTURE=X86
-+case "$host" in
-+ *-*-mingw*)
-+ os_win32=yes
-+ gio_can_sniff=no
-+ MATH_LIB=
-+ case "$host" in
-+ x86_64-*-*)
-+ LIB_EXE_MACHINE_FLAG=X64
-+ EXE_MANIFEST_ARCHITECTURE=AMD64
-+ ;;
-+ esac
-+ ;;
-+ *)
-+ os_win32=no
-+ ;;
-+esac
-+AC_MSG_RESULT([$os_win32])
-+
-+AC_SUBST(LIB_EXE_MACHINE_FLAG)
-+AC_SUBST(EXE_MANIFEST_ARCHITECTURE)
-+
-+case $host in
-+ *-*-linux*)
-+ os_linux=yes
-+ ;;
-+esac
-+
-+dnl Initialize libtool
-+AC_PROG_CC
-+AM_DISABLE_STATIC
-+
-+dnl
-+dnl Check for a working C++ compiler, but do not bail out, if none is found.
-+dnl We use this for an automated test for C++ header correctness.
-+dnl
-+AC_CHECK_TOOLS(CXX, [$CCC c++ g++ gcc CC cxx cc++ cl], gcc)
-+AC_LANG_SAVE
-+AC_LANG_CPLUSPLUS
-+
-+AC_TRY_COMPILE(,[class a { int b; } c;], ,CXX=)
-+AM_CONDITIONAL(HAVE_CXX, test "$CXX" != "")
-+
-+gtk_save_cxxflags="$CXXFLAGS"
-+CXXFLAGS="$CXXFLAGS -x objective-c++"
-+AC_TRY_COMPILE([@interface Foo @end],,OBJC=yes,OBJC=no)
-+AM_CONDITIONAL(HAVE_OBJC, test "$OBJC" = "yes")
-+CXXFLAGS="$gtk_save_cxxflags"
-+AC_LANG_RESTORE
-+
-+if test "$os_win32" = "yes"; then
-+ if test x$enable_static = xyes -o x$enable_static = x; then
-+ AC_MSG_WARN([Disabling static library build, must build as DLL on Windows.])
-+ enable_static=no
-+ fi
-+ if test x$enable_shared = xno; then
-+ AC_MSG_WARN([Enabling shared library build, must build as DLL on Windows.])
-+ fi
-+ enable_shared=yes
-+fi
-+
-+AC_LIBTOOL_WIN32_DLL
-+AM_PROG_LIBTOOL
-+dnl when using libtool 2.x create libtool early, because it's used in configure
-+m4_ifdef([LT_OUTPUT], [LT_OUTPUT])
-+
-+
-+# Make sure we use 64-bit versions of various file stuff.
-+AC_SYS_LARGEFILE
-+
-+AM_PROG_AS
-+AC_PATH_PROG(NM, nm, nm)
-+
-+dnl Initialize maintainer mode
-+AM_MAINTAINER_MODE([enable])
-+
-+AC_MSG_CHECKING([for some Win32 platform])
-+case "$host" in
-+ *-*-mingw*|*-*-cygwin*)
-+ platform_win32=yes
-+ ;;
-+ *)
-+ platform_win32=no
-+ ;;
-+esac
-+AC_MSG_RESULT([$platform_win32])
-+AM_CONDITIONAL(PLATFORM_WIN32, test "$platform_win32" = "yes")
-+
-+AM_CONDITIONAL(OS_WIN32, test "$os_win32" = "yes")
-+AM_CONDITIONAL(OS_UNIX, test "$os_win32" != "yes")
-+AM_CONDITIONAL(OS_LINUX, test "$os_linux" = "yes")
-+
-+if test "$os_win32" = "yes"; then
-+ AC_CHECK_TOOL(WINDRES, windres, no)
-+ if test "$WINDRES" = no; then
-+ AC_MSG_ERROR([*** Could not find an implementation of windres in your PATH.])
-+ fi
-+ AC_CHECK_PROG(ms_librarian, lib.exe, yes, no)
-+fi
-+AM_CONDITIONAL(MS_LIB_AVAILABLE, test x$ms_librarian = xyes)
-+
-+m4_define([debug_default],
-+ m4_if(m4_eval(gtk_minor_version % 2), [1], [yes], [minimum]))
-+
-+dnl declare --enable-* args and collect ac_help strings
-+AC_ARG_ENABLE(debug,
-+ AC_HELP_STRING([--enable-debug=@<:@no/minimum/yes@:>@],
-+ [turn on debugging @<:@default=debug_default@:>@]),,
-+ enable_debug=debug_default)
-+AC_ARG_ENABLE(shm,
-+ [AC_HELP_STRING([--enable-shm],
-+ [support shared memory if available [default=yes]])],,
-+ [enable_shm="yes"])
-+AC_ARG_ENABLE(xkb,
-+ [AC_HELP_STRING([--enable-xkb],
-+ [support XKB [default=maybe]])],,
-+ [enable_xkb="maybe"])
-+AC_ARG_ENABLE(xinerama,
-+ [AC_HELP_STRING([--enable-xinerama],
-+ [support xinerama extension if available [default=yes]])],,
-+ [enable_xinerama="yes"])
-+AC_ARG_ENABLE(rebuilds,
-+ [AC_HELP_STRING([--disable-rebuilds],
-+ [disable all source autogeneration rules])],,
-+ [enable_rebuilds=yes])
-+AC_ARG_ENABLE(visibility,
-+ [AC_HELP_STRING([--disable-visibility],
-+ [don't use ELF visibility attributes])],,
-+ [enable_visibility=yes])
-+
-+AC_ARG_WITH(xinput,
-+ [AC_HELP_STRING([--with-xinput=@<:@no/yes@:>@], [support XInput])])
-+
-+if test "$platform_win32" = yes; then
-+ gdktarget=win32
-+else
-+ gdktarget=x11
-+fi
-+
-+AC_ARG_WITH(gdktarget, [ --with-gdktarget=[[x11/win32/quartz/directfb]] select non-default GDK target],
-+ gdktarget=$with_gdktarget)
-+
-+AC_SUBST(gdktarget)
-+case $gdktarget in
-+ x11|win32|quartz|directfb) ;;
-+ *) AC_MSG_ERROR([Invalid target for GDK: use x11, quartz, directfb or win32.]);;
-+esac
-+
-+gdktargetlib=libgdk-$gdktarget-$GTK_API_VERSION.la
-+gtktargetlib=libgtk-$gdktarget-$GTK_API_VERSION.la
-+
-+AC_SUBST(gdktargetlib)
-+AC_SUBST(gtktargetlib)
-+
-+if test "x$enable_debug" = "xyes"; then
-+ test "$cflags_set" = set || CFLAGS="$CFLAGS -g"
-+ GTK_DEBUG_FLAGS="-DG_ENABLE_DEBUG -DG_ERRORCHECK_MUTEXES"
-+else
-+ if test "x$enable_debug" = "xno"; then
-+ GTK_DEBUG_FLAGS="-DG_DISABLE_ASSERT -DG_DISABLE_CHECKS -DG_DISABLE_CAST_CHECKS"
-+ else
-+ GTK_DEBUG_FLAGS="-DG_DISABLE_CAST_CHECKS"
-+ fi
-+fi
-+
-+
-+if test "x$enable_visibility" = "xno"; then
-+ GTK_DEBUG_FLAGS="$GTK_DEBUG_FLAGS -DDISABLE_VISIBILITY"
-+fi
-+
-+
-+AC_DEFINE_UNQUOTED(GTK_COMPILED_WITH_DEBUGGING, "${enable_debug}",
-+ [Define if debugging is enabled])
-+
-+
-+# Build time sanity check...
-+AM_SANITY_CHECK
-+
-+# Checks for programs.
-+AC_ISC_POSIX
-+AM_PROG_CC_C_O
-+AC_PROG_INSTALL
-+AC_PROG_MAKE_SET
-+
-+changequote(,)dnl
-+if test "x$GCC" = "xyes"; then
-+ case " $CFLAGS " in
-+ *[\ \ ]-Wall[\ \ ]*) ;;
-+ *) CFLAGS="$CFLAGS -Wall" ;;
-+ esac
-+
-+ if test "x$enable_ansi" = "xyes"; then
-+ case " $CFLAGS " in
-+ *[\ \ ]-ansi[\ \ ]*) ;;
-+ *) CFLAGS="$CFLAGS -ansi" ;;
-+ esac
-+
-+ case " $CFLAGS " in
-+ *[\ \ ]-pedantic[\ \ ]*) ;;
-+ *) CFLAGS="$CFLAGS -pedantic" ;;
-+ esac
-+ fi
-+fi
-+changequote([,])dnl
-+
-+CPPFLAGS="$CPPFLAGS -DG_DISABLE_SINGLE_INCLUDES -DATK_DISABLE_SINGLE_INCLUDES -DGDK_PIXBUF_DISABLE_SINGLE_INCLUDES -DGTK_DISABLE_SINGLE_INCLUDES"
-+
-+# Ensure MSVC-compatible struct packing convention is used when
-+# compiling for Win32 with gcc.
-+# What flag to depends on gcc version: gcc3 uses "-mms-bitfields", while
-+# gcc2 uses "-fnative-struct".
-+if test x"$os_win32" = xyes; then
-+ if test x"$GCC" = xyes; then
-+ msnative_struct=''
-+ AC_MSG_CHECKING([how to get MSVC-compatible struct packing])
-+ if test -z "$ac_cv_prog_CC"; then
-+ our_gcc="$CC"
-+ else
-+ our_gcc="$ac_cv_prog_CC"
-+ fi
-+ case `$our_gcc --version | sed -e 's,\..*,.,' -e q` in
-+ 2.)
-+ if $our_gcc -v --help 2>/dev/null | grep fnative-struct >/dev/null; then
-+ msnative_struct='-fnative-struct'
-+ fi
-+ ;;
-+ *)
-+ if $our_gcc -v --help 2>/dev/null | grep ms-bitfields >/dev/null; then
-+ msnative_struct='-mms-bitfields'
-+ fi
-+ ;;
-+ esac
-+ if test x"$msnative_struct" = x ; then
-+ AC_MSG_RESULT([no way])
-+ AC_MSG_WARN([produced libraries might be incompatible with MSVC-compiled code])
-+ else
-+ CFLAGS="$CFLAGS $msnative_struct"
-+ AC_MSG_RESULT([${msnative_struct}])
-+ fi
-+ fi
-+fi
-+
-+# Honor aclocal flags
-+ACLOCAL="$ACLOCAL $ACLOCAL_FLAGS"
-+
-+## Initial sanity check, done here so that users get told they
-+## have the wrong dependencies as early in the process as possible.
-+## Later on we actually use the cflags/libs from separate pkg-config
-+## calls. Oh, also the later pkg-config calls don't include
-+## the version requirements since those make the module lists
-+## annoying to construct
-+PKG_CHECK_MODULES(BASE_DEPENDENCIES,
-+ [glib-2.0 >= glib_required_version dnl
-+ atk >= atk_required_version dnl
-+ pango >= pango_required_version dnl
-+ cairo >= cairo_required_version dnl
-+ gdk-pixbuf-2.0 >= gdk_pixbuf_required_version])
-+
-+## In addition to checking that cairo is present, we also need to
-+## check that the correct cairo backend is there. E.g. if the GDK
-+## target is win32 we need the cairo-win32 backend and so on.
-+cairo_backend=$gdktarget
-+
-+# GDK calls the xlib backend "x11," cairo calls it "xlib." Other
-+# backend names are identical.
-+if test "x$cairo_backend" = "xx11"; then
-+ cairo_backend=xlib
-+fi
-+PKG_CHECK_MODULES(CAIRO_BACKEND,
-+ [cairo-$cairo_backend >= cairo_required_version])
-+
-+PKG_CHECK_MODULES(GMODULE, [gmodule-2.0])
-+
-+if test "$os_win32" != yes; then
-+ # libtool option to control which symbols are exported
-+ # right now, symbols starting with _ are not exported
-+ LIBTOOL_EXPORT_OPTIONS='-export-symbols-regex "^[[^_]].*"'
-+else
-+ # We currently use .def files on Windows (for gdk and gtk)
-+ LIBTOOL_EXPORT_OPTIONS=
-+fi
-+AC_SUBST(LIBTOOL_EXPORT_OPTIONS)
-+
-+dnl ******************************************************
-+dnl * See whether to include shared library dependencies *
-+dnl ******************************************************
-+
-+AC_ARG_ENABLE(explicit-deps,
-+ [AC_HELP_STRING([--enable-explicit-deps=@<:@yes/no/auto@:>@],
-+ [use explicit dependencies in .pc files [default=auto]])],,
-+ [enable_explicit_deps=auto])
-+
-+AC_MSG_CHECKING([Whether to write dependencies into .pc files])
-+case $enable_explicit_deps in
-+ auto)
-+ export SED
-+ deplibs_check_method=`(./libtool --config; echo 'eval echo \"$deplibs_check_method\"') | sh`
-+ if test "x$deplibs_check_method" '!=' xpass_all || test "x$enable_static" = xyes ; then
-+ enable_explicit_deps=yes
-+ else
-+ enable_explicit_deps=no
-+ fi
-+ ;;
-+ yes|no)
-+ ;;
-+ *) AC_MSG_ERROR([Value given to --enable-explicit-deps must be one of yes, no or auto])
-+ ;;
-+esac
-+AC_MSG_RESULT($enable_explicit_deps)
-+
-+AM_CONDITIONAL(DISABLE_EXPLICIT_DEPS, test $enable_explicit_deps = no)
-+
-+# define a MAINT-like variable REBUILD which is set if Perl
-+# and awk are found, so autogenerated sources can be rebuilt
-+
-+AC_PATH_PROGS(PERL, perl5 perl)
-+
-+# We would like indent, but don't require it.
-+AC_CHECK_PROG(INDENT, indent, indent)
-+
-+REBUILD=\#
-+if test "x$enable_rebuilds" = "xyes" && \
-+ test -n "$PERL" && \
-+ $PERL -e 'exit !($] >= 5.002)' > /dev/null 2>&1 ; then
-+ REBUILD=
-+fi
-+AC_SUBST(REBUILD)
-+
-+AC_CHECK_FUNCS(lstat mkstemp flockfile getc_unlocked)
-+AC_CHECK_FUNCS(localtime_r)
-+
-+# _NL_TIME_FIRST_WEEKDAY is an enum and not a define
-+AC_MSG_CHECKING([for _NL_TIME_FIRST_WEEKDAY])
-+AC_TRY_LINK([#include <langinfo.h>], [
-+char c;
-+c = *((unsigned char *) nl_langinfo(_NL_TIME_FIRST_WEEKDAY));
-+], gtk_ok=yes, gtk_ok=no)
-+AC_MSG_RESULT($gtk_ok)
-+if test "$gtk_ok" = "yes"; then
-+ AC_DEFINE([HAVE__NL_TIME_FIRST_WEEKDAY], [1],
-+ [Define if _NL_TIME_FIRST_WEEKDAY is available])
-+fi
-+
-+# _NL_MEASUREMENT_MEASUREMENT is an enum and not a define
-+AC_MSG_CHECKING([for _NL_MEASUREMENT_MEASUREMENT])
-+AC_TRY_LINK([#include <langinfo.h>], [
-+char c;
-+c = *((unsigned char *) nl_langinfo(_NL_MEASUREMENT_MEASUREMENT));
-+], gtk_ok=yes, gtk_ok=no)
-+AC_MSG_RESULT($gtk_ok)
-+if test "$gtk_ok" = "yes"; then
-+ AC_DEFINE([HAVE__NL_MEASUREMENT_MEASUREMENT], [1],
-+ [Define if _NL_MEASUREMENT_MEASUREMENT is available])
-+fi
-+
-+# _NL_PAPER_HEIGHT is an enum and not a define
-+AC_MSG_CHECKING([for _NL_PAPER_HEIGHT])
-+AC_TRY_LINK([#include <langinfo.h>], [
-+char c;
-+c = *((unsigned char *) nl_langinfo(_NL_PAPER_HEIGHT));
-+], gtk_ok=yes, gtk_ok=no)
-+AC_MSG_RESULT($gtk_ok)
-+if test "$gtk_ok" = "yes"; then
-+ AC_DEFINE([HAVE__NL_PAPER_HEIGHT], [1],
-+ [Define if _NL_PAPER_HEIGHT is available])
-+fi
-+
-+# _NL_PAPER_WIDTH is an enum and not a define
-+AC_MSG_CHECKING([for _NL_PAPER_WIDTH])
-+AC_TRY_LINK([#include <langinfo.h>], [
-+char c;
-+c = *((unsigned char *) nl_langinfo(_NL_PAPER_WIDTH));
-+], gtk_ok=yes, gtk_ok=no)
-+AC_MSG_RESULT($gtk_ok)
-+if test "$gtk_ok" = "yes"; then
-+ AC_DEFINE([HAVE__NL_PAPER_WIDTH], [1],
-+ [Define if _NL_PAPER_WIDTH is available])
-+fi
-+
-+# sigsetjmp is a macro on some platforms, so AC_CHECK_FUNCS is not reliable
-+AC_MSG_CHECKING(for sigsetjmp)
-+AC_TRY_LINK([#include <setjmp.h>], [
-+sigjmp_buf env;
-+sigsetjmp(env, 0);
-+], gtk_ok=yes, gtk_ok=no)
-+AC_MSG_RESULT($gtk_ok)
-+if test "$gtk_ok" = "yes"; then
-+ AC_DEFINE(HAVE_SIGSETJMP, 1,
-+ [Define to 1 if sigsetjmp is available])
-+fi
-+
-+# i18n stuff
-+ALL_LINGUAS="`grep -v '^#' "$srcdir/po/LINGUAS" | tr '\n' ' '`"
-+AM_GLIB_GNU_GETTEXT
-+LIBS="$LIBS $INTLLIBS"
-+AC_OUTPUT_COMMANDS([case "$CONFIG_FILES" in *po-properties/Makefile.in*)
-+ sed -e "/POTFILES =/r po-properties/POTFILES" po-properties/Makefile.in > po-properties/Makefile
-+ esac])
-+
-+dnl Snippet below is copied from AM_GLIB_GNU_GETTEXT to generate a first
-+dnl po-properties/POTFILES during configure; see GNOME #573515.
-+dnl
-+dnl Generate list of files to be processed by xgettext which will
-+dnl be included in po-properties/Makefile.
-+test -d po-properties || mkdir po-properties
-+if test "x$srcdir" != "x."; then
-+ if test "x`echo $srcdir | sed 's@/.*@@'`" = "x"; then
-+ popropsrcprefix="$srcdir/"
-+ else
-+ popropsrcprefix="../$srcdir/"
-+ fi
-+else
-+ popropsrcprefix="../"
-+fi
-+rm -f po-properties/POTFILES
-+sed -e "/^#/d" -e "/^\$/d" -e "s,.*, $popropsrcprefix& \\\\," -e "\$s/\(.*\) \\\\/\1/" \
-+< $srcdir/po-properties/POTFILES.in > po-properties/POTFILES
-+dnl (End of adapted AM_GLIB_GNU_GETTEXT snippet.)
-+
-+AM_GLIB_DEFINE_LOCALEDIR(GTK_LOCALEDIR)
-+
-+dnl The DU4 header files don't provide library prototypes unless
-+dnl -std1 is given to the native cc.
-+AC_MSG_CHECKING([for extra flags to get ANSI library prototypes])
-+
-+gtk_save_LIBS=$LIBS
-+LIBS="$LIBS -lm"
-+AC_TRY_RUN([#include <math.h>
-+ int main (void) { return (log(1) != log(1.)); }],
-+ AC_MSG_RESULT(none needed),
-+ gtk_save_CFLAGS="$CFLAGS"
-+ CFLAGS="$CFLAGS -std1"
-+ AC_TRY_RUN([#include <math.h>
-+ int main (void) { return (log(1) != log(1.)); }],
-+ AC_MSG_RESULT(-std1),
-+ AC_MSG_RESULT()
-+ CFLAGS="$gtk_save_CFLAGS"
-+ AC_MSG_WARN(
-+ [No ANSI prototypes found in library. (-std1 didn't work.)]),
-+ true
-+ ),
-+ AC_MSG_RESULT(none needed)
-+)
-+LIBS=$gtk_save_LIBS
-+
-+AC_MSG_CHECKING(for the BeOS)
-+case $host in
-+ *-*-beos*)
-+ AC_MSG_RESULT(yes)
-+ MATH_LIB=
-+ ;;
-+ *)
-+ AC_MSG_RESULT(no)
-+ ;;
-+esac
-+
-+AC_SUBST(MATH_LIB)
-+#
-+# see bug 162979
-+#
-+AC_MSG_CHECKING(for HP-UX)
-+case $host_os in
-+ hpux9* | hpux10* | hpux11*)
-+ AC_MSG_RESULT(yes)
-+ CFLAGS="$CFLAGS -DHPPEX -DSHMLINK"
-+ ;;
-+ *)
-+ AC_MSG_RESULT(no)
-+ ;;
-+esac
-+
-+dnl NeXTStep cc seems to need this
-+AC_MSG_CHECKING([for extra flags for POSIX compliance])
-+AC_TRY_COMPILE([#include <dirent.h>], [DIR *dir;],
-+ AC_MSG_RESULT(none needed),
-+ gtk_save_CFLAGS="$CFLAGS"
-+ CFLAGS="$CFLAGS -posix"
-+ AC_TRY_COMPILE([#include <dirent.h>], [DIR *dir;],
-+ AC_MSG_RESULT(-posix),
-+ AC_MSG_RESULT()
-+ CFLAGS="$gtk_save_CFLAGS"
-+ AC_MSG_WARN([Could not determine POSIX flag. (-posix didn't work.)])))
-+
-+#
-+# Run AM_PATH_GLIB_2_0 to make sure that GLib is installed and working
-+#
-+
-+GLIB_PACKAGES="gobject-2.0 gio-2.0 gmodule-no-export-2.0"
-+
-+AM_PATH_GLIB_2_0(glib_required_version, :,
-+ AC_MSG_ERROR([
-+*** GLIB glib_required_version or better is required. The latest version of
-+*** GLIB is always available from ftp://ftp.gtk.org/pub/gtk/.]),
-+ gobject gmodule-no-export gthread)
-+
-+# See if it's safe to turn G_DISABLE_DEPRECATED on.
-+GLIB_VERSION_MAJOR_MINOR=`$PKG_CONFIG --modversion glib-2.0 | sed "s/\.@<:@^.@:>@*\$//"`
-+GLIB_REQUIRED_VERSION_MAJOR_MINOR=`echo glib_required_version | sed "s/\.@<:@^.@:>@*\$//"`
-+if test "x$GLIB_VERSION_MAJOR_MINOR" = "x$GLIB_REQUIRED_VERSION_MAJOR_MINOR"; then
-+ CFLAGS="-DG_DISABLE_DEPRECATED $CFLAGS"
-+fi
-+
-+CFLAGS="-DGDK_PIXBUF_DISABLE_DEPRECATED $CFLAGS"
-+
-+
-+dnl
-+dnl Check for bind_textdomain_codeset, including -lintl if GLib brings it in.
-+dnl
-+gtk_save_LIBS=$LIBS
-+LIBS="$LIBS $GLIB_LIBS"
-+AC_CHECK_FUNCS(bind_textdomain_codeset)
-+LIBS=$gtk_save_LIBS
-+
-+AC_CHECK_HEADERS(pwd.h,
-+ AC_DEFINE(HAVE_PWD_H, 1,
-+ [Define to 1 if pwd.h is available]))
-+AC_CHECK_HEADERS(sys/time.h,
-+ AC_DEFINE(HAVE_SYS_TIME_H, 1,
-+ [Define to 1 if time.h is available]))
-+AC_CHECK_HEADERS(unistd.h,
-+ AC_DEFINE(HAVE_UNISTD_H, 1,
-+ [Define to 1 if unistd.h is available]))
-+AC_CHECK_HEADERS(ftw.h,
-+ AC_DEFINE(HAVE_FTW_H, 1,
-+ [Define to 1 if ftw.h is available]))
-+
-+AC_MSG_CHECKING([for GNU ftw extensions])
-+AC_TRY_COMPILE([#define _XOPEN_SOURCE 500
-+#define _GNU_SOURCE
-+#include <ftw.h>], [int flags = FTW_ACTIONRETVAL;], gtk_ok=yes, gtk_ok=no)
-+if test $gtk_ok = yes; then
-+ AC_MSG_RESULT([yes])
-+ AC_DEFINE(HAVE_GNU_FTW, 1, [Have GNU ftw])
-+else
-+ AC_MSG_RESULT([no])
-+fi
-+
-+saved_cflags="$CFLAGS"
-+saved_ldflags="$LDFLAGS"
-+
-+
-+# Checks for header files.
-+AC_HEADER_STDC
-+
-+# Checks for typedefs, structures, and compiler characteristics.
-+AC_C_CONST
-+
-+# Checks for library functions.
-+AC_TYPE_SIGNAL
-+AC_FUNC_MMAP
-+
-+AC_CHECK_FUNCS(mallinfo)
-+AC_CHECK_FUNCS(getresuid)
-+AC_TYPE_UID_T
-+
-+# Check if <sys/select.h> needs to be included for fd_set
-+AC_MSG_CHECKING([for fd_set])
-+AC_TRY_COMPILE([#include <sys/types.h>],
-+ [fd_set readMask, writeMask;], gtk_ok=yes, gtk_ok=no)
-+if test $gtk_ok = yes; then
-+ AC_MSG_RESULT([yes, found in sys/types.h])
-+else
-+ AC_HEADER_EGREP(fd_mask, sys/select.h, gtk_ok=yes)
-+ if test $gtk_ok = yes; then
-+ AC_DEFINE(HAVE_SYS_SELECT_H, 1,
-+ [Define to 1 if sys/select.h is available])
-+ AC_MSG_RESULT([yes, found in sys/select.h])
-+ else
-+ AC_DEFINE(NO_FD_SET, 1,
-+ [Define to 1 if fd_set is not available])
-+ AC_MSG_RESULT(no)
-+ fi
-+fi
-+
-+# `widechar' tests for gdki18n.h
-+AC_MSG_CHECKING(for wchar.h)
-+AC_TRY_CPP([#include <wchar.h>], gdk_wchar_h=yes, gdk_wchar_h=no)
-+if test $gdk_wchar_h = yes; then
-+ AC_DEFINE(HAVE_WCHAR_H, 1, [Have wchar.h include file])
-+fi
-+AC_MSG_RESULT($gdk_wchar_h)
-+
-+# Check for wctype.h (for iswalnum)
-+AC_MSG_CHECKING(for wctype.h)
-+AC_TRY_CPP([#include <wctype.h>], gdk_wctype_h=yes, gdk_wctype_h=no)
-+if test $gdk_wctype_h = yes; then
-+ AC_DEFINE(HAVE_WCTYPE_H, 1, [Have wctype.h include file])
-+fi
-+AC_MSG_RESULT($gdk_wctype_h)
-+
-+# in Solaris 2.5, `iswalnum' is in -lw
-+GDK_WLIBS=
-+AC_CHECK_FUNC(iswalnum,,[AC_CHECK_LIB(w,iswalnum,GDK_WLIBS=-lw)])
-+
-+oLIBS="$LIBS"
-+LIBS="$LIBS $GDK_WLIBS"
-+# The following is necessary for Linux libc-5.4.38
-+AC_MSG_CHECKING(if iswalnum() and friends are properly defined)
-+AC_TRY_LINK([#include <stdlib.h>],[
-+#if (defined(HAVE_WCTYPE_H) || defined(HAVE_WCHAR_H))
-+# ifdef HAVE_WCTYPE_H
-+# include <wctype.h>
-+# else
-+# ifdef HAVE_WCHAR_H
-+# include <wchar.h>
-+# endif
-+# endif
-+#else
-+# define iswalnum(c) ((wchar_t)(c) <= 0xFF && isalnum(c))
-+#endif
-+iswalnum((wchar_t) 0);
-+], gdk_working_wctype=yes, gdk_working_wctype=no)
-+LIBS="$oLIBS"
-+
-+if test $gdk_working_wctype = no; then
-+ AC_DEFINE(HAVE_BROKEN_WCTYPE, 1, [Is the wctype implementation broken])
-+ GDK_WLIBS=
-+fi
-+AC_MSG_RESULT($gdk_working_wctype)
-+AC_SUBST(GDK_WLIBS)
-+
-+# Check for uxtheme.h (for MS-Windows Engine)
-+AC_MSG_CHECKING(for uxtheme.h)
-+AC_TRY_CPP([#include <uxtheme.h>], gtk_uxtheme_h=yes, gtk_uxtheme_h=no)
-+if test $gtk_uxtheme_h = yes; then
-+ AC_DEFINE(HAVE_UXTHEME_H, 1, [Have uxtheme.h include file])
-+fi
-+AC_MSG_RESULT($gtk_uxtheme_h)
-+
-+# Checks for gdkspawn
-+AC_CHECK_HEADERS(crt_externs.h)
-+AC_CHECK_FUNCS(_NSGetEnviron)
-+
-+AC_MSG_CHECKING(whether to build dynamic modules)
-+
-+AC_ARG_ENABLE(modules,
-+ [AC_HELP_STRING([--disable-modules],
-+ [disable dynamic module loading])])
-+
-+dynworks=false
-+deps=
-+if test x$enable_modules = xno; then
-+ AC_MSG_RESULT(no)
-+else
-+ AC_MSG_RESULT(yes)
-+ AC_MSG_CHECKING(whether dynamic modules work)
-+ ## for loop is to strip newline
-+ tmp=`$PKG_CONFIG --variable=gmodule_supported gmodule-no-export-2.0`
-+ for I in $tmp; do
-+ dynworks=$I
-+ done
-+
-+ dnl Now we check to see if our libtool supports shared lib deps
-+ dnl (in a rather ugly way even)
-+ if $dynworks; then
-+ module_libtool_config="${CONFIG_SHELL-/bin/sh} ./libtool --config"
-+ module_deplibs_check=`$module_libtool_config | \
-+ grep '^[[a-z_]]*check[[a-z_]]*_method=[['\''"]]' | \
-+ sed 's/.*[['\''"]]\(.*\)[['\''"]]$/\1/'`
-+ if test "x$module_deplibs_check" = "xnone" || \
-+ test "x$module_deplibs_check" = "xunknown" || \
-+ test "x$module_deplibs_check" = "x"; then
-+ dynworks=false
-+ fi
-+ fi
-+
-+ if $dynworks; then
-+ AC_DEFINE(USE_GMODULE, 1,
-+ [Define to 1 if gmodule works and should be used])
-+ AC_MSG_RESULT(yes)
-+ else
-+ AC_MSG_RESULT(no)
-+ fi
-+fi
-+
-+AM_CONDITIONAL(BUILD_DYNAMIC_MODULES, $dynworks)
-+
-+#
-+# Allow building some or all immodules included
-+#
-+AC_MSG_CHECKING(immodules to build)
-+
-+dnl due to an autoconf bug, commas in the first arg to
-+dnl AC_HELP_STRING cause problems.
-+dnl AC_HELP_STRING([--with-included-immodules=MODULE1 MODULE2 ...],
-+dnl [build the specified input method modules into gtk])
-+AC_ARG_WITH(included_immodules,
-+[ --with-included-immodules=MODULE1,MODULE2,...
-+ build the specified input methods into gtk])
-+
-+if $dynworks; then
-+ :
-+else
-+ ## if the option was specified, leave it; otherwise disable included immodules
-+ if test x$with_included_immodules = xno; then
-+ with_included_immodules=yes
-+ fi
-+fi
-+
-+all_immodules="am-et,cedilla,cyrillic-translit"
-+if test "$gdktarget" = "win32"; then
-+ all_immodules="${all_immodules},ime"
-+fi
-+all_immodules="${all_immodules},inuktitut,ipa,multipress,thai,ti-er,ti-et,viqr"
-+if test "$gdktarget" = "x11"; then
-+ all_immodules="${all_immodules},xim"
-+fi
-+
-+included_immodules=""
-+# If the switch specified without listing any specific ones, include all
-+if test "x$with_included_immodules" = xyes ; then
-+ included_immodules="$all_immodules"
-+else
-+ included_immodules="$with_included_immodules"
-+fi
-+
-+AC_MSG_RESULT($included_immodules)
-+AM_CONDITIONAL(HAVE_INCLUDED_IMMMODULES, test "x$included_immodules" != x)
-+
-+INCLUDED_IMMODULE_OBJ=
-+INCLUDED_IMMODULE_DEFINE=
-+
-+IFS="${IFS= }"; gtk_save_ifs="$IFS"; IFS=","
-+for immodule in $included_immodules; do
-+ immodule_underscores=`echo $immodule | sed -e 's/-/_/g'`
-+ if echo "$all_immodules" | egrep "(^|,)$immodule(\$|,)" > /dev/null; then
-+ :
-+ else
-+ AC_MSG_ERROR([the specified input method $immodule does not exist])
-+ fi
-+
-+ INCLUDED_IMMODULE_OBJ="$INCLUDED_IMMODULE_OBJ ../modules/input/libstatic-im-$immodule.la"
-+ INCLUDED_IMMODULE_DEFINE="$INCLUDED_IMMODULE_DEFINE -DINCLUDE_IM_$immodule_underscores"
-+ eval INCLUDE_$immodule_underscores=yes
-+done
-+IFS="$gtk_save_ifs"
-+AC_SUBST(INCLUDED_IMMODULE_OBJ)
-+AC_SUBST(INCLUDED_IMMODULE_DEFINE)
-+
-+AM_CONDITIONAL(INCLUDE_IM_AM_ET, [test x"$INCLUDE_am_et" = xyes])
-+AM_CONDITIONAL(INCLUDE_IM_CEDILLA, [test x"$INCLUDE_cedilla" = xyes])
-+AM_CONDITIONAL(INCLUDE_IM_CYRILLIC_TRANSLIT, [test x"$INCLUDE_cyrillic_translit" = xyes])
-+AM_CONDITIONAL(INCLUDE_IM_IME, [test x"$INCLUDE_ime" = xyes])
-+AM_CONDITIONAL(INCLUDE_IM_INUKTITUT, [test x"$INCLUDE_inuktitut" = xyes])
-+AM_CONDITIONAL(INCLUDE_IM_IPA, [test x"$INCLUDE_ipa" = xyes])
-+AM_CONDITIONAL(INCLUDE_IM_MULTIPRESS, [test x"$INCLUDE_multipress" = xyes])
-+AM_CONDITIONAL(INCLUDE_IM_THAI, [test x"$INCLUDE_thai" = xyes])
-+AM_CONDITIONAL(INCLUDE_IM_TI_ER, [test x"$INCLUDE_ti_er" = xyes])
-+AM_CONDITIONAL(INCLUDE_IM_TI_ET, [test x"$INCLUDE_ti_et" = xyes])
-+AM_CONDITIONAL(INCLUDE_IM_VIQR, [test x"$INCLUDE_viqr" = xyes])
-+AM_CONDITIONAL(INCLUDE_IM_XIM, [test x"$INCLUDE_xim" = xyes])
-+
-+AC_HEADER_SYS_WAIT
-+
-+AC_TYPE_SIGNAL
-+
-+# Checks to see whether we should include mediaLib
-+# support.
-+#
-+AC_CHECK_HEADER(sys/systeminfo.h,
-+ AC_DEFINE(HAVE_SYS_SYSTEMINFO_H, 1,
-+ [Define to 1 if sys/systeminfo.h is available]))
-+AC_CHECK_HEADER(sys/sysinfo.h,
-+ AC_DEFINE(HAVE_SYS_SYSINFO_H, 1,
-+ [Define to 1 if sys/sysinfo.h is available]))
-+
-+AC_MSG_CHECKING(for mediaLib 2.3)
-+use_mlib25=no
-+# Check for a mediaLib 2.3 function since that is what the GTK+ mediaLib
-+# patch requires.
-+AC_CHECK_LIB(mlib, mlib_ImageSetStruct, use_mlib=yes, use_mlib=no)
-+if test $use_mlib = yes; then
-+ AC_DEFINE(USE_MEDIALIB, 1,
-+ [Define to 1 if medialib is available and should be used])
-+ MEDIA_LIB=-lmlib
-+
-+ AC_MSG_CHECKING(for mediaLib 2.5)
-+ # Check for a mediaLib 2.5 function since that is what is needed for
-+ # gdk_rgb_convert integration.
-+ AC_CHECK_LIB(mlib, mlib_VideoColorRGBint_to_BGRAint, use_mlib25=yes, use_mlib25=no)
-+ if test $use_mlib25 = yes; then
-+ AC_DEFINE(USE_MEDIALIB25, 1,
-+ [Define to 1 if medialib 2.5 is available])
-+ fi
-+fi
-+AM_CONDITIONAL(USE_MEDIALIB, test $use_mlib = yes)
-+AM_CONDITIONAL(USE_MEDIALIB25, test $use_mlib25 = yes)
-+
-+dnl Look for a host system's gdk-pixbuf-csource if we are cross-compiling
-+
-+AM_CONDITIONAL(CROSS_COMPILING, test $cross_compiling = yes)
-+
-+if test $cross_compiling = yes; then
-+ AC_PATH_PROG(GTK_UPDATE_ICON_CACHE, gtk-update-icon-cache, no)
-+ if test x$GTK_UPDATE_ICON_CACHE = xno; then
-+ REBUILD_PNGS=#
-+ fi
-+fi
-+
-+AC_PATH_PROG(GDK_PIXBUF_CSOURCE, gdk-pixbuf-csource, no)
-+
-+if test ! -f $srcdir/gtk/gtkbuiltincache.h &&
-+ test "x$REBUILD_PNGS" = "x#" ; then
-+ AC_MSG_ERROR([
-+*** gtkbuiltincache.h is not in the tree, and cannot be built
-+*** because you don't have libpng, or (when cross-compiling) you
-+*** don't have a prebuilt gtk-update-icon-cache on the build system.])
-+fi
-+
-+########################################
-+# Windowing system checks
-+########################################
-+
-+GDK_EXTRA_LIBS="$GDK_WLIBS"
-+GDK_EXTRA_CFLAGS=
-+
-+# GTK+ uses some X calls, so needs to link against X directly
-+GTK_DEP_PACKAGES_FOR_X=
-+GTK_DEP_LIBS_FOR_X=
-+
-+if test "x$gdktarget" = "xx11"; then
-+ X_PACKAGES=fontconfig
-+
-+ #
-+ # We use fontconfig very peripherally when decoding the default
-+ # settings.
-+ #
-+ if $PKG_CONFIG --exists fontconfig; then : ; else
-+ AC_MSG_ERROR([
-+*** fontconfig (http://www.fontconfig.org) is required by the X11 backend.])
-+ fi
-+
-+ #
-+ # Check for basic X packages; we use pkg-config if available
-+ #
-+ if $PKG_CONFIG --exists x11 xext xrender; then
-+ have_base_x_pc=true
-+ X_PACKAGES="$X_PACKAGES x11 xext xrender"
-+ x_libs="`$PKG_CONFIG --libs x11 xext xrender`"
-+ X_CFLAGS="`$PKG_CONFIG --cflags x11 xext xrender`"
-+
-+ # Strip out any .la files that pkg-config might give us (this happens
-+ # with -uninstalled.pc files)
-+ x_libs_for_checks=
-+ for I in $x_libs ; do
-+ case $I in
-+ *.la) ;;
-+ *) x_libs_for_checks="$x_libs_for_checks $I" ;;
-+ esac
-+ done
-+
-+ GTK_PACKAGES_FOR_X="x11"
-+ else
-+ have_base_x_pc=false
-+ AC_PATH_XTRA
-+ if test x$no_x = xyes ; then
-+ AC_MSG_ERROR([X development libraries not found])
-+ fi
-+
-+ x_cflags="$X_CFLAGS"
-+ x_libs_for_checks="$X_LIBS -lXext -lXrender -lX11 $X_EXTRA_LIBS"
-+
-+ GTK_DEP_LIBS_FOR_X="$X_LIBS -lXrender -lX11 $X_EXTRA_LIBS"
-+ fi
-+
-+ # Extra libraries found during checks (-lXinerama, etc), not from pkg-config.
-+ x_extra_libs=
-+
-+ gtk_save_cppflags="$CPPFLAGS"
-+ CPPFLAGS="$CPPFLAGS $X_CFLAGS"
-+
-+ gtk_save_LIBS=$LIBS
-+ LIBS="$x_libs_for_checks $LIBS"
-+
-+ # Sanity check for the X11 and Xext libraries. While everything we need from
-+ # Xext is optional, the chances a system has *none* of these things is so
-+ # small that we just unconditionally require it.
-+ AC_CHECK_FUNC(XOpenDisplay, :,
-+ AC_MSG_ERROR([*** libX11 not found. Check 'config.log' for more details.]))
-+ AC_CHECK_FUNC(XextFindDisplay, :,
-+ AC_MSG_ERROR([*** libXext not found. Check 'config.log' for more details.]))
-+ AC_CHECK_FUNC(XRenderQueryExtension, :,
-+ AC_MSG_ERROR([*** libXrender not found. Check 'config.log' for more details.]))
-+
-+ # Check for xReply
-+
-+ AC_MSG_CHECKING([if <X11/extensions/XIproto.h> is needed for xReply])
-+ AC_TRY_COMPILE([#include <X11/Xlibint.h>],
-+ [xReply *rep;],
-+ [AC_MSG_RESULT([no])],
-+ [AC_TRY_COMPILE([#include <X11/extensions/XIproto.h>
-+#include <X11/Xlibint.h>],
-+ [xReply *rep;],
-+ [AC_MSG_RESULT([yes])
-+ AC_DEFINE([NEED_XIPROTO_H_FOR_XREPLY], 1,
-+ [Define if <X11/extensions/XIproto.h> needed for xReply])],
-+ [AC_MSG_RESULT([unknown])
-+ AC_MSG_ERROR([xReply type unavailable. X11 is too old])])])
-+
-+ # Check for XConvertCase, XInternAtoms (X11R6 specific)
-+
-+ AC_CHECK_FUNCS(XConvertCase XInternAtoms)
-+
-+ # Generic X11R6 check needed for XIM support; we could
-+ # probably use this to replace the above, but we'll
-+ # leave the separate checks for XConvertCase and XInternAtoms
-+ # for clarity
-+
-+ have_x11r6=false
-+ AC_CHECK_FUNC(XAddConnectionWatch,
-+ have_x11r6=true)
-+
-+ if $have_x11r6; then
-+ AC_DEFINE(HAVE_X11R6, 1, [Define if we have X11R6])
-+ fi
-+ AM_CONDITIONAL(HAVE_X11R6, $have_x11r6)
-+
-+ # Check for XKB support.
-+
-+ if test "x$enable_xkb" = "xyes"; then
-+ AC_MSG_WARN(XKB support explicitly enabled)
-+ AC_DEFINE(HAVE_XKB, 1, [Define to use XKB extension])
-+ elif test "x$enable_xkb" = "xmaybe"; then
-+ AC_CHECK_FUNC(XkbQueryExtension,
-+ AC_DEFINE(HAVE_XKB, 1, [Define to use XKB extension]))
-+ else
-+ AC_MSG_WARN(XKB support explicitly disabled)
-+ fi
-+
-+ # Check for shaped window extension
-+
-+ AC_CHECK_FUNC(XShapeCombineMask, :,
-+ [AC_MSG_ERROR([Shape extension not found, check your development headers])])
-+
-+ # X SYNC check
-+ gtk_save_CFLAGS="$CFLAGS"
-+ CFLAGS="$CFLAGS $x_cflags"
-+
-+ AC_CHECK_FUNC(XSyncQueryExtension,
-+ [AC_CHECK_HEADER(X11/extensions/sync.h,
-+ AC_DEFINE(HAVE_XSYNC, 1, [Have the SYNC extension library]),
-+ :, [#include <X11/Xlib.h>])])
-+
-+ CFLAGS="$gtk_save_CFLAGS"
-+
-+ # Xshm checks
-+
-+ if test "x$enable_shm" = "xyes"; then
-+ # Check for the XShm extension, normally in Xext
-+ AC_CHECK_FUNC(XShmAttach,
-+ :,
-+ # On AIX, it is in XextSam instead
-+ [AC_CHECK_LIB(XextSam, XShmAttach,
-+ [GTK_ADD_LIB(x_extra_libs,XextSam)])])
-+ fi
-+
-+ if test "x$enable_shm" = "xyes"; then
-+ # Check for shared memory
-+ AC_CHECK_HEADER(sys/ipc.h,
-+ AC_DEFINE(HAVE_IPC_H, 1,
-+ [Define to 1 if ipc.h is available]),
-+ no_sys_ipc=yes)
-+ AC_CHECK_HEADER(sys/shm.h,
-+ AC_DEFINE(HAVE_SHM_H, 1,
-+ [Define to 1 if shm.h is available]),
-+ no_sys_shm=yes)
-+
-+ # Check for the X shared memory extension header file
-+ have_xshm=no
-+ AC_MSG_CHECKING(X11/extensions/XShm.h)
-+ if test "x$no_xext_lib" = "xyes"; then
-+ :
-+ else
-+ gtk_save_CFLAGS="$CFLAGS"
-+ CFLAGS="$CFLAGS $x_cflags"
-+ AC_TRY_COMPILE([
-+#include <stdlib.h>
-+#include <sys/types.h>
-+#include <sys/ipc.h>
-+#include <sys/shm.h>
-+#include <X11/Xlib.h>
-+#include <X11/Xutil.h>
-+#include <X11/extensions/XShm.h>
-+], [XShmSegmentInfo *x_shm_info;], have_xshm=yes)
-+ CFLAGS="$gtk_save_CFLAGS"
-+ fi
-+ AC_MSG_RESULT($have_xshm)
-+ if test $have_xshm = yes ; then
-+ AC_DEFINE(HAVE_XSHM_H, 1,
-+ [Define to 1 if xshm.h is available])
-+ fi
-+ fi
-+
-+ if test "x$enable_xinerama" = "xyes"; then
-+ # Check for Xinerama extension (Solaris impl or Xfree impl)
-+ gtk_save_cppflags="$CPPFLAGS"
-+ CPPFLAGS="$CPPFLAGS $x_cflags"
-+
-+ # Check for XFree
-+ AC_MSG_CHECKING(for Xinerama support on XFree86)
-+
-+ have_xfree_xinerama=false
-+ if $PKG_CONFIG --exists xinerama ; then
-+ have_xfree_xinerama=true
-+ X_PACKAGES="$X_PACKAGES xinerama"
-+ else
-+ AC_CHECK_LIB(Xinerama, XineramaQueryExtension,
-+ [AC_CHECK_HEADER(X11/extensions/Xinerama.h,
-+ [GTK_ADD_LIB(x_extra_libs,Xinerama)
-+ have_xfree_xinerama=true], :,
-+ [#include <X11/Xlib.h>])])
-+ fi
-+
-+ if $have_xfree_xinerama ; then
-+ AC_DEFINE(HAVE_XFREE_XINERAMA, 1,
-+ [Define to 1 if XFree Xinerama is available])
-+ AC_DEFINE(HAVE_XINERAMA, 1,
-+            [Define to 1 if Xinerama is available])
-+ AC_MSG_RESULT(yes)
-+ else
-+ AC_MSG_RESULT(no)
-+
-+ case "$host" in
-+ *-*-solaris*)
-+ # Check for solaris
-+ AC_MSG_CHECKING(for Xinerama support on Solaris)
-+
-+ have_solaris_xinerama=false
-+ AC_CHECK_FUNC(XineramaGetInfo,
-+ [AC_CHECK_HEADER(X11/extensions/xinerama.h,
-+ [have_solaris_xinerama=true], :,
-+ [#include <X11/Xlib.h>])])
-+
-+ if $have_solaris_xinerama ; then
-+ AC_DEFINE(HAVE_SOLARIS_XINERAMA, 1,
-+ [Define to 1 if solaris xinerama is available])
-+ AC_DEFINE(HAVE_XINERAMA, 1,
-+ [Define to 1 if xinerama is available])
-+ AC_MSG_RESULT(yes)
-+ else
-+ AC_MSG_RESULT(no)
-+ fi
-+ ;;
-+ *)
-+ ;;
-+ esac
-+ fi
-+ fi
-+
-+ # set up things for XInput
-+
-+ if test "x$with_xinput" = "xxfree" || test "x$with_xinput" = "xyes"; then
-+ AC_DEFINE(XINPUT_XFREE, 1,
-+ [Define to 1 if XFree XInput should be used])
-+
-+ if $PKG_CONFIG --exists xi ; then
-+ X_PACKAGES="$X_PACKAGES xi"
-+ else
-+ GTK_ADD_LIB(x_extra_libs, Xi)
-+ fi
-+ else
-+ AC_DEFINE(XINPUT_NONE, 1,
-+ [Define to 1 if no XInput should be used])
-+ fi
-+
-+ AM_CONDITIONAL(XINPUT_XFREE, test x$with_xinput = xxfree || test x$with_xinput = xyes)
-+
-+ # Check for the RANDR extension
-+ if $PKG_CONFIG --exists "xrandr >= 1.2.99" ; then
-+ AC_DEFINE(HAVE_RANDR, 1, [Have the Xrandr extension library])
-+
-+ X_PACKAGES="$X_PACKAGES xrandr"
-+ fi
-+
-+ # Checks for Xcursor library
-+
-+ if $PKG_CONFIG --exists xcursor ; then
-+ AC_DEFINE(HAVE_XCURSOR, 1, [Have the Xcursor library])
-+
-+ X_PACKAGES="$X_PACKAGES xcursor"
-+ fi
-+
-+ # Checks for XFixes extension
-+
-+ if $PKG_CONFIG --exists xfixes ; then
-+ AC_DEFINE(HAVE_XFIXES, 1, [Have the XFIXES X extension])
-+
-+ X_PACKAGES="$X_PACKAGES xfixes"
-+ GTK_PACKAGES_FOR_X="$GTK_PACKAGES_FOR_X xfixes"
-+ fi
-+
-+ # Checks for Xcomposite extension
-+
-+ if $PKG_CONFIG --exists xcomposite ; then
-+ AC_DEFINE(HAVE_XCOMPOSITE, 1, [Have the XCOMPOSITE X extension])
-+
-+ X_PACKAGES="$X_PACKAGES xcomposite"
-+ GTK_PACKAGES_FOR_X="$GTK_PACKAGES_FOR_X xcomposite"
-+ fi
-+
-+ # Checks for Xdamage extension
-+
-+ if $PKG_CONFIG --exists xdamage ; then
-+ AC_DEFINE(HAVE_XDAMAGE, 1, [Have the XDAMAGE X extension])
-+
-+ X_PACKAGES="$X_PACKAGES xdamage"
-+ GTK_PACKAGES_FOR_X="$GTK_PACKAGES_FOR_X xdamage"
-+ fi
-+
-+ if $have_base_x_pc ; then
-+ GDK_EXTRA_LIBS="$x_extra_libs"
-+ else
-+ GDK_EXTRA_LIBS="$X_LIBS $x_extra_libs -lXext -lX11 $GDK_EXTRA_LIBS"
-+ fi
-+
-+ CPPFLAGS="$gtk_save_cppflags"
-+  LIBS="$gtk_save_LIBS"
-+
-+ AM_CONDITIONAL(USE_X11, true)
-+else
-+ XPACKAGES=
-+
-+ AM_CONDITIONAL(XINPUT_XFREE, false)
-+ AM_CONDITIONAL(USE_X11, false)
-+ AM_CONDITIONAL(HAVE_X11R6, false)
-+fi
-+
-+if test "x$gdktarget" = "xwin32"; then
-+ GDK_EXTRA_LIBS="$GDK_EXTRA_LIBS -lgdi32 -limm32 -lshell32 -lole32 -Wl,-luuid"
-+ AM_CONDITIONAL(USE_WIN32, true)
-+else
-+ AM_CONDITIONAL(USE_WIN32, false)
-+fi
-+
-+AC_ARG_ENABLE(quartz-relocation,
-+ [AS_HELP_STRING([--enable-quartz-relocation],
-+ [enable bundle-based relocation functions])],
-+ [quartz_relocation=yes])
-+
-+if test "x$gdktarget" = "xquartz"; then
-+ GDK_EXTRA_LIBS="$GDK_EXTRA_LIBS -framework Cocoa"
-+ AM_CONDITIONAL(USE_QUARTZ, true)
-+ if test "x$quartz_relocation" = xyes; then
-+ AC_DEFINE([QUARTZ_RELOCATION], [1], [Use NSBundle functions to determine load paths for libraries, translations, etc.])
-+ fi
-+
-+else
-+ AM_CONDITIONAL(USE_QUARTZ, false)
-+fi
-+
-+if test "x$gdktarget" = "xdirectfb"; then
-+ DIRECTFB_REQUIRED_VERSION=1.0.0
-+ AC_MSG_CHECKING(for DirectFB)
-+
-+ PKG_CHECK_MODULES(DIRECTFB, [directfb >= $DIRECTFB_REQUIRED_VERSION])
-+ AM_CONDITIONAL(USE_DIRECTFB, true)
-+else
-+ AM_CONDITIONAL(USE_DIRECTFB, false)
-+fi
-+
-+
-+# Check for Pango flags
-+
-+if test "x$gdktarget" = "xwin32"; then
-+ PANGO_PACKAGES="pangowin32 pangocairo"
-+else
-+ PANGO_PACKAGES="pango pangocairo"
-+fi
-+
-+AC_MSG_CHECKING(Pango flags)
-+if $PKG_CONFIG --exists $PANGO_PACKAGES ; then
-+ PANGO_CFLAGS=`$PKG_CONFIG --cflags $PANGO_PACKAGES`
-+ PANGO_LIBS=`$PKG_CONFIG --libs $PANGO_PACKAGES`
-+
-+ AC_MSG_RESULT($PANGO_CFLAGS $PANGO_LIBS)
-+else
-+ AC_MSG_ERROR([
-+*** Pango not found. Pango built with Cairo support is required
-+*** to build GTK+. See http://www.pango.org for Pango information.
-+])
-+fi
-+
-+CFLAGS="$CFLAGS $PANGO_CFLAGS"
-+
-+if $PKG_CONFIG --uninstalled $PANGO_PACKAGES; then
-+ :
-+else
-+ gtk_save_LIBS="$LIBS"
-+ LIBS="$PANGO_LIBS $LIBS"
-+ AC_TRY_LINK_FUNC(pango_context_new, :, AC_MSG_ERROR([
-+*** Can't link to Pango. Pango is required to build
-+*** GTK+. For more information see http://www.pango.org]))
-+ LIBS="$gtk_save_LIBS"
-+fi
-+
-+CFLAGS="$saved_cflags"
-+LDFLAGS="$saved_ldflags"
-+
-+# Pull in gio-unix for GDesktopAppInfo usage, see at least gdkapplaunchcontext-x11.c
-+if test "x$gdktarget" = "xx11"; then
-+ GDK_PACKAGES="$PANGO_PACKAGES gio-unix-2.0 $X_PACKAGES gdk-pixbuf-2.0 cairo-$cairo_backend"
-+else
-+ GDK_PACKAGES="$PANGO_PACKAGES gio-2.0 gdk-pixbuf-2.0 cairo-$cairo_backend"
-+fi
-+
-+GDK_DEP_LIBS="$GDK_EXTRA_LIBS `$PKG_CONFIG --libs $GDK_PACKAGES` $MATH_LIB"
-+GDK_DEP_CFLAGS="`$PKG_CONFIG --cflags gthread-2.0 $GDK_PACKAGES` $GDK_EXTRA_CFLAGS"
-+#
-+# If we aren't writing explicit dependencies, then don't put the extra libraries we need
-+# into the pkg-config files
-+#
-+if test $enable_explicit_deps != yes ; then
-+ GDK_PACKAGES="$PANGO_PACKAGES gdk-pixbuf-2.0"
-+ GDK_EXTRA_LIBS=
-+fi
-+
-+AC_SUBST(GDK_PACKAGES)
-+AC_SUBST(GDK_EXTRA_LIBS)
-+AC_SUBST(GDK_EXTRA_CFLAGS)
-+AC_SUBST(GDK_DEP_LIBS)
-+AC_SUBST(GDK_DEP_CFLAGS)
-+
-+
-+########################################
-+# Check for Accessibility Toolkit flags
-+########################################
-+
-+ATK_PACKAGES=atk
-+AC_MSG_CHECKING(ATK flags)
-+if $PKG_CONFIG --exists $ATK_PACKAGES ; then
-+ ATK_CFLAGS=`$PKG_CONFIG --cflags $ATK_PACKAGES`
-+ ATK_LIBS=`$PKG_CONFIG --libs $ATK_PACKAGES`
-+
-+ AC_MSG_RESULT($ATK_CFLAGS $ATK_LIBS)
-+else
-+ AC_MSG_ERROR([
-+*** Accessibility Toolkit not found. Accessibility Toolkit is required
-+*** to build GTK+.
-+])
-+fi
-+
-+if $PKG_CONFIG --uninstalled $ATK_PACKAGES; then
-+ :
-+else
-+ gtk_save_LIBS="$LIBS"
-+ LIBS="$ATK_LIBS $LIBS"
-+ AC_TRY_LINK_FUNC(atk_object_get_type, : , AC_MSG_ERROR([
-+ *** Cannot link to Accessibility Toolkit. Accessibility Toolkit is required
-+ *** to build GTK+]))
-+ LIBS="$gtk_save_LIBS"
-+fi
-+
-+GTK_PACKAGES="atk cairo gdk-pixbuf-2.0 gio-2.0"
-+if test "x$gdktarget" = "xx11"; then
-+ GTK_PACKAGES="$GTK_PACKAGES pangoft2"
-+fi
-+GTK_EXTRA_LIBS=
-+GTK_EXTRA_CFLAGS=
-+GTK_DEP_LIBS="$GDK_EXTRA_LIBS $GTK_DEP_LIBS_FOR_X `$PKG_CONFIG --libs $PANGO_PACKAGES $GTK_PACKAGES_FOR_X $GTK_PACKAGES` $GTK_EXTRA_LIBS $MATH_LIB"
-+GTK_DEP_CFLAGS="`$PKG_CONFIG --cflags gthread-2.0 $GDK_PACKAGES $GTK_PACKAGES` $GDK_EXTRA_CFLAGS $GTK_EXTRA_CFLAGS"
-+
-+if test x"$os_win32" = xyes; then
-+ GTK_EXTRA_CFLAGS="$msnative_struct"
-+fi
-+
-+GLIB_PREFIX="`$PKG_CONFIG --variable=prefix glib-2.0`"
-+ATK_PREFIX="`$PKG_CONFIG --variable=prefix atk`"
-+PANGO_PREFIX="`$PKG_CONFIG --variable=prefix pango`"
-+CAIRO_PREFIX="`pkg-config --variable=prefix cairo`"
-+
-+AC_SUBST(GTK_PACKAGES)
-+AC_SUBST(GTK_EXTRA_LIBS)
-+AC_SUBST(GTK_EXTRA_CFLAGS)
-+AC_SUBST(GTK_DEP_LIBS)
-+AC_SUBST(GTK_DEP_CFLAGS)
-+
-+AC_SUBST(GLIB_PREFIX)
-+AC_SUBST(ATK_PREFIX)
-+AC_SUBST(PANGO_PREFIX)
-+AC_SUBST(CAIRO_PREFIX)
-+
-+AC_SUBST(GTK_DEBUG_FLAGS)
-+AC_SUBST(GTK_XIM_FLAGS)
-+
-+GDK_PIXBUF_LIBS=`$PKG_CONFIG --libs gdk-pixbuf-2.0`
-+AC_SUBST(GDK_PIXBUF_LIBS)
-+
-+########################
-+# Checks needed for gail
-+########################
-+
-+old_LIBS="$LIBS"
-+dnl Checks for inet libraries:
-+AC_SEARCH_LIBS(gethostent, nsl)
-+AC_SEARCH_LIBS(setsockopt, socket)
-+AC_SEARCH_LIBS(connect, inet)
-+
-+dnl check for the sockaddr_un.sun_len member
-+AC_CHECK_MEMBER([struct sockaddr_un.sun_len],
-+ [struct_sockaddr_un_sun_len=true],
-+   [struct_sockaddr_un_sun_len=false],
-+ [#include <sys/types.h>
-+ #include <sys/un.h>]
-+ )
-+case $struct_sockaddr_un_sun_len in
-+ true)
-+ AC_DEFINE_UNQUOTED(HAVE_SOCKADDR_UN_SUN_LEN, 1,
-+ [Have the sockaddr_un.sun_len member])
-+ ;;
-+ *)
-+ ;;
-+esac
-+
-+GAIL_INET_LIBS="$LIBS"
-+AC_SUBST([GAIL_INET_LIBS])
-+
-+LIBS="$old_LIBS"
-+
-+################################################################
-+# Printing system checks
-+################################################################
-+
-+AC_ARG_ENABLE(cups,
-+ [AC_HELP_STRING([--disable-cups]
-+ [disable cups print backend])],,
-+ [enable_cups=auto])
-+
-+if test "x$enable_cups" = "xno"; then
-+ AM_CONDITIONAL(HAVE_CUPS, false)
-+else
-+ AC_PATH_PROG(CUPS_CONFIG, cups-config, no)
-+ if test "x$CUPS_CONFIG" = "xno"; then
-+ if test "x$enable_cups" = "xauto"; then
-+ AM_CONDITIONAL(HAVE_CUPS, false)
-+ else
-+ AC_MSG_ERROR([
-+*** cups not found.
-+])
-+ fi
-+ else
-+ CUPS_CFLAGS=`$CUPS_CONFIG --cflags | sed 's/-O[0-9]*//' | sed 's/-m[^\t]*//g'`
-+ CUPS_LIBS=`$CUPS_CONFIG --libs`
-+
-+ CUPS_API_VERSION=`$CUPS_CONFIG --api-version`
-+ CUPS_API_MAJOR=`echo $ECHO_N $CUPS_API_VERSION | awk -F. '{print $1}'`
-+ CUPS_API_MINOR=`echo $ECHO_N $CUPS_API_VERSION | awk -F. '{print $2}'`
-+
-+ if test $CUPS_API_MAJOR -gt 1 -o \
-+ $CUPS_API_MAJOR -eq 1 -a $CUPS_API_MINOR -ge 2; then
-+ AC_DEFINE(HAVE_CUPS_API_1_2, 1,
-+ [Define to 1 if CUPS 1.2 API is available])
-+ fi
-+ if test $CUPS_API_MAJOR -gt 1 -o \
-+ $CUPS_API_MAJOR -eq 1 -a $CUPS_API_MINOR -ge 6; then
-+ AC_DEFINE(HAVE_CUPS_API_1_6, 1,
-+ [Define to 1 if CUPS 1.6 API is available])
-+
-+ fi
-+
-+ AC_SUBST(CUPS_API_MAJOR)
-+ AC_SUBST(CUPS_API_MINOR)
-+ AC_SUBST(CUPS_CFLAGS)
-+ AC_SUBST(CUPS_LIBS)
-+
-+ AC_CHECK_HEADER(cups/cups.h,,AC_MSG_ERROR([[*** Sorry, cups-config present but cups/cups.h missing.]]))
-+
-+ AM_CONDITIONAL(HAVE_CUPS, true)
-+
-+ gtk_save_cflags="$CFLAGS"
-+ CFLAGS="$CUPS_CFLAGS"
-+ AC_TRY_COMPILE([#include <cups/http.h>],
-+ [http_t http; char *s = http.authstring;],
-+ [AC_DEFINE(HAVE_HTTP_AUTHSTRING, [],
-+ [Define if cups http_t authstring field is accessible])],)
-+ CFLAGS="$gtk_save_cflags"
-+
-+ AC_SUBST(HAVE_HTTP_AUTHSTRING)
-+
-+ gtk_save_libs="$LIBS"
-+ LIBS="$CUPS_LIBS"
-+ AC_CHECK_FUNCS(httpGetAuthString)
-+ LIBS="$gtk_save_libs"
-+ fi
-+fi
-+
-+# Checks to see if we should compile with PAPI backend for GTK+
-+#
-+
-+AC_ARG_ENABLE(papi,
-+ [AC_HELP_STRING([--disable-papi]
-+ [disable papi print backend])],,
-+ [enable_papi=auto])
-+
-+if test "x$enable_papi" = "xno"; then
-+ AM_CONDITIONAL(HAVE_PAPI, false)
-+else
-+ AC_MSG_CHECKING(libpapi)
-+ AC_CHECK_LIB(papi, papiServiceCreate, have_papi=yes, have_papi=no)
-+ if test $have_papi = yes; then
-+ AC_DEFINE([HAVE_PAPI], [], [Define to 1 if libpapi available])
-+ fi
-+ AM_CONDITIONAL(HAVE_PAPI, test $have_papi = yes)
-+ if test "x$enable_papi" = "xyes" -a "x$have_papi" = "xno"; then
-+ AC_MSG_ERROR([
-+*** papi not found.
-+])
-+ fi
-+fi
-+
-+AM_CONDITIONAL(HAVE_PAPI_CUPS, test "x$have_papi" = "xyes" -a "x$CUPS_CONFIG" != "xno")
-+
-+gtk_save_cppflags="$CPPFLAGS"
-+CPPFLAGS="$CPPFLAGS $GTK_DEP_CFLAGS $GDK_DEP_CFLAGS"
-+
-+AC_CHECK_HEADER(cairo-pdf.h,,AC_MSG_ERROR([
-+*** Can't find cairo-pdf.h. You must build Cairo with the pdf
-+*** backend enabled.]))
-+
-+if test "$os_win32" != "yes"; then
-+ AC_CHECK_HEADER(cairo-ps.h,,AC_MSG_ERROR([
-+*** Can't find cairo-ps.h. You must build Cairo with the
-+*** postscript backend enabled.]))
-+
-+ AC_CHECK_HEADER(cairo-svg.h,,AC_MSG_ERROR([
-+*** Can't find cairo-svg.h. You must build Cairo with the
-+*** svg backend enabled.]))
-+fi
-+
-+CPPFLAGS="$gtk_save_cppflags"
-+
-+
-+AC_ARG_ENABLE(test-print-backend,
-+ [AC_HELP_STRING([--enable-test-print-backend],
-+ [build test print backend])],,
-+ [enable_test_print_backend=no])
-+AM_CONDITIONAL(TEST_PRINT_BACKEND, test "x$enable_test_print_backend" != "xno")
-+
-+if test "$os_win32" = "yes"; then
-+ AC_CHECK_TYPES([IPrintDialogCallback],[],[],[[#include <windows.h>]])
-+fi
-+
-+################################################################
-+# Strip -export-dynamic from the link lines of various libraries
-+################################################################
-+
-+#
-+# pkg-config --libs gmodule includes the "export_dynamic" flag,
-+# but this flag is only meaningful for executables. For libraries
-+# the effect is undefined; what it causes on Linux is that the
-+# export list from -export-symbols-regex is ignored and everything
-+# is exported
-+#
-+# We are using gmodule-no-export now, but I'm leaving the stripping
-+# code in place for now, since pango and atk still require gmodule.
-+export SED
-+export_dynamic=`(./libtool --config; echo eval echo \\$export_dynamic_flag_spec) | sh`
-+if test -n "$export_dynamic"; then
-+ GDK_DEP_LIBS=`echo $GDK_DEP_LIBS | sed -e "s/$export_dynamic//"`
-+ GTK_DEP_LIBS=`echo $GTK_DEP_LIBS | sed -e "s/$export_dynamic//"`
-+fi
-+
-+##################################################
-+# GObject introspection
-+##################################################
-+
-+GOBJECT_INTROSPECTION_CHECK([0.9.3])
-+
-+##################################################
-+# Checks for gtk-doc and docbook-tools
-+##################################################
-+
-+GTK_DOC_CHECK([1.11])
-+
-+AC_CHECK_PROG(DB2HTML, db2html, true, false)
-+AM_CONDITIONAL(HAVE_DOCBOOK, $DB2HTML)
-+
-+AC_ARG_ENABLE(man,
-+ [AC_HELP_STRING([--enable-man],
-+ [regenerate man pages from Docbook [default=no]])],enable_man=yes,
-+ enable_man=no)
-+
-+if test "${enable_man}" != no; then
-+ dnl
-+ dnl Check for xsltproc
-+ dnl
-+ AC_PATH_PROG([XSLTPROC], [xsltproc])
-+ if test -z "$XSLTPROC"; then
-+ enable_man=no
-+ fi
-+
-+ dnl check for DocBook DTD and stylesheets in the local catalog.
-+ JH_CHECK_XML_CATALOG([-//OASIS//DTD DocBook XML V4.1.2//EN],
-+ [DocBook XML DTD V4.1.2],,enable_man=no)
-+ JH_CHECK_XML_CATALOG([http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl],
-+ [DocBook XSL Stylesheets],,enable_man=no)
-+fi
-+
-+AM_CONDITIONAL(ENABLE_MAN, test x$enable_man != xno)
-+
-+
-+##################################################
-+# Output commands
-+##################################################
-+
-+AC_CONFIG_COMMANDS([gdk/gdkconfig.h], [
-+ outfile=gdkconfig.h-tmp
-+ cat > $outfile <<\_______EOF
-+/* gdkconfig.h
-+ *
-+ * This is a generated file. Please modify `configure.in'
-+ */
-+
-+#ifndef GDKCONFIG_H
-+#define GDKCONFIG_H
-+
-+#ifdef __cplusplus
-+extern "C" {
-+#endif /* __cplusplus */
-+
-+#ifndef GSEAL
-+/* introduce GSEAL() here for all of Gdk and Gtk+ without the need to modify GLib */
-+# ifdef GSEAL_ENABLE
-+# define GSEAL(ident) _g_sealed__ ## ident
-+# else
-+# define GSEAL(ident) ident
-+# endif
-+#endif /* !GSEAL */
-+
-+_______EOF
-+
-+ cat >>$outfile <<_______EOF
-+$gdk_windowing
-+$gdk_wc
-+_______EOF
-+
-+ cat >>$outfile <<_______EOF
-+
-+#ifdef __cplusplus
-+}
-+#endif /* __cplusplus */
-+
-+#endif /* GDKCONFIG_H */
-+_______EOF
-+
-+
-+ if cmp -s $outfile gdk/gdkconfig.h; then
-+ AC_MSG_NOTICE([gdk/gdkconfig.h is unchanged])
-+ rm -f $outfile
-+ else
-+ mv $outfile gdk/gdkconfig.h
-+ fi
-+],[
-+if test "x$gdktarget" = "xx11" ; then
-+ gdk_windowing='
-+#define GDK_WINDOWING_X11'
-+elif test "x$gdktarget" = "xwin32" ; then
-+ gdk_windowing='
-+#define GDK_NATIVE_WINDOW_POINTER
-+
-+#define GDK_WINDOWING_WIN32'
-+elif test "x$gdktarget" = "xquartz" ; then
-+ gdk_windowing='
-+#define GDK_WINDOWING_QUARTZ'
-+elif test "x$gdktarget" = "xdirectfb" ; then
-+ gdk_windowing='
-+#define GDK_WINDOWING_DIRECTFB'
-+fi
-+
-+if test x$gdk_wchar_h = xyes; then
-+ gdk_wc='
-+#define GDK_HAVE_WCHAR_H 1'
-+fi
-+if test x$gdk_wctype_h = xyes; then
-+ gdk_wc="\$gdk_wc
-+#define GDK_HAVE_WCTYPE_H 1"
-+fi
-+if test x$gdk_working_wctype = xno; then
-+ gdk_wc="\$gdk_wc
-+#define GDK_HAVE_BROKEN_WCTYPE 1"
-+fi
-+
-+
-+])
-+
-+AC_CONFIG_FILES([
-+config.h.win32
-+gtk-zip.sh
-+Makefile
-+gdk-2.0.pc
-+gtk+-2.0.pc
-+gtk+-unix-print-2.0.pc
-+gail.pc
-+gdk-2.0-uninstalled.pc
-+gtk+-2.0-uninstalled.pc
-+gail-uninstalled.pc
-+m4macros/Makefile
-+po/Makefile.in
-+po-properties/Makefile.in
-+demos/Makefile
-+demos/gtk-demo/Makefile
-+demos/gtk-demo/geninclude.pl
-+tests/Makefile
-+docs/Makefile
-+docs/reference/Makefile
-+docs/reference/gdk/Makefile
-+docs/reference/gdk/version.xml
-+docs/reference/gtk/Makefile
-+docs/reference/gtk/version.xml
-+docs/reference/libgail-util/Makefile
-+docs/faq/Makefile
-+docs/tools/Makefile
-+docs/tutorial/Makefile
-+build/Makefile
-+build/win32/Makefile
-+build/win32/vs9/Makefile
-+build/win32/vs10/Makefile
-+gdk/Makefile
-+gdk/x11/Makefile
-+gdk/win32/Makefile
-+gdk/win32/rc/Makefile
-+gdk/win32/rc/gdk.rc
-+gdk/quartz/Makefile
-+gdk/directfb/Makefile
-+gdk/tests/Makefile
-+gtk/Makefile
-+gtk/makefile.msc
-+gtk/gtkversion.h
-+gtk/gtk-win32.rc
-+gtk/theme-bits/Makefile
-+gtk/tests/Makefile
-+modules/Makefile
-+modules/other/Makefile
-+modules/other/gail/Makefile
-+modules/other/gail/libgail-util/Makefile
-+modules/other/gail/tests/Makefile
-+modules/engines/Makefile
-+modules/engines/pixbuf/Makefile
-+modules/engines/ms-windows/Makefile
-+modules/engines/ms-windows/Theme/Makefile
-+modules/engines/ms-windows/Theme/gtk-2.0/Makefile
-+modules/input/Makefile
-+modules/printbackends/Makefile
-+modules/printbackends/cups/Makefile
-+modules/printbackends/lpr/Makefile
-+modules/printbackends/file/Makefile
-+modules/printbackends/papi/Makefile
-+modules/printbackends/test/Makefile
-+perf/Makefile
-+])
-+
-+AC_OUTPUT
-+
-+echo "configuration:
-+ target: $gdktarget"
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.22/0001-GtkButton-do-not-prelight-in-touchscreen-mode.patch b/meta/recipes-gnome/gtk+/gtk+-2.24.24/0001-GtkButton-do-not-prelight-in-touchscreen-mode.patch
index 71e334d897..71e334d897 100644
--- a/meta/recipes-gnome/gtk+/gtk+-2.24.22/0001-GtkButton-do-not-prelight-in-touchscreen-mode.patch
+++ b/meta/recipes-gnome/gtk+/gtk+-2.24.24/0001-GtkButton-do-not-prelight-in-touchscreen-mode.patch
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.22/0001-bgo-584832-Duplicate-the-exec-string-returned-by-gtk.patch b/meta/recipes-gnome/gtk+/gtk+-2.24.24/0001-bgo-584832-Duplicate-the-exec-string-returned-by-gtk.patch
index 354f0ab376..354f0ab376 100644
--- a/meta/recipes-gnome/gtk+/gtk+-2.24.22/0001-bgo-584832-Duplicate-the-exec-string-returned-by-gtk.patch
+++ b/meta/recipes-gnome/gtk+/gtk+-2.24.24/0001-bgo-584832-Duplicate-the-exec-string-returned-by-gtk.patch
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.22/cellrenderer-cairo.patch b/meta/recipes-gnome/gtk+/gtk+-2.24.24/cellrenderer-cairo.patch
index ba893292d6..ba893292d6 100644
--- a/meta/recipes-gnome/gtk+/gtk+-2.24.22/cellrenderer-cairo.patch
+++ b/meta/recipes-gnome/gtk+/gtk+-2.24.24/cellrenderer-cairo.patch
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.22/configure-nm.patch b/meta/recipes-gnome/gtk+/gtk+-2.24.24/configure-nm.patch
index d67b797852..d67b797852 100644
--- a/meta/recipes-gnome/gtk+/gtk+-2.24.22/configure-nm.patch
+++ b/meta/recipes-gnome/gtk+/gtk+-2.24.24/configure-nm.patch
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.22/configurefix.patch b/meta/recipes-gnome/gtk+/gtk+-2.24.24/configurefix.patch
index 2803691246..2803691246 100644
--- a/meta/recipes-gnome/gtk+/gtk+-2.24.22/configurefix.patch
+++ b/meta/recipes-gnome/gtk+/gtk+-2.24.24/configurefix.patch
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.22/doc-fixes.patch b/meta/recipes-gnome/gtk+/gtk+-2.24.24/doc-fixes.patch
index 74e479fd1b..74e479fd1b 100644
--- a/meta/recipes-gnome/gtk+/gtk+-2.24.22/doc-fixes.patch
+++ b/meta/recipes-gnome/gtk+/gtk+-2.24.24/doc-fixes.patch
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.22/entry-cairo.patch b/meta/recipes-gnome/gtk+/gtk+-2.24.24/entry-cairo.patch
index 3083b77830..3083b77830 100644
--- a/meta/recipes-gnome/gtk+/gtk+-2.24.22/entry-cairo.patch
+++ b/meta/recipes-gnome/gtk+/gtk+-2.24.24/entry-cairo.patch
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.24/hardcoded_libtool.patch b/meta/recipes-gnome/gtk+/gtk+-2.24.24/hardcoded_libtool.patch
new file mode 100644
index 0000000000..1ae728e70d
--- /dev/null
+++ b/meta/recipes-gnome/gtk+/gtk+-2.24.24/hardcoded_libtool.patch
@@ -0,0 +1,35 @@
+Upstream-Status: Inappropriate [embedded specific]
+
+Updated to apply to gtk+-2.24.15
+
+Signed-off-by: Marko Lindqvist <cazfi74@gmail.com>
+diff -Nurd gtk+-2.24.15/configure.ac gtk+-2.24.15/configure.ac
+--- gtk+-2.24.15/configure.ac 2013-01-12 20:52:54.000000000 +0200
++++ gtk+-2.24.15/configure.ac 2013-02-12 21:33:30.689925967 +0200
+@@ -415,7 +415,7 @@
+ case $enable_explicit_deps in
+ auto)
+ export SED
+- deplibs_check_method=`(./libtool --config; echo 'eval echo \"$deplibs_check_method\"') | sh`
++ deplibs_check_method=`(./$host_alias-libtool --config; echo 'eval echo \"$deplibs_check_method\"') | sh`
+ if test "x$deplibs_check_method" '!=' xpass_all || test "x$enable_static" = xyes ; then
+ enable_explicit_deps=yes
+ else
+@@ -774,7 +774,7 @@
+ dnl Now we check to see if our libtool supports shared lib deps
+ dnl (in a rather ugly way even)
+ if $dynworks; then
+- module_libtool_config="${CONFIG_SHELL-/bin/sh} ./libtool --config"
++ module_libtool_config="${CONFIG_SHELL-/bin/sh} $host_alias-libtool --config"
+ module_deplibs_check=`$module_libtool_config | \
+ grep '^[[a-z_]]*check[[a-z_]]*_method=[['\''"]]' | \
+ sed 's/.*[['\''"]]\(.*\)[['\''"]]$/\1/'`
+@@ -1574,7 +1574,7 @@
+ # We are using gmodule-no-export now, but I'm leaving the stripping
+ # code in place for now, since pango and atk still require gmodule.
+ export SED
+-export_dynamic=`(./libtool --config; echo eval echo \\$export_dynamic_flag_spec) | sh`
++export_dynamic=`($host_alias-libtool --config; echo eval echo \\$export_dynamic_flag_spec) | sh`
+ if test -n "$export_dynamic"; then
+ GDK_DEP_LIBS=`echo $GDK_DEP_LIBS | sed -e "s/$export_dynamic//"`
+ GTK_DEP_LIBS=`echo $GTK_DEP_LIBS | sed -e "s/$export_dynamic//"`
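
The hunks above only change which libtool script configure queries (the cross $host_alias-libtool rather than the freshly generated ./libtool); the query mechanism itself is unchanged. A minimal sketch of that mechanism, assuming a generated libtool script is present in the current directory:

#!/bin/sh
# Read one configuration variable out of a libtool script by sourcing its
# --config output and echoing the variable of interest.
export SED
export_dynamic=`(./libtool --config; echo eval echo \\$export_dynamic_flag_spec) | sh`
echo "export_dynamic_flag_spec: $export_dynamic"
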
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.22/run-iconcache.patch b/meta/recipes-gnome/gtk+/gtk+-2.24.24/run-iconcache.patch
index 597ba32470..a4e2254c59 100644
--- a/meta/recipes-gnome/gtk+/gtk+-2.24.22/run-iconcache.patch
+++ b/meta/recipes-gnome/gtk+/gtk+-2.24.24/run-iconcache.patch
@@ -1,21 +1,22 @@
Upstream-Status: Inappropriate [configuration]
-Index: gtk+-2.21.2/gtk/Makefile.am
-===================================================================
---- gtk+-2.21.2.orig/gtk/Makefile.am 2010-06-22 17:21:41.000000000 +0800
-+++ gtk+-2.21.2/gtk/Makefile.am 2010-06-22 17:28:12.000000000 +0800
-@@ -1376,11 +1376,11 @@
+diff -Nurd gtk+-2.24.24/gtk/Makefile.am gtk+-2.24.24/gtk/Makefile.am
+--- gtk+-2.24.24/gtk/Makefile.am 2014-06-23 18:08:14.000000000 +0300
++++ gtk+-2.24.24/gtk/Makefile.am 2014-09-03 23:45:12.669307700 +0300
+@@ -1391,12 +1391,12 @@
./gtk-update-icon-cache
endif
-gtkbuiltincache.h: @REBUILD@ stamp-icons
- $(MAKE) $(AM_MAKEFLAGS) gtk-update-icon-cache$(EXEEXT) $(GTK_UPDATE_ICON_CACHE_MANIFEST)
- $(gtk_update_icon_cache_program) --force --ignore-theme-index \
+- --include-image-data \
- --source builtin_icons stock-icons > gtkbuiltincache.h.tmp && \
- mv gtkbuiltincache.h.tmp gtkbuiltincache.h
+#gtkbuiltincache.h: @REBUILD@ stamp-icons
+# $(MAKE) $(AM_MAKEFLAGS) gtk-update-icon-cache$(EXEEXT) $(GTK_UPDATE_ICON_CACHE_MANIFEST)
+# $(gtk_update_icon_cache_program) --force --ignore-theme-index \
++# --include-image-data \
+# --source builtin_icons stock-icons > gtkbuiltincache.h.tmp && \
+# mv gtkbuiltincache.h.tmp gtkbuiltincache.h
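
For reference, the Makefile rule commented out above regenerates gtkbuiltincache.h with gtk-update-icon-cache at build time. The invocation itself is shown below as an illustrative sketch, assuming a natively runnable gtk-update-icon-cache and a stock-icons/ directory laid out like the one in gtk/:

#!/bin/sh
# Regenerate the builtin icon cache header the way the disabled rule does.
gtk-update-icon-cache --force --ignore-theme-index \
	--include-image-data \
	--source builtin_icons stock-icons > gtkbuiltincache.h.tmp && \
	mv gtkbuiltincache.h.tmp gtkbuiltincache.h
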
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.22/toggle-font.diff b/meta/recipes-gnome/gtk+/gtk+-2.24.24/toggle-font.diff
index 340d12008b..340d12008b 100644
--- a/meta/recipes-gnome/gtk+/gtk+-2.24.22/toggle-font.diff
+++ b/meta/recipes-gnome/gtk+/gtk+-2.24.24/toggle-font.diff
diff --git a/meta/recipes-gnome/gtk+/gtk+-2.24.22/xsettings.patch b/meta/recipes-gnome/gtk+/gtk+-2.24.24/xsettings.patch
index d0a970ad4d..d0a970ad4d 100644
--- a/meta/recipes-gnome/gtk+/gtk+-2.24.22/xsettings.patch
+++ b/meta/recipes-gnome/gtk+/gtk+-2.24.24/xsettings.patch
diff --git a/meta/recipes-gnome/gtk+/gtk+3.inc b/meta/recipes-gnome/gtk+/gtk+3.inc
index 9e96275554..3e36676ac4 100644
--- a/meta/recipes-gnome/gtk+/gtk+3.inc
+++ b/meta/recipes-gnome/gtk+/gtk+3.inc
@@ -49,7 +49,11 @@ LIBV = "3.0.0"
FILES_${PN}-demo = "${bindir}/gtk3-demo \
${bindir}/gtk3-demo-application \
${bindir}/gtk3-widget-factory \
- ${datadir}/gtk-3.0/demo"
+ ${datadir}/gtk-3.0/demo \
+ ${datadir}/applications/gtk3-demo.desktop \
+ ${datadir}/applications/gtk3-widget-factory.desktop \
+ ${datadir}/icons/hicolor/*/apps/gtk3-demo.png \
+ ${datadir}/icons/hicolor/*/apps/gtk3-widget-factory.png"
# The demo uses PNG files and mime type sniffing, so ensure that these
# dependencies are present.
@@ -101,6 +105,6 @@ python populate_packages_prepend () {
do_split_packages(d, printmodules_root, '^libprintbackend-(.*)\.so$', 'gtk3-printbackend-%s', 'GTK printbackend module for %s')
if (d.getVar('DEBIAN_NAMES', 1)):
- d.setVar('PKG_${PN}', 'libgtk-3.0')
+ d.setVar('PKG_${PN}', '${MLPREFIX}libgtk-3.0')
}
diff --git a/meta/recipes-gnome/gtk+/gtk+3/fix-build-when-wayland-backend-enabled.patch b/meta/recipes-gnome/gtk+/gtk+3/fix-build-when-wayland-backend-enabled.patch
deleted file mode 100644
index b4b1a09455..0000000000
--- a/meta/recipes-gnome/gtk+/gtk+3/fix-build-when-wayland-backend-enabled.patch
+++ /dev/null
@@ -1,43 +0,0 @@
-Fix build when wayland backend enabled
-
-Upstream-Status: Submitted
-https://bugzilla.gnome.org/show_bug.cgi?id=710584
-
-Author: Emilio Pozuelo Monfort
-
---- gtk+-3.10.7/gtk/gtkapplication.c
-+++ gtk+-3.10.7/gtk/gtkapplication.c
-@@ -144,7 +144,6 @@
-
- gboolean register_session;
-
--#ifdef GDK_WINDOWING_X11
- guint next_id;
-
- GDBusConnection *session_bus;
-@@ -161,7 +160,6 @@
- GDBusProxy *client_proxy;
- gchar *app_id;
- gchar *client_path;
--#endif
-
- #ifdef GDK_WINDOWING_QUARTZ
- GMenu *combined;
-@@ -299,6 +297,8 @@
- g_free (application->priv->client_path);
- }
-
-+#endif
-+
- const gchar *
- gtk_application_get_app_menu_object_path (GtkApplication *application)
- {
-@@ -311,8 +311,6 @@
- return application->priv->menubar_path;
- }
-
--#endif
--
- #ifdef GDK_WINDOWING_QUARTZ
-
- typedef struct {
diff --git a/meta/recipes-gnome/gtk+/gtk+3_3.10.7.bb b/meta/recipes-gnome/gtk+/gtk+3_3.12.2.bb
index d0174f6093..f4f197131b 100644
--- a/meta/recipes-gnome/gtk+/gtk+3_3.10.7.bb
+++ b/meta/recipes-gnome/gtk+/gtk+3_3.12.2.bb
@@ -4,12 +4,11 @@ MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}"
SRC_URI = "http://ftp.gnome.org/pub/gnome/sources/gtk+/${MAJ_VER}/gtk+-${PV}.tar.xz \
file://hardcoded_libtool.patch \
- file://fix-build-when-wayland-backend-enabled.patch \
file://fix-flags-for-native.patch \
"
-SRC_URI[md5sum] = "18a81944a8506231529a76bf2b68372b"
-SRC_URI[sha256sum] = "b7e9de15385031cff43897e7e59f6692eaabf500f36eef80e6b9d6486ad49427"
+SRC_URI[md5sum] = "0d6d8f9f79132b3b47475d047b369b1c"
+SRC_URI[sha256sum] = "61d74eea74231b1ea4b53084a9d6fc9917ab0e1d71b69d92cbf60a4b4fb385d0"
S = "${WORKDIR}/gtk+-${PV}"
diff --git a/meta/recipes-gnome/gtk+/gtk+_2.24.22.bb b/meta/recipes-gnome/gtk+/gtk+_2.24.24.bb
index 98c65a56c6..7d0a0a2a8d 100644
--- a/meta/recipes-gnome/gtk+/gtk+_2.24.22.bb
+++ b/meta/recipes-gnome/gtk+/gtk+_2.24.24.bb
@@ -28,8 +28,8 @@ SRC_URI = "http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-${PV}.tar.xz \
# file://combo-arrow-size.patch;striplevel=0
# file://configurefix.patch
-SRC_URI[md5sum] = "5fbbfb7637bbd571a572a2dae0e736d2"
-SRC_URI[sha256sum] = "b114b6e9fb389bf3aa8a6d09576538f58dce740779653084046852fb4140ae7f"
+SRC_URI[md5sum] = "f80ac8aa95ea8482ad89656d0370752e"
+SRC_URI[sha256sum] = "12ceb2e198c82bfb93eb36348b6e9293c8fdcd60786763d04cfec7ebe7ed3d6d"
EXTRA_OECONF = "--enable-xkb --disable-glibtest --disable-cups --disable-xinerama"
diff --git a/meta/recipes-gnome/gtk-doc-stub/gtk-doc-stub_git.bb b/meta/recipes-gnome/gtk-doc-stub/gtk-doc-stub_git.bb
index 9ceeeff1fe..40f324352f 100644
--- a/meta/recipes-gnome/gtk-doc-stub/gtk-doc-stub_git.bb
+++ b/meta/recipes-gnome/gtk-doc-stub/gtk-doc-stub_git.bb
@@ -6,8 +6,8 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
PROVIDES = "gtk-doc gobject-introspection-stub"
-SRCREV = "3dfd0a09de696ec8c544762747f8a0f77153622e"
-PV = "0.0+git${SRCPV}"
+SRCREV = "1dea266593edb766d6d898c79451ef193eb17cfa"
+PV = "1.1+git${SRCPV}"
SRC_URI = "git://git.gnome.org/${BPN}"
diff --git a/meta/recipes-gnome/hicolor-icon-theme/hicolor-icon-theme_0.12.bb b/meta/recipes-gnome/hicolor-icon-theme/hicolor-icon-theme_0.13.bb
index 4441e04f58..af9f2e755a 100644
--- a/meta/recipes-gnome/hicolor-icon-theme/hicolor-icon-theme_0.12.bb
+++ b/meta/recipes-gnome/hicolor-icon-theme/hicolor-icon-theme_0.13.bb
@@ -13,11 +13,16 @@ PR = "r1"
SRC_URI = "http://icon-theme.freedesktop.org/releases/${BPN}-${PV}.tar.gz \
file://index.theme"
-SRC_URI[md5sum] = "55cafbcef8bcf7107f6d502149eb4d87"
-SRC_URI[sha256sum] = "9edca690617eaa19054951ca53501c802180262be8880ed84754ac46c93bec73"
+SRC_URI[md5sum] = "21d0f50aa6b8eef02846cda9e5e9324c"
+SRC_URI[sha256sum] = "a38b038915480d1ddd4e3c421562560a14d42ace0449a5acc07c50f57f9c3406"
FILES_${PN} += "${datadir}/icons"
+# Disable default since make clean doesn't work
+autotools_preconfigure () {
+ :
+}
+
do_install_append () {
install -m 0644 ${WORKDIR}/index.theme ${D}/${datadir}/icons/hicolor
}
diff --git a/meta/recipes-graphics/clutter/clutter-1.0.inc b/meta/recipes-graphics/clutter/clutter-1.0.inc
index 77035d746f..da21cee8b5 100644
--- a/meta/recipes-graphics/clutter/clutter-1.0.inc
+++ b/meta/recipes-graphics/clutter/clutter-1.0.inc
@@ -2,7 +2,7 @@ SUMMARY = "Graphics library for creating hardware-accelerated user interfaces"
HOMEPAGE = "http://www.clutter-project.org/"
LICENSE = "LGPLv2.1+"
-inherit clutter
+inherit clutter ptest-gnome
DEPENDS = "pango glib-2.0 json-glib atk udev cogl-1.0"
PACKAGE_BEFORE_PN += "${PN}-examples"
@@ -33,12 +33,15 @@ PACKAGECONFIG[wayland] = "--enable-wayland-backend,--disable-wayland-backend,${E
PACKAGECONFIG[wayland-compositor] = "--enable-wayland-compositor,--disable-wayland-compositor,wayland"
# Default configuration, distros might want to override
-PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland', '', d)} \
+PACKAGECONFIG ??= "egl \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland', '', d)} \
${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'glx x11', '', d)}"
FILES_${PN}-dbg += "${libdir}/clutter/examples/.debug"
FILES_${PN}-examples = "${libdir}/clutter/examples"
+FILES_${PN}-dbg += "${libexecdir}/installed-tests/.debug"
+
do_configure_prepend() {
# see https://bugzilla.gnome.org/show_bug.cgi?id=661128 for this
touch -t 200001010000 ${S}/po/clutter-1.0.pot
diff --git a/meta/recipes-graphics/clutter/clutter-1.0/run-installed-tests-with-tap-output.patch b/meta/recipes-graphics/clutter/clutter-1.0/run-installed-tests-with-tap-output.patch
new file mode 100644
index 0000000000..80acab867c
--- /dev/null
+++ b/meta/recipes-graphics/clutter/clutter-1.0/run-installed-tests-with-tap-output.patch
@@ -0,0 +1,20 @@
+Configure output from installed-tests to be TAP compliant, such that gnome-desktop-testing-runner properly interprets the results.
+
+Upstream-Status: Pending
+
+Signed-off-by: Tim Orling <TicoTimo@gmail.com>
+
+Index: clutter-1.20.0/build/autotools/glib-tap.mk
+===================================================================
+--- clutter-1.20.0.orig/build/autotools/glib-tap.mk
++++ clutter-1.20.0/build/autotools/glib-tap.mk
+@@ -128,7 +128,8 @@ installed_test_meta_DATA = $(installed_t
+ $(AM_V_GEN) (echo '[Test]' > $@.tmp; \
+ echo 'Type=session' >> $@.tmp; \
+ echo 'TestEnvironment=G_ENABLE_DIAGNOSTIC=0;CLUTTER_ENABLE_DIAGNOSTIC=0;' >> $@.tmp; \
+- echo 'Exec=$(installed_testdir)/$<' >> $@.tmp; \
++ echo 'Exec=$(installed_testdir)/$< --tap' >> $@.tmp; \
++ echo 'Output=TAP' >> $@.tmp; \
+ mv $@.tmp $@)
+
+ CLEANFILES += $(installed_test_meta_DATA)
diff --git a/meta/recipes-graphics/clutter/clutter-1.0/run-ptest b/meta/recipes-graphics/clutter/clutter-1.0/run-ptest
new file mode 100644
index 0000000000..98877e5226
--- /dev/null
+++ b/meta/recipes-graphics/clutter/clutter-1.0/run-ptest
@@ -0,0 +1,3 @@
+#! /bin/sh
+
+gnome-desktop-testing-runner clutter
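
Taken together, the two additions above wire clutter's installed tests into ptest: the glib-tap.mk change makes each generated .test description run its binary with --tap and declare TAP output, and run-ptest simply hands the suite to gnome-desktop-testing-runner, which reads those descriptions. A hand-written descriptor of the same shape, with a placeholder test path and name, would look like:

#!/bin/sh
# Emit an installed-test description equivalent to what the patched
# glib-tap.mk generates; the test binary path below is a placeholder.
cat > example.test <<'EOF'
[Test]
Type=session
TestEnvironment=G_ENABLE_DIAGNOSTIC=0;CLUTTER_ENABLE_DIAGNOSTIC=0;
Exec=/usr/libexec/installed-tests/clutter/example --tap
Output=TAP
EOF
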
diff --git a/meta/recipes-graphics/clutter/clutter-1.0_1.18.2.bb b/meta/recipes-graphics/clutter/clutter-1.0_1.18.2.bb
deleted file mode 100644
index 452e7eefd0..0000000000
--- a/meta/recipes-graphics/clutter/clutter-1.0_1.18.2.bb
+++ /dev/null
@@ -1,8 +0,0 @@
-require clutter-1.0.inc
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
-
-SRC_URI[archive.md5sum] = "471f5ea423e20d4140c7771873daef29"
-SRC_URI[archive.sha256sum] = "f9fe12e6148426063c90e67dfaeb56013bf1aea224ef502223d13eab6c1add63"
-
-SRC_URI += "file://install-examples.patch"
diff --git a/meta/recipes-graphics/clutter/clutter-1.0_1.20.0.bb b/meta/recipes-graphics/clutter/clutter-1.0_1.20.0.bb
new file mode 100644
index 0000000000..5f39a3a8ac
--- /dev/null
+++ b/meta/recipes-graphics/clutter/clutter-1.0_1.20.0.bb
@@ -0,0 +1,10 @@
+require clutter-1.0.inc
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c"
+
+SRC_URI[archive.md5sum] = "a8a33a57a944c6d7c7c013ce9aa3222b"
+SRC_URI[archive.sha256sum] = "cc940809e6e1469ce349c4bddb0cbcc2c13c087d4fc15cda9278d855ee2d1293"
+
+SRC_URI += "file://install-examples.patch \
+ file://run-installed-tests-with-tap-output.patch \
+ file://run-ptest"
diff --git a/meta/recipes-graphics/drm/libdrm.inc b/meta/recipes-graphics/drm/libdrm.inc
index 5ec63071ef..cfac9e2a2b 100644
--- a/meta/recipes-graphics/drm/libdrm.inc
+++ b/meta/recipes-graphics/drm/libdrm.inc
@@ -23,6 +23,7 @@ EXTRA_OECONF += "--disable-cairo-tests \
--enable-omap-experimental-api \
--enable-freedreno-experimental-api \
--enable-install-test-programs \
+ --disable-manpages \
"
ALLOW_EMPTY_${PN}-drivers = "1"
PACKAGES =+ "${PN}-tests ${PN}-drivers ${PN}-radeon ${PN}-nouveau ${PN}-omap \
diff --git a/meta/recipes-graphics/drm/libdrm/GNU_SOURCE_definition.patch b/meta/recipes-graphics/drm/libdrm/GNU_SOURCE_definition.patch
deleted file mode 100644
index 8eb1d5e1ea..0000000000
--- a/meta/recipes-graphics/drm/libdrm/GNU_SOURCE_definition.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-uclibc headers need to know if _GNU_SOURCE is defined or not and its defined
-in config.h so include it first to get the definition if its there fixed build
-problems on uclibc
-
-test_decode.c:107:2: error: implicit declaration of function 'open_memstream' [-Werror=implicit-function-declaration]
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-Upstream-Status: Pending
-
-Index: libdrm-2.4.30/intel/test_decode.c
-===================================================================
---- libdrm-2.4.30.orig/intel/test_decode.c 2012-02-03 14:28:46.409355918 -0800
-+++ libdrm-2.4.30/intel/test_decode.c 2012-02-03 14:29:02.357356689 -0800
-@@ -21,6 +21,8 @@
- * IN THE SOFTWARE.
- */
-
-+#include "config.h"
-+
- #include <string.h>
- #include <stdlib.h>
- #include <stdio.h>
-@@ -31,7 +33,6 @@
- #include <sys/mman.h>
- #include <err.h>
-
--#include "config.h"
- #include "intel_bufmgr.h"
- #include "intel_chipset.h"
-
diff --git a/meta/recipes-graphics/drm/libdrm_2.4.54.bb b/meta/recipes-graphics/drm/libdrm_2.4.54.bb
deleted file mode 100644
index 12eefc7751..0000000000
--- a/meta/recipes-graphics/drm/libdrm_2.4.54.bb
+++ /dev/null
@@ -1,8 +0,0 @@
-require libdrm.inc
-
-SRC_URI += "file://installtests.patch \
- file://GNU_SOURCE_definition.patch \
- "
-SRC_URI[md5sum] = "56e98a9c2073c3fab7f95e003b657f46"
-SRC_URI[sha256sum] = "d94001ebfbe80e1523d1228ee2df57294698d1c734fad9ccf53efde8932fe4e9"
-
diff --git a/meta/recipes-graphics/drm/libdrm_2.4.58.bb b/meta/recipes-graphics/drm/libdrm_2.4.58.bb
new file mode 100644
index 0000000000..7b1207e27e
--- /dev/null
+++ b/meta/recipes-graphics/drm/libdrm_2.4.58.bb
@@ -0,0 +1,6 @@
+require libdrm.inc
+
+SRC_URI += "file://installtests.patch "
+
+SRC_URI[md5sum] = "24213913333d72b36c16463ed92e522a"
+SRC_URI[sha256sum] = "b155fae6b9c9a3b02ef8b77f58c7c219194c996a4018dc55ba66c03996a365dd"
diff --git a/meta/recipes-graphics/glew/glew/fix-glew.pc-install.patch b/meta/recipes-graphics/glew/glew/fix-glew.pc-install.patch
index a20fbfce00..70a99aeeee 100644
--- a/meta/recipes-graphics/glew/glew/fix-glew.pc-install.patch
+++ b/meta/recipes-graphics/glew/glew/fix-glew.pc-install.patch
@@ -39,7 +39,8 @@ Index: glew-1.11.0/glew.pc.in
-Version: @version@
-Cflags: -I${includedir} @cflags@
-Libs: -L${libdir} -l@libname@
+-Requires: @requireslib@
+Version: @VERSION@
+Cflags: -I${includedir} @CFLAGS@
+Libs: -L${libdir} -lGLEW
- Requires: @requireslib@
++Requires: glu
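
With the patched template, the substituted glew.pc carries a Requires: glu line and links consumers against -lGLEW. A quick sanity check, assuming the patched glew.pc is on the pkg-config search path:

#!/bin/sh
# Confirm glew.pc resolves and report what it pulls in.
pkg-config --exists glew || { echo "glew.pc not found" >&2; exit 1; }
pkg-config --print-requires glew
pkg-config --cflags --libs glew
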
diff --git a/meta/recipes-graphics/harfbuzz/harfbuzz_0.9.29.bb b/meta/recipes-graphics/harfbuzz/harfbuzz_0.9.35.bb
index 96a4932a51..2329b2ffc8 100644
--- a/meta/recipes-graphics/harfbuzz/harfbuzz_0.9.29.bb
+++ b/meta/recipes-graphics/harfbuzz/harfbuzz_0.9.35.bb
@@ -11,8 +11,8 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=e021dd6dda6ff1e6b1044002fc662b9b \
SECTION = "libs"
SRC_URI = "http://www.freedesktop.org/software/harfbuzz/release/${BP}.tar.bz2"
-SRC_URI[md5sum] = "6251eb6d57e8c4e9e855188dbbdf8b6d"
-SRC_URI[sha256sum] = "601cea38c6fa0cf362dd9042c66cf4db711c5f9390de4ca46d6d2fc3f64de390"
+SRC_URI[md5sum] = "531ee8650626ecddcd90b2a4637e31d4"
+SRC_URI[sha256sum] = "0aa1a8aba6f502321cf6fef3c9d2c73dde48389c5ed1d3615a7691944c2a06ed"
inherit autotools pkgconfig lib_package
DEPENDS = "glib-2.0 cairo freetype"
diff --git a/meta/recipes-graphics/mesa/mesa-demos/0003-configure-Allow-to-disable-demos-which-require-GLEW-.patch b/meta/recipes-graphics/mesa/mesa-demos/0003-configure-Allow-to-disable-demos-which-require-GLEW-.patch
index e02f1ab4ec..4b07193a7f 100644
--- a/meta/recipes-graphics/mesa/mesa-demos/0003-configure-Allow-to-disable-demos-which-require-GLEW-.patch
+++ b/meta/recipes-graphics/mesa/mesa-demos/0003-configure-Allow-to-disable-demos-which-require-GLEW-.patch
@@ -95,11 +95,16 @@ diff --git a/src/Makefile.am b/src/Makefile.am
index 1647d64..754c47c 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
-@@ -23,14 +23,13 @@
+@@ -23,14 +23,18 @@
# Eric Anholt <eric@anholt.net>
++if HAVE_GLEW
++UTIL = util
++endif
++
SUBDIRS = \
- util \
++ $(UTIL) \
data \
demos \
egl \
@@ -111,7 +116,7 @@ index 1647d64..754c47c 100644
objviewer \
osdemos \
perf \
-@@ -40,8 +39,13 @@ SUBDIRS = \
+@@ -40,8 +39,12 @@ SUBDIRS = \
slang \
tests \
tools \
@@ -125,7 +130,6 @@ index 1647d64..754c47c 100644
+SUBDIRS += \
+ vp \
+ vpglsl \
-+ util \
+ trivial
+endif
diff --git a/src/demos/Makefile.am b/src/demos/Makefile.am
diff --git a/meta/recipes-graphics/mesa/mesa-gl_10.1.3.bb b/meta/recipes-graphics/mesa/mesa-gl_10.3.4.bb
index 2acc21dce5..2acc21dce5 100644
--- a/meta/recipes-graphics/mesa/mesa-gl_10.1.3.bb
+++ b/meta/recipes-graphics/mesa/mesa-gl_10.3.4.bb
diff --git a/meta/recipes-graphics/mesa/mesa.inc b/meta/recipes-graphics/mesa/mesa.inc
index 3298e00ebe..cb1837ac51 100644
--- a/meta/recipes-graphics/mesa/mesa.inc
+++ b/meta/recipes-graphics/mesa/mesa.inc
@@ -65,6 +65,7 @@ MESA_LLVM_RELEASE ?= "3.3"
PACKAGECONFIG[gallium-llvm] = "--enable-gallium-llvm --with-llvm-shared-libs, --disable-gallium-llvm, llvm${MESA_LLVM_RELEASE} \
${@'elfutils' if ${GALLIUMDRIVERS_LLVM33_ENABLED} else ''}"
export WANT_LLVM_RELEASE = "${MESA_LLVM_RELEASE}"
+PACKAGECONFIG[xa] = "--enable-xa, --disable-xa"
# llvmpipe is slow if compiled with -fomit-frame-pointer (e.g. -O2)
FULL_OPTIMIZATION_append = " -fno-omit-frame-pointer"
@@ -87,6 +88,7 @@ PACKAGES =+ "libegl-mesa libegl-mesa-dev \
libegl-gallium libgbm-gallium \
libopenvg libopenvg-dev \
libxvmcsoftpipe libxvmcsoftpipe-dev \
+ libxatracker libxatracker-dev \
mesa-megadriver \
"
@@ -143,7 +145,7 @@ python mesa_populate_packages() {
for p in dri_pkgs:
m = re.match('^(.*)_dri\.so$', p)
if m:
- pkg_name = " mesa-driver-%s" % legitimize_package_name(m.group(1))
+ pkg_name = " ${MLPREFIX}mesa-driver-%s" % legitimize_package_name(m.group(1))
d.appendVar("RPROVIDES_%s" % lib_name, pkg_name)
d.appendVar("RCONFLICTS_%s" % lib_name, pkg_name)
d.appendVar("RREPLACES_%s" % lib_name, pkg_name)
@@ -171,6 +173,7 @@ FILES_libegl-gallium = "${libdir}/egl/egl_gallium.so*"
FILES_libgbm-gallium = "${libdir}/gbm/gbm_gallium_drm.so*"
FILES_libopenvg = "${libdir}/libOpenVG.so.*"
FILES_libxvmcsoftpipe = "${libdir}/libXvMCsoftpipe.so.*"
+FILES_libxatracker = "${libdir}/libxatracker.so.*"
FILES_${PN}-dev = "${libdir}/pkgconfig/dri.pc"
FILES_libegl-mesa-dev = "${libdir}/libEGL.* ${includedir}/EGL ${includedir}/KHR ${libdir}/pkgconfig/egl.pc"
@@ -185,5 +188,8 @@ FILES_libwayland-egl-dev = "${libdir}/pkgconfig/wayland-egl.pc ${libdir}/libwayl
FILES_libopenvg-dev = "${libdir}/libOpenVG.so ${libdir}/libOpenVG.la* \
${includedir}/VG ${libdir}/pkgconfig/vg.pc"
FILES_libxvmcsoftpipe-dev = "${libdir}/libXvMCsoftpipe.so ${libdir}/libXvMCsoftpipe.la"
+FILES_libxatracker-dev = "${libdir}/libxatracker.so ${libdir}/libxatracker.la \
+ ${includedir}/xa_tracker.h ${includedir}/xa_composite.h ${includedir}/xa_context.h \
+ ${libdir}/pkgconfig/xatracker.pc"
FILES_${PN}-dbg += "${libdir}/dri/.debug/* ${libdir}/egl/.debug/* ${libdir}/gbm/.debug/* ${libdir}/gallium-pipe/.debug"
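
The new libxatracker packaging can be sanity-checked against a built target sysroot through pkg-config, since the -dev package now ships ${libdir}/pkgconfig/xatracker.pc; the sysroot path below is only an example:

#!/bin/sh
# Verify the XA state tracker development files are discoverable in a sysroot.
export PKG_CONFIG_SYSROOT_DIR=/path/to/sysroot
export PKG_CONFIG_PATH=$PKG_CONFIG_SYSROOT_DIR/usr/lib/pkgconfig
pkg-config --exists xatracker && \
	echo "xatracker `pkg-config --modversion xatracker`" && \
	pkg-config --cflags --libs xatracker
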
diff --git a/meta/recipes-graphics/mesa/mesa/0002-pipe_loader_sw-include-xlib_sw_winsys.h-only-when-HA.patch b/meta/recipes-graphics/mesa/mesa/0002-pipe_loader_sw-include-xlib_sw_winsys.h-only-when-HA.patch
deleted file mode 100644
index 2df5e61d21..0000000000
--- a/meta/recipes-graphics/mesa/mesa/0002-pipe_loader_sw-include-xlib_sw_winsys.h-only-when-HA.patch
+++ /dev/null
@@ -1,52 +0,0 @@
-From debac5531f7107d239530ff6e29eeda72b9ec1e9 Mon Sep 17 00:00:00 2001
-From: Martin Jansa <Martin.Jansa@gmail.com>
-Date: Sat, 29 Jun 2013 11:20:51 +0200
-Subject: [PATCH 2/4] pipe_loader_sw: include xlib_sw_winsys.h only when
- HAVE_PIPE_LOADER_XLIB
-
-* HAVE_WINSYS_XLIB was removed in
- commit b3f1f665b0fef178ae193e6b111f14c9a5ad3b25
- Author: Matt Turner <mattst88@gmail.com>
- Date: Sun Jan 20 15:32:08 2013 -0800
- build: Get rid of GALLIUM_WINSYS_DIRS
-
-* HAVE_PIPE_LOADER_XLIB is set correctly:
- if test "x$NEED_WINSYS_XLIB" = xyes; then
- GALLIUM_PIPE_LOADER_DEFINES="$GALLIUM_PIPE_LOADER_DEFINES -DHAVE_PIPE_LOADER_XLIB"
- GALLIUM_PIPE_LOADER_LIBS="$GALLIUM_PIPE_LOADER_LIBS \$(top_builddir)/src/gallium/winsys/sw/xlib/libws_xlib.la"
- fi
-
-* fixes build of pipe_loader_sw without libx11 headers available
-
-Upstream-Status: Submitted https://bugs.freedesktop.org/show_bug.cgi?id=66357
-Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
----
- src/gallium/auxiliary/pipe-loader/pipe_loader_sw.c | 4 +++-
- 1 file changed, 3 insertions(+), 1 deletion(-)
-
-diff --git a/src/gallium/auxiliary/pipe-loader/pipe_loader_sw.c b/src/gallium/auxiliary/pipe-loader/pipe_loader_sw.c
-index c2b78c6..0da3f4d 100644
---- a/src/gallium/auxiliary/pipe-loader/pipe_loader_sw.c
-+++ b/src/gallium/auxiliary/pipe-loader/pipe_loader_sw.c
-@@ -31,7 +31,9 @@
- #include "util/u_dl.h"
- #include "sw/null/null_sw_winsys.h"
- #include "target-helpers/inline_sw_helper.h"
-+#ifdef HAVE_PIPE_LOADER_XLIB
- #include "state_tracker/xlib_sw_winsys.h"
-+#endif
-
- struct pipe_loader_sw_device {
- struct pipe_loader_device base;
-@@ -44,7 +46,7 @@ struct pipe_loader_sw_device {
- static struct pipe_loader_ops pipe_loader_sw_ops;
-
- static struct sw_winsys *(*backends[])() = {
--#ifdef HAVE_WINSYS_XLIB
-+#ifdef HAVE_PIPE_LOADER_XLIB
- x11_sw_create,
- #endif
- null_sw_create
---
-1.8.2.1
-
diff --git a/meta/recipes-graphics/mesa/mesa/0003-EGL-Mutate-NativeDisplayType-depending-on-config.patch b/meta/recipes-graphics/mesa/mesa/0003-EGL-Mutate-NativeDisplayType-depending-on-config.patch
deleted file mode 100644
index 30a3d98758..0000000000
--- a/meta/recipes-graphics/mesa/mesa/0003-EGL-Mutate-NativeDisplayType-depending-on-config.patch
+++ /dev/null
@@ -1,362 +0,0 @@
-From 06c1ba29de8a26fffb73ee99f0fc54c704e9fee4 Mon Sep 17 00:00:00 2001
-From: Daniel Stone <daniel@fooishbar.org>
-Date: Fri, 24 May 2013 17:20:27 +0100
-Subject: [PATCH 3/5] EGL: Mutate NativeDisplayType depending on config
-
-If we go through ./configure without enabling X11 anywhere, then set the
-fallback types for EGL NativeDisplay and friends, rather than assuming
-X11/Xlib.
-
-Upstream-Status: Backport (slightly different solution was applied in master
-https://bugs.freedesktop.org/show_bug.cgi?id=64959)
-
-Signed-off-by: Daniel Stone <daniel@fooishbar.org>
-Signed-off-by: Martin Jansa <Martin.Jansa@gmail.com>
----
- configure.ac | 9 +++
- include/EGL/eglplatform.h | 146 -------------------------------------------
- include/EGL/eglplatform.h.in | 146 +++++++++++++++++++++++++++++++++++++++++++
- 3 files changed, 155 insertions(+), 146 deletions(-)
- delete mode 100644 include/EGL/eglplatform.h
- create mode 100644 include/EGL/eglplatform.h.in
-
-diff --git a/configure.ac b/configure.ac
-index 2b4a374..d4c7a95 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -1565,12 +1565,20 @@ fi
-
- EGL_PLATFORMS="$egl_platforms"
-
-+if echo "$egl_platforms" | grep 'x11' >/dev/null 2>&1; then
-+ MESA_EGL_NO_X11_HEADERS=0
-+else
-+ MESA_EGL_NO_X11_HEADERS=1
-+fi
-+
- AM_CONDITIONAL(HAVE_EGL_PLATFORM_X11, echo "$egl_platforms" | grep 'x11' >/dev/null 2>&1)
- AM_CONDITIONAL(HAVE_EGL_PLATFORM_WAYLAND, echo "$egl_platforms" | grep 'wayland' >/dev/null 2>&1)
- AM_CONDITIONAL(HAVE_EGL_PLATFORM_DRM, echo "$egl_platforms" | grep 'drm' >/dev/null 2>&1)
- AM_CONDITIONAL(HAVE_EGL_PLATFORM_FBDEV, echo "$egl_platforms" | grep 'fbdev' >/dev/null 2>&1)
- AM_CONDITIONAL(HAVE_EGL_PLATFORM_NULL, echo "$egl_platforms" | grep 'null' >/dev/null 2>&1)
-
-+AC_SUBST([MESA_EGL_NO_X11_HEADERS])
-+
- AM_CONDITIONAL(HAVE_EGL_DRIVER_DRI2, test "x$HAVE_EGL_DRIVER_DRI2" != "x")
- AM_CONDITIONAL(HAVE_EGL_DRIVER_GLX, test "x$HAVE_EGL_DRIVER_GLX" != "x")
-
-@@ -2042,6 +2050,7 @@ CXXFLAGS="$CXXFLAGS $USER_CXXFLAGS"
-
- dnl Substitute the config
- AC_CONFIG_FILES([Makefile
-+ include/EGL/eglplatform.h
- src/Makefile
- src/egl/Makefile
- src/egl/drivers/Makefile
-diff --git a/include/EGL/eglplatform.h b/include/EGL/eglplatform.h
-deleted file mode 100644
-index 17fdc61..0000000
---- a/include/EGL/eglplatform.h
-+++ /dev/null
-@@ -1,146 +0,0 @@
--#ifndef __eglplatform_h_
--#define __eglplatform_h_
--
--/*
--** Copyright (c) 2007-2009 The Khronos Group Inc.
--**
--** Permission is hereby granted, free of charge, to any person obtaining a
--** copy of this software and/or associated documentation files (the
--** "Materials"), to deal in the Materials without restriction, including
--** without limitation the rights to use, copy, modify, merge, publish,
--** distribute, sublicense, and/or sell copies of the Materials, and to
--** permit persons to whom the Materials are furnished to do so, subject to
--** the following conditions:
--**
--** The above copyright notice and this permission notice shall be included
--** in all copies or substantial portions of the Materials.
--**
--** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
--** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
--** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
--** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
--** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
--** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
--** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
--*/
--
--/* Platform-specific types and definitions for egl.h
-- * $Revision: 12306 $ on $Date: 2010-08-25 09:51:28 -0700 (Wed, 25 Aug 2010) $
-- *
-- * Adopters may modify khrplatform.h and this file to suit their platform.
-- * You are encouraged to submit all modifications to the Khronos group so that
-- * they can be included in future versions of this file. Please submit changes
-- * by sending them to the public Khronos Bugzilla (http://khronos.org/bugzilla)
-- * by filing a bug against product "EGL" component "Registry".
-- */
--
--#include <KHR/khrplatform.h>
--
--/* Macros used in EGL function prototype declarations.
-- *
-- * EGL functions should be prototyped as:
-- *
-- * EGLAPI return-type EGLAPIENTRY eglFunction(arguments);
-- * typedef return-type (EXPAPIENTRYP PFNEGLFUNCTIONPROC) (arguments);
-- *
-- * KHRONOS_APICALL and KHRONOS_APIENTRY are defined in KHR/khrplatform.h
-- */
--
--#ifndef EGLAPI
--#define EGLAPI KHRONOS_APICALL
--#endif
--
--#ifndef EGLAPIENTRY
--#define EGLAPIENTRY KHRONOS_APIENTRY
--#endif
--#define EGLAPIENTRYP EGLAPIENTRY*
--
--/* The types NativeDisplayType, NativeWindowType, and NativePixmapType
-- * are aliases of window-system-dependent types, such as X Display * or
-- * Windows Device Context. They must be defined in platform-specific
-- * code below. The EGL-prefixed versions of Native*Type are the same
-- * types, renamed in EGL 1.3 so all types in the API start with "EGL".
-- *
-- * Khronos STRONGLY RECOMMENDS that you use the default definitions
-- * provided below, since these changes affect both binary and source
-- * portability of applications using EGL running on different EGL
-- * implementations.
-- */
--
--#if defined(_WIN32) || defined(__VC32__) && !defined(__CYGWIN__) && !defined(__SCITECH_SNAP__) /* Win32 and WinCE */
--#ifndef WIN32_LEAN_AND_MEAN
--#define WIN32_LEAN_AND_MEAN 1
--#endif
--#include <windows.h>
--
--typedef HDC EGLNativeDisplayType;
--typedef HBITMAP EGLNativePixmapType;
--typedef HWND EGLNativeWindowType;
--
--#elif defined(__WINSCW__) || defined(__SYMBIAN32__) /* Symbian */
--
--typedef int EGLNativeDisplayType;
--typedef void *EGLNativeWindowType;
--typedef void *EGLNativePixmapType;
--
--#elif defined(WL_EGL_PLATFORM)
--
--typedef struct wl_display *EGLNativeDisplayType;
--typedef struct wl_egl_pixmap *EGLNativePixmapType;
--typedef struct wl_egl_window *EGLNativeWindowType;
--
--#elif defined(__GBM__)
--
--typedef struct gbm_device *EGLNativeDisplayType;
--typedef struct gbm_bo *EGLNativePixmapType;
--typedef void *EGLNativeWindowType;
--
--#elif defined(ANDROID) /* Android */
--
--struct ANativeWindow;
--struct egl_native_pixmap_t;
--
--typedef struct ANativeWindow *EGLNativeWindowType;
--typedef struct egl_native_pixmap_t *EGLNativePixmapType;
--typedef void *EGLNativeDisplayType;
--
--#elif defined(__unix__)
--
--#ifdef MESA_EGL_NO_X11_HEADERS
--
--typedef void *EGLNativeDisplayType;
--typedef khronos_uint32_t EGLNativePixmapType;
--typedef khronos_uint32_t EGLNativeWindowType;
--
--#else
--
--/* X11 (tentative) */
--#include <X11/Xlib.h>
--#include <X11/Xutil.h>
--
--typedef Display *EGLNativeDisplayType;
--typedef Pixmap EGLNativePixmapType;
--typedef Window EGLNativeWindowType;
--
--#endif /* MESA_EGL_NO_X11_HEADERS */
--
--#else
--#error "Platform not recognized"
--#endif
--
--/* EGL 1.2 types, renamed for consistency in EGL 1.3 */
--typedef EGLNativeDisplayType NativeDisplayType;
--typedef EGLNativePixmapType NativePixmapType;
--typedef EGLNativeWindowType NativeWindowType;
--
--
--/* Define EGLint. This must be a signed integral type large enough to contain
-- * all legal attribute names and values passed into and out of EGL, whether
-- * their type is boolean, bitmask, enumerant (symbolic constant), integer,
-- * handle, or other. While in general a 32-bit integer will suffice, if
-- * handles are 64 bit types, then EGLint should be defined as a signed 64-bit
-- * integer type.
-- */
--typedef khronos_int32_t EGLint;
--
--#endif /* __eglplatform_h */
-diff --git a/include/EGL/eglplatform.h.in b/include/EGL/eglplatform.h.in
-new file mode 100644
-index 0000000..5126c92
---- /dev/null
-+++ b/include/EGL/eglplatform.h.in
-@@ -0,0 +1,146 @@
-+#ifndef __eglplatform_h_
-+#define __eglplatform_h_
-+
-+/*
-+** Copyright (c) 2007-2009 The Khronos Group Inc.
-+**
-+** Permission is hereby granted, free of charge, to any person obtaining a
-+** copy of this software and/or associated documentation files (the
-+** "Materials"), to deal in the Materials without restriction, including
-+** without limitation the rights to use, copy, modify, merge, publish,
-+** distribute, sublicense, and/or sell copies of the Materials, and to
-+** permit persons to whom the Materials are furnished to do so, subject to
-+** the following conditions:
-+**
-+** The above copyright notice and this permission notice shall be included
-+** in all copies or substantial portions of the Materials.
-+**
-+** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-+** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-+** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-+** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-+** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-+** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-+** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
-+*/
-+
-+/* Platform-specific types and definitions for egl.h
-+ * $Revision: 12306 $ on $Date: 2010-08-25 09:51:28 -0700 (Wed, 25 Aug 2010) $
-+ *
-+ * Adopters may modify khrplatform.h and this file to suit their platform.
-+ * You are encouraged to submit all modifications to the Khronos group so that
-+ * they can be included in future versions of this file. Please submit changes
-+ * by sending them to the public Khronos Bugzilla (http://khronos.org/bugzilla)
-+ * by filing a bug against product "EGL" component "Registry".
-+ */
-+
-+#include <KHR/khrplatform.h>
-+
-+/* Macros used in EGL function prototype declarations.
-+ *
-+ * EGL functions should be prototyped as:
-+ *
-+ * EGLAPI return-type EGLAPIENTRY eglFunction(arguments);
-+ * typedef return-type (EXPAPIENTRYP PFNEGLFUNCTIONPROC) (arguments);
-+ *
-+ * KHRONOS_APICALL and KHRONOS_APIENTRY are defined in KHR/khrplatform.h
-+ */
-+
-+#ifndef EGLAPI
-+#define EGLAPI KHRONOS_APICALL
-+#endif
-+
-+#ifndef EGLAPIENTRY
-+#define EGLAPIENTRY KHRONOS_APIENTRY
-+#endif
-+#define EGLAPIENTRYP EGLAPIENTRY*
-+
-+/* The types NativeDisplayType, NativeWindowType, and NativePixmapType
-+ * are aliases of window-system-dependent types, such as X Display * or
-+ * Windows Device Context. They must be defined in platform-specific
-+ * code below. The EGL-prefixed versions of Native*Type are the same
-+ * types, renamed in EGL 1.3 so all types in the API start with "EGL".
-+ *
-+ * Khronos STRONGLY RECOMMENDS that you use the default definitions
-+ * provided below, since these changes affect both binary and source
-+ * portability of applications using EGL running on different EGL
-+ * implementations.
-+ */
-+
-+#if defined(_WIN32) || defined(__VC32__) && !defined(__CYGWIN__) && !defined(__SCITECH_SNAP__) /* Win32 and WinCE */
-+#ifndef WIN32_LEAN_AND_MEAN
-+#define WIN32_LEAN_AND_MEAN 1
-+#endif
-+#include <windows.h>
-+
-+typedef HDC EGLNativeDisplayType;
-+typedef HBITMAP EGLNativePixmapType;
-+typedef HWND EGLNativeWindowType;
-+
-+#elif defined(__WINSCW__) || defined(__SYMBIAN32__) /* Symbian */
-+
-+typedef int EGLNativeDisplayType;
-+typedef void *EGLNativeWindowType;
-+typedef void *EGLNativePixmapType;
-+
-+#elif defined(WL_EGL_PLATFORM)
-+
-+typedef struct wl_display *EGLNativeDisplayType;
-+typedef struct wl_egl_pixmap *EGLNativePixmapType;
-+typedef struct wl_egl_window *EGLNativeWindowType;
-+
-+#elif defined(__GBM__)
-+
-+typedef struct gbm_device *EGLNativeDisplayType;
-+typedef struct gbm_bo *EGLNativePixmapType;
-+typedef void *EGLNativeWindowType;
-+
-+#elif defined(ANDROID) /* Android */
-+
-+struct ANativeWindow;
-+struct egl_native_pixmap_t;
-+
-+typedef struct ANativeWindow *EGLNativeWindowType;
-+typedef struct egl_native_pixmap_t *EGLNativePixmapType;
-+typedef void *EGLNativeDisplayType;
-+
-+#elif defined(__unix__)
-+
-+#if @MESA_EGL_NO_X11_HEADERS@
-+
-+typedef void *EGLNativeDisplayType;
-+typedef khronos_uint32_t EGLNativePixmapType;
-+typedef khronos_uint32_t EGLNativeWindowType;
-+
-+#else
-+
-+/* X11 (tentative) */
-+#include <X11/Xlib.h>
-+#include <X11/Xutil.h>
-+
-+typedef Display *EGLNativeDisplayType;
-+typedef Pixmap EGLNativePixmapType;
-+typedef Window EGLNativeWindowType;
-+
-+#endif /* MESA_EGL_NO_X11_HEADERS */
-+
-+#else
-+#error "Platform not recognized"
-+#endif
-+
-+/* EGL 1.2 types, renamed for consistency in EGL 1.3 */
-+typedef EGLNativeDisplayType NativeDisplayType;
-+typedef EGLNativePixmapType NativePixmapType;
-+typedef EGLNativeWindowType NativeWindowType;
-+
-+
-+/* Define EGLint. This must be a signed integral type large enough to contain
-+ * all legal attribute names and values passed into and out of EGL, whether
-+ * their type is boolean, bitmask, enumerant (symbolic constant), integer,
-+ * handle, or other. While in general a 32-bit integer will suffice, if
-+ * handles are 64 bit types, then EGLint should be defined as a signed 64-bit
-+ * integer type.
-+ */
-+typedef khronos_int32_t EGLint;
-+
-+#endif /* __eglplatform_h */
---
-1.8.2.1
-
diff --git a/meta/recipes-graphics/mesa/mesa/0006-fix-out-of-tree-egl.patch b/meta/recipes-graphics/mesa/mesa/0006-fix-out-of-tree-egl.patch
deleted file mode 100644
index 88a4fb0636..0000000000
--- a/meta/recipes-graphics/mesa/mesa/0006-fix-out-of-tree-egl.patch
+++ /dev/null
@@ -1,48 +0,0 @@
-Fix out of tree compilation failure due to
-0003-EGL-Mutate-NativeDisplayType-depending-on-config.patch.
-
-Upstream-Status: Inappropriate (upstream has different solution to root problem)
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
-diff --git a/src/egl/drivers/dri2/Makefile.am b/src/egl/drivers/dri2/Makefile.am
-index 45f7dfa..ed0e777 100644
---- a/src/egl/drivers/dri2/Makefile.am
-+++ b/src/egl/drivers/dri2/Makefile.am
-@@ -21,6 +21,7 @@
-
- AM_CFLAGS = \
- -I$(top_srcdir)/include \
-+ -I$(top_builddir)/include \
- -I$(top_srcdir)/src/egl/main \
- -I$(top_srcdir)/src/gbm/main \
- -I$(top_srcdir)/src/gbm/backends/dri \
-diff --git a/src/egl/drivers/glx/Makefile.am b/src/egl/drivers/glx/Makefile.am
-index 6bf67ea..7b87047 100644
---- a/src/egl/drivers/glx/Makefile.am
-+++ b/src/egl/drivers/glx/Makefile.am
-@@ -21,6 +21,7 @@
-
- AM_CFLAGS = \
- -I$(top_srcdir)/include \
-+ -I$(top_builddir)/include \
- -I$(top_srcdir)/src/egl/main \
- $(X11_CFLAGS) \
- $(DEFINES)
-diff --git a/src/egl/main/Makefile.am b/src/egl/main/Makefile.am
-index ca5257a..13a5734 100644
---- a/src/egl/main/Makefile.am
-+++ b/src/egl/main/Makefile.am
-@@ -27,6 +27,7 @@ endif
-
- AM_CFLAGS = \
- -I$(top_srcdir)/include \
-+ -I$(top_builddir)/include \
- -I$(top_srcdir)/src/gbm/main \
- $(DEFINES) \
- $(EGL_CFLAGS) \
-@@ -135,4 +136,4 @@ egl_HEADERS = \
- $(top_srcdir)/include/EGL/eglext.h \
- $(top_srcdir)/include/EGL/egl.h \
- $(top_srcdir)/include/EGL/eglmesaext.h \
-- $(top_srcdir)/include/EGL/eglplatform.h
-+ $(top_builddir)/include/EGL/eglplatform.h
diff --git a/meta/recipes-graphics/mesa/mesa_10.1.3.bb b/meta/recipes-graphics/mesa/mesa_10.3.4.bb
index abc450a9c5..50092c9a5c 100644
--- a/meta/recipes-graphics/mesa/mesa_10.1.3.bb
+++ b/meta/recipes-graphics/mesa/mesa_10.3.4.bb
@@ -1,12 +1,9 @@
require ${BPN}.inc
-SRC_URI = "ftp://ftp.freedesktop.org/pub/mesa/${PV}/MesaLib-${PV}.tar.bz2 \
- file://0002-pipe_loader_sw-include-xlib_sw_winsys.h-only-when-HA.patch \
- file://0006-fix-out-of-tree-egl.patch \
- "
+SRC_URI = "ftp://ftp.freedesktop.org/pub/mesa/${PV}/MesaLib-${PV}.tar.bz2"
-SRC_URI[md5sum] = "ba6dbe2b9cab0b4de840c996b9b6a3ad"
-SRC_URI[sha256sum] = "b2615e236ef25d0fb94b8420bdd2e2a520b7dd5ca2d4b93306154f7fd4adecc3"
+SRC_URI[md5sum] = "fa0558a3d02c2bb8c208c030ccdc992e"
+SRC_URI[sha256sum] = "e6373913142338d10515daf619d659433bfd2989988198930c13b0945a15e98a"
S = "${WORKDIR}/Mesa-${PV}"
diff --git a/meta/recipes-graphics/mesa/mesa_git.bb b/meta/recipes-graphics/mesa/mesa_git.bb
index 59b0d1cdeb..b046ae2e65 100644
--- a/meta/recipes-graphics/mesa/mesa_git.bb
+++ b/meta/recipes-graphics/mesa/mesa_git.bb
@@ -2,16 +2,12 @@ require ${BPN}.inc
DEFAULT_PREFERENCE = "-1"
-LIC_FILES_CHKSUM = "file://docs/license.html;md5=f69a4626e9efc40fa0d3cc3b02c9eacf"
+LIC_FILES_CHKSUM = "file://docs/license.html;md5=6a23445982a7a972ac198e93cc1cb3de"
-PR = "${INC_PR}.0"
-SRCREV = "0028eb1083e6adc110a23a5f02c993cda217067a"
-PV = "10.1.3+git${SRCPV}"
+SRCREV = "c7b9a2e38a3e471562b50dab8be65db8ac6819f8"
+PV = "10.3.4+git${SRCPV}"
-SRC_URI = "git://anongit.freedesktop.org/git/mesa/mesa \
- file://0002-pipe_loader_sw-include-xlib_sw_winsys.h-only-when-HA.patch \
- file://0006-fix-out-of-tree-egl.patch \
- "
+SRC_URI = "git://anongit.freedesktop.org/git/mesa/mesa;branch=10.4"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-graphics/mx/mx-1.0-1.4.7+gitAUTOINC+9b1db6b806/fix-build-dir.patch b/meta/recipes-graphics/mx/mx-1.0/fix-build-dir.patch
index 3c8a832021..3c8a832021 100644
--- a/meta/recipes-graphics/mx/mx-1.0-1.4.7+gitAUTOINC+9b1db6b806/fix-build-dir.patch
+++ b/meta/recipes-graphics/mx/mx-1.0/fix-build-dir.patch
diff --git a/meta/recipes-graphics/mx/mx-1.0-1.4.7+gitAUTOINC+9b1db6b806/fix-test-includes.patch b/meta/recipes-graphics/mx/mx-1.0/fix-test-includes.patch
index 82c93dd604..82c93dd604 100644
--- a/meta/recipes-graphics/mx/mx-1.0-1.4.7+gitAUTOINC+9b1db6b806/fix-test-includes.patch
+++ b/meta/recipes-graphics/mx/mx-1.0/fix-test-includes.patch
diff --git a/meta/recipes-graphics/packagegroups/packagegroup-core-directfb.bb b/meta/recipes-graphics/packagegroups/packagegroup-core-directfb.bb
index 7786133930..53dc0ae19e 100644
--- a/meta/recipes-graphics/packagegroups/packagegroup-core-directfb.bb
+++ b/meta/recipes-graphics/packagegroups/packagegroup-core-directfb.bb
@@ -1,6 +1,8 @@
SUMMARY = "DirectFB without X11"
LICENSE = "MIT"
+PACKAGE_ARCH = "${MACHINE_ARCH}"
+
inherit packagegroup
TOUCH = ' ${@bb.utils.contains("MACHINE_FEATURES", "touchscreen", "tslib tslib-calibrate tslib-tests", "",d)}'
diff --git a/meta/recipes-graphics/packagegroups/packagegroup-core-x11-xserver.bb b/meta/recipes-graphics/packagegroups/packagegroup-core-x11-xserver.bb
index f595a54e9d..2d68e97383 100644
--- a/meta/recipes-graphics/packagegroups/packagegroup-core-x11-xserver.bb
+++ b/meta/recipes-graphics/packagegroups/packagegroup-core-x11-xserver.bb
@@ -6,10 +6,10 @@ SUMMARY = "X11 display server"
LICENSE = "MIT"
PR = "r40"
-inherit packagegroup
-
PACKAGE_ARCH = "${MACHINE_ARCH}"
+inherit packagegroup
+
XSERVER ?= "xserver-xorg xf86-video-fbdev xf86-input-evdev"
XSERVERCODECS ?= ""
diff --git a/meta/recipes-graphics/pango/pango.inc b/meta/recipes-graphics/pango/pango.inc
index 1ad46a3bf6..b59d33d14f 100644
--- a/meta/recipes-graphics/pango/pango.inc
+++ b/meta/recipes-graphics/pango/pango.inc
@@ -67,7 +67,7 @@ fi
# This binary needs to be compiled for the host architecture. This isn't pretty!
do_compile_prepend () {
if ${@base_contains('DISTRO_FEATURES', 'ptest', 'true', 'false', d)}; then
- make CC="${BUILD_CC}" AM_CPPFLAGS="$(pkg-config-native --cflags glib-2.0)" gen_all_unicode_LDADD="$(pkg-config-native --libs glib-2.0)" -C ${B}/tests gen-all-unicode
+ make CC="${BUILD_CC}" CFLAGS="" AM_CPPFLAGS="$(pkg-config-native --cflags glib-2.0)" gen_all_unicode_LDADD="$(pkg-config-native --libs glib-2.0)" -C ${B}/tests gen-all-unicode
fi
}
diff --git a/meta/recipes-graphics/pango/pango_1.36.5.bb b/meta/recipes-graphics/pango/pango_1.36.6.bb
index 5c2049756c..61ae359e93 100644
--- a/meta/recipes-graphics/pango/pango_1.36.5.bb
+++ b/meta/recipes-graphics/pango/pango_1.36.6.bb
@@ -8,5 +8,5 @@ SRC_URI += "file://run-ptest \
file://multilib-fix-clean.patch \
"
-SRC_URI[archive.md5sum] = "2500930093c3ed38acb40e4255bce2f1"
-SRC_URI[archive.sha256sum] = "be0e94b2e5c7459f0b6db21efab6253556c8f443837200b8736d697071276ac8"
\ No newline at end of file
+SRC_URI[archive.md5sum] = "1c27523c3f3a4efe4d9d303d0d240320"
+SRC_URI[archive.sha256sum] = "4c53c752823723875078b91340f32136aadb99e91c0f6483f024f978a02c8624"
\ No newline at end of file
diff --git a/meta/recipes-graphics/piglit/piglit_git.bb b/meta/recipes-graphics/piglit/piglit_git.bb
index 5c013ef94f..f468120f80 100644
--- a/meta/recipes-graphics/piglit/piglit_git.bb
+++ b/meta/recipes-graphics/piglit/piglit_git.bb
@@ -45,4 +45,7 @@ do_install() {
FILES_${PN}-dbg += "${libdir}/piglit/*/.debug/"
-RDEPENDS_${PN} = "python waffle python-json python-subprocess python-multiprocessing python-textutils python-netserver python-shell mesa-demos"
+RDEPENDS_${PN} = "python waffle python-json python-subprocess \
+ python-multiprocessing python-textutils python-netserver python-shell \
+ mesa-demos bash \
+ "
diff --git a/meta/recipes-graphics/wayland/libinput_0.6.0.bb b/meta/recipes-graphics/wayland/libinput_0.6.0.bb
new file mode 100644
index 0000000000..c3bbb7b0af
--- /dev/null
+++ b/meta/recipes-graphics/wayland/libinput_0.6.0.bb
@@ -0,0 +1,14 @@
+SUMMARY = "Library to handle input devices in Wayland compositors"
+HOMEPAGE = "http://www.freedesktop.org/wiki/Software/libinput/"
+SECTION = "libs"
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=673e626420c7f859fbe2be3a9c13632d"
+
+DEPENDS = "libevdev udev mtdev"
+
+SRC_URI = "http://www.freedesktop.org/software/${BPN}/${BP}.tar.xz"
+SRC_URI[md5sum] = "3afaf9f66d8796323a79edb879c10ba3"
+SRC_URI[sha256sum] = "30b555771e7cb921ccb9430c4a86940aa3938d05506e81d2417c03e30451bfbc"
+
+inherit autotools pkgconfig
diff --git a/meta/recipes-graphics/wayland/wayland_1.5.0.bb b/meta/recipes-graphics/wayland/wayland_1.6.0.bb
index b1ae59ef13..cf3c094634 100644
--- a/meta/recipes-graphics/wayland/wayland_1.5.0.bb
+++ b/meta/recipes-graphics/wayland/wayland_1.6.0.bb
@@ -11,8 +11,8 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=1d4476a7d98dd5691c53d4d43a510c72 \
file://src/wayland-server.c;endline=21;md5=079ae21dbf98ada52ec23744851b0a5c"
SRC_URI = "http://wayland.freedesktop.org/releases/${BPN}-${PV}.tar.xz"
-SRC_URI[md5sum] = "1d882776b27329b91d2d500b6d66dd1d"
-SRC_URI[sha256sum] = "0069e1e9af888b3e05384380ad8cc6c976ea3e81d08ba19b7675ce1d693a41b5"
+SRC_URI[md5sum] = "d34c141c975084e4fb668e77b38f840e"
+SRC_URI[sha256sum] = "a7d5102dcf53d08c059d24bc62de491d7cd482070abeb6737a20d0d86ba6fc7f"
SRC_URI_append_class-native = " \
file://disable-macro-checks-not-used-for-scanner.patch \
"
diff --git a/meta/recipes-graphics/wayland/weston_1.5.0.bb b/meta/recipes-graphics/wayland/weston_1.6.0.bb
index 4a8584f79a..d30e48bfa0 100644
--- a/meta/recipes-graphics/wayland/weston_1.5.0.bb
+++ b/meta/recipes-graphics/wayland/weston_1.6.0.bb
@@ -12,8 +12,8 @@ SRC_URI = "http://wayland.freedesktop.org/releases/${BPN}-${PV}.tar.xz \
file://make-lcms-explicitly-configurable.patch \
file://make-libwebp-explicitly-configurable.patch \
"
-SRC_URI[md5sum] = "8eb40d230efc2411f083c20656534780"
-SRC_URI[sha256sum] = "06388ba04ac79aa72d685cc1a8e646ddb2b8cfe11fcc742294f9addac48b7684"
+SRC_URI[md5sum] = "c60ce9dde99a089db0539d8f6b557827"
+SRC_URI[sha256sum] = "dc3ea5d13bbf025fabc006216c5ddc0d80d5f4ebe778912b8c4d1d4acaaa614d"
inherit autotools pkgconfig useradd
@@ -60,6 +60,8 @@ PACKAGECONFIG[cairo-glesv2] = "--with-cairo-glesv2,--with-cairo=image,cairo"
PACKAGECONFIG[lcms] = "--enable-lcms,--disable-lcms,lcms"
# Weston with webp support
PACKAGECONFIG[webp] = "--enable-webp,--disable-webp,libwebp"
+# Weston with libinput backend
+PACKAGECONFIG[libinput] = "--enable-libinput-backend,--disable-libinput-backend,libinput"
do_install_append() {
# Weston doesn't need the .la files to load modules, so wipe them
@@ -77,7 +79,7 @@ do_install_append() {
PACKAGES += "${PN}-examples"
-FILES_${PN} = "${bindir}/weston ${bindir}/weston-terminal ${bindir}/weston-info ${bindir}/weston-launch ${bindir}/wcap-decode ${libexecdir} ${datadir}"
+FILES_${PN} = "${bindir}/weston ${bindir}/weston-terminal ${bindir}/weston-info ${bindir}/weston-launch ${bindir}/wcap-decode ${libdir}/weston ${datadir}"
FILES_${PN}-examples = "${bindir}/*"
RDEPENDS_${PN} += "xkeyboard-config"
diff --git a/meta/recipes-graphics/x11-common/xserver-nodm-init.bb b/meta/recipes-graphics/x11-common/xserver-nodm-init.bb
index 0a16ff5f83..b68d40e1be 100644
--- a/meta/recipes-graphics/x11-common/xserver-nodm-init.bb
+++ b/meta/recipes-graphics/x11-common/xserver-nodm-init.bb
@@ -47,6 +47,6 @@ do_install() {
RDEPENDS_${PN} = "${@base_conditional('ROOTLESS_X', '1', 'xuser-account', '', d)}"
INITSCRIPT_NAME = "xserver-nodm"
-INITSCRIPT_PARAMS = "start 9 5 2 . stop 20 0 1 6 ."
+INITSCRIPT_PARAMS = "start 9 5 . stop 20 0 1 2 3 6 ."
SYSTEMD_SERVICE_${PN} = "xserver-nodm.service"
diff --git a/meta/recipes-graphics/x11-common/xserver-nodm-init/xserver-nodm b/meta/recipes-graphics/x11-common/xserver-nodm-init/xserver-nodm
index f6692a814b..ef6c11c3f2 100755
--- a/meta/recipes-graphics/x11-common/xserver-nodm-init/xserver-nodm
+++ b/meta/recipes-graphics/x11-common/xserver-nodm-init/xserver-nodm
@@ -4,8 +4,8 @@
# Provides: xserver
# Required-Start: $local_fs $remote_fs dbus
# Required-Stop: $local_fs $remote_fs
-# Default-Start: 2 5
-# Default-Stop: 0 1 6
+# Default-Start: 5
+# Default-Stop: 0 1 2 3 6
### END INIT INFO
killproc() { # kill the named process(es)
diff --git a/meta/recipes-graphics/xorg-app/xmodmap/gnu-source.patch b/meta/recipes-graphics/xorg-app/xmodmap/gnu-source.patch
deleted file mode 100644
index 40f81f0f74..0000000000
--- a/meta/recipes-graphics/xorg-app/xmodmap/gnu-source.patch
+++ /dev/null
@@ -1,60 +0,0 @@
-Upstream-Status: Backport
-Signed-off-by: Ross Burton <ross.burton@intel.com>
-
-From 42f99a7a7b836f6f9704a7b1747eb722c93446ec Mon Sep 17 00:00:00 2001
-From: Alan Coopersmith <alan.coopersmith@oracle.com>
-Date: Mon, 23 Apr 2012 14:28:39 +0000
-Subject: include config.h before stdio.h & other system headers
-
-Ensures definitions like _GNU_SOURCE are visible when needed.
-
-Signed-off-by: Alan Coopersmith <alan.coopersmith@oracle.com>
-Reviewed-by: Peter Hutterer <peter.hutterer@who-t.net>
----
-diff --git a/exec.c b/exec.c
-index dbcb669..419b10e 100644
---- a/exec.c
-+++ b/exec.c
-@@ -54,6 +54,10 @@ from The Open Group.
- * original xmodmap, written by David Rosenthal, of Sun Microsystems.
- */
-
-+#ifdef HAVE_CONFIG_H
-+# include "config.h"
-+#endif
-+
- #include <X11/Xos.h>
- #include <X11/Xlib.h>
- #include <stdio.h>
-diff --git a/pf.c b/pf.c
-index 0eb0f55..3f0c3f4 100644
---- a/pf.c
-+++ b/pf.c
-@@ -26,6 +26,10 @@ from The Open Group.
-
- */
-
-+#ifdef HAVE_CONFIG_H
-+# include "config.h"
-+#endif
-+
- #include <X11/Xos.h>
- #include <X11/Xlib.h>
- #include <stdio.h>
-diff --git a/xmodmap.c b/xmodmap.c
-index 58a8e70..0f89629 100644
---- a/xmodmap.c
-+++ b/xmodmap.c
-@@ -26,6 +26,10 @@ from The Open Group.
-
- */
-
-+#ifdef HAVE_CONFIG_H
-+# include "config.h"
-+#endif
-+
- #include <X11/Xos.h>
- #include <X11/Xlib.h>
- #include <stdio.h>
---
-cgit v0.9.0.2-2-gbebe
diff --git a/meta/recipes-graphics/xorg-app/xmodmap_1.0.8.bb b/meta/recipes-graphics/xorg-app/xmodmap_1.0.8.bb
index 65a55e3560..87efb76022 100644
--- a/meta/recipes-graphics/xorg-app/xmodmap_1.0.8.bb
+++ b/meta/recipes-graphics/xorg-app/xmodmap_1.0.8.bb
@@ -13,7 +13,5 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=272c17e96370e1e74773fa22d9989621"
PE = "1"
-SRC_URI += "file://gnu-source.patch"
-
SRC_URI[md5sum] = "5511da3361eea4eaa21427652c559e1c"
SRC_URI[sha256sum] = "efe2e3c89858a2db3bdcf969f55f55d0af4f5007789198344de0595249a99fc3"
diff --git a/meta/recipes-graphics/xorg-app/xrandr_1.4.2.bb b/meta/recipes-graphics/xorg-app/xrandr_1.4.3.bb
index 84f01b02dd..41bd42069c 100644
--- a/meta/recipes-graphics/xorg-app/xrandr_1.4.2.bb
+++ b/meta/recipes-graphics/xorg-app/xrandr_1.4.3.bb
@@ -11,5 +11,5 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=fe1608bdb33cf8c62a4438f7d34679b3"
DEPENDS += "libxrandr libxrender"
PE = "1"
-SRC_URI[md5sum] = "78fd973d9b532106f8777a3449176148"
-SRC_URI[sha256sum] = "b2e76ee92ff827f1c52ded7c666fe6f2704ca81cdeef882397da4e3e8ab490bc"
+SRC_URI[md5sum] = "441fdb98d2abc6051108b7075d948fc7"
+SRC_URI[sha256sum] = "7154ac3486b86923692f2d6cdb2991a2ee72bc32af2c4379a6f1c068f204be1b"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.0.bb b/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.1.bb
index aca958cf64..1f0b78a157 100644
--- a/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.0.bb
+++ b/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.1.bb
@@ -10,6 +10,6 @@ driver."
LIC_FILES_CHKSUM = "file://COPYING;md5=90ea9f90d72b6d9327dede5ffdb2a510"
-SRC_URI[md5sum] = "36b5b92000c4644f648b58a535e4ee73"
-SRC_URI[sha256sum] = "5d601e4bae53d5e9ead4ecd700f1beb5aeaf78b79e634c4aa381a9ce00276488"
+SRC_URI[md5sum] = "77085b649c5c0b333565ba562f573951"
+SRC_URI[sha256sum] = "3485d375779c08406f0789feedde15933dc703158a086ddac638598f479fc5ce"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-synaptics/always_include_xorg_server.h.patch b/meta/recipes-graphics/xorg-driver/xf86-input-synaptics/always_include_xorg_server.h.patch
new file mode 100644
index 0000000000..f36bb7579d
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-input-synaptics/always_include_xorg_server.h.patch
@@ -0,0 +1,60 @@
+From 96e60a4ea242d2decf109835981ae186cc36f642 Mon Sep 17 00:00:00 2001
+From: Peter Hutterer <peter.hutterer@who-t.net>
+Date: Fri, 29 Aug 2014 07:57:41 +1000
+Subject: Include xorg-server.h to fix build errors on newest glibc
+
+In file included from /usr/include/string.h:634:0,
+ from /usr/include/xorg/os.h:53,
+ from /usr/include/xorg/misc.h:115,
+ from /usr/include/xorg/xf86str.h:37,
+ from /usr/include/xorg/xf86Xinput.h:54,
+ from synproto.h:36,
+ from synproto.c:24:
+/usr/include/xorg/os.h:579:1: error: expected identifier or '(' before '__extension__'
+ strndup(const char *str, size_t n);
+
+See http://lists.freedesktop.org/archives/xorg-devel/2014-July/043070.html
+
+Signed-off-by: Peter Hutterer <peter.hutterer@who-t.net>
+
+Status: Backport
+
+Index: xf86-input-synaptics-1.8.0/src/eventcomm.h
+===================================================================
+--- xf86-input-synaptics-1.8.0.orig/src/eventcomm.h 2014-08-28 18:40:28.628070587 -0700
++++ xf86-input-synaptics-1.8.0/src/eventcomm.h 2014-08-28 18:40:28.620070587 -0700
+@@ -27,6 +27,8 @@
+ #ifndef _EVENTCOMM_H_
+ #define _EVENTCOMM_H_
+
++#include <xorg-server.h>
++
+ #include <linux/input.h>
+ #include <linux/version.h>
+ #include <xf86Xinput.h>
+Index: xf86-input-synaptics-1.8.0/src/ps2comm.h
+===================================================================
+--- xf86-input-synaptics-1.8.0.orig/src/ps2comm.h 2014-08-28 18:40:28.628070587 -0700
++++ xf86-input-synaptics-1.8.0/src/ps2comm.h 2014-08-28 18:40:28.620070587 -0700
+@@ -22,6 +22,8 @@
+ #ifndef _PS2COMM_H_
+ #define _PS2COMM_H_
+
++#include <xorg-server.h>
++
+ #include <unistd.h>
+ #include <sys/ioctl.h>
+ #include "xf86_OSproc.h"
+Index: xf86-input-synaptics-1.8.0/src/synproto.h
+===================================================================
+--- xf86-input-synaptics-1.8.0.orig/src/synproto.h 2014-08-28 18:40:28.628070587 -0700
++++ xf86-input-synaptics-1.8.0/src/synproto.h 2014-08-28 18:40:28.624070587 -0700
+@@ -31,6 +31,8 @@
+ #include "config.h"
+ #endif
+
++#include <xorg-server.h>
++
+ #include <unistd.h>
+ #include <sys/ioctl.h>
+ #include <xf86Xinput.h>
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.7.4.bb b/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.8.0.bb
index 8045790e9d..90baf8fbf3 100644
--- a/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.7.4.bb
+++ b/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.8.0.bb
@@ -12,9 +12,11 @@ advanced features of the touchpad to become available."
LIC_FILES_CHKSUM = "file://COPYING;md5=55aacd3535a741824955c5eb8f061398"
-SRC_URI[md5sum] = "deaa740072c19fef8e2fb1d7787392b7"
-SRC_URI[sha256sum] = "56a2d2df7bd39e29f56102c62f153e023f3e9b2f5e255309d33fab8e81945af7"
+SRC_URI += "file://always_include_xorg_server.h.patch"
-DEPENDS += "libxi mtdev libxtst"
+SRC_URI[md5sum] = "27a3f2b31606a13dd6b58d419978d64f"
+SRC_URI[sha256sum] = "9bf27632aaa6c5e62621ca9c2ca00f9b309c85b039ee33cd592b189fc872c37a"
+
+DEPENDS += "libxi mtdev libxtst libevdev"
FILES_${PN} += "${datadir}/X11/xorg.conf.d"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse/always_include_config.h.patch b/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse/always_include_config.h.patch
new file mode 100644
index 0000000000..12fb9df2fd
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse/always_include_config.h.patch
@@ -0,0 +1,81 @@
+From 336f8633837abe4a1e5ba84b53ac8b9dac5d29a2 Mon Sep 17 00:00:00 2001
+From: Jeremy Huddleston Sequoia <jeremyhu@apple.com>
+Date: Thu, 28 Nov 2013 11:59:53 -0800
+Subject: Always include config.h first
+
+This fixes some build warnings about CSRG_BASED being redefined due to
+incorrect header include ordering.
+
+Signed-off-by: Jeremy Huddleston Sequoia <jeremyhu@apple.com>
+
+Upstream-Status: Backport
+
+Index: xf86-input-vmmouse-13.0.0/shared/vmmouse_client.c
+===================================================================
+--- xf86-input-vmmouse-13.0.0.orig/shared/vmmouse_client.c 2014-08-28 18:33:16.168070587 -0700
++++ xf86-input-vmmouse-13.0.0/shared/vmmouse_client.c 2014-08-28 18:33:16.152070587 -0700
+@@ -39,6 +39,10 @@
+ #endif
+
+
++#ifdef HAVE_CONFIG_H
++#include "config.h"
++#endif
++
+ #include "vmmouse_client.h"
+ #include "vmmouse_proto.h"
+
+Index: xf86-input-vmmouse-13.0.0/shared/vmmouse_proto.c
+===================================================================
+--- xf86-input-vmmouse-13.0.0.orig/shared/vmmouse_proto.c 2014-08-28 18:33:16.168070587 -0700
++++ xf86-input-vmmouse-13.0.0/shared/vmmouse_proto.c 2014-08-28 18:33:16.156070587 -0700
+@@ -33,6 +33,10 @@
+ */
+
+
++#ifdef HAVE_CONFIG_H
++#include "config.h"
++#endif
++
+ #include "vmmouse_proto.h"
+
+
+Index: xf86-input-vmmouse-13.0.0/shared/vmmouse_proto.h
+===================================================================
+--- xf86-input-vmmouse-13.0.0.orig/shared/vmmouse_proto.h 2014-08-28 18:33:16.168070587 -0700
++++ xf86-input-vmmouse-13.0.0/shared/vmmouse_proto.h 2014-08-28 18:33:16.156070587 -0700
+@@ -36,11 +36,6 @@
+ #ifndef _VMMOUSE_PROTO_H_
+ #define _VMMOUSE_PROTO_H_
+
+-
+-#ifdef HAVE_CONFIG_H
+-#include "config.h"
+-#endif
+-
+ #include <stdint.h>
+
+ #ifdef HAVE_XORG_SERVER_1_1_0
+Index: xf86-input-vmmouse-13.0.0/tools/vmmouse_detect.c
+===================================================================
+--- xf86-input-vmmouse-13.0.0.orig/tools/vmmouse_detect.c 2014-08-28 18:33:16.168070587 -0700
++++ xf86-input-vmmouse-13.0.0/tools/vmmouse_detect.c 2014-08-28 18:33:16.160070587 -0700
+@@ -26,14 +26,14 @@
+ */
+
+
+-#include <stdlib.h>
+-#include <signal.h>
+-#include "vmmouse_client.h"
+-
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
++#include <stdlib.h>
++#include <signal.h>
++#include "vmmouse_client.h"
++
+ void
+ segvCB(int sig)
+ {
diff --git a/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.0.0.bb b/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.0.0.bb
index 35ef5991c3..83ea960a88 100644
--- a/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.0.0.bb
+++ b/meta/recipes-graphics/xorg-driver/xf86-input-vmmouse_13.0.0.bb
@@ -8,6 +8,8 @@ standard 'mouse' driver if a VMware virtual machine is not detected."
PR = "${INC_PR}.0"
+SRC_URI += "file://always_include_config.h.patch"
+
SRC_URI[md5sum] = "34f9f64ee6a1a51fc8266a9af24e1e07"
SRC_URI[sha256sum] = "04cfb60366008d4db815c550d8fb8d0a4270c75fa7a20fa3bddc9ecbd355612c"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.5.2.bb b/meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.5.2.bb
new file mode 100644
index 0000000000..01cd37e364
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-video-cirrus_1.5.2.bb
@@ -0,0 +1,13 @@
+require xorg-driver-video.inc
+
+SUMMARY = "X.Org X server -- cirrus display driver"
+DESCRIPTION = "cirrus is an Xorg driver for Cirrus Logic VGA adapters. These \
+devices are not so common in the wild anymore, but QEMU can emulate one, so \
+the driver is still useful."
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=6ddc7ca860dc5fd014e7f160ea699295"
+
+SRC_URI[md5sum] = "91fd6b677d62027cd3001debb587a6a6"
+SRC_URI[sha256sum] = "3361e1a65d9b84c464752fd612bdf6087622c6dd204121715366a170e5c3ccd7"
+
+DEPENDS += "libpciaccess"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-intel/always_include_xorg_server.h.patch b/meta/recipes-graphics/xorg-driver/xf86-video-intel/always_include_xorg_server.h.patch
new file mode 100644
index 0000000000..8a5dd39ae1
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-video-intel/always_include_xorg_server.h.patch
@@ -0,0 +1,24 @@
+Include xorg-server.h to fix build errors seen with glibc 2.20
+
+In file included from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/string.h:634:0,
+ from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/work/i586-oe-linux/xf86-video-intel/2_2.99.912-r0/xf86-video-intel-2.99.912/src/backlight.c:39:
+/home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/xorg/os.h:579:1: error: expected identifier or '(' before '__extension__'
+ strndup(const char *str, size_t n);
+ ^
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+Upstream-Status: Pending
+
+Index: xf86-video-intel-2.99.912/src/backlight.c
+===================================================================
+--- xf86-video-intel-2.99.912.orig/src/backlight.c 2014-04-09 10:41:18.000000000 -0700
++++ xf86-video-intel-2.99.912/src/backlight.c 2014-08-28 18:52:25.524070587 -0700
+@@ -29,6 +29,7 @@
+ #include "config.h"
+ #endif
+
++#include <xorg-server.h>
+ #include <sys/types.h>
+ #include <sys/wait.h>
+ #include <sys/stat.h>
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-intel_2.99.912.bb b/meta/recipes-graphics/xorg-driver/xf86-video-intel_2.99.912.bb
index 544de4ab55..a10cd9e278 100644
--- a/meta/recipes-graphics/xorg-driver/xf86-video-intel_2.99.912.bb
+++ b/meta/recipes-graphics/xorg-driver/xf86-video-intel_2.99.912.bb
@@ -10,7 +10,9 @@ Infrastructure (DRI)."
LIC_FILES_CHKSUM = "file://COPYING;md5=8730ad58d11c7bbad9a7066d69f7808e"
SRC_URI += "file://configure-dri.patch \
- file://disable-x11-dri3.patch"
+ file://disable-x11-dri3.patch \
+ file://always_include_xorg_server.h.patch \
+ "
SRC_URI[md5sum] = "88d1a884f9b7bd07bf0755cfa34052d4"
SRC_URI[sha256sum] = "7c8ffc492d59f34cac64093deb70717b4d9223cf416ecc6fa016ab2e8bde9501"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-omapfb/0007-always_include_xorg_server.h.patch b/meta/recipes-graphics/xorg-driver/xf86-video-omapfb/0007-always_include_xorg_server.h.patch
new file mode 100644
index 0000000000..dc0b9b3f41
--- /dev/null
+++ b/meta/recipes-graphics/xorg-driver/xf86-video-omapfb/0007-always_include_xorg_server.h.patch
@@ -0,0 +1,48 @@
+Fix errors with glibc 2.20
+
+In file included from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/string.h:634:0,
+ from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/xorg/os.h:53,
+ from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/xorg/misc.h:115,
+ from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/xorg/xf86str.h:37,
+ from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/xorg/xf86.h:44,
+ from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/work/i586-oe-linux/xf86-video-omapfb/2_0.1.1+gitrAUTOINC+28c006c94e-r21.7/git/src/omapfb-xv-blizzard.c:33:
+/home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/xorg/os.h:579:1: error: expected identifier or '(' before '__extension__'
+ strndup(const char *str, size_t n);
+ ^
+In file included from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/string.h:634:0,
+ from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/xorg/os.h:53,
+ from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/xorg/misc.h:115,
+ from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/xorg/xf86str.h:37,
+ from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/xorg/xf86.h:44,
+ from /home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/work/i586-oe-linux/xf86-video-omapfb/2_0.1.1+gitrAUTOINC+28c006c94e-r21.7/git/src/omapfb-xv-generic.c:28:
+/home/ubuntu/work/upstream/openembedded-core/build/tmp-glibc/sysroots/qemux86/usr/include/xorg/os.h:579:1: error: expected identifier or '(' before '__extension__'
+ strndup(const char *str, size_t n);
+ ^
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+Upstream-Status: Pending
+Index: git/src/omapfb-xv-blizzard.c
+===================================================================
+--- git.orig/src/omapfb-xv-blizzard.c 2014-08-28 05:16:25.684070587 -0700
++++ git/src/omapfb-xv-blizzard.c 2014-08-28 19:05:39.440070587 -0700
+@@ -30,6 +30,7 @@
+ * -
+ */
+
++#include <xorg-server.h>
+ #include "xf86.h"
+ #include "xf86_OSlib.h"
+ #include "xf86xv.h"
+Index: git/src/omapfb-xv-generic.c
+===================================================================
+--- git.orig/src/omapfb-xv-generic.c 2014-08-28 05:16:25.684070587 -0700
++++ git/src/omapfb-xv-generic.c 2014-08-28 19:05:51.780070587 -0700
+@@ -25,6 +25,7 @@
+ * Generic functions for the XV driver
+ */
+
++#include <xorg-server.h>
+ #include "xf86.h"
+ #include "xf86_OSlib.h"
+ #include "xf86xv.h"
diff --git a/meta/recipes-graphics/xorg-driver/xf86-video-omapfb_git.bb b/meta/recipes-graphics/xorg-driver/xf86-video-omapfb_git.bb
index 50ce4c2f9c..4ad25389cd 100644
--- a/meta/recipes-graphics/xorg-driver/xf86-video-omapfb_git.bb
+++ b/meta/recipes-graphics/xorg-driver/xf86-video-omapfb_git.bb
@@ -20,6 +20,7 @@ SRC_URI = "git://git.pingu.fi/xf86-video-omapfb;protocol=http \
file://0004-blacklist-tv-out.patch \
file://0005-Attempt-to-fix-VRFB.patch \
file://0006-omapfb-port-to-new-xserver-video-API.patch \
+ file://0007-always_include_xorg_server.h.patch \
"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-graphics/xorg-font/font-util_1.3.0.bb b/meta/recipes-graphics/xorg-font/font-util_1.3.0.bb
index 8b42991d1c..cc4258ad7b 100644
--- a/meta/recipes-graphics/xorg-font/font-util_1.3.0.bb
+++ b/meta/recipes-graphics/xorg-font/font-util_1.3.0.bb
@@ -17,7 +17,7 @@ RDEPENDS_${PN}_class-native = "mkfontdir-native mkfontscale-native"
PR = "${INC_PR}.0"
do_configure_prepend() {
- sed -i "s#MAPFILES_PATH=\`pkg-config#MAPFILES_PATH=\`PKG_CONFIG_PATH=\"${STAGING_LIBDIR_NATIVE}/pkg-config\" pkg-config#g" ${S}/fontutil.m4.in
+ sed -i "s#MAPFILES_PATH=\`pkg-config#MAPFILES_PATH=\`PKG_CONFIG_PATH=\"${STAGING_LIBDIR_NATIVE}/pkgconfig\" pkg-config#g" ${S}/fontutil.m4.in
}
BBCLASSEXTEND = "native"
diff --git a/meta/recipes-graphics/xorg-lib/libxcb_1.10.bb b/meta/recipes-graphics/xorg-lib/libxcb_1.10.bb
deleted file mode 100644
index d88b025e1f..0000000000
--- a/meta/recipes-graphics/xorg-lib/libxcb_1.10.bb
+++ /dev/null
@@ -1,10 +0,0 @@
-include libxcb.inc
-
-LICENSE = "MIT"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d763b081cb10c223435b01e00dc0aba7"
-
-
-DEPENDS += "libxdmcp"
-
-SRC_URI[md5sum] = "074c335cc4453467eeb234e3dadda700"
-SRC_URI[sha256sum] = "98d9ab05b636dd088603b64229dd1ab2d2cc02ab807892e107d674f9c3f2d5b5"
diff --git a/meta/recipes-graphics/xorg-lib/libxcb_1.11.bb b/meta/recipes-graphics/xorg-lib/libxcb_1.11.bb
new file mode 100644
index 0000000000..c162702105
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/libxcb_1.11.bb
@@ -0,0 +1,10 @@
+include libxcb.inc
+
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d763b081cb10c223435b01e00dc0aba7"
+
+
+DEPENDS += "libxdmcp"
+
+SRC_URI[md5sum] = "5a873ebd383d1a60612dd6ec6b42c781"
+SRC_URI[sha256sum] = "03635d70045b9ede90778e67516135828a57de87ac508f987024f43c03620ff7"
diff --git a/meta/recipes-graphics/xorg-lib/libxkbcommon_0.4.2.bb b/meta/recipes-graphics/xorg-lib/libxkbcommon_0.5.0.bb
index 2b1806142f..a166b57603 100644
--- a/meta/recipes-graphics/xorg-lib/libxkbcommon_0.4.2.bb
+++ b/meta/recipes-graphics/xorg-lib/libxkbcommon_0.5.0.bb
@@ -2,19 +2,19 @@ SUMMARY = "Generic XKB keymap library"
DESCRIPTION = "libxkbcommon is a keymap compiler and support library which \
processes a reduced subset of keymaps as defined by the XKB specification."
HOMEPAGE = "http://www.xkbcommon.org"
-LIC_FILES_CHKSUM = "file://COPYING;md5=9c0b824e72a22f9d2c40b9c93b1f0ddc"
+LIC_FILES_CHKSUM = "file://COPYING;md5=09457b156e3155972abebcaaaa0cb434"
LICENSE = "MIT & MIT-style"
DEPENDS = "util-macros flex-native bison-native"
SRC_URI = "http://xkbcommon.org/download/${BPN}-${PV}.tar.xz"
-SRC_URI[md5sum] = "4b717adce41c8305258e99a9b396330a"
-SRC_URI[sha256sum] = "a0fc71b07eeddba4af62bd709e24cec219778bb6871384aa850b2f5798a48957"
+SRC_URI[md5sum] = "2e1faeafcc609c30af3a561a91e84158"
+SRC_URI[sha256sum] = "90bd7824742b9a6f52a6cf80e2cadd6f5349cf600a358d08260772615b89d19c"
inherit autotools pkgconfig
EXTRA_OECONF = "--disable-docs"
PACKAGECONFIG ?= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11', '', d)}"
-PACKAGECONFIG[x11] = "--enable-x11,--disable-x11,libxcb"
+PACKAGECONFIG[x11] = "--enable-x11,--disable-x11,libxcb xkeyboard-config,"
diff --git a/meta/recipes-graphics/xorg-lib/pixman/mips-export-revert.patch b/meta/recipes-graphics/xorg-lib/pixman/mips-export-revert.patch
new file mode 100644
index 0000000000..14a5fd209b
--- /dev/null
+++ b/meta/recipes-graphics/xorg-lib/pixman/mips-export-revert.patch
@@ -0,0 +1,22 @@
+Revert a commit in pixman 0.32.6 which breaks compilation on MIPS machines with
+errors such as:
+
+pixman-0.32.6/pixman/pixman-mips-dspr2-asm.S:4267:
+Error: invalid operands `mflo $14,$ac3'
+
+Upstream-Status: Pending
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
+--- b/pixman/pixman-mips-dspr2-asm.h
++++ a/pixman/pixman-mips-dspr2-asm.h
+@@ -72,10 +72,7 @@
+ #define LEAF_MIPS32R2(symbol) \
+ .globl symbol; \
+ .align 2; \
+-#ifdef __ELF__
+- .hidden symbol; \
+ .type symbol, @function; \
+-#endif
+ .ent symbol, 0; \
+ symbol: .frame sp, 0, ra; \
+ .set push; \
diff --git a/meta/recipes-graphics/xorg-lib/pixman_0.32.4.bb b/meta/recipes-graphics/xorg-lib/pixman_0.32.6.bb
index 51cca5d13a..467ba508ee 100644
--- a/meta/recipes-graphics/xorg-lib/pixman_0.32.4.bb
+++ b/meta/recipes-graphics/xorg-lib/pixman_0.32.6.bb
@@ -29,9 +29,10 @@ EXTRA_OECONF_class-native = "--disable-gtk"
SRC_URI += "\
file://0001-ARM-qemu-related-workarounds-in-cpu-features-detecti.patch \
+ file://mips-export-revert.patch \
"
-SRC_URI[md5sum] = "cdb566504fe9daf6728c7b03cc7ea228"
-SRC_URI[sha256sum] = "ae2bd664057e330d41b40336ed296d3512318ce7f2401cc42601f2613d371e4c"
+SRC_URI[md5sum] = "8a9e8f14743a39cf303803f369c1f344"
+SRC_URI[sha256sum] = "201fc0d7d6bc0017496f2bd27b3ca14224aea0df6b624c5ee2dc0307a4ff14a4"
REQUIRED_DISTRO_FEATURES = ""
diff --git a/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.11.bb b/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.12.bb
index b80309d2ac..adac0eedd8 100644
--- a/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.11.bb
+++ b/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.12.bb
@@ -13,8 +13,8 @@ LICENSE = "MIT & MIT-style"
LIC_FILES_CHKSUM = "file://COPYING;md5=0e7f21ca7db975c63467d2e7624a12f9"
SRC_URI="${XORG_MIRROR}/individual/data/xkeyboard-config/${BPN}-${PV}.tar.bz2"
-SRC_URI[md5sum] = "e3defd29cc464cc1a1dfa0eebaca53b1"
-SRC_URI[sha256sum] = "e7125460892c2b5c3a8d843cb18c24b60c46051e925c2888a61fa672a2f76d76"
+SRC_URI[md5sum] = "1fd54ceb9092d1dbcaabaf03653092bc"
+SRC_URI[sha256sum] = "65b62b95b77b609cb6c0439e0148c48c3ab7dcb5c90eb8d34cf1cb8f360cca44"
SECTION = "x11/libs"
DEPENDS = "intltool-native virtual/gettext util-macros libxslt-native"
@@ -23,7 +23,7 @@ EXTRA_OECONF = "--with-xkb-rules-symlink=xorg --disable-runtime-deps"
FILES_${PN} += "${datadir}/X11/xkb"
-inherit autotools pkgconfig
+inherit autotools pkgconfig gettext
do_install_append () {
install -d ${D}${datadir}/X11/xkb/compiled
diff --git a/meta/recipes-graphics/xorg-proto/xcb-proto_1.10.bb b/meta/recipes-graphics/xorg-proto/xcb-proto_1.11.bb
index 0b67b5703e..5bc5a112e5 100644
--- a/meta/recipes-graphics/xorg-proto/xcb-proto_1.10.bb
+++ b/meta/recipes-graphics/xorg-proto/xcb-proto_1.11.bb
@@ -5,5 +5,5 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=d763b081cb10c223435b01e00dc0aba7 \
file://src/dri2.xml;beginline=2;endline=28;md5=f8763b13ff432e8597e0d610cf598e65"
-SRC_URI[md5sum] = "ade74b8e9c870dc7515adfa209e66063"
-SRC_URI[sha256sum] = "7ef40ddd855b750bc597d2a435da21e55e502a0fefa85b274f2c922800baaf05"
+SRC_URI[md5sum] = "6bf2797445dc6d43e9e4707c082eff9c"
+SRC_URI[sha256sum] = "b4aceee6502a0ce45fc39b33c541a2df4715d00b72e660ebe8c5bb444771e32e"
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86-64/xorg.conf b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86-64/xorg.conf
index 10a6d9a0bc..bbda9eaa63 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86-64/xorg.conf
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86-64/xorg.conf
@@ -30,11 +30,6 @@ Section "InputDevice"
Option "USB" "on"
EndSection
-Section "Device"
- Identifier "Graphics Controller"
- Driver "vmware"
-EndSection
-
Section "Monitor"
Identifier "Generic Monitor"
Option "DPMS"
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86/xorg.conf b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86/xorg.conf
index 10a6d9a0bc..bbda9eaa63 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86/xorg.conf
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/qemux86/xorg.conf
@@ -30,11 +30,6 @@ Section "InputDevice"
Option "USB" "on"
EndSection
-Section "Device"
- Identifier "Graphics Controller"
- Driver "vmware"
-EndSection
-
Section "Monitor"
Identifier "Generic Monitor"
Option "DPMS"
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc b/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
index dd60884992..5b47e3445f 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xorg.inc
@@ -116,7 +116,9 @@ EXTRA_OECONF += "--with-fop=no \
ac_cv_file__usr_share_sgml_X11_defs_ent=no \
"
-PACKAGECONFIG ??= "udev ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'dri dri2 glx', '', d)}"
+PACKAGECONFIG ??= "udev ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'dri dri2 glx', '', d)} \
+ ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'systemd', '', d)}"
+
PACKAGECONFIG[udev] = "--enable-config-udev,--disable-config-udev,udev"
PACKAGECONFIG[dri] = "--enable-dri,--disable-dri,glproto virtual/mesa xf86driproto"
PACKAGECONFIG[dri2] = "--enable-dri2,--disable-dri2,dri2proto"
@@ -126,6 +128,7 @@ PACKAGECONFIG[glx] = "--enable-glx --enable-glx-tls,--disable-glx,glproto virtua
PACKAGECONFIG[unwind] = "--enable-libunwind,--disable-libunwind,libunwind"
PACKAGECONFIG[xshmfence] = "--enable-xshmfence,--disable-xshmfence,libxshmfence"
PACKAGECONFIG[xmlto] = "--with-xmlto, --without-xmlto, xmlto-native docbook-xml-dtd4-native docbook-xsl-stylesheets-native"
+PACKAGECONFIG[systemd] = "--enable-systemd-logind=yes,--enable-systemd-logind=no,dbus,dbus-lib"
do_install_append () {
# Its assumed base-files creates this for us
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg/aarch64.patch b/meta/recipes-graphics/xorg-xserver/xserver-xorg/aarch64.patch
deleted file mode 100644
index 045e24a281..0000000000
--- a/meta/recipes-graphics/xorg-xserver/xserver-xorg/aarch64.patch
+++ /dev/null
@@ -1,35 +0,0 @@
-Subject: Add AArch64 support to xserver-xorg
-Author: Marcin Juszkiewicz <marcin.juszkiewicz@linaro.org>
-
-lnx_video.c parts are not existing in 1.14 branch.
-
-Xserver was checked in AArch64 fastmodel (commercial one with graphics support).
-
-http://patchwork.freedesktop.org/patch/12785/
-
-Upstream-Status: Pending
-
----
- include/servermd.h | 7 +++++++
- 1 files changed, 7 insertions(+), 0 deletions(-)
-
---- xorg-server-1.13.1.orig/include/servermd.h
-+++ xorg-server-1.13.1/include/servermd.h
-@@ -243,10 +243,17 @@ SOFTWARE.
- #define BITMAP_BIT_ORDER LSBFirst
- #define GLYPHPADBYTES 4
- /* ???? */
- #endif /* AMD64 */
-
-+#if defined(__aarch64__) || defined(aarch64) || defined(__aarch64)
-+#define IMAGE_BYTE_ORDER MSBFirst
-+#define BITMAP_BIT_ORDER MSBFirst
-+#define GLYPHPADBYTES 4
-+/* ???? */
-+#endif /* AArch64 */
-+
- #if defined(SVR4) && (defined(__i386__) || defined(__i386) ) || \
- defined(__alpha__) || defined(__alpha) || \
- defined(__i386__) || \
- defined(__s390x__) || defined(__s390__)
-
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg/crosscompile.patch b/meta/recipes-graphics/xorg-xserver/xserver-xorg/crosscompile.patch
deleted file mode 100644
index 2f98bb8c89..0000000000
--- a/meta/recipes-graphics/xorg-xserver/xserver-xorg/crosscompile.patch
+++ /dev/null
@@ -1,22 +0,0 @@
-Upstream-Status: Inappropriate [configuration]
-
-diff --git a/configure.ac b/configure.ac
-index b3b752c..600500b 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -523,13 +523,9 @@ dnl Uses --default-font-path if set, otherwise checks for /etc/X11/fontpath.d,
- dnl otherwise uses standard subdirectories of FONTROOTDIR. When cross
- dnl compiling, assume default font path uses standard FONTROOTDIR directories.
- DEFAULT_FONT_PATH="${FONTMISCDIR}/,${FONTTTFDIR}/,${FONTOTFDIR}/,${FONTTYPE1DIR}/,${FONT100DPIDIR}/,${FONT75DPIDIR}/"
--if test "$cross_compiling" != yes; then
-- AC_CHECK_FILE([${sysconfdir}/X11/fontpath.d],
-- [DEFAULT_FONT_PATH='catalogue:${sysconfdir}/X11/fontpath.d'],
-- [case $host_os in
-+ case $host_os in
- darwin*) DEFAULT_FONT_PATH="${DEFAULT_FONT_PATH},/Library/Fonts,/System/Library/Fonts" ;;
-- esac])
--fi
-+ esac
- AC_ARG_WITH(default-font-path, AS_HELP_STRING([--with-default-font-path=PATH], [Comma separated list of font dirs]),
- [ FONTPATH="$withval" ],
- [ FONTPATH="${DEFAULT_FONT_PATH}" ])
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg/mips64-compiler.patch b/meta/recipes-graphics/xorg-xserver/xserver-xorg/mips64-compiler.patch
deleted file mode 100644
index 168368e6c3..0000000000
--- a/meta/recipes-graphics/xorg-xserver/xserver-xorg/mips64-compiler.patch
+++ /dev/null
@@ -1,29 +0,0 @@
-on mips64/n64 pointers are 64bit therefore the pointer conversions to int dont work well
-so we end up with incompatible conversion errors
-
-This patch choses the right values for mips64
-
-Signed-off-by: Khem Raj <raj.khem@gmail.com>
-
-Upstream-Status: Pending
-Index: xorg-server-1.13.0/hw/xfree86/common/compiler.h
-===================================================================
---- xorg-server-1.13.0-orig/hw/xfree86/common/compiler.h 2012-06-14 23:04:29.000000000 +0300
-+++ xorg-server-1.13.0/hw/xfree86/common/compiler.h 2012-11-08 10:06:50.865831783 +0200
-@@ -101,6 +101,7 @@
- #if defined(NO_INLINE) || defined(DO_PROTOTYPES)
- #if !defined(__arm__)
- #if !defined(__sparc__) && !defined(__sparc) && !defined(__arm32__) && !defined(__nds32__) \
-+ && !defined(__mips64) \
- && !(defined(__alpha__) && defined(linux)) \
- && !(defined(__ia64__) && defined(linux)) \
-
-@@ -721,7 +722,7 @@
- }
-
- #elif defined(__mips__) || (defined(__arm32__) && !defined(__linux__))
--#ifdef __arm32__
-+#if defined (__arm32__) || defined (__mips64)
- #define PORT_SIZE long
- #else
- #define PORT_SIZE short
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg/xshmfence-option.patch b/meta/recipes-graphics/xorg-xserver/xserver-xorg/xshmfence-option.patch
index 55e09134ce..682ddf7006 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xorg/xshmfence-option.patch
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xorg/xshmfence-option.patch
@@ -1,7 +1,7 @@
Upstream-Status: Submitted
Signed-off-by: Ross Burton <ross.burton@intel.com>
-commit 1affe20d5c82befc3b1626e557409dab5343c47b
+commit ca29a9f3e689f3840426897f58aaa3974932ae78
Author: Ross Burton <ross.burton@intel.com>
Date: Mon Jul 7 12:53:35 2014 +0100
@@ -13,7 +13,7 @@ Date: Mon Jul 7 12:53:35 2014 +0100
Signed-off-by: Ross Burton <ross.burton@intel.com>
diff --git a/configure.ac b/configure.ac
-index 2daa6be..fd1cf3f 100644
+index c214638..a1ca9ec 100644
--- a/configure.ac
+++ b/configure.ac
@@ -648,6 +648,7 @@ AC_ARG_ENABLE(kdrive-kbd, AS_HELP_STRING([--enable-kdrive-kbd], [Build kbd d
@@ -24,24 +24,18 @@ index 2daa6be..fd1cf3f 100644
dnl chown/chmod to be setuid root as part of build
-@@ -1235,18 +1236,19 @@ esac
+@@ -1235,18 +1236,21 @@ esac
AM_CONDITIONAL(BUSFAULT, test x"$BUSFAULT" = xyes)
-PKG_CHECK_MODULES([XSHMFENCE], $XSHMFENCE,
- [HAVE_XSHMFENCE=yes], [HAVE_XSHMFENCE=no])
-+HAVE_XSHMFENCE=no
-+if test "x$WANT_XSHMFENCE" != "xno"; then
-+ PKG_CHECK_MODULES([XSHMFENCE], $XSHMFENCE,
-+ [HAVE_XSHMFENCE=yes], [HAVE_XSHMFENCE=no])
-
-+ if test "$WANT_XSHMFENCE,$HAVE_XSHMFENCE" = "yes,no"; then
-+ AC_MSG_ERROR([libxshmfence requested but not found.])
-+ fi
-+ AC_DEFINE(HAVE_XSHMFENCE, 1, [Have X Shared Memory Fence library])
-+ REQUIRED_LIBS="$REQUIRED_LIBS xshmfence"
+
+-AM_CONDITIONAL(XSHMFENCE, test "x$HAVE_XSHMFENCE" = xyes)
++PKG_CHECK_MODULES([XSHMFENCE], $XSHMFENCE, [HAVE_XSHMFENCE=yes], [HAVE_XSHMFENCE=no])
++if test "x$WANT_XSHMFENCE" = "xauto"; then
++ WANT_XSHMFENCE="$HAVE_XSHMFENCE"
+fi
- AM_CONDITIONAL(XSHMFENCE, test "x$HAVE_XSHMFENCE" = xyes)
-case x"$HAVE_XSHMFENCE" in
- xyes)
@@ -49,7 +43,15 @@ index 2daa6be..fd1cf3f 100644
- REQUIRED_LIBS="$REQUIRED_LIBS xshmfence"
- ;;
-esac
--
++if test "x$WANT_XSHMFENCE" = "xyes"; then
++ if test "x$HAVE_XSHMFENCE" != "xyes"; then
++ AC_MSG_ERROR([xshmfence requested but not installed.])
++ fi
++ AC_DEFINE(HAVE_XSHMFENCE, 1, [Have xshmfence support])
++ REQUIRED_LIBS="$REQUIRED_LIBS xshmfence"
++fi
+
++AM_CONDITIONAL(XSHMFENCE, [test "x$WANT_XSHMFENCE" = xyes])
case "$DRI3,$HAVE_XSHMFENCE" in
yes,yes | auto,yes)
diff --git a/meta/recipes-graphics/xorg-xserver/xserver-xorg_1.15.1.bb b/meta/recipes-graphics/xorg-xserver/xserver-xorg_1.16.2.bb
index 22ac604433..1519271ccd 100644
--- a/meta/recipes-graphics/xorg-xserver/xserver-xorg_1.15.1.bb
+++ b/meta/recipes-graphics/xorg-xserver/xserver-xorg_1.16.2.bb
@@ -1,16 +1,13 @@
require xserver-xorg.inc
# Misc build failure for master HEAD
-SRC_URI += "file://crosscompile.patch \
- file://fix_open_max_preprocessor_error.patch \
- file://mips64-compiler.patch \
- file://aarch64.patch \
+SRC_URI += "file://fix_open_max_preprocessor_error.patch \
file://xorg-CVE-2013-6424.patch \
file://xshmfence-option.patch \
"
-SRC_URI[md5sum] = "e4c70262ed89764be8f8f5d699ed9227"
-SRC_URI[sha256sum] = "626db6882602ebe1ff81f7a4231c7ccc6ceb5032f2b5b3954bf749e1567221e2"
+SRC_URI[md5sum] = "89620960b13515db8d0a8dbb92a1378a"
+SRC_URI[sha256sum] = "446e0c3ebd556aced78ec0000ba9ae73f1e5317117d497f827afba48b787ce64"
# These extensions are now integrated into the server, so declare the migration
# path for in-place upgrades.
diff --git a/meta/recipes-kernel/blktrace/blktrace/ldflags.patch b/meta/recipes-kernel/blktrace/blktrace/ldflags.patch
index b9c7c20c2b..037d161340 100644
--- a/meta/recipes-kernel/blktrace/blktrace/ldflags.patch
+++ b/meta/recipes-kernel/blktrace/blktrace/ldflags.patch
@@ -59,8 +59,12 @@ Signed-off-by: Fahad Usman <fahad_usman@mentor.com>
INCS = -I. -I.. -I../btt
OCFLAGS = -UCOUNT_IOS -UDEBUG -DNDEBUG
XCFLAGS = -D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64
-@@ -35,7 +36,7 @@ btrecord: btrecord.o
- $(CC) $(CFLAGS) -o $@ $(filter %.o,$^)
+@@ -32,10 +33,10 @@ clean: docsclean
+ $(CC) $(CFLAGS) -c -o $*.o $<
+
+ btrecord: btrecord.o
+- $(CC) $(CFLAGS) -o $@ $(filter %.o,$^)
++ $(CC) $(CFLAGS) -o $@ $(filter %.o,$^) $(LDFLAGS)
btreplay: btreplay.o
- $(CC) $(CFLAGS) -o $@ $(filter %.o,$^) $(LIBS)
diff --git a/meta/recipes-kernel/cryptodev/cryptodev_1.6.inc b/meta/recipes-kernel/cryptodev/cryptodev_1.6.inc
index 946faac7d2..6a93a35a39 100644
--- a/meta/recipes-kernel/cryptodev/cryptodev_1.6.inc
+++ b/meta/recipes-kernel/cryptodev/cryptodev_1.6.inc
@@ -9,3 +9,5 @@ SRC_URI[md5sum] = "eade38998313c25fd7934719cdf8a2ea"
SRC_URI[sha256sum] = "75f1425c8ea1f8cae523905a5a046a35092327a6152800b0b86efc4e56fb3e2f"
S = "${WORKDIR}/cryptodev-linux-${PV}"
+
+CLEANBROKEN = "1"
diff --git a/meta/recipes-kernel/dtc/dtc.inc b/meta/recipes-kernel/dtc/dtc.inc
index 3eca25fe41..0c409b0c0b 100644
--- a/meta/recipes-kernel/dtc/dtc.inc
+++ b/meta/recipes-kernel/dtc/dtc.inc
@@ -4,15 +4,19 @@ SECTION = "bootloader"
LICENSE = "GPLv2 | BSD"
DEPENDS = "flex-native bison-native"
-inherit autotools-brokensep
-
-SRC_URI = "git://www.jdl.com/software/dtc.git \
+SRC_URI = "git://git.kernel.org/pub/scm/utils/dtc/dtc.git \
file://make_install.patch \
"
-EXTRA_OEMAKE='PREFIX="${prefix}" LIBDIR="${libdir}"'
+EXTRA_OEMAKE='PREFIX="${prefix}" LIBDIR="${libdir}" DESTDIR="${D}"'
S = "${WORKDIR}/git"
+do_install () {
+ oe_runmake install
+}
+
PACKAGES =+ "${PN}-misc"
FILES_${PN}-misc = "${bindir}/convert-dtsv0 ${bindir}/ftdump ${bindir}/dtdiff"
+
+RDEPENDS_${PN}-misc += "bash"
diff --git a/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb b/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb
index 24263e5d2b..ce872354f2 100644
--- a/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb
+++ b/meta/recipes-kernel/kern-tools/kern-tools-native_git.bb
@@ -4,7 +4,7 @@ LIC_FILES_CHKSUM = "file://git/tools/kgit;beginline=5;endline=9;md5=d8d1d729a70c
DEPENDS = "git-native"
-SRCREV = "e914d570232a5a6aa47b721dafbab4af4209d93c"
+SRCREV = "23345b8846fe4bd167efdf1bd8a1224b2ba9a5ff"
PR = "r12"
PV = "0.2+git${SRCPV}"
diff --git a/meta/recipes-kernel/kexec/kexec-tools.inc b/meta/recipes-kernel/kexec/kexec-tools.inc
index 7e3b7adfc7..312424a88f 100644
--- a/meta/recipes-kernel/kexec/kexec-tools.inc
+++ b/meta/recipes-kernel/kexec/kexec-tools.inc
@@ -12,7 +12,7 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/utils/kernel/kexec/kexec-tools-${PV}.tar.gz
inherit autotools-brokensep
-COMPATIBLE_HOST = '(x86_64.*|i.86.*|arm.*|powerpc.*|mips.*)-(linux|freebsd.*)'
+COMPATIBLE_HOST = '(x86_64.*|i.86.*|arm.*|aarch64.*|powerpc.*|mips.*)-(linux|freebsd.*)'
INSANE_SKIP_${PN} = "arch"
diff --git a/meta/recipes-kernel/kexec/kexec-tools/kexec-aarch64.patch b/meta/recipes-kernel/kexec/kexec-tools/kexec-aarch64.patch
new file mode 100644
index 0000000000..b697a54b90
--- /dev/null
+++ b/meta/recipes-kernel/kexec/kexec-tools/kexec-aarch64.patch
@@ -0,0 +1,801 @@
+From: Geoff Levand <geoff@infradead.org>
+Date: Mon, 15 Jul 2013 23:32:36 +0000 (-0700)
+Subject: Add arm64 support
+X-Git-Url: https://git.linaro.org/gitweb?p=people%2Fgeoff%2Fkexec-tools.git;a=commitdiff_plain;h=fbf5ac6c2c70ec0f6da2b9ff563e573999752c01
+
+Add arm64 support
+
+Signed-off-by: Geoff Levand <geoff@infradead.org>
+
+Get patch from:
+https://fedorapeople.org/~hrw/aarch64/for-fedora/kexec-aarch64.patch
+
+Upstream-Status: Pending
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+---
+
+Index: kexec-tools-2.0.4/configure.ac
+===================================================================
+--- kexec-tools-2.0.4.orig/configure.ac
++++ kexec-tools-2.0.4/configure.ac
+@@ -30,6 +30,9 @@ case $target_cpu in
+ powerpc64 )
+ ARCH="ppc64"
+ ;;
++ aarch64 )
++ ARCH="arm64"
++ ;;
+ arm* )
+ ARCH="arm"
+ ;;
+Index: kexec-tools-2.0.4/kexec/Makefile
+===================================================================
+--- kexec-tools-2.0.4.orig/kexec/Makefile
++++ kexec-tools-2.0.4/kexec/Makefile
+@@ -70,6 +70,7 @@ KEXEC_SRCS += $($(ARCH)_FS2DT)
+
+ include $(srcdir)/kexec/arch/alpha/Makefile
+ include $(srcdir)/kexec/arch/arm/Makefile
++include $(srcdir)/kexec/arch/arm64/Makefile
+ include $(srcdir)/kexec/arch/i386/Makefile
+ include $(srcdir)/kexec/arch/ia64/Makefile
+ include $(srcdir)/kexec/arch/mips/Makefile
+Index: kexec-tools-2.0.4/kexec/arch/arm64/Makefile
+===================================================================
+--- /dev/null
++++ kexec-tools-2.0.4/kexec/arch/arm64/Makefile
+@@ -0,0 +1,13 @@
++
++arm64_KEXEC_SRCS += \
++ kexec/arch/arm64/kexec-arm64.c \
++ kexec/arch/arm64/kexec-elf-arm64.c \
++ kexec/arch/arm64/crashdump-arm64.c
++
++arm64_ARCH_REUSE_INITRD =
++arm64_ADD_SEGMENT =
++arm64_VIRT_TO_PHYS =
++
++dist += $(arm64_KEXEC_SRCS) \
++ kexec/arch/arm64/Makefile \
++ kexec/arch/arm64/kexec-arm64.h
+Index: kexec-tools-2.0.4/kexec/arch/arm64/crashdump-arm64.c
+===================================================================
+--- /dev/null
++++ kexec-tools-2.0.4/kexec/arch/arm64/crashdump-arm64.c
+@@ -0,0 +1,305 @@
++/*
++ * This program is free software; you can redistribute it and/or modify
++ * it under the terms of the GNU General Public License as published by
++ * the Free Software Foundation (version 2 of the License).
++ */
++
++#include "../../kexec.h"
++#include "../../kexec-elf.h"
++#include "../../crashdump.h"
++
++int is_crashkernel_mem_reserved(void)
++{
++ return 0;
++}
++
++#if 0
++/*
++ * Used to save various memory ranges/regions needed for the captured
++ * kernel to boot. (lime memmap= option in other archs)
++ */
++static struct memory_range crash_memory_ranges[CRASH_MAX_MEMORY_RANGES];
++struct memory_ranges usablemem_rgns = {
++ .size = 0,
++ .ranges = crash_memory_ranges,
++};
++
++/* memory range reserved for crashkernel */
++static struct memory_range crash_reserved_mem;
++
++static struct crash_elf_info elf_info = {
++ .class = ELFCLASS32,
++ .data = ELFDATA2LSB,
++ .machine = EM_ARM,
++ .page_offset = PAGE_OFFSET,
++};
++
++unsigned long phys_offset;
++
++/**
++ * crash_range_callback() - callback called for each iomem region
++ * @data: not used
++ * @nr: not used
++ * @str: name of the memory region
++ * @base: start address of the memory region
++ * @length: size of the memory region
++ *
++ * This function is called once for each memory region found in /proc/iomem. It
++ * locates system RAM and crashkernel reserved memory and places these to
++ * variables: @crash_memory_ranges and @crash_reserved_mem. Number of memory
++ * regions is placed in @crash_memory_nr_ranges.
++ */
++static int crash_range_callback(void *UNUSED(data), int UNUSED(nr),
++ char *str, unsigned long base,
++ unsigned long length)
++{
++ struct memory_range *range;
++
++ if (usablemem_rgns.size >= CRASH_MAX_MEMORY_RANGES)
++ return 1;
++
++ range = usablemem_rgns.ranges + usablemem_rgns.size;
++
++ if (strncmp(str, "System RAM\n", 11) == 0) {
++ range->start = base;
++ range->end = base + length - 1;
++ range->type = RANGE_RAM;
++ usablemem_rgns.size++;
++ } else if (strncmp(str, "Crash kernel\n", 13) == 0) {
++ crash_reserved_mem.start = base;
++ crash_reserved_mem.end = base + length - 1;
++ crash_reserved_mem.type = RANGE_RAM;
++ }
++
++ return 0;
++}
++
++/**
++ * crash_exclude_range() - excludes memory region reserved for crashkernel
++ *
++ * Function locates where crashkernel reserved memory is and removes that region
++ * from the available memory regions.
++ */
++static void crash_exclude_range(void)
++{
++ const struct memory_range *range = &crash_reserved_mem;
++ int i;
++
++ for (i = 0; i < usablemem_rgns.size; i++) {
++ struct memory_range *r = usablemem_rgns.ranges + i;
++
++ /*
++ * We assume that crash area is fully contained in
++ * some larger memory area.
++ */
++ if (r->start <= range->start && r->end >= range->end) {
++ struct memory_range *new;
++ /*
++ * Let's split this area into 2 smaller ones and
++ * remove excluded range from between. First create
++ * new entry for the remaining area.
++ */
++ new = usablemem_rgns.ranges + usablemem_rgns.size;
++ new->start = range->end + 1;
++ new->end = r->end;
++ usablemem_rgns.size++;
++ /*
++ * Next update this area to end before excluded range.
++ */
++ r->end = range->start - 1;
++ break;
++ }
++ }
++}
++
++static int range_cmp(const void *a1, const void *a2)
++{
++ const struct memory_range *r1 = a1;
++ const struct memory_range *r2 = a2;
++
++ if (r1->start > r2->start)
++ return 1;
++ if (r1->start < r2->start)
++ return -1;
++
++ return 0;
++}
++
++/**
++ * crash_get_memory_ranges() - read system physical memory
++ *
++ * Function reads through system physical memory and stores found memory regions
++ * in @crash_memory_ranges. Number of memory regions found is placed in
++ * @crash_memory_nr_ranges. Regions are sorted in ascending order.
++ *
++ * Returns %0 in case of success and %-1 otherwise (errno is set).
++ */
++static int crash_get_memory_ranges(void)
++{
++ /*
++ * First read all memory regions that can be considered as
++ * system memory including the crash area.
++ */
++ kexec_iomem_for_each_line(NULL, crash_range_callback, NULL);
++
++ if (usablemem_rgns.size < 1) {
++ errno = EINVAL;
++ return -1;
++ }
++
++ /*
++	 * Exclude memory reserved for crashkernel (this may result in a split memory
++ * region).
++ */
++ crash_exclude_range();
++
++ /*
++ * Make sure that the memory regions are sorted.
++ */
++ qsort(usablemem_rgns.ranges, usablemem_rgns.size,
++ sizeof(*usablemem_rgns.ranges), range_cmp);
++
++ return 0;
++}
++
++/**
++ * cmdline_add_elfcorehdr() - adds elfcorehdr= to @cmdline
++ * @cmdline: buffer where parameter is placed
++ * @elfcorehdr: physical address of elfcorehdr
++ *
++ * Function appends 'elfcorehdr=start' at the end of the command line given in
++ * @cmdline. Note that @cmdline must be at least %COMMAND_LINE_SIZE bytes long
++ * (including %NUL).
++ */
++static void cmdline_add_elfcorehdr(char *cmdline, unsigned long elfcorehdr)
++{
++ char buf[COMMAND_LINE_SIZE];
++ int buflen;
++
++ buflen = snprintf(buf, sizeof(buf), "%s elfcorehdr=%#lx",
++ cmdline, elfcorehdr);
++ if (buflen < 0)
++ die("Failed to construct elfcorehdr= command line parameter\n");
++ if (buflen >= sizeof(buf))
++ die("Command line overflow\n");
++
++ (void) strncpy(cmdline, buf, COMMAND_LINE_SIZE);
++ cmdline[COMMAND_LINE_SIZE - 1] = '\0';
++}
++
++/**
++ * cmdline_add_mem() - adds mem= parameter to kernel command line
++ * @cmdline: buffer where parameter is placed
++ * @size: size of the kernel reserved memory (in bytes)
++ *
++ * This function appends 'mem=size' at the end of the command line given in
++ * @cmdline. Note that @cmdline must be at least %COMMAND_LINE_SIZE bytes long
++ * (including %NUL).
++ */
++static void cmdline_add_mem(char *cmdline, unsigned long size)
++{
++ char buf[COMMAND_LINE_SIZE];
++ int buflen;
++
++ buflen = snprintf(buf, sizeof(buf), "%s mem=%ldK", cmdline, size >> 10);
++ if (buflen < 0)
++ die("Failed to construct mem= command line parameter\n");
++ if (buflen >= sizeof(buf))
++ die("Command line overflow\n");
++
++ (void) strncpy(cmdline, buf, COMMAND_LINE_SIZE);
++ cmdline[COMMAND_LINE_SIZE - 1] = '\0';
++}
++
++static unsigned long long range_size(const struct memory_range *r)
++{
++ return r->end - r->start + 1;
++}
++
++static void dump_memory_ranges(void)
++{
++ int i;
++
++ if (!kexec_debug)
++ return;
++
++ dbgprintf("crashkernel: [%#llx - %#llx] (%ldM)\n",
++ crash_reserved_mem.start, crash_reserved_mem.end,
++ (unsigned long)range_size(&crash_reserved_mem) >> 20);
++
++ for (i = 0; i < usablemem_rgns.size; i++) {
++ struct memory_range *r = usablemem_rgns.ranges + i;
++ dbgprintf("memory range: [%#llx - %#llx] (%ldM)\n",
++ r->start, r->end, (unsigned long)range_size(r) >> 20);
++ }
++}
++
++/**
++ * load_crashdump_segments() - loads additional segments needed for kdump
++ * @info: kexec info structure
++ * @mod_cmdline: kernel command line
++ *
++ * This function loads additional segments which are needed for the dump capture
++ * kernel. It also updates kernel command line passed in @mod_cmdline to have
++ * right parameters for the dump capture kernel.
++ *
++ * Return %0 in case of success and %-1 in case of error.
++ */
++int load_crashdump_segments(struct kexec_info *info, char *mod_cmdline)
++{
++ unsigned long elfcorehdr;
++ unsigned long bufsz;
++ void *buf;
++ int err;
++
++ /*
++ * First fetch all the memory (RAM) ranges that we are going to pass to
++ * the crashdump kernel during panic.
++ */
++ err = crash_get_memory_ranges();
++ if (err)
++ return err;
++
++ /*
++ * Now that we have memory regions sorted, we can use first memory
++ * region as PHYS_OFFSET.
++ */
++ phys_offset = usablemem_rgns.ranges->start;
++ dbgprintf("phys_offset: %#lx\n", phys_offset);
++
++ err = crash_create_elf32_headers(info, &elf_info,
++ usablemem_rgns.ranges,
++ usablemem_rgns.size, &buf, &bufsz,
++ ELF_CORE_HEADER_ALIGN);
++ if (err)
++ return err;
++
++ /*
++ * We allocate ELF core header from the end of the memory area reserved
++ * for the crashkernel. We align the header to SECTION_SIZE (which is
++ * 1MB) so that available memory passed in kernel command line will be
++ * aligned to 1MB. This is because kernel create_mapping() wants memory
++ * regions to be aligned to SECTION_SIZE.
++ */
++ elfcorehdr = add_buffer_phys_virt(info, buf, bufsz, bufsz, 1 << 20,
++ crash_reserved_mem.start,
++ crash_reserved_mem.end, -1, 0);
++
++ dbgprintf("elfcorehdr: %#lx\n", elfcorehdr);
++ cmdline_add_elfcorehdr(mod_cmdline, elfcorehdr);
++
++ /*
++ * Add 'mem=size' parameter to dump capture kernel command line. This
++ * prevents the dump capture kernel from using any other memory regions
++ * which belong to the primary kernel.
++ */
++ cmdline_add_mem(mod_cmdline, elfcorehdr - crash_reserved_mem.start);
++
++ dump_memory_ranges();
++ dbgprintf("kernel command line: \"%s\"\n", mod_cmdline);
++
++ return 0;
++}
++
++#endif
++
+Index: kexec-tools-2.0.4/kexec/arch/arm64/include/arch/options.h
+===================================================================
+--- /dev/null
++++ kexec-tools-2.0.4/kexec/arch/arm64/include/arch/options.h
+@@ -0,0 +1,26 @@
++#ifndef KEXEC_ARCH_ARM64_OPTIONS_H
++#define KEXEC_ARCH_ARM64_OPTIONS_H
++
++//#define OPT_ARCH_MAX ((OPT_MAX)+0)
++
++#define OPT_APPEND ((OPT_MAX)+0)
++#define OPT_RAMDISK ((OPT_MAX)+1)
++#define OPT_DTB ((OPT_MAX)+2)
++
++#define OPT_ARCH_MAX ((OPT_MAX)+3)
++
++
++#define KEXEC_ARCH_OPTIONS \
++ KEXEC_OPTIONS \
++ { "append", 1, NULL, OPT_APPEND }, \
++ { "command-line", 1, NULL, OPT_APPEND }, \
++ { "dtb", 1, NULL, OPT_DTB }, \
++ { "initrd", 1, NULL, OPT_RAMDISK }, \
++ { "ramdisk", 1, NULL, OPT_RAMDISK }, \
++
++#define KEXEC_ARCH_OPT_STR KEXEC_OPT_STR /* Only accept long arch options. */
++
++#define KEXEC_ALL_OPTIONS KEXEC_ARCH_OPTIONS
++#define KEXEC_ALL_OPT_STR KEXEC_ARCH_OPT_STR
++
++#endif /* KEXEC_ARCH_ARM64_OPTIONS_H */
+Index: kexec-tools-2.0.4/kexec/arch/arm64/kexec-arm64.c
+===================================================================
+--- /dev/null
++++ kexec-tools-2.0.4/kexec/arch/arm64/kexec-arm64.c
+@@ -0,0 +1,177 @@
++/*
++ * ARM64 kexec support.
++ */
++
++#define _GNU_SOURCE
++
++#include <errno.h>
++#include <stddef.h>
++
++//#include <linux/kexec.h>
++
++#include "../../kexec.h"
++#include "../../kexec-syscall.h"
++#include "kexec-arm64.h"
++
++
++void arch_usage(void)
++{
++ fprintf(stderr, "%s:%d: ->\n", __func__, __LINE__);
++
++ printf(
++" --append=STRING Set the kernel command line to STRING.\n"
++" --command-line=STRING Set the kernel command line to STRING.\n"
++" --dtb=FILE Use FILE as the device tree blob.\n"
++" --initrd=FILE Use FILE as the kernel initial ramdisk.\n"
++" --ramdisk=FILE Use FILE as the kernel initial ramdisk.\n");
++
++ fprintf(stderr, "%s:%d: <-\n", __func__, __LINE__);
++}
++
++int arch_process_options(int UNUSED(argc), char **UNUSED(argv))
++{
++ fprintf(stderr, "%s:%d: do\n", __func__, __LINE__);
++ return 0;
++}
++
++const struct arch_map_entry arches[] = {
++ { "aarch64", KEXEC_ARCH_ARM64 },
++ { NULL, 0 },
++};
++
++void arch_update_purgatory(struct kexec_info *UNUSED(info))
++{
++ fprintf(stderr, "%s:%d: do\n", __func__, __LINE__);
++}
++
++unsigned long virt_to_phys(unsigned long addr)
++{
++ fprintf(stderr, "%s:%d: %016lx -> %016lx\n", __func__, __LINE__, addr,
++ addr + 0x080000000UL);
++ return addr + 0x080000000UL;
++}
++
++void add_segment(struct kexec_info *info, const void *buf, size_t bufsz,
++ unsigned long base, size_t memsz)
++{
++ fprintf(stderr, "%s:%d: ->\n", __func__, __LINE__);
++ add_segment_phys_virt(info, buf, bufsz, base, memsz, 1);
++ fprintf(stderr, "%s:%d: <-\n", __func__, __LINE__);
++}
++
++static int get_memory_ranges_1(struct memory_range **range, int *ranges,
++ unsigned long kexec_flags)
++{
++ static struct memory_range memory_range[KEXEC_SEGMENT_MAX];
++ const char *iomem;
++ int range_count = 0;
++ char line[MAX_LINE];
++ FILE *fp;
++
++ iomem = proc_iomem();
++ fp = fopen(iomem, "r");
++
++ if (!fp) {
++ fprintf(stderr, "Cannot open %s: %s\n",
++ iomem, strerror(errno));
++ return -1;
++ }
++
++ dbgprintf("memory ranges:\n");
++
++ while(fgets(line, sizeof(line), fp) != 0) {
++ struct memory_range r;
++ char *str;
++ int consumed;
++
++ if (range_count >= KEXEC_SEGMENT_MAX)
++ break;
++
++ if (sscanf(line, "%Lx-%Lx : %n", &r.start, &r.end, &consumed)
++ != 2)
++ continue;
++
++ str = line + consumed;
++ r.end++;
++
++ if (memcmp(str, "System RAM\n", 11)) {
++ dbgprintf(" Skip: %016Lx - %016Lx : %s", r.start, r.end,
++ str);
++ continue;
++ }
++
++ r.type = RANGE_RAM;
++ memory_range[range_count] = r;
++ range_count++;
++
++ dbgprintf(" Add: %016Lx - %016Lx : %s", r.start, r.end, str);
++ }
++
++ fclose(fp);
++ *range = memory_range;
++ *ranges = range_count;
++
++ return 0;
++}
++
++static int get_memory_ranges_2(struct memory_range **range, int *ranges,
++ unsigned long UNUSED(kexec_flags))
++{
++ static struct memory_range memory_range[2];
++
++ memory_range[0].start = 0x080000000;
++ memory_range[0].end = 0x100000000;
++ memory_range[0].type = RANGE_RAM;
++
++ memory_range[1].start = 0x900000000;
++ memory_range[1].end = 0x880000000;
++ memory_range[1].type = RANGE_RAM;
++
++ *range = memory_range;
++ *ranges = sizeof(memory_range) / sizeof(memory_range[0]);
++
++ return 0;
++}
++
++int get_memory_ranges(struct memory_range **range, int *ranges,
++ unsigned long kexec_flags)
++{
++ /* FIXME: Should get this info from device tree. */
++
++ return get_memory_ranges_1(range, ranges, kexec_flags);
++}
++
++struct file_type file_type[] = {
++ { "elf-arm64", elf_arm64_probe, elf_arm64_load, elf_arm64_usage },
++};
++
++int file_types = sizeof(file_type) / sizeof(file_type[0]);
++
++int arch_compat_trampoline(struct kexec_info *info)
++{
++ fprintf(stderr, "%s:%d: do\n", __func__, __LINE__);
++ return 0;
++}
++
++void arch_reuse_initrd(void)
++{
++}
++
++int machine_verify_elf_rel(struct mem_ehdr *ehdr)
++{
++ (void)ehdr;
++
++ fprintf(stderr, "%s:%d: do\n", __func__, __LINE__);
++ return 0;
++}
++
++void machine_apply_elf_rel(struct mem_ehdr *ehdr, unsigned long r_type,
++ void *location, unsigned long address, unsigned long value)
++{
++ (void)ehdr;
++ (void)r_type;
++ (void)location;
++ (void)address;
++ (void)value;
++ fprintf(stderr, "%s:%d: do\n", __func__, __LINE__);
++}
+Index: kexec-tools-2.0.4/kexec/arch/arm64/kexec-arm64.h
+===================================================================
+--- /dev/null
++++ kexec-tools-2.0.4/kexec/arch/arm64/kexec-arm64.h
+@@ -0,0 +1,20 @@
++/*
++ * ARM64 kexec support.
++ */
++
++#if !defined(KEXEC_ARM64_H)
++#define KEXEC_ARM64_H
++
++/* #include <linux/kexec.h> FIXME: this is broken */
++#include <sys/types.h>
++
++#include "../../kexec.h"
++
++#define KEXEC_SEGMENT_MAX 16 /* FIXME: this should come from <linux/kexec.h> */
++
++int elf_arm64_probe(const char *buf, off_t len);
++int elf_arm64_load(int argc, char **argv, const char *buf, off_t len,
++ struct kexec_info *info);
++void elf_arm64_usage(void);
++
++#endif
+\ No newline at end of file
+Index: kexec-tools-2.0.4/kexec/arch/arm64/kexec-elf-arm64.c
+===================================================================
+--- /dev/null
++++ kexec-tools-2.0.4/kexec/arch/arm64/kexec-elf-arm64.c
+@@ -0,0 +1,114 @@
++/*
++ * ARM64 kexec support.
++ */
++
++#define _GNU_SOURCE
++
++#include <elf.h>
++#include <getopt.h>
++
++#include "../../kexec-syscall.h"
++
++#include "kexec-arm64.h"
++#include "arch/options.h"
++
++#if !defined(EM_AARCH64)
++# define EM_AARCH64 183
++#endif
++
++int elf_arm64_probe(const char *buf, off_t len)
++{
++ int result;
++ struct mem_ehdr ehdr;
++
++ fprintf(stderr, "%s:%d: ->\n", __func__, __LINE__);
++
++ result = build_elf_exec_info(buf, len, &ehdr, 0);
++
++ if (result < 0) {
++ dbgprintf("Not an ELF executable\n");
++ goto out;
++ }
++
++ if (ehdr.e_machine != EM_AARCH64) {
++ dbgprintf("Not an AARCH64 executable\n");
++ result = -1;
++ goto out;
++ }
++
++ result = 0;
++
++out:
++ free_elf_info(&ehdr);
++ fprintf(stderr, "%s:%d: <-\n", __func__, __LINE__);
++ return result;
++}
++
++int elf_arm64_load(int argc, char **argv, const char *buf, off_t len,
++ struct kexec_info *info)
++{
++ static const struct option options[] = {
++ KEXEC_ARCH_OPTIONS
++ { 0 }
++ };
++ static const char short_options[] = KEXEC_OPT_STR "";
++ const char *command_line = NULL;
++ unsigned int command_line_len = 0;
++ const char *ramdisk = NULL;
++ const char *dtb = NULL;
++ int opt;
++ struct mem_ehdr ehdr;
++ int result;
++
++ fprintf(stderr, "%s:%d: ->\n", __func__, __LINE__);
++
++ while ((opt = getopt_long(argc, argv, short_options, options, 0))
++ != -1) {
++ switch (opt) {
++ default:
++ if (opt < OPT_MAX) /* Ignore core options */
++ break;
++ case OPT_APPEND:
++ command_line = optarg;
++ command_line_len = strlen(command_line) + 1;
++ break;
++ case OPT_RAMDISK:
++ ramdisk = optarg;
++ break;
++ case OPT_DTB:
++ dtb = optarg;
++ break;
++ }
++ }
++
++ fprintf(stderr, "%s:%d: command_line: %s\n", __func__, __LINE__, command_line);
++ fprintf(stderr, "%s:%d: ramdisk: %s\n", __func__, __LINE__, ramdisk);
++ fprintf(stderr, "%s:%d: dtb: %s\n", __func__, __LINE__, dtb);
++
++ if (info->kexec_flags & KEXEC_ON_CRASH) {
++ fprintf(stderr, "kexec: kdump not yet supported on arm64\n");
++ return -1;
++ }
++
++ result = build_elf_exec_info(buf, len, &ehdr, 0);
++
++ if (result < 0) {
++ free_elf_info(&ehdr);
++ fprintf(stderr, "%s:%d: free_elf_info failed\n", __func__,
++ __LINE__);
++ return result;
++ }
++
++ elf_exec_build_load(info, &ehdr, buf, len, 0);
++
++ info->entry = (void*)0x80080000UL; // FIXME
++
++ fprintf(stderr, "%s:%d: <-\n", __func__, __LINE__);
++ return 0;
++}
++
++void elf_arm64_usage(void)
++{
++ fprintf(stderr, "%s:%d: ->\n", __func__, __LINE__);
++ fprintf(stderr, "%s:%d: <-\n", __func__, __LINE__);
++}
+Index: kexec-tools-2.0.4/kexec/kexec-syscall.h
+===================================================================
+--- kexec-tools-2.0.4.orig/kexec/kexec-syscall.h
++++ kexec-tools-2.0.4/kexec/kexec-syscall.h
+@@ -39,8 +39,8 @@
+ #ifdef __s390__
+ #define __NR_kexec_load 277
+ #endif
+-#ifdef __arm__
+-#define __NR_kexec_load __NR_SYSCALL_BASE + 347
++#if defined(__arm__) || defined(__arm64__)
++#define __NR_kexec_load __NR_SYSCALL_BASE + 347
+ #endif
+ #if defined(__mips__)
+ #define __NR_kexec_load 4311
+@@ -72,6 +72,8 @@ static inline long kexec_load(void *entr
+ #define KEXEC_ARCH_PPC64 (21 << 16)
+ #define KEXEC_ARCH_IA_64 (50 << 16)
+ #define KEXEC_ARCH_ARM (40 << 16)
++#define KEXEC_ARCH_ARM64 (183 << 16)
++/* #define KEXEC_ARCH_AARCH64 (183 << 16) */
+ #define KEXEC_ARCH_S390 (22 << 16)
+ #define KEXEC_ARCH_SH (42 << 16)
+ #define KEXEC_ARCH_MIPS_LE (10 << 16)
+@@ -114,5 +116,8 @@ static inline long kexec_load(void *entr
+ #if defined(__mips__)
+ #define KEXEC_ARCH_NATIVE KEXEC_ARCH_MIPS
+ #endif
++#if defined(__arm64__)
++#define KEXEC_ARCH_NATIVE KEXEC_ARCH_ARM64
++#endif
+
+ #endif /* KEXEC_SYSCALL_H */
+Index: kexec-tools-2.0.4/kexec/kexec.c
+===================================================================
+--- kexec-tools-2.0.4.orig/kexec/kexec.c
++++ kexec-tools-2.0.4/kexec/kexec.c
+@@ -659,6 +659,8 @@ static int my_load(const char *type, int
+ info.backup_start = 0;
+ info.kexec_flags = kexec_flags;
+
++ fprintf(stderr, "%s:%d: do\n", __func__, __LINE__);
++
+ result = 0;
+ if (argc - fileind <= 0) {
+ fprintf(stderr, "No kernel specified\n");
+Index: kexec-tools-2.0.4/purgatory/arch/arm64/Makefile
+===================================================================
+--- /dev/null
++++ kexec-tools-2.0.4/purgatory/arch/arm64/Makefile
+@@ -0,0 +1,7 @@
++#
++# Purgatory arm64
++#
++
++arm64_PURGATORY_SRCS =
++
++dist += purgatory/arch/arm64/Makefile $(arm64_PURGATORY_SRCS)
+Index: kexec-tools-2.0.4/configure
+===================================================================
+--- kexec-tools-2.0.4.orig/configure
++++ kexec-tools-2.0.4/configure
+@@ -2256,6 +2256,9 @@ case $target_cpu in
+ powerpc64 )
+ ARCH="ppc64"
+ ;;
++ aarch64 )
++ ARCH="arm64"
++ ;;
+ arm* )
+ ARCH="arm"
+ ;;
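
Although most of crashdump-arm64.c above is compiled out behind #if 0 for now, its comments document the intended algorithm: crash_exclude_range() assumes the crashkernel reservation is fully contained in one System RAM region and splits that region into the parts below and above it, and crash_get_memory_ranges() then sorts the result with range_cmp(). A minimal Python sketch of that split-and-sort step, using made-up addresses, for readers skimming the C:

# Sketch of the range exclusion done by crash_exclude_range() above.
# Addresses are hypothetical; the real code walks /proc/iomem entries.

def exclude_reserved(ram_ranges, reserved):
    out = []
    for start, end in ram_ranges:
        if start <= reserved[0] and end >= reserved[1]:
            out.append((start, reserved[0] - 1))   # memory below the reservation
            out.append((reserved[1] + 1, end))     # memory above the reservation
        else:
            out.append((start, end))
    return sorted(out)                             # the qsort(range_cmp) step

ram = [(0x80000000, 0xFFFFFFFF)]                   # one "System RAM" region
crashkernel = (0xA0000000, 0xA7FFFFFF)             # the "Crash kernel" region
for lo, hi in exclude_reserved(ram, crashkernel):
    print(hex(lo), "-", hex(hi))
# 0x80000000 - 0x9fffffff
# 0xa8000000 - 0xffffffff

The C version also records the reservation itself while parsing /proc/iomem; the sketch only mirrors the splitting and sorting arithmetic.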
diff --git a/meta/recipes-kernel/kexec/kexec-tools_2.0.7.bb b/meta/recipes-kernel/kexec/kexec-tools_2.0.8.bb
index f0ece0d45b..c478732300 100644
--- a/meta/recipes-kernel/kexec/kexec-tools_2.0.7.bb
+++ b/meta/recipes-kernel/kexec/kexec-tools_2.0.8.bb
@@ -2,10 +2,12 @@ require kexec-tools.inc
export LDFLAGS = "-L${STAGING_LIBDIR}"
EXTRA_OECONF = " --with-zlib=yes"
-SRC_URI += "file://kexec-tools-Refine-kdump-device_tree-sort.patch"
+SRC_URI += "file://kexec-tools-Refine-kdump-device_tree-sort.patch \
+ file://kexec-aarch64.patch \
+ "
-SRC_URI[md5sum] = "2309ba43981cb6d39d07ac3a9aac30ab"
-SRC_URI[sha256sum] = "dde5c38be39882c6c91f0129647349c4e1943b077d3020af1970b481ee954eb0"
+SRC_URI[md5sum] = "45bd2bc676ae202579e4c185563126af"
+SRC_URI[sha256sum] = "95083c707577154a2241c5af0985f1eb3412d1ba208ef91813e9158b0a901179"
PACKAGES =+ "kexec kdump vmcore-dmesg"
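
The recipe update above adds kexec-aarch64.patch to SRC_URI and moves to the 2.0.8 tarball, which is why the md5sum/sha256sum lines change. When refreshing such checksum lines for another release, something like the following prints values ready to paste in (the tarball file name is an assumption for illustration, not taken from the recipe context shown here):

# Sketch: compute the values for SRC_URI[md5sum] / SRC_URI[sha256sum].
import hashlib

def checksums(path, chunk=1 << 20):
    md5, sha256 = hashlib.md5(), hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(chunk), b""):
            md5.update(block)
            sha256.update(block)
    return md5.hexdigest(), sha256.hexdigest()

md5sum, sha256sum = checksums("kexec-tools-2.0.8.tar.xz")
print('SRC_URI[md5sum] = "%s"' % md5sum)
print('SRC_URI[sha256sum] = "%s"' % sha256sum)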
diff --git a/meta/recipes-kernel/kmod/kmod.inc b/meta/recipes-kernel/kmod/kmod.inc
index dda74c85b6..c26ea23533 100644
--- a/meta/recipes-kernel/kmod/kmod.inc
+++ b/meta/recipes-kernel/kmod/kmod.inc
@@ -11,14 +11,14 @@ SECTION = "base"
DEPENDS += "pkgconfig-native"
-LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe \
+LIC_FILES_CHKSUM = "file://COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
file://libkmod/COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
"
inherit autotools gtk-doc ptest
-SRCREV = "ae58de0fcb4a6528dd365e23d383bbe2eaf2d566"
+SRCREV = "fd56638aed3fe147015bfa10ed4a5f7491303cb4"
# Lookout for PV bump too when SRCREV is changed
-PV = "18+git${SRCPV}"
+PV = "19+git${SRCPV}"
SRC_URI = "git://git.kernel.org/pub/scm/utils/kernel/kmod/kmod.git \
file://depmod-search.conf \
@@ -26,7 +26,6 @@ SRC_URI = "git://git.kernel.org/pub/scm/utils/kernel/kmod/kmod.git \
file://ptest.patch \
file://avoid_parallel_tests.patch \
file://fix-O_CLOEXEC.patch \
- file://0001-Add-missing-O_CLOEXEC-in-kmod_module_get_size.patch \
"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-kernel/kmod/kmod/0001-Add-missing-O_CLOEXEC-in-kmod_module_get_size.patch b/meta/recipes-kernel/kmod/kmod/0001-Add-missing-O_CLOEXEC-in-kmod_module_get_size.patch
deleted file mode 100644
index 77624ce151..0000000000
--- a/meta/recipes-kernel/kmod/kmod/0001-Add-missing-O_CLOEXEC-in-kmod_module_get_size.patch
+++ /dev/null
@@ -1,27 +0,0 @@
-From 74c26943f1228870022d116a1fda25be3a55a38e Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Cristian=20Rodr=C3=ADguez?= <crrodriguez@opensuse.org>
-Date: Wed, 18 Jun 2014 20:51:00 -0400
-Subject: [PATCH] Add missing O_CLOEXEC in kmod_module_get_size()
-
-Upstream-Status: Backport
-Signed-off-by: Saul Wold <sgw@linux.intel.com>
----
- libkmod/libkmod-module.c | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/libkmod/libkmod-module.c b/libkmod/libkmod-module.c
-index e3cc5a7..b81b451 100644
---- a/libkmod/libkmod-module.c
-+++ b/libkmod/libkmod-module.c
-@@ -1783,7 +1783,7 @@ KMOD_EXPORT long kmod_module_get_size(const struct kmod_module *mod)
- * loaded.
- */
- snprintf(line, sizeof(line), "/sys/module/%s", mod->name);
-- dfd = open(line, O_RDONLY);
-+ dfd = open(line, O_RDONLY|O_CLOEXEC);
- if (dfd < 0)
- return -errno;
-
---
-1.8.3.1
-
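
The backport deleted above is dropped presumably because the new kmod SRCREV already contains it; the fix itself marks the /sys/module/<name> directory descriptor close-on-exec so it is not leaked into child processes. A small Python illustration of what the flag does (the path is assumed to exist on a Linux host):

# Illustration of O_CLOEXEC, the flag the dropped backport added in libkmod:
# the descriptor is closed automatically across exec(), so spawned children
# do not inherit it.
import fcntl
import os

fd = os.open("/sys/module", os.O_RDONLY | os.O_CLOEXEC)
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
print(bool(flags & fcntl.FD_CLOEXEC))   # True
os.close(fd)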
diff --git a/meta/recipes-kernel/kmod/kmod/Change-to-calling-bswap_-instead-of-htobe-and-be-toh.patch b/meta/recipes-kernel/kmod/kmod/Change-to-calling-bswap_-instead-of-htobe-and-be-toh.patch
index 2320b92eba..7c0a27510b 100644
--- a/meta/recipes-kernel/kmod/kmod/Change-to-calling-bswap_-instead-of-htobe-and-be-toh.patch
+++ b/meta/recipes-kernel/kmod/kmod/Change-to-calling-bswap_-instead-of-htobe-and-be-toh.patch
@@ -1,6 +1,6 @@
-From 0c4dbadc9db3cda1cfca64e44ea08c6e89919ea7 Mon Sep 17 00:00:00 2001
-From: Ting Liu <b28495@freescale.com>
-Date: Tue, 10 Sep 2013 13:44:18 +0800
+From 4b68940b1ed46c54a5a0bdf6bb9d4599bc64e6f4 Mon Sep 17 00:00:00 2001
+From: Chen Qi <Qi.Chen@windriver.com>
+Date: Wed, 24 Dec 2014 10:12:40 +0800
Subject: [PATCH] Change to calling bswap_* instead of htobe* and be*toh
We can't use htobe* and be*toh functions because they are not
@@ -11,12 +11,13 @@ Change to directly calling bswap_* as defined in byteswap.h.
Upstream-Status: Inappropriate
Signed-off-by: Ting Liu <b28495@freescale.com>
+Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
---
- libkmod/libkmod-signature.c | 3 ++-
- 1 files changed, 2 insertions(+), 1 deletions(-)
+ libkmod/libkmod-signature.c | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/libkmod/libkmod-signature.c b/libkmod/libkmod-signature.c
-index 6b80caa..3544a36 100644
+index 2b976a5..ffe58c2 100644
--- a/libkmod/libkmod-signature.c
+++ b/libkmod/libkmod-signature.c
@@ -19,6 +19,7 @@
@@ -24,10 +25,10 @@ index 6b80caa..3544a36 100644
#include <endian.h>
+#include <byteswap.h>
- #include <stdint.h>
+ #include <inttypes.h>
+ #include <stdio.h>
#include <stdlib.h>
- #include <string.h>
-@@ -121,7 +122,7 @@ bool kmod_module_signature_info(const struct kmod_file *file, struct kmod_signat
+@@ -124,7 +125,7 @@ bool kmod_module_signature_info(const struct kmod_file *file, struct kmod_signat
modsig->hash >= PKEY_HASH__LAST ||
modsig->id_type >= PKEY_ID_TYPE__LAST)
return false;
@@ -37,5 +38,5 @@ index 6b80caa..3544a36 100644
return false;
--
-1.7.5.4
+1.9.1
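
The refreshed patch above is rebased onto the new kmod source, but its point is unchanged: read big-endian on-disk fields with bswap_* from byteswap.h instead of the htobe*/be*toh helpers that some C libraries do not provide. A short Python sketch of the byte-order reads involved (the sample bytes and the 64-bit width are illustrative; the exact field the patch touches is not visible in the hunk above):

# Sketch of the byte-order handling behind the patch above.
import struct

raw = bytes.fromhex("00000000000001a4")     # hypothetical 8-byte length field

big_endian = struct.unpack(">Q", raw)[0]    # 420, what be64toh() would yield
native_le = struct.unpack("<Q", raw)[0]     # what a little-endian CPU loads raw
print(big_endian, hex(native_le))

# bswap_64() is an unconditional byte swap, so on a little-endian host
# bswap_64(native_le) equals the big-endian value:
swapped = int.from_bytes(native_le.to_bytes(8, "little")[::-1], "little")
print(swapped == big_endian)                # True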
diff --git a/meta/recipes-kernel/kmod/kmod/run-ptest b/meta/recipes-kernel/kmod/kmod/run-ptest
index 37adec3d2f..598dd2cb5f 100755
--- a/meta/recipes-kernel/kmod/kmod/run-ptest
+++ b/meta/recipes-kernel/kmod/kmod/run-ptest
@@ -1,3 +1,5 @@
#!/bin/sh
touch testsuite/stamp-rootfs
+tar xf testmodule.tar
make -k runtest-TESTS 2>/dev/null| grep -e ^PASS -e ^FAIL
+find testsuite -name *.ko -exec rm -f {} \;
diff --git a/meta/recipes-kernel/kmod/kmod_git.bb b/meta/recipes-kernel/kmod/kmod_git.bb
index d4c21a4387..08dd81569c 100644
--- a/meta/recipes-kernel/kmod/kmod_git.bb
+++ b/meta/recipes-kernel/kmod/kmod_git.bb
@@ -34,6 +34,10 @@ do_install_append () {
# install depmod.d file for search/ dir
install -Dm644 "${WORKDIR}/depmod-search.conf" "${D}${base_libdir}/depmod.d/search.conf"
+ if ${@base_contains('DISTRO_FEATURES', 'ptest', 'true', 'false', d)}; then
+ find testsuite -name *.ko -exec tar rf testmodule.tar {} \;
+ find testsuite -name *.ko -exec rm -f {} \;
+ fi
}
do_compile_prepend() {
@@ -44,7 +48,10 @@ do_compile_ptest () {
oe_runmake buildtest-TESTS rootfs
}
-INHIBIT_PACKAGE_STRIP = "${@bb.utils.contains("DISTRO_FEATURES", "ptest", "1", "0", d)}"
+do_install_ptest () {
+ install testmodule.tar ${D}${PTEST_PATH}
+}
+
INSANE_SKIP_${PN}-ptest = "arch"
inherit update-alternatives
diff --git a/meta/recipes-kernel/latencytop/latencytop_0.5.bb b/meta/recipes-kernel/latencytop/latencytop_0.5.bb
index 516e2c5267..e910a96f43 100644
--- a/meta/recipes-kernel/latencytop/latencytop_0.5.bb
+++ b/meta/recipes-kernel/latencytop/latencytop_0.5.bb
@@ -4,7 +4,7 @@ HOMEPAGE = "http://www.latencytop.org/"
LICENSE = "GPLv2"
LIC_FILES_CHKSUM = "file://latencytop.c;endline=23;md5=ee9ea9b1415356e5734adad4a87dc7fa"
-DEPENDS = "virtual/libintl ncurses glib-2.0 ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'gtk+', '', d)}"
+DEPENDS = "virtual/libintl ncurses glib-2.0"
PR = "r3"
@@ -15,7 +15,11 @@ SRC_URI = "http://www.latencytop.org/download/latencytop-${PV}.tar.gz \
SRC_URI[md5sum] = "73bb3371c6ee0b0e68e25289027e865c"
SRC_URI[sha256sum] = "9e7f72fbea7bd918e71212a1eabaad8488d2c602205d2e3c95d62cd57e9203ef"
-EXTRA_OEMAKE_X = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'HAS_GTK_GUI=1', '', d)}"
+PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11', '', d)}"
+
+PACKAGECONFIG[x11] = ",,gtk+"
+
+EXTRA_OEMAKE_X = "${@bb.utils.contains('PACKAGECONFIG', 'x11', 'HAS_GTK_GUI=1', '', d)}"
CFLAGS += "${LDFLAGS}"
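
The latencytop change above moves the GTK+ UI behind PACKAGECONFIG[x11], so gtk+ is only pulled into DEPENDS when the x11 feature is enabled, and EXTRA_OEMAKE_X now keys off PACKAGECONFIG rather than DISTRO_FEATURES directly, keeping the GUI decision in one place. The ${@bb.utils.contains(...)} expressions evaluate to their "true" argument only when every requested item appears in the named variable's space-separated value; a rough Python stand-in for that check (a sketch of the semantics, not BitBake's own implementation):

# Rough stand-in for the bb.utils.contains() calls used in the recipe above.
def contains(variable_value, checkvalues, truevalue, falsevalue):
    wanted = set(checkvalues.split())
    present = set(variable_value.split())
    return truevalue if wanted <= present else falsevalue

distro_features = "alsa ipv6 wayland x11"
packageconfig = contains(distro_features, "x11", "x11", "")
print(packageconfig)                                          # x11
print(contains(packageconfig, "x11", "HAS_GTK_GUI=1", ""))    # HAS_GTK_GUI=1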
diff --git a/meta/recipes-kernel/linux-firmware/linux-firmware_git.bb b/meta/recipes-kernel/linux-firmware/linux-firmware_git.bb
index a107f804fa..16ebf6ad6b 100644
--- a/meta/recipes-kernel/linux-firmware/linux-firmware_git.bb
+++ b/meta/recipes-kernel/linux-firmware/linux-firmware_git.bb
@@ -12,7 +12,7 @@ LIC_FILES_CHKSUM = "file://LICENSE.radeon;md5=07b0c31777bd686d8e1609c6940b5e74\
file://LICENCE.xc5000;md5=1e170c13175323c32c7f4d0998d53f66 \
file://LICENCE.ralink-firmware.txt;md5=ab2c269277c45476fb449673911a2dfd \
file://LICENCE.qla2xxx;md5=f5ce8529ec5c17cb7f911d2721d90e91 \
- file://LICENCE.iwlwifi_firmware;md5=8b938534f77ffd453690eb34ed84ae8b \
+ file://LICENCE.iwlwifi_firmware;md5=5106226b2863d00d8ed553221ddf8cd2 \
file://LICENCE.i2400m;md5=14b901969e23c41881327c0d9e4b7d36 \
file://LICENCE.atheros_firmware;md5=30a14c7823beedac9fa39c64fdd01a13 \
file://LICENCE.agere;md5=af0133de6b4a9b2522defd5f188afd31 \
@@ -24,7 +24,7 @@ LIC_FILES_CHKSUM = "file://LICENSE.radeon;md5=07b0c31777bd686d8e1609c6940b5e74\
file://LICENCE.Marvell;md5=9ddea1734a4baf3c78d845151f42a37a \
"
-SRCREV = "dec41bce44e0dff6a2c3358a958fadf22bf58858"
+SRCREV = "0e5f63771d0df6d7859f7c4100a74d737c62ac88"
PE = "1"
PV = "0.0+git${SRCPV}"
@@ -34,6 +34,8 @@ S = "${WORKDIR}/git"
inherit allarch update-alternatives
+CLEANBROKEN = "1"
+
do_compile() {
:
}
@@ -42,7 +44,7 @@ do_install() {
install -d ${D}/lib/firmware/
cp -r * ${D}/lib/firmware/
- # Avoid Makefile to be deplyed
+ # Avoid Makefile to be deployed
rm ${D}/lib/firmware/Makefile
# Remove unbuild firmware which needs cmake and bash
@@ -67,9 +69,13 @@ PACKAGES =+ "${PN}-ralink \
${PN}-rtl-license ${PN}-rtl8192cu ${PN}-rtl8192ce ${PN}-rtl8192su \
${PN}-broadcom-license ${PN}-bcm4329 ${PN}-bcm4330 ${PN}-bcm4334 \
${PN}-atheros-license ${PN}-ar9170 ${PN}-ar3k ${PN}-ath6k ${PN}-ath9k \
- ${PN}-iwlwifi-license ${PN}-iwlwifi-6000g2a-5 ${PN}-iwlwifi-6000g2b-6 ${PN}-iwlwifi-7260-7 \
- ${PN}-iwlwifi-6000g2a-6 ${PN}-iwlwifi-135-6"
-
+ ${PN}-iwlwifi-license ${PN}-iwlwifi-135-6 \
+ ${PN}-iwlwifi-3160-7 ${PN}-iwlwifi-3160-8 ${PN}-iwlwifi-3160-9 \
+ ${PN}-iwlwifi-6000-4 ${PN}-iwlwifi-6000g2a-5 ${PN}-iwlwifi-6000g2a-6 ${PN}-iwlwifi-6000g2b-5 ${PN}-iwlwifi-6000g2b-6 \
+ ${PN}-iwlwifi-6050-4 ${PN}-iwlwifi-6050-5 \
+ ${PN}-iwlwifi-7260-7 ${PN}-iwlwifi-7260-8 ${PN}-iwlwifi-7260-9 \
+ ${PN}-iwlwifi-7265-8 ${PN}-iwlwifi-7265-9 \
+ "
FILES_${PN}-atheros-license = "/lib/firmware/LICENCE.atheros_firmware"
@@ -208,18 +214,40 @@ RDEPENDS_${PN}-bcm4334 += "${PN}-broadcom-license"
ALTERNATIVE_linux-firmware-bcm4334 = "brcmfmac-sdio.bin"
ALTERNATIVE_TARGET_linux-firmware-bcm4334[brcmfmac-sdio.bin] = "/lib/firmware/brcm/brcmfmac4334-sdio.bin"
+RDEPENDS_${PN}-iwlwifi-135-6 = "${PN}-iwlwifi-license"
+RDEPENDS_${PN}-iwlwifi-3160-7 = "${PN}-iwlwifi-license"
+RDEPENDS_${PN}-iwlwifi-3160-8 = "${PN}-iwlwifi-license"
+RDEPENDS_${PN}-iwlwifi-3160-9 = "${PN}-iwlwifi-license"
+RDEPENDS_${PN}-iwlwifi-6000-4 = "${PN}-iwlwifi-license"
RDEPENDS_${PN}-iwlwifi-6000g2a-5 = "${PN}-iwlwifi-license"
RDEPENDS_${PN}-iwlwifi-6000g2a-6 = "${PN}-iwlwifi-license"
+RDEPENDS_${PN}-iwlwifi-6000g2a-5 = "${PN}-iwlwifi-license"
RDEPENDS_${PN}-iwlwifi-6000g2b-6 = "${PN}-iwlwifi-license"
-RDEPENDS_${PN}-iwlwifi-135-6 = "${PN}-iwlwifi-license"
-RDEPENDS_${PN}-iwlwifi-7260-7 = "${PN}-iwlwifi-license"
-
-FILES_${PN}-iwlwifi-license = "/lib/firmware/LICENCE.iwlwifi_firmware"
+RDEPENDS_${PN}-iwlwifi-6050-4 = "${PN}-iwlwifi-license"
+RDEPENDS_${PN}-iwlwifi-6050-5 = "${PN}-iwlwifi-license"
+RDEPENDS_${PN}-iwlwifi-7260-7 = "${PN}-iwlwifi-license"
+RDEPENDS_${PN}-iwlwifi-7260-8 = "${PN}-iwlwifi-license"
+RDEPENDS_${PN}-iwlwifi-7260-9 = "${PN}-iwlwifi-license"
+RDEPENDS_${PN}-iwlwifi-7265-8 = "${PN}-iwlwifi-license"
+RDEPENDS_${PN}-iwlwifi-7265-9 = "${PN}-iwlwifi-license"
+
+FILES_${PN}-iwlwifi-license = "/lib/firmware/LICENCE.iwlwifi_firmware"
+FILES_${PN}-iwlwifi-135-6 = "/lib/firmware/iwlwifi-135-6.ucode"
+FILES_${PN}-iwlwifi-3160-7 = "/lib/firmware/iwlwifi-3160-7.ucode"
+FILES_${PN}-iwlwifi-3160-8 = "/lib/firmware/iwlwifi-3160-8.ucode"
+FILES_${PN}-iwlwifi-3160-9 = "/lib/firmware/iwlwifi-3160-9.ucode"
+FILES_${PN}-iwlwifi-6000-4 = "/lib/firmware/iwlwifi-6000-4.ucode"
FILES_${PN}-iwlwifi-6000g2a-5 = "/lib/firmware/iwlwifi-6000g2a-5.ucode"
FILES_${PN}-iwlwifi-6000g2a-6 = "/lib/firmware/iwlwifi-6000g2a-6.ucode"
+FILES_${PN}-iwlwifi-6000g2b-5 = "/lib/firmware/iwlwifi-6000g2b-5.ucode"
FILES_${PN}-iwlwifi-6000g2b-6 = "/lib/firmware/iwlwifi-6000g2b-6.ucode"
-FILES_${PN}-iwlwifi-135-6 = "/lib/firmware/iwlwifi-135-6.ucode"
+FILES_${PN}-iwlwifi-6050-4 = "/lib/firmware/iwlwifi-6050-4.ucode"
+FILES_${PN}-iwlwifi-6050-5 = "/lib/firmware/iwlwifi-6050-5.ucode"
FILES_${PN}-iwlwifi-7260-7 = "/lib/firmware/iwlwifi-7260-7.ucode"
+FILES_${PN}-iwlwifi-7260-8 = "/lib/firmware/iwlwifi-7260-8.ucode"
+FILES_${PN}-iwlwifi-7260-9 = "/lib/firmware/iwlwifi-7260-9.ucode"
+FILES_${PN}-iwlwifi-7265-8 = "/lib/firmware/iwlwifi-7265-8.ucode"
+FILES_${PN}-iwlwifi-7265-9 = "/lib/firmware/iwlwifi-7265-9.ucode"
FILES_${PN} += "/lib/firmware/*"
diff --git a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_3.14.bb b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_3.14.bb
deleted file mode 100644
index 9ac70af942..0000000000
--- a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_3.14.bb
+++ /dev/null
@@ -1,7 +0,0 @@
-KORG_ARCHIVE_COMPRESSION = "xz"
-
-require linux-libc-headers.inc
-
-SRC_URI[md5sum] = "b621207b3f6ecbb67db18b13258f8ea8"
-SRC_URI[sha256sum] = "61558aa490855f42b6340d1a1596be47454909629327c49a5e4e10268065dffa"
-
diff --git a/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_3.17.7.bb b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_3.17.7.bb
new file mode 100644
index 0000000000..7332597332
--- /dev/null
+++ b/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_3.17.7.bb
@@ -0,0 +1,7 @@
+KORG_ARCHIVE_COMPRESSION = "xz"
+
+require linux-libc-headers.inc
+
+SRC_URI[md5sum] = "59cc7d0a236b08de743d6d9421f0f2c6"
+SRC_URI[sha256sum] = "a3c8e6aee912bfd2e69abf9df5a06058434a8771bcaa1784b5980bf1b5298e1c"
+
diff --git a/meta/recipes-kernel/linux/kernel-devsrc.bb b/meta/recipes-kernel/linux/kernel-devsrc.bb
new file mode 100644
index 0000000000..dbb3a5c581
--- /dev/null
+++ b/meta/recipes-kernel/linux/kernel-devsrc.bb
@@ -0,0 +1,65 @@
+SUMMARY = "Linux kernel Development Source"
+DESCRIPTION = "Development source for the Linux kernel. When built, this recipe packages the \
+source of the preferred virtual/kernel provider and makes it available for full kernel \
+development or external module builds"
+
+SECTION = "kernel"
+
+LICENSE = "GPLv2"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/GPL-2.0;md5=801f80980d171dd6425610833a22dbe6"
+
+inherit linux-kernel-base
+
+# Whilst not a module, this ensures we don't get multilib extended (which would make no sense)
+inherit module-base
+
+# We need the kernel to be staged (unpacked, patched and configured) before
+# we can grab the source and make the kernel-devsrc package
+do_install[depends] += "virtual/kernel:do_populate_sysroot"
+# Need the source, not just the output of populate_sysroot
+do_install[depends] += "virtual/kernel:do_configure"
+
+# There's nothing to do here, except install the source where we can package it
+do_fetch[noexec] = "1"
+do_unpack[noexec] = "1"
+do_patch[noexec] = "1"
+do_configure[noexec] = "1"
+do_compile[noexec] = "1"
+do_populate_sysroot[noexec] = "1"
+
+# Define where the kernel headers are installed on the target as well as where
+# they are staged.
+KERNEL_SRC_PATH = "/usr/src/kernel"
+S = "${STAGING_DIR_TARGET}/${KERNEL_SRC_PATH}"
+
+KERNEL_VERSION = "${@get_kernelversion_headers('${S}')}"
+
+PACKAGE_ARCH = "${MACHINE_ARCH}"
+
+do_install() {
+ kerneldir=${D}${KERNEL_SRC_PATH}
+ install -d $kerneldir
+
+ #
+ # Copy the staging dir source (and module build support) into the devsrc structure.
+	# We can keep this copy simple and take everything, since we'll clean up any build
+ # artifacts afterwards, and the extra i/o is not significant
+ #
+ cd ${S}
+ find . -type d -name '.git*' -prune -o -type f -print0 | cpio --null -pdlu $kerneldir
+ oe_runmake -C $kerneldir CC="${KERNEL_CC}" LD="${KERNEL_LD}" clean _mrproper_scripts
+
+ # As of Linux kernel version 3.0.1, the clean target removes
+ # arch/powerpc/lib/crtsavres.o which is present in
+ # KBUILD_LDFLAGS_MODULE, making it required to build external modules.
+ if [ ${ARCH} = "powerpc" ]; then
+ mkdir -p $kerneldir/arch/powerpc/lib/
+ cp ${S}/arch/powerpc/lib/crtsavres.o $kerneldir/arch/powerpc/lib/crtsavres.o
+ fi
+}
+# Ensure we don't race against "make scripts" during cpio
+do_install[lockfiles] = "${TMPDIR}/kernel-scripts.lock"
+
+PACKAGES = "kernel-devsrc"
+FILES_${PN} = "${KERNEL_SRC_PATH}"
+RDEPENDS_${PN} = "bc"
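
The new kernel-devsrc recipe above copies the already configured kernel tree out of the sysroot with find | cpio, pruning .git directories and hardlinking files, then runs make clean to strip build artifacts. A loose Python equivalent of just the copy-and-prune step (both paths are hypothetical):

# Loose sketch of the "find . -type d -name '.git*' -prune -o -type f -print0 |
# cpio --null -pdlu" pipeline in do_install(): copy regular files, skip .git*.
import os
import shutil

def copy_tree_pruned(src, dst):
    for root, dirs, files in os.walk(src):
        dirs[:] = [d for d in dirs if not d.startswith(".git")]   # prune .git*
        for name in files:
            s = os.path.join(root, name)
            d = os.path.join(dst, os.path.relpath(s, src))
            os.makedirs(os.path.dirname(d), exist_ok=True)
            shutil.copy2(s, d)       # cpio -pdlu hardlinks; a plain copy here

copy_tree_pruned("/build/tmp/sysroots/qemux86/usr/src/kernel",
                 "/build/tmp/work/kernel-devsrc/image/usr/src/kernel")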
diff --git a/meta/recipes-kernel/linux/linux-dummy.bb b/meta/recipes-kernel/linux/linux-dummy.bb
index 5d29f5cc12..8f25f17e6f 100644
--- a/meta/recipes-kernel/linux/linux-dummy.bb
+++ b/meta/recipes-kernel/linux/linux-dummy.bb
@@ -45,7 +45,6 @@ do_deploy() {
:
}
-do_bundle_initramfs[nostamp] = "1"
-addtask bundle_initramfs after do_compile
+addtask bundle_initramfs after do_install before do_deploy
addtask deploy after do_install
diff --git a/meta/recipes-kernel/linux/linux-yocto-dev.bb b/meta/recipes-kernel/linux/linux-yocto-dev.bb
index 10f3d234ed..00feadedd8 100644
--- a/meta/recipes-kernel/linux/linux-yocto-dev.bb
+++ b/meta/recipes-kernel/linux/linux-yocto-dev.bb
@@ -23,7 +23,6 @@ python () {
}
KBRANCH = "standard/base"
-KBRANCH_DEFAULT = "${KBRANCH}"
KMETA = "meta"
SRC_URI = "git://git.yoctoproject.org/linux-yocto-dev.git;bareclone=1;branch=${KBRANCH},${KMETA};name=machine,meta"
@@ -36,7 +35,7 @@ SRC_URI = "git://git.yoctoproject.org/linux-yocto-dev.git;bareclone=1;branch=${K
SRCREV_machine ?= '${@oe.utils.conditional("PREFERRED_PROVIDER_virtual/kernel", "linux-yocto-dev", "${AUTOREV}", "29594404d7fe73cd80eaa4ee8c43dcc53970c60e", d)}'
SRCREV_meta ?= '${@oe.utils.conditional("PREFERRED_PROVIDER_virtual/kernel", "linux-yocto-dev", "${AUTOREV}", "29594404d7fe73cd80eaa4ee8c43dcc53970c60e", d)}'
-LINUX_VERSION ?= "3.16+"
+LINUX_VERSION ?= "3.17+"
LINUX_VERSION_EXTENSION ?= "-yoctodev-${LINUX_KERNEL_TYPE}"
PV = "${LINUX_VERSION}+git${SRCPV}"
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_3.10.bb b/meta/recipes-kernel/linux/linux-yocto-rt_3.10.bb
index 65a74d3ec7..8acbd2e59b 100644
--- a/meta/recipes-kernel/linux/linux-yocto-rt_3.10.bb
+++ b/meta/recipes-kernel/linux/linux-yocto-rt_3.10.bb
@@ -1,15 +1,15 @@
-require recipes-kernel/linux/linux-yocto.inc
+KBRANCH ?= "standard/preempt-rt/base"
+KBRANCH_qemuppc ?= "standard/preempt-rt/qemuppc"
-KBRANCH = "standard/preempt-rt/base"
-KBRANCH_qemuppc = "standard/preempt-rt/qemuppc"
+require recipes-kernel/linux/linux-yocto.inc
-SRCREV_machine ?= "c373b342af7bcbeda346df9598a5b1e9784d362f"
-SRCREV_machine_qemuppc ?= "767da5a30782cc1bc6522cef8aa5fd8130ecc06c"
-SRCREV_meta ?= "199943142f7e0a283240246ee6c02f4376b315f0"
+SRCREV_machine ?= "94a79d98e40b51466600fb3375ad2908c38dd192"
+SRCREV_machine_qemuppc ?= "c1dcbac7751652784f180fdb91f87bc37fba8c52"
+SRCREV_meta ?= "f4ab00d96a3d8e443d7f7744ad996e184eac03b5"
SRC_URI = "git://git.yoctoproject.org/linux-yocto-3.10.git;bareclone=1;branch=${KBRANCH},meta;name=machine,meta"
-LINUX_VERSION ?= "3.10.43"
+LINUX_VERSION ?= "3.10.62"
PV = "${LINUX_VERSION}+git${SRCPV}"
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_3.14.bb b/meta/recipes-kernel/linux/linux-yocto-rt_3.14.bb
index 77215f6a51..7dbf82c3e6 100644
--- a/meta/recipes-kernel/linux/linux-yocto-rt_3.14.bb
+++ b/meta/recipes-kernel/linux/linux-yocto-rt_3.14.bb
@@ -1,15 +1,15 @@
-require recipes-kernel/linux/linux-yocto.inc
+KBRANCH ?= "standard/preempt-rt/base"
+KBRANCH_qemuppc ?= "standard/preempt-rt/qemuppc"
-KBRANCH = "standard/preempt-rt/base"
-KBRANCH_qemuppc = "standard/preempt-rt/qemuppc"
+require recipes-kernel/linux/linux-yocto.inc
-SRCREV_machine ?= "8ef1733b66a1646b85338a310f787e0057a6a4e9"
-SRCREV_machine_qemuppc ?= "3079c794c30b0de82bc87b19cf477d82405a9094"
-SRCREV_meta ?= "b2af4e3528e65583c98f3a08c6edb0cad7a120b0"
+SRCREV_machine ?= "7f0712d10247ffca8e48e944f49707bcf9117ead"
+SRCREV_machine_qemuppc ?= "8bed2a975d491c963cff56496f7e35f5bcff926f"
+SRCREV_meta ?= "a227f20eff056e511d504b2e490f3774ab260d6f"
SRC_URI = "git://git.yoctoproject.org/linux-yocto-3.14.git;bareclone=1;branch=${KBRANCH},meta;name=machine,meta"
-LINUX_VERSION ?= "3.14.5"
+LINUX_VERSION ?= "3.14.24"
PV = "${LINUX_VERSION}+git${SRCPV}"
diff --git a/meta/recipes-kernel/linux/linux-yocto-rt_3.4.bb b/meta/recipes-kernel/linux/linux-yocto-rt_3.4.bb
deleted file mode 100644
index e01ffeec7a..0000000000
--- a/meta/recipes-kernel/linux/linux-yocto-rt_3.4.bb
+++ /dev/null
@@ -1,30 +0,0 @@
-require recipes-kernel/linux/linux-yocto.inc
-
-KBRANCH = "standard/preempt-rt/base"
-KBRANCH_qemuppc = "standard/preempt-rt/qemuppc"
-
-LINUX_VERSION ?= "3.4.91"
-LINUX_KERNEL_TYPE = "preempt-rt"
-
-KMETA = "meta"
-
-SRCREV_machine ?= "5993b0a650d24f810edb22bab577287d0c6d4f4b"
-SRCREV_machine_qemuppc ?= "765d61e4883bb6db3be516722465f2e8d1fc73cd"
-SRCREV_meta ?= "a8742041c8b9f447d4ad4c3f478e022f1e4bfcfd"
-
-PR = "${INC_PR}.1"
-PV = "${LINUX_VERSION}+git${SRCPV}"
-
-SRC_URI = "git://git.yoctoproject.org/linux-yocto-3.4.git;bareclone=1;branch=${KBRANCH},meta;name=machine,meta"
-
-# Omit broken machines from COMPATIBLE_MACHINE
-# qemuppc hangs at boot
-# qemumips panics at boot
-COMPATIBLE_MACHINE = "(qemux86|qemux86-64|qemuarm)"
-
-# Functionality flags
-KERNEL_FEATURES_append = " features/netfilter/netfilter.scc"
-KERNEL_FEATURES_append = " features/taskstats/taskstats.scc"
-KERNEL_FEATURES_append_qemux86 = " cfg/sound.scc"
-KERNEL_FEATURES_append_qemux86-64 = " cfg/sound.scc"
-KERNEL_FEATURES_append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "" ,d)}"
diff --git a/meta/recipes-kernel/linux/linux-yocto-tiny_3.10.bb b/meta/recipes-kernel/linux/linux-yocto-tiny_3.10.bb
index b70d7bc081..9ea81b8874 100644
--- a/meta/recipes-kernel/linux/linux-yocto-tiny_3.10.bb
+++ b/meta/recipes-kernel/linux/linux-yocto-tiny_3.10.bb
@@ -1,16 +1,16 @@
+KBRANCH ?= "standard/tiny/base"
+
require recipes-kernel/linux/linux-yocto.inc
-KBRANCH_DEFAULT = "standard/tiny/base"
-KBRANCH = "${KBRANCH_DEFAULT}"
LINUX_KERNEL_TYPE = "tiny"
KCONFIG_MODE = "--allnoconfig"
-LINUX_VERSION ?= "3.10.43"
+LINUX_VERSION ?= "3.10.62"
KMETA = "meta"
-SRCREV_machine ?= "aa677a2d02677ec92d59a8c36d001cf2f5cf3260"
-SRCREV_meta ?= "199943142f7e0a283240246ee6c02f4376b315f0"
+SRCREV_machine ?= "b2ac933df119a3444a32fcccf5e4ad453f5ac89d"
+SRCREV_meta ?= "f4ab00d96a3d8e443d7f7744ad996e184eac03b5"
PV = "${LINUX_VERSION}+git${SRCPV}"
diff --git a/meta/recipes-kernel/linux/linux-yocto-tiny_3.14.bb b/meta/recipes-kernel/linux/linux-yocto-tiny_3.14.bb
index e28054fc4c..67bf4624f8 100644
--- a/meta/recipes-kernel/linux/linux-yocto-tiny_3.14.bb
+++ b/meta/recipes-kernel/linux/linux-yocto-tiny_3.14.bb
@@ -1,15 +1,15 @@
-require recipes-kernel/linux/linux-yocto.inc
-
-KBRANCH = "standard/tiny/base"
+KBRANCH ?= "standard/tiny/base"
LINUX_KERNEL_TYPE = "tiny"
KCONFIG_MODE = "--allnoconfig"
-LINUX_VERSION ?= "3.14.5"
+require recipes-kernel/linux/linux-yocto.inc
+
+LINUX_VERSION ?= "3.14.24"
KMETA = "meta"
-SRCREV_machine ?= "5724bf17acbf54cf61003ab242448fd96d189384"
-SRCREV_meta ?= "b2af4e3528e65583c98f3a08c6edb0cad7a120b0"
+SRCREV_machine ?= "02120556b0ebc20c30374ccf211e8e4ceac2bb1c"
+SRCREV_meta ?= "a227f20eff056e511d504b2e490f3774ab260d6f"
PV = "${LINUX_VERSION}+git${SRCPV}"
diff --git a/meta/recipes-kernel/linux/linux-yocto-tiny_3.17.bb b/meta/recipes-kernel/linux/linux-yocto-tiny_3.17.bb
new file mode 100644
index 0000000000..1a4f9a8145
--- /dev/null
+++ b/meta/recipes-kernel/linux/linux-yocto-tiny_3.17.bb
@@ -0,0 +1,21 @@
+KBRANCH ?= "standard/tiny/base"
+LINUX_KERNEL_TYPE = "tiny"
+KCONFIG_MODE = "--allnoconfig"
+
+require recipes-kernel/linux/linux-yocto.inc
+
+LINUX_VERSION ?= "3.17.6"
+
+KMETA = "meta"
+
+SRCREV_machine ?= "5ff54d8fbf74278e9e5074cbba516a14f0915ff7"
+SRCREV_meta ?= "b81030f9ec2de3dc6c048e142dcbff62e305cc40"
+
+PV = "${LINUX_VERSION}+git${SRCPV}"
+
+SRC_URI = "git://git.yoctoproject.org/linux-yocto-dev.git;bareclone=1;branch=${KBRANCH},meta;name=machine,meta"
+
+COMPATIBLE_MACHINE = "(qemux86)"
+
+# Functionality flags
+KERNEL_FEATURES = ""
diff --git a/meta/recipes-kernel/linux/linux-yocto-tiny_3.4.bb b/meta/recipes-kernel/linux/linux-yocto-tiny_3.4.bb
deleted file mode 100644
index fd5c9f7696..0000000000
--- a/meta/recipes-kernel/linux/linux-yocto-tiny_3.4.bb
+++ /dev/null
@@ -1,26 +0,0 @@
-require recipes-kernel/linux/linux-yocto.inc
-
-# We need lzma (as CONFIG_KERNEL_LZMA=y)
-DEPENDS += "xz-native"
-
-KBRANCH_DEFAULT = "standard/tiny/base"
-KBRANCH = "${KBRANCH_DEFAULT}"
-LINUX_KERNEL_TYPE = "tiny"
-KCONFIG_MODE = "--allnoconfig"
-
-LINUX_VERSION ?= "3.4.91"
-
-KMETA = "meta"
-
-SRCREV_machine ?= "498189ccb98f833daa2092ceee72da8c878e0009"
-SRCREV_meta ?= "a8742041c8b9f447d4ad4c3f478e022f1e4bfcfd"
-
-PR = "${INC_PR}.1"
-PV = "${LINUX_VERSION}+git${SRCPV}"
-
-SRC_URI = "git://git.yoctoproject.org/linux-yocto-3.4.git;bareclone=1;branch=${KBRANCH},meta;name=machine,meta"
-
-COMPATIBLE_MACHINE = "(qemux86)"
-
-# Functionality flags
-KERNEL_FEATURES = ""
diff --git a/meta/recipes-kernel/linux/linux-yocto.inc b/meta/recipes-kernel/linux/linux-yocto.inc
index 4ed318886f..7416af99f5 100644
--- a/meta/recipes-kernel/linux/linux-yocto.inc
+++ b/meta/recipes-kernel/linux/linux-yocto.inc
@@ -7,6 +7,9 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=d7810fab7487fb0aad327b76f1be7cd7"
INC_PR = "r4"
DEPENDS += "xz-native bc-native"
+DEPENDS_append_aarch64 = " libgcc"
+KERNEL_CC_append_aarch64 = " ${TOOLCHAIN_OPTIONS}"
+KERNEL_LD_append_aarch64 = " ${TOOLCHAIN_OPTIONS}"
# A KMACHINE is the mapping of a yocto $MACHINE to what is built
# by the kernel. This is typically the branch that should be built,
diff --git a/meta/recipes-kernel/linux/linux-yocto_3.10.bb b/meta/recipes-kernel/linux/linux-yocto_3.10.bb
index 57395b4ebb..978775a7d3 100644
--- a/meta/recipes-kernel/linux/linux-yocto_3.10.bb
+++ b/meta/recipes-kernel/linux/linux-yocto_3.10.bb
@@ -1,28 +1,27 @@
-require recipes-kernel/linux/linux-yocto.inc
+KBRANCH ?= "standard/base"
-KBRANCH_DEFAULT = "standard/base"
-KBRANCH = "${KBRANCH_DEFAULT}"
+require recipes-kernel/linux/linux-yocto.inc
# board specific branches
-KBRANCH_qemuarm = "standard/arm-versatile-926ejs"
-KBRANCH_qemumips = "standard/mti-malta32"
-KBRANCH_qemuppc = "standard/qemuppc"
-KBRANCH_qemux86 = "standard/common-pc/base"
-KBRANCH_qemux86-64 = "standard/common-pc-64/base"
-KBRANCH_qemumips64 = "standard/mti-malta64"
-
-SRCREV_machine_qemuarm ?= "db489eed7f5c86037358cc9d0fefa7b90dbbaa86"
-SRCREV_machine_qemumips ?= "780aac11b3f20ed2f5df3f173b0d02b28a6eb96b"
-SRCREV_machine_qemuppc ?= "48a9dec5fc4cc9c19d12866c6ec560a83df21d85"
-SRCREV_machine_qemux86 ?= "aa677a2d02677ec92d59a8c36d001cf2f5cf3260"
-SRCREV_machine_qemux86-64 ?= "aa677a2d02677ec92d59a8c36d001cf2f5cf3260"
-SRCREV_machine_qemumips64 ?= "d4b0d6f727167d7946e03dbfb50fef0891207bd1"
-SRCREV_machine ?= "aa677a2d02677ec92d59a8c36d001cf2f5cf3260"
-SRCREV_meta ?= "199943142f7e0a283240246ee6c02f4376b315f0"
+KBRANCH_qemuarm ?= "standard/arm-versatile-926ejs"
+KBRANCH_qemumips ?= "standard/mti-malta32"
+KBRANCH_qemuppc ?= "standard/qemuppc"
+KBRANCH_qemux86 ?= "standard/common-pc/base"
+KBRANCH_qemux86-64 ?= "standard/common-pc-64/base"
+KBRANCH_qemumips64 ?= "standard/mti-malta64"
+
+SRCREV_machine_qemuarm ?= "f6b075991c91b7c2bb641b389757863e2fd34b8c"
+SRCREV_machine_qemumips ?= "48afdc632312b6cc26fe7bca151cfb66b2ebc308"
+SRCREV_machine_qemuppc ?= "cf35ea9ac92153858dadd2f4ab71cccd3a1fa26b"
+SRCREV_machine_qemux86 ?= "b2ac933df119a3444a32fcccf5e4ad453f5ac89d"
+SRCREV_machine_qemux86-64 ?= "b2ac933df119a3444a32fcccf5e4ad453f5ac89d"
+SRCREV_machine_qemumips64 ?= "ebdb374ca5130ef456d0baf75b6fe7a242932d0d"
+SRCREV_machine ?= "b2ac933df119a3444a32fcccf5e4ad453f5ac89d"
+SRCREV_meta ?= "f4ab00d96a3d8e443d7f7744ad996e184eac03b5"
SRC_URI = "git://git.yoctoproject.org/linux-yocto-3.10.git;bareclone=1;branch=${KBRANCH},${KMETA};name=machine,meta"
-LINUX_VERSION ?= "3.10.43"
+LINUX_VERSION ?= "3.10.62"
PV = "${LINUX_VERSION}+git${SRCPV}"
diff --git a/meta/recipes-kernel/linux/linux-yocto_3.14.bb b/meta/recipes-kernel/linux/linux-yocto_3.14.bb
index 7b8b653827..5f0a09e973 100644
--- a/meta/recipes-kernel/linux/linux-yocto_3.14.bb
+++ b/meta/recipes-kernel/linux/linux-yocto_3.14.bb
@@ -1,33 +1,35 @@
-require recipes-kernel/linux/linux-yocto.inc
+KBRANCH ?= "standard/base"
-KBRANCH = "standard/base"
+require recipes-kernel/linux/linux-yocto.inc
# board specific branches
-KBRANCH_qemuarm = "standard/arm-versatile-926ejs"
-KBRANCH_qemumips = "standard/mti-malta32"
-KBRANCH_qemuppc = "standard/qemuppc"
-KBRANCH_qemux86 = "standard/common-pc/base"
-KBRANCH_qemux86-64 = "standard/common-pc-64/base"
-KBRANCH_qemumips64 = "standard/mti-malta64"
-
-SRCREV_machine_qemuarm ?= "b38b84aebf889d84e65e81ac11122b977f0c5155"
-SRCREV_machine_qemumips ?= "c9d827207d8dfab330787659b2842485dbd36d77"
-SRCREV_machine_qemuppc ?= "58b7cb00580985410ba8491c61e80d2572552ed9"
-SRCREV_machine_qemux86 ?= "5b327970eb1dba02c65cb8330dc8f3049c4fa580"
-SRCREV_machine_qemux86-64 ?= "5724bf17acbf54cf61003ab242448fd96d189384"
-SRCREV_machine_qemumips64 ?= "34837892b66eaa034cd3e3d339cab0ea6f594511"
-SRCREV_machine ?= "5724bf17acbf54cf61003ab242448fd96d189384"
-SRCREV_meta ?= "b2af4e3528e65583c98f3a08c6edb0cad7a120b0"
+KBRANCH_qemuarm ?= "standard/arm-versatile-926ejs"
+KBRANCH_qemuarm64 ?= "standard/qemuarm64"
+KBRANCH_qemumips ?= "standard/mti-malta32"
+KBRANCH_qemuppc ?= "standard/qemuppc"
+KBRANCH_qemux86 ?= "standard/common-pc/base"
+KBRANCH_qemux86-64 ?= "standard/common-pc-64/base"
+KBRANCH_qemumips64 ?= "standard/mti-malta64"
+
+SRCREV_machine_qemuarm ?= "6166316d47b859aa38bfecc61f4808828af03937"
+SRCREV_machine_qemuarm64 ?= "902f34d36102a4b2008b776ecae686f80d307e12"
+SRCREV_machine_qemumips ?= "4ececcc09c6550a0896728163907e729d817c2fd"
+SRCREV_machine_qemuppc ?= "1cc5b09f8bb7f40b289d149d370c62dcc8109501"
+SRCREV_machine_qemux86 ?= "38cd560d5022ed2dbd1ab0dca9642e47c98a0aa1"
+SRCREV_machine_qemux86-64 ?= "02120556b0ebc20c30374ccf211e8e4ceac2bb1c"
+SRCREV_machine_qemumips64 ?= "737272b1dfd361d9ea19812a9717e2798e3c4576"
+SRCREV_machine ?= "02120556b0ebc20c30374ccf211e8e4ceac2bb1c"
+SRCREV_meta ?= "a227f20eff056e511d504b2e490f3774ab260d6f"
SRC_URI = "git://git.yoctoproject.org/linux-yocto-3.14.git;bareclone=1;branch=${KBRANCH},${KMETA};name=machine,meta"
-LINUX_VERSION ?= "3.14.5"
+LINUX_VERSION ?= "3.14.24"
PV = "${LINUX_VERSION}+git${SRCPV}"
KMETA = "meta"
-COMPATIBLE_MACHINE = "qemuarm|qemux86|qemuppc|qemumips|qemumips64|qemux86-64"
+COMPATIBLE_MACHINE = "qemuarm|qemuarm64|qemux86|qemuppc|qemumips|qemumips64|qemux86-64"
# Functionality flags
KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc"
diff --git a/meta/recipes-kernel/linux/linux-yocto_3.17.bb b/meta/recipes-kernel/linux/linux-yocto_3.17.bb
new file mode 100644
index 0000000000..973d8aaa84
--- /dev/null
+++ b/meta/recipes-kernel/linux/linux-yocto_3.17.bb
@@ -0,0 +1,39 @@
+KBRANCH ?= "standard/base"
+
+require recipes-kernel/linux/linux-yocto.inc
+
+# board specific branches
+KBRANCH_qemuarm ?= "standard/arm-versatile-926ejs"
+KBRANCH_qemuarm64 ?= "standard/qemuarm64"
+KBRANCH_qemumips ?= "standard/mti-malta32"
+KBRANCH_qemuppc ?= "standard/qemuppc"
+KBRANCH_qemux86 ?= "standard/common-pc"
+KBRANCH_qemux86-64 ?= "standard/common-pc-64/base"
+KBRANCH_qemumips64 ?= "standard/mti-malta64"
+
+SRCREV_machine_qemuarm ?= "a1183153c56ace9f9140c73ce730ec181f78dc0f"
+SRCREV_machine_qemuarm64 ?= "268b7677421eef003a84f31c6bd0b8ec3acc1e36"
+SRCREV_machine_qemumips ?= "18c8db2009f25524a29a5258d041e0442c10817d"
+SRCREV_machine_qemuppc ?= "6d2bbdac0e6560542fa2252d91eee9ae0f17f9c7"
+SRCREV_machine_qemux86 ?= "5ff54d8fbf74278e9e5074cbba516a14f0915ff7"
+SRCREV_machine_qemux86-64 ?= "5ff54d8fbf74278e9e5074cbba516a14f0915ff7"
+SRCREV_machine_qemumips64 ?= "2306cb2dd019c8b731879331ba314035a7271e7b"
+SRCREV_machine ?= "5ff54d8fbf74278e9e5074cbba516a14f0915ff7"
+SRCREV_meta ?= "b81030f9ec2de3dc6c048e142dcbff62e305cc40"
+
+SRC_URI = "git://git.yoctoproject.org/linux-yocto-3.17.git;bareclone=1;branch=${KBRANCH},${KMETA};name=machine,meta"
+
+LINUX_VERSION ?= "3.17.6"
+
+PV = "${LINUX_VERSION}+git${SRCPV}"
+
+KMETA = "meta"
+
+COMPATIBLE_MACHINE = "qemuarm|qemuarm64|qemux86|qemuppc|qemumips|qemumips64|qemux86-64"
+
+# Functionality flags
+KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc"
+KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
+KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
+KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc cfg/paravirt_kvm.scc"
+KERNEL_FEATURES_append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "" ,d)}"
diff --git a/meta/recipes-kernel/linux/linux-yocto_3.4.bb b/meta/recipes-kernel/linux/linux-yocto_3.4.bb
deleted file mode 100644
index 1eabf764ef..0000000000
--- a/meta/recipes-kernel/linux/linux-yocto_3.4.bb
+++ /dev/null
@@ -1,38 +0,0 @@
-require recipes-kernel/linux/linux-yocto.inc
-
-KBRANCH_DEFAULT = "standard/base"
-KBRANCH = "${KBRANCH_DEFAULT}"
-
-# board specific branches
-KBRANCH_qemuarm = "standard/arm-versatile-926ejs"
-KBRANCH_qemumips = "standard/mti-malta32"
-KBRANCH_qemuppc = "standard/qemuppc"
-KBRANCH_qemux86 = "standard/common-pc/base"
-KBRANCH_qemux86-64 = "standard/common-pc-64/base"
-KBRANCH_qemumips64 = "standard/mti-malta64"
-
-SRCREV_machine_qemuarm ?= "192b56b2f529af1be014ea85667c9f3fea0afd53"
-SRCREV_machine_qemumips ?= "58fb8b8fb2f9911cac84cd840d63c8a58bada6ca"
-SRCREV_machine_qemuppc ?= "4fe50989bc8bcb3564ca37c2cffd42ac176b428d"
-SRCREV_machine_qemux86 ?= "498189ccb98f833daa2092ceee72da8c878e0009"
-SRCREV_machine_qemux86-64 ?= "498189ccb98f833daa2092ceee72da8c878e0009"
-SRCREV_machine ?= "498189ccb98f833daa2092ceee72da8c878e0009"
-SRCREV_meta ?= "a8742041c8b9f447d4ad4c3f478e022f1e4bfcfd"
-
-SRC_URI = "git://git.yoctoproject.org/linux-yocto-3.4.git;bareclone=1;branch=${KBRANCH},${KMETA};name=machine,meta"
-
-LINUX_VERSION ?= "3.4.91"
-
-PR = "${INC_PR}.5"
-PV = "${LINUX_VERSION}+git${SRCPV}"
-
-KMETA = "meta"
-
-COMPATIBLE_MACHINE = "qemuarm|qemux86|qemuppc|qemumips|qemux86-64"
-
-# Functionality flags
-KERNEL_EXTRA_FEATURES ?= "features/netfilter/netfilter.scc"
-KERNEL_FEATURES_append = " ${KERNEL_EXTRA_FEATURES}"
-KERNEL_FEATURES_append_qemux86=" cfg/sound.scc cfg/paravirt_kvm.scc"
-KERNEL_FEATURES_append_qemux86-64=" cfg/sound.scc"
-KERNEL_FEATURES_append = " ${@bb.utils.contains("TUNE_FEATURES", "mx32", " cfg/x32.scc", "" ,d)}"
diff --git a/meta/recipes-kernel/lttng/babeltrace/0001-Fix-Support-out-of-tree-builds-in-babeltrace.patch b/meta/recipes-kernel/lttng/babeltrace/0001-Fix-Support-out-of-tree-builds-in-babeltrace.patch
deleted file mode 100644
index 258eedd3ba..0000000000
--- a/meta/recipes-kernel/lttng/babeltrace/0001-Fix-Support-out-of-tree-builds-in-babeltrace.patch
+++ /dev/null
@@ -1,17 +0,0 @@
-Upstream-Status: backport
-
-babeltrace: Fix support out of tree builds in babeltrace
-
-Signed-off-by: Lars Persson <larper@axis.com>
-
-diff --git a/formats/lttng-live/Makefile.am b/formats/lttng-live/Makefile.am
-index c834699..2c6b0bd 100644
---- a/formats/lttng-live/Makefile.am
-+++ b/formats/lttng-live/Makefile.am
-@@ -1,4 +1,4 @@
--AM_CFLAGS = $(PACKAGE_CFLAGS) -I$(top_srcdir)/include -I$(top_builddir)/include
-+AM_CFLAGS = $(PACKAGE_CFLAGS) -I$(top_srcdir)/include -I$(top_builddir)/include -I$(top_srcdir)
-
- lib_LTLIBRARIES = libbabeltrace-lttng-live.la
-
-
diff --git a/meta/recipes-kernel/lttng/babeltrace/Fix-Align-buffers-from-objstack_alloc-on-sizeof-void.patch b/meta/recipes-kernel/lttng/babeltrace/Fix-Align-buffers-from-objstack_alloc-on-sizeof-void.patch
deleted file mode 100644
index 8e81d2d781..0000000000
--- a/meta/recipes-kernel/lttng/babeltrace/Fix-Align-buffers-from-objstack_alloc-on-sizeof-void.patch
+++ /dev/null
@@ -1,54 +0,0 @@
-From cae67efbd9ddf2cee6bbefec076dc8933ababc43 Mon Sep 17 00:00:00 2001
-From: =?UTF-8?q?Fredrik=20Markstr=C3=B6m?= <fredrik.markstrom@gmail.com>
-Date: Fri, 16 May 2014 10:10:38 +0800
-Subject: [PATCH] Fix: Align buffers from objstack_alloc on sizeof(void *)
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-Upstream-Status: Backport
-
-The buffers from objstack_alloc will store pointers, so they must
-be aligned on a pointer's size, or else it will cause issues on the
-CPUs which do not support unaligned addresses access.
-
-Signed-off-by: Fredrik Markstrom <fredrik.markstrom@gmail.com>
-Signed-off-by: Roy Li <rongqing.li@windriver.com>
-Signed-off-by: Jérémie Galarneau <jeremie.galarneau@efficios.com>
----
- formats/ctf/metadata/objstack.c | 5 ++++-
- 1 file changed, 4 insertions(+), 1 deletion(-)
-
-diff --git a/formats/ctf/metadata/objstack.c b/formats/ctf/metadata/objstack.c
-index 9e264a4..14d9252 100644
---- a/formats/ctf/metadata/objstack.c
-+++ b/formats/ctf/metadata/objstack.c
-@@ -27,6 +27,7 @@
- #include <stdlib.h>
- #include <babeltrace/list.h>
- #include <babeltrace/babeltrace-internal.h>
-+#include <babeltrace/align.h>
-
- #define OBJSTACK_INIT_LEN 128
- #define OBJSTACK_POISON 0xcc
-@@ -39,7 +40,7 @@ struct objstack_node {
- struct bt_list_head node;
- size_t len;
- size_t used_len;
-- char data[];
-+ char __attribute__ ((aligned (sizeof(void *)))) data[];
- };
-
- BT_HIDDEN
-@@ -118,6 +119,8 @@ void *objstack_alloc(struct objstack *objstack, size_t len)
- struct objstack_node *last_node;
- void *p;
-
-+ len = ALIGN(len, sizeof(void *));
-+
- /* Get last node */
- last_node = bt_list_entry(objstack->head.prev,
- struct objstack_node, node);
---
-1.7.10.4
-
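
The two babeltrace backports deleted above are dropped presumably because they are already in the 1.2.4 release the recipe below moves to. The alignment fix in particular rounds every objstack allocation up to sizeof(void *) so that pointer-sized accesses into the buffer are never unaligned on strict-alignment CPUs. A quick sketch of the ALIGN() arithmetic it relies on, assuming the usual round-up-to-a-power-of-two definition:

# Sketch of ALIGN(len, sizeof(void *)) as used in the dropped babeltrace patch.
def align(length, alignment):            # alignment must be a power of two
    return (length + alignment - 1) & ~(alignment - 1)

ptr_size = 8                             # sizeof(void *) on a 64-bit build
for length in (1, 7, 8, 9, 23):
    print(length, "->", align(length, ptr_size))
# 1 -> 8, 7 -> 8, 8 -> 8, 9 -> 16, 23 -> 24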
diff --git a/meta/recipes-kernel/lttng/babeltrace_1.2.1.bb b/meta/recipes-kernel/lttng/babeltrace_1.2.4.bb
index a8ea4cb213..f616146393 100644
--- a/meta/recipes-kernel/lttng/babeltrace_1.2.1.bb
+++ b/meta/recipes-kernel/lttng/babeltrace_1.2.4.bb
@@ -8,15 +8,12 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=76ba15dd76a248e1dd526bca0e2125fa"
inherit autotools pkgconfig
-DEPENDS = "glib-2.0 util-linux popt"
+DEPENDS = "glib-2.0 util-linux popt bison-native flex-native"
-SRCREV = "66c2a20b4391fb5c7f870aeb0dde854f0ae1fc79"
-PV = "1.2.1+git${SRCPV}"
+SRCREV = "90395824efc007de88787a6b8e400a07c980be1c"
+PV = "1.2.4+git${SRCPV}"
-SRC_URI = "git://git.efficios.com/babeltrace.git;branch=stable-1.2 \
- file://0001-Fix-Support-out-of-tree-builds-in-babeltrace.patch \
- file://Fix-Align-buffers-from-objstack_alloc-on-sizeof-void.patch \
-"
+SRC_URI = "git://git.efficios.com/babeltrace.git;branch=stable-1.2"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-kernel/lttng/lttng-modules/Fix-noargs-probes-should-calculate-alignment-and-eve.patch b/meta/recipes-kernel/lttng/lttng-modules/Fix-noargs-probes-should-calculate-alignment-and-eve.patch
new file mode 100644
index 0000000000..9c3dc9c1e1
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-modules/Fix-noargs-probes-should-calculate-alignment-and-eve.patch
@@ -0,0 +1,130 @@
+Upstream-Status: Backport
+Signed-off-by: Bruce Ashfield <bruce.ashfield@windriver.com>
+
+From d3de7f1468be0b18145ff85b3c1a7c7fb1d48c15 Mon Sep 17 00:00:00 2001
+From: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
+Date: Fri, 25 Jul 2014 12:30:43 -0400
+Subject: [PATCH 1/3] Fix: noargs probes should calculate alignment and event
+ length
+
+A noargs probe could have event fields. noargs just means that the probe
+does not receive any argument as parameter. However, it could very well
+serialize data into fields (global variables, constants, etc).
+
+It just happens that LTTng does not serialize any data in noargs events
+at the moment, but this may very well change.
+
+The if (0) with (void) variable access strategy to stop compiler from
+complaining from unused variables does not seem to work as expected with
+gcc 4.9.1. Use "unused" attribute instead.
+
+Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
+---
+ probes/lttng-events.h | 49 +++++++++++++++++++++++++++++++++++--------------
+ 1 file changed, 35 insertions(+), 14 deletions(-)
+
+diff --git a/probes/lttng-events.h b/probes/lttng-events.h
+index 596b70608584..ba9563b15cf9 100644
+--- a/probes/lttng-events.h
++++ b/probes/lttng-events.h
+@@ -456,10 +456,19 @@ static __used struct lttng_probe_desc TP_ID(__probe_desc___, TRACE_SYSTEM) = {
+ static inline size_t __event_get_size__##_name(size_t *__dynamic_len, _proto) \
+ { \
+ size_t __event_len = 0; \
+- unsigned int __dynamic_len_idx = 0; \
++ unsigned int __dynamic_len_idx __attribute__((unused)) = 0; \
++ \
++ _tstruct \
++ return __event_len; \
++}
++
++#undef DECLARE_EVENT_CLASS_NOARGS
++#define DECLARE_EVENT_CLASS_NOARGS(_name, _tstruct, _assign, _print) \
++static inline size_t __event_get_size__##_name(size_t *__dynamic_len) \
++{ \
++ size_t __event_len = 0; \
++ unsigned int __dynamic_len_idx __attribute__((unused)) = 0; \
+ \
+- if (0) \
+- (void) __dynamic_len_idx; /* don't warn if unused */ \
+ _tstruct \
+ return __event_len; \
+ }
+@@ -514,6 +523,15 @@ static inline size_t __event_get_align__##_name(_proto) \
+ return __event_align; \
+ }
+
++#undef DECLARE_EVENT_CLASS_NOARGS
++#define DECLARE_EVENT_CLASS_NOARGS(_name, _tstruct, _assign, _print) \
++static inline size_t __event_get_align__##_name(void) \
++{ \
++ size_t __event_align = 1; \
++ _tstruct \
++ return __event_align; \
++}
++
+ #include TRACE_INCLUDE(TRACE_INCLUDE_FILE)
+
+
+@@ -553,12 +571,16 @@ static inline size_t __event_get_align__##_name(_proto) \
+ #undef TP_STRUCT__entry
+ #define TP_STRUCT__entry(args...) args
+
+-#undef DECLARE_EVENT_CLASS
+-#define DECLARE_EVENT_CLASS(_name, _proto, _args, _tstruct, _assign, _print) \
++#undef DECLARE_EVENT_CLASS_NOARGS
++#define DECLARE_EVENT_CLASS_NOARGS(_name, _tstruct, _assign, _print) \
+ struct __event_typemap__##_name { \
+ _tstruct \
+ };
+
++#undef DECLARE_EVENT_CLASS
++#define DECLARE_EVENT_CLASS(_name, _proto, _args, _tstruct, _assign, _print) \
++ DECLARE_EVENT_CLASS_NOARGS(_name, _tstruct, _assign, _print)
++
+ #include TRACE_INCLUDE(TRACE_INCLUDE_FILE)
+
+
+@@ -760,15 +782,11 @@ static void __event_probe__##_name(void *__data, _proto) \
+ struct lttng_channel *__chan = __event->chan; \
+ struct lib_ring_buffer_ctx __ctx; \
+ size_t __event_len, __event_align; \
+- size_t __dynamic_len_idx = 0; \
+- size_t __dynamic_len[2 * ARRAY_SIZE(__event_fields___##_name)]; \
+- struct __event_typemap__##_name __typemap; \
++ size_t __dynamic_len_idx __attribute__((unused)) = 0; \
++ size_t __dynamic_len[2 * ARRAY_SIZE(__event_fields___##_name)] __attribute__((unused)); \
++ struct __event_typemap__##_name __typemap __attribute__((unused)); \
+ int __ret; \
+ \
+- if (0) { \
+- (void) __dynamic_len_idx; /* don't warn if unused */ \
+- (void) __typemap; /* don't warn if unused */ \
+- } \
+ if (!_TP_SESSION_CHECK(session, __chan->session)) \
+ return; \
+ if (unlikely(!ACCESS_ONCE(__chan->session->active))) \
+@@ -800,6 +818,9 @@ static void __event_probe__##_name(void *__data) \
+ struct lttng_channel *__chan = __event->chan; \
+ struct lib_ring_buffer_ctx __ctx; \
+ size_t __event_len, __event_align; \
++ size_t __dynamic_len_idx __attribute__((unused)) = 0; \
++ size_t __dynamic_len[2 * ARRAY_SIZE(__event_fields___##_name)] __attribute__((unused)); \
++ struct __event_typemap__##_name __typemap __attribute__((unused)); \
+ int __ret; \
+ \
+ if (!_TP_SESSION_CHECK(session, __chan->session)) \
+@@ -810,8 +831,8 @@ static void __event_probe__##_name(void *__data) \
+ return; \
+ if (unlikely(!ACCESS_ONCE(__event->enabled))) \
+ return; \
+- __event_len = 0; \
+- __event_align = 1; \
++ __event_len = __event_get_size__##_name(__dynamic_len); \
++ __event_align = __event_get_align__##_name(); \
+ lib_ring_buffer_ctx_init(&__ctx, __chan->chan, __event, __event_len, \
+ __event_align, -1); \
+ __ret = __chan->ops->event_reserve(&__ctx, __event->id); \
+--
+1.8.1.2
+
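The first hunks of this patch swap one warning-suppression idiom for another. Reduced to a stand-alone sketch outside the tracepoint macros, with illustrative function names: the attribute form states the intent to the compiler directly, instead of relying on a dead "if (0)" use that, per the commit message above, stopped silencing gcc 4.9.1 inside these macro expansions.

#include <stddef.h>
#include <stdio.h>

static size_t old_style(void)
{
	size_t dynamic_len_idx = 0;

	if (0)
		(void) dynamic_len_idx;	/* nominal use; the commit reports this no longer quiets gcc 4.9.1 */
	return 0;
}

static size_t new_style(void)
{
	/* The attribute marks the variable as intentionally unused; no dead code needed. */
	size_t dynamic_len_idx __attribute__((unused)) = 0;

	return 0;
}

int main(void)
{
	printf("%zu %zu\n", old_style(), new_style());
	return 0;
}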
diff --git a/meta/recipes-kernel/lttng/lttng-modules/Update-kvm-instrumentation-compile-on-3.17-rc1.patch b/meta/recipes-kernel/lttng/lttng-modules/Update-kvm-instrumentation-compile-on-3.17-rc1.patch
new file mode 100644
index 0000000000..3541b50b79
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-modules/Update-kvm-instrumentation-compile-on-3.17-rc1.patch
@@ -0,0 +1,46 @@
+Upstream-Status: Backport
+Signed-off-by: Bruce Ashfield <bruce.ashfield@windriver.com>
+
+From 458c2022e992c057bd21d02e4c77bcc7d4d6cd6c Mon Sep 17 00:00:00 2001
+From: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
+Date: Thu, 21 Aug 2014 11:15:50 -0400
+Subject: [PATCH 3/3] Update kvm instrumentation: compile on 3.17-rc1
+
+Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
+---
+ instrumentation/events/lttng-module/arch/x86/kvm/trace.h | 12 +++++++++++-
+ 1 file changed, 11 insertions(+), 1 deletion(-)
+
+diff --git a/instrumentation/events/lttng-module/arch/x86/kvm/trace.h b/instrumentation/events/lttng-module/arch/x86/kvm/trace.h
+index 2354884074eb..3c299c58a1cf 100644
+--- a/instrumentation/events/lttng-module/arch/x86/kvm/trace.h
++++ b/instrumentation/events/lttng-module/arch/x86/kvm/trace.h
+@@ -724,7 +724,7 @@ TRACE_EVENT(kvm_emulate_insn,
+ tp_memcpy(insn,
+ vcpu->arch.emulate_ctxt.decode.fetch.data,
+ 15)
+-#else
++#elif (LINUX_VERSION_CODE < KERNEL_VERSION(3,17,0))
+ tp_assign(rip, vcpu->arch.emulate_ctxt.fetch.start)
+ tp_assign(csbase, kvm_x86_ops->get_segment_base(vcpu, VCPU_SREG_CS))
+ tp_assign(len, vcpu->arch.emulate_ctxt._eip
+@@ -732,6 +732,16 @@ TRACE_EVENT(kvm_emulate_insn,
+ tp_memcpy(insn,
+ vcpu->arch.emulate_ctxt.fetch.data,
+ 15)
++#else
++ tp_assign(rip, vcpu->arch.emulate_ctxt._eip -
++ (vcpu->arch.emulate_ctxt.fetch.ptr -
++ vcpu->arch.emulate_ctxt.fetch.data))
++ tp_assign(csbase, kvm_x86_ops->get_segment_base(vcpu, VCPU_SREG_CS))
++ tp_assign(len, vcpu->arch.emulate_ctxt.fetch.ptr -
++ vcpu->arch.emulate_ctxt.fetch.data)
++ tp_memcpy(insn,
++ vcpu->arch.emulate_ctxt.fetch.data,
++ 15)
+ #endif
+ tp_assign(flags, kei_decode_mode(vcpu->arch.emulate_ctxt.mode))
+ tp_assign(failed, failed)
+--
+1.8.1.2
+
diff --git a/meta/recipes-kernel/lttng/lttng-modules/Update-statedump-to-3.17-nsproxy-locking.patch b/meta/recipes-kernel/lttng/lttng-modules/Update-statedump-to-3.17-nsproxy-locking.patch
new file mode 100644
index 0000000000..0f18c8a3e6
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-modules/Update-statedump-to-3.17-nsproxy-locking.patch
@@ -0,0 +1,70 @@
+Upstream-Status: Backport
+Signed-off-by: Bruce Ashfield <bruce.ashfield@windriver.com>
+
+From 4ba1f53c5aebb4433fedc25d65af010274985043 Mon Sep 17 00:00:00 2001
+From: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
+Date: Thu, 21 Aug 2014 10:53:12 -0400
+Subject: [PATCH 2/3] Update statedump to 3.17 nsproxy locking
+
+This Linux upstream commit introduces locking strategy back and forth:
+
+commit 728dba3a39c66b3d8ac889ddbe38b5b1c264aec3
+Author: Eric W. Biederman <ebiederm@xmission.com>
+Date: Mon Feb 3 19:13:49 2014 -0800
+
+ namespaces: Use task_lock and not rcu to protect nsproxy
+
+Use the task lock starting from kernel 3.17 rather than RCU to access
+the task nsproxy.
+
+Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
+---
+ lttng-statedump-impl.c | 15 +++++++++++++++
+ 1 file changed, 15 insertions(+)
+
+diff --git a/lttng-statedump-impl.c b/lttng-statedump-impl.c
+index dad51ddaa250..e4caa488e436 100644
+--- a/lttng-statedump-impl.c
++++ b/lttng-statedump-impl.c
+@@ -378,6 +378,9 @@ int lttng_list_interrupts(struct lttng_session *session)
+ }
+ #endif
+
++/*
++ * Called with task lock held.
++ */
+ static
+ void lttng_statedump_process_ns(struct lttng_session *session,
+ struct task_struct *p,
+@@ -389,8 +392,18 @@ void lttng_statedump_process_ns(struct lttng_session *session,
+ struct nsproxy *proxy;
+ struct pid_namespace *pid_ns;
+
++ /*
++ * Back and forth on locking strategy within Linux upstream for nsproxy.
++ * See Linux upstream commit 728dba3a39c66b3d8ac889ddbe38b5b1c264aec3
++ * "namespaces: Use task_lock and not rcu to protect nsproxy"
++ * for details.
++ */
++#if (LINUX_VERSION_CODE < KERNEL_VERSION(3,17,0))
+ rcu_read_lock();
+ proxy = task_nsproxy(p);
++#else /* #if (LINUX_VERSION_CODE < KERNEL_VERSION(3,17,0)) */
++ proxy = p->nsproxy;
++#endif /* #else #if (LINUX_VERSION_CODE < KERNEL_VERSION(3,17,0)) */
+ if (proxy) {
+ pid_ns = lttng_get_proxy_pid_ns(proxy);
+ do {
+@@ -402,7 +415,9 @@ void lttng_statedump_process_ns(struct lttng_session *session,
+ trace_lttng_statedump_process_state(session,
+ p, type, mode, submode, status, NULL);
+ }
++#if (LINUX_VERSION_CODE < KERNEL_VERSION(3,17,0))
+ rcu_read_unlock();
++#endif /* #if (LINUX_VERSION_CODE < KERNEL_VERSION(3,17,0)) */
+ }
+
+ static
+--
+1.8.1.2
+
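The version switch in the hunk above reduces to the small accessor sketched here, which only restates the patch's shape: before 3.17 the nsproxy pointer is read under rcu_read_lock() via task_nsproxy(), from 3.17 on it is read directly while the caller already holds the task lock. This is kernel-module code and only builds inside a module tree; the helper names are invented for the sketch.

#include <linux/version.h>
#include <linux/sched.h>
#include <linux/nsproxy.h>
#include <linux/rcupdate.h>

/* Begin an access to p->nsproxy. From 3.17 the caller must hold task_lock(p). */
static struct nsproxy *nsproxy_access_begin(struct task_struct *p)
{
#if (LINUX_VERSION_CODE < KERNEL_VERSION(3,17,0))
	rcu_read_lock();
	return task_nsproxy(p);
#else
	return p->nsproxy;
#endif
}

/* End the access; only the pre-3.17 RCU variant has anything to release. */
static void nsproxy_access_end(void)
{
#if (LINUX_VERSION_CODE < KERNEL_VERSION(3,17,0))
	rcu_read_unlock();
#endif
}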
diff --git a/meta/recipes-kernel/lttng/lttng-modules/compaction-fix-mm_compaction_isolate_template-build.patch b/meta/recipes-kernel/lttng/lttng-modules/compaction-fix-mm_compaction_isolate_template-build.patch
new file mode 100644
index 0000000000..a99871a62e
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-modules/compaction-fix-mm_compaction_isolate_template-build.patch
@@ -0,0 +1,41 @@
+From af48c7b08de4b811d3d974e65e362b86ce8c4a34 Mon Sep 17 00:00:00 2001
+From: Bruce Ashfield <bruce.ashfield@windriver.com>
+Date: Wed, 10 Dec 2014 03:19:28 -0500
+Subject: [PATCH] compaction: fix mm_compaction_isolate_template build
+
+linux-stable integrated the 3.16 commit f8c9301fa5a2a [mm/compaction: do
+not count migratepages when unnecessary] with the 3.14.25 update.
+
+So we have to update the lttng-module linux version codes to use the
+new definition in builds greater than 3.14.24 or 3.16.
+
+Signed-off-by: Bruce Ashfield <bruce.ashfield@windriver.com>
+---
+ instrumentation/events/lttng-module/compaction.h | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/instrumentation/events/lttng-module/compaction.h b/instrumentation/events/lttng-module/compaction.h
+index 22024e9ee582..07afbe06f1a6 100644
+--- a/instrumentation/events/lttng-module/compaction.h
++++ b/instrumentation/events/lttng-module/compaction.h
+@@ -46,7 +46,7 @@ DEFINE_EVENT(mm_compaction_isolate_template, mm_compaction_isolate_freepages,
+ TP_ARGS(nr_scanned, nr_taken)
+ )
+
+-#if (LINUX_VERSION_CODE >= KERNEL_VERSION(3,16,0))
++#if (LINUX_VERSION_CODE >= KERNEL_VERSION(3,16,0) || LINUX_VERSION_CODE >= KERNEL_VERSION(3,14,25))
+ TRACE_EVENT(mm_compaction_migratepages,
+
+ TP_PROTO(unsigned long nr_all,
+@@ -87,7 +87,7 @@ TRACE_EVENT(mm_compaction_migratepages,
+ __entry->nr_migrated,
+ __entry->nr_failed)
+ )
+-#else /* #if (LINUX_VERSION_CODE >= KERNEL_VERSION(3,16,0)) */
++#else /* #if (LINUX_VERSION_CODE >= KERNEL_VERSION(3,16,0) || LINUX_VERSION_CODE >= KERNEL_VERSION(3,14,25)) */
+ TRACE_EVENT(mm_compaction_migratepages,
+
+ TP_PROTO(unsigned long nr_migrated,
+--
+2.1.0
+
diff --git a/meta/recipes-kernel/lttng/lttng-modules/fix_build_with_v3.17_kernel.patch b/meta/recipes-kernel/lttng/lttng-modules/fix_build_with_v3.17_kernel.patch
new file mode 100644
index 0000000000..97b7a53a07
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-modules/fix_build_with_v3.17_kernel.patch
@@ -0,0 +1,113 @@
+
+Upstream-Status: Backport
+
+commit 7df57eb5d6bdc85ddcf2b9afb6cd0cacfb22096e
+Author: Nitin A Kamble <nitin.a.kamble@intel.com>
+Date: Thu Sep 25 18:19:43 2014 -0700
+
+ asoc.h: fix build with v3.17 kernel
+
+ The snd_soc_codec structure has changed in the v3.17 kernel. Some
+ of the redundant fields have been removed. To be specific this commit
+ from the v3.17 kernel causes the build failure for lttng-modules.
+
+ |commit f4333203ec933f9272c90c7add01774ec2cf94d3
+ |Author: Lars-Peter Clausen <lars@metafoo.de>
+ |Date: Mon Jun 16 18:13:02 2014 +0200
+ |
+ | ASoC: Move name and id from CODEC/platform to component
+ |
+ | The component struct already has a name and id field which are initialized to
+ | the same values as the same fields in the CODEC and platform structs. So remove
+ | them from the CODEC and platform structs and used the ones from the component
+ | struct instead.
+ |
+ | Signed-off-by: Lars-Peter Clausen <lars@metafoo.de>
+ | Signed-off-by: Mark Brown <broonie@linaro.org>
+
+ The asoc.h is changed according to the change in the above kernel commit
+ to fix the lttng-modules build. The change in the lttng-modules code is
+ conditional on the kernel version, so that it does not break builds with
+ previous kernel versions.
+
+ Signed-off-by: Nitin A Kamble <nitin.a.kamble@intel.com>
+ Signed-off-by: Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
+
+diff --git a/instrumentation/events/lttng-module/asoc.h b/instrumentation/events/lttng-module/asoc.h
+index 672bea4..bf9cf86 100644
+--- a/instrumentation/events/lttng-module/asoc.h
++++ b/instrumentation/events/lttng-module/asoc.h
+@@ -21,6 +21,14 @@ struct snd_soc_card;
+ struct snd_soc_dapm_widget;
+ #endif
+
++#if (LINUX_VERSION_CODE >= KERNEL_VERSION(3,16,0))
++#define CODEC_NAME_FIELD component.name
++#define CODEC_ID_FIELD component.id
++#else
++#define CODEC_NAME_FIELD name
++#define CODEC_ID_FIELD id
++#endif
++
+ /*
+ * Log register events
+ */
+@@ -32,15 +40,15 @@ DECLARE_EVENT_CLASS(snd_soc_reg,
+ TP_ARGS(codec, reg, val),
+
+ TP_STRUCT__entry(
+- __string( name, codec->name )
++ __string( name, codec->CODEC_NAME_FIELD )
+ __field( int, id )
+ __field( unsigned int, reg )
+ __field( unsigned int, val )
+ ),
+
+ TP_fast_assign(
+- tp_strcpy(name, codec->name)
+- tp_assign(id, codec->id)
++ tp_strcpy(name, codec->CODEC_NAME_FIELD)
++ tp_assign(id, codec->CODEC_ID_FIELD)
+ tp_assign(reg, reg)
+ tp_assign(val, val)
+ ),
+@@ -77,15 +85,15 @@ DECLARE_EVENT_CLASS(snd_soc_preg,
+ TP_ARGS(platform, reg, val),
+
+ TP_STRUCT__entry(
+- __string( name, platform->name )
++ __string( name, platform->CODEC_NAME_FIELD )
+ __field( int, id )
+ __field( unsigned int, reg )
+ __field( unsigned int, val )
+ ),
+
+ TP_fast_assign(
+- tp_strcpy(name, platform->name)
+- tp_assign(id, platform->id)
++ tp_strcpy(name, platform->CODEC_NAME_FIELD)
++ tp_assign(id, platform->CODEC_ID_FIELD)
+ tp_assign(reg, reg)
+ tp_assign(val, val)
+ ),
+@@ -399,17 +407,17 @@ TRACE_EVENT(snd_soc_cache_sync,
+ TP_ARGS(codec, type, status),
+
+ TP_STRUCT__entry(
+- __string( name, codec->name )
++ __string( name, codec->CODEC_NAME_FIELD )
+ __string( status, status )
+ __string( type, type )
+ __field( int, id )
+ ),
+
+ TP_fast_assign(
+- tp_strcpy(name, codec->name)
++ tp_strcpy(name, codec->CODEC_NAME_FIELD)
+ tp_strcpy(status, status)
+ tp_strcpy(type, type)
+- tp_assign(id, codec->id)
++ tp_assign(id, codec->CODEC_ID_FIELD)
+ ),
+
+ TP_printk("codec=%s.%d type=%s status=%s", __get_str(name),
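The macro indirection introduced in asoc.h above keeps every tracepoint definition unchanged when a struct member migrates into an embedded component struct. The same technique in ordinary userspace C, with made-up struct and codec names and a plain #define standing in for the kernel version check:

#include <stdio.h>

#define NEW_LAYOUT 1	/* stand-in for the LINUX_VERSION_CODE check above */

struct component { const char *name; int id; };

#if NEW_LAYOUT
struct codec { struct component component; };
#define CODEC_NAME_FIELD component.name
#define CODEC_ID_FIELD   component.id
#else
struct codec { const char *name; int id; };
#define CODEC_NAME_FIELD name
#define CODEC_ID_FIELD   id
#endif

int main(void)
{
	struct codec c;

	/* The access sites stay identical whichever layout is compiled in. */
	c.CODEC_NAME_FIELD = "example-codec";
	c.CODEC_ID_FIELD = 0;
	printf("%s.%d\n", c.CODEC_NAME_FIELD, c.CODEC_ID_FIELD);
	return 0;
}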
diff --git a/meta/recipes-kernel/lttng/lttng-modules_2.5.0.bb b/meta/recipes-kernel/lttng/lttng-modules_2.5.0.bb
index 9310f0f3eb..f9475e9d85 100644
--- a/meta/recipes-kernel/lttng/lttng-modules_2.5.0.bb
+++ b/meta/recipes-kernel/lttng/lttng-modules_2.5.0.bb
@@ -7,6 +7,7 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=1412caf5a1aa90d6a48588a4794c0eac \
file://lgpl-2.1.txt;md5=243b725d71bb5df4a1e5920b344b86ad"
DEPENDS = "virtual/kernel"
+do_configure[depends] += "virtual/kernel:do_install"
inherit module
@@ -18,6 +19,11 @@ SRC_URI = "git://git.lttng.org/lttng-modules.git;branch=stable-2.5 \
file://lttng-modules-replace-KERNELDIR-with-KERNEL_SRC.patch \
file://Update-compaction-instrumentation-to-3.16-kernel.patch \
file://Update-vmscan-instrumentation-to-3.16-kernel.patch \
+ file://Fix-noargs-probes-should-calculate-alignment-and-eve.patch \
+ file://Update-statedump-to-3.17-nsproxy-locking.patch \
+ file://Update-kvm-instrumentation-compile-on-3.17-rc1.patch \
+ file://fix_build_with_v3.17_kernel.patch \
+ file://compaction-fix-mm_compaction_isolate_template-build.patch \
"
export INSTALL_MOD_DIR="kernel/lttng-modules"
diff --git a/meta/recipes-kernel/lttng/lttng-tools_2.5.0.bb b/meta/recipes-kernel/lttng/lttng-tools_2.5.0.bb
index 14e5f9ebf2..fd44aa5f6d 100644
--- a/meta/recipes-kernel/lttng/lttng-tools_2.5.0.bb
+++ b/meta/recipes-kernel/lttng/lttng-tools_2.5.0.bb
@@ -10,11 +10,19 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=01d7fc4496aacf37d90df90b90b0cac1 \
file://lgpl-2.1.txt;md5=0f0d71500e6a57fd24d825f33242b9ca"
DEPENDS = "liburcu popt lttng-ust libxml2"
-RDEPENDS_${PN}-ptest += "make"
+RDEPENDS_${PN}-ptest += "make perl bash"
SRCREV = "8b27cacb277c2cdab791139b08da8eb87ab14a88"
PV = "v2.5.0"
+PYTHON_OPTION = "am_cv_python_pyexecdir='${libdir}/python${PYTHON_BASEVERSION}/site-packages' \
+ am_cv_python_pythondir='${libdir}/python${PYTHON_BASEVERSION}/site-packages' \
+ PYTHON_INCLUDE='-I${STAGING_INCDIR}/python${PYTHON_BASEVERSION}' \
+"
+PACKAGECONFIG ??= "lttng-ust"
+PACKAGECONFIG[python] = "--enable-python-bindings ${PYTHON_OPTION},,python swig-native"
+PACKAGECONFIG[lttng-ust] = ", --disable-lttng-ust, lttng-ust"
+
SRC_URI = "git://git.lttng.org/lttng-tools.git;branch=stable-2.5 \
file://runtest-2.4.0.patch \
file://run-ptest \
@@ -26,12 +34,17 @@ inherit autotools-brokensep ptest pkgconfig
export KERNELDIR="${STAGING_KERNEL_DIR}"
-FILES_${PN} += "${libdir}/lttng/libexec/* ${datadir}/xml/lttng"
-FILES_${PN}-dbg += "${libdir}/lttng/libexec/.debug"
+FILES_${PN} += "${libdir}/lttng/libexec/* ${datadir}/xml/lttng \
+ ${libdir}/python${PYTHON_BASEVERSION}/site-packages/*"
+FILES_${PN}-dbg += "${libdir}/lttng/libexec/.debug \
+ ${libdir}/python2.7/site-packages/.debug"
+FILES_${PN}-staticdev += "${libdir}/python${PYTHON_BASEVERSION}/site-packages/*.a"
+FILES_${PN}-dev += "${libdir}/python${PYTHON_BASEVERSION}/site-packages/*.la"
# Since files are installed into ${libdir}/lttng/libexec we match
# the libexec insane test so skip it.
-INSANE_SKIP_${PN} = "libexec"
+# Python module needs to keep _lttng.so
+INSANE_SKIP_${PN} = "libexec dev-so"
INSANE_SKIP_${PN}-dbg = "libexec"
do_configure_prepend () {
@@ -40,8 +53,8 @@ do_configure_prepend () {
}
do_install_ptest () {
- chmod +x ${D}/${libdir}/${PN}/ptest/tests/utils/utils.sh
- for i in `find ${D}/${libdir}/${PN}/ptest -perm /u+x -type f`; do
+ chmod +x ${D}${PTEST_PATH}/tests/utils/utils.sh
+ for i in `find ${D}${PTEST_PATH} -perm /u+x -type f`; do
sed -e "s:\$TESTDIR.*/src/bin/lttng/\$LTTNG_BIN:\$LTTNG_BIN:g" \
-e "s:\$TESTDIR/../src/bin/lttng-sessiond/\$SESSIOND_BIN:\$SESSIOND_BIN:g" \
-e "s:\$DIR/../src/bin/lttng-sessiond/\$SESSIOND_BIN:\$SESSIOND_BIN:g" \
@@ -56,8 +69,9 @@ do_install_ptest () {
sed -e "s:src/bin/lttng-sessiond:$bindir:g" \
-e "s:src/bin/lttng-consumerd:${libexecdir}/libexec/:g" \
- -i ${D}/${libdir}/${PN}/ptest/tests/regression/run-report.py
- sed -e "s:src/bin:bin:g" \
- -i ${D}/${libdir}/${PN}/ptest/tests/utils/utils.sh
-
+ -i ${D}${PTEST_PATH}/tests/regression/run-report.py
+ sed -e "s:src/bin:bin:g" -e "s:lt-::g" \
+ -i ${D}${PTEST_PATH}/tests/utils/utils.sh
+ sed -e "s:ini_config:\.libs\/ini_config:" \
+ -i ${D}${PTEST_PATH}/tests/unit/ini_config/test_ini_config
}
diff --git a/meta/recipes-kernel/lttng/lttng-ust/add-aarch64.patch b/meta/recipes-kernel/lttng/lttng-ust/add-aarch64.patch
new file mode 100644
index 0000000000..cec5410369
--- /dev/null
+++ b/meta/recipes-kernel/lttng/lttng-ust/add-aarch64.patch
@@ -0,0 +1,19 @@
+lttng-ust: add aarch64 recognition
+
+Treat the same as "arm".
+
+Upstream-Status: Pending
+
+Signed-off-by: joe.slater@windriver.com
+
+
+--- a/configure.ac
++++ b/configure.ac
+@@ -230,6 +230,7 @@ changequote([,])dnl
+ s390) NO_UNALIGNED_ACCESS=1 ;;
+ s390x) NO_UNALIGNED_ACCESS=1 ;;
+ arm*) NO_UNALIGNED_ACCESS=1 ;;
++ aarch64) NO_UNALIGNED_ACCESS=1 ;;
+ mips*) NO_UNALIGNED_ACCESS=1 ;;
+ tile*) NO_UNALIGNED_ACCESS=1 ;;
+ *) AC_MSG_ERROR([unable to detect alignment requirements (unsupported architecture ($host_cpu)?)]) ;;
diff --git a/meta/recipes-kernel/lttng/lttng-ust_2.5.0.bb b/meta/recipes-kernel/lttng/lttng-ust_2.5.0.bb
index 1468fc1877..e07d3c726b 100644
--- a/meta/recipes-kernel/lttng/lttng-ust_2.5.0.bb
+++ b/meta/recipes-kernel/lttng/lttng-ust_2.5.0.bb
@@ -11,6 +11,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=c963eb366b781252b0bf0fdf1624d9e9 \
inherit autotools lib_package
DEPENDS = "liburcu util-linux"
+RDEPENDS_${PN}-bin = "python-core"
# For backwards compatibility after rename
RPROVIDES_${PN} = "lttng2-ust"
@@ -23,6 +24,7 @@ PE = "2"
SRC_URI = "git://git.lttng.org/lttng-ust.git;branch=stable-2.5 \
file://lttng-ust-doc-examples-disable.patch \
+ file://add-aarch64.patch \
"
S = "${WORKDIR}/git"
diff --git a/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb b/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb
index 125f5fb073..7031ba142c 100644
--- a/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb
+++ b/meta/recipes-kernel/modutils-initscripts/modutils-initscripts.bb
@@ -18,3 +18,13 @@ do_install () {
install -d ${D}${sysconfdir}/init.d/
install -m 0755 ${WORKDIR}/modutils.sh ${D}${sysconfdir}/init.d/
}
+
+DEPENDS_append = " ${@bb.utils.contains('DISTRO_FEATURES','systemd','systemd-systemctl-native','',d)}"
+pkg_postinst_${PN} () {
+ if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then
+ if [ -n "$D" ]; then
+ OPTS="--root=$D"
+ fi
+ systemctl $OPTS mask modutils.service
+ fi
+}
diff --git a/meta/recipes-kernel/oprofile/oprofile.inc b/meta/recipes-kernel/oprofile/oprofile.inc
index 69582039e8..509640c806 100644
--- a/meta/recipes-kernel/oprofile/oprofile.inc
+++ b/meta/recipes-kernel/oprofile/oprofile.inc
@@ -18,7 +18,7 @@ FILES_${PN} = "${bindir} ${libdir}/${BPN}/lib*${SOLIBS} ${datadir}/${BPN}"
FILES_${PN}-dev += "${libdir}/${BPN}/lib*${SOLIBSDEV} ${libdir}/${BPN}/lib*.la"
FILES_${PN}-staticdev += "${libdir}/${BPN}/lib*.a"
-SRC_URI = "file://opstart.patch \
+SRC_URI = "file://filemode-fix.patch \
file://acinclude.m4 \
file://automake-foreign.patch \
file://oprofile-cross-compile-tests.patch \
diff --git a/meta/recipes-kernel/oprofile/oprofile/0001-Add-freescale-e500mc-support.patch b/meta/recipes-kernel/oprofile/oprofile/0001-Add-freescale-e500mc-support.patch
deleted file mode 100644
index 077da4bf2b..0000000000
--- a/meta/recipes-kernel/oprofile/oprofile/0001-Add-freescale-e500mc-support.patch
+++ /dev/null
@@ -1,219 +0,0 @@
-From ca3f796b3a7742215ed35b56fc072595174c410e Mon Sep 17 00:00:00 2001
-From: Ting Liu <b28495@freescale.com>
-Date: Thu, 5 Sep 2013 07:43:55 -0500
-Subject: [PATCH 1/2] Add freescale e500mc support
-
-Upstream-Status: Backport
-
-Signed-off-by: George Stephen <Stephen.George@freescale.com>
-Signed-off-by: Zhenhua Luo <zhenhua.luo@freescale.com>
-Signed-off-by: Ting Liu <b28495@freescale.com>
----
- events/Makefile.am | 1 +
- events/ppc/e500mc/events | 120 ++++++++++++++++++++++++++++++++++++++++++
- events/ppc/e500mc/unit_masks | 4 ++
- libop/op_cpu_type.c | 1 +
- libop/op_cpu_type.h | 1 +
- libop/op_events.c | 1 +
- utils/ophelp.c | 1 +
- 7 files changed, 129 insertions(+), 0 deletions(-)
- create mode 100644 events/ppc/e500mc/events
- create mode 100644 events/ppc/e500mc/unit_masks
-
-diff --git a/events/Makefile.am b/events/Makefile.am
-index be87781..e496f98 100644
---- a/events/Makefile.am
-+++ b/events/Makefile.am
-@@ -76,6 +76,7 @@ event_files = \
- ppc/7450/events ppc/7450/unit_masks \
- ppc/e500/events ppc/e500/unit_masks \
- ppc/e500v2/events ppc/e500v2/unit_masks \
-+ ppc/e500mc/events ppc/e500mc/unit_masks \
- ppc/e300/events ppc/e300/unit_masks \
- tile/tile64/events tile/tile64/unit_masks \
- tile/tilepro/events tile/tilepro/unit_masks \
-diff --git a/events/ppc/e500mc/events b/events/ppc/e500mc/events
-new file mode 100644
-index 0000000..8197a7d
---- /dev/null
-+++ b/events/ppc/e500mc/events
-@@ -0,0 +1,120 @@
-+# e500mc Events
-+#
-+# Copyright (C) 2010 Freescale Semiconductor, Inc.
-+#
-+event:0x1 counters:0,1,2,3 um:zero minimum:100 name:CPU_CLK : Cycles
-+event:0x2 counters:0,1,2,3 um:zero minimum:500 name:COMPLETED_INSNS : Completed Instructions (0, 1, or 2 per cycle)
-+event:0x3 counters:0,1,2,3 um:zero minimum:500 name:COMPLETED_OPS : Completed Micro-ops (counts 2 for load/store w/update)
-+event:0x4 counters:0,1,2,3 um:zero minimum:500 name:INSTRUCTION_FETCHES : Instruction fetches
-+event:0x5 counters:0,1,2,3 um:zero minimum:500 name:DECODED_OPS : Micro-ops decoded
-+event:0x8 counters:0,1,2,3 um:zero minimum:500 name:COMPLETED_BRANCHES : Branch Instructions completed
-+event:0x9 counters:0,1,2,3 um:zero minimum:500 name:COMPLETED_LOAD_OPS : Load micro-ops completed
-+event:0xa counters:0,1,2,3 um:zero minimum:500 name:COMPLETED_STORE_OPS : Store micro-ops completed
-+event:0xb counters:0,1,2,3 um:zero minimum:500 name:COMPLETION_REDIRECTS : Number of completion buffer redirects
-+event:0xc counters:0,1,2,3 um:zero minimum:500 name:BRANCHES_FINISHED : Branches finished
-+event:0xd counters:0,1,2,3 um:zero minimum:500 name:TAKEN_BRANCHES_FINISHED : Taken branches finished
-+event:0xe counters:0,1,2,3 um:zero minimum:500 name:BIFFED_BRANCHES_FINISHED : Biffed branches finished
-+event:0xf counters:0,1,2,3 um:zero minimum:500 name:BRANCHES_MISPREDICTED : Branch instructions mispredicted due to direction, target, or IAB prediction
-+event:0x10 counters:0,1,2,3 um:zero minimum:500 name:BRANCHES_MISPREDICTED_DIRECTION : Branches mispredicted due to direction prediction
-+event:0x11 counters:0,1,2,3 um:zero minimum:500 name:BTB_HITS : Branches that hit in the BTB, or missed but are not taken
-+event:0x12 counters:0,1,2,3 um:zero minimum:500 name:DECODE_STALLED : Cycles the instruction buffer was not empty, but 0 instructions decoded
-+event:0x13 counters:0,1,2,3 um:zero minimum:500 name:ISSUE_STALLED : Cycles the issue buffer is not empty but 0 instructions issued
-+event:0x14 counters:0,1,2,3 um:zero minimum:500 name:BRANCH_ISSUE_STALLED : Cycles the branch buffer is not empty but 0 instructions issued
-+event:0x15 counters:0,1,2,3 um:zero minimum:500 name:SRS0_SCHEDULE_STALLED : Cycles SRS0 is not empty but 0 instructions scheduled
-+event:0x16 counters:0,1,2,3 um:zero minimum:500 name:SRS1_SCHEDULE_STALLED : Cycles SRS1 is not empty but 0 instructions scheduled
-+event:0x17 counters:0,1,2,3 um:zero minimum:500 name:VRS_SCHEDULE_STALLED : Cycles VRS is not empty but 0 instructions scheduled
-+event:0x18 counters:0,1,2,3 um:zero minimum:500 name:LRS_SCHEDULE_STALLED : Cycles LRS is not empty but 0 instructions scheduled
-+event:0x19 counters:0,1,2,3 um:zero minimum:500 name:BRS_SCHEDULE_STALLED : Cycles BRS is not empty but 0 instructions scheduled Load/Store, Data Cache, and dLFB Events
-+event:0x1a counters:0,1,2,3 um:zero minimum:500 name:TOTAL_TRANSLATED : Total Ldst microops translated.
-+event:0x1b counters:0,1,2,3 um:zero minimum:500 name:LOADS_TRANSLATED : Number of cacheable L* or EVL* microops translated. (This includes microops from load-multiple, load-update, and load-context instructions.)
-+event:0x1c counters:0,1,2,3 um:zero minimum:500 name:STORES_TRANSLATED : Number of cacheable ST* or EVST* microops translated. (This includes microops from store-multiple, store-update, and save-context instructions.)
-+event:0x1d counters:0,1,2,3 um:zero minimum:500 name:TOUCHES_TRANSLATED : Number of cacheable DCBT and DCBTST instructions translated (L1 only) (Does not count touches that are converted to nops i.e. exceptions, noncacheable, hid0[nopti] bit is set.)
-+event:0x1e counters:0,1,2,3 um:zero minimum:500 name:CACHEOPS_TRANSLATED : Number of dcba, dcbf, dcbst, and dcbz instructions translated (e500 traps on dcbi)
-+event:0x1f counters:0,1,2,3 um:zero minimum:500 name:CACHEINHIBITED_ACCESSES_TRANSLATED : Number of cache inhibited accesses translated
-+event:0x20 counters:0,1,2,3 um:zero minimum:500 name:GUARDED_LOADS_TRANSLATED : Number of guarded loads translated
-+event:0x21 counters:0,1,2,3 um:zero minimum:500 name:WRITETHROUGH_STORES_TRANSLATED : Number of write-through stores translated
-+event:0x22 counters:0,1,2,3 um:zero minimum:500 name:MISALIGNED_ACCESSES_TRANSLATED : Number of misaligned load or store accesses translated.
-+event:0x23 counters:0,1,2,3 um:zero minimum:500 name:TOTAL_ALLOCATED_DLFB : Total allocated to dLFB
-+event:0x24 counters:0,1,2,3 um:zero minimum:500 name:LOADS_TRANSLATED_ALLOCATED_DLFB : Loads translated and allocated to dLFB (Applies to same class of instructions as loads translated.)
-+event:0x25 counters:0,1,2,3 um:zero minimum:500 name:STORES_COMPLETED_ALLOCATED_DLFB : Stores completed and allocated to dLFB (Applies to same class of instructions as stores translated.)
-+event:0x26 counters:0,1,2,3 um:zero minimum:500 name:TOUCHES_TRANSLATED_ALLOCATED_DLFB : Touches translated and allocated to dLFB (Applies to same class of instructions as touches translated.)
-+event:0x27 counters:0,1,2,3 um:zero minimum:500 name:STORES_COMPLETED : Number of cacheable ST* or EVST* microops completed. (Applies to the same class of instructions as stores translated.)
-+event:0x28 counters:0,1,2,3 um:zero minimum:500 name:DL1_LOCKS : Number of cache lines locked in the dL1. (Counts a lock even if an overlock condition is encountered.)
-+event:0x29 counters:0,1,2,3 um:zero minimum:500 name:DL1_RELOADS : This is historically used to determine dcache miss rate (along with loads/stores completed). This counts dL1 reloads for any reason.
-+event:0x2a counters:0,1,2,3 um:zero minimum:500 name:DL1_CASTOUTS : dL1 castouts. Does not count castouts due to DCBF.
-+event:0x2b counters:0,1,2,3 um:zero minimum:500 name:DETECTED_REPLAYS : Times detected replay condition - Load miss with dLFB full.
-+event:0x2c counters:0,1,2,3 um:zero minimum:500 name:LOAD_MISS_QUEUE_FULL_REPLAYS : Load miss with load queue full.
-+event:0x2d counters:0,1,2,3 um:zero minimum:500 name:LOAD_GUARDED_MISS_NOT_LAST_REPLAYS : Load guarded miss when the load is not yet at the bottom of the completion buffer.
-+event:0x2e counters:0,1,2,3 um:zero minimum:500 name:STORE_TRANSLATED_QUEUE_FULL_REPLAYS : Translate a store when the StQ is full.
-+event:0x2f counters:0,1,2,3 um:zero minimum:500 name:ADDRESS_COLLISION_REPLAYS : Address collision.
-+event:0x30 counters:0,1,2,3 um:zero minimum:500 name:DMMU_MISS_REPLAYS : DMMU_MISS_REPLAYS : DMMU miss.
-+event:0x31 counters:0,1,2,3 um:zero minimum:500 name:DMMU_BUSY_REPLAYS : DMMU_BUSY_REPLAYS : DMMU busy.
-+event:0x32 counters:0,1,2,3 um:zero minimum:500 name:SECOND_PART_MISALIGNED_AFTER_MISS_REPLAYS : Second part of misaligned access when first part missed in cache.
-+event:0x33 counters:0,1,2,3 um:zero minimum:500 name:LOAD_MISS_DLFB_FULL_CYCLES : Cycles stalled on replay condition - Load miss with dLFB full.
-+event:0x34 counters:0,1,2,3 um:zero minimum:500 name:LOAD_MISS_QUEUE_FULL_CYCLES : Cycles stalled on replay condition - Load miss with load queue full.
-+event:0x35 counters:0,1,2,3 um:zero minimum:500 name:LOAD_GUARDED_MISS_NOT_LAST_CYCLES : Cycles stalled on replay condition - Load guarded miss when the load is not yet at the bottom of the completion buffer.
-+event:0x36 counters:0,1,2,3 um:zero minimum:500 name:STORE_TRANSLATED_QUEUE_FULL_CYCLES : Cycles stalled on replay condition - Translate a store when the StQ is full.
-+event:0x37 counters:0,1,2,3 um:zero minimum:500 name:ADDRESS_COLLISION_CYCLES : Cycles stalled on replay condition - Address collision.
-+event:0x38 counters:0,1,2,3 um:zero minimum:500 name:DMMU_MISS_CYCLES : Cycles stalled on replay condition - DMMU miss.
-+event:0x39 counters:0,1,2,3 um:zero minimum:500 name:DMMU_BUSY_CYCLES : Cycles stalled on replay condition - DMMU busy.
-+event:0x3a counters:0,1,2,3 um:zero minimum:500 name:SECOND_PART_MISALIGNED_AFTER_MISS_CYCLES : Cycles stalled on replay condition - Second part of misaligned access when first part missed in cache.
-+event:0x3b counters:0,1,2,3 um:zero minimum:500 name:IL1_LOCKS : Number of cache lines locked in the iL1. (Counts a lock even if an overlock condition is encountered.)
-+event:0x3c counters:0,1,2,3 um:zero minimum:500 name:IL1_FETCH_RELOADS : This is historically used to determine icache miss rate (along with instructions completed) Reloads due to demand fetch.
-+event:0x3d counters:0,1,2,3 um:zero minimum:500 name:FETCHES : Counts the number of fetches that write at least one instruction to the instruction buffer. (With instruction fetched, can used to compute instructions-per-fetch)
-+event:0x3e counters:0,1,2,3 um:zero minimum:500 name:IMMU_TLB4K_RELOADS : iMMU TLB4K reloads
-+event:0x3f counters:0,1,2,3 um:zero minimum:500 name:IMMU_VSP_RELOADS : iMMU VSP reloads
-+event:0x40 counters:0,1,2,3 um:zero minimum:500 name:DMMU_TLB4K_RELOADS : dMMU TLB4K reloads
-+event:0x41 counters:0,1,2,3 um:zero minimum:500 name:DMMU_VSP_RELOADS : dMMU VSP reloads
-+event:0x42 counters:0,1,2,3 um:zero minimum:500 name:L2MMU_MISSES : Counts iTLB/dTLB error interrupt
-+event:0x43 counters:0,1,2,3 um:zero minimum:500 name:BIU_MASTER_REQUESTS : Number of master transactions. (Number of master TSs.)
-+event:0x44 counters:0,1,2,3 um:zero minimum:500 name:BIU_MASTER_I_REQUESTS : Number of master I-Side transactions. (Number of master I-Side TSs.)
-+event:0x45 counters:0,1,2,3 um:zero minimum:500 name:BIU_MASTER_D_REQUESTS : Number of master D-Side transactions. (Number of master D-Side TSs.)
-+event:0x46 counters:0,1,2,3 um:zero minimum:500 name:BIU_MASTER_D_CASTOUT_REQUESTS : Number of master D-Side non-program-demand castout transactions. This counts replacement pushes and snoop pushes. This does not count DCBF castouts. (Number of master D-side non-program-demand castout TSs.)
-+event:0x48 counters:0,1,2,3 um:zero minimum:500 name:SNOOP_REQUESTS : Number of externally generated snoop requests. (Counts snoop TSs.)
-+event:0x49 counters:0,1,2,3 um:zero minimum:500 name:SNOOP_HITS : Number of snoop hits on all D-side resources regardless of the cache state (modified, exclusive, or shared)
-+event:0x4a counters:0,1,2,3 um:zero minimum:500 name:SNOOP_PUSHES : Number of snoop pushes from all D-side resources. (Counts snoop ARTRY/WOPs.)
-+event:0x52 counters:0,1,2,3 um:zero minimum:500 name:PMC0_OVERFLOW : Counts the number of times PMC0[32] transitioned from 1 to 0.
-+event:0x53 counters:0,1,2,3 um:zero minimum:500 name:PMC1_OVERFLOW : Counts the number of times PMC1[32] transitioned from 1 to 0.
-+event:0x54 counters:0,1,2,3 um:zero minimum:500 name:PMC2_OVERFLOW : Counts the number of times PMC2[32] transitioned from 1 to 0.
-+event:0x55 counters:0,1,2,3 um:zero minimum:500 name:PMC3_OVERFLOW : Counts the number of times PMC3[32] transitioned from 1 to 0.
-+event:0x56 counters:0,1,2,3 um:zero minimum:500 name:INTERRUPTS : Number of interrupts taken
-+event:0x57 counters:0,1,2,3 um:zero minimum:500 name:EXTERNAL_INTERRUPTS : Number of external input interrupts taken
-+event:0x58 counters:0,1,2,3 um:zero minimum:500 name:CRITICAL_INTERRUPTS : Number of critical input interrupts taken
-+event:0x59 counters:0,1,2,3 um:zero minimum:500 name:SC_TRAP_INTERRUPTS : Number of system call and trap interrupts
-+event:0x5b counters:0,1,2,3 um:zero minimum:500 name:L2_LINEFILL_REQ : Number L2 Linefill requests
-+event:0x5c counters:0,1,2,3 um:zero minimum:500 name:L2_VICTIM_SELECT : Number L2 Victim selects
-+event:0x6e counters:0,1,2,3 um:zero minimum:500 name:L2_ACCESS : Number L2 cache accesses
-+event:0x6f counters:0,1,2,3 um:zero minimum:500 name:L2_HIT_ACCESS : Number L2 hit cache accesses
-+event:0x70 counters:0,1,2,3 um:zero minimum:500 name:L2_DATA_ACCESS : Number L2 data cache accesses
-+event:0x71 counters:0,1,2,3 um:zero minimum:500 name:L2_HIT_DATA_ACCESS : Number L2 hit data cache accesses
-+event:0x72 counters:0,1,2,3 um:zero minimum:500 name:L2_INST_ACCESS : Number L2 instruction cache accesses
-+event:0x73 counters:0,1,2,3 um:zero minimum:500 name:L2_HIT_INST_ACCESS : Number L2 hit instruction cache accesses
-+event:0x74 counters:0,1,2,3 um:zero minimum:500 name:L2_ALLOC : Number L2 cache allocations
-+event:0x75 counters:0,1,2,3 um:zero minimum:500 name:L2_DATA_ALLOC : Number L2 data cache allocations
-+event:0x76 counters:0,1,2,3 um:zero minimum:500 name:L2_DIRTY_DATA_ALLOC : Number L2 dirty data cache allocations
-+event:0x77 counters:0,1,2,3 um:zero minimum:500 name:L2_INST_ALLOC : Number L2 instruction cache allocations
-+event:0x78 counters:0,1,2,3 um:zero minimum:500 name:L2_UPDATE : Number L2 cache updates
-+event:0x79 counters:0,1,2,3 um:zero minimum:500 name:L2_CLEAN_UPDATE : Number L2 cache clean updates
-+event:0x7a counters:0,1,2,3 um:zero minimum:500 name:L2_DIRTY_UPDATE : Number L2 cache dirty updates
-+event:0x7b counters:0,1,2,3 um:zero minimum:500 name:L2_CLEAN_REDU_UPDATE : Number L2 cache clean redundant updates
-+event:0x7c counters:0,1,2,3 um:zero minimum:500 name:L2_DIRTY_REDU_UPDATE : Number L2 cache dirty redundant updates
-+event:0x7d counters:0,1,2,3 um:zero minimum:500 name:L2_LOCKS : Number L2 cache locks
-+event:0x7e counters:0,1,2,3 um:zero minimum:500 name:L2_CASTOUT : Number L2 cache castouts
-+event:0x7f counters:0,1,2,3 um:zero minimum:500 name:L2_HIT_DATA_DIRTY : Number L2 cache data dirty hits
-+event:0x82 counters:0,1,2,3 um:zero minimum:500 name:L2_INV_CLEAN : Number L2 cache invalidation of clean lines
-+event:0x83 counters:0,1,2,3 um:zero minimum:500 name:L2_INV_INCOHER : Number L2 cache invalidation of incoherent lines
-+event:0x84 counters:0,1,2,3 um:zero minimum:500 name:L2_INV_COHER : Number L2 cache invalidation of coherent lines
-+event:0x94 counters:0,1,2,3 um:zero minimum:500 name:DVT0 : Detection of write to DEVENT with DVT0 set
-+event:0x95 counters:0,1,2,3 um:zero minimum:500 name:DVT1 : Detection of write to DEVENT with DVT1 set
-+event:0x96 counters:0,1,2,3 um:zero minimum:500 name:DVT2 : Detection of write to DEVENT with DVT2 set
-+event:0x97 counters:0,1,2,3 um:zero minimum:500 name:DVT3 : Detection of write to DEVENT with DVT3 set
-+event:0x98 counters:0,1,2,3 um:zero minimum:500 name:DVT4 : Detection of write to DEVENT with DVT4 set
-+event:0x99 counters:0,1,2,3 um:zero minimum:500 name:DVT5 : Detection of write to DEVENT with DVT5 set
-+event:0x9a counters:0,1,2,3 um:zero minimum:500 name:DVT6 : Detection of write to DEVENT with DVT6 set
-+event:0x9b counters:0,1,2,3 um:zero minimum:500 name:DVT7 : Detection of write to DEVENT with DVT7 set
-+event:0x9c counters:0,1,2,3 um:zero minimum:500 name:CYCLES_NEXUS_STALLED : Number of completion cycles stalled due to Nexus FIFO full
-+event:0xb0 counters:0,1,2,3 um:zero minimum:500 name:DECORATED_LOAD : Number of decorated loads.
-+event:0xb1 counters:0,1,2,3 um:zero minimum:500 name:DECORATED_STORE : Number of decorated stores
-+event:0xb2 counters:0,1,2,3 um:zero minimum:500 name:LOAD_RETRY : Number of load retries
-+event:0xb3 counters:0,1,2,3 um:zero minimum:500 name:STWCX_SUCCESS : Number of successful stwcx. instructions
-+event:0xb4 counters:0,1,2,3 um:zero minimum:500 name:STWCX_UNSUCCESS : Number of unsuccessful stwcx. instructions
-diff --git a/events/ppc/e500mc/unit_masks b/events/ppc/e500mc/unit_masks
-new file mode 100644
-index 0000000..395c653
---- /dev/null
-+++ b/events/ppc/e500mc/unit_masks
-@@ -0,0 +1,4 @@
-+# e500 possible unit masks
-+#
-+name:zero type:mandatory default:0x0
-+ 0x0 No unit mask
-diff --git a/libop/op_cpu_type.c b/libop/op_cpu_type.c
-index 89d5a92..7d50a2d 100644
---- a/libop/op_cpu_type.c
-+++ b/libop/op_cpu_type.c
-@@ -125,6 +125,7 @@ static struct cpu_descr const cpu_descrs[MAX_CPU_TYPE] = {
- { "AMD64 generic", "x86-64/generic", CPU_AMD64_GENERIC, 4 },
- { "IBM Power Architected Events V1", "ppc64/architected_events_v1", CPU_PPC64_ARCH_V1, 6 },
- { "ppc64 POWER8", "ppc64/power8", CPU_PPC64_POWER8, 6 },
-+ { "e500mc", "ppc/e500mc", CPU_PPC_E500MC, 4 },
- };
-
- static size_t const nr_cpu_descrs = sizeof(cpu_descrs) / sizeof(struct cpu_descr);
-diff --git a/libop/op_cpu_type.h b/libop/op_cpu_type.h
-index aeb6bb2..10f000b 100644
---- a/libop/op_cpu_type.h
-+++ b/libop/op_cpu_type.h
-@@ -105,6 +105,7 @@ typedef enum {
- CPU_AMD64_GENERIC, /**< AMD64 Generic */
- CPU_PPC64_ARCH_V1, /** < IBM Power architected events version 1 */
- CPU_PPC64_POWER8, /**< ppc64 POWER8 family */
-+ CPU_PPC_E500MC, /**< e500mc */
- MAX_CPU_TYPE
- } op_cpu;
-
-diff --git a/libop/op_events.c b/libop/op_events.c
-index bb86833..638dc5c 100644
---- a/libop/op_events.c
-+++ b/libop/op_events.c
-@@ -1308,6 +1308,7 @@ void op_default_event(op_cpu cpu_type, struct op_default_event_descr * descr)
-
- case CPU_PPC_E500:
- case CPU_PPC_E500_2:
-+ case CPU_PPC_E500MC:
- case CPU_PPC_E300:
- descr->name = "CPU_CLK";
- break;
-diff --git a/utils/ophelp.c b/utils/ophelp.c
-index 1b913ca..0647360 100644
---- a/utils/ophelp.c
-+++ b/utils/ophelp.c
-@@ -753,6 +753,7 @@ int main(int argc, char const * argv[])
-
- case CPU_PPC_E500:
- case CPU_PPC_E500_2:
-+ case CPU_PPC_E500MC:
- event_doc =
- "See PowerPC e500 Core Complex Reference Manual\n"
- "Chapter 7: Performance Monitor\n"
---
diff --git a/meta/recipes-kernel/oprofile/oprofile/0001-Add-rmb-definition-for-AArch64-architecture.patch b/meta/recipes-kernel/oprofile/oprofile/0001-Add-rmb-definition-for-AArch64-architecture.patch
deleted file mode 100644
index a2385cd2b2..0000000000
--- a/meta/recipes-kernel/oprofile/oprofile/0001-Add-rmb-definition-for-AArch64-architecture.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-From 27edaef9c6d66dfc324630ef40cb27e78031eeeb Mon Sep 17 00:00:00 2001
-From: Marcin Juszkiewicz <marcin.juszkiewicz@linaro.org>
-Date: Tue, 15 Jan 2013 07:37:33 +0100
-Subject: [PATCH] Add rmb() definition for AArch64 architecture
-
-Signed-off-by: Marcin Juszkiewicz <marcin.juszkiewicz@linaro.org>
-
-Upstream-Status: backport
----
- libperf_events/operf_utils.h | 5 +++++
- 1 file changed, 5 insertions(+)
-
-diff --git a/libperf_events/operf_utils.h b/libperf_events/operf_utils.h
-index 815d51d..2df00b7 100644
---- a/libperf_events/operf_utils.h
-+++ b/libperf_events/operf_utils.h
-@@ -148,6 +148,11 @@ void op_release_resources(void);
- #define cpu_relax() asm volatile("":::"memory")
- #endif
-
-+#ifdef __aarch64__
-+#define rmb() asm volatile("dmb ld" ::: "memory")
-+#define cpu_relax() asm volatile("yield" ::: "memory")
-+#endif
-+
- #ifdef __mips__
- #include <asm/unistd.h>
- #define rmb() asm volatile( \
---
-1.8.0
-
diff --git a/meta/recipes-kernel/oprofile/oprofile/0001-Tidy-powerpc64-bfd-target-check.patch b/meta/recipes-kernel/oprofile/oprofile/0001-Tidy-powerpc64-bfd-target-check.patch
deleted file mode 100644
index 93c62400cf..0000000000
--- a/meta/recipes-kernel/oprofile/oprofile/0001-Tidy-powerpc64-bfd-target-check.patch
+++ /dev/null
@@ -1,123 +0,0 @@
-Upstream-Status: Backport
-
-From 63b5692aace5ff6022f892822b4bfdc51ed25bfb Mon Sep 17 00:00:00 2001
-From: Alan Modra <amodra@gmail.com>
-Date: Fri, 2 May 2014 07:54:08 -0500
-Subject: [PATCH] Tidy powerpc64 bfd target check
-
-Testing for a bfd_target vector might (will!) break. See
-https://sourceware.org/ml/binutils/2014-04/msg00283.html
-
-It's safer to ask BFD for the target name. I left the direct target
-vector checks in configure tests, and updated them, even though the
-target vector is no longer used in oprofile code, because a run-time
-configure test for powerpc64 support in bfd:
- #include <bfd.h>
- int main(void)
- { return !bfd_find_target("elf64-powerpc", (void *)0); }
-unfortunately isn't possible when cross-compiling.
-
-The bfd_target vector tests could be omitted if we aren't bothered by
-the small runtime overhead of a strncmp on targets other than
-powerpc64.
-
- * libutil++/bfd_support.cpp (get_synth_symbols): Don't check for
- ppc64 target vector, use bfd_get_target to return the target
- name instead.
- * m4/binutils.m4: Modernize bfd_get_synthetic_symtab checks to
- use AC_LINK_IFELSE. Check for either powerpc_elf64_vec or
- bfd_elf64_powerpc_vec.
-
-Signed-off-by: Alan Modra <amodra@gmail.com>
----
- libutil++/bfd_support.cpp | 10 +++++++--
- m4/binutils.m4 | 50 ++++++++++++++++++++++-----------------------
- 2 files changed, 33 insertions(+), 27 deletions(-)
-
-Index: oprofile-0.9.9/libutil++/bfd_support.cpp
-===================================================================
---- oprofile-0.9.9.orig/libutil++/bfd_support.cpp 2013-07-29 08:55:06.000000000 -0700
-+++ oprofile-0.9.9/libutil++/bfd_support.cpp 2014-05-02 09:12:05.761146347 -0700
-@@ -633,10 +633,16 @@
-
- bool bfd_info::get_synth_symbols()
- {
-- extern const bfd_target bfd_elf64_powerpc_vec;
-- extern const bfd_target bfd_elf64_powerpcle_vec;
-- bool is_elf64_powerpc_target = (abfd->xvec == &bfd_elf64_powerpc_vec)
-- || (abfd->xvec == &bfd_elf64_powerpcle_vec);
-+ const char* targname = bfd_get_target(abfd);
-+ // Match elf64-powerpc and elf64-powerpc-freebsd, but not
-+ // elf64-powerpcle. elf64-powerpcle is a different ABI without
-+ // function descriptors, so we don't need the synthetic
-+ // symbols to have function code marked by a symbol.
-+ bool is_elf64_powerpc_target = (!strncmp(targname, "elf64-powerpc", 13)
-+ && (targname[13] == 0
-+ || targname[13] == '-'));
-+
-+
-
- if (!is_elf64_powerpc_target)
- return false;
-Index: oprofile-0.9.9/m4/binutils.m4
-===================================================================
---- oprofile-0.9.9.orig/m4/binutils.m4 2013-07-29 08:55:07.000000000 -0700
-+++ oprofile-0.9.9/m4/binutils.m4 2014-05-02 09:07:32.471148147 -0700
-@@ -22,32 +22,32 @@
-
- AC_LANG_PUSH(C)
- # Determine if bfd_get_synthetic_symtab macro is available
--OS="`uname`"
--if test "$OS" = "Linux"; then
-- AC_MSG_CHECKING([whether bfd_get_synthetic_symtab() exists in BFD library])
-- rm -f test-for-synth
-- AC_LANG_CONFTEST(
-- [AC_LANG_PROGRAM([[#include <bfd.h>]],
-- [[asymbol * synthsyms; bfd * ibfd = 0;
-- long synth_count = bfd_get_synthetic_symtab(ibfd, 0, 0, 0, 0, &synthsyms);
-- extern const bfd_target bfd_elf64_powerpc_vec;
-- extern const bfd_target bfd_elf64_powerpcle_vec;
-- char * ppc_name = bfd_elf64_powerpc_vec.name;
-- char * ppcle_name = bfd_elf64_powerpcle_vec.name;
-- printf("%s %s\n", ppc_name, ppcle_name);]])
-- ])
-- $CC conftest.$ac_ext $CFLAGS $LDFLAGS $LIBS -o test-for-synth > /dev/null 2>&1
-- if test -f test-for-synth; then
-- echo "yes"
-- SYNTHESIZE_SYMBOLS='1'
-- else
-- echo "no"
-- SYNTHESIZE_SYMBOLS='0'
-- fi
-- AC_DEFINE_UNQUOTED(SYNTHESIZE_SYMBOLS, $SYNTHESIZE_SYMBOLS, [Synthesize special symbols when needed])
-- rm -f test-for-synth*
-+AC_MSG_CHECKING([whether bfd_get_synthetic_symtab() exists in BFD library])
-+AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <bfd.h>
-+ ]],
-+ [[asymbol * synthsyms; bfd * ibfd = 0;
-+ long synth_count = bfd_get_synthetic_symtab(ibfd, 0, 0, 0, 0, &synthsyms);
-+ extern const bfd_target powerpc_elf64_vec;
-+ char *ppc_name = powerpc_elf64_vec.name;
-+ printf("%s\n", ppc_name);
-+ ]])],
-+ [AC_MSG_RESULT([yes])
-+ SYNTHESIZE_SYMBOLS=2],
-+ [AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <bfd.h>
-+ ]],
-+ [[asymbol * synthsyms; bfd * ibfd = 0;
-+ long synth_count = bfd_get_synthetic_symtab(ibfd, 0, 0, 0, 0, &synthsyms);
-+ extern const bfd_target bfd_elf64_powerpc_vec;
-+ char *ppc_name = bfd_elf64_powerpc_vec.name;
-+ printf("%s\n", ppc_name);
-+ ]])],
-+ [AC_MSG_RESULT([yes])
-+ SYNTHESIZE_SYMBOLS=1],
-+ [AC_MSG_RESULT([no])
-+ SYNTHESIZE_SYMBOLS=0])
-+ ])
-+AC_DEFINE_UNQUOTED(SYNTHESIZE_SYMBOLS, $SYNTHESIZE_SYMBOLS, [Synthesize special symbols when needed])
-
--fi
- AC_LANG_POP(C)
- ]
- )
diff --git a/meta/recipes-kernel/oprofile/oprofile/0002-Add-freescale-e6500-support.patch b/meta/recipes-kernel/oprofile/oprofile/0002-Add-freescale-e6500-support.patch
deleted file mode 100644
index 9b2ae042c6..0000000000
--- a/meta/recipes-kernel/oprofile/oprofile/0002-Add-freescale-e6500-support.patch
+++ /dev/null
@@ -1,364 +0,0 @@
-From b91794fd855177946719b34ea5cd3822c7993caa Mon Sep 17 00:00:00 2001
-From: Ting Liu <b28495@freescale.com>
-Date: Thu, 5 Sep 2013 07:45:52 -0500
-Subject: [PATCH 2/2] Add freescale e6500 support
-
-Upstream-Status: Backport
-
-Signed-off-by: Zhenhua Luo <zhenhua.luo@freescale.com>
-Signed-off-by: Ting Liu <b28495@freescale.com>
----
- events/Makefile.am | 1 +
- events/ppc/e6500/events | 266 +++++++++++++++++++++++++++++++++++++++++++
- events/ppc/e6500/unit_masks | 4 +
- libop/op_cpu_type.c | 1 +
- libop/op_cpu_type.h | 1 +
- libop/op_events.c | 1 +
- utils/ophelp.c | 1 +
- 7 files changed, 275 insertions(+), 0 deletions(-)
- create mode 100644 events/ppc/e6500/events
- create mode 100644 events/ppc/e6500/unit_masks
-
-diff --git a/events/Makefile.am b/events/Makefile.am
-index e496f98..d91d44b 100644
---- a/events/Makefile.am
-+++ b/events/Makefile.am
-@@ -77,6 +77,7 @@ event_files = \
- ppc/e500/events ppc/e500/unit_masks \
- ppc/e500v2/events ppc/e500v2/unit_masks \
- ppc/e500mc/events ppc/e500mc/unit_masks \
-+ ppc/e6500/events ppc/e6500/unit_masks \
- ppc/e300/events ppc/e300/unit_masks \
- tile/tile64/events tile/tile64/unit_masks \
- tile/tilepro/events tile/tilepro/unit_masks \
-diff --git a/events/ppc/e6500/events b/events/ppc/e6500/events
-new file mode 100644
-index 0000000..f34f82d
---- /dev/null
-+++ b/events/ppc/e6500/events
-@@ -0,0 +1,266 @@
-+# e6500 Events
-+#
-+# Copyright (C) 2012 Freescale Semiconductor, Inc.
-+#
-+event:0x1 counters:0,1,2,3,4,5 um:zero minimum:100 name:CPU_CLK : Cycles
-+event:0x2 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_INSNS : Completed Instructions (0, 1, or 2 per cycle)
-+event:0x3 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_OPS : Completed Micro-ops
-+event:0x5 counters:0,1,2,3,4,5 um:zero minimum:500 name:DECODED_OPS : Micro-ops decoded
-+event:0x6 counters:0,1,2,3,4,5 um:zero minimum:500 name:TRANSITIONS_PM_EVENT : 0 to 1 transitions on the pm_event input
-+event:0x7 counters:0,1,2,3,4,5 um:zero minimum:500 name:CPU_CLK_PM_EVENT : Processor cycles that occur when the pm_event input is asserted
-+event:0x8 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_BRANCHES : Branch Instructions completed
-+event:0x9 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_LOAD_OPS : Load micro-ops completed
-+event:0xa counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_STORE_OPS : Store micro-ops completed
-+event:0xb counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETION_REDIRECTS : Number of completion buffer redirects
-+event:0xc counters:0,1,2,3,4,5 um:zero minimum:500 name:BRANCHES_FINISHED : Branches finished
-+event:0xd counters:0,1,2,3,4,5 um:zero minimum:500 name:TAKEN_BRANCHES_FINISHED : Taken branches finished
-+event:0xe counters:0,1,2,3,4,5 um:zero minimum:500 name:TAKEN_BRANCHES_FINISHED_NOT_BTB : Finished unconditional branches that miss the BTB
-+event:0xf counters:0,1,2,3,4,5 um:zero minimum:500 name:BRANCHES_MISPREDICTED : Branch instructions mispredicted due to direction, target, or IAB prediction
-+event:0x10 counters:0,1,2,3,4,5 um:zero minimum:500 name:BRANCHES_MISPREDICTED_DIRECTION : Branches mispredicted due to direction prediction
-+event:0x11 counters:0,1,2,3,4,5 um:zero minimum:500 name:BTB_HITS : Branches that hit in the BTB, or missed but are not taken
-+event:0x12 counters:0,1,2,3,4,5 um:zero minimum:500 name:DECODE_STALLED : Cycles the instruction buffer was not empty, but 0 instructions decoded
-+event:0x13 counters:0,1,2,3,4,5 um:zero minimum:500 name:ISSUE_STALLED : Cycles the SFX/CFX issue queue is not empty but 0 instructions issued
-+event:0x14 counters:0,1,2,3,4,5 um:zero minimum:500 name:BRANCH_ISSUE_STALLED : Cycles the branch buffer is not empty but 0 instructions issued
-+event:0x15 counters:0,1,2,3,4,5 um:zero minimum:500 name:SFX0_SCHEDULE_STALLED : Cycles SFX0 is not empty but 0 instructions scheduled
-+event:0x16 counters:0,1,2,3,4,5 um:zero minimum:500 name:SFX1_SCHEDULE_STALLED : Cycles SFX1 is not empty but 0 instructions scheduled
-+event:0x17 counters:0,1,2,3,4,5 um:zero minimum:500 name:CFX_SCHEDULE_STALLED : Cycles CFX is not empty but 0 instructions scheduled
-+event:0x18 counters:0,1,2,3,4,5 um:zero minimum:500 name:LSU_SCHEDULE_STALLED : Cycles LSU is not empty but 0 instructions scheduled
-+event:0x19 counters:0,1,2,3,4,5 um:zero minimum:500 name:BU_SCHEDULE_STALLED : Cycles BU is not empty but 0 instructions scheduled
-+event:0x1a counters:0,1,2,3,4,5 um:zero minimum:500 name:TOTAL_TRANSLATED : Total LSU micro-ops that reach the second stage of the LSU
-+event:0x1b counters:0,1,2,3,4,5 um:zero minimum:500 name:LOADS_TRANSLATED : Cacheable load micro-ops translated.1 (Does not include WT)
-+event:0x1c counters:0,1,2,3,4,5 um:zero minimum:500 name:STORES_TRANSLATED : Cacheable store micro-ops translated.1 (Does not include WT)
-+event:0x1d counters:0,1,2,3,4,5 um:zero minimum:500 name:TOUCHES_TRANSLATED : Cacheable touch instructions translated. Includes: dcbt / dcbtep dcbtst / dcbtstep icbt ct=2
-+event:0x1e counters:0,1,2,3,4,5 um:zero minimum:500 name:CACHEOPS_TRANSLATED : Number of dcba, dcbf, dcbst, and dcbz instructions translated (e500 traps on dcbi)
-+event:0x1f counters:0,1,2,3,4,5 um:zero minimum:500 name:CACHEINHIBITED_ACCESSES_TRANSLATED : Number of cache inhibited accesses translated
-+event:0x20 counters:0,1,2,3,4,5 um:zero minimum:500 name:GUARDED_LOADS_TRANSLATED : Number of guarded loads translated
-+event:0x21 counters:0,1,2,3,4,5 um:zero minimum:500 name:WRITETHROUGH_STORES_TRANSLATED : Number of write-through stores translated
-+event:0x22 counters:0,1,2,3,4,5 um:zero minimum:500 name:MISALIGNED_ACCESSES_TRANSLATED : Number of misaligned load or store accesses translated.
-+event:0x23 counters:0,1,2,3,4,5 um:zero minimum:500 name:FETCH_2X4_HITS : Each fetch retrieves up to 8 instructions, but only the first 4 are required. This event increments if at least one instruction of the second 4 are actually used.
-+event:0x24 counters:0,1,2,3,4,5 um:zero minimum:500 name:FETCH_HITS_ON_PREFETCHES : Fetch hits on instruction prefetch when the data is still in the ILFB.
-+event:0x25 counters:0,1,2,3,4,5 um:zero minimum:500 name:GENERATED_FETCH_PREFETCHES : Number of prefetches generated.
-+event:0x29 counters:0,1,2,3,4,5 um:zero minimum:500 name:DL1_RELOADS : This is historically used to determine dcache miss rate (along with loads/stores completed). This counts dL1 reloads for any reason.
-+event:0x2c counters:0,1,2,3,4,5 um:zero minimum:500 name:LOAD_MISS_WITH_LOAD_QUEUE_FULL : Counts number of stalls; Com:52 counts cycles stalled. Includes: cacheable loads, CI loads, loadec, larx, touches, ibll, ibsl,ibllsl
-+event:0x2d counters:0,1,2,3,4,5 um:zero minimum:500 name:LOAD_GUARDED_MISS_NOT_LAST_REPLAYS : Load guarded miss when the load is not yet at the bottom of the completion buffer.
-+event:0x2e counters:0,1,2,3,4,5 um:zero minimum:500 name:STORE_TRANSLATED_QUEUE_FULL_REPLAYS : Translate a store when the StQ is full.
-+event:0x2f counters:0,1,2,3,4,5 um:zero minimum:500 name:ADDRESS_COLLISION_REPLAYS : Address collision.
-+event:0x30 counters:0,1,2,3,4,5 um:zero minimum:500 name:DTLB_MISS_REPLAYS : Counts number of stalls; Com:56 counts cycles stalled.
-+event:0x31 counters:0,1,2,3,4,5 um:zero minimum:500 name:DTLB_BUSY_REPLAYS : Counts number of stalls; Com:57 counts cycles stalled.
-+event:0x32 counters:0,1,2,3,4,5 um:zero minimum:500 name:SECOND_PART_MISALIGNED_AFTER_MISS_REPLAYS : Second part of misaligned access when first part missed in cache.
-+event:0x34 counters:0,1,2,3,4,5 um:zero minimum:500 name:LOAD_MISS_QUEUE_FULL_CYCLES : Cycles stalled on replay condition - Load miss with load queue full.
-+event:0x35 counters:0,1,2,3,4,5 um:zero minimum:500 name:LOAD_GUARDED_MISS_NOT_LAST_CYCLES : Cycles stalled on replay condition - Load guarded miss when the load is not yet at the bottom of the completion buffer.
-+event:0x36 counters:0,1,2,3,4,5 um:zero minimum:500 name:STORE_TRANSLATED_QUEUE_FULL_CYCLES : Cycles stalled on replay condition - Translate a store when the StQ is full.
-+event:0x37 counters:0,1,2,3,4,5 um:zero minimum:500 name:ADDRESS_COLLISION_CYCLES : Cycles stalled on replay condition - Address collision.
-+event:0x38 counters:0,1,2,3,4,5 um:zero minimum:500 name:DTLB_MISS_CYCLES : Cycles stalled on replay condition - DTLB miss.
-+event:0x39 counters:0,1,2,3,4,5 um:zero minimum:500 name:DTLB_BUSY_CYCLES : Cycles stalled on replay condition - DTLB busy.
-+event:0x3a counters:0,1,2,3,4,5 um:zero minimum:500 name:SECOND_PART_MISALIGNED_AFTER_MISS_CYCLES : Cycles stalled on replay condition - Second part of misaligned access when first part missed in cache.
-+event:0x3c counters:0,1,2,3,4,5 um:zero minimum:500 name:IL1_FETCH_RELOADS : This is historically used to determine icache miss rate (along with instructions completed) Reloads due to demand fetch.
-+event:0x3d counters:0,1,2,3,4,5 um:zero minimum:500 name:FETCHES : Counts fetches that write at least one instruction to the Instruction Buffer.
-+event:0x3e counters:0,1,2,3,4,5 um:zero minimum:500 name:IMMU_TLB4K_RELOADS : iMMU TLB4K reloads
-+event:0x3f counters:0,1,2,3,4,5 um:zero minimum:500 name:IMMU_VSP_RELOADS : iMMU VSP reloads
-+event:0x40 counters:0,1,2,3,4,5 um:zero minimum:500 name:DMMU_TLB4K_RELOADS : dMMU TLB4K reloads
-+event:0x41 counters:0,1,2,3,4,5 um:zero minimum:500 name:DMMU_VSP_RELOADS : dMMU VSP reloads
-+event:0x42 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2MMU_MISSES : Counts iTLB/dTLB error interrupt
-+event:0x43 counters:0,1,2,3,4,5 um:zero minimum:500 name:TAKEN_BRANCHES : Completed branch instructions that were taken.
-+event:0x44 counters:0,1,2,3,4,5 um:zero minimum:500 name:TAKEN_BLR : Completed blr instructions that were taken.
-+event:0x45 counters:0,1,2,3,4,5 um:zero minimum:500 name:BTB_TARGET_MISPREDICT : Number of target mispredicts (BTB).
-+event:0x46 counters:0,1,2,3,4,5 um:zero minimum:500 name:MISPREDICT_TARGET_BLR : Number of link stack mispredicts (LS).
-+event:0x47 counters:0,1,2,3,4,5 um:zero minimum:500 name:TAKEN_BTB_BUT_MISS : Number of BTB misses, but taken (BTB allocates).
-+event:0x52 counters:0,1,2,3,4,5 um:zero minimum:500 name:PMC0_OVERFLOW : Counts the number of times PMC0[32] transitioned from 1 to 0.
-+event:0x53 counters:0,1,2,3,4,5 um:zero minimum:500 name:PMC1_OVERFLOW : Counts the number of times PMC1[32] transitioned from 1 to 0.
-+event:0x54 counters:0,1,2,3,4,5 um:zero minimum:500 name:PMC2_OVERFLOW : Counts the number of times PMC2[32] transitioned from 1 to 0.
-+event:0x55 counters:0,1,2,3,4,5 um:zero minimum:500 name:PMC3_OVERFLOW : Counts the number of times PMC3[32] transitioned from 1 to 0.
-+event:0x56 counters:0,1,2,3,4,5 um:zero minimum:500 name:INTERRUPTS : Number of interrupts taken
-+event:0x57 counters:0,1,2,3,4,5 um:zero minimum:500 name:EXTERNAL_INTERRUPTS : Number of external input interrupts taken
-+event:0x58 counters:0,1,2,3,4,5 um:zero minimum:500 name:CRITICAL_INTERRUPTS : Number of critical input interrupts taken
-+event:0x59 counters:0,1,2,3,4,5 um:zero minimum:500 name:SC_TRAP_INTERRUPTS : Number of system call and trap interrupts
-+event:0x5a counters:0,1,2,3,4,5 um:zero minimum:500 name:TBL_BIT_TRANS_PMGC0 : Counts transitions of the TBL bit selected by PMGC0[TBSEL].
-+event:0x5b counters:0,1,2,3,4,5 um:zero minimum:500 name:PMC4_OVERFLOW : Counts the number of times PMC4[32] transitioned from 1 to 0.
-+event:0x5c counters:0,1,2,3,4,5 um:zero minimum:500 name:PMC5_OVERFLOW : Counts the number of times PMC5[32] transitioned from 1 to 0.
-+event:0x61 counters:0,1,2,3,4,5 um:zero minimum:500 name:L1_STASH_HIT : Stash hits in L1 Data Cache.
-+event:0x63 counters:0,1,2,3,4,5 um:zero minimum:500 name:L1_STASH_REQ : Stash requests to L1 Data Cache.
-+event:0x64 counters:0,1,2,3,4,5 um:zero minimum:500 name:TIMES_LSU_THREAD_PRIO_SWTICHED : Number of times the Load Store Unit thread priority switched based on resource collisions.
-+event:0x65 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_THREAD_REQ_FPU_DENIED : Number of cycles both threads had Floating Point Unit requests and one was denied.
-+event:0x66 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_THREAD_REQ_VPERM_DENIED : Number of cycles both threads had Altivec Permute requests and one was denied.
-+event:0x67 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_THREAD_REQ_VGEN_DENIED : Number of cycles both threads had Altivec General requests and one was denied.
-+event:0x68 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_THREAD_REQ_CFX_DENIED : Number of cycles both threads had Complex Fixed-Point Unit requests and one was denied.
-+event:0x69 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_THREAD_REQ_FETCH_DENIED : Number of cycles both threads made a Fetch request to the L1 Instruction Cache and one thread wins arbitration.
-+event:0x6e counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_LSU_ISSUE_STALLED : Cycles the LSU issue queue is not empty but 0 instructions issued.
-+event:0x6f counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_FPU_ISSUE_STALLED : Cycles the FPU issue queue is not empty but 0 instructions issued.
-+event:0x70 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_ALTIVEC_ISSUE_STALLED : Cycles the AltiVec issue queue is not empty but 0 instructions issued.
-+event:0x71 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_FPU_SCHEDULE_STALLED : Cycles FPU is not empty but 0 instructions scheduled.
-+event:0x72 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_VPERM_SCHEDULE_STALLED : Cycles VPERM is not empty but 0 instructions scheduled.
-+event:0x73 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_VGEN_SCHEDULE_STALLED : Cycles VGEN is not empty but 0 instructions scheduled.
-+event:0x74 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_VPU_INSTRUCTION_WAIT_FOR_OPERA : Cycles VPU instruction waits for operands.
-+event:0x75 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_VFPU_INSTRUCTION_WAIT_FOR_OPERA : Cycles VFPU instruction waits for operands.
-+event:0x76 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_VSFX_INSTRUCTION_WAIT_FOR_OPERA : Cycles VSFX instruction waits for operands
-+event:0x77 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_VCFX_INSTRUCTION_WAIT_FOR_OPERA : Cycles VCFX instruction waits for operands.
-+event:0x7a counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_IB_EMPT : Number of cycles the Instruction Buffer is empty
-+event:0x7b counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_IB_FULL : Number of cycles the Instruction Buffer is full enough such that fetch stops fetching.
-+event:0x7c counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_CB_EMPT : Number of cycles the Completion Buffer is empty.
-+event:0x7d counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_CB_FULL : Number of cycles the Completion Buffer is full enough such that decode stops.
-+event:0x7e counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_PRESYNC_SI_IB : Number of cycles a pre-sync serialized instruction holds in the Instruction Buffer and is not decoded.
-+event:0x7f counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_CLK_0_INSTRUCTIONS : Increments if 0 instructions (micro-ops) completed.
-+event:0x80 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_CLK_1_INSTRUCTIONS : Increments if 1 instruction (micro-op) completed.
-+event:0x81 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_CLK_2_INSTRUCTIONS : Increments if 2 instructions (micro-ops) completed.
-+event:0x88 counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_IAC5S : Every valid IAC5 detection.
-+event:0x89 counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_IAC6S : Every valid IAC6 detection.
-+event:0x8a counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_IAC7S : Every valid IAC7 detection.
-+event:0x8b counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_IAC8S : Every valid IAC8 detection.
-+event:0x8c counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_IAC1S : Every valid IAC1 detection.
-+event:0x8d counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_IAC2S : Every valid IAC2 detection.
-+event:0x8e counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_IAC3S : Every valid IAC3 detection.
-+event:0x8f counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_IAC4S : Every valid IAC4 detection.
-+event:0x90 counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_DAC1S : Every valid DAC1 detection.
-+event:0x91 counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_DAC2S : Every valid DAC2 detection.
-+event:0x94 counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_DVT0 : Detection of a write to DEVENT SPR with DVT0 set.
-+event:0x95 counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_DVT1 : Detection of a write to DEVENT SPR with DVT1 set.
-+event:0x96 counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_DVT2 : Detection of a write to DEVENT SPR with DVT2 set.
-+event:0x97 counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_DVT3 : Detection of a write to DEVENT SPR with DVT3 set.
-+event:0x98 counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_DVT4 : Detection of a write to DEVENT SPR with DVT4 set.
-+event:0x99 counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_DVT5 : Detection of a write to DEVENT SPR with DVT5 set.
-+event:0x9a counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_DVT6 : Detection of a write to DEVENT SPR with DVT6 set.
-+event:0x9b counters:0,1,2,3,4,5 um:zero minimum:500 name:DETECTED_DVT7 : Detection of a write to DEVENT SPR with DVT7 set.
-+event:0x9c counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_COMPLETION_STALLED : Number of completion cycles stalled due to Nexus FIFO full.
-+event:0xa1 counters:0,1,2,3,4,5 um:zero minimum:500 name:FPU_FINISH : FPU finish.
-+event:0xa2 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_FPU_DIV : Counts once for every cycle of divide execution. (fdivs and fdiv).
-+event:0xa3 counters:0,1,2,3,4,5 um:zero minimum:500 name:FPU_DENORM_INPUT : Counts extra cycles of delay due to denormalized inputs. If there is one denormalized operand, this is incremented 4 times; two operands increment it 5 times. This shows the real penalty due to denorms, not just how often they occur.
-+event:0xa4 counters:0,1,2,3,4,5 um:zero minimum:500 name:FPU_DENORM_OUTPUT : FPU denorm output.
-+event:0xa5 counters:0,1,2,3,4,5 um:zero minimum:500 name:FPU_FPSCR_FULL_STALL : FPU FPSCR stall.
-+event:0xa6 counters:0,1,2,3,4,5 um:zero minimum:500 name:FPU_PIPE_SYNC_STALL : Synchronization-op stalls: count once for each cycle that a "break-before" FPU op is in the RS/issue stage but cannot issue. Also count once for each cycle that an FPU op is in the RS/issue stage but cannot issue due to "break-after" of an FPU op currently in progress.
-+event:0xa7 counters:0,1,2,3,4,5 um:zero minimum:500 name:FPU_INPUT_DATA_STALL : FPU data-ready stall: cycles in which there is an op in the RS/issue stage that cannot issue because one or more of its operands is not yet available.
-+event:0xa8 counters:0,1,2,3,4,5 um:zero minimum:500 name:FPU_INSTRUCTIONS_GEN_FLAG : FPU instruction sets FPSCR[FEX].
-+event:0xac counters:0,1,2,3,4,5 um:zero minimum:500 name:PW20_CNT : Number of times the core enters the PW20 power management state.
-+event:0xb0 counters:0,1,2,3,4,5 um:zero minimum:500 name:DECORATED_LOADS : Number of decorated loads to cache inhibited memory performed.
-+event:0xb1 counters:0,1,2,3,4,5 um:zero minimum:500 name:DECORATED_STORES : Number of decorated stores to cache inhibited memory performed.
-+event:0xb3 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_INSTRUCTIONS_SUCC : Number of successful stbcx., sthcx., stwcx., or stdcx. instructions.
-+event:0xb4 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_INSTRUCTIONS_UNSUCC : Number of unsuccessful stbcx., sthcx., stwcx., or stdcx. instructions.
-+event:0xb5 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_LSU_MICROOPS : Completed Load Store Unit micro-ops. Every micro-op that goes down the LSU pipe. Includes: GPR loads / GPR stores, FPR loads / FPR stores, VR loads / VR stores, cache ops, memory barriers, and other LSU ops (dsn, msgsnd, mvidsplt, mviwsplt, tlbilx, tlbivax, tlbsync).
-+event:0xb6 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_GPR_LOADS : GPR load micro-ops completed. This event only counts once for misaligns. Note that lmw that causes a fault may end up double-counting micro-ops -- once for first pass, once for second pass.
-+event:0xb7 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_GPR_STORES : GPR store micro-ops completed. This event only counts once for misaligns. Note that stmw that causes a fault may end up double-counting micro-ops -- once for first pass, once for second pass.
-+event:0xb8 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_CACHEOPS : Cache ops completed. Includes: dcba / dcbal, dcbf / dcbfep, dcbi, dcblc / dcblq, dcbst / dcbstep, dcbt / dcbtep / dcbtls, dcbtst / dcbtstep / dcbtstls, dcbz / dcbzep / dcbzl / dcbzlep, icbi / icbiep, icblc / icblq., icbt / icbtls
-+event:0xb9 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_MEM_BARRIERS : Memory barriers completed. Includes: msync (sync, lwsync, elemental barriers) mbar (eieio) miso.
-+event:0xba counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_SFX_MICROOPS : SFX micro-ops completed.
-+event:0xbb counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_SINCLK_SFX_MICROOPS : SFX single-cycle micro-ops completed.
-+event:0xbc counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_DBLCLK_SFX_MICROOPS : SFX double-cycle micro-ops completed.
-+event:0xbe counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_CFX_INSTRUCTIONS : CFX instructions completed.
-+event:0xbf counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_SFX_CFX_INSTRUCTIONS : SFX or CFX instructions completed.
-+event:0xc0 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_FPU_INSTRUCTIONS : FPU instructions completed.
-+event:0xc1 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_FPR_MICROOPS_LOADS : FPR load micro-ops completed.
-+event:0xc2 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_FPR_MICROOPS_STORES : FPR store micro-ops completed.
-+event:0xc3 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_FPR_MICROOPS_LOADS_STORES : FPR load and store micro-ops completed.
-+event:0xc4 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_FPR_SINPRECISE_LOADS_STORES : FPR single-precision load and store micro-ops completed.
-+event:0xc5 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_FPR_DBLPRECISE_LOADS_STORES : FPR double-precision load and store micro-ops completed.
-+event:0xc6 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_ALTIVEC_INSTRUCTIONS : AltiVec instructions completed. (non-LSU).
-+event:0xc7 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_ALTIVEC_VSFX_INSTRUCTIONS : AltiVec VSFX instructions completed.
-+event:0xc8 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_ALTIVEC_VCFX_INSTRUCTIONS : AltiVec VCFX instructions completed.
-+event:0xc9 counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_ALTIVEC_VPU_INSTRUCTIONS : AltiVec VPU instructions completed.
-+event:0xca counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_ALTIVEC_VFPU_INSTRUCTIONS : AltiVec VFPU instructions completed.
-+event:0xcb counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_VR_LOADS_MICROOPS : VR load micro-ops completed.
-+event:0xcc counters:0,1,2,3,4,5 um:zero minimum:500 name:COMPLETED_VR_STORES_MICROOPS : VR store micro-ops completed.
-+event:0xcd counters:0,1,2,3,4,5 um:zero minimum:500 name:VSCR_SAT_SET : Number of times the saturate bit flips from 0 to 1.
-+event:0xd2 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_SFX0_IDLE : Cycles Simple Fixed Point Unit 0 is idle.
-+event:0xd3 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_SFX1_IDLE : Cycles Simple Fixed Point Unit 1 is idle.
-+event:0xd4 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_CFX_IDLE : Cycles Complex Fixed Point Unit is idle.
-+event:0xd5 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_LSU_IDLE : Cycles Load Store Unit is idle.
-+event:0xd6 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_BU_IDLE : Cycles Branch Unit is idle.
-+event:0xd7 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_FPU_IDLE : Cycles Floating Point Unit is idle.
-+event:0xd8 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_VPU_IDLE : Cycles AltiVec Permute Unit is idle.
-+event:0xd9 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_VFPU_IDLE : Cycles AltiVec Floating Point Unit is idle.
-+event:0xda counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_VSFX_IDLE : Cycles AltiVec Simple Fixed Point Unit is idle.
-+event:0xdb counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_VCFX_IDLE : Cycles AltiVec Complex Fixed Point Unit is idle.
-+event:0xdd counters:0,1,2,3,4,5 um:zero minimum:500 name:L1_CACHE_MISSES : Data L1 cache misses. (Includes load, store, cache ops).
-+event:0xde counters:0,1,2,3,4,5 um:zero minimum:500 name:L1_CACHE_LOAD_MISSES : Data L1 cache load misses.
-+event:0xdf counters:0,1,2,3,4,5 um:zero minimum:500 name:L1_CACHE_STORE_MISSES : Data L1 cache store misses.
-+event:0xe0 counters:0,1,2,3,4,5 um:zero minimum:500 name:LMQ_ALLOCATED_LOADS : Loads that allocate into Load Miss Queue. (Data L1 cache misses, but may not be to different cache lines).
-+event:0xe1 counters:0,1,2,3,4,5 um:zero minimum:500 name:LOAD_THREAD_MISS_COLLISION : Number of times that this thread's load hits a line that is valid for the other thread but not this thread.
-+event:0xe2 counters:0,1,2,3,4,5 um:zero minimum:500 name:INTERTHREAD_STATUS_ARRAY_COLLISION : Number of times that two threads collide on status array access.
-+event:0xe3 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_SGB_ALLOC : Number of Store Gather Buffer allocates.
-+event:0xe4 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_SGB_GATHERS : Number of Store Gather Buffer gathers.
-+event:0xe5 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_SGB_OVERFLOWS : Number of Store Gather Buffer overflows. (Causes SGB full condition when additional store request is made).
-+event:0xe6 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_SGB_PROMOTIONS : Number of Store Gather Buffer promotions.
-+event:0xe7 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_SGB_INORDER_PROMOTIONS : Number of Store Gather Buffer in-order promotions. (Also includes oldest-entry timeout condition).
-+event:0xe8 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_SGB_OUTOFORDER_PROMOTIONS : Number of Store Gather Buffer out-of-order promotions.
-+event:0xe9 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_SGB_HP_PROMOTIONS : Number of Store Gather Buffer high-priority promotions. (Load hits on pending store).
-+event:0xea counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_SGB_MISO_PROMOTIONS : Number of Store Gather Buffer miso promotions. (Load hits on pending store).
-+event:0xeb counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_SGB_WATERMARK_PROMOTIONS : Number of Store Gather Buffer watermark promotions.
-+event:0xec counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_SGB_OVERFLOW_PROMOTIONS : Number of Store Gather Buffer overflow promotions.
-+event:0xed counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_DLAQ_FULL : Number of cycles the DLink Age Queue is full.
-+event:0xee counters:0,1,2,3,4,5 um:zero minimum:500 name:TIMES_DLAQ_FULL : Number of times the DLink Age Queue is full.
-+event:0xef counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_LRSAQ_FULL : Number of cycles the Load Reservation Set Age Queue is full.
-+event:0xf0 counters:0,1,2,3,4,5 um:zero minimum:500 name:TIMES_LRSAQ_FULL : Number of times the Load Reservation Set Age Queue is full.
-+event:0xf1 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_FWDAQ_FULL : Number of cycles the Forward Age Queue is full.
-+event:0xf2 counters:0,1,2,3,4,5 um:zero minimum:500 name:TIMES_FWDAQ_FULL : Number of times the Forward Age Queue is full.
-+event:0xf3 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_FWD_STQ_COLLISION_TIMES : Number of times a Store Queue collision is forwardable. The following cases are not forwardable: store address + size does not contain the load, cache-inhibited store, denormalized, floating point store, stcx, guarded load.
-+event:0xf4 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_FWD_STQ_COLLISION_TIMES_DATA_RDY : Number of times a Store Queue collision is forwardable and is ready with data to forward.
-+event:0xf5 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_FWD_STQ_COLLISION_TIMES_DATA_NORDY : Number of times a Store Queue collision is forwardable but is not ready with data to forward.
-+event:0xf6 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_NOFWD_STQ_COLLISION_TIMES : Number of times a Store Queue collision is not forwardable and must wait until the store leaves the Store Queue.
-+event:0xf7 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_FWD_STQ_COLLISION_CLK : Number of cycles a Store Queue collision is forwardable. (Number of cycles from the detection of a forwardable Store Queue entry until the load is replayed in stg1).
-+event:0xf8 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_FWD_STQ_COLLISION_CLK_DATA_RDY : Number of cycles a Store Queue collision is forwardable and is ready with data to forward. (Number of cycles from the detection of a forwardable Store Queue entry with valid data until the load is replayed in stg1).
-+event:0xf9 counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_FWD_STQ_COLLISION_CLK_DATA_NORDY : Number of cycles a Store Queue collision is forwardable but is not ready with data to forward. (Number of cycles from the detection of a forwardable Store Queue entry without valid data until the load is replayed in stg1).
-+event:0xfa counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_NOFWD_STQ_COLLISION_CLK : Number of cycles a Store Queue collision is not forwardable and has to wait until the store leaves the Store Queue. (Number of cycles from the detection of a non-forwardable Store Queue entry until the load is replayed in stg1).
-+event:0xfb counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_FALSE_EA_COLLISION : Number of times the lower 12-bits of EA matched but the upper bits did not, leading to a false load-on-store replay. Cycle penalty is 4x the number of times.
-+event:0xfc counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_LSO_BUS_COLLISION : Number of LS0 result bus collisions. Cycle penalty is 3x this measurement.
-+event:0xfd counters:0,1,2,3,4,5 um:zero minimum:500 name:NUM_INTERTHREAD_DBLWORKD_BANK_COLLISION : Number of inter-thread double-word bank collisions. Measures when both threads attempt to access the same double-word bank. Cycle penalty is 3x this measurement.
-+event:0xfe counters:0,1,2,3,4,5 um:zero minimum:500 name:L1_CACHE_IM : Instruction L1 cache demand fetch misses. (Includes icbtls. Does not include prefetch).
-+event:0x100 counters:0,1,2,3,4,5 um:zero minimum:500 name:IMMU_MISSES : Counts misses in the level 1 Instruction MMU.
-+event:0x101 counters:0,1,2,3,4,5 um:zero minimum:500 name:IMMU_TLB4K_HITS : Counts hits in the level 1 Instruction MMU TLB-4K.
-+event:0x102 counters:0,1,2,3,4,5 um:zero minimum:500 name:IMMU_VSP_HITS : Counts hits in the level 1 Instruction MMU VSP.
-+event:0x103 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_IMMU_HW_TABLEWALK : Counts IMMU cycles spent in hardware tablewalk. This represents the cycles from the point where the L2 MMU miss occurs to when the page table walk completes with a valid translation or exception.
-+event:0x104 counters:0,1,2,3,4,5 um:zero minimum:500 name:DMMU_MISSES : Counts misses in the level 1 Data MMU. (Does not count replayed operations).
-+event:0x105 counters:0,1,2,3,4,5 um:zero minimum:500 name:DMMU_TLB4K_HITS : Counts hits in the level 1 Data MMU TLB-4K. (Does not count replayed operations).
-+event:0x106 counters:0,1,2,3,4,5 um:zero minimum:500 name:DMMU_VSP_HITS : Counts hits in the level 1 Data MMU VSP. (Does not count replayed operations).
-+event:0x107 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_DMMU_HW_TABLEWALK : Counts DMMU cycles spent in hardware tablewalk. This represents the cycles from the point where the L2 MMU miss occurs to when the page table walk completes with a valid translation or exception.
-+event:0x108 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2MMU_MISSES : Counts level 2 MMU misses. (Does not count misses that occur due to dcbt / dcbtst / dcba / dcbal instructions that fail translation and are no-oped. Does not count misses in L2MMU-VSP when looking up an indirect entry).
-+event:0x109 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2MMU_4K_HITS : Counts level 2 MMU hits in L2MMU-4K.
-+event:0x10a counters:0,1,2,3,4,5 um:zero minimum:500 name:L2MMU_VSP_HITS : Counts level 2 MMU hits in L2MMU-VSP. (Does not count indirect lookups).
-+event:0x10b counters:0,1,2,3,4,5 um:zero minimum:500 name:L2MMU_INDIRECT_MISSES : Counts level 2 MMU indirect misses. This represents indirect entry lookups that do not have a matching indirect entry.
-+event:0x10c counters:0,1,2,3,4,5 um:zero minimum:500 name:L2MMU_INDIRECT_VALID_MISSES : Counts level 2 MMU indirect valid misses. This occurs when the indirect entry is valid, but the corresponding PTE[V] = 0 or the permissions in the PTE are not sufficient for the requested access.
-+event:0x10d counters:0,1,2,3,4,5 um:zero minimum:500 name:LRAT_MISSES : Counts Logical to Real Address Translation misses. This includes LRAT misses from tlbwe instructions or from page table translations.
-+event:0x110 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_LMQ_LOSE_DLINK_DUE_SGB : Cycles the Load Miss Queue loses DLINK arbitration due to the Store Gather Buffer.
-+event:0x111 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_SGB_LOSE_DLINK_DUE_LMQ : Cycles the Store Gather Buffer loses DLINK arbitration due to the Load Miss Queue.
-+event:0x112 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_THREAD_LOSE_DLINK_DUE_OTHER_THREAD : Cycles thread loses DLINK arbitration due to other thread.
-+event:0x116 counters:0,1,2,3,4,5 um:zero minimum:500 name:DECODE_MASK_VALUE : One mask/value pair that allows instructions to be counted in Decode.
-+event:0x1bb counters:0,1,2,3,4,5 um:zero minimum:500 name:SHR_L2_DLINK_REQ : Number of DLINK requests made from core to Shared L2.
-+event:0x1bc counters:0,1,2,3,4,5 um:zero minimum:500 name:SHR_L2_ILINK_REQ : Number of ILINK requests made from core to Shared L2. (Includes instruction fetches and L2MMU hardware tablewalk requests).
-+event:0x1bd counters:0,1,2,3,4,5 um:zero minimum:500 name:SHR_L2_RLINK_REQ : Number of RLINK requests made from Shared L2 to core. (back invalidates, stashes, barriers).
-+event:0x1be counters:0,1,2,3,4,5 um:zero minimum:500 name:SHR_L2_BLINK_REQ : Number of BLINK requests made from Shared L2 to core. (back invalidates, stashes, barriers).
-+event:0x1bf counters:0,1,2,3,4,5 um:zero minimum:500 name:SHR_L2_CLINK_REQ : Number of CLINK requests made from Shared L2 to core. (back invalidates, stashes, barriers).
-+event:0x1c8 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_HITS : Number of L2 Cache hits. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1c9 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_MISSES : Number of L2 Cache misses. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1ca counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_DEMAND_ACCESS : Number of L2 Cache demand accesses. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1cb counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_ACCESSES : Number of L2 Cache accesses from all sources (demand, reload, snoop, etc). Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1cc counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_STORE_ALLOCATE : Number of L2 Cache store allocates. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1cd counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_INSTRUCTIONS_ACCESS : Number of L2 Cache instruction accesses. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1ce counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_DATA_ACCESS : Number of L2 Cache data accesses. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1cf counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_INSTRUCTIONS_MISSES : Number of L2 Cache instruction misses. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1d0 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_DATA_MISSES : Number of L2 Cache data misses. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1d1 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_HITS_PER_THREAD : Number of times this core/thread hits in the L2 Cache. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1d2 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_MISSES_PER_THREAD : Number of times this core/thread misses in the L2 Cache. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1d3 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_DEMAND_ACCESS_PER_THREAD : Number of times this core/thread makes a demand access to the L2 Cache. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1d4 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_STORE_ALLOC_PER_THREAD : Number of times a store from this core/thread allocates in the L2 Cache. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1d5 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_INSTRUCTIONS_ACCESS_PER_THREAD : Number of times an instruction from this core/thread accesses the L2 Cache. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1d6 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_DATA_ACCESS_PER_THREAD : Number of times a data operation from this core/thread accesses the L2 Cache. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1d7 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_INSTRUCTION_MISSES_PER_THREAD : Number of times an instruction from this core/thread misses in the L2 Cache. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1d8 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_DATA_MISSES_PER_THREAD : Number of times a data operation from this core/thread misses in the L2 Cache. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1d9 counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_RELOAD_FROM_CORENET : Number of L2 Cache reloads from CoreNet. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1da counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_IN_STASH_REQ : Number of incoming L2 Cache stash requests. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1db counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_STASH_REQ_DOWNGRD_TO_SNOOPS : Number of incoming L2 Cache stash requests downgraded to snoops. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1dc counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_SNOOPS_HITS : Number of L2 Cache snoop hits. Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1dd counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_SNOOPS_MINT : Number of L2 Cache snoops causing MINT.
-+event:0x1de counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_SNOOPS_SINT : Number of L2 Cache snoops causing SINT.
-+event:0x1df counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_SNOOPS_PUSHES : Number of L2 Cache snoop pushes.
-+event:0x1e0 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_BIB_STALL : Stall for Back Invalidate Buffer entry (cycles). Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1e2 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_RLT_STALL : Stall for Reload Table entry (cycles). Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1e4 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_RLFQ_STALL : Stall for Reload Fold Queue entry (cycles). Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1e6 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_DTQ_STALL : Stall for Data Transaction Queue entry (cycles). Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1e8 counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_COB_STALL : Stall for Castout Buffer entry (cycles). Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1ea counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_WDB_STALL : Stall for Write Data Buffer entry (cycles). Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1ec counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_RLDB_STALL : Stall for Reload Data Buffer entry (cycles). Counts 0, 1, 2, 3, or 4 per cycle.
-+event:0x1ee counters:0,1,2,3,4,5 um:zero minimum:500 name:CLK_SNPQ_STALL : Stall for Snoop Queue entry (cycles).
-+event:0x1fa counters:0,1,2,3,4,5 um:zero minimum:500 name:BIU_MASTER_REQ : Master transaction starts. (Number of AOut sent to CoreNet).
-+event:0x1fb counters:0,1,2,3,4,5 um:zero minimum:500 name:BIU_MASTER_GLOBAL_REQ : Master transaction starts that are global. (Number of AOut with M=1 sent to CoreNet).
-+event:0x1fc counters:0,1,2,3,4,5 um:zero minimum:500 name:BIU_MASTER_DATA_SIDE_REQ : Master data-side transaction starts. (Number of D-side AOut sent to CoreNet).
-+event:0x1fd counters:0,1,2,3,4,5 um:zero minimum:500 name:BIU_MASTER_INSTRUCTION_SIDE_REQ : Master instruction-side transaction starts. (Number of I-side AOut sent to CoreNet).
-+event:0x1fe counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_STASH_REQ : Stash request on AIn matches stash IDs for core or L2.
-+event:0x1ff counters:0,1,2,3,4,5 um:zero minimum:500 name:L2_SNOOP_REQ : Externally generated snoop requests. (Number of AIn from CoreNet not from self).
-+
-diff --git a/events/ppc/e6500/unit_masks b/events/ppc/e6500/unit_masks
-new file mode 100644
-index 0000000..b7e7a23
---- /dev/null
-+++ b/events/ppc/e6500/unit_masks
-@@ -0,0 +1,4 @@
-+# e6500 possible unit masks
-+#
-+name:zero type:mandatory default:0x0
-+ 0x0 no unit mask
-diff --git a/libop/op_cpu_type.c b/libop/op_cpu_type.c
-index 7d50a2d..badb7ba 100644
---- a/libop/op_cpu_type.c
-+++ b/libop/op_cpu_type.c
-@@ -126,6 +126,7 @@ static struct cpu_descr const cpu_descrs[MAX_CPU_TYPE] = {
- { "IBM Power Architected Events V1", "ppc64/architected_events_v1", CPU_PPC64_ARCH_V1, 6 },
- { "ppc64 POWER8", "ppc64/power8", CPU_PPC64_POWER8, 6 },
- { "e500mc", "ppc/e500mc", CPU_PPC_E500MC, 4 },
-+ { "e6500", "ppc/e6500", CPU_PPC_E6500, 6 },
- };
-
- static size_t const nr_cpu_descrs = sizeof(cpu_descrs) / sizeof(struct cpu_descr);
-diff --git a/libop/op_cpu_type.h b/libop/op_cpu_type.h
-index 10f000b..934fe9e 100644
---- a/libop/op_cpu_type.h
-+++ b/libop/op_cpu_type.h
-@@ -106,6 +106,7 @@ typedef enum {
- CPU_PPC64_ARCH_V1, /** < IBM Power architected events version 1 */
- CPU_PPC64_POWER8, /**< ppc64 POWER8 family */
- CPU_PPC_E500MC, /**< e500mc */
-+ CPU_PPC_E6500, /**< e6500 */
- MAX_CPU_TYPE
- } op_cpu;
-
-diff --git a/libop/op_events.c b/libop/op_events.c
-index 638dc5c..9d2aa5e 100644
---- a/libop/op_events.c
-+++ b/libop/op_events.c
-@@ -1309,6 +1309,7 @@ void op_default_event(op_cpu cpu_type, struct op_default_event_descr * descr)
- case CPU_PPC_E500:
- case CPU_PPC_E500_2:
- case CPU_PPC_E500MC:
-+ case CPU_PPC_E6500:
- case CPU_PPC_E300:
- descr->name = "CPU_CLK";
- break;
-diff --git a/utils/ophelp.c b/utils/ophelp.c
-index 0647360..3b2896a 100644
---- a/utils/ophelp.c
-+++ b/utils/ophelp.c
-@@ -754,6 +754,7 @@ int main(int argc, char const * argv[])
- case CPU_PPC_E500:
- case CPU_PPC_E500_2:
- case CPU_PPC_E500MC:
-+ case CPU_PPC_E6500:
- event_doc =
- "See PowerPC e500 Core Complex Reference Manual\n"
- "Chapter 7: Performance Monitor\n"
---
diff --git a/meta/recipes-kernel/oprofile/oprofile/automake-foreign.patch b/meta/recipes-kernel/oprofile/oprofile/automake-foreign.patch
index e82a381139..b9bb6c5779 100644
--- a/meta/recipes-kernel/oprofile/oprofile/automake-foreign.patch
+++ b/meta/recipes-kernel/oprofile/oprofile/automake-foreign.patch
@@ -1,3 +1,8 @@
+oprofile doesn't want GNU-levels of automake strictness so tell it to be "foreign".
+
+Upstream-Status: Pending
+Signed-off-by: Ross Burton <ross.burton@intel.com>
+
diff --git a/configure.ac b/configure.ac
index 5740585..cf6c316 100644
--- a/configure.ac
diff --git a/meta/recipes-kernel/oprofile/oprofile/filemode-fix.patch b/meta/recipes-kernel/oprofile/oprofile/filemode-fix.patch
new file mode 100644
index 0000000000..f7ebe24691
--- /dev/null
+++ b/meta/recipes-kernel/oprofile/oprofile/filemode-fix.patch
@@ -0,0 +1,41 @@
+With security_flags.inc:
+
+| In file included from /media/build1/poky/build/tmp/sysroots/qemumips/usr/include/fcntl.h:302:0,
+| from opjitconv.c:25:
+| In function 'open',
+| inlined from 'copy_dumpfile' at opjitconv.c:219:6:
+| /media/build1/poky/build/tmp/sysroots/qemumips/usr/include/bits/fcntl2.h:50:4: error: call to '__open_missing_mode' declared with attribute error: open with O_CREAT in second argument needs 3 arguments
+| __open_missing_mode ();
+| ^
+| Makefile:440: recipe for target 'opjitconv.o' failed
+
+Why does this only happen on mips? mips has:
+
+O_CREAT = 0x100
+and
+S_IRUSR = 0400
+
+and these (in hex and octal) are equivalent. Most other platforms
+have O_CREAT = 0100.
+
+http://sourceforge.net/p/oprofile/oprofile/ci/4598ca73b0a367ca46d4a2843261e20e1896773b
+
+The file should not be created, only opened if it is present, therefore use O_RDONLY instead.
+
+RP 2014/11/6
+
+Upstream-Status: Backport
+
+Index: oprofile-1.0.0/opjitconv/opjitconv.c
+===================================================================
+--- oprofile-1.0.0.orig/opjitconv/opjitconv.c 2014-09-12 14:39:47.000000000 +0000
++++ oprofile-1.0.0/opjitconv/opjitconv.c 2014-11-06 13:14:25.941639003 +0000
+@@ -216,7 +216,7 @@
+ int file_locked = 0;
+ unsigned int usecs_waited = 0;
+ int rc = OP_JIT_CONV_OK;
+- int fd = open(dumpfile, S_IRUSR);
++ int fd = open(dumpfile, O_RDONLY);
+ if (fd < 0) {
+ perror("opjitconv failed to open JIT dumpfile");
+ return OP_JIT_CONV_FAIL;
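
A minimal, self-contained C sketch (hypothetical, not taken from the patch or from opjitconv) of why open(dumpfile, S_IRUSR) only breaks on mips: there O_CREAT (0x100) and S_IRUSR (0400 octal) are the same integer, so the permission bit is silently interpreted as O_CREAT with no mode argument, and glibc's fortify wrapper rejects the call at build time. Opening read-only avoids the flag entirely.

    /* Hypothetical standalone demo -- the file name is illustrative only. */
    #include <fcntl.h>      /* O_CREAT, O_RDONLY, open() */
    #include <stdio.h>      /* printf(), perror() */
    #include <sys/stat.h>   /* S_IRUSR */
    #include <unistd.h>     /* close() */

    int main(void)
    {
        /* On mips32 both macros expand to 256; elsewhere O_CREAT is 0100 octal. */
        printf("O_CREAT = %#o, S_IRUSR = %#o\n",
               (unsigned)O_CREAT, (unsigned)S_IRUSR);

        /* Buggy form: S_IRUSR is a permission bit, not an open() flag.
         * On mips it is read as O_CREAT without a mode, which fortified
         * glibc rejects at compile time (__open_missing_mode).
         *     int fd = open("dumpfile", S_IRUSR);
         * Fixed form: the JIT dump file must already exist, so open it read-only. */
        int fd = open("dumpfile", O_RDONLY);
        if (fd < 0)
            perror("open");
        else
            close(fd);
        return 0;
    }
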
diff --git a/meta/recipes-kernel/oprofile/oprofile/opstart.patch b/meta/recipes-kernel/oprofile/oprofile/opstart.patch
deleted file mode 100644
index 8696f4ef4d..0000000000
--- a/meta/recipes-kernel/oprofile/oprofile/opstart.patch
+++ /dev/null
@@ -1,245 +0,0 @@
-Upstream-Status: Pending
-
-The patch gives a low overhead way of starting/stopping oprofile which
-doesn't involve script execution.
-
-(written by RP in OpenedHand days)
-
-diff --git a/utils/Makefile.am b/utils/Makefile.am
-index d34b060..dff15f9 100644
---- oprofile.orig/utils/Makefile.am
-+++ oprofile/utils/Makefile.am
-@@ -7,7 +7,7 @@ AM_LDFLAGS = @OP_LDFLAGS@
-
- LIBS=@POPT_LIBS@ @LIBERTY_LIBS@
-
--bin_PROGRAMS = ophelp op-check-perfevents
-+bin_PROGRAMS = ophelp op-check-perfevents opstart
- dist_bin_SCRIPTS = opcontrol
-
- op_check_perfevents_SOURCES = op_perf_events_checker.c
-@@ -15,3 +15,10 @@ op_check_perfevents_CPPFLAGS = ${AM_CFLAGS} @PERF_EVENT_FLAGS@
-
- ophelp_SOURCES = ophelp.c
- ophelp_LDADD = ../libop/libop.a ../libutil/libutil.a
-+
-+opstart_SOURCES = opstart.c
-+
-+install-exec-local:
-+ cd $(DESTDIR)/$(bindir) && \
-+ rm -f opstop && \
-+ $(LN_S) opstart opstop
-Index: oprofile/utils/opstart.c
-===================================================================
---- /dev/null 1970-01-01 00:00:00.000000000 +0000
-+++ oprofile/utils/opstart.c 2008-07-02 15:14:07.000000000 +0100
-@@ -0,0 +1,110 @@
-+/**
-+ * @file opstart.c
-+ * Start/Stop oprofile
-+ *
-+ * @remark Copyright 2007 Openedhand Ltd.
-+ * @remark Read the file COPYING
-+ *
-+ * @author Richard Purdie
-+ */
-+
-+#include <signal.h>
-+#include <stdio.h>
-+#include <stdlib.h>
-+#include <string.h>
-+#include <unistd.h>
-+#include <sys/types.h>
-+#include <sys/stat.h>
-+
-+int main(const int argc, const char* argv[])
-+{
-+ const char *enable = "/dev/oprofile/enable";
-+ const char *lockfile;
-+ unsigned long dpid;
-+ struct stat sbuf;
-+ FILE *lfile, *efile;
-+ int sig, enb, err;
-+
-+ if (argc >= 2) {
-+ printf("Error: Invalid options.\n");
-+ return 1;
-+ }
-+
-+ lockfile = getenv("LOCK_FILE");
-+ if (!lockfile)
-+ lockfile = "/var/lib/oprofile/lock";
-+
-+ /* Add SESSION_DIR support? */
-+
-+ if (geteuid()) {
-+ printf("Error: This program must be run as root.\n");
-+ return 1;
-+ }
-+
-+ if (stat(enable, &sbuf)) {
-+ printf("Error: Could not find /dev/oprofile/enable, the"
-+ " kernel module probably isn't loaded.\n");
-+ printf("This binary only works with 2.6 kernels and oprofile"
-+ " must have been initialised with 'opcontrol --start-daemon'.\n");
-+ return 1;
-+ }
-+
-+ if (stat(lockfile, &sbuf)) {
-+ printf("Error: Could not find lockfile %s.\n", lockfile);
-+ printf("The oprofile daemon must be running (oprofile must"
-+ " have been initialised with 'opcontrol --start-daemon').\n");
-+ return 1;
-+ }
-+
-+ lfile = fopen(lockfile, "r");
-+ if (!lfile) {
-+ printf("Error opening lockfile %s.\n", lockfile);
-+ return 1;
-+ }
-+
-+ err = fscanf(lfile, "%lud", (unsigned long *) &dpid);
-+ if (err != 1) {
-+ printf("Error reading pid from lockfile %s.\n", lockfile);
-+ return 1;
-+ }
-+ fclose(lfile);
-+
-+ efile = fopen(enable, "r");
-+ if (!efile) {
-+ printf("Error opening %s.\n", enable);
-+ return 1;
-+ }
-+
-+ if (strstr(argv[0], "opstart")) {
-+ printf("Starting Profiler\n");
-+ sig = SIGUSR1;
-+ enb = 1;
-+ } else if (strstr(argv[0], "opstop")) {
-+ printf("Stopping Oprofile.\n");
-+ printf("You need to run 'opcontrol --dump' when the session"
-+ " is finished.\n");
-+ sig = SIGUSR2;
-+ enb = 0;
-+ } else {
-+ printf("Error: Please call as 'opstart' or 'opstop'\n");
-+ return 1;
-+ }
-+
-+ err = kill(dpid, 0);
-+ if (err) {
-+ printf("Error sending signal to oprofiled. Stale lockfile"
-+ " (%s) ?\n", lockfile);
-+ return 1;
-+ }
-+
-+ fprintf(efile, "%d\n", enb);
-+ err = kill(dpid, sig);
-+ if (err) {
-+ printf("Error sending signal to oprofiled. Stale lockfile"
-+ " (%s) ?\n", lockfile);
-+ return 1;
-+ }
-+
-+ return 0;
-+}
-+
-Index: oprofile/configure.ac
-===================================================================
---- oprofile.orig/configure.ac 2008-07-02 15:13:58.000000000 +0100
-+++ oprofile/configure.ac 2008-07-02 15:17:37.000000000 +0100
-@@ -16,6 +16,7 @@
- AM_CONFIG_HEADER(config.h)
-
- AC_PROG_RANLIB
-+AC_PROG_LN_S
- AC_PROG_LIBTOOL
-
- dnl for the man page
-@@ -241,6 +242,8 @@
- doc/xsl/catalog-1.xml \
- doc/oprofile.1 \
- doc/opcontrol.1 \
-+ doc/opstart.1 \
-+ doc/opstop.1 \
- doc/ophelp.1 \
- doc/opreport.1 \
- doc/opannotate.1 \
-Index: oprofile/doc/Makefile.am
-===================================================================
---- oprofile.orig/doc/Makefile.am 2008-07-02 15:13:59.000000000 +0100
-+++ oprofile/doc/Makefile.am 2008-07-02 15:14:07.000000000 +0100
-@@ -11,6 +11,8 @@
- man_MANS = \
- oprofile.1 \
- opcontrol.1 \
-+ opstart.1 \
-+ opstop.1 \
- opreport.1 \
- opannotate.1 \
- opgprof.1 \
-Index: oprofile/doc/opstart.1.in
-===================================================================
---- /dev/null 1970-01-01 00:00:00.000000000 +0000
-+++ oprofile/doc/opstart.1.in 2008-07-02 15:14:07.000000000 +0100
-@@ -0,0 +1,27 @@
-+.TH OPSTART 1 "@DATE@" "oprofile @VERSION@"
-+.UC 4
-+.SH NAME
-+opstart \- start OProfile profiling
-+.SH SYNOPSIS
-+.br
-+.B opstart
-+.SH DESCRIPTION
-+.B opstart
-+is a simple optimised command to start profiling with 2.6 Linux kernels.
-+OProfile should have already been initialised by calling "opcontrol --start-daemon".
-+
-+.SH ENVIRONMENT
-+No special environment variables are recognised by opstart.
-+
-+.SH FILES
-+.TP
-+.I /var/lib/oprofile/samples/
-+The location of the generated sample files.
-+
-+.SH VERSION
-+.TP
-+This man page is current for @PACKAGE@-@VERSION@.
-+
-+.SH SEE ALSO
-+.BR @OP_DOCDIR@,
-+.BR oprofile(1)
-Index: oprofile/doc/opstop.1.in
-===================================================================
---- /dev/null 1970-01-01 00:00:00.000000000 +0000
-+++ oprofile/doc/opstop.1.in 2008-07-02 15:14:07.000000000 +0100
-@@ -0,0 +1,28 @@
-+.TH OPSTOP 1 "@DATE@" "oprofile @VERSION@"
-+.UC 4
-+.SH NAME
-+opstop \- stop OProfile profiling
-+.SH SYNOPSIS
-+.br
-+.B opstop
-+.SH DESCRIPTION
-+.B opstop
-+is a simple optimised command to stop profiling with 2.6 Linux kernels.
-+You need to run "opcontrol --dump" before being able to view a profile
-+with opreport.
-+
-+.SH ENVIRONMENT
-+No special environment variables are recognised by opstop.
-+
-+.SH FILES
-+.TP
-+.I /var/lib/oprofile/samples/
-+The location of the generated sample files.
-+
-+.SH VERSION
-+.TP
-+This man page is current for @PACKAGE@-@VERSION@.
-+
-+.SH SEE ALSO
-+.BR @OP_DOCDIR@,
-+.BR oprofile(1)
diff --git a/meta/recipes-kernel/oprofile/oprofile/root-home-dir.patch b/meta/recipes-kernel/oprofile/oprofile/root-home-dir.patch
index 45cab7d3d8..20fc5e503b 100644
--- a/meta/recipes-kernel/oprofile/oprofile/root-home-dir.patch
+++ b/meta/recipes-kernel/oprofile/oprofile/root-home-dir.patch
@@ -1,7 +1,7 @@
oprofile: Determine the root home directory dynamically
This commit detects the root home directory dynamically with changes to
-the opcontrol script and the oprofile gui app source.
+the oprofile gui app source.
The commit replaces an earlier fix that detected and adjusted a
'non-standard' root home directory at build time. The advantage of this
@@ -12,109 +12,33 @@ Upstream-Status: inappropriate [OE specific]
Signed-off-by: Dave Lerner <dave.lerner@windriver.com>
-diff --git a/doc/opcontrol.1.in b/doc/opcontrol.1.in
-index c434704..f57eb76 100644
---- a/doc/opcontrol.1.in
-+++ b/doc/opcontrol.1.in
-@@ -171,7 +171,7 @@ No special environment variables are recognised by opcontrol.
-
- .SH FILES
- .TP
--.I /root/.oprofile/daemonrc
-+.I ~root/.oprofile/daemonrc
- Configuration file for opcontrol
- .TP
- .I /var/lib/oprofile/samples/
-diff --git a/doc/oprofile.1.in b/doc/oprofile.1.in
-index 3d0f0ed..5c623e1 100644
---- a/doc/oprofile.1.in
-+++ b/doc/oprofile.1.in
-@@ -150,7 +150,7 @@ No special environment variables are recognised by oprofile.
- .I $HOME/.oprofile/
- Configuration files
- .TP
--.I /root/.oprofile/daemonrc
-+.I ~root/.oprofile/daemonrc
- Configuration file for opcontrol
- .TP
- .I @prefix@/share/oprofile/
-diff --git a/doc/oprofile.html b/doc/oprofile.html
-index 128d9f7..d7e4dea 100644
---- a/doc/oprofile.html
-+++ b/doc/oprofile.html
-@@ -1394,7 +1394,7 @@ The <span class="command"><strong>opcontrol</strong></span> script provides the
- <dd>
- <p>
- Followed by list arguments for profiling set up. List of arguments
-- saved in <code class="filename">/root/.oprofile/daemonrc</code>.
-+ saved in <code class="filename">~root/.oprofile/daemonrc</code>.
- Giving this option is not necessary; you can just directly pass one
- of the setup options, e.g. <span class="command"><strong>opcontrol --no-vmlinux</strong></span>.
- </p>
-@@ -1430,7 +1430,7 @@ The <span class="command"><strong>opcontrol</strong></span> script provides the
- <dd>
- <p>
- Start data collection with either arguments provided by <code class="option">--setup</code>
-- or information saved in <code class="filename">/root/.oprofile/daemonrc</code>. Specifying
-+ or information saved in <code class="filename">~root/.oprofile/daemonrc</code>. Specifying
- the addition <code class="option">--verbose</code> makes the daemon generate lots of debug data
- whilst it is running.
+Index: oprofile-1.0.0/doc/oprofile.html
+===================================================================
+--- oprofile-1.0.0.orig/doc/oprofile.html 2014-11-03 17:55:31.511034857 +0000
++++ oprofile-1.0.0/doc/oprofile.html 2014-11-03 17:57:26.415037988 +0000
+@@ -1563,8 +1563,8 @@
+ <span class="emphasis"><em>must</em></span> stop it in a controlled manner in order to process
+ the profile data it has collected. Use <code class="code">kill -SIGINT &lt;operf-PID&gt;</code>
+ for this purpose. It is recommended that when running <span class="command"><strong>operf</strong></span>
+- with this option, your current working directory should be <code class="filename">/root</code> or a subdirectory
+- of <code class="filename">/root</code> to avoid storing sample data files in locations accessible by regular users.
++ with this option, your current working directory should be <code class="filename">~root</code> or a subdirectory
++ of <code class="filename">~root</code> to avoid storing sample data files in locations accessible by regular users.
</p>
-diff --git a/doc/oprofile.xml b/doc/oprofile.xml
-index 6a17c6d..0968d76 100644
---- a/doc/oprofile.xml
-+++ b/doc/oprofile.xml
-@@ -568,7 +568,7 @@ The <command>opcontrol</command> script provides the following actions :
- <term><option>--setup</option></term>
- <listitem><para>
- Followed by list arguments for profiling set up. List of arguments
-- saved in <filename>/root/.oprofile/daemonrc</filename>.
-+ saved in <filename>~root/.oprofile/daemonrc</filename>.
- Giving this option is not necessary; you can just directly pass one
- of the setup options, e.g. <command>opcontrol --no-vmlinux</command>.
- </para></listitem>
-@@ -592,7 +592,7 @@ The <command>opcontrol</command> script provides the following actions :
- <term><option>--start</option></term>
- <listitem><para>
- Start data collection with either arguments provided by <option>--setup</option>
-- or information saved in <filename>/root/.oprofile/daemonrc</filename>. Specifying
-+ or information saved in <filename>~root/.oprofile/daemonrc</filename>. Specifying
- the addition <option>--verbose</option> makes the daemon generate lots of debug data
- whilst it is running.
+ </dd>
+ <dt>
+Index: oprofile-1.0.0/doc/oprofile.xml
+===================================================================
+--- oprofile-1.0.0.orig/doc/oprofile.xml 2014-11-03 17:55:31.515034857 +0000
++++ oprofile-1.0.0/doc/oprofile.xml 2014-11-03 17:58:03.719039005 +0000
+@@ -654,8 +654,8 @@
+ <emphasis>must</emphasis> stop it in a controlled manner in order to process
+ the profile data it has collected. Use <code>kill -SIGINT &lt;operf-PID&gt;</code>
+ for this purpose. It is recommended that when running <command>operf</command>
+- with this option, your current working directory should be <filename>/root</filename> or a subdirectory
+- of <filename>/root</filename> to avoid storing sample data files in locations accessible by regular users.
++ with this option, your current working directory should be <filename>~root</filename> or a subdirectory
++ of <filename>~root</filename> to avoid storing sample data files in locations accessible by regular users.
</para></listitem>
-diff --git a/gui/oprof_start_util.cpp b/gui/oprof_start_util.cpp
-index d293431..d13fa8f 100644
---- a/gui/oprof_start_util.cpp
-+++ b/gui/oprof_start_util.cpp
-@@ -20,6 +20,8 @@
- #include <iostream>
- #include <fstream>
- #include <cstdlib>
-+#include <sys/types.h>
-+#include <pwd.h>
-
- #include <qfiledialog.h>
- #include <qmessagebox.h>
-@@ -39,7 +41,8 @@ namespace {
- // return the ~ expansion suffixed with a '/'
- string const get_config_dir()
- {
-- return "/root";
-+ struct *pw = getpwnam("root");
-+ return pw->pw_dir;
- }
-
- string daemon_pid;
-diff --git a/utils/opcontrol b/utils/opcontrol
-index 09fa5a7..a8acdae 100644
---- a/utils/opcontrol
-+++ b/utils/opcontrol
-@@ -385,7 +385,7 @@ do_init()
- OPROFILED="$OPDIR/oprofiled"
-
- # location for daemon setup information
-- SETUP_DIR="/root/.oprofile"
-+ SETUP_DIR="`grep root /etc/passwd | cut -d: -f6`/.oprofile"
- SETUP_FILE="$SETUP_DIR/daemonrc"
- SEC_SETUP_FILE="$SETUP_DIR/daemonrc_new"
-
+ </varlistentry>
+ <varlistentry>
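
A minimal, self-contained C sketch (hypothetical, not taken from the oprofile sources) of the getpwnam()-based lookup behind this patch, which resolves root's home directory at run time instead of hard-coding /root. Note that the gui hunk removed above declared 'struct *pw', which does not compile; the correct type is 'struct passwd *'.

    /* Hypothetical standalone example -- not part of the patch. */
    #include <pwd.h>        /* getpwnam(), struct passwd */
    #include <stdio.h>      /* printf() */
    #include <sys/types.h>

    int main(void)
    {
        /* Look up root's home directory in the passwd database. */
        struct passwd *pw = getpwnam("root");

        if (pw != NULL && pw->pw_dir != NULL)
            printf("root home: %s\n", pw->pw_dir);
        else
            printf("lookup failed, falling back to /root\n");

        return 0;
    }
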
diff --git a/meta/recipes-kernel/oprofile/oprofile/run-ptest b/meta/recipes-kernel/oprofile/oprofile/run-ptest
index 583ee1b8a8..4814be652a 100644
--- a/meta/recipes-kernel/oprofile/oprofile/run-ptest
+++ b/meta/recipes-kernel/oprofile/oprofile/run-ptest
@@ -1,4 +1,5 @@
-#!/bin/bash
+#!/bin/sh
+
saved_dir=$PWD
for dir in */tests ; do
cd $dir
diff --git a/meta/recipes-kernel/oprofile/oprofile_0.9.9.bb b/meta/recipes-kernel/oprofile/oprofile_0.9.9.bb
deleted file mode 100644
index 63ef6af0e9..0000000000
--- a/meta/recipes-kernel/oprofile/oprofile_0.9.9.bb
+++ /dev/null
@@ -1,17 +0,0 @@
-require oprofile.inc
-
-DEPENDS += "virtual/kernel"
-DEPENDS_append_powerpc64 = " libpfm4"
-
-SRC_URI += "${SOURCEFORGE_MIRROR}/${BPN}/${BPN}-${PV}.tar.gz \
- file://0001-Add-rmb-definition-for-AArch64-architecture.patch \
- file://0001-Tidy-powerpc64-bfd-target-check.patch \
- file://0001-Add-freescale-e500mc-support.patch \
- file://0002-Add-freescale-e6500-support.patch \
- "
-SRC_URI[md5sum] = "00aec1287da2dfffda17a9b1c0a01868"
-SRC_URI[sha256sum] = "1e523400daaba7b8d0d15269e977a08b40edfea53970774b69ae130e25117597"
-
-
-S = "${WORKDIR}/oprofile-${PV}"
-
diff --git a/meta/recipes-kernel/oprofile/oprofile_1.0.0.bb b/meta/recipes-kernel/oprofile/oprofile_1.0.0.bb
new file mode 100644
index 0000000000..ad48ab377c
--- /dev/null
+++ b/meta/recipes-kernel/oprofile/oprofile_1.0.0.bb
@@ -0,0 +1,12 @@
+require oprofile.inc
+
+DEPENDS += "virtual/kernel"
+DEPENDS_append_powerpc64 = " libpfm4"
+
+SRC_URI += "${SOURCEFORGE_MIRROR}/${BPN}/${BPN}-${PV}.tar.gz"
+
+SRC_URI[md5sum] = "ba0b340e5c421a93959776c836ed35b3"
+SRC_URI[sha256sum] = "847110b4ecdcf8c8353cd38f94c1b704aad4bfcd9453e38b88d112cfb7e3c45a"
+
+S = "${WORKDIR}/oprofile-${PV}"
+
diff --git a/meta/recipes-kernel/oprofile/oprofile_git.bb b/meta/recipes-kernel/oprofile/oprofile_git.bb
deleted file mode 100644
index ca562db121..0000000000
--- a/meta/recipes-kernel/oprofile/oprofile_git.bb
+++ /dev/null
@@ -1,11 +0,0 @@
-require oprofile.inc
-
-SRCREV = "88f43190d412d28ebf5c75a76ba20343d0fe4c41"
-PV = "0.9.7+git${SRCPV}"
-PR = "${INC_PR}.0"
-
-SRC_URI += "git://oprofile.git.sourceforge.net/gitroot/${BPN}/${BPN}"
-
-S = "${WORKDIR}/git"
-
-DEFAULT_PREFERENCE = "-1"
diff --git a/meta/recipes-kernel/perf/perf-features.inc b/meta/recipes-kernel/perf/perf-features.inc
index b8859ab7d5..2dbbb47761 100644
--- a/meta/recipes-kernel/perf/perf-features.inc
+++ b/meta/recipes-kernel/perf/perf-features.inc
@@ -1,4 +1,4 @@
-PERF_FEATURES_ENABLE ?= "perf-scripting perf-tui"
+PERF_FEATURES_ENABLE ?= "perf-scripting perf-tui perf-libunwind"
def perf_feature_enabled(feature, trueval, falseval, d):
"""
diff --git a/meta/recipes-kernel/perf/perf.bb b/meta/recipes-kernel/perf/perf.bb
index bfd210cf2e..a42aa73c53 100644
--- a/meta/recipes-kernel/perf/perf.bb
+++ b/meta/recipes-kernel/perf/perf.bb
@@ -20,16 +20,20 @@ BUILDPERF_libc-uclibc = "no"
# to cover a wide range of kernel we add both dependencies
TUI_DEPENDS = "${@perf_feature_enabled('perf-tui', 'libnewt slang', '',d)}"
SCRIPTING_DEPENDS = "${@perf_feature_enabled('perf-scripting', 'perl python', '',d)}"
+LIBUNWIND_DEPENDS = "${@perf_feature_enabled('perf-libunwind', 'libunwind', '',d)}"
-DEPENDS = "virtual/kernel \
+DEPENDS = " \
virtual/${MLPREFIX}libc \
${MLPREFIX}elfutils \
${MLPREFIX}binutils \
${TUI_DEPENDS} \
${SCRIPTING_DEPENDS} \
+ ${LIBUNWIND_DEPENDS} \
bison flex \
"
+do_configure[depends] += "virtual/kernel:do_install"
+
PROVIDES = "virtual/perf"
inherit linux-kernel-base kernel-arch pythonnative
@@ -44,7 +48,7 @@ export HOST_SYS
#kernel 3.1+ supports WERROR to disable warnings as errors
export WERROR = "0"
-do_populate_lic[depends] += "virtual/kernel:do_populate_sysroot"
+do_populate_lic[depends] += "virtual/kernel:do_patch"
# needed for building the tools/perf Perl binding
inherit perlnative cpan-base
@@ -54,14 +58,13 @@ export PERL_INC = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}
export PERL_LIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}"
export PERL_ARCHLIB = "${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/${@get_perl_version(d)}"
-S = "${STAGING_KERNEL_DIR}"
-# The source should be ready after the do_unpack
-do_unpack[depends] += "virtual/kernel:do_populate_sysroot"
+inherit kernelsrc
B = "${WORKDIR}/${BPN}-${PV}"
SCRIPTING_DEFINES = "${@perf_feature_enabled('perf-scripting', '', 'NO_LIBPERL=1 NO_LIBPYTHON=1',d)}"
TUI_DEFINES = "${@perf_feature_enabled('perf-tui', '', 'NO_NEWT=1',d)}"
+LIBUNWIND_DEFINES = "${@perf_feature_enabled('perf-libunwind', '', 'NO_LIBUNWIND=1',d)}"
# The LDFLAGS is required or some old kernels fails due missing
# symbols and this is preferred than requiring patches to every old
@@ -75,8 +78,9 @@ EXTRA_OEMAKE = '\
ARCH=${ARCH} \
CC="${CC}" \
AR="${AR}" \
+ EXTRA_CFLAGS="-ldw" \
perfexecdir=${libexecdir} \
- NO_GTK2=1 ${TUI_DEFINES} NO_DWARF=1 NO_LIBUNWIND=1 ${SCRIPTING_DEFINES} \
+ NO_GTK2=1 ${TUI_DEFINES} NO_DWARF=1 ${LIBUNWIND_DEFINES} ${SCRIPTING_DEFINES} \
'
EXTRA_OEMAKE += "\
@@ -121,10 +125,13 @@ do_configure_prepend () {
# detected by perf, since it triggers via: ifeq ($(ARCH),x86_64). In a 32 bit
# build, with a 64 bit multilib, the arch won't match and the detection of a
# 64 bit build (and library) are not exected. To ensure that libraries are
- # installed to the correct location, we can make the substitution in the
- # config/Makefile. For non multilib builds, this has no impact.
+ # installed to the correct location, we can use the weak assignment in the
+ # config/Makefile.
if [ -e "${S}/tools/perf/config/Makefile" ]; then
- sed -i 's,libdir = $(prefix)/$(lib),libdir = $(prefix)/${baselib},' ${S}/tools/perf/config/Makefile
+ # Match $(prefix)/$(lib) and $(prefix)/lib
+ sed -i -e 's,^libdir = \($(prefix)/.*lib\),libdir ?= \1,' \
+ -e 's,^perfexecdir = \(.*\),perfexecdir ?= \1,' \
+ ${S}/tools/perf/config/Makefile
fi
# We need to ensure the --sysroot option in CC is preserved
if [ -e "${S}/tools/perf/Makefile.perf" ]; then
@@ -138,10 +145,19 @@ do_configure_prepend () {
if [ -e "${S}/tools/perf/config/feature-checks/Makefile" ]; then
sed -i 's,CC := $(CROSS_COMPILE)gcc -MD,CC += -MD,' ${S}/tools/perf/config/feature-checks/Makefile
fi
+
+ # 3.17-rc1+ has an include issue for arm/powerpc. Temporarily sed in the appropriate include
+ if [ -e "${S}/tools/perf/arch/$ARCH/util/skip-callchain-idx.c" ]; then
+ sed -i 's,#include "util/callchain.h",#include "util/callchain.h"\n#include "util/debug.h",' ${S}/tools/perf/arch/$ARCH/util/skip-callchain-idx.c
+ fi
+ if [ -e "${S}/tools/perf/arch/arm/util/unwind-libunwind.c" ] && [ -e "${S}/tools/perf/arch/arm/tests/dwarf-unwind.c" ]; then
+ sed -i 's,#include "tests/tests.h",#include "tests/tests.h"\n#include "util/debug.h",' ${S}/tools/perf/arch/arm/tests/dwarf-unwind.c
+ sed -i 's,#include "perf_regs.h",#include "perf_regs.h"\n#include "util/debug.h",' ${S}/tools/perf/arch/arm/util/unwind-libunwind.c
+ fi
}
python do_package_prepend() {
- bb.data.setVar('PKGV', get_kernelversion('${S}').split("-")[0], d)
+ d.setVar('PKGV', d.getVar("KERNEL_VERSION", True).split("-")[0])
}
PACKAGE_ARCH = "${MACHINE_ARCH}"
@@ -153,6 +169,7 @@ RDEPENDS_${PN} += "elfutils"
RDEPENDS_${PN}-archive =+ "bash"
RDEPENDS_${PN}-python =+ "bash python"
RDEPENDS_${PN}-perl =+ "bash perl perl-modules"
+RDEPENDS_${PN}-tests =+ "python"
RSUGGESTS_SCRIPTING = "${@perf_feature_enabled('perf-scripting', '${PN}-perl ${PN}-python', '',d)}"
RSUGGESTS_${PN} += "${PN}-archive ${PN}-tests ${RSUGGESTS_SCRIPTING}"
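
The do_package_prepend hunk above also moves from the long-deprecated module-level setter to the datastore method, and reads the version from KERNEL_VERSION instead of re-parsing the kernel tree with get_kernelversion(). A small sketch of the API difference (the function name example_set_pkgv is hypothetical):

    python example_set_pkgv() {
        # legacy, deprecated form: bb.data.setVar('PKGV', ver, d)
        # datastore-object form used above:
        ver = d.getVar('KERNEL_VERSION', True)   # second argument requests expansion
        d.setVar('PKGV', ver.split("-")[0])
    }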
diff --git a/meta/recipes-kernel/powertop/powertop_2.6.1.bb b/meta/recipes-kernel/powertop/powertop_2.7.bb
index 58216c2d15..5ba07e9ab9 100644
--- a/meta/recipes-kernel/powertop/powertop_2.6.1.bb
+++ b/meta/recipes-kernel/powertop/powertop_2.7.bb
@@ -6,16 +6,17 @@ DEPENDS = "ncurses libnl pciutils"
LICENSE = "GPLv2"
LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e"
-SRC_URI = "http://01.org/powertop/sites/default/files/downloads/powertop-${PV}.tar.gz"
+SRC_URI = "http://01.org/sites/default/files/downloads/powertop/powertop-${PV}.tar.gz"
-SRC_URI[md5sum] = "4391e7b0af854ecf722cdf712f24b631"
-SRC_URI[sha256sum] = "034cde6d5bb433fe0d29251d5cde5d4c2948abf05fe29ef10966b659331b20e4"
+SRC_URI[md5sum] = "e0d686e47daaf7e9d89031f7763432ef"
+SRC_URI[sha256sum] = "8d4b1490e2baad4467c0ded3c423db4472dcbf7b2dd8f8f2a928f54047c678ca"
inherit autotools gettext pkgconfig
# we need to explicitly link with libintl in uClibc systems
-LDFLAGS += "${EXTRA_LDFLAGS}"
+EXTRA_LDFLAGS ?= ""
EXTRA_LDFLAGS_libc-uclibc = "-lintl"
+LDFLAGS += "${EXTRA_LDFLAGS}"
# we do not want libncursesw if we can
do_configure_prepend() {
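
The reshuffle above also gives EXTRA_LDFLAGS an explicit empty default. A plausible reading (an assumption, since the commit message is not shown here): BitBake leaves references to undefined variables unexpanded, so without the default a non-uClibc build could end up with the literal string ${EXTRA_LDFLAGS} on its link line. With the ?= "" in place the expansion is well defined for every libc:

    # resulting values (illustrative)
    EXTRA_LDFLAGS ?= ""                    # glibc: expands to nothing
    EXTRA_LDFLAGS_libc-uclibc = "-lintl"   # uClibc: pull in libintl explicitly
    LDFLAGS += "${EXTRA_LDFLAGS}"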
diff --git a/meta/recipes-kernel/sysprof/sysprof_git.bb b/meta/recipes-kernel/sysprof/sysprof_git.bb
index ef1e270883..cfc814ff32 100644
--- a/meta/recipes-kernel/sysprof/sysprof_git.bb
+++ b/meta/recipes-kernel/sysprof/sysprof_git.bb
@@ -18,3 +18,8 @@ SRC_URI_append_mips64 = " file://rmb-mips.patch"
S = "${WORKDIR}/git"
inherit autotools pkgconfig
+
+# sysprof does not yet build for aarch64.
+#
+COMPATIBLE_HOST = "^(?!aarch64).*"
+
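
COMPATIBLE_HOST is matched against HOST_SYS (for an aarch64 machine, something like "aarch64-poky-linux"), so the negative lookahead above makes bitbake skip this recipe on aarch64 targets. An equivalent idiom seen elsewhere in oe-core, shown only as an alternative to the regex:

    # per-architecture override form (alternative, not what this commit uses)
    COMPATIBLE_HOST_aarch64 = "null"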
diff --git a/meta/recipes-kernel/systemtap/systemtap/configure-allow-to-disable-libvirt.patch b/meta/recipes-kernel/systemtap/systemtap/configure-allow-to-disable-libvirt.patch
new file mode 100644
index 0000000000..b4f2fbc066
--- /dev/null
+++ b/meta/recipes-kernel/systemtap/systemtap/configure-allow-to-disable-libvirt.patch
@@ -0,0 +1,39 @@
+From 5eb10d90af9178edb65e6091ae939d1b5b19bb78 Mon Sep 17 00:00:00 2001
+From: Wenzong Fan <wenzong.fan@windriver.com>
+Date: Tue, 23 Sep 2014 04:47:10 -0400
+Subject: [PATCH] systemtap: allow to disable libvirt
+
+Upstream-Status: Pending
+
+Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
+---
+ configure.ac | 13 +++++++++----
+ 1 file changed, 9 insertions(+), 4 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index a631ae7..cb4885b 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -525,10 +525,15 @@ dnl Check for the libvirt and libxml2 devel packages
+
+ dnl We require libvirt >= 1.0.2 because stapvirt relies on the
+ dnl virDomainOpenChannel function, which was implemented in 1.0.2.
+-PKG_CHECK_MODULES([libvirt], [libvirt >= 1.0.2], [
+- have_libvirt=yes
+- AC_DEFINE([HAVE_LIBVIRT],[1],[Define to 1 if libvirt development libraries are installed])
+- ], [have_libvirt=no])
++AC_ARG_ENABLE([libvirt],
++ AS_HELP_STRING([--disable-libvirt], [Do not use libvirt even if present]))
++
++if test "$enable_libvirt" != no; then
++ PKG_CHECK_MODULES([libvirt], [libvirt >= 1.0.2], [
++ have_libvirt=yes
++ AC_DEFINE([HAVE_LIBVIRT],[1],[Define to 1 if libvirt development libraries are installed])
++ ], [have_libvirt=no])
++fi
+ AM_CONDITIONAL([HAVE_LIBVIRT], [test "${have_libvirt}" = "yes"])
+ PKG_CHECK_MODULES([libxml2], [libxml-2.0], [
+ have_libxml2=yes
+--
+1.7.9.5
+
diff --git a/meta/recipes-kernel/systemtap/systemtap/system_map_location.patch b/meta/recipes-kernel/systemtap/systemtap/system_map_location.patch
new file mode 100644
index 0000000000..013af5c3a4
--- /dev/null
+++ b/meta/recipes-kernel/systemtap/systemtap/system_map_location.patch
@@ -0,0 +1,23 @@
+systemtap: Cross compilation fix
+
+This is a cross compilation fix. It allows systemtap to find
+the kernel map file in the right place, i.e. in the kernel build tree.
+Without this fix it takes a map file from the build host, if available.
+
+Upstream-Status: Pending
+
+Signed-off-by: Mikhail Durnev <mikhail_durnev@mentor.com>
+
+Index: git/session.cxx
+===================================================================
+--- git.orig/session.cxx
++++ git/session.cxx
+@@ -1634,7 +1634,7 @@ systemtap_session::parse_kernel_function
+ clog << _F("Kernel symbol table %s unavailable, (%s)",
+ system_map_path.c_str(), strerror(errno)) << endl;
+
+- system_map_path = "/boot/System.map-" + kernel_release;
++ system_map_path = kernel_build_tree + "/System.map-" + kernel_release;
+ system_map.clear();
+ system_map.open(system_map_path.c_str(), ifstream::in);
+ if (! system_map.is_open())
diff --git a/meta/recipes-kernel/systemtap/systemtap_git.bb b/meta/recipes-kernel/systemtap/systemtap_git.bb
index 488d417150..d0dd42a643 100644
--- a/meta/recipes-kernel/systemtap/systemtap_git.bb
+++ b/meta/recipes-kernel/systemtap/systemtap_git.bb
@@ -20,6 +20,9 @@ STAP_DOCS ?= "--disable-docs --disable-publican --disable-refdocs"
EXTRA_OECONF += "${STAP_DOCS} "
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[libvirt] = "--enable-libvirt,--disable-libvirt,libvirt"
+
inherit autotools gettext pkgconfig
BBCLASSEXTEND = "native nativesdk"
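
With the PACKAGECONFIG knob above, libvirt support stays disabled by default and can be switched on per build. A minimal sketch, assuming a libvirt recipe is provided by another layer (oe-core itself does not carry one):

    # conf/local.conf (illustrative)
    PACKAGECONFIG_append_pn-systemtap = " libvirt"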
diff --git a/meta/recipes-kernel/systemtap/systemtap_git.inc b/meta/recipes-kernel/systemtap/systemtap_git.inc
index 717d66f738..f9a19d964a 100644
--- a/meta/recipes-kernel/systemtap/systemtap_git.inc
+++ b/meta/recipes-kernel/systemtap/systemtap_git.inc
@@ -1,11 +1,13 @@
LICENSE = "GPLv2"
LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263"
-SRCREV = "8f0fcd995f7f650a2ee0a94539f90c99e6d19e1d"
-PV = "2.5+git${SRCPV}"
+SRCREV = "7682e51d2e11a35b2977ba9a85ab42f326b8ff8f"
+PV = "2.6+git${SRCPV}"
SRC_URI = "git://sourceware.org/git/systemtap.git \
file://docproc-build-fix.patch \
file://obsolete_automake_macros.patch \
+ file://system_map_location.patch \
+ file://configure-allow-to-disable-libvirt.patch \
"
# systemtap doesn't support mips
diff --git a/meta/recipes-multimedia/alsa/alsa-lib/Check-if-wordexp-function-is-supported.patch b/meta/recipes-multimedia/alsa/alsa-lib/Check-if-wordexp-function-is-supported.patch
index 41b3c23a40..75a6eb8a3c 100644
--- a/meta/recipes-multimedia/alsa/alsa-lib/Check-if-wordexp-function-is-supported.patch
+++ b/meta/recipes-multimedia/alsa/alsa-lib/Check-if-wordexp-function-is-supported.patch
@@ -1,6 +1,6 @@
-From 2555c5d62229cf269974f6ec6e4689ab97bbda42 Mon Sep 17 00:00:00 2001
+From e33357b59a10d44e9bec5d24100ce23ca300cc79 Mon Sep 17 00:00:00 2001
From: "Hong H. Pham" <hong.pham@windriver.com>
-Date: Tue, 26 Feb 2013 19:40:04 -0500
+Date: Fri, 29 Aug 2014 17:13:55 +0300
Subject: [PATCH] Check if wordexp function is supported
eglibc could be configured to build without wordexp, so it is not enough
@@ -8,24 +8,25 @@ to check if wordexp.h exists (the header file could be installed, but it's
possible that the wordexp() function is not supported). An additional
check if wordexp() is supported by the system C library is needed.
-Upstream-Status: Pending
+Upstream-Status: Inappropriate [configuration]
Signed-off-by: Hong H. Pham <hong.pham@windriver.com>
+Signed-off-by: Cristian Iorga <cristian.iorga@intel.com>
---
- configure.in | 5 ++++-
- src/userfile.c | 2 +-
+ configure.ac | 5 ++++-
+ src/userfile.c | 2 +-
2 files changed, 5 insertions(+), 2 deletions(-)
-diff --git a/configure.in b/configure.in
-index 4bcb0d6..ba1c2dd 100644
---- a/configure.in
-+++ b/configure.in
-@@ -333,7 +333,10 @@ arm*)
- esac
+diff --git a/configure.ac b/configure.ac
+index b8353a0..773b72f 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -311,7 +311,10 @@ fi
+ AC_SUBST(ALSA_DEPLIBS)
- dnl Check for wordexp.h
--AC_CHECK_HEADERS([wordexp.h])
-+AC_CHECK_HEADERS([wordexp.h],
+ dnl Check for headers
+-AC_CHECK_HEADERS([wordexp.h endian.h sys/endian.h])
++AC_CHECK_HEADERS([wordexp.h endian.h sys/endian.h],
+ dnl Make sure wordexp is supported by the C library
+ AC_CHECK_FUNCS([wordexp])
+)
@@ -46,5 +47,5 @@ index 3a73836..b8ce809 100644
#include <assert.h>
int snd_user_file(const char *file, char **result)
--
-1.7.10.4
+1.9.1
diff --git a/meta/recipes-multimedia/alsa/alsa-lib/Update-iatomic.h-functions-definitions-for-mips.patch b/meta/recipes-multimedia/alsa/alsa-lib/Update-iatomic.h-functions-definitions-for-mips.patch
deleted file mode 100644
index f1a7947c2b..0000000000
--- a/meta/recipes-multimedia/alsa/alsa-lib/Update-iatomic.h-functions-definitions-for-mips.patch
+++ /dev/null
@@ -1,68 +0,0 @@
-Upstream-Status: Backport
-
-Signed-off-by: Kai Kang <kai.kang@windriver.com>
-
-From f21f48a70f8437a671b58fcab75e54222a9eea16 Mon Sep 17 00:00:00 2001
-From: Kai Kang <jiashuo.kang at gmail.com>
-Date: Thu, 15 Aug 2013 17:17:19 +0800
-Subject: [PATCH] Update iatomic.h functions definitions for mips
-
-Functions atomic_add(s) and atomic_sub(s) are defined with 'extern
-__inline__' that may cause compile fails when cross compile for mips.
-The error message looks like:
-
-| pcm/.libs/libpcm.a(pcm_meter.o): In function `snd_pcm_meter_update_scope':
-| .../alsa-lib-1.0.27.2/src/pcm/pcm_meter.c:139: undefined reference to `atomic_sub'
-
-Replace the 'extern __inline__' with 'static __inline__' to fix this
-issue.
-
-Signed-off-by: Kai Kang <jiashuo.kang at gmail.com>
-Signed-off-by: Takashi Iwai <tiwai@suse.de>
----
- include/iatomic.h | 8 ++++----
- 1 file changed, 4 insertions(+), 4 deletions(-)
-
-diff --git a/include/iatomic.h b/include/iatomic.h
-index 5711fe8..2393297 100644
---- a/include/iatomic.h
-+++ b/include/iatomic.h
-@@ -720,7 +720,7 @@ typedef struct { volatile int counter; } atomic_t;
- * Atomically adds @i to @v. Note that the guaranteed useful range
- * of an atomic_t is only 24 bits.
- */
--extern __inline__ void atomic_add(int i, atomic_t * v)
-+static __inline__ void atomic_add(int i, atomic_t * v)
- {
- unsigned long temp;
-
-@@ -744,7 +744,7 @@ extern __inline__ void atomic_add(int i, atomic_t * v)
- * Atomically subtracts @i from @v. Note that the guaranteed
- * useful range of an atomic_t is only 24 bits.
- */
--extern __inline__ void atomic_sub(int i, atomic_t * v)
-+static __inline__ void atomic_sub(int i, atomic_t * v)
- {
- unsigned long temp;
-
-@@ -763,7 +763,7 @@ extern __inline__ void atomic_sub(int i, atomic_t * v)
- /*
- * Same as above, but return the result value
- */
--extern __inline__ int atomic_add_return(int i, atomic_t * v)
-+static __inline__ int atomic_add_return(int i, atomic_t * v)
- {
- unsigned long temp, result;
-
-@@ -784,7 +784,7 @@ extern __inline__ int atomic_add_return(int i, atomic_t * v)
- return result;
- }
-
--extern __inline__ int atomic_sub_return(int i, atomic_t * v)
-+static __inline__ int atomic_sub_return(int i, atomic_t * v)
- {
- unsigned long temp, result;
-
---
-1.8.1.2
-
diff --git a/meta/recipes-multimedia/alsa/alsa-lib_1.0.27.2.bb b/meta/recipes-multimedia/alsa/alsa-lib_1.0.28.bb
index fc0847d3a2..9a4aab44d4 100644
--- a/meta/recipes-multimedia/alsa/alsa-lib_1.0.27.2.bb
+++ b/meta/recipes-multimedia/alsa/alsa-lib_1.0.28.bb
@@ -14,16 +14,15 @@ BBCLASSEXTEND = "native nativesdk"
#FIXME: remove the following
ARM_INSTRUCTION_SET = "arm"
-SRC_URI = "ftp://ftp.alsa-project.org/pub/lib/alsa-lib-${PV}.tar.bz2 \
+SRC_URI = "ftp://ftp.alsa-project.org/pub/lib/${BP}.tar.bz2 \
file://Check-if-wordexp-function-is-supported.patch \
file://fix-tstamp-declaration.patch \
- file://Update-iatomic.h-functions-definitions-for-mips.patch \
file://0001-pcm-route-Use-get32-for-multi-source-route-calculati.patch \
file://0001-pcm-rate-fix-hw_ptr-exceed-the-boundary.patch \
file://0001-pcm-pcm_local.h-include-time.h-to-enable-CLOCK_MONOT.patch \
"
-SRC_URI[md5sum] = "69129a7c37697f81ac092335e9fa452b"
-SRC_URI[sha256sum] = "690ed393e7efd4fc7e3a2d2cda5449298ca0c895197e5914e350882012430d19"
+SRC_URI[md5sum] = "c9e21b88a2b3e6e12ea7ba0f3b271fc3"
+SRC_URI[sha256sum] = "3c074b85dde1b30e78ef4995579765833e5b693fbbd8f834c335e080cb734a6d"
inherit autotools pkgconfig
@@ -48,7 +47,7 @@ ${datadir}/alsa/pcm/default.conf \
${datadir}/alsa/pcm/dmix.conf \
${datadir}/alsa/pcm/dsnoop.conf"
-RDEPENDS_libasound = "alsa-conf-base"
+RDEPENDS_libasound = "alsa-conf-base alsa-conf"
# upgrade path
RPROVIDES_${PN}-dev = "alsa-dev"
RREPLACES_${PN}-dev = "alsa-dev"
diff --git a/meta/recipes-multimedia/alsa/alsa-tools/mips_has_no_io_h.patch b/meta/recipes-multimedia/alsa/alsa-tools/mips_has_no_io_h.patch
deleted file mode 100644
index 09b10f17e6..0000000000
--- a/meta/recipes-multimedia/alsa/alsa-tools/mips_has_no_io_h.patch
+++ /dev/null
@@ -1,16 +0,0 @@
-Upstream-Status: Pending
-Signed-off-by: Saul Wold <sgw@linux.intel.com>
-
-Index: alsa-tools-1.0.26.1/hda-verb/hda-verb.c
-===================================================================
---- alsa-tools-1.0.26.1.orig/hda-verb/hda-verb.c
-+++ alsa-tools-1.0.26.1/hda-verb/hda-verb.c
-@@ -13,7 +13,7 @@
- #include <ctype.h>
- #include <unistd.h>
- #include <sys/ioctl.h>
--#ifndef __PPC__
-+#if !(__PPC__ || __mips__)
- #include <sys/io.h>
- #endif
- #include <sys/types.h>
diff --git a/meta/recipes-multimedia/alsa/alsa-tools_1.0.27.bb b/meta/recipes-multimedia/alsa/alsa-tools_1.0.28.bb
index bca7170a28..4b9509e863 100644
--- a/meta/recipes-multimedia/alsa/alsa-tools_1.0.27.bb
+++ b/meta/recipes-multimedia/alsa/alsa-tools_1.0.28.bb
@@ -8,14 +8,13 @@ DEPENDS = "alsa-lib ncurses"
LIC_FILES_CHKSUM = "file://hdsploader/COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
file://ld10k1/COPYING.LIB;md5=7fbc338309ac38fefcd64b04bb903e34"
-SRC_URI = "ftp://ftp.alsa-project.org/pub/tools/alsa-tools-${PV}.tar.bz2 \
- file://mips_has_no_io_h.patch \
+SRC_URI = "ftp://ftp.alsa-project.org/pub/tools/${BP}.tar.bz2 \
file://autotools.patch \
${@bb.utils.contains('DISTRO_FEATURES', 'x11', '', \
'file://makefile_no_gtk.patch', d)}"
-SRC_URI[md5sum] = "1ea381d00a6069a98613aa7effa4cb51"
-SRC_URI[sha256sum] = "6562611b5a6560712f109e09740a9d4fa47296b07ed9590cb44139c5f154ada2"
+SRC_URI[md5sum] = "e6c929175d8ee729c06d49b51439bad6"
+SRC_URI[sha256sum] = "76e59711c6d0f39cbddce83ce1ed8da00bad112fee021f94fa990d8685cc3761"
inherit autotools-brokensep pkgconfig
diff --git a/meta/recipes-multimedia/alsa/alsa-utils-alsaconf_1.0.27.2.bb b/meta/recipes-multimedia/alsa/alsa-utils-alsaconf_1.0.28.bb
index 968c81ee2b..968c81ee2b 100644
--- a/meta/recipes-multimedia/alsa/alsa-utils-alsaconf_1.0.27.2.bb
+++ b/meta/recipes-multimedia/alsa/alsa-utils-alsaconf_1.0.28.bb
diff --git a/meta/recipes-multimedia/alsa/alsa-utils/0001-alsactl-don-t-let-systemd-unit-restore-the-volume-wh.patch b/meta/recipes-multimedia/alsa/alsa-utils/0001-alsactl-don-t-let-systemd-unit-restore-the-volume-wh.patch
index 477cd11b12..e99dd515f0 100644
--- a/meta/recipes-multimedia/alsa/alsa-utils/0001-alsactl-don-t-let-systemd-unit-restore-the-volume-wh.patch
+++ b/meta/recipes-multimedia/alsa/alsa-utils/0001-alsactl-don-t-let-systemd-unit-restore-the-volume-wh.patch
@@ -1,35 +1,44 @@
-From 37c99a3cbff36915b56a5703eeaed3c70796f80f Mon Sep 17 00:00:00 2001
+From 43a56fa36a12f09ccd78b3cf5e6ae197fcab501f Mon Sep 17 00:00:00 2001
From: Koen Kooi <koen@dominion.thruhere.net>
-Date: Sun, 9 Oct 2011 20:06:35 +0200
-Subject: [PATCH] alsactl: don't let systemd unit restore the volume when asound.state is missing
+Date: Fri, 29 Aug 2014 18:58:56 +0300
+Subject: [PATCH] alsactl: don't let systemd unit restore the volume when
+ asound.state is missing
This avoids an error on bootup
-Signed-off-by: Koen Kooi <koen@dominion.thruhere.net>
-
Filed as https://bugtrack.alsa-project.org/alsa-bug/view.php?id=5459
Upstream-Status: Pending
-Index: alsa-utils-1.0.27.2/alsactl/Makefile.am
-===================================================================
---- alsa-utils-1.0.27.2.orig/alsactl/Makefile.am
-+++ alsa-utils-1.0.27.2/alsactl/Makefile.am
-@@ -36,8 +36,8 @@ install-data-hook:
+Signed-off-by: Koen Kooi <koen@dominion.thruhere.net>
+Signed-off-by: Cristian Iorga <cristian.iorga@intel.com>
+---
+ alsactl/Makefile.am | 7 ++++---
+ alsactl/alsa-restore.service.in | 1 +
+ 2 files changed, 5 insertions(+), 3 deletions(-)
+
+diff --git a/alsactl/Makefile.am b/alsactl/Makefile.am
+index 47f06e9..b728c06 100644
+--- a/alsactl/Makefile.am
++++ b/alsactl/Makefile.am
+@@ -43,9 +43,10 @@ install-data-hook:
endif
edit = \
- $(SED) -r -e 's,@sbindir\@,$(sbindir),g' \
+- -e 's,@mydatadir\@,$(mydatadir),g' \
- -e 's,@daemonswitch\@,$(ALSACTL_DAEMONSWITCH),g' \
-+ $(SED) -e 's,@localstatedir\@,$(localstatedir),g' -e 's,@sbindir\@,$(sbindir),g' \
++ $(SED) -e 's,@localstatedir\@,$(localstatedir),g' \
++ -e 's,@sbindir\@,$(sbindir),g' \
++ -e 's,@mydatadir\@,$(mydatadir),g' \
+ -e 's,@daemonswitch\@,$(ALSACTL_DAEMONSWITCH),g' \
< $< > $@ || rm $@
alsa-state.service: alsa-state.service.in
-Index: alsa-utils-1.0.27.2/alsactl/alsa-restore.service.in
-===================================================================
---- alsa-utils-1.0.27.2.orig/alsactl/alsa-restore.service.in
-+++ alsa-utils-1.0.27.2/alsactl/alsa-restore.service.in
+diff --git a/alsactl/alsa-restore.service.in b/alsactl/alsa-restore.service.in
+index 245a439..b017854 100644
+--- a/alsactl/alsa-restore.service.in
++++ b/alsactl/alsa-restore.service.in
@@ -10,6 +10,7 @@ DefaultDependencies=no
After=alsa-state.service
Before=shutdown.target
@@ -38,3 +47,6 @@ Index: alsa-utils-1.0.27.2/alsactl/alsa-restore.service.in
[Service]
Type=oneshot
+--
+1.9.1
+
diff --git a/meta/recipes-multimedia/alsa/alsa-utils/alsa-utils-aplay-interrupt-signal-handling.patch b/meta/recipes-multimedia/alsa/alsa-utils/alsa-utils-aplay-interrupt-signal-handling.patch
new file mode 100644
index 0000000000..5df004aeae
--- /dev/null
+++ b/meta/recipes-multimedia/alsa/alsa-utils/alsa-utils-aplay-interrupt-signal-handling.patch
@@ -0,0 +1,48 @@
+Upstream-Status: Pending
+
+aplay/arecord (alsa-utils v1.0.28) cannot interrupt streaming
+via CTRL-C. Fix the issue by reverting the buggy upstream changes and
+properly handling the 'in_aborting' flag in the appropriate functions.
+
+Signed-off-by: Anant Agrawal <Anant_Agrawal@mentor.com>
+Signed-off-by: Mikhail Durnev <mikhail_durnev@mentor.com>
+
+--- a/aplay/aplay.c 2014-05-19 16:25:14.000000000 +0530
++++ b/aplay/aplay.c 2014-05-20 15:17:14.364823007 +0530
+@@ -392,14 +392,22 @@
+ putchar('\n');
+ if (!quiet_mode)
+ fprintf(stderr, _("Aborted by signal %s...\n"), strsignal(sig));
+- if (handle)
++ if (stream == SND_PCM_STREAM_CAPTURE) {
++ if (fmt_rec_table[file_type].end) {
++ fmt_rec_table[file_type].end(fd);
++ fd = -1;
++ }
++ stream = -1;
++ }
++ if (fd > 1) {
++ close(fd);
++ fd = -1;
++ }
++ if (handle && sig != SIGABRT) {
+ snd_pcm_abort(handle);
+- if (sig == SIGABRT) {
+- /* do not call snd_pcm_close() and abort immediately */
+ handle = NULL;
+- prg_exit(EXIT_FAILURE);
+ }
+- signal(sig, signal_handler);
++ prg_exit(EXIT_FAILURE);
+ }
+
+ /* call on SIGUSR1 signal. */
+@@ -2096,7 +2104,7 @@
+ ssize_t result = count, r;
+ size_t size;
+
+- while (count > 0) {
++ while (count > 0 && !in_aborting) {
+ size = count;
+ if (size > chunk_bytes - buffer_pos)
+ size = chunk_bytes - buffer_pos;
diff --git a/meta/recipes-multimedia/alsa/alsa-utils_1.0.27.2.bb b/meta/recipes-multimedia/alsa/alsa-utils_1.0.28.bb
index 047fcfe5dd..5f35d76a6e 100644
--- a/meta/recipes-multimedia/alsa/alsa-utils_1.0.27.2.bb
+++ b/meta/recipes-multimedia/alsa/alsa-utils_1.0.28.bb
@@ -13,10 +13,11 @@ PACKAGECONFIG[xmlto] = "--enable-xmlto, --disable-xmlto, xmlto-native docbook-xm
SRC_URI = "ftp://ftp.alsa-project.org/pub/utils/alsa-utils-${PV}.tar.bz2 \
file://0001-alsactl-don-t-let-systemd-unit-restore-the-volume-wh.patch \
+ file://alsa-utils-aplay-interrupt-signal-handling.patch \
"
-SRC_URI[md5sum] = "b65e9a04181bd7c9db7667a4566f8dc3"
-SRC_URI[sha256sum] = "02bfac39092f3b68d743c23ad3d688d6c5aa8df69f2ccd692c5b8282edb352ea"
+SRC_URI[md5sum] = "361552d5b1cacd0a1e7ba09e69990211"
+SRC_URI[sha256sum] = "f3ff4c89b0125a7797b1b13cd094cc92276e655458274967386e812d03642acc"
# lazy hack. needs proper fixing in gettext.m4, see
# http://bugs.openembedded.org/show_bug.cgi?id=2348
diff --git a/meta/recipes-multimedia/flac/flac_1.3.0.bb b/meta/recipes-multimedia/flac/flac_1.3.0.bb
index fdf38e9ea9..cf786a473d 100644
--- a/meta/recipes-multimedia/flac/flac_1.3.0.bb
+++ b/meta/recipes-multimedia/flac/flac_1.3.0.bb
@@ -21,6 +21,8 @@ SRC_URI = "http://downloads.xiph.org/releases/flac/${BP}.tar.xz \
SRC_URI[md5sum] = "13b5c214cee8373464d3d65dee362cdd"
SRC_URI[sha256sum] = "fa2d64aac1f77e31dfbb270aeb08f5b32e27036a52ad15e69a77e309528010dc"
+CLEANBROKEN = "1"
+
inherit autotools-brokensep gettext
EXTRA_OECONF = "--disable-oggtest \
diff --git a/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-avcodec-smc-fix-off-by-1-error.patch b/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-avcodec-smc-fix-off-by-1-error.patch
new file mode 100644
index 0000000000..3ca6fc4dc5
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-avcodec-smc-fix-off-by-1-error.patch
@@ -0,0 +1,32 @@
+From c17a0ad1df15a94d0b1239adc2afb593bdf0a153 Mon Sep 17 00:00:00 2001
+From: Michael Niedermayer <michaelni@gmx.at>
+Date: Fri, 3 Oct 2014 22:50:45 +0200
+Subject: [PATCH 1/2] avcodec/smc: fix off by 1 error
+
+Upstream-Status: Backport
+
+Fixes out of array access
+Fixes: asan_heap-oob_1685bf0_5_asan_heap-oob_1f35116_430_smc.mov
+
+Found-by: Mateusz "j00ru" Jurczyk and Gynvael Coldwind
+Signed-off-by: Michael Niedermayer <michaelni@gmx.at>
+---
+ libavcodec/smc.c | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/gst-libs/ext/libav/libavcodec/smc.c b/gst-libs/ext/libav/libavcodec/smc.c
+index 3cd5e53..dec9f71 100644
+--- a/gst-libs/ext/libav/libavcodec/smc.c
++++ b/gst-libs/ext/libav/libavcodec/smc.c
+@@ -69,7 +69,7 @@ typedef struct SmcContext {
+ row_ptr += stride * 4; \
+ } \
+ total_blocks--; \
+- if (total_blocks < 0) \
++ if (total_blocks < 0 + !!n_blocks) \
+ { \
+ av_log(s->avctx, AV_LOG_INFO, "warning: block counter just went negative (this should not happen)\n"); \
+ return; \
+--
+2.1.0
+
diff --git a/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-h264-set-parameters-from-SPS-whenever-it-changes.patch b/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-h264-set-parameters-from-SPS-whenever-it-changes.patch
new file mode 100644
index 0000000000..05a9de3334
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-h264-set-parameters-from-SPS-whenever-it-changes.patch
@@ -0,0 +1,145 @@
+gst-ffmpeg: h264: set parameters from SPS whenever it changes
+
+Fixes a crash in the fuzzed sample sample_varPAR.avi_s26638 with
+alternating bit depths.
+
+Upstream-Status: Backport
+
+Signed-off-by: Yue Tao <yue.tao@windriver.com>
+
+diff --git a/gst-libs/ext/libav/libavcodec/h264.c.old b/gst-libs/ext/libav/libavcodec/h264.c
+index 3621f41..718906a 100644
+--- a/gst-libs/ext/libav/libavcodec/h264.c.old
++++ b/gst-libs/ext/libav/libavcodec/h264.c
+@@ -2491,6 +2491,34 @@ int ff_h264_get_profile(SPS *sps)
+ return profile;
+ }
+
++static int h264_set_parameter_from_sps(H264Context *h)
++{
++ MpegEncContext *s = &h->s;
++ AVCodecContext * avctx= s->avctx;
++
++ if (s->flags& CODEC_FLAG_LOW_DELAY ||
++ (h->sps.bitstream_restriction_flag && !h->sps.num_reorder_frames))
++ s->low_delay=1;
++
++ if(avctx->has_b_frames < 2)
++ avctx->has_b_frames= !s->low_delay;
++
++ if (avctx->bits_per_raw_sample != h->sps.bit_depth_luma) {
++ if (h->sps.bit_depth_luma >= 8 && h->sps.bit_depth_luma <= 10) {
++ avctx->bits_per_raw_sample = h->sps.bit_depth_luma;
++ h->pixel_shift = h->sps.bit_depth_luma > 8;
++
++ ff_h264dsp_init(&h->h264dsp, h->sps.bit_depth_luma);
++ ff_h264_pred_init(&h->hpc, s->codec_id, h->sps.bit_depth_luma);
++ dsputil_init(&s->dsp, s->avctx);
++ } else {
++ av_log(avctx, AV_LOG_DEBUG, "Unsupported bit depth: %d\n", h->sps.bit_depth_luma);
++ return -1;
++ }
++ }
++ return 0;
++}
++
+ /**
+ * decodes a slice header.
+ * This will also call MPV_common_init() and frame_start() as needed.
+@@ -2505,7 +2533,7 @@ static int decode_slice_header(H264Context *h, H264Context *h0){
+ MpegEncContext * const s0 = &h0->s;
+ unsigned int first_mb_in_slice;
+ unsigned int pps_id;
+- int num_ref_idx_active_override_flag;
++ int num_ref_idx_active_override_flag, ret;
+ unsigned int slice_type, tmp, i, j;
+ int default_ref_list_done = 0;
+ int last_pic_structure;
+@@ -2569,7 +2597,17 @@ static int decode_slice_header(H264Context *h, H264Context *h0){
+ av_log(h->s.avctx, AV_LOG_ERROR, "non-existing SPS %u referenced\n", h->pps.sps_id);
+ return -1;
+ }
+- h->sps = *h0->sps_buffers[h->pps.sps_id];
++
++ if (h->pps.sps_id != h->current_sps_id ||
++ h0->sps_buffers[h->pps.sps_id]->new) {
++ h0->sps_buffers[h->pps.sps_id]->new = 0;
++
++ h->current_sps_id = h->pps.sps_id;
++ h->sps = *h0->sps_buffers[h->pps.sps_id];
++
++ if ((ret = h264_set_parameter_from_sps(h)) < 0)
++ return ret;
++ }
+
+ s->avctx->profile = ff_h264_get_profile(&h->sps);
+ s->avctx->level = h->sps.level_idc;
+@@ -3811,26 +3811,8 @@ static int decode_nal_units(H264Context *h, const uint8_t *buf, int buf_size){
+ case NAL_SPS:
+ init_get_bits(&s->gb, ptr, bit_length);
+ ff_h264_decode_seq_parameter_set(h);
+-
+- if (s->flags& CODEC_FLAG_LOW_DELAY ||
+- (h->sps.bitstream_restriction_flag && !h->sps.num_reorder_frames))
+- s->low_delay=1;
+-
+- if(avctx->has_b_frames < 2)
+- avctx->has_b_frames= !s->low_delay;
+-
+- if (avctx->bits_per_raw_sample != h->sps.bit_depth_luma) {
+- if (h->sps.bit_depth_luma >= 8 && h->sps.bit_depth_luma <= 10) {
+- avctx->bits_per_raw_sample = h->sps.bit_depth_luma;
+- h->pixel_shift = h->sps.bit_depth_luma > 8;
+-
+- ff_h264dsp_init(&h->h264dsp, h->sps.bit_depth_luma);
+- ff_h264_pred_init(&h->hpc, s->codec_id, h->sps.bit_depth_luma);
+- dsputil_init(&s->dsp, s->avctx);
+- } else {
+- av_log(avctx, AV_LOG_DEBUG, "Unsupported bit depth: %d\n", h->sps.bit_depth_luma);
+- return -1;
+- }
++ if (h264_set_parameter_from_sps(h) < 0) {
++ return -1;
+ }
+ break;
+ case NAL_PPS:
+diff --git a/gst-libs/ext/libav/libavcodec/h264.h.old b/gst-libs/ext/libav/libavcodec/h264.h
+index e3cc815..b77ad98 100644
+--- a/gst-libs/ext/libav/libavcodec/h264.h.old
++++ b/gst-libs/ext/libav/libavcodec/h264.h
+@@ -202,6 +202,7 @@ typedef struct SPS{
+ int bit_depth_chroma; ///< bit_depth_chroma_minus8 + 8
+ int residual_color_transform_flag; ///< residual_colour_transform_flag
+ int constraint_set_flags; ///< constraint_set[0-3]_flag
++ int new; ///< flag to keep track if the decoder context needs re-init due to changed SPS
+ }SPS;
+
+ /**
+@@ -333,6 +334,7 @@ typedef struct H264Context{
+ int emu_edge_width;
+ int emu_edge_height;
+
++ unsigned current_sps_id; ///< id of the current SPS
+ SPS sps; ///< current sps
+
+ /**
+diff --git a/gst-libs/ext/libav/libavcodec/h264_ps.c.old b/gst-libs/ext/libav/libavcodec/h264_ps.c
+index 7491807..0929098 100644
+--- a/gst-libs/ext/libav/libavcodec/h264_ps.c.old
++++ b/gst-libs/ext/libav/libavcodec/h264_ps.c
+@@ -438,10 +438,13 @@ int ff_h264_decode_seq_parameter_set(H264Context *h){
+ sps->timing_info_present_flag ? sps->time_scale : 0
+ );
+ }
++ sps->new = 1;
+
+ av_free(h->sps_buffers[sps_id]);
+- h->sps_buffers[sps_id]= sps;
+- h->sps = *sps;
++ h->sps_buffers[sps_id] = sps;
++ h->sps = *sps;
++ h->current_sps_id = sps_id;
++
+ return 0;
+ fail:
+ av_free(sps);
diff --git a/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-h264-skip-error-concealment-when-SPS-and-slices-are-.patch b/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-h264-skip-error-concealment-when-SPS-and-slices-are-.patch
new file mode 100644
index 0000000000..5d45c1a96c
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0001-h264-skip-error-concealment-when-SPS-and-slices-are-.patch
@@ -0,0 +1,33 @@
+gst-ffmpeg: h264: skip error concealment when SPS and slices are
+ mismatching
+
+Fixes out of array accesses
+
+Found-by: Mateusz "j00ru" Jurczyk and Gynvael Coldwind
+Signed-off-by: Michael Niedermayer <michaelni@gmx.at>
+(cherry picked from commit 695af8eed642ff0104834495652d1ee784a4c14d)
+
+Upstream-Status: Backport
+
+Signed-off-by: Michael Niedermayer <michaelni@gmx.at>
+Signed-off-by: Yue Tao <yue.tao@windriver.com>
+---
+ libavcodec/h264.c | 2 +-
+ 1 files changed, 1 insertions(+), 1 deletions(-)
+
+diff --git a/libavcodec/h264.c b/libavcodec/h264.c
+index da144db..0aab4e7 100644
+--- a/gst-libs/ext/libav/libavcodec/h264.c
++++ b/gst-libs/ext/libav/libavcodec/h264.c
+@@ -2351,7 +2351,7 @@ static int field_end(H264Context *h, int in_setup)
+ * past end by one (callers fault) and resync_mb_y != 0
+ * causes problems for the first MB line, too.
+ */
+- if (!FIELD_PICTURE)
++ if (!FIELD_PICTURE && h->current_slice && !h->sps.new)
+ ff_er_frame_end(s);
+
+ ff_MPV_frame_end(s);
+--
+1.7.5.4
+
diff --git a/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0002-avcodec-mjpegdec-check-bits-per-pixel-for-changes-si.patch b/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0002-avcodec-mjpegdec-check-bits-per-pixel-for-changes-si.patch
new file mode 100644
index 0000000000..c8bafd570e
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gst-ffmpeg-0.10.13/0002-avcodec-mjpegdec-check-bits-per-pixel-for-changes-si.patch
@@ -0,0 +1,68 @@
+From 6043c431c97d55173f339fafbd033d3c0642e2e9 Mon Sep 17 00:00:00 2001
+From: Michael Niedermayer <michaelni@gmx.at>
+Date: Fri, 3 Oct 2014 01:50:27 +0200
+Subject: [PATCH 2/2] avcodec/mjpegdec: check bits per pixel for changes
+ similar to dimensions
+
+Upstream-Status: Backport
+
+Fixes out of array accesses
+Fixes: asan_heap-oob_16668e9_2_asan_heap-oob_16668e9_346_miss_congeniality_pegasus_mjpg.avi
+
+Found-by: Mateusz "j00ru" Jurczyk and Gynvael Coldwind
+Signed-off-by: Michael Niedermayer <michaelni@gmx.at>
+
+Conflicts:
+ libavcodec/mjpegdec.c
+---
+ libavcodec/mjpegdec.c | 15 ++++++++-------
+ 1 file changed, 8 insertions(+), 7 deletions(-)
+
+diff --git a/gst-libs/ext/libav/libavcodec/mjpegdec.c b/gst-libs/ext/libav/libavcodec/mjpegdec.c
+index 84343c0..c0137d8 100644
+--- a/gst-libs/ext/libav/libavcodec/mjpegdec.c
++++ b/gst-libs/ext/libav/libavcodec/mjpegdec.c
+@@ -210,16 +210,16 @@ int ff_mjpeg_decode_dht(MJpegDecodeContext *s)
+
+ int ff_mjpeg_decode_sof(MJpegDecodeContext *s)
+ {
+- int len, nb_components, i, width, height, pix_fmt_id;
++ int len, nb_components, i, bits, width, height, pix_fmt_id;
+
+ /* XXX: verify len field validity */
+ len = get_bits(&s->gb, 16);
+- s->bits= get_bits(&s->gb, 8);
++ bits= get_bits(&s->gb, 8);
+
+- if(s->pegasus_rct) s->bits=9;
+- if(s->bits==9 && !s->pegasus_rct) s->rct=1; //FIXME ugly
++ if(s->pegasus_rct) bits=9;
++ if(bits==9 && !s->pegasus_rct) s->rct=1; //FIXME ugly
+
+- if (s->bits != 8 && !s->lossless){
++ if (bits != 8 && !s->lossless){
+ av_log(s->avctx, AV_LOG_ERROR, "only 8 bits/component accepted\n");
+ return -1;
+ }
+@@ -239,7 +239,7 @@ int ff_mjpeg_decode_sof(MJpegDecodeContext *s)
+ if (nb_components <= 0 ||
+ nb_components > MAX_COMPONENTS)
+ return -1;
+- if (s->ls && !(s->bits <= 8 || nb_components == 1)){
++ if (s->ls && !(bits <= 8 || nb_components == 1)){
+ av_log(s->avctx, AV_LOG_ERROR, "only <= 8 bits/component or 16-bit gray accepted for JPEG-LS\n");
+ return -1;
+ }
+@@ -272,10 +272,11 @@ int ff_mjpeg_decode_sof(MJpegDecodeContext *s)
+
+ /* if different size, realloc/alloc picture */
+ /* XXX: also check h_count and v_count */
+- if (width != s->width || height != s->height) {
++ if (width != s->width || height != s->height || bits != s->bits) {
+ av_freep(&s->qscale_table);
+
+ s->width = width;
++ s->bits= bits;
+ s->height = height;
+ s->interlaced = 0;
+
diff --git a/meta/recipes-multimedia/gstreamer/gst-ffmpeg_0.10.13.bb b/meta/recipes-multimedia/gstreamer/gst-ffmpeg_0.10.13.bb
index bbe3308b03..30e5deb1f9 100644
--- a/meta/recipes-multimedia/gstreamer/gst-ffmpeg_0.10.13.bb
+++ b/meta/recipes-multimedia/gstreamer/gst-ffmpeg_0.10.13.bb
@@ -53,6 +53,10 @@ SRC_URI = "http://gstreamer.freedesktop.org/src/${BPN}/${BPN}-${PV}.tar.bz2 \
file://0001-qdm2-check-array-index-before-use-fix-out-of-array-a.patch \
file://0001-lavf-compute-probe-buffer-size-more-reliably.patch \
file://0001-ffserver-set-oformat.patch \
+ file://0001-h264-set-parameters-from-SPS-whenever-it-changes.patch \
+ file://0001-h264-skip-error-concealment-when-SPS-and-slices-are-.patch \
+ file://0001-avcodec-smc-fix-off-by-1-error.patch \
+ file://0002-avcodec-mjpegdec-check-bits-per-pixel-for-changes-si.patch \
${@bb.utils.contains('PACKAGECONFIG', 'libav9', 'file://libav-9.patch', '', d)} \
"
diff --git a/meta/recipes-multimedia/gstreamer/gst-meta-base_0.10.bb b/meta/recipes-multimedia/gstreamer/gst-meta-base_0.10.bb
index 518ab721af..039abe14fa 100644
--- a/meta/recipes-multimedia/gstreamer/gst-meta-base_0.10.bb
+++ b/meta/recipes-multimedia/gstreamer/gst-meta-base_0.10.bb
@@ -1,6 +1,11 @@
SUMMARY = "GStreamer package groups"
LICENSE = "MIT"
-DEPENDS = "gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad"
+
+COMMERCIAL_PLUGINS = "${COMMERCIAL_AUDIO_PLUGINS} ${COMMERCIAL_VIDEO_PLUGINS}"
+DEPENDS_UGLY="${@'gst-plugins-ugly' if 'ugly' in COMMERCIAL_PLUGINS.split('-') else ''}"
+DEPENDS_BAD="${@'gst-plugins-bad' if 'bad' in COMMERCIAL_PLUGINS.split('-') else ''}"
+DEPENDS = "gstreamer gst-plugins-base gst-plugins-good ${DEPENDS_UGLY} ${DEPENDS_BAD}"
+
LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \
file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
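
The inline Python above only checks whether the tokens 'ugly' or 'bad' appear in the hyphen-split COMMERCIAL_PLUGINS string, so listing any commercial plugin package is enough to pull the matching plugins recipe back into DEPENDS. A hedged local.conf sketch, using package names of the kind given as examples in the Yocto reference configuration (adjust to what your image actually needs):

    # conf/local.conf (illustrative)
    LICENSE_FLAGS_WHITELIST = "commercial"
    COMMERCIAL_AUDIO_PLUGINS = "gst-plugins-ugly-mad gst-plugins-ugly-mpegaudioparse"
    COMMERCIAL_VIDEO_PLUGINS = "gst-plugins-ugly-mpeg2dec gst-plugins-bad-mpegvideoparse"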
diff --git a/meta/recipes-multimedia/gstreamer/gst-plugins-bad_0.10.23.bb b/meta/recipes-multimedia/gstreamer/gst-plugins-bad_0.10.23.bb
index 2ecdcf25ce..0f64871497 100644
--- a/meta/recipes-multimedia/gstreamer/gst-plugins-bad_0.10.23.bb
+++ b/meta/recipes-multimedia/gstreamer/gst-plugins-bad_0.10.23.bb
@@ -37,6 +37,7 @@ PACKAGECONFIG[openal] = "--enable-openal,--disable-openal,openal-soft"
PACKAGECONFIG[schro] = "--enable-schro,--disable-schro,schroedinger"
PACKAGECONFIG[dc1394] = "--enable-dc1394,--disable-dc1394,libdc1394"
PACKAGECONFIG[faac] = "--enable-faac,--disable-faac,faac"
+PACKAGECONFIG[rtmp] = "--enable-rtmp,--disable-rtmp,rtmpdump"
ARM_INSTRUCTION_SET = "arm"
diff --git a/meta/recipes-multimedia/gstreamer/gst-plugins-base-0.10.36/audioresample-Fix-build-on-x86-if-emmintrin.h-is-ava.patch b/meta/recipes-multimedia/gstreamer/gst-plugins-base-0.10.36/audioresample-Fix-build-on-x86-if-emmintrin.h-is-ava.patch
new file mode 100644
index 0000000000..52142562ab
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gst-plugins-base-0.10.36/audioresample-Fix-build-on-x86-if-emmintrin.h-is-ava.patch
@@ -0,0 +1,37 @@
+audioresample: Fix build on x86 if emmintrin.h is available but can't be used
+
+On x86, EMMINTRIN is defined but not usable without SSE so check for
+__SSE__ and __SSE2__ as well.
+
+https://bugzilla.gnome.org/show_bug.cgi?id=670690
+
+Upstream-Status: Backport
+
+Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
+---
+ gst/audioresample/resample.c | 4 ++--
+ 1 files changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/gst/audioresample/resample.c b/gst/audioresample/resample.c
+index 98d006c..481fa01 100644
+--- a/gst/audioresample/resample.c
++++ b/gst/audioresample/resample.c
+@@ -77,13 +77,13 @@
+ #define EXPORT G_GNUC_INTERNAL
+
+ #ifdef _USE_SSE
+-#ifndef HAVE_XMMINTRIN_H
++#if !defined(__SSE__) || !defined(HAVE_XMMINTRIN_H)
+ #undef _USE_SSE
+ #endif
+ #endif
+
+ #ifdef _USE_SSE2
+-#ifndef HAVE_EMMINTRIN_H
++#if !defined(__SSE2__) || !defined(HAVE_EMMINTRIN_H)
+ #undef _USE_SSE2
+ #endif
+ #endif
+--
+1.7.1
+
diff --git a/meta/recipes-multimedia/gstreamer/gst-plugins-base_0.10.36.bb b/meta/recipes-multimedia/gstreamer/gst-plugins-base_0.10.36.bb
index 4a8a9947a7..c8a949973d 100644
--- a/meta/recipes-multimedia/gstreamer/gst-plugins-base_0.10.36.bb
+++ b/meta/recipes-multimedia/gstreamer/gst-plugins-base_0.10.36.bb
@@ -9,7 +9,9 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3 \
DEPENDS += "alsa-lib liboil libogg libvorbis libtheora util-linux tremor glib-2.0-native"
SRC_URI += "file://gst-plugins-base-tremor.patch \
- file://configure.ac-fix-subparse-plugin.patch"
+ file://configure.ac-fix-subparse-plugin.patch \
+ file://audioresample-Fix-build-on-x86-if-emmintrin.h-is-ava.patch \
+"
SRC_URI[md5sum] = "776c73883e567f67b9c4a2847d8d041a"
SRC_URI[sha256sum] = "2cd3b0fa8e9b595db8f514ef7c2bdbcd639a0d63d154c00f8c9b609321f49976"
diff --git a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
index 0503b34ea3..7bb567866f 100644
--- a/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
+++ b/meta/recipes-multimedia/gstreamer/gst-plugins-package.inc
@@ -50,6 +50,8 @@ ALLOW_EMPTY_${PN}-staticdev = "1"
PACKAGES += "${PN}-apps ${PN}-meta ${PN}-glib"
FILES_${PN}-apps = "${bindir}"
+RDEPENDS_${PN}-apps += "perl"
+
FILES_${PN} = "${datadir}/gstreamer-${LIBV}"
FILES_${PN}-dbg += "${libdir}/gstreamer-${LIBV}/.debug"
FILES_${PN}-glib = "${datadir}/glib-2.0"
diff --git a/meta/recipes-multimedia/gstreamer/gst-plugins.inc b/meta/recipes-multimedia/gstreamer/gst-plugins.inc
index 26976092d6..c852c53fa9 100644
--- a/meta/recipes-multimedia/gstreamer/gst-plugins.inc
+++ b/meta/recipes-multimedia/gstreamer/gst-plugins.inc
@@ -19,10 +19,10 @@ require gst-plugins-package.inc
PACKAGES_DYNAMIC += "^${PN}-.*"
# apply gstreamer hack after Makefile.in.in in source is replaced by our version from
-# ${STAGING_DATADIR_NATIVE}/gettext/po/Makefile.in.in, but before configure is executed
+# ${STAGING_DATADIR_NATIVE}/gettext/po/Makefile.in.in, but before configure is executed
# http://lists.linuxtogo.org/pipermail/openembedded-core/2012-November/032233.html
-oe_runconf_prepend() {
+oe_runconf_prepend() {
if [ -e ${S}/po/Makefile.in.in ]; then
sed -i -e "1a\\" -e 'GETTEXT_PACKAGE = @GETTEXT_PACKAGE@' ${S}/po/Makefile.in.in
fi
-}
+}
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.4.0.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.4.1.bb
index bc79d54f5f..f962ff0608 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.4.0.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.4.1.bb
@@ -13,12 +13,13 @@ SRC_URI = " \
http://gstreamer.freedesktop.org/src/gst-libav/gst-libav-${PV}.tar.xz \
file://0001-Disable-yasm-for-libav-when-disable-yasm.patch \
"
-SRC_URI[md5sum] = "60811ae08c26fa093a96996582962c75"
-SRC_URI[sha256sum] = "c82c7a657863f2e27ac5cba539b0bd0a8114ad6bd4ba33bae777e4dbfab9e380"
+SRC_URI[md5sum] = "ea2d636c24d7c5ae123967ef22e37c07"
+SRC_URI[sha256sum] = "fc125521187fa84f3210269a0eecc51f8a856802f1ca4bb251f118dab90c5a9d"
LIBAV_EXTRA_CONFIGURE_COMMON_ARG = "--target-os=linux \
--cc='${CC}' --as='${CC}' --ld='${CC}' --nm='${NM}' --ar='${AR}' \
--ranlib='${RANLIB}' \
- ${GSTREAMER_1_0_DEBUG}"
+ ${GSTREAMER_1_0_DEBUG} \
+ --cross-prefix='${HOST_PREFIX}'"
S = "${WORKDIR}/gst-libav-${PV}"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_git.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_git.bb
index 5ce28ccbe7..bf1b87e6de 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_git.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_git.bb
@@ -18,10 +18,12 @@ SRCREV = "127202d6f65584891dabf92be031f0d170b0e7f1"
LIBAV_EXTRA_CONFIGURE_COMMON_ARG = "--target-os=linux \
--cc='${CC}' --as='${CC}' --ld='${CC}' --nm='${NM}' --ar='${AR}' \
- ${GSTREAMER_1_0_DEBUG}"
+ ${GSTREAMER_1_0_DEBUG} \
+ --cross-prefix='${HOST_PREFIX}'"
-do_configure() {
+do_configure_prepend() {
+ cd ${S}
./autogen.sh --noconfigure
- oe_runconf
+ cd ${B}
}
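
This do_configure to do_configure_prepend conversion (repeated below for the other gstreamer git recipes) stops the recipe from replacing the autotools class task: autogen.sh now only generates the configure script in ${S}, and autotools.bbclass still runs its normal out-of-tree configure in ${B}. A sketch of the effective sequence, assuming stock autotools.bbclass behaviour:

    do_configure_prepend() {
        # generate ./configure in the source tree, but do not run it here
        cd ${S}
        ./autogen.sh --noconfigure
        cd ${B}
    }
    # ...after which the class-provided do_configure runs oe_runconf in ${B}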
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb
index a703d9b7a4..3ef10c3723 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-meta-base.bb
@@ -3,7 +3,10 @@ LICENSE = "MIT"
inherit packagegroup
-DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad"
+COMMERCIAL_PLUGINS = "${COMMERCIAL_AUDIO_PLUGINS} ${COMMERCIAL_VIDEO_PLUGINS}"
+DEPENDS_UGLY="${@'gstreamer1.0-plugins-ugly' if 'ugly' in COMMERCIAL_PLUGINS.split('-') else ''}"
+DEPENDS_BAD="${@'gstreamer1.0-plugins-bad' if 'bad' in COMMERCIAL_PLUGINS.split('-') else ''}"
+DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base gstreamer1.0-plugins-good ${DEPENDS_UGLY} ${DEPENDS_BAD}"
PACKAGES = "\
gstreamer1.0-meta-base \
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc b/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc
index b4238634fc..d698904349 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx.inc
@@ -3,7 +3,7 @@ SECTION = "multimedia"
LICENSE = "LGPLv2.1"
LICENSE_FLAGS = "commercial"
HOMEPAGE = "http://www.gstreamer.net/"
-DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base"
+DEPENDS = "gstreamer1.0 gstreamer1.0-plugins-base gstreamer1.0-plugins-bad"
RDEPENDS_${PN} = "libomxil"
inherit autotools pkgconfig gettext
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_git.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_git.bb
index a390205621..931a7fcc96 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_git.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_git.bb
@@ -13,8 +13,9 @@ S = "${WORKDIR}/git"
SRCREV = "a2db76b048db278ef0aa798e106b7594264e06c0"
-do_configure() {
+do_configure_prepend() {
+ cd ${S}
./autogen.sh --noconfigure
- oe_runconf
+ cd ${B}
}
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad.inc b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad.inc
index 8ecc2aac12..9c15f2b1ed 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad.inc
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad.inc
@@ -2,25 +2,33 @@ require gstreamer1.0-plugins.inc
LICENSE = "GPLv2+ & LGPLv2+ & LGPLv2.1+ "
-DEPENDS += "gstreamer1.0-plugins-base bzip2"
+DEPENDS += "gstreamer1.0-plugins-base bzip2 libpng jpeg"
S = "${WORKDIR}/gst-plugins-bad-${PV}"
+SRC_URI += "file://configure-allow-to-disable-libssh2.patch \
+ "
+
inherit gettext
+# opengl packageconfig factored out to make it easy for distros
+# and BSP layers to pick either (desktop) opengl, gles2, or no GL
+PACKAGECONFIG_GL ?= "${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'gles2', '', d)}"
PACKAGECONFIG ??= " \
+ ${PACKAGECONFIG_GL} \
${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'wayland', '', d)} \
- ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'gl', '', d)} \
${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez', '', d)} \
${@bb.utils.contains('DISTRO_FEATURES', 'directfb', 'directfb', '', d)} \
orc curl uvch264 neon sndfile \
hls sbc dash bz2 smoothstreaming \
"
+
# dash = Dynamic Adaptive Streaming over HTTP
PACKAGECONFIG[assrender] = "--enable-assrender,--disable-assrender,libass"
PACKAGECONFIG[curl] = "--enable-curl,--disable-curl,curl"
-PACKAGECONFIG[gl] = "--enable-gl,--disable-gl,virtual/egl virtual/libgles2"
+PACKAGECONFIG[gles2] = "--enable-gles2 --enable-egl,--disable-gles2 --disable-egl,virtual/libgles2 virtual/egl"
+PACKAGECONFIG[opengl] = "--enable-opengl,--disable-opengl,virtual/libgl libglu"
PACKAGECONFIG[faac] = "--enable-faac,--disable-faac,faac"
PACKAGECONFIG[faad] = "--enable-faad,--disable-faad,faad2"
PACKAGECONFIG[libmms] = "--enable-libmms,--disable-libmms,libmms"
@@ -45,6 +53,8 @@ PACKAGECONFIG[bluez] = "--enable-bluez,--disable-bluez,bluez4"
PACKAGECONFIG[rsvg] = "--enable-rsvg,--disable-rsvg,librsvg"
PACKAGECONFIG[sndfile] = "--enable-sndfile,--disable-sndfile,libsndfile1"
PACKAGECONFIG[webp] = "--enable-webp,--disable-webp,libwebp"
+PACKAGECONFIG[rtmp] = "--enable-rtmp,--disable-rtmp,rtmpdump"
+PACKAGECONFIG[libssh2] = "--enable-libssh2,--disable-libssh2,libssh2"
# these plugins have not been ported to 1.0 (yet):
# directdraw vcd apexsink dc1394 lv2 linsys musepack mythtv
@@ -70,6 +80,7 @@ EXTRA_OECONF += " \
--disable-apple_media \
--disable-avc \
--disable-chromaprint \
+ --disable-cocoa \
--disable-daala \
--disable-dc1394 \
--disable-direct3d \
@@ -96,7 +107,6 @@ EXTRA_OECONF += " \
--disable-pvr \
--disable-quicktime \
--disable-resindvd \
- --disable-rtmp \
--disable-sdl \
--disable-sdltest \
--disable-sndio \
@@ -121,3 +131,5 @@ EXTRA_OECONF += " \
ARM_INSTRUCTION_SET = "arm"
+FILES_gstreamer1.0-plugins-bad-opencv += "${datadir}/gst-plugins-bad/1.0/opencv*"
+
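
Factoring the GL choice out into PACKAGECONFIG_GL lets a distro or BSP layer pick the GL flavour without rewriting the whole PACKAGECONFIG list. An illustrative override (only 'gles2', 'opengl' or an empty value make sense with the options defined above):

    # machine or distro configuration (illustrative)
    PACKAGECONFIG_GL_pn-gstreamer1.0-plugins-bad = "opengl"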
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-gl-do-not-check-for-GL-GLU-EGL-GLES2-libs-if-disable.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-gl-do-not-check-for-GL-GLU-EGL-GLES2-libs-if-disable.patch
new file mode 100644
index 0000000000..042a32c04d
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-gl-do-not-check-for-GL-GLU-EGL-GLES2-libs-if-disable.patch
@@ -0,0 +1,41 @@
+From deba0da45ec821209a7ed148a4521d562e6512cd Mon Sep 17 00:00:00 2001
+From: Carlos Rafael Giani <dv@pseudoterminal.org>
+Date: Wed, 27 Aug 2014 14:47:25 +0200
+Subject: [PATCH] gl: do not check for GL/GLU/EGL/GLES2 libs if disabled in
+ configuration
+
+Upstream-Status: Submitted [https://bugzilla.gnome.org/show_bug.cgi?id=735522]
+
+Signed-off-by: Carlos Rafael Giani <dv@pseudoterminal.org>
+---
+ configure.ac | 14 ++++++++++----
+ 1 file changed, 10 insertions(+), 4 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index 1a46afb..e85d4ba 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -661,10 +661,16 @@ case $host in
+ fi
+ ;;
+ *)
+- AG_GST_CHECK_LIBHEADER(GL, GL, glTexImage2D,, GL/gl.h)
+- AG_GST_CHECK_LIBHEADER(GLU, GLU, gluSphere,, GL/glu.h)
+- AG_GST_CHECK_LIBHEADER(GLES2, GLESv2, glTexImage2D,, GLES2/gl2.h)
+- AG_GST_CHECK_LIBHEADER(EGL, EGL, eglGetError,, EGL/egl.h)
++ if test "x$NEED_GL" != "xno"; then
++ AG_GST_CHECK_LIBHEADER(GL, GL, glTexImage2D,, GL/gl.h)
++ AG_GST_CHECK_LIBHEADER(GLU, GLU, gluSphere,, GL/glu.h)
++ fi
++ if test "x$NEED_GLES2" != "xno"; then
++ AG_GST_CHECK_LIBHEADER(GLES2, GLESv2, glTexImage2D,, GLES2/gl2.h)
++ fi
++ if test "x$NEED_EGL" != "xno"; then
++ AG_GST_CHECK_LIBHEADER(EGL, EGL, eglGetError,, EGL/egl.h)
++ fi
+
+ old_LIBS=$LIBS
+ old_CFLAGS=$CFLAGS
+--
+1.8.3.2
+
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/configure-allow-to-disable-libssh2.patch b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/configure-allow-to-disable-libssh2.patch
new file mode 100644
index 0000000000..d52afd5d57
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/configure-allow-to-disable-libssh2.patch
@@ -0,0 +1,64 @@
+From f59c5269f92d59a5296cbfeeb682d42095cd88ad Mon Sep 17 00:00:00 2001
+From: Wenzong Fan <wenzong.fan@windriver.com>
+Date: Thu, 18 Sep 2014 02:24:07 -0400
+Subject: [PATCH] gstreamer1.0-plugins-bad: allow to disable libssh2
+
+libssh2 is automatically linked in if present; this undeclared
+dependency may cause build errors like:
+
+ .../x86_64-poky-linux/4.9.0/ld: cannot find -lssh2
+
+libssh2 isn't an oe-core recipe, so allow to disable it from
+configure.
+
+Upstream-Status: Pending
+
+Signed-off-by: Wenzong Fan <wenzong.fan@windriver.com>
+---
+ configure.ac | 23 +++++++++++++++++------
+ 1 file changed, 17 insertions(+), 6 deletions(-)
+
+diff --git a/configure.ac b/configure.ac
+index 0e95c5c..12153b4 100644
+--- a/configure.ac
++++ b/configure.ac
+@@ -1901,6 +1901,15 @@ AG_GST_CHECK_FEATURE(CHROMAPRINT, [chromaprint], chromaprint, [
+ ])
+
+ dnl *** Curl ***
++AC_ARG_ENABLE([libssh2],
++ [ --enable-libssh2 enable LIBSSH2 support @<:@default=auto@:>@],
++ [case "${enableval}" in
++ yes) NEED_SSH2=yes ;;
++ no) NEED_SSH2=no ;;
++ auto) NEED_SSH2=auto ;;
++ *) AC_MSG_ERROR([bad value ${enableval} for --enable-libssh2]) ;;
++ esac],[NEED_SSH2=auto])
++
+ translit(dnm, m, l) AM_CONDITIONAL(USE_CURL, true)
+ AG_GST_CHECK_FEATURE(CURL, [Curl plugin], curl, [
+ PKG_CHECK_MODULES(CURL, libcurl >= 7.21.0, [
+@@ -1915,12 +1924,14 @@ AG_GST_CHECK_FEATURE(CURL, [Curl plugin], curl, [
+ ])
+ AC_SUBST(CURL_CFLAGS)
+ AC_SUBST(CURL_LIBS)
+- PKG_CHECK_MODULES(SSH2, libssh2 >= 1.4.3, [
+- HAVE_SSH2="yes"
+- AC_DEFINE(HAVE_SSH2, 1, [Define if libssh2 is available])
+- ], [
+- HAVE_SSH2="no"
+- ])
++ if test "x$NEED_SSH2" != "xno"; then
++ PKG_CHECK_MODULES(SSH2, libssh2 >= 1.4.3, [
++ HAVE_SSH2="yes"
++ AC_DEFINE(HAVE_SSH2, 1, [Define if libssh2 is available])
++ ], [
++ HAVE_SSH2="no"
++ ])
++ fi
+ AM_CONDITIONAL(USE_SSH2, test "x$HAVE_SSH2" = "xyes")
+ AC_SUBST(SSH2_CFLAGS)
+ AC_SUBST(SSH2_LIBS)
+--
+1.7.9.5
+
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.4.0.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.4.1.bb
index e6edb26736..f0116c08a1 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.4.0.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.4.1.bb
@@ -4,7 +4,10 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=73a5855a8119deb017f5f13cf327095d \
file://gst/tta/filters.h;beginline=12;endline=29;md5=8a08270656f2f8ad7bb3655b83138e5a \
file://COPYING.LIB;md5=21682e4e8fea52413fd26c60acb907e5 \
file://gst/tta/crc32.h;beginline=12;endline=29;md5=27db269c575d1e5317fffca2d33b3b50"
-SRC_URI[md5sum] = "3bc0fcfe8d16ad1295f0454c1fcb4ba3"
-SRC_URI[sha256sum] = "ff2cb754f7725b205aec66002b1406e440f3a03194b6cad2d126ef5cd00902f9"
+
+SRC_URI += "file://0001-gl-do-not-check-for-GL-GLU-EGL-GLES2-libs-if-disable.patch"
+
+SRC_URI[md5sum] = "20cb190b18dc63017326321cdb7c91e5"
+SRC_URI[sha256sum] = "0268db2faaf0bb22e5b709a11633abbca4f3d289b1f513bb262d0bf3f53e19ae"
S = "${WORKDIR}/gst-plugins-bad-${PV}"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_git.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_git.bb
index e1a5904abf..75859f9a89 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_git.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_git.bb
@@ -19,8 +19,9 @@ EXTRA_OECONF += " \
-disable-openjpeg \
"
-do_configure() {
+do_configure_prepend() {
+ cd ${S}
./autogen.sh --noconfigure
- oe_runconf
+ cd ${B}
}
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.4.0.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.4.1.bb
index 968f0924b7..0db42cec94 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.4.0.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.4.1.bb
@@ -9,6 +9,6 @@ SRC_URI += "file://do-not-change-eos-event-to-gap-event-if.patch \
file://get-caps-from-src-pad-when-query-caps.patch \
"
-SRC_URI[md5sum] = "1ff06280b03b9098a706d1290d8bb3bd"
-SRC_URI[sha256sum] = "5daed4b983b64e4e3fbe9cd29063e4302812cd03ba685a15b06a790911d04c1e"
+SRC_URI[md5sum] = "a825628225bd0a58c0df87cdd2a5db91"
+SRC_URI[sha256sum] = "aea9e25be6691bd3cc0785d005b2b5d70ce313a2c897901680a3f7e7cab5a499"
S = "${WORKDIR}/gst-plugins-base-${PV}"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_git.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_git.bb
index a894abe501..fbe3a42cd7 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_git.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_git.bb
@@ -11,8 +11,9 @@ S = "${WORKDIR}/git"
SRCREV = "8d4cb64a4b9d84b10076bf350f80a0d6ea68ec2d"
-do_configure() {
+do_configure_prepend() {
+ cd ${S}
./autogen.sh --noconfigure
- oe_runconf
+ cd ${B}
}
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.4.0.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.4.1.bb
index 2d392c607e..441dd51e39 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.4.0.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.4.1.bb
@@ -6,8 +6,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
SRC_URI += "file://0001-gstrtpmp4gpay-set-dafault-value-for-MPEG4-without-co.patch \
"
-
-SRC_URI[md5sum] = "8007d57a38f6b2882961b2547fa4597c"
-SRC_URI[sha256sum] = "48a62e7987fffa289a091dfc8ccc80b401d110632b8fc1adce5b82fc092f2685"
+SRC_URI[md5sum] = "eb3a3296b2f6009def1f5a09590ce767"
+SRC_URI[sha256sum] = "8559d4270065b30ed5c49b826e1b7a3a2bd5ee9a340ae745a2ae3f9718e4c637"
S = "${WORKDIR}/gst-plugins-good-${PV}"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_git.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_git.bb
index 8b11b20b1b..2c0e0efe68 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_git.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_git.bb
@@ -10,8 +10,9 @@ S = "${WORKDIR}/git"
SRCREV = "fd0123800c8c1cf1468c0fa5d592ad0d0d8b4140"
-do_configure() {
+do_configure_prepend() {
+ cd ${S}
./autogen.sh --noconfigure
- oe_runconf
+ cd ${B}
}
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.4.0.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.4.1.bb
index 9035d2dfbe..6d63ba62d4 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.4.0.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.4.1.bb
@@ -2,7 +2,7 @@ include gstreamer1.0-plugins-ugly.inc
LIC_FILES_CHKSUM = "file://COPYING;md5=a6f89e2100d9b6cdffcea4f398e37343 \
file://tests/check/elements/xingmux.c;beginline=1;endline=21;md5=4c771b8af188724855cb99cadd390068 "
-SRC_URI[md5sum] = "558146cb5ec8b313afe2113aafc3da85"
-SRC_URI[sha256sum] = "5314bb60f13d1a7b9c6317df73813af5f3f15a62c7c186b816b0024b5c61744d"
+SRC_URI[md5sum] = "316974af949ca4654efee704a0164076"
+SRC_URI[sha256sum] = "25440435ac4ed795d213f2420a0e7355e4a2e2e76d1f9d020b2073f815e8b071"
S = "${WORKDIR}/gst-plugins-ugly-${PV}"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_git.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_git.bb
index 2f72497363..dcf5ffc3c1 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_git.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_git.bb
@@ -9,8 +9,9 @@ S = "${WORKDIR}/git"
SRCREV = "06b8ac10cee85c5c304ca320997aa8f44295a66f"
-do_configure() {
+do_configure_prepend() {
+ cd ${S}
./autogen.sh --noconfigure
- oe_runconf
+ cd ${B}
}
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.4.0.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.4.0.bb
deleted file mode 100644
index 66aa8862c0..0000000000
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.4.0.bb
+++ /dev/null
@@ -1,6 +0,0 @@
-include gstreamer1.0-rtsp-server.inc
-
-LIC_FILES_CHKSUM = "file://COPYING;md5=6762ed442b3822387a51c92d928ead0d"
-
-SRC_URI[md5sum] = "974af05dbf867cade89b8d3101e3f197"
-SRC_URI[sha256sum] = "800a93ee6de8ca3946fbb2fa3878e41af44e27dde76c9399e30b93ba3e0bffe8"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.4.1.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.4.1.bb
new file mode 100644
index 0000000000..1f34be0e8f
--- /dev/null
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.4.1.bb
@@ -0,0 +1,6 @@
+include gstreamer1.0-rtsp-server.inc
+
+LIC_FILES_CHKSUM = "file://COPYING;md5=6762ed442b3822387a51c92d928ead0d"
+
+SRC_URI[md5sum] = "e39f8643eb363611c342d87088a29aa0"
+SRC_URI[sha256sum] = "10aeacc774e99d81607aeb2748c0a1c6736425b8ae3a9caf8db3cd0c4a08d06e"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.4.0.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.4.1.bb
index a958064e17..56fc4f2083 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.4.0.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.4.1.bb
@@ -7,8 +7,7 @@ SRC_URI = " \
http://gstreamer.freedesktop.org/src/gstreamer/gstreamer-${PV}.tar.xz \
file://0001-Fix-crash-with-gst-inspect.patch \
"
-
-SRC_URI[md5sum] = "594c0c06eaace9b9d3bad010de1bdfae"
-SRC_URI[sha256sum] = "23c39fdc2b24f889b07cab0449825384fef7592a121e180729fd9025ec45c695"
+SRC_URI[md5sum] = "bd0938d680d657249b885162f310702d"
+SRC_URI[sha256sum] = "5638f75003282135815c0077d491da11e9a884ad91d4ba6ab3cc78bae0fb452e"
S = "${WORKDIR}/gstreamer-${PV}"
diff --git a/meta/recipes-multimedia/gstreamer/gstreamer1.0_git.bb b/meta/recipes-multimedia/gstreamer/gstreamer1.0_git.bb
index 424c90a9e8..cb00d319a7 100644
--- a/meta/recipes-multimedia/gstreamer/gstreamer1.0_git.bb
+++ b/meta/recipes-multimedia/gstreamer/gstreamer1.0_git.bb
@@ -10,8 +10,9 @@ S = "${WORKDIR}/git"
SRCREV = "3b8181a8c550e74acaba4e8c55bdc649fa551dc9"
-do_configure() {
+do_configure_prepend() {
+ cd ${S}
./autogen.sh --noconfigure
- oe_runconf
+ cd ${B}
}
diff --git a/meta/recipes-multimedia/libpng/libpng/0001-configure-lower-automake-requirement.patch b/meta/recipes-multimedia/libpng/libpng/0001-configure-lower-automake-requirement.patch
deleted file mode 100644
index bbbca89e8f..0000000000
--- a/meta/recipes-multimedia/libpng/libpng/0001-configure-lower-automake-requirement.patch
+++ /dev/null
@@ -1,31 +0,0 @@
-From fff7cbc01e061a235852f385866fe75fb17867e3 Mon Sep 17 00:00:00 2001
-From: Valentin Popa <valentin.popa@intel.com>
-Date: Tue, 26 Nov 2013 12:06:55 +0200
-Subject: [PATCH] configure: lower automake requirement
-
-We're not using parallel tests in OE-core yet
-
-Signed-off-by: Valentin Popa <valentin.popa@intel.com>
-Signed-off-by: Koen Kooi <koen@dominion.thruhere.net>
-
-Upstream-Status: Inapropriate [OE specific build hack]
----
- configure.ac | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/configure.ac b/configure.ac
-index 28200c3..fab2515 100644
---- a/configure.ac
-+++ b/configure.ac
-@@ -27,7 +27,7 @@ AC_CONFIG_MACRO_DIR([scripts])
- # dist-xz requires automake 1.11 or later
- # 1.12.2 fixes a security issue in 1.11.2 and 1.12.1
- # 1.13 is required for parallel tests
--AM_INIT_AUTOMAKE([1.13 foreign dist-xz color-tests silent-rules subdir-objects])
-+AM_INIT_AUTOMAKE([1.12.2 foreign dist-xz color-tests silent-rules subdir-objects])
- # The following line causes --disable-maintainer-mode to be the default to
- # configure, this is necessary because libpng distributions cannot rely on the
- # time stamps of the autotools generated files being correct
---
-1.8.3.2
-
diff --git a/meta/recipes-multimedia/libpng/libpng_1.6.10.bb b/meta/recipes-multimedia/libpng/libpng_1.6.10.bb
deleted file mode 100644
index babf87e392..0000000000
--- a/meta/recipes-multimedia/libpng/libpng_1.6.10.bb
+++ /dev/null
@@ -1,29 +0,0 @@
-SUMMARY = "PNG image format decoding library"
-HOMEPAGE = "http://www.libpng.org/"
-SECTION = "libs"
-LICENSE = "Libpng"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=cb7a834ba2891bc30b8577e49963a435 \
- file://png.h;endline=15;md5=853b11c4a19ec66decd641efd756bc2c \
- file://png.h;beginline=209;endline=323;md5=cface34a7db6b71eaa828fe934951f81"
-DEPENDS = "zlib"
-LIBV = "16"
-
-SRC_URI = "${SOURCEFORGE_MIRROR}/project/libpng/libpng${LIBV}/${PV}/libpng-${PV}.tar.xz \
- file://0001-configure-lower-automake-requirement.patch \
- "
-
-SRC_URI[md5sum] = "5f414b20f683b1d96b163c89e3eff768"
-SRC_URI[sha256sum] = "4003f0fd0e36110a2b742fc5b9e1ab93ed7a7ab57ae8dc65f0e8101458775a56"
-
-BINCONFIG = "${bindir}/libpng-config ${bindir}/libpng16-config"
-
-inherit autotools binconfig-disabled pkgconfig
-
-# Work around missing symbols
-EXTRA_OECONF_append_arm = " ${@bb.utils.contains("TUNE_FEATURES", "neon", "--enable-arm-neon=on", "--enable-arm-neon=off" ,d)}"
-
-PACKAGES =+ "${PN}-tools"
-
-FILES_${PN}-tools = "${bindir}/png-fix-itxt ${bindir}/pngfix"
-
-BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-multimedia/libpng/libpng_1.6.13.bb b/meta/recipes-multimedia/libpng/libpng_1.6.13.bb
new file mode 100644
index 0000000000..2c6260a9a1
--- /dev/null
+++ b/meta/recipes-multimedia/libpng/libpng_1.6.13.bb
@@ -0,0 +1,28 @@
+SUMMARY = "PNG image format decoding library"
+HOMEPAGE = "http://www.libpng.org/"
+SECTION = "libs"
+LICENSE = "Libpng"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=46401ee4b225b9ec066cb96cf1025c0f \
+ file://png.h;endline=15;md5=8167a17735fc618d1df109f8b0f839a6 \
+ file://png.h;beginline=229;endline=343;md5=5cdf8564a14e2f00339e4437a83b4913"
+DEPENDS = "zlib"
+LIBV = "16"
+
+SRC_URI = "${SOURCEFORGE_MIRROR}/project/libpng/libpng${LIBV}/${PV}/libpng-${PV}.tar.xz \
+ "
+
+SRC_URI[md5sum] = "9822c25466f060142359f80ed142c9e5"
+SRC_URI[sha256sum] = "d9c8ce54a5fc8052ed794ca65b553384a74c0608b09ae163cbbb07176018e625"
+
+BINCONFIG = "${bindir}/libpng-config ${bindir}/libpng16-config"
+
+inherit autotools binconfig-disabled pkgconfig
+
+# Work around missing symbols
+EXTRA_OECONF_append_class-target = " ${@bb.utils.contains("TUNE_FEATURES", "neon", "--enable-arm-neon=on", "--enable-arm-neon=off" ,d)}"
+
+PACKAGES =+ "${PN}-tools"
+
+FILES_${PN}-tools = "${bindir}/png-fix-itxt ${bindir}/pngfix"
+
+BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-multimedia/libtiff/files/libtiff-CVE-2013-1961.patch b/meta/recipes-multimedia/libtiff/files/libtiff-CVE-2013-1961.patch
new file mode 100644
index 0000000000..fc4adb59d8
--- /dev/null
+++ b/meta/recipes-multimedia/libtiff/files/libtiff-CVE-2013-1961.patch
@@ -0,0 +1,786 @@
+libtiff: fix CVE-2013-1961.
+
+Upstream-Status: Backported
+
+Issue Description: CVE-2013-1961
+Stack-based buffer overflow in the t2p_write_pdf_page function in tiff2pdf
+in libtiff before 4.0.3 allows remote attackers to cause a denial of service
+(application crash) via a crafted image length and resolution in a TIFF image file.
+
+Fix Description: Replace sprintf with snprintf
+
+Signed-off-by: Priyanka Shobhan <priyanka_shobhan@mentor.com>
+---
+
+diff --git a/contrib/dbs/xtiff/xtiff.c b/contrib/dbs/xtiff/xtiff.c
+index 2634030..97e4ffe 100644
+--- a/contrib/dbs/xtiff/xtiff.c
++++ b/contrib/dbs/xtiff/xtiff.c
+@@ -512,9 +512,9 @@ SetNameLabel()
+ Arg args[1];
+
+ if (tfMultiPage)
+- sprintf(buffer, "%s - page %d", fileName, tfDirectory);
++ snprintf(buffer, sizeof(buffer), "%s - page %d", fileName, tfDirectory);
+ else
+- strcpy(buffer, fileName);
++ snprintf(buffer, sizeof(buffer), "%s", fileName);
+ XtSetArg(args[0], XtNlabel, buffer);
+ XtSetValues(labelWidget, args, 1);
+ }
+diff --git a/libtiff/tif_codec.c b/libtiff/tif_codec.c
+index e201667..703e87d 100644
+--- a/libtiff/tif_codec.c
++++ b/libtiff/tif_codec.c
+@@ -108,7 +108,8 @@ _notConfigured(TIFF* tif)
+ const TIFFCodec* c = TIFFFindCODEC(tif->tif_dir.td_compression);
+ char compression_code[20];
+
+- sprintf( compression_code, "%d", tif->tif_dir.td_compression );
++ snprintf(compression_code, sizeof(compression_code), "%d",
++ tif->tif_dir.td_compression );
+ TIFFErrorExt(tif->tif_clientdata, tif->tif_name,
+ "%s compression support is not configured",
+ c ? c->name : compression_code );
+diff --git a/libtiff/tif_dirinfo.c b/libtiff/tif_dirinfo.c
+index d319931..4dae5e5 100644
+--- a/libtiff/tif_dirinfo.c
++++ b/libtiff/tif_dirinfo.c
+@@ -711,7 +711,7 @@ _TIFFCreateAnonField(TIFF *tif, uint32 tag, TIFFDataType field_type)
+ * note that this name is a special sign to TIFFClose() and
+ * _TIFFSetupFields() to free the field
+ */
+- sprintf(fld->field_name, "Tag %d", (int) tag);
++ snprintf(fld->field_name, 32, "Tag %d", (int) tag);
+
+ return fld;
+ }
+diff --git a/tools/rgb2ycbcr.c b/tools/rgb2ycbcr.c
+index 162aac1..a3eeb03 100644
+--- a/tools/rgb2ycbcr.c
++++ b/tools/rgb2ycbcr.c
+@@ -332,7 +332,8 @@ tiffcvt(TIFF* in, TIFF* out)
+ TIFFSetField(out, TIFFTAG_PLANARCONFIG, PLANARCONFIG_CONTIG);
+ { char buf[2048];
+ char *cp = strrchr(TIFFFileName(in), '/');
+- sprintf(buf, "YCbCr conversion of %s", cp ? cp+1 : TIFFFileName(in));
++ snprintf(buf, sizeof(buf), "YCbCr conversion of %s",
++ cp ? cp+1 : TIFFFileName(in));
+ TIFFSetField(out, TIFFTAG_IMAGEDESCRIPTION, buf);
+ }
+ TIFFSetField(out, TIFFTAG_SOFTWARE, TIFFGetVersion());
+diff --git a/tools/tiff2bw.c b/tools/tiff2bw.c
+index bda754a..7ffaca0 100644
+--- a/tools/tiff2bw.c
++++ b/tools/tiff2bw.c
+@@ -205,7 +205,7 @@ main(int argc, char* argv[])
+ }
+ }
+ TIFFSetField(out, TIFFTAG_PHOTOMETRIC, PHOTOMETRIC_MINISBLACK);
+- sprintf(thing, "B&W version of %s", argv[optind]);
++ snprintf(thing, sizeof(thing), "B&W version of %s", argv[optind]);
+ TIFFSetField(out, TIFFTAG_IMAGEDESCRIPTION, thing);
+ TIFFSetField(out, TIFFTAG_SOFTWARE, "tiff2bw");
+ outbuf = (unsigned char *)_TIFFmalloc(TIFFScanlineSize(out));
+diff --git a/tools/tiff2pdf.c b/tools/tiff2pdf.c
+index 356328c..957fd9f 100644
+--- a/tools/tiff2pdf.c
++++ b/tools/tiff2pdf.c
+@@ -3609,7 +3609,9 @@ tsize_t t2p_write_pdf_header(T2P* t2p, TIFF* output){
+ char buffer[16];
+ int buflen=0;
+
+- buflen=sprintf(buffer, "%%PDF-%u.%u ", t2p->pdf_majorversion&0xff, t2p->pdf_minorversion&0xff);
++ buflen = snprintf(buffer, sizeof(buffer), "%%PDF-%u.%u ",
++ t2p->pdf_majorversion&0xff,
++ t2p->pdf_minorversion&0xff);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t)"\n%\342\343\317\323\n", 7);
+
+@@ -3623,10 +3625,10 @@ tsize_t t2p_write_pdf_header(T2P* t2p, TIFF* output){
+ tsize_t t2p_write_pdf_obj_start(uint32 number, TIFF* output){
+
+ tsize_t written=0;
+- char buffer[16];
++ char buffer[32];
+ int buflen=0;
+
+- buflen=sprintf(buffer, "%lu", (unsigned long)number);
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)number);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen );
+ written += t2pWriteFile(output, (tdata_t) " 0 obj\n", 7);
+
+@@ -3665,13 +3667,13 @@ tsize_t t2p_write_pdf_name(unsigned char* name, TIFF* output){
+ written += t2pWriteFile(output, (tdata_t) "/", 1);
+ for (i=0;i<namelen;i++){
+ if ( ((unsigned char)name[i]) < 0x21){
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ nextchar=1;
+ }
+ if ( ((unsigned char)name[i]) > 0x7E){
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ nextchar=1;
+@@ -3679,57 +3681,57 @@ tsize_t t2p_write_pdf_name(unsigned char* name, TIFF* output){
+ if (nextchar==0){
+ switch (name[i]){
+ case 0x23:
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ break;
+ case 0x25:
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ break;
+ case 0x28:
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ break;
+ case 0x29:
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ break;
+ case 0x2F:
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ break;
+ case 0x3C:
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ break;
+ case 0x3E:
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ break;
+ case 0x5B:
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ break;
+ case 0x5D:
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ break;
+ case 0x7B:
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ break;
+ case 0x7D:
+- sprintf(buffer, "#%.2X", name[i]);
++ snprintf(buffer, sizeof(buffer), "#%.2X", name[i]);
+ buffer[sizeof(buffer) - 1] = '\0';
+ written += t2pWriteFile(output, (tdata_t) buffer, 3);
+ break;
+@@ -3844,14 +3846,14 @@ tsize_t t2p_write_pdf_stream_end(TIFF* output){
+ tsize_t t2p_write_pdf_stream_dict(tsize_t len, uint32 number, TIFF* output){
+
+ tsize_t written=0;
+- char buffer[16];
++ char buffer[32];
+ int buflen=0;
+
+ written += t2pWriteFile(output, (tdata_t) "/Length ", 8);
+ if(len!=0){
+ written += t2p_write_pdf_stream_length(len, output);
+ } else {
+- buflen=sprintf(buffer, "%lu", (unsigned long)number);
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)number);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R \n", 6);
+ }
+@@ -3892,10 +3894,10 @@ tsize_t t2p_write_pdf_stream_dict_end(TIFF* output){
+ tsize_t t2p_write_pdf_stream_length(tsize_t len, TIFF* output){
+
+ tsize_t written=0;
+- char buffer[16];
++ char buffer[32];
+ int buflen=0;
+
+- buflen=sprintf(buffer, "%lu", (unsigned long)len);
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)len);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) "\n", 1);
+
+@@ -3909,7 +3911,7 @@ tsize_t t2p_write_pdf_stream_length(tsize_t len, TIFF* output){
+ tsize_t t2p_write_pdf_catalog(T2P* t2p, TIFF* output)
+ {
+ tsize_t written = 0;
+- char buffer[16];
++ char buffer[32];
+ int buflen = 0;
+
+ written += t2pWriteFile(output,
+@@ -3948,7 +3950,6 @@ tsize_t t2p_write_pdf_info(T2P* t2p, TIFF* input, TIFF* output)
+ written += t2p_write_pdf_string(t2p->pdf_datetime, output);
+ }
+ written += t2pWriteFile(output, (tdata_t) "\n/Producer ", 11);
+- _TIFFmemset((tdata_t)buffer, 0x00, sizeof(buffer));
+ snprintf(buffer, sizeof(buffer), "libtiff / tiff2pdf - %d", TIFFLIB_VERSION);
+ written += t2p_write_pdf_string(buffer, output);
+ written += t2pWriteFile(output, (tdata_t) "\n", 1);
+@@ -4089,7 +4090,7 @@ tsize_t t2p_write_pdf_pages(T2P* t2p, TIFF* output)
+ {
+ tsize_t written=0;
+ tdir_t i=0;
+- char buffer[16];
++ char buffer[32];
+ int buflen=0;
+
+ int page=0;
+@@ -4097,7 +4098,7 @@ tsize_t t2p_write_pdf_pages(T2P* t2p, TIFF* output)
+ (tdata_t) "<< \n/Type /Pages \n/Kids [ ", 26);
+ page = t2p->pdf_pages+1;
+ for (i=0;i<t2p->tiff_pagecount;i++){
+- buflen=sprintf(buffer, "%d", page);
++ buflen=snprintf(buffer, sizeof(buffer), "%d", page);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R ", 5);
+ if ( ((i+1)%8)==0 ) {
+@@ -4112,8 +4113,7 @@ tsize_t t2p_write_pdf_pages(T2P* t2p, TIFF* output)
+ }
+ }
+ written += t2pWriteFile(output, (tdata_t) "] \n/Count ", 10);
+- _TIFFmemset(buffer, 0x00, 16);
+- buflen=sprintf(buffer, "%d", t2p->tiff_pagecount);
++ buflen=snprintf(buffer, sizeof(buffer), "%d", t2p->tiff_pagecount);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " \n>> \n", 6);
+
+@@ -4128,28 +4128,28 @@ tsize_t t2p_write_pdf_page(uint32 object, T2P* t2p, TIFF* output){
+
+ unsigned int i=0;
+ tsize_t written=0;
+- char buffer[16];
++ char buffer[256];
+ int buflen=0;
+
+ written += t2pWriteFile(output, (tdata_t) "<<\n/Type /Page \n/Parent ", 24);
+- buflen=sprintf(buffer, "%lu", (unsigned long)t2p->pdf_pages);
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)t2p->pdf_pages);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R \n", 6);
+ written += t2pWriteFile(output, (tdata_t) "/MediaBox [", 11);
+- buflen=sprintf(buffer, "%.4f",t2p->pdf_mediabox.x1);
++ buflen=snprintf(buffer, sizeof(buffer), "%.4f",t2p->pdf_mediabox.x1);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " ", 1);
+- buflen=sprintf(buffer, "%.4f",t2p->pdf_mediabox.y1);
++ buflen=snprintf(buffer, sizeof(buffer), "%.4f",t2p->pdf_mediabox.y1);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " ", 1);
+- buflen=sprintf(buffer, "%.4f",t2p->pdf_mediabox.x2);
++ buflen=snprintf(buffer, sizeof(buffer), "%.4f",t2p->pdf_mediabox.x2);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " ", 1);
+- buflen=sprintf(buffer, "%.4f",t2p->pdf_mediabox.y2);
++ buflen=snprintf(buffer, sizeof(buffer), "%.4f",t2p->pdf_mediabox.y2);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) "] \n", 3);
+ written += t2pWriteFile(output, (tdata_t) "/Contents ", 10);
+- buflen=sprintf(buffer, "%lu", (unsigned long)(object + 1));
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)(object + 1));
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R \n", 6);
+ written += t2pWriteFile(output, (tdata_t) "/Resources << \n", 15);
+@@ -4157,15 +4157,13 @@ tsize_t t2p_write_pdf_page(uint32 object, T2P* t2p, TIFF* output){
+ written += t2pWriteFile(output, (tdata_t) "/XObject <<\n", 12);
+ for(i=0;i<t2p->tiff_tiles[t2p->pdf_page].tiles_tilecount;i++){
+ written += t2pWriteFile(output, (tdata_t) "/Im", 3);
+- buflen = sprintf(buffer, "%u", t2p->pdf_page+1);
++ buflen = snprintf(buffer, sizeof(buffer), "%u", t2p->pdf_page+1);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) "_", 1);
+- buflen = sprintf(buffer, "%u", i+1);
++ buflen = snprintf(buffer, sizeof(buffer), "%u", i+1);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " ", 1);
+- buflen = sprintf(
+- buffer,
+- "%lu",
++ buflen = snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)(object+3+(2*i)+t2p->tiff_pages[t2p->pdf_page].page_extra));
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R ", 5);
+@@ -4177,12 +4175,10 @@ tsize_t t2p_write_pdf_page(uint32 object, T2P* t2p, TIFF* output){
+ } else {
+ written += t2pWriteFile(output, (tdata_t) "/XObject <<\n", 12);
+ written += t2pWriteFile(output, (tdata_t) "/Im", 3);
+- buflen = sprintf(buffer, "%u", t2p->pdf_page+1);
++ buflen = snprintf(buffer, sizeof(buffer), "%u", t2p->pdf_page+1);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " ", 1);
+- buflen = sprintf(
+- buffer,
+- "%lu",
++ buflen = snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)(object+3+(2*i)+t2p->tiff_pages[t2p->pdf_page].page_extra));
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R ", 5);
+@@ -4191,9 +4187,7 @@ tsize_t t2p_write_pdf_page(uint32 object, T2P* t2p, TIFF* output){
+ if(t2p->tiff_transferfunctioncount != 0) {
+ written += t2pWriteFile(output, (tdata_t) "/ExtGState <<", 13);
+ t2pWriteFile(output, (tdata_t) "/GS1 ", 5);
+- buflen = sprintf(
+- buffer,
+- "%lu",
++ buflen = snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)(object + 3));
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R ", 5);
+@@ -4566,7 +4560,7 @@ tsize_t t2p_write_pdf_page_content_stream(T2P* t2p, TIFF* output){
+ if(t2p->tiff_tiles[t2p->pdf_page].tiles_tilecount>0){
+ for(i=0;i<t2p->tiff_tiles[t2p->pdf_page].tiles_tilecount; i++){
+ box=t2p->tiff_tiles[t2p->pdf_page].tiles_tiles[i].tile_box;
+- buflen=sprintf(buffer,
++ buflen=snprintf(buffer, sizeof(buffer),
+ "q %s %.4f %.4f %.4f %.4f %.4f %.4f cm /Im%d_%ld Do Q\n",
+ t2p->tiff_transferfunctioncount?"/GS1 gs ":"",
+ box.mat[0],
+@@ -4581,7 +4575,7 @@ tsize_t t2p_write_pdf_page_content_stream(T2P* t2p, TIFF* output){
+ }
+ } else {
+ box=t2p->pdf_imagebox;
+- buflen=sprintf(buffer,
++ buflen=snprintf(buffer, sizeof(buffer),
+ "q %s %.4f %.4f %.4f %.4f %.4f %.4f cm /Im%d Do Q\n",
+ t2p->tiff_transferfunctioncount?"/GS1 gs ":"",
+ box.mat[0],
+@@ -4606,59 +4600,48 @@ tsize_t t2p_write_pdf_xobject_stream_dict(ttile_t tile,
+ TIFF* output){
+
+ tsize_t written=0;
+- char buffer[16];
++ char buffer[32];
+ int buflen=0;
+
+ written += t2p_write_pdf_stream_dict(0, t2p->pdf_xrefcount+1, output);
+ written += t2pWriteFile(output,
+ (tdata_t) "/Type /XObject \n/Subtype /Image \n/Name /Im",
+ 42);
+- buflen=sprintf(buffer, "%u", t2p->pdf_page+1);
++ buflen=snprintf(buffer, sizeof(buffer), "%u", t2p->pdf_page+1);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ if(tile != 0){
+ written += t2pWriteFile(output, (tdata_t) "_", 1);
+- buflen=sprintf(buffer, "%lu", (unsigned long)tile);
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)tile);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ }
+ written += t2pWriteFile(output, (tdata_t) "\n/Width ", 8);
+- _TIFFmemset((tdata_t)buffer, 0x00, 16);
+ if(tile==0){
+- buflen=sprintf(buffer, "%lu", (unsigned long)t2p->tiff_width);
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)t2p->tiff_width);
+ } else {
+ if(t2p_tile_is_right_edge(t2p->tiff_tiles[t2p->pdf_page], tile-1)!=0){
+- buflen=sprintf(
+- buffer,
+- "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)t2p->tiff_tiles[t2p->pdf_page].tiles_edgetilewidth);
+ } else {
+- buflen=sprintf(
+- buffer,
+- "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)t2p->tiff_tiles[t2p->pdf_page].tiles_tilewidth);
+ }
+ }
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) "\n/Height ", 9);
+- _TIFFmemset((tdata_t)buffer, 0x00, 16);
+ if(tile==0){
+- buflen=sprintf(buffer, "%lu", (unsigned long)t2p->tiff_length);
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)t2p->tiff_length);
+ } else {
+ if(t2p_tile_is_bottom_edge(t2p->tiff_tiles[t2p->pdf_page], tile-1)!=0){
+- buflen=sprintf(
+- buffer,
+- "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)t2p->tiff_tiles[t2p->pdf_page].tiles_edgetilelength);
+ } else {
+- buflen=sprintf(
+- buffer,
+- "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)t2p->tiff_tiles[t2p->pdf_page].tiles_tilelength);
+ }
+ }
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) "\n/BitsPerComponent ", 19);
+- _TIFFmemset((tdata_t)buffer, 0x00, 16);
+- buflen=sprintf(buffer, "%u", t2p->tiff_bitspersample);
++ buflen=snprintf(buffer, sizeof(buffer), "%u", t2p->tiff_bitspersample);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) "\n/ColorSpace ", 13);
+ written += t2p_write_pdf_xobject_cs(t2p, output);
+@@ -4702,11 +4685,10 @@ tsize_t t2p_write_pdf_xobject_cs(T2P* t2p, TIFF* output){
+ t2p->pdf_colorspace ^= T2P_CS_PALETTE;
+ written += t2p_write_pdf_xobject_cs(t2p, output);
+ t2p->pdf_colorspace |= T2P_CS_PALETTE;
+- buflen=sprintf(buffer, "%u", (0x0001 << t2p->tiff_bitspersample)-1 );
++ buflen=snprintf(buffer, sizeof(buffer), "%u", (0x0001 << t2p->tiff_bitspersample)-1 );
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " ", 1);
+- _TIFFmemset(buffer, 0x00, 16);
+- buflen=sprintf(buffer, "%lu", (unsigned long)t2p->pdf_palettecs );
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)t2p->pdf_palettecs );
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R ]\n", 7);
+ return(written);
+@@ -4740,10 +4722,10 @@ tsize_t t2p_write_pdf_xobject_cs(T2P* t2p, TIFF* output){
+ X_W /= Y_W;
+ Z_W /= Y_W;
+ Y_W = 1.0F;
+- buflen=sprintf(buffer, "[%.4f %.4f %.4f] \n", X_W, Y_W, Z_W);
++ buflen=snprintf(buffer, sizeof(buffer), "[%.4f %.4f %.4f] \n", X_W, Y_W, Z_W);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) "/Range ", 7);
+- buflen=sprintf(buffer, "[%d %d %d %d] \n",
++ buflen=snprintf(buffer, sizeof(buffer), "[%d %d %d %d] \n",
+ t2p->pdf_labrange[0],
+ t2p->pdf_labrange[1],
+ t2p->pdf_labrange[2],
+@@ -4759,26 +4741,26 @@ tsize_t t2p_write_pdf_xobject_cs(T2P* t2p, TIFF* output){
+ tsize_t t2p_write_pdf_transfer(T2P* t2p, TIFF* output){
+
+ tsize_t written=0;
+- char buffer[16];
++ char buffer[32];
+ int buflen=0;
+
+ written += t2pWriteFile(output, (tdata_t) "<< /Type /ExtGState \n/TR ", 25);
+ if(t2p->tiff_transferfunctioncount == 1){
+- buflen=sprintf(buffer, "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)(t2p->pdf_xrefcount + 1));
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R ", 5);
+ } else {
+ written += t2pWriteFile(output, (tdata_t) "[ ", 2);
+- buflen=sprintf(buffer, "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)(t2p->pdf_xrefcount + 1));
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R ", 5);
+- buflen=sprintf(buffer, "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)(t2p->pdf_xrefcount + 2));
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R ", 5);
+- buflen=sprintf(buffer, "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)(t2p->pdf_xrefcount + 3));
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R ", 5);
+@@ -4800,7 +4782,7 @@ tsize_t t2p_write_pdf_transfer_dict(T2P* t2p, TIFF* output, uint16 i){
+ written += t2pWriteFile(output, (tdata_t) "/FunctionType 0 \n", 17);
+ written += t2pWriteFile(output, (tdata_t) "/Domain [0.0 1.0] \n", 19);
+ written += t2pWriteFile(output, (tdata_t) "/Range [0.0 1.0] \n", 18);
+- buflen=sprintf(buffer, "/Size [%u] \n", (1<<t2p->tiff_bitspersample));
++ buflen=snprintf(buffer, sizeof(buffer), "/Size [%u] \n", (1<<t2p->tiff_bitspersample));
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) "/BitsPerSample 16 \n", 19);
+ written += t2p_write_pdf_stream_dict(((tsize_t)1)<<(t2p->tiff_bitspersample+1), 0, output);
+@@ -4827,7 +4809,7 @@ tsize_t t2p_write_pdf_transfer_stream(T2P* t2p, TIFF* output, uint16 i){
+ tsize_t t2p_write_pdf_xobject_calcs(T2P* t2p, TIFF* output){
+
+ tsize_t written=0;
+- char buffer[128];
++ char buffer[256];
+ int buflen=0;
+
+ float X_W=0.0;
+@@ -4895,16 +4877,16 @@ tsize_t t2p_write_pdf_xobject_calcs(T2P* t2p, TIFF* output){
+ written += t2pWriteFile(output, (tdata_t) "<< \n", 4);
+ if(t2p->pdf_colorspace & T2P_CS_CALGRAY){
+ written += t2pWriteFile(output, (tdata_t) "/WhitePoint ", 12);
+- buflen=sprintf(buffer, "[%.4f %.4f %.4f] \n", X_W, Y_W, Z_W);
++ buflen=snprintf(buffer, sizeof(buffer), "[%.4f %.4f %.4f] \n", X_W, Y_W, Z_W);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) "/Gamma 2.2 \n", 12);
+ }
+ if(t2p->pdf_colorspace & T2P_CS_CALRGB){
+ written += t2pWriteFile(output, (tdata_t) "/WhitePoint ", 12);
+- buflen=sprintf(buffer, "[%.4f %.4f %.4f] \n", X_W, Y_W, Z_W);
++ buflen=snprintf(buffer, sizeof(buffer), "[%.4f %.4f %.4f] \n", X_W, Y_W, Z_W);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) "/Matrix ", 8);
+- buflen=sprintf(buffer, "[%.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f] \n",
++ buflen=snprintf(buffer, sizeof(buffer), "[%.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f] \n",
+ X_R, Y_R, Z_R,
+ X_G, Y_G, Z_G,
+ X_B, Y_B, Z_B);
+@@ -4923,11 +4905,11 @@ tsize_t t2p_write_pdf_xobject_calcs(T2P* t2p, TIFF* output){
+ tsize_t t2p_write_pdf_xobject_icccs(T2P* t2p, TIFF* output){
+
+ tsize_t written=0;
+- char buffer[16];
++ char buffer[32];
+ int buflen=0;
+
+ written += t2pWriteFile(output, (tdata_t) "[/ICCBased ", 11);
+- buflen=sprintf(buffer, "%lu", (unsigned long)t2p->pdf_icccs);
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)t2p->pdf_icccs);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " 0 R] \n", 7);
+
+@@ -4937,11 +4919,11 @@ tsize_t t2p_write_pdf_xobject_icccs(T2P* t2p, TIFF* output){
+ tsize_t t2p_write_pdf_xobject_icccs_dict(T2P* t2p, TIFF* output){
+
+ tsize_t written=0;
+- char buffer[16];
++ char buffer[32];
+ int buflen=0;
+
+ written += t2pWriteFile(output, (tdata_t) "/N ", 3);
+- buflen=sprintf(buffer, "%u \n", t2p->tiff_samplesperpixel);
++ buflen=snprintf(buffer, sizeof(buffer), "%u \n", t2p->tiff_samplesperpixel);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) "/Alternate ", 11);
+ t2p->pdf_colorspace ^= T2P_CS_ICCBASED;
+@@ -5006,7 +4988,7 @@ tsize_t t2p_write_pdf_xobject_decode(T2P* t2p, TIFF* output){
+ tsize_t t2p_write_pdf_xobject_stream_filter(ttile_t tile, T2P* t2p, TIFF* output){
+
+ tsize_t written=0;
+- char buffer[16];
++ char buffer[32];
+ int buflen=0;
+
+ if(t2p->pdf_compression==T2P_COMPRESS_NONE){
+@@ -5021,41 +5003,33 @@ tsize_t t2p_write_pdf_xobject_stream_filter(ttile_t tile, T2P* t2p, TIFF* output
+ written += t2pWriteFile(output, (tdata_t) "<< /K -1 ", 9);
+ if(tile==0){
+ written += t2pWriteFile(output, (tdata_t) "/Columns ", 9);
+- buflen=sprintf(buffer, "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)t2p->tiff_width);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " /Rows ", 7);
+- buflen=sprintf(buffer, "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)t2p->tiff_length);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ } else {
+ if(t2p_tile_is_right_edge(t2p->tiff_tiles[t2p->pdf_page], tile-1)==0){
+ written += t2pWriteFile(output, (tdata_t) "/Columns ", 9);
+- buflen=sprintf(
+- buffer,
+- "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)t2p->tiff_tiles[t2p->pdf_page].tiles_tilewidth);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ } else {
+ written += t2pWriteFile(output, (tdata_t) "/Columns ", 9);
+- buflen=sprintf(
+- buffer,
+- "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)t2p->tiff_tiles[t2p->pdf_page].tiles_edgetilewidth);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ }
+ if(t2p_tile_is_bottom_edge(t2p->tiff_tiles[t2p->pdf_page], tile-1)==0){
+ written += t2pWriteFile(output, (tdata_t) " /Rows ", 7);
+- buflen=sprintf(
+- buffer,
+- "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)t2p->tiff_tiles[t2p->pdf_page].tiles_tilelength);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ } else {
+ written += t2pWriteFile(output, (tdata_t) " /Rows ", 7);
+- buflen=sprintf(
+- buffer,
+- "%lu",
++ buflen=snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)t2p->tiff_tiles[t2p->pdf_page].tiles_edgetilelength);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ }
+@@ -5082,21 +5056,17 @@ tsize_t t2p_write_pdf_xobject_stream_filter(ttile_t tile, T2P* t2p, TIFF* output
+ if(t2p->pdf_compressionquality%100){
+ written += t2pWriteFile(output, (tdata_t) "/DecodeParms ", 13);
+ written += t2pWriteFile(output, (tdata_t) "<< /Predictor ", 14);
+- _TIFFmemset(buffer, 0x00, 16);
+- buflen=sprintf(buffer, "%u", t2p->pdf_compressionquality%100);
++ buflen=snprintf(buffer, sizeof(buffer), "%u", t2p->pdf_compressionquality%100);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " /Columns ", 10);
+- _TIFFmemset(buffer, 0x00, 16);
+- buflen = sprintf(buffer, "%lu",
++ buflen = snprintf(buffer, sizeof(buffer), "%lu",
+ (unsigned long)t2p->tiff_width);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " /Colors ", 9);
+- _TIFFmemset(buffer, 0x00, 16);
+- buflen=sprintf(buffer, "%u", t2p->tiff_samplesperpixel);
++ buflen=snprintf(buffer, sizeof(buffer), "%u", t2p->tiff_samplesperpixel);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " /BitsPerComponent ", 19);
+- _TIFFmemset(buffer, 0x00, 16);
+- buflen=sprintf(buffer, "%u", t2p->tiff_bitspersample);
++ buflen=snprintf(buffer, sizeof(buffer), "%u", t2p->tiff_bitspersample);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) ">>\n", 3);
+ }
+@@ -5116,16 +5086,16 @@ tsize_t t2p_write_pdf_xobject_stream_filter(ttile_t tile, T2P* t2p, TIFF* output
+ tsize_t t2p_write_pdf_xreftable(T2P* t2p, TIFF* output){
+
+ tsize_t written=0;
+- char buffer[21];
++ char buffer[64];
+ int buflen=0;
+ uint32 i=0;
+
+ written += t2pWriteFile(output, (tdata_t) "xref\n0 ", 7);
+- buflen=sprintf(buffer, "%lu", (unsigned long)(t2p->pdf_xrefcount + 1));
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)(t2p->pdf_xrefcount + 1));
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+ written += t2pWriteFile(output, (tdata_t) " \n0000000000 65535 f \n", 22);
+ for (i=0;i<t2p->pdf_xrefcount;i++){
+- sprintf(buffer, "%.10lu 00000 n \n",
++ snprintf(buffer, sizeof(buffer), "%.10lu 00000 n \n",
+ (unsigned long)t2p->pdf_xrefoffsets[i]);
+ written += t2pWriteFile(output, (tdata_t) buffer, 20);
+ }
+@@ -5149,17 +5119,14 @@ tsize_t t2p_write_pdf_trailer(T2P* t2p, TIFF* output)
+ snprintf(t2p->pdf_fileid + i, 9, "%.8X", rand());
+
+ written += t2pWriteFile(output, (tdata_t) "trailer\n<<\n/Size ", 17);
+- buflen = sprintf(buffer, "%lu", (unsigned long)(t2p->pdf_xrefcount+1));
++ buflen = snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)(t2p->pdf_xrefcount+1));
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+- _TIFFmemset(buffer, 0x00, 32);
+ written += t2pWriteFile(output, (tdata_t) "\n/Root ", 7);
+- buflen=sprintf(buffer, "%lu", (unsigned long)t2p->pdf_catalog);
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)t2p->pdf_catalog);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+- _TIFFmemset(buffer, 0x00, 32);
+ written += t2pWriteFile(output, (tdata_t) " 0 R \n/Info ", 12);
+- buflen=sprintf(buffer, "%lu", (unsigned long)t2p->pdf_info);
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)t2p->pdf_info);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+- _TIFFmemset(buffer, 0x00, 32);
+ written += t2pWriteFile(output, (tdata_t) " 0 R \n/ID[<", 11);
+ written += t2pWriteFile(output, (tdata_t) t2p->pdf_fileid,
+ sizeof(t2p->pdf_fileid) - 1);
+@@ -5167,9 +5134,8 @@ tsize_t t2p_write_pdf_trailer(T2P* t2p, TIFF* output)
+ written += t2pWriteFile(output, (tdata_t) t2p->pdf_fileid,
+ sizeof(t2p->pdf_fileid) - 1);
+ written += t2pWriteFile(output, (tdata_t) ">]\n>>\nstartxref\n", 16);
+- buflen=sprintf(buffer, "%lu", (unsigned long)t2p->pdf_startxref);
++ buflen=snprintf(buffer, sizeof(buffer), "%lu", (unsigned long)t2p->pdf_startxref);
+ written += t2pWriteFile(output, (tdata_t) buffer, buflen);
+- _TIFFmemset(buffer, 0x00, 32);
+ written += t2pWriteFile(output, (tdata_t) "\n%%EOF\n", 7);
+
+ return(written);
+diff --git a/tools/tiff2ps.c b/tools/tiff2ps.c
+index 3330750..7a9a816 100644
+--- a/tools/tiff2ps.c
++++ b/tools/tiff2ps.c
+@@ -1789,8 +1789,8 @@ PS_Lvl2ImageDict(FILE* fd, TIFF* tif, uint32 w, uint32 h)
+ imageOp = "imagemask";
+
+ (void)strcpy(im_x, "0");
+- (void)sprintf(im_y, "%lu", (long) h);
+- (void)sprintf(im_h, "%lu", (long) h);
++ (void)snprintf(im_y, sizeof(im_y), "%lu", (long) h);
++ (void)snprintf(im_h, sizeof(im_h), "%lu", (long) h);
+ tile_width = w;
+ tile_height = h;
+ if (TIFFIsTiled(tif)) {
+@@ -1811,7 +1811,7 @@ PS_Lvl2ImageDict(FILE* fd, TIFF* tif, uint32 w, uint32 h)
+ }
+ if (tile_height < h) {
+ fputs("/im_y 0 def\n", fd);
+- (void)sprintf(im_y, "%lu im_y sub", (unsigned long) h);
++ (void)snprintf(im_y, sizeof(im_y), "%lu im_y sub", (unsigned long) h);
+ }
+ } else {
+ repeat_count = tf_numberstrips;
+@@ -1823,7 +1823,7 @@ PS_Lvl2ImageDict(FILE* fd, TIFF* tif, uint32 w, uint32 h)
+ fprintf(fd, "/im_h %lu def\n",
+ (unsigned long) tile_height);
+ (void)strcpy(im_h, "im_h");
+- (void)sprintf(im_y, "%lu im_y sub", (unsigned long) h);
++ (void)snprintf(im_y, sizeof(im_y), "%lu im_y sub", (unsigned long) h);
+ }
+ }
+
+diff --git a/tools/tiffcrop.c b/tools/tiffcrop.c
+index 9cd5d86..a2443aa 100644
+--- a/tools/tiffcrop.c
++++ b/tools/tiffcrop.c
+@@ -2077,7 +2077,7 @@ update_output_file (TIFF **tiffout, char *mode, int autoindex,
+ return 1;
+ }
+
+- sprintf (filenum, "-%03d%s", findex, export_ext);
++ snprintf(filenum, sizeof(filenum), "-%03d%s", findex, export_ext);
+ filenum[14] = '\0';
+ strncat (exportname, filenum, 15);
+ }
+@@ -2230,8 +2230,8 @@ main(int argc, char* argv[])
+
+ /* dump.infilename is guaranteed to be NUL termimated and have 20 bytes
+ fewer than PATH_MAX */
+- memset (temp_filename, '\0', PATH_MAX + 1);
+- sprintf (temp_filename, "%s-read-%03d.%s", dump.infilename, dump_images,
++ snprintf(temp_filename, sizeof(temp_filename), "%s-read-%03d.%s",
++ dump.infilename, dump_images,
+ (dump.format == DUMP_TEXT) ? "txt" : "raw");
+ if ((dump.infile = fopen(temp_filename, dump.mode)) == NULL)
+ {
+@@ -2249,8 +2249,8 @@ main(int argc, char* argv[])
+
+ /* dump.outfilename is guaranteed to be NUL termimated and have 20 bytes
+ fewer than PATH_MAX */
+- memset (temp_filename, '\0', PATH_MAX + 1);
+- sprintf (temp_filename, "%s-write-%03d.%s", dump.outfilename, dump_images,
++ snprintf(temp_filename, sizeof(temp_filename), "%s-write-%03d.%s",
++ dump.outfilename, dump_images,
+ (dump.format == DUMP_TEXT) ? "txt" : "raw");
+ if ((dump.outfile = fopen(temp_filename, dump.mode)) == NULL)
+ {
+diff --git a/tools/tiffdither.c b/tools/tiffdither.c
+index f2f0f20..4308946 100644
+--- a/tools/tiffdither.c
++++ b/tools/tiffdither.c
+@@ -260,7 +260,7 @@ main(int argc, char* argv[])
+ TIFFSetField(out, TIFFTAG_FILLORDER, fillorder);
+ else
+ CopyField(TIFFTAG_FILLORDER, shortv);
+- sprintf(thing, "Dithered B&W version of %s", argv[optind]);
++ snprintf(thing, sizeof(thing), "Dithered B&W version of %s", argv[optind]);
+ TIFFSetField(out, TIFFTAG_IMAGEDESCRIPTION, thing);
+ CopyField(TIFFTAG_PHOTOMETRIC, shortv);
+ CopyField(TIFFTAG_ORIENTATION, shortv);
+--
+1.8.3.rc3
+
+
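The patch above applies one hardening pattern throughout the libtiff tools: every sprintf() into a fixed-size stack buffer becomes an snprintf() bounded by the buffer size, and buffers that receive "%lu" of a 64-bit value are widened, since 20 decimal digits plus the terminating NUL do not fit in char[16]. A minimal standalone sketch of that pattern follows; it is not libtiff code, and the function names, format string and buffer sizes are illustrative only.

    /*
     * Sketch of the sprintf -> snprintf hardening used in the patch above.
     * Illustrative code only; not taken from libtiff.
     */
    #include <stdio.h>

    /* Unsafe: overflows `buf` whenever the formatted text is too long. */
    static int describe_unsafe(char *buf, const char *name, unsigned long pages)
    {
        return sprintf(buf, "YCbCr conversion of %s, %lu pages", name, pages);
    }

    /* Safe: output is truncated to the buffer size instead of overflowing. */
    static int describe_safe(char *buf, size_t buflen, const char *name,
                             unsigned long pages)
    {
        int n = snprintf(buf, buflen, "YCbCr conversion of %s, %lu pages",
                         name, pages);
        /* snprintf returns the length it *would* have written; a value
         * >= buflen (or < 0) means the result did not fit. */
        return (n < 0 || (size_t)n >= buflen) ? -1 : n;
    }

    int main(void)
    {
        char big[64];
        char small[16];

        if (describe_safe(big, sizeof(big), "input.tif", 3UL) >= 0)
            puts(big);                        /* fits, printed in full   */
        if (describe_safe(small, sizeof(small), "input.tif", 3UL) < 0)
            puts("description truncated");    /* 16 bytes is not enough  */

        (void)describe_unsafe;                /* kept for comparison only */
        return 0;
    }

The truncation check on snprintf()'s return value is the piece the original sprintf() calls could never provide; the crafted-image crash in CVE-2013-1961 comes precisely from formatted output running past a fixed buffer.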
diff --git a/meta/recipes-multimedia/libtiff/tiff_4.0.3.bb b/meta/recipes-multimedia/libtiff/tiff_4.0.3.bb
index af1f2b6ad8..b7d1129ad6 100644
--- a/meta/recipes-multimedia/libtiff/tiff_4.0.3.bb
+++ b/meta/recipes-multimedia/libtiff/tiff_4.0.3.bb
@@ -6,6 +6,7 @@ HOMEPAGE = "http://www.remotesensing.org/libtiff/"
SRC_URI = "ftp://ftp.remotesensing.org/pub/libtiff/tiff-${PV}.tar.gz \
file://libtool2.patch \
file://libtiff-CVE-2013-1960.patch \
+ file://libtiff-CVE-2013-1961.patch \
file://libtiff-CVE-2013-4232.patch \
file://libtiff-CVE-2013-4243.patch \
file://libtiff-CVE-2013-4244.patch \
diff --git a/meta/recipes-multimedia/pulseaudio/files/0001-libatomic_ops-Aarch64-basic-port.patch b/meta/recipes-multimedia/pulseaudio/files/0001-libatomic_ops-Aarch64-basic-port.patch
new file mode 100644
index 0000000000..a563b8b022
--- /dev/null
+++ b/meta/recipes-multimedia/pulseaudio/files/0001-libatomic_ops-Aarch64-basic-port.patch
@@ -0,0 +1,239 @@
+From aac120d778ae5fc619b2fb8ef18ea18d3d5d20cc Mon Sep 17 00:00:00 2001
+From: Yvan Roux <yvan.roux@linaro.org>
+Date: Wed, 23 Jan 2013 17:14:16 +0100
+Subject: [PATCH] Aarch64 basic port
+
+Adapted-for-OpenEmbedded-by: Marcin Juszkiewicz <marcin.juszkiewicz@linaro.org>
+
+
+Upstream-Status: Backport
+
+It is original from
+https://github.com/ivmai/libatomic_ops/commit/cbbf86330fcb600cfe0f895cb970d922456005d6
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+---
+ src/atomic_ops.h | 4
+ src/atomic_ops/sysdeps/Makefile.am | 1
+ src/atomic_ops/sysdeps/gcc/aarch64.h | 184 +++++++++++++++++++++++++++++++++++
+ 3 files changed, 189 insertions(+)
+ create mode 100644 src/atomic_ops/sysdeps/gcc/aarch64.h
+
+--- libatomic_ops-7.2.orig/src/atomic_ops.h
++++ libatomic_ops-7.2/src/atomic_ops.h
+@@ -242,10 +242,14 @@
+ # endif /* __m68k__ */
+ # if defined(__powerpc__) || defined(__ppc__) || defined(__PPC__) \
+ || defined(__powerpc64__) || defined(__ppc64__)
+ # include "atomic_ops/sysdeps/gcc/powerpc.h"
+ # endif /* __powerpc__ */
++# if defined(__aarch64__)
++# include "atomic_ops/sysdeps/gcc/aarch64.h"
++# define AO_CAN_EMUL_CAS
++# endif /* __aarch64__ */
+ # if defined(__arm__) && !defined(AO_USE_PTHREAD_DEFS)
+ # include "atomic_ops/sysdeps/gcc/arm.h"
+ # define AO_CAN_EMUL_CAS
+ # endif /* __arm__ */
+ # if defined(__cris__) || defined(CRIS)
+--- libatomic_ops-7.2.orig/src/atomic_ops/sysdeps/Makefile.am
++++ libatomic_ops-7.2/src/atomic_ops/sysdeps/Makefile.am
+@@ -24,10 +24,11 @@ nobase_sysdep_HEADERS= generic_pthread.h
+ standard_ao_double_t.h \
+ README \
+ \
+ armcc/arm_v6.h \
+ \
++ gcc/aarch64.h \
+ gcc/alpha.h gcc/arm.h gcc/avr32.h gcc/cris.h \
+ gcc/hexagon.h gcc/hppa.h gcc/ia64.h gcc/m68k.h \
+ gcc/mips.h gcc/powerpc.h gcc/s390.h \
+ gcc/sh.h gcc/sparc.h gcc/x86.h gcc/x86_64.h \
+ \
+--- /dev/null
++++ libatomic_ops-7.2/src/atomic_ops/sysdeps/gcc/aarch64.h
+@@ -0,0 +1,184 @@
++/*
++ * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
++ * Copyright (c) 1996-1999 by Silicon Graphics. All rights reserved.
++ * Copyright (c) 1999-2003 by Hewlett-Packard Company. All rights reserved.
++ *
++ *
++ * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
++ * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
++ *
++ * Permission is hereby granted to use or copy this program
++ * for any purpose, provided the above notices are retained on all copies.
++ * Permission to modify the code and to distribute modified code is granted,
++ * provided the above notices are retained, and a notice that the code was
++ * modified is included with the above copyright notice.
++ *
++ */
++
++#include "../read_ordered.h"
++
++#include "../test_and_set_t_is_ao_t.h"
++
++AO_INLINE void
++AO_nop_full(void)
++{
++# ifndef AO_UNIPROCESSOR
++__sync_synchronize ();
++# endif
++}
++#define AO_HAVE_nop_full
++
++AO_INLINE AO_t
++AO_load(const volatile AO_t *addr)
++{
++ return __atomic_load_n (addr, __ATOMIC_RELAXED);
++}
++#define AO_HAVE_load
++
++AO_INLINE AO_t
++AO_load_acquire(const volatile AO_t *addr)
++{
++ return __atomic_load_n (addr, __ATOMIC_ACQUIRE);
++}
++#define AO_HAVE_load_acquire
++
++AO_INLINE void
++ AO_store(volatile AO_t *addr, AO_t value)
++{
++ __atomic_store_n(addr, value, __ATOMIC_RELAXED);
++}
++#define AO_HAVE_store
++
++AO_INLINE void
++ AO_store_release(volatile AO_t *addr, AO_t value)
++{
++ __atomic_store_n(addr, value, __ATOMIC_RELEASE);
++}
++#define AO_HAVE_store_release
++
++AO_INLINE AO_TS_VAL_t
++AO_test_and_set(volatile AO_TS_t *addr)
++{
++ return __atomic_test_and_set(addr, __ATOMIC_RELAXED);
++}
++# define AO_HAVE_test_and_set
++
++AO_INLINE AO_TS_VAL_t
++AO_test_and_set_acquire(volatile AO_TS_t *addr)
++{
++ return __atomic_test_and_set(addr, __ATOMIC_ACQUIRE);
++}
++# define AO_HAVE_test_and_set_acquire
++
++AO_INLINE AO_TS_VAL_t
++AO_test_and_set_release(volatile AO_TS_t *addr)
++{
++ return __atomic_test_and_set(addr, __ATOMIC_RELEASE);
++}
++# define AO_HAVE_test_and_set_release
++
++AO_INLINE AO_TS_VAL_t
++AO_test_and_set_full(volatile AO_TS_t *addr)
++{
++ return __atomic_test_and_set(addr, __ATOMIC_SEQ_CST);
++}
++# define AO_HAVE_test_and_set_full
++
++AO_INLINE AO_t
++AO_fetch_and_add(volatile AO_t *p, AO_t incr)
++{
++ return __atomic_fetch_add(p, incr, __ATOMIC_RELAXED);
++}
++#define AO_HAVE_fetch_and_add
++
++AO_INLINE AO_t
++AO_fetch_and_add_acquire(volatile AO_t *p, AO_t incr)
++{
++ return __atomic_fetch_add(p, incr, __ATOMIC_ACQUIRE);
++}
++#define AO_HAVE_fetch_and_add_acquire
++
++AO_INLINE AO_t
++AO_fetch_and_add_release(volatile AO_t *p, AO_t incr)
++{
++ return __atomic_fetch_add(p, incr, __ATOMIC_RELEASE);
++}
++#define AO_HAVE_fetch_and_add_release
++
++AO_INLINE AO_t
++AO_fetch_and_add_full(volatile AO_t *p, AO_t incr)
++{
++ return __atomic_fetch_add(p, incr, __ATOMIC_SEQ_CST);
++}
++#define AO_HAVE_fetch_and_add_full
++
++AO_INLINE AO_t
++AO_fetch_and_add1(volatile AO_t *p)
++{
++ return __atomic_fetch_add(p, 1, __ATOMIC_RELAXED);
++}
++#define AO_HAVE_fetch_and_add1
++
++AO_INLINE AO_t
++AO_fetch_and_add1_acquire(volatile AO_t *p)
++{
++ return __atomic_fetch_add(p, 1, __ATOMIC_ACQUIRE);
++}
++#define AO_HAVE_fetch_and_add1_acquire
++
++AO_INLINE AO_t
++AO_fetch_and_add1_release(volatile AO_t *p)
++{
++ return __atomic_fetch_add(p, 1, __ATOMIC_RELEASE);
++}
++#define AO_HAVE_fetch_and_add1_release
++
++AO_INLINE AO_t
++AO_fetch_and_add1_full(volatile AO_t *p)
++{
++ return __atomic_fetch_add(p, 1, __ATOMIC_SEQ_CST);
++}
++#define AO_HAVE_fetch_and_add1_full
++
++AO_INLINE AO_t
++AO_fetch_and_sub1(volatile AO_t *p)
++{
++ return __atomic_fetch_sub(p, 1, __ATOMIC_RELAXED);
++}
++#define AO_HAVE_fetch_and_sub1
++
++AO_INLINE AO_t
++AO_fetch_and_sub1_acquire(volatile AO_t *p)
++{
++ return __atomic_fetch_sub(p, 1, __ATOMIC_ACQUIRE);
++}
++#define AO_HAVE_fetch_and_sub1_acquire
++
++AO_INLINE AO_t
++AO_fetch_and_sub1_release(volatile AO_t *p)
++{
++ return __atomic_fetch_sub(p, 1, __ATOMIC_RELEASE);
++}
++#define AO_HAVE_fetch_and_sub1_release
++
++AO_INLINE AO_t
++AO_fetch_and_sub1_full(volatile AO_t *p)
++{
++ return __atomic_fetch_sub(p, 1, __ATOMIC_SEQ_CST);
++}
++#define AO_HAVE_fetch_and_sub1_full
++
++/* Returns nonzero if the comparison succeeded. */
++AO_INLINE int
++AO_compare_and_swap(volatile AO_t *addr, AO_t old_val, AO_t new_val)
++{
++ return __sync_bool_compare_and_swap(addr, old_val, new_val);
++}
++# define AO_HAVE_compare_and_swap
++
++AO_INLINE AO_t
++AO_fetch_compare_and_swap(volatile AO_t *addr, AO_t old_val, AO_t new_val)
++{
++ return __sync_val_compare_and_swap(addr, old_val, new_val);
++}
++# define AO_HAVE_fetch_compare_and_swap
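Rather than hand-written assembly, the AArch64 port above maps the libatomic_ops primitives onto GCC's __atomic_* and __sync_* builtins with explicit memory-order arguments. The sketch below exercises the same builtins in isolation; it is not libatomic_ops code, the type and function names are invented, and it only assumes a GCC or Clang recent enough to provide these builtins.

    /*
     * Standalone sketch of the compiler builtins the aarch64.h port uses.
     * Illustrative names only.  Build: gcc -O2 atomic_sketch.c
     */
    #include <stdio.h>

    typedef unsigned long word_t;

    /* Relaxed load, as in AO_load. */
    static word_t load_relaxed(const volatile word_t *p)
    {
        return __atomic_load_n(p, __ATOMIC_RELAXED);
    }

    /* Release store, as in AO_store_release. */
    static void store_release(volatile word_t *p, word_t v)
    {
        __atomic_store_n(p, v, __ATOMIC_RELEASE);
    }

    /* Fully ordered fetch-and-add, as in AO_fetch_and_add_full. */
    static word_t fetch_add_full(volatile word_t *p, word_t inc)
    {
        return __atomic_fetch_add(p, inc, __ATOMIC_SEQ_CST);
    }

    /* Boolean compare-and-swap, as in AO_compare_and_swap. */
    static int cas(volatile word_t *p, word_t expected, word_t desired)
    {
        return __sync_bool_compare_and_swap(p, expected, desired);
    }

    int main(void)
    {
        volatile word_t counter = 0;

        store_release(&counter, 5);
        word_t before = fetch_add_full(&counter, 3);   /* returns 5, counter = 8 */
        int swapped   = cas(&counter, 8, 42);          /* 8 -> 42, returns nonzero */

        printf("before=%lu now=%lu swapped=%d\n",
               (unsigned long)before,
               (unsigned long)load_relaxed(&counter),
               swapped);
        return 0;
    }

Leaning on the builtins is also why the commit calls this a basic port: the compiler guarantees correctness, while a tuned implementation might choose cheaper barriers for individual operations.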
diff --git a/meta/recipes-multimedia/pulseaudio/libatomics-ops_7.2.bb b/meta/recipes-multimedia/pulseaudio/libatomics-ops_7.2.bb
index 4632d240d2..5a582287b5 100644
--- a/meta/recipes-multimedia/pulseaudio/libatomics-ops_7.2.bb
+++ b/meta/recipes-multimedia/pulseaudio/libatomics-ops_7.2.bb
@@ -9,6 +9,7 @@ LIC_FILES_CHKSUM = "file://doc/COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
PR = "r1"
SRC_URI = "http://www.hpl.hp.com/research/linux/atomic_ops/download/libatomic_ops-${PV}.tar.gz \
+ file://0001-libatomic_ops-Aarch64-basic-port.patch \
"
SRC_URI[md5sum] = "890acdc83a7cd10e2e9536062d3741c8"
diff --git a/meta/recipes-multimedia/sbc/sbc_1.2.bb b/meta/recipes-multimedia/sbc/sbc_1.3.bb
index 9cdf01bb74..2d7f31bc1b 100644
--- a/meta/recipes-multimedia/sbc/sbc_1.2.bb
+++ b/meta/recipes-multimedia/sbc/sbc_1.3.bb
@@ -10,7 +10,7 @@ DEPENDS = "libsndfile1"
SRC_URI = "${KERNELORG_MIRROR}/linux/bluetooth/${BP}.tar.xz"
-SRC_URI[md5sum] = "ec65c444ad4c32aa85702641045b19e9"
-SRC_URI[sha256sum] = "c2f01ea54f7473704825113a9cdd46a23e67c650eff575f0670c3d9d66c4a5dc"
+SRC_URI[md5sum] = "2d8b7841f2c11ab287718d562f2b981c"
+SRC_URI[sha256sum] = "e61022cf576f14190241e7071753fdacdce5d1dea89ffd704110fc50be689309"
inherit autotools pkgconfig
diff --git a/meta/recipes-qt/meta/meta-toolchain-qt.inc b/meta/recipes-qt/meta/meta-toolchain-qt.inc
index c9bdeae9ac..7e75d93b14 100644
--- a/meta/recipes-qt/meta/meta-toolchain-qt.inc
+++ b/meta/recipes-qt/meta/meta-toolchain-qt.inc
@@ -4,29 +4,32 @@ TOOLCHAIN_OUTPUTNAME = "${SDK_NAME}-toolchain-${QTNAME}-${DISTRO_VERSION}"
require recipes-core/meta/meta-toolchain.bb
-QT_TOOLS_PREFIX = "${SDKPATHNATIVE}${bindir_nativesdk}"
+QT_TOOLS_PREFIX = "$OECORE_NATIVE_SYSROOT${bindir_nativesdk}"
-toolchain_create_sdk_env_script_append() {
- echo 'export OE_QMAKE_CFLAGS="$CFLAGS"' >> $script
+create_sdk_files_append() {
+ mkdir -p ${SDK_OUTPUT}${SDKPATHNATIVE}/environment-setup.d/
+ script=${SDK_OUTPUT}${SDKPATHNATIVE}/environment-setup.d/${QT_DIR_NAME}.sh
+
+ echo 'export OE_QMAKE_CFLAGS="$CFLAGS"' > $script
echo 'export OE_QMAKE_CXXFLAGS="$CXXFLAGS"' >> $script
echo 'export OE_QMAKE_LDFLAGS="$LDFLAGS"' >> $script
echo 'export OE_QMAKE_CC=$CC' >> $script
echo 'export OE_QMAKE_CXX=$CXX' >> $script
echo 'export OE_QMAKE_LINK=$CXX' >> $script
echo 'export OE_QMAKE_AR=$AR' >> $script
- echo 'export OE_QMAKE_LIBDIR_QT=${SDKTARGETSYSROOT}/${libdir}' >> $script
- echo 'export OE_QMAKE_INCDIR_QT=${SDKTARGETSYSROOT}/${includedir}/${QT_DIR_NAME}' >> $script
+ echo 'export OE_QMAKE_LIBDIR_QT=$OECORE_TARGET_SYSROOT${libdir}' >> $script
+ echo 'export OE_QMAKE_INCDIR_QT=$OECORE_TARGET_SYSROOT${includedir}/${QT_DIR_NAME}' >> $script
echo 'export OE_QMAKE_MOC=${QT_TOOLS_PREFIX}/moc4' >> $script
echo 'export OE_QMAKE_UIC=${QT_TOOLS_PREFIX}/uic4' >> $script
echo 'export OE_QMAKE_UIC3=${QT_TOOLS_PREFIX}/uic34' >> $script
echo 'export OE_QMAKE_RCC=${QT_TOOLS_PREFIX}/rcc4' >> $script
echo 'export OE_QMAKE_QDBUSCPP2XML=${QT_TOOLS_PREFIX}/qdbuscpp2xml4' >> $script
echo 'export OE_QMAKE_QDBUSXML2CPP=${QT_TOOLS_PREFIX}/qdbusxml2cpp4' >> $script
- echo 'export OE_QMAKE_QT_CONFIG=${SDKTARGETSYSROOT}/${datadir}/${QT_DIR_NAME}/mkspecs/qconfig.pri' >> $script
- echo 'export QMAKESPEC=${SDKTARGETSYSROOT}/${datadir}/${QT_DIR_NAME}/mkspecs/linux-g++' >> $script
- echo 'export QT_CONF_PATH=${SDKPATHNATIVE}/${sysconfdir}/qt.conf' >> $script
+ echo 'export OE_QMAKE_QT_CONFIG=$OECORE_TARGET_SYSROOT${datadir}/${QT_DIR_NAME}/mkspecs/qconfig.pri' >> $script
+ echo 'export QMAKESPEC=$OECORE_TARGET_SYSROOT${datadir}/${QT_DIR_NAME}/mkspecs/linux-g++' >> $script
+ echo 'export QT_CONF_PATH=$OECORE_NATIVE_SYSROOT${sysconfdir}/qt.conf' >> $script
- # make a symbolic link to mkspecs for compatibility with Nokia's SDK
- # and QTCreator
- (cd ${SDK_OUTPUT}/${QT_TOOLS_PREFIX}/..; ln -s ${SDKTARGETSYSROOT}/usr/share/${QT_DIR_NAME}/mkspecs mkspecs;)
+ # make a symbolic link to mkspecs for compatibility with Qt SDK
+ # and Qt Creator
+ (cd ${SDK_OUTPUT}/${SDKPATHNATIVE}${bindir_nativesdk}/..; ln -s ${SDKTARGETSYSROOT}/usr/share/${QT_DIR_NAME}/mkspecs mkspecs;)
}
diff --git a/meta/recipes-qt/qt-demo/qt-demo-init/qtdemo-init b/meta/recipes-qt/qt-demo/qt-demo-init/qtdemo-init
index 3a1f2cb1b2..21f0273e2a 100644
--- a/meta/recipes-qt/qt-demo/qt-demo-init/qtdemo-init
+++ b/meta/recipes-qt/qt-demo/qt-demo-init/qtdemo-init
@@ -2,8 +2,8 @@
set -e
-if [ -f /usr/bin/qtdemo ]; then
- QTDEMO="qtdemo > /var/log/Xsession.log 2> &1"
+if [ -x /usr/bin/qtdemo ]; then
+ QTDEMO="qtdemo"
else
QTDEMO="qtdemoE -qws"
fi
@@ -18,7 +18,7 @@ case "$1" in
if [ ! -f /etc/pointercal ]; then
/usr/bin/ts_calibrate
fi
- if [ "$QTDEMO" = qtdemo ]; then
+ if [ "$QTDEMO" = "qtdemo" ]; then
Xorg &
export DISPLAY=:0
$QTDEMO &
@@ -26,7 +26,7 @@ case "$1" in
QWS_MOUSE_PROTO=tslib:$TSLIB_TSDEVICE $QTDEMO &
fi
else
- if [ "$QTDEMO" = qtdemo ]; then
+ if [ "$QTDEMO" = "qtdemo" ]; then
Xorg &
export DISPLAY=:0
fi
@@ -35,7 +35,7 @@ case "$1" in
;;
stop)
echo "Stopping qtdemo"
- if [ "$QTDEMO" = qtdemo ]; then
+ if [ "$QTDEMO" = "qtdemo" ]; then
killall Xorg
killall qtdemo
else
diff --git a/meta/recipes-qt/qt4/qt-mobility_1.2.0.inc b/meta/recipes-qt/qt4/qt-mobility_1.2.0.inc
index d46acb9c7e..ae1769de3a 100644
--- a/meta/recipes-qt/qt4/qt-mobility_1.2.0.inc
+++ b/meta/recipes-qt/qt4/qt-mobility_1.2.0.inc
@@ -11,7 +11,7 @@ LIC_FILES_CHKSUM = "file://LICENSE.LGPL;md5=fbc093901857fcd118f065f900982c24 \
file://LGPL_EXCEPTION.txt;md5=411080a56ff917a5a1aa08c98acae354"
FILESEXTRAPATHS =. "${FILE_DIRNAME}/qt-mobility-${PV}:"
-SRC_URI = "http://get.qt.nokia.com/qt/add-ons/qt-mobility-opensource-src-${PV}.tar.gz \
+SRC_URI = "http://pkgs.fedoraproject.org/repo/pkgs/qt-mobility/qt-mobility-opensource-src-${PV}.tar.gz/ea5db5a8d3dd4709c2926dceda646bd8/qt-mobility-opensource-src-${PV}.tar.gz \
file://qt-mobility-configure.patch \
file://0001-gstvideoconnector-fixed-buffers-allocation.patch \
file://0002-Remove-unnecessary-rpaths-from-qml_device-example.patch \
diff --git a/meta/recipes-qt/qt4/qt4-4.8.6.inc b/meta/recipes-qt/qt4/qt4-4.8.6.inc
index ae6692b50a..074e82d061 100644
--- a/meta/recipes-qt/qt4/qt4-4.8.6.inc
+++ b/meta/recipes-qt/qt4/qt4-4.8.6.inc
@@ -22,6 +22,10 @@ SRC_URI = "http://download.qt-project.org/official_releases/qt/4.8/${PV}/qt-ever
file://0019-Fixes-for-gcc-4.7.0-particularly-on-qemux86.patch \
file://0027-tools.pro-disable-qmeegographicssystemhelper.patch \
file://0028-Don-t-crash-on-broken-GIF-images.patch \
+ file://0029-aarch64_arm64_fix_arch_detection.patch \
+ file://0030-aarch64_arm64_qatomic_support.patch \
+ file://0031-aarch64_arm64_mkspecs.patch \
+ file://0032-aarch64_add_header.patch \
file://g++.conf \
file://linux.conf \
"
@@ -31,6 +35,14 @@ SRC_URI[sha256sum] = "8b14dd91b52862e09b8e6a963507b74bc2580787d171feda197badfa70
S = "${WORKDIR}/qt-everywhere-opensource-src-${PV}"
+# disable webkit for mips64 n32 temporarily that fails to compile,
+# qt upstream defect:
+# https://bugreports.qt-project.org/browse/QTBUG-39224
+QT_CONFIG_FLAGS_append_mips64 = "${@bb.utils.contains("TUNE_FEATURES", "n32", " -no-webkit", "" ,d)}"
+
+# disable webkit for aarch64 temporarily that fails to compile
+QT_CONFIG_FLAGS_append_aarch64 = " -no-webkit"
+
FILES_${QT_BASE_NAME}-tools_append = " ${bindir}/qml ${bindir}/qmlplugindump"
FILES_${QT_BASE_NAME}-tools-dbg_append = " ${bindir}/.debug/qml ${bindir}/.debug/qmlplugindump"
diff --git a/meta/recipes-qt/qt4/qt4-4.8.6/0007-dbus-Remove-const-usage-that-causes-compile-failure-.patch b/meta/recipes-qt/qt4/qt4-4.8.6/0007-dbus-Remove-const-usage-that-causes-compile-failure-.patch
index 7d3b336f77..1d23c929f4 100644
--- a/meta/recipes-qt/qt4/qt4-4.8.6/0007-dbus-Remove-const-usage-that-causes-compile-failure-.patch
+++ b/meta/recipes-qt/qt4/qt4-4.8.6/0007-dbus-Remove-const-usage-that-causes-compile-failure-.patch
@@ -7,7 +7,7 @@ Subject: [PATCH 07/21] dbus: Remove "const" usage that causes compile failure
Patch has apparently been rejected upstream, not because it is invalid
but because the submitter did not submit a merge request for it, so the
validity of the patch upstream is uncertain. For further details see:
-http://bugreports.qt.nokia.com/browse/QTBUG-17962
+https://bugreports.qt-project.org/browse/QTBUG-17962
Upstream-Status: Denied [possible retry]
diff --git a/meta/recipes-qt/qt4/qt4-4.8.6/0012-Add-2bpp-support.patch b/meta/recipes-qt/qt4/qt4-4.8.6/0012-Add-2bpp-support.patch
index 8402eab635..8e411f3426 100644
--- a/meta/recipes-qt/qt4/qt4-4.8.6/0012-Add-2bpp-support.patch
+++ b/meta/recipes-qt/qt4/qt4-4.8.6/0012-Add-2bpp-support.patch
@@ -4,7 +4,7 @@ Date: Wed, 26 Sep 2012 20:39:21 +0200
Subject: [PATCH 12/21] Add 2bpp support
Submitted upstream but rejected as being "out of scope":
-http://bugreports.qt.nokia.com/browse/QTBUG-3468
+https://bugreports.qt-project.org/browse/QTBUG-3468
Upstream-Status: Denied
diff --git a/meta/recipes-qt/qt4/qt4-4.8.6/0029-aarch64_arm64_fix_arch_detection.patch b/meta/recipes-qt/qt4/qt4-4.8.6/0029-aarch64_arm64_fix_arch_detection.patch
new file mode 100644
index 0000000000..fbd32b173d
--- /dev/null
+++ b/meta/recipes-qt/qt4/qt4-4.8.6/0029-aarch64_arm64_fix_arch_detection.patch
@@ -0,0 +1,53 @@
+From 800abbba658203fc8e746e3fc780a297cd4110cf Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Lisandro=20Dami=C3=A1n=20Nicanor=20P=C3=A9rez=20Meyer?=
+ <perezmeyer@gmail.com>
+Date: Wed, 20 Aug 2014 17:52:49 -0300
+Subject: [PATCH] Fix AArch64/arm64 detection.
+
+The detection needs to go before arm, else the system will detect AArch64/arm64
+as arm.
+
+This patch comes from Wookey, he has agreed to put it under BSD or Expat
+to allow it's inclusion in here:
+<https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=735488#255>
+
+Change-Id: Ic2171c03fca8bb871347940fa3a2bc467776f797
+
+
+Upstream-Status: Pending
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+---
+ configure | 12 ++++++------
+ 1 file changed, 6 insertions(+), 6 deletions(-)
+
+diff --git a/configure b/configure
+index a9ba7c8..eb88b72 100755
+--- a/configure
++++ b/configure
+@@ -3241,17 +3241,17 @@ if [ -z "${CFG_HOST_ARCH}" ]; then
+ fi
+ CFG_HOST_ARCH=s390
+ ;;
+- *:*:arm*)
++ *:*:aarch64*|*:*:arm64*)
+ if [ "$OPT_VERBOSE" = "yes" ]; then
+- echo " ARM (arm)"
++ echo " AArch64 (aarch64)"
+ fi
+- CFG_HOST_ARCH=arm
++ CFG_HOST_ARCH=aarch64
+ ;;
+- *:*:aarch64*)
++ *:*:arm*)
+ if [ "$OPT_VERBOSE" = "yes" ]; then
+- echo " AArch64 (aarch64)"
++ echo " ARM (arm)"
+ fi
+- CFG_HOST_ARCH=aarch64
++ CFG_HOST_ARCH=arm
+ ;;
+ Linux:*:sparc*)
+ if [ "$OPT_VERBOSE" = "yes" ]; then
+--
+2.1.0
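The reordering above matters because the configure script's case statement takes
the first pattern that matches: a host whose uname machine string is "arm64"
matches the generic "*:*:arm*" pattern, so the AArch64/arm64 branch has to be
tested first. A minimal illustration of the glob behaviour, using Python's
fnmatch as a stand-in for the shell pattern match (the "Linux:3.16:..." triples
are made-up examples, not output of the real script):

    import fnmatch

    # "arm64" begins with "arm", so the generic arm pattern would swallow it.
    print(fnmatch.fnmatch("Linux:3.16:arm64", "*:*:arm*"))        # True
    # "aarch64" does not match "arm*", but either spelling must reach the
    # aarch64/arm64 branch before the arm branch is tried.
    print(fnmatch.fnmatch("Linux:3.16:aarch64", "*:*:arm*"))      # False
    print(fnmatch.fnmatch("Linux:3.16:aarch64", "*:*:aarch64*"))  # True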
diff --git a/meta/recipes-qt/qt4/qt4-4.8.6/0030-aarch64_arm64_qatomic_support.patch b/meta/recipes-qt/qt4/qt4-4.8.6/0030-aarch64_arm64_qatomic_support.patch
new file mode 100644
index 0000000000..ba4c2a6b4f
--- /dev/null
+++ b/meta/recipes-qt/qt4/qt4-4.8.6/0030-aarch64_arm64_qatomic_support.patch
@@ -0,0 +1,491 @@
+From 294010b562c9846bb2bc4ee9c63ff78adc7c1f4f Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Lisandro=20Dami=C3=A1n=20Nicanor=20P=C3=A9rez=20Meyer?=
+ <perezmeyer@gmail.com>
+Date: Sat, 15 Mar 2014 15:40:49 -0300
+Subject: [PATCH] Add qatomic support for AArch64 (aka arm64).
+
+Patch by Mark Salter <msalter@redhat.com>
+licensed under BSD:
+
+<https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=735488#195>
+
+This patch is known not to be the most correct way to implement these
+operations; it seems possible to do them in a faster way, but this should
+work nonetheless until we can provide something better.
+
+Change-Id: Ib392b27dc54691fd4c2ea9896240ad71fb8128cc
+
+
+Upstream-Status: Pending
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+
+---
+ src/corelib/arch/aarch64/arch.pri | 4 +
+ src/corelib/arch/aarch64/qatomic_aarch64.cpp | 70 ++++++
+ src/corelib/arch/arch.pri | 4 +-
+ src/corelib/arch/qatomic_aarch64.h | 335 +++++++++++++++++++++++++++
+ src/corelib/arch/qatomic_arch.h | 2 +
+ 5 files changed, 414 insertions(+), 1 deletion(-)
+ create mode 100644 src/corelib/arch/aarch64/arch.pri
+ create mode 100644 src/corelib/arch/aarch64/qatomic_aarch64.cpp
+ create mode 100644 src/corelib/arch/qatomic_aarch64.h
+
+diff --git a/src/corelib/arch/aarch64/arch.pri b/src/corelib/arch/aarch64/arch.pri
+new file mode 100644
+index 0000000..63523d9
+--- /dev/null
++++ b/src/corelib/arch/aarch64/arch.pri
+@@ -0,0 +1,4 @@
++#
++# AArch64 architecture
++#
++SOURCES += $$QT_ARCH_CPP/qatomic_aarch64.cpp
+diff --git a/src/corelib/arch/aarch64/qatomic_aarch64.cpp b/src/corelib/arch/aarch64/qatomic_aarch64.cpp
+new file mode 100644
+index 0000000..fc851b9
+--- /dev/null
++++ b/src/corelib/arch/aarch64/qatomic_aarch64.cpp
+@@ -0,0 +1,70 @@
++/****************************************************************************
++**
++** Copyright (C) 2012, 2013 Digia Plc and/or its subsidiary(-ies).
++** Contact: http://www.qt-project.org/legal
++**
++** This file is part of the QtCore module of the Qt Toolkit.
++**
++** $QT_BEGIN_LICENSE:LGPL$
++** Commercial License Usage
++** Licensees holding valid commercial Qt licenses may use this file in
++** accordance with the commercial license agreement provided with the
++** Software or, alternatively, in accordance with the terms contained in
++** a written agreement between you and Digia. For licensing terms and
++** conditions see http://qt.digia.com/licensing. For further information
++** use the contact form at http://qt.digia.com/contact-us.
++**
++** GNU Lesser General Public License Usage
++** Alternatively, this file may be used under the terms of the GNU Lesser
++** General Public License version 2.1 as published by the Free Software
++** Foundation and appearing in the file LICENSE.LGPL included in the
++** packaging of this file. Please review the following information to
++** ensure the GNU Lesser General Public License version 2.1 requirements
++** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
++**
++** In addition, as a special exception, Digia gives you certain additional
++** rights. These rights are described in the Digia Qt LGPL Exception
++** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
++**
++** GNU General Public License Usage
++** Alternatively, this file may be used under the terms of the GNU
++** General Public License version 3.0 as published by the Free Software
++** Foundation and appearing in the file LICENSE.GPL included in the
++** packaging of this file. Please review the following information to
++** ensure the GNU General Public License version 3.0 requirements will be
++** met: http://www.gnu.org/copyleft/gpl.html.
++**
++**
++** $QT_END_LICENSE$
++**
++****************************************************************************/
++
++#include <QtCore/qglobal.h>
++
++#include <unistd.h>
++#ifdef _POSIX_PRIORITY_SCHEDULING
++# include <sched.h>
++#endif
++#include <time.h>
++
++QT_BEGIN_NAMESPACE
++
++QT_USE_NAMESPACE
++
++Q_CORE_EXPORT void qt_atomic_yield(int *count)
++{
++#ifdef _POSIX_PRIORITY_SCHEDULING
++ if ((*count)++ < 50) {
++ sched_yield();
++ } else
++#endif
++ {
++ struct timespec tm;
++ tm.tv_sec = 0;
++ tm.tv_nsec = 2000001;
++ nanosleep(&tm, NULL);
++ *count = 0;
++ }
++}
++
++QT_END_NAMESPACE
+diff --git a/src/corelib/arch/arch.pri b/src/corelib/arch/arch.pri
+index cd23e5e..f50fca7 100644
+--- a/src/corelib/arch/arch.pri
++++ b/src/corelib/arch/arch.pri
+@@ -31,7 +31,9 @@ integrity:HEADERS += arch/qatomic_integrity.h
+ arch/qatomic_s390.h \
+ arch/qatomic_x86_64.h \
+ arch/qatomic_sh.h \
+- arch/qatomic_sh4a.h
++ arch/qatomic_sh4a.h \
++ arch/qatomic_aarch64.h \
++
+
+ QT_ARCH_CPP = $$QT_SOURCE_TREE/src/corelib/arch/$$QT_ARCH
+ DEPENDPATH += $$QT_ARCH_CPP
+diff --git a/src/corelib/arch/qatomic_aarch64.h b/src/corelib/arch/qatomic_aarch64.h
+new file mode 100644
+index 0000000..de61ca8
+--- /dev/null
++++ b/src/corelib/arch/qatomic_aarch64.h
+@@ -0,0 +1,335 @@
++/****************************************************************************
++**
++** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
++** Contact: http://www.qt-project.org/legal
++**
++** This file is part of the QtCore module of the Qt Toolkit.
++**
++** $QT_BEGIN_LICENSE:LGPL$
++** Commercial License Usage
++** Licensees holding valid commercial Qt licenses may use this file in
++** accordance with the commercial license agreement provided with the
++** Software or, alternatively, in accordance with the terms contained in
++** a written agreement between you and Digia. For licensing terms and
++** conditions see http://qt.digia.com/licensing. For further information
++** use the contact form at http://qt.digia.com/contact-us.
++**
++** GNU Lesser General Public License Usage
++** Alternatively, this file may be used under the terms of the GNU Lesser
++** General Public License version 2.1 as published by the Free Software
++** Foundation and appearing in the file LICENSE.LGPL included in the
++** packaging of this file. Please review the following information to
++** ensure the GNU Lesser General Public License version 2.1 requirements
++** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
++**
++** In addition, as a special exception, Digia gives you certain additional
++** rights. These rights are described in the Digia Qt LGPL Exception
++** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
++**
++** GNU General Public License Usage
++** Alternatively, this file may be used under the terms of the GNU
++** General Public License version 3.0 as published by the Free Software
++** Foundation and appearing in the file LICENSE.GPL included in the
++** packaging of this file. Please review the following information to
++** ensure the GNU General Public License version 3.0 requirements will be
++** met: http://www.gnu.org/copyleft/gpl.html.
++**
++**
++** $QT_END_LICENSE$
++**
++****************************************************************************/
++
++#ifndef QATOMIC_AARCH64_H
++#define QATOMIC_AARCH64_H
++
++QT_BEGIN_HEADER
++
++QT_BEGIN_NAMESPACE
++
++#define Q_ATOMIC_INT_REFERENCE_COUNTING_IS_ALWAYS_NATIVE
++
++inline bool QBasicAtomicInt::isReferenceCountingNative()
++{ return true; }
++inline bool QBasicAtomicInt::isReferenceCountingWaitFree()
++{ return false; }
++
++#define Q_ATOMIC_INT_TEST_AND_SET_IS_ALWAYS_NATIVE
++
++inline bool QBasicAtomicInt::isTestAndSetNative()
++{ return true; }
++inline bool QBasicAtomicInt::isTestAndSetWaitFree()
++{ return false; }
++
++#define Q_ATOMIC_INT_FETCH_AND_STORE_IS_ALWAYS_NATIVE
++
++inline bool QBasicAtomicInt::isFetchAndStoreNative()
++{ return true; }
++inline bool QBasicAtomicInt::isFetchAndStoreWaitFree()
++{ return false; }
++
++#define Q_ATOMIC_INT_FETCH_AND_ADD_IS_ALWAYS_NATIVE
++
++inline bool QBasicAtomicInt::isFetchAndAddNative()
++{ return true; }
++inline bool QBasicAtomicInt::isFetchAndAddWaitFree()
++{ return false; }
++
++#define Q_ATOMIC_POINTER_TEST_AND_SET_IS_ALWAYS_NATIVE
++
++template <typename T>
++Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isTestAndSetNative()
++{ return true; }
++template <typename T>
++Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isTestAndSetWaitFree()
++{ return false; }
++
++#define Q_ATOMIC_POINTER_FETCH_AND_STORE_IS_ALWAYS_NATIVE
++
++template <typename T>
++Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isFetchAndStoreNative()
++{ return true; }
++template <typename T>
++Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isFetchAndStoreWaitFree()
++{ return false; }
++
++#define Q_ATOMIC_POINTER_FETCH_AND_ADD_IS_ALWAYS_NATIVE
++
++template <typename T>
++Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isFetchAndAddNative()
++{ return true; }
++template <typename T>
++Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::isFetchAndAddWaitFree()
++{ return false; }
++
++#ifndef Q_DATA_MEMORY_BARRIER
++# define Q_DATA_MEMORY_BARRIER asm volatile("dmb sy\n":::"memory")
++#endif
++#ifndef Q_COMPILER_MEMORY_BARRIER
++# define Q_COMPILER_MEMORY_BARRIER asm volatile("":::"memory")
++#endif
++
++inline bool QBasicAtomicInt::ref()
++{
++ int newValue;
++
++ Q_COMPILER_MEMORY_BARRIER;
++ newValue = __atomic_add_fetch(&_q_value, 1, __ATOMIC_ACQ_REL);
++ Q_COMPILER_MEMORY_BARRIER;
++
++ return newValue != 0;
++}
++
++inline bool QBasicAtomicInt::deref()
++{
++ int newValue;
++
++ Q_COMPILER_MEMORY_BARRIER;
++ newValue = __atomic_sub_fetch(&_q_value, 1, __ATOMIC_ACQ_REL);
++ Q_COMPILER_MEMORY_BARRIER;
++
++ return newValue != 0;
++}
++
++inline bool QBasicAtomicInt::testAndSetRelaxed(int expectedValue, int newValue)
++{
++ bool val;
++
++ Q_COMPILER_MEMORY_BARRIER;
++ val = __atomic_compare_exchange_n (&_q_value, &expectedValue, newValue,
++ false, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
++ Q_COMPILER_MEMORY_BARRIER;
++ return val;
++}
++
++inline int QBasicAtomicInt::fetchAndStoreRelaxed(int newValue)
++{
++ int val;
++ Q_COMPILER_MEMORY_BARRIER;
++ val = __atomic_exchange_n(&_q_value, newValue, __ATOMIC_RELAXED);
++ Q_COMPILER_MEMORY_BARRIER;
++ return val;
++}
++
++inline int QBasicAtomicInt::fetchAndAddRelaxed(int valueToAdd)
++{
++ int val;
++ Q_COMPILER_MEMORY_BARRIER;
++ val = __atomic_fetch_add(&_q_value, valueToAdd, __ATOMIC_RELAXED);
++ Q_COMPILER_MEMORY_BARRIER;
++ return val;
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetRelaxed(T *expectedValue, T *newValue)
++{
++ bool val;
++ Q_COMPILER_MEMORY_BARRIER;
++ val = __atomic_compare_exchange_n (&_q_value, &expectedValue, newValue,
++ false, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
++ Q_COMPILER_MEMORY_BARRIER;
++ return val;
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreRelaxed(T *newValue)
++{
++ T *val;
++ Q_COMPILER_MEMORY_BARRIER;
++ val = __atomic_exchange_n(&_q_value, newValue, __ATOMIC_RELAXED);
++ Q_COMPILER_MEMORY_BARRIER;
++ return val;
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddRelaxed(qptrdiff valueToAdd)
++{
++ T *val;
++ Q_COMPILER_MEMORY_BARRIER;
++ val = __atomic_fetch_add(&_q_value, valueToAdd, __ATOMIC_RELAXED);
++ Q_COMPILER_MEMORY_BARRIER;
++ return val;
++}
++
++inline bool QBasicAtomicInt::testAndSetAcquire(int expectedValue, int newValue)
++{
++ bool returnValue = testAndSetRelaxed(expectedValue, newValue);
++ Q_DATA_MEMORY_BARRIER;
++ return returnValue;
++}
++
++inline bool QBasicAtomicInt::testAndSetRelease(int expectedValue, int newValue)
++{
++ Q_DATA_MEMORY_BARRIER;
++ return testAndSetRelaxed(expectedValue, newValue);
++}
++
++inline bool QBasicAtomicInt::testAndSetOrdered(int expectedValue, int newValue)
++{
++ Q_DATA_MEMORY_BARRIER;
++ bool returnValue = testAndSetRelaxed(expectedValue, newValue);
++ Q_COMPILER_MEMORY_BARRIER;
++ return returnValue;
++}
++
++inline int QBasicAtomicInt::fetchAndStoreAcquire(int newValue)
++{
++ int returnValue = fetchAndStoreRelaxed(newValue);
++ Q_DATA_MEMORY_BARRIER;
++ return returnValue;
++}
++
++inline int QBasicAtomicInt::fetchAndStoreRelease(int newValue)
++{
++ Q_DATA_MEMORY_BARRIER;
++ return fetchAndStoreRelaxed(newValue);
++}
++
++inline int QBasicAtomicInt::fetchAndStoreOrdered(int newValue)
++{
++ Q_DATA_MEMORY_BARRIER;
++ int returnValue = fetchAndStoreRelaxed(newValue);
++ Q_COMPILER_MEMORY_BARRIER;
++ return returnValue;
++}
++
++inline int QBasicAtomicInt::fetchAndAddAcquire(int valueToAdd)
++{
++ int returnValue = fetchAndAddRelaxed(valueToAdd);
++ Q_DATA_MEMORY_BARRIER;
++ return returnValue;
++}
++
++inline int QBasicAtomicInt::fetchAndAddRelease(int valueToAdd)
++{
++ Q_DATA_MEMORY_BARRIER;
++ return fetchAndAddRelaxed(valueToAdd);
++}
++
++inline int QBasicAtomicInt::fetchAndAddOrdered(int valueToAdd)
++{
++ Q_DATA_MEMORY_BARRIER;
++ int returnValue = fetchAndAddRelaxed(valueToAdd);
++ Q_COMPILER_MEMORY_BARRIER;
++ return returnValue;
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetAcquire(T *expectedValue, T *newValue)
++{
++ bool returnValue = testAndSetRelaxed(expectedValue, newValue);
++ Q_DATA_MEMORY_BARRIER;
++ return returnValue;
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetRelease(T *expectedValue, T *newValue)
++{
++ Q_DATA_MEMORY_BARRIER;
++ return testAndSetRelaxed(expectedValue, newValue);
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE bool QBasicAtomicPointer<T>::testAndSetOrdered(T *expectedValue, T *newValue)
++{
++ Q_DATA_MEMORY_BARRIER;
++ bool returnValue = testAndSetAcquire(expectedValue, newValue);
++ Q_COMPILER_MEMORY_BARRIER;
++ return returnValue;
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreAcquire(T *newValue)
++{
++ T *returnValue = fetchAndStoreRelaxed(newValue);
++ Q_DATA_MEMORY_BARRIER;
++ return returnValue;
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreRelease(T *newValue)
++{
++ Q_DATA_MEMORY_BARRIER;
++ return fetchAndStoreRelaxed(newValue);
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndStoreOrdered(T *newValue)
++{
++ Q_DATA_MEMORY_BARRIER;
++ T *returnValue = fetchAndStoreRelaxed(newValue);
++ Q_COMPILER_MEMORY_BARRIER;
++ return returnValue;
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddAcquire(qptrdiff valueToAdd)
++{
++ T *returnValue = fetchAndAddRelaxed(valueToAdd);
++ Q_DATA_MEMORY_BARRIER;
++ return returnValue;
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddRelease(qptrdiff valueToAdd)
++{
++ Q_DATA_MEMORY_BARRIER;
++ return fetchAndAddRelaxed(valueToAdd);
++}
++
++template <typename T>
++Q_INLINE_TEMPLATE T *QBasicAtomicPointer<T>::fetchAndAddOrdered(qptrdiff valueToAdd)
++{
++ Q_DATA_MEMORY_BARRIER;
++ T *returnValue = fetchAndAddRelaxed(valueToAdd);
++ Q_COMPILER_MEMORY_BARRIER;
++ return returnValue;
++}
++
++#undef Q_DATA_MEMORY_BARRIER
++#undef Q_COMPILER_MEMORY_BARRIER
++
++QT_END_NAMESPACE
++
++QT_END_HEADER
++
++#endif // QATOMIC_AARCH64_H
+diff --git a/src/corelib/arch/qatomic_arch.h b/src/corelib/arch/qatomic_arch.h
+index 141726c..3e96926 100644
+--- a/src/corelib/arch/qatomic_arch.h
++++ b/src/corelib/arch/qatomic_arch.h
+@@ -94,6 +94,8 @@ QT_BEGIN_HEADER
+ # include "QtCore/qatomic_sh4a.h"
+ #elif defined(QT_ARCH_NACL)
+ # include "QtCore/qatomic_generic.h"
++#elif defined(QT_ARCH_AARCH64)
++# include "QtCore/qatomic_aarch64.h"
+ #else
+ # error "Qt has not been ported to this architecture"
+ #endif
+--
+2.1.0
+
diff --git a/meta/recipes-qt/qt4/qt4-4.8.6/0031-aarch64_arm64_mkspecs.patch b/meta/recipes-qt/qt4/qt4-4.8.6/0031-aarch64_arm64_mkspecs.patch
new file mode 100644
index 0000000000..a01e7ada93
--- /dev/null
+++ b/meta/recipes-qt/qt4/qt4-4.8.6/0031-aarch64_arm64_mkspecs.patch
@@ -0,0 +1,124 @@
+From 7090fc80ff630712a90de92403190f647dd38a39 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Lisandro=20Dami=C3=A1n=20Nicanor=20P=C3=A9rez=20Meyer?=
+ <perezmeyer@gmail.com>
+Date: Sat, 15 Mar 2014 15:35:00 -0300
+Subject: [PATCH] mkspecs for AArch64 (aka arm64).
+
+Patch by Marcin Juszkiewicz <marcin@juszkiewicz.com.pl>
+licensed under either Public Domain or BSD:
+
+<https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=735488#179>
+
+Change-Id: I21f17953234cfb176bac023e52ecdc927fc5c1a9
+
+
+Upstream-Status: Pending
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+
+---
+ configure | 3 +++
+ mkspecs/linux-g++-aarch64/qmake.conf | 27 ++++++++++++++++++++
+ mkspecs/linux-g++-aarch64/qplatformdefs.h | 42 +++++++++++++++++++++++++++++++
+ 3 files changed, 72 insertions(+)
+ create mode 100644 mkspecs/linux-g++-aarch64/qmake.conf
+ create mode 100644 mkspecs/linux-g++-aarch64/qplatformdefs.h
+
+diff --git a/configure b/configure
+index a9ba7c8..e57d053 100755
+--- a/configure
++++ b/configure
+@@ -2808,6 +2808,9 @@ if [ "$CFG_EMBEDDED" != "no" ]; then
+ *86_64)
+ PLATFORM=qws/linux-x86_64-g++
+ ;;
++ aarch64)
++ PLATFORM=linux-aarch64-g++
++ ;;
+ *)
+ PLATFORM=qws/linux-generic-g++
+ ;;
+diff --git a/mkspecs/linux-g++-aarch64/qmake.conf b/mkspecs/linux-g++-aarch64/qmake.conf
+new file mode 100644
+index 0000000..ebc0a92
+--- /dev/null
++++ b/mkspecs/linux-g++-aarch64/qmake.conf
+@@ -0,0 +1,27 @@
++#
++# qmake configuration for linux-g++
++#
++# Written for GNU/Linux platforms that have both lib and lib64 directories,
++# like the AMD Opteron.
++#
++
++MAKEFILE_GENERATOR = UNIX
++TARGET_PLATFORM = unix
++TEMPLATE = app
++CONFIG += qt warn_on release incremental link_prl gdb_dwarf_index
++QT += core gui
++QMAKE_INCREMENTAL_STYLE = sublib
++
++QMAKE_CFLAGS =
++QMAKE_LFLAGS =
++
++QMAKE_CFLAGS_RELEASE += -O2
++
++include(../common/linux.conf)
++include(../common/gcc-base-unix.conf)
++include(../common/g++-unix.conf)
++
++QMAKE_LIBDIR_X11 = /usr/X11R6/lib64
++QMAKE_LIBDIR_OPENGL = /usr/X11R6/lib64
++
++load(qt_config)
+diff --git a/mkspecs/linux-g++-aarch64/qplatformdefs.h b/mkspecs/linux-g++-aarch64/qplatformdefs.h
+new file mode 100644
+index 0000000..562128b
+--- /dev/null
++++ b/mkspecs/linux-g++-aarch64/qplatformdefs.h
+@@ -0,0 +1,42 @@
++/****************************************************************************
++**
++** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
++** Contact: http://www.qt-project.org/legal
++**
++** This file is part of the qmake spec of the Qt Toolkit.
++**
++** $QT_BEGIN_LICENSE:LGPL$
++** Commercial License Usage
++** Licensees holding valid commercial Qt licenses may use this file in
++** accordance with the commercial license agreement provided with the
++** Software or, alternatively, in accordance with the terms contained in
++** a written agreement between you and Digia. For licensing terms and
++** conditions see http://qt.digia.com/licensing. For further information
++** use the contact form at http://qt.digia.com/contact-us.
++**
++** GNU Lesser General Public License Usage
++** Alternatively, this file may be used under the terms of the GNU Lesser
++** General Public License version 2.1 as published by the Free Software
++** Foundation and appearing in the file LICENSE.LGPL included in the
++** packaging of this file. Please review the following information to
++** ensure the GNU Lesser General Public License version 2.1 requirements
++** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
++**
++** In addition, as a special exception, Digia gives you certain additional
++** rights. These rights are described in the Digia Qt LGPL Exception
++** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
++**
++** GNU General Public License Usage
++** Alternatively, this file may be used under the terms of the GNU
++** General Public License version 3.0 as published by the Free Software
++** Foundation and appearing in the file LICENSE.GPL included in the
++** packaging of this file. Please review the following information to
++** ensure the GNU General Public License version 3.0 requirements will be
++** met: http://www.gnu.org/copyleft/gpl.html.
++**
++**
++** $QT_END_LICENSE$
++**
++****************************************************************************/
++
++#include "../linux-g++/qplatformdefs.h"
+--
+2.1.0
+
diff --git a/meta/recipes-qt/qt4/qt4-4.8.6/0032-aarch64_add_header.patch b/meta/recipes-qt/qt4/qt4-4.8.6/0032-aarch64_add_header.patch
new file mode 100644
index 0000000000..7132224ecc
--- /dev/null
+++ b/meta/recipes-qt/qt4/qt4-4.8.6/0032-aarch64_add_header.patch
@@ -0,0 +1,18 @@
+Add the missing header for aarch64 and install ../corelib/arch/qatomic_aarch64.h.
+
+Upstream-Status: Pending
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+---
+diff -Nru qt-everywhere-opensource-src-4.8.6.orig/include/QtCore/qatomic_aarch64.h qt-everywhere-opensource-src-4.8.6/include/QtCore/qatomic_aarch64.h
+--- qt-everywhere-opensource-src-4.8.6.orig/include/QtCore/qatomic_aarch64.h 1970-01-01 08:30:00.000000000 +0830
++++ qt-everywhere-opensource-src-4.8.6/include/QtCore/qatomic_aarch64.h 2014-09-09 17:18:03.768352551 +0800
+@@ -0,0 +1 @@
++#include "../../src/corelib/arch/qatomic_aarch64.h"
+--- qt-everywhere-opensource-src-4.8.6/include/QtCore/headers.pri.orig 2014-09-09 18:05:37.100430311 +0800
++++ qt-everywhere-opensource-src-4.8.6/include/QtCore/headers.pri 2014-09-09 18:05:59.112430911 +0800
+@@ -1,3 +1,3 @@
+-SYNCQT.HEADER_FILES = ../corelib/statemachine/qabstractstate.h ../corelib/statemachine/qabstracttransition.h ../corelib/statemachine/qeventtransition.h ../corelib/statemachine/qfinalstate.h ../corelib/statemachine/qhistorystate.h ../corelib/statemachine/qsignaltransition.h ../corelib/statemachine/qstate.h ../corelib/statemachine/qstatemachine.h ../corelib/arch/qatomic_alpha.h ../corelib/arch/qatomic_arch.h ../corelib/arch/qatomic_arm.h ../corelib/arch/qatomic_armv5.h ../corelib/arch/qatomic_armv6.h ../corelib/arch/qatomic_armv7.h ../corelib/arch/qatomic_avr32.h ../corelib/arch/qatomic_bfin.h ../corelib/arch/qatomic_bootstrap.h ../corelib/arch/qatomic_generic.h ../corelib/arch/qatomic_i386.h ../corelib/arch/qatomic_ia64.h ../corelib/arch/qatomic_integrity.h ../corelib/arch/qatomic_m68k.h ../corelib/arch/qatomic_macosx.h ../corelib/arch/qatomic_mips.h ../corelib/arch/qatomic_parisc.h ../corelib/arch/qatomic_powerpc.h ../corelib/arch/qatomic_s390.h ../corelib/arch/qatomic_sh.h ../corelib/arch/qatomic_sh4a.h ../corelib/arch/qatomic_sparc.h ../corelib/arch/qatomic_symbian.h ../corelib/arch/qatomic_vxworks.h ../corelib/arch/qatomic_windows.h ../corelib/arch/qatomic_windowsce.h ../corelib/arch/qatomic_x86_64.h ../corelib/thread/qatomic.h ../corelib/thread/qbasicatomic.h ../corelib/thread/qmutex.h ../corelib/thread/qreadwritelock.h ../corelib/thread/qsemaphore.h ../corelib/thread/qthread.h ../corelib/thread/qthreadstorage.h ../corelib/thread/qwaitcondition.h ../corelib/xml/qxmlstream.h ../corelib/concurrent/qfuture.h ../corelib/concurrent/qfutureinterface.h ../corelib/concurrent/qfuturesynchronizer.h ../corelib/concurrent/qfuturewatcher.h ../corelib/concurrent/qrunnable.h ../corelib/concurrent/qtconcurrentcompilertest.h ../corelib/concurrent/qtconcurrentexception.h ../corelib/concurrent/qtconcurrentfilter.h ../corelib/concurrent/qtconcurrentfilterkernel.h ../corelib/concurrent/qtconcurrentfunctionwrappers.h ../corelib/concurrent/qtconcurrentiteratekernel.h ../corelib/concurrent/qtconcurrentmap.h ../corelib/concurrent/qtconcurrentmapkernel.h ../corelib/concurrent/qtconcurrentmedian.h ../corelib/concurrent/qtconcurrentreducekernel.h ../corelib/concurrent/qtconcurrentresultstore.h ../corelib/concurrent/qtconcurrentrun.h ../corelib/concurrent/qtconcurrentrunbase.h ../corelib/concurrent/qtconcurrentstoredfunctioncall.h ../corelib/concurrent/qtconcurrentthreadengine.h ../corelib/concurrent/qthreadpool.h ../corelib/kernel/qabstracteventdispatcher.h ../corelib/kernel/qabstractitemmodel.h ../corelib/kernel/qbasictimer.h ../corelib/kernel/qcoreapplication.h ../corelib/kernel/qcoreevent.h ../corelib/kernel/qeventloop.h ../corelib/kernel/qfunctions_nacl.h ../corelib/kernel/qfunctions_vxworks.h ../corelib/kernel/qfunctions_wince.h ../corelib/kernel/qmath.h ../corelib/kernel/qmetaobject.h ../corelib/kernel/qmetatype.h ../corelib/kernel/qmimedata.h ../corelib/kernel/qobject.h ../corelib/kernel/qobjectcleanuphandler.h ../corelib/kernel/qobjectdefs.h ../corelib/kernel/qpointer.h ../corelib/kernel/qsharedmemory.h ../corelib/kernel/qsignalmapper.h ../corelib/kernel/qsocketnotifier.h ../corelib/kernel/qsystemsemaphore.h ../corelib/kernel/qtimer.h ../corelib/kernel/qtranslator.h ../corelib/kernel/qvariant.h ../corelib/plugin/qfactoryinterface.h ../corelib/plugin/qlibrary.h ../corelib/plugin/qplugin.h ../corelib/plugin/qpluginloader.h ../corelib/plugin/quuid.h ../corelib/global/qconfig-dist.h ../corelib/global/qconfig-large.h ../corelib/global/qconfig-medium.h ../corelib/global/qconfig-minimal.h 
../corelib/global/qconfig-nacl.h ../corelib/global/qconfig-small.h ../corelib/global/qendian.h ../corelib/global/qfeatures.h ../corelib/global/qglobal.h ../corelib/global/qlibraryinfo.h ../corelib/global/qnamespace.h ../corelib/global/qnumeric.h ../corelib/global/qt_windows.h ../corelib/global/qconfig.h ../corelib/codecs/qtextcodec.h ../corelib/codecs/qtextcodecplugin.h ../corelib/io/qabstractfileengine.h ../corelib/io/qbuffer.h ../corelib/io/qdatastream.h ../corelib/io/qdebug.h ../corelib/io/qdir.h ../corelib/io/qdiriterator.h ../corelib/io/qfile.h ../corelib/io/qfileinfo.h ../corelib/io/qfilesystemwatcher.h ../corelib/io/qfsfileengine.h ../corelib/io/qiodevice.h ../corelib/io/qprocess.h ../corelib/io/qresource.h ../corelib/io/qsettings.h ../corelib/io/qtemporaryfile.h ../corelib/io/qtextstream.h ../corelib/io/qurl.h ../corelib/animation/qabstractanimation.h ../corelib/animation/qanimationgroup.h ../corelib/animation/qparallelanimationgroup.h ../corelib/animation/qpauseanimation.h ../corelib/animation/qpropertyanimation.h ../corelib/animation/qsequentialanimationgroup.h ../corelib/animation/qvariantanimation.h ../corelib/tools/qalgorithms.h ../corelib/tools/qbitarray.h ../corelib/tools/qbytearray.h ../corelib/tools/qbytearraymatcher.h ../corelib/tools/qcache.h ../corelib/tools/qchar.h ../corelib/tools/qcontainerfwd.h ../corelib/tools/qcontiguouscache.h ../corelib/tools/qcryptographichash.h ../corelib/tools/qdatetime.h ../corelib/tools/qeasingcurve.h ../corelib/tools/qelapsedtimer.h ../corelib/tools/qhash.h ../corelib/tools/qiterator.h ../corelib/tools/qline.h ../corelib/tools/qlinkedlist.h ../corelib/tools/qlist.h ../corelib/tools/qlocale.h ../corelib/tools/qlocale_blackberry.h ../corelib/tools/qmap.h ../corelib/tools/qmargins.h ../corelib/tools/qpair.h ../corelib/tools/qpoint.h ../corelib/tools/qqueue.h ../corelib/tools/qrect.h ../corelib/tools/qregexp.h ../corelib/tools/qscopedpointer.h ../corelib/tools/qscopedvaluerollback.h ../corelib/tools/qset.h ../corelib/tools/qshareddata.h ../corelib/tools/qsharedpointer.h ../corelib/tools/qsharedpointer_impl.h ../corelib/tools/qsize.h ../corelib/tools/qstack.h ../corelib/tools/qstring.h ../corelib/tools/qstringbuilder.h ../corelib/tools/qstringlist.h ../corelib/tools/qstringmatcher.h ../corelib/tools/qtextboundaryfinder.h ../corelib/tools/qtimeline.h ../corelib/tools/qvarlengtharray.h ../corelib/tools/qvector.h ../../include/QtCore/QtCore
++SYNCQT.HEADER_FILES = ../corelib/statemachine/qabstractstate.h ../corelib/statemachine/qabstracttransition.h ../corelib/statemachine/qeventtransition.h ../corelib/statemachine/qfinalstate.h ../corelib/statemachine/qhistorystate.h ../corelib/statemachine/qsignaltransition.h ../corelib/statemachine/qstate.h ../corelib/statemachine/qstatemachine.h ../corelib/arch/qatomic_alpha.h ../corelib/arch/qatomic_arch.h ../corelib/arch/qatomic_arm.h ../corelib/arch/qatomic_armv5.h ../corelib/arch/qatomic_armv6.h ../corelib/arch/qatomic_armv7.h ../corelib/arch/qatomic_aarch64.h ../corelib/arch/qatomic_avr32.h ../corelib/arch/qatomic_bfin.h ../corelib/arch/qatomic_bootstrap.h ../corelib/arch/qatomic_generic.h ../corelib/arch/qatomic_i386.h ../corelib/arch/qatomic_ia64.h ../corelib/arch/qatomic_integrity.h ../corelib/arch/qatomic_m68k.h ../corelib/arch/qatomic_macosx.h ../corelib/arch/qatomic_mips.h ../corelib/arch/qatomic_parisc.h ../corelib/arch/qatomic_powerpc.h ../corelib/arch/qatomic_s390.h ../corelib/arch/qatomic_sh.h ../corelib/arch/qatomic_sh4a.h ../corelib/arch/qatomic_sparc.h ../corelib/arch/qatomic_symbian.h ../corelib/arch/qatomic_vxworks.h ../corelib/arch/qatomic_windows.h ../corelib/arch/qatomic_windowsce.h ../corelib/arch/qatomic_x86_64.h ../corelib/thread/qatomic.h ../corelib/thread/qbasicatomic.h ../corelib/thread/qmutex.h ../corelib/thread/qreadwritelock.h ../corelib/thread/qsemaphore.h ../corelib/thread/qthread.h ../corelib/thread/qthreadstorage.h ../corelib/thread/qwaitcondition.h ../corelib/xml/qxmlstream.h ../corelib/concurrent/qfuture.h ../corelib/concurrent/qfutureinterface.h ../corelib/concurrent/qfuturesynchronizer.h ../corelib/concurrent/qfuturewatcher.h ../corelib/concurrent/qrunnable.h ../corelib/concurrent/qtconcurrentcompilertest.h ../corelib/concurrent/qtconcurrentexception.h ../corelib/concurrent/qtconcurrentfilter.h ../corelib/concurrent/qtconcurrentfilterkernel.h ../corelib/concurrent/qtconcurrentfunctionwrappers.h ../corelib/concurrent/qtconcurrentiteratekernel.h ../corelib/concurrent/qtconcurrentmap.h ../corelib/concurrent/qtconcurrentmapkernel.h ../corelib/concurrent/qtconcurrentmedian.h ../corelib/concurrent/qtconcurrentreducekernel.h ../corelib/concurrent/qtconcurrentresultstore.h ../corelib/concurrent/qtconcurrentrun.h ../corelib/concurrent/qtconcurrentrunbase.h ../corelib/concurrent/qtconcurrentstoredfunctioncall.h ../corelib/concurrent/qtconcurrentthreadengine.h ../corelib/concurrent/qthreadpool.h ../corelib/kernel/qabstracteventdispatcher.h ../corelib/kernel/qabstractitemmodel.h ../corelib/kernel/qbasictimer.h ../corelib/kernel/qcoreapplication.h ../corelib/kernel/qcoreevent.h ../corelib/kernel/qeventloop.h ../corelib/kernel/qfunctions_nacl.h ../corelib/kernel/qfunctions_vxworks.h ../corelib/kernel/qfunctions_wince.h ../corelib/kernel/qmath.h ../corelib/kernel/qmetaobject.h ../corelib/kernel/qmetatype.h ../corelib/kernel/qmimedata.h ../corelib/kernel/qobject.h ../corelib/kernel/qobjectcleanuphandler.h ../corelib/kernel/qobjectdefs.h ../corelib/kernel/qpointer.h ../corelib/kernel/qsharedmemory.h ../corelib/kernel/qsignalmapper.h ../corelib/kernel/qsocketnotifier.h ../corelib/kernel/qsystemsemaphore.h ../corelib/kernel/qtimer.h ../corelib/kernel/qtranslator.h ../corelib/kernel/qvariant.h ../corelib/plugin/qfactoryinterface.h ../corelib/plugin/qlibrary.h ../corelib/plugin/qplugin.h ../corelib/plugin/qpluginloader.h ../corelib/plugin/quuid.h ../corelib/global/qconfig-dist.h ../corelib/global/qconfig-large.h ../corelib/global/qconfig-medium.h 
../corelib/global/qconfig-minimal.h ../corelib/global/qconfig-nacl.h ../corelib/global/qconfig-small.h ../corelib/global/qendian.h ../corelib/global/qfeatures.h ../corelib/global/qglobal.h ../corelib/global/qlibraryinfo.h ../corelib/global/qnamespace.h ../corelib/global/qnumeric.h ../corelib/global/qt_windows.h ../corelib/global/qconfig.h ../corelib/codecs/qtextcodec.h ../corelib/codecs/qtextcodecplugin.h ../corelib/io/qabstractfileengine.h ../corelib/io/qbuffer.h ../corelib/io/qdatastream.h ../corelib/io/qdebug.h ../corelib/io/qdir.h ../corelib/io/qdiriterator.h ../corelib/io/qfile.h ../corelib/io/qfileinfo.h ../corelib/io/qfilesystemwatcher.h ../corelib/io/qfsfileengine.h ../corelib/io/qiodevice.h ../corelib/io/qprocess.h ../corelib/io/qresource.h ../corelib/io/qsettings.h ../corelib/io/qtemporaryfile.h ../corelib/io/qtextstream.h ../corelib/io/qurl.h ../corelib/animation/qabstractanimation.h ../corelib/animation/qanimationgroup.h ../corelib/animation/qparallelanimationgroup.h ../corelib/animation/qpauseanimation.h ../corelib/animation/qpropertyanimation.h ../corelib/animation/qsequentialanimationgroup.h ../corelib/animation/qvariantanimation.h ../corelib/tools/qalgorithms.h ../corelib/tools/qbitarray.h ../corelib/tools/qbytearray.h ../corelib/tools/qbytearraymatcher.h ../corelib/tools/qcache.h ../corelib/tools/qchar.h ../corelib/tools/qcontainerfwd.h ../corelib/tools/qcontiguouscache.h ../corelib/tools/qcryptographichash.h ../corelib/tools/qdatetime.h ../corelib/tools/qeasingcurve.h ../corelib/tools/qelapsedtimer.h ../corelib/tools/qhash.h ../corelib/tools/qiterator.h ../corelib/tools/qline.h ../corelib/tools/qlinkedlist.h ../corelib/tools/qlist.h ../corelib/tools/qlocale.h ../corelib/tools/qlocale_blackberry.h ../corelib/tools/qmap.h ../corelib/tools/qmargins.h ../corelib/tools/qpair.h ../corelib/tools/qpoint.h ../corelib/tools/qqueue.h ../corelib/tools/qrect.h ../corelib/tools/qregexp.h ../corelib/tools/qscopedpointer.h ../corelib/tools/qscopedvaluerollback.h ../corelib/tools/qset.h ../corelib/tools/qshareddata.h ../corelib/tools/qsharedpointer.h ../corelib/tools/qsharedpointer_impl.h ../corelib/tools/qsize.h ../corelib/tools/qstack.h ../corelib/tools/qstring.h ../corelib/tools/qstringbuilder.h ../corelib/tools/qstringlist.h ../corelib/tools/qstringmatcher.h ../corelib/tools/qtextboundaryfinder.h ../corelib/tools/qtimeline.h ../corelib/tools/qvarlengtharray.h ../corelib/tools/qvector.h ../../include/QtCore/QtCore
+ SYNCQT.HEADER_CLASSES = ../../include/QtCore/QAbstractState ../../include/QtCore/QAbstractTransition ../../include/QtCore/QEventTransition ../../include/QtCore/QFinalState ../../include/QtCore/QHistoryState ../../include/QtCore/QSignalTransition ../../include/QtCore/QState ../../include/QtCore/QStateMachine ../../include/QtCore/QAtomicInt ../../include/QtCore/QAtomicPointer ../../include/QtCore/QBasicAtomicInt ../../include/QtCore/QBasicAtomicPointer ../../include/QtCore/QMutex ../../include/QtCore/QMutexLocker ../../include/QtCore/QMutexData ../../include/QtCore/QReadWriteLock ../../include/QtCore/QReadLocker ../../include/QtCore/QWriteLocker ../../include/QtCore/QSemaphore ../../include/QtCore/QThread ../../include/QtCore/QThreadStorageData ../../include/QtCore/QThreadStorage ../../include/QtCore/QWaitCondition ../../include/QtCore/QXmlStreamStringRef ../../include/QtCore/QXmlStreamAttribute ../../include/QtCore/QXmlStreamAttributes ../../include/QtCore/QXmlStreamNamespaceDeclaration ../../include/QtCore/QXmlStreamNamespaceDeclarations ../../include/QtCore/QXmlStreamNotationDeclaration ../../include/QtCore/QXmlStreamNotationDeclarations ../../include/QtCore/QXmlStreamEntityDeclaration ../../include/QtCore/QXmlStreamEntityDeclarations ../../include/QtCore/QXmlStreamEntityResolver ../../include/QtCore/QXmlStreamReader ../../include/QtCore/QXmlStreamWriter ../../include/QtCore/QFuture ../../include/QtCore/QFutureIterator ../../include/QtCore/QMutableFutureIterator ../../include/QtCore/QFutureInterfaceBase ../../include/QtCore/QFutureInterface ../../include/QtCore/QFutureSynchronizer ../../include/QtCore/QFutureWatcherBase ../../include/QtCore/QFutureWatcher ../../include/QtCore/QRunnable ../../include/QtCore/QtConcurrentFilter ../../include/QtCore/QtConcurrentMap ../../include/QtCore/QtConcurrentRun ../../include/QtCore/QThreadPool ../../include/QtCore/QAbstractEventDispatcher ../../include/QtCore/QModelIndex ../../include/QtCore/QPersistentModelIndex ../../include/QtCore/QModelIndexList ../../include/QtCore/QAbstractItemModel ../../include/QtCore/QAbstractTableModel ../../include/QtCore/QAbstractListModel ../../include/QtCore/QBasicTimer ../../include/QtCore/QCoreApplication ../../include/QtCore/QtCleanUpFunction ../../include/QtCore/QEvent ../../include/QtCore/QTimerEvent ../../include/QtCore/QChildEvent ../../include/QtCore/QCustomEvent ../../include/QtCore/QDynamicPropertyChangeEvent ../../include/QtCore/QEventLoop ../../include/QtCore/QMetaMethod ../../include/QtCore/QMetaEnum ../../include/QtCore/QMetaProperty ../../include/QtCore/QMetaClassInfo ../../include/QtCore/QMetaType ../../include/QtCore/QMetaTypeId ../../include/QtCore/QMetaTypeId2 ../../include/QtCore/QMimeData ../../include/QtCore/QObjectList ../../include/QtCore/QObjectData ../../include/QtCore/QObject ../../include/QtCore/QObjectUserData ../../include/QtCore/QObjectCleanupHandler ../../include/QtCore/QGenericArgument ../../include/QtCore/QGenericReturnArgument ../../include/QtCore/QArgument ../../include/QtCore/QReturnArgument ../../include/QtCore/QMetaObject ../../include/QtCore/QMetaObjectAccessor ../../include/QtCore/QMetaObjectExtraData ../../include/QtCore/QPointer ../../include/QtCore/QSharedMemory ../../include/QtCore/QSignalMapper ../../include/QtCore/QSocketNotifier ../../include/QtCore/QSystemSemaphore ../../include/QtCore/QTimer ../../include/QtCore/QTranslator ../../include/QtCore/QVariant ../../include/QtCore/QVariantList ../../include/QtCore/QVariantMap ../../include/QtCore/QVariantHash 
../../include/QtCore/QVariantComparisonHelper ../../include/QtCore/QFactoryInterface ../../include/QtCore/QLibrary ../../include/QtCore/QtPlugin ../../include/QtCore/QtPluginInstanceFunction ../../include/QtCore/QPluginLoader ../../include/QtCore/QUuid ../../include/QtCore/QtEndian ../../include/QtCore/QtGlobal ../../include/QtCore/QIntegerForSize ../../include/QtCore/QNoImplicitBoolCast ../../include/QtCore/Q_INT8 ../../include/QtCore/Q_UINT8 ../../include/QtCore/Q_INT16 ../../include/QtCore/Q_UINT16 ../../include/QtCore/Q_INT32 ../../include/QtCore/Q_UINT32 ../../include/QtCore/Q_INT64 ../../include/QtCore/Q_UINT64 ../../include/QtCore/Q_LLONG ../../include/QtCore/Q_ULLONG ../../include/QtCore/Q_LONG ../../include/QtCore/Q_ULONG ../../include/QtCore/QSysInfo ../../include/QtCore/QtMsgHandler ../../include/QtCore/QGlobalStatic ../../include/QtCore/QGlobalStaticDeleter ../../include/QtCore/QBool ../../include/QtCore/QTypeInfo ../../include/QtCore/QFlag ../../include/QtCore/QIncompatibleFlag ../../include/QtCore/QFlags ../../include/QtCore/QForeachContainer ../../include/QtCore/QForeachContainerBase ../../include/QtCore/QLibraryInfo ../../include/QtCore/Qt ../../include/QtCore/QInternal ../../include/QtCore/QCOORD ../../include/QtCore/QtConfig ../../include/QtCore/QTextCodec ../../include/QtCore/QTextEncoder ../../include/QtCore/QTextDecoder ../../include/QtCore/QTextCodecFactoryInterface ../../include/QtCore/QTextCodecPlugin ../../include/QtCore/QAbstractFileEngine ../../include/QtCore/QAbstractFileEngineHandler ../../include/QtCore/QAbstractFileEngineIterator ../../include/QtCore/QBuffer ../../include/QtCore/QDataStream ../../include/QtCore/QtDebug ../../include/QtCore/QDebug ../../include/QtCore/QNoDebug ../../include/QtCore/QDir ../../include/QtCore/QDirIterator ../../include/QtCore/QFile ../../include/QtCore/QFileInfo ../../include/QtCore/QFileInfoList ../../include/QtCore/QFileInfoListIterator ../../include/QtCore/QFileSystemWatcher ../../include/QtCore/QFSFileEngine ../../include/QtCore/QIODevice ../../include/QtCore/Q_PID ../../include/QtCore/QProcessEnvironment ../../include/QtCore/QProcess ../../include/QtCore/QResource ../../include/QtCore/QSettings ../../include/QtCore/QTemporaryFile ../../include/QtCore/QTextStream ../../include/QtCore/QTextStreamFunction ../../include/QtCore/QTextStreamManipulator ../../include/QtCore/QTS ../../include/QtCore/QTextIStream ../../include/QtCore/QTextOStream ../../include/QtCore/QUrl ../../include/QtCore/QAbstractAnimation ../../include/QtCore/QAnimationDriver ../../include/QtCore/QAnimationGroup ../../include/QtCore/QParallelAnimationGroup ../../include/QtCore/QPauseAnimation ../../include/QtCore/QPropertyAnimation ../../include/QtCore/QSequentialAnimationGroup ../../include/QtCore/QVariantAnimation ../../include/QtCore/QtAlgorithms ../../include/QtCore/QBitArray ../../include/QtCore/QBitRef ../../include/QtCore/QByteArray ../../include/QtCore/QByteRef ../../include/QtCore/QByteArrayMatcher ../../include/QtCore/QCache ../../include/QtCore/QLatin1Char ../../include/QtCore/QChar ../../include/QtCore/QtContainerFwd ../../include/QtCore/QContiguousCacheData ../../include/QtCore/QContiguousCacheTypedData ../../include/QtCore/QContiguousCache ../../include/QtCore/QCryptographicHash ../../include/QtCore/QDate ../../include/QtCore/QTime ../../include/QtCore/QDateTime ../../include/QtCore/QEasingCurve ../../include/QtCore/QElapsedTimer ../../include/QtCore/QHashData ../../include/QtCore/QHashDummyValue ../../include/QtCore/QHashDummyNode 
../../include/QtCore/QHashNode ../../include/QtCore/QHash ../../include/QtCore/QMultiHash ../../include/QtCore/QHashIterator ../../include/QtCore/QMutableHashIterator ../../include/QtCore/QLine ../../include/QtCore/QLineF ../../include/QtCore/QLinkedListData ../../include/QtCore/QLinkedListNode ../../include/QtCore/QLinkedList ../../include/QtCore/QLinkedListIterator ../../include/QtCore/QMutableLinkedListIterator ../../include/QtCore/QListData ../../include/QtCore/QList ../../include/QtCore/QListIterator ../../include/QtCore/QMutableListIterator ../../include/QtCore/QSystemLocale ../../include/QtCore/QLocale ../../include/QtCore/QBBSystemLocaleData ../../include/QtCore/QMapData ../../include/QtCore/QMapNode ../../include/QtCore/QMapPayloadNode ../../include/QtCore/QMap ../../include/QtCore/QMultiMap ../../include/QtCore/QMapIterator ../../include/QtCore/QMutableMapIterator ../../include/QtCore/QMargins ../../include/QtCore/QPair ../../include/QtCore/QPoint ../../include/QtCore/QPointF ../../include/QtCore/QQueue ../../include/QtCore/QRect ../../include/QtCore/QRectF ../../include/QtCore/QRegExp ../../include/QtCore/QScopedPointerDeleter ../../include/QtCore/QScopedPointerArrayDeleter ../../include/QtCore/QScopedPointerPodDeleter ../../include/QtCore/QScopedPointer ../../include/QtCore/QScopedArrayPointer ../../include/QtCore/QScopedValueRollback ../../include/QtCore/QSet ../../include/QtCore/QSetIterator ../../include/QtCore/QMutableSetIterator ../../include/QtCore/QSharedData ../../include/QtCore/QSharedDataPointer ../../include/QtCore/QExplicitlySharedDataPointer ../../include/QtCore/QSharedPointer ../../include/QtCore/QWeakPointer ../../include/QtCore/QSize ../../include/QtCore/QSizeF ../../include/QtCore/QStack ../../include/QtCore/QStdWString ../../include/QtCore/QString ../../include/QtCore/QLatin1String ../../include/QtCore/QCharRef ../../include/QtCore/QConstString ../../include/QtCore/QStringRef ../../include/QtCore/QLatin1Literal ../../include/QtCore/QAbstractConcatenable ../../include/QtCore/QConcatenable ../../include/QtCore/QStringBuilder ../../include/QtCore/QStringListIterator ../../include/QtCore/QMutableStringListIterator ../../include/QtCore/QStringList ../../include/QtCore/QStringMatcher ../../include/QtCore/QTextBoundaryFinder ../../include/QtCore/QTimeLine ../../include/QtCore/QVarLengthArray ../../include/QtCore/QVectorData ../../include/QtCore/QVectorTypedData ../../include/QtCore/QVector ../../include/QtCore/QVectorIterator ../../include/QtCore/QMutableVectorIterator
+ SYNCQT.PRIVATE_HEADER_FILES = ../corelib/statemachine/qabstractstate_p.h ../corelib/statemachine/qabstracttransition_p.h ../corelib/statemachine/qeventtransition_p.h ../corelib/statemachine/qhistorystate_p.h ../corelib/statemachine/qsignaleventgenerator_p.h ../corelib/statemachine/qsignaltransition_p.h ../corelib/statemachine/qstate_p.h ../corelib/statemachine/qstatemachine_p.h ../corelib/thread/qmutex_p.h ../corelib/thread/qmutexpool_p.h ../corelib/thread/qorderedmutexlocker_p.h ../corelib/thread/qreadwritelock_p.h ../corelib/thread/qthread_p.h ../corelib/xml/qxmlstream_p.h ../corelib/xml/qxmlutils_p.h ../corelib/concurrent/qfutureinterface_p.h ../corelib/concurrent/qfuturewatcher_p.h ../corelib/concurrent/qthreadpool_p.h ../corelib/kernel/qabstracteventdispatcher_p.h ../corelib/kernel/qabstractitemmodel_p.h ../corelib/kernel/qcore_mac_p.h ../corelib/kernel/qcore_symbian_p.h ../corelib/kernel/qcore_unix_p.h ../corelib/kernel/qcoreapplication_p.h ../corelib/kernel/qcorecmdlineargs_p.h ../corelib/kernel/qcoreglobaldata_p.h ../corelib/kernel/qcrashhandler_p.h ../corelib/kernel/qeventdispatcher_blackberry_p.h ../corelib/kernel/qeventdispatcher_glib_p.h ../corelib/kernel/qeventdispatcher_symbian_p.h ../corelib/kernel/qeventdispatcher_unix_p.h ../corelib/kernel/qeventdispatcher_win_p.h ../corelib/kernel/qfunctions_p.h ../corelib/kernel/qmetaobject_p.h ../corelib/kernel/qobject_p.h ../corelib/kernel/qsharedmemory_p.h ../corelib/kernel/qsystemerror_p.h ../corelib/kernel/qsystemsemaphore_p.h ../corelib/kernel/qtranslator_p.h ../corelib/kernel/qvariant_p.h ../corelib/kernel/qwineventnotifier_p.h ../corelib/plugin/qelfparser_p.h ../corelib/plugin/qfactoryloader_p.h ../corelib/plugin/qlibrary_p.h ../corelib/plugin/qsystemlibrary_p.h ../corelib/global/qnumeric_p.h ../corelib/global/qt_pch.h ../corelib/codecs/qfontlaocodec_p.h ../corelib/codecs/qiconvcodec_p.h ../corelib/codecs/qisciicodec_p.h ../corelib/codecs/qlatincodec_p.h ../corelib/codecs/qsimplecodec_p.h ../corelib/codecs/qtextcodec_p.h ../corelib/codecs/qtsciicodec_p.h ../corelib/codecs/qutfcodec_p.h ../corelib/io/qabstractfileengine_p.h ../corelib/io/qdatastream_p.h ../corelib/io/qdataurl_p.h ../corelib/io/qdir_p.h ../corelib/io/qfile_p.h ../corelib/io/qfileinfo_p.h ../corelib/io/qfilesystemengine_p.h ../corelib/io/qfilesystementry_p.h ../corelib/io/qfilesystemiterator_p.h ../corelib/io/qfilesystemmetadata_p.h ../corelib/io/qfilesystemwatcher_dnotify_p.h ../corelib/io/qfilesystemwatcher_fsevents_p.h ../corelib/io/qfilesystemwatcher_inotify_p.h ../corelib/io/qfilesystemwatcher_kqueue_p.h ../corelib/io/qfilesystemwatcher_p.h ../corelib/io/qfilesystemwatcher_symbian_p.h ../corelib/io/qfilesystemwatcher_win_p.h ../corelib/io/qfsfileengine_iterator_p.h ../corelib/io/qfsfileengine_p.h ../corelib/io/qiodevice_p.h ../corelib/io/qnoncontiguousbytedevice_p.h ../corelib/io/qprocess_p.h ../corelib/io/qresource_iterator_p.h ../corelib/io/qresource_p.h ../corelib/io/qsettings_p.h ../corelib/io/qtldurl_p.h ../corelib/io/qurltlds_p.h ../corelib/io/qwindowspipewriter_p.h ../corelib/animation/qabstractanimation_p.h ../corelib/animation/qanimationgroup_p.h ../corelib/animation/qparallelanimationgroup_p.h ../corelib/animation/qpropertyanimation_p.h ../corelib/animation/qsequentialanimationgroup_p.h ../corelib/animation/qvariantanimation_p.h ../corelib/tools/qbytedata_p.h ../corelib/tools/qdatetime_p.h ../corelib/tools/qharfbuzz_p.h ../corelib/tools/qlocale_data_p.h ../corelib/tools/qlocale_p.h ../corelib/tools/qlocale_tools_p.h ../corelib/tools/qpodlist_p.h 
../corelib/tools/qringbuffer_p.h ../corelib/tools/qscopedpointer_p.h ../corelib/tools/qsimd_p.h ../corelib/tools/qtools_p.h ../corelib/tools/qunicodetables_p.h
diff --git a/meta/recipes-qt/qt4/qt4-native.inc b/meta/recipes-qt/qt4/qt4-native.inc
index bb16da666a..70bda7358f 100644
--- a/meta/recipes-qt/qt4/qt4-native.inc
+++ b/meta/recipes-qt/qt4/qt4-native.inc
@@ -18,6 +18,10 @@ SRC_URI = "http://download.qt-project.org/official_releases/qt/4.8/${PV}/qt-ever
file://0002-qkbdtty_qws-fix-build-with-old-kernel-headers.patch \
file://0003-webkit2-set-OUTPUT_DIR-value-if-empty.patch \
file://0021-configure-make-qt4-native-work-with-long-building-pa.patch \
+ file://0029-aarch64_arm64_fix_arch_detection.patch \
+ file://0030-aarch64_arm64_qatomic_support.patch \
+ file://0031-aarch64_arm64_mkspecs.patch \
+ file://0032-aarch64_add_header.patch \
file://g++.conf \
file://linux.conf \
"
diff --git a/meta/recipes-qt/qt4/qt4.inc b/meta/recipes-qt/qt4/qt4.inc
index 75a580c25f..09f6d93757 100644
--- a/meta/recipes-qt/qt4/qt4.inc
+++ b/meta/recipes-qt/qt4/qt4.inc
@@ -134,7 +134,7 @@ FILES_${QT_BASE_NAME}-demos-doc = "${docdir}/${QT_DIR_NAME}/qch/qt.qch"
RRECOMMENDS_${PN} = "${LIB_PACKAGES} ${OTHER_PACKAGES}"
RRECOMMENDS_${PN}-dev = "${DEV_PACKAGES}"
RRECOMMENDS_${PN}-dbg = "${DBG_PACKAGES}"
-RRECOMMENDS_${QT_BASE_LIB}core4_append_libc-glibc = " eglibc-gconv-utf-16"
+RRECOMMENDS_${QT_BASE_LIB}core4_append_libc-glibc = " glibc-gconv-utf-16"
RRECOMMENDS_${QT_BASE_NAME}-demos += " \
${QT_BASE_NAME}-examples \
${QT_BASE_NAME}-plugin-sqldriver-sqlite \
diff --git a/meta/recipes-qt/qt4/qt4_arch.inc b/meta/recipes-qt/qt4/qt4_arch.inc
index c1d35ab726..07512db713 100644
--- a/meta/recipes-qt/qt4/qt4_arch.inc
+++ b/meta/recipes-qt/qt4/qt4_arch.inc
@@ -4,6 +4,7 @@ ARM_INSTRUCTION_SET = "arm"
set_arch() {
case ${TARGET_ARCH} in
+ aarch64) QT_ARCH=aarch64 ;;
arm*) QT_ARCH=arm ;;
i*86*) QT_ARCH=i386 ;;
mips*) QT_ARCH=mips ;;
diff --git a/meta/recipes-sato/images/core-image-sato-sdk.bb b/meta/recipes-sato/images/core-image-sato-sdk.bb
index 6d4b96a1f9..1c4f328459 100644
--- a/meta/recipes-sato/images/core-image-sato-sdk.bb
+++ b/meta/recipes-sato/images/core-image-sato-sdk.bb
@@ -10,5 +10,5 @@ QT4PKG_mips64 = ""
IMAGE_FEATURES += "dev-pkgs tools-sdk ${QT4PKG} \
tools-debug eclipse-debug tools-profile tools-testapps debug-tweaks ssh-server-openssh"
-IMAGE_INSTALL += "kernel-dev"
+IMAGE_INSTALL += "kernel-devsrc"
diff --git a/meta/recipes-sato/midori/midori_0.5.5.bb b/meta/recipes-sato/midori/midori_0.5.5.bb
deleted file mode 100644
index a9379e7fff..0000000000
--- a/meta/recipes-sato/midori/midori_0.5.5.bb
+++ /dev/null
@@ -1,47 +0,0 @@
-SUMMARY = "A lightweight web browser"
-HOMEPAGE = "http://www.twotoasts.de/index.php?/pages/midori_summary.html"
-LICENSE = "LGPLv2.1"
-LIC_FILES_CHKSUM = "file://COPYING;md5=fbc093901857fcd118f065f900982c24"
-DEPENDS = "webkit-gtk libsoup-2.4 openssl python-native python-docutils-native librsvg-native libnotify libunique libxscrnsaver"
-
-SRC_URI = "http://www.midori-browser.org/downloads/${BPN}_${PV}_all_.tar.bz2 \
-"
-
-SRC_URI[md5sum] = "b99e87d4b73a4732ed1c1e591f0242ac"
-SRC_URI[sha256sum] = "ca69382a285222a86028abebd73fed1976735883027ff0adc094b627789bbd62"
-
-# midori depends on webkit-gtk, and webkit-gtk can NOT be built on
-# MIPS64 with n32 ABI. So remove it from mips64 n32 temporarily.
-COMPATIBLE_HOST_mips64 = "mips64.*-linux$"
-
-inherit gtk-icon-cache pkgconfig vala pythonnative
-
-do_configure() {
- sed -i -e 's:, shell=False::g' -e s:/usr/X11R6/include::g -e s:/usr/X11R6/lib::g wscript
- ./configure \
- --prefix=${prefix} \
- --bindir=${bindir} \
- --sbindir=${sbindir} \
- --libexecdir=${libexecdir} \
- --datadir=${datadir} \
- --sysconfdir=${sysconfdir} \
- --sharedstatedir=${sharedstatedir} \
- --localstatedir=${localstatedir} \
- --libdir=${libdir} \
- --includedir=${includedir} \
- --infodir=${infodir} \
- --mandir=${mandir} \
- --disable-gtk3 \
- --disable-zeitgeist \
-}
-
-PARALLEL_MAKE = ""
-TARGET_CC_ARCH += "${LDFLAGS}"
-
-do_install() {
- oe_runmake DESTDIR=${D} install
-}
-
-RRECOMMENDS_${PN} += "glib-networking ca-certificates gnome-icon-theme"
-
-FILES_${PN}-dev += "${datadir}/vala/vapi"
diff --git a/meta/recipes-sato/midori/midori_0.5.8.bb b/meta/recipes-sato/midori/midori_0.5.8.bb
new file mode 100644
index 0000000000..299ecb324c
--- /dev/null
+++ b/meta/recipes-sato/midori/midori_0.5.8.bb
@@ -0,0 +1,26 @@
+SUMMARY = "A lightweight web browser"
+HOMEPAGE = "http://midori-browser.org/"
+LICENSE = "LGPLv2.1"
+LIC_FILES_CHKSUM = "file://COPYING;md5=fbc093901857fcd118f065f900982c24"
+DEPENDS = "webkit-gtk libsoup-2.4 openssl python-native python-docutils-native librsvg-native libnotify libxscrnsaver"
+
+SRC_URI = "https://launchpad.net/midori/trunk/0.5.8/+download/${BPN}-${PV}.tar.bz2;subdir=${BPN}-${PV}"
+SRC_URI[md5sum] = "b89e25e74199d705e74767499a415976"
+SRC_URI[sha256sum] = "af19135fd4c4b04345df4d3592e7939c20f9b40eaca24550e6cb619751aa9381"
+
+# midori depends on webkit-gtk, and webkit-gtk can NOT be built on
+# MIPS64 with the n32 ABI, so exclude midori on mips64 n32 temporarily.
+COMPATIBLE_HOST_mips64 = "mips64.*-linux$"
+
+inherit gtk-icon-cache pkgconfig vala pythonnative cmake
+
+EXTRA_OECMAKE = " \
+ -DCMAKE_INSTALL_PREFIX=${prefix} \
+ -DUSE_ZEITGEIST=0 \
+"
+
+TARGET_CC_ARCH += "${LDFLAGS}"
+
+RRECOMMENDS_${PN} += "glib-networking ca-certificates gnome-icon-theme"
+
+FILES_${PN} += "${datadir}/appdata"
diff --git a/meta/recipes-sato/packagegroups/packagegroup-core-x11-sato.bb b/meta/recipes-sato/packagegroups/packagegroup-core-x11-sato.bb
index 2d23fe48b2..bc3e0fd260 100644
--- a/meta/recipes-sato/packagegroups/packagegroup-core-x11-sato.bb
+++ b/meta/recipes-sato/packagegroups/packagegroup-core-x11-sato.bb
@@ -6,10 +6,10 @@ SUMMARY = "Sato desktop"
LICENSE = "MIT"
PR = "r33"
-inherit packagegroup
-
PACKAGE_ARCH = "${MACHINE_ARCH}"
+inherit packagegroup
+
PACKAGES = "${PN} ${PN}-base ${PN}-apps ${PN}-games"
RDEPENDS_${PN} = "\
diff --git a/meta/recipes-sato/puzzles/files/fix-compiling-failure-with-option-g-O.patch b/meta/recipes-sato/puzzles/files/fix-compiling-failure-with-option-g-O.patch
new file mode 100644
index 0000000000..d246feeb09
--- /dev/null
+++ b/meta/recipes-sato/puzzles/files/fix-compiling-failure-with-option-g-O.patch
@@ -0,0 +1,52 @@
+gtk.c: fix compile failure with the options -g -O
+
+There were compile failures when building with the options -g -O:
+...
+././gtk.c: In function 'configure_area':
+././gtk.c:397:2: error: 'cr' may be used uninitialized in this function [-Werror=maybe-uninitialized]
+ cairo_set_source_rgb(cr,
+ ^
+././gtk.c:384:14: note: 'cr' was declared here
+ cairo_t *cr;
+ ^
+././gtk.c: In function 'main':
+././gtk.c:2911:6: error: 'error' may be used uninitialized in this function [-Werror=maybe-uninitialized]
+ fprintf(stderr, "%s: %s\n", pname, error);
+ ^
+cc1: all warnings being treated as errors
+...
+
+Initialize the pointers 'cr' and 'error' to NULL.
+
+Upstream-Status: Pending
+
+Signed-off-by: Hongxu Jia <hongxu.jia@windriver.com>
+---
+ gtk.c | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/gtk.c b/gtk.c
+index a2eba2c..c54bf63 100644
+--- a/gtk.c
++++ b/gtk.c
+@@ -381,7 +381,7 @@ static void clear_backing_store(frontend *fe)
+
+ static void setup_backing_store(frontend *fe)
+ {
+- cairo_t *cr;
++ cairo_t *cr = NULL;
+ int i;
+
+ fe->pixmap = gdk_pixmap_new(fe->area->window, fe->pw, fe->ph, -1);
+@@ -2481,7 +2481,7 @@ char *fgetline(FILE *fp)
+ int main(int argc, char **argv)
+ {
+ char *pname = argv[0];
+- char *error;
++ char *error = NULL;
+ int ngenerate = 0, print = FALSE, px = 1, py = 1;
+ int time_generation = FALSE, test_solve = FALSE, list_presets = FALSE;
+ int soln = FALSE, colour = FALSE;
+--
+1.9.1
+
diff --git a/meta/recipes-sato/puzzles/puzzles_r10116.bb b/meta/recipes-sato/puzzles/puzzles_r10116.bb
index ef5392b68f..6ee4d962b2 100644
--- a/meta/recipes-sato/puzzles/puzzles_r10116.bb
+++ b/meta/recipes-sato/puzzles/puzzles_r10116.bb
@@ -7,7 +7,9 @@ MOD_PV = "${@d.getVar('PV',1)[1:]}"
LICENSE = "MIT"
LIC_FILES_CHKSUM = "file://LICENCE;md5=33bcd4bce8f3c197f2aefbdbd2d299bc"
-SRC_URI = "svn://svn.tartarus.org/sgt;module=puzzles;rev=${MOD_PV}"
+SRC_URI = "svn://svn.tartarus.org/sgt;module=puzzles;rev=${MOD_PV} \
+ file://fix-compiling-failure-with-option-g-O.patch \
+"
S = "${WORKDIR}/${BPN}"
diff --git a/meta/recipes-support/apr/apr-util_1.5.3.bb b/meta/recipes-support/apr/apr-util_1.5.4.bb
index 063a787396..1cd47b1468 100644
--- a/meta/recipes-support/apr/apr-util_1.5.3.bb
+++ b/meta/recipes-support/apr/apr-util_1.5.4.bb
@@ -15,8 +15,8 @@ SRC_URI = "${APACHE_MIRROR}/apr/${BPN}-${PV}.tar.gz \
file://run-ptest \
"
-SRC_URI[md5sum] = "71a11d037240b292f824ba1eb537b4e3"
-SRC_URI[sha256sum] = "76db34cb508e346e3bf69347c29ed1500bf0b71bcc48d54271ad9d1c25703743"
+SRC_URI[md5sum] = "866825c04da827c6e5f53daff5569f42"
+SRC_URI[sha256sum] = "976a12a59bc286d634a21d7be0841cc74289ea9077aa1af46be19d1a6e844c19"
EXTRA_OECONF = "--with-apr=${STAGING_BINDIR_CROSS}/apr-1-config \
--without-odbc \
diff --git a/meta/recipes-support/apr/apr_1.5.1.bb b/meta/recipes-support/apr/apr_1.5.1.bb
index 2720b523f3..a27b2338d1 100644
--- a/meta/recipes-support/apr/apr_1.5.1.bb
+++ b/meta/recipes-support/apr/apr_1.5.1.bb
@@ -32,11 +32,17 @@ CACHED_CONFIGUREVARS += "apr_cv_mutex_recursive=yes"
CACHED_CONFIGUREVARS += "ac_cv_header_netinet_sctp_h=no ac_cv_header_netinet_sctp_uio_h=no"
do_configure_prepend() {
+	# Avoid absolute paths for grep since they cause failures
+ # when using sstate between different hosts with different
+ # install paths for grep.
+ export GREP="grep"
+
cd ${S}
./buildconf
}
FILES_${PN}-dev += "${libdir}/apr.exp ${datadir}/build-1/*"
+RDEPENDS_${PN}-dev += "bash"
#for some reason, build/libtool.m4 handled by buildconf still be overwritten
#when autoconf, so handle it again.
@@ -85,3 +91,5 @@ do_install_ptest() {
cp ${S}/test/testall $t/
cp ${S}/test/tryread $t/
}
+
+export CONFIG_SHELL="/bin/bash"
diff --git a/meta/recipes-support/aspell/aspell_0.60.6.1.bb b/meta/recipes-support/aspell/aspell_0.60.6.1.bb
index 56dabd38eb..a4f0e140b2 100644
--- a/meta/recipes-support/aspell/aspell_0.60.6.1.bb
+++ b/meta/recipes-support/aspell/aspell_0.60.6.1.bb
@@ -15,6 +15,8 @@ PACKAGECONFIG[curses] = "--enable-curses,--disable-curses,ncurses"
PACKAGES += "libaspell libpspell libpspell-dev aspell-utils"
+RDEPENDS_${PN}-utils += "perl"
+
FILES_${PN}-dbg += "${libdir}/aspell-0.60/.debu*"
FILES_libaspell = "${libdir}/libaspell.so.* ${libdir}/aspell*"
FILES_aspell-utils = "${bindir}/word-list-compress ${bindir}/aspell-import ${bindir}/run-with-aspell ${bindir}/pre*"
diff --git a/meta/recipes-support/atk/at-spi2-core_2.12.0.bb b/meta/recipes-support/atk/at-spi2-core_2.12.0.bb
index 31e133fe43..e67155ccbd 100644
--- a/meta/recipes-support/atk/at-spi2-core_2.12.0.bb
+++ b/meta/recipes-support/atk/at-spi2-core_2.12.0.bb
@@ -4,7 +4,10 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=e9f288ba982d60518f375b5898283886"
MAJ_VER = "${@oe.utils.trim_version("${PV}", 2)}"
-SRC_URI = "${GNOME_MIRROR}/${BPN}/${MAJ_VER}/${BPN}-${PV}.tar.xz"
+SRC_URI = "${GNOME_MIRROR}/${BPN}/${MAJ_VER}/${BPN}-${PV}.tar.xz \
+ file://core_acinclude_m4.patch \
+ "
+
SRC_URI[md5sum] = "b12ad0e0924706f5e7f51216241068ef"
SRC_URI[sha256sum] = "db550edd98e53b4252521459c2dcaf0f3b060a9bad52489b9dbadbaedad3fb89"
diff --git a/meta/recipes-support/atk/files/core_acinclude_m4.patch b/meta/recipes-support/atk/files/core_acinclude_m4.patch
new file mode 100644
index 0000000000..29c538e2c3
--- /dev/null
+++ b/meta/recipes-support/atk/files/core_acinclude_m4.patch
@@ -0,0 +1,40 @@
+at-spi2-core: fix alignof m4 macro
+
+DBIND_CHECK_ALIGNOF does not work when cross-compiling, so we modify
+it to use AC_CHECK_ALIGNOF.
+
+Upstream-Status: Pending
+
+Signed-off-by: joe.slater@windriver.com
+
+
+--- a/acinclude.m4
++++ b/acinclude.m4
+@@ -2,7 +2,26 @@
+ # type alignment test #
+ #######################
+
+-AC_DEFUN([DBIND_CHECK_ALIGNOF],
++AC_DEFUN([DBIND_CHECK_ALIGNOF],[
++AC_CHECK_ALIGNOF($1,[
++#include <stdio.h>
++#include <stdlib.h>
++#define DBUS_API_SUBJECT_TO_CHANGE
++#include <dbus/dbus.h>
++typedef struct {char s1;} dbind_struct;
++typedef void *dbind_pointer;
++])
++
++dnl Note that we substitute for names like @DBIND_ALIGNOF_DBIND_STRUCT@, but
++dnl we #define names like ALIGNOF_DBIND_STRUCT in config.h!
++dnl
++AC_SUBST(translit(dbind_alignof_$1, [a-z *], [A-Z_P]),[$ac_cv_alignof_$1])
++
++])
++
++dnl The following does not work for cross-compilation.
++dnl
++AC_DEFUN([xDBIND_CHECK_ALIGNOF],
+ [changequote(<<, >>)dnl
+ dnl The name to #define.
+ define(<<AC_TYPE_NAME>>,
diff --git a/meta/recipes-support/attr/acl.inc b/meta/recipes-support/attr/acl.inc
index bc9fd6dda3..b2bc8bac28 100644
--- a/meta/recipes-support/attr/acl.inc
+++ b/meta/recipes-support/attr/acl.inc
@@ -36,4 +36,4 @@ do_install_ptest() {
cp ${S}/include/builddefs ${S}/include/buildmacros ${S}/include/buildrules ${D}${PTEST_PATH}/include/
}
-RDEPENDS_${PN}-ptest = "bash coreutils perl perl-module-filehandle perl-module-getopt-std perl-module-posix shadow"
+RDEPENDS_${PN}-ptest = "acl bash coreutils perl perl-module-filehandle perl-module-getopt-std perl-module-posix shadow"
diff --git a/meta/recipes-support/attr/ea-acl.inc b/meta/recipes-support/attr/ea-acl.inc
index 72f623e593..b17e9b10a4 100644
--- a/meta/recipes-support/attr/ea-acl.inc
+++ b/meta/recipes-support/attr/ea-acl.inc
@@ -7,7 +7,7 @@ inherit autotools-brokensep gettext
# the package comes with a custom config.h.in, it cannot be
# overwritten by autoheader
-export AUTOHEADER = "true"
+EXTRA_AUTORECONF += "--exclude=autoheader"
EXTRA_OECONF = "INSTALL_USER=root INSTALL_GROUP=root"
EXTRA_OECONF_append_class-native = " --enable-gettext=no"
diff --git a/meta/recipes-support/beecrypt/beecrypt/add-option-dev-dsp.patch b/meta/recipes-support/beecrypt/beecrypt/add-option-dev-dsp.patch
new file mode 100644
index 0000000000..b3298ce2db
--- /dev/null
+++ b/meta/recipes-support/beecrypt/beecrypt/add-option-dev-dsp.patch
@@ -0,0 +1,34 @@
+Add config option --with-dev-dsp.
+
+Upstream-Status: Pending
+
+Signed-off-by: Zhang Xiao <xiao.zhang@windriver.com>
+---
+--- a/configure.ac
++++ b/configure.ac
+@@ -106,6 +106,16 @@ AC_ARG_WITH(python,[ --with-python[[=AR
+ fi
+ ])
+
+AC_ARG_WITH(dev-dsp,[ --with-dev-dsp enables dev/dsp for entropy production. auto for auto-detecting dev/dsp on host. [[default=no]]],[
++ if test "$withval" = yes; then
++ AC_DEFINE([HAVE_DEV_DSP], 1)
++ else
++ if test "$withval" = auto; then
++ ac_detect_dev_dsp=yes
++ fi
++ fi
++ ],[ac_detect_dev_dsp=no])
++
+ # Check for expert mode
+ if test "$ac_enable_expert_mode" = yes; then
+ BEE_EXPERT_MODE
+@@ -464,7 +474,7 @@ linux*)
+ ac_cv_have_dev_dsp=no
+ fi
+ ])
+- if test "$ac_cv_have_dev_dsp" = yes; then
++ if test "$ac_cv_have_dev_dsp" = yes && test "$ac_detect_dev_dsp" = yes; then
+ AC_DEFINE([HAVE_DEV_DSP], 1)
+ fi
+ ;;
diff --git a/meta/recipes-support/beecrypt/beecrypt/run-ptest b/meta/recipes-support/beecrypt/beecrypt/run-ptest
index 5bc7460be9..2ee294d991 100644
--- a/meta/recipes-support/beecrypt/beecrypt/run-ptest
+++ b/meta/recipes-support/beecrypt/beecrypt/run-ptest
@@ -1,5 +1,5 @@
#!/bin/sh
cd tests
-for i in `ls`; do ./$i; if [ $? == 0 ]; then echo "PASS: $i"; \
+for i in `ls`; do ./$i; if [ $? -eq 0 ]; then echo "PASS: $i"; \
else echo "FAIL: $i"; fi; done
diff --git a/meta/recipes-support/beecrypt/beecrypt_4.2.1.bb b/meta/recipes-support/beecrypt/beecrypt_4.2.1.bb
index 209b92e653..1e626f154c 100644
--- a/meta/recipes-support/beecrypt/beecrypt_4.2.1.bb
+++ b/meta/recipes-support/beecrypt/beecrypt_4.2.1.bb
@@ -11,6 +11,7 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/beecrypt/beecrypt-${PV}.tar.gz \
file://fix-for-gcc-4.7.patch \
file://run-ptest \
file://beecrypt-enable-ptest-support.patch \
+ file://add-option-dev-dsp.patch \
"
SRC_URI[md5sum] = "8441c014170823f2dff97e33df55af1e"
diff --git a/meta/recipes-support/boost/bjam-native_1.56.0.bb b/meta/recipes-support/boost/bjam-native_1.57.0.bb
index d85d1a96cb..d85d1a96cb 100644
--- a/meta/recipes-support/boost/bjam-native_1.56.0.bb
+++ b/meta/recipes-support/boost/bjam-native_1.57.0.bb
diff --git a/meta/recipes-support/boost/boost-1.56.0.inc b/meta/recipes-support/boost/boost-1.57.0.inc
index 24c32b4c6f..809419dbac 100644
--- a/meta/recipes-support/boost/boost-1.56.0.inc
+++ b/meta/recipes-support/boost/boost-1.57.0.inc
@@ -13,7 +13,7 @@ BOOST_P = "boost_${BOOST_VER}"
SRC_URI = "${SOURCEFORGE_MIRROR}/boost/${BOOST_P}.tar.bz2"
-SRC_URI[md5sum] = "a744cf167b05d72335f27c88115f211d"
-SRC_URI[sha256sum] = "134732acaf3a6e7eba85988118d943f0fa6b7f0850f65131fff89823ad30ff1d"
+SRC_URI[md5sum] = "1be49befbdd9a5ce9def2983ba3e7b76"
+SRC_URI[sha256sum] = "910c8c022a33ccec7f088bd65d4f14b466588dda94ba2124e78b8c57db264967"
S = "${WORKDIR}/${BOOST_P}"
diff --git a/meta/recipes-support/boost/boost.inc b/meta/recipes-support/boost/boost.inc
index ad1bc76184..bf22c95ebd 100644
--- a/meta/recipes-support/boost/boost.inc
+++ b/meta/recipes-support/boost/boost.inc
@@ -5,6 +5,8 @@ DEPENDS = "bjam-native zlib bzip2"
ARM_INSTRUCTION_SET = "arm"
BOOST_LIBS = "\
+ atomic \
+ chrono \
date_time \
filesystem \
graph \
@@ -106,16 +108,24 @@ BJAM_TOOLS = "-sTOOLS=gcc \
def get_boost_parallel_make(bb, d):
pm = d.getVar('PARALLEL_MAKE', True)
if pm:
- # people are usually using "-jN" or "-j N", but let it work with something else appended to it
- import re
- pm_prefix = re.search("\D+", pm)
- pm_val = re.search("\d+", pm)
- if pm_prefix is None or pm_val is None:
- bb.error("Unable to analyse format of PARALLEL_MAKE variable: %s" % pm)
- pm_nval = min(64, int(pm_val.group(0)))
- return pm_prefix.group(0) + str(pm_nval) + pm[pm_val.end():]
- else:
- return ""
+ # look for '-j' and throw other options (e.g. '-l') away
+ # because they might have different meaning in bjam
+ pm = pm.split()
+ while pm:
+ v = None
+ opt = pm.pop(0)
+ if opt == '-j':
+ v = pm.pop(0)
+ elif opt.startswith('-j'):
+ v = opt[2:].strip()
+ else:
+ v = None
+
+ if v:
+ v = min(64, int(v))
+ return '-j' + str(v)
+
+ return ""
BOOST_PARALLEL_MAKE = "${@get_boost_parallel_make(bb, d)}"
BJAM_OPTS = '${BOOST_PARALLEL_MAKE} \
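Illustrative sketch, not part of the recipe: the rewritten get_boost_parallel_make() above keeps only the -j option from PARALLEL_MAKE and caps it at 64, since other make flags such as -l mean something different to bjam. A minimal standalone Python rendering of the same parsing; the helper name parallel_make_to_bjam is invented for this example:

    def parallel_make_to_bjam(pm):
        """Return a '-jN' bjam option (capped at 64), or '' if no -j was given."""
        args = (pm or "").split()
        while args:
            opt = args.pop(0)
            if opt == "-j" and args:        # "-j N" form
                value = args.pop(0)
            elif opt.startswith("-j"):      # "-jN" form
                value = opt[2:].strip()
            else:                           # other flags (e.g. "-l 4") are dropped
                continue
            if value:
                return "-j" + str(min(64, int(value)))
        return ""

    assert parallel_make_to_bjam("-j 8 -l 4") == "-j8"
    assert parallel_make_to_bjam("-j128") == "-j64"
    assert parallel_make_to_bjam("-l 4") == ""
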
diff --git a/meta/recipes-support/boost/boost_1.56.0.bb b/meta/recipes-support/boost/boost_1.56.0.bb
deleted file mode 100644
index b199c88577..0000000000
--- a/meta/recipes-support/boost/boost_1.56.0.bb
+++ /dev/null
@@ -1,4 +0,0 @@
-include boost-${PV}.inc
-include boost.inc
-
-SRC_URI += "file://arm-intrinsics.patch"
diff --git a/meta/recipes-support/boost/boost_1.57.0.bb b/meta/recipes-support/boost/boost_1.57.0.bb
new file mode 100644
index 0000000000..c09d5f431f
--- /dev/null
+++ b/meta/recipes-support/boost/boost_1.57.0.bb
@@ -0,0 +1,6 @@
+include boost-${PV}.inc
+include boost.inc
+
+SRC_URI += "\
+ file://arm-intrinsics.patch \
+"
diff --git a/meta/recipes-support/curl/curl_7.37.1.bb b/meta/recipes-support/curl/curl_7.38.0.bb
index 8bcd9bae7c..85bd3be032 100644
--- a/meta/recipes-support/curl/curl_7.37.1.bb
+++ b/meta/recipes-support/curl/curl_7.38.0.bb
@@ -7,15 +7,15 @@ LIC_FILES_CHKSUM = "file://COPYING;beginline=7;md5=3a34942f4ae3fbf1a303160714e66
SRC_URI = "http://curl.haxx.se/download/curl-${PV}.tar.bz2 \
file://pkgconfig_fix.patch \
-"
+ "
# curl likes to set -g0 in CFLAGS, so we stop it
# from mucking around with debug options
#
SRC_URI += " file://configure_ac.patch"
-SRC_URI[md5sum] = "95c627abcf6494f5abe55effe7cd6a57"
-SRC_URI[sha256sum] = "c3ef3cd148f3778ddbefb344117d7829db60656efe1031f9e3065fc0faa25136"
+SRC_URI[md5sum] = "af6b3c299bd891f43cb5f76c4091b7b4"
+SRC_URI[sha256sum] = "035bd41e99aa1a4e64713f4cea5ccdf366ca8199e9be1b53d5a043d5165f9eba"
inherit autotools pkgconfig binconfig multilib_header
@@ -27,9 +27,10 @@ PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6,"
PACKAGECONFIG[ssl] = "--with-ssl --with-random=/dev/urandom,--without-ssl,openssl"
PACKAGECONFIG[gnutls] = "--with-gnutls,--without-gnutls,gnutls"
PACKAGECONFIG[zlib] = "--with-zlib=${STAGING_LIBDIR}/../,--without-zlib,zlib"
+PACKAGECONFIG[rtmpdump] = "--with-librtmp,--without-librtmp,rtmpdump"
+PACKAGECONFIG[libssh2] = "--with-libssh2,--without-libssh2,libssh2"
-EXTRA_OECONF = "--without-libssh2 \
- --without-libidn \
+EXTRA_OECONF = "--without-libidn \
--enable-crypto-auth \
--disable-ldap \
--disable-ldaps \
diff --git a/meta/recipes-support/db/db_5.3.28.bb b/meta/recipes-support/db/db_5.3.28.bb
index 057c9d8425..e8b814e06b 100644
--- a/meta/recipes-support/db/db_5.3.28.bb
+++ b/meta/recipes-support/db/db_5.3.28.bb
@@ -42,6 +42,7 @@ PROVIDES += "${VIRTUAL_NAME}"
# persuades bitbake to go to the right place
S = "${WORKDIR}/db-${PV}/dist"
B = "${WORKDIR}/db-${PV}/build_unix"
+SPDX_S = "${WORKDIR}/db-${PV}"
# The executables go in a separate package - typically there
# is no need to install these unless doing real database
diff --git a/meta/recipes-support/db/db_6.0.30.bb b/meta/recipes-support/db/db_6.0.30.bb
index 9fc4a30a88..47fb296daa 100644
--- a/meta/recipes-support/db/db_6.0.30.bb
+++ b/meta/recipes-support/db/db_6.0.30.bb
@@ -42,6 +42,7 @@ PROVIDES += "${VIRTUAL_NAME}"
# persuades bitbake to go to the right place
S = "${WORKDIR}/db-${PV}/dist"
B = "${WORKDIR}/db-${PV}/build_unix"
+SPDX_S = "${WORKDIR}/db-${PV}"
# The executables go in a separate package - typically there
# is no need to install these unless doing real database
diff --git a/meta/recipes-support/debianutils/debianutils_4.4.bb b/meta/recipes-support/debianutils/debianutils_4.4.bb
new file mode 100644
index 0000000000..346eaf16eb
--- /dev/null
+++ b/meta/recipes-support/debianutils/debianutils_4.4.bb
@@ -0,0 +1,35 @@
+SUMMARY = "Miscellaneous utilities specific to Debian"
+SECTION = "base"
+LICENSE = "GPLv2 & SMAIL_GPL"
+LIC_FILES_CHKSUM = "file://debian/copyright;md5=f01a5203d50512fc4830b4332b696a9f"
+
+SRC_URI = "${DEBIAN_MIRROR}/main/d/${BPN}/${BPN}_${PV}.tar.gz"
+SRC_URI[md5sum] = "c0cb076754d7f4eb1e3397d00916647f"
+SRC_URI[sha256sum] = "190850cdd6b5302e0a1ba1aaed1bc7074d67d3bd8d04c613f242f7145afa53a6"
+
+inherit autotools update-alternatives
+
+do_configure_prepend() {
+ sed -i -e 's:tempfile.1 which.1:which.1:g' ${S}/Makefile.am
+}
+
+do_install_append() {
+ if [ "${base_bindir}" != "${bindir}" ]; then
+ # Debian places some utils into ${base_bindir} as does busybox
+ install -d ${D}${base_bindir}
+ for app in run-parts tempfile; do
+ mv ${D}${bindir}/$app ${D}${base_bindir}/$app
+ done
+ fi
+}
+
+ALTERNATIVE_PRIORITY="100"
+ALTERNATIVE_${PN} = "add-shell installkernel remove-shell run-parts savelog tempfile which"
+
+ALTERNATIVE_LINK_NAME[add-shell]="${sbindir}/add-shell"
+ALTERNATIVE_LINK_NAME[installkernel]="${sbindir}/installkernel"
+ALTERNATIVE_LINK_NAME[remove-shell]="${sbindir}/remove-shell"
+ALTERNATIVE_LINK_NAME[run-parts]="${base_bindir}/run-parts"
+ALTERNATIVE_LINK_NAME[savelog]="${bindir}/savelog"
+ALTERNATIVE_LINK_NAME[tempfile]="${base_bindir}/tempfile"
+ALTERNATIVE_LINK_NAME[which]="${bindir}/which"
diff --git a/meta/recipes-support/gdbm/gdbm-1.8.3/ldflags.patch b/meta/recipes-support/gdbm/gdbm-1.8.3/ldflags.patch
new file mode 100644
index 0000000000..770d8f66b9
--- /dev/null
+++ b/meta/recipes-support/gdbm/gdbm-1.8.3/ldflags.patch
@@ -0,0 +1,22 @@
+Obey LDFLAGS
+
+Signed-off-by: Christopher Larson <chris_larson@mentor.com>
+Upstream-Status: Inappropriate [old version]
+
+--- gdbm-1.8.3.orig/Makefile.in
++++ gdbm-1.8.3/Makefile.in
+@@ -156,12 +156,12 @@ install-compat:
+
+ libgdbm.la: $(LOBJS) gdbm.h
+ rm -f libgdbm.la
+- $(LIBTOOL) --mode=link $(CC) -o libgdbm.la -rpath $(libdir) \
++ $(LIBTOOL) --mode=link $(CC) $(LDFLAGS) -o libgdbm.la -rpath $(libdir) \
+ -version-info $(SHLIB_VER) $(LOBJS)
+
+ libgdbm_compat.la: $(C_LOBJS) gdbm.h
+ rm -f libgdbm_compat.la
+- $(LIBTOOL) --mode=link $(CC) -o libgdbm_compat.la -rpath $(libdir) \
++ $(LIBTOOL) --mode=link $(CC) $(LDFLAGS) -o libgdbm_compat.la -rpath $(libdir) \
+ -version-info $(SHLIB_VER) $(C_LOBJS)
+
+ gdbm.h: gdbm.proto gdbmerrno.h gdbm.proto2
diff --git a/meta/recipes-support/gdbm/gdbm_1.8.3.bb b/meta/recipes-support/gdbm/gdbm_1.8.3.bb
index aecf47afb9..2331d1df3a 100644
--- a/meta/recipes-support/gdbm/gdbm_1.8.3.bb
+++ b/meta/recipes-support/gdbm/gdbm_1.8.3.bb
@@ -8,7 +8,8 @@ PR = "r4"
SRC_URI = "${GNU_MIRROR}/gdbm/gdbm-${PV}.tar.gz \
file://makefile.patch \
- file://libtool-mode.patch"
+ file://libtool-mode.patch \
+ file://ldflags.patch"
SRC_URI[md5sum] = "1d1b1d5c0245b1c00aff92da751e9aa1"
SRC_URI[sha256sum] = "cc340338a2e28b40058ab9eb5354a21d53f88a1582ea21ba0bb185c37a281dc9"
diff --git a/meta/recipes-support/gmp/gmp/gmp-6.0.0-ppc64.patch b/meta/recipes-support/gmp/gmp/gmp-6.0.0-ppc64.patch
new file mode 100644
index 0000000000..1113b41ecd
--- /dev/null
+++ b/meta/recipes-support/gmp/gmp/gmp-6.0.0-ppc64.patch
@@ -0,0 +1,26 @@
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+This patch was pulled from gmp:
+https://gmplib.org/repo/gmp/rev/4a6d258b467f
+Upstream-Status: Backport
+
+# HG changeset patch
+# User Torbjorn Granlund <tege@gmplib.org>
+# Date 1395835068 -3600
+# Node ID 4a6d258b467f661da0894cc60ecd060f2e3c67c7
+# Parent 301ce2788826a2d4d2725bd5cf01e998638db37a
+Provide default for BMOD_1_TO_MOD_1_THRESHOLD.
+
+diff -r 301ce2788826 -r 4a6d258b467f mpn/powerpc64/mode64/gcd_1.asm
+--- a/mpn/powerpc64/mode64/gcd_1.asm Tue Mar 25 15:34:52 2014 +0100
++++ b/mpn/powerpc64/mode64/gcd_1.asm Wed Mar 26 12:57:48 2014 +0100
+@@ -43,6 +43,9 @@
+ define(`n', `r4')
+ define(`v0', `r5')
+
++ifdef(`BMOD_1_TO_MOD_1_THRESHOLD',,
++ `define(`BMOD_1_TO_MOD_1_THRESHOLD',30)')
++
+ EXTERN_FUNC(mpn_mod_1)
+ EXTERN_FUNC(mpn_modexact_1c_odd)
diff --git a/meta/recipes-support/gmp/gmp_6.0.0.bb b/meta/recipes-support/gmp/gmp_6.0.0.bb
index 19e5f4dd19..6218491142 100644
--- a/meta/recipes-support/gmp/gmp_6.0.0.bb
+++ b/meta/recipes-support/gmp/gmp_6.0.0.bb
@@ -7,6 +7,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
"
SRC_URI_append = " file://use-includedir.patch \
file://append_user_provided_flags.patch \
+ file://gmp-6.0.0-ppc64.patch \
"
SRC_URI[md5sum] = "b7ff2d88cae7f8085bd5006096eed470"
diff --git a/meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing/0001-gsystem-subprocess.c-Enable-GNU-extensions-in-system.patch b/meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing/0001-gsystem-subprocess.c-Enable-GNU-extensions-in-system.patch
new file mode 100644
index 0000000000..840666f302
--- /dev/null
+++ b/meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing/0001-gsystem-subprocess.c-Enable-GNU-extensions-in-system.patch
@@ -0,0 +1,35 @@
+From b1de2c6290bc0651fe87a8c4fb52e7a0a5fe6322 Mon Sep 17 00:00:00 2001
+From: Khem Raj <raj.khem@gmail.com>
+Date: Sun, 5 Oct 2014 16:01:49 -0700
+Subject: [PATCH] gsystem-subprocess.c: Enable GNU extensions in system C
+ library
+
+This should export O_CLOEXEC where it is only
+available when _GNU_SOURCE is defined, e.g. on uclibc-based systems.
+
+Signed-off-by: Khem Raj <raj.khem@gmail.com>
+
+Upstream-Status: Pending
+
+---
+ src/libgsystem/gsystem-subprocess.c | 4 ++++
+ 1 file changed, 4 insertions(+)
+
+diff --git a/src/libgsystem/gsystem-subprocess.c b/src/libgsystem/gsystem-subprocess.c
+index a967896..a52a362 100644
+--- a/src/libgsystem/gsystem-subprocess.c
++++ b/src/libgsystem/gsystem-subprocess.c
+@@ -18,6 +18,10 @@
+
+ #include "config.h"
+
++#ifndef _GNU_SOURCE
++#define _GNU_SOURCE
++#endif
++
+ #define _GSYSTEM_NO_LOCAL_ALLOC
+ #include "libgsystem.h"
+
+--
+2.1.1
+
diff --git a/meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing_2014.1.bb b/meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing_2014.1.bb
index 146a02ac7d..c688e9d1d7 100644
--- a/meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing_2014.1.bb
+++ b/meta/recipes-support/gnome-desktop-testing/gnome-desktop-testing_2014.1.bb
@@ -2,7 +2,9 @@ SUMMARY = "Test runner for GNOME-style installed tests"
HOMEPAGE = "https://wiki.gnome.org/GnomeGoals/InstalledTests"
LICENSE = "LGPLv2+"
-SRC_URI = "${GNOME_MIRROR}/${BPN}/${PV}/${BPN}-${PV}.tar.xz"
+SRC_URI = "${GNOME_MIRROR}/${BPN}/${PV}/${BPN}-${PV}.tar.xz \
+ file://0001-gsystem-subprocess.c-Enable-GNU-extensions-in-system.patch \
+ "
SRC_URI[md5sum] = "a608ad72a77e23a1aecdfd8d07a94baf"
SRC_URI[sha256sum] = "1a3eed73678dd22d09d6a7ec4f899557df3e8b4a802affa76d0f163b31286539"
diff --git a/meta/recipes-support/gnupg/gnupg-1.4.7/CVE-2013-4242.patch b/meta/recipes-support/gnupg/gnupg-1.4.7/CVE-2013-4242.patch
new file mode 100644
index 0000000000..c9addca28e
--- /dev/null
+++ b/meta/recipes-support/gnupg/gnupg-1.4.7/CVE-2013-4242.patch
@@ -0,0 +1,62 @@
+From e2202ff2b704623efc6277fb5256e4e15bac5676 Mon Sep 17 00:00:00 2001
+From: Werner Koch <wk@gnupg.org>
+Date: Thu, 25 Jul 2013 11:17:52 +0200
+Subject: [PATCH] Mitigate a flush+reload cache attack on RSA secret
+ exponents.
+
+commit e2202ff2b704623efc6277fb5256e4e15bac5676 from
+git://git.gnupg.org/libgcrypt.git
+
+* mpi/mpi-pow.c (gcry_mpi_powm): Always perform the mpi_mul for
+exponents in secure memory.
+
+Upstream-Status: Backport
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+--
+
+The attack is published as http://eprint.iacr.org/2013/448 :
+
+Flush+Reload: a High Resolution, Low Noise, L3 Cache Side-Channel
+Attack by Yuval Yarom and Katrina Falkner. 18 July 2013.
+
+ Flush+Reload is a cache side-channel attack that monitors access to
+ data in shared pages. In this paper we demonstrate how to use the
+ attack to extract private encryption keys from GnuPG. The high
+ resolution and low noise of the Flush+Reload attack enables a spy
+ program to recover over 98% of the bits of the private key in a
+ single decryption or signing round. Unlike previous attacks, the
+ attack targets the last level L3 cache. Consequently, the spy
+ program and the victim do not need to share the execution core of
+ the CPU. The attack is not limited to a traditional OS and can be
+ used in a virtualised environment, where it can attack programs
+ executing in a different VM.
+
+Index: gnupg-1.4.7/mpi/mpi-pow.c
+===================================================================
+--- gnupg-1.4.7.orig/mpi/mpi-pow.c
++++ gnupg-1.4.7/mpi/mpi-pow.c
+@@ -212,7 +212,13 @@ mpi_powm( MPI res, MPI base, MPI exponen
+ tp = rp; rp = xp; xp = tp;
+ rsize = xsize;
+
+- if( (mpi_limb_signed_t)e < 0 ) {
++ /* To mitigate the Yarom/Falkner flush+reload cache
++ * side-channel attack on the RSA secret exponent, we do
++ * the multiplication regardless of the value of the
++ * high-bit of E. But to avoid this performance penalty
++ * we do it only if the exponent has been stored in secure
++ * memory and we can thus assume it is a secret exponent. */
++ if (esec || (mpi_limb_signed_t)e < 0) {
+ /*mpihelp_mul( xp, rp, rsize, bp, bsize );*/
+ if( bsize < KARATSUBA_THRESHOLD ) {
+ mpihelp_mul( xp, rp, rsize, bp, bsize );
+@@ -227,6 +233,8 @@ mpi_powm( MPI res, MPI base, MPI exponen
+ mpihelp_divrem(xp + msize, 0, xp, xsize, mp, msize);
+ xsize = msize;
+ }
++ }
++ if ( (mpi_limb_signed_t)e < 0 ) {
+
+ tp = rp; rp = xp; xp = tp;
+ rsize = xsize;
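Illustrative sketch, not the libgcrypt code: the change above makes gcry_mpi_powm perform the multiplication on every loop pass whenever the exponent lives in secure memory, so the sequence of memory accesses no longer tracks the individual bits of the secret exponent. The same square-and-multiply-always structure, reduced to plain Python integers for clarity (Python itself gives no constant-time guarantee; this only shows the shape of the fix):

    def powm_always_multiply(base, exp, mod):
        # Right-to-left square-and-multiply where the multiply is computed on
        # every iteration; only the select depends on the exponent bit, so the
        # amount of arithmetic done is independent of the secret bits.
        result = 1
        acc = base % mod
        for i in range(exp.bit_length()):
            bit = (exp >> i) & 1
            product = (result * acc) % mod   # always computed
            result = product if bit else result
            acc = (acc * acc) % mod
        return result

    assert powm_always_multiply(7, 13, 11) == pow(7, 13, 11)
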
diff --git a/meta/recipes-support/gnupg/gnupg_1.4.7.bb b/meta/recipes-support/gnupg/gnupg_1.4.7.bb
index ddcc2c24fe..aef515d534 100644
--- a/meta/recipes-support/gnupg/gnupg_1.4.7.bb
+++ b/meta/recipes-support/gnupg/gnupg_1.4.7.bb
@@ -17,6 +17,7 @@ SRC_URI = "ftp://ftp.gnupg.org/gcrypt/gnupg/gnupg-${PV}.tar.bz2 \
file://curl_typeof_fix_backport.patch \
file://CVE-2013-4351.patch \
file://CVE-2013-4576.patch \
+ file://CVE-2013-4242.patch \
"
SRC_URI[md5sum] = "b06a141cca5cd1a55bbdd25ab833303c"
@@ -98,3 +99,4 @@ FILES_${PN}-dbg += "${libexecdir}/${BPN}/.debug"
PACKAGECONFIG ??= ""
PACKAGECONFIG[curl] = "--with-libcurl=${STAGING_LIBDIR},--without-libcurl,curl"
+PACKAGECONFIG[libusb] = "--with-libusb=${STAGING_LIBDIR},--without-libusb,libusb-compat"
diff --git a/meta/recipes-support/gnupg/gnupg_2.0.26.bb b/meta/recipes-support/gnupg/gnupg_2.0.26.bb
index c460b7e1fa..92dd3bd643 100644
--- a/meta/recipes-support/gnupg/gnupg_2.0.26.bb
+++ b/meta/recipes-support/gnupg/gnupg_2.0.26.bb
@@ -23,6 +23,7 @@ EXTRA_OECONF = "--disable-ldap \
--with-bzip2=${STAGING_LIBDIR}/.. \
--with-readline=${STAGING_LIBDIR}/.. \
"
+RRECOMMENDS_${PN} = "pinentry"
do_configure_prepend () {
# Else these could be used in prefernce to those in aclocal-copy
diff --git a/meta/recipes-support/gnutls/gnutls.inc b/meta/recipes-support/gnutls/gnutls.inc
index 27aba26943..12b26cc97d 100644
--- a/meta/recipes-support/gnutls/gnutls.inc
+++ b/meta/recipes-support/gnutls/gnutls.inc
@@ -35,6 +35,9 @@ do_configure_prepend() {
done
}
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[tpm] = "--with-tpm, --without-tpm, trousers"
+
PACKAGES =+ "${PN}-openssl ${PN}-xx"
FILES_${PN}-dev += "${bindir}/gnutls-cli-debug"
diff --git a/meta/recipes-support/gnutls/gnutls_3.3.11.bb b/meta/recipes-support/gnutls/gnutls_3.3.11.bb
new file mode 100644
index 0000000000..320c510843
--- /dev/null
+++ b/meta/recipes-support/gnutls/gnutls_3.3.11.bb
@@ -0,0 +1,6 @@
+require gnutls.inc
+
+SRC_URI += "file://correct_rpl_gettimeofday_signature.patch \
+ "
+SRC_URI[md5sum] = "b657e3010c10cae2244e7ce79ee3d446"
+SRC_URI[sha256sum] = "aef28d629b6ba824bd435f9b23506525e657e3746d4aa021296b13cbaaa6ae71"
diff --git a/meta/recipes-support/gnutls/gnutls_3.3.5.bb b/meta/recipes-support/gnutls/gnutls_3.3.5.bb
deleted file mode 100644
index b3daa49249..0000000000
--- a/meta/recipes-support/gnutls/gnutls_3.3.5.bb
+++ /dev/null
@@ -1,7 +0,0 @@
-require gnutls.inc
-
-SRC_URI += "file://correct_rpl_gettimeofday_signature.patch \
- "
-
-SRC_URI[md5sum] = "1f396dcf3c14ea67de7243821006d1a2"
-SRC_URI[sha256sum] = "48f34ae032692c498e782e9f1369506572be40ecf7f3f3604b0b00bad1b10477"
diff --git a/meta/recipes-support/gpgme/gpgme-1.4.3/gpgme.pc b/meta/recipes-support/gpgme/gpgme-1.4.3/gpgme.pc
new file mode 100644
index 0000000000..30a4d56d6e
--- /dev/null
+++ b/meta/recipes-support/gpgme/gpgme-1.4.3/gpgme.pc
@@ -0,0 +1,10 @@
+prefix=/usr
+libdir=${prefix}/lib
+includedir=${prefix}/include
+
+Name: gpgme
+Description: GNU Privacy Guard Made Easy
+Version: 1.4.3
+Requires:
+Libs: -L${libdir} -lgpgme -lassuan -lgpg-error
+Cflags: -I${includedir}
diff --git a/meta/recipes-support/gpgme/gpgme_1.4.3.bb b/meta/recipes-support/gpgme/gpgme_1.4.3.bb
index ef08d4f2f4..ca1e5f9344 100644
--- a/meta/recipes-support/gpgme/gpgme_1.4.3.bb
+++ b/meta/recipes-support/gpgme/gpgme_1.4.3.bb
@@ -10,7 +10,8 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \
file://src/engine.h;endline=22;md5=4b6d8ba313d9b564cc4d4cfb1640af9d"
SRC_URI = "ftp://ftp.gnupg.org/gcrypt/gpgme/gpgme-${PV}.tar.bz2 \
- file://disable_gpgconf_check.patch"
+ file://disable_gpgconf_check.patch \
+ file://gpgme.pc"
SRC_URI[md5sum] = "334e524cffa8af4e2f43ae8afe585672"
SRC_URI[sha256sum] = "2d1cc12411753752d9c5b9037e6fd3fd363517af720154768cc7b46b60120496"
@@ -32,3 +33,8 @@ do_configure_prepend () {
rm -f ${S}/m4/gpg-error.m4
rm -f ${S}/m4/libassuan.m4
}
+
+do_install_append () {
+ install -d ${D}${libdir}/pkgconfig
+ install -m 0644 ${WORKDIR}/gpgme.pc ${D}${libdir}/pkgconfig/
+}
diff --git a/meta/recipes-support/icu/icu.inc b/meta/recipes-support/icu/icu.inc
index 77321076d5..15d002dbef 100644
--- a/meta/recipes-support/icu/icu.inc
+++ b/meta/recipes-support/icu/icu.inc
@@ -7,6 +7,7 @@ DEPENDS = "icu-native"
DEPENDS_class-native = ""
S = "${WORKDIR}/icu/source"
+SPDX_S = "${WORKDIR}/icu"
STAGING_ICU_DIR_NATIVE = "${STAGING_DATADIR_NATIVE}/${BPN}/${PV}"
PARALLEL_MAKE = ""
diff --git a/meta/recipes-support/libassuan/libassuan/libassuan-add-pkgconfig-support.patch b/meta/recipes-support/libassuan/libassuan/libassuan-add-pkgconfig-support.patch
index 4f968812c9..f130d85652 100644
--- a/meta/recipes-support/libassuan/libassuan/libassuan-add-pkgconfig-support.patch
+++ b/meta/recipes-support/libassuan/libassuan/libassuan-add-pkgconfig-support.patch
@@ -6,18 +6,18 @@ Upstream-Status: Rejected
Signed-off-by: Chen Qi <Qi.Chen@windriver.com>
Signed-off-by: Constantin Musca <constantinx.musca@intel.com>
-Index: libassuan-2.1.1/Makefile.am
+Index: libassuan-2.1.2/Makefile.am
===================================================================
---- libassuan-2.1.1.orig/Makefile.am 2014-05-13 20:57:37.794398357 +0000
-+++ libassuan-2.1.1/Makefile.am 2014-05-13 20:57:37.790398357 +0000
-@@ -24,10 +24,13 @@
+--- libassuan-2.1.2.orig/Makefile.am
++++ libassuan-2.1.2/Makefile.am
+@@ -24,10 +24,13 @@ AUTOMAKE_OPTIONS = dist-bzip2 no-dist-gz
# (A suitable gitlog-to-changelog script can be found in GnuPG master.)
GITLOG_TO_CHANGELOG=gitlog-to-changelog
+pkgconfigdir = $(libdir)/pkgconfig
+pkgconfig_DATA = libassuan.pc
+
- EXTRA_DIST = config.rpath autogen.sh README.GIT \
+ EXTRA_DIST = autogen.sh autogen.rc README.GIT \
ChangeLog-2011 doc/ChangeLog-2011 src/ChangeLog-2011 \
tests/ChangeLog-2011 contrib/ChangeLog-2011 \
- build-aux/git-log-footer build-aux/git-log-fix
@@ -25,10 +25,10 @@ Index: libassuan-2.1.1/Makefile.am
SUBDIRS = m4 src doc tests
-Index: libassuan-2.1.1/libassuan.pc.in
+Index: libassuan-2.1.2/libassuan.pc.in
===================================================================
---- /dev/null 1970-01-01 00:00:00.000000000 +0000
-+++ libassuan-2.1.1/libassuan.pc.in 2014-05-13 20:57:37.790398357 +0000
+--- /dev/null
++++ libassuan-2.1.2/libassuan.pc.in
@@ -0,0 +1,14 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
@@ -44,11 +44,11 @@ Index: libassuan-2.1.1/libassuan.pc.in
+Libs: -L${libdir} -lassuan
+Libs.private: -lgpg-error
+Cflags: -I${includedir}
-Index: libassuan-2.1.1/configure.ac
+Index: libassuan-2.1.2/configure.ac
===================================================================
---- libassuan-2.1.1.orig/configure.ac 2014-05-13 20:57:37.794398357 +0000
-+++ libassuan-2.1.1/configure.ac 2014-05-13 20:57:37.790398357 +0000
-@@ -434,7 +434,7 @@
+--- libassuan-2.1.2.orig/configure.ac
++++ libassuan-2.1.2/configure.ac
+@@ -439,7 +439,7 @@ AC_CONFIG_FILES([doc/Makefile])
AC_CONFIG_FILES([tests/Makefile])
AC_CONFIG_FILES([src/libassuan-config], [chmod +x src/libassuan-config])
AC_CONFIG_FILES([src/versioninfo.rc])
@@ -57,11 +57,11 @@ Index: libassuan-2.1.1/configure.ac
AC_OUTPUT
echo "
-Index: libassuan-2.1.1/src/libassuan.m4
+Index: libassuan-2.1.2/src/libassuan.m4
===================================================================
---- libassuan-2.1.1.orig/src/libassuan.m4 2011-04-06 15:37:26.000000000 +0000
-+++ libassuan-2.1.1/src/libassuan.m4 2014-05-13 21:06:53.402395537 +0000
-@@ -15,18 +15,6 @@
+--- libassuan-2.1.2.orig/src/libassuan.m4
++++ libassuan-2.1.2/src/libassuan.m4
+@@ -15,18 +15,6 @@ dnl Returns ok set to yes or no.
dnl
AC_DEFUN([_AM_PATH_LIBASSUAN_COMMON],
[ AC_REQUIRE([AC_CANONICAL_HOST])
@@ -80,7 +80,7 @@ Index: libassuan-2.1.1/src/libassuan.m4
tmp=ifelse([$1], ,1:0.9.2,$1)
if echo "$tmp" | grep ':' >/dev/null 2>/dev/null ; then
-@@ -37,51 +25,12 @@
+@@ -37,51 +25,12 @@ AC_DEFUN([_AM_PATH_LIBASSUAN_COMMON],
min_libassuan_version="$tmp"
fi
@@ -134,7 +134,7 @@ Index: libassuan-2.1.1/src/libassuan.m4
if test "$tmp" -gt 0 ; then
AC_MSG_CHECKING([LIBASSUAN API version])
if test "$req_libassuan_api" -eq "$tmp" ; then
-@@ -96,7 +45,7 @@
+@@ -96,7 +45,7 @@ AC_DEFUN([_AM_PATH_LIBASSUAN_COMMON],
if test $ok = yes; then
if test x"$host" != x ; then
@@ -143,7 +143,7 @@ Index: libassuan-2.1.1/src/libassuan.m4
if test x"$libassuan_config_host" != xnone ; then
if test x"$libassuan_config_host" != x"$host" ; then
AC_MSG_WARN([[
-@@ -137,12 +86,8 @@
+@@ -137,12 +86,8 @@ dnl
AC_DEFUN([AM_PATH_LIBASSUAN],
[ _AM_PATH_LIBASSUAN_COMMON($1)
if test $ok = yes; then
diff --git a/meta/recipes-support/libassuan/libassuan_2.1.1.bb b/meta/recipes-support/libassuan/libassuan_2.1.2.bb
index d66023320d..97dec6a76a 100644
--- a/meta/recipes-support/libassuan/libassuan_2.1.1.bb
+++ b/meta/recipes-support/libassuan/libassuan_2.1.2.bb
@@ -13,8 +13,8 @@ DEPENDS = "libgpg-error"
SRC_URI = "ftp://ftp.gnupg.org/gcrypt/libassuan/libassuan-${PV}.tar.bz2 \
file://libassuan-add-pkgconfig-support.patch"
-SRC_URI[md5sum] = "757243cc4a71b30ed8d8dbe784035d36"
-SRC_URI[sha256sum] = "23e2d67779b88e90d29fe1df6b157109f1c2a647d0f1b2a0f4295bb3c0b2039d"
+SRC_URI[md5sum] = "1dc4c3e1dbfb3939bfa2d72db8e136ba"
+SRC_URI[sha256sum] = "39f8a7c9349aaaf7ccd937b90660153ec4d2d4df2465018754e5bcae5b1db77b"
BINCONFIG = "${bindir}/libassuan-config"
@@ -22,5 +22,5 @@ inherit autotools texinfo binconfig-disabled pkgconfig
do_configure_prepend () {
# Else these could be used in prefernce to those in aclocal-copy
- rm ${S}/m4/*.m4
+ rm -f ${S}/m4/*.m4
}
diff --git a/meta/recipes-support/libcap/libcap/fix-CAP_LAST_CAP.patch b/meta/recipes-support/libcap/libcap/fix-CAP_LAST_CAP.patch
deleted file mode 100644
index a5571883d3..0000000000
--- a/meta/recipes-support/libcap/libcap/fix-CAP_LAST_CAP.patch
+++ /dev/null
@@ -1,39 +0,0 @@
-fix CAP_LAST_CAP
-
-Upstream-Status: pending
-
-Two new capability CAP_BLOCK_SUSPEND and CAP_WAKE_ALARM have been added into
-kernel, but libcap did not update them.
-Once libcap uses its capability.h (the default value of KERNEL_HEADERS), and
-application always use capability.h from kernel, that will make cap_get_flag
-return wrong value.
-
-Signed-off-by: Roy Li <rongqing.li@windriver.com>
----
- libcap/include/linux/capability.h | 10 +++++++++-
- 1 file changed, 9 insertions(+), 1 deletion(-)
-
-diff --git a/libcap/include/linux/capability.h b/libcap/include/linux/capability.h
-index 4924f2a..57026be 100644
---- a/libcap/include/linux/capability.h
-+++ b/libcap/include/linux/capability.h
-@@ -360,7 +360,15 @@ struct cpu_vfs_cap_data {
- CAP_SYS_ADMIN is not acceptable anymore. */
- #define CAP_SYSLOG 34
-
--#define CAP_LAST_CAP CAP_SYSLOG
-+/* Allow triggering something that will wake the system */
-+
-+#define CAP_WAKE_ALARM 35
-+
-+/* Allow preventing system suspends */
-+
-+#define CAP_BLOCK_SUSPEND 36
-+
-+#define CAP_LAST_CAP CAP_BLOCK_SUSPEND
-
- #define cap_valid(x) ((x) >= 0 && (x) <= CAP_LAST_CAP)
-
---
-1.7.10.4
-
diff --git a/meta/recipes-support/libcap/libcap_2.22.bb b/meta/recipes-support/libcap/libcap_2.22.bb
deleted file mode 100644
index a989bb6a6b..0000000000
--- a/meta/recipes-support/libcap/libcap_2.22.bb
+++ /dev/null
@@ -1,6 +0,0 @@
-require libcap.inc
-
-PR = "r6"
-
-SRC_URI[md5sum] = "b4896816b626bea445f0b3849bdd4077"
-SRC_URI[sha256sum] = "e1cae65d8febf2579be37c255d2e058715785ead481a4e6a4357a06aff84721f"
diff --git a/meta/recipes-support/libcap/libcap.inc b/meta/recipes-support/libcap/libcap_2.24.bb
index 0e28ea04a9..75cf5d415d 100644
--- a/meta/recipes-support/libcap/libcap.inc
+++ b/meta/recipes-support/libcap/libcap_2.24.bb
@@ -7,10 +7,10 @@ LIC_FILES_CHKSUM = "file://License;md5=3f84fd6f29d453a56514cb7e4ead25f1"
DEPENDS = "perl-native-runtime"
-SRC_URI = "${DEBIAN_MIRROR}/main/libc/libcap2/${BPN}2_${PV}.orig.tar.gz \
- file://fix-CAP_LAST_CAP.patch"
+SRC_URI = "${KERNELORG_MIRROR}/linux/libs/security/linux-privs/${BPN}2/${BPN}-${PV}.tar.xz"
-PR = "r1"
+SRC_URI[md5sum] = "d43ab9f680435a7fff35b4ace8d45b80"
+SRC_URI[sha256sum] = "cee4568f78dc851d726fc93f25f4ed91cc223b1fe8259daa4a77158d174e6c65"
inherit lib_package
@@ -56,8 +56,10 @@ do_install_append() {
# Move the library to base_libdir
install -d ${D}${base_libdir}
if [ ! ${D}${libdir} -ef ${D}${base_libdir} ]; then
- mv ${D}${libdir}/* ${D}${base_libdir}
- rmdir ${D}${libdir}
+ mv ${D}${libdir}/libcap* ${D}${base_libdir}
+ if [ -d ${D}${libdir}/security ]; then
+ mv ${D}${libdir}/security ${D}${base_libdir}
+ fi
fi
}
diff --git a/meta/recipes-support/libgcrypt/libgcrypt_1.6.1.bb b/meta/recipes-support/libgcrypt/libgcrypt_1.6.1.bb
deleted file mode 100644
index 1657ea4de4..0000000000
--- a/meta/recipes-support/libgcrypt/libgcrypt_1.6.1.bb
+++ /dev/null
@@ -1,4 +0,0 @@
-require libgcrypt.inc
-
-SRC_URI[md5sum] = "d155aa1b06fa879175922ba28f6a6509"
-SRC_URI[sha256sum] = "7c1007197bef49c3b8740cf6af8b4eb4eb74c7a69796ebcf555d928c287255de"
diff --git a/meta/recipes-support/libgcrypt/libgcrypt_1.6.2.bb b/meta/recipes-support/libgcrypt/libgcrypt_1.6.2.bb
new file mode 100644
index 0000000000..c49c0e7c17
--- /dev/null
+++ b/meta/recipes-support/libgcrypt/libgcrypt_1.6.2.bb
@@ -0,0 +1,4 @@
+require libgcrypt.inc
+
+SRC_URI[md5sum] = "d19adc062edff0ebc7e887212733ef1f"
+SRC_URI[sha256sum] = "936921644b9c81e2395e18a554a9a5f9252aae3976f8afc3e4229ee9d785e627"
diff --git a/meta/recipes-support/libical/files/pthread-fix.patch b/meta/recipes-support/libical/files/pthread-fix.patch
deleted file mode 100644
index 877b808fc0..0000000000
--- a/meta/recipes-support/libical/files/pthread-fix.patch
+++ /dev/null
@@ -1,52 +0,0 @@
-New added pthread feature leads to some deadlock with some unlock code missing.
-This patch fix it.
-
-Signed-off-by: Zhai Edwin <edwin.zhai@intel.com>
-
-Upstream-Status: Pending
-
-Index: libical-0.47/src/libical/icaltimezone.c
-===================================================================
---- libical-0.47.orig/src/libical/icaltimezone.c 2011-12-16 13:42:25.000000000 +0800
-+++ libical-0.47/src/libical/icaltimezone.c 2011-12-16 14:16:25.000000000 +0800
-@@ -1773,7 +1773,7 @@
- filename = (char*) malloc (filename_len);
- if (!filename) {
- icalerror_set_errno(ICAL_NEWFAILED_ERROR);
-- return;
-+ goto out;
- }
-
- snprintf (filename, filename_len, "%s/%s.ics", get_zone_directory(),
-@@ -1783,7 +1783,7 @@
- free (filename);
- if (!fp) {
- icalerror_set_errno(ICAL_FILE_ERROR);
-- return;
-+ goto out;
- }
-
-
-@@ -1807,7 +1807,7 @@
-
- if (!subcomp) {
- icalerror_set_errno(ICAL_PARSE_ERROR);
-- return;
-+ goto out;
- }
-
- icaltimezone_get_vtimezone_properties (zone, subcomp);
-@@ -1817,10 +1817,12 @@
- icalcomponent_free(comp);
- }
- #endif
--#ifdef HAVE_PTHREAD
-+
- out:
-+#ifdef HAVE_PTHREAD
- pthread_mutex_unlock(&builtin_mutex);
- #endif
-+ return;
- }
-
-
diff --git a/meta/recipes-support/libical/libical_0.48.bb b/meta/recipes-support/libical/libical_0.48.bb
deleted file mode 100644
index f464026f8c..0000000000
--- a/meta/recipes-support/libical/libical_0.48.bb
+++ /dev/null
@@ -1,16 +0,0 @@
-SUMMARY = "iCal and scheduling (RFC 2445, 2446, 2447) library"
-HOMEPAGE = "http://sourceforge.net/projects/freeassociation/"
-BUGTRACKER = "http://sourceforge.net/tracker/?group_id=16077&atid=116077"
-LICENSE = "LGPLv2.1 | MPL-1"
-LIC_FILES_CHKSUM = "file://COPYING;md5=d4fc58309d8ed46587ac63bb449d82f8 \
- file://LICENSE;md5=d1a0891cd3e582b3e2ec8fe63badbbb6"
-SECTION = "libs"
-
-
-SRC_URI = "${SOURCEFORGE_MIRROR}/project/freeassociation/${BPN}/${P}/${BPN}-${PV}.tar.gz\
- file://pthread-fix.patch"
-
-SRC_URI[md5sum] = "e549f434d5fbf9cd156c60ed4943618f"
-SRC_URI[sha256sum] = "2ae78b0757f0dd13431acf42a9a8d038339fd4767fd5134e650bf60ee0b4dff0"
-
-inherit autotools
diff --git a/meta/recipes-support/libical/libical_1.0.0.bb b/meta/recipes-support/libical/libical_1.0.0.bb
new file mode 100644
index 0000000000..07b549ecf2
--- /dev/null
+++ b/meta/recipes-support/libical/libical_1.0.0.bb
@@ -0,0 +1,13 @@
+SUMMARY = "iCal and scheduling (RFC 2445, 2446, 2447) library"
+HOMEPAGE = "https://github.com/libical/libical"
+BUGTRACKER = "https://github.com/libical/libical/issues"
+LICENSE = "LGPLv2.1 | MPL-1"
+LIC_FILES_CHKSUM = "file://COPYING;md5=d4fc58309d8ed46587ac63bb449d82f8 \
+ file://LICENSE;md5=d1a0891cd3e582b3e2ec8fe63badbbb6"
+SECTION = "libs"
+
+SRC_URI = "https://github.com/${PN}/${PN}/archive/v${PV}.tar.gz"
+SRC_URI[md5sum] = "f4b8e33ae5efb2f025eb43ce69682a36"
+SRC_URI[sha256sum] = "0072e83834092315772e6719b85fc8b11530b1ff53f4d108315fb38cddbce8c2"
+
+inherit autotools
diff --git a/meta/recipes-support/libiconv/libiconv_1.11.1.bb b/meta/recipes-support/libiconv/libiconv_1.11.1.bb
index c52564a42c..abf739bfa1 100644
--- a/meta/recipes-support/libiconv/libiconv_1.11.1.bb
+++ b/meta/recipes-support/libiconv/libiconv_1.11.1.bb
@@ -21,8 +21,8 @@ S = "${WORKDIR}/libiconv-${PV}"
inherit autotools pkgconfig gettext
python __anonymous() {
- if d.getVar("TCLIBC", True) == "eglibc":
- raise bb.parse.SkipPackage("libiconv is provided for use with uClibc only - eglibc already provides iconv")
+ if d.getVar("TCLIBC", True) == "glibc":
+ raise bb.parse.SkipPackage("libiconv is provided for use with uClibc only - glibc already provides iconv")
}
EXTRA_OECONF += "--enable-shared --enable-static --enable-relocatable"
diff --git a/meta/recipes-support/libiconv/libiconv_1.14.bb b/meta/recipes-support/libiconv/libiconv_1.14.bb
index d5f47b4e34..1b6fe09bb7 100644
--- a/meta/recipes-support/libiconv/libiconv_1.14.bb
+++ b/meta/recipes-support/libiconv/libiconv_1.14.bb
@@ -25,8 +25,8 @@ inherit autotools pkgconfig gettext
python __anonymous() {
if d.getVar("TARGET_OS", True) != "linux":
return
- if d.getVar("TCLIBC", True) == "eglibc":
- raise bb.parse.SkipPackage("libiconv is provided for use with uClibc only - eglibc already provides iconv")
+ if d.getVar("TCLIBC", True) == "glibc":
+ raise bb.parse.SkipPackage("libiconv is provided for use with uClibc only - glibc already provides iconv")
}
EXTRA_OECONF += "--enable-shared --enable-static --enable-relocatable"
diff --git a/meta/recipes-support/libksba/libksba_1.3.0.bb b/meta/recipes-support/libksba/libksba_1.3.1.bb
index 13ad437629..6950dbbd85 100644
--- a/meta/recipes-support/libksba/libksba_1.3.0.bb
+++ b/meta/recipes-support/libksba/libksba_1.3.1.bb
@@ -6,7 +6,6 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=fd541d83f75d038c4e0617b672ed8bda \
file://COPYING.GPLv3;md5=2f31b266d3440dd7ee50f92cf67d8e6c \
file://COPYING.LGPLv3;md5=e6a600fd5e1d9cbde2d983680233ad02 \
"
-PR = "r1"
DEPENDS = "libgpg-error"
@@ -17,8 +16,8 @@ inherit autotools binconfig-disabled pkgconfig texinfo
SRC_URI = "ftp://ftp.gnupg.org/gcrypt/${BPN}/${BPN}-${PV}.tar.bz2 \
file://ksba-add-pkgconfig-support.patch"
-SRC_URI[md5sum] = "cd86fad9c9d360b2cf80449f8a4a4075"
-SRC_URI[sha256sum] = "5a61eed50550d4d0dcb47457ce7b6a90f8e719d42a3b25f7e79333e8cd721971"
+SRC_URI[md5sum] = "9be95245fcfa9d56f56853078ef2650b"
+SRC_URI[sha256sum] = "bc96b95516bd2b67f413bc8b5cc5a75a2583c6e666d24dfd0d5bcc6b1aab46f9"
do_configure_prepend () {
# Else these could be used in preference to those in aclocal-copy
diff --git a/meta/recipes-support/libpcre/libpcre/Makefile b/meta/recipes-support/libpcre/libpcre/Makefile
index 5419d71f7f..708d807d08 100644
--- a/meta/recipes-support/libpcre/libpcre/Makefile
+++ b/meta/recipes-support/libpcre/libpcre/Makefile
@@ -65,7 +65,7 @@ am__test_logs1 = $(TESTS:=.log)
am__test_logs2 = $(am__test_logs1:.log=.log)
TEST_LOGS = $(am__test_logs2:.test.log=.log)
MKDIR_P = /bin/mkdir -p
-PACKAGE_STRING = PCRE 8.34
+PACKAGE_STRING = PCRE 8.36
SHELL = /bin/sh
srcdir = .
top_srcdir = .
diff --git a/meta/recipes-support/libpcre/libpcre_8.35.bb b/meta/recipes-support/libpcre/libpcre_8.36.bb
index 92098c8c51..6aa0237aa4 100644
--- a/meta/recipes-support/libpcre/libpcre_8.35.bb
+++ b/meta/recipes-support/libpcre/libpcre_8.36.bb
@@ -14,8 +14,8 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/project/pcre/pcre/${PV}/pcre-${PV}.tar.bz2 \
file://Makefile \
"
-SRC_URI[md5sum] = "6aacb23986adccd9b3bc626c00979958"
-SRC_URI[sha256sum] = "a961c1c78befef263cc130756eeca7b674b4e73a81533293df44e4265236865b"
+SRC_URI[md5sum] = "b767bc9af0c20bc9c1fe403b0d41ad97"
+SRC_URI[sha256sum] = "ef833457de0c40e82f573e34528f43a751ff20257ad0e86d272ed5637eb845bb"
S = "${WORKDIR}/pcre-${PV}"
diff --git a/meta/recipes-support/libproxy/libproxy_0.4.11.bb b/meta/recipes-support/libproxy/libproxy_0.4.11.bb
index a53a197615..3367c85156 100644
--- a/meta/recipes-support/libproxy/libproxy_0.4.11.bb
+++ b/meta/recipes-support/libproxy/libproxy_0.4.11.bb
@@ -29,7 +29,7 @@ do_configure_prepend() {
}
python() {
- if bb.utils.contains("INCOMPATIBLE_LICENSE", "GPLv3", "x", "", d) == "x" or bb.utils.contains("DISTRO_FEATURES", "x11", "x", "", d) == "":
+ if incompatible_license_contains("GPLv3", "x", "", d) == "x" or bb.utils.contains("DISTRO_FEATURES", "x11", "x", "", d) == "":
d.setVar("EXTRA_OECMAKE", d.getVar("EXTRA_OECMAKE").replace("-DWITH_GNOME=yes", "-DWITH_GNOME=no"))
d.setVar("DEPENDS", " ".join(i for i in d.getVar("DEPENDS").split() if i != "gconf"))
}
diff --git a/meta/recipes-support/libunistring/libunistring/iconv-m4-remove-the-test-to-convert-euc-jp.patch b/meta/recipes-support/libunistring/libunistring/iconv-m4-remove-the-test-to-convert-euc-jp.patch
index a330c73412..d4489165bf 100644
--- a/meta/recipes-support/libunistring/libunistring/iconv-m4-remove-the-test-to-convert-euc-jp.patch
+++ b/meta/recipes-support/libunistring/libunistring/iconv-m4-remove-the-test-to-convert-euc-jp.patch
@@ -16,11 +16,11 @@ Signed-off-by: Jackie Huang <jackie.huang@windriver.com>
gnulib-m4/iconv.m4 | 11 -----------
1 files changed, 0 insertions(+), 11 deletions(-)
-diff --git a/gnulib-m4/iconv.m4 b/gnulib-m4/iconv.m4
-index f46ff14..de0a5e9 100644
---- a/gnulib-m4/iconv.m4
-+++ b/gnulib-m4/iconv.m4
-@@ -126,17 +126,6 @@ int main ()
+Index: libunistring-0.9.4/gnulib-m4/iconv.m4
+===================================================================
+--- libunistring-0.9.4.orig/gnulib-m4/iconv.m4 2014-11-03 17:41:29.755011917 +0000
++++ libunistring-0.9.4/gnulib-m4/iconv.m4 2014-11-03 17:43:03.795014480 +0000
+@@ -159,17 +159,6 @@
}
}
#endif
@@ -34,9 +34,7 @@ index f46ff14..de0a5e9 100644
- && iconv_open ("UTF-8", "IBM-eucJP") == (iconv_t)(-1)
- /* Try HP-UX names. */
- && iconv_open ("utf8", "eucJP") == (iconv_t)(-1))
-- return 1;
- return 0;
- }], [am_cv_func_iconv_works=yes], [am_cv_func_iconv_works=no],
- [case "$host_os" in
---
-1.7.4
+- result |= 16;
+ return result;
+ }]])],
+ [am_cv_func_iconv_works=yes],
diff --git a/meta/recipes-support/libunistring/libunistring/libunistring_fix_for_automake_1.12.patch b/meta/recipes-support/libunistring/libunistring/libunistring_fix_for_automake_1.12.patch
deleted file mode 100644
index 5d831d5491..0000000000
--- a/meta/recipes-support/libunistring/libunistring/libunistring_fix_for_automake_1.12.patch
+++ /dev/null
@@ -1,81 +0,0 @@
-Upstream-Status: Pending
-
-automake 1.12 has deprecated use of mkdir_p, and it recommends
-use of MKDIR_P instead. Changed the code to avoid these kind
-of warning-errors.
-
-| make[2]: /build/tmp/work/x86_64-linux/libunistring-native-0.9.3-r2/image/srv/home/nitin/builds2/build0/tmp/sysroots/x86_64-linux/usr/share/doc/libunistring: Command not found
-| make[2]: *** [install-html-split] Error 127
-
-Signed-Off-By: Nitin A Kamble <nitin.a.kamble@intel.com>
-2012/07/10
-
-Index: libunistring-0.9.3/doc/Makefile.am
-===================================================================
---- libunistring-0.9.3.orig/doc/Makefile.am
-+++ libunistring-0.9.3/doc/Makefile.am
-@@ -87,7 +87,7 @@ TEXI2DVI = @TEXI2DVI@ $(TEXINCLUDES)
- # The install-dvi target is already defined by automake.
-
- installdirs-dvi:
-- $(mkdir_p) $(DESTDIR)$(dvidir)
-+ $(MKDIR_P) $(DESTDIR)$(dvidir)
-
- uninstall-dvi:
- $(RM) $(DESTDIR)$(dvidir)/libunistring.dvi
-@@ -105,7 +105,7 @@ libunistring.ps: libunistring.dvi
- # The install-ps target is already defined by automake.
-
- installdirs-ps:
-- $(mkdir_p) $(DESTDIR)$(psdir)
-+ $(MKDIR_P) $(DESTDIR)$(psdir)
-
- uninstall-ps:
- $(RM) $(DESTDIR)$(psdir)/libunistring.ps
-@@ -120,7 +120,7 @@ TEXI2PDF = @TEXI2DVI@ --pdf $(TEXINCLUDE
- # The install-pdf target is already defined by automake.
-
- installdirs-pdf:
-- $(mkdir_p) $(DESTDIR)$(pdfdir)
-+ $(MKDIR_P) $(DESTDIR)$(pdfdir)
-
- uninstall-pdf:
- $(RM) $(DESTDIR)$(pdfdir)/libunistring.pdf
-@@ -151,17 +151,17 @@ libunistring_toc.html: libunistring.texi
- }
-
- install-html-monolithic: libunistring.html
-- $(mkdir_p) $(DESTDIR)$(htmldir)
-+ $(MKDIR_P) $(DESTDIR)$(htmldir)
- $(INSTALL_DATA) `if test -f libunistring.html; then echo .; else echo $(srcdir); fi`/libunistring.html $(DESTDIR)$(htmldir)/libunistring.html
-
- install-html-split: libunistring_toc.html
-- $(mkdir_p) $(DESTDIR)$(htmldir)
-+ $(MKDIR_P) $(DESTDIR)$(htmldir)
- for file in `if test -f libunistring_toc.html; then echo .; else echo $(srcdir); fi`/libunistring_*.html; do \
- $(INSTALL_DATA) $$file $(DESTDIR)$(htmldir)/`basename $$file`; \
- done
-
- installdirs-html:
-- $(mkdir_p) $(DESTDIR)$(htmldir)
-+ $(MKDIR_P) $(DESTDIR)$(htmldir)
-
- uninstall-html-monolithic:
- $(RM) $(DESTDIR)$(htmldir)/libunistring.html
-@@ -170,14 +170,14 @@ uninstall-html-split:
- $(RM) $(DESTDIR)$(htmldir)/libunistring_*.html
-
- dist-html-monolithic:
-- $(mkdir_p) $(distdir)/
-+ $(MKDIR_P) $(distdir)/
- file=libunistring.html; \
- if test -f $$file; then d=.; else d=$(srcdir); fi; \
- cp -p $$d/$$file $(distdir)/$$file || exit 1
-
- # We would like to put libunistring_*.html into EXTRA_DIST, but it doesn't work.
- dist-html-split: libunistring_toc.html
-- $(mkdir_p) $(distdir)/
-+ $(MKDIR_P) $(distdir)/
- file=libunistring_toc.html; \
- if test -f $$file; then d=.; else d=$(srcdir); fi; \
- for file in `cd $$d && echo libunistring_*.html`; do \
diff --git a/meta/recipes-support/libunistring/libunistring/parallelmake.patch b/meta/recipes-support/libunistring/libunistring/parallelmake.patch
deleted file mode 100644
index 21025b8e9d..0000000000
--- a/meta/recipes-support/libunistring/libunistring/parallelmake.patch
+++ /dev/null
@@ -1,26 +0,0 @@
-Fix a parallel make race where cdefs.h uses the unused-parameter.h header file
-but has no dependency listed. This can result in an empty cdefs.h file which
-results in a build failure like:
-
-| In file included from striconveh.c:30:0:
-| unistr.h:193:48: error: expected ';', ',' or ')' before '_GL_UNUSED_PARAMETER'
-| In file included from striconveh.c:30:0:
-| unistr.h:263:54: error: expected ';', ',' or ')' before '_GL_UNUSED_PARAMETER'
-
-Upstream-Status: Pending
-
-RP 2012/4/12
-
-Index: libunistring-0.9.3/lib/Makefile.am
-===================================================================
---- libunistring-0.9.3.orig/lib/Makefile.am 2012-04-12 07:45:41.450059820 +0000
-+++ libunistring-0.9.3/lib/Makefile.am 2012-04-12 07:48:45.434055559 +0000
-@@ -157,7 +157,7 @@
-
- # unistring/cdefs.h is not public, but is included by other header files.
- nobase_nodist_include_HEADERS += unistring/cdefs.h
--unistring/cdefs.h : unistring/cdefs.in.h
-+unistring/cdefs.h : unistring/cdefs.in.h $(UNUSED_PARAMETER_H)
- @MKDIR_P@ unistring
- rm -f $@-t $@
- sed -e '/definition of _GL_UNUSED_PARAMETER/r $(UNUSED_PARAMETER_H)' \
diff --git a/meta/recipes-support/libunistring/libunistring_0.9.3.bb b/meta/recipes-support/libunistring/libunistring_0.9.4.bb
index db1651b2a6..89b1dc26f1 100644
--- a/meta/recipes-support/libunistring/libunistring_0.9.3.bb
+++ b/meta/recipes-support/libunistring/libunistring_0.9.4.bb
@@ -18,15 +18,11 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504 \
file://COPYING.LIB;md5=6a6a8e020838b23406c81b19c1d46df6"
SRC_URI = "${GNU_MIRROR}/libunistring/libunistring-${PV}.tar.gz \
- file://parallelmake.patch \
- file://libunistring_fix_for_automake_1.12.patch \
file://iconv-m4-remove-the-test-to-convert-euc-jp.patch \
"
-SRC_URI[md5sum] = "db8eca3b64163abadf8c40e5cecc261f"
-SRC_URI[sha256sum] = "610d3ec724fbdaa654afe3cff20b9f4d504be3fd296fded2e0f7f764041006a3"
-
-PR = "r3"
+SRC_URI[md5sum] = "c24a6a3838d9ad4a41a62549312c4226"
+SRC_URI[sha256sum] = "f5246d63286a42902dc096d6d44541fbe4204b6c02d6d5d28b457c9882ddd8a6"
inherit autotools texinfo
BBCLASSEXTEND = "native nativesdk"
diff --git a/meta/recipes-support/libunwind/libunwind-1.1/AArch64-port.patch b/meta/recipes-support/libunwind/libunwind-1.1/AArch64-port.patch
new file mode 100644
index 0000000000..228ec322d5
--- /dev/null
+++ b/meta/recipes-support/libunwind/libunwind-1.1/AArch64-port.patch
@@ -0,0 +1,2529 @@
+From ac6c0a6535975f1dc2da6e4e2766614baac2a14a Mon Sep 17 00:00:00 2001
+From: Yvan Roux <yvan.roux@linaro.org>
+Date: Sat, 11 May 2013 09:18:23 -0600
+Subject: [PATCH] AArch64 port.
+
+Upstream-Status: Backport
+
+---
+ Makefile.am | 6
+ README | 1
+ configure.ac | 6
+ include/libunwind-aarch64.h | 187 ++++++++++++++++++++++
+ include/libunwind.h.in | 4
+ include/tdep-aarch64/dwarf-config.h | 52 ++++++
+ include/tdep-aarch64/jmpbuf.h | 33 +++
+ include/tdep-aarch64/libunwind_i.h | 294 +++++++++++++++++++++++++++++++++++
+ include/tdep/dwarf-config.h | 4
+ include/tdep/jmpbuf.h | 2
+ include/tdep/libunwind_i.h.in | 4
+ src/Makefile.am | 39 ++++
+ src/aarch64/Gcreate_addr_space.c | 60 +++++++
+ src/aarch64/Gget_proc_info.c | 39 ++++
+ src/aarch64/Gget_save_loc.c | 100 +++++++++++
+ src/aarch64/Gglobal.c | 57 ++++++
+ src/aarch64/Ginit.c | 187 ++++++++++++++++++++++
+ src/aarch64/Ginit_local.c | 55 ++++++
+ src/aarch64/Ginit_remote.c | 45 +++++
+ src/aarch64/Gis_signal_frame.c | 64 +++++++
+ src/aarch64/Gregs.c | 113 +++++++++++++
+ src/aarch64/Gresume.c | 177 +++++++++++++++++++++
+ src/aarch64/Gstep.c | 129 +++++++++++++++
+ src/aarch64/Lcreate_addr_space.c | 5
+ src/aarch64/Lget_proc_info.c | 5
+ src/aarch64/Lget_save_loc.c | 5
+ src/aarch64/Lglobal.c | 5
+ src/aarch64/Linit.c | 5
+ src/aarch64/Linit_local.c | 5
+ src/aarch64/Linit_remote.c | 5
+ src/aarch64/Lis_signal_frame.c | 5
+ src/aarch64/Lregs.c | 5
+ src/aarch64/Lresume.c | 5
+ src/aarch64/Lstep.c | 5
+ src/aarch64/gen-offsets.c | 68 ++++++++
+ src/aarch64/init.h | 127 +++++++++++++++
+ src/aarch64/is_fpreg.c | 32 +++
+ src/aarch64/offsets.h | 49 +++++
+ src/aarch64/regname.c | 106 ++++++++++++
+ src/aarch64/siglongjmp.S | 12 +
+ src/aarch64/unwind_i.h | 43 +++++
+ src/coredump/_UCD_access_reg_linux.c | 5
+ src/ptrace/_UPT_reg_offset.c | 36 ++++
+ 43 files changed, 2184 insertions(+), 7 deletions(-)
+ create mode 100644 include/libunwind-aarch64.h
+ create mode 100644 include/tdep-aarch64/dwarf-config.h
+ create mode 100644 include/tdep-aarch64/jmpbuf.h
+ create mode 100644 include/tdep-aarch64/libunwind_i.h
+ create mode 100644 src/aarch64/Gcreate_addr_space.c
+ create mode 100644 src/aarch64/Gget_proc_info.c
+ create mode 100644 src/aarch64/Gget_save_loc.c
+ create mode 100644 src/aarch64/Gglobal.c
+ create mode 100644 src/aarch64/Ginit.c
+ create mode 100644 src/aarch64/Ginit_local.c
+ create mode 100644 src/aarch64/Ginit_remote.c
+ create mode 100644 src/aarch64/Gis_signal_frame.c
+ create mode 100644 src/aarch64/Gregs.c
+ create mode 100644 src/aarch64/Gresume.c
+ create mode 100644 src/aarch64/Gstep.c
+ create mode 100644 src/aarch64/Lcreate_addr_space.c
+ create mode 100644 src/aarch64/Lget_proc_info.c
+ create mode 100644 src/aarch64/Lget_save_loc.c
+ create mode 100644 src/aarch64/Lglobal.c
+ create mode 100644 src/aarch64/Linit.c
+ create mode 100644 src/aarch64/Linit_local.c
+ create mode 100644 src/aarch64/Linit_remote.c
+ create mode 100644 src/aarch64/Lis_signal_frame.c
+ create mode 100644 src/aarch64/Lregs.c
+ create mode 100644 src/aarch64/Lresume.c
+ create mode 100644 src/aarch64/Lstep.c
+ create mode 100644 src/aarch64/gen-offsets.c
+ create mode 100644 src/aarch64/init.h
+ create mode 100644 src/aarch64/is_fpreg.c
+ create mode 100644 src/aarch64/offsets.h
+ create mode 100644 src/aarch64/regname.c
+ create mode 100644 src/aarch64/siglongjmp.S
+ create mode 100644 src/aarch64/unwind_i.h
+
+--- a/Makefile.am
++++ b/Makefile.am
+@@ -2,6 +2,9 @@ include_HEADERS = include/libunwind-dyna
+ include/libunwind-ptrace.h \
+ include/libunwind-coredump.h
+
++if ARCH_AARCH64
++include_HEADERS += include/libunwind-aarch64.h
++endif
+ if ARCH_ARM
+ include_HEADERS += include/libunwind-arm.h
+ endif
+@@ -41,6 +44,9 @@ SUBDIRS = src tests doc
+ noinst_HEADERS = include/dwarf.h include/dwarf_i.h include/dwarf-eh.h \
+ include/compiler.h include/libunwind_i.h include/mempool.h \
+ include/remote.h \
++ include/tdep-aarch64/dwarf-config.h \
++ include/tdep-aarch64/jmpbuf.h \
++ include/tdep-aarch64/libunwind_i.h \
+ include/tdep-arm/dwarf-config.h include/tdep-arm/ex_tables.h \
+ include/tdep-arm/jmpbuf.h include/tdep-arm/libunwind_i.h \
+ include/tdep-ia64/jmpbuf.h include/tdep-ia64/rse.h \
+--- a/README
++++ b/README
+@@ -9,6 +9,7 @@ several architecture/operating-system co
+ Linux/IA-64: Fully tested and supported.
+ Linux/PARISC: Works well, but C library missing unwind-info.
+ HP-UX/IA-64: Mostly works but known to have some serious limitations.
++ Linux/AArch64: Newly added.
+ Linux/PPC64: Newly added.
+ Linux/SuperH: Newly added.
+ FreeBSD/i386: Newly added.
+--- a/configure.ac
++++ b/configure.ac
+@@ -104,7 +104,7 @@ SET_ARCH([$target_cpu],[target_arch])
+
+ AC_ARG_ENABLE(coredump,
+ AS_HELP_STRING([--enable-coredump],[building libunwind-coredump library]),,
+- [AS_CASE([$host_arch], [arm*|mips*|sh*|x86*], [enable_coredump=yes], [enable_coredump=no])]
++ [AS_CASE([$host_arch], [aarch64*|arm*|mips*|sh*|x86*], [enable_coredump=yes], [enable_coredump=no])]
+ )
+
+ AC_MSG_CHECKING([if we should build libunwind-coredump])
+@@ -121,6 +121,7 @@ AC_MSG_RESULT([$target_os])
+
+ AM_CONDITIONAL(BUILD_COREDUMP, test x$enable_coredump = xyes)
+ AM_CONDITIONAL(REMOTE_ONLY, test x$target_arch != x$host_arch)
++AM_CONDITIONAL(ARCH_AARCH64, test x$target_arch = xaarch64)
+ AM_CONDITIONAL(ARCH_ARM, test x$target_arch = xarm)
+ AM_CONDITIONAL(ARCH_IA64, test x$target_arch = xia64)
+ AM_CONDITIONAL(ARCH_HPPA, test x$target_arch = xhppa)
+@@ -137,7 +138,7 @@ AM_CONDITIONAL(OS_FREEBSD, expr x$target
+ AC_MSG_CHECKING([for ELF helper width])
+ case "${target_arch}" in
+ (arm|hppa|ppc32|x86|sh) use_elf32=yes; AC_MSG_RESULT([32]);;
+-(ia64|ppc64|x86_64) use_elf64=yes; AC_MSG_RESULT([64]);;
++(aarch64|ia64|ppc64|x86_64) use_elf64=yes; AC_MSG_RESULT([64]);;
+ (mips) use_elfxx=yes; AC_MSG_RESULT([xx]);;
+ *) AC_MSG_ERROR([Unknown ELF target: ${target_arch}])
+ esac
+@@ -186,6 +187,7 @@ AS_HELP_STRING([--enable-cxx-exceptions]
+ # C++ exception handling doesn't work too well on x86
+ case $target_arch in
+ x86*) enable_cxx_exceptions=no;;
++ aarch64*) enable_cxx_exceptions=no;;
+ arm*) enable_cxx_exceptions=no;;
+ mips*) enable_cxx_exceptions=no;;
+ *) enable_cxx_exceptions=yes;;
+--- /dev/null
++++ b/include/libunwind-aarch64.h
+@@ -0,0 +1,187 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2001-2004 Hewlett-Packard Co
++ Contributed by David Mosberger-Tang <davidm@hpl.hp.com>
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#ifndef LIBUNWIND_H
++#define LIBUNWIND_H
++
++#if defined(__cplusplus) || defined(c_plusplus)
++extern "C" {
++#endif
++
++#include <inttypes.h>
++#include <stddef.h>
++#include <ucontext.h>
++
++#define UNW_TARGET aarch64
++#define UNW_TARGET_AARCH64 1
++
++#define _U_TDEP_QP_TRUE 0 /* see libunwind-dynamic.h */
++
++/* This needs to be big enough to accommodate "struct cursor", while
++ leaving some slack for future expansion. Changing this value will
++ require recompiling all users of this library. Stack allocation is
++ relatively cheap and unwind-state copying is relatively rare, so we
++ want to err on making it rather too big than too small. */
++
++#define UNW_TDEP_CURSOR_LEN 4096
++
++typedef uint64_t unw_word_t;
++typedef int64_t unw_sword_t;
++
++typedef long double unw_tdep_fpreg_t;
++
++typedef struct
++ {
++ /* no aarch64-specific auxiliary proc-info */
++ }
++unw_tdep_proc_info_t;
++
++typedef enum
++ {
++ /* 64-bit general registers. */
++ UNW_AARCH64_X0,
++ UNW_AARCH64_X1,
++ UNW_AARCH64_X2,
++ UNW_AARCH64_X3,
++ UNW_AARCH64_X4,
++ UNW_AARCH64_X5,
++ UNW_AARCH64_X6,
++ UNW_AARCH64_X7,
++ UNW_AARCH64_X8,
++
++ /* Temporary registers. */
++ UNW_AARCH64_X9,
++ UNW_AARCH64_X10,
++ UNW_AARCH64_X11,
++ UNW_AARCH64_X12,
++ UNW_AARCH64_X13,
++ UNW_AARCH64_X14,
++ UNW_AARCH64_X15,
++
++ /* Intra-procedure-call temporary registers. */
++ UNW_AARCH64_X16,
++ UNW_AARCH64_X17,
++
++ /* Callee-saved registers. */
++ UNW_AARCH64_X18,
++ UNW_AARCH64_X19,
++ UNW_AARCH64_X20,
++ UNW_AARCH64_X21,
++ UNW_AARCH64_X22,
++ UNW_AARCH64_X23,
++ UNW_AARCH64_X24,
++ UNW_AARCH64_X25,
++ UNW_AARCH64_X26,
++ UNW_AARCH64_X27,
++ UNW_AARCH64_X28,
++
++ /* 64-bit frame pointer. */
++ UNW_AARCH64_X29,
++
++ /* 64-bit link register. */
++ UNW_AARCH64_X30,
++
++ /* 64-bit stack pointer. */
++ UNW_AARCH64_SP = 31,
++ UNW_AARCH64_PC,
++ UNW_AARCH64_PSTATE,
++
++ /* 128-bit FP/Advanced SIMD registers. */
++ UNW_AARCH64_V0 = 64,
++ UNW_AARCH64_V1,
++ UNW_AARCH64_V2,
++ UNW_AARCH64_V3,
++ UNW_AARCH64_V4,
++ UNW_AARCH64_V5,
++ UNW_AARCH64_V6,
++ UNW_AARCH64_V7,
++ UNW_AARCH64_V8,
++ UNW_AARCH64_V9,
++ UNW_AARCH64_V10,
++ UNW_AARCH64_V11,
++ UNW_AARCH64_V12,
++ UNW_AARCH64_V13,
++ UNW_AARCH64_V14,
++ UNW_AARCH64_V15,
++ UNW_AARCH64_V16,
++ UNW_AARCH64_V17,
++ UNW_AARCH64_V18,
++ UNW_AARCH64_V19,
++ UNW_AARCH64_V20,
++ UNW_AARCH64_V21,
++ UNW_AARCH64_V22,
++ UNW_AARCH64_V23,
++ UNW_AARCH64_V24,
++ UNW_AARCH64_V25,
++ UNW_AARCH64_V26,
++ UNW_AARCH64_V27,
++ UNW_AARCH64_V28,
++ UNW_AARCH64_V29,
++ UNW_AARCH64_V30,
++ UNW_AARCH64_V31,
++
++ UNW_AARCH64_FPSR,
++ UNW_AARCH64_FPCR,
++
++ /* For AArch64, the CFA is the value of SP (x31) at the call site of the
++ previous frame. */
++ UNW_AARCH64_CFA = UNW_AARCH64_SP,
++
++ UNW_TDEP_LAST_REG = UNW_AARCH64_FPCR,
++
++ UNW_TDEP_IP = UNW_AARCH64_X30,
++ UNW_TDEP_SP = UNW_AARCH64_SP,
++ UNW_TDEP_EH = UNW_AARCH64_X0,
++
++ }
++aarch64_regnum_t;
++
++/* Use R0 through R3 to pass exception handling information. */
++#define UNW_TDEP_NUM_EH_REGS 4
++
++typedef struct unw_tdep_save_loc
++ {
++ /* Additional target-dependent info on a save location. */
++ }
++unw_tdep_save_loc_t;
++
++
++/* On AArch64, we can directly use ucontext_t as the unwind context. */
++typedef ucontext_t unw_tdep_context_t;
++
++#include "libunwind-common.h"
++#include "libunwind-dynamic.h"
++
++#define unw_tdep_getcontext(uc) (getcontext (uc), 0)
++#define unw_tdep_is_fpreg UNW_ARCH_OBJ(is_fpreg)
++
++extern int unw_tdep_is_fpreg (int);
++
++#if defined(__cplusplus) || defined(c_plusplus)
++}
++#endif
++
++#endif /* LIBUNWIND_H */
+--- a/include/libunwind.h.in
++++ b/include/libunwind.h.in
+@@ -3,7 +3,9 @@
+
+ #ifndef UNW_REMOTE_ONLY
+
+-#if defined __arm__
++#if defined __aarch64__
++#include "libunwind-aarch64.h"
++#elif defined __arm__
+ # include "libunwind-arm.h"
+ #elif defined __hppa__
+ # include "libunwind-hppa.h"
+--- /dev/null
++++ b/include/tdep-aarch64/dwarf-config.h
+@@ -0,0 +1,52 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++ Copyright (C) 2012 Tommi Rantala <tt.rantala@gmail.com>
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#ifndef dwarf_config_h
++#define dwarf_config_h
++
++/* This matches the value used by GCC (see
++   gcc/config/aarch64/aarch64.h:DWARF_FRAME_REGISTERS). */
++#define DWARF_NUM_PRESERVED_REGS 97
++
++/* Return TRUE if the ADDR_SPACE uses big-endian byte-order. */
++#define dwarf_is_big_endian(addr_space) 0
++
++#define dwarf_to_unw_regnum(reg) (((reg) <= UNW_AARCH64_V31) ? (reg) : 0)
++
++/* Convert a pointer to a dwarf_cursor structure to a pointer to
++ unw_cursor_t. */
++#define dwarf_to_cursor(c) ((unw_cursor_t *) (c))
++
++typedef struct dwarf_loc
++ {
++ unw_word_t val;
++#ifndef UNW_LOCAL_ONLY
++ unw_word_t type; /* see DWARF_LOC_TYPE_* macros. */
++#endif
++ }
++dwarf_loc_t;
++
++#endif /* dwarf_config_h */
+--- /dev/null
++++ b/include/tdep-aarch64/jmpbuf.h
+@@ -0,0 +1,33 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++/* Use glibc's jump-buffer indices; NPTL peeks at SP: */
++
++/* FIXME for AArch64 */
++
++#define JB_SP 13
++#define JB_RP 14
++#define JB_MASK_SAVED 15
++#define JB_MASK 16
+--- /dev/null
++++ b/include/tdep-aarch64/libunwind_i.h
+@@ -0,0 +1,294 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2001-2005 Hewlett-Packard Co
++ Contributed by David Mosberger-Tang <davidm@hpl.hp.com>
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#ifndef AARCH64_LIBUNWIND_I_H
++#define AARCH64_LIBUNWIND_I_H
++
++/* Target-dependent definitions that are internal to libunwind but need
++ to be shared with target-independent code. */
++
++#include <stdlib.h>
++#include <libunwind.h>
++
++#include "elf64.h"
++#include "mempool.h"
++#include "dwarf.h"
++
++typedef struct
++ {
++ /* no aarch64-specific fast trace */
++ }
++unw_tdep_frame_t;
++
++#ifdef UNW_LOCAL_ONLY
++
++typedef unw_word_t aarch64_loc_t;
++
++#else /* !UNW_LOCAL_ONLY */
++
++typedef struct aarch64_loc
++ {
++ unw_word_t w0, w1;
++ }
++aarch64_loc_t;
++
++#endif /* !UNW_LOCAL_ONLY */
++
++struct unw_addr_space
++ {
++ struct unw_accessors acc;
++ int big_endian;
++ unw_caching_policy_t caching_policy;
++#ifdef HAVE_ATOMIC_OPS_H
++ AO_t cache_generation;
++#else
++ uint32_t cache_generation;
++#endif
++ unw_word_t dyn_generation; /* see dyn-common.h */
++ unw_word_t dyn_info_list_addr; /* (cached) dyn_info_list_addr */
++ struct dwarf_rs_cache global_cache;
++ struct unw_debug_frame_list *debug_frames;
++ };
++
++struct cursor
++ {
++ struct dwarf_cursor dwarf; /* must be first */
++ enum
++ {
++ AARCH64_SCF_NONE,
++ AARCH64_SCF_LINUX_RT_SIGFRAME,
++ }
++ sigcontext_format;
++ unw_word_t sigcontext_addr;
++ unw_word_t sigcontext_sp;
++ unw_word_t sigcontext_pc;
++ };
++
++#define DWARF_GET_LOC(l) ((l).val)
++
++#ifdef UNW_LOCAL_ONLY
++# define DWARF_NULL_LOC DWARF_LOC (0, 0)
++# define DWARF_IS_NULL_LOC(l) (DWARF_GET_LOC (l) == 0)
++# define DWARF_LOC(r, t) ((dwarf_loc_t) { .val = (r) })
++# define DWARF_IS_REG_LOC(l) 0
++# define DWARF_REG_LOC(c,r) (DWARF_LOC((unw_word_t) \
++ tdep_uc_addr((c)->as_arg, (r)), 0))
++# define DWARF_MEM_LOC(c,m) DWARF_LOC ((m), 0)
++# define DWARF_FPREG_LOC(c,r) (DWARF_LOC((unw_word_t) \
++ tdep_uc_addr((c)->as_arg, (r)), 0))
++
++static inline int
++dwarf_getfp (struct dwarf_cursor *c, dwarf_loc_t loc, unw_fpreg_t *val)
++{
++ if (!DWARF_GET_LOC (loc))
++ return -1;
++ *val = *(unw_fpreg_t *) DWARF_GET_LOC (loc);
++ return 0;
++}
++
++static inline int
++dwarf_putfp (struct dwarf_cursor *c, dwarf_loc_t loc, unw_fpreg_t val)
++{
++ if (!DWARF_GET_LOC (loc))
++ return -1;
++ *(unw_fpreg_t *) DWARF_GET_LOC (loc) = val;
++ return 0;
++}
++
++static inline int
++dwarf_get (struct dwarf_cursor *c, dwarf_loc_t loc, unw_word_t *val)
++{
++ if (!DWARF_GET_LOC (loc))
++ return -1;
++ *val = *(unw_word_t *) DWARF_GET_LOC (loc);
++ return 0;
++}
++
++static inline int
++dwarf_put (struct dwarf_cursor *c, dwarf_loc_t loc, unw_word_t val)
++{
++ if (!DWARF_GET_LOC (loc))
++ return -1;
++ *(unw_word_t *) DWARF_GET_LOC (loc) = val;
++ return 0;
++}
++
++#else /* !UNW_LOCAL_ONLY */
++# define DWARF_LOC_TYPE_FP (1 << 0)
++# define DWARF_LOC_TYPE_REG (1 << 1)
++# define DWARF_NULL_LOC DWARF_LOC (0, 0)
++# define DWARF_IS_NULL_LOC(l) \
++ ({ dwarf_loc_t _l = (l); _l.val == 0 && _l.type == 0; })
++# define DWARF_LOC(r, t) ((dwarf_loc_t) { .val = (r), .type = (t) })
++# define DWARF_IS_REG_LOC(l) (((l).type & DWARF_LOC_TYPE_REG) != 0)
++# define DWARF_IS_FP_LOC(l) (((l).type & DWARF_LOC_TYPE_FP) != 0)
++# define DWARF_REG_LOC(c,r) DWARF_LOC((r), DWARF_LOC_TYPE_REG)
++# define DWARF_MEM_LOC(c,m) DWARF_LOC ((m), 0)
++# define DWARF_FPREG_LOC(c,r) DWARF_LOC((r), (DWARF_LOC_TYPE_REG \
++ | DWARF_LOC_TYPE_FP))
++
++static inline int
++dwarf_getfp (struct dwarf_cursor *c, dwarf_loc_t loc, unw_fpreg_t *val)
++{
++ char *valp = (char *) &val;
++ unw_word_t addr;
++ int ret;
++
++ if (DWARF_IS_NULL_LOC (loc))
++ return -UNW_EBADREG;
++
++ if (DWARF_IS_REG_LOC (loc))
++ return (*c->as->acc.access_fpreg) (c->as, DWARF_GET_LOC (loc),
++ val, 0, c->as_arg);
++
++ addr = DWARF_GET_LOC (loc);
++ if ((ret = (*c->as->acc.access_mem) (c->as, addr + 0, (unw_word_t *) valp,
++ 0, c->as_arg)) < 0)
++ return ret;
++
++ return (*c->as->acc.access_mem) (c->as, addr + 4, (unw_word_t *) valp + 1, 0,
++ c->as_arg);
++}
++
++static inline int
++dwarf_putfp (struct dwarf_cursor *c, dwarf_loc_t loc, unw_fpreg_t val)
++{
++ char *valp = (char *) &val;
++ unw_word_t addr;
++ int ret;
++
++ if (DWARF_IS_NULL_LOC (loc))
++ return -UNW_EBADREG;
++
++ if (DWARF_IS_REG_LOC (loc))
++ return (*c->as->acc.access_fpreg) (c->as, DWARF_GET_LOC (loc),
++ &val, 1, c->as_arg);
++
++ addr = DWARF_GET_LOC (loc);
++ if ((ret = (*c->as->acc.access_mem) (c->as, addr + 0, (unw_word_t *) valp,
++ 1, c->as_arg)) < 0)
++ return ret;
++
++ return (*c->as->acc.access_mem) (c->as, addr + 4, (unw_word_t *) valp + 1,
++ 1, c->as_arg);
++}
++
++static inline int
++dwarf_get (struct dwarf_cursor *c, dwarf_loc_t loc, unw_word_t *val)
++{
++ if (DWARF_IS_NULL_LOC (loc))
++ return -UNW_EBADREG;
++
++ /* If a code-generator were to save a value of type unw_word_t in a
++ floating-point register, we would have to support this case. I
++ suppose it could happen with MMX registers, but does it really
++ happen? */
++ assert (!DWARF_IS_FP_LOC (loc));
++
++ if (DWARF_IS_REG_LOC (loc))
++ return (*c->as->acc.access_reg) (c->as, DWARF_GET_LOC (loc), val,
++ 0, c->as_arg);
++ else
++ return (*c->as->acc.access_mem) (c->as, DWARF_GET_LOC (loc), val,
++ 0, c->as_arg);
++}
++
++static inline int
++dwarf_put (struct dwarf_cursor *c, dwarf_loc_t loc, unw_word_t val)
++{
++ if (DWARF_IS_NULL_LOC (loc))
++ return -UNW_EBADREG;
++
++ /* If a code-generator were to save a value of type unw_word_t in a
++ floating-point register, we would have to support this case. I
++ suppose it could happen with MMX registers, but does it really
++ happen? */
++ assert (!DWARF_IS_FP_LOC (loc));
++
++ if (DWARF_IS_REG_LOC (loc))
++ return (*c->as->acc.access_reg) (c->as, DWARF_GET_LOC (loc), &val,
++ 1, c->as_arg);
++ else
++ return (*c->as->acc.access_mem) (c->as, DWARF_GET_LOC (loc), &val,
++ 1, c->as_arg);
++}
++
++#endif /* !UNW_LOCAL_ONLY */
++
++
++
++#define tdep_getcontext_trace unw_getcontext
++#define tdep_init_done UNW_OBJ(init_done)
++#define tdep_init UNW_OBJ(init)
++/* Platforms that support UNW_INFO_FORMAT_TABLE need to define
++ tdep_search_unwind_table. */
++#define tdep_search_unwind_table dwarf_search_unwind_table
++#define tdep_find_unwind_table dwarf_find_unwind_table
++#define tdep_uc_addr UNW_OBJ(uc_addr)
++#define tdep_get_elf_image UNW_ARCH_OBJ(get_elf_image)
++#define tdep_access_reg UNW_OBJ(access_reg)
++#define tdep_access_fpreg UNW_OBJ(access_fpreg)
++#define tdep_fetch_frame(c,ip,n) do {} while(0)
++#define tdep_cache_frame(c,rs) do {} while(0)
++#define tdep_reuse_frame(c,rs) do {} while(0)
++#define tdep_stash_frame(c,rs) do {} while(0)
++#define tdep_trace(cur,addr,n) (-UNW_ENOINFO)
++
++#ifdef UNW_LOCAL_ONLY
++# define tdep_find_proc_info(c,ip,n) \
++ dwarf_find_proc_info((c)->as, (ip), &(c)->pi, (n), \
++ (c)->as_arg)
++# define tdep_put_unwind_info(as,pi,arg) \
++ dwarf_put_unwind_info((as), (pi), (arg))
++#else
++# define tdep_find_proc_info(c,ip,n) \
++ (*(c)->as->acc.find_proc_info)((c)->as, (ip), &(c)->pi, (n), \
++ (c)->as_arg)
++# define tdep_put_unwind_info(as,pi,arg) \
++ (*(as)->acc.put_unwind_info)((as), (pi), (arg))
++#endif
++
++#define tdep_get_as(c) ((c)->dwarf.as)
++#define tdep_get_as_arg(c) ((c)->dwarf.as_arg)
++#define tdep_get_ip(c) ((c)->dwarf.ip)
++#define tdep_big_endian(as) ((as)->big_endian)
++
++extern int tdep_init_done;
++
++extern void tdep_init (void);
++extern int tdep_search_unwind_table (unw_addr_space_t as, unw_word_t ip,
++ unw_dyn_info_t *di, unw_proc_info_t *pi,
++ int need_unwind_info, void *arg);
++extern void *tdep_uc_addr (unw_tdep_context_t *uc, int reg);
++extern int tdep_get_elf_image (struct elf_image *ei, pid_t pid, unw_word_t ip,
++ unsigned long *segbase, unsigned long *mapoff,
++ char *path, size_t pathlen);
++extern int tdep_access_reg (struct cursor *c, unw_regnum_t reg,
++ unw_word_t *valp, int write);
++extern int tdep_access_fpreg (struct cursor *c, unw_regnum_t reg,
++ unw_fpreg_t *valp, int write);
++
++#endif /* AARCH64_LIBUNWIND_I_H */
+--- a/include/tdep/dwarf-config.h
++++ b/include/tdep/dwarf-config.h
+@@ -1,7 +1,9 @@
+ /* Provide a real file - not a symlink - as it would cause multiarch conflicts
+ when multiple different arch releases are installed simultaneously. */
+
+-#if defined __arm__
++#if defined __aarch64__
++# include "tdep-aarch64/dwarf-config.h"
++#elif defined __arm__
+ # include "tdep-arm/dwarf-config.h"
+ #elif defined __hppa__
+ # include "tdep-hppa/dwarf-config.h"
+--- a/include/tdep/jmpbuf.h
++++ b/include/tdep/jmpbuf.h
+@@ -3,6 +3,8 @@
+
+ #ifndef UNW_REMOTE_ONLY
+
+-#if defined __arm__
++#if defined __aarch64__
++# include "tdep-aarch64/jmpbuf.h"
++#elif defined __arm__
+ # include "tdep-arm/jmpbuf.h"
+ #elif defined __hppa__
+--- a/include/tdep/libunwind_i.h.in
++++ b/include/tdep/libunwind_i.h.in
+@@ -3,7 +3,9 @@
+
+ #ifndef UNW_REMOTE_ONLY
+
+-#if defined __arm__
++#if defined __aarch64__
++# include "tdep-aarch64/libunwind_i.h"
++#elif defined __arm__
+ # include "tdep-arm/libunwind_i.h"
+ #elif defined __hppa__
+ # include "tdep-hppa/libunwind_i.h"
+--- a/src/Makefile.am
++++ b/src/Makefile.am
+@@ -170,6 +170,28 @@ libunwind_elfxx_la_SOURCES = elfxx.c
+ noinst_LTLIBRARIES += $(LIBUNWIND_ELF)
+ libunwind_la_LIBADD += $(LIBUNWIND_ELF)
+
++# The list of files that go into libunwind and libunwind-aarch64:
++noinst_HEADERS += aarch64/init.h aarch64/offsets.h aarch64/unwind_i.h
++libunwind_la_SOURCES_aarch64_common = $(libunwind_la_SOURCES_common) \
++ aarch64/is_fpreg.c aarch64/regname.c
++
++# The list of files that go into libunwind:
++libunwind_la_SOURCES_aarch64 = $(libunwind_la_SOURCES_aarch64_common) \
++ $(libunwind_la_SOURCES_local) \
++ aarch64/Lcreate_addr_space.c aarch64/Lget_proc_info.c \
++ aarch64/Lget_save_loc.c aarch64/Lglobal.c aarch64/Linit.c \
++ aarch64/Linit_local.c aarch64/Linit_remote.c \
++ aarch64/Lis_signal_frame.c aarch64/Lregs.c aarch64/Lresume.c \
++ aarch64/Lstep.c
++
++libunwind_aarch64_la_SOURCES_aarch64 = $(libunwind_la_SOURCES_aarch64_common) \
++ $(libunwind_la_SOURCES_generic) \
++ aarch64/Gcreate_addr_space.c aarch64/Gget_proc_info.c \
++ aarch64/Gget_save_loc.c aarch64/Gglobal.c aarch64/Ginit.c \
++ aarch64/Ginit_local.c aarch64/Ginit_remote.c \
++ aarch64/Gis_signal_frame.c aarch64/Gregs.c aarch64/Gresume.c \
++ aarch64/Gstep.c
++
+ # The list of files that go into libunwind and libunwind-arm:
+ noinst_HEADERS += arm/init.h arm/offsets.h arm/unwind_i.h
+ libunwind_la_SOURCES_arm_common = $(libunwind_la_SOURCES_common) \
+@@ -418,6 +440,18 @@ if OS_FREEBSD
+ libunwind_coredump_la_SOURCES += coredump/_UCD_access_reg_freebsd.c
+ endif
+
++if ARCH_AARCH64
++ lib_LTLIBRARIES += libunwind-aarch64.la
++ libunwind_la_SOURCES = $(libunwind_la_SOURCES_aarch64)
++ libunwind_aarch64_la_SOURCES = $(libunwind_aarch64_la_SOURCES_aarch64)
++ libunwind_aarch64_la_LDFLAGS = $(COMMON_SO_LDFLAGS) -version-info $(SOVERSION)
++ libunwind_aarch64_la_LIBADD = libunwind-dwarf-generic.la
++ libunwind_aarch64_la_LIBADD += libunwind-elf64.la
++if !REMOTE_ONLY
++ libunwind_aarch64_la_LIBADD += libunwind.la -lc
++endif
++ libunwind_setjmp_la_SOURCES += aarch64/siglongjmp.S
++else
+ if ARCH_ARM
+ lib_LTLIBRARIES += libunwind-arm.la
+ libunwind_la_SOURCES = $(libunwind_la_SOURCES_arm)
+@@ -545,6 +579,7 @@ endif # ARCH_MIPS
+ endif # ARCH_HPPA
+ endif # ARCH_IA64
+ endif # ARCH_ARM
++endif # ARCH_AARCH64
+
+ # libunwind-setjmp depends on libunwind-$(arch). Therefore must be added
+ # at the end.
+@@ -567,7 +602,8 @@ AM_CPPFLAGS = -I$(top_srcdir)/include -I
+ AM_CCASFLAGS = $(AM_CPPFLAGS)
+ noinst_HEADERS += unwind/unwind-internal.h
+
+-EXTRA_DIST = $(libunwind_la_SOURCES_arm) \
++EXTRA_DIST = $(libunwind_la_SOURCES_aarch64) \
++ $(libunwind_la_SOURCES_arm) \
+ $(libunwind_la_SOURCES_hppa) \
+ $(libunwind_la_SOURCES_ia64) \
+ $(libunwind_la_SOURCES_mips) \
+@@ -579,6 +615,7 @@ EXTRA_DIST = $(libunwind_la_SOURCES_arm)
+ $(libunwind_la_SOURCES_common) \
+ $(libunwind_la_SOURCES_local) \
+ $(libunwind_la_SOURCES_generic) \
++ $(libunwind_aarch64_la_SOURCES_aarch64) \
+ $(libunwind_arm_la_SOURCES_arm) \
+ $(libunwind_hppa_la_SOURCES_hppa) \
+ $(libunwind_ia64_la_SOURCES_ia64) \
+--- /dev/null
++++ b/src/aarch64/Gcreate_addr_space.c
+@@ -0,0 +1,60 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2012 Tommi Rantala <tt.rantala@gmail.com>
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include <string.h>
++#include <stdlib.h>
++
++#include "unwind_i.h"
++
++PROTECTED unw_addr_space_t
++unw_create_addr_space (unw_accessors_t *a, int byte_order)
++{
++#ifdef UNW_LOCAL_ONLY
++ return NULL;
++#else
++ unw_addr_space_t as;
++
++ /* AArch64 supports little-endian and big-endian. */
++ if (byte_order != 0 && byte_order != __LITTLE_ENDIAN
++ && byte_order != __BIG_ENDIAN)
++ return NULL;
++
++ as = malloc (sizeof (*as));
++ if (!as)
++ return NULL;
++
++ memset (as, 0, sizeof (*as));
++
++ as->acc = *a;
++
++ /* Default to little-endian for AArch64. */
++ if (byte_order == 0 || byte_order == __LITTLE_ENDIAN)
++ as->big_endian = 0;
++ else
++ as->big_endian = 1;
++
++ return as;
++#endif
++}
+--- /dev/null
++++ b/src/aarch64/Gget_proc_info.c
+@@ -0,0 +1,39 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "unwind_i.h"
++
++PROTECTED int
++unw_get_proc_info (unw_cursor_t *cursor, unw_proc_info_t *pi)
++{
++ struct cursor *c = (struct cursor *) cursor;
++ int ret;
++
++ ret = dwarf_make_proc_info (&c->dwarf);
++ if (ret < 0)
++ return ret;
++
++ *pi = c->dwarf.pi;
++ return 0;
++}
+--- /dev/null
++++ b/src/aarch64/Gget_save_loc.c
+@@ -0,0 +1,100 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++ Copyright (C) 2012 Tommi Rantala <tt.rantala@gmail.com>
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "unwind_i.h"
++
++PROTECTED int
++unw_get_save_loc (unw_cursor_t *cursor, int reg, unw_save_loc_t *sloc)
++{
++ struct cursor *c = (struct cursor *) cursor;
++ dwarf_loc_t loc;
++
++ switch (reg)
++ {
++ case UNW_AARCH64_X0:
++ case UNW_AARCH64_X1:
++ case UNW_AARCH64_X2:
++ case UNW_AARCH64_X3:
++ case UNW_AARCH64_X4:
++ case UNW_AARCH64_X5:
++ case UNW_AARCH64_X6:
++ case UNW_AARCH64_X7:
++ case UNW_AARCH64_X8:
++ case UNW_AARCH64_X9:
++ case UNW_AARCH64_X10:
++ case UNW_AARCH64_X11:
++ case UNW_AARCH64_X12:
++ case UNW_AARCH64_X13:
++ case UNW_AARCH64_X14:
++ case UNW_AARCH64_X15:
++ case UNW_AARCH64_X16:
++ case UNW_AARCH64_X17:
++ case UNW_AARCH64_X18:
++ case UNW_AARCH64_X19:
++ case UNW_AARCH64_X20:
++ case UNW_AARCH64_X21:
++ case UNW_AARCH64_X22:
++ case UNW_AARCH64_X23:
++ case UNW_AARCH64_X24:
++ case UNW_AARCH64_X25:
++ case UNW_AARCH64_X26:
++ case UNW_AARCH64_X27:
++ case UNW_AARCH64_X28:
++ case UNW_AARCH64_X29:
++ case UNW_AARCH64_X30:
++ case UNW_AARCH64_SP:
++ case UNW_AARCH64_PC:
++ case UNW_AARCH64_PSTATE:
++ loc = c->dwarf.loc[reg];
++ break;
++
++ default:
++ loc = DWARF_NULL_LOC; /* default to "not saved" */
++ break;
++ }
++
++ memset (sloc, 0, sizeof (*sloc));
++
++ if (DWARF_IS_NULL_LOC (loc))
++ {
++ sloc->type = UNW_SLT_NONE;
++ return 0;
++ }
++
++#if !defined(UNW_LOCAL_ONLY)
++ if (DWARF_IS_REG_LOC (loc))
++ {
++ sloc->type = UNW_SLT_REG;
++ sloc->u.regnum = DWARF_GET_LOC (loc);
++ }
++ else
++#endif
++ {
++ sloc->type = UNW_SLT_MEMORY;
++ sloc->u.addr = DWARF_GET_LOC (loc);
++ }
++ return 0;
++}
+--- /dev/null
++++ b/src/aarch64/Gglobal.c
+@@ -0,0 +1,57 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++ Copyright (C) 2012 Tommi Rantala <tt.rantala@gmail.com>
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "unwind_i.h"
++#include "dwarf_i.h"
++
++HIDDEN define_lock (aarch64_lock);
++HIDDEN int tdep_init_done;
++
++HIDDEN void
++tdep_init (void)
++{
++ intrmask_t saved_mask;
++
++ sigfillset (&unwi_full_mask);
++
++ lock_acquire (&aarch64_lock, saved_mask);
++ {
++ if (tdep_init_done)
++ /* another thread beat us to it... */
++ goto out;
++
++ mi_init ();
++
++ dwarf_init ();
++
++#ifndef UNW_REMOTE_ONLY
++ aarch64_local_addr_space_init ();
++#endif
++ tdep_init_done = 1; /* signal that we're initialized... */
++ }
++ out:
++ lock_release (&aarch64_lock, saved_mask);
++}
+--- /dev/null
++++ b/src/aarch64/Ginit.c
+@@ -0,0 +1,187 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++ Copyright (C) 2012 Tommi Rantala <tt.rantala@gmail.com>
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include <stdlib.h>
++#include <string.h>
++
++#include "unwind_i.h"
++
++#ifdef UNW_REMOTE_ONLY
++
++/* unw_local_addr_space is a NULL pointer in this case. */
++PROTECTED unw_addr_space_t unw_local_addr_space;
++
++#else /* !UNW_REMOTE_ONLY */
++
++static struct unw_addr_space local_addr_space;
++
++PROTECTED unw_addr_space_t unw_local_addr_space = &local_addr_space;
++
++static inline void *
++uc_addr (ucontext_t *uc, int reg)
++{
++ if (reg >= UNW_AARCH64_X0 && reg <= UNW_AARCH64_V31)
++ return &uc->uc_mcontext.regs[reg];
++ else
++ return NULL;
++}
++
++# ifdef UNW_LOCAL_ONLY
++
++HIDDEN void *
++tdep_uc_addr (ucontext_t *uc, int reg)
++{
++ return uc_addr (uc, reg);
++}
++
++# endif /* UNW_LOCAL_ONLY */
++
++HIDDEN unw_dyn_info_list_t _U_dyn_info_list;
++
++/* XXX fix me: there is currently no way to locate the dyn-info list
++ by a remote unwinder. On ia64, this is done via a special
++ unwind-table entry. Perhaps something similar can be done with
++ DWARF2 unwind info. */
++
++static void
++put_unwind_info (unw_addr_space_t as, unw_proc_info_t *proc_info, void *arg)
++{
++ /* it's a no-op */
++}
++
++static int
++get_dyn_info_list_addr (unw_addr_space_t as, unw_word_t *dyn_info_list_addr,
++ void *arg)
++{
++ *dyn_info_list_addr = (unw_word_t) &_U_dyn_info_list;
++ return 0;
++}
++
++static int
++access_mem (unw_addr_space_t as, unw_word_t addr, unw_word_t *val, int write,
++ void *arg)
++{
++ if (write)
++ {
++ Debug (16, "mem[%lx] <- %lx\n", addr, *val);
++ *(unw_word_t *) addr = *val;
++ }
++ else
++ {
++ *val = *(unw_word_t *) addr;
++ Debug (16, "mem[%lx] -> %lx\n", addr, *val);
++ }
++ return 0;
++}
++
++static int
++access_reg (unw_addr_space_t as, unw_regnum_t reg, unw_word_t *val, int write,
++ void *arg)
++{
++ unw_word_t *addr;
++ ucontext_t *uc = arg;
++
++ if (unw_is_fpreg (reg))
++ goto badreg;
++
++ if (!(addr = uc_addr (uc, reg)))
++ goto badreg;
++
++ if (write)
++ {
++ *(unw_word_t *) addr = *val;
++ Debug (12, "%s <- %lx\n", unw_regname (reg), *val);
++ }
++ else
++ {
++ *val = *(unw_word_t *) addr;
++ Debug (12, "%s -> %lx\n", unw_regname (reg), *val);
++ }
++ return 0;
++
++ badreg:
++ Debug (1, "bad register number %u\n", reg);
++ return -UNW_EBADREG;
++}
++
++static int
++access_fpreg (unw_addr_space_t as, unw_regnum_t reg, unw_fpreg_t *val,
++ int write, void *arg)
++{
++ ucontext_t *uc = arg;
++ unw_fpreg_t *addr;
++
++ if (!unw_is_fpreg (reg))
++ goto badreg;
++
++ if (!(addr = uc_addr (uc, reg)))
++ goto badreg;
++
++ if (write)
++ {
++ Debug (12, "%s <- %08lx.%08lx.%08lx\n", unw_regname (reg),
++ ((long *)val)[0], ((long *)val)[1], ((long *)val)[2]);
++ *(unw_fpreg_t *) addr = *val;
++ }
++ else
++ {
++ *val = *(unw_fpreg_t *) addr;
++ Debug (12, "%s -> %08lx.%08lx.%08lx\n", unw_regname (reg),
++ ((long *)val)[0], ((long *)val)[1], ((long *)val)[2]);
++ }
++ return 0;
++
++ badreg:
++ Debug (1, "bad register number %u\n", reg);
++ /* attempt to access a non-preserved register */
++ return -UNW_EBADREG;
++}
++
++static int
++get_static_proc_name (unw_addr_space_t as, unw_word_t ip,
++ char *buf, size_t buf_len, unw_word_t *offp,
++ void *arg)
++{
++ return _Uelf64_get_proc_name (as, getpid (), ip, buf, buf_len, offp);
++}
++
++HIDDEN void
++aarch64_local_addr_space_init (void)
++{
++ memset (&local_addr_space, 0, sizeof (local_addr_space));
++ local_addr_space.caching_policy = UNW_CACHE_GLOBAL;
++ local_addr_space.acc.find_proc_info = dwarf_find_proc_info;
++ local_addr_space.acc.put_unwind_info = put_unwind_info;
++ local_addr_space.acc.get_dyn_info_list_addr = get_dyn_info_list_addr;
++ local_addr_space.acc.access_mem = access_mem;
++ local_addr_space.acc.access_reg = access_reg;
++ local_addr_space.acc.access_fpreg = access_fpreg;
++ local_addr_space.acc.resume = aarch64_local_resume;
++ local_addr_space.acc.get_proc_name = get_static_proc_name;
++ unw_flush_cache (&local_addr_space, 0, 0);
++}
++
++#endif /* !UNW_REMOTE_ONLY */
+--- /dev/null
++++ b/src/aarch64/Ginit_local.c
+@@ -0,0 +1,55 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++ Copyright (C) 2011-2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "unwind_i.h"
++#include "init.h"
++
++#ifdef UNW_REMOTE_ONLY
++
++PROTECTED int
++unw_init_local (unw_cursor_t *cursor, unw_context_t *uc)
++{
++ return -UNW_EINVAL;
++}
++
++#else /* !UNW_REMOTE_ONLY */
++
++PROTECTED int
++unw_init_local (unw_cursor_t *cursor, unw_context_t *uc)
++{
++ struct cursor *c = (struct cursor *) cursor;
++
++ if (!tdep_init_done)
++ tdep_init ();
++
++ Debug (1, "(cursor=%p)\n", c);
++
++ c->dwarf.as = unw_local_addr_space;
++ c->dwarf.as_arg = uc;
++
++ return common_init (c, 1);
++}
++
++#endif /* !UNW_REMOTE_ONLY */
+--- /dev/null
++++ b/src/aarch64/Ginit_remote.c
+@@ -0,0 +1,45 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "init.h"
++#include "unwind_i.h"
++
++PROTECTED int
++unw_init_remote (unw_cursor_t *cursor, unw_addr_space_t as, void *as_arg)
++{
++#ifdef UNW_LOCAL_ONLY
++ return -UNW_EINVAL;
++#else /* !UNW_LOCAL_ONLY */
++ struct cursor *c = (struct cursor *) cursor;
++
++ if (!tdep_init_done)
++ tdep_init ();
++
++ Debug (1, "(cursor=%p)\n", c);
++
++ c->dwarf.as = as;
++ c->dwarf.as_arg = as_arg;
++ return common_init (c, 0);
++#endif /* !UNW_LOCAL_ONLY */
++}
+--- /dev/null
++++ b/src/aarch64/Gis_signal_frame.c
+@@ -0,0 +1,64 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2012 Tommi Rantala <tt.rantala@gmail.com>
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "unwind_i.h"
++
++/* The restorer stub will always have the form:
++
++ d2801168 movz x8, #0x8b
++ d4000001 svc #0x0
++*/
++
++PROTECTED int
++unw_is_signal_frame (unw_cursor_t *cursor)
++{
++#ifdef __linux__
++ struct cursor *c = (struct cursor *) cursor;
++ unw_word_t w0, ip;
++ unw_addr_space_t as;
++ unw_accessors_t *a;
++ void *arg;
++ int ret;
++
++ as = c->dwarf.as;
++ a = unw_get_accessors (as);
++ arg = c->dwarf.as_arg;
++
++ ip = c->dwarf.ip;
++
++ ret = (*a->access_mem) (as, ip, &w0, 0, arg);
++ if (ret < 0)
++ return ret;
++
++ /* FIXME: distinguish 32-bit insns vs. 64-bit registers. */
++ if (w0 != 0xd4000001d2801168)
++ return 0;
++
++ return 1;
++
++#else
++ return -UNW_ENOINFO;
++#endif
++}
+--- /dev/null
++++ b/src/aarch64/Gregs.c
+@@ -0,0 +1,113 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++ Copyright (C) 2012 Tommi Rantala <tt.rantala@gmail.com>
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "unwind_i.h"
++
++HIDDEN int
++tdep_access_reg (struct cursor *c, unw_regnum_t reg, unw_word_t *valp,
++ int write)
++{
++ dwarf_loc_t loc = DWARF_NULL_LOC;
++ unsigned int mask;
++
++ switch (reg)
++ {
++ case UNW_AARCH64_X0:
++ case UNW_AARCH64_X1:
++ case UNW_AARCH64_X2:
++ case UNW_AARCH64_X3:
++ mask = 1 << reg;
++ if (write)
++ {
++ c->dwarf.eh_args[reg] = *valp;
++ c->dwarf.eh_valid_mask |= mask;
++ return 0;
++ }
++ else if ((c->dwarf.eh_valid_mask & mask) != 0)
++ {
++ *valp = c->dwarf.eh_args[reg];
++ return 0;
++ }
++ else
++ loc = c->dwarf.loc[reg];
++ break;
++
++ case UNW_AARCH64_X4:
++ case UNW_AARCH64_X5:
++ case UNW_AARCH64_X6:
++ case UNW_AARCH64_X7:
++ case UNW_AARCH64_X8:
++ case UNW_AARCH64_X9:
++ case UNW_AARCH64_X10:
++ case UNW_AARCH64_X11:
++ case UNW_AARCH64_X12:
++ case UNW_AARCH64_X13:
++ case UNW_AARCH64_X14:
++ case UNW_AARCH64_X15:
++ case UNW_AARCH64_X16:
++ case UNW_AARCH64_X17:
++ case UNW_AARCH64_X18:
++ case UNW_AARCH64_X19:
++ case UNW_AARCH64_X20:
++ case UNW_AARCH64_X21:
++ case UNW_AARCH64_X22:
++ case UNW_AARCH64_X23:
++ case UNW_AARCH64_X24:
++ case UNW_AARCH64_X25:
++ case UNW_AARCH64_X26:
++ case UNW_AARCH64_X27:
++ case UNW_AARCH64_X28:
++ case UNW_AARCH64_X29:
++ case UNW_AARCH64_X30:
++ case UNW_AARCH64_PC:
++ case UNW_AARCH64_PSTATE:
++ loc = c->dwarf.loc[reg];
++ break;
++
++ case UNW_AARCH64_SP:
++ if (write)
++ return -UNW_EREADONLYREG;
++ *valp = c->dwarf.cfa;
++ return 0;
++
++ default:
++ Debug (1, "bad register number %u\n", reg);
++ return -UNW_EBADREG;
++ }
++
++ if (write)
++ return dwarf_put (&c->dwarf, loc, *valp);
++ else
++ return dwarf_get (&c->dwarf, loc, valp);
++}
++
++HIDDEN int
++tdep_access_fpreg (struct cursor *c, unw_regnum_t reg, unw_fpreg_t *valp,
++ int write)
++{
++ Debug (1, "bad register number %u\n", reg);
++ return -UNW_EBADREG;
++}
+--- /dev/null
++++ b/src/aarch64/Gresume.c
+@@ -0,0 +1,177 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++ Copyright (C) 2011-2013 Linaro Limited
++ Copyright (C) 2012 Tommi Rantala <tt.rantala@gmail.com>
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "unwind_i.h"
++#include "offsets.h"
++
++#ifndef UNW_REMOTE_ONLY
++
++HIDDEN inline int
++aarch64_local_resume (unw_addr_space_t as, unw_cursor_t *cursor, void *arg)
++{
++#ifdef __linux__
++ struct cursor *c = (struct cursor *) cursor;
++ unw_tdep_context_t *uc = c->dwarf.as_arg;
++
++ if (c->sigcontext_format == AARCH64_SCF_NONE)
++ {
++ /* Since there are no signals involved here, we restore only the
++ non-scratch registers. */
++ unsigned long regs[11];
++ regs[0] = uc->uc_mcontext.regs[19];
++ regs[1] = uc->uc_mcontext.regs[20];
++ regs[2] = uc->uc_mcontext.regs[21];
++ regs[3] = uc->uc_mcontext.regs[22];
++ regs[4] = uc->uc_mcontext.regs[23];
++ regs[5] = uc->uc_mcontext.regs[24];
++ regs[6] = uc->uc_mcontext.regs[25];
++ regs[7] = uc->uc_mcontext.regs[26];
++ regs[8] = uc->uc_mcontext.regs[27];
++ regs[9] = uc->uc_mcontext.regs[28];
++ regs[10] = uc->uc_mcontext.regs[30]; /* LR */
++ unsigned long sp = uc->uc_mcontext.sp;
++
++ struct regs_overlay {
++ char x[sizeof(regs)];
++ };
++
++ asm volatile (
++ "ldp x19, x20, [%0]\n"
++ "ldp x21, x22, [%0,16]\n"
++ "ldp x23, x24, [%0,32]\n"
++ "ldp x25, x26, [%0,48]\n"
++ "ldp x27, x28, [%0,64]\n"
++ "ldr x30, [%0,80]\n"
++ "mov sp, %1\n"
++ "ret \n"
++ :
++ : "r" (regs),
++ "r" (sp),
++ "m" (*(struct regs_overlay *)regs)
++ );
++ }
++ else
++ {
++ struct sigcontext *sc = (struct sigcontext *) c->sigcontext_addr;
++
++ if (c->dwarf.eh_valid_mask & 0x1) sc->regs[0] = c->dwarf.eh_args[0];
++ if (c->dwarf.eh_valid_mask & 0x2) sc->regs[1] = c->dwarf.eh_args[1];
++ if (c->dwarf.eh_valid_mask & 0x4) sc->regs[2] = c->dwarf.eh_args[2];
++ if (c->dwarf.eh_valid_mask & 0x8) sc->regs[3] = c->dwarf.eh_args[3];
++
++ sc->regs[4] = uc->uc_mcontext.regs[4];
++ sc->regs[5] = uc->uc_mcontext.regs[5];
++ sc->regs[6] = uc->uc_mcontext.regs[6];
++ sc->regs[7] = uc->uc_mcontext.regs[7];
++ sc->regs[8] = uc->uc_mcontext.regs[8];
++ sc->regs[9] = uc->uc_mcontext.regs[9];
++ sc->regs[10] = uc->uc_mcontext.regs[10];
++ sc->regs[11] = uc->uc_mcontext.regs[11];
++ sc->regs[12] = uc->uc_mcontext.regs[12];
++ sc->regs[13] = uc->uc_mcontext.regs[13];
++ sc->regs[14] = uc->uc_mcontext.regs[14];
++ sc->regs[15] = uc->uc_mcontext.regs[15];
++ sc->regs[16] = uc->uc_mcontext.regs[16];
++ sc->regs[17] = uc->uc_mcontext.regs[17];
++ sc->regs[18] = uc->uc_mcontext.regs[18];
++ sc->regs[19] = uc->uc_mcontext.regs[19];
++ sc->regs[20] = uc->uc_mcontext.regs[20];
++ sc->regs[21] = uc->uc_mcontext.regs[21];
++ sc->regs[22] = uc->uc_mcontext.regs[22];
++ sc->regs[23] = uc->uc_mcontext.regs[23];
++ sc->regs[24] = uc->uc_mcontext.regs[24];
++ sc->regs[25] = uc->uc_mcontext.regs[25];
++ sc->regs[26] = uc->uc_mcontext.regs[26];
++ sc->regs[27] = uc->uc_mcontext.regs[27];
++ sc->regs[28] = uc->uc_mcontext.regs[28];
++ sc->regs[29] = uc->uc_mcontext.regs[29];
++ sc->regs[30] = uc->uc_mcontext.regs[30];
++ sc->sp = uc->uc_mcontext.sp;
++ sc->pc = uc->uc_mcontext.pc;
++ sc->pstate = uc->uc_mcontext.pstate;
++
++ asm volatile (
++ "mov sp, %0\n"
++ "ret %1\n"
++ : : "r" (c->sigcontext_sp), "r" (c->sigcontext_pc)
++ );
++ }
++ unreachable();
++#else
++ printf ("%s: implement me\n", __FUNCTION__);
++#endif
++ return -UNW_EINVAL;
++}
++
++#endif /* !UNW_REMOTE_ONLY */
++
++static inline void
++establish_machine_state (struct cursor *c)
++{
++ unw_addr_space_t as = c->dwarf.as;
++ void *arg = c->dwarf.as_arg;
++ unw_fpreg_t fpval;
++ unw_word_t val;
++ int reg;
++
++ Debug (8, "copying out cursor state\n");
++
++ for (reg = 0; reg <= UNW_AARCH64_PSTATE; ++reg)
++ {
++ Debug (16, "copying %s %d\n", unw_regname (reg), reg);
++ if (unw_is_fpreg (reg))
++ {
++ if (tdep_access_fpreg (c, reg, &fpval, 0) >= 0)
++ as->acc.access_fpreg (as, reg, &fpval, 1, arg);
++ }
++ else
++ {
++ if (tdep_access_reg (c, reg, &val, 0) >= 0)
++ as->acc.access_reg (as, reg, &val, 1, arg);
++ }
++ }
++}
++
++PROTECTED int
++unw_resume (unw_cursor_t *cursor)
++{
++ struct cursor *c = (struct cursor *) cursor;
++
++ Debug (1, "(cursor=%p)\n", c);
++
++ if (!c->dwarf.ip)
++ {
++ /* This can happen easily when the frame-chain gets truncated
++ due to bad or missing unwind-info. */
++ Debug (1, "refusing to resume execution at address 0\n");
++ return -UNW_EINVAL;
++ }
++
++ establish_machine_state (c);
++
++ return (*c->dwarf.as->acc.resume) (c->dwarf.as, (unw_cursor_t *) c,
++ c->dwarf.as_arg);
++}
+--- /dev/null
++++ b/src/aarch64/Gstep.c
+@@ -0,0 +1,129 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++ Copyright (C) 2011-2013 Linaro Limited
++ Copyright (C) 2012 Tommi Rantala <tt.rantala@gmail.com>
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "unwind_i.h"
++#include "offsets.h"
++
++PROTECTED int
++unw_handle_signal_frame (unw_cursor_t *cursor)
++{
++ struct cursor *c = (struct cursor *) cursor;
++ int ret;
++ unw_word_t sc_addr, sp, sp_addr = c->dwarf.cfa;
++ struct dwarf_loc sp_loc = DWARF_LOC (sp_addr, 0);
++
++ if ((ret = dwarf_get (&c->dwarf, sp_loc, &sp)) < 0)
++ return -UNW_EUNSPEC;
++
++ ret = unw_is_signal_frame (cursor);
++ Debug(1, "unw_is_signal_frame()=%d\n", ret);
++
++ /* Save the SP and PC to be able to return execution at this point
++ later in time (unw_resume). */
++ c->sigcontext_sp = c->dwarf.cfa;
++ c->sigcontext_pc = c->dwarf.ip;
++
++ if (ret)
++ {
++ c->sigcontext_format = AARCH64_SCF_LINUX_RT_SIGFRAME;
++ sc_addr = sp_addr + sizeof (siginfo_t) + LINUX_UC_MCONTEXT_OFF;
++ }
++ else
++ return -UNW_EUNSPEC;
++
++ c->sigcontext_addr = sc_addr;
++
++ /* Update the dwarf cursor.
++ Set the location of the registers to the corresponding addresses of the
++ uc_mcontext / sigcontext structure contents. */
++ c->dwarf.loc[UNW_AARCH64_X0] = DWARF_LOC (sc_addr + LINUX_SC_X0_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X1] = DWARF_LOC (sc_addr + LINUX_SC_X1_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X2] = DWARF_LOC (sc_addr + LINUX_SC_X2_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X3] = DWARF_LOC (sc_addr + LINUX_SC_X3_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X4] = DWARF_LOC (sc_addr + LINUX_SC_X4_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X5] = DWARF_LOC (sc_addr + LINUX_SC_X5_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X6] = DWARF_LOC (sc_addr + LINUX_SC_X6_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X7] = DWARF_LOC (sc_addr + LINUX_SC_X7_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X8] = DWARF_LOC (sc_addr + LINUX_SC_X8_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X9] = DWARF_LOC (sc_addr + LINUX_SC_X9_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X10] = DWARF_LOC (sc_addr + LINUX_SC_X10_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X11] = DWARF_LOC (sc_addr + LINUX_SC_X11_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X12] = DWARF_LOC (sc_addr + LINUX_SC_X12_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X13] = DWARF_LOC (sc_addr + LINUX_SC_X13_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X14] = DWARF_LOC (sc_addr + LINUX_SC_X14_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X15] = DWARF_LOC (sc_addr + LINUX_SC_X15_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X16] = DWARF_LOC (sc_addr + LINUX_SC_X16_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X17] = DWARF_LOC (sc_addr + LINUX_SC_X17_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X18] = DWARF_LOC (sc_addr + LINUX_SC_X18_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X19] = DWARF_LOC (sc_addr + LINUX_SC_X19_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X20] = DWARF_LOC (sc_addr + LINUX_SC_X20_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X21] = DWARF_LOC (sc_addr + LINUX_SC_X21_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X22] = DWARF_LOC (sc_addr + LINUX_SC_X22_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X23] = DWARF_LOC (sc_addr + LINUX_SC_X23_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X24] = DWARF_LOC (sc_addr + LINUX_SC_X24_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X25] = DWARF_LOC (sc_addr + LINUX_SC_X25_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X26] = DWARF_LOC (sc_addr + LINUX_SC_X26_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X27] = DWARF_LOC (sc_addr + LINUX_SC_X27_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X28] = DWARF_LOC (sc_addr + LINUX_SC_X28_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X29] = DWARF_LOC (sc_addr + LINUX_SC_X29_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_X30] = DWARF_LOC (sc_addr + LINUX_SC_X30_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_SP] = DWARF_LOC (sc_addr + LINUX_SC_SP_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_PC] = DWARF_LOC (sc_addr + LINUX_SC_PC_OFF, 0);
++ c->dwarf.loc[UNW_AARCH64_PSTATE] = DWARF_LOC (sc_addr + LINUX_SC_PSTATE_OFF, 0);
++
++ /* Set SP/CFA and PC/IP. */
++ dwarf_get (&c->dwarf, c->dwarf.loc[UNW_AARCH64_SP], &c->dwarf.cfa);
++ dwarf_get (&c->dwarf, c->dwarf.loc[UNW_AARCH64_PC], &c->dwarf.ip);
++
++ c->dwarf.pi_valid = 0;
++
++ return 1;
++}
++
++PROTECTED int
++unw_step (unw_cursor_t *cursor)
++{
++ struct cursor *c = (struct cursor *) cursor;
++ int ret;
++
++ Debug (1, "(cursor=%p, ip=0x%016lx, cfa=0x%016lx))\n",
++ c, c->dwarf.ip, c->dwarf.cfa);
++
++ /* Check if this is a signal frame. */
++ if (unw_is_signal_frame (cursor))
++ return unw_handle_signal_frame (cursor);
++
++ ret = dwarf_step (&c->dwarf);
++ Debug(1, "dwarf_step()=%d\n", ret);
++
++ if (unlikely (ret == -UNW_ESTOPUNWIND))
++ return ret;
++
++ if (unlikely (ret < 0))
++ return 0;
++
++ return (c->dwarf.ip == 0) ? 0 : 1;
++}
+--- /dev/null
++++ b/src/aarch64/Lcreate_addr_space.c
+@@ -0,0 +1,5 @@
++#define UNW_LOCAL_ONLY
++#include <libunwind.h>
++#if defined(UNW_LOCAL_ONLY) && !defined(UNW_REMOTE_ONLY)
++#include "Gcreate_addr_space.c"
++#endif
+--- /dev/null
++++ b/src/aarch64/Lget_proc_info.c
+@@ -0,0 +1,5 @@
++#define UNW_LOCAL_ONLY
++#include <libunwind.h>
++#if defined(UNW_LOCAL_ONLY) && !defined(UNW_REMOTE_ONLY)
++#include "Gget_proc_info.c"
++#endif
+--- /dev/null
++++ b/src/aarch64/Lget_save_loc.c
+@@ -0,0 +1,5 @@
++#define UNW_LOCAL_ONLY
++#include <libunwind.h>
++#if defined(UNW_LOCAL_ONLY) && !defined(UNW_REMOTE_ONLY)
++#include "Gget_save_loc.c"
++#endif
+--- /dev/null
++++ b/src/aarch64/Lglobal.c
+@@ -0,0 +1,5 @@
++#define UNW_LOCAL_ONLY
++#include <libunwind.h>
++#if defined(UNW_LOCAL_ONLY) && !defined(UNW_REMOTE_ONLY)
++#include "Gglobal.c"
++#endif
+--- /dev/null
++++ b/src/aarch64/Linit.c
+@@ -0,0 +1,5 @@
++#define UNW_LOCAL_ONLY
++#include <libunwind.h>
++#if defined(UNW_LOCAL_ONLY) && !defined(UNW_REMOTE_ONLY)
++#include "Ginit.c"
++#endif
+--- /dev/null
++++ b/src/aarch64/Linit_local.c
+@@ -0,0 +1,5 @@
++#define UNW_LOCAL_ONLY
++#include <libunwind.h>
++#if defined(UNW_LOCAL_ONLY) && !defined(UNW_REMOTE_ONLY)
++#include "Ginit_local.c"
++#endif
+--- /dev/null
++++ b/src/aarch64/Linit_remote.c
+@@ -0,0 +1,5 @@
++#define UNW_LOCAL_ONLY
++#include <libunwind.h>
++#if defined(UNW_LOCAL_ONLY) && !defined(UNW_REMOTE_ONLY)
++#include "Ginit_remote.c"
++#endif
+--- /dev/null
++++ b/src/aarch64/Lis_signal_frame.c
+@@ -0,0 +1,5 @@
++#define UNW_LOCAL_ONLY
++#include <libunwind.h>
++#if defined(UNW_LOCAL_ONLY) && !defined(UNW_REMOTE_ONLY)
++#include "Gis_signal_frame.c"
++#endif
+--- /dev/null
++++ b/src/aarch64/Lregs.c
+@@ -0,0 +1,5 @@
++#define UNW_LOCAL_ONLY
++#include <libunwind.h>
++#if defined(UNW_LOCAL_ONLY) && !defined(UNW_REMOTE_ONLY)
++#include "Gregs.c"
++#endif
+--- /dev/null
++++ b/src/aarch64/Lresume.c
+@@ -0,0 +1,5 @@
++#define UNW_LOCAL_ONLY
++#include <libunwind.h>
++#if defined(UNW_LOCAL_ONLY) && !defined(UNW_REMOTE_ONLY)
++#include "Gresume.c"
++#endif
+--- /dev/null
++++ b/src/aarch64/Lstep.c
+@@ -0,0 +1,5 @@
++#define UNW_LOCAL_ONLY
++#include <libunwind.h>
++#if defined(UNW_LOCAL_ONLY) && !defined(UNW_REMOTE_ONLY)
++#include "Gstep.c"
++#endif
+--- /dev/null
++++ b/src/aarch64/gen-offsets.c
+@@ -0,0 +1,68 @@
++#include <stdio.h>
++#include <stddef.h>
++#include <ucontext.h>
++#include <asm/sigcontext.h>
++
++#define UC(N,X) \
++ printf ("#define LINUX_UC_" N "_OFF\t0x%X\n", offsetof (ucontext_t, X))
++
++#define SC(N,X) \
++ printf ("#define LINUX_SC_" N "_OFF\t0x%X\n", offsetof (struct sigcontext, X))
++
++int
++main (void)
++{
++ printf (
++"/* Linux-specific definitions: */\n\n"
++
++"/* Define various structure offsets to simplify cross-compilation. */\n\n"
++
++"/* Offsets for AArch64 Linux \"ucontext_t\": */\n\n");
++
++ UC ("FLAGS", uc_flags);
++ UC ("LINK", uc_link);
++ UC ("STACK", uc_stack);
++ UC ("MCONTEXT", uc_mcontext);
++ UC ("SIGMASK", uc_sigmask);
++
++ printf ("\n/* Offsets for AArch64 Linux \"struct sigcontext\": */\n\n");
++
++ SC ("R0", regs[0]);
++ SC ("R1", regs[1]);
++ SC ("R2", regs[2]);
++ SC ("R3", regs[3]);
++ SC ("R4", regs[4]);
++ SC ("R5", regs[5]);
++ SC ("R6", regs[6]);
++ SC ("R7", regs[7]);
++ SC ("R8", regs[8]);
++ SC ("R9", regs[9]);
++ SC ("R10", regs[10]);
++ SC ("R11", regs[11]);
++ SC ("R12", regs[12]);
++ SC ("R13", regs[13]);
++ SC ("R14", regs[14]);
++ SC ("R15", regs[15]);
++ SC ("R16", regs[16]);
++ SC ("R17", regs[17]);
++ SC ("R18", regs[18]);
++ SC ("R19", regs[19]);
++ SC ("R20", regs[20]);
++ SC ("R21", regs[21]);
++ SC ("R22", regs[22]);
++ SC ("R23", regs[23]);
++ SC ("R24", regs[24]);
++ SC ("R25", regs[25]);
++ SC ("R26", regs[26]);
++ SC ("R27", regs[27]);
++ SC ("R28", regs[28]);
++ SC ("R29", regs[29]);
++ SC ("R30", regs[30]);
++ SC ("R31", regs[31]);
++
++ SC ("PC", pc);
++ SC ("SP", sp);
++ SC ("Fault", fault_address);
++ SC ("state", pstate);
++ return 0;
++}
+--- /dev/null
++++ b/src/aarch64/init.h
+@@ -0,0 +1,127 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2012 Tommi Rantala <tt.rantala@gmail.com>
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "unwind_i.h"
++
++static inline int
++common_init (struct cursor *c, unsigned use_prev_instr)
++{
++ int ret, i;
++
++ c->dwarf.loc[UNW_AARCH64_X0] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X0);
++ c->dwarf.loc[UNW_AARCH64_X1] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X1);
++ c->dwarf.loc[UNW_AARCH64_X2] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X2);
++ c->dwarf.loc[UNW_AARCH64_X3] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X3);
++ c->dwarf.loc[UNW_AARCH64_X4] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X4);
++ c->dwarf.loc[UNW_AARCH64_X5] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X5);
++ c->dwarf.loc[UNW_AARCH64_X6] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X6);
++ c->dwarf.loc[UNW_AARCH64_X7] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X7);
++ c->dwarf.loc[UNW_AARCH64_X8] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X8);
++ c->dwarf.loc[UNW_AARCH64_X9] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X9);
++ c->dwarf.loc[UNW_AARCH64_X10] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X10);
++ c->dwarf.loc[UNW_AARCH64_X11] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X11);
++ c->dwarf.loc[UNW_AARCH64_X12] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X12);
++ c->dwarf.loc[UNW_AARCH64_X13] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X13);
++ c->dwarf.loc[UNW_AARCH64_X14] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X14);
++ c->dwarf.loc[UNW_AARCH64_X15] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X15);
++ c->dwarf.loc[UNW_AARCH64_X16] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X16);
++ c->dwarf.loc[UNW_AARCH64_X17] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X17);
++ c->dwarf.loc[UNW_AARCH64_X18] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X18);
++ c->dwarf.loc[UNW_AARCH64_X19] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X19);
++ c->dwarf.loc[UNW_AARCH64_X20] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X20);
++ c->dwarf.loc[UNW_AARCH64_X21] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X21);
++ c->dwarf.loc[UNW_AARCH64_X22] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X22);
++ c->dwarf.loc[UNW_AARCH64_X23] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X23);
++ c->dwarf.loc[UNW_AARCH64_X24] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X24);
++ c->dwarf.loc[UNW_AARCH64_X25] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X25);
++ c->dwarf.loc[UNW_AARCH64_X26] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X26);
++ c->dwarf.loc[UNW_AARCH64_X27] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X27);
++ c->dwarf.loc[UNW_AARCH64_X28] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X28);
++ c->dwarf.loc[UNW_AARCH64_X29] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X29);
++ c->dwarf.loc[UNW_AARCH64_X30] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_X30);
++ c->dwarf.loc[UNW_AARCH64_SP] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_SP);
++ c->dwarf.loc[UNW_AARCH64_PC] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_PC);
++ c->dwarf.loc[UNW_AARCH64_PSTATE] = DWARF_REG_LOC (&c->dwarf,
++ UNW_AARCH64_PSTATE);
++ c->dwarf.loc[UNW_AARCH64_V0] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V0);
++ c->dwarf.loc[UNW_AARCH64_V1] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V1);
++ c->dwarf.loc[UNW_AARCH64_V2] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V2);
++ c->dwarf.loc[UNW_AARCH64_V3] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V3);
++ c->dwarf.loc[UNW_AARCH64_V4] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V4);
++ c->dwarf.loc[UNW_AARCH64_V5] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V5);
++ c->dwarf.loc[UNW_AARCH64_V6] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V6);
++ c->dwarf.loc[UNW_AARCH64_V7] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V7);
++ c->dwarf.loc[UNW_AARCH64_V8] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V8);
++ c->dwarf.loc[UNW_AARCH64_V9] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V9);
++ c->dwarf.loc[UNW_AARCH64_V10] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V10);
++ c->dwarf.loc[UNW_AARCH64_V11] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V11);
++ c->dwarf.loc[UNW_AARCH64_V12] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V12);
++ c->dwarf.loc[UNW_AARCH64_V13] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V13);
++ c->dwarf.loc[UNW_AARCH64_V14] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V14);
++ c->dwarf.loc[UNW_AARCH64_V15] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V15);
++ c->dwarf.loc[UNW_AARCH64_V16] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V16);
++ c->dwarf.loc[UNW_AARCH64_V17] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V17);
++ c->dwarf.loc[UNW_AARCH64_V18] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V18);
++ c->dwarf.loc[UNW_AARCH64_V19] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V19);
++ c->dwarf.loc[UNW_AARCH64_V20] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V20);
++ c->dwarf.loc[UNW_AARCH64_V21] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V21);
++ c->dwarf.loc[UNW_AARCH64_V22] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V22);
++ c->dwarf.loc[UNW_AARCH64_V23] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V23);
++ c->dwarf.loc[UNW_AARCH64_V24] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V24);
++ c->dwarf.loc[UNW_AARCH64_V25] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V25);
++ c->dwarf.loc[UNW_AARCH64_V26] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V26);
++ c->dwarf.loc[UNW_AARCH64_V27] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V27);
++ c->dwarf.loc[UNW_AARCH64_V28] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V28);
++ c->dwarf.loc[UNW_AARCH64_V29] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V29);
++ c->dwarf.loc[UNW_AARCH64_V30] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V30);
++ c->dwarf.loc[UNW_AARCH64_V31] = DWARF_REG_LOC (&c->dwarf, UNW_AARCH64_V31);
++
++ for (i = UNW_AARCH64_PSTATE + 1; i < UNW_AARCH64_V0; ++i)
++ c->dwarf.loc[i] = DWARF_NULL_LOC;
++
++ ret = dwarf_get (&c->dwarf, c->dwarf.loc[UNW_AARCH64_PC], &c->dwarf.ip);
++ if (ret < 0)
++ return ret;
++
++ ret = dwarf_get (&c->dwarf, c->dwarf.loc[UNW_AARCH64_SP], &c->dwarf.cfa);
++ if (ret < 0)
++ return ret;
++
++ c->sigcontext_format = AARCH64_SCF_NONE;
++ c->sigcontext_addr = 0;
++ c->sigcontext_sp = 0;
++ c->sigcontext_pc = 0;
++
++ c->dwarf.args_size = 0;
++ c->dwarf.ret_addr_column = 0;
++ c->dwarf.stash_frames = 0;
++ c->dwarf.use_prev_instr = use_prev_instr;
++ c->dwarf.pi_valid = 0;
++ c->dwarf.pi_is_dynamic = 0;
++ c->dwarf.hint = 0;
++ c->dwarf.prev_rs = 0;
++
++ return 0;
++}
+--- /dev/null
++++ b/src/aarch64/is_fpreg.c
+@@ -0,0 +1,32 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "libunwind_i.h"
++
++PROTECTED int
++unw_is_fpreg (int regnum)
++{
++ return (regnum >= UNW_AARCH64_V0 && regnum <= UNW_AARCH64_V31);
++}
+--- /dev/null
++++ b/src/aarch64/offsets.h
+@@ -0,0 +1,49 @@
++/* Linux-specific definitions: */
++
++/* Define various structure offsets to simplify cross-compilation. */
++
++/* Offsets for AArch64 Linux "ucontext_t": */
++
++#define LINUX_UC_FLAGS_OFF 0x0
++#define LINUX_UC_LINK_OFF 0x8
++#define LINUX_UC_STACK_OFF 0x10
++#define LINUX_UC_SIGMASK_OFF 0x28
++#define LINUX_UC_MCONTEXT_OFF 0xb0
++
++/* Offsets for AArch64 Linux "struct sigcontext": */
++
++#define LINUX_SC_FAULTADDRESS_OFF 0x00
++#define LINUX_SC_X0_OFF 0x008
++#define LINUX_SC_X1_OFF 0x010
++#define LINUX_SC_X2_OFF 0x018
++#define LINUX_SC_X3_OFF 0x020
++#define LINUX_SC_X4_OFF 0x028
++#define LINUX_SC_X5_OFF 0x030
++#define LINUX_SC_X6_OFF 0x038
++#define LINUX_SC_X7_OFF 0x040
++#define LINUX_SC_X8_OFF 0x048
++#define LINUX_SC_X9_OFF 0x050
++#define LINUX_SC_X10_OFF 0x058
++#define LINUX_SC_X11_OFF 0x060
++#define LINUX_SC_X12_OFF 0x068
++#define LINUX_SC_X13_OFF 0x070
++#define LINUX_SC_X14_OFF 0x078
++#define LINUX_SC_X15_OFF 0x080
++#define LINUX_SC_X16_OFF 0x088
++#define LINUX_SC_X17_OFF 0x090
++#define LINUX_SC_X18_OFF 0x098
++#define LINUX_SC_X19_OFF 0x0a0
++#define LINUX_SC_X20_OFF 0x0a8
++#define LINUX_SC_X21_OFF 0x0b0
++#define LINUX_SC_X22_OFF 0x0b8
++#define LINUX_SC_X23_OFF 0x0c0
++#define LINUX_SC_X24_OFF 0x0c8
++#define LINUX_SC_X25_OFF 0x0d0
++#define LINUX_SC_X26_OFF 0x0d8
++#define LINUX_SC_X27_OFF 0x0e0
++#define LINUX_SC_X28_OFF 0x0e8
++#define LINUX_SC_X29_OFF 0x0f0
++#define LINUX_SC_X30_OFF 0x0f8
++#define LINUX_SC_SP_OFF 0x100
++#define LINUX_SC_PC_OFF 0x108
++#define LINUX_SC_PSTATE_OFF 0x110
+--- /dev/null
++++ b/src/aarch64/regname.c
+@@ -0,0 +1,106 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2012 Tommi Rantala <tt.rantala@gmail.com>
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#include "unwind_i.h"
++
++static const char *const regname[] =
++ {
++ [UNW_AARCH64_X0] = "x0",
++ [UNW_AARCH64_X1] = "x1",
++ [UNW_AARCH64_X2] = "x2",
++ [UNW_AARCH64_X3] = "x3",
++ [UNW_AARCH64_X4] = "x4",
++ [UNW_AARCH64_X5] = "x5",
++ [UNW_AARCH64_X6] = "x6",
++ [UNW_AARCH64_X7] = "x7",
++ [UNW_AARCH64_X8] = "x8",
++ [UNW_AARCH64_X9] = "x9",
++ [UNW_AARCH64_X10] = "x10",
++ [UNW_AARCH64_X11] = "x11",
++ [UNW_AARCH64_X12] = "x12",
++ [UNW_AARCH64_X13] = "x13",
++ [UNW_AARCH64_X14] = "x14",
++ [UNW_AARCH64_X15] = "x15",
++ [UNW_AARCH64_X16] = "ip0",
++ [UNW_AARCH64_X17] = "ip1",
++ [UNW_AARCH64_X18] = "x18",
++ [UNW_AARCH64_X19] = "x19",
++ [UNW_AARCH64_X20] = "x20",
++ [UNW_AARCH64_X21] = "x21",
++ [UNW_AARCH64_X22] = "x22",
++ [UNW_AARCH64_X23] = "x23",
++ [UNW_AARCH64_X24] = "x24",
++ [UNW_AARCH64_X25] = "x25",
++ [UNW_AARCH64_X26] = "x26",
++ [UNW_AARCH64_X27] = "x27",
++ [UNW_AARCH64_X28] = "x28",
++ [UNW_AARCH64_X29] = "fp",
++ [UNW_AARCH64_X30] = "lr",
++ [UNW_AARCH64_SP] = "sp",
++ [UNW_AARCH64_PC] = "pc",
++ [UNW_AARCH64_V0] = "v0",
++ [UNW_AARCH64_V1] = "v1",
++ [UNW_AARCH64_V2] = "v2",
++ [UNW_AARCH64_V3] = "v3",
++ [UNW_AARCH64_V4] = "v4",
++ [UNW_AARCH64_V5] = "v5",
++ [UNW_AARCH64_V6] = "v6",
++ [UNW_AARCH64_V7] = "v7",
++ [UNW_AARCH64_V8] = "v8",
++ [UNW_AARCH64_V9] = "v9",
++ [UNW_AARCH64_V10] = "v10",
++ [UNW_AARCH64_V11] = "v11",
++ [UNW_AARCH64_V12] = "v12",
++ [UNW_AARCH64_V13] = "v13",
++ [UNW_AARCH64_V14] = "v14",
++ [UNW_AARCH64_V15] = "v15",
++ [UNW_AARCH64_V16] = "v16",
++ [UNW_AARCH64_V17] = "v17",
++ [UNW_AARCH64_V18] = "v18",
++ [UNW_AARCH64_V19] = "v19",
++ [UNW_AARCH64_V20] = "v20",
++ [UNW_AARCH64_V21] = "v21",
++ [UNW_AARCH64_V22] = "v22",
++ [UNW_AARCH64_V23] = "v23",
++ [UNW_AARCH64_V24] = "v24",
++ [UNW_AARCH64_V25] = "v25",
++ [UNW_AARCH64_V26] = "v26",
++ [UNW_AARCH64_V27] = "v27",
++ [UNW_AARCH64_V28] = "v28",
++ [UNW_AARCH64_V29] = "v29",
++ [UNW_AARCH64_V30] = "v30",
++ [UNW_AARCH64_V31] = "v31",
++ [UNW_AARCH64_FPSR] = "fpsr",
++ [UNW_AARCH64_FPCR] = "fpcr",
++ };
++
++PROTECTED const char *
++unw_regname (unw_regnum_t reg)
++{
++ if (reg < (unw_regnum_t) ARRAY_SIZE (regname) && regname[reg] != NULL)
++ return regname[reg];
++ else
++ return "???";
++}
+--- /dev/null
++++ b/src/aarch64/siglongjmp.S
+@@ -0,0 +1,12 @@
++ /* Dummy implementation for now. */
++
++ .global _UI_siglongjmp_cont
++ .global _UI_longjmp_cont
++
++_UI_siglongjmp_cont:
++_UI_longjmp_cont:
++ ret
++#ifdef __linux__
++ /* We do not need executable stack. */
++ .section .note.GNU-stack,"",%progbits
++#endif
+--- /dev/null
++++ b/src/aarch64/unwind_i.h
+@@ -0,0 +1,43 @@
++/* libunwind - a platform-independent unwind library
++ Copyright (C) 2008 CodeSourcery
++ Copyright (C) 2013 Linaro Limited
++
++This file is part of libunwind.
++
++Permission is hereby granted, free of charge, to any person obtaining
++a copy of this software and associated documentation files (the
++"Software"), to deal in the Software without restriction, including
++without limitation the rights to use, copy, modify, merge, publish,
++distribute, sublicense, and/or sell copies of the Software, and to
++permit persons to whom the Software is furnished to do so, subject to
++the following conditions:
++
++The above copyright notice and this permission notice shall be
++included in all copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
++NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
++LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
++WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
++
++#ifndef unwind_i_h
++#define unwind_i_h
++
++#include <stdint.h>
++
++#include <libunwind-aarch64.h>
++
++#include "libunwind_i.h"
++
++#define aarch64_lock UNW_OBJ(lock)
++#define aarch64_local_resume UNW_OBJ(local_resume)
++#define aarch64_local_addr_space_init UNW_OBJ(local_addr_space_init)
++
++extern void aarch64_local_addr_space_init (void);
++extern int aarch64_local_resume (unw_addr_space_t as, unw_cursor_t *cursor,
++ void *arg);
++
++#endif /* unwind_i_h */
+--- a/src/coredump/_UCD_access_reg_linux.c
++++ b/src/coredump/_UCD_access_reg_linux.c
+@@ -39,7 +39,10 @@ _UCD_access_reg (unw_addr_space_t as,
+ return -UNW_EINVAL;
+ }
+
+-#if defined(UNW_TARGET_ARM)
++#if defined(UNW_TARGET_AARCH64)
++ if (regnum < 0 || regnum >= UNW_AARCH64_FPCR)
++ goto badreg;
++#elif defined(UNW_TARGET_ARM)
+ if (regnum < 0 || regnum >= 16)
+ goto badreg;
+ #elif defined(UNW_TARGET_SH)
+--- a/src/ptrace/_UPT_reg_offset.c
++++ b/src/ptrace/_UPT_reg_offset.c
+@@ -1,6 +1,7 @@
+ /* libunwind - a platform-independent unwind library
+ Copyright (C) 2003-2004 Hewlett-Packard Co
+ Contributed by David Mosberger-Tang <davidm@hpl.hp.com>
++ Copyright (C) 2013 Linaro Limited
+
+ This file is part of libunwind.
+
+@@ -501,6 +502,41 @@ const int _UPT_reg_offset[UNW_REG_LAST +
+ [UNW_ARM_R15] = 0x3c,
+ #elif defined(UNW_TARGET_MIPS)
+ #elif defined(UNW_TARGET_SH)
++#elif defined(UNW_TARGET_AARCH64)
++ [UNW_AARCH64_X0] = 0x00,
++ [UNW_AARCH64_X1] = 0x08,
++ [UNW_AARCH64_X2] = 0x10,
++ [UNW_AARCH64_X3] = 0x18,
++ [UNW_AARCH64_X4] = 0x20,
++ [UNW_AARCH64_X5] = 0x28,
++ [UNW_AARCH64_X6] = 0x30,
++ [UNW_AARCH64_X7] = 0x38,
++ [UNW_AARCH64_X8] = 0x40,
++ [UNW_AARCH64_X9] = 0x48,
++ [UNW_AARCH64_X10] = 0x50,
++ [UNW_AARCH64_X11] = 0x58,
++ [UNW_AARCH64_X12] = 0x60,
++ [UNW_AARCH64_X13] = 0x68,
++ [UNW_AARCH64_X14] = 0x70,
++ [UNW_AARCH64_X15] = 0x78,
++ [UNW_AARCH64_X16] = 0x80,
++ [UNW_AARCH64_X17] = 0x88,
++ [UNW_AARCH64_X18] = 0x90,
++ [UNW_AARCH64_X19] = 0x98,
++ [UNW_AARCH64_X20] = 0xa0,
++ [UNW_AARCH64_X21] = 0xa8,
++ [UNW_AARCH64_X22] = 0xb0,
++ [UNW_AARCH64_X23] = 0xb8,
++ [UNW_AARCH64_X24] = 0xc0,
++ [UNW_AARCH64_X25] = 0xc8,
++ [UNW_AARCH64_X26] = 0xd0,
++ [UNW_AARCH64_X27] = 0xd8,
++ [UNW_AARCH64_X28] = 0xe0,
++ [UNW_AARCH64_X29] = 0xe8,
++ [UNW_AARCH64_X30] = 0xf0,
++ [UNW_AARCH64_SP] = 0xf8,
++ [UNW_AARCH64_PC] = 0x100,
++ [UNW_AARCH64_PSTATE] = 0x108
+ #else
+ # error Fix me.
+ #endif
diff --git a/meta/recipes-support/libunwind/libunwind-1.1/Support-building-with-older-compilers.patch b/meta/recipes-support/libunwind/libunwind-1.1/Support-building-with-older-compilers.patch
new file mode 100644
index 0000000000..268b702dcb
--- /dev/null
+++ b/meta/recipes-support/libunwind/libunwind-1.1/Support-building-with-older-compilers.patch
@@ -0,0 +1,72 @@
+From 10b064ffe902d5af31bb49bd8e4f03c545f8d462 Mon Sep 17 00:00:00 2001
+From: Ladislav Michl <ladis@linux-mips.org>
+Date: Tue, 13 Nov 2012 11:19:47 +0100
+Subject: [PATCH] Support building with older compilers.
+
+Add a check for __builtin_unreachable.
+
+Upstream-Status: Pending
+---
+ configure.ac | 11 +++++++++++
+ include/libunwind_i.h | 6 ++++++
+ src/arm/Gresume.c | 2 +-
+ src/sh/Gresume.c | 2 +-
+ 4 files changed, 19 insertions(+), 2 deletions(-)
+
+--- a/configure.ac
++++ b/configure.ac
+@@ -285,6 +285,17 @@ if test x$have__builtin___clear_cache =
+ fi
+ AC_MSG_RESULT([$have__builtin___clear_cache])
+
++AC_MSG_CHECKING([for __builtin_unreachable])
++AC_LINK_IFELSE(
++ [AC_LANG_PROGRAM([[]], [[__builtin_unreachable()]])],
++ [have__builtin_unreachable=yes],
++ [have__builtin_unreachable=no])
++if test x$have__builtin_unreachable = xyes; then
++ AC_DEFINE([HAVE__BUILTIN_UNREACHABLE], [1],
++ [Defined if __builtin_unreachable() is available])
++fi
++AC_MSG_RESULT([$have__builtin_unreachable])
++
+ AC_MSG_CHECKING([for __sync atomics])
+ AC_LINK_IFELSE(
+ [AC_LANG_PROGRAM([[]], [[
+--- a/include/libunwind_i.h
++++ b/include/libunwind_i.h
+@@ -72,6 +72,12 @@ WITH THE SOFTWARE OR THE USE OR OTHER DE
+ # endif
+ #endif
+
++#if defined(HAVE__BUILTIN_UNREACHABLE)
++# define unreachable() __builtin_unreachable()
++#else
++# define unreachable() do { } while (1)
++#endif
++
+ #ifdef DEBUG
+ # define UNW_DEBUG 1
+ #else
+--- a/src/arm/Gresume.c
++++ b/src/arm/Gresume.c
+@@ -96,7 +96,7 @@ arm_local_resume (unw_addr_space_t as, u
+ : : "r" (c->sigcontext_sp), "r" (c->sigcontext_pc)
+ );
+ }
+- __builtin_unreachable();
++ unreachable();
+ #else
+ printf ("%s: implement me\n", __FUNCTION__);
+ #endif
+--- a/src/sh/Gresume.c
++++ b/src/sh/Gresume.c
+@@ -109,7 +109,7 @@ sh_local_resume (unw_addr_space_t as, un
+ "r" (c->sigcontext_pc)
+ );
+ }
+- __builtin_unreachable();
++ unreachable();
+ #endif
+ return -UNW_EINVAL;
+ }
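The effect of the unreachable() wrapper added above, shown outside the patch as a minimal stand-alone sketch (the caller below is hypothetical and only for illustration; HAVE__BUILTIN_UNREACHABLE is the macro defined by the new configure check):

    /* Fallback identical to the one the patch adds to libunwind_i.h:
       spin forever when the compiler lacks __builtin_unreachable(). */
    #if defined(HAVE__BUILTIN_UNREACHABLE)
    # define unreachable() __builtin_unreachable()
    #else
    # define unreachable() do { } while (1)
    #endif

    /* Hypothetical caller: after code that never returns, unreachable()
       silences "control reaches end of non-void function" warnings on
       both new and old compilers. */
    static int local_resume_stub (int never_returns)
    {
      if (never_returns)
        unreachable ();
      return -1;
    }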
diff --git a/meta/recipes-support/libunwind/libunwind.inc b/meta/recipes-support/libunwind/libunwind.inc
new file mode 100644
index 0000000000..6743b21fcf
--- /dev/null
+++ b/meta/recipes-support/libunwind/libunwind.inc
@@ -0,0 +1,31 @@
+DESCRIPTION = "a portable and efficient C programming interface (API) to determine the call-chain of a program"
+HOMEPAGE = "http://www.nongnu.org/libunwind"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=3fced11d6df719b47505837a51c16ae5"
+
+SRC_URI = "${SAVANNAH_NONGNU_MIRROR}/${BPN}/${BPN}-${PV}.tar.gz"
+
+inherit autotools
+
+PACKAGECONFIG ??= ""
+PACKAGECONFIG[lzma] = "--enable-minidebuginfo,--disable-minidebuginfo,lzma"
+
+EXTRA_OECONF_arm = "--enable-debug-frame"
+EXTRA_OECONF_aarch64 = "--enable-debug-frame"
+
+CFLAGS += "${ATOMICOPS}"
+ATOMICOPS_armv5 = "-DAO_USE_PTHREAD_DEFS=1"
+ATOMICOPS_armv4 = "-DAO_USE_PTHREAD_DEFS=1"
+ATOMICOPS ?= ""
+
+LDFLAGS += "${LIBATOMICS}"
+LIBATOMICS_armv5 = "-latomic_ops"
+LIBATOMICS_armv4 = "-latomic_ops"
+LIBATOMICS ?= ""
+
+DEPENDS += "${DEPLIBATOMICS}"
+DEPLIBATOMICS_armv5 = "libatomics-ops"
+DEPLIBATOMICS_armv4 = "libatomics-ops"
+DEPLIBATOMICS ?= ""
+
+BBCLASSEXTEND = "native"
diff --git a/meta/recipes-support/libunwind/libunwind_1.1.bb b/meta/recipes-support/libunwind/libunwind_1.1.bb
new file mode 100644
index 0000000000..2e52430d54
--- /dev/null
+++ b/meta/recipes-support/libunwind/libunwind_1.1.bb
@@ -0,0 +1,9 @@
+require libunwind.inc
+
+SRC_URI += "\
+ file://Support-building-with-older-compilers.patch \
+ file://AArch64-port.patch \
+"
+
+SRC_URI[md5sum] = "fb4ea2f6fbbe45bf032cd36e586883ce"
+SRC_URI[sha256sum] = "9dfe0fcae2a866de9d3942c66995e4b460230446887dbdab302d41a8aee8d09a"
diff --git a/meta/recipes-support/liburcu/files/aarch64.patch b/meta/recipes-support/liburcu/files/aarch64.patch
new file mode 100644
index 0000000000..c6cc8c2fd3
--- /dev/null
+++ b/meta/recipes-support/liburcu/files/aarch64.patch
@@ -0,0 +1,19 @@
+liburcu: recognize aarch64
+
+Make the same as "arm" internally.
+
+Upstream-Status: Pending
+
+Signed-off-by: joe.slater@windriver.com
+
+
+--- a/configure.ac
++++ b/configure.ac
+@@ -77,6 +77,7 @@ AS_CASE([$host_cpu],
+ [alpha*], [ARCHTYPE="alpha"],
+ [ia64], [ARCHTYPE="gcc"],
+ [arm*], [ARCHTYPE="arm"],
++ [aarch64], [ARCHTYPE="arm"],
+ [mips*], [ARCHTYPE="mips"],
+ [tile*], [ARCHTYPE="gcc"],
+ [ARCHTYPE="unknown"]
diff --git a/meta/recipes-support/liburcu/liburcu/Revert-Blacklist-ARM-gcc-4.8.0-4.8.1-4.8.2.patch b/meta/recipes-support/liburcu/liburcu/Revert-Blacklist-ARM-gcc-4.8.0-4.8.1-4.8.2.patch
new file mode 100644
index 0000000000..535a7384cb
--- /dev/null
+++ b/meta/recipes-support/liburcu/liburcu/Revert-Blacklist-ARM-gcc-4.8.0-4.8.1-4.8.2.patch
@@ -0,0 +1,47 @@
+From 7b3df100346128d780f218b881d563d1fd12e310 Mon Sep 17 00:00:00 2001
+From: Jonathan Liu <net147@gmail.com>
+Date: Mon, 20 Oct 2014 13:46:10 +1100
+Subject: [PATCH] Revert "Blacklist ARM gcc 4.8.0, 4.8.1, 4.8.2"
+
+This reverts commit 4b79310aa3d408ba30fee02cc497a68072d38a99.
+OE-Core is using a patched GCC 4.8.2 which is able to compile liburcu
+properly.
+
+Upstream-Status: Inappropriate [OE specific]
+
+Signed-off-by: Jonathan Liu <net147@gmail.com>
+---
+ urcu/compiler.h | 19 -------------------
+ 1 file changed, 19 deletions(-)
+
+diff --git a/urcu/compiler.h b/urcu/compiler.h
+index 1e30903..19534f0 100644
+--- a/urcu/compiler.h
++++ b/urcu/compiler.h
+@@ -108,23 +108,4 @@
+
+ #define CAA_ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
+
+-/*
+- * Don't allow compiling with buggy compiler.
+- */
+-
+-#ifdef __GNUC__
+-# define URCU_GCC_VERSION (__GNUC__ * 10000 \
+- + __GNUC_MINOR__ * 100 \
+- + __GNUC_PATCHLEVEL__)
+-
+-/*
+- * http://gcc.gnu.org/bugzilla/show_bug.cgi?id=58854
+- */
+-# ifdef __ARMEL__
+-# if URCU_GCC_VERSION >= 40800 && URCU_GCC_VERSION <= 40802
+-# error Your gcc version produces clobbered frame accesses
+-# endif
+-# endif
+-#endif
+-
+ #endif /* _URCU_COMPILER_H */
+--
+2.1.2
+
diff --git a/meta/recipes-support/liburcu/liburcu_0.8.4.bb b/meta/recipes-support/liburcu/liburcu_0.8.5.bb
index cd7af24baa..62323d3fd7 100644
--- a/meta/recipes-support/liburcu/liburcu_0.8.4.bb
+++ b/meta/recipes-support/liburcu/liburcu_0.8.5.bb
@@ -8,10 +8,12 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=0f060c30a27922ce9c0d557a639b4fa3 \
file://urcu/uatomic/x86.h;beginline=4;endline=21;md5=220552f72c55b102f2ee35929734ef42"
SRC_URI = "http://lttng.org/files/urcu/userspace-rcu-${PV}.tar.bz2 \
+ file://Revert-Blacklist-ARM-gcc-4.8.0-4.8.1-4.8.2.patch \
+ file://aarch64.patch \
"
-SRC_URI[md5sum] = "2ca6671b20a550aa0e8020a1a9a96fd4"
-SRC_URI[sha256sum] = "96c0a157e94a15b1506efe9aedd98145e6eb41a3fbcf5b0d118b7a783b22fe12"
+SRC_URI[md5sum] = "24ba9e03542b747d3378434eb0041acf"
+SRC_URI[sha256sum] = "a2562eaca107ec6eca2856632b6035c6aaf38df79020195ed8955a7b4773312a"
S = "${WORKDIR}/userspace-rcu-${PV}"
CFLAGS_append_libc-uclibc = " -D_GNU_SOURCE"
diff --git a/meta/recipes-support/libxslt/libxslt_1.1.28.bb b/meta/recipes-support/libxslt/libxslt_1.1.28.bb
index 0b2526dbee..ded883e457 100644
--- a/meta/recipes-support/libxslt/libxslt_1.1.28.bb
+++ b/meta/recipes-support/libxslt/libxslt_1.1.28.bb
@@ -35,6 +35,11 @@ RPROVIDES_${PN}-bin += "${PN}-utils"
RCONFLICTS_${PN}-bin += "${PN}-utils"
RREPLACES_${PN}-bin += "${PN}-utils"
+
+do_install_append_class-native () {
+ create_wrapper ${D}/${bindir}/xsltproc XML_CATALOG_FILES=${sysconfdir}/xml/catalog.xml
+}
+
FILES_${PN} += "${libdir}/libxslt-plugins"
FILES_${PN}-dev += "${libdir}/xsltConf.sh"
diff --git a/meta/recipes-support/lz4/lz4_svn.bb b/meta/recipes-support/lz4/lz4_svn.bb
index adbe098970..3a742000b1 100644
--- a/meta/recipes-support/lz4/lz4_svn.bb
+++ b/meta/recipes-support/lz4/lz4_svn.bb
@@ -3,10 +3,10 @@ DESCRIPTION = "LZ4 is a very fast lossless compression algorithm, providing comp
LICENSE = "BSD"
LIC_FILES_CHKSUM = "file://LICENSE;md5=0b0d063f37a4477b54af2459477dcafd \
- file://Makefile;md5=ca9db31c873dc45240e2792a866ab0b5"
+ file://Makefile;md5=7551cc5d31ca66e003ab06e83486524a"
# Upstream names releases after SVN revs
-SRCREV = "120"
+SRCREV = "123"
PV = "r${SRCREV}"
SRC_URI = "svn://lz4.googlecode.com/svn/;module=trunk;protocol=http"
diff --git a/meta/recipes-support/lzo/lzo/0001-Use-memcpy-instead-of-reinventing-it.patch b/meta/recipes-support/lzo/lzo/0001-Use-memcpy-instead-of-reinventing-it.patch
new file mode 100644
index 0000000000..db3a70e803
--- /dev/null
+++ b/meta/recipes-support/lzo/lzo/0001-Use-memcpy-instead-of-reinventing-it.patch
@@ -0,0 +1,70 @@
+From: Simon McVittie <smcv@debian.org>
+Date: Sun, 23 Nov 2014 22:50:33 +0000
+Subject: Use memcpy() instead of reinventing it
+
+gcc inlines memcpy() with results as fast as handwritten code (at
+least in my brief testing with lzop), and knows the alignment
+constraints for our architectures.
+
+Change suggested by Julian Taylor.
+
+Bug-Debian: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=757037
+
+Upstream-Status: Pending
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+---
+ minilzo/minilzo.c | 14 ++++++++++++++
+ src/lzo_func.h | 14 ++++++++++++++
+ 2 files changed, 28 insertions(+)
+
+
+diff --git a/minilzo/minilzo.c b/minilzo/minilzo.c
+index ab2be5f..6913c2f 100644
+--- a/minilzo/minilzo.c
++++ b/minilzo/minilzo.c
+@@ -3523,6 +3523,20 @@ LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU8p)0)==8)
+ if ((void)0, n__n > 0) do { *d__n++ = *s__n++; } while (--n__n > 0); \
+ LZO_BLOCK_END
+
++/* Debian-specific change: we know that our compiler inlines memcpy() with
++ * constant n to be as fast as handwritten code, and knows which architectures
++ * need things correctly aligned. */
++#undef LZO_MEMOPS_COPY1
++#undef LZO_MEMOPS_COPY2
++#undef LZO_MEMOPS_COPY4
++#undef LZO_MEMOPS_COPY8
++#undef LZO_MEMOPS_COPYN
++#define LZO_MEMOPS_COPY1(dd,ss) memcpy(dd, ss, 1)
++#define LZO_MEMOPS_COPY2(dd,ss) memcpy(dd, ss, 2)
++#define LZO_MEMOPS_COPY4(dd,ss) memcpy(dd, ss, 4)
++#define LZO_MEMOPS_COPY8(dd,ss) memcpy(dd, ss, 8)
++#define LZO_MEMOPS_COPYN(dd,ss,nn) memcpy(dd, ss, nn)
++
+ __lzo_static_forceinline lzo_uint16_t lzo_memops_get_le16(const lzo_voidp ss)
+ {
+ lzo_uint16_t v;
+diff --git a/src/lzo_func.h b/src/lzo_func.h
+index dfaa676..1cc1b53 100644
+--- a/src/lzo_func.h
++++ b/src/lzo_func.h
+@@ -333,6 +333,20 @@ LZO_COMPILE_TIME_ASSERT_HEADER(sizeof(*(lzo_memops_TU8p)0)==8)
+ if ((void)0, n__n > 0) do { *d__n++ = *s__n++; } while (--n__n > 0); \
+ LZO_BLOCK_END
+
++/* Debian-specific change: we know that our compiler inlines memcpy() with
++ * constant n to be as fast as handwritten code, and knows which architectures
++ * need things correctly aligned. */
++#undef LZO_MEMOPS_COPY1
++#undef LZO_MEMOPS_COPY2
++#undef LZO_MEMOPS_COPY4
++#undef LZO_MEMOPS_COPY8
++#undef LZO_MEMOPS_COPYN
++#define LZO_MEMOPS_COPY1(dd,ss) memcpy(dd, ss, 1)
++#define LZO_MEMOPS_COPY2(dd,ss) memcpy(dd, ss, 2)
++#define LZO_MEMOPS_COPY4(dd,ss) memcpy(dd, ss, 4)
++#define LZO_MEMOPS_COPY8(dd,ss) memcpy(dd, ss, 8)
++#define LZO_MEMOPS_COPYN(dd,ss,nn) memcpy(dd, ss, nn)
++
+ __lzo_static_forceinline lzo_uint16_t lzo_memops_get_le16(const lzo_voidp ss)
+ {
+ lzo_uint16_t v;
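The reasoning behind the lzo change, as a stand-alone sketch (the two function names are illustrative only, not part of lzo): with a compile-time-constant length, memcpy() is inlined by gcc into properly aligned loads and stores, so it matches the hand-written copy loop while letting the compiler handle per-architecture alignment.

    #include <string.h>

    /* Hand-rolled 8-byte copy, similar in spirit to what the
       LZO_MEMOPS_COPY8 macro expanded to before the patch. */
    static void copy8_loop (unsigned char *d, const unsigned char *s)
    {
      unsigned n = 8;
      do { *d++ = *s++; } while (--n > 0);
    }

    /* Replacement the patch switches to: a constant-size memcpy()
       that the compiler typically inlines as a single 64-bit move. */
    static void copy8_memcpy (void *d, const void *s)
    {
      memcpy (d, s, 8);
    }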
diff --git a/meta/recipes-support/lzo/lzo_2.08.bb b/meta/recipes-support/lzo/lzo_2.08.bb
index 7d7d1f32ba..af06e29a6f 100644
--- a/meta/recipes-support/lzo/lzo_2.08.bb
+++ b/meta/recipes-support/lzo/lzo_2.08.bb
@@ -6,6 +6,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \
file://src/lzo_init.c;beginline=5;endline=25;md5=a6e25df9a83b24629e847846ccdd8054"
SRC_URI = "http://www.oberhumer.com/opensource/lzo/download/lzo-${PV}.tar.gz \
+ file://0001-Use-memcpy-instead-of-reinventing-it.patch \
file://acinclude.m4 \
"
diff --git a/meta/recipes-support/npth/npth/pkgconfig.patch b/meta/recipes-support/npth/npth/pkgconfig.patch
new file mode 100644
index 0000000000..178ed54c22
--- /dev/null
+++ b/meta/recipes-support/npth/npth/pkgconfig.patch
@@ -0,0 +1,49 @@
+Added npth pkgconfig file
+
+Upstream-Status: Inappropriate [oe-core specific]
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+
+Index: npth-1.1/src/npth.pc.in
+===================================================================
+--- /dev/null
++++ npth-1.1/src/npth.pc.in
+@@ -0,0 +1,10 @@
++prefix=@prefix@
++exec_prefix=@exec_prefix@
++libdir=@libdir@
++includedir=@includedir@
++
++Name: npth
++Description: a new portable POSIX threading library
++Version: @VERSION@
++Libs: -L${libdir} -lnpth -lpthread
++Cflags: -I${includedir}
+Index: npth-1.1/src/Makefile.am
+===================================================================
+--- npth-1.1.orig/src/Makefile.am
++++ npth-1.1/src/Makefile.am
+@@ -27,8 +27,10 @@
+ # License along with this program; if not, see <http://www.gnu.org/licenses/>.
+
+ ## Process this file with automake to produce Makefile.in
++pkgconfigdir = $(libdir)/pkgconfig
++pkgconfig_DATA = npth.pc
+
+-EXTRA_DIST = libnpth.vers
++EXTRA_DIST = libnpth.vers npth.pc
+ # versioninfo.rc.in
+ nodist_include_HEADERS = npth.h
+
+Index: npth-1.1/configure.ac
+===================================================================
+--- npth-1.1.orig/configure.ac
++++ npth-1.1/configure.ac
+@@ -337,6 +337,7 @@ src/Makefile
+ w32/Makefile
+ tests/Makefile])
+ AC_CONFIG_FILES(npth-config, chmod +x npth-config)
++AC_CONFIG_FILES([src/npth.pc])
+ AC_OUTPUT
+
+ echo "
diff --git a/meta/recipes-support/npth/npth_0.91.bb b/meta/recipes-support/npth/npth_1.1.bb
index 7105b94e3d..72a194e784 100644
--- a/meta/recipes-support/npth/npth_0.91.bb
+++ b/meta/recipes-support/npth/npth_1.1.bb
@@ -7,9 +7,11 @@ LIC_FILES_CHKSUM = "\
file://COPYING.LESSER;md5=6a6a8e020838b23406c81b19c1d46df6\
"
SRC_URI = "ftp://ftp.gnupg.org/gcrypt/npth/npth-${PV}.tar.bz2 \
+ file://pkgconfig.patch \
"
-SRC_URI[md5sum] = "87712f0cee656c390b49773923e26e7f"
-SRC_URI[sha256sum] = "caef86ced4a331e162897818a5b924860c8d6003e52da5bdf76da00e8e0dfae1"
+
+SRC_URI[md5sum] = "aaffc8ef3e955ab50a1905809f268a23"
+SRC_URI[sha256sum] = "896c561eb2ec8da35f11828fb04a3fbff12d41ff657c799056d7dc4a66e5df7f"
BINCONFIG = "${bindir}/npth-config"
diff --git a/meta/recipes-support/nspr/nspr/fix-build-on-x86_64.patch b/meta/recipes-support/nspr/nspr/fix-build-on-x86_64.patch
index a6fa1ea607..c2b7258e50 100644
--- a/meta/recipes-support/nspr/nspr/fix-build-on-x86_64.patch
+++ b/meta/recipes-support/nspr/nspr/fix-build-on-x86_64.patch
@@ -11,23 +11,27 @@ Upstream-Status: Pending
configure.in | 12 ++++++------
1 files changed, 6 insertions(+), 6 deletions(-)
-diff --git a/configure.in b/configure.in
-index 39c96a3..99a03ac 100644
---- a/configure.in
-+++ b/configure.in
-@@ -1778,24 +1778,24 @@ tools are selected during the Xcode/Developer Tools installation.])
+Index: nspr/configure.in
+===================================================================
+--- nspr.orig/configure.in
++++ nspr/configure.in
+@@ -1875,28 +1875,24 @@ tools are selected during the Xcode/Deve
PR_MD_ASFILES=os_Linux_ia64.s
;;
x86_64)
- if test -n "$USE_64"; then
- PR_MD_ASFILES=os_Linux_x86_64.s
-- else
-+ if test -n "$USE_N32"; then
- AC_DEFINE(i386)
- PR_MD_ASFILES=os_Linux_x86.s
- CC="$CC -m32"
- CXX="$CXX -m32"
-+ else
+- elif test -n "$USE_X32"; then
++ if test -n "$USE_X32"; then
++ AC_DEFINE(i386)
+ PR_MD_ASFILES=os_Linux_x86_64.s
+ CC="$CC -mx32"
+ CXX="$CXX -mx32"
+ else
+- AC_DEFINE(i386)
+- PR_MD_ASFILES=os_Linux_x86.s
+- CC="$CC -m32"
+- CXX="$CXX -m32"
+ PR_MD_ASFILES=os_Linux_x86_64.s
fi
;;
@@ -46,6 +50,3 @@ index 39c96a3..99a03ac 100644
fi
;;
m68k)
---
-1.7.1
-
diff --git a/meta/recipes-support/nspr/nspr/remove-srcdir-from-configure-in.patch b/meta/recipes-support/nspr/nspr/remove-srcdir-from-configure-in.patch
new file mode 100644
index 0000000000..bde715c5dc
--- /dev/null
+++ b/meta/recipes-support/nspr/nspr/remove-srcdir-from-configure-in.patch
@@ -0,0 +1,19 @@
+The $srcdir variable is not defined at the time gnu-configize is run.
+
+Upstream-Status: Inappropriate [OE-Core specific]
+
+Signed-off-by: Saul Wold <sgw@linux.intel.com>
+
+Index: nspr/configure.in
+===================================================================
+--- nspr.orig/configure.in
++++ nspr/configure.in
+@@ -8,7 +8,7 @@ AC_PREREQ(2.61)
+ AC_INIT
+ AC_CONFIG_SRCDIR([pr/include/nspr.h])
+
+-AC_CONFIG_AUX_DIR(${srcdir}/build/autoconf)
++AC_CONFIG_AUX_DIR(build/autoconf)
+ AC_CANONICAL_TARGET
+
+ dnl ========================================================
diff --git a/meta/recipes-support/nspr/nspr/trickly-fix-build-on-x86_64.patch b/meta/recipes-support/nspr/nspr/trickly-fix-build-on-x86_64.patch
deleted file mode 100644
index 8ca51e4d1f..0000000000
--- a/meta/recipes-support/nspr/nspr/trickly-fix-build-on-x86_64.patch
+++ /dev/null
@@ -1,62 +0,0 @@
-trickily fix build failure on x86_64
-
-It seems that we can not run the 'autoreconf -f -i' for the nspr, I met
-several strange problems while trying to do that, and the previous
-author seemed had noticed this, so he wrote:
-
-do_configure() {
- oe_runconf
-}
-
-to avoid running the "autoreconf". But we must modify configure.in to
-fix the build failure on x86_64, so both modify configure and
-configure.in, once the "autoreconf" can work correctly, we can remove
-this patch.
-
-Signed-off-by: Robert Yang <liezhi.yang@windriver.com>
-
-Upstream-Status: Inappropriate [configuration]
----
- configure | 12 ++++++------
- 1 files changed, 6 insertions(+), 6 deletions(-)
-
-diff --git a/configure b/configure
---- a/configure
-+++ b/configure
-@@ -4366,9 +4366,7 @@ EOF
- PR_MD_ASFILES=os_Linux_ia64.s
- ;;
- x86_64)
-- if test -n "$USE_64"; then
-- PR_MD_ASFILES=os_Linux_x86_64.s
-- else
-+ if test -n "$USE_N32"; then
- cat >> confdefs.h <<\EOF
- #define i386 1
- EOF
-@@ -4376,17 +4374,19 @@ EOF
- PR_MD_ASFILES=os_Linux_x86.s
- CC="$CC -m32"
- CXX="$CXX -m32"
-+ else
-+ PR_MD_ASFILES=os_Linux_x86_64.s
- fi
- ;;
- ppc|powerpc)
- PR_MD_ASFILES=os_Linux_ppc.s
- ;;
- powerpc64)
-- if test -n "$USE_64"; then
-+ if test -n "$USE_N32"; then
-+ PR_MD_ASFILES=os_Linux_ppc.s
-+ else
- CC="$CC -m64"
- CXX="$CXX -m64"
-- else
-- PR_MD_ASFILES=os_Linux_ppc.s
- fi
- ;;
- m68k)
---
-1.7.1
-
diff --git a/meta/recipes-support/nspr/nspr_4.10.3.bb b/meta/recipes-support/nspr/nspr_4.10.7.bb
index 60e1bfa7b1..69e9dfa6a3 100644
--- a/meta/recipes-support/nspr/nspr_4.10.3.bb
+++ b/meta/recipes-support/nspr/nspr_4.10.7.bb
@@ -8,14 +8,13 @@ SECTION = "libs/network"
SRC_URI = "ftp://ftp.mozilla.org/pub/mozilla.org/nspr/releases/v${PV}/src/nspr-${PV}.tar.gz \
file://remove-rpath-from-tests.patch \
file://fix-build-on-x86_64.patch \
- file://trickly-fix-build-on-x86_64.patch \
- file://nspr-CVE-2014-1545.patch \
+ file://remove-srcdir-from-configure-in.patch \
"
SRC_URI += "file://nspr.pc.in"
-SRC_URI[md5sum] = "bf298e874cf454a3c2f8fe7e671c5d2e"
-SRC_URI[sha256sum] = "f25779b1a665dab0090b9c977dc6c29a63320f442956ed78629b66b405cb01e5"
+SRC_URI[md5sum] = "6e06919e4b56efed501e05d8b45ec10e"
+SRC_URI[sha256sum] = "389af5cfa863ea9bc6de7b30c15f8a4f9bddd8002f8c6fdc8b33caef43893938"
S = "${WORKDIR}/nspr-${PV}/nspr"
@@ -141,12 +140,6 @@ TESTS = "runtests.pl \
inherit autotools-brokensep
-do_configure() {
- gnu-configize --force
- mv config.sub config.guess build/autoconf
- oe_runconf
-}
-
do_compile_prepend() {
oe_runmake CROSS_COMPILE=1 CFLAGS="-DXP_UNIX" LDFLAGS="" CC=gcc -C config export
}
diff --git a/meta/recipes-support/nss-myhostname/nss-myhostname_0.3.bb b/meta/recipes-support/nss-myhostname/nss-myhostname_0.3.bb
index d8ec863954..bbce9e9af0 100644
--- a/meta/recipes-support/nss-myhostname/nss-myhostname_0.3.bb
+++ b/meta/recipes-support/nss-myhostname/nss-myhostname_0.3.bb
@@ -11,16 +11,19 @@ SRC_URI = "http://0pointer.de/lennart/projects/nss-myhostname/nss-myhostname-${P
SRC_URI[md5sum] = "d4ab9ac36c053ab8fb836db1cbd4a48f"
SRC_URI[sha256sum] = "2ba744ea8d578d1c57c85884e94a3042ee17843a5294434d3a7f6c4d67e7caf2"
-inherit autotools
+inherit autotools distro_features_check
+
+# The systemd has its own copy of nss-myhostname
+CONFLICT_DISTRO_FEATURES = "systemd"
pkg_postinst_${PN} () {
sed -e '/^hosts:/s/\s*\<myhostname\>//' \
-e 's/\(^hosts:.*\)\(\<files\>\)\(.*\)\(\<dns\>\)\(.*\)/\1\2 myhostname \3\4\5/' \
- -i ${D}${sysconfdir}/nsswitch.conf
+ -i $D${sysconfdir}/nsswitch.conf
}
pkg_prerm_${PN} () {
sed -e '/^hosts:/s/\s*\<myhostname\>//' \
-e '/^hosts:/s/\s*myhostname//' \
- -i ${D}${sysconfdir}/nsswitch.conf
+ -i $D${sysconfdir}/nsswitch.conf
}
diff --git a/meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-1739.patch b/meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-1739.patch
deleted file mode 100644
index 1a159c3934..0000000000
--- a/meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-1739.patch
+++ /dev/null
@@ -1,81 +0,0 @@
-Upstream-Status: Backport
-Signed-off-by: yzhu1 <yanjun.zhu@windriver.com>
-
---- a/nss/lib/ssl/ssl3con.c
-+++ b/nss/lib/ssl/ssl3con.c
-@@ -10509,7 +10509,7 @@ ssl_RemoveSSLv3CBCPadding(sslBuffer *pla
- /* SSLv3 padding bytes are random and cannot be checked. */
- t = plaintext->len;
- t -= paddingLength+overhead;
-- /* If len >= padding_length+overhead then the MSB of t is zero. */
-+ /* If len >= paddingLength+overhead then the MSB of t is zero. */
- good = DUPLICATE_MSB_TO_ALL(~t);
- /* SSLv3 requires that the padding is minimal. */
- t = blockSize - (paddingLength+1);
-@@ -10742,7 +10742,7 @@ ssl3_HandleRecord(sslSocket *ss, SSL3Cip
- }
- }
-
-- good = (unsigned)-1;
-+ good = ~0U;
- minLength = crSpec->mac_size;
- if (cipher_def->type == type_block) {
- /* CBC records have a padding length byte at the end. */
-@@ -10756,14 +10756,7 @@ ssl3_HandleRecord(sslSocket *ss, SSL3Cip
- /* We can perform this test in variable time because the record's total
- * length and the ciphersuite are both public knowledge. */
- if (cText->buf->len < minLength) {
-- SSL_DBG(("%d: SSL3[%d]: HandleRecord, record too small.",
-- SSL_GETPID(), ss->fd));
-- /* must not hold spec lock when calling SSL3_SendAlert. */
-- ssl_ReleaseSpecReadLock(ss);
-- SSL3_SendAlert(ss, alert_fatal, bad_record_mac);
-- /* always log mac error, in case attacker can read server logs. */
-- PORT_SetError(SSL_ERROR_BAD_MAC_READ);
-- return SECFailure;
-+ goto decrypt_loser;
- }
-
- if (cipher_def->type == type_block &&
-@@ -10831,11 +10824,18 @@ ssl3_HandleRecord(sslSocket *ss, SSL3Cip
- return SECFailure;
- }
-
-+ if (cipher_def->type == type_block &&
-+ ((cText->buf->len - ivLen) % cipher_def->block_size) != 0) {
-+ goto decrypt_loser;
-+ }
-+
- /* decrypt from cText buf to plaintext. */
- rv = crSpec->decode(
- crSpec->decodeContext, plaintext->buf, (int *)&plaintext->len,
- plaintext->space, cText->buf->buf + ivLen, cText->buf->len - ivLen);
-- good &= SECStatusToMask(rv);
-+ if (rv != SECSuccess) {
-+ goto decrypt_loser;
-+ }
-
- PRINT_BUF(80, (ss, "cleartext:", plaintext->buf, plaintext->len));
-
-@@ -10843,7 +10843,7 @@ ssl3_HandleRecord(sslSocket *ss, SSL3Cip
-
- /* If it's a block cipher, check and strip the padding. */
- if (cipher_def->type == type_block) {
-- const unsigned int blockSize = cipher_def->iv_size;
-+ const unsigned int blockSize = cipher_def->block_size;
- const unsigned int macSize = crSpec->mac_size;
-
- if (crSpec->version <= SSL_LIBRARY_VERSION_3_0) {
-@@ -10899,10 +10899,11 @@ ssl3_HandleRecord(sslSocket *ss, SSL3Cip
- }
-
- if (good == 0) {
-+decrypt_loser:
- /* must not hold spec lock when calling SSL3_SendAlert. */
- ssl_ReleaseSpecReadLock(ss);
-
-- SSL_DBG(("%d: SSL3[%d]: mac check failed", SSL_GETPID(), ss->fd));
-+ SSL_DBG(("%d: SSL3[%d]: decryption failed", SSL_GETPID(), ss->fd));
-
- if (!IS_DTLS(ss)) {
- SSL3_SendAlert(ss, alert_fatal, bad_record_mac);
diff --git a/meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-1741.patch b/meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-1741.patch
deleted file mode 100644
index 21da0c03b5..0000000000
--- a/meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-1741.patch
+++ /dev/null
@@ -1,92 +0,0 @@
-Upstream-Status: backport
-yanjun.zhu <yanjun.zhu@windriver.com>
---- a/nss/lib/util/secport.c
-+++ b/nss/lib/util/secport.c
-@@ -69,13 +69,22 @@ PORTCharConversionFunc ucs4Utf8ConvertFu
- PORTCharConversionFunc ucs2Utf8ConvertFunc;
- PORTCharConversionWSwapFunc ucs2AsciiConvertFunc;
-
-+/* NSPR memory allocation functions (PR_Malloc, PR_Calloc, and PR_Realloc)
-+ * use the PRUint32 type for the size parameter. Before we pass a size_t or
-+ * unsigned long size to these functions, we need to ensure it is <= half of
-+ * the maximum PRUint32 value to avoid truncation and catch a negative size.
-+ */
-+#define MAX_SIZE (PR_UINT32_MAX >> 1)
-+
- void *
- PORT_Alloc(size_t bytes)
- {
-- void *rv;
-+ void *rv = NULL;
-
-- /* Always allocate a non-zero amount of bytes */
-- rv = (void *)PR_Malloc(bytes ? bytes : 1);
-+ if (bytes <= MAX_SIZE) {
-+ /* Always allocate a non-zero amount of bytes */
-+ rv = PR_Malloc(bytes ? bytes : 1);
-+ }
- if (!rv) {
- ++port_allocFailures;
- PORT_SetError(SEC_ERROR_NO_MEMORY);
-@@ -86,9 +95,11 @@ PORT_Alloc(size_t bytes)
- void *
- PORT_Realloc(void *oldptr, size_t bytes)
- {
-- void *rv;
-+ void *rv = NULL;
-
-- rv = (void *)PR_Realloc(oldptr, bytes);
-+ if (bytes <= MAX_SIZE) {
-+ rv = PR_Realloc(oldptr, bytes);
-+ }
- if (!rv) {
- ++port_allocFailures;
- PORT_SetError(SEC_ERROR_NO_MEMORY);
-@@ -99,10 +110,12 @@ PORT_Realloc(void *oldptr, size_t bytes)
- void *
- PORT_ZAlloc(size_t bytes)
- {
-- void *rv;
-+ void *rv = NULL;
-
-- /* Always allocate a non-zero amount of bytes */
-- rv = (void *)PR_Calloc(1, bytes ? bytes : 1);
-+ if (bytes <= MAX_SIZE) {
-+ /* Always allocate a non-zero amount of bytes */
-+ rv = PR_Calloc(1, bytes ? bytes : 1);
-+ }
- if (!rv) {
- ++port_allocFailures;
- PORT_SetError(SEC_ERROR_NO_MEMORY);
-@@ -209,6 +222,10 @@ PORT_NewArena(unsigned long chunksize)
- {
- PORTArenaPool *pool;
-
-+ if (chunksize > MAX_SIZE) {
-+ PORT_SetError(SEC_ERROR_NO_MEMORY);
-+ return NULL;
-+ }
- pool = PORT_ZNew(PORTArenaPool);
- if (!pool) {
- return NULL;
-@@ -224,8 +241,6 @@ PORT_NewArena(unsigned long chunksize)
- return(&pool->arena);
- }
-
--#define MAX_SIZE 0x7fffffffUL
--
- void *
- PORT_ArenaAlloc(PLArenaPool *arena, size_t size)
- {
-@@ -330,6 +345,11 @@ PORT_ArenaGrow(PLArenaPool *arena, void
- PORTArenaPool *pool = (PORTArenaPool *)arena;
- PORT_Assert(newsize >= oldsize);
-
-+ if (newsize > MAX_SIZE) {
-+ PORT_SetError(SEC_ERROR_NO_MEMORY);
-+ return NULL;
-+ }
-+
- if (ARENAPOOL_MAGIC == pool->magic ) {
- PZ_Lock(pool->lock);
- /* Do we do a THREADMARK check here? */
diff --git a/meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-5605.patch b/meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-5605.patch
deleted file mode 100644
index 7203d02c78..0000000000
--- a/meta/recipes-support/nss/files/nss-3.15.1-fix-CVE-2013-5605.patch
+++ /dev/null
@@ -1,18 +0,0 @@
-signed-off-by: Ryan Sleevi <ryan.sleevi@gmail.com>
-Upstream-Status: Backport
-reference:https://hg.mozilla.org/projects/nss/rev/e79a09364b5e
-
---- a/nss/lib/ssl/ssl3con.c
-+++ b/nss/lib/ssl/ssl3con.c
-@@ -781,6 +781,11 @@ static SECStatus
- Null_Cipher(void *ctx, unsigned char *output, int *outputLen, int maxOutputLen,
- const unsigned char *input, int inputLen)
- {
-+ if (inputLen > maxOutputLen) {
-+ *outputLen = 0; /* Match PK11_CipherOp in setting outputLen */
-+ PORT_SetError(SEC_ERROR_OUTPUT_LEN);
-+ return SECFailure;
-+ }
- *outputLen = inputLen;
- if (input != output)
- PORT_Memcpy(output, input, inputLen);
diff --git a/meta/recipes-support/nss/files/nss-CVE-2013-1740.patch b/meta/recipes-support/nss/files/nss-CVE-2013-1740.patch
deleted file mode 100644
index db3d6f9103..0000000000
--- a/meta/recipes-support/nss/files/nss-CVE-2013-1740.patch
+++ /dev/null
@@ -1,916 +0,0 @@
-nss: CVE-2013-1740
-
-Upstream-Status: Backport
-
-the patch comes from:
-http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2013-1740
-https://bugzilla.mozilla.org/show_bug.cgi?id=919877
-https://bugzilla.mozilla.org/show_bug.cgi?id=713933
-
-changeset: 10946:f28426e944ae
-user: Wan-Teh Chang <wtc@google.com>
-date: Tue Nov 26 16:44:39 2013 -0800
-summary: Bug 713933: Handle the return value of both ssl3_HandleRecord calls
-
-changeset: 10945:774c7dec7565
-user: Wan-Teh Chang <wtc@google.com>
-date: Mon Nov 25 19:16:23 2013 -0800
-summary: Bug 713933: Declare the |falseStart| local variable in the smallest
-
-changeset: 10848:141fae8fb2e8
-user: Wan-Teh Chang <wtc@google.com>
-date: Mon Sep 23 11:25:41 2013 -0700
-summary: Bug 681839: Allow SSL_HandshakeNegotiatedExtension to be called before the handshake is finished, r=brian@briansmith.org
-
-changeset: 10898:1b9c43d28713
-user: Brian Smith <brian@briansmith.org>
-date: Thu Oct 31 15:40:42 2013 -0700
-summary: Bug 713933: Make SSL False Start work with asynchronous certificate validation, r=wtc
-
-Signed-off-by: Li Wang <li.wang@windriver.com>
----
- nss/lib/ssl/ssl.def | 7 ++
- nss/lib/ssl/ssl.h | 54 +++++++++++---
- nss/lib/ssl/ssl3con.c | 188 +++++++++++++++++++++++++++++++++++------------
- nss/lib/ssl/ssl3gthr.c | 63 ++++++++++++----
- nss/lib/ssl/sslauth.c | 10 +--
- nss/lib/ssl/sslimpl.h | 22 +++++-
- nss/lib/ssl/sslinfo.c | 10 +--
- nss/lib/ssl/sslreveal.c | 9 +--
- nss/lib/ssl/sslsecur.c | 139 ++++++++++++++++++++++++++++-------
- nss/lib/ssl/sslsock.c | 12 ++-
- 10 files changed, 386 insertions(+), 128 deletions(-)
-
-diff --git a/nss/lib/ssl/ssl.def b/nss/lib/ssl/ssl.def
-index fbf7fc5..e937bd4 100644
---- a/nss/lib/ssl/ssl.def
-+++ b/nss/lib/ssl/ssl.def
-@@ -163,3 +163,10 @@ SSL_SetStapledOCSPResponses;
- ;+ local:
- ;+*;
- ;+};
-+;+NSS_3.15.3 { # NSS 3.15.3 release
-+;+ global:
-+SSL_RecommendedCanFalseStart;
-+SSL_SetCanFalseStartCallback;
-+;+ local:
-+;+*;
-+;+};
-diff --git a/nss/lib/ssl/ssl.h b/nss/lib/ssl/ssl.h
-index 6db0e34..ddeaaef 100644
---- a/nss/lib/ssl/ssl.h
-+++ b/nss/lib/ssl/ssl.h
-@@ -121,14 +121,17 @@ SSL_IMPORT PRFileDesc *DTLS_ImportFD(PRFileDesc *model, PRFileDesc *fd);
- #define SSL_ENABLE_FALSE_START 22 /* Enable SSL false start (off by */
- /* default, applies only to */
- /* clients). False start is a */
--/* mode where an SSL client will start sending application data before */
--/* verifying the server's Finished message. This means that we could end up */
--/* sending data to an imposter. However, the data will be encrypted and */
--/* only the true server can derive the session key. Thus, so long as the */
--/* cipher isn't broken this is safe. Because of this, False Start will only */
--/* occur on RSA or DH ciphersuites where the cipher's key length is >= 80 */
--/* bits. The advantage of False Start is that it saves a round trip for */
--/* client-speaks-first protocols when performing a full handshake. */
-+/* mode where an SSL client will start sending application data before
-+ * verifying the server's Finished message. This means that we could end up
-+ * sending data to an imposter. However, the data will be encrypted and
-+ * only the true server can derive the session key. Thus, so long as the
-+ * cipher isn't broken this is safe. The advantage of false start is that
-+ * it saves a round trip for client-speaks-first protocols when performing a
-+ * full handshake.
-+ *
-+ * In addition to enabling this option, the application must register a
-+ * callback using the SSL_SetCanFalseStartCallback function.
-+ */
-
- /* For SSL 3.0 and TLS 1.0, by default we prevent chosen plaintext attacks
- * on SSL CBC mode cipher suites (see RFC 4346 Section F.3) by splitting
-@@ -653,14 +656,45 @@ SSL_IMPORT SECStatus SSL_SetMaxServerCacheLocks(PRUint32 maxLocks);
- SSL_IMPORT SECStatus SSL_InheritMPServerSIDCache(const char * envString);
-
- /*
--** Set the callback on a particular socket that gets called when we finish
--** performing a handshake.
-+** Set the callback that gets called when a TLS handshake is complete. The
-+** handshake callback is called after verifying the peer's Finished message and
-+** before processing incoming application data.
-+**
-+** For the initial handshake: If the handshake false started (see
-+** SSL_ENABLE_FALSE_START), then application data may already have been sent
-+** before the handshake callback is called. If we did not false start then the
-+** callback will get called before any application data is sent.
- */
- typedef void (PR_CALLBACK *SSLHandshakeCallback)(PRFileDesc *fd,
- void *client_data);
- SSL_IMPORT SECStatus SSL_HandshakeCallback(PRFileDesc *fd,
- SSLHandshakeCallback cb, void *client_data);
-
-+/* Applications that wish to enable TLS false start must set this callback
-+** function. NSS will invoke the functon to determine if a particular
-+** connection should use false start or not. SECSuccess indicates that the
-+** callback completed successfully, and if so *canFalseStart indicates if false
-+** start can be used. If the callback does not return SECSuccess then the
-+** handshake will be canceled. NSS's recommended criteria can be evaluated by
-+** calling SSL_RecommendedCanFalseStart.
-+**
-+** If no false start callback is registered then false start will never be
-+** done, even if the SSL_ENABLE_FALSE_START option is enabled.
-+**/
-+typedef SECStatus (PR_CALLBACK *SSLCanFalseStartCallback)(
-+ PRFileDesc *fd, void *arg, PRBool *canFalseStart);
-+
-+SSL_IMPORT SECStatus SSL_SetCanFalseStartCallback(
-+ PRFileDesc *fd, SSLCanFalseStartCallback callback, void *arg);
-+
-+/* This function sets *canFalseStart according to the recommended criteria for
-+** false start. These criteria may change from release to release and may depend
-+** on which handshake features have been negotiated and/or properties of the
-+** certifciates/keys used on the connection.
-+*/
-+SSL_IMPORT SECStatus SSL_RecommendedCanFalseStart(PRFileDesc *fd,
-+ PRBool *canFalseStart);
-+
- /*
- ** For the server, request a new handshake. For the client, begin a new
- ** handshake. If flushCache is non-zero, the SSL3 cache entry will be
-diff --git a/nss/lib/ssl/ssl3con.c b/nss/lib/ssl/ssl3con.c
-index 61d24d9..f39ba09 100644
---- a/nss/lib/ssl/ssl3con.c
-+++ b/nss/lib/ssl/ssl3con.c
-@@ -2535,7 +2535,7 @@ ssl3_SendRecord( sslSocket * ss,
- SSL_TRC(3, ("%d: SSL3[%d] SendRecord type: %s nIn=%d",
- SSL_GETPID(), ss->fd, ssl3_DecodeContentType(type),
- nIn));
-- PRINT_BUF(3, (ss, "Send record (plain text)", pIn, nIn));
-+ PRINT_BUF(50, (ss, "Send record (plain text)", pIn, nIn));
-
- PORT_Assert( ss->opt.noLocks || ssl_HaveXmitBufLock(ss) );
-
-@@ -6674,36 +6674,73 @@ done:
- return rv;
- }
-
-+static SECStatus
-+ssl3_CheckFalseStart(sslSocket *ss)
-+{
-+ PORT_Assert( ss->opt.noLocks || ssl_HaveSSL3HandshakeLock(ss) );
-+ PORT_Assert( !ss->ssl3.hs.authCertificatePending );
-+ PORT_Assert( !ss->ssl3.hs.canFalseStart );
-+
-+ if (!ss->canFalseStartCallback) {
-+ SSL_TRC(3, ("%d: SSL[%d]: no false start callback so no false start",
-+ SSL_GETPID(), ss->fd));
-+ } else {
-+ PRBool maybeFalseStart;
-+ SECStatus rv;
-+
-+ /* An attacker can control the selected ciphersuite so we only wish to
-+ * do False Start in the case that the selected ciphersuite is
-+ * sufficiently strong that the attack can gain no advantage.
-+ * Therefore we always require an 80-bit cipher. */
-+ ssl_GetSpecReadLock(ss);
-+ maybeFalseStart = ss->ssl3.cwSpec->cipher_def->secret_key_size >= 10;
-+ ssl_ReleaseSpecReadLock(ss);
-+
-+ if (!maybeFalseStart) {
-+ SSL_TRC(3, ("%d: SSL[%d]: no false start due to weak cipher",
-+ SSL_GETPID(), ss->fd));
-+ } else {
-+ rv = (ss->canFalseStartCallback)(ss->fd,
-+ ss->canFalseStartCallbackData,
-+ &ss->ssl3.hs.canFalseStart);
-+ if (rv == SECSuccess) {
-+ SSL_TRC(3, ("%d: SSL[%d]: false start callback returned %s",
-+ SSL_GETPID(), ss->fd,
-+ ss->ssl3.hs.canFalseStart ? "TRUE" : "FALSE"));
-+ } else {
-+ SSL_TRC(3, ("%d: SSL[%d]: false start callback failed (%s)",
-+ SSL_GETPID(), ss->fd,
-+ PR_ErrorToName(PR_GetError())));
-+ }
-+ return rv;
-+ }
-+ }
-+
-+ ss->ssl3.hs.canFalseStart = PR_FALSE;
-+ return SECSuccess;
-+}
-+
- PRBool
--ssl3_CanFalseStart(sslSocket *ss) {
-- PRBool rv;
-+ssl3_WaitingForStartOfServerSecondRound(sslSocket *ss)
-+{
-+ PRBool result = PR_FALSE;
-
- PORT_Assert( ss->opt.noLocks || ssl_HaveSSL3HandshakeLock(ss) );
-
-- /* XXX: does not take into account whether we are waiting for
-- * SSL_AuthCertificateComplete or SSL_RestartHandshakeAfterCertReq. If/when
-- * that is done, this function could return different results each time it
-- * would be called.
-- */
-+ switch (ss->ssl3.hs.ws) {
-+ case wait_new_session_ticket:
-+ result = PR_TRUE;
-+ break;
-+ case wait_change_cipher:
-+ result = !ssl3_ExtensionNegotiated(ss, ssl_session_ticket_xtn);
-+ break;
-+ case wait_finished:
-+ break;
-+ default:
-+ PR_NOT_REACHED("ssl3_WaitingForStartOfServerSecondRound");
-+ }
-
-- ssl_GetSpecReadLock(ss);
-- rv = ss->opt.enableFalseStart &&
-- !ss->sec.isServer &&
-- !ss->ssl3.hs.isResuming &&
-- ss->ssl3.cwSpec &&
--
-- /* An attacker can control the selected ciphersuite so we only wish to
-- * do False Start in the case that the selected ciphersuite is
-- * sufficiently strong that the attack can gain no advantage.
-- * Therefore we require an 80-bit cipher and a forward-secret key
-- * exchange. */
-- ss->ssl3.cwSpec->cipher_def->secret_key_size >= 10 &&
-- (ss->ssl3.hs.kea_def->kea == kea_dhe_dss ||
-- ss->ssl3.hs.kea_def->kea == kea_dhe_rsa ||
-- ss->ssl3.hs.kea_def->kea == kea_ecdhe_ecdsa ||
-- ss->ssl3.hs.kea_def->kea == kea_ecdhe_rsa);
-- ssl_ReleaseSpecReadLock(ss);
-- return rv;
-+ return result;
- }
-
- static SECStatus ssl3_SendClientSecondRound(sslSocket *ss);
-@@ -6785,6 +6822,9 @@ ssl3_SendClientSecondRound(sslSocket *ss)
- }
- if (ss->ssl3.hs.authCertificatePending &&
- (sendClientCert || ss->ssl3.sendEmptyCert || ss->firstHsDone)) {
-+ SSL_TRC(3, ("%d: SSL3[%p]: deferring ssl3_SendClientSecondRound because"
-+ " certificate authentication is still pending.",
-+ SSL_GETPID(), ss->fd));
- ss->ssl3.hs.restartTarget = ssl3_SendClientSecondRound;
- return SECWouldBlock;
- }
-@@ -6822,14 +6862,50 @@ ssl3_SendClientSecondRound(sslSocket *ss)
- goto loser; /* err code was set. */
- }
-
-- /* XXX: If the server's certificate hasn't been authenticated by this
-- * point, then we may be leaking this NPN message to an attacker.
-+ /* This must be done after we've set ss->ssl3.cwSpec in
-+ * ssl3_SendChangeCipherSpecs because SSL_GetChannelInfo uses information
-+ * from cwSpec. This must be done before we call ssl3_CheckFalseStart
-+ * because the false start callback (if any) may need the information from
-+ * the functions that depend on this being set.
- */
-+ ss->enoughFirstHsDone = PR_TRUE;
-+
- if (!ss->firstHsDone) {
-+ /* XXX: If the server's certificate hasn't been authenticated by this
-+ * point, then we may be leaking this NPN message to an attacker.
-+ */
- rv = ssl3_SendNextProto(ss);
- if (rv != SECSuccess) {
- goto loser; /* err code was set. */
- }
-+
-+ if (ss->opt.enableFalseStart) {
-+ if (!ss->ssl3.hs.authCertificatePending) {
-+ /* When we fix bug 589047, we will need to know whether we are
-+ * false starting before we try to flush the client second
-+ * round to the network. With that in mind, we purposefully
-+ * call ssl3_CheckFalseStart before calling ssl3_SendFinished,
-+ * which includes a call to ssl3_FlushHandshake, so that
-+ * no application develops a reliance on such flushing being
-+ * done before its false start callback is called.
-+ */
-+ ssl_ReleaseXmitBufLock(ss);
-+ rv = ssl3_CheckFalseStart(ss);
-+ ssl_GetXmitBufLock(ss);
-+ if (rv != SECSuccess) {
-+ goto loser;
-+ }
-+ } else {
-+ /* The certificate authentication and the server's Finished
-+ * message are racing each other. If the certificate
-+ * authentication wins, then we will try to false start in
-+ * ssl3_AuthCertificateComplete.
-+ */
-+ SSL_TRC(3, ("%d: SSL3[%p]: deferring false start check because"
-+ " certificate authentication is still pending.",
-+ SSL_GETPID(), ss->fd));
-+ }
-+ }
- }
-
- rv = ssl3_SendFinished(ss, 0);
-@@ -6844,10 +6920,7 @@ ssl3_SendClientSecondRound(sslSocket *ss)
- else
- ss->ssl3.hs.ws = wait_change_cipher;
-
-- /* Do the handshake callback for sslv3 here, if we can false start. */
-- if (ss->handshakeCallback != NULL && ssl3_CanFalseStart(ss)) {
-- (ss->handshakeCallback)(ss->fd, ss->handshakeCallbackData);
-- }
-+ PORT_Assert(ssl3_WaitingForStartOfServerSecondRound(ss));
-
- return SECSuccess;
-
-@@ -9421,13 +9494,6 @@ ssl3_AuthCertificate(sslSocket *ss)
-
- ss->ssl3.hs.authCertificatePending = PR_TRUE;
- rv = SECSuccess;
--
-- /* XXX: Async cert validation and False Start don't work together
-- * safely yet; if we leave False Start enabled, we may end up false
-- * starting (sending application data) before we
-- * SSL_AuthCertificateComplete has been called.
-- */
-- ss->opt.enableFalseStart = PR_FALSE;
- }
-
- if (rv != SECSuccess) {
-@@ -9551,6 +9617,12 @@ ssl3_AuthCertificateComplete(sslSocket *ss, PRErrorCode error)
- } else if (ss->ssl3.hs.restartTarget != NULL) {
- sslRestartTarget target = ss->ssl3.hs.restartTarget;
- ss->ssl3.hs.restartTarget = NULL;
-+
-+ if (target == ssl3_FinishHandshake) {
-+ SSL_TRC(3,("%d: SSL3[%p]: certificate authentication lost the race"
-+ " with peer's finished message", SSL_GETPID(), ss->fd));
-+ }
-+
- rv = target(ss);
- /* Even if we blocked here, we have accomplished enough to claim
- * success. Any remaining work will be taken care of by subsequent
-@@ -9560,7 +9632,29 @@ ssl3_AuthCertificateComplete(sslSocket *ss, PRErrorCode error)
- rv = SECSuccess;
- }
- } else {
-- rv = SECSuccess;
-+ SSL_TRC(3, ("%d: SSL3[%p]: certificate authentication won the race with"
-+ " peer's finished message", SSL_GETPID(), ss->fd));
-+
-+ PORT_Assert(!ss->firstHsDone);
-+ PORT_Assert(!ss->sec.isServer);
-+ PORT_Assert(!ss->ssl3.hs.isResuming);
-+ PORT_Assert(ss->ssl3.hs.ws == wait_new_session_ticket ||
-+ ss->ssl3.hs.ws == wait_change_cipher ||
-+ ss->ssl3.hs.ws == wait_finished);
-+
-+ /* ssl3_SendClientSecondRound deferred the false start check because
-+ * certificate authentication was pending, so we do it now if we still
-+ * haven't received any of the server's second round yet.
-+ */
-+ if (ss->opt.enableFalseStart &&
-+ !ss->firstHsDone &&
-+ !ss->sec.isServer &&
-+ !ss->ssl3.hs.isResuming &&
-+ ssl3_WaitingForStartOfServerSecondRound(ss)) {
-+ rv = ssl3_CheckFalseStart(ss);
-+ } else {
-+ rv = SECSuccess;
-+ }
- }
-
- done:
-@@ -10023,9 +10117,6 @@ xmit_loser:
- return rv;
- }
-
-- ss->gs.writeOffset = 0;
-- ss->gs.readOffset = 0;
--
- if (ss->ssl3.hs.kea_def->kea == kea_ecdhe_rsa) {
- effectiveExchKeyType = kt_rsa;
- } else {
-@@ -10090,6 +10181,9 @@ xmit_loser:
- return rv;
- }
-
-+/* The return type is SECStatus instead of void because this function needs
-+ * to have type sslRestartTarget.
-+ */
- SECStatus
- ssl3_FinishHandshake(sslSocket * ss)
- {
-@@ -10099,19 +10193,16 @@ ssl3_FinishHandshake(sslSocket * ss)
-
- /* The first handshake is now completed. */
- ss->handshake = NULL;
-- ss->firstHsDone = PR_TRUE;
-
- if (ss->ssl3.hs.cacheSID) {
- (*ss->sec.cache)(ss->sec.ci.sid);
- ss->ssl3.hs.cacheSID = PR_FALSE;
- }
-
-+ ss->ssl3.hs.canFalseStart = PR_FALSE; /* False Start phase is complete */
- ss->ssl3.hs.ws = idle_handshake;
-
-- /* Do the handshake callback for sslv3 here, if we cannot false start. */
-- if (ss->handshakeCallback != NULL && !ssl3_CanFalseStart(ss)) {
-- (ss->handshakeCallback)(ss->fd, ss->handshakeCallbackData);
-- }
-+ ssl_FinishHandshake(ss);
-
- return SECSuccess;
- }
-@@ -11045,7 +11136,6 @@ process_it:
-
- ssl_ReleaseSSL3HandshakeLock(ss);
- return rv;
--
- }
-
- /*
-diff --git a/nss/lib/ssl/ssl3gthr.c b/nss/lib/ssl/ssl3gthr.c
-index 6d62515..03e369d 100644
---- a/nss/lib/ssl/ssl3gthr.c
-+++ b/nss/lib/ssl/ssl3gthr.c
-@@ -275,11 +275,17 @@ ssl3_GatherCompleteHandshake(sslSocket *ss, int flags)
- {
- SSL3Ciphertext cText;
- int rv;
-- PRBool canFalseStart = PR_FALSE;
-+ PRBool keepGoing = PR_TRUE;
-
- SSL_TRC(30, ("ssl3_GatherCompleteHandshake"));
-
-+ /* ssl3_HandleRecord may end up eventually calling ssl_FinishHandshake,
-+ * which requires the 1stHandshakeLock, which must be acquired before the
-+ * RecvBufLock.
-+ */
-+ PORT_Assert( ss->opt.noLocks || ssl_Have1stHandshakeLock(ss) );
- PORT_Assert( ss->opt.noLocks || ssl_HaveRecvBufLock(ss) );
-+
- do {
- PRBool handleRecordNow = PR_FALSE;
-
-@@ -368,20 +374,48 @@ ssl3_GatherCompleteHandshake(sslSocket *ss, int flags)
- if (rv < 0) {
- return ss->recvdCloseNotify ? 0 : rv;
- }
-+ if (rv == (int) SECSuccess && ss->gs.buf.len > 0) {
-+ /* We have application data to return to the application. This
-+ * prioritizes returning application data to the application over
-+ * completing any renegotiation handshake we may be doing.
-+ */
-+ PORT_Assert(ss->firstHsDone);
-+ PORT_Assert(cText.type == content_application_data);
-+ break;
-+ }
-
-- /* If we kicked off a false start in ssl3_HandleServerHelloDone, break
-- * out of this loop early without finishing the handshake.
-- */
-- if (ss->opt.enableFalseStart) {
-- ssl_GetSSL3HandshakeLock(ss);
-- canFalseStart = (ss->ssl3.hs.ws == wait_change_cipher ||
-- ss->ssl3.hs.ws == wait_new_session_ticket) &&
-- ssl3_CanFalseStart(ss);
-- ssl_ReleaseSSL3HandshakeLock(ss);
-+ PORT_Assert(keepGoing);
-+ ssl_GetSSL3HandshakeLock(ss);
-+ if (ss->ssl3.hs.ws == idle_handshake) {
-+ /* We are done with the current handshake so stop trying to
-+ * handshake. Note that it would be safe to test ss->firstHsDone
-+ * instead of ss->ssl3.hs.ws. By testing ss->ssl3.hs.ws instead,
-+ * we prioritize completing a renegotiation handshake over sending
-+ * application data.
-+ */
-+ PORT_Assert(ss->firstHsDone);
-+ PORT_Assert(!ss->ssl3.hs.canFalseStart);
-+ keepGoing = PR_FALSE;
-+ } else if (ss->ssl3.hs.canFalseStart) {
-+ /* Prioritize sending application data over trying to complete
-+ * the handshake if we're false starting.
-+ *
-+ * If we were to do this check at the beginning of the loop instead
-+ * of here, then this function would become a no-op after
-+ * receiving the ServerHelloDone in the false start case, and we
-+ * would never complete the handshake.
-+ */
-+ PORT_Assert(!ss->firstHsDone);
-+
-+ if (ssl3_WaitingForStartOfServerSecondRound(ss)) {
-+ keepGoing = PR_FALSE;
-+ } else {
-+ ss->ssl3.hs.canFalseStart = PR_FALSE;
-+ }
- }
-- } while (ss->ssl3.hs.ws != idle_handshake &&
-- !canFalseStart &&
-- ss->gs.buf.len == 0);
-+ ssl_ReleaseSSL3HandshakeLock(ss);
-+ } while (keepGoing);
-+
-
- ss->gs.readOffset = 0;
- ss->gs.writeOffset = ss->gs.buf.len;
-@@ -404,7 +438,10 @@ ssl3_GatherAppDataRecord(sslSocket *ss, int flags)
- {
- int rv;
-
-+ /* ssl3_GatherCompleteHandshake requires both of these locks. */
-+ PORT_Assert( ss->opt.noLocks || ssl_Have1stHandshakeLock(ss) );
- PORT_Assert( ss->opt.noLocks || ssl_HaveRecvBufLock(ss) );
-+
- do {
- rv = ssl3_GatherCompleteHandshake(ss, flags);
- } while (rv > 0 && ss->gs.buf.len == 0);
-diff --git a/nss/lib/ssl/sslauth.c b/nss/lib/ssl/sslauth.c
-index d2f57bf..cb956d4 100644
---- a/nss/lib/ssl/sslauth.c
-+++ b/nss/lib/ssl/sslauth.c
-@@ -60,7 +60,6 @@ SSL_SecurityStatus(PRFileDesc *fd, int *op, char **cp, int *kp0, int *kp1,
- sslSocket *ss;
- const char *cipherName;
- PRBool isDes = PR_FALSE;
-- PRBool enoughFirstHsDone = PR_FALSE;
-
- ss = ssl_FindSocket(fd);
- if (!ss) {
-@@ -78,14 +77,7 @@ SSL_SecurityStatus(PRFileDesc *fd, int *op, char **cp, int *kp0, int *kp1,
- *op = SSL_SECURITY_STATUS_OFF;
- }
-
-- if (ss->firstHsDone) {
-- enoughFirstHsDone = PR_TRUE;
-- } else if (ss->version >= SSL_LIBRARY_VERSION_3_0 &&
-- ssl3_CanFalseStart(ss)) {
-- enoughFirstHsDone = PR_TRUE;
-- }
--
-- if (ss->opt.useSecurity && enoughFirstHsDone) {
-+ if (ss->opt.useSecurity && ss->enoughFirstHsDone) {
- if (ss->version < SSL_LIBRARY_VERSION_3_0) {
- cipherName = ssl_cipherName[ss->sec.cipherType];
- } else {
-diff --git a/nss/lib/ssl/sslimpl.h b/nss/lib/ssl/sslimpl.h
-index 90e9567..bf0d67f 100644
---- a/nss/lib/ssl/sslimpl.h
-+++ b/nss/lib/ssl/sslimpl.h
-@@ -842,6 +842,8 @@ const ssl3CipherSuiteDef *suite_def;
- /* Shared state between ssl3_HandleFinished and ssl3_FinishHandshake */
- PRBool cacheSID;
-
-+ PRBool canFalseStart; /* Can/did we False Start */
-+
- /* clientSigAndHash contains the contents of the signature_algorithms
- * extension (if any) from the client. This is only valid for TLS 1.2
- * or later. */
-@@ -1116,6 +1118,10 @@ struct sslSocketStr {
- unsigned long clientAuthRequested;
- unsigned long delayDisabled; /* Nagle delay disabled */
- unsigned long firstHsDone; /* first handshake is complete. */
-+ unsigned long enoughFirstHsDone; /* enough of the first handshake is
-+ * done for callbacks to be able to
-+ * retrieve channel security
-+ * parameters from the SSL socket. */
- unsigned long handshakeBegun;
- unsigned long lastWriteBlocked;
- unsigned long recvdCloseNotify; /* received SSL EOF. */
-@@ -1156,6 +1162,8 @@ const unsigned char * preferredCipher;
- void *badCertArg;
- SSLHandshakeCallback handshakeCallback;
- void *handshakeCallbackData;
-+ SSLCanFalseStartCallback canFalseStartCallback;
-+ void *canFalseStartCallbackData;
- void *pkcs11PinArg;
- SSLNextProtoCallback nextProtoCallback;
- void *nextProtoArg;
-@@ -1358,7 +1366,19 @@ extern void ssl3_SetAlwaysBlock(sslSocket *ss);
-
- extern SECStatus ssl_EnableNagleDelay(sslSocket *ss, PRBool enabled);
-
--extern PRBool ssl3_CanFalseStart(sslSocket *ss);
-+extern void ssl_FinishHandshake(sslSocket *ss);
-+
-+/* Returns PR_TRUE if we are still waiting for the server to respond to our
-+ * client second round. Once we've received any part of the server's second
-+ * round then we don't bother trying to false start since it is almost always
-+ * the case that the NewSessionTicket, ChangeCipherSpec, and Finished messages
-+ * were sent in the same packet and we want to process them all at the same
-+ * time. If we were to try to false start in the middle of the server's second
-+ * round, then we would increase the number of I/O operations
-+ * (SSL_ForceHandshake/PR_Recv/PR_Send/etc.) needed to finish the handshake.
-+ */
-+extern PRBool ssl3_WaitingForStartOfServerSecondRound(sslSocket *ss);
-+
- extern SECStatus
- ssl3_CompressMACEncryptRecord(ssl3CipherSpec * cwSpec,
- PRBool isServer,
-diff --git a/nss/lib/ssl/sslinfo.c b/nss/lib/ssl/sslinfo.c
-index 9f2597e..d0c23b7 100644
---- a/nss/lib/ssl/sslinfo.c
-+++ b/nss/lib/ssl/sslinfo.c
-@@ -26,7 +26,6 @@ SSL_GetChannelInfo(PRFileDesc *fd, SSLChannelInfo *info, PRUintn len)
- sslSocket * ss;
- SSLChannelInfo inf;
- sslSessionID * sid;
-- PRBool enoughFirstHsDone = PR_FALSE;
-
- if (!info || len < sizeof inf.length) {
- PORT_SetError(SEC_ERROR_INVALID_ARGS);
-@@ -43,14 +42,7 @@ SSL_GetChannelInfo(PRFileDesc *fd, SSLChannelInfo *info, PRUintn len)
- memset(&inf, 0, sizeof inf);
- inf.length = PR_MIN(sizeof inf, len);
-
-- if (ss->firstHsDone) {
-- enoughFirstHsDone = PR_TRUE;
-- } else if (ss->version >= SSL_LIBRARY_VERSION_3_0 &&
-- ssl3_CanFalseStart(ss)) {
-- enoughFirstHsDone = PR_TRUE;
-- }
--
-- if (ss->opt.useSecurity && enoughFirstHsDone) {
-+ if (ss->opt.useSecurity && ss->enoughFirstHsDone) {
- sid = ss->sec.ci.sid;
- inf.protocolVersion = ss->version;
- inf.authKeyBits = ss->sec.authKeyBits;
-diff --git a/nss/lib/ssl/sslreveal.c b/nss/lib/ssl/sslreveal.c
-index dc14794..d972998 100644
---- a/nss/lib/ssl/sslreveal.c
-+++ b/nss/lib/ssl/sslreveal.c
-@@ -77,7 +77,6 @@ SSL_HandshakeNegotiatedExtension(PRFileDesc * socket,
- {
- /* some decisions derived from SSL_GetChannelInfo */
- sslSocket * sslsocket = NULL;
-- PRBool enoughFirstHsDone = PR_FALSE;
-
- if (!pYes) {
- PORT_SetError(SEC_ERROR_INVALID_ARGS);
-@@ -93,14 +92,8 @@ SSL_HandshakeNegotiatedExtension(PRFileDesc * socket,
-
- *pYes = PR_FALSE;
-
-- if (sslsocket->firstHsDone) {
-- enoughFirstHsDone = PR_TRUE;
-- } else if (sslsocket->ssl3.initialized && ssl3_CanFalseStart(sslsocket)) {
-- enoughFirstHsDone = PR_TRUE;
-- }
--
- /* according to public API SSL_GetChannelInfo, this doesn't need a lock */
-- if (sslsocket->opt.useSecurity && enoughFirstHsDone) {
-+ if (sslsocket->opt.useSecurity) {
- if (sslsocket->ssl3.initialized) { /* SSL3 and TLS */
- /* now we know this socket went through ssl3_InitState() and
- * ss->xtnData got initialized, which is the only member accessed by
-diff --git a/nss/lib/ssl/sslsecur.c b/nss/lib/ssl/sslsecur.c
-index 49bb42b..d0df442 100644
---- a/nss/lib/ssl/sslsecur.c
-+++ b/nss/lib/ssl/sslsecur.c
-@@ -97,23 +97,13 @@ ssl_Do1stHandshake(sslSocket *ss)
- ss->securityHandshake = 0;
- }
- if (ss->handshake == 0) {
-- ssl_GetRecvBufLock(ss);
-- ss->gs.recordLen = 0;
-- ssl_ReleaseRecvBufLock(ss);
--
-- SSL_TRC(3, ("%d: SSL[%d]: handshake is completed",
-- SSL_GETPID(), ss->fd));
-- /* call handshake callback for ssl v2 */
-- /* for v3 this is done in ssl3_HandleFinished() */
-- if ((ss->handshakeCallback != NULL) && /* has callback */
-- (!ss->firstHsDone) && /* only first time */
-- (ss->version < SSL_LIBRARY_VERSION_3_0)) { /* not ssl3 */
-- ss->firstHsDone = PR_TRUE;
-- (ss->handshakeCallback)(ss->fd, ss->handshakeCallbackData);
-+ /* for v3 this is done in ssl3_FinishHandshake */
-+ if (!ss->firstHsDone && ss->version < SSL_LIBRARY_VERSION_3_0) {
-+ ssl_GetRecvBufLock(ss);
-+ ss->gs.recordLen = 0;
-+ ssl_FinishHandshake(ss);
-+ ssl_ReleaseRecvBufLock(ss);
- }
-- ss->firstHsDone = PR_TRUE;
-- ss->gs.writeOffset = 0;
-- ss->gs.readOffset = 0;
- break;
- }
- rv = (*ss->handshake)(ss);
-@@ -134,6 +124,24 @@ ssl_Do1stHandshake(sslSocket *ss)
- return rv;
- }
-
-+void
-+ssl_FinishHandshake(sslSocket *ss)
-+{
-+ PORT_Assert( ss->opt.noLocks || ssl_Have1stHandshakeLock(ss) );
-+ PORT_Assert( ss->opt.noLocks || ssl_HaveRecvBufLock(ss) );
-+
-+ SSL_TRC(3, ("%d: SSL[%d]: handshake is completed", SSL_GETPID(), ss->fd));
-+
-+ ss->firstHsDone = PR_TRUE;
-+ ss->enoughFirstHsDone = PR_TRUE;
-+ ss->gs.writeOffset = 0;
-+ ss->gs.readOffset = 0;
-+
-+ if (ss->handshakeCallback) {
-+ (ss->handshakeCallback)(ss->fd, ss->handshakeCallbackData);
-+ }
-+}
-+
- /*
- * Handshake function that blocks. Used to force a
- * retry on a connection on the next read/write.
-@@ -206,6 +214,7 @@ SSL_ResetHandshake(PRFileDesc *s, PRBool asServer)
- ssl_Get1stHandshakeLock(ss);
-
- ss->firstHsDone = PR_FALSE;
-+ ss->enoughFirstHsDone = PR_FALSE;
- if ( asServer ) {
- ss->handshake = ssl2_BeginServerHandshake;
- ss->handshaking = sslHandshakingAsServer;
-@@ -221,6 +230,8 @@ SSL_ResetHandshake(PRFileDesc *s, PRBool asServer)
- ssl_ReleaseRecvBufLock(ss);
-
- ssl_GetSSL3HandshakeLock(ss);
-+ ss->ssl3.hs.canFalseStart = PR_FALSE;
-+ ss->ssl3.hs.restartTarget = NULL;
-
- /*
- ** Blow away old security state and get a fresh setup.
-@@ -331,6 +342,71 @@ SSL_HandshakeCallback(PRFileDesc *fd, SSLHandshakeCallback cb,
- return SECSuccess;
- }
-
-+/* Register an application callback to be called when false start may happen.
-+** Acquires and releases HandshakeLock.
-+*/
-+SECStatus
-+SSL_SetCanFalseStartCallback(PRFileDesc *fd, SSLCanFalseStartCallback cb,
-+ void *arg)
-+{
-+ sslSocket *ss;
-+
-+ ss = ssl_FindSocket(fd);
-+ if (!ss) {
-+ SSL_DBG(("%d: SSL[%d]: bad socket in SSL_SetCanFalseStartCallback",
-+ SSL_GETPID(), fd));
-+ return SECFailure;
-+ }
-+
-+ if (!ss->opt.useSecurity) {
-+ PORT_SetError(SEC_ERROR_INVALID_ARGS);
-+ return SECFailure;
-+ }
-+
-+ ssl_Get1stHandshakeLock(ss);
-+ ssl_GetSSL3HandshakeLock(ss);
-+
-+ ss->canFalseStartCallback = cb;
-+ ss->canFalseStartCallbackData = arg;
-+
-+ ssl_ReleaseSSL3HandshakeLock(ss);
-+ ssl_Release1stHandshakeLock(ss);
-+
-+ return SECSuccess;
-+}
-+
-+SECStatus
-+SSL_RecommendedCanFalseStart(PRFileDesc *fd, PRBool *canFalseStart)
-+{
-+ sslSocket *ss;
-+
-+ *canFalseStart = PR_FALSE;
-+ ss = ssl_FindSocket(fd);
-+ if (!ss) {
-+ SSL_DBG(("%d: SSL[%d]: bad socket in SSL_RecommendedCanFalseStart",
-+ SSL_GETPID(), fd));
-+ return SECFailure;
-+ }
-+
-+ if (!ss->ssl3.initialized) {
-+ PORT_SetError(SEC_ERROR_INVALID_ARGS);
-+ return SECFailure;
-+ }
-+
-+ if (ss->version < SSL_LIBRARY_VERSION_3_0) {
-+ PORT_SetError(SSL_ERROR_FEATURE_NOT_SUPPORTED_FOR_SSL2);
-+ return SECFailure;
-+ }
-+
-+ /* Require a forward-secret key exchange. */
-+ *canFalseStart = ss->ssl3.hs.kea_def->kea == kea_dhe_dss ||
-+ ss->ssl3.hs.kea_def->kea == kea_dhe_rsa ||
-+ ss->ssl3.hs.kea_def->kea == kea_ecdhe_ecdsa ||
-+ ss->ssl3.hs.kea_def->kea == kea_ecdhe_rsa;
-+
-+ return SECSuccess;
-+}
-+
- /* Try to make progress on an SSL handshake by attempting to read the
- ** next handshake from the peer, and sending any responses.
- ** For non-blocking sockets, returns PR_ERROR_WOULD_BLOCK if it cannot
-@@ -524,6 +600,9 @@ DoRecv(sslSocket *ss, unsigned char *out, int len, int flags)
- int amount;
- int available;
-
-+ /* ssl3_GatherAppDataRecord may call ssl_FinishHandshake, which needs the
-+ * 1stHandshakeLock. */
-+ ssl_Get1stHandshakeLock(ss);
- ssl_GetRecvBufLock(ss);
-
- available = ss->gs.writeOffset - ss->gs.readOffset;
-@@ -590,6 +669,7 @@ DoRecv(sslSocket *ss, unsigned char *out, int len, int flags)
-
- done:
- ssl_ReleaseRecvBufLock(ss);
-+ ssl_Release1stHandshakeLock(ss);
- return rv;
- }
-
-@@ -1156,7 +1236,7 @@ ssl_SecureRead(sslSocket *ss, unsigned char *buf, int len)
- int
- ssl_SecureSend(sslSocket *ss, const unsigned char *buf, int len, int flags)
- {
-- int rv = 0;
-+ int rv = 0;
-
- SSL_TRC(2, ("%d: SSL[%d]: SecureSend: sending %d bytes",
- SSL_GETPID(), ss->fd, len));
-@@ -1191,19 +1271,15 @@ ssl_SecureSend(sslSocket *ss, const unsigned char *buf, int len, int flags)
- ss->writerThread = PR_GetCurrentThread();
- /* If any of these is non-zero, the initial handshake is not done. */
- if (!ss->firstHsDone) {
-- PRBool canFalseStart = PR_FALSE;
-+ PRBool falseStart = PR_FALSE;
- ssl_Get1stHandshakeLock(ss);
-- if (ss->version >= SSL_LIBRARY_VERSION_3_0) {
-+ if (ss->opt.enableFalseStart &&
-+ ss->version >= SSL_LIBRARY_VERSION_3_0) {
- ssl_GetSSL3HandshakeLock(ss);
-- if ((ss->ssl3.hs.ws == wait_change_cipher ||
-- ss->ssl3.hs.ws == wait_finished ||
-- ss->ssl3.hs.ws == wait_new_session_ticket) &&
-- ssl3_CanFalseStart(ss)) {
-- canFalseStart = PR_TRUE;
-- }
-+ falseStart = ss->ssl3.hs.canFalseStart;
- ssl_ReleaseSSL3HandshakeLock(ss);
- }
-- if (!canFalseStart &&
-+ if (!falseStart &&
- (ss->handshake || ss->nextHandshake || ss->securityHandshake)) {
- rv = ssl_Do1stHandshake(ss);
- }
-@@ -1228,6 +1304,17 @@ ssl_SecureSend(sslSocket *ss, const unsigned char *buf, int len, int flags)
- goto done;
- }
-
-+ if (!ss->firstHsDone) {
-+ PORT_Assert(ss->version >= SSL_LIBRARY_VERSION_3_0);
-+#ifdef DEBUG
-+ ssl_GetSSL3HandshakeLock(ss);
-+ PORT_Assert(ss->ssl3.hs.canFalseStart);
-+ ssl_ReleaseSSL3HandshakeLock(ss);
-+#endif
-+ SSL_TRC(3, ("%d: SSL[%d]: SecureSend: sending data due to false start",
-+ SSL_GETPID(), ss->fd));
-+ }
-+
- /* Send out the data using one of these functions:
- * ssl2_SendClear, ssl2_SendStream, ssl2_SendBlock,
- * ssl3_SendApplicationData
-diff --git a/nss/lib/ssl/sslsock.c b/nss/lib/ssl/sslsock.c
-index cd4a7a7..73e069b 100644
---- a/nss/lib/ssl/sslsock.c
-+++ b/nss/lib/ssl/sslsock.c
-@@ -349,6 +349,8 @@ ssl_DupSocket(sslSocket *os)
- ss->badCertArg = os->badCertArg;
- ss->handshakeCallback = os->handshakeCallback;
- ss->handshakeCallbackData = os->handshakeCallbackData;
-+ ss->canFalseStartCallback = os->canFalseStartCallback;
-+ ss->canFalseStartCallbackData = os->canFalseStartCallbackData;
- ss->pkcs11PinArg = os->pkcs11PinArg;
-
- /* Create security data */
-@@ -2341,10 +2343,14 @@ ssl_Poll(PRFileDesc *fd, PRInt16 how_flags, PRInt16 *p_out_flags)
- } else if (new_flags & PR_POLL_WRITE) {
- /* The caller is trying to write, but the handshake is
- ** blocked waiting for data to read, and the first
-- ** handshake has been sent. so do NOT to poll on write.
-+ ** handshake has been sent. So do NOT poll on write
-+ ** unless we did false start.
- */
-- new_flags ^= PR_POLL_WRITE; /* don't select on write. */
-- new_flags |= PR_POLL_READ; /* do select on read. */
-+ if (!(ss->version >= SSL_LIBRARY_VERSION_3_0 &&
-+ ss->ssl3.hs.canFalseStart)) {
-+ new_flags ^= PR_POLL_WRITE; /* don't select on write. */
-+ }
-+ new_flags |= PR_POLL_READ; /* do select on read. */
- }
- }
- } else if ((new_flags & PR_POLL_READ) && (SSL_DataPending(fd) > 0)) {
---
-1.7.9.5
-
diff --git a/meta/recipes-support/nss/files/nss-CVE-2013-5606.patch b/meta/recipes-support/nss/files/nss-CVE-2013-5606.patch
deleted file mode 100644
index f30475b16b..0000000000
--- a/meta/recipes-support/nss/files/nss-CVE-2013-5606.patch
+++ /dev/null
@@ -1,48 +0,0 @@
-nss: CVE-2013-5606
-
-Upstream-Status: Backport
-
-the patch comes from:
-http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2013-5606
-https://bugzilla.mozilla.org/show_bug.cgi?id=910438
-http://hg.mozilla.org/projects/nss/rev/d29898e0981c
-
-The CERT_VerifyCert function in lib/certhigh/certvfy.c in
-Mozilla Network Security Services (NSS) 3.15 before 3.15.3 provides
-an unexpected return value for an incompatible key-usage certificate
-when the CERTVerifyLog argument is valid, which might allow remote
-attackers to bypass intended access restrictions via a crafted certificate.
-
-Signed-off-by: Li Wang <li.wang@windriver.com>
----
- nss/lib/certhigh/certvfy.c | 7 +++++--
- 1 file changed, 5 insertions(+), 2 deletions(-)
-
-diff --git a/nss/lib/certhigh/certvfy.c b/nss/lib/certhigh/certvfy.c
-index f364ceb..f450205 100644
---- a/nss/lib/certhigh/certvfy.c
-+++ b/nss/lib/certhigh/certvfy.c
-@@ -1312,7 +1312,7 @@ CERT_VerifyCert(CERTCertDBHandle *handle, CERTCertificate *cert,
- PORT_SetError(SEC_ERROR_UNTRUSTED_CERT);
- LOG_ERROR_OR_EXIT(log,cert,0,flags);
- } else if (trusted) {
-- goto winner;
-+ goto done;
- }
-
-
-@@ -1340,7 +1340,10 @@ CERT_VerifyCert(CERTCertDBHandle *handle, CERTCertificate *cert,
- }
- }
-
--winner:
-+done:
-+ if (log && log->head) {
-+ return SECFailure;
-+ }
- return(SECSuccess);
-
- loser:
---
-1.7.9.5
-
diff --git a/meta/recipes-support/nss/files/nss-CVE-2014-1492.patch b/meta/recipes-support/nss/files/nss-CVE-2014-1492.patch
deleted file mode 100644
index 1be8a17870..0000000000
--- a/meta/recipes-support/nss/files/nss-CVE-2014-1492.patch
+++ /dev/null
@@ -1,68 +0,0 @@
-nss: CVE-2014-1492
-
-Upstream-Status: Backport
-
-the patch comes from:
-http://web.nvd.nist.gov/view/vuln/detail?vulnId=CVE-2014-1492
-https://bugzilla.mozilla.org/show_bug.cgi?id=903885
-
-changeset: 11063:709d4e597979
-user: Kai Engert <kaie@kuix.de>
-date: Wed Mar 05 18:38:55 2014 +0100
-summary: Bug 903885, address requests to clarify comments from wtc
-
-changeset: 11046:2ffa40a3ff55
-tag: tip
-user: Wan-Teh Chang <wtc@google.com>
-date: Tue Feb 25 18:17:08 2014 +0100
-summary: Bug 903885, fix IDNA wildcard handling v4, r=kaie
-
-changeset: 11045:15ea62260c21
-user: Christian Heimes <sites@cheimes.de>
-date: Mon Feb 24 17:50:25 2014 +0100
-summary: Bug 903885, fix IDNA wildcard handling, r=kaie
-
-Signed-off-by: Li Wang <li.wang@windriver.com>
----
- nss/lib/certdb/certdb.c | 15 +++++++++------
- 1 file changed, 9 insertions(+), 6 deletions(-)
-
-diff --git a/nss/lib/certdb/certdb.c b/nss/lib/certdb/certdb.c
-index b7d22bd..91877b7 100644
---- a/nss/lib/certdb/certdb.c
-+++ b/nss/lib/certdb/certdb.c
-@@ -1381,7 +1381,7 @@ cert_TestHostName(char * cn, const char * hn)
- return rv;
- }
- } else {
-- /* New approach conforms to RFC 2818. */
-+ /* New approach conforms to RFC 6125. */
- char *wildcard = PORT_Strchr(cn, '*');
- char *firstcndot = PORT_Strchr(cn, '.');
- char *secondcndot = firstcndot ? PORT_Strchr(firstcndot+1, '.') : NULL;
-@@ -1390,14 +1390,17 @@ cert_TestHostName(char * cn, const char * hn)
- /* For a cn pattern to be considered valid, the wildcard character...
- * - may occur only in a DNS name with at least 3 components, and
- * - may occur only as last character in the first component, and
-- * - may be preceded by additional characters
-+ * - may be preceded by additional characters, and
-+ * - must not be preceded by an IDNA ACE prefix (xn--)
- */
- if (wildcard && secondcndot && secondcndot[1] && firsthndot
-- && firstcndot - wildcard == 1
-- && secondcndot - firstcndot > 1
-- && PORT_Strrchr(cn, '*') == wildcard
-+ && firstcndot - wildcard == 1 /* wildcard is last char in first component */
-+ && secondcndot - firstcndot > 1 /* second component is non-empty */
-+ && PORT_Strrchr(cn, '*') == wildcard /* only one wildcard in cn */
- && !PORT_Strncasecmp(cn, hn, wildcard - cn)
-- && !PORT_Strcasecmp(firstcndot, firsthndot)) {
-+ && !PORT_Strcasecmp(firstcndot, firsthndot)
-+ /* If hn starts with xn--, then cn must start with wildcard */
-+ && (PORT_Strncasecmp(hn, "xn--", 4) || wildcard == cn)) {
- /* valid wildcard pattern match */
- return SECSuccess;
- }
---
-1.7.9.5
-
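The wildcard rules described in the patch above are easier to read outside of C. A rough sketch of the same checks in Python (simplified and illustrative only; the real logic is cert_TestHostName() in nss/lib/certdb/certdb.c, and the hostnames below are made-up examples):

    def wildcard_matches(cn, hn):
        # Simplified rendering of the rules above; not the NSS implementation.
        cn, hn = cn.lower(), hn.lower()
        wc = cn.find('*')
        dot1 = cn.find('.')
        dot2 = cn.find('.', dot1 + 1) if dot1 != -1 else -1
        hdot = hn.find('.')
        return (wc != -1 and dot1 != -1 and dot2 != -1 and hdot != -1
                and dot2 + 1 < len(cn)          # at least three DNS components
                and dot1 - wc == 1              # '*' is the last char of the first label
                and dot2 - dot1 > 1             # second label is non-empty
                and cn.count('*') == 1          # only one wildcard in the pattern
                and cn[:wc] == hn[:wc]          # any prefix before '*' must match
                and cn[dot1:] == hn[hdot:]      # remaining labels must match exactly
                and (not hn.startswith('xn--') or wc == 0))  # no partial IDNA (xn--) labels

    print(wildcard_matches('*.example.com', 'www.example.com'))             # True
    print(wildcard_matches('x*.example.com', 'xn--mnchen-3ya.example.com')) # False: IDNA rule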
diff --git a/meta/recipes-support/nss/nss.inc b/meta/recipes-support/nss/nss.inc
index 69f98b58cd..e5e30961cc 100644
--- a/meta/recipes-support/nss/nss.inc
+++ b/meta/recipes-support/nss/nss.inc
@@ -8,7 +8,7 @@ v3 certificates, and other security standards."
HOMEPAGE = "http://www.mozilla.org/projects/security/pki/nss/"
SECTION = "libs"
-LICENSE = "MPL-1.1 GPL-2.0 LGPL-2.1"
+LICENSE = "MPL-2.0 | (MPL-2.0 & GPL-2.0+) | (MPL-2.0 & LGPL-2.1+)"
LIC_FILES_CHKSUM = "file://nss/lib/freebl/mpi/doc/LICENSE;md5=491f158d09d948466afce85d6f1fe18f \
file://nss/lib/freebl/mpi/doc/LICENSE-MPL;md5=6bf96825e3d7ce4de25621ae886cc859"
@@ -16,19 +16,13 @@ SRC_URI = "\
file://nss-fix-support-cross-compiling.patch \
file://nss-no-rpath-for-cross-compiling.patch \
file://nss-fix-incorrect-shebang-of-perl.patch \
- file://nss-3.15.1-fix-CVE-2013-1741.patch \
- file://nss-3.15.1-fix-CVE-2013-5605.patch \
- file://nss-CVE-2014-1492.patch \
- file://nss-CVE-2013-1740.patch \
- file://nss-3.15.1-fix-CVE-2013-1739.patch \
- file://nss-CVE-2013-5606.patch \
"
-SRC_URI_append_class-target = "\
+SRC_URI_append = "\
file://nss.pc.in \
file://signlibs.sh \
"
inherit siteinfo
-PR = "r0"
+
DEPENDS = "sqlite3 nspr zlib nss-native"
DEPENDS_class-native = "sqlite3-native nspr-native zlib-native"
RDEPENDS_${PN} = "perl"
@@ -152,7 +146,7 @@ do_install() {
done
}
-do_install_append_class-target() {
+do_install_append() {
# Create empty .chk files for the NSS libraries at build time. They could
# be regenerated at target's boot time.
for file in libsoftokn3.chk libfreebl3.chk libnssdbm3.chk; do
@@ -167,7 +161,9 @@ do_install_append_class-target() {
sed -i s:OEEXECPREFIX:${exec_prefix}:g ${D}${libdir}/pkgconfig/nss.pc
sed -i s:OELIBDIR:${libdir}:g ${D}${libdir}/pkgconfig/nss.pc
sed -i s:OEINCDIR:${includedir}/nss3:g ${D}${libdir}/pkgconfig/nss.pc
+}
+do_install_append_class-target() {
# Create a blank certificate
mkdir -p ${D}${sysconfdir}/pki/nssdb/
touch ./empty_password
diff --git a/meta/recipes-support/nss/files/nss-fix-incorrect-shebang-of-perl.patch b/meta/recipes-support/nss/nss/nss-fix-incorrect-shebang-of-perl.patch
index 547594d5b6..547594d5b6 100644
--- a/meta/recipes-support/nss/files/nss-fix-incorrect-shebang-of-perl.patch
+++ b/meta/recipes-support/nss/nss/nss-fix-incorrect-shebang-of-perl.patch
diff --git a/meta/recipes-support/nss/files/nss-fix-support-cross-compiling.patch b/meta/recipes-support/nss/nss/nss-fix-support-cross-compiling.patch
index f0b3550bff..f0b3550bff 100644
--- a/meta/recipes-support/nss/files/nss-fix-support-cross-compiling.patch
+++ b/meta/recipes-support/nss/nss/nss-fix-support-cross-compiling.patch
diff --git a/meta/recipes-support/nss/files/nss-no-rpath-for-cross-compiling.patch b/meta/recipes-support/nss/nss/nss-no-rpath-for-cross-compiling.patch
index 7661dc93a0..7661dc93a0 100644
--- a/meta/recipes-support/nss/files/nss-no-rpath-for-cross-compiling.patch
+++ b/meta/recipes-support/nss/nss/nss-no-rpath-for-cross-compiling.patch
diff --git a/meta/recipes-support/nss/files/nss.pc.in b/meta/recipes-support/nss/nss/nss.pc.in
index 200f635c65..200f635c65 100644
--- a/meta/recipes-support/nss/files/nss.pc.in
+++ b/meta/recipes-support/nss/nss/nss.pc.in
diff --git a/meta/recipes-support/nss/files/signlibs.sh b/meta/recipes-support/nss/nss/signlibs.sh
index 1ec79f4576..1ec79f4576 100644
--- a/meta/recipes-support/nss/files/signlibs.sh
+++ b/meta/recipes-support/nss/nss/signlibs.sh
diff --git a/meta/recipes-support/nss/nss_3.15.1.bb b/meta/recipes-support/nss/nss_3.15.1.bb
deleted file mode 100644
index 7b06f00cde..0000000000
--- a/meta/recipes-support/nss/nss_3.15.1.bb
+++ /dev/null
@@ -1,9 +0,0 @@
-require nss.inc
-
-SRC_URI += "\
- http://ftp.mozilla.org/pub/mozilla.org/security/nss/releases/NSS_3_15_1_RTM/src/${BPN}-${PV}.tar.gz \
-"
-
-SRC_URI[md5sum] = "fb68f4d210ac9397dd0d3c39c4f938eb"
-SRC_URI[sha256sum] = "f994106a33d1f3210f4151bbb3419a1c28fd1cb545caa7dc9afdebd6da626284"
-
diff --git a/meta/recipes-support/nss/nss_3.17.2.bb b/meta/recipes-support/nss/nss_3.17.2.bb
new file mode 100644
index 0000000000..039406f9f5
--- /dev/null
+++ b/meta/recipes-support/nss/nss_3.17.2.bb
@@ -0,0 +1,8 @@
+require nss.inc
+
+SRC_URI += "\
+ http://ftp.mozilla.org/pub/mozilla.org/security/nss/releases/NSS_3_17_2_RTM/src/${BPN}-${PV}.tar.gz \
+"
+
+SRC_URI[md5sum] = "d3edb6f6c3688b2fde67ec9c9a8c1214"
+SRC_URI[sha256sum] = "134929e44e44b968a4883f4ee513a71ae45d55b486cee41ee8e26c3cc84dab8b"
diff --git a/meta/recipes-support/pinentry/pinentry_0.9.0.bb b/meta/recipes-support/pinentry/pinentry_0.9.0.bb
new file mode 100644
index 0000000000..68d2478a8d
--- /dev/null
+++ b/meta/recipes-support/pinentry/pinentry_0.9.0.bb
@@ -0,0 +1,27 @@
+SUMMARY = "Collection of simple PIN or passphrase entry dialogs"
+DESCRIPTION = "\
+ Pinentry is a collection of simple PIN or passphrase entry dialogs which \
+ utilize the Assuan protocol as described by the aegypten project; see \
+ http://www.gnupg.org/aegypten/ for details."
+
+HOMEPAGE = "http://www.gnupg.org/related_software/pinentry/index.en.html"
+LICENSE = "GPLv2"
+LIC_FILES_CHKSUM = "file://COPYING;md5=cbbd794e2a0a289b9dfcc9f513d1996e"
+
+inherit autotools-brokensep
+
+SRC_URI = "ftp://ftp.gnupg.org/gcrypt/${BPN}/${BPN}-${PV}.tar.bz2"
+
+SRC_URI[md5sum] = "40a05856cb3accf6679987b7899b0f5a"
+SRC_URI[sha256sum] = "90045a07ab8e1a8e1ecf5d19b51691f195525e579fa5d71d7e92c120b05490ab"
+
+EXTRA_OECONF = "--disable-rpath \
+ --disable-dependency-tracking \
+ "
+
+PACKAGECONFIG ??= "ncurses libcap"
+
+PACKAGECONFIG[ncurses] = "--enable-ncurses --with-ncurses-include-dir=${STAGING_INCDIR}, --disable-ncurses, ncurses"
+PACKAGECONFIG[libcap] = "--with-libcap, --without-libcap, libcap"
+PACKAGECONFIG[qt4] = "--enable-pinentry-qt4, --disable-pinentry-qt4, qt4-x11"
+PACKAGECONFIG[gtk2] = "--enable-pinentry-gtk2, --disable-pinentry-gtk2, gtk+ glib-2.0"
diff --git a/meta/recipes-support/ptest-runner/files/ptest-runner b/meta/recipes-support/ptest-runner/files/ptest-runner
index 33424134c5..c618f1148d 100644
--- a/meta/recipes-support/ptest-runner/files/ptest-runner
+++ b/meta/recipes-support/ptest-runner/files/ptest-runner
@@ -1,5 +1,5 @@
#!/bin/sh
-
+ANYFAILED=no
echo "START: $0"
for libdir in /usr/lib*
@@ -15,9 +15,13 @@ do
date "+%Y-%m-%dT%H:%M"
echo "BEGIN: $x"
cd "$x"
- ./run-ptest
+ ./run-ptest || ANYFAILED=yes
echo "END: $x"
date "+%Y-%m-%dT%H:%M"
done
done
echo "STOP: $0"
+if [ "$ANYFAILED" = "yes" ]; then
+ exit 1
+fi
+exit 0
diff --git a/meta/recipes-support/serf/serf/env.patch b/meta/recipes-support/serf/serf/env.patch
new file mode 100644
index 0000000000..9d073e9abf
--- /dev/null
+++ b/meta/recipes-support/serf/serf/env.patch
@@ -0,0 +1,28 @@
+'scons' cleans the environment which breaks ccache builds because
+CCACHEDIR can point to an unexpected location:
+
+| ccache arm-linux-gnueabi-gcc ... context.c
+| ccache: failed to create .../serf/1.3.6-r0/.home/.ccache (No such file or directory)
+
+The issue is described in
+
+ http://www.scons.org/wiki/ImportingEnvironmentSettings
+
+and because 'bitbake' cleans the environment we can pass it through completely
+instead of trying to enumerate the needed variables.
+
+Upstream-Status: Inappropriate
+
+
+Index: serf-1.3.6/SConstruct
+===================================================================
+--- serf-1.3.6.orig/SConstruct
++++ serf-1.3.6/SConstruct
+@@ -149,6 +149,7 @@ if sys.platform == 'win32':
+ env = Environment(variables=opts,
+ tools=('default', 'textfile',),
+ CPPPATH=['.', ],
++ ENV = os.environ,
+ )
+
+ env.Append(BUILDERS = {
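The one-line ENV addition above is the whole fix. As a minimal standalone sketch (illustrative only; the program and file names are made-up and this is not serf's actual SConstruct):

    # SConstruct (sketch) -- run with "scons"; Environment() is provided by SCons.
    import os

    env = Environment(
        tools=['default'],
        ENV=os.environ,   # forward the caller's environment, as env.patch does,
                          # so wrappers such as ccache can find CCACHE_DIR etc.
    )
    env.Program('hello', ['hello.c'])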
diff --git a/meta/recipes-support/serf/serf_1.3.6.bb b/meta/recipes-support/serf/serf_1.3.8.bb
index 08b04d3ca6..aa7918b5c3 100644
--- a/meta/recipes-support/serf/serf_1.3.6.bb
+++ b/meta/recipes-support/serf/serf_1.3.8.bb
@@ -1,20 +1,19 @@
-SRC_URI = "http://serf.googlecode.com/svn/src_releases/serf-1.3.6.tar.bz2 \
- file://norpath.patch"
-SRC_URI[md5sum] = "7fe38fa6eab078e0beabf291d8e4995d"
-SRC_URI[sha256sum] = "ca637beb0399797d4fc7ffa85e801733cd9c876997fac4a4fd12e9afe86563f2"
+SRC_URI = "http://serf.googlecode.com/svn/src_releases/serf-${PV}.tar.bz2 \
+ file://norpath.patch \
+ file://env.patch"
+
+SRC_URI[md5sum] = "2e4efe57ff28cb3202a112e90f0c2889"
+SRC_URI[sha256sum] = "e0500be065dbbce490449837bb2ab624e46d64fc0b090474d9acaa87c82b2590"
LICENSE = "Apache-2.0"
LIC_FILES_CHKSUM = "file://LICENSE;md5=86d3f3a95c324c9479bd8986968f4327"
DEPENDS = "python-scons-native openssl apr apr-util util-linux expat"
-FULLCC = "${STAGING_BINDIR_TOOLCHAIN}/${CC}"
-FULLCC_class-native = "${CC}"
-
do_compile() {
${STAGING_BINDIR_NATIVE}/scons ${PARALLEL_MAKE} PREFIX=${prefix} \
- CC="${FULLCC}" \
+ CC="${CC}" \
APR=`which apr-1-config` APU=`which apu-1-config` \
CFLAGS="${CFLAGS}" LINKFLAGS="${LDFLAGS}" \
OPENSSL="${STAGING_EXECPREFIXDIR}"
diff --git a/meta/recipes-support/sqlite/sqlite3_3.8.6.0.bb b/meta/recipes-support/sqlite/sqlite3_3.8.7.4.bb
index 32f60ebbeb..4c73d304da 100644
--- a/meta/recipes-support/sqlite/sqlite3_3.8.6.0.bb
+++ b/meta/recipes-support/sqlite/sqlite3_3.8.7.4.bb
@@ -10,8 +10,8 @@ PE = "3"
SQLITE_PV = "${@sqlite_download_version(d)}"
SRC_URI = "http://www.sqlite.org/2014/sqlite-autoconf-${SQLITE_PV}.tar.gz"
-SRC_URI[md5sum] = "f7e4a156b583abeba349629e2364224b"
-SRC_URI[sha256sum] = "53bab14255bc16c3d73fdbe54b364c1764709d68dda167587f8026c98b9f4e95"
+SRC_URI[md5sum] = "33bb8db0038317ce1b0480ca1185c7ba"
+SRC_URI[sha256sum] = "86370f139405fdfe03334fd618171a74e50f589f17ccbe5933361ed1f58359ec"
S = "${WORKDIR}/sqlite-autoconf-${SQLITE_PV}"
diff --git a/meta/site/arm-32 b/meta/site/arm-32
new file mode 100644
index 0000000000..81fd8d3103
--- /dev/null
+++ b/meta/site/arm-32
@@ -0,0 +1,47 @@
+# definitions assuming 32-bit arm architecture
+
+# apache
+ac_cv_sizeof_size_t=${ac_cv_sizeof_size_t=4}
+ac_cv_sizeof_ssize_t=${ac_cv_sizeof_ssize_t=4}
+
+# glib
+glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
+glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
+glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
+
+# glib-2.0
+glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
+glib_cv_sizeof_intmax_t=${glib_cv_sizeof_intmax_t=8}
+glib_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
+glib_cv_sizeof_size_t=${glib_cv_sizeof_size_t=4}
+glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
+ac_cv_alignof_guint32=4
+ac_cv_alignof_guint64=8
+ac_cv_alignof_unsigned_long=4
+
+# jikes
+ac_cv_sizeof_wchar_t=4
+
+# ORBit2
+ac_cv_alignof_CORBA_boolean=1
+ac_cv_alignof_CORBA_char=1
+ac_cv_alignof_CORBA_double=8
+ac_cv_alignof_CORBA_float=4
+ac_cv_alignof_CORBA_long=4
+ac_cv_alignof_CORBA_long_double=8
+ac_cv_alignof_CORBA_long_long=8
+ac_cv_alignof_CORBA_octet=1
+ac_cv_alignof_CORBA_pointer=4
+ac_cv_alignof_CORBA_short=2
+ac_cv_alignof_CORBA_struct=1
+ac_cv_alignof_CORBA_wchar=2
+
+# at-spi2-core
+ac_cv_alignof_char=1
+ac_cv_alignof_dbind_pointer=4
+ac_cv_alignof_dbind_struct=1
+ac_cv_alignof_dbus_bool_t=4
+ac_cv_alignof_dbus_int16_t=2
+ac_cv_alignof_dbus_int32_t=4
+ac_cv_alignof_dbus_int64_t=8
+ac_cv_alignof_double=8
diff --git a/meta/site/arm-64 b/meta/site/arm-64
new file mode 100644
index 0000000000..90c09b2d54
--- /dev/null
+++ b/meta/site/arm-64
@@ -0,0 +1,46 @@
+# definitions assuming 64-bit arm architecture
+
+# general
+ac_cv_sizeof_wchar_t=4
+ac_cv_sizeof_size_t=8
+ac_cv_sizeof_ssize_t=8
+ac_cv_alignof_char=1
+ac_cv_alignof_double=8
+
+# glib
+#glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
+#glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
+#glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
+
+# glib-2.0
+#glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
+#glib_cv_sizeof_intmax_t=${glib_cv_sizeof_intmax_t=8}
+#glib_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
+#glib_cv_sizeof_size_t=${glib_cv_sizeof_size_t=4}
+#glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
+ac_cv_alignof_guint32=4
+ac_cv_alignof_guint64=8
+ac_cv_alignof_unsigned_long=8
+
+# ORBit2 (should be in meta-gnome/site/arm-64)
+#ac_cv_alignof_CORBA_boolean=1
+#ac_cv_alignof_CORBA_char=1
+#ac_cv_alignof_CORBA_double=8
+#ac_cv_alignof_CORBA_float=4
+#ac_cv_alignof_CORBA_long=4
+#ac_cv_alignof_CORBA_long_double=8
+#ac_cv_alignof_CORBA_long_long=8
+#ac_cv_alignof_CORBA_octet=1
+#ac_cv_alignof_CORBA_pointer=4
+#ac_cv_alignof_CORBA_short=2
+#ac_cv_alignof_CORBA_struct=1
+#ac_cv_alignof_CORBA_wchar=2
+
+# at-spi2-core
+ac_cv_alignof_dbind_pointer=8
+ac_cv_alignof_dbind_struct=1
+ac_cv_alignof_dbus_bool_t=4
+ac_cv_alignof_dbus_int16_t=2
+ac_cv_alignof_dbus_int32_t=4
+ac_cv_alignof_dbus_int64_t=8
+
diff --git a/meta/site/arm-common b/meta/site/arm-common
index 1893dc0ab8..12e5d4592a 100644
--- a/meta/site/arm-common
+++ b/meta/site/arm-common
@@ -11,7 +11,6 @@ ac_cv_sctp=${ac_cv_sctp=no}
# apache
ac_cv_func_pthread_key_delete=${ac_cv_func_pthread_key_delete=yes}
apr_cv_process_shared_works=${apr_cv_process_shared_works=no}
-ac_cv_sizeof_ssize_t=${ac_cv_sizeof_ssize_t=4}
apr_cv_tcp_nodelay_with_cork=${apr_cv_tcp_nodelay_with_cork=yes}
# bash
@@ -51,8 +50,6 @@ ac_cv_func_fnmatch_works=${ac_cv_func_fnmatch_works=yes}
am_cv_func_working_getline=${am_cv_func_working_getline=yes}
# glib
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
glib_cv_stack_grows=${glib_cv_stack_grows=no}
glib_cv_uscore=${glib_cv_uscore=no}
glib_cv_use_pid_surrogate=${glib_cv_use_pid_surrogate=yes}
@@ -60,7 +57,6 @@ glib_cv_has__inline=${glib_cv_has__inline=yes}
glib_cv_has__inline__=${glib_cv_has__inline__=yes}
glib_cv_hasinline=${glib_cv_hasinline=yes}
glib_cv_sane_realloc=${glib_cv_sane_realloc=yes}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
glib_cv_uscore=${glib_cv_uscore=no}
glib_cv_va_val_copy=${glib_cv_va_val_copy=yes}
glib_cv_rtldglobal_broken=${glib_cv_rtldglobal_broken=no}
@@ -70,15 +66,7 @@ glib_cv_sys_pthread_cond_timedwait_posix=${glib_cv_sys_pthread_cond_timedwait_po
# glib-2.0
glib_cv_long_long_format=${glib_cv_long_long_format=ll}
-glib_cv_sizeof_gmutex=${glib_cv_sizeof_gmutex=24}
-glib_cv_sizeof_intmax_t=${glib_cv_sizeof_intmax_t=8}
-glib_cv_sizeof_ptrdiff_t=${glib_cv_sizeof_ptrdiff_t=4}
-glib_cv_sizeof_size_t=${glib_cv_sizeof_size_t=4}
-glib_cv_sizeof_system_thread=${glib_cv_sizeof_system_thread=4}
glib_cv_sys_use_pid_niceness_surrogate=${glib_cv_sys_use_pid_niceness_surrogate=yes}
-ac_cv_alignof_guint32=4
-ac_cv_alignof_guint64=8
-ac_cv_alignof_unsigned_long=4
#gstreamer
as_cv_unaligned_access=${as_cv_unaligned_access=no}
@@ -86,9 +74,6 @@ as_cv_unaligned_access=${as_cv_unaligned_access=no}
# httppc
ac_cv_strerror_r_SUSv3=${ac_cv_strerror_r_SUSv3=no}
-# jikes
-ac_cv_sizeof_wchar_t=4
-
# lftp
ac_cv_need_trio=${ac_cv_need_trio=no}
lftp_cv_va_val_copy=${lftp_cv_va_val_copy=yes}
@@ -119,20 +104,6 @@ mysql_cv_func_atomic_add=${mysql_cv_func_atomic_add=no}
ac_cv_regexec_segfault_emptystr=${ac_cv_regexec_segfault_emptystr=no}
nano_cv_func_regexec_segv_emptystr=${nano_cv_func_regexec_segv_emptystr=no}
-# ORBit2
-ac_cv_alignof_CORBA_boolean=1
-ac_cv_alignof_CORBA_char=1
-ac_cv_alignof_CORBA_double=8
-ac_cv_alignof_CORBA_float=4
-ac_cv_alignof_CORBA_long=4
-ac_cv_alignof_CORBA_long_double=8
-ac_cv_alignof_CORBA_long_long=8
-ac_cv_alignof_CORBA_octet=1
-ac_cv_alignof_CORBA_pointer=4
-ac_cv_alignof_CORBA_short=2
-ac_cv_alignof_CORBA_struct=1
-ac_cv_alignof_CORBA_wchar=2
-
# php
ac_cv_pread=${ac_cv_pread=no}
ac_cv_pwrite=${ac_cv_pwrite=no}
@@ -186,12 +157,3 @@ jm_cv_func_working_readdir=yes
# evolution-data-server
ac_cv_libiconv_utf8=${ac_cv_libiconv_utf8=yes}
-# at-spi2-core
-ac_cv_alignof_char=1
-ac_cv_alignof_dbind_pointer=4
-ac_cv_alignof_dbind_struct=1
-ac_cv_alignof_dbus_bool_t=4
-ac_cv_alignof_dbus_int16_t=2
-ac_cv_alignof_dbus_int32_t=4
-ac_cv_alignof_dbus_int64_t=8
-ac_cv_alignof_double=8
diff --git a/oe-init-build-env-memres b/oe-init-build-env-memres
index 00079989dc..9b9e0f44b0 100755
--- a/oe-init-build-env-memres
+++ b/oe-init-build-env-memres
@@ -60,14 +60,14 @@ if [ -e bitbake.lock ] && grep : bitbake.lock > /dev/null ; then
res=$?
fi
+if [ $res != 0 ] ; then
+ bitbake --server-only -t xmlrpc -B localhost:$port
+fi
+
if [ $port = -1 ] ; then
export BBSERVER=localhost:-1
echo "Bitbake server started on demand as needed, use bitbake -m to shut it down"
else
- if [ $res != 0 ] ; then
- bitbake --server-only -t xmlrpc -B localhost:$port
- fi
-
export BBSERVER=`cat bitbake.lock`
if [ $res = 0 ] ; then
diff --git a/scripts/contrib/build-perf-test.sh b/scripts/contrib/build-perf-test.sh
index be3b648046..cdd7885dca 100755
--- a/scripts/contrib/build-perf-test.sh
+++ b/scripts/contrib/build-perf-test.sh
@@ -335,7 +335,7 @@ test2 () {
#
# Start with
# i) "rm -rf tmp/cache; time bitbake -p"
-# ii) "rm -rf tmp/cache/default-eglibc/; time bitbake -p"
+# ii) "rm -rf tmp/cache/default-glibc/; time bitbake -p"
# iii) "time bitbake -p"
@@ -344,8 +344,8 @@ test3 () {
log " Removing tmp/cache && cache"
rm -rf tmp/cache cache
bbtime -p
- log " Removing tmp/cache/default-eglibc/"
- rm -rf tmp/cache/default-eglibc/
+ log " Removing tmp/cache/default-glibc/"
+ rm -rf tmp/cache/default-glibc/
bbtime -p
bbtime -p
}
diff --git a/scripts/contrib/list-packageconfig-flags.py b/scripts/contrib/list-packageconfig-flags.py
index 598b5c3fc6..2f3b8b06a6 100755
--- a/scripts/contrib/list-packageconfig-flags.py
+++ b/scripts/contrib/list-packageconfig-flags.py
@@ -65,7 +65,6 @@ def get_recipesdata(bbhandler, preferred):
data = bb.cache.Cache.loadDataFull(fn, bbhandler.cooker.collection.get_file_appends(fn), bbhandler.config_data)
flags = data.getVarFlags("PACKAGECONFIG")
flags.pop('doc', None)
- flags.pop('defaultval', None)
if flags:
data_dict[fn] = data
@@ -78,7 +77,6 @@ def collect_pkgs(data_dict):
for fn in data_dict:
pkgconfigflags = data_dict[fn].getVarFlags("PACKAGECONFIG")
pkgconfigflags.pop('doc', None)
- pkgconfigflags.pop('defaultval', None)
pkgname = data_dict[fn].getVar("P", True)
pkg_dict[pkgname] = sorted(pkgconfigflags.keys())
@@ -135,7 +133,7 @@ def display_all(data_dict):
print('PACKAGECONFIG %s' % packageconfig)
for flag,flag_val in data_dict[fn].getVarFlags("PACKAGECONFIG").iteritems():
- if flag in ["defaultval", "doc"]:
+ if flag == "doc":
continue
print('PACKAGECONFIG[%s] %s' % (flag, flag_val))
print ''
diff --git a/scripts/contrib/python/generate-manifest-2.7.py b/scripts/contrib/python/generate-manifest-2.7.py
index 65486d8ec4..99b11678db 100755
--- a/scripts/contrib/python/generate-manifest-2.7.py
+++ b/scripts/contrib/python/generate-manifest-2.7.py
@@ -275,11 +275,11 @@ if __name__ == "__main__":
m.addPackage( "${PN}-image", "Python graphical image handling", "${PN}-core",
"colorsys.* imghdr.* lib-dynload/imageop.so lib-dynload/rgbimg.so" )
- m.addPackage( "${PN}-io", "Python low-level I/O", "${PN}-core ${PN}-math ${PN}-textutils",
+ m.addPackage( "${PN}-io", "Python low-level I/O", "${PN}-core ${PN}-math ${PN}-textutils ${PN}-netclient",
"lib-dynload/_socket.so lib-dynload/_io.so lib-dynload/_ssl.so lib-dynload/select.so lib-dynload/termios.so lib-dynload/cStringIO.so " +
"pipes.* socket.* ssl.* tempfile.* StringIO.* io.* _pyio.*" )
- m.addPackage( "${PN}-json", "Python JSON support", "${PN}-core ${PN}-math ${PN}-re",
+ m.addPackage( "${PN}-json", "Python JSON support", "${PN}-core ${PN}-math ${PN}-re ${PN}-codecs",
"json lib-dynload/_json.so" ) # package
m.addPackage( "${PN}-lang", "Python low-level language support", "${PN}-core",
@@ -388,4 +388,7 @@ if __name__ == "__main__":
m.addPackage( "${PN}-mailbox", "Python mailbox format support", "${PN}-core ${PN}-mime",
"mailbox.*" )
+ m.addPackage( "${PN}-argparse", "Python command line argument parser", "${PN}-core ${PN}-codecs ${PN}-textutils",
+ "argparse.*" )
+
m.make()
diff --git a/scripts/contrib/python/generate-manifest-3.3.py b/scripts/contrib/python/generate-manifest-3.3.py
index 288def293d..48cc84d4e0 100755
--- a/scripts/contrib/python/generate-manifest-3.3.py
+++ b/scripts/contrib/python/generate-manifest-3.3.py
@@ -166,15 +166,15 @@ if __name__ == "__main__":
# Parameters: revision, name, description, dependencies, filenames
#
- m.addPackage( "${PN}-core", "Python interpreter and core modules", "${PN}-lang ${PN}-re",
- "__future__.* _abcoll.* abc.* copy.* copy_reg.* ConfigParser.* " +
+ m.addPackage( "${PN}-core", "Python interpreter and core modules", "${PN}-lang ${PN}-re ${PN}-reprlib ${PN}-codecs ${PN}-io ${PN}-math",
+ "__future__.* _abcoll.* abc.* copy.* copyreg.* ConfigParser.* " +
"genericpath.* getopt.* linecache.* new.* " +
"os.* posixpath.* struct.* " +
"warnings.* site.* stat.* " +
"UserDict.* UserList.* UserString.* " +
"lib-dynload/binascii.*.so lib-dynload/_struct.*.so lib-dynload/time.*.so " +
"lib-dynload/xreadlines.*.so types.* platform.* ${bindir}/python* " +
- "_weakrefset.* sysconfig.* config/Makefile " +
+ "_weakrefset.* sysconfig.* _sysconfigdata.* config/Makefile " +
"${includedir}/python${PYTHON_MAJMIN}/pyconfig*.h " +
"${libdir}/python${PYTHON_MAJMIN}/collections " +
"${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py ")
@@ -329,6 +329,9 @@ if __name__ == "__main__":
m.addPackage( "${PN}-readline", "Python readline support", "${PN}-core",
"lib-dynload/readline.*.so rlcompleter.*" )
+ m.addPackage( "${PN}-reprlib", "Python alternate repr() implementation", "${PN}-core",
+ "${libdir}/python3.3/reprlib.py" )
+
m.addPackage( "${PN}-resource", "Python resource control interface", "${PN}-core",
"lib-dynload/resource.*.so" )
diff --git a/scripts/create-recipe b/scripts/create-recipe
index b192990080..e4bc4c322b 100755
--- a/scripts/create-recipe
+++ b/scripts/create-recipe
@@ -1747,7 +1747,6 @@ sub write_bbfile
print BBFILE "\"\n";
}
- print BBFILE 'PR = "r0"' . "\n";
if ($python == 1) {
print BBFILE "PV = \"$pversion\"\n\n";
}
@@ -1865,7 +1864,7 @@ foreach (@tgzfiles) {
#
my @sourcetars = <$orgdir/$outputdir/*\.tar\.bz2 $orgdir/$outputdir/*\.tar\.gz $orgdir/$outputdir/*\.zip>;
-if ( length @sourcetars == 0) {
+if (scalar(@sourcetars) == 0) {
print "Can NOT find source tarball. Exiting...\n";
exit (1);
}
diff --git a/scripts/devtool b/scripts/devtool
new file mode 100755
index 0000000000..d6e1b9710d
--- /dev/null
+++ b/scripts/devtool
@@ -0,0 +1,255 @@
+#!/usr/bin/env python
+
+# OpenEmbedded Development tool
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import sys
+import os
+import argparse
+import glob
+import re
+import ConfigParser
+import subprocess
+import logging
+
+basepath = ''
+workspace = {}
+config = None
+context = None
+
+
+scripts_path = os.path.dirname(os.path.realpath(__file__))
+lib_path = scripts_path + '/lib'
+sys.path = sys.path + [lib_path]
+import scriptutils
+logger = scriptutils.logger_create('devtool')
+
+plugins = []
+
+
+class ConfigHandler(object):
+ config_file = ''
+ config_obj = None
+ init_path = ''
+ workspace_path = ''
+
+ def __init__(self, filename):
+ self.config_file = filename
+ self.config_obj = ConfigParser.SafeConfigParser()
+
+ def get(self, section, option, default=None):
+ try:
+ ret = self.config_obj.get(section, option)
+ except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
+ if default != None:
+ ret = default
+ else:
+ raise
+ return ret
+
+ def read(self):
+ if os.path.exists(self.config_file):
+ self.config_obj.read(self.config_file)
+
+ if self.config_obj.has_option('General', 'init_path'):
+ pth = self.get('General', 'init_path')
+ self.init_path = os.path.join(basepath, pth)
+ if not os.path.exists(self.init_path):
+ logger.error('init_path %s specified in config file cannot be found' % pth)
+ return False
+ else:
+ self.config_obj.add_section('General')
+
+ self.workspace_path = self.get('General', 'workspace_path', os.path.join(basepath, 'workspace'))
+ return True
+
+
+ def write(self):
+ logger.debug('writing to config file %s' % self.config_file)
+ self.config_obj.set('General', 'workspace_path', self.workspace_path)
+ with open(self.config_file, 'w') as f:
+ self.config_obj.write(f)
+
+class Context:
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+
+def read_workspace():
+ global workspace
+ workspace = {}
+ if not os.path.exists(os.path.join(config.workspace_path, 'conf', 'layer.conf')):
+ if context.fixed_setup:
+ logger.error("workspace layer not set up")
+ sys.exit(1)
+ else:
+ logger.info('Creating workspace layer in %s' % config.workspace_path)
+ _create_workspace(config.workspace_path, config, basepath)
+
+ logger.debug('Reading workspace in %s' % config.workspace_path)
+ externalsrc_re = re.compile(r'^EXTERNALSRC(_pn-[a-zA-Z0-9-]*)? =.*$')
+ for fn in glob.glob(os.path.join(config.workspace_path, 'appends', '*.bbappend')):
+ pn = os.path.splitext(os.path.basename(fn))[0].split('_')[0]
+ with open(fn, 'r') as f:
+ for line in f:
+ if externalsrc_re.match(line.rstrip()):
+ splitval = line.split('=', 2)
+ workspace[pn] = splitval[1].strip('" \n\r\t')
+ break
+
+def create_workspace(args, config, basepath, workspace):
+ if args.directory:
+ workspacedir = os.path.abspath(args.directory)
+ else:
+ workspacedir = os.path.abspath(os.path.join(basepath, 'workspace'))
+ _create_workspace(workspacedir, config, basepath, args.create_only)
+
+def _create_workspace(workspacedir, config, basepath, create_only=False):
+ import bb
+
+ confdir = os.path.join(workspacedir, 'conf')
+ if os.path.exists(os.path.join(confdir, 'layer.conf')):
+ logger.info('Specified workspace already set up, leaving as-is')
+ else:
+ # Add a config file
+ bb.utils.mkdirhier(confdir)
+ with open(os.path.join(confdir, 'layer.conf'), 'w') as f:
+ f.write('# ### workspace layer auto-generated by devtool ###\n')
+ f.write('BBPATH =. "$' + '{LAYERDIR}:"\n')
+ f.write('BBFILES += "$' + '{LAYERDIR}/recipes/*/*.bb \\\n')
+ f.write(' $' + '{LAYERDIR}/appends/*.bbappend"\n')
+ f.write('BBFILE_COLLECTIONS += "workspacelayer"\n')
+ f.write('BBFILE_PATTERN_workspacelayer = "^$' + '{LAYERDIR}/"\n')
+ f.write('BBFILE_PATTERN_IGNORE_EMPTY_workspacelayer = "1"\n')
+ f.write('BBFILE_PRIORITY_workspacelayer = "99"\n')
+ # Add a README file
+ with open(os.path.join(workspacedir, 'README'), 'w') as f:
+ f.write('This layer was created by the OpenEmbedded devtool utility in order to\n')
+ f.write('contain recipes and bbappends. In most instances you should use the\n')
+ f.write('devtool utility to manage files within it rather than modifying files\n')
+ f.write('directly (although recipes added with "devtool add" will often need\n')
+ f.write('direct modification.)\n')
+ f.write('\nIf you no longer need to use devtool you can remove the path to this\n')
+ f.write('workspace layer from your conf/bblayers.conf file (and then delete the\n')
+ f.write('layer, if you wish).\n')
+ if not create_only:
+ # Add the workspace layer to bblayers.conf
+ bblayers_conf = os.path.join(basepath, 'conf', 'bblayers.conf')
+ if not os.path.exists(bblayers_conf):
+ logger.error('Unable to find bblayers.conf')
+ return -1
+ bb.utils.edit_bblayers_conf(bblayers_conf, workspacedir, config.workspace_path)
+ if config.workspace_path != workspacedir:
+ # Update our config to point to the new location
+ config.workspace_path = workspacedir
+ config.write()
+
+
+def main():
+ global basepath
+ global config
+ global context
+
+ context = Context(fixed_setup=False)
+
+ # Default basepath
+ basepath = os.path.dirname(os.path.abspath(__file__))
+ pth = basepath
+ while pth != '' and pth != os.sep:
+ if os.path.exists(os.path.join(pth, '.devtoolbase')):
+ context.fixed_setup = True
+ basepath = pth
+ break
+ pth = os.path.dirname(pth)
+
+ parser = argparse.ArgumentParser(description="OpenEmbedded development tool",
+ epilog="Use %(prog)s <command> --help to get help on a specific command")
+ parser.add_argument('--basepath', help='Base directory of SDK / build directory')
+ parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
+ parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
+ parser.add_argument('--color', help='Colorize output', choices=['auto', 'always', 'never'], default='auto')
+
+ subparsers = parser.add_subparsers(dest="subparser_name")
+
+ if not context.fixed_setup:
+ parser_create_workspace = subparsers.add_parser('create-workspace', help='Set up a workspace')
+ parser_create_workspace.add_argument('directory', nargs='?', help='Directory for the workspace')
+ parser_create_workspace.add_argument('--create-only', action="store_true", help='Only create the workspace, do not alter configuration')
+ parser_create_workspace.set_defaults(func=create_workspace)
+
+ scriptutils.load_plugins(logger, plugins, os.path.join(scripts_path, 'lib', 'devtool'))
+ for plugin in plugins:
+ if hasattr(plugin, 'register_commands'):
+ plugin.register_commands(subparsers, context)
+
+ args = parser.parse_args()
+
+ if args.debug:
+ logger.setLevel(logging.DEBUG)
+ elif args.quiet:
+ logger.setLevel(logging.ERROR)
+
+ if args.basepath:
+ # Override
+ basepath = args.basepath
+ elif not context.fixed_setup:
+ basepath = os.environ.get('BUILDDIR')
+ if not basepath:
+ logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)")
+ sys.exit(1)
+
+ logger.debug('Using basepath %s' % basepath)
+
+ config = ConfigHandler(os.path.join(basepath, 'conf', 'devtool.conf'))
+ if not config.read():
+ return -1
+
+ bitbake_subdir = config.get('General', 'bitbake_subdir', '')
+ if bitbake_subdir:
+ # Normally set for use within the SDK
+ logger.debug('Using bitbake subdir %s' % bitbake_subdir)
+ sys.path.insert(0, os.path.join(basepath, bitbake_subdir, 'lib'))
+ core_meta_subdir = config.get('General', 'core_meta_subdir')
+ sys.path.insert(0, os.path.join(basepath, core_meta_subdir, 'lib'))
+ else:
+ # Standard location
+ import scriptpath
+ bitbakepath = scriptpath.add_bitbake_lib_path()
+ if not bitbakepath:
+ logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
+ sys.exit(1)
+ logger.debug('Using standard bitbake path %s' % bitbakepath)
+ scriptpath.add_oe_lib_path()
+
+ scriptutils.logger_setup_color(logger, args.color)
+
+ if args.subparser_name != 'create-workspace':
+ read_workspace()
+
+ ret = args.func(args, config, basepath, workspace)
+
+ return ret
+
+
+if __name__ == "__main__":
+ try:
+ ret = main()
+ except Exception:
+ ret = 1
+ import traceback
+ traceback.print_exc(5)
+ sys.exit(ret)
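For reference, the sequence of f.write() calls in _create_workspace() above renders a conf/layer.conf roughly like the following (the '$' + '{LAYERDIR}' concatenation simply emits a literal ${LAYERDIR}; continuation-line whitespace is approximate):

    # ### workspace layer auto-generated by devtool ###
    BBPATH =. "${LAYERDIR}:"
    BBFILES += "${LAYERDIR}/recipes/*/*.bb \
                ${LAYERDIR}/appends/*.bbappend"
    BBFILE_COLLECTIONS += "workspacelayer"
    BBFILE_PATTERN_workspacelayer = "^${LAYERDIR}/"
    BBFILE_PATTERN_IGNORE_EMPTY_workspacelayer = "1"
    BBFILE_PRIORITY_workspacelayer = "99"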
diff --git a/scripts/gen-lockedsig-cache b/scripts/gen-lockedsig-cache
new file mode 100755
index 0000000000..dfb282efd4
--- /dev/null
+++ b/scripts/gen-lockedsig-cache
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+#
+# gen-lockedsig-cache <locked-sigs.inc> <input-cachedir> <output-cachedir>
+#
+
+import os
+import sys
+import glob
+import shutil
+import errno
+
+def mkdir(d):
+ try:
+ os.makedirs(d)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise e
+
+if len(sys.argv) < 4:
+ print("Incorrect number of arguments specified")
+ sys.exit(1)
+
+sigs = []
+with open(sys.argv[1]) as f:
+ for l in f.readlines():
+ if ":" in l:
+ sigs.append(l.split(":")[2].split()[0])
+
+files = set()
+for s in sigs:
+ p = sys.argv[2] + "/" + s[:2] + "/*" + s + "*"
+ files |= set(glob.glob(p))
+ p = sys.argv[2] + "/*/" + s[:2] + "/*" + s + "*"
+ files |= set(glob.glob(p))
+
+for f in files:
+ dst = f.replace(sys.argv[2], sys.argv[3])
+ mkdir(os.path.dirname(dst))
+ os.link(f, dst)
+
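A minimal standalone sketch of the lookup the script performs, using a hypothetical locked-sigs.inc entry (the recipe name, task and hash below are made up; the field layout is inferred from the split(':')[2] parsing above):

    import glob
    import os

    # hypothetical line as it might appear in a locked-sigs.inc file
    line = "    m4-native:do_populate_sysroot:0123456789abcdef \\\n"

    # same parsing as above: take the third ':'-separated field,
    # then its first whitespace-delimited token (the signature hash)
    sig = line.split(":")[2].split()[0]

    # candidate sstate objects live under <cachedir>/<first two hash chars>/,
    # optionally with one extra subdirectory level in between
    cachedir = "/path/to/sstate-cache"   # stands in for sys.argv[2]
    matches = set()
    for pattern in (os.path.join(cachedir, sig[:2], "*" + sig + "*"),
                    os.path.join(cachedir, "*", sig[:2], "*" + sig + "*")):
        matches |= set(glob.glob(pattern))
    print(sorted(matches))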
diff --git a/scripts/lib/bsp/engine.py b/scripts/lib/bsp/engine.py
index 681720d20a..7d6be239da 100644
--- a/scripts/lib/bsp/engine.py
+++ b/scripts/lib/bsp/engine.py
@@ -756,6 +756,8 @@ class CheckInputLine(ListValInputLine):
return None
+dirname_substitutions = {}
+
class SubstrateBase(object):
"""
Base class for both expanded and unexpanded file and dir container
@@ -764,6 +766,7 @@ class SubstrateBase(object):
def __init__(self, filename, filebase, out_filebase):
self.filename = filename
self.filebase = filebase
+ self.translated_filename = filename
self.out_filebase = out_filebase
self.raw_lines = []
self.expanded_lines = []
@@ -869,6 +872,167 @@ class SubstrateBase(object):
return self.expand_input_tag(tag, lineno)
+ def append_translated_filename(self, filename):
+ """
+ Simply append filename to translated_filename
+ """
+ self.translated_filename = os.path.join(self.translated_filename, filename)
+
+ def get_substituted_file_or_dir_name(self, first_line, tag):
+ """
+ If file or dir names contain name substitutions, return the name
+ to substitute. Note that this is just the file or dirname and
+ doesn't include the path.
+ """
+ filename = first_line.find(tag)
+ if filename != -1:
+ filename += len(tag)
+ substituted_filename = first_line[filename:].strip()
+ this = substituted_filename.find(" this")
+ if this != -1:
+ head, tail = os.path.split(self.filename)
+ substituted_filename = substituted_filename[:this + 1] + tail
+ if tag == DIRNAME_TAG: # get rid of .noinstall in dirname
+ substituted_filename = substituted_filename.split('.')[0]
+
+ return substituted_filename
+
+ def get_substituted_filename(self, first_line):
+ """
+ If a filename contains a name substitution, return the name to
+ substitute. Note that this is just the filename and doesn't
+ include the path.
+ """
+ return self.get_substituted_file_or_dir_name(first_line, FILENAME_TAG)
+
+ def get_substituted_dirname(self, first_line):
+ """
+ If a dirname contains a name substitution, return the name to
+ substitute. Note that this is just the dirname and doesn't
+ include the path.
+ """
+ return self.get_substituted_file_or_dir_name(first_line, DIRNAME_TAG)
+
+ def substitute_filename(self, first_line):
+ """
+ Find the filename in first_line and append it to translated_filename.
+ """
+ substituted_filename = self.get_substituted_filename(first_line)
+ self.append_translated_filename(substituted_filename)
+
+ def substitute_dirname(self, first_line):
+ """
+ Find the dirname in first_line and append it to translated_filename.
+ """
+ substituted_dirname = self.get_substituted_dirname(first_line)
+ self.append_translated_filename(substituted_dirname)
+
+ def is_filename_substitution(self, line):
+ """
+ Do we have a filename substitution?
+ """
+ if line.find(FILENAME_TAG) != -1:
+ return True
+ return False
+
+ def is_dirname_substitution(self, line):
+ """
+ Do we have a dirname substitution?
+ """
+ if line.find(DIRNAME_TAG) != -1:
+ return True
+ return False
+
+ def translate_dirname(self, first_line):
+ """
+ Just save the first_line mapped by filename. The later pass
+ through the directories will look for a dirname.noinstall
+ match and grab the substitution line.
+ """
+ dirname_substitutions[self.filename] = first_line
+
+ def translate_dirnames_in_path(self, path):
+ """
+ Translate dirnames below this file or dir, not including tail.
+ dirname_substitutions is keyed on actual untranslated filenames.
+ translated_path contains the substitutions for each element.
+ """
+ remainder = path[len(self.filebase)+1:]
+ translated_path = untranslated_path = self.filebase
+
+ untranslated_dirs = remainder.split(os.sep)
+
+ for dir in untranslated_dirs:
+ key = os.path.join(untranslated_path, dir + '.noinstall')
+ try:
+ first_line = dirname_substitutions[key]
+ except KeyError:
+ translated_path = os.path.join(translated_path, dir)
+ untranslated_path = os.path.join(untranslated_path, dir)
+ continue
+ substituted_dir = self.get_substituted_dirname(first_line)
+ translated_path = os.path.join(translated_path, substituted_dir)
+ untranslated_path = os.path.join(untranslated_path, dir)
+
+ return translated_path
+
+ def translate_file_or_dir_name(self):
+ """
+ Originally we were allowed to use open/close/assign tags and python
+ code in the filename, which fit in nicely with the way we
+ processed the templates and generated code. Now that we can't
+ do that, we make those tags proper file contents and have this
+ pass substitute the nice but non-functional names with those
+ 'strange' ones, and then proceed as usual.
+
+ So, if files or matching dir<.noinstall> files contain
+ filename substitutions, this function translates them into the
+ corresponding 'strange' names, which future passes will expand
+ as they always have. The resulting pathname is kept in the
+ file or directory's translated_filename. Another way to think
+ about it is that self.filename is the input filename, and
+ translated_filename is the output filename before expansion.
+ """
+ # remove leaf file or dirname
+ head, tail = os.path.split(self.filename)
+ translated_path = self.translate_dirnames_in_path(head)
+ self.translated_filename = translated_path
+
+ # This is a dirname - does it have a matching .noinstall with
+ # a substitution? If so, apply the dirname substitution.
+ if not os.path.isfile(self.filename):
+ key = self.filename + ".noinstall"
+ try:
+ first_line = dirname_substitutions[key]
+ except KeyError:
+ self.append_translated_filename(tail)
+ return
+ self.substitute_dirname(first_line)
+ return
+
+ f = open(self.filename)
+ first_line = f.readline()
+ f.close()
+
+ # This is a normal filename not needing translation, just use
+ # it as-is.
+ if not first_line or not first_line.startswith("#"):
+ self.append_translated_filename(tail)
+ return
+
+ # If we have a filename substitution (first line in the file
+ # is a FILENAME_TAG line) do the substitution now. If we have
+ # a dirname substitution (DIRNAME_TAG in dirname.noinstall
+ # meta-file), hash it so we can apply it when we see the
+ # matching dirname later. Otherwise we have a regular
+ # filename, just use it as-is.
+ if self.is_filename_substitution(first_line):
+ self.substitute_filename(first_line)
+ elif self.is_dirname_substitution(first_line):
+ self.translate_dirname(first_line)
+ else:
+ self.append_translated_filename(tail)
+
def expand_file_or_dir_name(self):
"""
Expand file or dir names into codeline. Dirnames and
@@ -878,7 +1042,7 @@ class SubstrateBase(object):
"""
lineno = 0
- line = self.filename[len(self.filebase):]
+ line = self.translated_filename[len(self.filebase):]
if line.startswith("/"):
line = line[1:]
opentag_start = -1
@@ -897,7 +1061,6 @@ class SubstrateBase(object):
self.parse_error("No close tag found for open tag", lineno, line)
# we have a {{ tag i.e. code
tag = line[opentag_start + len(OPEN_TAG):end].strip()
-
if not tag.lstrip().startswith(IF_TAG):
self.parse_error("Only 'if' tags are allowed in file or directory names",
lineno, line)
@@ -933,6 +1096,7 @@ class SubstrateBase(object):
Expand the file or dir name first, eventually this ends up
creating the file or dir.
"""
+ self.translate_file_or_dir_name()
self.expand_file_or_dir_name()
@@ -955,6 +1119,9 @@ class SubstrateFile(SubstrateBase):
self.read()
for lineno, line in enumerate(self.raw_lines):
+ # only first line can be a filename substitution
+ if lineno == 0 and line.startswith("#") and FILENAME_TAG in line:
+ continue # skip it - we've already expanded it
expanded_line = self.expand_tag(line, lineno + 1) # humans not 0-based
if not expanded_line:
expanded_line = NormalLine(line.rstrip())
@@ -1141,7 +1308,7 @@ def gather_inputlines(files):
for file in files:
if isinstance(file, SubstrateFile):
group = None
- basename = os.path.basename(file.filename)
+ basename = os.path.basename(file.translated_filename)
codeline = conditional_filename(basename)
if codeline:
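A simplified, standalone sketch of the filename-translation convention the hunks above implement; the tag string is assumed to match the FILENAME_TAG constant defined elsewhere in engine.py (the '# yocto-bsp-filename ' marker seen in the template files below):

    import os

    FILENAME_TAG = "# yocto-bsp-filename "   # assumed value of the engine.py constant

    def translated_name(path):
        """Return the output basename a template file maps to, before expansion.

        If the first line carries FILENAME_TAG, the remainder of that line
        becomes the output name (a trailing ' this' keeps the on-disk
        basename); otherwise the on-disk basename is used unchanged.
        """
        with open(path) as f:
            first_line = f.readline()
        if not first_line or not first_line.startswith("#"):
            return os.path.basename(path)
        tagpos = first_line.find(FILENAME_TAG)
        if tagpos == -1:
            return os.path.basename(path)
        substituted = first_line[tagpos + len(FILENAME_TAG):].strip()
        this = substituted.find(" this")
        if this != -1:
            substituted = substituted[:this + 1] + os.path.basename(path)
        return substituted

    # e.g. a template checked in as 'machine.scc' whose first line is
    # '# yocto-bsp-filename {{=machine}}.scc' reports '{{=machine}}.scc',
    # which later passes expand to the concrete machine name; 'xorg.conf'
    # starting with '# yocto-bsp-filename {{ if xserver == "y": }} this'
    # reports '{{ if xserver == "y": }} xorg.conf', matching the old path names.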
diff --git a/scripts/lib/bsp/help.py b/scripts/lib/bsp/help.py
index 7c436d6be0..4cce100d16 100644
--- a/scripts/lib/bsp/help.py
+++ b/scripts/lib/bsp/help.py
@@ -230,6 +230,7 @@ DESCRIPTION
powerpc
i386
mips
+ mips64
x86_64
qemu
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/conf/machine/{{=machine}}.conf b/scripts/lib/bsp/substrate/target/arch/arm/conf/machine/{{=machine}}.conf
deleted file mode 100644
index 44a80d226c..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/arm/conf/machine/{{=machine}}.conf
+++ /dev/null
@@ -1,105 +0,0 @@
-#@TYPE: Machine
-#@NAME: {{=machine}}
-
-#@DESCRIPTION: Machine configuration for {{=machine}} systems
-
-{{ input type:"boolean" name:"xserver" prio:"50" msg:"Do you need support for X? (y/n)" default:"y" }}
-{{ if xserver == "y": }}
-PREFERRED_PROVIDER_virtual/xserver ?= "xserver-xorg"
-XSERVER ?= "xserver-xorg \
- xf86-input-evdev \
- xf86-input-mouse \
- xf86-video-omapfb \
- xf86-input-keyboard"
-
-# Ship all kernel modules by default
-MACHINE_EXTRA_RRECOMMENDS = " kernel-modules"
-
-# Allow for MMC booting (required by the NAND-less Beagleboard XM)
-EXTRA_IMAGEDEPENDS += "u-boot"
-
-# Uncomment the following line to enable the hard floating point abi. Note that
-# this breaks some binary libraries and 3D (neither of which ship with
-# meta-yocto). For maximum compatibility, leave this disabled.
-#DEFAULTTUNE ?= "cortexa8hf-neon"
-{{ input type:"choicelist" name:"tunefile" prio:"40" msg:"Which machine tuning would you like to use?" default:"tune_cortexa8" }}
-{{ input type:"choice" val:"tune_arm1136jf_s" msg:"arm1136jf-s tuning optimizations" }}
-{{ input type:"choice" val:"tune_arm920t" msg:"arm920t tuning optimizations" }}
-{{ input type:"choice" val:"tune_arm926ejs" msg:"arm926ejs tuning optimizations" }}
-{{ input type:"choice" val:"tune_arm9tdmi" msg:"arm9tdmi tuning optimizations" }}
-{{ input type:"choice" val:"tune_cortexa5" msg:"cortexa5 tuning optimizations" }}
-{{ input type:"choice" val:"tune_cortexa7" msg:"cortexa7 tuning optimizations" }}
-{{ input type:"choice" val:"tune_cortexa8" msg:"cortexa8 tuning optimizations" }}
-{{ input type:"choice" val:"tune_cortexa9" msg:"cortexa9 tuning optimizations" }}
-{{ input type:"choice" val:"tune_cortexa15" msg:"cortexa15 tuning optimizations" }}
-{{ input type:"choice" val:"tune_cortexm1" msg:"cortexm1 tuning optimizations" }}
-{{ input type:"choice" val:"tune_cortexm3" msg:"cortexm3 tuning optimizations" }}
-{{ input type:"choice" val:"tune_cortexr4" msg:"cortexr4 tuning optimizations" }}
-{{ input type:"choice" val:"tune_ep9312" msg:"ep9312 tuning optimizations" }}
-{{ input type:"choice" val:"tune_iwmmxt" msg:"iwmmxt tuning optimizations" }}
-{{ input type:"choice" val:"tune_strongarm1100" msg:"strongarm1100 tuning optimizations" }}
-{{ input type:"choice" val:"tune_xscale" msg:"xscale tuning optimizations" }}
-{{ if tunefile == "tune_arm1136jf_s": }}
-include conf/machine/include/tune-arm1136jf-s.inc
-{{ if tunefile == "tune_arm920t": }}
-include conf/machine/include/tune-arm920t.inc
-{{ if tunefile == "tune_arm926ejs": }}
-include conf/machine/include/tune-arm926ejs.inc
-{{ if tunefile == "tune_arm9tdmi": }}
-include conf/machine/include/tune-arm9tdmi.inc
-{{ if tunefile == "tune_cortexa5": }}
-include conf/machine/include/tune-cortexa5.inc
-{{ if tunefile == "tune_cortexa7": }}
-include conf/machine/include/tune-cortexa7.inc
-{{ if tunefile == "tune_cortexa8": }}
-include conf/machine/include/tune-cortexa8.inc
-{{ if tunefile == "tune_cortexa9": }}
-include conf/machine/include/tune-cortexa9.inc
-{{ if tunefile == "tune_cortexa15": }}
-include conf/machine/include/tune-cortexa15.inc
-{{ if tunefile == "tune_cortexm1": }}
-include conf/machine/include/tune-cortexm1.inc
-{{ if tunefile == "tune_cortexm3": }}
-include conf/machine/include/tune-cortexm3.inc
-{{ if tunefile == "tune_cortexr4": }}
-include conf/machine/include/tune-cortexr4.inc
-{{ if tunefile == "tune_ep9312": }}
-include conf/machine/include/tune-ep9312.inc
-{{ if tunefile == "tune_iwmmxt": }}
-include conf/machine/include/tune-iwmmxt.inc
-{{ if tunefile == "tune_strongarm1100": }}
-include conf/machine/include/tune-strongarm1100.inc
-{{ if tunefile == "tune_xscale": }}
-include conf/machine/include/tune-xscale.inc
-
-IMAGE_FSTYPES += "tar.bz2 jffs2"
-EXTRA_IMAGECMD_jffs2 = "-lnp "
-
-# 2.6.37 and later kernels use OMAP_SERIAL, ttyO2
-# earlier kernels use ttyS2
-SERIAL_CONSOLE = "115200 ttyO2"
-
-{{ if kernel_choice == "custom": preferred_kernel = "linux-yocto-custom" }}
-{{ if kernel_choice == "linux-yocto-dev": preferred_kernel = "linux-yocto-dev" }}
-{{ if kernel_choice == "custom" or kernel_choice == "linux-yocto-dev" : }}
-PREFERRED_PROVIDER_virtual/kernel ?= "{{=preferred_kernel}}"
-
-{{ if kernel_choice != "custom" and kernel_choice != "linux-yocto-dev": preferred_kernel = kernel_choice.split('_')[0] }}
-{{ if kernel_choice != "custom" and kernel_choice != "linux-yocto-dev": preferred_kernel_version = kernel_choice.split('_')[1] }}
-{{ if kernel_choice != "custom" and kernel_choice != "linux-yocto-dev": }}
-PREFERRED_PROVIDER_virtual/kernel ?= "{{=preferred_kernel}}"
-PREFERRED_VERSION_{{=preferred_kernel}} ?= "{{=preferred_kernel_version}}%"
-
-KERNEL_IMAGETYPE = "zImage"
-KERNEL_DEVICETREE = "${S}/arch/arm/boot/dts/omap3-beagle.dts ${S}/arch/arm/boot/dts/omap3-beagle-xm.dts"
-
-SPL_BINARY = "MLO"
-UBOOT_SUFFIX = "img"
-{{ input type:"edit" name:"uboot_machine" prio:"40" msg:"Please specify a value for UBOOT_MACHINE:" default:"omap3_beagle_config" }}
-UBOOT_MACHINE = "{{=uboot_machine}}"
-{{ input type:"edit" name:"uboot_entrypoint" prio:"40" msg:"Please specify a value for UBOOT_ENTRYPOINT:" default:"0x80008000" }}
-UBOOT_ENTRYPOINT = "{{=uboot_entrypoint}}"
-{{ input type:"edit" name:"uboot_loadaddress" prio:"40" msg:"Please specify a value for UBOOT_LOADADDRESS:" default:"0x80008000" }}
-UBOOT_LOADADDRESS = "{{=uboot_loadaddress}}"
-
-MACHINE_FEATURES = "usbgadget usbhost vfat alsa"
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall b/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall
new file mode 100644
index 0000000000..b442d02d57
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{=machine}}
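The machine.noinstall meta-file above drives the dirname half of the same convention: translate_dirname() records its '# yocto-bsp-dirname {{=machine}}' line keyed on the '<dir>.noinstall' path, and translate_dirnames_in_path() then applies that substitution to the sibling 'machine/' directory, so a path checked in as

    .../xorg-xserver/xserver-xf86-config/machine/xorg.conf

is translated to

    .../xorg-xserver/xserver-xf86-config/{{=machine}}/xorg.conf

and expanded to the concrete machine name by the later passes, matching the old template paths that carried the tag in the directory name itself.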
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/{{ if xserver == "y": }} xorg.conf b/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf
index 264f3c91ad..bc52893e2a 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/{{ if xserver == "y": }} xorg.conf
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if xserver == "y": }} this
Section "Module"
Load "extmod"
Load "dbe"
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend b/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend
new file mode 100644
index 0000000000..30830031ed
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend
@@ -0,0 +1,2 @@
+# yocto-bsp-filename {{ if xserver == "y": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend b/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend
deleted file mode 100644
index 72d991c7e5..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend
+++ /dev/null
@@ -1 +0,0 @@
-FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files.noinstall b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files.noinstall
new file mode 100644
index 0000000000..1e0d92c55c
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{ if kernel_choice != "custom": }} files
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-non_hardware.cfg b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-non_hardware.cfg
index 361343bb58..9bfc90c6f2 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-non_hardware.cfg
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-non_hardware.cfg
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-non_hardware.cfg
#
# Miscellaneous filesystems
#
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-preempt-rt.scc
index 56f7f0f1e3..ca5f3b5be9 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-preempt-rt.scc
define KMACHINE {{=machine}}
define KTYPE preempt-rt
define KARCH arm
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-standard.scc
index 80640db4a2..9014c2c97e 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-standard.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-standard.scc
define KMACHINE {{=machine}}
define KTYPE standard
define KARCH arm
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-tiny.scc
index 51eaf2d32c..3f1c252232 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-tiny.scc
define KMACHINE {{=machine}}
define KTYPE tiny
define KARCH arm
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-config.cfg
new file mode 100644
index 0000000000..47489e44e9
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-config.cfg
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-config.cfg
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-features.scc b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-features.scc
new file mode 100644
index 0000000000..582759e612
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-features.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-features.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-patches.scc
new file mode 100644
index 0000000000..97f747fa07
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine-user-patches.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-patches.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine.cfg
index 10134c81f5..a2e1ae0f75 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine.cfg
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.cfg
#
# System Type
#
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine.scc
index 24196e6f67..828400df40 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/files/machine.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.scc
kconf hardware {{=machine}}.cfg
kconf non-hardware {{machine}}-non_hardware.cfg
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
index a04e6c7852..8903a823aa 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.14) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.17) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.14"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.17"}}
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend
index 25c87a85ac..2fa6231cbf 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-dev": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
index 08b1f88d1b..35b0958582 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
new file mode 100644
index 0000000000..5f8db03c64
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-preempt-rt.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
+#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+#LINUX_VERSION = "3.14"
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
index bc6968d832..f04dd0cce4 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
index d221d5f2a4..471ccbcc3e 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
new file mode 100644
index 0000000000..fb4253271a
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.17"
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.10.bbappend
index 1e814c54d7..badb3aa239 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.14.bbappend
index ca7f8c5978..1e1cc51315 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.17.bbappend
index ca7f8c5978..72af58afc1 100644
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_3.17.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.17": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -29,4 +30,4 @@ SRC_URI += "file://{{=machine}}-standard.scc \
# the appropriate changes committed to the upstream linux-yocto repo
#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
-#LINUX_VERSION = "3.14" \ No newline at end of file
+#LINUX_VERSION = "3.17" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/common/recipes-bsp/formfactor/formfactor/machine.noinstall b/scripts/lib/bsp/substrate/target/arch/common/recipes-bsp/formfactor/formfactor/machine.noinstall
new file mode 100644
index 0000000000..b442d02d57
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/common/recipes-bsp/formfactor/formfactor/machine.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{=machine}}
diff --git a/scripts/lib/bsp/substrate/target/arch/common/recipes-bsp/formfactor/formfactor/{{=machine}}/machconfig b/scripts/lib/bsp/substrate/target/arch/common/recipes-bsp/formfactor/formfactor/machine/machconfig
index 3b85d3821f..3b85d3821f 100644
--- a/scripts/lib/bsp/substrate/target/arch/common/recipes-bsp/formfactor/formfactor/{{=machine}}/machconfig
+++ b/scripts/lib/bsp/substrate/target/arch/common/recipes-bsp/formfactor/formfactor/machine/machconfig
diff --git a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/kernel-list.noinstall b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/kernel-list.noinstall
index 03b7d84ec2..663dddbb0f 100644
--- a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/kernel-list.noinstall
+++ b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/kernel-list.noinstall
@@ -2,22 +2,22 @@
{{ input type:"boolean" name:"custom_kernel_remote" prio:"20" msg:"Is the custom kernel you'd like to use in a remote git repo? (y/n)" default:"y"}}
{{ if kernel_choice == "custom" and custom_kernel_remote == "y": }}
-{{ input type:"edit-git-repo" name:"custom_kernel_remote_path" prio:"20" msg:"Please enter the full URI to the remote git repo (the default corresponds to linux-stable v3.13.9)" default:"git://git.kernel.org/pub/scm/linux/kernel/git/stable/linux-stable.git"}}
+{{ input type:"edit-git-repo" name:"custom_kernel_remote_path" prio:"20" msg:"Please enter the full URI to the remote git repo (the default corresponds to linux-stable v3.16.3)" default:"git://git.kernel.org/pub/scm/linux/kernel/git/stable/linux-stable.git"}}
{{ if kernel_choice == "custom" and custom_kernel_remote == "n": }}
-{{ input type:"edit-git-repo" name:"custom_kernel_local_path" prio:"20" msg:"You've indicated that you're not using a remote git repo. Please enter the full path to the local git repo you want to use (the default assumes a local linux-stable v3.13.9)" default:"/home/trz/yocto/kernels/linux-stable.git"}}
+{{ input type:"edit-git-repo" name:"custom_kernel_local_path" prio:"20" msg:"You've indicated that you're not using a remote git repo. Please enter the full path to the local git repo you want to use (the default assumes a local linux-stable v3.16.3)" default:"/home/trz/yocto/kernels/linux-stable.git"}}
{{ if kernel_choice == "custom": }}
{{ input type:"boolean" name:"custom_kernel_need_kbranch" prio:"20" msg:"Do you need to use a specific (non-master) branch? (y/n)" default:"n"}}
{{ if kernel_choice == "custom" and custom_kernel_need_kbranch == "y": }}
-{{ input type:"edit" name:"custom_kernel_kbranch" prio:"20" msg:"Please enter the branch you want to use (the default branch corresponds to the linux-stable 'linux-3.13.y' branch):" default:"linux-3.13.y"}}
+{{ input type:"edit" name:"custom_kernel_kbranch" prio:"20" msg:"Please enter the branch you want to use (the default branch corresponds to the linux-stable 'linux-3.16.y' branch):" default:"linux-3.16.y"}}
{{ if kernel_choice == "custom": }}
{{ input type:"edit" name:"custom_kernel_srcrev" prio:"20" msg:"Please enter the SRCREV (commit id) you'd like to use (use '${AUTOREV}' to track the current HEAD):" default:"${AUTOREV}"}}
{{ if kernel_choice == "custom": }}
-{{ input type:"edit" name:"custom_kernel_linux_version" prio:"20" msg:"Please enter the Linux version of the kernel you've specified:" default:"3.13.9"}}
+{{ input type:"edit" name:"custom_kernel_linux_version" prio:"20" msg:"Please enter the Linux version of the kernel you've specified:" default:"3.16.3"}}
{{ if kernel_choice == "custom": }}
{{ input type:"edit" name:"custom_kernel_linux_version_extension" prio:"20" msg:"Please enter a Linux version extension if you want (it will show up at the end of the kernel name shown by uname):" default:"-custom"}}
diff --git a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom.bb b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.bb
index 6d3cc6f743..69f598e144 100644
--- a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom.bb
+++ b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.bb
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "custom": }} this
# This file was derived from the linux-yocto-custom.bb recipe in
# oe-core.
#
diff --git a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.noinstall b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.noinstall
new file mode 100644
index 0000000000..017d206c24
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{ if kernel_choice == "custom": }} linux-yocto-custom
diff --git a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/defconfig b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/defconfig
index ceb0ffa30c..ceb0ffa30c 100644
--- a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/defconfig
+++ b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/defconfig
diff --git a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine-user-config.cfg
index 17c8b503da..922309d5ab 100644
--- a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}-user-config.cfg
+++ b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine-user-config.cfg
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-user-config.cfg
#
# Used by yocto-kernel to manage config options.
#
diff --git a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine-user-patches.scc
index 7a598d9118..6d1138f42a 100644
--- a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}-user-patches.scc
+++ b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine-user-patches.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-user-patches.scc
#
# Used by yocto-kernel to manage patches.
#
diff --git a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}.cfg b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine.cfg
index 95170b12eb..1ba8201f16 100644
--- a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}.cfg
+++ b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine.cfg
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.cfg
#
# A convenient place to add config options, nothing more.
#
diff --git a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}.scc b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine.scc
index 2e3ca90793..0b6b413377 100644
--- a/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/{{ if kernel_choice == "custom": }} linux-yocto-custom/{{=machine}}.scc
+++ b/scripts/lib/bsp/substrate/target/arch/common/recipes-kernel/linux/linux-yocto-custom/machine.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.scc
#
# The top-level 'feature' for the {{=machine}} custom kernel.
#
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/{{=machine}}.conf b/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf
index 0df4dc2035..e13dabc1e5 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/{{=machine}}.conf
+++ b/scripts/lib/bsp/substrate/target/arch/i386/conf/machine/machine.conf
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.conf
#@TYPE: Machine
#@NAME: {{=machine}}
@@ -32,19 +33,23 @@ MACHINE_FEATURES += "wifi efi pcbios"
{{ input type:"boolean" name:"xserver" prio:"50" msg:"Do you need support for X? (y/n)" default:"y" }}
-{{ if xserver == "y" and (kernel_choice == "linux-yocto_3.14" or kernel_choice == "linux-yocto_3.10"): }}
-{{ input type:"choicelist" name:"xserver_choice" prio:"50" msg:"Please select an xserver for this machine:" default:"xserver_i915" }}
+{{ if xserver == "y" and (kernel_choice == "linux-yocto_3.17" or kernel_choice == "linux-yocto_3.14" or kernel_choice == "linux-yocto_3.10"): }}
+{{ input type:"choicelist" name:"xserver_choice" prio:"50" msg:"Please select an xserver for this machine:" default:"xserver_vesa" }}
{{ input type:"choice" val:"xserver_vesa" msg:"VESA xserver support" }}
{{ input type:"choice" val:"xserver_i915" msg:"i915 xserver support" }}
{{ input type:"choice" val:"xserver_i965" msg:"i965 xserver support" }}
+{{ input type:"choice" val:"xserver_fbdev" msg:"fbdev xserver support" }}
+{{ input type:"choice" val:"xserver_modesetting" msg:"modesetting xserver support" }}
{{ if xserver == "y" and kernel_choice == "custom": }}
-{{ input type:"choicelist" name:"xserver_choice" prio:"50" msg:"Please select an xserver for this machine:" default:"xserver_i915" }}
+{{ input type:"choicelist" name:"xserver_choice" prio:"50" msg:"Please select an xserver for this machine:" default:"xserver_vesa" }}
{{ input type:"choice" val:"xserver_vesa" msg:"VESA xserver support" }}
{{ input type:"choice" val:"xserver_i915" msg:"i915 xserver support" }}
{{ input type:"choice" val:"xserver_i965" msg:"i965 xserver support" }}
+{{ input type:"choice" val:"xserver_fbdev" msg:"fbdev xserver support" }}
+{{ input type:"choice" val:"xserver_modesetting" msg:"modesetting xserver support" }}
-{{ if xserver == "y" and kernel_choice != "linux-yocto_3.14" and kernel_choice != "linux-yocto_3.10" and kernel_choice != "custom": xserver_choice = "xserver_i915" }}
+{{ if xserver == "y" and kernel_choice != "linux-yocto_3.17" and kernel_choice != "linux-yocto_3.14" and kernel_choice != "linux-yocto_3.10" and kernel_choice != "custom": xserver_choice = "xserver_i915" }}
{{ if xserver == "y": }}
XSERVER ?= "${XSERVER_X86_BASE} \
@@ -55,12 +60,18 @@ XSERVER ?= "${XSERVER_X86_BASE} \
${XSERVER_X86_I915} \
{{ if xserver == "y" and xserver_choice == "xserver_i965": }}
${XSERVER_X86_I965} \
+{{ if xserver == "y" and xserver_choice == "xserver_fbdev": }}
+ ${XSERVER_X86_FBDEV} \
+{{ if xserver == "y" and xserver_choice == "xserver_modesetting": }}
+ ${XSERVER_X86_MODESETTING} \
{{ if xserver == "y": }}
"
-MACHINE_EXTRA_RRECOMMENDS += "linux-firmware v86d"
+MACHINE_EXTRA_RRECOMMENDS += "linux-firmware v86d eee-acpi-scripts"
EXTRA_OECONF_append_pn-matchbox-panel-2 = " --with-battery=acpi"
+GLIBC_ADDONS = "nptl"
+
{{ if xserver == "y" and xserver_choice == "xserver_vesa": }}
APPEND += "video=vesafb vga=0x318"
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall b/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall
new file mode 100644
index 0000000000..b442d02d57
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{=machine}}
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf b/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf
new file mode 100644
index 0000000000..ac9a0f1bb0
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf
@@ -0,0 +1 @@
+# yocto-bsp-filename {{ if xserver == "y": }} this
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend b/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend
new file mode 100644
index 0000000000..30830031ed
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend
@@ -0,0 +1,2 @@
+# yocto-bsp-filename {{ if xserver == "y": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend b/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend
deleted file mode 100644
index 72d991c7e5..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend
+++ /dev/null
@@ -1 +0,0 @@
-FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files.noinstall b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files.noinstall
new file mode 100644
index 0000000000..1e0d92c55c
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{ if kernel_choice != "custom": }} files
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-preempt-rt.scc
index bfefb0d0a0..619ee3f367 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-preempt-rt.scc
define KMACHINE {{=machine}}
define KTYPE preempt-rt
define KARCH i386
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-standard.scc
index 60b670dffc..682012fafc 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-standard.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-standard.scc
define KMACHINE {{=machine}}
define KTYPE standard
define KARCH i386
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-tiny.scc
index ec44ef9485..cc7519699a 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-tiny.scc
define KMACHINE {{=machine}}
define KTYPE tiny
define KARCH i386
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-config.cfg
new file mode 100644
index 0000000000..69efdcc759
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-config.cfg
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-config.cfg \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-features.scc b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-features.scc
new file mode 100644
index 0000000000..85be26de97
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-features.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-features.scc \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-patches.scc
new file mode 100644
index 0000000000..4c59daac46
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine-user-patches.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-patches.scc \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine.cfg
index e93c0b8a08..3b168b7e36 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine.cfg
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.cfg
CONFIG_X86_32=y
CONFIG_MATOM=y
CONFIG_PRINTK=y
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine.scc
index eda1d62f11..3d32f111b0 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/files/machine.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.scc
kconf hardware {{=machine}}.cfg
include features/intel-e1xxxx/intel-e100.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
index a04e6c7852..8903a823aa 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.14) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.17) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.14"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.17"}}
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend
index 25c87a85ac..2fa6231cbf 100644
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-dev": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
index 08b1f88d1b..35b0958582 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
new file mode 100644
index 0000000000..5f8db03c64
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-preempt-rt.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
+#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+#LINUX_VERSION = "3.14"
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
index bc6968d832..f04dd0cce4 100644
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
index d221d5f2a4..471ccbcc3e 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
new file mode 100644
index 0000000000..fb4253271a
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.17"
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.10.bbappend
index c1f26540a7..1cfc611949 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.14.bbappend
index 948d568cd1..fbb49edb26 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.17.bbappend
new file mode 100644
index 0000000000..c8fc73a97d
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_3.17.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.17" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend.noinstall b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend.noinstall
new file mode 100644
index 0000000000..3594e6583c
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{ if create_example_bbappend == "y": }} recipes-example-bbappend
diff --git a/scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_bbappend == "y": }} recipes-example-bbappend/example-bbappend/{{=example_bbappend_name}}_{{=example_bbappend_version}}.bbappend b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend/example-bbappend/example-bbappend-version.bbappend
index 2e50ff668d..353133080a 100644
--- a/scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_bbappend == "y": }} recipes-example-bbappend/example-bbappend/{{=example_bbappend_name}}_{{=example_bbappend_version}}.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend/example-bbappend/example-bbappend-version.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=example_bbappend_name}}_{{=example_bbappend_version}}.bbappend
FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}-${PV}:"
#
diff --git a/scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend/example-bbappend/example-bbappend-version.noinstall b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend/example-bbappend/example-bbappend-version.noinstall
new file mode 100644
index 0000000000..46df8a8e04
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend/example-bbappend/example-bbappend-version.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{=example_bbappend_name}}-{{=example_bbappend_version}}
diff --git a/scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_bbappend == "y": }} recipes-example-bbappend/example-bbappend/{{=example_bbappend_name}}-{{=example_bbappend_version}}/example.patch b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend/example-bbappend/example-bbappend-version/example.patch
index 2000a34da5..2000a34da5 100644
--- a/scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_bbappend == "y": }} recipes-example-bbappend/example-bbappend/{{=example_bbappend_name}}-{{=example_bbappend_version}}/example.patch
+++ b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example-bbappend/example-bbappend/example-bbappend-version/example.patch
diff --git a/scripts/lib/bsp/substrate/target/arch/layer/recipes-example.noinstall b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example.noinstall
new file mode 100644
index 0000000000..b0069b1a5a
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{ if create_example_recipe == "y": }} recipes-example
diff --git a/scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_recipe == "y": }} recipes-example/example/{{=example_recipe_name}}_0.1.bb b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1.bb
index 14bf344da5..ba1ccb16c6 100644
--- a/scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_recipe == "y": }} recipes-example/example/{{=example_recipe_name}}_0.1.bb
+++ b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1.bb
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=example_recipe_name}}_0.1.bb
#
# This file was derived from the 'Hello World!' example recipe in the
# Yocto Project Development Manual.
diff --git a/scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1.noinstall b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1.noinstall
new file mode 100644
index 0000000000..c319c19c57
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{=example_recipe_name}}-0.1
diff --git a/scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_recipe == "y": }} recipes-example/example/{{=example_recipe_name}}-0.1/example.patch b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1/example.patch
index 2000a34da5..2000a34da5 100644
--- a/scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_recipe == "y": }} recipes-example/example/{{=example_recipe_name}}-0.1/example.patch
+++ b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1/example.patch
diff --git a/scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_recipe == "y": }} recipes-example/example/{{=example_recipe_name}}-0.1/helloworld.c b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1/helloworld.c
index 71f2e46b4e..71f2e46b4e 100644
--- a/scripts/lib/bsp/substrate/target/arch/layer/{{ if create_example_recipe == "y": }} recipes-example/example/{{=example_recipe_name}}-0.1/helloworld.c
+++ b/scripts/lib/bsp/substrate/target/arch/layer/recipes-example/example/example-recipe-0.1/helloworld.c
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/conf/machine/{{=machine}}.conf b/scripts/lib/bsp/substrate/target/arch/mips/conf/machine/machine.conf
index 9ea411933c..b319d626f4 100644
--- a/scripts/lib/bsp/substrate/target/arch/mips/conf/machine/{{=machine}}.conf
+++ b/scripts/lib/bsp/substrate/target/arch/mips/conf/machine/machine.conf
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.conf
#@TYPE: Machine
#@NAME: {{=machine}}
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files.noinstall b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files.noinstall
new file mode 100644
index 0000000000..1e0d92c55c
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{ if kernel_choice != "custom": }} files
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-preempt-rt.scc
index b0fb63ac6a..176190cd2e 100644
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-preempt-rt.scc
define KMACHINE {{=machine}}
define KTYPE preempt-rt
define KARCH mips
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-standard.scc
index 326663a509..f05dd851d2 100644
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-standard.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-standard.scc
define KMACHINE {{=machine}}
define KTYPE standard
define KARCH mips
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-tiny.scc
index 4514765eb3..f71c775397 100644
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-tiny.scc
define KMACHINE {{=machine}}
define KTYPE tiny
define KARCH mips
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-config.cfg
new file mode 100644
index 0000000000..47489e44e9
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-config.cfg
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-config.cfg
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-features.scc b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-features.scc
new file mode 100644
index 0000000000..85be26de97
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-features.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-features.scc
\ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-patches.scc
new file mode 100644
index 0000000000..97f747fa07
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine-user-patches.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-patches.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.cfg b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.cfg
new file mode 100644
index 0000000000..2fe476691c
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.cfg
@@ -0,0 +1,2 @@
+# yocto-bsp-filename {{=machine}}.cfg
+CONFIG_MIPS=y
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.scc
index 1ef01b6e3c..f39dc3edf1 100644
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/files/machine.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.scc
kconf hardware {{=machine}}.cfg
include cfg/usb-mass-storage.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
index a04e6c7852..8903a823aa 100644
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.14) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.17) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.14"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.17"}}
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend
index 25c87a85ac..2fa6231cbf 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-dev": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
index 08b1f88d1b..35b0958582 100644
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
new file mode 100644
index 0000000000..5f8db03c64
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-preempt-rt.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
+#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+#LINUX_VERSION = "3.14"
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
index bc6968d832..f04dd0cce4 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
new file mode 100644
index 0000000000..c7e7989821
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.14" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
new file mode 100644
index 0000000000..142e38b3bc
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.17" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.10.bbappend
index 1e814c54d7..badb3aa239 100644
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.14.bbappend
new file mode 100644
index 0000000000..1e1cc51315
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.14" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.17.bbappend
new file mode 100644
index 0000000000..72af58afc1
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_3.17.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.17" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg
deleted file mode 100644
index a1b333ca56..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg
+++ /dev/null
@@ -1 +0,0 @@
-CONFIG_MIPS=y
diff --git a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend
deleted file mode 100644
index 85544e812c..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend
+++ /dev/null
@@ -1,32 +0,0 @@
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# uncomment and replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
-#LINUX_VERSION = "3.14" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/mips64/.gitignore
index e69de29bb2..e69de29bb2 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/.gitignore
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/conf/machine/machine.conf b/scripts/lib/bsp/substrate/target/arch/mips64/conf/machine/machine.conf
new file mode 100644
index 0000000000..3afc5e093e
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/conf/machine/machine.conf
@@ -0,0 +1,39 @@
+# yocto-bsp-filename {{=machine}}.conf
+#@TYPE: Machine
+#@NAME: {{=machine}}
+
+#@DESCRIPTION: Machine configuration for {{=machine}} systems
+
+require conf/machine/include/tune-mips64.inc
+
+MACHINE_FEATURES = "pci ext2 ext3 serial"
+
+KERNEL_IMAGETYPE = "vmlinux"
+KERNEL_ALT_IMAGETYPE = "vmlinux.bin"
+KERNEL_IMAGE_STRIP_EXTRA_SECTIONS = ".comment"
+
+{{ if kernel_choice == "custom": preferred_kernel = "linux-yocto-custom" }}
+{{ if kernel_choice == "linux-yocto-dev": preferred_kernel = "linux-yocto-dev" }}
+{{ if kernel_choice == "custom" or kernel_choice == "linux-yocto-dev" : }}
+PREFERRED_PROVIDER_virtual/kernel ?= "{{=preferred_kernel}}"
+
+{{ if kernel_choice != "custom" and kernel_choice != "linux-yocto-dev": preferred_kernel = kernel_choice.split('_')[0] }}
+{{ if kernel_choice != "custom" and kernel_choice != "linux-yocto-dev": preferred_kernel_version = kernel_choice.split('_')[1] }}
+{{ if kernel_choice != "custom" and kernel_choice != "linux-yocto-dev": }}
+PREFERRED_PROVIDER_virtual/kernel ?= "{{=preferred_kernel}}"
+PREFERRED_VERSION_{{=preferred_kernel}} ?= "{{=preferred_kernel_version}}%"
+
+{{ input type:"boolean" name:"xserver" prio:"50" msg:"Do you need support for X? (y/n)" default:"y" }}
+{{ if xserver == "y": }}
+PREFERRED_PROVIDER_virtual/xserver ?= "xserver-xorg"
+XSERVER ?= "xserver-xorg \
+ xf86-input-evdev \
+ xf86-video-fbdev"
+
+SERIAL_CONSOLE = "115200 ttyS0"
+USE_VT ?= "0"
+
+MACHINE_EXTRA_RRECOMMENDS = " kernel-modules"
+
+IMAGE_FSTYPES ?= "jffs2 tar.bz2"
+JFFS2_ERASEBLOCK = "0x10000"
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files.noinstall b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files.noinstall
new file mode 100644
index 0000000000..1e0d92c55c
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{ if kernel_choice != "custom": }} files
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-preempt-rt.scc b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-preempt-rt.scc
new file mode 100644
index 0000000000..176190cd2e
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -0,0 +1,10 @@
+# yocto-bsp-filename {{=machine}}-preempt-rt.scc
+define KMACHINE {{=machine}}
+define KTYPE preempt-rt
+define KARCH mips
+
+include {{=map_preempt_rt_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
+{{ if need_new_kbranch == "y": }}
+branch {{=machine}}
+
+include {{=machine}}.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-standard.scc b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-standard.scc
new file mode 100644
index 0000000000..f05dd851d2
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-standard.scc
@@ -0,0 +1,10 @@
+# yocto-bsp-filename {{=machine}}-standard.scc
+define KMACHINE {{=machine}}
+define KTYPE standard
+define KARCH mips
+
+include {{=map_standard_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
+{{ if need_new_kbranch == "y": }}
+branch {{=machine}}
+
+include {{=machine}}.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-tiny.scc b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-tiny.scc
new file mode 100644
index 0000000000..f71c775397
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-tiny.scc
@@ -0,0 +1,10 @@
+# yocto-bsp-filename {{=machine}}-tiny.scc
+define KMACHINE {{=machine}}
+define KTYPE tiny
+define KARCH mips
+
+include {{=map_tiny_kbranch(need_new_kbranch, new_kbranch, existing_kbranch)}}
+{{ if need_new_kbranch == "y": }}
+branch {{=machine}}
+
+include {{=machine}}.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-config.cfg
new file mode 100644
index 0000000000..69efdcc759
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-config.cfg
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-config.cfg
\ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-features.scc b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-features.scc
new file mode 100644
index 0000000000..85be26de97
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-features.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-features.scc
\ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-patches.scc
new file mode 100644
index 0000000000..4c59daac46
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine-user-patches.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-patches.scc
\ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.cfg b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.cfg
new file mode 100644
index 0000000000..0cc906bbf0
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.cfg
@@ -0,0 +1,66 @@
+# yocto-bsp-filename {{=machine}}.cfg
+#SOC
+CONFIG_CAVIUM_OCTEON_SOC=y
+CONFIG_CAVIUM_CN63XXP1=y
+CONFIG_CAVIUM_OCTEON_CVMSEG_SIZE=2
+
+#Kernel
+CONFIG_SMP=y
+CONFIG_NR_CPUS=32
+#Executable file formats
+CONFIG_MIPS32_COMPAT=y
+CONFIG_MIPS32_O32=y
+CONFIG_MIPS32_N32=y
+
+
+#PCI
+CONFIG_PCI=y
+CONFIG_PCI_MSI=y
+
+#I2C
+CONFIG_I2C=y
+CONFIG_I2C_OCTEON=y
+
+CONFIG_HW_RANDOM_OCTEON=y
+
+#SPI
+CONFIG_SPI=y
+CONFIG_SPI_OCTEON=y
+
+#Misc
+CONFIG_EEPROM_AT24=y
+CONFIG_EEPROM_AT25=y
+CONFIG_OCTEON_WDT=y
+
+CONFIG_STAGING=y
+
+#Ethernet
+CONFIG_OCTEON_ETHERNET=y
+CONFIG_OCTEON_MGMT_ETHERNET=y
+CONFIG_MDIO_OCTEON=y
+
+#PHY
+CONFIG_MARVELL_PHY=y
+CONFIG_BROADCOM_PHY=y
+CONFIG_BCM87XX_PHY=y
+
+
+#USB
+CONFIG_USB=y
+CONFIG_OCTEON_USB=y
+CONFIG_USB_OCTEON_EHCI=y
+CONFIG_USB_OCTEON_OHCI=y
+CONFIG_USB_OCTEON2_COMMON=y
+
+CONFIG_MTD=y
+CONFIG_MTD_BLOCK=y
+CONFIG_MTD_CFI=y
+CONFIG_MTD_CFI_AMDSTD=y
+CONFIG_MTD_CMDLINE_PARTS=y
+
+CONFIG_SERIAL_8250=y
+CONFIG_SERIAL_8250_CONSOLE=y
+CONFIG_SERIAL_8250_NR_UARTS=2
+CONFIG_SERIAL_8250_RUNTIME_UARTS=2
+CONFIG_SERIAL_8250_DW=y
+
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.scc b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.scc
new file mode 100644
index 0000000000..f39dc3edf1
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/files/machine.scc
@@ -0,0 +1,8 @@
+# yocto-bsp-filename {{=machine}}.scc
+kconf hardware {{=machine}}.cfg
+
+include cfg/usb-mass-storage.scc
+include cfg/fs/vfat.scc
+
+kconf hardware {{=machine}}-user-config.cfg
+include {{=machine}}-user-patches.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall
new file mode 100644
index 0000000000..8903a823aa
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/kernel-list.noinstall
@@ -0,0 +1,5 @@
+{{ if kernel_choice != "custom": }}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.17) kernel? (y/n)" default:"y"}}
+
+{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.17"}}
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend
index 25c87a85ac..2fa6231cbf 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-dev": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
new file mode 100644
index 0000000000..35b0958582
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.10": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-preempt-rt.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
+#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+#LINUX_VERSION = "3.10.9"
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
new file mode 100644
index 0000000000..5f8db03c64
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-preempt-rt.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
+#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+#LINUX_VERSION = "3.14"
diff --git a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
index bc6968d832..f04dd0cce4 100644
--- a/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
new file mode 100644
index 0000000000..c7e7989821
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.14" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
new file mode 100644
index 0000000000..142e38b3bc
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.17" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.10.bbappend
new file mode 100644
index 0000000000..badb3aa239
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.10.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.10": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "19f7e43b54aef08d58135ed2a897d77b624b320a"
+#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "459165c1dd61c4e843c36e6a1abeb30949a20ba7"
+#LINUX_VERSION = "3.10.9" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend
new file mode 100644
index 0000000000..1e1cc51315
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.14" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.17.bbappend
new file mode 100644
index 0000000000..72af58afc1
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_3.17.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.17" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/{{=machine}}.conf b/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf
index 78fb5db22b..48fcb2bbbe 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/{{=machine}}.conf
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/conf/machine/machine.conf
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.conf
#@TYPE: Machine
#@NAME: {{=machine}}
@@ -16,6 +17,9 @@ TARGET_FPU = ""
{{ input type:"choice" val:"tune_ppce500v2" msg:"ppce500v2 tuning optimizations" }}
{{ input type:"choice" val:"tune_ppce5500" msg:"ppce5500 tuning optimizations" }}
{{ input type:"choice" val:"tune_ppce6500" msg:"ppce6500 tuning optimizations" }}
+{{ input type:"choice" val:"tune_power5" msg:"power5 tuning optimizations" }}
+{{ input type:"choice" val:"tune_power6" msg:"power6 tuning optimizations" }}
+{{ input type:"choice" val:"tune_power7" msg:"power7 tuning optimizations" }}
{{ if tunefile == "tune_ppc476": }}
include conf/machine/include/tune-ppc476.inc
{{ if tunefile == "tune_ppc603e": }}
@@ -36,6 +40,12 @@ include conf/machine/include/tune-ppce500v2.inc
include conf/machine/include/tune-ppce5500.inc
{{ if tunefile == "tune_ppce6500": }}
include conf/machine/include/tune-ppce6500.inc
+{{ if tunefile == "tune_power5": }}
+include conf/machine/include/tune-power5.inc
+{{ if tunefile == "tune_power6": }}
+include conf/machine/include/tune-power6.inc
+{{ if tunefile == "tune_power7": }}
+include conf/machine/include/tune-power7.inc
KERNEL_IMAGETYPE = "uImage"
@@ -72,3 +82,6 @@ UBOOT_ENTRYPOINT = "{{=uboot_entrypoint}}"
KERNEL_DEVICETREE = "${S}/arch/powerpc/boot/dts/{{=kernel_devicetree}}"
MACHINE_EXTRA_RRECOMMENDS = " kernel-modules"
+
+IMAGE_FSTYPES ?= "jffs2 tar.bz2"
+JFFS2_ERASEBLOCK = "0x4000"
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files.noinstall b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files.noinstall
new file mode 100644
index 0000000000..1e0d92c55c
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{ if kernel_choice != "custom": }} files
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-preempt-rt.scc
index 1da7b0c892..40c9267831 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-preempt-rt.scc
define KMACHINE {{=machine}}
define KTYPE preempt-rt
define KARCH powerpc
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-standard.scc
index 53a74a6ca2..7a1d35be1e 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-standard.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-standard.scc
define KMACHINE {{=machine}}
define KTYPE standard
define KARCH powerpc
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-tiny.scc
index 4ca6224774..1bf94b2d05 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-tiny.scc
define KMACHINE {{=machine}}
define KTYPE tiny
define KARCH powerpc
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-config.cfg
new file mode 100644
index 0000000000..47489e44e9
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-config.cfg
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-config.cfg
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-features.scc b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-features.scc
new file mode 100644
index 0000000000..582759e612
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-features.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-features.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-patches.scc
new file mode 100644
index 0000000000..97f747fa07
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine-user-patches.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-patches.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine.cfg
index 9f37d07553..5bfe1fe4b0 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine.cfg
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.cfg
..........................................................................
. WARNING
.
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine.scc
index c9fd468180..7aac8b0801 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/files/machine.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.scc
kconf hardware {{=machine}}.cfg
include cfg/usb-mass-storage.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
index a04e6c7852..8903a823aa 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.14) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.17) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.14"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.17"}}
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend
new file mode 100644
index 0000000000..2fa6231cbf
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -0,0 +1,26 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-dev": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Would you like SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
new file mode 100644
index 0000000000..39bc72d9c4
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.10": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-preempt-rt.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
+#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+#LINUX_VERSION = "3.10.9" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
index 00c8c68933..7a2544617f 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -29,4 +30,4 @@ SRC_URI += "file://{{=machine}}-preempt-rt.scc \
# the appropriate changes committed to the upstream linux-yocto repo
#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
-#LINUX_VERSION = "3.10.9" \ No newline at end of file
+#LINUX_VERSION = "3.14" \ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
new file mode 100644
index 0000000000..f04dd0cce4
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.10": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.10.9"
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
index d221d5f2a4..471ccbcc3e 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
new file mode 100644
index 0000000000..fb4253271a
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.17"
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.10.bbappend
index a61f5ccb80..15b4b973c9 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.14.bbappend
index aebda9b3a5..e688384020 100644
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.17.bbappend
new file mode 100644
index 0000000000..2f1933055d
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_3.17.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.17"
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/conf/machine/{{=machine}}.conf b/scripts/lib/bsp/substrate/target/arch/qemu/conf/machine/machine.conf
index d53b6f57b7..67e1cbd997 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/conf/machine/{{=machine}}.conf
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/conf/machine/machine.conf
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.conf
#@TYPE: Machine
#@NAME: {{=machine}}
@@ -26,6 +27,7 @@ PREFERRED_PROVIDER_virtual/libgles2 ?= "mesa"
{{ input type:"choice" val:"arm" msg:"ARM (32-bit)" }}
{{ input type:"choice" val:"powerpc" msg:"PowerPC (32-bit)" }}
{{ input type:"choice" val:"mips" msg:"MIPS (32-bit)" }}
+{{ input type:"choice" val:"mips64" msg:"MIPS64 (64-bit)" }}
{{ if qemuarch == "i386": }}
require conf/machine/include/qemu.inc
require conf/machine/include/tune-i586.inc
@@ -42,6 +44,9 @@ require conf/machine/include/tune-ppc7400.inc
{{ if qemuarch == "mips": }}
require conf/machine/include/qemu.inc
require conf/machine/include/tune-mips32.inc
+{{ if qemuarch == "mips64": }}
+require conf/machine/include/qemu.inc
+require conf/machine/include/tune-mips64.inc
{{ if qemuarch == "i386" or qemuarch == "x86_64": }}
MACHINE_FEATURES += "x86"
@@ -62,7 +67,7 @@ SERIAL_CONSOLE = "115200 ttyAMA0"
KERNEL_IMAGETYPE = "vmlinux"
SERIAL_CONSOLE = "115200 ttyS0"
-{{ if qemuarch == "mips": }}
+{{ if qemuarch == "mips" or qemuarch == "mips64": }}
KERNEL_IMAGETYPE = "vmlinux"
KERNEL_ALT_IMAGETYPE = "vmlinux.bin"
SERIAL_CONSOLE = "115200 ttyS0"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-core/init-ifupdown/init-ifupdown/machine.noinstall b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-core/init-ifupdown/init-ifupdown/machine.noinstall
new file mode 100644
index 0000000000..b442d02d57
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-core/init-ifupdown/init-ifupdown/machine.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{=machine}}
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-core/init-ifupdown/init-ifupdown/machine/interfaces b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-core/init-ifupdown/init-ifupdown/machine/interfaces
new file mode 100644
index 0000000000..16967763e5
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-core/init-ifupdown/init-ifupdown/machine/interfaces
@@ -0,0 +1,5 @@
+# /etc/network/interfaces -- configuration file for ifup(8), ifdown(8)
+
+# The loopback interface
+auto lo
+iface lo inet loopback
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall
new file mode 100644
index 0000000000..b442d02d57
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{=machine}}
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/xorg.conf b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf
index 13519804bc..3bdde79e6f 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/xorg.conf
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf
@@ -14,7 +14,7 @@ EndSection
Section "InputDevice"
Identifier "Configured Mouse"
-{{ if qemuarch == "arm" or qemuarch == "powerpc" or qemuarch == "mips": }}
+{{ if qemuarch == "arm" or qemuarch == "powerpc" or qemuarch == "mips" or qemuarch == "mips64": }}
Driver "mouse"
{{ if qemuarch == "i386" or qemuarch == "x86_64": }}
Driver "vmmouse"
@@ -36,7 +36,7 @@ EndSection
Section "Device"
Identifier "Graphics Controller"
-{{ if qemuarch == "arm" or qemuarch == "powerpc" or qemuarch == "mips": }}
+{{ if qemuarch == "arm" or qemuarch == "powerpc" or qemuarch == "mips" or qemuarch == "mips64": }}
Driver "fbdev"
{{ if qemuarch == "i386" or qemuarch == "x86_64": }}
Driver "vmware"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files.noinstall b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files.noinstall
new file mode 100644
index 0000000000..0fb5283a8d
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{ if kernel_choice != "custom": }} files
\ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-preempt-rt.scc
index af34437d0a..6aaffb8184 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-preempt-rt.scc
define KMACHINE {{=machine}}
define KTYPE preempt-rt
define KARCH {{=qemuarch}}
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-standard.scc
index 0e20023764..d2a03ec209 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-standard.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-standard.scc
define KMACHINE {{=machine}}
define KTYPE standard
define KARCH {{=qemuarch}}
@@ -10,6 +11,8 @@ include bsp/arm-versatile-926ejs/arm-versatile-926ejs-standard
include bsp/qemu-ppc32/qemu-ppc32-standard
{{ if qemuarch == "mips": }}
include bsp/mti-malta32/mti-malta32-be-standard
+{{ if qemuarch == "mips64": }}
+include bsp/mti-malta64/mti-malta64-be-standard
{{ if need_new_kbranch == "y": }}
branch {{=machine}}
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-tiny.scc
index 10c4dac44d..6c098fed21 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-tiny.scc
define KMACHINE {{=machine}}
define KTYPE tiny
define KARCH {{=qemuarch}}
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-config.cfg
new file mode 100644
index 0000000000..69efdcc759
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-config.cfg
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-config.cfg
\ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-features.scc b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-features.scc
new file mode 100644
index 0000000000..582759e612
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-features.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-features.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-patches.scc
new file mode 100644
index 0000000000..4c59daac46
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine-user-patches.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-patches.scc
\ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.cfg b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.cfg
new file mode 100644
index 0000000000..d560784b56
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.cfg
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}.cfg
\ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.scc
index f3739be1e6..8301e05f7d 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/files/machine.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.scc
kconf hardware {{=machine}}.cfg
kconf hardware {{=machine}}-user-config.cfg
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
index a04e6c7852..8903a823aa 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.14) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.17) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.14"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.17"}}
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend
index 7599ecb0a5..be479bee67 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-dev": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -18,10 +19,10 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/qemuppc" }}
{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc/base" }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc/base" }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
@@ -35,6 +36,12 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta32" }}
+{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta64" }}
+
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
index 73b6e34839..3609787207 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -35,6 +36,12 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
new file mode 100644
index 0000000000..ce5e1a09b3
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -0,0 +1,62 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "arm": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "arm": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "powerpc": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "powerpc": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/qemuppc" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "mips": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-preempt-rt.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
+#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+#LINUX_VERSION = "3.14"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
index da4e61ef83..1ee148fd0c 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -35,6 +36,12 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
new file mode 100644
index 0000000000..0175107899
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
@@ -0,0 +1,62 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "arm": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "arm": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"arm" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "powerpc": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "powerpc": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/common-pc" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "mips": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "0143c6ebb4a2d63b241df5f608b19f483f7eb9e0"
+#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "8f55bee2403176a50cc0dd41811aa60fcf07243c"
+#LINUX_VERSION = "3.17"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.4.bbappend b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.4.bbappend
index 013883ffeb..5bfb877eb9 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.4.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_3.4.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.4": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -35,6 +36,12 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.10.bbappend
index 392ace6694..974e291afd 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -18,10 +19,10 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/qemuppc" }}
{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc/base" }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc/base" }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
@@ -35,6 +36,12 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta32" }}
+{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta64" }}
+
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.14.bbappend
index 2cc9b87cf2..626019c40a 100644
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
@@ -18,10 +19,10 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/qemuppc" }}
{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc/base" }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc/base" }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
@@ -35,6 +36,12 @@ COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta32" }}
+{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta64" }}
+
{{ if need_new_kbranch == "n": }}
KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.17.bbappend
new file mode 100644
index 0000000000..b81f272c38
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_3.17.bbappend
@@ -0,0 +1,62 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "arm": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base your new BSP branch on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "arm": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose an existing machine branch to use for this BSP:" default:"standard/arm-versatile-926ejs" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "powerpc": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "powerpc": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"powerpc" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/qemuppc" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "i386": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "i386": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "x86_64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "x86_64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"x86_64" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta32" }}
+
+{{ if need_new_kbranch == "n" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/mti-malta64" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "mips": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "y" and qemuarch == "mips64": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"mips64" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Would you like SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "0143c6ebb4a2d63b241df5f608b19f483f7eb9e0"
+#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "8f55bee2403176a50cc0dd41811aa60fcf07243c"
+#LINUX_VERSION = "3.17"
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg b/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/conf/machine/{{=machine}}.conf b/scripts/lib/bsp/substrate/target/arch/x86_64/conf/machine/machine.conf
index 0780af90da..e4b825104f 100644
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/conf/machine/{{=machine}}.conf
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/conf/machine/machine.conf
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.conf
#@TYPE: Machine
#@NAME: {{=machine}}
@@ -36,6 +37,8 @@ MACHINE_FEATURES += "wifi efi pcbios"
{{ input type:"choice" val:"xserver_vesa" msg:"VESA xserver support" }}
{{ input type:"choice" val:"xserver_i915" msg:"i915 xserver support" }}
{{ input type:"choice" val:"xserver_i965" msg:"i965 xserver support" }}
+{{ input type:"choice" val:"xserver_fbdev" msg:"fbdev xserver support" }}
+{{ input type:"choice" val:"xserver_modesetting" msg:"modesetting xserver support" }}
{{ if xserver == "y": }}
XSERVER ?= "${XSERVER_X86_BASE} \
${XSERVER_X86_EXT} \
@@ -45,12 +48,18 @@ XSERVER ?= "${XSERVER_X86_BASE} \
${XSERVER_X86_I915} \
{{ if xserver == "y" and xserver_choice == "xserver_i965": }}
${XSERVER_X86_I965} \
+{{ if xserver == "y" and xserver_choice == "xserver_fbdev": }}
+ ${XSERVER_X86_FBDEV} \
+{{ if xserver == "y" and xserver_choice == "xserver_modesetting": }}
+ ${XSERVER_X86_MODESETTING} \
{{ if xserver == "y": }}
"
-MACHINE_EXTRA_RRECOMMENDS += "linux-firmware v86d"
+MACHINE_EXTRA_RRECOMMENDS += "linux-firmware v86d eee-acpi-scripts"
EXTRA_OECONF_append_pn-matchbox-panel-2 = " --with-battery=acpi"
+GLIBC_ADDONS = "nptl"
+
{{ if xserver == "y" and xserver_choice == "xserver_vesa": }}
APPEND += "video=vesafb vga=0x318"
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall
new file mode 100644
index 0000000000..b442d02d57
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/machine.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{=machine}}
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf
new file mode 100644
index 0000000000..ac9a0f1bb0
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/machine/xorg.conf
@@ -0,0 +1 @@
+# yocto-bsp-filename {{ if xserver == "y": }} this
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/{{ if xserver == "y": }} xorg.conf b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/{{ if xserver == "y": }} xorg.conf
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/{{ if xserver == "y": }} xorg.conf
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend
new file mode 100644
index 0000000000..30830031ed
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/xserver-xf86-config_0.1.bbappend
@@ -0,0 +1,2 @@
+# yocto-bsp-filename {{ if xserver == "y": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend
deleted file mode 100644
index 72d991c7e5..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-graphics/xorg-xserver/{{ if xserver == "y": }} xserver-xf86-config_0.1.bbappend
+++ /dev/null
@@ -1 +0,0 @@
-FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files.noinstall b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files.noinstall
new file mode 100644
index 0000000000..1e0d92c55c
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files.noinstall
@@ -0,0 +1 @@
+# yocto-bsp-dirname {{ if kernel_choice != "custom": }} files
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-preempt-rt.scc
index c9882590a8..fd5320ba1e 100644
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-preempt-rt.scc
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-preempt-rt.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-preempt-rt.scc
define KMACHINE {{=machine}}
define KTYPE preempt-rt
define KARCH x86_64
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-standard.scc
index e500bad4b2..569f967c6a 100644
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-standard.scc
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-standard.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-standard.scc
define KMACHINE {{=machine}}
define KTYPE standard
define KARCH x86_64
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-tiny.scc
index e8e3c1c04d..fb21432a4f 100644
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-tiny.scc
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-tiny.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}-tiny.scc
define KMACHINE {{=machine}}
define KTYPE tiny
define KARCH x86_64
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-config.cfg
new file mode 100644
index 0000000000..47489e44e9
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-config.cfg
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-config.cfg
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-features.scc b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-features.scc
new file mode 100644
index 0000000000..582759e612
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-features.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-features.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-patches.scc
new file mode 100644
index 0000000000..97f747fa07
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine-user-patches.scc
@@ -0,0 +1 @@
+# yocto-bsp-filename {{=machine}}-user-patches.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine.cfg
index b4b82d7ca0..3290ddefe7 100644
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.cfg
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine.cfg
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.cfg
CONFIG_PRINTK=y
# Basic hardware support for the box - network, USB, PCI, sound
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine.scc
index db45140381..9b7c291a8f 100644
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}.scc
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/files/machine.scc
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{=machine}}.scc
kconf hardware {{=machine}}.cfg
include features/serial/8250.scc
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
index a04e6c7852..8903a823aa 100644
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/kernel-list.noinstall
@@ -1,5 +1,5 @@
{{ if kernel_choice != "custom": }}
-{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.14) kernel? (y/n)" default:"y"}}
+{{ input type:"boolean" name:"use_default_kernel" prio:"10" msg:"Would you like to use the default (3.17) kernel? (y/n)" default:"y"}}
{{ if kernel_choice != "custom" and use_default_kernel == "n": }}
-{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.14"}}
+{{ input type:"choicelist" name:"kernel_choice" gen:"bsp.kernel.kernels" prio:"10" msg:"Please choose the kernel to use in this BSP:" default:"linux-yocto_3.17"}}
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend
new file mode 100644
index 0000000000..2fa6231cbf
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -0,0 +1,26 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-dev": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Would you like SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
new file mode 100644
index 0000000000..39bc72d9c4
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.10.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.10": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-preempt-rt.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
+#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+#LINUX_VERSION = "3.10.9"
\ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
new file mode 100644
index 0000000000..7a2544617f
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-rt_3.14.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-rt_3.14": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-preempt-rt.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
+#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
+#LINUX_VERSION = "3.14"
\ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
new file mode 100644
index 0000000000..f04dd0cce4
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.10.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.10": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.10.9"
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
index d221d5f2a4..471ccbcc3e 100644
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.14": }} linux-yocto-tiny_3.14.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.14.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.14": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
new file mode 100644
index 0000000000..fb4253271a
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_3.17.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto-tiny_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-tiny.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.17"
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.10.bbappend
index 162348114f..e21a333fa4 100644
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.10": }} linux-yocto_3.10.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.10.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.10": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.14.bbappend
index 81e528bc33..ca0b497ff4 100644
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto_3.14": }} linux-yocto_3.14.bbappend
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.14.bbappend
@@ -1,3 +1,4 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.14": }} this
FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
PR := "${PR}.1"
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.17.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.17.bbappend
new file mode 100644
index 0000000000..08aa00a15c
--- /dev/null
+++ b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_3.17.bbappend
@@ -0,0 +1,33 @@
+# yocto-bsp-filename {{ if kernel_choice == "linux-yocto_3.17": }} this
+FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
+
+PR := "${PR}.1"
+
+COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
+
+{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
+
+{{ if need_new_kbranch == "y": }}
+{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+
+{{ if need_new_kbranch == "n": }}
+{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard:standard/common-pc-64" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/common-pc-64/base" }}
+
+{{ if need_new_kbranch == "n": }}
+KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
+
+{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
+{{ if smp == "y": }}
+KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
+
+SRC_URI += "file://{{=machine}}-standard.scc \
+ file://{{=machine}}-user-config.cfg \
+ file://{{=machine}}-user-patches.scc \
+ file://{{=machine}}-user-features.scc \
+ "
+
+# uncomment and replace these SRCREVs with the real commit ids once you've had
+# the appropriate changes committed to the upstream linux-yocto repo
+#SRCREV_machine_pn-linux-yocto_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
+#SRCREV_meta_pn-linux-yocto_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
+#LINUX_VERSION = "3.17"
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-config.cfg
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-features.scc
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice != "custom": }} files/{{=machine}}-user-patches.scc
+++ /dev/null
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend
deleted file mode 100644
index 25c87a85ac..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-dev": }} linux-yocto-dev.bbappend
+++ /dev/null
@@ -1,25 +0,0 @@
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" nameappend:"i386" gen:"bsp.kernel.all_branches" branches_base:"standard" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Would you like SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-standard.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend
deleted file mode 100644
index 00c8c68933..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-rt_3.10": }} linux-yocto-rt_3.10.bbappend
+++ /dev/null
@@ -1,32 +0,0 @@
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/preempt-rt" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/preempt-rt/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-preempt-rt.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# uncomment and replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-rt_{{=machine}} ?= "f35992f80c81dc5fa1a97165dfd5cbb84661f7cb"
-#SRCREV_meta_pn-linux-yocto-rt_{{=machine}} ?= "1b534b2f8bbe9b8a773268cfa30a4850346f6f5f"
-#LINUX_VERSION = "3.10.9"
\ No newline at end of file
diff --git a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend b/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend
deleted file mode 100644
index bc6968d832..0000000000
--- a/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/{{ if kernel_choice == "linux-yocto-tiny_3.10": }} linux-yocto-tiny_3.10.bbappend
+++ /dev/null
@@ -1,32 +0,0 @@
-FILESEXTRAPATHS_prepend := "${THISDIR}/files:"
-
-PR := "${PR}.1"
-
-COMPATIBLE_MACHINE_{{=machine}} = "{{=machine}}"
-
-{{ input type:"boolean" name:"need_new_kbranch" prio:"20" msg:"Do you need a new machine branch for this BSP (the alternative is to re-use an existing branch)? [y/n]" default:"y" }}
-
-{{ if need_new_kbranch == "y": }}
-{{ input type:"choicelist" name:"new_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-{{ input type:"choicelist" name:"existing_kbranch" gen:"bsp.kernel.all_branches" branches_base:"standard/tiny" prio:"20" msg:"Please choose a machine branch to base this BSP on:" default:"standard/tiny/base" }}
-
-{{ if need_new_kbranch == "n": }}
-KBRANCH_{{=machine}} = "{{=existing_kbranch}}"
-
-{{ input type:"boolean" name:"smp" prio:"30" msg:"Do you need SMP support? (y/n)" default:"y"}}
-{{ if smp == "y": }}
-KERNEL_FEATURES_append_{{=machine}} += " cfg/smp.scc"
-
-SRC_URI += "file://{{=machine}}-tiny.scc \
- file://{{=machine}}-user-config.cfg \
- file://{{=machine}}-user-patches.scc \
- file://{{=machine}}-user-features.scc \
- "
-
-# uncomment and replace these SRCREVs with the real commit ids once you've had
-# the appropriate changes committed to the upstream linux-yocto repo
-#SRCREV_machine_pn-linux-yocto-tiny_{{=machine}} ?= "840bb8c059418c4753415df56c9aff1c0d5354c8"
-#SRCREV_meta_pn-linux-yocto-tiny_{{=machine}} ?= "4fd76cc4f33e0afd8f906b1e8f231b6d13b6c993"
-#LINUX_VERSION = "3.10.9"
diff --git a/scripts/lib/bsp/tags.py b/scripts/lib/bsp/tags.py
index 6d5feb0a59..3719427884 100644
--- a/scripts/lib/bsp/tags.py
+++ b/scripts/lib/bsp/tags.py
@@ -25,11 +25,13 @@
# Tom Zanussi <tom.zanussi (at] intel.com>
#
-OPEN_TAG = "{{"
-CLOSE_TAG = "}}"
-ASSIGN_TAG = "{{="
-INPUT_TAG = "input"
-IF_TAG = "if"
+OPEN_TAG = "{{"
+CLOSE_TAG = "}}"
+ASSIGN_TAG = "{{="
+INPUT_TAG = "input"
+IF_TAG = "if"
+FILENAME_TAG = "yocto-bsp-filename"
+DIRNAME_TAG = "yocto-bsp-dirname"
INDENT_STR = " "
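The two new tag constants above back the "# yocto-bsp-filename" and "# yocto-bsp-dirname" comment lines that this patch adds at the top of the template files, so a template can keep a plain on-disk name while still declaring the (possibly conditional) name it should be installed under. A minimal sketch of how such a tag line could be recognised; parse_name_tag() is a hypothetical helper written for illustration, not the actual engine code:

    # Illustrative sketch only; parse_name_tag() is not part of the patch.
    # It detects a leading naming-tag comment and returns the tag plus the
    # expression that names the generated file or directory.
    FILENAME_TAG = "yocto-bsp-filename"
    DIRNAME_TAG = "yocto-bsp-dirname"

    def parse_name_tag(first_line):
        line = first_line.lstrip("#").strip()
        for tag in (FILENAME_TAG, DIRNAME_TAG):
            if line.startswith(tag):
                return tag, line[len(tag):].strip()
        return None

    print(parse_name_tag('# yocto-bsp-filename {{ if xserver == "y": }} this'))
    # ('yocto-bsp-filename', '{{ if xserver == "y": }} this')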
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
new file mode 100644
index 0000000000..3f8158e24a
--- /dev/null
+++ b/scripts/lib/devtool/__init__.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+
+# Development tool - utility functions for plugins
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+
+import os
+import sys
+import subprocess
+import logging
+
+logger = logging.getLogger('devtool')
+
+def exec_build_env_command(init_path, builddir, cmd, watch=False, **options):
+ import bb
+ if not 'cwd' in options:
+ options["cwd"] = builddir
+ if init_path:
+ logger.debug('Executing command: "%s" using init path %s' % (cmd, init_path))
+ init_prefix = '. %s %s > /dev/null && ' % (init_path, builddir)
+ else:
+ logger.debug('Executing command "%s"' % cmd)
+ init_prefix = ''
+ if watch:
+ if sys.stdout.isatty():
+ # Fool bitbake into thinking it's outputting to a terminal (because it is, indirectly)
+ cmd = 'script -q -c "%s" /dev/null' % cmd
+ return exec_watch('%s%s' % (init_prefix, cmd), **options)
+ else:
+ return bb.process.run('%s%s' % (init_prefix, cmd), **options)
+
+def exec_watch(cmd, **options):
+ if isinstance(cmd, basestring) and not "shell" in options:
+ options["shell"] = True
+
+ process = subprocess.Popen(
+ cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options
+ )
+
+ buf = ''
+ while True:
+ out = process.stdout.read(1)
+ if out:
+ sys.stdout.write(out)
+ sys.stdout.flush()
+ buf += out
+ elif out == '' and process.poll() != None:
+ break
+ return buf
+
+def setup_tinfoil():
+ import scriptpath
+ bitbakepath = scriptpath.add_bitbake_lib_path()
+ if not bitbakepath:
+ logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
+ sys.exit(1)
+
+ import bb.tinfoil
+ import logging
+ tinfoil = bb.tinfoil.Tinfoil()
+ tinfoil.prepare(False)
+ tinfoil.logger.setLevel(logging.WARNING)
+ return tinfoil
+
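The helpers above are what the command plugins in the following files build on: exec_build_env_command() sources the build environment init script before running a command (optionally streaming its output through exec_watch()), and setup_tinfoil() gives a plugin read access to bitbake's configuration. A hedged usage sketch; the init script and build directory paths below are placeholders, not anything defined by this patch:

    # Hypothetical usage only; the two paths are placeholders.
    from devtool import exec_build_env_command

    init_path = '/path/to/poky/oe-init-build-env'   # assumed init script
    builddir = '/path/to/build'                     # assumed build directory

    # Non-watched call: returns (stdout, stderr) via bb.process.run()
    stdout, _ = exec_build_env_command(init_path, builddir, 'bitbake -e busybox')

    # Watched call: streams output to the terminal while also collecting it
    log = exec_build_env_command(init_path, builddir, 'bitbake busybox', watch=True)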
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
new file mode 100644
index 0000000000..bd23e95dd9
--- /dev/null
+++ b/scripts/lib/devtool/deploy.py
@@ -0,0 +1,100 @@
+# Development tool - deploy/undeploy command plugin
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import subprocess
+import logging
+from devtool import exec_build_env_command
+
+logger = logging.getLogger('devtool')
+
+def plugin_init(pluginlist):
+ pass
+
+
+def deploy(args, config, basepath, workspace):
+ import re
+ from devtool import exec_build_env_command
+
+ if not args.recipename in workspace:
+ logger.error("no recipe named %s in your workspace" % args.recipename)
+ return -1
+ try:
+ host, destdir = args.target.split(':')
+ except ValueError:
+ destdir = '/'
+ else:
+ args.target = host
+
+ deploy_dir = os.path.join(basepath, 'target_deploy', args.target)
+ deploy_file = os.path.join(deploy_dir, args.recipename + '.list')
+
+ if os.path.exists(deploy_file):
+ undeploy(args)
+
+ stdout, stderr = exec_build_env_command(config.init_path, basepath, 'bitbake -e %s' % args.recipename, shell=True)
+ recipe_outdir = re.search(r'^D="(.*)"', stdout, re.MULTILINE).group(1)
+ ret = subprocess.call('scp -qr %s/* %s:%s' % (recipe_outdir, args.target, destdir), shell=True)
+ if ret != 0:
+ return ret
+
+ logger.info('Successfully deployed %s' % recipe_outdir)
+
+ if not os.path.exists(deploy_dir):
+ os.makedirs(deploy_dir)
+
+ files_list = []
+ for root, _, files in os.walk(recipe_outdir):
+ for filename in files:
+ filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
+ files_list.append(os.path.join(destdir, filename))
+
+ with open(deploy_file, 'w') as fobj:
+ fobj.write('\n'.join(files_list))
+
+ return 0
+
+def undeploy(args, config, basepath, workspace):
+
+ deploy_file = os.path.join(basepath, 'target_deploy', args.target, args.recipename + '.list')
+ if not os.path.exists(deploy_file):
+ logger.error('%s has not been deployed' % args.recipename)
+ return -1
+
+ ret = subprocess.call("scp -q %s %s:/tmp" % (deploy_file, args.target), shell=True)
+ if ret != 0:
+        logger.error('Failed to copy %s to %s' % (deploy_file, args.target))
+ return -1
+
+ ret = subprocess.call("ssh %s 'xargs -n1 rm -f </tmp/%s'" % (args.target, os.path.basename(deploy_file)), shell=True)
+ if ret == 0:
+ logger.info('Successfully undeployed %s' % args.recipename)
+ os.remove(deploy_file)
+
+ return ret
+
+
+def register_commands(subparsers, context):
+ parser_deploy = subparsers.add_parser('deploy-target', help='Deploy recipe output files to live target machine')
+ parser_deploy.add_argument('recipename', help='Recipe to deploy')
+ parser_deploy.add_argument('target', help='Live target machine running an ssh server: user@hostname[:destdir]')
+ parser_deploy.set_defaults(func=deploy)
+
+ parser_undeploy = subparsers.add_parser('undeploy-target', help='Undeploy recipe output files in live target machine')
+ parser_undeploy.add_argument('recipename', help='Recipe to undeploy')
+ parser_undeploy.add_argument('target', help='Live target machine running an ssh server: user@hostname')
+ parser_undeploy.set_defaults(func=undeploy)
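register_commands() above follows the plugin convention the devtool front-end is expected to use: each plugin registers its subparsers and points func at its handler, which is later invoked with (args, config, basepath, workspace). A minimal sketch of that wiring, with the config, basepath and workspace objects left as placeholders for what the real script would construct:

    # Sketch of the expected wiring; config, basepath and workspace are
    # placeholders supplied by the real devtool front-end.
    import argparse
    from devtool import deploy as deploy_plugin

    parser = argparse.ArgumentParser(prog='devtool')
    subparsers = parser.add_subparsers()
    deploy_plugin.register_commands(subparsers, context=None)

    args = parser.parse_args(['deploy-target', 'busybox', 'root@192.168.7.2:/'])
    # The front-end would then dispatch:
    #   args.func(args, config, basepath, workspace)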
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
new file mode 100644
index 0000000000..69bb228487
--- /dev/null
+++ b/scripts/lib/devtool/standard.py
@@ -0,0 +1,545 @@
+# Development tool - standard commands plugin
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import re
+import shutil
+import glob
+import tempfile
+import logging
+import argparse
+from devtool import exec_build_env_command, setup_tinfoil
+
+logger = logging.getLogger('devtool')
+
+def plugin_init(pluginlist):
+ pass
+
+
+def add(args, config, basepath, workspace):
+ import bb
+ import oe.recipeutils
+
+ if args.recipename in workspace:
+ logger.error("recipe %s is already in your workspace" % args.recipename)
+ return -1
+
+ reason = oe.recipeutils.validate_pn(args.recipename)
+ if reason:
+ logger.error(reason)
+ return -1
+
+ srctree = os.path.abspath(args.srctree)
+ appendpath = os.path.join(config.workspace_path, 'appends')
+ if not os.path.exists(appendpath):
+ os.makedirs(appendpath)
+
+ recipedir = os.path.join(config.workspace_path, 'recipes', args.recipename)
+ bb.utils.mkdirhier(recipedir)
+ if args.version:
+ if '_' in args.version or ' ' in args.version:
+ logger.error('Invalid version string "%s"' % args.version)
+ return -1
+ bp = "%s_%s" % (args.recipename, args.version)
+ else:
+ bp = args.recipename
+ recipefile = os.path.join(recipedir, "%s.bb" % bp)
+ if sys.stdout.isatty():
+ color = 'always'
+ else:
+ color = args.color
+ stdout, stderr = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create -o %s %s' % (color, recipefile, srctree))
+ logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
+
+ _add_md5(config, args.recipename, recipefile)
+
+ initial_rev = None
+ if os.path.exists(os.path.join(srctree, '.git')):
+ (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
+ initial_rev = stdout.rstrip()
+
+ appendfile = os.path.join(appendpath, '%s.bbappend' % args.recipename)
+ with open(appendfile, 'w') as f:
+ f.write('inherit externalsrc\n')
+ f.write('EXTERNALSRC = "%s"\n' % srctree)
+ if initial_rev:
+ f.write('\n# initial_rev: %s\n' % initial_rev)
+
+ _add_md5(config, args.recipename, appendfile)
+
+ return 0
+
+
+def _get_recipe_file(cooker, pn):
+ import oe.recipeutils
+ recipefile = oe.recipeutils.pn_to_recipe(cooker, pn)
+ if not recipefile:
+ skipreasons = oe.recipeutils.get_unavailable_reasons(cooker, pn)
+ if skipreasons:
+ logger.error('\n'.join(skipreasons))
+ else:
+ logger.error("Unable to find any recipe file matching %s" % pn)
+ return recipefile
+
+
+def extract(args, config, basepath, workspace):
+ import bb
+ import oe.recipeutils
+
+ tinfoil = setup_tinfoil()
+
+ recipefile = _get_recipe_file(tinfoil.cooker, args.recipename)
+ if not recipefile:
+ # Error already logged
+ return -1
+ rd = oe.recipeutils.parse_recipe(recipefile, tinfoil.config_data)
+
+ srctree = os.path.abspath(args.srctree)
+ initial_rev = _extract_source(srctree, args.keep_temp, args.branch, rd)
+ if initial_rev:
+ return 0
+ else:
+ return -1
+
+
+def _extract_source(srctree, keep_temp, devbranch, d):
+ import bb.event
+
+ def eventfilter(name, handler, event, d):
+ if name == 'base_eventhandler':
+ return True
+ else:
+ return False
+
+ if hasattr(bb.event, 'set_eventfilter'):
+ bb.event.set_eventfilter(eventfilter)
+
+ pn = d.getVar('PN', True)
+
+ if pn == 'perf':
+ logger.error("The perf recipe does not actually check out source and thus cannot be supported by this tool")
+ return None
+
+ if 'work-shared' in d.getVar('S', True):
+ logger.error("The %s recipe uses a shared workdir which this tool does not currently support" % pn)
+ return None
+
+ if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC', True):
+ logger.error("externalsrc is currently enabled for the %s recipe. This prevents the normal do_patch task from working. You will need to disable this first." % pn)
+ return None
+
+ if os.path.exists(srctree):
+ if not os.path.isdir(srctree):
+ logger.error("output path %s exists and is not a directory" % srctree)
+ return None
+ elif os.listdir(srctree):
+ logger.error("output path %s already exists and is non-empty" % srctree)
+ return None
+
+ # Prepare for shutil.move later on
+ bb.utils.mkdirhier(srctree)
+ os.rmdir(srctree)
+
+ initial_rev = None
+ tempdir = tempfile.mkdtemp(prefix='devtool')
+ try:
+ crd = d.createCopy()
+ # Make a subdir so we guard against WORKDIR==S
+ workdir = os.path.join(tempdir, 'workdir')
+ crd.setVar('WORKDIR', workdir)
+ crd.setVar('T', os.path.join(tempdir, 'temp'))
+
+ # FIXME: This is very awkward. Unfortunately it's not currently easy to properly
+ # execute tasks outside of bitbake itself, until then this has to suffice if we
+ # are to handle e.g. linux-yocto's extra tasks
+ executed = []
+ def exec_task_func(func, report):
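+            # Walk the task's 'deps' varflag recursively so that any dependent tasks
+            # (e.g. linux-yocto's extra tasks) are executed first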
+ if not func in executed:
+ deps = crd.getVarFlag(func, 'deps')
+ if deps:
+ for taskdepfunc in deps:
+ exec_task_func(taskdepfunc, True)
+ if report:
+ logger.info('Executing %s...' % func)
+ fn = d.getVar('FILE', True)
+ localdata = bb.build._task_data(fn, func, crd)
+ bb.build.exec_func(func, localdata)
+ executed.append(func)
+
+ logger.info('Fetching %s...' % pn)
+ exec_task_func('do_fetch', False)
+ logger.info('Unpacking...')
+ exec_task_func('do_unpack', False)
+ srcsubdir = crd.getVar('S', True)
+ if srcsubdir != workdir and os.path.dirname(srcsubdir) != workdir:
+ # Handle if S is set to a subdirectory of the source
+ srcsubdir = os.path.join(workdir, os.path.relpath(srcsubdir, workdir).split(os.sep)[0])
+
+ patchdir = os.path.join(srcsubdir, 'patches')
+ haspatches = False
+ if os.path.exists(patchdir):
+ if os.listdir(patchdir):
+ haspatches = True
+ else:
+ os.rmdir(patchdir)
+
+ if not bb.data.inherits_class('kernel-yocto', d):
+ if not os.listdir(srcsubdir):
+ logger.error("no source unpacked to S, perhaps the %s recipe doesn't use any source?" % pn)
+ return None
+
+ if not os.path.exists(os.path.join(srcsubdir, '.git')):
+ bb.process.run('git init', cwd=srcsubdir)
+ bb.process.run('git add .', cwd=srcsubdir)
+ bb.process.run('git commit -q -m "Initial commit from upstream at version %s"' % crd.getVar('PV', True), cwd=srcsubdir)
+
+ (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srcsubdir)
+ initial_rev = stdout.rstrip()
+
+ bb.process.run('git checkout -b %s' % devbranch, cwd=srcsubdir)
+ bb.process.run('git tag -f devtool-base', cwd=srcsubdir)
+
+ crd.setVar('PATCHTOOL', 'git')
+
+ logger.info('Patching...')
+ exec_task_func('do_patch', False)
+
+ bb.process.run('git tag -f devtool-patched', cwd=srcsubdir)
+
+ if os.path.exists(patchdir):
+ shutil.rmtree(patchdir)
+ if haspatches:
+ bb.process.run('git checkout patches', cwd=srcsubdir)
+
+ shutil.move(srcsubdir, srctree)
+ logger.info('Source tree extracted to %s' % srctree)
+ finally:
+ if keep_temp:
+ logger.info('Preserving temporary directory %s' % tempdir)
+ else:
+ shutil.rmtree(tempdir)
+ return initial_rev
+
+def _add_md5(config, recipename, filename):
+ import bb.utils
+ md5 = bb.utils.md5_file(filename)
+ with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a') as f:
+ f.write('%s|%s|%s\n' % (recipename, os.path.relpath(filename, config.workspace_path), md5))
+
+def _check_preserve(config, recipename):
+ import bb.utils
+ origfile = os.path.join(config.workspace_path, '.devtool_md5')
+ newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
+ preservepath = os.path.join(config.workspace_path, 'attic')
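+    # .devtool_md5 records recipename|path|md5 for each file devtool wrote; remove
+    # files that are unchanged and move any that were modified into 'attic'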
+ with open(origfile, 'r') as f:
+ with open(newfile, 'w') as tf:
+ for line in f.readlines():
+ splitline = line.rstrip().split('|')
+ if splitline[0] == recipename:
+ removefile = os.path.join(config.workspace_path, splitline[1])
+ md5 = bb.utils.md5_file(removefile)
+ if splitline[2] != md5:
+ bb.utils.mkdirhier(preservepath)
+ preservefile = os.path.basename(removefile)
+ logger.warn('File %s modified since it was written, preserving in %s' % (preservefile, preservepath))
+ shutil.move(removefile, os.path.join(preservepath, preservefile))
+ else:
+ os.remove(removefile)
+ else:
+ tf.write(line)
+ os.rename(newfile, origfile)
+
+ return False
+
+
+def modify(args, config, basepath, workspace):
+ import bb
+ import oe.recipeutils
+
+ if args.recipename in workspace:
+ logger.error("recipe %s is already in your workspace" % args.recipename)
+ return -1
+
+ if not args.extract:
+ if not os.path.isdir(args.srctree):
+            logger.error("directory %s does not exist or is not a directory (specify -x to extract source from recipe)" % args.srctree)
+ return -1
+
+ tinfoil = setup_tinfoil()
+
+ recipefile = _get_recipe_file(tinfoil.cooker, args.recipename)
+ if not recipefile:
+ # Error already logged
+ return -1
+ rd = oe.recipeutils.parse_recipe(recipefile, tinfoil.config_data)
+
+ initial_rev = None
+ commits = []
+ srctree = os.path.abspath(args.srctree)
+ if args.extract:
+ initial_rev = _extract_source(args.srctree, False, args.branch, rd)
+ if not initial_rev:
+ return -1
+ # Get list of commits since this revision
+ (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=args.srctree)
+ commits = stdout.split()
+ else:
+ if os.path.exists(os.path.join(args.srctree, '.git')):
+ (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=args.srctree)
+ initial_rev = stdout.rstrip()
+
+ # Handle if S is set to a subdirectory of the source
+ s = rd.getVar('S', True)
+ workdir = rd.getVar('WORKDIR', True)
+ if s != workdir and os.path.dirname(s) != workdir:
+ srcsubdir = os.sep.join(os.path.relpath(s, workdir).split(os.sep)[1:])
+ srctree = os.path.join(srctree, srcsubdir)
+
+ appendpath = os.path.join(config.workspace_path, 'appends')
+ if not os.path.exists(appendpath):
+ os.makedirs(appendpath)
+
+ appendname = os.path.splitext(os.path.basename(recipefile))[0]
+ if args.wildcard:
+ appendname = re.sub(r'_.*', '_%', appendname)
+ appendfile = os.path.join(appendpath, appendname + '.bbappend')
+ with open(appendfile, 'w') as f:
+ f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n')
+ f.write('inherit externalsrc\n')
+ f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n')
+ f.write('EXTERNALSRC_pn-%s = "%s"\n' % (args.recipename, srctree))
+ if bb.data.inherits_class('autotools-brokensep', rd):
+ logger.info('using source tree as build directory since original recipe inherits autotools-brokensep')
+ f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (args.recipename, srctree))
+ if initial_rev:
+ f.write('\n# initial_rev: %s\n' % initial_rev)
+ for commit in commits:
+ f.write('# commit: %s\n' % commit)
+
+ _add_md5(config, args.recipename, appendfile)
+
+ logger.info('Recipe %s now set up to build from %s' % (args.recipename, srctree))
+
+ return 0
+
+
+def update_recipe(args, config, basepath, workspace):
+ if not args.recipename in workspace:
+ logger.error("no recipe named %s in your workspace" % args.recipename)
+ return -1
+
+ # Get initial revision from bbappend
+ appends = glob.glob(os.path.join(config.workspace_path, 'appends', '%s_*.bbappend' % args.recipename))
+ if not appends:
+ logger.error('unable to find workspace bbappend for recipe %s' % args.recipename)
+ return -1
+
+ tinfoil = setup_tinfoil()
+ import bb
+ from oe.patch import GitApplyTree
+ import oe.recipeutils
+
+ srctree = workspace[args.recipename]
+ commits = []
+ update_rev = None
+ if args.initial_rev:
+ initial_rev = args.initial_rev
+ else:
+ initial_rev = None
+ with open(appends[0], 'r') as f:
+ for line in f:
+ if line.startswith('# initial_rev:'):
+ initial_rev = line.split(':')[-1].strip()
+ elif line.startswith('# commit:'):
+ commits.append(line.split(':')[-1].strip())
+
+ if initial_rev:
+ # Find first actually changed revision
+ (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree)
+ newcommits = stdout.split()
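+        # The bbappend records the commits that existed at extraction time; the
+        # last one still present in the new history is used as the base for
+        # regenerating the patches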
+ for i in xrange(min(len(commits), len(newcommits))):
+ if newcommits[i] == commits[i]:
+ update_rev = commits[i]
+
+ if not initial_rev:
+ logger.error('Unable to find initial revision - please specify it with --initial-rev')
+ return -1
+
+ if not update_rev:
+ update_rev = initial_rev
+
+ # Find list of existing patches in recipe file
+ recipefile = _get_recipe_file(tinfoil.cooker, args.recipename)
+ if not recipefile:
+ # Error already logged
+ return -1
+ rd = oe.recipeutils.parse_recipe(recipefile, tinfoil.config_data)
+ existing_patches = oe.recipeutils.get_recipe_patches(rd)
+
+ removepatches = []
+ if not args.no_remove:
+ # Get all patches from source tree and check if any should be removed
+ tempdir = tempfile.mkdtemp(prefix='devtool')
+ try:
+ GitApplyTree.extractPatches(srctree, initial_rev, tempdir)
+ newpatches = os.listdir(tempdir)
+ for patch in existing_patches:
+ patchfile = os.path.basename(patch)
+ if patchfile not in newpatches:
+ removepatches.append(patch)
+ finally:
+ shutil.rmtree(tempdir)
+
+ # Get updated patches from source tree
+ tempdir = tempfile.mkdtemp(prefix='devtool')
+ try:
+ GitApplyTree.extractPatches(srctree, update_rev, tempdir)
+
+ # Match up and replace existing patches with corresponding new patches
+ updatepatches = False
+ updaterecipe = False
+ newpatches = os.listdir(tempdir)
+ for patch in existing_patches:
+ patchfile = os.path.basename(patch)
+ if patchfile in newpatches:
+ logger.info('Updating patch %s' % patchfile)
+ shutil.move(os.path.join(tempdir, patchfile), patch)
+ newpatches.remove(patchfile)
+ updatepatches = True
+ srcuri = (rd.getVar('SRC_URI', False) or '').split()
+ if newpatches:
+ # Add any patches left over
+ patchdir = os.path.join(os.path.dirname(recipefile), rd.getVar('BPN', True))
+ bb.utils.mkdirhier(patchdir)
+ for patchfile in newpatches:
+ logger.info('Adding new patch %s' % patchfile)
+ shutil.move(os.path.join(tempdir, patchfile), os.path.join(patchdir, patchfile))
+ srcuri.append('file://%s' % patchfile)
+ updaterecipe = True
+ if removepatches:
+ # Remove any patches that we don't need
+ for patch in removepatches:
+ patchfile = os.path.basename(patch)
+ for i in xrange(len(srcuri)):
+ if srcuri[i].startswith('file://') and os.path.basename(srcuri[i]).split(';')[0] == patchfile:
+ logger.info('Removing patch %s' % patchfile)
+ srcuri.pop(i)
+ # FIXME "git rm" here would be nice if the file in question is tracked
+ # FIXME there's a chance that this file is referred to by another recipe, in which case deleting wouldn't be the right thing to do
+ os.remove(patch)
+ updaterecipe = True
+ break
+ if updaterecipe:
+ logger.info('Updating recipe %s' % os.path.basename(recipefile))
+ oe.recipeutils.patch_recipe(rd, recipefile, {'SRC_URI': ' '.join(srcuri)})
+ elif not updatepatches:
+ # Neither patches nor recipe were updated
+ logger.info('No patches need updating')
+ finally:
+ shutil.rmtree(tempdir)
+
+ return 0
+
+
+def status(args, config, basepath, workspace):
+ if workspace:
+ for recipe, value in workspace.iteritems():
+ print("%s: %s" % (recipe, value))
+ else:
+ logger.info('No recipes currently in your workspace - you can use "devtool modify" to work on an existing recipe or "devtool add" to add a new one')
+ return 0
+
+
+def reset(args, config, basepath, workspace):
+ import bb.utils
+ if not args.recipename in workspace:
+ logger.error("no recipe named %s in your workspace" % args.recipename)
+ return -1
+ _check_preserve(config, args.recipename)
+
+ preservepath = os.path.join(config.workspace_path, 'attic', args.recipename)
+ def preservedir(origdir):
+ if os.path.exists(origdir):
+ for fn in os.listdir(origdir):
+ logger.warn('Preserving %s in %s' % (fn, preservepath))
+ bb.utils.mkdirhier(preservepath)
+ shutil.move(os.path.join(origdir, fn), os.path.join(preservepath, fn))
+ os.rmdir(origdir)
+
+ preservedir(os.path.join(config.workspace_path, 'recipes', args.recipename))
+ # We don't automatically create this dir next to appends, but the user can
+ preservedir(os.path.join(config.workspace_path, 'appends', args.recipename))
+ return 0
+
+
+def build(args, config, basepath, workspace):
+ import bb
+ if not args.recipename in workspace:
+ logger.error("no recipe named %s in your workspace" % args.recipename)
+ return -1
+ exec_build_env_command(config.init_path, basepath, 'bitbake -c install %s' % args.recipename, watch=True)
+
+ return 0
+
+
+def register_commands(subparsers, context):
+ parser_add = subparsers.add_parser('add', help='Add a new recipe',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser_add.add_argument('recipename', help='Name for new recipe to add')
+ parser_add.add_argument('srctree', help='Path to external source tree')
+ parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
+ parser_add.set_defaults(func=add)
+
+ parser_add = subparsers.add_parser('modify', help='Modify the source for an existing recipe',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser_add.add_argument('recipename', help='Name for recipe to edit')
+ parser_add.add_argument('srctree', help='Path to external source tree')
+ parser_add.add_argument('--wildcard', '-w', action="store_true", help='Use wildcard for unversioned bbappend')
+ parser_add.add_argument('--extract', '-x', action="store_true", help='Extract source as well')
+ parser_add.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout')
+ parser_add.set_defaults(func=modify)
+
+ parser_add = subparsers.add_parser('extract', help='Extract the source for an existing recipe',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser_add.add_argument('recipename', help='Name for recipe to extract the source for')
+ parser_add.add_argument('srctree', help='Path to where to extract the source tree')
+ parser_add.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout')
+ parser_add.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
+ parser_add.set_defaults(func=extract)
+
+ parser_add = subparsers.add_parser('update-recipe', help='Apply changes from external source tree to recipe',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser_add.add_argument('recipename', help='Name of recipe to update')
+ parser_add.add_argument('--initial-rev', help='Starting revision for patches')
+ parser_add.add_argument('--no-remove', '-n', action="store_true", help='Don\'t remove patches, only add or update')
+ parser_add.set_defaults(func=update_recipe)
+
+ parser_status = subparsers.add_parser('status', help='Show status',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser_status.set_defaults(func=status)
+
+ parser_build = subparsers.add_parser('build', help='Build recipe',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser_build.add_argument('recipename', help='Recipe to build')
+ parser_build.set_defaults(func=build)
+
+ parser_reset = subparsers.add_parser('reset', help='Remove a recipe from your workspace',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser_reset.add_argument('recipename', help='Recipe to reset')
+ parser_reset.set_defaults(func=reset)
+
diff --git a/scripts/lib/image/canned-wks/sdimage-bootpart.wks b/scripts/lib/image/canned-wks/sdimage-bootpart.wks
new file mode 100644
index 0000000000..7ffd632f4a
--- /dev/null
+++ b/scripts/lib/image/canned-wks/sdimage-bootpart.wks
@@ -0,0 +1,6 @@
+# short-description: Create SD card image with a boot partition
+# long-description: Creates a partitioned SD card image. Boot files
+# are located in the first vfat partition.
+
+part /boot --source bootimg-partition --ondisk mmcblk --fstype=vfat --label boot --active --align 4 --size 16
+part / --source rootfs --ondisk mmcblk --fstype=ext4 --label root --align 4
diff --git a/scripts/lib/image/engine.py b/scripts/lib/image/engine.py
index f1df8b4db8..e794545e94 100644
--- a/scripts/lib/image/engine.py
+++ b/scripts/lib/image/engine.py
@@ -67,7 +67,7 @@ def find_artifacts(image_name):
"""
bitbake_env_lines = get_bitbake_env_lines()
- rootfs_dir = kernel_dir = hdddir = staging_data_dir = native_sysroot = ""
+ rootfs_dir = kernel_dir = bootimg_dir = native_sysroot = ""
for line in bitbake_env_lines.split('\n'):
if (get_line_val(line, "IMAGE_ROOTFS")):
@@ -76,17 +76,11 @@ def find_artifacts(image_name):
if (get_line_val(line, "STAGING_KERNEL_DIR")):
kernel_dir = get_line_val(line, "STAGING_KERNEL_DIR")
continue
- if (get_line_val(line, "HDDDIR")):
- hdddir = get_line_val(line, "HDDDIR")
- continue
- if (get_line_val(line, "STAGING_DATADIR")):
- staging_data_dir = get_line_val(line, "STAGING_DATADIR")
- continue
if (get_line_val(line, "STAGING_DIR_NATIVE")):
native_sysroot = get_line_val(line, "STAGING_DIR_NATIVE")
continue
- return (rootfs_dir, kernel_dir, hdddir, staging_data_dir, native_sysroot)
+ return (rootfs_dir, kernel_dir, bootimg_dir, native_sysroot)
CANNED_IMAGE_DIR = "lib/image/canned-wks" # relative to scripts
@@ -185,18 +179,15 @@ def list_source_plugins():
def wic_create(args, wks_file, rootfs_dir, bootimg_dir, kernel_dir,
- native_sysroot, hdddir, staging_data_dir, scripts_path,
- image_output_dir, debug, properties_file, properties=None):
- """
- Create image
+ native_sysroot, scripts_path, image_output_dir, debug,
+ properties_file, properties=None):
+ """Create image
wks_file - user-defined OE kickstart file
rootfs_dir - absolute path to the build's /rootfs dir
bootimg_dir - absolute path to the build's boot artifacts directory
kernel_dir - absolute path to the build's kernel directory
native_sysroot - absolute path to the build's native sysroots dir
- hdddir - absolute path to the build's HDDDIR dir
- staging_data_dir - absolute path to the build's STAGING_DATA_DIR dir
scripts_path - absolute path to /scripts dir
image_output_dir - dirname to create for image
properties_file - use values from this file if nonempty i.e no prompting
@@ -211,22 +202,14 @@ def wic_create(args, wks_file, rootfs_dir, bootimg_dir, kernel_dir,
rootfs_dir: IMAGE_ROOTFS
kernel_dir: STAGING_KERNEL_DIR
native_sysroot: STAGING_DIR_NATIVE
- hdddir: HDDDIR
- staging_data_dir: STAGING_DATA_DIR
- In the above case, bootimg_dir remains unset and the image
- creation code determines which of the passed-in directories to
- use.
+ In the above case, bootimg_dir remains unset and the
+ plugin-specific image creation code is responsible for finding the
+ bootimg artifacts.
In the case where the values are passed in explicitly i.e 'wic -e'
is not used but rather the individual 'wic' options are used to
- explicitly specify these values, hdddir and staging_data_dir will
- be unset, but bootimg_dir must be explicit i.e. explicitly set to
- either hdddir or staging_data_dir, depending on the image being
- generated. The other values (rootfs_dir, kernel_dir, and
- native_sysroot) correspond to the same values found above via
- 'bitbake -e').
-
+ explicitly specify these values.
"""
try:
oe_builddir = os.environ["BUILDDIR"]
@@ -242,8 +225,6 @@ def wic_create(args, wks_file, rootfs_dir, bootimg_dir, kernel_dir,
direct_args.insert(0, bootimg_dir)
direct_args.insert(0, kernel_dir)
direct_args.insert(0, native_sysroot)
- direct_args.insert(0, hdddir)
- direct_args.insert(0, staging_data_dir)
direct_args.insert(0, "direct")
if debug:
diff --git a/scripts/lib/image/help.py b/scripts/lib/image/help.py
index 080795e577..6b74f57662 100644
--- a/scripts/lib/image/help.py
+++ b/scripts/lib/image/help.py
@@ -109,8 +109,9 @@ wic_create_usage = """
usage: wic create <wks file or image name> [-o <DIRNAME> | --outdir <DIRNAME>]
[-i <JSON PROPERTY FILE> | --infile <JSON PROPERTY_FILE>]
- [-e | --image-name] [-r, --rootfs-dir] [-b, --bootimg-dir]
- [-k, --kernel-dir] [-n, --native-sysroot] [-s, --skip-build-check]
+ [-e | --image-name] [-s, --skip-build-check] [-D, --debug]
+ [-r, --rootfs-dir] [-b, --bootimg-dir]
+ [-k, --kernel-dir] [-n, --native-sysroot]
This command creates an OpenEmbedded image based on the 'OE kickstart
commands' found in the <wks file>.
@@ -129,8 +130,9 @@ NAME
SYNOPSIS
wic create <wks file or image name> [-o <DIRNAME> | --outdir <DIRNAME>]
[-i <JSON PROPERTY FILE> | --infile <JSON PROPERTY_FILE>]
- [-e | --image-name] [-r, --rootfs-dir] [-b, --bootimg-dir]
- [-k, --kernel-dir] [-n, --native-sysroot] [-s, --skip-build-check]
+ [-e | --image-name] [-s, --skip-build-check] [-D, --debug]
+ [-r, --rootfs-dir] [-b, --bootimg-dir]
+ [-k, --kernel-dir] [-n, --native-sysroot]
DESCRIPTION
This command creates an OpenEmbedded image based on the 'OE
@@ -172,6 +174,12 @@ DESCRIPTION
explicitly, 'wic' assumes the user knows what he or she is doing
and skips the build check.
+ The -D option is used to display debug information detailing
+ exactly what happens behind the scenes when a create request is
+ fulfilled (or not, as the case may be). It enumerates and
+ displays the command sequence used, and should be included in any
+ bug report describing unexpected results.
+
When 'wic -e' is used, the locations for the build artifacts
values are determined by 'wic -e' from the output of the 'bitbake
-e' command given an image name e.g. 'core-image-minimal' and a
@@ -181,7 +189,7 @@ DESCRIPTION
-r: IMAGE_ROOTFS
-k: STAGING_KERNEL_DIR
-n: STAGING_DIR_NATIVE
- -b: HDDDIR and STAGING_DATA_DIR (handlers decide which to use)
+ -b: empty (plugin-specific handlers must determine this)
If 'wic -e' is not used, the user needs to select the appropriate
value for -b (as well as -r, -k, and -n).
@@ -519,8 +527,9 @@ DESCRIPTION
usage: wic create <wks file or image name> [-o <DIRNAME> | ...]
[-i <JSON PROPERTY FILE> | --infile <JSON PROPERTY_FILE>]
- [-e | --image-name] [-r, --rootfs-dir] [-b, --bootimg-dir]
- [-k, --kernel-dir] [-n, --native-sysroot] [-s, --skip-build-check]
+ [-e | --image-name] [-s, --skip-build-check] [-D, --debug]
+ [-r, --rootfs-dir] [-b, --bootimg-dir]
+ [-k, --kernel-dir] [-n, --native-sysroot]
This command creates an OpenEmbedded image based on the 'OE
kickstart commands' found in the <wks file>.
diff --git a/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/{{ if xserver == "y": }} xorg.conf b/scripts/lib/recipetool/__init__.py
index e69de29bb2..e69de29bb2 100644
--- a/scripts/lib/bsp/substrate/target/arch/i386/recipes-graphics/xorg-xserver/xserver-xf86-config/{{=machine}}/{{ if xserver == "y": }} xorg.conf
+++ b/scripts/lib/recipetool/__init__.py
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
new file mode 100644
index 0000000000..6dba07819b
--- /dev/null
+++ b/scripts/lib/recipetool/create.py
@@ -0,0 +1,413 @@
+# Recipe creation tool - create command plugin
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import sys
+import os
+import argparse
+import glob
+import fnmatch
+import re
+import logging
+
+logger = logging.getLogger('recipetool')
+
+tinfoil = None
+plugins = None
+
+def plugin_init(pluginlist):
+ # Take a reference to the list so we can use it later
+ global plugins
+ plugins = pluginlist
+
+def tinfoil_init(instance):
+ global tinfoil
+ tinfoil = instance
+
+class RecipeHandler():
+ @staticmethod
+ def checkfiles(path, speclist):
+ results = []
+ for spec in speclist:
+ results.extend(glob.glob(os.path.join(path, spec)))
+ return results
+
+ def genfunction(self, outlines, funcname, content):
+ outlines.append('%s () {' % funcname)
+ for line in content:
+ outlines.append('\t%s' % line)
+ outlines.append('}')
+ outlines.append('')
+
+ def process(self, srctree, classes, lines_before, lines_after, handled):
+ return False
+
+
+
+def fetch_source(uri, destdir):
+    import bb.data
+    import bb.fetch2
+    import bb.build
+ bb.utils.mkdirhier(destdir)
+ localdata = bb.data.createCopy(tinfoil.config_data)
+ bb.data.update_data(localdata)
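+    # Checksums and an exact SRCREV are not known yet, so relax checking for this
+    # fetch; the md5/sha256 values are computed from the downloaded file below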
+ localdata.setVar('BB_STRICT_CHECKSUM', '')
+ localdata.setVar('SRCREV', '${AUTOREV}')
+ ret = (None, None)
+ olddir = os.getcwd()
+ try:
+ fetcher = bb.fetch2.Fetch([uri], localdata)
+ for u in fetcher.ud:
+ ud = fetcher.ud[u]
+ ud.ignore_checksums = True
+ fetcher.download()
+ fetcher.unpack(destdir)
+ for u in fetcher.ud:
+ ud = fetcher.ud[u]
+ if ud.method.recommends_checksum(ud):
+ md5value = bb.utils.md5_file(ud.localpath)
+ sha256value = bb.utils.sha256_file(ud.localpath)
+ ret = (md5value, sha256value)
+ except bb.fetch2.BBFetchException, e:
+ raise bb.build.FuncFailed(e)
+ finally:
+ os.chdir(olddir)
+ return ret
+
+def supports_srcrev(uri):
+    import bb.data
+    import bb.fetch2
+    localdata = bb.data.createCopy(tinfoil.config_data)
+ bb.data.update_data(localdata)
+ fetcher = bb.fetch2.Fetch([uri], localdata)
+ urldata = fetcher.ud
+ for u in urldata:
+ if urldata[u].method.supports_srcrev():
+ return True
+ return False
+
+def create_recipe(args):
+ import bb.process
+ import tempfile
+ import shutil
+
+ pkgarch = ""
+ if args.machine:
+ pkgarch = "${MACHINE_ARCH}"
+
+ checksums = (None, None)
+ tempsrc = ''
+ srcsubdir = ''
+ if '://' in args.source:
+ # Fetch a URL
+ srcuri = args.source
+ if args.externalsrc:
+ srctree = args.externalsrc
+ else:
+ tempsrc = tempfile.mkdtemp(prefix='recipetool-')
+ srctree = tempsrc
+ logger.info('Fetching %s...' % srcuri)
+ checksums = fetch_source(args.source, srctree)
+ dirlist = os.listdir(srctree)
+ if 'git.indirectionsymlink' in dirlist:
+ dirlist.remove('git.indirectionsymlink')
+ if len(dirlist) == 1 and os.path.isdir(os.path.join(srctree, dirlist[0])):
+ # We unpacked a single directory, so we should use that
+ srcsubdir = dirlist[0]
+ srctree = os.path.join(srctree, srcsubdir)
+ else:
+ # Assume we're pointing to an existing source tree
+ if args.externalsrc:
+ logger.error('externalsrc cannot be specified if source is a directory')
+ sys.exit(1)
+ if not os.path.isdir(args.source):
+ logger.error('Invalid source directory %s' % args.source)
+ sys.exit(1)
+ srcuri = ''
+ srctree = args.source
+
+ outfile = args.outfile
+ if outfile and outfile != '-':
+ if os.path.exists(outfile):
+ logger.error('Output file %s already exists' % outfile)
+ sys.exit(1)
+
+ lines_before = []
+ lines_after = []
+
+ lines_before.append('# Recipe created by %s' % os.path.basename(sys.argv[0]))
+ lines_before.append('# This is the basis of a recipe and may need further editing in order to be fully functional.')
+ lines_before.append('# (Feel free to remove these comments when editing.)')
+ lines_before.append('#')
+
+ licvalues = guess_license(srctree)
+ lic_files_chksum = []
+ if licvalues:
+ licenses = []
+ for licvalue in licvalues:
+ if not licvalue[0] in licenses:
+ licenses.append(licvalue[0])
+ lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2]))
+ lines_before.append('# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is')
+ lines_before.append('# your responsibility to verify that the values are complete and correct.')
+ if len(licvalues) > 1:
+ lines_before.append('#')
+ lines_before.append('# NOTE: multiple licenses have been detected; if that is correct you should separate')
+ lines_before.append('# these in the LICENSE value using & if the multiple licenses all apply, or | if there')
+ lines_before.append('# is a choice between the multiple licenses. If in doubt, check the accompanying')
+ lines_before.append('# documentation to determine which situation is applicable.')
+ else:
+ lines_before.append('# Unable to find any files that looked like license statements. Check the accompanying')
+ lines_before.append('# documentation and source headers and set LICENSE and LIC_FILES_CHKSUM accordingly.')
+ lines_before.append('#')
+ lines_before.append('# NOTE: LICENSE is being set to "CLOSED" to allow you to at least start building - if')
+ lines_before.append('# this is not accurate with respect to the licensing of the software being built (it')
+ lines_before.append('# will not be in most cases) you must specify the correct value before using this')
+ lines_before.append('# recipe for anything other than initial testing/development!')
+ licenses = ['CLOSED']
+ lines_before.append('LICENSE = "%s"' % ' '.join(licenses))
+ lines_before.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
+ lines_before.append('')
+
+ # FIXME This is kind of a hack, we probably ought to be using bitbake to do this
+ # we'd also want a way to automatically set outfile based upon auto-detecting these values from the source if possible
+ recipefn = os.path.splitext(os.path.basename(outfile))[0]
+ fnsplit = recipefn.split('_')
+ if len(fnsplit) > 1:
+ pn = fnsplit[0]
+ pv = fnsplit[1]
+ else:
+ pn = recipefn
+ pv = None
+
+ if srcuri:
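+        # Replace the version within the URL with ${PV} so the recipe can be
+        # upgraded just by renaming it (skip placeholder versions like git/svn/hg)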
+ if pv and pv not in 'git svn hg'.split():
+ srcuri = srcuri.replace(pv, '${PV}')
+ else:
+ lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
+ lines_before.append('SRC_URI = "%s"' % srcuri)
+ (md5value, sha256value) = checksums
+ if md5value:
+ lines_before.append('SRC_URI[md5sum] = "%s"' % md5value)
+ if sha256value:
+ lines_before.append('SRC_URI[sha256sum] = "%s"' % sha256value)
+ if srcuri and supports_srcrev(srcuri):
+ lines_before.append('')
+ lines_before.append('# Modify these as desired')
+ lines_before.append('PV = "1.0+git${SRCPV}"')
+ lines_before.append('SRCREV = "${AUTOREV}"')
+ lines_before.append('')
+
+ if srcsubdir and pv:
+ if srcsubdir == "%s-%s" % (pn, pv):
+ # This would be the default, so we don't need to set S in the recipe
+ srcsubdir = ''
+ if srcsubdir:
+ lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir)
+ lines_before.append('')
+
+ if pkgarch:
+ lines_after.append('PACKAGE_ARCH = "%s"' % pkgarch)
+ lines_after.append('')
+
+ # Find all plugins that want to register handlers
+ handlers = []
+ for plugin in plugins:
+ if hasattr(plugin, 'register_recipe_handlers'):
+ plugin.register_recipe_handlers(handlers)
+
+ # Apply the handlers
+ classes = []
+ handled = []
+ for handler in handlers:
+ handler.process(srctree, classes, lines_before, lines_after, handled)
+
+ outlines = []
+ outlines.extend(lines_before)
+ if classes:
+ outlines.append('inherit %s' % ' '.join(classes))
+ outlines.append('')
+ outlines.extend(lines_after)
+
+ if outfile == '-':
+ sys.stdout.write('\n'.join(outlines) + '\n')
+ else:
+ with open(outfile, 'w') as f:
+ f.write('\n'.join(outlines) + '\n')
+ logger.info('Recipe %s has been created; further editing may be required to make it fully functional' % outfile)
+
+ if tempsrc:
+ shutil.rmtree(tempsrc)
+
+ return 0
+
+def get_license_md5sums(d, static_only=False):
+ import bb.utils
+ md5sums = {}
+ if not static_only:
+ # Gather md5sums of license files in common license dir
+ commonlicdir = d.getVar('COMMON_LICENSE_DIR', True)
+ for fn in os.listdir(commonlicdir):
+ md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
+ md5sums[md5value] = fn
+ # The following were extracted from common values in various recipes
+ # (double checking the license against the license file itself, not just
+ # the LICENSE value in the recipe)
+ md5sums['94d55d512a9ba36caa9b7df079bae19f'] = 'GPLv2'
+ md5sums['b234ee4d69f5fce4486a80fdaf4a4263'] = 'GPLv2'
+ md5sums['59530bdf33659b29e73d4adb9f9f6552'] = 'GPLv2'
+ md5sums['0636e73ff0215e8d672dc4c32c317bb3'] = 'GPLv2'
+ md5sums['eb723b61539feef013de476e68b5c50a'] = 'GPLv2'
+ md5sums['751419260aa954499f7abaabaa882bbe'] = 'GPLv2'
+ md5sums['393a5ca445f6965873eca0259a17f833'] = 'GPLv2'
+ md5sums['12f884d2ae1ff87c09e5b7ccc2c4ca7e'] = 'GPLv2'
+ md5sums['8ca43cbc842c2336e835926c2166c28b'] = 'GPLv2'
+ md5sums['ebb5c50ab7cab4baeffba14977030c07'] = 'GPLv2'
+ md5sums['c93c0550bd3173f4504b2cbd8991e50b'] = 'GPLv2'
+ md5sums['9ac2e7cff1ddaf48b6eab6028f23ef88'] = 'GPLv2'
+ md5sums['4325afd396febcb659c36b49533135d4'] = 'GPLv2'
+ md5sums['18810669f13b87348459e611d31ab760'] = 'GPLv2'
+ md5sums['d7810fab7487fb0aad327b76f1be7cd7'] = 'GPLv2' # the Linux kernel's COPYING file
+ md5sums['bbb461211a33b134d42ed5ee802b37ff'] = 'LGPLv2.1'
+ md5sums['7fbc338309ac38fefcd64b04bb903e34'] = 'LGPLv2.1'
+ md5sums['4fbd65380cdd255951079008b364516c'] = 'LGPLv2.1'
+ md5sums['2d5025d4aa3495befef8f17206a5b0a1'] = 'LGPLv2.1'
+ md5sums['fbc093901857fcd118f065f900982c24'] = 'LGPLv2.1'
+ md5sums['a6f89e2100d9b6cdffcea4f398e37343'] = 'LGPLv2.1'
+ md5sums['d8045f3b8f929c1cb29a1e3fd737b499'] = 'LGPLv2.1'
+ md5sums['fad9b3332be894bab9bc501572864b29'] = 'LGPLv2.1'
+ md5sums['3bf50002aefd002f49e7bb854063f7e7'] = 'LGPLv2'
+ md5sums['9f604d8a4f8e74f4f5140845a21b6674'] = 'LGPLv2'
+ md5sums['5f30f0716dfdd0d91eb439ebec522ec2'] = 'LGPLv2'
+ md5sums['55ca817ccb7d5b5b66355690e9abc605'] = 'LGPLv2'
+ md5sums['252890d9eee26aab7b432e8b8a616475'] = 'LGPLv2'
+ md5sums['d32239bcb673463ab874e80d47fae504'] = 'GPLv3'
+ md5sums['f27defe1e96c2e1ecd4e0c9be8967949'] = 'GPLv3'
+ md5sums['6a6a8e020838b23406c81b19c1d46df6'] = 'LGPLv3'
+ md5sums['3b83ef96387f14655fc854ddc3c6bd57'] = 'Apache-2.0'
+ md5sums['385c55653886acac3821999a3ccd17b3'] = 'Artistic-1.0 | GPL-2.0' # some perl modules
+ return md5sums
+
+def guess_license(srctree):
+ import bb
+ md5sums = get_license_md5sums(tinfoil.config_data)
+
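+    # Collect anything that looks like a license file and identify it by
+    # comparing its md5sum against the table of known license texts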
+ licenses = []
+ licspecs = ['LICENSE*', 'COPYING*', '*[Ll]icense*', 'LICENCE*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*']
+ licfiles = []
+ for root, dirs, files in os.walk(srctree):
+ for fn in files:
+ for spec in licspecs:
+ if fnmatch.fnmatch(fn, spec):
+ licfiles.append(os.path.join(root, fn))
+ for licfile in licfiles:
+ md5value = bb.utils.md5_file(licfile)
+ license = md5sums.get(md5value, 'Unknown')
+ licenses.append((license, os.path.relpath(licfile, srctree), md5value))
+
+ # FIXME should we grab at least one source file with a license header and add that too?
+
+ return licenses
+
+def read_pkgconfig_provides(d):
+ pkgdatadir = d.getVar('PKGDATA_DIR', True)
+ pkgmap = {}
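+    # The shlibs2/*.pclist files map each package to the .pc files it ships;
+    # the runtime pkgdata files then map those packages back to recipes (PN)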
+ for fn in glob.glob(os.path.join(pkgdatadir, 'shlibs2', '*.pclist')):
+ with open(fn, 'r') as f:
+ for line in f:
+ pkgmap[os.path.basename(line.rstrip())] = os.path.splitext(os.path.basename(fn))[0]
+ recipemap = {}
+ for pc, pkg in pkgmap.iteritems():
+ pkgdatafile = os.path.join(pkgdatadir, 'runtime', pkg)
+ if os.path.exists(pkgdatafile):
+ with open(pkgdatafile, 'r') as f:
+ for line in f:
+ if line.startswith('PN: '):
+ recipemap[pc] = line.split(':', 1)[1].strip()
+ return recipemap
+
+def convert_pkginfo(pkginfofile):
+ values = {}
+ with open(pkginfofile, 'r') as f:
+ indesc = False
+ for line in f:
+ if indesc:
+ if line.strip():
+ values['DESCRIPTION'] += ' ' + line.strip()
+ else:
+ indesc = False
+            else:
+                splitline = line.split(': ', 1)
+                if len(splitline) < 2:
+                    continue
+                key = splitline[0]
+                value = splitline[1].strip()
+                if key in ('License', 'LICENSE'):
+                    values['LICENSE'] = value
+                elif key == 'Summary':
+                    values['SUMMARY'] = value
+                elif key == 'Description':
+                    values['DESCRIPTION'] = value
+                    indesc = True
+    return values
+
+def convert_debian(debpath):
+ # FIXME extend this mapping - perhaps use distro_alias.inc?
+ depmap = {'libz-dev': 'zlib'}
+
+ values = {}
+ depends = []
+ with open(os.path.join(debpath, 'control')) as f:
+ indesc = False
+ for line in f:
+ if indesc:
+ if line.strip():
+ if line.startswith(' This package contains'):
+ indesc = False
+ else:
+ values['DESCRIPTION'] += ' ' + line.strip()
+ else:
+ indesc = False
+ else:
+                splitline = line.split(':', 1)
+                if len(splitline) < 2:
+                    continue
+                key = splitline[0]
+                value = splitline[1].strip()
+ if key == 'Build-Depends':
+ for dep in value.split(','):
+ dep = dep.split()[0]
+ mapped = depmap.get(dep, '')
+ if mapped:
+ depends.append(mapped)
+ elif key == 'Section':
+ values['SECTION'] = value
+ elif key == 'Description':
+ values['SUMMARY'] = value
+ indesc = True
+
+ if depends:
+ values['DEPENDS'] = ' '.join(depends)
+
+ return values
+
+
+def register_command(subparsers):
+ parser_create = subparsers.add_parser('create', help='Create a new recipe')
+ parser_create.add_argument('source', help='Path or URL to source')
+ parser_create.add_argument('-o', '--outfile', help='Full path and filename to recipe to add', required=True)
+ parser_create.add_argument('-m', '--machine', help='Make recipe machine-specific as opposed to architecture-specific', action='store_true')
+ parser_create.add_argument('-x', '--externalsrc', help='Assuming source is a URL, fetch it and extract it to the specified directory')
+ parser_create.set_defaults(func=create_recipe)
+
diff --git a/scripts/lib/recipetool/create_buildsys.py b/scripts/lib/recipetool/create_buildsys.py
new file mode 100644
index 0000000000..6c9e0efa2a
--- /dev/null
+++ b/scripts/lib/recipetool/create_buildsys.py
@@ -0,0 +1,319 @@
+# Recipe creation tool - create command build system handlers
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import re
+import logging
+from recipetool.create import RecipeHandler, read_pkgconfig_provides
+
+logger = logging.getLogger('recipetool')
+
+tinfoil = None
+
+def tinfoil_init(instance):
+ global tinfoil
+ tinfoil = instance
+
+class CmakeRecipeHandler(RecipeHandler):
+ def process(self, srctree, classes, lines_before, lines_after, handled):
+ if 'buildsystem' in handled:
+ return False
+
+ if RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']):
+ classes.append('cmake')
+ lines_after.append('# Specify any options you want to pass to cmake using EXTRA_OECMAKE:')
+ lines_after.append('EXTRA_OECMAKE = ""')
+ lines_after.append('')
+ handled.append('buildsystem')
+ return True
+ return False
+
+class SconsRecipeHandler(RecipeHandler):
+ def process(self, srctree, classes, lines_before, lines_after, handled):
+ if 'buildsystem' in handled:
+ return False
+
+ if RecipeHandler.checkfiles(srctree, ['SConstruct', 'Sconstruct', 'sconstruct']):
+ classes.append('scons')
+ lines_after.append('# Specify any options you want to pass to scons using EXTRA_OESCONS:')
+ lines_after.append('EXTRA_OESCONS = ""')
+ lines_after.append('')
+ handled.append('buildsystem')
+ return True
+ return False
+
+class QmakeRecipeHandler(RecipeHandler):
+ def process(self, srctree, classes, lines_before, lines_after, handled):
+ if 'buildsystem' in handled:
+ return False
+
+ if RecipeHandler.checkfiles(srctree, ['*.pro']):
+ classes.append('qmake2')
+ handled.append('buildsystem')
+ return True
+ return False
+
+class AutotoolsRecipeHandler(RecipeHandler):
+ def process(self, srctree, classes, lines_before, lines_after, handled):
+ if 'buildsystem' in handled:
+ return False
+
+ autoconf = False
+ if RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in']):
+ autoconf = True
+ values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree)
+ classes.extend(values.pop('inherit', '').split())
+ for var, value in values.iteritems():
+ lines_before.append('%s = "%s"' % (var, value))
+ else:
+ conffile = RecipeHandler.checkfiles(srctree, ['configure'])
+ if conffile:
+ # Check if this is just a pre-generated autoconf configure script
+ with open(conffile[0], 'r') as f:
+ for i in range(1, 10):
+ if 'Generated by GNU Autoconf' in f.readline():
+ autoconf = True
+ break
+
+ if autoconf:
+ lines_before.append('# NOTE: if this software is not capable of being built in a separate build directory')
+ lines_before.append('# from the source, you should replace autotools with autotools-brokensep in the')
+ lines_before.append('# inherit line')
+ classes.append('autotools')
+ lines_after.append('# Specify any options you want to pass to the configure script using EXTRA_OECONF:')
+ lines_after.append('EXTRA_OECONF = ""')
+ lines_after.append('')
+ handled.append('buildsystem')
+ return True
+
+ return False
+
+ @staticmethod
+ def extract_autotools_deps(outlines, srctree, acfile=None):
+ import shlex
+ import oe.package
+
+ values = {}
+ inherits = []
+
+ # FIXME this mapping is very thin
+ progmap = {'flex': 'flex-native',
+ 'bison': 'bison-native',
+ 'm4': 'm4-native'}
+ progclassmap = {'gconftool-2': 'gconf',
+ 'pkg-config': 'pkgconfig'}
+
+ ignoredeps = ['gcc-runtime', 'glibc', 'uclibc']
+
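+        # Regexes for pulling dependency information out of the autoconf macros
+        # we understand (PKG_CHECK_MODULES, AC_CHECK_LIB and the *_PROGS variants)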
+ pkg_re = re.compile('PKG_CHECK_MODULES\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)[),].*')
+ lib_re = re.compile('AC_CHECK_LIB\(\[?([a-zA-Z0-9]*)\]?, .*')
+ progs_re = re.compile('_PROGS?\(\[?[a-zA-Z0-9]*\]?, \[?([^,\]]*)\]?[),].*')
+ dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?')
+
+ # Build up lib library->package mapping
+ shlib_providers = oe.package.read_shlib_providers(tinfoil.config_data)
+ libdir = tinfoil.config_data.getVar('libdir', True)
+ base_libdir = tinfoil.config_data.getVar('base_libdir', True)
+ libpaths = list(set([base_libdir, libdir]))
+ libname_re = re.compile('^lib(.+)\.so.*$')
+ pkglibmap = {}
+ for lib, item in shlib_providers.iteritems():
+ for path, pkg in item.iteritems():
+ if path in libpaths:
+ res = libname_re.match(lib)
+ if res:
+ libname = res.group(1)
+ if not libname in pkglibmap:
+ pkglibmap[libname] = pkg[0]
+ else:
+ logger.debug('unable to extract library name from %s' % lib)
+
+ # Now turn it into a library->recipe mapping
+ recipelibmap = {}
+ pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
+ for libname, pkg in pkglibmap.iteritems():
+ try:
+ with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
+ for line in f:
+ if line.startswith('PN:'):
+ recipelibmap[libname] = line.split(':', 1)[-1].strip()
+ break
+ except IOError as ioe:
+ if ioe.errno == 2:
+ logger.warn('unable to find a pkgdata file for package %s' % pkg)
+ else:
+ raise
+
+ # Since a configure.ac file is essentially a program, this is only ever going to be
+ # a hack unfortunately; but it ought to be enough of an approximation
+ if acfile:
+ srcfiles = [acfile]
+ else:
+ srcfiles = RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in'])
+ pcdeps = []
+ deps = []
+ unmapped = []
+ unmappedlibs = []
+ with open(srcfiles[0], 'r') as f:
+ for line in f:
+ if 'PKG_CHECK_MODULES' in line:
+ res = pkg_re.search(line)
+ if res:
+ res = dep_re.findall(res.group(1))
+ if res:
+ pcdeps.extend([x[0] for x in res])
+ inherits.append('pkgconfig')
+ if line.lstrip().startswith('AM_GNU_GETTEXT'):
+ inherits.append('gettext')
+ elif 'AC_CHECK_PROG' in line or 'AC_PATH_PROG' in line:
+ res = progs_re.search(line)
+ if res:
+ for prog in shlex.split(res.group(1)):
+ prog = prog.split()[0]
+ progclass = progclassmap.get(prog, None)
+ if progclass:
+ inherits.append(progclass)
+ else:
+ progdep = progmap.get(prog, None)
+ if progdep:
+ deps.append(progdep)
+ else:
+ if not prog.startswith('$'):
+ unmapped.append(prog)
+ elif 'AC_CHECK_LIB' in line:
+ res = lib_re.search(line)
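+    # With --wildcard, name the bbappend <pn>_%.bbappend so that it applies to
+    # any version of the recipe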
+ if res:
+ lib = res.group(1)
+ libdep = recipelibmap.get(lib, None)
+ if libdep:
+ deps.append(libdep)
+ else:
+ if libdep is None:
+ if not lib.startswith('$'):
+ unmappedlibs.append(lib)
+ elif 'AC_PATH_X' in line:
+ deps.append('libx11')
+
+ if unmapped:
+ outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(unmapped))
+
+ if unmappedlibs:
+ outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(unmappedlibs))
+ outlines.append('# (this is based on recipes that have previously been built and packaged)')
+
+ recipemap = read_pkgconfig_provides(tinfoil.config_data)
+ unmapped = []
+ for pcdep in pcdeps:
+ recipe = recipemap.get(pcdep, None)
+ if recipe:
+ deps.append(recipe)
+ else:
+ if not pcdep.startswith('$'):
+ unmapped.append(pcdep)
+
+ deps = set(deps).difference(set(ignoredeps))
+
+ if unmapped:
+ outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmapped))
+ outlines.append('# (this is based on recipes that have previously been built and packaged)')
+
+ if deps:
+ values['DEPENDS'] = ' '.join(deps)
+
+ if inherits:
+ values['inherit'] = ' '.join(list(set(inherits)))
+
+ return values
+
+
+class MakefileRecipeHandler(RecipeHandler):
+ def process(self, srctree, classes, lines_before, lines_after, handled):
+    def process(self, srctree, classes, lines_before, lines_after, handled):
+        import bb.process
+ return False
+
+ makefile = RecipeHandler.checkfiles(srctree, ['Makefile'])
+ if makefile:
+ lines_after.append('# NOTE: this is a Makefile-only piece of software, so we cannot generate much of the')
+ lines_after.append('# recipe automatically - you will need to examine the Makefile yourself and ensure')
+ lines_after.append('# that the appropriate arguments are passed in.')
+ lines_after.append('')
+
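+            # Run autoscan to generate a configure.scan we can mine for probable
+            # dependencies, even though this is only a Makefile-based project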
+ scanfile = os.path.join(srctree, 'configure.scan')
+ skipscan = False
+ try:
+ stdout, stderr = bb.process.run('autoscan', cwd=srctree, shell=True)
+ except bb.process.ExecutionError as e:
+ skipscan = True
+ if scanfile and os.path.exists(scanfile):
+ values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, acfile=scanfile)
+ classes.extend(values.pop('inherit', '').split())
+ for var, value in values.iteritems():
+ if var == 'DEPENDS':
+ lines_before.append('# NOTE: some of these dependencies may be optional, check the Makefile and/or upstream documentation')
+ lines_before.append('%s = "%s"' % (var, value))
+ lines_before.append('')
+ for f in ['configure.scan', 'autoscan.log']:
+ fp = os.path.join(srctree, f)
+ if os.path.exists(fp):
+ os.remove(fp)
+
+ self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])
+
+ func = []
+ func.append('# You will almost certainly need to add additional arguments here')
+ func.append('oe_runmake')
+ self.genfunction(lines_after, 'do_compile', func)
+
+ installtarget = True
+ try:
+ stdout, stderr = bb.process.run('make -qn install', cwd=srctree, shell=True)
+ except bb.process.ExecutionError as e:
+ if e.exitcode != 1:
+ installtarget = False
+ func = []
+ if installtarget:
+ func.append('# This is a guess; additional arguments may be required')
+ makeargs = ''
+ with open(makefile[0], 'r') as f:
+ for i in range(1, 100):
+ if 'DESTDIR' in f.readline():
+ makeargs += " 'DESTDIR=${D}'"
+ break
+ func.append('oe_runmake install%s' % makeargs)
+ else:
+ func.append('# NOTE: unable to determine what to put here - there is a Makefile but no')
+ func.append('# target named "install", so you will need to define this yourself')
+ self.genfunction(lines_after, 'do_install', func)
+
+ handled.append('buildsystem')
+ else:
+ lines_after.append('# NOTE: no Makefile found, unable to determine what needs to be done')
+ lines_after.append('')
+ self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here'])
+ self.genfunction(lines_after, 'do_compile', ['# Specify compilation commands here'])
+ self.genfunction(lines_after, 'do_install', ['# Specify install commands here'])
+
+
+def plugin_init(pluginlist):
+ pass
+
+def register_recipe_handlers(handlers):
+ # These are in a specific order so that the right one is detected first
+ handlers.append(CmakeRecipeHandler())
+ handlers.append(AutotoolsRecipeHandler())
+ handlers.append(SconsRecipeHandler())
+ handlers.append(QmakeRecipeHandler())
+ handlers.append(MakefileRecipeHandler())
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py
new file mode 100644
index 0000000000..e7861268a5
--- /dev/null
+++ b/scripts/lib/scriptutils.py
@@ -0,0 +1,60 @@
+# Script utility functions
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import sys
+import os
+import logging
+import glob
+
+def logger_create(name):
+ logger = logging.getLogger(name)
+ loggerhandler = logging.StreamHandler()
+ loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
+ logger.addHandler(loggerhandler)
+ logger.setLevel(logging.INFO)
+ return logger
+
+def logger_setup_color(logger, color='auto'):
+ from bb.msg import BBLogFormatter
+ console = logging.StreamHandler(sys.stdout)
+ formatter = BBLogFormatter("%(levelname)s: %(message)s")
+ console.setFormatter(formatter)
+ logger.handlers = [console]
+ if color == 'always' or (color=='auto' and console.stream.isatty()):
+ formatter.enable_color()
+
+
+def load_plugins(logger, plugins, pluginpath):
+ import imp
+
+ def load_plugin(name):
+ logger.debug('Loading plugin %s' % name)
+ fp, pathname, description = imp.find_module(name, [pluginpath])
+ try:
+ return imp.load_module(name, fp, pathname, description)
+ finally:
+ if fp:
+ fp.close()
+
+ logger.debug('Loading plugins from %s...' % pluginpath)
+ for fn in glob.glob(os.path.join(pluginpath, '*.py')):
+ name = os.path.splitext(os.path.basename(fn))[0]
+ if name != '__init__':
+ plugin = load_plugin(name)
+ if hasattr(plugin, 'plugin_init'):
+ plugin.plugin_init(plugins)
+ plugins.append(plugin)
diff --git a/scripts/lib/wic/3rdparty/pykickstart/parser.py b/scripts/lib/wic/3rdparty/pykickstart/parser.py
index 840a448673..9c9674bf73 100644
--- a/scripts/lib/wic/3rdparty/pykickstart/parser.py
+++ b/scripts/lib/wic/3rdparty/pykickstart/parser.py
@@ -38,8 +38,6 @@ import sys
import tempfile
from copy import copy
from optparse import *
-from urlgrabber import urlread
-import urlgrabber.grabber as grabber
import constants
from errors import KickstartError, KickstartParseError, KickstartValueError, formatErrorMsg
@@ -55,87 +53,6 @@ STATE_COMMANDS = "commands"
ver = version.DEVEL
-def _preprocessStateMachine (lineIter):
- l = None
- lineno = 0
-
- # Now open an output kickstart file that we are going to write to one
- # line at a time.
- (outF, outName) = tempfile.mkstemp("-ks.cfg", "", "/tmp")
-
- while True:
- try:
- l = lineIter.next()
- except StopIteration:
- break
-
- # At the end of the file?
- if l == "":
- break
-
- lineno += 1
- url = None
-
- ll = l.strip()
- if not ll.startswith("%ksappend"):
- os.write(outF, l)
- continue
-
- # Try to pull down the remote file.
- try:
- ksurl = ll.split(' ')[1]
- except:
- raise KickstartParseError, formatErrorMsg(lineno, msg=_("Illegal url for %%ksappend: %s") % ll)
-
- try:
- url = grabber.urlopen(ksurl)
- except grabber.URLGrabError, e:
- raise KickstartError, formatErrorMsg(lineno, msg=_("Unable to open %%ksappend file: %s") % e.strerror)
- else:
- # Sanity check result. Sometimes FTP doesn't catch a file
- # is missing.
- try:
- if url.size < 1:
- raise KickstartError, formatErrorMsg(lineno, msg=_("Unable to open %%ksappend file"))
- except:
- raise KickstartError, formatErrorMsg(lineno, msg=_("Unable to open %%ksappend file"))
-
- # If that worked, write the remote file to the output kickstart
- # file in one burst. Then close everything up to get ready to
- # read ahead in the input file. This allows multiple %ksappend
- # lines to exist.
- if url is not None:
- os.write(outF, url.read())
- url.close()
-
- # All done - close the temp file and return its location.
- os.close(outF)
- return outName
-
-def preprocessFromString (s):
- """Preprocess the kickstart file, provided as the string str. This
- method is currently only useful for handling %ksappend lines,
- which need to be fetched before the real kickstart parser can be
- run. Returns the location of the complete kickstart file.
- """
- i = iter(s.splitlines(True) + [""])
- rc = _preprocessStateMachine (i.next)
- return rc
-
-def preprocessKickstart (f):
- """Preprocess the kickstart file, given by the filename file. This
- method is currently only useful for handling %ksappend lines,
- which need to be fetched before the real kickstart parser can be
- run. Returns the location of the complete kickstart file.
- """
- try:
- fh = urlopen(f)
- except grabber.URLGrabError, e:
- raise KickstartError, formatErrorMsg(0, msg=_("Unable to open input kickstart file: %s") % e.strerror)
-
- rc = _preprocessStateMachine (iter(fh.readlines()))
- fh.close()
- return rc
class PutBackIterator(Iterator):
def __init__(self, iterable):
@@ -682,8 +599,8 @@ class KickstartParser:
self.currentdir[self._includeDepth] = cd
try:
- s = urlread(f)
- except grabber.URLGrabError, e:
+ s = file(f).read()
+ except IOError, e:
raise KickstartError, formatErrorMsg(0, msg=_("Unable to open input kickstart file: %s") % e.strerror)
self.readKickstartFromString(s, reset=False)
diff --git a/scripts/lib/wic/3rdparty/pykickstart/version.py b/scripts/lib/wic/3rdparty/pykickstart/version.py
index 102cc37d80..8a8e6aad22 100644
--- a/scripts/lib/wic/3rdparty/pykickstart/version.py
+++ b/scripts/lib/wic/3rdparty/pykickstart/version.py
@@ -44,7 +44,6 @@ This module also exports several functions:
have a version= comment in it.
"""
import imputil, re, sys
-from urlgrabber import urlopen
import gettext
_ = lambda x: gettext.ldgettext("pykickstart", x)
@@ -132,34 +131,6 @@ def versionToString(version, skipDevel=False):
raise KickstartVersionError(_("Unsupported version specified: %s") % version)
-def versionFromFile(f):
- """Given a file or URL, look for a line starting with #version= and
- return the version number. If no version is found, return DEVEL.
- """
- v = DEVEL
-
- fh = urlopen(f)
-
- while True:
- try:
- l = fh.readline()
- except StopIteration:
- break
-
- # At the end of the file?
- if l == "":
- break
-
- if l.isspace() or l.strip() == "":
- continue
-
- if l[:9] == "#version=":
- v = stringToVersion(l[9:].rstrip())
- break
-
- fh.close()
- return v
-
def returnClassForVersion(version=DEVEL):
"""Return the class of the syntax handler for version. version can be
either a string or the matching constant. Raises KickstartValueError
diff --git a/scripts/lib/wic/conf.py b/scripts/lib/wic/conf.py
index d5419f8e94..be34355ce4 100644
--- a/scripts/lib/wic/conf.py
+++ b/scripts/lib/wic/conf.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2011 Intel, Inc.
#
diff --git a/scripts/lib/wic/creator.py b/scripts/lib/wic/creator.py
index a4b19ac6e0..2219377b38 100644
--- a/scripts/lib/wic/creator.py
+++ b/scripts/lib/wic/creator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2011 Intel, Inc.
#
diff --git a/scripts/lib/wic/imager/baseimager.py b/scripts/lib/wic/imager/baseimager.py
index 5bcd2f7529..e8305272a2 100644
--- a/scripts/lib/wic/imager/baseimager.py
+++ b/scripts/lib/wic/imager/baseimager.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2007 Red Hat Inc.
# Copyright (c) 2009, 2010, 2011 Intel, Inc.
diff --git a/scripts/lib/wic/imager/direct.py b/scripts/lib/wic/imager/direct.py
index 5b12856289..b1dc3e96f4 100644
--- a/scripts/lib/wic/imager/direct.py
+++ b/scripts/lib/wic/imager/direct.py
@@ -52,8 +52,7 @@ class DirectImageCreator(BaseImageCreator):
"""
def __init__(self, oe_builddir, image_output_dir, rootfs_dir, bootimg_dir,
- kernel_dir, native_sysroot, hdddir, staging_data_dir,
- creatoropts=None):
+ kernel_dir, native_sysroot, creatoropts=None):
"""
Initialize a DirectImageCreator instance.
@@ -74,8 +73,6 @@ class DirectImageCreator(BaseImageCreator):
self.bootimg_dir = bootimg_dir
self.kernel_dir = kernel_dir
self.native_sysroot = native_sysroot
- self.hdddir = hdddir
- self.staging_data_dir = staging_data_dir
def __write_fstab(self, image_rootfs):
"""overriden to generate fstab (temporarily) in rootfs. This is called
@@ -103,10 +100,18 @@ class DirectImageCreator(BaseImageCreator):
for num, p in enumerate(parts, 1):
if not p.mountpoint or p.mountpoint == "/" or p.mountpoint == "/boot":
continue
- if self._ptable_format == 'msdos' and num > 3:
- device_name = "/dev/" + p.disk + str(num + 1)
- else:
- device_name = "/dev/" + p.disk + str(num)
+
+ part = ''
+ # mmc device partitions are named mmcblk0p1, mmcblk0p2..
+ if p.disk.startswith('mmcblk'):
+ part = 'p'
+
+ pnum = num
+ if self._ptable_format == 'msdos' and pnum > 3:
+ # account for logical partition numbering, e.g. sda5..
+ pnum += 1
+
+ device_name = "/dev/" + p.disk + part + str(pnum)
opts = "defaults"
if p.fsopts:
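
The fstab hunk above derives each device node from the disk name, the partition table type and the partition number. A minimal standalone sketch of that naming rule (the helper name is illustrative, not a wic API; the mmcblk 'p' infix and the msdos logical-partition shift are the conventions described in the comments above):

    def fstab_device_name(disk, num, ptable_format):
        """Return the /dev node for partition 'num' (1-based) on 'disk'.

        mmc devices name their partitions mmcblk0p1, mmcblk0p2, ... so a
        'p' infix is needed; on an msdos table the first logical partition
        is number 5, so indexes above 3 are shifted by one.
        """
        part = 'p' if disk.startswith('mmcblk') else ''
        pnum = num
        if ptable_format == 'msdos' and pnum > 3:
            pnum += 1   # account for the extended partition, e.g. sda5
        return "/dev/" + disk + part + str(pnum)

    assert fstab_device_name('sda', 4, 'msdos') == '/dev/sda5'
    assert fstab_device_name('mmcblk0', 2, 'gpt') == '/dev/mmcblk0p2'
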
diff --git a/scripts/lib/wic/kickstart/__init__.py b/scripts/lib/wic/kickstart/__init__.py
index 4f5b778b5d..600098293a 100644
--- a/scripts/lib/wic/kickstart/__init__.py
+++ b/scripts/lib/wic/kickstart/__init__.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2007 Red Hat, Inc.
# Copyright (c) 2009, 2010, 2011 Intel, Inc.
diff --git a/scripts/lib/wic/kickstart/custom_commands/micboot.py b/scripts/lib/wic/kickstart/custom_commands/micboot.py
index 66d1678aa7..d162142506 100644
--- a/scripts/lib/wic/kickstart/custom_commands/micboot.py
+++ b/scripts/lib/wic/kickstart/custom_commands/micboot.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2008, 2009, 2010 Intel, Inc.
#
diff --git a/scripts/lib/wic/kickstart/custom_commands/micpartition.py b/scripts/lib/wic/kickstart/custom_commands/micpartition.py
index 59a87fb486..43d04f1294 100644
--- a/scripts/lib/wic/kickstart/custom_commands/micpartition.py
+++ b/scripts/lib/wic/kickstart/custom_commands/micpartition.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Marko Saukko <marko.saukko@cybercom.com>
#
diff --git a/scripts/lib/wic/kickstart/custom_commands/partition.py b/scripts/lib/wic/kickstart/custom_commands/partition.py
index abf3498134..54a494e033 100644
--- a/scripts/lib/wic/kickstart/custom_commands/partition.py
+++ b/scripts/lib/wic/kickstart/custom_commands/partition.py
@@ -184,7 +184,7 @@ class Wic_PartData(Mic_PartData):
Prepare content for a rootfs partition i.e. create a partition
and fill it from a /rootfs dir.
- Currently handles ext2/3/4 and btrfs.
+ Currently handles ext2/3/4, btrfs and vfat.
"""
pseudo = "export PSEUDO_PREFIX=%s/usr;" % native_sysroot
pseudo += "export PSEUDO_LOCALSTATEDIR=%s/../pseudo;" % rootfs_dir
@@ -229,6 +229,7 @@ class Wic_PartData(Mic_PartData):
extra_blocks = IMAGE_EXTRA_SPACE
rootfs_size = actual_rootfs_size + extra_blocks
+ rootfs_size *= IMAGE_OVERHEAD_FACTOR
msger.debug("Added %d extra blocks to %s to get to %d total blocks" % \
(extra_blocks, self.mountpoint, rootfs_size))
@@ -241,8 +242,10 @@ class Wic_PartData(Mic_PartData):
mkfs_cmd = "mkfs.%s -F %s %s -d %s" % \
(self.fstype, extra_imagecmd, rootfs, image_rootfs)
- exec_native_cmd(pseudo + mkfs_cmd, native_sysroot)
-
+ (rc, out) = exec_native_cmd(pseudo + mkfs_cmd, native_sysroot)
+ if rc:
+ print "rootfs_dir: %s" % rootfs_dir
+ msger.error("ERROR: mkfs.%s returned '%s' instead of 0 (which you probably don't want to ignore, use --debug for details) when creating filesystem from rootfs directory: %s" % (self.fstype, rc, rootfs_dir))
# get the rootfs size in the right units for kickstart (Mb)
du_cmd = "du -Lbms %s" % rootfs
@@ -274,6 +277,7 @@ class Wic_PartData(Mic_PartData):
extra_blocks = IMAGE_EXTRA_SPACE
rootfs_size = actual_rootfs_size + extra_blocks
+ rootfs_size *= IMAGE_OVERHEAD_FACTOR
msger.debug("Added %d extra blocks to %s to get to %d total blocks" % \
(extra_blocks, self.mountpoint, rootfs_size))
@@ -284,7 +288,9 @@ class Wic_PartData(Mic_PartData):
mkfs_cmd = "mkfs.%s -b %d -r %s %s" % \
(self.fstype, rootfs_size * 1024, image_rootfs, rootfs)
- exec_native_cmd(pseudo + mkfs_cmd, native_sysroot)
+ (rc, out) = exec_native_cmd(pseudo + mkfs_cmd, native_sysroot)
+ if rc:
+ msger.error("ERROR: mkfs.%s returned '%s' instead of 0 (which you probably don't want to ignore, use --debug for details) when creating filesystem from rootfs directory: %s" % (self.fstype, rc, rootfs_dir))
# get the rootfs size in the right units for kickstart (Mb)
du_cmd = "du -Lbms %s" % rootfs
@@ -308,8 +314,8 @@ class Wic_PartData(Mic_PartData):
extra_blocks = self.get_extra_block_count(blocks)
- if extra_blocks < BOOTDD_EXTRA_SPACE:
- extra_blocks = BOOTDD_EXTRA_SPACE
+ if extra_blocks < IMAGE_EXTRA_SPACE:
+ extra_blocks = IMAGE_EXTRA_SPACE
blocks += extra_blocks
@@ -318,9 +324,11 @@ class Wic_PartData(Mic_PartData):
# Ensure total sectors is an integral number of sectors per
# track or mcopy will complain. Sectors are 512 bytes, and we
- # generate images with 32 sectors per track. This calculation is
- # done in blocks, thus the mod by 16 instead of 32.
- blocks += (16 - (blocks % 16))
+ # generate images with 32 sectors per track. This calculation
+ # is done in blocks, thus the mod by 16 instead of 32. Apply
+ # sector count fix only when needed.
+ if blocks % 16 != 0:
+ blocks += (16 - (blocks % 16))
dosfs_cmd = "mkdosfs -n boot -S 512 -C %s %d" % (rootfs, blocks)
exec_native_cmd(dosfs_cmd, native_sysroot)
@@ -394,7 +402,9 @@ class Wic_PartData(Mic_PartData):
extra_imagecmd = "-i 8192"
mkfs_cmd = "mkfs.%s -F %s %s" % (self.fstype, extra_imagecmd, fs)
- exec_native_cmd(mkfs_cmd, native_sysroot)
+ (rc, out) = exec_native_cmd(mkfs_cmd, native_sysroot)
+ if rc:
+ msger.error("ERROR: mkfs.%s returned '%s' instead of 0 (which you probably don't want to ignore, use --debug for details)" % (self.fstype, rc))
self.source_file = fs
@@ -412,10 +422,14 @@ class Wic_PartData(Mic_PartData):
exec_cmd(dd_cmd)
mkfs_cmd = "mkfs.%s -b %d %s" % (self.fstype, self.size * 1024, rootfs)
- exec_native_cmd(mkfs_cmd, native_sysroot)
+ (rc, out) = exec_native_cmd(mkfs_cmd, native_sysroot)
+ if rc:
+ msger.error("ERROR: mkfs.%s returned '%s' instead of 0 (which you probably don't want to ignore, use --debug for details)" % (self.fstype, rc))
mkfs_cmd = "mkfs.%s -F %s %s" % (self.fstype, extra_imagecmd, fs)
- exec_native_cmd(mkfs_cmd, native_sysroot)
+ (rc, out) = exec_native_cmd(mkfs_cmd, native_sysroot)
+ if rc:
+ msger.error("ERROR: mkfs.%s returned '%s' instead of 0 (which you probably don't want to ignore, use --debug for details)" % (self.fstype, rc))
self.source_file = fs
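
The mkdosfs hunk above pads the block count only when it is not already a multiple of 16 one-kilobyte blocks (32 sectors of 512 bytes per track). A minimal sketch of that rounding under the same 32-sectors-per-track assumption; the function name is made up for illustration:

    def round_blocks_for_mcopy(blocks, blocks_per_track=16):
        """Round a 1k-block count up to a whole number of tracks.

        Sectors are 512 bytes and the images use 32 sectors per track,
        which is 16 blocks; mcopy complains if the total is not an
        integral number of tracks.  Counts that already line up are
        left untouched, which is the case the hunk fixes.
        """
        remainder = blocks % blocks_per_track
        if remainder:
            blocks += blocks_per_track - remainder
        return blocks

    assert round_blocks_for_mcopy(16) == 16   # already aligned, no padding
    assert round_blocks_for_mcopy(17) == 32
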
diff --git a/scripts/lib/wic/msger.py b/scripts/lib/wic/msger.py
index 9afc85be93..9f557e7b9a 100644
--- a/scripts/lib/wic/msger.py
+++ b/scripts/lib/wic/msger.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
# vim: ai ts=4 sts=4 et sw=4
#
# Copyright (c) 2009, 2010, 2011 Intel, Inc.
diff --git a/scripts/lib/wic/plugin.py b/scripts/lib/wic/plugin.py
index 61c5859bac..41a80175ca 100644
--- a/scripts/lib/wic/plugin.py
+++ b/scripts/lib/wic/plugin.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2011 Intel, Inc.
#
diff --git a/scripts/lib/wic/pluginbase.py b/scripts/lib/wic/pluginbase.py
index b8b3a46354..e3de9bacb8 100644
--- a/scripts/lib/wic/pluginbase.py
+++ b/scripts/lib/wic/pluginbase.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2011 Intel, Inc.
#
diff --git a/scripts/lib/wic/plugins/imager/direct_plugin.py b/scripts/lib/wic/plugins/imager/direct_plugin.py
index dabd6fc3e0..5601c3f1c9 100644
--- a/scripts/lib/wic/plugins/imager/direct_plugin.py
+++ b/scripts/lib/wic/plugins/imager/direct_plugin.py
@@ -58,21 +58,19 @@ class DirectPlugin(ImagerPlugin):
"""
Create direct image, called from creator as 'direct' cmd
"""
- if len(args) != 9:
+ if len(args) != 7:
raise errors.Usage("Extra arguments given")
- staging_data_dir = args[0]
- hdddir = args[1]
- native_sysroot = args[2]
- kernel_dir = args[3]
- bootimg_dir = args[4]
- rootfs_dir = args[5]
+ native_sysroot = args[0]
+ kernel_dir = args[1]
+ bootimg_dir = args[2]
+ rootfs_dir = args[3]
creatoropts = configmgr.create
- ksconf = args[6]
+ ksconf = args[4]
- image_output_dir = args[7]
- oe_builddir = args[8]
+ image_output_dir = args[5]
+ oe_builddir = args[6]
krootfs_dir = self.__rootfs_dir_to_dict(rootfs_dir)
@@ -84,8 +82,6 @@ class DirectPlugin(ImagerPlugin):
bootimg_dir,
kernel_dir,
native_sysroot,
- hdddir,
- staging_data_dir,
creatoropts)
try:
diff --git a/scripts/lib/wic/plugins/source/bootimg-efi.py b/scripts/lib/wic/plugins/source/bootimg-efi.py
index 855bbc2ce2..e4067b6dbf 100644
--- a/scripts/lib/wic/plugins/source/bootimg-efi.py
+++ b/scripts/lib/wic/plugins/source/bootimg-efi.py
@@ -173,7 +173,6 @@ class BootimgEFIPlugin(SourcePlugin):
cr.set_bootimg_dir(bootimg_dir)
staging_kernel_dir = kernel_dir
- staging_data_dir = bootimg_dir
hdddir = "%s/hdd/boot" % cr_workdir
@@ -185,12 +184,12 @@ class BootimgEFIPlugin(SourcePlugin):
if source_params['loader'] == 'grub-efi':
shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir,
"%s/grub.cfg" % cr_workdir)
- cp_cmd = "cp %s/EFI/BOOT/* %s/EFI/BOOT" % (staging_data_dir, hdddir)
+ cp_cmd = "cp %s/EFI/BOOT/* %s/EFI/BOOT" % (bootimg_dir, hdddir)
exec_cmd(cp_cmd, True)
shutil.move("%s/grub.cfg" % cr_workdir,
"%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir)
elif source_params['loader'] == 'gummiboot':
- cp_cmd = "cp %s/EFI/BOOT/* %s/EFI/BOOT" % (staging_data_dir, hdddir)
+ cp_cmd = "cp %s/EFI/BOOT/* %s/EFI/BOOT" % (bootimg_dir, hdddir)
exec_cmd(cp_cmd, True)
else:
msger.error("unrecognized bootimg-efi loader: %s" % source_params['loader'])
diff --git a/scripts/lib/wic/plugins/source/bootimg-partition.py b/scripts/lib/wic/plugins/source/bootimg-partition.py
new file mode 100644
index 0000000000..6ba39a01f7
--- /dev/null
+++ b/scripts/lib/wic/plugins/source/bootimg-partition.py
@@ -0,0 +1,138 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# DESCRIPTION
+# This implements the 'bootimg-partition' source plugin class for
+# 'wic'. The plugin creates an image of the boot partition, copying over
+# files listed in the IMAGE_BOOT_FILES bitbake variable.
+#
+# AUTHORS
+# Maciej Borzecki <maciej.borzecki (at) open-rnd.pl>
+#
+
+import os
+import re
+
+from wic import msger
+from wic.pluginbase import SourcePlugin
+from wic.utils.oe.misc import *
+from glob import glob
+
+class BootimgPartitionPlugin(SourcePlugin):
+ name = 'bootimg-partition'
+
+ @classmethod
+ def do_install_disk(self, disk, disk_name, cr, workdir, oe_builddir,
+ bootimg_dir, kernel_dir, native_sysroot):
+ """
+ Called after all partitions have been prepared and assembled into a
+ disk image. Do nothing.
+ """
+ pass
+
+ @classmethod
+ def do_configure_partition(self, part, source_params, cr, cr_workdir,
+ oe_builddir, bootimg_dir, kernel_dir,
+ native_sysroot):
+ """
+ Called before do_prepare_partition(). Possibly prepare
+ configuration files of some sort.
+
+ """
+ pass
+
+ @classmethod
+ def do_prepare_partition(self, part, source_params, cr, cr_workdir,
+ oe_builddir, bootimg_dir, kernel_dir,
+ rootfs_dir, native_sysroot):
+ """
+ Called to do the actual content population for a partition i.e. it
+ 'prepares' the partition to be incorporated into the image.
+ In this case, does the following:
+ - sets up a vfat partition
+ - copies all files listed in IMAGE_BOOT_FILES variable
+ """
+ hdddir = "%s/boot" % cr_workdir
+ rm_cmd = "rm -rf %s" % cr_workdir
+ exec_cmd(rm_cmd)
+
+ install_cmd = "install -d %s" % hdddir
+ exec_cmd(install_cmd)
+
+ if not bootimg_dir:
+ bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
+ if not bootimg_dir:
+ msger.error("Couldn't find DEPLOY_DIR_IMAGE, exiting\n")
+
+ msger.debug('Bootimg dir: %s' % bootimg_dir)
+
+ boot_files = get_bitbake_var("IMAGE_BOOT_FILES")
+
+ if not boot_files:
+ msger.error('No boot files defined, IMAGE_BOOT_FILES unset')
+
+ msger.debug('Boot files: %s' % boot_files)
+
+ # list of tuples (src_name, dst_name)
+ deploy_files = []
+ for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
+ if ';' in src_entry:
+ dst_entry = tuple(src_entry.split(';'))
+ if not dst_entry[0] or not dst_entry[1]:
+ msger.error('Malformed boot file entry: %s' % (src_entry))
+ else:
+ dst_entry = (src_entry, src_entry)
+
+ msger.debug('Destination entry: %r' % (dst_entry,))
+ deploy_files.append(dst_entry)
+
+ for deploy_entry in deploy_files:
+ src, dst = deploy_entry
+ install_task = []
+ if '*' in src:
+ # by default install files under their basename
+ entry_name_fn = os.path.basename
+ if dst != src:
+ # unless a target name was given, then treat name
+ # as a directory and append a basename
+ entry_name_fn = lambda name: \
+ os.path.join(dst,
+ os.path.basename(name))
+
+ srcs = glob(os.path.join(bootimg_dir, src))
+
+ msger.debug('Globbed sources: %s' % (', '.join(srcs)))
+ for entry in srcs:
+ entry_dst_name = entry_name_fn(entry)
+ install_task.append((entry,
+ os.path.join(hdddir,
+ entry_dst_name)))
+ else:
+ install_task = [(os.path.join(bootimg_dir, src),
+ os.path.join(hdddir, dst))]
+
+ for task in install_task:
+ src_path, dst_path = task
+ msger.debug('Install %s as %s' % (os.path.basename(src_path),
+ dst_path))
+ install_cmd = "install -m 0644 -D %s %s" \
+ % (src_path, dst_path)
+ exec_cmd(install_cmd)
+
+ msger.debug('Prepare boot partition using rootfs in %s' % (hdddir))
+ part.prepare_rootfs(cr_workdir, oe_builddir, hdddir,
+ native_sysroot)
+
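
The plugin turns IMAGE_BOOT_FILES into a list of (source, destination) pairs, with ';' separating a source path from an optional install name. A standalone sketch of just that parsing step, mirroring the regular expression used above (the helper name and error handling are illustrative):

    import re

    def parse_image_boot_files(boot_files):
        """Turn an IMAGE_BOOT_FILES value into (src, dst) tuples.

        Entries are whitespace separated; 'src;dst' renames the file on
        install, a bare 'src' keeps its own name, and '*' globs are
        allowed in the source part.
        """
        deploy_files = []
        for src_entry in re.findall(r'[\w;\-\./\*]+', boot_files):
            if ';' in src_entry:
                src, dst = src_entry.split(';', 1)
                if not src or not dst:
                    raise ValueError('Malformed boot file entry: %s' % src_entry)
                deploy_files.append((src, dst))
            else:
                deploy_files.append((src_entry, src_entry))
        return deploy_files

    print(parse_image_boot_files("u-boot.img MLO zImage;kernel"))
    # [('u-boot.img', 'u-boot.img'), ('MLO', 'MLO'), ('zImage', 'kernel')]
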
diff --git a/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/scripts/lib/wic/plugins/source/bootimg-pcbios.py
index aceed20428..8a1aca1ad1 100644
--- a/scripts/lib/wic/plugins/source/bootimg-pcbios.py
+++ b/scripts/lib/wic/plugins/source/bootimg-pcbios.py
@@ -127,15 +127,23 @@ class BootimgPcbiosPlugin(SourcePlugin):
'prepares' the partition to be incorporated into the image.
In this case, prepare content for legacy bios boot partition.
"""
- if not bootimg_dir:
+ def _has_syslinux(dir):
+ if dir:
+ syslinux = "%s/syslinux" % dir
+ if os.path.exists(syslinux):
+ return True
+ return False
+
+ if not _has_syslinux(bootimg_dir):
bootimg_dir = get_bitbake_var("STAGING_DATADIR")
if not bootimg_dir:
msger.error("Couldn't find STAGING_DATADIR, exiting\n")
+ if not _has_syslinux(bootimg_dir):
+ msger.error("Please build syslinux first\n")
# just so the result notes display it
cr.set_bootimg_dir(bootimg_dir)
staging_kernel_dir = kernel_dir
- staging_data_dir = bootimg_dir
hdddir = "%s/hdd/boot" % cr_workdir
@@ -144,7 +152,7 @@ class BootimgPcbiosPlugin(SourcePlugin):
exec_cmd(install_cmd)
install_cmd = "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" \
- % (staging_data_dir, hdddir)
+ % (bootimg_dir, hdddir)
exec_cmd(install_cmd)
du_cmd = "du -bks %s" % hdddir
diff --git a/scripts/lib/wic/utils/errors.py b/scripts/lib/wic/utils/errors.py
index 86e230ac19..9410311875 100644
--- a/scripts/lib/wic/utils/errors.py
+++ b/scripts/lib/wic/utils/errors.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2007 Red Hat, Inc.
# Copyright (c) 2011 Intel, Inc.
diff --git a/scripts/lib/wic/utils/fs_related.py b/scripts/lib/wic/utils/fs_related.py
index 79cc1d52a7..ea9f85c60f 100644
--- a/scripts/lib/wic/utils/fs_related.py
+++ b/scripts/lib/wic/utils/fs_related.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2007, Red Hat, Inc.
# Copyright (c) 2009, 2010, 2011 Intel, Inc.
diff --git a/scripts/lib/wic/utils/misc.py b/scripts/lib/wic/utils/misc.py
index 194b88f691..6e56316608 100644
--- a/scripts/lib/wic/utils/misc.py
+++ b/scripts/lib/wic/utils/misc.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2010, 2011 Intel Inc.
#
diff --git a/scripts/lib/wic/utils/oe/misc.py b/scripts/lib/wic/utils/oe/misc.py
index aa9b23582b..b0b5baab73 100644
--- a/scripts/lib/wic/utils/oe/misc.py
+++ b/scripts/lib/wic/utils/oe/misc.py
@@ -123,6 +123,7 @@ def add_wks_var(key, val):
BOOTDD_EXTRA_SPACE = 16384
IMAGE_EXTRA_SPACE = 10240
+IMAGE_OVERHEAD_FACTOR = 1.3
__bitbake_env_lines = ""
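
With the new constant, the rootfs partition size computed in the partition.py hunks above is the measured rootfs size plus IMAGE_EXTRA_SPACE, scaled by IMAGE_OVERHEAD_FACTOR. A minimal sketch of that calculation (the helper name is illustrative, not part of wic):

    IMAGE_EXTRA_SPACE = 10240        # extra 1k blocks added on top of the rootfs
    IMAGE_OVERHEAD_FACTOR = 1.3      # filesystem overhead multiplier

    def requested_rootfs_blocks(actual_rootfs_blocks):
        """Blocks to allocate for a rootfs partition, as in partition.py."""
        blocks = actual_rootfs_blocks + IMAGE_EXTRA_SPACE
        blocks *= IMAGE_OVERHEAD_FACTOR
        return int(blocks)

    # e.g. a 100000-block rootfs is allocated (100000 + 10240) * 1.3 blocks
    print(requested_rootfs_blocks(100000))   # 143312
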
diff --git a/scripts/lib/wic/utils/partitionedfs.py b/scripts/lib/wic/utils/partitionedfs.py
index 76aa42135b..fb95cc790e 100644
--- a/scripts/lib/wic/utils/partitionedfs.py
+++ b/scripts/lib/wic/utils/partitionedfs.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2009, 2010, 2011 Intel, Inc.
# Copyright (c) 2007, 2008 Red Hat, Inc.
@@ -155,9 +155,6 @@ class Image:
# Skip one sector required for the partitioning scheme overhead
d['offset'] += overhead
- # Steal few sectors from the first partition to offset for the
- # partitioning overhead
- p['size'] -= overhead
if p['align']:
# If not first partition and we do have alignment set we need
@@ -167,16 +164,19 @@ class Image:
# Calc how much the alignment is off.
align_sectors = d['offset'] % (p['align'] * 1024 / self.sector_size)
- # We need to move forward to the next alignment point
- align_sectors = (p['align'] * 1024 / self.sector_size) - align_sectors
- msger.debug("Realignment for %s%s with %s sectors, original"
- " offset %s, target alignment is %sK." %
- (p['disk_name'], d['numpart'], align_sectors,
- d['offset'], p['align']))
+ if align_sectors:
+ # If partition is not aligned as required, we need
+ # to move forward to the next alignment point
+ align_sectors = (p['align'] * 1024 / self.sector_size) - align_sectors
- # increase the offset so we actually start the partition on right alignment
- d['offset'] += align_sectors
+ msger.debug("Realignment for %s%s with %s sectors, original"
+ " offset %s, target alignment is %sK." %
+ (p['disk_name'], d['numpart'], align_sectors,
+ d['offset'], p['align']))
+
+ # increase the offset so we actually start the partition on right alignment
+ d['offset'] += align_sectors
p['start'] = d['offset']
d['offset'] += p['size']
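
The realignment hunk above only advances the offset when the partition start is not already on the requested boundary. A minimal sketch of that adjustment, with the alignment given in kilobytes and offsets in 512-byte sectors as in partitionedfs.py (the helper name is illustrative):

    def align_partition_start(offset, align_kb, sector_size=512):
        """Advance 'offset' (in sectors) to the next 'align_kb' boundary.

        Offsets that already satisfy the alignment are returned as-is,
        which is the behaviour the hunk introduces.
        """
        align_sectors = align_kb * 1024 // sector_size
        misalignment = offset % align_sectors
        if misalignment:
            offset += align_sectors - misalignment
        return offset

    assert align_partition_start(2048, 1024) == 2048   # already 1 MiB aligned
    assert align_partition_start(2049, 1024) == 4096
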
diff --git a/scripts/lib/wic/utils/runner.py b/scripts/lib/wic/utils/runner.py
index e740dad253..2ae9f417c5 100644
--- a/scripts/lib/wic/utils/runner.py
+++ b/scripts/lib/wic/utils/runner.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -tt
+#!/usr/bin/env python -tt
#
# Copyright (c) 2011 Intel, Inc.
#
diff --git a/scripts/oe-git-proxy b/scripts/oe-git-proxy
index 98191faadd..0ce7ed090e 100755
--- a/scripts/oe-git-proxy
+++ b/scripts/oe-git-proxy
@@ -121,7 +121,7 @@ if [ "$PROTO" = "socks" ]; then
if [ -z "$PORT" ]; then
PORT="1080"
fi
- METHOD="SOCKS4:$PROXY:$1:$2,socksport=$PORT"
+ METHOD="SOCKS4A:$PROXY:$1:$2,socksport=$PORT"
else
# Assume PROXY (http, https, etc)
if [ -z "$PORT" ]; then
diff --git a/scripts/pybootchartgui/pybootchartgui.py b/scripts/pybootchartgui/pybootchartgui.py
index 947ce10338..7ce1a5be40 100755
--- a/scripts/pybootchartgui/pybootchartgui.py
+++ b/scripts/pybootchartgui/pybootchartgui.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
#
# This file is part of pybootchartgui.
diff --git a/scripts/recipetool b/scripts/recipetool
new file mode 100755
index 0000000000..70e6b6c877
--- /dev/null
+++ b/scripts/recipetool
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+
+# Recipe creation tool
+#
+# Copyright (C) 2014 Intel Corporation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import sys
+import os
+import argparse
+import glob
+import logging
+
+scripts_path = os.path.dirname(os.path.realpath(__file__))
+lib_path = scripts_path + '/lib'
+sys.path = sys.path + [lib_path]
+import scriptutils
+logger = scriptutils.logger_create('recipetool')
+
+plugins = []
+
+def tinfoil_init():
+ import bb.tinfoil
+ import logging
+ tinfoil = bb.tinfoil.Tinfoil()
+ tinfoil.prepare(True)
+
+ for plugin in plugins:
+ if hasattr(plugin, 'tinfoil_init'):
+ plugin.tinfoil_init(tinfoil)
+ tinfoil.logger.setLevel(logging.WARNING)
+
+def main():
+
+ if not os.environ.get('BUILDDIR', ''):
+ logger.error("This script can only be run after initialising the build environment (e.g. by using oe-init-build-env)")
+ sys.exit(1)
+
+ parser = argparse.ArgumentParser(description="OpenEmbedded recipe tool",
+ epilog="Use %(prog)s <command> --help to get help on a specific command")
+ parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
+ parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
+ parser.add_argument('--color', help='Colorize output', choices=['auto', 'always', 'never'], default='auto')
+ subparsers = parser.add_subparsers()
+
+ scriptutils.load_plugins(logger, plugins, os.path.join(scripts_path, 'lib', 'recipetool'))
+ registered = False
+ for plugin in plugins:
+ if hasattr(plugin, 'register_command'):
+ registered = True
+ plugin.register_command(subparsers)
+
+ if not registered:
+ logger.error("No commands registered - missing plugins?")
+ sys.exit(1)
+
+ args = parser.parse_args()
+
+ if args.debug:
+ logger.setLevel(logging.DEBUG)
+ elif args.quiet:
+ logger.setLevel(logging.ERROR)
+
+ import scriptpath
+ bitbakepath = scriptpath.add_bitbake_lib_path()
+ if not bitbakepath:
+ logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
+ sys.exit(1)
+ logger.debug('Found bitbake path: %s' % bitbakepath)
+
+ scriptutils.logger_setup_color(logger, args.color)
+
+ tinfoil_init()
+
+ ret = args.func(args)
+
+ return ret
+
+
+if __name__ == "__main__":
+ try:
+ ret = main()
+ except Exception:
+ ret = 1
+ import traceback
+ traceback.print_exc(5)
+ sys.exit(ret)
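
recipetool discovers its subcommands by loading plugin modules from scripts/lib/recipetool and calling each plugin's register_command() hook; the parser stores the handler in args.func, which main() then invokes. A hypothetical minimal plugin, only to illustrate the shape of that interface (the 'hello' command, its module name and its arguments are made up):

    # scripts/lib/recipetool/hello.py (hypothetical example plugin)

    def hello_command(args):
        """Handler stored by argparse as args.func and called from main()."""
        print('hello from recipetool, recipe name: %s' % args.name)
        return 0

    def register_command(subparsers):
        """Hook recipetool looks for when loading plugins."""
        parser = subparsers.add_parser('hello', help='Example subcommand')
        parser.add_argument('name', help='Recipe name to greet')
        parser.set_defaults(func=hello_command)
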
diff --git a/scripts/runqemu b/scripts/runqemu
index ff64a1d4c2..da35bb80d2 100755
--- a/scripts/runqemu
+++ b/scripts/runqemu
@@ -108,7 +108,7 @@ process_filename() {
while true; do
arg=${1}
case "$arg" in
- "qemux86" | "qemux86-64" | "qemuarm" | "qemumips" | "qemumipsel" | \
+ "qemux86" | "qemux86-64" | "qemuarm" | "qemuarm64" | "qemumips" | "qemumipsel" | \
"qemumips64" | "qemush4" | "qemuppc" | "qemumicroblaze" | "qemuzynq")
[ -z "$MACHINE" ] && MACHINE=$arg || \
error "conflicting MACHINE types [$MACHINE] and [$arg]"
@@ -301,6 +301,9 @@ QEMUX86_64_DEFAULT_FSTYPE=ext3
QEMUARM_DEFAULT_KERNEL=zImage-qemuarm.bin
QEMUARM_DEFAULT_FSTYPE=ext3
+QEMUARM64_DEFAULT_KERNEL=Image-qemuarm64.bin
+QEMUARM64_DEFAULT_FSTYPE=ext3
+
QEMUMIPS_DEFAULT_KERNEL=vmlinux-qemumips.bin
QEMUMIPS_DEFAULT_FSTYPE=ext3
diff --git a/scripts/runqemu-internal b/scripts/runqemu-internal
index 38745dd126..5711bd3802 100755
--- a/scripts/runqemu-internal
+++ b/scripts/runqemu-internal
@@ -50,6 +50,9 @@ else
"qemuarm")
mem_size=128
;;
+ "qemuarm64")
+ mem_size=512
+ ;;
"qemumicroblaze")
mem_size=64
;;
@@ -260,8 +263,17 @@ else
DROOT="/dev/hda"
ROOTFS_OPTIONS="-hda $ROOTFS"
fi
+ if [ "$MACHINE" = "qemuarm64" ]; then
+ QEMU_NETWORK_CMD="-netdev tap,id=net0,ifname=$TAP,script=no,downscript=no -device virtio-net-device,netdev=net0 "
+ DROOT="/dev/vda"
+ ROOTFS_OPTIONS="-drive id=disk0,file=$ROOTFS -device virtio-blk-device,drive=disk0"
+ fi
+
KERNCMDLINE="mem=$QEMU_MEMORY"
QEMU_UI_OPTIONS="-show-cursor -usb -usbdevice wacom-tablet"
+ if [ $MACHINE = 'qemuarm64' ]; then
+ QEMU_UI_OPTIONS="-nographic"
+ fi
NFS_INSTANCE=`echo $TAP | sed 's/tap//'`
export NFS_INSTANCE
@@ -274,6 +286,7 @@ fi
case "$MACHINE" in
"qemuarm") ;;
+ "qemuarm64") ;;
"qemumicroblaze") ;;
"qemumips") ;;
"qemumipsel") ;;
@@ -362,6 +375,19 @@ if [ "$MACHINE" = "qemuarm" -o "$MACHINE" = "qemuarmv6" -o "$MACHINE" = "qemuarm
fi
fi
+if [ "$MACHINE" = "qemuarm64" ]; then
+ QEMU=qemu-system-aarch64
+
+ export QEMU_AUDIO_DRV="none"
+ QEMU_UI_OPTIONS="$QEMU_UI_OPTIONS"
+ if [ "${FSTYPE:0:3}" = "ext" -o "$FSTYPE" = "btrfs" ]; then
+ KERNCMDLINE="root=/dev/vda rw console=ttyAMA0,38400 mem=$QEMU_MEMORY highres=off $KERNEL_NETWORK_CMD"
+ # qemu-system-aarch64 only supports '-machine virt -cpu cortex-a57' for now
+ QEMUOPTIONS="$QEMU_NETWORK_CMD -machine virt -cpu cortex-a57 $ROOTFS_OPTIONS $QEMU_UI_OPTIONS"
+ fi
+fi
+
+
if [ "$MACHINE" = "qemux86" ]; then
QEMU=qemu-system-i386
if [ "$KVM_ACTIVE" = "yes" ]; then
diff --git a/scripts/wic b/scripts/wic
index 15cc9b31ef..e7df60f28e 100755
--- a/scripts/wic
+++ b/scripts/wic
@@ -131,11 +131,11 @@ def wic_create_subcommand(args, usage_str):
sys.exit(1)
set_bitbake_env_lines(bitbake_env_lines)
- bootimg_dir = staging_data_dir = hdddir = ""
+ bootimg_dir = ""
if options.image_name:
- (rootfs_dir, kernel_dir, hdddir, staging_data_dir, native_sysroot) = \
- find_artifacts(options.image_name)
+ (rootfs_dir, kernel_dir, bootimg_dir, native_sysroot) \
+ = find_artifacts(options.image_name)
wks_file = args[0]
@@ -172,8 +172,6 @@ def wic_create_subcommand(args, usage_str):
not_found = not_found_dir = ""
if not os.path.isdir(rootfs_dir):
(not_found, not_found_dir) = ("rootfs-dir", rootfs_dir)
- elif not os.path.isdir(hdddir) and not os.path.isdir(staging_data_dir):
- (not_found, not_found_dir) = ("bootimg-dir", bootimg_dir)
elif not os.path.isdir(kernel_dir):
(not_found, not_found_dir) = ("kernel-dir", kernel_dir)
elif not os.path.isdir(native_sysroot):
@@ -197,8 +195,8 @@ def wic_create_subcommand(args, usage_str):
rootfs_dir = rootfs_dir_to_args(krootfs_dir)
wic_create(args, wks_file, rootfs_dir, bootimg_dir, kernel_dir,
- native_sysroot, hdddir, staging_data_dir, scripts_path,
- image_output_dir, options.debug, options.properties_file)
+ native_sysroot, scripts_path, image_output_dir,
+ options.debug, options.properties_file)
def wic_list_subcommand(args, usage_str):