Radix cross Linux

The main Radix cross Linux repository contains the build scripts of packages, which have the most complete and common functionality for desktop machines

383 Commits   1 Branch   1 Tag
Index: radix-1.9/libs/mozjs/102.15.0/Makefile
===================================================================
--- radix-1.9/libs/mozjs/102.15.0/Makefile	(nonexistent)
+++ radix-1.9/libs/mozjs/102.15.0/Makefile	(revision 228)
@@ -0,0 +1,429 @@
+
+COMPONENT_TARGETS  = $(HARDWARE_INTEL_PC32)
+COMPONENT_TARGETS += $(HARDWARE_INTEL_PC64)
+COMPONENT_TARGETS += $(HARDWARE_EBOX_3350DX2)
+COMPONENT_TARGETS += $(HARDWARE_CB1X)
+COMPONENT_TARGETS += $(HARDWARE_CB2X)
+COMPONENT_TARGETS += $(HARDWARE_CB3X)
+COMPONENT_TARGETS += $(HARDWARE_ORANGE_PP2E)
+COMPONENT_TARGETS += $(HARDWARE_NANOPI_NEO)
+COMPONENT_TARGETS += $(HARDWARE_ORANGE_PP)
+COMPONENT_TARGETS += $(HARDWARE_ORANGE_PL2)
+COMPONENT_TARGETS += $(HARDWARE_ORANGE_PI5)
+COMPONENT_TARGETS += $(HARDWARE_ORANGE_PI5B)
+COMPONENT_TARGETS += $(HARDWARE_ORANGE_PI5P)
+COMPONENT_TARGETS += $(HARDWARE_ROCK_5B)
+COMPONENT_TARGETS += $(HARDWARE_WECHIP_TX6)
+COMPONENT_TARGETS += $(HARDWARE_REPKA_PI3)
+COMPONENT_TARGETS += $(HARDWARE_FFRK3288)
+COMPONENT_TARGETS += $(HARDWARE_POIN2)
+COMPONENT_TARGETS += $(HARDWARE_RK3328_CC)
+COMPONENT_TARGETS += $(HARDWARE_KHADAS_EDGE)
+COMPONENT_TARGETS += $(HARDWARE_LEEZ_P710)
+COMPONENT_TARGETS += $(HARDWARE_M201)
+COMPONENT_TARGETS += $(HARDWARE_MXV)
+COMPONENT_TARGETS += $(HARDWARE_P201)
+COMPONENT_TARGETS += $(HARDWARE_NEXBOX_A95X)
+COMPONENT_TARGETS += $(HARDWARE_ODROID_C2)
+COMPONENT_TARGETS += $(HARDWARE_P212)
+COMPONENT_TARGETS += $(HARDWARE_KHADAS_VIM)
+COMPONENT_TARGETS += $(HARDWARE_Q201)
+COMPONENT_TARGETS += $(HARDWARE_ENYBOX_X2)
+COMPONENT_TARGETS += $(HARDWARE_KHADAS_VIM2)
+COMPONENT_TARGETS += $(HARDWARE_NIT6Q)
+COMPONENT_TARGETS += $(HARDWARE_OKMX6DL_C)
+COMPONENT_TARGETS += $(HARDWARE_OKMX6Q_C)
+COMPONENT_TARGETS += $(HARDWARE_BONE_BLACK)
+COMPONENT_TARGETS += $(HARDWARE_OMAP5UEVM)
+COMPONENT_TARGETS += $(HARDWARE_DRA7XXEVM)
+COMPONENT_TARGETS += $(HARDWARE_CI20)
+COMPONENT_TARGETS += $(HARDWARE_BAIKAL_T1)
+COMPONENT_TARGETS += $(HARDWARE_BAIKAL_M1)
+COMPONENT_TARGETS += $(HARDWARE_S824L)
+COMPONENT_TARGETS += $(HARDWARE_VESNIN)
+COMPONENT_TARGETS += $(HARDWARE_S824L_LSB)
+COMPONENT_TARGETS += $(HARDWARE_VESNIN_LSB)
+COMPONENT_TARGETS += $(HARDWARE_TL2WK2)
+COMPONENT_TARGETS += $(HARDWARE_TL2SV2)
+COMPONENT_TARGETS += $(HARDWARE_TL2WK2_LSB)
+COMPONENT_TARGETS += $(HARDWARE_TL2SV2_LSB)
+COMPONENT_TARGETS += $(HARDWARE_VISIONFIVE2)
+COMPONENT_TARGETS += $(HARDWARE_SIFIVE_U740)
+
+
+NEED_ABS_PATH      = true
+COMPONENT_IS_3PP   = true
+
+
+include ../../../build-system/constants.mk
+
+
+SOURCE_REQUIRES    = sources/packages/x/mozjs
+
+REQUIRES           = libs/icu4c/73.1
+REQUIRES          += libs/readline/8.2
+REQUIRES          += libs/zlib/1.2.13
+
+# ======= __END_OF_REQUIRES__ =======
+
+
+version            = 102.15.0
+tar_xz_archive     = $(SRC_PACKAGE_PATH)/packages/x/mozjs/firefox-$(version)esr.source.tar.xz
+SRC_ARCHIVE        = $(tar_xz_archive)
+SRC_DIR            = $(TARGET_BUILD_DIR)/firefox-$(version)
+src_dir_name       = firefox-$(version)
+doc_dir_name       = mozjs-$(version)
+src_done           = $(TARGET_BUILD_DIR)/.source_done
+
+PATCHES = PATCHES
+
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_RISCV64_GLIBC)),)
+OPT_PATCHES = PATCHES.riscv64gc
+endif
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_I586_GLIBC) $(TOOLCHAIN_I686_GLIBC)),)
+OPT_PATCHES = PATCHES.x86
+endif
+
+build_dir          = $(TARGET_BUILD_DIR)/build
+build_target       = $(TARGET_BUILD_DIR)/.build_done
+install_target     = $(TARGET_BUILD_DIR)/.install_done
+
+
+####### Targets
+
+PKG_GROUP = libs
+
+#
+# *PKG_NAME & *PKG_VERSION shouldn't be a reference to value.
+#
+MOZJS_PKG_NAME                = mozjs
+MOZJS_PKG_VERSION             = 102.15.0
+MOZJS_PKG_ARCH                = $(PKGARCH)
+MOZJS_PKG_DISTRO_NAME         = $(DISTRO_NAME)
+MOZJS_PKG_DISTRO_VERSION      = $(DISTRO_VERSION)
+MOZJS_PKG_GROUP               = $(PKG_GROUP)
+###                              |---handy-ruler-------------------------------|
+MOZJS_PKG_SHORT_DESCRIPTION   = Mozilla JavaScript Engine
+MOZJS_PKG_URL                 = $(BUG_URL)
+MOZJS_PKG_LICENSE             = MPL
+MOZJS_PKG_DESCRIPTION_FILE    = $(TARGET_BUILD_DIR)/$(MOZJS_PKG_NAME)-pkg-description
+MOZJS_PKG_DESCRIPTION_FILE_IN = $(MOZJS_PKG_NAME)-pkg-description.in
+MOZJS_PKG_INSTALL_SCRIPT      = $(MOZJS_PKG_NAME)-pkg-install.sh
+
+MOZJS_PKG      = $(CURDIR)/$(TARGET_BUILD_DIR)/$(MOZJS_PKG_NAME)-package
+
+pkg_basename     = $(MOZJS_PKG_NAME)-$(MOZJS_PKG_VERSION)-$(MOZJS_PKG_ARCH)-$(MOZJS_PKG_DISTRO_NAME)-$(MOZJS_PKG_DISTRO_VERSION)
+
+pkg_archive      = $(TARGET_BUILD_DIR)/$(PKG_GROUP)/$(pkg_basename).$(pkg_arch_suffix)
+pkg_signature    = $(call sign-name,$(pkg_archive))
+pkg_description  = $(call desc-name,$(pkg_archive))
+products         = $(call pkg-files,$(pkg_archive))
+
+BUILD_TARGETS    = $(build_target)
+BUILD_TARGETS   += $(install_target)
+
+PRODUCT_TARGETS  = $(products)
+
+ROOTFS_TARGETS   = $(pkg_archive)
+
+
+include ../../../build-system/core.mk
+
+
+env_sysroot = DESTDIR=$(MOZJS_PKG)
+
+
+extra_configure_switches  = --libdir=/usr/lib$(LIBSUFFIX)
+
+extra_configure_switches += --enable-bootstrap
+extra_configure_switches += --enable-shared-js
+extra_configure_switches += --enable-optimize
+extra_configure_switches += --with-system-zlib
+extra_configure_switches += --enable-readline
+extra_configure_switches += --with-system-icu
+extra_configure_switches += --with-intl-api
+
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_A1X_GLIBC)    $(TOOLCHAIN_A2X_GLIBC)      \
+                             $(TOOLCHAIN_H3_GLIBC)     $(TOOLCHAIN_RK328X_GLIBC)   \
+                             $(TOOLCHAIN_S8XX_GLIBC)   $(TOOLCHAIN_IMX6_GLIBC)     \
+                             $(TOOLCHAIN_AM335X_GLIBC) $(TOOLCHAIN_OMAP543X_GLIBC)),)
+extra_configure_switches += --disable-cpp-rtti
+else
+extra_configure_switches += --enable-cpp-rtti
+endif
+
+extra_configure_switches += --disable-strip
+extra_configure_switches += --disable-install-strip
+extra_configure_switches += --disable-jemalloc
+extra_configure_switches += --disable-debug
+extra_configure_switches += --disable-tests
+
+#
+# NOTE:
+# ====
+#
+#  Before run this Makefile we have to setup RUST compiler
+#  and stable x86_64-unknown-linux-gnu toolchain with targets:
+#
+# Install RUST:
+# ------------
+# $ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+# $ source "$HOME/.cargo/env"
+# $ rustup update
+# $ mkdir $HOME/projects
+# $ ( cd $HOME/projects ; rustup override set stable )
+#
+# List available targets:
+# ----------------------
+# $ rustc --print target-list
+#
+# Add targets:
+# -----------
+# $ rustup target add arm-unknown-linux-gnueabihf
+# $ rustup target add armv7-unknown-linux-gnueabihf
+# $ rustup target add aarch64-unknown-linux-gnu
+# $ rustup target add mipsel-unknown-linux-gnu
+# $ rustup target add powerpc-unknown-linux-gnu
+# $ rustup target add powerpc64-unknown-linux-gnu
+# $ rustup target add powerpc64le-unknown-linux-gnu
+# $ rustup target add riscv64gc-unknown-linux-gnu
+# $ rustup target add i586-unknown-linux-gnu
+# $ rustup target add i686-unknown-linux-gnu
+# $ rustup target add x86_64-unknown-linux-gnu
+#
+# Rust Target Triplets:
+# ====================
+#
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_A1X_GLIBC)    $(TOOLCHAIN_A2X_GLIBC)    \
+                             $(TOOLCHAIN_H3_GLIBC)     $(TOOLCHAIN_RK328X_GLIBC) \
+                             $(TOOLCHAIN_S8XX_GLIBC)   $(TOOLCHAIN_IMX6_GLIBC)   \
+                             $(TOOLCHAIN_AM335X_GLIBC) $(TOOLCHAIN_OMAP543X_GLIBC)),)
+rust-target = armv7-unknown-linux-gnueabihf
+endif
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_H5_GLIBC)     $(TOOLCHAIN_RK33XX_GLIBC) \
+                             $(TOOLCHAIN_RK339X_GLIBC) $(TOOLCHAIN_RK358X_GLIBC) \
+                             $(TOOLCHAIN_S9XX_GLIBC)   \
+                             $(TOOLCHAIN_A311X_GLIBC)  $(TOOLCHAIN_M1000_GLIBC)),)
+rust-target = aarch64-unknown-linux-gnu
+endif
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_JZ47XX_GLIBC) $(TOOLCHAIN_P5600_GLIBC)),)
+rust-target = mipsel-unknown-linux-gnu
+endif
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_POWER8_GLIBC) $(TOOLCHAIN_POWER9_GLIBC)),)
+rust-target = powerpc64-unknown-linux-gnu
+endif
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_POWER8LE_GLIBC) $(TOOLCHAIN_POWER9LE_GLIBC)),)
+rust-target = powerpc64le-unknown-linux-gnu
+endif
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_RISCV64_GLIBC)),)
+rust-target = riscv64gc-unknown-linux-gnu
+endif
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_I586_GLIBC)),)
+rust-target = i586-unknown-linux-gnu
+endif
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_I686_GLIBC)),)
+rust-target = i686-unknown-linux-gnu
+endif
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_X86_64_GLIBC)),)
+rust-target = x86_64-unknown-linux-gnu
+endif
+
+#
+# RUST cannot be a cross compiler if HOST == TARGET, but
+# HOST tools should be run on build-machine
+#
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_X86_64_GLIBC)),)
+x86-64-dynamic-linker = -Wl,--dynamic-linker,/lib$(BUILD_MULTILIB_SUFFIX)/ld-linux-x86-64.so.2
+endif
+
+cpu-arch = $(word 1,$(subst -, ,$(rust-target)))
+
+rust-host = x86_64-unknown-linux-gnu
+
+gcc-version  = $(shell $(CC) -dumpversion)
+
+binding-flags  = --sysroot=$(TARGET_DEST_DIR)
+binding-flags += --target=$(TARGET)
+binding-flags += -I$(TARGET_DEST_DIR)/usr/include/c++/$(gcc-version)
+binding-flags += -I$(TARGET_DEST_DIR)/usr/include/c++/$(gcc-version)/$(TARGET)
+binding-flags += -I$(TARGET_DEST_DIR)/usr/include
+binding-flags += -I$(TARGET_DEST_DIR)/usr/lib/glib-2.0/include
+binding-flags += -I$(CURDIR)/$(SRC_DIR)/xpcom/string
+
+arch-flags = $(ARCH_FLAGS)
+
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_A1X_GLIBC) $(TOOLCHAIN_A2X_GLIBC)    \
+                             $(TOOLCHAIN_H3_GLIBC)  $(TOOLCHAIN_RK328X_GLIBC) \
+                             $(TOOLCHAIN_OMAP543X_GLIBC)),)
+arch-flags = $(shell echo $(ARCH_FLAGS) | sed 's,\(-mfpu\)=[^ ]*,\1=vfpv3-d16,')
+endif
+
+mozilla-link-flags  = --sysroot=$(TARGET_DEST_DIR) $(arch-flags)
+mozilla-link-flags += -Wl,--as-needed -Wl,--no-keep-memory
+mozilla-link-flags += -Wl,--stats -Wl,--reduce-memory-overheads
+
+suppres-c-warnings    = -Wno-deprecated-declarations -Wno-unused-result -Wno-maybe-uninitialized
+suppres-c-warnings   += -Wno-stringop-truncation -Wno-stringop-overflow -Wno-pointer-arith
+suppres-c-warnings   += -Wno-sign-compare -Wno-unused-but-set-variable -Wno-unused-value
+suppres-c-warnings   += -Wno-type-limits
+
+suppres-cxx-warnings  = -Wno-deprecated-declarations -Wno-class-memaccess -Wno-invalid-offsetof
+suppres-cxx-warnings += -Wno-array-bounds -Wno-stringop-truncation -Wno-stringop-overflow
+suppres-cxx-warnings += -Wno-dangling-pointer -Wno-use-after-free -Wno-return-local-addr
+suppres-cxx-warnings += -Wno-maybe-uninitialized -Wno-pointer-arith -Wno-sign-compare
+suppres-cxx-warnings += -Wno-unused-but-set-variable -Wno-restrict -Wno-alloc-size-larger-than
+suppres-cxx-warnings += -Wno-unused-value -Wno-narrowing -Wno-free-nonheap-object
+suppres-cxx-warnings += -Wno-return-type -Wno-stringop-overread
+
+extra-c-flags    = -fomit-frame-pointer -fno-delete-null-pointer-checks -fno-strict-aliasing -fno-tree-vrp
+extra-cxx-flags  = -fomit-frame-pointer -fno-delete-null-pointer-checks -fno-strict-aliasing -fno-tree-vrp
+
+extra_environment  = CC='$(CCACHE)$(CROSS_PREFIX)gcc --sysroot=$(TARGET_DEST_DIR)'
+extra_environment += CXX='$(CCACHE)$(CROSS_PREFIX)g++ --sysroot=$(TARGET_DEST_DIR)'
+extra_environment += OBJCOPY='$(CROSS_PREFIX)objcopy'
+extra_environment += AR='$(CROSS_PREFIX)ar'
+extra_environment += NM='$(CROSS_PREFIX)nm'
+extra_environment += CFLAGS='--sysroot=$(TARGET_DEST_DIR) -I$(TARGET_DEST_DIR)/usr/include -I$(TARGET_DEST_DIR)/usr/lib/glib-2.0/include  -I$(CURDIR)/$(SRC_DIR)/xpcom/string $(arch-flags) $(extra-c-flags) $(suppres-c-warnings)'
+extra_environment += CXXFLAGS='--sysroot=$(TARGET_DEST_DIR) -I$(TARGET_DEST_DIR)/usr/include -I$(TARGET_DEST_DIR)/usr/lib/glib-2.0/include  -I$(CURDIR)/$(SRC_DIR)/xpcom/string $(arch-flags) $(extra-cxx-flags) $(suppres-cxx-warnings)'
+extra_environment += LDFLAGS='--sysroot=$(TARGET_DEST_DIR) $(arch-flags) $(x86-64-dynamic-linker) -Wl,-rpath-link,$(TARGET_DEST_DIR)/usr/lib$(LIBSUFFIX) -Wl,--as-needed -Wl,--no-keep-memory -Wl,--stats -Wl,--reduce-memory-overheads'
+extra_environment += HOST_CC='/usr/bin/gcc'
+extra_environment += HOST_CXX='/usr/bin/g++'
+extra_environment += HOST_AR='/usr/bin/ar'
+extra_environment += HOST_CFLAGS='-I/usr/include'
+extra_environment += HOST_CXXFLAGS='-I/usr/include'
+extra_environment += HOST_LDFLAGS='-L/usr/lib$(BUILD_MULTILIB_SUFFIX)'
+extra_environment += OS_TARGET=Linux
+extra_environment += OS_ARCH=$(cpu-arch)
+extra_environment += CPU_ARCH=$(cpu-arch)
+extra_environment += TARGET_CPU=$(cpu-arch)
+extra_environment += TARGET_OS=Linux
+extra_environment += MOZILLA_OFFICIAL=1
+extra_environment += BUILD_OFFICIAL=1
+extra_environment += MOZ_PHOENIX=1
+extra_environment += MOZ_PACKAGE_JSSHELL=1
+extra_environment += BINDGEN_CFLAGS='$(binding-flags)'
+extra_environment += MOZ_LINK_FLAGS='$(mozilla-link-flags)'
+extra_environment += MOZBUILD_STATE_PATH='$(CURDIR)/$(build_dir)/tmp/firefox-$(version)/.mozbuild'
+
+cargo-executable = $(shell which cargo)
+
+extra_environment += RUST_TARGET=$(rust-target)
+extra_environment += RUSTFLAGS='-Awarnings'
+extra_environment += CARGO='$(cargo-executable)'
+extra_environment += CARGO_EXTRA_FLAGS='+stable-$(rust-host)'
+
+extra_environment += MOZ_CARGO_WRAP_HOST_LD='/usr/bin/g++'
+extra_environment += MOZ_CARGO_WRAP_HOST_LD_CXX='/usr/bin/g++'
+extra_environment += MOZ_CARGO_WRAP_HOST_LDFLAGS='-L/usr/lib$(BUILD_MULTILIB_SUFFIX)'
+
+
+####### Dependencies
+
+$(src_done): $(SRC_ARCHIVE) $(PATCHES_DEP)
+	$(UNPACK_SRC_ARCHIVE)
+	$(APPLY_PATCHES)
+	$(call apply-opt-patches, $(SRC_DIR))
+	@( cd $(SRC_DIR)/js/src ; \
+	   cat configure.in | sed '1,/^: "divert(0)dnl"$$/d' > configure ; \
+	   chmod a+x configure ; \
+	 )
+	@touch $@
+
+$(build_target): $(src_done)
+	@mkdir -p $(build_dir)/tmp
+	@cd $(build_dir) && \
+	  $(extra_environment) ../$(src_dir_name)/js/src/configure \
+	  --prefix=/usr               \
+	  --host=$(rust-host)         \
+	  --target=$(rust-target)     \
+	  $(extra_configure_switches)
+	@cd $(build_dir) && PYTHON3=/usr/bin/python3 $(MAKE)
+	@touch $@
+
+$(install_target): $(build_target)
+	@mkdir -p $(MOZJS_PKG)
+	@cd $(build_dir) && PYTHON3=/usr/bin/python3 $(MAKE) install $(env_sysroot)
+	# ======= Do not ship huge static library and config script =======
+	@rm -f $(MOZJS_PKG)/usr/bin/js102-config
+	@rm -f $(MOZJS_PKG)/usr/lib$(LIBSUFFIX)/libjs_static.*
+	@( cd $(MOZJS_PKG)/usr/lib$(LIBSUFFIX) ; \
+	   mv libmozjs-102.so libmozjs-102.so.0.0.0 ; \
+	   ln -sf libmozjs-102.so.0.0.0 libmozjs-102.so.0 ; \
+	   ln -sf libmozjs-102.so.0     libmozjs-102.so   ; \
+	 )
+	@chmod a-x $(MOZJS_PKG)/usr/lib$(LIBSUFFIX)/pkgconfig/*.pc
+	@chmod a-x $(MOZJS_PKG)/usr/include/mozjs-102/js-config.h
+ifneq ($(filter $(TOOLCHAIN),$(TOOLCHAIN_X86_64_GLIBC)),)
+ifneq ($(PATCHELF),)
+	# ======= Set Interpreter for x86_64 target binaries: =======
+	@( cd $(MOZJS_PKG)/usr/bin ; \
+	   for file in `find . | xargs file | grep "executable" | grep ELF | cut -f 1 -d : | xargs echo` ; do \
+	     $(PATCHELF) --set-interpreter /lib$(LIBSUFFIX)/ld-linux-x86-64.so.2 $$file 1> /dev/null 2> /dev/null ; \
+	   done ; \
+	 )
+endif
+endif
+	# ======= Install Documentation =======
+	@if [ -d $(MOZJS_PKG)/usr/share/man ]; then \
+	  ( cd $(MOZJS_PKG)/usr/share/man ; \
+	    for manpagedir in `find . -type d -name "man*"` ; do \
+	      ( cd $$manpagedir ; \
+	        for eachpage in `find . -maxdepth 1 -type l` ; do \
+	          ln -s `readlink $$eachpage`.gz $$eachpage.gz ; \
+	          rm $$eachpage ; \
+	        done ; \
+	        gzip -9 *.?  ; \
+	      ) \
+	    done \
+	  ) \
+	 fi
+	@mkdir -p $(MOZJS_PKG)/usr/doc/$(doc_dir_name)
+	@echo ""                                                          > $(MOZJS_PKG)/usr/doc/$(doc_dir_name)/LICENSE
+	@echo "See: https://www.mozilla.org/en-US/foundation/licensing/" >> $(MOZJS_PKG)/usr/doc/$(doc_dir_name)/LICENSE
+	@mkdir -p $(MOZJS_PKG)/usr/share/doc/$(doc_dir_name)
+	@cp $(MOZJS_PKG)/usr/doc/$(doc_dir_name)/LICENSE $(MOZJS_PKG)/usr/share/doc/$(doc_dir_name)
+	@echo ""                                         > $(MOZJS_PKG)/usr/share/doc/$(doc_dir_name)/README
+	@echo "SpiderMonkey: https://spidermonkey.dev/" >> $(MOZJS_PKG)/usr/share/doc/$(doc_dir_name)/README
+	@( cd $(SRC_DIR)/js/src ; \
+	   if [ -r ChangeLog -a -s ChangeLog ]; then \
+	     DOCSDIR=`echo $(MOZJS_PKG)/usr/share/doc/$(doc_dir_name)` ; \
+	     cat ChangeLog | head -n 1000 > $$DOCSDIR/ChangeLog ; \
+	     touch -r ChangeLog $$DOCSDIR/ChangeLog ; \
+	   fi \
+	 )
+	# ======= Install the same to $(TARGET_DEST_DIR) =======
+	$(call install-into-devenv, $(MOZJS_PKG))
+	# ======= tune pkg-config *.pc search path to the target destination for development =======
+	@( cd $(TARGET_DEST_DIR)/usr/lib$(LIBSUFFIX)/pkgconfig ; \
+	   sed -i "s,/usr,$(TARGET_DEST_DIR)/usr,g" mozjs-102.pc ; \
+	 )
+	# ======= Strip binaries =======
+	@( cd $(MOZJS_PKG) ; \
+	   find . | xargs file | grep "executable" | grep ELF | cut -f 1 -d : | xargs $(STRIP) --strip-unneeded 2> /dev/null ; \
+	   find . | xargs file | grep "shared object" | grep ELF | cut -f 1 -d : | xargs $(STRIP) --strip-unneeded 2> /dev/null ; \
+	 )
+	@touch $@
+
+$(MOZJS_PKG_DESCRIPTION_FILE): $(MOZJS_PKG_DESCRIPTION_FILE_IN)
+	@cat $< | $(SED) -e "s/@VERSION@/$(version)/g" > $@
+
+$(pkg_certificate) : $(pkg_archive) ;
+$(pkg_signature)   : $(pkg_archive) ;
+$(pkg_description) : $(pkg_archive) ;
+
+$(pkg_archive): $(install_target) $(MOZJS_PKG_DESCRIPTION_FILE) $(MOZJS_PKG_INSTALL_SCRIPT)
+	@cp $(MOZJS_PKG_DESCRIPTION_FILE) $(MOZJS_PKG)/.DESCRIPTION
+	@cp $(MOZJS_PKG_INSTALL_SCRIPT) $(MOZJS_PKG)/.INSTALL
+	@$(BUILD_PKG_REQUIRES) $(MOZJS_PKG)/.REQUIRES
+	@echo "pkgname=$(MOZJS_PKG_NAME)"                            >  $(MOZJS_PKG)/.PKGINFO ; \
+	 echo "pkgver=$(MOZJS_PKG_VERSION)"                          >> $(MOZJS_PKG)/.PKGINFO ; \
+	 echo "arch=$(MOZJS_PKG_ARCH)"                               >> $(MOZJS_PKG)/.PKGINFO ; \
+	 echo "distroname=$(MOZJS_PKG_DISTRO_NAME)"                  >> $(MOZJS_PKG)/.PKGINFO ; \
+	 echo "distrover=$(MOZJS_PKG_DISTRO_VERSION)"                >> $(MOZJS_PKG)/.PKGINFO ; \
+	 echo "group=$(MOZJS_PKG_GROUP)"                             >> $(MOZJS_PKG)/.PKGINFO ; \
+	 echo "short_description=\"$(MOZJS_PKG_SHORT_DESCRIPTION)\"" >> $(MOZJS_PKG)/.PKGINFO ; \
+	 echo "url=$(MOZJS_PKG_URL)"                                 >> $(MOZJS_PKG)/.PKGINFO ; \
+	 echo "license=$(MOZJS_PKG_LICENSE)"                         >> $(MOZJS_PKG)/.PKGINFO
+	@$(PSEUDO) sh -c "cd $(MOZJS_PKG) && \
+	                  chown -R root:root . && \
+	                  $(MAKE_PACKAGE) -J --linkadd=yes $(GNUPG_OPTIONS) -m -d .. ."
Index: radix-1.9/libs/mozjs/102.15.0/PATCHES
===================================================================
--- radix-1.9/libs/mozjs/102.15.0/PATCHES	(nonexistent)
+++ radix-1.9/libs/mozjs/102.15.0/PATCHES	(revision 228)
@@ -0,0 +1,10 @@
+
+../../../sources/packages/x/mozjs/patches/firefox-102.15.0-copy-headers.patch        -p0
+../../../sources/packages/x/mozjs/patches/firefox-102.15.0-emitter.patch             -p0
+../../../sources/packages/x/mozjs/patches/firefox-102.15.0-icu-sources.patch         -p0
+../../../sources/packages/x/mozjs/patches/firefox-102.15.0-init.patch                -p0
+../../../sources/packages/x/mozjs/patches/firefox-102.15.0-remove-sloppy-m4.patch    -p0
+../../../sources/packages/x/mozjs/patches/firefox-102.15.0-skip-failing-tests.patch  -p0
+../../../sources/packages/x/mozjs/patches/firefox-102.15.0-soname.patch              -p0
+../../../sources/packages/x/mozjs/patches/firefox-102.15.0-spidermonkey-checks.patch -p0
+../../../sources/packages/x/mozjs/patches/firefox-102.15.0-tests.patch               -p0
Index: radix-1.9/libs/mozjs/102.15.0/PATCHES.riscv64gc
===================================================================
--- radix-1.9/libs/mozjs/102.15.0/PATCHES.riscv64gc	(nonexistent)
+++ radix-1.9/libs/mozjs/102.15.0/PATCHES.riscv64gc	(revision 228)
@@ -0,0 +1,2 @@
+
+../../../sources/packages/x/mozjs/patches/firefox-102.15.0-riscv64gc.patch -p0
Index: radix-1.9/libs/mozjs/102.15.0/PATCHES.x86
===================================================================
--- radix-1.9/libs/mozjs/102.15.0/PATCHES.x86	(nonexistent)
+++ radix-1.9/libs/mozjs/102.15.0/PATCHES.x86	(revision 228)
@@ -0,0 +1,2 @@
+
+../../../sources/packages/x/mozjs/patches/firefox-102.15.0-x86.patch -p0
Index: radix-1.9/libs/mozjs/102.15.0/mozjs-pkg-description.in
===================================================================
--- radix-1.9/libs/mozjs/102.15.0/mozjs-pkg-description.in	(nonexistent)
+++ radix-1.9/libs/mozjs/102.15.0/mozjs-pkg-description.in	(revision 228)
@@ -0,0 +1,19 @@
+# HOW TO EDIT THIS FILE:
+# The "handy ruler" below makes it easier to edit a package description.  Line
+# up the first '|' above the ':' following the base package name, and the '|'
+# on the right side marks the last column you can put a character in.  You must
+# make exactly 11 lines for the formatting to be correct.  It's also
+# customary to leave one space after the ':'.
+
+     |-----handy-ruler------------------------------------------------------|
+mozjs: mozjs @VERSION@ (Mozilla JavaScript Engine)
+mozjs:
+mozjs: SpiderMonkey is Mozilla's JavaScript engine written in C/C++.
+mozjs: It is used in various Mozilla products (including Firefox) and
+mozjs: is available under MPL/GPL/LGPL tri-license.
+mozjs:
+mozjs:
+mozjs: Homepage: https://spidermonkey.dev
+mozjs:
+mozjs:
+mozjs:
Index: radix-1.9/libs/mozjs/102.15.0/mozjs-pkg-install.sh
===================================================================
--- radix-1.9/libs/mozjs/102.15.0/mozjs-pkg-install.sh	(nonexistent)
+++ radix-1.9/libs/mozjs/102.15.0/mozjs-pkg-install.sh	(revision 228)
@@ -0,0 +1,53 @@
+#!/bin/sh
+
+# Preserve new files
+install_file() {
+  NEW="$1"
+  OLD="`dirname $NEW`/`basename $NEW .new`"
+  # If there's no file by that name, mv it over:
+  if [ ! -r $OLD ]; then
+    mv $NEW $OLD
+  elif [ "`cat $OLD | md5sum`" = "`cat $NEW | md5sum`" ]; then # toss the redundant copy
+    rm $NEW
+  fi
+  # Otherwise, we leave the .new copy for the admin to consider...
+}
+
+
+# arg 1:  the new package version
+pre_install() {
+  /bin/true
+}
+
+# arg 1:  the new package version
+post_install() {
+  /bin/true
+}
+
+# arg 1:  the new package version
+# arg 2:  the old package version
+pre_update() {
+  /bin/true
+}
+
+# arg 1:  the new package version
+# arg 2:  the old package version
+post_update() {
+  post_install
+}
+
+# arg 1:  the old package version
+pre_remove() {
+  /bin/true
+}
+
+# arg 1:  the old package version
+post_remove() {
+  /bin/true
+}
+
+
+operation=$1
+shift
+
+$operation $*

Property changes on: radix-1.9/libs/mozjs/102.15.0/mozjs-pkg-install.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/libs/mozjs/102.15.0
===================================================================
--- radix-1.9/libs/mozjs/102.15.0	(nonexistent)
+++ radix-1.9/libs/mozjs/102.15.0	(revision 228)

Property changes on: radix-1.9/libs/mozjs/102.15.0
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,74 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.rk358x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~
Index: radix-1.9/products/base/Makefile
===================================================================
--- radix-1.9/products/base/Makefile	(revision 227)
+++ radix-1.9/products/base/Makefile	(revision 228)
@@ -907,7 +907,9 @@
 
 REQUIRES += libs/iso-codes/4.13.0
 
+REQUIRES += libs/mozjs/102.15.0
 
+
 #######
 ####### Development tools:
 #######
Index: radix-1.9/sources/packages/x/mozjs/Makefile
===================================================================
--- radix-1.9/sources/packages/x/mozjs/Makefile	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/Makefile	(revision 228)
@@ -0,0 +1,76 @@
+
+COMPONENT_TARGETS = $(HARDWARE_NOARCH)
+
+
+include ../../../../build-system/constants.mk
+
+
+url         = $(DOWNLOAD_SERVER)/sources/packages/x/mozjs
+
+versions    = 102.15.0
+pkgname     = firefox
+suffix      = esr.source.tar.xz
+
+tarballs    = $(addsuffix $(suffix), $(addprefix $(pkgname)-, $(versions)))
+sha1s       = $(addsuffix .sha1sum, $(tarballs))
+
+patches     = $(CURDIR)/patches/firefox-102.15.0-copy-headers.patch
+patches    += $(CURDIR)/patches/firefox-102.15.0-emitter.patch
+patches    += $(CURDIR)/patches/firefox-102.15.0-icu-sources.patch
+patches    += $(CURDIR)/patches/firefox-102.15.0-init.patch
+patches    += $(CURDIR)/patches/firefox-102.15.0-remove-sloppy-m4.patch
+patches    += $(CURDIR)/patches/firefox-102.15.0-riscv64gc.patch
+patches    += $(CURDIR)/patches/firefox-102.15.0-skip-failing-tests.patch
+patches    += $(CURDIR)/patches/firefox-102.15.0-soname.patch
+patches    += $(CURDIR)/patches/firefox-102.15.0-spidermonkey-checks.patch
+patches    += $(CURDIR)/patches/firefox-102.15.0-tests.patch
+patches    += $(CURDIR)/patches/firefox-102.15.0-x86.patch
+
+.NOTPARALLEL: $(patches)
+
+
+BUILD_TARGETS = $(tarballs) $(sha1s) $(patches)
+
+
+include ../../../../build-system/core.mk
+
+
+.PHONY: download_clean
+
+
+$(tarballs):
+	@echo -e "\n======= Downloading source tarballs =======" ; \
+	 for tarball in $(tarballs) ; do \
+	   echo "$(url)/$$tarball" | xargs -n 1 -P 100 wget $(WGET_OPTIONS) - & \
+	 done ; wait
+
+$(sha1s): $(tarballs)
+	@for sha in $@ ; do \
+	   echo -e "\n======= Downloading '$$sha' signature =======\n" ; \
+	   echo "$(url)/$$sha" | xargs -n 1 -P 100 wget $(WGET_OPTIONS) - & wait %1 ; \
+	   touch $$sha ; \
+	   echo -e "\n======= Check the '$$sha' sha1sum =======\n" ; \
+	   sha1sum --check $$sha ; ret="$$?" ; \
+	   if [ "$$ret" = "1" ]; then \
+	     echo -e "\n======= ERROR: Bad '$$sha' sha1sum =======\n" ; \
+	     exit 1 ; \
+	   fi ; \
+	 done
+
+$(patches): $(sha1s)
+	@echo -e "\n======= Create Patches =======\n" ; \
+	 ( cd create-102.15.0-copy-headers-patch        ; ./create.patch.sh ) ; \
+	 ( cd create-102.15.0-emitter-patch             ; ./create.patch.sh ) ; \
+	 ( cd create-102.15.0-icu-sources-patch         ; ./create.patch.sh ) ; \
+	 ( cd create-102.15.0-init-patch                ; ./create.patch.sh ) ; \
+	 ( cd create-102.15.0-remove-sloppy-m4-patch    ; ./create.patch.sh ) ; \
+	 ( cd create-102.15.0-riscv64gc-patch           ; ./create.patch.sh ) ; \
+	 ( cd create-102.15.0-skip-failing-tests-patch  ; ./create.patch.sh ) ; \
+	 ( cd create-102.15.0-soname-patch              ; ./create.patch.sh ) ; \
+	 ( cd create-102.15.0-spidermonkey-checks-patch ; ./create.patch.sh ) ; \
+	 ( cd create-102.15.0-tests-patch               ; ./create.patch.sh ) ; \
+	 ( cd create-102.15.0-x86-patch                 ; ./create.patch.sh ) ; \
+	 echo -e "\n"
+
+download_clean:
+	@rm -f $(tarballs) $(sha1s) $(patches)
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-copy-headers-patch/create.patch.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-copy-headers-patch/create.patch.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-copy-headers-patch/create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+VERSION=102.15.0
+
+tar --files-from=file.list -xJvf ../firefox-${VERSION}esr.source.tar.xz
+mv firefox-$VERSION firefox-$VERSION-orig
+
+cp -rf ./firefox-$VERSION-new ./firefox-$VERSION
+
+diff --unified -Nr  firefox-$VERSION-orig  firefox-$VERSION > firefox-$VERSION-copy-headers.patch
+
+mv firefox-$VERSION-copy-headers.patch ../patches
+
+rm -rf ./firefox-$VERSION
+rm -rf ./firefox-$VERSION-orig

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-copy-headers-patch/create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-copy-headers-patch/file.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-copy-headers-patch/file.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-copy-headers-patch/file.list	(revision 228)
@@ -0,0 +1 @@
+firefox-102.15.0/python/mozbuild/mozbuild/backend/recursivemake.py
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-copy-headers-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/backend/recursivemake.py
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-copy-headers-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/backend/recursivemake.py	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-copy-headers-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/backend/recursivemake.py	(revision 228)
@@ -0,0 +1,1905 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import io
+import logging
+import os
+import re
+import six
+
+from collections import defaultdict, namedtuple
+from itertools import chain
+from operator import itemgetter
+from six import StringIO
+
+from mozpack.manifests import InstallManifest
+import mozpack.path as mozpath
+
+from mozbuild import frontend
+from mozbuild.frontend.context import (
+    AbsolutePath,
+    Path,
+    RenamedSourcePath,
+    SourcePath,
+    ObjDirPath,
+)
+from .common import CommonBackend
+from .make import MakeBackend
+from ..frontend.data import (
+    BaseLibrary,
+    BaseProgram,
+    BaseRustLibrary,
+    ChromeManifestEntry,
+    ComputedFlags,
+    ConfigFileSubstitution,
+    ContextDerived,
+    Defines,
+    DirectoryTraversal,
+    ExternalLibrary,
+    FinalTargetFiles,
+    FinalTargetPreprocessedFiles,
+    GeneratedFile,
+    HostDefines,
+    HostLibrary,
+    HostProgram,
+    HostRustProgram,
+    HostSimpleProgram,
+    HostSources,
+    InstallationTarget,
+    JARManifest,
+    Linkable,
+    LocalInclude,
+    LocalizedFiles,
+    LocalizedPreprocessedFiles,
+    ObjdirFiles,
+    ObjdirPreprocessedFiles,
+    PerSourceFlag,
+    Program,
+    HostSharedLibrary,
+    RustProgram,
+    RustTests,
+    SandboxedWasmLibrary,
+    SharedLibrary,
+    SimpleProgram,
+    Sources,
+    StaticLibrary,
+    TestManifest,
+    VariablePassthru,
+    WasmSources,
+    XPIDLModule,
+)
+from ..util import ensureParentDir, FileAvoidWrite, OrderedDefaultDict, pairwise
+from ..makeutil import Makefile
+from mozbuild.shellutil import quote as shell_quote
+
# To protect against accidentally adding logic to Makefiles that belong in moz.build,
# we check if moz.build-like variables are defined in Makefiles. If they are, we throw
# an error to encourage the usage of moz.build instead.
_MOZBUILD_ONLY_VARIABLES = set(frontend.context.VARIABLES.keys()) - {
    # The migration to moz.build from Makefiles still isn't complete, and there's still
    # some straggling Makefile logic that uses variables that only moz.build should
    # use.
    # These remaining variables are excluded from our blacklist. As the variables here
    # are migrated from Makefiles in the future, they should be removed from this
    # "override" list.
    "XPI_NAME",
    "USE_EXTENSION_MANIFEST",
    "CFLAGS",
    "CXXFLAGS",
}

# Variables that no longer do anything. Defining any of these in a Makefile
# is an error (see DEPRECATED_VARIABLES_MESSAGE below).
DEPRECATED_VARIABLES = [
    "ALLOW_COMPILER_WARNINGS",
    "EXPORT_LIBRARY",
    "EXTRA_LIBS",
    "FAIL_ON_WARNINGS",
    "HOST_LIBS",
    "LIBXUL_LIBRARY",
    "MOCHITEST_A11Y_FILES",
    "MOCHITEST_BROWSER_FILES",
    "MOCHITEST_BROWSER_FILES_PARTS",
    "MOCHITEST_CHROME_FILES",
    "MOCHITEST_FILES",
    "MOCHITEST_FILES_PARTS",
    "MOCHITEST_METRO_FILES",
    "MOCHITEST_ROBOCOP_FILES",
    "MODULE_OPTIMIZE_FLAGS",
    "MOZ_CHROME_FILE_FORMAT",
    "SHORT_LIBNAME",
    "TESTING_JS_MODULES",
    "TESTING_JS_MODULE_DIR",
]

# Error text used when a _MOZBUILD_ONLY_VARIABLES name is found in a Makefile.
MOZBUILD_VARIABLES_MESSAGE = "It should only be defined in moz.build files."

# Error text used when a DEPRECATED_VARIABLES name is found in a Makefile.
DEPRECATED_VARIABLES_MESSAGE = (
    "This variable has been deprecated. It does nothing. It must be removed "
    "in order to build."
)
+
+
def make_quote(s):
    """Escape a string so it can be written literally into a Makefile.

    '#' starts a comment in make and '$' introduces a variable reference,
    so '#' is prefixed with a backslash and '$' is doubled.
    """
    # "\\#" replaces the original "\#": an unrecognized escape sequence that
    # only worked because Python preserves unknown escapes, and which raises
    # DeprecationWarning (SyntaxWarning on newer Pythons). Same string value.
    return s.replace("#", "\\#").replace("$", "$$")
+
+
class BackendMakeFile(object):
    """Represents a generated backend.mk file.

    This is both a wrapper around a file handle as well as a container that
    holds accumulated state.

    It's worth taking a moment to explain the make dependencies. The
    generated backend.mk as well as the Makefile.in (if it exists) are in the
    GLOBAL_DEPS list. This means that if one of them changes, all targets
    in that Makefile are invalidated. backend.mk also depends on all of its
    input files.

    It's worth considering the effect of file mtimes on build behavior.

    Since we perform an "all or none" traversal of moz.build files (the whole
    tree is scanned as opposed to individual files), if we were to blindly
    write backend.mk files, the net effect of updating a single mozbuild file
    in the tree is all backend.mk files have new mtimes. This would in turn
    invalidate all make targets across the whole tree! This would effectively
    undermine incremental builds as any mozbuild change would cause the entire
    tree to rebuild!

    The solution is to not update the mtimes of backend.mk files unless they
    actually change. We use FileAvoidWrite to accomplish this.
    """

    def __init__(self, srcdir, objdir, environment, topsrcdir, topobjdir, dry_run):
        self.topsrcdir = topsrcdir
        self.srcdir = srcdir
        self.objdir = objdir
        # Objdir path relative to the top object directory.
        self.relobjdir = mozpath.relpath(objdir, topobjdir)
        self.environment = environment
        self.name = mozpath.join(objdir, "backend.mk")

        # Set to "<module>.xpt" when this directory has an XPIDLModule; close()
        # emits extra non-recursive targets in that case.
        self.xpt_name = None

        # FileAvoidWrite only touches the file on content change, preserving
        # mtimes for incremental builds (see class docstring).
        self.fh = FileAvoidWrite(self.name, capture_diff=True, dry_run=dry_run)
        self.fh.write("# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT EDIT.\n")
        self.fh.write("\n")

    def write(self, buf):
        """Append raw text to the buffered backend.mk content."""
        self.fh.write(buf)

    def write_once(self, buf):
        """Append buf only if an identical line-prefixed chunk is not already
        present in the accumulated content (dedupes repeated includes)."""
        buf = six.ensure_text(buf)
        if "\n" + buf not in six.ensure_text(self.fh.getvalue()):
            self.write(buf)

    # For compatibility with makeutil.Makefile
    def add_statement(self, stmt):
        self.write("%s\n" % stmt)

    def close(self):
        """Finalize the file, emitting xpidl export targets if needed, and
        flush through FileAvoidWrite (no-op write when content is unchanged)."""
        if self.xpt_name:
            # We just recompile all xpidls because it's easier and less error
            # prone.
            self.fh.write("NONRECURSIVE_TARGETS += export\n")
            self.fh.write("NONRECURSIVE_TARGETS_export += xpidl\n")
            self.fh.write(
                "NONRECURSIVE_TARGETS_export_xpidl_DIRECTORY = "
                "$(DEPTH)/xpcom/xpidl\n"
            )
            self.fh.write("NONRECURSIVE_TARGETS_export_xpidl_TARGETS += " "export\n")

        return self.fh.close()

    @property
    def diff(self):
        # Diff captured by FileAvoidWrite (capture_diff=True above).
        return self.fh.diff
+
+
class RecursiveMakeTraversal(object):
    """
    Helper class to keep track of how the "traditional" recursive make backend
    recurses subdirectories. This is useful until all adhoc rules are removed
    from Makefiles.

    Each directory may have one or more types of subdirectories:
        - (normal) dirs
        - tests
    """

    SubDirectoryCategories = ["dirs", "tests"]
    SubDirectoriesTuple = namedtuple("SubDirectories", SubDirectoryCategories)

    class SubDirectories(SubDirectoriesTuple):
        def __new__(cls):
            # Each instance gets its own fresh, mutable category lists.
            # (Parameter renamed self -> cls: __new__ receives the class.)
            return RecursiveMakeTraversal.SubDirectoriesTuple.__new__(cls, [], [])

    def __init__(self):
        # Maps directory -> SubDirectories registered under it.
        self._traversal = {}
        # Every directory that appears as a subdirectory of another.
        self._attached = set()

    def add(self, dir, dirs=(), tests=()):
        """
        Adds a directory to traversal, registering its subdirectories,
        sorted by categories. If the directory was already added to
        traversal, adds the new subdirectories to the already known lists.

        Defaults are immutable tuples (avoiding the mutable-default-argument
        pitfall of the original ``dirs=[]``); any iterable is accepted.
        """
        subdirs = self._traversal.setdefault(dir, self.SubDirectories())
        for key, value in (("dirs", dirs), ("tests", tests)):
            assert key in self.SubDirectoryCategories
            # Callers give us generators
            value = list(value)
            getattr(subdirs, key).extend(value)
            self._attached |= set(value)

    @staticmethod
    def default_filter(current, subdirs):
        """
        Default filter for use with compute_dependencies and traverse.
        """
        return current, [], subdirs.dirs + subdirs.tests

    def call_filter(self, current, filter):
        """
        Helper function to call a filter from compute_dependencies and
        traverse.
        """
        return filter(current, self.get_subdirs(current))

    def compute_dependencies(self, filter=None):
        """
        Compute make dependencies corresponding to the registered directory
        traversal.

        filter is a function with the following signature:
            def filter(current, subdirs)
        where current is the directory being traversed, and subdirs the
        SubDirectories instance corresponding to it.
        The filter function returns a tuple (filtered_current, filtered_parallel,
        filtered_dirs) where filtered_current is either current or None if
        the current directory is to be skipped, and filtered_parallel and
        filtered_dirs are lists of parallel directories and sequential
        directories, which can be rearranged from whatever is given in the
        SubDirectories members.

        The default filter corresponds to a default recursive traversal.

        Returns a tuple (final_nodes, deps) where deps maps each visited
        directory to the tuple of directories it depends on.
        """
        filter = filter or self.default_filter

        deps = {}

        def recurse(start_node, prev_nodes=None):
            current, parallel, sequential = self.call_filter(start_node, filter)
            if current is not None:
                if start_node != "":
                    deps[start_node] = prev_nodes
                prev_nodes = (start_node,)
            if start_node not in self._traversal:
                return prev_nodes
            parallel_nodes = []
            for node in parallel:
                nodes = recurse(node, prev_nodes)
                if nodes and nodes != ("",):
                    parallel_nodes.extend(nodes)
            if parallel_nodes:
                prev_nodes = tuple(parallel_nodes)
            for dir in sequential:
                prev_nodes = recurse(dir, prev_nodes)
            return prev_nodes

        return recurse(""), deps

    def traverse(self, start, filter=None):
        """
        Iterate over the filtered subdirectories, following the traditional
        make traversal order.
        """
        if filter is None:
            filter = self.default_filter

        current, parallel, sequential = self.call_filter(start, filter)
        if current is not None:
            yield start
        if start not in self._traversal:
            return
        for node in parallel:
            for n in self.traverse(node, filter):
                yield n
        for dir in sequential:
            for d in self.traverse(dir, filter):
                yield d

    def get_subdirs(self, dir):
        """
        Returns all direct subdirectories under the given directory.

        For the root (""), directories that were registered but never attached
        as a subdirectory of anything else are appended (sorted) so they are
        not dropped from the traversal.
        """
        result = self._traversal.get(dir, self.SubDirectories())
        if dir == "":
            unattached = set(self._traversal) - self._attached - set([""])
            if unattached:
                new_result = self.SubDirectories()
                new_result.dirs.extend(result.dirs)
                new_result.dirs.extend(sorted(unattached))
                new_result.tests.extend(result.tests)
                result = new_result
        return result
+
+
+class RecursiveMakeBackend(MakeBackend):
+    """Backend that integrates with the existing recursive make build system.
+
+    This backend facilitates the transition from Makefile.in to moz.build
+    files.
+
+    This backend performs Makefile.in -> Makefile conversion. It also writes
+    out .mk files containing content derived from moz.build files. Both are
+    consumed by the recursive make builder.
+
+    This backend may eventually evolve to write out non-recursive make files.
+    However, as long as there are Makefile.in files in the tree, we are tied to
+    recursive make and thus will need this backend.
+    """
+
+    def _init(self):
+        MakeBackend._init(self)
+
+        self._backend_files = {}
+        self._idl_dirs = set()
+
+        self._makefile_in_count = 0
+        self._makefile_out_count = 0
+
+        self._test_manifests = {}
+
+        self.backend_input_files.add(
+            mozpath.join(self.environment.topobjdir, "config", "autoconf.mk")
+        )
+
+        self._install_manifests = defaultdict(InstallManifest)
+        # The build system relies on some install manifests always existing
+        # even if they are empty, because the directories are still filled
+        # by the build system itself, and the install manifests are only
+        # used for a "magic" rm -rf.
+        self._install_manifests["dist_public"]
+        self._install_manifests["dist_private"]
+
+        self._traversal = RecursiveMakeTraversal()
+        self._compile_graph = OrderedDefaultDict(set)
+        self._rust_targets = set()
+        self._rust_lib_targets = set()
+        self._gkrust_target = None
+        self._pre_compile = set()
+
+        self._no_skip = {
+            "pre-export": set(),
+            "export": set(),
+            "libs": set(),
+            "misc": set(),
+            "tools": set(),
+            "check": set(),
+            "syms": set(),
+        }
+
    def summary(self):
        """Extend the base backend summary with the count of Makefile.in ->
        Makefile conversions performed (see _makefile_in_count /
        _makefile_out_count)."""
        summary = super(RecursiveMakeBackend, self).summary()
        summary.extend(
            "; {makefile_in:d} -> {makefile_out:d} Makefile",
            makefile_in=self._makefile_in_count,
            makefile_out=self._makefile_out_count,
        )
        return summary
+
+    def _get_backend_file_for(self, obj):
+        # For generated files that we put in the export or misc tiers, we use the
+        # top-level backend file, except for localized files, which we need to keep
+        # in each directory for dependencies from jar manifests for l10n repacks.
+        if (
+            isinstance(obj, GeneratedFile)
+            and not obj.required_during_compile
+            and not obj.localized
+        ):
+            objdir = self.environment.topobjdir
+        else:
+            objdir = obj.objdir
+
+        if objdir not in self._backend_files:
+            self._backend_files[objdir] = BackendMakeFile(
+                obj.srcdir,
+                objdir,
+                obj.config,
+                obj.topsrcdir,
+                self.environment.topobjdir,
+                self.dry_run,
+            )
+        return self._backend_files[objdir]
+
    def consume_object(self, obj):
        """Write out build files necessary to build with recursive make.

        Returns True when the object was handled (by CommonBackend or by one
        of the type-specific branches below), False otherwise.
        """

        if not isinstance(obj, ContextDerived):
            return False

        backend_file = self._get_backend_file_for(obj)

        consumed = CommonBackend.consume_object(self, obj)

        # CommonBackend handles XPIDLModule, but we want to do
        # some extra things for them.
        if isinstance(obj, XPIDLModule):
            backend_file.xpt_name = "%s.xpt" % obj.name
            self._idl_dirs.add(obj.relobjdir)

        # If CommonBackend acknowledged the object, we're done with it.
        if consumed:
            return True

        # NOTE(review): assumes every non-Defines ContextDerived reaching this
        # point exposes a .defines attribute; the isinstance guard prevents
        # infinite recursion on the Defines objects themselves — confirm
        # against ContextDerived's definition in frontend.data.
        if not isinstance(obj, Defines):
            self.consume_object(obj.defines)

        if isinstance(obj, Linkable):
            self._process_test_support_file(obj)

        # Type dispatch. The elif ordering is load-bearing where the types are
        # related by inheritance (e.g. HostDefines is tested before Defines
        # below) — do not reorder casually.
        if isinstance(obj, DirectoryTraversal):
            self._process_directory_traversal(obj, backend_file)
        elif isinstance(obj, ConfigFileSubstitution):
            # Other ConfigFileSubstitution should have been acked by
            # CommonBackend.
            assert os.path.basename(obj.output_path) == "Makefile"
            self._create_makefile(obj)
        elif isinstance(obj, Sources):
            # Map each canonical source suffix to the make variable collecting
            # sources of that kind for the target build.
            suffix_map = {
                ".s": "ASFILES",
                ".c": "CSRCS",
                ".m": "CMSRCS",
                ".mm": "CMMSRCS",
                ".cpp": "CPPSRCS",
                ".S": "SSRCS",
            }
            variables = [suffix_map[obj.canonical_suffix]]
            # Static (srcdir) files are emitted relative to srcdir; generated
            # (objdir) files get the "!" objdir-path prefix.
            for files, base, cls, prefix in (
                (obj.static_files, backend_file.srcdir, SourcePath, ""),
                (obj.generated_files, backend_file.objdir, ObjDirPath, "!"),
            ):
                for f in sorted(files):
                    p = self._pretty_path(
                        cls(obj._context, prefix + mozpath.relpath(f, base)),
                        backend_file,
                    )
                    for var in variables:
                        backend_file.write("%s += %s\n" % (var, p))
            # Default-dict access for its side effect: registers the
            # target-objects node in the compile graph.
            self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")]
        elif isinstance(obj, HostSources):
            # Same scheme as Sources above, for host-compiled sources.
            suffix_map = {
                ".c": "HOST_CSRCS",
                ".mm": "HOST_CMMSRCS",
                ".cpp": "HOST_CPPSRCS",
            }
            variables = [suffix_map[obj.canonical_suffix]]
            for files, base, cls, prefix in (
                (obj.static_files, backend_file.srcdir, SourcePath, ""),
                (obj.generated_files, backend_file.objdir, ObjDirPath, "!"),
            ):
                for f in sorted(files):
                    p = self._pretty_path(
                        cls(obj._context, prefix + mozpath.relpath(f, base)),
                        backend_file,
                    )
                    for var in variables:
                        backend_file.write("%s += %s\n" % (var, p))
            self._compile_graph[mozpath.join(backend_file.relobjdir, "host-objects")]
        elif isinstance(obj, WasmSources):
            # Same scheme again, for sandboxed wasm sources.
            suffix_map = {".c": "WASM_CSRCS", ".cpp": "WASM_CPPSRCS"}
            variables = [suffix_map[obj.canonical_suffix]]
            for files, base, cls, prefix in (
                (obj.static_files, backend_file.srcdir, SourcePath, ""),
                (obj.generated_files, backend_file.objdir, ObjDirPath, "!"),
            ):
                for f in sorted(files):
                    p = self._pretty_path(
                        cls(obj._context, prefix + mozpath.relpath(f, base)),
                        backend_file,
                    )
                    for var in variables:
                        backend_file.write("%s += %s\n" % (var, p))
            self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")]
        elif isinstance(obj, VariablePassthru):
            # Sorted so output is consistent and we don't bump mtimes.
            for k, v in sorted(obj.variables.items()):
                if isinstance(v, list):
                    for item in v:
                        backend_file.write(
                            "%s += %s\n" % (k, make_quote(shell_quote(item)))
                        )
                elif isinstance(v, bool):
                    # Booleans become either "VAR := 1" or nothing at all.
                    if v:
                        backend_file.write("%s := 1\n" % k)
                elif isinstance(v, Path):
                    path = self._pretty_path(Path(obj._context, v), backend_file)
                    backend_file.write("%s := %s\n" % (k, path))
                else:
                    backend_file.write("%s := %s\n" % (k, v))
        elif isinstance(obj, HostDefines):
            self._process_defines(obj, backend_file, which="HOST_DEFINES")
        elif isinstance(obj, Defines):
            self._process_defines(obj, backend_file)

        elif isinstance(obj, GeneratedFile):
            # Pick the earliest tier the generated file is needed in.
            if obj.required_before_export:
                tier = "pre-export"
            elif obj.required_before_compile:
                tier = "export"
            elif obj.required_during_compile:
                tier = "pre-compile"
            else:
                tier = "misc"
            relobjdir = mozpath.relpath(obj.objdir, self.environment.topobjdir)
            if tier == "pre-compile":
                self._pre_compile.add(relobjdir)
            else:
                self._no_skip[tier].add(relobjdir)
            backend_file.write_once("include $(topsrcdir)/config/AB_rCD.mk\n")
            relobjdir = mozpath.relpath(obj.objdir, backend_file.objdir)
            # For generated files that we handle in the top-level backend file,
            # we want to have a `directory/tier` target depending on the file.
            # For the others, we want a `tier` target.
            if tier != "pre-compile" and relobjdir:
                tier = "%s/%s" % (relobjdir, tier)
            for stmt in self._format_statements_for_generated_file(
                obj, tier, extra_dependencies="backend.mk" if obj.flags else ""
            ):
                backend_file.write(stmt + "\n")

        elif isinstance(obj, JARManifest):
            self._no_skip["misc"].add(backend_file.relobjdir)
            backend_file.write("JAR_MANIFEST := %s\n" % obj.path.full_path)

        elif isinstance(obj, RustProgram):
            self._process_rust_program(obj, backend_file)
            # Hook the program into the compile graph.
            build_target = self._build_target_for_obj(obj)
            self._compile_graph[build_target]
            self._rust_targets.add(build_target)

        elif isinstance(obj, HostRustProgram):
            self._process_host_rust_program(obj, backend_file)
            # Hook the program into the compile graph.
            build_target = self._build_target_for_obj(obj)
            self._compile_graph[build_target]
            self._rust_targets.add(build_target)

        elif isinstance(obj, RustTests):
            self._process_rust_tests(obj, backend_file)

        elif isinstance(obj, Program):
            self._process_program(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)
            self._no_skip["syms"].add(backend_file.relobjdir)

        elif isinstance(obj, HostProgram):
            self._process_host_program(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, SimpleProgram):
            self._process_simple_program(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)
            self._no_skip["syms"].add(backend_file.relobjdir)

        elif isinstance(obj, HostSimpleProgram):
            self._process_host_simple_program(obj.program, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, LocalInclude):
            self._process_local_include(obj.path, backend_file)

        elif isinstance(obj, PerSourceFlag):
            self._process_per_source_flag(obj, backend_file)

        elif isinstance(obj, ComputedFlags):
            self._process_computed_flags(obj, backend_file)

        elif isinstance(obj, InstallationTarget):
            self._process_installation_target(obj, backend_file)

        elif isinstance(obj, BaseRustLibrary):
            self.backend_input_files.add(obj.cargo_file)
            self._process_rust_library(obj, backend_file)
            # No need to call _process_linked_libraries, because Rust
            # libraries are self-contained objects at this point.

            # Hook the library into the compile graph.
            build_target = self._build_target_for_obj(obj)
            self._compile_graph[build_target]
            self._rust_targets.add(build_target)
            self._rust_lib_targets.add(build_target)
            if obj.is_gkrust:
                self._gkrust_target = build_target

        elif isinstance(obj, SharedLibrary):
            self._process_shared_library(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)
            self._no_skip["syms"].add(backend_file.relobjdir)

        elif isinstance(obj, StaticLibrary):
            self._process_static_library(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, SandboxedWasmLibrary):
            self._process_sandboxed_wasm_library(obj, backend_file)
            self._no_skip["syms"].add(backend_file.relobjdir)

        elif isinstance(obj, HostLibrary):
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, HostSharedLibrary):
            self._process_host_shared_library(obj, backend_file)
            self._process_linked_libraries(obj, backend_file)

        elif isinstance(obj, ObjdirFiles):
            self._process_objdir_files(obj, obj.files, backend_file)

        elif isinstance(obj, ObjdirPreprocessedFiles):
            self._process_final_target_pp_files(
                obj, obj.files, backend_file, "OBJDIR_PP_FILES"
            )

        elif isinstance(obj, LocalizedFiles):
            self._process_localized_files(obj, obj.files, backend_file)

        elif isinstance(obj, LocalizedPreprocessedFiles):
            self._process_localized_pp_files(obj, obj.files, backend_file)

        elif isinstance(obj, FinalTargetFiles):
            self._process_final_target_files(obj, obj.files, backend_file)

        elif isinstance(obj, FinalTargetPreprocessedFiles):
            self._process_final_target_pp_files(
                obj, obj.files, backend_file, "DIST_FILES"
            )

        elif isinstance(obj, ChromeManifestEntry):
            self._process_chrome_manifest_entry(obj, backend_file)

        elif isinstance(obj, TestManifest):
            self._process_test_manifest(obj, backend_file)

        else:
            # Unknown object type: let another backend (or the caller) decide.
            return False

        return True
+
+    def _fill_root_mk(self):
+        """
+        Create two files, root.mk and root-deps.mk, the first containing
+        convenience variables, and the other dependency definitions for a
+        hopefully proper directory traversal.
+        """
+        for tier, no_skip in self._no_skip.items():
+            self.log(
+                logging.DEBUG,
+                "fill_root_mk",
+                {"number": len(no_skip), "tier": tier},
+                "Using {number} directories during {tier}",
+            )
+
+        def should_skip(tier, dir):
+            if tier in self._no_skip:
+                return dir not in self._no_skip[tier]
+            return False
+
+        # Traverse directories in parallel, and skip static dirs
+        def parallel_filter(current, subdirs):
+            all_subdirs = subdirs.dirs + subdirs.tests
+            if should_skip(tier, current) or current.startswith("subtiers/"):
+                current = None
+            return current, all_subdirs, []
+
+        # build everything in parallel, including static dirs
+        # Because of bug 925236 and possible other unknown race conditions,
+        # don't parallelize the libs tier.
+        def libs_filter(current, subdirs):
+            if should_skip("libs", current) or current.startswith("subtiers/"):
+                current = None
+            return current, [], subdirs.dirs + subdirs.tests
+
+        # Because of bug 925236 and possible other unknown race conditions,
+        # don't parallelize the tools tier. There aren't many directories for
+        # this tier anyways.
+        def tools_filter(current, subdirs):
+            if should_skip("tools", current) or current.startswith("subtiers/"):
+                current = None
+            return current, [], subdirs.dirs + subdirs.tests
+
+        filters = [
+            ("export", parallel_filter),
+            ("libs", libs_filter),
+            ("misc", parallel_filter),
+            ("tools", tools_filter),
+            ("check", parallel_filter),
+        ]
+
+        root_deps_mk = Makefile()
+
+        # Fill the dependencies for traversal of each tier.
+        for tier, filter in sorted(filters, key=itemgetter(0)):
+            main, all_deps = self._traversal.compute_dependencies(filter)
+            for dir, deps in sorted(all_deps.items()):
+                if deps is not None or (dir in self._idl_dirs and tier == "export"):
+                    rule = root_deps_mk.create_rule(["%s/%s" % (dir, tier)])
+                    if deps:
+                        rule.add_dependencies(
+                            "%s/%s" % (d, tier) for d in sorted(deps) if d
+                        )
+            rule = root_deps_mk.create_rule(["recurse_%s" % tier])
+            if main:
+                rule.add_dependencies("%s/%s" % (d, tier) for d in sorted(main))
+
+        rule = root_deps_mk.create_rule(["recurse_pre-compile"])
+        rule.add_dependencies("%s/pre-compile" % d for d in sorted(self._pre_compile))
+
+        targets_with_pre_compile = sorted(
+            t for t in self._compile_graph if mozpath.dirname(t) in self._pre_compile
+        )
+        for t in targets_with_pre_compile:
+            relobjdir = mozpath.dirname(t)
+            rule = root_deps_mk.create_rule([t])
+            rule.add_dependencies(["%s/pre-compile" % relobjdir])
+
+        all_compile_deps = (
+            six.moves.reduce(lambda x, y: x | y, self._compile_graph.values())
+            if self._compile_graph
+            else set()
+        )
+        # Include the following as dependencies of the top recursion target for
+        # compilation:
+        # - nodes that are not dependended upon by anything. Typically, this
+        #   would include programs, that need to be recursed, but that nothing
+        #   depends on.
+        # - nodes that have no dependencies of their own. Technically, this is
+        #   not necessary, because other things have dependencies on them, and
+        #   they all end up rooting to nodes from the above category. But the
+        #   way make works[1] is such that there can be benefits listing them
+        #   as direct dependencies of the top recursion target, to somehow
+        #   prioritize them.
+        #   1. See bug 1262241 comment 5.
+        compile_roots = [
+            t
+            for t, deps in six.iteritems(self._compile_graph)
+            if not deps or t not in all_compile_deps
+        ]
+
+        def add_category_rules(category, roots, graph):
+            rule = root_deps_mk.create_rule(["recurse_%s" % category])
+            # Directories containing rust compilations don't generally depend
+            # on other directories in the tree, so putting them first here will
+            # start them earlier in the build.
+            rust_roots = sorted(r for r in roots if r in self._rust_targets)
+            rust_libs = sorted(r for r in roots if r in self._rust_lib_targets)
+            if category == "compile" and rust_roots:
+                rust_rule = root_deps_mk.create_rule(["recurse_rust"])
+                rust_rule.add_dependencies(rust_roots)
+                # Ensure our cargo invocations are serialized, and gecko comes
+                # first. Cargo will lock on the build output directory anyway,
+                # so trying to run things in parallel is not useful. Dependencies
+                # for gecko are especially expensive to build and parallelize
+                # poorly, so prioritizing these will save some idle time in full
+                # builds.
+                for prior_target, target in pairwise(
+                    sorted(
+                        [t for t in rust_libs], key=lambda t: t != self._gkrust_target
+                    )
+                ):
+                    r = root_deps_mk.create_rule([target])
+                    r.add_dependencies([prior_target])
+
+            rule.add_dependencies(chain(rust_roots, sorted(roots)))
+            for target, deps in sorted(graph.items()):
+                if deps:
+                    rule = root_deps_mk.create_rule([target])
+                    rule.add_dependencies(sorted(deps))
+
+        non_default_roots = defaultdict(list)
+        non_default_graphs = defaultdict(lambda: OrderedDefaultDict(set))
+
+        for root in compile_roots:
+            # If this is a non-default target, separate the root from the
+            # rest of the compile graph.
+            target_name = mozpath.basename(root)
+
+            if target_name not in ("target", "target-objects", "host", "host-objects"):
+                non_default_roots[target_name].append(root)
+                non_default_graphs[target_name][root] = self._compile_graph[root]
+                del self._compile_graph[root]
+
+        for root in chain(*non_default_roots.values()):
+            compile_roots.remove(root)
+            dirname = mozpath.dirname(root)
+            # If a directory only contains non-default compile targets, we don't
+            # attempt to dump symbols there.
+            if (
+                dirname in self._no_skip["syms"]
+                and "%s/target" % dirname not in self._compile_graph
+            ):
+                self._no_skip["syms"].remove(dirname)
+
+        add_category_rules("compile", compile_roots, self._compile_graph)
+        for category, graph in sorted(six.iteritems(non_default_graphs)):
+            add_category_rules(category, non_default_roots[category], graph)
+
+        root_mk = Makefile()
+
+        # Fill root.mk with the convenience variables.
+        for tier, filter in filters:
+            all_dirs = self._traversal.traverse("", filter)
+            root_mk.add_statement("%s_dirs := %s" % (tier, " ".join(all_dirs)))
+
+        # Need a list of compile targets because we can't use pattern rules:
+        # https://savannah.gnu.org/bugs/index.php?42833
+        root_mk.add_statement(
+            "pre_compile_targets := %s"
+            % " ".join(sorted("%s/pre-compile" % p for p in self._pre_compile))
+        )
+        root_mk.add_statement(
+            "compile_targets := %s"
+            % " ".join(sorted(set(self._compile_graph.keys()) | all_compile_deps))
+        )
+        root_mk.add_statement(
+            "syms_targets := %s"
+            % " ".join(sorted(set("%s/syms" % d for d in self._no_skip["syms"])))
+        )
+        root_mk.add_statement(
+            "rust_targets := %s" % " ".join(sorted(self._rust_targets))
+        )
+
+        root_mk.add_statement(
+            "non_default_tiers := %s" % " ".join(sorted(non_default_roots.keys()))
+        )
+
+        for category, graphs in sorted(six.iteritems(non_default_graphs)):
+            category_dirs = [mozpath.dirname(target) for target in graphs.keys()]
+            root_mk.add_statement("%s_dirs := %s" % (category, " ".join(category_dirs)))
+
+        root_mk.add_statement("include root-deps.mk")
+
+        with self._write_file(
+            mozpath.join(self.environment.topobjdir, "root.mk")
+        ) as root:
+            root_mk.dump(root, removal_guard=False)
+
+        with self._write_file(
+            mozpath.join(self.environment.topobjdir, "root-deps.mk")
+        ) as root_deps:
+            root_deps_mk.dump(root_deps, removal_guard=False)
+
+    def _add_unified_build_rules(
+        self,
+        makefile,
+        unified_source_mapping,
+        unified_files_makefile_variable="unified_files",
+        include_curdir_build_rules=True,
+    ):
+
+        # In case it's a generator.
+        unified_source_mapping = sorted(unified_source_mapping)
+
+        explanation = (
+            "\n"
+            "# We build files in 'unified' mode by including several files\n"
+            "# together into a single source file.  This cuts down on\n"
+            "# compilation times and debug information size."
+        )
+        makefile.add_statement(explanation)
+
+        all_sources = " ".join(source for source, _ in unified_source_mapping)
+        makefile.add_statement(
+            "%s := %s" % (unified_files_makefile_variable, all_sources)
+        )
+
+        if include_curdir_build_rules:
+            makefile.add_statement(
+                "\n"
+                '# Make sometimes gets confused between "foo" and "$(CURDIR)/foo".\n'
+                "# Help it out by explicitly specifiying dependencies."
+            )
+            makefile.add_statement(
+                "all_absolute_unified_files := \\\n"
+                "  $(addprefix $(CURDIR)/,$(%s))" % unified_files_makefile_variable
+            )
+            rule = makefile.create_rule(["$(all_absolute_unified_files)"])
+            rule.add_dependencies(["$(CURDIR)/%: %"])
+
+    def _check_blacklisted_variables(self, makefile_in, makefile_content):
+        if "EXTERNALLY_MANAGED_MAKE_FILE" in makefile_content:
+            # Bypass the variable restrictions for externally managed makefiles.
+            return
+
+        for l in makefile_content.splitlines():
+            l = l.strip()
+            # Don't check comments
+            if l.startswith("#"):
+                continue
+            for x in chain(_MOZBUILD_ONLY_VARIABLES, DEPRECATED_VARIABLES):
+                if x not in l:
+                    continue
+
+                # Finding the variable name in the Makefile is not enough: it
+                # may just appear as part of something else, like DIRS appears
+                # in GENERATED_DIRS.
+                if re.search(r"\b%s\s*[:?+]?=" % x, l):
+                    if x in _MOZBUILD_ONLY_VARIABLES:
+                        message = MOZBUILD_VARIABLES_MESSAGE
+                    else:
+                        message = DEPRECATED_VARIABLES_MESSAGE
+                    raise Exception(
+                        "Variable %s is defined in %s. %s" % (x, makefile_in, message)
+                    )
+
+    def consume_finished(self):
+        CommonBackend.consume_finished(self)
+
+        for objdir, backend_file in sorted(self._backend_files.items()):
+            srcdir = backend_file.srcdir
+            with self._write_file(fh=backend_file) as bf:
+                makefile_in = mozpath.join(srcdir, "Makefile.in")
+                makefile = mozpath.join(objdir, "Makefile")
+
+                # If Makefile.in exists, use it as a template. Otherwise,
+                # create a stub.
+                stub = not os.path.exists(makefile_in)
+                if not stub:
+                    self.log(
+                        logging.DEBUG,
+                        "substitute_makefile",
+                        {"path": makefile},
+                        "Substituting makefile: {path}",
+                    )
+                    self._makefile_in_count += 1
+
+                    # In the export and libs tiers, we don't skip directories
+                    # containing a Makefile.in.
+                    # topobjdir is handled separatedly, don't do anything for
+                    # it.
+                    if bf.relobjdir:
+                        for tier in ("export", "libs"):
+                            self._no_skip[tier].add(bf.relobjdir)
+                else:
+                    self.log(
+                        logging.DEBUG,
+                        "stub_makefile",
+                        {"path": makefile},
+                        "Creating stub Makefile: {path}",
+                    )
+
+                obj = self.Substitution()
+                obj.output_path = makefile
+                obj.input_path = makefile_in
+                obj.topsrcdir = backend_file.topsrcdir
+                obj.topobjdir = bf.environment.topobjdir
+                obj.config = bf.environment
+                self._create_makefile(obj, stub=stub)
+                with io.open(obj.output_path, encoding="utf-8") as fh:
+                    content = fh.read()
+                    # Directories with a Makefile containing a tools target, or
+                    # XPI_PKGNAME can't be skipped and must run during the
+                    # 'tools' tier.
+                    for t in ("XPI_PKGNAME", "tools"):
+                        if t not in content:
+                            continue
+                        if t == "tools" and not re.search(
+                            "(?:^|\s)tools.*::", content, re.M
+                        ):
+                            continue
+                        if objdir == self.environment.topobjdir:
+                            continue
+                        self._no_skip["tools"].add(
+                            mozpath.relpath(objdir, self.environment.topobjdir)
+                        )
+
+                    # Directories with a Makefile containing a check target
+                    # can't be skipped and must run during the 'check' tier.
+                    if re.search("(?:^|\s)check.*::", content, re.M):
+                        self._no_skip["check"].add(
+                            mozpath.relpath(objdir, self.environment.topobjdir)
+                        )
+
+                    # Detect any Makefile.ins that contain variables on the
+                    # moz.build-only list
+                    self._check_blacklisted_variables(makefile_in, content)
+
+        self._fill_root_mk()
+
+        # Make the master test manifest files.
+        for flavor, t in self._test_manifests.items():
+            install_prefix, manifests = t
+            manifest_stem = mozpath.join(install_prefix, "%s.ini" % flavor)
+            self._write_master_test_manifest(
+                mozpath.join(self.environment.topobjdir, "_tests", manifest_stem),
+                manifests,
+            )
+
+            # Catch duplicate inserts.
+            try:
+                self._install_manifests["_tests"].add_optional_exists(manifest_stem)
+            except ValueError:
+                pass
+
+        self._write_manifests("install", self._install_manifests)
+
+        ensureParentDir(mozpath.join(self.environment.topobjdir, "dist", "foo"))
+
+    def _pretty_path_parts(self, path, backend_file):
+        assert isinstance(path, Path)
+        if isinstance(path, SourcePath):
+            if path.full_path.startswith(backend_file.srcdir):
+                return "$(srcdir)", path.full_path[len(backend_file.srcdir) :]
+            if path.full_path.startswith(backend_file.topsrcdir):
+                return "$(topsrcdir)", path.full_path[len(backend_file.topsrcdir) :]
+        elif isinstance(path, ObjDirPath):
+            if path.full_path.startswith(backend_file.objdir):
+                return "", path.full_path[len(backend_file.objdir) + 1 :]
+            if path.full_path.startswith(self.environment.topobjdir):
+                return "$(DEPTH)", path.full_path[len(self.environment.topobjdir) :]
+
+        return "", path.full_path
+
+    def _pretty_path(self, path, backend_file):
+        return "".join(self._pretty_path_parts(path, backend_file))
+
+    def _process_unified_sources(self, obj):
+        """Emit UNIFIED_*SRCS / *SRCS variables for a unified-sources object."""
+        backend_file = self._get_backend_file_for(obj)
+
+        # Map a canonical source suffix to its unified make variable.
+        suffix_map = {
+            ".c": "UNIFIED_CSRCS",
+            ".m": "UNIFIED_CMSRCS",
+            ".mm": "UNIFIED_CMMSRCS",
+            ".cpp": "UNIFIED_CPPSRCS",
+        }
+
+        var = suffix_map[obj.canonical_suffix]
+        # e.g. UNIFIED_CSRCS -> CSRCS.
+        non_unified_var = var[len("UNIFIED_") :]
+
+        if obj.have_unified_mapping:
+            self._add_unified_build_rules(
+                backend_file,
+                obj.unified_source_mapping,
+                unified_files_makefile_variable=var,
+                include_curdir_build_rules=False,
+            )
+            backend_file.write("%s += $(%s)\n" % (non_unified_var, var))
+        else:
+            # Sorted so output is consistent and we don't bump mtimes.
+            source_files = list(sorted(obj.files))
+
+            backend_file.write("%s += %s\n" % (non_unified_var, " ".join(source_files)))
+
+        # NOTE(review): bare subscript — _compile_graph appears to be a
+        # defaultdict, so this materializes the "target-objects" node for this
+        # directory without adding edges; presumably intentional, confirm
+        # before "fixing".
+        self._compile_graph[mozpath.join(backend_file.relobjdir, "target-objects")]
+
+    def _process_directory_traversal(self, obj, backend_file):
+        """Process a data.DirectoryTraversal instance."""
+        fh = backend_file.fh
+
+        def relativize(base, dirs):
+            return (mozpath.relpath(d.translated, base) for d in dirs)
+
+        if obj.dirs:
+            fh.write(
+                "DIRS := %s\n" % " ".join(relativize(backend_file.objdir, obj.dirs))
+            )
+            self._traversal.add(
+                backend_file.relobjdir,
+                dirs=relativize(self.environment.topobjdir, obj.dirs),
+            )
+
+        # The directory needs to be registered whether subdirectories have been
+        # registered or not.
+        self._traversal.add(backend_file.relobjdir)
+
+    def _process_defines(self, obj, backend_file, which="DEFINES"):
+        """Output the DEFINES rules to the given backend file."""
+        defines = list(obj.get_defines())
+        if defines:
+            defines = " ".join(shell_quote(d) for d in defines)
+            backend_file.write_once("%s += %s\n" % (which, defines))
+
+    def _process_installation_target(self, obj, backend_file):
+        # A few makefiles need to be able to override the following rules via
+        # make XPI_NAME=blah commands, so we default to the lazy evaluation as
+        # much as possible here to avoid breaking things.
+        if obj.xpiname:
+            backend_file.write("XPI_NAME = %s\n" % (obj.xpiname))
+        if obj.subdir:
+            backend_file.write("DIST_SUBDIR = %s\n" % (obj.subdir))
+        if obj.target and not obj.is_custom():
+            backend_file.write("FINAL_TARGET = $(DEPTH)/%s\n" % (obj.target))
+        else:
+            backend_file.write(
+                "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),"
+                "$(DIST)/bin)$(DIST_SUBDIR:%=/%)\n"
+            )
+
+        if not obj.enabled:
+            backend_file.write("NO_DIST_INSTALL := 1\n")
+
+    def _handle_idl_manager(self, manager):
+        """Emit the XPIDL build Makefile and register xpidl install manifests."""
+        build_files = self._install_manifests["xpidl"]
+
+        # Backend bookkeeping files that may exist in the xpidl objdir.
+        for p in ("Makefile", "backend.mk", ".deps/.mkdir.done"):
+            build_files.add_optional_exists(p)
+
+        for stem in manager.idl_stems():
+            self._install_manifests["dist_include"].add_optional_exists("%s.h" % stem)
+
+        for module in manager.modules:
+            build_files.add_optional_exists(mozpath.join(".deps", "%s.pp" % module))
+
+        modules = manager.modules
+        xpt_modules = sorted(modules.keys())
+
+        mk = Makefile()
+        all_directories = set()
+
+        for module_name in xpt_modules:
+            module = manager.modules[module_name]
+            all_directories |= module.directories
+            deps = sorted(module.idl_files)
+
+            # It may seem strange to have the .idl files listed as
+            # prerequisites both here and in the auto-generated .pp files.
+            # It is necessary to list them here to handle the case where a
+            # new .idl is added to an xpt. If we add a new .idl and nothing
+            # else has changed, the new .idl won't be referenced anywhere
+            # except in the command invocation. Therefore, the .xpt won't
+            # be rebuilt because the dependencies say it is up to date. By
+            # listing the .idls here, we ensure the make file has a
+            # reference to the new .idl. Since the new .idl presumably has
+            # an mtime newer than the .xpt, it will trigger xpt generation.
+
+            mk.add_statement("%s_deps := %s" % (module_name, " ".join(deps)))
+
+            build_files.add_optional_exists("%s.xpt" % module_name)
+
+        mk.add_statement("all_idl_dirs := %s" % " ".join(sorted(all_directories)))
+
+        # Serialize the generated rules into a string for the substitution.
+        rules = StringIO()
+        mk.dump(rules, removal_guard=False)
+
+        # Create dependency for output header so we force regeneration if the
+        # header was deleted. This ideally should not be necessary. However,
+        # some processes (such as PGO at the time this was implemented) wipe
+        # out dist/include without regard to our install manifests.
+
+        obj = self.Substitution()
+        obj.output_path = mozpath.join(
+            self.environment.topobjdir, "config", "makefiles", "xpidl", "Makefile"
+        )
+        obj.input_path = mozpath.join(
+            self.environment.topsrcdir, "config", "makefiles", "xpidl", "Makefile.in"
+        )
+        obj.topsrcdir = self.environment.topsrcdir
+        obj.topobjdir = self.environment.topobjdir
+        obj.config = self.environment
+        self._create_makefile(
+            obj,
+            extra=dict(
+                xpidl_rules=rules.getvalue(), xpidl_modules=" ".join(xpt_modules)
+            ),
+        )
+
+    def _process_program(self, obj, backend_file):
+        backend_file.write(
+            "PROGRAM = %s\n" % self._pretty_path(obj.output_path, backend_file)
+        )
+        if not obj.cxx_link and not self.environment.bin_suffix:
+            backend_file.write("PROG_IS_C_ONLY_%s := 1\n" % obj.program)
+
+    def _process_host_program(self, program, backend_file):
+        backend_file.write(
+            "HOST_PROGRAM = %s\n" % self._pretty_path(program.output_path, backend_file)
+        )
+
+    def _process_rust_program_base(
+        self, obj, backend_file, target_variable, target_cargo_variable
+    ):
+        """Shared backend.mk output for target and host Rust programs.
+
+        ``target_variable`` accumulates program locations and
+        ``target_cargo_variable`` accumulates cargo package names.
+        """
+        # CARGO_FILE/CARGO_TARGET_DIR are per-directory: emit at most once.
+        backend_file.write_once("CARGO_FILE := %s\n" % obj.cargo_file)
+        backend_file.write_once("CARGO_TARGET_DIR := .\n")
+        backend_file.write("%s += %s\n" % (target_variable, obj.location))
+        backend_file.write("%s += %s\n" % (target_cargo_variable, obj.name))
+
+    def _process_rust_program(self, obj, backend_file):
+        self._process_rust_program_base(
+            obj, backend_file, "RUST_PROGRAMS", "RUST_CARGO_PROGRAMS"
+        )
+
+    def _process_host_rust_program(self, obj, backend_file):
+        self._process_rust_program_base(
+            obj, backend_file, "HOST_RUST_PROGRAMS", "HOST_RUST_CARGO_PROGRAMS"
+        )
+
+    def _process_rust_tests(self, obj, backend_file):
+        """Emit variables/rules for a Rust tests object."""
+        if obj.config.substs.get("MOZ_RUST_TESTS"):
+            # If --enable-rust-tests has been set, run these as a part of
+            # make check.
+            self._no_skip["check"].add(backend_file.relobjdir)
+            backend_file.write("check:: force-cargo-test-run\n")
+        build_target = self._build_target_for_obj(obj)
+        # NOTE(review): bare subscript on what appears to be a defaultdict —
+        # it materializes the compile-graph node without adding edges.
+        self._compile_graph[build_target]
+        self._process_non_default_target(obj, "force-cargo-test-run", backend_file)
+        backend_file.write_once("CARGO_FILE := $(srcdir)/Cargo.toml\n")
+        backend_file.write_once("RUST_TESTS := %s\n" % " ".join(obj.names))
+        backend_file.write_once("RUST_TEST_FEATURES := %s\n" % " ".join(obj.features))
+
+    def _process_simple_program(self, obj, backend_file):
+        if obj.is_unit_test:
+            backend_file.write("CPP_UNIT_TESTS += %s\n" % obj.program)
+            assert obj.cxx_link
+        else:
+            backend_file.write("SIMPLE_PROGRAMS += %s\n" % obj.program)
+            if not obj.cxx_link and not self.environment.bin_suffix:
+                backend_file.write("PROG_IS_C_ONLY_%s := 1\n" % obj.program)
+
+    def _process_host_simple_program(self, program, backend_file):
+        backend_file.write("HOST_SIMPLE_PROGRAMS += %s\n" % program)
+
+    def _process_test_support_file(self, obj):
+        # Ensure test support programs and libraries are tracked by an
+        # install manifest for the benefit of the test packager.
+        if not obj.install_target.startswith("_tests"):
+            return
+
+        dest_basename = None
+        if isinstance(obj, BaseLibrary):
+            dest_basename = obj.lib_name
+        elif isinstance(obj, BaseProgram):
+            dest_basename = obj.program
+        if dest_basename is None:
+            return
+
+        self._install_manifests["_tests"].add_optional_exists(
+            mozpath.join(obj.install_target[len("_tests") + 1 :], dest_basename)
+        )
+
+    def _process_test_manifest(self, obj, backend_file):
+        """Register a test manifest: backend inputs, file installs and the
+        per-flavor master manifest entry."""
+        # Much of the logic in this function could be moved to CommonBackend.
+        for source in obj.source_relpaths:
+            self.backend_input_files.add(mozpath.join(obj.topsrcdir, source))
+
+        # Don't allow files to be defined multiple times unless it is allowed.
+        # We currently allow duplicates for non-test files or test files if
+        # the manifest is listed as a duplicate.
+        for source, (dest, is_test) in obj.installs.items():
+            try:
+                self._install_manifests["_test_files"].add_link(source, dest)
+            except ValueError:
+                if not obj.dupe_manifest and is_test:
+                    raise
+
+        for base, pattern, dest in obj.pattern_installs:
+            try:
+                self._install_manifests["_test_files"].add_pattern_link(
+                    base, pattern, dest
+                )
+            except ValueError:
+                if not obj.dupe_manifest:
+                    raise
+
+        for dest in obj.external_installs:
+            try:
+                self._install_manifests["_test_files"].add_optional_exists(dest)
+            except ValueError:
+                if not obj.dupe_manifest:
+                    raise
+
+        # Per flavor: (install_prefix, set of manifest objdir-relative paths).
+        m = self._test_manifests.setdefault(obj.flavor, (obj.install_prefix, set()))
+        m[1].add(obj.manifest_obj_relpath)
+
+        try:
+            from reftest import ReftestManifest
+
+            if isinstance(obj.manifest, ReftestManifest):
+                # Mark included files as part of the build backend so changes
+                # result in re-config.
+                self.backend_input_files |= obj.manifest.manifests
+        except ImportError:
+            # Ignore errors caused by the reftest module not being present.
+            # This can happen when building SpiderMonkey standalone, for example.
+            pass
+
+    def _process_local_include(self, local_include, backend_file):
+        """Append a LOCAL_INCLUDES -I entry for ``local_include``."""
+        d, path = self._pretty_path_parts(local_include, backend_file)
+        if isinstance(local_include, ObjDirPath) and not d:
+            # path doesn't start with a slash in this case
+            d = "$(CURDIR)/"
+        elif d == "$(DEPTH)":
+            d = "$(topobjdir)"
+        # If shell quoting changed the path, splice the directory prefix just
+        # inside the opening quote character so the whole -I argument remains
+        # a single quoted token.
+        quoted_path = shell_quote(path) if path else path
+        if quoted_path != path:
+            path = quoted_path[0] + d + quoted_path[1:]
+        else:
+            path = d + path
+        backend_file.write("LOCAL_INCLUDES += -I%s\n" % path)
+
+    def _process_per_source_flag(self, per_source_flag, backend_file):
+        for flag in per_source_flag.flags:
+            backend_file.write(
+                "%s_FLAGS += %s\n" % (mozpath.basename(per_source_flag.file_name), flag)
+            )
+
+    def _process_computed_flags(self, computed_flags, backend_file):
+        for var, flags in computed_flags.get_flags():
+            backend_file.write(
+                "COMPUTED_%s += %s\n"
+                % (var, " ".join(make_quote(shell_quote(f)) for f in flags))
+            )
+
+    def _process_non_default_target(self, libdef, target_name, backend_file):
+        backend_file.write("%s:: %s\n" % (libdef.output_category, target_name))
+        backend_file.write("MOZBUILD_NON_DEFAULT_TARGETS += %s\n" % target_name)
+
+    def _process_shared_library(self, libdef, backend_file):
+        backend_file.write_once("LIBRARY_NAME := %s\n" % libdef.basename)
+        backend_file.write("FORCE_SHARED_LIB := 1\n")
+        backend_file.write("IMPORT_LIBRARY := %s\n" % libdef.import_name)
+        backend_file.write("SHARED_LIBRARY := %s\n" % libdef.lib_name)
+        if libdef.soname:
+            backend_file.write("DSO_SONAME := %s\n" % libdef.soname)
+        if libdef.symbols_file:
+            if libdef.symbols_link_arg:
+                backend_file.write("EXTRA_DSO_LDOPTS += %s\n" % libdef.symbols_link_arg)
+        if not libdef.cxx_link:
+            backend_file.write("LIB_IS_C_ONLY := 1\n")
+        if libdef.output_category:
+            self._process_non_default_target(libdef, libdef.lib_name, backend_file)
+            # Override the install rule target for this library. This is hacky,
+            # but can go away as soon as we start building libraries in their
+            # final location (bug 1459764).
+            backend_file.write("SHARED_LIBRARY_TARGET := %s\n" % libdef.output_category)
+
+    def _process_static_library(self, libdef, backend_file):
+        """Write the variables describing a static library build."""
+        backend_file.write_once("LIBRARY_NAME := %s\n" % libdef.basename)
+        backend_file.write("FORCE_STATIC_LIB := 1\n")
+        backend_file.write("REAL_LIBRARY := %s\n" % libdef.lib_name)
+        if libdef.no_expand_lib:
+            # The archive is linked as-is instead of being expanded into its
+            # member objects.
+            backend_file.write("NO_EXPAND_LIBS := 1\n")
+
+    def _process_sandboxed_wasm_library(self, libdef, backend_file):
+        """Write the archive name for a sandboxed WASM library."""
+        backend_file.write("WASM_ARCHIVE := %s\n" % libdef.basename)
+
+    def _process_rust_library(self, libdef, backend_file):
+        backend_file.write_once(
+            "%s := %s\n" % (libdef.LIB_FILE_VAR, libdef.import_name)
+        )
+        backend_file.write_once("CARGO_FILE := $(srcdir)/Cargo.toml\n")
+        # Need to normalize the path so Cargo sees the same paths from all
+        # possible invocations of Cargo with this CARGO_TARGET_DIR.  Otherwise,
+        # Cargo's dependency calculations don't work as we expect and we wind
+        # up recompiling lots of things.
+        target_dir = mozpath.normpath(backend_file.environment.topobjdir)
+        backend_file.write("CARGO_TARGET_DIR := %s\n" % target_dir)
+        if libdef.features:
+            backend_file.write(
+                "%s := %s\n" % (libdef.FEATURES_VAR, " ".join(libdef.features))
+            )
+        if libdef.output_category:
+            self._process_non_default_target(libdef, libdef.import_name, backend_file)
+
+    def _process_host_shared_library(self, libdef, backend_file):
+        """Write HOST_SHARED_LIBRARY for a host shared library."""
+        backend_file.write("HOST_SHARED_LIBRARY = %s\n" % libdef.lib_name)
+
+    def _build_target_for_obj(self, obj):
+        if hasattr(obj, "output_category") and obj.output_category:
+            target_name = obj.output_category
+        else:
+            target_name = obj.KIND
+        if target_name == "wasm":
+            target_name = "target"
+        return "%s/%s" % (
+            mozpath.relpath(obj.objdir, self.environment.topobjdir),
+            target_name,
+        )
+
+    def _process_linked_libraries(self, obj, backend_file):
+        """Emit *_OBJS, SHARED_LIBS, STATIC_LIBS/HOST_LIBS and OS_LIBS for a
+        linkable, and record its edges in the compile graph."""
+        def pretty_relpath(lib, name):
+            # Path of lib's file ``name``, relative to obj's objdir.
+            return os.path.normpath(
+                mozpath.join(mozpath.relpath(lib.objdir, obj.objdir), name)
+            )
+
+        objs, shared_libs, os_libs, static_libs = self._expand_libs(obj)
+
+        obj_target = obj.name
+        if isinstance(obj, Program):
+            obj_target = self._pretty_path(obj.output_path, backend_file)
+
+        objs_ref = " \\\n    ".join(os.path.relpath(o, obj.objdir) for o in objs)
+        # Don't bother with a list file if we're only linking objects built
+        # in this directory or building a real static library. This
+        # accommodates clang-plugin, where we would otherwise pass an
+        # incorrect list file format to the host compiler as well as when
+        # creating an archive with AR, which doesn't understand list files.
+        # NOTE: Python precedence makes this condition
+        #   (objs == obj.objs and not isinstance(..., non-expandable kinds))
+        #   or (isinstance(..., (StaticLibrary, SandboxedWasmLibrary))
+        #       and obj.no_expand_lib)
+        if (
+            objs == obj.objs
+            and not isinstance(obj, (HostLibrary, StaticLibrary, SandboxedWasmLibrary))
+            or isinstance(obj, (StaticLibrary, SandboxedWasmLibrary))
+            and obj.no_expand_lib
+        ):
+            backend_file.write_once("%s_OBJS := %s\n" % (obj.name, objs_ref))
+            backend_file.write("%s: %s\n" % (obj_target, objs_ref))
+        elif not isinstance(obj, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)):
+            list_file_path = "%s.list" % obj.name.replace(".", "_")
+            list_file_ref = self._make_list_file(
+                obj.KIND, obj.objdir, objs, list_file_path
+            )
+            backend_file.write_once("%s_OBJS := %s\n" % (obj.name, list_file_ref))
+            backend_file.write_once("%s: %s\n" % (obj_target, list_file_path))
+            backend_file.write("%s: %s\n" % (obj_target, objs_ref))
+
+        for lib in shared_libs:
+            assert obj.KIND != "host" and obj.KIND != "wasm"
+            backend_file.write_once(
+                "SHARED_LIBS += %s\n" % pretty_relpath(lib, lib.import_name)
+            )
+
+        # We have to link any Rust libraries after all intermediate static
+        # libraries have been listed to ensure that the Rust libraries are
+        # searched after the C/C++ objects that might reference Rust symbols.
+        var = "HOST_LIBS" if obj.KIND == "host" else "STATIC_LIBS"
+        for lib in chain(
+            (l for l in static_libs if not isinstance(l, BaseRustLibrary)),
+            (l for l in static_libs if isinstance(l, BaseRustLibrary)),
+        ):
+            backend_file.write_once(
+                "%s += %s\n" % (var, pretty_relpath(lib, lib.import_name))
+            )
+
+        for lib in os_libs:
+            if obj.KIND == "target":
+                backend_file.write_once("OS_LIBS += %s\n" % lib)
+            elif obj.KIND == "host":
+                backend_file.write_once("HOST_EXTRA_LIBS += %s\n" % lib)
+
+        if not isinstance(obj, (StaticLibrary, HostLibrary)) or obj.no_expand_lib:
+            # This will create the node even if there aren't any linked libraries.
+            build_target = self._build_target_for_obj(obj)
+            self._compile_graph[build_target]
+
+            # Make the build target depend on all the target/host-objects that
+            # recursively are linked into it.
+            def recurse_libraries(obj):
+                for lib in obj.linked_libraries:
+                    if (
+                        isinstance(lib, (StaticLibrary, HostLibrary))
+                        and not lib.no_expand_lib
+                    ):
+                        recurse_libraries(lib)
+                    elif not isinstance(lib, ExternalLibrary):
+                        self._compile_graph[build_target].add(
+                            self._build_target_for_obj(lib)
+                        )
+                relobjdir = mozpath.relpath(obj.objdir, self.environment.topobjdir)
+                objects_target = mozpath.join(relobjdir, "%s-objects" % obj.KIND)
+                if objects_target in self._compile_graph:
+                    self._compile_graph[build_target].add(objects_target)
+
+            recurse_libraries(obj)
+
+        # Process library-based defines
+        self._process_defines(obj.lib_defines, backend_file)
+
+    def _add_install_target(self, backend_file, install_target, tier, dest, files):
+        self._no_skip[tier].add(backend_file.relobjdir)
+        for f in files:
+            backend_file.write("%s_FILES += %s\n" % (install_target, f))
+        backend_file.write("%s_DEST := %s\n" % (install_target, dest))
+        backend_file.write("%s_TARGET := %s\n" % (install_target, tier))
+        backend_file.write("INSTALL_TARGETS += %s\n" % install_target)
+
+    def _process_final_target_files(self, obj, files, backend_file):
+        target = obj.install_target
+        path = mozpath.basedir(
+            target, ("dist/bin", "dist/xpi-stage", "_tests", "dist/include")
+        )
+        if not path:
+            raise Exception("Cannot install to " + target)
+
+        # Exports are not interesting to artifact builds.
+        if path == "dist/include" and self.environment.is_artifact_build:
+            return
+
+        manifest = path.replace("/", "_")
+        install_manifest = self._install_manifests[manifest]
+        reltarget = mozpath.relpath(target, path)
+
+        for path, files in files.walk():
+            target_var = (mozpath.join(target, path) if path else target).replace(
+                "/", "_"
+            )
+            # We don't necessarily want to combine these, because non-wildcard
+            # absolute files tend to be libraries, and we don't want to mix
+            # those in with objdir headers that will be installed during export.
+            # (See bug 1642882 for details.)
+            objdir_files = []
+            absolute_files = []
+
+            for f in files:
+                assert not isinstance(f, RenamedSourcePath)
+                dest_dir = mozpath.join(reltarget, path)
+                dest_file = mozpath.join(dest_dir, f.target_basename)
+                if not isinstance(f, ObjDirPath):
+                    if "*" in f:
+                        if f.startswith("/") or isinstance(f, AbsolutePath):
+                            basepath, wild = os.path.split(f.full_path)
+                            if "*" in basepath:
+                                raise Exception(
+                                    "Wildcards are only supported in the filename part"
+                                    " of srcdir-relative or absolute paths."
+                                )
+
+                            install_manifest.add_pattern_copy(basepath, wild, dest_dir)
+                        else:
+                            install_manifest.add_pattern_copy(f.srcdir, f, dest_dir)
+                    elif isinstance(f, AbsolutePath):
+                        if not f.full_path.lower().endswith((".dll", ".pdb", ".so")):
+                            raise Exception(
+                                "Absolute paths installed to FINAL_TARGET_FILES must"
+                                " only be shared libraries or associated debug"
+                                " information."
+                            )
+                        install_manifest.add_optional_exists(dest_file)
+                        absolute_files.append(f.full_path)
+                    else:
+                        install_manifest.add_copy(f.full_path, dest_file)
+                else:
+                    install_manifest.add_optional_exists(dest_file)
+                    objdir_files.append(self._pretty_path(f, backend_file))
+            install_location = "$(DEPTH)/%s" % mozpath.join(target, path)
+            if objdir_files:
+                tier = "export" if obj.install_target == "dist/include" else "misc"
+                # We cannot generate multilocale.txt during misc at the moment.
+                if objdir_files[0] == "multilocale.txt":
+                    tier = "libs"
+                self._add_install_target(
+                    backend_file, target_var, tier, install_location, objdir_files
+                )
+            if absolute_files:
+                # Unfortunately, we can't use _add_install_target because on
+                # Windows, the absolute file paths that we want to install
+                # from often have spaces.  So we write our own rule.
+                self._no_skip["misc"].add(backend_file.relobjdir)
+                backend_file.write(
+                    "misc::\n%s\n"
+                    % "\n".join(
+                        "\t$(INSTALL) %s %s"
+                        % (make_quote(shell_quote(f)), install_location)
+                        for f in absolute_files
+                    )
+                )
+
+    def _process_final_target_pp_files(self, obj, files, backend_file, name):
+        # Bug 1177710 - We'd like to install these via manifests as
+        # preprocessed files. But they currently depend on non-standard flags
+        # being added via some Makefiles, so for now we just pass them through
+        # to the underlying Makefile.in.
+        #
+        # Note that if this becomes a manifest, OBJDIR_PP_FILES will likely
+        # still need to use PP_TARGETS internally because we can't have an
+        # install manifest for the root of the objdir.
+        for i, (path, files) in enumerate(files.walk()):
+            self._no_skip["misc"].add(backend_file.relobjdir)
+            var = "%s_%d" % (name, i)
+            for f in files:
+                backend_file.write(
+                    "%s += %s\n" % (var, self._pretty_path(f, backend_file))
+                )
+            backend_file.write(
+                "%s_PATH := $(DEPTH)/%s\n"
+                % (var, mozpath.join(obj.install_target, path))
+            )
+            backend_file.write("%s_TARGET := misc\n" % var)
+            backend_file.write("PP_TARGETS += %s\n" % var)
+
+    def _write_localized_files_files(self, files, name, backend_file):
+        for f in files:
+            if not isinstance(f, ObjDirPath):
+                # The emitter asserts that all srcdir files start with `en-US/`
+                e, f = f.split("en-US/")
+                assert not e
+                if "*" in f:
+                    # We can't use MERGE_FILE for wildcards because it takes
+                    # only the first match internally. This is only used
+                    # in one place in the tree currently so we'll hardcode
+                    # that specific behavior for now.
+                    backend_file.write(
+                        "%s += $(wildcard $(LOCALE_SRCDIR)/%s)\n" % (name, f)
+                    )
+                else:
+                    backend_file.write("%s += $(call MERGE_FILE,%s)\n" % (name, f))
+            else:
+                # Objdir files are allowed from LOCALIZED_GENERATED_FILES
+                backend_file.write(
+                    "%s += %s\n" % (name, self._pretty_path(f, backend_file))
+                )
+
+    def _process_localized_files(self, obj, files, backend_file):
+        target = obj.install_target
+        path = mozpath.basedir(target, ("dist/bin",))
+        if not path:
+            raise Exception("Cannot install localized files to " + target)
+        for i, (path, files) in enumerate(files.walk()):
+            name = "LOCALIZED_FILES_%d" % i
+            self._no_skip["misc"].add(backend_file.relobjdir)
+            self._write_localized_files_files(files, name + "_FILES", backend_file)
+            # Use FINAL_TARGET here because some l10n repack rules set
+            # XPI_NAME to generate langpacks.
+            backend_file.write("%s_DEST = $(FINAL_TARGET)/%s\n" % (name, path))
+            backend_file.write("%s_TARGET := misc\n" % name)
+            backend_file.write("INSTALL_TARGETS += %s\n" % name)
+
+    def _process_localized_pp_files(self, obj, files, backend_file):
+        target = obj.install_target
+        path = mozpath.basedir(target, ("dist/bin",))
+        if not path:
+            raise Exception("Cannot install localized files to " + target)
+        for i, (path, files) in enumerate(files.walk()):
+            name = "LOCALIZED_PP_FILES_%d" % i
+            self._no_skip["misc"].add(backend_file.relobjdir)
+            self._write_localized_files_files(files, name, backend_file)
+            # Use FINAL_TARGET here because some l10n repack rules set
+            # XPI_NAME to generate langpacks.
+            backend_file.write("%s_PATH = $(FINAL_TARGET)/%s\n" % (name, path))
+            backend_file.write("%s_TARGET := misc\n" % name)
+            # Localized files will have different content in different
+            # localizations, and some preprocessed files may not have
+            # any preprocessor directives.
+            backend_file.write(
+                "%s_FLAGS := --silence-missing-directive-warnings\n" % name
+            )
+            backend_file.write("PP_TARGETS += %s\n" % name)
+
+    def _process_objdir_files(self, obj, files, backend_file):
+        # We can't use an install manifest for the root of the objdir, since it
+        # would delete all the other files that get put there by the build
+        # system.
+        for i, (path, files) in enumerate(files.walk()):
+            self._no_skip["misc"].add(backend_file.relobjdir)
+            for f in files:
+                backend_file.write(
+                    "OBJDIR_%d_FILES += %s\n" % (i, self._pretty_path(f, backend_file))
+                )
+            backend_file.write("OBJDIR_%d_DEST := $(topobjdir)/%s\n" % (i, path))
+            backend_file.write("OBJDIR_%d_TARGET := misc\n" % i)
+            backend_file.write("INSTALL_TARGETS += OBJDIR_%d\n" % i)
+
+    def _process_chrome_manifest_entry(self, obj, backend_file):
+        fragment = Makefile()
+        rule = fragment.create_rule(targets=["misc:"])
+
+        top_level = mozpath.join(obj.install_target, "chrome.manifest")
+        if obj.path != top_level:
+            args = [
+                mozpath.join("$(DEPTH)", top_level),
+                make_quote(
+                    shell_quote(
+                        "manifest %s" % mozpath.relpath(obj.path, obj.install_target)
+                    )
+                ),
+            ]
+            rule.add_commands(["$(call py_action,buildlist,%s)" % " ".join(args)])
+        args = [
+            mozpath.join("$(DEPTH)", obj.path),
+            make_quote(shell_quote(str(obj.entry))),
+        ]
+        rule.add_commands(["$(call py_action,buildlist,%s)" % " ".join(args)])
+        fragment.dump(backend_file.fh, removal_guard=False)
+
+        self._no_skip["misc"].add(obj.relsrcdir)
+
+    def _write_manifests(self, dest, manifests):
+        man_dir = mozpath.join(self.environment.topobjdir, "_build_manifests", dest)
+
+        for k, manifest in manifests.items():
+            with self._write_file(mozpath.join(man_dir, k)) as fh:
+                manifest.write(fileobj=fh)
+
+    def _write_master_test_manifest(self, path, manifests):
+        with self._write_file(path) as master:
+            master.write(
+                "# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n\n"
+            )
+
+            for manifest in sorted(manifests):
+                master.write("[include:%s]\n" % manifest)
+
+    class Substitution(object):
+        """BaseConfigSubstitution-like class for use with _create_makefile."""
+
+        __slots__ = ("input_path", "output_path", "topsrcdir", "topobjdir", "config")
+
+    def _create_makefile(self, obj, stub=False, extra=None):
+        """Creates the given makefile. Makefiles are treated the same as
+        config files, but some additional header and footer is added to the
+        output.
+
+        When the stub argument is True, no source file is used, and a stub
+        makefile with the default header and footer only is created.
+        """
+        with self._get_preprocessor(obj) as pp:
+            if extra:
+                pp.context.update(extra)
+            if not pp.context.get("autoconfmk", ""):
+                pp.context["autoconfmk"] = "autoconf.mk"
+            pp.handleLine(
+                "# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.\n"
+            )
+            pp.handleLine("DEPTH := @DEPTH@\n")
+            pp.handleLine("topobjdir := @topobjdir@\n")
+            pp.handleLine("topsrcdir := @top_srcdir@\n")
+            pp.handleLine("srcdir := @srcdir@\n")
+            pp.handleLine("srcdir_rel := @srcdir_rel@\n")
+            pp.handleLine("relativesrcdir := @relativesrcdir@\n")
+            pp.handleLine("include $(DEPTH)/config/@autoconfmk@\n")
+            if not stub:
+                pp.do_include(obj.input_path)
+            # Empty line to avoid failures when last line in Makefile.in ends
+            # with a backslash.
+            pp.handleLine("\n")
+            pp.handleLine("include $(topsrcdir)/config/recurse.mk\n")
+        if not stub:
+            # Adding the Makefile.in here has the desired side-effect
+            # that if the Makefile.in disappears, this will force
+            # moz.build traversal. This means that when we remove empty
+            # Makefile.in files, the old file will get replaced with
+            # the autogenerated one automatically.
+            self.backend_input_files.add(obj.input_path)
+
+        self._makefile_out_count += 1
+
+    def _handle_linked_rust_crates(self, obj, extern_crate_file):
+        backend_file = self._get_backend_file_for(obj)
+
+        backend_file.write("RS_STATICLIB_CRATE_SRC := %s\n" % extern_crate_file)
+
+    def _handle_ipdl_sources(
+        self,
+        ipdl_dir,
+        sorted_ipdl_sources,
+        sorted_nonstatic_ipdl_sources,
+        sorted_static_ipdl_sources,
+    ):
+        # Write out a master list of all IPDL source files.
+        mk = Makefile()
+
+        sorted_nonstatic_ipdl_basenames = list()
+        for source in sorted_nonstatic_ipdl_sources:
+            basename = os.path.basename(source)
+            sorted_nonstatic_ipdl_basenames.append(basename)
+            rule = mk.create_rule([basename])
+            rule.add_dependencies([source])
+            rule.add_commands(
+                [
+                    "$(RM) $@",
+                    "$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) "
+                    "$< -o $@)",
+                ]
+            )
+
+        mk.add_statement(
+            "ALL_IPDLSRCS := %s %s"
+            % (
+                " ".join(sorted_nonstatic_ipdl_basenames),
+                " ".join(sorted_static_ipdl_sources),
+            )
+        )
+
+        # Preprocessed ipdl files are generated in ipdl_dir.
+        mk.add_statement(
+            "IPDLDIRS := %s %s"
+            % (
+                ipdl_dir,
+                " ".join(
+                    sorted(set(mozpath.dirname(p) for p in sorted_static_ipdl_sources))
+                ),
+            )
+        )
+
+        with self._write_file(mozpath.join(ipdl_dir, "ipdlsrcs.mk")) as ipdls:
+            mk.dump(ipdls, removal_guard=False)
+
+    def _handle_webidl_build(
+        self,
+        bindings_dir,
+        unified_source_mapping,
+        webidls,
+        expected_build_output_files,
+        global_define_files,
+    ):
+        include_dir = mozpath.join(self.environment.topobjdir, "dist", "include")
+        for f in expected_build_output_files:
+            if f.startswith(include_dir):
+                self._install_manifests["dist_include"].add_optional_exists(
+                    mozpath.relpath(f, include_dir)
+                )
+
+        # We pass WebIDL info to make via a completely generated make file.
+        mk = Makefile()
+        mk.add_statement(
+            "nonstatic_webidl_files := %s"
+            % " ".join(sorted(webidls.all_non_static_basenames()))
+        )
+        mk.add_statement(
+            "globalgen_sources := %s" % " ".join(sorted(global_define_files))
+        )
+        mk.add_statement(
+            "test_sources := %s"
+            % " ".join(sorted("%sBinding.cpp" % s for s in webidls.all_test_stems()))
+        )
+
+        # Add rules to preprocess bindings.
+        # This should ideally be using PP_TARGETS. However, since the input
+        # filenames match the output filenames, the existing PP_TARGETS rules
+        # result in circular dependencies and other make weirdness. One
+        # solution is to rename the input or output files respectively. See
+        # bug 928195 comment 129.
+        for source in sorted(webidls.all_preprocessed_sources()):
+            basename = os.path.basename(source)
+            rule = mk.create_rule([basename])
+            # GLOBAL_DEPS would be used here, but due to the include order of
+            # our makefiles it's not set early enough to be useful, so we use
+            # WEBIDL_PP_DEPS, which has analogous content.
+            rule.add_dependencies([source, "$(WEBIDL_PP_DEPS)"])
+            rule.add_commands(
+                [
+                    # Remove the file before writing so bindings that go from
+                    # static to preprocessed don't end up writing to a symlink,
+                    # which would modify content in the source directory.
+                    "$(RM) $@",
+                    "$(call py_action,preprocessor,$(DEFINES) $(ACDEFINES) "
+                    "$< -o $@)",
+                ]
+            )
+
+        self._add_unified_build_rules(
+            mk,
+            unified_source_mapping,
+            unified_files_makefile_variable="unified_binding_cpp_files",
+        )
+
+        webidls_mk = mozpath.join(bindings_dir, "webidlsrcs.mk")
+        with self._write_file(webidls_mk) as fh:
+            mk.dump(fh, removal_guard=False)
+
+        # Add the test directory to the compile graph.
+        if self.environment.substs.get("ENABLE_TESTS"):
+            self._compile_graph[
+                mozpath.join(
+                    mozpath.relpath(bindings_dir, self.environment.topobjdir),
+                    "test",
+                    "target-objects",
+                )
+            ]
+
+    def _format_generated_file_input_name(self, path, obj):
+        if obj.localized:
+            # Localized generated files can have locale-specific inputs, which
+            # are indicated by paths starting with `en-US/` or containing
+            # `locales/en-US/`.
+            if "locales/en-US" in path:
+                # We need an "absolute source path" relative to
+                # topsrcdir, like "/source/path".
+                if not path.startswith("/"):
+                    path = "/" + mozpath.relpath(path.full_path, obj.topsrcdir)
+                e, f = path.split("locales/en-US/", 1)
+                assert f
+                return "$(call MERGE_RELATIVE_FILE,{},{}locales)".format(
+                    f, e if not e.startswith("/") else e[len("/") :]
+                )
+            elif path.startswith("en-US/"):
+                e, f = path.split("en-US/", 1)
+                assert not e
+                return "$(call MERGE_FILE,%s)" % f
+            return self._pretty_path(path, self._get_backend_file_for(obj))
+        else:
+            return self._pretty_path(path, self._get_backend_file_for(obj))
+
+    def _format_generated_file_output_name(self, path, obj):
+        if not isinstance(path, Path):
+            path = ObjDirPath(obj._context, "!" + path)
+        return self._pretty_path(path, self._get_backend_file_for(obj))
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/create.patch.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/create.patch.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+VERSION=102.15.0
+
+tar --files-from=file.list -xJvf ../firefox-${VERSION}esr.source.tar.xz
+mv firefox-$VERSION firefox-$VERSION-orig
+
+cp -rf ./firefox-$VERSION-new ./firefox-$VERSION
+
+diff --unified -Nr  firefox-$VERSION-orig  firefox-$VERSION > firefox-$VERSION-emitter.patch
+
+mv firefox-$VERSION-emitter.patch ../patches
+
+rm -rf ./firefox-$VERSION
+rm -rf ./firefox-$VERSION-orig

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/file.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/file.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/file.list	(revision 228)
@@ -0,0 +1,2 @@
+firefox-102.15.0/python/mozbuild/mozbuild/frontend/emitter.py
+firefox-102.15.0/python/mozbuild/mozbuild/test/frontend/test_emitter.py
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/frontend/emitter.py
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/frontend/emitter.py	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/frontend/emitter.py	(revision 228)
@@ -0,0 +1,1889 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import os
+import six
+import sys
+import time
+import traceback
+
+from collections import defaultdict, OrderedDict
+from mach.mixin.logging import LoggingMixin
+from mozbuild.util import memoize, OrderedDefaultDict
+
+import mozpack.path as mozpath
+import mozinfo
+import pytoml
+
+from .data import (
+    BaseRustProgram,
+    ChromeManifestEntry,
+    ComputedFlags,
+    ConfigFileSubstitution,
+    Defines,
+    DirectoryTraversal,
+    Exports,
+    FinalTargetFiles,
+    FinalTargetPreprocessedFiles,
+    GeneratedFile,
+    ExternalStaticLibrary,
+    ExternalSharedLibrary,
+    HostDefines,
+    HostLibrary,
+    HostProgram,
+    HostRustProgram,
+    HostSharedLibrary,
+    HostSimpleProgram,
+    HostSources,
+    InstallationTarget,
+    IPDLCollection,
+    JARManifest,
+    Library,
+    Linkable,
+    LocalInclude,
+    LocalizedFiles,
+    LocalizedPreprocessedFiles,
+    ObjdirFiles,
+    ObjdirPreprocessedFiles,
+    PerSourceFlag,
+    WebIDLCollection,
+    Program,
+    RustLibrary,
+    HostRustLibrary,
+    RustProgram,
+    RustTests,
+    SandboxedWasmLibrary,
+    SharedLibrary,
+    SimpleProgram,
+    Sources,
+    StaticLibrary,
+    TestHarnessFiles,
+    TestManifest,
+    UnifiedSources,
+    VariablePassthru,
+    WasmDefines,
+    WasmSources,
+    XPCOMComponentManifests,
+    XPIDLModule,
+)
+from mozpack.chrome.manifest import Manifest
+
+from .reader import SandboxValidationError
+
+from ..testing import TEST_MANIFESTS, REFTEST_FLAVORS, SupportFilesConverter
+
+from .context import Context, SourcePath, ObjDirPath, Path, SubContext
+
+from mozbuild.base import ExecutionSummary
+
+
+class TreeMetadataEmitter(LoggingMixin):
+    """Converts the executed mozbuild files into data structures.
+
+    This is a bridge between reader.py and data.py. It takes what was read by
+    reader.BuildReader and converts it into the classes defined in the data
+    module.
+    """
+
+    def __init__(self, config):
+        self.populate_logger()
+
+        self.config = config
+
+        mozinfo.find_and_update_from_json(config.topobjdir)
+
+        self.info = dict(mozinfo.info)
+
+        self._libs = OrderedDefaultDict(list)
+        self._binaries = OrderedDict()
+        self._compile_dirs = set()
+        self._host_compile_dirs = set()
+        self._wasm_compile_dirs = set()
+        self._asm_compile_dirs = set()
+        self._compile_flags = dict()
+        self._compile_as_flags = dict()
+        self._linkage = []
+        self._static_linking_shared = set()
+        self._crate_verified_local = set()
+        self._crate_directories = dict()
+        self._idls = defaultdict(set)
+
+        # Keep track of external paths (third party build systems), starting
+        # from what we run a subconfigure in. We'll eliminate some directories
+        # as we traverse them with moz.build (e.g. js/src).
+        subconfigures = os.path.join(self.config.topobjdir, "subconfigures")
+        paths = []
+        if os.path.exists(subconfigures):
+            paths = open(subconfigures).read().splitlines()
+        self._external_paths = set(mozpath.normsep(d) for d in paths)
+
+        self._emitter_time = 0.0
+        self._object_count = 0
+        self._test_files_converter = SupportFilesConverter()
+
+    def summary(self):
+        return ExecutionSummary(
+            "Processed into {object_count:d} build config descriptors in "
+            "{execution_time:.2f}s",
+            execution_time=self._emitter_time,
+            object_count=self._object_count,
+        )
+
+    def emit(self, output, emitfn=None):
+        """Convert the BuildReader output into data structures.
+
+        The return value from BuildReader.read_topsrcdir() (a generator) is
+        typically fed into this function.
+        """
+        contexts = {}
+        emitfn = emitfn or self.emit_from_context
+
+        def emit_objs(objs):
+            for o in objs:
+                self._object_count += 1
+                yield o
+
+        for out in output:
+            # Nothing in sub-contexts is currently of interest to us. Filter
+            # them all out.
+            if isinstance(out, SubContext):
+                continue
+
+            if isinstance(out, Context):
+                # Keep all contexts around, we will need them later.
+                contexts[os.path.normcase(out.objdir)] = out
+
+                start = time.time()
+                # We need to expand the generator for the timings to work.
+                objs = list(emitfn(out))
+                self._emitter_time += time.time() - start
+
+                for o in emit_objs(objs):
+                    yield o
+
+            else:
+                raise Exception("Unhandled output type: %s" % type(out))
+
+        # Don't emit Linkable objects when COMPILE_ENVIRONMENT is not set
+        if self.config.substs.get("COMPILE_ENVIRONMENT"):
+            start = time.time()
+            objs = list(self._emit_libs_derived(contexts))
+            self._emitter_time += time.time() - start
+
+            for o in emit_objs(objs):
+                yield o
+
+    def _emit_libs_derived(self, contexts):
+
+        # First aggregate idl sources.
+        webidl_attrs = [
+            ("GENERATED_EVENTS_WEBIDL_FILES", lambda c: c.generated_events_sources),
+            ("GENERATED_WEBIDL_FILES", lambda c: c.generated_sources),
+            ("PREPROCESSED_TEST_WEBIDL_FILES", lambda c: c.preprocessed_test_sources),
+            ("PREPROCESSED_WEBIDL_FILES", lambda c: c.preprocessed_sources),
+            ("TEST_WEBIDL_FILES", lambda c: c.test_sources),
+            ("WEBIDL_FILES", lambda c: c.sources),
+            ("WEBIDL_EXAMPLE_INTERFACES", lambda c: c.example_interfaces),
+        ]
+        ipdl_attrs = [
+            ("IPDL_SOURCES", lambda c: c.sources),
+            ("PREPROCESSED_IPDL_SOURCES", lambda c: c.preprocessed_sources),
+        ]
+        xpcom_attrs = [("XPCOM_MANIFESTS", lambda c: c.manifests)]
+
+        idl_sources = {}
+        for root, cls, attrs in (
+            (self.config.substs.get("WEBIDL_ROOT"), WebIDLCollection, webidl_attrs),
+            (self.config.substs.get("IPDL_ROOT"), IPDLCollection, ipdl_attrs),
+            (
+                self.config.substs.get("XPCOM_ROOT"),
+                XPCOMComponentManifests,
+                xpcom_attrs,
+            ),
+        ):
+            if root:
+                collection = cls(contexts[os.path.normcase(root)])
+                for var, src_getter in attrs:
+                    src_getter(collection).update(self._idls[var])
+
+                idl_sources[root] = collection.all_source_files()
+                if isinstance(collection, WebIDLCollection):
+                    # Test webidl sources are added here as a somewhat special
+                    # case.
+                    idl_sources[mozpath.join(root, "test")] = [
+                        s for s in collection.all_test_cpp_basenames()
+                    ]
+
+                yield collection
+
+        # Next do FINAL_LIBRARY linkage.
+        for lib in (l for libs in self._libs.values() for l in libs):
+            if not isinstance(lib, (StaticLibrary, RustLibrary)) or not lib.link_into:
+                continue
+            if lib.link_into not in self._libs:
+                raise SandboxValidationError(
+                    'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME'
+                    % lib.link_into,
+                    contexts[os.path.normcase(lib.objdir)],
+                )
+            candidates = self._libs[lib.link_into]
+
+            # When there are multiple candidates, but all are in the same
+            # directory and have a different type, we want all of them to
+            # have the library linked. The typical use case is when building
+            # both a static and a shared library in a directory, and having
+            # that as a FINAL_LIBRARY.
+            if (
+                len(set(type(l) for l in candidates)) == len(candidates)
+                and len(set(l.objdir for l in candidates)) == 1
+            ):
+                for c in candidates:
+                    c.link_library(lib)
+            else:
+                raise SandboxValidationError(
+                    'FINAL_LIBRARY ("%s") matches a LIBRARY_NAME defined in '
+                    "multiple places:\n    %s"
+                    % (lib.link_into, "\n    ".join(l.objdir for l in candidates)),
+                    contexts[os.path.normcase(lib.objdir)],
+                )
+
+        # ...and USE_LIBS linkage.
+        for context, obj, variable in self._linkage:
+            self._link_libraries(context, obj, variable, idl_sources)
+
+        def recurse_refs(lib):
+            for o in lib.refs:
+                yield o
+                if isinstance(o, StaticLibrary):
+                    for q in recurse_refs(o):
+                        yield q
+
+        # Check that all static libraries referring to shared libraries in
+        # USE_LIBS are linked into a shared library or program.
+        for lib in self._static_linking_shared:
+            if all(isinstance(o, StaticLibrary) for o in recurse_refs(lib)):
+                shared_libs = sorted(
+                    l.basename
+                    for l in lib.linked_libraries
+                    if isinstance(l, SharedLibrary)
+                )
+                raise SandboxValidationError(
+                    'The static "%s" library is not used in a shared library '
+                    "or a program, but USE_LIBS contains the following shared "
+                    "library names:\n    %s\n\nMaybe you can remove the "
+                    'static "%s" library?'
+                    % (lib.basename, "\n    ".join(shared_libs), lib.basename),
+                    contexts[os.path.normcase(lib.objdir)],
+                )
+
+        @memoize
+        def rust_libraries(obj):
+            libs = []
+            for o in obj.linked_libraries:
+                if isinstance(o, (HostRustLibrary, RustLibrary)):
+                    libs.append(o)
+                elif isinstance(o, (HostLibrary, StaticLibrary, SandboxedWasmLibrary)):
+                    libs.extend(rust_libraries(o))
+            return libs
+
+        def check_rust_libraries(obj):
+            rust_libs = set(rust_libraries(obj))
+            if len(rust_libs) <= 1:
+                return
+            if isinstance(obj, (Library, HostLibrary)):
+                what = '"%s" library' % obj.basename
+            else:
+                what = '"%s" program' % obj.name
+            raise SandboxValidationError(
+                "Cannot link the following Rust libraries into the %s:\n"
+                "%s\nOnly one is allowed."
+                % (
+                    what,
+                    "\n".join(
+                        "  - %s" % r.basename
+                        for r in sorted(rust_libs, key=lambda r: r.basename)
+                    ),
+                ),
+                contexts[os.path.normcase(obj.objdir)],
+            )
+
+        # Propagate LIBRARY_DEFINES to all child libraries recursively.
+        def propagate_defines(outerlib, defines):
+            outerlib.lib_defines.update(defines)
+            for lib in outerlib.linked_libraries:
+                # Propagate defines only along FINAL_LIBRARY paths, not USE_LIBS
+                # paths.
+                if (
+                    isinstance(lib, StaticLibrary)
+                    and lib.link_into == outerlib.basename
+                ):
+                    propagate_defines(lib, defines)
+
+        for lib in (l for libs in self._libs.values() for l in libs):
+            if isinstance(lib, Library):
+                propagate_defines(lib, lib.lib_defines)
+            check_rust_libraries(lib)
+            yield lib
+
+        for lib in (l for libs in self._libs.values() for l in libs):
+            lib_defines = list(lib.lib_defines.get_defines())
+            if lib_defines:
+                objdir_flags = self._compile_flags[lib.objdir]
+                objdir_flags.resolve_flags("LIBRARY_DEFINES", lib_defines)
+
+                objdir_flags = self._compile_as_flags.get(lib.objdir)
+                if objdir_flags:
+                    objdir_flags.resolve_flags("LIBRARY_DEFINES", lib_defines)
+
+        for flags_obj in self._compile_flags.values():
+            yield flags_obj
+
+        for flags_obj in self._compile_as_flags.values():
+            yield flags_obj
+
+        for obj in self._binaries.values():
+            if isinstance(obj, Linkable):
+                check_rust_libraries(obj)
+            yield obj
+
    # Maps a linkable KIND ("host", "target", "wasm") to the moz.build
    # variable that names libraries of that kind; used when building
    # diagnostic messages.
    LIBRARY_NAME_VAR = {
        "host": "HOST_LIBRARY_NAME",
        "target": "LIBRARY_NAME",
        "wasm": "SANDBOXED_WASM_LIBRARY_NAME",
    }

    # Maps a linkable KIND to the config subst naming its OS/architecture
    # (no entry for "wasm").
    ARCH_VAR = {"host": "HOST_OS_ARCH", "target": "OS_TARGET"}

    # Name of the libstdc++ compatibility library to link, per linkable KIND.
    STDCXXCOMPAT_NAME = {"host": "host_stdc++compat", "target": "stdc++compat"}
+
+    def _link_libraries(self, context, obj, variable, extra_sources):
+        """Add linkage declarations to a given object."""
+        assert isinstance(obj, Linkable)
+
+        if context.objdir in extra_sources:
+            # All "extra sources" are .cpp for the moment, and happen to come
+            # first in order.
+            obj.sources[".cpp"] = extra_sources[context.objdir] + obj.sources[".cpp"]
+
+        for path in context.get(variable, []):
+            self._link_library(context, obj, variable, path)
+
+        # Link system libraries from OS_LIBS/HOST_OS_LIBS.
+        for lib in context.get(variable.replace("USE", "OS"), []):
+            obj.link_system_library(lib)
+
+        # We have to wait for all the self._link_library calls above to have
+        # happened for obj.cxx_link to be final.
+        # FIXME: Theoretically, HostSharedLibrary shouldn't be here (bug
+        # 1474022).
+        if (
+            not isinstance(
+                obj, (StaticLibrary, HostLibrary, HostSharedLibrary, BaseRustProgram)
+            )
+            and obj.cxx_link
+        ):
+            if (
+                context.config.substs.get("MOZ_STDCXX_COMPAT")
+                and context.config.substs.get(self.ARCH_VAR.get(obj.KIND)) == "Linux"
+            ):
+                self._link_library(
+                    context, obj, variable, self.STDCXXCOMPAT_NAME[obj.KIND]
+                )
+            if obj.KIND == "target":
+                for lib in context.config.substs.get("STLPORT_LIBS", []):
+                    obj.link_system_library(lib)
+
    def _link_library(self, context, obj, variable, path):
        """Resolve ``path`` (one entry from a USE_LIBS-style ``variable``) to
        a single library object and record the linkage on ``obj``.

        Raises SandboxValidationError when the reference does not match any
        library, matches several ambiguously, or requests a static flavor
        that does not exist."""
        # A "static:" prefix forces the static flavor; it is only honored for
        # target-kind linkables.
        force_static = path.startswith("static:") and obj.KIND == "target"
        if force_static:
            path = path[7:]
        name = mozpath.basename(path)
        dir = mozpath.dirname(path)
        # Start from all known libraries sharing this name and the consumer's
        # KIND (host vs. target vs. wasm).
        candidates = [l for l in self._libs[name] if l.KIND == obj.KIND]
        if dir:
            # A leading "/" anchors the directory at the top of the object
            # tree; otherwise it is relative to the consumer's objdir.
            if dir.startswith("/"):
                dir = mozpath.normpath(mozpath.join(obj.topobjdir, dir[1:]))
            else:
                dir = mozpath.normpath(mozpath.join(obj.objdir, dir))
            dir = mozpath.relpath(dir, obj.topobjdir)
            candidates = [l for l in candidates if l.relobjdir == dir]
            if not candidates:
                # If the given directory is under one of the external
                # (third party) paths, use a fake library reference to
                # there.
                for d in self._external_paths:
                    if dir.startswith("%s/" % d):
                        candidates = [
                            self._get_external_library(dir, name, force_static)
                        ]
                        break

            if not candidates:
                raise SandboxValidationError(
                    '%s contains "%s", but there is no "%s" %s in %s.'
                    % (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir),
                    context,
                )

        if len(candidates) > 1:
            # If there's more than one remaining candidate, it could be
            # that there are instances for the same library, in static and
            # shared form.
            libs = {}
            for l in candidates:
                # Deduplicate per location; under force_static only static
                # flavors survive, otherwise the shared flavor wins when both
                # exist at the same location.
                key = mozpath.join(l.relobjdir, l.basename)
                if force_static:
                    if isinstance(l, StaticLibrary):
                        libs[key] = l
                else:
                    if key in libs and isinstance(l, SharedLibrary):
                        libs[key] = l
                    if key not in libs:
                        libs[key] = l
            candidates = list(libs.values())
            if force_static and not candidates:
                if dir:
                    raise SandboxValidationError(
                        '%s contains "static:%s", but there is no static '
                        '"%s" %s in %s.'
                        % (variable, path, name, self.LIBRARY_NAME_VAR[obj.KIND], dir),
                        context,
                    )
                raise SandboxValidationError(
                    '%s contains "static:%s", but there is no static "%s" '
                    "%s in the tree"
                    % (variable, name, name, self.LIBRARY_NAME_VAR[obj.KIND]),
                    context,
                )

        if not candidates:
            raise SandboxValidationError(
                '%s contains "%s", which does not match any %s in the tree.'
                % (variable, path, self.LIBRARY_NAME_VAR[obj.KIND]),
                context,
            )

        elif len(candidates) > 1:
            # Still ambiguous after deduplication: same library name defined
            # by several moz.build files.
            paths = (mozpath.join(l.relsrcdir, "moz.build") for l in candidates)
            raise SandboxValidationError(
                '%s contains "%s", which matches a %s defined in multiple '
                "places:\n    %s"
                % (
                    variable,
                    path,
                    self.LIBRARY_NAME_VAR[obj.KIND],
                    "\n    ".join(paths),
                ),
                context,
            )

        elif force_static and not isinstance(candidates[0], StaticLibrary):
            raise SandboxValidationError(
                '%s contains "static:%s", but there is only a shared "%s" '
                "in %s. You may want to add FORCE_STATIC_LIB=True in "
                '%s/moz.build, or remove "static:".'
                % (
                    variable,
                    path,
                    name,
                    candidates[0].relobjdir,
                    candidates[0].relobjdir,
                ),
                context,
            )

        elif isinstance(obj, StaticLibrary) and isinstance(
            candidates[0], SharedLibrary
        ):
            # Remember static libs that pull in shared libraries so this can
            # be diagnosed later.
            self._static_linking_shared.add(obj)
        obj.link_library(candidates[0])
+
+    @memoize
+    def _get_external_library(self, dir, name, force_static):
+        # Create ExternalStaticLibrary or ExternalSharedLibrary object with a
+        # context more or less truthful about where the external library is.
+        context = Context(config=self.config)
+        context.add_source(mozpath.join(self.config.topsrcdir, dir, "dummy"))
+        if force_static:
+            return ExternalStaticLibrary(context, name)
+        else:
+            return ExternalSharedLibrary(context, name)
+
+    def _parse_cargo_file(self, context):
+        """Parse the Cargo.toml file in context and return a Python object
+        representation of it.  Raise a SandboxValidationError if the Cargo.toml
+        file does not exist.  Return a tuple of (config, cargo_file)."""
+        cargo_file = mozpath.join(context.srcdir, "Cargo.toml")
+        if not os.path.exists(cargo_file):
+            raise SandboxValidationError(
+                "No Cargo.toml file found in %s" % cargo_file, context
+            )
+        with open(cargo_file, "r") as f:
+            return pytoml.load(f), cargo_file
+
+    def _verify_deps(
+        self, context, crate_dir, crate_name, dependencies, description="Dependency"
+    ):
+        """Verify that a crate's dependencies all specify local paths."""
+        for dep_crate_name, values in six.iteritems(dependencies):
+            # A simple version number.
+            if isinstance(values, (six.binary_type, six.text_type)):
+                raise SandboxValidationError(
+                    "%s %s of crate %s does not list a path"
+                    % (description, dep_crate_name, crate_name),
+                    context,
+                )
+
+            dep_path = values.get("path", None)
+            if not dep_path:
+                raise SandboxValidationError(
+                    "%s %s of crate %s does not list a path"
+                    % (description, dep_crate_name, crate_name),
+                    context,
+                )
+
+            # Try to catch the case where somebody listed a
+            # local path for development.
+            if os.path.isabs(dep_path):
+                raise SandboxValidationError(
+                    "%s %s of crate %s has a non-relative path"
+                    % (description, dep_crate_name, crate_name),
+                    context,
+                )
+
+            if not os.path.exists(
+                mozpath.join(context.config.topsrcdir, crate_dir, dep_path)
+            ):
+                raise SandboxValidationError(
+                    "%s %s of crate %s refers to a non-existent path"
+                    % (description, dep_crate_name, crate_name),
+                    context,
+                )
+
+    def _rust_library(
+        self, context, libname, static_args, is_gkrust=False, cls=RustLibrary
+    ):
+        # We need to note any Rust library for linking purposes.
+        config, cargo_file = self._parse_cargo_file(context)
+        crate_name = config["package"]["name"]
+
+        if crate_name != libname:
+            raise SandboxValidationError(
+                "library %s does not match Cargo.toml-defined package %s"
+                % (libname, crate_name),
+                context,
+            )
+
+        # Check that the [lib.crate-type] field is correct
+        lib_section = config.get("lib", None)
+        if not lib_section:
+            raise SandboxValidationError(
+                "Cargo.toml for %s has no [lib] section" % libname, context
+            )
+
+        crate_type = lib_section.get("crate-type", None)
+        if not crate_type:
+            raise SandboxValidationError(
+                "Can't determine a crate-type for %s from Cargo.toml" % libname, context
+            )
+
+        crate_type = crate_type[0]
+        if crate_type != "staticlib":
+            raise SandboxValidationError(
+                "crate-type %s is not permitted for %s" % (crate_type, libname), context
+            )
+
+        dependencies = set(six.iterkeys(config.get("dependencies", {})))
+
+        features = context.get(cls.FEATURES_VAR, [])
+        unique_features = set(features)
+        if len(features) != len(unique_features):
+            raise SandboxValidationError(
+                "features for %s should not contain duplicates: %s"
+                % (libname, features),
+                context,
+            )
+
+        return cls(
+            context,
+            libname,
+            cargo_file,
+            crate_type,
+            dependencies,
+            features,
+            is_gkrust,
+            **static_args,
+        )
+
+    def _handle_linkables(self, context, passthru, generated_files):
+        linkables = []
+        host_linkables = []
+        wasm_linkables = []
+
+        def add_program(prog, var):
+            if var.startswith("HOST_"):
+                host_linkables.append(prog)
+            else:
+                linkables.append(prog)
+
+        def check_unique_binary(program, kind):
+            if program in self._binaries:
+                raise SandboxValidationError(
+                    'Cannot use "%s" as %s name, '
+                    "because it is already used in %s"
+                    % (program, kind, self._binaries[program].relsrcdir),
+                    context,
+                )
+
+        for kind, cls in [("PROGRAM", Program), ("HOST_PROGRAM", HostProgram)]:
+            program = context.get(kind)
+            if program:
+                check_unique_binary(program, kind)
+                self._binaries[program] = cls(context, program)
+                self._linkage.append(
+                    (
+                        context,
+                        self._binaries[program],
+                        kind.replace("PROGRAM", "USE_LIBS"),
+                    )
+                )
+                add_program(self._binaries[program], kind)
+
+        all_rust_programs = []
+        for kind, cls in [
+            ("RUST_PROGRAMS", RustProgram),
+            ("HOST_RUST_PROGRAMS", HostRustProgram),
+        ]:
+            programs = context[kind]
+            if not programs:
+                continue
+
+            all_rust_programs.append((programs, kind, cls))
+
+        # Verify Rust program definitions.
+        if all_rust_programs:
+            config, cargo_file = self._parse_cargo_file(context)
+            bin_section = config.get("bin", None)
+            if not bin_section:
+                raise SandboxValidationError(
+                    "Cargo.toml in %s has no [bin] section" % context.srcdir, context
+                )
+
+            defined_binaries = {b["name"] for b in bin_section}
+
+            for programs, kind, cls in all_rust_programs:
+                for program in programs:
+                    if program not in defined_binaries:
+                        raise SandboxValidationError(
+                            "Cannot find Cargo.toml definition for %s" % program,
+                            context,
+                        )
+
+                    check_unique_binary(program, kind)
+                    self._binaries[program] = cls(context, program, cargo_file)
+                    add_program(self._binaries[program], kind)
+
+        for kind, cls in [
+            ("SIMPLE_PROGRAMS", SimpleProgram),
+            ("CPP_UNIT_TESTS", SimpleProgram),
+            ("HOST_SIMPLE_PROGRAMS", HostSimpleProgram),
+        ]:
+            for program in context[kind]:
+                if program in self._binaries:
+                    raise SandboxValidationError(
+                        'Cannot use "%s" in %s, '
+                        "because it is already used in %s"
+                        % (program, kind, self._binaries[program].relsrcdir),
+                        context,
+                    )
+                self._binaries[program] = cls(
+                    context, program, is_unit_test=kind == "CPP_UNIT_TESTS"
+                )
+                self._linkage.append(
+                    (
+                        context,
+                        self._binaries[program],
+                        "HOST_USE_LIBS"
+                        if kind == "HOST_SIMPLE_PROGRAMS"
+                        else "USE_LIBS",
+                    )
+                )
+                add_program(self._binaries[program], kind)
+
+        host_libname = context.get("HOST_LIBRARY_NAME")
+        libname = context.get("LIBRARY_NAME")
+
+        if host_libname:
+            if host_libname == libname:
+                raise SandboxValidationError(
+                    "LIBRARY_NAME and HOST_LIBRARY_NAME must have a different value",
+                    context,
+                )
+
+            is_rust_library = context.get("IS_RUST_LIBRARY")
+            if is_rust_library:
+                lib = self._rust_library(context, host_libname, {}, cls=HostRustLibrary)
+            elif context.get("FORCE_SHARED_LIB"):
+                lib = HostSharedLibrary(context, host_libname)
+            else:
+                lib = HostLibrary(context, host_libname)
+            self._libs[host_libname].append(lib)
+            self._linkage.append((context, lib, "HOST_USE_LIBS"))
+            host_linkables.append(lib)
+
+        final_lib = context.get("FINAL_LIBRARY")
+        if not libname and final_lib:
+            # If no LIBRARY_NAME is given, create one.
+            libname = context.relsrcdir.replace("/", "_")
+
+        static_lib = context.get("FORCE_STATIC_LIB")
+        shared_lib = context.get("FORCE_SHARED_LIB")
+
+        static_name = context.get("STATIC_LIBRARY_NAME")
+        shared_name = context.get("SHARED_LIBRARY_NAME")
+
+        is_framework = context.get("IS_FRAMEWORK")
+
+        soname = context.get("SONAME")
+
+        lib_defines = context.get("LIBRARY_DEFINES")
+
+        wasm_lib = context.get("SANDBOXED_WASM_LIBRARY_NAME")
+
+        shared_args = {}
+        static_args = {}
+
+        if final_lib:
+            if static_lib:
+                raise SandboxValidationError(
+                    "FINAL_LIBRARY implies FORCE_STATIC_LIB. "
+                    "Please remove the latter.",
+                    context,
+                )
+            if shared_lib:
+                raise SandboxValidationError(
+                    "FINAL_LIBRARY conflicts with FORCE_SHARED_LIB. "
+                    "Please remove one.",
+                    context,
+                )
+            if is_framework:
+                raise SandboxValidationError(
+                    "FINAL_LIBRARY conflicts with IS_FRAMEWORK. " "Please remove one.",
+                    context,
+                )
+            static_args["link_into"] = final_lib
+            static_lib = True
+
+        if libname:
+            if is_framework:
+                if soname:
+                    raise SandboxValidationError(
+                        "IS_FRAMEWORK conflicts with SONAME. " "Please remove one.",
+                        context,
+                    )
+                shared_lib = True
+                shared_args["variant"] = SharedLibrary.FRAMEWORK
+
+            if not static_lib and not shared_lib:
+                static_lib = True
+
+            if static_name:
+                if not static_lib:
+                    raise SandboxValidationError(
+                        "STATIC_LIBRARY_NAME requires FORCE_STATIC_LIB", context
+                    )
+                static_args["real_name"] = static_name
+
+            if shared_name:
+                if not shared_lib:
+                    raise SandboxValidationError(
+                        "SHARED_LIBRARY_NAME requires FORCE_SHARED_LIB", context
+                    )
+                shared_args["real_name"] = shared_name
+
+            if soname:
+                if not shared_lib:
+                    raise SandboxValidationError(
+                        "SONAME requires FORCE_SHARED_LIB", context
+                    )
+                shared_args["soname"] = soname
+
+            if context.get("NO_EXPAND_LIBS"):
+                if not static_lib:
+                    raise SandboxValidationError(
+                        "NO_EXPAND_LIBS can only be set for static libraries.", context
+                    )
+                static_args["no_expand_lib"] = True
+
+            if shared_lib and static_lib:
+                if not static_name and not shared_name:
+                    raise SandboxValidationError(
+                        "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+                        "but neither STATIC_LIBRARY_NAME or "
+                        "SHARED_LIBRARY_NAME is set. At least one is required.",
+                        context,
+                    )
+                if static_name and not shared_name and static_name == libname:
+                    raise SandboxValidationError(
+                        "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+                        "but STATIC_LIBRARY_NAME is the same as LIBRARY_NAME, "
+                        "and SHARED_LIBRARY_NAME is unset. Please either "
+                        "change STATIC_LIBRARY_NAME or LIBRARY_NAME, or set "
+                        "SHARED_LIBRARY_NAME.",
+                        context,
+                    )
+                if shared_name and not static_name and shared_name == libname:
+                    raise SandboxValidationError(
+                        "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+                        "but SHARED_LIBRARY_NAME is the same as LIBRARY_NAME, "
+                        "and STATIC_LIBRARY_NAME is unset. Please either "
+                        "change SHARED_LIBRARY_NAME or LIBRARY_NAME, or set "
+                        "STATIC_LIBRARY_NAME.",
+                        context,
+                    )
+                if shared_name and static_name and shared_name == static_name:
+                    raise SandboxValidationError(
+                        "Both FORCE_STATIC_LIB and FORCE_SHARED_LIB are True, "
+                        "but SHARED_LIBRARY_NAME is the same as "
+                        "STATIC_LIBRARY_NAME. Please change one of them.",
+                        context,
+                    )
+
+            symbols_file = context.get("SYMBOLS_FILE")
+            if symbols_file:
+                if not shared_lib:
+                    raise SandboxValidationError(
+                        "SYMBOLS_FILE can only be used with a SHARED_LIBRARY.", context
+                    )
+                if context.get("DEFFILE"):
+                    raise SandboxValidationError(
+                        "SYMBOLS_FILE cannot be used along DEFFILE.", context
+                    )
+                if isinstance(symbols_file, SourcePath):
+                    if not os.path.exists(symbols_file.full_path):
+                        raise SandboxValidationError(
+                            "Path specified in SYMBOLS_FILE does not exist: %s "
+                            "(resolved to %s)" % (symbols_file, symbols_file.full_path),
+                            context,
+                        )
+                    shared_args["symbols_file"] = True
+                else:
+                    if symbols_file.target_basename not in generated_files:
+                        raise SandboxValidationError(
+                            (
+                                "Objdir file specified in SYMBOLS_FILE not in "
+                                + "GENERATED_FILES: %s"
+                            )
+                            % (symbols_file,),
+                            context,
+                        )
+                    shared_args["symbols_file"] = symbols_file.target_basename
+
+            if shared_lib:
+                lib = SharedLibrary(context, libname, **shared_args)
+                self._libs[libname].append(lib)
+                self._linkage.append((context, lib, "USE_LIBS"))
+                linkables.append(lib)
+                if not lib.installed:
+                    generated_files.add(lib.lib_name)
+                if symbols_file and isinstance(symbols_file, SourcePath):
+                    script = mozpath.join(
+                        mozpath.dirname(mozpath.dirname(__file__)),
+                        "action",
+                        "generate_symbols_file.py",
+                    )
+                    defines = ()
+                    if lib.defines:
+                        defines = lib.defines.get_defines()
+                    yield GeneratedFile(
+                        context,
+                        script,
+                        "generate_symbols_file",
+                        lib.symbols_file,
+                        [symbols_file],
+                        defines,
+                        required_during_compile=[lib.symbols_file],
+                    )
+            if static_lib:
+                is_rust_library = context.get("IS_RUST_LIBRARY")
+                if is_rust_library:
+                    lib = self._rust_library(
+                        context,
+                        libname,
+                        static_args,
+                        is_gkrust=bool(context.get("IS_GKRUST")),
+                    )
+                else:
+                    lib = StaticLibrary(context, libname, **static_args)
+                self._libs[libname].append(lib)
+                self._linkage.append((context, lib, "USE_LIBS"))
+                linkables.append(lib)
+
+            if lib_defines:
+                if not libname:
+                    raise SandboxValidationError(
+                        "LIBRARY_DEFINES needs a " "LIBRARY_NAME to take effect",
+                        context,
+                    )
+                lib.lib_defines.update(lib_defines)
+
+        if wasm_lib:
+            if wasm_lib == libname:
+                raise SandboxValidationError(
+                    "SANDBOXED_WASM_LIBRARY_NAME and LIBRARY_NAME must have a "
+                    "different value.",
+                    context,
+                )
+            if wasm_lib == host_libname:
+                raise SandboxValidationError(
+                    "SANDBOXED_WASM_LIBRARY_NAME and HOST_LIBRARY_NAME must "
+                    "have a different value.",
+                    context,
+                )
+            if wasm_lib == shared_name:
+                raise SandboxValidationError(
+                    "SANDBOXED_WASM_LIBRARY_NAME and SHARED_NAME must have a "
+                    "different value.",
+                    context,
+                )
+            if wasm_lib == static_name:
+                raise SandboxValidationError(
+                    "SANDBOXED_WASM_LIBRARY_NAME and STATIC_NAME must have a "
+                    "different value.",
+                    context,
+                )
+            lib = SandboxedWasmLibrary(context, wasm_lib)
+            self._libs[libname].append(lib)
+            wasm_linkables.append(lib)
+            self._wasm_compile_dirs.add(context.objdir)
+
+        seen = {}
+        for symbol in ("SOURCES", "UNIFIED_SOURCES"):
+            for src in context.get(symbol, []):
+                basename = os.path.splitext(os.path.basename(src))[0]
+                if basename in seen:
+                    other_src, where = seen[basename]
+                    extra = ""
+                    if "UNIFIED_SOURCES" in (symbol, where):
+                        extra = " in non-unified builds"
+                    raise SandboxValidationError(
+                        f"{src} from {symbol} would have the same object name "
+                        f"as {other_src} from {where}{extra}.",
+                        context,
+                    )
+                seen[basename] = (src, symbol)
+
+        # Only emit sources if we have linkables defined in the same context.
+        # Note the linkables are not emitted in this function, but much later,
+        # after aggregation (because of e.g. USE_LIBS processing).
+        if not (linkables or host_linkables or wasm_linkables):
+            return
+
+        self._compile_dirs.add(context.objdir)
+
+        if host_linkables and not all(
+            isinstance(l, HostRustLibrary) for l in host_linkables
+        ):
+            self._host_compile_dirs.add(context.objdir)
+            # TODO: objdirs with only host things in them shouldn't need target
+            # flags, but there's at least one Makefile.in (in
+            # build/unix/elfhack) that relies on the value of LDFLAGS being
+            # passed to one-off rules.
+            self._compile_dirs.add(context.objdir)
+
+        sources = defaultdict(list)
+        gen_sources = defaultdict(list)
+        all_flags = {}
+        for symbol in ("SOURCES", "HOST_SOURCES", "UNIFIED_SOURCES", "WASM_SOURCES"):
+            srcs = sources[symbol]
+            gen_srcs = gen_sources[symbol]
+            context_srcs = context.get(symbol, [])
+            seen_sources = set()
+            for f in context_srcs:
+                if f in seen_sources:
+                    raise SandboxValidationError(
+                        "Source file should only "
+                        "be added to %s once: %s" % (symbol, f),
+                        context,
+                    )
+                seen_sources.add(f)
+                full_path = f.full_path
+                if isinstance(f, SourcePath):
+                    srcs.append(full_path)
+                else:
+                    assert isinstance(f, Path)
+                    gen_srcs.append(full_path)
+                if symbol == "SOURCES":
+                    context_flags = context_srcs[f]
+                    if context_flags:
+                        all_flags[full_path] = context_flags
+
+                if isinstance(f, SourcePath) and not os.path.exists(full_path):
+                    raise SandboxValidationError(
+                        "File listed in %s does not "
+                        "exist: '%s'" % (symbol, full_path),
+                        context,
+                    )
+
+        # Process the .cpp files generated by IPDL as generated sources within
+        # the context which declared the IPDL_SOURCES attribute.
+        ipdl_root = self.config.substs.get("IPDL_ROOT")
+        for symbol in ("IPDL_SOURCES", "PREPROCESSED_IPDL_SOURCES"):
+            context_srcs = context.get(symbol, [])
+            for f in context_srcs:
+                root, ext = mozpath.splitext(mozpath.basename(f))
+
+                suffix_map = {
+                    ".ipdlh": [".cpp"],
+                    ".ipdl": [".cpp", "Child.cpp", "Parent.cpp"],
+                }
+                if ext not in suffix_map:
+                    raise SandboxValidationError(
+                        "Unexpected extension for IPDL source %s" % ext
+                    )
+
+                gen_sources["UNIFIED_SOURCES"].extend(
+                    mozpath.join(ipdl_root, root + suffix) for suffix in suffix_map[ext]
+                )
+
+        no_pgo = context.get("NO_PGO")
+        no_pgo_sources = [f for f, flags in six.iteritems(all_flags) if flags.no_pgo]
+        if no_pgo:
+            if no_pgo_sources:
+                raise SandboxValidationError(
+                    "NO_PGO and SOURCES[...].no_pgo " "cannot be set at the same time",
+                    context,
+                )
+            passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo
+        if no_pgo_sources:
+            passthru.variables["NO_PROFILE_GUIDED_OPTIMIZE"] = no_pgo_sources
+
+        # A map from "canonical suffixes" for a particular source file
+        # language to the range of suffixes associated with that language.
+        #
+        # We deliberately don't list the canonical suffix in the suffix list
+        # in the definition; we'll add it in programmatically after defining
+        # things.
+        suffix_map = {
+            ".s": set([".asm"]),
+            ".c": set(),
+            ".m": set(),
+            ".mm": set(),
+            ".cpp": set([".cc", ".cxx"]),
+            ".S": set(),
+        }
+
+        # The inverse of the above, mapping suffixes to their canonical suffix.
+        canonicalized_suffix_map = {}
+        for suffix, alternatives in six.iteritems(suffix_map):
+            alternatives.add(suffix)
+            for a in alternatives:
+                canonicalized_suffix_map[a] = suffix
+
+        # A map from moz.build variables to the canonical suffixes of file
+        # kinds that can be listed therein.
+        all_suffixes = list(suffix_map.keys())
+        varmap = dict(
+            SOURCES=(Sources, all_suffixes),
+            HOST_SOURCES=(HostSources, [".c", ".mm", ".cpp"]),
+            UNIFIED_SOURCES=(UnifiedSources, [".c", ".mm", ".m", ".cpp"]),
+        )
+        # Only include a WasmSources context if there are any WASM_SOURCES.
+        # (This is going to matter later because we inject an extra .c file to
+        # compile with the wasm compiler if, and only if, there are any WASM
+        # sources.)
+        if sources["WASM_SOURCES"] or gen_sources["WASM_SOURCES"]:
+            varmap["WASM_SOURCES"] = (WasmSources, [".c", ".cpp"])
+        # Track whether there are any C++ source files.
+        # Technically this won't do the right thing for SIMPLE_PROGRAMS in
+        # a directory with mixed C and C++ source, but it's not that important.
+        cxx_sources = defaultdict(bool)
+
+        # Source files to track for linkables associated with this context.
+        ctxt_sources = defaultdict(lambda: defaultdict(list))
+
+        for variable, (klass, suffixes) in varmap.items():
+            # Group static and generated files by their canonical suffixes, and
+            # ensure we haven't been given filetypes that we don't recognize.
+            by_canonical_suffix = defaultdict(lambda: {"static": [], "generated": []})
+            for srcs, key in (
+                (sources[variable], "static"),
+                (gen_sources[variable], "generated"),
+            ):
+                for f in srcs:
+                    canonical_suffix = canonicalized_suffix_map.get(
+                        mozpath.splitext(f)[1]
+                    )
+                    if canonical_suffix not in suffixes:
+                        raise SandboxValidationError(
+                            "%s has an unknown file type." % f, context
+                        )
+                    by_canonical_suffix[canonical_suffix][key].append(f)
+
+            # Yield an object for each canonical suffix, grouping generated and
+            # static sources together to allow them to be unified together.
+            for canonical_suffix in sorted(by_canonical_suffix.keys()):
+                if canonical_suffix in (".cpp", ".mm"):
+                    cxx_sources[variable] = True
+                elif canonical_suffix in (".s", ".S"):
+                    self._asm_compile_dirs.add(context.objdir)
+                src_group = by_canonical_suffix[canonical_suffix]
+                obj = klass(
+                    context,
+                    src_group["static"],
+                    src_group["generated"],
+                    canonical_suffix,
+                )
+                srcs = list(obj.files)
+                if isinstance(obj, UnifiedSources) and obj.have_unified_mapping:
+                    srcs = sorted(dict(obj.unified_source_mapping).keys())
+                ctxt_sources[variable][canonical_suffix] += srcs
+                yield obj
+
+        if ctxt_sources:
+            for linkable in linkables:
+                for target_var in ("SOURCES", "UNIFIED_SOURCES"):
+                    for suffix, srcs in ctxt_sources[target_var].items():
+                        linkable.sources[suffix] += srcs
+            for host_linkable in host_linkables:
+                for suffix, srcs in ctxt_sources["HOST_SOURCES"].items():
+                    host_linkable.sources[suffix] += srcs
+            for wasm_linkable in wasm_linkables:
+                for suffix, srcs in ctxt_sources["WASM_SOURCES"].items():
+                    wasm_linkable.sources[suffix] += srcs
+
+        for f, flags in sorted(six.iteritems(all_flags)):
+            if flags.flags:
+                ext = mozpath.splitext(f)[1]
+                yield PerSourceFlag(context, f, flags.flags)
+
+        # If there are any C++ sources, set all the linkables defined here
+        # to require the C++ linker.
+        for vars, linkable_items in (
+            (("SOURCES", "UNIFIED_SOURCES"), linkables),
+            (("HOST_SOURCES",), host_linkables),
+        ):
+            for var in vars:
+                if cxx_sources[var]:
+                    for l in linkable_items:
+                        l.cxx_link = True
+                    break
+
+    def emit_from_context(self, context):
+        """Convert a Context to tree metadata objects.
+
+        This is a generator of mozbuild.frontend.data.ContextDerived instances.
+        """
+
+        # We only want to emit an InstallationTarget if one of the consulted
+        # variables is defined. Later on, we look up FINAL_TARGET, which has
+        # the side-effect of populating it. So, we need to do this lookup
+        # early.
+        if any(k in context for k in ("FINAL_TARGET", "XPI_NAME", "DIST_SUBDIR")):
+            yield InstallationTarget(context)
+
+        # We always emit a directory traversal descriptor. This is needed by
+        # the recursive make backend.
+        for o in self._emit_directory_traversal_from_context(context):
+            yield o
+
+        for obj in self._process_xpidl(context):
+            yield obj
+
+        computed_flags = ComputedFlags(context, context["COMPILE_FLAGS"])  # target compile flags
+        computed_link_flags = ComputedFlags(context, context["LINK_FLAGS"])  # target link flags
+        computed_host_flags = ComputedFlags(context, context["HOST_COMPILE_FLAGS"])  # host compile flags
+        computed_as_flags = ComputedFlags(context, context["ASM_FLAGS"])  # assembler flags
+        computed_wasm_flags = ComputedFlags(context, context["WASM_FLAGS"])  # wasm compile flags
+
+        # Proxy some variables as-is until we have richer classes to represent
+        # them. We should aim to keep this set small because it violates the
+        # desired abstraction of the build definition away from makefiles.
+        passthru = VariablePassthru(context)
+        varlist = [
+            "EXTRA_DSO_LDOPTS",
+            "RCFILE",
+            "RCINCLUDE",
+            "WIN32_EXE_LDFLAGS",
+            "USE_EXTENSION_MANIFEST",
+        ]
+        for v in varlist:
+            if v in context and context[v]:  # forward only variables with non-empty values
+                passthru.variables[v] = context[v]
+
+        if (
+            context.config.substs.get("OS_TARGET") == "WINNT"
+            and context["DELAYLOAD_DLLS"]
+        ):
+            if context.config.substs.get("CC_TYPE") != "clang":  # non-clang: pass -DELAYLOAD to the linker directly
+                context["LDFLAGS"].extend(
+                    [("-DELAYLOAD:%s" % dll) for dll in context["DELAYLOAD_DLLS"]]
+                )
+            else:  # clang: forward the linker option via -Wl,-Xlink=
+                context["LDFLAGS"].extend(
+                    [
+                        ("-Wl,-Xlink=-DELAYLOAD:%s" % dll)
+                        for dll in context["DELAYLOAD_DLLS"]
+                    ]
+                )
+            context["OS_LIBS"].append("delayimp")  # NOTE(review): presumably the delay-load helper lib; confirm
+
+        for v in ["CMFLAGS", "CMMFLAGS"]:
+            if v in context and context[v]:
+                passthru.variables["MOZBUILD_" + v] = context[v]  # forwarded with a MOZBUILD_ prefix
+
+        for v in ["CXXFLAGS", "CFLAGS"]:
+            if v in context and context[v]:
+                computed_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+        for v in ["WASM_CFLAGS", "WASM_CXXFLAGS"]:
+            if v in context and context[v]:
+                computed_wasm_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+        for v in ["HOST_CXXFLAGS", "HOST_CFLAGS"]:
+            if v in context and context[v]:
+                computed_host_flags.resolve_flags("MOZBUILD_%s" % v, context[v])
+
+        if "LDFLAGS" in context and context["LDFLAGS"]:
+            computed_link_flags.resolve_flags("MOZBUILD", context["LDFLAGS"])
+
+        deffile = context.get("DEFFILE")
+        if deffile and context.config.substs.get("OS_TARGET") == "WINNT":  # DEFFILE only applies on Windows targets
+            if isinstance(deffile, SourcePath):
+                if not os.path.exists(deffile.full_path):
+                    raise SandboxValidationError(
+                        "Path specified in DEFFILE does not exist: %s "
+                        "(resolved to %s)" % (deffile, deffile.full_path),
+                        context,
+                    )
+                path = mozpath.relpath(deffile.full_path, context.objdir)
+            else:
+                path = deffile.target_basename
+
+            if context.config.substs.get("GNU_CC"):
+                computed_link_flags.resolve_flags("DEFFILE", [path])  # GNU toolchain: bare path
+            else:
+                computed_link_flags.resolve_flags("DEFFILE", ["-DEF:" + path])  # MSVC-style option
+
+        dist_install = context["DIST_INSTALL"]  # tri-state: True, False, or unset
+        if dist_install is True:
+            passthru.variables["DIST_INSTALL"] = True
+        elif dist_install is False:
+            passthru.variables["NO_DIST_INSTALL"] = True
+
+        # Ideally, this should be done in templates, but this is difficult at
+        # the moment because USE_STATIC_LIBS can be set after a template
+        # returns. Eventually, with context-based templates, it will be
+        # possible.
+        if context.config.substs.get(
+            "OS_ARCH"
+        ) == "WINNT" and not context.config.substs.get("GNU_CC"):
+            use_static_lib = context.get(
+                "USE_STATIC_LIBS"
+            ) and not context.config.substs.get("MOZ_ASAN")
+            rtl_flag = "-MT" if use_static_lib else "-MD"  # NOTE(review): MSVC runtime selection — confirm
+            if context.config.substs.get("MOZ_DEBUG") and not context.config.substs.get(
+                "MOZ_NO_DEBUG_RTL"
+            ):
+                rtl_flag += "d"  # debug variant of the runtime flag
+            computed_flags.resolve_flags("RTL", [rtl_flag])
+            if not context.config.substs.get("CROSS_COMPILE"):
+                computed_host_flags.resolve_flags("RTL", [rtl_flag])
+
+        generated_files = set()  # all outputs collected from generated-file objects
+        localized_generated_files = set()  # subset whose objects are marked localized
+        for obj in self._process_generated_files(context):
+            for f in obj.outputs:
+                generated_files.add(f)
+                if obj.localized:
+                    localized_generated_files.add(f)
+            yield obj
+
+        for path in context["CONFIGURE_SUBST_FILES"]:
+            sub = self._create_substitution(ConfigFileSubstitution, context, path)
+            generated_files.add(str(sub.relpath))
+            yield sub
+
+        for defines_var, cls, backend_flags in (
+            ("DEFINES", Defines, (computed_flags, computed_as_flags)),  # DEFINES feed both compile and asm flags
+            ("HOST_DEFINES", HostDefines, (computed_host_flags,)),
+            ("WASM_DEFINES", WasmDefines, (computed_wasm_flags,)),
+        ):
+            defines = context.get(defines_var)
+            if defines:
+                defines_obj = cls(context, defines)
+                if isinstance(defines_obj, Defines):
+                    # DEFINES have consumers outside the compile command line,
+                    # HOST_DEFINES do not.
+                    yield defines_obj
+            else:
+                # If we don't have explicitly set defines we need to make sure
+                # initialized values if present end up in computed flags.
+                defines_obj = cls(context, context[defines_var])
+
+            defines_from_obj = list(defines_obj.get_defines())
+            if defines_from_obj:
+                for flags in backend_flags:
+                    flags.resolve_flags(defines_var, defines_from_obj)
+
+        idl_vars = (
+            "GENERATED_EVENTS_WEBIDL_FILES",
+            "GENERATED_WEBIDL_FILES",
+            "PREPROCESSED_TEST_WEBIDL_FILES",
+            "PREPROCESSED_WEBIDL_FILES",
+            "TEST_WEBIDL_FILES",
+            "WEBIDL_FILES",
+            "IPDL_SOURCES",
+            "PREPROCESSED_IPDL_SOURCES",
+            "XPCOM_MANIFESTS",
+        )
+        for context_var in idl_vars:
+            for name in context.get(context_var, []):
+                self._idls[context_var].add(mozpath.join(context.srcdir, name))  # record srcdir-absolute paths
+        # WEBIDL_EXAMPLE_INTERFACES do not correspond to files.
+        for name in context.get("WEBIDL_EXAMPLE_INTERFACES", []):
+            self._idls["WEBIDL_EXAMPLE_INTERFACES"].add(name)
+
+        local_includes = []
+        for local_include in context.get("LOCAL_INCLUDES", []):
+            full_path = local_include.full_path
+            if not isinstance(local_include, ObjDirPath):  # objdir paths may not exist yet, so skip the checks
+                if not os.path.exists(full_path):
+                    raise SandboxValidationError(
+                        "Path specified in LOCAL_INCLUDES does not exist: %s (resolved to %s)"
+                        % (local_include, full_path),
+                        context,
+                    )
+                if not os.path.isdir(full_path):
+                    raise SandboxValidationError(
+                        "Path specified in LOCAL_INCLUDES "
+                        "is a filename, but a directory is required: %s "
+                        "(resolved to %s)" % (local_include, full_path),
+                        context,
+                    )
+            include_obj = LocalInclude(context, local_include)
+            local_includes.append(include_obj.path.full_path)
+            yield include_obj
+
+        computed_flags.resolve_flags(
+            "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+        )  # the same -I set feeds target, asm, host and wasm flags below
+        computed_as_flags.resolve_flags(
+            "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+        )
+        computed_host_flags.resolve_flags(
+            "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+        )
+        computed_wasm_flags.resolve_flags(
+            "LOCAL_INCLUDES", ["-I%s" % p for p in local_includes]
+        )
+
+        for obj in self._handle_linkables(context, passthru, generated_files):
+            yield obj
+
+        generated_files.update(
+            [
+                "%s%s" % (k, self.config.substs.get("BIN_SUFFIX", ""))  # NOTE(review): uses self.config, not context.config — confirm equivalent
+                for k in self._binaries.keys()
+            ]
+        )
+
+        components = []  # files installed under a "components" base, handled after the loop
+        for var, cls in (
+            ("EXPORTS", Exports),
+            ("FINAL_TARGET_FILES", FinalTargetFiles),
+            ("FINAL_TARGET_PP_FILES", FinalTargetPreprocessedFiles),
+            ("LOCALIZED_FILES", LocalizedFiles),
+            ("LOCALIZED_PP_FILES", LocalizedPreprocessedFiles),
+            ("OBJDIR_FILES", ObjdirFiles),
+            ("OBJDIR_PP_FILES", ObjdirPreprocessedFiles),
+            ("TEST_HARNESS_FILES", TestHarnessFiles),
+        ):
+            all_files = context.get(var)
+            if not all_files:
+                continue
+            if dist_install is False and var != "TEST_HARNESS_FILES":  # test harness files may install regardless
+                raise SandboxValidationError(
+                    "%s cannot be used with DIST_INSTALL = False" % var, context
+                )
+            has_prefs = False
+            has_resources = False
+            for base, files in all_files.walk():
+                if var == "TEST_HARNESS_FILES" and not base:
+                    raise SandboxValidationError(
+                        "Cannot install files to the root of TEST_HARNESS_FILES",
+                        context,
+                    )
+                if base == "components":
+                    components.extend(files)
+                if base == "defaults/pref":
+                    has_prefs = True
+                if mozpath.split(base)[0] == "res":
+                    has_resources = True
+                for f in files:
+                    if (
+                        var
+                        in (
+                            "FINAL_TARGET_PP_FILES",
+                            "OBJDIR_PP_FILES",
+                            "LOCALIZED_PP_FILES",
+                        )
+                        and not isinstance(f, SourcePath)
+                    ):
+                        raise SandboxValidationError(
+                            ("Only source directory paths allowed in " + "%s: %s")
+                            % (var, f),
+                            context,
+                        )
+                    if var.startswith("LOCALIZED_"):
+                        if isinstance(f, SourcePath):
+                            if f.startswith("en-US/"):
+                                pass
+                            elif "locales/en-US/" in f:
+                                pass
+                            else:
+                                raise SandboxValidationError(
+                                    "%s paths must start with `en-US/` or "
+                                    "contain `locales/en-US/`: %s" % (var, f),
+                                    context,
+                                )
+
+                    if not isinstance(f, ObjDirPath):
+                        path = f.full_path
+                        if "*" not in path and not os.path.exists(path):  # wildcard entries are not checked here
+                            raise SandboxValidationError(
+                                "File listed in %s does not exist: %s" % (var, path),
+                                context,
+                            )
+                    else:
+                        # TODO: Bug 1254682 - The '/' check is to allow
+                        # installing files generated from other directories,
+                        # which is done occasionally for tests. However, it
+                        # means we don't fail early if the file isn't actually
+                        # created by the other moz.build file.
+                        if f.target_basename not in generated_files and "/" not in f:
+                            raise SandboxValidationError(
+                                (
+                                    "Objdir file listed in %s not in "
+                                    + "GENERATED_FILES: %s"
+                                )
+                                % (var, f),
+                                context,
+                            )
+
+                        if var.startswith("LOCALIZED_"):
+                            # Further require that LOCALIZED_FILES are from
+                            # LOCALIZED_GENERATED_FILES.
+                            if f.target_basename not in localized_generated_files:
+                                raise SandboxValidationError(
+                                    (
+                                        "Objdir file listed in %s not in "
+                                        + "LOCALIZED_GENERATED_FILES: %s"
+                                    )
+                                    % (var, f),
+                                    context,
+                                )
+                        else:
+                            # Additionally, don't allow LOCALIZED_GENERATED_FILES to be used
+                            # in anything *but* LOCALIZED_FILES.
+                            if f.target_basename in localized_generated_files:
+                                raise SandboxValidationError(
+                                    (
+                                        "Outputs of LOCALIZED_GENERATED_FILES cannot "
+                                        "be used in %s: %s"
+                                    )
+                                    % (var, f),
+                                    context,
+                                )
+
+            # Addons (when XPI_NAME is defined) and Applications (when
+            # DIST_SUBDIR is defined) use a different preferences directory
+            # (default/preferences) from the one the GRE uses (defaults/pref).
+            # Hence, we move the files from the latter to the former in that
+            # case.
+            if has_prefs and (context.get("XPI_NAME") or context.get("DIST_SUBDIR")):
+                all_files.defaults.preferences += all_files.defaults.pref
+                del all_files.defaults._children["pref"]
+
+            if has_resources and (
+                context.get("DIST_SUBDIR") or context.get("XPI_NAME")
+            ):
+                raise SandboxValidationError(
+                    "RESOURCES_FILES cannot be used with DIST_SUBDIR or " "XPI_NAME.",
+                    context,
+                )
+
+            yield cls(context, all_files)
+
+        for c in components:
+            if c.endswith(".manifest"):  # only .manifest components get a chrome.manifest entry
+                yield ChromeManifestEntry(
+                    context,
+                    "chrome.manifest",
+                    Manifest("components", mozpath.basename(c)),
+                )
+
+        rust_tests = context.get("RUST_TESTS", [])
+        if rust_tests:
+            # TODO: more sophisticated checking of the declared name vs.
+            # contents of the Cargo.toml file.
+            features = context.get("RUST_TEST_FEATURES", [])
+
+            yield RustTests(context, rust_tests, features)
+
+        for obj in self._process_test_manifests(context):
+            yield obj
+
+        for obj in self._process_jar_manifests(context):
+            yield obj
+
+        computed_as_flags.resolve_flags("MOZBUILD", context.get("ASFLAGS"))
+
+        if context.get("USE_NASM") is True:
+            nasm = context.config.substs.get("NASM")
+            if not nasm:
+                raise SandboxValidationError("nasm is not available", context)
+            passthru.variables["AS"] = nasm
+            passthru.variables["AS_DASH_C_FLAG"] = ""  # deliberately empty: no -c flag is passed
+            passthru.variables["ASOUTOPTION"] = "-o "  # trailing space is intentional
+            computed_as_flags.resolve_flags(
+                "OS", context.config.substs.get("NASM_ASFLAGS", [])
+            )
+
+        if context.get("USE_INTEGRATED_CLANGCL_AS") is True:
+            if context.config.substs.get("CC_TYPE") != "clang-cl":
+                raise SandboxValidationError("clang-cl is not available", context)
+            passthru.variables["AS"] = context.config.substs.get("CC")  # assemble with the C compiler itself
+            passthru.variables["AS_DASH_C_FLAG"] = "-c"
+            passthru.variables["ASOUTOPTION"] = "-o "  # trailing space is intentional
+
+        if passthru.variables:  # only emit when something was collected above
+            yield passthru
+
+        if context.objdir in self._compile_dirs:  # flags only recorded for dirs that compile
+            self._compile_flags[context.objdir] = computed_flags
+            yield computed_link_flags
+
+        if context.objdir in self._asm_compile_dirs:
+            self._compile_as_flags[context.objdir] = computed_as_flags
+
+        if context.objdir in self._host_compile_dirs:
+            yield computed_host_flags
+
+        if context.objdir in self._wasm_compile_dirs:
+            yield computed_wasm_flags
+
+    def _create_substitution(self, cls, context, path):
+        sub = cls(context)  # build a substitution object of the requested class
+        sub.input_path = "%s.in" % path.full_path  # template is the corresponding .in file
+        sub.output_path = path.translated
+        sub.relpath = path
+
+        return sub
+
+    def _process_xpidl(self, context):
+        # XPIDL source files get processed and turned into .h and .xpt files.
+        # If there are multiple XPIDL files in a directory, they get linked
+        # together into a final .xpt, which has the name defined by
+        # XPIDL_MODULE.
+        xpidl_module = context["XPIDL_MODULE"]
+
+        if not xpidl_module:  # no module name: any XPIDL sources are an error
+            if context["XPIDL_SOURCES"]:
+                raise SandboxValidationError(
+                    "XPIDL_MODULE must be defined if " "XPIDL_SOURCES is defined.",
+                    context,
+                )
+            return
+
+        if not context["XPIDL_SOURCES"]:  # module name without sources is also invalid
+            raise SandboxValidationError(
+                "XPIDL_MODULE cannot be defined " "unless there are XPIDL_SOURCES",
+                context,
+            )
+
+        if context["DIST_INSTALL"] is False:  # warn, but do not fail
+            self.log(
+                logging.WARN,
+                "mozbuild_warning",
+                dict(path=context.main_path),
+                "{path}: DIST_INSTALL = False has no effect on XPIDL_SOURCES.",
+            )
+
+        for idl in context["XPIDL_SOURCES"]:  # validate each listed source exists on disk
+            if not os.path.exists(idl.full_path):
+                raise SandboxValidationError(
+                    "File %s from XPIDL_SOURCES " "does not exist" % idl.full_path,
+                    context,
+                )
+
+        yield XPIDLModule(context, xpidl_module, context["XPIDL_SOURCES"])
+
+    def _process_generated_files(self, context):
+        for path in context["CONFIGURE_DEFINE_FILES"]:
+            script = mozpath.join(
+                mozpath.dirname(mozpath.dirname(__file__)),
+                "action",
+                "process_define_files.py",
+            )
+            yield GeneratedFile(
+                context,
+                script,
+                "process_define_file",
+                six.text_type(path),
+                [Path(context, path + ".in")],  # input is the corresponding .in file
+            )
+
+        generated_files = context.get("GENERATED_FILES") or []
+        localized_generated_files = context.get("LOCALIZED_GENERATED_FILES") or []
+        if not (generated_files or localized_generated_files):
+            return
+
+        for (localized, gen) in (
+            (False, generated_files),
+            (True, localized_generated_files),
+        ):
+            for f in gen:
+                flags = gen[f]
+                outputs = f
+                inputs = []
+                if flags.script:
+                    method = "main"  # default entry point when none is specified
+                    script = SourcePath(context, flags.script).full_path
+
+                    # Deal with cases like "C:\\path\\to\\script.py:function".
+                    if ".py:" in script:
+                        script, method = script.rsplit(".py:", 1)
+                        script += ".py"
+
+                    if not os.path.exists(script):
+                        raise SandboxValidationError(
+                            "Script for generating %s does not exist: %s" % (f, script),
+                            context,
+                        )
+                    if os.path.splitext(script)[1] != ".py":  # only Python generator scripts are allowed
+                        raise SandboxValidationError(
+                            "Script for generating %s does not end in .py: %s"
+                            % (f, script),
+                            context,
+                        )
+                else:
+                    script = None
+                    method = None
+
+                for i in flags.inputs:
+                    p = Path(context, i)
+                    if isinstance(p, SourcePath) and not os.path.exists(p.full_path):  # objdir inputs may not exist yet
+                        raise SandboxValidationError(
+                            "Input for generating %s does not exist: %s"
+                            % (f, p.full_path),
+                            context,
+                        )
+                    inputs.append(p)
+
+                yield GeneratedFile(
+                    context,
+                    script,
+                    method,
+                    outputs,
+                    inputs,
+                    flags.flags,
+                    localized=localized,
+                    force=flags.force,
+                )
+
+    def _process_test_manifests(self, context):
+        for prefix, info in TEST_MANIFESTS.items():  # one "<PREFIX>_MANIFESTS" context variable per entry
+            for path, manifest in context.get("%s_MANIFESTS" % prefix, []):
+                for obj in self._process_test_manifest(context, info, path, manifest):
+                    yield obj
+
+        for flavor in REFTEST_FLAVORS:  # reftest-style manifests use their own processing path
+            for path, manifest in context.get("%s_MANIFESTS" % flavor.upper(), []):
+                for obj in self._process_reftest_manifest(
+                    context, flavor, path, manifest
+                ):
+                    yield obj
+
+    def _process_test_manifest(self, context, info, manifest_path, mpmanifest):
+        flavor, install_root, install_subdir, package_tests = info  # TEST_MANIFESTS metadata tuple
+
+        path = manifest_path.full_path
+        manifest_dir = mozpath.dirname(path)
+        manifest_reldir = mozpath.dirname(
+            mozpath.relpath(path, context.config.topsrcdir)
+        )
+        manifest_sources = [
+            mozpath.relpath(pth, context.config.topsrcdir)
+            for pth in mpmanifest.source_files
+        ]
+        install_prefix = mozpath.join(install_root, install_subdir)
+
+        try:
+            if not mpmanifest.tests:
+                raise SandboxValidationError("Empty test manifest: %s" % path, context)
+
+            defaults = mpmanifest.manifest_defaults[os.path.normpath(path)]
+            obj = TestManifest(
+                context,
+                path,
+                mpmanifest,
+                flavor=flavor,
+                install_prefix=install_prefix,
+                relpath=mozpath.join(manifest_reldir, mozpath.basename(path)),
+                sources=manifest_sources,
+                dupe_manifest="dupe-manifest" in defaults,
+            )
+
+            filtered = mpmanifest.tests  # no filtering is applied at this stage
+
+            missing = [t["name"] for t in filtered if not os.path.exists(t["path"])]
+            if missing:
+                raise SandboxValidationError(
+                    "Test manifest (%s) lists "
+                    "test that does not exist: %s" % (path, ", ".join(missing)),
+                    context,
+                )
+
+            out_dir = mozpath.join(install_prefix, manifest_reldir)
+
+            def process_support_files(test):  # fold one entry's support-files installs into obj
+                install_info = self._test_files_converter.convert_support_files(
+                    test, install_root, manifest_dir, out_dir
+                )
+
+                obj.pattern_installs.extend(install_info.pattern_installs)
+                for source, dest in install_info.installs:
+                    obj.installs[source] = (dest, False)
+                obj.external_installs |= install_info.external_installs
+                for install_path in install_info.deferred_installs:
+                    if all(
+                        [
+                            "*" not in install_path,
+                            not os.path.isfile(
+                                mozpath.join(context.config.topsrcdir, install_path[2:])
+                            ),
+                            install_path not in install_info.external_installs,
+                        ]
+                    ):
+                        raise SandboxValidationError(
+                            "Error processing test "
+                            "manifest %s: entry in support-files not present "
+                            "in the srcdir: %s" % (path, install_path),
+                            context,
+                        )
+
+                obj.deferred_installs |= install_info.deferred_installs
+
+            for test in filtered:
+                obj.tests.append(test)
+
+                # Some test files are compiled and should not be copied into the
+                # test package. They function as identifiers rather than files.
+                if package_tests:
+                    manifest_relpath = mozpath.relpath(
+                        test["path"], mozpath.dirname(test["manifest"])
+                    )
+                    obj.installs[mozpath.normpath(test["path"])] = (
+                        (mozpath.join(out_dir, manifest_relpath)),
+                        True,
+                    )
+
+                process_support_files(test)
+
+            for path, m_defaults in mpmanifest.manifest_defaults.items():  # NOTE(review): rebinds `path`; later error text uses the last value
+                process_support_files(m_defaults)
+
+            # We also copy manifests into the output directory,
+            # including manifests from [include:foo] directives.
+            for mpath in mpmanifest.manifests():
+                mpath = mozpath.normpath(mpath)
+                out_path = mozpath.join(out_dir, mozpath.basename(mpath))
+                obj.installs[mpath] = (out_path, False)
+
+            # Some manifests reference files that are auto generated as
+            # part of the build or shouldn't be installed for some
+            # reason. Here, we prune those files from the install set.
+            # FUTURE we should be able to detect autogenerated files from
+            # other build metadata. Once we do that, we can get rid of this.
+            for f in defaults.get("generated-files", "").split():
+                # We re-raise otherwise the stack trace isn't informative.
+                try:
+                    del obj.installs[mozpath.join(manifest_dir, f)]
+                except KeyError:
+                    raise SandboxValidationError(
+                        "Error processing test "
+                        "manifest %s: entry in generated-files not present "
+                        "elsewhere in manifest: %s" % (path, f),
+                        context,
+                    )
+
+            yield obj
+        except (AssertionError, Exception):  # NOTE(review): Exception already subsumes AssertionError
+            raise SandboxValidationError(
+                "Error processing test "
+                "manifest file %s: %s"
+                % (path, "\n".join(traceback.format_exception(*sys.exc_info()))),
+                context,
+            )
+
+    def _process_reftest_manifest(self, context, flavor, manifest_path, manifest):
+        manifest_full_path = manifest_path.full_path
+        manifest_reldir = mozpath.dirname(
+            mozpath.relpath(manifest_full_path, context.config.topsrcdir)
+        )
+
+        # reftest manifests don't come from manifest parser. But they are
+        # similar enough that we can use the same emitted objects. Note
+        # that we don't perform any installs for reftests.
+        obj = TestManifest(
+            context,
+            manifest_full_path,
+            manifest,
+            flavor=flavor,
+            install_prefix="%s/" % flavor,
+            relpath=mozpath.join(manifest_reldir, mozpath.basename(manifest_path)),
+        )
+        obj.tests = list(sorted(manifest.tests, key=lambda t: t["path"]))  # deterministic order by test path
+
+        yield obj
+
+    def _process_jar_manifests(self, context):
+        jar_manifests = context.get("JAR_MANIFESTS", [])
+        if len(jar_manifests) > 1:  # list-typed, but only a single entry is supported today
+            raise SandboxValidationError(
+                "While JAR_MANIFESTS is a list, "
+                "it is currently limited to one value.",
+                context,
+            )
+
+        for path in jar_manifests:
+            yield JARManifest(context, path)
+
+        # Temporary test to look for jar.mn files that creep in without using
+        # the new declaration. Before, we didn't require jar.mn files to
+        # declared anywhere (they were discovered). This will detect people
+        # relying on the old behavior.
+        if os.path.exists(os.path.join(context.srcdir, "jar.mn")):
+            if "jar.mn" not in jar_manifests:
+                raise SandboxValidationError(
+                    "A jar.mn exists but it "
+                    "is not referenced in the moz.build file. "
+                    "Please define JAR_MANIFESTS.",
+                    context,
+                )
+
+    def _emit_directory_traversal_from_context(self, context):
+        o = DirectoryTraversal(context)
+        o.dirs = context.get("DIRS", [])  # subdirectories to traverse into
+
+        # Some paths have a subconfigure, yet also have a moz.build. Those
+        # shouldn't end up in self._external_paths.
+        if o.objdir:
+            self._external_paths -= {o.relobjdir}  # set difference; see comment above
+
+        yield o
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/test/frontend/test_emitter.py
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/test/frontend/test_emitter.py	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-emitter-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/test/frontend/test_emitter.py	(revision 228)
@@ -0,0 +1,1850 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import six
+import unittest
+
+from mozunit import main
+
+from mozbuild.frontend.context import ObjDirPath, Path
+from mozbuild.frontend.data import (
+    ComputedFlags,
+    ConfigFileSubstitution,
+    Defines,
+    DirectoryTraversal,
+    Exports,
+    FinalTargetPreprocessedFiles,
+    GeneratedFile,
+    HostProgram,
+    HostRustLibrary,
+    HostRustProgram,
+    HostSources,
+    IPDLCollection,
+    JARManifest,
+    LocalInclude,
+    LocalizedFiles,
+    LocalizedPreprocessedFiles,
+    Program,
+    RustLibrary,
+    RustProgram,
+    SharedLibrary,
+    SimpleProgram,
+    Sources,
+    StaticLibrary,
+    TestHarnessFiles,
+    TestManifest,
+    UnifiedSources,
+    VariablePassthru,
+    WasmSources,
+)
+from mozbuild.frontend.emitter import TreeMetadataEmitter
+from mozbuild.frontend.reader import (
+    BuildReader,
+    BuildReaderError,
+    SandboxValidationError,
+)
+
+from mozbuild.test.common import MockConfig
+
+import mozpack.path as mozpath
+
+
+data_path = mozpath.abspath(mozpath.dirname(__file__))
+data_path = mozpath.join(data_path, "data")
+
+
+class TestEmitterBasic(unittest.TestCase):
+    def setUp(self):
+        self._old_env = dict(os.environ)
+        os.environ.pop("MOZ_OBJDIR", None)
+
+    def tearDown(self):
+        os.environ.clear()
+        os.environ.update(self._old_env)
+
+    def reader(self, name, enable_tests=False, extra_substs=None):
+        substs = dict(
+            ENABLE_TESTS="1" if enable_tests else "",
+            BIN_SUFFIX=".prog",
+            HOST_BIN_SUFFIX=".hostprog",
+            OS_TARGET="WINNT",
+            COMPILE_ENVIRONMENT="1",
+            STL_FLAGS=["-I/path/to/topobjdir/dist/stl_wrappers"],
+            VISIBILITY_FLAGS=["-include", "$(topsrcdir)/config/gcc_hidden.h"],
+            OBJ_SUFFIX="obj",
+            WASM_OBJ_SUFFIX="wasm",
+            WASM_CFLAGS=["-foo"],
+        )
+        if extra_substs:
+            substs.update(extra_substs)
+        config = MockConfig(mozpath.join(data_path, name), extra_substs=substs)
+
+        return BuildReader(config)
+
+    def read_topsrcdir(self, reader, filter_common=True):
+        emitter = TreeMetadataEmitter(reader.config)
+        objs = list(emitter.emit(reader.read_topsrcdir()))
+        self.assertGreater(len(objs), 0)
+
+        filtered = []
+        for obj in objs:
+            if filter_common and isinstance(obj, DirectoryTraversal):
+                continue
+
+            filtered.append(obj)
+
+        return filtered
+
+    def test_dirs_traversal_simple(self):
+        reader = self.reader("traversal-simple")
+        objs = self.read_topsrcdir(reader, filter_common=False)
+        self.assertEqual(len(objs), 4)
+
+        for o in objs:
+            self.assertIsInstance(o, DirectoryTraversal)
+            self.assertTrue(os.path.isabs(o.context_main_path))
+            self.assertEqual(len(o.context_all_paths), 1)
+
+        reldirs = [o.relsrcdir for o in objs]
+        self.assertEqual(reldirs, ["", "foo", "foo/biz", "bar"])
+
+        dirs = [[d.full_path for d in o.dirs] for o in objs]
+        self.assertEqual(
+            dirs,
+            [
+                [
+                    mozpath.join(reader.config.topsrcdir, "foo"),
+                    mozpath.join(reader.config.topsrcdir, "bar"),
+                ],
+                [mozpath.join(reader.config.topsrcdir, "foo", "biz")],
+                [],
+                [],
+            ],
+        )
+
+    def test_traversal_all_vars(self):
+        reader = self.reader("traversal-all-vars")
+        objs = self.read_topsrcdir(reader, filter_common=False)
+        self.assertEqual(len(objs), 2)
+
+        for o in objs:
+            self.assertIsInstance(o, DirectoryTraversal)
+
+        reldirs = set([o.relsrcdir for o in objs])
+        self.assertEqual(reldirs, set(["", "regular"]))
+
+        for o in objs:
+            reldir = o.relsrcdir
+
+            if reldir == "":
+                self.assertEqual(
+                    [d.full_path for d in o.dirs],
+                    [mozpath.join(reader.config.topsrcdir, "regular")],
+                )
+
+    def test_traversal_all_vars_enable_tests(self):
+        reader = self.reader("traversal-all-vars", enable_tests=True)
+        objs = self.read_topsrcdir(reader, filter_common=False)
+        self.assertEqual(len(objs), 3)
+
+        for o in objs:
+            self.assertIsInstance(o, DirectoryTraversal)
+
+        reldirs = set([o.relsrcdir for o in objs])
+        self.assertEqual(reldirs, set(["", "regular", "test"]))
+
+        for o in objs:
+            reldir = o.relsrcdir
+
+            if reldir == "":
+                self.assertEqual(
+                    [d.full_path for d in o.dirs],
+                    [
+                        mozpath.join(reader.config.topsrcdir, "regular"),
+                        mozpath.join(reader.config.topsrcdir, "test"),
+                    ],
+                )
+
+    def test_config_file_substitution(self):
+        reader = self.reader("config-file-substitution")
+        objs = self.read_topsrcdir(reader)
+        self.assertEqual(len(objs), 2)
+
+        self.assertIsInstance(objs[0], ConfigFileSubstitution)
+        self.assertIsInstance(objs[1], ConfigFileSubstitution)
+
+        topobjdir = mozpath.abspath(reader.config.topobjdir)
+        self.assertEqual(objs[0].relpath, "foo")
+        self.assertEqual(
+            mozpath.normpath(objs[0].output_path),
+            mozpath.normpath(mozpath.join(topobjdir, "foo")),
+        )
+        self.assertEqual(
+            mozpath.normpath(objs[1].output_path),
+            mozpath.normpath(mozpath.join(topobjdir, "bar")),
+        )
+
+    def test_variable_passthru(self):
+        reader = self.reader("variable-passthru")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], VariablePassthru)
+
+        wanted = {
+            "NO_DIST_INSTALL": True,
+            "RCFILE": "foo.rc",
+            "RCINCLUDE": "bar.rc",
+            "WIN32_EXE_LDFLAGS": ["-subsystem:console"],
+        }
+
+        variables = objs[0].variables
+        maxDiff = self.maxDiff
+        self.maxDiff = None
+        self.assertEqual(wanted, variables)
+        self.maxDiff = maxDiff
+
+    def test_compile_flags(self):
+        reader = self.reader(
+            "compile-flags", extra_substs={"WARNINGS_AS_ERRORS": "-Werror"}
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["STL"], reader.config.substs["STL_FLAGS"])
+        self.assertEqual(
+            flags.flags["VISIBILITY"], reader.config.substs["VISIBILITY_FLAGS"]
+        )
+        self.assertEqual(flags.flags["WARNINGS_AS_ERRORS"], ["-Werror"])
+        self.assertEqual(flags.flags["MOZBUILD_CFLAGS"], ["-Wall", "-funroll-loops"])
+        self.assertEqual(flags.flags["MOZBUILD_CXXFLAGS"], ["-funroll-loops", "-Wall"])
+
+    def test_asflags(self):
+        reader = self.reader("asflags", extra_substs={"ASFLAGS": ["-safeseh"]})
+        as_sources, sources, ldflags, lib, flags, asflags = self.read_topsrcdir(reader)
+        self.assertIsInstance(asflags, ComputedFlags)
+        self.assertEqual(asflags.flags["OS"], reader.config.substs["ASFLAGS"])
+        self.assertEqual(asflags.flags["MOZBUILD"], ["-no-integrated-as"])
+
+    def test_debug_flags(self):
+        reader = self.reader(
+            "compile-flags",
+            extra_substs={"MOZ_DEBUG_FLAGS": "-g", "MOZ_DEBUG_SYMBOLS": "1"},
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["DEBUG"], ["-g"])
+
+    def test_disable_debug_flags(self):
+        reader = self.reader(
+            "compile-flags",
+            extra_substs={"MOZ_DEBUG_FLAGS": "-g", "MOZ_DEBUG_SYMBOLS": ""},
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["DEBUG"], [])
+
+    def test_link_flags(self):
+        reader = self.reader(
+            "link-flags",
+            extra_substs={
+                "OS_LDFLAGS": ["-Wl,rpath-link=/usr/lib"],
+                "MOZ_OPTIMIZE": "",
+                "MOZ_OPTIMIZE_LDFLAGS": ["-Wl,-dead_strip"],
+                "MOZ_DEBUG_LDFLAGS": ["-framework ExceptionHandling"],
+            },
+        )
+        sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertEqual(ldflags.flags["OS"], reader.config.substs["OS_LDFLAGS"])
+        self.assertEqual(
+            ldflags.flags["MOZBUILD"], ["-Wl,-U_foo", "-framework Foo", "-x"]
+        )
+        self.assertEqual(ldflags.flags["OPTIMIZE"], [])
+
+    def test_debug_ldflags(self):
+        reader = self.reader(
+            "link-flags",
+            extra_substs={
+                "MOZ_DEBUG_SYMBOLS": "1",
+                "MOZ_DEBUG_LDFLAGS": ["-framework ExceptionHandling"],
+            },
+        )
+        sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertEqual(ldflags.flags["OS"], reader.config.substs["MOZ_DEBUG_LDFLAGS"])
+
+    def test_windows_opt_link_flags(self):
+        reader = self.reader(
+            "link-flags",
+            extra_substs={
+                "OS_ARCH": "WINNT",
+                "GNU_CC": "",
+                "MOZ_OPTIMIZE": "1",
+                "MOZ_DEBUG_LDFLAGS": ["-DEBUG"],
+                "MOZ_DEBUG_SYMBOLS": "1",
+                "MOZ_OPTIMIZE_FLAGS": [],
+                "MOZ_OPTIMIZE_LDFLAGS": [],
+            },
+        )
+        sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIn("-DEBUG", ldflags.flags["OS"])
+        self.assertIn("-OPT:REF,ICF", ldflags.flags["OS"])
+
+    def test_windows_dmd_link_flags(self):
+        reader = self.reader(
+            "link-flags",
+            extra_substs={
+                "OS_ARCH": "WINNT",
+                "GNU_CC": "",
+                "MOZ_DMD": "1",
+                "MOZ_DEBUG_LDFLAGS": ["-DEBUG"],
+                "MOZ_DEBUG_SYMBOLS": "1",
+                "MOZ_OPTIMIZE": "1",
+                "MOZ_OPTIMIZE_FLAGS": [],
+            },
+        )
+        sources, ldflags, lib, compile_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertEqual(ldflags.flags["OS"], ["-DEBUG", "-OPT:REF,ICF"])
+
+    def test_host_compile_flags(self):
+        reader = self.reader(
+            "host-compile-flags",
+            extra_substs={
+                "HOST_CXXFLAGS": ["-Wall", "-Werror"],
+                "HOST_CFLAGS": ["-Werror", "-Wall"],
+            },
+        )
+        sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(
+            flags.flags["HOST_CXXFLAGS"], reader.config.substs["HOST_CXXFLAGS"]
+        )
+        self.assertEqual(
+            flags.flags["HOST_CFLAGS"], reader.config.substs["HOST_CFLAGS"]
+        )
+        self.assertEqual(
+            set(flags.flags["HOST_DEFINES"]),
+            set(["-DFOO", '-DBAZ="abcd"', "-UQUX", "-DBAR=7", "-DVALUE=xyz"]),
+        )
+        self.assertEqual(
+            flags.flags["MOZBUILD_HOST_CFLAGS"], ["-funroll-loops", "-host-arg"]
+        )
+        self.assertEqual(flags.flags["MOZBUILD_HOST_CXXFLAGS"], [])
+
+    def test_host_no_optimize_flags(self):
+        reader = self.reader(
+            "host-compile-flags",
+            extra_substs={"MOZ_OPTIMIZE": "", "MOZ_OPTIMIZE_FLAGS": ["-O2"]},
+        )
+        sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["HOST_OPTIMIZE"], [])
+
+    def test_host_optimize_flags(self):
+        reader = self.reader(
+            "host-compile-flags",
+            extra_substs={"MOZ_OPTIMIZE": "1", "MOZ_OPTIMIZE_FLAGS": ["-O2"]},
+        )
+        sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["HOST_OPTIMIZE"], ["-O2"])
+
+    def test_cross_optimize_flags(self):
+        reader = self.reader(
+            "host-compile-flags",
+            extra_substs={
+                "MOZ_OPTIMIZE": "1",
+                "MOZ_OPTIMIZE_FLAGS": ["-O2"],
+                "HOST_OPTIMIZE_FLAGS": ["-O3"],
+                "CROSS_COMPILE": "1",
+            },
+        )
+        sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["HOST_OPTIMIZE"], ["-O3"])
+
+    def test_host_rtl_flag(self):
+        reader = self.reader(
+            "host-compile-flags", extra_substs={"OS_ARCH": "WINNT", "MOZ_DEBUG": "1"}
+        )
+        sources, ldflags, flags, lib, target_flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["RTL"], ["-MDd"])
+
+    def test_compile_flags_validation(self):
+        reader = self.reader("compile-flags-field-validation")
+
+        with six.assertRaisesRegex(self, BuildReaderError, "Invalid value."):
+            self.read_topsrcdir(reader)
+
+        reader = self.reader("compile-flags-type-validation")
+        with six.assertRaisesRegex(
+            self, BuildReaderError, "A list of strings must be provided"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_compile_flags_templates(self):
+        reader = self.reader(
+            "compile-flags-templates",
+            extra_substs={
+                "NSPR_CFLAGS": ["-I/nspr/path"],
+                "NSS_CFLAGS": ["-I/nss/path"],
+                "MOZ_JPEG_CFLAGS": ["-I/jpeg/path"],
+                "MOZ_PNG_CFLAGS": ["-I/png/path"],
+                "MOZ_ZLIB_CFLAGS": ["-I/zlib/path"],
+                "MOZ_PIXMAN_CFLAGS": ["-I/pixman/path"],
+            },
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["STL"], [])
+        self.assertEqual(flags.flags["VISIBILITY"], [])
+        self.assertEqual(
+            flags.flags["OS_INCLUDES"],
+            [
+                "-I/nspr/path",
+                "-I/nss/path",
+                "-I/jpeg/path",
+                "-I/png/path",
+                "-I/zlib/path",
+                "-I/pixman/path",
+            ],
+        )
+
+    def test_disable_stl_wrapping(self):
+        reader = self.reader("disable-stl-wrapping")
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["STL"], [])
+
+    def test_visibility_flags(self):
+        reader = self.reader("visibility-flags")
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(flags.flags["VISIBILITY"], [])
+
+    def test_defines_in_flags(self):
+        reader = self.reader("compile-defines")
+        defines, sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(
+            flags.flags["LIBRARY_DEFINES"], ["-DMOZ_LIBRARY_DEFINE=MOZ_TEST"]
+        )
+        self.assertEqual(flags.flags["DEFINES"], ["-DMOZ_TEST_DEFINE"])
+
+    def test_resolved_flags_error(self):
+        reader = self.reader("resolved-flags-error")
+        with six.assertRaisesRegex(
+            self,
+            BuildReaderError,
+            "`DEFINES` may not be set in COMPILE_FLAGS from moz.build",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_includes_in_flags(self):
+        reader = self.reader("compile-includes")
+        defines, sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(
+            flags.flags["BASE_INCLUDES"],
+            ["-I%s" % reader.config.topsrcdir, "-I%s" % reader.config.topobjdir],
+        )
+        self.assertEqual(
+            flags.flags["EXTRA_INCLUDES"],
+            ["-I%s/dist/include" % reader.config.topobjdir],
+        )
+        self.assertEqual(
+            flags.flags["LOCAL_INCLUDES"], ["-I%s/subdir" % reader.config.topsrcdir]
+        )
+
+    def test_allow_compiler_warnings(self):
+        reader = self.reader(
+            "allow-compiler-warnings", extra_substs={"WARNINGS_AS_ERRORS": "-Werror"}
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertEqual(flags.flags["WARNINGS_AS_ERRORS"], [])
+
+    def test_disable_compiler_warnings(self):
+        reader = self.reader(
+            "disable-compiler-warnings", extra_substs={"WARNINGS_CFLAGS": "-Wall"}
+        )
+        sources, ldflags, lib, flags = self.read_topsrcdir(reader)
+        self.assertEqual(flags.flags["WARNINGS_CFLAGS"], [])
+
+    def test_use_nasm(self):
+        # When nasm is not available, this should raise.
+        reader = self.reader("use-nasm")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "nasm is not available"
+        ):
+            self.read_topsrcdir(reader)
+
+        # When nasm is available, this should work.
+        reader = self.reader(
+            "use-nasm", extra_substs=dict(NASM="nasm", NASM_ASFLAGS="-foo")
+        )
+
+        sources, passthru, ldflags, lib, flags, asflags = self.read_topsrcdir(reader)
+
+        self.assertIsInstance(passthru, VariablePassthru)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertIsInstance(asflags, ComputedFlags)
+
+        self.assertEqual(asflags.flags["OS"], reader.config.substs["NASM_ASFLAGS"])
+
+        maxDiff = self.maxDiff
+        self.maxDiff = None
+        self.assertEqual(
+            passthru.variables,
+            {"AS": "nasm", "AS_DASH_C_FLAG": "", "ASOUTOPTION": "-o "},
+        )
+        self.maxDiff = maxDiff
+
+    def test_generated_files(self):
+        reader = self.reader("generated-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 3)
+        for o in objs:
+            self.assertIsInstance(o, GeneratedFile)
+            self.assertFalse(o.localized)
+            self.assertFalse(o.force)
+
+        expected = ["bar.c", "foo.c", ("xpidllex.py", "xpidlyacc.py")]
+        for o, f in zip(objs, expected):
+            expected_filename = f if isinstance(f, tuple) else (f,)
+            self.assertEqual(o.outputs, expected_filename)
+            self.assertEqual(o.script, None)
+            self.assertEqual(o.method, None)
+            self.assertEqual(o.inputs, [])
+
+    def test_generated_files_force(self):
+        reader = self.reader("generated-files-force")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 3)
+        for o in objs:
+            self.assertIsInstance(o, GeneratedFile)
+            self.assertEqual(o.force, "bar.c" in o.outputs)
+
+    def test_localized_generated_files(self):
+        reader = self.reader("localized-generated-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 2)
+        for o in objs:
+            self.assertIsInstance(o, GeneratedFile)
+            self.assertTrue(o.localized)
+
+        expected = ["abc.ini", ("bar", "baz")]
+        for o, f in zip(objs, expected):
+            expected_filename = f if isinstance(f, tuple) else (f,)
+            self.assertEqual(o.outputs, expected_filename)
+            self.assertEqual(o.script, None)
+            self.assertEqual(o.method, None)
+            self.assertEqual(o.inputs, [])
+
+    def test_localized_generated_files_force(self):
+        reader = self.reader("localized-generated-files-force")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 2)
+        for o in objs:
+            self.assertIsInstance(o, GeneratedFile)
+            self.assertTrue(o.localized)
+            self.assertEqual(o.force, "abc.ini" in o.outputs)
+
+    def test_localized_files_from_generated(self):
+        """Test that using LOCALIZED_GENERATED_FILES and then putting the output in
+        LOCALIZED_FILES as an objdir path works.
+        """
+        reader = self.reader("localized-files-from-generated")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 2)
+        self.assertIsInstance(objs[0], GeneratedFile)
+        self.assertIsInstance(objs[1], LocalizedFiles)
+
+    def test_localized_files_not_localized_generated(self):
+        """Test that using GENERATED_FILES and then putting the output in
+        LOCALIZED_FILES as an objdir path produces an error.
+        """
+        reader = self.reader("localized-files-not-localized-generated")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Objdir file listed in LOCALIZED_FILES not in LOCALIZED_GENERATED_FILES:",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_localized_generated_files_final_target_files(self):
+        """Test that using LOCALIZED_GENERATED_FILES and then putting the output in
+        FINAL_TARGET_FILES as an objdir path produces an error.
+        """
+        reader = self.reader("localized-generated-files-final-target-files")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Outputs of LOCALIZED_GENERATED_FILES cannot be used in FINAL_TARGET_FILES:",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_generated_files_method_names(self):
+        reader = self.reader("generated-files-method-names")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 2)
+        for o in objs:
+            self.assertIsInstance(o, GeneratedFile)
+
+        expected = ["bar.c", "foo.c"]
+        expected_method_names = ["make_bar", "main"]
+        for o, expected_filename, expected_method in zip(
+            objs, expected, expected_method_names
+        ):
+            self.assertEqual(o.outputs, (expected_filename,))
+            self.assertEqual(o.method, expected_method)
+            self.assertEqual(o.inputs, [])
+
+    def test_generated_files_absolute_script(self):
+        reader = self.reader("generated-files-absolute-script")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+
+        o = objs[0]
+        self.assertIsInstance(o, GeneratedFile)
+        self.assertEqual(o.outputs, ("bar.c",))
+        self.assertRegex(o.script, "script.py$")
+        self.assertEqual(o.method, "make_bar")
+        self.assertEqual(o.inputs, [])
+
+    def test_generated_files_no_script(self):
+        reader = self.reader("generated-files-no-script")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Script for generating bar.c does not exist"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_generated_files_no_inputs(self):
+        reader = self.reader("generated-files-no-inputs")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Input for generating foo.c does not exist"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_generated_files_no_python_script(self):
+        reader = self.reader("generated-files-no-python-script")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Script for generating bar.c does not end in .py",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_exports(self):
+        reader = self.reader("exports")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], Exports)
+
+        expected = [
+            ("", ["foo.h", "bar.h", "baz.h"]),
+            ("mozilla", ["mozilla1.h", "mozilla2.h"]),
+            ("mozilla/dom", ["dom1.h", "dom2.h", "dom3.h"]),
+            ("mozilla/gfx", ["gfx.h"]),
+            ("nspr/private", ["pprio.h", "pprthred.h"]),
+            ("vpx", ["mem.h", "mem2.h"]),
+        ]
+        for (expect_path, expect_headers), (actual_path, actual_headers) in zip(
+            expected, [(path, list(seq)) for path, seq in objs[0].files.walk()]
+        ):
+            self.assertEqual(expect_path, actual_path)
+            self.assertEqual(expect_headers, actual_headers)
+
+    def test_exports_missing(self):
+        """
+        Missing files in EXPORTS is an error.
+        """
+        reader = self.reader("exports-missing")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "File listed in EXPORTS does not exist:"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_exports_missing_generated(self):
+        """
+        An objdir file in EXPORTS that is not in GENERATED_FILES is an error.
+        """
+        reader = self.reader("exports-missing-generated")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Objdir file listed in EXPORTS not in GENERATED_FILES:",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_exports_generated(self):
+        reader = self.reader("exports-generated")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 2)
+        self.assertIsInstance(objs[0], GeneratedFile)
+        self.assertIsInstance(objs[1], Exports)
+        exports = [(path, list(seq)) for path, seq in objs[1].files.walk()]
+        self.assertEqual(
+            exports, [("", ["foo.h"]), ("mozilla", ["mozilla1.h", "!mozilla2.h"])]
+        )
+        path, files = exports[1]
+        self.assertIsInstance(files[1], ObjDirPath)
+
+    def test_test_harness_files(self):
+        reader = self.reader("test-harness-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], TestHarnessFiles)
+
+        expected = {
+            "mochitest": ["runtests.py", "utils.py"],
+            "testing/mochitest": ["mochitest.py", "mochitest.ini"],
+        }
+
+        for path, strings in objs[0].files.walk():
+            self.assertTrue(path in expected)
+            basenames = sorted(mozpath.basename(s) for s in strings)
+            self.assertEqual(sorted(expected[path]), basenames)
+
+    def test_test_harness_files_root(self):
+        reader = self.reader("test-harness-files-root")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Cannot install files to the root of TEST_HARNESS_FILES",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_program(self):
+        reader = self.reader("program")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 6)
+        self.assertIsInstance(objs[0], Sources)
+        self.assertIsInstance(objs[1], ComputedFlags)
+        self.assertIsInstance(objs[2], ComputedFlags)
+        self.assertIsInstance(objs[3], Program)
+        self.assertIsInstance(objs[4], SimpleProgram)
+        self.assertIsInstance(objs[5], SimpleProgram)
+
+        self.assertEqual(objs[3].program, "test_program.prog")
+        self.assertEqual(objs[4].program, "test_program1.prog")
+        self.assertEqual(objs[5].program, "test_program2.prog")
+
+        self.assertEqual(objs[3].name, "test_program.prog")
+        self.assertEqual(objs[4].name, "test_program1.prog")
+        self.assertEqual(objs[5].name, "test_program2.prog")
+
+        self.assertEqual(
+            objs[4].objs,
+            [
+                mozpath.join(
+                    reader.config.topobjdir,
+                    "test_program1.%s" % reader.config.substs["OBJ_SUFFIX"],
+                )
+            ],
+        )
+        self.assertEqual(
+            objs[5].objs,
+            [
+                mozpath.join(
+                    reader.config.topobjdir,
+                    "test_program2.%s" % reader.config.substs["OBJ_SUFFIX"],
+                )
+            ],
+        )
+
+    def test_program_paths(self):
+        """Various moz.build settings that change the destination of PROGRAM should be
+        accurately reflected in Program.output_path."""
+        reader = self.reader("program-paths")
+        objs = self.read_topsrcdir(reader)
+        prog_paths = [o.output_path for o in objs if isinstance(o, Program)]
+        self.assertEqual(
+            prog_paths,
+            [
+                "!/dist/bin/dist-bin.prog",
+                "!/dist/bin/foo/dist-subdir.prog",
+                "!/final/target/final-target.prog",
+                "!not-installed.prog",
+            ],
+        )
+
+    def test_host_program_paths(self):
+        """The destination of a HOST_PROGRAM (almost always dist/host/bin)
+        should be accurately reflected in Program.output_path."""
+        reader = self.reader("host-program-paths")
+        objs = self.read_topsrcdir(reader)
+        prog_paths = [o.output_path for o in objs if isinstance(o, HostProgram)]
+        self.assertEqual(
+            prog_paths,
+            [
+                "!/dist/host/bin/final-target.hostprog",
+                "!/dist/host/bin/dist-host-bin.hostprog",
+                "!not-installed.hostprog",
+            ],
+        )
+
+    def test_test_manifest_missing_manifest(self):
+        """A missing manifest file should result in an error."""
+        reader = self.reader("test-manifest-missing-manifest")
+
+        with six.assertRaisesRegex(self, BuildReaderError, "Missing files"):
+            self.read_topsrcdir(reader)
+
+    def test_empty_test_manifest_rejected(self):
+        """A test manifest without any entries is rejected."""
+        reader = self.reader("test-manifest-empty")
+
+        with six.assertRaisesRegex(self, SandboxValidationError, "Empty test manifest"):
+            self.read_topsrcdir(reader)
+
+    def test_test_manifest_just_support_files(self):
+        """A test manifest with no tests but support-files is not supported."""
+        reader = self.reader("test-manifest-just-support")
+
+        with six.assertRaisesRegex(self, SandboxValidationError, "Empty test manifest"):
+            self.read_topsrcdir(reader)
+
+    def test_test_manifest_dupe_support_files(self):
+        """A test manifest with dupe support-files in a single test is not
+        supported.
+        """
+        reader = self.reader("test-manifest-dupes")
+
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "bar.js appears multiple times "
+            "in a test manifest under a support-files field, please omit the duplicate entry.",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_test_manifest_absolute_support_files(self):
+        """Support files starting with '/' are placed relative to the install root"""
+        reader = self.reader("test-manifest-absolute-support")
+
+        objs = self.read_topsrcdir(reader)
+        self.assertEqual(len(objs), 1)
+        o = objs[0]
+        self.assertEqual(len(o.installs), 3)
+        expected = [
+            mozpath.normpath(mozpath.join(o.install_prefix, "../.well-known/foo.txt")),
+            mozpath.join(o.install_prefix, "absolute-support.ini"),
+            mozpath.join(o.install_prefix, "test_file.js"),
+        ]
+        paths = sorted([v[0] for v in o.installs.values()])
+        self.assertEqual(paths, expected)
+
+    @unittest.skip("Bug 1304316 - Items in the second set but not the first")
+    def test_test_manifest_shared_support_files(self):
+        """Support files starting with '!' are given separate treatment, so their
+        installation can be resolved when running tests.
+        """
+        reader = self.reader("test-manifest-shared-support")
+        supported, child = self.read_topsrcdir(reader)
+
+        expected_deferred_installs = {
+            "!/child/test_sub.js",
+            "!/child/another-file.sjs",
+            "!/child/data/**",
+        }
+
+        self.assertEqual(len(supported.installs), 3)
+        self.assertEqual(set(supported.deferred_installs), expected_deferred_installs)
+        self.assertEqual(len(child.installs), 3)
+        self.assertEqual(len(child.pattern_installs), 1)
+
+    def test_test_manifest_deffered_install_missing(self):
+        """A non-existent shared support file reference produces an error."""
+        reader = self.reader("test-manifest-shared-missing")
+
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "entry in support-files not present in the srcdir",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_test_manifest_install_includes(self):
+        """Ensure that any [include:foo.ini] are copied to the objdir."""
+        reader = self.reader("test-manifest-install-includes")
+
+        objs = self.read_topsrcdir(reader)
+        self.assertEqual(len(objs), 1)
+        o = objs[0]
+        self.assertEqual(len(o.installs), 3)
+        self.assertEqual(o.manifest_relpath, "mochitest.ini")
+        self.assertEqual(o.manifest_obj_relpath, "mochitest.ini")
+        expected = [
+            mozpath.normpath(mozpath.join(o.install_prefix, "common.ini")),
+            mozpath.normpath(mozpath.join(o.install_prefix, "mochitest.ini")),
+            mozpath.normpath(mozpath.join(o.install_prefix, "test_foo.html")),
+        ]
+        paths = sorted([v[0] for v in o.installs.values()])
+        self.assertEqual(paths, expected)
+
+    def test_test_manifest_includes(self):
+        """Ensure that manifest objects from the emitter list a correct manifest."""
+        reader = self.reader("test-manifest-emitted-includes")
+        [obj] = self.read_topsrcdir(reader)
+
+        # Expected manifest leaf names for our tests.
+        expected_manifests = {
+            "reftest1.html": "reftest.list",
+            "reftest1-ref.html": "reftest.list",
+            "reftest2.html": "included-reftest.list",
+            "reftest2-ref.html": "included-reftest.list",
+        }
+
+        for t in obj.tests:
+            self.assertTrue(t["manifest"].endswith(expected_manifests[t["name"]]))
+
+    def test_test_manifest_keys_extracted(self):
+        """Ensure all metadata from test manifests is extracted."""
+        reader = self.reader("test-manifest-keys-extracted")
+
+        objs = [o for o in self.read_topsrcdir(reader) if isinstance(o, TestManifest)]
+
+        self.assertEqual(len(objs), 8)
+
+        metadata = {
+            "a11y.ini": {
+                "flavor": "a11y",
+                "installs": {"a11y.ini": False, "test_a11y.js": True},
+                "pattern-installs": 1,
+            },
+            "browser.ini": {
+                "flavor": "browser-chrome",
+                "installs": {
+                    "browser.ini": False,
+                    "test_browser.js": True,
+                    "support1": False,
+                    "support2": False,
+                },
+            },
+            "mochitest.ini": {
+                "flavor": "mochitest",
+                "installs": {"mochitest.ini": False, "test_mochitest.js": True},
+                "external": {"external1", "external2"},
+            },
+            "chrome.ini": {
+                "flavor": "chrome",
+                "installs": {"chrome.ini": False, "test_chrome.js": True},
+            },
+            "xpcshell.ini": {
+                "flavor": "xpcshell",
+                "dupe": True,
+                "installs": {
+                    "xpcshell.ini": False,
+                    "test_xpcshell.js": True,
+                    "head1": False,
+                    "head2": False,
+                },
+            },
+            "reftest.list": {"flavor": "reftest", "installs": {}},
+            "crashtest.list": {"flavor": "crashtest", "installs": {}},
+            "python.ini": {"flavor": "python", "installs": {"python.ini": False}},
+        }
+
+        for o in objs:
+            m = metadata[mozpath.basename(o.manifest_relpath)]
+
+            self.assertTrue(o.path.startswith(o.directory))
+            self.assertEqual(o.flavor, m["flavor"])
+            self.assertEqual(o.dupe_manifest, m.get("dupe", False))
+
+            external_normalized = set(mozpath.basename(p) for p in o.external_installs)
+            self.assertEqual(external_normalized, m.get("external", set()))
+
+            self.assertEqual(len(o.installs), len(m["installs"]))
+            for path in o.installs.keys():
+                self.assertTrue(path.startswith(o.directory))
+                relpath = path[len(o.directory) + 1 :]
+
+                self.assertIn(relpath, m["installs"])
+                self.assertEqual(o.installs[path][1], m["installs"][relpath])
+
+            if "pattern-installs" in m:
+                self.assertEqual(len(o.pattern_installs), m["pattern-installs"])
+
+    def test_test_manifest_unmatched_generated(self):
+        reader = self.reader("test-manifest-unmatched-generated")
+
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "entry in generated-files not present elsewhere",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_test_manifest_parent_support_files_dir(self):
+        """support-files referencing a file in a parent directory works."""
+        reader = self.reader("test-manifest-parent-support-files-dir")
+
+        objs = [o for o in self.read_topsrcdir(reader) if isinstance(o, TestManifest)]
+
+        self.assertEqual(len(objs), 1)
+
+        o = objs[0]
+
+        expected = mozpath.join(o.srcdir, "support-file.txt")
+        self.assertIn(expected, o.installs)
+        self.assertEqual(
+            o.installs[expected],
+            ("testing/mochitest/tests/child/support-file.txt", False),
+        )
+
+    def test_test_manifest_missing_test_error(self):
+        """Missing test files should result in error."""
+        reader = self.reader("test-manifest-missing-test-file")
+
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "lists test that does not exist: test_missing.html",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_test_manifest_missing_test_error_unfiltered(self):
+        """Missing test files should result in error, even when the test list is not filtered."""
+        reader = self.reader("test-manifest-missing-test-file-unfiltered")
+
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "lists test that does not exist: missing.js"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_ipdl_sources(self):
+        reader = self.reader(
+            "ipdl_sources",
+            extra_substs={"IPDL_ROOT": mozpath.abspath("/path/to/topobjdir")},
+        )
+        objs = self.read_topsrcdir(reader)
+        ipdl_collection = objs[0]
+        self.assertIsInstance(ipdl_collection, IPDLCollection)
+
+        ipdls = set(
+            mozpath.relpath(p, ipdl_collection.topsrcdir)
+            for p in ipdl_collection.all_regular_sources()
+        )
+        expected = set(
+            ["bar/bar.ipdl", "bar/bar2.ipdlh", "foo/foo.ipdl", "foo/foo2.ipdlh"]
+        )
+
+        self.assertEqual(ipdls, expected)
+
+        pp_ipdls = set(
+            mozpath.relpath(p, ipdl_collection.topsrcdir)
+            for p in ipdl_collection.all_preprocessed_sources()
+        )
+        expected = set(["bar/bar1.ipdl", "foo/foo1.ipdl"])
+        self.assertEqual(pp_ipdls, expected)
+
+    def test_local_includes(self):
+        """Test that LOCAL_INCLUDES is emitted correctly."""
+        reader = self.reader("local_includes")
+        objs = self.read_topsrcdir(reader)
+
+        local_includes = [o.path for o in objs if isinstance(o, LocalInclude)]
+        expected = ["/bar/baz", "foo"]
+
+        self.assertEqual(local_includes, expected)
+
+        local_includes = [o.path.full_path for o in objs if isinstance(o, LocalInclude)]
+        expected = [
+            mozpath.join(reader.config.topsrcdir, "bar/baz"),
+            mozpath.join(reader.config.topsrcdir, "foo"),
+        ]
+
+        self.assertEqual(local_includes, expected)
+
+    def test_local_includes_file(self):
+        """Test that a filename can't be used in LOCAL_INCLUDES."""
+        reader = self.reader("local_includes-filename")
+
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Path specified in LOCAL_INCLUDES is a filename",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_generated_includes(self):
+        """Test that GENERATED_INCLUDES is emitted correctly."""
+        reader = self.reader("generated_includes")
+        objs = self.read_topsrcdir(reader)
+
+        generated_includes = [o.path for o in objs if isinstance(o, LocalInclude)]
+        expected = ["!/bar/baz", "!foo"]
+
+        self.assertEqual(generated_includes, expected)
+
+        generated_includes = [
+            o.path.full_path for o in objs if isinstance(o, LocalInclude)
+        ]
+        expected = [
+            mozpath.join(reader.config.topobjdir, "bar/baz"),
+            mozpath.join(reader.config.topobjdir, "foo"),
+        ]
+
+        self.assertEqual(generated_includes, expected)
+
+    def test_defines(self):
+        reader = self.reader("defines")
+        objs = self.read_topsrcdir(reader)
+
+        defines = {}
+        for o in objs:
+            if isinstance(o, Defines):
+                defines = o.defines
+
+        expected = {
+            "BAR": 7,
+            "BAZ": '"abcd"',
+            "FOO": True,
+            "VALUE": "xyz",
+            "QUX": False,
+        }
+
+        self.assertEqual(defines, expected)
+
+    def test_jar_manifests(self):
+        reader = self.reader("jar-manifests")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        for obj in objs:
+            self.assertIsInstance(obj, JARManifest)
+            self.assertIsInstance(obj.path, Path)
+
+    def test_jar_manifests_multiple_files(self):
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "limited to one value"
+        ):
+            reader = self.reader("jar-manifests-multiple-files")
+            self.read_topsrcdir(reader)
+
+    def test_xpidl_module_no_sources(self):
+        """XPIDL_MODULE without XPIDL_SOURCES should be rejected."""
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "XPIDL_MODULE " "cannot be defined"
+        ):
+            reader = self.reader("xpidl-module-no-sources")
+            self.read_topsrcdir(reader)
+
+    def test_xpidl_module_missing_sources(self):
+        """Missing XPIDL_SOURCES should be rejected."""
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "File .* " "from XPIDL_SOURCES does not exist"
+        ):
+            reader = self.reader("missing-xpidl")
+            self.read_topsrcdir(reader)
+
+    def test_missing_local_includes(self):
+        """LOCAL_INCLUDES containing non-existent directories should be rejected."""
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Path specified in " "LOCAL_INCLUDES does not exist",
+        ):
+            reader = self.reader("missing-local-includes")
+            self.read_topsrcdir(reader)
+
+    def test_library_defines(self):
+        """Test that LIBRARY_DEFINES is propagated properly."""
+        reader = self.reader("library-defines")
+        objs = self.read_topsrcdir(reader)
+
+        libraries = [o for o in objs if isinstance(o, StaticLibrary)]
+        library_flags = [
+            o
+            for o in objs
+            if isinstance(o, ComputedFlags) and "LIBRARY_DEFINES" in o.flags
+        ]
+        expected = {
+            "liba": "-DIN_LIBA",
+            "libb": "-DIN_LIBB -DIN_LIBA",
+            "libc": "-DIN_LIBA -DIN_LIBB",
+            "libd": "",
+        }
+        defines = {}
+        for lib in libraries:
+            defines[lib.basename] = " ".join(lib.lib_defines.get_defines())
+        self.assertEqual(expected, defines)
+        defines_in_flags = {}
+        for flags in library_flags:
+            defines_in_flags[flags.relobjdir] = " ".join(
+                flags.flags["LIBRARY_DEFINES"] or []
+            )
+        self.assertEqual(expected, defines_in_flags)
+
+    def test_sources(self):
+        """Test that SOURCES works properly."""
+        reader = self.reader("sources")
+        objs = self.read_topsrcdir(reader)
+
+        as_flags = objs.pop()
+        self.assertIsInstance(as_flags, ComputedFlags)
+        computed_flags = objs.pop()
+        self.assertIsInstance(computed_flags, ComputedFlags)
+        # The third to last object is a Linkable.
+        linkable = objs.pop()
+        self.assertTrue(linkable.cxx_link)
+        ld_flags = objs.pop()
+        self.assertIsInstance(ld_flags, ComputedFlags)
+        self.assertEqual(len(objs), 6)
+        for o in objs:
+            self.assertIsInstance(o, Sources)
+
+        suffix_map = {obj.canonical_suffix: obj for obj in objs}
+        self.assertEqual(len(suffix_map), 6)
+
+        expected = {
+            ".cpp": ["a.cpp", "b.cc", "c.cxx"],
+            ".c": ["d.c"],
+            ".m": ["e.m"],
+            ".mm": ["f.mm"],
+            ".S": ["g.S"],
+            ".s": ["h.s", "i.asm"],
+        }
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+            )
+
+            for f in files:
+                self.assertIn(
+                    mozpath.join(
+                        reader.config.topobjdir,
+                        "%s.%s"
+                        % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]),
+                    ),
+                    linkable.objs,
+                )
+
+    def test_sources_just_c(self):
+        """Test that a linkable with no C++ sources doesn't have cxx_link set."""
+        reader = self.reader("sources-just-c")
+        objs = self.read_topsrcdir(reader)
+
+        as_flags = objs.pop()
+        self.assertIsInstance(as_flags, ComputedFlags)
+        flags = objs.pop()
+        self.assertIsInstance(flags, ComputedFlags)
+        # The third to last object is a Linkable.
+        linkable = objs.pop()
+        self.assertFalse(linkable.cxx_link)
+
+    def test_linkables_cxx_link(self):
+        """Test that linkables transitively set cxx_link properly."""
+        reader = self.reader("test-linkables-cxx-link")
+        got_results = 0
+        for obj in self.read_topsrcdir(reader):
+            if isinstance(obj, SharedLibrary):
+                if obj.basename == "cxx_shared":
+                    self.assertEqual(
+                        obj.name,
+                        "%scxx_shared%s"
+                        % (reader.config.dll_prefix, reader.config.dll_suffix),
+                    )
+                    self.assertTrue(obj.cxx_link)
+                    got_results += 1
+                elif obj.basename == "just_c_shared":
+                    self.assertEqual(
+                        obj.name,
+                        "%sjust_c_shared%s"
+                        % (reader.config.dll_prefix, reader.config.dll_suffix),
+                    )
+                    self.assertFalse(obj.cxx_link)
+                    got_results += 1
+        self.assertEqual(got_results, 2)
+
+    def test_generated_sources(self):
+        """Test that GENERATED_SOURCES works properly."""
+        reader = self.reader("generated-sources")
+        objs = self.read_topsrcdir(reader)
+
+        as_flags = objs.pop()
+        self.assertIsInstance(as_flags, ComputedFlags)
+        flags = objs.pop()
+        self.assertIsInstance(flags, ComputedFlags)
+        # The third to last object is a Linkable.
+        linkable = objs.pop()
+        self.assertTrue(linkable.cxx_link)
+        flags = objs.pop()
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(len(objs), 6)
+
+        generated_sources = [
+            o for o in objs if isinstance(o, Sources) and o.generated_files
+        ]
+        self.assertEqual(len(generated_sources), 6)
+
+        suffix_map = {obj.canonical_suffix: obj for obj in generated_sources}
+        self.assertEqual(len(suffix_map), 6)
+
+        expected = {
+            ".cpp": ["a.cpp", "b.cc", "c.cxx"],
+            ".c": ["d.c"],
+            ".m": ["e.m"],
+            ".mm": ["f.mm"],
+            ".S": ["g.S"],
+            ".s": ["h.s", "i.asm"],
+        }
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.generated_files,
+                [mozpath.join(reader.config.topobjdir, f) for f in files],
+            )
+
+            for f in files:
+                self.assertIn(
+                    mozpath.join(
+                        reader.config.topobjdir,
+                        "%s.%s"
+                        % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]),
+                    ),
+                    linkable.objs,
+                )
+
+    def test_host_sources(self):
+        """Test that HOST_SOURCES works properly."""
+        reader = self.reader("host-sources")
+        objs = self.read_topsrcdir(reader)
+
+        # This objdir will generate target flags.
+        flags = objs.pop()
+        self.assertIsInstance(flags, ComputedFlags)
+        # The second to last object is a Linkable
+        linkable = objs.pop()
+        self.assertTrue(linkable.cxx_link)
+        # This objdir will also generate host flags.
+        host_flags = objs.pop()
+        self.assertIsInstance(host_flags, ComputedFlags)
+        # ...and ldflags.
+        ldflags = objs.pop()
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertEqual(len(objs), 3)
+        for o in objs:
+            self.assertIsInstance(o, HostSources)
+
+        suffix_map = {obj.canonical_suffix: obj for obj in objs}
+        self.assertEqual(len(suffix_map), 3)
+
+        expected = {
+            ".cpp": ["a.cpp", "b.cc", "c.cxx"],
+            ".c": ["d.c"],
+            ".mm": ["e.mm", "f.mm"],
+        }
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+            )
+
+            for f in files:
+                self.assertIn(
+                    mozpath.join(
+                        reader.config.topobjdir,
+                        "host_%s.%s"
+                        % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]),
+                    ),
+                    linkable.objs,
+                )
+
+    def test_wasm_sources(self):
+        """Test that WASM_SOURCES works properly."""
+        reader = self.reader(
+            "wasm-sources", extra_substs={"WASM_CC": "clang", "WASM_CXX": "clang++"}
+        )
+        objs = list(self.read_topsrcdir(reader))
+
+        # The second to last object is a linkable.
+        linkable = objs[-2]
+        # Other than that, we only care about the WasmSources objects.
+        objs = objs[:2]
+        for o in objs:
+            self.assertIsInstance(o, WasmSources)
+
+        suffix_map = {obj.canonical_suffix: obj for obj in objs}
+        self.assertEqual(len(suffix_map), 2)
+
+        expected = {".cpp": ["a.cpp", "b.cc", "c.cxx"], ".c": ["d.c"]}
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+            )
+            for f in files:
+                self.assertIn(
+                    mozpath.join(
+                        reader.config.topobjdir,
+                        "%s.%s"
+                        % (
+                            mozpath.splitext(f)[0],
+                            reader.config.substs["WASM_OBJ_SUFFIX"],
+                        ),
+                    ),
+                    linkable.objs,
+                )
+
+    def test_unified_sources(self):
+        """Test that UNIFIED_SOURCES works properly."""
+        reader = self.reader("unified-sources")
+        objs = self.read_topsrcdir(reader)
+
+        # The last object is a ComputedFlags, the second to last a Linkable,
+        # followed by ldflags, ignore them.
+        linkable = objs[-2]
+        objs = objs[:-3]
+        self.assertEqual(len(objs), 3)
+        for o in objs:
+            self.assertIsInstance(o, UnifiedSources)
+
+        suffix_map = {obj.canonical_suffix: obj for obj in objs}
+        self.assertEqual(len(suffix_map), 3)
+
+        expected = {
+            ".cpp": ["bar.cxx", "foo.cpp", "quux.cc"],
+            ".mm": ["objc1.mm", "objc2.mm"],
+            ".c": ["c1.c", "c2.c"],
+        }
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+            )
+            self.assertTrue(sources.have_unified_mapping)
+
+            for f in dict(sources.unified_source_mapping).keys():
+                self.assertIn(
+                    mozpath.join(
+                        reader.config.topobjdir,
+                        "%s.%s"
+                        % (mozpath.splitext(f)[0], reader.config.substs["OBJ_SUFFIX"]),
+                    ),
+                    linkable.objs,
+                )
+
+    def test_unified_sources_non_unified(self):
+        """Test that UNIFIED_SOURCES with FILES_PER_UNIFIED_FILE=1 works properly."""
+        reader = self.reader("unified-sources-non-unified")
+        objs = self.read_topsrcdir(reader)
+
+        # The last object is a Linkable, the second to last ComputedFlags,
+        # followed by ldflags, ignore them.
+        objs = objs[:-3]
+        self.assertEqual(len(objs), 3)
+        for o in objs:
+            self.assertIsInstance(o, UnifiedSources)
+
+        suffix_map = {obj.canonical_suffix: obj for obj in objs}
+        self.assertEqual(len(suffix_map), 3)
+
+        expected = {
+            ".cpp": ["bar.cxx", "foo.cpp", "quux.cc"],
+            ".mm": ["objc1.mm", "objc2.mm"],
+            ".c": ["c1.c", "c2.c"],
+        }
+        for suffix, files in expected.items():
+            sources = suffix_map[suffix]
+            self.assertEqual(
+                sources.files, [mozpath.join(reader.config.topsrcdir, f) for f in files]
+            )
+            self.assertFalse(sources.have_unified_mapping)
+
+    def test_object_conflicts(self):
+        """Test that object name conflicts are detected."""
+        reader = self.reader("object-conflicts/1")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from SOURCES would have the same object name as"
+            " Test.c from SOURCES\\.",
+        ):
+            self.read_topsrcdir(reader)
+
+        reader = self.reader("object-conflicts/2")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from SOURCES would have the same object name as"
+            " subdir/Test.cpp from SOURCES\\.",
+        ):
+            self.read_topsrcdir(reader)
+
+        reader = self.reader("object-conflicts/3")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from UNIFIED_SOURCES would have the same object name as"
+            " Test.c from SOURCES in non-unified builds\\.",
+        ):
+            self.read_topsrcdir(reader)
+
+        reader = self.reader("object-conflicts/4")
+        with self.assertRaisesRegex(
+            SandboxValidationError,
+            "Test.cpp from UNIFIED_SOURCES would have the same object name as"
+            " Test.c from UNIFIED_SOURCES in non-unified builds\\.",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_final_target_pp_files(self):
+        """Test that FINAL_TARGET_PP_FILES works properly."""
+        reader = self.reader("dist-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], FinalTargetPreprocessedFiles)
+
+        # Ideally we'd test hierarchies, but that would just be testing
+        # the HierarchicalStringList class, which we test separately.
+        for path, files in objs[0].files.walk():
+            self.assertEqual(path, "")
+            self.assertEqual(len(files), 2)
+
+            expected = {"install.rdf", "main.js"}
+            for f in files:
+                self.assertTrue(six.text_type(f) in expected)
+
+    def test_missing_final_target_pp_files(self):
+        """Test that FINAL_TARGET_PP_FILES with missing files throws errors."""
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "File listed in " "FINAL_TARGET_PP_FILES does not exist",
+        ):
+            reader = self.reader("dist-files-missing")
+            self.read_topsrcdir(reader)
+
+    def test_final_target_pp_files_non_srcdir(self):
+        """Test that non-srcdir paths in FINAL_TARGET_PP_FILES throws errors."""
+        reader = self.reader("final-target-pp-files-non-srcdir")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Only source directory paths allowed in FINAL_TARGET_PP_FILES:",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_localized_files(self):
+        """Test that LOCALIZED_FILES works properly."""
+        reader = self.reader("localized-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], LocalizedFiles)
+
+        for path, files in objs[0].files.walk():
+            self.assertEqual(path, "foo")
+            self.assertEqual(len(files), 3)
+
+            expected = {"en-US/bar.ini", "en-US/code/*.js", "en-US/foo.js"}
+            for f in files:
+                self.assertTrue(six.text_type(f) in expected)
+
+    def test_localized_files_no_en_us(self):
+        """Test that LOCALIZED_FILES errors if a path does not start with
+        `en-US/` or contain `locales/en-US/`."""
+        reader = self.reader("localized-files-no-en-us")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "LOCALIZED_FILES paths must start with `en-US/` or contain `locales/en-US/`: "
+            "foo.js",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_localized_pp_files(self):
+        """Test that LOCALIZED_PP_FILES works properly."""
+        reader = self.reader("localized-pp-files")
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 1)
+        self.assertIsInstance(objs[0], LocalizedPreprocessedFiles)
+
+        for path, files in objs[0].files.walk():
+            self.assertEqual(path, "foo")
+            self.assertEqual(len(files), 2)
+
+            expected = {"en-US/bar.ini", "en-US/foo.js"}
+            for f in files:
+                self.assertTrue(six.text_type(f) in expected)
+
+    def test_rust_library_no_cargo_toml(self):
+        """Test that defining a RustLibrary without a Cargo.toml fails."""
+        reader = self.reader("rust-library-no-cargo-toml")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "No Cargo.toml file found"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_name_mismatch(self):
+        """Test that defining a RustLibrary that doesn't match Cargo.toml fails."""
+        reader = self.reader("rust-library-name-mismatch")
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "library.*does not match Cargo.toml-defined package",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_no_lib_section(self):
+        """Test that a RustLibrary Cargo.toml with no [lib] section fails."""
+        reader = self.reader("rust-library-no-lib-section")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Cargo.toml for.* has no \\[lib\\] section"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_invalid_crate_type(self):
+        """Test that a RustLibrary Cargo.toml has a permitted crate-type."""
+        reader = self.reader("rust-library-invalid-crate-type")
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "crate-type.* is not permitted"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_dash_folding(self):
+        """Test that on-disk names of RustLibrary objects convert dashes to underscores."""
+        reader = self.reader(
+            "rust-library-dash-folding",
+            extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 3)
+        ldflags, lib, cflags = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(lib, RustLibrary)
+        self.assertRegex(lib.lib_name, "random_crate")
+        self.assertRegex(lib.import_name, "random_crate")
+        self.assertRegex(lib.basename, "random-crate")
+
+    def test_multiple_rust_libraries(self):
+        """Test that linking multiple Rust libraries throws an error"""
+        reader = self.reader(
+            "multiple-rust-libraries",
+            extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+        )
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Cannot link the following Rust libraries"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_library_features(self):
+        """Test that RustLibrary features are correctly emitted."""
+        reader = self.reader(
+            "rust-library-features",
+            extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 3)
+        ldflags, lib, cflags = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(lib, RustLibrary)
+        self.assertEqual(lib.features, ["musthave", "cantlivewithout"])
+
+    def test_rust_library_duplicate_features(self):
+        """Test that duplicate RustLibrary features are rejected."""
+        reader = self.reader("rust-library-duplicate-features")
+        # Duplicate feature names must be rejected at moz.build read time.
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "features for .* should not contain duplicates",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_program_no_cargo_toml(self):
+        """Test that specifying RUST_PROGRAMS without a Cargo.toml fails."""
+        reader = self.reader("rust-program-no-cargo-toml")
+        # RUST_PROGRAMS requires a Cargo.toml alongside the moz.build.
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "No Cargo.toml file found"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_host_rust_program_no_cargo_toml(self):
+        """Test that specifying HOST_RUST_PROGRAMS without a Cargo.toml fails."""
+        reader = self.reader("host-rust-program-no-cargo-toml")
+        # Same Cargo.toml requirement as RUST_PROGRAMS, for host programs.
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "No Cargo.toml file found"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_program_nonexistent_name(self):
+        """Test that specifying RUST_PROGRAMS that don't exist in Cargo.toml
+        correctly throws an error."""
+        reader = self.reader("rust-program-nonexistent-name")
+        # Program names in RUST_PROGRAMS must match a [[bin]] in Cargo.toml.
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Cannot find Cargo.toml definition for"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_host_rust_program_nonexistent_name(self):
+        """Test that specifying HOST_RUST_PROGRAMS that don't exist in
+        Cargo.toml correctly throws an error."""
+        reader = self.reader("host-rust-program-nonexistent-name")
+        # Same Cargo.toml name check as above, for host programs.
+        with six.assertRaisesRegex(
+            self, SandboxValidationError, "Cannot find Cargo.toml definition for"
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_rust_programs(self):
+        """Test RUST_PROGRAMS emission."""
+        reader = self.reader(
+            "rust-programs",
+            extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc", BIN_SUFFIX=".exe"),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        # Note: order is ldflags, cflags, program — unlike the library tests.
+        self.assertEqual(len(objs), 3)
+        ldflags, cflags, prog = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(prog, RustProgram)
+        self.assertEqual(prog.name, "some")
+
+    def test_host_rust_programs(self):
+        """Test HOST_RUST_PROGRAMS emission."""
+        reader = self.reader(
+            "host-rust-programs",
+            extra_substs=dict(
+                RUST_HOST_TARGET="i686-pc-windows-msvc", HOST_BIN_SUFFIX=".exe"
+            ),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        self.assertEqual(len(objs), 4)
+        print(objs)
+        ldflags, cflags, hostflags, prog = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(hostflags, ComputedFlags)
+        self.assertIsInstance(prog, HostRustProgram)
+        self.assertEqual(prog.name, "some")
+
+    def test_host_rust_libraries(self):
+        """Test HOST_RUST_LIBRARIES emission."""
+        reader = self.reader(
+            "host-rust-libraries",
+            extra_substs=dict(
+                RUST_HOST_TARGET="i686-pc-windows-msvc", HOST_BIN_SUFFIX=".exe"
+            ),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        # Expect ldflags, the HostRustLibrary, cflags.
+        self.assertEqual(len(objs), 3)
+        ldflags, lib, cflags = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(lib, HostRustLibrary)
+        self.assertRegex(lib.lib_name, "host_lib")
+        self.assertRegex(lib.import_name, "host_lib")
+
+    def test_crate_dependency_path_resolution(self):
+        """Test recursive dependencies resolve with the correct paths."""
+        reader = self.reader(
+            "crate-dependency-path-resolution",
+            extra_substs=dict(RUST_TARGET="i686-pc-windows-msvc"),
+        )
+        objs = self.read_topsrcdir(reader)
+
+        # Success criterion: reading completes and yields a single RustLibrary
+        # (path resolution failures would raise inside read_topsrcdir).
+        self.assertEqual(len(objs), 3)
+        ldflags, lib, cflags = objs
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(cflags, ComputedFlags)
+        self.assertIsInstance(lib, RustLibrary)
+
+    def test_install_shared_lib(self):
+        """Test that we can install a shared library with TEST_HARNESS_FILES"""
+        reader = self.reader("test-install-shared-lib")
+        objs = self.read_topsrcdir(reader)
+        self.assertIsInstance(objs[0], TestHarnessFiles)
+        self.assertIsInstance(objs[1], VariablePassthru)
+        self.assertIsInstance(objs[2], ComputedFlags)
+        self.assertIsInstance(objs[3], SharedLibrary)
+        self.assertIsInstance(objs[4], ComputedFlags)
+        # The "!" prefix marks an objdir (built) file in TEST_HARNESS_FILES.
+        for path, files in objs[0].files.walk():
+            for f in files:
+                self.assertEqual(str(f), "!libfoo.so")
+                self.assertEqual(path, "foo/bar")
+
+    def test_symbols_file(self):
+        """Test that SYMBOLS_FILE works"""
+        reader = self.reader("test-symbols-file")
+        # The GeneratedFile for the .def file is emitted first.
+        genfile, ldflags, shlib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(genfile, GeneratedFile)
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(shlib, SharedLibrary)
+        # This looks weird but MockConfig sets DLL_{PREFIX,SUFFIX} and
+        # the reader method in this class sets OS_TARGET=WINNT.
+        self.assertEqual(shlib.symbols_file, "libfoo.so.def")
+
+    def test_symbols_file_objdir(self):
+        """Test that a SYMBOLS_FILE in the objdir works"""
+        reader = self.reader("test-symbols-file-objdir")
+        genfile, ldflags, shlib, flags = self.read_topsrcdir(reader)
+        self.assertIsInstance(genfile, GeneratedFile)
+        # The symbols file is produced by a GENERATED_FILES script (foo.py).
+        self.assertEqual(
+            genfile.script, mozpath.join(reader.config.topsrcdir, "foo.py")
+        )
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertIsInstance(ldflags, ComputedFlags)
+        self.assertIsInstance(shlib, SharedLibrary)
+        self.assertEqual(shlib.symbols_file, "foo.symbols")
+
+    def test_symbols_file_objdir_missing_generated(self):
+        """Test that a SYMBOLS_FILE in the objdir that's missing
+        from GENERATED_FILES is an error.
+        """
+        reader = self.reader("test-symbols-file-objdir-missing-generated")
+        # An objdir SYMBOLS_FILE with no matching GENERATED_FILES entry must fail.
+        with six.assertRaisesRegex(
+            self,
+            SandboxValidationError,
+            "Objdir file specified in SYMBOLS_FILE not in GENERATED_FILES:",
+        ):
+            self.read_topsrcdir(reader)
+
+    def test_wasm_compile_flags(self):
+        """Test that WASM compile flags and defines are computed and emitted."""
+        reader = self.reader(
+            "wasm-compile-flags",
+            extra_substs={"WASM_CC": "clang", "WASM_CXX": "clang++"},
+        )
+        # The third emitted object carries the computed flags.
+        flags = list(self.read_topsrcdir(reader))[2]
+        self.assertIsInstance(flags, ComputedFlags)
+        self.assertEqual(
+            flags.flags["WASM_CFLAGS"], reader.config.substs["WASM_CFLAGS"]
+        )
+        self.assertEqual(
+            flags.flags["MOZBUILD_WASM_CFLAGS"], ["-funroll-loops", "-wasm-arg"]
+        )
+        # DEFINES order is not guaranteed; compare as sets.
+        self.assertEqual(
+            set(flags.flags["WASM_DEFINES"]),
+            set(["-DFOO", '-DBAZ="abcd"', "-UQUX", "-DBAR=7", "-DVALUE=xyz"]),
+        )
+
+
+# Allow running this test module directly.
+if __name__ == "__main__":
+    main()
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-icu-sources-patch/create.patch.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-icu-sources-patch/create.patch.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-icu-sources-patch/create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+VERSION=102.15.0
+
+tar --files-from=file.list -xJvf ../firefox-${VERSION}esr.source.tar.xz
+mv firefox-$VERSION firefox-$VERSION-orig
+
+cp -rf ./firefox-$VERSION-new ./firefox-$VERSION
+
+diff --unified -Nr  firefox-$VERSION-orig  firefox-$VERSION > firefox-$VERSION-icu-sources.patch
+
+mv firefox-$VERSION-icu-sources.patch ../patches
+
+rm -rf ./firefox-$VERSION
+rm -rf ./firefox-$VERSION-orig

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-icu-sources-patch/create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-icu-sources-patch/file.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-icu-sources-patch/file.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-icu-sources-patch/file.list	(revision 228)
@@ -0,0 +1 @@
+firefox-102.15.0/intl/icu_sources_data.py
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-icu-sources-patch/firefox-102.15.0-new/intl/icu_sources_data.py
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-icu-sources-patch/firefox-102.15.0-new/intl/icu_sources_data.py	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-icu-sources-patch/firefox-102.15.0-new/intl/icu_sources_data.py	(revision 228)
@@ -0,0 +1,293 @@
+#!/usr/bin/env python
+#
+# Any copyright is dedicated to the Public Domain.
+# http://creativecommons.org/publicdomain/zero/1.0/
+#
+# Generate SOURCES in sources.mozbuild files from ICU's Makefile.in
+# files, and also build a standalone copy of ICU using its build
+# system to generate a new copy of the in-tree ICU data file.
+#
+# This script expects to be run from `update-icu.sh` after the in-tree
+# copy of ICU has been updated.
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import glob
+import multiprocessing
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+# Stand-in for mozilla's mozpack.path ("mozpath"): plain os.path is close
+# enough for this standalone use, and normsep is stubbed to the identity
+# (assumes paths already use forward slashes — TODO confirm on the build host).
+import os.path as mozpath
+mozpath.normsep = lambda p: p
+
+# The following files have been determined to be dead/unused by a
+# semi-automated analysis. You can just remove any of the files below
+# if you need them. However, files marked with a "Cluster" comment
+# can only be removed together, as they have (directional) dependencies.
+# If you want to rerun this analysis, contact :decoder.
+UNUSED_SOURCES = set(
+    [
+        "intl/icu/source/common/bytestrieiterator.cpp",
+        "intl/icu/source/common/cstr.cpp",
+        "intl/icu/source/common/cwchar.cpp",
+        "intl/icu/source/common/icudataver.cpp",
+        "intl/icu/source/common/icuplug.cpp",
+        "intl/icu/source/common/pluralmap.cpp",
+        "intl/icu/source/common/ucat.cpp",
+        "intl/icu/source/common/ucnv2022.cpp",
+        "intl/icu/source/common/ucnv_ct.cpp",
+        "intl/icu/source/common/ucnvdisp.cpp",
+        "intl/icu/source/common/ucnv_ext.cpp",
+        "intl/icu/source/common/ucnvhz.cpp",
+        "intl/icu/source/common/ucnvisci.cpp",
+        "intl/icu/source/common/ucnv_lmb.cpp",
+        "intl/icu/source/common/ucnvmbcs.cpp",
+        "intl/icu/source/common/uidna.cpp",
+        "intl/icu/source/common/unorm.cpp",
+        "intl/icu/source/common/usc_impl.cpp",
+        "intl/icu/source/common/ustr_wcs.cpp",
+        "intl/icu/source/common/util_props.cpp",
+        "intl/icu/source/i18n/anytrans.cpp",
+        "intl/icu/source/i18n/brktrans.cpp",
+        "intl/icu/source/i18n/casetrn.cpp",
+        "intl/icu/source/i18n/cpdtrans.cpp",
+        "intl/icu/source/i18n/esctrn.cpp",
+        "intl/icu/source/i18n/fmtable_cnv.cpp",
+        "intl/icu/source/i18n/funcrepl.cpp",
+        "intl/icu/source/i18n/gender.cpp",
+        "intl/icu/source/i18n/name2uni.cpp",
+        "intl/icu/source/i18n/nortrans.cpp",
+        "intl/icu/source/i18n/nultrans.cpp",
+        "intl/icu/source/i18n/quant.cpp",
+        "intl/icu/source/i18n/rbt.cpp",
+        "intl/icu/source/i18n/rbt_data.cpp",
+        "intl/icu/source/i18n/rbt_pars.cpp",
+        "intl/icu/source/i18n/rbt_rule.cpp",
+        "intl/icu/source/i18n/rbt_set.cpp",
+        "intl/icu/source/i18n/regexcmp.cpp",
+        "intl/icu/source/i18n/regeximp.cpp",
+        "intl/icu/source/i18n/regexst.cpp",
+        "intl/icu/source/i18n/regextxt.cpp",
+        "intl/icu/source/i18n/rematch.cpp",
+        "intl/icu/source/i18n/remtrans.cpp",
+        "intl/icu/source/i18n/repattrn.cpp",
+        "intl/icu/source/i18n/scientificnumberformatter.cpp",
+        "intl/icu/source/i18n/strmatch.cpp",
+        "intl/icu/source/i18n/strrepl.cpp",
+        "intl/icu/source/i18n/titletrn.cpp",
+        "intl/icu/source/i18n/tolowtrn.cpp",
+        "intl/icu/source/i18n/toupptrn.cpp",
+        "intl/icu/source/i18n/translit.cpp",
+        "intl/icu/source/i18n/transreg.cpp",
+        "intl/icu/source/i18n/tridpars.cpp",
+        "intl/icu/source/i18n/unesctrn.cpp",
+        "intl/icu/source/i18n/uni2name.cpp",
+        "intl/icu/source/i18n/uregexc.cpp",
+        "intl/icu/source/i18n/uregex.cpp",
+        "intl/icu/source/i18n/uregion.cpp",
+        "intl/icu/source/i18n/uspoof_build.cpp",
+        "intl/icu/source/i18n/uspoof_conf.cpp",
+        "intl/icu/source/i18n/utrans.cpp",
+        "intl/icu/source/i18n/vzone.cpp",
+        "intl/icu/source/i18n/zrule.cpp",
+        "intl/icu/source/i18n/ztrans.cpp",
+        # Cluster
+        "intl/icu/source/common/resbund_cnv.cpp",
+        "intl/icu/source/common/ures_cnv.cpp",
+        # Cluster
+        "intl/icu/source/common/propsvec.cpp",
+        "intl/icu/source/common/ucnvsel.cpp",
+        "intl/icu/source/common/ucnv_set.cpp",
+        # Cluster
+        "intl/icu/source/common/ubiditransform.cpp",
+        "intl/icu/source/common/ushape.cpp",
+        # Cluster
+        "intl/icu/source/i18n/csdetect.cpp",
+        "intl/icu/source/i18n/csmatch.cpp",
+        "intl/icu/source/i18n/csr2022.cpp",
+        "intl/icu/source/i18n/csrecog.cpp",
+        "intl/icu/source/i18n/csrmbcs.cpp",
+        "intl/icu/source/i18n/csrsbcs.cpp",
+        "intl/icu/source/i18n/csrucode.cpp",
+        "intl/icu/source/i18n/csrutf8.cpp",
+        "intl/icu/source/i18n/inputext.cpp",
+        "intl/icu/source/i18n/ucsdet.cpp",
+        # Cluster
+        "intl/icu/source/i18n/alphaindex.cpp",
+        "intl/icu/source/i18n/ulocdata.cpp",
+    ]
+)
+
+
+def ensure_source_file_exists(dir, filename):
+    """Return the path of ``filename`` under ``dir``.
+
+    Raises Exception if the file does not exist on disk, which catches
+    stale entries in a sources.txt list early.
+    """
+    f = mozpath.join(dir, filename)
+    if os.path.isfile(f):
+        return f
+    raise Exception("Couldn't find source file for: %s" % filename)
+
+
+def get_sources(sources_file):
+    """Read a sources.txt file (one filename per line) and return the
+    corresponding full paths, sorted case-insensitively.
+
+    Every listed file must exist (see ensure_source_file_exists).
+    """
+    srcdir = os.path.dirname(sources_file)
+    with open(sources_file) as f:
+        return sorted(
+            (ensure_source_file_exists(srcdir, name.strip()) for name in f),
+            key=lambda x: x.lower(),
+        )
+
+
+def list_headers(path):
+    """Return the plain files directly under ``path``, sorted
+    case-insensitively; subdirectories are skipped."""
+    result = []
+    for name in os.listdir(path):
+        f = mozpath.join(path, name)
+        if os.path.isfile(f):
+            result.append(f)
+    return sorted(result, key=lambda x: x.lower())
+
+
+def write_sources(mozbuild, sources, headers):
+    """Write a generated sources.mozbuild file listing ``sources`` (split
+    into used vs. unused via UNUSED_SOURCES) and ``headers``."""
+    with open(mozbuild, "w", newline="\n", encoding="utf-8") as f:
+        f.write(
+            "# THIS FILE IS GENERATED BY /intl/icu_sources_data.py " + "DO NOT EDIT\n"
+        )
+
+        def write_list(name, content):
+            # Lowercase names are assigned ("="); uppercase ones append ("+=").
+            # Empty lists are omitted entirely.
+            if content:
+                f.write("%s %s [\n" % (name, "=" if name.islower() else "+="))
+                f.write("".join("   '/%s',\n" % s for s in content))
+                f.write("]\n")
+
+        write_list("sources", [s for s in sources if s not in UNUSED_SOURCES])
+        write_list("other_sources", [s for s in sources if s in UNUSED_SOURCES])
+        write_list("EXPORTS.unicode", headers)
+
+
+def update_sources(topsrcdir):
+    """Regenerate config/external/icu/*/sources.mozbuild from each ICU
+    subdirectory's sources.txt, including unicode/ headers when present."""
+    print("Updating ICU sources lists...")
+    for d in ["common", "i18n", "tools/toolutil", "tools/icupkg"]:
+        base_path = mozpath.join(topsrcdir, "intl/icu/source/%s" % d)
+        sources_file = mozpath.join(base_path, "sources.txt")
+        mozbuild = mozpath.join(
+            topsrcdir, "config/external/icu/%s/sources.mozbuild" % mozpath.basename(d)
+        )
+        # Paths written to the .mozbuild file are topsrcdir-relative.
+        sources = [mozpath.relpath(s, topsrcdir) for s in get_sources(sources_file)]
+        unicode_dir = mozpath.join(base_path, "unicode")
+        if os.path.exists(unicode_dir):
+            headers = [
+                mozpath.normsep(os.path.relpath(s, topsrcdir))
+                for s in list_headers(unicode_dir)
+            ]
+        else:
+            # Tool directories have no public unicode/ headers.
+            headers = None
+        write_sources(mozbuild, sources, headers)
+
+
+def try_run(name, command, cwd=None, **kwargs):
+    """Run ``command`` in ``cwd`` and return True on success, False if it
+    exits non-zero; all output is routed to our stderr.
+
+    ``name`` labels the build step; it is unused in this variant
+    (presumably kept for interface compatibility with callers).
+    """
+    try:
+        subprocess.check_call(
+            command, cwd=cwd, stdout=sys.stderr, stderr=subprocess.STDOUT, **kwargs
+        )
+    except subprocess.CalledProcessError:
+        print('''Error running "{}" in directory {}'''.format(' '.join(command), cwd), file=sys.stderr)
+        return False
+    else:
+        return True
+
+
+def get_data_file(data_dir):
+    """Return the first icudt*.dat file found in ``data_dir``, or None."""
+    files = glob.glob(mozpath.join(data_dir, "icudt*.dat"))
+    return files[0] if files else None
+
+
+def update_data_file(topsrcdir):
+    objdir = tempfile.mkdtemp(prefix="icu-obj-")
+    configure = mozpath.join(topsrcdir, "intl/icu/source/configure")
+    env = dict(os.environ)
+    # bug 1262101 - these should be shared with the moz.build files
+    env.update(
+        {
+            "CPPFLAGS": (
+                "-DU_NO_DEFAULT_INCLUDE_UTF_HEADERS=1 "
+                + "-DU_HIDE_OBSOLETE_UTF_OLD_H=1"
+                + "-DUCONFIG_NO_LEGACY_CONVERSION "
+                + "-DUCONFIG_NO_TRANSLITERATION "
+                + "-DUCONFIG_NO_REGULAR_EXPRESSIONS "
+                + "-DUCONFIG_NO_BREAK_ITERATION "
+                + "-DU_CHARSET_IS_UTF8"
+            )
+        }
+    )
+
+    # Exclude data that we currently don't need.
+    #
+    # The file format for ICU's data build tool is described at
+    # <https://github.com/unicode-org/icu/blob/master/docs/userguide/icu_data/buildtool.md>.
+    env["ICU_DATA_FILTER_FILE"] = mozpath.join(topsrcdir, "intl/icu/data_filter.json")
+
+    print("Running ICU configure...")
+    if not try_run(
+        "icu-configure",
+        [
+            "sh",
+            configure,
+            "--with-data-packaging=archive",
+            "--enable-static",
+            "--disable-shared",
+            "--disable-extras",
+            "--disable-icuio",
+            "--disable-layout",
+            "--disable-layoutex",
+            "--disable-tests",
+            "--disable-samples",
+            "--disable-strict",
+        ],
+        cwd=objdir,
+        env=env,
+    ):
+        return False
+    print("Running ICU make...")
+    if not try_run(
+        "icu-make",
+        ["make", "--jobs=%d" % multiprocessing.cpu_count(), "--output-sync"],
+        cwd=objdir,
+    ):
+        return False
+    print("Copying ICU data file...")
+    tree_data_path = mozpath.join(topsrcdir, "config/external/icu/data/")
+    old_data_file = get_data_file(tree_data_path)
+    if not old_data_file:
+        print("Error: no ICU data file in %s" % tree_data_path, file=sys.stderr)
+        return False
+    new_data_file = get_data_file(mozpath.join(objdir, "data/out"))
+    if not new_data_file:
+        print("Error: no ICU data in ICU objdir", file=sys.stderr)
+        return False
+    if os.path.basename(old_data_file) != os.path.basename(new_data_file):
+        # Data file name has the major version number embedded.
+        os.unlink(old_data_file)
+    shutil.copy(new_data_file, tree_data_path)
+    try:
+        shutil.rmtree(objdir)
+    except Exception:
+        print("Warning: failed to remove %s" % objdir, file=sys.stderr)
+    return True
+
+
+def main():
+    """Entry point: regenerate the sources.mozbuild lists and rebuild the
+    ICU data file for the mozilla topsrcdir given as the sole argument."""
+    if len(sys.argv) != 2:
+        print("Usage: icu_sources_data.py <mozilla topsrcdir>", file=sys.stderr)
+        sys.exit(1)
+
+    topsrcdir = mozpath.abspath(sys.argv[1])
+    update_sources(topsrcdir)
+    if not update_data_file(topsrcdir):
+        print("Error updating ICU data file", file=sys.stderr)
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-init-patch/create.patch.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-init-patch/create.patch.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-init-patch/create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+VERSION=102.15.0
+
+tar --files-from=file.list -xJvf ../firefox-${VERSION}esr.source.tar.xz
+mv firefox-$VERSION firefox-$VERSION-orig
+
+cp -rf ./firefox-$VERSION-new ./firefox-$VERSION
+
+diff --unified -Nr  firefox-$VERSION-orig  firefox-$VERSION > firefox-$VERSION-init.patch
+
+mv firefox-$VERSION-init.patch ../patches
+
+rm -rf ./firefox-$VERSION
+rm -rf ./firefox-$VERSION-orig

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-init-patch/create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-init-patch/file.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-init-patch/file.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-init-patch/file.list	(revision 228)
@@ -0,0 +1 @@
+firefox-102.15.0/python/mozbuild/mozbuild/configure/__init__.py
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-init-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/configure/__init__.py
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-init-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/configure/__init__.py	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-init-patch/firefox-102.15.0-new/python/mozbuild/mozbuild/configure/__init__.py	(revision 228)
@@ -0,0 +1,1310 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import codecs
+import inspect
+import logging
+import os
+import re
+import six
+from six.moves import builtins as __builtin__
+import sys
+import types
+from collections import OrderedDict
+from contextlib import contextmanager
+from functools import wraps
+from mozbuild.configure.options import (
+    CommandLineHelper,
+    ConflictingOptionError,
+    HELP_OPTIONS_CATEGORY,
+    InvalidOptionError,
+    Option,
+    OptionValue,
+)
+from mozbuild.configure.help import HelpFormatter
+from mozbuild.configure.util import ConfigureOutputHandler, getpreferredencoding, LineIO
+from mozbuild.util import (
+    exec_,
+    memoize,
+    memoized_property,
+    ReadOnlyDict,
+    ReadOnlyNamespace,
+    system_encoding,
+)
+
+import mozpack.path as mozpath
+
+
+# TRACE logging level, below (thus more verbose than) DEBUG
+TRACE = 5
+
+
+class ConfigureError(Exception):
+    """Error raised for invalid use of the configure sandbox primitives."""
+
+    pass
+
+
+class SandboxDependsFunction(object):
+    """Sandbox-visible representation of @depends functions."""
+
+    def __init__(self, unsandboxed):
+        # Capture the bound combinators of the real DependsFunction; the
+        # sandboxed proxy exposes only |, & and attribute access.
+        self._or = unsandboxed.__or__
+        self._and = unsandboxed.__and__
+        self._getattr = unsandboxed.__getattr__
+
+    def __call__(self, *arg, **kwargs):
+        # Sandboxed code must never call a @depends function directly; its
+        # value is resolved by the sandbox machinery.
+        raise ConfigureError("The `%s` function may not be called" % self.__name__)
+
+    def __or__(self, other):
+        if not isinstance(other, SandboxDependsFunction):
+            raise ConfigureError(
+                "Can only do binary arithmetic operations "
+                "with another @depends function."
+            )
+        return self._or(other).sandboxed
+
+    def __and__(self, other):
+        if not isinstance(other, SandboxDependsFunction):
+            raise ConfigureError(
+                "Can only do binary arithmetic operations "
+                "with another @depends function."
+            )
+        return self._and(other).sandboxed
+
+    # All comparisons and truth-testing raise: sandboxed code must not
+    # branch on an unresolved @depends value.
+    def __cmp__(self, other):
+        raise ConfigureError("Cannot compare @depends functions.")
+
+    def __eq__(self, other):
+        raise ConfigureError("Cannot compare @depends functions.")
+
+    def __hash__(self):
+        return object.__hash__(self)
+
+    def __ne__(self, other):
+        raise ConfigureError("Cannot compare @depends functions.")
+
+    def __lt__(self, other):
+        raise ConfigureError("Cannot compare @depends functions.")
+
+    def __le__(self, other):
+        raise ConfigureError("Cannot compare @depends functions.")
+
+    def __gt__(self, other):
+        raise ConfigureError("Cannot compare @depends functions.")
+
+    def __ge__(self, other):
+        raise ConfigureError("Cannot compare @depends functions.")
+
+    def __getattr__(self, key):
+        # Attribute access is deferred too: it yields another sandboxed
+        # @depends value (see DependsFunction.__getattr__).
+        return self._getattr(key).sandboxed
+
+    def __nonzero__(self):
+        raise ConfigureError("Cannot do boolean operations on @depends functions.")
+
+
+class DependsFunction(object):
+    """Unsandboxed implementation behind a @depends function: holds the
+    wrapped callable (or plain value), its dependencies, an optional
+    ``when`` guard, and the sandboxed proxy exposed to configure files."""
+
+    __slots__ = (
+        "_func",
+        "_name",
+        "dependencies",
+        "when",
+        "sandboxed",
+        "sandbox",
+        "_result",
+    )
+
+    def __init__(self, sandbox, func, dependencies, when=None):
+        assert isinstance(sandbox, ConfigureSandbox)
+        assert not inspect.isgeneratorfunction(func)
+        # Allow non-functions when there are no dependencies. This is equivalent
+        # to passing a lambda that returns the given value.
+        # NOTE(review): this print looks like leftover debugging — it writes
+        # the offending object to stdout just before the assert below fires.
+        # Consider removing it or routing it to a log; confirm against the
+        # intent of this local patch.
+        if not (inspect.isroutine(func) or not dependencies):
+            print(func)
+        assert inspect.isroutine(func) or not dependencies
+        self._func = func
+        self._name = getattr(func, "__name__", None)
+        self.dependencies = dependencies
+        # wraps() copies the function's metadata onto the sandboxed proxy.
+        self.sandboxed = wraps(func)(SandboxDependsFunction(self))
+        self.sandbox = sandbox
+        self.when = when
+        sandbox._depends[self.sandboxed] = self
+
+        # Only @depends functions with a dependency on '--help' are executed
+        # immediately. Everything else is queued for later execution.
+        if sandbox._help_option in dependencies:
+            sandbox._value_for(self)
+        elif not sandbox._help:
+            sandbox._execution_queue.append((sandbox._value_for, (self,)))
+
+    @property
+    def name(self):
+        return self._name
+
+    @name.setter
+    def name(self, value):
+        self._name = value
+
+    @property
+    def sandboxed_dependencies(self):
+        # Dependencies as seen from sandboxed code (proxies, not the
+        # underlying DependsFunction objects).
+        return [
+            d.sandboxed if isinstance(d, DependsFunction) else d
+            for d in self.dependencies
+        ]
+
+    @memoize
+    def result(self):
+        # A falsy `when` guard short-circuits to None without calling _func.
+        if self.when and not self.sandbox._value_for(self.when):
+            return None
+
+        if inspect.isroutine(self._func):
+            resolved_args = [self.sandbox._value_for(d) for d in self.dependencies]
+            return self._func(*resolved_args)
+        # Plain (non-callable) value: return it as-is.
+        return self._func
+
+    def __repr__(self):
+        return "<%s %s(%s)>" % (
+            self.__class__.__name__,
+            self.name,
+            ", ".join(repr(d) for d in self.dependencies),
+        )
+
+    def __or__(self, other):
+        if isinstance(other, SandboxDependsFunction):
+            other = self.sandbox._depends.get(other)
+        assert isinstance(other, DependsFunction)
+        assert self.sandbox is other.sandbox
+        return CombinedDependsFunction(self.sandbox, self.or_impl, (self, other))
+
+    @staticmethod
+    def or_impl(iterable):
+        # Applies "or" to all the items of iterable.
+        # e.g. if iterable contains a, b and c, returns `a or b or c`.
+        for i in iterable:
+            if i:
+                return i
+        return i
+
+    def __and__(self, other):
+        if isinstance(other, SandboxDependsFunction):
+            other = self.sandbox._depends.get(other)
+        assert isinstance(other, DependsFunction)
+        assert self.sandbox is other.sandbox
+        return CombinedDependsFunction(self.sandbox, self.and_impl, (self, other))
+
+    @staticmethod
+    def and_impl(iterable):
+        # Applies "and" to all the items of iterable.
+        # e.g. if iterable contains a, b and c, returns `a and b and c`.
+        for i in iterable:
+            if not i:
+                return i
+        return i
+
+    def __getattr__(self, key):
+        if key.startswith("_"):
+            return super(DependsFunction, self).__getattr__(key)
+        # Our function may return None or an object that simply doesn't have
+        # the wanted key. In that case, just return None.
+        return TrivialDependsFunction(
+            self.sandbox, lambda x: getattr(x, key, None), [self], self.when
+        )
+
+
+class TrivialDependsFunction(DependsFunction):
+    """Like a DependsFunction, but the linter won't expect it to have a
+    dependency on --help ever."""
+
+
+class CombinedDependsFunction(DependsFunction):
+    """A DependsFunction combining several others through one reducer
+    function (or_impl/and_impl); nested combinations that use the same
+    reducer are flattened into a single node."""
+
+    def __init__(self, sandbox, func, dependencies):
+        # Flatten e.g. (a | b) | c into one node with deps [a, b, c],
+        # dropping duplicates while preserving first-seen order.
+        flatten_deps = []
+        for d in dependencies:
+            if isinstance(d, CombinedDependsFunction) and d._func is func:
+                for d2 in d.dependencies:
+                    if d2 not in flatten_deps:
+                        flatten_deps.append(d2)
+            elif d not in flatten_deps:
+                flatten_deps.append(d)
+
+        super(CombinedDependsFunction, self).__init__(sandbox, func, flatten_deps)
+
+    @memoize
+    def result(self):
+        # The reducer receives a generator over the resolved dependency values.
+        resolved_args = (self.sandbox._value_for(d) for d in self.dependencies)
+        return self._func(resolved_args)
+
+    def __eq__(self, other):
+        # Equal iff same reducer and same dependency set (order-insensitive).
+        return (
+            isinstance(other, self.__class__)
+            and self._func is other._func
+            and set(self.dependencies) == set(other.dependencies)
+        )
+
+    def __hash__(self):
+        return object.__hash__(self)
+
+    def __ne__(self, other):
+        return not self == other
+
+
+class SandboxedGlobal(dict):
+    """Identifiable dict type for use as function global"""
+
+
+def forbidden_import(*args, **kwargs):
+    raise ImportError("Importing modules is forbidden")
+
+
+class ConfigureSandbox(dict):
+    """Represents a sandbox for executing Python code for build configuration.
+    This is a different kind of sandboxing than the one used for moz.build
+    processing.
+
+    The sandbox has 9 primitives:
+    - option
+    - depends
+    - template
+    - imports
+    - include
+    - set_config
+    - set_define
+    - imply_option
+    - only_when
+
+    `option`, `include`, `set_config`, `set_define` and `imply_option` are
+    functions. `depends`, `template`, and `imports` are decorators. `only_when`
+    is a context_manager.
+
+    These primitives are declared as name_impl methods to this class and
+    the mapping name -> name_impl is done automatically in __getitem__.
+
+    Additional primitives should be frowned upon to keep the sandbox itself as
+    simple as possible. Instead, helpers should be created within the sandbox
+    with the existing primitives.
+
+    The sandbox is given, at creation, a dict where the yielded configuration
+    will be stored.
+
+        config = {}
+        sandbox = ConfigureSandbox(config)
+        sandbox.run(path)
+        do_stuff(config)
+    """
+
+    # The default set of builtins. We expose unicode as str to make sandboxed
+    # files more python3-ready.
+    # Names missing from the running interpreter resolve to None through
+    # getattr's default, so the same table works on both Python 2 and 3.
+    # __import__ is replaced so sandboxed code cannot import directly.
+    BUILTINS = ReadOnlyDict(
+        {
+            b: getattr(__builtin__, b, None)
+            for b in (
+                "AssertionError",
+                "False",
+                "None",
+                "True",
+                "__build_class__",  # will be None on py2
+                "all",
+                "any",
+                "bool",
+                "dict",
+                "enumerate",
+                "getattr",
+                "hasattr",
+                "int",
+                "isinstance",
+                "len",
+                "list",
+                "max",
+                "min",
+                "range",
+                "set",
+                "sorted",
+                "tuple",
+                "zip",
+            )
+        },
+        __import__=forbidden_import,
+        str=six.text_type,
+    )
+
+    # Expose a limited set of functions from os.path
+    # Each name prefers the mozpath implementation and falls back to os.path
+    # for names mozpath does not provide.
+    OS = ReadOnlyNamespace(
+        path=ReadOnlyNamespace(
+            **{
+                k: getattr(mozpath, k, getattr(os.path, k))
+                for k in (
+                    "abspath",
+                    "basename",
+                    "dirname",
+                    "isabs",
+                    "join",
+                    "normcase",
+                    "normpath",
+                    "realpath",
+                    "relpath",
+                )
+            }
+        )
+    )
+
+    def __init__(
+        self,
+        config,
+        environ=os.environ,
+        argv=sys.argv,
+        stdout=sys.stdout,
+        stderr=sys.stderr,
+        logger=None,
+    ):
+        """Create a sandbox writing its configuration into `config` (a dict).
+
+        `environ` and `argv` default to the real process environment and
+        command line but may be substituted (e.g. by tests). `stdout` and
+        `stderr` are only used when `logger` is None, in which case a
+        "moz.configure" logger writing to them is created.
+        """
+        dict.__setitem__(self, "__builtins__", self.BUILTINS)
+
+        # Private copy of the environment; the wrapped os/subprocess modules
+        # read this dict rather than the caller's environment.
+        self._environ = dict(environ)
+
+        # Stack of files currently being included, and set of every file ever
+        # included (duplicate includes are rejected).
+        self._paths = []
+        self._all_paths = set()
+        self._templates = set()
+        # Associate SandboxDependsFunctions to DependsFunctions.
+        self._depends = OrderedDict()
+        self._seen = set()
+        # Store the @imports added to a given function.
+        self._imports = {}
+
+        self._options = OrderedDict()
+        # Store raw option (as per command line or environment) for each Option
+        self._raw_options = OrderedDict()
+
+        # Store options added with `imply_option`, and the reason they were
+        # added (which can either have been given to `imply_option`, or
+        # inferred. Their order matters, so use a list.
+        self._implied_options = []
+
+        # Store all results from _prepare_function
+        self._prepared_functions = set()
+
+        # Queue of functions to execute, with their arguments
+        self._execution_queue = []
+
+        # Store the `when`s associated to some options.
+        self._conditions = {}
+
+        # A list of conditions to apply as a default `when` for every *_impl()
+        self._default_conditions = []
+
+        self._helper = CommandLineHelper(environ, argv)
+
+        assert isinstance(config, dict)
+        self._config = config
+
+        # Tracks how many templates "deep" we are in the stack.
+        self._template_depth = 0
+
+        logging.addLevelName(TRACE, "TRACE")
+        if logger is None:
+            # Default logger: DEBUG and above to stdout/stderr; the output
+            # handler also supplies the queue_debug context manager.
+            logger = moz_logger = logging.getLogger("moz.configure")
+            logger.setLevel(logging.DEBUG)
+            formatter = logging.Formatter("%(levelname)s: %(message)s")
+            handler = ConfigureOutputHandler(stdout, stderr)
+            handler.setFormatter(formatter)
+            queue_debug = handler.queue_debug
+            logger.addHandler(handler)
+
+        else:
+            assert isinstance(logger, logging.Logger)
+            moz_logger = None
+
+            # Caller-supplied loggers get a no-op queue_debug.
+            @contextmanager
+            def queue_debug():
+                yield
+
+        self._logger = logger
+
+        # Some callers will manage to log a bytestring with characters in it
+        # that can't be converted to ascii. Make our log methods robust to this
+        # by detecting the encoding that a producer is likely to have used.
+        encoding = getpreferredencoding()
+
+        def wrapped_log_method(logger, key):
+            method = getattr(logger, key)
+
+            # Decode bytes arguments to text before handing them to `method`.
+            def wrapped(*args, **kwargs):
+                out_args = [
+                    six.ensure_text(arg, encoding=encoding or "utf-8")
+                    if isinstance(arg, six.binary_type)
+                    else arg
+                    for arg in args
+                ]
+                return method(*out_args, **kwargs)
+
+            return wrapped
+
+        log_namespace = {
+            k: wrapped_log_method(logger, k)
+            for k in ("debug", "info", "warning", "error")
+        }
+        log_namespace["queue_debug"] = queue_debug
+        self.log_impl = ReadOnlyNamespace(**log_namespace)
+
+        self._help = None
+        # --help is declared like any other option, and marked as seen so run()
+        # does not complain about it being unreferenced.
+        self._help_option = self.option_impl(
+            "--help", help="print this message", category=HELP_OPTIONS_CATEGORY
+        )
+        self._seen.add(self._help_option)
+
+        # Trivial depends functions used as always-true/always-false `when`s.
+        self._always = DependsFunction(self, lambda: True, [])
+        self._never = DependsFunction(self, lambda: False, [])
+
+        if self._value_for(self._help_option):
+            self._help = HelpFormatter(argv[0])
+            self._help.add(self._help_option)
+        elif moz_logger:
+            # Not in help mode and using our own logger: also record the run
+            # to config.log. delay=True defers creating the file until the
+            # first record is actually logged.
+            handler = logging.FileHandler(
+                "config.log", mode="w", delay=True, encoding="utf-8"
+            )
+            handler.setFormatter(formatter)
+            logger.addHandler(handler)
+
+    def include_file(self, path):
+        """Include one file in the sandbox. Users of this class probably want
+        to use `run` instead.
+
+        Note: this will execute all template invocations, as well as @depends
+        functions that depend on '--help', but nothing else.
+        """
+
+        # Nested includes are resolved relative to the including file and must
+        # stay within the directory tree of the top-level file.
+        if self._paths:
+            path = mozpath.join(mozpath.dirname(self._paths[-1]), path)
+            path = mozpath.normpath(path)
+            if not mozpath.basedir(path, (mozpath.dirname(self._paths[0]),)):
+                raise ConfigureError(
+                    "Cannot include `%s` because it is not in a subdirectory "
+                    "of `%s`" % (path, mozpath.dirname(self._paths[0]))
+                )
+        else:
+            path = mozpath.realpath(mozpath.abspath(path))
+        # Each file may only ever be included once.
+        if path in self._all_paths:
+            raise ConfigureError(
+                "Cannot include `%s` because it was included already." % path
+            )
+        # _paths is the current include stack; _all_paths remembers every file
+        # ever included.
+        self._paths.append(path)
+        self._all_paths.add(path)
+
+        with open(path, "rb") as fh:
+            source = fh.read()
+
+        code = compile(source, path, "exec")
+
+        # The sandbox itself (a dict) serves as the globals of the code.
+        exec_(code, self)
+
+        self._paths.pop(-1)
+
+    def run(self, path=None):
+        """Executes the given file within the sandbox, as well as everything
+        pending from any other included file, and ensure the overall
+        consistency of the executed script(s)."""
+        if path:
+            self.include_file(path)
+
+        for option in six.itervalues(self._options):
+            # All options must be referenced by some @depends function
+            if option not in self._seen:
+                raise ConfigureError(
+                    "Option `%s` is not handled ; reference it with a @depends"
+                    % option.option
+                )
+
+            # Force evaluation so every option is processed and validated.
+            self._value_for(option)
+
+        # All implied options should exist.
+        for implied_option in self._implied_options:
+            value = self._resolve(implied_option.value)
+            if value is not None:
+                # There are two ways to end up here: either the implied option
+                # is unknown, or it's known but there was a dependency loop
+                # that prevented the implication from being applied.
+                option = self._options.get(implied_option.name)
+                if not option:
+                    raise ConfigureError(
+                        "`%s`, emitted from `%s` line %d, is unknown."
+                        % (
+                            implied_option.option,
+                            implied_option.caller[1],
+                            implied_option.caller[2],
+                        )
+                    )
+                # If the option is known, check that the implied value doesn't
+                # conflict with what value was attributed to the option.
+                if implied_option.when and not self._value_for(implied_option.when):
+                    continue
+                option_value = self._value_for_option(option)
+                if value != option_value:
+                    reason = implied_option.reason
+                    if isinstance(reason, Option):
+                        # Prefer the raw command-line/environment spelling of
+                        # the reason option, stripped of any attached value.
+                        reason = self._raw_options.get(reason) or reason.option
+                        reason = reason.split("=", 1)[0]
+                    value = OptionValue.from_(value)
+                    raise InvalidOptionError(
+                        "'%s' implied by '%s' conflicts with '%s' from the %s"
+                        % (
+                            value.format(option.option),
+                            reason,
+                            option_value.format(option.option),
+                            option_value.origin,
+                        )
+                    )
+
+        # All options should have been removed (handled) by now.
+        for arg in self._helper:
+            without_value = arg.split("=", 1)[0]
+            msg = "Unknown option: %s" % without_value
+            self._logger.warning(msg)
+
+        # Run the execution queue
+        for func, args in self._execution_queue:
+            func(*args)
+
+        # In --help mode, print the collected usage text.
+        if self._help:
+            with LineIO(self.log_impl.info) as out:
+                self._help.usage(out)
+
+    def __getitem__(self, key):
+        impl = "%s_impl" % key
+        func = getattr(self, impl, None)
+        if func:
+            return func
+
+        return super(ConfigureSandbox, self).__getitem__(key)
+
+    def __setitem__(self, key, value):
+        if (
+            key in self.BUILTINS
+            or key == "__builtins__"
+            or hasattr(self, "%s_impl" % key)
+        ):
+            raise KeyError("Cannot reassign builtins")
+
+        if inspect.isfunction(value) and value not in self._templates:
+            value = self._prepare_function(value)
+
+        elif (
+            not isinstance(value, SandboxDependsFunction)
+            and value not in self._templates
+            and not (inspect.isclass(value) and issubclass(value, Exception))
+        ):
+            raise KeyError(
+                "Cannot assign `%s` because it is neither a "
+                "@depends nor a @template" % key
+            )
+
+        if isinstance(value, SandboxDependsFunction):
+            self._depends[value].name = key
+
+        return super(ConfigureSandbox, self).__setitem__(key, value)
+
+    def _resolve(self, arg):
+        if isinstance(arg, SandboxDependsFunction):
+            return self._value_for_depends(self._depends[arg])
+        return arg
+
+    def _value_for(self, obj):
+        if isinstance(obj, SandboxDependsFunction):
+            assert obj in self._depends
+            return self._value_for_depends(self._depends[obj])
+
+        elif isinstance(obj, DependsFunction):
+            return self._value_for_depends(obj)
+
+        elif isinstance(obj, Option):
+            return self._value_for_option(obj)
+
+        assert False
+
+    @memoize
+    def _value_for_depends(self, obj):
+        # Memoized: each DependsFunction is evaluated at most once per
+        # sandbox; the result is also traced to the logger.
+        value = obj.result()
+        self._logger.log(TRACE, "%r = %r", obj, value)
+        return value
+
+    @memoize
+    def _value_for_option(self, option):
+        """Compute the value for `option`, first applying any pending
+        imply_option()s that target it, then its `when` condition."""
+        implied = {}
+        matching_implied_options = [
+            o for o in self._implied_options if o.name in (option.name, option.env)
+        ]
+        # Update self._implied_options before going into the loop with the non-matching
+        # options.
+        self._implied_options = [
+            o for o in self._implied_options if o.name not in (option.name, option.env)
+        ]
+
+        for implied_option in matching_implied_options:
+            # Skip implications whose own `when` does not hold.
+            if implied_option.when and not self._value_for(implied_option.when):
+                continue
+
+            value = self._resolve(implied_option.value)
+
+            if value is not None:
+                value = OptionValue.from_(value)
+                opt = value.format(implied_option.option)
+                self._helper.add(opt, "implied")
+                implied[opt] = implied_option
+
+        try:
+            value, option_string = self._helper.handle(option)
+        except ConflictingOptionError as e:
+            # An implied value clashed with an explicit one: report the
+            # origin of the implication rather than the raw conflict.
+            reason = implied[e.arg].reason
+            if isinstance(reason, Option):
+                reason = self._raw_options.get(reason) or reason.option
+                reason = reason.split("=", 1)[0]
+            raise InvalidOptionError(
+                "'%s' implied by '%s' conflicts with '%s' from the %s"
+                % (e.arg, reason, e.old_arg, e.old_origin)
+            )
+
+        if value.origin == "implied":
+            # NOTE(review): this peeks at what appears to be the @memoize
+            # cache; a pre-existing entry means this option was re-entered
+            # while being resolved, i.e. an imply_option dependency loop —
+            # confirm against the memoize implementation.
+            recursed_value = getattr(self, "__value_for_option").get((option,))
+            if recursed_value is not None:
+                _, filename, line, _, _, _ = implied[value.format(option.option)].caller
+                raise ConfigureError(
+                    "'%s' appears somewhere in the direct or indirect dependencies when "
+                    "resolving imply_option at %s:%d" % (option.option, filename, line)
+                )
+
+        if option_string:
+            self._raw_options[option] = option_string
+
+        when = self._conditions.get(option)
+        # If `when` resolves to a false-ish value, we always return None.
+        # This makes option(..., when='--foo') equivalent to
+        # option(..., when=depends('--foo')(lambda x: x)).
+        if when and not self._value_for(when) and value is not None:
+            # If the option was passed explicitly, we throw an error that
+            # the option is not available. Except when the option was passed
+            # from the environment, because that would be too cumbersome.
+            if value.origin not in ("default", "environment"):
+                raise InvalidOptionError(
+                    "%s is not available in this configuration"
+                    % option_string.split("=", 1)[0]
+                )
+            self._logger.log(TRACE, "%r = None", option)
+            return None
+
+        self._logger.log(TRACE, "%r = %r", option, value)
+        return value
+
+    def _dependency(self, arg, callee_name, arg_name=None):
+        if isinstance(arg, six.string_types):
+            prefix, name, values = Option.split_option(arg)
+            if values != ():
+                raise ConfigureError("Option must not contain an '='")
+            if name not in self._options:
+                raise ConfigureError(
+                    "'%s' is not a known option. " "Maybe it's declared too late?" % arg
+                )
+            arg = self._options[name]
+            self._seen.add(arg)
+        elif isinstance(arg, SandboxDependsFunction):
+            assert arg in self._depends
+            arg = self._depends[arg]
+        else:
+            raise TypeError(
+                "Cannot use object of type '%s' as %sargument to %s"
+                % (
+                    type(arg).__name__,
+                    "`%s` " % arg_name if arg_name else "",
+                    callee_name,
+                )
+            )
+        return arg
+
+    def _normalize_when(self, when, callee_name):
+        if when is True:
+            when = self._always
+        elif when is False:
+            when = self._never
+        elif when is not None:
+            when = self._dependency(when, callee_name, "when")
+
+        if self._default_conditions:
+            # Create a pseudo @depends function for the combination of all
+            # default conditions and `when`.
+            dependencies = [when] if when else []
+            dependencies.extend(self._default_conditions)
+            if len(dependencies) == 1:
+                return dependencies[0]
+            return CombinedDependsFunction(self, all, dependencies)
+        return when
+
+    @contextmanager
+    def only_when_impl(self, when):
+        """Implementation of only_when()
+
+        `only_when` is a context manager that essentially makes calls to
+        other sandbox functions within the context block ignored.
+        """
+        when = self._normalize_when(when, "only_when")
+        if when and self._default_conditions[-1:] != [when]:
+            self._default_conditions.append(when)
+            yield
+            self._default_conditions.pop()
+        else:
+            yield
+
+    def option_impl(self, *args, **kwargs):
+        """Implementation of option()
+        This function creates and returns an Option() object, passing it the
+        resolved arguments (uses the result of functions when functions are
+        passed). In most cases, the result of this function is not expected to
+        be used.
+        Command line argument/environment variable parsing for this Option is
+        handled here.
+        """
+        when = self._normalize_when(kwargs.get("when"), "option")
+        # Resolve any @depends passed as arguments to their current values.
+        args = [self._resolve(arg) for arg in args]
+        kwargs = {k: self._resolve(v) for k, v in six.iteritems(kwargs) if k != "when"}
+        # The Option constructor needs to look up the stack to infer a category
+        # for the Option, since the category is based on the filename where the
+        # Option is defined. However, if the Option is defined in a template, we
+        # want the category to reference the caller of the template rather than
+        # the caller of the option() function.
+        kwargs["define_depth"] = self._template_depth * 3
+        option = Option(*args, **kwargs)
+        if when:
+            self._conditions[option] = when
+        # Options are indexed both by command-line name and environment name;
+        # neither may collide with an existing option.
+        if option.name in self._options:
+            raise ConfigureError("Option `%s` already defined" % option.option)
+        if option.env in self._options:
+            raise ConfigureError("Option `%s` already defined" % option.env)
+        if option.name:
+            self._options[option.name] = option
+        if option.env:
+            self._options[option.env] = option
+
+        # In --help mode, only advertise the option if its condition holds.
+        if self._help and (when is None or self._value_for(when)):
+            self._help.add(option)
+
+        return option
+
+    def depends_impl(self, *args, **kwargs):
+        """Implementation of @depends()
+        This function is a decorator. It returns a function that subsequently
+        takes a function and returns a dummy function. The dummy function
+        identifies the actual function for the sandbox, while preventing
+        further function calls from within the sandbox.
+
+        @depends() takes a variable number of option strings or dummy function
+        references. The decorated function is called as soon as the decorator
+        is called, and the arguments it receives are the OptionValue or
+        function results corresponding to each of the arguments to @depends.
+        As an exception, when a HelpFormatter is attached, only functions that
+        have '--help' in their @depends argument list are called.
+
+        The decorated function is altered to use a different global namespace
+        for its execution. This different global namespace exposes a limited
+        set of functions from os.path.
+        """
+        # `when` is the only keyword argument accepted.
+        for k in kwargs:
+            if k != "when":
+                raise TypeError(
+                    "depends_impl() got an unexpected keyword argument '%s'" % k
+                )
+
+        when = self._normalize_when(kwargs.get("when"), "@depends")
+
+        if not when and not args:
+            raise ConfigureError("@depends needs at least one argument")
+
+        dependencies = tuple(self._dependency(arg, "@depends") for arg in args)
+
+        # Any Option dependency that carries its own `when` must use the very
+        # same `when` as this @depends function.
+        conditions = [
+            self._conditions[d]
+            for d in dependencies
+            if d in self._conditions and isinstance(d, Option)
+        ]
+        for c in conditions:
+            if c != when:
+                raise ConfigureError(
+                    "@depends function needs the same `when` "
+                    "as options it depends on"
+                )
+
+        def decorator(func):
+            if inspect.isgeneratorfunction(func):
+                raise ConfigureError(
+                    "Cannot decorate generator functions with @depends"
+                )
+            if inspect.isroutine(func):
+                if func in self._templates:
+                    raise TypeError("Cannot use a @template function here")
+                func = self._prepare_function(func)
+            elif isinstance(func, SandboxDependsFunction):
+                raise TypeError("Cannot nest @depends functions")
+            elif dependencies:
+                # Non-callable values are only allowed with no dependencies.
+                raise TypeError(
+                    "Cannot wrap literal values in @depends with dependencies"
+                )
+            depends = DependsFunction(self, func, dependencies, when=when)
+            return depends.sandboxed
+
+        return decorator
+
+    def include_impl(self, what, when=None):
+        """Implementation of include().
+        Allows to include external files for execution in the sandbox.
+        It is possible to use a @depends function as argument, in which case
+        the result of the function is the file name to include. This latter
+        feature is only really meant for --enable-application/--enable-project.
+        """
+        with self.only_when_impl(when):
+            what = self._resolve(what)
+            if what:
+                if not isinstance(what, six.string_types):
+                    raise TypeError("Unexpected type: '%s'" % type(what).__name__)
+                self.include_file(what)
+
+    def template_impl(self, func):
+        """Implementation of @template.
+        This function is a decorator. Template functions are called
+        immediately. They are altered so that their global namespace exposes
+        a limited set of functions from os.path, as well as `depends` and
+        `option`.
+        Templates allow to simplify repetitive constructs, or to implement
+        helper decorators and somesuch.
+        """
+
+        def update_globals(glob):
+            # Expose every sandbox primitive (except template itself) plus
+            # the sandbox's current symbols to the template's globals.
+            glob.update(
+                (k[: -len("_impl")], getattr(self, k))
+                for k in dir(self)
+                if k.endswith("_impl") and k != "template_impl"
+            )
+            glob.update((k, v) for k, v in six.iteritems(self) if k not in glob)
+
+        template = self._prepare_function(func, update_globals)
+
+        # Any function argument to the template must be prepared to be sandboxed.
+        # If the template itself returns a function (in which case, it's very
+        # likely a decorator), that function must be prepared to be sandboxed as
+        # well.
+        def wrap_template(template):
+            isfunction = inspect.isfunction
+
+            def maybe_prepare_function(obj):
+                if isfunction(obj):
+                    return self._prepare_function(obj)
+                return obj
+
+            # The following function may end up being prepared to be sandboxed,
+            # so it mustn't depend on anything from the global scope in this
+            # file. It can however depend on variables from the closure, thus
+            # maybe_prepare_function and isfunction are declared above to be
+            # available there.
+            @self.wraps(template)
+            def wrapper(*args, **kwargs):
+                args = [maybe_prepare_function(arg) for arg in args]
+                kwargs = {k: maybe_prepare_function(v) for k, v in kwargs.items()}
+                # Track template nesting depth for Option category inference.
+                self._template_depth += 1
+                ret = template(*args, **kwargs)
+                self._template_depth -= 1
+                if isfunction(ret):
+                    # We can't expect the sandboxed code to think about all the
+                    # details of implementing decorators, so do some of the
+                    # work for them. If the function takes exactly one function
+                    # as argument and returns a function, it must be a
+                    # decorator, so mark the returned function as wrapping the
+                    # function passed in.
+                    if len(args) == 1 and not kwargs and isfunction(args[0]):
+                        ret = self.wraps(args[0])(ret)
+                    return wrap_template(ret)
+                return ret
+
+            return wrapper
+
+        wrapper = wrap_template(template)
+        self._templates.add(wrapper)
+        return wrapper
+
+    def wraps(self, func):
+        # Thin indirection over functools.wraps; presumably kept as a method
+        # so subclasses can customize how wrappers are marked — confirm with
+        # callers before relying on that.
+        return wraps(func)
+
+    RE_MODULE = re.compile("^[a-zA-Z0-9_\.]+$")
+
+    def imports_impl(self, _import, _from=None, _as=None):
+        """Implementation of @imports.
+        This decorator imports the given _import from the given _from module
+        optionally under a different _as name.
+        The options correspond to the various forms for the import builtin.
+
+            @imports('sys')
+            @imports(_from='mozpack', _import='path', _as='mozpath')
+        """
+        for value, required in ((_import, True), (_from, False), (_as, False)):
+
+            if not isinstance(value, six.string_types) and (
+                required or value is not None
+            ):
+                raise TypeError("Unexpected type: '%s'" % type(value).__name__)
+            if value is not None and not self.RE_MODULE.match(value):
+                raise ValueError("Invalid argument to @imports: '%s'" % value)
+        if _as and "." in _as:
+            raise ValueError("Invalid argument to @imports: '%s'" % _as)
+
+        def decorator(func):
+            if func in self._templates:
+                raise ConfigureError("@imports must appear after @template")
+            if func in self._depends:
+                raise ConfigureError("@imports must appear after @depends")
+            # For the imports to apply in the order they appear in the
+            # .configure file, we accumulate them in reverse order and apply
+            # them later.
+            imports = self._imports.setdefault(func, [])
+            imports.insert(0, (_from, _import, _as))
+            return func
+
+        return decorator
+
+    def _apply_imports(self, func, glob):
+        for _from, _import, _as in self._imports.pop(func, ()):
+            self._get_one_import(_from, _import, _as, glob)
+
+    def _handle_wrapped_import(self, _from, _import, _as, glob):
+        """Given the name of a module, "import" a mocked package into the glob
+        iff the module is one that we wrap (either for the sandbox or for the
+        purpose of testing). Applies if the wrapped module is exposed by an
+        attribute of `self`.
+
+        For example, if the import statement is `from os import environ`, then
+        this function will set
+        glob['environ'] = self._wrapped_os.environ.
+
+        Iff this function handles the given import, return True.
+        """
+        module = (_from or _import).split(".")[0]
+        attr = "_wrapped_" + module
+        wrapped = getattr(self, attr, None)
+        if wrapped:
+            if _as or _from:
+                obj = self._recursively_get_property(
+                    module, (_from + "." if _from else "") + _import, wrapped
+                )
+                glob[_as or _import] = obj
+            else:
+                glob[module] = wrapped
+            return True
+        else:
+            return False
+
+    def _recursively_get_property(self, module, what, wrapped):
+        """Traverse the wrapper object `wrapped` (which represents the module
+        `module`) and return the property represented by `what`, which may be a
+        series of nested attributes.
+
+        For example, if `module` is 'os' and `what` is 'os.path.join',
+        return `wrapped.path.join`.
+        """
+        if what == module:
+            return wrapped
+        assert what.startswith(module + ".")
+        attrs = what[len(module + ".") :].split(".")
+        for attr in attrs:
+            wrapped = getattr(wrapped, attr)
+        return wrapped
+
+    @memoized_property
+    def _wrapped_os(self):
+        """A ReadOnlyNamespace standing in for the `os` module, built from a
+        star-import of `os` with two substitutions (below)."""
+        wrapped_os = {}
+        exec_("from os import *", {}, wrapped_os)
+        # Special case os and os.environ so that os.environ is our copy of
+        # the environment.
+        wrapped_os["environ"] = self._environ
+        # Also override some os.path functions with ours.
+        wrapped_path = {}
+        exec_("from os.path import *", {}, wrapped_path)
+        wrapped_path.update(self.OS.path.__dict__)
+        wrapped_os["path"] = ReadOnlyNamespace(**wrapped_path)
+        return ReadOnlyNamespace(**wrapped_os)
+
+    @memoized_property
+    def _wrapped_subprocess(self):
+        wrapped_subprocess = {}
+        exec_("from subprocess import *", {}, wrapped_subprocess)
+
+        def wrap(function):
+            def wrapper(*args, **kwargs):
+                if kwargs.get("env") is None and self._environ:
+                    kwargs["env"] = dict(self._environ)
+
+                return function(*args, **kwargs)
+
+            return wrapper
+
+        for f in ("call", "check_call", "check_output", "Popen", "run"):
+            # `run` is new to python 3.5. In case this still runs from python2
+            # code, avoid failing here.
+            if f in wrapped_subprocess:
+                wrapped_subprocess[f] = wrap(wrapped_subprocess[f])
+
+        return ReadOnlyNamespace(**wrapped_subprocess)
+
+    @memoized_property
+    def _wrapped_six(self):
+        # On Python 3, the real six module is exposed as-is; the wrapping
+        # below only matters for Python 2's restricted-mode quirks.
+        if six.PY3:
+            return six
+        wrapped_six = {}
+        exec_("from six import *", {}, wrapped_six)
+        wrapped_six_moves = {}
+        exec_("from six.moves import *", {}, wrapped_six_moves)
+        wrapped_six_moves_builtins = {}
+        exec_("from six.moves.builtins import *", {}, wrapped_six_moves_builtins)
+
+        # Special case for the open() builtin, because otherwise, using it
+        # fails with "IOError: file() constructor not accessible in
+        # restricted mode". We also make open() look more like python 3's,
+        # decoding to unicode strings unless the mode says otherwise.
+        def wrapped_open(name, mode=None, buffering=None):
+            args = (name,)
+            kwargs = {}
+            if buffering is not None:
+                kwargs["buffering"] = buffering
+            if mode is not None:
+                args += (mode,)
+                # Binary modes get the plain builtin open.
+                if "b" in mode:
+                    return open(*args, **kwargs)
+            kwargs["encoding"] = system_encoding
+            return codecs.open(*args, **kwargs)
+
+        wrapped_six_moves_builtins["open"] = wrapped_open
+        wrapped_six_moves["builtins"] = ReadOnlyNamespace(**wrapped_six_moves_builtins)
+        wrapped_six["moves"] = ReadOnlyNamespace(**wrapped_six_moves)
+
+        return ReadOnlyNamespace(**wrapped_six)
+
+    def _get_one_import(self, _from, _import, _as, glob):
+        """Perform the given import, placing the result into the dict glob."""
+        if not _from and _import == "__builtin__":
+            glob[_as or "__builtin__"] = __builtin__
+            return
+        if _from == "__builtin__":
+            _from = "six.moves.builtins"
+        # The special `__sandbox__` module gives access to the sandbox
+        # instance.
+        if not _from and _import == "__sandbox__":
+            glob[_as or _import] = self
+            return
+        if self._handle_wrapped_import(_from, _import, _as, glob):
+            return
+        # If we've gotten this far, we should just do a normal import.
+        # Until this proves to be a performance problem, just construct an
+        # import statement and execute it.
+        import_line = "%simport %s%s" % (
+            ("from %s " % _from) if _from else "",
+            _import,
+            (" as %s" % _as) if _as else "",
+        )
+        exec_(import_line, {}, glob)
+
+    def _resolve_and_set(self, data, name, value, when=None):
+        # Don't set anything when --help was on the command line
+        if self._help:
+            return
+        if when and not self._value_for(when):
+            return
+        name = self._resolve(name)
+        if name is None:
+            return
+        if not isinstance(name, six.string_types):
+            raise TypeError("Unexpected type: '%s'" % type(name).__name__)
+        if name in data:
+            raise ConfigureError(
+                "Cannot add '%s' to configuration: Key already " "exists" % name
+            )
+        value = self._resolve(value)
+        if value is not None:
+            if self._logger.isEnabledFor(TRACE):
+                if data is self._config:
+                    self._logger.log(TRACE, "set_config(%s, %r)", name, value)
+                elif data is self._config.get("DEFINES"):
+                    self._logger.log(TRACE, "set_define(%s, %r)", name, value)
+            data[name] = value
+
+    def set_config_impl(self, name, value, when=None):
+        """Implementation of set_config().
+        Set the configuration items with the given name to the given value.
+        Both `name` and `value` can be references to @depends functions,
+        in which case the result from these functions is used. If the result
+        of either function is None, the configuration item is not set.
+        """
+        when = self._normalize_when(when, "set_config")
+
+        self._execution_queue.append(
+            (self._resolve_and_set, (self._config, name, value, when))
+        )
+
+    def set_define_impl(self, name, value, when=None):
+        """Implementation of set_define().
+        Set the define with the given name to the given value. Both `name` and
+        `value` can be references to @depends functions, in which case the
+        result from these functions is used. If the result of either function
+        is None, the define is not set. If the result is False, the define is
+        explicitly undefined (-U).
+        """
+        when = self._normalize_when(when, "set_define")
+
+        defines = self._config.setdefault("DEFINES", {})
+        self._execution_queue.append(
+            (self._resolve_and_set, (defines, name, value, when))
+        )
+
+    def imply_option_impl(self, option, value, reason=None, when=None):
+        """Implementation of imply_option().
+        Injects additional options as if they had been passed on the command
+        line. The `option` argument is a string as in option()'s `name` or
+        `env`. The option must be declared after `imply_option` references it.
+        The `value` argument indicates the value to pass to the option.
+        It can be:
+        - True. In this case `imply_option` injects the positive option
+
+          (--enable-foo/--with-foo).
+              imply_option('--enable-foo', True)
+              imply_option('--disable-foo', True)
+
+          are both equivalent to `--enable-foo` on the command line.
+
+        - False. In this case `imply_option` injects the negative option
+
+          (--disable-foo/--without-foo).
+              imply_option('--enable-foo', False)
+              imply_option('--disable-foo', False)
+
+          are both equivalent to `--disable-foo` on the command line.
+
+        - None. In this case `imply_option` does nothing.
+              imply_option('--enable-foo', None)
+              imply_option('--disable-foo', None)
+
+        are both equivalent to not passing any flag on the command line.
+
+        - a string or a tuple. In this case `imply_option` injects the positive
+          option with the given value(s).
+
+              imply_option('--enable-foo', 'a')
+              imply_option('--disable-foo', 'a')
+
+          are both equivalent to `--enable-foo=a` on the command line.
+              imply_option('--enable-foo', ('a', 'b'))
+              imply_option('--disable-foo', ('a', 'b'))
+
+          are both equivalent to `--enable-foo=a,b` on the command line.
+
+        Because imply_option('--disable-foo', ...) can be misleading, it is
+        recommended to use the positive form ('--enable' or '--with') for
+        `option`.
+
+        The `value` argument can also be (and usually is) a reference to a
+        @depends function, in which case the result of that function will be
+        used as per the described mapping above.
+
+        The `reason` argument indicates what caused the option to be implied.
+        It is necessary when it cannot be inferred from the `value`.
+        """
+
+        when = self._normalize_when(when, "imply_option")
+
+        # Don't do anything when --help was on the command line
+        if self._help:
+            return
+        if not reason and isinstance(value, SandboxDependsFunction):
+            deps = self._depends[value].dependencies
+            possible_reasons = [d for d in deps if d != self._help_option]
+            if len(possible_reasons) == 1:
+                if isinstance(possible_reasons[0], Option):
+                    reason = possible_reasons[0]
+        if not reason and (
+            isinstance(value, (bool, tuple)) or isinstance(value, six.string_types)
+        ):
+            # A reason can be provided automatically when imply_option
+            # is called with an immediate value.
+            _, filename, line, _, _, _ = inspect.stack()[1]
+            reason = "imply_option at %s:%s" % (filename, line)
+
+        if not reason:
+            raise ConfigureError(
+                "Cannot infer what implies '%s'. Please add a `reason` to "
+                "the `imply_option` call." % option
+            )
+
+        prefix, name, values = Option.split_option(option)
+        if values != ():
+            raise ConfigureError("Implied option must not contain an '='")
+
+        self._implied_options.append(
+            ReadOnlyNamespace(
+                option=option,
+                prefix=prefix,
+                name=name,
+                value=value,
+                caller=inspect.stack()[1],
+                reason=reason,
+                when=when,
+            )
+        )
+
+    def _prepare_function(self, func, update_globals=None):
+        """Alter the given function global namespace with the common ground
+        for @depends, and @template.
+        """
+        if not inspect.isfunction(func):
+            raise TypeError("Unexpected type: '%s'" % type(func).__name__)
+        if func in self._prepared_functions:
+            return func
+
+        glob = SandboxedGlobal(
+            (k, v)
+            for k, v in six.iteritems(func.__globals__)
+            if (inspect.isfunction(v) and v not in self._templates)
+            or (inspect.isclass(v) and issubclass(v, Exception))
+        )
+        glob.update(
+            __builtins__=self.BUILTINS,
+            __file__=self._paths[-1] if self._paths else "",
+            __name__=self._paths[-1] if self._paths else "",
+            os=self.OS,
+            log=self.log_impl,
+            namespace=ReadOnlyNamespace,
+        )
+        if update_globals:
+            update_globals(glob)
+
+        # The execution model in the sandbox doesn't guarantee the execution
+        # order will always be the same for a given function, and if it uses
+        # variables from a closure that are changed after the function is
+        # declared, depending when the function is executed, the value of the
+        # variable can differ. For consistency, we force the function to use
+        # the value from the earliest it can be run, which is at declaration.
+        # Note this is not entirely bullet proof (if the value is e.g. a list,
+        # the list contents could have changed), but covers the bases.
+        closure = None
+        if func.__closure__:
+
+            def makecell(content):
+                def f():
+                    content
+
+                return f.__closure__[0]
+
+            closure = tuple(makecell(cell.cell_contents) for cell in func.__closure__)
+
+        new_func = self.wraps(func)(
+            types.FunctionType(
+                func.__code__, glob, func.__name__, func.__defaults__, closure
+            )
+        )
+
+        @self.wraps(new_func)
+        def wrapped(*args, **kwargs):
+            if func in self._imports:
+                self._apply_imports(func, glob)
+            return new_func(*args, **kwargs)
+
+        self._prepared_functions.add(wrapped)
+        return wrapped
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-remove-sloppy-m4-patch/create.patch.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-remove-sloppy-m4-patch/create.patch.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-remove-sloppy-m4-patch/create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+VERSION=102.15.0
+
+tar --files-from=file.list -xJvf ../firefox-${VERSION}esr.source.tar.xz
+mv firefox-$VERSION firefox-$VERSION-orig
+
+cp -rf ./firefox-$VERSION-new ./firefox-$VERSION
+
+diff --unified -Nr  firefox-$VERSION-orig  firefox-$VERSION > firefox-$VERSION-remove-sloppy-m4.patch
+
+mv firefox-$VERSION-remove-sloppy-m4.patch ../patches
+
+rm -rf ./firefox-$VERSION
+rm -rf ./firefox-$VERSION-orig

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-remove-sloppy-m4-patch/create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-remove-sloppy-m4-patch/file.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-remove-sloppy-m4-patch/file.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-remove-sloppy-m4-patch/file.list	(revision 228)
@@ -0,0 +1 @@
+firefox-102.15.0/build/autoconf/autoconf.sh
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-remove-sloppy-m4-patch/firefox-102.15.0-new/build/autoconf/autoconf.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-remove-sloppy-m4-patch/firefox-102.15.0-new/build/autoconf/autoconf.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-remove-sloppy-m4-patch/firefox-102.15.0-new/build/autoconf/autoconf.sh	(revision 228)
@@ -0,0 +1,152 @@
+#! @SHELL@
+# autoconf -- create `configure' using m4 macros
+# Copyright (C) 1992, 1993, 1994, 1996 Free Software Foundation, Inc.
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
+# 02111-1307, USA.
+
+# If given no args, create `configure' from template file `configure.in'.
+# With one arg, create a configure script on standard output from
+# the given template file.
+
+usage="\
+Usage: autoconf [-h] [--help] [-m dir] [--macrodir=dir]
+       [-l dir] [--localdir=dir] [--version] [template-file]"
+
+# NLS nuisances.
+# Only set these to C if already set.  These must not be set unconditionally
+# because not all systems understand e.g. LANG=C (notably SCO).
+# Fixing LC_MESSAGES prevents Solaris sh from translating var values in `set'!
+# Non-C LC_CTYPE values break the ctype check.
+if test "${LANG+set}"   = set; then LANG=C;   export LANG;   fi
+if test "${LC_ALL+set}" = set; then LC_ALL=C; export LC_ALL; fi
+if test "${LC_MESSAGES+set}" = set; then LC_MESSAGES=C; export LC_MESSAGES; fi
+if test "${LC_CTYPE+set}"    = set; then LC_CTYPE=C;    export LC_CTYPE;    fi
+
+: ${AC_MACRODIR=@datadir@}
+: ${M4=@M4@}
+: ${AWK=@AWK@}
+case "${M4}" in
+/*) # Handle the case that m4 has moved since we were configured.
+    # It may have been found originally in a build directory.
+    test -f "${M4}" || M4=m4 ;;
+esac
+
+: ${TMPDIR=/tmp}
+tmpout=${TMPDIR}/acout.$$
+localdir=
+show_version=no
+
+while test $# -gt 0 ; do
+   case "${1}" in
+      -h | --help | --h* )
+         echo "${usage}" 1>&2; exit 0 ;;
+      --localdir=* | --l*=* )
+         localdir="`echo \"${1}\" | sed -e 's/^[^=]*=//'`"
+         shift ;;
+      -l | --localdir | --l*)
+         shift
+         test $# -eq 0 && { echo "${usage}" 1>&2; exit 1; }
+         localdir="${1}"
+         shift ;;
+      --macrodir=* | --m*=* )
+         AC_MACRODIR="`echo \"${1}\" | sed -e 's/^[^=]*=//'`"
+         shift ;;
+      -m | --macrodir | --m* )
+         shift
+         test $# -eq 0 && { echo "${usage}" 1>&2; exit 1; }
+         AC_MACRODIR="${1}"
+         shift ;;
+      --version | --v* )
+         show_version=yes; shift ;;
+      -- )     # Stop option processing
+        shift; break ;;
+      - )	# Use stdin as input.
+        break ;;
+      -* )
+        echo "${usage}" 1>&2; exit 1 ;;
+      * )
+        break ;;
+   esac
+done
+
+if test $show_version = yes; then
+  version=`sed -n 's/define.AC_ACVERSION.[ 	]*\([0-9.]*\).*/\1/p' \
+    $AC_MACRODIR/acgeneral.m4`
+  echo "Autoconf version $version"
+  exit 0
+fi
+
+case $# in
+  0) infile=configure.in ;;
+  1) infile="$1" ;;
+  *) echo "$usage" >&2; exit 1 ;;
+esac
+
+trap 'rm -f $tmpin $tmpout; exit 1' 1 2 15
+
+tmpin=${TMPDIR}/acin.$$ # Always set this, to avoid bogus errors from some rm's.
+if test z$infile = z-; then
+  infile=$tmpin
+  cat > $infile
+elif test ! -r "$infile"; then
+  echo "autoconf: ${infile}: No such file or directory" >&2
+  exit 1
+fi
+
+if test -n "$localdir"; then
+  use_localdir="-I$localdir -DAC_LOCALDIR=$localdir"
+else
+  use_localdir=
+fi
+
+# Use the frozen version of Autoconf if available.
+r= f=
+
+$M4 -I$AC_MACRODIR $use_localdir $r autoconf.m4$f $infile > $tmpout ||
+  { rm -f $tmpin $tmpout; exit 2; }
+
+# You could add your own prefixes to pattern if you wanted to check for
+# them too, e.g. pattern='\(AC_\|ILT_\)', except that UNIX sed doesn't do
+# alternation.
+pattern="AC_"
+
+status=0
+if grep "^[^#]*${pattern}" $tmpout > /dev/null 2>&1; then
+  echo "autoconf: Undefined macros:" >&2
+  sed -n "s/^[^#]*\\(${pattern}[_A-Za-z0-9]*\\).*/\\1/p" $tmpout |
+    while read macro; do
+      grep -n "^[^#]*$macro" $infile /dev/null
+      test $? -eq 1 && echo >&2 "***BUG in Autoconf--please report*** $macro"
+    done | sort -u >&2
+  status=1
+fi
+
+if test $# -eq 0; then
+  echo "This case should not be reached."
+  exit 1
+fi
+
+# Put the real line numbers into the output to make config.log more helpful.
+$AWK '
+/__oline__/ { printf "%d:", NR + 1 }
+           { print }
+' $tmpout | sed '
+/__oline__/s/^\([0-9][0-9]*\):\(.*\)__oline__/\2\1/
+'
+
+rm -f $tmpout
+
+exit $status
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/create.patch.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/create.patch.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+VERSION=102.15.0
+
+tar --files-from=file.list -xJvf ../firefox-${VERSION}esr.source.tar.xz
+mv firefox-$VERSION firefox-$VERSION-orig
+
+cp -rf ./firefox-$VERSION-new ./firefox-$VERSION
+
+diff --unified -Nr  firefox-$VERSION-orig  firefox-$VERSION > firefox-$VERSION-riscv64gc.patch
+
+mv firefox-$VERSION-riscv64gc.patch ../patches
+
+rm -rf ./firefox-$VERSION
+rm -rf ./firefox-$VERSION-orig

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/file.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/file.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/file.list	(revision 228)
@@ -0,0 +1,2 @@
+firefox-102.15.0/build/autoconf/config.guess
+firefox-102.15.0/build/autoconf/config.sub
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/firefox-102.15.0-new/build/autoconf/config.guess
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/firefox-102.15.0-new/build/autoconf/config.guess	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/firefox-102.15.0-new/build/autoconf/config.guess	(revision 228)
@@ -0,0 +1,1757 @@
+#! /bin/sh
+# Attempt to guess a canonical system name.
+#   Copyright 1992-2022 Free Software Foundation, Inc.
+
+# shellcheck disable=SC2006,SC2268 # see below for rationale
+
+timestamp='2022-01-09'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://www.gnu.org/licenses/>.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that
+# program.  This Exception is an additional permission under section 7
+# of the GNU General Public License, version 3 ("GPLv3").
+#
+# Originally written by Per Bothner; maintained since 2000 by Ben Elliston.
+#
+# You can get the latest version of this script from:
+# https://git.savannah.gnu.org/cgit/config.git/plain/config.guess
+#
+# Please send patches to <config-patches@gnu.org>.
+
+
+# The "shellcheck disable" line above the timestamp inhibits complaints
+# about features and limitations of the classic Bourne shell that were
+# superseded or lifted in POSIX.  However, this script identifies a wide
+# variety of pre-POSIX systems that do not have POSIX shells at all, and
+# even some reasonably current systems (Solaris 10 as case-in-point) still
+# have a pre-POSIX /bin/sh.
+
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION]
+
+Output the configuration name of the system \`$me' is run on.
+
+Options:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.guess ($timestamp)
+
+Originally written by Per Bothner.
+Copyright 1992-2022 Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit ;;
+    --version | -v )
+       echo "$version" ; exit ;;
+    --help | --h* | -h )
+       echo "$usage"; exit ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help" >&2
+       exit 1 ;;
+    * )
+       break ;;
+  esac
+done
+
+if test $# != 0; then
+  echo "$me: too many arguments$help" >&2
+  exit 1
+fi
+
+# Just in case it came from the environment.
+GUESS=
+
+# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
+# compiler to aid in system detection is discouraged as it requires
+# temporary files to be created and, as you can see below, it is a
+# headache to deal with in a portable fashion.
+
+# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
+# use `HOST_CC' if defined, but it is deprecated.
+
+# Portable tmp directory creation inspired by the Autoconf team.
+
+tmp=
+# shellcheck disable=SC2172
+trap 'test -z "$tmp" || rm -fr "$tmp"' 0 1 2 13 15
+
+set_cc_for_build() {
+    # prevent multiple calls if $tmp is already set
+    test "$tmp" && return 0
+    : "${TMPDIR=/tmp}"
+    # shellcheck disable=SC2039,SC3028
+    { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
+	{ test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir "$tmp" 2>/dev/null) ; } ||
+	{ tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir "$tmp" 2>/dev/null) && echo "Warning: creating insecure temp directory" >&2 ; } ||
+	{ echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; }
+    dummy=$tmp/dummy
+    case ${CC_FOR_BUILD-},${HOST_CC-},${CC-} in
+	,,)    echo "int x;" > "$dummy.c"
+	       for driver in cc gcc c89 c99 ; do
+		   if ($driver -c -o "$dummy.o" "$dummy.c") >/dev/null 2>&1 ; then
+		       CC_FOR_BUILD=$driver
+		       break
+		   fi
+	       done
+	       if test x"$CC_FOR_BUILD" = x ; then
+		   CC_FOR_BUILD=no_compiler_found
+	       fi
+	       ;;
+	,,*)   CC_FOR_BUILD=$CC ;;
+	,*,*)  CC_FOR_BUILD=$HOST_CC ;;
+    esac
+}
+
+# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
+# (ghazi@noc.rutgers.edu 1994-08-24)
+if test -f /.attbin/uname ; then
+	PATH=$PATH:/.attbin ; export PATH
+fi
+
+UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
+UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
+UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown
+UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
+
+case $UNAME_SYSTEM in
+Linux|GNU|GNU/*)
+	LIBC=unknown
+
+	set_cc_for_build
+	cat <<-EOF > "$dummy.c"
+	#include <features.h>
+	#if defined(__UCLIBC__)
+	LIBC=uclibc
+	#elif defined(__dietlibc__)
+	LIBC=dietlibc
+	#elif defined(__GLIBC__)
+	LIBC=gnu
+	#else
+	#include <stdarg.h>
+	/* First heuristic to detect musl libc.  */
+	#ifdef __DEFINED_va_list
+	LIBC=musl
+	#endif
+	#endif
+	EOF
+	cc_set_libc=`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^LIBC' | sed 's, ,,g'`
+	eval "$cc_set_libc"
+
+	# Second heuristic to detect musl libc.
+	if [ "$LIBC" = unknown ] &&
+	   command -v ldd >/dev/null &&
+	   ldd --version 2>&1 | grep -q ^musl; then
+		LIBC=musl
+	fi
+
+	# If the system lacks a compiler, then just pick glibc.
+	# We could probably try harder.
+	if [ "$LIBC" = unknown ]; then
+		LIBC=gnu
+	fi
+	;;
+esac
+
+# Note: order is significant - the case branches are not exclusive.
+
+case $UNAME_MACHINE:$UNAME_SYSTEM:$UNAME_RELEASE:$UNAME_VERSION in
+    *:NetBSD:*:*)
+	# NetBSD (nbsd) targets should (where applicable) match one or
+	# more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*,
+	# *-*-netbsdecoff* and *-*-netbsd*.  For targets that recently
+	# switched to ELF, *-*-netbsd* would select the old
+	# object file format.  This provides both forward
+	# compatibility and a consistent mechanism for selecting the
+	# object file format.
+	#
+	# Note: NetBSD doesn't particularly care about the vendor
+	# portion of the name.  We always set it to "unknown".
+	UNAME_MACHINE_ARCH=`(uname -p 2>/dev/null || \
+	    /sbin/sysctl -n hw.machine_arch 2>/dev/null || \
+	    /usr/sbin/sysctl -n hw.machine_arch 2>/dev/null || \
+	    echo unknown)`
+	case $UNAME_MACHINE_ARCH in
+	    aarch64eb) machine=aarch64_be-unknown ;;
+	    armeb) machine=armeb-unknown ;;
+	    arm*) machine=arm-unknown ;;
+	    sh3el) machine=shl-unknown ;;
+	    sh3eb) machine=sh-unknown ;;
+	    sh5el) machine=sh5le-unknown ;;
+	    earmv*)
+		arch=`echo "$UNAME_MACHINE_ARCH" | sed -e 's,^e\(armv[0-9]\).*$,\1,'`
+		endian=`echo "$UNAME_MACHINE_ARCH" | sed -ne 's,^.*\(eb\)$,\1,p'`
+		machine=${arch}${endian}-unknown
+		;;
+	    *) machine=$UNAME_MACHINE_ARCH-unknown ;;
+	esac
+	# The Operating System including object format, if it has switched
+	# to ELF recently (or will in the future) and ABI.
+	case $UNAME_MACHINE_ARCH in
+	    earm*)
+		os=netbsdelf
+		;;
+	    arm*|i386|m68k|ns32k|sh3*|sparc|vax)
+		set_cc_for_build
+		if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
+			| grep -q __ELF__
+		then
+		    # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
+		    # Return netbsd for either.  FIX?
+		    os=netbsd
+		else
+		    os=netbsdelf
+		fi
+		;;
+	    *)
+		os=netbsd
+		;;
+	esac
+	# Determine ABI tags.
+	case $UNAME_MACHINE_ARCH in
+	    earm*)
+		expr='s/^earmv[0-9]/-eabi/;s/eb$//'
+		abi=`echo "$UNAME_MACHINE_ARCH" | sed -e "$expr"`
+		;;
+	esac
+	# The OS release
+	# Debian GNU/NetBSD machines have a different userland, and
+	# thus, need a distinct triplet. However, they do not need
+	# kernel version information, so it can be replaced with a
+	# suitable tag, in the style of linux-gnu.
+	case $UNAME_VERSION in
+	    Debian*)
+		release='-gnu'
+		;;
+	    *)
+		release=`echo "$UNAME_RELEASE" | sed -e 's/[-_].*//' | cut -d. -f1,2`
+		;;
+	esac
+	# Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
+	# contains redundant information, the shorter form:
+	# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
+	GUESS=$machine-${os}${release}${abi-}
+	;;
+    *:Bitrig:*:*)
+	UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'`
+	GUESS=$UNAME_MACHINE_ARCH-unknown-bitrig$UNAME_RELEASE
+	;;
+    *:OpenBSD:*:*)
+	UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
+	GUESS=$UNAME_MACHINE_ARCH-unknown-openbsd$UNAME_RELEASE
+	;;
+    *:SecBSD:*:*)
+	UNAME_MACHINE_ARCH=`arch | sed 's/SecBSD.//'`
+	GUESS=$UNAME_MACHINE_ARCH-unknown-secbsd$UNAME_RELEASE
+	;;
+    *:LibertyBSD:*:*)
+	UNAME_MACHINE_ARCH=`arch | sed 's/^.*BSD\.//'`
+	GUESS=$UNAME_MACHINE_ARCH-unknown-libertybsd$UNAME_RELEASE
+	;;
+    *:MidnightBSD:*:*)
+	GUESS=$UNAME_MACHINE-unknown-midnightbsd$UNAME_RELEASE
+	;;
+    *:ekkoBSD:*:*)
+	GUESS=$UNAME_MACHINE-unknown-ekkobsd$UNAME_RELEASE
+	;;
+    *:SolidBSD:*:*)
+	GUESS=$UNAME_MACHINE-unknown-solidbsd$UNAME_RELEASE
+	;;
+    *:OS108:*:*)
+	GUESS=$UNAME_MACHINE-unknown-os108_$UNAME_RELEASE
+	;;
+    macppc:MirBSD:*:*)
+	GUESS=powerpc-unknown-mirbsd$UNAME_RELEASE
+	;;
+    *:MirBSD:*:*)
+	GUESS=$UNAME_MACHINE-unknown-mirbsd$UNAME_RELEASE
+	;;
+    *:Sortix:*:*)
+	GUESS=$UNAME_MACHINE-unknown-sortix
+	;;
+    *:Twizzler:*:*)
+	GUESS=$UNAME_MACHINE-unknown-twizzler
+	;;
+    *:Redox:*:*)
+	GUESS=$UNAME_MACHINE-unknown-redox
+	;;
+    mips:OSF1:*.*)
+	GUESS=mips-dec-osf1
+	;;
+    alpha:OSF1:*:*)
+	# Reset EXIT trap before exiting to avoid spurious non-zero exit code.
+	trap '' 0
+	case $UNAME_RELEASE in
+	*4.0)
+		UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
+		;;
+	*5.*)
+		UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
+		;;
+	esac
+	# According to Compaq, /usr/sbin/psrinfo has been available on
+	# OSF/1 and Tru64 systems produced since 1995.  I hope that
+	# covers most systems running today.  This code pipes the CPU
+	# types through head -n 1, so we only detect the type of CPU 0.
+	ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^  The alpha \(.*\) processor.*$/\1/p' | head -n 1`
+	case $ALPHA_CPU_TYPE in
+	    "EV4 (21064)")
+		UNAME_MACHINE=alpha ;;
+	    "EV4.5 (21064)")
+		UNAME_MACHINE=alpha ;;
+	    "LCA4 (21066/21068)")
+		UNAME_MACHINE=alpha ;;
+	    "EV5 (21164)")
+		UNAME_MACHINE=alphaev5 ;;
+	    "EV5.6 (21164A)")
+		UNAME_MACHINE=alphaev56 ;;
+	    "EV5.6 (21164PC)")
+		UNAME_MACHINE=alphapca56 ;;
+	    "EV5.7 (21164PC)")
+		UNAME_MACHINE=alphapca57 ;;
+	    "EV6 (21264)")
+		UNAME_MACHINE=alphaev6 ;;
+	    "EV6.7 (21264A)")
+		UNAME_MACHINE=alphaev67 ;;
+	    "EV6.8CB (21264C)")
+		UNAME_MACHINE=alphaev68 ;;
+	    "EV6.8AL (21264B)")
+		UNAME_MACHINE=alphaev68 ;;
+	    "EV6.8CX (21264D)")
+		UNAME_MACHINE=alphaev68 ;;
+	    "EV6.9A (21264/EV69A)")
+		UNAME_MACHINE=alphaev69 ;;
+	    "EV7 (21364)")
+		UNAME_MACHINE=alphaev7 ;;
+	    "EV7.9 (21364A)")
+		UNAME_MACHINE=alphaev79 ;;
+	esac
+	# A Pn.n version is a patched version.
+	# A Vn.n version is a released version.
+	# A Tn.n version is a released field test version.
+	# A Xn.n version is an unreleased experimental baselevel.
+	# 1.2 uses "1.2" for uname -r.
+	OSF_REL=`echo "$UNAME_RELEASE" | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz`
+	GUESS=$UNAME_MACHINE-dec-osf$OSF_REL
+	;;
+    Amiga*:UNIX_System_V:4.0:*)
+	GUESS=m68k-unknown-sysv4
+	;;
+    *:[Aa]miga[Oo][Ss]:*:*)
+	GUESS=$UNAME_MACHINE-unknown-amigaos
+	;;
+    *:[Mm]orph[Oo][Ss]:*:*)
+	GUESS=$UNAME_MACHINE-unknown-morphos
+	;;
+    *:OS/390:*:*)
+	GUESS=i370-ibm-openedition
+	;;
+    *:z/VM:*:*)
+	GUESS=s390-ibm-zvmoe
+	;;
+    *:OS400:*:*)
+	GUESS=powerpc-ibm-os400
+	;;
+    arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
+	GUESS=arm-acorn-riscix$UNAME_RELEASE
+	;;
+    arm*:riscos:*:*|arm*:RISCOS:*:*)
+	GUESS=arm-unknown-riscos
+	;;
+    SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
+	GUESS=hppa1.1-hitachi-hiuxmpp
+	;;
+    Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
+	# akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
+	case `(/bin/universe) 2>/dev/null` in
+	    att) GUESS=pyramid-pyramid-sysv3 ;;
+	    *)   GUESS=pyramid-pyramid-bsd   ;;
+	esac
+	;;
+    NILE*:*:*:dcosx)
+	GUESS=pyramid-pyramid-svr4
+	;;
+    DRS?6000:unix:4.0:6*)
+	GUESS=sparc-icl-nx6
+	;;
+    DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
+	case `/usr/bin/uname -p` in
+	    sparc) GUESS=sparc-icl-nx7 ;;
+	esac
+	;;
+    s390x:SunOS:*:*)
+	SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+	GUESS=$UNAME_MACHINE-ibm-solaris2$SUN_REL
+	;;
+    sun4H:SunOS:5.*:*)
+	SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+	GUESS=sparc-hal-solaris2$SUN_REL
+	;;
+    sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
+	SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+	GUESS=sparc-sun-solaris2$SUN_REL
+	;;
+    i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*)
+	GUESS=i386-pc-auroraux$UNAME_RELEASE
+	;;
+    i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*)
+	set_cc_for_build
+	SUN_ARCH=i386
+	# If there is a compiler, see if it is configured for 64-bit objects.
+	# Note that the Sun cc does not turn __LP64__ into 1 like gcc does.
+	# This test works for both compilers.
+	if test "$CC_FOR_BUILD" != no_compiler_found; then
+	    if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \
+		(CCOPTS="" $CC_FOR_BUILD -m64 -E - 2>/dev/null) | \
+		grep IS_64BIT_ARCH >/dev/null
+	    then
+		SUN_ARCH=x86_64
+	    fi
+	fi
+	SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+	GUESS=$SUN_ARCH-pc-solaris2$SUN_REL
+	;;
+    sun4*:SunOS:6*:*)
+	# According to config.sub, this is the proper way to canonicalize
+	# SunOS6.  Hard to guess exactly what SunOS6 will be like, but
+	# it's likely to be more like Solaris than SunOS4.
+	SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+	GUESS=sparc-sun-solaris3$SUN_REL
+	;;
+    sun4*:SunOS:*:*)
+	case `/usr/bin/arch -k` in
+	    Series*|S4*)
+		UNAME_RELEASE=`uname -v`
+		;;
+	esac
+	# Japanese Language versions have a version number like `4.1.3-JL'.
+	SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/-/_/'`
+	GUESS=sparc-sun-sunos$SUN_REL
+	;;
+    sun3*:SunOS:*:*)
+	GUESS=m68k-sun-sunos$UNAME_RELEASE
+	;;
+    sun*:*:4.2BSD:*)
+	UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
+	test "x$UNAME_RELEASE" = x && UNAME_RELEASE=3
+	case `/bin/arch` in
+	    sun3)
+		GUESS=m68k-sun-sunos$UNAME_RELEASE
+		;;
+	    sun4)
+		GUESS=sparc-sun-sunos$UNAME_RELEASE
+		;;
+	esac
+	;;
+    aushp:SunOS:*:*)
+	GUESS=sparc-auspex-sunos$UNAME_RELEASE
+	;;
+    # The situation for MiNT is a little confusing.  The machine name
+    # can be virtually everything (everything which is not
+    # "atarist" or "atariste" at least should have a processor
+    # > m68000).  The system name ranges from "MiNT" over "FreeMiNT"
+    # to the lowercase version "mint" (or "freemint").  Finally
+    # the system name "TOS" denotes a system which is actually not
+    # MiNT.  But MiNT is downward compatible to TOS, so this should
+    # be no problem.
+    atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
+	GUESS=m68k-atari-mint$UNAME_RELEASE
+	;;
+    atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
+	GUESS=m68k-atari-mint$UNAME_RELEASE
+	;;
+    *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
+	GUESS=m68k-atari-mint$UNAME_RELEASE
+	;;
+    milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
+	GUESS=m68k-milan-mint$UNAME_RELEASE
+	;;
+    hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
+	GUESS=m68k-hades-mint$UNAME_RELEASE
+	;;
+    *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
+	GUESS=m68k-unknown-mint$UNAME_RELEASE
+	;;
+    m68k:machten:*:*)
+	GUESS=m68k-apple-machten$UNAME_RELEASE
+	;;
+    powerpc:machten:*:*)
+	GUESS=powerpc-apple-machten$UNAME_RELEASE
+	;;
+    RISC*:Mach:*:*)
+	GUESS=mips-dec-mach_bsd4.3
+	;;
+    RISC*:ULTRIX:*:*)
+	GUESS=mips-dec-ultrix$UNAME_RELEASE
+	;;
+    VAX*:ULTRIX*:*:*)
+	GUESS=vax-dec-ultrix$UNAME_RELEASE
+	;;
+    2020:CLIX:*:* | 2430:CLIX:*:*)
+	GUESS=clipper-intergraph-clix$UNAME_RELEASE
+	;;
+    mips:*:*:UMIPS | mips:*:*:RISCos)
+	set_cc_for_build
+	sed 's/^	//' << EOF > "$dummy.c"
+#ifdef __cplusplus
+#include <stdio.h>  /* for printf() prototype */
+	int main (int argc, char *argv[]) {
+#else
+	int main (argc, argv) int argc; char *argv[]; {
+#endif
+	#if defined (host_mips) && defined (MIPSEB)
+	#if defined (SYSTYPE_SYSV)
+	  printf ("mips-mips-riscos%ssysv\\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_SVR4)
+	  printf ("mips-mips-riscos%ssvr4\\n", argv[1]); exit (0);
+	#endif
+	#if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
+	  printf ("mips-mips-riscos%sbsd\\n", argv[1]); exit (0);
+	#endif
+	#endif
+	  exit (-1);
+	}
+EOF
+	$CC_FOR_BUILD -o "$dummy" "$dummy.c" &&
+	  dummyarg=`echo "$UNAME_RELEASE" | sed -n 's/\([0-9]*\).*/\1/p'` &&
+	  SYSTEM_NAME=`"$dummy" "$dummyarg"` &&
+	    { echo "$SYSTEM_NAME"; exit; }
+	GUESS=mips-mips-riscos$UNAME_RELEASE
+	;;
+    Motorola:PowerMAX_OS:*:*)
+	GUESS=powerpc-motorola-powermax
+	;;
+    Motorola:*:4.3:PL8-*)
+	GUESS=powerpc-harris-powermax
+	;;
+    Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
+	GUESS=powerpc-harris-powermax
+	;;
+    Night_Hawk:Power_UNIX:*:*)
+	GUESS=powerpc-harris-powerunix
+	;;
+    m88k:CX/UX:7*:*)
+	GUESS=m88k-harris-cxux7
+	;;
+    m88k:*:4*:R4*)
+	GUESS=m88k-motorola-sysv4
+	;;
+    m88k:*:3*:R3*)
+	GUESS=m88k-motorola-sysv3
+	;;
+    AViiON:dgux:*:*)
+	# DG/UX returns AViiON for all architectures
+	UNAME_PROCESSOR=`/usr/bin/uname -p`
+	if test "$UNAME_PROCESSOR" = mc88100 || test "$UNAME_PROCESSOR" = mc88110
+	then
+	    if test "$TARGET_BINARY_INTERFACE"x = m88kdguxelfx || \
+	       test "$TARGET_BINARY_INTERFACE"x = x
+	    then
+		GUESS=m88k-dg-dgux$UNAME_RELEASE
+	    else
+		GUESS=m88k-dg-dguxbcs$UNAME_RELEASE
+	    fi
+	else
+	    GUESS=i586-dg-dgux$UNAME_RELEASE
+	fi
+	;;
+    M88*:DolphinOS:*:*)	# DolphinOS (SVR3)
+	GUESS=m88k-dolphin-sysv3
+	;;
+    M88*:*:R3*:*)
+	# Delta 88k system running SVR3
+	GUESS=m88k-motorola-sysv3
+	;;
+    XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
+	GUESS=m88k-tektronix-sysv3
+	;;
+    Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
+	GUESS=m68k-tektronix-bsd
+	;;
+    *:IRIX*:*:*)
+	IRIX_REL=`echo "$UNAME_RELEASE" | sed -e 's/-/_/g'`
+	GUESS=mips-sgi-irix$IRIX_REL
+	;;
+    ????????:AIX?:[12].1:2)   # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
+	GUESS=romp-ibm-aix    # uname -m gives an 8 hex-code CPU id
+	;;                    # Note that: echo "'`uname -s`'" gives 'AIX '
+    i*86:AIX:*:*)
+	GUESS=i386-ibm-aix
+	;;
+    ia64:AIX:*:*)
+	if test -x /usr/bin/oslevel ; then
+		IBM_REV=`/usr/bin/oslevel`
+	else
+		IBM_REV=$UNAME_VERSION.$UNAME_RELEASE
+	fi
+	GUESS=$UNAME_MACHINE-ibm-aix$IBM_REV
+	;;
+    *:AIX:2:3)
+	if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
+		set_cc_for_build
+		sed 's/^		//' << EOF > "$dummy.c"
+		#include <sys/systemcfg.h>
+
+		main()
+			{
+			if (!__power_pc())
+				exit(1);
+			puts("powerpc-ibm-aix3.2.5");
+			exit(0);
+			}
+EOF
+		if $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=`"$dummy"`
+		then
+			GUESS=$SYSTEM_NAME
+		else
+			GUESS=rs6000-ibm-aix3.2.5
+		fi
+	elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
+		GUESS=rs6000-ibm-aix3.2.4
+	else
+		GUESS=rs6000-ibm-aix3.2
+	fi
+	;;
+    *:AIX:*:[4567])
+	IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
+	if /usr/sbin/lsattr -El "$IBM_CPU_ID" | grep ' POWER' >/dev/null 2>&1; then
+		IBM_ARCH=rs6000
+	else
+		IBM_ARCH=powerpc
+	fi
+	if test -x /usr/bin/lslpp ; then
+		IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc | \
+			   awk -F: '{ print $3 }' | sed s/[0-9]*$/0/`
+	else
+		IBM_REV=$UNAME_VERSION.$UNAME_RELEASE
+	fi
+	GUESS=$IBM_ARCH-ibm-aix$IBM_REV
+	;;
+    *:AIX:*:*)
+	GUESS=rs6000-ibm-aix
+	;;
+    ibmrt:4.4BSD:*|romp-ibm:4.4BSD:*)
+	GUESS=romp-ibm-bsd4.4
+	;;
+    ibmrt:*BSD:*|romp-ibm:BSD:*)            # covers RT/PC BSD and
+	GUESS=romp-ibm-bsd$UNAME_RELEASE    # 4.3 with uname added to
+	;;                                  # report: romp-ibm BSD 4.3
+    *:BOSX:*:*)
+	GUESS=rs6000-bull-bosx
+	;;
+    DPX/2?00:B.O.S.:*:*)
+	GUESS=m68k-bull-sysv3
+	;;
+    9000/[34]??:4.3bsd:1.*:*)
+	GUESS=m68k-hp-bsd
+	;;
+    hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
+	GUESS=m68k-hp-bsd4.4
+	;;
+    9000/[34678]??:HP-UX:*:*)
+	HPUX_REV=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*.[0B]*//'`
+	case $UNAME_MACHINE in
+	    9000/31?)            HP_ARCH=m68000 ;;
+	    9000/[34]??)         HP_ARCH=m68k ;;
+	    9000/[678][0-9][0-9])
+		if test -x /usr/bin/getconf; then
+		    sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
+		    sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
+		    case $sc_cpu_version in
+		      523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0
+		      528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1
+		      532)                      # CPU_PA_RISC2_0
+			case $sc_kernel_bits in
+			  32) HP_ARCH=hppa2.0n ;;
+			  64) HP_ARCH=hppa2.0w ;;
+			  '') HP_ARCH=hppa2.0 ;;   # HP-UX 10.20
+			esac ;;
+		    esac
+		fi
+		if test "$HP_ARCH" = ""; then
+		    set_cc_for_build
+		    sed 's/^		//' << EOF > "$dummy.c"
+
+		#define _HPUX_SOURCE
+		#include <stdlib.h>
+		#include <unistd.h>
+
+		int main ()
+		{
+		#if defined(_SC_KERNEL_BITS)
+		    long bits = sysconf(_SC_KERNEL_BITS);
+		#endif
+		    long cpu  = sysconf (_SC_CPU_VERSION);
+
+		    switch (cpu)
+			{
+			case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
+			case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
+			case CPU_PA_RISC2_0:
+		#if defined(_SC_KERNEL_BITS)
+			    switch (bits)
+				{
+				case 64: puts ("hppa2.0w"); break;
+				case 32: puts ("hppa2.0n"); break;
+				default: puts ("hppa2.0"); break;
+				} break;
+		#else  /* !defined(_SC_KERNEL_BITS) */
+			    puts ("hppa2.0"); break;
+		#endif
+			default: puts ("hppa1.0"); break;
+			}
+		    exit (0);
+		}
+EOF
+		    (CCOPTS="" $CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null) && HP_ARCH=`"$dummy"`
+		    test -z "$HP_ARCH" && HP_ARCH=hppa
+		fi ;;
+	esac
+	if test "$HP_ARCH" = hppa2.0w
+	then
+	    set_cc_for_build
+
+	    # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
+	    # 32-bit code.  hppa64-hp-hpux* has the same kernel and a compiler
+	    # generating 64-bit code.  GNU and HP use different nomenclature:
+	    #
+	    # $ CC_FOR_BUILD=cc ./config.guess
+	    # => hppa2.0w-hp-hpux11.23
+	    # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
+	    # => hppa64-hp-hpux11.23
+
+	    if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) |
+		grep -q __LP64__
+	    then
+		HP_ARCH=hppa2.0w
+	    else
+		HP_ARCH=hppa64
+	    fi
+	fi
+	GUESS=$HP_ARCH-hp-hpux$HPUX_REV
+	;;
+    ia64:HP-UX:*:*)
+	HPUX_REV=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*.[0B]*//'`
+	GUESS=ia64-hp-hpux$HPUX_REV
+	;;
+    3050*:HI-UX:*:*)
+	set_cc_for_build
+	sed 's/^	//' << EOF > "$dummy.c"
+	#include <unistd.h>
+	int
+	main ()
+	{
+	  long cpu = sysconf (_SC_CPU_VERSION);
+	  /* The order matters, because CPU_IS_HP_MC68K erroneously returns
+	     true for CPU_PA_RISC1_0.  CPU_IS_PA_RISC returns correct
+	     results, however.  */
+	  if (CPU_IS_PA_RISC (cpu))
+	    {
+	      switch (cpu)
+		{
+		  case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
+		  case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
+		  default: puts ("hppa-hitachi-hiuxwe2"); break;
+		}
+	    }
+	  else if (CPU_IS_HP_MC68K (cpu))
+	    puts ("m68k-hitachi-hiuxwe2");
+	  else puts ("unknown-hitachi-hiuxwe2");
+	  exit (0);
+	}
+EOF
+	$CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=`"$dummy"` &&
+		{ echo "$SYSTEM_NAME"; exit; }
+	GUESS=unknown-hitachi-hiuxwe2
+	;;
+    9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:*)
+	GUESS=hppa1.1-hp-bsd
+	;;
+    9000/8??:4.3bsd:*:*)
+	GUESS=hppa1.0-hp-bsd
+	;;
+    *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
+	GUESS=hppa1.0-hp-mpeix
+	;;
+    hp7??:OSF1:*:* | hp8?[79]:OSF1:*:*)
+	GUESS=hppa1.1-hp-osf
+	;;
+    hp8??:OSF1:*:*)
+	GUESS=hppa1.0-hp-osf
+	;;
+    i*86:OSF1:*:*)
+	if test -x /usr/sbin/sysversion ; then
+	    GUESS=$UNAME_MACHINE-unknown-osf1mk
+	else
+	    GUESS=$UNAME_MACHINE-unknown-osf1
+	fi
+	;;
+    parisc*:Lites*:*:*)
+	GUESS=hppa1.1-hp-lites
+	;;
+    C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
+	GUESS=c1-convex-bsd
+	;;
+    C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
+	if getsysinfo -f scalar_acc
+	then echo c32-convex-bsd
+	else echo c2-convex-bsd
+	fi
+	exit ;;
+    C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
+	GUESS=c34-convex-bsd
+	;;
+    C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
+	GUESS=c38-convex-bsd
+	;;
+    C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
+	GUESS=c4-convex-bsd
+	;;
+    CRAY*Y-MP:*:*:*)
+	CRAY_REL=`echo "$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'`
+	GUESS=ymp-cray-unicos$CRAY_REL
+	;;
+    CRAY*[A-Z]90:*:*:*)
+	echo "$UNAME_MACHINE"-cray-unicos"$UNAME_RELEASE" \
+	| sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
+	      -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
+	      -e 's/\.[^.]*$/.X/'
+	exit ;;
+    CRAY*TS:*:*:*)
+	CRAY_REL=`echo "$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'`
+	GUESS=t90-cray-unicos$CRAY_REL
+	;;
+    CRAY*T3E:*:*:*)
+	CRAY_REL=`echo "$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'`
+	GUESS=alphaev5-cray-unicosmk$CRAY_REL
+	;;
+    CRAY*SV1:*:*:*)
+	CRAY_REL=`echo "$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'`
+	GUESS=sv1-cray-unicos$CRAY_REL
+	;;
+    *:UNICOS/mp:*:*)
+	CRAY_REL=`echo "$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'`
+	GUESS=craynv-cray-unicosmp$CRAY_REL
+	;;
+    F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
+	FUJITSU_PROC=`uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz`
+	FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'`
+	FUJITSU_REL=`echo "$UNAME_RELEASE" | sed -e 's/ /_/'`
+	GUESS=${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}
+	;;
+    5000:UNIX_System_V:4.*:*)
+	FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'`
+	FUJITSU_REL=`echo "$UNAME_RELEASE" | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/'`
+	GUESS=sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}
+	;;
+    i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
+	GUESS=$UNAME_MACHINE-pc-bsdi$UNAME_RELEASE
+	;;
+    sparc*:BSD/OS:*:*)
+	GUESS=sparc-unknown-bsdi$UNAME_RELEASE
+	;;
+    *:BSD/OS:*:*)
+	GUESS=$UNAME_MACHINE-unknown-bsdi$UNAME_RELEASE
+	;;
+    arm:FreeBSD:*:*)
+	UNAME_PROCESSOR=`uname -p`
+	set_cc_for_build
+	if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \
+	    | grep -q __ARM_PCS_VFP
+	then
+	    FREEBSD_REL=`echo "$UNAME_RELEASE" | sed -e 's/[-(].*//'`
+	    GUESS=$UNAME_PROCESSOR-unknown-freebsd$FREEBSD_REL-gnueabi
+	else
+	    FREEBSD_REL=`echo "$UNAME_RELEASE" | sed -e 's/[-(].*//'`
+	    GUESS=$UNAME_PROCESSOR-unknown-freebsd$FREEBSD_REL-gnueabihf
+	fi
+	;;
+    *:FreeBSD:*:*)
+	UNAME_PROCESSOR=`/usr/bin/uname -p`
+	case $UNAME_PROCESSOR in
+	    amd64)
+		UNAME_PROCESSOR=x86_64 ;;
+	    i386)
+		UNAME_PROCESSOR=i586 ;;
+	esac
+	FREEBSD_REL=`echo "$UNAME_RELEASE" | sed -e 's/[-(].*//'`
+	GUESS=$UNAME_PROCESSOR-unknown-freebsd$FREEBSD_REL
+	;;
+    i*:CYGWIN*:*)
+	GUESS=$UNAME_MACHINE-pc-cygwin
+	;;
+    *:MINGW64*:*)
+	GUESS=$UNAME_MACHINE-pc-mingw64
+	;;
+    *:MINGW*:*)
+	GUESS=$UNAME_MACHINE-pc-mingw32
+	;;
+    *:MSYS*:*)
+	GUESS=$UNAME_MACHINE-pc-msys
+	;;
+    i*:PW*:*)
+	GUESS=$UNAME_MACHINE-pc-pw32
+	;;
+    *:SerenityOS:*:*)
+        GUESS=$UNAME_MACHINE-pc-serenity
+        ;;
+    *:Interix*:*)
+	case $UNAME_MACHINE in
+	    x86)
+		GUESS=i586-pc-interix$UNAME_RELEASE
+		;;
+	    authenticamd | genuineintel | EM64T)
+		GUESS=x86_64-unknown-interix$UNAME_RELEASE
+		;;
+	    IA64)
+		GUESS=ia64-unknown-interix$UNAME_RELEASE
+		;;
+	esac ;;
+    i*:UWIN*:*)
+	GUESS=$UNAME_MACHINE-pc-uwin
+	;;
+    amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*)
+	GUESS=x86_64-pc-cygwin
+	;;
+    prep*:SunOS:5.*:*)
+	SUN_REL=`echo "$UNAME_RELEASE" | sed -e 's/[^.]*//'`
+	GUESS=powerpcle-unknown-solaris2$SUN_REL
+	;;
+    *:GNU:*:*)
+	# the GNU system
+	GNU_ARCH=`echo "$UNAME_MACHINE" | sed -e 's,[-/].*$,,'`
+	GNU_REL=`echo "$UNAME_RELEASE" | sed -e 's,/.*$,,'`
+	GUESS=$GNU_ARCH-unknown-$LIBC$GNU_REL
+	;;
+    *:GNU/*:*:*)
+	# other systems with GNU libc and userland
+	GNU_SYS=`echo "$UNAME_SYSTEM" | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]"`
+	GNU_REL=`echo "$UNAME_RELEASE" | sed -e 's/[-(].*//'`
+	GUESS=$UNAME_MACHINE-unknown-$GNU_SYS$GNU_REL-$LIBC
+	;;
+    *:Minix:*:*)
+	GUESS=$UNAME_MACHINE-unknown-minix
+	;;
+    aarch64:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    aarch64_be:Linux:*:*)
+	UNAME_MACHINE=aarch64_be
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    alpha:Linux:*:*)
+	case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' /proc/cpuinfo 2>/dev/null` in
+	  EV5)   UNAME_MACHINE=alphaev5 ;;
+	  EV56)  UNAME_MACHINE=alphaev56 ;;
+	  PCA56) UNAME_MACHINE=alphapca56 ;;
+	  PCA57) UNAME_MACHINE=alphapca56 ;;
+	  EV6)   UNAME_MACHINE=alphaev6 ;;
+	  EV67)  UNAME_MACHINE=alphaev67 ;;
+	  EV68*) UNAME_MACHINE=alphaev68 ;;
+	esac
+	objdump --private-headers /bin/sh | grep -q ld.so.1
+	if test "$?" = 0 ; then LIBC=gnulibc1 ; fi
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    arc:Linux:*:* | arceb:Linux:*:* | arc32:Linux:*:* | arc64:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    arm*:Linux:*:*)
+	set_cc_for_build
+	if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \
+	    | grep -q __ARM_EABI__
+	then
+	    GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	else
+	    if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \
+		| grep -q __ARM_PCS_VFP
+	    then
+		GUESS=$UNAME_MACHINE-unknown-linux-${LIBC}eabi
+	    else
+		GUESS=$UNAME_MACHINE-unknown-linux-${LIBC}eabihf
+	    fi
+	fi
+	;;
+    avr32*:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    cris:Linux:*:*)
+	GUESS=$UNAME_MACHINE-axis-linux-$LIBC
+	;;
+    crisv32:Linux:*:*)
+	GUESS=$UNAME_MACHINE-axis-linux-$LIBC
+	;;
+    e2k:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    frv:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    hexagon:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    i*86:Linux:*:*)
+	GUESS=$UNAME_MACHINE-pc-linux-$LIBC
+	;;
+    ia64:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    k1om:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    loongarch32:Linux:*:* | loongarch64:Linux:*:* | loongarchx32:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    m32r*:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    m68*:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    mips:Linux:*:* | mips64:Linux:*:*)
+	set_cc_for_build
+	IS_GLIBC=0
+	test x"${LIBC}" = xgnu && IS_GLIBC=1
+	sed 's/^	//' << EOF > "$dummy.c"
+	#undef CPU
+	#undef mips
+	#undef mipsel
+	#undef mips64
+	#undef mips64el
+	#if ${IS_GLIBC} && defined(_ABI64)
+	LIBCABI=gnuabi64
+	#else
+	#if ${IS_GLIBC} && defined(_ABIN32)
+	LIBCABI=gnuabin32
+	#else
+	LIBCABI=${LIBC}
+	#endif
+	#endif
+
+	#if ${IS_GLIBC} && defined(__mips64) && defined(__mips_isa_rev) && __mips_isa_rev>=6
+	CPU=mipsisa64r6
+	#else
+	#if ${IS_GLIBC} && !defined(__mips64) && defined(__mips_isa_rev) && __mips_isa_rev>=6
+	CPU=mipsisa32r6
+	#else
+	#if defined(__mips64)
+	CPU=mips64
+	#else
+	CPU=mips
+	#endif
+	#endif
+	#endif
+
+	#if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
+	MIPS_ENDIAN=el
+	#else
+	#if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
+	MIPS_ENDIAN=
+	#else
+	MIPS_ENDIAN=
+	#endif
+	#endif
+EOF
+	cc_set_vars=`$CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^CPU\|^MIPS_ENDIAN\|^LIBCABI'`
+	eval "$cc_set_vars"
+	test "x$CPU" != x && { echo "$CPU${MIPS_ENDIAN}-unknown-linux-$LIBCABI"; exit; }
+	;;
+    mips64el:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    openrisc*:Linux:*:*)
+	GUESS=or1k-unknown-linux-$LIBC
+	;;
+    or32:Linux:*:* | or1k*:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    padre:Linux:*:*)
+	GUESS=sparc-unknown-linux-$LIBC
+	;;
+    parisc64:Linux:*:* | hppa64:Linux:*:*)
+	GUESS=hppa64-unknown-linux-$LIBC
+	;;
+    parisc:Linux:*:* | hppa:Linux:*:*)
+	# Look for CPU level
+	case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
+	  PA7*) GUESS=hppa1.1-unknown-linux-$LIBC ;;
+	  PA8*) GUESS=hppa2.0-unknown-linux-$LIBC ;;
+	  *)    GUESS=hppa-unknown-linux-$LIBC ;;
+	esac
+	;;
+    ppc64:Linux:*:*)
+	GUESS=powerpc64-unknown-linux-$LIBC
+	;;
+    ppc:Linux:*:*)
+	GUESS=powerpc-unknown-linux-$LIBC
+	;;
+    ppc64le:Linux:*:*)
+	GUESS=powerpc64le-unknown-linux-$LIBC
+	;;
+    ppcle:Linux:*:*)
+	GUESS=powerpcle-unknown-linux-$LIBC
+	;;
+    riscv64gc:Linux:*:*)
+	GUESS=riscv64-unknown-linux-$LIBC
+	;;
+    riscv32:Linux:*:* | riscv32be:Linux:*:* | riscv64:Linux:*:* | riscv64be:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    s390:Linux:*:* | s390x:Linux:*:*)
+	GUESS=$UNAME_MACHINE-ibm-linux-$LIBC
+	;;
+    sh64*:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    sh*:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    sparc:Linux:*:* | sparc64:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    tile*:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    vax:Linux:*:*)
+	GUESS=$UNAME_MACHINE-dec-linux-$LIBC
+	;;
+    x86_64:Linux:*:*)
+	set_cc_for_build
+	LIBCABI=$LIBC
+	if test "$CC_FOR_BUILD" != no_compiler_found; then
+	    if (echo '#ifdef __ILP32__'; echo IS_X32; echo '#endif') | \
+		(CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
+		grep IS_X32 >/dev/null
+	    then
+		LIBCABI=${LIBC}x32
+	    fi
+	fi
+	GUESS=$UNAME_MACHINE-pc-linux-$LIBCABI
+	;;
+    xtensa*:Linux:*:*)
+	GUESS=$UNAME_MACHINE-unknown-linux-$LIBC
+	;;
+    i*86:DYNIX/ptx:4*:*)
+	# ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
+	# earlier versions are messed up and put the nodename in both
+	# sysname and nodename.
+	GUESS=i386-sequent-sysv4
+	;;
+    i*86:UNIX_SV:4.2MP:2.*)
+	# Unixware is an offshoot of SVR4, but it has its own version
+	# number series starting with 2...
+	# I am not positive that other SVR4 systems won't match this,
+	# I just have to hope.  -- rms.
+	# Use sysv4.2uw... so that sysv4* matches it.
+	GUESS=$UNAME_MACHINE-pc-sysv4.2uw$UNAME_VERSION
+	;;
+    i*86:OS/2:*:*)
+	# If we were able to find `uname', then EMX Unix compatibility
+	# is probably installed.
+	GUESS=$UNAME_MACHINE-pc-os2-emx
+	;;
+    i*86:XTS-300:*:STOP)
+	GUESS=$UNAME_MACHINE-unknown-stop
+	;;
+    i*86:atheos:*:*)
+	GUESS=$UNAME_MACHINE-unknown-atheos
+	;;
+    i*86:syllable:*:*)
+	GUESS=$UNAME_MACHINE-pc-syllable
+	;;
+    i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*)
+	GUESS=i386-unknown-lynxos$UNAME_RELEASE
+	;;
+    i*86:*DOS:*:*)
+	GUESS=$UNAME_MACHINE-pc-msdosdjgpp
+	;;
+    i*86:*:4.*:*)
+	UNAME_REL=`echo "$UNAME_RELEASE" | sed 's/\/MP$//'`
+	if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
+		GUESS=$UNAME_MACHINE-univel-sysv$UNAME_REL
+	else
+		GUESS=$UNAME_MACHINE-pc-sysv$UNAME_REL
+	fi
+	;;
+    i*86:*:5:[678]*)
+	# UnixWare 7.x, OpenUNIX and OpenServer 6.
+	case `/bin/uname -X | grep "^Machine"` in
+	    *486*)	     UNAME_MACHINE=i486 ;;
+	    *Pentium)	     UNAME_MACHINE=i586 ;;
+	    *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
+	esac
+	GUESS=$UNAME_MACHINE-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}
+	;;
+    i*86:*:3.2:*)
+	if test -f /usr/options/cb.name; then
+		UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
+		GUESS=$UNAME_MACHINE-pc-isc$UNAME_REL
+	elif /bin/uname -X 2>/dev/null >/dev/null ; then
+		UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
+		(/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
+		(/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
+			&& UNAME_MACHINE=i586
+		(/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		(/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
+			&& UNAME_MACHINE=i686
+		GUESS=$UNAME_MACHINE-pc-sco$UNAME_REL
+	else
+		GUESS=$UNAME_MACHINE-pc-sysv32
+	fi
+	;;
+    pc:*:*:*)
+	# Left here for compatibility:
+	# uname -m prints for DJGPP always 'pc', but it prints nothing about
+	# the processor, so we play safe by assuming i586.
+	# Note: whatever this is, it MUST be the same as what config.sub
+	# prints for the "djgpp" host, or else GDB configure will decide that
+	# this is a cross-build.
+	GUESS=i586-pc-msdosdjgpp
+	;;
+    Intel:Mach:3*:*)
+	GUESS=i386-pc-mach3
+	;;
+    paragon:*:*:*)
+	GUESS=i860-intel-osf1
+	;;
+    i860:*:4.*:*) # i860-SVR4
+	if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
+	  GUESS=i860-stardent-sysv$UNAME_RELEASE    # Stardent Vistra i860-SVR4
+	else # Add other i860-SVR4 vendors below as they are discovered.
+	  GUESS=i860-unknown-sysv$UNAME_RELEASE     # Unknown i860-SVR4
+	fi
+	;;
+    mini*:CTIX:SYS*5:*)
+	# "miniframe"
+	GUESS=m68010-convergent-sysv
+	;;
+    mc68k:UNIX:SYSTEM5:3.51m)
+	GUESS=m68k-convergent-sysv
+	;;
+    M680?0:D-NIX:5.3:*)
+	GUESS=m68k-diab-dnix
+	;;
+    M68*:*:R3V[5678]*:*)
+	test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
+    3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
+	OS_REL=''
+	test -r /etc/.relid \
+	&& OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	  && { echo i486-ncr-sysv4.3"$OS_REL"; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+	  && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;;
+    3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	  && { echo i486-ncr-sysv4; exit; } ;;
+    NCR*:*:4.2:* | MPRAS*:*:4.2:*)
+	OS_REL='.3'
+	test -r /etc/.relid \
+	    && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
+	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+	    && { echo i486-ncr-sysv4.3"$OS_REL"; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+	    && { echo i586-ncr-sysv4.3"$OS_REL"; exit; }
+	/bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \
+	    && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;;
+    m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
+	GUESS=m68k-unknown-lynxos$UNAME_RELEASE
+	;;
+    mc68030:UNIX_System_V:4.*:*)
+	GUESS=m68k-atari-sysv4
+	;;
+    TSUNAMI:LynxOS:2.*:*)
+	GUESS=sparc-unknown-lynxos$UNAME_RELEASE
+	;;
+    rs6000:LynxOS:2.*:*)
+	GUESS=rs6000-unknown-lynxos$UNAME_RELEASE
+	;;
+    PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*)
+	GUESS=powerpc-unknown-lynxos$UNAME_RELEASE
+	;;
+    SM[BE]S:UNIX_SV:*:*)
+	GUESS=mips-dde-sysv$UNAME_RELEASE
+	;;
+    RM*:ReliantUNIX-*:*:*)
+	GUESS=mips-sni-sysv4
+	;;
+    RM*:SINIX-*:*:*)
+	GUESS=mips-sni-sysv4
+	;;
+    *:SINIX-*:*:*)
+	if uname -p 2>/dev/null >/dev/null ; then
+		UNAME_MACHINE=`(uname -p) 2>/dev/null`
+		GUESS=$UNAME_MACHINE-sni-sysv4
+	else
+		GUESS=ns32k-sni-sysv
+	fi
+	;;
+    PENTIUM:*:4.0*:*)	# Unisys `ClearPath HMP IX 4000' SVR4/MP effort
+			# says <Richard.M.Bartel@ccMail.Census.GOV>
+	GUESS=i586-unisys-sysv4
+	;;
+    *:UNIX_System_V:4*:FTX*)
+	# From Gerald Hewes <hewes@openmarket.com>.
+	# How about differentiating between stratus architectures? -djm
+	GUESS=hppa1.1-stratus-sysv4
+	;;
+    *:*:*:FTX*)
+	# From seanf@swdc.stratus.com.
+	GUESS=i860-stratus-sysv4
+	;;
+    i*86:VOS:*:*)
+	# From Paul.Green@stratus.com.
+	GUESS=$UNAME_MACHINE-stratus-vos
+	;;
+    *:VOS:*:*)
+	# From Paul.Green@stratus.com.
+	GUESS=hppa1.1-stratus-vos
+	;;
+    mc68*:A/UX:*:*)
+	GUESS=m68k-apple-aux$UNAME_RELEASE
+	;;
+    news*:NEWS-OS:6*:*)
+	GUESS=mips-sony-newsos6
+	;;
+    R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
+	if test -d /usr/nec; then
+		GUESS=mips-nec-sysv$UNAME_RELEASE
+	else
+		GUESS=mips-unknown-sysv$UNAME_RELEASE
+	fi
+	;;
+    BeBox:BeOS:*:*)	# BeOS running on hardware made by Be, PPC only.
+	GUESS=powerpc-be-beos
+	;;
+    BeMac:BeOS:*:*)	# BeOS running on Mac or Mac clone, PPC only.
+	GUESS=powerpc-apple-beos
+	;;
+    BePC:BeOS:*:*)	# BeOS running on Intel PC compatible.
+	GUESS=i586-pc-beos
+	;;
+    BePC:Haiku:*:*)	# Haiku running on Intel PC compatible.
+	GUESS=i586-pc-haiku
+	;;
+    x86_64:Haiku:*:*)
+	GUESS=x86_64-unknown-haiku
+	;;
+    SX-4:SUPER-UX:*:*)
+	GUESS=sx4-nec-superux$UNAME_RELEASE
+	;;
+    SX-5:SUPER-UX:*:*)
+	GUESS=sx5-nec-superux$UNAME_RELEASE
+	;;
+    SX-6:SUPER-UX:*:*)
+	GUESS=sx6-nec-superux$UNAME_RELEASE
+	;;
+    SX-7:SUPER-UX:*:*)
+	GUESS=sx7-nec-superux$UNAME_RELEASE
+	;;
+    SX-8:SUPER-UX:*:*)
+	GUESS=sx8-nec-superux$UNAME_RELEASE
+	;;
+    SX-8R:SUPER-UX:*:*)
+	GUESS=sx8r-nec-superux$UNAME_RELEASE
+	;;
+    SX-ACE:SUPER-UX:*:*)
+	GUESS=sxace-nec-superux$UNAME_RELEASE
+	;;
+    Power*:Rhapsody:*:*)
+	GUESS=powerpc-apple-rhapsody$UNAME_RELEASE
+	;;
+    *:Rhapsody:*:*)
+	GUESS=$UNAME_MACHINE-apple-rhapsody$UNAME_RELEASE
+	;;
+    arm64:Darwin:*:*)
+	GUESS=aarch64-apple-darwin$UNAME_RELEASE
+	;;
+    *:Darwin:*:*)
+	UNAME_PROCESSOR=`uname -p`
+	case $UNAME_PROCESSOR in
+	    unknown) UNAME_PROCESSOR=powerpc ;;
+	esac
+	if command -v xcode-select > /dev/null 2> /dev/null && \
+		! xcode-select --print-path > /dev/null 2> /dev/null ; then
+	    # Avoid executing cc if there is no toolchain installed as
+	    # cc will be a stub that puts up a graphical alert
+	    # prompting the user to install developer tools.
+	    CC_FOR_BUILD=no_compiler_found
+	else
+	    set_cc_for_build
+	fi
+	if test "$CC_FOR_BUILD" != no_compiler_found; then
+	    if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
+		   (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
+		   grep IS_64BIT_ARCH >/dev/null
+	    then
+		case $UNAME_PROCESSOR in
+		    i386) UNAME_PROCESSOR=x86_64 ;;
+		    powerpc) UNAME_PROCESSOR=powerpc64 ;;
+		esac
+	    fi
+	    # On 10.4-10.6 one might compile for PowerPC via gcc -arch ppc
+	    if (echo '#ifdef __POWERPC__'; echo IS_PPC; echo '#endif') | \
+		   (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
+		   grep IS_PPC >/dev/null
+	    then
+		UNAME_PROCESSOR=powerpc
+	    fi
+	elif test "$UNAME_PROCESSOR" = i386 ; then
+	    # uname -m returns i386 or x86_64
+	    UNAME_PROCESSOR=$UNAME_MACHINE
+	fi
+	GUESS=$UNAME_PROCESSOR-apple-darwin$UNAME_RELEASE
+	;;
+    *:procnto*:*:* | *:QNX:[0123456789]*:*)
+	UNAME_PROCESSOR=`uname -p`
+	if test "$UNAME_PROCESSOR" = x86; then
+		UNAME_PROCESSOR=i386
+		UNAME_MACHINE=pc
+	fi
+	GUESS=$UNAME_PROCESSOR-$UNAME_MACHINE-nto-qnx$UNAME_RELEASE
+	;;
+    *:QNX:*:4*)
+	GUESS=i386-pc-qnx
+	;;
+    NEO-*:NONSTOP_KERNEL:*:*)
+	GUESS=neo-tandem-nsk$UNAME_RELEASE
+	;;
+    NSE-*:NONSTOP_KERNEL:*:*)
+	GUESS=nse-tandem-nsk$UNAME_RELEASE
+	;;
+    NSR-*:NONSTOP_KERNEL:*:*)
+	GUESS=nsr-tandem-nsk$UNAME_RELEASE
+	;;
+    NSV-*:NONSTOP_KERNEL:*:*)
+	GUESS=nsv-tandem-nsk$UNAME_RELEASE
+	;;
+    NSX-*:NONSTOP_KERNEL:*:*)
+	GUESS=nsx-tandem-nsk$UNAME_RELEASE
+	;;
+    *:NonStop-UX:*:*)
+	GUESS=mips-compaq-nonstopux
+	;;
+    BS2000:POSIX*:*:*)
+	GUESS=bs2000-siemens-sysv
+	;;
+    DS/*:UNIX_System_V:*:*)
+	GUESS=$UNAME_MACHINE-$UNAME_SYSTEM-$UNAME_RELEASE
+	;;
+    *:Plan9:*:*)
+	# "uname -m" is not consistent, so use $cputype instead. 386
+	# is converted to i386 for consistency with other x86
+	# operating systems.
+	if test "${cputype-}" = 386; then
+	    UNAME_MACHINE=i386
+	elif test "x${cputype-}" != x; then
+	    UNAME_MACHINE=$cputype
+	fi
+	GUESS=$UNAME_MACHINE-unknown-plan9
+	;;
+    *:TOPS-10:*:*)
+	GUESS=pdp10-unknown-tops10
+	;;
+    *:TENEX:*:*)
+	GUESS=pdp10-unknown-tenex
+	;;
+    KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
+	GUESS=pdp10-dec-tops20
+	;;
+    XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
+	GUESS=pdp10-xkl-tops20
+	;;
+    *:TOPS-20:*:*)
+	GUESS=pdp10-unknown-tops20
+	;;
+    *:ITS:*:*)
+	GUESS=pdp10-unknown-its
+	;;
+    SEI:*:*:SEIUX)
+	GUESS=mips-sei-seiux$UNAME_RELEASE
+	;;
+    *:DragonFly:*:*)
+	DRAGONFLY_REL=`echo "$UNAME_RELEASE" | sed -e 's/[-(].*//'`
+	GUESS=$UNAME_MACHINE-unknown-dragonfly$DRAGONFLY_REL
+	;;
+    *:*VMS:*:*)
+	UNAME_MACHINE=`(uname -p) 2>/dev/null`
+	case $UNAME_MACHINE in
+	    A*) GUESS=alpha-dec-vms ;;
+	    I*) GUESS=ia64-dec-vms ;;
+	    V*) GUESS=vax-dec-vms ;;
+	esac ;;
+    *:XENIX:*:SysV)
+	GUESS=i386-pc-xenix
+	;;
+    i*86:skyos:*:*)
+	SKYOS_REL=`echo "$UNAME_RELEASE" | sed -e 's/ .*$//'`
+	GUESS=$UNAME_MACHINE-pc-skyos$SKYOS_REL
+	;;
+    i*86:rdos:*:*)
+	GUESS=$UNAME_MACHINE-pc-rdos
+	;;
+    i*86:Fiwix:*:*)
+	GUESS=$UNAME_MACHINE-pc-fiwix
+	;;
+    *:AROS:*:*)
+	GUESS=$UNAME_MACHINE-unknown-aros
+	;;
+    x86_64:VMkernel:*:*)
+	GUESS=$UNAME_MACHINE-unknown-esx
+	;;
+    amd64:Isilon\ OneFS:*:*)
+	GUESS=x86_64-unknown-onefs
+	;;
+    *:Unleashed:*:*)
+	GUESS=$UNAME_MACHINE-unknown-unleashed$UNAME_RELEASE
+	;;
+esac
+
+# Do we have a guess based on uname results?
+# If any arm of the dispatch above assigned GUESS, print it and stop:
+# the uname tuple alone was enough to identify the system.
+if test "x$GUESS" != x; then
+    echo "$GUESS"
+    exit
+fi
+
+# No uname command or uname output not recognized.
+# Fall back to compiling a feature-test C program: set_cc_for_build
+# locates a usable build compiler (presumably setting CC_FOR_BUILD and
+# $dummy used below -- its definition is outside this hunk, so confirm
+# against the full script).
+set_cc_for_build
+cat > "$dummy.c" <<EOF
+#ifdef _SEQUENT_
+#include <sys/types.h>
+#include <sys/utsname.h>
+#endif
+#if defined(ultrix) || defined(_ultrix) || defined(__ultrix) || defined(__ultrix__)
+#if defined (vax) || defined (__vax) || defined (__vax__) || defined(mips) || defined(__mips) || defined(__mips__) || defined(MIPS) || defined(__MIPS__)
+#include <signal.h>
+#if defined(_SIZE_T_) || defined(SIGLOST)
+#include <sys/utsname.h>
+#endif
+#endif
+#endif
+main ()
+{
+#if defined (sony)
+#if defined (MIPSEB)
+  /* BFD wants "bsd" instead of "newsos".  Perhaps BFD should be changed,
+     I don't know....  */
+  printf ("mips-sony-bsd\n"); exit (0);
+#else
+#include <sys/param.h>
+  printf ("m68k-sony-newsos%s\n",
+#ifdef NEWSOS4
+  "4"
+#else
+  ""
+#endif
+  ); exit (0);
+#endif
+#endif
+
+#if defined (NeXT)
+#if !defined (__ARCHITECTURE__)
+#define __ARCHITECTURE__ "m68k"
+#endif
+  int version;
+  version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
+  if (version < 4)
+    printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
+  else
+    printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
+  exit (0);
+#endif
+
+#if defined (MULTIMAX) || defined (n16)
+#if defined (UMAXV)
+  printf ("ns32k-encore-sysv\n"); exit (0);
+#else
+#if defined (CMU)
+  printf ("ns32k-encore-mach\n"); exit (0);
+#else
+  printf ("ns32k-encore-bsd\n"); exit (0);
+#endif
+#endif
+#endif
+
+#if defined (__386BSD__)
+  printf ("i386-pc-bsd\n"); exit (0);
+#endif
+
+#if defined (sequent)
+#if defined (i386)
+  printf ("i386-sequent-dynix\n"); exit (0);
+#endif
+#if defined (ns32000)
+  printf ("ns32k-sequent-dynix\n"); exit (0);
+#endif
+#endif
+
+#if defined (_SEQUENT_)
+  struct utsname un;
+
+  uname(&un);
+  if (strncmp(un.version, "V2", 2) == 0) {
+    printf ("i386-sequent-ptx2\n"); exit (0);
+  }
+  if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
+    printf ("i386-sequent-ptx1\n"); exit (0);
+  }
+  printf ("i386-sequent-ptx\n"); exit (0);
+#endif
+
+#if defined (vax)
+#if !defined (ultrix)
+#include <sys/param.h>
+#if defined (BSD)
+#if BSD == 43
+  printf ("vax-dec-bsd4.3\n"); exit (0);
+#else
+#if BSD == 199006
+  printf ("vax-dec-bsd4.3reno\n"); exit (0);
+#else
+  printf ("vax-dec-bsd\n"); exit (0);
+#endif
+#endif
+#else
+  printf ("vax-dec-bsd\n"); exit (0);
+#endif
+#else
+#if defined(_SIZE_T_) || defined(SIGLOST)
+  struct utsname un;
+  uname (&un);
+  printf ("vax-dec-ultrix%s\n", un.release); exit (0);
+#else
+  printf ("vax-dec-ultrix\n"); exit (0);
+#endif
+#endif
+#endif
+#if defined(ultrix) || defined(_ultrix) || defined(__ultrix) || defined(__ultrix__)
+#if defined(mips) || defined(__mips) || defined(__mips__) || defined(MIPS) || defined(__MIPS__)
+#if defined(_SIZE_T_) || defined(SIGLOST)
+  struct utsname *un;
+  uname (&un);
+  printf ("mips-dec-ultrix%s\n", un.release); exit (0);
+#else
+  printf ("mips-dec-ultrix\n"); exit (0);
+#endif
+#endif
+#endif
+
+#if defined (alliant) && defined (i860)
+  printf ("i860-alliant-bsd\n"); exit (0);
+#endif
+
+  exit (1);
+}
+EOF
+
+$CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null && SYSTEM_NAME=`"$dummy"` &&
+	{ echo "$SYSTEM_NAME"; exit; }
+
+# Apollos put the system type in the environment.
+test -d /usr/apollo && { echo "$ISP-apollo-$SYSTYPE"; exit; }
+
+echo "$0: unable to guess system type" >&2
+
+case $UNAME_MACHINE:$UNAME_SYSTEM in
+    mips:Linux | mips64:Linux)
+	# If we got here on MIPS GNU/Linux, output extra information.
+	cat >&2 <<EOF
+
+NOTE: MIPS GNU/Linux systems require a C compiler to fully recognize
+the system type. Please install a C compiler and try again.
+EOF
+	;;
+esac
+
+cat >&2 <<EOF
+
+This script (version $timestamp), has failed to recognize the
+operating system you are using. If your script is old, overwrite *all*
+copies of config.guess and config.sub with the latest versions from:
+
+  https://git.savannah.gnu.org/cgit/config.git/plain/config.guess
+and
+  https://git.savannah.gnu.org/cgit/config.git/plain/config.sub
+EOF
+
+our_year=`echo $timestamp | sed 's,-.*,,'`
+thisyear=`date +%Y`
+# shellcheck disable=SC2003
+script_age=`expr "$thisyear" - "$our_year"`
+if test "$script_age" -lt 3 ; then
+   cat >&2 <<EOF
+
+If $0 has already been updated, send the following data and any
+information you think might be pertinent to config-patches@gnu.org to
+provide the necessary information to handle your system.
+
+config.guess timestamp = $timestamp
+
+uname -m = `(uname -m) 2>/dev/null || echo unknown`
+uname -r = `(uname -r) 2>/dev/null || echo unknown`
+uname -s = `(uname -s) 2>/dev/null || echo unknown`
+uname -v = `(uname -v) 2>/dev/null || echo unknown`
+
+/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
+/bin/uname -X     = `(/bin/uname -X) 2>/dev/null`
+
+hostinfo               = `(hostinfo) 2>/dev/null`
+/bin/universe          = `(/bin/universe) 2>/dev/null`
+/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null`
+/bin/arch              = `(/bin/arch) 2>/dev/null`
+/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null`
+/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
+
+UNAME_MACHINE = "$UNAME_MACHINE"
+UNAME_RELEASE = "$UNAME_RELEASE"
+UNAME_SYSTEM  = "$UNAME_SYSTEM"
+UNAME_VERSION = "$UNAME_VERSION"
+EOF
+fi
+
+exit 1
+
+# Local variables:
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/firefox-102.15.0-new/build/autoconf/config.guess
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/firefox-102.15.0-new/build/autoconf/config.sub
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/firefox-102.15.0-new/build/autoconf/config.sub	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/firefox-102.15.0-new/build/autoconf/config.sub	(revision 228)
@@ -0,0 +1,1893 @@
+#! /bin/sh
+# Configuration validation subroutine script.
+#   Copyright 1992-2022 Free Software Foundation, Inc.
+
+# shellcheck disable=SC2006,SC2268 # see below for rationale
+
+timestamp='2022-01-03'
+
+# This file is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, see <https://www.gnu.org/licenses/>.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that
+# program.  This Exception is an additional permission under section 7
+# of the GNU General Public License, version 3 ("GPLv3").
+
+
+# Please send patches to <config-patches@gnu.org>.
+#
+# Configuration subroutine to validate and canonicalize a configuration type.
+# Supply the specified configuration type as an argument.
+# If it is invalid, we print an error message on stderr and exit with code 1.
+# Otherwise, we print the canonical config type on stdout and succeed.
+
+# You can get the latest version of this script from:
+# https://git.savannah.gnu.org/cgit/config.git/plain/config.sub
+
+# This file is supposed to be the same for all GNU packages
+# and recognize all the CPU types, system types and aliases
+# that are meaningful with *any* GNU software.
+# Each package is responsible for reporting which valid configurations
+# it does not support.  The user should be able to distinguish
+# a failure to support a valid configuration from a meaningless
+# configuration.
+
+# The goal of this file is to map all the various variations of a given
+# machine specification into a single specification in the form:
+#	CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
+# or in some cases, the newer four-part form:
+#	CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
+# It is wrong to echo any other type of specification.
+
+# The "shellcheck disable" line above the timestamp inhibits complaints
+# about features and limitations of the classic Bourne shell that were
+# superseded or lifted in POSIX.  However, this script identifies a wide
+# variety of pre-POSIX systems that do not have POSIX shells at all, and
+# even some reasonably current systems (Solaris 10 as case-in-point) still
+# have a pre-POSIX /bin/sh.
+
+me=`echo "$0" | sed -e 's,.*/,,'`
+
+usage="\
+Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS
+
+Canonicalize a configuration name.
+
+Options:
+  -h, --help         print this help, then exit
+  -t, --time-stamp   print date of last modification, then exit
+  -v, --version      print version number, then exit
+
+Report bugs and patches to <config-patches@gnu.org>."
+
+version="\
+GNU config.sub ($timestamp)
+
+Copyright 1992-2022 Free Software Foundation, Inc.
+
+This is free software; see the source for copying conditions.  There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+
+help="
+Try \`$me --help' for more information."
+
+# Parse command line
+while test $# -gt 0 ; do
+  case $1 in
+    --time-stamp | --time* | -t )
+       echo "$timestamp" ; exit ;;
+    --version | -v )
+       echo "$version" ; exit ;;
+    --help | --h* | -h )
+       echo "$usage"; exit ;;
+    -- )     # Stop option processing
+       shift; break ;;
+    - )	# Use stdin as input.
+       break ;;
+    -* )
+       echo "$me: invalid option $1$help" >&2
+       exit 1 ;;
+
+    *local*)
+       # First pass through any local machine types.
+       echo "$1"
+       exit ;;
+
+    * )
+       break ;;
+  esac
+done
+
+case $# in
+ 0) echo "$me: missing argument$help" >&2
+    exit 1;;
+ 1) ;;
+ *) echo "$me: too many arguments$help" >&2
+    exit 1;;
+esac
+
+# Split fields of configuration type
+# shellcheck disable=SC2162
+saved_IFS=$IFS
+IFS="-" read field1 field2 field3 field4 <<EOF
+$1
+EOF
+IFS=$saved_IFS
+
+# Separate into logical components for further validation
+case $1 in
+	*-*-*-*-*)
+		echo Invalid configuration \`"$1"\': more than four components >&2
+		exit 1
+		;;
+	*-*-*-*)
+		basic_machine=$field1-$field2
+		basic_os=$field3-$field4
+		;;
+	*-*-*)
+		# Ambiguous whether COMPANY is present, or skipped and KERNEL-OS is two
+		# parts
+		maybe_os=$field2-$field3
+		case $maybe_os in
+			nto-qnx* | linux-* | uclinux-uclibc* \
+			| uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* \
+			| netbsd*-eabi* | kopensolaris*-gnu* | cloudabi*-eabi* \
+			| storm-chaos* | os2-emx* | rtmk-nova*)
+				basic_machine=$field1
+				basic_os=$maybe_os
+				;;
+			android-linux)
+				basic_machine=$field1-unknown
+				basic_os=linux-android
+				;;
+			*)
+				basic_machine=$field1-$field2
+				basic_os=$field3
+				;;
+		esac
+		;;
+	*-*)
+		# A lone config we happen to match not fitting any pattern
+		case $field1-$field2 in
+			decstation-3100)
+				basic_machine=mips-dec
+				basic_os=
+				;;
+			*-*)
+				# Second component is usually, but not always the OS
+				case $field2 in
+					# Prevent following clause from handling this valid os
+					sun*os*)
+						basic_machine=$field1
+						basic_os=$field2
+						;;
+					zephyr*)
+						basic_machine=$field1-unknown
+						basic_os=$field2
+						;;
+					# Manufacturers
+					dec* | mips* | sequent* | encore* | pc533* | sgi* | sony* \
+					| att* | 7300* | 3300* | delta* | motorola* | sun[234]* \
+					| unicom* | ibm* | next | hp | isi* | apollo | altos* \
+					| convergent* | ncr* | news | 32* | 3600* | 3100* \
+					| hitachi* | c[123]* | convex* | sun | crds | omron* | dg \
+					| ultra | tti* | harris | dolphin | highlevel | gould \
+					| cbm | ns | masscomp | apple | axis | knuth | cray \
+					| microblaze* | sim | cisco \
+					| oki | wec | wrs | winbond)
+						basic_machine=$field1-$field2
+						basic_os=
+						;;
+					*)
+						basic_machine=$field1
+						basic_os=$field2
+						;;
+				esac
+			;;
+		esac
+		;;
+	*)
+		# Convert single-component short-hands not valid as part of
+		# multi-component configurations.
+		case $field1 in
+			386bsd)
+				basic_machine=i386-pc
+				basic_os=bsd
+				;;
+			a29khif)
+				basic_machine=a29k-amd
+				basic_os=udi
+				;;
+			adobe68k)
+				basic_machine=m68010-adobe
+				basic_os=scout
+				;;
+			alliant)
+				basic_machine=fx80-alliant
+				basic_os=
+				;;
+			altos | altos3068)
+				basic_machine=m68k-altos
+				basic_os=
+				;;
+			am29k)
+				basic_machine=a29k-none
+				basic_os=bsd
+				;;
+			amdahl)
+				basic_machine=580-amdahl
+				basic_os=sysv
+				;;
+			amiga)
+				basic_machine=m68k-unknown
+				basic_os=
+				;;
+			amigaos | amigados)
+				basic_machine=m68k-unknown
+				basic_os=amigaos
+				;;
+			amigaunix | amix)
+				basic_machine=m68k-unknown
+				basic_os=sysv4
+				;;
+			apollo68)
+				basic_machine=m68k-apollo
+				basic_os=sysv
+				;;
+			apollo68bsd)
+				basic_machine=m68k-apollo
+				basic_os=bsd
+				;;
+			aros)
+				basic_machine=i386-pc
+				basic_os=aros
+				;;
+			aux)
+				basic_machine=m68k-apple
+				basic_os=aux
+				;;
+			balance)
+				basic_machine=ns32k-sequent
+				basic_os=dynix
+				;;
+			blackfin)
+				basic_machine=bfin-unknown
+				basic_os=linux
+				;;
+			cegcc)
+				basic_machine=arm-unknown
+				basic_os=cegcc
+				;;
+			convex-c1)
+				basic_machine=c1-convex
+				basic_os=bsd
+				;;
+			convex-c2)
+				basic_machine=c2-convex
+				basic_os=bsd
+				;;
+			convex-c32)
+				basic_machine=c32-convex
+				basic_os=bsd
+				;;
+			convex-c34)
+				basic_machine=c34-convex
+				basic_os=bsd
+				;;
+			convex-c38)
+				basic_machine=c38-convex
+				basic_os=bsd
+				;;
+			cray)
+				basic_machine=j90-cray
+				basic_os=unicos
+				;;
+			crds | unos)
+				basic_machine=m68k-crds
+				basic_os=
+				;;
+			da30)
+				basic_machine=m68k-da30
+				basic_os=
+				;;
+			decstation | pmax | pmin | dec3100 | decstatn)
+				basic_machine=mips-dec
+				basic_os=
+				;;
+			delta88)
+				basic_machine=m88k-motorola
+				basic_os=sysv3
+				;;
+			dicos)
+				basic_machine=i686-pc
+				basic_os=dicos
+				;;
+			djgpp)
+				basic_machine=i586-pc
+				basic_os=msdosdjgpp
+				;;
+			ebmon29k)
+				basic_machine=a29k-amd
+				basic_os=ebmon
+				;;
+			es1800 | OSE68k | ose68k | ose | OSE)
+				basic_machine=m68k-ericsson
+				basic_os=ose
+				;;
+			gmicro)
+				basic_machine=tron-gmicro
+				basic_os=sysv
+				;;
+			go32)
+				basic_machine=i386-pc
+				basic_os=go32
+				;;
+			h8300hms)
+				basic_machine=h8300-hitachi
+				basic_os=hms
+				;;
+			h8300xray)
+				basic_machine=h8300-hitachi
+				basic_os=xray
+				;;
+			h8500hms)
+				basic_machine=h8500-hitachi
+				basic_os=hms
+				;;
+			harris)
+				basic_machine=m88k-harris
+				basic_os=sysv3
+				;;
+			hp300 | hp300hpux)
+				basic_machine=m68k-hp
+				basic_os=hpux
+				;;
+			hp300bsd)
+				basic_machine=m68k-hp
+				basic_os=bsd
+				;;
+			hppaosf)
+				basic_machine=hppa1.1-hp
+				basic_os=osf
+				;;
+			hppro)
+				basic_machine=hppa1.1-hp
+				basic_os=proelf
+				;;
+			i386mach)
+				basic_machine=i386-mach
+				basic_os=mach
+				;;
+			isi68 | isi)
+				basic_machine=m68k-isi
+				basic_os=sysv
+				;;
+			m68knommu)
+				basic_machine=m68k-unknown
+				basic_os=linux
+				;;
+			magnum | m3230)
+				basic_machine=mips-mips
+				basic_os=sysv
+				;;
+			merlin)
+				basic_machine=ns32k-utek
+				basic_os=sysv
+				;;
+			mingw64)
+				basic_machine=x86_64-pc
+				basic_os=mingw64
+				;;
+			mingw32)
+				basic_machine=i686-pc
+				basic_os=mingw32
+				;;
+			mingw32ce)
+				basic_machine=arm-unknown
+				basic_os=mingw32ce
+				;;
+			monitor)
+				basic_machine=m68k-rom68k
+				basic_os=coff
+				;;
+			morphos)
+				basic_machine=powerpc-unknown
+				basic_os=morphos
+				;;
+			moxiebox)
+				basic_machine=moxie-unknown
+				basic_os=moxiebox
+				;;
+			msdos)
+				basic_machine=i386-pc
+				basic_os=msdos
+				;;
+			msys)
+				basic_machine=i686-pc
+				basic_os=msys
+				;;
+			mvs)
+				basic_machine=i370-ibm
+				basic_os=mvs
+				;;
+			nacl)
+				basic_machine=le32-unknown
+				basic_os=nacl
+				;;
+			ncr3000)
+				basic_machine=i486-ncr
+				basic_os=sysv4
+				;;
+			netbsd386)
+				basic_machine=i386-pc
+				basic_os=netbsd
+				;;
+			netwinder)
+				basic_machine=armv4l-rebel
+				basic_os=linux
+				;;
+			news | news700 | news800 | news900)
+				basic_machine=m68k-sony
+				basic_os=newsos
+				;;
+			news1000)
+				basic_machine=m68030-sony
+				basic_os=newsos
+				;;
+			necv70)
+				basic_machine=v70-nec
+				basic_os=sysv
+				;;
+			nh3000)
+				basic_machine=m68k-harris
+				basic_os=cxux
+				;;
+			nh[45]000)
+				basic_machine=m88k-harris
+				basic_os=cxux
+				;;
+			nindy960)
+				basic_machine=i960-intel
+				basic_os=nindy
+				;;
+			mon960)
+				basic_machine=i960-intel
+				basic_os=mon960
+				;;
+			nonstopux)
+				basic_machine=mips-compaq
+				basic_os=nonstopux
+				;;
+			os400)
+				basic_machine=powerpc-ibm
+				basic_os=os400
+				;;
+			OSE68000 | ose68000)
+				basic_machine=m68000-ericsson
+				basic_os=ose
+				;;
+			os68k)
+				basic_machine=m68k-none
+				basic_os=os68k
+				;;
+			paragon)
+				basic_machine=i860-intel
+				basic_os=osf
+				;;
+			parisc)
+				basic_machine=hppa-unknown
+				basic_os=linux
+				;;
+			psp)
+				basic_machine=mipsallegrexel-sony
+				basic_os=psp
+				;;
+			pw32)
+				basic_machine=i586-unknown
+				basic_os=pw32
+				;;
+			rdos | rdos64)
+				basic_machine=x86_64-pc
+				basic_os=rdos
+				;;
+			rdos32)
+				basic_machine=i386-pc
+				basic_os=rdos
+				;;
+			rom68k)
+				basic_machine=m68k-rom68k
+				basic_os=coff
+				;;
+			sa29200)
+				basic_machine=a29k-amd
+				basic_os=udi
+				;;
+			sei)
+				basic_machine=mips-sei
+				basic_os=seiux
+				;;
+			sequent)
+				basic_machine=i386-sequent
+				basic_os=
+				;;
+			sps7)
+				basic_machine=m68k-bull
+				basic_os=sysv2
+				;;
+			st2000)
+				basic_machine=m68k-tandem
+				basic_os=
+				;;
+			stratus)
+				basic_machine=i860-stratus
+				basic_os=sysv4
+				;;
+			sun2)
+				basic_machine=m68000-sun
+				basic_os=
+				;;
+			sun2os3)
+				basic_machine=m68000-sun
+				basic_os=sunos3
+				;;
+			sun2os4)
+				basic_machine=m68000-sun
+				basic_os=sunos4
+				;;
+			sun3)
+				basic_machine=m68k-sun
+				basic_os=
+				;;
+			sun3os3)
+				basic_machine=m68k-sun
+				basic_os=sunos3
+				;;
+			sun3os4)
+				basic_machine=m68k-sun
+				basic_os=sunos4
+				;;
+			sun4)
+				basic_machine=sparc-sun
+				basic_os=
+				;;
+			sun4os3)
+				basic_machine=sparc-sun
+				basic_os=sunos3
+				;;
+			sun4os4)
+				basic_machine=sparc-sun
+				basic_os=sunos4
+				;;
+			sun4sol2)
+				basic_machine=sparc-sun
+				basic_os=solaris2
+				;;
+			sun386 | sun386i | roadrunner)
+				basic_machine=i386-sun
+				basic_os=
+				;;
+			sv1)
+				basic_machine=sv1-cray
+				basic_os=unicos
+				;;
+			symmetry)
+				basic_machine=i386-sequent
+				basic_os=dynix
+				;;
+			t3e)
+				basic_machine=alphaev5-cray
+				basic_os=unicos
+				;;
+			t90)
+				basic_machine=t90-cray
+				basic_os=unicos
+				;;
+			toad1)
+				basic_machine=pdp10-xkl
+				basic_os=tops20
+				;;
+			tpf)
+				basic_machine=s390x-ibm
+				basic_os=tpf
+				;;
+			udi29k)
+				basic_machine=a29k-amd
+				basic_os=udi
+				;;
+			ultra3)
+				basic_machine=a29k-nyu
+				basic_os=sym1
+				;;
+			v810 | necv810)
+				basic_machine=v810-nec
+				basic_os=none
+				;;
+			vaxv)
+				basic_machine=vax-dec
+				basic_os=sysv
+				;;
+			vms)
+				basic_machine=vax-dec
+				basic_os=vms
+				;;
+			vsta)
+				basic_machine=i386-pc
+				basic_os=vsta
+				;;
+			vxworks960)
+				basic_machine=i960-wrs
+				basic_os=vxworks
+				;;
+			vxworks68)
+				basic_machine=m68k-wrs
+				basic_os=vxworks
+				;;
+			vxworks29k)
+				basic_machine=a29k-wrs
+				basic_os=vxworks
+				;;
+			xbox)
+				basic_machine=i686-pc
+				basic_os=mingw32
+				;;
+			ymp)
+				basic_machine=ymp-cray
+				basic_os=unicos
+				;;
+			*)
+				basic_machine=$1
+				basic_os=
+				;;
+		esac
+		;;
+esac
+
+# Decode 1-component or ad-hoc basic machines
+case $basic_machine in
+	# Here we handle the default manufacturer of certain CPU types.  It is in
+	# some cases the only manufacturer, in others, it is the most popular.
+	w89k)
+		cpu=hppa1.1
+		vendor=winbond
+		;;
+	op50n)
+		cpu=hppa1.1
+		vendor=oki
+		;;
+	op60c)
+		cpu=hppa1.1
+		vendor=oki
+		;;
+	ibm*)
+		cpu=i370
+		vendor=ibm
+		;;
+	orion105)
+		cpu=clipper
+		vendor=highlevel
+		;;
+	mac | mpw | mac-mpw)
+		cpu=m68k
+		vendor=apple
+		;;
+	pmac | pmac-mpw)
+		cpu=powerpc
+		vendor=apple
+		;;
+
+	# Recognize the various machine names and aliases which stand
+	# for a CPU type and a company and sometimes even an OS.
+	3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
+		cpu=m68000
+		vendor=att
+		;;
+	3b*)
+		cpu=we32k
+		vendor=att
+		;;
+	bluegene*)
+		cpu=powerpc
+		vendor=ibm
+		basic_os=cnk
+		;;
+	decsystem10* | dec10*)
+		cpu=pdp10
+		vendor=dec
+		basic_os=tops10
+		;;
+	decsystem20* | dec20*)
+		cpu=pdp10
+		vendor=dec
+		basic_os=tops20
+		;;
+	delta | 3300 | motorola-3300 | motorola-delta \
+	      | 3300-motorola | delta-motorola)
+		cpu=m68k
+		vendor=motorola
+		;;
+	dpx2*)
+		cpu=m68k
+		vendor=bull
+		basic_os=sysv3
+		;;
+	encore | umax | mmax)
+		cpu=ns32k
+		vendor=encore
+		;;
+	elxsi)
+		cpu=elxsi
+		vendor=elxsi
+		basic_os=${basic_os:-bsd}
+		;;
+	fx2800)
+		cpu=i860
+		vendor=alliant
+		;;
+	genix)
+		cpu=ns32k
+		vendor=ns
+		;;
+	h3050r* | hiux*)
+		cpu=hppa1.1
+		vendor=hitachi
+		basic_os=hiuxwe2
+		;;
+	hp3k9[0-9][0-9] | hp9[0-9][0-9])
+		cpu=hppa1.0
+		vendor=hp
+		;;
+	hp9k2[0-9][0-9] | hp9k31[0-9])
+		cpu=m68000
+		vendor=hp
+		;;
+	hp9k3[2-9][0-9])
+		cpu=m68k
+		vendor=hp
+		;;
+	hp9k6[0-9][0-9] | hp6[0-9][0-9])
+		cpu=hppa1.0
+		vendor=hp
+		;;
+	hp9k7[0-79][0-9] | hp7[0-79][0-9])
+		cpu=hppa1.1
+		vendor=hp
+		;;
+	hp9k78[0-9] | hp78[0-9])
+		# FIXME: really hppa2.0-hp
+		cpu=hppa1.1
+		vendor=hp
+		;;
+	hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
+		# FIXME: really hppa2.0-hp
+		cpu=hppa1.1
+		vendor=hp
+		;;
+	hp9k8[0-9][13679] | hp8[0-9][13679])
+		cpu=hppa1.1
+		vendor=hp
+		;;
+	hp9k8[0-9][0-9] | hp8[0-9][0-9])
+		cpu=hppa1.0
+		vendor=hp
+		;;
+	i*86v32)
+		cpu=`echo "$1" | sed -e 's/86.*/86/'`
+		vendor=pc
+		basic_os=sysv32
+		;;
+	i*86v4*)
+		cpu=`echo "$1" | sed -e 's/86.*/86/'`
+		vendor=pc
+		basic_os=sysv4
+		;;
+	i*86v)
+		cpu=`echo "$1" | sed -e 's/86.*/86/'`
+		vendor=pc
+		basic_os=sysv
+		;;
+	i*86sol2)
+		cpu=`echo "$1" | sed -e 's/86.*/86/'`
+		vendor=pc
+		basic_os=solaris2
+		;;
+	j90 | j90-cray)
+		cpu=j90
+		vendor=cray
+		basic_os=${basic_os:-unicos}
+		;;
+	iris | iris4d)
+		cpu=mips
+		vendor=sgi
+		case $basic_os in
+		    irix*)
+			;;
+		    *)
+			basic_os=irix4
+			;;
+		esac
+		;;
+	miniframe)
+		cpu=m68000
+		vendor=convergent
+		;;
+	*mint | mint[0-9]* | *MiNT | *MiNT[0-9]*)
+		cpu=m68k
+		vendor=atari
+		basic_os=mint
+		;;
+	news-3600 | risc-news)
+		cpu=mips
+		vendor=sony
+		basic_os=newsos
+		;;
+	next | m*-next)
+		cpu=m68k
+		vendor=next
+		case $basic_os in
+		    openstep*)
+		        ;;
+		    nextstep*)
+			;;
+		    ns2*)
+		      basic_os=nextstep2
+			;;
+		    *)
+		      basic_os=nextstep3
+			;;
+		esac
+		;;
+	np1)
+		cpu=np1
+		vendor=gould
+		;;
+	op50n-* | op60c-*)
+		cpu=hppa1.1
+		vendor=oki
+		basic_os=proelf
+		;;
+	pa-hitachi)
+		cpu=hppa1.1
+		vendor=hitachi
+		basic_os=hiuxwe2
+		;;
+	pbd)
+		cpu=sparc
+		vendor=tti
+		;;
+	pbb)
+		cpu=m68k
+		vendor=tti
+		;;
+	pc532)
+		cpu=ns32k
+		vendor=pc532
+		;;
+	pn)
+		cpu=pn
+		vendor=gould
+		;;
+	power)
+		cpu=power
+		vendor=ibm
+		;;
+	ps2)
+		cpu=i386
+		vendor=ibm
+		;;
+	rm[46]00)
+		cpu=mips
+		vendor=siemens
+		;;
+	rtpc | rtpc-*)
+		cpu=romp
+		vendor=ibm
+		;;
+	sde)
+		cpu=mipsisa32
+		vendor=sde
+		basic_os=${basic_os:-elf}
+		;;
+	simso-wrs)
+		cpu=sparclite
+		vendor=wrs
+		basic_os=vxworks
+		;;
+	tower | tower-32)
+		cpu=m68k
+		vendor=ncr
+		;;
+	vpp*|vx|vx-*)
+		cpu=f301
+		vendor=fujitsu
+		;;
+	w65)
+		cpu=w65
+		vendor=wdc
+		;;
+	w89k-*)
+		cpu=hppa1.1
+		vendor=winbond
+		basic_os=proelf
+		;;
+	none)
+		cpu=none
+		vendor=none
+		;;
+	leon|leon[3-9])
+		cpu=sparc
+		vendor=$basic_machine
+		;;
+	leon-*|leon[3-9]-*)
+		cpu=sparc
+		vendor=`echo "$basic_machine" | sed 's/-.*//'`
+		;;
+
+	*-*)
+		# shellcheck disable=SC2162
+		saved_IFS=$IFS
+		IFS="-" read cpu vendor <<EOF
+$basic_machine
+EOF
+		IFS=$saved_IFS
+		;;
+	# We use `pc' rather than `unknown'
+	# because (1) that's what they normally are, and
+	# (2) the word "unknown" tends to confuse beginning users.
+	i*86 | x86_64)
+		cpu=$basic_machine
+		vendor=pc
+		;;
+	# These rules are duplicated from below for sake of the special case above;
+	# i.e. things that normalized to x86 arches should also default to "pc"
+	pc98)
+		cpu=i386
+		vendor=pc
+		;;
+	x64 | amd64)
+		cpu=x86_64
+		vendor=pc
+		;;
+	# Recognize the basic CPU types without company name.
+	*)
+		cpu=$basic_machine
+		vendor=unknown
+		;;
+esac
+
+unset -v basic_machine
+
+# Decode basic machines in the full and proper CPU-Company form.
+case $cpu-$vendor in
+	# Here we handle the default manufacturer of certain CPU types in canonical form. It is in
+	# some cases the only manufacturer, in others, it is the most popular.
+	craynv-unknown)
+		vendor=cray
+		basic_os=${basic_os:-unicosmp}
+		;;
+	c90-unknown | c90-cray)
+		vendor=cray
+		basic_os=${Basic_os:-unicos}
+		;;
+	fx80-unknown)
+		vendor=alliant
+		;;
+	romp-unknown)
+		vendor=ibm
+		;;
+	mmix-unknown)
+		vendor=knuth
+		;;
+	microblaze-unknown | microblazeel-unknown)
+		vendor=xilinx
+		;;
+	rs6000-unknown)
+		vendor=ibm
+		;;
+	vax-unknown)
+		vendor=dec
+		;;
+	pdp11-unknown)
+		vendor=dec
+		;;
+	we32k-unknown)
+		vendor=att
+		;;
+	cydra-unknown)
+		vendor=cydrome
+		;;
+	i370-ibm*)
+		vendor=ibm
+		;;
+	orion-unknown)
+		vendor=highlevel
+		;;
+	xps-unknown | xps100-unknown)
+		cpu=xps100
+		vendor=honeywell
+		;;
+
+	# Here we normalize CPU types with a missing or matching vendor
+	armh-unknown | armh-alt)
+		cpu=armv7l
+		vendor=alt
+		basic_os=${basic_os:-linux-gnueabihf}
+		;;
+	dpx20-unknown | dpx20-bull)
+		cpu=rs6000
+		vendor=bull
+		basic_os=${basic_os:-bosx}
+		;;
+
+	# Here we normalize CPU types irrespective of the vendor
+	amd64-*)
+		cpu=x86_64
+		;;
+	blackfin-*)
+		cpu=bfin
+		basic_os=linux
+		;;
+	c54x-*)
+		cpu=tic54x
+		;;
+	c55x-*)
+		cpu=tic55x
+		;;
+	c6x-*)
+		cpu=tic6x
+		;;
+	e500v[12]-*)
+		cpu=powerpc
+		basic_os=${basic_os}"spe"
+		;;
+	mips3*-*)
+		cpu=mips64
+		;;
+	ms1-*)
+		cpu=mt
+		;;
+	m68knommu-*)
+		cpu=m68k
+		basic_os=linux
+		;;
+	m9s12z-* | m68hcs12z-* | hcs12z-* | s12z-*)
+		cpu=s12z
+		;;
+	openrisc-*)
+		cpu=or32
+		;;
+	parisc-*)
+		cpu=hppa
+		basic_os=linux
+		;;
+	pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
+		cpu=i586
+		;;
+	pentiumpro-* | p6-* | 6x86-* | athlon-* | athalon_*-*)
+		cpu=i686
+		;;
+	pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
+		cpu=i686
+		;;
+	pentium4-*)
+		cpu=i786
+		;;
+	pc98-*)
+		cpu=i386
+		;;
+	ppc-* | ppcbe-*)
+		cpu=powerpc
+		;;
+	ppcle-* | powerpclittle-*)
+		cpu=powerpcle
+		;;
+	ppc64-*)
+		cpu=powerpc64
+		;;
+	ppc64le-* | powerpc64little-*)
+		cpu=powerpc64le
+		;;
+	riscv64gc-*)
+		cpu=riscv64
+		;;
+	sb1-*)
+		cpu=mipsisa64sb1
+		;;
+	sb1el-*)
+		cpu=mipsisa64sb1el
+		;;
+	sh5e[lb]-*)
+		cpu=`echo "$cpu" | sed 's/^\(sh.\)e\(.\)$/\1\2e/'`
+		;;
+	spur-*)
+		cpu=spur
+		;;
+	strongarm-* | thumb-*)
+		cpu=arm
+		;;
+	tx39-*)
+		cpu=mipstx39
+		;;
+	tx39el-*)
+		cpu=mipstx39el
+		;;
+	x64-*)
+		cpu=x86_64
+		;;
+	xscale-* | xscalee[bl]-*)
+		cpu=`echo "$cpu" | sed 's/^xscale/arm/'`
+		;;
+	arm64-* | aarch64le-*)
+		cpu=aarch64
+		;;
+
+	# Recognize the canonical CPU Types that limit and/or modify the
+	# company names they are paired with.
+	cr16-*)
+		basic_os=${basic_os:-elf}
+		;;
+	crisv32-* | etraxfs*-*)
+		cpu=crisv32
+		vendor=axis
+		;;
+	cris-* | etrax*-*)
+		cpu=cris
+		vendor=axis
+		;;
+	crx-*)
+		basic_os=${basic_os:-elf}
+		;;
+	neo-tandem)
+		cpu=neo
+		vendor=tandem
+		;;
+	nse-tandem)
+		cpu=nse
+		vendor=tandem
+		;;
+	nsr-tandem)
+		cpu=nsr
+		vendor=tandem
+		;;
+	nsv-tandem)
+		cpu=nsv
+		vendor=tandem
+		;;
+	nsx-tandem)
+		cpu=nsx
+		vendor=tandem
+		;;
+	mipsallegrexel-sony)
+		cpu=mipsallegrexel
+		vendor=sony
+		;;
+	tile*-*)
+		basic_os=${basic_os:-linux-gnu}
+		;;
+
+	*)
+		# Recognize the canonical CPU types that are allowed with any
+		# company name.
+		case $cpu in
+			1750a | 580 \
+			| a29k \
+			| aarch64 | aarch64_be \
+			| abacus \
+			| alpha | alphaev[4-8] | alphaev56 | alphaev6[78] \
+			| alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] \
+			| alphapca5[67] | alpha64pca5[67] \
+			| am33_2.0 \
+			| amdgcn \
+			| arc | arceb | arc32 | arc64 \
+			| arm | arm[lb]e | arme[lb] | armv* \
+			| avr | avr32 \
+			| asmjs \
+			| ba \
+			| be32 | be64 \
+			| bfin | bpf | bs2000 \
+			| c[123]* | c30 | [cjt]90 | c4x \
+			| c8051 | clipper | craynv | csky | cydra \
+			| d10v | d30v | dlx | dsp16xx \
+			| e2k | elxsi | epiphany \
+			| f30[01] | f700 | fido | fr30 | frv | ft32 | fx80 \
+			| h8300 | h8500 \
+			| hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
+			| hexagon \
+			| i370 | i*86 | i860 | i960 | ia16 | ia64 \
+			| ip2k | iq2000 \
+			| k1om \
+			| le32 | le64 \
+			| lm32 \
+			| loongarch32 | loongarch64 | loongarchx32 \
+			| m32c | m32r | m32rle \
+			| m5200 | m68000 | m680[012346]0 | m68360 | m683?2 | m68k \
+			| m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x \
+			| m88110 | m88k | maxq | mb | mcore | mep | metag \
+			| microblaze | microblazeel \
+			| mips | mipsbe | mipseb | mipsel | mipsle \
+			| mips16 \
+			| mips64 | mips64eb | mips64el \
+			| mips64octeon | mips64octeonel \
+			| mips64orion | mips64orionel \
+			| mips64r5900 | mips64r5900el \
+			| mips64vr | mips64vrel \
+			| mips64vr4100 | mips64vr4100el \
+			| mips64vr4300 | mips64vr4300el \
+			| mips64vr5000 | mips64vr5000el \
+			| mips64vr5900 | mips64vr5900el \
+			| mipsisa32 | mipsisa32el \
+			| mipsisa32r2 | mipsisa32r2el \
+			| mipsisa32r3 | mipsisa32r3el \
+			| mipsisa32r5 | mipsisa32r5el \
+			| mipsisa32r6 | mipsisa32r6el \
+			| mipsisa64 | mipsisa64el \
+			| mipsisa64r2 | mipsisa64r2el \
+			| mipsisa64r3 | mipsisa64r3el \
+			| mipsisa64r5 | mipsisa64r5el \
+			| mipsisa64r6 | mipsisa64r6el \
+			| mipsisa64sb1 | mipsisa64sb1el \
+			| mipsisa64sr71k | mipsisa64sr71kel \
+			| mipsr5900 | mipsr5900el \
+			| mipstx39 | mipstx39el \
+			| mmix \
+			| mn10200 | mn10300 \
+			| moxie \
+			| mt \
+			| msp430 \
+			| nds32 | nds32le | nds32be \
+			| nfp \
+			| nios | nios2 | nios2eb | nios2el \
+			| none | np1 | ns16k | ns32k | nvptx \
+			| open8 \
+			| or1k* \
+			| or32 \
+			| orion \
+			| picochip \
+			| pdp10 | pdp11 | pj | pjl | pn | power \
+			| powerpc | powerpc64 | powerpc64le | powerpcle | powerpcspe \
+			| pru \
+			| pyramid \
+			| riscv | riscv32 | riscv32be | riscv64 | riscv64be \
+			| rl78 | romp | rs6000 | rx \
+			| s390 | s390x \
+			| score \
+			| sh | shl \
+			| sh[1234] | sh[24]a | sh[24]ae[lb] | sh[23]e | she[lb] | sh[lb]e \
+			| sh[1234]e[lb] |  sh[12345][lb]e | sh[23]ele | sh64 | sh64le \
+			| sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet \
+			| sparclite \
+			| sparcv8 | sparcv9 | sparcv9b | sparcv9v | sv1 | sx* \
+			| spu \
+			| tahoe \
+			| thumbv7* \
+			| tic30 | tic4x | tic54x | tic55x | tic6x | tic80 \
+			| tron \
+			| ubicom32 \
+			| v70 | v850 | v850e | v850e1 | v850es | v850e2 | v850e2v3 \
+			| vax \
+			| visium \
+			| w65 \
+			| wasm32 | wasm64 \
+			| we32k \
+			| x86 | x86_64 | xc16x | xgate | xps100 \
+			| xstormy16 | xtensa* \
+			| ymp \
+			| z8k | z80)
+				;;
+
+			*)
+				echo Invalid configuration \`"$1"\': machine \`"$cpu-$vendor"\' not recognized 1>&2
+				exit 1
+				;;
+		esac
+		;;
+esac
+
+# Here we canonicalize certain aliases for manufacturers.
+case $vendor in
+	digital*)
+		vendor=dec
+		;;
+	commodore*)
+		vendor=cbm
+		;;
+	*)
+		;;
+esac
+
+# Decode manufacturer-specific aliases for certain operating systems.
+
+if test x$basic_os != x
+then
+
+# First recognize some ad-hoc cases, or perhaps split kernel-os, or else just
+# set os.
+case $basic_os in
+	gnu/linux*)
+		kernel=linux
+		os=`echo "$basic_os" | sed -e 's|gnu/linux|gnu|'`
+		;;
+	os2-emx)
+		kernel=os2
+		os=`echo "$basic_os" | sed -e 's|os2-emx|emx|'`
+		;;
+	nto-qnx*)
+		kernel=nto
+		os=`echo "$basic_os" | sed -e 's|nto-qnx|qnx|'`
+		;;
+	*-*)
+		# shellcheck disable=SC2162
+		saved_IFS=$IFS
+		IFS="-" read kernel os <<EOF
+$basic_os
+EOF
+		IFS=$saved_IFS
+		;;
+	# Default OS when just kernel was specified
+	nto*)
+		kernel=nto
+		os=`echo "$basic_os" | sed -e 's|nto|qnx|'`
+		;;
+	linux*)
+		kernel=linux
+		os=`echo "$basic_os" | sed -e 's|linux|gnu|'`
+		;;
+	*)
+		kernel=
+		os=$basic_os
+		;;
+esac
+
+# Now, normalize the OS (knowing we just have one component, it's not a kernel,
+# etc.)
+case $os in
+	# First match some system type aliases that might get confused
+	# with valid system types.
+	# solaris* is a basic system type, with this one exception.
+	auroraux)
+		os=auroraux
+		;;
+	bluegene*)
+		os=cnk
+		;;
+	solaris1 | solaris1.*)
+		os=`echo "$os" | sed -e 's|solaris1|sunos4|'`
+		;;
+	solaris)
+		os=solaris2
+		;;
+	unixware*)
+		os=sysv4.2uw
+		;;
+	# es1800 is here to avoid being matched by es* (a different OS)
+	es1800*)
+		os=ose
+		;;
+	# Some version numbers need modification
+	chorusos*)
+		os=chorusos
+		;;
+	isc)
+		os=isc2.2
+		;;
+	sco6)
+		os=sco5v6
+		;;
+	sco5)
+		os=sco3.2v5
+		;;
+	sco4)
+		os=sco3.2v4
+		;;
+	sco3.2.[4-9]*)
+		os=`echo "$os" | sed -e 's/sco3.2./sco3.2v/'`
+		;;
+	sco*v* | scout)
+		# Don't match below
+		;;
+	sco*)
+		os=sco3.2v2
+		;;
+	psos*)
+		os=psos
+		;;
+	qnx*)
+		os=qnx
+		;;
+	hiux*)
+		os=hiuxwe2
+		;;
+	lynx*178)
+		os=lynxos178
+		;;
+	lynx*5)
+		os=lynxos5
+		;;
+	lynxos*)
+		# don't get caught up in next wildcard
+		;;
+	lynx*)
+		os=lynxos
+		;;
+	mac[0-9]*)
+		os=`echo "$os" | sed -e 's|mac|macos|'`
+		;;
+	opened*)
+		os=openedition
+		;;
+	os400*)
+		os=os400
+		;;
+	sunos5*)
+		os=`echo "$os" | sed -e 's|sunos5|solaris2|'`
+		;;
+	sunos6*)
+		os=`echo "$os" | sed -e 's|sunos6|solaris3|'`
+		;;
+	wince*)
+		os=wince
+		;;
+	utek*)
+		os=bsd
+		;;
+	dynix*)
+		os=bsd
+		;;
+	acis*)
+		os=aos
+		;;
+	atheos*)
+		os=atheos
+		;;
+	syllable*)
+		os=syllable
+		;;
+	386bsd)
+		os=bsd
+		;;
+	ctix* | uts*)
+		os=sysv
+		;;
+	nova*)
+		os=rtmk-nova
+		;;
+	ns2)
+		os=nextstep2
+		;;
+	# Preserve the version number of sinix5.
+	sinix5.*)
+		os=`echo "$os" | sed -e 's|sinix|sysv|'`
+		;;
+	sinix*)
+		os=sysv4
+		;;
+	tpf*)
+		os=tpf
+		;;
+	triton*)
+		os=sysv3
+		;;
+	oss*)
+		os=sysv3
+		;;
+	svr4*)
+		os=sysv4
+		;;
+	svr3)
+		os=sysv3
+		;;
+	sysvr4)
+		os=sysv4
+		;;
+	ose*)
+		os=ose
+		;;
+	*mint | mint[0-9]* | *MiNT | MiNT[0-9]*)
+		os=mint
+		;;
+	dicos*)
+		os=dicos
+		;;
+	pikeos*)
+		# Until real need of OS specific support for
+		# particular features comes up, bare metal
+		# configurations are quite functional.
+		case $cpu in
+		    arm*)
+			os=eabi
+			;;
+		    *)
+			os=elf
+			;;
+		esac
+		;;
+	*)
+		# No normalization, but not necessarily accepted, that comes below.
+		;;
+esac
+
+else
+
+# Here we handle the default operating systems that come with various machines.
+# The value should be what the vendor currently ships out the door with their
+# machine or put another way, the most popular os provided with the machine.
+
+# Note that if you're going to try to match "-MANUFACTURER" here (say,
+# "-sun"), then you have to tell the case statement up towards the top
+# that MANUFACTURER isn't an operating system.  Otherwise, code above
+# will signal an error saying that MANUFACTURER isn't an operating
+# system, and we'll never get to this point.
+
+kernel=
+case $cpu-$vendor in
+	score-*)
+		os=elf
+		;;
+	spu-*)
+		os=elf
+		;;
+	*-acorn)
+		os=riscix1.2
+		;;
+	arm*-rebel)
+		kernel=linux
+		os=gnu
+		;;
+	arm*-semi)
+		os=aout
+		;;
+	c4x-* | tic4x-*)
+		os=coff
+		;;
+	c8051-*)
+		os=elf
+		;;
+	clipper-intergraph)
+		os=clix
+		;;
+	hexagon-*)
+		os=elf
+		;;
+	tic54x-*)
+		os=coff
+		;;
+	tic55x-*)
+		os=coff
+		;;
+	tic6x-*)
+		os=coff
+		;;
+	# This must come before the *-dec entry.
+	pdp10-*)
+		os=tops20
+		;;
+	pdp11-*)
+		os=none
+		;;
+	*-dec | vax-*)
+		os=ultrix4.2
+		;;
+	m68*-apollo)
+		os=domain
+		;;
+	i386-sun)
+		os=sunos4.0.2
+		;;
+	m68000-sun)
+		os=sunos3
+		;;
+	m68*-cisco)
+		os=aout
+		;;
+	mep-*)
+		os=elf
+		;;
+	mips*-cisco)
+		os=elf
+		;;
+	mips*-*)
+		os=elf
+		;;
+	or32-*)
+		os=coff
+		;;
+	*-tti)	# must be before sparc entry or we get the wrong os.
+		os=sysv3
+		;;
+	sparc-* | *-sun)
+		os=sunos4.1.1
+		;;
+	pru-*)
+		os=elf
+		;;
+	*-be)
+		os=beos
+		;;
+	*-ibm)
+		os=aix
+		;;
+	*-knuth)
+		os=mmixware
+		;;
+	*-wec)
+		os=proelf
+		;;
+	*-winbond)
+		os=proelf
+		;;
+	*-oki)
+		os=proelf
+		;;
+	*-hp)
+		os=hpux
+		;;
+	*-hitachi)
+		os=hiux
+		;;
+	i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
+		os=sysv
+		;;
+	*-cbm)
+		os=amigaos
+		;;
+	*-dg)
+		os=dgux
+		;;
+	*-dolphin)
+		os=sysv3
+		;;
+	m68k-ccur)
+		os=rtu
+		;;
+	m88k-omron*)
+		os=luna
+		;;
+	*-next)
+		os=nextstep
+		;;
+	*-sequent)
+		os=ptx
+		;;
+	*-crds)
+		os=unos
+		;;
+	*-ns)
+		os=genix
+		;;
+	i370-*)
+		os=mvs
+		;;
+	*-gould)
+		os=sysv
+		;;
+	*-highlevel)
+		os=bsd
+		;;
+	*-encore)
+		os=bsd
+		;;
+	*-sgi)
+		os=irix
+		;;
+	*-siemens)
+		os=sysv4
+		;;
+	*-masscomp)
+		os=rtu
+		;;
+	f30[01]-fujitsu | f700-fujitsu)
+		os=uxpv
+		;;
+	*-rom68k)
+		os=coff
+		;;
+	*-*bug)
+		os=coff
+		;;
+	*-apple)
+		os=macos
+		;;
+	*-atari*)
+		os=mint
+		;;
+	*-wrs)
+		os=vxworks
+		;;
+	*)
+		os=none
+		;;
+esac
+
+fi
+
+# Now, validate our (potentially fixed-up) OS.
+case $os in
+	# Sometimes we do "kernel-libc", so those need to count as OSes.
+	musl* | newlib* | relibc* | uclibc*)
+		;;
+	# Likewise for "kernel-abi"
+	eabi* | gnueabi*)
+		;;
+	# VxWorks passes extra cpu info in the 4th field.
+	simlinux | simwindows | spe)
+		;;
+	# Now accept the basic system types.
+	# The portable systems come first.
+	# Each alternative MUST end in a * to match a version number.
+	gnu* | android* | bsd* | mach* | minix* | genix* | ultrix* | irix* \
+	     | *vms* | esix* | aix* | cnk* | sunos | sunos[34]* \
+	     | hpux* | unos* | osf* | luna* | dgux* | auroraux* | solaris* \
+	     | sym* |  plan9* | psp* | sim* | xray* | os68k* | v88r* \
+	     | hiux* | abug | nacl* | netware* | windows* \
+	     | os9* | macos* | osx* | ios* \
+	     | mpw* | magic* | mmixware* | mon960* | lnews* \
+	     | amigaos* | amigados* | msdos* | newsos* | unicos* | aof* \
+	     | aos* | aros* | cloudabi* | sortix* | twizzler* \
+	     | nindy* | vxsim* | vxworks* | ebmon* | hms* | mvs* \
+	     | clix* | riscos* | uniplus* | iris* | isc* | rtu* | xenix* \
+	     | mirbsd* | netbsd* | dicos* | openedition* | ose* \
+	     | bitrig* | openbsd* | secbsd* | solidbsd* | libertybsd* | os108* \
+	     | ekkobsd* | freebsd* | riscix* | lynxos* | os400* \
+	     | bosx* | nextstep* | cxux* | aout* | elf* | oabi* \
+	     | ptx* | coff* | ecoff* | winnt* | domain* | vsta* \
+	     | udi* | lites* | ieee* | go32* | aux* | hcos* \
+	     | chorusrdb* | cegcc* | glidix* | serenity* \
+	     | cygwin* | msys* | pe* | moss* | proelf* | rtems* \
+	     | midipix* | mingw32* | mingw64* | mint* \
+	     | uxpv* | beos* | mpeix* | udk* | moxiebox* \
+	     | interix* | uwin* | mks* | rhapsody* | darwin* \
+	     | openstep* | oskit* | conix* | pw32* | nonstopux* \
+	     | storm-chaos* | tops10* | tenex* | tops20* | its* \
+	     | os2* | vos* | palmos* | uclinux* | nucleus* | morphos* \
+	     | scout* | superux* | sysv* | rtmk* | tpf* | windiss* \
+	     | powermax* | dnix* | nx6 | nx7 | sei* | dragonfly* \
+	     | skyos* | haiku* | rdos* | toppers* | drops* | es* \
+	     | onefs* | tirtos* | phoenix* | fuchsia* | redox* | bme* \
+	     | midnightbsd* | amdhsa* | unleashed* | emscripten* | wasi* \
+	     | nsk* | powerunix* | genode* | zvmoe* | qnx* | emx* | zephyr* \
+	     | fiwix* )
+		;;
+	# This one is extra strict with allowed versions
+	sco3.2v2 | sco3.2v[4-9]* | sco5v6*)
+		# Don't forget version if it is 3.2v4 or newer.
+		;;
+	none)
+		;;
+	*)
+		echo Invalid configuration \`"$1"\': OS \`"$os"\' not recognized 1>&2
+		exit 1
+		;;
+esac
+
+# As a final step for OS-related things, validate the OS-kernel combination
+# (given a valid OS), if there is a kernel.
+case $kernel-$os in
+	linux-gnu* | linux-dietlibc* | linux-android* | linux-newlib* \
+		   | linux-musl* | linux-relibc* | linux-uclibc* )
+		;;
+	uclinux-uclibc* )
+		;;
+	-dietlibc* | -newlib* | -musl* | -relibc* | -uclibc* )
+		# These are just libc implementations, not actual OSes, and thus
+		# require a kernel.
+		echo "Invalid configuration \`$1': libc \`$os' needs explicit kernel." 1>&2
+		exit 1
+		;;
+	kfreebsd*-gnu* | kopensolaris*-gnu*)
+		;;
+	vxworks-simlinux | vxworks-simwindows | vxworks-spe)
+		;;
+	nto-qnx*)
+		;;
+	os2-emx)
+		;;
+	*-eabi* | *-gnueabi*)
+		;;
+	-*)
+		# Blank kernel with real OS is always fine.
+		;;
+	*-*)
+		echo "Invalid configuration \`$1': Kernel \`$kernel' not known to work with OS \`$os'." 1>&2
+		exit 1
+		;;
+esac
+
+# Here we handle the case where we know the os, and the CPU type, but not the
+# manufacturer.  We pick the logical manufacturer.
+case $vendor in
+	unknown)
+		case $cpu-$os in
+			*-riscix*)
+				vendor=acorn
+				;;
+			*-sunos*)
+				vendor=sun
+				;;
+			*-cnk* | *-aix*)
+				vendor=ibm
+				;;
+			*-beos*)
+				vendor=be
+				;;
+			*-hpux*)
+				vendor=hp
+				;;
+			*-mpeix*)
+				vendor=hp
+				;;
+			*-hiux*)
+				vendor=hitachi
+				;;
+			*-unos*)
+				vendor=crds
+				;;
+			*-dgux*)
+				vendor=dg
+				;;
+			*-luna*)
+				vendor=omron
+				;;
+			*-genix*)
+				vendor=ns
+				;;
+			*-clix*)
+				vendor=intergraph
+				;;
+			*-mvs* | *-opened*)
+				vendor=ibm
+				;;
+			*-os400*)
+				vendor=ibm
+				;;
+			s390-* | s390x-*)
+				vendor=ibm
+				;;
+			*-ptx*)
+				vendor=sequent
+				;;
+			*-tpf*)
+				vendor=ibm
+				;;
+			*-vxsim* | *-vxworks* | *-windiss*)
+				vendor=wrs
+				;;
+			*-aux*)
+				vendor=apple
+				;;
+			*-hms*)
+				vendor=hitachi
+				;;
+			*-mpw* | *-macos*)
+				vendor=apple
+				;;
+			*-*mint | *-mint[0-9]* | *-*MiNT | *-MiNT[0-9]*)
+				vendor=atari
+				;;
+			*-vos*)
+				vendor=stratus
+				;;
+		esac
+		;;
+esac
+
+echo "$cpu-$vendor-${kernel:+$kernel-}$os"
+exit
+
+# Local variables:
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-start: "timestamp='"
+# time-stamp-format: "%:y-%02m-%02d"
+# time-stamp-end: "'"
+# End:

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-riscv64gc-patch/firefox-102.15.0-new/build/autoconf/config.sub
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-skip-failing-tests-patch/create.patch.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-skip-failing-tests-patch/create.patch.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-skip-failing-tests-patch/create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+VERSION=102.15.0
+
+tar --files-from=file.list -xJvf ../firefox-${VERSION}esr.source.tar.xz
+mv firefox-$VERSION firefox-$VERSION-orig
+
+cp -rf ./firefox-$VERSION-new ./firefox-$VERSION
+
+diff --unified -Nr  firefox-$VERSION-orig  firefox-$VERSION > firefox-$VERSION-skip-failing-tests.patch
+
+mv firefox-$VERSION-skip-failing-tests.patch ../patches
+
+rm -rf ./firefox-$VERSION
+rm -rf ./firefox-$VERSION-orig

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-skip-failing-tests-patch/create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-skip-failing-tests-patch/file.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-skip-failing-tests-patch/file.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-skip-failing-tests-patch/file.list	(revision 228)
@@ -0,0 +1 @@
+firefox-102.15.0/js/src/tests/jstests.list
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-skip-failing-tests-patch/firefox-102.15.0-new/js/src/tests/jstests.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-skip-failing-tests-patch/firefox-102.15.0-new/js/src/tests/jstests.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-skip-failing-tests-patch/firefox-102.15.0-new/js/src/tests/jstests.list	(revision 228)
@@ -0,0 +1,1050 @@
+# Manifest entries for imported test suites whose individual test cases
+# we don't want to change.
+
+# Skip the folder with tests for the scripts
+skip include test/jstests.list
+
+skip script non262/String/normalize-generateddata-input.js # input data for other test
+
+# Timeouts on arm and cgc builds.
+slow script test262/built-ins/decodeURI/S15.1.3.1_A2.5_T1.js
+slow script test262/built-ins/decodeURIComponent/S15.1.3.2_A2.5_T1.js
+
+# Windows10-aarch64 fails certain tests.
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1526003
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1526012
+skip-if((xulRuntime.XPCOMABI.match(/aarch64/))&&(xulRuntime.OS=="WINNT")) script non262/Math/fround.js
+skip-if((xulRuntime.XPCOMABI.match(/aarch64/))&&(xulRuntime.OS=="WINNT")) script non262/Math/log2-approx.js
+
+
+###########################################################################
+# Generated jstests.list for test262 when inline |reftest| isn't possible #
+###########################################################################
+
+include test262/jstests.list
+
+
+#################################################################
+# Tests disabled due to intentional alternative implementations #
+#################################################################
+
+# Legacy "caller" and "arguments" implemented as accessor properties on Function.prototype.
+skip script test262/built-ins/Function/prototype/restricted-property-arguments.js
+skip script test262/built-ins/Function/prototype/restricted-property-caller.js
+skip script test262/built-ins/ThrowTypeError/unique-per-realm-function-proto.js
+
+
+#########################################################################
+# Test262 tests disabled when features are only conditionally available #
+#########################################################################
+
+skip-if(!String.prototype.normalize) include test262/built-ins/String/prototype/normalize/jstests.list
+
+# Requires ICU to detect Final_Sigma case mapping context
+skip-if(!this.hasOwnProperty("Intl")) script test262/built-ins/String/prototype/toLowerCase/Final_Sigma_U180E.js
+skip-if(!this.hasOwnProperty("Intl")) script test262/built-ins/String/prototype/toLowerCase/special_casing_conditional.js
+skip-if(!this.hasOwnProperty("Intl")) script test262/built-ins/String/prototype/toLocaleLowerCase/Final_Sigma_U180E.js
+skip-if(!this.hasOwnProperty("Intl")) script test262/built-ins/String/prototype/toLocaleLowerCase/special_casing_conditional.js
+
+# Skip intl402 tests when Intl isn't available.
+skip-if(!this.hasOwnProperty("Intl")) include test262/intl402/jstests.list
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1415303
+skip-if(!this.hasOwnProperty("Atomics")) include test262/built-ins/Atomics/jstests.list
+skip-if(!this.hasOwnProperty("SharedArrayBuffer")) include test262/built-ins/SharedArrayBuffer/jstests.list
+
+# Crashes on s390x and ppc64, avoid it
+skip-if(xulRuntime.XPCOMABI.match(/s390x|ppc64-/)) script non262/extensions/clone-errors.js
+
+# Crashes on s390x, ppc64, aarch64
+skip-if(xulRuntime.XPCOMABI.match(/s390x|aarch64|ppc64-/)) script test262/built-ins/Date/UTC/fp-evaluation-order.js
+
+# Crashes on s390x, avoid it
+skip-if(xulRuntime.XPCOMABI.match(/s390x/)) script test262/built-ins/TypedArray/prototype/set/typedarray-arg-set-values-same-buffer-other-type.js
+
+#####################################
+# Test262 tests disabled on browser #
+#####################################
+
+# Defines a non-configurable property on the WindowProxy object.
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-block-decl-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-block-decl-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-if-decl-else-decl-a-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-if-decl-else-decl-a-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-if-decl-else-decl-b-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-if-decl-else-decl-b-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-if-decl-else-stmt-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-if-decl-else-stmt-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-if-decl-no-else-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-if-decl-no-else-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-if-stmt-else-decl-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-if-stmt-else-decl-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-switch-case-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-switch-case-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-switch-dflt-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/direct/global-switch-dflt-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-block-decl-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-block-decl-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-if-decl-else-decl-a-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-if-decl-else-decl-a-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-if-decl-else-decl-b-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-if-decl-else-decl-b-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-if-decl-else-stmt-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-if-decl-else-stmt-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-if-decl-no-else-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-if-decl-no-else-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-if-stmt-else-decl-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-if-stmt-else-decl-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-switch-case-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-switch-case-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-switch-dflt-eval-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/eval-code/indirect/global-switch-dflt-eval-global-existing-global-update.js
+skip-if(!xulRuntime.shell) script test262/language/eval-code/direct/var-env-func-init-global-update-non-configurable.js
+skip-if(!xulRuntime.shell) script test262/language/eval-code/indirect/var-env-func-init-global-update-non-configurable.js
+skip-if(!xulRuntime.shell) script test262/language/global-code/script-decl-lex-restricted-global.js
+skip-if(!xulRuntime.shell) script test262/language/global-code/script-decl-var.js
+skip-if(!xulRuntime.shell) script test262/language/global-code/script-decl-func-err-non-configurable.js
+skip-if(!xulRuntime.shell) script test262/language/global-code/script-decl-func.js
+skip-if(!xulRuntime.shell) script test262/built-ins/Array/prototype/methods-called-as-functions.js
+
+# Calls preventExtensions() on the WindowProxy object.
+skip-if(!xulRuntime.shell) script test262/language/global-code/script-decl-func-err-non-extensible.js
+skip-if(!xulRuntime.shell) script test262/language/global-code/script-decl-lex.js
+skip-if(!xulRuntime.shell) script test262/language/global-code/script-decl-var-err.js
+
+# Global var-bindings are configurable in a browser environment.
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/block-decl-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/block-decl-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/if-decl-else-decl-a-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/if-decl-else-decl-a-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/if-decl-else-decl-b-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/if-decl-else-decl-b-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/if-decl-else-stmt-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/if-decl-else-stmt-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/if-decl-no-else-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/if-decl-no-else-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/if-stmt-else-decl-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/if-stmt-else-decl-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/switch-case-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/switch-case-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/switch-dflt-global-existing-global-init.js
+skip-if(!xulRuntime.shell) script test262/annexB/language/global-code/switch-dflt-global-init.js
+skip-if(!xulRuntime.shell) script test262/language/eval-code/direct/var-env-var-init-global-exstng.js
+skip-if(!xulRuntime.shell) script test262/language/eval-code/indirect/var-env-var-init-global-exstng.js
+skip-if(!xulRuntime.shell) script test262/language/global-code/decl-func.js
+skip-if(!xulRuntime.shell) script test262/language/global-code/decl-var.js
+
+# Atomics tests (Atomics/notify, Atomics/wait) are disabled because our $.agent implementation needs getSharedArrayBuffer, setSharedArrayBuffer and evalInWorker: https://bugzilla.mozilla.org/show_bug.cgi?id=1598612
+# And also: https://bugzil.la/1349863
+skip-if(!xulRuntime.shell) include test262/built-ins/Atomics/notify/jstests.list
+skip-if(!xulRuntime.shell) include test262/built-ins/Atomics/wait/jstests.list
+
+
+##################################################
+# Test262 tests skipped due to SpiderMonkey bugs #
+##################################################
+
+# Lazily resolved "name" and "length" properties on functions violate property key order.
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1629803
+skip script test262/built-ins/Object/keys/order-after-define-property.js
+skip script test262/built-ins/Object/entries/order-after-define-property.js
+skip script test262/language/computed-property-names/class/static/method-number-order.js
+skip script test262/language/computed-property-names/class/static/method-string-order.js
+skip script test262/language/computed-property-names/class/static/method-symbol-order.js
+skip script test262/language/statements/class/definition/fn-length-static-precedence-order.js
+skip script test262/language/statements/class/definition/fn-name-static-precedence-order.js
+skip script test262/built-ins/Object/keys/order-after-define-property-with-function.js
+skip script test262/built-ins/Object/entries/order-after-define-property-with-function.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1286997
+# Bug 1286997 probably doesn't cover all spec violations.
+skip script test262/language/expressions/assignment/S11.13.1_A5_T5.js
+skip script test262/language/expressions/assignment/S11.13.1_A7_T3.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A5.10_T5.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A5.11_T5.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A5.1_T5.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A5.2_T5.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A5.3_T5.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A5.4_T5.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A5.5_T5.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A5.6_T5.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A5.7_T5.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A5.8_T5.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A5.9_T5.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.10_T1.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.10_T2.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.11_T1.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.11_T2.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.1_T1.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.1_T2.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.2_T1.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.2_T2.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.3_T1.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.3_T2.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.4_T1.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.4_T2.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.5_T1.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.5_T2.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.6_T1.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.6_T2.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.7_T1.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.7_T2.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.8_T1.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.8_T2.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.9_T1.js
+skip script test262/language/expressions/compound-assignment/S11.13.2_A7.9_T2.js
+skip script test262/language/expressions/postfix-decrement/S11.3.2_A5_T5.js
+skip script test262/language/expressions/postfix-decrement/S11.3.2_A6_T1.js
+skip script test262/language/expressions/postfix-decrement/S11.3.2_A6_T2.js
+skip script test262/language/expressions/postfix-increment/S11.3.1_A5_T5.js
+skip script test262/language/expressions/postfix-increment/S11.3.1_A6_T1.js
+skip script test262/language/expressions/postfix-increment/S11.3.1_A6_T2.js
+skip script test262/language/expressions/prefix-decrement/S11.4.5_A5_T5.js
+skip script test262/language/expressions/prefix-decrement/S11.4.5_A6_T1.js
+skip script test262/language/expressions/prefix-decrement/S11.4.5_A6_T2.js
+skip script test262/language/expressions/prefix-increment/S11.4.4_A5_T5.js
+skip script test262/language/expressions/prefix-increment/S11.4.4_A6_T1.js
+skip script test262/language/expressions/prefix-increment/S11.4.4_A6_T2.js
+skip script test262/language/expressions/logical-assignment/lgcl-nullish-assignment-operator-lhs-before-rhs.js
+skip script test262/language/expressions/logical-assignment/lgcl-and-assignment-operator-lhs-before-rhs.js
+skip script test262/language/expressions/logical-assignment/lgcl-or-assignment-operator-lhs-before-rhs.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=605515
+skip script test262/language/identifier-resolution/assign-to-global-undefined-strict.js
+skip script test262/language/expressions/postfix-decrement/operator-x-postfix-decrement-calls-putvalue-lhs-newvalue-.js
+skip script test262/language/expressions/assignment/assignment-operator-calls-putvalue-lref--rval-.js
+skip script test262/language/expressions/prefix-increment/operator-prefix-increment-x-calls-putvalue-lhs-newvalue-.js
+skip script test262/language/expressions/prefix-decrement/operator-prefix-decrement-x-calls-putvalue-lhs-newvalue-.js
+skip script test262/language/expressions/postfix-increment/operator-x-postfix-increment-calls-putvalue-lhs-newvalue-.js
+skip script test262/language/expressions/compound-assignment/compound-assignment-operator-calls-putvalue-lref--v--16.js
+skip script test262/language/expressions/compound-assignment/compound-assignment-operator-calls-putvalue-lref--v--8.js
+skip script test262/language/expressions/compound-assignment/compound-assignment-operator-calls-putvalue-lref--v--12.js
+skip script test262/language/expressions/compound-assignment/compound-assignment-operator-calls-putvalue-lref--v--18.js
+skip script test262/language/expressions/compound-assignment/compound-assignment-operator-calls-putvalue-lref--v--14.js
+skip script test262/language/expressions/compound-assignment/compound-assignment-operator-calls-putvalue-lref--v--2.js
+skip script test262/language/expressions/compound-assignment/compound-assignment-operator-calls-putvalue-lref--v--10.js
+skip script test262/language/expressions/compound-assignment/compound-assignment-operator-calls-putvalue-lref--v--6.js
+skip script test262/language/expressions/compound-assignment/compound-assignment-operator-calls-putvalue-lref--v--4.js
+skip script test262/language/expressions/compound-assignment/compound-assignment-operator-calls-putvalue-lref--v-.js
+skip script test262/language/expressions/compound-assignment/compound-assignment-operator-calls-putvalue-lref--v--20.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1670502
+skip script test262/built-ins/Function/prototype/toString/built-in-function-object.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1225839
+ignore-flag(--more-compartments) script test262/built-ins/Function/internals/Call/class-ctor-realm.js
+
+# These two tests assume that assignment to function calls is a SyntaxError.  We
+# historically implemented otherwise, as web reality.  Perhaps that can be
+# reevaluated at some point.
+skip script test262/language/expressions/assignmenttargettype/parenthesized-callexpression-arguments.js
+skip script test262/language/expressions/assignmenttargettype/direct-callexpression-arguments.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1297179
+# All of these tests pass except with --more-compartments.
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/apply/arguments-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/apply/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/arguments-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/defineProperty/desc-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/defineProperty/null-handler-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/defineProperty/targetdesc-configurable-desc-not-configurable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/defineProperty/targetdesc-not-compatible-descriptor-not-configurable-target-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/defineProperty/targetdesc-not-compatible-descriptor-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/defineProperty/targetdesc-undefined-not-configurable-descriptor-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/defineProperty/targetdesc-undefined-target-is-not-extensible-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/defineProperty/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/deleteProperty/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/get/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/getOwnPropertyDescriptor/result-type-is-not-object-nor-undefined-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/getOwnPropertyDescriptor/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/getPrototypeOf/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/has/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/isExtensible/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/ownKeys/return-not-list-object-throws-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/ownKeys/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/preventExtensions/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/set/trap-is-not-callable-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/setPrototypeOf/trap-is-not-callable-realm.js
+
+# Errors thrown from wrong realm, similar to 1225839, 1288457, and 1297179.
+ignore-flag(--more-compartments) script test262/built-ins/Array/length/define-own-prop-length-overflow-realm.js
+skip script test262/built-ins/Function/internals/Construct/derived-return-val-realm.js
+skip script test262/built-ins/Function/internals/Construct/derived-this-uninitialized-realm.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1317378
+skip script test262/language/statements/do-while/cptn-abrupt-empty.js
+skip script test262/language/statements/do-while/cptn-normal.js
+skip script test262/language/statements/for-in/cptn-decl-abrupt-empty.js
+skip script test262/language/statements/for-in/cptn-decl-itr.js
+skip script test262/language/statements/for-in/cptn-decl-skip-itr.js
+skip script test262/language/statements/for-in/cptn-decl-zero-itr.js
+skip script test262/language/statements/for-in/cptn-expr-abrupt-empty.js
+skip script test262/language/statements/for-in/cptn-expr-itr.js
+skip script test262/language/statements/for-in/cptn-expr-skip-itr.js
+skip script test262/language/statements/for-in/cptn-expr-zero-itr.js
+skip script test262/language/statements/for/cptn-decl-expr-iter.js
+skip script test262/language/statements/for/cptn-decl-expr-no-iter.js
+skip script test262/language/statements/for/cptn-expr-expr-iter.js
+skip script test262/language/statements/for/cptn-expr-expr-no-iter.js
+skip script test262/language/statements/for/head-init-expr-check-empty-inc-empty-completion.js
+skip script test262/language/statements/for/head-init-var-check-empty-inc-empty-completion.js
+skip script test262/language/statements/for-of/cptn-decl-abrupt-empty.js
+skip script test262/language/statements/for-of/cptn-decl-itr.js
+skip script test262/language/statements/for-of/cptn-decl-no-itr.js
+skip script test262/language/statements/for-of/cptn-expr-abrupt-empty.js
+skip script test262/language/statements/for-of/cptn-expr-itr.js
+skip script test262/language/statements/for-of/cptn-expr-no-itr.js
+skip script test262/language/statements/if/cptn-else-false-nrml.js
+skip script test262/language/statements/if/cptn-else-false-abrupt-empty.js
+skip script test262/language/statements/if/cptn-else-true-abrupt-empty.js
+skip script test262/language/statements/if/cptn-else-true-nrml.js
+skip script test262/language/statements/if/cptn-no-else-false.js
+skip script test262/language/statements/if/cptn-no-else-true-abrupt-empty.js
+skip script test262/language/statements/if/cptn-no-else-true-nrml.js
+skip script test262/language/statements/switch/cptn-a-abrupt-empty.js
+skip script test262/language/statements/switch/cptn-abrupt-empty.js
+skip script test262/language/statements/switch/cptn-b-abrupt-empty.js
+skip script test262/language/statements/switch/cptn-b-final.js
+skip script test262/language/statements/switch/cptn-dflt-abrupt-empty.js
+skip script test262/language/statements/switch/cptn-dflt-b-abrupt-empty.js
+skip script test262/language/statements/switch/cptn-dflt-b-final.js
+skip script test262/language/statements/switch/cptn-dflt-final.js
+skip script test262/language/statements/switch/cptn-no-dflt-match-abrupt-empty.js
+skip script test262/language/statements/switch/cptn-no-dflt-match-final.js
+skip script test262/language/statements/switch/cptn-no-dflt-no-match.js
+skip script test262/language/statements/try/cptn-finally-skip-catch.js
+skip script test262/language/statements/try/cptn-finally-wo-catch.js
+skip script test262/language/statements/try/cptn-try.js
+skip script test262/language/statements/while/cptn-abrupt-empty.js
+skip script test262/language/statements/while/cptn-iter.js
+skip script test262/language/statements/while/cptn-no-iter.js
+skip script test262/language/statements/with/cptn-abrupt-empty.js
+skip script test262/language/statements/with/cptn-nrml.js
+
+# Hoisted block-level function named "arguments" not initialized with undefined per B.3.3.1
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1339123
+skip script test262/annexB/language/function-code/block-decl-func-skip-arguments.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1407587
+skip script test262/language/expressions/assignment/destructuring/keyed-destructuring-property-reference-target-evaluation-order.js
+skip script test262/language/expressions/assignment/destructuring/iterator-destructuring-property-reference-target-evaluation-order.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1321616
+skip script test262/annexB/built-ins/Function/createdynfn-no-line-terminator-html-close-comment-body.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1462745
+skip script test262/annexB/language/function-code/block-decl-nested-blocks-with-fun-decl.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1545038
+# All of these tests pass except with --more-compartments.
+ignore-flag(--more-compartments) script test262/built-ins/String/prototype/valueOf/non-generic-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/String/prototype/valueOf/non-generic-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/String/prototype/toString/non-generic-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/String/prototype/toString/non-generic-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/apply/null-handler-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/apply/null-handler-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-number-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-number-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/null-handler-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/null-handler-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-undefined-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-undefined-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-symbol-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-symbol-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-string-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-string-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-null-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-null-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-boolean-realm.js
+ignore-flag(--more-compartments) script test262/built-ins/Proxy/construct/return-not-object-throws-boolean-realm.js
+
+# Depends upon the SharedArrayBuffer constructor being defined as a global
+# property -- and right now, it's only defined for cross-site-isolated pages
+# that request it using COOP/COEP.
+fails-if(!xulRuntime.shell) script test262/built-ins/ArrayBuffer/prototype/byteLength/this-is-sharedarraybuffer.js
+fails-if(!xulRuntime.shell) script test262/built-ins/ArrayBuffer/prototype/slice/this-is-sharedarraybuffer.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/add/bigint/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/add/bigint/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/add/validate-arraytype-before-index-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/add/validate-arraytype-before-value-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/add/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/add/expected-return-value.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/add/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/add/non-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/and/bigint/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/and/bigint/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/and/validate-arraytype-before-index-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/and/validate-arraytype-before-value-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/and/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/and/expected-return-value.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/and/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/and/non-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/compareExchange/bigint/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/compareExchange/bigint/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/compareExchange/validate-arraytype-before-expectedValue-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/compareExchange/validate-arraytype-before-index-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/compareExchange/validate-arraytype-before-replacementValue-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/compareExchange/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/compareExchange/expected-return-value.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/compareExchange/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/compareExchange/non-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/exchange/bigint/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/exchange/bigint/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/exchange/validate-arraytype-before-index-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/exchange/validate-arraytype-before-value-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/exchange/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/exchange/expected-return-value.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/exchange/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/exchange/non-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/load/bigint/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/load/bigint/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/load/validate-arraytype-before-index-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/load/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/load/expected-return-value.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/load/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/load/non-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/or/bigint/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/or/bigint/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/or/validate-arraytype-before-index-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/or/validate-arraytype-before-value-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/or/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/or/expected-return-value.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/or/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/or/non-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/store/bigint/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/store/bigint/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/store/validate-arraytype-before-index-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/store/validate-arraytype-before-value-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/store/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/store/expected-return-value-negative-zero.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/store/expected-return-value.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/store/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/store/non-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/sub/bigint/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/sub/bigint/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/sub/validate-arraytype-before-index-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/sub/validate-arraytype-before-value-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/sub/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/sub/expected-return-value.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/sub/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/sub/non-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/xor/bigint/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/xor/bigint/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/xor/validate-arraytype-before-index-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/xor/validate-arraytype-before-value-coercion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/xor/bad-range.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/xor/expected-return-value.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/xor/good-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/xor/non-views.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/buffer-does-not-have-arraybuffer-data-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/buffer-reference-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/byteoffset-is-negative-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/custom-proto-access-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/custom-proto-if-not-object-fallbacks-to-default-prototype-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/custom-proto-if-object-is-used-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/defined-bytelength-and-byteoffset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/defined-byteoffset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/defined-byteoffset-undefined-bytelength-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/excessive-bytelength-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/excessive-byteoffset-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/instance-extensibility-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/negative-bytelength-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/negative-byteoffset-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/newtarget-undefined-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/buffer/return-buffer-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/buffer/this-has-no-dataview-internal-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/byteLength/return-bytelength-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/byteLength/this-has-no-dataview-internal-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/byteOffset/return-byteoffset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/byteOffset/this-has-no-dataview-internal-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/getInt32/index-is-out-of-range-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/getInt32/negative-byteoffset-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/getInt32/return-abrupt-from-tonumber-byteoffset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/getInt32/return-abrupt-from-tonumber-byteoffset-symbol-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/getInt32/return-value-clean-arraybuffer-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/getInt32/return-values-custom-offset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/getInt32/return-values-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/getInt32/this-has-no-dataview-internal-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/getInt32/to-boolean-littleendian-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/prototype/getInt32/toindex-byteoffset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/proto-from-ctor-realm-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/return-abrupt-tonumber-bytelength-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/return-abrupt-tonumber-bytelength-symbol-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/return-abrupt-tonumber-byteoffset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/return-abrupt-tonumber-byteoffset-symbol-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/return-instance-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/toindex-bytelength-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/DataView/toindex-byteoffset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/byteLength/invoked-as-accessor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/byteLength/invoked-as-func.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/byteLength/length.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/byteLength/name.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/byteLength/prop-desc.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/byteLength/return-bytelength.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/byteLength/this-has-no-typedarrayname-internal.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/byteLength/this-is-arraybuffer.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/byteLength/this-is-not-object.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/context-is-not-arraybuffer-object.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/context-is-not-object.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/descriptor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/end-default-if-absent.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/end-default-if-undefined.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/end-exceeds-length.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/extensible.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/length.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/name.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/negative-end.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/negative-start.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/nonconstructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/number-conversion.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/species-constructor-is-not-object.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/species-constructor-is-undefined.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/species-is-not-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/species-is-not-object.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/species-is-null.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/species-is-undefined.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/species-returns-larger-arraybuffer.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/species-returns-not-arraybuffer.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/species-returns-same-arraybuffer.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/species-returns-smaller-arraybuffer.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/species.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/start-default-if-absent.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/start-default-if-undefined.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/start-exceeds-end.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/start-exceeds-length.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/this-is-arraybuffer.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/tointeger-conversion-end.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/tointeger-conversion-start.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/Symbol.toStringTag.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/allocation-limit.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/data-allocation-after-object-creation.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/init-zero.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/length-is-absent.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/length-is-too-large-throws.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/length.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/negative-length-throws.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/newtarget-prototype-is-not-object.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/proto-from-ctor-realm.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype-from-newtarget.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/return-abrupt-from-length-symbol.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/return-abrupt-from-length.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/toindex-length.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/undefined-newtarget-throws.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/zero-length.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/excessive-length-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/excessive-offset-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/invoked-with-undefined-newtarget-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/is-referenced-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/length-access-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/length-is-symbol-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/new-instance-extensibility-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/proto-from-ctor-realm-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/returns-new-instance-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/toindex-bytelength-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/toindex-byteoffset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/typedarray-backed-by-sharedarraybuffer.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/use-custom-proto-if-object-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/use-default-proto-if-custom-proto-is-not-object-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/bufferbyteoffset-throws-from-modulo-element-size-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/byteoffset-is-negative-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/byteoffset-is-negative-zero-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/byteoffset-is-symbol-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/byteoffset-throws-from-modulo-element-size-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/byteoffset-to-number-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/custom-proto-access-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/defined-length-and-offset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/defined-length-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/defined-negative-length-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/defined-offset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/excessive-length-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/excessive-offset-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/invoked-with-undefined-newtarget-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/is-referenced-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/length-access-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/length-is-symbol-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/new-instance-extensibility-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/proto-from-ctor-realm-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/returns-new-instance-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/toindex-bytelength-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/toindex-byteoffset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/typedarray-backed-by-sharedarraybuffer.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/use-custom-proto-if-object-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors-bigint/buffer-arg/use-default-proto-if-custom-proto-is-not-object-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/bufferbyteoffset-throws-from-modulo-element-size-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/byteoffset-is-negative-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/byteoffset-is-negative-zero-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/byteoffset-is-symbol-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/byteoffset-throws-from-modulo-element-size-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/byteoffset-to-number-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/custom-proto-access-throws-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/defined-length-and-offset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/defined-length-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/defined-negative-length-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/ctors/buffer-arg/defined-offset-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/internals/Get/BigInt/indexed-value-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/internals/Get/indexed-value-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-diff-buffer-other-type-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-diff-buffer-same-type-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArray/prototype/set/BigInt/typedarray-arg-set-values-same-buffer-same-type-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArray/prototype/set/typedarray-arg-set-values-diff-buffer-other-type-conversions-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArray/prototype/set/typedarray-arg-set-values-diff-buffer-other-type-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArray/prototype/set/typedarray-arg-set-values-diff-buffer-same-type-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArray/prototype/set/typedarray-arg-set-values-same-buffer-same-type-sab.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/add/not-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/and/not-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/compareExchange/not-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/exchange/not-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/isLockFree/not-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/load/not-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/or/not-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/store/not-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/sub/not-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Atomics/xor/not-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/Object/seal/seal-sharedarraybuffer.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/slice/not-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/is-a-constructor.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/internals/Delete/BigInt/indexed-value-sab-non-strict.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/internals/Delete/BigInt/indexed-value-sab-strict-strict.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/internals/Delete/indexed-value-sab-non-strict.js
+fails-if(!xulRuntime.shell) script test262/built-ins/TypedArrayConstructors/internals/Delete/indexed-value-sab-strict-strict.js
+fails-if(!xulRuntime.shell) script test262/language/expressions/class/subclass-builtins/subclass-SharedArrayBuffer.js
+fails-if(!xulRuntime.shell) script test262/language/statements/class/subclass-builtins/subclass-SharedArrayBuffer.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1648202
+skip script test262/built-ins/RegExp/named-groups/non-unicode-property-names-valid.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1761989
+skip script test262/built-ins/TypedArrayConstructors/ctors/no-species.js
+
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1763606
+skip script test262/built-ins/TypedArray/prototype/sort/sort-tonumber.js
+
+
+###########################################################
+# Tests disabled due to issues in test262 importer script #
+###########################################################
+
+# test262 importer merges all includes in a per directory shell.js file, breaking this harness test case.
+skip script test262/harness/detachArrayBuffer.js
+
+# Likewise here -- shell.js defines an |AsyncFunction| global property for
+# various tests in this directory that have an include that defines it, but this
+# particular test *doesn't* have that include and *shouldn't* define
+# |AsyncFunction| yet *checks* that no such global property exists, so it fails.
+# https://bugzilla.mozilla.org/show_bug.cgi?id=1672850
+skip script test262/built-ins/AsyncFunction/is-not-a-global.js
+
+skip script test262/language/module-code/top-level-await/await-dynamic-import-rejection.js  # Bug 1727925, 1726243
+
+####################################################
+# Tests disabled due to invalid test expectations  #
+####################################################
+
+# The test exposes a potential spec issue in InitializeDateTimeFormat:
+# Let's assume the locale is "en", adding likely subtags results in "en-Latn-US".
+# Consulting CLDR's supplementalData.xml shows for the "US" region:
+#   <hours preferred="h" allowed="h hb H hB" regions="[...] US [...]"/>
+# So preferred is "h" (= hour-cycle "h12") and additionally allowed is "H" (=hc "h23").
+# But InitializeDateTimeFormat defaults for hour12=false and hcDefault="h12" the
+# hour-cycle to "h24", which isn't allowed per the above CLDR data.
+# https://github.com/tc39/ecma402/issues/402
+skip script test262/intl402/DateTimeFormat/prototype/resolvedOptions/hourCycle-default.js
+
+# Not yet updated for https://github.com/tc39/proposal-intl-numberformat-v3/pull/85
+skip script test262/intl402/NumberFormat/constructor-roundingIncrement.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-2.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-5.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-10.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-20.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-50.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-25.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-100.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-200.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-250.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-500.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-1000.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-2000.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-2500.js
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-increment-5000.js
+
+# Not yet updated for https://github.com/tc39/proposal-intl-numberformat-v3/pull/92
+skip script test262/intl402/NumberFormat/test-option-useGrouping.js
+skip script test262/intl402/NumberFormat/test-option-useGrouping-extended.js
+
+# Missing "SharedArrayBuffer" features tag
+fails-if(!xulRuntime.shell) script test262/built-ins/SharedArrayBuffer/prototype/prop-desc.js
+
+# "morePrecision" has the same expected results as "lessPrecision", which can't be correct.
+skip script test262/intl402/NumberFormat/prototype/format/format-rounding-priority-more-precision.js
+
+
+# Update required for https://github.com/tc39/ecma262/pull/2550
+# See bug: https://bugzilla.mozilla.org/show_bug.cgi?id=1769088
+skip script test262/built-ins/Date/year-zero.js
+skip script test262/built-ins/Date/parse/year-zero.js
+
+
+##############################################
+# Enable Iterator Helpers tests in the shell #
+##############################################
+
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/asynciterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/proto.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/constructor.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/constructor-subclassable.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/constructor-throw-when-called-directly.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/constructor-throw-without-new.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/async-writes.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/check-fn-after-getting-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/coerce-result-to-boolean.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/error-from-correct-realm.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/fn-not-callable-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/fn-throws-close-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/interleaving-calls.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/next-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/proxy.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/return-true-if-all-match.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/short-circuit-on-false.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/every/value-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/async-writes.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/check-fn-after-getting-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/coerce-result-to-boolean.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/error-from-correct-realm.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/fn-not-callable-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/fn-throws-close-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/interleaving-calls.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/next-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/proxy.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/return-undefined-if-none-match.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/short-circuit-on-match.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/find/value-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/async-writes.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/check-fn-after-getting-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/error-from-correct-realm.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/fn-not-callable-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/fn-throws-close-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/forEach.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/interleaving-calls.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/next-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/proxy.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/forEach/value-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/async-writes.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/check-fn-after-getting-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/accumulator-set-to-initial-value.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/empty-iterator-without-initial-value-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/error-from-correct-realm.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/interleaving-calls.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/iterator-empty-return-initial-value.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/iterator-next-return-non-object-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/left-associative.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/next-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/no-initial-value-set-accumulator-to-first-value.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/proxy.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/reduce.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/reducer-not-callable-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/reducer-throws-iterator-closed.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/reduce/value-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/async-writes.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/check-fn-after-getting-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/coerce-result-to-boolean.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/error-from-correct-realm.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/fn-not-callable-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/fn-throws-close-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/interleaving-calls.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/next-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/proxy.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/return-false-if-none-match.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/short-circuit-on-true.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/some/value-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/async-writes.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/check-fn-after-getting-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/create-in-current-realm.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/interleaving-calls.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/iterator-empty.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/next-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/proxy.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/toArray.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/toArray/value-throws-iterator-not-closed.js
+
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/async-iterator-helpers-from-other-global.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/clobber-symbol.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/generator-methods-throw-on-iterator-helpers.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/iterator-helper-methods-throw-on-generators.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-from-other-global.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-handle-empty-iterators.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-interleaved.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-iterator-closed-on-call-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-iterator-closed-on-yield-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-iterator-not-closed-on-next-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-iterator-not-closed-on-next-returns-reject.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-iterator-not-closed-on-next-promise-executor-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-iterator-not-closed-on-next-then-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-iterator-not-closed-on-next-get-then-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-iterator-not-closed-on-value-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-iterator-returns-done-generator-finishes.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-multiple-return-close-iterator-once.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-multiple-throw-close-iterator-once.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-mutate-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-mutate-iterator-after-done.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-not-close-iterator-next-reject.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-pass-through-lastValue.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-pass-value-through-chain.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-proxy-accesses.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-return-closes-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-return-new-iterator-result.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-reentry-not-close-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-throw-closes-iterator-before-next.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-throw-eagerly-on-next-non-callable.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-throw-eagerly-on-non-callable.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-throw-eagerly-on-non-iterator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-throw-next-done-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-throw-next-not-object.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/lazy-methods-throw-on-reentry.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/take-drop-throw-eagerly-on-negative.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/take-drop-throw-eagerly-on-non-integer.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/asIndexedPairs/asIndexedPairs.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/asIndexedPairs/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/asIndexedPairs/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/drop/drop.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/drop/drop-more-than-available.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/drop/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/drop/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/filter/coerce-result-to-boolean.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/filter/filter.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/filter/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/filter/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/flatMap/close-iterator-when-inner-complete-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/flatMap/close-iterator-when-inner-next-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/flatMap/close-iterator-when-inner-value-throws.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/flatMap/flatMap.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/flatMap/inner-empty-iterable.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/flatMap/inner-generator.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/flatMap/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/flatMap/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/flatMap/throw-when-inner-not-iterable.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/map/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/map/map.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/map/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/take/close-iterator-when-none-remaining.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/take/length.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/take/name.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/take/take.js
+shell-option(--enable-iterator-helpers) script non262/AsyncIterator/prototype/take/take-more-than-available.js
+
+shell-option(--enable-iterator-helpers) script non262/Iterator/iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/proto.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/constructor.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/constructor-subclassable.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/constructor-throw-when-called-directly.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/constructor-throw-without-new.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/call-from-with-different-this.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/Iterator.from-descriptor.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/Iterator.from-length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/Iterator.from-name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/iterator-not-callable-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/modify-next.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/modify-return.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/modify-throw.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/o-not-object-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/proxy-not-wrapped.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/proxy-wrap-next.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/proxy-wrap-return.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/proxy-wrap-throw.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/return-iterator-if-iterable.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/return-wrapper-if-not-iterable.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/return-wrapper-if-not-iterator-instance.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/wrap-functions-on-other-global.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/wrap-method-with-non-wrap-this-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/wrap-next-forwards-value.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/wrap-next-not-object-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/wrap-new-global.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/wrap-return-closes-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/from/wrap-throw.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/check-fn-after-getting-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/coerce-result-to-boolean.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/error-from-correct-realm.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/fn-not-callable-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/fn-throws-close-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/next-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/proxy.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/return-true-if-all-match.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/short-circuit-on-false.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/every/value-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/check-fn-after-getting-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/coerce-result-to-boolean.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/error-from-correct-realm.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/fn-not-callable-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/fn-throws-close-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/next-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/proxy.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/return-undefined-if-none-match.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/short-circuit-on-match.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/find/value-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/check-fn-after-getting-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/error-from-correct-realm.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/fn-not-callable-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/fn-throws-close-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/forEach.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/next-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/proxy.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/forEach/value-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/check-fn-after-getting-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/accumulator-set-to-initial-value.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/empty-iterator-without-initial-value-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/error-from-correct-realm.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/iterator-empty-return-initial-value.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/iterator-next-return-non-object-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/left-associative.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/next-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/no-initial-value-set-accumulator-to-first-value.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/proxy.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/reduce.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/reducer-not-callable-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/reducer-throws-iterator-closed.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/reduce/value-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/check-fn-after-getting-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/coerce-result-to-boolean.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/error-from-correct-realm.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/fn-not-callable-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/fn-throws-close-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/next-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/proxy.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/return-false-if-none-match.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/short-circuit-on-true.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/some/value-throws-iterator-not-closed.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/toArray/create-in-current-realm.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/toArray/descriptor.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/toArray/iterator-empty.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/toArray/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/toArray/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/toArray/next-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/toArray/proxy.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/toArray/this-not-iterator-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/toArray/toArray.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/toArray/value-throws-iterator-not-closed.js
+
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/generator-methods-throw-on-iterator-helpers.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/iterator-helpers-from-other-global.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/iterator-helper-methods-throw-on-generators.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-from-other-global.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-handle-empty-iterators.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-interleaved.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-iterator-closed-on-call-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-iterator-closed-on-yield-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-iterator-not-closed-on-next-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-iterator-not-closed-on-value-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-iterator-returns-done-generator-finishes.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-multiple-return-close-iterator-once.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-multiple-throw-close-iterator-once.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-pass-through-lastValue.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-pass-value-through-chain.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-proxy-accesses.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-return-closes-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-return-new-iterator-result.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-reentry-not-close-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-throw-closes-iterator-before-next.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-throw-eagerly-on-next-non-callable.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-throw-eagerly-on-non-callable.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-throw-eagerly-on-non-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-throw-next-done-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-throw-next-not-object.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/lazy-methods-throw-on-reentry.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/take-drop-throw-eagerly-on-negative.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/take-drop-throw-eagerly-on-non-integer.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/asIndexedPairs/asIndexedPairs.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/asIndexedPairs/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/asIndexedPairs/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/drop/drop.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/drop/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/drop/drop-more-than-available.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/drop/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/filter/coerce-result-to-boolean.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/filter/filter.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/filter/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/filter/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/flatMap/close-iterator-when-inner-complete-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/flatMap/close-iterator-when-inner-next-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/flatMap/close-iterator-when-inner-value-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/flatMap/flatMap.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/flatMap/inner-empty-iterable.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/flatMap/inner-generator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/flatMap/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/flatMap/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/flatMap/throw-when-inner-not-iterable.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/call-next-on-iterator-while-iterating.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/clobber-symbol.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/interleaved-map-calls.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/map.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/mapper-not-callable-throw.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/mutate-iterator-after-done.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/mutate-iterator.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/output-at-generator-end.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/pass-lastValue-to-next.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/prototype.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/proxy-abrupt-completion-in-iteratorValue.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/proxy-abrupt-completion-in-yield.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/proxy-abrupt-completion.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/proxy-accesses.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/reenter-map-generator-from-mapper.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/this-not-iterator-throw.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/this-value-array-throws.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/throw-when-iterator-returns-non-object.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/map/values-pass-through-chained-maps-to-next.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/take/close-iterator-when-none-remaining.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/take/length.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/take/name.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/take/take.js
+shell-option(--enable-iterator-helpers) script non262/Iterator/prototype/take/take-more-than-available.js
+
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-soname-patch/create.patch.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-soname-patch/create.patch.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-soname-patch/create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+VERSION=102.15.0
+
+tar --files-from=file.list -xJvf ../firefox-${VERSION}esr.source.tar.xz
+mv firefox-$VERSION firefox-$VERSION-orig
+
+cp -rf ./firefox-$VERSION-new ./firefox-$VERSION
+
+diff --unified -Nr  firefox-$VERSION-orig  firefox-$VERSION > firefox-$VERSION-soname.patch
+
+mv firefox-$VERSION-soname.patch ../patches
+
+rm -rf ./firefox-$VERSION
+rm -rf ./firefox-$VERSION-orig

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-soname-patch/create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-soname-patch/file.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-soname-patch/file.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-soname-patch/file.list	(revision 228)
@@ -0,0 +1 @@
+firefox-102.15.0/config/rules.mk
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-soname-patch/firefox-102.15.0-new/config/rules.mk
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-soname-patch/firefox-102.15.0-new/config/rules.mk	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-soname-patch/firefox-102.15.0-new/config/rules.mk	(revision 228)
@@ -0,0 +1,1144 @@
+# -*- makefile -*-
+# vim:set ts=8 sw=8 sts=8 noet:
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+#
+
+ifndef topsrcdir
+$(error topsrcdir was not set)
+endif
+
+# Define an include-at-most-once flag
+ifdef INCLUDED_RULES_MK
+$(error Do not include rules.mk twice!)
+endif
+INCLUDED_RULES_MK = 1
+
+ifndef INCLUDED_CONFIG_MK
+include $(topsrcdir)/config/config.mk
+endif
+
+USE_AUTOTARGETS_MK = 1
+include $(MOZILLA_DIR)/config/makefiles/makeutils.mk
+
+ifdef REBUILD_CHECK
+REPORT_BUILD = $(info $(shell $(PYTHON3) $(MOZILLA_DIR)/config/rebuild_check.py $@ $^))
+REPORT_BUILD_VERBOSE = $(REPORT_BUILD)
+else
+REPORT_BUILD = $(info $(relativesrcdir)/$(notdir $@))
+
+ifdef BUILD_VERBOSE_LOG
+REPORT_BUILD_VERBOSE = $(REPORT_BUILD)
+else
+REPORT_BUILD_VERBOSE = $(call BUILDSTATUS,BUILD_VERBOSE $(relativesrcdir))
+endif
+
+endif
+
+EXEC			= exec
+
+################################################################################
+# Testing frameworks support
+################################################################################
+
+testxpcobjdir = $(DEPTH)/_tests/xpcshell
+
+ifdef ENABLE_TESTS
+ifdef CPP_UNIT_TESTS
+ifdef COMPILE_ENVIRONMENT
+
+# Compile the tests to $(DIST)/bin.  Make lots of niceties available by default
+# through TestHarness.h, by modifying the list of includes and the libs against
+# which stuff links.
+SIMPLE_PROGRAMS += $(CPP_UNIT_TESTS)
+
+ifndef MOZ_PROFILE_GENERATE
+CPP_UNIT_TESTS_FILES = $(CPP_UNIT_TESTS)
+CPP_UNIT_TESTS_DEST = $(DIST)/cppunittests
+CPP_UNIT_TESTS_TARGET = target
+INSTALL_TARGETS += CPP_UNIT_TESTS
+endif
+
+run-cppunittests::
+	@$(PYTHON3) $(MOZILLA_DIR)/testing/runcppunittests.py --xre-path=$(DIST)/bin --symbols-path=$(DIST)/crashreporter-symbols $(CPP_UNIT_TESTS)
+
+cppunittests-remote:
+	$(PYTHON3) -u $(MOZILLA_DIR)/testing/remotecppunittests.py \
+		--xre-path=$(DEPTH)/dist/bin \
+		--localLib=$(DEPTH)/dist/$(MOZ_APP_NAME) \
+		--deviceIP=${TEST_DEVICE} \
+		$(CPP_UNIT_TESTS) $(EXTRA_TEST_ARGS); \
+
+endif # COMPILE_ENVIRONMENT
+endif # CPP_UNIT_TESTS
+endif # ENABLE_TESTS
+
+
+#
+# Library rules
+#
+# If FORCE_STATIC_LIB is set, build a static library.
+# Otherwise, build a shared library.
+#
+
+ifndef LIBRARY
+ifdef REAL_LIBRARY
+ifdef NO_EXPAND_LIBS
+# Only build actual library if it is requested.
+LIBRARY			:= $(REAL_LIBRARY)
+endif
+endif
+endif
+
+ifdef FORCE_SHARED_LIB
+ifdef MKSHLIB
+
+ifdef LIB_IS_C_ONLY
+MKSHLIB			= $(MKCSHLIB)
+endif
+
+endif # MKSHLIB
+endif # FORCE_SHARED_LIB
+
+ifeq ($(OS_ARCH),WINNT)
+
+#
+# This next line captures both the default (non-MOZ_COPY_PDBS)
+# case as well as the MOZ_COPY_PDBS-for-mingwclang case.
+#
+# For the default case, placing the pdb in the build
+# directory is needed.
+#
+# For the MOZ_COPY_PDBS, non-mingwclang case - we need to
+# build the pdb next to the executable (handled in the if
+# statement immediately below.)
+#
+# For the MOZ_COPY_PDBS, mingwclang case - we also need to
+# build the pdb next to the executable, but this macro doesn't
+# work for jsapi-tests which is a little special, so we specify
+# the output directory below with MOZ_PROGRAM_LDFLAGS.
+#
+LINK_PDBFILE ?= $(basename $(@F)).pdb
+
+ifdef MOZ_COPY_PDBS
+ifneq ($(CC_TYPE),clang)
+LINK_PDBFILE = $(basename $@).pdb
+endif
+endif
+
+ifndef GNU_CC
+
+ifdef SIMPLE_PROGRAMS
+COMPILE_PDB_FLAG ?= -Fd$(basename $(@F)).pdb
+COMPILE_CFLAGS += $(COMPILE_PDB_FLAG)
+COMPILE_CXXFLAGS += $(COMPILE_PDB_FLAG)
+endif
+
+ifdef MOZ_DEBUG
+CODFILE=$(basename $(@F)).cod
+endif
+
+endif # !GNU_CC
+endif # WINNT
+
+ifeq (arm-Darwin,$(CPU_ARCH)-$(OS_TARGET))
+ifdef PROGRAM
+MOZ_PROGRAM_LDFLAGS += -Wl,-rpath -Wl,@executable_path/Frameworks
+endif
+endif
+
+# For Mac executables, set the @rpath to be @executable_path by default so that
+# shared libraries built with an @rpath install name in the same directory
+# as the executable can be resolved. Executables not in the same directory
+# should override the @rpath with a relative path such as @executable_path/../
+# depending on their install location.
+ifeq ($(OS_ARCH),Darwin)
+MOZ_PROGRAM_LDFLAGS += -Wl,-rpath,@executable_path
+endif
+
+ifeq ($(OS_ARCH),WINNT)
+ifeq ($(CC_TYPE),clang)
+MOZ_PROGRAM_LDFLAGS += -Wl,-pdb,$(dir $@)/$(LINK_PDBFILE)
+endif
+endif
+
+ifeq ($(HOST_OS_ARCH),WINNT)
+HOST_PDBFILE=$(basename $(@F)).pdb
+HOST_PDB_FLAG ?= -PDB:$(HOST_PDBFILE)
+HOST_C_LDFLAGS += -DEBUG $(HOST_PDB_FLAG)
+HOST_CXX_LDFLAGS += -DEBUG $(HOST_PDB_FLAG)
+endif
+
+# Don't build SIMPLE_PROGRAMS during the MOZ_PROFILE_GENERATE pass, and do not
+# attempt to install them
+ifdef MOZ_PROFILE_GENERATE
+$(foreach category,$(INSTALL_TARGETS),\
+  $(eval $(category)_FILES := $(foreach file,$($(category)_FILES),$(if $(filter $(SIMPLE_PROGRAMS),$(notdir $(file))),,$(file)))))
+SIMPLE_PROGRAMS :=
+endif
+
+ifdef COMPILE_ENVIRONMENT
+ifndef TARGETS
+TARGETS			= $(LIBRARY) $(SHARED_LIBRARY) $(PROGRAM) $(SIMPLE_PROGRAMS) $(HOST_PROGRAM) $(HOST_SIMPLE_PROGRAMS) $(HOST_SHARED_LIBRARY)
+endif
+
+COBJS = $(notdir $(CSRCS:.c=.$(OBJ_SUFFIX)))
+CWASMOBJS = $(notdir $(WASM_CSRCS:.c=.$(WASM_OBJ_SUFFIX)))
+SOBJS = $(notdir $(SSRCS:.S=.$(OBJ_SUFFIX)))
+# CPPSRCS can have different extensions (eg: .cpp, .cc)
+CPPOBJS = $(notdir $(addsuffix .$(OBJ_SUFFIX),$(basename $(CPPSRCS))))
+CPPWASMOBJS = $(notdir $(addsuffix .$(WASM_OBJ_SUFFIX),$(basename $(WASM_CPPSRCS))))
+CMOBJS = $(notdir $(CMSRCS:.m=.$(OBJ_SUFFIX)))
+CMMOBJS = $(notdir $(CMMSRCS:.mm=.$(OBJ_SUFFIX)))
+# ASFILES can have different extensions (.s, .asm)
+ASOBJS = $(notdir $(addsuffix .$(OBJ_SUFFIX),$(basename $(ASFILES))))
+RS_STATICLIB_CRATE_OBJ = $(addprefix lib,$(notdir $(RS_STATICLIB_CRATE_SRC:.rs=.$(LIB_SUFFIX))))
+ifndef OBJS
+_OBJS = $(COBJS) $(SOBJS) $(CPPOBJS) $(CMOBJS) $(CMMOBJS) $(ASOBJS) $(CWASMOBJS) $(CPPWASMOBJS)
+OBJS = $(strip $(_OBJS))
+endif
+
+HOST_COBJS = $(addprefix host_,$(notdir $(HOST_CSRCS:.c=.$(OBJ_SUFFIX))))
+# HOST_CPPOBJS can have different extensions (eg: .cpp, .cc)
+HOST_CPPOBJS = $(addprefix host_,$(notdir $(addsuffix .$(OBJ_SUFFIX),$(basename $(HOST_CPPSRCS)))))
+HOST_CMOBJS = $(addprefix host_,$(notdir $(HOST_CMSRCS:.m=.$(OBJ_SUFFIX))))
+HOST_CMMOBJS = $(addprefix host_,$(notdir $(HOST_CMMSRCS:.mm=.$(OBJ_SUFFIX))))
+ifndef HOST_OBJS
+_HOST_OBJS = $(HOST_COBJS) $(HOST_CPPOBJS) $(HOST_CMOBJS) $(HOST_CMMOBJS)
+HOST_OBJS = $(strip $(_HOST_OBJS))
+endif
+else
+LIBRARY :=
+SHARED_LIBRARY :=
+IMPORT_LIBRARY :=
+REAL_LIBRARY :=
+PROGRAM :=
+SIMPLE_PROGRAMS :=
+HOST_SHARED_LIBRARY :=
+HOST_PROGRAM :=
+HOST_SIMPLE_PROGRAMS :=
+endif
+
+ifdef MACH
+ifndef NO_BUILDSTATUS_MESSAGES
+define BUILDSTATUS
+@echo 'BUILDSTATUS $1'
+
+endef
+endif
+endif
+
+define SUBMAKE # $(call SUBMAKE,target,directory,static)
++@$(MAKE) $(if $(2),-C $(2)) $(1)
+
+endef # The extra line is important here! don't delete it
+
+define TIER_DIR_SUBMAKE
+$(call SUBMAKE,$(4),$(3),$(5))
+
+endef # This empty line is important.
+
+ifneq (,$(strip $(DIRS)))
+LOOP_OVER_DIRS = \
+  $(foreach dir,$(DIRS),$(call SUBMAKE,$@,$(dir)))
+endif
+
+#
+# Now we can differentiate between objects used to build a library, and
+# objects used to build an executable in the same directory.
+#
+ifndef PROGOBJS
+PROGOBJS		= $(OBJS)
+endif
+
+ifndef HOST_PROGOBJS
+HOST_PROGOBJS		= $(HOST_OBJS)
+endif
+
+#
+# Tags: emacs (etags), vi (ctags)
+# TAG_PROGRAM := ctags -L -
+#
+TAG_PROGRAM		= xargs etags -a
+
+#
+# Turn on C++ linking if we have any .cpp or .mm files
+# (moved this from config.mk so that config.mk can be included
+#  before the CPPSRCS are defined)
+#
+ifneq ($(HOST_CPPSRCS)$(HOST_CMMSRCS),)
+HOST_CPP_PROG_LINK	= 1
+endif
+
+#
+# MacOS X specific stuff
+#
+
+ifeq ($(OS_ARCH),Darwin)
+ifneq (,$(SHARED_LIBRARY))
+_LOADER_PATH := @rpath
+EXTRA_DSO_LDOPTS	+= -dynamiclib -install_name $(_LOADER_PATH)/$@ -compatibility_version 1 -current_version 1
+endif
+endif
+
+#
+# GNU doesn't have path length limitation
+#
+
+ifeq ($(OS_ARCH),GNU)
+OS_CPPFLAGS += -DPATH_MAX=1024 -DMAXPATHLEN=1024
+endif
+
+EXTRA_DSO_LDOPTS += -Wl,-soname,lib$(JS_LIBRARY_NAME).so.0
+
+#
+# MINGW32
+#
+ifeq ($(OS_ARCH),WINNT)
+ifdef GNU_CC
+DSO_LDOPTS += -Wl,--out-implib -Wl,$(IMPORT_LIBRARY)
+endif
+endif
+
+ifeq ($(USE_TVFS),1)
+IFLAGS1 = -rb
+IFLAGS2 = -rb
+else
+IFLAGS1 = -m 644
+IFLAGS2 = -m 755
+endif
+
+ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
+OUTOPTION = -Fo# eol
+else
+OUTOPTION = -o # eol
+endif # WINNT && !GNU_CC
+
+ifeq (,$(CROSS_COMPILE))
+HOST_OUTOPTION = $(OUTOPTION)
+else
+# Windows-to-Windows cross compiles should always use MSVC-style options for
+# host compiles.
+ifeq (WINNT_WINNT,$(HOST_OS_ARCH)_$(OS_ARCH))
+ifneq (,$(filter-out clang-cl,$(HOST_CC_TYPE)))
+$(error MSVC-style compilers should be used for host compilations!)
+endif
+HOST_OUTOPTION = -Fo# eol
+else
+HOST_OUTOPTION = -o # eol
+endif
+endif
+################################################################################
+
+# Ensure the build config is up to date. This is done automatically when builds
+# are performed through |mach build|. The check here is to catch people not
+# using mach. If we ever enforce builds through mach, this code can be removed.
+ifndef MOZBUILD_BACKEND_CHECKED
+ifndef MACH
+ifndef TOPLEVEL_BUILD
+BUILD_BACKEND_FILES := $(addprefix $(DEPTH)/backend.,$(addsuffix Backend,$(BUILD_BACKENDS)))
+$(DEPTH)/backend.%Backend:
+	$(error Build configuration changed. Build with |mach build| or run |mach build-backend| to regenerate build config)
+
+define build_backend_rule
+$(1): $$(shell cat $(1).in)
+
+endef
+$(foreach file,$(BUILD_BACKEND_FILES),$(eval $(call build_backend_rule,$(file))))
+
+default:: $(BUILD_BACKEND_FILES)
+
+export MOZBUILD_BACKEND_CHECKED=1
+endif
+endif
+endif
+
+# The root makefile doesn't want to do a plain export/libs, because
+# of the tiers and because of libxul. Suppress the default rules in favor
+# of something else. Makefiles which use this var *must* provide a sensible
+# default rule before including rules.mk
+default all::
+	$(foreach tier,$(TIERS),$(call SUBMAKE,$(tier)))
+
+ifdef BUILD_VERBOSE_LOG
+ECHO := echo
+QUIET :=
+else
+ECHO := true
+QUIET := -q
+endif
+
+# Dependencies which, if modified, should cause everything to rebuild
+GLOBAL_DEPS += Makefile $(addprefix $(DEPTH)/config/,$(INCLUDED_AUTOCONF_MK)) $(MOZILLA_DIR)/config/config.mk
+
+ifeq ($(MOZ_WIDGET_TOOLKIT),windows)
+# We always build .res files for programs and shared libraries
+resfile = $(notdir $1).res
+# We also build .res files for simple programs if a corresponding manifest
+# exists. We'll generate a .rc file that includes the manifest.
+ifdef GNU_CC
+# Skip on mingw builds because of bug 1657863
+resfile_for_manifest =
+else
+resfile_for_manifest = $(if $(wildcard $(srcdir)/$(notdir $1).manifest),$(call resfile,$1))
+endif
+else
+resfile =
+resfile_for_manifest =
+endif
+
+##############################################
+ifdef COMPILE_ENVIRONMENT
+compile:: host target
+
+host:: $(HOST_OBJS) $(HOST_PROGRAM) $(HOST_SIMPLE_PROGRAMS) $(HOST_RUST_PROGRAMS) $(HOST_RUST_LIBRARY_FILE) $(HOST_SHARED_LIBRARY)
+
+target:: $(filter-out $(MOZBUILD_NON_DEFAULT_TARGETS),$(LIBRARY) $(SHARED_LIBRARY) $(PROGRAM) $(SIMPLE_PROGRAMS) $(RUST_LIBRARY_FILE) $(RUST_PROGRAMS))
+
+ifndef LIBRARY
+ifdef OBJS
+target:: $(OBJS)
+endif
+endif
+
+target-objects: $(OBJS) $(PROGOBJS)
+host-objects: $(HOST_OBJS) $(HOST_PROGOBJS)
+
+syms::
+
+include $(MOZILLA_DIR)/config/makefiles/target_binaries.mk
+endif
+
+alltags:
+	$(RM) TAGS
+	find $(topsrcdir) -name dist -prune -o \( -name '*.[hc]' -o -name '*.cp' -o -name '*.cpp' -o -name '*.idl' \) -print | $(TAG_PROGRAM)
+
+define EXPAND_CC_OR_CXX
+$(if $(PROG_IS_C_ONLY_$(1)),$(CC),$(CCC))
+endef
+
+#
+# PROGRAM = Foo
+# creates OBJS, links with LIBS to create Foo
+#
+$(PROGRAM): $(PROGOBJS) $(STATIC_LIBS) $(EXTRA_DEPS) $(call resfile,$(PROGRAM)) $(GLOBAL_DEPS) $(call mkdir_deps,$(FINAL_TARGET))
+	$(REPORT_BUILD)
+ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
+	$(LINKER) -OUT:$@ -PDB:$(LINK_PDBFILE) -IMPLIB:$(basename $(@F)).lib $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(MOZ_PROGRAM_LDFLAGS) $($(notdir $@)_OBJS) $(filter %.res,$^) $(STATIC_LIBS) $(SHARED_LIBS) $(OS_LIBS)
+else # !WINNT || GNU_CC
+	$(call EXPAND_CC_OR_CXX,$@) -o $@ $(COMPUTED_CXX_LDFLAGS) $(PGO_CFLAGS) $($(notdir $@)_OBJS) $(filter %.res,$^) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(STATIC_LIBS) $(MOZ_PROGRAM_LDFLAGS) $(SHARED_LIBS) $(OS_LIBS)
+	$(call py_action,check_binary,--target $@)
+endif # WINNT && !GNU_CC
+
+ifdef ENABLE_STRIP
+	$(STRIP) $(STRIP_FLAGS) $@
+endif
+ifdef MOZ_POST_PROGRAM_COMMAND
+	$(MOZ_POST_PROGRAM_COMMAND) $@
+endif
+
+$(HOST_PROGRAM): $(HOST_PROGOBJS) $(HOST_LIBS) $(HOST_EXTRA_DEPS) $(GLOBAL_DEPS) $(call mkdir_deps,$(DEPTH)/dist/host/bin)
+	$(REPORT_BUILD)
+ifeq (_WINNT,$(GNU_CC)_$(HOST_OS_ARCH))
+	$(HOST_LINKER) -OUT:$@ -PDB:$(HOST_PDBFILE) $($(notdir $@)_OBJS) $(WIN32_EXE_LDFLAGS) $(HOST_LDFLAGS) $(HOST_LINKER_LIBPATHS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
+else
+ifeq ($(HOST_CPP_PROG_LINK),1)
+	$(HOST_CXX) -o $@ $(HOST_CXX_LDFLAGS) $(HOST_LDFLAGS) $($(notdir $@)_OBJS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
+else
+	$(HOST_CC) -o $@ $(HOST_C_LDFLAGS) $(HOST_LDFLAGS) $($(notdir $@)_OBJS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
+endif # HOST_CPP_PROG_LINK
+endif
+ifndef CROSS_COMPILE
+	$(call py_action,check_binary,--host $@)
+endif
+
+#
+# This is an attempt to support generation of multiple binaries
+# in one directory, it assumes everything to compile Foo is in
+# Foo.o (from either Foo.c or Foo.cpp).
+#
+# SIMPLE_PROGRAMS = Foo Bar
+# creates Foo.o Bar.o, links with LIBS to create Foo, Bar.
+#
+define simple_program_deps
+$1: $(1:$(BIN_SUFFIX)=.$(OBJ_SUFFIX)) $(STATIC_LIBS) $(EXTRA_DEPS) $(call resfile_for_manifest,$1) $(GLOBAL_DEPS)
+endef
+$(foreach p,$(SIMPLE_PROGRAMS),$(eval $(call simple_program_deps,$(p))))
+
+$(SIMPLE_PROGRAMS):
+	$(REPORT_BUILD)
+ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
+	$(LINKER) -out:$@ -pdb:$(LINK_PDBFILE) $($@_OBJS) $(filter %.res,$^) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(MOZ_PROGRAM_LDFLAGS) $(STATIC_LIBS) $(SHARED_LIBS) $(OS_LIBS)
+else
+	$(call EXPAND_CC_OR_CXX,$@) $(COMPUTED_CXX_LDFLAGS) $(PGO_CFLAGS) -o $@ $($@_OBJS) $(filter %.res,$^) $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(STATIC_LIBS) $(MOZ_PROGRAM_LDFLAGS) $(SHARED_LIBS) $(OS_LIBS)
+	$(call py_action,check_binary,--target $@)
+endif # WINNT && !GNU_CC
+
+ifdef ENABLE_STRIP
+	$(STRIP) $(STRIP_FLAGS) $@
+endif
+ifdef MOZ_POST_PROGRAM_COMMAND
+	$(MOZ_POST_PROGRAM_COMMAND) $@
+endif
+
+$(HOST_SIMPLE_PROGRAMS): host_%$(HOST_BIN_SUFFIX): $(HOST_LIBS) $(HOST_EXTRA_DEPS) $(GLOBAL_DEPS)
+	$(REPORT_BUILD)
+ifeq (WINNT_,$(HOST_OS_ARCH)_$(GNU_CC))
+	$(HOST_LINKER) -OUT:$@ -PDB:$(HOST_PDBFILE) $($(notdir $@)_OBJS) $(WIN32_EXE_LDFLAGS) $(HOST_LDFLAGS) $(HOST_LINKER_LIBPATHS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
+else
+ifneq (,$(HOST_CPPSRCS)$(USE_HOST_CXX))
+	$(HOST_CXX) $(HOST_OUTOPTION)$@ $(HOST_CXX_LDFLAGS) $($(notdir $@)_OBJS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
+else
+	$(HOST_CC) $(HOST_OUTOPTION)$@ $(HOST_C_LDFLAGS) $($(notdir $@)_OBJS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
+endif
+endif
+ifndef CROSS_COMPILE
+	$(call py_action,check_binary,--host $@)
+endif
+
+$(LIBRARY): $(OBJS) $(STATIC_LIBS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
+	$(REPORT_BUILD)
+	$(RM) $(REAL_LIBRARY)
+	$(AR) $(AR_FLAGS) $($@_OBJS)
+
+$(WASM_ARCHIVE): $(CWASMOBJS) $(CPPWASMOBJS) $(STATIC_LIBS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
+	$(REPORT_BUILD_VERBOSE)
+	$(RM) $(WASM_ARCHIVE)
+	$(WASM_CXX) -o $@ -Wl,--export-all -Wl,--stack-first -Wl,-z,stack-size=$(if $(MOZ_OPTIMIZE),262144,1048576) -Wl,--no-entry -Wl,--growable-table $(CWASMOBJS) $(CPPWASMOBJS) -lwasi-emulated-process-clocks
+
+$(addsuffix .c,$(WASM_ARCHIVE)): $(WASM_ARCHIVE) $(DIST)/host/bin/wasm2c$(HOST_BIN_SUFFIX)
+	$(DIST)/host/bin/wasm2c$(HOST_BIN_SUFFIX) -o $@ $<
+
+ifeq ($(OS_ARCH),WINNT)
+# Import libraries are created by the rules creating shared libraries.
+# The rules to copy them to $(DIST)/lib depend on $(IMPORT_LIBRARY),
+# but make will happily consider the import library before it is refreshed
+# when rebuilding the corresponding shared library. Defining an empty recipe
+# for import libraries forces make to wait for the shared library recipe to
+# have run before considering other targets that depend on the import library.
+# See bug 795204.
+$(IMPORT_LIBRARY): $(SHARED_LIBRARY) ;
+endif
+
+$(HOST_SHARED_LIBRARY): Makefile
+	$(REPORT_BUILD)
+	$(RM) $@
+ifneq (,$(filter clang-cl,$(HOST_CC_TYPE)))
+	$(HOST_LINKER) -DLL -OUT:$@ $($(notdir $@)_OBJS) $(HOST_CXX_LDFLAGS) $(HOST_LDFLAGS) $(HOST_LINKER_LIBPATHS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
+else
+	$(HOST_CXX) $(HOST_OUTOPTION)$@ $($(notdir $@)_OBJS) $(HOST_CXX_LDFLAGS) $(HOST_LDFLAGS) $(HOST_LIBS) $(HOST_EXTRA_LIBS)
+endif
+
+# On Darwin (Mac OS X), dwarf2 debugging uses debug info left in .o files,
+# so instead of deleting .o files after repacking them into a dylib, we make
+# symlinks back to the originals. The symlinks are a no-op for stabs debugging,
+# so no need to conditionalize on OS version or debugging format.
+
+$(SHARED_LIBRARY): $(OBJS) $(call resfile,$(SHARED_LIBRARY)) $(STATIC_LIBS) $(EXTRA_DEPS) $(GLOBAL_DEPS)
+	$(REPORT_BUILD)
+	$(RM) $@
+	$(MKSHLIB) $($@_OBJS) $(filter %.res,$^) $(LDFLAGS) $(STATIC_LIBS) $(SHARED_LIBS) $(EXTRA_DSO_LDOPTS) $(MOZ_GLUE_LDFLAGS) $(OS_LIBS)
+	$(call py_action,check_binary,--target $@)
+
+ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH))
+endif	# WINNT && !GCC
+	chmod +x $@
+ifdef ENABLE_STRIP
+	$(STRIP) $(STRIP_FLAGS) $@
+endif
+
+# The object file is in the current directory, and the source file can be any
+# relative path. This macro adds the dependency obj: src for each source file.
+# This dependency must be first for the $< flag to work correctly, and the
+# rules that have commands for these targets must not list any other
+# prerequisites, or they will override the $< variable.
+define src_objdep
+$(basename $3$(notdir $1)).$2: $1 $$(call mkdir_deps,$$(MDDEPDIR))
+endef
+$(foreach f,$(CSRCS) $(SSRCS) $(CPPSRCS) $(CMSRCS) $(CMMSRCS) $(ASFILES),$(eval $(call src_objdep,$(f),$(OBJ_SUFFIX))))
+$(foreach f,$(HOST_CSRCS) $(HOST_CPPSRCS) $(HOST_CMSRCS) $(HOST_CMMSRCS),$(eval $(call src_objdep,$(f),$(OBJ_SUFFIX),host_)))
+$(foreach f,$(WASM_CSRCS) $(WASM_CPPSRCS),$(eval $(call src_objdep,$(f),wasm)))
+
+# The Rust compiler only outputs library objects, and so we need different
+# mangling to generate dependency rules for it.
+mk_global_crate_libname = $(basename lib$(notdir $1)).$(LIB_SUFFIX)
+crate_src_libdep = $(call mk_global_crate_libname,$1): $1 $$(call mkdir_deps,$$(MDDEPDIR))
+$(foreach f,$(RS_STATICLIB_CRATE_SRC),$(eval $(call crate_src_libdep,$(f))))
+
+$(OBJS) $(HOST_OBJS) $(PROGOBJS) $(HOST_PROGOBJS): $(GLOBAL_DEPS)
+
+# Rules for building native targets must come first because of the host_ prefix
+$(HOST_COBJS):
+	$(REPORT_BUILD_VERBOSE)
+	$(HOST_CC) $(HOST_OUTOPTION)$@ -c $(HOST_CPPFLAGS) $(HOST_CFLAGS) $(NSPR_CFLAGS) $<
+
+$(HOST_CPPOBJS):
+	$(REPORT_BUILD_VERBOSE)
+	$(call BUILDSTATUS,OBJECT_FILE $@)
+	$(HOST_CXX) $(HOST_OUTOPTION)$@ -c $(HOST_CPPFLAGS) $(HOST_CXXFLAGS) $(NSPR_CFLAGS) $<
+
+$(HOST_CMOBJS):
+	$(REPORT_BUILD_VERBOSE)
+	$(HOST_CC) $(HOST_OUTOPTION)$@ -c $(HOST_CPPFLAGS) $(HOST_CFLAGS) $(HOST_CMFLAGS) $(NSPR_CFLAGS) $<
+
+$(HOST_CMMOBJS):
+	$(REPORT_BUILD_VERBOSE)
+	$(HOST_CXX) $(HOST_OUTOPTION)$@ -c $(HOST_CPPFLAGS) $(HOST_CXXFLAGS) $(HOST_CMMFLAGS) $(NSPR_CFLAGS) $<
+
+$(COBJS):
+	$(REPORT_BUILD_VERBOSE)
+	$(CC) $(OUTOPTION)$@ -c $(COMPILE_CFLAGS) $($(notdir $<)_FLAGS) $<
+
+$(CWASMOBJS):
+	$(REPORT_BUILD_VERBOSE)
+	$(WASM_CC) -o $@ -c $(WASM_CFLAGS) $($(notdir $<)_FLAGS) $<
+
+WINEWRAP = $(if $(and $(filter %.exe,$1),$(WINE)),$(WINE) $1,$1)
+
+# Windows program run via Wine don't like Unix absolute paths (they look
+# like command line arguments). So when needed, create relative paths
+# from absolute paths. We start with $(DEPTH), which gets us to topobjdir,
+# then add "/.." for each component of topobjdir, which gets us to /.
+# then we can add the absolute path after that and we have a relative path,
+# albeit longer than it could be.
+ifdef WINE
+relativize = $(if $(filter /%,$1),$(DEPTH)$(subst $(space),,$(foreach d,$(subst /, ,$(topobjdir)),/..))$1,$1)
+else
+relativize = $1
+endif
+
+ifdef ASFILES
+# The AS_DASH_C_FLAG is needed cause not all assemblers (Solaris) accept
+# a '-c' flag.
+$(ASOBJS):
+	$(REPORT_BUILD_VERBOSE)
+	$(call WINEWRAP,$(AS)) $(ASOUTOPTION)$@ $(ASFLAGS) $($(notdir $<)_FLAGS) $(AS_DASH_C_FLAG) $(call relativize,$<)
+endif
+
+define syms_template
+syms:: $(2)
+$(2): $(1)
+ifdef MOZ_CRASHREPORTER
+	$$(call py_action,dumpsymbols,$$(abspath $$<) $$(abspath $$@) $$(DUMP_SYMBOLS_FLAGS))
+ifeq ($(OS_ARCH),WINNT)
+ifdef WINCHECKSEC
+	$$(PYTHON3) $$(topsrcdir)/build/win32/autowinchecksec.py $$<
+endif # WINCHECKSEC
+endif # WINNT
+endif
+endef
+
+ifneq (,$(filter $(DIST)/bin%,$(FINAL_TARGET)))
+DUMP_SYMS_TARGETS := $(SHARED_LIBRARY) $(PROGRAM) $(SIMPLE_PROGRAMS)
+endif
+
+ifdef MOZ_AUTOMATION
+ifeq (,$(filter 1,$(MOZ_AUTOMATION_BUILD_SYMBOLS)))
+DUMP_SYMS_TARGETS :=
+endif
+endif
+
+ifdef MOZ_COPY_PDBS
+MAIN_PDB_FILES = $(addsuffix .pdb,$(basename $(DUMP_SYMS_TARGETS)))
+MAIN_PDB_DEST ?= $(FINAL_TARGET)
+MAIN_PDB_TARGET = syms
+INSTALL_TARGETS += MAIN_PDB
+
+ifdef CPP_UNIT_TESTS
+CPP_UNIT_TESTS_PDB_FILES = $(addsuffix .pdb,$(basename $(CPP_UNIT_TESTS)))
+CPP_UNIT_TESTS_PDB_DEST = $(DIST)/cppunittests
+CPP_UNIT_TESTS_PDB_TARGET = syms
+INSTALL_TARGETS += CPP_UNIT_TESTS_PDB
+endif
+
+else ifdef MOZ_CRASHREPORTER
+$(foreach file,$(DUMP_SYMS_TARGETS),$(eval $(call syms_template,$(file),$(notdir $(file))_syms.track)))
+endif
+
+ifneq (,$(RUST_TESTS)$(RUST_LIBRARY_FILE)$(HOST_RUST_LIBRARY_FILE)$(RUST_PROGRAMS)$(HOST_RUST_PROGRAMS))
+include $(MOZILLA_DIR)/config/makefiles/rust.mk
+endif
+
+$(SOBJS):
+	$(REPORT_BUILD)
+	$(call WINEWRAP,$(AS)) $(ASOUTOPTION)$@ $(SFLAGS) $($(notdir $<)_FLAGS) -c $(call relativize,$<)
+
+$(CPPOBJS):
+	$(REPORT_BUILD_VERBOSE)
+	$(call BUILDSTATUS,OBJECT_FILE $@)
+	$(CCC) $(OUTOPTION)$@ -c $(COMPILE_CXXFLAGS) $($(notdir $<)_FLAGS) $<
+
+$(CPPWASMOBJS):
+	$(REPORT_BUILD_VERBOSE)
+	$(call BUILDSTATUS,OBJECT_FILE $@)
+	$(WASM_CXX) -o $@ -c $(WASM_CXXFLAGS) $($(notdir $<)_FLAGS) $<
+
+$(CMMOBJS):
+	$(REPORT_BUILD_VERBOSE)
+	$(CCC) -o $@ -c $(COMPILE_CXXFLAGS) $(COMPILE_CMMFLAGS) $($(notdir $<)_FLAGS) $<
+
+$(CMOBJS):
+	$(REPORT_BUILD_VERBOSE)
+	$(CC) -o $@ -c $(COMPILE_CFLAGS) $(COMPILE_CMFLAGS) $($(notdir $<)_FLAGS) $<
+
+$(filter %.s,$(CPPSRCS:%.cpp=%.s)): %.s: %.cpp $(call mkdir_deps,$(MDDEPDIR))
+	$(REPORT_BUILD_VERBOSE)
+	$(CCC) -S $(COMPILE_CXXFLAGS) $($(notdir $<)_FLAGS) $<
+
+$(filter %.s,$(CPPSRCS:%.cc=%.s)): %.s: %.cc $(call mkdir_deps,$(MDDEPDIR))
+	$(REPORT_BUILD_VERBOSE)
+	$(CCC) -S $(COMPILE_CXXFLAGS) $($(notdir $<)_FLAGS) $<
+
+$(filter %.s,$(CPPSRCS:%.cxx=%.s)): %.s: %.cxx $(call mkdir_deps,$(MDDEPDIR))
+	$(REPORT_BUILD_VERBOSE)
+	$(CCC) -S $(COMPILE_CXXFLAGS) $($(notdir $<)_FLAGS) $<
+
+$(filter %.s,$(CSRCS:%.c=%.s)): %.s: %.c $(call mkdir_deps,$(MDDEPDIR))
+	$(REPORT_BUILD_VERBOSE)
+	$(CC) -S $(COMPILE_CFLAGS) $($(notdir $<)_FLAGS) $<
+
+ifneq (,$(filter %.i,$(MAKECMDGOALS)))
+# Call as $(call _group_srcs,extension,$(SRCS)) - this will create a list
+# of the full sources, as well as the $(notdir) version. So:
+#   foo.cpp sub/bar.cpp
+# becomes:
+#   foo.cpp sub/bar.cpp bar.cpp
+#
+# This way we can match both 'make sub/bar.i' and 'make bar.i'
+_group_srcs = $(sort $(patsubst %.$1,%.i,$(filter %.$1,$2 $(notdir $2))))
+
+define PREPROCESS_RULES
+_PREPROCESSED_$1_FILES := $$(call _group_srcs,$1,$$($2))
+# Make preprocessed files PHONY so they are always executed, since they are
+# manual targets and we don't necessarily write to $@.
+.PHONY: $$(_PREPROCESSED_$1_FILES)
+
+# Hack up VPATH so we can reach the sources. Eg: 'make Parser.i' may need to
+# reach $(srcdir)/frontend/Parser.i
+vpath %.$1 $$(addprefix $$(srcdir)/,$$(sort $$(dir $$($2))))
+vpath %.$1 $$(addprefix $$(CURDIR)/,$$(sort $$(dir $$($2))))
+
+$$(_PREPROCESSED_$1_FILES): _DEPEND_CFLAGS=
+$$(_PREPROCESSED_$1_FILES): %.i: %.$1
+	$$(REPORT_BUILD_VERBOSE)
+	$$(addprefix $$(MKDIR) -p ,$$(filter-out .,$$(@D)))
+	$$($3) -C $$(PREPROCESS_OPTION)$$@ $(foreach var,$4,$$($(var))) $$($$(notdir $$<)_FLAGS) $$<
+
+endef
+
+$(eval $(call PREPROCESS_RULES,cpp,CPPSRCS,CCC,COMPILE_CXXFLAGS))
+$(eval $(call PREPROCESS_RULES,cc,CPPSRCS,CCC,COMPILE_CXXFLAGS))
+$(eval $(call PREPROCESS_RULES,cxx,CPPSRCS,CCC,COMPILE_CXXFLAGS))
+$(eval $(call PREPROCESS_RULES,c,CSRCS,CC,COMPILE_CFLAGS))
+$(eval $(call PREPROCESS_RULES,mm,CMMSRCS,CCC,COMPILE_CXXFLAGS COMPILE_CMMFLAGS))
+
+# Default to pre-processing the actual unified file. This can be overridden
+# at the command-line to pre-process only the individual source file.
+PP_UNIFIED ?= 1
+
+# PP_REINVOKE gets set on the sub-make to prevent us from going in an
+# infinite loop if the filename doesn't exist in the unified source files.
+ifndef PP_REINVOKE
+
+MATCH_cpp = \(cpp\|cc\|cxx\)
+UPPER_c = C
+UPPER_cpp = CPP
+UPPER_mm = CMM
+
+# When building with PP_UNIFIED=0, we also have to look in the Unified files to
+# find a matching pathname.
+_get_all_sources = $1 $(if $(filter Unified%,$1),$(shell sed -n 's/\#include "\(.*\)"$$/\1/p' $(filter Unified%,$1)))
+all_cpp_sources := $(call _get_all_sources,$(CPPSRCS))
+all_mm_sources := $(call _get_all_sources,$(CMMSRCS))
+all_c_sources := $(call _get_all_sources,$(CSRCS))
+all_sources := $(all_cpp_sources) $(all_mm_sources) $(all_c_sources)
+
+# The catch-all %.i rule runs when we pass in a .i filename that doesn't match
+# one of the *SRCS variables. The two code paths depend on whether or not
+# we are requesting a unified file (PP_UNIFIED=1, the default) or not:
+#
+# PP_UNIFIED=1:
+#  - Look for it in any of the Unified files, and re-exec make with
+#    Unified_foo0.i as the target. This gets us the full unified preprocessed
+#    file.
+#
+# PP_UNIFIED=0:
+#  - If the .i filename is in *SRCS, or in a Unified filename, then we re-exec
+#    make with that filename as the target. The *SRCS variables are modified
+#    to have the Unified sources appended to them so that the static pattern
+#    rules will match.
+%.i: FORCE
+ifeq ($(PP_UNIFIED),1)
+	@$(MAKE) PP_REINVOKE=1 \
+	    $(or $(addsuffix .i, \
+              $(foreach type,c cpp mm, \
+	        $(if $(filter Unified%,$($(UPPER_$(type))SRCS)), \
+	          $(shell grep -l '#include "\(.*/\)\?$(basename $@).$(or $(MATCH_$(type)),$(type))"' Unified*.$(type) | sed 's/\.$(type)$$//') \
+            ))),$(error "File not found for preprocessing: $@"))
+else
+	@$(MAKE) PP_REINVOKE=1 $@ \
+	    $(foreach type,c cpp mm,$(UPPER_$(type))SRCS="$(all_$(type)_sources)")
+endif
+
+endif
+
+endif
+
+# EXTRA_DEPS contains manifests (manually added in Makefile.in ; bug 1498414)
+%.res: $(or $(RCFILE),%.rc) $(MOZILLA_DIR)/config/create_res.py $(EXTRA_DEPS)
+	$(REPORT_BUILD)
+	$(PYTHON3) $(MOZILLA_DIR)/config/create_res.py $(DEFINES) $(INCLUDES) -o $@ $<
+
+$(notdir $(addsuffix .rc,$(PROGRAM) $(SHARED_LIBRARY) $(SIMPLE_PROGRAMS) module)): %.rc: $(RCINCLUDE) $(MOZILLA_DIR)/config/create_rc.py
+	$(PYTHON3) $(MOZILLA_DIR)/config/create_rc.py '$(if $(filter module,$*),,$*)' '$(RCINCLUDE)'
+
+# Cancel GNU make built-in implicit rules
+MAKEFLAGS += -r
+
+ifneq (,$(filter WINNT,$(OS_ARCH)))
+SEP := ;
+else
+SEP := :
+endif
+
+EMPTY :=
+SPACE := $(EMPTY) $(EMPTY)
+
+###############################################################################
+# Bunch of things that extend the 'export' rule (in order):
+###############################################################################
+
+ifneq ($(XPI_NAME),)
+$(FINAL_TARGET):
+	$(NSINSTALL) -D $@
+
+export:: $(FINAL_TARGET)
+endif
+
+################################################################################
+# The default location for prefs is the gre prefs directory.
+# PREF_DIR is used for L10N_PREF_JS_EXPORTS in various locales/ directories.
+PREF_DIR = defaults/pref
+
+# If DIST_SUBDIR is defined it indicates that app and gre dirs are
+# different and that we are building app related resources. Hence,
+# PREF_DIR should point to the app prefs location.
+ifneq (,$(DIST_SUBDIR)$(XPI_NAME))
+PREF_DIR = defaults/preferences
+endif
+
+################################################################################
+# CHROME PACKAGING
+
+chrome::
+	$(MAKE) realchrome
+	$(LOOP_OVER_DIRS)
+
+$(FINAL_TARGET)/chrome: $(call mkdir_deps,$(FINAL_TARGET)/chrome)
+
+ifneq (,$(JAR_MANIFEST))
+ifndef NO_DIST_INSTALL
+
+ifdef XPI_NAME
+ifdef XPI_ROOT_APPID
+# For add-on packaging we may specify that an application
+# sub-dir should be added to the root chrome manifest with
+# a specific application id.
+MAKE_JARS_FLAGS += --root-manifest-entry-appid='$(XPI_ROOT_APPID)'
+endif
+
+# if DIST_SUBDIR is defined but XPI_ROOT_APPID is not there's
+# no way langpacks will get packaged right, so error out.
+ifneq (,$(DIST_SUBDIR))
+ifndef XPI_ROOT_APPID
+$(error XPI_ROOT_APPID is not defined - langpacks will break.)
+endif
+endif
+endif
+
+misc realchrome:: $(FINAL_TARGET)/chrome
+	$(call py_action,jar_maker,\
+	  $(QUIET) -d $(FINAL_TARGET) \
+	  $(MAKE_JARS_FLAGS) $(DEFINES) $(ACDEFINES) \
+	  $(JAR_MANIFEST))
+
+ifdef AB_CD
+.PHONY: l10n
+l10n: misc ;
+endif
+endif
+
+endif
+
+# When you move this out of the tools tier, please remove the corresponding
+# hacks in recursivemake.py that check if Makefile.in sets the variable.
+ifneq ($(XPI_PKGNAME),)
+tools realchrome::
+	@echo 'Packaging $(XPI_PKGNAME).xpi...'
+	$(call py_action,zip,-C $(FINAL_TARGET) ../$(XPI_PKGNAME).xpi '*')
+endif
+
+#############################################################################
+# MDDEPDIR is the subdirectory where all the dependency files are placed.
+#   This uses a make rule (instead of a macro) to support parallel
+#   builds (-jN). If this were done in the LOOP_OVER_DIRS macro, two
+#   processes could simultaneously try to create the same directory.
+#
+#   We use $(CURDIR) in the rule's target to ensure that we don't find
+#   a dependency directory in the source tree via VPATH (perhaps from
+#   a previous build in the source tree) and thus neglect to create a
+#   dependency directory in the object directory, where we really need
+#   it.
+
+_MDDEPEND_FILES :=
+
+ifneq (,$(filter target-objects target all default,$(MAKECMDGOALS)))
+_MDDEPEND_FILES += $(addsuffix .pp,$(notdir $(sort $(OBJS) $(PROGOBJS))))
+endif
+
+ifneq (,$(filter host-objects host all default,$(MAKECMDGOALS)))
+_MDDEPEND_FILES += $(addsuffix .pp,$(notdir $(sort $(HOST_OBJS) $(HOST_PROGOBJS))))
+endif
+
+MDDEPEND_FILES := $(strip $(wildcard $(addprefix $(MDDEPDIR)/,$(_MDDEPEND_FILES))))
+MDDEPEND_FILES += $(EXTRA_MDDEPEND_FILES)
+
+ifneq (,$(MDDEPEND_FILES))
+-include $(MDDEPEND_FILES)
+endif
+
+################################################################################
+# Install/copy rules
+#
+# The INSTALL_TARGETS variable contains a list of all install target
+# categories. Each category defines a list of files and executables, and an
+# install destination,
+#
+# FOO_FILES := foo bar
+# FOO_EXECUTABLES := baz
+# FOO_DEST := target_path
+# INSTALL_TARGETS += FOO
+#
+# Additionally, a FOO_TARGET variable may be added to indicate the target for
+# which the files and executables are installed. Default is "libs".
+#
+# Finally, a FOO_KEEP_PATH variable may be set to 1 to indicate the paths given
+# in FOO_FILES/FOO_EXECUTABLES are to be kept at the destination. That is,
+# if FOO_FILES is bar/baz/qux.h, and FOO_DEST is $(DIST)/include, the installed
+# file would be $(DIST)/include/bar/baz/qux.h instead of $(DIST)/include/qux.h
+
+# If we're using binary nsinstall and it's not built yet, fallback to python nsinstall.
+ifneq (,$(filter $(DEPTH)/config/nsinstall$(HOST_BIN_SUFFIX),$(install_cmd)))
+ifeq (,$(wildcard $(DEPTH)/config/nsinstall$(HOST_BIN_SUFFIX)))
+nsinstall_is_usable = $(if $(wildcard $(DEPTH)/config/nsinstall$(HOST_BIN_SUFFIX)),yes)
+
+define install_cmd_override
+$(1): install_cmd = $$(if $$(nsinstall_is_usable),$$(INSTALL),$$(NSINSTALL_PY) -t) $$(1)
+endef
+endif
+endif
+
+install_target_tier = $(or $($(1)_TARGET),libs)
+INSTALL_TARGETS_TIERS := $(sort $(foreach category,$(INSTALL_TARGETS),$(call install_target_tier,$(category))))
+
+install_target_result = $($(1)_DEST:%/=%)/$(if $($(1)_KEEP_PATH),$(2),$(notdir $(2)))
+install_target_files = $(foreach file,$($(1)_FILES),$(call install_target_result,$(category),$(file)))
+install_target_executables = $(foreach file,$($(1)_EXECUTABLES),$(call install_target_result,$(category),$(file)))
+
+# Work around a GNU make 3.81 bug where it gives $< the wrong value.
+# See details in bug 934864.
+define create_dependency
+$(1): $(2)
+$(1): $(2)
+endef
+
+define install_target_template
+$(call install_cmd_override,$(2))
+$(call create_dependency,$(2),$(1))
+endef
+
+$(foreach category,$(INSTALL_TARGETS),\
+  $(if $($(category)_DEST),,$(error Missing $(category)_DEST)) \
+  $(foreach tier,$(call install_target_tier,$(category)),\
+    $(eval INSTALL_TARGETS_FILES_$(tier) += $(call install_target_files,$(category))) \
+    $(eval INSTALL_TARGETS_EXECUTABLES_$(tier) += $(call install_target_executables,$(category))) \
+  ) \
+  $(foreach file,$($(category)_FILES) $($(category)_EXECUTABLES), \
+    $(eval $(call install_target_template,$(file),$(call install_target_result,$(category),$(file)))) \
+  ) \
+)
+
+$(foreach tier,$(INSTALL_TARGETS_TIERS), \
+  $(eval $(tier):: $(INSTALL_TARGETS_FILES_$(tier)) $(INSTALL_TARGETS_EXECUTABLES_$(tier))) \
+)
+
+install_targets_sanity = $(if $(filter-out $(notdir $@),$(notdir $(<))),$(error Looks like $@ has an unexpected dependency on $< which breaks INSTALL_TARGETS))
+
+$(sort $(foreach tier,$(INSTALL_TARGETS_TIERS),$(INSTALL_TARGETS_FILES_$(tier)))):
+	$(install_targets_sanity)
+	$(call install_cmd,$(IFLAGS1) '$<' '$(@D)')
+
+$(sort $(foreach tier,$(INSTALL_TARGETS_TIERS),$(INSTALL_TARGETS_EXECUTABLES_$(tier)))):
+	$(install_targets_sanity)
+	$(call install_cmd,$(IFLAGS2) '$<' '$(@D)')
+
+################################################################################
+# Preprocessing rules
+#
+# The PP_TARGETS variable contains a list of all preprocessing target
+# categories. Each category has associated variables listing input files, the
+# output directory, extra preprocessor flags, and so on. For example:
+#
+#   FOO := input-file
+#   FOO_PATH := target-directory
+#   FOO_FLAGS := -Dsome_flag
+#   PP_TARGETS += FOO
+#
+# If PP_TARGETS lists a category name <C> (like FOO, above), then we consult the
+# following make variables to see what to do:
+#
+# - <C> lists input files to be preprocessed with mozbuild.action.preprocessor.
+#   We search VPATH for the names given here. If an input file name ends in
+#   '.in', that suffix is omitted from the output file name.
+#
+# - <C>_PATH names the directory in which to place the preprocessed output
+#   files. We create this directory if it does not already exist. Setting
+#   this variable is optional; if unset, we install the files in $(CURDIR).
+#
+# - <C>_FLAGS lists flags to pass to mozbuild.action.preprocessor, in addition
+#   to the usual bunch. Setting this variable is optional.
+#
+# - <C>_TARGET names the 'make' target that should depend on creating the output
+#   files. Setting this variable is optional; if unset, we preprocess the
+#   files for the 'libs' target.
+#
+# - <C>_KEEP_PATH may be set to 1 to indicate the paths given in <C> are to be
+#   kept under <C>_PATH. That is, if <C> is bar/baz/qux.h.in and <C>_PATH is
+#   $(DIST)/include, the preprocessed file would be $(DIST)/include/bar/baz/qux.h
+#   instead of $(DIST)/include/qux.h.
+
+pp_target_tier = $(or $($(1)_TARGET),libs)
+PP_TARGETS_TIERS := $(sort $(foreach category,$(PP_TARGETS),$(call pp_target_tier,$(category))))
+
+pp_target_result = $(or $($(1)_PATH:%/=%),$(CURDIR))/$(if $($(1)_KEEP_PATH),$(2:.in=),$(notdir $(2:.in=)))
+pp_target_results = $(foreach file,$($(1)),$(call pp_target_result,$(category),$(file)))
+
+$(foreach category,$(PP_TARGETS), \
+  $(foreach tier,$(call pp_target_tier,$(category)), \
+    $(eval PP_TARGETS_RESULTS_$(tier) += $(call pp_target_results,$(category))) \
+  ) \
+  $(foreach file,$($(category)), \
+    $(eval $(call create_dependency,$(call pp_target_result,$(category),$(file)), \
+                                    $(file) $(GLOBAL_DEPS))) \
+  ) \
+  $(eval $(call pp_target_results,$(category)): PP_TARGET_FLAGS=$($(category)_FLAGS)) \
+)
+
+$(foreach tier,$(PP_TARGETS_TIERS), \
+  $(eval $(tier):: $(PP_TARGETS_RESULTS_$(tier))) \
+)
+
+PP_TARGETS_ALL_RESULTS := $(sort $(foreach tier,$(PP_TARGETS_TIERS),$(PP_TARGETS_RESULTS_$(tier))))
+$(PP_TARGETS_ALL_RESULTS):
+	$(if $(filter-out $(notdir $@),$(notdir $(<:.in=))),$(error Looks like $@ has an unexpected dependency on $< which breaks PP_TARGETS))
+	$(RM) '$@'
+	$(call py_action,preprocessor,--depend $(MDDEPDIR)/$(@F).pp $(PP_TARGET_FLAGS) $(DEFINES) $(ACDEFINES) '$<' -o '$@')
+
+$(filter %.css,$(PP_TARGETS_ALL_RESULTS)): PP_TARGET_FLAGS+=--marker %
+
+# The depfile is based on the filename, and we don't want conflicts. So check
+# there's only one occurrence of any given filename in PP_TARGETS_ALL_RESULTS.
+PP_TARGETS_ALL_RESULT_NAMES := $(notdir $(PP_TARGETS_ALL_RESULTS))
+$(foreach file,$(sort $(PP_TARGETS_ALL_RESULT_NAMES)), \
+  $(if $(filter-out 1,$(words $(filter $(file),$(PP_TARGETS_ALL_RESULT_NAMES)))), \
+    $(error Multiple preprocessing rules are creating a $(file) file) \
+  ) \
+)
+
+ifneq (,$(filter $(PP_TARGETS_TIERS) $(PP_TARGETS_ALL_RESULTS),$(MAKECMDGOALS)))
+# If the depfile for a preprocessed file doesn't exist, add a dep to force
+# re-preprocessing.
+$(foreach file,$(PP_TARGETS_ALL_RESULTS), \
+  $(if $(wildcard $(MDDEPDIR)/$(notdir $(file)).pp), \
+    , \
+    $(eval $(file): FORCE) \
+  ) \
+)
+
+MDDEPEND_FILES := $(strip $(wildcard $(addprefix $(MDDEPDIR)/,$(addsuffix .pp,$(notdir $(PP_TARGETS_ALL_RESULTS))))))
+
+ifneq (,$(MDDEPEND_FILES))
+-include $(MDDEPEND_FILES)
+endif
+
+endif
+
+# Pull in non-recursive targets if this is a partial tree build.
+ifndef TOPLEVEL_BUILD
+include $(MOZILLA_DIR)/config/makefiles/nonrecursive.mk
+endif
+
+################################################################################
+# Special gmake rules.
+################################################################################
+
+
+#
+# Re-define the list of default suffixes, so gmake won't have to churn through
+# hundreds of built-in suffix rules for stuff we don't need.
+#
+.SUFFIXES:
+
+#
+# Fake targets.  Always run these rules, even if a file/directory with that
+# name already exists.
+#
+.PHONY: all alltags boot chrome realchrome export install libs makefiles run_apprunner tools $(DIRS) FORCE
+
+# Used as a dependency to force targets to rebuild
+FORCE:
+
+# Delete target if error occurs when building target
+.DELETE_ON_ERROR:
+
+tags: TAGS
+
+TAGS: $(CSRCS) $(CPPSRCS) $(wildcard *.h)
+	-etags $(CSRCS) $(CPPSRCS) $(wildcard *.h)
+	$(LOOP_OVER_DIRS)
+
+ifndef INCLUDED_DEBUGMAKE_MK #{
+  ## Only parse when an echo* or show* target is requested
+  ifneq (,$(call isTargetStem,echo,show))
+    include $(MOZILLA_DIR)/config/makefiles/debugmake.mk
+  endif #}
+endif #}
+
+FREEZE_VARIABLES = \
+  CSRCS \
+  CPPSRCS \
+  EXPORTS \
+  DIRS \
+  LIBRARY \
+  MODULE \
+  $(NULL)
+
+$(foreach var,$(FREEZE_VARIABLES),$(eval $(var)_FROZEN := '$($(var))'))
+
+CHECK_FROZEN_VARIABLES = $(foreach var,$(FREEZE_VARIABLES), \
+  $(if $(subst $($(var)_FROZEN),,'$($(var))'),$(error Makefile variable '$(var)' changed value after including rules.mk. Was $($(var)_FROZEN), now $($(var)).)))
+
+libs export::
+	$(CHECK_FROZEN_VARIABLES)
+
+.DEFAULT_GOAL := $(or $(OVERRIDE_DEFAULT_GOAL),default)
+
+#############################################################################
+# Derived targets and dependencies
+
+include $(MOZILLA_DIR)/config/makefiles/autotargets.mk
+ifneq ($(NULL),$(AUTO_DEPS))
+  default all libs tools export:: $(AUTO_DEPS)
+endif
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-spidermonkey-checks-patch/create.patch.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-spidermonkey-checks-patch/create.patch.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-spidermonkey-checks-patch/create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+VERSION=102.15.0
+
+tar --files-from=file.list -xJvf ../firefox-${VERSION}esr.source.tar.xz
+mv firefox-$VERSION firefox-$VERSION-orig
+
+cp -rf ./firefox-$VERSION-new ./firefox-$VERSION
+
+diff --unified -Nr  firefox-$VERSION-orig  firefox-$VERSION > firefox-$VERSION-spidermonkey-checks.patch
+
+mv firefox-$VERSION-spidermonkey-checks.patch ../patches
+
+rm -rf ./firefox-$VERSION
+rm -rf ./firefox-$VERSION-orig

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-spidermonkey-checks-patch/create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-spidermonkey-checks-patch/file.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-spidermonkey-checks-patch/file.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-spidermonkey-checks-patch/file.list	(revision 228)
@@ -0,0 +1 @@
+firefox-102.15.0/config/run_spidermonkey_checks.py
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-spidermonkey-checks-patch/firefox-102.15.0-new/config/run_spidermonkey_checks.py
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-spidermonkey-checks-patch/firefox-102.15.0-new/config/run_spidermonkey_checks.py	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-spidermonkey-checks-patch/firefox-102.15.0-new/config/run_spidermonkey_checks.py	(revision 228)
@@ -0,0 +1,13 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import
+import buildconfig
+import subprocess
+import sys
+
+
+def main(output, lib_file, *scripts):
+    for script in scripts:
+        retcode = subprocess.call([sys.executable, script], cwd=buildconfig.topsrcdir)
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/create.patch.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/create.patch.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+VERSION=102.15.0
+
+tar --files-from=file.list -xJvf ../firefox-${VERSION}esr.source.tar.xz
+mv firefox-$VERSION firefox-$VERSION-orig
+
+cp -rf ./firefox-$VERSION-new ./firefox-$VERSION
+
+diff --unified -Nr  firefox-$VERSION-orig  firefox-$VERSION > firefox-$VERSION-tests.patch
+
+mv firefox-$VERSION-tests.patch ../patches
+
+rm -rf ./firefox-$VERSION
+rm -rf ./firefox-$VERSION-orig

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/file.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/file.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/file.list	(revision 228)
@@ -0,0 +1,4 @@
+firefox-102.15.0/js/src/Makefile.in
+firefox-102.15.0/js/src/jit-test/jit_test.py
+firefox-102.15.0/js/src/tests/jstests.py
+firefox-102.15.0/js/src/tests/lib/tempfile.py
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/Makefile.in
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/Makefile.in	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/Makefile.in	(revision 228)
@@ -0,0 +1,150 @@
+# -*- Mode: makefile -*-
+#
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+ifeq (,$(MAKE_VERSION))
+$(error GNU Make is required)
+endif
+make_min_ver := 3.81
+ifneq ($(make_min_ver),$(firstword $(sort $(make_min_ver) $(MAKE_VERSION))))
+$(error GNU Make $(make_min_ver) or higher is required)
+endif
+
+DASH_R		= -r
+
+# Define keyword generator before rules.mk, see bug 323979 comment 50
+
+USE_HOST_CXX = 1
+
+include $(topsrcdir)/config/rules.mk
+
+ifdef MOZ_VALGRIND
+ifndef MOZ_ASAN
+JITTEST_VALGRIND_FLAG = --valgrind
+endif
+endif
+
+ifneq ($(LLVM_SYMBOLIZER),)
+# Use the LLVM symbolizer when running jit-tests under ASan and TSan, if available
+ifdef MOZ_ASAN
+JITTEST_SANITIZER_ENV=ASAN_SYMBOLIZER_PATH='$(LLVM_SYMBOLIZER)'
+endif
+ifdef MOZ_TSAN
+JITTEST_SANITIZER_ENV=TSAN_OPTIONS="external_symbolizer_path=$(LLVM_SYMBOLIZER) handle_segv=0 $$TSAN_OPTIONS"
+endif
+ifdef MOZ_MSAN
+JITTEST_SANITIZER_ENV=MSAN_SYMBOLIZER_PATH='$(LLVM_SYMBOLIZER)'
+endif
+endif
+
+check-js-msg::
+	(cd $(topsrcdir) && $(PYTHON3) $(topsrcdir)/config/check_js_msg_encoding.py);
+
+check-jit-test::
+	$(JITTEST_SANITIZER_ENV) $(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON3) -u $(srcdir)/jit-test/jit_test.py \
+	        --no-slow --no-progress --format=automation --jitflags=all \
+			$(JITTEST_VALGRIND_FLAG) \
+			$(JITTEST_EXTRA_ARGS) \
+	        $(DIST)/bin/js$(BIN_SUFFIX) $(JITTEST_TEST_ARGS)
+
+check:: check-js-msg
+
+check-jstests:
+	$(wildcard $(RUN_TEST_PROGRAM)) $(PYTHON3) -u $(srcdir)/tests/jstests.py \
+		--no-progress --format=automation --timeout 600 \
+		$(JSTESTS_EXTRA_ARGS) \
+		$(DIST)/bin/js$(BIN_SUFFIX)
+
+# FIXME:
+# We want to run check-jstests as part of |make check| on all platforms, on
+# tinderbox. However, some configurations don't work quite right just yet.
+# Rather than risking regressions in major configurations while fixing these
+# secondary configurations, we work around them and fix the bugs later.
+#
+#   Bug 652154: On Windows, SM(!m !t) don't work because of path problems with
+#				their unusual directory layout
+#
+#   Bug 652155: On Mac, SM(d) doesn't work for unknown reasons
+
+ifneq ($(OS_ARCH),WINNT)
+ifndef HAVE_DTRACE
+#check:: check-jstests
+endif
+endif
+
+CFLAGS += $(MOZ_ZLIB_CFLAGS)
+
+# Silence warnings on AIX/HP-UX from non-GNU compilers
+ifndef GNU_CC
+ifeq ($(OS_ARCH),AIX)
+# Suppress warnings from xlC
+# 1540-1281: offsetof() on null non-POD types
+# 1540-1608: anonymous unions using static data members
+CFLAGS		+= -qsuppress=1540-1281 -qsuppress=1540-1608
+CXXFLAGS	+= -qsuppress=1540-1281 -qsuppress=1540-1608
+endif
+ifeq ($(OS_ARCH),HP-UX)
+# Suppress warnings from aCC
+# 3055: anonymous unions declaring types
+# 4189: offsetof() on non-POD types
+CFLAGS		+= +W3055,4189
+CXXFLAGS	+= +W3055,4189
+endif
+endif
+ifeq ($(OS_ARCH),SunOS)
+ifeq ($(TARGET_CPU),sparc)
+
+ifdef GNU_CC
+CFLAGS   += -mcpu=v9
+CXXFLAGS += -mcpu=v9
+endif # GNU_CC
+
+endif
+endif
+
+install::
+	$(MAKE) -C build install
+	$(MAKE) -C shell install
+
+ifdef HAVE_DTRACE
+javascript-trace.h: $(srcdir)/devtools/javascript-trace.d
+	dtrace -x nolibs -h -s $(srcdir)/devtools/javascript-trace.d -o javascript-trace.h.in
+	sed -e 's/if _DTRACE_VERSION/ifdef INCLUDE_MOZILLA_DTRACE/' \
+	    -e '/const/!s/char \*/const char */g' \
+	    javascript-trace.h.in > javascript-trace.h
+
+# We can't automatically generate dependencies on auto-generated headers;
+# we have to list them explicitly.
+$(addsuffix .$(OBJ_SUFFIX),Probes jsinterp jsobj): $(CURDIR)/javascript-trace.h
+
+ifneq ($(OS_ARCH),Darwin)
+DTRACE_PROBE_OBJ = js-dtrace.$(OBJ_SUFFIX)
+$(LIBRARY): $(DTRACE_PROBE_OBJ)
+$(DTRACE_PROBE_OBJ): $(srcdir)/devtools/javascript-trace.d $(OBJS)
+	dtrace -x nolibs -G -C -s $< -o $@ $(filter-out $<, $^)
+
+OBJS += $(DTRACE_PROBE_OBJ)
+endif # OS_ARCH != Darwin
+endif # HAVE_DTRACE
+
+###############################################
+# Generating source package tarballs
+# (only possible when tar is found)
+ifneq (,$(TAR))
+
+source-package:
+	SRCDIR=$(srcdir) \
+	DIST=$(DIST) \
+	MKDIR=$(MKDIR) \
+	TAR=$(TAR) \
+	M4=$(M4) \
+	AWK=$(AWK) \
+	MOZJS_MAJOR_VERSION=$(MOZJS_MAJOR_VERSION) \
+	MOZJS_MINOR_VERSION=$(MOZJS_MINOR_VERSION) \
+	MOZJS_PATCH_VERSION=$(MOZJS_PATCH_VERSION) \
+	MOZJS_ALPHA=$(MOZJS_ALPHA) \
+	$(srcdir)/make-source-package.sh
+
+endif
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/jit-test/jit_test.py
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/jit-test/jit_test.py	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/jit-test/jit_test.py	(revision 228)
@@ -0,0 +1,595 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import print_function, unicode_literals
+
+import math
+import os
+import platform
+import posixpath
+import shlex
+import subprocess
+import sys
+import traceback
+
+
+read_input = input
+if sys.version_info.major == 2:
+    read_input = raw_input
+
+
+def add_tests_dir_to_path():
+    from os.path import dirname, exists, join, realpath
+
+    js_src_dir = dirname(dirname(realpath(sys.argv[0])))
+    assert exists(join(js_src_dir, "jsapi.h"))
+    sys.path.insert(0, join(js_src_dir, "tests"))
+
+
+add_tests_dir_to_path()
+
+from lib import jittests
+from lib.tests import (
+    get_jitflags,
+    valid_jitflags,
+    get_cpu_count,
+    get_environment_overlay,
+    change_env,
+)
+from tempfile import TemporaryDirectory
+
+
+def which(name):
+    if name.find(os.path.sep) != -1:
+        return os.path.abspath(name)
+
+    for path in os.environ["PATH"].split(os.pathsep):
+        full = os.path.join(path, name)
+        if os.path.exists(full):
+            return os.path.abspath(full)
+
+    return name
+
+
+def choose_item(jobs, max_items, display):
+    job_count = len(jobs)
+
+    # Don't present a choice if there are too many tests
+    if job_count > max_items:
+        raise Exception("Too many jobs.")
+
+    for i, job in enumerate(jobs, 1):
+        print("{}) {}".format(i, display(job)))
+
+    item = read_input("Which one:\n")
+    try:
+        item = int(item)
+        if item > job_count or item < 1:
+            raise Exception("Input isn't between 1 and {}".format(job_count))
+    except ValueError:
+        raise Exception("Unrecognized input")
+
+    return jobs[item - 1]
+
+
+def main(argv):
+    # The [TESTS] optional arguments are paths of test files relative
+    # to the jit-test/tests directory.
+    import argparse
+
+    op = argparse.ArgumentParser(description="Run jit-test JS shell tests")
+    op.add_argument(
+        "-s",
+        "--show-cmd",
+        dest="show_cmd",
+        action="store_true",
+        help="show js shell command run",
+    )
+    op.add_argument(
+        "-f",
+        "--show-failed-cmd",
+        dest="show_failed",
+        action="store_true",
+        help="show command lines of failed tests",
+    )
+    op.add_argument(
+        "-o",
+        "--show-output",
+        dest="show_output",
+        action="store_true",
+        help="show output from js shell",
+    )
+    op.add_argument(
+        "-F",
+        "--failed-only",
+        dest="failed_only",
+        action="store_true",
+        help="if --show-output is given, only print output for" " failed tests",
+    )
+    op.add_argument(
+        "--no-show-failed",
+        dest="no_show_failed",
+        action="store_true",
+        help="don't print output for failed tests" " (no-op with --show-output)",
+    )
+    op.add_argument(
+        "-x",
+        "--exclude",
+        dest="exclude",
+        default=[],
+        action="append",
+        help="exclude given test dir or path",
+    )
+    op.add_argument(
+        "--exclude-from",
+        dest="exclude_from",
+        type=str,
+        help="exclude each test dir or path in FILE",
+    )
+    op.add_argument(
+        "--slow",
+        dest="run_slow",
+        action="store_true",
+        help="also run tests marked as slow",
+    )
+    op.add_argument(
+        "--no-slow",
+        dest="run_slow",
+        action="store_false",
+        help="do not run tests marked as slow (the default)",
+    )
+    op.add_argument(
+        "-t",
+        "--timeout",
+        dest="timeout",
+        type=float,
+        default=150.0,
+        help="set test timeout in seconds",
+    )
+    op.add_argument(
+        "--no-progress",
+        dest="hide_progress",
+        action="store_true",
+        help="hide progress bar",
+    )
+    op.add_argument(
+        "--tinderbox",
+        dest="format",
+        action="store_const",
+        const="automation",
+        help="Use automation-parseable output format",
+    )
+    op.add_argument(
+        "--format",
+        dest="format",
+        default="none",
+        choices=("automation", "none"),
+        help="Output format (default %(default)s).",
+    )
+    op.add_argument(
+        "--args",
+        dest="shell_args",
+        metavar="ARGS",
+        default="",
+        help="extra args to pass to the JS shell",
+    )
+    op.add_argument(
+        "--feature-args",
+        dest="feature_args",
+        metavar="ARGS",
+        default="",
+        help="even more args to pass to the JS shell "
+        "(for compatibility with jstests.py)",
+    )
+    op.add_argument(
+        "-w",
+        "--write-failures",
+        dest="write_failures",
+        metavar="FILE",
+        help="Write a list of failed tests to [FILE]",
+    )
+    op.add_argument(
+        "-C",
+        "--check-output",
+        action="store_true",
+        dest="check_output",
+        help="Run tests to check output for different jit-flags",
+    )
+    op.add_argument(
+        "-r",
+        "--read-tests",
+        dest="read_tests",
+        metavar="FILE",
+        help="Run test files listed in [FILE]",
+    )
+    op.add_argument(
+        "-R",
+        "--retest",
+        dest="retest",
+        metavar="FILE",
+        help="Retest using test list file [FILE]",
+    )
+    op.add_argument(
+        "-g",
+        "--debug",
+        action="store_const",
+        const="gdb",
+        dest="debugger",
+        help="Run a single test under the gdb debugger",
+    )
+    op.add_argument(
+        "-G",
+        "--debug-rr",
+        action="store_const",
+        const="rr",
+        dest="debugger",
+        help="Run a single test under the rr debugger",
+    )
+    op.add_argument(
+        "--debugger", type=str, help="Run a single test under the specified debugger"
+    )
+    op.add_argument(
+        "--valgrind",
+        dest="valgrind",
+        action="store_true",
+        help="Enable the |valgrind| flag, if valgrind is in $PATH.",
+    )
+    op.add_argument(
+        "--unusable-error-status",
+        action="store_true",
+        help="Ignore incorrect exit status on tests that should return nonzero.",
+    )
+    op.add_argument(
+        "--valgrind-all",
+        dest="valgrind_all",
+        action="store_true",
+        help="Run all tests with valgrind, if valgrind is in $PATH.",
+    )
+    op.add_argument(
+        "--avoid-stdio",
+        dest="avoid_stdio",
+        action="store_true",
+        help="Use js-shell file indirection instead of piping stdio.",
+    )
+    op.add_argument(
+        "--write-failure-output",
+        dest="write_failure_output",
+        action="store_true",
+        help="With --write-failures=FILE, additionally write the"
+        " output of failed tests to [FILE]",
+    )
+    op.add_argument(
+        "--jitflags",
+        dest="jitflags",
+        default="none",
+        choices=valid_jitflags(),
+        help="IonMonkey option combinations (default %(default)s).",
+    )
+    op.add_argument(
+        "--ion",
+        dest="jitflags",
+        action="store_const",
+        const="ion",
+        help="Run tests once with --ion-eager and once with"
+        " --baseline-eager (equivalent to --jitflags=ion)",
+    )
+    op.add_argument(
+        "--no-xdr",
+        dest="use_xdr",
+        action="store_false",
+        help="Whether to disable caching of self-hosted parsed content in XDR format.",
+    )
+    op.add_argument(
+        "--tbpl",
+        dest="jitflags",
+        action="store_const",
+        const="all",
+        help="Run tests with all IonMonkey option combinations"
+        " (equivalent to --jitflags=all)",
+    )
+    op.add_argument(
+        "-j",
+        "--worker-count",
+        dest="max_jobs",
+        type=int,
+        default=max(1, get_cpu_count()),
+        help="Number of tests to run in parallel (default %(default)s).",
+    )
+    op.add_argument(
+        "--remote", action="store_true", help="Run tests on a remote device"
+    )
+    op.add_argument(
+        "--deviceIP",
+        action="store",
+        type=str,
+        dest="device_ip",
+        help="IP address of remote device to test",
+    )
+    op.add_argument(
+        "--devicePort",
+        action="store",
+        type=int,
+        dest="device_port",
+        default=20701,
+        help="port of remote device to test",
+    )
+    op.add_argument(
+        "--deviceSerial",
+        action="store",
+        type=str,
+        dest="device_serial",
+        default=None,
+        help="ADB device serial number of remote device to test",
+    )
+    op.add_argument(
+        "--remoteTestRoot",
+        dest="remote_test_root",
+        action="store",
+        type=str,
+        default="/data/local/tmp/test_root",
+        help="The remote directory to use as test root" " (e.g.  %(default)s)",
+    )
+    op.add_argument(
+        "--localLib",
+        dest="local_lib",
+        action="store",
+        type=str,
+        help="The location of libraries to push -- preferably" " stripped",
+    )
+    op.add_argument(
+        "--repeat", type=int, default=1, help="Repeat tests the given number of times."
+    )
+    op.add_argument("--this-chunk", type=int, default=1, help="The test chunk to run.")
+    op.add_argument(
+        "--total-chunks", type=int, default=1, help="The total number of test chunks."
+    )
+    op.add_argument(
+        "--ignore-timeouts",
+        dest="ignore_timeouts",
+        metavar="FILE",
+        help="Ignore timeouts of tests listed in [FILE]",
+    )
+    op.add_argument(
+        "--retry-remote-timeouts",
+        dest="timeout_retry",
+        type=int,
+        default=1,
+        help="Number of time to retry timeout on remote devices",
+    )
+    op.add_argument(
+        "--test-reflect-stringify",
+        dest="test_reflect_stringify",
+        help="instead of running tests, use them to test the "
+        "Reflect.stringify code in specified file",
+    )
+    # --enable-webrender is ignored as it is not relevant for JIT
+    # tests, but is required for harness compatibility.
+    op.add_argument(
+        "--enable-webrender",
+        action="store_true",
+        dest="enable_webrender",
+        default=False,
+        help=argparse.SUPPRESS,
+    )
+    op.add_argument("js_shell", metavar="JS_SHELL", help="JS shell to run tests with")
+    op.add_argument(
+        "-z", "--gc-zeal", help="GC zeal mode to use when running the shell"
+    )
+
+    options, test_args = op.parse_known_args(argv)
+    js_shell = which(options.js_shell)
+    test_environment = get_environment_overlay(js_shell, options.gc_zeal)
+
+    if not (os.path.isfile(js_shell) and os.access(js_shell, os.X_OK)):
+        if (
+            platform.system() != "Windows"
+            or os.path.isfile(js_shell)
+            or not os.path.isfile(js_shell + ".exe")
+            or not os.access(js_shell + ".exe", os.X_OK)
+        ):
+            op.error("shell is not executable: " + js_shell)
+
+    if jittests.stdio_might_be_broken():
+        # Prefer erring on the side of caution and not using stdio if
+        # it might be broken on this platform.  The file-redirect
+        # fallback should work on any platform, so at worst by
+        # guessing wrong we might have slowed down the tests a bit.
+        #
+        # XXX technically we could check for broken stdio, but it
+        # really seems like overkill.
+        options.avoid_stdio = True
+
+    if options.retest:
+        options.read_tests = options.retest
+        options.write_failures = options.retest
+
+    test_list = []
+    read_all = True
+
+    if test_args:
+        read_all = False
+        for arg in test_args:
+            test_list += jittests.find_tests(arg)
+
+    if options.read_tests:
+        read_all = False
+        try:
+            f = open(options.read_tests)
+            for line in f:
+                test_list.append(os.path.join(jittests.TEST_DIR, line.strip("\n")))
+            f.close()
+        except IOError:
+            if options.retest:
+                read_all = True
+            else:
+                sys.stderr.write(
+                    "Exception thrown trying to read test file"
+                    " '{}'\n".format(options.read_tests)
+                )
+                traceback.print_exc()
+                sys.stderr.write("---\n")
+
+    if read_all:
+        test_list = jittests.find_tests()
+
+    if options.exclude_from:
+        with open(options.exclude_from) as fh:
+            for line in fh:
+                line_exclude = line.strip()
+                if not line_exclude.startswith("#") and len(line_exclude):
+                    options.exclude.append(line_exclude)
+
+    if options.exclude:
+        exclude_list = []
+        for exclude in options.exclude:
+            exclude_list += jittests.find_tests(exclude)
+        test_list = [test for test in test_list if test not in set(exclude_list)]
+
+    if not test_list:
+        print("No tests found matching command line arguments.", file=sys.stderr)
+        sys.exit(0)
+
+    test_list = [jittests.JitTest.from_file(_, options) for _ in test_list]
+
+    if not options.run_slow:
+        test_list = [_ for _ in test_list if not _.slow]
+
+    if options.test_reflect_stringify is not None:
+        for test in test_list:
+            test.test_reflect_stringify = options.test_reflect_stringify
+
+    # If chunking is enabled, determine which tests are part of this chunk.
+    # This code was adapted from testing/mochitest/runtestsremote.py.
+    if options.total_chunks > 1:
+        total_tests = len(test_list)
+        tests_per_chunk = math.ceil(total_tests / float(options.total_chunks))
+        start = int(round((options.this_chunk - 1) * tests_per_chunk))
+        end = int(round(options.this_chunk * tests_per_chunk))
+        test_list = test_list[start:end]
+
+    if not test_list:
+        print(
+            "No tests found matching command line arguments after filtering.",
+            file=sys.stderr,
+        )
+        sys.exit(0)
+
+    # The full test list is ready. Now create copies for each JIT configuration.
+    test_flags = get_jitflags(options.jitflags)
+
+    test_list = [_ for test in test_list for _ in test.copy_variants(test_flags)]
+
+    job_list = (test for test in test_list)
+    job_count = len(test_list)
+
+    if options.repeat:
+
+        def repeat_copy(job_list_generator, repeat):
+            job_list = list(job_list_generator)
+            for i in range(repeat):
+                for test in job_list:
+                    if i == 0:
+                        yield test
+                    else:
+                        yield test.copy()
+
+        job_list = repeat_copy(job_list, options.repeat)
+        job_count *= options.repeat
+
+    if options.ignore_timeouts:
+        read_all = False
+        try:
+            with open(options.ignore_timeouts) as f:
+                ignore = set()
+                for line in f.readlines():
+                    path = line.strip("\n")
+                    ignore.add(path)
+                options.ignore_timeouts = ignore
+        except IOError:
+            sys.exit("Error reading file: " + options.ignore_timeouts)
+    else:
+        options.ignore_timeouts = set()
+
+    prefix = (
+        [js_shell] + shlex.split(options.shell_args) + shlex.split(options.feature_args)
+    )
+    prologue = os.path.join(jittests.LIB_DIR, "prologue.js")
+    if options.remote:
+        prologue = posixpath.join(options.remote_test_root, "lib", "prologue.js")
+
+    prefix += ["-f", prologue]
+
+    if options.debugger:
+        if job_count > 1:
+            print(
+                "Multiple tests match command line"
+                " arguments, debugger can only run one"
+            )
+            jobs = list(job_list)
+
+            def display_job(job):
+                flags = ""
+                if len(job.jitflags) != 0:
+                    flags = "({})".format(" ".join(job.jitflags))
+                return "{} {}".format(job.path, flags)
+
+            try:
+                tc = choose_item(jobs, max_items=50, display=display_job)
+            except Exception as e:
+                sys.exit(str(e))
+        else:
+            tc = next(job_list)
+
+        if options.debugger == "gdb":
+            debug_cmd = ["gdb", "--args"]
+        elif options.debugger == "lldb":
+            debug_cmd = ["lldb", "--"]
+        elif options.debugger == "rr":
+            debug_cmd = ["rr", "record"]
+        else:
+            debug_cmd = options.debugger.split()
+
+        with change_env(test_environment):
+            with TemporaryDirectory() as tempdir:
+                if options.debugger == "rr":
+                    subprocess.call(
+                        debug_cmd
+                        + tc.command(
+                            prefix, jittests.LIB_DIR, jittests.MODULE_DIR, tempdir
+                        )
+                    )
+                    os.execvp("rr", ["rr", "replay"])
+                else:
+                    os.execvp(
+                        debug_cmd[0],
+                        debug_cmd
+                        + tc.command(
+                            prefix, jittests.LIB_DIR, jittests.MODULE_DIR, tempdir
+                        ),
+                    )
+        sys.exit()
+
+    try:
+        ok = None
+        if options.remote:
+            ok = jittests.run_tests(job_list, job_count, prefix, options, remote=True)
+        else:
+            with change_env(test_environment):
+                ok = jittests.run_tests(job_list, job_count, prefix, options)
+        if not ok:
+            sys.exit(2)
+    except OSError:
+        if not os.path.exists(prefix[0]):
+            print(
+                "JS shell argument: file does not exist:" " '{}'".format(prefix[0]),
+                file=sys.stderr,
+            )
+            sys.exit(1)
+        else:
+            raise
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/jit-test/jit_test.py
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/tests/jstests.py
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/tests/jstests.py	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/tests/jstests.py	(revision 228)
@@ -0,0 +1,877 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+The JS Shell Test Harness.
+
+See the adjacent README.txt for more details.
+"""
+
+from __future__ import print_function
+
+import math
+import os
+import posixpath
+import re
+import shlex
+import sys
+import tempfile
+import platform
+
+from os.path import abspath, dirname, isfile, realpath
+from contextlib import contextmanager
+from copy import copy
+from datetime import datetime
+from itertools import chain
+from subprocess import list2cmdline, call
+
+from lib.tests import (
+    RefTestCase,
+    get_jitflags,
+    get_cpu_count,
+    get_environment_overlay,
+    change_env,
+)
+from lib.results import ResultsSink, TestOutput
+from lib.progressbar import ProgressBar
+from lib.adaptor import xdr_annotate
+from tempfile import TemporaryDirectory
+
+if sys.platform.startswith("linux") or sys.platform.startswith("darwin"):
+    from lib.tasks_unix import run_all_tests
+else:
+    from lib.tasks_win import run_all_tests
+
+here = dirname(abspath(__file__))
+
+
+@contextmanager
+def changedir(dirname):
+    pwd = os.getcwd()
+    os.chdir(dirname)
+    try:
+        yield
+    finally:
+        os.chdir(pwd)
+
+
+class PathOptions(object):
+    def __init__(self, location, requested_paths, excluded_paths):
+        self.requested_paths = requested_paths
+        self.excluded_files, self.excluded_dirs = PathOptions._split_files_and_dirs(
+            location, excluded_paths
+        )
+
+    @staticmethod
+    def _split_files_and_dirs(location, paths):
+        """Split up a set of paths into files and directories"""
+        files, dirs = set(), set()
+        for path in paths:
+            fullpath = os.path.join(location, path)
+            if path.endswith("/"):
+                dirs.add(path[:-1])
+            elif os.path.isdir(fullpath):
+                dirs.add(path)
+            elif os.path.exists(fullpath):
+                files.add(path)
+
+        return files, dirs
+
+    def should_run(self, filename):
+        # If any tests are requested by name, skip tests that do not match.
+        if self.requested_paths and not any(
+            req in filename for req in self.requested_paths
+        ):
+            return False
+
+        # Skip excluded tests.
+        if filename in self.excluded_files:
+            return False
+
+        for dir in self.excluded_dirs:
+            if filename.startswith(dir + "/"):
+                return False
+
+        return True
+
+
+def parse_args():
+    """
+    Parse command line arguments.
+    Returns a tuple of: (options, js_shell, requested_paths, excluded_paths)
+        options :object: The raw OptionParser output.
+        js_shell :str: The absolute location of the shell to test with.
+        requested_paths :set<str>: Test paths specially requested on the CLI.
+        excluded_paths :set<str>: Test paths specifically excluded by the CLI.
+    """
+    from argparse import ArgumentParser
+
+    op = ArgumentParser(
+        description="Run jstests JS shell tests",
+        epilog="Shell output format: [ pass | fail | timeout | skip ] progress | time",
+    )
+    op.add_argument(
+        "--xul-info",
+        dest="xul_info_src",
+        help="config data for xulRuntime" " (avoids search for config/autoconf.mk)",
+    )
+
+    harness_og = op.add_argument_group("Harness Controls", "Control how tests are run.")
+    harness_og.add_argument(
+        "-j",
+        "--worker-count",
+        type=int,
+        default=max(1, get_cpu_count()),
+        help="Number of tests to run in parallel" " (default %(default)s)",
+    )
+    harness_og.add_argument(
+        "-t",
+        "--timeout",
+        type=float,
+        default=150.0,
+        help="Set maximum time a test is allows to run" " (in seconds).",
+    )
+    harness_og.add_argument(
+        "--show-slow",
+        action="store_true",
+        help="Show tests taking longer than a minimum time" " (in seconds).",
+    )
+    harness_og.add_argument(
+        "--slow-test-threshold",
+        type=float,
+        default=5.0,
+        help="Time in seconds a test can take until it is"
+        "considered slow (default %(default)s).",
+    )
+    harness_og.add_argument(
+        "-a",
+        "--args",
+        dest="shell_args",
+        default="",
+        help="Extra args to pass to the JS shell.",
+    )
+    harness_og.add_argument(
+        "--feature-args",
+        dest="feature_args",
+        default="",
+        help="Extra args to pass to the JS shell even when feature-testing.",
+    )
+    harness_og.add_argument(
+        "--jitflags",
+        dest="jitflags",
+        default="none",
+        type=str,
+        help="IonMonkey option combinations. One of all,"
+        " debug, ion, and none (default %(default)s).",
+    )
+    harness_og.add_argument(
+        "--tbpl",
+        action="store_true",
+        help="Runs each test in all configurations tbpl" " tests.",
+    )
+    harness_og.add_argument(
+        "--tbpl-debug",
+        action="store_true",
+        help="Runs each test in some faster configurations" " tbpl tests.",
+    )
+    harness_og.add_argument(
+        "-g", "--debug", action="store_true", help="Run a test in debugger."
+    )
+    harness_og.add_argument(
+        "--debugger", default="gdb -q --args", help="Debugger command."
+    )
+    harness_og.add_argument(
+        "-J", "--jorendb", action="store_true", help="Run under JS debugger."
+    )
+    harness_og.add_argument(
+        "--passthrough",
+        action="store_true",
+        help="Run tests with stdin/stdout attached to" " caller.",
+    )
+    harness_og.add_argument(
+        "--test-reflect-stringify",
+        dest="test_reflect_stringify",
+        help="instead of running tests, use them to test the "
+        "Reflect.stringify code in specified file",
+    )
+    harness_og.add_argument(
+        "--valgrind", action="store_true", help="Run tests in valgrind."
+    )
+    harness_og.add_argument(
+        "--valgrind-args", default="", help="Extra args to pass to valgrind."
+    )
+    harness_og.add_argument(
+        "--rr",
+        action="store_true",
+        help="Run tests under RR record-and-replay debugger.",
+    )
+    harness_og.add_argument(
+        "-C",
+        "--check-output",
+        action="store_true",
+        help="Run tests to check output for different jit-flags",
+    )
+    harness_og.add_argument(
+        "--remote", action="store_true", help="Run tests on a remote device"
+    )
+    harness_og.add_argument(
+        "--deviceIP",
+        action="store",
+        type=str,
+        dest="device_ip",
+        help="IP address of remote device to test",
+    )
+    harness_og.add_argument(
+        "--devicePort",
+        action="store",
+        type=int,
+        dest="device_port",
+        default=20701,
+        help="port of remote device to test",
+    )
+    harness_og.add_argument(
+        "--deviceSerial",
+        action="store",
+        type=str,
+        dest="device_serial",
+        default=None,
+        help="ADB device serial number of remote device to test",
+    )
+    harness_og.add_argument(
+        "--remoteTestRoot",
+        dest="remote_test_root",
+        action="store",
+        type=str,
+        default="/data/local/tmp/test_root",
+        help="The remote directory to use as test root" " (e.g. %(default)s)",
+    )
+    harness_og.add_argument(
+        "--localLib",
+        dest="local_lib",
+        action="store",
+        type=str,
+        help="The location of libraries to push -- preferably" " stripped",
+    )
+    harness_og.add_argument(
+        "--no-xdr",
+        dest="use_xdr",
+        action="store_false",
+        help="Whether to disable caching of self-hosted parsed content in XDR format.",
+    )
+
+    input_og = op.add_argument_group("Inputs", "Change what tests are run.")
+    input_og.add_argument(
+        "-f",
+        "--file",
+        dest="test_file",
+        action="append",
+        help="Get tests from the given file.",
+    )
+    input_og.add_argument(
+        "-x",
+        "--exclude-file",
+        action="append",
+        help="Exclude tests from the given file.",
+    )
+    input_og.add_argument(
+        "--wpt",
+        dest="wpt",
+        choices=["enabled", "disabled", "if-running-everything"],
+        default="if-running-everything",
+        help="Enable or disable shell web-platform-tests "
+        "(default: enable if no test paths are specified).",
+    )
+    input_og.add_argument(
+        "--include",
+        action="append",
+        dest="requested_paths",
+        default=[],
+        help="Include the given test file or directory.",
+    )
+    input_og.add_argument(
+        "--exclude",
+        action="append",
+        dest="excluded_paths",
+        default=[],
+        help="Exclude the given test file or directory.",
+    )
+    input_og.add_argument(
+        "-d",
+        "--exclude-random",
+        dest="random",
+        action="store_false",
+        help='Exclude tests marked as "random."',
+    )
+    input_og.add_argument(
+        "--run-skipped", action="store_true", help='Run tests marked as "skip."'
+    )
+    input_og.add_argument(
+        "--run-only-skipped",
+        action="store_true",
+        help='Run only tests marked as "skip."',
+    )
+    input_og.add_argument(
+        "--run-slow-tests",
+        action="store_true",
+        help='Do not skip tests marked as "slow."',
+    )
+    input_og.add_argument(
+        "--no-extensions",
+        action="store_true",
+        help="Run only tests conforming to the ECMAScript 5" " standard.",
+    )
+    input_og.add_argument(
+        "--repeat", type=int, default=1, help="Repeat tests the given number of times."
+    )
+
+    output_og = op.add_argument_group("Output", "Modify the harness and tests output.")
+    output_og.add_argument(
+        "-s",
+        "--show-cmd",
+        action="store_true",
+        help="Show exact commandline used to run each test.",
+    )
+    output_og.add_argument(
+        "-o",
+        "--show-output",
+        action="store_true",
+        help="Print each test's output to the file given by" " --output-file.",
+    )
+    output_og.add_argument(
+        "-F",
+        "--failed-only",
+        action="store_true",
+        help="If a --show-* option is given, only print" " output for failed tests.",
+    )
+    output_og.add_argument(
+        "--no-show-failed",
+        action="store_true",
+        help="Don't print output for failed tests" " (no-op with --show-output).",
+    )
+    output_og.add_argument(
+        "-O",
+        "--output-file",
+        help="Write all output to the given file" " (default: stdout).",
+    )
+    output_og.add_argument(
+        "--failure-file", help="Write all not-passed tests to the given file."
+    )
+    output_og.add_argument(
+        "--no-progress",
+        dest="hide_progress",
+        action="store_true",
+        help="Do not show the progress bar.",
+    )
+    output_og.add_argument(
+        "--tinderbox",
+        dest="format",
+        action="store_const",
+        const="automation",
+        help="Use automation-parseable output format.",
+    )
+    output_og.add_argument(
+        "--format",
+        dest="format",
+        default="none",
+        choices=["automation", "none"],
+        help="Output format. Either automation or none" " (default %(default)s).",
+    )
+    output_og.add_argument(
+        "--log-wptreport",
+        dest="wptreport",
+        action="store",
+        help="Path to write a Web Platform Tests report (wptreport)",
+    )
+    output_og.add_argument(
+        "--this-chunk", type=int, default=1, help="The test chunk to run."
+    )
+    output_og.add_argument(
+        "--total-chunks", type=int, default=1, help="The total number of test chunks."
+    )
+
+    special_og = op.add_argument_group(
+        "Special", "Special modes that do not run tests."
+    )
+    special_og.add_argument(
+        "--make-manifests",
+        metavar="BASE_TEST_PATH",
+        help="Generate reftest manifest files.",
+    )
+
+    op.add_argument("--js-shell", metavar="JS_SHELL", help="JS shell to run tests with")
+    op.add_argument(
+        "-z", "--gc-zeal", help="GC zeal mode to use when running the shell"
+    )
+
+    options, args = op.parse_known_args()
+
+    # Need a shell unless in a special mode.
+    if not options.make_manifests:
+        if not args:
+            op.error("missing JS_SHELL argument")
+        options.js_shell = os.path.abspath(args.pop(0))
+
+    requested_paths = set(args)
+
+    # Valgrind, gdb, and rr are mutually exclusive.
+    if sum(map(bool, (options.valgrind, options.debug, options.rr))) > 1:
+        op.error("--valgrind, --debug, and --rr are mutually exclusive.")
+
+    # Fill the debugger field, as needed.
+    if options.debug:
+        if options.debugger == "lldb":
+            debugger_prefix = ["lldb", "--"]
+        else:
+            debugger_prefix = options.debugger.split()
+    else:
+        debugger_prefix = []
+
+    if options.valgrind:
+        debugger_prefix = ["valgrind"] + options.valgrind_args.split()
+        if os.uname()[0] == "Darwin":
+            debugger_prefix.append("--dsymutil=yes")
+        options.show_output = True
+    if options.rr:
+        debugger_prefix = ["rr", "record"]
+
+    js_cmd_args = shlex.split(options.shell_args) + shlex.split(options.feature_args)
+    if options.jorendb:
+        options.passthrough = True
+        options.hide_progress = True
+        options.worker_count = 1
+        debugger_path = realpath(
+            os.path.join(
+                abspath(dirname(abspath(__file__))),
+                "..",
+                "..",
+                "examples",
+                "jorendb.js",
+            )
+        )
+        js_cmd_args.extend(["-d", "-f", debugger_path, "--"])
+    prefix = RefTestCase.build_js_cmd_prefix(
+        options.js_shell, js_cmd_args, debugger_prefix
+    )
+
+    # If files with lists of tests to run were specified, add them to the
+    # requested tests set.
+    if options.test_file:
+        for test_file in options.test_file:
+            requested_paths |= set(
+                [line.strip() for line in open(test_file).readlines()]
+            )
+
+    excluded_paths = set(options.excluded_paths)
+
+    # If files with lists of tests to exclude were specified, add them to the
+    # excluded tests set.
+    if options.exclude_file:
+        for filename in options.exclude_file:
+            with open(filename, "r") as fp:
+                for line in fp:
+                    if line.startswith("#"):
+                        continue
+                    line = line.strip()
+                    if not line:
+                        continue
+                    excluded_paths.add(line)
+
+    # Handle output redirection, if requested and relevant.
+    options.output_fp = sys.stdout
+    if options.output_file:
+        if not options.show_cmd:
+            options.show_output = True
+        try:
+            options.output_fp = open(options.output_file, "w")
+        except IOError as ex:
+            raise SystemExit("Failed to open output file: " + str(ex))
+
+    # Hide the progress bar if it will get in the way of other output.
+    options.hide_progress = (
+        options.format == "automation"
+        or not ProgressBar.conservative_isatty()
+        or options.hide_progress
+    )
+
+    return (options, prefix, requested_paths, excluded_paths)
+
+
+def load_wpt_tests(xul_tester, requested_paths, excluded_paths, update_manifest=True):
+    """Return a list of `RefTestCase` objects for the jsshell testharness.js
+    tests filtered by the given paths and debug-ness."""
+    repo_root = abspath(os.path.join(here, "..", "..", ".."))
+    wp = os.path.join(repo_root, "testing", "web-platform")
+    wpt = os.path.join(wp, "tests")
+
+    sys_paths = [
+        "python/mozterm",
+        "python/mozboot",
+        "testing/mozbase/mozcrash",
+        "testing/mozbase/mozdevice",
+        "testing/mozbase/mozfile",
+        "testing/mozbase/mozinfo",
+        "testing/mozbase/mozleak",
+        "testing/mozbase/mozlog",
+        "testing/mozbase/mozprocess",
+        "testing/mozbase/mozprofile",
+        "testing/mozbase/mozrunner",
+        "testing/mozbase/mozversion",
+        "testing/web-platform/",
+        "testing/web-platform/tests/tools",
+        "testing/web-platform/tests/tools/third_party/html5lib",
+        "testing/web-platform/tests/tools/third_party/webencodings",
+        "testing/web-platform/tests/tools/wptrunner",
+        "testing/web-platform/tests/tools/wptserve",
+        "third_party/python/requests",
+    ]
+    abs_sys_paths = [os.path.join(repo_root, path) for path in sys_paths]
+
+    failed = False
+    for path in abs_sys_paths:
+        if not os.path.isdir(path):
+            failed = True
+            print("Could not add '%s' to the path")
+    if failed:
+        return []
+
+    sys.path[0:0] = abs_sys_paths
+
+    import manifestupdate
+    from wptrunner import products, testloader, wptcommandline, wpttest, wptlogging
+
+    manifest_root = tempfile.gettempdir()
+    (maybe_dist, maybe_bin) = os.path.split(os.path.dirname(xul_tester.js_bin))
+    if maybe_bin == "bin":
+        (maybe_root, maybe_dist) = os.path.split(maybe_dist)
+        if maybe_dist == "dist":
+            if os.path.exists(os.path.join(maybe_root, "_tests")):
+                # Assume this is a gecko objdir.
+                manifest_root = maybe_root
+
+    logger = wptlogging.setup({}, {})
+
+    test_manifests = manifestupdate.run(
+        repo_root, manifest_root, logger, update=update_manifest
+    )
+
+    kwargs = vars(wptcommandline.create_parser().parse_args([]))
+    kwargs.update(
+        {
+            "config": os.path.join(
+                manifest_root, "_tests", "web-platform", "wptrunner.local.ini"
+            ),
+            "gecko_e10s": False,
+            "verify": False,
+            "wasm": xul_tester.test("wasmIsSupported()"),
+        }
+    )
+    wptcommandline.set_from_config(kwargs)
+
+    def filter_jsshell_tests(it):
+        for item_type, path, tests in it:
+            tests = set(item for item in tests if item.jsshell)
+            if tests:
+                yield item_type, path, tests
+
+    run_info_extras = products.Product(kwargs["config"], "firefox").run_info_extras(
+        **kwargs
+    )
+    run_info = wpttest.get_run_info(
+        kwargs["run_info"],
+        "firefox",
+        debug=xul_tester.test("isDebugBuild"),
+        extras=run_info_extras,
+    )
+    release_or_beta = xul_tester.test("getBuildConfiguration().release_or_beta")
+    run_info["release_or_beta"] = release_or_beta
+    run_info["nightly_build"] = not release_or_beta
+    early_beta_or_earlier = xul_tester.test(
+        "getBuildConfiguration().early_beta_or_earlier"
+    )
+    run_info["early_beta_or_earlier"] = early_beta_or_earlier
+
+    path_filter = testloader.TestFilter(
+        test_manifests, include=requested_paths, exclude=excluded_paths
+    )
+    loader = testloader.TestLoader(
+        test_manifests,
+        ["testharness"],
+        run_info,
+        manifest_filters=[path_filter, filter_jsshell_tests],
+    )
+
+    extra_helper_paths = [
+        os.path.join(here, "web-platform-test-shims.js"),
+        os.path.join(wpt, "resources", "testharness.js"),
+        os.path.join(here, "testharnessreport.js"),
+    ]
+
+    def resolve(test_path, script):
+        if script.startswith("/"):
+            return os.path.join(wpt, script[1:])
+
+        return os.path.join(wpt, os.path.dirname(test_path), script)
+
+    tests = []
+    for test in loader.tests["testharness"]:
+        test_path = os.path.relpath(test.path, wpt)
+        scripts = [resolve(test_path, s) for s in test.scripts]
+        extra_helper_paths_for_test = extra_helper_paths + scripts
+
+        # We must create at least one test with the default options, along with
+        # one test for each option given in a test-also annotation.
+        options = [None]
+        for m in test.itermeta():
+            if m.has_key("test-also"):  # NOQA: W601
+                options += m.get("test-also").split()
+        for option in options:
+            test_case = RefTestCase(
+                wpt,
+                test_path,
+                extra_helper_paths=extra_helper_paths_for_test[:],
+                wpt=test,
+            )
+            if option:
+                test_case.options.append(option)
+            tests.append(test_case)
+    return tests
+
+
+def load_tests(options, requested_paths, excluded_paths):
+    """
+    Returns a tuple: (test_count, test_gen)
+        test_count: [int] Number of tests that will be in test_gen
+        test_gen: [iterable<Test>] Tests found that should be run.
+    """
+    import lib.manifest as manifest
+
+    if options.js_shell is None:  # no shell: answer all manifest conditions trivially
+        xul_tester = manifest.NullXULInfoTester()
+    else:
+        if options.xul_info_src is None:
+            xul_info = manifest.XULInfo.create(options.js_shell)
+        else:
+            xul_abi, xul_os, xul_debug = options.xul_info_src.split(r":")  # "abi:os:debug"
+            xul_debug = xul_debug.lower() == "true"
+            xul_info = manifest.XULInfo(xul_abi, xul_os, xul_debug)
+        feature_args = shlex.split(options.feature_args)
+        xul_tester = manifest.XULInfoTester(xul_info, options, feature_args)
+
+    test_dir = dirname(abspath(__file__))
+    path_options = PathOptions(test_dir, requested_paths, excluded_paths)
+    test_count = manifest.count_tests(test_dir, path_options)
+    test_gen = manifest.load_reftests(test_dir, path_options, xul_tester)
+
+    # WPT tests are already run in the browser in their own harness.
+    wpt_enabled = options.wpt == "enabled" or (
+        options.wpt == "if-running-everything"
+        and len(requested_paths) == 0
+        and not options.make_manifests
+    )
+    if wpt_enabled:
+        wpt_tests = load_wpt_tests(xul_tester, requested_paths, excluded_paths)
+        test_count += len(wpt_tests)
+        test_gen = chain(test_gen, wpt_tests)
+
+    if options.test_reflect_stringify is not None:
+
+        def trs_gen(tests):  # rewrite each test to run under Reflect.stringify
+            for test in tests:
+                test.test_reflect_stringify = options.test_reflect_stringify
+                # Even if the test is not normally expected to pass, we still
+                # expect reflect-stringify to be able to handle it.
+                test.expect = True
+                test.random = False
+                test.slow = False
+                yield test
+
+        test_gen = trs_gen(test_gen)
+
+    if options.make_manifests:
+        manifest.make_manifests(options.make_manifests, test_gen)
+        sys.exit()  # special mode: write manifests and stop, no tests are run
+
+    # Create a new test list. Apply each TBPL configuration to every test.
+    flags_list = None
+    if options.tbpl:  # automation presets take precedence over user jitflags
+        flags_list = get_jitflags("all")
+    elif options.tbpl_debug:
+        flags_list = get_jitflags("debug")
+    else:
+        flags_list = get_jitflags(options.jitflags, none=None)
+
+    if flags_list:
+
+        def flag_gen(tests):  # one copy of each test per jit-flags configuration
+            for test in tests:
+                for jitflags in flags_list:
+                    tmp_test = copy(test)
+                    tmp_test.jitflags = copy(test.jitflags)  # don't share the list
+                    tmp_test.jitflags.extend(jitflags)
+                    yield tmp_test
+
+        test_count = test_count * len(flags_list)  # count grows with the variants
+        test_gen = flag_gen(test_gen)
+
+    if options.test_file:
+        paths = set()
+        for test_file in options.test_file:
+            paths |= set([line.strip() for line in open(test_file).readlines()])
+        test_gen = (_ for _ in test_gen if _.path in paths)  # keep only listed tests
+
+    if options.no_extensions:
+        pattern = os.sep + "extensions" + os.sep
+        test_gen = (_ for _ in test_gen if pattern not in _.path)
+
+    if not options.random:
+        test_gen = (_ for _ in test_gen if not _.random)
+
+    if options.run_only_skipped:
+        options.run_skipped = True
+        test_gen = (_ for _ in test_gen if not _.enable)  # keep only disabled tests
+
+    if not options.run_slow_tests:
+        test_gen = (_ for _ in test_gen if not _.slow)
+
+    if options.repeat:
+        test_gen = (test for test in test_gen for i in range(options.repeat))
+        test_count *= options.repeat
+
+    return test_count, test_gen
+
+
+def main():
+    # Entry point: parse args, resolve the shell, load and run the tests.
+    options, prefix, requested_paths, excluded_paths = parse_args()
+    if options.js_shell is not None and not (
+        isfile(options.js_shell) and os.access(options.js_shell, os.X_OK)
+    ):
+        if (  # on Windows, tolerate a missing extension if shell.exe exists
+            platform.system() != "Windows"
+            or isfile(options.js_shell)
+            or not isfile(options.js_shell + ".exe")
+            or not os.access(options.js_shell + ".exe", os.X_OK)
+        ):
+            print("Could not find executable shell: " + options.js_shell)
+            return 1
+
+    test_count, test_gen = load_tests(options, requested_paths, excluded_paths)
+    test_environment = get_environment_overlay(options.js_shell, options.gc_zeal)
+
+    if test_count == 0:
+        print("no tests selected")
+        return 1
+
+    test_dir = dirname(abspath(__file__))
+
+    if options.debug:  # debugger mode runs exactly one test under the debugger
+        if test_count > 1:
+            print(
+                "Multiple tests match command line arguments,"
+                " debugger can only run one"
+            )
+            for tc in test_gen:
+                print("    {}".format(tc.path))
+            return 2
+
+        with changedir(test_dir), change_env(
+            test_environment
+        ), TemporaryDirectory() as tempdir:
+            cmd = next(test_gen).get_command(prefix, tempdir)
+            if options.show_cmd:
+                print(list2cmdline(cmd))
+            call(cmd)
+        return 0
+
+    # The test_gen generator is converted into a list in
+    # run_all_tests. Go ahead and do it here so we can apply
+    # chunking.
+    #
+    # If chunking is enabled, determine which tests are part of this chunk.
+    # This code was adapted from testing/mochitest/runtestsremote.py.
+    if options.total_chunks > 1:
+        tests_per_chunk = math.ceil(test_count / float(options.total_chunks))
+        start = int(round((options.this_chunk - 1) * tests_per_chunk))
+        end = int(round(options.this_chunk * tests_per_chunk))
+        test_gen = list(test_gen)[start:end]
+
+    if options.remote:  # push the tests to an ADB device and run them there
+        results = ResultsSink("jstests", options, test_count)
+        try:
+            from lib.remote import init_remote_dir, init_device
+
+            device = init_device(options)
+            tempdir = posixpath.join(options.remote_test_root, "tmp")
+            jtd_tests = posixpath.join(options.remote_test_root, "tests", "tests")
+            init_remote_dir(device, jtd_tests)
+            device.push(test_dir, jtd_tests, timeout=600)
+            device.chmod(jtd_tests, recursive=True)
+            prefix[0] = options.js_shell  # reset argv[0] to the shell path for remote runs
+            if options.use_xdr:
+                test_gen = xdr_annotate(test_gen, options)
+            for test in test_gen:
+                out = run_test_remote(test, device, prefix, tempdir, options)
+                results.push(out)
+            results.finish(True)
+        except KeyboardInterrupt:
+            results.finish(False)  # report what completed before the interrupt
+
+        return 0 if results.all_passed() else 1
+
+    with changedir(test_dir), change_env(
+        test_environment
+    ), TemporaryDirectory() as tempdir:
+        results = ResultsSink("jstests", options, test_count)
+        try:
+            for out in run_all_tests(test_gen, prefix, tempdir, results.pb, options):
+                results.push(out)
+            results.finish(True)
+        except KeyboardInterrupt:
+            results.finish(False)
+
+        return 0 if results.all_passed() else 1
+
+    return 0  # NOTE(review): unreachable — both paths above already return
+
+
+def run_test_remote(test, device, prefix, tempdir, options):
+    # Run one test on an ADB device and return its TestOutput.
+    from mozdevice import ADBDevice, ADBProcessError
+
+    cmd = test.get_command(prefix, tempdir)
+    test_root_parent = os.path.dirname(test.root)
+    jtd_tests = posixpath.join(options.remote_test_root, "tests")
+    cmd = [_.replace(test_root_parent, jtd_tests) for _ in cmd]  # host paths -> device paths
+
+    env = {"TZ": "PST8PDT", "LD_LIBRARY_PATH": os.path.dirname(prefix[0])}  # libs live next to the shell
+
+    adb_cmd = ADBDevice._escape_command_line(cmd)
+    start = datetime.now()  # wall-clock start, used for the elapsed time below
+    try:
+        # Allow ADBError or ADBTimeoutError to terminate the test run,
+        # but handle ADBProcessError in order to support the use of
+        # non-zero exit codes in the JavaScript shell tests.
+        out = device.shell_output(
+            adb_cmd, env=env, cwd=options.remote_test_root, timeout=int(options.timeout)
+        )
+        returncode = 0  # no ADBProcessError raised, so the command exited 0
+    except ADBProcessError as e:
+        # Treat ignorable intermittent adb communication errors as
+        # skipped tests.
+        out = str(e.adb_process.stdout)
+        returncode = e.adb_process.exitcode
+        re_ignore = re.compile(r"error: (closed|device .* not found)")
+        if returncode == 1 and re_ignore.search(out):
+            print("Skipping {} due to ignorable adb error {}".format(test.path, out))
+            test.skip_if_cond = "true"
+            returncode = test.SKIPPED_EXIT_STATUS
+
+    elapsed = (datetime.now() - start).total_seconds()
+
+    # We can't distinguish between stdout and stderr so we pass
+    # the same buffer to both.
+    return TestOutput(test, cmd, out, out, returncode, elapsed, False)
+
+
+if __name__ == "__main__":
+    sys.exit(main())

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/tests/jstests.py
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/tests/lib/tempfile.py
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/tests/lib/tempfile.py	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-tests-patch/firefox-102.15.0-new/js/src/tests/lib/tempfile.py	(revision 228)
@@ -0,0 +1,5 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
+from tempfile import TemporaryDirectory
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-x86-patch/create.patch.sh
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-x86-patch/create.patch.sh	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-x86-patch/create.patch.sh	(revision 228)
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+VERSION=102.15.0
+
+tar --files-from=file.list -xJvf ../firefox-${VERSION}esr.source.tar.xz  # unpack only the files listed in file.list
+mv firefox-$VERSION firefox-$VERSION-orig  # pristine copy for diffing
+
+cp -rf ./firefox-$VERSION-new ./firefox-$VERSION  # overlay the patched tree
+
+diff --unified -Nr  firefox-$VERSION-orig  firefox-$VERSION > firefox-$VERSION-x86.patch
+
+mv firefox-$VERSION-x86.patch ../patches
+
+rm -rf ./firefox-$VERSION
+rm -rf ./firefox-$VERSION-orig

Property changes on: radix-1.9/sources/packages/x/mozjs/create-102.15.0-x86-patch/create.patch.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-x86-patch/file.list
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-x86-patch/file.list	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-x86-patch/file.list	(revision 228)
@@ -0,0 +1 @@
+firefox-102.15.0/modules/fdlibm/src/math_private.h
Index: radix-1.9/sources/packages/x/mozjs/create-102.15.0-x86-patch/firefox-102.15.0-new/modules/fdlibm/src/math_private.h
===================================================================
--- radix-1.9/sources/packages/x/mozjs/create-102.15.0-x86-patch/firefox-102.15.0-new/modules/fdlibm/src/math_private.h	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/create-102.15.0-x86-patch/firefox-102.15.0-new/modules/fdlibm/src/math_private.h	(revision 228)
@@ -0,0 +1,919 @@
+/*
+ * ====================================================
+ * Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved.
+ *
+ * Developed at SunPro, a Sun Microsystems, Inc. business.
+ * Permission to use, copy, modify, and distribute this
+ * software is freely granted, provided that this notice
+ * is preserved.
+ * ====================================================
+ */
+
+/*
+ * from: @(#)fdlibm.h 5.1 93/09/24
+ * $FreeBSD$
+ */
+
+#ifndef _MATH_PRIVATE_H_
+#define	_MATH_PRIVATE_H_
+
+#include <cfloat>
+#include <stdint.h>
+#include <sys/types.h>
+
+#include "fdlibm.h"
+
+#include "mozilla/EndianUtils.h"
+
+/*
+ * Emulate FreeBSD internal double types.
+ * Adapted from https://github.com/freebsd/freebsd-src/search?q=__double_t
+ */
+
+typedef long double __double_t;
+typedef __double_t  double_t;
+
+/*
+ * The original fdlibm code used statements like:
+ *	n0 = ((*(int*)&one)>>29)^1;		* index of high word *
+ *	ix0 = *(n0+(int*)&x);			* high word of x *
+ *	ix1 = *((1-n0)+(int*)&x);		* low word of x *
+ * to dig two 32 bit words out of the 64 bit IEEE floating point
+ * value.  That is non-ANSI, and, moreover, the gcc instruction
+ * scheduler gets it wrong.  We instead use the following macros.
+ * Unlike the original code, we determine the endianness at compile
+ * time, not at run time; I don't see much benefit to selecting
+ * endianness at run time.
+ */
+
+#ifndef u_int32_t
+#define u_int32_t uint32_t
+#endif
+#ifndef u_int64_t
+#define u_int64_t uint64_t
+#endif
+
+/* A union which permits us to convert between a long double and
+   four 32 bit ints.  */
+
+#if MOZ_BIG_ENDIAN()
+
+typedef union
+{
+  long double value;
+  struct {
+    u_int32_t mswhi;
+    u_int32_t mswlo;
+    u_int32_t lswhi;
+    u_int32_t lswlo;
+  } parts32;
+  struct {
+    u_int64_t msw;
+    u_int64_t lsw;
+  } parts64;
+} ieee_quad_shape_type;
+
+#endif
+
+#if MOZ_LITTLE_ENDIAN()
+
+typedef union
+{
+  long double value;
+  struct {
+    u_int32_t lswlo;
+    u_int32_t lswhi;
+    u_int32_t mswlo;
+    u_int32_t mswhi;
+  } parts32;
+  struct {
+    u_int64_t lsw;
+    u_int64_t msw;
+  } parts64;
+} ieee_quad_shape_type;
+
+#endif
+
+/*
+ * A union which permits us to convert between a double and two 32 bit
+ * ints.
+ */
+
+#if MOZ_BIG_ENDIAN()
+
+typedef union
+{
+  double value;
+  struct
+  {
+    u_int32_t msw;
+    u_int32_t lsw;
+  } parts;
+  struct
+  {
+    u_int64_t w;
+  } xparts;
+} ieee_double_shape_type;
+
+#endif
+
+#if MOZ_LITTLE_ENDIAN()
+
+typedef union
+{
+  double value;
+  struct
+  {
+    u_int32_t lsw;
+    u_int32_t msw;
+  } parts;
+  struct
+  {
+    u_int64_t w;
+  } xparts;
+} ieee_double_shape_type;
+
+#endif
+
+/* Get two 32 bit ints from a double.  */
+
+#define EXTRACT_WORDS(ix0,ix1,d)				\
+do {								\
+  ieee_double_shape_type ew_u;					\
+  ew_u.value = (d);						\
+  (ix0) = ew_u.parts.msw;					\
+  (ix1) = ew_u.parts.lsw;					\
+} while (0)
+
+/* Get a 64-bit int from a double. */
+#define EXTRACT_WORD64(ix,d)					\
+do {								\
+  ieee_double_shape_type ew_u;					\
+  ew_u.value = (d);						\
+  (ix) = ew_u.xparts.w;						\
+} while (0)
+
+/* Get the more significant 32 bit int from a double.  */
+
+#define GET_HIGH_WORD(i,d)					\
+do {								\
+  ieee_double_shape_type gh_u;					\
+  gh_u.value = (d);						\
+  (i) = gh_u.parts.msw;						\
+} while (0)
+
+/* Get the less significant 32 bit int from a double.  */
+
+#define GET_LOW_WORD(i,d)					\
+do {								\
+  ieee_double_shape_type gl_u;					\
+  gl_u.value = (d);						\
+  (i) = gl_u.parts.lsw;						\
+} while (0)
+
+/* Set a double from two 32 bit ints.  */
+
+#define INSERT_WORDS(d,ix0,ix1)					\
+do {								\
+  ieee_double_shape_type iw_u;					\
+  iw_u.parts.msw = (ix0);					\
+  iw_u.parts.lsw = (ix1);					\
+  (d) = iw_u.value;						\
+} while (0)
+
+/* Set a double from a 64-bit int. */
+#define INSERT_WORD64(d,ix)					\
+do {								\
+  ieee_double_shape_type iw_u;					\
+  iw_u.xparts.w = (ix);						\
+  (d) = iw_u.value;						\
+} while (0)
+
+/* Set the more significant 32 bits of a double from an int.  */
+
+#define SET_HIGH_WORD(d,v)					\
+do {								\
+  ieee_double_shape_type sh_u;					\
+  sh_u.value = (d);						\
+  sh_u.parts.msw = (v);						\
+  (d) = sh_u.value;						\
+} while (0)
+
+/* Set the less significant 32 bits of a double from an int.  */
+
+#define SET_LOW_WORD(d,v)					\
+do {								\
+  ieee_double_shape_type sl_u;					\
+  sl_u.value = (d);						\
+  sl_u.parts.lsw = (v);						\
+  (d) = sl_u.value;						\
+} while (0)
+
+/*
+ * A union which permits us to convert between a float and a 32 bit
+ * int.
+ */
+
+typedef union
+{
+  float value;
+  /* FIXME: Assumes 32 bit int.  */
+  unsigned int word;
+} ieee_float_shape_type;
+
+/* Get a 32 bit int from a float.  */
+
+#define GET_FLOAT_WORD(i,d)					\
+do {								\
+  ieee_float_shape_type gf_u;					\
+  gf_u.value = (d);						\
+  (i) = gf_u.word;						\
+} while (0)
+
+/* Set a float from a 32 bit int.  */
+
+#define SET_FLOAT_WORD(d,i)					\
+do {								\
+  ieee_float_shape_type sf_u;					\
+  sf_u.word = (i);						\
+  (d) = sf_u.value;						\
+} while (0)
+
+/*
+ * Get expsign and mantissa as 16 bit and 64 bit ints from an 80 bit long
+ * double.
+ */
+
+#define	EXTRACT_LDBL80_WORDS(ix0,ix1,d)				\
+do {								\
+  union IEEEl2bits ew_u;					\
+  ew_u.e = (d);							\
+  (ix0) = ew_u.xbits.expsign;					\
+  (ix1) = ew_u.xbits.man;					\
+} while (0)
+
+/*
+ * Get expsign and mantissa as one 16 bit and two 64 bit ints from a 128 bit
+ * long double.
+ */
+
+#define	EXTRACT_LDBL128_WORDS(ix0,ix1,ix2,d)			\
+do {								\
+  union IEEEl2bits ew_u;					\
+  ew_u.e = (d);							\
+  (ix0) = ew_u.xbits.expsign;					\
+  (ix1) = ew_u.xbits.manh;					\
+  (ix2) = ew_u.xbits.manl;					\
+} while (0)
+
+/* Get expsign as a 16 bit int from a long double.  */
+
+#define	GET_LDBL_EXPSIGN(i,d)					\
+do {								\
+  union IEEEl2bits ge_u;					\
+  ge_u.e = (d);							\
+  (i) = ge_u.xbits.expsign;					\
+} while (0)
+
+/*
+ * Set an 80 bit long double from a 16 bit int expsign and a 64 bit int
+ * mantissa.
+ */
+
+#define	INSERT_LDBL80_WORDS(d,ix0,ix1)				\
+do {								\
+  union IEEEl2bits iw_u;					\
+  iw_u.xbits.expsign = (ix0);					\
+  iw_u.xbits.man = (ix1);					\
+  (d) = iw_u.e;							\
+} while (0)
+
+/*
+ * Set a 128 bit long double from a 16 bit int expsign and two 64 bit ints
+ * comprising the mantissa.
+ */
+
+#define	INSERT_LDBL128_WORDS(d,ix0,ix1,ix2)			\
+do {								\
+  union IEEEl2bits iw_u;					\
+  iw_u.xbits.expsign = (ix0);					\
+  iw_u.xbits.manh = (ix1);					\
+  iw_u.xbits.manl = (ix2);					\
+  (d) = iw_u.e;							\
+} while (0)
+
+/* Set expsign of a long double from a 16 bit int.  */
+
+#define	SET_LDBL_EXPSIGN(d,v)					\
+do {								\
+  union IEEEl2bits se_u;					\
+  se_u.e = (d);							\
+  se_u.xbits.expsign = (v);					\
+  (d) = se_u.e;							\
+} while (0)
+
+#ifdef __i386__
+/* Long double constants are broken on i386. */
+#define	LD80C(m, ex, v) {						\
+	.xbits.man = __CONCAT(m, ULL),					\
+	.xbits.expsign = (0x3fff + (ex)) | ((v) < 0 ? 0x8000 : 0),	\
+}
+#else
+/* The above works on non-i386 too, but we use this to check v. */
+#define	LD80C(m, ex, v)	{ .e = (v), }
+#endif
+
+#ifdef FLT_EVAL_METHOD
+/*
+ * Attempt to get strict C99 semantics for assignment with non-C99 compilers.
+ */
+#if !defined(_MSC_VER) && (FLT_EVAL_METHOD == 0 || __GNUC__ == 0)
+#define	STRICT_ASSIGN(type, lval, rval)	((lval) = (rval))
+#else
+#define	STRICT_ASSIGN(type, lval, rval) do {	\
+	volatile type __lval;			\
+						\
+	if (sizeof(type) >= sizeof(long double))	\
+		(lval) = (rval);		\
+	else {					\
+		__lval = (rval);		\
+		(lval) = __lval;		\
+	}					\
+} while (0)
+#endif
+#else
+#define	STRICT_ASSIGN(type, lval, rval) do {	\
+	volatile type __lval;			\
+						\
+	if (sizeof(type) >= sizeof(long double))	\
+		(lval) = (rval);		\
+	else {					\
+		__lval = (rval);		\
+		(lval) = __lval;		\
+	}					\
+} while (0)
+#endif /* FLT_EVAL_METHOD */
+
+/* Support switching the mode to FP_PE if necessary. */
+#if defined(__i386__) && !defined(NO_FPSETPREC)
+#define	ENTERI() ENTERIT(long double)
+#define	ENTERIT(returntype)			\
+	returntype __retval;			\
+	fp_prec_t __oprec;			\
+						\
+	if ((__oprec = fpgetprec()) != FP_PE)	\
+		fpsetprec(FP_PE)
+#define	RETURNI(x) do {				\
+	__retval = (x);				\
+	if (__oprec != FP_PE)			\
+		fpsetprec(__oprec);		\
+	RETURNF(__retval);			\
+} while (0)
+#define	ENTERV()				\
+	fp_prec_t __oprec;			\
+						\
+	if ((__oprec = fpgetprec()) != FP_PE)	\
+		fpsetprec(FP_PE)
+#define	RETURNV() do {				\
+	if (__oprec != FP_PE)			\
+		fpsetprec(__oprec);		\
+	return;			\
+} while (0)
+#else
+#define	ENTERI()
+#define	ENTERIT(x)
+#define	RETURNI(x)	RETURNF(x)
+#define	ENTERV()
+#define	RETURNV()	return
+#endif
+
+/* Default return statement if hack*_t() is not used. */
+#define      RETURNF(v)      return (v)
+
+/*
+ * 2sum gives the same result as 2sumF without requiring |a| >= |b| or
+ * a == 0, but is slower.
+ */
+#define	_2sum(a, b) do {	\
+	__typeof(a) __s, __w;	\
+				\
+	__w = (a) + (b);	\
+	__s = __w - (a);	\
+	(b) = ((a) - (__w - __s)) + ((b) - __s); \
+	(a) = __w;		\
+} while (0)
+
+/*
+ * 2sumF algorithm.
+ *
+ * "Normalize" the terms in the infinite-precision expression a + b for
+ * the sum of 2 floating point values so that b is as small as possible
+ * relative to 'a'.  (The resulting 'a' is the value of the expression in
+ * the same precision as 'a' and the resulting b is the rounding error.)
+ * |a| must be >= |b| or 0, b's type must be no larger than 'a's type, and
+ * exponent overflow or underflow must not occur.  This uses a Theorem of
+ * Dekker (1971).  See Knuth (1981) 4.2.2 Theorem C.  The name "TwoSum"
+ * is apparently due to Shewchuk (1997).
+ *
+ * For this to always work, assignment of a + b to 'a' must not retain any
+ * extra precision in a + b.  This is required by C standards but broken
+ * in many compilers.  The brokenness cannot be worked around using
+ * STRICT_ASSIGN() like we do elsewhere, since the efficiency of this
+ * algorithm would be destroyed by non-null strict assignments.  (The
+ * compilers are correct to be broken -- the efficiency of all floating
+ * point code calculations would be destroyed similarly if they forced the
+ * conversions.)
+ *
+ * Fortunately, a case that works well can usually be arranged by building
+ * any extra precision into the type of 'a' -- 'a' should have type float_t,
+ * double_t or long double.  b's type should be no larger than 'a's type.
+ * Callers should use these types with scopes as large as possible, to
+ * reduce their own extra-precision and efficiency problems.  In
+ * particular, they shouldn't convert back and forth just to call here.
+ */
+#ifdef DEBUG
+#define	_2sumF(a, b) do {				\
+	__typeof(a) __w;				\
+	volatile __typeof(a) __ia, __ib, __r, __vw;	\
+							\
+	__ia = (a);					\
+	__ib = (b);					\
+	assert(__ia == 0 || fabsl(__ia) >= fabsl(__ib));	\
+							\
+	__w = (a) + (b);				\
+	(b) = ((a) - __w) + (b);			\
+	(a) = __w;					\
+							\
+	/* The next 2 assertions are weak if (a) is already long double. */ \
+	assert((long double)__ia + __ib == (long double)(a) + (b));	\
+	__vw = __ia + __ib;				\
+	__r = __ia - __vw;				\
+	__r += __ib;					\
+	assert(__vw == (a) && __r == (b));		\
+} while (0)
+#else /* !DEBUG */
+#define	_2sumF(a, b) do {	\
+	__typeof(a) __w;	\
+				\
+	__w = (a) + (b);	\
+	(b) = ((a) - __w) + (b); \
+	(a) = __w;		\
+} while (0)
+#endif /* DEBUG */
+
+/*
+ * Set x += c, where x is represented in extra precision as a + b.
+ * x must be sufficiently normalized and sufficiently larger than c,
+ * and the result is then sufficiently normalized.
+ *
+ * The details of ordering are that |a| must be >= |c| (so that (a, c)
+ * can be normalized without extra work to swap 'a' with c).  The details of
+ * the normalization are that b must be small relative to the normalized 'a'.
+ * Normalization of (a, c) makes the normalized c tiny relative to the
+ * normalized a, so b remains small relative to 'a' in the result.  However,
+ * b need not ever be tiny relative to 'a'.  For example, b might be about
+ * 2**20 times smaller than 'a' to give about 20 extra bits of precision.
+ * That is usually enough, and adding c (which by normalization is about
+ * 2**53 times smaller than a) cannot change b significantly.  However,
+ * cancellation of 'a' with c in normalization of (a, c) may reduce 'a'
+ * significantly relative to b.  The caller must ensure that significant
+ * cancellation doesn't occur, either by having c of the same sign as 'a',
+ * or by having |c| a few percent smaller than |a|.  Pre-normalization of
+ * (a, b) may help.
+ *
+ * This is a variant of an algorithm of Kahan (see Knuth (1981) 4.2.2
+ * exercise 19).  We gain considerable efficiency by requiring the terms to
+ * be sufficiently normalized and sufficiently increasing.
+ */
+#define	_3sumF(a, b, c) do {	\
+	__typeof(a) __tmp;	\
+				\
+	__tmp = (c);		\
+	_2sumF(__tmp, (a));	\
+	(b) += (a);		\
+	(a) = __tmp;		\
+} while (0)
+
+/*
+ * Common routine to process the arguments to nan(), nanf(), and nanl().
+ */
+void _scan_nan(uint32_t *__words, int __num_words, const char *__s);
+
+/*
+ * Mix 0, 1 or 2 NaNs.  First add 0 to each arg.  This normally just turns
+ * signaling NaNs into quiet NaNs by setting a quiet bit.  We do this
+ * because we want to never return a signaling NaN, and also because we
+ * don't want the quiet bit to affect the result.  Then mix the converted
+ * args using the specified operation.
+ *
+ * When one arg is NaN, the result is typically that arg quieted.  When both
+ * args are NaNs, the result is typically the quietening of the arg whose
+ * mantissa is largest after quietening.  When neither arg is NaN, the
+ * result may be NaN because it is indeterminate, or finite for subsequent
+ * construction of a NaN as the indeterminate 0.0L/0.0L.
+ *
+ * Technical complications: the result in bits after rounding to the final
+ * precision might depend on the runtime precision and/or on compiler
+ * optimizations, especially when different register sets are used for
+ * different precisions.  Try to make the result not depend on at least the
+ * runtime precision by always doing the main mixing step in long double
+ * precision.  Try to reduce dependencies on optimizations by adding the
+ * 0's in different precisions (unless everything is in long double
+ * precision).
+ */
+#define	nan_mix(x, y)		(nan_mix_op((x), (y), +))
+#define	nan_mix_op(x, y, op)	(((x) + 0.0L) op ((y) + 0))
+
+#ifdef _COMPLEX_H
+
+/*
+ * C99 specifies that complex numbers have the same representation as
+ * an array of two elements, where the first element is the real part
+ * and the second element is the imaginary part.
+ */
+typedef union {
+	float complex f;
+	float a[2];
+} float_complex;
+typedef union {
+	double complex f;
+	double a[2];
+} double_complex;
+typedef union {
+	long double complex f;
+	long double a[2];
+} long_double_complex;
+#define	REALPART(z)	((z).a[0])
+#define	IMAGPART(z)	((z).a[1])
+
+/*
+ * Inline functions that can be used to construct complex values.
+ *
+ * The C99 standard intends x+I*y to be used for this, but x+I*y is
+ * currently unusable in general since gcc introduces many overflow,
+ * underflow, sign and efficiency bugs by rewriting I*y as
+ * (0.0+I)*(y+0.0*I) and laboriously computing the full complex product.
+ * In particular, I*Inf is corrupted to NaN+I*Inf, and I*-0 is corrupted
+ * to -0.0+I*0.0.
+ *
+ * The C11 standard introduced the macros CMPLX(), CMPLXF() and CMPLXL()
+ * to construct complex values.  Compilers that conform to the C99
+ * standard require the following functions to avoid the above issues.
+ */
+
+#ifndef CMPLXF
+static __inline float complex
+CMPLXF(float x, float y)
+{
+	float_complex z;
+
+	REALPART(z) = x;
+	IMAGPART(z) = y;
+	return (z.f);
+}
+#endif
+
+#ifndef CMPLX
+static __inline double complex
+CMPLX(double x, double y)
+{
+	double_complex z;
+
+	REALPART(z) = x;
+	IMAGPART(z) = y;
+	return (z.f);
+}
+#endif
+
+#ifndef CMPLXL
+static __inline long double complex
+CMPLXL(long double x, long double y)
+{
+	long_double_complex z;
+
+	REALPART(z) = x;
+	IMAGPART(z) = y;
+	return (z.f);
+}
+#endif
+
+#endif /* _COMPLEX_H */
+ 
+/*
+ * The rnint() family rounds to the nearest integer for a restricted range
+ * of args (up to about 2**MANT_DIG).  We assume that the current
+ * rounding mode is FE_TONEAREST so that this can be done efficiently.
+ * Extra precision causes more problems in practice, and we only centralize
+ * this here to reduce those problems, and have not solved the efficiency
+ * problems.  The exp2() family uses a more delicate version of this that
+ * requires extracting bits from the intermediate value, so it is not
+ * centralized here and should copy any solution of the efficiency problems.
+ */
+
+static inline double
+rnint(__double_t x)
+{
+	/*
+	 * This casts to double to kill any extra precision.  This depends
+	 * on the cast being applied to a double_t to avoid compiler bugs
+	 * (this is a cleaner version of STRICT_ASSIGN()).  This is
+	 * inefficient if there actually is extra precision, but is hard
+	 * to improve on.  We use double_t in the API to minimise conversions
+	 * for just calling here.  Note that we cannot easily change the
+	 * magic number to the one that works directly with double_t, since
+	 * the rounding precision is variable at runtime on x86 so the
+	 * magic number would need to be variable.  Assuming that the
+	 * rounding precision is always the default is too fragile.  This
+	 * and many other complications will move when the default is
+	 * changed to FP_PE.
+	 */
+	return ((double)(x + 0x1.8p52) - 0x1.8p52);
+}
+
+/*
+ * irint() and i64rint() give the same result as casting to their integer
+ * return type provided their arg is a floating point integer.  They can
+ * sometimes be more efficient because no rounding is required.
+ */
+#if (defined(amd64) || defined(__i386__)) && defined(__GNUCLIKE_ASM)
+#define	irint(x)						\
+    (sizeof(x) == sizeof(float) &&				\
+    sizeof(__float_t) == sizeof(long double) ? irintf(x) :	\
+    sizeof(x) == sizeof(double) &&				\
+    sizeof(__double_t) == sizeof(long double) ? irintd(x) :	\
+    sizeof(x) == sizeof(long double) ? irintl(x) : (int)(x))
+#else
+#define	irint(x)	((int)(x))
+#endif
+
+#ifdef DEBUG
+#if defined(__amd64__) || defined(__i386__)
+#define	breakpoint()	asm("int $3")
+#else
+#include <signal.h>
+
+#define	breakpoint()	raise(SIGTRAP)
+#endif
+#endif
+
+/* Write a pari script to test things externally. */
+#ifdef DOPRINT
+#include <stdio.h>
+
+#ifndef DOPRINT_SWIZZLE
+#define	DOPRINT_SWIZZLE		0
+#endif
+
+#ifdef DOPRINT_LD80
+
+#define	DOPRINT_START(xp) do {						\
+	uint64_t __lx;							\
+	uint16_t __hx;							\
+									\
+	/* Hack to give more-problematic args. */			\
+	EXTRACT_LDBL80_WORDS(__hx, __lx, *xp);				\
+	__lx ^= DOPRINT_SWIZZLE;					\
+	INSERT_LDBL80_WORDS(*xp, __hx, __lx);				\
+	printf("x = %.21Lg; ", (long double)*xp);			\
+} while (0)
+#define	DOPRINT_END1(v)							\
+	printf("y = %.21Lg; z = 0; show(x, y, z);\n", (long double)(v))
+#define	DOPRINT_END2(hi, lo)						\
+	printf("y = %.21Lg; z = %.21Lg; show(x, y, z);\n",		\
+	    (long double)(hi), (long double)(lo))
+
+#elif defined(DOPRINT_D64)
+
+#define	DOPRINT_START(xp) do {						\
+	uint32_t __hx, __lx;						\
+									\
+	EXTRACT_WORDS(__hx, __lx, *xp);					\
+	__lx ^= DOPRINT_SWIZZLE;					\
+	INSERT_WORDS(*xp, __hx, __lx);					\
+	printf("x = %.21Lg; ", (long double)*xp);			\
+} while (0)
+#define	DOPRINT_END1(v)							\
+	printf("y = %.21Lg; z = 0; show(x, y, z);\n", (long double)(v))
+#define	DOPRINT_END2(hi, lo)						\
+	printf("y = %.21Lg; z = %.21Lg; show(x, y, z);\n",		\
+	    (long double)(hi), (long double)(lo))
+
+#elif defined(DOPRINT_F32)
+
+#define	DOPRINT_START(xp) do {						\
+	uint32_t __hx;							\
+									\
+	GET_FLOAT_WORD(__hx, *xp);					\
+	__hx ^= DOPRINT_SWIZZLE;					\
+	SET_FLOAT_WORD(*xp, __hx);					\
+	printf("x = %.21Lg; ", (long double)*xp);			\
+} while (0)
+#define	DOPRINT_END1(v)							\
+	printf("y = %.21Lg; z = 0; show(x, y, z);\n", (long double)(v))
+#define	DOPRINT_END2(hi, lo)						\
+	printf("y = %.21Lg; z = %.21Lg; show(x, y, z);\n",		\
+	    (long double)(hi), (long double)(lo))
+
+#else /* !DOPRINT_LD80 && !DOPRINT_D64 (LD128 only) */
+
+#ifndef DOPRINT_SWIZZLE_HIGH
+#define	DOPRINT_SWIZZLE_HIGH	0
+#endif
+
+#define	DOPRINT_START(xp) do {						\
+	uint64_t __lx, __llx;						\
+	uint16_t __hx;							\
+									\
+	EXTRACT_LDBL128_WORDS(__hx, __lx, __llx, *xp);			\
+	__llx ^= DOPRINT_SWIZZLE;					\
+	__lx ^= DOPRINT_SWIZZLE_HIGH;					\
+	INSERT_LDBL128_WORDS(*xp, __hx, __lx, __llx);			\
+	printf("x = %.36Lg; ", (long double)*xp);					\
+} while (0)
+#define	DOPRINT_END1(v)							\
+	printf("y = %.36Lg; z = 0; show(x, y, z);\n", (long double)(v))
+#define	DOPRINT_END2(hi, lo)						\
+	printf("y = %.36Lg; z = %.36Lg; show(x, y, z);\n",		\
+	    (long double)(hi), (long double)(lo))
+
+#endif /* DOPRINT_LD80 */
+
+#else /* !DOPRINT */
+#define	DOPRINT_START(xp)
+#define	DOPRINT_END1(v)
+#define	DOPRINT_END2(hi, lo)
+#endif /* DOPRINT */
+
+#define	RETURNP(x) do {			\
+	DOPRINT_END1(x);		\
+	RETURNF(x);			\
+} while (0)
+#define	RETURNPI(x) do {		\
+	DOPRINT_END1(x);		\
+	RETURNI(x);			\
+} while (0)
+#define	RETURN2P(x, y) do {		\
+	DOPRINT_END2((x), (y));		\
+	RETURNF((x) + (y));		\
+} while (0)
+#define	RETURN2PI(x, y) do {		\
+	DOPRINT_END2((x), (y));		\
+	RETURNI((x) + (y));		\
+} while (0)
+#ifdef STRUCT_RETURN
+#define	RETURNSP(rp) do {		\
+	if (!(rp)->lo_set)		\
+		RETURNP((rp)->hi);	\
+	RETURN2P((rp)->hi, (rp)->lo);	\
+} while (0)
+#define	RETURNSPI(rp) do {		\
+	if (!(rp)->lo_set)		\
+		RETURNPI((rp)->hi);	\
+	RETURN2PI((rp)->hi, (rp)->lo);	\
+} while (0)
+#endif
+#define	SUM2P(x, y) ({			\
+	const __typeof (x) __x = (x);	\
+	const __typeof (y) __y = (y);	\
+					\
+	DOPRINT_END2(__x, __y);		\
+	__x + __y;			\
+})
+
+/*
+ * ieee style elementary functions
+ *
+ * We rename functions here to improve other sources' diffability
+ * against fdlibm.
+ */
+#define	__ieee754_sqrt	sqrt
+#define	__ieee754_acos	acos
+#define	__ieee754_acosh	acosh
+#define	__ieee754_log	log
+#define	__ieee754_log2	log2
+#define	__ieee754_atanh	atanh
+#define	__ieee754_asin	asin
+#define	__ieee754_atan2	atan2
+#define	__ieee754_exp	exp
+#define	__ieee754_cosh	cosh
+#define	__ieee754_fmod	fmod
+#define	__ieee754_pow	pow
+#define	__ieee754_lgamma lgamma
+#define	__ieee754_gamma	gamma
+#define	__ieee754_lgamma_r lgamma_r
+#define	__ieee754_gamma_r gamma_r
+#define	__ieee754_log10	log10
+#define	__ieee754_sinh	sinh
+#define	__ieee754_hypot	hypot
+#define	__ieee754_j0	j0
+#define	__ieee754_j1	j1
+#define	__ieee754_y0	y0
+#define	__ieee754_y1	y1
+#define	__ieee754_jn	jn
+#define	__ieee754_yn	yn
+#define	__ieee754_remainder remainder
+#define	__ieee754_scalb	scalb
+#define	__ieee754_sqrtf	sqrtf
+#define	__ieee754_acosf	acosf
+#define	__ieee754_acoshf acoshf
+#define	__ieee754_logf	logf
+#define	__ieee754_atanhf atanhf
+#define	__ieee754_asinf	asinf
+#define	__ieee754_atan2f atan2f
+#define	__ieee754_expf	expf
+#define	__ieee754_coshf	coshf
+#define	__ieee754_fmodf	fmodf
+#define	__ieee754_powf	powf
+#define	__ieee754_lgammaf lgammaf
+#define	__ieee754_gammaf gammaf
+#define	__ieee754_lgammaf_r lgammaf_r
+#define	__ieee754_gammaf_r gammaf_r
+#define	__ieee754_log10f log10f
+#define	__ieee754_log2f log2f
+#define	__ieee754_sinhf	sinhf
+#define	__ieee754_hypotf hypotf
+#define	__ieee754_j0f	j0f
+#define	__ieee754_j1f	j1f
+#define	__ieee754_y0f	y0f
+#define	__ieee754_y1f	y1f
+#define	__ieee754_jnf	jnf
+#define	__ieee754_ynf	ynf
+#define	__ieee754_remainderf remainderf
+#define	__ieee754_scalbf scalbf
+
+#define acos fdlibm::acos
+#define asin fdlibm::asin
+#define atan fdlibm::atan
+#define atan2 fdlibm::atan2
+#define cos fdlibm::cos
+#define sin fdlibm::sin
+#define tan fdlibm::tan
+#define cosh fdlibm::cosh
+#define sinh fdlibm::sinh
+#define tanh fdlibm::tanh
+#define exp fdlibm::exp
+#define log fdlibm::log
+#define log10 fdlibm::log10
+#define pow fdlibm::pow
+#define ceil fdlibm::ceil
+#define ceilf fdlibm::ceilf
+#define fabs fdlibm::fabs
+#define floor fdlibm::floor
+#define acosh fdlibm::acosh
+#define asinh fdlibm::asinh
+#define atanh fdlibm::atanh
+#define cbrt fdlibm::cbrt
+#define expm1 fdlibm::expm1
+#define hypot fdlibm::hypot
+#define log1p fdlibm::log1p
+#define log2 fdlibm::log2
+#define scalb fdlibm::scalb
+#define copysign fdlibm::copysign
+#define scalbn fdlibm::scalbn
+#define trunc fdlibm::trunc
+#define truncf fdlibm::truncf
+#define floorf fdlibm::floorf
+#define nearbyint fdlibm::nearbyint
+#define nearbyintf fdlibm::nearbyintf
+#define rint fdlibm::rint
+#define rintf fdlibm::rintf
+
+/* fdlibm kernel function */
+int	__kernel_rem_pio2(double*,double*,int,int,int);
+
+/* double precision kernel functions */
+#ifndef INLINE_REM_PIO2
+int	__ieee754_rem_pio2(double,double*);
+#endif
+double	__kernel_sin(double,double,int);
+double	__kernel_cos(double,double);
+double	__kernel_tan(double,double,int);
+double	__ldexp_exp(double,int);
+#ifdef _COMPLEX_H
+double complex __ldexp_cexp(double complex,int);
+#endif
+
+/* float precision kernel functions */
+#ifndef INLINE_REM_PIO2F
+int	__ieee754_rem_pio2f(float,double*);
+#endif
+#ifndef INLINE_KERNEL_SINDF
+float	__kernel_sindf(double);
+#endif
+#ifndef INLINE_KERNEL_COSDF
+float	__kernel_cosdf(double);
+#endif
+#ifndef INLINE_KERNEL_TANDF
+float	__kernel_tandf(double,int);
+#endif
+float	__ldexp_expf(float,int);
+#ifdef _COMPLEX_H
+float complex __ldexp_cexpf(float complex,int);
+#endif
+
+/* long double precision kernel functions */
+long double __kernel_sinl(long double, long double, int);
+long double __kernel_cosl(long double, long double);
+long double __kernel_tanl(long double, long double, int);
+
+#endif /* !_MATH_PRIVATE_H_ */
Index: radix-1.9/sources/packages/x/mozjs/patches/README
===================================================================
--- radix-1.9/sources/packages/x/mozjs/patches/README	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/patches/README	(revision 228)
@@ -0,0 +1,6 @@
+
+/* begin *
+
+   TODO: Leave some comment here.
+
+ * end */
Index: radix-1.9/sources/packages/x/mozjs/patches
===================================================================
--- radix-1.9/sources/packages/x/mozjs/patches	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs/patches	(revision 228)

Property changes on: radix-1.9/sources/packages/x/mozjs/patches
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,74 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.rk358x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~
Index: radix-1.9/sources/packages/x/mozjs
===================================================================
--- radix-1.9/sources/packages/x/mozjs	(nonexistent)
+++ radix-1.9/sources/packages/x/mozjs	(revision 228)

Property changes on: radix-1.9/sources/packages/x/mozjs
___________________________________________________________________
Added: svn:ignore
## -0,0 +1,74 ##
+
+# install dir
+dist
+
+# Target build dirs
+.a1x-newlib
+.a2x-newlib
+.at91sam7s-newlib
+
+.build-machine
+
+.a1x-glibc
+.a2x-glibc
+.h3-glibc
+.h5-glibc
+.i586-glibc
+.i686-glibc
+.imx6-glibc
+.jz47xx-glibc
+.makefile
+.am335x-glibc
+.omap543x-glibc
+.p5600-glibc
+.power8-glibc
+.power8le-glibc
+.power9-glibc
+.power9le-glibc
+.m1000-glibc
+.riscv64-glibc
+.rk328x-glibc
+.rk33xx-glibc
+.rk339x-glibc
+.rk358x-glibc
+.s8xx-glibc
+.s9xx-glibc
+.x86_64-glibc
+
+# Hidden files (each file)
+.makefile
+.dist
+.rootfs
+
+# src & hw requires
+.src_requires
+.src_requires_depend
+.requires
+.requires_depend
+
+# Tarballs
+*.gz
+*.bz2
+*.lz
+*.xz
+*.tgz
+*.txz
+
+# Signatures
+*.asc
+*.sig
+*.sign
+*.sha1sum
+
+# Patches
+*.patch
+
+# Descriptions
+*.dsc
+*.txt
+
+# Default linux config files
+*.defconfig
+
+# backup copies
+*~