Change `os::args()` and `os::env()` to use `str::from_utf8_lossy()`.
Add new functions `os::args_as_bytes()` and `os::env_as_bytes()` to retrieve the args/env as byte vectors instead.
The existing methods were left returning strings because I expect that the common use-case is to want string handling.
Fixes #7188.
# option. This file may not be copied, modified, or distributed
# except according to those terms.
-# An explanation of how the build is structured:
+# <help> \(^o^)/
+#
+# Greetings, adventurer! The Rust Build System is at your service.
+#
+# Whether you want a genuine copy of `rustc`, access to the latest and
+# most authoritative Rust documentation, or even to investigate the
+# most intimate workings of the compiler itself, you've come to the
+# right place. Let's see what's on the menu.
+#
+# First, start with one of these build targets:
+#
+# * all - The default. Builds a complete, bootstrapped compiler.
+# `rustc` will be in `${target-triple}/stage2/bin/`. Run it
+# directly from the build directory if you like. This also
+# comes with docs in `doc/`.
+#
+# * check - Run the complete test suite
+#
+# * install - Install Rust. Note that installation is not necessary
+# to use the compiler.
+#
+# * uninstall - Uninstall the binaries
+#
+# For tips on working with The Rust Build System, just:
+#
+# run `make tips`
+#
+# Otherwise
+#
+# run `make`
+#
+# </help>
+#
+# <tips>
+#
+# # The Rust Build System Tip Line
+#
+# There are a bazillion different targets you might want to build. Here
+# are a few ideas.
+#
+# * docs - Build gobs of HTML documentation and put it into `doc/`
+# * check-$(crate) - Test a crate, e.g. `check-std`
+# * check-ref - Run the language reference tests
+# * check-docs - Test the documentation examples
+# * check-stage$(stage)-$(crate) - Test a crate in a specific stage
+# * check-stage$(stage)-{rpass,rfail,cfail,rmake,...} - Run tests in src/test/
+# * check-stage1-T-$(target)-H-$(host) - Run cross-compiled tests
+#
+# Then mix in some of these environment variables to harness the
+# ultimate power of The Rust Build System.
+#
+# * `VERBOSE=1` - Print all commands. Use this to see what's going on.
+# * `RUSTFLAGS=...` - Add compiler flags to all `rustc` invocations
+#
+# * `TESTNAME=...` - Specify the name of tests to run
+# * `CHECK_IGNORED=1` - Run normally-ignored tests
+# * `NO_BENCH=1` - Don't run crate benchmarks (disable `--bench` flag)
+#
+# * `CFG_ENABLE_VALGRIND=1` - Run tests under valgrind
+# * `VALGRIND_COMPILE=1` - Run the compiler itself under valgrind
+# (may require `CFG_ENABLE_VALGRIND`)
+#
+# * `NO_REBUILD=1` - Don't rebootstrap when testing std
+# (and possibly other crates)
+# * `NO_MKFILE_DEPS=1` - Don't rebuild for modified .mk files
+#
+# * `SAVE_TEMPS=1` - Use `--save-temps` flag on all `rustc` invocations
+# * `ASM_COMMENTS=1` - Use `-Z asm-comments`
+# * `TIME_PASSES=1` - Use `-Z time-passes`
+# * `TIME_LLVM_PASSES=1` - Use `-Z time-llvm-passes`
+# * `TRACE=1` - Use `-Z trace`
+#
+# # Rust recipes for build system success
+#
+# // Modifying libstd? Use this comment to run unit tests just on your change
+# make check-stage1-std NO_REBUILD=1 NO_BENCH=1
+#
+# // Added a run-pass test? Use this to test running your test
+# make check-stage1-rpass TESTNAME=my-shiny-new-test
+#
+# // Having trouble figuring out which test is failing? Turn off parallel tests
+# make check-stage1-std RUST_TEST_TASKS=1
+#
+# This is hardly all there is to know of The Rust Build System's
+# mysteries. The tale continues on the wiki[1][2].
+#
+# [1]: https://github.com/mozilla/rust/wiki/Note-build-system
+# [2]: https://github.com/mozilla/rust/wiki/Note-testsuite
+#
+# If you really feel like getting your hands dirty, then:
+#
+# run `make nitty-gritty`
+#
+# </tips>
+#
+# <nittygritty>
+#
+# # The Rust Build System
+#
+# Gosh I wish there was something useful here (TODO).
+#
+# # An (old) explanation of how the build is structured:
+#
+# *Note: Hey, like, this is probably inaccurate, and is definitely
+# an outdated and insufficient explanation of the remarkable
+# Rust Build System.*
#
# There are multiple build stages (0-3) needed to verify that the
# compiler is properly self-hosting. Each stage is divided between
# will just link against the libraries in the target lib directory.
#
# Admittedly this is a little convoluted.
-
-STAGES = 0 1 2 3
-
-######################################################################
-# Residual auto-configuration
-######################################################################
-
-# Recursive wildcard function
-# http://blog.jgc.org/2011/07/gnu-make-recursive-wildcard-function.html
-rwildcard=$(foreach d,$(wildcard $1*),$(call rwildcard,$d/,$2) \
- $(filter $(subst *,%,$2),$d))
-
-include config.mk
-
-# We track all of the object files we might build so that we can find
-# and include all of the .d files in one fell swoop.
-ALL_OBJ_FILES :=
-
-MKFILE_DEPS := config.stamp $(call rwildcard,$(CFG_SRC_DIR)mk/,*)
-NON_BUILD_HOST = $(filter-out $(CFG_BUILD),$(CFG_HOST))
-NON_BUILD_TARGET = $(filter-out $(CFG_BUILD),$(CFG_TARGET))
-
-ifneq ($(MAKE_RESTARTS),)
-CFG_INFO := $(info cfg: make restarts: $(MAKE_RESTARTS))
-endif
-
-CFG_INFO := $(info cfg: build triple $(CFG_BUILD))
-CFG_INFO := $(info cfg: host triples $(CFG_HOST))
-CFG_INFO := $(info cfg: target triples $(CFG_TARGET))
-
-ifneq ($(wildcard $(NON_BUILD_HOST)),)
-CFG_INFO := $(info cfg: non-build host triples $(NON_BUILD_HOST))
-endif
-ifneq ($(wildcard $(NON_BUILD_TARGET)),)
-CFG_INFO := $(info cfg: non-build target triples $(NON_BUILD_TARGET))
-endif
-
-CFG_RUSTC_FLAGS := $(RUSTFLAGS)
-CFG_GCCISH_CFLAGS :=
-CFG_GCCISH_LINK_FLAGS :=
-
-ifdef CFG_DISABLE_OPTIMIZE
- $(info cfg: disabling rustc optimization (CFG_DISABLE_OPTIMIZE))
- CFG_RUSTC_FLAGS +=
-else
- # The rtopt cfg turns off runtime sanity checks
- CFG_RUSTC_FLAGS += -O --cfg rtopt
-endif
-
-ifdef CFG_DISABLE_DEBUG
- CFG_RUSTC_FLAGS += --cfg ndebug
- CFG_GCCISH_CFLAGS += -DRUST_NDEBUG
-else
- $(info cfg: enabling more debugging (CFG_ENABLE_DEBUG))
- CFG_RUSTC_FLAGS += --cfg debug
- CFG_GCCISH_CFLAGS += -DRUST_DEBUG
-endif
-
-ifdef SAVE_TEMPS
- CFG_RUSTC_FLAGS += --save-temps
-endif
-ifdef ASM_COMMENTS
- CFG_RUSTC_FLAGS += -Z asm-comments
-endif
-ifdef TIME_PASSES
- CFG_RUSTC_FLAGS += -Z time-passes
-endif
-ifdef TIME_LLVM_PASSES
- CFG_RUSTC_FLAGS += -Z time-llvm-passes
-endif
-ifdef TRACE
- CFG_RUSTC_FLAGS += -Z trace
-endif
-ifdef CFG_DISABLE_RPATH
-CFG_RUSTC_FLAGS += -C no-rpath
-endif
-
-# The executables crated during this compilation process have no need to include
-# static copies of libstd and libextra. We also generate dynamic versions of all
-# libraries, so in the interest of space, prefer dynamic linking throughout the
-# compilation process.
-#
-# Note though that these flags are omitted for stage2+. This means that the
-# snapshot will be generated with a statically linked rustc so we only have to
-# worry about the distribution of one file (with its native dynamic
-# dependencies)
-RUSTFLAGS_STAGE0 += -C prefer-dynamic
-RUSTFLAGS_STAGE1 += -C prefer-dynamic
-
-# platform-specific auto-configuration
-include $(CFG_SRC_DIR)mk/platform.mk
-
-# Run the stage1/2 compilers under valgrind
-ifdef VALGRIND_COMPILE
- CFG_VALGRIND_COMPILE :=$(CFG_VALGRIND)
-else
- CFG_VALGRIND_COMPILE :=
-endif
-
-# version-string calculation
-CFG_GIT_DIR := $(CFG_SRC_DIR).git
-CFG_RELEASE = 0.10-pre
-CFG_VERSION = $(CFG_RELEASE)
-# windows exe's need numeric versions - don't use anything but
-# numbers and dots here
-CFG_VERSION_WIN = 0.10
-
-# since $(CFG_GIT) may contain spaces (especially on Windows),
-# we need to escape them. (" " to r"\ ")
-# Note that $(subst ...) ignores space after `subst`,
-# so we use a hack: define $(SPACE) which contains space character.
-SPACE :=
-SPACE +=
-ifneq ($(wildcard $(subst $(SPACE),\$(SPACE),$(CFG_GIT))),)
-ifneq ($(wildcard $(subst $(SPACE),\$(SPACE),$(CFG_GIT_DIR))),)
- CFG_VERSION += $(shell git --git-dir='$(CFG_GIT_DIR)' log -1 \
- --pretty=format:'(%h %ci)')
- CFG_VER_HASH = $(shell git --git-dir='$(CFG_GIT_DIR)' rev-parse HEAD)
-endif
-endif
-
-ifdef CFG_ENABLE_VALGRIND
- $(info cfg: enabling valgrind (CFG_ENABLE_VALGRIND))
-else
- CFG_VALGRIND :=
-endif
-ifdef CFG_BAD_VALGRIND
- $(info cfg: disabling valgrind due to its unreliability on this platform)
- CFG_VALGRIND :=
-endif
-
-
-######################################################################
-# Target-and-rule "utility variables"
-######################################################################
-
-ifdef VERBOSE
- Q :=
- E =
-else
- Q := @
- E = echo $(1)
-endif
-
-S := $(CFG_SRC_DIR)
-
-define DEF_X
-X_$(1) := $(CFG_EXE_SUFFIX_$(1))
-endef
-$(foreach target,$(CFG_TARGET),\
- $(eval $(call DEF_X,$(target))))
-
-# "Source" files we generate in builddir along the way.
-GENERATED :=
-
-# Delete the built-in rules.
-.SUFFIXES:
-%:: %,v
-%:: RCS/%,v
-%:: RCS/%
-%:: s.%
-%:: SCCS/s.%
-
-
-######################################################################
-# Cleaning out old crates
-######################################################################
-
-# $(1) is the path for directory to match against
-# $(2) is the glob to use in the match
-#
-# Note that a common bug is to accidentally construct the glob denoted
-# by $(2) with a space character prefix, which invalidates the
-# construction $(1)$(2).
-define CHECK_FOR_OLD_GLOB_MATCHES
- $(Q)MATCHES="$(wildcard $(1))"; if [ -n "$$MATCHES" ] ; then echo "warning: there are previous" \'$(notdir $(2))\' "libraries:" $$MATCHES; fi
-endef
-
-# Same interface as above, but deletes rather than just listing the files.
-ifdef VERBOSE
-define REMOVE_ALL_OLD_GLOB_MATCHES
- $(Q)MATCHES="$(wildcard $(1))"; if [ -n "$$MATCHES" ] ; then echo "warning: removing previous" \'$(notdir $(1))\' "libraries:" $$MATCHES; rm $$MATCHES ; fi
-endef
-else
-define REMOVE_ALL_OLD_GLOB_MATCHES
- $(Q)MATCHES="$(wildcard $(1))"; if [ -n "$$MATCHES" ] ; then rm $$MATCHES ; fi
-endef
-endif
-
-# We use a different strategy for LIST_ALL_OLD_GLOB_MATCHES_EXCEPT
-# than in the macros above because it needs the result of running the
-# `ls` command after other rules in the command list have run; the
-# macro-expander for $(wildcard ...) would deliver its results too
-# soon. (This is in contrast to the macros above, which are meant to
-# be run at the outset of a command list in a rule.)
-ifdef VERBOSE
-define LIST_ALL_OLD_GLOB_MATCHES
- @echo "info: now are following matches for" '$(notdir $(1))' "libraries:"
- @( ls $(1) 2>/dev/null || true )
-endef
-else
-define LIST_ALL_OLD_GLOB_MATCHES
-endef
-endif
-
-######################################################################
-# LLVM macros
-######################################################################
-
-# FIXME: x86-ism
-LLVM_COMPONENTS=x86 arm mips ipo bitreader bitwriter linker asmparser jit mcjit \
- interpreter instrumentation
-
-# Only build these LLVM tools
-LLVM_TOOLS=bugpoint llc llvm-ar llvm-as llvm-dis llvm-mc opt llvm-extract
-
-define DEF_LLVM_VARS
-# The configure script defines these variables with the target triples
-# separated by Z. This defines new ones with the expected format.
-CFG_LLVM_BUILD_DIR_$(1):=$$(CFG_LLVM_BUILD_DIR_$(subst -,_,$(1)))
-CFG_LLVM_INST_DIR_$(1):=$$(CFG_LLVM_INST_DIR_$(subst -,_,$(1)))
-
-# Any rules that depend on LLVM should depend on LLVM_CONFIG
-LLVM_CONFIG_$(1):=$$(CFG_LLVM_INST_DIR_$(1))/bin/llvm-config$$(X_$(1))
-LLVM_MC_$(1):=$$(CFG_LLVM_INST_DIR_$(1))/bin/llvm-mc$$(X_$(1))
-LLVM_VERSION_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --version)
-LLVM_BINDIR_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --bindir)
-LLVM_INCDIR_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --includedir)
-LLVM_LIBDIR_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --libdir)
-LLVM_LIBS_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --libs $$(LLVM_COMPONENTS))
-LLVM_LDFLAGS_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --ldflags)
-# On FreeBSD, it may search wrong headers (that are for pre-installed LLVM),
-# so we replace -I with -iquote to ensure that it searches bundled LLVM first.
-LLVM_CXXFLAGS_$(1)=$$(subst -I, -iquote , $$(shell "$$(LLVM_CONFIG_$(1))" --cxxflags))
-LLVM_HOST_TRIPLE_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --host-target)
-
-LLVM_AS_$(1)=$$(CFG_LLVM_INST_DIR_$(1))/bin/llvm-as$$(X_$(1))
-LLC_$(1)=$$(CFG_LLVM_INST_DIR_$(1))/bin/llc$$(X_$(1))
-
-endef
-
-$(foreach host,$(CFG_HOST), \
- $(eval $(call DEF_LLVM_VARS,$(host))))
-
-######################################################################
-# Exports for sub-utilities
-######################################################################
-
-# Note that any variable that re-configure should pick up needs to be
-# exported
-
-export CFG_SRC_DIR
-export CFG_BUILD_DIR
-export CFG_VERSION
-export CFG_VERSION_WIN
-export CFG_RELEASE
-export CFG_BUILD
-export CFG_LLVM_ROOT
-export CFG_ENABLE_MINGW_CROSS
-export CFG_PREFIX
-export CFG_LIBDIR
-export CFG_RUSTLIBDIR
-export CFG_LIBDIR_RELATIVE
-export CFG_DISABLE_INJECT_STD_VERSION
-
-######################################################################
-# Per-stage targets and runner
-######################################################################
-
-include $(CFG_SRC_DIR)mk/crates.mk
-
-define SREQ
-# $(1) is the stage number
-# $(2) is the target triple
-# $(3) is the host triple
-
-# Destinations of artifacts for the host compiler
-HROOT$(1)_H_$(3) = $(3)/stage$(1)
-HBIN$(1)_H_$(3) = $$(HROOT$(1)_H_$(3))/bin
-HLIB$(1)_H_$(3) = $$(HROOT$(1)_H_$(3))/$$(CFG_LIBDIR_RELATIVE)
-
-# Destinations of artifacts for target architectures
-TROOT$(1)_T_$(2)_H_$(3) = $$(HLIB$(1)_H_$(3))/$$(CFG_RUSTLIBDIR)/$(2)
-TBIN$(1)_T_$(2)_H_$(3) = $$(TROOT$(1)_T_$(2)_H_$(3))/bin
-TLIB$(1)_T_$(2)_H_$(3) = $$(TROOT$(1)_T_$(2)_H_$(3))/lib
-
-# Preqrequisites for using the stageN compiler
-ifeq ($(1),0)
-HSREQ$(1)_H_$(3) = $$(HBIN$(1)_H_$(3))/rustc$$(X_$(3))
-else
-HSREQ$(1)_H_$(3) = \
- $$(HBIN$(1)_H_$(3))/rustc$$(X_$(3)) \
- $$(HLIB$(1)_H_$(3))/stamp.rustc \
- $$(foreach dep,$$(RUST_DEPS_rustc),$$(HLIB$(1)_H_$(3))/stamp.$$(dep)) \
- $$(MKFILE_DEPS)
-endif
-
-# Prerequisites for using the stageN compiler to build target artifacts
-TSREQ$(1)_T_$(2)_H_$(3) = \
- $$(HSREQ$(1)_H_$(3)) \
- $$(TLIB$(1)_T_$(2)_H_$(3))/libmorestack.a \
- $$(TLIB$(1)_T_$(2)_H_$(3))/libcompiler-rt.a
-
-# Prerequisites for a working stageN compiler and libraries, for a specific
-# target
-SREQ$(1)_T_$(2)_H_$(3) = \
- $$(TSREQ$(1)_T_$(2)_H_$(3)) \
- $$(foreach dep,$$(TARGET_CRATES),\
- $$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$$(dep))
-
-# Prerequisites for a working stageN compiler and complete set of target
-# libraries
-CSREQ$(1)_T_$(2)_H_$(3) = \
- $$(TSREQ$(1)_T_$(2)_H_$(3)) \
- $$(HBIN$(1)_H_$(3))/rustdoc$$(X_$(3)) \
- $$(foreach dep,$$(CRATES),$$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$$(dep)) \
- $$(foreach dep,$$(HOST_CRATES),$$(HLIB$(1)_H_$(3))/stamp.$$(dep))
-
-ifeq ($(1),0)
-# Don't run the the stage0 compiler under valgrind - that ship has sailed
-CFG_VALGRIND_COMPILE$(1) =
-else
-CFG_VALGRIND_COMPILE$(1) = $$(CFG_VALGRIND_COMPILE)
-endif
-
-# Add RUSTFLAGS_STAGEN values to the build command
-EXTRAFLAGS_STAGE$(1) = $$(RUSTFLAGS_STAGE$(1))
-
-CFGFLAG$(1)_T_$(2)_H_$(3) = stage$(1)
-
-# Pass --cfg stage0 only for the build->host part of stage0;
-# if you're building a cross config, the host->* parts are
-# effectively stage1, since it uses the just-built stage0.
-ifeq ($(1),0)
-ifneq ($(strip $(CFG_BUILD)),$(strip $(3)))
-CFGFLAG$(1)_T_$(2)_H_$(3) = stage1
-endif
-endif
-
-ifdef CFG_DISABLE_RPATH
-ifeq ($$(OSTYPE_$(3)),apple-darwin)
- RPATH_VAR$(1)_T_$(2)_H_$(3) := \
- DYLD_LIBRARY_PATH="$$$$DYLD_LIBRARY_PATH:$$(HLIB$(1)_H_$(3))"
-else
- RPATH_VAR$(1)_T_$(2)_H_$(3) := \
- LD_LIBRARY_PATH="$$$$LD_LIBRARY_PATH:$$(HLIB$(1)_H_$(3))"
-endif
-else
- RPATH_VAR$(1)_T_$(2)_H_$(3) :=
-endif
-
-STAGE$(1)_T_$(2)_H_$(3) := \
- $$(Q)$$(RPATH_VAR$(1)_T_$(2)_H_$(3)) \
- $$(call CFG_RUN_TARG_$(3),$(1), \
- $$(CFG_VALGRIND_COMPILE$(1)) \
- $$(HBIN$(1)_H_$(3))/rustc$$(X_$(3)) \
- --cfg $$(CFGFLAG$(1)_T_$(2)_H_$(3)) \
- $$(CFG_RUSTC_FLAGS) $$(EXTRAFLAGS_STAGE$(1)) --target=$(2)) \
- $$(RUSTC_FLAGS_$(2))
-
-PERF_STAGE$(1)_T_$(2)_H_$(3) := \
- $$(Q)$$(call CFG_RUN_TARG_$(3),$(1), \
- $$(CFG_PERF_TOOL) \
- $$(HBIN$(1)_H_$(3))/rustc$$(X_$(3)) \
- --cfg $$(CFGFLAG$(1)_T_$(2)_H_$(3)) \
- $$(CFG_RUSTC_FLAGS) $$(EXTRAFLAGS_STAGE$(1)) --target=$(2)) \
- $$(RUSTC_FLAGS_$(2))
-
-endef
-
-$(foreach build,$(CFG_HOST), \
- $(eval $(foreach target,$(CFG_TARGET), \
- $(eval $(foreach stage,$(STAGES), \
- $(eval $(call SREQ,$(stage),$(target),$(build))))))))
-
-######################################################################
-# rustc-H-targets
#
-# Builds a functional Rustc for the given host.
-######################################################################
-
-define DEF_RUSTC_STAGE_TARGET
-# $(1) == architecture
-# $(2) == stage
-
-rustc-stage$(2)-H-$(1): \
- $$(foreach target,$$(CFG_TARGET),$$(SREQ$(2)_T_$$(target)_H_$(1)))
-
-endef
-
-$(foreach host,$(CFG_HOST), \
- $(eval $(foreach stage,1 2 3, \
- $(eval $(call DEF_RUSTC_STAGE_TARGET,$(host),$(stage))))))
-
-rustc-stage1: rustc-stage1-H-$(CFG_BUILD)
-rustc-stage2: rustc-stage2-H-$(CFG_BUILD)
-rustc-stage3: rustc-stage3-H-$(CFG_BUILD)
-
-define DEF_RUSTC_TARGET
-# $(1) == architecture
-
-rustc-H-$(1): rustc-stage2-H-$(1)
-endef
-
-$(foreach host,$(CFG_TARGET), \
- $(eval $(call DEF_RUSTC_TARGET,$(host))))
-
-rustc-stage1: rustc-stage1-H-$(CFG_BUILD)
-rustc-stage2: rustc-stage2-H-$(CFG_BUILD)
-rustc-stage3: rustc-stage3-H-$(CFG_BUILD)
-rustc: rustc-H-$(CFG_BUILD)
-
-rustc-H-all: $(foreach host,$(CFG_HOST),rustc-H-$(host))
-
-######################################################################
-# Entrypoint rule
-######################################################################
-
-.DEFAULT_GOAL := all
-
-ifneq ($(CFG_IN_TRANSITION),)
-
-CFG_INFO := $(info cfg:)
-CFG_INFO := $(info cfg: *** compiler is in snapshot transition ***)
-CFG_INFO := $(info cfg: *** stage2 and later will not be built ***)
-CFG_INFO := $(info cfg:)
-
-#FIXME This is surely busted
-all: $(SREQ1$(CFG_BUILD)) $(GENERATED) docs
-
-else
-
-define ALL_TARGET_N
-ifneq ($$(findstring $(1),$$(CFG_HOST)),)
-# This is a host
-all-target-$(1)-host-$(2): $$(CSREQ2_T_$(1)_H_$(2))
-else
-# This is a target only
-all-target-$(1)-host-$(2): $$(SREQ2_T_$(1)_H_$(2))
-endif
-endef
-
-$(foreach target,$(CFG_TARGET), \
- $(foreach host,$(CFG_HOST), \
- $(eval $(call ALL_TARGET_N,$(target),$(host)))))
-
-ALL_TARGET_RULES = $(foreach target,$(CFG_TARGET), \
- $(foreach host,$(CFG_HOST), \
- all-target-$(target)-host-$(host)))
-
-all: $(ALL_TARGET_RULES) $(GENERATED) docs
-
-endif
-
-
-######################################################################
-# Re-configuration
-######################################################################
-
-ifndef CFG_DISABLE_MANAGE_SUBMODULES
-# This is a pretty expensive operation but I don't see any way to avoid it
-NEED_GIT_RECONFIG=$(shell cd "$(CFG_SRC_DIR)" && "$(CFG_GIT)" submodule status | grep -c '^\(+\|-\)')
-else
-NEED_GIT_RECONFIG=0
-endif
-
-ifeq ($(NEED_GIT_RECONFIG),0)
-else
-# If the submodules have changed then always execute config.mk
-.PHONY: config.stamp
-endif
-
-Makefile config.mk: config.stamp
-
-config.stamp: $(S)configure $(S)Makefile.in $(S)src/snapshots.txt
- @$(call E, cfg: reconfiguring)
- $(Q)$(S)configure $(CFG_CONFIGURE_ARGS)
-
+# </nittygritty>
+#
######################################################################
-# Primary-target makefiles
+# Primary rules
######################################################################
# Issue #9531: If you change the order of any of the following (or add
# new definitions), make sure definitions always precede their uses,
# especially for the dependency lists of recipes.
+# First, load the variables exported by the configure script
+include config.mk
+
+# Just a few macros used everywhere
+include $(CFG_SRC_DIR)mk/util.mk
+# All crates and their dependencies
+include $(CFG_SRC_DIR)mk/crates.mk
+# Reconfiguring when the makefiles or submodules change
+include $(CFG_SRC_DIR)mk/reconfig.mk
+# Various bits of setup, common macros, and top-level rules
+include $(CFG_SRC_DIR)mk/main.mk
+# C and assembly components that are not LLVM
include $(CFG_SRC_DIR)mk/rt.mk
+# Rules for crates in the target directories
include $(CFG_SRC_DIR)mk/target.mk
+# Rules for crates in the host directories
include $(CFG_SRC_DIR)mk/host.mk
+# Special rules for bootstrapping stage0
include $(CFG_SRC_DIR)mk/stage0.mk
+# Rust-specific LLVM extensions
include $(CFG_SRC_DIR)mk/rustllvm.mk
+# Documentation
include $(CFG_SRC_DIR)mk/docs.mk
+# LLVM
include $(CFG_SRC_DIR)mk/llvm.mk
######################################################################
# Secondary makefiles, conditionalized for speed
######################################################################
+# Source and binary distribution artifacts
ifneq ($(strip $(findstring dist,$(MAKECMDGOALS)) \
$(findstring check,$(MAKECMDGOALS)) \
$(findstring test,$(MAKECMDGOALS)) \
include $(CFG_SRC_DIR)mk/dist.mk
endif
+# Binary snapshots
ifneq ($(strip $(findstring snap,$(MAKECMDGOALS)) \
$(findstring clean,$(MAKECMDGOALS))),)
CFG_INFO := $(info cfg: including snap rules)
include $(CFG_SRC_DIR)mk/snap.mk
endif
+# The test suite
ifneq ($(strip $(findstring check,$(MAKECMDGOALS)) \
$(findstring test,$(MAKECMDGOALS)) \
$(findstring perf,$(MAKECMDGOALS)) \
include $(CFG_SRC_DIR)mk/tests.mk
endif
+# Performance and benchmarking
ifneq ($(findstring perf,$(MAKECMDGOALS)),)
CFG_INFO := $(info cfg: including perf rules)
include $(CFG_SRC_DIR)mk/perf.mk
endif
+# Cleaning
ifneq ($(findstring clean,$(MAKECMDGOALS)),)
CFG_INFO := $(info cfg: including clean rules)
include $(CFG_SRC_DIR)mk/clean.mk
endif
+# Installation from the build directory
ifneq ($(findstring install,$(MAKECMDGOALS)),)
CFG_INFO := $(info cfg: including install rules)
include $(CFG_SRC_DIR)mk/install.mk
endif
+# CTAGS building
ifneq ($(strip $(findstring TAGS.emacs,$(MAKECMDGOALS)) \
$(findstring TAGS.vi,$(MAKECMDGOALS))),)
CFG_INFO := $(info cfg: including ctags rules)
putvar CFG_PANDOC
fi
-if head -n 1 ${CFG_SRC_DIR}src/snapshots.txt | grep -q '^T'
-then
- CFG_IN_TRANSITION=1
- putvar CFG_IN_TRANSITION
-fi
-
# Valgrind is only reliable on Linux. On Windows it doesn't work at all, and
# on the Mac the dynamic linker causes Valgrind to emit a huge stream of
# errors.
touch config.stamp
step_msg "complete"
+msg "run \`make help\`"
+msg
DEPS_green := std native:context_switch
DEPS_rustuv := std native:uv native:uv_support
DEPS_native := std
-DEPS_syntax := std extra term serialize collections
+DEPS_syntax := std term serialize collections
DEPS_rustc := syntax native:rustllvm flate arena serialize sync getopts \
- collections
+ collections extra
DEPS_rustdoc := rustc native:sundown serialize sync getopts collections
DEPS_flate := std native:miniz
DEPS_arena := std collections
--- /dev/null
+# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+######################################################################
+# Version numbers and strings
+######################################################################
+
+# The version number
+CFG_RELEASE = 0.10-pre
+
+# The version string plus commit information
+CFG_VERSION = $(CFG_RELEASE)
+CFG_GIT_DIR := $(CFG_SRC_DIR).git
+# since $(CFG_GIT) may contain spaces (especially on Windows),
+# we need to escape them. (" " to r"\ ")
+# Note that $(subst ...) ignores space after `subst`,
+# so we use a hack: define $(SPACE) which contains space character.
+SPACE :=
+SPACE +=
+ifneq ($(wildcard $(subst $(SPACE),\$(SPACE),$(CFG_GIT))),)
+ifneq ($(wildcard $(subst $(SPACE),\$(SPACE),$(CFG_GIT_DIR))),)
+ CFG_VERSION += $(shell git --git-dir='$(CFG_GIT_DIR)' log -1 \
+ --pretty=format:'(%h %ci)')
+ CFG_VER_HASH = $(shell git --git-dir='$(CFG_GIT_DIR)' rev-parse HEAD)
+endif
+endif
+
+# windows exe's need numeric versions - don't use anything but
+# numbers and dots here
+CFG_VERSION_WIN = $(subst -pre,,$(CFG_RELEASE))
+
+
+######################################################################
+# More configuration
+######################################################################
+
+# We track all of the object files we might build so that we can find
+# and include all of the .d files in one fell swoop.
+ALL_OBJ_FILES :=
+
+ifneq ($(NO_MKFILE_DEPS),)
+MKFILE_DEPS :=
+else
+MKFILE_DEPS := config.stamp $(call rwildcard,$(CFG_SRC_DIR)mk/,*)
+endif
+NON_BUILD_HOST = $(filter-out $(CFG_BUILD),$(CFG_HOST))
+NON_BUILD_TARGET = $(filter-out $(CFG_BUILD),$(CFG_TARGET))
+
+ifneq ($(MAKE_RESTARTS),)
+CFG_INFO := $(info cfg: make restarts: $(MAKE_RESTARTS))
+endif
+
+CFG_INFO := $(info cfg: build triple $(CFG_BUILD))
+CFG_INFO := $(info cfg: host triples $(CFG_HOST))
+CFG_INFO := $(info cfg: target triples $(CFG_TARGET))
+
+ifneq ($(wildcard $(NON_BUILD_HOST)),)
+CFG_INFO := $(info cfg: non-build host triples $(NON_BUILD_HOST))
+endif
+ifneq ($(wildcard $(NON_BUILD_TARGET)),)
+CFG_INFO := $(info cfg: non-build target triples $(NON_BUILD_TARGET))
+endif
+
+CFG_RUSTC_FLAGS := $(RUSTFLAGS)
+CFG_GCCISH_CFLAGS :=
+CFG_GCCISH_LINK_FLAGS :=
+
+ifdef CFG_DISABLE_OPTIMIZE
+ $(info cfg: disabling rustc optimization (CFG_DISABLE_OPTIMIZE))
+ CFG_RUSTC_FLAGS +=
+else
+ # The rtopt cfg turns off runtime sanity checks
+ CFG_RUSTC_FLAGS += -O --cfg rtopt
+endif
+
+ifdef CFG_DISABLE_DEBUG
+ CFG_RUSTC_FLAGS += --cfg ndebug
+ CFG_GCCISH_CFLAGS += -DRUST_NDEBUG
+else
+ $(info cfg: enabling more debugging (CFG_ENABLE_DEBUG))
+ CFG_RUSTC_FLAGS += --cfg debug
+ CFG_GCCISH_CFLAGS += -DRUST_DEBUG
+endif
+
+ifdef SAVE_TEMPS
+ CFG_RUSTC_FLAGS += --save-temps
+endif
+ifdef ASM_COMMENTS
+ CFG_RUSTC_FLAGS += -Z asm-comments
+endif
+ifdef TIME_PASSES
+ CFG_RUSTC_FLAGS += -Z time-passes
+endif
+ifdef TIME_LLVM_PASSES
+ CFG_RUSTC_FLAGS += -Z time-llvm-passes
+endif
+ifdef TRACE
+ CFG_RUSTC_FLAGS += -Z trace
+endif
+ifdef CFG_DISABLE_RPATH
+CFG_RUSTC_FLAGS += -C no-rpath
+endif
+
+# The executables created during this compilation process have no need to include
+# static copies of libstd and libextra. We also generate dynamic versions of all
+# libraries, so in the interest of space, prefer dynamic linking throughout the
+# compilation process.
+#
+# Note though that these flags are omitted for stage2+. This means that the
+# snapshot will be generated with a statically linked rustc so we only have to
+# worry about the distribution of one file (with its native dynamic
+# dependencies)
+RUSTFLAGS_STAGE0 += -C prefer-dynamic
+RUSTFLAGS_STAGE1 += -C prefer-dynamic
+
+# platform-specific auto-configuration
+include $(CFG_SRC_DIR)mk/platform.mk
+
+# Run the stage1/2 compilers under valgrind
+ifdef VALGRIND_COMPILE
+ CFG_VALGRIND_COMPILE :=$(CFG_VALGRIND)
+else
+ CFG_VALGRIND_COMPILE :=
+endif
+
+ifdef CFG_ENABLE_VALGRIND
+ $(info cfg: enabling valgrind (CFG_ENABLE_VALGRIND))
+else
+ CFG_VALGRIND :=
+endif
+ifdef CFG_BAD_VALGRIND
+ $(info cfg: disabling valgrind due to its unreliability on this platform)
+ CFG_VALGRIND :=
+endif
+
+
+######################################################################
+# Target-and-rule "utility variables"
+######################################################################
+
+define DEF_X
+X_$(1) := $(CFG_EXE_SUFFIX_$(1))
+endef
+$(foreach target,$(CFG_TARGET),\
+ $(eval $(call DEF_X,$(target))))
+
+# "Source" files we generate in builddir along the way.
+GENERATED :=
+
+# Delete the built-in rules.
+.SUFFIXES:
+%:: %,v
+%:: RCS/%,v
+%:: RCS/%
+%:: s.%
+%:: SCCS/s.%
+
+
+######################################################################
+# Cleaning out old crates
+######################################################################
+
+# $(1) is the path for directory to match against
+# $(2) is the glob to use in the match
+#
+# Note that a common bug is to accidentally construct the glob denoted
+# by $(2) with a space character prefix, which invalidates the
+# construction $(1)$(2).
+define CHECK_FOR_OLD_GLOB_MATCHES
+ $(Q)MATCHES="$(wildcard $(1))"; if [ -n "$$MATCHES" ] ; then echo "warning: there are previous" \'$(notdir $(2))\' "libraries:" $$MATCHES; fi
+endef
+
+# Same interface as above, but deletes rather than just listing the files.
+ifdef VERBOSE
+define REMOVE_ALL_OLD_GLOB_MATCHES
+ $(Q)MATCHES="$(wildcard $(1))"; if [ -n "$$MATCHES" ] ; then echo "warning: removing previous" \'$(notdir $(1))\' "libraries:" $$MATCHES; rm $$MATCHES ; fi
+endef
+else
+define REMOVE_ALL_OLD_GLOB_MATCHES
+ $(Q)MATCHES="$(wildcard $(1))"; if [ -n "$$MATCHES" ] ; then rm $$MATCHES ; fi
+endef
+endif
+
+# We use a different strategy for LIST_ALL_OLD_GLOB_MATCHES_EXCEPT
+# than in the macros above because it needs the result of running the
+# `ls` command after other rules in the command list have run; the
+# macro-expander for $(wildcard ...) would deliver its results too
+# soon. (This is in contrast to the macros above, which are meant to
+# be run at the outset of a command list in a rule.)
+ifdef VERBOSE
+define LIST_ALL_OLD_GLOB_MATCHES
+	@echo "info: the following matches exist for" '$(notdir $(1))' "libraries:"
+ @( ls $(1) 2>/dev/null || true )
+endef
+else
+define LIST_ALL_OLD_GLOB_MATCHES
+endef
+endif
+
+######################################################################
+# LLVM macros
+######################################################################
+
+# FIXME: x86-ism
+LLVM_COMPONENTS=x86 arm mips ipo bitreader bitwriter linker asmparser jit mcjit \
+ interpreter instrumentation
+
+# Only build these LLVM tools
+LLVM_TOOLS=bugpoint llc llvm-ar llvm-as llvm-dis llvm-mc opt llvm-extract
+
+define DEF_LLVM_VARS
+# The configure script defines these variables with the target triples
+# separated by Z. This defines new ones with the expected format.
+CFG_LLVM_BUILD_DIR_$(1):=$$(CFG_LLVM_BUILD_DIR_$(subst -,_,$(1)))
+CFG_LLVM_INST_DIR_$(1):=$$(CFG_LLVM_INST_DIR_$(subst -,_,$(1)))
+
+# Any rules that depend on LLVM should depend on LLVM_CONFIG
+LLVM_CONFIG_$(1):=$$(CFG_LLVM_INST_DIR_$(1))/bin/llvm-config$$(X_$(1))
+LLVM_MC_$(1):=$$(CFG_LLVM_INST_DIR_$(1))/bin/llvm-mc$$(X_$(1))
+LLVM_VERSION_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --version)
+LLVM_BINDIR_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --bindir)
+LLVM_INCDIR_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --includedir)
+LLVM_LIBDIR_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --libdir)
+LLVM_LIBS_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --libs $$(LLVM_COMPONENTS))
+LLVM_LDFLAGS_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --ldflags)
+# On FreeBSD, it may search wrong headers (that are for pre-installed LLVM),
+# so we replace -I with -iquote to ensure that it searches bundled LLVM first.
+LLVM_CXXFLAGS_$(1)=$$(subst -I, -iquote , $$(shell "$$(LLVM_CONFIG_$(1))" --cxxflags))
+LLVM_HOST_TRIPLE_$(1)=$$(shell "$$(LLVM_CONFIG_$(1))" --host-target)
+
+LLVM_AS_$(1)=$$(CFG_LLVM_INST_DIR_$(1))/bin/llvm-as$$(X_$(1))
+LLC_$(1)=$$(CFG_LLVM_INST_DIR_$(1))/bin/llc$$(X_$(1))
+
+endef
+
+$(foreach host,$(CFG_HOST), \
+ $(eval $(call DEF_LLVM_VARS,$(host))))
+
+######################################################################
+# Exports for sub-utilities
+######################################################################
+
+# Note that any variable that re-configure should pick up needs to be
+# exported
+
+export CFG_SRC_DIR
+export CFG_BUILD_DIR
+export CFG_VERSION
+export CFG_VERSION_WIN
+export CFG_RELEASE
+export CFG_BUILD
+export CFG_LLVM_ROOT
+export CFG_ENABLE_MINGW_CROSS
+export CFG_PREFIX
+export CFG_LIBDIR
+export CFG_RUSTLIBDIR
+export CFG_LIBDIR_RELATIVE
+export CFG_DISABLE_INJECT_STD_VERSION
+
+######################################################################
+# Per-stage targets and runner
+######################################################################
+
+STAGES = 0 1 2 3
+
+define SREQ
+# $(1) is the stage number
+# $(2) is the target triple
+# $(3) is the host triple
+
+# Destinations of artifacts for the host compiler
+HROOT$(1)_H_$(3) = $(3)/stage$(1)
+HBIN$(1)_H_$(3) = $$(HROOT$(1)_H_$(3))/bin
+HLIB$(1)_H_$(3) = $$(HROOT$(1)_H_$(3))/$$(CFG_LIBDIR_RELATIVE)
+
+# Destinations of artifacts for target architectures
+TROOT$(1)_T_$(2)_H_$(3) = $$(HLIB$(1)_H_$(3))/$$(CFG_RUSTLIBDIR)/$(2)
+TBIN$(1)_T_$(2)_H_$(3) = $$(TROOT$(1)_T_$(2)_H_$(3))/bin
+TLIB$(1)_T_$(2)_H_$(3) = $$(TROOT$(1)_T_$(2)_H_$(3))/lib
+
+# Prerequisites for using the stageN compiler
+ifeq ($(1),0)
+HSREQ$(1)_H_$(3) = $$(HBIN$(1)_H_$(3))/rustc$$(X_$(3))
+else
+HSREQ$(1)_H_$(3) = \
+ $$(HBIN$(1)_H_$(3))/rustc$$(X_$(3)) \
+ $$(HLIB$(1)_H_$(3))/stamp.rustc \
+ $$(foreach dep,$$(RUST_DEPS_rustc),$$(HLIB$(1)_H_$(3))/stamp.$$(dep)) \
+ $$(MKFILE_DEPS)
+endif
+
+# Prerequisites for using the stageN compiler to build target artifacts
+TSREQ$(1)_T_$(2)_H_$(3) = \
+ $$(HSREQ$(1)_H_$(3)) \
+ $$(TLIB$(1)_T_$(2)_H_$(3))/libmorestack.a \
+ $$(TLIB$(1)_T_$(2)_H_$(3))/libcompiler-rt.a
+
+# Prerequisites for a working stageN compiler and libraries, for a specific
+# target
+SREQ$(1)_T_$(2)_H_$(3) = \
+ $$(TSREQ$(1)_T_$(2)_H_$(3)) \
+ $$(foreach dep,$$(TARGET_CRATES),\
+ $$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$$(dep))
+
+# Prerequisites for a working stageN compiler and complete set of target
+# libraries
+CSREQ$(1)_T_$(2)_H_$(3) = \
+ $$(TSREQ$(1)_T_$(2)_H_$(3)) \
+ $$(HBIN$(1)_H_$(3))/rustdoc$$(X_$(3)) \
+ $$(foreach dep,$$(CRATES),$$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$$(dep)) \
+ $$(foreach dep,$$(HOST_CRATES),$$(HLIB$(1)_H_$(3))/stamp.$$(dep))
+
+ifeq ($(1),0)
+# Don't run the stage0 compiler under valgrind - that ship has sailed
+CFG_VALGRIND_COMPILE$(1) =
+else
+CFG_VALGRIND_COMPILE$(1) = $$(CFG_VALGRIND_COMPILE)
+endif
+
+# Add RUSTFLAGS_STAGEN values to the build command
+EXTRAFLAGS_STAGE$(1) = $$(RUSTFLAGS_STAGE$(1))
+
+CFGFLAG$(1)_T_$(2)_H_$(3) = stage$(1)
+
+# Pass --cfg stage0 only for the build->host part of stage0;
+# if you're building a cross config, the host->* parts are
+# effectively stage1, since it uses the just-built stage0.
+ifeq ($(1),0)
+ifneq ($(strip $(CFG_BUILD)),$(strip $(3)))
+CFGFLAG$(1)_T_$(2)_H_$(3) = stage1
+endif
+endif
+
+ifdef CFG_DISABLE_RPATH
+ifeq ($$(OSTYPE_$(3)),apple-darwin)
+ RPATH_VAR$(1)_T_$(2)_H_$(3) := \
+ DYLD_LIBRARY_PATH="$$$$DYLD_LIBRARY_PATH:$$(HLIB$(1)_H_$(3))"
+else
+ RPATH_VAR$(1)_T_$(2)_H_$(3) := \
+ LD_LIBRARY_PATH="$$$$LD_LIBRARY_PATH:$$(HLIB$(1)_H_$(3))"
+endif
+else
+ RPATH_VAR$(1)_T_$(2)_H_$(3) :=
+endif
+
+STAGE$(1)_T_$(2)_H_$(3) := \
+ $$(Q)$$(RPATH_VAR$(1)_T_$(2)_H_$(3)) \
+ $$(call CFG_RUN_TARG_$(3),$(1), \
+ $$(CFG_VALGRIND_COMPILE$(1)) \
+ $$(HBIN$(1)_H_$(3))/rustc$$(X_$(3)) \
+ --cfg $$(CFGFLAG$(1)_T_$(2)_H_$(3)) \
+ $$(CFG_RUSTC_FLAGS) $$(EXTRAFLAGS_STAGE$(1)) --target=$(2)) \
+ $$(RUSTC_FLAGS_$(2))
+
+PERF_STAGE$(1)_T_$(2)_H_$(3) := \
+ $$(Q)$$(call CFG_RUN_TARG_$(3),$(1), \
+ $$(CFG_PERF_TOOL) \
+ $$(HBIN$(1)_H_$(3))/rustc$$(X_$(3)) \
+ --cfg $$(CFGFLAG$(1)_T_$(2)_H_$(3)) \
+ $$(CFG_RUSTC_FLAGS) $$(EXTRAFLAGS_STAGE$(1)) --target=$(2)) \
+ $$(RUSTC_FLAGS_$(2))
+
+endef
+
+$(foreach build,$(CFG_HOST), \
+ $(eval $(foreach target,$(CFG_TARGET), \
+ $(eval $(foreach stage,$(STAGES), \
+ $(eval $(call SREQ,$(stage),$(target),$(build))))))))
+
+######################################################################
+# rustc-H-targets
+#
+# Builds a functional Rustc for the given host.
+######################################################################
+
+define DEF_RUSTC_STAGE_TARGET
+# $(1) == architecture
+# $(2) == stage
+
+rustc-stage$(2)-H-$(1): \
+ $$(foreach target,$$(CFG_TARGET),$$(SREQ$(2)_T_$$(target)_H_$(1)))
+
+endef
+
+$(foreach host,$(CFG_HOST), \
+ $(eval $(foreach stage,1 2 3, \
+ $(eval $(call DEF_RUSTC_STAGE_TARGET,$(host),$(stage))))))
+
+rustc-stage1: rustc-stage1-H-$(CFG_BUILD)
+rustc-stage2: rustc-stage2-H-$(CFG_BUILD)
+rustc-stage3: rustc-stage3-H-$(CFG_BUILD)
+
+define DEF_RUSTC_TARGET
+# $(1) == architecture
+
+rustc-H-$(1): rustc-stage2-H-$(1)
+endef
+
+$(foreach host,$(CFG_TARGET), \
+ $(eval $(call DEF_RUSTC_TARGET,$(host))))
+
+rustc-stage1: rustc-stage1-H-$(CFG_BUILD)
+rustc-stage2: rustc-stage2-H-$(CFG_BUILD)
+rustc-stage3: rustc-stage3-H-$(CFG_BUILD)
+rustc: rustc-H-$(CFG_BUILD)
+
+rustc-H-all: $(foreach host,$(CFG_HOST),rustc-H-$(host))
+
+######################################################################
+# Entrypoint rule
+######################################################################
+
+.DEFAULT_GOAL := all
+
+define ALL_TARGET_N
+ifneq ($$(findstring $(1),$$(CFG_HOST)),)
+# This is a host
+all-target-$(1)-host-$(2): $$(CSREQ2_T_$(1)_H_$(2))
+else
+# This is a target only
+all-target-$(1)-host-$(2): $$(SREQ2_T_$(1)_H_$(2))
+endif
+endef
+
+$(foreach target,$(CFG_TARGET), \
+ $(foreach host,$(CFG_HOST), \
+ $(eval $(call ALL_TARGET_N,$(target),$(host)))))
+
+ALL_TARGET_RULES = $(foreach target,$(CFG_TARGET), \
+ $(foreach host,$(CFG_HOST), \
+ all-target-$(target)-host-$(host)))
+
+all: $(ALL_TARGET_RULES) $(GENERATED) docs
+
+######################################################################
+# Build system documentation
+######################################################################
+
+# $(1) is the name of the doc <section> in Makefile.in
+# pick everything between tags | remove first line | remove last line
+# | remove extra (?) line | strip leading `#` from lines
+SHOW_DOCS = $(Q)awk '/$(1)/,/<\/$(1)>/' $(S)/Makefile.in | sed '1d' | sed '$$d' | sed 's/^\# \?//'
+
+help:
+ $(call SHOW_DOCS,help)
+
+hot-tips:
+ $(call SHOW_DOCS,hottips)
+
+nitty-gritty:
+ $(call SHOW_DOCS,nittygritty)
--- /dev/null
+# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+# Recursive wildcard function
+# http://blog.jgc.org/2011/07/gnu-make-recursive-wildcard-function.html
+rwildcard=$(foreach d,$(wildcard $1*),$(call rwildcard,$d/,$2) \
+ $(filter $(subst *,%,$2),$d))
+
+ifndef CFG_DISABLE_MANAGE_SUBMODULES
+# This is a pretty expensive operation but I don't see any way to avoid it
+NEED_GIT_RECONFIG=$(shell cd "$(CFG_SRC_DIR)" && "$(CFG_GIT)" submodule status | grep -c '^\(+\|-\)')
+else
+NEED_GIT_RECONFIG=0
+endif
+
+ifeq ($(NEED_GIT_RECONFIG),0)
+else
+# If the submodules have changed, mark config.stamp phony so configure always re-runs
+.PHONY: config.stamp
+endif
+
+Makefile config.mk: config.stamp
+
+config.stamp: $(S)configure $(S)Makefile.in $(S)src/snapshots.txt
+ @$(call E, cfg: reconfiguring)
+ $(S)configure $(CFG_CONFIGURE_ARGS)
COMPRT_LIB_$(1) := $$(RT_OUTPUT_DIR_$(1))/$$(COMPRT_NAME_$(1))
COMPRT_BUILD_DIR_$(1) := $$(RT_OUTPUT_DIR_$(1))/compiler-rt
-$$(COMPRT_LIB_$(1)): $$(COMPRT_DEPS)
+$$(COMPRT_LIB_$(1)): $$(COMPRT_DEPS) $$(MKFILE_DEPS)
@$$(call E, make: compiler-rt)
$$(Q)$$(MAKE) -C "$(S)src/compiler-rt" \
ProjSrcRoot="$(S)src/compiler-rt" \
check-stage2-rfail check-stage2-cfail check-stage2-rmake
$(Q)$(CFG_PYTHON) $(S)src/etc/check-summary.py tmp/*.log
+check-ref: cleantestlibs cleantmptestlogs check-stage2-rpass \
+ check-stage2-rfail check-stage2-cfail check-stage2-rmake
+ $(Q)$(CFG_PYTHON) $(S)src/etc/check-summary.py tmp/*.log
+
+check-docs: cleantestlibs cleantmptestlogs check-stage2-docs
+ $(Q)$(CFG_PYTHON) $(S)src/etc/check-summary.py tmp/*.log
+
.PHONY: cleantmptestlogs cleantestlibs
cleantmptestlogs:
$(foreach group,$(TEST_GROUPS), \
$(eval $(call DEF_CHECK_FOR_STAGE_AND_HOSTS_AND_GROUP,$(stage),$(host),$(group))))))
+define DEF_CHECK_DOC_FOR_STAGE
+check-stage$(1)-docs: $$(foreach docname,$$(DOC_TEST_NAMES),\
+ check-stage$(1)-T-$$(CFG_BUILD)-H-$$(CFG_BUILD)-doc-$$(docname)) \
+ $$(foreach crate,$$(DOC_CRATE_NAMES),\
+ check-stage$(1)-T-$$(CFG_BUILD)-H-$$(CFG_BUILD)-doc-$$(crate))
+endef
+
+$(foreach stage,$(STAGES), \
+ $(eval $(call DEF_CHECK_DOC_FOR_STAGE,$(stage))))
+
+define DEF_CHECK_CRATE
+check-$(1): check-stage2-T-$$(CFG_BUILD)-H-$$(CFG_BUILD)-$(1)-exec
+endef
+
+$(foreach crate,$(TEST_CRATES), \
+ $(eval $(call DEF_CHECK_CRATE,$(crate))))
+
######################################################################
# check-fast rules
######################################################################
--- /dev/null
+# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+ifdef VERBOSE
+ Q :=
+ E =
+else
+ Q := @
+ E = echo $(1)
+endif
+
+S := $(CFG_SRC_DIR)
+
check_lines: ~[~str],
// Flag to force a crate to be built with the host architecture
force_host: bool,
+ // Check stdout for error-pattern output as well as stderr
+ check_stdout: bool,
}
// Load any test directives embedded in the file
let mut debugger_cmds = ~[];
let mut check_lines = ~[];
let mut force_host = false;
+ let mut check_stdout = false;
iter_header(testfile, |ln| {
match parse_error_pattern(ln) {
Some(ep) => error_patterns.push(ep),
force_host = parse_force_host(ln);
}
+ if !check_stdout {
+ check_stdout = parse_check_stdout(ln);
+ }
+
match parse_aux_build(ln) {
Some(ab) => { aux_builds.push(ab); }
None => {}
debugger_cmds: debugger_cmds,
check_lines: check_lines,
force_host: force_host,
+ check_stdout: check_stdout,
};
}
parse_name_directive(line, "force-host")
}
+fn parse_check_stdout(line: &str) -> bool {
+ parse_name_directive(line, "check-stdout")
+}
+
fn parse_exec_env(line: &str) -> Option<(~str, ~str)> {
parse_name_value_directive(line, ~"exec-env").map(|nv| {
// nv is either FOO or FOO=BAR
let mut next_err_idx = 0u;
let mut next_err_pat = &props.error_patterns[next_err_idx];
let mut done = false;
- for line in ProcRes.stderr.lines() {
+ let output_to_check = if props.check_stdout {
+ ProcRes.stdout + ProcRes.stderr
+ } else {
+ ProcRes.stderr.clone()
+ };
+ for line in output_to_check.lines() {
if line.contains(*next_err_pat) {
debug!("found error pattern {}", *next_err_pat);
next_err_idx += 1u;
# Getting started
-> **NOTE**: The tarball and installer links are for the most recent release,
-> not master.
+> **WARNING**: The tarball and installer links are for the most recent
+> release, not master. To use master, you **must** build from [git].
The Rust compiler currently must be built from a [tarball] or [git], unless
you are on Windows, in which case using the [installer][win-exe] is
'("as"
"break"
"continue"
+ "crate"
"do"
"else" "enum" "extern"
"false" "fn" "for"
<keyword>break</keyword>
<keyword>const</keyword>
<keyword>continue</keyword>
+ <keyword>crate</keyword>
<keyword>do</keyword>
<keyword>drop</keyword>
<keyword>else</keyword>
<item> as </item>
<item> break </item>
<item> continue </item>
+ <item> crate </item>
<item> do </item>
<item> drop </item>
<item> else </item>
" Maintainer: Patrick Walton <pcwalton@mozilla.com>
" Maintainer: Ben Blum <bblum@cs.cmu.edu>
" Maintainer: Chris Morgan <me@chrismorgan.info>
-" Last Change: 2014 Jan 4
+" Last Change: 2014 Feb 14
if version < 600
syntax clear
syn match rustAssert "\<assert\(\w\)*!" contained
syn match rustFail "\<fail\(\w\)*!" contained
-syn keyword rustKeyword break continue do extern
+syn keyword rustKeyword break continue do
+syn keyword rustKeyword extern nextgroup=rustExternCrate skipwhite
syn keyword rustKeyword for in if impl let
syn keyword rustKeyword loop once priv pub
syn keyword rustKeyword return
syn keyword rustStorage mut ref static
syn keyword rustObsoleteStorage const
+syn keyword rustInvalidBareKeyword crate
+
+syn keyword rustExternCrate crate contained nextgroup=rustIdentifier skipwhite
+
syn match rustIdentifier contains=rustIdentifierPrime "\%([^[:cntrl:][:space:][:punct:][:digit:]]\|_\)\%([^[:cntrl:][:punct:][:space:]]\|_\)*" display contained
syn match rustFuncName "\%([^[:cntrl:][:space:][:punct:][:digit:]]\|_\)\%([^[:cntrl:][:punct:][:space:]]\|_\)*" display contained
syn keyword rustTrait Algebraic Trigonometric Exponential Hyperbolic
syn keyword rustTrait Bitwise Bounded Integer Fractional Real RealExt
-syn keyword rustTrait Num NumCast CheckedAdd CheckedSub CheckedMul
+syn keyword rustTrait Num NumCast CheckedAdd CheckedSub CheckedMul CheckedDiv
syn keyword rustTrait Orderable Signed Unsigned Round
syn keyword rustTrait Primitive Int Float ToStrRadix ToPrimitive FromPrimitive
syn keyword rustTrait GenericPath Path PosixPath WindowsPath
hi def link rustStorage StorageClass
hi def link rustObsoleteStorage Error
hi def link rustLifetime Special
+hi def link rustInvalidBareKeyword Error
+hi def link rustExternCrate rustKeyword
" Other Suggestions:
" hi rustAttribute ctermfg=cyan
let mut bitv = 0 as uint;
b.iter(|| {
bitv |= (1 << ((r.next_u32() as uint) % uint::BITS));
+ &bitv
})
}
let mut bitv = SmallBitv::new(uint::BITS);
b.iter(|| {
bitv.set((r.next_u32() as uint) % uint::BITS, true);
+ &bitv
})
}
let mut bitv = BigBitv::new(~[0]);
b.iter(|| {
bitv.set((r.next_u32() as uint) % uint::BITS, true);
+ &bitv
})
}
let mut bitv = BigBitv::new(storage);
b.iter(|| {
bitv.set((r.next_u32() as uint) % BENCH_BITS, true);
+ &bitv
})
}
let mut bitv = Bitv::new(BENCH_BITS, false);
b.iter(|| {
bitv.set((r.next_u32() as uint) % BENCH_BITS, true);
+ &bitv
})
}
let mut bitv = Bitv::new(uint::BITS, false);
b.iter(|| {
bitv.set((r.next_u32() as uint) % uint::BITS, true);
+ &bitv
})
}
let mut bitv = BitvSet::new();
b.iter(|| {
bitv.insert((r.next_u32() as uint) % uint::BITS);
+ &bitv
})
}
let mut bitv = BitvSet::new();
b.iter(|| {
bitv.insert((r.next_u32() as uint) % BENCH_BITS);
+ &bitv
})
}
// measure
let mut i = 0;
bh.iter(|| {
- map.find(&i);
+ let x = map.find(&i);
i = (i + 1) % n;
+ x
})
}
}
use time::precise_time_ns;
use collections::TreeMap;
-use std::clone::Clone;
use std::cmp;
use std::io;
-use std::io::File;
-use std::io::Writer;
+use std::io::{File, PortReader, ChanWriter};
use std::io::stdio::StdWriter;
+use std::str;
use std::task;
use std::to_str::ToStr;
use std::f64;
ignored: uint,
measured: uint,
metrics: MetricMap,
- failures: ~[TestDesc],
+ failures: ~[(TestDesc, ~[u8])],
max_name_len: uint, // number of columns to fill when aligning names
}
pub fn write_failures(&mut self) -> io::IoResult<()> {
if_ok!(self.write_plain("\nfailures:\n"));
let mut failures = ~[];
- for f in self.failures.iter() {
+ let mut fail_out = ~"";
+ for &(ref f, ref stdout) in self.failures.iter() {
failures.push(f.name.to_str());
+ if stdout.len() > 0 {
+ fail_out.push_str(format!("---- {} stdout ----\n\t",
+ f.name.to_str()));
+ let output = str::from_utf8_lossy(*stdout);
+ fail_out.push_str(output.as_slice().replace("\n", "\n\t"));
+ fail_out.push_str("\n");
+ }
+ }
+ if fail_out.len() > 0 {
+ if_ok!(self.write_plain("\n"));
+ if_ok!(self.write_plain(fail_out));
}
+
+ if_ok!(self.write_plain("\nfailures:\n"));
failures.sort();
for name in failures.iter() {
if_ok!(self.write_plain(format!(" {}\n", name.to_str())));
match (*event).clone() {
TeFiltered(ref filtered_tests) => st.write_run_start(filtered_tests.len()),
TeWait(ref test, padding) => st.write_test_start(test, padding),
- TeResult(test, result) => {
+ TeResult(test, result, stdout) => {
if_ok!(st.write_log(&test, &result));
if_ok!(st.write_result(&result));
match result {
}
TrFailed => {
st.failed += 1;
- st.failures.push(test);
+ st.failures.push((test, stdout));
}
}
Ok(())
measured: 0u,
max_name_len: 10u,
metrics: MetricMap::new(),
- failures: ~[test_b, test_a]
+ failures: ~[(test_b, ~[]), (test_a, ~[])]
};
st.write_failures().unwrap();
let s = match st.out {
- Raw(ref m) => str::from_utf8(m.get_ref()).unwrap(),
+ Raw(ref m) => str::from_utf8_lossy(m.get_ref()),
Pretty(_) => unreachable!()
};
- let apos = s.find_str("a").unwrap();
- let bpos = s.find_str("b").unwrap();
+ let apos = s.as_slice().find_str("a").unwrap();
+ let bpos = s.as_slice().find_str("b").unwrap();
assert!(apos < bpos);
}
enum TestEvent {
TeFiltered(~[TestDesc]),
TeWait(TestDesc, NamePadding),
- TeResult(TestDesc, TestResult),
+ TeResult(TestDesc, TestResult, ~[u8] /* stdout */),
}
-/// The message sent to the test monitor from the individual runners.
-pub type MonitorMsg = (TestDesc, TestResult);
+pub type MonitorMsg = (TestDesc, TestResult, ~[u8] /* stdout */);
fn run_tests(opts: &TestOpts,
tests: ~[TestDescAndFn],
pending += 1;
}
- let (desc, result) = p.recv();
+ let (desc, result, stdout) = p.recv();
if concurrency != 1 {
if_ok!(callback(TeWait(desc.clone(), PadNone)));
}
- if_ok!(callback(TeResult(desc, result)));
+ if_ok!(callback(TeResult(desc, result, stdout)));
pending -= 1;
}
for b in filtered_benchs_and_metrics.move_iter() {
if_ok!(callback(TeWait(b.desc.clone(), b.testfn.padding())));
run_test(!opts.run_benchmarks, b, ch.clone());
- let (test, result) = p.recv();
- if_ok!(callback(TeResult(test, result)));
+ let (test, result, stdout) = p.recv();
+ if_ok!(callback(TeResult(test, result, stdout)));
}
Ok(())
}
let TestDescAndFn {desc, testfn} = test;
if force_ignore || desc.ignore {
- monitor_ch.send((desc, TrIgnored));
+ monitor_ch.send((desc, TrIgnored, ~[]));
return;
}
testfn: proc()) {
spawn(proc() {
let mut task = task::task();
- task.name(match desc.name {
- DynTestName(ref name) => name.to_owned().into_maybe_owned(),
- StaticTestName(name) => name.into_maybe_owned()
- });
+ let (p, c) = Chan::new();
+ let mut reader = PortReader::new(p);
+ let stdout = ChanWriter::new(c.clone());
+ let stderr = ChanWriter::new(c);
+ match desc.name {
+ DynTestName(ref name) => task.name(name.clone()),
+ StaticTestName(name) => task.name(name),
+ }
+ task.opts.stdout = Some(~stdout as ~Writer);
+ task.opts.stderr = Some(~stderr as ~Writer);
let result_future = task.future_result();
task.spawn(testfn);
+ let stdout = reader.read_to_end().unwrap();
let task_result = result_future.recv();
let test_result = calc_result(&desc, task_result.is_ok());
- monitor_ch.send((desc.clone(), test_result));
- });
+ monitor_ch.send((desc.clone(), test_result, stdout));
+ })
}
match testfn {
DynBenchFn(bencher) => {
let bs = ::test::bench::benchmark(|harness| bencher.run(harness));
- monitor_ch.send((desc, TrBench(bs)));
+ monitor_ch.send((desc, TrBench(bs), ~[]));
return;
}
StaticBenchFn(benchfn) => {
let bs = ::test::bench::benchmark(|harness| benchfn(harness));
- monitor_ch.send((desc, TrBench(bs)));
+ monitor_ch.send((desc, TrBench(bs), ~[]));
return;
}
DynMetricFn(f) => {
let mut mm = MetricMap::new();
f(&mut mm);
- monitor_ch.send((desc, TrMetrics(mm)));
+ monitor_ch.send((desc, TrMetrics(mm), ~[]));
return;
}
StaticMetricFn(f) => {
let mut mm = MetricMap::new();
f(&mut mm);
- monitor_ch.send((desc, TrMetrics(mm)));
+ monitor_ch.send((desc, TrMetrics(mm), ~[]));
return;
}
DynTestFn(f) => run_test_inner(desc, monitor_ch, f),
};
let (p, ch) = Chan::new();
run_test(false, desc, ch);
- let (_, res) = p.recv();
+ let (_, res, _) = p.recv();
assert!(res != TrOk);
}
};
let (p, ch) = Chan::new();
run_test(false, desc, ch);
- let (_, res) = p.recv();
+ let (_, res, _) = p.recv();
assert_eq!(res, TrIgnored);
}
};
let (p, ch) = Chan::new();
run_test(false, desc, ch);
- let (_, res) = p.recv();
+ let (_, res, _) = p.recv();
assert_eq!(res, TrOk);
}
};
let (p, ch) = Chan::new();
run_test(false, desc, ch);
- let (_, res) = p.recv();
+ let (_, res, _) = p.recv();
assert_eq!(res, TrFailed);
}
-// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
}
/**
- * Encodes a URI by replacing reserved characters with percent encoded
+ * Encodes a URI by replacing reserved characters with percent-encoded
* character sequences.
*
* This function is compliant with RFC 3986.
+ *
+ * # Example
+ *
+ * ```rust
+ * use extra::url::encode;
+ *
+ * let url = encode(&"https://example.com/Rust (programming language)");
+ * println!("{}", url); // https://example.com/Rust%20(programming%20language)
+ * ```
*/
pub fn encode(s: &str) -> ~str {
encode_inner(s, true)
}
/**
- * Decode a string encoded with percent encoding.
+ * Decodes a percent-encoded string representing a URI.
*
- * This will only decode escape sequences generated by encode.
+ * This will only decode escape sequences generated by `encode`.
+ *
+ * # Example
+ *
+ * ```rust
+ * use extra::url::decode;
+ *
+ * let url = decode(&"https://example.com/Rust%20(programming%20language)");
+ * println!("{}", url); // https://example.com/Rust (programming language)
+ * ```
*/
pub fn decode(s: &str) -> ~str {
decode_inner(s, true)
return strvec.connect("&");
}
-// returns the scheme and the rest of the url, or a parsing error
+/**
+ * Returns a tuple of the URI scheme and the rest of the URI, or a parsing error.
+ *
+ * Does not include the separating `:` character.
+ *
+ * # Example
+ *
+ * ```rust
+ * use extra::url::get_scheme;
+ *
+ * let scheme = match get_scheme("https://example.com/") {
+ * Ok((sch, _)) => sch,
+ * Err(_) => ~"(None)",
+ * };
+ * println!("Scheme in use: {}.", scheme); // Scheme in use: https.
+ * ```
+ */
pub fn get_scheme(rawurl: &str) -> Result<(~str, ~str), ~str> {
for (i,c) in rawurl.chars().enumerate() {
match c {
}
/**
- * Parse a `str` to a `url`
+ * Parses a URL, converting it from a string to `Url` representation.
*
* # Arguments
*
- * `rawurl` - a string representing a full url, including scheme.
+ * `rawurl` - a string representing the full URL, including scheme.
*
* # Returns
*
- * a `url` that contains the parsed representation of the url.
- *
+ * A `Url` struct type representing the URL.
*/
-
pub fn from_str(rawurl: &str) -> Result<Url, ~str> {
// scheme
let (scheme, rest) = match get_scheme(rawurl) {
}
/**
- * Format a `url` as a string
+ * Converts a URL from `Url` to string representation.
*
* # Arguments
*
- * `url` - a url.
+ * `url` - a URL.
*
* # Returns
*
- * a `str` that contains the formatted url. Note that this will usually
- * be an inverse of `from_str` but might strip out unneeded separators.
+ * A string that contains the formatted URL. Note that this will usually
+ * be an inverse of `from_str` but might strip out unneeded separators;
* for example, "http://somehost.com?", when parsed and formatted, will
* result in just "http://somehost.com".
- *
*/
pub fn to_str(url: &Url) -> ~str {
let user = match url.user {
args.push(~"-Wl,--allow-multiple-definition");
}
- // Stack growth requires statically linking a __morestack function
- args.push(~"-lmorestack");
- // compiler-rt contains implementations of low-level LLVM helpers
- // It should go before platform and user libraries, so it has first dibs
- // at resolving symbols that also appear in libgcc.
- args.push(~"-lcompiler-rt");
-
add_local_native_libraries(&mut args, sess);
add_upstream_rust_crates(&mut args, sess, dylib, tmpdir);
add_upstream_native_libraries(&mut args, sess);
args.push_all(rpath::get_rpath_flags(sess, out_filename));
}
+ // Stack growth requires statically linking a __morestack function
+ args.push(~"-lmorestack");
+ // compiler-rt contains implementations of low-level LLVM helpers
+ // It should go before platform and user libraries, so it has first dibs
+ // at resolving symbols that also appear in libgcc.
+ args.push(~"-lcompiler-rt");
+
// Finally add all the linker arguments provided on the command line along
// with any #[link_args] attributes found inside the crate
args.push_all(sess.opts.cg.link_args);
ast::ExprAssignOp(_, _, dest, _) => {
this.check_assignment(dest);
}
- ast::ExprCall(f, ref args, _) => {
+ ast::ExprCall(f, ref args) => {
this.check_call(expr, Some(f), f.id, f.span, *args);
}
- ast::ExprMethodCall(callee_id, _, _, ref args, _) => {
+ ast::ExprMethodCall(callee_id, _, _, ref args) => {
this.check_call(expr, None, callee_id, expr.span, *args);
}
ast::ExprIndex(callee_id, _, rval) |
self.straightline(expr, pred, *elems)
}
- ast::ExprCall(func, ref args, _) => {
+ ast::ExprCall(func, ref args) => {
self.call(expr, pred, func, *args)
}
- ast::ExprMethodCall(_, _, _, ref args, _) => {
+ ast::ExprMethodCall(_, _, _, ref args) => {
self.call(expr, pred, args[0], args.slice_from(1))
}
}
}
}
- ExprCall(callee, _, NoSugar) => {
+ ExprCall(callee, _) => {
let def_map = def_map.borrow();
match def_map.get().find(&callee.id) {
Some(&DefStruct(..)) => {} // OK.
self.walk_opt_expr(with_expr, in_out, loop_scopes);
}
- ast::ExprCall(f, ref args, _) => {
+ ast::ExprCall(f, ref args) => {
self.walk_expr(f, in_out, loop_scopes);
self.walk_call(f.id, expr.id, *args, in_out, loop_scopes);
}
- ast::ExprMethodCall(callee_id, _, _, ref args, _) => {
+ ast::ExprMethodCall(callee_id, _, _, ref args) => {
self.walk_call(callee_id, expr.id, *args, in_out, loop_scopes);
}
fn visit_expr(&mut self, expr: &ast::Expr, _:()) {
match expr.node {
- ast::ExprMethodCall(callee_id, _, _, _, _) => {
+ ast::ExprMethodCall(callee_id, _, _, _) => {
let base_type = ty::node_id_to_type(self.tcx, callee_id);
debug!("effect: method call case, base type is {}",
ppaux::ty_to_str(self.tcx, base_type));
"invocation of unsafe method")
}
}
- ast::ExprCall(base, _, _) => {
+ ast::ExprCall(base, _) => {
let base_type = ty::node_id_to_type(self.tcx, base.id);
debug!("effect: call case, base type is {}",
ppaux::ty_to_str(self.tcx, base_type));
})
}
- ExprCall(f, ref args, _) => {
+ ExprCall(f, ref args) => {
// calling a fn with bot return type means that the fn
// will fail, and hence the successors can be ignored
let t_ret = ty::ty_fn_ret(ty::expr_ty(self.tcx, f));
self.propagate_through_expr(f, succ)
}
- ExprMethodCall(callee_id, _, _, ref args, _) => {
+ ExprMethodCall(callee_id, _, _, ref args) => {
// calling a method with bot return type means that the method
// will fail, and hence the successors can be ignored
let t_ret = ty::ty_fn_ret(ty::node_id_to_type(self.tcx, callee_id));
}
}
- ExprCall(callee, ref args, _) => { // callee(args)
+ ExprCall(callee, ref args) => { // callee(args)
// Figure out whether the called function is consumed.
let mode = match ty::get(ty::expr_ty(self.tcx, callee)).sty {
ty::ty_closure(ref cty) => {
self.use_fn_args(callee.id, *args);
}
- ExprMethodCall(callee_id, _, _, ref args, _) => { // callee.m(args)
+ ExprMethodCall(callee_id, _, _, ref args) => { // callee.m(args)
self.use_fn_args(callee_id, *args);
}
_ => {}
}
}
- ast::ExprMethodCall(_, ident, _, ref args, _) => {
+ ast::ExprMethodCall(_, ident, _, ref args) => {
// see above
let t = ty::type_autoderef(ty::expr_ty(self.tcx, args[0]));
match ty::get(t).sty {
let traits = self.search_for_traits_containing_method(ident);
self.trait_map.insert(expr.id, @RefCell::new(traits));
}
- ExprMethodCall(_, ident, _, _, _) => {
+ ExprMethodCall(_, ident, _, _) => {
debug!("(recording candidate traits for expr) recording \
traits for {}",
expr.id);
}
}
}
- ast::ExprCall(callee, ref args, _) => {
+ ast::ExprCall(callee, ref args) => {
let tcx = cx.tcx;
let opt_def = {
let def_map = tcx.def_map.borrow();
})
}
- ast::ExprCall(fn_exp, ref args, _) => {
+ ast::ExprCall(fn_exp, ref args) => {
walk_expr(cx, fn_exp, scope_stack, scope_map);
for arg_exp in args.iter() {
}
}
- ast::ExprMethodCall(node_id, _, _, ref args, _) => {
+ ast::ExprMethodCall(node_id, _, _, ref args) => {
scope_map.insert(node_id, scope_stack.last().unwrap().scope_metadata);
for arg_exp in args.iter() {
expr_to_str(expr), expr_ty.repr(tcx));
closure::trans_expr_fn(bcx, sigil, decl, body, expr.id, dest)
}
- ast::ExprCall(f, ref args, _) => {
+ ast::ExprCall(f, ref args) => {
callee::trans_call(bcx, expr, f,
callee::ArgExprs(*args), expr.id, dest)
}
- ast::ExprMethodCall(callee_id, _, _, ref args, _) => {
+ ast::ExprMethodCall(callee_id, _, _, ref args) => {
callee::trans_method_call(bcx, expr, callee_id, args[0],
callee::ArgExprs(*args), dest)
}
// converting to an rvalue.
let self_datum = unpack_datum!(
bcx, expr::trans(bcx, self_expr));
- let self_datum = unpack_datum!(
- bcx, self_datum.to_rvalue_datum(bcx, "trait_callee"));
- // Convert to by-ref since `trans_trait_callee_from_llval` wants it
- // that way.
- let self_datum = unpack_datum!(
- bcx, self_datum.to_ref_datum(bcx));
+ let llval = if ty::type_needs_drop(bcx.tcx(), self_datum.ty) {
+ let self_datum = unpack_datum!(
+ bcx, self_datum.to_rvalue_datum(bcx, "trait_callee"));
+
+ // Convert to by-ref since `trans_trait_callee_from_llval` wants it
+ // that way.
+ let self_datum = unpack_datum!(
+ bcx, self_datum.to_ref_datum(bcx));
- // Arrange cleanup in case something should go wrong before the
- // actual call occurs.
- let llval = self_datum.add_clean(bcx.fcx, arg_cleanup_scope);
+ // Arrange cleanup in case something should go wrong before the
+ // actual call occurs.
+ self_datum.add_clean(bcx.fcx, arg_cleanup_scope)
+ } else {
+ // We don't have to do anything about cleanups for &Trait and &mut Trait.
+ assert!(self_datum.kind.is_by_ref());
+ self_datum.val
+ };
let callee_ty = node_id_type(bcx, callee_id);
trans_trait_callee_from_llval(bcx, callee_ty, n_method, llval)
method_fn_ty: ty::t,
callee_expr: &ast::Expr,
args: &[@ast::Expr],
- sugar: ast::CallSugar,
- deref_args: DerefArgs) -> ty::t
- {
+ deref_args: DerefArgs) -> ty::t {
// HACK(eddyb) ignore provided self (it has special typeck rules).
let args = args.slice_from(1);
if ty::type_is_error(method_fn_ty) {
let err_inputs = err_args(args.len());
check_argument_types(fcx, sp, err_inputs, callee_expr,
- args, sugar, deref_args, false);
+ args, deref_args, false);
method_fn_ty
} else {
match ty::get(method_fn_ty).sty {
ty::ty_bare_fn(ref fty) => {
// HACK(eddyb) ignore self in the definition (see above).
check_argument_types(fcx, sp, fty.sig.inputs.slice_from(1),
- callee_expr, args, sugar, deref_args,
+ callee_expr, args, deref_args,
fty.sig.variadic);
fty.sig.output
}
fn_inputs: &[ty::t],
callee_expr: &ast::Expr,
args: &[@ast::Expr],
- sugar: ast::CallSugar,
deref_args: DerefArgs,
variadic: bool) {
/*!
err_args(supplied_arg_count)
}
} else {
- let suffix = match sugar {
- ast::NoSugar => "",
- ast::ForSugar => " (including the closure passed by \
- the `for` keyword)"
- };
let msg = format!(
"this function takes {} parameter{} \
- but {} parameter{} supplied{}",
+ but {} parameter{} supplied",
expected_arg_count, if expected_arg_count == 1 {""} else {"s"},
supplied_arg_count,
- if supplied_arg_count == 1 {" was"} else {"s were"},
- suffix);
+ if supplied_arg_count == 1 {" was"} else {"s were"});
tcx.sess.span_err(sp, msg);
// The callee checks for bot / err, we don't need to
}
- fn write_call(fcx: @FnCtxt,
- call_expr: &ast::Expr,
- output: ty::t,
- sugar: ast::CallSugar) {
- let ret_ty = match sugar {
- ast::ForSugar => {
- match ty::get(output).sty {
- ty::ty_bool => {}
- _ => fcx.type_error_message(call_expr.span, |actual| {
- format!("expected `for` closure to return `bool`, \
- but found `{}`", actual) },
- output, None)
- }
- ty::mk_nil()
- }
- _ => output
- };
- fcx.write_ty(call_expr.id, ret_ty);
+ fn write_call(fcx: @FnCtxt, call_expr: &ast::Expr, output: ty::t) {
+ fcx.write_ty(call_expr.id, output);
}
// A generic function for doing all of the checking for call expressions
callee_id: ast::NodeId,
call_expr: &ast::Expr,
f: &ast::Expr,
- args: &[@ast::Expr],
- sugar: ast::CallSugar) {
+ args: &[@ast::Expr]) {
// Index expressions need to be handled separately, to inform them
// that they appear in call position.
check_expr(fcx, f);
// Call the generic checker.
check_argument_types(fcx, call_expr.span, fn_sig.inputs, f,
- args, sugar, DontDerefArgs, fn_sig.variadic);
+ args, DontDerefArgs, fn_sig.variadic);
- write_call(fcx, call_expr, fn_sig.output, sugar);
+ write_call(fcx, call_expr, fn_sig.output);
}
// Checks a method call.
expr: &ast::Expr,
method_name: ast::Ident,
args: &[@ast::Expr],
- tps: &[ast::P<ast::Ty>],
- sugar: ast::CallSugar) {
+ tps: &[ast::P<ast::Ty>]) {
let rcvr = args[0];
check_expr(fcx, rcvr);
// Call the generic checker.
let fn_ty = fcx.node_ty(callee_id);
let ret_ty = check_method_argument_types(fcx, expr.span,
- fn_ty, expr, args, sugar,
+ fn_ty, expr, args,
DontDerefArgs);
- write_call(fcx, expr, ret_ty, sugar);
+ write_call(fcx, expr, ret_ty);
}
// A generic function for checking the then and else in an if
method_map.get().insert(op_ex.id, *origin);
}
check_method_argument_types(fcx, op_ex.span,
- method_ty, op_ex, args,
- ast::NoSugar, deref_args)
+ method_ty, op_ex,
+ args, deref_args)
}
_ => {
unbound_method();
// so we get all the error messages
let expected_ty = ty::mk_err();
check_method_argument_types(fcx, op_ex.span,
- expected_ty, op_ex, args,
- ast::NoSugar, deref_args);
+ expected_ty, op_ex,
+ args, deref_args);
ty::mk_err()
}
}
check_block_with_expected(fcx, b, expected);
fcx.write_ty(id, fcx.node_ty(b.id));
}
- ast::ExprCall(f, ref args, sugar) => {
- check_call(fcx, expr.id, expr, f, *args, sugar);
+ ast::ExprCall(f, ref args) => {
+ check_call(fcx, expr.id, expr, f, *args);
let f_ty = fcx.expr_ty(f);
let (args_bot, args_err) = args.iter().fold((false, false),
|(rest_bot, rest_err), a| {
fcx.write_bot(id);
}
}
- ast::ExprMethodCall(callee_id, ident, ref tps, ref args, sugar) => {
- check_method_call(fcx, callee_id, expr, ident, *args, *tps, sugar);
+ ast::ExprMethodCall(callee_id, ident, ref tps, ref args) => {
+ check_method_call(fcx, callee_id, expr, ident, *args, *tps);
let arg_tys = args.map(|a| fcx.expr_ty(*a));
let (args_bot, args_err) = arg_tys.iter().fold((false, false),
|(rest_bot, rest_err), a| {
}
match expr.node {
- ast::ExprCall(callee, ref args, _) => {
+ ast::ExprCall(callee, ref args) => {
constrain_callee(rcx, callee.id, expr, callee);
constrain_call(rcx, callee.id, expr, None, *args, false);
visit::walk_expr(rcx, expr, ());
}
- ast::ExprMethodCall(callee_id, _, _, ref args, _) => {
+ ast::ExprMethodCall(callee_id, _, _, ref args) => {
constrain_call(rcx, callee_id, expr, Some(args[0]),
args.slice_from(1), false);
ast::ExprUnary(callee_id, _, _) |
ast::ExprAssignOp(callee_id, _, _, _) |
ast::ExprIndex(callee_id, _, _) |
- ast::ExprMethodCall(callee_id, _, _, _, _) => {
+ ast::ExprMethodCall(callee_id, _, _, _) => {
match ty::method_call_type_param_defs(cx.tcx, fcx.inh.method_map, ex.id) {
Some(type_param_defs) => {
debug!("vtable resolution on parameter bounds for method call {}",
maybe_resolve_type_vars_for_node(wbcx, e.span, callee_id);
}
- ast::ExprMethodCall(callee_id, _, _, _, _) => {
+ ast::ExprMethodCall(callee_id, _, _, _) => {
// We must always have written in a callee ID type for these.
resolve_type_vars_for_node(wbcx, e.span, callee_id);
}
use syntax::visit::Visitor;
use std::local_data;
-use extra;
+use extra::time;
pub fn time<T, U>(do_it: bool, what: &str, u: U, f: |U| -> T) -> T {
local_data_key!(depth: uint);
let old = local_data::get(depth, |d| d.map(|a| *a).unwrap_or(0));
local_data::set(depth, old + 1);
- let start = extra::time::precise_time_s();
+ let start = time::precise_time_s();
let rv = f(u);
- let end = extra::time::precise_time_s();
+ let end = time::precise_time_s();
println!("{}time: {:3.3f} s\t{}", " ".repeat(old), end - start, what);
local_data::set(depth, old);
// can't have the source to it anyway.
let contents = match File::open(&p).read_to_end() {
Ok(r) => r,
- // eew macro hacks
- Err(..) if filename == "<std-macros>" => return Ok(()),
+ // macros from other libraries get special filenames which we can
+ // safely ignore
+ Err(..) if filename.starts_with("<") &&
+ filename.ends_with("macros>") => return Ok(()),
Err(e) => return Err(e)
};
let contents = str::from_utf8_owned(contents).unwrap();
/// Strip items marked `#[doc(hidden)]`
pub fn strip_hidden(krate: clean::Crate) -> plugins::PluginResult {
- struct Stripper;
- impl fold::DocFolder for Stripper {
- fn fold_item(&mut self, i: Item) -> Option<Item> {
- for attr in i.attrs.iter() {
- match attr {
- &clean::List(~"doc", ref l) => {
- for innerattr in l.iter() {
- match innerattr {
- &clean::Word(ref s) if "hidden" == *s => {
- debug!("found one in strip_hidden; removing");
- return None;
- },
- _ => (),
+ let mut stripped = HashSet::new();
+
+ // strip all #[doc(hidden)] items
+ let krate = {
+ struct Stripper<'a> {
+ stripped: &'a mut HashSet<ast::NodeId>
+ };
+ impl<'a> fold::DocFolder for Stripper<'a> {
+ fn fold_item(&mut self, i: Item) -> Option<Item> {
+ for attr in i.attrs.iter() {
+ match attr {
+ &clean::List(~"doc", ref l) => {
+ for innerattr in l.iter() {
+ match innerattr {
+ &clean::Word(ref s) if "hidden" == *s => {
+ debug!("found one in strip_hidden; removing");
+ self.stripped.insert(i.id);
+ return None;
+ },
+ _ => (),
+ }
}
+ },
+ _ => ()
+ }
+ }
+ self.fold_item_recur(i)
+ }
+ }
+ let mut stripper = Stripper{ stripped: &mut stripped };
+ stripper.fold_crate(krate)
+ };
+
+ // strip any traits implemented on stripped items
+ let krate = {
+ struct ImplStripper<'a> {
+ stripped: &'a mut HashSet<ast::NodeId>
+ };
+ impl<'a> fold::DocFolder for ImplStripper<'a> {
+ fn fold_item(&mut self, i: Item) -> Option<Item> {
+ match i.inner {
+ clean::ImplItem(clean::Impl{ for_: clean::ResolvedPath{ id: for_id, .. },
+ .. }) => {
+ if self.stripped.contains(&for_id) {
+ return None;
}
- },
- _ => ()
+ }
+ _ => {}
}
+ self.fold_item_recur(i)
}
- self.fold_item_recur(i)
}
- }
- let mut stripper = Stripper;
- let krate = stripper.fold_crate(krate);
+ let mut stripper = ImplStripper{ stripped: &mut stripped };
+ stripper.fold_crate(krate)
+ };
+
(krate, None)
}
// handled below
clean::ModuleItem(..) => {}
- // impls/tymethods have no control over privacy
- clean::ImplItem(..) | clean::TyMethodItem(..) => {}
+ // trait impls for private items should be stripped
+ clean::ImplItem(clean::Impl{ for_: clean::ResolvedPath{ id: ref for_id, .. }, .. }) => {
+ if !self.exported_items.contains(for_id) {
+ return None;
+ }
+ }
+ clean::ImplItem(..) => {}
+
+ // tymethods have no control over privacy
+ clean::TyMethodItem(..) => {}
}
let fastreturn = match i.inner {
match *slot.get() {
(ref mut task, ref mut val) => {
*val = n;
- task.take_unwrap()
+ match task.take() {
+ Some(t) => t,
+ None => return
+ }
}
}
}
#[bench]
fn bench_buffered_reader(bh: &mut Harness) {
bh.iter(|| {
- BufferedReader::new(NullStream);
+ BufferedReader::new(NullStream)
});
}
#[bench]
fn bench_buffered_writer(bh: &mut Harness) {
bh.iter(|| {
- BufferedWriter::new(NullStream);
+ BufferedWriter::new(NullStream)
});
}
}
}
+impl Clone for ChanWriter {
+ fn clone(&self) -> ChanWriter {
+ ChanWriter { chan: self.chan.clone() }
+ }
+}
+
impl Writer for ChanWriter {
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
if !self.chan.try_send(buf.to_owned()) {
self.read_byte().map(|i| i as i8)
}
+ /// Creates a wrapper around a mutable reference to the reader.
+ ///
+ /// This is useful to allow applying adaptors while still
+ /// retaining ownership of the original value.
+ fn by_ref<'a>(&'a mut self) -> RefReader<'a, Self> {
+ RefReader { inner: self }
+ }
}
impl Reader for ~Reader {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> { self.read(buf) }
}
+pub struct RefReader<'a, R> {
+ priv inner: &'a mut R
+}
+
+impl<'a, R: Reader> Reader for RefReader<'a, R> {
+ fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> { self.inner.read(buf) }
+}
+
fn extend_sign(val: u64, nbytes: uint) -> i64 {
let shift = (8 - nbytes) * 8;
(val << shift) as i64 >> shift
fn write_i8(&mut self, n: i8) -> IoResult<()> {
self.write([n as u8])
}
+
+ /// Creates a wrapper around a mutable reference to the writer.
+ ///
+ /// This is useful to allow applying wrappers while still
+ /// retaining ownership of the original value.
+ fn by_ref<'a>(&'a mut self) -> RefWriter<'a, Self> {
+ RefWriter { inner: self }
+ }
}
impl Writer for ~Writer {
fn flush(&mut self) -> IoResult<()> { self.flush() }
}
+pub struct RefWriter<'a, W> {
+ inner: &'a mut W
+}
+
+impl<'a, W: Writer> Writer for RefWriter<'a, W> {
+ fn write(&mut self, buf: &[u8]) -> IoResult<()> { self.inner.write(buf) }
+ fn flush(&mut self) -> IoResult<()> { self.inner.flush() }
+}
+
+
pub trait Stream: Reader + Writer { }
impl<T: Reader + Writer> Stream for T {}
use vec::bytes::MutableByteVector;
/// Wraps a `Reader`, limiting the number of bytes that can be read from it.
-pub struct LimitReader<'a, R> {
+pub struct LimitReader<R> {
priv limit: uint,
- priv inner: &'a mut R
+ priv inner: R
}
-impl<'a, R: Reader> LimitReader<'a, R> {
+impl<R: Reader> LimitReader<R> {
/// Creates a new `LimitReader`
- pub fn new<'a>(r: &'a mut R, limit: uint) -> LimitReader<'a, R> {
+ pub fn new(r: R, limit: uint) -> LimitReader<R> {
LimitReader { limit: limit, inner: r }
}
+ pub fn unwrap(self) -> R { self.inner }
}
-impl<'a, R: Reader> Reader for LimitReader<'a, R> {
+impl<R: Reader> Reader for LimitReader<R> {
fn read(&mut self, buf: &mut [u8]) -> io::IoResult<uint> {
if self.limit == 0 {
return Err(io::standard_error(io::EndOfFile));
fn test_bounded_reader_unlimited() {
let mut r = MemReader::new(~[0, 1, 2]);
{
- let mut r = LimitReader::new(&mut r, 4);
+ let mut r = LimitReader::new(r.by_ref(), 4);
assert_eq!(~[0, 1, 2], r.read_to_end().unwrap());
}
}
fn test_bound_reader_limited() {
let mut r = MemReader::new(~[0, 1, 2]);
{
- let mut r = LimitReader::new(&mut r, 2);
+ let mut r = LimitReader::new(r.by_ref(), 2);
assert_eq!(~[0, 1], r.read_to_end().unwrap());
}
assert_eq!(~[2], r.read_to_end().unwrap());
let s = Struct { field: 10 };
let t = &s as &Trait;
bh.iter(|| {
- t.method();
+ t.method()
});
}
fn trait_static_method_call(bh: &mut BenchHarness) {
let s = Struct { field: 10 };
bh.iter(|| {
- s.method();
+ s.method()
});
}
fn match_option_some(bh: &mut BenchHarness) {
let x = Some(10);
bh.iter(|| {
- let _q = match x {
+ match x {
Some(y) => y,
None => 11
- };
+ }
});
}
fn match_vec_pattern(bh: &mut BenchHarness) {
let x = [1,2,3,4,5,6];
bh.iter(|| {
- let _q = match x {
+ match x {
[1,2,3,..] => 10,
_ => 11
- };
+ }
});
}
}
}
}
-// FIXME: #8449: should not be disabled on 32-bit
-#[cfg(target_word_size = "64")]
impl CheckedMul for i64 {
#[inline]
fn checked_mul(&self, v: &i64) -> Option<i64> {
+ Bitwise
+ CheckedAdd
+ CheckedSub
- // + CheckedMul // FIXME #8849: currently not impled on 32-bit
+ + CheckedMul
+ CheckedDiv {}
/// Returns the smallest power of 2 greater than or equal to `n`.
}
}
-// FIXME: #8449: should not be disabled on 32-bit
-#[cfg(target_word_size = "64")]
impl CheckedMul for u64 {
#[inline]
fn checked_mul(&self, v: &u64) -> Option<u64> {
#[bench]
fn alloc_owned_small(bh: &mut BenchHarness) {
bh.iter(|| {
- ~10;
+ ~10
})
}
#[bench]
fn alloc_owned_big(bh: &mut BenchHarness) {
bh.iter(|| {
- ~[10, ..1000];
+ ~[10, ..1000]
})
}
}
assert_eq!(100, s.len());
bh.iter(|| {
- let _ = is_utf8(s);
+ is_utf8(s)
});
}
let s = bytes!("𐌀𐌖𐌋𐌄𐌑𐌉ปรدولة الكويتทศไทย中华𐍅𐌿𐌻𐍆𐌹𐌻𐌰");
assert_eq!(100, s.len());
bh.iter(|| {
- let _ = is_utf8(s);
+ is_utf8(s)
});
}
#[bench]
fn bench_with_capacity(bh: &mut BenchHarness) {
bh.iter(|| {
- let _ = with_capacity(100);
+ with_capacity(100)
});
}
let mut vec: ~[uint] = ~[0u];
bh.iter(|| {
vec.push(0);
+ &vec
})
}
fn starts_with_same_vector(bh: &mut BenchHarness) {
let vec: ~[uint] = vec::from_fn(100, |i| i);
bh.iter(|| {
- vec.starts_with(vec);
+ vec.starts_with(vec)
})
}
fn starts_with_single_element(bh: &mut BenchHarness) {
let vec: ~[uint] = ~[0u];
bh.iter(|| {
- vec.starts_with(vec);
+ vec.starts_with(vec)
})
}
let mut match_vec: ~[uint] = vec::from_fn(99, |i| i);
match_vec.push(0);
bh.iter(|| {
- vec.starts_with(match_vec);
+ vec.starts_with(match_vec)
})
}
fn ends_with_same_vector(bh: &mut BenchHarness) {
let vec: ~[uint] = vec::from_fn(100, |i| i);
bh.iter(|| {
- vec.ends_with(vec);
+ vec.ends_with(vec)
})
}
fn ends_with_single_element(bh: &mut BenchHarness) {
let vec: ~[uint] = ~[0u];
bh.iter(|| {
- vec.ends_with(vec);
+ vec.ends_with(vec)
})
}
let mut match_vec: ~[uint] = vec::from_fn(100, |i| i);
match_vec[0] = 200;
bh.iter(|| {
- vec.starts_with(match_vec);
+ vec.starts_with(match_vec)
})
}
fn contains_last_element(bh: &mut BenchHarness) {
let vec: ~[uint] = vec::from_fn(100, |i| i);
bh.iter(|| {
- vec.contains(&99u);
+ vec.contains(&99u)
})
}
ptr::set_memory(vp, 0, 1024);
v.set_len(1024);
}
+ v
});
}
#[bench]
fn zero_1kb_fixed_repeat(bh: &mut BenchHarness) {
bh.iter(|| {
- let _v: ~[u8] = ~[0u8, ..1024];
+ ~[0u8, ..1024]
});
}
for x in v.mut_iter() {
*x = 0;
}
+ v
});
}
impl Expr {
pub fn get_callee_id(&self) -> Option<NodeId> {
match self.node {
- ExprMethodCall(callee_id, _, _, _, _) |
+ ExprMethodCall(callee_id, _, _, _) |
ExprIndex(callee_id, _, _) |
ExprBinary(callee_id, _, _, _) |
ExprAssignOp(callee_id, _, _, _) |
}
}
-#[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
-pub enum CallSugar {
- NoSugar,
- ForSugar
-}
-
#[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
pub enum Expr_ {
ExprVstore(@Expr, ExprVstore),
// First expr is the place; second expr is the value.
ExprBox(@Expr, @Expr),
ExprVec(~[@Expr], Mutability),
- ExprCall(@Expr, ~[@Expr], CallSugar),
- ExprMethodCall(NodeId, Ident, ~[P<Ty>], ~[@Expr], CallSugar),
+ ExprCall(@Expr, ~[@Expr]),
+ ExprMethodCall(NodeId, Ident, ~[P<Ty>], ~[@Expr]),
ExprTup(~[@Expr]),
ExprBinary(NodeId, BinOp, @Expr, @Expr),
ExprUnary(NodeId, UnOp, @Expr),
}
fn expr_call(&self, span: Span, expr: @ast::Expr, args: ~[@ast::Expr]) -> @ast::Expr {
- self.expr(span, ast::ExprCall(expr, args, ast::NoSugar))
+ self.expr(span, ast::ExprCall(expr, args))
}
fn expr_call_ident(&self, span: Span, id: ast::Ident, args: ~[@ast::Expr]) -> @ast::Expr {
- self.expr(span,
- ast::ExprCall(self.expr_ident(span, id), args, ast::NoSugar))
+ self.expr(span, ast::ExprCall(self.expr_ident(span, id), args))
}
fn expr_call_global(&self, sp: Span, fn_path: ~[ast::Ident],
args: ~[@ast::Expr]) -> @ast::Expr {
ident: ast::Ident,
mut args: ~[@ast::Expr]) -> @ast::Expr {
args.unshift(expr);
- self.expr(span, ast::ExprMethodCall(ast::DUMMY_NODE_ID, ident, ~[], args, ast::NoSugar))
+ self.expr(span, ast::ExprMethodCall(ast::DUMMY_NODE_ID, ident, ~[], args))
}
fn expr_block(&self, b: P<ast::Block>) -> @ast::Expr {
self.expr(b.span, ast::ExprBlock(b))
}
}
-// This is a secondary mechanism for invoking syntax extensions on items:
-// "decorator" attributes, such as #[auto_encode]. These are invoked by an
-// attribute prefixing an item, and are interpreted by feeding the item
-// through the named attribute _as a syntax extension_ and splicing in the
-// resulting item vec into place in favour of the decorator. Note that
-// these do _not_ work for macro extensions, just ItemDecorator ones.
-//
-// NB: there is some redundancy between this and expand_item, below, and
-// they might benefit from some amount of semantic and language-UI merger.
-pub fn expand_mod_items(module_: &ast::Mod, fld: &mut MacroExpander) -> ast::Mod {
- // Fold the contents first:
- let module_ = noop_fold_mod(module_, fld);
-
- // For each item, look through the attributes. If any of them are
- // decorated with "item decorators", then use that function to transform
- // the item into a new set of items.
- let mut new_items = module_.items.clone();
- for item in module_.items.iter() {
- for attr in item.attrs.rev_iter() {
- let mname = attr.name();
-
- match fld.extsbox.find(&intern(mname.get())) {
- Some(&ItemDecorator(dec_fn)) => {
- fld.cx.bt_push(ExpnInfo {
- call_site: attr.span,
- callee: NameAndSpan {
- name: mname.get().to_str(),
- format: MacroAttribute,
- span: None
- }
- });
- dec_fn(fld.cx, attr.span, attr.node.value, *item,
- |item| new_items.push(item));
- fld.cx.bt_pop();
- },
- _ => {},
- }
- }
- }
-
- ast::Mod {
- items: new_items,
- ..module_
- }
-}
-
// eval $e with a new exts frame:
macro_rules! with_exts_frame (
($extsboxexpr:expr,$macros_escape:expr,$e:expr) =>
// When we enter a module, record it, for the sake of `module!`
pub fn expand_item(it: @ast::Item, fld: &mut MacroExpander)
-> SmallVector<@ast::Item> {
- match it.node {
+ let mut decorator_items = SmallVector::zero();
+ for attr in it.attrs.rev_iter() {
+ let mname = attr.name();
+
+ match fld.extsbox.find(&intern(mname.get())) {
+ Some(&ItemDecorator(dec_fn)) => {
+ fld.cx.bt_push(ExpnInfo {
+ call_site: attr.span,
+ callee: NameAndSpan {
+ name: mname.get().to_str(),
+ format: MacroAttribute,
+ span: None
+ }
+ });
+ // ideally we'd call decorator_items.push_all(expand_item(item, fld)),
+ // but that would mutably borrow fld twice
+ dec_fn(fld.cx, attr.span, attr.node.value, it,
+ |item| decorator_items.push(item));
+ fld.cx.bt_pop();
+ }
+ _ => {}
+ }
+ }
+
+ let decorator_items = decorator_items.move_iter()
+ .flat_map(|item| expand_item(item, fld).move_iter())
+ .collect();
+
+ let mut new_items = match it.node {
ast::ItemMac(..) => expand_item_mac(it, fld),
ast::ItemMod(_) | ast::ItemForeignMod(_) => {
fld.cx.mod_push(it.ident);
result
},
_ => noop_fold_item(it, fld)
- }
+ };
+
+ new_items.push_all(decorator_items);
+ new_items
}
// does this attribute list contain "macro_escape" ?
expand_expr(expr, self)
}
- fn fold_mod(&mut self, module: &ast::Mod) -> ast::Mod {
- expand_mod_items(module, self)
- }
-
fn fold_item(&mut self, item: @ast::Item) -> SmallVector<@ast::Item> {
expand_item(item, self)
}
ExprRepeat(folder.fold_expr(expr), folder.fold_expr(count), mutt)
}
ExprTup(ref elts) => ExprTup(elts.map(|x| folder.fold_expr(*x))),
- ExprCall(f, ref args, blk) => {
+ ExprCall(f, ref args) => {
ExprCall(folder.fold_expr(f),
- args.map(|&x| folder.fold_expr(x)),
- blk)
+ args.map(|&x| folder.fold_expr(x)))
}
- ExprMethodCall(callee_id, i, ref tps, ref args, blk) => {
+ ExprMethodCall(callee_id, i, ref tps, ref args) => {
ExprMethodCall(
folder.new_id(callee_id),
folder.fold_ident(i),
tps.map(|&x| folder.fold_ty(x)),
- args.map(|&x| folder.fold_expr(x)),
- blk
- )
+ args.map(|&x| folder.fold_expr(x)))
}
ExprBinary(callee_id, binop, lhs, rhs) => {
ExprBinary(folder.new_id(callee_id),
#[deny(non_camel_case_types)];
-extern mod extra;
+#[cfg(test)] extern mod extra;
extern mod serialize;
extern mod term;
extern mod collections;
// isn't parsed as (if true {...} else {...} | x) | 5
pub fn expr_requires_semi_to_be_stmt(e: @ast::Expr) -> bool {
match e.node {
- ast::ExprIf(..)
- | ast::ExprMatch(..)
- | ast::ExprBlock(_)
- | ast::ExprWhile(..)
- | ast::ExprLoop(..)
- | ast::ExprForLoop(..)
- | ast::ExprCall(_, _, ast::ForSugar)
- | ast::ExprMethodCall(_, _, _, _, ast::ForSugar) => false,
- _ => true
+ ast::ExprIf(..)
+ | ast::ExprMatch(..)
+ | ast::ExprBlock(_)
+ | ast::ExprWhile(..)
+ | ast::ExprLoop(..)
+ | ast::ExprForLoop(..) => false,
+ _ => true
}
}
use abi;
use abi::AbiSet;
use ast::{Sigil, BorrowedSigil, ManagedSigil, OwnedSigil};
-use ast::{CallSugar, NoSugar};
use ast::{BareFnTy, ClosureTy};
use ast::{RegionTyParamBound, TraitTyParamBound};
use ast::{Provided, Public, Purity};
ExprBinary(ast::DUMMY_NODE_ID, binop, lhs, rhs)
}
- pub fn mk_call(&mut self, f: @Expr, args: ~[@Expr], sugar: CallSugar) -> ast::Expr_ {
- ExprCall(f, args, sugar)
+ pub fn mk_call(&mut self, f: @Expr, args: ~[@Expr]) -> ast::Expr_ {
+ ExprCall(f, args)
}
- fn mk_method_call(&mut self, ident: Ident, tps: ~[P<Ty>], args: ~[@Expr],
- sugar: CallSugar) -> ast::Expr_ {
- ExprMethodCall(ast::DUMMY_NODE_ID, ident, tps, args, sugar)
+ fn mk_method_call(&mut self, ident: Ident, tps: ~[P<Ty>], args: ~[@Expr]) -> ast::Expr_ {
+ ExprMethodCall(ast::DUMMY_NODE_ID, ident, tps, args)
}
pub fn mk_index(&mut self, expr: @Expr, idx: @Expr) -> ast::Expr_ {
hi = self.last_span.hi;
es.unshift(e);
- let nd = self.mk_method_call(i, tys, es, NoSugar);
+ let nd = self.mk_method_call(i, tys, es);
e = self.mk_expr(lo, hi, nd);
}
_ => {
);
hi = self.last_span.hi;
- let nd = self.mk_call(e, es, NoSugar);
+ let nd = self.mk_call(e, es);
e = self.mk_expr(lo, hi, nd);
}
}
}
-pub fn print_call_pre(s: &mut State,
- sugar: ast::CallSugar,
- base_args: &mut ~[@ast::Expr])
- -> io::IoResult<Option<@ast::Expr>> {
- match sugar {
- ast::ForSugar => {
- if_ok!(head(s, "for"));
- Ok(Some(base_args.pop().unwrap()))
- }
- ast::NoSugar => Ok(None)
- }
-}
-
-pub fn print_call_post(s: &mut State,
- sugar: ast::CallSugar,
- blk: &Option<@ast::Expr>,
- base_args: &mut ~[@ast::Expr]) -> io::IoResult<()> {
- if sugar == ast::NoSugar || !base_args.is_empty() {
- if_ok!(popen(s));
- if_ok!(commasep_exprs(s, Inconsistent, *base_args));
- if_ok!(pclose(s));
- }
- if sugar != ast::NoSugar {
- if_ok!(nbsp(s));
- // not sure if this can happen
- if_ok!(print_expr(s, blk.unwrap()));
- }
+fn print_call_post(s: &mut State, args: &[@ast::Expr]) -> io::IoResult<()> {
+ if_ok!(popen(s));
+ if_ok!(commasep_exprs(s, Inconsistent, args));
+ if_ok!(pclose(s));
Ok(())
}
}
if_ok!(pclose(s));
}
- ast::ExprCall(func, ref args, sugar) => {
- let mut base_args = (*args).clone();
- let blk = if_ok!(print_call_pre(s, sugar, &mut base_args));
+ ast::ExprCall(func, ref args) => {
if_ok!(print_expr(s, func));
- if_ok!(print_call_post(s, sugar, &blk, &mut base_args));
+ if_ok!(print_call_post(s, *args));
}
- ast::ExprMethodCall(_, ident, ref tys, ref args, sugar) => {
- let mut base_args = args.slice_from(1).to_owned();
- let blk = if_ok!(print_call_pre(s, sugar, &mut base_args));
+ ast::ExprMethodCall(_, ident, ref tys, ref args) => {
+ let base_args = args.slice_from(1);
if_ok!(print_expr(s, args[0]));
if_ok!(word(&mut s.s, "."));
if_ok!(print_ident(s, ident));
if_ok!(commasep(s, Inconsistent, *tys, print_type_ref));
if_ok!(word(&mut s.s, ">"));
}
- if_ok!(print_call_post(s, sugar, &blk, &mut base_args));
+ if_ok!(print_call_post(s, base_args));
}
ast::ExprBinary(_, op, lhs, rhs) => {
if_ok!(print_expr(s, lhs));
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
}
}
+ pub fn push_all(&mut self, other: SmallVector<T>) {
+ for v in other.move_iter() {
+ self.push(v);
+ }
+ }
+
pub fn get<'a>(&'a self, idx: uint) -> &'a T {
match *self {
One(ref v) if idx == 0 => v,
visitor.visit_expr(*subexpression, env.clone())
}
}
- ExprCall(callee_expression, ref arguments, _) => {
+ ExprCall(callee_expression, ref arguments) => {
for argument in arguments.iter() {
visitor.visit_expr(*argument, env.clone())
}
visitor.visit_expr(callee_expression, env.clone())
}
- ExprMethodCall(_, _, ref types, ref arguments, _) => {
+ ExprMethodCall(_, _, ref types, ref arguments) => {
walk_exprs(visitor, *arguments, env.clone());
for &typ in types.iter() {
visitor.visit_ty(typ, env.clone())
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+
+// Verify that the compiler fails with an error on infinite
+// function recursion.
+
+struct Data(~Option<Data>);
+
+fn generic<T>( _ : ~[(Data,T)] ) {
+ //~^ ERROR overly deep expansion of inlined function
+ let rec : ~[(Data,(bool,T))] = ~[];
+ generic( rec );
+}
+
+
+fn main () {
+ // Use generic<T> at least once to trigger instantiation.
+ let input : ~[(Data,())] = ~[];
+ generic(input);
+}
// error-pattern:runned an unexported test
// compile-flags:--test
+// check-stdout
extern mod extra;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// check-stdout
// error-pattern:task 'test_foo' failed at
// compile-flags: --test
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub fn main() {
+ #[deriving(ToStr)]
+ struct Foo {
+ foo: int,
+ }
+
+ let f = Foo { foo: 10 };
+ let _ = f.to_str();
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-fast
+
+extern mod green;
+
+static mut DROP: int = 0i;
+static mut DROP_S: int = 0i;
+static mut DROP_T: int = 0i;
+
+#[start]
+fn start(argc: int, argv: **u8) -> int {
+ let ret = green::start(argc, argv, main);
+ unsafe {
+ assert_eq!(2, DROP);
+ assert_eq!(1, DROP_S);
+ assert_eq!(1, DROP_T);
+ }
+ ret
+}
+
+struct S;
+impl Drop for S {
+ fn drop(&mut self) {
+ unsafe {
+ DROP_S += 1;
+ DROP += 1;
+ }
+ }
+}
+fn f(ref _s: S) {}
+
+struct T { i: int }
+impl Drop for T {
+ fn drop(&mut self) {
+ unsafe {
+ DROP_T += 1;
+ DROP += 1;
+ }
+ }
+}
+fn g(ref _t: T) {}
+
+fn main() {
+ let s = S;
+ f(s);
+ unsafe {
+ assert_eq!(1, DROP);
+ assert_eq!(1, DROP_S);
+ }
+ let t = T { i: 1 };
+ g(t);
+ unsafe { assert_eq!(1, DROP_T); }
+}