Patch for #20340. `rustdoc --version` panics because it uses `rustc_driver::version`, which in turn checks the `verbose` flag, which was not defined for rustdoc. In this patch I have added a verbose flag to rustdoc, because I think it should be useful for other things besides --version.
Another possible fix would be to check if a verbose option was defined in `rustc_driver` or add an extra `version` function for rustdoc.
-name '*.dll' -o \
-name '*.def' -o \
-name '*.py' -o \
+ -name '*.pyc' -o \
-name '*.bc' \
\) \
| xargs rm -f
clean$(1)_H_$(2): \
$$(foreach crate,$$(CRATES),clean$(1)_H_$(2)-lib-$$(crate)) \
- $$(foreach tool,$$(TOOLS) $$(DEBUGGER_BIN_SCRIPTS),clean$(1)_H_$(2)-tool-$$(tool))
+ $$(foreach tool,$$(TOOLS) $$(DEBUGGER_BIN_SCRIPTS_ALL),clean$(1)_H_$(2)-tool-$$(tool))
$$(Q)rm -fr $(2)/rt/libbacktrace
clean$(1)_H_$(2)-tool-%:
clean$(1)_T_$(2)_H_$(3): \
$$(foreach crate,$$(CRATES),clean$(1)_T_$(2)_H_$(3)-lib-$$(crate)) \
- $$(foreach tool,$$(TOOLS) $$(DEBUGGER_BIN_SCRIPTS),clean$(1)_T_$(2)_H_$(3)-tool-$$(tool))
+ $$(foreach tool,$$(TOOLS) $$(DEBUGGER_BIN_SCRIPTS_ALL),clean$(1)_T_$(2)_H_$(3)-tool-$$(tool))
$$(Q)rm -f $$(TLIB$(1)_T_$(2)_H_$(3))/libmorestack.a
$$(Q)rm -f $$(TLIB$(1)_T_$(2)_H_$(3))/libcompiler-rt.a
$(Q)rm -f $$(TLIB$(1)_T_$(2)_H_$(3))/librun_pass_stage* # For unix
# Copy debugger related scripts
######################################################################
-DEBUGGER_RUSTLIB_ETC_SCRIPTS=lldb_rust_formatters.py
-DEBUGGER_BIN_SCRIPTS=rust-lldb
-DEBUGGER_RUSTLIB_ETC_SCRIPTS_ABS=$(foreach script,$(DEBUGGER_RUSTLIB_ETC_SCRIPTS), \
- $(CFG_SRC_DIR)src/etc/$(script))
-DEBUGGER_BIN_SCRIPTS_ABS=$(foreach script,$(DEBUGGER_BIN_SCRIPTS), \
- $(CFG_SRC_DIR)src/etc/$(script))
+## GDB ##
+DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB=gdb_load_rust_pretty_printers.py \
+ gdb_rust_pretty_printing.py
+DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB_ABS=\
+ $(foreach script,$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB), \
+ $(CFG_SRC_DIR)src/etc/$(script))
+
+DEBUGGER_BIN_SCRIPTS_GDB=rust-gdb
+DEBUGGER_BIN_SCRIPTS_GDB_ABS=\
+ $(foreach script,$(DEBUGGER_BIN_SCRIPTS_GDB), \
+ $(CFG_SRC_DIR)src/etc/$(script))
+
+
+## LLDB ##
+DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB=lldb_rust_formatters.py
+DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB_ABS=\
+ $(foreach script,$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB), \
+ $(CFG_SRC_DIR)src/etc/$(script))
+
+DEBUGGER_BIN_SCRIPTS_LLDB=rust-lldb
+DEBUGGER_BIN_SCRIPTS_LLDB_ABS=\
+ $(foreach script,$(DEBUGGER_BIN_SCRIPTS_LLDB), \
+ $(CFG_SRC_DIR)src/etc/$(script))
+
+
+## ALL ##
+DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL=$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB) \
+ $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB)
+DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL_ABS=$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB_ABS) \
+ $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB_ABS)
+DEBUGGER_BIN_SCRIPTS_ALL=$(DEBUGGER_BIN_SCRIPTS_GDB) \
+ $(DEBUGGER_BIN_SCRIPTS_LLDB)
+DEBUGGER_BIN_SCRIPTS_ALL_ABS=$(DEBUGGER_BIN_SCRIPTS_GDB_ABS) \
+ $(DEBUGGER_BIN_SCRIPTS_LLDB_ABS)
-DEBUGGER_SCRIPTS_ALL=$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_ABS) $(DEBUGGER_BIN_SCRIPTS_ABS)
# $(1) - the stage to copy to
# $(2) - the host triple
define DEF_INSTALL_DEBUGGER_SCRIPTS_HOST
-tmp/install-debugger-scripts$(1)_H_$(2).done: $$(DEBUGGER_SCRIPTS_ALL)
+tmp/install-debugger-scripts$(1)_H_$(2)-gdb.done: \
+ $$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB_ABS) \
+ $$(DEBUGGER_BIN_SCRIPTS_GDB_ABS)
+ $(Q)mkdir -p $$(HBIN$(1)_H_$(2))
+ $(Q)mkdir -p $$(HLIB$(1)_H_$(2))/rustlib/etc
+ $(Q)install $$(DEBUGGER_BIN_SCRIPTS_GDB_ABS) $$(HBIN$(1)_H_$(2))
+ $(Q)install $$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB_ABS) $$(HLIB$(1)_H_$(2))/rustlib/etc
+ $(Q)touch $$@
+
+tmp/install-debugger-scripts$(1)_H_$(2)-lldb.done: \
+ $$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB_ABS) \
+ $$(DEBUGGER_BIN_SCRIPTS_LLDB_ABS)
+ $(Q)mkdir -p $$(HBIN$(1)_H_$(2))
+ $(Q)mkdir -p $$(HLIB$(1)_H_$(2))/rustlib/etc
+ $(Q)install $$(DEBUGGER_BIN_SCRIPTS_LLDB_ABS) $$(HBIN$(1)_H_$(2))
+ $(Q)install $$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB_ABS) $$(HLIB$(1)_H_$(2))/rustlib/etc
+ $(Q)touch $$@
+
+tmp/install-debugger-scripts$(1)_H_$(2)-all.done: \
+ $$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL_ABS) \
+ $$(DEBUGGER_BIN_SCRIPTS_ALL_ABS)
$(Q)mkdir -p $$(HBIN$(1)_H_$(2))
$(Q)mkdir -p $$(HLIB$(1)_H_$(2))/rustlib/etc
- $(Q)install $(DEBUGGER_BIN_SCRIPTS_ABS) $$(HBIN$(1)_H_$(2))
- $(Q)install $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_ABS) $$(HLIB$(1)_H_$(2))/rustlib/etc
+ $(Q)install $$(DEBUGGER_BIN_SCRIPTS_ALL_ABS) $$(HBIN$(1)_H_$(2))
+ $(Q)install $$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL_ABS) $$(HLIB$(1)_H_$(2))/rustlib/etc
$(Q)touch $$@
+
+tmp/install-debugger-scripts$(1)_H_$(2)-none.done:
+ $(Q)touch $$@
+
endef
# Expand host make-targets for all stages
# $(3) is the host triple
define DEF_INSTALL_DEBUGGER_SCRIPTS_TARGET
-tmp/install-debugger-scripts$(1)_T_$(2)_H_$(3).done: $$(DEBUGGER_SCRIPTS_ALL)
+tmp/install-debugger-scripts$(1)_T_$(2)_H_$(3)-gdb.done: \
+ $$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB_ABS) \
+ $$(DEBUGGER_BIN_SCRIPTS_GDB_ABS)
+ $(Q)mkdir -p $$(TBIN$(1)_T_$(2)_H_$(3))
+ $(Q)mkdir -p $$(TLIB$(1)_T_$(2)_H_$(3))/rustlib/etc
+ $(Q)install $(DEBUGGER_BIN_SCRIPTS_GDB_ABS) $$(TBIN$(1)_T_$(2)_H_$(3))
+ $(Q)install $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB_ABS) $$(TLIB$(1)_T_$(2)_H_$(3))/rustlib/etc
+ $(Q)touch $$@
+
+tmp/install-debugger-scripts$(1)_T_$(2)_H_$(3)-lldb.done: \
+ $$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB_ABS) \
+ $$(DEBUGGER_BIN_SCRIPTS_LLDB_ABS)
$(Q)mkdir -p $$(TBIN$(1)_T_$(2)_H_$(3))
$(Q)mkdir -p $$(TLIB$(1)_T_$(2)_H_$(3))/rustlib/etc
- $(Q)install $(DEBUGGER_BIN_SCRIPTS_ABS) $$(TBIN$(1)_T_$(2)_H_$(3))
- $(Q)install $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_ABS) $$(TLIB$(1)_T_$(2)_H_$(3))/rustlib/etc
+ $(Q)install $(DEBUGGER_BIN_SCRIPTS_LLDB_ABS) $$(TBIN$(1)_T_$(2)_H_$(3))
+ $(Q)install $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB_ABS) $$(TLIB$(1)_T_$(2)_H_$(3))/rustlib/etc
$(Q)touch $$@
+
+tmp/install-debugger-scripts$(1)_T_$(2)_H_$(3)-all.done: \
+ $$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL_ABS) \
+ $$(DEBUGGER_BIN_SCRIPTS_ALL_ABS)
+ $(Q)mkdir -p $$(TBIN$(1)_T_$(2)_H_$(3))
+ $(Q)mkdir -p $$(TLIB$(1)_T_$(2)_H_$(3))/rustlib/etc
+ $(Q)install $(DEBUGGER_BIN_SCRIPTS_ALL_ABS) $$(TBIN$(1)_T_$(2)_H_$(3))
+ $(Q)install $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL_ABS) $$(TLIB$(1)_T_$(2)_H_$(3))/rustlib/etc
+ $(Q)touch $$@
+
+tmp/install-debugger-scripts$(1)_T_$(2)_H_$(3)-none.done:
+ $(Q)touch $$@
+
endef
# Expand target make-targets for all stages
# Source tarball
######################################################################
-PKG_TAR = dist/$(PKG_NAME).tar.gz
+PKG_TAR = dist/$(PKG_NAME)-src.tar.gz
PKG_GITMODULES := $(S)src/llvm $(S)src/compiler-rt \
$(S)src/rt/hoedown $(S)src/jemalloc
# The version number
CFG_RELEASE_NUM=0.13.0
+# An optional number to put after the label, e.g. '2' -> '-beta2'
+CFG_BETA_CYCLE=
+
CFG_FILENAME_EXTRA=4e7c5e5c
ifeq ($(CFG_RELEASE_CHANNEL),stable)
CFG_PACKAGE_VERS=$(CFG_RELEASE_NUM)
endif
ifeq ($(CFG_RELEASE_CHANNEL),beta)
-CFG_RELEASE=$(CFG_RELEASE_NUM)-beta
+# The beta channel is temporarily called 'alpha'
+CFG_RELEASE=$(CFG_RELEASE_NUM)-alpha$(CFG_BETA_CYCLE)
# When building beta/nightly distributables just reuse the same "beta"
# name so when we upload we'll always override the previous
# nightly. This doesn't actually impact the version reported by rustc -
# it's just for file naming.
-CFG_PACKAGE_VERS=beta
+CFG_PACKAGE_VERS=alpha
endif
ifeq ($(CFG_RELEASE_CHANNEL),nightly)
CFG_RELEASE=$(CFG_RELEASE_NUM)-nightly
# Per-stage targets and runner
######################################################################
+# Valid setting-strings are 'all', 'none', 'gdb', 'lldb'
+# This 'function' will determine which debugger scripts to copy based on a
+# target triple. See debuggers.mk for more information.
+TRIPLE_TO_DEBUGGER_SCRIPT_SETTING=\
+ $(if $(findstring windows,$(1)),none,$(if $(findstring darwin,$(1)),lldb,gdb))
+
STAGES = 0 1 2 3
define SREQ
HSREQ$(1)_H_$(3) = \
$$(HBIN$(1)_H_$(3))/rustc$$(X_$(3)) \
$$(MKFILE_DEPS) \
- tmp/install-debugger-scripts$(1)_H_$(3).done
+ tmp/install-debugger-scripts$(1)_H_$(3)-$$(call TRIPLE_TO_DEBUGGER_SCRIPT_SETTING,$(3)).done
endif
# Prerequisites for using the stageN compiler to build target artifacts
$$(TSREQ$(1)_T_$(2)_H_$(3)) \
$$(foreach dep,$$(TARGET_CRATES), \
$$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$$(dep)) \
- tmp/install-debugger-scripts$(1)_T_$(2)_H_$(3).done
+ tmp/install-debugger-scripts$(1)_T_$(2)_H_$(3)-$$(call TRIPLE_TO_DEBUGGER_SCRIPT_SETTING,$(2)).done
# Prerequisites for a working stageN compiler and complete set of target
# libraries
$$(call PREPARE_LIB,libcompiler-rt.a),),),)
endef
+define INSTALL_GDB_DEBUGGER_SCRIPTS_COMMANDS
+ $(Q)$(PREPARE_BIN_CMD) $(DEBUGGER_BIN_SCRIPTS_GDB_ABS) $(PREPARE_DEST_BIN_DIR)
+ $(Q)$(PREPARE_LIB_CMD) $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB_ABS) $(PREPARE_DEST_LIB_DIR)/rustlib/etc
+endef
+
+define INSTALL_LLDB_DEBUGGER_SCRIPTS_COMMANDS
+ $(Q)$(PREPARE_BIN_CMD) $(DEBUGGER_BIN_SCRIPTS_LLDB_ABS) $(PREPARE_DEST_BIN_DIR)
+ $(Q)$(PREPARE_LIB_CMD) $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB_ABS) $(PREPARE_DEST_LIB_DIR)/rustlib/etc
+endef
+
+define INSTALL_NO_DEBUGGER_SCRIPTS_COMMANDS
+ $(Q)echo "No debugger scripts will be installed for host $(PREPARE_HOST)"
+endef
+
+# $(1) is PREPARE_HOST
+INSTALL_DEBUGGER_SCRIPT_COMMANDS=$(if $(findstring windows,$(1)),\
+ $(INSTALL_NO_DEBUGGER_SCRIPTS_COMMANDS),\
+ $(if $(findstring darwin,$(1)),\
+ $(INSTALL_LLDB_DEBUGGER_SCRIPTS_COMMANDS),\
+ $(INSTALL_GDB_DEBUGGER_SCRIPTS_COMMANDS)))
+
define DEF_PREPARE
prepare-base-$(1): PREPARE_SOURCE_DIR=$$(PREPARE_HOST)/stage$$(PREPARE_STAGE)
$$(call PREPARE_DIR,$$(PREPARE_DEST_LIB_DIR)/rustlib/etc)
$$(call PREPARE_DIR,$$(PREPARE_DEST_MAN_DIR))
-prepare-debugger-scripts-$(1): prepare-host-dirs-$(1) $(DEBUGGER_SCRIPTS_ALL)
- $$(Q)$$(PREPARE_BIN_CMD) $(DEBUGGER_BIN_SCRIPTS_ABS) $$(PREPARE_DEST_BIN_DIR)
- $$(Q)$$(PREPARE_LIB_CMD) $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_ABS) $$(PREPARE_DEST_LIB_DIR)/rustlib/etc
+prepare-debugger-scripts-$(1): prepare-host-dirs-$(1) \
+ $$(DEBUGGER_BIN_SCRIPTS_ALL_ABS) \
+ $$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL_ABS)
+ $$(call INSTALL_DEBUGGER_SCRIPT_COMMANDS,$$(PREPARE_HOST))
$$(foreach tool,$$(PREPARE_TOOLS), \
$$(foreach host,$$(CFG_HOST), \
let DebuggerCommands {
commands,
check_lines,
- use_gdb_pretty_printer,
breakpoint_lines
} = parse_debugger_commands(testfile, "gdb");
let mut cmds = commands.connect("\n");
if header::gdb_version_to_int(version.as_slice()) >
header::gdb_version_to_int("7.4") {
// Add the directory containing the pretty printers to
- // GDB's script auto loading safe path ...
+ // GDB's script auto loading safe path
script_str.push_str(
format!("add-auto-load-safe-path {}\n",
rust_pp_module_abs_path.replace("\\", "\\\\").as_slice())
.as_slice());
- // ... and also the test directory
- script_str.push_str(
- format!("add-auto-load-safe-path {}\n",
- config.build_base.as_str().unwrap().replace("\\", "\\\\"))
- .as_slice());
}
}
_ => {
// pretty printing, it just tells GDB to print values on one line:
script_str.push_str("set print pretty off\n");
+ // Add the pretty printer directory to GDB's source-file search path
+ script_str.push_str(format!("directory {}\n", rust_pp_module_abs_path)[]);
+
// Load the target executable
script_str.push_str(format!("file {}\n",
exe_file.as_str().unwrap().replace("\\", "\\\\"))
script_str.as_slice(),
"debugger.script");
- if use_gdb_pretty_printer {
- // Only emit the gdb auto-loading script if pretty printers
- // should actually be loaded
- dump_gdb_autoload_script(config, testfile);
- }
-
// run debugger script with gdb
#[cfg(windows)]
fn debugger() -> String {
}
check_debugger_output(&debugger_run_result, check_lines.as_slice());
-
- fn dump_gdb_autoload_script(config: &Config, testfile: &Path) {
- let mut script_path = output_base_name(config, testfile);
- let mut script_file_name = script_path.filename().unwrap().to_vec();
- script_file_name.push_all("-gdb.py".as_bytes());
- script_path.set_filename(script_file_name.as_slice());
-
- let script_content = "import gdb_rust_pretty_printing\n\
- gdb_rust_pretty_printing.register_printers(gdb.current_objfile())\n"
- .as_bytes();
-
- File::create(&script_path).write(script_content).unwrap();
- }
}
fn find_rust_src_root(config: &Config) -> Option<Path> {
commands: Vec<String>,
check_lines: Vec<String>,
breakpoint_lines: Vec<uint>,
- use_gdb_pretty_printer: bool
}
fn parse_debugger_commands(file_path: &Path, debugger_prefix: &str)
let mut breakpoint_lines = vec!();
let mut commands = vec!();
let mut check_lines = vec!();
- let mut use_gdb_pretty_printer = false;
let mut counter = 1;
let mut reader = BufferedReader::new(File::open(file_path).unwrap());
for line in reader.lines() {
breakpoint_lines.push(counter);
}
- if line.as_slice().contains("gdb-use-pretty-printer") {
- use_gdb_pretty_printer = true;
- }
-
header::parse_name_value_directive(
line.as_slice(),
command_directive.as_slice()).map(|cmd| {
commands: commands,
check_lines: check_lines,
breakpoint_lines: breakpoint_lines,
- use_gdb_pretty_printer: use_gdb_pretty_printer,
}
}
There is no single reason that bindings are immutable by default, but we can
think about it through one of Rust's primary focuses: safety. If you forget to
say `mut`, the compiler will catch it, and let you know that you have mutated
-something you may not have cared to mutate. If bindings were mutable by
+something you may not have intended to mutate. If bindings were mutable by
default, the compiler would not be able to tell you this. If you _did_ intend
mutation, then the solution is quite easy: add `mut`.
When writing doc comments, adding sections for any arguments, return values,
and providing some examples of usage is very, very helpful.
-You can use the `rustdoc` tool to generate HTML documentation from these doc
-comments. We will talk more about `rustdoc` when we get to modules, as
-generally, you want to export documentation for a full module.
+You can use the [`rustdoc`](rustdoc.html) tool to generate HTML documentation
+from these doc comments.
# Compound Data Types
--- /dev/null
+# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+import gdb_rust_pretty_printing
+gdb_rust_pretty_printing.register_printers(gdb.current_objfile())
--- /dev/null
+#!/bin/sh
+# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+# Exit if anything fails
+set -e
+
+# Find out where the pretty printer Python module is
+RUSTC_SYSROOT=`rustc --print=sysroot`
+GDB_PYTHON_MODULE_DIRECTORY="$RUSTC_SYSROOT/lib/rustlib/etc"
+
+# Run GDB with the additional arguments that load the pretty printers
+PYTHONPATH="$PYTHONPATH:$GDB_PYTHON_MODULE_DIRECTORY" gdb \
+ -d "$GDB_PYTHON_MODULE_DIRECTORY" \
+ -iex "add-auto-load-safe-path $GDB_PYTHON_MODULE_DIRECTORY" \
+ "$@"
Reference grammar.
Uses [antlr4](http://www.antlr.org/) and a custom Rust tool to compare
-ASTs/token streams generated. You can use the `check-syntax` make target to
+ASTs/token streams generated. You can use the `check-lexer` make target to
run all of the available tests.
To use manually:
rustc -O verify.rs
for file in ../*/**.rs; do
echo $file;
- grun RustLexer tokens -tokens < $file | ./verify $file || break
+ grun RustLexer tokens -tokens < $file | ./verify $file RustLexer.tokens || break
done
```
;
LIT_FLOAT
- : [0-9][0-9_]* ( '.' {_input.LA(1) != '.'}?
- | ('.' [0-9][0-9_]*)? ([eE] [-+]? [0-9][0-9_]*)? SUFFIX?)
+ : [0-9][0-9_]* ('.' {
+ /* dot followed by another dot is a range, no float */
+ _input.LA(1) != '.' &&
+ /* dot followed by an identifier is an integer with a function call, no float */
+ _input.LA(1) != '_' &&
+ _input.LA(1) != 'a' &&
+ _input.LA(1) != 'b' &&
+ _input.LA(1) != 'c' &&
+ _input.LA(1) != 'd' &&
+ _input.LA(1) != 'e' &&
+ _input.LA(1) != 'f' &&
+ _input.LA(1) != 'g' &&
+ _input.LA(1) != 'h' &&
+ _input.LA(1) != 'i' &&
+ _input.LA(1) != 'j' &&
+ _input.LA(1) != 'k' &&
+ _input.LA(1) != 'l' &&
+ _input.LA(1) != 'm' &&
+ _input.LA(1) != 'n' &&
+ _input.LA(1) != 'o' &&
+ _input.LA(1) != 'p' &&
+ _input.LA(1) != 'q' &&
+ _input.LA(1) != 'r' &&
+ _input.LA(1) != 's' &&
+ _input.LA(1) != 't' &&
+ _input.LA(1) != 'u' &&
+ _input.LA(1) != 'v' &&
+ _input.LA(1) != 'w' &&
+ _input.LA(1) != 'x' &&
+ _input.LA(1) != 'y' &&
+ _input.LA(1) != 'z' &&
+ _input.LA(1) != 'A' &&
+ _input.LA(1) != 'B' &&
+ _input.LA(1) != 'C' &&
+ _input.LA(1) != 'D' &&
+ _input.LA(1) != 'E' &&
+ _input.LA(1) != 'F' &&
+ _input.LA(1) != 'G' &&
+ _input.LA(1) != 'H' &&
+ _input.LA(1) != 'I' &&
+ _input.LA(1) != 'J' &&
+ _input.LA(1) != 'K' &&
+ _input.LA(1) != 'L' &&
+ _input.LA(1) != 'M' &&
+ _input.LA(1) != 'N' &&
+ _input.LA(1) != 'O' &&
+ _input.LA(1) != 'P' &&
+ _input.LA(1) != 'Q' &&
+ _input.LA(1) != 'R' &&
+ _input.LA(1) != 'S' &&
+ _input.LA(1) != 'T' &&
+ _input.LA(1) != 'U' &&
+ _input.LA(1) != 'V' &&
+ _input.LA(1) != 'W' &&
+ _input.LA(1) != 'X' &&
+ _input.LA(1) != 'Y' &&
+ _input.LA(1) != 'Z'
+ }? | ('.' [0-9][0-9_]*)? ([eE] [-+]? [0-9][0-9_]*)? SUFFIX?)
;
LIT_STR
set -x
fi
+passed=0
+failed=0
+skipped=0
+
check() {
grep --silent "// ignore-lexer-test" $1;
# seem to have any effect.
if $3 RustLexer tokens -tokens < $1 | $4 $1 $5; then
echo "pass: $1"
+ passed=`expr $passed + 1`
else
echo "fail: $1"
+ failed=`expr $failed + 1`
fi
else
echo "skip: $1"
+ skipped=`expr $skipped + 1`
fi
}
for file in $(find $1 -iname '*.rs' ! -path '*/test/compile-fail*'); do
check $file $2 $3 $4 $5
done
+
+printf "\ntest result: "
+
+if [ $failed -eq 0 ]; then
+ printf "ok. $passed passed; $failed failed; $skipped skipped\n\n"
+else
+ printf "failed. $passed passed; $failed failed; $skipped skipped\n\n"
+ exit 1
+fi
+
//! ```
use core::atomic;
+use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
use core::borrow::BorrowFrom;
use core::clone::Clone;
use core::fmt::{mod, Show};
#[experimental = "Weak pointers may not belong in this module."]
pub fn downgrade(&self) -> Weak<T> {
// See the clone() impl for why this is relaxed
- self.inner().weak.fetch_add(1, atomic::Relaxed);
+ self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
/// Get the number of weak references to this value.
#[inline]
#[experimental]
-pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(atomic::SeqCst) - 1 }
+pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(SeqCst) - 1 }
/// Get the number of strong references to this value.
#[inline]
#[experimental]
-pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(atomic::SeqCst) }
+pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(SeqCst) }
#[stable]
impl<T> Clone for Arc<T> {
// must already provide any required synchronization.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
- self.inner().strong.fetch_add(1, atomic::Relaxed);
+ self.inner().strong.fetch_add(1, Relaxed);
Arc { _ptr: self._ptr }
}
}
pub fn make_unique(&mut self) -> &mut T {
// Note that we hold a strong reference, which also counts as a weak reference, so we only
// clone if there is an additional reference of either kind.
- if self.inner().strong.load(atomic::SeqCst) != 1 ||
- self.inner().weak.load(atomic::SeqCst) != 1 {
+ if self.inner().strong.load(SeqCst) != 1 ||
+ self.inner().weak.load(SeqCst) != 1 {
*self = Arc::new((**self).clone())
}
// This unsafety is ok because we're guaranteed that the pointer returned is the *only*
// Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
// unless we are going to delete the object. This same logic applies to the below
// `fetch_sub` to the `weak` count.
- if self.inner().strong.fetch_sub(1, atomic::Release) != 1 { return }
+ if self.inner().strong.fetch_sub(1, Release) != 1 { return }
// This fence is needed to prevent reordering of use of the data and deletion of the data.
// Because it is marked `Release`, the decreasing of the reference count synchronizes with
// > operation before deleting the object.
//
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
- atomic::fence(atomic::Acquire);
+ atomic::fence(Acquire);
// Destroy the data at this time, even though we may not free the box allocation itself
// (there may still be weak pointers lying around).
unsafe { drop(ptr::read(&self.inner().data)); }
- if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
- atomic::fence(atomic::Acquire);
+ if self.inner().weak.fetch_sub(1, Release) == 1 {
+ atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
// count hits 0 it must never be above 0.
let inner = self.inner();
loop {
- let n = inner.strong.load(atomic::SeqCst);
+ let n = inner.strong.load(SeqCst);
if n == 0 { return None }
- let old = inner.strong.compare_and_swap(n, n + 1, atomic::SeqCst);
+ let old = inner.strong.compare_and_swap(n, n + 1, SeqCst);
if old == n { return Some(Arc { _ptr: self._ptr }) }
}
}
#[inline]
fn clone(&self) -> Weak<T> {
// See comments in Arc::clone() for why this is relaxed
- self.inner().weak.fetch_add(1, atomic::Relaxed);
+ self.inner().weak.fetch_add(1, Relaxed);
Weak { _ptr: self._ptr }
}
}
// If we find out that we were the last weak pointer, then its time to deallocate the data
// entirely. See the discussion in Arc::drop() about the memory orderings
- if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
- atomic::fence(atomic::Acquire);
+ if self.inner().weak.fetch_sub(1, Release) == 1 {
+ atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
}
-#[unstable = "waiting on PartialEq"]
+#[stable]
impl<T: PartialEq> PartialEq for Arc<T> {
/// Equality for two `Arc<T>`s.
///
/// ```
fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
}
-#[unstable = "waiting on PartialOrd"]
+#[stable]
impl<T: PartialOrd> PartialOrd for Arc<T> {
/// Partial comparison for two `Arc<T>`s.
///
/// ```
fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
}
-#[unstable = "waiting on Ord"]
+#[stable]
impl<T: Ord> Ord for Arc<T> {
fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
}
-#[unstable = "waiting on Eq"]
+#[stable]
impl<T: Eq> Eq for Arc<T> {}
impl<T: fmt::Show> fmt::Show for Arc<T> {
unsafe {
match *self {
Canary(c) => {
- (*c).fetch_add(1, atomic::SeqCst);
+ (*c).fetch_add(1, SeqCst);
}
}
}
let mut canary = atomic::AtomicUint::new(0);
let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
drop(x);
- assert!(canary.load(atomic::Acquire) == 1);
+ assert!(canary.load(Acquire) == 1);
}
#[test]
let mut canary = atomic::AtomicUint::new(0);
let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
let arc_weak = arc.downgrade();
- assert!(canary.load(atomic::Acquire) == 0);
+ assert!(canary.load(Acquire) == 0);
drop(arc);
- assert!(canary.load(atomic::Acquire) == 1);
+ assert!(canary.load(Acquire) == 1);
drop(arc_weak);
}
}
}
+#[stable]
impl<Sized? T: PartialEq> PartialEq for Box<T> {
#[inline]
fn eq(&self, other: &Box<T>) -> bool { PartialEq::eq(&**self, &**other) }
#[inline]
fn ne(&self, other: &Box<T>) -> bool { PartialEq::ne(&**self, &**other) }
}
+#[stable]
impl<Sized? T: PartialOrd> PartialOrd for Box<T> {
#[inline]
fn partial_cmp(&self, other: &Box<T>) -> Option<Ordering> {
#[inline]
fn gt(&self, other: &Box<T>) -> bool { PartialOrd::gt(&**self, &**other) }
}
+#[stable]
impl<Sized? T: Ord> Ord for Box<T> {
#[inline]
fn cmp(&self, other: &Box<T>) -> Ordering {
Ord::cmp(&**self, &**other)
}
}
+
+#[stable]
impl<Sized? T: Eq> Eq for Box<T> {}
impl<S: hash::Writer, Sized? T: Hash<S>> Hash<S> for Box<T> {
}
}
-#[unstable = "PartialEq is unstable."]
+#[stable]
impl<T: PartialEq> PartialEq for Rc<T> {
/// Equality for two `Rc<T>`s.
///
fn ne(&self, other: &Rc<T>) -> bool { **self != **other }
}
-#[unstable = "Eq is unstable."]
+#[stable]
impl<T: Eq> Eq for Rc<T> {}
-#[unstable = "PartialOrd is unstable."]
+#[stable]
impl<T: PartialOrd> PartialOrd for Rc<T> {
/// Partial comparison for two `Rc<T>`s.
///
fn ge(&self, other: &Rc<T>) -> bool { **self >= **other }
}
-#[unstable = "Ord is unstable."]
+#[stable]
impl<T: Ord> Ord for Rc<T> {
/// Comparison for two `Rc<T>`s.
///
iter: slice::Iter<'a, T>,
}
+// FIXME(#19839) Remove in favor of `#[deriving(Clone)]`
+impl<'a, T> Clone for Iter<'a, T> {
+ fn clone(&self) -> Iter<'a, T> {
+ Iter { iter: self.iter.clone() }
+ }
+}
+
impl<'a, T> Iterator<&'a T> for Iter<'a, T> {
#[inline]
fn next(&mut self) -> Option<&'a T> { self.iter.next() }
}
}
+#[stable]
impl PartialOrd for Bitv {
#[inline]
fn partial_cmp(&self, other: &Bitv) -> Option<Ordering> {
}
}
+#[stable]
impl Ord for Bitv {
#[inline]
fn cmp(&self, other: &Bitv) -> Ordering {
}
}
+#[stable]
impl cmp::PartialEq for Bitv {
#[inline]
fn eq(&self, other: &Bitv) -> bool {
}
}
+#[stable]
impl cmp::Eq for Bitv {}
/// An iterator for `Bitv`.
+#[deriving(Clone)]
pub struct Bits<'a> {
bitv: &'a Bitv,
next_idx: uint,
}
}
+#[stable]
impl PartialOrd for BitvSet {
#[inline]
fn partial_cmp(&self, other: &BitvSet) -> Option<Ordering> {
}
}
+#[stable]
impl Ord for BitvSet {
#[inline]
fn cmp(&self, other: &BitvSet) -> Ordering {
}
}
+#[stable]
impl cmp::PartialEq for BitvSet {
#[inline]
fn eq(&self, other: &BitvSet) -> bool {
}
}
+#[stable]
impl cmp::Eq for BitvSet {}
impl BitvSet {
}
/// An iterator for `BitvSet`.
+#[deriving(Clone)]
pub struct BitPositions<'a> {
set: &'a BitvSet,
next_idx: uint
}
/// An iterator combining two `BitvSet` iterators.
+#[deriving(Clone)]
pub struct TwoBitPositions<'a> {
set: &'a BitvSet,
other: &'a BitvSet,
}
}
+#[stable]
impl<K: PartialEq, V: PartialEq> PartialEq for BTreeMap<K, V> {
fn eq(&self, other: &BTreeMap<K, V>) -> bool {
self.len() == other.len() &&
}
}
+#[stable]
impl<K: Eq, V: Eq> Eq for BTreeMap<K, V> {}
+#[stable]
impl<K: PartialOrd, V: PartialOrd> PartialOrd for BTreeMap<K, V> {
#[inline]
fn partial_cmp(&self, other: &BTreeMap<K, V>) -> Option<Ordering> {
}
}
+#[stable]
impl<K: Ord, V: Ord> Ord for BTreeMap<K, V> {
#[inline]
fn cmp(&self, other: &BTreeMap<K, V>) -> Ordering {
use core::{slice, mem, ptr, cmp, num, raw};
use core::iter::Zip;
use core::borrow::BorrowFrom;
+use core::ptr::Unique;
use alloc::heap;
/// Represents the result of an Insertion: either the item fit, or the node had to split
// These will never be null during normal usage of a `Node`. However, to avoid the need for a
// drop flag, `Node::drop` zeroes `keys`, signaling that the `Node` has already been cleaned
// up.
- keys: *mut K,
- vals: *mut V,
+ keys: Unique<K>,
+ vals: Unique<V>,
// In leaf nodes, this will be null, and no space will be allocated for edges.
- edges: *mut Node<K, V>,
+ edges: Unique<Node<K, V>>,
// At any given time, there will be `_len` keys, `_len` values, and (in an internal node)
// `_len + 1` edges. In a leaf node, there will never be any edges.
#[unsafe_destructor]
impl<K, V> Drop for Node<K, V> {
fn drop(&mut self) {
- if self.keys.is_null() {
+ if self.keys.0.is_null() {
// We have already cleaned up this node.
return;
}
self.destroy();
}
- self.keys = ptr::null_mut();
+ self.keys.0 = ptr::null_mut();
}
}
let (vals_offset, edges_offset) = calculate_offsets_generic::<K, V>(capacity, false);
Node {
- keys: buffer as *mut K,
- vals: buffer.offset(vals_offset as int) as *mut V,
- edges: buffer.offset(edges_offset as int) as *mut Node<K, V>,
+ keys: Unique(buffer as *mut K),
+ vals: Unique(buffer.offset(vals_offset as int) as *mut V),
+ edges: Unique(buffer.offset(edges_offset as int) as *mut Node<K, V>),
_len: 0,
_capacity: capacity,
}
let (vals_offset, _) = calculate_offsets_generic::<K, V>(capacity, true);
Node {
- keys: buffer as *mut K,
- vals: unsafe { buffer.offset(vals_offset as int) as *mut V },
- edges: ptr::null_mut(),
+ keys: Unique(buffer as *mut K),
+ vals: Unique(unsafe { buffer.offset(vals_offset as int) as *mut V }),
+ edges: Unique(ptr::null_mut()),
_len: 0,
_capacity: capacity,
}
unsafe fn destroy(&mut self) {
let (alignment, size) =
calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
- heap::deallocate(self.keys as *mut u8, size, alignment);
+ heap::deallocate(self.keys.0 as *mut u8, size, alignment);
}
#[inline]
pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
unsafe {(
mem::transmute(raw::Slice {
- data: self.keys as *const K,
+ data: self.keys.0 as *const K,
len: self.len()
}),
mem::transmute(raw::Slice {
- data: self.vals as *const V,
+ data: self.vals.0 as *const V,
len: self.len()
})
)}
} else {
unsafe {
mem::transmute(raw::Slice {
- data: self.edges as *const Node<K, V>,
+ data: self.edges.0 as *const Node<K, V>,
len: self.len() + 1
})
}
}
}
+#[stable]
impl<A: PartialEq> PartialEq for DList<A> {
fn eq(&self, other: &DList<A>) -> bool {
self.len() == other.len() &&
}
}
+#[stable]
impl<A: Eq> Eq for DList<A> {}
+#[stable]
impl<A: PartialOrd> PartialOrd for DList<A> {
fn partial_cmp(&self, other: &DList<A>) -> Option<Ordering> {
iter::order::partial_cmp(self.iter(), other.iter())
}
}
+#[stable]
impl<A: Ord> Ord for DList<A> {
#[inline]
fn cmp(&self, other: &DList<A>) -> Ordering {
bits: uint,
}
+// FIXME(#19839) Remove in favor of `#[deriving(Clone)]`
+impl<E> Clone for Iter<E> {
+ fn clone(&self) -> Iter<E> {
+ Iter {
+ index: self.index,
+ bits: self.bits,
+ }
+ }
+}
+
impl<E:CLike> Iter<E> {
fn new(bits: uint) -> Iter<E> {
Iter { index: 0, bits: bits }
ptr: *mut T
}
+#[stable]
+unsafe impl<T: Send> Send for RingBuf<T> {}
+
+#[stable]
+unsafe impl<T: Sync> Sync for RingBuf<T> {}
+
#[stable]
impl<T: Clone> Clone for RingBuf<T> {
fn clone(&self) -> RingBuf<T> {
head: uint
}
+// FIXME(#19839) Remove in favor of `#[deriving(Clone)]`
+impl<'a, T> Clone for Iter<'a, T> {
+ fn clone(&self) -> Iter<'a, T> {
+ Iter {
+ ring: self.ring,
+ tail: self.tail,
+ head: self.head
+ }
+ }
+}
+
impl<'a, T> Iterator<&'a T> for Iter<'a, T> {
#[inline]
fn next(&mut self) -> Option<&'a T> {
impl<'a, T: 'a> ExactSizeIterator<T> for Drain<'a, T> {}
+#[stable]
impl<A: PartialEq> PartialEq for RingBuf<A> {
fn eq(&self, other: &RingBuf<A>) -> bool {
self.len() == other.len() &&
}
}
+#[stable]
impl<A: Eq> Eq for RingBuf<A> {}
+#[stable]
impl<A: PartialOrd> PartialOrd for RingBuf<A> {
fn partial_cmp(&self, other: &RingBuf<A>) -> Option<Ordering> {
iter::order::partial_cmp(self.iter(), other.iter())
}
}
+#[stable]
impl<A: Ord> Ord for RingBuf<A> {
#[inline]
fn cmp(&self, other: &RingBuf<A>) -> Ordering {
///
/// The last generated swap is always (0, 1), and it returns the
/// sequence to its initial order.
+#[deriving(Clone)]
pub struct ElementSwaps {
sdir: Vec<SizeDirection>,
/// If `true`, emit the last swap that returns the sequence to initial
}
}
-#[deriving(Copy)]
+#[deriving(Copy, Clone)]
enum Direction { Pos, Neg }
/// An `Index` and `Direction` together.
-#[deriving(Copy)]
+#[deriving(Copy, Clone)]
struct SizeDirection {
size: uint,
dir: Direction,
/// swap applied.
///
/// Generates even and odd permutations alternately.
+#[deriving(Clone)]
pub struct Permutations<T> {
swaps: ElementSwaps,
v: Vec<T>,
}
}
+#[stable]
impl PartialEq for String {
#[inline]
fn eq(&self, other: &String) -> bool { PartialEq::eq(&**self, &**other) }
macro_rules! impl_eq {
($lhs:ty, $rhs: ty) => {
+ #[stable]
impl<'a> PartialEq<$rhs> for $lhs {
#[inline]
fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
}
+ #[stable]
impl<'a> PartialEq<$lhs> for $rhs {
#[inline]
fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&**self, &**other) }
impl_eq! { String, &'a str }
impl_eq! { CowString<'a>, String }
+#[stable]
impl<'a, 'b> PartialEq<&'b str> for CowString<'a> {
#[inline]
fn eq(&self, other: &&'b str) -> bool { PartialEq::eq(&**self, &**other) }
fn ne(&self, other: &&'b str) -> bool { PartialEq::ne(&**self, &**other) }
}
+#[stable]
impl<'a, 'b> PartialEq<CowString<'a>> for &'b str {
#[inline]
fn eq(&self, other: &CowString<'a>) -> bool { PartialEq::eq(&**self, &**other) }
}
}
+#[stable]
impl<A, B> PartialEq<Vec<B>> for Vec<A> where A: PartialEq<B> {
#[inline]
fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
macro_rules! impl_eq {
($lhs:ty, $rhs:ty) => {
+ #[stable]
impl<'b, A, B> PartialEq<$rhs> for $lhs where A: PartialEq<B> {
#[inline]
fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
}
+ #[stable]
impl<'b, A, B> PartialEq<$lhs> for $rhs where B: PartialEq<A> {
#[inline]
fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&**self, &**other) }
impl_eq! { Vec<A>, &'b [B] }
impl_eq! { Vec<A>, &'b mut [B] }
+#[stable]
impl<'a, A, B> PartialEq<Vec<B>> for CowVec<'a, A> where A: PartialEq<B> + Clone {
#[inline]
fn eq(&self, other: &Vec<B>) -> bool { PartialEq::eq(&**self, &**other) }
fn ne(&self, other: &Vec<B>) -> bool { PartialEq::ne(&**self, &**other) }
}
+#[stable]
impl<'a, A, B> PartialEq<CowVec<'a, A>> for Vec<B> where A: Clone, B: PartialEq<A> {
#[inline]
fn eq(&self, other: &CowVec<'a, A>) -> bool { PartialEq::eq(&**self, &**other) }
macro_rules! impl_eq_for_cowvec {
($rhs:ty) => {
+ #[stable]
impl<'a, 'b, A, B> PartialEq<$rhs> for CowVec<'a, A> where A: PartialEq<B> + Clone {
#[inline]
fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
}
+ #[stable]
impl<'a, 'b, A, B> PartialEq<CowVec<'a, A>> for $rhs where A: Clone, B: PartialEq<A> {
#[inline]
fn eq(&self, other: &CowVec<'a, A>) -> bool { PartialEq::eq(&**self, &**other) }
impl_eq_for_cowvec! { &'b [B] }
impl_eq_for_cowvec! { &'b mut [B] }
-#[unstable = "waiting on PartialOrd stability"]
+#[stable]
impl<T: PartialOrd> PartialOrd for Vec<T> {
#[inline]
fn partial_cmp(&self, other: &Vec<T>) -> Option<Ordering> {
}
}
-#[unstable = "waiting on Eq stability"]
+#[stable]
impl<T: Eq> Eq for Vec<T> {}
#[allow(deprecated)]
fn equiv(&self, other: &V) -> bool { self.as_slice() == other.as_slice() }
}
-#[unstable = "waiting on Ord stability"]
+#[stable]
impl<T: Ord> Ord for Vec<T> {
#[inline]
fn cmp(&self, other: &Vec<T>) -> Ordering {
}
}
+
+#[stable]
impl<V: PartialEq> PartialEq for VecMap<V> {
fn eq(&self, other: &VecMap<V>) -> bool {
iter::order::eq(self.iter(), other.iter())
}
}
+#[stable]
impl<V: Eq> Eq for VecMap<V> {}
+#[stable]
impl<V: PartialOrd> PartialOrd for VecMap<V> {
#[inline]
fn partial_cmp(&self, other: &VecMap<V>) -> Option<Ordering> {
}
}
+#[stable]
impl<V: Ord> Ord for VecMap<V> {
#[inline]
fn cmp(&self, other: &VecMap<V>) -> Ordering {
iter: slice::Iter<'a, Option<V>>
}
+// FIXME(#19839) Remove in favor of `#[deriving(Clone)]`
+impl<'a, V> Clone for Iter<'a, V> {
+    fn clone(&self) -> Iter<'a, V> {
+        Iter {
+            // The cursor positions are copied directly; the underlying
+            // slice iterator must be cloned explicitly.
+            front: self.front,
+            back: self.back,
+            iter: self.iter.clone()
+        }
+    }
+}
+
iterator! { impl Iter -> (uint, &'a V), as_ref }
double_ended_iterator! { impl Iter -> (uint, &'a V), as_ref }
iter: Map<(uint, &'a V), uint, Iter<'a, V>, fn((uint, &'a V)) -> uint>
}
+// FIXME(#19839) Remove in favor of `#[deriving(Clone)]`
+impl<'a, V> Clone for Keys<'a, V> {
+    fn clone(&self) -> Keys<'a, V> {
+        // Delegates to the wrapped map-iterator's clone.
+        Keys {
+            iter: self.iter.clone()
+        }
+    }
+}
+
/// An iterator over the values of a map.
pub struct Values<'a, V: 'a> {
iter: Map<(uint, &'a V), &'a V, Iter<'a, V>, fn((uint, &'a V)) -> &'a V>
}
+// FIXME(#19839) Remove in favor of `#[deriving(Clone)]`
+impl<'a, V> Clone for Values<'a, V> {
+    fn clone(&self) -> Values<'a, V> {
+        // Delegates to the wrapped map-iterator's clone.
+        Values {
+            iter: self.iter.clone()
+        }
+    }
+}
+
/// A consuming iterator over the key-value pairs of a map.
pub struct IntoIter<V> {
iter: FilterMap<
}
}
- #[unstable = "waiting for PartialEq to stabilize"]
+ #[stable]
impl<A, B> PartialEq<[B, ..$N]> for [A, ..$N] where A: PartialEq<B> {
#[inline]
fn eq(&self, other: &[B, ..$N]) -> bool {
}
}
+ #[stable]
impl<'a, A, B, Rhs> PartialEq<Rhs> for [A, ..$N] where
A: PartialEq<B>,
Rhs: Deref<[B]>,
fn ne(&self, other: &Rhs) -> bool { PartialEq::ne(self[], &**other) }
}
+ #[stable]
impl<'a, A, B, Lhs> PartialEq<[B, ..$N]> for Lhs where
A: PartialEq<B>,
Lhs: Deref<[A]>
fn ne(&self, other: &[B, ..$N]) -> bool { PartialEq::ne(&**self, other[]) }
}
- #[unstable = "waiting for Eq to stabilize"]
+ #[stable]
impl<T:Eq> Eq for [T, ..$N] { }
- #[unstable = "waiting for PartialOrd to stabilize"]
+ #[stable]
impl<T:PartialOrd> PartialOrd for [T, ..$N] {
#[inline]
fn partial_cmp(&self, other: &[T, ..$N]) -> Option<Ordering> {
}
}
- #[unstable = "waiting for Ord to stabilize"]
+ #[stable]
impl<T:Ord> Ord for [T, ..$N] {
#[inline]
fn cmp(&self, other: &[T, ..$N]) -> Ordering {
#![stable]
-pub use self::Ordering::*;
+use self::Ordering::*;
use kinds::Sync;
}
}
+#[stable]
impl<'a, T, Sized? B> Eq for Cow<'a, T, B> where B: Eq + ToOwned<T> {}
+#[stable]
impl<'a, T, Sized? B> Ord for Cow<'a, T, B> where B: Ord + ToOwned<T> {
#[inline]
fn cmp(&self, other: &Cow<'a, T, B>) -> Ordering {
}
}
+#[stable]
impl<'a, 'b, T, U, Sized? B, Sized? C> PartialEq<Cow<'b, U, C>> for Cow<'a, T, B> where
B: PartialEq<C> + ToOwned<T>,
C: ToOwned<U>,
}
}
+#[stable]
impl<'a, T, Sized? B> PartialOrd for Cow<'a, T, B> where B: PartialOrd + ToOwned<T> {
#[inline]
fn partial_cmp(&self, other: &Cow<'a, T, B>) -> Option<Ordering> {
}
}
-#[unstable = "waiting for `PartialEq` trait to become stable"]
+#[stable]
impl<T:PartialEq + Copy> PartialEq for Cell<T> {
fn eq(&self, other: &Cell<T>) -> bool {
self.get() == other.get()
}
}
-#[unstable = "waiting for `PartialEq` to become stable"]
+#[stable]
impl<T: PartialEq> PartialEq for RefCell<T> {
fn eq(&self, other: &RefCell<T>) -> bool {
*self.borrow() == *other.borrow()
/// An iterator over the characters that represent a `char`, as escaped by
/// Rust's unicode escaping rules.
+#[deriving(Clone)]
pub struct EscapeUnicode {
c: char,
state: EscapeUnicodeState
}
+#[deriving(Clone)]
enum EscapeUnicodeState {
Backslash,
Type,
/// An iterator over the characters that represent a `char`, escaped
/// for maximum portability.
+#[deriving(Clone)]
pub struct EscapeDefault {
state: EscapeDefaultState
}
+#[deriving(Clone)]
enum EscapeDefaultState {
Backslash(char),
Char(char),
}
}
}
-
use kinds::Sized;
use option::Option::{mod, Some, None};
-/// Trait for values that can be compared for equality and inequality.
+/// Trait for equality comparisons which are [partial equivalence relations](
+/// http://en.wikipedia.org/wiki/Partial_equivalence_relation).
///
-/// This trait allows for partial equality, for types that do not have an
+/// This trait allows for partial equality, for types that do not have a full
/// equivalence relation. For example, in floating point numbers `NaN != NaN`,
/// so floating point types implement `PartialEq` but not `Eq`.
///
+/// Formally, the equality must be (for all `a`, `b` and `c`):
+///
+/// - symmetric: `a == b` implies `b == a`; and
+/// - transitive: `a == b` and `b == c` implies `a == c`.
+///
+/// Note that these requirements mean that the trait itself must be
+/// implemented symmetrically and transitively: if `T: PartialEq<U>`
+/// and `U: PartialEq<V>` then `U: PartialEq<T>` and `T:
+/// PartialEq<V>`.
+///
/// PartialEq only requires the `eq` method to be implemented; `ne` is defined
/// in terms of it by default. Any manual implementation of `ne` *must* respect
/// the rule that `eq` is a strict inverse of `ne`; that is, `!(a == b)` if and
/// only if `a != b`.
-///
-/// Eventually, this will be implemented by default for types that implement
-/// `Eq`.
#[lang="eq"]
-#[unstable = "Definition may change slightly after trait reform"]
+#[stable]
pub trait PartialEq<Sized? Rhs = Self> for Sized? {
/// This method tests for `self` and `other` values to be equal, and is used by `==`.
+ #[stable]
fn eq(&self, other: &Rhs) -> bool;
/// This method tests for `!=`.
#[inline]
+ #[stable]
fn ne(&self, other: &Rhs) -> bool { !self.eq(other) }
}
/// - reflexive: `a == a`;
/// - symmetric: `a == b` implies `b == a`; and
/// - transitive: `a == b` and `b == c` implies `a == c`.
-#[unstable = "Definition may change slightly after trait reform"]
-pub trait Eq<Sized? Rhs = Self> for Sized?: PartialEq<Rhs> {
+#[stable]
+pub trait Eq for Sized?: PartialEq<Self> {
// FIXME #13101: this method is used solely by #[deriving] to
// assert that every component of a type implements #[deriving]
// itself, the current deriving infrastructure means doing this
#[deriving(Clone, Copy, PartialEq, Show)]
#[stable]
pub enum Ordering {
- /// An ordering where a compared value is less [than another].
- Less = -1i,
- /// An ordering where a compared value is equal [to another].
- Equal = 0i,
- /// An ordering where a compared value is greater [than another].
- Greater = 1i,
+ /// An ordering where a compared value is less [than another].
+ #[stable]
+ Less = -1i,
+ /// An ordering where a compared value is equal [to another].
+ #[stable]
+ Equal = 0i,
+ /// An ordering where a compared value is greater [than another].
+ #[stable]
+ Greater = 1i,
}
impl Ordering {
/// assert!(data == b);
/// ```
#[inline]
- #[experimental]
+ #[stable]
pub fn reverse(self) -> Ordering {
unsafe {
// this compiles really nicely (to a single instruction);
/// true; and
/// - transitive, `a < b` and `b < c` implies `a < c`. The same must hold for
/// both `==` and `>`.
-#[unstable = "Definition may change slightly after trait reform"]
-pub trait Ord<Sized? Rhs = Self> for Sized?: Eq<Rhs> + PartialOrd<Rhs> {
+#[stable]
+pub trait Ord for Sized?: Eq + PartialOrd<Self> {
/// This method returns an ordering between `self` and `other` values.
///
/// By convention, `self.cmp(&other)` returns the ordering matching
/// assert_eq!(10u.cmp(&5), Greater); // because 10 > 5
/// assert_eq!( 5u.cmp(&5), Equal); // because 5 == 5
/// ```
- fn cmp(&self, other: &Rhs) -> Ordering;
+ #[stable]
+ fn cmp(&self, other: &Self) -> Ordering;
}
-#[unstable = "Trait is unstable."]
+#[stable]
impl Eq for Ordering {}
-#[unstable = "Trait is unstable."]
+#[stable]
impl Ord for Ordering {
#[inline]
+ #[stable]
fn cmp(&self, other: &Ordering) -> Ordering {
(*self as int).cmp(&(*other as int))
}
}
-#[unstable = "Trait is unstable."]
+#[stable]
impl PartialOrd for Ordering {
#[inline]
+ #[stable]
fn partial_cmp(&self, other: &Ordering) -> Option<Ordering> {
(*self as int).partial_cmp(&(*other as int))
}
/// Trait for values that can be compared for a sort-order.
///
+/// The comparison must satisfy, for all `a`, `b` and `c`:
+///
+/// - antisymmetry: if `a < b` then `!(a > b)` and vice versa; and
+/// - transitivity: `a < b` and `b < c` implies `a < c`. The same must hold for
+/// both `==` and `>`.
+///
+/// Note that these requirements mean that the trait itself must be
+/// implemented symmetrically and transitively: if `T: PartialOrd<U>`
+/// and `U: PartialOrd<V>` then `U: PartialOrd<T>` and `T:
+/// PartialOrd<V>`.
+///
/// PartialOrd only requires implementation of the `partial_cmp` method,
/// with the others generated from default implementations.
///
/// `NaN < 0 == false` and `NaN >= 0 == false` (cf. IEEE 754-2008 section
/// 5.11).
#[lang="ord"]
-#[unstable = "Definition may change slightly after trait reform"]
+#[stable]
pub trait PartialOrd<Sized? Rhs = Self> for Sized?: PartialEq<Rhs> {
/// This method returns an ordering between `self` and `other` values
/// if one exists.
+ #[stable]
fn partial_cmp(&self, other: &Rhs) -> Option<Ordering>;
/// This method tests less than (for `self` and `other`) and is used by the `<` operator.
#[inline]
+ #[stable]
fn lt(&self, other: &Rhs) -> bool {
match self.partial_cmp(other) {
Some(Less) => true,
/// This method tests less than or equal to (`<=`).
#[inline]
+ #[stable]
fn le(&self, other: &Rhs) -> bool {
match self.partial_cmp(other) {
Some(Less) | Some(Equal) => true,
/// This method tests greater than (`>`).
#[inline]
+ #[stable]
fn gt(&self, other: &Rhs) -> bool {
match self.partial_cmp(other) {
Some(Greater) => true,
/// This method tests greater than or equal to (`>=`).
#[inline]
+ #[stable]
fn ge(&self, other: &Rhs) -> bool {
match self.partial_cmp(other) {
Some(Greater) | Some(Equal) => true,
macro_rules! partial_eq_impl {
($($t:ty)*) => ($(
- #[unstable = "Trait is unstable."]
+ #[stable]
impl PartialEq for $t {
#[inline]
fn eq(&self, other: &$t) -> bool { (*self) == (*other) }
)*)
}
- #[unstable = "Trait is unstable."]
+ #[stable]
impl PartialEq for () {
#[inline]
fn eq(&self, _other: &()) -> bool { true }
macro_rules! eq_impl {
($($t:ty)*) => ($(
- #[unstable = "Trait is unstable."]
+ #[stable]
impl Eq for $t {}
)*)
}
macro_rules! partial_ord_impl {
($($t:ty)*) => ($(
- #[unstable = "Trait is unstable."]
+ #[stable]
impl PartialOrd for $t {
#[inline]
fn partial_cmp(&self, other: &$t) -> Option<Ordering> {
)*)
}
- #[unstable = "Trait is unstable."]
+ #[stable]
impl PartialOrd for () {
#[inline]
fn partial_cmp(&self, _: &()) -> Option<Ordering> {
}
}
- #[unstable = "Trait is unstable."]
+ #[stable]
impl PartialOrd for bool {
#[inline]
fn partial_cmp(&self, other: &bool) -> Option<Ordering> {
macro_rules! ord_impl {
($($t:ty)*) => ($(
- #[unstable = "Trait is unstable."]
+ #[stable]
impl Ord for $t {
#[inline]
fn cmp(&self, other: &$t) -> Ordering {
)*)
}
- #[unstable = "Trait is unstable."]
+ #[stable]
impl Ord for () {
#[inline]
fn cmp(&self, _other: &()) -> Ordering { Equal }
}
- #[unstable = "Trait is unstable."]
+ #[stable]
impl Ord for bool {
#[inline]
fn cmp(&self, other: &bool) -> Ordering {
// & pointers
- #[unstable = "Trait is unstable."]
+ #[stable]
impl<'a, 'b, Sized? A, Sized? B> PartialEq<&'b B> for &'a A where A: PartialEq<B> {
#[inline]
fn eq(&self, other: & &'b B) -> bool { PartialEq::eq(*self, *other) }
#[inline]
fn ne(&self, other: & &'b B) -> bool { PartialEq::ne(*self, *other) }
}
- #[unstable = "Trait is unstable."]
- impl<'a, Sized? T: PartialOrd> PartialOrd for &'a T {
+ #[stable]
+ impl<'a, 'b, Sized? A, Sized? B> PartialOrd<&'b B> for &'a A where A: PartialOrd<B> {
#[inline]
- fn partial_cmp(&self, other: &&'a T) -> Option<Ordering> {
+ fn partial_cmp(&self, other: &&'b B) -> Option<Ordering> {
PartialOrd::partial_cmp(*self, *other)
}
#[inline]
- fn lt(&self, other: & &'a T) -> bool { PartialOrd::lt(*self, *other) }
+ fn lt(&self, other: & &'b B) -> bool { PartialOrd::lt(*self, *other) }
#[inline]
- fn le(&self, other: & &'a T) -> bool { PartialOrd::le(*self, *other) }
+ fn le(&self, other: & &'b B) -> bool { PartialOrd::le(*self, *other) }
#[inline]
- fn ge(&self, other: & &'a T) -> bool { PartialOrd::ge(*self, *other) }
+ fn ge(&self, other: & &'b B) -> bool { PartialOrd::ge(*self, *other) }
#[inline]
- fn gt(&self, other: & &'a T) -> bool { PartialOrd::gt(*self, *other) }
+ fn gt(&self, other: & &'b B) -> bool { PartialOrd::gt(*self, *other) }
}
- #[unstable = "Trait is unstable."]
- impl<'a, Sized? T: Ord> Ord for &'a T {
+ #[stable]
+ impl<'a, Sized? A> Ord for &'a A where A: Ord {
#[inline]
- fn cmp(&self, other: & &'a T) -> Ordering { Ord::cmp(*self, *other) }
+ fn cmp(&self, other: & &'a A) -> Ordering { Ord::cmp(*self, *other) }
}
- #[unstable = "Trait is unstable."]
- impl<'a, Sized? T: Eq> Eq for &'a T {}
+ #[stable]
+ impl<'a, Sized? A> Eq for &'a A where A: Eq {}
// &mut pointers
- #[unstable = "Trait is unstable."]
+ #[stable]
impl<'a, 'b, Sized? A, Sized? B> PartialEq<&'b mut B> for &'a mut A where A: PartialEq<B> {
#[inline]
fn eq(&self, other: &&'b mut B) -> bool { PartialEq::eq(*self, *other) }
#[inline]
fn ne(&self, other: &&'b mut B) -> bool { PartialEq::ne(*self, *other) }
}
- #[unstable = "Trait is unstable."]
- impl<'a, Sized? T: PartialOrd> PartialOrd for &'a mut T {
+ #[stable]
+ impl<'a, 'b, Sized? A, Sized? B> PartialOrd<&'b mut B> for &'a mut A where A: PartialOrd<B> {
#[inline]
- fn partial_cmp(&self, other: &&'a mut T) -> Option<Ordering> {
+ fn partial_cmp(&self, other: &&'b mut B) -> Option<Ordering> {
PartialOrd::partial_cmp(*self, *other)
}
#[inline]
- fn lt(&self, other: &&'a mut T) -> bool { PartialOrd::lt(*self, *other) }
+ fn lt(&self, other: &&'b mut B) -> bool { PartialOrd::lt(*self, *other) }
#[inline]
- fn le(&self, other: &&'a mut T) -> bool { PartialOrd::le(*self, *other) }
+ fn le(&self, other: &&'b mut B) -> bool { PartialOrd::le(*self, *other) }
#[inline]
- fn ge(&self, other: &&'a mut T) -> bool { PartialOrd::ge(*self, *other) }
+ fn ge(&self, other: &&'b mut B) -> bool { PartialOrd::ge(*self, *other) }
#[inline]
- fn gt(&self, other: &&'a mut T) -> bool { PartialOrd::gt(*self, *other) }
+ fn gt(&self, other: &&'b mut B) -> bool { PartialOrd::gt(*self, *other) }
}
- #[unstable = "Trait is unstable."]
- impl<'a, Sized? T: Ord> Ord for &'a mut T {
+ #[stable]
+ impl<'a, Sized? A> Ord for &'a mut A where A: Ord {
#[inline]
- fn cmp(&self, other: &&'a mut T) -> Ordering { Ord::cmp(*self, *other) }
+ fn cmp(&self, other: &&'a mut A) -> Ordering { Ord::cmp(*self, *other) }
}
- #[unstable = "Trait is unstable."]
- impl<'a, Sized? T: Eq> Eq for &'a mut T {}
+ #[stable]
+ impl<'a, Sized? A> Eq for &'a mut A where A: Eq {}
+ #[stable]
impl<'a, 'b, Sized? A, Sized? B> PartialEq<&'b mut B> for &'a A where A: PartialEq<B> {
#[inline]
fn eq(&self, other: &&'b mut B) -> bool { PartialEq::eq(*self, *other) }
fn ne(&self, other: &&'b mut B) -> bool { PartialEq::ne(*self, *other) }
}
+ #[stable]
impl<'a, 'b, Sized? A, Sized? B> PartialEq<&'b B> for &'a mut A where A: PartialEq<B> {
#[inline]
fn eq(&self, other: &&'b B) -> bool { PartialEq::eq(*self, *other) }
}
// Equality for pointers
+#[stable]
impl<T> PartialEq for *const T {
#[inline]
fn eq(&self, other: &*const T) -> bool {
fn ne(&self, other: &*const T) -> bool { !self.eq(other) }
}
+#[stable]
impl<T> Eq for *const T {}
+#[stable]
impl<T> PartialEq for *mut T {
#[inline]
fn eq(&self, other: &*mut T) -> bool {
fn ne(&self, other: &*mut T) -> bool { !self.eq(other) }
}
+#[stable]
impl<T> Eq for *mut T {}
// Equivalence for pointers
use mem;
use cmp::PartialEq;
+ #[stable]
impl<_R> PartialEq for extern "C" fn() -> _R {
#[inline]
fn eq(&self, other: &extern "C" fn() -> _R) -> bool {
}
macro_rules! fnptreq {
($($p:ident),*) => {
+ #[stable]
impl<_R,$($p),*> PartialEq for extern "C" fn($($p),*) -> _R {
#[inline]
fn eq(&self, other: &extern "C" fn($($p),*) -> _R) -> bool {
}
// Comparison for pointers
+#[stable]
impl<T> Ord for *const T {
#[inline]
fn cmp(&self, other: &*const T) -> Ordering {
}
}
+#[stable]
impl<T> PartialOrd for *const T {
#[inline]
fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
fn ge(&self, other: &*const T) -> bool { *self >= *other }
}
+#[stable]
impl<T> Ord for *mut T {
#[inline]
fn cmp(&self, other: &*mut T) -> Ordering {
}
}
+#[stable]
impl<T> PartialOrd for *mut T {
#[inline]
fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
// Boilerplate traits
//
-#[unstable = "waiting for DST"]
+#[stable]
impl<A, B> PartialEq<[B]> for [A] where A: PartialEq<B> {
fn eq(&self, other: &[B]) -> bool {
self.len() == other.len() &&
}
}
-#[unstable = "waiting for DST"]
+#[stable]
impl<T: Eq> Eq for [T] {}
#[allow(deprecated)]
fn equiv(&self, other: &V) -> bool { self.as_slice() == other.as_slice() }
}
-#[unstable = "waiting for DST"]
+#[stable]
impl<T: Ord> Ord for [T] {
fn cmp(&self, other: &[T]) -> Ordering {
order::cmp(self.iter(), other.iter())
}
}
-#[unstable = "waiting for DST"]
+#[stable]
impl<T: PartialOrd> PartialOrd for [T] {
#[inline]
fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
use ops;
use str::{Str, StrExt, eq_slice};
+ #[stable]
impl Ord for str {
#[inline]
fn cmp(&self, other: &str) -> Ordering {
}
}
+ #[stable]
impl PartialEq for str {
#[inline]
fn eq(&self, other: &str) -> bool {
fn ne(&self, other: &str) -> bool { !(*self).eq(other) }
}
+ #[stable]
impl Eq for str {}
+ #[stable]
impl PartialOrd for str {
#[inline]
fn partial_cmp(&self, other: &str) -> Option<Ordering> {
}
}
- #[unstable = "waiting for PartialEq to stabilize"]
+ #[stable]
impl<$($T:PartialEq),+> PartialEq for ($($T,)+) {
#[inline]
fn eq(&self, other: &($($T,)+)) -> bool {
}
}
- #[unstable = "waiting for Eq to stabilize"]
+ #[stable]
impl<$($T:Eq),+> Eq for ($($T,)+) {}
- #[unstable = "waiting for PartialOrd to stabilize"]
+ #[stable]
impl<$($T:PartialOrd + PartialEq),+> PartialOrd for ($($T,)+) {
#[inline]
fn partial_cmp(&self, other: &($($T,)+)) -> Option<Ordering> {
}
}
- #[unstable = "waiting for Ord to stabilize"]
+ #[stable]
impl<$($T:Ord),+> Ord for ($($T,)+) {
#[inline]
fn cmp(&self, other: &($($T,)+)) -> Ordering {
use std::os;
use std::rt;
use std::slice;
-use std::sync::{Once, ONCE_INIT};
+use std::sync::{Once, ONCE_INIT, StaticMutex, MUTEX_INIT};
use regex::Regex;
/// The default logging level of a crate if no other is specified.
const DEFAULT_LOG_LEVEL: u32 = 1;
+static LOCK: StaticMutex = MUTEX_INIT;
+
/// An unsafe constant that is the maximum logging level of any module
/// specified. This is the first line of defense to determining whether a
/// logging statement should be run.
pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) {
// Test the literal string from args against the current filter, if there
// is one.
- match unsafe { FILTER.as_ref() } {
- Some(filter) if !filter.is_match(args.to_string()[]) => return,
- _ => {}
+ unsafe {
+ let _g = LOCK.lock();
+ match FILTER as uint {
+ 0 => {}
+ 1 => panic!("cannot log after main thread has exited"),
+ n => {
+ let filter = mem::transmute::<_, &Regex>(n);
+ if !filter.is_match(args.to_string().as_slice()) {
+ return
+ }
+ }
+ }
}
// Completely remove the local logger from TLS in case anyone attempts to
// This assertion should never get tripped unless we're in an at_exit
// handler after logging has been torn down and a logging attempt was made.
- assert!(unsafe { !DIRECTIVES.is_null() });
- enabled(level, module, unsafe { (*DIRECTIVES).iter() })
+ let _g = LOCK.lock();
+ unsafe {
+ assert!(DIRECTIVES as uint != 0);
+ assert!(DIRECTIVES as uint != 1,
+ "cannot log after the main thread has exited");
+
+ enabled(level, module, (*DIRECTIVES).iter())
+ }
}
fn enabled(level: u32,
// Schedule the cleanup for the globals for when the runtime exits.
rt::at_exit(move |:| {
+ let _g = LOCK.lock();
assert!(!DIRECTIVES.is_null());
let _directives: Box<Vec<directive::LogDirective>> =
mem::transmute(DIRECTIVES);
- DIRECTIVES = 0 as *const Vec<directive::LogDirective>;
+ DIRECTIVES = 1 as *const Vec<directive::LogDirective>;
if !FILTER.is_null() {
let _filter: Box<Regex> = mem::transmute(FILTER);
- FILTER = 0 as *const _;
+ FILTER = 1 as *const _;
}
});
}
}
+#[deriving(Clone)]
pub enum NamesIter<'a> {
NamesIterNative(::std::slice::Iter<'a, Option<&'static str>>),
NamesIterDynamic(::std::slice::Iter<'a, Option<String>>)
///
/// `'r` is the lifetime of the compiled expression and `'t` is the lifetime
/// of the string being split.
+#[deriving(Clone)]
pub struct RegexSplits<'r, 't> {
finder: FindMatches<'r, 't>,
last: uint,
///
/// `'r` is the lifetime of the compiled expression and `'t` is the lifetime
/// of the string being split.
+#[deriving(Clone)]
pub struct RegexSplitsN<'r, 't> {
splits: RegexSplits<'r, 't>,
cur: uint,
/// expression.
///
/// `'t` is the lifetime of the matched text.
+#[deriving(Clone)]
pub struct SubCaptures<'t> {
idx: uint,
caps: &'t Captures<'t>,
/// Positions are byte indices in terms of the original string matched.
///
/// `'t` is the lifetime of the matched text.
+#[deriving(Clone)]
pub struct SubCapturesPos<'t> {
idx: uint,
caps: &'t Captures<'t>,
///
/// `'r` is the lifetime of the compiled expression and `'t` is the lifetime
/// of the matched string.
+#[deriving(Clone)]
pub struct FindCaptures<'r, 't> {
re: &'r Regex,
search: &'t str,
///
/// `'r` is the lifetime of the compiled expression and `'t` is the lifetime
/// of the matched string.
+#[deriving(Clone)]
pub struct FindMatches<'r, 't> {
re: &'r Regex,
search: &'t str,
"static_assert",
"thread_local",
"no_debug",
+ "omit_gdb_pretty_printer_section",
"unsafe_no_drop_flag",
// used in resolve
isOptimized: bool,
Flags: *const c_char,
RuntimeVer: c_uint,
- SplitName: *const c_char);
+ SplitName: *const c_char)
+ -> DIDescriptor;
pub fn LLVMDIBuilderCreateFile(Builder: DIBuilderRef,
Filename: *const c_char,
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Reduced graph building
+//!
+//! Here we build the "reduced graph": the graph of the module tree without
+//! any imports resolved.
+
+use {DefModifiers, PUBLIC, IMPORTABLE};
+use ImportDirective;
+use ImportDirectiveSubclass::{mod, SingleImport, GlobImport};
+use ImportResolution;
+use Module;
+use ModuleKind::*;
+use Namespace::{TypeNS, ValueNS};
+use NameBindings;
+use ParentLink::{mod, ModuleParentLink, BlockParentLink};
+use Resolver;
+use RibKind::*;
+use Shadowable;
+use TypeNsDef;
+use TypeParameters::HasTypeParameters;
+
+use self::DuplicateCheckingMode::*;
+use self::NamespaceError::*;
+
+use rustc::metadata::csearch;
+use rustc::metadata::decoder::{DefLike, DlDef, DlField, DlImpl};
+use rustc::middle::def::*;
+use rustc::middle::subst::FnSpace;
+
+use syntax::ast::{Block, Crate};
+use syntax::ast::{DeclItem, DefId};
+use syntax::ast::{ForeignItem, ForeignItemFn, ForeignItemStatic};
+use syntax::ast::{Item, ItemConst, ItemEnum, ItemFn};
+use syntax::ast::{ItemForeignMod, ItemImpl, ItemMac, ItemMod, ItemStatic};
+use syntax::ast::{ItemStruct, ItemTrait, ItemTy};
+use syntax::ast::{MethodImplItem, Name, NamedField, NodeId};
+use syntax::ast::{PathListIdent, PathListMod};
+use syntax::ast::{Public, SelfStatic};
+use syntax::ast::StmtDecl;
+use syntax::ast::StructVariantKind;
+use syntax::ast::TupleVariantKind;
+use syntax::ast::TyObjectSum;
+use syntax::ast::{TypeImplItem, UnnamedField};
+use syntax::ast::{Variant, ViewItem, ViewItemExternCrate};
+use syntax::ast::{ViewItemUse, ViewPathGlob, ViewPathList, ViewPathSimple};
+use syntax::ast::{Visibility};
+use syntax::ast::TyPath;
+use syntax::ast;
+use syntax::ast_util::{mod, PostExpansionMethod, local_def};
+use syntax::attr::AttrMetaMethods;
+use syntax::parse::token::{mod, special_idents};
+use syntax::codemap::{Span, DUMMY_SP};
+use syntax::visit::{mod, Visitor};
+
+use std::rc::Rc;
+use std::mem::replace;
+
+// Specifies how duplicates should be handled when adding a child item if
+// another item exists with the same name in some namespace.
+#[deriving(Copy, PartialEq)]
+enum DuplicateCheckingMode {
+    // Error if the existing child already has a module definition.
+    ForbidDuplicateModules,
+    // Error if the existing child defines a type, except when that
+    // definition is an impl-created module (`ImplModuleKind`).
+    ForbidDuplicateTypesAndModules,
+    // Error if the existing child defines anything in the value namespace.
+    ForbidDuplicateValues,
+    // Error on a clash in either namespace (a `DefMod` in the type
+    // namespace is exempt).
+    ForbidDuplicateTypesAndValues,
+    // Perform no duplicate checking at all.
+    OverwriteDuplicates
+}
+
+// Outcome of a duplicate check: which namespace (if any) held the clash.
+// Rendered for diagnostics by `namespace_error_to_string`.
+#[deriving(Copy, PartialEq)]
+enum NamespaceError {
+    // No duplicate was found.
+    NoError,
+    // Clash with an existing module (reported as "type or module").
+    ModuleError,
+    // Clash with an existing type (reported as "type or module").
+    TypeError,
+    // Clash with an existing value.
+    ValueError
+}
+
+/// Human-readable description of a `NamespaceError`, interpolated into the
+/// "duplicate definition of {} `{}`" diagnostics; `NoError` maps to "".
+fn namespace_error_to_string(ns: NamespaceError) -> &'static str {
+    match ns {
+        NoError => "",
+        ModuleError | TypeError => "type or module",
+        ValueError => "value",
+    }
+}
+
+/// Walks the crate to build the reduced graph. Wraps a mutable borrow of
+/// the `Resolver` and derefs to it, so resolver state is reachable as
+/// `self.<field>`.
+struct GraphBuilder<'a, 'b:'a, 'tcx:'b> {
+    resolver: &'a mut Resolver<'b, 'tcx>
+}
+
+// Immutable passthrough: lets `GraphBuilder` read `Resolver` fields and
+// call its `&self` methods directly.
+impl<'a, 'b:'a, 'tcx:'b> Deref<Resolver<'b, 'tcx>> for GraphBuilder<'a, 'b, 'tcx> {
+    fn deref(&self) -> &Resolver<'b, 'tcx> {
+        &*self.resolver
+    }
+}
+
+// Mutable passthrough: lets `GraphBuilder` mutate `Resolver` state and
+// call its `&mut self` methods directly.
+impl<'a, 'b:'a, 'tcx:'b> DerefMut<Resolver<'b, 'tcx>> for GraphBuilder<'a, 'b, 'tcx> {
+    fn deref_mut(&mut self) -> &mut Resolver<'b, 'tcx> {
+        &mut *self.resolver
+    }
+}
+
+impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
+ /// Constructs the reduced graph for the entire crate.
+ fn build_reduced_graph(self, krate: &ast::Crate) {
+ let parent = self.graph_root.get_module();
+ let mut visitor = BuildReducedGraphVisitor {
+ builder: self,
+ parent: parent
+ };
+ visit::walk_crate(&mut visitor, krate);
+ }
+
+ /// Adds a new child item to the module definition of the parent node and
+ /// returns its corresponding name bindings as well as the current parent.
+ /// Or, if we're inside a block, creates (or reuses) an anonymous module
+ /// corresponding to the innermost block ID and returns the name bindings
+ /// as well as the newly-created parent.
+ ///
+ /// # Panics
+ ///
+ /// Panics if this node does not have a module definition and we are not inside
+ /// a block.
+ fn add_child(&self,
+ name: Name,
+ parent: &Rc<Module>,
+ duplicate_checking_mode: DuplicateCheckingMode,
+ // For printing errors
+ sp: Span)
+ -> Rc<NameBindings> {
+ // If this is the immediate descendant of a module, then we add the
+ // child name directly. Otherwise, we create or reuse an anonymous
+ // module and add the child to that.
+
+ self.check_for_conflicts_between_external_crates_and_items(&**parent,
+ name,
+ sp);
+
+ // Add or reuse the child.
+ let child = parent.children.borrow().get(&name).cloned();
+ match child {
+ None => {
+ let child = Rc::new(NameBindings::new());
+ parent.children.borrow_mut().insert(name, child.clone());
+ child
+ }
+ Some(child) => {
+ // Enforce the duplicate checking mode:
+ //
+ // * If we're requesting duplicate module checking, check that
+ // there isn't a module in the module with the same name.
+ //
+ // * If we're requesting duplicate type checking, check that
+ // there isn't a type in the module with the same name.
+ //
+ // * If we're requesting duplicate value checking, check that
+ // there isn't a value in the module with the same name.
+ //
+ // * If we're requesting duplicate type checking and duplicate
+ // value checking, check that there isn't a duplicate type
+ // and a duplicate value with the same name.
+ //
+ // * If no duplicate checking was requested at all, do
+ // nothing.
+
+ let mut duplicate_type = NoError;
+ let ns = match duplicate_checking_mode {
+ ForbidDuplicateModules => {
+ if child.get_module_if_available().is_some() {
+ duplicate_type = ModuleError;
+ }
+ Some(TypeNS)
+ }
+ ForbidDuplicateTypesAndModules => {
+ match child.def_for_namespace(TypeNS) {
+ None => {}
+ Some(_) if child.get_module_if_available()
+ .map(|m| m.kind.get()) ==
+ Some(ImplModuleKind) => {}
+ Some(_) => duplicate_type = TypeError
+ }
+ Some(TypeNS)
+ }
+ ForbidDuplicateValues => {
+ if child.defined_in_namespace(ValueNS) {
+ duplicate_type = ValueError;
+ }
+ Some(ValueNS)
+ }
+ ForbidDuplicateTypesAndValues => {
+ let mut n = None;
+ match child.def_for_namespace(TypeNS) {
+ Some(DefMod(_)) | None => {}
+ Some(_) => {
+ n = Some(TypeNS);
+ duplicate_type = TypeError;
+ }
+ };
+ if child.defined_in_namespace(ValueNS) {
+ duplicate_type = ValueError;
+ n = Some(ValueNS);
+ }
+ n
+ }
+ OverwriteDuplicates => None
+ };
+ if duplicate_type != NoError {
+ // Return an error here by looking up the namespace that
+ // had the duplicate.
+ let ns = ns.unwrap();
+ self.resolve_error(sp,
+ format!("duplicate definition of {} `{}`",
+ namespace_error_to_string(duplicate_type),
+ token::get_name(name))[]);
+ {
+ let r = child.span_for_namespace(ns);
+ for sp in r.iter() {
+ self.session.span_note(*sp,
+ format!("first definition of {} `{}` here",
+ namespace_error_to_string(duplicate_type),
+ token::get_name(name))[]);
+ }
+ }
+ }
+ child
+ }
+ }
+ }
+
+    /// Returns true if `block` must get its own anonymous module during
+    /// resolution, i.e. if it contains any view items or item declarations
+    /// of its own.
+    fn block_needs_anonymous_module(&mut self, block: &Block) -> bool {
+        // If the block has view items, we need an anonymous module.
+        if block.view_items.len() > 0 {
+            return true;
+        }
+
+        // Check each statement for an item declaration.
+        for statement in block.stmts.iter() {
+            match statement.node {
+                StmtDecl(ref declaration, _) => {
+                    match declaration.node {
+                        DeclItem(_) => {
+                            return true;
+                        }
+                        _ => {
+                            // Keep searching.
+                        }
+                    }
+                }
+                _ => {
+                    // Keep searching.
+                }
+            }
+        }
+
+        // If we found neither view items nor items, we don't need to create
+        // an anonymous module.
+
+        return false;
+    }
+
+    /// Builds a `ParentLink` from a child back to `parent` under `name`,
+    /// holding only a weak reference to avoid an `Rc` cycle.
+    fn get_parent_link(&mut self, parent: &Rc<Module>, name: Name) -> ParentLink {
+        ModuleParentLink(parent.downgrade(), name)
+    }
+
+    /// Constructs the reduced graph for one item.
+    ///
+    /// Registers the item's name in the appropriate namespace(s) of
+    /// `parent` (modules, enums, traits and inherent impls also get a
+    /// child module) and returns the module that the item's own children
+    /// should be added to: the new module for `mod` items, `parent`
+    /// itself for everything else.
+    fn build_reduced_graph_for_item(&mut self, item: &Item, parent: &Rc<Module>) -> Rc<Module> {
+        let name = item.ident.name;
+        let sp = item.span;
+        let is_public = item.vis == ast::Public;
+        // Ordinary items are importable; visibility is carried by PUBLIC.
+        let modifiers = if is_public { PUBLIC } else { DefModifiers::empty() } | IMPORTABLE;
+
+        match item.node {
+            ItemMod(..) => {
+                let name_bindings = self.add_child(name, parent, ForbidDuplicateModules, sp);
+
+                let parent_link = self.get_parent_link(parent, name);
+                let def_id = DefId { krate: 0, node: item.id };
+                name_bindings.define_module(parent_link,
+                                            Some(def_id),
+                                            NormalModuleKind,
+                                            false,
+                                            item.vis == ast::Public,
+                                            sp);
+
+                name_bindings.get_module()
+            }
+
+            ItemForeignMod(..) => parent.clone(),
+
+            // These items live in the value namespace.
+            ItemStatic(_, m, _) => {
+                let name_bindings = self.add_child(name, parent, ForbidDuplicateValues, sp);
+                let mutbl = m == ast::MutMutable;
+
+                name_bindings.define_value(DefStatic(local_def(item.id), mutbl), sp, modifiers);
+                parent.clone()
+            }
+            ItemConst(_, _) => {
+                self.add_child(name, parent, ForbidDuplicateValues, sp)
+                    .define_value(DefConst(local_def(item.id)), sp, modifiers);
+                parent.clone()
+            }
+            ItemFn(_, _, _, _, _) => {
+                let name_bindings = self.add_child(name, parent, ForbidDuplicateValues, sp);
+
+                let def = DefFn(local_def(item.id), false);
+                name_bindings.define_value(def, sp, modifiers);
+                parent.clone()
+            }
+
+            // These items live in the type namespace.
+            ItemTy(..) => {
+                let name_bindings =
+                    self.add_child(name, parent, ForbidDuplicateTypesAndModules, sp);
+
+                name_bindings.define_type(DefTy(local_def(item.id), false), sp, modifiers);
+                parent.clone()
+            }
+
+            ItemEnum(ref enum_definition, _) => {
+                let name_bindings =
+                    self.add_child(name, parent, ForbidDuplicateTypesAndModules, sp);
+
+                name_bindings.define_type(DefTy(local_def(item.id), true), sp, modifiers);
+
+                let parent_link = self.get_parent_link(parent, name);
+                // We want to make sure the module type is EnumModuleKind
+                // even if there's already an ImplModuleKind module defined,
+                // since that's how we prevent duplicate enum definitions
+                name_bindings.set_module_kind(parent_link,
+                                              Some(local_def(item.id)),
+                                              EnumModuleKind,
+                                              false,
+                                              is_public,
+                                              sp);
+
+                let module = name_bindings.get_module();
+
+                // Each variant is registered as a child of the enum's module.
+                for variant in (*enum_definition).variants.iter() {
+                    self.build_reduced_graph_for_variant(
+                        &**variant,
+                        local_def(item.id),
+                        &module);
+                }
+                parent.clone()
+            }
+
+            // These items live in both the type and value namespaces.
+            ItemStruct(ref struct_def, _) => {
+                // Adding to both Type and Value namespaces or just Type?
+                let (forbid, ctor_id) = match struct_def.ctor_id {
+                    Some(ctor_id) => (ForbidDuplicateTypesAndValues, Some(ctor_id)),
+                    None => (ForbidDuplicateTypesAndModules, None)
+                };
+
+                let name_bindings = self.add_child(name, parent, forbid, sp);
+
+                // Define a name in the type namespace.
+                name_bindings.define_type(DefTy(local_def(item.id), false), sp, modifiers);
+
+                // If this is a newtype or unit-like struct, define a name
+                // in the value namespace as well
+                if let Some(cid) = ctor_id {
+                    name_bindings.define_value(DefStruct(local_def(cid)), sp, modifiers);
+                }
+
+                // Record the def ID and fields of this struct.
+                let named_fields = struct_def.fields.iter().filter_map(|f| {
+                    match f.node.kind {
+                        NamedField(ident, _) => Some(ident.name),
+                        UnnamedField(_) => None
+                    }
+                }).collect();
+                self.structs.insert(local_def(item.id), named_fields);
+
+                parent.clone()
+            }
+
+            ItemImpl(_, _, None, ref ty, ref impl_items) => {
+                // If this implements an anonymous trait, then add all the
+                // methods within to a new module, if the type was defined
+                // within this module.
+
+                // The self type is only usable here if it is a single-segment
+                // path (possibly with trait bounds, i.e. an object sum).
+                let mod_name = match ty.node {
+                    TyPath(ref path, _) if path.segments.len() == 1 => {
+                        // FIXME(18446) we should distinguish between the name of
+                        // a trait and the name of an impl of that trait.
+                        Some(path.segments.last().unwrap().identifier.name)
+                    }
+                    TyObjectSum(ref lhs_ty, _) => {
+                        match lhs_ty.node {
+                            TyPath(ref path, _) if path.segments.len() == 1 => {
+                                Some(path.segments.last().unwrap().identifier.name)
+                            }
+                            _ => {
+                                None
+                            }
+                        }
+                    }
+                    _ => {
+                        None
+                    }
+                };
+
+                match mod_name {
+                    None => {
+                        self.resolve_error(ty.span,
+                                           "inherent implementations may \
+                                            only be implemented in the same \
+                                            module as the type they are \
+                                            implemented for")
+                    }
+                    Some(mod_name) => {
+                        // Create the module and add all methods.
+                        let parent_opt = parent.children.borrow().get(&mod_name).cloned();
+                        let new_parent = match parent_opt {
+                            // It already exists
+                            Some(ref child) if child.get_module_if_available()
+                                                    .is_some() &&
+                                               (child.get_module().kind.get() == ImplModuleKind ||
+                                                child.get_module().kind.get() == TraitModuleKind) => {
+                                child.get_module()
+                            }
+                            Some(ref child) if child.get_module_if_available()
+                                                    .is_some() &&
+                                               child.get_module().kind.get() ==
+                                                    EnumModuleKind => child.get_module(),
+                            // Create the module
+                            _ => {
+                                let name_bindings =
+                                    self.add_child(mod_name, parent, ForbidDuplicateModules, sp);
+
+                                let parent_link = self.get_parent_link(parent, name);
+                                let def_id = local_def(item.id);
+                                let ns = TypeNS;
+                                let is_public =
+                                    !name_bindings.defined_in_namespace(ns) ||
+                                     name_bindings.defined_in_public_namespace(ns);
+
+                                name_bindings.define_module(parent_link,
+                                                            Some(def_id),
+                                                            ImplModuleKind,
+                                                            false,
+                                                            is_public,
+                                                            sp);
+
+                                name_bindings.get_module()
+                            }
+                        };
+
+                        // For each implementation item...
+                        for impl_item in impl_items.iter() {
+                            match *impl_item {
+                                MethodImplItem(ref method) => {
+                                    // Add the method to the module.
+                                    let name = method.pe_ident().name;
+                                    let method_name_bindings =
+                                        self.add_child(name,
+                                                       &new_parent,
+                                                       ForbidDuplicateValues,
+                                                       method.span);
+                                    let def = match method.pe_explicit_self()
+                                                          .node {
+                                        SelfStatic => {
+                                            // Static methods become
+                                            // `DefStaticMethod`s.
+                                            DefStaticMethod(local_def(method.id),
+                                                            FromImpl(local_def(item.id)))
+                                        }
+                                        _ => {
+                                            // Non-static methods become
+                                            // `DefMethod`s.
+                                            DefMethod(local_def(method.id),
+                                                      None,
+                                                      FromImpl(local_def(item.id)))
+                                        }
+                                    };
+
+                                    // NB: not IMPORTABLE
+                                    let modifiers = if method.pe_vis() == ast::Public {
+                                        PUBLIC
+                                    } else {
+                                        DefModifiers::empty()
+                                    };
+                                    method_name_bindings.define_value(
+                                        def,
+                                        method.span,
+                                        modifiers);
+                                }
+                                TypeImplItem(ref typedef) => {
+                                    // Add the typedef to the module.
+                                    let name = typedef.ident.name;
+                                    let typedef_name_bindings =
+                                        self.add_child(
+                                            name,
+                                            &new_parent,
+                                            ForbidDuplicateTypesAndModules,
+                                            typedef.span);
+                                    let def = DefAssociatedTy(local_def(
+                                        typedef.id));
+                                    // NB: not IMPORTABLE
+                                    let modifiers = if typedef.vis == ast::Public {
+                                        PUBLIC
+                                    } else {
+                                        DefModifiers::empty()
+                                    };
+                                    typedef_name_bindings.define_type(
+                                        def,
+                                        typedef.span,
+                                        modifiers);
+                                }
+                            }
+                        }
+                    }
+                }
+
+                parent.clone()
+            }
+
+            ItemImpl(_, _, Some(_), _, _) => parent.clone(),
+
+            ItemTrait(_, _, _, ref items) => {
+                let name_bindings =
+                    self.add_child(name, parent, ForbidDuplicateTypesAndModules, sp);
+
+                // Add all the items within to a new module.
+                let parent_link = self.get_parent_link(parent, name);
+                name_bindings.define_module(parent_link,
+                                            Some(local_def(item.id)),
+                                            TraitModuleKind,
+                                            false,
+                                            item.vis == ast::Public,
+                                            sp);
+                let module_parent = name_bindings.get_module();
+
+                let def_id = local_def(item.id);
+
+                // Add the names of all the items to the trait info.
+                for trait_item in items.iter() {
+                    let (name, kind) = match *trait_item {
+                        ast::RequiredMethod(_) |
+                        ast::ProvidedMethod(_) => {
+                            let ty_m = ast_util::trait_item_to_ty_method(trait_item);
+
+                            let name = ty_m.ident.name;
+
+                            // Add it as a name in the trait module.
+                            let (def, static_flag) = match ty_m.explicit_self
+                                                               .node {
+                                SelfStatic => {
+                                    // Static methods become `DefStaticMethod`s.
+                                    (DefStaticMethod(
+                                        local_def(ty_m.id),
+                                        FromTrait(local_def(item.id))),
+                                     StaticMethodTraitItemKind)
+                                }
+                                _ => {
+                                    // Non-static methods become `DefMethod`s.
+                                    (DefMethod(local_def(ty_m.id),
+                                               Some(local_def(item.id)),
+                                               FromTrait(local_def(item.id))),
+                                     NonstaticMethodTraitItemKind)
+                                }
+                            };
+
+                            let method_name_bindings =
+                                self.add_child(name,
+                                               &module_parent,
+                                               ForbidDuplicateTypesAndValues,
+                                               ty_m.span);
+                            // NB: not IMPORTABLE
+                            method_name_bindings.define_value(def,
+                                                              ty_m.span,
+                                                              PUBLIC);
+
+                            (name, static_flag)
+                        }
+                        ast::TypeTraitItem(ref associated_type) => {
+                            let def = DefAssociatedTy(local_def(
+                                associated_type.ty_param.id));
+
+                            let name_bindings =
+                                self.add_child(associated_type.ty_param.ident.name,
+                                               &module_parent,
+                                               ForbidDuplicateTypesAndValues,
+                                               associated_type.ty_param.span);
+                            // NB: not IMPORTABLE
+                            name_bindings.define_type(def,
+                                                      associated_type.ty_param.span,
+                                                      PUBLIC);
+
+                            (associated_type.ty_param.ident.name, TypeTraitItemKind)
+                        }
+                    };
+
+                    self.trait_item_map.insert((name, def_id), kind);
+                }
+
+                name_bindings.define_type(DefTrait(def_id), sp, modifiers);
+                parent.clone()
+            }
+            ItemMac(..) => parent.clone()
+        }
+    }
+
+    // Constructs the reduced graph for one variant. Variants exist in the
+    // type and value namespaces.
+    fn build_reduced_graph_for_variant(&mut self,
+                                       variant: &Variant,
+                                       item_id: DefId,
+                                       parent: &Rc<Module>) {
+        let name = variant.node.name.name;
+        // `is_exported` is true only for struct variants (see the DefVariant
+        // constructions below, where it becomes the third field).
+        let is_exported = match variant.node.kind {
+            TupleVariantKind(_) => false,
+            StructVariantKind(_) => {
+                // Not adding fields for variants as they are not accessed with a self receiver
+                self.structs.insert(local_def(variant.node.id), Vec::new());
+                true
+            }
+        };
+
+        let child = self.add_child(name, parent,
+                                   ForbidDuplicateTypesAndValues,
+                                   variant.span);
+        // variants are always treated as importable to allow them to be glob
+        // used
+        child.define_value(DefVariant(item_id,
+                                      local_def(variant.node.id), is_exported),
+                           variant.span, PUBLIC | IMPORTABLE);
+        child.define_type(DefVariant(item_id,
+                                     local_def(variant.node.id), is_exported),
+                          variant.span, PUBLIC | IMPORTABLE);
+    }
+
+    /// Constructs the reduced graph for one 'view item'. View items consist
+    /// of imports and use directives.
+    fn build_reduced_graph_for_view_item(&mut self, view_item: &ViewItem, parent: &Rc<Module>) {
+        match view_item.node {
+            ViewItemUse(ref view_path) => {
+                // Extract and intern the module part of the path. For
+                // globs and lists, the path is found directly in the AST;
+                // for simple paths we have to munge the path a little.
+                let module_path = match view_path.node {
+                    ViewPathSimple(_, ref full_path, _) => {
+                        full_path.segments
+                            .init()
+                            .iter().map(|ident| ident.identifier.name)
+                            .collect()
+                    }
+
+                    ViewPathGlob(ref module_ident_path, _) |
+                    ViewPathList(ref module_ident_path, _, _) => {
+                        module_ident_path.segments
+                            .iter().map(|ident| ident.identifier.name).collect()
+                    }
+                };
+
+                // Build up the import directives.
+                let is_public = view_item.vis == ast::Public;
+                // Imports injected by the prelude carry the prelude_import
+                // attribute and are marked shadowable so user-written names
+                // can override them.
+                let shadowable =
+                    view_item.attrs
+                             .iter()
+                             .any(|attr| {
+                                 attr.name() == token::get_name(
+                                    special_idents::prelude_import.name)
+                             });
+                let shadowable = if shadowable {
+                    Shadowable::Always
+                } else {
+                    Shadowable::Never
+                };
+
+                match view_path.node {
+                    ViewPathSimple(binding, ref full_path, id) => {
+                        let source_name =
+                            full_path.segments.last().unwrap().identifier.name;
+                        if token::get_name(source_name).get() == "mod" {
+                            self.resolve_error(view_path.span,
+                                "`mod` imports are only allowed within a { } list");
+                        }
+
+                        let subclass = SingleImport(binding.name,
+                                                    source_name);
+                        self.build_import_directive(&**parent,
+                                                    module_path,
+                                                    subclass,
+                                                    view_path.span,
+                                                    id,
+                                                    is_public,
+                                                    shadowable);
+                    }
+                    ViewPathList(_, ref source_items, _) => {
+                        // Make sure there's at most one `mod` import in the list.
+                        let mod_spans = source_items.iter().filter_map(|item| match item.node {
+                            PathListMod { .. } => Some(item.span),
+                            _ => None
+                        }).collect::<Vec<Span>>();
+                        if mod_spans.len() > 1 {
+                            self.resolve_error(mod_spans[0],
+                                "`mod` import can only appear once in the list");
+                            for other_span in mod_spans.iter().skip(1) {
+                                self.session.span_note(*other_span,
+                                    "another `mod` import appears here");
+                            }
+                        }
+
+                        for source_item in source_items.iter() {
+                            let (module_path, name) = match source_item.node {
+                                PathListIdent { name, .. } =>
+                                    (module_path.clone(), name.name),
+                                PathListMod { .. } => {
+                                    // `self`/`mod` in a list imports the
+                                    // prefix's last segment itself.
+                                    let name = match module_path.last() {
+                                        Some(name) => *name,
+                                        None => {
+                                            self.resolve_error(source_item.span,
+                                                "`mod` import can only appear in an import list \
+                                                 with a non-empty prefix");
+                                            continue;
+                                        }
+                                    };
+                                    let module_path = module_path.init();
+                                    (module_path.to_vec(), name)
+                                }
+                            };
+                            self.build_import_directive(
+                                &**parent,
+                                module_path,
+                                SingleImport(name, name),
+                                source_item.span,
+                                source_item.node.id(),
+                                is_public,
+                                shadowable);
+                        }
+                    }
+                    ViewPathGlob(_, id) => {
+                        self.build_import_directive(&**parent,
+                                                    module_path,
+                                                    GlobImport,
+                                                    view_path.span,
+                                                    id,
+                                                    is_public,
+                                                    shadowable);
+                    }
+                }
+            }
+
+            ViewItemExternCrate(name, _, node_id) => {
+                // n.b. we don't need to look at the path option here, because cstore already did
+                for &crate_id in self.session.cstore
+                                     .find_extern_mod_stmt_cnum(node_id).iter() {
+                    let def_id = DefId { krate: crate_id, node: 0 };
+                    self.external_exports.insert(def_id);
+                    let parent_link = ModuleParentLink(parent.downgrade(), name.name);
+                    let external_module = Rc::new(Module::new(parent_link,
+                                                              Some(def_id),
+                                                              NormalModuleKind,
+                                                              false,
+                                                              true));
+                    debug!("(build reduced graph for item) found extern `{}`",
+                           self.module_to_string(&*external_module));
+                    self.check_for_conflicts_between_external_crates(
+                        &**parent,
+                        name.name,
+                        view_item.span);
+                    parent.external_module_children.borrow_mut()
+                          .insert(name.name, external_module.clone());
+                    self.build_reduced_graph_for_external_crate(&external_module);
+                }
+            }
+        }
+    }
+
+    /// Constructs the reduced graph for one foreign item.
+    ///
+    /// The continuation `f` walks the item's interior; for foreign
+    /// functions it runs inside a rib holding the function's type
+    /// parameters.
+    fn build_reduced_graph_for_foreign_item<F>(&mut self,
+                                               foreign_item: &ForeignItem,
+                                               parent: &Rc<Module>,
+                                               f: F) where
+        F: FnOnce(&mut Resolver),
+    {
+        let name = foreign_item.ident.name;
+        let is_public = foreign_item.vis == ast::Public;
+        let modifiers = if is_public { PUBLIC } else { DefModifiers::empty() } | IMPORTABLE;
+        let name_bindings =
+            self.add_child(name, parent, ForbidDuplicateValues,
+                           foreign_item.span);
+
+        match foreign_item.node {
+            ForeignItemFn(_, ref generics) => {
+                let def = DefFn(local_def(foreign_item.id), false);
+                name_bindings.define_value(def, foreign_item.span, modifiers);
+
+                self.with_type_parameter_rib(
+                    HasTypeParameters(generics,
+                                      FnSpace,
+                                      foreign_item.id,
+                                      NormalRibKind),
+                    f);
+            }
+            ForeignItemStatic(_, m) => {
+                let def = DefStatic(local_def(foreign_item.id), m);
+                name_bindings.define_value(def, foreign_item.span, modifiers);
+
+                f(self.resolver)
+            }
+        }
+    }
+
+    /// Builds the reduced graph for a block: creates and registers a new
+    /// anonymous module when the block declares items or view items,
+    /// otherwise returns `parent` unchanged.
+    fn build_reduced_graph_for_block(&mut self, block: &Block, parent: &Rc<Module>) -> Rc<Module> {
+        if self.block_needs_anonymous_module(block) {
+            let block_id = block.id;
+
+            debug!("(building reduced graph for block) creating a new \
+                    anonymous module for block {}",
+                   block_id);
+
+            let new_module = Rc::new(Module::new(
+                BlockParentLink(parent.downgrade(), block_id),
+                None,
+                AnonymousModuleKind,
+                false,
+                false));
+            parent.anonymous_children.borrow_mut().insert(block_id, new_module.clone());
+            new_module
+        } else {
+            parent.clone()
+        }
+    }
+
+    /// Records one external-crate definition (`def`) under `name` inside
+    /// `new_parent`: creates or reuses a child module for module-like defs
+    /// and defines the value/type entries appropriate to the def's kind.
+    fn handle_external_def(&mut self,
+                           def: Def,
+                           vis: Visibility,
+                           child_name_bindings: &NameBindings,
+                           final_ident: &str,
+                           name: Name,
+                           new_parent: &Rc<Module>) {
+        debug!("(building reduced graph for \
+                external crate) building external def, priv {}",
+               vis);
+        let is_public = vis == ast::Public;
+        let modifiers = if is_public { PUBLIC } else { DefModifiers::empty() } | IMPORTABLE;
+        // A def is exported if it is public and its parent is either the
+        // crate root (no def ID) or itself exported.
+        let is_exported = is_public && match new_parent.def_id.get() {
+            None => true,
+            Some(did) => self.external_exports.contains(&did)
+        };
+        if is_exported {
+            self.external_exports.insert(def.def_id());
+        }
+
+        let kind = match def {
+            DefTy(_, true) => EnumModuleKind,
+            DefStruct(..) | DefTy(..) => ImplModuleKind,
+            _ => NormalModuleKind
+        };
+
+        // Module-like defs get a child module, reusing any module that
+        // already exists for this name.
+        match def {
+            DefMod(def_id) | DefForeignMod(def_id) | DefStruct(def_id) |
+            DefTy(def_id, _) => {
+                let type_def = child_name_bindings.type_def.borrow().clone();
+                match type_def {
+                    Some(TypeNsDef { module_def: Some(module_def), .. }) => {
+                        debug!("(building reduced graph for external crate) \
+                                already created module");
+                        module_def.def_id.set(Some(def_id));
+                    }
+                    Some(_) | None => {
+                        debug!("(building reduced graph for \
+                                external crate) building module \
+                                {}", final_ident);
+                        let parent_link = self.get_parent_link(new_parent, name);
+
+                        child_name_bindings.define_module(parent_link,
+                                                          Some(def_id),
+                                                          kind,
+                                                          true,
+                                                          is_public,
+                                                          DUMMY_SP);
+                    }
+                }
+            }
+            _ => {}
+        }
+
+        match def {
+            DefMod(_) | DefForeignMod(_) => {}
+            DefVariant(_, variant_id, is_struct) => {
+                debug!("(building reduced graph for external crate) building \
+                        variant {}",
+                       final_ident);
+                // variants are always treated as importable to allow them to be
+                // glob used
+                let modifiers = PUBLIC | IMPORTABLE;
+                if is_struct {
+                    child_name_bindings.define_type(def, DUMMY_SP, modifiers);
+                    // Not adding fields for variants as they are not accessed with a self receiver
+                    self.structs.insert(variant_id, Vec::new());
+                } else {
+                    child_name_bindings.define_value(def, DUMMY_SP, modifiers);
+                }
+            }
+            DefFn(ctor_id, true) => {
+                // A tuple-struct constructor fn is surfaced as DefStruct.
+                child_name_bindings.define_value(
+                    csearch::get_tuple_struct_definition_if_ctor(&self.session.cstore, ctor_id)
+                        .map_or(def, |_| DefStruct(ctor_id)), DUMMY_SP, modifiers);
+            }
+            DefFn(..) | DefStaticMethod(..) | DefStatic(..) | DefConst(..) | DefMethod(..) => {
+                debug!("(building reduced graph for external \
+                        crate) building value (fn/static) {}", final_ident);
+                // impl methods have already been defined with the correct importability modifier
+                let mut modifiers = match *child_name_bindings.value_def.borrow() {
+                    Some(ref def) => (modifiers & !IMPORTABLE) | (def.modifiers & IMPORTABLE),
+                    None => modifiers
+                };
+                if new_parent.kind.get() != NormalModuleKind {
+                    modifiers = modifiers & !IMPORTABLE;
+                }
+                child_name_bindings.define_value(def, DUMMY_SP, modifiers);
+            }
+            DefTrait(def_id) => {
+                debug!("(building reduced graph for external \
+                        crate) building type {}", final_ident);
+
+                // If this is a trait, add all the trait item names to the trait
+                // info.
+
+                let trait_item_def_ids =
+                    csearch::get_trait_item_def_ids(&self.session.cstore, def_id);
+                for trait_item_def_id in trait_item_def_ids.iter() {
+                    let (trait_item_name, trait_item_kind) =
+                        csearch::get_trait_item_name_and_kind(
+                            &self.session.cstore,
+                            trait_item_def_id.def_id());
+
+                    debug!("(building reduced graph for external crate) ... \
+                            adding trait item '{}'",
+                           token::get_name(trait_item_name));
+
+                    self.trait_item_map.insert((trait_item_name, def_id), trait_item_kind);
+
+                    if is_exported {
+                        self.external_exports
+                            .insert(trait_item_def_id.def_id());
+                    }
+                }
+
+                child_name_bindings.define_type(def, DUMMY_SP, modifiers);
+
+                // Define a module if necessary.
+                let parent_link = self.get_parent_link(new_parent, name);
+                child_name_bindings.set_module_kind(parent_link,
+                                                    Some(def_id),
+                                                    TraitModuleKind,
+                                                    true,
+                                                    is_public,
+                                                    DUMMY_SP)
+            }
+            DefTy(..) | DefAssociatedTy(..) | DefAssociatedPath(..) => {
+                debug!("(building reduced graph for external \
+                        crate) building type {}", final_ident);
+
+                child_name_bindings.define_type(def, DUMMY_SP, modifiers);
+            }
+            DefStruct(def_id) => {
+                debug!("(building reduced graph for external \
+                        crate) building type and value for {}",
+                       final_ident);
+                child_name_bindings.define_type(def, DUMMY_SP, modifiers);
+                let fields = csearch::get_struct_fields(&self.session.cstore, def_id).iter().map(|f| {
+                    f.name
+                }).collect::<Vec<_>>();
+
+                // A fieldless (unit-like) struct also defines a value.
+                if fields.len() == 0 {
+                    child_name_bindings.define_value(def, DUMMY_SP, modifiers);
+                }
+
+                // Record the def ID and fields of this struct.
+                self.structs.insert(def_id, fields);
+            }
+            DefLocal(..) | DefPrimTy(..) | DefTyParam(..) |
+            DefUse(..) | DefUpvar(..) | DefRegion(..) |
+            DefTyParamBinder(..) | DefLabel(..) | DefSelfTy(..) => {
+                panic!("didn't expect `{}`", def);
+            }
+        }
+    }
+
+    /// Builds the reduced graph for a single item in an external crate.
+    fn build_reduced_graph_for_external_crate_def(&mut self,
+                                                  root: &Rc<Module>,
+                                                  def_like: DefLike,
+                                                  name: Name,
+                                                  visibility: Visibility) {
+        match def_like {
+            DlDef(def) => {
+                // Add the new child item, if necessary.
+                match def {
+                    DefForeignMod(def_id) => {
+                        // Foreign modules have no names. Recur and populate
+                        // eagerly.
+                        csearch::each_child_of_item(&self.session.cstore,
+                                                    def_id,
+                                                    |def_like,
+                                                     child_name,
+                                                     vis| {
+                            self.build_reduced_graph_for_external_crate_def(
+                                root,
+                                def_like,
+                                child_name,
+                                vis)
+                        });
+                    }
+                    _ => {
+                        // External defs may legitimately collide, so
+                        // duplicates are overwritten rather than rejected.
+                        let child_name_bindings =
+                            self.add_child(name,
+                                           root,
+                                           OverwriteDuplicates,
+                                           DUMMY_SP);
+
+                        self.handle_external_def(def,
+                                                 visibility,
+                                                 &*child_name_bindings,
+                                                 token::get_name(name).get(),
+                                                 name,
+                                                 root);
+                    }
+                }
+            }
+            DlImpl(def) => {
+                match csearch::get_type_name_if_impl(&self.session.cstore, def) {
+                    None => {}
+                    Some(final_name) => {
+                        let methods_opt =
+                            csearch::get_methods_if_impl(&self.session.cstore, def);
+                        match methods_opt {
+                            Some(ref methods) if
+                                methods.len() >= 1 => {
+                                debug!("(building reduced graph for \
+                                        external crate) processing \
+                                        static methods for type name {}",
+                                       token::get_name(final_name));
+
+                                let child_name_bindings =
+                                    self.add_child(
+                                        final_name,
+                                        root,
+                                        OverwriteDuplicates,
+                                        DUMMY_SP);
+
+                                // Process the static methods. First,
+                                // create the module.
+                                let type_module;
+                                let type_def = child_name_bindings.type_def.borrow().clone();
+                                match type_def {
+                                    Some(TypeNsDef {
+                                        module_def: Some(module_def),
+                                        ..
+                                    }) => {
+                                        // We already have a module. This
+                                        // is OK.
+                                        type_module = module_def;
+
+                                        // Mark it as an impl module if
+                                        // necessary.
+                                        type_module.kind.set(ImplModuleKind);
+                                    }
+                                    Some(_) | None => {
+                                        let parent_link =
+                                            self.get_parent_link(root, final_name);
+                                        child_name_bindings.define_module(
+                                            parent_link,
+                                            Some(def),
+                                            ImplModuleKind,
+                                            true,
+                                            true,
+                                            DUMMY_SP);
+                                        type_module =
+                                            child_name_bindings.
+                                                get_module();
+                                    }
+                                }
+
+                                // Add each static method to the module.
+                                let new_parent = type_module;
+                                for method_info in methods.iter() {
+                                    let name = method_info.name;
+                                    debug!("(building reduced graph for \
+                                            external crate) creating \
+                                            static method '{}'",
+                                           token::get_name(name));
+
+                                    let method_name_bindings =
+                                        self.add_child(name,
+                                                       &new_parent,
+                                                       OverwriteDuplicates,
+                                                       DUMMY_SP);
+                                    let def = DefFn(method_info.def_id, false);
+
+                                    // NB: not IMPORTABLE
+                                    let modifiers = if visibility == ast::Public {
+                                        PUBLIC
+                                    } else {
+                                        DefModifiers::empty()
+                                    };
+                                    method_name_bindings.define_value(
+                                        def, DUMMY_SP, modifiers);
+                                }
+                            }
+
+                            // Otherwise, do nothing.
+                            Some(_) | None => {}
+                        }
+                    }
+                }
+            }
+            DlField => {
+                debug!("(building reduced graph for external crate) \
+                        ignoring field");
+            }
+        }
+    }
+
+    /// Builds the reduced graph rooted at the given external module.
+    fn populate_external_module(&mut self, module: &Rc<Module>) {
+        debug!("(populating external module) attempting to populate {}",
+               self.module_to_string(&**module));
+
+        let def_id = match module.def_id.get() {
+            None => {
+                debug!("(populating external module) ... no def ID!");
+                return
+            }
+            Some(def_id) => def_id,
+        };
+
+        csearch::each_child_of_item(&self.session.cstore,
+                                    def_id,
+                                    |def_like, child_name, visibility| {
+            debug!("(populating external module) ... found ident: {}",
+                   token::get_name(child_name));
+            self.build_reduced_graph_for_external_crate_def(module,
+                                                            def_like,
+                                                            child_name,
+                                                            visibility)
+        });
+        // Mark as populated so `populate_module_if_necessary` skips this
+        // module on later calls.
+        module.populated.set(true)
+    }
+
+    /// Ensures that the reduced graph rooted at the given external module
+    /// is built, building it if it is not.
+    fn populate_module_if_necessary(&mut self, module: &Rc<Module>) {
+        if !module.populated.get() {
+            self.populate_external_module(module)
+        }
+        assert!(module.populated.get())
+    }
+
+    /// Builds the reduced graph rooted at the 'use' directive for an external
+    /// crate, walking every top-level item of that crate.
+    fn build_reduced_graph_for_external_crate(&mut self, root: &Rc<Module>) {
+        csearch::each_top_level_item_of_crate(&self.session.cstore,
+                                              root.def_id
+                                                  .get()
+                                                  .unwrap()
+                                                  .krate,
+                                              |def_like, name, visibility| {
+            self.build_reduced_graph_for_external_crate_def(root, def_like, name, visibility)
+        });
+    }
+
+    /// Creates and adds an import directive to the given module, and
+    /// increments the count of unresolved imports.
+    fn build_import_directive(&mut self,
+                              module_: &Module,
+                              module_path: Vec<Name>,
+                              subclass: ImportDirectiveSubclass,
+                              span: Span,
+                              id: NodeId,
+                              is_public: bool,
+                              shadowable: Shadowable) {
+        module_.imports.borrow_mut().push(ImportDirective::new(module_path,
+                                                               subclass,
+                                                               span,
+                                                               id,
+                                                               is_public,
+                                                               shadowable));
+        self.unresolved_imports += 1;
+        // Bump the reference count on the name. Or, if this is a glob, set
+        // the appropriate flag.
+
+        match subclass {
+            SingleImport(target, _) => {
+                debug!("(building import directive) building import \
+                        directive: {}::{}",
+                       self.names_to_string(module_.imports.borrow().last().unwrap()
+                                                   .module_path[]),
+                       token::get_name(target));
+
+                let mut import_resolutions = module_.import_resolutions
+                                                    .borrow_mut();
+                match import_resolutions.get_mut(&target) {
+                    Some(resolution) => {
+                        debug!("(building import directive) bumping \
+                                reference");
+                        resolution.outstanding_references += 1;
+
+                        // the source of this name is different now
+                        resolution.type_id = id;
+                        resolution.value_id = id;
+                        resolution.is_public = is_public;
+                        return;
+                    }
+                    None => {}
+                }
+                debug!("(building import directive) creating new");
+                let mut resolution = ImportResolution::new(id, is_public);
+                resolution.outstanding_references = 1;
+                import_resolutions.insert(target, resolution);
+            }
+            GlobImport => {
+                // Set the glob flag. This tells us that we don't know the
+                // module's exports ahead of time.
+
+                module_.glob_count.set(module_.glob_count.get() + 1);
+            }
+        }
+    }
+}
+
+/// AST visitor that drives `GraphBuilder`, tracking the current parent
+/// module as it descends into items and blocks.
+struct BuildReducedGraphVisitor<'a, 'b:'a, 'tcx:'b> {
+    builder: GraphBuilder<'a, 'b, 'tcx>,
+    // Module that newly-encountered names are added to.
+    parent: Rc<Module>
+}
+
+impl<'a, 'b, 'v, 'tcx> Visitor<'v> for BuildReducedGraphVisitor<'a, 'b, 'tcx> {
+    fn visit_item(&mut self, item: &Item) {
+        // Descend with the module produced for this item as the new
+        // parent, restoring the previous parent afterwards.
+        let p = self.builder.build_reduced_graph_for_item(item, &self.parent);
+        let old_parent = replace(&mut self.parent, p);
+        visit::walk_item(self, item);
+        self.parent = old_parent;
+    }
+
+    fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
+        let parent = &self.parent;
+        // The continuation builds a fresh visitor so the walk runs inside
+        // whatever rib build_reduced_graph_for_foreign_item sets up.
+        self.builder.build_reduced_graph_for_foreign_item(foreign_item,
+                                                          parent,
+                                                          |r| {
+            let mut v = BuildReducedGraphVisitor {
+                builder: GraphBuilder { resolver: r },
+                parent: parent.clone()
+            };
+            visit::walk_foreign_item(&mut v, foreign_item);
+        })
+    }
+
+    fn visit_view_item(&mut self, view_item: &ViewItem) {
+        self.builder.build_reduced_graph_for_view_item(view_item, &self.parent);
+    }
+
+    fn visit_block(&mut self, block: &Block) {
+        // Blocks may introduce an anonymous module; use it as the parent
+        // while walking the block's contents.
+        let np = self.builder.build_reduced_graph_for_block(block, &self.parent);
+        let old_parent = replace(&mut self.parent, np);
+        visit::walk_block(self, block);
+        self.parent = old_parent;
+    }
+}
+
+/// Module entry point: constructs the reduced graph for the whole crate.
+pub fn build_reduced_graph(resolver: &mut Resolver, krate: &ast::Crate) {
+    GraphBuilder {
+        resolver: resolver
+    }.build_reduced_graph(krate);
+}
+
+/// Module entry point: ensures the reduced graph rooted at the given
+/// external module has been built.
+pub fn populate_module_if_necessary(resolver: &mut Resolver, module: &Rc<Module>) {
+    GraphBuilder {
+        resolver: resolver
+    }.populate_module_if_necessary(module);
+}
use self::PatternBindingMode::*;
use self::Namespace::*;
-use self::NamespaceError::*;
use self::NamespaceResult::*;
use self::NameDefinition::*;
use self::ImportDirectiveSubclass::*;
-use self::ReducedGraphParent::*;
use self::ResolveResult::*;
use self::FallbackSuggestion::*;
use self::TypeParameters::*;
use self::ModulePrefixResult::*;
use self::NameSearchType::*;
use self::BareIdentifierPatternResolution::*;
-use self::DuplicateCheckingMode::*;
use self::ParentLink::*;
use self::ModuleKind::*;
use self::TraitReferenceType::*;
use rustc::util::lev_distance::lev_distance;
use syntax::ast::{Arm, BindByRef, BindByValue, BindingMode, Block, Crate, CrateNum};
-use syntax::ast::{DeclItem, DefId, Expr, ExprAgain, ExprBreak, ExprField};
+use syntax::ast::{DefId, Expr, ExprAgain, ExprBreak, ExprField};
use syntax::ast::{ExprClosure, ExprForLoop, ExprLoop, ExprWhile, ExprMethodCall};
use syntax::ast::{ExprPath, ExprStruct, FnDecl};
-use syntax::ast::{ForeignItem, ForeignItemFn, ForeignItemStatic, Generics};
+use syntax::ast::{ForeignItemFn, ForeignItemStatic, Generics};
use syntax::ast::{Ident, ImplItem, Item, ItemConst, ItemEnum, ItemFn};
use syntax::ast::{ItemForeignMod, ItemImpl, ItemMac, ItemMod, ItemStatic};
use syntax::ast::{ItemStruct, ItemTrait, ItemTy, Local, LOCAL_CRATE};
-use syntax::ast::{MethodImplItem, Mod, Name, NamedField, NodeId};
+use syntax::ast::{MethodImplItem, Mod, Name, NodeId};
use syntax::ast::{Pat, PatEnum, PatIdent, PatLit};
-use syntax::ast::{PatRange, PatStruct, Path, PathListIdent, PathListMod};
-use syntax::ast::{PolyTraitRef, PrimTy, Public, SelfExplicit, SelfStatic};
-use syntax::ast::{RegionTyParamBound, StmtDecl, StructField};
-use syntax::ast::{StructVariantKind, TraitRef, TraitTyParamBound};
-use syntax::ast::{TupleVariantKind, Ty, TyBool, TyChar, TyClosure, TyF32};
+use syntax::ast::{PatRange, PatStruct, Path};
+use syntax::ast::{PolyTraitRef, PrimTy, SelfExplicit};
+use syntax::ast::{RegionTyParamBound, StructField};
+use syntax::ast::{TraitRef, TraitTyParamBound};
+use syntax::ast::{Ty, TyBool, TyChar, TyClosure, TyF32};
use syntax::ast::{TyF64, TyFloat, TyI, TyI8, TyI16, TyI32, TyI64, TyInt, TyObjectSum};
use syntax::ast::{TyParam, TyParamBound, TyPath, TyPtr, TyPolyTraitRef, TyQPath};
use syntax::ast::{TyRptr, TyStr, TyU, TyU8, TyU16, TyU32, TyU64, TyUint};
-use syntax::ast::{TypeImplItem, UnnamedField};
-use syntax::ast::{Variant, ViewItem, ViewItemExternCrate};
-use syntax::ast::{ViewItemUse, ViewPathGlob, ViewPathList, ViewPathSimple};
-use syntax::ast::{Visibility};
+use syntax::ast::{TypeImplItem};
use syntax::ast;
use syntax::ast_map;
-use syntax::ast_util::{mod, PostExpansionMethod, local_def, walk_pat};
+use syntax::ast_util::{PostExpansionMethod, local_def, walk_pat};
use syntax::attr::AttrMetaMethods;
use syntax::ext::mtwt;
use syntax::parse::token::{mod, special_names, special_idents};
-use syntax::codemap::{Span, DUMMY_SP, Pos};
+use syntax::codemap::{Span, Pos};
use syntax::owned_slice::OwnedSlice;
use syntax::visit::{mod, Visitor};
mod check_unused;
mod record_exports;
+mod build_reduced_graph;
#[deriving(Copy)]
struct BindingInfo {
ValueNS
}
-#[deriving(Copy, PartialEq)]
-enum NamespaceError {
- NoError,
- ModuleError,
- TypeError,
- ValueError
-}
-
/// A NamespaceResult represents the result of resolving an import in
/// a particular namespace. The result is either definitely-resolved,
/// definitely- unresolved, or unknown.
GlobImport
}
-/// The context that we thread through while building the reduced graph.
-#[deriving(Clone)]
-enum ReducedGraphParent {
- ModuleReducedGraphParent(Rc<Module>)
-}
-
-impl ReducedGraphParent {
- fn module(&self) -> Rc<Module> {
- match *self {
- ModuleReducedGraphParent(ref m) => {
- m.clone()
- }
- }
- }
-}
-
type ErrorMessage = Option<(Span, String)>;
enum ResolveResult<T> {
BareIdentifierPatternUnresolved
}
-// Specifies how duplicates should be handled when adding a child item if
-// another item exists with the same name in some namespace.
-#[deriving(Copy, PartialEq)]
-enum DuplicateCheckingMode {
- ForbidDuplicateModules,
- ForbidDuplicateTypesAndModules,
- ForbidDuplicateValues,
- ForbidDuplicateTypesAndValues,
- OverwriteDuplicates
-}
-
/// One local scope.
#[deriving(Show)]
struct Rib {
}
}
-
-fn namespace_error_to_string(ns: NamespaceError) -> &'static str {
- match ns {
- NoError => "",
- ModuleError | TypeError => "type or module",
- ValueError => "value",
- }
-}
-
/// The main resolver class.
struct Resolver<'a, 'tcx:'a> {
session: &'a Session,
used_crates: HashSet<CrateNum>,
}
-struct BuildReducedGraphVisitor<'a, 'b:'a, 'tcx:'b> {
- resolver: &'a mut Resolver<'b, 'tcx>,
- parent: ReducedGraphParent
-}
-
-impl<'a, 'b, 'v, 'tcx> Visitor<'v> for BuildReducedGraphVisitor<'a, 'b, 'tcx> {
-
- fn visit_item(&mut self, item: &Item) {
- let p = self.resolver.build_reduced_graph_for_item(item, self.parent.clone());
- let old_parent = replace(&mut self.parent, p);
- visit::walk_item(self, item);
- self.parent = old_parent;
- }
-
- fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
- let parent = self.parent.clone();
- self.resolver.build_reduced_graph_for_foreign_item(foreign_item,
- parent.clone(),
- |r| {
- let mut v = BuildReducedGraphVisitor {
- resolver: r,
- parent: parent.clone()
- };
- visit::walk_foreign_item(&mut v, foreign_item);
- })
- }
-
- fn visit_view_item(&mut self, view_item: &ViewItem) {
- self.resolver.build_reduced_graph_for_view_item(view_item, self.parent.clone());
- }
-
- fn visit_block(&mut self, block: &Block) {
- let np = self.resolver.build_reduced_graph_for_block(block, self.parent.clone());
- let old_parent = replace(&mut self.parent, np);
- visit::walk_block(self, block);
- self.parent = old_parent;
- }
-
-}
-
#[deriving(PartialEq)]
enum FallbackChecks {
Everything,
}
}
- //
- // Reduced graph building
- //
- // Here we build the "reduced graph": the graph of the module tree without
- // any imports resolved.
- //
-
- /// Constructs the reduced graph for the entire crate.
- fn build_reduced_graph(&mut self, krate: &ast::Crate) {
- let parent = ModuleReducedGraphParent(self.graph_root.get_module());
- let mut visitor = BuildReducedGraphVisitor {
- resolver: self,
- parent: parent
- };
- visit::walk_crate(&mut visitor, krate);
- }
-
- /// Adds a new child item to the module definition of the parent node and
- /// returns its corresponding name bindings as well as the current parent.
- /// Or, if we're inside a block, creates (or reuses) an anonymous module
- /// corresponding to the innermost block ID and returns the name bindings
- /// as well as the newly-created parent.
- ///
- /// # Panics
- ///
- /// Panics if this node does not have a module definition and we are not inside
- /// a block.
- fn add_child(&self,
- name: Name,
- reduced_graph_parent: ReducedGraphParent,
- duplicate_checking_mode: DuplicateCheckingMode,
- // For printing errors
- sp: Span)
- -> Rc<NameBindings> {
- // If this is the immediate descendant of a module, then we add the
- // child name directly. Otherwise, we create or reuse an anonymous
- // module and add the child to that.
-
- let module_ = reduced_graph_parent.module();
-
- self.check_for_conflicts_between_external_crates_and_items(&*module_,
- name,
- sp);
-
- // Add or reuse the child.
- let child = module_.children.borrow().get(&name).cloned();
- match child {
- None => {
- let child = Rc::new(NameBindings::new());
- module_.children.borrow_mut().insert(name, child.clone());
- child
- }
- Some(child) => {
- // Enforce the duplicate checking mode:
- //
- // * If we're requesting duplicate module checking, check that
- // there isn't a module in the module with the same name.
- //
- // * If we're requesting duplicate type checking, check that
- // there isn't a type in the module with the same name.
- //
- // * If we're requesting duplicate value checking, check that
- // there isn't a value in the module with the same name.
- //
- // * If we're requesting duplicate type checking and duplicate
- // value checking, check that there isn't a duplicate type
- // and a duplicate value with the same name.
- //
- // * If no duplicate checking was requested at all, do
- // nothing.
-
- let mut duplicate_type = NoError;
- let ns = match duplicate_checking_mode {
- ForbidDuplicateModules => {
- if child.get_module_if_available().is_some() {
- duplicate_type = ModuleError;
- }
- Some(TypeNS)
- }
- ForbidDuplicateTypesAndModules => {
- match child.def_for_namespace(TypeNS) {
- None => {}
- Some(_) if child.get_module_if_available()
- .map(|m| m.kind.get()) ==
- Some(ImplModuleKind) => {}
- Some(_) => duplicate_type = TypeError
- }
- Some(TypeNS)
- }
- ForbidDuplicateValues => {
- if child.defined_in_namespace(ValueNS) {
- duplicate_type = ValueError;
- }
- Some(ValueNS)
- }
- ForbidDuplicateTypesAndValues => {
- let mut n = None;
- match child.def_for_namespace(TypeNS) {
- Some(DefMod(_)) | None => {}
- Some(_) => {
- n = Some(TypeNS);
- duplicate_type = TypeError;
- }
- };
- if child.defined_in_namespace(ValueNS) {
- duplicate_type = ValueError;
- n = Some(ValueNS);
- }
- n
- }
- OverwriteDuplicates => None
- };
- if duplicate_type != NoError {
- // Return an error here by looking up the namespace that
- // had the duplicate.
- let ns = ns.unwrap();
- self.resolve_error(sp,
- format!("duplicate definition of {} `{}`",
- namespace_error_to_string(duplicate_type),
- token::get_name(name))[]);
- {
- let r = child.span_for_namespace(ns);
- for sp in r.iter() {
- self.session.span_note(*sp,
- format!("first definition of {} `{}` here",
- namespace_error_to_string(duplicate_type),
- token::get_name(name))[]);
- }
- }
- }
- child
- }
- }
- }
-
- fn block_needs_anonymous_module(&mut self, block: &Block) -> bool {
- // If the block has view items, we need an anonymous module.
- if block.view_items.len() > 0 {
- return true;
- }
-
- // Check each statement.
- for statement in block.stmts.iter() {
- match statement.node {
- StmtDecl(ref declaration, _) => {
- match declaration.node {
- DeclItem(_) => {
- return true;
- }
- _ => {
- // Keep searching.
- }
- }
- }
- _ => {
- // Keep searching.
- }
- }
- }
-
- // If we found neither view items nor items, we don't need to create
- // an anonymous module.
-
- return false;
- }
-
- fn get_parent_link(&mut self, parent: ReducedGraphParent, name: Name)
- -> ParentLink {
- match parent {
- ModuleReducedGraphParent(module_) => {
- return ModuleParentLink(module_.downgrade(), name);
- }
- }
- }
-
- /// Constructs the reduced graph for one item.
- fn build_reduced_graph_for_item(&mut self,
- item: &Item,
- parent: ReducedGraphParent)
- -> ReducedGraphParent
- {
- let name = item.ident.name;
- let sp = item.span;
- let is_public = item.vis == ast::Public;
- let modifiers = if is_public { PUBLIC } else { DefModifiers::empty() } | IMPORTABLE;
-
- match item.node {
- ItemMod(..) => {
- let name_bindings =
- self.add_child(name, parent.clone(), ForbidDuplicateModules, sp);
-
- let parent_link = self.get_parent_link(parent, name);
- let def_id = DefId { krate: 0, node: item.id };
- name_bindings.define_module(parent_link,
- Some(def_id),
- NormalModuleKind,
- false,
- item.vis == ast::Public,
- sp);
-
- ModuleReducedGraphParent(name_bindings.get_module())
- }
-
- ItemForeignMod(..) => parent,
-
- // These items live in the value namespace.
- ItemStatic(_, m, _) => {
- let name_bindings =
- self.add_child(name, parent.clone(), ForbidDuplicateValues, sp);
- let mutbl = m == ast::MutMutable;
-
- name_bindings.define_value
- (DefStatic(local_def(item.id), mutbl), sp, modifiers);
- parent
- }
- ItemConst(_, _) => {
- self.add_child(name, parent.clone(), ForbidDuplicateValues, sp)
- .define_value(DefConst(local_def(item.id)),
- sp, modifiers);
- parent
- }
- ItemFn(_, _, _, _, _) => {
- let name_bindings =
- self.add_child(name, parent.clone(), ForbidDuplicateValues, sp);
-
- let def = DefFn(local_def(item.id), false);
- name_bindings.define_value(def, sp, modifiers);
- parent
- }
-
- // These items live in the type namespace.
- ItemTy(..) => {
- let name_bindings =
- self.add_child(name,
- parent.clone(),
- ForbidDuplicateTypesAndModules,
- sp);
-
- name_bindings.define_type
- (DefTy(local_def(item.id), false), sp, modifiers);
- parent
- }
-
- ItemEnum(ref enum_definition, _) => {
- let name_bindings =
- self.add_child(name,
- parent.clone(),
- ForbidDuplicateTypesAndModules,
- sp);
-
- name_bindings.define_type
- (DefTy(local_def(item.id), true), sp, modifiers);
-
- let parent_link = self.get_parent_link(parent.clone(), name);
- // We want to make sure the module type is EnumModuleKind
- // even if there's already an ImplModuleKind module defined,
- // since that's how we prevent duplicate enum definitions
- name_bindings.set_module_kind(parent_link,
- Some(local_def(item.id)),
- EnumModuleKind,
- false,
- is_public,
- sp);
-
- for variant in (*enum_definition).variants.iter() {
- self.build_reduced_graph_for_variant(
- &**variant,
- local_def(item.id),
- ModuleReducedGraphParent(name_bindings.get_module()));
- }
- parent
- }
-
- // These items live in both the type and value namespaces.
- ItemStruct(ref struct_def, _) => {
- // Adding to both Type and Value namespaces or just Type?
- let (forbid, ctor_id) = match struct_def.ctor_id {
- Some(ctor_id) => (ForbidDuplicateTypesAndValues, Some(ctor_id)),
- None => (ForbidDuplicateTypesAndModules, None)
- };
-
- let name_bindings = self.add_child(name, parent.clone(), forbid, sp);
-
- // Define a name in the type namespace.
- name_bindings.define_type(DefTy(local_def(item.id), false), sp, modifiers);
-
- // If this is a newtype or unit-like struct, define a name
- // in the value namespace as well
- match ctor_id {
- Some(cid) => {
- name_bindings.define_value(DefStruct(local_def(cid)),
- sp, modifiers);
- }
- None => {}
- }
-
- // Record the def ID and fields of this struct.
- let named_fields = struct_def.fields.iter().filter_map(|f| {
- match f.node.kind {
- NamedField(ident, _) => Some(ident.name),
- UnnamedField(_) => None
- }
- }).collect();
- self.structs.insert(local_def(item.id), named_fields);
-
- parent
- }
-
- ItemImpl(_, _, None, ref ty, ref impl_items) => {
- // If this implements an anonymous trait, then add all the
- // methods within to a new module, if the type was defined
- // within this module.
-
- let mod_name = match ty.node {
- TyPath(ref path, _) if path.segments.len() == 1 => {
- // FIXME(18446) we should distinguish between the name of
- // a trait and the name of an impl of that trait.
- Some(path.segments.last().unwrap().identifier.name)
- }
- TyObjectSum(ref lhs_ty, _) => {
- match lhs_ty.node {
- TyPath(ref path, _) if path.segments.len() == 1 => {
- Some(path.segments.last().unwrap().identifier.name)
- }
- _ => {
- None
- }
- }
- }
- _ => {
- None
- }
- };
-
- match mod_name {
- None => {
- self.resolve_error(ty.span,
- "inherent implementations may \
- only be implemented in the same \
- module as the type they are \
- implemented for")
- }
- Some(mod_name) => {
- // Create the module and add all methods.
- let parent_opt = parent.module().children.borrow()
- .get(&mod_name).cloned();
- let new_parent = match parent_opt {
- // It already exists
- Some(ref child) if child.get_module_if_available()
- .is_some() &&
- (child.get_module().kind.get() == ImplModuleKind ||
- child.get_module().kind.get() == TraitModuleKind) => {
- ModuleReducedGraphParent(child.get_module())
- }
- Some(ref child) if child.get_module_if_available()
- .is_some() &&
- child.get_module().kind.get() ==
- EnumModuleKind => {
- ModuleReducedGraphParent(child.get_module())
- }
- // Create the module
- _ => {
- let name_bindings =
- self.add_child(mod_name,
- parent.clone(),
- ForbidDuplicateModules,
- sp);
-
- let parent_link =
- self.get_parent_link(parent.clone(), name);
- let def_id = local_def(item.id);
- let ns = TypeNS;
- let is_public =
- !name_bindings.defined_in_namespace(ns) ||
- name_bindings.defined_in_public_namespace(ns);
-
- name_bindings.define_module(parent_link,
- Some(def_id),
- ImplModuleKind,
- false,
- is_public,
- sp);
-
- ModuleReducedGraphParent(
- name_bindings.get_module())
- }
- };
-
- // For each implementation item...
- for impl_item in impl_items.iter() {
- match *impl_item {
- MethodImplItem(ref method) => {
- // Add the method to the module.
- let name = method.pe_ident().name;
- let method_name_bindings =
- self.add_child(name,
- new_parent.clone(),
- ForbidDuplicateValues,
- method.span);
- let def = match method.pe_explicit_self()
- .node {
- SelfStatic => {
- // Static methods become
- // `DefStaticMethod`s.
- DefStaticMethod(local_def(method.id),
- FromImpl(local_def(item.id)))
- }
- _ => {
- // Non-static methods become
- // `DefMethod`s.
- DefMethod(local_def(method.id),
- None,
- FromImpl(local_def(item.id)))
- }
- };
-
- // NB: not IMPORTABLE
- let modifiers = if method.pe_vis() == ast::Public {
- PUBLIC
- } else {
- DefModifiers::empty()
- };
- method_name_bindings.define_value(
- def,
- method.span,
- modifiers);
- }
- TypeImplItem(ref typedef) => {
- // Add the typedef to the module.
- let name = typedef.ident.name;
- let typedef_name_bindings =
- self.add_child(
- name,
- new_parent.clone(),
- ForbidDuplicateTypesAndModules,
- typedef.span);
- let def = DefAssociatedTy(local_def(
- typedef.id));
- // NB: not IMPORTABLE
- let modifiers = if typedef.vis == ast::Public {
- PUBLIC
- } else {
- DefModifiers::empty()
- };
- typedef_name_bindings.define_type(
- def,
- typedef.span,
- modifiers);
- }
- }
- }
- }
- }
-
- parent
- }
-
- ItemImpl(_, _, Some(_), _, _) => parent,
-
- ItemTrait(_, _, _, ref items) => {
- let name_bindings =
- self.add_child(name,
- parent.clone(),
- ForbidDuplicateTypesAndModules,
- sp);
-
- // Add all the items within to a new module.
- let parent_link = self.get_parent_link(parent.clone(), name);
- name_bindings.define_module(parent_link,
- Some(local_def(item.id)),
- TraitModuleKind,
- false,
- item.vis == ast::Public,
- sp);
- let module_parent = ModuleReducedGraphParent(name_bindings.
- get_module());
-
- let def_id = local_def(item.id);
-
- // Add the names of all the items to the trait info.
- for trait_item in items.iter() {
- let (name, kind) = match *trait_item {
- ast::RequiredMethod(_) |
- ast::ProvidedMethod(_) => {
- let ty_m = ast_util::trait_item_to_ty_method(trait_item);
-
- let name = ty_m.ident.name;
-
- // Add it as a name in the trait module.
- let (def, static_flag) = match ty_m.explicit_self
- .node {
- SelfStatic => {
- // Static methods become `DefStaticMethod`s.
- (DefStaticMethod(
- local_def(ty_m.id),
- FromTrait(local_def(item.id))),
- StaticMethodTraitItemKind)
- }
- _ => {
- // Non-static methods become `DefMethod`s.
- (DefMethod(local_def(ty_m.id),
- Some(local_def(item.id)),
- FromTrait(local_def(item.id))),
- NonstaticMethodTraitItemKind)
- }
- };
-
- let method_name_bindings =
- self.add_child(name,
- module_parent.clone(),
- ForbidDuplicateTypesAndValues,
- ty_m.span);
- // NB: not IMPORTABLE
- method_name_bindings.define_value(def,
- ty_m.span,
- PUBLIC);
-
- (name, static_flag)
- }
- ast::TypeTraitItem(ref associated_type) => {
- let def = DefAssociatedTy(local_def(
- associated_type.ty_param.id));
-
- let name_bindings =
- self.add_child(associated_type.ty_param.ident.name,
- module_parent.clone(),
- ForbidDuplicateTypesAndValues,
- associated_type.ty_param.span);
- // NB: not IMPORTABLE
- name_bindings.define_type(def,
- associated_type.ty_param.span,
- PUBLIC);
-
- (associated_type.ty_param.ident.name, TypeTraitItemKind)
- }
- };
-
- self.trait_item_map.insert((name, def_id), kind);
- }
-
- name_bindings.define_type(DefTrait(def_id), sp, modifiers);
- parent
- }
- ItemMac(..) => parent
- }
- }
-
- // Constructs the reduced graph for one variant. Variants exist in the
- // type and value namespaces.
- fn build_reduced_graph_for_variant(&mut self,
- variant: &Variant,
- item_id: DefId,
- parent: ReducedGraphParent) {
- let name = variant.node.name.name;
- let is_exported = match variant.node.kind {
- TupleVariantKind(_) => false,
- StructVariantKind(_) => {
- // Not adding fields for variants as they are not accessed with a self receiver
- self.structs.insert(local_def(variant.node.id), Vec::new());
- true
- }
- };
-
- let child = self.add_child(name, parent,
- ForbidDuplicateTypesAndValues,
- variant.span);
- // variants are always treated as importable to allow them to be glob
- // used
- child.define_value(DefVariant(item_id,
- local_def(variant.node.id), is_exported),
- variant.span, PUBLIC | IMPORTABLE);
- child.define_type(DefVariant(item_id,
- local_def(variant.node.id), is_exported),
- variant.span, PUBLIC | IMPORTABLE);
- }
-
- /// Constructs the reduced graph for one 'view item'. View items consist
- /// of imports and use directives.
- fn build_reduced_graph_for_view_item(&mut self, view_item: &ViewItem,
- parent: ReducedGraphParent) {
- match view_item.node {
- ViewItemUse(ref view_path) => {
- // Extract and intern the module part of the path. For
- // globs and lists, the path is found directly in the AST;
- // for simple paths we have to munge the path a little.
- let module_path = match view_path.node {
- ViewPathSimple(_, ref full_path, _) => {
- full_path.segments
- .init()
- .iter().map(|ident| ident.identifier.name)
- .collect()
- }
-
- ViewPathGlob(ref module_ident_path, _) |
- ViewPathList(ref module_ident_path, _, _) => {
- module_ident_path.segments
- .iter().map(|ident| ident.identifier.name).collect()
- }
- };
-
- // Build up the import directives.
- let module_ = parent.module();
- let is_public = view_item.vis == ast::Public;
- let shadowable =
- view_item.attrs
- .iter()
- .any(|attr| {
- attr.name() == token::get_name(
- special_idents::prelude_import.name)
- });
- let shadowable = if shadowable {
- Shadowable::Always
- } else {
- Shadowable::Never
- };
-
- match view_path.node {
- ViewPathSimple(binding, ref full_path, id) => {
- let source_name =
- full_path.segments.last().unwrap().identifier.name;
- if token::get_name(source_name).get() == "mod" {
- self.resolve_error(view_path.span,
- "`mod` imports are only allowed within a { } list");
- }
-
- let subclass = SingleImport(binding.name,
- source_name);
- self.build_import_directive(&*module_,
- module_path,
- subclass,
- view_path.span,
- id,
- is_public,
- shadowable);
- }
- ViewPathList(_, ref source_items, _) => {
- // Make sure there's at most one `mod` import in the list.
- let mod_spans = source_items.iter().filter_map(|item| match item.node {
- PathListMod { .. } => Some(item.span),
- _ => None
- }).collect::<Vec<Span>>();
- if mod_spans.len() > 1 {
- self.resolve_error(mod_spans[0],
- "`mod` import can only appear once in the list");
- for other_span in mod_spans.iter().skip(1) {
- self.session.span_note(*other_span,
- "another `mod` import appears here");
- }
- }
-
- for source_item in source_items.iter() {
- let (module_path, name) = match source_item.node {
- PathListIdent { name, .. } =>
- (module_path.clone(), name.name),
- PathListMod { .. } => {
- let name = match module_path.last() {
- Some(name) => *name,
- None => {
- self.resolve_error(source_item.span,
- "`mod` import can only appear in an import list \
- with a non-empty prefix");
- continue;
- }
- };
- let module_path = module_path.init();
- (module_path.to_vec(), name)
- }
- };
- self.build_import_directive(
- &*module_,
- module_path,
- SingleImport(name, name),
- source_item.span,
- source_item.node.id(),
- is_public,
- shadowable);
- }
- }
- ViewPathGlob(_, id) => {
- self.build_import_directive(&*module_,
- module_path,
- GlobImport,
- view_path.span,
- id,
- is_public,
- shadowable);
- }
- }
- }
-
- ViewItemExternCrate(name, _, node_id) => {
- // n.b. we don't need to look at the path option here, because cstore already did
- for &crate_id in self.session.cstore
- .find_extern_mod_stmt_cnum(node_id).iter() {
- let def_id = DefId { krate: crate_id, node: 0 };
- self.external_exports.insert(def_id);
- let parent_link =
- ModuleParentLink(parent.module().downgrade(), name.name);
- let external_module = Rc::new(Module::new(parent_link,
- Some(def_id),
- NormalModuleKind,
- false,
- true));
- debug!("(build reduced graph for item) found extern `{}`",
- self.module_to_string(&*external_module));
- self.check_for_conflicts_between_external_crates(
- &*parent.module(),
- name.name,
- view_item.span);
- parent.module().external_module_children.borrow_mut()
- .insert(name.name, external_module.clone());
- self.build_reduced_graph_for_external_crate(external_module);
- }
- }
- }
- }
-
- /// Constructs the reduced graph for one foreign item.
- fn build_reduced_graph_for_foreign_item<F>(&mut self,
- foreign_item: &ForeignItem,
- parent: ReducedGraphParent,
- f: F) where
- F: FnOnce(&mut Resolver),
- {
- let name = foreign_item.ident.name;
- let is_public = foreign_item.vis == ast::Public;
- let modifiers = if is_public { PUBLIC } else { DefModifiers::empty() } | IMPORTABLE;
- let name_bindings =
- self.add_child(name, parent, ForbidDuplicateValues,
- foreign_item.span);
-
- match foreign_item.node {
- ForeignItemFn(_, ref generics) => {
- let def = DefFn(local_def(foreign_item.id), false);
- name_bindings.define_value(def, foreign_item.span, modifiers);
-
- self.with_type_parameter_rib(
- HasTypeParameters(generics,
- FnSpace,
- foreign_item.id,
- NormalRibKind),
- f);
- }
- ForeignItemStatic(_, m) => {
- let def = DefStatic(local_def(foreign_item.id), m);
- name_bindings.define_value(def, foreign_item.span, modifiers);
-
- f(self)
- }
- }
- }
-
- fn build_reduced_graph_for_block(&mut self,
- block: &Block,
- parent: ReducedGraphParent)
- -> ReducedGraphParent
- {
- if self.block_needs_anonymous_module(block) {
- let block_id = block.id;
-
- debug!("(building reduced graph for block) creating a new \
- anonymous module for block {}",
- block_id);
-
- let parent_module = parent.module();
- let new_module = Rc::new(Module::new(
- BlockParentLink(parent_module.downgrade(), block_id),
- None,
- AnonymousModuleKind,
- false,
- false));
- parent_module.anonymous_children.borrow_mut()
- .insert(block_id, new_module.clone());
- ModuleReducedGraphParent(new_module)
- } else {
- parent
- }
- }
-
- fn handle_external_def(&mut self,
- def: Def,
- vis: Visibility,
- child_name_bindings: &NameBindings,
- final_ident: &str,
- name: Name,
- new_parent: ReducedGraphParent) {
- debug!("(building reduced graph for \
- external crate) building external def, priv {}",
- vis);
- let is_public = vis == ast::Public;
- let modifiers = if is_public { PUBLIC } else { DefModifiers::empty() } | IMPORTABLE;
- let is_exported = is_public && match new_parent {
- ModuleReducedGraphParent(ref module) => {
- match module.def_id.get() {
- None => true,
- Some(did) => self.external_exports.contains(&did)
- }
- }
- };
- if is_exported {
- self.external_exports.insert(def.def_id());
- }
-
- let kind = match def {
- DefTy(_, true) => EnumModuleKind,
- DefStruct(..) | DefTy(..) => ImplModuleKind,
- _ => NormalModuleKind
- };
-
- match def {
- DefMod(def_id) | DefForeignMod(def_id) | DefStruct(def_id) |
- DefTy(def_id, _) => {
- let type_def = child_name_bindings.type_def.borrow().clone();
- match type_def {
- Some(TypeNsDef { module_def: Some(module_def), .. }) => {
- debug!("(building reduced graph for external crate) \
- already created module");
- module_def.def_id.set(Some(def_id));
- }
- Some(_) | None => {
- debug!("(building reduced graph for \
- external crate) building module \
- {}", final_ident);
- let parent_link = self.get_parent_link(new_parent.clone(), name);
-
- child_name_bindings.define_module(parent_link,
- Some(def_id),
- kind,
- true,
- is_public,
- DUMMY_SP);
- }
- }
- }
- _ => {}
- }
-
- match def {
- DefMod(_) | DefForeignMod(_) => {}
- DefVariant(_, variant_id, is_struct) => {
- debug!("(building reduced graph for external crate) building \
- variant {}",
- final_ident);
- // variants are always treated as importable to allow them to be
- // glob used
- let modifiers = PUBLIC | IMPORTABLE;
- if is_struct {
- child_name_bindings.define_type(def, DUMMY_SP, modifiers);
- // Not adding fields for variants as they are not accessed with a self receiver
- self.structs.insert(variant_id, Vec::new());
- } else {
- child_name_bindings.define_value(def, DUMMY_SP, modifiers);
- }
- }
- DefFn(ctor_id, true) => {
- child_name_bindings.define_value(
- csearch::get_tuple_struct_definition_if_ctor(&self.session.cstore, ctor_id)
- .map_or(def, |_| DefStruct(ctor_id)), DUMMY_SP, modifiers);
- }
- DefFn(..) | DefStaticMethod(..) | DefStatic(..) | DefConst(..) | DefMethod(..) => {
- debug!("(building reduced graph for external \
- crate) building value (fn/static) {}", final_ident);
- // impl methods have already been defined with the correct importability modifier
- let mut modifiers = match *child_name_bindings.value_def.borrow() {
- Some(ref def) => (modifiers & !IMPORTABLE) | (def.modifiers & IMPORTABLE),
- None => modifiers
- };
- if new_parent.module().kind.get() != NormalModuleKind {
- modifiers = modifiers & !IMPORTABLE;
- }
- child_name_bindings.define_value(def, DUMMY_SP, modifiers);
- }
- DefTrait(def_id) => {
- debug!("(building reduced graph for external \
- crate) building type {}", final_ident);
-
- // If this is a trait, add all the trait item names to the trait
- // info.
-
- let trait_item_def_ids =
- csearch::get_trait_item_def_ids(&self.session.cstore, def_id);
- for trait_item_def_id in trait_item_def_ids.iter() {
- let (trait_item_name, trait_item_kind) =
- csearch::get_trait_item_name_and_kind(
- &self.session.cstore,
- trait_item_def_id.def_id());
-
- debug!("(building reduced graph for external crate) ... \
- adding trait item '{}'",
- token::get_name(trait_item_name));
-
- self.trait_item_map.insert((trait_item_name, def_id), trait_item_kind);
-
- if is_exported {
- self.external_exports
- .insert(trait_item_def_id.def_id());
- }
- }
-
- child_name_bindings.define_type(def, DUMMY_SP, modifiers);
-
- // Define a module if necessary.
- let parent_link = self.get_parent_link(new_parent, name);
- child_name_bindings.set_module_kind(parent_link,
- Some(def_id),
- TraitModuleKind,
- true,
- is_public,
- DUMMY_SP)
- }
- DefTy(..) | DefAssociatedTy(..) | DefAssociatedPath(..) => {
- debug!("(building reduced graph for external \
- crate) building type {}", final_ident);
-
- child_name_bindings.define_type(def, DUMMY_SP, modifiers);
- }
- DefStruct(def_id) => {
- debug!("(building reduced graph for external \
- crate) building type and value for {}",
- final_ident);
- child_name_bindings.define_type(def, DUMMY_SP, modifiers);
- let fields = csearch::get_struct_fields(&self.session.cstore, def_id).iter().map(|f| {
- f.name
- }).collect::<Vec<_>>();
-
- if fields.len() == 0 {
- child_name_bindings.define_value(def, DUMMY_SP, modifiers);
- }
-
- // Record the def ID and fields of this struct.
- self.structs.insert(def_id, fields);
- }
- DefLocal(..) | DefPrimTy(..) | DefTyParam(..) |
- DefUse(..) | DefUpvar(..) | DefRegion(..) |
- DefTyParamBinder(..) | DefLabel(..) | DefSelfTy(..) => {
- panic!("didn't expect `{}`", def);
- }
- }
- }
-
- /// Builds the reduced graph for a single item in an external crate.
- fn build_reduced_graph_for_external_crate_def(&mut self,
- root: Rc<Module>,
- def_like: DefLike,
- name: Name,
- visibility: Visibility) {
- match def_like {
- DlDef(def) => {
- // Add the new child item, if necessary.
- match def {
- DefForeignMod(def_id) => {
- // Foreign modules have no names. Recur and populate
- // eagerly.
- csearch::each_child_of_item(&self.session.cstore,
- def_id,
- |def_like,
- child_name,
- vis| {
- self.build_reduced_graph_for_external_crate_def(
- root.clone(),
- def_like,
- child_name,
- vis)
- });
- }
- _ => {
- let child_name_bindings =
- self.add_child(name,
- ModuleReducedGraphParent(root.clone()),
- OverwriteDuplicates,
- DUMMY_SP);
-
- self.handle_external_def(def,
- visibility,
- &*child_name_bindings,
- token::get_name(name).get(),
- name,
- ModuleReducedGraphParent(root));
- }
- }
- }
- DlImpl(def) => {
- match csearch::get_type_name_if_impl(&self.session.cstore, def) {
- None => {}
- Some(final_name) => {
- let methods_opt =
- csearch::get_methods_if_impl(&self.session.cstore, def);
- match methods_opt {
- Some(ref methods) if
- methods.len() >= 1 => {
- debug!("(building reduced graph for \
- external crate) processing \
- static methods for type name {}",
- token::get_name(final_name));
-
- let child_name_bindings =
- self.add_child(
- final_name,
- ModuleReducedGraphParent(root.clone()),
- OverwriteDuplicates,
- DUMMY_SP);
-
- // Process the static methods. First,
- // create the module.
- let type_module;
- let type_def = child_name_bindings.type_def.borrow().clone();
- match type_def {
- Some(TypeNsDef {
- module_def: Some(module_def),
- ..
- }) => {
- // We already have a module. This
- // is OK.
- type_module = module_def;
-
- // Mark it as an impl module if
- // necessary.
- type_module.kind.set(ImplModuleKind);
- }
- Some(_) | None => {
- let parent_link =
- self.get_parent_link(ModuleReducedGraphParent(root),
- final_name);
- child_name_bindings.define_module(
- parent_link,
- Some(def),
- ImplModuleKind,
- true,
- true,
- DUMMY_SP);
- type_module =
- child_name_bindings.
- get_module();
- }
- }
-
- // Add each static method to the module.
- let new_parent =
- ModuleReducedGraphParent(type_module);
- for method_info in methods.iter() {
- let name = method_info.name;
- debug!("(building reduced graph for \
- external crate) creating \
- static method '{}'",
- token::get_name(name));
-
- let method_name_bindings =
- self.add_child(name,
- new_parent.clone(),
- OverwriteDuplicates,
- DUMMY_SP);
- let def = DefFn(method_info.def_id, false);
-
- // NB: not IMPORTABLE
- let modifiers = if visibility == ast::Public {
- PUBLIC
- } else {
- DefModifiers::empty()
- };
- method_name_bindings.define_value(
- def, DUMMY_SP, modifiers);
- }
- }
-
- // Otherwise, do nothing.
- Some(_) | None => {}
- }
- }
- }
- }
- DlField => {
- debug!("(building reduced graph for external crate) \
- ignoring field");
- }
- }
- }
-
- /// Builds the reduced graph rooted at the given external module.
- fn populate_external_module(&mut self, module: Rc<Module>) {
- debug!("(populating external module) attempting to populate {}",
- self.module_to_string(&*module));
-
- let def_id = match module.def_id.get() {
- None => {
- debug!("(populating external module) ... no def ID!");
- return
- }
- Some(def_id) => def_id,
- };
-
- csearch::each_child_of_item(&self.session.cstore,
- def_id,
- |def_like, child_name, visibility| {
- debug!("(populating external module) ... found ident: {}",
- token::get_name(child_name));
- self.build_reduced_graph_for_external_crate_def(module.clone(),
- def_like,
- child_name,
- visibility)
- });
- module.populated.set(true)
- }
-
- /// Ensures that the reduced graph rooted at the given external module
- /// is built, building it if it is not.
- fn populate_module_if_necessary(&mut self, module: &Rc<Module>) {
- if !module.populated.get() {
- self.populate_external_module(module.clone())
- }
- assert!(module.populated.get())
- }
-
- /// Builds the reduced graph rooted at the 'use' directive for an external
- /// crate.
- fn build_reduced_graph_for_external_crate(&mut self, root: Rc<Module>) {
- csearch::each_top_level_item_of_crate(&self.session.cstore,
- root.def_id
- .get()
- .unwrap()
- .krate,
- |def_like, name, visibility| {
- self.build_reduced_graph_for_external_crate_def(root.clone(),
- def_like,
- name,
- visibility)
- });
- }
-
- /// Creates and adds an import directive to the given module.
- fn build_import_directive(&mut self,
- module_: &Module,
- module_path: Vec<Name>,
- subclass: ImportDirectiveSubclass,
- span: Span,
- id: NodeId,
- is_public: bool,
- shadowable: Shadowable) {
- module_.imports.borrow_mut().push(ImportDirective::new(module_path,
- subclass,
- span,
- id,
- is_public,
- shadowable));
- self.unresolved_imports += 1;
- // Bump the reference count on the name. Or, if this is a glob, set
- // the appropriate flag.
-
- match subclass {
- SingleImport(target, _) => {
- debug!("(building import directive) building import \
- directive: {}::{}",
- self.names_to_string(module_.imports.borrow().last().unwrap()
- .module_path[]),
- token::get_name(target));
-
- let mut import_resolutions = module_.import_resolutions
- .borrow_mut();
- match import_resolutions.get_mut(&target) {
- Some(resolution) => {
- debug!("(building import directive) bumping \
- reference");
- resolution.outstanding_references += 1;
-
- // the source of this name is different now
- resolution.type_id = id;
- resolution.value_id = id;
- resolution.is_public = is_public;
- return;
- }
- None => {}
- }
- debug!("(building import directive) creating new");
- let mut resolution = ImportResolution::new(id, is_public);
- resolution.outstanding_references = 1;
- import_resolutions.insert(target, resolution);
- }
- GlobImport => {
- // Set the glob flag. This tells us that we don't know the
- // module's exports ahead of time.
-
- module_.glob_count.set(module_.glob_count.get() + 1);
- }
- }
- }
-
// Import resolution
//
// This is a fixed-point algorithm. We resolve imports until our efforts
self.resolve_imports_for_module(module_.clone());
self.current_module = orig_module;
- self.populate_module_if_necessary(&module_);
+ build_reduced_graph::populate_module_if_necessary(self, &module_);
for (_, child_node) in module_.children.borrow().iter() {
match child_node.get_module_if_available() {
None => {
let mut type_result = UnknownResult;
// Search for direct children of the containing module.
- self.populate_module_if_necessary(&containing_module);
+ build_reduced_graph::populate_module_if_necessary(self, &containing_module);
match containing_module.children.borrow().get(&source) {
None => {
}
// Add all children from the containing module.
- self.populate_module_if_necessary(&containing_module);
+ build_reduced_graph::populate_module_if_necessary(self, &containing_module);
for (&name, name_bindings) in containing_module.children.borrow().iter() {
self.merge_import_resolution(module_,
// The current module node is handled specially. First, check for
// its immediate children.
- self.populate_module_if_necessary(&module_);
+ build_reduced_graph::populate_module_if_necessary(self, &module_);
match module_.children.borrow().get(&name) {
Some(name_bindings)
self.module_to_string(&*module_));
// First, check the direct children of the module.
- self.populate_module_if_necessary(&module_);
+ build_reduced_graph::populate_module_if_necessary(self, &module_);
match module_.children.borrow().get(&name) {
Some(name_bindings)
}
// Descend into children and anonymous children.
- self.populate_module_if_necessary(&module_);
+ build_reduced_graph::populate_module_if_necessary(self, &module_);
for (_, child_node) in module_.children.borrow().iter() {
match child_node.get_module_if_available() {
// Nothing to do.
}
Some(name) => {
- self.populate_module_if_necessary(&orig_module);
+ build_reduced_graph::populate_module_if_necessary(self, &orig_module);
match orig_module.children.borrow().get(&name) {
None => {
namespace: Namespace)
-> NameDefinition {
// First, search children.
- self.populate_module_if_necessary(&containing_module);
+ build_reduced_graph::populate_module_if_necessary(self, &containing_module);
match containing_module.children.borrow().get(&name) {
Some(child_name_bindings) => {
}
// Look for trait children.
- self.populate_module_if_necessary(&search_module);
+ build_reduced_graph::populate_module_if_necessary(self, &search_module);
{
for (_, child_names) in search_module.children.borrow().iter() {
debug!("Dump of module `{}`:", self.module_to_string(&*module_));
debug!("Children:");
- self.populate_module_if_necessary(&module_);
+ build_reduced_graph::populate_module_if_necessary(self, &module_);
for (&name, _) in module_.children.borrow().iter() {
debug!("* {}", token::get_name(name));
}
-> CrateMap {
let mut resolver = Resolver::new(session, ast_map, krate.span, make_glob_map);
- resolver.build_reduced_graph(krate);
+ build_reduced_graph::build_reduced_graph(&mut resolver, krate);
session.abort_if_errors();
resolver.resolve_imports();
use {Module, NameBindings, Resolver};
use Namespace::{mod, TypeNS, ValueNS};
+use build_reduced_graph;
+
use rustc::middle::def::Export;
use syntax::ast;
use syntax::parse::token;
}
self.record_exports_for_module(&*module_);
- self.populate_module_if_necessary(&module_);
+ build_reduced_graph::populate_module_if_necessary(self.resolver, &module_);
for (_, child_name_bindings) in module_.children.borrow().iter() {
match child_name_bindings.get_module_if_available() {
use trans::cleanup;
use trans::closure;
use trans::common::{Block, C_bool, C_bytes_in_context, C_i32, C_integral};
-use trans::common::{C_null, C_struct_in_context, C_u64, C_u8, C_uint, C_undef};
+use trans::common::{C_null, C_struct_in_context, C_u64, C_u8, C_undef};
use trans::common::{CrateContext, ExternMap, FunctionContext};
use trans::common::{NodeInfo, Result};
use trans::common::{node_id_type, return_type_is_void};
use trans::inline;
use trans::intrinsic;
use trans::machine;
-use trans::machine::{llsize_of, llsize_of_real, llalign_of_min};
+use trans::machine::{llsize_of, llsize_of_real};
use trans::meth;
use trans::monomorphize;
use trans::tvec;
Result::new(r.bcx, PointerCast(r.bcx, r.val, llty_ptr))
}
-pub fn malloc_raw_dyn_proc<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>)
- -> Result<'blk, 'tcx> {
- let _icx = push_ctxt("malloc_raw_dyn_proc");
- let ccx = bcx.ccx();
-
- // Grab the TypeRef type of ptr_ty.
- let ptr_ty = ty::mk_uniq(bcx.tcx(), t);
- let ptr_llty = type_of(ccx, ptr_ty);
-
- let llty = type_of(bcx.ccx(), t);
- let size = llsize_of(bcx.ccx(), llty);
- let llalign = C_uint(ccx, llalign_of_min(bcx.ccx(), llty));
-
- // Allocate space and store the destructor pointer:
- let Result {bcx, val: llbox} = malloc_raw_dyn(bcx, ptr_llty, t, size, llalign);
- let dtor_ptr = GEPi(bcx, llbox, &[0u, abi::BOX_FIELD_DROP_GLUE]);
- let drop_glue_field_ty = type_of(ccx, ty::mk_nil_ptr(bcx.tcx()));
- let drop_glue = PointerCast(bcx, glue::get_drop_glue(ccx, ty::mk_uniq(bcx.tcx(), t)),
- drop_glue_field_ty);
- Store(bcx, drop_glue, dtor_ptr);
-
- Result::new(bcx, llbox)
-}
-
// Type descriptor and type glue stuff
pub fn get_tydesc<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
unsafe {
llvm::LLVMPositionBuilderAtEnd(bld, llbb);
+ debuginfo::insert_reference_to_gdb_debug_scripts_section_global(ccx);
+
let (start_fn, args) = if use_start_lang_item {
let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) {
Ok(id) => id,
ty::mk_tup(tcx, vec!(tcx.types.uint, ty::mk_nil_ptr(tcx), ptr, ptr, t))
}
-fn allocate_cbox<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
- store: ty::TraitStore,
- cdata_ty: Ty<'tcx>)
- -> Result<'blk, 'tcx> {
- let _icx = push_ctxt("closure::allocate_cbox");
- let tcx = bcx.tcx();
-
- // Allocate and initialize the box:
- let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
- match store {
- ty::UniqTraitStore => {
- malloc_raw_dyn_proc(bcx, cbox_ty)
- }
- ty::RegionTraitStore(..) => {
- let llbox = alloc_ty(bcx, cbox_ty, "__closure");
- Result::new(bcx, llbox)
- }
- }
-}
-
pub struct ClosureResult<'blk, 'tcx: 'blk> {
llbox: ValueRef, // llvalue of ptr to closure
cdata_ty: Ty<'tcx>, // type of the closure data
// heap allocated closure that copies the upvars into environment.
// Otherwise, it is stack allocated and copies pointers to the upvars.
pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
- bound_values: Vec<EnvValue<'tcx>> ,
- store: ty::TraitStore)
+ bound_values: Vec<EnvValue<'tcx>>)
-> ClosureResult<'blk, 'tcx> {
let _icx = push_ctxt("closure::store_environment");
let ccx = bcx.ccx();
}
// allocate closure in the heap
- let Result {bcx, val: llbox} = allocate_cbox(bcx, store, cdata_ty);
+ let llbox = alloc_ty(bcx, cbox_ty, "__closure");
let llbox = PointerCast(bcx, llbox, llboxptr_ty);
debug!("tuplify_box_ty = {}", ty_to_string(tcx, cbox_ty));
// collects the upvars and packages them up for store_environment.
fn build_closure<'blk, 'tcx>(bcx0: Block<'blk, 'tcx>,
freevar_mode: ast::CaptureClause,
- freevars: &Vec<ty::Freevar>,
- store: ty::TraitStore)
+ freevars: &Vec<ty::Freevar>)
-> ClosureResult<'blk, 'tcx> {
let _icx = push_ctxt("closure::build_closure");
env_vals.push(EnvValue {action: freevar_mode, datum: datum});
}
- store_environment(bcx, env_vals, store)
+ store_environment(bcx, env_vals)
}
// Given an enclosing block context, a new function context, a closure type,
llbox,
cdata_ty,
bcx
- } = build_closure(bcx, freevar_mode, &freevars, store);
+ } = build_closure(bcx, freevar_mode, &freevars);
trans_closure(ccx,
decl,
use std::rc::{Rc, Weak};
use syntax::util::interner::Interner;
use syntax::codemap::{Span, Pos};
-use syntax::{ast, codemap, ast_util, ast_map};
+use syntax::{ast, codemap, ast_util, ast_map, attr};
use syntax::ast_util::PostExpansionMethod;
use syntax::parse::token::{mod, special_idents};
}
debug!("finalize");
- compile_unit_metadata(cx);
+ let _ = compile_unit_metadata(cx);
+
+ if needs_gdb_debug_scripts_section(cx) {
+ // Add a .debug_gdb_scripts section to this compile-unit. This will
+ // cause GDB to try and load the gdb_load_rust_pretty_printers.py file,
+ // which activates the Rust pretty printers for the binary this
+ // section is contained in.
+ get_or_insert_gdb_debug_scripts_section_global(cx);
+ }
+
unsafe {
llvm::LLVMDIBuilderFinalize(DIB(cx));
llvm::LLVMDIBuilderDispose(DIB(cx));
};
}
-fn compile_unit_metadata(cx: &CrateContext) {
+fn compile_unit_metadata(cx: &CrateContext) -> DIDescriptor {
let work_dir = &cx.sess().working_dir;
let compile_unit_name = match cx.sess().local_crate_source_file {
None => fallback_path(cx),
(option_env!("CFG_VERSION")).expect("CFG_VERSION"));
let compile_unit_name = compile_unit_name.as_ptr();
- work_dir.as_vec().with_c_str(|work_dir| {
+ return work_dir.as_vec().with_c_str(|work_dir| {
producer.with_c_str(|producer| {
"".with_c_str(|flags| {
"".with_c_str(|split_name| {
cx.sess().opts.optimize != config::No,
flags,
0,
- split_name);
+ split_name)
}
})
})
}
})
}
+
+
+//=-----------------------------------------------------------------------------
+// .debug_gdb_scripts binary section
+//=-----------------------------------------------------------------------------
+
+/// Inserts a side-effect free instruction sequence that makes sure that the
+/// .debug_gdb_scripts global is referenced, so it isn't removed by the linker.
+pub fn insert_reference_to_gdb_debug_scripts_section_global(ccx: &CrateContext) {
+ if needs_gdb_debug_scripts_section(ccx) {
+ let empty = b"".to_c_str();
+ let gdb_debug_scripts_section_global =
+ get_or_insert_gdb_debug_scripts_section_global(ccx);
+ unsafe {
+ let volative_load_instruction =
+ llvm::LLVMBuildLoad(ccx.raw_builder(),
+ gdb_debug_scripts_section_global,
+ empty.as_ptr());
+ llvm::LLVMSetVolatile(volative_load_instruction, llvm::True);
+ }
+ }
+}
+
+/// Allocates the global variable responsible for the .debug_gdb_scripts binary
+/// section.
+fn get_or_insert_gdb_debug_scripts_section_global(ccx: &CrateContext)
+ -> llvm::ValueRef {
+ let section_var_name = b"__rustc_debug_gdb_scripts_section__".to_c_str();
+
+ let section_var = unsafe {
+ llvm::LLVMGetNamedGlobal(ccx.llmod(), section_var_name.as_ptr())
+ };
+
+ if section_var == ptr::null_mut() {
+ let section_name = b".debug_gdb_scripts".to_c_str();
+ let section_contents = b"\x01gdb_load_rust_pretty_printers.py\0";
+
+ unsafe {
+ let llvm_type = Type::array(&Type::i8(ccx),
+ section_contents.len() as u64);
+ let section_var = llvm::LLVMAddGlobal(ccx.llmod(),
+ llvm_type.to_ref(),
+ section_var_name.as_ptr());
+ llvm::LLVMSetSection(section_var, section_name.as_ptr());
+ llvm::LLVMSetInitializer(section_var, C_bytes(ccx, section_contents));
+ llvm::LLVMSetGlobalConstant(section_var, llvm::True);
+ llvm::LLVMSetUnnamedAddr(section_var, llvm::True);
+ llvm::SetLinkage(section_var, llvm::Linkage::LinkOnceODRLinkage);
+ // This should make sure that the whole section is not larger than
+ // the string it contains. Otherwise we get a warning from GDB.
+ llvm::LLVMSetAlignment(section_var, 1);
+ section_var
+ }
+ } else {
+ section_var
+ }
+}
+
+fn needs_gdb_debug_scripts_section(ccx: &CrateContext) -> bool {
+ let omit_gdb_pretty_printer_section =
+ attr::contains_name(ccx.tcx()
+ .map
+ .krate()
+ .attrs
+ .as_slice(),
+ "omit_gdb_pretty_printer_section");
+
+ !omit_gdb_pretty_printer_section &&
+ !ccx.sess().target.target.options.is_like_osx &&
+ !ccx.sess().target.target.options.is_like_windows &&
+ ccx.sess().opts.debuginfo != NoDebugInfo
+}
+
}
}
- /// Returns a vec of error messages. If hte vec is empty - no errors!
+ /// Returns a vec of error messages. If the vec is empty - no errors!
///
/// There are some limitations to calling functions through an object, because (a) the self
/// type is not known (that's the whole point of a trait instance, after all, to obscure the
- /// self type) and (b) the call must go through a vtable and hence cannot be monomorphized.
+ /// self type), (b) the call must go through a vtable and hence cannot be monomorphized and
+ /// (c) the trait contains static methods which can't be called because we don't know the
+ /// concrete type.
fn check_object_safety_of_method<'tcx>(tcx: &ty::ctxt<'tcx>,
object_trait: &ty::PolyTraitRef<'tcx>,
method: &ty::Method<'tcx>)
}
ty::StaticExplicitSelfCategory => {
- // Static methods are always object-safe since they
- // can't be called through a trait object
- return msgs
+ // Static methods are never object safe (reason (c)).
+ msgs.push(format!("cannot call a static method (`{}`) \
+ through a trait object",
+ method_name));
+ return msgs;
}
ty::ByReferenceExplicitSelfCategory(..) |
ty::ByBoxExplicitSelfCategory => {}
/// External iterator for a CString's bytes.
///
/// Use with the `std::iter` module.
+#[allow(raw_pointer_deriving)]
+#[deriving(Clone)]
pub struct CChars<'a> {
ptr: *const libc::c_char,
marker: marker::ContravariantLifetime<'a>,
}
}
+#[stable]
impl<K: Eq + Hash<S>, V: PartialEq, S, H: Hasher<S>> PartialEq for HashMap<K, V, H> {
fn eq(&self, other: &HashMap<K, V, H>) -> bool {
if self.len() != other.len() { return false; }
}
}
+#[stable]
impl<K: Eq + Hash<S>, V: Eq, S, H: Hasher<S>> Eq for HashMap<K, V, H> {}
impl<K: Eq + Hash<S> + Show, V: Show, S, H: Hasher<S>> Show for HashMap<K, V, H> {
inner: table::Iter<'a, K, V>
}
+// FIXME(#19839) Remove in favor of `#[deriving(Clone)]`
+impl<'a, K, V> Clone for Entries<'a, K, V> {
+ fn clone(&self) -> Entries<'a, K, V> {
+ Entries {
+ inner: self.inner.clone()
+ }
+ }
+}
+
/// HashMap mutable values iterator
pub struct IterMut<'a, K: 'a, V: 'a> {
inner: table::IterMut<'a, K, V>
inner: Map<(&'a K, &'a V), &'a K, Iter<'a, K, V>, fn((&'a K, &'a V)) -> &'a K>
}
+// FIXME(#19839) Remove in favor of `#[deriving(Clone)]`
+impl<'a, K, V> Clone for Keys<'a, K, V> {
+ fn clone(&self) -> Keys<'a, K, V> {
+ Keys {
+ inner: self.inner.clone()
+ }
+ }
+}
+
/// HashMap values iterator
pub struct Values<'a, K: 'a, V: 'a> {
inner: Map<(&'a K, &'a V), &'a V, Iter<'a, K, V>, fn((&'a K, &'a V)) -> &'a V>
}
+// FIXME(#19839) Remove in favor of `#[deriving(Clone)]`
+impl<'a, K, V> Clone for Values<'a, K, V> {
+ fn clone(&self) -> Values<'a, K, V> {
+ Values {
+ inner: self.inner.clone()
+ }
+ }
+}
+
/// HashMap drain iterator
pub struct Drain<'a, K: 'a, V: 'a> {
inner: iter::Map<
}
}
+#[stable]
impl<T: Eq + Hash<S>, S, H: Hasher<S>> PartialEq for HashSet<T, H> {
fn eq(&self, other: &HashSet<T, H>) -> bool {
if self.len() != other.len() { return false; }
}
}
+#[stable]
impl<T: Eq + Hash<S>, S, H: Hasher<S>> Eq for HashSet<T, H> {}
impl<T: Eq + Hash<S> + fmt::Show, S, H: Hasher<S>> fmt::Show for HashSet<T, H> {
marker: marker::ContravariantLifetime<'a>,
}
+// FIXME(#19839) Remove in favor of `#[deriving(Clone)]`
+impl<'a, K, V> Clone for RawBuckets<'a, K, V> {
+ fn clone(&self) -> RawBuckets<'a, K, V> {
+ RawBuckets {
+ raw: self.raw,
+ hashes_end: self.hashes_end,
+ marker: marker::ContravariantLifetime,
+ }
+ }
+}
+
+
impl<'a, K, V> Iterator<RawBucket<K, V>> for RawBuckets<'a, K, V> {
fn next(&mut self) -> Option<RawBucket<K, V>> {
while self.raw.hash != self.hashes_end {
elems_left: uint,
}
+// FIXME(#19839) Remove in favor of `#[deriving(Clone)]`
+impl<'a, K, V> Clone for Entries<'a, K, V> {
+ fn clone(&self) -> Entries<'a, K, V> {
+ Entries {
+ iter: self.iter.clone(),
+ elems_left: self.elems_left
+ }
+ }
+}
+
+
/// Iterator over mutable references to entries in a table.
pub struct IterMut<'a, K: 'a, V: 'a> {
iter: RawBuckets<'a, K, V>,
}
/// An iterator that walks over a directory
+#[deriving(Clone)]
pub struct Directories {
stack: Vec<Path>,
}
//! Synchronous DNS Resolution
//!
-//! Contains the functionality to perform DNS resolution in a style related to
-//! `getaddrinfo()`
+//! Contains the functionality to perform DNS resolution or reverse lookup,
+//! in a style related to `getaddrinfo()` and `getnameinfo()`, respectively.
#![allow(missing_docs)]
use io::net::ip::{SocketAddr, IpAddr};
use option::Option;
use option::Option::{Some, None};
+use string::String;
use sys;
use vec::Vec;
lookup(Some(host), None, None).map(|a| a.into_iter().map(|i| i.address.ip).collect())
}
+/// Reverse name resolution. Given an address, returns the corresponding
+/// hostname.
+pub fn get_address_name(addr: IpAddr) -> IoResult<String> {
+ sys::addrinfo::get_address_name(addr)
+}
+
/// Full-fledged resolution. This function will perform a synchronous call to
/// getaddrinfo, controlled by the parameters
///
//! ```
use self::StdSource::*;
+use prelude::*;
-use boxed::Box;
use cell::RefCell;
-use clone::Clone;
use failure::LOCAL_STDERR;
use fmt;
-use io::{Reader, Writer, IoResult, IoError, OtherIoError, Buffer,
- standard_error, EndOfFile, LineBufferedWriter, BufferedReader};
-use kinds::{Sync, Send};
+use io::{IoResult, IoError, OtherIoError};
+use io::{standard_error, EndOfFile, LineBufferedWriter, BufferedReader};
use libc;
use mem;
-use option::Option;
-use option::Option::{Some, None};
-use ops::{Deref, DerefMut, FnOnce};
-use result::Result::{Ok, Err};
use rt;
-use slice::SliceExt;
-use str::StrExt;
-use string::String;
use sys::{fs, tty};
-use sync::{Arc, Mutex, MutexGuard, Once, ONCE_INIT};
+use sync::{Arc, Mutex, MutexGuard, StaticMutex, MUTEX_INIT};
use uint;
-use vec::Vec;
// And so begins the tale of acquiring a uv handle to a stdio stream on all
// platforms in all situations. Our story begins by splitting the world into two
pub fn stdin() -> StdinReader {
// We're following the same strategy as kimundi's lazy_static library
static mut STDIN: *const StdinReader = 0 as *const StdinReader;
- static ONCE: Once = ONCE_INIT;
+ static LOCK: StaticMutex = MUTEX_INIT;
unsafe {
- ONCE.doit(|| {
- // The default buffer capacity is 64k, but apparently windows doesn't like
- // 64k reads on stdin. See #13304 for details, but the idea is that on
- // windows we use a slightly smaller buffer that's been seen to be
- // acceptable.
+ let _g = LOCK.lock();
+ if STDIN as uint == 0 {
+ // The default buffer capacity is 64k, but apparently windows
+ // doesn't like 64k reads on stdin. See #13304 for details, but the
+ // idea is that on windows we use a slightly smaller buffer that's
+ // been seen to be acceptable.
let stdin = if cfg!(windows) {
BufferedReader::with_capacity(8 * 1024, stdin_raw())
} else {
// Make sure to free it at exit
rt::at_exit(|| {
- mem::transmute::<_, Box<StdinReader>>(STDIN);
- STDIN = 0 as *const _;
+ let g = LOCK.lock();
+ let stdin = STDIN;
+ STDIN = 1 as *const _;
+ drop(g);
+ mem::transmute::<_, Box<StdinReader>>(stdin);
});
- });
-
+ } else if STDIN as uint == 1 {
+ panic!("accessing stdin after the main thread has exited")
+ }
(*STDIN).clone()
}
}
/// A `Reader` which chains input from multiple `Reader`s, reading each to
/// completion before moving onto the next.
+#[deriving(Clone)]
pub struct ChainedReader<I, R> {
readers: I,
cur_reader: Option<R>,
}
/// An adaptor converting an `Iterator<u8>` to a `Reader`.
+#[deriving(Clone)]
pub struct IterReader<T> {
iter: T,
}
pub mod sync;
pub mod comm;
+#[path = "sys/common/mod.rs"] mod sys_common;
+
#[cfg(unix)]
#[path = "sys/unix/mod.rs"] mod sys;
#[cfg(windows)]
#[path = "sys/windows/mod.rs"] mod sys;
-#[path = "sys/common/mod.rs"] mod sys_common;
-
pub mod rt;
mod failure;
extern crate libc;
use io::{IoResult};
+ use kinds::Sync;
use mem;
use os;
use rand::Rng;
#[repr(C)]
struct SecRandom;
+ unsafe impl Sync for *const SecRandom {}
+
#[allow(non_upper_case_globals)]
static kSecRandomDefault: *const SecRandom = 0 as *const SecRandom;
static LOCK: Mutex = MUTEX_INIT;
static mut QUEUE: *mut Queue = 0 as *mut Queue;
+const DTOR_RUN_ITERS: uint = 10;
+
unsafe fn init() {
if QUEUE.is_null() {
let state: Box<Queue> = box Vec::new();
unsafe {
LOCK.lock();
let queue = QUEUE;
- QUEUE = 1 as *mut _;
+ QUEUE = 1u as *mut _;
LOCK.unlock();
// make sure we're not recursively cleaning up
// but we just do this to name the main thread and to give it correct
// info about the stack bounds.
let thread: Thread = NewThread::new(Some("<main>".to_string()));
- thread_info::set((my_stack_bottom, my_stack_top),
- sys::thread::guard::main(),
- thread);
+ thread_info::set(sys::thread::guard::main(), thread);
// By default, some platforms will send a *signal* when a EPIPE error
// would otherwise be delivered. This runtime doesn't install a SIGPIPE
}
}
-/// Enqueues a procedure to run when the runtime is cleaned up
-///
-/// The procedure passed to this function will be executed as part of the
-/// runtime cleanup phase. For normal rust programs, this means that it will run
-/// after all other threads have exited.
-///
-/// The procedure is *not* executed with a local `Thread` available to it, so
-/// primitives like logging, I/O, channels, spawning, etc, are *not* available.
-/// This is meant for "bare bones" usage to clean up runtime details, this is
-/// not meant as a general-purpose "let's clean everything up" function.
+/// Enqueues a procedure to run when the main thread exits.
///
/// It is forbidden for procedures to register more `at_exit` handlers when they
/// are running, and doing so will lead to a process abort.
-pub fn at_exit<F:FnOnce()+Send>(f: F) {
+///
+/// Note that other threads may still be running when `at_exit` routines start
+/// running.
+pub fn at_exit<F: FnOnce() + Send>(f: F) {
at_exit_imp::push(Thunk::new(f));
}
pub unsafe fn cleanup() {
args::cleanup();
sys::stack_overflow::cleanup();
- // FIXME: (#20012): the resources being cleaned up by at_exit
- // currently are not prepared for cleanup to happen asynchronously
- // with detached threads using the resources; for now, we leak.
- // at_exit_imp::cleanup();
+ at_exit_imp::cleanup();
}
use libc::c_void;
use mem;
use sync::atomic;
-use sync::{Once, ONCE_INIT};
+use sys_common::mutex::{Mutex, MUTEX_INIT};
use rt::libunwind as uw;
/// Doing this split took the LLVM IR line counts of `fn main() { panic!()
/// }` from ~1900/3700 (-O/no opts) to 180/590.
#[inline(never)] #[cold] // this is the slow path, please never inline this
-fn begin_unwind_inner(msg: Box<Any + Send>, file_line: &(&'static str, uint)) -> ! {
+fn begin_unwind_inner(msg: Box<Any + Send>,
+ file_line: &(&'static str, uint)) -> ! {
// Make sure the default failure handler is registered before we look at the
// callbacks.
- static INIT: Once = ONCE_INIT;
- INIT.doit(|| unsafe { register(failure::on_fail); });
+ unsafe {
+ static LOCK: Mutex = MUTEX_INIT;
+ static mut INIT: bool = false;
+ LOCK.lock();
+ if !INIT {
+ register(failure::on_fail);
+ INIT = true;
+ }
+ LOCK.unlock();
+ }
// First, invoke call the user-defined callbacks triggered on thread panic.
//
use core::prelude::{Send, Drop, None, Option, Some};
pub use core::atomic::{AtomicBool, AtomicInt, AtomicUint, AtomicPtr};
-pub use core::atomic::{Ordering, Relaxed, Release, Acquire, AcqRel, SeqCst};
pub use core::atomic::{INIT_ATOMIC_BOOL, INIT_ATOMIC_INT, INIT_ATOMIC_UINT};
pub use core::atomic::fence;
+pub use core::atomic::Ordering::{mod, Relaxed, Release, Acquire, AcqRel, SeqCst};
/// An atomic, nullable unique pointer
///
//! can be created in the future and there must be no active timers at that
//! time.
+#![macro_escape]
+
use prelude::*;
use cell::UnsafeCell;
unsafe impl Send for RaceBox {}
unsafe impl Sync for RaceBox {}
+macro_rules! helper_init { (static $name:ident: Helper<$m:ty>) => (
+ static $name: Helper<$m> = Helper {
+ lock: ::sync::MUTEX_INIT,
+ cond: ::sync::CONDVAR_INIT,
+ chan: ::cell::UnsafeCell { value: 0 as *mut Sender<$m> },
+ signal: ::cell::UnsafeCell { value: 0 },
+ initialized: ::cell::UnsafeCell { value: false },
+ shutdown: ::cell::UnsafeCell { value: false },
+ };
+) }
+
impl<M: Send> Helper<M> {
/// Lazily boots a helper thread, becoming a no-op if the helper has already
/// been spawned.
{
unsafe {
let _guard = self.lock.lock().unwrap();
- if !*self.initialized.get() {
+ if *self.chan.get() as uint == 0 {
let (tx, rx) = channel();
*self.chan.get() = mem::transmute(box tx);
let (receive, send) = helper_signal::new();
let receive = RaceBox(receive);
let t = f();
- Thread::spawn(move |:| {
+ Thread::spawn(move || {
helper(receive.0, rx, t);
let _g = self.lock.lock().unwrap();
*self.shutdown.get() = true;
self.cond.notify_one()
}).detach();
- rt::at_exit(move|:| { self.shutdown() });
+ rt::at_exit(move || { self.shutdown() });
*self.initialized.get() = true;
+ } else if *self.chan.get() as uint == 1 {
+ panic!("cannot continue usage after shutdown");
}
}
}
// Must send and *then* signal to ensure that the child receives the
// message. Otherwise it could wake up and go to sleep before we
// send the message.
- assert!(!self.chan.get().is_null());
+ assert!(*self.chan.get() as uint != 0);
+ assert!(*self.chan.get() as uint != 1,
+ "cannot continue usage after shutdown");
(**self.chan.get()).send(msg);
helper_signal::signal(*self.signal.get() as helper_signal::signal);
}
// returns.
let mut guard = self.lock.lock().unwrap();
+ let ptr = *self.chan.get();
+ if ptr as uint == 1 {
+ panic!("cannot continue usage after shutdown");
+ }
// Close the channel by destroying it
let chan: Box<Sender<M>> = mem::transmute(*self.chan.get());
- *self.chan.get() = 0 as *mut Sender<M>;
+ *self.chan.get() = 1 as *mut Sender<M>;
drop(chan);
helper_signal::signal(*self.signal.get() as helper_signal::signal);
// except according to those terms.
#![allow(missing_docs)]
-#![allow(dead_code)]
+#![macro_escape]
use io::{mod, IoError, IoResult};
use prelude::*;
/// Behavior is undefined if the mutex is moved after the first method is
/// called on the mutex.
#[inline]
+ #[allow(dead_code)] // sys is not exported yet
pub unsafe fn new() -> Mutex { Mutex(imp::Mutex::new()) }
/// Lock the mutex blocking the current thread until it is available.
use alloc::arc::Arc;
use libc::{mod, c_char, c_int};
+use c_str::CString;
use mem;
use num::Int;
use ptr::{mod, null, null_mut};
use sys::{mod, retry, c, sock_t, last_error, last_net_error, last_gai_error, close_sock,
wrlen, msglen_t, os, wouldblock, set_nonblocking, timer, ms_to_timeval,
decode_error_detailed};
-use sync::{Mutex, MutexGuard};
+use sync::Mutex;
+#[cfg(not(target_os = "linux"))]
+use sync::MutexGuard;
use sys_common::{mod, keep_going, short_write, timeout};
use prelude::*;
use cmp;
Ok(addrs)
}
+////////////////////////////////////////////////////////////////////////////////
+// get_address_name
+////////////////////////////////////////////////////////////////////////////////
+
+extern "system" {
+ fn getnameinfo(sa: *const libc::sockaddr, salen: libc::socklen_t,
+ host: *mut c_char, hostlen: libc::size_t,
+ serv: *mut c_char, servlen: libc::size_t,
+ flags: c_int) -> c_int;
+}
+
+const NI_MAXHOST: uint = 1025;
+
+pub fn get_address_name(addr: IpAddr) -> Result<String, IoError> {
+ let addr = SocketAddr{ip: addr, port: 0};
+
+ let mut storage: libc::sockaddr_storage = unsafe { mem::zeroed() };
+ let len = addr_to_sockaddr(addr, &mut storage);
+
+ let mut hostbuf = [0 as c_char, ..NI_MAXHOST];
+
+ let res = unsafe {
+ getnameinfo(&storage as *const _ as *const libc::sockaddr, len,
+ hostbuf.as_mut_ptr(), NI_MAXHOST as libc::size_t,
+ ptr::null_mut(), 0,
+ 0)
+ };
+
+ if res != 0 {
+ return Err(last_gai_error(res));
+ }
+
+ unsafe {
+ Ok(CString::new(hostbuf.as_ptr(), false).as_str().unwrap().to_string())
+ }
+}
+
////////////////////////////////////////////////////////////////////////////////
// Timeout helpers
//
fn drop(&mut self) { unsafe { close_sock(self.fd); } }
}
+#[cfg(not(target_os = "linux"))]
pub struct Guard<'a> {
pub fd: sock_t,
pub guard: MutexGuard<'a, ()>,
}
+#[cfg(not(target_os = "linux"))]
#[unsafe_destructor]
impl<'a> Drop for Guard<'a> {
fn drop(&mut self) {
/// Usage of an RWLock is undefined if it is moved after its first use (any
/// function calls below).
#[inline]
+ #[allow(dead_code)] // sys is not exported yet
pub unsafe fn new() -> RWLock { RWLock(imp::RWLock::new()) }
/// Acquire shared access to the underlying lock, blocking the current
record_sp_limit(stack_lo + RED_ZONE);
}
-#[inline(always)]
-pub unsafe fn record_rust_managed_stack_bounds(stack_lo: uint, stack_hi: uint) {
- // When the old runtime had segmented stacks, it used a calculation that was
- // "limit + RED_ZONE + FUDGE". The red zone was for things like dynamic
- // symbol resolution, llvm function calls, etc. In theory this red zone
- // value is 0, but it matters far less when we have gigantic stacks because
- // we don't need to be so exact about our stack budget. The "fudge factor"
- // was because LLVM doesn't emit a stack check for functions < 256 bytes in
- // size. Again though, we have giant stacks, so we round all these
- // calculations up to the nice round number of 20k.
- record_sp_limit(stack_lo + RED_ZONE);
-
- return target_record_stack_bounds(stack_lo, stack_hi);
-
- #[cfg(not(windows))] #[inline(always)]
- unsafe fn target_record_stack_bounds(_stack_lo: uint, _stack_hi: uint) {}
-
- #[cfg(all(windows, target_arch = "x86"))] #[inline(always)]
- unsafe fn target_record_stack_bounds(stack_lo: uint, stack_hi: uint) {
- // stack range is at TIB: %fs:0x04 (top) and %fs:0x08 (bottom)
- asm!("mov $0, %fs:0x04" :: "r"(stack_hi) :: "volatile");
- asm!("mov $0, %fs:0x08" :: "r"(stack_lo) :: "volatile");
- }
- #[cfg(all(windows, target_arch = "x86_64"))] #[inline(always)]
- unsafe fn target_record_stack_bounds(stack_lo: uint, stack_hi: uint) {
- // stack range is at TIB: %gs:0x08 (top) and %gs:0x10 (bottom)
- asm!("mov $0, %gs:0x08" :: "r"(stack_hi) :: "volatile");
- asm!("mov $0, %gs:0x10" :: "r"(stack_lo) :: "volatile");
- }
-}
-
/// Records the current limit of the stack as specified by `end`.
///
/// This is stored in an OS-dependent location, likely inside of the thread
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(dead_code)] // stack_guard isn't used right now on all platforms
+
use core::prelude::*;
use thread::Thread;
use string::String;
struct ThreadInfo {
- // This field holds the known bounds of the stack in (lo, hi)
- // form. Not all threads necessarily know their precise bounds,
- // hence this is optional.
- stack_bounds: (uint, uint),
stack_guard: uint,
thread: Thread,
}
THREAD_INFO.with(|c| {
if c.borrow().is_none() {
*c.borrow_mut() = Some(ThreadInfo {
- stack_bounds: (0, 0),
stack_guard: 0,
thread: NewThread::new(None),
})
ThreadInfo::with(|info| info.stack_guard)
}
-pub fn set(stack_bounds: (uint, uint), stack_guard: uint, thread: Thread) {
+pub fn set(stack_guard: uint, thread: Thread) {
THREAD_INFO.with(|c| assert!(c.borrow().is_none()));
THREAD_INFO.with(move |c| *c.borrow_mut() = Some(ThreadInfo{
- stack_bounds: stack_bounds,
stack_guard: stack_guard,
thread: thread,
}));
//! ```
#![allow(non_camel_case_types)]
+#![allow(dead_code)] // sys isn't exported yet
use prelude::*;
use sync::atomic::{mod, AtomicUint};
-use sync::{Mutex, Once, ONCE_INIT};
use sys::thread_local as imp;
key: atomic::INIT_ATOMIC_UINT,
};
-static INIT_KEYS: Once = ONCE_INIT;
-static mut KEYS: *mut Mutex<Vec<imp::Key>> = 0 as *mut _;
-
impl StaticKey {
/// Gets the value associated with this TLS key
///
/// to symbols. This is a bit of a hokey implementation as-is, but it works for
/// all unix platforms we support right now, so it at least gets the job done.
+use prelude::*;
+
use c_str::CString;
-use io::{IoResult, Writer};
+use io::IoResult;
use libc;
use mem;
-use option::Option::{mod, Some, None};
-use result::Result::{Ok, Err};
use sync::{StaticMutex, MUTEX_INIT};
use sys_common::backtrace::*;
// I/O done here is blocking I/O, not green I/O, so we don't have to
// worry about this being a native vs green mutex.
static LOCK: StaticMutex = MUTEX_INIT;
- let _g = unsafe { LOCK.lock() };
+ let _g = LOCK.lock();
try!(writeln!(w, "stack backtrace:"));
#[cfg(not(any(target_os = "macos", target_os = "ios")))]
fn print(w: &mut Writer, idx: int, addr: *mut libc::c_void) -> IoResult<()> {
- use iter::{Iterator, IteratorExt};
use os;
- use path::GenericPath;
- use ptr::PtrExt;
use ptr;
- use slice::SliceExt;
////////////////////////////////////////////////////////////////////////
// libbacktrace.h API
sa_resv: [libc::c_int, ..1],
}
- impl ::kinds::Send for sigaction { }
- impl ::kinds::Sync for sigaction { }
+ unsafe impl ::kinds::Send for sigaction { }
+ unsafe impl ::kinds::Sync for sigaction { }
#[repr(C)]
pub struct sigset_t {
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
-#![allow(unused_imports)]
-#![allow(dead_code)]
-#![allow(unused_unsafe)]
-#![allow(unused_mut)]
extern crate libc;
-use num;
use num::{Int, SignedInt};
use prelude::*;
use io::{mod, IoResult, IoError};
use sys_common::mkerr_libc;
-macro_rules! helper_init { (static $name:ident: Helper<$m:ty>) => (
- static $name: Helper<$m> = Helper {
- lock: ::sync::MUTEX_INIT,
- cond: ::sync::CONDVAR_INIT,
- chan: ::cell::UnsafeCell { value: 0 as *mut Sender<$m> },
- signal: ::cell::UnsafeCell { value: 0 },
- initialized: ::cell::UnsafeCell { value: false },
- shutdown: ::cell::UnsafeCell { value: false },
- };
-) }
-
pub mod backtrace;
pub mod c;
pub mod ext;
pub mod addrinfo {
pub use sys_common::net::get_host_addresses;
+ pub use sys_common::net::get_address_name;
}
// FIXME: move these to c module
use cell::UnsafeCell;
use kinds::Sync;
use sys::sync as ffi;
-use sys_common::mutex;
pub struct Mutex { inner: UnsafeCell<ffi::pthread_mutex_t> }
unsafe impl Sync for Mutex {}
+#[allow(dead_code)] // sys isn't exported yet
impl Mutex {
#[inline]
pub unsafe fn new() -> Mutex {
//! Implementation of `std::os` functionality for unix systems
+#![allow(unused_imports)] // lots of cfg code here
+
use prelude::*;
-use error::{FromError, Error};
-use fmt;
use io::{IoError, IoResult};
-use libc::{mod, c_int, c_char, c_void};
+use libc::{mod, c_int, c_char};
+use os;
use path::BytesContainer;
use ptr;
-use sync::atomic::{AtomicInt, INIT_ATOMIC_INT, SeqCst};
use sys::fs::FileDesc;
-use os;
use os::TMPBUF_SZ;
fn lock_nonblocking<'a>(&'a self) -> Guard<'a> {
let ret = Guard {
fd: self.fd(),
- guard: unsafe { self.inner.lock.lock().unwrap() },
+ guard: self.inner.lock.lock().unwrap(),
};
assert!(set_nonblocking(self.fd(), true).is_ok());
ret
use libc::{mod, pid_t, c_void, c_int};
use c_str::CString;
-use io::{mod, IoResult, IoError, EndOfFile};
+use io::{IoResult, EndOfFile};
use mem;
use os;
use ptr;
// The actual communication between the helper thread and this thread is
// quite simple, just a channel moving data around.
- unsafe { HELPER.boot(register_sigchld, waitpid_helper) }
+ HELPER.boot(register_sigchld, waitpid_helper);
match self.try_wait() {
Some(ret) => return Ok(ret),
}
let (tx, rx) = channel();
- unsafe { HELPER.send(NewChild(self.pid, tx, deadline)); }
+ HELPER.send(NewChild(self.pid, tx, deadline));
return match rx.recv_opt() {
Ok(e) => Ok(e),
Err(()) => Err(timeout("wait timed out")),
Ok(NewChild(pid, tx, deadline)) => {
active.push((pid, tx, deadline));
}
+ // Once we've been disconnected it means the main
+ // thread is exiting (at_exit has run). We could
+ // still have active waiters for other threads, so
+ // we're just going to drop them all on the floor.
+ // This means that they won't receive a "you're
+ // done" message in which case they'll be considered
+ // as timed out, but more generally errors will
+ // start propagating.
Err(comm::Disconnected) => {
- assert!(active.len() == 0);
break 'outer;
}
Err(comm::Empty) => break,
inner: UnsafeCell { value: ffi::PTHREAD_RWLOCK_INITIALIZER },
};
+#[allow(dead_code)] // sys isn't exported yet
impl RWLock {
#[inline]
pub unsafe fn new() -> RWLock {
#[cfg(any(target_os = "linux", target_os = "macos"))]
mod imp {
- use core::prelude::*;
use sys_common::stack;
use super::Handler;
Err(sys_common::eof())
}
- pub fn socket_name(&mut self) -> IoResult<ip::SocketAddr> {
- net::sockname(self.fd(), libc::getsockname)
- }
-
pub fn set_timeout(&mut self, timeout: Option<u64>) {
self.deadline = timeout.map(|a| sys::timer::now() + a).unwrap_or(0);
}
fn helper(input: libc::c_int, messages: Receiver<Req>, _: ()) {
let mut set: c::fd_set = unsafe { mem::zeroed() };
- let mut fd = FileDesc::new(input, true);
+ let fd = FileDesc::new(input, true);
let mut timeout: libc::timeval = unsafe { mem::zeroed() };
// active timers are those which are able to be selected upon (and it's a
1 => {
loop {
match messages.try_recv() {
+ // Once we've been disconnected it means the main thread
+ // is exiting (at_exit has run). We could still have
+ // active timers for other threads, so we're just going
+ // to drop them all on the floor. This is all we can
+ // really do, however, to prevent resource leakage. The
+ // remaining timers will likely start panicking quickly
+ // as they attempt to re-use this thread but are
+ // disallowed to do so.
Err(comm::Disconnected) => {
- assert!(active.len() == 0);
break 'outer;
}
pub fn get_winsize(&mut self) -> IoResult<(int, int)> {
Err(sys_common::unimpl())
}
- pub fn isatty(&self) -> bool { false }
}
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-/// As always, windows has something very different than unix, we mainly want
-/// to avoid having to depend too much on libunwind for windows.
-///
-/// If you google around, you'll find a fair bit of references to built-in
-/// functions to get backtraces on windows. It turns out that most of these are
-/// in an external library called dbghelp. I was unable to find this library
-/// via `-ldbghelp`, but it is apparently normal to do the `dlopen` equivalent
-/// of it.
-///
-/// You'll also find that there's a function called CaptureStackBackTrace
-/// mentioned frequently (which is also easy to use), but sadly I didn't have a
-/// copy of that function in my mingw install (maybe it was broken?). Instead,
-/// this takes the route of using StackWalk64 in order to walk the stack.
+
+//! As always, windows has something very different from unix; we mainly want
+//! to avoid having to depend too much on libunwind for windows.
+//!
+//! If you google around, you'll find a fair bit of references to built-in
+//! functions to get backtraces on windows. It turns out that most of these are
+//! in an external library called dbghelp. I was unable to find this library
+//! via `-ldbghelp`, but it is apparently normal to do the `dlopen` equivalent
+//! of it.
+//!
+//! You'll also find that there's a function called CaptureStackBackTrace
+//! mentioned frequently (which is also easy to use), but sadly I didn't have a
+//! copy of that function in my mingw install (maybe it was broken?). Instead,
+//! this takes the route of using StackWalk64 in order to walk the stack.
+
+#![allow(dead_code)] // constants/fields aren't always used on all platforms
use c_str::CString;
use intrinsics;
// According to windows documentation, all dbghelp functions are
// single-threaded.
static LOCK: StaticMutex = MUTEX_INIT;
- let _g = unsafe { LOCK.lock() };
+ let _g = LOCK.lock();
// Open up dbghelp.dll, we don't link to it explicitly because it can't
// always be found. Additionally, it's nice having fewer dependencies.
#![allow(non_camel_case_types)]
use libc;
-use prelude::*;
pub const WSADESCRIPTION_LEN: uint = 256;
pub const WSASYS_STATUS_LEN: uint = 128;
//! Blocking Windows-based file I/O
-use alloc::arc::Arc;
use libc::{mod, c_int};
-use c_str::CString;
+use io;
use mem;
-use sys::os::fill_utf16_buf_and_decode;
-use path;
use ptr;
-use str;
-use io;
+use sys::os::fill_utf16_buf_and_decode;
use prelude::*;
use sys;
use sys::os;
-use sys_common::{keep_going, eof, mkerr_libc};
+use sys_common::{unimpl, mkerr_libc};
use io::{FilePermission, Write, UnstableFileStat, Open, FileAccess, FileMode};
use io::{IoResult, IoError, FileStat, SeekStyle};
// FIXME: move this to platform-specific modules (for now)?
pub fn lstat(_p: &Path) -> IoResult<FileStat> {
// FIXME: implementation is missing
- Err(super::unimpl())
+ Err(unimpl())
}
pub fn utime(p: &Path, atime: u64, mtime: u64) -> IoResult<()> {
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
-#![allow(unused_imports)]
-#![allow(dead_code)]
-#![allow(unused_unsafe)]
-#![allow(unused_mut)]
extern crate libc;
-use num;
use mem;
use prelude::*;
use io::{mod, IoResult, IoError};
use sync::{Once, ONCE_INIT};
-macro_rules! helper_init { (static $name:ident: Helper<$m:ty>) => (
- static $name: Helper<$m> = Helper {
- lock: ::sync::MUTEX_INIT,
- cond: ::sync::CONDVAR_INIT,
- chan: ::cell::UnsafeCell { value: 0 as *mut Sender<$m> },
- signal: ::cell::UnsafeCell { value: 0 },
- initialized: ::cell::UnsafeCell { value: false },
- shutdown: ::cell::UnsafeCell { value: false },
- };
-) }
-
pub mod backtrace;
pub mod c;
pub mod ext;
pub mod addrinfo {
pub use sys_common::net::get_host_addresses;
+ pub use sys_common::net::get_address_name;
}
// FIXME: move these to c module
}
}
-pub fn unimpl() -> IoError {
- IoError {
- kind: io::IoUnavailable,
- desc: "operation is not implemented",
- detail: None,
- }
-}
-
pub fn to_utf16(s: Option<&str>) -> IoResult<Vec<u16>> {
match s {
Some(s) => Ok({
use prelude::*;
-use fmt;
use io::{IoResult, IoError};
-use libc::{c_int, c_char, c_void};
+use libc::{c_int, c_void};
use libc;
use os;
use path::BytesContainer;
use ptr;
-use sync::atomic::{AtomicInt, INIT_ATOMIC_INT, SeqCst};
use sys::fs::FileDesc;
use slice;
// acquire the lock.
//
// See comments in close_read() about why this lock is necessary.
- let guard = unsafe { self.inner.lock.lock() };
+ let guard = self.inner.lock.lock();
if self.read_closed() {
return Err(eof())
}
// going after we woke up.
//
// See comments in close_read() about why this lock is necessary.
- let guard = unsafe { self.inner.lock.lock() };
+ let guard = self.inner.lock.lock();
if self.write_closed() {
return Err(epipe())
}
// close_read() between steps 1 and 2. By atomically executing steps 1
// and 2 with a lock with respect to close_read(), we're guaranteed that
// no thread will erroneously sit in a read forever.
- let _guard = unsafe { self.inner.lock.lock() };
+ let _guard = self.inner.lock.lock();
self.inner.read_closed.store(true, atomic::SeqCst);
self.cancel_io()
}
pub fn close_write(&mut self) -> IoResult<()> {
// see comments in close_read() for why this lock is necessary
- let _guard = unsafe { self.inner.lock.lock() };
+ let _guard = self.inner.lock.lock();
self.inner.write_closed.store(true, atomic::SeqCst);
self.cancel_io()
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use libc::{pid_t, c_void, c_int};
+use prelude::*;
+
+use libc::{pid_t, c_void};
use libc;
use c_str::CString;
use io;
use mem;
use os;
use ptr;
-use prelude::*;
-use io::process::{ProcessExit, ExitStatus, ExitSignal};
+use io::process::{ProcessExit, ExitStatus};
use collections;
use path::BytesContainer;
use hash::Hash;
use io::{IoResult, IoError};
-use sys::fs;
-use sys::{mod, retry, c, wouldblock, set_nonblocking, ms_to_timeval, timer};
+use sys::timer;
use sys::fs::FileDesc;
-use sys_common::helper_thread::Helper;
-use sys_common::{AsInner, mkerr_libc, timeout};
+use sys_common::{AsInner, timeout};
use io::fs::PathExtensions;
use libc::funcs::extra::msvcrt::get_osfhandle;
use mem;
- use iter::{Iterator, IteratorExt};
- use str::StrExt;
if cfg.gid().is_some() || cfg.uid().is_some() {
return Err(IoError {
use mem;
use libc;
use libc::types::os::arch::extra::{LPVOID, DWORD, LONG, BOOL};
-use sys_common::{stack, thread_info};
+use sys_common::stack;
pub struct Handler {
_data: *mut libc::c_void
fn drop(&mut self) {}
}
-// get_task_info is called from an exception / signal handler.
-// It returns the guard page of the current task or 0 if that
-// guard page doesn't exist. None is returned if there's currently
-// no local task.
-unsafe fn get_task_guard_page() -> uint {
- thread_info::stack_guard()
-}
-
// This is initialized in init() and only read from after
static mut PAGE_SIZE: uint = 0;
use mem;
use ptr;
use prelude::*;
-use super::{last_error, last_net_error, retry, sock_t};
+use super::{last_error, last_net_error, sock_t};
use sync::{Arc, atomic};
-use sys::fs::FileDesc;
use sys::{mod, c, set_nonblocking, wouldblock, timer};
-use sys_common::{mod, timeout, eof, net};
+use sys_common::{timeout, eof, net};
pub use sys_common::net::TcpStream;
Err(eof())
}
- pub fn socket_name(&mut self) -> IoResult<ip::SocketAddr> {
- net::sockname(self.socket(), libc::getsockname)
- }
-
pub fn set_timeout(&mut self, timeout: Option<u64>) {
self.deadline = timeout.map(|a| timer::now() + a).unwrap_or(0);
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use core::prelude::*;
-
use boxed::Box;
use cmp;
use mem;
rt::at_exit(move|| {
DTOR_LOCK.lock();
let dtors = DTORS;
- DTORS = 0 as *mut _;
+ DTORS = 1 as *mut _;
mem::transmute::<_, Box<Vec<(Key, Dtor)>>>(dtors);
- assert!(DTORS.is_null()); // can't re-init after destructing
+ assert!(DTORS as uint == 1); // can't re-init after destructing
DTOR_LOCK.unlock();
});
}
unsafe fn register_dtor(key: Key, dtor: Dtor) {
DTOR_LOCK.lock();
init_dtors();
+ assert!(DTORS as uint != 0);
+ assert!(DTORS as uint != 1,
+ "cannot create new TLS keys after the main thread has exited");
(*DTORS).push((key, dtor));
DTOR_LOCK.unlock();
}
unsafe fn unregister_dtor(key: Key) -> bool {
DTOR_LOCK.lock();
init_dtors();
+ assert!(DTORS as uint != 0);
+ assert!(DTORS as uint != 1,
+ "cannot unregister destructors after the main thread has exited");
let ret = {
let dtors = &mut *DTORS;
let before = dtors.len();
}
}
+#[allow(dead_code)] // not actually dead
unsafe fn run_dtors() {
let mut any_run = true;
for _ in range(0, 5i) {
use ptr;
use comm;
-use sys::c;
-use sys::fs::FileDesc;
use sys_common::helper_thread::Helper;
use prelude::*;
use io::IoResult;
None => {}
}
}
+ // See the comment in unix::timer for why we don't have any
+ // asserts here and why we're likely just leaving timers on
+ // the floor as we exit.
Err(comm::Disconnected) => {
- assert_eq!(objs.len(), 1);
- assert_eq!(chans.len(), 0);
break 'outer;
}
Err(..) => break
//! to working in raw UTF-16, with such a wrapper around it.
use super::c::{ReadConsoleW, WriteConsoleW, GetConsoleMode, SetConsoleMode};
-use super::c::{ERROR_ILLEGAL_CHARACTER};
use super::c::{ENABLE_ECHO_INPUT, ENABLE_EXTENDED_FLAGS};
use super::c::{ENABLE_INSERT_MODE, ENABLE_LINE_INPUT};
use super::c::{ENABLE_PROCESSED_INPUT, ENABLE_QUICK_EDIT_MODE};
use ptr;
use str::from_utf8;
+use sys_common::unimpl;
+
fn invalid_encoding() -> IoError {
IoError {
kind: io::InvalidInput,
// Make a CONSOLE_SCREEN_BUFFER_INFO
// Call GetConsoleScreenBufferInfo
// Maybe call GetLargestConsoleWindowSize instead?
- Err(super::unimpl())
+ Err(unimpl())
}
-
- // Let us magically declare this as a TTY
- pub fn isatty(&self) -> bool { true }
}
impl Drop for TTY {
let my_stack_top = addr as uint;
let my_stack_bottom = my_stack_top - stack_size + 1024;
unsafe {
- stack::record_os_managed_stack_bounds(my_stack_bottom, my_stack_top);
+ stack::record_os_managed_stack_bounds(my_stack_bottom,
+ my_stack_top);
+ thread_info::set(imp::guard::current(), their_thread);
}
- thread_info::set(
- (my_stack_bottom, my_stack_top),
- unsafe { imp::guard::current() },
- their_thread
- );
let mut output = None;
let f: Thunk<(), T> = if stdout.is_some() || stderr.is_some() {
Builder->finalize();
}
-extern "C" void LLVMDIBuilderCreateCompileUnit(
+extern "C" LLVMValueRef LLVMDIBuilderCreateCompileUnit(
DIBuilderRef Builder,
unsigned Lang,
const char* File,
const char* Flags,
unsigned RuntimeVer,
const char* SplitName) {
- Builder->createCompileUnit(Lang, File, Dir, Producer, isOptimized,
- Flags, RuntimeVer, SplitName);
+ return wrap(Builder->createCompileUnit(Lang,
+ File,
+ Dir,
+ Producer,
+ isOptimized,
+ Flags,
+ RuntimeVer,
+ SplitName));
}
extern "C" LLVMValueRef LLVMDIBuilderCreateFile(
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that static methods are not object-safe.
+
+trait Tr {
+ fn foo();
+}
+
+struct St;
+
+impl Tr for St {
+ fn foo() {}
+}
+
+fn main() {
+ let _: &Tr = &St; //~ ERROR cannot convert to a trait object because trait `Tr` is not
+ //~^ NOTE cannot call a static method (`foo`) through a trait object
+}
#![allow(unused_variables)]
#![allow(dead_code)]
+#![omit_gdb_pretty_printer_section]
static B: bool = false;
// gdb-command:continue
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
static B: bool = false;
static I: int = -1;
// gdb-command:continue
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
fn main() {
let unit: () = ();
// gdb-check:$28 = 9.25
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
static mut B: bool = false;
static mut I: int = -1;
// lldb-check:[...]$12 = 3.5
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
fn main() {
let b: bool = false;
// lldb-check:[...]$12 = 3.5
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
fn main() {
let bool_val: bool = true;
// lldb-check:[...]$2 = TheC
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
enum ABC { TheA, TheB, TheC }
// lldb-check:[...]$2 = TheOnlyCase(4820353753753434)
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
// The first element is to ensure proper alignment, irrespective of the machines word size. Since
// the size of the discriminant value is machine dependent, this has be taken into account when
// lldb-check:[...]$6 = 26.5
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
struct SomeStruct {
x: int,
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
fn main() {
let stack_val: (i16, f32) = (-14, -19f32);
// lldb-check:[...]$12 = 3.5
#![allow(unused_variables)]
-
+#![omit_gdb_pretty_printer_section]
fn main() {
let bool_box: Box<bool> = box true;
// lldb-check:[...]$1 = (2, 3.5)
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
fn main() {
let a = box 1i;
// lldb-check:[...]$1 = StructWithDestructor { x: 77, y: 777, z: 7777, w: 77777 }
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
struct StructWithSomePadding {
x: i16,
// lldb-check:[...]$6 = Case1 { x: 0, y: 8970181431921507452 }
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
#[deriving(Clone)]
struct Struct {
a: int,
// lldb-check:[...]$2 = (4444.5, 5555, 6666, 7777.5)
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
trait Trait {
fn method(self) -> Self;
}
// lldb-check:[...]$6 = (StructWithDrop { a: OneHundred, b: Vienna }, 9)
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
use self::AnEnum::{OneHundred, OneThousand, OneMillion};
use self::AnotherEnum::{MountainView, Toronto, Vienna};
#![allow(unused_variables)]
#![allow(dead_code)]
+#![omit_gdb_pretty_printer_section]
use self::AutoDiscriminant::{One, Two, Three};
use self::ManualDiscriminant::{OneHundred, OneThousand, OneMillion};
// lldb-check:[...]$3 = 110
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
fn some_generic_fun<T1, T2>(a: T1, b: T2) -> (T2, T1) {
let closure = |x, y| {
// lldb-command:continue
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
use self::Univariant::Unit;
// lldb-check:[...]$23 = (34903493, 232323)
// lldb-command:continue
+#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
+
struct Struct {
x: i16,
y: f32,
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
use self::Univariant::Unit;
// lldb-check:[...]$4 = StructPaddedAtEnd { x: [22, 23], y: [24, 25] }
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
struct NoPadding1 {
x: [u32; 3],
// lldb-command:continue
-
#![allow(unused_variables)]
-
-
+#![omit_gdb_pretty_printer_section]
fn immediate_args(a: int, b: bool, c: f64) {
::std::io::print("") // #break
// lldb-check:[...]$3 = 3000
// lldb-command:continue
+
+#![omit_gdb_pretty_printer_section]
+
fn main() {
fun(111102, true);
// lldb-command:continue
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
#[no_stack_check]
fn immediate_args(a: int, b: bool, c: f64) {
// lldb-command:continue
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
fn immediate_args(a: int, b: bool, c: f64) {
()
// ignore-lldb
// ignore-android: FIXME(#10381)
// compile-flags:-g
-// gdb-use-pretty-printer
// gdb-command: run
// ignore-lldb
// ignore-android: FIXME(#10381)
// compile-flags:-g
-// gdb-use-pretty-printer
// This test uses some GDB Python API features (e.g. accessing anonymous fields)
// which are only available in newer GDB version. The following directive will
// lldb-check:[...]$8 = ((5, Struct { a: 6, b: 7.5 }), (Struct { a: 6, b: 7.5 }, 5))
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
#[deriving(Clone)]
struct Struct {
// lldb-check:[...]$7 = 2.5
// lldb-command:continue
+
+#![omit_gdb_pretty_printer_section]
+
fn outer<TA: Clone>(a: TA) {
inner(a.clone(), 1i);
inner(a.clone(), 2.5f64);
// lldb-check:[...]$14 = -10.5
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
struct Struct<T> {
x: T
// gdb-check:$5 = 5
// gdb-command:continue
+
+#![omit_gdb_pretty_printer_section]
+
struct Struct {
x: int
}
// gdb-command:print univariant
// gdb-check:$4 = {{a = -1}}
+
+#![omit_gdb_pretty_printer_section]
+
use self::Regular::{Case1, Case2, Case3};
use self::Univariant::TheOnlyCase;
// lldb-command:print float_int_float
// lldb-check:[...]$3 = AGenericStruct<f64, generic-struct::AGenericStruct<int, f64>> { key: 6.5, value: AGenericStruct<int, f64> { key: 7, value: 8.5 } }
+
+#![omit_gdb_pretty_printer_section]
+
struct AGenericStruct<TKey, TValue> {
key: TKey,
value: TValue
// gdb-check:$4 = {3.5, {4, 5, 6}}
// gdb-command:continue
+#![omit_gdb_pretty_printer_section]
struct Struct {
x: int
// lldb-command:print univariant
// lldb-check:[...]$3 = TheOnlyCase(-1)
+#![omit_gdb_pretty_printer_section]
+
use self::Regular::{Case1, Case2, Case3};
use self::Univariant::TheOnlyCase;
// lldb-command:continue
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
// This test case makes sure that debug info does not ICE when include_str is
// used multiple times (see issue #11322).
// gdb-command:run
// gdb-command:next
-// gdb-check:[...]32[...]s
+// gdb-check:[...]34[...]s
// gdb-command:continue
+#![omit_gdb_pretty_printer_section]
+
// IF YOU MODIFY THIS FILE, BE CAREFUL TO ADAPT THE LINE NUMBERS IN THE DEBUGGER COMMANDS
// This test makes sure that gdb does not set unwanted breakpoints in inlined functions. If a
// lldb-check:[...]$6 = 1000000
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
fn main() {
let range = [1i, 2, 3];
// lldb-check:[...]$15 = -1
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
fn main() {
// lldb-check:[...]$17 = 232
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
struct Struct {
x: int,
// lldb-check:[...]$5 = false
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
fn main() {
let x = false;
// lldb-check:[...]$12 = 2
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
fn main() {
let mut x = 0i;
// lldb-check:[...]$5 = false
// lldb-command:continue
+
+#![omit_gdb_pretty_printer_section]
+
fn main() {
let x = false;
// lldb-check:[...]$12 = 2
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
fn main() {
#![feature(macro_rules)]
+#![omit_gdb_pretty_printer_section]
macro_rules! trivial {
($e1:expr) => ($e1)
#![allow(unused_variables)]
#![allow(unused_assignments)]
+#![omit_gdb_pretty_printer_section]
static mut MUT_INT: int = 0;
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
struct Struct {
a: i64,
// lldb-check:[...]$14 = -10
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
enum Enum {
Variant1 { x: u16, y: u16 },
Variant2 (u32)
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
struct Struct<T> {
x: T
}
// lldb-check:[...]$14 = -10
// lldb-command:continue
+
+#![omit_gdb_pretty_printer_section]
+
struct Struct {
x: int
}
// lldb-check:[...]$14 = -10
// lldb-command:continue
+
+#![omit_gdb_pretty_printer_section]
+
struct Struct {
x: int
}
// lldb-check:[...]$14 = -10
// lldb-command:continue
+
+#![omit_gdb_pretty_printer_section]
+
struct TupleStruct(int, f64);
impl TupleStruct {
// lldb-check:[...]$2 = 30303
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
fn function_one() {
let abc = 10101i;
// lldb-check:[...]$2 = 30303
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
fn function_one() {
let a = 10101i;
// lldb-check:[...]$11 = 20
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
fn main() {
let x = false;
let y = true;
// gdb-check:$2 = {<No data fields>}
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
enum ANilEnum {}
enum AnotherNilEnum {}
// gdb-command:continue
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
fn function_with_debuginfo() {
let abc = 10u;
// lldb-check:[...]$7 = None
+#![omit_gdb_pretty_printer_section]
+
// If a struct has exactly two variants, one of them is empty, and the other one
// contains a non-nullable pointer, then this value is used as the discriminator.
// The test cases in this file make sure that something readable is generated for
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
#[repr(packed)]
struct Packed {
// lldb-check:[...]$5 = 40
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
#[repr(packed)]
struct Packed {
// is taken from issue #11083.
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
pub struct Window<'a> {
callbacks: WindowCallbacks<'a>
// gdb-command:continue
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
use self::Opt::{Empty, Val};
// lldb-check:[...]$14 = -10
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
struct Struct {
x: int
// lldb-check:[...]$14 = -10.5
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
struct Struct {
x: int
// lldb-check:[...]$5 = 20
// lldb-command:continue
+
+#![omit_gdb_pretty_printer_section]
+
fn a_function(x: bool, y: bool) {
zzz(); // #break
sentinel();
// lldb-check:[...]$5 = 20
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
fn main() {
let x = false;
let y = true;
#![allow(experimental)]
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
use std::simd::{i8x16, i16x8,i32x4,i64x2,u8x16,u16x8,u32x4,u64x2,f32x4,f64x2};
// lldb-check:[...]$6 = false
// lldb-command:continue
+
+#![omit_gdb_pretty_printer_section]
+
fn main() {
let x = false;
// lldb-command:print padding_at_end
// lldb-check:[...]$5 = PaddingAtEnd { x: -10014, y: 10015 }
-#![allow(unused_variables)];
-#![allow(dead_code)];
+#![allow(unused_variables)]
+#![allow(dead_code)]
+#![omit_gdb_pretty_printer_section]
struct NoPadding16 {
x: u16,
#![allow(unused_variables)]
#![allow(dead_code)]
+#![omit_gdb_pretty_printer_section]
static mut NO_PADDING_8: (i8, u8) = (-50, 50);
static mut NO_PADDING_16: (i16, i16, u16) = (-1, 2, 3);
// lldb-check:[...]$4 = 5
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
struct Struct {
x: int
}
// lldb-check:[...]$2 = TheOnlyCase(Struct { x: 123, y: 456, z: 789 })
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
use self::Regular::{Case1, Case2};
use self::Univariant::TheOnlyCase;
// lldb-check:[...]$7 = Tree { x: Simple { x: 25 }, y: InternalPaddingParent { x: InternalPadding { x: 26, y: 27 }, y: InternalPadding { x: 28, y: 29 }, z: InternalPadding { x: 30, y: 31 } }, z: BagInBag { x: Bag { x: Simple { x: 32 } } } }
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
struct Simple {
x: i32
// lldb-check:[...]$3 = TheOnlyCase { a: -1 }
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
use self::Regular::{Case1, Case2, Case3};
use self::Univariant::TheOnlyCase;
// lldb-check:[...]$3 = NestedOuter { a: NestedInner { a: WithDestructor { x: 7890, y: 9870 } } }
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
struct NoDestructor {
x: i32,
// lldb-check:[...]$3 = (1, 2, 3)
// lldb-command:continue
+#![omit_gdb_pretty_printer_section]
+
struct Struct {
x: int
}
// lldb-command:run
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
trait Trait {
fn method(&self) -> int { 0 }
// gdb-check:$10 = {x = {{40, 41, 42}, {43, 44}}, y = {45, 46, 47, 48}}
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
struct NoPadding1 {
x: (i32, i32),
// lldb-check:[...]$6 = ((21, 22), 23)
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
fn main() {
let no_padding1: ((u32, u32), u32, u32) = ((0, 1), 2, 3);
// to all fields having the name "<unnamed_field>"). Otherwise they are handled the same a normal
// structs.
+
+#![omit_gdb_pretty_printer_section]
+
struct NoPadding16(u16, i16);
struct NoPadding32(i32, f32, u32);
struct NoPadding64(f64, i64, u64);
// lldb-check:[...]$3 = TheOnlyCase(-1)
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
use self::Regular::{Case1, Case2, Case3};
use self::Univariant::TheOnlyCase;
// gdb-command:whatis stack_closure2
// gdb-check:type = struct (&mut|i8, f32| -> f32, uint)
+#![omit_gdb_pretty_printer_section]
+
use self::Enum1::{Variant1_1, Variant1_2};
use std::ptr;
// lldb-check:[...]$2 = TheOnlyCase(123234)
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
// The first element is to ensure proper alignment, irrespective of the machines word size. Since
// the size of the discriminant value is machine dependent, this has be taken into account when
// lldb-command:continue
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
struct Struct {
a: int,
#![allow(unused_variables)]
#![feature(unboxed_closures)]
+#![omit_gdb_pretty_printer_section]
struct Struct {
a: int,
#![feature(unboxed_closures)]
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
struct Struct {
a: int,
#![allow(unused_variables)]
#![feature(slicing_syntax)]
+#![omit_gdb_pretty_printer_section]
struct AStruct {
x: i16,
// lldb-check:[...]$0 = [1, 2, 3]
#![allow(unused_variables)]
+#![omit_gdb_pretty_printer_section]
static mut VECT: [i32; 3] = [1, 2, 3];
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// Check that object-safe methods are identified as such. Also
-// acts as a regression test for #18490
+// Check that object-safe methods are identified as such.
trait Tr {
- // Static methods are always safe regardless of other rules
- fn new() -> Self;
+ fn foo(&self);
}
struct St;
impl Tr for St {
- fn new() -> St { St }
+ fn foo(&self) {}
}
fn main() {
- &St as &Tr;
+ let s: &Tr = &St;
+ s.foo();
}