1 # FIXME(linux): need to configure core dumps, enable them, and then dump
2 # backtraces on failure from all core dumps:
4 # - bash: sudo apt install gdb
5 # - bash: sudo sh -c 'echo "/checkout/obj/cores/core.%p.%E" > /proc/sys/kernel/core_pattern'
7 # Check travis config for `gdb --batch` command to print all crash logs
10 # When this parameter is set to anything other than an empty string the tests
11 # will only be executed when the commit updates submodules
12 only_on_updated_submodules: ''
16 # Disable automatic line ending conversion, which is enabled by default on
17 # Azure's Windows image. Having the conversion enabled caused regressions both
18 # in our test suite (it broke miri tests) and in the ecosystem, since we
19 # started shipping install scripts with CRLF endings instead of the old LF.
21 # Note that we do this a couple times during the build as the PATH and current
22 # user/directory change, e.g. when mingw is enabled.
- displayName: "Disable git automatic line ending conversion"
  bash: git config --global core.autocrlf false
29 # Set the SKIP_JOB environment variable if this job is supposed to only run
30 # when submodules are updated and they were not. The following time consuming
31 # tasks will be skipped when the environment variable is present.
32 - ${{ if parameters.only_on_updated_submodules }}:
35 # Submodules pseudo-files inside git have the 160000 permissions, so when
36 # those files are present in the diff a submodule was updated.
37 if git diff HEAD^ | grep "^index .* 160000" >/dev/null 2>&1; then
38 echo "Executing the job since submodules are updated"
40 echo "Not executing this job since no submodules were updated"
41 echo "##vso[task.setvariable variable=SKIP_JOB;]1"
43 displayName: Decide whether to run this job
45 # Spawn a background process to collect CPU usage statistics which we'll upload
46 # at the end of the build. See the comments in the script here for more
48 - bash: python src/ci/cpu-usage-over-time.py &> cpu-usage.csv &
49 displayName: "Collect CPU-usage statistics in the background"
# Dump all environment variables (sorted) to the build log so the exact
# configuration this job ran with can be inspected after the fact.
- bash: printenv | sort
  displayName: Show environment variables
57 du . | sort -nr | head -n100
58 displayName: Show disk usage
59 # FIXME: this hasn't been tested, but maybe it works on Windows? Should test!
60 condition: and(succeeded(), ne(variables['Agent.OS'], 'Windows_NT'))
# Pull in the shared step templates that install sccache and clang.
- template: install-sccache.yml
- template: install-clang.yml
65 # Install some dependencies needed to build LLDB/Clang, currently only needed
66 # during the `dist` target
72 brew link --force swig@3
73 displayName: Install build dependencies (OSX)
74 condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin'), eq(variables['SCRIPT'],'./x.py dist'))
# Switch to Xcode 9.3 on OSX since it seems to be the last version that supports
77 # i686-apple-darwin. We'll eventually want to upgrade this and it will probably
78 # force us to drop i686-apple-darwin, but let's keep the wheels turning for now.
81 sudo xcode-select --switch /Applications/Xcode_9.3.app
82 displayName: Switch to Xcode 9.3 (OSX)
83 condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin'))
85 - template: install-windows-build-deps.yml
87 # Looks like docker containers have IPv6 disabled by default, so let's turn it
88 # on since libstd tests require it
91 sudo mkdir -p /etc/docker
92 echo '{"ipv6":true,"fixed-cidr-v6":"fd9a:8454:6789:13f7::/64"}' | sudo tee /etc/docker/daemon.json
93 sudo service docker restart
94 displayName: Enable IPv6
95 condition: and(succeeded(), not(variables.SKIP_JOB), eq(variables['Agent.OS'], 'Linux'))
97 # Disable automatic line ending conversion (again). On Windows, when we're
98 # installing dependencies, something switches the git configuration directory or
99 # re-enables autocrlf. We've not tracked down the exact cause -- and there may
100 # be multiple -- but this should ensure submodules are checked out with the
101 # appropriate line endings.
- displayName: "Disable git automatic line ending conversion"
  bash: git config --replace-all --global core.autocrlf false
105 # Check out all our submodules, but more quickly than using git by using one of
109 mkdir -p $HOME/rustsrc
110 $BUILD_SOURCESDIRECTORY/src/ci/init_repo.sh . $HOME/rustsrc
111 condition: and(succeeded(), not(variables.SKIP_JOB), ne(variables['Agent.OS'], 'Windows_NT'))
112 displayName: Check out submodules (Unix)
114 if not exist C:\cache\rustsrc\NUL mkdir C:\cache\rustsrc
115 sh src/ci/init_repo.sh . /c/cache/rustsrc
116 condition: and(succeeded(), not(variables.SKIP_JOB), eq(variables['Agent.OS'], 'Windows_NT'))
117 displayName: Check out submodules (Windows)
119 # See also the disable for autocrlf above, this just checks that it worked
121 # We check both in rust-lang/rust and in a submodule to make sure both are
122 # accurate. Submodules are checked out significantly later than the main
123 # repository in this script, so settings can (and do!) change between then.
# Linux (and maybe macOS) builders don't currently have dos2unix, so only
126 # run this step on Windows.
129 # print out the git configuration so we can better investigate failures in
131 git config --list --show-origin
132 dos2unix -ih Cargo.lock src/tools/rust-installer/install-template.sh
133 endings=$(dos2unix -ic Cargo.lock src/tools/rust-installer/install-template.sh)
134 # if endings has non-zero length, error out
135 if [ -n "$endings" ]; then exit 1 ; fi
136 condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'))
137 displayName: Verify line endings are LF
139 # Ensure the `aws` CLI is installed so we can deploy later on, cache docker
141 - bash: src/ci/install-awscli.sh
143 AGENT_OS: $(Agent.OS)
144 condition: and(succeeded(), not(variables.SKIP_JOB))
145 displayName: Install awscli
147 # Configure our CI_JOB_NAME variable which log analyzers can use for the main
148 # step to see what's going on.
150 builder=$(echo $AGENT_JOBNAME | cut -d ' ' -f 2)
151 echo "##vso[task.setvariable variable=CI_JOB_NAME]$builder"
152 displayName: Configure Job Name
# As a quick smoke check on the otherwise very fast mingw-check linux builder,
155 # check our own internal scripts.
158 git clone --depth=1 https://github.com/rust-lang-nursery/rust-toolstate.git
160 python2.7 "$BUILD_SOURCESDIRECTORY/src/tools/publish_toolstate.py" "$(git rev-parse HEAD)" "$(git log --format=%s -n1 HEAD)" "" ""
162 rm -rf rust-toolstate
163 condition: and(succeeded(), not(variables.SKIP_JOB), eq(variables['IMAGE'], 'mingw-check'))
164 displayName: Verify the publish_toolstate script works
168 # Remove any preexisting rustup installation since it can interfere
169 # with the cargotest step and its auto-detection of things like Clippy in
171 rustup self uninstall -y || true
172 if [ "$IMAGE" = "" ]; then
175 src/ci/docker/run.sh $IMAGE
177 #timeoutInMinutes: 180
178 timeoutInMinutes: 600
182 AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
183 TOOLSTATE_REPO_ACCESS_TOKEN: $(TOOLSTATE_REPO_ACCESS_TOKEN)
184 condition: and(succeeded(), not(variables.SKIP_JOB))
185 displayName: Run build
187 # If we're a deploy builder, use the `aws` command to publish everything to our
191 source src/ci/shared.sh
192 if [ "$AGENT_OS" = "Linux" ]; then
193 rm -rf obj/build/dist/doc
194 upload_dir=obj/build/dist
196 rm -rf build/dist/doc
197 upload_dir=build/dist
200 deploy_dir=rustc-builds
201 if [ "$DEPLOY_ALT" == "1" ]; then
202 deploy_dir=rustc-builds-alt
204 retry aws s3 cp --no-progress --recursive --acl public-read ./$upload_dir s3://$DEPLOY_BUCKET/$deploy_dir/$BUILD_SOURCEVERSION
206 AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
207 condition: and(succeeded(), not(variables.SKIP_JOB), or(eq(variables.DEPLOY, '1'), eq(variables.DEPLOY_ALT, '1')))
208 displayName: Upload artifacts
210 # Upload CPU usage statistics that we've been gathering this whole time. Always
211 # execute this step in case we want to inspect failed builds, but don't let
212 # errors here ever fail the build since this is just informational.
213 - bash: aws s3 cp --acl public-read cpu-usage.csv s3://$DEPLOY_BUCKET/rustc-builds/$BUILD_SOURCEVERSION/cpu-$SYSTEM_JOBNAME.csv
215 AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
216 condition: variables['AWS_SECRET_ACCESS_KEY']
217 continueOnError: true
218 displayName: Upload CPU usage statistics