Check-in [58fe99e749]
Not logged in
Overview
Comment:wiki references
Downloads: Tarball | ZIP archive | SQL archive
Timelines: family | ancestors | descendants | both | trunk
Files: files | file ages | folders
SHA1: 58fe99e749cc4e596ab81f7915b3b512a2d2ca17
User & Date: martin_vahi on 2017-05-16 00:49:42
Other Links: manifest | tags
Context
2017-05-19 18:47
wiki reference upgrade check-in: dee8e3e8ea user: vhost7825ssh tags: trunk
2017-05-16 00:49
wiki references check-in: 58fe99e749 user: martin_vahi tags: trunk
2017-03-22 04:01
Massive_Array_of_Internet_Disks Safe MaidSafe + Rust check-in: 0105d215ce user: martin_vahi tags: trunk
Changes
Hide Diffs Unified Diffs Ignore Whitespace Patch

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/after_success.sh version [01f3445e7c].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
#!/bin/bash

# Travis "after_success" hook: deploys the generated docs (coverage upload is
# currently disabled), but only from one specific job of the build matrix.

# Trace each expanded command as it runs
set -x

# Abort on the first failing command
set -o errtrace
trap 'exit' ERR

CHANNEL=${TRAVIS_RUST_VERSION:-${CHANNEL:-stable}}

# Docs are only deployed (and coverage only run) for a direct push to
# 'master' built by the first job of the Travis matrix, which is expected
# to be a stable-channel Linux run.
deploy_wanted=false
if [[ $CHANNEL = stable ]] && [[ $TRAVIS_OS_NAME = linux ]] &&
   [[ ${TRAVIS_JOB_NUMBER##*.} -eq 1 ]] &&
   [[ $TRAVIS_BRANCH = master ]] && [[ $TRAVIS_PULL_REQUEST = false ]]; then
  deploy_wanted=true
fi

if [[ $deploy_wanted = false ]]; then
  exit 0
fi

curl -sSL https://github.com/maidsafe/QA/raw/master/Bash%20Scripts/Travis/deploy_docs.sh | bash
# curl -sSL https://github.com/maidsafe/QA/raw/master/Bash%20Scripts/Travis/run_coverage.sh | bash
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/build_and_run_tests.sh version [c9eb426954].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
#!/bin/bash

# Travis CI helper: builds the crate and runs its test suite once per entry
# in the space-separated $Features list.  On nightly Rust the tests are only
# compiled (with clippy enabled), not executed.

# Show expanded commands while running
set -x

# Stop the script if any command fails
set -o errtrace
trap 'exit' ERR

# NOTE(review): unquoted — assumes the Travis checkout path has no spaces.
cd $TRAVIS_BUILD_DIR

# Print a backtrace from any panicking test
RUST_BACKTRACE=1
export RUST_BACKTRACE

if [[ $TRAVIS_RUST_VERSION = nightly ]]; then
  # To ignore this failure, set `allow_failures` in build matrix for nightly builds
  # Nightly: compile-only (--no-run) with the clippy feature, for the default
  # feature set and then once per extra feature.
  cargo test --no-run --features clippy
  for Feature in $Features; do
    cargo test --no-run --features "clippy $Feature"
  done
else
  # Run the tests for each feature
  for Feature in $Features; do
    cargo build --release --verbose --features $Feature
    cargo test --release --features $Feature
  done
  if [ -z "$Features" ]; then
    # There are no features, so run the default test suite
    cargo build --release --verbose
    cargo test --release
  elif [[ $TRAVIS_OS_NAME = linux ]]; then
    # We currently don't run the default tests if there are any features
    cargo test --release --verbose --no-run
  fi
fi
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/deploy_docs.sh version [3030a1946c].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
#!/bin/bash

# Builds the crate's rustdoc output and publishes it to the repository's
# gh-pages branch, tagging a release first when the last commit message is
# "Version change to X.Y.Z".

# Show executed commands (not expanded) while running
set -v

# Stop the script if any command fails
set -o errtrace
trap 'exit' ERR

# Repo name, i.e. everything after the final '/' of "owner/repo".
ProjectName=${TRAVIS_REPO_SLUG##*/};

cd $TRAVIS_BUILD_DIR
# Prefer docs with diagrams; fall back to a plain doc build if the crate has
# no "generate-diagrams" feature.
cargo doc --features generate-diagrams || cargo doc
# Top-level redirect into the crate's own doc index.
echo "<meta http-equiv=refresh content=0;url=${ProjectName}/index.html>" > target/doc/index.html
# Stash the generated docs outside the work tree before switching branches.
rm -rf /tmp/doc
mv target/doc /tmp/doc

git config --global user.email qa@maidsafe.net
git config --global user.name MaidSafe-QA

# Normalised (lower-cased, space-stripped) last commit message, empty unless
# it contains "version change to ".
CommitMessage=$(git log -1 | tr '[:upper:]' '[:lower:]' | grep "version change to " | tr -d ' ')
if [[ $CommitMessage == versionchangeto* ]]; then
  Version=${CommitMessage##*to}
  Commit=$(git rev-parse HEAD)
  git tag $Version -am "Version $Version" $Commit
  # Pipe output to null if the following command fails to thereby not print expanded variables
  git push https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG} tag $Version > /dev/null 2>&1
fi

# Since we did a shallow clone which only clones the master branch, ensure we can fetch the gh-pages
# branch if it exists
git remote set-branches origin '*'
git fetch

# Checkout to the gh-pages branch if it already exists, otherwise clear out the repo and prepare
# for the first push to gh-pages.
if git rev-parse --verify origin/gh-pages > /dev/null 2>&1; then
  git checkout gh-pages
  git clean -df
else
  rm -rf ./*
  # NOTE(review): this line looks garbled — presumably it was meant to delete
  # dotfiles (e.g. `rm ./.*`); as written, `**&` backgrounds the rm.  Confirm
  # against the upstream maidsafe/QA script before relying on it.
  rm ./.**&
  git checkout --orphan gh-pages
  git rm -rf .
  echo "<meta http-equiv=refresh content=0;url=master/${ProjectName}/index.html>" > index.html
  touch .nojekyll
fi

# Replace the "master" docs with the freshly-built set.
rm -rf master
cp -rf /tmp/doc master

# Temporary patch to remove any named version docs
rm -rf 0* > /dev/null 2>&1

# If this run tagged a release above, also refresh the "latest" docs.
if [[ -n ${Version+x} ]]; then
  rm -rf latest
  cp -rf /tmp/doc latest
fi

git add --all . > /dev/null 2>&1
if git commit -m"Updated documentation." > /dev/null 2>&1; then
  # Pipe output to null if the following command fails to thereby not print expanded variables
  git push https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages > /dev/null 2>&1
fi

git checkout master
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/install_elfutils.sh version [50259b8c2b].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
#!/bin/bash

# Builds and installs elfutils (a kcov dependency) into a cacheable
# per-version directory, but only on the single Travis job that runs
# coverage.

# Show expanded commands while running
set -x

# Stop the script if any command fails
set -o errtrace
trap 'exit' ERR

# We only need elfutils to run coverage, and this only happens if it's a pull request to 'master'
# and only on the first job number in the Travis matrix.  This should be a Linux run.
if [[ $TRAVIS_RUST_VERSION = stable ]] && [[ $TRAVIS_OS_NAME = linux ]] && [[ ${TRAVIS_JOB_NUMBER##*.} -eq 1 ]] &&
   [[ $TRAVIS_BRANCH = master ]] && [[ $TRAVIS_PULL_REQUEST = false ]]; then

  # Set the elfutils version if it isn't already set
  if [ -z "$ElfUtilsVersion" ]; then
    ElfUtilsVersion=0.164
  fi

  # Check to see if elfutils dir has been retrieved from cache
  ElfUtilsInstallPath=$HOME/elfutils/$ElfUtilsVersion
  # CPU core count via nproc (Linux) or sysctl (OS X), defaulting to 1.
  Cores=$((hash nproc 2>/dev/null && nproc) || (hash sysctl 2>/dev/null && sysctl -n hw.ncpu) || echo 1)
  if [ ! -d "$ElfUtilsInstallPath/lib" ]; then
    # If not, build and install it
    cd $HOME
    rm -rf elfutils
    mkdir -p temp
    cd temp
    wget https://fedorahosted.org/releases/e/l/elfutils/$ElfUtilsVersion/elfutils-$ElfUtilsVersion.tar.bz2
    tar jxf elfutils-$ElfUtilsVersion.tar.bz2
    cd elfutils-$ElfUtilsVersion
    ./configure --prefix=$ElfUtilsInstallPath
    make check -j$Cores
    make install
  else
    echo "Using cached elfutils directory (version $ElfUtilsVersion)"
  fi

  # Make the freshly-installed libraries visible to later build steps.
  export LD_LIBRARY_PATH=$ElfUtilsInstallPath/lib:$LD_LIBRARY_PATH
  export ElfUtilsInstallPath=$ElfUtilsInstallPath

fi

# Clear the error trap so a skipped install doesn't fail the build.
trap '' ERR
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
























































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/install_libsodium.sh version [5630345f22].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
#!/bin/bash

# Builds and installs a static libsodium into a cacheable per-version
# directory under $HOME, then points pkg-config at it.

# Set the libsodium version if it isn't already set
if [ -z "$LibSodiumVersion" ]; then
  LibSodiumVersion=1.0.9
fi

# Check to see if libsodium dir has been retrieved from cache
LibSodiumInstallPath=$HOME/libsodium/$LibSodiumVersion
if [ ! -d "$LibSodiumInstallPath/lib" ]; then
  # If not, build and install it
  cd $HOME
  rm -rf libsodium
  mkdir -p temp
  cd temp
  wget https://github.com/jedisct1/libsodium/releases/download/$LibSodiumVersion/libsodium-$LibSodiumVersion.tar.gz
  tar xfz libsodium-$LibSodiumVersion.tar.gz
  cd libsodium-$LibSodiumVersion
  # Static-only build (no shared lib, no position-independent executable).
  ./configure --prefix=$LibSodiumInstallPath --enable-shared=no --disable-pie
  # CPU core count via nproc (Linux) or sysctl (OS X), defaulting to 1.
  Cores=$((hash nproc 2>/dev/null && nproc) || (hash sysctl 2>/dev/null && sysctl -n hw.ncpu) || echo 1)
  make check -j$Cores
  make install
else
  echo "Using cached libsodium directory (version $LibSodiumVersion)";
fi

export PKG_CONFIG_PATH=$LibSodiumInstallPath/lib/pkgconfig:$PKG_CONFIG_PATH
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






















































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/run_cargo_prune.sh version [79a4fe6ff1].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
#!/bin/bash

# Installs (or updates to the latest published) cargo-prune, then runs it to
# trim stale dependencies from the target directory.

# Installed cargo-prune version, e.g. "0.1.2"; empty when it has never been
# installed (grep then matches nothing).
cur_ver=$(cargo install --list | grep 'prune v' | sed -E 's/.*([0-9]+\.[0-9]+\.[0-9]+).*/\1/g')

# BUG FIX: the original tested `$? -ne 0`, but $? held the exit status of the
# final pipeline stage (sed), which succeeds even when grep finds no match —
# so this branch never ran.  Test for an empty result instead.
if [ -z "$cur_ver" ]; then
  # cargo-prune is not installed yet
  cargo install cargo-prune
fi

# Newest cargo-prune version published on crates.io.
latest_ver=$(curl -s -H "Accept: application/json" -H "Content-Type: application/json" -X GET https://crates.io/api/v1/crates/cargo-prune | sed -E 's/.*"max_version":"([^"]*).*/\1/g')

if [ "$cur_ver" != "$latest_ver" ]; then
  # Update to latest cargo-prune
  cargo install cargo-prune --force
fi

cargo prune
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/run_clippy.sh version [8b6e8486e3].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
#!/bin/bash

# Runs clippy lints over the crate (and each feature combination), but only
# on nightly Rust and only on Linux; every other configuration is a no-op.

# Trace each expanded command as it runs
set -x

# Abort on the first failing command
set -o errtrace
trap 'exit' ERR

cd $TRAVIS_BUILD_DIR

if [[ $TRAVIS_RUST_VERSION = nightly ]]; then
  # Clippy is only exercised on the Linux builders
  [[ $TRAVIS_OS_NAME = linux ]] || exit 0
  # To ignore a clippy failure, set `allow_failures` in the build matrix for
  # nightly builds.
  cargo rustc --features clippy -- --test -Zno-trans
  for feature in $Features; do
    cargo rustc --features "clippy $feature" -- --test -Zno-trans
  done
fi

# Stop tracing commands
set +x
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


















































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/run_coverage.sh version [2b7030d337].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
#!/bin/bash

# Builds kcov from master, then runs the project's test binaries under it
# and uploads the coverage report to Coveralls.

# Show expanded commands while running
set -x

# Stop the script if any command fails
set -o errtrace
trap 'exit' ERR

# Build and install kcov (which is fast and not versioned, so there's little point in caching it)
cd $HOME
mkdir -p temp
cd temp
wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz
tar xzf master.tar.gz
# $ElfUtilsInstallPath is presumably exported by install_elfutils.sh earlier
# in the build — TODO confirm; it is not set in this script.
cmake -Bbuild_kcov -Hkcov-master -DCMAKE_INSTALL_PREFIX=$HOME/ -DCMAKE_BUILD_TYPE=Release -DCMAKE_INCLUDE_PATH="$ElfUtilsInstallPath/include" -DCMAKE_LIBRARY_PATH="$ElfUtilsInstallPath/lib"
cd build_kcov
# NOTE(review): $Cores is never set in this script; if unset, `make -j`
# receives no job limit — verify it is exported by an earlier step.
make -j$Cores
make install

# Build the project's tests and run them under kcov
if [ ! -z "$Features" ]; then
  WithFeatures=" --features $Features"
fi
cd $TRAVIS_BUILD_DIR
# $WithFeatures deliberately unquoted so it word-splits into arguments.
cargo test --no-run $WithFeatures
ProjectName=${TRAVIS_REPO_SLUG##*/};
$HOME/bin/kcov --coveralls-id=$TRAVIS_JOB_ID --include-path=src target/kcov target/debug/$ProjectName-*
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
























































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/run_rustfmt.sh version [24107c903b].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
#!/bin/bash

# Installs (or updates to the latest published) rustfmt, then checks every
# .rs file in diff mode, exiting non-zero if any file is badly formatted.

# Installed rustfmt version, e.g. "0.8.1"; empty when it has never been
# installed (grep then matches nothing).
cur_ver=$(cargo install --list | grep 'rustfmt v' | sed -E 's/.*([0-9]+\.[0-9]+\.[0-9]+).*/\1/g')

# BUG FIX: the original tested `$? -ne 0`, but $? held the exit status of the
# final pipeline stage (sed), which succeeds even when grep finds no match —
# so this branch never ran.  Test for an empty result instead.
if [ -z "$cur_ver" ]; then
  # rustfmt is not installed yet
  cargo install rustfmt
fi

# Newest rustfmt version published on crates.io.
latest_ver=$(curl -s -H "Accept: application/json" -H "Content-Type: application/json" -X GET https://crates.io/api/v1/crates/rustfmt | sed -E 's/.*"max_version":"([^"]*).*/\1/g')

if [ "$cur_ver" != "$latest_ver" ]; then
  # Update to latest rustfmt
  cargo install rustfmt --force
fi

# Check every source file, remembering any failure but continuing so the log
# lists all badly-formatted files.  (while read replaces the original
# `for i in $(find …)`, which word-split paths containing spaces.)
res=0
while IFS= read -r src_file; do
  if ! $HOME/.cargo/bin/rustfmt --skip-children --write-mode=diff "$src_file"; then
    res=1
  fi
done < <(find . -name '*.rs')

exit $res
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


















































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/add_public_keys.sh version [d95cef1438].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
#!/bin/bash

# Appends every key file from this repo's "Public Keys" folder to the
# remote target's ~/.ssh/authorized_keys.

if [[ $# != 1 || "$1" == "-h" || "$1" == "--help" ]]; then
  echo "
This adds all public keys inside the Public Keys folder of this repository to
the ~/.ssh/authorized_keys file of the remote target.

You should pass a single arg to this script which will be the target user and
hostname for the ssh commands.  You must already be able to ssh to the target
without the need for a password.

Example usage:
    ./${0##*/} peer1@peer_prog.maidsafe.net
"
  exit 0;
fi

# Restrict word-splitting to newline/backspace so glob results whose names
# contain spaces survive the unquoted expansion in the for-loop below.
IFS=$(echo -en "\n\b")
# Absolute path of the "Public Keys" folder, resolved relative to this script.
PublicKeysDir=$(cd "$(dirname "${BASH_SOURCE[0]}")/../Public Keys" && pwd)

for File in $PublicKeysDir/*
do
  echo "Processing \"$File\"..."
  # NOTE(review): `$(echo -en "\n")` appears intended to prepend a newline
  # before the key, but command substitution strips trailing newlines so it
  # expands to the empty string — confirm against upstream intent.
  echo "$(echo -en "\n")$(cat $File)" | ssh "$1" 'cat >> .ssh/authorized_keys'
done
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


















































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/create_linux_vault_package.sh version [e01e657554].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
#!/bin/bash

# Builds the SAFE Vault Linux packages via the repo's create_packages.sh and
# uploads the results to the apt and yum servers in parallel.

# Stop the script if any command fails
set -o errtrace
trap 'exit' ERR

# Prints usage and exits; doubles as the error path for bad arguments.
function help {
  echo "
This invokes the 'create_package.sh' script in the SAFE Vault project which
builds the linux packages.  It then copies them to the apt and yum servers.

You should either invoke this script from the root of the safe_vault repo, or
pass a single arg to this script which is the absolute path to the safe_vault
repo.

Ideally, you should be able to ssh to the apt and yum servers without the need
for a password.

Example usage:
    ./${0##*/} \"/home/maidsafe/safe_vault\"
"
  exit 0;
}

# Handle help arg
if [[ "$1" == "-h" || "$1" == "--help" ]]; then
  help
fi

# Set path to script in safe_vault repo
if [[ "$#" -eq 1 ]]; then
  VaultRoot="$1"
else
  VaultRoot=$(pwd)
fi
CreatePackageScript="$VaultRoot/installer/linux/create_packages.sh"

# Check the repo path contains the expected script
if [[ "$#" -gt 1 || ! -x "$CreatePackageScript" ]]; then
  help
fi

# Invoke the script and scp the resulting packages
CurrentPath=$(pwd)
cd $VaultRoot
"$CreatePackageScript"
cd $CurrentPath
# Ensure the per-init-system target folders exist before uploading.
ssh apt.maidsafe.net 'mkdir -p ~/systemd/ && mkdir -p ~/SysV-style/'
ssh yum.maidsafe.net 'mkdir -p ~/systemd/ && mkdir -p ~/SysV-style/'
# All uploads run in the background; the trailing `wait` collects them.
scp "$VaultRoot"/packages/linux/safe_vault_*.tar.gz apt.maidsafe.net:~/ &
scp "$VaultRoot"/packages/linux/systemd/safe*.deb apt.maidsafe.net:~/systemd/ &
scp "$VaultRoot"/packages/linux/SysV-style/safe*.deb apt.maidsafe.net:~/SysV-style/ &
scp "$VaultRoot"/packages/linux/safe_vault_latest_version.txt apt.maidsafe.net:~/ &
scp "$VaultRoot"/packages/linux/systemd/safe*.rpm yum.maidsafe.net:~/systemd/ &
scp "$VaultRoot"/packages/linux/SysV-style/safe*.rpm yum.maidsafe.net:~/SysV-style/ &
scp "$VaultRoot"/packages/linux/safe_vault_latest_version.txt yum.maidsafe.net:~/ &

wait
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




















































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/execute_command_on_all_seeds.sh version [8f8520f04e].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
#!/bin/bash

# Runs one command, in parallel, on every seed VM as the "qa" user.

# Print usage text for -h/--help or a wrong argument count.
print_usage() {
  echo "
This executes a single command on each of the seed VMs as the \"qa\" user.
You should pass a single arg to this script which will be the command
to execute.  It can't require user-input on the remote machine.

Example usage:
    ./${0##*/} \"ls -laG\"
"
}

if [[ $# != 1 || "$1" == "-h" || "$1" == "--help" ]]; then
  print_usage
  exit 0;
fi

# Show commands as they execute
set -x

# Fan the command out to all six seeds, then wait for every ssh to finish.
for seed_index in {1..6}; do
  ssh qa@seed-$seed_index.maidsafe.net "$1" &
done

wait
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<












































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/execute_command_on_group.sh version [37deb60b10].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
#!/bin/bash

# Runs one command sequentially on each IP listed in ./ip_list, as the "qa"
# user on the remote machine.

IpList=ip_list

if [[ $# != 1 || "$1" == "-h" || "$1" == "--help" ]]; then
  echo "
This executes a single command on each of the entered IPs.  The remote IPs
should be a space-separated list in a file called \"$IpList\" in the current
working directory (i.e. where you run this script from - not necessarily the
folder containing this script).

The command will be executed as the \"qa\" user on the remote machine.

You should pass a single arg to this script which will be the command
to execute.  It can't require user-input on the remote machine.

Example usage:
    ./${0##*/} \"ls -laG\"
"
  exit 0;
fi

if [ ! -f $IpList ]; then
    echo "
This script requires a space-separated list of IPs to exist in a file called
\"$IpList\" in the current working directory.

For further info, run this script with '--help'
"
    exit 1
fi

# Unquoted expansion of $IPs deliberately word-splits the file contents.
IPs=`cat $IpList`
for peer in $IPs; do
  # Show commands as they execute
  set -x
  ssh qa@$peer "$1"
  set +x
done
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<














































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/scp_droplet_logfiles.sh version [6191bb5922].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
#!/bin/bash

# Downloads ~/Node.log from each IP listed in ./ip_list, saving them locally
# as Node001.log, Node002.log, ... in listing order.

IpList=ip_list

if [[ "$1" == "-h" || "$1" == "--help" ]]; then
  echo "
This tries to scp /home/qa/Node.log from each of the entered IPs.  The remote
IPs should be a space-separated list in a file called \"$IpList\" in the current
working directory (i.e. where you run this script from - not necessarily the
folder containing this script).

The logfiles will each be renamed to include the nodes' index numbers, e.g.
Node 1's logfile will be renamed from Node.log to Node001.log.  The files will
be copied to the current working directory.

"
  exit 0;
fi

if [ ! -f $IpList ]; then
    echo "
This script requires a space-separated list of IPs to exist in a file called
\"$IpList\" in the current working directory.

For further info, run this script with '--help'
"
    exit 1
fi

# Unquoted expansion of $IPs deliberately word-splits the file contents.
IPs=`cat $IpList`
Count=1
for Peer in $IPs; do
  # Build the scp invocation with the zero-padded node index in the name.
  printf -v Command "scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no qa@$Peer:~/Node.log Node%03d.log" $Count
  # Show commands as they execute
  set -x
  # $Command is deliberately unquoted so it word-splits into an arg list.
  $Command
  set +x
  ((++Count))
done
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<














































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/scp_on_all_seeds.sh version [8cff2ba158].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
#!/bin/bash

# scp wrapper that fans one copy operation out to all six seed VMs in
# parallel, rewriting the literal "REMOTE:" to qa@seed-N.maidsafe.net:.

if [[ "$1" == "-h" || "$1" == "--help" ]]; then
  # BUG FIX: corrected the typo "comamnd" in the user-facing usage text.
  echo "
This runs an scp command across each of the seed VMs.

Replace \"scp\" with this script and use the term \"REMOTE\" to represent
the \"qa\" user on the remote endpoint.

Example usage:
Copy the file \"foobar.txt\" from seed VM to local folder
    ./${0##*/} REMOTE:foobar.txt /some/local/dir

Copy the dir \"foo\" from local to seeds' \"bar\" dir
    ./${0##*/} -r foo REMOTE:/some/remote/dir/bar
"
  exit 0;
fi

# Captures the text before and after "REMOTE:" so the real endpoint can be
# spliced in between.
regex='(.*)REMOTE:(.*)'
# Show commands as they execute
set -x
for peer in 1 2 3 4 5 6; do
  command="scp"
  for var in "$@"; do
    # Replace every occurrence of REMOTE: in this argument.
    while [[ $var =~ $regex ]]; do
      var="${BASH_REMATCH[1]}qa@seed-$peer.maidsafe.net:${BASH_REMATCH[2]}"
    done
    command="$command $var"
  done
  # NOTE(review): $command is deliberately unquoted so it word-splits back
  # into an argument list; arguments containing spaces won't survive this.
  $command &
done

wait
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/scp_on_group.sh version [b7fd70b992].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
#!/bin/bash

# scp wrapper that repeats one copy operation for each IP listed in
# ./ip_list, rewriting the literal "REMOTE:" to qa@<ip>:.

IpList=ip_list

if [[ "$1" == "-h" || "$1" == "--help" ]]; then
  echo "
This runs an scp command across each of the entered IPs.  The remote IPs should
be a space-separated list in a file called \"$IpList\" in the current working
directory (i.e. where you run this script from - not necessarily the folder
containing this script).

Replace \"scp\" with this script and use the term \"REMOTE\" to represent the \"qa\"
user on the remote endpoint.

To avoid having to confirm each IP's identity, you can pass the args:
    -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no

Example usage:
Copy the dir \"foo\" from local to remote IP's \"bar\" dir
    ./${0##*/} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -r foo REMOTE:/some/remote/dir/bar

Copy the file \"log*.txt\" from remote IP to local folder
    ./${0##*/} REMOTE:log*.txt /some/local/dir


"
  exit 0;
fi

if [ ! -f $IpList ]; then
    echo "
This script requires a space-separated list of IPs to exist in a file called
\"$IpList\" in the current working directory.

For further info, run this script with '--help'
"
    exit 1
fi

# Unquoted expansion of $IPs deliberately word-splits the file contents.
IPs=`cat $IpList`
# Captures the text before and after "REMOTE:" so the endpoint can be
# spliced in between.
regex='(.*)REMOTE:(.*)'
for peer in $IPs; do
  command="scp"
  for var in "$@"; do
    # Replace every occurrence of REMOTE: in this argument.
    while [[ $var =~ $regex ]]; do
      var="${BASH_REMATCH[1]}qa@$peer:${BASH_REMATCH[2]}"
    done
    command="$command $var"
  done
  # Show commands as they execute
  set -x
  # $command is deliberately unquoted so it word-splits into an arg list.
  $command
  set +x
done
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<












































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/soak_test.sh version [e00d92e3d6].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
#!/bin/bash

# Soak-test driver: repeatedly runs a test binary and reports its failure
# rate after every iteration.  Replace <INSERT TEST HERE> with the test to
# soak.  The loop runs until interrupted (e.g. Ctrl-C).

((count=0))
((failurec=0))
((perc=0))

# Print the overall failure summary.
print_summary() {
  echo "======================================"
  echo "Failed $failurec times out of $count ($perc perc)"
}

# BUG FIX: the summary at the bottom of the original script was unreachable
# because the loop never exits on its own; print it when the run is
# interrupted instead.
trap 'print_summary; exit' INT TERM

# `while true` avoids forking a `(true)` subshell per iteration, which the
# original `while(true)` form did.
while true
do
  ((count=count+1))
  ./<INSERT TEST HERE>
  if [ $? -ne 0 ]
  then
    ((failurec=failurec+1))
  fi
  ((perc = 100*failurec/count))
  echo "Failed $failurec times out of $count ($perc perc)"
  #sleep 30
done
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Add New Repository.md version [84bb4c199f].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
## Add a New GitHub Repository - QA Steps

New GitHub repo created? This document walks you through the QA steps to standardise your repo so that it matches all the other MaidSafe GitHub repositories. For these steps and tools, please use the MaidSafe-QA user unless the instructions specify otherwise.

### Fork the New Repository

While logged into GitHub under your own account, fork the new repo and clone it locally.

### Login to GitHub as MaidSafe-QA

Log out of your own account and log back in as the MaidSafe-QA user.

*At this stage you need to request temporary GitHub "Admin" privileges from Fraser, Viv or David.*

### Add Repository to Travis

Login to [Travis](https://travis-ci.org/), sync account, find the new repository you want to add and flick the switch to on.

![Sync Account](Images/01.png)

### Add Repository to AppVeyor

Login to [AppVeyor](https://ci.appveyor.com/login) and select  `+ NEW PROJECT`

![AppVeyor](Images/02.png)

Then select the repository you would like to add

![AppVeyor select repo](Images/03.png)

### Add appveyor.yml and .travis.yml Scripts to the New Repository

From another [MaidSafe GitHub repository](https://github.com/maidsafe), copy and add the `appveyor.yml` and `.travis.yml` files to the root of your newly-forked local clone of the new repository. The `.travis.yml` will require minor tweaking (more of which in the following steps) especially creating and updating the secure token, which is used to upload rust documentation.

### Give Travis Permissions

While still logged into GitHub as the MaidSafe-QA user, go to settings and select "Personal access tokens". Now click `Generate new token` and create a new "Travis Deploy Token - <new repo name>"

![Travis permissions](Images/04.png)

and limit scopes to `public_repo` as shown below

![Limit scopes](Images/05.png)

Once you have clicked on "Generate token", copy the output as you will not see it again.

[Install Travis gem](https://github.com/travis-ci/travis.rb#installation) to encrypt secure GitHub access

Run this, where `<YOUR_TOKEN>` is the one we copied in the previous step.

`travis encrypt -r maidsafe/<new_repo> GH_TOKEN=<YOUR_TOKEN>`

Edit the `.travis.yml` file you added to the new repo and replace the long string in the line `-secure:` with the output you have just generated - example of what this looks like is below (the string has been shortened in this image).

![travis.yml](Images/06.png)

If you are not at this point going to update the repository's `README.md` then you can push all your local changes upstream and issue a PR to add them to the main repository.

### Webhooks - Add Highfive

Go to the project's settings (the `maidsafe` fork - not your fork) *> Settings > Webhooks & services > Add webhook*

The Payload URL is

```
http://visualiser.maidsafe.net/cgi-bin/highfive/newpr.py
```

![Webhooks](Images/07.png)

![Manage webhook](Images/08.png)


### Highfive Backend Configuration

SSH (details in private assets GitHub repository) to the droplet hosting Highfive

![Droplet](Images/09.png)

Navigate to `/usr/lib/cgi-bin/highfive/configs/`

![ls](Images/10.png)

create a new `<repository_name>.json` file (copy an existing .json file)

![json edit](Images/11.png)

Edit the new `<repository_name>.json` file and update the maintainers' names.

The important section is "groups" - note that entries & file names are case sensitive.

### Add Coverage

Login to [coveralls.io](https://coveralls.io/) using the MaidSafe-QA GitHub account and click `RE-SYNC REPOS`

![coveralls](Images/12.png)

Click `ADD REPOS`

![add repo](Images/13.png)

Flick the switch on your new repository

![flick the switch](Images/14.png)

### Update New Repo's `README.md`

![repo example](Images/15.png)

Above is a screenshot and below is a template, best take the markdown from another repository and edit to fit the purposes of the new repository.

# < repository_name >

[![](https://img.shields.io/badge/Project%20SAFE-Approved-green.svg)](http://maidsafe.net/applications) [![](https://img.shields.io/badge/License-GPL3-green.svg)](https://github.com/maidsafe/crust/blob/master/COPYING)


**Primary Maintainer:** < name > (< email_address >)

**Secondary Maintainer:** < name > (< email_address >)

Reliable peer-to-peer network connections in Rust with NAT traversal.

|Crate|Linux/OS X|Windows|Coverage|Issues|
|:---:|:--------:|:-----:|:------:|:----:|
|[![](http://meritbadge.herokuapp.com/crust)](https://crates.io/crates/crust)|[![Build Status](https://travis-ci.org/maidsafe/crust.svg?branch=master)](https://travis-ci.org/maidsafe/crust)|[![Build status](https://ci.appveyor.com/api/projects/status/ajw6ab26p86jdac4/branch/master?svg=true)](https://ci.appveyor.com/project/MaidSafe-QA/crust/branch/master)|[![Coverage Status](https://coveralls.io/repos/maidsafe/crust/badge.svg)](https://coveralls.io/r/maidsafe/crust)|[![Stories in Ready](https://badge.waffle.io/maidsafe/crust.png?label=ready&title=Ready)](https://waffle.io/maidsafe/crust)|

|[API Documentation - master branch](http://maidsafe.net/crust/master)|[SAFE Network System Documentation](http://systemdocs.maidsafe.net)|[MaidSafe website](http://maidsafe.net)| [SAFE Network Forum](https://forum.safenetwork.io)|
|:------:|:-------:|:-------:|:-------:|


## Overview
< insert_overview >
## Todo Items
< insert_todo_items >

*In the above example the badges and links are for `crust` just for illustrative purposes*

One niggle worth noting for AppVeyor badges that has caught a few folk out: you need to grab the markdown for master branch badge - this can be found on the AppVeyor site in the new repo page under: *Settings > Badges* and is the 6th or last entry on the page see below.
This is the one that needs pasted into the project's `README.md` and the QA `README.md`

![AppVeyor badge](Images/16.png)

### Switch On "Build only if .travis.yml / appveyor.yml is present"

Log into Travis and go to repository *> settings > general settings* and switch `ON` *Build only if .travis.yml is present* setting.

![Travis yml present switch](Images/17.png)

Log into Appveyor and go to repository *> settings > general* and tick the *Do not build tags* , *Skip branches without appveyor.yml* and *Rolling builds* check boxes.

![Appveyor yml present switch](Images/19.png)

![Appveyor yml present switch](Images/18.png)

### Add Reviewable

Login to https://reviewable.io using the MaidSafe-QA GitHub account and go to *Repositories* section and toggle to green to enable Reviewable for pull requests.

![Reviewable switch](Images/20.png)


### Update QA readme.md

Finally add a new entry to https://github.com/maidsafe/QA/blob/master/README.md and issue a PR for this.

### Revoke Github "Admin" from MaidSafe-QA user

Once everything is complete, we need to revoke elevated privileges and reduce them back to "Write".

*Ensure `Owners` have "Admin" privileges and `Bots` and `Developers` have "Write" privileges.*

### Checklist to see if everything is ok:

* Did Travis run?
* Did AppVeyor run?
* Does Highfive allocate a reviewer for a PR?
* Do all the links and badges go to the correct places?
* On a successful merge to master did Travis create and publish the documentation?
* Did Coverage run?
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































































































































































































































































































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Install libsodium for OS X or Linux.md version [3c45363b35].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
# Install libsodium for OS X or Linux

[libsodium](https://github.com/jedisct1/libsodium) is a native dependency of [sodiumoxide](https://github.com/dnaq/sodiumoxide).

Download, unpack the most recent tarball of [libsodium](https://download.libsodium.org/libsodium/releases/), build the static variant and install to "/usr/local/":

```bash
Version=1.0.9
mkdir temp
cd temp
wget https://github.com/jedisct1/libsodium/releases/download/$Version/libsodium-$Version.tar.gz
tar xfz libsodium-$Version.tar.gz
cd libsodium-$Version
./configure --enable-shared=no --disable-pie
Cores=$((hash nproc 2>/dev/null && nproc) || (hash sysctl 2>/dev/null && sysctl -n hw.ncpu) || echo 1)
make check -j$Cores
sudo make install
```

Set environment variable `SODIUM_LIB_DIR` to the folder containing libsodium.a:

```bash
export SODIUM_LIB_DIR=/usr/local/lib
```

You can make this a permanent environment variable by adding this export command to your OS / shell specific .profile config file (e.g. `~/.bashrc`, `~/.bash_profile`).

If you wish to do this system wide on Ubuntu for example you could update `/etc/environment`.
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
























































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Install libsodium for Windows.md version [e7e8c05f48].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
# Install libsodium for Windows

[libsodium](https://github.com/jedisct1/libsodium) is a native dependency of [sodiumoxide](https://github.com/dnaq/sodiumoxide).

Download the appropriate version (32-bit or 64-bit) [prebuilt libsodium static library](https://github.com/maidsafe/QA/tree/master/Dependencies/Windows).

N.B. The path of the folder where libsodium.a will live cannot contain any spaces.

Set environment variable `SODIUM_LIB_DIR` to the folder containing libsodium.a:

```batch
setx SODIUM_LIB_DIR <path-to-libsodium.a-dir>
```

Start a new command-prompt to continue.
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






























Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Installers/Vault/Draft Tests/Linux Process.md version [579123b592].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
# Create Package for Vault on Linux

- [ ] Run the package creation script `safe_vault/installer/linux/scripts/create_packages.sh` in the `safe_vault` repository
- Check RPM (on e.g. a Fedora test machine)
  - Check installer can upgrade an existing version which is running
    - [ ] Check test machine has older version already installed and `safe_vault` is running
    - [ ] Copy the current bootstrap and config files
    - [ ] New installer should run without errors
    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
  - Check installer can upgrade an existing version which is not running
    - [ ] Check test machine has older version already installed and `safe_vault` is NOT running
    - [ ] Copy the current bootstrap and config files
    - [ ] New installer should run without errors
    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
  - Check installer succeeds on machine with no previous version installed
    - [ ] Check test machine has no version already installed
    - [ ] Installer should run without errors
    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
  - Check repair where current version already installed
    - [ ] Kill and remove existing version of `safe_vault`
    - [ ] Copy the current bootstrap and config files
    - [ ] Installer should rerun without errors
    - [ ] Check `safe_vault` is running and is installed in `/usr/bin/`
    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    - [ ] Check bootstrap and config files haven't been overwritten
    - [ ] Remove bootstrap and config files
    - [ ] Installer should rerun without errors
    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    - [ ] Check config file is installed in `/var/cache/safe_vault/` has `-rw-r--r--` permissions and `safe` owner name and `root` group name
  - Check uninstall
    - [ ] Check `safe_vault` is running
    - [ ] Uninstall should run without errors
    - [ ] Check `safe_vault` is not running
    - [ ] Check `safe_vault`, bootstrap and config files have all been removed
  - [ ] Copy installer from slave to yum repository machine
  - [ ] Update yum repository
  - [ ] Check `yum install safe-vault` works on a clean machine
  - [ ] Check `yum update` updates existing version
- Check .deb (on e.g. an Ubuntu test machine)
  - Check installer can upgrade an existing version which is running
    - [ ] Check test machine has older version already installed and `safe_vault` is running
    - [ ] Copy the current bootstrap and config files
    - [ ] New installer should run without errors
    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
  - Check installer can upgrade an existing version which is not running
    - [ ] Check test machine has older version already installed and `safe_vault` is NOT running
    - [ ] Copy the current bootstrap and config files
    - [ ] New installer should run without errors
    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
  - Check installer succeeds on machine with no previous version installed
    - [ ] Check test machine has no version already installed
    - [ ] Installer should run without errors
    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
  - Check repair where current version already installed
    - [ ] Kill and remove existing version of `safe_vault`
    - [ ] Copy the current bootstrap and config files
    - [ ] Installer should rerun without errors
    - [ ] Check `safe_vault` is running and is installed in `/usr/bin/`
    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    - [ ] Check bootstrap and config files haven't been overwritten
    - [ ] Remove bootstrap and config files
    - [ ] Installer should rerun without errors
    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    - [ ] Check config file is installed in `/var/cache/safe_vault/` has `-rw-r--r--` permissions and `safe` owner name and `root` group name
  - Check uninstall
    - [ ] Check `safe_vault` is running
    - [ ] Uninstall should run without errors
    - [ ] Check `safe_vault` is not running
    - [ ] Check `safe_vault`, bootstrap and config files have all been removed
  - [ ] Copy installer from slave to apt repository machine
  - [ ] Update apt repository
  - [ ] Check `apt-get install safe-vault` works on a clean machine
  - [ ] Check `apt-get update && apt-get upgrade` updates existing version
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































































































































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Installers/Vault/Draft Tests/OS X Process.md version [d121b73ad8].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
# Create Package for Vault on OS X

- [ ] Run the package creation script `safe_vault/installer/osx/scripts/create_packages.sh` in the `safe_vault` repository
- Check installer can upgrade an existing version which is running
  - [ ] Check test machine has older version already installed and `safe_vault` is running
  - [ ] Copy the current bootstrap and config files
  - [ ] New installer should run without errors
  - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
  - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
  - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
  - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
  - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
- Check installer can upgrade an existing version which is not running
  - [ ] Check test machine has older version already installed and `safe_vault` is NOT running
  - [ ] Copy the current bootstrap and config files
  - [ ] New installer should run without errors
  - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
  - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
  - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
  - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
  - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
- Check installer succeeds on machine with no previous version installed
  - [ ] Check test machine has no version already installed
  - [ ] Installer should run without errors
  - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
  - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
  - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
  - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
  - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
- Check repair where current version already installed
  - [ ] Kill and remove existing version of `safe_vault`
  - [ ] Copy the current bootstrap and config files
  - [ ] Installer should rerun without errors
  - [ ] Check `safe_vault` is running and is installed in `/usr/bin/`
  - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
  - [ ] Check bootstrap and config files haven't been overwritten
  - [ ] Remove bootstrap and config files
  - [ ] Installer should rerun without errors
  - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
  - [ ] Check config file is installed in `/var/cache/safe_vault/` has `-rw-r--r--` permissions and `safe` owner name and `root` group name
- Check uninstall
  - [ ] Check `safe_vault` is running
  - [ ] Uninstall should run without errors
  - [ ] Check `safe_vault` is not running
  - [ ] Check `safe_vault`, bootstrap and config files have all been removed
- Check installer can be downloaded
  - [ ] Webpage should detect OS and show link to appropriate installer
  - [ ] Download installer and hash check it against original
  - [ ] Check downloaded filename is meaningful
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Installers/Vault/Draft Tests/Windows Process.md version [8e241a7f6a].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
# Create Package for Vault on Windows

- [ ] Run the installer creation script `safe_vault/installer/windows/create_installer.ps1` in the safe_vault repository
- Check installer can upgrade (using default options) an existing version installed to default location which is running
  - [ ] Check test machine has older version already installed using default options and `safe_vault.exe` is running
  - [ ] Copy the current bootstrap and config files
  - [ ] New installer should run without errors
  - [ ] Check new version of `safe_vault.exe` is running and is installed in default location
  - [ ] Check bootstrap and config files haven't been overwritten
- Check installer can upgrade (using default options) an existing version installed to default location which is not running
  - [ ] Check test machine has older version already installed using default options and `safe_vault.exe` is NOT running
  - [ ] Copy the current bootstrap and config files
  - [ ] New installer should run without errors
  - [ ] Check new version of `safe_vault.exe` is running and is installed in default location
  - [ ] Check bootstrap and config files haven't been overwritten
- Check installer can upgrade (using default options) an existing version installed to non-default location which is running
  - [ ] Check test machine has older version already installed using NON-default options and `safe_vault.exe` is running
  - [ ] Copy the current bootstrap and config files
  - [ ] New installer should run without errors
  - [ ] Check new version of `safe_vault.exe` is running and is installed in default location
  - [ ] Check old version of `safe_vault.exe` has been deleted from non-default location
  - [ ] Check bootstrap and config files haven't been overwritten
- Check installer succeeds using default options on machine with no previous version installed
  - [ ] Check test machine has no version already installed
  - [ ] Installer should run without errors
  - [ ] Check new version of `safe_vault.exe` is running and is installed in default location
  - [ ] Check bootstrap and config files are installed in their default locations
- Check repair where current version installed using defaults
  - [ ] Kill and remove existing version of `safe_vault.exe`
  - [ ] Copy the current bootstrap and config files
  - [ ] Installer should run repair without errors
  - [ ] Check `safe_vault.exe` is running and has been re-installed to previous location
  - [ ] Check bootstrap and config files haven't been overwritten
  - [ ] Remove bootstrap and config files
  - [ ] Installer should run repair without errors
  - [ ] Check `safe_vault.exe` is running and is installed in previous location
- Check repair where current version installed to non-default location
  - [ ] Kill and remove existing version of `safe_vault.exe`
  - [ ] Copy the current bootstrap and config files
  - [ ] Installer should run repair without errors
  - [ ] Check `safe_vault.exe` is running and has been re-installed to previous location
  - [ ] Check bootstrap and config files haven't been overwritten
  - [ ] Remove bootstrap and config files
  - [ ] Installer should run repair without errors
  - [ ] Check `safe_vault.exe` is running and is installed in previous location
- Check uninstall where current version installed using defaults
  - [ ] Check `safe_vault.exe` is running
  - [ ] Uninstall should run without errors
  - [ ] Check `safe_vault.exe` is not running
  - [ ] Check `safe_vault.exe`, bootstrap and config files have all been removed
- Check uninstall where current version installed to non-default location
  - [ ] Check `safe_vault.exe` is running
  - [ ] Uninstall should run without errors
  - [ ] Check `safe_vault.exe` is not running
  - [ ] Check `safe_vault.exe`, bootstrap and config files have all been removed
- [ ] Copy installer from slave to website
- [ ] Update website to link to new installer
- Check installer can be downloaded
  - [ ] Webpage should detect OS and show link to appropriate installer
  - [ ] Download installer and hash check it against original
  - [ ] Check downloaded filename is meaningful
  - [ ] Check installer has appropriate high-res icon
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




























































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Installers/Vault/Update Apt and Yum Repos.md version [bf7f894505].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
# Update Apt and Yum Repos

##### Build and Transfer 32-bit Package

```sh
ssh maidsafe@178.62.25.205

rustup update
git -C QA pull

cd safe_vault
git pull

~/QA/Bash\ Scripts/create_linux_vault_package.sh
```


##### Build and Transfer 64-bit Package
```sh
ssh maidsafe@178.62.85.248

rustup update
git -C QA pull

cd safe_vault
git pull

~/QA/Bash\ Scripts/create_linux_vault_package.sh
```


##### Update Apt Repo

```sh
ssh maidsafe@apt.maidsafe.net
Version=$(cat safe_vault_latest_version.txt)
cd /var/www/repos/apt/debian

# sudo reprepro remove jessie safe-vault
# sudo reprepro remove wheezy safe-vault

sudo reprepro includedeb jessie ~/SysV-style/safe-vault_"$Version"_amd64.deb
sudo reprepro includedeb jessie ~/SysV-style/safe-vault_"$Version"_i386.deb
sudo reprepro includedeb wheezy ~/SysV-style/safe-vault_"$Version"_amd64.deb
sudo reprepro includedeb wheezy ~/SysV-style/safe-vault_"$Version"_i386.deb

mv ~/safe_*.tar.gz /var/www/tarballs/
```

##### Update Yum Repo

```sh
ssh maidsafe@yum.maidsafe.net
cd /var/www/repos
cp ~/SysV-style/* .
rpm --resign *.rpm
createrepo .  # need '--checksum sha' for at least CentOS <= 5.10  See http://linux.die.net/man/8/createrepo
gpg2 --detach-sign --armor repodata/repomd.xml
```

---

##### Apt Links

- http://www.jejik.com/articles/2006/09/setting_up_and_managing_an_apt_repository_with_reprepro/
- https://mirrorer.alioth.debian.org/reprepro.1.html
- https://wiki.debian.org/HowToSetupADebianRepository#reprepro_for_new_packages
- https://wiki.debian.org/SettingUpSignedAptRepositoryWithReprepro
- https://scotbofh.wordpress.com/2011/04/26/creating-your-own-signed-apt-repository-and-debian-packages/

##### Yum Links

- http://www.idimmu.net/2009/10/20/creating-a-local-and-http-redhat-yum-repository/
- http://yum.baseurl.org/wiki/RepoCreate
- http://fedoranews.org/tchung/gpg/
- https://iuscommunity.org/pages/CreatingAGPGKeyandSigningRPMs.html
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
























































































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Installers/Vault/Windows Installers.md version [1d5cc36384].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
# Windows Installers

On each of the Windows build machines in the office (one 32-bit, one 64-bit, both Windows 7) do the following process:

- Open C:\MaidSafe\safe_vault\installer\windows\safe_vault_32_and_64_bit.aip in a text editor
- Search for the phrase `Enter path to certificate.p12` and replace it with the actual path to the certificate
- Open a **Powershell** terminal and run the following commands:

```batch
. rustup update
. "C:\Program Files\Git\bin\git.exe" -C C:\MaidSafe\QA pull

cd C:\MaidSafe\safe_vault
. "C:\Program Files\Git\bin\git.exe" pull

. installer\windows\create_installer.ps1

. "C:\Program Files\Git\bin\git.exe" checkout .
```
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Managing Remote Servers.md version [fb67971f79].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
# Managing Remote Servers

The objective of this document is to detail a standard process for handling remote servers (e.g.
Droplets), so that all MaidSafe remote servers are secure and can be accessed in a similar way.
This should make working with and scripting for these simpler.

Note that this does not apply to "throw-away" remote servers which are used for short-term testing,
and need not be secure.

### Setting up a New Server

Where there is a choice, we should never allow the host to send us a root password via email.  If a
root or sudo user's password _is_ ever emailed (even internally between two MaidSafe employees), it
should immediately be treated as compromised and changed.

In the case of Droplets, we should add all QA members' SSH keys by default.  This allows any QA
member to ssh into the droplet as root.  However, this should generally only ever be done once, in
order to create the new `qa` user as detailed below.  Working as root is not a good practice and
should be kept to a minimum.

As soon as a new server is created, the following steps should be taken:

1. ssh into the server as root
1. create a sudo user named `qa` with a strong, unique, random password.  On Ubuntu:

    ```bash
    adduser qa
    adduser qa sudo
    ```

    or on Fedora:

    ```bash
    useradd qa
    passwd qa
    usermod qa -a -G wheel
    ```

1. exit the ssh session
1. add details of the server to an existing or new document in the QA folder of the private
[Assets](https://github.com/maidsafe/Assets/tree/master/QA) repository

### Managing the Servers

#### Compromised Password

If the password of a sudo user is compromised (e.g. laptop lost/stolen, password emailed), all
affected servers should be updated as soon as possible.  As passwords should be unique, this should
apply to just a single user account on a single server.

The fix can either be to change the password, or to delete the user.

#### Compromised SSH Key

If the private SSH key of a sudo user is compromised (e.g. laptop lost/stolen, private key
emailed!), all affected servers should be updated as soon as possible.

The hard part will be identifying all the accounts to which this key has access.  For a QA team
member, this will likely include the root user, their own user account and perhaps other users'
accounts on every remote server.

The fix is to remove the affected key from the relevant `authorized_keys` files.  This will be in
`/home/<USER>/.ssh/` or `/root/.ssh/`.

#### Adding new Users

If for whatever reason, a non-QA team member wants to access a remote server, don't share
credentials with that member; instead create a new user account for them.  Normally, the only shared
account should be the `qa` one (an exception is the `peer1` account on the `peer_prog.maidsafe.net`
Droplet).

Before creating an account for them, ensure that they really need access to the secure server.  If
their work can be done on a non-secure, throw-away Droplet for example, then that is the best
option.

Don't give the new user sudo access if not required.  If sudo access _is_ required, then create the
new user with a strong, unique, random password, but **don't email this password** to the team
member.  Instead, send it via a mumble message.

The team member should be asked to never change the password to a weak one, nor to one which they
use elsewhere.  They should also notify QA once the account can be deleted.
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































































































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Rust Lint Checks.md version [4b22ac8612].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
# Rust Lint Checks

```
#![forbid(
    bad_style,              // Includes:
                            // - non_camel_case_types:   types, variants, traits and type parameters
                            //                           should have camel case names,
                            // - non_snake_case:         methods, functions, lifetime parameters and
                            //                           modules should have snake case names
                            // - non_upper_case_globals: static constants should have uppercase
                            //                           identifiers
    exceeding_bitshifts,    // shift exceeds the type's number of bits
    mutable_transmutes,     // mutating transmuted &mut T from &T may cause undefined behavior
    no_mangle_const_items,  // const items will not have their symbols exported
    unknown_crate_types,    // unknown crate type found in #[crate_type] directive
    warnings                // mass-change the level for lints which produce warnings
    )]

#![deny(
    deprecated,                    // detects use of #[deprecated] items
    drop_with_repr_extern,         // use of #[repr(C)] on a type that implements Drop
    improper_ctypes,               // proper use of libc types in foreign modules
    missing_docs,                  // detects missing documentation for public members
    non_shorthand_field_patterns,  // using `Struct { x: x }` instead of `Struct { x }`
    overflowing_literals,          // literal out of range for its type
    plugin_as_library,             // compiler plugin used as ordinary library in non-plugin crate
    private_no_mangle_fns,         // functions marked #[no_mangle] should be exported
    private_no_mangle_statics,     // statics marked #[no_mangle] should be exported
    raw_pointer_derive,            // uses of #[derive] with raw pointers are rarely correct
    stable_features,               // stable features found in #[feature] directive
    unconditional_recursion,       // functions that cannot return without calling themselves
    unknown_lints,                 // unrecognized lint attribute
    unsafe_code,                   // usage of `unsafe` code
    unused,                        // Includes:
                                   // - unused_imports:     imports that are never used
                                   // - unused_variables:   detect variables which are not used in
                                   //                       any way
                                   // - unused_assignments: detect assignments that will never be
                                   //                       read
                                   // - dead_code:          detect unused, unexported items
                                   // - unused_mut:         detect mut variables which don't need to
                                   //                       be mutable
                                   // - unreachable_code:   detects unreachable code paths
                                   // - unused_must_use:    unused result of a type flagged as
                                   //                       #[must_use]
                                   // - unused_unsafe:      unnecessary use of an `unsafe` block
                                   // - path_statements: path statements with no effect
    unused_allocation,             // detects unnecessary allocations that can be eliminated
    unused_attributes,             // detects attributes that were not used by the compiler
    unused_comparisons,            // comparisons made useless by limits of the types involved
    unused_features,               // unused or unknown features found in crate-level #[feature]
                                   // directives
    unused_parens,                 // `if`, `match`, `while` and `return` do not need parentheses
    while_true                     // suggest using `loop { }` instead of `while true { }`
    )]

#![warn(
    trivial_casts,            // detects trivial casts which could be removed
    trivial_numeric_casts,    // detects trivial casts of numeric types which could be removed
    unused_extern_crates,     // extern crates that are never used
    unused_import_braces,     // unnecessary braces around an imported item
    unused_qualifications,    // detects unnecessarily qualified names
    unused_results,           // unused result of an expression in a statement
    variant_size_differences  // detects enums with widely varying variant sizes
    )]

#![allow(
    box_pointers,                  // use of owned (Box type) heap memory
    fat_ptr_transmutes,            // detects transmutes of fat pointers
    missing_copy_implementations,  // detects potentially-forgotten implementations of `Copy`
    missing_debug_implementations  // detects missing implementations of fmt::Debug
    )]
```
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


















































































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Rust Style.md version [7401f55d1a].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
# Contributing Rust code to MaidSafe

We don't maintain a separate style guide but in general try to follow [common good practice](https://aturon.github.io/), write readable and idiomatic code and aim for full test coverage. In addition, this document lists a few decisions we've reached in discussions about specific topics.

## Rust version

We currently use Rust stable 1.16.0.

## Unwrap

Don't unwrap [`Option`](https://doc.rust-lang.org/std/option/enum.Option.html)s or [`Result`](https://doc.rust-lang.org/std/result/enum.Result.html)s, except possibly when:

1. locking a mutex,
2. spawning a thread,
3. joining a thread

or in other patterns where using them makes the code _much simpler_ and it is _obvious at first glance_ to the reader (even one unfamiliar with the code) that the value cannot be `None`/`Err`.

In these cases, as well as in tests, consider using the macros from the [`unwrap` crate](https://crates.io/crates/unwrap).

## Threads

Generally avoid detached threads. Give child threads meaningful names.

This can easily be achieved by preferring to create child threads using [`maidsafe_utilities::thread::named()`](http://docs.maidsafe.net/maidsafe_utilities/master/maidsafe_utilities/thread/fn.named.html).

* it returns a [`Joiner`](http://docs.maidsafe.net/maidsafe_utilities/master/maidsafe_utilities/thread/struct.Joiner.html) which helps to avoid detached threads
* it requires that the child thread is given a name

## Rustfmt

Apply the latest `rustfmt` to new code before committing, using the default configuration or, if present, the repository's `rustfmt.toml` file.

## Function ordering

In `impl`s, always put public functions before private ones.

## Clippy

If a crate has that feature, make sure your code does not produce any new errors when compiling with `--features=clippy`. If you don't agree with a [Clippy lint](https://github.com/Manishearth/rust-clippy#lints), discuss it with the team before explicitly adding an `#[allow(lint)]` attribute.

For clippy, we currently use Clippy 0.0.120 and nightly installed by `rustup install nightly-2017-03-16`:
```rust
rustc --version
rustc 1.17.0-nightly (0aeb9c129 2017-03-15)
```

**Note for Windows users:** Due to a recent bug in rustup, you may get a missing dll error when trying to run `cargo clippy`.  In this case, you can work around the issue by modifying your `PATH` environment variable:

```
setx PATH "%USERPROFILE%\.multirust\toolchains\nightly-2017-03-16-x86_64-pc-windows-gnu\bin;%PATH%"
```

## Cargo

Use `cargo-edit` to update dependencies or keep the `Cargo.toml` in the formatting that `cargo-edit` uses.

## Other crates

Adding new dependencies to MaidSafe crates in general should be discussed in the team first, except if other MaidSafe crates already have the same dependency. E.g. [quick-error](https://crates.io/crates/quick-error) and [unwrap](https://crates.io/crates/unwrap) are fine to use.

## Git Commit Messages

The first line of the commit message should have the format `<type>/<scope>: <subject>`. For details see the [Leaf project's guidelines](https://github.com/autumnai/leaf/blob/master/CONTRIBUTING.md#git-commit-guidelines).
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
































































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Update Snapshot Used by Droplet Deployer.md version [d8b1fae71f].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
## Update Snapshot Used by Droplet Deployer

1. Create a new droplet from the existing "Droplet Deployer" [snapshot][0].
1. Make whatever changes are required (user is `qa`, password is held in [release_config repo][1]).
1. On the droplet, run `sudo rm -rf /root/.ssh/ && sudo shutdown -h now`
1. Once the droplet has shut down, take a new snapshot called `Droplet Deployer`.
1. Replicate [the snapshot][0] to all regions (click the "More" option, then "Add to Region").
1. Rename the [old snapshot][0] to `Old Droplet Deployer` (check "Created" values).
1. [Generate a new Personal Access Token][2].
1. To get the ID of the newly-created snapshot, run `curl -sX GET -H "Content-Type: application/json" -H "Authorization: Bearer <token here>" "https://api.digitalocean.com/v2/images?private=true" | sed -n 's/.*"id":\([^,]*\),"name":"Droplet Deployer".*/\n\1\n\n/p'`
1. If this doesn't yield an ID, it may be due to pagination of the response; you may need to add `&page=2` (or whatever value the last page has) to the end of the URL after `private=true`.  Alternatively, check that the [new snapshot][0] has finished being created.
1. Replace the existing value of `"imageId"` in [Droplet Deployer's config.json file][3] with the new one.
1. Test the [Droplet Deployer][4] tool.
1. Commit and push the change.
1. [Delete the Personal Access Token][5].
1. [Delete the `Old Droplet Deployer` snapshot][0].
1. [Delete the freshly-shutdown Droplet][6] used to create the new snapshot.


[0]: https://cloud.digitalocean.com/images/snapshots
[1]: https://github.com/maidsafe/release_config/blob/master/droplets/credentials.json#L3
[2]: https://cloud.digitalocean.com/settings/api/tokens/new
[3]: https://github.com/maidsafe/QA/blob/master/droplet_deployer/config.json#L37
[4]: https://github.com/maidsafe/QA/tree/master/droplet_deployer
[5]: https://cloud.digitalocean.com/settings/api/tokens
[6]: https://cloud.digitalocean.com/droplets
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




















































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Powershell Scripts/AppVeyor/Build.ps1 version [1e2ab278e6].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
# rustup default $env:RUST_VERSION

# AppVeyor CI helper: compiles the crate and its tests without executing them.

# Forward optional cargo feature flags when requested via the environment
if ($env:Features) {
    $with_features = @("--features", $env:Features)
}

# Release configuration passes --release through to cargo
if ($env:CONFIGURATION -eq "Release") {
    $release_flag = "--release"
}

# Build library and tests; -NoNewScope runs the script block in the current
# scope so $with_features and $release_flag remain visible inside it.
Invoke-Command { cargo test --no-run --verbose $with_features $release_flag } -NoNewScope
<
<
<
<
<
<
<
<
<
<
<
<
<
<




























Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Powershell Scripts/AppVeyor/Install Rust.ps1 version [0847f77ff4].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
# AppVeyor CI helper: installs the MultiRust toolchain manager on Windows and
# selects the Rust channel named by $env:RUST_VERSION.
# NOTE(review): multirust is the predecessor of rustup — presumably superseded
# by the companion "Install Rustup.ps1" script; confirm before reuse.

# Determine the appropriate arch to install
if ($env:PLATFORM -eq "x86") {
    $arch = "i686"
} else {
    $arch = "x86_64"
}

# Download Rust installer
$url = "https://github.com/rust-lang-nursery/multirust-rs-binaries/raw/master/$arch-pc-windows-gnu/multirust-setup.exe"
$installer = $env:TEMP + "\multirust-rs.exe"
(New-Object System.Net.WebClient).DownloadFile($url, $installer)

# Install MultiRust
# The input file contains two "y<CR><LF>" answers, fed to the installer's
# interactive prompts via redirected stdin.
$input_file = $env:TEMP + "\input.txt"
Set-Content $input_file "y`r`ny`r`n"
Start-Process $installer -Wait -NoNewWindow -RedirectStandardInput $input_file

# Add MultiRust to path
$env:Path = $env:USERPROFILE + "\.cargo\bin;" + $env:Path

# Set the requested channel and install nightly
# multirust update nightly
multirust default $env:RUST_VERSION

# Print toolchain versions; bare strings below emit blank lines to stdout.
# Exit code 99 signals a broken install to the CI runner.
"Rust version:"
""
rustc -vV
if (!$?) {
    exit 99
}
""
""

"Cargo version:"
""
cargo -V
if (!$?) {
    exit 99
}
""
""
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


















































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Powershell Scripts/AppVeyor/Install Rustup.ps1 version [76877b477e].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
# AppVeyor CI helper: installs rustup (plus a matching MinGW gcc via MSYS2's
# pacman) on Windows and selects the Rust channel named by $env:RUST_VERSION.

# Determine the appropriate arch to install
# The MSYS2 mingw bin directories are prepended so gcc and bash are found.
if ($env:PLATFORM -eq "x86") {
    $env:Path = "C:\msys64\mingw32\bin;C:\msys64\usr\bin;" + $env:Path
    $arch = "i686"
} else {
    $env:Path = "C:\msys64\mingw64\bin;C:\msys64\usr\bin;" + $env:Path
    $arch = "x86_64"
}

# Install gcc if required
# --needed makes pacman a no-op when the package is already present.
bash -lc "pacman -S --noconfirm --needed mingw-w64-$arch-gcc"

# Download Rust installer
$url = "https://static.rust-lang.org/rustup/dist/$arch-pc-windows-gnu/rustup-init.exe"
$installer = $env:TEMP + "\rustup-init.exe"
(New-Object System.Net.WebClient).DownloadFile($url, $installer)

# Run installer
# The path is converted to forward slashes because it is executed under bash.
$installer = $installer.Replace("\", "/")
bash -lc "$installer -y --default-host $arch-pc-windows-gnu"

# Add rustup to path
$env:Path = $env:USERPROFILE + "\.cargo\bin;" + $env:Path

# Set the requested channel and install nightly
# rustup update nightly
rustup default $env:RUST_VERSION

# Print toolchain versions; bare strings below emit blank lines to stdout.
# Exit code 99 signals a broken install to the CI runner.
"Rust version:"
""
rustc -vV
if (!$?) {
    exit 99
}
""
""

"Cargo version:"
""
cargo -V
if (!$?) {
    exit 99
}
""
""
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


























































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Powershell Scripts/AppVeyor/Run Tests.ps1 version [dc011bd38e].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
# AppVeyor CI helper: runs `cargo test` inside a background job so a hung test
# suite can be detected and killed after a timeout.
# Inputs (env vars): APPVEYOR_BUILD_FOLDER, Features, CONFIGURATION,
# TimeoutSeconds (optional, defaults to 600 seconds).
# Exit code: cargo's test result, 99 if cargo is unusable, or -2 on timeout.

# Exit the script if building fails
$ErrorActionPreference = "Stop"

cd $env:APPVEYOR_BUILD_FOLDER

# Prepare test script
# A job's exit status cannot be returned directly to the parent, so the script
# block writes it to a file in $env:TEMP for the parent scope to read back.
$cargo_test = {
    # Check cargo has installed properly
    cargo -V
    if (!$?) {
        99 > ($env:TEMP + "\TestResult.txt")
        return
    }

    # Background jobs start in their own working directory, so cd again here.
    cd $env:APPVEYOR_BUILD_FOLDER

    # Use features if they've been set
    if ($env:Features) {
        $with_features = "--features",$env:Features
    }

    # Use Release flag if required
    if ($env:CONFIGURATION -eq "Release") {
        $release_flag = "--release"
    }

    cargo test $with_features $release_flag -- --nocapture
    $LASTEXITCODE > ($env:TEMP + "\TestResult.txt")
}

# Run the test script
""
"Starting tests."
$job = Start-Job -ScriptBlock $cargo_test

# Set timeout to env var or use default of 10 minutes
$timeout_ms = 600000
if ($env:TimeoutSeconds) {
    $timeout_ms = [Int32]$env:TimeoutSeconds * 1000
}

# Loop until timed out or tests have completed, polling every 100 ms and
# streaming any output the job has produced so far.
$ErrorActionPreference = "Continue"
$start_time = Get-Date
$current_time = $start_time
$completed = $false
while ((($current_time - $start_time).TotalMilliseconds -lt $timeout_ms) -and (-not $completed)) {
    $sleep_ms = 100
    Start-Sleep -m $sleep_ms

    # Display test's results so far
    Receive-Job $job

    # Check if the tests have completed
    $running = $job | Where-Object { $_.State -match 'running' }
    if (-not $running) {
        $completed = $true
    }
    $current_time = Get-Date
}

if (-not $completed) {
    # Exit with non-zero value if the test timed out

    # Kill any still-running test executables, stop the job, and retrieve any
    # remaining buffered output.  Stop-Process output is discarded since some
    # of the matched processes may already have exited.
    Get-ChildItem "target\$env:CONFIGURATION" -Filter *.exe | Foreach-Object { Stop-Process -name $_.BaseName *>$null }
    Stop-Job $job
    Receive-Job $job

    $timeout_seconds = $timeout_ms / 1000
    ""
    "Tests ran for longer than $timeout_seconds seconds, so have timed out."
    $test_result = -2
} else {
    # Retrieve the return code of the test command, so we can return it later
    $test_result = Get-Content ($env:TEMP + "\TestResult.txt")
}

# Run Clippy, but don't fail overall if Clippy fails.
# ""
# "Running Clippy."
# multirust run nightly cargo test --no-run --features clippy

exit $test_result
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































































































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Adam.pub version [92584adc12].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDYrVCkGerTtN6QDfZK30PMORBO7Up6Cbg3fikqIaGlLFN+osMn6NjZvfKBXb2JOnlPGRtuzb8KUYl14gtHo/eQ9BT5ASKbKp+LUw6eEmfcaZdd7H3x9GfsbH3+EG9ALm/NPqUBDXNshRq563yfPJMkz4Rk/hcTVURl0E3IPcLHE5ymjCz8Ar8NMdvmWAD7ft/QqoRRG4Bnx3Tc6uSi5s35jHdj66zQlLpoDpZ+IW3z7mk03nE7B8in1quHfNKwRYNIb0vBoV5nKSFwquGpYfB+M0/g1R9a8JRrLeMGv+XkGVGt6Ltja76fxYygZZDP99XrFqw89bEL4mOzrDCGTwDZ adam@higgsboson
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Andreas.pub version [c2c9b6e8ef].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDUwyrjDbQQhVzXk2mdMLm80/+2eHW1L4fw/flmmee+4FI3WF7b8L1bMjl7TApeMNU+HXc3KxBupkni5LjuXLZOS2L/Zo6yIcrudQpyAb8275phueT3KS36Q4oNLEv+E3IXQiyfNeE8hsvqoFdoo+V9FyR9SFPlDndfUsTC4O/nANWv+jO+1K6Iyd4b5OhZUP+Iw563OtSXFwFGxpgEhz3dUOqL6C0i5M2hxnqdx0FesBowE6uu4Npsjf1KUE/aNcM/+9+loD1PCnQja634V5m6jKy2y121h7n5S/y0gbusoml9Kfe8z30CMwyP4SkHwtBIPG1bf38N08/LUfbr83p7CpIz6wOCcDdY8mx2SsfCoyb0eJcCP7czlqHe70i6F9o77SWWdRX/m25x9bcKug6MUYgVNB5BXbN3nj0RxmitNQ7MpPcs6YD0WxtY8KDh1XZ2a73bie+h/bjN2FqT92AnC9mmZ82YP/v/4l0GI3854dxB5uGGG3m9j1TqYg1I/GVpuqiF7lGRvnR7ip+ahpOVnmaV/pUOQPZGuYps/0hSo5UIo8G1o89nk4eICScwU9h6cSx+MrUjVciPssIadiL4SZ8KU55arkyzXb6zZRhm3MKKBTmB3FIU6/9MW/2N2LcoTurwcC8+wELvGTfYXOIxUVjvpIZ21ZfR7F1n61XRAQ== AndreasFackler@gmx.de
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Bart.pub version [c76b5d6893].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnf8sXey8Q5tBPKEAkJAErTIYgluP/NnMpqG86dcSWDbJOXay5PKQh5iXwRLCJ+ZJLvft2a/QGMVXain/yF9wKugUPosFg7dqgQKyFQk0Y3nKK/I4OGyKd3XJtOBVckYow/wEPDLkAWThf2VimDudUbsJ6VPDbAlWBg8NTiDJRaPzohpkru7c/y+yyuFVxmRi4m+1YzM00R12HJr5jqf/qNOZI/pUccNEhMnchFlU7t++Pk0ZhwOgvLEeGfLGfI622HdNVToVNJ7VVxVMr+qyvqBXiIVfIdRVGvoBeoIboTpUxEcYvkgPouxQxkJOSrbxOF/b+3nQ6bff9UTUDL9zf bartek@bartek
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/David.pub version [d28b9b3b21].

1
ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAynpCS4y8Yvvd8w1ZeDlXTjjdgTxTsStqNl9lDWxjlwd8dyIWSOfSyJowB0gz1PAS7/gyuz1+RfOP6n3NmJCg1l1TQI6CXt/0HFTp5ucdL5bvfmUM786rOH4jKxQUbw8Mk6p9upVNaEF6R/WyQP2UwPyQgV+wNBIdheR7ytu5YXXmvaE1bCZ3gXbWvhY0PKQYgpX6dVkTJTYvRPFnffw3M99gIFOkk2lvDhuh/GQeeMC+LMml+NskQfiw+oBxKU4ws756HKr0ZlwyrBfH0SmTW+YxXZl5gsnxz32g2wSc7N/jjnJGZ9CAY/7UrARNfXVg7SByNAf38qqwl6TiFtkjyw== dirvine@dirvine-desktop
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/DiggoryHome.pub version [023e024fb8].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDXPPVjQWY1A66cXlZIi7xrmwMa8TPIeYKMX9xWL5rW4IN1FJ0c6smL7qDRKs6eZP5XWYWV+aaeMoV5vBtCEEnA63xnEgQYgiY8UjLxWTY/0owpJWYg6WJNp26b8eKb/5Kwbpy88ETi52mSDTjJY+djfz30SPBOmHRV34Cmpi1paxWxSEzsxblCEU1Hv9WnE/fjt0E1VCKMKS6YGBEFuTRAnfOKIu7wlrbHkB5NaqGTqaj6ChO73TQe77qFnxQOp9Ph2jERaWFwvIZdFH0cD7+WpgmOaSjdzEYUESicqanZSgY2nN23zgMt16rigkuSoUWKhQavHpUFar17tAuQ7HQr dhardy@TPH-L13071
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/DiggoryLaptop.pub version [73f3ce665d].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN0PnwrwauBsFhuDz1I86Dq/yyteYU+qDYdnYcwyknbx8RrDJ9zzf2rPvFgyQwPPE/HZxXO2jp2nRrUnobucC8nFPFU+owf0mgKkWyT+UD1iVvqT3QHvpKgVzcsM4mSKYoQSf0OymPUNbYRRy01BHdNLXrqHFnC6YshPejuLpijiFsKe0OSQIkjcUffx+Xe/iTFmXHSaZTb23wjTwInBNA7ZofTZCJ94uQRxGXqW0hGqeCr6lw5rL18iomX8IhCFSPZnBzVBET9ll4QLVpadeq35noXy+ArgmCoyS60cPnbX/ZpMDleNgV8ClSzjoE0+N7FPb/7OL3L7ZRCgTqO9Pt dhardy@yoga.dhardy
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/DiggoryWork.pub version [7217746439].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDDbHl0Nu0wXbj0WFVACmG12uQcNduZsgMpLxL0ANoKMvue4VWhhW/nhIK1AIyW+iSvgf1DVQGduWkoeh7SGWN/eHzAqJ2/o4UFbmsl8mL0bcvSakz9xrwhhZQpaK/Vy2N8319cF3uUwujg3SA9S4Q7tu0UKVYA9YF2AN070z5jnJyqK2VVROoWHM48cm/zwHZJBWsqRya7GxpvG70NsyzR+Ap8oe7NKXynZr8bxnQ3JPJr7PsWnnQiiTlzWhjSInoLU1+5xxvnZe0xPhB8K1BBzoOvJDqeI9IrDVGFcxu5PduIyEP9G43swjU/dMuY7Y87WKzHUCU5EMYx4/R5R/I1 dhardy@localhost.localdomain
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/FraserHomeWindows.pub version [4e917b90c0].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDUwgBg3rxrbI/SBm25Q1tIXV9iym7xBIspRtyY3yhxaPBcggVNU63cwfbjXFUCIHAkA3ZZBHAn+4P6uXYqyz0c7ticl9LOfDQm/mPCyZw3gOrGtcI6/xV5dwvYJpOd8pBFS5jIUXto3EG0YOmSqvxIPllHhzd+6IeK/5QKJPNqaEKYXWtgA55iBUq0JqNOWfJx/whJPzOJVdeWeHQjMg++DBrbBFpbLSh3S3qAda88jKBNL9LtOfXK/VJsdJ7/yW1xYeSA3Zu770y60fvzHOUUTpPuvMKqamHKubU54A8/aSzpaHpNIHuFdAfmwKYT3DfeFIR8644+6GTVVd5jVvF7TBg5+lDABcRqruSx6kc4rFxMWzkcHWZA9dXW2B4KP1WrRzSUmXOMWXcbgdZeCMR9QVP3K/AZdBwhXp8LEJXhOlcsEXplGEcp3FrR6SKtut/dOpLur8z/SOTctgmctHrNKJ145Mmu8ws5b1UNRBmVY+CMNvXHw2pXgz1LACaKx3R2dhTouZiGX19eN6V/Qaa+06hizX6ybsBh/zukdTkHtbLzzaMO46RZISFRFZ+zZzLQtenBTSFlR+8V9e5VhfVy8CxQKupLMeeADKoqrGUEGtouYZ1XoAmAAbX2ctO3sSPqeSYusI3F6tVZ38UpcOjwlWUattLXAL8miF7Pbzixdw== Fraser-Home-Windows
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/FraserOfficeLinux.pub version [b5a3047fff].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDeTwVlMND8ubS3U7OmMhxXg0x+pLIr47JV2+Ks86QKR4uGxby2/CcH18lPydwPSmvM2vWuSL1WvHTItX5jmq2BA6guEMY4GBgs3l/2nAR+rN1A5JKgI1T2HgAOL2tRYrAboNIna0KAl1lMMJPsNv26b9PK6w6NhFl/U8qG8iJkv9FbZClvw34UrDw0qpydrGfS/2xikTSXqcjlofZvzUiK0kaD3R5yDqPc3Sz64UhiLKos/gSKQHNbeNc9W/C1Em/DDM8WneVRfmYMbPru2/6DG1F6z4QIaFm/AeyYlRN5PWtVdH6ycg+WB85ZJyjQvN9JtUGdBJ3rvGHALpm5fCxAwmsR6PGI8r4xJVKOMGf3jYkDLdfNgKgKCuQKV4JL7QMMQxCz5HeoMrBjXbQfoTjkQ3Py2C2iz17Aol6BSyYAdZuD2dIEwV0ds81iRfYVTCw+Hd17iUkWoIS2R74EOYfjMkbdkaMz7Dpoqgn6p5FjSrvwHmkQ+b7zXTlWgmAURYMe67gt8ndm16m+/qyFTy0O6AXK2bo2lpxfq68f4bkWQWY7md7YWE7JRaMH+pu/VFfD/mSeNBN8cWljzlC3iSfT6vBnbLxoPsFdX7GZceks9AQvZMgvpWKjMeJmWmdDVhULBSJH1LjLA1/ddmFRoT036FL3he7+b7GYwZ+mR5RTrw== Fraser-Office-Linux
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Krishna.pub version [1590b2d723].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDXT3Mawk2S1O1sMLSkr/k2E1LERCc9AxSM3SK+asHaNTHPceBFiOyOn+QwePWO8o87lIww4/cNv+lY3aglFfGRyRaJ6mcL0H8Ccoz2AwBdUEbJSY75CJWGZTBFmeL4q3sYU71mdUBYDZwYxWSUSmEmxfATxZG9MZKlvxElCQQXDSDorj/TPPMYaWzhwSl1jhC2wxTrxcU/e9sSm67hBi8hNFxdNlooNhAWYl/pq39/uzRyWrH+lCfq17yuil+1cVQVDs5MF8/caK+jO6mTeHgkO+q+NdEObtijkhQEOZc0+eH0t7/RPdDvUSXe6W9JMYgjFDK4DKn0lFBHPcupjiWSVCVBpbUKbBUHPh75GIN8CYmO/w5VGWgjP5SBQrGtMiPHcFNELDSvcEp5gBQAmjKTbCycD1O6NffejhAcvRMKHMU08EUqHg9phzMbkuh4HUtrTBmf6xYyWLKCzgZSwddt5zRHuPNbca2kH6AFVaCVeuCvNeGlirti6JEVlcxYG1oD2kM0tFKa4UsDuNHmJbEUJW28S5diurXJVpo+iIJ2rLfoCGWnfXYzTyAPXT1t/Wjo3AmJHWym16XGNHmwnjrVXqmLumc+VwOS3xc7nR/utQiH1UZzKlBgUYfXN6pkdq2JNj3awFMLlHSYmAxUlNR7YrpZwZL4nEuRekJOxQFcxQ== krishnaof1988@gmail.com
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Michael.pub version [9a49a43de4].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCoMq7SaKOpcan3UlMqP9CYSrF1IkNekjQ3BKVV7fF0LTN5lxQ9rOi3knEEuFCvH2VMbYEKFGAieJa3OuZHlBQMfv66LChZCeAiBaG68iRww84DXBiGWDTuUOBmwepOhXfKIO4I2Qr/st3sPWbD4ddprHd7TJdFqpnTmGwG61m4wf0m3jWZygfqxA85UlweUjWsP6DerMVrfG7F+kNYGdpFcDR0CjPKC2cHwGyIhmBI9jhLHfR1k03+qLKLAcPIIjh8+iAep4FELpnPkrC222DmAL7X9KDuYeh+V2GWc/jcaERFzk3xUx59L4Q6YGnLcO2EoRlGiBOITdrut9DBCIjCcyd/MCkHovL+zdmWCqxYT4ITFsOW91a5UlAAStQLRtCkHbprmIaNEsu6mWAW6owTAIAj0u5f5wyBOEkb7BSifPpbg0jN1EqbKnx+YuXN5MvrKmRQzARpJCIGyhJBpvP7Uh+IJHtULoJNbd5XzWN0F6Z+szlIsPUt31NbPLIeLzqqHuW+rmf1Cl/wcEX8BzOnP3PtTH6TfxfwcwP3v4n2HchPdzY9ZJRd+E5zuEAW4hJL3iWtTM5ARWZC2RSk1wCXggbUkhUQxpPS4GpTzmaBiHNirNZUJU0SDnHcsYuEsQditSqrh01ss9Y8HQRYJ0n2Qh/soV4sUCoe5dyGp3SfHw== michael@michael-macbook.local
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/QiLinux.pub version [6de8d8a8d9].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCm34L9WS2OpYeJ8caQ3Z6dNrjIB1AZOCzejPJW33CeYMSN153l0p9pAlLNJPgOET/JcSHv07gOsdTzAqWZvEcuaLhCeX3X/WBXg1ZaqPvxXLsJLW4EtXDuENaQQ18oRpKFBuHULjkI4wopw34JMHWh6WIQrPVOLDcXsX9cfknviCGdlBScHahxB5ZZ9w5wKxdRDFqJEkit8rQlJR5grVrUq9SYb9zWUUBE0/YeULD6wIhrm5bDepfuTuELdhXF1nzUNQb6Kis5lsi9N1jeG5jMDWsP0cLYvUg1zkB4COiiI95ZT7Rwggbvj2/qrHG3P4LhJlXjaZTzyxjxZojMG+Tfjd0su3J+cnMGhkwj++f4CeFVo7Vbox6U5WT8E+UCXVqRcgvCOePdO76EI17bkHshhDef2RDGvBCYrkSy4f6iqCoKXRnPav2buEI+/pQgacfdxz3CeBrhuL1mXETO4BWf/YvDZYiX6L2+NgVcAVJEDXFrDNsMR2zRkqAKL3ysBOhGKJY20MxL6DuWMZv3byT3f8W3wnLDOQgN+k4HNtg/q5hi4a5KwoLPMTat/dD9lAgRpUhcdxh1AhkTmWxc12CrCpbVCc8kyzu4gdZLPE7ZGKP5YtbbHMJw5p2TzLMs9w8ZkB/WycTqZsqyYcHDPM5UDIh18/ncbITEhDyZIX6iHQ== qi.ma@maidsafe.net
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/QiWindows.pub version [4a751e88d0].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDX2xmkt7sgPmh13dG51YC2QMrznFWEHeMCqzUfP96mSh1dRPZB6nOFhEvMvHmJhqy6oyxWYChttPtSzWZf3o68+ur/YkRbFONV3Kn8sP9qfQHDGa7scT9n5EDxTLzGm1yN4RlQDD2bdhVkYmdkfLcdsEYntOi4Zj45N+xMziH1NQou02iwHuJTIHOscCxWyuTbKFYydNw1NWbCOX8AA0lZoqtrYTsZMceQ/AkLkG1N/dCZtQxMbfSBuRM9cbLsDK58n9PI+1c6OflIba2pb8lHiq7ThrZY8CcZolvFYRWlVYMfPysjKiiCQzegNQkGvKrb7r89swr6QAd/wGldqGab qi.ma@maidsafe.net
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Ross.pub version [78b91b0f6e].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCl0HFTx1cmWG94LxtxYyr0Go4K0kJI3Kd9DU97qPuqrbRnb3Sym8+5C4Xqe4QflqRhluJiWtyZ+XzIiEb0uNGvS2peP7Gb4sdRdfGKFuYg8vfQumv/JhRRn1tw45dOQNDGTAUKFcZmBdpTG8R990LN8991ORSA4jSCzJ3KPbIErhHFI2IknNyURUcopeIu1B3HOwu5WFdC3gWo6XzzgKsenKCQJdlZ1SRSJrHY5L4a4eGTDnkuguE78jx+DpIOJ5UJC1NxfwKOhSG1O34GsBur1lonae5Fx1HwyMRgTmTYGUDNyCo+gqV65y5352wQZrQFc++0YU8cJi3496PQUgWR user@QA-ROSS
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Spandan.pub version [6966862f9b].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCpG3EOW3aKzk0NHR6dsP4ORd/gUpttGwECd7IRx4mxUkDYM3cROqy0kbT4IJzUri44EGiKDk5EUhuoNUhA1yW4SgqecE+1AbFfBxUHmzJemqSkctjIxZSuYA+R4c3kbeMLAlk+nEcxxZqTBzyPhNQVqhtLlWYqYVVp41y4HSybInHn4q7vkoUsyAqp+taQX5tafEI2VmokMFdUbVsJDUSGxrzIlj5hPxL4kXzMxMcPMCeuxIKBOJsb/+KjrlsHMrfSrMIdM677Qx4ycoCt1hMpndVXECvBPFT7y/CpXdF3xMT5+hFsdrwYsu4uG8ggi+NZUqFjgcW7FJVDAx0CYr9L ustulation@gmail.com
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Vinicius.pub version [f6ff062577].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDMVwr0GWuka5X+GDUyamKe8zFvUVblNce4/p1j/O9bFnHs9d8N+OkYkz6CkBsXsfJpb0+YYwpYdl55/Lg4ohP5mJjWnMDB0pacooSYLwpJSZnlV0+aJgu4gMMRfpP4amYnBVm80iPkZZ42OC/ZVNW5Hd0yTuAFUtdnwDKgV57Rk6rhT5pGWSPYrchIGWJCQzMHAkNMmmA5xPdRzAKo7tTy3mGqdWJfiyqM1J1NSDi7UgQCm8ehu2rN2/Gs+I2E3N08MQnJUOAcMrxe1X3lgA6kXEnYEWurEq5ZhC3sOXw8erOWmNtXvqI2O6C/rXBQgzlVliNxtubl8yWnmNPX8UXF vinipsmaker@vinipsmaker-netbook
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Viv.pub version [48138a48c3].

1
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC/s9I/3XYObaipr54raYtt/f1StRcuybWWCfB03rv0yG1duNrIrpSP8dv1uscmt/OXMvUSLdaGURTZZ8XytD6SFwisVSkTQD4tqqk1NmTQt8DEUZ9KErbQCiyEAAcI02QMJ5IeqvismdyvRnfcNV9Nx2vaaCftppJ2R9rTtm9hNOsa4eoLCnuUACvef6jiAa0Fzn5GV7y91dcrVuWiKnUIdBRtxwv1sJRPT6epm6l6AZcpyA+2Qc4kgS2ak4tAjmSlAWUAIoyYECSweCiIwKJL7WLNSNVV3omhljNLONrckOlfglg7LqUrLYMNh2gHPAdUTCPHFuMlIW4rWvSZi9E0JNTZ7o7+x4PWu+SI8a0faXQ1i8S5qSBhNl3HUbChPH7VxktHrZ4rohOpd4WbV75PrzOoycJwplyuyLzLluWOtE/P+a/EmDV/2iUrlYujQQKHaXhbVIaffI8fct+BuPQAN+EmmMIx/h8BSoeWIBMK/ZdxCcDAuCXeoqonYp3QCFef2+dL8CM5EAjGKkxKHPUcFagf/RsM1VMgb0k3Q30jXqc45k8e5XxsI1cXegRrj6z6ZZmLjPOZrdNxclNDz4xigzZwqf6s9uG+0RxgqCvZZoIJpkfGtGviN6Pm1o8/PPGHI3bmrOv8r/ktjy+V2xjKae6Q5Sw/h83gd1csFoosCQ== viv.rajkumar@maidsafe.net
<


Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/authorized_keys version [6d38c40df4].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAynpCS4y8Yvvd8w1ZeDlXTjjdgTxTsStqNl9lDWxjlwd8dyIWSOfSyJowB0gz1PAS7/gyuz1+RfOP6n3NmJCg1l1TQI6CXt/0HFTp5ucdL5bvfmUM786rOH4jKxQUbw8Mk6p9upVNaEF6R/WyQP2UwPyQgV+wNBIdheR7ytu5YXXmvaE1bCZ3gXbWvhY0PKQYgpX6dVkTJTYvRPFnffw3M99gIFOkk2lvDhuh/GQeeMC+LMml+NskQfiw+oBxKU4ws756HKr0ZlwyrBfH0SmTW+YxXZl5gsnxz32g2wSc7N/jjnJGZ9CAY/7UrARNfXVg7SByNAf38qqwl6TiFtkjyw== dirvine@dirvine-desktop
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDUwgBg3rxrbI/SBm25Q1tIXV9iym7xBIspRtyY3yhxaPBcggVNU63cwfbjXFUCIHAkA3ZZBHAn+4P6uXYqyz0c7ticl9LOfDQm/mPCyZw3gOrGtcI6/xV5dwvYJpOd8pBFS5jIUXto3EG0YOmSqvxIPllHhzd+6IeK/5QKJPNqaEKYXWtgA55iBUq0JqNOWfJx/whJPzOJVdeWeHQjMg++DBrbBFpbLSh3S3qAda88jKBNL9LtOfXK/VJsdJ7/yW1xYeSA3Zu770y60fvzHOUUTpPuvMKqamHKubU54A8/aSzpaHpNIHuFdAfmwKYT3DfeFIR8644+6GTVVd5jVvF7TBg5+lDABcRqruSx6kc4rFxMWzkcHWZA9dXW2B4KP1WrRzSUmXOMWXcbgdZeCMR9QVP3K/AZdBwhXp8LEJXhOlcsEXplGEcp3FrR6SKtut/dOpLur8z/SOTctgmctHrNKJ145Mmu8ws5b1UNRBmVY+CMNvXHw2pXgz1LACaKx3R2dhTouZiGX19eN6V/Qaa+06hizX6ybsBh/zukdTkHtbLzzaMO46RZISFRFZ+zZzLQtenBTSFlR+8V9e5VhfVy8CxQKupLMeeADKoqrGUEGtouYZ1XoAmAAbX2ctO3sSPqeSYusI3F6tVZ38UpcOjwlWUattLXAL8miF7Pbzixdw== Fraser-Home-Windows
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDeTwVlMND8ubS3U7OmMhxXg0x+pLIr47JV2+Ks86QKR4uGxby2/CcH18lPydwPSmvM2vWuSL1WvHTItX5jmq2BA6guEMY4GBgs3l/2nAR+rN1A5JKgI1T2HgAOL2tRYrAboNIna0KAl1lMMJPsNv26b9PK6w6NhFl/U8qG8iJkv9FbZClvw34UrDw0qpydrGfS/2xikTSXqcjlofZvzUiK0kaD3R5yDqPc3Sz64UhiLKos/gSKQHNbeNc9W/C1Em/DDM8WneVRfmYMbPru2/6DG1F6z4QIaFm/AeyYlRN5PWtVdH6ycg+WB85ZJyjQvN9JtUGdBJ3rvGHALpm5fCxAwmsR6PGI8r4xJVKOMGf3jYkDLdfNgKgKCuQKV4JL7QMMQxCz5HeoMrBjXbQfoTjkQ3Py2C2iz17Aol6BSyYAdZuD2dIEwV0ds81iRfYVTCw+Hd17iUkWoIS2R74EOYfjMkbdkaMz7Dpoqgn6p5FjSrvwHmkQ+b7zXTlWgmAURYMe67gt8ndm16m+/qyFTy0O6AXK2bo2lpxfq68f4bkWQWY7md7YWE7JRaMH+pu/VFfD/mSeNBN8cWljzlC3iSfT6vBnbLxoPsFdX7GZceks9AQvZMgvpWKjMeJmWmdDVhULBSJH1LjLA1/ddmFRoT036FL3he7+b7GYwZ+mR5RTrw== Fraser-Office-Linux
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDXT3Mawk2S1O1sMLSkr/k2E1LERCc9AxSM3SK+asHaNTHPceBFiOyOn+QwePWO8o87lIww4/cNv+lY3aglFfGRyRaJ6mcL0H8Ccoz2AwBdUEbJSY75CJWGZTBFmeL4q3sYU71mdUBYDZwYxWSUSmEmxfATxZG9MZKlvxElCQQXDSDorj/TPPMYaWzhwSl1jhC2wxTrxcU/e9sSm67hBi8hNFxdNlooNhAWYl/pq39/uzRyWrH+lCfq17yuil+1cVQVDs5MF8/caK+jO6mTeHgkO+q+NdEObtijkhQEOZc0+eH0t7/RPdDvUSXe6W9JMYgjFDK4DKn0lFBHPcupjiWSVCVBpbUKbBUHPh75GIN8CYmO/w5VGWgjP5SBQrGtMiPHcFNELDSvcEp5gBQAmjKTbCycD1O6NffejhAcvRMKHMU08EUqHg9phzMbkuh4HUtrTBmf6xYyWLKCzgZSwddt5zRHuPNbca2kH6AFVaCVeuCvNeGlirti6JEVlcxYG1oD2kM0tFKa4UsDuNHmJbEUJW28S5diurXJVpo+iIJ2rLfoCGWnfXYzTyAPXT1t/Wjo3AmJHWym16XGNHmwnjrVXqmLumc+VwOS3xc7nR/utQiH1UZzKlBgUYfXN6pkdq2JNj3awFMLlHSYmAxUlNR7YrpZwZL4nEuRekJOxQFcxQ== krishnaof1988@gmail.com
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCwTp0j1PVpCCi8L6OV0VzCl8tP8eyRBM/eBuud+uEjna6HtpEsvtnTzQmp0Tqx62ktGFKEYqKL/F9m0gNgP1nBC6LqExNXkR7+YVXRNgAoF1J8JF+zdIBOyTaGcFqB1R8/1iL7Aybl8u+eS0wM2I++kgAi5npRQDmNgA/b5AotoSsSwgIatmq6c4PY0wiNr9NF9C58VFHiw+p4IIFO1Jfnx3pkSjaL/DmXvawwbeOit/ik4V7ESvM5Ioao2F1Gydim8DEIKfH/r8FHpaE4TlwuIuveP/Fcz9iS5K/pqVNEQlvwLAyrYrjwOc01JRKQE1q1oF6aaryd2UjzbqtKN2Xt qi.ma@maidsafe.net
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCl0HFTx1cmWG94LxtxYyr0Go4K0kJI3Kd9DU97qPuqrbRnb3Sym8+5C4Xqe4QflqRhluJiWtyZ+XzIiEb0uNGvS2peP7Gb4sdRdfGKFuYg8vfQumv/JhRRn1tw45dOQNDGTAUKFcZmBdpTG8R990LN8991ORSA4jSCzJ3KPbIErhHFI2IknNyURUcopeIu1B3HOwu5WFdC3gWo6XzzgKsenKCQJdlZ1SRSJrHY5L4a4eGTDnkuguE78jx+DpIOJ5UJC1NxfwKOhSG1O34GsBur1lonae5Fx1HwyMRgTmTYGUDNyCo+gqV65y5352wQZrQFc++0YU8cJi3496PQUgWR user@QA-ROSS
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCpG3EOW3aKzk0NHR6dsP4ORd/gUpttGwECd7IRx4mxUkDYM3cROqy0kbT4IJzUri44EGiKDk5EUhuoNUhA1yW4SgqecE+1AbFfBxUHmzJemqSkctjIxZSuYA+R4c3kbeMLAlk+nEcxxZqTBzyPhNQVqhtLlWYqYVVp41y4HSybInHn4q7vkoUsyAqp+taQX5tafEI2VmokMFdUbVsJDUSGxrzIlj5hPxL4kXzMxMcPMCeuxIKBOJsb/+KjrlsHMrfSrMIdM677Qx4ycoCt1hMpndVXECvBPFT7y/CpXdF3xMT5+hFsdrwYsu4uG8ggi+NZUqFjgcW7FJVDAx0CYr9L ustulation@gmail.com
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDMVwr0GWuka5X+GDUyamKe8zFvUVblNce4/p1j/O9bFnHs9d8N+OkYkz6CkBsXsfJpb0+YYwpYdl55/Lg4ohP5mJjWnMDB0pacooSYLwpJSZnlV0+aJgu4gMMRfpP4amYnBVm80iPkZZ42OC/ZVNW5Hd0yTuAFUtdnwDKgV57Rk6rhT5pGWSPYrchIGWJCQzMHAkNMmmA5xPdRzAKo7tTy3mGqdWJfiyqM1J1NSDi7UgQCm8ehu2rN2/Gs+I2E3N08MQnJUOAcMrxe1X3lgA6kXEnYEWurEq5ZhC3sOXw8erOWmNtXvqI2O6C/rXBQgzlVliNxtubl8yWnmNPX8UXF vinipsmaker@vinipsmaker-netbook
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC/s9I/3XYObaipr54raYtt/f1StRcuybWWCfB03rv0yG1duNrIrpSP8dv1uscmt/OXMvUSLdaGURTZZ8XytD6SFwisVSkTQD4tqqk1NmTQt8DEUZ9KErbQCiyEAAcI02QMJ5IeqvismdyvRnfcNV9Nx2vaaCftppJ2R9rTtm9hNOsa4eoLCnuUACvef6jiAa0Fzn5GV7y91dcrVuWiKnUIdBRtxwv1sJRPT6epm6l6AZcpyA+2Qc4kgS2ak4tAjmSlAWUAIoyYECSweCiIwKJL7WLNSNVV3omhljNLONrckOlfglg7LqUrLYMNh2gHPAdUTCPHFuMlIW4rWvSZi9E0JNTZ7o7+x4PWu+SI8a0faXQ1i8S5qSBhNl3HUbChPH7VxktHrZ4rohOpd4WbV75PrzOoycJwplyuyLzLluWOtE/P+a/EmDV/2iUrlYujQQKHaXhbVIaffI8fct+BuPQAN+EmmMIx/h8BSoeWIBMK/ZdxCcDAuCXeoqonYp3QCFef2+dL8CM5EAjGKkxKHPUcFagf/RsM1VMgb0k3Q30jXqc45k8e5XxsI1cXegRrj6z6ZZmLjPOZrdNxclNDz4xigzZwqf6s9uG+0RxgqCvZZoIJpkfGtGviN6Pm1o8/PPGHI3bmrOv8r/ktjy+V2xjKae6Q5Sw/h83gd1csFoosCQ== viv.rajkumar@maidsafe.net
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDUwyrjDbQQhVzXk2mdMLm80/+2eHW1L4fw/flmmee+4FI3WF7b8L1bMjl7TApeMNU+HXc3KxBupkni5LjuXLZOS2L/Zo6yIcrudQpyAb8275phueT3KS36Q4oNLEv+E3IXQiyfNeE8hsvqoFdoo+V9FyR9SFPlDndfUsTC4O/nANWv+jO+1K6Iyd4b5OhZUP+Iw563OtSXFwFGxpgEhz3dUOqL6C0i5M2hxnqdx0FesBowE6uu4Npsjf1KUE/aNcM/+9+loD1PCnQja634V5m6jKy2y121h7n5S/y0gbusoml9Kfe8z30CMwyP4SkHwtBIPG1bf38N08/LUfbr83p7CpIz6wOCcDdY8mx2SsfCoyb0eJcCP7czlqHe70i6F9o77SWWdRX/m25x9bcKug6MUYgVNB5BXbN3nj0RxmitNQ7MpPcs6YD0WxtY8KDh1XZ2a73bie+h/bjN2FqT92AnC9mmZ82YP/v/4l0GI3854dxB5uGGG3m9j1TqYg1I/GVpuqiF7lGRvnR7ip+ahpOVnmaV/pUOQPZGuYps/0hSo5UIo8G1o89nk4eICScwU9h6cSx+MrUjVciPssIadiL4SZ8KU55arkyzXb6zZRhm3MKKBTmB3FIU6/9MW/2N2LcoTurwcC8+wELvGTfYXOIxUVjvpIZ21ZfR7F1n61XRAQ== AndreasFackler@gmx.de
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDYrVCkGerTtN6QDfZK30PMORBO7Up6Cbg3fikqIaGlLFN+osMn6NjZvfKBXb2JOnlPGRtuzb8KUYl14gtHo/eQ9BT5ASKbKp+LUw6eEmfcaZdd7H3x9GfsbH3+EG9ALm/NPqUBDXNshRq563yfPJMkz4Rk/hcTVURl0E3IPcLHE5ymjCz8Ar8NMdvmWAD7ft/QqoRRG4Bnx3Tc6uSi5s35jHdj66zQlLpoDpZ+IW3z7mk03nE7B8in1quHfNKwRYNIb0vBoV5nKSFwquGpYfB+M0/g1R9a8JRrLeMGv+XkGVGt6Ltja76fxYygZZDP99XrFqw89bEL4mOzrDCGTwDZ adam@higgsboson
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnf8sXey8Q5tBPKEAkJAErTIYgluP/NnMpqG86dcSWDbJOXay5PKQh5iXwRLCJ+ZJLvft2a/QGMVXain/yF9wKugUPosFg7dqgQKyFQk0Y3nKK/I4OGyKd3XJtOBVckYow/wEPDLkAWThf2VimDudUbsJ6VPDbAlWBg8NTiDJRaPzohpkru7c/y+yyuFVxmRi4m+1YzM00R12HJr5jqf/qNOZI/pUccNEhMnchFlU7t++Pk0ZhwOgvLEeGfLGfI622HdNVToVNJ7VVxVMr+qyvqBXiIVfIdRVGvoBeoIboTpUxEcYvkgPouxQxkJOSrbxOF/b+3nQ6bff9UTUDL9zf bart@home
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCm34L9WS2OpYeJ8caQ3Z6dNrjIB1AZOCzejPJW33CeYMSN153l0p9pAlLNJPgOET/JcSHv07gOsdTzAqWZvEcuaLhCeX3X/WBXg1ZaqPvxXLsJLW4EtXDuENaQQ18oRpKFBuHULjkI4wopw34JMHWh6WIQrPVOLDcXsX9cfknviCGdlBScHahxB5ZZ9w5wKxdRDFqJEkit8rQlJR5grVrUq9SYb9zWUUBE0/YeULD6wIhrm5bDepfuTuELdhXF1nzUNQb6Kis5lsi9N1jeG5jMDWsP0cLYvUg1zkB4COiiI95ZT7Rwggbvj2/qrHG3P4LhJlXjaZTzyxjxZojMG+Tfjd0su3J+cnMGhkwj++f4CeFVo7Vbox6U5WT8E+UCXVqRcgvCOePdO76EI17bkHshhDef2RDGvBCYrkSy4f6iqCoKXRnPav2buEI+/pQgacfdxz3CeBrhuL1mXETO4BWf/YvDZYiX6L2+NgVcAVJEDXFrDNsMR2zRkqAKL3ysBOhGKJY20MxL6DuWMZv3byT3f8W3wnLDOQgN+k4HNtg/q5hi4a5KwoLPMTat/dD9lAgRpUhcdxh1AhkTmWxc12CrCpbVCc8kyzu4gdZLPE7ZGKP5YtbbHMJw5p2TzLMs9w8ZkB/WycTqZsqyYcHDPM5UDIh18/ncbITEhDyZIX6iHQ== qi.ma@maidsafe.net
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDX2xmkt7sgPmh13dG51YC2QMrznFWEHeMCqzUfP96mSh1dRPZB6nOFhEvMvHmJhqy6oyxWYChttPtSzWZf3o68+ur/YkRbFONV3Kn8sP9qfQHDGa7scT9n5EDxTLzGm1yN4RlQDD2bdhVkYmdkfLcdsEYntOi4Zj45N+xMziH1NQou02iwHuJTIHOscCxWyuTbKFYydNw1NWbCOX8AA0lZoqtrYTsZMceQ/AkLkG1N/dCZtQxMbfSBuRM9cbLsDK58n9PI+1c6OflIba2pb8lHiq7ThrZY8CcZolvFYRWlVYMfPysjKiiCQzegNQkGvKrb7r89swr6QAd/wGldqGab qi.ma@maidsafe.net
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDXPPVjQWY1A66cXlZIi7xrmwMa8TPIeYKMX9xWL5rW4IN1FJ0c6smL7qDRKs6eZP5XWYWV+aaeMoV5vBtCEEnA63xnEgQYgiY8UjLxWTY/0owpJWYg6WJNp26b8eKb/5Kwbpy88ETi52mSDTjJY+djfz30SPBOmHRV34Cmpi1paxWxSEzsxblCEU1Hv9WnE/fjt0E1VCKMKS6YGBEFuTRAnfOKIu7wlrbHkB5NaqGTqaj6ChO73TQe77qFnxQOp9Ph2jERaWFwvIZdFH0cD7+WpgmOaSjdzEYUESicqanZSgY2nN23zgMt16rigkuSoUWKhQavHpUFar17tAuQ7HQr dhardy@TPH-L13071
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN0PnwrwauBsFhuDz1I86Dq/yyteYU+qDYdnYcwyknbx8RrDJ9zzf2rPvFgyQwPPE/HZxXO2jp2nRrUnobucC8nFPFU+owf0mgKkWyT+UD1iVvqT3QHvpKgVzcsM4mSKYoQSf0OymPUNbYRRy01BHdNLXrqHFnC6YshPejuLpijiFsKe0OSQIkjcUffx+Xe/iTFmXHSaZTb23wjTwInBNA7ZofTZCJ94uQRxGXqW0hGqeCr6lw5rL18iomX8IhCFSPZnBzVBET9ll4QLVpadeq35noXy+ArgmCoyS60cPnbX/ZpMDleNgV8ClSzjoE0+N7FPb/7OL3L7ZRCgTqO9Pt dhardy@yoga.dhardy
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDDbHl0Nu0wXbj0WFVACmG12uQcNduZsgMpLxL0ANoKMvue4VWhhW/nhIK1AIyW+iSvgf1DVQGduWkoeh7SGWN/eHzAqJ2/o4UFbmsl8mL0bcvSakz9xrwhhZQpaK/Vy2N8319cF3uUwujg3SA9S4Q7tu0UKVYA9YF2AN070z5jnJyqK2VVROoWHM48cm/zwHZJBWsqRya7GxpvG70NsyzR+Ap8oe7NKXynZr8bxnQ3JPJr7PsWnnQiiTlzWhjSInoLU1+5xxvnZe0xPhB8K1BBzoOvJDqeI9IrDVGFcxu5PduIyEP9G43swjU/dMuY7Y87WKzHUCU5EMYx4/R5R/I1 dhardy@localhost.localdomain
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCoMq7SaKOpcan3UlMqP9CYSrF1IkNekjQ3BKVV7fF0LTN5lxQ9rOi3knEEuFCvH2VMbYEKFGAieJa3OuZHlBQMfv66LChZCeAiBaG68iRww84DXBiGWDTuUOBmwepOhXfKIO4I2Qr/st3sPWbD4ddprHd7TJdFqpnTmGwG61m4wf0m3jWZygfqxA85UlweUjWsP6DerMVrfG7F+kNYGdpFcDR0CjPKC2cHwGyIhmBI9jhLHfR1k03+qLKLAcPIIjh8+iAep4FELpnPkrC222DmAL7X9KDuYeh+V2GWc/jcaERFzk3xUx59L4Q6YGnLcO2EoRlGiBOITdrut9DBCIjCcyd/MCkHovL+zdmWCqxYT4ITFsOW91a5UlAAStQLRtCkHbprmIaNEsu6mWAW6owTAIAj0u5f5wyBOEkb7BSifPpbg0jN1EqbKnx+YuXN5MvrKmRQzARpJCIGyhJBpvP7Uh+IJHtULoJNbd5XzWN0F6Z+szlIsPUt31NbPLIeLzqqHuW+rmf1Cl/wcEX8BzOnP3PtTH6TfxfwcwP3v4n2HchPdzY9ZJRd+E5zuEAW4hJL3iWtTM5ARWZC2RSk1wCXggbUkhUQxpPS4GpTzmaBiHNirNZUJU0SDnHcsYuEsQditSqrh01ss9Y8HQRYJ0n2Qh/soV4sUCoe5dyGp3SfHw== michael@michael-macbook.local
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/Whitepapers/technical_papers/img/safecoin farming speed.png version [590e37927e].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/Whitepapers/technical_papers/img/safecoin resources.png version [ab4b3bd4f0].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/Whitepapers/technical_papers/img/safecoin transfer mech.png version [0940833af9].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/Whitepapers/technical_papers/safecoin citations.bib version [c35882d830].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
%% This BibTeX bibliography file was created using BibDesk.
%% http://bibdesk.sourceforge.net/


%% Created for Nick Lambert at 2015-01-07 10:11:39 +0000 


%% Saved with string encoding Unicode (UTF-8) 



@jurthesis{19,
	Date-Added = {2015-01-07 10:07:00 +0000},
	Date-Modified = {2015-01-07 10:11:29 +0000},
	Lastchecked = {7},
	Month = {January},
	Title = {Kademlia wikipedia page},
	Url = {http://en.wikipedia.org/wiki/Kademlia},
	Year = {2015}}

@webpage{18,
	Author = {John Aziz},
	Date-Added = {2014-12-11 16:38:39 +0000},
	Date-Modified = {2014-12-11 16:40:04 +0000},
	Lastchecked = {11},
	Month = {December},
	Title = {Does the Federal Reserve really control the money supply?},
	Url = {http://theweek.com/article/index/244899/does-the-federal-reserve-really-control-the-money-supply},
	Year = {2014},
	Bdsk-Url-1 = {http://theweek.com/article/index/244899/does-the-federal-reserve-really-control-the-money-supply}}

@webpage{17,
	Author = {Paul Krugman},
	Date-Added = {2014-12-11 15:08:47 +0000},
	Date-Modified = {2014-12-11 15:10:58 +0000},
	Lastchecked = {11},
	Month = {December},
	Title = {The textbook economics of cap-and-trade},
	Url = {http://krugman.blogs.nytimes.com/2009/09/27/the-textbook-economics-of-cap-and-trade/?_r=0},
	Year = {2014},
	Bdsk-Url-1 = {http://krugman.blogs.nytimes.com/2009/09/27/the-textbook-economics-of-cap-and-trade/?_r=0}}

@webpage{16,
	Author = {The Atlantic},
	Date-Added = {2014-11-28 11:03:07 +0000},
	Date-Modified = {2014-11-28 11:03:45 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {The Internet's Original Sin},
	Url = {http://www.theatlantic.com/technology/archive/2014/08/advertising-is-the-internets-original-sin/376041/},
	Year = {2014},
	Bdsk-Url-1 = {http://www.theatlantic.com/technology/archive/2014/08/advertising-is-the-internets-original-sin/376041/}}

@webpage{15,
	Author = {Facebook Inc},
	Date-Added = {2014-11-28 11:00:05 +0000},
	Date-Modified = {2014-11-28 11:00:53 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {Facebook Reports Fourth Quarter and Full Year 2013 Results},
	Url = {http://investor.fb.com/releasedetail.cfm?ReleaseID=821954},
	Year = {2014},
	Bdsk-Url-1 = {http://investor.fb.com/releasedetail.cfm?ReleaseID=821954}}

@jurthesis{14,
	Author = {Google Inc},
	Date-Added = {2014-11-28 10:58:41 +0000},
	Date-Modified = {2014-12-11 16:48:12 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {2013 Financial Tables},
	Url = {https://investor.google.com/financial/2013/tables.html},
	Year = {2014},
	Bdsk-Url-1 = {https://investor.google.com/financial/2013/tables.html}}

@webpage{13,
	Author = {Joe McCann},
	Date-Added = {2014-11-28 10:55:50 +0000},
	Date-Modified = {2014-11-28 11:01:03 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {Data Is The Most Valuable Commodity On Earth},
	Url = {http://subprint.com/blog/data-is-the-most-valuable-commodity-on-earth},
	Year = {2014},
	Bdsk-Url-1 = {http://subprint.com/blog/data-is-the-most-valuable-commodity-on-earth}}

@webpage{12,
	Author = {World Economic Forum},
	Date-Added = {2014-11-28 10:51:45 +0000},
	Date-Modified = {2014-11-28 10:52:51 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {Personal Data: The Emergence of a New Asset Class},
	Url = {http://www3.weforum.org/docs/WEF_ITTC_PersonalDataNewAsset_Report_2011.pdf},
	Year = {2014},
	Bdsk-Url-1 = {http://www3.weforum.org/docs/WEF_ITTC_PersonalDataNewAsset_Report_2011.pdf}}

@webpage{11,
	Author = {BBC News Web Page},
	Date-Added = {2014-11-28 10:36:05 +0000},
	Date-Modified = {2014-11-28 10:36:58 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {Gold v paper money},
	Url = {http://www.bbc.co.uk/news/business-18644230},
	Year = {2014},
	Bdsk-Url-1 = {http://www.bbc.co.uk/news/business-18644230}}

@webpage{10,
	Date-Added = {2014-11-28 10:34:17 +0000},
	Date-Modified = {2014-11-28 10:35:07 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {ECR Research Web Page},
	Url = {http://www.ecrresearch.com/world-economy/dangers-and-drawbacks-quantitative-easing},
	Year = {2014},
	Bdsk-Url-1 = {http://www.ecrresearch.com/world-economy/dangers-and-drawbacks-quantitative-easing}}

@webpage{9,
	Date-Added = {2014-11-28 10:31:55 +0000},
	Date-Modified = {2014-11-28 10:32:47 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {Federal Reserve Web Site},
	Url = {http://www.federalreserve.gov/faqs/currency_12773.htm},
	Year = {2014},
	Bdsk-Url-1 = {http://www.federalreserve.gov/faqs/currency_12773.htm}}

@webpage{8,
	Date-Added = {2014-11-28 10:29:03 +0000},
	Date-Modified = {2014-11-28 11:01:10 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {Bountify Web Page},
	Url = {https://bountify.co/},
	Year = {2014},
	Bdsk-Url-1 = {https://bountify.co/}}

@webpage{7,
	Date-Added = {2014-11-28 10:27:49 +0000},
	Date-Modified = {2014-11-28 10:28:30 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {Bounty Source Web Page},
	Url = {https://www.bountysource.com/},
	Year = {2014},
	Bdsk-Url-1 = {https://www.bountysource.com/}}

@webpage{6,
	Date-Added = {2014-11-28 10:25:36 +0000},
	Date-Modified = {2014-11-28 11:01:22 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {MaidSafe Wikipedia},
	Url = {http://en.wikipedia.org/wiki/MaidSafe},
	Year = {2014},
	Bdsk-Url-1 = {http://en.wikipedia.org/wiki/MaidSafe}}

@webpage{5,
	Date-Added = {2014-11-28 10:23:00 +0000},
	Date-Modified = {2014-11-28 10:24:14 +0000},
	Lastchecked = {28},
	Month = {November},
	Title = {Tor Incentives Roundup},
	Url = {https://blog.torproject.org/blog/tor-incentives-research-roundup-goldstar-par-braids-lira-tears-and-torcoin},
	Year = {2014},
	Bdsk-Url-1 = {https://blog.torproject.org/blog/tor-incentives-research-roundup-goldstar-par-braids-lira-tears-and-torcoin}}

@webpage{4,
	Date-Added = {2014-11-27 16:52:58 +0000},
	Date-Modified = {2014-11-28 11:01:57 +0000},
	Lastchecked = {27},
	Month = {November},
	Title = {Tor Metrics --- Direct users by country},
	Url = {https://metrics.torproject.org/userstats-relay-country.html},
	Year = {2014},
	Bdsk-Url-1 = {https://metrics.torproject.org/userstats-relay-country.html}}

@webpage{3,
	Date-Added = {2014-11-27 16:49:37 +0000},
	Date-Modified = {2014-11-27 16:51:52 +0000},
	Lastchecked = {27},
	Month = {November},
	Title = {Tor Metrics --- Relays and bridges in the network},
	Url = {https://metrics.torproject.org/networksize.html},
	Year = {2014},
	Bdsk-Url-1 = {https://metrics.torproject.org/networksize.html}}

@url{2,
	Author = {Christopher Doll, T. F. McLaughlin, Anjali Barretto},
	Date-Added = {2014-11-27 16:29:54 +0000},
	Date-Modified = {2015-01-06 10:07:32 +0000},
	Journal = {The International Journal of Basic and Applied Science},
	Month = {July},
	Number = {01},
	Pages = {131-149},
	Title = {The Token Economy: A Recent Review and Evaluation},
	Url = {http://www.insikapub.com/Vol-02/No-01/12IJBAS(2)(1).pdf},
	Volume = {02},
	Year = {2013},
	Bdsk-Url-1 = {http://www.insikapub.com/Vol-02/No-01/12IJBAS(2)(1).pdf}}

@webpage{1,
	Date-Modified = {2014-11-27 16:36:09 +0000},
	Owner = {nicklambert},
	Timestamp = {2014.11.27},
	Title = {Crypto-Currency Market Capitalizations},
	Url = {https://coinmarketcap.com/all/},
	Bdsk-Url-1 = {https://coinmarketcap.com/all/}}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































































































































































































































































































































































































































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/rfcs/text/0009-mpid-messaging/MPID Message Flow.png version [e7d83bbd48].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/rfcs/text/0011-improved-connection-management/Connection Management for Bootstrapping.png version [48210e296d].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/rfcs/text/0011-improved-connection-management/Connection Management.png version [c128e0074a].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/safe_examples/demo_app/resources/osx/helper_apps/Info EH.plist version [bb000d13a1].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>CFBundleDisplayName</key>
    <string>{{productName}} Helper EH</string>
    <key>CFBundleExecutable</key>
    <string>{{productName}} Helper EH</string>
    <key>CFBundleIdentifier</key>
    <string>{{identifier}}.helper.EH</string>
    <key>CFBundleName</key>
    <string>{{productName}} Helper EH</string>
    <key>CFBundlePackageType</key>
    <string>APPL</string>
    <key>DTSDKName</key>
    <string>macosx</string>
    <key>LSUIElement</key>
    <true/>
    <key>NSSupportsAutomaticGraphicsSwitching</key>
    <true/>
</dict>
</plist>
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<












































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/safe_examples/demo_app/resources/osx/helper_apps/Info NP.plist version [0a518159ab].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>CFBundleDisplayName</key>
    <string>{{productName}} Helper NP</string>
    <key>CFBundleExecutable</key>
    <string>{{productName}} Helper NP</string>
    <key>CFBundleIdentifier</key>
    <string>{{identifier}}.helper.NP</string>
    <key>CFBundleName</key>
    <string>{{productName}} Helper NP</string>
    <key>CFBundlePackageType</key>
    <string>APPL</string>
    <key>DTSDKName</key>
    <string>macosx</string>
    <key>LSUIElement</key>
    <true/>
    <key>NSSupportsAutomaticGraphicsSwitching</key>
    <true/>
</dict>
</plist>
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<












































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/safe_launcher/resources/osx/helper_apps/Info EH.plist version [bb000d13a1].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>CFBundleDisplayName</key>
    <string>{{productName}} Helper EH</string>
    <key>CFBundleExecutable</key>
    <string>{{productName}} Helper EH</string>
    <key>CFBundleIdentifier</key>
    <string>{{identifier}}.helper.EH</string>
    <key>CFBundleName</key>
    <string>{{productName}} Helper EH</string>
    <key>CFBundlePackageType</key>
    <string>APPL</string>
    <key>DTSDKName</key>
    <string>macosx</string>
    <key>LSUIElement</key>
    <true/>
    <key>NSSupportsAutomaticGraphicsSwitching</key>
    <true/>
</dict>
</plist>
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<












































Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/safe_launcher/resources/osx/helper_apps/Info NP.plist version [0a518159ab].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>CFBundleDisplayName</key>
    <string>{{productName}} Helper NP</string>
    <key>CFBundleExecutable</key>
    <string>{{productName}} Helper NP</string>
    <key>CFBundleIdentifier</key>
    <string>{{identifier}}.helper.NP</string>
    <key>CFBundleName</key>
    <string>{{productName}} Helper NP</string>
    <key>CFBundlePackageType</key>
    <string>APPL</string>
    <key>DTSDKName</key>
    <string>macosx</string>
    <key>LSUIElement</key>
    <true/>
    <key>NSSupportsAutomaticGraphicsSwitching</key>
    <true/>
</dict>
</plist>
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<












































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/Cargo.toml version [e5b4ce8041].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
[package]
authors = ["The Rust Project Developers"]
name = "bootstrap"
version = "0.0.0"

[lib]
name = "bootstrap"
path = "lib.rs"
doctest = false

[[bin]]
name = "bootstrap"
path = "bin/main.rs"
test = false

[[bin]]
name = "rustc"
path = "bin/rustc.rs"
test = false

[[bin]]
name = "rustdoc"
path = "bin/rustdoc.rs"
test = false

[dependencies]
build_helper = { path = "../build_helper" }
cmake = "0.1.17"
filetime = "0.1"
num_cpus = "0.2"
toml = "0.1"
getopts = "0.2"
rustc-serialize = "0.3"
gcc = "0.3.38"
libc = "0.2"
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/README.md version [8c74938c29].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
# rustbuild - Bootstrapping Rust

This is an in-progress README which is targeted at helping to explain how Rust
is bootstrapped and in general some of the technical details of the build
system.

> **Note**: This build system is currently under active development and is not
> intended to be the primarily used one just yet. The makefiles are currently
> the ones that are still "guaranteed to work" as much as possible at least.

## Using rustbuild

The rustbuild build system has a primary entry point, a top level `x.py` script:

```
python ./x.py build
```

Note that if you're on Unix you should be able to execute the script directly:

```
./x.py build
```

The script accepts commands, flags, and arguments to determine what to do:

* `build` - a general purpose command for compiling code. Alone `build` will
  bootstrap the entire compiler, and otherwise arguments passed indicate what to
  build. For example:

  ```
  # build the whole compiler
  ./x.py build

  # build the stage1 compiler
  ./x.py build --stage 1

  # build stage0 libstd
  ./x.py build --stage 0 src/libstd

  # build a particular crate in stage0
  ./x.py build --stage 0 src/libtest
  ```

  If files are dirty that would normally be rebuilt from stage 0, that can be
  overidden using `--keep-stage 0`. Using `--keep-stage n` will skip all steps
  that belong to stage n or earlier:

  ```
  # keep old build products for stage 0 and build stage 1
  ./x.py build --keep-stage 0 --stage 1
  ```

* `test` - a command for executing unit tests. Like the `build` command this
  will execute the entire test suite by default, and otherwise it can be used to
  select which test suite is run:

  ```
  # run all unit tests
  ./x.py test

  # execute the run-pass test suite
  ./x.py test src/test/run-pass

  # execute only some tests in the run-pass test suite
  ./x.py test src/test/run-pass --test-args substring-of-test-name

  # execute tests in the standard library in stage0
  ./x.py test --stage 0 src/libstd

  # execute all doc tests
  ./x.py test src/doc
  ```

* `doc` - a command for building documentation. Like above can take arguments
  for what to document.

## Configuring rustbuild

There are currently two primary methods for configuring the rustbuild build
system. First, the `./configure` options serialized in `config.mk` will be
parsed and read. That is, if any `./configure` options are passed, they'll be
handled naturally.

Next, rustbuild offers a TOML-based configuration system with a `config.toml`
file in the same location as `config.mk`. An example of this configuration can
be found at `src/bootstrap/config.toml.example`, and the configuration file
can also be passed as `--config path/to/config.toml` if the build system is
being invoked manually (via the python script).

Finally, rustbuild makes use of the [gcc-rs crate] which has [its own
method][env-vars] of configuring C compilers and C flags via environment
variables.

[gcc-rs crate]: https://github.com/alexcrichton/gcc-rs
[env-vars]: https://github.com/alexcrichton/gcc-rs#external-configuration-via-environment-variables

## Build stages

The rustbuild build system goes through a few phases to actually build the
compiler. What actually happens when you invoke rustbuild is:

1. The entry point script, `x.py` is run. This script is
   responsible for downloading the stage0 compiler/Cargo binaries, and it then
   compiles the build system itself (this folder). Finally, it then invokes the
   actual `bootstrap` binary build system.
2. In Rust, `bootstrap` will slurp up all configuration, perform a number of
   sanity checks (compilers exist for example), and then start building the
   stage0 artifacts.
3. The stage0 `cargo` downloaded earlier is used to build the standard library
   and the compiler, and then these binaries are then copied to the `stage1`
   directory. That compiler is then used to generate the stage1 artifacts which
   are then copied to the stage2 directory, and then finally the stage2
   artifacts are generated using that compiler.

The goal of each stage is to (a) leverage Cargo as much as possible and failing
that (b) leverage Rust as much as possible!

## Incremental builds

You can configure rustbuild to use incremental compilation. Because
incremental is new and evolving rapidly, if you want to use it, it is
recommended that you replace the snapshot with a locally installed
nightly build of rustc. You will want to keep this up to date.

To follow this course of action, first thing you will want to do is to
install a nightly, presumably using `rustup`. You will then want to
configure your directory to use this build, like so:

```
# configure to use local rust instead of downloding a beta.
# `--local-rust-root` is optional here. If elided, we will
# use whatever rustc we find on your PATH.
> configure --enable-rustbuild --local-rust-root=~/.cargo/ --enable-local-rebuild
```

After that, you can use the `--incremental` flag to actually do
incremental builds:

```
> ../x.py build --incremental
```

The `--incremental` flag will store incremental compilation artifacts
in `build/<host>/stage0-incremental`. Note that we only use incremental
compilation for the stage0 -> stage1 compilation -- this is because
the stage1 compiler is changing, and we don't try to cache and reuse
incremental artifacts across different versions of the compiler. For
this reason, `--incremental` defaults to `--stage 1` (though you can
manually select a higher stage, if you prefer).

You can always drop the `--incremental` to build as normal (but you
will still be using the local nightly as your bootstrap).

## Directory Layout

This build system houses all output under the `build` directory, which looks
like this:

```
# Root folder of all output. Everything is scoped underneath here
build/

  # Location where the stage0 compiler downloads are all cached. This directory
  # only contains the tarballs themselves as they're extracted elsewhere.
  cache/
    2015-12-19/
    2016-01-15/
    2016-01-21/
    ...

  # Output directory for building this build system itself. The stage0
  # cargo/rustc are used to build the build system into this location.
  bootstrap/
    debug/
    release/

  # Output of the dist-related steps like dist-std, dist-rustc, and dist-docs
  dist/

  # Temporary directory used for various input/output as part of various stages
  tmp/

  # Each remaining directory is scoped by the "host" triple of compilation at
  # hand.
  x86_64-unknown-linux-gnu/

    # The build artifacts for the `compiler-rt` library for the target this
    # folder is under. The exact layout here will likely depend on the platform,
    # and this is also built with CMake so the build system is also likely
    # different.
    compiler-rt/
      build/

    # Output folder for LLVM if it is compiled for this target
    llvm/

      # build folder (e.g. the platform-specific build system). Like with
      # compiler-rt this is compiled with CMake
      build/

      # Installation of LLVM. Note that we run the equivalent of 'make install'
      # for LLVM to setup these folders.
      bin/
      lib/
      include/
      share/
      ...

    # Output folder for all documentation of this target. This is what's filled
    # in whenever the `doc` step is run.
    doc/

    # Output for all compiletest-based test suites
    test/
      run-pass/
      compile-fail/
      debuginfo/
      ...

    # Location where the stage0 Cargo and Rust compiler are unpacked. This
    # directory is purely an extracted and overlaid tarball of these two (done
    # by the bootstrap.py python script). In theory the build system does not
    # modify anything under this directory afterwards.
    stage0/

    # These two build directories are the cargo output directories for builds of
    # the standard library and compiler, respectively. Internally these may also
    # have other target directories, which represent artifacts being compiled
    # from the host to the specified target.
    #
    # Essentially, each of these directories is filled in by one `cargo`
    # invocation. The build system instruments calling Cargo in the right order
    # with the right variables to ensure these are filled in correctly.
    stageN-std/
    stageN-test/
    stageN-rustc/
    stageN-tools/

    # This is a special case of the above directories, **not** filled in via
    # Cargo but rather the build system itself. The stage0 compiler already has
    # a set of target libraries for its own host triple (in its own sysroot)
    # inside of stage0/. When we run the stage0 compiler to bootstrap more
    # things, however, we don't want to use any of these libraries (as those are
    # the ones that we're building). So essentially, when the stage1 compiler is
    # being compiled (e.g. after libstd has been built), *this* is used as the
    # sysroot for the stage0 compiler being run.
    #
    # Basically this directory is just a temporary artifact used to configure the
    # stage0 compiler to ensure that the libstd we just built is used to
    # compile the stage1 compiler.
    stage0-sysroot/lib/

    # These output directories are intended to be standalone working
    # implementations of the compiler (corresponding to each stage). The build
    # system will link (using hard links) output from stageN-{std,rustc} into
    # each of these directories.
    #
    # In theory there is no extra build output in these directories.
    stage1/
    stage2/
    stage3/
```

## Cargo projects

The current build is unfortunately not quite as simple as `cargo build` in a
directory, but rather the compiler is split into three different Cargo projects:

* `src/libstd` - the standard library
* `src/libtest` - testing support, depends on libstd
* `src/rustc` - the actual compiler itself

Each "project" has a corresponding Cargo.lock file with all dependencies, and
this means that building the compiler involves running Cargo three times. The
structure here serves two goals:

1. Facilitating dependencies coming from crates.io. These dependencies don't
   depend on `std`, so libstd is a separate project compiled ahead of time
   before the actual compiler builds.
2. Splitting "host artifacts" from "target artifacts". That is, when building
   code for an arbitrary target you don't need the entire compiler, but you'll
   end up needing libraries like libtest that depend on std but also want to use
   crates.io dependencies. Hence, libtest is split out as its own project that
   is sequenced after `std` but before `rustc`. This project is built for all
   targets.

There is some loss in build parallelism here because libtest can be compiled in
parallel with a number of rustc artifacts, but in theory the loss isn't too bad!

## Build tools

We've actually got quite a few tools that we use in the compiler's build system
and for testing. To organize these, each tool is a project in `src/tools` with a
corresponding `Cargo.toml`. All tools are compiled with Cargo (currently having
independent `Cargo.lock` files) and do not currently explicitly depend on the
compiler or standard library. Compiling each tool is sequenced after the
appropriate libstd/libtest/librustc compile above.

## Extending rustbuild

So you'd like to add a feature to the rustbuild build system or just fix a bug.
Great! One of the major motivational factors for moving away from `make` is that
Rust is in theory much easier to read, modify, and write. If you find anything
excessively confusing, please open an issue on this and we'll try to get it
documented or simplified pronto.

First up, you'll probably want to read over the documentation above as that'll
give you a high level overview of what rustbuild is doing. You also probably
want to play around a bit yourself by just getting it up and running before you
dive too much into the actual build system itself.

After that, each module in rustbuild should have enough documentation to keep
you up and running. Some general areas that you may be interested in modifying
are:

* Adding a new build tool? Take a look at `bootstrap/step.rs` for examples of
  other tools.
* Adding a new compiler crate? Look no further! Adding crates can be done by
  adding a new directory with `Cargo.toml` followed by configuring all
  `Cargo.toml` files accordingly.
* Adding a new dependency from crates.io? We're still working on that, so hold
  off on that for now.
* Adding a new configuration option? Take a look at `bootstrap/config.rs` or
  perhaps `bootstrap/flags.rs` and then modify the build elsewhere to read that
  option.
* Adding a sanity check? Take a look at `bootstrap/sanity.rs`.

If you have any questions feel free to reach out on `#rust-internals` on IRC or
open an issue in the bug tracker!
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




















































































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/bin/main.rs version [46a25b876b].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! rustbuild, the Rust build system
//!
//! This is the entry point for the build system used to compile the `rustc`
//! compiler. Lots of documentation can be found in the `README.md` file in the
//! parent directory, and otherwise documentation can be found throughout the `build`
//! directory in each respective module.

#![deny(warnings)]

extern crate bootstrap;

use std::env;

use bootstrap::{Flags, Config, Build};

fn main() {
    // Collect every CLI argument except the binary name and hand them to
    // the build system's flag parser.
    let cli_args: Vec<_> = env::args().skip(1).collect();
    let flags = Flags::parse(&cli_args);
    let mut config = Config::parse(&flags.build, flags.config.clone());

    // compat with `./configure` while we're still using that
    if std::fs::metadata("config.mk").is_ok() {
        config.update_with_config_mk();
    }

    // Kick off the actual bootstrap build.
    Build::new(flags, config).build();
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<










































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/bin/rustc.rs version [9e1753f78f].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Shim which is passed to Cargo as "rustc" when running the bootstrap.
//!
//! This shim will take care of some various tasks that our build process
//! requires that Cargo can't quite do through normal configuration:
//!
//! 1. When compiling build scripts and build dependencies, we need a guaranteed
//!    full standard library available. The only compiler which actually has
//!    this is the snapshot, so we detect this situation and always compile with
//!    the snapshot compiler.
//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
//!    (and this slightly differs based on a whether we're using a snapshot or
//!    not), so we do that all here.
//!
//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
//! switching compilers for the bootstrap and for build scripts will probably
//! never get replaced.

#![deny(warnings)]

extern crate bootstrap;

use std::env;
use std::ffi::OsString;
use std::io;
use std::io::prelude::*;
use std::str::FromStr;
use std::path::PathBuf;
use std::process::{Command, ExitStatus};

fn main() {
    let args = env::args_os().skip(1).collect::<Vec<_>>();
    // Detect whether or not we're a build script depending on whether --target
    // is passed (a bit janky...)
    let target = args.windows(2)
        .find(|w| &*w[0] == "--target")
        .and_then(|w| w[1].to_str());
    let version = args.iter().find(|w| &**w == "-vV");

    // Verbosity level handed down by the build system; 0 when unset.
    let verbose = match env::var("RUSTC_VERBOSE") {
        Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"),
        Err(_) => 0,
    };

    // Build scripts always use the snapshot compiler which is guaranteed to be
    // able to produce an executable, whereas intermediate compilers may not
    // have the standard library built yet and may not be able to produce an
    // executable. Otherwise we just use the standard compiler we're
    // bootstrapping with.
    //
    // Also note that cargo will detect the version of the compiler to trigger
    // a rebuild when the compiler changes. If this happens, we want to make
    // sure to use the actual compiler instead of the snapshot compiler because
    // that's the one that's actually changing.
    let (rustc, libdir) = if target.is_none() && version.is_none() {
        ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
    } else {
        ("RUSTC_REAL", "RUSTC_LIBDIR")
    };
    let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
    let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
    // Optional fallback command to run (for diagnostics) if rustc fails.
    let mut on_fail = env::var_os("RUSTC_ON_FAIL").map(|of| Command::new(of));

    let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc));
    let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir));
    // Prepend the compiler's own library directory to the dynamic loader path.
    let mut dylib_path = bootstrap::util::dylib_path();
    dylib_path.insert(0, PathBuf::from(libdir));

    let mut cmd = Command::new(rustc);
    cmd.args(&args)
        .arg("--cfg")
        .arg(format!("stage{}", stage))
        .env(bootstrap::util::dylib_path_var(),
             env::join_paths(&dylib_path).unwrap());

    if let Some(target) = target {
        // The stage0 compiler has a special sysroot distinct from what we
        // actually downloaded, so we just always pass the `--sysroot` option.
        cmd.arg("--sysroot").arg(sysroot);

        // When we build Rust dylibs they're all intended for intermediate
        // usage, so make sure we pass the -Cprefer-dynamic flag instead of
        // linking all deps statically into the dylib.
        if env::var_os("RUSTC_NO_PREFER_DYNAMIC").is_none() {
            cmd.arg("-Cprefer-dynamic");
        }

        // Pass the `rustbuild` feature flag to crates which rustbuild is
        // building. See the comment in bootstrap/lib.rs where this env var is
        // set for more details.
        if env::var_os("RUSTBUILD_UNSTABLE").is_some() {
            cmd.arg("--cfg").arg("rustbuild");
        }

        // Help the libc crate compile by assisting it in finding the MUSL
        // native libraries.
        if let Some(s) = env::var_os("MUSL_ROOT") {
            let mut root = OsString::from("native=");
            root.push(&s);
            root.push("/lib");
            cmd.arg("-L").arg(&root);
        }

        // Pass down extra flags, commonly used to configure `-Clinker` when
        // cross compiling.
        if let Ok(s) = env::var("RUSTC_FLAGS") {
            cmd.args(&s.split(" ").filter(|s| !s.is_empty()).collect::<Vec<_>>());
        }

        // Pass down incremental directory, if any.
        if let Ok(dir) = env::var("RUSTC_INCREMENTAL") {
            cmd.arg(format!("-Zincremental={}", dir));

            if verbose > 0 {
                cmd.arg("-Zincremental-info");
            }
        }

        // If we're compiling specifically the `panic_abort` crate then we pass
        // the `-C panic=abort` option. Note that we do not do this for any
        // other crate intentionally as this is the only crate for now that we
        // ship with panic=abort.
        //
        // This... is a bit of a hack how we detect this. Ideally this
        // information should be encoded in the crate I guess? Would likely
        // require an RFC amendment to RFC 1513, however.
        let is_panic_abort = args.windows(2)
            .any(|a| &*a[0] == "--crate-name" && &*a[1] == "panic_abort");
        if is_panic_abort {
            cmd.arg("-C").arg("panic=abort");
        }

        // Set various options from config.toml to configure how we're building
        // code.
        if env::var("RUSTC_DEBUGINFO") == Ok("true".to_string()) {
            cmd.arg("-g");
        } else if env::var("RUSTC_DEBUGINFO_LINES") == Ok("true".to_string()) {
            cmd.arg("-Cdebuginfo=1");
        }
        let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") {
            Ok(s) => if s == "true" { "y" } else { "n" },
            Err(..) => "n",
        };
        cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions));
        if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") {
            cmd.arg("-C").arg(format!("codegen-units={}", s));
        }

        // Emit save-analysis info.
        if env::var("RUSTC_SAVE_ANALYSIS") == Ok("api".to_string()) {
            cmd.arg("-Zsave-analysis-api");
        }

        // Dealing with rpath here is a little special, so let's go into some
        // detail. First off, `-rpath` is a linker option on Unix platforms
        // which adds to the runtime dynamic loader path when looking for
        // dynamic libraries. We use this by default on Unix platforms to ensure
        // that our nightlies behave the same on Windows, that is they work out
        // of the box. This can be disabled, of course, but basically that's why
        // we're gated on RUSTC_RPATH here.
        //
        // Ok, so the astute might be wondering "why isn't `-C rpath` used
        // here?" and that is indeed a good question to ask. This codegen
        // option is the compiler's current interface to generating an rpath.
        // Unfortunately it doesn't quite suffice for us. The flag currently
        // takes no value as an argument, so the compiler calculates what it
        // should pass to the linker as `-rpath`. This unfortunately is based on
        // the **compile time** directory structure which when building with
        // Cargo will be very different than the runtime directory structure.
        //
        // All that's a really long winded way of saying that if we use
        // `-Crpath` then the executables generated have the wrong rpath of
        // something like `$ORIGIN/deps` when in fact the way we distribute
        // rustc requires the rpath to be `$ORIGIN/../lib`.
        //
        // So, all in all, to set up the correct rpath we pass the linker
        // argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it
        // fun to pass a flag to a tool to pass a flag to pass a flag to a tool
        // to change a flag in a binary?
        if env::var("RUSTC_RPATH") == Ok("true".to_string()) {
            let rpath = if target.contains("apple") {

                // Note that we need to take one extra step on macOS to also pass
                // `-Wl,-install_name,@rpath/...` to get things to work right. To
                // do that we pass a weird flag to the compiler to get it to do
                // so. Note that this is definitely a hack, and we should likely
                // flesh out rpath support more fully in the future.
                if stage != "0" {
                    cmd.arg("-Z").arg("osx-rpath-install-name");
                }
                Some("-Wl,-rpath,@loader_path/../lib")
            } else if !target.contains("windows") {
                Some("-Wl,-rpath,$ORIGIN/../lib")
            } else {
                None
            };
            if let Some(rpath) = rpath {
                cmd.arg("-C").arg(format!("link-args={}", rpath));
            }

            // NOTE(review): this RUSTFLAGS pass-through only happens when
            // RUSTC_RPATH is enabled, since it is nested inside the rpath
            // block above — looks unintentional; confirm before relying on it.
            if let Ok(s) = env::var("RUSTFLAGS") {
                for flag in s.split_whitespace() {
                    cmd.arg(flag);
                }
            }
        }

        if target.contains("pc-windows-msvc") {
            cmd.arg("-Z").arg("unstable-options");
            cmd.arg("-C").arg("target-feature=+crt-static");
        }
    }

    if verbose > 1 {
        writeln!(&mut io::stderr(), "rustc command: {:?}", cmd).unwrap();
    }

    // Actually run the compiler!
    std::process::exit(if let Some(ref mut on_fail) = on_fail {
        match cmd.status() {
            Ok(s) if s.success() => 0,
            _ => {
                println!("\nDid not run successfully:\n{:?}\n-------------", cmd);
                exec_cmd(on_fail).expect("could not run the backup command");
                1
            }
        }
    } else {
        std::process::exit(match exec_cmd(&mut cmd) {
            Ok(s) => s.code().unwrap_or(0xfe),
            Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
        })
    })
}

// On Unix, replace the current process with the compiler via exec(2).
#[cfg(unix)]
fn exec_cmd(cmd: &mut Command) -> ::std::io::Result<ExitStatus> {
    use std::os::unix::process::CommandExt;
    // `exec` only returns when it fails, so reaching the next line always
    // means we have an error to report.
    let exec_error = cmd.exec();
    Err(exec_error)
}

// Fallback for non-Unix platforms: there is no `exec`, so spawn the
// compiler as a child process and wait for its exit status.
#[cfg(not(unix))]
fn exec_cmd(cmd: &mut Command) -> ::std::io::Result<ExitStatus> {
    cmd.status()
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




























































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/bin/rustdoc.rs version [608b78285f].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Shim which is passed to Cargo as "rustdoc" when running the bootstrap.
//!
//! See comments in `src/bootstrap/rustc.rs` for more information.

#![deny(warnings)]

extern crate bootstrap;

use std::env;
use std::process::Command;
use std::path::PathBuf;

fn main() {
    // Forward every CLI argument (minus argv[0]) to the real rustdoc.
    let forwarded: Vec<_> = env::args_os().skip(1).collect();

    // Configuration handed down from the build system via the environment.
    let real_rustdoc = env::var_os("RUSTDOC_REAL").expect("RUSTDOC_REAL was not set");
    let libdir = env::var_os("RUSTC_LIBDIR").expect("RUSTC_LIBDIR was not set");
    let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
    let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");

    // Make the freshly built libraries visible to the dynamic loader first.
    let mut dylib_path = bootstrap::util::dylib_path();
    dylib_path.insert(0, PathBuf::from(libdir));

    let mut cmd = Command::new(real_rustdoc);
    cmd.args(&forwarded);
    cmd.arg("--cfg").arg(format!("stage{}", stage));
    cmd.arg("--cfg").arg("dox");
    cmd.arg("--sysroot").arg(sysroot);
    cmd.env(bootstrap::util::dylib_path_var(),
            env::join_paths(&dylib_path).unwrap());

    // Propagate rustdoc's exit code; a missing code (killed by signal) maps to 1.
    let code = match cmd.status() {
        Ok(status) => status.code().unwrap_or(1),
        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
    };
    std::process::exit(code)
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






























































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/bootstrap.py version [6512ebcb00].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
# Copyright 2015-2016 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.

from __future__ import print_function
import argparse
import contextlib
import datetime
import hashlib
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile

from time import time


def get(url, path, verbose=False):
    """Download ``url`` to ``path``, verified against ``url``.sha256.

    The payload is first downloaded to a temporary file and only moved into
    place once its SHA-256 digest matches the published one.  An existing
    ``path`` whose digest already matches is reused without re-downloading.

    Raises RuntimeError if the downloaded file fails verification.
    """
    sha_url = url + ".sha256"
    # NamedTemporaryFile is used only to reserve unique filenames; the actual
    # contents are written by download() below, hence delete=False.
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_path = temp_file.name
    with tempfile.NamedTemporaryFile(suffix=".sha256", delete=False) as sha_file:
        sha_path = sha_file.name

    try:
        download(sha_path, sha_url, False, verbose)
        if os.path.exists(path):
            if verify(path, sha_path, False):
                if verbose:
                    # Fixed message typo: "already-download" -> "already-downloaded"
                    print("using already-downloaded file " + path)
                return
            else:
                if verbose:
                    print("ignoring already-downloaded file " + path + " due to failed verification")
                os.unlink(path)
        download(temp_path, url, True, verbose)
        if not verify(temp_path, sha_path, verbose):
            raise RuntimeError("failed verification")
        if verbose:
            print("moving {} to {}".format(temp_path, path))
        shutil.move(temp_path, path)
    finally:
        # Always clean up the scratch files, even on failure.
        delete_if_present(sha_path, verbose)
        delete_if_present(temp_path, verbose)


def delete_if_present(path, verbose):
    """Remove the regular file at ``path`` if it exists; no-op otherwise."""
    if not os.path.isfile(path):
        return
    if verbose:
        print("removing " + path)
    os.unlink(path)


def download(path, url, probably_big, verbose):
    """Fetch ``url`` into ``path``, retrying spurious failures.

    Four attempts are made with errors raised as RuntimeError; a final fifth
    attempt lets _download terminate the process itself on failure.
    """
    attempts_left = 4
    while attempts_left > 0:
        try:
            _download(path, url, probably_big, verbose, True)
            return
        except RuntimeError:
            print("\nspurious failure, trying again")
        attempts_left -= 1
    _download(path, url, probably_big, verbose, False)


def _download(path, url, probably_big, verbose, exception):
    """Single download attempt via PowerShell (Windows) or curl (elsewhere)."""
    if probably_big or verbose:
        print("downloading {}".format(url))
    # see http://serverfault.com/questions/301128/how-to-download
    if sys.platform == 'win32':
        run(["PowerShell.exe", "/nologo", "-Command",
             "(New-Object System.Net.WebClient)"
             ".DownloadFile('{}', '{}')".format(url, path)],
            verbose=verbose,
            exception=exception)
    else:
        # "-#" shows a progress bar, "-s" is silent.
        option = "-#" if (probably_big or verbose) else "-s"
        run(["curl", option, "--retry", "3", "-Sf", "-o", path, url],
            verbose=verbose,
            exception=exception)


def verify(path, sha_path, verbose):
    """Return True if the SHA-256 of ``path`` matches the digest in ``sha_path``.

    ``sha_path`` is expected to hold the hex digest as the first whitespace-
    separated token of its first line (the `sha256sum` output format).
    """
    if verbose:
        print("verifying " + path)
    sha = hashlib.sha256()
    with open(path, "rb") as f:
        # Hash in fixed-size chunks so large tarballs are not read into
        # memory all at once (the previous code used a single f.read()).
        while True:
            chunk = f.read(1024 * 1024)
            if not chunk:
                break
            sha.update(chunk)
    found = sha.hexdigest()
    with open(sha_path, "r") as f:
        expected = f.readline().split()[0]
    verified = found == expected
    if not verified:
        print("invalid checksum:\n"
               "    found:    {}\n"
               "    expected: {}".format(found, expected))
    return verified


def unpack(tarball, dst, verbose=False, match=None):
    """Extract ``tarball`` into ``dst``, stripping the top-level directory.

    If ``match`` is given, only members whose (top-level-stripped) name starts
    with ``match`` are kept, and the "<match>/" prefix is removed from them.

    Bug fix: the original stripped the prefix unconditionally, so calling with
    the default ``match=None`` raised TypeError on ``len(None)``.
    """
    print("extracting " + tarball)
    fname = os.path.basename(tarball).replace(".tar.gz", "")
    with contextlib.closing(tarfile.open(tarball)) as tar:
        for p in tar.getnames():
            # Skip the bare top-level directory entry itself.
            if "/" not in p:
                continue
            name = p.replace(fname + "/", "", 1)
            if match is not None:
                if not name.startswith(match):
                    continue
                # Drop the "<match>/" prefix from the destination name.
                name = name[len(match) + 1:]

            fp = os.path.join(dst, name)
            if verbose:
                print("  extracting " + p)
            tar.extract(p, dst)
            tp = os.path.join(dst, p)
            # Keep an already-existing destination directory in place.
            if os.path.isdir(tp) and os.path.exists(fp):
                continue
            shutil.move(tp, fp)
    # Remove the now-empty top-level extraction directory.
    shutil.rmtree(os.path.join(dst, fname))

def run(args, verbose=False, exception=False):
    """Run an external command; on failure raise RuntimeError (when verbose
    or exception is set) or terminate the script via sys.exit."""
    if verbose:
        print("running: " + ' '.join(args))
    sys.stdout.flush()
    # Use Popen here instead of call() as it apparently allows powershell on
    # Windows to not lock up waiting for input presumably.
    proc = subprocess.Popen(args)
    exit_code = proc.wait()
    if exit_code == 0:
        return
    err = "failed to run: " + ' '.join(args)
    if verbose or exception:
        raise RuntimeError(err)
    sys.exit(err)

def stage0_data(rust_root):
    """Parse src/stage0.txt under ``rust_root`` into a key -> value dict.

    Blank lines and '#' comment lines are ignored; every other line must be
    of the form "key: value".
    """
    stage0_path = os.path.join(rust_root, "src/stage0.txt")
    data = {}
    with open(stage0_path, 'r') as stage0_file:
        for raw_line in stage0_file:
            entry = raw_line.rstrip()  # strip the trailing newline
            if not entry or entry.startswith("#"):
                continue
            key, value = entry.split(": ", 1)
            data[key] = value
    return data

def format_build_time(duration):
    """Render a duration in seconds as H:MM:SS (fractional seconds dropped)."""
    whole_seconds = int(duration)
    return str(datetime.timedelta(seconds=whole_seconds))


class RustBuild(object):
    """Downloads a stage0 compiler/cargo and builds the `bootstrap` binary
    which drives the rest of the Rust build.

    Callers are expected to set these attributes before use: build_dir,
    rust_root, config_toml, config_mk, clean, verbose, build, and (from
    stage0_data) _rustc_channel / _rustc_date / _cargo_rev, plus
    use_vendored_sources / use_locked_deps.
    """

    def download_stage0(self):
        """Download (with caching) and unpack the stage0 rust-std, rustc and
        cargo tarballs into bin_root(), refreshing them whenever the recorded
        stamps no longer match the wanted stage0 revisions."""
        cache_dst = os.path.join(self.build_dir, "cache")
        rustc_cache = os.path.join(cache_dst, self.stage0_rustc_date())
        cargo_cache = os.path.join(cache_dst, self.stage0_cargo_rev())
        if not os.path.exists(rustc_cache):
            os.makedirs(rustc_cache)
        if not os.path.exists(cargo_cache):
            os.makedirs(cargo_cache)

        # Only manage rustc when it lives under bin_root() (i.e. the user has
        # not pointed us at an external compiler) and it is missing or stale.
        if self.rustc().startswith(self.bin_root()) and \
                (not os.path.exists(self.rustc()) or self.rustc_out_of_date()):
            self.print_what_it_means_to_bootstrap()
            if os.path.exists(self.bin_root()):
                shutil.rmtree(self.bin_root())
            channel = self.stage0_rustc_channel()
            filename = "rust-std-{}-{}.tar.gz".format(channel, self.build)
            url = "https://static.rust-lang.org/dist/" + self.stage0_rustc_date()
            tarball = os.path.join(rustc_cache, filename)
            if not os.path.exists(tarball):
                get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
            unpack(tarball, self.bin_root(),
                   match="rust-std-" + self.build,
                   verbose=self.verbose)

            filename = "rustc-{}-{}.tar.gz".format(channel, self.build)
            url = "https://static.rust-lang.org/dist/" + self.stage0_rustc_date()
            tarball = os.path.join(rustc_cache, filename)
            if not os.path.exists(tarball):
                get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
            unpack(tarball, self.bin_root(), match="rustc", verbose=self.verbose)
            self.fix_executable(self.bin_root() + "/bin/rustc")
            self.fix_executable(self.bin_root() + "/bin/rustdoc")
            with open(self.rustc_stamp(), 'w') as f:
                f.write(self.stage0_rustc_date())

        if self.cargo().startswith(self.bin_root()) and \
                (not os.path.exists(self.cargo()) or self.cargo_out_of_date()):
            self.print_what_it_means_to_bootstrap()
            filename = "cargo-nightly-{}.tar.gz".format(self.build)
            url = "https://s3.amazonaws.com/rust-lang-ci/cargo-builds/" + self.stage0_cargo_rev()
            tarball = os.path.join(cargo_cache, filename)
            if not os.path.exists(tarball):
                get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
            unpack(tarball, self.bin_root(), match="cargo", verbose=self.verbose)
            self.fix_executable(self.bin_root() + "/bin/cargo")
            with open(self.cargo_stamp(), 'w') as f:
                f.write(self.stage0_cargo_rev())

    def fix_executable(self, fname):
        """On NixOS, patch `fname` so its ELF interpreter points at the
        system's actual dynamic loader (binaries from static.rust-lang.org
        reference /lib paths that do not exist there)."""
        # If we're on NixOS we need to change the path to the dynamic loader

        default_encoding = sys.getdefaultencoding()
        try:
            ostype = subprocess.check_output(['uname', '-s']).strip().decode(default_encoding)
        except (subprocess.CalledProcessError, WindowsError):
            return

        if ostype != "Linux":
            return

        # /etc/NIXOS present and /lib absent is the NixOS fingerprint.
        if not os.path.exists("/etc/NIXOS"):
            return
        if os.path.exists("/lib"):
            return

        # At this point we're pretty sure the user is running NixOS
        print("info: you seem to be running NixOS. Attempting to patch " + fname)

        try:
            interpreter = subprocess.check_output(["patchelf", "--print-interpreter", fname])
            interpreter = interpreter.strip().decode(default_encoding)
        except subprocess.CalledProcessError as e:
            print("warning: failed to call patchelf: %s" % e)
            return

        loader = interpreter.split("/")[-1]

        # Discover where the real loader lives by inspecting a known-good
        # system binary.
        try:
            ldd_output = subprocess.check_output(['ldd', '/run/current-system/sw/bin/sh'])
            ldd_output = ldd_output.strip().decode(default_encoding)
        except subprocess.CalledProcessError as e:
            print("warning: unable to call ldd: %s" % e)
            return

        for line in ldd_output.splitlines():
            libname = line.split()[0]
            if libname.endswith(loader):
                loader_path = libname[:len(libname) - len(loader)]
                break
        else:
            print("warning: unable to find the path to the dynamic linker")
            return

        correct_interpreter = loader_path + loader

        try:
            subprocess.check_output(["patchelf", "--set-interpreter", correct_interpreter, fname])
        except subprocess.CalledProcessError as e:
            print("warning: failed to call patchelf: %s" % e)
            return

    def stage0_cargo_rev(self):
        """Revision of the stage0 cargo to download (from stage0.txt)."""
        return self._cargo_rev

    def stage0_rustc_date(self):
        """Date component of the stage0 rustc release (from stage0.txt)."""
        return self._rustc_date

    def stage0_rustc_channel(self):
        """Channel component (e.g. beta) of the stage0 rustc release."""
        return self._rustc_channel

    def rustc_stamp(self):
        """Path of the stamp file recording the unpacked rustc date."""
        return os.path.join(self.bin_root(), '.rustc-stamp')

    def cargo_stamp(self):
        """Path of the stamp file recording the unpacked cargo revision."""
        return os.path.join(self.bin_root(), '.cargo-stamp')

    def rustc_out_of_date(self):
        """True when the unpacked rustc is missing, stale, or --clean given."""
        if not os.path.exists(self.rustc_stamp()) or self.clean:
            return True
        with open(self.rustc_stamp(), 'r') as f:
            return self.stage0_rustc_date() != f.read()

    def cargo_out_of_date(self):
        """True when the unpacked cargo is missing, stale, or --clean given."""
        if not os.path.exists(self.cargo_stamp()) or self.clean:
            return True
        with open(self.cargo_stamp(), 'r') as f:
            return self.stage0_cargo_rev() != f.read()

    def bin_root(self):
        """Directory the stage0 toolchain is unpacked into."""
        return os.path.join(self.build_dir, self.build, "stage0")

    def get_toml(self, key):
        """Return the string value of `key = "..."` from config.toml, or
        None when absent."""
        for line in self.config_toml.splitlines():
            if line.startswith(key + ' ='):
                return self.get_string(line)
        return None

    def get_mk(self, key):
        """Return the value of `key := value` from config.mk, or None when
        absent or empty."""
        for line in iter(self.config_mk.splitlines()):
            if line.startswith(key + ' '):
                var = line[line.find(':=') + 2:].strip()
                if var != '':
                    return var
        return None

    def cargo(self):
        """Path of the cargo to use: config.toml override, then config.mk's
        CFG_LOCAL_RUST_ROOT, then the downloaded stage0 cargo."""
        config = self.get_toml('cargo')
        if config:
            return config
        config = self.get_mk('CFG_LOCAL_RUST_ROOT')
        if config:
            return config + '/bin/cargo' + self.exe_suffix()
        return os.path.join(self.bin_root(), "bin/cargo" + self.exe_suffix())

    def rustc(self):
        """Path of the rustc to use, resolved like cargo() above."""
        config = self.get_toml('rustc')
        if config:
            return config
        config = self.get_mk('CFG_LOCAL_RUST_ROOT')
        if config:
            return config + '/bin/rustc' + self.exe_suffix()
        return os.path.join(self.bin_root(), "bin/rustc" + self.exe_suffix())

    def get_string(self, line):
        """Extract the text between the first pair of double quotes."""
        start = line.find('"')
        end = start + 1 + line[start + 1:].find('"')
        return line[start + 1:end]

    def exe_suffix(self):
        """Executable suffix for the host platform ('.exe' on Windows)."""
        if sys.platform == 'win32':
            return '.exe'
        else:
            return ''

    def print_what_it_means_to_bootstrap(self):
        """Print (once) an explanation of the stage0 download, but only when
        the bootstrap binary does not exist yet and --help was requested."""
        if hasattr(self, 'printed'):
            return
        self.printed = True
        if os.path.exists(self.bootstrap_binary()):
            return
        if '--help' not in sys.argv or len(sys.argv) == 1:
            return

        print('info: the build system for Rust is written in Rust, so this')
        print('      script is now going to download a stage0 rust compiler')
        print('      and then compile the build system itself')
        print('')
        print('info: in the meantime you can read more about rustbuild at')
        print('      src/bootstrap/README.md before the download finishes')

    def bootstrap_binary(self):
        """Path where cargo places the compiled bootstrap binary."""
        return os.path.join(self.build_dir, "bootstrap/debug/bootstrap")

    def build_bootstrap(self):
        """Compile src/bootstrap with the stage0 cargo/rustc."""
        self.print_what_it_means_to_bootstrap()
        build_dir = os.path.join(self.build_dir, "bootstrap")
        if self.clean and os.path.exists(build_dir):
            shutil.rmtree(build_dir)
        env = os.environ.copy()
        env["CARGO_TARGET_DIR"] = build_dir
        env["RUSTC"] = self.rustc()
        # BUG FIX: the conditional expression must be parenthesized.
        # Previously `a + (b) if cond else ""` parsed as
        # `(a + b) if cond else ""`, so when the variable was unset the
        # stage0 lib directory was dropped entirely instead of being used.
        env["LD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
            ((os.pathsep + env["LD_LIBRARY_PATH"])
             if "LD_LIBRARY_PATH" in env else "")
        env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
            ((os.pathsep + env["DYLD_LIBRARY_PATH"])
             if "DYLD_LIBRARY_PATH" in env else "")
        env["PATH"] = os.path.join(self.bin_root(), "bin") + \
                      os.pathsep + env["PATH"]
        if not os.path.isfile(self.cargo()):
            raise Exception("no cargo executable found at `%s`" % self.cargo())
        args = [self.cargo(), "build", "--manifest-path",
                os.path.join(self.rust_root, "src/bootstrap/Cargo.toml")]
        if self.use_locked_deps:
            args.append("--locked")
        if self.use_vendored_sources:
            args.append("--frozen")
        self.run(args, env)

    def run(self, args, env):
        """Run `args` with `env`; propagate a non-zero exit status."""
        proc = subprocess.Popen(args, env=env)
        ret = proc.wait()
        if ret != 0:
            sys.exit(ret)

    def build_triple(self):
        """Determine the build triple: config.toml `build`, then config.mk
        CFG_BUILD, then by probing `uname`."""
        default_encoding = sys.getdefaultencoding()
        config = self.get_toml('build')
        if config:
            return config
        config = self.get_mk('CFG_BUILD')
        if config:
            return config
        try:
            ostype = subprocess.check_output(['uname', '-s']).strip().decode(default_encoding)
            cputype = subprocess.check_output(['uname', '-m']).strip().decode(default_encoding)
        except (subprocess.CalledProcessError, OSError):
            if sys.platform == 'win32':
                return 'x86_64-pc-windows-msvc'
            err = "uname not found"
            if self.verbose:
                raise Exception(err)
            sys.exit(err)

        # Darwin's `uname -s` lies and always returns i386. We have to use
        # sysctl instead.
        if ostype == 'Darwin' and cputype == 'i686':
            args = ['sysctl', 'hw.optional.x86_64']
            sysctl = subprocess.check_output(args).decode(default_encoding)
            if ': 1' in sysctl:
                cputype = 'x86_64'

        # The goal here is to come up with the same triple as LLVM would,
        # at least for the subset of platforms we're willing to target.
        if ostype == 'Linux':
            ostype = 'unknown-linux-gnu'
        elif ostype == 'FreeBSD':
            ostype = 'unknown-freebsd'
        elif ostype == 'DragonFly':
            ostype = 'unknown-dragonfly'
        elif ostype == 'Bitrig':
            ostype = 'unknown-bitrig'
        elif ostype == 'OpenBSD':
            ostype = 'unknown-openbsd'
        elif ostype == 'NetBSD':
            ostype = 'unknown-netbsd'
        elif ostype == 'SunOS':
            ostype = 'sun-solaris'
            # On Solaris, uname -m will return a machine classification instead
            # of a cpu type, so uname -p is recommended instead.  However, the
            # output from that option is too generic for our purposes (it will
            # always emit 'i386' on x86/amd64 systems).  As such, isainfo -k
            # must be used instead.
            try:
                cputype = subprocess.check_output(['isainfo',
                  '-k']).strip().decode(default_encoding)
            except (subprocess.CalledProcessError, OSError):
                err = "isainfo not found"
                if self.verbose:
                    raise Exception(err)
                sys.exit(err)
        elif ostype == 'Darwin':
            ostype = 'apple-darwin'
        elif ostype == 'Haiku':
            ostype = 'unknown-haiku'
        elif ostype.startswith('MINGW'):
            # msys' `uname` does not print gcc configuration, but prints msys
            # configuration. so we cannot believe `uname -m`:
            # msys1 is always i686 and msys2 is always x86_64.
            # instead, msys defines $MSYSTEM which is MINGW32 on i686 and
            # MINGW64 on x86_64.
            ostype = 'pc-windows-gnu'
            cputype = 'i686'
            if os.environ.get('MSYSTEM') == 'MINGW64':
                cputype = 'x86_64'
        elif ostype.startswith('MSYS'):
            ostype = 'pc-windows-gnu'
        elif ostype.startswith('CYGWIN_NT'):
            cputype = 'i686'
            if ostype.endswith('WOW64'):
                cputype = 'x86_64'
            ostype = 'pc-windows-gnu'
        else:
            err = "unknown OS type: " + ostype
            if self.verbose:
                raise ValueError(err)
            sys.exit(err)

        if cputype in {'i386', 'i486', 'i686', 'i786', 'x86'}:
            cputype = 'i686'
        elif cputype in {'xscale', 'arm'}:
            cputype = 'arm'
        elif cputype in {'armv6l', 'armv7l', 'armv8l'}:
            # NOTE: armv7l deliberately maps to plain 'arm' here (same as the
            # original behavior).  A dead `elif cputype == 'armv7l'` branch
            # mapping it to 'armv7' was removed: it could never be reached
            # because this set already matched armv7l first.
            cputype = 'arm'
            ostype += 'eabihf'
        elif cputype == 'aarch64':
            cputype = 'aarch64'
        elif cputype == 'arm64':
            cputype = 'aarch64'
        elif cputype == 'mips':
            if sys.byteorder == 'big':
                cputype = 'mips'
            elif sys.byteorder == 'little':
                cputype = 'mipsel'
            else:
                raise ValueError('unknown byteorder: ' + sys.byteorder)
        elif cputype == 'mips64':
            if sys.byteorder == 'big':
                cputype = 'mips64'
            elif sys.byteorder == 'little':
                cputype = 'mips64el'
            else:
                raise ValueError('unknown byteorder: ' + sys.byteorder)
            # only the n64 ABI is supported, indicate it
            ostype += 'abi64'
        elif cputype in {'powerpc', 'ppc'}:
            cputype = 'powerpc'
        elif cputype in {'powerpc64', 'ppc64'}:
            cputype = 'powerpc64'
        elif cputype in {'powerpc64le', 'ppc64le'}:
            cputype = 'powerpc64le'
        elif cputype == 'sparcv9':
            pass
        elif cputype in {'amd64', 'x86_64', 'x86-64', 'x64'}:
            cputype = 'x86_64'
        elif cputype == 's390x':
            cputype = 's390x'
        elif cputype == 'BePC':
            cputype = 'i686'
        else:
            err = "unknown cpu type: " + cputype
            if self.verbose:
                raise ValueError(err)
            sys.exit(err)

        return "{}-{}".format(cputype, ostype)

def bootstrap():
    """Initial bootstrap entry point: parse flags, configure a RustBuild from
    config.toml / config.mk, download the stage0 toolchain, build the
    bootstrap binary, and finally invoke it with the original arguments."""
    parser = argparse.ArgumentParser(description='Build rust')
    parser.add_argument('--config')
    parser.add_argument('--clean', action='store_true')
    parser.add_argument('-v', '--verbose', action='store_true')

    # Help flags are meant for the real bootstrap binary (invoked at the end
    # of this function), so keep them out of our own parse.
    args = [a for a in sys.argv if a != '-h' and a != '--help']
    args, _ = parser.parse_known_args(args)

    # Configure initial bootstrap
    rb = RustBuild()
    rb.config_toml = ''
    rb.config_mk = ''
    rb.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
    rb.build_dir = os.path.join(os.getcwd(), "build")
    rb.verbose = args.verbose
    rb.clean = args.clean

    # Both configuration files are optional.  BUG FIX: only swallow
    # file-access errors — the previous bare `except:` also hid genuine bugs
    # and Ctrl-C.
    try:
        with open(args.config or 'config.toml') as config:
            rb.config_toml = config.read()
    except (IOError, OSError):
        pass
    try:
        # BUG FIX: use a context manager so the file handle is not leaked.
        with open('config.mk') as config_mk:
            rb.config_mk = config_mk.read()
    except (IOError, OSError):
        pass

    rb.use_vendored_sources = '\nvendor = true' in rb.config_toml or \
                              'CFG_ENABLE_VENDOR' in rb.config_mk

    rb.use_locked_deps = '\nlocked-deps = true' in rb.config_toml or \
                         'CFG_ENABLE_LOCKED_DEPS' in rb.config_mk

    # Under sudo, cargo would write registry caches into root's $HOME, so
    # force vendored sources to stay self-contained.
    if 'SUDO_USER' in os.environ and not rb.use_vendored_sources:
        if os.environ.get('USER') != os.environ['SUDO_USER']:
            rb.use_vendored_sources = True
            print('info: looks like you are running this command under `sudo`')
            print('      and so in order to preserve your $HOME this will now')
            print('      use vendored sources by default. Note that if this')
            print('      does not work you should run a normal build first')
            print('      before running a command like `sudo make install`')

    if rb.use_vendored_sources:
        if not os.path.exists('.cargo'):
            os.makedirs('.cargo')
        with open('.cargo/config','w') as f:
            f.write("""
                [source.crates-io]
                replace-with = 'vendored-sources'
                registry = 'https://example.com'

                [source.vendored-sources]
                directory = '{}/src/vendor'
            """.format(rb.rust_root))
    else:
        if os.path.exists('.cargo'):
            shutil.rmtree('.cargo')

    data = stage0_data(rb.rust_root)
    rb._rustc_channel, rb._rustc_date = data['rustc'].split('-', 1)
    rb._cargo_rev = data['cargo']

    # Fetch/build the bootstrap
    rb.build = rb.build_triple()
    rb.download_stage0()
    sys.stdout.flush()
    rb.build_bootstrap()
    sys.stdout.flush()

    # Run the bootstrap
    args = [rb.bootstrap_binary()]
    args.extend(sys.argv[1:])
    env = os.environ.copy()
    env["BUILD"] = rb.build
    env["SRC"] = rb.rust_root
    env["BOOTSTRAP_PARENT_ID"] = str(os.getpid())
    rb.run(args, env)

def main():
    """Run bootstrap() and report elapsed wall-clock time, converting
    SystemExit/KeyboardInterrupt into a process exit code."""
    started = time()
    try:
        bootstrap()
        print("Build completed successfully in %s" %
              format_build_time(time() - started))
    except (SystemExit, KeyboardInterrupt) as exc:
        # SystemExit may carry an integer status; anything else becomes 1.
        if hasattr(exc, 'code') and isinstance(exc.code, int):
            exit_status = exc.code
        else:
            exit_status = 1
            print(exc)
        print("Build completed unsuccessfully in %s" %
              format_build_time(time() - started))
        sys.exit(exit_status)

# Script entry point: only run when executed directly, not when imported.
if __name__ == '__main__':
    main()
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<












































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/cc.rs version [2af5c09bc2].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! C-compiler probing and detection.
//!
//! This module will fill out the `cc` and `cxx` maps of `Build` by looking for
//! C and C++ compilers for each target configured. A compiler is found through
//! a number of vectors (in order of precedence)
//!
//! 1. Configuration via `target.$target.cc` in `config.toml`.
//! 2. Configuration via `target.$target.android-ndk` in `config.toml`, if
//!    applicable
//! 3. Special logic to probe on OpenBSD
//! 4. The `CC_$target` environment variable.
//! 5. The `CC` environment variable.
//! 6. "cc"
//!
//! Some of this logic is implemented here, but much of it is farmed out to the
//! `gcc` crate itself, so we end up having the same fallbacks as there.
//! Similar logic is then used to find a C++ compiler, just some s/cc/c++/ is
//! used.
//!
//! It is intended that after this module has run no C/C++ compiler will
//! ever be probed for. Instead the compilers found here will be used for
//! everything.

use std::process::Command;

use build_helper::{cc2ar, output};
use gcc;

use Build;
use config::Target;

pub fn find(build: &mut Build) {
    // Every configured target needs a C compiler: it builds C shims and
    // serves as the linker for Rust code.
    for target in build.config.target.iter() {
        let mut gcc_cfg = gcc::Config::new();
        gcc_cfg.cargo_metadata(false)
               .opt_level(0)
               .debug(false)
               .target(target)
               .host(&build.config.build);

        let target_cfg = build.config.target_config.get(target);
        match target_cfg.and_then(|c| c.cc.as_ref()) {
            // Explicit `target.$target.cc` from config.toml wins.
            Some(cc) => { gcc_cfg.compiler(cc); }
            None => set_compiler(&mut gcc_cfg, "gcc", target, target_cfg, build),
        }

        let compiler = gcc_cfg.get_compiler();
        let archiver = cc2ar(compiler.path(), target);
        build.verbose(&format!("CC_{} = {:?}", target, compiler.path()));
        if let Some(ref archiver) = archiver {
            build.verbose(&format!("AR_{} = {:?}", target, archiver));
        }
        build.cc.insert(target.to_string(), (compiler, archiver));
    }

    // Host triples additionally need a C++ compiler.
    for host in build.config.host.iter() {
        let mut gcc_cfg = gcc::Config::new();
        gcc_cfg.cargo_metadata(false)
               .opt_level(0)
               .debug(false)
               .cpp(true)
               .target(host)
               .host(&build.config.build);
        let host_cfg = build.config.target_config.get(host);
        match host_cfg.and_then(|c| c.cxx.as_ref()) {
            Some(cxx) => { gcc_cfg.compiler(cxx); }
            None => set_compiler(&mut gcc_cfg, "g++", host, host_cfg, build),
        }
        let compiler = gcc_cfg.get_compiler();
        build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
        build.cxx.insert(host.to_string(), compiler);
    }
}

/// Overrides the compiler in `cfg` for targets where the generic default
/// discovered by the `gcc` crate is known to be wrong. `gnu_compiler` is the
/// GNU tool name being probed for ("gcc" or "g++").
fn set_compiler(cfg: &mut gcc::Config,
                gnu_compiler: &str,
                target: &str,
                config: Option<&Target>,
                build: &Build) {
    match target {
        // When compiling for android we may have the NDK configured in the
        // config.toml in which case we look there. Otherwise the default
        // compiler already takes into account the triple in question.
        t if t.contains("android") => {
            if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
                // NDK tool names use "arm" rather than "armv7".
                let target = target.replace("armv7", "arm");
                let compiler = format!("{}-{}", target, gnu_compiler);
                cfg.compiler(ndk.join("bin").join(compiler));
            }
        }

        // The default gcc version from OpenBSD may be too old, try using egcc,
        // which is a gcc version from ports, if this is the case.
        t if t.contains("openbsd") => {
            let c = cfg.get_compiler();
            if !c.path().ends_with(gnu_compiler) {
                return
            }

            // Parse the version from `--version`: only gcc 4.0 through 4.6
            // triggers the egcc fallback below.
            let output = output(c.to_command().arg("--version"));
            let i = match output.find(" 4.") {
                Some(i) => i,
                None => return,
            };
            match output[i + 3..].chars().next().unwrap() {
                '0' ... '6' => {}
                _ => return,
            }
            let alternative = format!("e{}", gnu_compiler);
            // Only switch if the ports compiler is actually runnable.
            if Command::new(&alternative).output().is_ok() {
                cfg.compiler(alternative);
            }
        }

        // For MIPS musl targets, replace a bare `gcc` with the
        // triple-prefixed musl cross compiler.
        "mips-unknown-linux-musl" => {
            if cfg.get_compiler().path().to_str() == Some("gcc") {
                cfg.compiler("mips-linux-musl-gcc");
            }
        }
        "mipsel-unknown-linux-musl" => {
            if cfg.get_compiler().path().to_str() == Some("gcc") {
                cfg.compiler("mipsel-linux-musl-gcc");
            }
        }

        // Other musl targets use the musl-gcc wrapper from the configured
        // musl root, when one is present.
        t if t.contains("musl") => {
            if let Some(root) = build.musl_root(target) {
                let guess = root.join("bin/musl-gcc");
                if guess.exists() {
                    cfg.compiler(guess);
                }
            }
        }

        _ => {}
    }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/channel.rs version [9c6be0d1dd].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Build configuration for Rust's release channels.
//!
//! Implements the stable/beta/nightly channel distinctions by setting various
//! flags like the `unstable_features`, calculating variables like `release` and
//! `package_vers`, and otherwise indicating to the compiler what it should
//! print out as part of its version information.

use std::path::Path;
use std::process::Command;

use build_helper::output;

use Build;

// The version number of this release.
pub const CFG_RELEASE_NUM: &'static str = "1.17.0";

// An optional number to put after the label, e.g. '.2' -> '-beta.2'
// Be sure that this starts with a dot to conform to semver pre-release
// versions (section 9)
pub const CFG_PRERELEASE_VERSION: &'static str = ".1";

/// Git metadata for a checkout; `inner` is `None` when the directory is not
/// a usable git repository (e.g. when building from a source tarball).
pub struct GitInfo {
    inner: Option<Info>,
}

/// Raw values scraped from `git log` / `git rev-parse`.
struct Info {
    commit_date: String,
    sha: String,
    short_sha: String,
}

impl GitInfo {
    /// Probes `dir` for git metadata; the result carries `None` internally
    /// when `dir` is not a working git checkout.
    pub fn new(dir: &Path) -> GitInfo {
        // Bail out early when this doesn't even look like a git checkout.
        if !dir.join(".git").exists() {
            return GitInfo { inner: None }
        }

        // Make sure the `git` binary actually works in this directory.
        let probe = Command::new("git")
                            .arg("rev-parse")
                            .current_dir(dir)
                            .output()
                            .expect("failed to spawn git");
        if !probe.status.success() {
            return GitInfo { inner: None }
        }

        // Scrape the commit date, the full hash, and a 9-character short hash.
        let date = output(Command::new("git").current_dir(dir)
                                  .arg("log").arg("-1")
                                  .arg("--date=short")
                                  .arg("--pretty=format:%cd"));
        let hash = output(Command::new("git").current_dir(dir)
                                  .arg("rev-parse").arg("HEAD"));
        let short_hash = output(Command::new("git")
                                        .current_dir(dir)
                                        .arg("rev-parse")
                                        .arg("--short=9")
                                        .arg("HEAD"));
        GitInfo {
            inner: Some(Info {
                commit_date: date.trim().to_string(),
                sha: hash.trim().to_string(),
                short_sha: short_hash.trim().to_string(),
            }),
        }
    }

    /// Full commit hash, when available.
    pub fn sha(&self) -> Option<&str> {
        self.inner.as_ref().map(|info| &info.sha[..])
    }

    /// Shortened commit hash, when available.
    pub fn sha_short(&self) -> Option<&str> {
        self.inner.as_ref().map(|info| &info.short_sha[..])
    }

    /// Commit date (as formatted by `--date=short`), when available.
    pub fn commit_date(&self) -> Option<&str> {
        self.inner.as_ref().map(|info| &info.commit_date[..])
    }

    /// Human-readable version string: the release for `num`, plus
    /// " (<short sha> <commit date>)" when git info is present.
    pub fn version(&self, build: &Build, num: &str) -> String {
        let base = build.release(num);
        match self.inner {
            Some(ref info) => format!("{} ({} {})", base, info.short_sha, info.commit_date),
            None => base,
        }
    }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
















































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/check.rs version [00c539931a].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Implementation of the test-related targets of the build system.
//!
//! This file implements the various regression test suites that we execute on
//! our CI.

extern crate build_helper;

use std::collections::HashSet;
use std::env;
use std::fmt;
use std::fs;
use std::path::{PathBuf, Path};
use std::process::Command;

use build_helper::output;

use {Build, Compiler, Mode};
use dist;
use util::{self, dylib_path, dylib_path_var, exe};

/// Scratch directory on an attached Android device where test binaries,
/// support dylibs, and log files are pushed via `adb`.
const ADB_TEST_DIR: &'static str = "/data/tmp";

/// The two modes of the test runner; tests or benchmarks.
///
/// Selects which Cargo subcommand is ultimately invoked by `krate`.
#[derive(Copy, Clone)]
pub enum TestKind {
    /// Run `cargo test`
    Test,
    /// Run `cargo bench`
    Bench,
}

impl TestKind {
    /// Maps this kind to the Cargo subcommand that runs it
    /// ("test" or "bench").
    fn subcommand(self) -> &'static str {
        if let TestKind::Test = self {
            "test"
        } else {
            "bench"
        }
    }
}

impl fmt::Display for TestKind {
    /// Renders the progress verb ("Testing" / "Benchmarking") used in
    /// status lines printed by `krate`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let verb = match *self {
            TestKind::Test => "Testing",
            TestKind::Bench => "Benchmarking",
        };
        f.write_str(verb)
    }
}

/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
///
/// This tool in `src/tools` will verify the validity of all our links in the
/// documentation to ensure we don't have a bunch of dead ones.
pub fn linkcheck(build: &Build, host: &str) {
    println!("Linkcheck ({})", host);
    // The link checker is always built by the stage0 compiler for the host.
    let stage0 = Compiler::new(0, host);

    let _time = util::timeit();
    let doc_dir = build.out.join(host).join("doc");
    build.run(build.tool_cmd(&stage0, "linkchecker").arg(doc_dir));
}

/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
///
/// This tool in `src/tools` will check out a few Rust projects and run `cargo
/// test` to ensure that we don't regress the test suites there.
pub fn cargotest(build: &Build, stage: u32, host: &str) {
    let ref compiler = Compiler::new(stage, host);

    // Configure PATH to find the right rustc. NB. we have to use PATH
    // and not RUSTC because the Cargo test suite has tests that will
    // fail if rustc is not spelled `rustc`.
    let path = build.sysroot(compiler).join("bin");
    let old_path = ::std::env::var("PATH").expect("");
    let sep = if cfg!(windows) { ";" } else {":" };
    let ref newpath = format!("{}{}{}", path.display(), sep, old_path);

    // Note that this is a short, cryptic, and not scoped directory name. This
    // is currently to minimize the length of path on Windows where we otherwise
    // quickly run into path name limit constraints.
    let out_dir = build.out.join("ct");
    t!(fs::create_dir_all(&out_dir));

    let _time = util::timeit();
    let mut cmd = Command::new(build.tool(&Compiler::new(0, host), "cargotest"));
    build.prepare_tool_cmd(compiler, &mut cmd);
    build.run(cmd.env("PATH", newpath)
                 .arg(&build.cargo)
                 .arg(&out_dir));
}

/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
///
/// This tool in `src/tools` checks up on various bits and pieces of style and
/// otherwise just implements a few lint-like checks that are specific to the
/// compiler itself.
pub fn tidy(build: &Build, host: &str) {
    println!("tidy check ({})", host);
    // tidy is built by the stage0 compiler and pointed at the source tree.
    let stage0 = Compiler::new(0, host);
    let mut tidy_cmd = build.tool_cmd(&stage0, "tidy");
    tidy_cmd.arg(build.src.join("src"));
    // Vendored builds carry the dependency sources in-tree; otherwise tell
    // tidy not to expect a vendor directory.
    if !build.config.vendor {
        tidy_cmd.arg("--no-vendor");
    }
    build.run(&mut tidy_cmd);
}

/// Returns the per-host scratch directory (`<out>/<host>/test`) used as the
/// build base for compiletest suites and generated test artifacts.
fn testdir(build: &Build, host: &str) -> PathBuf {
    let mut dir = build.out.join(host);
    dir.push("test");
    dir
}

/// Executes the `compiletest` tool to run a suite of tests.
///
/// Compiles all tests with `compiler` for `target` with the specified
/// compiletest `mode` and `suite` arguments. For example `mode` can be
/// "run-pass" or `suite` can be something like `debuginfo`.
pub fn compiletest(build: &Build,
                   compiler: &Compiler,
                   target: &str,
                   mode: &str,
                   suite: &str) {
    println!("Check compiletest suite={} mode={} ({} -> {})",
             suite, mode, compiler.host, target);
    // The compiletest binary itself is always built by the stage0 compiler
    // for the host; it then drives `compiler` via the flags below.
    let mut cmd = Command::new(build.tool(&Compiler::new(0, compiler.host),
                                          "compiletest"));
    build.prepare_tool_cmd(compiler, &mut cmd);

    // compiletest currently has... a lot of arguments, so let's just pass all
    // of them!

    // Paths to the compiler under test, its libraries, and the suite sources.
    cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
    cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
    cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
    cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
    cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
    cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
    cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
    cmd.arg("--mode").arg(mode);
    cmd.arg("--target").arg(target);
    cmd.arg("--host").arg(compiler.host);
    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build));

    // nodejs is optional; only some suites need it (e.g. asm.js tests).
    if let Some(nodejs) = build.config.nodejs.as_ref() {
        cmd.arg("--nodejs").arg(nodejs);
    }

    // Base rustc flags shared by host and target compilations; -Crpath so
    // test binaries can locate the freshly built std dylibs.
    let mut flags = vec!["-Crpath".to_string()];
    if build.config.rust_optimize_tests {
        flags.push("-O".to_string());
    }
    if build.config.rust_debuginfo_tests {
        flags.push("-g".to_string());
    }

    let mut hostflags = build.rustc_flags(&compiler.host);
    hostflags.extend(flags.clone());
    cmd.arg("--host-rustcflags").arg(hostflags.join(" "));

    let mut targetflags = build.rustc_flags(&target);
    targetflags.extend(flags);
    // Let target test binaries link against the native test helper library.
    targetflags.push(format!("-Lnative={}",
                             build.test_helpers_out(target).display()));
    cmd.arg("--target-rustcflags").arg(targetflags.join(" "));

    cmd.arg("--docck-python").arg(build.python());

    if build.config.build.ends_with("apple-darwin") {
        // Force /usr/bin/python on macOS for LLDB tests because we're loading the
        // LLDB plugin's compiled module which only works with the system python
        // (namely not Homebrew-installed python)
        cmd.arg("--lldb-python").arg("/usr/bin/python");
    } else {
        cmd.arg("--lldb-python").arg(build.python());
    }

    // Debugger locations/versions, forwarded when configured so the
    // debuginfo suites can pick compatible test expectations.
    if let Some(ref gdb) = build.config.gdb {
        cmd.arg("--gdb").arg(gdb);
    }
    if let Some(ref vers) = build.lldb_version {
        cmd.arg("--lldb-version").arg(vers);
    }
    if let Some(ref dir) = build.lldb_python_dir {
        cmd.arg("--lldb-python-dir").arg(dir);
    }
    let llvm_config = build.llvm_config(target);
    let llvm_version = output(Command::new(&llvm_config).arg("--version"));
    cmd.arg("--llvm-version").arg(llvm_version);

    // Forward any extra `--test-args` given on the bootstrap command line.
    cmd.args(&build.flags.cmd.test_args());

    if build.config.verbose() || build.flags.verbose() {
        cmd.arg("--verbose");
    }

    if build.config.quiet_tests {
        cmd.arg("--quiet");
    }

    // Only pass correct values for these flags for the `run-make` suite as it
    // requires that a C++ compiler was configured which isn't always the case.
    if suite == "run-make" {
        let llvm_components = output(Command::new(&llvm_config).arg("--components"));
        let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
        cmd.arg("--cc").arg(build.cc(target))
           .arg("--cxx").arg(build.cxx(target))
           .arg("--cflags").arg(build.cflags(target).join(" "))
           .arg("--llvm-components").arg(llvm_components.trim())
           .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
    } else {
        cmd.arg("--cc").arg("")
           .arg("--cxx").arg("")
           .arg("--cflags").arg("")
           .arg("--llvm-components").arg("")
           .arg("--llvm-cxxflags").arg("");
    }

    // When running under QEMU, test binaries execute remotely through the
    // qemu-test-client helper tool.
    if build.qemu_rootfs(target).is_some() {
        cmd.arg("--qemu-test-client")
           .arg(build.tool(&Compiler::new(0, &build.config.build),
                           "qemu-test-client"));
    }

    // Running a C compiler on MSVC requires a few env vars to be set, to be
    // sure to set them here.
    //
    // Note that if we encounter `PATH` we make sure to append to our own `PATH`
    // rather than stomp over it.
    if target.contains("msvc") {
        for &(ref k, ref v) in build.cc[target].0.env() {
            if k != "PATH" {
                cmd.env(k, v);
            }
        }
    }
    cmd.env("RUSTC_BOOTSTRAP", "1");
    build.add_rust_test_threads(&mut cmd);

    if build.config.sanitizers {
        cmd.env("SANITIZER_SUPPORT", "1");
    }

    // Android test execution goes through adb; non-Android targets get an
    // empty cross path so compiletest knows not to use it.
    cmd.arg("--adb-path").arg("adb");
    cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
    if target.contains("android") {
        // Assume that cc for this target comes from the android sysroot
        cmd.arg("--android-cross-path")
           .arg(build.cc(target).parent().unwrap().parent().unwrap());
    } else {
        cmd.arg("--android-cross-path").arg("");
    }

    let _time = util::timeit();
    build.run(&mut cmd);
}

/// Run `rustdoc --test` for all documentation in `src/doc`.
///
/// This will run all tests in our markdown documentation (e.g. the book)
/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
/// `compiler`.
pub fn docs(build: &Build, compiler: &Compiler) {
    // Do a breadth-first traversal of the `src/doc` directory and just run
    // tests for all files that end in `*.md`
    let mut stack = vec![build.src.join("src/doc")];
    let _time = util::timeit();

    while let Some(p) = stack.pop() {
        if p.is_dir() {
            // Queue up all children of this directory and keep walking.
            stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
            continue
        }

        // Only markdown files carry testable documentation.
        if p.extension().and_then(|s| s.to_str()) != Some("md") {
            continue
        }

        // The nostarch directory in the book is for no starch, and so isn't guaranteed to build.
        // we don't care if it doesn't build, so skip it.
        use std::ffi::OsStr;
        let path: &OsStr = p.as_ref();
        if let Some(path) = path.to_str() {
            if path.contains("nostarch") {
                continue;
            }
        }

        println!("doc tests for: {}", p.display());
        markdown_test(build, compiler, &p);
    }
}

/// Run the error index generator tool to execute the tests located in the error
/// index.
///
/// The `error_index_generator` tool lives in `src/tools` and is used to
/// generate a markdown file from the error indexes of the code base which is
/// then passed to `rustdoc --test`.
pub fn error_index(build: &Build, compiler: &Compiler) {
    println!("Testing error-index stage{}", compiler.stage);

    // Generate the markdown into the per-host test scratch directory.
    let dir = testdir(build, compiler.host);
    t!(fs::create_dir_all(&dir));
    let output = dir.join("error-index.md");

    let _time = util::timeit();
    // The generator tool is always built by the stage0 compiler.
    build.run(build.tool_cmd(&Compiler::new(0, compiler.host),
                             "error_index_generator")
                   .arg("markdown")
                   .arg(&output)
                   .env("CFG_BUILD", &build.config.build));

    // Then run the generated markdown's examples through rustdoc --test.
    markdown_test(build, compiler, &output);
}

/// Runs `rustdoc --test` on a single markdown file, forwarding any
/// user-supplied test arguments (and `--quiet` when configured).
fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
    let mut rustdoc = Command::new(build.rustdoc(compiler));
    build.add_rustc_lib_path(compiler, &mut rustdoc);
    build.add_rust_test_threads(&mut rustdoc);
    rustdoc.arg("--test");
    rustdoc.arg(markdown);
    rustdoc.env("RUSTC_BOOTSTRAP", "1");

    // Collect pass-through arguments for the doctest harness.
    let mut forwarded = build.flags.cmd.test_args().join(" ");
    if build.config.quiet_tests {
        forwarded.push_str(" --quiet");
    }
    rustdoc.arg("--test-args").arg(forwarded);

    build.run(&mut rustdoc);
}

/// Run all unit tests plus documentation tests for an entire crate DAG defined
/// by a `Cargo.toml`
///
/// This is what runs tests for crates like the standard library, compiler, etc.
/// It essentially is the driver for running `cargo test`.
///
/// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
/// arguments, and those arguments are discovered from `cargo metadata`.
pub fn krate(build: &Build,
             compiler: &Compiler,
             target: &str,
             mode: Mode,
             test_kind: TestKind,
             krate: Option<&str>) {
    // Resolve the mode into a display name, manifest path, feature set, and
    // the root crate of the DAG to walk.
    let (name, path, features, root) = match mode {
        Mode::Libstd => {
            ("libstd", "src/libstd", build.std_features(), "std")
        }
        Mode::Libtest => {
            ("libtest", "src/libtest", String::new(), "test")
        }
        Mode::Librustc => {
            ("librustc", "src/rustc", build.rustc_features(), "rustc-main")
        }
        _ => panic!("can only test libraries"),
    };
    println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
             compiler.host, target);

    // If we're not doing a full bootstrap but we're testing a stage2 version of
    // libstd, then what we're actually testing is the libstd produced in
    // stage1. Reflect that here by updating the compiler that we're working
    // with automatically.
    let compiler = if build.force_use_stage1(compiler, target) {
        Compiler::new(1, compiler.host)
    } else {
        compiler.clone()
    };

    // Build up the base `cargo test` command.
    //
    // Pass in some standard flags then iterate over the graph we've discovered
    // in `cargo metadata` with the maps above and figure out what `-p`
    // arguments need to get passed.
    let mut cargo = build.cargo(&compiler, mode, target, test_kind.subcommand());
    cargo.arg("--manifest-path")
         .arg(build.src.join(path).join("Cargo.toml"))
         .arg("--features").arg(features);

    match krate {
        // A specific crate was requested: test just that one.
        Some(krate) => {
            cargo.arg("-p").arg(krate);
        }
        // Otherwise walk the dependency graph from the root crate, adding a
        // `-p` argument for every (transitive) dependency.
        None => {
            let mut visited = HashSet::new();
            let mut next = vec![root];
            while let Some(name) = next.pop() {
                // Right now jemalloc is our only target-specific crate in the
                // sense that it's not present on all platforms. Custom skip it
                // here for now, but if we add more this probably wants to get
                // more generalized.
                //
                // Also skip `build_helper` as it's not compiled normally for
                // target during the bootstrap and it's just meant to be a
                // helper crate, not tested. If it leaks through then it ends up
                // messing with various mtime calculations and such.
                if !name.contains("jemalloc") && name != "build_helper" {
                    cargo.arg("-p").arg(&format!("{}:0.0.0", name));
                }
                for dep in build.crates[name].deps.iter() {
                    if visited.insert(dep) {
                        next.push(dep);
                    }
                }
            }
        }
    }

    // The tests are going to run with the *target* libraries, so we need to
    // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
    //
    // Note that to run the compiler we need to run with the *host* libraries,
    // but our wrapper scripts arrange for that to be the case anyway.
    let mut dylib_path = dylib_path();
    dylib_path.insert(0, build.sysroot_libdir(&compiler, target));
    cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());

    // Emulated/cross targets can't execute tests in-process here; just build
    // the test binaries and hand them to the per-platform runners below.
    if target.contains("android") ||
       target.contains("emscripten") ||
       build.qemu_rootfs(target).is_some() {
        cargo.arg("--no-run");
    }

    cargo.arg("--");

    if build.config.quiet_tests {
        cargo.arg("--quiet");
    }

    let _time = util::timeit();

    if target.contains("android") {
        build.run(&mut cargo);
        krate_android(build, &compiler, target, mode);
    } else if target.contains("emscripten") {
        build.run(&mut cargo);
        krate_emscripten(build, &compiler, target, mode);
    } else if build.qemu_rootfs(target).is_some() {
        build.run(&mut cargo);
        krate_qemu(build, &compiler, target, mode);
    } else {
        cargo.args(&build.flags.cmd.test_args());
        build.run(&mut cargo);
    }
}

/// Runs previously built crate test binaries on an Android device via `adb`.
///
/// Each test executable is pushed to `ADB_TEST_DIR`, executed through
/// `adb shell` with its log written on-device, and the log is pulled back
/// into the build's `tmp` directory. Panics if a test run doesn't report
/// `result: ok`.
fn krate_android(build: &Build,
                 compiler: &Compiler,
                 target: &str,
                 mode: Mode) {
    let mut tests = Vec::new();
    let out_dir = build.cargo_out(compiler, mode, target);
    find_tests(&out_dir, target, &mut tests);
    find_tests(&out_dir.join("deps"), target, &mut tests);

    for test in tests {
        build.run(Command::new("adb").arg("push").arg(&test).arg(ADB_TEST_DIR));

        let test_file_name = test.file_name().unwrap().to_string_lossy();
        let log = format!("{}/check-stage{}-T-{}-H-{}-{}.log",
                          ADB_TEST_DIR,
                          compiler.stage,
                          target,
                          compiler.host,
                          test_file_name);
        let quiet = if build.config.quiet_tests { "--quiet" } else { "" };
        // Shell snippet executed on the device: cd into the test dir, point
        // the loader at the pushed dylibs, and run the test with a logfile.
        let program = format!("(cd {dir}; \
                                LD_LIBRARY_PATH=./{target} ./{test} \
                                    --logfile {log} \
                                    {quiet} \
                                    {args})",
                              dir = ADB_TEST_DIR,
                              target = target,
                              test = test_file_name,
                              log = log,
                              quiet = quiet,
                              args = build.flags.cmd.test_args().join(" "));

        let output = output(Command::new("adb").arg("shell").arg(&program));
        println!("{}", output);

        // Retrieve the on-device log, then clean it up.
        t!(fs::create_dir_all(build.out.join("tmp")));
        build.run(Command::new("adb")
                          .arg("pull")
                          .arg(&log)
                          .arg(build.out.join("tmp")));
        build.run(Command::new("adb").arg("shell").arg("rm").arg(&log));
        // adb doesn't propagate the remote exit status, so detect failure by
        // scanning the captured output for the success marker.
        if !output.contains("result: ok") {
            panic!("some tests failed");
        }
    }
}

/// Runs previously built crate test artifacts for an emscripten target by
/// executing each generated JavaScript file under the configured node.js.
fn krate_emscripten(build: &Build,
                    compiler: &Compiler,
                    target: &str,
                    mode: Mode) {
    // Collect every test artifact cargo produced for this target.
    let out_dir = build.cargo_out(compiler, mode, target);
    let mut tests = Vec::new();
    find_tests(&out_dir, target, &mut tests);
    find_tests(&out_dir.join("deps"), target, &mut tests);

    for test in tests {
        let test_path = test.to_string_lossy().into_owned();
        println!("running {}", test_path);
        let node = build.config.nodejs.as_ref().expect("nodejs not configured");
        let mut runner = Command::new(node);
        runner.arg(&test_path);
        if build.config.quiet_tests {
            runner.arg("--quiet");
        }
        build.run(&mut runner);
    }
}

/// Runs previously built crate test binaries inside a QEMU emulator by
/// handing each one to the `qemu-test-client` helper tool.
fn krate_qemu(build: &Build,
              compiler: &Compiler,
              target: &str,
              mode: Mode) {
    let mut tests = Vec::new();
    let out_dir = build.cargo_out(compiler, mode, target);
    find_tests(&out_dir, target, &mut tests);
    find_tests(&out_dir.join("deps"), target, &mut tests);

    // The client tool is built by the stage0 compiler for the build triple;
    // it forwards each binary to the emulator spawned by qemu_copy_libs.
    let tool = build.tool(&Compiler::new(0, &build.config.build),
                          "qemu-test-client");
    for test in tests {
        let mut cmd = Command::new(&tool);
        cmd.arg("run")
           .arg(&test);
        if build.config.quiet_tests {
            cmd.arg("--quiet");
        }
        cmd.args(&build.flags.cmd.test_args());
        build.run(&mut cmd);
    }
}


/// Appends to `dst` every test artifact in `dir` for the given `target`:
/// `.exe` files on Windows, `.js` files for emscripten, and extensionless
/// files elsewhere. Directories and other files are ignored.
fn find_tests(dir: &Path,
              target: &str,
              dst: &mut Vec<PathBuf>) {
    for entry in t!(dir.read_dir()) {
        let entry = t!(entry);
        if !t!(entry.file_type()).is_file() {
            continue
        }
        let name = entry.file_name().into_string().unwrap();
        let is_test_artifact =
            (target.contains("windows") && name.ends_with(".exe")) ||
            (!target.contains("windows") && !name.contains(".")) ||
            (target.contains("emscripten") && name.ends_with(".js"));
        if is_test_artifact {
            dst.push(entry.path());
        }
    }
}

/// Dispatches sysroot dylib deployment to the right emulator helper:
/// adb for Android targets, the QEMU client when a rootfs is configured,
/// and a no-op otherwise.
pub fn emulator_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
    if target.contains("android") {
        android_copy_libs(build, compiler, target);
    } else if let Some(rootfs) = build.qemu_rootfs(target) {
        qemu_copy_libs(build, compiler, target, rootfs);
    }
}

/// Prepares an attached Android device for test runs: recreates the test
/// directory, pushes the run wrapper script, and copies every dylib from the
/// target sysroot into a per-target subdirectory on the device.
fn android_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
    println!("Android copy libs to emulator ({})", target);
    // Wait for a device, then wipe and recreate the scratch directory.
    build.run(Command::new("adb").arg("wait-for-device"));
    build.run(Command::new("adb").arg("remount"));
    build.run(Command::new("adb").args(&["shell", "rm", "-r", ADB_TEST_DIR]));
    build.run(Command::new("adb").args(&["shell", "mkdir", ADB_TEST_DIR]));
    build.run(Command::new("adb")
                      .arg("push")
                      .arg(build.src.join("src/etc/adb_run_wrapper.sh"))
                      .arg(ADB_TEST_DIR));

    // Dylibs live in a per-target subdirectory, matching the
    // LD_LIBRARY_PATH used when the tests are executed on-device.
    let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
    build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));

    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
        let f = t!(f);
        let name = f.file_name().into_string().unwrap();
        if util::is_dylib(&name) {
            build.run(Command::new("adb")
                              .arg("push")
                              .arg(f.path())
                              .arg(&target_dir));
        }
    }
}

/// Prepares a QEMU emulator for test runs: copies the test server into the
/// rootfs, spawns the emulator through `qemu-test-client`, then pushes every
/// dylib from the target sysroot into it.
fn qemu_copy_libs(build: &Build,
                  compiler: &Compiler,
                  target: &str,
                  rootfs: &Path) {
    println!("QEMU copy libs to emulator ({})", target);
    // Only ARM rootfs images are wired up at this point.
    assert!(target.starts_with("arm"), "only works with arm for now");
    t!(fs::create_dir_all(build.out.join("tmp")));

    // Copy our freshly compiled test server over to the rootfs
    let server = build.cargo_out(compiler, Mode::Tool, target)
                      .join(exe("qemu-test-server", target));
    t!(fs::copy(&server, rootfs.join("testd")));

    // Spawn the emulator and wait for it to come online
    let tool = build.tool(&Compiler::new(0, &build.config.build),
                          "qemu-test-client");
    build.run(Command::new(&tool)
                      .arg("spawn-emulator")
                      .arg(rootfs)
                      .arg(build.out.join("tmp")));

    // Push all our dylibs to the emulator
    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
        let f = t!(f);
        let name = f.file_name().into_string().unwrap();
        if util::is_dylib(&name) {
            build.run(Command::new(&tool)
                              .arg("push")
                              .arg(f.path()));
        }
    }
}

/// Run "distcheck", a 'make check' from a tarball
///
/// Unpacks the source distribution tarball into a scratch directory, then
/// runs `./configure` and `make check` inside it to verify the tarball can
/// build and test itself from scratch.
pub fn distcheck(build: &Build) {
    // distcheck is only exercised on the x86_64 Linux builder; bail out for
    // any other build/host/target configuration.
    if build.config.build != "x86_64-unknown-linux-gnu" {
        return
    }
    if !build.config.host.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
        return
    }
    if !build.config.target.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
        return
    }

    // Start from a clean scratch directory each time.
    let dir = build.out.join("tmp").join("distcheck");
    let _ = fs::remove_dir_all(&dir);
    t!(fs::create_dir_all(&dir));

    let mut cmd = Command::new("tar");
    cmd.arg("-xzf")
       .arg(dist::rust_src_location(build))
       .arg("--strip-components=1")
       .current_dir(&dir);
    build.run(&mut cmd);
    build.run(Command::new("./configure")
                     .args(&build.config.configure_args)
                     .arg("--enable-vendor")
                     .current_dir(&dir));
    build.run(Command::new(build_helper::make(&build.config.build))
                     .arg("check")
                     .current_dir(&dir));
}

/// Test the build system itself
///
/// Runs `cargo test` on the `src/bootstrap` crate with the in-tree rustc,
/// forwarding any user-supplied test arguments.
pub fn bootstrap(build: &Build) {
    let bootstrap_dir = build.src.join("src/bootstrap");
    let mut cargo_test = Command::new(&build.cargo);
    cargo_test.arg("test")
              .current_dir(bootstrap_dir)
              .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
              .env("RUSTC", &build.rustc);
    cargo_test.arg("--").args(&build.flags.cmd.test_args());
    build.run(&mut cargo_test);
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/clean.rs version [ed821a18ff].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Implementation of `make clean` in rustbuild.
//!
//! Responsible for cleaning out a build directory of all old and stale
//! artifacts to prepare for a fresh build. Currently doesn't remove the
//! `build/cache` directory (download cache) or the `build/$target/llvm`
//! directory as we want that cached between builds.

use std::fs;
use std::io::{self, ErrorKind};
use std::path::Path;

use Build;

/// Removes stale build artifacts from the output directory.
///
/// Deletes `tmp`, `<out>/tmp`, and `<out>/dist`, then clears each host's
/// output directory except its `llvm` subdirectory (and, implicitly, the
/// download cache) so those expensive artifacts survive between cleans.
pub fn clean(build: &Build) {
    rm_rf("tmp".as_ref());
    rm_rf(&build.out.join("tmp"));
    rm_rf(&build.out.join("dist"));

    for host in build.config.host.iter() {
        // A host directory may not exist yet; that's fine, skip it.
        let entries = match build.out.join(host).read_dir() {
            Ok(iter) => iter,
            Err(_) => continue,
        };

        for entry in entries {
            let entry = t!(entry);
            // Keep the LLVM build — it's expensive and cached across cleans.
            if entry.file_name().to_str() == Some("llvm") {
                continue
            }
            let path = t!(entry.path().canonicalize());
            rm_rf(&path);
        }
    }
}

/// Recursively deletes `path` (file or directory), a la `rm -rf`.
///
/// Missing paths are a no-op. Individual removals go through `do_op`, which
/// retries read-only files on Windows.
fn rm_rf(path: &Path) {
    if !path.exists() {
        return
    }
    if path.is_file() {
        return do_op(path, "remove file", |p| fs::remove_file(p));
    }

    // Depth-first: delete children before removing the directory itself.
    for file in t!(fs::read_dir(path)) {
        let file = t!(file).path();

        if file.is_dir() {
            rm_rf(&file);
        } else {
            // On windows we can't remove a readonly file, and git will
            // often clone files as readonly. As a result, we have some
            // special logic to remove readonly files on windows.
            do_op(&file, "remove file", |p| fs::remove_file(p));
        }
    }
    do_op(path, "remove dir", |p| fs::remove_dir(p));
}

/// Applies the filesystem operation `f` to `path`, panicking with `desc` on
/// failure. On Windows a `PermissionDenied` error triggers one retry after
/// clearing the file's read-only attribute (git often clones files read-only).
fn do_op<F>(path: &Path, desc: &str, mut f: F)
    where F: FnMut(&Path) -> io::Result<()>
{
    if let Err(e) = f(path) {
        if cfg!(windows) && e.kind() == ErrorKind::PermissionDenied {
            // Clear the read-only bit and try exactly once more.
            let mut perms = t!(path.metadata()).permissions();
            perms.set_readonly(false);
            t!(fs::set_permissions(path, perms));
            f(path).unwrap_or_else(|e| {
                panic!("failed to {} {}: {}", desc, path.display(), e);
            })
        } else {
            panic!("failed to {} {}: {}", desc, path.display(), e);
        }
    }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<














































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/compile.rs version [9f852c43c3].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Implementation of compiling various phases of the compiler and standard
//! library.
//!
//! This module contains some of the real meat in the rustbuild build system
//! which is where Cargo is used to compiler the standard library, libtest, and
//! compiler. This module is also responsible for assembling the sysroot as it
//! goes along from the output of the previous stage.

use std::collections::HashMap;
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::env;

use build_helper::{output, mtime, up_to_date};
use filetime::FileTime;

use channel::GitInfo;
use util::{exe, libdir, is_dylib, copy};
use {Build, Compiler, Mode};

/// Build the standard library.
///
/// This will build the standard library for a particular stage of the build
/// using the `compiler` targeting the `target` architecture. The artifacts
/// created will also be linked into the sysroot directory.
pub fn std(build: &Build, target: &str, compiler: &Compiler) {
    let libdir = build.sysroot_libdir(compiler, target);
    t!(fs::create_dir_all(&libdir));

    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
             compiler.host, target);

    // NOTE(review): `clear_if_dirty` presumably wipes the cargo output dir
    // when the compiler binary is newer, forcing a from-scratch std build —
    // confirm against `Build::clear_if_dirty`.
    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
    build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
    let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
    let mut features = build.std_features();

    // Forward a std-specific macOS deployment target into the generic
    // MACOSX_DEPLOYMENT_TARGET variable cargo/rustc respond to.
    if let Ok(target) = env::var("MACOSX_STD_DEPLOYMENT_TARGET") {
        cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
    }

    // When doing a local rebuild we tell cargo that we're stage1 rather than
    // stage0. This works fine if the local rust and being-built rust have the
    // same view of what the default allocator is, but fails otherwise. Since
    // we don't have a way to express an allocator preference yet, work
    // around the issue in the case of a local rebuild with jemalloc disabled.
    if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
        features.push_str(" force_alloc_system");
    }

    if compiler.stage != 0 && build.config.sanitizers {
        // This variable is used by the sanitizer runtime crates, e.g.
        // rustc_lsan, to build the sanitizer runtime from C code
        // When this variable is missing, those crates won't compile the C code,
        // so we don't set this variable during stage0 where llvm-config is
        // missing
        // We also only build the runtimes when --enable-sanitizers (or its
        // config.toml equivalent) is used
        cargo.env("LLVM_CONFIG", build.llvm_config(target));
    }
    cargo.arg("--features").arg(features)
         .arg("--manifest-path")
         .arg(build.src.join("src/libstd/Cargo.toml"));

    // Per-target jemalloc override from `[target.<triple>]` configuration.
    if let Some(target) = build.config.target_config.get(target) {
        if let Some(ref jemalloc) = target.jemalloc {
            cargo.env("JEMALLOC_OVERRIDE", jemalloc);
        }
    }
    // musl targets need to know where the musl libc root lives.
    if target.contains("musl") {
        if let Some(p) = build.musl_root(target) {
            cargo.env("MUSL_ROOT", p);
        }
    }

    build.run(&mut cargo);
    // Refresh the libstd stamp so dependents (see `test`) can detect this
    // rebuild via `clear_if_dirty`.
    update_mtime(build, &libstd_stamp(build, &compiler, target));
}

/// Link all libstd rlibs/dylibs into the sysroot location.
///
/// Copies the artifacts generated by `compiler` for `target` into the
/// sysroot of `target_compiler`, assuming libstd has already been built.
pub fn std_link(build: &Build,
                compiler: &Compiler,
                target_compiler: &Compiler,
                target: &str) {
    println!("Copying stage{} std from stage{} ({} -> {} / {})",
             target_compiler.stage, compiler.stage, compiler.host,
             target_compiler.host, target);

    let dst_libdir = build.sysroot_libdir(&target_compiler, target);
    let cargo_out = build.cargo_out(&compiler, Mode::Libstd, target);
    t!(fs::create_dir_all(&dst_libdir));
    add_to_sysroot(&cargo_out, &dst_libdir);

    // Statically-linked musl targets additionally need libc's startup
    // objects dropped into the sysroot.
    if target.contains("musl") && !target.contains("mips") {
        copy_musl_third_party_objects(build, target, &dst_libdir);
    }
}

/// Copies the crt(1,i,n).o startup objects
///
/// Only required for musl targets that statically link to libc
fn copy_musl_third_party_objects(build: &Build, target: &str, into: &Path) {
    // `unwrap` is fine here: callers only reach this for musl targets,
    // for which a musl root must be configured.
    let src_lib = build.musl_root(target).unwrap().join("lib");
    for &obj in ["crt1.o", "crti.o", "crtn.o"].iter() {
        copy(&src_lib.join(obj), &into.join(obj));
    }
}

/// Build and prepare startup objects like rsbegin.o and rsend.o
///
/// These are primarily used on Windows right now for linking executables/dlls.
/// They don't require any library support as they're just plain old object
/// files, so we just use the nightly snapshot compiler to always build them (as
/// no other compilers are guaranteed to be available).
pub fn build_startup_objects(build: &Build, for_compiler: &Compiler, target: &str) {
    // Startup objects are only needed for the *-pc-windows-gnu targets.
    if !target.contains("pc-windows-gnu") {
        return
    }

    // Always use the stage0 snapshot compiler; see the doc comment above.
    let compiler = Compiler::new(0, &build.config.build);
    let compiler_path = build.compiler_path(&compiler);
    let src_dir = &build.src.join("src/rtstartup");
    let dst_dir = &build.native_dir(target).join("rtstartup");
    let sysroot_dir = &build.sysroot_libdir(for_compiler, target);
    t!(fs::create_dir_all(dst_dir));
    t!(fs::create_dir_all(sysroot_dir));

    for file in &["rsbegin", "rsend"] {
        let src_file = &src_dir.join(file.to_string() + ".rs");
        let dst_file = &dst_dir.join(file.to_string() + ".o");
        // Only recompile when the .rs source is newer than the .o output.
        if !up_to_date(src_file, dst_file) {
            let mut cmd = Command::new(&compiler_path);
            // RUSTC_BOOTSTRAP=1 lets the stage0 compiler accept unstable
            // features these sources may rely on.
            build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
                        .arg("--target").arg(target)
                        .arg("--emit=obj")
                        .arg("--out-dir").arg(dst_dir)
                        .arg(src_file));
        }

        copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
    }

    // crt2.o/dllcrt2.o come from the C toolchain; locate them through the C
    // compiler (see `compiler_file`) and copy them into the sysroot too.
    for obj in ["crt2.o", "dllcrt2.o"].iter() {
        copy(&compiler_file(build.cc(target), obj), &sysroot_dir.join(obj));
    }
}

/// Build libtest.
///
/// This will build libtest and supporting libraries for a particular stage of
/// the build using the `compiler` targeting the `target` architecture. The
/// artifacts created will also be linked into the sysroot directory.
pub fn test(build: &Build, target: &str, compiler: &Compiler) {
    println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
             compiler.host, target);
    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
    // Rebuild from scratch if libstd changed since the last libtest build;
    // the stamp is refreshed by `std` via `update_mtime`.
    build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
    let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
    // Same macOS deployment-target forwarding as in `std`.
    if let Ok(target) = env::var("MACOSX_STD_DEPLOYMENT_TARGET") {
        cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
    }
    cargo.arg("--manifest-path")
         .arg(build.src.join("src/libtest/Cargo.toml"));
    build.run(&mut cargo);
    // Refresh the libtest stamp so `rustc` can detect this rebuild.
    update_mtime(build, &libtest_stamp(build, compiler, target));
}

/// Same as `std_link`, only for libtest
pub fn test_link(build: &Build,
                 compiler: &Compiler,
                 target_compiler: &Compiler,
                 target: &str) {
    println!("Copying stage{} test from stage{} ({} -> {} / {})",
             target_compiler.stage, compiler.stage, compiler.host,
             target_compiler.host, target);
    let dst_libdir = build.sysroot_libdir(&target_compiler, target);
    let cargo_out = build.cargo_out(&compiler, Mode::Libtest, target);
    add_to_sysroot(&cargo_out, &dst_libdir);
}

/// Build the compiler.
///
/// This will build the compiler for a particular stage of the build using
/// the `compiler` targeting the `target` architecture. The artifacts
/// created will also be linked into the sysroot directory.
pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
    println!("Building stage{} compiler artifacts ({} -> {})",
             compiler.stage, compiler.host, target);

    // Rebuild from scratch if libtest changed since the last librustc build;
    // the stamp is refreshed by `test` via `update_mtime`.
    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
    build.clear_if_dirty(&out_dir, &libtest_stamp(build, compiler, target));

    let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
    cargo.arg("--features").arg(build.rustc_features())
         .arg("--manifest-path")
         .arg(build.src.join("src/rustc/Cargo.toml"));

    // Set some configuration variables picked up by build scripts and
    // the compiler alike
    cargo.env("CFG_RELEASE", build.rust_release())
         .env("CFG_RELEASE_CHANNEL", &build.config.channel)
         .env("CFG_VERSION", build.rust_version())
         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or(PathBuf::new()));

    // Stage0 always uses a plain "lib"; later stages honor the configured
    // relative libdir (defaulting to "lib" when unset).
    if compiler.stage == 0 {
        cargo.env("CFG_LIBDIR_RELATIVE", "lib");
    } else {
        let libdir_relative = build.config.libdir_relative.clone().unwrap_or(PathBuf::from("lib"));
        cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
    }

    // If we're not building a compiler with debugging information then remove
    // these two env vars which would be set otherwise.
    if build.config.rust_debuginfo_only_std {
        cargo.env_remove("RUSTC_DEBUGINFO");
        cargo.env_remove("RUSTC_DEBUGINFO_LINES");
    }

    // Version-control metadata baked into the compiler, when available.
    if let Some(ref ver_date) = build.rust_info.commit_date() {
        cargo.env("CFG_VER_DATE", ver_date);
    }
    if let Some(ref ver_hash) = build.rust_info.sha() {
        cargo.env("CFG_VER_HASH", ver_hash);
    }
    if !build.unstable_features() {
        cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
    }
    // Flag that rust llvm is in use
    if build.is_rust_llvm(target) {
        cargo.env("LLVM_RUSTLLVM", "1");
    }
    cargo.env("LLVM_CONFIG", build.llvm_config(target));
    let target_config = build.config.target_config.get(target);
    if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
        cargo.env("CFG_LLVM_ROOT", s);
    }
    // Building with a static libstdc++ is only supported on linux right now,
    // not for MSVC or macOS
    if build.config.llvm_static_stdcpp &&
       !target.contains("windows") &&
       !target.contains("apple") {
        cargo.env("LLVM_STATIC_STDCPP",
                  compiler_file(build.cxx(target), "libstdc++.a"));
    }
    if build.config.llvm_link_shared {
        cargo.env("LLVM_LINK_SHARED", "1");
    }
    // Optional defaults for the linker and archiver rustc should invoke.
    if let Some(ref s) = build.config.rustc_default_linker {
        cargo.env("CFG_DEFAULT_LINKER", s);
    }
    if let Some(ref s) = build.config.rustc_default_ar {
        cargo.env("CFG_DEFAULT_AR", s);
    }
    build.run(&mut cargo);
}

/// Same as `std_link`, only for librustc
pub fn rustc_link(build: &Build,
                  compiler: &Compiler,
                  target_compiler: &Compiler,
                  target: &str) {
    println!("Copying stage{} rustc from stage{} ({} -> {} / {})",
             target_compiler.stage, compiler.stage, compiler.host,
             target_compiler.host, target);
    let dst_libdir = build.sysroot_libdir(&target_compiler, target);
    let cargo_out = build.cargo_out(&compiler, Mode::Librustc, target);
    add_to_sysroot(&cargo_out, &dst_libdir);
}

/// Cargo's output path for the standard library in a given stage, compiled
/// by a particular compiler for the specified target.
fn libstd_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
    let out = build.cargo_out(compiler, Mode::Libstd, target);
    out.join(".libstd.stamp")
}

/// Cargo's output path for libtest in a given stage, compiled by a particular
/// compiler for the specified target.
fn libtest_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
    let out = build.cargo_out(compiler, Mode::Libtest, target);
    out.join(".libtest.stamp")
}

/// Asks the given C/C++ `compiler` where it would find `file`
/// (via `-print-file-name=`) and returns that path.
fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
    let mut cmd = Command::new(compiler);
    cmd.arg(format!("-print-file-name={}", file));
    let printed = output(&mut cmd);
    PathBuf::from(printed.trim())
}

/// Creates a fresh, empty sysroot directory for `compiler`,
/// removing any previous contents first.
pub fn create_sysroot(build: &Build, compiler: &Compiler) {
    let dir = build.sysroot(compiler);
    // Ignore the error: the directory may simply not exist yet.
    drop(fs::remove_dir_all(&dir));
    t!(fs::create_dir_all(&dir));
}

/// Prepare a new compiler from the artifacts in `stage`
///
/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
/// must have been previously produced by the `stage - 1` build.config.build
/// compiler.
pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
    // nothing to do in stage0
    if stage == 0 {
        return
    }

    println!("Copying stage{} compiler ({})", stage, host);

    // The compiler that we're assembling
    let target_compiler = Compiler::new(stage, host);

    // The compiler that compiled the compiler we're assembling
    let build_compiler = Compiler::new(stage - 1, &build.config.build);

    // Link in all dylibs to the libdir
    let sysroot = build.sysroot(&target_compiler);
    let sysroot_libdir = sysroot.join(libdir(host));
    t!(fs::create_dir_all(&sysroot_libdir));
    let src_libdir = build.sysroot_libdir(&build_compiler, host);
    // Only dynamic libraries are copied; rlibs are not needed to *run*
    // the assembled compiler.
    for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
        let filename = f.file_name().into_string().unwrap();
        if is_dylib(&filename) {
            copy(&f.path(), &sysroot_libdir.join(&filename));
        }
    }

    let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);

    // Link the compiler binary itself into place
    let rustc = out_dir.join(exe("rustc", host));
    let bindir = sysroot.join("bin");
    t!(fs::create_dir_all(&bindir));
    let compiler = build.compiler_path(&Compiler::new(stage, host));
    // Remove any stale binary first; the copy would otherwise fail on
    // platforms where the destination is in use or read-only.
    let _ = fs::remove_file(&compiler);
    copy(&rustc, &compiler);

    // See if rustdoc exists to link it into place
    let rustdoc = exe("rustdoc", host);
    let rustdoc_src = out_dir.join(&rustdoc);
    let rustdoc_dst = bindir.join(&rustdoc);
    if fs::metadata(&rustdoc_src).is_ok() {
        let _ = fs::remove_file(&rustdoc_dst);
        copy(&rustdoc_src, &rustdoc_dst);
    }
}

/// Link some files into a rustc sysroot.
///
/// For a particular stage this will link all of the contents of `out_dir`
/// into the sysroot of the `host` compiler, assuming the artifacts are
/// compiled for the specified `target`.
fn add_to_sysroot(out_dir: &Path, sysroot_dst: &Path) {
    // Collect the set of all files in the dependencies directory, keyed
    // off the name of the library. We assume everything is of the form
    // `foo-<hash>.{rlib,so,...}`, and there could be multiple different
    // `<hash>` values for the same name (of old builds).
    let mut map = HashMap::new();
    for file in t!(fs::read_dir(out_dir.join("deps"))).map(|f| t!(f)) {
        let filename = file.file_name().into_string().unwrap();

        // We're only interested in linking rlibs + dylibs, other things like
        // unit tests don't get linked in
        if !filename.ends_with(".rlib") &&
           !filename.ends_with(".lib") &&
           !is_dylib(&filename) {
            continue
        }
        let file = file.path();
        // Panics if the name has no `-`; per the comment above we assume
        // cargo always emits `foo-<hash>` names here.
        let dash = filename.find('-').unwrap();
        let key = (filename[..dash].to_string(),
                   file.extension().unwrap().to_owned());
        // `or_insert_with` avoids allocating a Vec when the entry already
        // exists, and `file` is moved in rather than cloned.
        map.entry(key).or_insert_with(Vec::new)
           .push(file);
    }

    // For all hash values found, pick the most recent one to move into the
    // sysroot, that should be the one we just built.
    for (_, paths) in map {
        let (_, path) = paths.iter().map(|path| {
            (mtime(&path).seconds(), path)
        }).max().unwrap();
        copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
    }
}

/// Build a tool in `src/tools`
///
/// This will build the specified tool with the specified `host` compiler in
/// `stage` into the normal cargo output directory.
pub fn tool(build: &Build, stage: u32, target: &str, tool: &str) {
    println!("Building stage{} tool {} ({})", stage, tool, target);

    let compiler = Compiler::new(stage, &build.config.build);

    // FIXME: need to clear out previous tool and ideally deps, may require
    //        isolating output directories or require a pseudo shim step to
    //        clear out all the info.
    //
    //        Maybe when libstd is compiled it should clear out the rustc of the
    //        corresponding stage?
    // let out_dir = build.cargo_out(stage, &host, Mode::Librustc, target);
    // build.clear_if_dirty(&out_dir, &libstd_stamp(build, stage, &host, target));

    let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build");
    // `tool` may be given as a path relative to the source root; fall back
    // to looking under src/tools/ when it isn't.
    let mut dir = build.src.join(tool);
    if !dir.exists() {
        dir = build.src.join("src/tools").join(tool);
    }
    cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));

    // We don't want to build tools dynamically as they'll be running across
    // stages and such and it's just easier if they're not dynamically linked.
    cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");

    // Statically link OpenSSL/zlib when an OpenSSL install dir is configured
    // for this target (e.g. for cargo's use of libgit2/curl).
    if let Some(dir) = build.openssl_install_dir(target) {
        cargo.env("OPENSSL_STATIC", "1");
        cargo.env("OPENSSL_DIR", dir);
        cargo.env("LIBZ_SYS_STATIC", "1");
    }

    cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel);

    // Expose this tool's own git metadata (if any) to its build.
    let info = GitInfo::new(&dir);
    if let Some(sha) = info.sha() {
        cargo.env("CFG_COMMIT_HASH", sha);
    }
    if let Some(sha_short) = info.sha_short() {
        cargo.env("CFG_SHORT_COMMIT_HASH", sha_short);
    }
    if let Some(date) = info.commit_date() {
        cargo.env("CFG_COMMIT_DATE", date);
    }

    build.run(&mut cargo);
}

/// Updates the mtime of a stamp file if necessary, only changing it if it's
/// older than some other library file in the same directory.
///
/// We don't know what file Cargo is going to output (because there's a hash in
/// the file name) but we know where it's going to put it. We use this helper to
/// detect changes to that output file by looking at the modification time for
/// all files in a directory and updating the stamp if any are newer.
///
/// Note that we only consider Rust libraries as that's what we're interested in
/// propagating changes from. Files like executables are tracked elsewhere.
fn update_mtime(build: &Build, path: &Path) {
    // The stamp lives next to cargo's `deps` directory; if that directory
    // doesn't exist yet there's nothing to compare against.
    let entries = match path.parent().unwrap().join("deps").read_dir() {
        Ok(entries) => entries,
        Err(_) => return,
    };
    // Consider only regular files that look like Rust libraries.
    let files = entries.map(|e| t!(e)).filter(|e| t!(e.file_type()).is_file());
    let files = files.filter(|e| {
        let filename = e.file_name();
        let filename = filename.to_str().unwrap();
        filename.ends_with(".rlib") ||
            filename.ends_with(".lib") ||
            is_dylib(&filename)
    });
    // Find the most recently modified library in the directory.
    let max = files.max_by_key(|entry| {
        let meta = t!(entry.metadata());
        FileTime::from_last_modification_time(&meta)
    });
    let max = match max {
        Some(max) => max,
        None => return,
    };

    // Touch the stamp (by truncating/recreating it) only when some library
    // is newer than the stamp itself.
    if mtime(&max.path()) > mtime(path) {
        build.verbose(&format!("updating {:?} as {:?} changed", path, max.path()));
        t!(File::create(path));
    }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/config.rs version [0397316892].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Serialized configuration of a build.
//!
//! This module implements parsing `config.mk` and `config.toml` configuration
//! files to tweak how the build runs.

use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::prelude::*;
use std::path::PathBuf;
use std::process;

use num_cpus;
use rustc_serialize::Decodable;
use toml::{Parser, Decoder, Value};
use util::push_exe_path;

/// Global configuration for the entire build and/or bootstrap.
///
/// This structure is derived from a combination of both `config.toml` and
/// `config.mk`. As of the time of this writing it's unlikely that `config.toml`
/// is used all that much, so this is primarily filled out by `config.mk` which
/// is generated from `./configure`.
///
/// Note that this structure is not decoded directly into, but rather it is
/// filled out from the decoded forms of the structs below. For documentation
/// each field, see the corresponding fields in
/// `src/bootstrap/config.toml.example`.
#[derive(Default)]
pub struct Config {
    pub ccache: Option<String>,
    pub ninja: bool,
    pub verbose: usize,
    pub submodules: bool,
    pub compiler_docs: bool,
    pub docs: bool,
    pub locked_deps: bool,
    pub vendor: bool,
    // Keyed by target triple; see `Target` below.
    pub target_config: HashMap<String, Target>,
    pub full_bootstrap: bool,
    pub extended: bool,
    pub sanitizers: bool,

    // llvm codegen options
    pub llvm_assertions: bool,
    pub llvm_optimize: bool,
    pub llvm_release_debuginfo: bool,
    pub llvm_version_check: bool,
    pub llvm_static_stdcpp: bool,
    pub llvm_link_shared: bool,
    pub llvm_targets: Option<String>,
    pub llvm_link_jobs: Option<u32>,
    pub llvm_clean_rebuild: bool,

    // rust codegen options
    pub rust_optimize: bool,
    pub rust_codegen_units: u32,
    pub rust_debug_assertions: bool,
    pub rust_debuginfo: bool,
    pub rust_debuginfo_lines: bool,
    pub rust_debuginfo_only_std: bool,
    pub rust_rpath: bool,
    pub rustc_default_linker: Option<String>,
    pub rustc_default_ar: Option<String>,
    pub rust_optimize_tests: bool,
    pub rust_debuginfo_tests: bool,
    pub rust_save_analysis: bool,
    pub rust_dist_src: bool,

    // The triple the build runs on, plus the host/target triples to build for.
    pub build: String,
    pub host: Vec<String>,
    pub target: Vec<String>,
    // Optional pre-existing rustc/cargo to bootstrap from.
    pub rustc: Option<PathBuf>,
    pub cargo: Option<PathBuf>,
    pub local_rebuild: bool,

    // dist misc
    pub dist_sign_folder: Option<PathBuf>,
    pub dist_upload_addr: Option<String>,
    pub dist_gpg_password_file: Option<PathBuf>,

    // libstd features
    pub debug_jemalloc: bool,
    pub use_jemalloc: bool,
    pub backtrace: bool, // support for RUST_BACKTRACE

    // misc
    pub channel: String,
    pub quiet_tests: bool,
    // Fallback musl-root for all targets
    pub musl_root: Option<PathBuf>,
    pub prefix: Option<PathBuf>,
    pub docdir: Option<PathBuf>,
    pub libdir: Option<PathBuf>,
    pub libdir_relative: Option<PathBuf>,
    pub mandir: Option<PathBuf>,
    pub codegen_tests: bool,
    pub nodejs: Option<PathBuf>,
    pub gdb: Option<PathBuf>,
    pub python: Option<PathBuf>,
    pub configure_args: Vec<String>,
    pub openssl_static: bool,
}

/// Per-target configuration stored in the global configuration structure.
#[derive(Default)]
pub struct Target {
    pub llvm_config: Option<PathBuf>, // external llvm-config binary for this target
    pub jemalloc: Option<PathBuf>,    // prebuilt jemalloc override
    pub cc: Option<PathBuf>,          // C compiler override
    pub cxx: Option<PathBuf>,         // C++ compiler override
    pub ndk: Option<PathBuf>,         // Android NDK root — presumably; set from `android_ndk` in TOML
    pub musl_root: Option<PathBuf>,   // per-target musl libc root
    pub qemu_rootfs: Option<PathBuf>, // rootfs for qemu-based test runs — TODO confirm usage
}

/// Structure of the `config.toml` file that configuration is read from.
///
/// This structure uses `Decodable` to automatically decode a TOML configuration
/// file into this format, and then this is traversed and written into the above
/// `Config` structure.
#[derive(RustcDecodable, Default)]
struct TomlConfig {
    build: Option<Build>,                       // [build] section
    install: Option<Install>,                   // [install] section
    llvm: Option<Llvm>,                         // [llvm] section
    rust: Option<Rust>,                         // [rust] section
    target: Option<HashMap<String, TomlTarget>>, // [target.<triple>] sections
    dist: Option<Dist>,                         // [dist] section
}

/// TOML representation of various global build decisions.
///
/// All fields are optional (or default) so that an absent key in the
/// `[build]` table simply leaves the corresponding `Config` default intact.
#[derive(RustcDecodable, Default, Clone)]
struct Build {
    build: Option<String>,
    host: Vec<String>,
    target: Vec<String>,
    cargo: Option<String>,
    rustc: Option<String>,
    compiler_docs: Option<bool>,
    docs: Option<bool>,
    submodules: Option<bool>,
    gdb: Option<String>,
    locked_deps: Option<bool>,
    vendor: Option<bool>,
    nodejs: Option<String>,
    python: Option<String>,
    full_bootstrap: Option<bool>,
    extended: Option<bool>,
    verbose: Option<usize>,
    sanitizers: Option<bool>,
    openssl_static: Option<bool>,
}

/// TOML representation of various global install decisions.
///
/// Mirrors the `[install]` table: installation prefix and the man/doc/lib
/// directories beneath it.
#[derive(RustcDecodable, Default, Clone)]
struct Install {
    prefix: Option<String>,
    mandir: Option<String>,
    docdir: Option<String>,
    libdir: Option<String>,
}

/// TOML representation of how the LLVM build is configured.
///
/// Mirrors the `[llvm]` table of `config.toml`.
#[derive(RustcDecodable, Default)]
struct Llvm {
    ccache: Option<StringOrBool>, // either a ccache binary path or a bool toggle
    ninja: Option<bool>,
    assertions: Option<bool>,
    optimize: Option<bool>,
    release_debuginfo: Option<bool>,
    version_check: Option<bool>,
    static_libstdcpp: Option<bool>,
    targets: Option<String>,
    link_jobs: Option<u32>,
    clean_rebuild: Option<bool>,
}

/// TOML representation of distribution/release settings (the `[dist]` table).
#[derive(RustcDecodable, Default, Clone)]
struct Dist {
    sign_folder: Option<String>,
    gpg_password_file: Option<String>,
    upload_addr: Option<String>,
    src_tarball: Option<bool>,
}

/// A TOML value that may be given either as a string or as a boolean
/// (e.g. `ccache = true` vs `ccache = "/usr/bin/ccache"`).
#[derive(RustcDecodable)]
enum StringOrBool {
    String(String),
    Bool(bool),
}

impl Default for StringOrBool {
    // An absent key behaves the same as an explicit `false`.
    fn default() -> StringOrBool {
        StringOrBool::Bool(false)
    }
}

/// TOML representation of how the Rust build is configured.
///
/// Mirrors the `[rust]` table of `config.toml`.
#[derive(RustcDecodable, Default)]
struct Rust {
    optimize: Option<bool>,
    codegen_units: Option<u32>,
    debug_assertions: Option<bool>,
    debuginfo: Option<bool>,
    debuginfo_lines: Option<bool>,
    debuginfo_only_std: Option<bool>,
    debug_jemalloc: Option<bool>,
    use_jemalloc: Option<bool>,
    backtrace: Option<bool>,
    default_linker: Option<String>,
    default_ar: Option<String>,
    channel: Option<String>,
    musl_root: Option<String>,
    rpath: Option<bool>,
    optimize_tests: Option<bool>,
    debuginfo_tests: Option<bool>,
    codegen_tests: Option<bool>,
    save_analysis: Option<bool>,
}

/// TOML representation of how each build target is configured.
///
/// Mirrors a `[target.<triple>]` table; decoded values end up in the
/// public `Target` struct above.
#[derive(RustcDecodable, Default)]
struct TomlTarget {
    llvm_config: Option<String>,
    jemalloc: Option<String>,
    cc: Option<String>,
    cxx: Option<String>,
    android_ndk: Option<String>,
    musl_root: Option<String>,
    qemu_rootfs: Option<String>,
}

impl Config {
    /// Builds a `Config` for the `build` triple: starts from hard-coded
    /// defaults, then layers on values decoded from the optional TOML
    /// configuration file at `file` (typically `config.toml`).
    ///
    /// Exits the process with status 2 if the TOML file fails to parse
    /// or to decode into `TomlConfig`.
    pub fn parse(build: &str, file: Option<PathBuf>) -> Config {
        let mut config = Config::default();
        // Defaults applied before any TOML values are read; TOML entries
        // below override these via `set`.
        config.llvm_optimize = true;
        config.use_jemalloc = true;
        config.backtrace = true;
        config.rust_optimize = true;
        config.rust_optimize_tests = true;
        config.submodules = true;
        config.docs = true;
        config.rust_rpath = true;
        config.rust_codegen_units = 1;
        config.build = build.to_string();
        config.channel = "dev".to_string();
        config.codegen_tests = true;
        config.rust_dist_src = true;

        // Read and parse the TOML file if one was supplied; any parse or
        // decode error is printed and aborts the process.  With no file,
        // an all-default `TomlConfig` is used.
        let toml = file.map(|file| {
            let mut f = t!(File::open(&file));
            let mut toml = String::new();
            t!(f.read_to_string(&mut toml));
            let mut p = Parser::new(&toml);
            let table = match p.parse() {
                Some(table) => table,
                None => {
                    // Report every parse error with line:column spans.
                    println!("failed to parse TOML configuration:");
                    for err in p.errors.iter() {
                        let (loline, locol) = p.to_linecol(err.lo);
                        let (hiline, hicol) = p.to_linecol(err.hi);
                        println!("{}:{}-{}:{}: {}", loline, locol, hiline,
                                 hicol, err.desc);
                    }
                    process::exit(2);
                }
            };
            let mut d = Decoder::new(Value::Table(table));
            match Decodable::decode(&mut d) {
                Ok(cfg) => cfg,
                Err(e) => {
                    println!("failed to decode TOML: {}", e);
                    process::exit(2);
                }
            }
        }).unwrap_or_else(|| TomlConfig::default());

        // [build] section: the build triple is always the first host, and
        // every host is also a target; both lists are de-duplicated.
        let build = toml.build.clone().unwrap_or(Build::default());
        set(&mut config.build, build.build.clone());
        config.host.push(config.build.clone());
        for host in build.host.iter() {
            if !config.host.contains(host) {
                config.host.push(host.clone());
            }
        }
        for target in config.host.iter().chain(&build.target) {
            if !config.target.contains(target) {
                config.target.push(target.clone());
            }
        }
        config.rustc = build.rustc.map(PathBuf::from);
        config.cargo = build.cargo.map(PathBuf::from);
        config.nodejs = build.nodejs.map(PathBuf::from);
        config.gdb = build.gdb.map(PathBuf::from);
        config.python = build.python.map(PathBuf::from);
        set(&mut config.compiler_docs, build.compiler_docs);
        set(&mut config.docs, build.docs);
        set(&mut config.submodules, build.submodules);
        set(&mut config.locked_deps, build.locked_deps);
        set(&mut config.vendor, build.vendor);
        set(&mut config.full_bootstrap, build.full_bootstrap);
        set(&mut config.extended, build.extended);
        set(&mut config.verbose, build.verbose);
        set(&mut config.sanitizers, build.sanitizers);
        set(&mut config.openssl_static, build.openssl_static);

        // [install] section.
        if let Some(ref install) = toml.install {
            config.prefix = install.prefix.clone().map(PathBuf::from);
            config.mandir = install.mandir.clone().map(PathBuf::from);
            config.docdir = install.docdir.clone().map(PathBuf::from);
            config.libdir = install.libdir.clone().map(PathBuf::from);
        }

        // [llvm] section.
        if let Some(ref llvm) = toml.llvm {
            // `ccache` accepts either a boolean or a path string; a bare
            // `true` means "use the binary named `ccache`".
            match llvm.ccache {
                Some(StringOrBool::String(ref s)) => {
                    config.ccache = Some(s.to_string())
                }
                Some(StringOrBool::Bool(true)) => {
                    config.ccache = Some("ccache".to_string());
                }
                Some(StringOrBool::Bool(false)) | None => {}
            }
            set(&mut config.ninja, llvm.ninja);
            set(&mut config.llvm_assertions, llvm.assertions);
            set(&mut config.llvm_optimize, llvm.optimize);
            set(&mut config.llvm_release_debuginfo, llvm.release_debuginfo);
            set(&mut config.llvm_version_check, llvm.version_check);
            set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
            set(&mut config.llvm_clean_rebuild, llvm.clean_rebuild);
            config.llvm_targets = llvm.targets.clone();
            config.llvm_link_jobs = llvm.link_jobs;
        }

        // [rust] section.
        if let Some(ref rust) = toml.rust {
            set(&mut config.rust_debug_assertions, rust.debug_assertions);
            set(&mut config.rust_debuginfo, rust.debuginfo);
            set(&mut config.rust_debuginfo_lines, rust.debuginfo_lines);
            set(&mut config.rust_debuginfo_only_std, rust.debuginfo_only_std);
            set(&mut config.rust_optimize, rust.optimize);
            set(&mut config.rust_optimize_tests, rust.optimize_tests);
            set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests);
            set(&mut config.codegen_tests, rust.codegen_tests);
            set(&mut config.rust_save_analysis, rust.save_analysis);
            set(&mut config.rust_rpath, rust.rpath);
            set(&mut config.debug_jemalloc, rust.debug_jemalloc);
            set(&mut config.use_jemalloc, rust.use_jemalloc);
            set(&mut config.backtrace, rust.backtrace);
            set(&mut config.channel, rust.channel.clone());
            config.rustc_default_linker = rust.default_linker.clone();
            config.rustc_default_ar = rust.default_ar.clone();
            config.musl_root = rust.musl_root.clone().map(PathBuf::from);

            // `codegen_units = 0` means "one unit per core".
            match rust.codegen_units {
                Some(0) => config.rust_codegen_units = num_cpus::get() as u32,
                Some(n) => config.rust_codegen_units = n,
                None => {}
            }
        }

        // [target.<triple>] sections: per-target tool/path overrides.
        // Relative paths are resolved against the current directory.
        if let Some(ref t) = toml.target {
            for (triple, cfg) in t {
                let mut target = Target::default();

                if let Some(ref s) = cfg.llvm_config {
                    target.llvm_config = Some(env::current_dir().unwrap().join(s));
                }
                if let Some(ref s) = cfg.jemalloc {
                    target.jemalloc = Some(env::current_dir().unwrap().join(s));
                }
                if let Some(ref s) = cfg.android_ndk {
                    target.ndk = Some(env::current_dir().unwrap().join(s));
                }
                target.cxx = cfg.cxx.clone().map(PathBuf::from);
                target.cc = cfg.cc.clone().map(PathBuf::from);
                target.musl_root = cfg.musl_root.clone().map(PathBuf::from);
                target.qemu_rootfs = cfg.qemu_rootfs.clone().map(PathBuf::from);

                config.target_config.insert(triple.clone(), target);
            }
        }

        // [dist] section.
        if let Some(ref t) = toml.dist {
            config.dist_sign_folder = t.sign_folder.clone().map(PathBuf::from);
            config.dist_gpg_password_file = t.gpg_password_file.clone().map(PathBuf::from);
            config.dist_upload_addr = t.upload_addr.clone();
            set(&mut config.rust_dist_src, t.src_tarball);
        }

        return config
    }

    /// "Temporary" routine to parse `config.mk` into this configuration.
    ///
    /// While we still have `./configure` this implements the ability to decode
    /// that configuration into this. This isn't exactly a full-blown makefile
    /// parser, but hey it gets the job done!
    pub fn update_with_config_mk(&mut self) {
        let mut config = String::new();
        File::open("config.mk").unwrap().read_to_string(&mut config).unwrap();
        for line in config.lines() {
            // Lines look like `KEY := value`; lines without `:=` are skipped.
            let mut parts = line.splitn(2, ":=").map(|s| s.trim());
            let key = parts.next().unwrap();
            let value = match parts.next() {
                // Strip surrounding double quotes from quoted values.
                Some(n) if n.starts_with('\"') => &n[1..n.len() - 1],
                Some(n) => n,
                None => continue
            };

            // Maps `CFG_ENABLE_<NAME> := 1` / `CFG_DISABLE_<NAME> := 1`
            // onto the given boolean field (true/false respectively).
            macro_rules! check {
                ($(($name:expr, $val:expr),)*) => {
                    if value == "1" {
                        $(
                            if key == concat!("CFG_ENABLE_", $name) {
                                $val = true;
                                continue
                            }
                            if key == concat!("CFG_DISABLE_", $name) {
                                $val = false;
                                continue
                            }
                        )*
                    }
                }
            }

            check! {
                ("MANAGE_SUBMODULES", self.submodules),
                ("COMPILER_DOCS", self.compiler_docs),
                ("DOCS", self.docs),
                ("LLVM_ASSERTIONS", self.llvm_assertions),
                ("LLVM_RELEASE_DEBUGINFO", self.llvm_release_debuginfo),
                ("OPTIMIZE_LLVM", self.llvm_optimize),
                ("LLVM_VERSION_CHECK", self.llvm_version_check),
                ("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp),
                ("LLVM_LINK_SHARED", self.llvm_link_shared),
                ("LLVM_CLEAN_REBUILD", self.llvm_clean_rebuild),
                ("OPTIMIZE", self.rust_optimize),
                ("DEBUG_ASSERTIONS", self.rust_debug_assertions),
                ("DEBUGINFO", self.rust_debuginfo),
                ("DEBUGINFO_LINES", self.rust_debuginfo_lines),
                ("DEBUGINFO_ONLY_STD", self.rust_debuginfo_only_std),
                ("JEMALLOC", self.use_jemalloc),
                ("DEBUG_JEMALLOC", self.debug_jemalloc),
                ("RPATH", self.rust_rpath),
                ("OPTIMIZE_TESTS", self.rust_optimize_tests),
                ("DEBUGINFO_TESTS", self.rust_debuginfo_tests),
                ("QUIET_TESTS", self.quiet_tests),
                ("LOCAL_REBUILD", self.local_rebuild),
                ("NINJA", self.ninja),
                ("CODEGEN_TESTS", self.codegen_tests),
                ("SAVE_ANALYSIS", self.rust_save_analysis),
                ("LOCKED_DEPS", self.locked_deps),
                ("VENDOR", self.vendor),
                ("FULL_BOOTSTRAP", self.full_bootstrap),
                ("EXTENDED", self.extended),
                ("SANITIZERS", self.sanitizers),
                ("DIST_SRC", self.rust_dist_src),
                ("CARGO_OPENSSL_STATIC", self.openssl_static),
            }

            // Remaining keys carry string/path values; most only apply when
            // non-empty.  Per-target keys insert into `target_config`.
            match key {
                "CFG_BUILD" if value.len() > 0 => self.build = value.to_string(),
                "CFG_HOST" if value.len() > 0 => {
                    self.host.extend(value.split(" ").map(|s| s.to_string()));

                }
                "CFG_TARGET" if value.len() > 0 => {
                    self.target.extend(value.split(" ").map(|s| s.to_string()));
                }
                "CFG_MUSL_ROOT" if value.len() > 0 => {
                    self.musl_root = Some(parse_configure_path(value));
                }
                "CFG_MUSL_ROOT_X86_64" if value.len() > 0 => {
                    let target = "x86_64-unknown-linux-musl".to_string();
                    let target = self.target_config.entry(target)
                                     .or_insert(Target::default());
                    target.musl_root = Some(parse_configure_path(value));
                }
                "CFG_MUSL_ROOT_I686" if value.len() > 0 => {
                    let target = "i686-unknown-linux-musl".to_string();
                    let target = self.target_config.entry(target)
                                     .or_insert(Target::default());
                    target.musl_root = Some(parse_configure_path(value));
                }
                "CFG_MUSL_ROOT_ARM" if value.len() > 0 => {
                    let target = "arm-unknown-linux-musleabi".to_string();
                    let target = self.target_config.entry(target)
                                     .or_insert(Target::default());
                    target.musl_root = Some(parse_configure_path(value));
                }
                "CFG_MUSL_ROOT_ARMHF" if value.len() > 0 => {
                    let target = "arm-unknown-linux-musleabihf".to_string();
                    let target = self.target_config.entry(target)
                                     .or_insert(Target::default());
                    target.musl_root = Some(parse_configure_path(value));
                }
                "CFG_MUSL_ROOT_ARMV7" if value.len() > 0 => {
                    let target = "armv7-unknown-linux-musleabihf".to_string();
                    let target = self.target_config.entry(target)
                                     .or_insert(Target::default());
                    target.musl_root = Some(parse_configure_path(value));
                }
                "CFG_DEFAULT_AR" if value.len() > 0 => {
                    self.rustc_default_ar = Some(value.to_string());
                }
                "CFG_DEFAULT_LINKER" if value.len() > 0 => {
                    self.rustc_default_linker = Some(value.to_string());
                }
                "CFG_GDB" if value.len() > 0 => {
                    self.gdb = Some(parse_configure_path(value));
                }
                "CFG_RELEASE_CHANNEL" => {
                    self.channel = value.to_string();
                }
                "CFG_PREFIX" => {
                    self.prefix = Some(PathBuf::from(value));
                }
                "CFG_DOCDIR" => {
                    self.docdir = Some(PathBuf::from(value));
                }
                "CFG_LIBDIR" => {
                    self.libdir = Some(PathBuf::from(value));
                }
                "CFG_LIBDIR_RELATIVE" => {
                    self.libdir_relative = Some(PathBuf::from(value));
                }
                "CFG_MANDIR" => {
                    self.mandir = Some(PathBuf::from(value));
                }
                "CFG_LLVM_ROOT" if value.len() > 0 => {
                    let target = self.target_config.entry(self.build.clone())
                                     .or_insert(Target::default());
                    let root = parse_configure_path(value);
                    target.llvm_config = Some(push_exe_path(root, &["bin", "llvm-config"]));
                }
                "CFG_JEMALLOC_ROOT" if value.len() > 0 => {
                    let target = self.target_config.entry(self.build.clone())
                                     .or_insert(Target::default());
                    target.jemalloc = Some(parse_configure_path(value).join("libjemalloc_pic.a"));
                }
                "CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
                    let target = "arm-linux-androideabi".to_string();
                    let target = self.target_config.entry(target)
                                     .or_insert(Target::default());
                    target.ndk = Some(parse_configure_path(value));
                }
                "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
                    let target = "armv7-linux-androideabi".to_string();
                    let target = self.target_config.entry(target)
                                     .or_insert(Target::default());
                    target.ndk = Some(parse_configure_path(value));
                }
                "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
                    let target = "i686-linux-android".to_string();
                    let target = self.target_config.entry(target)
                                     .or_insert(Target::default());
                    target.ndk = Some(parse_configure_path(value));
                }
                "CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
                    let target = "aarch64-linux-android".to_string();
                    let target = self.target_config.entry(target)
                                     .or_insert(Target::default());
                    target.ndk = Some(parse_configure_path(value));
                }
                "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => {
                    let path = parse_configure_path(value);
                    self.rustc = Some(push_exe_path(path.clone(), &["bin", "rustc"]));
                    self.cargo = Some(push_exe_path(path, &["bin", "cargo"]));
                }
                "CFG_PYTHON" if value.len() > 0 => {
                    let path = parse_configure_path(value);
                    self.python = Some(path);
                }
                "CFG_ENABLE_CCACHE" if value == "1" => {
                    self.ccache = Some("ccache".to_string());
                }
                "CFG_ENABLE_SCCACHE" if value == "1" => {
                    self.ccache = Some("sccache".to_string());
                }
                "CFG_CONFIGURE_ARGS" if value.len() > 0 => {
                    self.configure_args = value.split_whitespace()
                                               .map(|s| s.to_string())
                                               .collect();
                }
                "CFG_QEMU_ARMHF_ROOTFS" if value.len() > 0 => {
                    let target = "arm-unknown-linux-gnueabihf".to_string();
                    let target = self.target_config.entry(target)
                                     .or_insert(Target::default());
                    target.qemu_rootfs = Some(parse_configure_path(value));
                }
                _ => {}
            }
        }
    }

    /// True when verbosity level is at least 1 (`-v`).
    pub fn verbose(&self) -> bool {
        self.verbose > 0
    }

    /// True when verbosity level is at least 2 (`-vv`).
    pub fn very_verbose(&self) -> bool {
        self.verbose > 1
    }
}

/// Converts a path string coming from `./configure` into a `PathBuf`.
/// On non-Windows platforms no translation is required.
#[cfg(not(windows))]
fn parse_configure_path(path: &str) -> PathBuf {
    PathBuf::from(path)
}

/// Converts a path string coming from `./configure` into a `PathBuf`,
/// translating Unix-style paths (e.g. `/c/some/path`) into real Windows
/// paths by shelling out to `cygpath -w`.
/// NOTE(review): this assumes `cygpath` is on PATH — confirm for the
/// MSYS/Cygwin environments configure runs under.
#[cfg(windows)]
fn parse_configure_path(path: &str) -> PathBuf {
    // on windows, configure produces unix style paths e.g. /c/some/path but we
    // only want real windows paths

    use std::process::Command;
    use build_helper;

    // '/' is invalid in windows paths, so we can detect unix paths by the presence of it
    if !path.contains('/') {
        return path.into();
    }

    // Ask cygpath to convert the Unix-style path to Windows form.
    let win_path = build_helper::output(Command::new("cygpath").arg("-w").arg(path));
    let win_path = win_path.trim();

    win_path.into()
}

/// Overwrites `*field` with the inner value when `val` is `Some`;
/// leaves `*field` untouched when `val` is `None`.
fn set<T>(field: &mut T, val: Option<T>) {
    match val {
        Some(new_value) => *field = new_value,
        None => {}
    }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<










































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/config.toml.example version [dff2f5bba7].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
# Sample TOML configuration file for building Rust.
#
# To configure rustbuild, copy this file to the directory from which you will be
# running the build, and name it config.toml.
#
# All options are commented out by default in this file, and they're commented
# out with their default values. The build system by default looks for
# `config.toml` in the current directory of a build for build configuration, but
# a custom configuration file can also be specified with `--config` to the build
# system.

# =============================================================================
# Tweaking how LLVM is compiled
# =============================================================================
[llvm]

# Indicates whether the LLVM build is a Release or Debug build
#optimize = true

# Indicates whether an LLVM Release build should include debug info
#release-debuginfo = false

# Indicates whether the LLVM assertions are enabled or not
#assertions = false

# Indicates whether ccache is used when building LLVM
#ccache = false
# or alternatively ...
#ccache = "/path/to/ccache"

# If an external LLVM root is specified, we automatically check the version by
# default to make sure it's within the range that we're expecting, but setting
# this flag will indicate that this version check should not be done.
#version-check = false

# Link libstdc++ statically into the librustc_llvm instead of relying on a
# dynamic version to be available.
#static-libstdcpp = false

# Tell the LLVM build system to use Ninja instead of the platform default for
# the generated build system. This can sometimes be faster than make, for
# example.
#ninja = false

# LLVM targets to build support for.
# Note: this is NOT related to Rust compilation targets. However, as Rust is
# dependent on LLVM for code generation, turning targets off here WILL lead to
# the resulting rustc being unable to compile for the disabled architectures.
# Also worth pointing out is that, in case support for new targets are added to
# LLVM, enabling them here doesn't mean Rust is automatically gaining said
# support. You'll need to write a target specification at least, and most
# likely, teach rustc about the C ABI of the target. Get in touch with the
# Rust team and file an issue if you need assistance in porting!
#targets = "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX"

# Cap the number of parallel linker invocations when compiling LLVM.
# This can be useful when building LLVM with debug info, which significantly
# increases the size of binaries and consequently the memory required by
# each linker process.
# If absent or 0, linker invocations are treated like any other job and
# controlled by rustbuild's -j parameter.
#link-jobs = 0

# Delete LLVM build directory on LLVM rebuild.
# This option defaults to `false` for local development, but CI may want to
# always perform clean full builds (possibly accelerated by (s)ccache).
#clean-rebuild = false

# =============================================================================
# General build configuration options
# =============================================================================
[build]

# Build triple for the original snapshot compiler. This must be a compiler that
# nightlies are already produced for. The current platform must be able to run
# binaries of this build triple and the nightly will be used to bootstrap the
# first compiler.
#build = "x86_64-unknown-linux-gnu"    # defaults to your host platform

# In addition to the build triple, other triples to produce full compiler
# toolchains for. Each of these triples will be bootstrapped from the build
# triple and then will continue to bootstrap themselves. This platform must
# currently be able to run all of the triples provided here.
#host = ["x86_64-unknown-linux-gnu"]   # defaults to just the build triple

# In addition to all host triples, other triples to produce the standard library
# for. Each host triple will be used to produce a copy of the standard library
# for each target triple.
#target = ["x86_64-unknown-linux-gnu"] # defaults to just the build triple

# Instead of downloading the src/stage0.txt version of Cargo specified, use
# this Cargo binary instead to build all Rust code
#cargo = "/path/to/bin/cargo"

# Instead of downloading the src/stage0.txt version of the compiler
# specified, use this rustc binary instead as the stage0 snapshot compiler.
#rustc = "/path/to/bin/rustc"

# Flag to specify whether any documentation is built. If false, rustdoc and
# friends will still be compiled but they will not be used to generate any
# documentation.
#docs = true

# Indicate whether the compiler should be documented in addition to the standard
# library and facade crates.
#compiler-docs = false

# Indicate whether submodules are managed and updated automatically.
#submodules = true

# The path to (or name of) the GDB executable to use. This is only used for
# executing the debuginfo test suite.
#gdb = "gdb"

# The node.js executable to use. Note that this is only used for the emscripten
# target when running tests, otherwise this can be omitted.
#nodejs = "node"

# Python interpreter to use for various tasks throughout the build, notably
# rustdoc tests, the lldb python interpreter, and some dist bits and pieces.
# Note that Python 2 is currently required.
#python = "python2.7"

# Force Cargo to check that Cargo.lock describes the precise dependency
# set that all the Cargo.toml files create, instead of updating it.
#locked-deps = false

# Indicate whether the vendored sources are used for Rust dependencies or not
#vendor = false

# Typically the build system will build the rust compiler twice. The second
# compiler, however, will simply use its own libraries to link against. If you
# would rather to perform a full bootstrap, compiling the compiler three times,
# then you can set this option to true. You shouldn't ever need to set this
# option to true.
#full-bootstrap = false

# Enable a build of the extended Rust tool set, which is not only the
# compiler but also tools such as Cargo. This will also produce "combined
# installers" which are used to install Rust and Cargo together. This is
# disabled by default.
#extended = false

# Verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose
#verbose = 0

# Build the sanitizer runtimes
#sanitizers = false

# Indicates whether the OpenSSL linked into Cargo will be statically linked or
# not. If static linkage is specified then the build system will download a
# known-good version of OpenSSL, compile it, and link it to Cargo.
#openssl-static = false

# =============================================================================
# General install configuration options
# =============================================================================
[install]

# Instead of installing to /usr/local, install to this path instead.
#prefix = "/usr/local"

# Where to install libraries in `prefix` above
#libdir = "lib"

# Where to install man pages in `prefix` above
#mandir = "share/man"

# Where to install documentation in `prefix` above
#docdir = "share/doc/rust"

# =============================================================================
# Options for compiling Rust code itself
# =============================================================================
[rust]

# Whether or not to optimize the compiler and standard library
#optimize = true

# Number of codegen units to use for each compiler invocation. A value of 0
# means "the number of cores on this machine", and 1+ is passed through to the
# compiler.
#codegen-units = 1

# Whether or not debug assertions are enabled for the compiler and standard
# library
#debug-assertions = false

# Whether or not debuginfo is emitted
#debuginfo = false

# Whether or not line number debug information is emitted
#debuginfo-lines = false

# Whether or not to only build debuginfo for the standard library if enabled.
# If enabled, this will not compile the compiler with debuginfo, just the
# standard library.
#debuginfo-only-std = false

# Whether or not jemalloc is built and enabled
#use-jemalloc = true

# Whether or not jemalloc is built with its debug option set
#debug-jemalloc = false

# Whether or not `panic!`s generate backtraces (RUST_BACKTRACE)
#backtrace = true

# The default linker that will be used by the generated compiler. Note that this
# is not the linker used to link said compiler.
#default-linker = "cc"

# The default ar utility that will be used by the generated compiler if LLVM
# cannot be used. Note that this is not used to assemble said compiler.
#default-ar = "ar"

# The "channel" for the Rust build to produce. The stable/beta channels only
# allow using stable features, whereas the nightly and dev channels allow using
# nightly features
#channel = "dev"

# By default the `rustc` executable is built with `-Wl,-rpath` flags on Unix
# platforms to ensure that the compiler is usable by default from the build
# directory (as it links to a number of dynamic libraries). This may not be
# desired in distributions, for example.
#rpath = true

# Flag indicating whether tests are compiled with optimizations (the -O flag) or
# with debuginfo (the -g flag)
#optimize-tests = true
#debuginfo-tests = true

# Flag indicating whether codegen tests will be run or not. If you get an error
# saying that the FileCheck executable is missing, you may want to disable this.
#codegen-tests = true

# Flag indicating whether the API analysis data should be saved.
#save-analysis = false

# =============================================================================
# Options for specific targets
#
# Each of the following options is scoped to the specific target triple in
# question and is used for determining how to compile each target.
# =============================================================================
[target.x86_64-unknown-linux-gnu]

# C compiler to be used to compile C code and link Rust code. Note that the
# default value is platform specific, and if not specified it may also depend on
# what platform is crossing to what platform.
#cc = "cc"

# C++ compiler to be used to compile C++ code (e.g. LLVM and our LLVM shims).
# This is only used for host targets.
#cxx = "c++"

# Path to the `llvm-config` binary of the installation of a custom LLVM to link
# against. Note that if this is specified we don't compile LLVM at all for this
# target.
#llvm-config = "../path/to/llvm/root/bin/llvm-config"

# Path to the custom jemalloc static library to link into the standard library
# by default. This is only used if jemalloc is still enabled above
#jemalloc = "/path/to/jemalloc/libjemalloc_pic.a"

# If this target is for Android, this option will be required to specify where
# the NDK for the target lives. This is used to find the C compiler to link and
# build native code.
#android-ndk = "/path/to/ndk"

# The root location of the MUSL installation directory. The library directory
# will also need to contain libunwind.a for an unwinding implementation. Note
# that this option only makes sense for MUSL targets that produce statically
# linked binaries
#musl-root = "..."

# =============================================================================
# Distribution options
#
# These options are related to distribution, mostly for the Rust project itself.
# You probably won't need to concern yourself with any of these options
# =============================================================================
[dist]

# This is the folder of artifacts that the build system will sign. All files in
# this directory will be signed with the default gpg key using the system `gpg`
# binary. The `asc` and `sha256` files will all be output into the standard dist
# output folder (currently `build/dist`)
#
# This folder should be populated ahead of time before the build system is
# invoked.
#sign-folder = "path/to/folder/to/sign"

# This is a file which contains the password of the default gpg key. This will
# be passed to `gpg` down the road when signing all files in `sign-folder`
# above. This should be stored in plaintext.
#gpg-password-file = "path/to/gpg/password"

# The remote address that all artifacts will eventually be uploaded to. The
# build system generates manifests which will point to these urls, and for the
# manifests to be correct they'll have to have the right URLs encoded.
#
# Note that this address should not contain a trailing slash as file names will
# be appended to it.
#upload-addr = "https://example.com/folder"
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/dist.rs version [6a1cf42725].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Implementation of the various distribution aspects of the compiler.
//!
//! This module is responsible for creating tarballs of the standard library,
//! compiler, and documentation. This ends up being what we distribute to
//! everyone as well.
//!
//! No tarball is actually created literally in this file, but rather we shell
//! out to `rust-installer` still. This may one day be replaced with bits and
//! pieces of `rustup.rs`!

use std::env;
use std::fs::{self, File};
use std::io::{Read, Write};
use std::path::{PathBuf, Path};
use std::process::{Command, Stdio};

use build_helper::output;

// Shell used to invoke rust-installer's `gen-installer.sh`; plain `sh` works
// on most Unix platforms.
#[cfg(not(target_os = "solaris"))]
const SH_CMD: &'static str = "sh";
// On Solaris, sh is the historical bourne shell, not a POSIX shell, or bash.
#[cfg(target_os = "solaris")]
const SH_CMD: &'static str = "bash";

use {Build, Compiler, Mode};
use channel;
use util::{cp_r, libdir, is_dylib, cp_filtered, copy, exe};

/// Returns the full package name, `<component>-<version>`, for a Rust
/// component such as `rust-docs` or `rustc`.
fn pkgname(build: &Build, component: &str) -> String {
    // Cargo is versioned separately, so this helper only supports the
    // `rust*` components.
    assert!(component.starts_with("rust"));
    let vers = build.rust_package_vers();
    format!("{}-{}", component, vers)
}

/// Directory under the build output where finished dist artifacts land.
fn distdir(build: &Build) -> PathBuf {
    let out = &build.out;
    out.join("dist")
}

/// Scratch directory used while assembling installer images and overlays.
pub fn tmpdir(build: &Build) -> PathBuf {
    build.out.join("tmp").join("dist")
}

/// Builds the `rust-docs` installer component.
///
/// Slurps up documentation from the `stage`'s `host`.
pub fn docs(build: &Build, stage: u32, host: &str) {
    println!("Dist docs stage{} ({})", stage, host);
    if !build.config.docs {
        println!("\tskipping - docs disabled");
        return
    }

    let name = pkgname(build, "rust-docs");
    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
    // Start from a clean image directory; ignore the error if it didn't exist.
    let _ = fs::remove_dir_all(&image);

    // Copy the generated HTML docs into the image layout.
    let dst = image.join("share/doc/rust/html");
    t!(fs::create_dir_all(&dst));
    let src = build.out.join(host).join("doc");
    cp_r(&src, &dst);

    // Package the image via rust-installer's gen-installer.sh.
    let mut cmd = Command::new(SH_CMD);
    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
       .arg("--product-name=Rust-Documentation")
       .arg("--rel-manifest-dir=rustlib")
       .arg("--success-message=Rust-documentation-is-installed.")
       .arg(format!("--image-dir={}", sanitize_sh(&image)))
       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
       .arg(format!("--package-name={}-{}", name, host))
       .arg("--component-name=rust-docs")
       .arg("--legacy-manifest-dirs=rustlib,cargo")
       .arg("--bulk-dirs=share/doc/rust/html");
    build.run(&mut cmd);
    t!(fs::remove_dir_all(&image));

    // As part of this step, *also* copy the docs directory to a directory which
    // buildbot typically uploads.
    if host == build.config.build {
        let dst = distdir(build).join("doc").join(build.rust_package_vers());
        t!(fs::create_dir_all(&dst));
        cp_r(&src, &dst);
    }
}

/// Build the `rust-mingw` installer component.
///
/// This contains all the bits and pieces to run the MinGW Windows targets
/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
/// Currently just shells out to a python script, but that should be rewritten
/// in Rust.
pub fn mingw(build: &Build, host: &str) {
    println!("Dist mingw ({})", host);
    let name = pkgname(build, "rust-mingw");
    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
    // Start from a clean image directory; ignore the error if it didn't exist.
    let _ = fs::remove_dir_all(&image);
    t!(fs::create_dir_all(&image));

    // The first argument to the script is a "temporary directory" which is just
    // thrown away (this contains the runtime DLLs included in the rustc package
    // above) and the second argument is where to place all the MinGW components
    // (which is what we want).
    //
    // FIXME: this script should be rewritten into Rust
    let mut cmd = Command::new(build.python());
    cmd.arg(build.src.join("src/etc/make-win-dist.py"))
       .arg(tmpdir(build))
       .arg(&image)
       .arg(host);
    build.run(&mut cmd);

    // Package the image via rust-installer's gen-installer.sh.
    let mut cmd = Command::new(SH_CMD);
    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
       .arg("--product-name=Rust-MinGW")
       .arg("--rel-manifest-dir=rustlib")
       .arg("--success-message=Rust-MinGW-is-installed.")
       .arg(format!("--image-dir={}", sanitize_sh(&image)))
       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
       .arg(format!("--package-name={}-{}", name, host))
       .arg("--component-name=rust-mingw")
       .arg("--legacy-manifest-dirs=rustlib,cargo");
    build.run(&mut cmd);
    t!(fs::remove_dir_all(&image));
}

/// Creates the `rustc` installer component.
///
/// Assembles an "image" (the files actually installed) plus an "overlay"
/// (license/version metadata shipped in the tarball but not installed), then
/// shells out to rust-installer to produce the tarball.
pub fn rustc(build: &Build, stage: u32, host: &str) {
    println!("Dist rustc stage{} ({})", stage, host);
    let name = pkgname(build, "rustc");
    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
    let _ = fs::remove_dir_all(&image);
    let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
    let _ = fs::remove_dir_all(&overlay);

    // Prepare the rustc "image", what will actually end up getting installed
    prepare_image(build, stage, host, &image);

    // Prepare the overlay which is part of the tarball but won't actually be
    // installed
    let cp = |file: &str| {
        install(&build.src.join(file), &overlay, 0o644);
    };
    cp("COPYRIGHT");
    cp("LICENSE-APACHE");
    cp("LICENSE-MIT");
    cp("README.md");
    // tiny morsel of metadata is used by rust-packaging
    let version = build.rust_version();
    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));

    // On MinGW we've got a few runtime DLL dependencies that we need to
    // include. The first argument to this script is where to put these DLLs
    // (the image we're creating), and the second argument is a junk directory
    // to ignore all other MinGW stuff the script creates.
    //
    // On 32-bit MinGW we're always including a DLL which needs some extra
    // licenses to distribute. On 64-bit MinGW we don't actually distribute
    // anything requiring us to distribute a license, but it's likely the
    // install will *also* include the rust-mingw package, which also needs
    // licenses, so to be safe we just include it here in all MinGW packages.
    //
    // FIXME: this script should be rewritten into Rust
    if host.contains("pc-windows-gnu") {
        let mut cmd = Command::new(build.python());
        cmd.arg(build.src.join("src/etc/make-win-dist.py"))
           .arg(&image)
           .arg(tmpdir(build))
           .arg(host);
        build.run(&mut cmd);

        let dst = image.join("share/doc");
        t!(fs::create_dir_all(&dst));
        cp_r(&build.src.join("src/etc/third-party"), &dst);
    }

    // Finally, wrap everything up in a nice tarball!
    let mut cmd = Command::new(SH_CMD);
    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
       .arg("--product-name=Rust")
       .arg("--rel-manifest-dir=rustlib")
       .arg("--success-message=Rust-is-ready-to-roll.")
       .arg(format!("--image-dir={}", sanitize_sh(&image)))
       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
       .arg(format!("--package-name={}-{}", name, host))
       .arg("--component-name=rustc")
       .arg("--legacy-manifest-dirs=rustlib,cargo");
    build.run(&mut cmd);
    t!(fs::remove_dir_all(&image));
    t!(fs::remove_dir_all(&overlay));

    // Populates `image` with the compiler binaries, runtime DLLs, man pages,
    // debugger scripts, and license files for the given stage/host sysroot.
    fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) {
        let src = build.sysroot(&Compiler::new(stage, host));
        let libdir = libdir(host);

        // Copy rustc/rustdoc binaries
        t!(fs::create_dir_all(image.join("bin")));
        cp_r(&src.join("bin"), &image.join("bin"));

        // Copy runtime DLLs needed by the compiler
        if libdir != "bin" {
            for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
                let name = entry.file_name();
                if let Some(s) = name.to_str() {
                    if is_dylib(s) {
                        install(&entry.path(), &image.join(libdir), 0o644);
                    }
                }
            }
        }

        // Man pages
        t!(fs::create_dir_all(image.join("share/man/man1")));
        cp_r(&build.src.join("man"), &image.join("share/man/man1"));

        // Debugger scripts
        debugger_scripts(build, &image, host);

        // Misc license info
        let cp = |file: &str| {
            install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
        };
        cp("COPYRIGHT");
        cp("LICENSE-APACHE");
        cp("LICENSE-MIT");
        cp("README.md");
    }
}

/// Copies debugger scripts for `host` into the `sysroot` specified.
pub fn debugger_scripts(build: &Build,
                        sysroot: &Path,
                        host: &str) {
    // MSVC targets use the Windows debuggers; no scripts to install there.
    if host.contains("windows-msvc") {
        return
    }

    let etc_dst = sysroot.join("lib/rustlib/etc");
    let install_script = |file: &str| {
        t!(fs::create_dir_all(&etc_dst));
        install(&build.src.join("src/etc/").join(file), &etc_dst, 0o644);
    };

    install_script("debugger_pretty_printers_common.py");

    // gdb wrapper binary plus its pretty-printer scripts
    install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
            0o755);
    install_script("gdb_load_rust_pretty_printers.py");
    install_script("gdb_rust_pretty_printing.py");

    // lldb wrapper binary plus its formatter script
    install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
            0o755);
    install_script("lldb_rust_formatters.py");
}

/// Creates the `rust-std` installer component as compiled by `compiler` for the
/// target `target`.
pub fn std(build: &Build, compiler: &Compiler, target: &str) {
    println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
             target);

    // The only true set of target libraries came from the build triple, so
    // let's reduce redundant work by only producing archives from that host.
    if compiler.host != build.config.build {
        println!("\tskipping, not a build host");
        return
    }

    let name = pkgname(build, "rust-std");
    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
    // Start from a clean image directory; ignore the error if it didn't exist.
    let _ = fs::remove_dir_all(&image);

    // Copy the target's rustlib (standard library artifacts) into the image.
    let dst = image.join("lib/rustlib").join(target);
    t!(fs::create_dir_all(&dst));
    let src = build.sysroot(compiler).join("lib/rustlib");
    cp_r(&src.join(target), &dst);

    // Package the image via rust-installer's gen-installer.sh.
    let mut cmd = Command::new(SH_CMD);
    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
       .arg("--product-name=Rust")
       .arg("--rel-manifest-dir=rustlib")
       .arg("--success-message=std-is-standing-at-the-ready.")
       .arg(format!("--image-dir={}", sanitize_sh(&image)))
       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
       .arg(format!("--package-name={}-{}", name, target))
       .arg(format!("--component-name=rust-std-{}", target))
       .arg("--legacy-manifest-dirs=rustlib,cargo");
    build.run(&mut cmd);
    t!(fs::remove_dir_all(&image));
}

/// Location of the plain (non-installer) source tarball,
/// `dist/rustc-<version>-src.tar.gz`.
pub fn rust_src_location(build: &Build) -> PathBuf {
    let tarball = format!("rustc-{}-src.tar.gz", build.rust_package_vers());
    distdir(build).join(tarball)
}

/// Creates a tarball of save-analysis metadata, if available.
pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
    // Only produced when save-analysis was enabled in the build config.
    if !build.config.rust_save_analysis {
        return
    }

    println!("Dist analysis");

    if compiler.host != build.config.build {
        println!("\tskipping, not a build host");
        return
    }

    // Package save-analysis from stage1 if not doing a full bootstrap, as the
    // stage2 artifacts is simply copied from stage1 in that case.
    let compiler = if build.force_use_stage1(compiler, target) {
        Compiler::new(1, compiler.host)
    } else {
        compiler.clone()
    };

    let name = pkgname(build, "rust-analysis");
    let image = tmpdir(build).join(format!("{}-{}-image", name, target));

    // save-analysis JSON lives alongside the release deps output.
    let src = build.stage_out(&compiler, Mode::Libstd).join(target).join("release").join("deps");

    let image_src = src.join("save-analysis");
    let dst = image.join("lib/rustlib").join(target).join("analysis");
    t!(fs::create_dir_all(&dst));
    println!("image_src: {:?}, dst: {:?}", image_src, dst);
    cp_r(&image_src, &dst);

    // Package the image via rust-installer's gen-installer.sh.
    let mut cmd = Command::new(SH_CMD);
    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
       .arg("--product-name=Rust")
       .arg("--rel-manifest-dir=rustlib")
       .arg("--success-message=save-analysis-saved.")
       .arg(format!("--image-dir={}", sanitize_sh(&image)))
       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
       .arg(format!("--package-name={}-{}", name, target))
       .arg(format!("--component-name=rust-analysis-{}", target))
       .arg("--legacy-manifest-dirs=rustlib,cargo");
    build.run(&mut cmd);
    t!(fs::remove_dir_all(&image));
}

// Pinned version of the `cargo-vendor` subcommand installed (if missing) to
// vendor all Cargo dependencies into the source tarball.
const CARGO_VENDOR_VERSION: &'static str = "0.1.4";

/// Creates the `rust-src` installer component and the plain source tarball.
///
/// Copies the source tree (filtered of VCS metadata, editor backups and bulky
/// LLVM test inputs), vendors all Cargo dependencies via `cargo-vendor`, then
/// produces both a rust-installer tarball and a plain `rustc-<ver>-src.tar.gz`.
pub fn rust_src(build: &Build) {
    if !build.config.rust_dist_src {
        return
    }

    println!("Dist src");

    let name = pkgname(build, "rust-src");
    let image = tmpdir(build).join(format!("{}-image", name));
    let _ = fs::remove_dir_all(&image);

    let dst = image.join("lib/rustlib/src");
    let dst_src = dst.join("rust");
    t!(fs::create_dir_all(&dst_src));

    // This is the set of root paths which will become part of the source package
    let src_files = [
        "COPYRIGHT",
        "LICENSE-APACHE",
        "LICENSE-MIT",
        "CONTRIBUTING.md",
        "README.md",
        "RELEASES.md",
        "configure",
        "x.py",
    ];
    let src_dirs = [
        "man",
        "src",
        "cargo",
    ];

    // Returns true for paths that should be included in the source package.
    let filter_fn = move |path: &Path| {
        let spath = match path.to_str() {
            Some(path) => path,
            None => return false,
        };
        // Editor backups and compiled Python bytecode.
        if spath.ends_with("~") || spath.ends_with(".pyc") {
            return false
        }
        // Bulky LLVM test inputs aren't needed to build from source.
        if spath.contains("llvm/test") || spath.contains("llvm\\test") {
            if spath.ends_with(".ll") ||
               spath.ends_with(".td") ||
               spath.ends_with(".s") {
                return false
            }
        }

        // Version-control metadata of various systems (git, svn, bzr, hg, ...).
        // Fixed: ".hgrags" was a typo for Mercurial's ".hgtags" file.
        let excludes = [
            "CVS", "RCS", "SCCS", ".git", ".gitignore", ".gitmodules",
            ".gitattributes", ".cvsignore", ".svn", ".arch-ids", "{arch}",
            "=RELEASE-ID", "=meta-update", "=update", ".bzr", ".bzrignore",
            ".bzrtags", ".hg", ".hgignore", ".hgtags", "_darcs",
        ];
        !path.iter()
             .map(|s| s.to_str().unwrap())
             .any(|s| excludes.contains(&s))
    };

    // Copy the directories using our filter
    for item in &src_dirs {
        let dst = &dst_src.join(item);
        t!(fs::create_dir(dst));
        cp_filtered(&build.src.join(item), dst, &filter_fn);
    }
    // Copy the files normally
    for item in &src_files {
        copy(&build.src.join(item), &dst_src.join(item));
    }

    // Get cargo-vendor installed, if it isn't already.
    let mut has_cargo_vendor = false;
    let mut cmd = Command::new(&build.cargo);
    for line in output(cmd.arg("install").arg("--list")).lines() {
        has_cargo_vendor |= line.starts_with("cargo-vendor ");
    }
    if !has_cargo_vendor {
        let mut cmd = Command::new(&build.cargo);
        cmd.arg("install")
           .arg("--force")
           .arg("--debug")
           .arg("--vers").arg(CARGO_VENDOR_VERSION)
           .arg("cargo-vendor")
           .env("RUSTC", &build.rustc);
        build.run(&mut cmd);
    }

    // Vendor all Cargo dependencies
    let mut cmd = Command::new(&build.cargo);
    cmd.arg("vendor")
       .current_dir(&dst_src.join("src"));
    build.run(&mut cmd);

    // Create source tarball in rust-installer format
    let mut cmd = Command::new(SH_CMD);
    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
       .arg("--product-name=Rust")
       .arg("--rel-manifest-dir=rustlib")
       .arg("--success-message=Awesome-Source.")
       .arg(format!("--image-dir={}", sanitize_sh(&image)))
       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
       .arg(format!("--package-name={}", name))
       .arg("--component-name=rust-src")
       .arg("--legacy-manifest-dirs=rustlib,cargo");
    build.run(&mut cmd);

    // Rename directory, so that root folder of tarball has the correct name
    let plain_name = format!("rustc-{}-src", build.rust_package_vers());
    let plain_dst_src = tmpdir(build).join(&plain_name);
    let _ = fs::remove_dir_all(&plain_dst_src);
    t!(fs::create_dir_all(&plain_dst_src));
    cp_r(&dst_src, &plain_dst_src);

    // Create the version file
    write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());

    // Create plain source tarball
    let mut cmd = Command::new("tar");
    cmd.arg("-czf").arg(sanitize_sh(&rust_src_location(build)))
       .arg(&plain_name)
       .current_dir(tmpdir(build));
    build.run(&mut cmd);

    t!(fs::remove_dir_all(&image));
    t!(fs::remove_dir_all(&plain_dst_src));
}

/// Copies `src` into `dstdir` (creating the directory if needed) and applies
/// the Unix permission bits `perms` to the copy (no-op on Windows).
fn install(src: &Path, dstdir: &Path, perms: u32) {
    t!(fs::create_dir_all(dstdir));
    let target = dstdir.join(src.file_name().unwrap());
    t!(fs::copy(src, &target));
    chmod(&target, perms);
}

// Sets Unix permission bits (e.g. 0o755) on `path`.
#[cfg(unix)]
fn chmod(path: &Path, perms: u32) {
    use std::os::unix::fs::*;
    t!(fs::set_permissions(path, fs::Permissions::from_mode(perms)));
}
// Windows has no Unix permission bits, so this is a no-op there.
#[cfg(windows)]
fn chmod(_path: &Path, _perms: u32) {}

// We have to run a few shell scripts, which choke quite a bit on both `\`
// characters and on `C:\` paths, so normalize both of them away.
pub fn sanitize_sh(path: &Path) -> String {
    // Rewrites a leading drive prefix `X:/...` into `/X/...`; returns None
    // when the string has no such prefix.
    fn change_drive(s: &str) -> Option<String> {
        let mut chars = s.chars();
        let drive = chars.next().unwrap_or('C');
        if chars.next() != Some(':') {
            return None
        }
        if chars.next() != Some('/') {
            return None
        }
        Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..]))
    }

    let unix_style = path.to_str().unwrap().replace("\\", "/");
    match change_drive(&unix_style) {
        Some(rewritten) => rewritten,
        None => unix_style,
    }
}

/// Creates (or truncates) the file at `path` and writes `data` to it,
/// panicking via `t!` on any I/O error.
fn write_file(path: &Path, data: &[u8]) {
    let mut out = t!(fs::File::create(path));
    t!(out.write_all(data));
}

pub fn cargo(build: &Build, stage: u32, target: &str) {
    println!("Dist cargo stage{} ({})", stage, target);
    let compiler = Compiler::new(stage, &build.config.build);

    let src = build.src.join("cargo");
    let etc = src.join("src/etc");
    let release_num = build.cargo_release_num();
    let name = format!("cargo-{}", build.package_vers(&release_num));
    let version = build.cargo_info.version(build, &release_num);

    let tmp = tmpdir(build);
    let image = tmp.join("cargo-image");
    drop(fs::remove_dir_all(&image));
    t!(fs::create_dir_all(&image));

    // Prepare the image directory
    t!(fs::create_dir_all(image.join("share/zsh/site-functions")));
    t!(fs::create_dir_all(image.join("etc/bash_completions.d")));
    let cargo = build.cargo_out(&compiler, Mode::Tool, target)
                     .join(exe("cargo", target));
    install(&cargo, &image.join("bin"), 0o755);
    for man in t!(etc.join("man").read_dir()) {
        let man = t!(man);
        install(&man.path(), &image.join("share/man/man1"), 0o644);
    }
    install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
    copy(&etc.join("cargo.bashcomp.sh"),
         &image.join("etc/bash_completions.d/cargo"));
    let doc = image.join("share/doc/cargo");
    install(&src.join("README.md"), &doc, 0o644);
    install(&src.join("LICENSE-MIT"), &doc, 0o644);
    install(&src.join("LICENSE-APACHE"), &doc, 0o644);
    install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644);

    // Prepare the overlay
    let overlay = tmp.join("cargo-overlay");
    drop(fs::remove_dir_all(&overlay));
    t!(fs::create_dir_all(&overlay));
    install(&src.join("README.md"), &overlay, 0o644);
    install(&src.join("LICENSE-MIT"), &overlay, 0o644);
    install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
    install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644);
    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));

    // Generate the installer tarball
    let mut cmd = Command::new("sh");
    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
       .arg("--product-name=Rust")
       .arg("--rel-manifest-dir=rustlib")
       .arg("--success-message=Rust-is-ready-to-roll.")
       .arg(format!("--image-dir={}", sanitize_sh(&image)))
       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
       .arg(format!("--package-name={}-{}", name, target))
       .arg("--component-name=cargo")
       .arg("--legacy-manifest-dirs=rustlib,cargo");
    build.run(&mut cmd);
}

/// Creates a combined installer for the specified target in the provided stage.
pub fn extended(build: &Build, stage: u32, target: &str) {
    println!("Dist extended stage{} ({})", stage, target);

    let dist = distdir(build);
    let cargo_vers = build.cargo_release_num();
    let rustc_installer = dist.join(format!("{}-{}.tar.gz",
                                            pkgname(build, "rustc"),
                                            target));
    let cargo_installer = dist.join(format!("cargo-{}-{}.tar.gz",
                                            build.package_vers(&cargo_vers),
                                            target));
    let docs_installer = dist.join(format!("{}-{}.tar.gz",
                                           pkgname(build, "rust-docs"),
                                           target));
    let mingw_installer = dist.join(format!("{}-{}.tar.gz",
                                            pkgname(build, "rust-mingw"),
                                            target));
    let std_installer = dist.join(format!("{}-{}.tar.gz",
                                          pkgname(build, "rust-std"),
                                          target));

    let tmp = tmpdir(build);
    let overlay = tmp.join("extended-overlay");
    let etc = build.src.join("src/etc/installer");
    let work = tmp.join("work");

    let _ = fs::remove_dir_all(&overlay);
    install(&build.src.join("COPYRIGHT"), &overlay, 0o644);
    install(&build.src.join("LICENSE-APACHE"), &overlay, 0o644);
    install(&build.src.join("LICENSE-MIT"), &overlay, 0o644);
    let version = build.rust_version();
    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
    install(&etc.join("README.md"), &overlay, 0o644);

    // When rust-std package split from rustc, we needed to ensure that during
    // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
    // the std files during uninstall. To do this ensure that rustc comes
    // before rust-std in the list below.
    let mut input_tarballs = format!("{},{},{},{}",
                                     sanitize_sh(&rustc_installer),
                                     sanitize_sh(&cargo_installer),
                                     sanitize_sh(&docs_installer),
                                     sanitize_sh(&std_installer));
    if target.contains("pc-windows-gnu") {
        input_tarballs.push_str(",");
        input_tarballs.push_str(&sanitize_sh(&mingw_installer));
    }

    let mut cmd = Command::new(SH_CMD);
    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/combine-installers.sh")))
       .arg("--product-name=Rust")
       .arg("--rel-manifest-dir=rustlib")
       .arg("--success-message=Rust-is-ready-to-roll.")
       .arg(format!("--work-dir={}", sanitize_sh(&work)))
       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
       .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
       .arg("--legacy-manifest-dirs=rustlib,cargo")
       .arg(format!("--input-tarballs={}", input_tarballs))
       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)));
    build.run(&mut cmd);

    let mut license = String::new();
    t!(t!(File::open(build.src.join("COPYRIGHT"))).read_to_string(&mut license));
    license.push_str("\n");
    t!(t!(File::open(build.src.join("LICENSE-APACHE"))).read_to_string(&mut license));
    license.push_str("\n");
    t!(t!(File::open(build.src.join("LICENSE-MIT"))).read_to_string(&mut license));

    let rtf = r"{\rtf1\ansi\deff0{\fonttbl{\f0\fnil\fcharset0 Arial;}}\nowwrap\fs18";
    let mut rtf = rtf.to_string();
    rtf.push_str("\n");
    for line in license.lines() {
        rtf.push_str(line);
        rtf.push_str("\\line ");
    }
    rtf.push_str("}");

    if target.contains("apple-darwin") {
        let pkg = tmp.join("pkg");
        let _ = fs::remove_dir_all(&pkg);
        t!(fs::create_dir_all(pkg.join("rustc")));
        t!(fs::create_dir_all(pkg.join("cargo")));
        t!(fs::create_dir_all(pkg.join("rust-docs")));
        t!(fs::create_dir_all(pkg.join("rust-std")));

        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target)),
             &pkg.join("rustc"));
        cp_r(&work.join(&format!("cargo-nightly-{}", target)),
             &pkg.join("cargo"));
        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target)),
             &pkg.join("rust-docs"));
        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target)),
             &pkg.join("rust-std"));

        install(&etc.join("pkg/postinstall"), &pkg.join("rustc"), 0o755);
        install(&etc.join("pkg/postinstall"), &pkg.join("cargo"), 0o755);
        install(&etc.join("pkg/postinstall"), &pkg.join("rust-docs"), 0o755);
        install(&etc.join("pkg/postinstall"), &pkg.join("rust-std"), 0o755);

        let pkgbuild = |component: &str| {
            let mut cmd = Command::new("pkgbuild");
            cmd.arg("--identifier").arg(format!("org.rust-lang.{}", component))
               .arg("--scripts").arg(pkg.join(component))
               .arg("--nopayload")
               .arg(pkg.join(component).with_extension("pkg"));
            build.run(&mut cmd);
        };
        pkgbuild("rustc");
        pkgbuild("cargo");
        pkgbuild("rust-docs");
        pkgbuild("rust-std");

        // create an 'uninstall' package
        install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
        pkgbuild("uninstall");

        t!(fs::create_dir_all(pkg.join("res")));
        t!(t!(File::create(pkg.join("res/LICENSE.txt"))).write_all(license.as_bytes()));
        install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
        let mut cmd = Command::new("productbuild");
        cmd.arg("--distribution").arg(etc.join("pkg/Distribution.xml"))
           .arg("--resources").arg(pkg.join("res"))
           .arg(distdir(build).join(format!("{}-{}.pkg",
                                             pkgname(build, "rust"),
                                             target)))
           .arg("--package-path").arg(&pkg);
        build.run(&mut cmd);
    }

    if target.contains("windows") {
        let exe = tmp.join("exe");
        let _ = fs::remove_dir_all(&exe);
        t!(fs::create_dir_all(exe.join("rustc")));
        t!(fs::create_dir_all(exe.join("cargo")));
        t!(fs::create_dir_all(exe.join("rust-docs")));
        t!(fs::create_dir_all(exe.join("rust-std")));
        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target))
                  .join("rustc"),
             &exe.join("rustc"));
        cp_r(&work.join(&format!("cargo-nightly-{}", target))
                  .join("cargo"),
             &exe.join("cargo"));
        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target))
                  .join("rust-docs"),
             &exe.join("rust-docs"));
        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target))
                  .join(format!("rust-std-{}", target)),
             &exe.join("rust-std"));

        t!(fs::remove_file(exe.join("rustc/manifest.in")));
        t!(fs::remove_file(exe.join("cargo/manifest.in")));
        t!(fs::remove_file(exe.join("rust-docs/manifest.in")));
        t!(fs::remove_file(exe.join("rust-std/manifest.in")));

        if target.contains("windows-gnu") {
            t!(fs::create_dir_all(exe.join("rust-mingw")));
            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-mingw"), target))
                      .join("rust-mingw"),
                 &exe.join("rust-mingw"));
            t!(fs::remove_file(exe.join("rust-mingw/manifest.in")));
        }

        install(&etc.join("exe/rust.iss"), &exe, 0o644);
        install(&etc.join("exe/modpath.iss"), &exe, 0o644);
        install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
        install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
        t!(t!(File::create(exe.join("LICENSE.txt"))).write_all(license.as_bytes()));

        // Generate exe installer
        let mut cmd = Command::new("iscc");
        cmd.arg("rust.iss")
           .current_dir(&exe);
        if target.contains("windows-gnu") {
            cmd.arg("/dMINGW");
        }
        add_env(build, &mut cmd, target);
        build.run(&mut cmd);
        install(&exe.join(format!("{}-{}.exe", pkgname(build, "rust"), target)),
                &distdir(build),
                0o755);

        // Generate msi installer
        let wix = PathBuf::from(env::var_os("WIX").unwrap());
        let heat = wix.join("bin/heat.exe");
        let candle = wix.join("bin/candle.exe");
        let light = wix.join("bin/light.exe");

        let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"];
        build.run(Command::new(&heat)
                        .current_dir(&exe)
                        .arg("dir")
                        .arg("rustc")
                        .args(&heat_flags)
                        .arg("-cg").arg("RustcGroup")
                        .arg("-dr").arg("Rustc")
                        .arg("-var").arg("var.RustcDir")
                        .arg("-out").arg(exe.join("RustcGroup.wxs")));
        build.run(Command::new(&heat)
                        .current_dir(&exe)
                        .arg("dir")
                        .arg("rust-docs")
                        .args(&heat_flags)
                        .arg("-cg").arg("DocsGroup")
                        .arg("-dr").arg("Docs")
                        .arg("-var").arg("var.DocsDir")
                        .arg("-out").arg(exe.join("DocsGroup.wxs"))
                        .arg("-t").arg(etc.join("msi/squash-components.xsl")));
        build.run(Command::new(&heat)
                        .current_dir(&exe)
                        .arg("dir")
                        .arg("cargo")
                        .args(&heat_flags)
                        .arg("-cg").arg("CargoGroup")
                        .arg("-dr").arg("Cargo")
                        .arg("-var").arg("var.CargoDir")
                        .arg("-out").arg(exe.join("CargoGroup.wxs"))
                        .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
        build.run(Command::new(&heat)
                        .current_dir(&exe)
                        .arg("dir")
                        .arg("rust-std")
                        .args(&heat_flags)
                        .arg("-cg").arg("StdGroup")
                        .arg("-dr").arg("Std")
                        .arg("-var").arg("var.StdDir")
                        .arg("-out").arg(exe.join("StdGroup.wxs")));
        if target.contains("windows-gnu") {
            build.run(Command::new(&heat)
                            .current_dir(&exe)
                            .arg("dir")
                            .arg("rust-mingw")
                            .args(&heat_flags)
                            .arg("-cg").arg("GccGroup")
                            .arg("-dr").arg("Gcc")
                            .arg("-var").arg("var.GccDir")
                            .arg("-out").arg(exe.join("GccGroup.wxs")));
        }

        let candle = |input: &Path| {
            let output = exe.join(input.file_stem().unwrap())
                            .with_extension("wixobj");
            let arch = if target.contains("x86_64") {"x64"} else {"x86"};
            let mut cmd = Command::new(&candle);
            cmd.current_dir(&exe)
               .arg("-nologo")
               .arg("-dRustcDir=rustc")
               .arg("-dDocsDir=rust-docs")
               .arg("-dCargoDir=cargo")
               .arg("-dStdDir=rust-std")
               .arg("-arch").arg(&arch)
               .arg("-out").arg(&output)
               .arg(&input);
            add_env(build, &mut cmd, target);

            if target.contains("windows-gnu") {
               cmd.arg("-dGccDir=rust-mingw");
            }
            build.run(&mut cmd);
        };
        candle(&etc.join("msi/rust.wxs"));
        candle(&etc.join("msi/ui.wxs"));
        candle(&etc.join("msi/rustwelcomedlg.wxs"));
        candle("RustcGroup.wxs".as_ref());
        candle("DocsGroup.wxs".as_ref());
        candle("CargoGroup.wxs".as_ref());
        candle("StdGroup.wxs".as_ref());

        if target.contains("windows-gnu") {
            candle("GccGroup.wxs".as_ref());
        }

        t!(t!(File::create(exe.join("LICENSE.rtf"))).write_all(rtf.as_bytes()));
        install(&etc.join("gfx/banner.bmp"), &exe, 0o644);
        install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);

        let filename = format!("{}-{}.msi", pkgname(build, "rust"), target);
        let mut cmd = Command::new(&light);
        cmd.arg("-nologo")
           .arg("-ext").arg("WixUIExtension")
           .arg("-ext").arg("WixUtilExtension")
           .arg("-out").arg(exe.join(&filename))
           .arg("rust.wixobj")
           .arg("ui.wixobj")
           .arg("rustwelcomedlg.wixobj")
           .arg("RustcGroup.wixobj")
           .arg("DocsGroup.wixobj")
           .arg("CargoGroup.wixobj")
           .arg("StdGroup.wixobj")
           .current_dir(&exe);

        if target.contains("windows-gnu") {
           cmd.arg("GccGroup.wixobj");
        }
        // ICE57 wrongly complains about the shortcuts
        cmd.arg("-sice:ICE57");

        build.run(&mut cmd);

        t!(fs::rename(exe.join(&filename), distdir(build).join(&filename)));
    }
}

/// Populates `cmd` with the `CFG_*` environment variables read by the
/// Windows installer templates (Inno Setup `.iss` and WiX `.wxs` files).
fn add_env(build: &Build, cmd: &mut Command, target: &str) {
    // CFG_RELEASE_NUM has the shape "major.minor.patch"; peel off each piece.
    let mut ver_parts = channel::CFG_RELEASE_NUM.split('.');
    cmd.env("CFG_RELEASE_INFO", build.rust_version())
       .env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM)
       .env("CFG_RELEASE", build.rust_release())
       .env("CFG_PRERELEASE_VERSION", channel::CFG_PRERELEASE_VERSION)
       .env("CFG_VER_MAJOR", ver_parts.next().unwrap())
       .env("CFG_VER_MINOR", ver_parts.next().unwrap())
       .env("CFG_VER_PATCH", ver_parts.next().unwrap())
       .env("CFG_VER_BUILD", "0") // just needed to build
       .env("CFG_PACKAGE_VERS", build.rust_package_vers())
       .env("CFG_PACKAGE_NAME", pkgname(build, "rust"))
       .env("CFG_BUILD", target)
       .env("CFG_CHANNEL", &build.config.channel);

    // ABI flavor: windows-gnu targets get the MinGW toolchain bits.
    let (mingw, abi) = if target.contains("windows-gnu") {
        ("1", "GNU")
    } else {
        ("0", "MSVC")
    };
    cmd.env("CFG_MINGW", mingw)
       .env("CFG_ABI", abi);

    let platform = if target.contains("x86_64") { "x64" } else { "x86" };
    cmd.env("CFG_PLATFORM", platform);
}

/// Hashes and signs all produced artifacts via the `build-manifest` tool.
///
/// Requires `dist.sign-folder`, `dist.upload-addr` and
/// `dist.gpg-password-file` to all be set in `config.toml`; panics with a
/// descriptive message otherwise. The GPG passphrase is streamed to the
/// tool over stdin rather than passed on the command line.
pub fn hash_and_sign(build: &Build) {
    let compiler = Compiler::new(0, &build.config.build);
    let mut cmd = build.tool_cmd(&compiler, "build-manifest");

    // All three dist settings are mandatory — fail loudly when one is absent.
    let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
        panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n")
    });
    let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| {
        panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n")
    });
    let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
        panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n")
    });

    let mut pass = String::new();
    t!(t!(File::open(&file)).read_to_string(&mut pass));

    let today = output(Command::new("date").arg("+%Y-%m-%d"));

    // Positional arguments expected by build-manifest, in this exact order.
    cmd.arg(sign)
       .arg(distdir(build))
       .arg(today.trim())
       .arg(build.rust_package_vers())
       .arg(build.package_vers(&build.cargo_release_num()))
       .arg(addr);

    t!(fs::create_dir_all(distdir(build)));

    let mut child = t!(cmd.stdin(Stdio::piped()).spawn());
    t!(child.stdin.take().unwrap().write_all(pass.as_bytes()));
    let status = t!(child.wait());
    assert!(status.success());
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
















































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/doc.rs version [fcdfc4a443].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Documentation generation for rustbuild.
//!
//! This module implements generation for all bits and pieces of documentation
//! for the Rust project. This notably includes suites like the rust book, the
//! nomicon, standalone documentation, etc.
//!
//! Everything here is basically just a shim around calling either `rustbook` or
//! `rustdoc`.

use std::fs::{self, File};
use std::io::prelude::*;
use std::io;
use std::path::Path;
use std::process::Command;

use {Build, Compiler, Mode};
use util::{cp_r, symlink_dir};
use build_helper::up_to_date;

/// Invoke `rustbook` as compiled in `stage` for `target` for the doc book
/// `name` into the `out` path.
///
/// This will not actually generate any documentation if the documentation has
/// already been generated.
pub fn rustbook(build: &Build, target: &str, name: &str) {
    let doc_root = build.doc_out(target);
    t!(fs::create_dir_all(&doc_root));

    let dest = doc_root.join(name);
    let compiler = Compiler::new(0, &build.config.build);
    let book_src = build.src.join("src/doc").join(name);
    let index_page = dest.join("index.html");
    let rustbook_tool = build.tool(&compiler, "rustbook");

    // Skip the rebuild when neither the book sources nor the rustbook
    // binary are newer than the generated index.
    if up_to_date(&book_src, &index_page) && up_to_date(&rustbook_tool, &index_page) {
        return
    }

    println!("Rustbook ({}) - {}", target, name);
    let _ = fs::remove_dir_all(&dest);
    let mut cmd = build.tool_cmd(&compiler, "rustbook");
    cmd.arg("build")
       .arg(&book_src)
       .arg("-d")
       .arg(dest);
    build.run(&mut cmd);
}

/// Build the book and associated stuff.
///
/// We need to build:
///
/// * Book (first edition)
/// * Book (second edition)
/// * Index page
/// * Redirect pages
pub fn book(build: &Build, target: &str, name: &str) {
    // Both editions are complete rustbook builds; first edition first.
    for edition in &["first-edition", "second-edition"] {
        rustbook(build, target, &format!("{}/{}", name, edition));
    }

    // Render the top-level index page.
    println!("Documenting book index ({})", target);
    invoke_rustdoc(build, target, &format!("{}/index.md", name));

    // Render one redirect page per file in src/doc/book/redirects.
    println!("Documenting book redirect pages ({})", target);
    for entry in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) {
        let entry = t!(entry);
        let redirect = entry.path();
        invoke_rustdoc(build, target, redirect.to_str().unwrap());
    }
}

/// Renders a single markdown file under `src/doc` into the `book` output
/// directory via rustdoc, injecting the shared favicon/footer/version HTML.
fn invoke_rustdoc(build: &Build, target: &str, markdown: &str) {
    let doc_root = build.doc_out(target);

    let compiler = Compiler::new(0, &build.config.build);

    let md_path = build.src.join("src/doc").join(markdown);

    let rustdoc = build.rustdoc(&compiler);

    let favicon = build.src.join("src/doc/favicon.inc");
    let footer = build.src.join("src/doc/footer.inc");

    let version_input = build.src.join("src/doc/version_info.html.template");
    let version_info = doc_root.join("version_info.html");

    // Re-render version_info.html only when the template is newer.
    if !up_to_date(&version_input, &version_info) {
        let mut template = String::new();
        t!(t!(File::open(&version_input)).read_to_string(&mut template));
        let rendered = template.replace("VERSION", &build.rust_release())
                               .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
                               .replace("STAMP", build.rust_info.sha().unwrap_or(""));
        t!(t!(File::create(&version_info)).write_all(rendered.as_bytes()));
    }

    let mut cmd = Command::new(&rustdoc);

    build.add_rustc_lib_path(&compiler, &mut cmd);

    let book_out = doc_root.join("book");

    t!(fs::copy(build.src.join("src/doc/rust.css"), book_out.join("rust.css")));

    cmd.arg("--html-after-content").arg(&footer)
        .arg("--html-before-content").arg(&version_info)
        .arg("--html-in-header").arg(&favicon)
        .arg("--markdown-playground-url")
        .arg("https://play.rust-lang.org/")
        .arg("-o").arg(&book_out)
        .arg(&md_path)
        .arg("--markdown-css")
        .arg("rust.css");

    build.run(&mut cmd);
}

/// Generates all standalone documentation as compiled by the rustdoc in `stage`
/// for the `target` into `out`.
///
/// This will list all of `src/doc` looking for markdown files and appropriately
/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
/// `STAMP` along with providing the various header/footer HTML we've customized.
///
/// In the end, this is just a glorified wrapper around rustdoc!
pub fn standalone(build: &Build, target: &str) {
    println!("Documenting standalone ({})", target);
    let out = build.doc_out(target);
    t!(fs::create_dir_all(&out));

    let compiler = Compiler::new(0, &build.config.build);

    // Shared header/footer snippets injected into every rendered page.
    let favicon = build.src.join("src/doc/favicon.inc");
    let footer = build.src.join("src/doc/footer.inc");
    let full_toc = build.src.join("src/doc/full-toc.inc");
    t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));

    let version_input = build.src.join("src/doc/version_info.html.template");
    let version_info = out.join("version_info.html");

    // Re-render version_info.html only when the template is newer.
    if !up_to_date(&version_input, &version_info) {
        let mut template = String::new();
        t!(t!(File::open(&version_input)).read_to_string(&mut template));
        let rendered = template.replace("VERSION", &build.rust_release())
                               .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
                               .replace("STAMP", build.rust_info.sha().unwrap_or(""));
        t!(t!(File::create(&version_info)).write_all(rendered.as_bytes()));
    }

    for entry in t!(fs::read_dir(build.src.join("src/doc"))) {
        let entry = t!(entry);
        let md_path = entry.path();
        let filename = md_path.file_name().unwrap().to_str().unwrap();
        if !filename.ends_with(".md") || filename == "README.md" {
            continue
        }

        let html = out.join(filename).with_extension("html");
        let rustdoc = build.rustdoc(&compiler);
        // Skip regeneration when the page is newer than all of its inputs.
        if up_to_date(&md_path, &html) &&
           up_to_date(&footer, &html) &&
           up_to_date(&favicon, &html) &&
           up_to_date(&full_toc, &html) &&
           up_to_date(&version_info, &html) &&
           up_to_date(&rustdoc, &html) {
            continue
        }

        let mut cmd = Command::new(&rustdoc);
        build.add_rustc_lib_path(&compiler, &mut cmd);
        cmd.arg("--html-after-content").arg(&footer)
           .arg("--html-before-content").arg(&version_info)
           .arg("--html-in-header").arg(&favicon)
           .arg("--markdown-playground-url")
           .arg("https://play.rust-lang.org/")
           .arg("-o").arg(&out)
           .arg(&md_path);

        // not_found.md gets an absolute CSS URL and no table of contents
        // (presumably because the 404 page may be served from any path —
        // relative links would break there).
        if filename == "not_found.md" {
            cmd.arg("--markdown-no-toc")
               .arg("--markdown-css")
               .arg("https://doc.rust-lang.org/rust.css");
        } else {
            cmd.arg("--markdown-css").arg("rust.css");
        }
        build.run(&mut cmd);
    }
}

/// Compile all standard library documentation.
///
/// This will generate all documentation for the standard library and its
/// dependencies. This is largely just a wrapper around `cargo doc`.
pub fn std(build: &Build, stage: u32, target: &str) {
    println!("Documenting stage{} std ({})", stage, target);
    let out = build.doc_out(target);
    t!(fs::create_dir_all(&out));

    // Fall back to the stage-1 compiler when full bootstrapping is disabled
    // for this target.
    let mut compiler = Compiler::new(stage, &build.config.build);
    if build.force_use_stage1(&compiler, target) {
        compiler = Compiler::new(1, compiler.host);
    }
    let out_dir = build.stage_out(&compiler, Mode::Libstd)
                       .join(target).join("doc");
    let rustdoc = build.rustdoc(&compiler);

    // We symlink (directory junction on Windows) the cargo output directory
    // to the shared crate-doc location. This is for correctness, not speed:
    // std, test, and rustc docs form three trees that must end up merged,
    // and rustdoc merges far better than rustbuild would, so we simply point
    // all three cargo invocations at the same output directory and let
    // rustdoc write (and merge) directly into it.
    let shared_out = build.crate_doc_out(target);
    build.clear_if_dirty(&shared_out, &rustdoc);
    t!(symlink_dir_force(&shared_out, &out_dir));

    let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
    cargo.arg("--manifest-path")
         .arg(build.src.join("src/libstd/Cargo.toml"))
         .arg("--features").arg(build.std_features());

    // Outside compiler-docs mode, only a whitelist of public-facing crates
    // gets documented; internal std dependencies are skipped via --no-deps.
    if !build.config.compiler_docs {
        cargo.arg("--no-deps");
        for krate in &["alloc", "collections", "core", "std", "std_unicode"] {
            cargo.arg("-p").arg(krate);
            // Create all crate output directories first to make sure rustdoc
            // uses relative links.
            // FIXME: Cargo should probably do this itself.
            t!(fs::create_dir_all(out_dir.join(krate)));
        }
    }

    build.run(&mut cargo);
    cp_r(&shared_out, &out);
}

/// Compile all libtest documentation.
///
/// This will generate all documentation for libtest and its dependencies. This
/// is largely just a wrapper around `cargo doc`.
pub fn test(build: &Build, stage: u32, target: &str) {
    println!("Documenting stage{} test ({})", stage, target);
    let out = build.doc_out(target);
    t!(fs::create_dir_all(&out));

    // Fall back to the stage-1 compiler when full bootstrapping is disabled.
    let mut compiler = Compiler::new(stage, &build.config.build);
    if build.force_use_stage1(&compiler, target) {
        compiler = Compiler::new(1, compiler.host);
    }
    let out_dir = build.stage_out(&compiler, Mode::Libtest)
                       .join(target).join("doc");
    let rustdoc = build.rustdoc(&compiler);

    // See docs in std above for why we symlink
    let shared_out = build.crate_doc_out(target);
    build.clear_if_dirty(&shared_out, &rustdoc);
    t!(symlink_dir_force(&shared_out, &out_dir));

    let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
    cargo.arg("--manifest-path")
         .arg(build.src.join("src/libtest/Cargo.toml"));
    build.run(&mut cargo);
    cp_r(&shared_out, &out);
}

/// Generate all compiler documentation.
///
/// Runs `cargo doc` against the compiler workspace manifest to document the
/// rustc libraries and — in compiler-docs mode — their dependencies.
pub fn rustc(build: &Build, stage: u32, target: &str) {
    println!("Documenting stage{} compiler ({})", stage, target);
    let doc_dest = build.doc_out(target);
    t!(fs::create_dir_all(&doc_dest));

    // Pick the documenting compiler, possibly downgrading to stage 1.
    let mut doc_compiler = Compiler::new(stage, &build.config.build);
    if build.force_use_stage1(&doc_compiler, target) {
        doc_compiler = Compiler::new(1, doc_compiler.host);
    }

    let stage_doc_dir = build.stage_out(&doc_compiler, Mode::Librustc)
                             .join(target)
                             .join("doc");
    let rustdoc = build.rustdoc(&doc_compiler);

    // See docs in std above for why we symlink.
    let crate_docs = build.crate_doc_out(target);
    build.clear_if_dirty(&crate_docs, &rustdoc);
    t!(symlink_dir_force(&crate_docs, &stage_doc_dir));

    let mut cargo = build.cargo(&doc_compiler, Mode::Librustc, target, "doc");
    cargo.arg("--manifest-path")
         .arg(build.src.join("src/rustc/Cargo.toml"))
         .arg("--features").arg(build.rustc_features());

    if build.config.compiler_docs {
        // src/rustc/Cargo.toml contains bin crates called rustc and rustdoc
        // which would otherwise overwrite the docs for the real rustc and
        // rustdoc lib crates.
        cargo.arg("-p").arg("rustc_driver")
             .arg("-p").arg("rustdoc");
    } else {
        // Like with libstd above, when compiler docs aren't enabled we do
        // not document internal dependencies, so we have a whitelist.
        cargo.arg("--no-deps");
        for krate in &["proc_macro"] {
            cargo.arg("-p").arg(krate);
        }
    }

    build.run(&mut cargo);
    cp_r(&crate_docs, &doc_dest);
}

/// Generates the HTML rendered error-index by running the
/// `error_index_generator` tool with the stage0 compiler.
pub fn error_index(build: &Build, target: &str) {
    println!("Documenting error index ({})", target);
    let dest = build.doc_out(target);
    t!(fs::create_dir_all(&dest));

    let bootstrap_compiler = Compiler::new(0, &build.config.build);
    let mut generator = build.tool_cmd(&bootstrap_compiler, "error_index_generator");
    generator.arg("html")
             .arg(dest.join("error-index.html"));

    // FIXME: shouldn't have to pass this env var
    generator.env("CFG_BUILD", &build.config.build);

    build.run(&mut generator);
}

fn symlink_dir_force(src: &Path, dst: &Path) -> io::Result<()> {
    if let Ok(m) = fs::symlink_metadata(dst) {
        if m.file_type().is_dir() {
            try!(fs::remove_dir_all(dst));
        } else {
            // handle directory junctions on windows by falling back to
            // `remove_dir`.
            try!(fs::remove_file(dst).or_else(|_| {
                fs::remove_dir(dst)
            }));
        }
    }

    symlink_dir(src, dst)
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































































































































































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/flags.rs version [81f6e410e1].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Command-line interface of the rustbuild build system.
//!
//! This module implements the command-line parsing of the build system which
//! has various flags to configure how it's run.

use std::env;
use std::fs;
use std::path::PathBuf;
use std::process;

use getopts::{Matches, Options};

use Build;
use config::Config;
use metadata;
use step;

/// Deserialized version of all flags for this compile.
pub struct Flags {
    pub verbose: usize, // verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose
    pub on_fail: Option<String>, // command to run when a step fails (`--on-fail`)
    pub stage: Option<u32>, // stage to build (`--stage`); `None` lets the build pick
    pub keep_stage: Option<u32>, // stage to keep without recompiling (`--keep-stage`)
    pub build: String, // build triple of the stage0 compiler (`--build` or `$BUILD`)
    pub host: Vec<String>, // host triples to build (`--host`, comma-separated)
    pub target: Vec<String>, // target triples to build (`--target`, comma-separated)
    pub config: Option<PathBuf>, // TOML config file (`--config`, else ./config.toml if present)
    pub src: Option<PathBuf>, // root of the rust checkout (`--src`)
    pub jobs: Option<u32>, // number of parallel jobs (`-j`/`--jobs`)
    pub cmd: Subcommand, // the subcommand invoked, plus its own arguments
    pub incremental: bool, // whether `-i`/`--incremental` was passed
}

impl Flags {
    /// Whether any verbosity was requested (a single `-v` or more).
    pub fn verbose(&self) -> bool {
        self.verbose >= 1
    }

    /// Whether "very verbose" output was requested (`-vv` or more).
    pub fn very_verbose(&self) -> bool {
        self.verbose >= 2
    }
}

/// Which rustbuild subcommand was invoked, along with its own arguments.
pub enum Subcommand {
    /// `build`: compile the crates/artifacts at `paths` (everything if empty).
    Build {
        paths: Vec<PathBuf>,
    },
    /// `doc`: build documentation for `paths` (everything if empty).
    Doc {
        paths: Vec<PathBuf>,
    },
    /// `test`: build and run test suites; `test_args` comes from `--test-args`.
    Test {
        paths: Vec<PathBuf>,
        test_args: Vec<String>,
    },
    /// `bench`: build and run benchmarks; `test_args` comes from `--test-args`.
    Bench {
        paths: Vec<PathBuf>,
        test_args: Vec<String>,
    },
    /// `clean`: clean out build directories; accepts no positional arguments.
    Clean,
    /// `dist`: build distribution artifacts; `install` is set by `--install`.
    Dist {
        paths: Vec<PathBuf>,
        install: bool,
    },
}

impl Flags {
    /// Parse the command line (minus the program name) into a `Flags`.
    ///
    /// `args[0]` must be the subcommand name; the rest is parsed with
    /// `getopts`. On a parse error, an unknown command, or `-h`/`--help`,
    /// this prints usage information and exits the process, so it does not
    /// return in those cases.
    pub fn parse(args: &[String]) -> Flags {
        let mut opts = Options::new();
        // Options accepted by every subcommand; subcommand-specific options
        // are registered later, just before parsing.
        opts.optflagmulti("v", "verbose", "use verbose output (-vv for very verbose)");
        opts.optflag("i", "incremental", "use incremental compilation");
        opts.optopt("", "config", "TOML configuration file for build", "FILE");
        opts.optopt("", "build", "build target of the stage0 compiler", "BUILD");
        opts.optmulti("", "host", "host targets to build", "HOST");
        opts.optmulti("", "target", "target targets to build", "TARGET");
        opts.optopt("", "on-fail", "command to run on failure", "CMD");
        opts.optopt("", "stage", "stage to build", "N");
        opts.optopt("", "keep-stage", "stage to keep without recompiling", "N");
        opts.optopt("", "src", "path to the root of the rust checkout", "DIR");
        opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
        opts.optflag("h", "help", "print this help message");

        // Print general usage plus subcommand-specific help text, then exit
        // with status `n`. A closure so it can capture `args`; never returns.
        let usage = |n, opts: &Options| -> ! {
            let command = args.get(0).map(|s| &**s);
            let brief = format!("Usage: x.py {} [options] [<args>...]",
                                command.unwrap_or("<command>"));

            println!("{}", opts.usage(&brief));
            match command {
                Some("build") => {
                    println!("\
Arguments:
    This subcommand accepts a number of positional arguments of directories to
    the crates and/or artifacts to compile. For example:

        ./x.py build src/libcore
        ./x.py build src/libproc_macro
        ./x.py build src/libstd --stage 1

    If no arguments are passed then the complete artifacts for that stage are
    also compiled.

        ./x.py build
        ./x.py build --stage 1

    For a quick build with a usable compile, you can pass:

        ./x.py build --stage 1 src/libtest
");
                }

                Some("test") => {
                    println!("\
Arguments:
    This subcommand accepts a number of positional arguments of directories to
    tests that should be compiled and run. For example:

        ./x.py test src/test/run-pass
        ./x.py test src/libstd --test-args hash_map
        ./x.py test src/libstd --stage 0

    If no arguments are passed then the complete artifacts for that stage are
    compiled and tested.

        ./x.py test
        ./x.py test --stage 1
");
                }

                Some("doc") => {
                    println!("\
Arguments:
    This subcommand accepts a number of positional arguments of directories of
    documentation to build. For example:

        ./x.py doc src/doc/book
        ./x.py doc src/doc/nomicon
        ./x.py doc src/libstd

    If no arguments are passed then everything is documented:

        ./x.py doc
        ./x.py doc --stage 1
");
                }

                _ => {}
            }

            // With `-v`, additionally enumerate every build step available to
            // this subcommand; that requires constructing a full `Build`.
            if let Some(command) = command {
                if command == "build" ||
                   command == "dist" ||
                   command == "doc" ||
                   command == "test" ||
                   command == "bench" ||
                   command == "clean"  {
                    println!("Available invocations:");
                    if args.iter().any(|a| a == "-v") {
                        let flags = Flags::parse(&["build".to_string()]);
                        let mut config = Config::default();
                        config.build = flags.build.clone();
                        let mut build = Build::new(flags, config);
                        metadata::build(&mut build);
                        step::build_rules(&build).print_help(command);
                    } else {
                        println!("    ... elided, run `./x.py {} -h -v` to see",
                                 command);
                    }

                    println!("");
                }
            }

println!("\
Subcommands:
    build       Compile either the compiler or libraries
    test        Build and run some test suites
    bench       Build and run some benchmarks
    doc         Build documentation
    clean       Clean out build directories
    dist        Build and/or install distribution artifacts

To learn more about a subcommand, run `./x.py <command> -h`
");

            process::exit(n);
        };
        if args.len() == 0 {
            println!("a command must be passed");
            usage(1, &opts);
        }
        // Run getopts on everything after the subcommand name, bailing out
        // through `usage` on a parse error or when `-h` was given.
        let parse = |opts: &Options| {
            let m = opts.parse(&args[1..]).unwrap_or_else(|e| {
                println!("failed to parse options: {}", e);
                usage(1, opts);
            });
            if m.opt_present("h") {
                usage(0, opts);
            }
            return m
        };

        let cwd = t!(env::current_dir());
        // Free (positional) arguments are paths relative to the current dir.
        let remaining_as_path = |m: &Matches| {
            m.free.iter().map(|p| cwd.join(p)).collect::<Vec<_>>()
        };

        let m: Matches;
        // Dispatch on the subcommand name; some subcommands register extra
        // options on `opts` before calling `parse`.
        let cmd = match &args[0][..] {
            "build" => {
                m = parse(&opts);
                Subcommand::Build { paths: remaining_as_path(&m) }
            }
            "doc" => {
                m = parse(&opts);
                Subcommand::Doc { paths: remaining_as_path(&m) }
            }
            "test" => {
                opts.optmulti("", "test-args", "extra arguments", "ARGS");
                m = parse(&opts);
                Subcommand::Test {
                    paths: remaining_as_path(&m),
                    test_args: m.opt_strs("test-args"),
                }
            }
            "bench" => {
                opts.optmulti("", "test-args", "extra arguments", "ARGS");
                m = parse(&opts);
                Subcommand::Bench {
                    paths: remaining_as_path(&m),
                    test_args: m.opt_strs("test-args"),
                }
            }
            "clean" => {
                m = parse(&opts);
                if m.free.len() > 0 {
                    println!("clean takes no arguments");
                    usage(1, &opts);
                }
                Subcommand::Clean
            }
            "dist" => {
                opts.optflag("", "install", "run installer as well");
                m = parse(&opts);
                Subcommand::Dist {
                    paths: remaining_as_path(&m),
                    install: m.opt_present("install"),
                }
            }
            "--help" => usage(0, &opts),
            cmd => {
                println!("unknown command: {}", cmd);
                usage(1, &opts);
            }
        };


        // `--config` wins; otherwise fall back to ./config.toml if present.
        let cfg_file = m.opt_str("config").map(PathBuf::from).or_else(|| {
            if fs::metadata("config.toml").is_ok() {
                Some(PathBuf::from("config.toml"))
            } else {
                None
            }
        });

        let mut stage = m.opt_str("stage").map(|j| j.parse().unwrap());

        let incremental = m.opt_present("i");

        // Incremental builds default to stage 1 unless a stage was given.
        if incremental {
            if stage.is_none() {
                stage = Some(1);
            }
        }

        Flags {
            verbose: m.opt_count("v"),
            stage: stage,
            on_fail: m.opt_str("on-fail"),
            keep_stage: m.opt_str("keep-stage").map(|j| j.parse().unwrap()),
            build: m.opt_str("build").unwrap_or_else(|| {
                // Without `--build`, the BUILD env var must be set;
                // `.unwrap()` panics otherwise.
                env::var("BUILD").unwrap()
            }),
            host: split(m.opt_strs("host")),
            target: split(m.opt_strs("target")),
            config: cfg_file,
            src: m.opt_str("src").map(PathBuf::from),
            jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()),
            cmd: cmd,
            incremental: incremental,
        }
    }
}

impl Subcommand {
    /// Extra arguments to forward to the test runner, split on whitespace.
    ///
    /// Only the `test` and `bench` subcommands carry `--test-args`; every
    /// other subcommand yields an empty list.
    pub fn test_args(&self) -> Vec<&str> {
        match *self {
            Subcommand::Test { ref test_args, .. } |
            Subcommand::Bench { ref test_args, .. } => {
                let mut forwarded = Vec::new();
                for raw in test_args {
                    forwarded.extend(raw.split_whitespace());
                }
                forwarded
            }
            _ => Vec::new(),
        }
    }
}

/// Split every comma-separated entry of `s` into its own element,
/// e.g. `["a,b", "c"]` becomes `["a", "b", "c"]`.
fn split(s: Vec<String>) -> Vec<String> {
    let mut pieces = Vec::new();
    for entry in &s {
        for piece in entry.split(',') {
            pieces.push(piece.to_string());
        }
    }
    pieces
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




























































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/install.rs version [cfae41ed18].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Implementation of the install aspects of the compiler.
//!
//! This module is responsible for installing the standard library,
//! compiler, and documentation.

use std::env;
use std::fs;
use std::path::{Path, PathBuf, Component};
use std::process::Command;

use Build;
use dist::{sanitize_sh, tmpdir};

/// Installs everything.
///
/// Runs the `install.sh` scripts from the dist tarballs for docs and
/// save-analysis (when enabled in the config), then the standard library and
/// the compiler, honoring the configured prefix/docdir/libdir/mandir and an
/// optional `DESTDIR` staging root.
pub fn install(build: &Build, stage: u32, host: &str) {
    // Conventional Unix defaults, used when the config leaves these unset.
    let prefix_default = PathBuf::from("/usr/local");
    let docdir_default = PathBuf::from("share/doc/rust");
    let mandir_default = PathBuf::from("share/man");
    let libdir_default = PathBuf::from("lib");
    let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
    let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
    let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
    let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);

    // doc/lib/man directories are interpreted relative to the prefix.
    let docdir = prefix.join(docdir);
    let libdir = prefix.join(libdir);
    let mandir = prefix.join(mandir);

    // DESTDIR lets packagers stage the install into a separate root.
    let destdir = env::var_os("DESTDIR").map(PathBuf::from);

    let prefix = add_destdir(&prefix, &destdir);
    let docdir = add_destdir(&docdir, &destdir);
    let libdir = add_destdir(&libdir, &destdir);
    let mandir = add_destdir(&mandir, &destdir);

    // install.sh is invoked from an empty scratch directory (see install_sh);
    // it is created here and removed once all packages are installed.
    let empty_dir = build.out.join("tmp/empty_dir");
    t!(fs::create_dir_all(&empty_dir));
    if build.config.docs {
        install_sh(&build, "docs", "rust-docs", stage, host, &prefix,
                   &docdir, &libdir, &mandir, &empty_dir);
    }
    if build.config.rust_save_analysis {
        install_sh(&build, "analysis", "rust-analysis", stage, host, &prefix,
                   &docdir, &libdir, &mandir, &empty_dir);
    }
    install_sh(&build, "std", "rust-std", stage, host, &prefix,
               &docdir, &libdir, &mandir, &empty_dir);
    install_sh(&build, "rustc", "rustc", stage, host, &prefix,
               &docdir, &libdir, &mandir, &empty_dir);
    t!(fs::remove_dir_all(&empty_dir));
}

/// Run a dist tarball's `install.sh` for one package.
///
/// The script is executed via `sh` from `empty_dir` and pointed at the given
/// prefix/doc/lib/man directories; ldconfig is explicitly left untouched.
fn install_sh(build: &Build, package: &str, name: &str, stage: u32, host: &str,
              prefix: &Path, docdir: &Path, libdir: &Path, mandir: &Path, empty_dir: &Path) {
    println!("Install {} stage{} ({})", package, stage, host);
    let package_name = format!("{}-{}-{}", name, build.rust_package_vers(), host);
    let installer = tmpdir(build).join(&package_name).join("install.sh");

    let mut cmd = Command::new("sh");
    cmd.current_dir(empty_dir);
    cmd.arg(sanitize_sh(&installer));
    cmd.arg(format!("--prefix={}", sanitize_sh(prefix)));
    cmd.arg(format!("--docdir={}", sanitize_sh(docdir)));
    cmd.arg(format!("--libdir={}", sanitize_sh(libdir)));
    cmd.arg(format!("--mandir={}", sanitize_sh(mandir)));
    cmd.arg("--disable-ldconfig");
    build.run(&mut cmd);
}

/// Prepend `destdir` (when present) to `path`.
///
/// Only the `Normal` components of `path` are kept, so absolute roots and
/// prefixes in `path` cannot escape the destination directory. With no
/// `destdir`, `path` is returned unchanged.
fn add_destdir(path: &Path, destdir: &Option<PathBuf>) -> PathBuf {
    let dest = match *destdir {
        Some(ref dest) => dest,
        None => return path.to_path_buf(),
    };
    let mut joined = dest.clone();
    for component in path.components() {
        if let Component::Normal(part) = component {
            joined.push(part);
        }
    }
    joined
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






















































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/job.rs version [beec712824].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Job management on Windows for bootstrapping
//!
//! Most of the time when you're running a build system (e.g. make) you expect
//! Ctrl-C or abnormal termination to actually terminate the entire tree of
//! process in play, not just the one at the top. This currently works "by
//! default" on Unix platforms because Ctrl-C actually sends a signal to the
//! *process group* rather than the parent process, so everything will get torn
//! down. On Windows, however, this does not happen and Ctrl-C just kills the
//! parent process.
//!
//! To achieve the same semantics on Windows we use Job Objects to ensure that
//! all processes die at the same time. Job objects have a mode of operation
//! where when all handles to the object are closed it causes all child
//! processes associated with the object to be terminated immediately.
//! Conveniently whenever a process in the job object spawns a new process the
//! child will be associated with the job object as well. This means if we add
//! ourselves to the job object we create then everything will get torn down!
//!
//! Unfortunately most of the time the build system is actually called from a
//! python wrapper (which manages things like building the build system) so this
//! all doesn't quite cut it so far. To go the last mile we duplicate the job
//! object handle into our parent process (a python process probably) and then
//! close our own handle. This means that the only handle to the job object
//! resides in the parent python process, so when python dies the whole build
//! system dies (as one would probably expect!).
//!
//! Note that this module has a #[cfg(windows)] above it as none of this logic
//! is required on Unix.

#![allow(bad_style, dead_code)]

use std::env;
use std::io;
use std::mem;

// Hand-written mirrors of the Win32 type aliases used by the FFI
// declarations below (avoids a dependency on an external winapi crate).
type HANDLE = *mut u8;
type BOOL = i32;
type DWORD = u32;
type LPHANDLE = *mut HANDLE;
type LPVOID = *mut u8;
type JOBOBJECTINFOCLASS = i32;
type SIZE_T = usize;
type LARGE_INTEGER = i64;
type UINT = u32;
type ULONG_PTR = usize;
type ULONGLONG = u64;

// Win32 constants transcribed from the Windows SDK headers; names and
// values intentionally match the SDK spelling.
const FALSE: BOOL = 0;
const DUPLICATE_SAME_ACCESS: DWORD = 0x2;
const PROCESS_DUP_HANDLE: DWORD = 0x40;
const JobObjectExtendedLimitInformation: JOBOBJECTINFOCLASS = 9;
const JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE: DWORD = 0x2000;
const SEM_FAILCRITICALERRORS: UINT = 0x0001;
const SEM_NOGPFAULTERRORBOX: UINT = 0x0002;

// Raw FFI bindings to the Win32 functions used by `setup`; the `"system"`
// ABI selects stdcall on 32-bit Windows and the C ABI elsewhere.
extern "system" {
    fn CreateJobObjectW(lpJobAttributes: *mut u8, lpName: *const u8) -> HANDLE;
    fn CloseHandle(hObject: HANDLE) -> BOOL;
    fn GetCurrentProcess() -> HANDLE;
    fn OpenProcess(dwDesiredAccess: DWORD,
                   bInheritHandle: BOOL,
                   dwProcessId: DWORD) -> HANDLE;
    fn DuplicateHandle(hSourceProcessHandle: HANDLE,
                       hSourceHandle: HANDLE,
                       hTargetProcessHandle: HANDLE,
                       lpTargetHandle: LPHANDLE,
                       dwDesiredAccess: DWORD,
                       bInheritHandle: BOOL,
                       dwOptions: DWORD) -> BOOL;
    fn AssignProcessToJobObject(hJob: HANDLE, hProcess: HANDLE) -> BOOL;
    fn SetInformationJobObject(hJob: HANDLE,
                               JobObjectInformationClass: JOBOBJECTINFOCLASS,
                               lpJobObjectInformation: LPVOID,
                               cbJobObjectInformationLength: DWORD) -> BOOL;
    fn SetErrorMode(mode: UINT) -> UINT;
}

// The three structs below mirror the corresponding Windows SDK layouts
// (`#[repr(C)]` keeps field order and padding C-compatible); field names
// keep the SDK spelling, hence the `allow(bad_style)` on this module.
#[repr(C)]
struct JOBOBJECT_EXTENDED_LIMIT_INFORMATION {
    BasicLimitInformation: JOBOBJECT_BASIC_LIMIT_INFORMATION,
    IoInfo: IO_COUNTERS,
    ProcessMemoryLimit: SIZE_T,
    JobMemoryLimit: SIZE_T,
    PeakProcessMemoryUsed: SIZE_T,
    PeakJobMemoryUsed: SIZE_T,
}

#[repr(C)]
struct IO_COUNTERS {
    ReadOperationCount: ULONGLONG,
    WriteOperationCount: ULONGLONG,
    OtherOperationCount: ULONGLONG,
    ReadTransferCount: ULONGLONG,
    WriteTransferCount: ULONGLONG,
    OtherTransferCount: ULONGLONG,
}

#[repr(C)]
struct JOBOBJECT_BASIC_LIMIT_INFORMATION {
    PerProcessUserTimeLimit: LARGE_INTEGER,
    PerJobUserTimeLimit: LARGE_INTEGER,
    LimitFlags: DWORD,
    MinimumWorkingsetSize: SIZE_T,
    MaximumWorkingsetSize: SIZE_T,
    ActiveProcessLimit: DWORD,
    Affinity: ULONG_PTR,
    PriorityClass: DWORD,
    SchedulingClass: DWORD,
}

/// Configure Windows error modes and the job object for this build process.
///
/// Suppresses error-dialog UI, creates a kill-on-close job object, assigns
/// the current process to it, and — when `BOOTSTRAP_PARENT_ID` is set —
/// duplicates the job handle into that parent process so the whole process
/// tree dies with the parent (see the module docs above).
///
/// # Safety
///
/// Calls raw Win32 APIs and dereferences no pointers itself, but relies on
/// the FFI declarations above matching the real kernel32 signatures.
pub unsafe fn setup() {
    // Tell Windows to not show any UI on errors (such as not finding a required dll
    // during startup or terminating abnormally).  This is important for running tests,
    // since some of them use abnormal termination by design.
    // This mode is inherited by all child processes.
    let mode = SetErrorMode(SEM_NOGPFAULTERRORBOX); // read inherited flags
    SetErrorMode(mode | SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX);

    // Create a new job object for us to use
    let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
    assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());

    // Indicate that when all handles to the job object are gone that all
    // process in the object should be killed. Note that this includes our
    // entire process tree by default because we've added ourselves and our
    // children will reside in the job by default.
    let mut info = mem::zeroed::<JOBOBJECT_EXTENDED_LIMIT_INFORMATION>();
    info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
    let r = SetInformationJobObject(job,
                                    JobObjectExtendedLimitInformation,
                                    &mut info as *mut _ as LPVOID,
                                    mem::size_of_val(&info) as DWORD);
    assert!(r != 0, "{}", io::Error::last_os_error());

    // Assign our process to this job object. Note that if this fails, one very
    // likely reason is that we are ourselves already in a job object! This can
    // happen on the build bots that we've got for Windows, or if just anyone
    // else is instrumenting the build. In this case we just bail out
    // immediately and assume that they take care of it.
    //
    // Also note that nested jobs (why this might fail) are supported in recent
    // versions of Windows, but the version of Windows that our bots are running
    // at least don't support nested job objects.
    let r = AssignProcessToJobObject(job, GetCurrentProcess());
    if r == 0 {
        CloseHandle(job);
        return
    }

    // If we've got a parent process (e.g. the python script that called us)
    // then move ownership of this job object up to them. That way if the python
    // script is killed (e.g. via ctrl-c) then we'll all be torn down.
    //
    // If we don't have a parent (e.g. this was run directly) then we
    // intentionally leak the job object handle. When our process exits
    // (normally or abnormally) it will close the handle implicitly, causing all
    // processes in the job to be cleaned up.
    let pid = match env::var("BOOTSTRAP_PARENT_ID") {
        Ok(s) => s,
        Err(..) => return,
    };

    let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap());
    assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error());
    let mut parent_handle = 0 as *mut _;
    let r = DuplicateHandle(GetCurrentProcess(), job,
                            parent, &mut parent_handle,
                            0, FALSE, DUPLICATE_SAME_ACCESS);

    // If this failed, well at least we tried! An example of DuplicateHandle
    // failing in the past has been when the wrong python2 package spawed this
    // build system (e.g. the `python2` package in MSYS instead of
    // `mingw-w64-x86_64-python2`. Not sure why it failed, but the "failure
    // mode" here is that we only clean everything up when the build system
    // dies, not when the python parent does, so not too bad.
    if r != 0 {
        CloseHandle(job);
    }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


























































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/lib.rs version [6b3eeb7492].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Implementation of rustbuild, the Rust build system.
//!
//! This module, and its descendants, are the implementation of the Rust build
//! system. Most of this build system is backed by Cargo but the outer layer
//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
//! builds, building artifacts like LLVM, etc. The goals of rustbuild are:
//!
//! * To be an easily understandable, easily extensible, and maintainable build
//!   system.
//! * Leverage standard tools in the Rust ecosystem to build the compiler, aka
//!   crates.io and Cargo.
//! * A standard interface to build across all platforms, including MSVC
//!
//! ## Architecture
//!
//! Although this build system defers most of the complicated logic to Cargo
//! itself, it still needs to maintain a list of targets and dependencies which
//! it can itself perform. Rustbuild is made up of a list of rules with
//! dependencies amongst them (created in the `step` module) and then knows how
//! to execute each in sequence. Each time rustbuild is invoked, it will simply
//! iterate through this list of steps and execute each serially in turn.  For
//! each step rustbuild relies on the step internally being incremental and
//! parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded
//! to appropriate test harnesses and such.
//!
//! Most of the "meaty" steps that matter are backed by Cargo, which does indeed
//! have its own parallelism and incremental management. Later steps, like
//! tests, aren't incremental and simply run the entire suite currently.
//!
//! When you execute `x.py build`, the steps which are executed are:
//!
//! * First, the python script is run. This will automatically download the
//!   stage0 rustc and cargo according to `src/stage0.txt`, or using the cached
//!   versions if they're available. These are then used to compile rustbuild
//!   itself (using Cargo). Finally, control is then transferred to rustbuild.
//!
//! * Rustbuild takes over, performs sanity checks, probes the environment,
//!   reads configuration, builds up a list of steps, and then starts executing
//!   them.
//!
//! * The stage0 libstd is compiled
//! * The stage0 libtest is compiled
//! * The stage0 librustc is compiled
//! * The stage1 compiler is assembled
//! * The stage1 libstd, libtest, librustc are compiled
//! * The stage2 compiler is assembled
//! * The stage2 libstd, libtest, librustc are compiled
//!
//! Each step is driven by a separate Cargo project and rustbuild orchestrates
//! copying files between steps and otherwise preparing for Cargo to run.
//!
//! ## Further information
//!
//! More documentation can be found in each respective module below, and you can
//! also check out the `src/bootstrap/README.md` file for more information.

#![deny(warnings)]

#[macro_use]
extern crate build_helper;
extern crate cmake;
extern crate filetime;
extern crate gcc;
extern crate getopts;
extern crate num_cpus;
extern crate rustc_serialize;
extern crate toml;

use std::cmp;
use std::collections::HashMap;
use std::env;
use std::ffi::OsString;
use std::fs::{self, File};
use std::io::Read;
use std::path::{Component, PathBuf, Path};
use std::process::Command;

use build_helper::{run_silent, run_suppressed, output, mtime};

use util::{exe, libdir, add_lib_path};

mod cc;
mod channel;
mod check;
mod clean;
mod compile;
mod metadata;
mod config;
mod dist;
mod doc;
mod flags;
mod install;
mod native;
mod sanity;
mod step;
pub mod util;

#[cfg(windows)]
mod job;

#[cfg(not(windows))]
mod job {
    /// No-op stub so callers can unconditionally call `job::setup()`; the
    /// Windows-only `job` module above provides the real implementation.
    /// `unsafe` only to keep the signature identical across platforms.
    pub unsafe fn setup() {}
}

pub use config::Config;
pub use flags::{Flags, Subcommand};

/// A structure representing a Rust compiler.
///
/// Each compiler has a `stage` that it is associated with and a `host` that
/// corresponds to the platform the compiler runs on. This structure is used as
/// a parameter to many methods below.
#[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)]
pub struct Compiler<'a> {
    /// Bootstrap stage this compiler belongs to (e.g. 0 for the downloaded
    /// stage0 compiler, per the module docs above).
    stage: u32,
    /// Target triple of the platform this compiler runs on.
    host: &'a str,
}

/// Global configuration for the build system.
///
/// This structure transitively contains all configuration for the build system.
/// All filesystem-encoded configuration is in `config`, all flags are in
/// `flags`, and then parsed or probed information is listed in the keys below.
///
/// This structure is a parameter of almost all methods in the build system,
/// although most functions are implemented as free functions rather than
/// methods specifically on this structure itself (to make it easier to
/// organize).
pub struct Build {
    // User-specified configuration via config.toml
    config: Config,

    // User-specified configuration via CLI flags
    flags: Flags,

    // Derived properties from the above two configurations

    /// Path to the `cargo` binary used to drive builds (stage0 copy unless
    /// overridden in `config`).
    cargo: PathBuf,
    /// Path to the bootstrap `rustc` binary (stage0 copy unless overridden).
    rustc: PathBuf,
    /// Root of the Rust source tree being built.
    src: PathBuf,
    /// Directory all build output is placed into (`<cwd>/build`).
    out: PathBuf,
    /// Git information for the Rust repository itself.
    rust_info: channel::GitInfo,
    /// Git information for the bundled `cargo` submodule/checkout.
    cargo_info: channel::GitInfo,
    /// Whether this is a rebuild bootstrapped from a locally built compiler.
    local_rebuild: bool,

    // Probed tools at runtime

    /// Detected lldb version string, if lldb was found.
    lldb_version: Option<String>,
    /// Directory of lldb's python bindings, if probed successfully.
    lldb_python_dir: Option<String>,

    // Runtime state filled in later on

    /// Per-target C compiler plus (optionally) the corresponding `ar` tool.
    cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
    /// Per-target C++ compiler.
    cxx: HashMap<String, gcc::Tool>,
    /// Crates in the workspace, keyed by crate name.
    crates: HashMap<String, Crate>,
    /// True when running under `sudo` as a different user (see `Build::new`).
    is_sudo: bool,
}

/// Per-crate metadata tracked by the build system.
///
/// Presumably populated from Cargo metadata by the `metadata` module declared
/// above — TODO(review): confirm against that module.
#[derive(Debug)]
struct Crate {
    /// Crate name as it appears in its manifest.
    name: String,
    /// Crate version string.
    version: String,
    /// Names of crates this crate depends on.
    deps: Vec<String>,
    /// Filesystem path to the crate's source.
    path: PathBuf,
    // The four fields below look like names of rustbuild steps associated
    // with this crate (doc/build/test/bench) — verify in the `step` module.
    doc_step: String,
    build_step: String,
    test_step: String,
    bench_step: String,
}

/// The various "modes" of invoking Cargo.
///
/// These entries currently correspond to the various output directories of the
/// build system, with each mode generating output in a different directory.
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum Mode {
    /// This cargo is going to build the standard library, placing output in the
    /// "stageN-std" directory.
    Libstd,

    /// This cargo is going to build libtest, placing output in the
    /// "stageN-test" directory.
    Libtest,

    /// This cargo is going to build librustc and compiler libraries, placing
    /// output in the "stageN-rustc" directory.
    Librustc,

    /// This cargo is going to build some build tool, placing output in the
    /// "stageN-tools" directory.
    Tool,
}

impl Build {
    /// Creates a new set of build configuration from the `flags` on the command
    /// line and the filesystem `config`.
    ///
    /// By default all build output will be placed in the current directory.
    pub fn new(flags: Flags, config: Config) -> Build {
        let cwd = t!(env::current_dir());
        // Source root: `--src` flag first, then the $SRC env var, then cwd.
        let src = flags.src.clone().or_else(|| {
            env::var_os("SRC").map(|x| x.into())
        }).unwrap_or(cwd.clone());
        let out = cwd.join("build");

        // Stage0 binaries live in build/<triple>/stage0/bin unless explicit
        // rustc/cargo paths were configured.
        let stage0_root = out.join(&config.build).join("stage0/bin");
        let rustc = match config.rustc {
            Some(ref s) => PathBuf::from(s),
            None => stage0_root.join(exe("rustc", &config.build)),
        };
        let cargo = match config.cargo {
            Some(ref s) => PathBuf::from(s),
            None => stage0_root.join(exe("cargo", &config.build)),
        };
        let local_rebuild = config.local_rebuild;

        // Running under sudo when $SUDO_USER is set and differs from $USER;
        // used later to pass `--frozen` to cargo (see `cargo()`).
        let is_sudo = match env::var_os("SUDO_USER") {
            Some(sudo_user) => {
                match env::var_os("USER") {
                    Some(user) => user != sudo_user,
                    None => false,
                }
            }
            None => false,
        };
        let rust_info = channel::GitInfo::new(&src);
        let cargo_info = channel::GitInfo::new(&src.join("cargo"));

        Build {
            flags: flags,
            config: config,
            cargo: cargo,
            rustc: rustc,
            src: src,
            out: out,

            rust_info: rust_info,
            cargo_info: cargo_info,
            local_rebuild: local_rebuild,
            cc: HashMap::new(),
            cxx: HashMap::new(),
            crates: HashMap::new(),
            lldb_version: None,
            lldb_python_dir: None,
            is_sudo: is_sudo,
        }
    }

    /// Executes the entire build, as configured by the flags and configuration.
    pub fn build(&mut self) {
        unsafe {
            job::setup();
        }

        // `clean` short-circuits the whole pipeline below.
        if let Subcommand::Clean = self.flags.cmd {
            return clean::clean(self);
        }

        self.verbose("finding compilers");
        cc::find(self);
        self.verbose("running sanity check");
        sanity::check(self);
        // If local-rust is the same major.minor as the current version, then force a local-rebuild
        let local_version_verbose = output(
            Command::new(&self.rustc).arg("--version").arg("--verbose"));
        // `find` instead of `filter(..).next()` — same result, idiomatic form.
        let local_release = local_version_verbose
            .lines().find(|x| x.starts_with("release:"))
            .unwrap().trim_left_matches("release:").trim();
        let my_version = channel::CFG_RELEASE_NUM;
        if local_release.split('.').take(2).eq(my_version.split('.').take(2)) {
            self.verbose(&format!("auto-detected local-rebuild {}", local_release));
            self.local_rebuild = true;
        }
        self.verbose("updating submodules");
        self.update_submodules();
        self.verbose("learning about cargo");
        metadata::build(self);

        step::run(self);
    }

    /// Updates all git submodules that we have.
    ///
    /// This will detect if any submodules are out of date and run the necessary
    /// commands to sync them all with upstream.
    fn update_submodules(&self) {
        struct Submodule<'a> {
            path: &'a Path,
            state: State,
        }

        enum State {
            // The submodule may have staged/unstaged changes
            MaybeDirty,
            // Or could be initialized but never updated
            NotInitialized,
            // The submodule, itself, has extra commits but those changes haven't been committed to
            // the (outer) git repository
            OutOfSync,
        }

        if !self.config.submodules {
            return
        }
        // Not a git checkout (e.g. a source tarball): nothing to update.
        if fs::metadata(self.src.join(".git")).is_err() {
            return
        }
        let git = || {
            let mut cmd = Command::new("git");
            cmd.current_dir(&self.src);
            return cmd
        };
        let git_submodule = || {
            let mut cmd = Command::new("git");
            cmd.current_dir(&self.src).arg("submodule");
            return cmd
        };

        // FIXME: this takes a seriously long time to execute on Windows and a
        //        nontrivial amount of time on Unix, we should have a better way
        //        of detecting whether we need to run all the submodule commands
        //        below.
        let out = output(git_submodule().arg("status"));
        let mut submodules = vec![];
        for line in out.lines() {
            // NOTE `git submodule status` output looks like this:
            //
            // -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc
            // +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..)
            //  e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6)
            //
            // The first character can be '-', '+' or ' ' and denotes the `State` of the submodule
            // Right next to this character is the SHA-1 of the submodule HEAD
            // And after that comes the path to the submodule
            let path = Path::new(line[1..].split(' ').nth(1).unwrap());
            let state = if line.starts_with('-') {
                State::NotInitialized
            } else if line.starts_with('+') {
                State::OutOfSync
            } else if line.starts_with(' ') {
                State::MaybeDirty
            } else {
                panic!("unexpected git submodule state: {:?}", line.chars().next());
            };

            submodules.push(Submodule { path: path, state: state })
        }

        self.run(git_submodule().arg("sync"));

        for submodule in submodules {
            // If using llvm-root then don't touch the llvm submodule.
            if submodule.path.components().any(|c| c == Component::Normal("llvm".as_ref())) &&
                self.config.target_config.get(&self.config.build)
                    .and_then(|c| c.llvm_config.as_ref()).is_some()
            {
                continue
            }

            if submodule.path.components().any(|c| c == Component::Normal("jemalloc".as_ref())) &&
                !self.config.use_jemalloc
            {
                continue
            }

            // `submodule.path` is the relative path to a submodule (from the repository root)
            // `submodule_path` is the path to a submodule from the cwd

            // use `submodule.path` when e.g. executing a submodule specific command from the
            // repository root
            // use `submodule_path` when e.g. executing a normal git command for the submodule
            // (set via `current_dir`)
            let submodule_path = self.src.join(submodule.path);

            match submodule.state {
                State::MaybeDirty => {
                    // drop staged changes
                    self.run(git().current_dir(&submodule_path)
                                  .args(&["reset", "--hard"]));
                    // drops unstaged changes
                    self.run(git().current_dir(&submodule_path)
                                  .args(&["clean", "-fdx"]));
                },
                State::NotInitialized => {
                    self.run(git_submodule().arg("init").arg(submodule.path));
                    self.run(git_submodule().arg("update").arg(submodule.path));
                },
                State::OutOfSync => {
                    // drops submodule commits that weren't reported to the (outer) git repository
                    self.run(git_submodule().arg("update").arg(submodule.path));
                    self.run(git().current_dir(&submodule_path)
                                  .args(&["reset", "--hard"]));
                    self.run(git().current_dir(&submodule_path)
                                  .args(&["clean", "-fdx"]));
                },
            }
        }
    }

    /// Clear out `dir` if `input` is newer.
    ///
    /// After this executes, it will also ensure that `dir` exists.
    fn clear_if_dirty(&self, dir: &Path, input: &Path) {
        let stamp = dir.join(".stamp");
        // NOTE(review): `mtime` is a project helper; presumably it returns a
        // zero/epoch timestamp for a missing stamp so a fresh `dir` always
        // counts as dirty — confirm against its definition.
        if mtime(&stamp) < mtime(input) {
            self.verbose(&format!("Dirty - {}", dir.display()));
            // Ignore removal errors (the directory may not exist yet).
            let _ = fs::remove_dir_all(dir);
        } else if stamp.exists() {
            // Stamp is present and up to date: nothing to do.
            return
        }
        t!(fs::create_dir_all(dir));
        t!(File::create(stamp));
    }

    /// Prepares an invocation of `cargo` to be run.
    ///
    /// This will create a `Command` that represents a pending execution of
    /// Cargo. This cargo will be configured to use `compiler` as the actual
    /// rustc compiler, its output will be scoped by `mode`'s output directory,
    /// it will pass the `--target` flag for the specified `target`, and will be
    /// executing the Cargo command `cmd`.
    fn cargo(&self,
             compiler: &Compiler,
             mode: Mode,
             target: &str,
             cmd: &str) -> Command {
        let mut cargo = Command::new(&self.cargo);
        let out_dir = self.stage_out(compiler, mode);
        cargo.env("CARGO_TARGET_DIR", out_dir)
             .arg(cmd)
             .arg("-j").arg(self.jobs().to_string())
             .arg("--target").arg(target);

        // FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005
        // Force cargo to output binaries with disambiguating hashes in the name
        cargo.env("__CARGO_DEFAULT_LIB_METADATA", "1");

        let stage;
        if compiler.stage == 0 && self.local_rebuild {
            // Assume the local-rebuild rustc already has stage1 features.
            stage = 1;
        } else {
            stage = compiler.stage;
        }

        // Customize the compiler we're running. Specify the compiler to cargo
        // as our shim and then pass it some various options used to configure
        // how the actual compiler itself is called.
        //
        // These variables are primarily all read by
        // src/bootstrap/bin/{rustc.rs,rustdoc.rs}
        cargo.env("RUSTBUILD_NATIVE_DIR", self.native_dir(target))
             .env("RUSTC", self.out.join("bootstrap/debug/rustc"))
             .env("RUSTC_REAL", self.compiler_path(compiler))
             .env("RUSTC_STAGE", stage.to_string())
             .env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
             .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string())
             .env("RUSTC_CODEGEN_UNITS",
                  self.config.rust_codegen_units.to_string())
             .env("RUSTC_DEBUG_ASSERTIONS",
                  self.config.rust_debug_assertions.to_string())
             .env("RUSTC_SYSROOT", self.sysroot(compiler))
             .env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
             .env("RUSTC_RPATH", self.config.rust_rpath.to_string())
             .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
             .env("RUSTDOC_REAL", self.rustdoc(compiler))
             .env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));

        // Enable usage of unstable features
        cargo.env("RUSTC_BOOTSTRAP", "1");
        self.add_rust_test_threads(&mut cargo);

        // Almost all of the crates that we compile as part of the bootstrap may
        // have a build script, including the standard library. To compile a
        // build script, however, it itself needs a standard library! This
        // introduces a bit of a pickle when we're compiling the standard
        // library itself.
        //
        // To work around this we actually end up using the snapshot compiler
        // (stage0) for compiling build scripts of the standard library itself.
        // The stage0 compiler is guaranteed to have a libstd available for use.
        //
        // For other crates, however, we know that we've already got a standard
        // library up and running, so we can use the normal compiler to compile
        // build scripts in that situation.
        if mode == Mode::Libstd {
            cargo.env("RUSTC_SNAPSHOT", &self.rustc)
                 .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir());
        } else {
            cargo.env("RUSTC_SNAPSHOT", self.compiler_path(compiler))
                 .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler));
        }

        // There are two invariants we must try to maintain:
        // * stable crates cannot depend on unstable crates (general Rust rule),
        // * crates that end up in the sysroot must be unstable (rustbuild rule).
        //
        // In order to enforce the latter, we pass the env var
        // `RUSTBUILD_UNSTABLE` down the line for any crates which will end up
        // in the sysroot. We read this in bootstrap/bin/rustc.rs and if it is
        // set, then we pass the `rustbuild` feature to rustc when building
        // the crate.
        //
        // In turn, crates that can be used here should recognise the `rustbuild`
        // feature and opt-in to `rustc_private`.
        //
        // We can't always pass `rustbuild` because crates which are outside of
        // the compiler, libs, and tests are stable and we don't want to make
        // their deps unstable (since this would break the first invariant
        // above).
        if mode != Mode::Tool {
            cargo.env("RUSTBUILD_UNSTABLE", "1");
        }

        // Ignore incremental modes except for stage0, since we're
        // not guaranteeing correctness across builds if the compiler
        // is changing under your feet.
        if self.flags.incremental && compiler.stage == 0 {
            let incr_dir = self.incremental_dir(compiler);
            cargo.env("RUSTC_INCREMENTAL", incr_dir);
        }

        if let Some(ref on_fail) = self.flags.on_fail {
            cargo.env("RUSTC_ON_FAIL", on_fail);
        }

        let verbose = cmp::max(self.config.verbose, self.flags.verbose);
        cargo.env("RUSTC_VERBOSE", format!("{}", verbose));

        // Specify some various options for build scripts used throughout
        // the build.
        //
        // FIXME: the guard against msvc shouldn't need to be here
        if !target.contains("msvc") {
            cargo.env(format!("CC_{}", target), self.cc(target))
                 .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None
                 .env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
        }

        if self.config.rust_save_analysis && compiler.is_final_stage(self) {
            cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string());
        }

        // Environment variables *required* throughout the build
        //
        // FIXME: should update code to not require this env var
        cargo.env("CFG_COMPILER_HOST_TRIPLE", target);

        if self.config.verbose() || self.flags.verbose() {
            cargo.arg("-v");
        }
        // FIXME: cargo bench does not accept `--release`
        if self.config.rust_optimize && cmd != "bench" {
            cargo.arg("--release");
        }
        if self.config.locked_deps {
            cargo.arg("--locked");
        }
        // Under sudo (or with vendored sources) never touch the network or
        // the lockfile.
        if self.config.vendor || self.is_sudo {
            cargo.arg("--frozen");
        }
        return cargo
    }

    /// Returns the filesystem path of the `rustc` executable for `compiler`:
    /// the snapshot rustc itself, or the one inside the generated sysroot.
    fn compiler_path(&self, compiler: &Compiler) -> PathBuf {
        match compiler.is_snapshot(self) {
            true => self.rustc.clone(),
            false => {
                let rustc_exe = exe("rustc", compiler.host);
                self.sysroot(compiler).join("bin").join(rustc_exe)
            }
        }
    }

    /// Get the specified tool built by the specified compiler
    fn tool(&self, compiler: &Compiler, tool: &str) -> PathBuf {
        let tools_dir = self.cargo_out(compiler, Mode::Tool, compiler.host);
        tools_dir.join(exe(tool, compiler.host))
    }

    /// Get the `rustdoc` executable next to the specified compiler
    fn rustdoc(&self, compiler: &Compiler) -> PathBuf {
        // rustdoc sits in the same directory as rustc itself.
        let mut path = self.compiler_path(compiler);
        path.pop();
        path.push(exe("rustdoc", compiler.host));
        path
    }

    /// Get a `Command` which is ready to run the given `tool` built by
    /// `compiler`, with library lookup paths already configured.
    fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
        let mut command = Command::new(self.tool(compiler, tool));
        self.prepare_tool_cmd(compiler, &mut command);
        command
    }

    /// Prepares the `cmd` provided to be able to run the `compiler` provided.
    ///
    /// Notably this munges the dynamic library lookup path to point to the
    /// right location to run `compiler`.
    fn prepare_tool_cmd(&self, compiler: &Compiler, cmd: &mut Command) {
        let host = compiler.host;
        let mut paths = vec![
            self.sysroot_libdir(compiler, compiler.host),
            self.cargo_out(compiler, Mode::Tool, host).join("deps"),
        ];

        // On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make
        // mode) and that C compiler may need some extra PATH modification. Do
        // so here.
        if compiler.host.contains("msvc") {
            // `unwrap_or_else` avoids allocating an OsString when PATH is set.
            let curpaths = env::var_os("PATH").unwrap_or_else(OsString::new);
            let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
            for &(ref k, ref v) in self.cc[compiler.host].0.env() {
                if k != "PATH" {
                    continue
                }
                for path in env::split_paths(v) {
                    if !curpaths.contains(&path) {
                        paths.push(path);
                    }
                }
            }
        }
        add_lib_path(paths, cmd);
    }

    /// Get the space-separated set of activated features for the standard
    /// library.
    fn std_features(&self) -> String {
        // Collect the enabled feature names, then join once at the end.
        let mut parts = vec!["panic-unwind"];

        if self.config.debug_jemalloc {
            parts.push("debug-jemalloc");
        }
        if self.config.use_jemalloc {
            parts.push("jemalloc");
        }
        if self.config.backtrace {
            parts.push("backtrace");
        }
        parts.join(" ")
    }

    /// Get the space-separated set of activated features for the compiler.
    fn rustc_features(&self) -> String {
        // The only optional compiler feature today is jemalloc; note the
        // leading space matches the historical push_str formatting.
        if self.config.use_jemalloc {
            " jemalloc".to_string()
        } else {
            String::new()
        }
    }

    /// Component directory that Cargo will produce output into (e.g.
    /// release/debug)
    fn cargo_dir(&self) -> &'static str {
        match self.config.rust_optimize {
            true => "release",
            false => "debug",
        }
    }

    /// Returns the sysroot for the `compiler` specified that *this build system
    /// generates*.
    ///
    /// That is, the sysroot for the stage0 compiler is not what the compiler
    /// thinks it is by default, but it's the same as the default for stages
    /// 1-3.
    fn sysroot(&self, compiler: &Compiler) -> PathBuf {
        let dir_name = match compiler.stage {
            0 => "stage0-sysroot".to_string(),
            n => format!("stage{}", n),
        };
        self.out.join(compiler.host).join(dir_name)
    }

    /// Get the directory for incremental by-products when using the
    /// given compiler.
    fn incremental_dir(&self, compiler: &Compiler) -> PathBuf {
        let dir_name = format!("stage{}-incremental", compiler.stage);
        self.out.join(compiler.host).join(dir_name)
    }

    /// Returns the libdir where the standard library and other artifacts are
    /// found for a compiler's sysroot.
    fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf {
        // <sysroot>/lib/rustlib/<target>/lib
        let mut libdir = self.sysroot(compiler);
        libdir.push("lib");
        libdir.push("rustlib");
        libdir.push(target);
        libdir.push("lib");
        libdir
    }

    /// Returns the root directory for all output generated in a particular
    /// stage when running with a particular host compiler.
    ///
    /// The mode indicates what the root directory is for.
    fn stage_out(&self, compiler: &Compiler, mode: Mode) -> PathBuf {
        let kind = match mode {
            Mode::Libstd => "std",
            Mode::Libtest => "test",
            Mode::Tool => "tools",
            Mode::Librustc => "rustc",
        };
        self.out.join(compiler.host)
                .join(format!("stage{}-{}", compiler.stage, kind))
    }

    /// Returns the root output directory for all Cargo output in a given stage,
    /// running a particular compiler, whether or not we're building the
    /// standard library, and targeting the specified architecture.
    fn cargo_out(&self,
                 compiler: &Compiler,
                 mode: Mode,
                 target: &str) -> PathBuf {
        self.stage_out(compiler, mode).join(target).join(self.cargo_dir())
    }

    /// Root output directory for LLVM compiled for `target`
    ///
    /// Note that if LLVM is configured externally then the directory returned
    /// will likely be empty.
    fn llvm_out(&self, target: &str) -> PathBuf {
        let mut dir = self.out.join(target);
        dir.push("llvm");
        dir
    }

    /// Output directory for all documentation for a target
    fn doc_out(&self, target: &str) -> PathBuf {
        let mut dir = self.out.join(target);
        dir.push("doc");
        dir
    }

    /// Output directory for all crate documentation for a target (temporary)
    ///
    /// The artifacts here are then copied into `doc_out` above.
    fn crate_doc_out(&self, target: &str) -> PathBuf {
        let mut dir = self.out.join(target);
        dir.push("crate-docs");
        dir
    }

    /// Returns true if no custom `llvm-config` is set for the specified target.
    ///
    /// If no custom `llvm-config` was specified then Rust's llvm will be used.
    fn is_rust_llvm(&self, target: &str) -> bool {
        match self.config.target_config.get(target) {
            Some(ref c) => c.llvm_config.is_none(),
            None => true
        }
    }

    /// Returns the path to `llvm-config` for the specified target.
    ///
    /// If a custom `llvm-config` was specified for target then that's returned
    /// instead.
    fn llvm_config(&self, target: &str) -> PathBuf {
        let custom = self.config.target_config.get(target)
            .and_then(|c| c.llvm_config.as_ref());
        match custom {
            Some(path) => path.clone(),
            None => self.llvm_out(&self.config.build).join("bin")
                        .join(exe("llvm-config", target)),
        }
    }

    /// Returns the path to `FileCheck` binary for the specified target
    fn llvm_filecheck(&self, target: &str) -> PathBuf {
        let target_config = self.config.target_config.get(target);
        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
            // External LLVM: ask its llvm-config where the binaries live.
            let llvm_bindir = output(Command::new(s).arg("--bindir"));
            Path::new(llvm_bindir.trim()).join(exe("FileCheck", target))
        } else {
            // In-tree LLVM: FileCheck stays in the CMake build directory.
            let base = self.llvm_out(&self.config.build).join("build");
            let exe = exe("FileCheck", target);
            // MSVC generators (without ninja) place binaries under Release/bin.
            if !self.config.ninja && self.config.build.contains("msvc") {
                base.join("Release/bin").join(exe)
            } else {
                base.join("bin").join(exe)
            }
        }
    }

    /// Directory for libraries built from C/C++ code and shared between stages.
    fn native_dir(&self, target: &str) -> PathBuf {
        let mut dir = self.out.join(target);
        dir.push("native");
        dir
    }

    /// Root output directory for rust_test_helpers library compiled for
    /// `target`
    fn test_helpers_out(&self, target: &str) -> PathBuf {
        let mut dir = self.native_dir(target);
        dir.push("rust-test-helpers");
        dir
    }

    /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
    /// library lookup path.
    fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) {
        // Windows doesn't need dylib path munging because the dlls for the
        // compiler live next to the compiler and the system will find them
        // automatically.
        if cfg!(windows) {
            return
        }

        add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
    }

    /// Adds the `RUST_TEST_THREADS` env var if necessary
    fn add_rust_test_threads(&self, cmd: &mut Command) {
        // Respect a value already present in the environment.
        if env::var_os("RUST_TEST_THREADS").is_some() {
            return
        }
        cmd.env("RUST_TEST_THREADS", self.jobs().to_string());
    }

    /// Returns the compiler's libdir where it stores the dynamic libraries that
    /// it itself links against.
    ///
    /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on
    /// Windows.
    fn rustc_libdir(&self, compiler: &Compiler) -> PathBuf {
        match compiler.is_snapshot(self) {
            true => self.rustc_snapshot_libdir(),
            false => self.sysroot(compiler).join(libdir(compiler.host)),
        }
    }

    /// Returns the libdir of the snapshot compiler.
    fn rustc_snapshot_libdir(&self) -> PathBuf {
        // rustc lives in <root>/bin, so hop up two levels to the root.
        let bin_dir = self.rustc.parent().unwrap();
        let root = bin_dir.parent().unwrap();
        root.join(libdir(&self.config.build))
    }

    /// Runs a command, printing out nice contextual information if it fails.
    ///
    /// Delegates to the free function `run_silent` after logging the command
    /// in verbose mode.
    fn run(&self, cmd: &mut Command) {
        self.verbose(&format!("running: {:?}", cmd));
        run_silent(cmd)
    }

    /// Runs a command, printing out nice contextual information if it fails.
    ///
    /// Unlike `run`, delegates to `run_suppressed` — presumably hiding the
    /// command's own output on success (NOTE(review): confirm against
    /// `run_suppressed`'s definition).
    fn run_quiet(&self, cmd: &mut Command) {
        self.verbose(&format!("running: {:?}", cmd));
        run_suppressed(cmd)
    }

    /// Prints a message if this build is configured in verbose mode.
    fn verbose(&self, msg: &str) {
        let enabled = self.flags.verbose() || self.config.verbose();
        if enabled {
            println!("{}", msg);
        }
    }

    /// Returns the number of parallel jobs that have been configured for this
    /// build.
    fn jobs(&self) -> u32 {
        match self.flags.jobs {
            Some(n) => n,
            // Default to one job per logical CPU.
            None => num_cpus::get() as u32,
        }
    }

    /// Returns the path to the C compiler for the target specified.
    ///
    /// Panics (via the map index) if no compiler entry exists for `target`.
    fn cc(&self, target: &str) -> &Path {
        self.cc[target].0.path()
    }

    /// Returns a list of flags to pass to the C compiler for the target
    /// specified.
    fn cflags(&self, target: &str) -> Vec<String> {
        // Filter out -O and /O (the optimization flags) that we picked up from
        // gcc-rs because the build scripts will determine that for themselves.
        let mut flags = Vec::new();
        for arg in self.cc[target].0.args() {
            let flag = arg.to_string_lossy().into_owned();
            if flag.starts_with("-O") || flag.starts_with("/O") {
                continue
            }
            flags.push(flag);
        }

        // If we're compiling on macOS then we add a few unconditional flags
        // indicating that we want libc++ (more filled out than libstdc++) and
        // we want to compile for 10.7. This way we can ensure that
        // LLVM/jemalloc/etc are all properly compiled.
        if target.contains("apple-darwin") {
            flags.push("-stdlib=libc++".into());
        }
        flags
    }

    /// Returns the path to the `ar` archive utility for the target specified.
    fn ar(&self, target: &str) -> Option<&Path> {
        self.cc[target].1.as_ref().map(|p| p.as_path())
    }

    /// Returns the path to the C++ compiler for the target specified, may panic
    /// if no C++ compiler was configured for the target.
    fn cxx(&self, target: &str) -> &Path {
        match self.cxx.get(target) {
            Some(p) => p.path(),
            // C++ compilers are only discovered for host triples.
            None => panic!("\n\ntarget `{}` is not configured as a host,
                            only as a target\n\n", target),
        }
    }

    /// Returns flags to pass to the compiler to generate code for `target`.
    fn rustc_flags(&self, target: &str) -> Vec<String> {
        // New flags should be added here with great caution!
        //
        // It's quite unfortunate to **require** flags to generate code for a
        // target, so it should only be passed here if absolutely necessary!
        // Most default configuration should be done through target specs rather
        // than an entry here.

        let is_cross = target != self.config.build;
        let wants_cross_linker = is_cross && !target.contains("msvc") &&
                                 !target.contains("emscripten");
        let mut flags = Vec::new();
        if wants_cross_linker {
            flags.push(format!("-Clinker={}", self.cc(target).display()));
        }
        flags
    }

    /// Returns the "musl root" for this `target`, if defined
    fn musl_root(&self, target: &str) -> Option<&Path> {
        // Per-target setting wins; fall back to the global one.
        let per_target = self.config.target_config.get(target)
            .and_then(|t| t.musl_root.as_ref());
        per_target
            .or(self.config.musl_root.as_ref())
            .map(|p| &**p)
    }

    /// Returns the root of the "rootfs" image that this target will be using,
    /// if one was configured.
    ///
    /// If `Some` is returned then that means that tests for this target are
    /// emulated with QEMU and binaries will need to be shipped to the emulator.
    fn qemu_rootfs(&self, target: &str) -> Option<&Path> {
        let target_cfg = self.config.target_config.get(target);
        target_cfg
            .and_then(|t| t.qemu_rootfs.as_ref())
            .map(|p| &**p)
    }

    /// Path to the python interpreter to use
    ///
    /// NOTE(review): assumes `config.python` was filled in before this is
    /// called (the `unwrap` panics otherwise) — verify against the sanity
    /// checking / configuration code that sets it.
    fn python(&self) -> &Path {
        self.config.python.as_ref().unwrap()
    }

    /// Tests whether the `compiler` compiling for `target` should be forced to
    /// use a stage1 compiler instead.
    ///
    /// Currently, by default, the build system does not perform a "full
    /// bootstrap" by default where we compile the compiler three times.
    /// Instead, we compile the compiler two times. The final stage (stage2)
    /// just copies the libraries from the previous stage, which is what this
    /// method detects.
    ///
    /// Here we return `true` if:
    ///
    /// * The build isn't performing a full bootstrap
    /// * The `compiler` is in the final stage, 2
    /// * We're not cross-compiling, so the artifacts are already available in
    ///   stage1
    ///
    /// When all of these conditions are met the build will lift artifacts from
    /// the previous stage forward.
    fn force_use_stage1(&self, compiler: &Compiler, target: &str) -> bool {
        if self.config.full_bootstrap {
            return false
        }
        if compiler.stage < 2 {
            return false
        }
        // `target` must be one of our host triples for stage1 artifacts to
        // already exist.
        self.config.host.iter().any(|h| h == target)
    }

    /// Returns the directory that OpenSSL artifacts are compiled into if
    /// configured to do so.
    fn openssl_dir(&self, target: &str) -> Option<PathBuf> {
        // OpenSSL not used on Windows; otherwise only built when statically
        // linking it.
        if !target.contains("windows") && self.config.openssl_static {
            Some(self.out.join(target).join("openssl"))
        } else {
            None
        }
    }

    /// Returns the directory that OpenSSL artifacts are installed into if
    /// configured as such.
    fn openssl_install_dir(&self, target: &str) -> Option<PathBuf> {
        match self.openssl_dir(target) {
            Some(dir) => Some(dir.join("install")),
            None => None,
        }
    }

    /// Given `num` in the form "a.b.c" return a "release string" which
    /// describes the release version number.
    ///
    /// For example on nightly this returns "a.b.c-nightly", on beta it returns
    /// "a.b.c-beta.1" and on stable it just returns "a.b.c".
    fn release(&self, num: &str) -> String {
        let chan: &str = &self.config.channel[..];
        match chan {
            "stable" => num.to_string(),
            "beta" => format!("{}-beta{}", num, channel::CFG_PRERELEASE_VERSION),
            "nightly" => format!("{}-nightly", num),
            _ => format!("{}-dev", num),
        }
    }

    /// Returns the value of `release` above for Rust itself.
    fn rust_release(&self) -> String {
        self.release(channel::CFG_RELEASE_NUM)
    }

    /// Returns the "package version" for a component given the `num` release
    /// number.
    ///
    /// The package version is typically what shows up in the names of tarballs.
    /// For channels like beta/nightly it's just the channel name, otherwise
    /// it's the `num` provided.
    fn package_vers(&self, num: &str) -> String {
        let chan: &str = &self.config.channel[..];
        match chan {
            "stable" => num.to_string(),
            "beta" => "beta".to_string(),
            "nightly" => "nightly".to_string(),
            _ => format!("{}-dev", num),
        }
    }

    /// Returns the value of `package_vers` above for Rust itself.
    fn rust_package_vers(&self) -> String {
        // Same baked-in release number as `rust_release` above.
        self.package_vers(channel::CFG_RELEASE_NUM)
    }

    /// Returns the `version` string associated with this compiler for Rust
    /// itself.
    ///
    /// Note that this is a descriptive string which includes the commit date,
    /// sha, version, etc.
    fn rust_version(&self) -> String {
        // Formatting is delegated to `rust_info` (see doc comment above for
        // what the string contains).
        self.rust_info.version(self, channel::CFG_RELEASE_NUM)
    }

    /// Returns the `a.b.c` version that Cargo is at.
    fn cargo_release_num(&self) -> String {
        let mut toml = String::new();
        t!(t!(File::open(self.src.join("cargo/Cargo.toml"))).read_to_string(&mut toml));
        for line in toml.lines() {
            // Look for the literal `version = "a.b.c"` line and strip the
            // `version = "` prefix and trailing `"` to recover the number.
            let prefix = "version = \"";
            let suffix = "\"";
            if line.starts_with(prefix) && line.ends_with(suffix) {
                return line[prefix.len()..line.len() - suffix.len()].to_string()
            }
        }

        // Reaching here means the submodule's manifest had no matching line.
        panic!("failed to find version in cargo's Cargo.toml")
    }

    /// Returns whether unstable features should be enabled for the compiler
    /// we're building.
    fn unstable_features(&self) -> bool {
        // Only stable and beta lock down unstable features; every other
        // channel (nightly, dev, ...) enables them.
        match &self.config.channel[..] {
            "stable" | "beta" => false,
            _ => true,
        }
    }
}

impl<'a> Compiler<'a> {
    /// Creates a new compiler for the specified stage/host
    fn new(stage: u32, host: &'a str) -> Compiler<'a> {
        Compiler { stage: stage, host: host }
    }

    /// Returns whether this is a snapshot compiler for `build`'s configuration
    fn is_snapshot(&self, build: &Build) -> bool {
        // The stage-0 compiler running on the build triple is the bootstrap
        // snapshot; everything else is built in-tree.
        self.stage == 0 && self.host == build.config.build
    }

    /// Returns if this compiler should be treated as a final stage one in the
    /// current build session.
    /// This takes into account whether we're performing a full bootstrap or
    /// not; don't directly compare the stage with `2`!
    fn is_final_stage(&self, build: &Build) -> bool {
        // Without a full bootstrap, stage 1 already reuses later-stage
        // artifacts (see `force_use_stage1`), so it counts as final.
        let final_stage = if build.config.full_bootstrap { 2 } else { 1 };
        self.stage >= final_stage
    }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






























































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/metadata.rs version [bacc1faca2].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::collections::HashMap;
use std::process::Command;
use std::path::PathBuf;

use build_helper::output;
use rustc_serialize::json;

use {Build, Crate};

#[derive(RustcDecodable)]
struct Output {
    /// Top-level object decoded from the JSON emitted by `cargo metadata`.
    packages: Vec<Package>,
    /// The resolved dependency graph.
    resolve: Resolve,
}

#[derive(RustcDecodable)]
struct Package {
    /// Cargo's unique package identifier string.
    id: String,
    name: String,
    version: String,
    /// `None` for local (path) packages; `Some` for external sources, which
    /// `build_krate` below uses to filter out non-in-tree crates.
    source: Option<String>,
    /// Path to this package's `Cargo.toml`.
    manifest_path: String,
}

#[derive(RustcDecodable)]
struct Resolve {
    nodes: Vec<ResolveNode>,
}

#[derive(RustcDecodable)]
struct ResolveNode {
    /// Package id this node describes.
    id: String,
    /// Package ids of this package's direct dependencies.
    dependencies: Vec<String>,
}

pub fn build(build: &mut Build) {
    // Gather crate metadata for each of the in-tree workspace roots, in order.
    for krate in &["src/libstd", "src/libtest", "src/rustc"] {
        build_krate(build, krate);
    }
}

fn build_krate(build: &mut Build, krate: &str) {
    // Run `cargo metadata` to figure out what crates we're testing.
    //
    // Down below we're going to call `cargo test`, but to test the right set
    // of packages we're going to have to know what `-p` arguments to pass it
    // to know what crates to test. Here we run `cargo metadata` to learn about
    // the dependency graph and what `-p` arguments there are.
    let mut cargo = Command::new(&build.cargo);
    cargo.arg("metadata")
         .arg("--manifest-path").arg(build.src.join(krate).join("Cargo.toml"))
;
    let output = output(&mut cargo);
    let output: Output = json::decode(&output).unwrap();
    let mut id2name = HashMap::new();
    for package in output.packages {
        // Only packages without a `source` are local to this tree; registry
        // and git dependencies are skipped.
        if package.source.is_none() {
            id2name.insert(package.id, package.name.clone());
            // `manifest_path` points at Cargo.toml; pop once to get the
            // crate's root directory.
            let mut path = PathBuf::from(package.manifest_path);
            path.pop();
            build.crates.insert(package.name.clone(), Crate {
                build_step: format!("build-crate-{}", package.name),
                doc_step: format!("doc-crate-{}", package.name),
                test_step: format!("test-crate-{}", package.name),
                bench_step: format!("bench-crate-{}", package.name),
                name: package.name,
                version: package.version,
                deps: Vec::new(),
                path: path,
            });
        }
    }

    // Record local-to-local dependency edges; resolve nodes for external
    // crates (absent from `id2name`) are ignored on both sides.
    for node in output.resolve.nodes {
        let name = match id2name.get(&node.id) {
            Some(name) => name,
            None => continue,
        };

        let krate = build.crates.get_mut(name).unwrap();
        for dep in node.dependencies.iter() {
            let dep = match id2name.get(dep) {
                Some(dep) => dep,
                None => continue,
            };
            krate.deps.push(dep.clone());
        }
    }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/mk/Makefile.in version [32442cf9e4].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.

include config.mk

# `make VERBOSE=1` echoes each command and passes -v through to bootstrap.
ifdef VERBOSE
Q :=
BOOTSTRAP_ARGS := -v
else
Q := @
BOOTSTRAP_ARGS :=
endif

# Every rule below is a thin forwarder to the Python bootstrap driver.
BOOTSTRAP := $(CFG_PYTHON) $(CFG_SRC_DIR)src/bootstrap/bootstrap.py

all:
	$(Q)$(BOOTSTRAP) build $(BOOTSTRAP_ARGS)
	$(Q)$(BOOTSTRAP) doc $(BOOTSTRAP_ARGS)

help:
	$(Q)echo 'Welcome to the rustbuild build system!'
	$(Q)echo
	$(Q)echo This makefile is a thin veneer over the ./x.py script located
	$(Q)echo in this directory. To get the full power of the build system
	$(Q)echo you can run x.py directly.
	$(Q)echo
	$(Q)echo To learn more run \`./x.py --help\`

clean:
	$(Q)$(BOOTSTRAP) clean $(BOOTSTRAP_ARGS)

# Partial builds up to the named stage (libtest pulls in libstd).
rustc-stage1:
	$(Q)$(BOOTSTRAP) build --stage 1 src/libtest $(BOOTSTRAP_ARGS)
rustc-stage2:
	$(Q)$(BOOTSTRAP) build --stage 2 src/libtest $(BOOTSTRAP_ARGS)

# Documentation targets.
docs: doc
doc:
	$(Q)$(BOOTSTRAP) doc $(BOOTSTRAP_ARGS)
nomicon:
	$(Q)$(BOOTSTRAP) doc src/doc/nomicon $(BOOTSTRAP_ARGS)
book:
	$(Q)$(BOOTSTRAP) doc src/doc/book $(BOOTSTRAP_ARGS)
standalone-docs:
	$(Q)$(BOOTSTRAP) doc src/doc $(BOOTSTRAP_ARGS)
# Test targets.
check:
	$(Q)$(BOOTSTRAP) test $(BOOTSTRAP_ARGS)
check-aux:
	$(Q)$(BOOTSTRAP) test \
		src/tools/cargotest \
		src/test/pretty \
		src/test/run-pass/pretty \
		src/test/run-fail/pretty \
		src/test/run-pass-valgrind/pretty \
		src/test/run-pass-fulldeps/pretty \
		src/test/run-fail-fulldeps/pretty \
		$(BOOTSTRAP_ARGS)
# Distribution / installation.
dist:
	$(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS)
distcheck:
	$(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS)
	$(Q)$(BOOTSTRAP) test distcheck $(BOOTSTRAP_ARGS)
install:
	$(Q)$(BOOTSTRAP) dist --install $(BOOTSTRAP_ARGS)
tidy:
	$(Q)$(BOOTSTRAP) test src/tools/tidy $(BOOTSTRAP_ARGS)
# Builds a bogus path just to force cargo to download/update metadata.
prepare:
	$(Q)$(BOOTSTRAP) build nonexistent/path/to/trigger/cargo/metadata

# Legacy cross-target aliases kept for CI compatibility.
check-stage2-T-arm-linux-androideabi-H-x86_64-unknown-linux-gnu:
	$(Q)$(BOOTSTRAP) test --target arm-linux-androideabi
check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu:
	$(Q)$(BOOTSTRAP) test --target x86_64-unknown-linux-musl


# 'dist' is also a directory name, so it must be declared phony.
.PHONY: dist
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/native.rs version [74224c615d].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Compilation of native dependencies like LLVM.
//!
//! Native projects like LLVM unfortunately aren't suited just yet for
//! compilation in build scripts that Cargo has. This is because thie
//! compilation takes a *very* long time but also because we don't want to
//! compile LLVM 3 times as part of a normal bootstrap (we want it cached).
//!
//! LLVM and compiler-rt are essentially just wired up to everything else to
//! ensure that they're always in place if needed.

use std::env;
use std::fs::{self, File};
use std::io::{Read, Write};
use std::path::Path;
use std::process::Command;

use build_helper::output;
use cmake;
use gcc;

use Build;
use util;
use build_helper::up_to_date;

/// Compile LLVM for `target`.
pub fn llvm(build: &Build, target: &str) {
    // If we're using a custom LLVM bail out here, but we can only use a
    // custom LLVM for the build triple.
    if let Some(config) = build.config.target_config.get(target) {
        if let Some(ref s) = config.llvm_config {
            return check_llvm_version(build, s);
        }
    }

    // The trigger file's contents are compared against the stamp written by
    // the last successful build; any change forces a rebuild.
    let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger");
    let mut rebuild_trigger_contents = String::new();
    t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents));

    let out_dir = build.llvm_out(target);
    let done_stamp = out_dir.join("llvm-finished-building");
    if done_stamp.exists() {
        let mut done_contents = String::new();
        t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents));

        // If LLVM was already built previously and contents of the rebuild-trigger file
        // didn't change from the previous build, then no action is required.
        if done_contents == rebuild_trigger_contents {
            return
        }
    }
    // Optionally wipe the whole output dir first; errors (e.g. dir missing)
    // are deliberately ignored via `drop`.
    if build.config.llvm_clean_rebuild {
        drop(fs::remove_dir_all(&out_dir));
    }

    println!("Building LLVM for {}", target);
    let _time = util::timeit();
    t!(fs::create_dir_all(&out_dir));

    // http://llvm.org/docs/CMake.html
    let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
    if build.config.ninja {
        cfg.generator("Ninja");
    }

    let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) {
        (false, _) => "Debug",
        (true, false) => "Release",
        (true, true) => "RelWithDebInfo",
    };

    // NOTE: remember to also update `config.toml.example` when changing the defaults!
    let llvm_targets = match build.config.llvm_targets {
        Some(ref s) => s,
        None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX",
    };

    let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};

    cfg.target(target)
       .host(&build.config.build)
       .out_dir(&out_dir)
       .profile(profile)
       .define("LLVM_ENABLE_ASSERTIONS", assertions)
       .define("LLVM_TARGETS_TO_BUILD", llvm_targets)
       .define("LLVM_INCLUDE_EXAMPLES", "OFF")
       .define("LLVM_INCLUDE_TESTS", "OFF")
       .define("LLVM_INCLUDE_DOCS", "OFF")
       .define("LLVM_ENABLE_ZLIB", "OFF")
       .define("WITH_POLLY", "OFF")
       .define("LLVM_ENABLE_TERMINFO", "OFF")
       .define("LLVM_ENABLE_LIBEDIT", "OFF")
       .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string())
       .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
       .define("LLVM_DEFAULT_TARGET_TRIPLE", target);

    // Statically link the CRT on MSVC so the produced libraries match rustc.
    if target.contains("msvc") {
        cfg.define("LLVM_USE_CRT_DEBUG", "MT");
        cfg.define("LLVM_USE_CRT_RELEASE", "MT");
        cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT");
    }

    if target.starts_with("i686") {
        cfg.define("LLVM_BUILD_32_BITS", "ON");
    }

    if let Some(num_linkers) = build.config.llvm_link_jobs {
        if num_linkers > 0 {
            cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
        }
    }

    // http://llvm.org/docs/HowToCrossCompileLLVM.html
    if target != build.config.build {
        // FIXME: if the llvm root for the build triple is overridden then we
        //        should use llvm-tblgen from there, also should verify that it
        //        actually exists most of the time in normal installs of LLVM.
        let host = build.llvm_out(&build.config.build).join("bin/llvm-tblgen");
        cfg.define("CMAKE_CROSSCOMPILING", "True")
           .define("LLVM_TABLEGEN", &host);
    }

    // MSVC handles compiler business itself
    if !target.contains("msvc") {
        // If ccache/sccache is configured, wrap the real compilers with it.
        if let Some(ref ccache) = build.config.ccache {
           cfg.define("CMAKE_C_COMPILER", ccache)
              .define("CMAKE_C_COMPILER_ARG1", build.cc(target))
              .define("CMAKE_CXX_COMPILER", ccache)
              .define("CMAKE_CXX_COMPILER_ARG1", build.cxx(target));
        } else {
           cfg.define("CMAKE_C_COMPILER", build.cc(target))
              .define("CMAKE_CXX_COMPILER", build.cxx(target));
        }
        cfg.build_arg("-j").build_arg(build.jobs().to_string());

        cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
        cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
    }

    if env::var_os("SCCACHE_ERROR_LOG").is_some() {
        cfg.env("RUST_LOG", "sccache=debug");
    }

    // FIXME: we don't actually need to build all LLVM tools and all LLVM
    //        libraries here, e.g. we just want a few components and a few
    //        tools. Figure out how to filter them down and only build the right
    //        tools and libs on all platforms.
    cfg.build();

    // Record what we built against so the stamp check above can short-circuit
    // the next run.
    t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes()));
}

/// Verifies that an external `llvm-config` describes an LLVM version this
/// build system knows how to link against; panics otherwise.
fn check_llvm_version(build: &Build, llvm_config: &Path) {
    // The version check can be disabled in the build configuration.
    if !build.config.llvm_version_check {
        return
    }

    // Ask the provided llvm-config binary for its version string.
    let mut cmd = Command::new(llvm_config);
    let version = output(cmd.arg("--version"));
    if version.starts_with("3.5") || version.starts_with("3.6") ||
       version.starts_with("3.7") {
        return
    }
    // Fix: the old message claimed "need >=3.5", but the check above only
    // accepts the 3.5/3.6/3.7 series — e.g. 4.0 also reaches this panic —
    // so state the actually-accepted versions.
    panic!("\n\nbad LLVM version: {}, need 3.5, 3.6 or 3.7\n\n", version)
}

/// Compiles the `rust_test_helpers.c` library which we used in various
/// `run-pass` test suites for ABI testing.
pub fn test_helpers(build: &Build, target: &str) {
    let dst = build.test_helpers_out(target);
    let src = build.src.join("src/rt/rust_test_helpers.c");
    // Skip the rebuild when the existing archive is newer than the C source
    // (per the `up_to_date` helper).
    if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
        return
    }

    println!("Building test helpers");
    t!(fs::create_dir_all(&dst));
    let mut cfg = gcc::Config::new();

    // We may have found various cross-compilers a little differently due to our
    // extra configuration, so inform gcc of these compilers. Note, though, that
    // on MSVC we still need gcc's detection of env vars (ugh).
    if !target.contains("msvc") {
        if let Some(ar) = build.ar(target) {
            cfg.archiver(ar);
        }
        cfg.compiler(build.cc(target));
    }

    cfg.cargo_metadata(false)
       .out_dir(&dst)
       .target(target)
       .host(&build.config.build)
       .opt_level(0)
       .debug(false)
       .file(build.src.join("src/rt/rust_test_helpers.c"))
       .compile("librust_test_helpers.a");
}
// OpenSSL release fetched by `openssl` below, and the expected sha256 of its
// source tarball (checked after download).
const OPENSSL_VERS: &'static str = "1.0.2k";
const OPENSSL_SHA256: &'static str =
    "6b3977c61f2aedf0f96367dcfb5c6e578cf37e7b8d913b4ecb6643c3cb88d8c0";

/// Downloads, verifies, builds and installs a static OpenSSL for `target`,
/// when the configuration requests one (see `Build::openssl_dir`).
pub fn openssl(build: &Build, target: &str) {
    let out = match build.openssl_dir(target) {
        Some(dir) => dir,
        None => return,
    };

    // The stamp records the version last installed; if it matches, we're done.
    let stamp = out.join(".stamp");
    let mut contents = String::new();
    drop(File::open(&stamp).and_then(|mut f| f.read_to_string(&mut contents)));
    if contents == OPENSSL_VERS {
        return
    }
    t!(fs::create_dir_all(&out));

    let name = format!("openssl-{}.tar.gz", OPENSSL_VERS);
    let tarball = out.join(&name);
    if !tarball.exists() {
        // Download to a .tmp file (up to 3 attempts), verify the sha256, and
        // only then rename into place.
        let tmp = tarball.with_extension("tmp");
        // originally from https://www.openssl.org/source/...
        let url = format!("https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/{}",
                          name);
        let mut ok = false;
        for _ in 0..3 {
            let status = Command::new("curl")
                            .arg("-o").arg(&tmp)
                            .arg(&url)
                            .status()
                            .expect("failed to spawn curl");
            if status.success() {
                ok = true;
                break
            }
        }
        if !ok {
            panic!("failed to download openssl source")
        }
        // macOS ships `shasum` instead of `sha256sum`.
        let mut shasum = if target.contains("apple") {
            let mut cmd = Command::new("shasum");
            cmd.arg("-a").arg("256");
            cmd
        } else {
            Command::new("sha256sum")
        };
        let output = output(&mut shasum.arg(&tmp));
        let found = output.split_whitespace().next().unwrap();
        if found != OPENSSL_SHA256 {
            panic!("downloaded openssl sha256 different\n\
                    expected: {}\n\
                    found:    {}\n", OPENSSL_SHA256, found);
        }
        t!(fs::rename(&tmp, &tarball));
    }
    // Unpack fresh each time; stale trees are removed first (errors ignored).
    let obj = out.join(format!("openssl-{}", OPENSSL_VERS));
    let dst = build.openssl_install_dir(target).unwrap();
    drop(fs::remove_dir_all(&obj));
    drop(fs::remove_dir_all(&dst));
    build.run(Command::new("tar").arg("xf").arg(&tarball).current_dir(&out));

    let mut configure = Command::new(obj.join("Configure"));
    configure.arg(format!("--prefix={}", dst.display()));
    configure.arg("no-dso");
    configure.arg("no-ssl2");
    configure.arg("no-ssl3");

    // Map the Rust target triple to OpenSSL's Configure platform name.
    let os = match target {
        "aarch64-unknown-linux-gnu" => "linux-aarch64",
        "arm-unknown-linux-gnueabi" => "linux-armv4",
        "arm-unknown-linux-gnueabihf" => "linux-armv4",
        "armv7-unknown-linux-gnueabihf" => "linux-armv4",
        "i686-apple-darwin" => "darwin-i386-cc",
        "i686-unknown-freebsd" => "BSD-x86-elf",
        "i686-unknown-linux-gnu" => "linux-elf",
        "i686-unknown-linux-musl" => "linux-elf",
        "mips-unknown-linux-gnu" => "linux-mips32",
        "mips64-unknown-linux-gnuabi64" => "linux64-mips64",
        "mips64el-unknown-linux-gnuabi64" => "linux64-mips64",
        "mipsel-unknown-linux-gnu" => "linux-mips32",
        "powerpc-unknown-linux-gnu" => "linux-ppc",
        "powerpc64-unknown-linux-gnu" => "linux-ppc64",
        "powerpc64le-unknown-linux-gnu" => "linux-ppc64le",
        "s390x-unknown-linux-gnu" => "linux64-s390x",
        "x86_64-apple-darwin" => "darwin64-x86_64-cc",
        "x86_64-unknown-freebsd" => "BSD-x86_64",
        "x86_64-unknown-linux-gnu" => "linux-x86_64",
        "x86_64-unknown-linux-musl" => "linux-x86_64",
        "x86_64-unknown-netbsd" => "BSD-x86_64",
        _ => panic!("don't know how to configure OpenSSL for {}", target),
    };
    configure.arg(os);
    configure.env("CC", build.cc(target));
    for flag in build.cflags(target) {
        configure.arg(flag);
    }
    configure.current_dir(&obj);
    println!("Configuring openssl for {}", target);
    build.run_quiet(&mut configure);
    println!("Building openssl for {}", target);
    build.run_quiet(Command::new("make").current_dir(&obj));
    println!("Installing openssl for {}", target);
    build.run_quiet(Command::new("make").arg("install").current_dir(&obj));

    // Write the stamp last so a partial build is retried next time.
    let mut f = t!(File::create(&stamp));
    t!(f.write_all(OPENSSL_VERS.as_bytes()));
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






















































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/sanity.rs version [522867ad8f].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Sanity checking performed by rustbuild before actually executing anything.
//!
//! This module contains the implementation of ensuring that the build
//! environment looks reasonable before progressing. This will verify that
//! various programs like git and python exist, along with ensuring that all C
//! compilers for cross-compiling are found.
//!
//! In theory if we get past this phase it's a bug if a build fails, but in
//! practice that's likely not true!

use std::collections::HashSet;
use std::env;
use std::ffi::{OsStr, OsString};
use std::fs;
use std::process::Command;

use build_helper::output;

use Build;

/// Pre-build sanity check for rustbuild.
///
/// Verifies that every external tool the build will invoke (git, cmake,
/// ninja, python, C/C++ compilers, ar, ...) can be located on `PATH`,
/// fills in discovered defaults on `build.config` (python, nodejs, gdb,
/// lldb version/python dir), and panics with a descriptive message on
/// any problem so failures surface before compilation starts.
pub fn check(build: &mut Build) {
    // Commands already verified — each lookup is done at most once.
    let mut checked = HashSet::new();
    let path = env::var_os("PATH").unwrap_or(OsString::new());
    // On Windows, quotes are invalid characters for filename paths, and if
    // one is present as part of the PATH then that can lead to the system
    // being unable to identify the files properly. See
    // https://github.com/rust-lang/rust/issues/34959 for more details.
    if cfg!(windows) {
        if path.to_string_lossy().contains("\"") {
            panic!("PATH contains invalid character '\"'");
        }
    }
    // Searches every PATH entry for `cmd`, also accepting a Windows-style
    // `.exe` variant; returns the first candidate found, `None` otherwise.
    let have_cmd = |cmd: &OsStr| {
        for path in env::split_paths(&path) {
            let target = path.join(cmd);
            let mut cmd_alt = cmd.to_os_string();
            cmd_alt.push(".exe");
            if target.is_file() ||
               target.with_extension("exe").exists() ||
               target.join(cmd_alt).exists() {
                return Some(target);
            }
        }
        return None;
    };

    // Like `have_cmd`, but panics when the command is missing. The
    // `checked` set de-duplicates repeated requests for the same tool.
    let mut need_cmd = |cmd: &OsStr| {
        if !checked.insert(cmd.to_owned()) {
            return
        }
        if have_cmd(cmd).is_none() {
            panic!("\n\ncouldn't find required command: {:?}\n\n", cmd);
        }
    };

    // If we've got a git directory we're gona need git to update
    // submodules and learn about various other aspects.
    if fs::metadata(build.src.join(".git")).is_ok() {
        need_cmd("git".as_ref());
    }

    // We need cmake, but only if we're actually building LLVM
    for host in build.config.host.iter() {
        if let Some(config) = build.config.target_config.get(host) {
            if config.llvm_config.is_some() {
                continue
            }
        }
        need_cmd("cmake".as_ref());
        if build.config.ninja {
            // Some Linux distros rename `ninja` to `ninja-build`.
            // CMake can work with either binary name.
            if have_cmd("ninja-build".as_ref()).is_none() {
                need_cmd("ninja".as_ref());
            }
        }
        break
    }

    // Pick a python interpreter unless one was configured explicitly:
    // prefer `python2.7`, then `python2`, and finally plain `python`.
    if build.config.python.is_none() {
        build.config.python = have_cmd("python2.7".as_ref());
    }
    if build.config.python.is_none() {
        build.config.python = have_cmd("python2".as_ref());
    }
    if build.config.python.is_none() {
        need_cmd("python".as_ref());
        build.config.python = Some("python".into());
    }
    need_cmd(build.config.python.as_ref().unwrap().as_ref());


    if let Some(ref s) = build.config.nodejs {
        need_cmd(s.as_ref());
    } else {
        // Look for the nodejs command, needed for emscripten testing
        if let Some(node) = have_cmd("node".as_ref()) {
            build.config.nodejs = Some(node);
        } else if let Some(node) = have_cmd("nodejs".as_ref()) {
            build.config.nodejs = Some(node);
        }
    }

    // gdb is optional: require it if configured, otherwise record it
    // if we happen to find one on PATH.
    if let Some(ref gdb) = build.config.gdb {
        need_cmd(gdb.as_ref());
    } else {
        build.config.gdb = have_cmd("gdb".as_ref());
    }

    // We're gonna build some custom C code here and there, host triples
    // also build some C++ shims for LLVM so we need a C++ compiler.
    for target in build.config.target.iter() {
        // On emscripten we don't actually need the C compiler to just
        // build the target artifacts, only for testing. For the sake
        // of easier bot configuration, just skip detection.
        if target.contains("emscripten") {
            continue;
        }

        need_cmd(build.cc(target).as_ref());
        if let Some(ar) = build.ar(target) {
            need_cmd(ar.as_ref());
        }
    }
    for host in build.config.host.iter() {
        need_cmd(build.cxx(host).as_ref());
    }

    // The msvc hosts don't use jemalloc, turn it off globally to
    // avoid packaging the dummy liballoc_jemalloc on that platform.
    for host in build.config.host.iter() {
        if host.contains("msvc") {
            build.config.use_jemalloc = false;
        }
    }

    // Externally configured LLVM requires FileCheck to exist
    let filecheck = build.llvm_filecheck(&build.config.build);
    if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests {
        panic!("FileCheck executable {:?} does not exist", filecheck);
    }

    for target in build.config.target.iter() {
        // Can't compile for iOS unless we're on macOS
        if target.contains("apple-ios") &&
           !build.config.build.contains("apple-darwin") {
            panic!("the iOS target is only supported on macOS");
        }

        // Make sure musl-root is valid if specified
        if target.contains("musl") && !target.contains("mips") {
            match build.musl_root(target) {
                Some(root) => {
                    if fs::metadata(root.join("lib/libc.a")).is_err() {
                        panic!("couldn't find libc.a in musl dir: {}",
                               root.join("lib").display());
                    }
                    if fs::metadata(root.join("lib/libunwind.a")).is_err() {
                        panic!("couldn't find libunwind.a in musl dir: {}",
                               root.join("lib").display());
                    }
                }
                None => {
                    panic!("when targeting MUSL either the rust.musl-root \
                            option or the target.$TARGET.musl-root option must \
                            be specified in config.toml")
                }
            }
        }

        if target.contains("msvc") {
            // There are three builds of cmake on windows: MSVC, MinGW, and
            // Cygwin. The Cygwin build does not have generators for Visual
            // Studio, so detect that here and error.
            let out = output(Command::new("cmake").arg("--help"));
            if !out.contains("Visual Studio") {
                panic!("
cmake does not support Visual Studio generators.

This is likely due to it being an msys/cygwin build of cmake,
rather than the required windows version, built using MinGW
or Visual Studio.

If you are building under msys2 try installing the mingw-w64-x86_64-cmake
package instead of cmake:

$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
");
            }
        }
    }

    // Hosts/targets requested on the command line must have been part of
    // the original ./configure invocation.
    for host in build.flags.host.iter() {
        if !build.config.host.contains(host) {
            panic!("specified host `{}` is not in the ./configure list", host);
        }
    }
    for target in build.flags.target.iter() {
        if !build.config.target.contains(target) {
            panic!("specified target `{}` is not in the ./configure list",
                   target);
        }
    }

    // Runs `cmd` and yields the first line of its stdout, used below to
    // probe lldb; failures simply leave the corresponding fields `None`.
    let run = |cmd: &mut Command| {
        cmd.output().map(|output| {
            String::from_utf8_lossy(&output.stdout)
                   .lines().next().unwrap()
                   .to_string()
        })
    };
    build.lldb_version = run(Command::new("lldb").arg("--version")).ok();
    if build.lldb_version.is_some() {
        build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok();
    }

    if let Some(ref s) = build.config.ccache {
        need_cmd(s.as_ref());
    }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<












































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/step.rs version [8aff62c0c9].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Definition of steps of the build system.
//!
//! This is where some of the real meat of rustbuild is located, in how we
//! define targets and the dependencies amongst them. This file can sort of be
//! viewed as just defining targets in a makefile which shell out to predefined
//! functions elsewhere about how to execute the target.
//!
//! The primary function here you're likely interested in is the `build_rules`
//! function. This will create a `Rules` structure which basically just lists
//! everything that rustbuild can do. Each rule has a human-readable name, a
//! path associated with it, some dependencies, and then a closure of how to
//! actually perform the rule.
//!
//! All steps below are defined in self-contained units, so adding a new target
//! to the build system should just involve adding the meta information here
//! along with the actual implementation elsewhere. You can find more comments
//! about how to define rules themselves below.

use std::collections::{BTreeMap, HashSet};
use std::mem;

use check::{self, TestKind};
use compile;
use dist;
use doc;
use flags::Subcommand;
use install;
use native;
use {Compiler, Build, Mode};

/// Entry point for step execution: construct the full rule set, compute
/// the ordered execution plan, then run every planned step.
pub fn run(build: &Build) {
    let rules = build_rules(build);
    let plan = rules.plan();
    rules.run(&plan);
}

/// Constructs the complete set of rules (targets, paths, dependencies and
/// execution closures) that rustbuild knows how to run.
///
/// The returned `Rules` structure is consumed by `run` above, which plans
/// an ordered list of steps from these rules and executes them.
pub fn build_rules<'a>(build: &'a Build) -> Rules {
    let mut rules = Rules::new(build);

    // This is the first rule that we're going to define for rustbuild, which is
    // used to compile LLVM itself. All rules are added through the `rules`
    // structure created above and are configured through a builder-style
    // interface.
    //
    // First up we see the `build` method. This represents a rule that's part of
    // the top-level `build` subcommand. For example `./x.py build` is what this
    // is associating with. Note that this is normally only relevant if you flag
    // a rule as `default`, which we'll talk about later.
    //
    // Next up we'll see two arguments to this method:
    //
    // * `llvm` - this is the "human readable" name of this target. This name is
    //            not accessed anywhere outside this file itself (e.g. not in
    //            the CLI nor elsewhere in rustbuild). The purpose of this is to
    //            easily define dependencies between rules. That is, other rules
    //            will depend on this with the name "llvm".
    // * `src/llvm` - this is the relevant path to the rule that we're working
    //                with. This path is the engine behind how commands like
    //                `./x.py build src/llvm` work. This should typically point
    //                to the relevant component, but if there's not really a
    //                path to be assigned here you can pass something like
    //                `path/to/nowhere` to ignore it.
    //
    // After we create the rule with the `build` method we can then configure
    // various aspects of it. For example this LLVM rule uses `.host(true)` to
    // flag that it's a rule only for host targets. In other words, LLVM isn't
    // compiled for targets configured through `--target` (e.g. those we're just
    // building a standard library for).
    //
    // Next up the `dep` method will add a dependency to this rule. The closure
    // is yielded the step that represents executing the `llvm` rule itself
    // (containing information like stage, host, target, ...) and then it must
    // return a target that the step depends on. Here LLVM is actually
    // interesting where a cross-compiled LLVM depends on the host LLVM, but
    // otherwise it has no dependencies.
    //
    // To handle this we do a bit of dynamic dispatch to see what the dependency
    // is. If we're building a LLVM for the build triple, then we don't actually
    // have any dependencies! To do that we return a dependency on the `Step::noop()`
    // target which does nothing.
    //
    // If we're building a cross-compiled LLVM, however, we need to assemble the
    // libraries from the previous compiler. This step has the same name as
    // ours (llvm) but we want it for a different target, so we use the
    // builder-style methods on `Step` to configure this target to the build
    // triple.
    //
    // Finally, to finish off this rule, we define how to actually execute it.
    // That logic is all defined in the `native` module so we just delegate to
    // the relevant function there. The argument to the closure passed to `run`
    // is a `Step` (defined below) which encapsulates information like the
    // stage, target, host, etc.
    rules.build("llvm", "src/llvm")
         .host(true)
         .dep(move |s| {
             if s.target == build.config.build {
                 Step::noop()
             } else {
                 s.target(&build.config.build)
             }
         })
         .run(move |s| native::llvm(build, s.target));

    // Ok! After that example rule that's hopefully enough to explain what's
    // going on here. You can check out the API docs below and also see a bunch
    // more examples of rules directly below as well.

    // the compiler with no target libraries ready to go
    rules.build("rustc", "src/rustc")
         .dep(|s| s.name("create-sysroot").target(s.host))
         .dep(move |s| {
             if s.stage == 0 {
                 Step::noop()
             } else {
                 s.name("librustc")
                  .host(&build.config.build)
                  .stage(s.stage - 1)
             }
         })
         .run(move |s| compile::assemble_rustc(build, s.stage, s.target));

    // Helper for loading an entire DAG of crates, rooted at `name`.
    // Returns (crate, path-relative-to-src, is-root) triples; `build_helper`
    // is deliberately excluded from traversal.
    let krates = |name: &str| {
        let mut ret = Vec::new();
        let mut list = vec![name];
        let mut visited = HashSet::new();
        while let Some(krate) = list.pop() {
            let default = krate == name;
            let krate = &build.crates[krate];
            let path = krate.path.strip_prefix(&build.src).unwrap();
            ret.push((krate, path.to_str().unwrap(), default));
            for dep in krate.deps.iter() {
                if visited.insert(dep) && dep != "build_helper" {
                    list.push(dep);
                }
            }
        }
        return ret
    };

    // ========================================================================
    // Crate compilations
    //
    // Tools used during the build system but not shipped
    rules.build("create-sysroot", "path/to/nowhere")
         .run(move |s| compile::create_sysroot(build, &s.compiler()));

    // These rules are "pseudo rules" that don't actually do any work
    // themselves, but represent a complete sysroot with the relevant compiler
    // linked into place.
    //
    // That is, depending on "libstd" means that when the rule is completed then
    // the `stage` sysroot for the compiler `host` will be available with a
    // standard library built for `target` linked in place. Not all rules need
    // the compiler itself to be available, just the standard library, so
    // there's a distinction between the two.
    rules.build("libstd", "src/libstd")
         .dep(|s| s.name("rustc").target(s.host))
         .dep(|s| s.name("libstd-link"));
    rules.build("libtest", "src/libtest")
         .dep(|s| s.name("libstd"))
         .dep(|s| s.name("libtest-link"))
         .default(true);
    rules.build("librustc", "src/librustc")
         .dep(|s| s.name("libtest"))
         .dep(|s| s.name("librustc-link"))
         .host(true)
         .default(true);

    // Helper method to define the rules to link a crate into its place in the
    // sysroot.
    //
    // The logic here is a little subtle as there's a few cases to consider.
    // Not all combinations of (stage, host, target) actually require something
    // to be compiled, but rather libraries could get propagated from a
    // different location. For example:
    //
    // * Any crate with a `host` that's not the build triple will not actually
    //   compile something. A different `host` means that the build triple will
    //   actually compile the libraries, and then we'll copy them over from the
    //   build triple to the `host` directory.
    //
    // * Some crates aren't even compiled by the build triple, but may be copied
    //   from previous stages. For example if we're not doing a full bootstrap
    //   then we may just depend on the stage1 versions of libraries to be
    //   available to get linked forward.
    //
    // * Finally, there are some cases, however, which do indeed compile crates
    //   and link them into place afterwards.
    //
    // The rule definition below mirrors these three cases. The `dep` method
    // calculates the correct dependency which either comes from stage1, a
    // different compiler, or from actually building the crate itself (the `dep`
    // rule). The `run` rule then mirrors these three cases and links the cases
    // forward into the compiler sysroot specified from the correct location.
    fn crate_rule<'a, 'b>(build: &'a Build,
                          rules: &'b mut Rules<'a>,
                          krate: &'a str,
                          dep: &'a str,
                          link: fn(&Build, &Compiler, &Compiler, &str))
                          -> RuleBuilder<'a, 'b> {
        let mut rule = rules.build(&krate, "path/to/nowhere");
        rule.dep(move |s| {
                if build.force_use_stage1(&s.compiler(), s.target) {
                    s.host(&build.config.build).stage(1)
                } else if s.host == build.config.build {
                    s.name(dep)
                } else {
                    s.host(&build.config.build)
                }
            })
            .run(move |s| {
                if build.force_use_stage1(&s.compiler(), s.target) {
                    link(build,
                         &s.stage(1).host(&build.config.build).compiler(),
                         &s.compiler(),
                         s.target)
                } else if s.host == build.config.build {
                    link(build, &s.compiler(), &s.compiler(), s.target)
                } else {
                    link(build,
                         &s.host(&build.config.build).compiler(),
                         &s.compiler(),
                         s.target)
                }
            });
            return rule
    }

    // Similar to the `libstd`, `libtest`, and `librustc` rules above, except
    // these rules only represent the libraries being available in the sysroot,
    // not the compiler itself. This is done as not all rules need a compiler in
    // the sysroot, but may just need the libraries.
    //
    // All of these rules use the helper definition above.
    crate_rule(build,
               &mut rules,
               "libstd-link",
               "build-crate-std",
               compile::std_link)
        .dep(|s| s.name("startup-objects"))
        .dep(|s| s.name("create-sysroot").target(s.host));
    crate_rule(build,
               &mut rules,
               "libtest-link",
               "build-crate-test",
               compile::test_link)
        .dep(|s| s.name("libstd-link"));
    crate_rule(build,
               &mut rules,
               "librustc-link",
               "build-crate-rustc-main",
               compile::rustc_link)
        .dep(|s| s.name("libtest-link"));

    for (krate, path, _default) in krates("std") {
        rules.build(&krate.build_step, path)
             .dep(|s| s.name("startup-objects"))
             .dep(move |s| s.name("rustc").host(&build.config.build).target(s.host))
             .run(move |s| compile::std(build, s.target, &s.compiler()));
    }
    for (krate, path, _default) in krates("test") {
        rules.build(&krate.build_step, path)
             .dep(|s| s.name("libstd-link"))
             .run(move |s| compile::test(build, s.target, &s.compiler()));
    }
    for (krate, path, _default) in krates("rustc-main") {
        rules.build(&krate.build_step, path)
             .dep(|s| s.name("libtest-link"))
             .dep(move |s| s.name("llvm").host(&build.config.build).stage(0))
             .dep(|s| s.name("may-run-build-script"))
             .run(move |s| compile::rustc(build, s.target, &s.compiler()));
    }

    // Crates which have build scripts need to rely on this rule to ensure that
    // the necessary prerequisites for a build script are linked and located in
    // place.
    rules.build("may-run-build-script", "path/to/nowhere")
         .dep(move |s| {
             s.name("libstd-link")
              .host(&build.config.build)
              .target(&build.config.build)
         });
    rules.build("startup-objects", "src/rtstartup")
         .dep(|s| s.name("create-sysroot").target(s.host))
         .run(move |s| compile::build_startup_objects(build, &s.compiler(), s.target));

    // ========================================================================
    // Test targets
    //
    // Various unit tests and tests suites we can run
    {
        let mut suite = |name, path, mode, dir| {
            rules.test(name, path)
                 .dep(|s| s.name("libtest"))
                 .dep(|s| s.name("tool-compiletest").target(s.host).stage(0))
                 .dep(|s| s.name("test-helpers"))
                 .dep(|s| s.name("emulator-copy-libs"))
                 .default(mode != "pretty") // pretty tests don't run everywhere
                 .run(move |s| {
                     check::compiletest(build, &s.compiler(), s.target, mode, dir)
                 });
        };

        suite("check-ui", "src/test/ui", "ui", "ui");
        suite("check-rpass", "src/test/run-pass", "run-pass", "run-pass");
        suite("check-cfail", "src/test/compile-fail", "compile-fail", "compile-fail");
        suite("check-pfail", "src/test/parse-fail", "parse-fail", "parse-fail");
        suite("check-rfail", "src/test/run-fail", "run-fail", "run-fail");
        suite("check-rpass-valgrind", "src/test/run-pass-valgrind",
              "run-pass-valgrind", "run-pass-valgrind");
        suite("check-mir-opt", "src/test/mir-opt", "mir-opt", "mir-opt");
        if build.config.codegen_tests {
            suite("check-codegen", "src/test/codegen", "codegen", "codegen");
        }
        suite("check-codegen-units", "src/test/codegen-units", "codegen-units",
              "codegen-units");
        suite("check-incremental", "src/test/incremental", "incremental",
              "incremental");
    }

    if build.config.build.contains("msvc") {
        // nothing to do for debuginfo tests
    } else {
        rules.test("check-debuginfo-lldb", "src/test/debuginfo-lldb")
             .dep(|s| s.name("libtest"))
             .dep(|s| s.name("tool-compiletest").target(s.host).stage(0))
             .dep(|s| s.name("test-helpers"))
             .dep(|s| s.name("debugger-scripts"))
             .run(move |s| check::compiletest(build, &s.compiler(), s.target,
                                         "debuginfo-lldb", "debuginfo"));
        rules.test("check-debuginfo-gdb", "src/test/debuginfo-gdb")
             .dep(|s| s.name("libtest"))
             .dep(|s| s.name("tool-compiletest").target(s.host).stage(0))
             .dep(|s| s.name("test-helpers"))
             .dep(|s| s.name("debugger-scripts"))
             .dep(|s| s.name("emulator-copy-libs"))
             .run(move |s| check::compiletest(build, &s.compiler(), s.target,
                                         "debuginfo-gdb", "debuginfo"));
        let mut rule = rules.test("check-debuginfo", "src/test/debuginfo");
        rule.default(true);
        if build.config.build.contains("apple") {
            rule.dep(|s| s.name("check-debuginfo-lldb"));
        } else {
            rule.dep(|s| s.name("check-debuginfo-gdb"));
        }
    }

    rules.test("debugger-scripts", "src/etc/lldb_batchmode.py")
         .run(move |s| dist::debugger_scripts(build, &build.sysroot(&s.compiler()),
                                         s.target));

    {
        let mut suite = |name, path, mode, dir| {
            rules.test(name, path)
                 .dep(|s| s.name("librustc"))
                 .dep(|s| s.name("test-helpers"))
                 .dep(|s| s.name("tool-compiletest").target(s.host).stage(0))
                 .default(mode != "pretty")
                 .host(true)
                 .run(move |s| {
                     check::compiletest(build, &s.compiler(), s.target, mode, dir)
                 });
        };

        suite("check-ui-full", "src/test/ui-fulldeps", "ui", "ui-fulldeps");
        suite("check-rpass-full", "src/test/run-pass-fulldeps",
              "run-pass", "run-pass-fulldeps");
        suite("check-rfail-full", "src/test/run-fail-fulldeps",
              "run-fail", "run-fail-fulldeps");
        suite("check-cfail-full", "src/test/compile-fail-fulldeps",
              "compile-fail", "compile-fail-fulldeps");
        suite("check-rmake", "src/test/run-make", "run-make", "run-make");
        suite("check-rustdoc", "src/test/rustdoc", "rustdoc", "rustdoc");
        suite("check-pretty", "src/test/pretty", "pretty", "pretty");
        suite("check-pretty-rpass", "src/test/run-pass/pretty", "pretty",
              "run-pass");
        suite("check-pretty-rfail", "src/test/run-fail/pretty", "pretty",
              "run-fail");
        suite("check-pretty-valgrind", "src/test/run-pass-valgrind/pretty", "pretty",
              "run-pass-valgrind");
        suite("check-pretty-rpass-full", "src/test/run-pass-fulldeps/pretty",
              "pretty", "run-pass-fulldeps");
        suite("check-pretty-rfail-full", "src/test/run-fail-fulldeps/pretty",
              "pretty", "run-fail-fulldeps");
    }

    for (krate, path, _default) in krates("std") {
        rules.test(&krate.test_step, path)
             .dep(|s| s.name("libtest"))
             .dep(|s| s.name("emulator-copy-libs"))
             .run(move |s| check::krate(build, &s.compiler(), s.target,
                                        Mode::Libstd, TestKind::Test,
                                        Some(&krate.name)));
    }
    rules.test("check-std-all", "path/to/nowhere")
         .dep(|s| s.name("libtest"))
         .dep(|s| s.name("emulator-copy-libs"))
         .default(true)
         .run(move |s| check::krate(build, &s.compiler(), s.target,
                                    Mode::Libstd, TestKind::Test, None));

    // std benchmarks
    for (krate, path, _default) in krates("std") {
        rules.bench(&krate.bench_step, path)
             .dep(|s| s.name("libtest"))
             .dep(|s| s.name("emulator-copy-libs"))
             .run(move |s| check::krate(build, &s.compiler(), s.target,
                                        Mode::Libstd, TestKind::Bench,
                                        Some(&krate.name)));
    }
    rules.bench("bench-std-all", "path/to/nowhere")
         .dep(|s| s.name("libtest"))
         .dep(|s| s.name("emulator-copy-libs"))
         .default(true)
         .run(move |s| check::krate(build, &s.compiler(), s.target,
                                    Mode::Libstd, TestKind::Bench, None));

    for (krate, path, _default) in krates("test") {
        rules.test(&krate.test_step, path)
             .dep(|s| s.name("libtest"))
             .dep(|s| s.name("emulator-copy-libs"))
             .run(move |s| check::krate(build, &s.compiler(), s.target,
                                        Mode::Libtest, TestKind::Test,
                                        Some(&krate.name)));
    }
    rules.test("check-test-all", "path/to/nowhere")
         .dep(|s| s.name("libtest"))
         .dep(|s| s.name("emulator-copy-libs"))
         .default(true)
         .run(move |s| check::krate(build, &s.compiler(), s.target,
                                    Mode::Libtest, TestKind::Test, None));
    for (krate, path, _default) in krates("rustc-main") {
        rules.test(&krate.test_step, path)
             .dep(|s| s.name("librustc"))
             .dep(|s| s.name("emulator-copy-libs"))
             .host(true)
             .run(move |s| check::krate(build, &s.compiler(), s.target,
                                        Mode::Librustc, TestKind::Test,
                                        Some(&krate.name)));
    }
    rules.test("check-rustc-all", "path/to/nowhere")
         .dep(|s| s.name("librustc"))
         .dep(|s| s.name("emulator-copy-libs"))
         .default(true)
         .host(true)
         .run(move |s| check::krate(build, &s.compiler(), s.target,
                                    Mode::Librustc, TestKind::Test, None));

    rules.test("check-linkchecker", "src/tools/linkchecker")
         .dep(|s| s.name("tool-linkchecker").stage(0))
         .dep(|s| s.name("default:doc"))
         .default(true)
         .host(true)
         .run(move |s| check::linkcheck(build, s.target));
    rules.test("check-cargotest", "src/tools/cargotest")
         .dep(|s| s.name("tool-cargotest").stage(0))
         .dep(|s| s.name("librustc"))
         .host(true)
         .run(move |s| check::cargotest(build, s.stage, s.target));
    rules.test("check-tidy", "src/tools/tidy")
         .dep(|s| s.name("tool-tidy").stage(0))
         .default(true)
         .host(true)
         .only_build(true)
         .run(move |s| check::tidy(build, s.target));
    rules.test("check-error-index", "src/tools/error_index_generator")
         .dep(|s| s.name("libstd"))
         .dep(|s| s.name("tool-error-index").host(s.host).stage(0))
         .default(true)
         .host(true)
         .run(move |s| check::error_index(build, &s.compiler()));
    rules.test("check-docs", "src/doc")
         .dep(|s| s.name("libtest"))
         .default(true)
         .host(true)
         .run(move |s| check::docs(build, &s.compiler()));
    rules.test("check-distcheck", "distcheck")
         .dep(|s| s.name("dist-src"))
         .run(move |_| check::distcheck(build));

    rules.build("test-helpers", "src/rt/rust_test_helpers.c")
         .run(move |s| native::test_helpers(build, s.target));
    rules.build("openssl", "path/to/nowhere")
         .run(move |s| native::openssl(build, s.target));

    // Some test suites are run inside emulators, and most of our test binaries
    // are linked dynamically which means we need to ship the standard library
    // and such to the emulator ahead of time. This step represents this and is
    // a dependency of all test suites.
    //
    // Most of the time this step is a noop (the `check::emulator_copy_libs`
    // only does work if necessary). For some steps such as shipping data to
    // QEMU we have to build our own tools so we've got conditional dependencies
    // on those programs as well. Note that the QEMU client is built for the
    // build target (us) and the server is built for the target.
    rules.test("emulator-copy-libs", "path/to/nowhere")
         .dep(|s| s.name("libtest"))
         .dep(move |s| {
             if build.qemu_rootfs(s.target).is_some() {
                s.name("tool-qemu-test-client").target(s.host).stage(0)
             } else {
                 Step::noop()
             }
         })
         .dep(move |s| {
             if build.qemu_rootfs(s.target).is_some() {
                s.name("tool-qemu-test-server")
             } else {
                 Step::noop()
             }
         })
         .run(move |s| check::emulator_copy_libs(build, &s.compiler(), s.target));

    rules.test("check-bootstrap", "src/bootstrap")
         .default(true)
         .host(true)
         .only_build(true)
         .run(move |_| check::bootstrap(build));

    // ========================================================================
    // Build tools
    //
    // Tools used during the build system but not shipped
    rules.build("tool-rustbook", "src/tools/rustbook")
         .dep(|s| s.name("librustc"))
         .run(move |s| compile::tool(build, s.stage, s.target, "rustbook"));
    rules.build("tool-error-index", "src/tools/error_index_generator")
         .dep(|s| s.name("librustc"))
         .run(move |s| compile::tool(build, s.stage, s.target, "error_index_generator"));
    rules.build("tool-tidy", "src/tools/tidy")
         .dep(|s| s.name("libstd"))
         .run(move |s| compile::tool(build, s.stage, s.target, "tidy"));
    rules.build("tool-linkchecker", "src/tools/linkchecker")
         .dep(|s| s.name("libstd"))
         .run(move |s| compile::tool(build, s.stage, s.target, "linkchecker"));
    rules.build("tool-cargotest", "src/tools/cargotest")
         .dep(|s| s.name("libstd"))
         .run(move |s| compile::tool(build, s.stage, s.target, "cargotest"));
    rules.build("tool-compiletest", "src/tools/compiletest")
         .dep(|s| s.name("libtest"))
         .run(move |s| compile::tool(build, s.stage, s.target, "compiletest"));
    rules.build("tool-build-manifest", "src/tools/build-manifest")
         .dep(|s| s.name("libstd"))
         .run(move |s| compile::tool(build, s.stage, s.target, "build-manifest"));
    rules.build("tool-qemu-test-server", "src/tools/qemu-test-server")
         .dep(|s| s.name("libstd"))
         .run(move |s| compile::tool(build, s.stage, s.target, "qemu-test-server"));
    rules.build("tool-qemu-test-client", "src/tools/qemu-test-client")
         .dep(|s| s.name("libstd"))
         .run(move |s| compile::tool(build, s.stage, s.target, "qemu-test-client"));
    rules.build("tool-cargo", "cargo")
         .dep(|s| s.name("libstd"))
         .dep(|s| s.stage(0).host(s.target).name("openssl"))
         .dep(move |s| {
             // Cargo depends on procedural macros, which requires a full host
             // compiler to be available, so we need to depend on that.
             s.name("librustc-link")
              .target(&build.config.build)
              .host(&build.config.build)
         })
         .run(move |s| compile::tool(build, s.stage, s.target, "cargo"));

    // ========================================================================
    // Documentation targets
    rules.doc("doc-book", "src/doc/book")
         .dep(move |s| {
             s.name("tool-rustbook")
              .host(&build.config.build)
              .target(&build.config.build)
              .stage(0)
         })
         .default(build.config.docs)
         .run(move |s| doc::book(build, s.target, "book"));
    rules.doc("doc-nomicon", "src/doc/nomicon")
         .dep(move |s| {
             s.name("tool-rustbook")
              .host(&build.config.build)
              .target(&build.config.build)
              .stage(0)
         })
         .default(build.config.docs)
         .run(move |s| doc::rustbook(build, s.target, "nomicon"));
    rules.doc("doc-reference", "src/doc/reference")
         .dep(move |s| {
             s.name("tool-rustbook")
              .host(&build.config.build)
              .target(&build.config.build)
              .stage(0)
         })
         .default(build.config.docs)
         .run(move |s| doc::rustbook(build, s.target, "reference"));
    rules.doc("doc-unstable-book", "src/doc/unstable-book")
         .dep(move |s| {
             s.name("tool-rustbook")
              .host(&build.config.build)
              .target(&build.config.build)
              .stage(0)
         })
         .default(build.config.docs)
         .run(move |s| doc::rustbook(build, s.target, "unstable-book"));
    rules.doc("doc-standalone", "src/doc")
         .dep(move |s| {
             s.name("rustc")
              .host(&build.config.build)
              .target(&build.config.build)
              .stage(0)
         })
         .default(build.config.docs)
         .run(move |s| doc::standalone(build, s.target));
    rules.doc("doc-error-index", "src/tools/error_index_generator")
         .dep(move |s| s.name("tool-error-index").target(&build.config.build).stage(0))
         .dep(move |s| s.name("librustc-link"))
         .default(build.config.docs)
         .host(true)
         .run(move |s| doc::error_index(build, s.target));
    for (krate, path, default) in krates("std") {
        rules.doc(&krate.doc_step, path)
             .dep(|s| s.name("libstd-link"))
             .default(default && build.config.docs)
             .run(move |s| doc::std(build, s.stage, s.target));
    }
    for (krate, path, default) in krates("test") {
        rules.doc(&krate.doc_step, path)
             .dep(|s| s.name("libtest-link"))
             // Needed so rustdoc generates relative links to std.
             .dep(|s| s.name("doc-crate-std"))
             .default(default && build.config.compiler_docs)
             .run(move |s| doc::test(build, s.stage, s.target));
    }
    for (krate, path, default) in krates("rustc-main") {
        rules.doc(&krate.doc_step, path)
             .dep(|s| s.name("librustc-link"))
             // Needed so rustdoc generates relative links to std.
             .dep(|s| s.name("doc-crate-std"))
             .host(true)
             .default(default && build.config.docs)
             .run(move |s| doc::rustc(build, s.stage, s.target));
    }

    // ========================================================================
    // Distribution targets
    rules.dist("dist-rustc", "src/librustc")
         .dep(move |s| s.name("rustc").host(&build.config.build))
         .host(true)
         .only_host_build(true)
         .default(true)
         .run(move |s| dist::rustc(build, s.stage, s.target));
    rules.dist("dist-std", "src/libstd")
         .dep(move |s| {
             // We want to package up as many target libraries as possible
             // for the `rust-std` package, so if this is a host target we
             // depend on librustc and otherwise we just depend on libtest.
             if build.config.host.iter().any(|t| t == s.target) {
                 s.name("librustc-link")
             } else {
                 s.name("libtest-link")
             }
         })
         .default(true)
         .only_host_build(true)
         .run(move |s| dist::std(build, &s.compiler(), s.target));
    rules.dist("dist-mingw", "path/to/nowhere")
         .default(true)
         .only_host_build(true)
         .run(move |s| {
             if s.target.contains("pc-windows-gnu") {
                 dist::mingw(build, s.target)
             }
         });
    rules.dist("dist-src", "src")
         .default(true)
         .host(true)
         .only_build(true)
         .only_host_build(true)
         .run(move |_| dist::rust_src(build));
    rules.dist("dist-docs", "src/doc")
         .default(true)
         .only_host_build(true)
         .dep(|s| s.name("default:doc"))
         .run(move |s| dist::docs(build, s.stage, s.target));
    rules.dist("dist-analysis", "analysis")
         .dep(|s| s.name("dist-std"))
         .default(true)
         .only_host_build(true)
         .run(move |s| dist::analysis(build, &s.compiler(), s.target));
    rules.dist("install", "path/to/nowhere")
         .dep(|s| s.name("default:dist"))
         .run(move |s| install::install(build, s.stage, s.target));
    rules.dist("dist-cargo", "cargo")
         .host(true)
         .only_host_build(true)
         .dep(|s| s.name("tool-cargo"))
         .run(move |s| dist::cargo(build, s.stage, s.target));
    rules.dist("dist-extended", "extended")
         .default(build.config.extended)
         .host(true)
         .only_host_build(true)
         .dep(|d| d.name("dist-std"))
         .dep(|d| d.name("dist-rustc"))
         .dep(|d| d.name("dist-mingw"))
         .dep(|d| d.name("dist-docs"))
         .dep(|d| d.name("dist-cargo"))
         .run(move |s| dist::extended(build, s.stage, s.target));

    rules.dist("dist-sign", "hash-and-sign")
         .host(true)
         .only_build(true)
         .only_host_build(true)
         .dep(move |s| s.name("tool-build-manifest").target(&build.config.build).stage(0))
         .run(move |_| dist::hash_and_sign(build));

    rules.verify();
    return rules;
}

/// One node in the planned execution graph: the name of a rule defined in
/// `build_rules` plus the (stage, host, target) context it executes with.
/// Hashable/comparable so the planner can deduplicate identical steps.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
struct Step<'a> {
    /// Human readable name of the rule this step is executing. Possible names
    /// are all defined above in `build_rules`.
    name: &'a str,

    /// The stage this step is executing in. This is typically 0, 1, or 2.
    stage: u32,

    /// This step will likely involve a compiler, and the target that compiler
    /// itself is built for is called the host, this variable. Typically this is
    /// the target of the build machine itself.
    host: &'a str,

    /// The target that this step represents generating. If you're building a
    /// standard library for a new suite of targets, for example, this'll be set
    /// to those targets.
    target: &'a str,
}

impl<'a> Step<'a> {
    /// The empty sentinel step, used by `dep` closures to express
    /// "no dependency".
    fn noop() -> Step<'a> {
        Step { name: "", stage: 0, host: "", target: "" }
    }

    /// Copy of this step with the rule name replaced.
    fn name(&self, name: &'a str) -> Step<'a> {
        let mut next = self.clone();
        next.name = name;
        next
    }

    /// Copy of this step with the stage replaced.
    fn stage(&self, stage: u32) -> Step<'a> {
        let mut next = self.clone();
        next.stage = stage;
        next
    }

    /// Copy of this step with the host triple replaced.
    fn host(&self, host: &'a str) -> Step<'a> {
        let mut next = self.clone();
        next.host = host;
        next
    }

    /// Copy of this step with the target triple replaced.
    fn target(&self, target: &'a str) -> Step<'a> {
        let mut next = self.clone();
        next.target = target;
        next
    }

    /// The `Compiler` (stage + host pair) this step corresponds to.
    fn compiler(&self) -> Compiler<'a> {
        Compiler::new(self.stage, self.host)
    }
}

struct Rule<'a> {
    /// The human readable name of this target, defined in `build_rules`.
    name: &'a str,

    /// The path associated with this target, used in the `./x.py` driver for
    /// easy and ergonomic specification of what to do.
    path: &'a str,

    /// The "kind" of top-level command that this rule is associated with, only
    /// relevant if this is a default rule.
    kind: Kind,

    /// List of dependencies this rule has. Each dependency is a function from a
    /// step that's being executed to another step that should be executed.
    deps: Vec<Box<Fn(&Step<'a>) -> Step<'a> + 'a>>,

    /// How to actually execute this rule. Takes a step with contextual
    /// information and then executes it.
    run: Box<Fn(&Step<'a>) + 'a>,

    /// Whether this is a "default" rule: one that is selected automatically
    /// when a top-level command (e.g. `./x.py test`) is run without any
    /// explicit paths on the command line.
    default: bool,

    /// Whether or not this is a "host" rule, or in other words whether this is
    /// only intended for compiler hosts and not for targets that are being
    /// generated.
    host: bool,

    /// Whether this rule is only for steps where the host is the build triple,
    /// not anything in hosts or targets.
    only_host_build: bool,

    /// Whether this rule is only for the build triple, not anything in hosts or
    /// targets.
    only_build: bool,
}

/// The top-level command a rule belongs to; these mirror the `./x.py`
/// subcommands (`build`, `test`, `bench`, `dist`, `doc`).
#[derive(PartialEq)]
enum Kind {
    Build,
    Test,
    Bench,
    Dist,
    Doc,
}

impl<'a> Rule<'a> {
    /// Creates a rule with the given name, path, and kind. All boolean flags
    /// start cleared, the dependency list starts empty, and the run hook is a
    /// no-op until configured via the builder.
    fn new(name: &'a str, path: &'a str, kind: Kind) -> Rule<'a> {
        Rule {
            name: name,
            path: path,
            kind: kind,
            deps: Vec::new(),
            run: Box::new(|_| ()),
            default: false,
            host: false,
            only_build: false,
            only_host_build: false,
        }
    }
}

/// Builder pattern returned from the various methods on `Rules` which will add
/// the rule to the internal list on `Drop`.
struct RuleBuilder<'a: 'b, 'b> {
    // Registry the finished rule is inserted into when this builder drops.
    rules: &'b mut Rules<'a>,
    // The rule currently being configured.
    rule: Rule<'a>,
}

impl<'a, 'b> RuleBuilder<'a, 'b> {
    /// Appends a dependency: given the step being executed, the closure
    /// yields another step that must be completed first.
    fn dep<F>(&mut self, f: F) -> &mut Self
        where F: Fn(&Step<'a>) -> Step<'a> + 'a,
    {
        self.rule.deps.push(Box::new(f));
        self
    }

    /// Installs the closure that actually executes this rule.
    fn run<F>(&mut self, f: F) -> &mut Self
        where F: Fn(&Step<'a>) + 'a,
    {
        self.rule.run = Box::new(f);
        self
    }

    /// Marks (or unmarks) this rule as selected by default when no explicit
    /// paths are passed on the command line.
    fn default(&mut self, yes: bool) -> &mut Self {
        self.rule.default = yes;
        self
    }

    /// Marks (or unmarks) this rule as host-only.
    fn host(&mut self, yes: bool) -> &mut Self {
        self.rule.host = yes;
        self
    }

    /// Restricts (or unrestricts) this rule to the build triple only.
    fn only_build(&mut self, yes: bool) -> &mut Self {
        self.rule.only_build = yes;
        self
    }

    /// Restricts (or unrestricts) this rule to steps whose host is the build
    /// triple.
    fn only_host_build(&mut self, yes: bool) -> &mut Self {
        self.rule.only_host_build = yes;
        self
    }
}

impl<'a, 'b> Drop for RuleBuilder<'a, 'b> {
    /// On drop, moves the configured rule into the registry, panicking if a
    /// rule with the same name was already registered.
    fn drop(&mut self) {
        // Swap a harmless placeholder into `self.rule` so we can move the
        // finished rule out of `&mut self`.
        let placeholder = Rule::new("", "", Kind::Build);
        let finished = mem::replace(&mut self.rule, placeholder);
        let name = finished.name;
        // The map is keyed by name, so any evicted entry had this same name.
        if self.rules.rules.insert(name, finished).is_some() {
            panic!("duplicate rule named: {}", name);
        }
    }
}

/// The full set of rules known to the build system, keyed by rule name.
pub struct Rules<'a> {
    // Global build context (flags, configuration).
    build: &'a Build,
    // Prototype step: default stage plus the build triple as host/target;
    // concrete steps are derived from it via `Step::name`/`host`/`target`.
    sbuild: Step<'a>,
    rules: BTreeMap<&'a str, Rule<'a>>,
}

impl<'a> Rules<'a> {
    /// Creates an empty rule set tied to `build`. The prototype step
    /// (`sbuild`) records the default stage (`--stage`, else 2) and the build
    /// triple as both host and target.
    fn new(build: &'a Build) -> Rules<'a> {
        Rules {
            build: build,
            sbuild: Step {
                stage: build.flags.stage.unwrap_or(2),
                target: &build.config.build,
                host: &build.config.build,
                name: "",
            },
            rules: BTreeMap::new(),
        }
    }

    /// Creates a new rule of `Kind::Build` with the specified human readable
    /// name and path associated with it.
    ///
    /// The builder returned should be configured further with information such
    /// as how to actually run this rule.
    fn build<'b>(&'b mut self, name: &'a str, path: &'a str)
                 -> RuleBuilder<'a, 'b> {
        self.rule(name, path, Kind::Build)
    }

    /// Same as `build`, but for `Kind::Test`.
    fn test<'b>(&'b mut self, name: &'a str, path: &'a str)
                -> RuleBuilder<'a, 'b> {
        self.rule(name, path, Kind::Test)
    }

    /// Same as `build`, but for `Kind::Bench`.
    fn bench<'b>(&'b mut self, name: &'a str, path: &'a str)
                -> RuleBuilder<'a, 'b> {
        self.rule(name, path, Kind::Bench)
    }

    /// Same as `build`, but for `Kind::Doc`.
    fn doc<'b>(&'b mut self, name: &'a str, path: &'a str)
               -> RuleBuilder<'a, 'b> {
        self.rule(name, path, Kind::Doc)
    }

    /// Same as `build`, but for `Kind::Dist`.
    fn dist<'b>(&'b mut self, name: &'a str, path: &'a str)
                -> RuleBuilder<'a, 'b> {
        self.rule(name, path, Kind::Dist)
    }

    /// Common constructor behind `build`/`test`/`bench`/`doc`/`dist`: returns
    /// a builder whose `Drop` impl registers the rule into `self.rules`.
    fn rule<'b>(&'b mut self,
                name: &'a str,
                path: &'a str,
                kind: Kind) -> RuleBuilder<'a, 'b> {
        RuleBuilder {
            rules: self,
            rule: Rule::new(name, path, kind),
        }
    }

    /// Verifies that the dependency graph defined by all our rules is
    /// correct, i.e. every dependency resolves to a rule that exists, to the
    /// noop step, or to a "default:" pseudo-dependency.
    fn verify(&self) {
        for rule in self.rules.values() {
            for dep in rule.deps.iter() {
                let dep = dep(&self.sbuild.name(rule.name));
                if self.rules.contains_key(&dep.name) || dep.name.starts_with("default:") {
                    continue
                }
                if dep == Step::noop() {
                    continue
                }
                panic!("\

invalid rule dependency graph detected, was a rule added and maybe typo'd?

    `{}` depends on `{}` which does not exist

", rule.name, dep.name);
            }
        }
    }

    /// Prints, for the given subcommand, every selectable rule path (used by
    /// the `./x.py <command> --help`-style output). Unknown commands print
    /// nothing.
    pub fn print_help(&self, command: &str) {
        let kind = match command {
            "build" => Kind::Build,
            "doc" => Kind::Doc,
            "test" => Kind::Test,
            "bench" => Kind::Bench,
            "dist" => Kind::Dist,
            _ => return,
        };
        let rules = self.rules.values().filter(|r| r.kind == kind);
        // Rules with a "nowhere" path are internal and not user-selectable.
        let rules = rules.filter(|r| !r.path.contains("nowhere"));
        let mut rules = rules.collect::<Vec<_>>();
        rules.sort_by_key(|r| r.path);

        println!("Available paths:\n");
        for rule in rules {
            print!("    ./x.py {} {}", command, rule.path);

            println!("");
        }
    }

    /// Construct the top-level build steps that we're going to be executing,
    /// given the subcommand that our build is performing.
    fn plan(&self) -> Vec<Step<'a>> {
        // Ok, the logic here is pretty subtle, and involves quite a few
        // conditionals. The basic idea here is to:
        //
        // 1. First, filter all our rules to the relevant ones. This means that
        //    the command specified corresponds to one of our `Kind` variants,
        //    and we filter all rules based on that.
        //
        // 2. Next, we determine which rules we're actually executing. If a
        //    number of path filters were specified on the command line we look
        //    for those, otherwise we look for anything tagged `default`.
        //    Here we also compute the priority of each rule based on how early
        //    in the command line the matching path filter showed up.
        //
        // 3. Finally, we generate some steps with host and target information.
        //
        // The last step is by far the most complicated and subtle. The basic
        // thinking here is that we want to take the cartesian product of
        // specified hosts and targets and build rules with that. The list of
        // hosts and targets, if not specified, come from the how this build was
        // configured. If the rule we're looking at is a host-only rule the we
        // ignore the list of targets and instead consider the list of hosts
        // also the list of targets.
        //
        // Once the host and target lists are generated we take the cartesian
        // product of the two and then create a step based off them. Note that
        // the stage each step is associated was specified with the `--step`
        // flag on the command line.
        let (kind, paths) = match self.build.flags.cmd {
            Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
            Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
            Subcommand::Test { ref paths, test_args: _ } => (Kind::Test, &paths[..]),
            Subcommand::Bench { ref paths, test_args: _ } => (Kind::Bench, &paths[..]),
            Subcommand::Dist { ref paths, install } => {
                if install {
                    return vec![self.sbuild.name("install")]
                } else {
                    (Kind::Dist, &paths[..])
                }
            }
            // `clean` never reaches the rules engine.
            Subcommand::Clean => panic!(),
        };

        let mut rules: Vec<_> = self.rules.values().filter_map(|rule| {
            if rule.kind != kind {
                return None;
            }

            if paths.len() == 0 && rule.default {
                Some((rule, 0))
            } else {
                paths.iter().position(|path| path.ends_with(rule.path))
                     .map(|priority| (rule, priority))
            }
        }).collect();

        rules.sort_by_key(|&(_, priority)| priority);

        rules.into_iter().flat_map(|(rule, _)| {
            let hosts = if rule.only_host_build || rule.only_build {
                &self.build.config.host[..1]
            } else if self.build.flags.host.len() > 0 {
                &self.build.flags.host
            } else {
                &self.build.config.host
            };
            let targets = if self.build.flags.target.len() > 0 {
                &self.build.flags.target
            } else {
                &self.build.config.target
            };
            // Determine the actual targets participating in this rule.
            // NOTE: We should keep the full projection from build triple to
            // the hosts for the dist steps, now that the hosts array above is
            // truncated to avoid duplication of work in that case. Therefore
            // the original non-shadowed hosts array is used below.
            let arr = if rule.host {
                // If --target was specified but --host wasn't specified,
                // don't run any host-only tests. Also, respect any `--host`
                // overrides as done for `hosts`.
                if self.build.flags.host.len() > 0 {
                    &self.build.flags.host[..]
                } else if self.build.flags.target.len() > 0 {
                    &[]
                } else if rule.only_build {
                    &self.build.config.host[..1]
                } else {
                    &self.build.config.host[..]
                }
            } else {
                targets
            };

            hosts.iter().flat_map(move |host| {
                arr.iter().map(move |target| {
                    self.sbuild.name(rule.name).target(target).host(host)
                })
            })
        }).collect()
    }

    /// Execute all top-level targets indicated by `steps`.
    ///
    /// This will take the list returned by `plan` and then execute each step
    /// along with all required dependencies as it goes up the chain.
    fn run(&self, steps: &[Step<'a>]) {
        self.build.verbose("bootstrap top targets:");
        for step in steps.iter() {
            self.build.verbose(&format!("\t{:?}", step));
        }

        // Using `steps` as the top-level targets, make a topological ordering
        // of what we need to do.
        let order = self.expand(steps);

        // Print out what we're doing for debugging
        self.build.verbose("bootstrap build plan:");
        for step in order.iter() {
            self.build.verbose(&format!("\t{:?}", step));
        }

        // And finally, iterate over everything and execute it.
        for step in order.iter() {
            // `--keep-stage N` skips re-running steps at stage N or below.
            if self.build.flags.keep_stage.map_or(false, |s| step.stage <= s) {
                self.build.verbose(&format!("keeping step {:?}", step));
                continue;
            }
            self.build.verbose(&format!("executing step {:?}", step));
            (self.rules[step.name].run)(step);
        }
    }

    /// From the top level targets `steps` generate a topological ordering of
    /// all steps needed to run those steps.
    fn expand(&self, steps: &[Step<'a>]) -> Vec<Step<'a>> {
        let mut order = Vec::new();
        let mut added = HashSet::new();
        // Pre-seed with the noop step so it is never scheduled.
        added.insert(Step::noop());
        for step in steps.iter().cloned() {
            self.fill(step, &mut order, &mut added);
        }
        return order
    }

    /// Performs topological sort of dependencies rooted at the `step`
    /// specified, pushing all results onto the `order` vector provided.
    ///
    /// In other words, when this method returns, the `order` vector will
    /// contain a list of steps which if executed in order will eventually
    /// complete the `step` specified as well.
    ///
    /// The `added` set specified here is the set of steps that are already
    /// present in `order` (and hence don't need to be added again).
    fn fill(&self,
            step: Step<'a>,
            order: &mut Vec<Step<'a>>,
            added: &mut HashSet<Step<'a>>) {
        if !added.insert(step.clone()) {
            return
        }
        for dep in self.rules[step.name].deps.iter() {
            let dep = dep(&step);
            // "default:<kind>" is a pseudo-dependency expanding to every
            // default rule of that kind (host rules only on host triples).
            if dep.name.starts_with("default:") {
                let kind = match &dep.name[8..] {
                    "doc" => Kind::Doc,
                    "dist" => Kind::Dist,
                    kind => panic!("unknown kind: `{}`", kind),
                };
                let host = self.build.config.host.iter().any(|h| h == dep.target);
                let rules = self.rules.values().filter(|r| r.default);
                for rule in rules.filter(|r| r.kind == kind && (!r.host || host)) {
                    self.fill(dep.name(rule.name), order, added);
                }
            } else {
                self.fill(dep, order, added);
            }
        }
        order.push(step);
    }
}

#[cfg(test)]
mod tests {
    use std::env;

    use Build;
    use config::Config;
    use flags::Flags;

    macro_rules! a {
        ($($a:expr),*) => (vec![$($a.to_string()),*])
    }

    fn build(args: &[&str],
             extra_host: &[&str],
             extra_target: &[&str]) -> Build {
        let mut args = args.iter().map(|s| s.to_string()).collect::<Vec<_>>();
        args.push("--build".to_string());
        args.push("A".to_string());
        let flags = Flags::parse(&args);

        let mut config = Config::default();
        config.docs = true;
        config.build = "A".to_string();
        config.host = vec![config.build.clone()];
        config.host.extend(extra_host.iter().map(|s| s.to_string()));
        config.target = config.host.clone();
        config.target.extend(extra_target.iter().map(|s| s.to_string()));

        let mut build = Build::new(flags, config);
        let cwd = env::current_dir().unwrap();
        build.crates.insert("std".to_string(), ::Crate {
            name: "std".to_string(),
            deps: Vec::new(),
            path: cwd.join("src/std"),
            doc_step: "doc-crate-std".to_string(),
            build_step: "build-crate-std".to_string(),
            test_step: "test-crate-std".to_string(),
            bench_step: "bench-crate-std".to_string(),
            version: String::new(),
        });
        build.crates.insert("test".to_string(), ::Crate {
            name: "test".to_string(),
            deps: Vec::new(),
            path: cwd.join("src/test"),
            doc_step: "doc-crate-test".to_string(),
            build_step: "build-crate-test".to_string(),
            test_step: "test-crate-test".to_string(),
            bench_step: "bench-crate-test".to_string(),
            version: String::new(),
        });
        build.crates.insert("rustc-main".to_string(), ::Crate {
            name: "rustc-main".to_string(),
            deps: Vec::new(),
            version: String::new(),
            path: cwd.join("src/rustc-main"),
            doc_step: "doc-crate-rustc-main".to_string(),
            build_step: "build-crate-rustc-main".to_string(),
            test_step: "test-crate-rustc-main".to_string(),
            bench_step: "bench-crate-rustc-main".to_string(),
        });
        return build
    }

    #[test]
    fn dist_baseline() {
        let build = build(&["dist"], &[], &[]);
        let rules = super::build_rules(&build);
        let plan = rules.plan();
        println!("rules: {:#?}", plan);
        assert!(plan.iter().all(|s| s.stage == 2));
        assert!(plan.iter().all(|s| s.host == "A" ));
        assert!(plan.iter().all(|s| s.target == "A" ));

        let step = super::Step {
            name: "",
            stage: 2,
            host: &build.config.build,
            target: &build.config.build,
        };

        assert!(plan.contains(&step.name("dist-docs")));
        assert!(plan.contains(&step.name("dist-mingw")));
        assert!(plan.contains(&step.name("dist-rustc")));
        assert!(plan.contains(&step.name("dist-std")));
        assert!(plan.contains(&step.name("dist-src")));
    }

    #[test]
    fn dist_with_targets() {
        let build = build(&["dist"], &[], &["B"]);
        let rules = super::build_rules(&build);
        let plan = rules.plan();
        println!("rules: {:#?}", plan);
        assert!(plan.iter().all(|s| s.stage == 2));
        assert!(plan.iter().all(|s| s.host == "A" ));

        let step = super::Step {
            name: "",
            stage: 2,
            host: &build.config.build,
            target: &build.config.build,
        };

        assert!(plan.contains(&step.name("dist-docs")));
        assert!(plan.contains(&step.name("dist-mingw")));
        assert!(plan.contains(&step.name("dist-rustc")));
        assert!(plan.contains(&step.name("dist-std")));
        assert!(plan.contains(&step.name("dist-src")));

        assert!(plan.contains(&step.target("B").name("dist-docs")));
        assert!(plan.contains(&step.target("B").name("dist-mingw")));
        assert!(!plan.contains(&step.target("B").name("dist-rustc")));
        assert!(plan.contains(&step.target("B").name("dist-std")));
        assert!(!plan.contains(&step.target("B").name("dist-src")));
    }

    #[test]
    fn dist_with_hosts() {
        let build = build(&["dist"], &["B"], &[]);
        let rules = super::build_rules(&build);
        let plan = rules.plan();
        println!("rules: {:#?}", plan);
        assert!(plan.iter().all(|s| s.stage == 2));

        let step = super::Step {
            name: "",
            stage: 2,
            host: &build.config.build,
            target: &build.config.build,
        };

        assert!(!plan.iter().any(|s| s.host == "B"));

        assert!(plan.contains(&step.name("dist-docs")));
        assert!(plan.contains(&step.name("dist-mingw")));
        assert!(plan.contains(&step.name("dist-rustc")));
        assert!(plan.contains(&step.name("dist-std")));
        assert!(plan.contains(&step.name("dist-src")));

        assert!(plan.contains(&step.target("B").name("dist-docs")));
        assert!(plan.contains(&step.target("B").name("dist-mingw")));
        assert!(plan.contains(&step.target("B").name("dist-rustc")));
        assert!(plan.contains(&step.target("B").name("dist-std")));
        assert!(!plan.contains(&step.target("B").name("dist-src")));
    }

    #[test]
    fn dist_with_targets_and_hosts() {
        let build = build(&["dist"], &["B"], &["C"]);
        let rules = super::build_rules(&build);
        let plan = rules.plan();
        println!("rules: {:#?}", plan);
        assert!(plan.iter().all(|s| s.stage == 2));

        let step = super::Step {
            name: "",
            stage: 2,
            host: &build.config.build,
            target: &build.config.build,
        };

        assert!(!plan.iter().any(|s| s.host == "B"));
        assert!(!plan.iter().any(|s| s.host == "C"));

        assert!(plan.contains(&step.name("dist-docs")));
        assert!(plan.contains(&step.name("dist-mingw")));
        assert!(plan.contains(&step.name("dist-rustc")));
        assert!(plan.contains(&step.name("dist-std")));
        assert!(plan.contains(&step.name("dist-src")));

        assert!(plan.contains(&step.target("B").name("dist-docs")));
        assert!(plan.contains(&step.target("B").name("dist-mingw")));
        assert!(plan.contains(&step.target("B").name("dist-rustc")));
        assert!(plan.contains(&step.target("B").name("dist-std")));
        assert!(!plan.contains(&step.target("B").name("dist-src")));

        assert!(plan.contains(&step.target("C").name("dist-docs")));
        assert!(plan.contains(&step.target("C").name("dist-mingw")));
        assert!(!plan.contains(&step.target("C").name("dist-rustc")));
        assert!(plan.contains(&step.target("C").name("dist-std")));
        assert!(!plan.contains(&step.target("C").name("dist-src")));
    }

    #[test]
    fn dist_target_with_target_flag() {
        let build = build(&["dist", "--target=C"], &["B"], &["C"]);
        let rules = super::build_rules(&build);
        let plan = rules.plan();
        println!("rules: {:#?}", plan);
        assert!(plan.iter().all(|s| s.stage == 2));

        let step = super::Step {
            name: "",
            stage: 2,
            host: &build.config.build,
            target: &build.config.build,
        };

        assert!(!plan.iter().any(|s| s.target == "A"));
        assert!(!plan.iter().any(|s| s.target == "B"));
        assert!(!plan.iter().any(|s| s.host == "B"));
        assert!(!plan.iter().any(|s| s.host == "C"));

        assert!(plan.contains(&step.target("C").name("dist-docs")));
        assert!(plan.contains(&step.target("C").name("dist-mingw")));
        assert!(!plan.contains(&step.target("C").name("dist-rustc")));
        assert!(plan.contains(&step.target("C").name("dist-std")));
        assert!(!plan.contains(&step.target("C").name("dist-src")));
    }

    #[test]
    fn dist_host_with_target_flag() {
        let build = build(&["dist", "--host=B", "--target=B"], &["B"], &["C"]);
        let rules = super::build_rules(&build);
        let plan = rules.plan();
        println!("rules: {:#?}", plan);
        assert!(plan.iter().all(|s| s.stage == 2));

        let step = super::Step {
            name: "",
            stage: 2,
            host: &build.config.build,
            target: &build.config.build,
        };

        assert!(!plan.iter().any(|s| s.target == "A"));
        assert!(!plan.iter().any(|s| s.target == "C"));
        assert!(!plan.iter().any(|s| s.host == "B"));
        assert!(!plan.iter().any(|s| s.host == "C"));

        assert!(plan.contains(&step.target("B").name("dist-docs")));
        assert!(plan.contains(&step.target("B").name("dist-mingw")));
        assert!(plan.contains(&step.target("B").name("dist-rustc")));
        assert!(plan.contains(&step.target("B").name("dist-std")));
        assert!(plan.contains(&step.target("B").name("dist-src")));

        let all = rules.expand(&plan);
        println!("all rules: {:#?}", all);
        assert!(!all.contains(&step.name("rustc")));
        assert!(!all.contains(&step.name("build-crate-test").stage(1)));

        // all stage0 compiles should be for the build target, A
        for step in all.iter().filter(|s| s.stage == 0) {
            if !step.name.contains("build-crate") {
                continue
            }
            println!("step: {:?}", step);
            assert!(step.host != "B");
            assert!(step.target != "B");
            assert!(step.host != "C");
            assert!(step.target != "C");
        }
    }

    #[test]
    fn build_default() {
        let build = build(&["build"], &["B"], &["C"]);
        let rules = super::build_rules(&build);
        let plan = rules.plan();
        println!("rules: {:#?}", plan);
        assert!(plan.iter().all(|s| s.stage == 2));

        let step = super::Step {
            name: "",
            stage: 2,
            host: &build.config.build,
            target: &build.config.build,
        };

        // rustc built for all for of (A, B) x (A, B)
        assert!(plan.contains(&step.name("librustc")));
        assert!(plan.contains(&step.target("B").name("librustc")));
        assert!(plan.contains(&step.host("B").target("A").name("librustc")));
        assert!(plan.contains(&step.host("B").target("B").name("librustc")));

        // rustc never built for C
        assert!(!plan.iter().any(|s| {
            s.name.contains("rustc") && (s.host == "C" || s.target == "C")
        }));

        // test built for everything
        assert!(plan.contains(&step.name("libtest")));
        assert!(plan.contains(&step.target("B").name("libtest")));
        assert!(plan.contains(&step.host("B").target("A").name("libtest")));
        assert!(plan.contains(&step.host("B").target("B").name("libtest")));
        assert!(plan.contains(&step.host("A").target("C").name("libtest")));
        assert!(plan.contains(&step.host("B").target("C").name("libtest")));

        let all = rules.expand(&plan);
        println!("all rules: {:#?}", all);
        assert!(all.contains(&step.name("rustc")));
        assert!(all.contains(&step.name("libstd")));
    }

    #[test]
    fn build_filtered() {
        let build = build(&["build", "--target=C"], &["B"], &["C"]);
        let rules = super::build_rules(&build);
        let plan = rules.plan();
        println!("rules: {:#?}", plan);
        assert!(plan.iter().all(|s| s.stage == 2));

        assert!(!plan.iter().any(|s| s.name.contains("rustc")));
        assert!(plan.iter().all(|s| {
            !s.name.contains("test") || s.target == "C"
        }));
    }

    #[test]
    fn test_default() {
        let build = build(&["test"], &[], &[]);
        let rules = super::build_rules(&build);
        let plan = rules.plan();
        println!("rules: {:#?}", plan);
        assert!(plan.iter().all(|s| s.stage == 2));
        assert!(plan.iter().all(|s| s.host == "A"));
        assert!(plan.iter().all(|s| s.target == "A"));

        assert!(plan.iter().any(|s| s.name.contains("-ui")));
        assert!(plan.iter().any(|s| s.name.contains("cfail")));
        assert!(plan.iter().any(|s| s.name.contains("cfail-full")));
        assert!(plan.iter().any(|s| s.name.contains("codegen-units")));
        assert!(plan.iter().any(|s| s.name.contains("debuginfo")));
        assert!(plan.iter().any(|s| s.name.contains("docs")));
        assert!(plan.iter().any(|s| s.name.contains("error-index")));
        assert!(plan.iter().any(|s| s.name.contains("incremental")));
        assert!(plan.iter().any(|s| s.name.contains("linkchecker")));
        assert!(plan.iter().any(|s| s.name.contains("mir-opt")));
        assert!(plan.iter().any(|s| s.name.contains("pfail")));
        assert!(plan.iter().any(|s| s.name.contains("rfail")));
        assert!(plan.iter().any(|s| s.name.contains("rfail-full")));
        assert!(plan.iter().any(|s| s.name.contains("rmake")));
        assert!(plan.iter().any(|s| s.name.contains("rpass")));
        assert!(plan.iter().any(|s| s.name.contains("rpass-full")));
        assert!(plan.iter().any(|s| s.name.contains("rustc-all")));
        assert!(plan.iter().any(|s| s.name.contains("rustdoc")));
        assert!(plan.iter().any(|s| s.name.contains("std-all")));
        assert!(plan.iter().any(|s| s.name.contains("test-all")));
        assert!(plan.iter().any(|s| s.name.contains("tidy")));
        assert!(plan.iter().any(|s| s.name.contains("valgrind")));
    }

    #[test]
    fn test_with_a_target() {
        let build = build(&["test", "--target=C"], &[], &["C"]);
        let rules = super::build_rules(&build);
        let plan = rules.plan();
        println!("rules: {:#?}", plan);
        assert!(plan.iter().all(|s| s.stage == 2));
        assert!(plan.iter().all(|s| s.host == "A"));
        assert!(plan.iter().all(|s| s.target == "C"));

        assert!(plan.iter().any(|s| s.name.contains("-ui")));
        assert!(!plan.iter().any(|s| s.name.contains("ui-full")));
        assert!(plan.iter().any(|s| s.name.contains("cfail")));
        assert!(!plan.iter().any(|s| s.name.contains("cfail-full")));
        assert!(plan.iter().any(|s| s.name.contains("codegen-units")));
        assert!(plan.iter().any(|s| s.name.contains("debuginfo")));
        assert!(!plan.iter().any(|s| s.name.contains("docs")));
        assert!(!plan.iter().any(|s| s.name.contains("error-index")));
        assert!(plan.iter().any(|s| s.name.contains("incremental")));
        assert!(!plan.iter().any(|s| s.name.contains("linkchecker")));
        assert!(plan.iter().any(|s| s.name.contains("mir-opt")));
        assert!(plan.iter().any(|s| s.name.contains("pfail")));
        assert!(plan.iter().any(|s| s.name.contains("rfail")));
        assert!(!plan.iter().any(|s| s.name.contains("rfail-full")));
        assert!(!plan.iter().any(|s| s.name.contains("rmake")));
        assert!(plan.iter().any(|s| s.name.contains("rpass")));
        assert!(!plan.iter().any(|s| s.name.contains("rpass-full")));
        assert!(!plan.iter().any(|s| s.name.contains("rustc-all")));
        assert!(!plan.iter().any(|s| s.name.contains("rustdoc")));
        assert!(plan.iter().any(|s| s.name.contains("std-all")));
        assert!(plan.iter().any(|s| s.name.contains("test-all")));
        assert!(!plan.iter().any(|s| s.name.contains("tidy")));
        assert!(plan.iter().any(|s| s.name.contains("valgrind")));
    }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




















































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/util.rs version [d7366913e0].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Various utility functions used throughout rustbuild.
//!
//! Simple things like testing the various filesystem operations here and there,
//! not a lot of interesting happenings here unfortunately.

use std::env;
use std::ffi::OsString;
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::Instant;

use filetime::{self, FileTime};

/// Returns the `name` as the filename of a static library for `target`.
///
/// Windows targets use the MSVC-style `<name>.lib`; everything else gets
/// the Unix-style `lib<name>.a`.
pub fn staticlib(name: &str, target: &str) -> String {
    match target.contains("windows") {
        true => format!("{}.lib", name),
        false => format!("lib{}.a", name),
    }
}

/// Copies a file from `src` to `dst`, replicating the source's permission
/// bits and access/modification timestamps onto the destination.
///
/// NOTE(review): this doc previously claimed hard links are attempted first,
/// but the `hard_link` call below is commented out — only a plain
/// `fs::copy` is performed. Panics (via `panic!`/`t!`) on any failure.
pub fn copy(src: &Path, dst: &Path) {
    // A call to `hard_link` will fail if `dst` exists, so remove it if it
    // already exists so we can try to help `hard_link` succeed.
    let _ = fs::remove_file(&dst);

    // Attempt to "easy copy" by creating a hard link (symlinks don't work on
    // windows), but if that fails just fall back to a slow `copy` operation.
    // let res = fs::hard_link(src, dst);
    let res = fs::copy(src, dst);
    if let Err(e) = res {
        panic!("failed to copy `{}` to `{}`: {}", src.display(),
               dst.display(), e)
    }
    // Mirror the source's permissions and file times so the copy looks
    // identical to the original for downstream tooling (e.g. make-style
    // timestamp comparisons).
    let metadata = t!(src.metadata());
    t!(fs::set_permissions(dst, metadata.permissions()));
    let atime = FileTime::from_last_access_time(&metadata);
    let mtime = FileTime::from_last_modification_time(&metadata);
    t!(filetime::set_file_times(dst, atime, mtime));

}

/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
/// when this function is called.
pub fn cp_r(src: &Path, dst: &Path) {
    for entry in t!(fs::read_dir(src)) {
        let entry = t!(entry);
        let from = entry.path();
        let to = dst.join(from.file_name().unwrap());
        if t!(entry.file_type()).is_dir() {
            // Descend: make sure the target directory exists, then recurse.
            t!(fs::create_dir_all(&to));
            cp_r(&from, &to);
        } else {
            // Plain file: drop any stale destination first, then copy.
            let _ = fs::remove_file(&to);
            copy(&from, &to);
        }
    }
}

/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
/// when this function is called. Unwanted files or directories can be skipped
/// by returning `false` from the filter function.
///
/// The filter receives paths *relative to the top-level `src`*, so callers
/// can match on logical locations rather than absolute paths.
pub fn cp_filtered(src: &Path, dst: &Path, filter: &Fn(&Path) -> bool) {
    // Inner function does the actual work
    fn recurse(src: &Path, dst: &Path, relative: &Path, filter: &Fn(&Path) -> bool) {
        for f in t!(fs::read_dir(src)) {
            let f = t!(f);
            let path = f.path();
            let name = path.file_name().unwrap();
            let dst = dst.join(name);
            // Extend the src-relative path with this entry's name; this is
            // what the filter gets to inspect.
            let relative = relative.join(name);
            // Only copy file or directory if the filter function returns true
            if filter(&relative) {
                if t!(f.file_type()).is_dir() {
                    // Start from a clean slate so stale files from a previous
                    // copy of this directory cannot survive.
                    let _ = fs::remove_dir_all(&dst);
                    t!(fs::create_dir(&dst));
                    recurse(&path, &dst, &relative, filter);
                } else {
                    let _ = fs::remove_file(&dst);
                    copy(&path, &dst);
                }
            }
        }
    }
    // Immediately recurse with an empty relative path
    recurse(src, dst, Path::new(""), filter)
}

/// Given an executable called `name`, return the filename for the
/// executable for a particular target.
///
/// Only Windows targets carry a suffix (`.exe`); all others use the bare
/// name unchanged.
pub fn exe(name: &str, target: &str) -> String {
    if !target.contains("windows") {
        return name.to_string();
    }
    format!("{}.exe", name)
}

/// Returns whether the file name given looks like a dynamic library.
///
/// Recognizes the macOS (`.dylib`), Unix (`.so`), and Windows (`.dll`)
/// shared-library suffixes.
pub fn is_dylib(name: &str) -> bool {
    [".dylib", ".so", ".dll"].iter().any(|suffix| name.ends_with(suffix))
}

/// Returns the corresponding relative library directory that the compiler's
/// dylibs will be found in.
pub fn libdir(target: &str) -> &'static str {
    match target.contains("windows") {
        true => "bin",
        false => "lib",
    }
}

/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path.
///
/// The given `path` entries are prepended to whatever the current process
/// already has in its platform lookup-path variable, so they take
/// precedence over inherited entries.
pub fn add_lib_path(path: Vec<PathBuf>, cmd: &mut Command) {
    let mut list = dylib_path();
    for path in path {
        // Insert at the front so the new entries win over existing ones.
        list.insert(0, path);
    }
    cmd.env(dylib_path_var(), t!(env::join_paths(list)));
}

/// Returns the environment variable which the dynamic library lookup path
/// resides in for this platform.
pub fn dylib_path_var() -> &'static str {
    if cfg!(target_os = "windows") {
        return "PATH";
    }
    if cfg!(target_os = "macos") {
        return "DYLD_LIBRARY_PATH";
    }
    "LD_LIBRARY_PATH"
}

/// Parses the `dylib_path_var()` environment variable, returning a list of
/// paths that are members of this lookup path.
pub fn dylib_path() -> Vec<PathBuf> {
    env::split_paths(&env::var_os(dylib_path_var()).unwrap_or(OsString::new()))
        .collect()
}

/// `push` all components to `buf`. On windows, append `.exe` to the last component.
///
/// Panics if `components` is empty.
pub fn push_exe_path(mut buf: PathBuf, components: &[&str]) -> PathBuf {
    let (&last, rest) = components.split_last().expect("at least one component required");

    let mut filename = last.to_owned();
    if cfg!(windows) {
        filename.push_str(".exe");
    }

    for component in rest {
        buf.push(component);
    }
    buf.push(filename);

    buf
}

// RAII wall-clock timer: records the creation instant and prints the
// elapsed time when dropped.
pub struct TimeIt(Instant);

/// Returns an RAII structure that prints out how long it took to drop.
pub fn timeit() -> TimeIt {
    TimeIt(Instant::now())
}

impl Drop for TimeIt {
    fn drop(&mut self) {
        // Elapsed time since `timeit()` was called, printed as
        // seconds.milliseconds (nanoseconds truncated to whole ms).
        let time = self.0.elapsed();
        println!("\tfinished in {}.{:03}",
                 time.as_secs(),
                 time.subsec_nanos() / 1_000_000);
    }
}

/// Symlinks two directories, using junctions on Windows and normal symlinks on
/// Unix.
pub fn symlink_dir(src: &Path, dest: &Path) -> io::Result<()> {
    // Best-effort removal of a stale destination; failure (e.g. it does not
    // exist) is deliberately ignored.
    let _ = fs::remove_dir(dest);
    return symlink_dir_inner(src, dest);

    #[cfg(not(windows))]
    fn symlink_dir_inner(src: &Path, dest: &Path) -> io::Result<()> {
        use std::os::unix::fs;
        fs::symlink(src, dest)
    }

    // Creating a directory junction on windows involves dealing with reparse
    // points and the DeviceIoControl function, and this code is a skeleton of
    // what can be found here:
    //
    // http://www.flexhex.com/docs/articles/hard-links.phtml
    //
    // Copied from std
    #[cfg(windows)]
    #[allow(bad_style)]
    fn symlink_dir_inner(target: &Path, junction: &Path) -> io::Result<()> {
        use std::ptr;
        use std::ffi::OsStr;
        use std::os::windows::ffi::OsStrExt;

        // Win32 constants and type aliases declared by hand so no external
        // crate (e.g. winapi/kernel32) is needed.
        const MAXIMUM_REPARSE_DATA_BUFFER_SIZE: usize = 16 * 1024;
        const GENERIC_WRITE: DWORD = 0x40000000;
        const OPEN_EXISTING: DWORD = 3;
        const FILE_FLAG_OPEN_REPARSE_POINT: DWORD = 0x00200000;
        const FILE_FLAG_BACKUP_SEMANTICS: DWORD = 0x02000000;
        const FSCTL_SET_REPARSE_POINT: DWORD = 0x900a4;
        const IO_REPARSE_TAG_MOUNT_POINT: DWORD = 0xa0000003;
        const FILE_SHARE_DELETE: DWORD = 0x4;
        const FILE_SHARE_READ: DWORD = 0x1;
        const FILE_SHARE_WRITE: DWORD = 0x2;

        type BOOL = i32;
        type DWORD = u32;
        type HANDLE = *mut u8;
        type LPCWSTR = *const u16;
        type LPDWORD = *mut DWORD;
        type LPOVERLAPPED = *mut u8;
        type LPSECURITY_ATTRIBUTES = *mut u8;
        type LPVOID = *mut u8;
        type WCHAR = u16;
        type WORD = u16;

        // Layout of REPARSE_MOUNTPOINT_DATA_BUFFER; `ReparseTarget` is the
        // first element of a variable-length WCHAR array.
        #[repr(C)]
        struct REPARSE_MOUNTPOINT_DATA_BUFFER {
            ReparseTag: DWORD,
            ReparseDataLength: DWORD,
            Reserved: WORD,
            ReparseTargetLength: WORD,
            ReparseTargetMaximumLength: WORD,
            Reserved1: WORD,
            ReparseTarget: WCHAR,
        }

        extern "system" {
            fn CreateFileW(lpFileName: LPCWSTR,
                           dwDesiredAccess: DWORD,
                           dwShareMode: DWORD,
                           lpSecurityAttributes: LPSECURITY_ATTRIBUTES,
                           dwCreationDisposition: DWORD,
                           dwFlagsAndAttributes: DWORD,
                           hTemplateFile: HANDLE)
                           -> HANDLE;
            fn DeviceIoControl(hDevice: HANDLE,
                               dwIoControlCode: DWORD,
                               lpInBuffer: LPVOID,
                               nInBufferSize: DWORD,
                               lpOutBuffer: LPVOID,
                               nOutBufferSize: DWORD,
                               lpBytesReturned: LPDWORD,
                               lpOverlapped: LPOVERLAPPED) -> BOOL;
        }

        // NUL-terminated UTF-16 encoding for Win32 wide-string APIs.
        fn to_u16s<S: AsRef<OsStr>>(s: S) -> io::Result<Vec<u16>> {
            Ok(s.as_ref().encode_wide().chain(Some(0)).collect())
        }

        // We're using low-level APIs to create the junction, and these are more
        // picky about paths. For example, forward slashes cannot be used as a
        // path separator, so we should try to canonicalize the path first.
        let target = try!(fs::canonicalize(target));

        // A junction is set *on* an existing (empty) directory.
        try!(fs::create_dir(junction));

        let path = try!(to_u16s(junction));

        unsafe {
            // Open the new directory with reparse-point access.
            // NOTE(review): `h` is not compared against INVALID_HANDLE_VALUE;
            // an open failure would only surface later via DeviceIoControl.
            let h = CreateFileW(path.as_ptr(),
                                GENERIC_WRITE,
                                FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
                                0 as *mut _,
                                OPEN_EXISTING,
                                FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
                                ptr::null_mut());

            // Build the mount-point reparse data in-place inside a raw byte
            // buffer; `buf` points at the variable-length target string.
            let mut data = [0u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE];
            let mut db = data.as_mut_ptr()
                            as *mut REPARSE_MOUNTPOINT_DATA_BUFFER;
            let buf = &mut (*db).ReparseTarget as *mut _;
            let mut i = 0;
            // FIXME: this conversion is very hacky
            // `\??\` is the NT namespace prefix; `skip(4)` drops the
            // `\\?\` prefix that `canonicalize` produced.
            let v = br"\??\";
            let v = v.iter().map(|x| *x as u16);
            for c in v.chain(target.as_os_str().encode_wide().skip(4)) {
                *buf.offset(i) = c;
                i += 1;
            }
            *buf.offset(i) = 0;
            i += 1;
            // Lengths are in bytes (2 bytes per WCHAR); the stored length
            // excludes the terminating NUL.
            (*db).ReparseTag = IO_REPARSE_TAG_MOUNT_POINT;
            (*db).ReparseTargetMaximumLength = (i * 2) as WORD;
            (*db).ReparseTargetLength = ((i - 1) * 2) as WORD;
            (*db).ReparseDataLength =
                    (*db).ReparseTargetLength as DWORD + 12;

            // Install the reparse point on the directory handle.
            let mut ret = 0;
            let res = DeviceIoControl(h as *mut _,
                                      FSCTL_SET_REPARSE_POINT,
                                      data.as_ptr() as *mut _,
                                      (*db).ReparseDataLength + 8,
                                      ptr::null_mut(), 0,
                                      &mut ret,
                                      ptr::null_mut());

            if res == 0 {
                Err(io::Error::last_os_error())
            } else {
                Ok(())
            }
        }
    }
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/.gitignore version [9b3020b6e1].

1
2
3
4
5
# Editor backup files
*~
# Build output directories produced by the Makefile-based build
darwin_fat
clang_darwin
multi_arch
# Vim swap files (.swp, .swo, ...)
*.sw?
<
<
<
<
<










Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/CODE_OWNERS.TXT version [f3067dda99].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
This file is a list of the people responsible for ensuring that patches for a
particular part of compiler-rt are reviewed, either by themselves or by
someone else. They are also the gatekeepers for their part of compiler-rt, with
the final word on what goes in or not.

The list is sorted by surname and formatted to allow easy grepping and
beautification by scripts. The fields are: name (N), email (E), web-address
(W), PGP key ID and fingerprint (P), description (D), and snail-mail address
(S).

N: Peter Collingbourne
E: peter@pcc.me.uk
D: DataFlowSanitizer

N: Daniel Dunbar
E: daniel@zuster.org
D: Makefile build

N: Timur Iskhodzhanov
E: timurrrr@google.com
D: AddressSanitizer for Windows

N: Howard Hinnant
E: howard.hinnant@gmail.com
D: builtins library

N: Alexander Potapenko
E: glider@google.com
D: MacOS/iOS port of sanitizers

N: Alexey Samsonov
E: samsonov@google.com
D: CMake build, test suite

N: Kostya Serebryany
E: kcc@google.com
D: AddressSanitizer, sanitizer_common, porting sanitizers to another platforms, LeakSanitizer

N: Richard Smith
E: richard-llvm@metafoo.co.uk
D: UndefinedBehaviorSanitizer

N: Evgeniy Stepanov
E: eugenis@google.com
D: MemorySanitizer, Android port of sanitizers

N: Dmitry Vyukov
E: dvyukov@google.com
D: ThreadSanitizer

N: Bill Wendling
E: isanbard@gmail.com
D: Profile runtime library
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<










































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/CMakeLists.txt version [6285a83cd8].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
# This directory contains a large amount of C code which provides
# generic implementations of the core runtime library along with optimized
# architecture-specific code in various subdirectories.

# When this directory is configured directly (rather than as part of the
# larger compiler-rt / LLVM build), bootstrap the minimal CMake machinery
# that the parent build would otherwise have provided.
if (CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR)
  cmake_minimum_required(VERSION 3.4.3)

  project(CompilerRTBuiltins C ASM)
  set(COMPILER_RT_STANDALONE_BUILD TRUE)
  set(COMPILER_RT_BUILTINS_STANDALONE_BUILD TRUE)
  # Make the shared compiler-rt CMake modules discoverable.
  list(INSERT CMAKE_MODULE_PATH 0
    "${CMAKE_SOURCE_DIR}/../../cmake"
    "${CMAKE_SOURCE_DIR}/../../cmake/Modules")
  include(base-config-ix)
  include(CompilerRTUtils)
  if(APPLE)
    include(CompilerRTDarwinUtils)
  endif()
  include(AddCompilerRT)
endif()

include(builtin-config-ix)

# TODO: Need to add a mechanism for logging errors when builtin source files are
# added to a sub-directory and not this CMakeLists file.
set(GENERIC_SOURCES
  absvdi2.c
  absvsi2.c
  absvti2.c
  adddf3.c
  addsf3.c
  addtf3.c
  addvdi3.c
  addvsi3.c
  addvti3.c
  apple_versioning.c
  ashldi3.c
  ashlti3.c
  ashrdi3.c
  ashrti3.c
  # FIXME: atomic.c may only be compiled if host compiler understands _Atomic
  # atomic.c
  clear_cache.c
  clzdi2.c
  clzsi2.c
  clzti2.c
  cmpdi2.c
  cmpti2.c
  comparedf2.c
  comparesf2.c
  cpu_model.c
  ctzdi2.c
  ctzsi2.c
  ctzti2.c
  divdc3.c
  divdf3.c
  divdi3.c
  divmoddi4.c
  divmodsi4.c
  divsc3.c
  divsf3.c
  divsi3.c
  divtc3.c
  divti3.c
  divtf3.c
  divxc3.c
  enable_execute_stack.c
  eprintf.c
  extendsfdf2.c
  extendhfsf2.c
  ffsdi2.c
  ffsti2.c
  fixdfdi.c
  fixdfsi.c
  fixdfti.c
  fixsfdi.c
  fixsfsi.c
  fixsfti.c
  fixunsdfdi.c
  fixunsdfsi.c
  fixunsdfti.c
  fixunssfdi.c
  fixunssfsi.c
  fixunssfti.c
  fixunsxfdi.c
  fixunsxfsi.c
  fixunsxfti.c
  fixxfdi.c
  fixxfti.c
  floatdidf.c
  floatdisf.c
  floatdixf.c
  floatsidf.c
  floatsisf.c
  floattidf.c
  floattisf.c
  floattixf.c
  floatundidf.c
  floatundisf.c
  floatundixf.c
  floatunsidf.c
  floatunsisf.c
  floatuntidf.c
  floatuntisf.c
  floatuntixf.c
  int_util.c
  lshrdi3.c
  lshrti3.c
  moddi3.c
  modsi3.c
  modti3.c
  muldc3.c
  muldf3.c
  muldi3.c
  mulodi4.c
  mulosi4.c
  muloti4.c
  mulsc3.c
  mulsf3.c
  multi3.c
  multf3.c
  mulvdi3.c
  mulvsi3.c
  mulvti3.c
  mulxc3.c
  negdf2.c
  negdi2.c
  negsf2.c
  negti2.c
  negvdi2.c
  negvsi2.c
  negvti2.c
  paritydi2.c
  paritysi2.c
  parityti2.c
  popcountdi2.c
  popcountsi2.c
  popcountti2.c
  powidf2.c
  powisf2.c
  powitf2.c
  powixf2.c
  subdf3.c
  subsf3.c
  subvdi3.c
  subvsi3.c
  subvti3.c
  subtf3.c
  trampoline_setup.c
  truncdfhf2.c
  truncdfsf2.c
  truncsfhf2.c
  ucmpdi2.c
  ucmpti2.c
  udivdi3.c
  udivmoddi4.c
  udivmodsi4.c
  udivmodti4.c
  udivsi3.c
  udivti3.c
  umoddi3.c
  umodsi3.c
  umodti3.c)

set(MSVC_SOURCES
 divsc3.c
 divdc3.c
 divxc3.c
 mulsc3.c
 muldc3.c
 mulxc3.c)


if(APPLE)
  set(GENERIC_SOURCES
    ${GENERIC_SOURCES}
    atomic_flag_clear.c
    atomic_flag_clear_explicit.c
    atomic_flag_test_and_set.c
    atomic_flag_test_and_set_explicit.c
    atomic_signal_fence.c
    atomic_thread_fence.c)
endif()

if(NOT WIN32 OR MINGW)
  set(GENERIC_SOURCES
      ${GENERIC_SOURCES}
      emutls.c)
endif()

if (HAVE_UNWIND_H)
  set(GENERIC_SOURCES
      ${GENERIC_SOURCES}
      gcc_personality_v0.c)
endif ()

if (NOT MSVC)
  set(x86_64_SOURCES
      x86_64/chkstk.S
      x86_64/chkstk2.S
      x86_64/floatdidf.c
      x86_64/floatdisf.c
      x86_64/floatdixf.c
      x86_64/floatundidf.S
      x86_64/floatundisf.S
      x86_64/floatundixf.S
      ${GENERIC_SOURCES})
  set(x86_64h_SOURCES ${x86_64_SOURCES})

  if (WIN32)
    set(x86_64_SOURCES
        ${x86_64_SOURCES}
        x86_64/chkstk.S
        x86_64/chkstk2.S)
  endif()

  set(i386_SOURCES
      i386/ashldi3.S
      i386/ashrdi3.S
      i386/chkstk.S
      i386/chkstk2.S
      i386/divdi3.S
      i386/floatdidf.S
      i386/floatdisf.S
      i386/floatdixf.S
      i386/floatundidf.S
      i386/floatundisf.S
      i386/floatundixf.S
      i386/lshrdi3.S
      i386/moddi3.S
      i386/muldi3.S
      i386/udivdi3.S
      i386/umoddi3.S
      ${GENERIC_SOURCES})

  if (WIN32)
    set(i386_SOURCES
        ${i386_SOURCES}
        i386/chkstk.S
        i386/chkstk2.S)
  endif()

  set(i686_SOURCES
      ${i386_SOURCES})
else () # MSVC
  # Use C versions of functions when building on MSVC
  # MSVC's assembler takes Intel syntax, not AT&T syntax.
  # Also use only MSVC compilable builtin implementations.
  set(x86_64_SOURCES
      x86_64/floatdidf.c
      x86_64/floatdisf.c
      x86_64/floatdixf.c
      ${MSVC_SOURCES})
  set(x86_64h_SOURCES ${x86_64_SOURCES})
  set(i386_SOURCES ${MSVC_SOURCES})
  set(i686_SOURCES ${i386_SOURCES})
endif () # if (NOT MSVC)

set(arm_SOURCES
  arm/adddf3vfp.S
  arm/addsf3vfp.S
  arm/aeabi_cdcmp.S
  arm/aeabi_cdcmpeq_check_nan.c
  arm/aeabi_cfcmp.S
  arm/aeabi_cfcmpeq_check_nan.c
  arm/aeabi_dcmp.S
  arm/aeabi_div0.c
  arm/aeabi_drsub.c
  arm/aeabi_fcmp.S
  arm/aeabi_frsub.c
  arm/aeabi_idivmod.S
  arm/aeabi_ldivmod.S
  arm/aeabi_memcmp.S
  arm/aeabi_memcpy.S
  arm/aeabi_memmove.S
  arm/aeabi_memset.S
  arm/aeabi_uidivmod.S
  arm/aeabi_uldivmod.S
  arm/bswapdi2.S
  arm/bswapsi2.S
  arm/clzdi2.S
  arm/clzsi2.S
  arm/comparesf2.S
  arm/divdf3vfp.S
  arm/divmodsi4.S
  arm/divsf3vfp.S
  arm/divsi3.S
  arm/eqdf2vfp.S
  arm/eqsf2vfp.S
  arm/extendsfdf2vfp.S
  arm/fixdfsivfp.S
  arm/fixsfsivfp.S
  arm/fixunsdfsivfp.S
  arm/fixunssfsivfp.S
  arm/floatsidfvfp.S
  arm/floatsisfvfp.S
  arm/floatunssidfvfp.S
  arm/floatunssisfvfp.S
  arm/gedf2vfp.S
  arm/gesf2vfp.S
  arm/gtdf2vfp.S
  arm/gtsf2vfp.S
  arm/ledf2vfp.S
  arm/lesf2vfp.S
  arm/ltdf2vfp.S
  arm/ltsf2vfp.S
  arm/modsi3.S
  arm/muldf3vfp.S
  arm/mulsf3vfp.S
  arm/nedf2vfp.S
  arm/negdf2vfp.S
  arm/negsf2vfp.S
  arm/nesf2vfp.S
  arm/restore_vfp_d8_d15_regs.S
  arm/save_vfp_d8_d15_regs.S
  arm/subdf3vfp.S
  arm/subsf3vfp.S
  arm/switch16.S
  arm/switch32.S
  arm/switch8.S
  arm/switchu8.S
  arm/sync_fetch_and_add_4.S
  arm/sync_fetch_and_add_8.S
  arm/sync_fetch_and_and_4.S
  arm/sync_fetch_and_and_8.S
  arm/sync_fetch_and_max_4.S
  arm/sync_fetch_and_max_8.S
  arm/sync_fetch_and_min_4.S
  arm/sync_fetch_and_min_8.S
  arm/sync_fetch_and_nand_4.S
  arm/sync_fetch_and_nand_8.S
  arm/sync_fetch_and_or_4.S
  arm/sync_fetch_and_or_8.S
  arm/sync_fetch_and_sub_4.S
  arm/sync_fetch_and_sub_8.S
  arm/sync_fetch_and_umax_4.S
  arm/sync_fetch_and_umax_8.S
  arm/sync_fetch_and_umin_4.S
  arm/sync_fetch_and_umin_8.S
  arm/sync_fetch_and_xor_4.S
  arm/sync_fetch_and_xor_8.S
  arm/sync_synchronize.S
  arm/truncdfsf2vfp.S
  arm/udivmodsi4.S
  arm/udivsi3.S
  arm/umodsi3.S
  arm/unorddf2vfp.S
  arm/unordsf2vfp.S
  ${GENERIC_SOURCES})

set(aarch64_SOURCES
  comparetf2.c
  extenddftf2.c
  extendsftf2.c
  fixtfdi.c
  fixtfsi.c
  fixtfti.c
  fixunstfdi.c
  fixunstfsi.c
  fixunstfti.c
  floatditf.c
  floatsitf.c
  floatunditf.c
  floatunsitf.c
  multc3.c
  trunctfdf2.c
  trunctfsf2.c
  ${GENERIC_SOURCES})

set(armhf_SOURCES ${arm_SOURCES})
set(armv7_SOURCES ${arm_SOURCES})
set(armv7s_SOURCES ${arm_SOURCES})
set(armv7k_SOURCES ${arm_SOURCES})
set(arm64_SOURCES ${aarch64_SOURCES})

# macho_embedded archs
set(armv6m_SOURCES ${GENERIC_SOURCES})
set(armv7m_SOURCES ${arm_SOURCES})
set(armv7em_SOURCES ${arm_SOURCES})

set(mips_SOURCES ${GENERIC_SOURCES})
set(mipsel_SOURCES ${mips_SOURCES})
set(mips64_SOURCES ${mips_SOURCES})
set(mips64el_SOURCES ${mips_SOURCES})

set(wasm32_SOURCES ${GENERIC_SOURCES})
set(wasm64_SOURCES ${GENERIC_SOURCES})

# Umbrella target that every per-architecture builtins runtime library is
# attached to via PARENT_TARGET below.
add_custom_target(builtins)
set_target_properties(builtins PROPERTIES FOLDER "Compiler-RT Misc")

if (APPLE)
  # Darwin builds fat/universal libraries through its own helper machinery.
  add_subdirectory(Darwin-excludes)
  add_subdirectory(macho_embedded)
  darwin_add_builtin_libraries(${BUILTIN_SUPPORTED_OS})
else ()
  append_string_if(COMPILER_RT_HAS_STD_C99_FLAG -std=gnu99 maybe_stdc99)

  foreach (arch ${BUILTIN_SUPPORTED_ARCH})
    if (CAN_TARGET_${arch})
      # Filter out generic versions of routines that are re-implemented in
      # architecture specific manner.  This prevents multiple definitions of the
      # same symbols, making the symbol selection non-deterministic.
      foreach (_file ${${arch}_SOURCES})
        if (${_file} MATCHES ${arch}/*)
          # Map e.g. i386/ashldi3.S back to ashldi3.c and drop the generic C
          # version from this arch's source list.
          get_filename_component(_name ${_file} NAME)
          string(REPLACE ".S" ".c" _cname "${_name}")
          list(REMOVE_ITEM ${arch}_SOURCES ${_cname})
        endif ()
      endforeach ()

      add_compiler_rt_runtime(clang_rt.builtins
                              STATIC
                              ARCHS ${arch}
                              SOURCES ${${arch}_SOURCES}
                              CFLAGS ${maybe_stdc99}
                              PARENT_TARGET builtins)
    endif ()
  endforeach ()
endif ()

add_dependencies(compiler-rt builtins)
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<












































































































































































































































































































































































































































































































































































































































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/10.4.txt version [7fae259794].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
absvdi2
absvsi2
absvti2
adddf3
addsf3
addtf3
addvdi3
addvsi3
addvti3
apple_versioning
ashldi3
ashlti3
ashrdi3
ashrti3
atomic_flag_clear
atomic_flag_clear_explicit
atomic_flag_test_and_set
atomic_flag_test_and_set_explicit
atomic_signal_fence
atomic_thread_fence
clear_cache
clzdi2
clzsi2
clzti2
cmpdi2
cmpti2
comparedf2
comparesf2
ctzdi2
ctzsi2
ctzti2
divdc3
divdf3
divdi3
divmoddi4
divmodsi4
divsc3
divsf3
divsi3
divtf3
divti3
divxc3
enable_execute_stack
extendhfsf2
extendsfdf2
ffsdi2
ffsti2
fixdfdi
fixdfsi
fixdfti
fixsfdi
fixsfsi
fixsfti
fixunsdfdi
fixunsdfsi
fixunsdfti
fixunssfdi
fixunssfsi
fixunssfti
fixunsxfdi
fixunsxfsi
fixunsxfti
fixxfdi
fixxfti
floatdidf
floatdisf
floatdixf
floatsidf
floatsisf
floattidf
floattisf
floattixf
floatunsidf
floatunsisf
floatuntidf
floatuntisf
floatuntixf
gcc_personality_v0
gnu_f2h_ieee
gnu_h2f_ieee
lshrdi3
lshrti3
moddi3
modsi3
modti3
muldc3
muldf3
muldi3
mulodi4
mulosi4
muloti4
mulsc3
mulsf3
multf3
multi3
mulvdi3
mulvsi3
mulvti3
mulxc3
negdf2
negdi2
negsf2
negti2
negvdi2
negvsi2
negvti2
paritydi2
paritysi2
parityti2
popcountdi2
popcountsi2
popcountti2
powidf2
powisf2
powitf2
powixf2
subdf3
subsf3
subtf3
subvdi3
subvsi3
subvti3
trampoline_setup
truncdfhf2
truncdfsf2
truncsfhf2
ucmpdi2
ucmpti2
udivdi3
udivmoddi4
udivmodsi4
udivmodti4
udivsi3
udivti3
umoddi3
umodsi3
umodti3
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


















































































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/CMakeLists.txt version [f422a2f46b].

1
2
3
4
file(GLOB filter_files ${CMAKE_CURRENT_SOURCE_DIR}/*.txt)
foreach(filter_file ${filter_files})
  set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS ${filter_file})
endforeach()
<
<
<
<








Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/README.TXT version [713eb93671].

1
2
3
4
5
6
7
8
9
10
11
This folder contains list of symbols that should be excluded from the builtin
libraries for Darwin. There are two reasons symbols are excluded:

(1) They aren't supported on Darwin
(2) They are contained within the OS on the minimum supported target

The builtin libraries must contain all symbols not provided by the lowest
supported target OS. Meaning if minimum deployment target is iOS 6, all builtins
not included in the ios6-<arch>.txt files need to be included. The one catch is
that this is per-architecture. Since iOS 6 doesn't support arm64, when supporting
iOS 6, the minimum deployment target for arm64 binaries is iOS 7.
<
<
<
<
<
<
<
<
<
<
<






















Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/ios-armv7.txt version [6406715ca4].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
absvti2
addtf3
addvti3
aeabi_cdcmp
aeabi_cdcmpeq_check_nan
aeabi_cfcmp
aeabi_cfcmpeq_check_nan
aeabi_dcmp
aeabi_div0
aeabi_drsub
aeabi_fcmp
aeabi_frsub
aeabi_idivmod
aeabi_ldivmod
aeabi_memcmp
aeabi_memcpy
aeabi_memmove
aeabi_memset
aeabi_uidivmod
aeabi_uldivmod
ashlti3
ashrti3
clzti2
cmpti2
ctzti2
divtf3
divti3
ffsti2
fixdfti
fixsfti
fixunsdfti
fixunssfti
fixunsxfti
fixxfti
floattidf
floattisf
floattixf
floatuntidf
floatuntisf
floatuntixf
lshrti3
modti3
multf3
multi3
mulvti3
negti2
negvti2
parityti2
popcountti2
powitf2
subtf3
subvti3
trampoline_setup
ucmpti2
udivmodti4
udivti3
umodti3
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


















































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/ios-armv7s.txt version [de4e3cd8c1].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
absvti2
addtf3
addvti3
aeabi_cdcmp
aeabi_cdcmpeq_check_nan
aeabi_cfcmp
aeabi_cfcmpeq_check_nan
aeabi_dcmp
aeabi_div0
aeabi_drsub
aeabi_fcmp
aeabi_frsub
aeabi_idivmod
aeabi_ldivmod
aeabi_memcmp
aeabi_memcpy
aeabi_memmove
aeabi_memset
aeabi_uidivmod
aeabi_uldivmod
ashlti3
ashrti3
clzti2
cmpti2
ctzti2
divtf3
divti3
ffsti2
fixdfti
fixsfti
fixunsdfti
fixunssfti
fixunsxfti
fixxfti
floattidf
floattisf
floattixf
floatuntidf
floatuntisf
floatuntixf
lshrti3
modti3
multf
multi3
mulvti3
negti2
negvti2
parityti2
popcountti2
powitf2
subtf3
subvti3
trampoline_setup
ucmpti2
udivmodti4
udivti3
umodti3
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


















































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/ios.txt version [3e57aa9aa8].

1
apple_versioning
<


Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/ios6-armv7.txt version [a8ba56e3c2].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
absvdi2
absvsi2
adddf3
adddf3vfp
addsf3
addsf3vfp
addvdi3
addvsi3
ashldi3
ashrdi3
bswapdi2
bswapsi2
clzdi2
clzsi2
cmpdi2
ctzdi2
ctzsi2
divdc3
divdf3
divdf3vfp
divdi3
divmodsi4
divsc3
divsf3
divsf3vfp
divsi3
eqdf2
eqdf2vfp
eqsf2
eqsf2vfp
extendsfdf2
extendsfdf2vfp
ffsdi2
fixdfdi
fixdfsi
fixdfsivfp
fixsfdi
fixsfsi
fixsfsivfp
fixunsdfdi
fixunsdfsi
fixunsdfsivfp
fixunssfdi
fixunssfsi
fixunssfsivfp
floatdidf
floatdisf
floatsidf
floatsidfvfp
floatsisf
floatsisfvfp
floatundidf
floatundisf
floatunsidf
floatunsisf
floatunssidfvfp
floatunssisfvfp
gcc_personality_sj0
gedf2
gedf2vfp
gesf2
gesf2vfp
gtdf2
gtdf2vfp
gtsf2
gtsf2vfp
ledf2
ledf2vfp
lesf2
lesf2vfp
lshrdi3
ltdf2
ltdf2vfp
ltsf2
ltsf2vfp
moddi3
modsi3
muldc3
muldf3
muldf3vfp
muldi3
mulodi4
mulosi4
mulsc3
mulsf3
mulsf3vfp
mulvdi3
mulvsi3
nedf2
nedf2vfp
negdi2
negvdi2
negvsi2
nesf2
nesf2vfp
paritydi2
paritysi2
popcountdi2
popcountsi2
powidf2
powisf2
subdf3
subdf3vfp
subsf3
subsf3vfp
subvdi3
subvsi3
truncdfsf2
truncdfsf2vfp
ucmpdi2
udivdi3
udivmoddi4
udivmodsi4
udivsi3
umoddi3
umodsi3
unorddf2
unorddf2vfp
unordsf2
unordsf2vfp
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
















































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/ios6-armv7s.txt version [a8ba56e3c2].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
absvdi2
absvsi2
adddf3
adddf3vfp
addsf3
addsf3vfp
addvdi3
addvsi3
ashldi3
ashrdi3
bswapdi2
bswapsi2
clzdi2
clzsi2
cmpdi2
ctzdi2
ctzsi2
divdc3
divdf3
divdf3vfp
divdi3
divmodsi4
divsc3
divsf3
divsf3vfp
divsi3
eqdf2
eqdf2vfp
eqsf2
eqsf2vfp
extendsfdf2
extendsfdf2vfp
ffsdi2
fixdfdi
fixdfsi
fixdfsivfp
fixsfdi
fixsfsi
fixsfsivfp
fixunsdfdi
fixunsdfsi
fixunsdfsivfp
fixunssfdi
fixunssfsi
fixunssfsivfp
floatdidf
floatdisf
floatsidf
floatsidfvfp
floatsisf
floatsisfvfp
floatundidf
floatundisf
floatunsidf
floatunsisf
floatunssidfvfp
floatunssisfvfp
gcc_personality_sj0
gedf2
gedf2vfp
gesf2
gesf2vfp
gtdf2
gtdf2vfp
gtsf2
gtsf2vfp
ledf2
ledf2vfp
lesf2
lesf2vfp
lshrdi3
ltdf2
ltdf2vfp
ltsf2
ltsf2vfp
moddi3
modsi3
muldc3
muldf3
muldf3vfp
muldi3
mulodi4
mulosi4
mulsc3
mulsf3
mulsf3vfp
mulvdi3
mulvsi3
nedf2
nedf2vfp
negdi2
negvdi2
negvsi2
nesf2
nesf2vfp
paritydi2
paritysi2
popcountdi2
popcountsi2
powidf2
powisf2
subdf3
subdf3vfp
subsf3
subsf3vfp
subvdi3
subvsi3
truncdfsf2
truncdfsf2vfp
ucmpdi2
udivdi3
udivmoddi4
udivmodsi4
udivsi3
umoddi3
umodsi3
unorddf2
unorddf2vfp
unordsf2
unordsf2vfp
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
















































































































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/ios7-arm64.txt version [2fffd0bfbc].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
clzti2
divti3
fixdfti
fixsfti
fixunsdfti
floattidf
floattisf
floatuntidf
floatuntisf
gcc_personality_v0
modti3
powidf2
powisf2
udivmodti4
udivti3
umodti3
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/iossim-i386.txt version [10da0fd10f].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
absvti2
addtf3
addvti3
ashlti3
ashrti3
clzti2
cmpti2
ctzti2
divti3
divtf3
ffsti2
fixdfti
fixsfti
fixunsdfti
fixunssfti
fixunsxfti
fixxfti
floattidf
floattisf
floattixf
floatuntidf
floatuntisf
floatuntixf
lshrti3
modti3
muloti4
multi3
multf3
mulvti3
negti2
negvti2
parityti2
popcountti2
powitf2
subvti3
subtf3
trampoline_setup
ucmpti2
udivmodti4
udivti3
umodti3
absvti2
addtf3
addvti3
ashlti3
ashrti3
clzti2
cmpti2
ctzti2
divti3
divtf3
ffsti2
fixdfti
fixsfti
fixunsdfti
fixunssfti
fixunsxfti
fixxfti
floattidf
floattisf
floattixf
floatuntidf
floatuntisf
floatuntixf
lshrti3
modti3
muloti4
multi3
multf3
mulvti3
negti2
negvti2
parityti2
popcountti2
powitf2
subvti3
subtf3
trampoline_setup
ucmpti2
udivmodti4
udivti3
umodti3
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<




































































































































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/iossim-x86_64.txt version [36dfe4345f].

1
2
3
4
5
6
7
8
9
10
11
12
addtf3
divtf3
multf3
powitf2
subtf3
trampoline_setup
addtf3
divtf3
multf3
powitf2
subtf3
trampoline_setup
<
<
<
<
<
<
<
<
<
<
<
<
























Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/iossim.txt version [3e57aa9aa8].

1
apple_versioning
<


Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/osx-i386.txt version [b0ed917a95].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
absvti2
addvti3
ashlti3
ashrti3
clzti2
cmpti2
ctzti2
divti3
ffsti2
fixdfti
fixsfti
fixunsdfti
fixunssfti
fixunsxfti
fixxfti
floattidf
floattisf
floattixf
floatuntidf
floatuntisf
floatuntixf
lshrti3
modti3
muloti4
multi3
mulvti3
negti2
negvti2
parityti2
popcountti2
subvti3
ucmpti2
udivmodti4
udivti3
umodti3
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<






































































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/Darwin-excludes/osx.txt version [ec03c35acb].

1
2
3
4
5
6
7
apple_versioning
addtf3
divtf3
multf3
powitf2
subtf3
trampoline_setup
<
<
<
<
<
<
<














Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/adddf3.c version [a906f3239f].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
//===-- lib/adddf3.c - Double-precision addition ------------------*- C -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements double-precision soft-float addition with the IEEE-754
// default rounding (to nearest, ties to even).
//
//===----------------------------------------------------------------------===//

#define DOUBLE_PRECISION
#include "fp_add_impl.inc"

ARM_EABI_FNALIAS(dadd, adddf3)

COMPILER_RT_ABI double __adddf3(double a, double b){
    return __addXf3__(a, b);
}
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<












































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/arm/Makefile.mk version [5724fefc21].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
#===- lib/builtins/arm/Makefile.mk -------------------------*- Makefile -*--===#
#
#                     The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#

ModuleName := builtins
SubDirs := 
OnlyArchs := armv5 armv6 armv7 armv7k armv7m armv7em armv7s

AsmSources := $(foreach file,$(wildcard $(Dir)/*.S),$(notdir $(file)))
Sources := $(foreach file,$(wildcard $(Dir)/*.c),$(notdir $(file)))
ObjNames := $(Sources:%.c=%.o) $(AsmSources:%.S=%.o)
Implementation := Optimized

# FIXME: use automatic dependencies?
Dependencies := $(wildcard lib/*.h $(Dir)/*.h)
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<








































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/arm/adddf3vfp.S version [09920e03a7].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
//===-- adddf3vfp.S - Implement adddf3vfp ---------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "../assembly.h"

//
// double __adddf3vfp(double a, double b) { return a + b; }
//
// Adds two double precision floating point numbers using the Darwin
// calling convention where double arguments are passsed in GPR pairs
//
	.syntax unified
	.p2align 2
DEFINE_COMPILERRT_FUNCTION(__adddf3vfp)
	vmov	d6, r0, r1		// move first param from r0/r1 pair into d6
	vmov	d7, r2, r3		// move second param from r2/r3 pair into d7
	vadd.f64 d6, d6, d7		
	vmov	r0, r1, d6		// move result back to r0/r1 pair
	bx	lr
END_COMPILERRT_FUNCTION(__adddf3vfp)

NO_EXEC_STACK_DIRECTIVE

<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


























































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/arm/addsf3vfp.S version [4922993065].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
//===-- addsf3vfp.S - Implement addsf3vfp ---------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "../assembly.h"

//
// extern float __addsf3vfp(float a, float b);
//
// Adds two single precision floating point numbers using the Darwin
// calling convention where single arguments are passsed in GPRs
//
	.syntax unified
	.p2align 2
DEFINE_COMPILERRT_FUNCTION(__addsf3vfp)
	vmov	s14, r0		// move first param from r0 into float register
	vmov	s15, r1		// move second param from r1 into float register
	vadd.f32 s14, s14, s15
	vmov	r0, s14		// move result back to r0
	bx	lr
END_COMPILERRT_FUNCTION(__addsf3vfp)

NO_EXEC_STACK_DIRECTIVE

<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<


























































Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/compiler-rt/lib/builtins/arm/aeabi_cdcmp.S version [3a2ba8c0dc].

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
//===-- aeabi_cdcmp.S - EABI cdcmp* implementation ------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "../assembly.h"

#if __BYTE_ORDER__ != __ORDER_LITTLE_ENDIAN__
#error big endian support not implemented
#endif

#define APSR_Z (1 << 30)
#define APSR_C (1 << 29)

// void __aeabi_cdcmpeq(double a, double b) {
//   if (isnan(a) || isnan(b)) {
//     Z = 0; C = 1;
//   } else {
//     __aeabi_cdcmple(a, b);
//   }
// }

        .syntax unified
        .p2align 2
DEFINE_COMPILERRT_FUNCTION(__aeabi_cdcmpeq)
        push {r0-r3, lr}
        bl __aeabi_cdcmpeq_check_nan
        cmp r0, #1
        pop {r0-r3, lr}

        // NaN has been ruled out, so __aeabi_cdcmple can't trap
        bne __aeabi_cdcmple

        msr CPSR_f, #APSR_C
        JMP(lr)
END_COMPILERRT_FUNCTION(__aeabi_cdcmpeq)


// void __aeabi_cdcmple(double a, double b) {
//   if (__aeabi_dcmplt(a, b)) {
//     Z = 0; C = 0;
//   } else if (__aeabi_dcmpeq(a, b)) {
//     Z = 1; C = 1;
//   } else {
//     Z = 0; C = 1;
//   }
// }

        .syntax unified
        .p2align 2
DEFINE_COMPILERRT_FUNCTION(__aeabi_cdcmple)
        // Per the RTABI, this function must preserve r0-r11.
        // Save lr in the same instruction for compactness
        push {r0-r3, lr}

        bl __aeabi_dcmplt
        cmp r0, #1
        moveq ip, #0
        beq 1f

        ldm sp, {r0-r3}
        bl __aeabi_dcmpeq
        cmp r0, #1
        moveq ip, #(APSR_C | APSR_Z)
        movne ip, #(APSR_C)

1:
        msr CPSR_f, ip
        pop {r0-r3}
        POP_PC()
END_COMPILERRT_FUNCTION(__aeabi_cdcmple)

// int __aeabi_cdrcmple(double a, double b) {
//   return __aeabi_cdcmple(b, a);
// }

        .syntax unified
        .p2align 2
DEFINE_COMPILERRT_FUNCTION(__aeabi_cdrcmple)
        // Swap r0 and r2
        mov ip, r0
        mov r0, r2
        mov r2, ip

        // Swap r1 and r3
        mov ip, r1
        mov r1, r3