Check-in [58fe99e749]
Overview
Comment: wiki references
SHA1: 58fe99e749cc4e596ab81f7915b3b512a2d2ca17
User & Date: martin_vahi on 2017-05-16 00:49:42
Context
2017-05-19 18:47
wiki reference upgrade check-in: dee8e3e8ea user: vhost7825ssh tags: trunk
2017-05-16 00:49
wiki references check-in: 58fe99e749 user: martin_vahi tags: trunk
2017-03-22 04:01
Massive_Array_of_Internet_Disks Safe MaidSafe + Rust check-in: 0105d215ce user: martin_vahi tags: trunk
Changes

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/after_success.sh version [01f3445e7c].

     1         -#!/bin/bash
     2         -
     3         -# Show expanded commands while running
     4         -set -x
     5         -
     6         -# Stop the script if any command fails
     7         -set -o errtrace
     8         -trap 'exit' ERR
     9         -
    10         -CHANNEL=${TRAVIS_RUST_VERSION:-${CHANNEL:-stable}}
    11         -
    12         -# We only want to deploy the docs and run coverage if it's a pull request to 'master' and only on
    13         -# the first job number in the Travis matrix.  This should be a Linux run.
    14         -if [[ ! $CHANNEL = stable ]] || [[ ! $TRAVIS_OS_NAME = linux ]] || [[ ${TRAVIS_JOB_NUMBER##*.} -ne 1 ]] ||
    15         -   [[ ! $TRAVIS_BRANCH = master ]] || [[ ! $TRAVIS_PULL_REQUEST = false ]]; then
    16         -  exit 0
    17         -fi
    18         -
    19         -curl -sSL https://github.com/maidsafe/QA/raw/master/Bash%20Scripts/Travis/deploy_docs.sh | bash
    20         -# curl -sSL https://github.com/maidsafe/QA/raw/master/Bash%20Scripts/Travis/run_coverage.sh | bash

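The gating above relies on a bash parameter expansion to pull the matrix job index out of `TRAVIS_JOB_NUMBER`. A minimal illustration, with a made-up job number:

```bash
#!/bin/bash
# Illustrative only: Travis sets TRAVIS_JOB_NUMBER as "<build>.<job>".
TRAVIS_JOB_NUMBER="1234.1"

# "${VAR##*.}" strips the longest prefix ending in ".", leaving the job index.
echo "${TRAVIS_JOB_NUMBER##*.}"   # prints: 1
```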
Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/build_and_run_tests.sh version [c9eb426954].

     1         -#!/bin/bash
     2         -
     3         -# Show expanded commands while running
     4         -set -x
     5         -
     6         -# Stop the script if any command fails
     7         -set -o errtrace
     8         -trap 'exit' ERR
     9         -
    10         -cd $TRAVIS_BUILD_DIR
    11         -
    12         -RUST_BACKTRACE=1
    13         -export RUST_BACKTRACE
    14         -
    15         -if [[ $TRAVIS_RUST_VERSION = nightly ]]; then
    16         -  # To ignore this failure, set `allow_failures` in build matrix for nightly builds
    17         -  cargo test --no-run --features clippy
    18         -  for Feature in $Features; do
    19         -    cargo test --no-run --features "clippy $Feature"
    20         -  done
    21         -else
    22         -  # Run the tests for each feature
    23         -  for Feature in $Features; do
    24         -    cargo build --release --verbose --features $Feature
    25         -    cargo test --release --features $Feature
    26         -  done
    27         -  if [ -z "$Features" ]; then
    28         -    # There are no features, so run the default test suite
    29         -    cargo build --release --verbose
    30         -    cargo test --release
    31         -  elif [[ $TRAVIS_OS_NAME = linux ]]; then
    32         -    # We currently don't run the default tests if there are any features
    33         -    cargo test --release --verbose --no-run
    34         -  fi
    35         -fi

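The loops above iterate over `$Features`, which the script expects to find in the CI environment as a space-separated list of Cargo feature names. A hedged sketch of how it might be supplied (the feature names here are hypothetical, not taken from any particular MaidSafe crate):

```bash
# Hypothetical feature list exported before the script runs,
# e.g. via the env section of the Travis configuration.
export Features="use-mock-routing extra-diagnostics"
./build_and_run_tests.sh
```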
Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/deploy_docs.sh version [3030a1946c].

     1         -#!/bin/bash
     2         -
     3         -# Show executed commands (not expanded) while running
     4         -set -v
     5         -
     6         -# Stop the script if any command fails
     7         -set -o errtrace
     8         -trap 'exit' ERR
     9         -
    10         -ProjectName=${TRAVIS_REPO_SLUG##*/};
    11         -
    12         -cd $TRAVIS_BUILD_DIR
    13         -cargo doc --features generate-diagrams || cargo doc
    14         -echo "<meta http-equiv=refresh content=0;url=${ProjectName}/index.html>" > target/doc/index.html
    15         -rm -rf /tmp/doc
    16         -mv target/doc /tmp/doc
    17         -
    18         -git config --global user.email qa@maidsafe.net
    19         -git config --global user.name MaidSafe-QA
    20         -
    21         -CommitMessage=$(git log -1 | tr '[:upper:]' '[:lower:]' | grep "version change to " | tr -d ' ')
    22         -if [[ $CommitMessage == versionchangeto* ]]; then
    23         -  Version=${CommitMessage##*to}
    24         -  Commit=$(git rev-parse HEAD)
    25         -  git tag $Version -am "Version $Version" $Commit
    26         -  # Pipe output to null if the following command fails to thereby not print expanded variables
    27         -  git push https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG} tag $Version > /dev/null 2>&1
    28         -fi
    29         -
    30         -# Since we did a shallow clone which only clones the master branch, ensure we can fetch the gh-pages
    31         -# branch if it exists
    32         -git remote set-branches origin '*'
    33         -git fetch
    34         -
    35         -# Checkout to the gh-pages branch if it already exists, otherwise clear out the repo and prepare
    36         -# for the first push to gh-pages.
    37         -if git rev-parse --verify origin/gh-pages > /dev/null 2>&1; then
    38         -  git checkout gh-pages
    39         -  git clean -df
    40         -else
    41         -  rm -rf ./*
    42         -  rm ./.**&
    43         -  git checkout --orphan gh-pages
    44         -  git rm -rf .
    45         -  echo "<meta http-equiv=refresh content=0;url=master/${ProjectName}/index.html>" > index.html
    46         -  touch .nojekyll
    47         -fi
    48         -
    49         -rm -rf master
    50         -cp -rf /tmp/doc master
    51         -
    52         -# Temporary patch to remove any named version docs
    53         -rm -rf 0* > /dev/null 2>&1
    54         -
    55         -if [[ -n ${Version+x} ]]; then
    56         -  rm -rf latest
    57         -  cp -rf /tmp/doc latest
    58         -fi
    59         -
    60         -git add --all . > /dev/null 2>&1
    61         -if git commit -m"Updated documentation." > /dev/null 2>&1; then
    62         -  # Pipe output to null if the following command fails to thereby not print expanded variables
    63         -  git push https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages > /dev/null 2>&1
    64         -fi
    65         -
    66         -git checkout master

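The tagging step above only fires when the latest commit message announces a version bump. A worked example of the parsing, using a made-up commit message rather than real `git log` output:

```bash
# Suppose the most recent commit message is "Version change to 0.12.1".
CommitMessage=$(echo "Version change to 0.12.1" | tr '[:upper:]' '[:lower:]' | grep "version change to " | tr -d ' ')
echo "$CommitMessage"          # versionchangeto0.12.1
echo "${CommitMessage##*to}"   # 0.12.1 -- the value used for the git tag
```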
Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/install_elfutils.sh version [50259b8c2b].

     1         -#!/bin/bash
     2         -
     3         -# Show expanded commands while running
     4         -set -x
     5         -
     6         -# Stop the script if any command fails
     7         -set -o errtrace
     8         -trap 'exit' ERR
     9         -
    10         -# We only need elfutils to run coverage, and this only happens if it's a pull request to 'master'
    11         -# and only on the first job number in the Travis matrix.  This should be a Linux run.
    12         -if [[ $TRAVIS_RUST_VERSION = stable ]] && [[ $TRAVIS_OS_NAME = linux ]] && [[ ${TRAVIS_JOB_NUMBER##*.} -eq 1 ]] &&
    13         -   [[ $TRAVIS_BRANCH = master ]] && [[ $TRAVIS_PULL_REQUEST = false ]]; then
    14         -
    15         -  # Set the elfutils version if it isn't already set
    16         -  if [ -z "$ElfUtilsVersion" ]; then
    17         -    ElfUtilsVersion=0.164
    18         -  fi
    19         -
    20         -  # Check to see if elfutils dir has been retrieved from cache
    21         -  ElfUtilsInstallPath=$HOME/elfutils/$ElfUtilsVersion
    22         -  Cores=$((hash nproc 2>/dev/null && nproc) || (hash sysctl 2>/dev/null && sysctl -n hw.ncpu) || echo 1)
    23         -  if [ ! -d "$ElfUtilsInstallPath/lib" ]; then
    24         -    # If not, build and install it
    25         -    cd $HOME
    26         -    rm -rf elfutils
    27         -    mkdir -p temp
    28         -    cd temp
    29         -    wget https://fedorahosted.org/releases/e/l/elfutils/$ElfUtilsVersion/elfutils-$ElfUtilsVersion.tar.bz2
    30         -    tar jxf elfutils-$ElfUtilsVersion.tar.bz2
    31         -    cd elfutils-$ElfUtilsVersion
    32         -    ./configure --prefix=$ElfUtilsInstallPath
    33         -    make check -j$Cores
    34         -    make install
    35         -  else
    36         -    echo "Using cached elfutils directory (version $ElfUtilsVersion)"
    37         -  fi
    38         -
    39         -  export LD_LIBRARY_PATH=$ElfUtilsInstallPath/lib:$LD_LIBRARY_PATH
    40         -  export ElfUtilsInstallPath=$ElfUtilsInstallPath
    41         -
    42         -fi
    43         -
    44         -trap '' ERR

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/install_libsodium.sh version [5630345f22].

     1         -#!/bin/bash
     2         -
     3         -# Set the libsodium version if it isn't already set
     4         -if [ -z "$LibSodiumVersion" ]; then
     5         -  LibSodiumVersion=1.0.9
     6         -fi
     7         -
     8         -# Check to see if libsodium dir has been retrieved from cache
     9         -LibSodiumInstallPath=$HOME/libsodium/$LibSodiumVersion
    10         -if [ ! -d "$LibSodiumInstallPath/lib" ]; then
    11         -  # If not, build and install it
    12         -  cd $HOME
    13         -  rm -rf libsodium
    14         -  mkdir -p temp
    15         -  cd temp
    16         -  wget https://github.com/jedisct1/libsodium/releases/download/$LibSodiumVersion/libsodium-$LibSodiumVersion.tar.gz
    17         -  tar xfz libsodium-$LibSodiumVersion.tar.gz
    18         -  cd libsodium-$LibSodiumVersion
    19         -  ./configure --prefix=$LibSodiumInstallPath --enable-shared=no --disable-pie
    20         -  Cores=$((hash nproc 2>/dev/null && nproc) || (hash sysctl 2>/dev/null && sysctl -n hw.ncpu) || echo 1)
    21         -  make check -j$Cores
    22         -  make install
    23         -else
    24         -  echo "Using cached libsodium directory (version $LibSodiumVersion)";
    25         -fi
    26         -
    27         -export PKG_CONFIG_PATH=$LibSodiumInstallPath/lib/pkgconfig:$PKG_CONFIG_PATH

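With `PKG_CONFIG_PATH` pointing at the cached install, a build that links libsodium can resolve it through pkg-config. A quick sanity check, assuming the install path and version used above:

```bash
# Should print the compiler and linker flags for the cached static build,
# including -lsodium and the -I/-L paths under $HOME/libsodium/1.0.9.
PKG_CONFIG_PATH=$HOME/libsodium/1.0.9/lib/pkgconfig pkg-config --cflags --libs libsodium
```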
Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/run_cargo_prune.sh version [79a4fe6ff1].

     1         -#!/bin/bash
     2         -
     3         -cur_ver=`cargo install --list | grep 'prune v' | sed -E 's/.*([0-9]+\.[0-9]+\.[0-9]+).*/\1/g'`
     4         -
     5         -if [ $? -ne 0 ]; then
     6         -  # cargo-prune is not installed yet
     7         -  cargo install cargo-prune
     8         -fi
     9         -
    10         -latest_ver=`curl -s -H "Accept: application/json" -H "Content-Type: application/json" -X GET https://crates.io/api/v1/crates/cargo-prune | sed -E 's/.*"max_version":"([^"]*).*/\1/g'`
    11         -
    12         -if [ "$cur_ver" != "$latest_ver" ]; then
    13         -  # Update to latest cargo-prune
    14         -  cargo install cargo-prune --force
    15         -fi
    16         -
    17         -cargo prune

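The update check above scrapes the `max_version` field out of the crates.io API response with sed. A worked example against a canned, truncated response (the version number is made up):

```bash
# Trimmed, made-up crates.io response for illustration.
response='{"crate":{"id":"cargo-prune","max_version":"0.1.8"},"versions":[]}'
echo "$response" | sed -E 's/.*"max_version":"([^"]*).*/\1/g'   # prints: 0.1.8
```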
Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/run_clippy.sh version [8b6e8486e3].

     1         -#!/bin/bash
     2         -
     3         -# Show expanded commands while running
     4         -set -x
     5         -
     6         -# Stop the script if any command fails
     7         -set -o errtrace
     8         -trap 'exit' ERR
     9         -
    10         -cd $TRAVIS_BUILD_DIR
    11         -
    12         -if [[ $TRAVIS_RUST_VERSION = nightly ]]; then
    13         -  # Only run clippy on Linux
    14         -  if [[ ! $TRAVIS_OS_NAME = linux ]]; then
    15         -    exit 0
    16         -  fi
    17         -  # To ignore this failure, set `allow_failures` in build matrix for nightly builds
    18         -  cargo rustc --features clippy -- --test -Zno-trans
    19         -  for Feature in $Features; do
    20         -    cargo rustc --features "clippy $Feature" -- --test -Zno-trans
    21         -  done
    22         -fi
    23         -
    24         -# Hide expanded commands while running
    25         -set +x

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/run_coverage.sh version [2b7030d337].

     1         -#!/bin/bash
     2         -
     3         -# Show expanded commands while running
     4         -set -x
     5         -
     6         -# Stop the script if any command fails
     7         -set -o errtrace
     8         -trap 'exit' ERR
     9         -
    10         -# Build and install kcov (which is fast and not versioned, so there's little point in caching it)
    11         -cd $HOME
    12         -mkdir -p temp
    13         -cd temp
    14         -wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz
    15         -tar xzf master.tar.gz
    16         -cmake -Bbuild_kcov -Hkcov-master -DCMAKE_INSTALL_PREFIX=$HOME/ -DCMAKE_BUILD_TYPE=Release -DCMAKE_INCLUDE_PATH="$ElfUtilsInstallPath/include" -DCMAKE_LIBRARY_PATH="$ElfUtilsInstallPath/lib"
    17         -cd build_kcov
    18         -make -j$Cores
    19         -make install
    20         -
    21         -# Build the project's tests and run them under kcov
    22         -if [ ! -z "$Features" ]; then
    23         -  WithFeatures=" --features $Features"
    24         -fi
    25         -cd $TRAVIS_BUILD_DIR
    26         -cargo test --no-run $WithFeatures
    27         -ProjectName=${TRAVIS_REPO_SLUG##*/};
    28         -$HOME/bin/kcov --coveralls-id=$TRAVIS_JOB_ID --include-path=src target/kcov target/debug/$ProjectName-*

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/Travis/run_rustfmt.sh version [24107c903b].

     1         -#!/bin/bash
     2         -
     3         -cur_ver=`cargo install --list | grep 'rustfmt v' | sed -E 's/.*([0-9]+\.[0-9]+\.[0-9]+).*/\1/g'`
     4         -
     5         -if [ $? -ne 0 ]; then
     6         -  # rustfmt is not installed yet
     7         -  cargo install rustfmt
     8         -fi
     9         -
    10         -latest_ver=`curl -s -H "Accept: application/json" -H "Content-Type: application/json" -X GET https://crates.io/api/v1/crates/rustfmt | sed -E 's/.*"max_version":"([^"]*).*/\1/g'`
    11         -
    12         -if [ "$cur_ver" != "$latest_ver" ]; then
    13         -  # Update to latest rustfmt
    14         -  cargo install rustfmt --force
    15         -fi
    16         -
    17         -res=0
    18         -for i in `find . -name '*.rs'`; do
    19         -  $HOME/.cargo/bin/rustfmt --skip-children --write-mode=diff $i
    20         -  if [ $? -ne 0 ]; then
    21         -    res=1
    22         -  fi
    23         -done
    24         -
    25         -exit $res

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/add_public_keys.sh version [d95cef1438].

     1         -#!/bin/bash
     2         -
     3         -if [[ $# != 1 || "$1" == "-h" || "$1" == "--help" ]]; then
     4         -  echo "
     5         -This adds all public keys inside the Public Keys folder of this repository to
     6         -the ~/.ssh/authorized_keys file of the remote target.
     7         -
     8         -You should pass a single arg to this script which will be the target user and
     9         -hostname for the ssh commands.  You must already be able to ssh to the target
    10         -without the need for a password.
    11         -
    12         -Example usage:
    13         -    ./${0##*/} peer1@peer_prog.maidsafe.net
    14         -"
    15         -  exit 0;
    16         -fi
    17         -
    18         -IFS=$(echo -en "\n\b")
    19         -PublicKeysDir=$(cd "$(dirname "${BASH_SOURCE[0]}")/../Public Keys" && pwd)
    20         -
    21         -for File in $PublicKeysDir/*
    22         -do
    23         -  echo "Processing \"$File\"..."
    24         -  echo "$(echo -en "\n")$(cat $File)" | ssh "$1" 'cat >> .ssh/authorized_keys'
    25         -done

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/create_linux_vault_package.sh version [e01e657554].

     1         -#!/bin/bash
     2         -
     3         -# Stop the script if any command fails
     4         -set -o errtrace
     5         -trap 'exit' ERR
     6         -
     7         -function help {
     8         -  echo "
     9         -This invokes the 'create_package.sh' script in the SAFE Vault project which
    10         -builds the linux packages.  It then copies them to the apt and yum servers.
    11         -
    12         -You should either invoke this script from the root of the safe_vault repo, or
    13         -pass a single arg to this script which is the absolute path to the safe_vault
    14         -repo.
    15         -
    16         -Ideally, you should be able to ssh to the apt and yum servers without the need
    17         -for a password.
    18         -
    19         -Example usage:
    20         -    ./${0##*/} \"/home/maidsafe/safe_vault\"
    21         -"
    22         -  exit 0;
    23         -}
    24         -
    25         -# Handle help arg
    26         -if [[ "$1" == "-h" || "$1" == "--help" ]]; then
    27         -  help
    28         -fi
    29         -
    30         -# Set path to script in safe_vault repo
    31         -if [[ "$#" -eq 1 ]]; then
    32         -  VaultRoot="$1"
    33         -else
    34         -  VaultRoot=$(pwd)
    35         -fi
    36         -CreatePackageScript="$VaultRoot/installer/linux/create_packages.sh"
    37         -
    38         -# Check the repo path contains the expected script
    39         -if [[ "$#" -gt 1 || ! -x "$CreatePackageScript" ]]; then
    40         -  help
    41         -fi
    42         -
    43         -# Invoke the script and scp the resulting packages
    44         -CurrentPath=$(pwd)
    45         -cd $VaultRoot
    46         -"$CreatePackageScript"
    47         -cd $CurrentPath
    48         -ssh apt.maidsafe.net 'mkdir -p ~/systemd/ && mkdir -p ~/SysV-style/'
    49         -ssh yum.maidsafe.net 'mkdir -p ~/systemd/ && mkdir -p ~/SysV-style/'
    50         -scp "$VaultRoot"/packages/linux/safe_vault_*.tar.gz apt.maidsafe.net:~/ &
    51         -scp "$VaultRoot"/packages/linux/systemd/safe*.deb apt.maidsafe.net:~/systemd/ &
    52         -scp "$VaultRoot"/packages/linux/SysV-style/safe*.deb apt.maidsafe.net:~/SysV-style/ &
    53         -scp "$VaultRoot"/packages/linux/safe_vault_latest_version.txt apt.maidsafe.net:~/ &
    54         -scp "$VaultRoot"/packages/linux/systemd/safe*.rpm yum.maidsafe.net:~/systemd/ &
    55         -scp "$VaultRoot"/packages/linux/SysV-style/safe*.rpm yum.maidsafe.net:~/SysV-style/ &
    56         -scp "$VaultRoot"/packages/linux/safe_vault_latest_version.txt yum.maidsafe.net:~/ &
    57         -
    58         -wait

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/execute_command_on_all_seeds.sh version [8f8520f04e].

     1         -#!/bin/bash
     2         -
     3         -if [[ $# != 1 || "$1" == "-h" || "$1" == "--help" ]]; then
     4         -  echo "
     5         -This executes a single command on each of the seed VMs as the \"qa\" user.
     6         -You should pass a single arg to this script which will be the command
     7         -to execute.  It can't require user-input on the remote machine.
     8         -
     9         -Example usage:
    10         -    ./${0##*/} \"ls -laG\"
    11         -"
    12         -  exit 0;
    13         -fi
    14         -
    15         -# Show commands as they execute
    16         -set -x
    17         -
    18         -for peer in 1 2 3 4 5 6; do
    19         -  ssh qa@seed-$peer.maidsafe.net "$1" &
    20         -done
    21         -
    22         -wait

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/execute_command_on_group.sh version [37deb60b10].

     1         -#!/bin/bash
     2         -
     3         -IpList=ip_list
     4         -
     5         -if [[ $# != 1 || "$1" == "-h" || "$1" == "--help" ]]; then
     6         -  echo "
     7         -This executes a single command on each of the entered IPs.  The remote IPs
     8         -should be a space-separated list in a file called \"$IpList\" in the current
     9         -working directory (i.e. where you run this script from - not necessarily the
    10         -folder containing this script).
    11         -
    12         -The command will be executed as the \"qa\" user on the remote machine.
    13         -
    14         -You should pass a single arg to this script which will be the command
    15         -to execute.  It can't require user-input on the remote machine.
    16         -
    17         -Example usage:
    18         -    ./${0##*/} \"ls -laG\"
    19         -"
    20         -  exit 0;
    21         -fi
    22         -
    23         -if [ ! -f $IpList ]; then
    24         -    echo "
    25         -This script requires a space-separated list of IPs to exist in a file called
    26         -\"$IpList\" in the current working directory.
    27         -
    28         -For further info, run this script with '--help'
    29         -"
    30         -    exit 1
    31         -fi
    32         -
    33         -IPs=`cat $IpList`
    34         -for peer in $IPs; do
    35         -  # Show commands as they execute
    36         -  set -x
    37         -  ssh qa@$peer "$1"
    38         -  set +x
    39         -done

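The expected `ip_list` file is simply space-separated addresses in the current working directory. An illustrative run (the addresses come from the documentation range, not real droplets):

```bash
# Create an example ip_list and run one command on every listed machine.
echo "203.0.113.10 203.0.113.11 203.0.113.12" > ip_list
./execute_command_on_group.sh "uptime"
```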
Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/scp_droplet_logfiles.sh version [6191bb5922].

     1         -#!/bin/bash
     2         -
     3         -IpList=ip_list
     4         -
     5         -if [[ "$1" == "-h" || "$1" == "--help" ]]; then
     6         -  echo "
     7         -This tries to scp /home/qa/Node.log from each of the entered IPs.  The remote
     8         -IPs should be a space-separated list in a file called \"$IpList\" in the current
     9         -working directory (i.e. where you run this script from - not necessarily the
    10         -folder containing this script).
    11         -
    12         -The logfiles will each be renamed to include the nodes' index numbers, e.g.
    13         -Node 1's logfile will be renamed from Node.log to Node001.log.  The files will
    14         -be copied to the current working directory.
    15         -
    16         -"
    17         -  exit 0;
    18         -fi
    19         -
    20         -if [ ! -f $IpList ]; then
    21         -    echo "
    22         -This script requires a space-separated list of IPs to exist in a file called
    23         -\"$IpList\" in the current working directory.
    24         -
    25         -For further info, run this script with '--help'
    26         -"
    27         -    exit 1
    28         -fi
    29         -
    30         -IPs=`cat $IpList`
    31         -Count=1
    32         -for Peer in $IPs; do
    33         -  printf -v Command "scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no qa@$Peer:~/Node.log Node%03d.log" $Count
    34         -  # Show commands as they execute
    35         -  set -x
    36         -  $Command
    37         -  set +x
    38         -  ((++Count))
    39         -done

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/scp_on_all_seeds.sh version [8cff2ba158].

     1         -#!/bin/bash
     2         -
     3         -if [[ "$1" == "-h" || "$1" == "--help" ]]; then
     4         -  echo "
     5         -This runs an scp command across each of the seed VMs.
     6         -
     7         -Replace \"scp\" with this script and use the term \"REMOTE\" to represent
     8         -the \"qa\" user on the remote endpoint.
     9         -
    10         -Example usage:
    11         -Copy the file \"foobar.txt\" from seed VM to local folder
    12         -    ./${0##*/} REMOTE:foobar.txt /some/local/dir
    13         -
    14         -Copy the dir \"foo\" from local to seeds' \"bar\" dir
    15         -    ./${0##*/} -r foo REMOTE:/some/remote/dir/bar
    16         -"
    17         -  exit 0;
    18         -fi
    19         -
    20         -regex='(.*)REMOTE:(.*)'
    21         -# Show commands as they execute
    22         -set -x
    23         -for peer in 1 2 3 4 5 6; do
    24         -  command="scp"
    25         -  for var in "$@"; do
    26         -    while [[ $var =~ $regex ]]; do
    27         -      var="${BASH_REMATCH[1]}qa@seed-$peer.maidsafe.net:${BASH_REMATCH[2]}"
    28         -    done
    29         -    command="$command $var"
    30         -  done
    31         -  $command &
    32         -done
    33         -
    34         -wait

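The regex loop above rewrites every argument containing `REMOTE:` once per seed VM before running scp. Taking the first example from the script's own help text, the expansion looks like this:

```bash
./scp_on_all_seeds.sh REMOTE:foobar.txt /some/local/dir
# runs, for seed 1:
#   scp qa@seed-1.maidsafe.net:foobar.txt /some/local/dir
# and the equivalent command for seeds 2 through 6, all in parallel.
```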
Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/scp_on_group.sh version [b7fd70b992].

     1         -#!/bin/bash
     2         -
     3         -IpList=ip_list
     4         -
     5         -if [[ "$1" == "-h" || "$1" == "--help" ]]; then
     6         -  echo "
     7         -This runs an scp command across each of the entered IPs.  The remote IPs should
     8         -be a space-separated list in a file called \"$IpList\" in the current working
     9         -directory (i.e. where you run this script from - not necessarily the folder
    10         -containing this script).
    11         -
    12         -Replace \"scp\" with this script and use the term \"REMOTE\" to represent the \"qa\"
    13         -user on the remote endpoint.
    14         -
    15         -To avoid having to confirm each IP's identity, you can pass the args:
    16         -    -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no
    17         -
    18         -Example usage:
    19         -Copy the dir \"foo\" from local to remote IP's \"bar\" dir
    20         -    ./${0##*/} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -r foo REMOTE:/some/remote/dir/bar
    21         -
    22         -Copy the file \"log*.txt\" from remote IP to local folder
    23         -    ./${0##*/} REMOTE:log*.txt /some/local/dir
    24         -
    25         -
    26         -"
    27         -  exit 0;
    28         -fi
    29         -
    30         -if [ ! -f $IpList ]; then
    31         -    echo "
    32         -This script requires a space-separated list of IPs to exist in a file called
    33         -\"$IpList\" in the current working directory.
    34         -
    35         -For further info, run this script with '--help'
    36         -"
    37         -    exit 1
    38         -fi
    39         -
    40         -IPs=`cat $IpList`
    41         -regex='(.*)REMOTE:(.*)'
    42         -for peer in $IPs; do
    43         -  command="scp"
    44         -  for var in "$@"; do
    45         -    while [[ $var =~ $regex ]]; do
    46         -      var="${BASH_REMATCH[1]}qa@$peer:${BASH_REMATCH[2]}"
    47         -    done
    48         -    command="$command $var"
    49         -  done
    50         -  # Show commands as they execute
    51         -  set -x
    52         -  $command
    53         -  set +x
    54         -done

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Bash Scripts/soak_test.sh version [e00d92e3d6].

     1         -#!/bin/bash
     2         -
     3         -((count=0))
     4         -((failurec=0))
     5         -((perc=0))
     6         -while(true)
     7         -do
     8         -  ((count=count+1))
     9         -  ./<INSERT TEST HERE>
    10         -  if [ $? -ne 0 ]
    11         -  then
    12         -    ((failurec=failurec+1))
    13         -  fi
    14         -  ((perc = 100*failurec/count))
    15         -  echo "Failed $failurec times out of $count ($perc perc)" 
    16         -  #sleep 30
    17         -done
    18         -
    19         -echo "======================================"
    20         -echo "Failed $failurec times out of $count ($perc perc)"

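The `<INSERT TEST HERE>` placeholder is left for the user to edit in by hand; the loop then prints a rolling failure rate. A hedged usage sketch, with a hypothetical test binary name:

```bash
# Hypothetical: edit soak_test.sh and replace ./<INSERT TEST HERE> with the
# test to soak, e.g. ./target/release/some_integration_test, then run it:
./soak_test.sh
# Failed 0 times out of 1 (0 perc)
# Failed 0 times out of 2 (0 perc)
# ...
```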
Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Add New Repository.md version [84bb4c199f].

     1         -## Add a New GitHub Repository - QA Steps
     2         -
     3         -New GitHub repo created? Then this document should walk you through the QA steps to standardise your repo, alongside all the other MaidSafe GitHub repositories. For steps and tools please use the MaidSafe-QA user unless instructions specify otherwise.
     4         -
     5         -### Fork the New Repository
     6         -
     7         -While logged into GitHub under your own account, fork the new repo and clone it locally.
     8         -
     9         -### Login to GitHub as MaidSafe-QA
    10         -
    11         -Log out of your own account and log back in as the MaidSafe-QA user.
    12         -
    13         -*At this stage you need to request temporary GitHub "Admin" privileges from Fraser, Viv or David.*
    14         -
    15         -### Add Repository to Travis
    16         -
    17         -Login to [Travis](https://travis-ci.org/), sync account, find the new repository you want to add and flick the switch to on.
    18         -
    19         -![Sync Account](Images/01.png)
    20         -
    21         -### Add Repository to AppVeyor
    22         -
    23         -Login to [AppVeyor](https://ci.appveyor.com/login) and select  `+ NEW PROJECT`
    24         -
    25         -![AppVeyor](Images/02.png)
    26         -
    27         -Then select the repository you would like to add
    28         -
    29         -![AppVeyor select repo](Images/03.png)
    30         -
    31         -Add appveyor.yml and .travis.yml scripts to new repository.
    32         -
    33         -From another [MaidSafe GitHub repository](https://github.com/maidsafe), copy and add the `appveyor.yml` and `.travis.yml` files to the root of your newly-forked local clone of the new repository. The `.travis.yml` will require minor tweaking (more of which in the following steps) especially creating and updating the secure token, which is used to upload rust documentation.
    34         -
    35         -### Give Travis Permissions
    36         -
    37         -While still logged into GitHub as the MaidSafe-QA user, go to settings and select "Personal access tokens". Now click `Generate new token` and create a new "Travis Deploy Token - <new repo name>"
    38         -
    39         -![Travis permissions](Images/04.png)
    40         -
    41         -and limit scopes to `public_repo` as shown below
    42         -
    43         -![Limit scopes](Images/05.png)
    44         -
    45         -Once you have clicked on "Generate token", copy the output as you will not see it again.
    46         -
    47         -[Install Travis gem](https://github.com/travis-ci/travis.rb#installation) to encrypt secure GitHub access
    48         -
    49         -Run this, where `<YOUR_TOKEN>` is the one we copied in the previous step.
    50         -
    51         -`travis encrypt -r maidsafe/<new_repo> GH_TOKEN=<YOUR_TOKEN>`
    52         -
    53         -Edit the `.travis.yml` file you added to the new repo and replace the long string in the line `-secure:` with the output you have just generated - example of what this looks like is below (the string has been shortened in this image).
    54         -
    55         -![travis.yml](Images/06.png)
    56         -
    57         -If you are not at this point going to update the repository's `README.md` then you can push all your local changes upstream and issue a PR to add them to the main repository.
    58         -
    59         -### Webhooks - Add Highfive
    60         -
    61         -Go to the project's settings (the `maidsafe` fork - not your fork) *> Settings > Webhooks & services > Add webhook*
    62         -
    63         -The Payload URL is
    64         -
    65         -```
    66         -http://visualiser.maidsafe.net/cgi-bin/highfive/newpr.py
    67         -```
    68         -
    69         -![Webhooks](Images/07.png)
    70         -
    71         -![Manage webhook](Images/08.png)
    72         -
    73         -
    74         -### Highfive Backend Configuration
    75         -
    76         -SSH (details in private assets GitHub repository) to the droplet hosting Highfive
    77         -
    78         -![Droplet](Images/09.png)
    79         -
    80         -Navigate to `/usr/lib/cgi-bin/highfive/configs/`
    81         -
    82         -![ls](Images/10.png)
    83         -
    84         -create a new `<repository_name>.json` file (copy an existing .json file)
    85         -
    86         -![json edit](Images/11.png)
    87         -
    88         -Edit the new `<repository_name>.json` file and update the maintainers' names.
    89         -
    90         -The important section is "groups" - note that entries & file names are case sensitive.
    91         -
    92         -### Add Coverage
    93         -
    94         -Login to [coveralls.io](https://coveralls.io/) using the MaidSafe-QA GitHub account and click `RE-SYNC REPOS`
    95         -
    96         -![coveralls](Images/12.png)
    97         -
    98         -Click `ADD REPOS`
    99         -
   100         -![add repo](Images/13.png)
   101         -
   102         -Flick the switch on your new repository
   103         -
   104         -![flick the switch](Images/14.png)
   105         -
   106         -### Update New Repo's `README.md`
   107         -
   108         -![repo example](Images/15.png)
   109         -
   110         -Above is a screenshot and below is a template, best take the markdown from another repository and edit to fit the purposes of the new repository.
   111         -
   112         -# < repository_name >
   113         -
   114         -[![](https://img.shields.io/badge/Project%20SAFE-Approved-green.svg)](http://maidsafe.net/applications) [![](https://img.shields.io/badge/License-GPL3-green.svg)](https://github.com/maidsafe/crust/blob/master/COPYING)
   115         -
   116         -
   117         -**Primary Maintainer:** < name > (< email_address >)
   118         -
   119         -**Secondary Maintainer:** < name > (< email_address >)
   120         -
   121         -Reliable peer-to-peer network connections in Rust with NAT traversal.
   122         -
   123         -|Crate|Linux/OS X|Windows|Coverage|Issues|
   124         -|:---:|:--------:|:-----:|:------:|:----:|
   125         -|[![](http://meritbadge.herokuapp.com/crust)](https://crates.io/crates/crust)|[![Build Status](https://travis-ci.org/maidsafe/crust.svg?branch=master)](https://travis-ci.org/maidsafe/crust)|[![Build status](https://ci.appveyor.com/api/projects/status/ajw6ab26p86jdac4/branch/master?svg=true)](https://ci.appveyor.com/project/MaidSafe-QA/crust/branch/master)|[![Coverage Status](https://coveralls.io/repos/maidsafe/crust/badge.svg)](https://coveralls.io/r/maidsafe/crust)|[![Stories in Ready](https://badge.waffle.io/maidsafe/crust.png?label=ready&title=Ready)](https://waffle.io/maidsafe/crust)|
   126         -
   127         -|[API Documentation - master branch](http://maidsafe.net/crust/master)|[SAFE Network System Documentation](http://systemdocs.maidsafe.net)|[MaidSafe website](http://maidsafe.net)| [SAFE Network Forum](https://forum.safenetwork.io)|
   128         -|:------:|:-------:|:-------:|:-------:|
   129         -
   130         -
   131         -## Overview
   132         -< insert_overview >
   133         -## Todo Items
   134         -< insert_todo_items >
   135         -
   136         -*In the above example the badges and links are for `crust` just for illustrative purposes*
   137         -
   138         -One niggle worth noting for AppVeyor badges that has caught a few folk out: you need to grab the markdown for master branch badge - this can be found on the AppVeyor site in the new repo page under: *Settings > Badges* and is the 6th or last entry on the page see below.
   139         -This is the one that needs pasted into the project's `README.md` and the QA `README.md`
   140         -
   141         -![AppVeyor badge](Images/16.png)
   142         -
   143         -### Switch On "Build only if .travis.yml / appveyor.yml is present"
   144         -
   145         -Log into Travis and go to repository *> settings > general settings* and switch `ON` *Build only if .travis.yml is present* setting.
   146         -
   147         -![Travis yml present switch](Images/17.png)
   148         -
   149         -Log into Appveyor and go to repository *> settings > general* and tick the *Do not build tags* , *Skip branches without appveyor.yml* and *Rolling builds* check boxes.
   150         -
   151         -![Appveyor yml present switch](Images/19.png)
   152         -
   153         -![Appveyor yml present switch](Images/18.png)
   154         -
   155         -### Add Reviewable
   156         -
   157         -Login to https://reviewable.io using the MaidSafe-QA GitHub account and go to *Repositories* section and toggle to green to enable Reviewable for pull requests.
   158         -
   159         -![Reviewable switch](Images/20.png)
   160         -
   161         -
   162         -### Update QA readme.md
   163         -
   164         -Finally add a new entry to https://github.com/maidsafe/QA/blob/master/README.md and issue a PR for this.
   165         -
   166         -### Revoke Github "Admin" from MaidSafe-QA user
   167         -
   168         -Once everything is complete, we need to revoke elevated privileges and reduce them back to "Write".
   169         -
   170         -*Ensure `Owners` have "Admin" privileges and `Bots` and `Developers` have "Write" privileges.*
   171         -
   172         -### Checklist to see if everything is ok:
   173         -
   174         -* Did Travis run?
   175         -* Did AppVeyor run?
   176         -* Does Highfive allocate a reviewer for a PR?
   177         -* Do all the links and badges go to the correct places?
   178         -* On a successful merge to master did Travis create and publish the documentation?
   179         -* Did Coverage run?

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Install libsodium for OS X or Linux.md version [3c45363b35].

     1         -# Install libsodium for OS X or Linux
     2         -
     3         -[libsodium](https://github.com/jedisct1/libsodium) is a native dependency of [sodiumoxide](https://github.com/dnaq/sodiumoxide).
     4         -
     5         -Download, unpack the most recent tarball of [libsodium](https://download.libsodium.org/libsodium/releases/), build the static variant and install to "/usr/local/":
     6         -
     7         -```bash
     8         -Version=1.0.9
     9         -mkdir temp
    10         -cd temp
    11         -wget https://github.com/jedisct1/libsodium/releases/download/$Version/libsodium-$Version.tar.gz
    12         -tar xfz libsodium-$Version.tar.gz
    13         -cd libsodium-$Version
    14         -./configure --enable-shared=no --disable-pie
    15         -Cores=$((hash nproc 2>/dev/null && nproc) || (hash sysctl 2>/dev/null && sysctl -n hw.ncpu) || echo 1)
    16         -make check -j$Cores
    17         -sudo make install
    18         -```
    19         -
    20         -Set environment variable `SODIUM_LIB_DIR` to the folder containing libsodium.a:
    21         -
    22         -```bash
    23         -export SODIUM_LIB_DIR=/usr/local/lib
    24         -```
    25         -
    26         -You can make this a permanent environment variable by adding this export command to your OS / shell specific .profile config file (e.g. `~/.bashrc`, `~/.bash_profile`).
    27         -
    28         -If you wish to do this system wide on Ubuntu for example you could update `/etc/environment`.

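For example, to make the variable permanent for bash as the note above suggests:

```bash
# Persist SODIUM_LIB_DIR for future bash sessions.
echo 'export SODIUM_LIB_DIR=/usr/local/lib' >> ~/.bashrc
```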
Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Install libsodium for Windows.md version [e7e8c05f48].

     1         -# Install libsodium for Windows
     2         -
     3         -[libsodium](https://github.com/jedisct1/libsodium) is a native dependency of [sodiumoxide](https://github.com/dnaq/sodiumoxide).
     4         -
     5         -Download the appropriate version (32-bit or 64-bit) [prebuilt libsodium static library](https://github.com/maidsafe/QA/tree/master/Dependencies/Windows).
     6         -
     7         -N.B. The path of the folder where libsodium.a will live cannot contain any spaces.
     8         -
     9         -Set environment variable `SODIUM_LIB_DIR` to the folder containing libsodium.a:
    10         -
    11         -```batch
    12         -setx SODIUM_LIB_DIR <path-to-libsodium.a-dir>
    13         -```
    14         -
    15         -Start a new command-prompt to continue.

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Installers/Vault/Draft Tests/Linux Process.md version [579123b592].

     1         -# Create Package for Vault on Linux
     2         -
     3         -- [ ] Run the package creation script ` safe_vault/installer/linux/scripts/create_packages.sh` in the `safe_vault` repository
     4         -- Check RPM (on e.g. a Fedora test machine)
     5         -  - Check installer can upgrade an existing version which is running
     6         -    - [ ] Check test machine has older version already installed and `safe_vault` is running
     7         -    - [ ] Copy the current bootstrap and config files
     8         -    - [ ] New installer should run without errors
     9         -    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    10         -    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    11         -    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    12         -    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    13         -    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
    14         -  - Check installer can upgrade an existing version which is not running
    15         -    - [ ] Check test machine has older version already installed and `safe_vault` is NOT running
    16         -    - [ ] Copy the current bootstrap and config files
    17         -    - [ ] New installer should run without errors
    18         -    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    19         -    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    20         -    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    21         -    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    22         -    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
    23         -  - Check installer succeeds on machine with no previous version installed
    24         -    - [ ] Check test machine has no version already installed
    25         -    - [ ] Installer should run without errors
    26         -    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    27         -    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    28         -    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    29         -    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    30         -    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
    31         -  - Check repair where current version already installed
    32         -    - [ ] Kill and remove existing version of `maidsafe_vault`
    33         -    - [ ] Copy the current bootstrap and config files
    34         -    - [ ] Installer should rerun without errors
    35         -    - [ ] Check `safe_vault` is running and is installed in `/usr/bin/`
    36         -    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    37         -    - [ ] Check bootstrap and config files haven't been overwritten
    38         -    - [ ] Remove bootstrap and config files
    39         -    - [ ] Installer should rerun without errors
    40         -    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    41         -    - [ ] Check config file is installed in `/var/cache/safe_vault/` has `-rw-r--r--` permissions and `safe` owner name and `root` group name
    42         -  - Check uninstall
    43         -    - [ ] Check `safe_vault` is running
    44         -    - [ ] Uninstall should run without errors
    45         -    - [ ] Check `safe_vault` is not running
    46         -    - [ ] Check `safe_vault`, bootstrap and config files have all been removed
    47         -  - [ ] Copy installer from slave to yum repository machine
    48         -  - [ ] Update yum repository
    49         -  - [ ] Check `yum install safe-vault` works on a clean machine
    50         -  - [ ] Check `yum update` updates existing version
    51         -- Check .deb (on e.g. an Ubuntu test machine)
    52         -  - Check installer can upgrade an existing version which is running
    53         -    - [ ] Check test machine has older version already installed and `safe_vault` is running
    54         -    - [ ] Copy the current bootstrap and config files
    55         -    - [ ] New installer should run without errors
    56         -    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    57         -    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    58         -    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    59         -    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    60         -    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
    61         -  - Check installer can upgrade an existing version which is not running
    62         -    - [ ] Check test machine has older version already installed and `safe_vault` is NOT running
    63         -    - [ ] Copy the current bootstrap and config files
    64         -    - [ ] New installer should run without errors
    65         -    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    66         -    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    67         -    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    68         -    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    69         -    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
    70         -  - Check installer succeeds on machine with no previous version installed
    71         -    - [ ] Check test machine has no version already installed
    72         -    - [ ] Installer should run without errors
    73         -    - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    74         -    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    75         -    - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    76         -    - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    77         -    - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
    78         -  - Check repair where current version already installed
    79         -    - [ ] Kill and remove existing version of `safe_vault`
    80         -    - [ ] Copy the current bootstrap and config files
    81         -    - [ ] Installer should rerun without errors
    82         -    - [ ] Check `safe_vault` is running and is installed in `/usr/bin/`
    83         -    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    84         -    - [ ] Check bootstrap and config files haven't been overwritten
    85         -    - [ ] Remove bootstrap and config files
    86         -    - [ ] Installer should rerun without errors
    87         -    - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    88         -    - [ ] Check config file is installed in `/var/cache/safe_vault/` has `-rw-r--r--` permissions and `safe` owner name and `root` group name
    89         -  - Check uninstall
    90         -    - [ ] Check `safe_vault` is running
    91         -    - [ ] Uninstall should run without errors
    92         -    - [ ] Check `safe_vault` is not running
    93         -    - [ ] Check `safe_vault`, bootstrap and config files have all been removed
    94         -  - [ ] Copy installer from slave to apt repository machine
    95         -  - [ ] Update apt repository
    96         -  - [ ] Check `apt-get install safe-vault` works on a clean machine
    97         -  - [ ] Check `apt-get update && apt-get upgrade` updates existing version

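Several checklist items above ask for specific permissions, ownership and install locations; a quick way to verify them on the test machine (standard coreutils and procps assumed):

```bash
# Expect "-rwxr-xr-x ... safe safe" for the binary and
# "-rw-r--r-- ... safe safe" for the crust config.
ls -l /usr/bin/safe_vault
ls -l /var/cache/safe_vault/safe_vault.crust.config
# Confirm the vault process is running.
pgrep -a safe_vault
```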
Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Installers/Vault/Draft Tests/OS X Process.md version [d121b73ad8].

     1         -# Create Package for Vault on OS X
     2         -
     3         -- [ ] Run the package creation script safe_vault/installer/osx/scripts/create_packages.sh in the safe_vault repository
     4         -- Check installer can upgrade an existing version which is running
     5         -  - [ ] Check test machine has older version already installed and `safe_vault` is running
     6         -  - [ ] Copy the current bootstrap and config files
     7         -  - [ ] New installer should run without errors
     8         -  - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
     9         -  - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    10         -  - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    11         -  - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    12         -  - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
    13         -- Check installer can upgrade an existing version which is not running
    14         -  - [ ] Check test machine has older version already installed and `safe_vault` is NOT running
    15         -  - [ ] Copy the current bootstrap and config files
    16         -  - [ ] New installer should run without errors
    17         -  - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    18         -  - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    19         -  - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    20         -  - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    21         -  - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
    22         -- Check installer succeeds on machine with no previous version installed
    23         -  - [ ] Check test machine has no version already installed
    24         -  - [ ] Installer should run without errors
    25         -  - [ ] Check new version of `safe_vault` is running and is installed in `/usr/bin/`
    26         -  - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    27         -  - [ ] Check correct config file(s) are installed to the system cache dir `/var/cache/safe_vault`
    28         -  - [ ] Check `safe_vault.crust.config` file has `-rw-r--r--` permissions and `safe` owner name and group name
    29         -  - [ ] Check bootstrap and config files are not present in app support dir `$HOME/.config/safe_vault/`
    30         -- Check repair where current version already installed
    31         -  - [ ] Kill and remove existing version of `safe_vault`
    32         -  - [ ] Copy the current bootstrap and config files
    33         -  - [ ] Installer should rerun without errors
    34         -  - [ ] Check `safe_vault` is running and is installed in `/usr/bin/`
    35         -  - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    36         -  - [ ] Check bootstrap and config files haven't been overwritten
    37         -  - [ ] Remove bootstrap and config files
    38         -  - [ ] Installer should rerun without errors
    39         -  - [ ] Check `safe_vault` has `-rwxr-xr-x` permissions and `safe` owner name and group name
    40         -  - [ ] Check config file is installed in `/var/cache/safe_vault/` has `-rw-r--r--` permissions and `safe` owner name and `root` group name
    41         -- Check uninstall
    42         -  - [ ] Check `safe_vault` is running
    43         -  - [ ] Uninstall should run without errors
    44         -  - [ ] Check `safe_vault` is not running
    45         -  - [ ] Check `safe_vault`, bootstrap and config files have all been removed
    46         -- Check installer can be downloaded
    47         -  - [ ] Webpage should detect OS and show link to appropriate installer
    48         -  - [ ] Download installer and hash check it against original
    49         -  - [ ] Check downloaded filename is meaningful

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Installers/Vault/Draft Tests/Windows Process.md version [8e241a7f6a].

     1         -# Create Package for Vault on Windows
     2         -
     3         -- [ ] Run the installer creation script `safe_vault/installer/windows/create_installer.ps1` in the safe_vault repository
     4         -- Check installer can upgrade (using default options) an existing version installed to default location which is running
     5         -  - [ ] Check test machine has older version already installed using default options and `safe_vault.exe` is running
     6         -  - [ ] Copy the current bootstrap and config files
     7         -  - [ ] New installer should run without errors
     8         -  - [ ] Check new version of `safe_vault.exe` is running and is installed in default location
     9         -  - [ ] Check bootstrap and config files haven't been overwritten
    10         -- Check installer can upgrade (using default options) an existing version installed to default location which is not running
    11         -  - [ ] Check test machine has older version already installed using default options and `safe_vault.exe` is NOT running
    12         -  - [ ] Copy the current bootstrap and config files
    13         -  - [ ] New installer should run without errors
    14         -  - [ ] Check new version of `safe_vault.exe` is running and is installed in default location
    15         -  - [ ] Check bootstrap and config files haven't been overwritten
    16         -- Check installer can upgrade (using default options) an existing version installed to non-default location which is running
    17         -  - [ ] Check test machine has older version already installed using NON-default options and `safe_vault.exe` is running
    18         -  - [ ] Copy the current bootstrap and config files
    19         -  - [ ] New installer should run without errors
    20         -  - [ ] Check new version of `safe_vault.exe` is running and is installed in default location
    21         -  - [ ] Check old version of `safe_vault.exe` has been deleted from non-default location
    22         -  - [ ] Check bootstrap and config files haven't been overwritten
    23         -- Check installer succeeds using default options on machine with no previous version installed
    24         -  - [ ] Check test machine has no version already installed
    25         -  - [ ] Installer should run without errors
    26         -  - [ ] Check new version of `safe_vault.exe` is running and is installed in default location
    27         -  - [ ] Check bootstrap and config files are installed in their default locations
    28         -- Check repair where current version installed using defaults
    29         -  - [ ] Kill and remove existing version of `safe_vault.exe`
    30         -  - [ ] Copy the current bootstrap and config files
    31         -  - [ ] Installer should run repair without errors
    32         -  - [ ] Check `safe_vault.exe` is running and has been re-installed to previous location
    33         -  - [ ] Check bootstrap and config files haven't been overwritten
    34         -  - [ ] Remove bootstrap and config files
    35         -  - [ ] Installer should run repair without errors
    36         -  - [ ] Check `safe_vault.exe` is running and is installed in previous location
    37         -- Check repair where current version installed to non-default location
    38         -  - [ ] Kill and remove existing version of `safe_vault.exe`
    39         -  - [ ] Copy the current bootstrap and config files
    40         -  - [ ] Installer should run repair without errors
    41         -  - [ ] Check `safe_vault.exe` is running and has been re-installed to previous location
    42         -  - [ ] Check bootstrap and config files haven't been overwritten
    43         -  - [ ] Remove bootstrap and config files
    44         -  - [ ] Installer should run repair without errors
    45         -  - [ ] Check `safe_vault.exe` is running and is installed in previous location
    46         -- Check uninstall where current version installed using defaults
    47         -  - [ ] Check `safe_vault.exe` is running
    48         -  - [ ] Uninstall should run without errors
    49         -  - [ ] Check `safe_vault.exe` is not running
    50         -  - [ ] Check `safe_vault.exe`, bootstrap and config files have all been removed
    51         -- Check uninstall where current version installed to non-default location
    52         -  - [ ] Check `safe_vault.exe` is running
    53         -  - [ ] Uninstall should run without errors
    54         -  - [ ] Check `safe_vault.exe` is not running
    55         -  - [ ] Check `safe_vault.exe`, bootstrap and config files have all been removed
    56         -- [ ] Copy installer from slave to website
    57         -- [ ] Update website to link to new installer
    58         -- Check installer can be downloaded
    59         -  - [ ] Webpage should detect OS and show link to appropriate installer
    60         -  - [ ] Download installer and hash check it against original
    61         -  - [ ] Check downloaded filename is meaningful
    62         -  - [ ] Check installer has appropriate high-res icon

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Installers/Vault/Update Apt and Yum Repos.md version [bf7f894505].

     1         -# Update Apt and Yum Repos
     2         -
     3         -##### Build and Transfer 32-bit Package
     4         -
     5         -```sh
     6         -ssh maidsafe@178.62.25.205
     7         -
     8         -rustup update
     9         -git -C QA pull
    10         -
    11         -cd safe_vault
    12         -git pull
    13         -
    14         -~/QA/Bash\ Scripts/create_linux_vault_package.sh
    15         -```
    16         -
    17         -
    18         -##### Build and Transfer 64-bit Package
    19         -```sh
    20         -ssh maidsafe@178.62.85.248
    21         -
    22         -rustup update
    23         -git -C QA pull
    24         -
    25         -cd safe_vault
    26         -git pull
    27         -
    28         -~/QA/Bash\ Scripts/create_linux_vault_package.sh
    29         -```
    30         -
    31         -
    32         -##### Update Apt Repo
    33         -
    34         -```sh
    35         -ssh maidsafe@apt.maidsafe.net
    36         -Version=$(cat safe_vault_latest_version.txt)
    37         -cd /var/www/repos/apt/debian
    38         -
    39         -# sudo reprepro remove jessie safe-vault
    40         -# sudo reprepro remove wheezy safe-vault
    41         -
    42         -sudo reprepro includedeb jessie ~/SysV-style/safe-vault_"$Version"_amd64.deb
    43         -sudo reprepro includedeb jessie ~/SysV-style/safe-vault_"$Version"_i386.deb
    44         -sudo reprepro includedeb wheezy ~/SysV-style/safe-vault_"$Version"_amd64.deb
    45         -sudo reprepro includedeb wheezy ~/SysV-style/safe-vault_"$Version"_i386.deb
    46         -
    47         -mv ~/safe_*.tar.gz /var/www/tarballs/
    48         -```
    49         -
    50         -##### Update Yum Repo
    51         -
    52         -```sh
    53         -ssh maidsafe@yum.maidsafe.net
    54         -cd /var/www/repos
    55         -cp ~/SysV-style/* .
    56         -rpm --resign *.rpm
    57         -createrepo .  # need '--checksum sha' for at least CentOS <= 5.10  See http://linux.die.net/man/8/createrepo
    58         -gpg2 --detach-sign --armor repodata/repomd.xml
    59         -```
    60         -
    61         ----
    62         -
    63         -##### Apt Links
    64         -
    65         -- http://www.jejik.com/articles/2006/09/setting_up_and_managing_an_apt_repository_with_reprepro/
    66         -- https://mirrorer.alioth.debian.org/reprepro.1.html
    67         -- https://wiki.debian.org/HowToSetupADebianRepository#reprepro_for_new_packages
    68         -- https://wiki.debian.org/SettingUpSignedAptRepositoryWithReprepro
    69         -- https://scotbofh.wordpress.com/2011/04/26/creating-your-own-signed-apt-repository-and-debian-packages/
    70         -
    71         -##### Yum Links
    72         -
    73         -- http://www.idimmu.net/2009/10/20/creating-a-local-and-http-redhat-yum-repository/
    74         -- http://yum.baseurl.org/wiki/RepoCreate
    75         -- http://fedoranews.org/tchung/gpg/
    76         -- https://iuscommunity.org/pages/CreatingAGPGKeyandSigningRPMs.html

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Installers/Vault/Windows Installers.md version [1d5cc36384].

     1         -# Windows Installers
     2         -
     3         -On each of the Windows build machines in the office (one 32-bit, one 64-bit, both Windows 7), carry out the following process:
     4         -
     5         -- Open C:\MaidSafe\safe_vault\installer\windows\safe_vault_32_and_64_bit.aip in a text editor
     6         -- Search for the phrase `Enter path to certificate.p12` and replace it with the actual path to the certificate
     7         -- Open a **PowerShell** terminal and run the following commands:
     8         -
     9         -```powershell
    10         -. rustup update
    11         -. "C:\Program Files\Git\bin\git.exe" -C C:\MaidSafe\QA pull
    12         -
    13         -cd C:\MaidSafe\safe_vault
    14         -. "C:\Program Files\Git\bin\git.exe" pull
    15         -
    16         -. installer\windows\create_installer.ps1
    17         -
    18         -. "C:\Program Files\Git\bin\git.exe" checkout .
    19         -```

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Managing Remote Servers.md version [fb67971f79].

     1         -# Managing Remote Servers
     2         -
     3         -The objective of this document is to detail a standard process for handling remote servers (e.g.
     4         -Droplets), so that all MaidSafe remote servers are secure and can be accessed in a similar way.
     5         -This should make working with and scripting for these servers simpler.
     6         -
     7         -Note that this does not apply to "throw-away" remote servers, which are used for short-term testing
     8         -and need not be secure.
     9         -
    10         -### Setting up a New Server
    11         -
    12         -Where there is a choice, we should never allow the host to send us a root password via email.  If a
    13         -root or sudo user's password _is_ ever emailed (even internally between two MaidSafe employees), it
    14         -should immediately be treated as compromised and changed.
    15         -
    16         -In the case of Droplets, we should add all QA members' SSH keys by default.  This allows any QA
    17         -member to ssh into the droplet as root.  However, this should generally only ever be done once, in
    18         -order to create the new `qa` user as detailed below.  Working as root is not a good practice and
    19         -should be kept to a minimum.
    20         -
    21         -As soon as a new server is created, the following steps should be taken:
    22         -
    23         -1. ssh into the server as root
    24         -1. create a sudo user named `qa` with a strong, unique, random password.  On Ubuntu:
    25         -
    26         -    ```bash
    27         -    adduser qa
    28         -    adduser qa sudo
    29         -    ```
    30         -
    31         -    or on Fedora:
    32         -
    33         -    ```bash
    34         -    useradd qa
    35         -    passwd qa
    36         -    usermod -a -G wheel qa
    37         -    ```
    38         -
    39         -1. exit the ssh session
    40         -1. add details of the server to an existing or new document in the QA folder of the private
    41         -[Assets](https://github.com/maidsafe/Assets/tree/master/QA) repository
    42         -
    43         -### Managing the Servers
    44         -
    45         -#### Compromised Password
    46         -
    47         -If the password of a sudo user is compromised (e.g. laptop lost/stolen, password emailed), all
    48         -affected servers should be updated as soon as possible.  As passwords should be unique, this should
    49         -apply to just a single user account on a single server.
    50         -
    51         -The fix can either be to change the password, or to delete the user.
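
For example, a sketch of both options on an Ubuntu host, assuming the affected account is called `olduser` (a hypothetical name):

```bash
# Either set a new strong, unique, random password for the account...
sudo passwd olduser

# ...or delete the account (and its home directory) entirely
sudo deluser --remove-home olduser
```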
    52         -
    53         -#### Compromised SSH Key
    54         -
    55         -If the private SSH key of a sudo user is compromised (e.g. laptop lost/stolen, private key
    56         -emailed!), all affected servers should be updated as soon as possible.
    57         -
    58         -The hard part will be identifying all the accounts to which this key has access.  For a QA team
    59         -member, this will likely include the root user, their own user account and perhaps other users'
    60         -accounts on every remote server.
    61         -
    62         -The fix is to remove the affected key from the relevant `authorized_keys` files.  This will be in
    63         -`/home/<USER>/.ssh/` or `/root/.ssh/`.
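
As an illustration, a minimal sketch of dropping a compromised key, assuming it can be identified by its comment field (here the hypothetical `user@lost-laptop`):

```bash
# Keep a backup (.bak) and delete any line containing the compromised key's comment;
# replace <USER> with each affected account, and repeat for /root/.ssh/ if needed
sed -i.bak '/user@lost-laptop/d' /home/<USER>/.ssh/authorized_keys
```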
    64         -
    65         -#### Adding new Users
    66         -
    67         -If, for whatever reason, a non-QA team member wants to access a remote server, don't share
    68         -credentials with that member; instead, create a new user account for them.  Normally, the only shared
    69         -account should be the `qa` one (an exception is the `peer1` account on the `peer_prog.maidsafe.net`
    70         -Droplet).
    71         -
    72         -Before creating an account for them, ensure that they really need access to the secure server.  If
    73         -their work can be done on a non-secure, throw-away Droplet for example, then that is the best
    74         -option.
    75         -
    76         -Don't give the new user sudo access if not required.  If sudo access _is_ required, then create the
    77         -new user with a strong, unique, random password, but **don't email this password** to the team
    78         -member.  Instead, send it via a Mumble message.
    79         -
    80         -The team member should be asked to never change the password to a weak one, nor to one which they
    81         -use elsewhere.  They should also notify QA once the account can be deleted.

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Rust Lint Checks.md version [4b22ac8612].

     1         -# Rust Lint Checks
     2         -
     3         -```
     4         -#![forbid(
     5         -    bad_style,              // Includes:
     6         -                            // - non_camel_case_types:   types, variants, traits and type parameters
     7         -                            //                           should have camel case names,
     8         -                            // - non_snake_case:         methods, functions, lifetime parameters and
     9         -                            //                           modules should have snake case names
    10         -                            // - non_upper_case_globals: static constants should have uppercase
    11         -                            //                           identifiers
    12         -    exceeding_bitshifts,    // shift exceeds the type's number of bits
    13         -    mutable_transmutes,     // mutating transmuted &mut T from &T may cause undefined behavior
    14         -    no_mangle_const_items,  // const items will not have their symbols exported
    15         -    unknown_crate_types,    // unknown crate type found in #[crate_type] directive
    16         -    warnings                // mass-change the level for lints which produce warnings
    17         -    )]
    18         -
    19         -#![deny(
    20         -    deprecated,                    // detects use of #[deprecated] items
    21         -    drop_with_repr_extern,         // use of #[repr(C)] on a type that implements Drop
    22         -    improper_ctypes,               // proper use of libc types in foreign modules
    23         -    missing_docs,                  // detects missing documentation for public members
    24         -    non_shorthand_field_patterns,  // using `Struct { x: x }` instead of `Struct { x }`
    25         -    overflowing_literals,          // literal out of range for its type
    26         -    plugin_as_library,             // compiler plugin used as ordinary library in non-plugin crate
    27         -    private_no_mangle_fns,         // functions marked #[no_mangle] should be exported
    28         -    private_no_mangle_statics,     // statics marked #[no_mangle] should be exported
    29         -    raw_pointer_derive,            // uses of #[derive] with raw pointers are rarely correct
    30         -    stable_features,               // stable features found in #[feature] directive
    31         -    unconditional_recursion,       // functions that cannot return without calling themselves
    32         -    unknown_lints,                 // unrecognized lint attribute
    33         -    unsafe_code,                   // usage of `unsafe` code
    34         -    unused,                        // Includes:
    35         -                                   // - unused_imports:     imports that are never used
    36         -                                   // - unused_variables:   detect variables which are not used in
    37         -                                   //                       any way
    38         -                                   // - unused_assignments: detect assignments that will never be
    39         -                                   //                       read
    40         -                                   // - dead_code:          detect unused, unexported items
    41         -                                   // - unused_mut:         detect mut variables which don't need to
    42         -                                   //                       be mutable
    43         -                                   // - unreachable_code:   detects unreachable code paths
    44         -                                   // - unused_must_use:    unused result of a type flagged as
    45         -                                   //                       #[must_use]
    46         -                                   // - unused_unsafe:      unnecessary use of an `unsafe` block
    47         -                                   // - path_statements: path statements with no effect
    48         -    unused_allocation,             // detects unnecessary allocations that can be eliminated
    49         -    unused_attributes,             // detects attributes that were not used by the compiler
    50         -    unused_comparisons,            // comparisons made useless by limits of the types involved
    51         -    unused_features,               // unused or unknown features found in crate-level #[feature]
    52         -                                   // directives
    53         -    unused_parens,                 // `if`, `match`, `while` and `return` do not need parentheses
    54         -    while_true                     // suggest using `loop { }` instead of `while true { }`
    55         -    )]
    56         -
    57         -#![warn(
    58         -    trivial_casts,            // detects trivial casts which could be removed
    59         -    trivial_numeric_casts,    // detects trivial casts of numeric types which could be removed
    60         -    unused_extern_crates,     // extern crates that are never used
    61         -    unused_import_braces,     // unnecessary braces around an imported item
    62         -    unused_qualifications,    // detects unnecessarily qualified names
    63         -    unused_results,           // unused result of an expression in a statement
    64         -    variant_size_differences  // detects enums with widely varying variant sizes
    65         -    )]
    66         -
    67         -#![allow(
    68         -    box_pointers,                  // use of owned (Box type) heap memory
    69         -    fat_ptr_transmutes,            // detects transmutes of fat pointers
    70         -    missing_copy_implementations,  // detects potentially-forgotten implementations of `Copy`
    71         -    missing_debug_implementations  // detects missing implementations of fmt::Debug
    72         -    )]
    73         -```

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Rust Style.md version [7401f55d1a].

     1         -# Contributing Rust code to MaidSafe
     2         -
     3         -We don't maintain a separate style guide but in general try to follow [common good practice](https://aturon.github.io/), write readable and idiomatic code and aim for full test coverage. In addition, this document lists a few decisions we've reached in discussions about specific topics.
     4         -
     5         -## Rust version
     6         -
     7         -We currently use Rust stable 1.16.0.
     8         -
     9         -## Unwrap
    10         -
    11         -Don't unwrap [`Option`](https://doc.rust-lang.org/std/option/enum.Option.html)s or [`Result`](https://doc.rust-lang.org/std/result/enum.Result.html)s, except possibly when:
    12         -
    13         -1. locking a mutex,
    14         -2. spawning a thread,
    15         -3. joining a thread
    16         -
    17         -or in other patterns where using them makes the code _much simpler_ and it is _obvious at first glance_ to the reader (even one unfamiliar with the code) that the value cannot be `None`/`Err`.
    18         -
    19         -In these cases, as well as in tests, consider using the macros from the [`unwrap` crate](https://crates.io/crates/unwrap).
    20         -
    21         -## Threads
    22         -
    23         -Generally avoid detached threads. Give child threads meaningful names.
    24         -
    25         -This can easily be achieved by preferring to create child threads using [`maidsafe_utilities::thread::named()`](http://docs.maidsafe.net/maidsafe_utilities/master/maidsafe_utilities/thread/fn.named.html).
    26         -
    27         -* it returns a [`Joiner`](http://docs.maidsafe.net/maidsafe_utilities/master/maidsafe_utilities/thread/struct.Joiner.html) which helps to avoid detached threads
    28         -* it requires that the child thread is given a name
    29         -
    30         -## Rustfmt
    31         -
    32         -Apply the latest `rustfmt` to new code before committing, using the default configuration or, if present, the repository's `rustfmt.toml` file.
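
A typical invocation might look like the following (a sketch; the exact way to install `rustfmt` can vary between releases):

```
cargo install rustfmt   # installs rustfmt and the `cargo fmt` subcommand (add --force to update an existing install)
cargo fmt               # uses the default config, or rustfmt.toml if the repository has one
```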
    33         -
    34         -## Function ordering
    35         -
    36         -In `impl`s, always put public functions before private ones.
    37         -
    38         -## Clippy
    39         -
    40         -If a crate has a `clippy` feature, make sure your code does not produce any new errors when compiling with `--features=clippy`. If you don't agree with a [Clippy lint](https://github.com/Manishearth/rust-clippy#lints), discuss it with the team before explicitly adding an `#[allow(lint)]` attribute.
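
For example, the check might be run against the pinned nightly toolchain noted below (a sketch; it assumes the crate exposes a `clippy` feature):

```
rustup run nightly-2017-03-16 cargo build --features=clippy
```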
    41         -
    42         -We currently use Clippy 0.0.120 with the nightly toolchain installed by `rustup install nightly-2017-03-16`:
    43         -```
    44         -rustc --version
    45         -rustc 1.17.0-nightly (0aeb9c129 2017-03-15)
    46         -```
    47         -
    48         -**Note for Windows users:** Due to a recent bug in rustup, you may get a missing DLL error when trying to run `cargo clippy`.  In this case, you can work around the issue by modifying your `PATH` environment variable:
    49         -
    50         -```
    51         -setx PATH "%USERPROFILE%\.multirust\toolchains\nightly-2017-03-16-x86_64-pc-windows-gnu\bin;%PATH%"
    52         -```
    53         -
    54         -## Cargo
    55         -
    56         -Use `cargo-edit` to update dependencies, or, if editing `Cargo.toml` by hand, keep it in the formatting that `cargo-edit` uses.
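
For example (a sketch assuming `cargo-edit` is installed; `unwrap` is just an illustrative dependency):

```
cargo install cargo-edit
cargo add unwrap   # adds the dependency, keeping Cargo.toml in cargo-edit's formatting
```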
    57         -
    58         -## Other crates
    59         -
    60         -Adding new dependencies to MaidSafe crates in general should be discussed in the team first, except if other MaidSafe crates already have the same dependency. E.g. [quick-error](https://crates.io/crates/quick-error) and [unwrap](https://crates.io/crates/unwrap) are fine to use.
    61         -
    62         -## Git Commit Messages
    63         -
    64         -The first line of the commit message should have the format `<type>/<scope>: <subject>`. For details see the [Leaf project's guidelines](https://github.com/autumnai/leaf/blob/master/CONTRIBUTING.md#git-commit-guidelines).
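
For example (the type, scope and subject here are purely illustrative):

```
git commit -m "fix/routing: handle stale connection info"
```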

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Documentation/Update Snapshot Used by Droplet Deployer.md version [d8b1fae71f].

     1         -## Update Snapshot Used by Droplet Deployer
     2         -
     3         -1. Create a new droplet from the existing "Droplet Deployer" [snapshot][0].
     4         -1. Make whatever changes are required (user is `qa`, password is held in [release_config repo][1]).
     5         -1. On the droplet, run `sudo rm -rf /root/.ssh/ && sudo shutdown -h now`
     6         -1. Once the droplet has shut down, take a new snapshot called `Droplet Deployer`.
     7         -1. Replicate [the snapshot][0] to all regions (click the "More" option, then "Add to Region").
     8         -1. Rename the [old snapshot][0] to `Old Droplet Deployer` (check "Created" values).
     9         -1. [Generate a new Personal Access Token][2].
    10         -1. To get the ID of the newly-created snapshot, run `curl -sX GET -H "Content-Type: application/json" -H "Authorization: Bearer <token here>" "https://api.digitalocean.com/v2/images?private=true" | sed -n 's/.*"id":\([^,]*\),"name":"Droplet Deployer".*/\n\1\n\n/p'`
    11         -1. If this doesn't yield an ID, it may be due to pagination of the response; you may need to add `&page=2` (or whatever value the last page has) to the end of the URL after `private=true`.  Alternatively, check that the [new snapshot][0] has finished being created.
    12         -1. Replace the existing value of `"imageId"` in [Droplet Deployer's config.json file][3] with the new one.
    13         -1. Test the [Droplet Deployer][4] tool.
    14         -1. Commit and push the change.
    15         -1. [Delete the Personal Access Token][5].
    16         -1. [Delete the `Old Droplet Deployer` snapshot][0].
    17         -1. [Delete the freshly shut-down Droplet][6] used to create the new snapshot.
    18         -
    19         -
    20         -[0]: https://cloud.digitalocean.com/images/snapshots
    21         -[1]: https://github.com/maidsafe/release_config/blob/master/droplets/credentials.json#L3
    22         -[2]: https://cloud.digitalocean.com/settings/api/tokens/new
    23         -[3]: https://github.com/maidsafe/QA/blob/master/droplet_deployer/config.json#L37
    24         -[4]: https://github.com/maidsafe/QA/tree/master/droplet_deployer
    25         -[5]: https://cloud.digitalocean.com/settings/api/tokens
    26         -[6]: https://cloud.digitalocean.com/droplets

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Powershell Scripts/AppVeyor/Build.ps1 version [1e2ab278e6].

     1         -# rustup default $env:RUST_VERSION
     2         -
     3         -# Use features if they've been set
     4         -if ($env:Features) {
     5         -    $with_features = "--features",$env:Features
     6         -}
     7         -
     8         -# Use Release flag if required
     9         -if ($env:CONFIGURATION -eq "Release") {
    10         -    $release_flag = "--release"
    11         -}
    12         -
    13         -# Build library and tests
    14         -Invoke-Command { cargo test --no-run --verbose $with_features $release_flag } -NoNewScope

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Powershell Scripts/AppVeyor/Install Rust.ps1 version [0847f77ff4].

     1         -# Determine the appropriate arch to install
     2         -if ($env:PLATFORM -eq "x86") {
     3         -    $arch = "i686"
     4         -} else {
     5         -    $arch = "x86_64"
     6         -}
     7         -
     8         -# Download Rust installer
     9         -$url = "https://github.com/rust-lang-nursery/multirust-rs-binaries/raw/master/$arch-pc-windows-gnu/multirust-setup.exe"
    10         -$installer = $env:TEMP + "\multirust-rs.exe"
    11         -(New-Object System.Net.WebClient).DownloadFile($url, $installer)
    12         -
    13         -# Install MultiRust
    14         -$input_file = $env:TEMP + "\input.txt"
    15         -Set-Content $input_file "y`r`ny`r`n"
    16         -Start-Process $installer -Wait -NoNewWindow -RedirectStandardInput $input_file
    17         -
    18         -# Add MultiRust to path
    19         -$env:Path = $env:USERPROFILE + "\.cargo\bin;" + $env:Path
    20         -
    21         -# Set the requested channel and install nightly
    22         -# multirust update nightly
    23         -multirust default $env:RUST_VERSION
    24         -
    25         -"Rust version:"
    26         -""
    27         -rustc -vV
    28         -if (!$?) {
    29         -    exit 99
    30         -}
    31         -""
    32         -""
    33         -
    34         -"Cargo version:"
    35         -""
    36         -cargo -V
    37         -if (!$?) {
    38         -    exit 99
    39         -}
    40         -""
    41         -""

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Powershell Scripts/AppVeyor/Install Rustup.ps1 version [76877b477e].

     1         -# Determine the appropriate arch to install
     2         -if ($env:PLATFORM -eq "x86") {
     3         -    $env:Path = "C:\msys64\mingw32\bin;C:\msys64\usr\bin;" + $env:Path
     4         -    $arch = "i686"
     5         -} else {
     6         -    $env:Path = "C:\msys64\mingw64\bin;C:\msys64\usr\bin;" + $env:Path
     7         -    $arch = "x86_64"
     8         -}
     9         -
    10         -# Install gcc if required
    11         -bash -lc "pacman -S --noconfirm --needed mingw-w64-$arch-gcc"
    12         -
    13         -# Download Rust installer
    14         -$url = "https://static.rust-lang.org/rustup/dist/$arch-pc-windows-gnu/rustup-init.exe"
    15         -$installer = $env:TEMP + "\rustup-init.exe"
    16         -(New-Object System.Net.WebClient).DownloadFile($url, $installer)
    17         -
    18         -# Run installer
    19         -$installer = $installer.Replace("\", "/")
    20         -bash -lc "$installer -y --default-host $arch-pc-windows-gnu"
    21         -
    22         -# Add rustup to path
    23         -$env:Path = $env:USERPROFILE + "\.cargo\bin;" + $env:Path
    24         -
    25         -# Set the requested channel and install nightly
    26         -# rustup update nightly
    27         -rustup default $env:RUST_VERSION
    28         -
    29         -"Rust version:"
    30         -""
    31         -rustc -vV
    32         -if (!$?) {
    33         -    exit 99
    34         -}
    35         -""
    36         -""
    37         -
    38         -"Cargo version:"
    39         -""
    40         -cargo -V
    41         -if (!$?) {
    42         -    exit 99
    43         -}
    44         -""
    45         -""

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Powershell Scripts/AppVeyor/Run Tests.ps1 version [dc011bd38e].

     1         -# Exit the script if building fails
     2         -$ErrorActionPreference = "Stop"
     3         -
     4         -cd $env:APPVEYOR_BUILD_FOLDER
     5         -
     6         -# Prepare test script
     7         -$cargo_test = {
     8         -    # Check cargo has installed properly
     9         -    cargo -V
    10         -    if (!$?) {
    11         -        99 > ($env:TEMP + "\TestResult.txt")
    12         -        return
    13         -    }
    14         -
    15         -    cd $env:APPVEYOR_BUILD_FOLDER
    16         -
    17         -    # Use features if they've been set
    18         -    if ($env:Features) {
    19         -        $with_features = "--features",$env:Features
    20         -    }
    21         -
    22         -    # Use Release flag if required
    23         -    if ($env:CONFIGURATION -eq "Release") {
    24         -        $release_flag = "--release"
    25         -    }
    26         -
    27         -    cargo test $with_features $release_flag -- --nocapture
    28         -    $LASTEXITCODE > ($env:TEMP + "\TestResult.txt")
    29         -}
    30         -
    31         -# Run the test script
    32         -""
    33         -"Starting tests."
    34         -$job = Start-Job -ScriptBlock $cargo_test
    35         -
    36         -# Set timeout to env var or use default of 10 minutes
    37         -$timeout_ms = 600000
    38         -if ($env:TimeoutSeconds) {
    39         -    $timeout_ms = [Int32]$env:TimeoutSeconds * 1000
    40         -}
    41         -
    42         -# Loop until timed out or tests have completed
    43         -$ErrorActionPreference = "Continue"
    44         -$start_time = Get-Date
    45         -$current_time = $start_time
    46         -$completed = $false
    47         -while ((($current_time - $start_time).TotalMilliseconds -lt $timeout_ms) -and (-not $completed)) {
    48         -    $sleep_ms = 100
    49         -    Start-Sleep -m $sleep_ms
    50         -
    51         -    # Display test's results so far
    52         -    Receive-Job $job
    53         -
    54         -    # Check if the tests have completed
    55         -    $running = $job | Where-Object { $_.State -match 'running' }
    56         -    if (-not $running) {
    57         -        $completed = $true
    58         -    }
    59         -    $current_time = Get-Date
    60         -}
    61         -
    62         -if (-not $completed) {
    63         -    # Exit with non-zero value if the test timed out
    64         -
    65         -    # Kill the job and retrieve any buffered output
    66         -    Get-ChildItem "target\$env:CONFIGURATION" -Filter *.exe | Foreach-Object { Stop-Process -name $_.BaseName *>$null }
    67         -    Stop-Job $job
    68         -    Receive-Job $job
    69         -
    70         -    $timeout_seconds = $timeout_ms / 1000
    71         -    ""
    72         -    "Tests ran for longer than $timeout_seconds seconds, so have timed out."
    73         -    $test_result = -2
    74         -} else {
    75         -    # Retrieve the return code of the test command, so we can return it later
    76         -    $test_result = Get-Content ($env:TEMP + "\TestResult.txt")
    77         -}
    78         -
    79         -# Run Clippy, but don't fail overall if Clippy fails.
    80         -# ""
    81         -# "Running Clippy."
    82         -# multirust run nightly cargo test --no-run --features clippy
    83         -
    84         -exit $test_result

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Adam.pub version [92584adc12].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDYrVCkGerTtN6QDfZK30PMORBO7Up6Cbg3fikqIaGlLFN+osMn6NjZvfKBXb2JOnlPGRtuzb8KUYl14gtHo/eQ9BT5ASKbKp+LUw6eEmfcaZdd7H3x9GfsbH3+EG9ALm/NPqUBDXNshRq563yfPJMkz4Rk/hcTVURl0E3IPcLHE5ymjCz8Ar8NMdvmWAD7ft/QqoRRG4Bnx3Tc6uSi5s35jHdj66zQlLpoDpZ+IW3z7mk03nE7B8in1quHfNKwRYNIb0vBoV5nKSFwquGpYfB+M0/g1R9a8JRrLeMGv+XkGVGt6Ltja76fxYygZZDP99XrFqw89bEL4mOzrDCGTwDZ adam@higgsboson

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Andreas.pub version [c2c9b6e8ef].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDUwyrjDbQQhVzXk2mdMLm80/+2eHW1L4fw/flmmee+4FI3WF7b8L1bMjl7TApeMNU+HXc3KxBupkni5LjuXLZOS2L/Zo6yIcrudQpyAb8275phueT3KS36Q4oNLEv+E3IXQiyfNeE8hsvqoFdoo+V9FyR9SFPlDndfUsTC4O/nANWv+jO+1K6Iyd4b5OhZUP+Iw563OtSXFwFGxpgEhz3dUOqL6C0i5M2hxnqdx0FesBowE6uu4Npsjf1KUE/aNcM/+9+loD1PCnQja634V5m6jKy2y121h7n5S/y0gbusoml9Kfe8z30CMwyP4SkHwtBIPG1bf38N08/LUfbr83p7CpIz6wOCcDdY8mx2SsfCoyb0eJcCP7czlqHe70i6F9o77SWWdRX/m25x9bcKug6MUYgVNB5BXbN3nj0RxmitNQ7MpPcs6YD0WxtY8KDh1XZ2a73bie+h/bjN2FqT92AnC9mmZ82YP/v/4l0GI3854dxB5uGGG3m9j1TqYg1I/GVpuqiF7lGRvnR7ip+ahpOVnmaV/pUOQPZGuYps/0hSo5UIo8G1o89nk4eICScwU9h6cSx+MrUjVciPssIadiL4SZ8KU55arkyzXb6zZRhm3MKKBTmB3FIU6/9MW/2N2LcoTurwcC8+wELvGTfYXOIxUVjvpIZ21ZfR7F1n61XRAQ== AndreasFackler@gmx.de

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Bart.pub version [c76b5d6893].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnf8sXey8Q5tBPKEAkJAErTIYgluP/NnMpqG86dcSWDbJOXay5PKQh5iXwRLCJ+ZJLvft2a/QGMVXain/yF9wKugUPosFg7dqgQKyFQk0Y3nKK/I4OGyKd3XJtOBVckYow/wEPDLkAWThf2VimDudUbsJ6VPDbAlWBg8NTiDJRaPzohpkru7c/y+yyuFVxmRi4m+1YzM00R12HJr5jqf/qNOZI/pUccNEhMnchFlU7t++Pk0ZhwOgvLEeGfLGfI622HdNVToVNJ7VVxVMr+qyvqBXiIVfIdRVGvoBeoIboTpUxEcYvkgPouxQxkJOSrbxOF/b+3nQ6bff9UTUDL9zf bartek@bartek

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/David.pub version [d28b9b3b21].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAynpCS4y8Yvvd8w1ZeDlXTjjdgTxTsStqNl9lDWxjlwd8dyIWSOfSyJowB0gz1PAS7/gyuz1+RfOP6n3NmJCg1l1TQI6CXt/0HFTp5ucdL5bvfmUM786rOH4jKxQUbw8Mk6p9upVNaEF6R/WyQP2UwPyQgV+wNBIdheR7ytu5YXXmvaE1bCZ3gXbWvhY0PKQYgpX6dVkTJTYvRPFnffw3M99gIFOkk2lvDhuh/GQeeMC+LMml+NskQfiw+oBxKU4ws756HKr0ZlwyrBfH0SmTW+YxXZl5gsnxz32g2wSc7N/jjnJGZ9CAY/7UrARNfXVg7SByNAf38qqwl6TiFtkjyw== dirvine@dirvine-desktop

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/DiggoryHome.pub version [023e024fb8].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDXPPVjQWY1A66cXlZIi7xrmwMa8TPIeYKMX9xWL5rW4IN1FJ0c6smL7qDRKs6eZP5XWYWV+aaeMoV5vBtCEEnA63xnEgQYgiY8UjLxWTY/0owpJWYg6WJNp26b8eKb/5Kwbpy88ETi52mSDTjJY+djfz30SPBOmHRV34Cmpi1paxWxSEzsxblCEU1Hv9WnE/fjt0E1VCKMKS6YGBEFuTRAnfOKIu7wlrbHkB5NaqGTqaj6ChO73TQe77qFnxQOp9Ph2jERaWFwvIZdFH0cD7+WpgmOaSjdzEYUESicqanZSgY2nN23zgMt16rigkuSoUWKhQavHpUFar17tAuQ7HQr dhardy@TPH-L13071

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/DiggoryLaptop.pub version [73f3ce665d].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN0PnwrwauBsFhuDz1I86Dq/yyteYU+qDYdnYcwyknbx8RrDJ9zzf2rPvFgyQwPPE/HZxXO2jp2nRrUnobucC8nFPFU+owf0mgKkWyT+UD1iVvqT3QHvpKgVzcsM4mSKYoQSf0OymPUNbYRRy01BHdNLXrqHFnC6YshPejuLpijiFsKe0OSQIkjcUffx+Xe/iTFmXHSaZTb23wjTwInBNA7ZofTZCJ94uQRxGXqW0hGqeCr6lw5rL18iomX8IhCFSPZnBzVBET9ll4QLVpadeq35noXy+ArgmCoyS60cPnbX/ZpMDleNgV8ClSzjoE0+N7FPb/7OL3L7ZRCgTqO9Pt dhardy@yoga.dhardy

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/DiggoryWork.pub version [7217746439].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDDbHl0Nu0wXbj0WFVACmG12uQcNduZsgMpLxL0ANoKMvue4VWhhW/nhIK1AIyW+iSvgf1DVQGduWkoeh7SGWN/eHzAqJ2/o4UFbmsl8mL0bcvSakz9xrwhhZQpaK/Vy2N8319cF3uUwujg3SA9S4Q7tu0UKVYA9YF2AN070z5jnJyqK2VVROoWHM48cm/zwHZJBWsqRya7GxpvG70NsyzR+Ap8oe7NKXynZr8bxnQ3JPJr7PsWnnQiiTlzWhjSInoLU1+5xxvnZe0xPhB8K1BBzoOvJDqeI9IrDVGFcxu5PduIyEP9G43swjU/dMuY7Y87WKzHUCU5EMYx4/R5R/I1 dhardy@localhost.localdomain

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/FraserHomeWindows.pub version [4e917b90c0].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDUwgBg3rxrbI/SBm25Q1tIXV9iym7xBIspRtyY3yhxaPBcggVNU63cwfbjXFUCIHAkA3ZZBHAn+4P6uXYqyz0c7ticl9LOfDQm/mPCyZw3gOrGtcI6/xV5dwvYJpOd8pBFS5jIUXto3EG0YOmSqvxIPllHhzd+6IeK/5QKJPNqaEKYXWtgA55iBUq0JqNOWfJx/whJPzOJVdeWeHQjMg++DBrbBFpbLSh3S3qAda88jKBNL9LtOfXK/VJsdJ7/yW1xYeSA3Zu770y60fvzHOUUTpPuvMKqamHKubU54A8/aSzpaHpNIHuFdAfmwKYT3DfeFIR8644+6GTVVd5jVvF7TBg5+lDABcRqruSx6kc4rFxMWzkcHWZA9dXW2B4KP1WrRzSUmXOMWXcbgdZeCMR9QVP3K/AZdBwhXp8LEJXhOlcsEXplGEcp3FrR6SKtut/dOpLur8z/SOTctgmctHrNKJ145Mmu8ws5b1UNRBmVY+CMNvXHw2pXgz1LACaKx3R2dhTouZiGX19eN6V/Qaa+06hizX6ybsBh/zukdTkHtbLzzaMO46RZISFRFZ+zZzLQtenBTSFlR+8V9e5VhfVy8CxQKupLMeeADKoqrGUEGtouYZ1XoAmAAbX2ctO3sSPqeSYusI3F6tVZ38UpcOjwlWUattLXAL8miF7Pbzixdw== Fraser-Home-Windows

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/FraserOfficeLinux.pub version [b5a3047fff].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDeTwVlMND8ubS3U7OmMhxXg0x+pLIr47JV2+Ks86QKR4uGxby2/CcH18lPydwPSmvM2vWuSL1WvHTItX5jmq2BA6guEMY4GBgs3l/2nAR+rN1A5JKgI1T2HgAOL2tRYrAboNIna0KAl1lMMJPsNv26b9PK6w6NhFl/U8qG8iJkv9FbZClvw34UrDw0qpydrGfS/2xikTSXqcjlofZvzUiK0kaD3R5yDqPc3Sz64UhiLKos/gSKQHNbeNc9W/C1Em/DDM8WneVRfmYMbPru2/6DG1F6z4QIaFm/AeyYlRN5PWtVdH6ycg+WB85ZJyjQvN9JtUGdBJ3rvGHALpm5fCxAwmsR6PGI8r4xJVKOMGf3jYkDLdfNgKgKCuQKV4JL7QMMQxCz5HeoMrBjXbQfoTjkQ3Py2C2iz17Aol6BSyYAdZuD2dIEwV0ds81iRfYVTCw+Hd17iUkWoIS2R74EOYfjMkbdkaMz7Dpoqgn6p5FjSrvwHmkQ+b7zXTlWgmAURYMe67gt8ndm16m+/qyFTy0O6AXK2bo2lpxfq68f4bkWQWY7md7YWE7JRaMH+pu/VFfD/mSeNBN8cWljzlC3iSfT6vBnbLxoPsFdX7GZceks9AQvZMgvpWKjMeJmWmdDVhULBSJH1LjLA1/ddmFRoT036FL3he7+b7GYwZ+mR5RTrw== Fraser-Office-Linux

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Krishna.pub version [1590b2d723].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDXT3Mawk2S1O1sMLSkr/k2E1LERCc9AxSM3SK+asHaNTHPceBFiOyOn+QwePWO8o87lIww4/cNv+lY3aglFfGRyRaJ6mcL0H8Ccoz2AwBdUEbJSY75CJWGZTBFmeL4q3sYU71mdUBYDZwYxWSUSmEmxfATxZG9MZKlvxElCQQXDSDorj/TPPMYaWzhwSl1jhC2wxTrxcU/e9sSm67hBi8hNFxdNlooNhAWYl/pq39/uzRyWrH+lCfq17yuil+1cVQVDs5MF8/caK+jO6mTeHgkO+q+NdEObtijkhQEOZc0+eH0t7/RPdDvUSXe6W9JMYgjFDK4DKn0lFBHPcupjiWSVCVBpbUKbBUHPh75GIN8CYmO/w5VGWgjP5SBQrGtMiPHcFNELDSvcEp5gBQAmjKTbCycD1O6NffejhAcvRMKHMU08EUqHg9phzMbkuh4HUtrTBmf6xYyWLKCzgZSwddt5zRHuPNbca2kH6AFVaCVeuCvNeGlirti6JEVlcxYG1oD2kM0tFKa4UsDuNHmJbEUJW28S5diurXJVpo+iIJ2rLfoCGWnfXYzTyAPXT1t/Wjo3AmJHWym16XGNHmwnjrVXqmLumc+VwOS3xc7nR/utQiH1UZzKlBgUYfXN6pkdq2JNj3awFMLlHSYmAxUlNR7YrpZwZL4nEuRekJOxQFcxQ== krishnaof1988@gmail.com

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Michael.pub version [9a49a43de4].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCoMq7SaKOpcan3UlMqP9CYSrF1IkNekjQ3BKVV7fF0LTN5lxQ9rOi3knEEuFCvH2VMbYEKFGAieJa3OuZHlBQMfv66LChZCeAiBaG68iRww84DXBiGWDTuUOBmwepOhXfKIO4I2Qr/st3sPWbD4ddprHd7TJdFqpnTmGwG61m4wf0m3jWZygfqxA85UlweUjWsP6DerMVrfG7F+kNYGdpFcDR0CjPKC2cHwGyIhmBI9jhLHfR1k03+qLKLAcPIIjh8+iAep4FELpnPkrC222DmAL7X9KDuYeh+V2GWc/jcaERFzk3xUx59L4Q6YGnLcO2EoRlGiBOITdrut9DBCIjCcyd/MCkHovL+zdmWCqxYT4ITFsOW91a5UlAAStQLRtCkHbprmIaNEsu6mWAW6owTAIAj0u5f5wyBOEkb7BSifPpbg0jN1EqbKnx+YuXN5MvrKmRQzARpJCIGyhJBpvP7Uh+IJHtULoJNbd5XzWN0F6Z+szlIsPUt31NbPLIeLzqqHuW+rmf1Cl/wcEX8BzOnP3PtTH6TfxfwcwP3v4n2HchPdzY9ZJRd+E5zuEAW4hJL3iWtTM5ARWZC2RSk1wCXggbUkhUQxpPS4GpTzmaBiHNirNZUJU0SDnHcsYuEsQditSqrh01ss9Y8HQRYJ0n2Qh/soV4sUCoe5dyGp3SfHw== michael@michael-macbook.local

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/QiLinux.pub version [6de8d8a8d9].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCm34L9WS2OpYeJ8caQ3Z6dNrjIB1AZOCzejPJW33CeYMSN153l0p9pAlLNJPgOET/JcSHv07gOsdTzAqWZvEcuaLhCeX3X/WBXg1ZaqPvxXLsJLW4EtXDuENaQQ18oRpKFBuHULjkI4wopw34JMHWh6WIQrPVOLDcXsX9cfknviCGdlBScHahxB5ZZ9w5wKxdRDFqJEkit8rQlJR5grVrUq9SYb9zWUUBE0/YeULD6wIhrm5bDepfuTuELdhXF1nzUNQb6Kis5lsi9N1jeG5jMDWsP0cLYvUg1zkB4COiiI95ZT7Rwggbvj2/qrHG3P4LhJlXjaZTzyxjxZojMG+Tfjd0su3J+cnMGhkwj++f4CeFVo7Vbox6U5WT8E+UCXVqRcgvCOePdO76EI17bkHshhDef2RDGvBCYrkSy4f6iqCoKXRnPav2buEI+/pQgacfdxz3CeBrhuL1mXETO4BWf/YvDZYiX6L2+NgVcAVJEDXFrDNsMR2zRkqAKL3ysBOhGKJY20MxL6DuWMZv3byT3f8W3wnLDOQgN+k4HNtg/q5hi4a5KwoLPMTat/dD9lAgRpUhcdxh1AhkTmWxc12CrCpbVCc8kyzu4gdZLPE7ZGKP5YtbbHMJw5p2TzLMs9w8ZkB/WycTqZsqyYcHDPM5UDIh18/ncbITEhDyZIX6iHQ== qi.ma@maidsafe.net

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/QiWindows.pub version [4a751e88d0].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDX2xmkt7sgPmh13dG51YC2QMrznFWEHeMCqzUfP96mSh1dRPZB6nOFhEvMvHmJhqy6oyxWYChttPtSzWZf3o68+ur/YkRbFONV3Kn8sP9qfQHDGa7scT9n5EDxTLzGm1yN4RlQDD2bdhVkYmdkfLcdsEYntOi4Zj45N+xMziH1NQou02iwHuJTIHOscCxWyuTbKFYydNw1NWbCOX8AA0lZoqtrYTsZMceQ/AkLkG1N/dCZtQxMbfSBuRM9cbLsDK58n9PI+1c6OflIba2pb8lHiq7ThrZY8CcZolvFYRWlVYMfPysjKiiCQzegNQkGvKrb7r89swr6QAd/wGldqGab qi.ma@maidsafe.net

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Ross.pub version [78b91b0f6e].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCl0HFTx1cmWG94LxtxYyr0Go4K0kJI3Kd9DU97qPuqrbRnb3Sym8+5C4Xqe4QflqRhluJiWtyZ+XzIiEb0uNGvS2peP7Gb4sdRdfGKFuYg8vfQumv/JhRRn1tw45dOQNDGTAUKFcZmBdpTG8R990LN8991ORSA4jSCzJ3KPbIErhHFI2IknNyURUcopeIu1B3HOwu5WFdC3gWo6XzzgKsenKCQJdlZ1SRSJrHY5L4a4eGTDnkuguE78jx+DpIOJ5UJC1NxfwKOhSG1O34GsBur1lonae5Fx1HwyMRgTmTYGUDNyCo+gqV65y5352wQZrQFc++0YU8cJi3496PQUgWR user@QA-ROSS

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Spandan.pub version [6966862f9b].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCpG3EOW3aKzk0NHR6dsP4ORd/gUpttGwECd7IRx4mxUkDYM3cROqy0kbT4IJzUri44EGiKDk5EUhuoNUhA1yW4SgqecE+1AbFfBxUHmzJemqSkctjIxZSuYA+R4c3kbeMLAlk+nEcxxZqTBzyPhNQVqhtLlWYqYVVp41y4HSybInHn4q7vkoUsyAqp+taQX5tafEI2VmokMFdUbVsJDUSGxrzIlj5hPxL4kXzMxMcPMCeuxIKBOJsb/+KjrlsHMrfSrMIdM677Qx4ycoCt1hMpndVXECvBPFT7y/CpXdF3xMT5+hFsdrwYsu4uG8ggi+NZUqFjgcW7FJVDAx0CYr9L ustulation@gmail.com

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Vinicius.pub version [f6ff062577].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDMVwr0GWuka5X+GDUyamKe8zFvUVblNce4/p1j/O9bFnHs9d8N+OkYkz6CkBsXsfJpb0+YYwpYdl55/Lg4ohP5mJjWnMDB0pacooSYLwpJSZnlV0+aJgu4gMMRfpP4amYnBVm80iPkZZ42OC/ZVNW5Hd0yTuAFUtdnwDKgV57Rk6rhT5pGWSPYrchIGWJCQzMHAkNMmmA5xPdRzAKo7tTy3mGqdWJfiyqM1J1NSDi7UgQCm8ehu2rN2/Gs+I2E3N08MQnJUOAcMrxe1X3lgA6kXEnYEWurEq5ZhC3sOXw8erOWmNtXvqI2O6C/rXBQgzlVliNxtubl8yWnmNPX8UXF vinipsmaker@vinipsmaker-netbook

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/Viv.pub version [48138a48c3].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC/s9I/3XYObaipr54raYtt/f1StRcuybWWCfB03rv0yG1duNrIrpSP8dv1uscmt/OXMvUSLdaGURTZZ8XytD6SFwisVSkTQD4tqqk1NmTQt8DEUZ9KErbQCiyEAAcI02QMJ5IeqvismdyvRnfcNV9Nx2vaaCftppJ2R9rTtm9hNOsa4eoLCnuUACvef6jiAa0Fzn5GV7y91dcrVuWiKnUIdBRtxwv1sJRPT6epm6l6AZcpyA+2Qc4kgS2ak4tAjmSlAWUAIoyYECSweCiIwKJL7WLNSNVV3omhljNLONrckOlfglg7LqUrLYMNh2gHPAdUTCPHFuMlIW4rWvSZi9E0JNTZ7o7+x4PWu+SI8a0faXQ1i8S5qSBhNl3HUbChPH7VxktHrZ4rohOpd4WbV75PrzOoycJwplyuyLzLluWOtE/P+a/EmDV/2iUrlYujQQKHaXhbVIaffI8fct+BuPQAN+EmmMIx/h8BSoeWIBMK/ZdxCcDAuCXeoqonYp3QCFef2+dL8CM5EAjGKkxKHPUcFagf/RsM1VMgb0k3Q30jXqc45k8e5XxsI1cXegRrj6z6ZZmLjPOZrdNxclNDz4xigzZwqf6s9uG+0RxgqCvZZoIJpkfGtGviN6Pm1o8/PPGHI3bmrOv8r/ktjy+V2xjKae6Q5Sw/h83gd1csFoosCQ== viv.rajkumar@maidsafe.net

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/QA/Public Keys/authorized_keys version [6d38c40df4].

     1         -ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAynpCS4y8Yvvd8w1ZeDlXTjjdgTxTsStqNl9lDWxjlwd8dyIWSOfSyJowB0gz1PAS7/gyuz1+RfOP6n3NmJCg1l1TQI6CXt/0HFTp5ucdL5bvfmUM786rOH4jKxQUbw8Mk6p9upVNaEF6R/WyQP2UwPyQgV+wNBIdheR7ytu5YXXmvaE1bCZ3gXbWvhY0PKQYgpX6dVkTJTYvRPFnffw3M99gIFOkk2lvDhuh/GQeeMC+LMml+NskQfiw+oBxKU4ws756HKr0ZlwyrBfH0SmTW+YxXZl5gsnxz32g2wSc7N/jjnJGZ9CAY/7UrARNfXVg7SByNAf38qqwl6TiFtkjyw== dirvine@dirvine-desktop
     2         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDUwgBg3rxrbI/SBm25Q1tIXV9iym7xBIspRtyY3yhxaPBcggVNU63cwfbjXFUCIHAkA3ZZBHAn+4P6uXYqyz0c7ticl9LOfDQm/mPCyZw3gOrGtcI6/xV5dwvYJpOd8pBFS5jIUXto3EG0YOmSqvxIPllHhzd+6IeK/5QKJPNqaEKYXWtgA55iBUq0JqNOWfJx/whJPzOJVdeWeHQjMg++DBrbBFpbLSh3S3qAda88jKBNL9LtOfXK/VJsdJ7/yW1xYeSA3Zu770y60fvzHOUUTpPuvMKqamHKubU54A8/aSzpaHpNIHuFdAfmwKYT3DfeFIR8644+6GTVVd5jVvF7TBg5+lDABcRqruSx6kc4rFxMWzkcHWZA9dXW2B4KP1WrRzSUmXOMWXcbgdZeCMR9QVP3K/AZdBwhXp8LEJXhOlcsEXplGEcp3FrR6SKtut/dOpLur8z/SOTctgmctHrNKJ145Mmu8ws5b1UNRBmVY+CMNvXHw2pXgz1LACaKx3R2dhTouZiGX19eN6V/Qaa+06hizX6ybsBh/zukdTkHtbLzzaMO46RZISFRFZ+zZzLQtenBTSFlR+8V9e5VhfVy8CxQKupLMeeADKoqrGUEGtouYZ1XoAmAAbX2ctO3sSPqeSYusI3F6tVZ38UpcOjwlWUattLXAL8miF7Pbzixdw== Fraser-Home-Windows
     3         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDeTwVlMND8ubS3U7OmMhxXg0x+pLIr47JV2+Ks86QKR4uGxby2/CcH18lPydwPSmvM2vWuSL1WvHTItX5jmq2BA6guEMY4GBgs3l/2nAR+rN1A5JKgI1T2HgAOL2tRYrAboNIna0KAl1lMMJPsNv26b9PK6w6NhFl/U8qG8iJkv9FbZClvw34UrDw0qpydrGfS/2xikTSXqcjlofZvzUiK0kaD3R5yDqPc3Sz64UhiLKos/gSKQHNbeNc9W/C1Em/DDM8WneVRfmYMbPru2/6DG1F6z4QIaFm/AeyYlRN5PWtVdH6ycg+WB85ZJyjQvN9JtUGdBJ3rvGHALpm5fCxAwmsR6PGI8r4xJVKOMGf3jYkDLdfNgKgKCuQKV4JL7QMMQxCz5HeoMrBjXbQfoTjkQ3Py2C2iz17Aol6BSyYAdZuD2dIEwV0ds81iRfYVTCw+Hd17iUkWoIS2R74EOYfjMkbdkaMz7Dpoqgn6p5FjSrvwHmkQ+b7zXTlWgmAURYMe67gt8ndm16m+/qyFTy0O6AXK2bo2lpxfq68f4bkWQWY7md7YWE7JRaMH+pu/VFfD/mSeNBN8cWljzlC3iSfT6vBnbLxoPsFdX7GZceks9AQvZMgvpWKjMeJmWmdDVhULBSJH1LjLA1/ddmFRoT036FL3he7+b7GYwZ+mR5RTrw== Fraser-Office-Linux
     4         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDXT3Mawk2S1O1sMLSkr/k2E1LERCc9AxSM3SK+asHaNTHPceBFiOyOn+QwePWO8o87lIww4/cNv+lY3aglFfGRyRaJ6mcL0H8Ccoz2AwBdUEbJSY75CJWGZTBFmeL4q3sYU71mdUBYDZwYxWSUSmEmxfATxZG9MZKlvxElCQQXDSDorj/TPPMYaWzhwSl1jhC2wxTrxcU/e9sSm67hBi8hNFxdNlooNhAWYl/pq39/uzRyWrH+lCfq17yuil+1cVQVDs5MF8/caK+jO6mTeHgkO+q+NdEObtijkhQEOZc0+eH0t7/RPdDvUSXe6W9JMYgjFDK4DKn0lFBHPcupjiWSVCVBpbUKbBUHPh75GIN8CYmO/w5VGWgjP5SBQrGtMiPHcFNELDSvcEp5gBQAmjKTbCycD1O6NffejhAcvRMKHMU08EUqHg9phzMbkuh4HUtrTBmf6xYyWLKCzgZSwddt5zRHuPNbca2kH6AFVaCVeuCvNeGlirti6JEVlcxYG1oD2kM0tFKa4UsDuNHmJbEUJW28S5diurXJVpo+iIJ2rLfoCGWnfXYzTyAPXT1t/Wjo3AmJHWym16XGNHmwnjrVXqmLumc+VwOS3xc7nR/utQiH1UZzKlBgUYfXN6pkdq2JNj3awFMLlHSYmAxUlNR7YrpZwZL4nEuRekJOxQFcxQ== krishnaof1988@gmail.com
     5         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCwTp0j1PVpCCi8L6OV0VzCl8tP8eyRBM/eBuud+uEjna6HtpEsvtnTzQmp0Tqx62ktGFKEYqKL/F9m0gNgP1nBC6LqExNXkR7+YVXRNgAoF1J8JF+zdIBOyTaGcFqB1R8/1iL7Aybl8u+eS0wM2I++kgAi5npRQDmNgA/b5AotoSsSwgIatmq6c4PY0wiNr9NF9C58VFHiw+p4IIFO1Jfnx3pkSjaL/DmXvawwbeOit/ik4V7ESvM5Ioao2F1Gydim8DEIKfH/r8FHpaE4TlwuIuveP/Fcz9iS5K/pqVNEQlvwLAyrYrjwOc01JRKQE1q1oF6aaryd2UjzbqtKN2Xt qi.ma@maidsafe.net
     6         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCl0HFTx1cmWG94LxtxYyr0Go4K0kJI3Kd9DU97qPuqrbRnb3Sym8+5C4Xqe4QflqRhluJiWtyZ+XzIiEb0uNGvS2peP7Gb4sdRdfGKFuYg8vfQumv/JhRRn1tw45dOQNDGTAUKFcZmBdpTG8R990LN8991ORSA4jSCzJ3KPbIErhHFI2IknNyURUcopeIu1B3HOwu5WFdC3gWo6XzzgKsenKCQJdlZ1SRSJrHY5L4a4eGTDnkuguE78jx+DpIOJ5UJC1NxfwKOhSG1O34GsBur1lonae5Fx1HwyMRgTmTYGUDNyCo+gqV65y5352wQZrQFc++0YU8cJi3496PQUgWR user@QA-ROSS
     7         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCpG3EOW3aKzk0NHR6dsP4ORd/gUpttGwECd7IRx4mxUkDYM3cROqy0kbT4IJzUri44EGiKDk5EUhuoNUhA1yW4SgqecE+1AbFfBxUHmzJemqSkctjIxZSuYA+R4c3kbeMLAlk+nEcxxZqTBzyPhNQVqhtLlWYqYVVp41y4HSybInHn4q7vkoUsyAqp+taQX5tafEI2VmokMFdUbVsJDUSGxrzIlj5hPxL4kXzMxMcPMCeuxIKBOJsb/+KjrlsHMrfSrMIdM677Qx4ycoCt1hMpndVXECvBPFT7y/CpXdF3xMT5+hFsdrwYsu4uG8ggi+NZUqFjgcW7FJVDAx0CYr9L ustulation@gmail.com
     8         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDMVwr0GWuka5X+GDUyamKe8zFvUVblNce4/p1j/O9bFnHs9d8N+OkYkz6CkBsXsfJpb0+YYwpYdl55/Lg4ohP5mJjWnMDB0pacooSYLwpJSZnlV0+aJgu4gMMRfpP4amYnBVm80iPkZZ42OC/ZVNW5Hd0yTuAFUtdnwDKgV57Rk6rhT5pGWSPYrchIGWJCQzMHAkNMmmA5xPdRzAKo7tTy3mGqdWJfiyqM1J1NSDi7UgQCm8ehu2rN2/Gs+I2E3N08MQnJUOAcMrxe1X3lgA6kXEnYEWurEq5ZhC3sOXw8erOWmNtXvqI2O6C/rXBQgzlVliNxtubl8yWnmNPX8UXF vinipsmaker@vinipsmaker-netbook
     9         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQC/s9I/3XYObaipr54raYtt/f1StRcuybWWCfB03rv0yG1duNrIrpSP8dv1uscmt/OXMvUSLdaGURTZZ8XytD6SFwisVSkTQD4tqqk1NmTQt8DEUZ9KErbQCiyEAAcI02QMJ5IeqvismdyvRnfcNV9Nx2vaaCftppJ2R9rTtm9hNOsa4eoLCnuUACvef6jiAa0Fzn5GV7y91dcrVuWiKnUIdBRtxwv1sJRPT6epm6l6AZcpyA+2Qc4kgS2ak4tAjmSlAWUAIoyYECSweCiIwKJL7WLNSNVV3omhljNLONrckOlfglg7LqUrLYMNh2gHPAdUTCPHFuMlIW4rWvSZi9E0JNTZ7o7+x4PWu+SI8a0faXQ1i8S5qSBhNl3HUbChPH7VxktHrZ4rohOpd4WbV75PrzOoycJwplyuyLzLluWOtE/P+a/EmDV/2iUrlYujQQKHaXhbVIaffI8fct+BuPQAN+EmmMIx/h8BSoeWIBMK/ZdxCcDAuCXeoqonYp3QCFef2+dL8CM5EAjGKkxKHPUcFagf/RsM1VMgb0k3Q30jXqc45k8e5XxsI1cXegRrj6z6ZZmLjPOZrdNxclNDz4xigzZwqf6s9uG+0RxgqCvZZoIJpkfGtGviN6Pm1o8/PPGHI3bmrOv8r/ktjy+V2xjKae6Q5Sw/h83gd1csFoosCQ== viv.rajkumar@maidsafe.net
    10         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDUwyrjDbQQhVzXk2mdMLm80/+2eHW1L4fw/flmmee+4FI3WF7b8L1bMjl7TApeMNU+HXc3KxBupkni5LjuXLZOS2L/Zo6yIcrudQpyAb8275phueT3KS36Q4oNLEv+E3IXQiyfNeE8hsvqoFdoo+V9FyR9SFPlDndfUsTC4O/nANWv+jO+1K6Iyd4b5OhZUP+Iw563OtSXFwFGxpgEhz3dUOqL6C0i5M2hxnqdx0FesBowE6uu4Npsjf1KUE/aNcM/+9+loD1PCnQja634V5m6jKy2y121h7n5S/y0gbusoml9Kfe8z30CMwyP4SkHwtBIPG1bf38N08/LUfbr83p7CpIz6wOCcDdY8mx2SsfCoyb0eJcCP7czlqHe70i6F9o77SWWdRX/m25x9bcKug6MUYgVNB5BXbN3nj0RxmitNQ7MpPcs6YD0WxtY8KDh1XZ2a73bie+h/bjN2FqT92AnC9mmZ82YP/v/4l0GI3854dxB5uGGG3m9j1TqYg1I/GVpuqiF7lGRvnR7ip+ahpOVnmaV/pUOQPZGuYps/0hSo5UIo8G1o89nk4eICScwU9h6cSx+MrUjVciPssIadiL4SZ8KU55arkyzXb6zZRhm3MKKBTmB3FIU6/9MW/2N2LcoTurwcC8+wELvGTfYXOIxUVjvpIZ21ZfR7F1n61XRAQ== AndreasFackler@gmx.de
    11         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDYrVCkGerTtN6QDfZK30PMORBO7Up6Cbg3fikqIaGlLFN+osMn6NjZvfKBXb2JOnlPGRtuzb8KUYl14gtHo/eQ9BT5ASKbKp+LUw6eEmfcaZdd7H3x9GfsbH3+EG9ALm/NPqUBDXNshRq563yfPJMkz4Rk/hcTVURl0E3IPcLHE5ymjCz8Ar8NMdvmWAD7ft/QqoRRG4Bnx3Tc6uSi5s35jHdj66zQlLpoDpZ+IW3z7mk03nE7B8in1quHfNKwRYNIb0vBoV5nKSFwquGpYfB+M0/g1R9a8JRrLeMGv+XkGVGt6Ltja76fxYygZZDP99XrFqw89bEL4mOzrDCGTwDZ adam@higgsboson
    12         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCnf8sXey8Q5tBPKEAkJAErTIYgluP/NnMpqG86dcSWDbJOXay5PKQh5iXwRLCJ+ZJLvft2a/QGMVXain/yF9wKugUPosFg7dqgQKyFQk0Y3nKK/I4OGyKd3XJtOBVckYow/wEPDLkAWThf2VimDudUbsJ6VPDbAlWBg8NTiDJRaPzohpkru7c/y+yyuFVxmRi4m+1YzM00R12HJr5jqf/qNOZI/pUccNEhMnchFlU7t++Pk0ZhwOgvLEeGfLGfI622HdNVToVNJ7VVxVMr+qyvqBXiIVfIdRVGvoBeoIboTpUxEcYvkgPouxQxkJOSrbxOF/b+3nQ6bff9UTUDL9zf bart@home
    13         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCm34L9WS2OpYeJ8caQ3Z6dNrjIB1AZOCzejPJW33CeYMSN153l0p9pAlLNJPgOET/JcSHv07gOsdTzAqWZvEcuaLhCeX3X/WBXg1ZaqPvxXLsJLW4EtXDuENaQQ18oRpKFBuHULjkI4wopw34JMHWh6WIQrPVOLDcXsX9cfknviCGdlBScHahxB5ZZ9w5wKxdRDFqJEkit8rQlJR5grVrUq9SYb9zWUUBE0/YeULD6wIhrm5bDepfuTuELdhXF1nzUNQb6Kis5lsi9N1jeG5jMDWsP0cLYvUg1zkB4COiiI95ZT7Rwggbvj2/qrHG3P4LhJlXjaZTzyxjxZojMG+Tfjd0su3J+cnMGhkwj++f4CeFVo7Vbox6U5WT8E+UCXVqRcgvCOePdO76EI17bkHshhDef2RDGvBCYrkSy4f6iqCoKXRnPav2buEI+/pQgacfdxz3CeBrhuL1mXETO4BWf/YvDZYiX6L2+NgVcAVJEDXFrDNsMR2zRkqAKL3ysBOhGKJY20MxL6DuWMZv3byT3f8W3wnLDOQgN+k4HNtg/q5hi4a5KwoLPMTat/dD9lAgRpUhcdxh1AhkTmWxc12CrCpbVCc8kyzu4gdZLPE7ZGKP5YtbbHMJw5p2TzLMs9w8ZkB/WycTqZsqyYcHDPM5UDIh18/ncbITEhDyZIX6iHQ== qi.ma@maidsafe.net
    14         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDX2xmkt7sgPmh13dG51YC2QMrznFWEHeMCqzUfP96mSh1dRPZB6nOFhEvMvHmJhqy6oyxWYChttPtSzWZf3o68+ur/YkRbFONV3Kn8sP9qfQHDGa7scT9n5EDxTLzGm1yN4RlQDD2bdhVkYmdkfLcdsEYntOi4Zj45N+xMziH1NQou02iwHuJTIHOscCxWyuTbKFYydNw1NWbCOX8AA0lZoqtrYTsZMceQ/AkLkG1N/dCZtQxMbfSBuRM9cbLsDK58n9PI+1c6OflIba2pb8lHiq7ThrZY8CcZolvFYRWlVYMfPysjKiiCQzegNQkGvKrb7r89swr6QAd/wGldqGab qi.ma@maidsafe.net
    15         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDXPPVjQWY1A66cXlZIi7xrmwMa8TPIeYKMX9xWL5rW4IN1FJ0c6smL7qDRKs6eZP5XWYWV+aaeMoV5vBtCEEnA63xnEgQYgiY8UjLxWTY/0owpJWYg6WJNp26b8eKb/5Kwbpy88ETi52mSDTjJY+djfz30SPBOmHRV34Cmpi1paxWxSEzsxblCEU1Hv9WnE/fjt0E1VCKMKS6YGBEFuTRAnfOKIu7wlrbHkB5NaqGTqaj6ChO73TQe77qFnxQOp9Ph2jERaWFwvIZdFH0cD7+WpgmOaSjdzEYUESicqanZSgY2nN23zgMt16rigkuSoUWKhQavHpUFar17tAuQ7HQr dhardy@TPH-L13071
    16         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN0PnwrwauBsFhuDz1I86Dq/yyteYU+qDYdnYcwyknbx8RrDJ9zzf2rPvFgyQwPPE/HZxXO2jp2nRrUnobucC8nFPFU+owf0mgKkWyT+UD1iVvqT3QHvpKgVzcsM4mSKYoQSf0OymPUNbYRRy01BHdNLXrqHFnC6YshPejuLpijiFsKe0OSQIkjcUffx+Xe/iTFmXHSaZTb23wjTwInBNA7ZofTZCJ94uQRxGXqW0hGqeCr6lw5rL18iomX8IhCFSPZnBzVBET9ll4QLVpadeq35noXy+ArgmCoyS60cPnbX/ZpMDleNgV8ClSzjoE0+N7FPb/7OL3L7ZRCgTqO9Pt dhardy@yoga.dhardy
    17         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDDbHl0Nu0wXbj0WFVACmG12uQcNduZsgMpLxL0ANoKMvue4VWhhW/nhIK1AIyW+iSvgf1DVQGduWkoeh7SGWN/eHzAqJ2/o4UFbmsl8mL0bcvSakz9xrwhhZQpaK/Vy2N8319cF3uUwujg3SA9S4Q7tu0UKVYA9YF2AN070z5jnJyqK2VVROoWHM48cm/zwHZJBWsqRya7GxpvG70NsyzR+Ap8oe7NKXynZr8bxnQ3JPJr7PsWnnQiiTlzWhjSInoLU1+5xxvnZe0xPhB8K1BBzoOvJDqeI9IrDVGFcxu5PduIyEP9G43swjU/dMuY7Y87WKzHUCU5EMYx4/R5R/I1 dhardy@localhost.localdomain
    18         -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCoMq7SaKOpcan3UlMqP9CYSrF1IkNekjQ3BKVV7fF0LTN5lxQ9rOi3knEEuFCvH2VMbYEKFGAieJa3OuZHlBQMfv66LChZCeAiBaG68iRww84DXBiGWDTuUOBmwepOhXfKIO4I2Qr/st3sPWbD4ddprHd7TJdFqpnTmGwG61m4wf0m3jWZygfqxA85UlweUjWsP6DerMVrfG7F+kNYGdpFcDR0CjPKC2cHwGyIhmBI9jhLHfR1k03+qLKLAcPIIjh8+iAep4FELpnPkrC222DmAL7X9KDuYeh+V2GWc/jcaERFzk3xUx59L4Q6YGnLcO2EoRlGiBOITdrut9DBCIjCcyd/MCkHovL+zdmWCqxYT4ITFsOW91a5UlAAStQLRtCkHbprmIaNEsu6mWAW6owTAIAj0u5f5wyBOEkb7BSifPpbg0jN1EqbKnx+YuXN5MvrKmRQzARpJCIGyhJBpvP7Uh+IJHtULoJNbd5XzWN0F6Z+szlIsPUt31NbPLIeLzqqHuW+rmf1Cl/wcEX8BzOnP3PtTH6TfxfwcwP3v4n2HchPdzY9ZJRd+E5zuEAW4hJL3iWtTM5ARWZC2RSk1wCXggbUkhUQxpPS4GpTzmaBiHNirNZUJU0SDnHcsYuEsQditSqrh01ss9Y8HQRYJ0n2Qh/soV4sUCoe5dyGp3SfHw== michael@michael-macbook.local

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/Whitepapers/technical_papers/img/safecoin farming speed.png version [590e37927e].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/Whitepapers/technical_papers/img/safecoin resources.png version [ab4b3bd4f0].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/Whitepapers/technical_papers/img/safecoin transfer mech.png version [0940833af9].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/Whitepapers/technical_papers/safecoin citations.bib version [c35882d830].

     1         -%% This BibTeX bibliography file was created using BibDesk.
     2         -%% http://bibdesk.sourceforge.net/
     3         -
     4         -
     5         -%% Created for Nick Lambert at 2015-01-07 10:11:39 +0000 
     6         -
     7         -
     8         -%% Saved with string encoding Unicode (UTF-8) 
     9         -
    10         -
    11         -
    12         -@jurthesis{19,
    13         -	Date-Added = {2015-01-07 10:07:00 +0000},
    14         -	Date-Modified = {2015-01-07 10:11:29 +0000},
    15         -	Lastchecked = {7},
    16         -	Month = {January},
    17         -	Title = {Kademlia wikipedia page},
    18         -	Url = {http://en.wikipedia.org/wiki/Kademlia},
    19         -	Year = {2015}}
    20         -
    21         -@webpage{18,
    22         -	Author = {John Aziz},
    23         -	Date-Added = {2014-12-11 16:38:39 +0000},
    24         -	Date-Modified = {2014-12-11 16:40:04 +0000},
    25         -	Lastchecked = {11},
    26         -	Month = {December},
    27         -	Title = {Does the Federal Reserve really control the money supply?},
    28         -	Url = {http://theweek.com/article/index/244899/does-the-federal-reserve-really-control-the-money-supply},
    29         -	Year = {2014},
    30         -	Bdsk-Url-1 = {http://theweek.com/article/index/244899/does-the-federal-reserve-really-control-the-money-supply}}
    31         -
    32         -@webpage{17,
    33         -	Author = {Paul Krugman},
    34         -	Date-Added = {2014-12-11 15:08:47 +0000},
    35         -	Date-Modified = {2014-12-11 15:10:58 +0000},
    36         -	Lastchecked = {11},
    37         -	Month = {December},
    38         -	Title = {The textbook economics of cap-and-trade},
    39         -	Url = {http://krugman.blogs.nytimes.com/2009/09/27/the-textbook-economics-of-cap-and-trade/?_r=0},
    40         -	Year = {2014},
    41         -	Bdsk-Url-1 = {http://krugman.blogs.nytimes.com/2009/09/27/the-textbook-economics-of-cap-and-trade/?_r=0}}
    42         -
    43         -@webpage{16,
    44         -	Author = {The Atlantic},
    45         -	Date-Added = {2014-11-28 11:03:07 +0000},
    46         -	Date-Modified = {2014-11-28 11:03:45 +0000},
    47         -	Lastchecked = {28},
    48         -	Month = {November},
    49         -	Title = {The Internet's Original Sin},
    50         -	Url = {http://www.theatlantic.com/technology/archive/2014/08/advertising-is-the-internets-original-sin/376041/},
    51         -	Year = {2014},
    52         -	Bdsk-Url-1 = {http://www.theatlantic.com/technology/archive/2014/08/advertising-is-the-internets-original-sin/376041/}}
    53         -
    54         -@webpage{15,
    55         -	Author = {Facebook Inc},
    56         -	Date-Added = {2014-11-28 11:00:05 +0000},
    57         -	Date-Modified = {2014-11-28 11:00:53 +0000},
    58         -	Lastchecked = {28},
    59         -	Month = {November},
    60         -	Title = {Facebook Reports Fourth Quarter and Full Year 2013 Results},
    61         -	Url = {http://investor.fb.com/releasedetail.cfm?ReleaseID=821954},
    62         -	Year = {2014},
    63         -	Bdsk-Url-1 = {http://investor.fb.com/releasedetail.cfm?ReleaseID=821954}}
    64         -
    65         -@jurthesis{14,
    66         -	Author = {Google Inc},
    67         -	Date-Added = {2014-11-28 10:58:41 +0000},
    68         -	Date-Modified = {2014-12-11 16:48:12 +0000},
    69         -	Lastchecked = {28},
    70         -	Month = {November},
    71         -	Title = {2013 Financial Tables},
    72         -	Url = {https://investor.google.com/financial/2013/tables.html},
    73         -	Year = {2014},
    74         -	Bdsk-Url-1 = {https://investor.google.com/financial/2013/tables.html}}
    75         -
    76         -@webpage{13,
    77         -	Author = {Joe McCann},
    78         -	Date-Added = {2014-11-28 10:55:50 +0000},
    79         -	Date-Modified = {2014-11-28 11:01:03 +0000},
    80         -	Lastchecked = {28},
    81         -	Month = {November},
    82         -	Title = {Data Is The Most Valuable Commodity On Earth},
    83         -	Url = {http://subprint.com/blog/data-is-the-most-valuable-commodity-on-earth},
    84         -	Year = {2014},
    85         -	Bdsk-Url-1 = {http://subprint.com/blog/data-is-the-most-valuable-commodity-on-earth}}
    86         -
    87         -@webpage{12,
    88         -	Author = {World Economic Forum},
    89         -	Date-Added = {2014-11-28 10:51:45 +0000},
    90         -	Date-Modified = {2014-11-28 10:52:51 +0000},
    91         -	Lastchecked = {28},
    92         -	Month = {November},
    93         -	Title = {Personal Data: The Emergence of a New Asset Class},
    94         -	Url = {http://www3.weforum.org/docs/WEF_ITTC_PersonalDataNewAsset_Report_2011.pdf},
    95         -	Year = {2014},
    96         -	Bdsk-Url-1 = {http://www3.weforum.org/docs/WEF_ITTC_PersonalDataNewAsset_Report_2011.pdf}}
    97         -
    98         -@webpage{11,
    99         -	Author = {BBC News Web Page},
   100         -	Date-Added = {2014-11-28 10:36:05 +0000},
   101         -	Date-Modified = {2014-11-28 10:36:58 +0000},
   102         -	Lastchecked = {28},
   103         -	Month = {November},
   104         -	Title = {Gold v paper money},
   105         -	Url = {http://www.bbc.co.uk/news/business-18644230},
   106         -	Year = {2014},
   107         -	Bdsk-Url-1 = {http://www.bbc.co.uk/news/business-18644230}}
   108         -
   109         -@webpage{10,
   110         -	Date-Added = {2014-11-28 10:34:17 +0000},
   111         -	Date-Modified = {2014-11-28 10:35:07 +0000},
   112         -	Lastchecked = {28},
   113         -	Month = {November},
   114         -	Title = {ECR Research Web Page},
   115         -	Url = {http://www.ecrresearch.com/world-economy/dangers-and-drawbacks-quantitative-easing},
   116         -	Year = {2014},
   117         -	Bdsk-Url-1 = {http://www.ecrresearch.com/world-economy/dangers-and-drawbacks-quantitative-easing}}
   118         -
   119         -@webpage{9,
   120         -	Date-Added = {2014-11-28 10:31:55 +0000},
   121         -	Date-Modified = {2014-11-28 10:32:47 +0000},
   122         -	Lastchecked = {28},
   123         -	Month = {November},
   124         -	Title = {Federal Reserve Web Site},
   125         -	Url = {http://www.federalreserve.gov/faqs/currency_12773.htm},
   126         -	Year = {2014},
   127         -	Bdsk-Url-1 = {http://www.federalreserve.gov/faqs/currency_12773.htm}}
   128         -
   129         -@webpage{8,
   130         -	Date-Added = {2014-11-28 10:29:03 +0000},
   131         -	Date-Modified = {2014-11-28 11:01:10 +0000},
   132         -	Lastchecked = {28},
   133         -	Month = {November},
   134         -	Title = {Bountify Web Page},
   135         -	Url = {https://bountify.co/},
   136         -	Year = {2014},
   137         -	Bdsk-Url-1 = {https://bountify.co/}}
   138         -
   139         -@webpage{7,
   140         -	Date-Added = {2014-11-28 10:27:49 +0000},
   141         -	Date-Modified = {2014-11-28 10:28:30 +0000},
   142         -	Lastchecked = {28},
   143         -	Month = {November},
   144         -	Title = {Bounty Source Web Page},
   145         -	Url = {https://www.bountysource.com/},
   146         -	Year = {2014},
   147         -	Bdsk-Url-1 = {https://www.bountysource.com/}}
   148         -
   149         -@webpage{6,
   150         -	Date-Added = {2014-11-28 10:25:36 +0000},
   151         -	Date-Modified = {2014-11-28 11:01:22 +0000},
   152         -	Lastchecked = {28},
   153         -	Month = {November},
   154         -	Title = {MaidSafe Wikipedia},
   155         -	Url = {http://en.wikipedia.org/wiki/MaidSafe},
   156         -	Year = {2014},
   157         -	Bdsk-Url-1 = {http://en.wikipedia.org/wiki/MaidSafe}}
   158         -
   159         -@webpage{5,
   160         -	Date-Added = {2014-11-28 10:23:00 +0000},
   161         -	Date-Modified = {2014-11-28 10:24:14 +0000},
   162         -	Lastchecked = {28},
   163         -	Month = {November},
   164         -	Title = {Tor Incentives Roundup},
   165         -	Url = {https://blog.torproject.org/blog/tor-incentives-research-roundup-goldstar-par-braids-lira-tears-and-torcoin},
   166         -	Year = {2014},
   167         -	Bdsk-Url-1 = {https://blog.torproject.org/blog/tor-incentives-research-roundup-goldstar-par-braids-lira-tears-and-torcoin}}
   168         -
   169         -@webpage{4,
   170         -	Date-Added = {2014-11-27 16:52:58 +0000},
   171         -	Date-Modified = {2014-11-28 11:01:57 +0000},
   172         -	Lastchecked = {27},
   173         -	Month = {November},
   174         -	Title = {Tor Metrics --- Direct users by country},
   175         -	Url = {https://metrics.torproject.org/userstats-relay-country.html},
   176         -	Year = {2014},
   177         -	Bdsk-Url-1 = {https://metrics.torproject.org/userstats-relay-country.html}}
   178         -
   179         -@webpage{3,
   180         -	Date-Added = {2014-11-27 16:49:37 +0000},
   181         -	Date-Modified = {2014-11-27 16:51:52 +0000},
   182         -	Lastchecked = {27},
   183         -	Month = {November},
   184         -	Title = {Tor Metrics --- Relays and bridges in the network},
   185         -	Url = {https://metrics.torproject.org/networksize.html},
   186         -	Year = {2014},
   187         -	Bdsk-Url-1 = {https://metrics.torproject.org/networksize.html}}
   188         -
   189         -@url{2,
   190         -	Author = {Christopher Doll, T. F. McLaughlin, Anjali Barretto},
   191         -	Date-Added = {2014-11-27 16:29:54 +0000},
   192         -	Date-Modified = {2015-01-06 10:07:32 +0000},
   193         -	Journal = {The International Journal of Basic and Applied Science},
   194         -	Month = {July},
   195         -	Number = {01},
   196         -	Pages = {131-149},
   197         -	Title = {The Token Economy: A Recent Review and Evaluation},
   198         -	Url = {http://www.insikapub.com/Vol-02/No-01/12IJBAS(2)(1).pdf},
   199         -	Volume = {02},
   200         -	Year = {2013},
   201         -	Bdsk-Url-1 = {http://www.insikapub.com/Vol-02/No-01/12IJBAS(2)(1).pdf}}
   202         -
   203         -@webpage{1,
   204         -	Date-Modified = {2014-11-27 16:36:09 +0000},
   205         -	Owner = {nicklambert},
   206         -	Timestamp = {2014.11.27},
   207         -	Title = {Crypto-Currency Market Capitalizations},
   208         -	Url = {https://coinmarketcap.com/all/},
   209         -	Bdsk-Url-1 = {https://coinmarketcap.com/all/}}

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/rfcs/text/0009-mpid-messaging/MPID Message Flow.png version [e7d83bbd48].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/rfcs/text/0011-improved-connection-management/Connection Management for Bootstrapping.png version [48210e296d].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/rfcs/text/0011-improved-connection-management/Connection Management.png version [c128e0074a].

cannot compute difference between binary files

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/safe_examples/demo_app/resources/osx/helper_apps/Info EH.plist version [bb000d13a1].

     1         -<?xml version="1.0" encoding="UTF-8"?>
     2         -<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
     3         -<plist version="1.0">
     4         -<dict>
     5         -    <key>CFBundleDisplayName</key>
     6         -    <string>{{productName}} Helper EH</string>
     7         -    <key>CFBundleExecutable</key>
     8         -    <string>{{productName}} Helper EH</string>
     9         -    <key>CFBundleIdentifier</key>
    10         -    <string>{{identifier}}.helper.EH</string>
    11         -    <key>CFBundleName</key>
    12         -    <string>{{productName}} Helper EH</string>
    13         -    <key>CFBundlePackageType</key>
    14         -    <string>APPL</string>
    15         -    <key>DTSDKName</key>
    16         -    <string>macosx</string>
    17         -    <key>LSUIElement</key>
    18         -    <true/>
    19         -    <key>NSSupportsAutomaticGraphicsSwitching</key>
    20         -    <true/>
    21         -</dict>
    22         -</plist>

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/safe_examples/demo_app/resources/osx/helper_apps/Info NP.plist version [0a518159ab].

     1         -<?xml version="1.0" encoding="UTF-8"?>
     2         -<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
     3         -<plist version="1.0">
     4         -<dict>
     5         -    <key>CFBundleDisplayName</key>
     6         -    <string>{{productName}} Helper NP</string>
     7         -    <key>CFBundleExecutable</key>
     8         -    <string>{{productName}} Helper NP</string>
     9         -    <key>CFBundleIdentifier</key>
    10         -    <string>{{identifier}}.helper.NP</string>
    11         -    <key>CFBundleName</key>
    12         -    <string>{{productName}} Helper NP</string>
    13         -    <key>CFBundlePackageType</key>
    14         -    <string>APPL</string>
    15         -    <key>DTSDKName</key>
    16         -    <string>macosx</string>
    17         -    <key>LSUIElement</key>
    18         -    <true/>
    19         -    <key>NSSupportsAutomaticGraphicsSwitching</key>
    20         -    <true/>
    21         -</dict>
    22         -</plist>

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/safe_launcher/resources/osx/helper_apps/Info EH.plist version [bb000d13a1].

     1         -<?xml version="1.0" encoding="UTF-8"?>
     2         -<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
     3         -<plist version="1.0">
     4         -<dict>
     5         -    <key>CFBundleDisplayName</key>
     6         -    <string>{{productName}} Helper EH</string>
     7         -    <key>CFBundleExecutable</key>
     8         -    <string>{{productName}} Helper EH</string>
     9         -    <key>CFBundleIdentifier</key>
    10         -    <string>{{identifier}}.helper.EH</string>
    11         -    <key>CFBundleName</key>
    12         -    <string>{{productName}} Helper EH</string>
    13         -    <key>CFBundlePackageType</key>
    14         -    <string>APPL</string>
    15         -    <key>DTSDKName</key>
    16         -    <string>macosx</string>
    17         -    <key>LSUIElement</key>
    18         -    <true/>
    19         -    <key>NSSupportsAutomaticGraphicsSwitching</key>
    20         -    <true/>
    21         -</dict>
    22         -</plist>

Deleted wiki_references/2017/software/MaidSafe_net/src_from_GitHub/the_repository_clones/safe_launcher/resources/osx/helper_apps/Info NP.plist version [0a518159ab].

     1         -<?xml version="1.0" encoding="UTF-8"?>
     2         -<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
     3         -<plist version="1.0">
     4         -<dict>
     5         -    <key>CFBundleDisplayName</key>
     6         -    <string>{{productName}} Helper NP</string>
     7         -    <key>CFBundleExecutable</key>
     8         -    <string>{{productName}} Helper NP</string>
     9         -    <key>CFBundleIdentifier</key>
    10         -    <string>{{identifier}}.helper.NP</string>
    11         -    <key>CFBundleName</key>
    12         -    <string>{{productName}} Helper NP</string>
    13         -    <key>CFBundlePackageType</key>
    14         -    <string>APPL</string>
    15         -    <key>DTSDKName</key>
    16         -    <string>macosx</string>
    17         -    <key>LSUIElement</key>
    18         -    <true/>
    19         -    <key>NSSupportsAutomaticGraphicsSwitching</key>
    20         -    <true/>
    21         -</dict>
    22         -</plist>

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/Cargo.toml version [e5b4ce8041].

     1         -[package]
     2         -authors = ["The Rust Project Developers"]
     3         -name = "bootstrap"
     4         -version = "0.0.0"
     5         -
     6         -[lib]
     7         -name = "bootstrap"
     8         -path = "lib.rs"
     9         -doctest = false
    10         -
    11         -[[bin]]
    12         -name = "bootstrap"
    13         -path = "bin/main.rs"
    14         -test = false
    15         -
    16         -[[bin]]
    17         -name = "rustc"
    18         -path = "bin/rustc.rs"
    19         -test = false
    20         -
    21         -[[bin]]
    22         -name = "rustdoc"
    23         -path = "bin/rustdoc.rs"
    24         -test = false
    25         -
    26         -[dependencies]
    27         -build_helper = { path = "../build_helper" }
    28         -cmake = "0.1.17"
    29         -filetime = "0.1"
    30         -num_cpus = "0.2"
    31         -toml = "0.1"
    32         -getopts = "0.2"
    33         -rustc-serialize = "0.3"
    34         -gcc = "0.3.38"
    35         -libc = "0.2"

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/README.md version [8c74938c29].

     1         -# rustbuild - Bootstrapping Rust
     2         -
     3         -This is an in-progress README which is targeted at helping to explain how Rust
     4         -is bootstrapped and in general some of the technical details of the build
     5         -system.
     6         -
     7         -> **Note**: This build system is currently under active development and is not
     8         -> intended to be the primary build system just yet. The makefiles are currently
     9         -> the ones that are still "guaranteed to work", at least as much as possible.
    10         -
    11         -## Using rustbuild
    12         -
    13         -The rustbuild build system has a primary entry point, a top level `x.py` script:
    14         -
    15         -```
    16         -python ./x.py build
    17         -```
    18         -
    19         -Note that if you're on Unix you should be able to execute the script directly:
    20         -
    21         -```
    22         -./x.py build
    23         -```
    24         -
    25         -The script accepts commands, flags, and arguments to determine what to do:
    26         -
    27         -* `build` - a general purpose command for compiling code. Alone `build` will
    28         -  bootstrap the entire compiler, and otherwise arguments passed indicate what to
    29         -  build. For example:
    30         -
    31         -  ```
    32         -  # build the whole compiler
    33         -  ./x.py build
    34         -
    35         -  # build the stage1 compiler
    36         -  ./x.py build --stage 1
    37         -
    38         -  # build stage0 libstd
    39         -  ./x.py build --stage 0 src/libstd
    40         -
    41         -  # build a particular crate in stage0
    42         -  ./x.py build --stage 0 src/libtest
    43         -  ```
    44         -
    45         -  If files are dirty that would normally be rebuilt from stage 0, that can be
    46         -  overridden using `--keep-stage 0`. Using `--keep-stage n` will skip all steps
    47         -  that belong to stage n or earlier:
    48         -
    49         -  ```
    50         -  # keep old build products for stage 0 and build stage 1
    51         -  ./x.py build --keep-stage 0 --stage 1
    52         -  ```
    53         -
    54         -* `test` - a command for executing unit tests. Like the `build` command this
    55         -  will execute the entire test suite by default, and otherwise it can be used to
    56         -  select which test suite is run:
    57         -
    58         -  ```
    59         -  # run all unit tests
    60         -  ./x.py test
    61         -
    62         -  # execute the run-pass test suite
    63         -  ./x.py test src/test/run-pass
    64         -
    65         -  # execute only some tests in the run-pass test suite
    66         -  ./x.py test src/test/run-pass --test-args substring-of-test-name
    67         -
    68         -  # execute tests in the standard library in stage0
    69         -  ./x.py test --stage 0 src/libstd
    70         -
    71         -  # execute all doc tests
    72         -  ./x.py test src/doc
    73         -  ```
    74         -
    75         -* `doc` - a command for building documentation. Like above can take arguments
    76         -  for what to document.
    77         -
    78         -## Configuring rustbuild
    79         -
    80         -There are currently two primary methods for configuring the rustbuild build
    81         -system. First, the `./configure` options serialized in `config.mk` will be
    82         -parsed and read. That is, if any `./configure` options are passed, they'll be
    83         -handled naturally.
    84         -
    85         -Next, rustbuild offers a TOML-based configuration system with a `config.toml`
    86         -file in the same location as `config.mk`. An example of this configuration can
    87         -be found at `src/bootstrap/config.toml.example`, and the configuration file
    88         -can also be passed as `--config path/to/config.toml` if the build system is
    89         -being invoked manually (via the python script).
    90         -
    91         -Finally, rustbuild makes use of the [gcc-rs crate] which has [its own
    92         -method][env-vars] of configuring C compilers and C flags via environment
    93         -variables.
    94         -
    95         -[gcc-rs crate]: https://github.com/alexcrichton/gcc-rs
    96         -[env-vars]: https://github.com/alexcrichton/gcc-rs#external-configuration-via-environment-variables
    97         -
    98         -## Build stages
    99         -
   100         -The rustbuild build system goes through a few phases to actually build the
   101         -compiler. What actually happens when you invoke rustbuild is:
   102         -
   103         -1. The entry point script, `x.py`, is run. This script is
   104         -   responsible for downloading the stage0 compiler/Cargo binaries, and it then
   105         -   compiles the build system itself (this folder). Finally, it then invokes the
   106         -   actual `bootstrap` binary build system.
   107         -2. In Rust, `bootstrap` will slurp up all configuration, perform a number of
   108         -   sanity checks (compilers exist for example), and then start building the
   109         -   stage0 artifacts.
   110         -3. The stage0 `cargo` downloaded earlier is used to build the standard library
   111         -   and the compiler, and then these binaries are then copied to the `stage1`
   112         -   directory. That compiler is then used to generate the stage1 artifacts which
   113         -   are then copied to the stage2 directory, and then finally the stage2
   114         -   artifacts are generated using that compiler.
   115         -
   116         -The goal of each stage is to (a) leverage Cargo as much as possible and failing
   117         -that (b) leverage Rust as much as possible!
   118         -
   119         -## Incremental builds
   120         -
   121         -You can configure rustbuild to use incremental compilation. Because
   122         -incremental is new and evolving rapidly, if you want to use it, it is
   123         -recommended that you replace the snapshot with a locally installed
   124         -nightly build of rustc. You will want to keep this up to date.
   125         -
   126         -To follow this course of action, first thing you will want to do is to
   127         -install a nightly, presumably using `rustup`. You will then want to
   128         -configure your directory to use this build, like so:
   129         -
   130         -```
   131         -# configure to use local rust instead of downloading a beta.
   132         -# `--local-rust-root` is optional here. If elided, we will
   133         -# use whatever rustc we find on your PATH.
   134         -> configure --enable-rustbuild --local-rust-root=~/.cargo/ --enable-local-rebuild
   135         -```
   136         -
   137         -After that, you can use the `--incremental` flag to actually do
   138         -incremental builds:
   139         -
   140         -```
   141         -> ../x.py build --incremental
   142         -```
   143         -
   144         -The `--incremental` flag will store incremental compilation artifacts
   145         -in `build/<host>/stage0-incremental`. Note that we only use incremental
   146         -compilation for the stage0 -> stage1 compilation -- this is because
   147         -the stage1 compiler is changing, and we don't try to cache and reuse
   148         -incremental artifacts across different versions of the compiler. For
   149         -this reason, `--incremental` defaults to `--stage 1` (though you can
   150         -manually select a higher stage, if you prefer).
   151         -
   152         -You can always drop the `--incremental` to build as normal (but you
   153         -will still be using the local nightly as your bootstrap).
   154         -
   155         -## Directory Layout
   156         -
   157         -This build system houses all output under the `build` directory, which looks
   158         -like this:
   159         -
   160         -```
   161         -# Root folder of all output. Everything is scoped underneath here
   162         -build/
   163         -
   164         -  # Location where the stage0 compiler downloads are all cached. This directory
   165         -  # only contains the tarballs themselves as they're extracted elsewhere.
   166         -  cache/
   167         -    2015-12-19/
   168         -    2016-01-15/
   169         -    2016-01-21/
   170         -    ...
   171         -
   172         -  # Output directory for building this build system itself. The stage0
   173         -  # cargo/rustc are used to build the build system into this location.
   174         -  bootstrap/
   175         -    debug/
   176         -    release/
   177         -
   178         -  # Output of the dist-related steps like dist-std, dist-rustc, and dist-docs
   179         -  dist/
   180         -
   181         -  # Temporary directory used for various input/output as part of various stages
   182         -  tmp/
   183         -
   184         -  # Each remaining directory is scoped by the "host" triple of compilation at
   185         -  # hand.
   186         -  x86_64-unknown-linux-gnu/
   187         -
   188         -    # The build artifacts for the `compiler-rt` library for the target this
   189         -    # folder is under. The exact layout here will likely depend on the platform,
   190         -    # and this is also built with CMake so the build system is also likely
   191         -    # different.
   192         -    compiler-rt/
   193         -      build/
   194         -
   195         -    # Output folder for LLVM if it is compiled for this target
   196         -    llvm/
   197         -
   198         -      # build folder (e.g. the platform-specific build system). Like with
   199         -      # compiler-rt this is compiled with CMake
   200         -      build/
   201         -
   202         -      # Installation of LLVM. Note that we run the equivalent of 'make install'
   203         -      # for LLVM to set up these folders.
   204         -      bin/
   205         -      lib/
   206         -      include/
   207         -      share/
   208         -      ...
   209         -
   210         -    # Output folder for all documentation of this target. This is what's filled
   211         -    # in whenever the `doc` step is run.
   212         -    doc/
   213         -
   214         -    # Output for all compiletest-based test suites
   215         -    test/
   216         -      run-pass/
   217         -      compile-fail/
   218         -      debuginfo/
   219         -      ...
   220         -
   221         -    # Location where the stage0 Cargo and Rust compiler are unpacked. This
   222         -    # directory is purely an extracted and overlaid tarball of these two (done
   223         -    # by the bootstrap python script). In theory the build system does not
   224         -    # modify anything under this directory afterwards.
   225         -    stage0/
   226         -
   227         -    # These two build directories are the cargo output directories for builds of
   228         -    # the standard library and compiler, respectively. Internally these may also
   229         -    # have other target directories, which represent artifacts being compiled
   230         -    # from the host to the specified target.
   231         -    #
   232         -    # Essentially, each of these directories is filled in by one `cargo`
   233         -    # invocation. The build system instruments calling Cargo in the right order
   234         -    # with the right variables to ensure these are filled in correctly.
   235         -    stageN-std/
   236         -    stageN-test/
   237         -    stageN-rustc/
   238         -    stageN-tools/
   239         -
   240         -    # This is a special case of the above directories, **not** filled in via
   241         -    # Cargo but rather the build system itself. The stage0 compiler already has
   242         -    # a set of target libraries for its own host triple (in its own sysroot)
   243         -    # inside of stage0/. When we run the stage0 compiler to bootstrap more
   244         -    # things, however, we don't want to use any of these libraries (as those are
   245         -    # the ones that we're building). So essentially, when the stage1 compiler is
   246         -    # being compiled (e.g. after libstd has been built), *this* is used as the
   247         -    # sysroot for the stage0 compiler being run.
   248         -    #
   249         -    # Basically this directory is just a temporary artifact used to configure the
   250         -    # stage0 compiler to ensure that the libstd we just built is used to
   251         -    # compile the stage1 compiler.
   252         -    stage0-sysroot/lib/
   253         -
   254         -    # These output directories are intended to be standalone working
   255         -    # implementations of the compiler (corresponding to each stage). The build
   256         -    # system will link (using hard links) output from stageN-{std,rustc} into
   257         -    # each of these directories.
   258         -    #
   259         -    # In theory there is no extra build output in these directories.
   260         -    stage1/
   261         -    stage2/
   262         -    stage3/
   263         -```
   264         -
   265         -## Cargo projects
   266         -
   267         -The current build is unfortunately not quite as simple as `cargo build` in a
   268         -directory, but rather the compiler is split into three different Cargo projects:
   269         -
   270         -* `src/libstd` - the standard library
   271         -* `src/libtest` - testing support, depends on libstd
   272         -* `src/rustc` - the actual compiler itself
   273         -
   274         -Each "project" has a corresponding Cargo.lock file with all dependencies, and
   275         -this means that building the compiler involves running Cargo three times. The
   276         -structure here serves two goals:
   277         -
   278         -1. Facilitating dependencies coming from crates.io. These dependencies don't
   279         -   depend on `std`, so libstd is a separate project compiled ahead of time
   280         -   before the actual compiler builds.
   281         -2. Splitting "host artifacts" from "target artifacts". That is, when building
   282         -   code for an arbitrary target you don't need the entire compiler, but you'll
   283         -   end up needing libraries like libtest that depend on std but also want to use
   284         -   crates.io dependencies. Hence, libtest is split out as its own project that
   285         -   is sequenced after `std` but before `rustc`. This project is built for all
   286         -   targets.
   287         -
   288         -There is some loss in build parallelism here because libtest can be compiled in
   289         -parallel with a number of rustc artifacts, but in theory the loss isn't too bad!
   290         -
   291         -## Build tools
   292         -
   293         -We've actually got quite a few tools that we use in the compiler's build system
   294         -and for testing. To organize these, each tool is a project in `src/tools` with a
   295         -corresponding `Cargo.toml`. All tools are compiled with Cargo (currently having
   296         -independent `Cargo.lock` files) and do not currently explicitly depend on the
   297         -compiler or standard library. Compiling each tool is sequenced after the
   298         -appropriate libstd/libtest/librustc compile above.
   299         -
   300         -## Extending rustbuild
   301         -
   302         -So you'd like to add a feature to the rustbuild build system or just fix a bug.
   303         -Great! One of the major motivational factors for moving away from `make` is that
   304         -Rust is in theory much easier to read, modify, and write. If you find anything
   305         -excessively confusing, please open an issue on this and we'll try to get it
   306         -documented or simplified pronto.
   307         -
   308         -First up, you'll probably want to read over the documentation above as that'll
   309         -give you a high level overview of what rustbuild is doing. You also probably
   310         -want to play around a bit yourself by just getting it up and running before you
   311         -dive too much into the actual build system itself.
   312         -
   313         -After that, each module in rustbuild should have enough documentation to keep
   314         -you up and running. Some general areas that you may be interested in modifying
   315         -are:
   316         -
   317         -* Adding a new build tool? Take a look at `bootstrap/step.rs` for examples of
   318         -  other tools.
   319         -* Adding a new compiler crate? Look no further! Adding crates can be done by
   320         -  adding a new directory with `Cargo.toml` followed by configuring all
   321         -  `Cargo.toml` files accordingly.
   322         -* Adding a new dependency from crates.io? We're still working on that, so hold
   323         -  off on that for now.
   324         -* Adding a new configuration option? Take a look at `bootstrap/config.rs` or
   325         -  perhaps `bootstrap/flags.rs` and then modify the build elsewhere to read that
   326         -  option.
   327         -* Adding a sanity check? Take a look at `bootstrap/sanity.rs`.
   328         -
   329         -If you have any questions feel free to reach out on `#rust-internals` on IRC or
   330         -open an issue in the bug tracker!

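The deleted README above describes a TOML-based configuration layer (a `config.toml` read alongside `config.mk`). As a rough sketch of what consuming such a file looks like from Rust — using the current `toml` crate API rather than the `toml = "0.1"` pinned in the deleted Cargo.toml, and with a hypothetical `[build] verbose` key — reading it might be:

```
// Illustrative sketch only (not part of the deleted sources): read a
// config.toml-style file as described in the README above. Uses the modern
// `toml` crate (`toml::Value`), not the `toml = "0.1"` pinned in the deleted
// Cargo.toml; the `[build] verbose` key is a hypothetical example.
use std::fs;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let text = fs::read_to_string("config.toml")?;
    let value: toml::Value = text.parse()?;
    let verbose = value
        .get("build")
        .and_then(|section| section.get("verbose"))
        .and_then(|v| v.as_bool())
        .unwrap_or(false);
    println!("build.verbose = {}", verbose);
    Ok(())
}
```
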
Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/bin/main.rs version [46a25b876b].

     1         -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! rustbuild, the Rust build system
    12         -//!
    13         -//! This is the entry point for the build system used to compile the `rustc`
    14         -//! compiler. Lots of documentation can be found in the `README.md` file in the
    15         -//! parent directory, and otherwise documentation can be found throughout the `build`
    16         -//! directory in each respective module.
    17         -
    18         -#![deny(warnings)]
    19         -
    20         -extern crate bootstrap;
    21         -
    22         -use std::env;
    23         -
    24         -use bootstrap::{Flags, Config, Build};
    25         -
    26         -fn main() {
    27         -    let args = env::args().skip(1).collect::<Vec<_>>();
    28         -    let flags = Flags::parse(&args);
    29         -    let mut config = Config::parse(&flags.build, flags.config.clone());
    30         -
    31         -    // compat with `./configure` while we're still using that
    32         -    if std::fs::metadata("config.mk").is_ok() {
    33         -        config.update_with_config_mk();
    34         -    }
    35         -
    36         -    Build::new(flags, config).build();
    37         -}

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/bin/rustc.rs version [9e1753f78f].

     1         -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Shim which is passed to Cargo as "rustc" when running the bootstrap.
    12         -//!
    13         -//! This shim will take care of some various tasks that our build process
    14         -//! requires that Cargo can't quite do through normal configuration:
    15         -//!
    16         -//! 1. When compiling build scripts and build dependencies, we need a guaranteed
    17         -//!    full standard library available. The only compiler which actually has
    18         -//!    this is the snapshot, so we detect this situation and always compile with
    19         -//!    the snapshot compiler.
    20         -//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
    21         -//!    (and this slightly differs based on a whether we're using a snapshot or
    22         -//!    not), so we do that all here.
    23         -//!
    24         -//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
    25         -//! switching compilers for the bootstrap and for build scripts will probably
    26         -//! never get replaced.
    27         -
    28         -#![deny(warnings)]
    29         -
    30         -extern crate bootstrap;
    31         -
    32         -use std::env;
    33         -use std::ffi::OsString;
    34         -use std::io;
    35         -use std::io::prelude::*;
    36         -use std::str::FromStr;
    37         -use std::path::PathBuf;
    38         -use std::process::{Command, ExitStatus};
    39         -
    40         -fn main() {
    41         -    let args = env::args_os().skip(1).collect::<Vec<_>>();
    42         -    // Detect whether or not we're a build script depending on whether --target
    43         -    // is passed (a bit janky...)
    44         -    let target = args.windows(2)
    45         -        .find(|w| &*w[0] == "--target")
    46         -        .and_then(|w| w[1].to_str());
    47         -    let version = args.iter().find(|w| &**w == "-vV");
    48         -
    49         -    let verbose = match env::var("RUSTC_VERBOSE") {
    50         -        Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"),
    51         -        Err(_) => 0,
    52         -    };
    53         -
    54         -    // Build scripts always use the snapshot compiler which is guaranteed to be
    55         -    // able to produce an executable, whereas intermediate compilers may not
    56         -    // have the standard library built yet and may not be able to produce an
    57         -    // executable. Otherwise we just use the standard compiler we're
    58         -    // bootstrapping with.
    59         -    //
    60         -    // Also note that cargo will detect the version of the compiler to trigger
    61         -    // a rebuild when the compiler changes. If this happens, we want to make
    62         -    // sure to use the actual compiler instead of the snapshot compiler because
    63         -    // that's the one that's actually changing.
    64         -    let (rustc, libdir) = if target.is_none() && version.is_none() {
    65         -        ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
    66         -    } else {
    67         -        ("RUSTC_REAL", "RUSTC_LIBDIR")
    68         -    };
    69         -    let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
    70         -    let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
    71         -    let mut on_fail = env::var_os("RUSTC_ON_FAIL").map(|of| Command::new(of));
    72         -
    73         -    let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc));
    74         -    let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir));
    75         -    let mut dylib_path = bootstrap::util::dylib_path();
    76         -    dylib_path.insert(0, PathBuf::from(libdir));
    77         -
    78         -    let mut cmd = Command::new(rustc);
    79         -    cmd.args(&args)
    80         -        .arg("--cfg")
    81         -        .arg(format!("stage{}", stage))
    82         -        .env(bootstrap::util::dylib_path_var(),
    83         -             env::join_paths(&dylib_path).unwrap());
    84         -
    85         -    if let Some(target) = target {
    86         -        // The stage0 compiler has a special sysroot distinct from what we
    87         -        // actually downloaded, so we just always pass the `--sysroot` option.
    88         -        cmd.arg("--sysroot").arg(sysroot);
    89         -
    90         -        // When we build Rust dylibs they're all intended for intermediate
    91         -        // usage, so make sure we pass the -Cprefer-dynamic flag instead of
    92         -        // linking all deps statically into the dylib.
    93         -        if env::var_os("RUSTC_NO_PREFER_DYNAMIC").is_none() {
    94         -            cmd.arg("-Cprefer-dynamic");
    95         -        }
    96         -
    97         -        // Pass the `rustbuild` feature flag to crates which rustbuild is
    98         -        // building. See the comment in bootstrap/lib.rs where this env var is
    99         -        // set for more details.
   100         -        if env::var_os("RUSTBUILD_UNSTABLE").is_some() {
   101         -            cmd.arg("--cfg").arg("rustbuild");
   102         -        }
   103         -
   104         -        // Help the libc crate compile by assisting it in finding the MUSL
   105         -        // native libraries.
   106         -        if let Some(s) = env::var_os("MUSL_ROOT") {
   107         -            let mut root = OsString::from("native=");
   108         -            root.push(&s);
   109         -            root.push("/lib");
   110         -            cmd.arg("-L").arg(&root);
   111         -        }
   112         -
   113         -        // Pass down extra flags, commonly used to configure `-Clinker` when
   114         -        // cross compiling.
   115         -        if let Ok(s) = env::var("RUSTC_FLAGS") {
   116         -            cmd.args(&s.split(" ").filter(|s| !s.is_empty()).collect::<Vec<_>>());
   117         -        }
   118         -
   119         -        // Pass down incremental directory, if any.
   120         -        if let Ok(dir) = env::var("RUSTC_INCREMENTAL") {
   121         -            cmd.arg(format!("-Zincremental={}", dir));
   122         -
   123         -            if verbose > 0 {
   124         -                cmd.arg("-Zincremental-info");
   125         -            }
   126         -        }
   127         -
   128         -        // If we're compiling specifically the `panic_abort` crate then we pass
   129         -        // the `-C panic=abort` option. Note that we do not do this for any
   130         -        // other crate intentionally as this is the only crate for now that we
   131         -        // ship with panic=abort.
   132         -        //
   133         -        // This... is a bit of a hack for how we detect this. Ideally this
   134         -        // information should be encoded in the crate I guess? Would likely
   135         -        // require an RFC amendment to RFC 1513, however.
   136         -        let is_panic_abort = args.windows(2)
   137         -            .any(|a| &*a[0] == "--crate-name" && &*a[1] == "panic_abort");
   138         -        if is_panic_abort {
   139         -            cmd.arg("-C").arg("panic=abort");
   140         -        }
   141         -
   142         -        // Set various options from config.toml to configure how we're building
   143         -        // code.
   144         -        if env::var("RUSTC_DEBUGINFO") == Ok("true".to_string()) {
   145         -            cmd.arg("-g");
   146         -        } else if env::var("RUSTC_DEBUGINFO_LINES") == Ok("true".to_string()) {
   147         -            cmd.arg("-Cdebuginfo=1");
   148         -        }
   149         -        let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") {
   150         -            Ok(s) => if s == "true" { "y" } else { "n" },
   151         -            Err(..) => "n",
   152         -        };
   153         -        cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions));
   154         -        if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") {
   155         -            cmd.arg("-C").arg(format!("codegen-units={}", s));
   156         -        }
   157         -
   158         -        // Emit save-analysis info.
   159         -        if env::var("RUSTC_SAVE_ANALYSIS") == Ok("api".to_string()) {
   160         -            cmd.arg("-Zsave-analysis-api");
   161         -        }
   162         -
   163         -        // Dealing with rpath here is a little special, so let's go into some
   164         -        // detail. First off, `-rpath` is a linker option on Unix platforms
   165         -        // which adds to the runtime dynamic loader path when looking for
   166         -        // dynamic libraries. We use this by default on Unix platforms to ensure
   167         -        // that our nightlies behave the same as on Windows, that is, they work out
   168         -        // of the box. This can be disabled, of course, but basically that's why
   169         -        // we're gated on RUSTC_RPATH here.
   170         -        //
   171         -        // Ok, so the astute might be wondering "why isn't `-C rpath` used
   172         -        // here?" and that is indeed a good question to ask. This codegen
   173         -        // option is the compiler's current interface to generating an rpath.
   174         -        // Unfortunately it doesn't quite suffice for us. The flag currently
   175         -        // takes no value as an argument, so the compiler calculates what it
   176         -        // should pass to the linker as `-rpath`. This unfortunately is based on
   177         -        // the **compile time** directory structure which when building with
   178         -        // Cargo will be very different than the runtime directory structure.
   179         -        //
   180         -        // All that's a really long winded way of saying that if we use
   181         -        // `-Crpath` then the executables generated have the wrong rpath of
   182         -        // something like `$ORIGIN/deps` when in fact the way we distribute
   183         -        // rustc requires the rpath to be `$ORIGIN/../lib`.
   184         -        //
   185         -        // So, all in all, to set up the correct rpath we pass the linker
   186         -        // argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it
   187         -        // fun to pass a flag to a tool to pass a flag to pass a flag to a tool
   188         -        // to change a flag in a binary?
   189         -        if env::var("RUSTC_RPATH") == Ok("true".to_string()) {
   190         -            let rpath = if target.contains("apple") {
   191         -
   192         -                // Note that we need to take one extra step on macOS to also pass
   193         -                // `-Wl,-install_name,@rpath/...` to get things to work right. To
   194         -                // do that we pass a weird flag to the compiler to get it to do
   195         -                // so. Note that this is definitely a hack, and we should likely
   196         -                // flesh out rpath support more fully in the future.
   197         -                if stage != "0" {
   198         -                    cmd.arg("-Z").arg("osx-rpath-install-name");
   199         -                }
   200         -                Some("-Wl,-rpath,@loader_path/../lib")
   201         -            } else if !target.contains("windows") {
   202         -                Some("-Wl,-rpath,$ORIGIN/../lib")
   203         -            } else {
   204         -                None
   205         -            };
   206         -            if let Some(rpath) = rpath {
   207         -                cmd.arg("-C").arg(format!("link-args={}", rpath));
   208         -            }
   209         -
   210         -            if let Ok(s) = env::var("RUSTFLAGS") {
   211         -                for flag in s.split_whitespace() {
   212         -                    cmd.arg(flag);
   213         -                }
   214         -            }
   215         -        }
   216         -
   217         -        if target.contains("pc-windows-msvc") {
   218         -            cmd.arg("-Z").arg("unstable-options");
   219         -            cmd.arg("-C").arg("target-feature=+crt-static");
   220         -        }
   221         -    }
   222         -
   223         -    if verbose > 1 {
   224         -        writeln!(&mut io::stderr(), "rustc command: {:?}", cmd).unwrap();
   225         -    }
   226         -
   227         -    // Actually run the compiler!
   228         -    std::process::exit(if let Some(ref mut on_fail) = on_fail {
   229         -        match cmd.status() {
   230         -            Ok(s) if s.success() => 0,
   231         -            _ => {
   232         -                println!("\nDid not run successfully:\n{:?}\n-------------", cmd);
   233         -                exec_cmd(on_fail).expect("could not run the backup command");
   234         -                1
   235         -            }
   236         -        }
   237         -    } else {
   238         -        std::process::exit(match exec_cmd(&mut cmd) {
   239         -            Ok(s) => s.code().unwrap_or(0xfe),
   240         -            Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
   241         -        })
   242         -    })
   243         -}
   244         -
   245         -#[cfg(unix)]
   246         -fn exec_cmd(cmd: &mut Command) -> ::std::io::Result<ExitStatus> {
   247         -    use std::os::unix::process::CommandExt;
   248         -    Err(cmd.exec())
   249         -}
   250         -
   251         -#[cfg(not(unix))]
   252         -fn exec_cmd(cmd: &mut Command) -> ::std::io::Result<ExitStatus> {
   253         -    cmd.status()
   254         -}

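The rustc shim above only takes effect if Cargo is told to run it in place of the real compiler. A minimal, hypothetical sketch of that wiring (placeholder paths; the real driver lives in the `bootstrap` library deleted in this same check-in) might look like:

```
// Hypothetical sketch: point Cargo at the rustc shim via the RUSTC
// environment variable and supply the variables the shim reads back.
// All paths below are placeholders, not the real rustbuild layout.
use std::process::Command;

fn main() {
    let status = Command::new("cargo")
        .arg("build")
        .env("RUSTC", "build/bootstrap/debug/rustc")         // the shim binary (placeholder path)
        .env("RUSTC_STAGE", "0")                              // read by the shim for --cfg stageN
        .env("RUSTC_REAL", "/path/to/real/rustc")             // placeholder real compiler
        .env("RUSTC_SNAPSHOT", "/path/to/stage0/rustc")       // placeholder snapshot compiler
        .env("RUSTC_LIBDIR", "/path/to/real/lib")             // placeholder libdir
        .env("RUSTC_SNAPSHOT_LIBDIR", "/path/to/stage0/lib")  // placeholder snapshot libdir
        .env("RUSTC_SYSROOT", "/path/to/sysroot")             // placeholder sysroot
        .status()
        .expect("failed to spawn cargo");
    std::process::exit(status.code().unwrap_or(1));
}
```
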
Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/bin/rustdoc.rs version [608b78285f].

     1         -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Shim which is passed to Cargo as "rustdoc" when running the bootstrap.
    12         -//!
    13         -//! See comments in `src/bootstrap/rustc.rs` for more information.
    14         -
    15         -#![deny(warnings)]
    16         -
    17         -extern crate bootstrap;
    18         -
    19         -use std::env;
    20         -use std::process::Command;
    21         -use std::path::PathBuf;
    22         -
    23         -fn main() {
    24         -    let args = env::args_os().skip(1).collect::<Vec<_>>();
    25         -    let rustdoc = env::var_os("RUSTDOC_REAL").expect("RUSTDOC_REAL was not set");
    26         -    let libdir = env::var_os("RUSTC_LIBDIR").expect("RUSTC_LIBDIR was not set");
    27         -    let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
    28         -    let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
    29         -
    30         -    let mut dylib_path = bootstrap::util::dylib_path();
    31         -    dylib_path.insert(0, PathBuf::from(libdir));
    32         -
    33         -    let mut cmd = Command::new(rustdoc);
    34         -    cmd.args(&args)
    35         -        .arg("--cfg")
    36         -        .arg(format!("stage{}", stage))
    37         -        .arg("--cfg")
    38         -        .arg("dox")
    39         -        .arg("--sysroot")
    40         -        .arg(sysroot)
    41         -        .env(bootstrap::util::dylib_path_var(),
    42         -             env::join_paths(&dylib_path).unwrap());
    43         -    std::process::exit(match cmd.status() {
    44         -        Ok(s) => s.code().unwrap_or(1),
    45         -        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
    46         -    })
    47         -}

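Both shims prepend a library directory to the platform's dynamic-loader search path before launching the real tool. A standalone sketch of that pattern, using only the standard library and a hard-coded Linux variable name where the deleted code calls `bootstrap::util::dylib_path_var()`:

```
// Standalone sketch of the dylib-path trick used by both shims above:
// prepend a library directory to the loader path, then run the real tool.
// "LD_LIBRARY_PATH" and the libdir below are illustrative assumptions.
use std::env;
use std::path::PathBuf;
use std::process::Command;

fn main() {
    let var = "LD_LIBRARY_PATH"; // assumption: Linux; macOS would use DYLD_LIBRARY_PATH
    let mut paths: Vec<PathBuf> = env::var_os(var)
        .map(|v| env::split_paths(&v).collect())
        .unwrap_or_default();
    paths.insert(0, PathBuf::from("/path/to/stage0/lib")); // placeholder libdir

    let joined = env::join_paths(&paths).expect("path contained an invalid character");
    let status = Command::new("rustdoc")
        .arg("--version")
        .env(var, &joined)
        .status()
        .expect("failed to run rustdoc");
    std::process::exit(status.code().unwrap_or(1));
}
```
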
Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/bootstrap.py version [6512ebcb00].

     1         -# Copyright 2015-2016 The Rust Project Developers. See the COPYRIGHT
     2         -# file at the top-level directory of this distribution and at
     3         -# http://rust-lang.org/COPYRIGHT.
     4         -#
     5         -# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -# option. This file may not be copied, modified, or distributed
     9         -# except according to those terms.
    10         -
    11         -from __future__ import print_function
    12         -import argparse
    13         -import contextlib
    14         -import datetime
    15         -import hashlib
    16         -import os
    17         -import shutil
    18         -import subprocess
    19         -import sys
    20         -import tarfile
    21         -import tempfile
    22         -
    23         -from time import time
    24         -
    25         -
    26         -def get(url, path, verbose=False):
    27         -    sha_url = url + ".sha256"
    28         -    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
    29         -        temp_path = temp_file.name
    30         -    with tempfile.NamedTemporaryFile(suffix=".sha256", delete=False) as sha_file:
    31         -        sha_path = sha_file.name
    32         -
    33         -    try:
    34         -        download(sha_path, sha_url, False, verbose)
    35         -        if os.path.exists(path):
    36         -            if verify(path, sha_path, False):
    37         -                if verbose:
    38         -                    print("using already-downloaded file " + path)
    39         -                return
    40         -            else:
    41         -                if verbose:
    42         -                    print("ignoring already-downloaded file " + path + " due to failed verification")
    43         -                os.unlink(path)
    44         -        download(temp_path, url, True, verbose)
    45         -        if not verify(temp_path, sha_path, verbose):
    46         -            raise RuntimeError("failed verification")
    47         -        if verbose:
    48         -            print("moving {} to {}".format(temp_path, path))
    49         -        shutil.move(temp_path, path)
    50         -    finally:
    51         -        delete_if_present(sha_path, verbose)
    52         -        delete_if_present(temp_path, verbose)
    53         -
    54         -
    55         -def delete_if_present(path, verbose):
    56         -    if os.path.isfile(path):
    57         -        if verbose:
    58         -            print("removing " + path)
    59         -        os.unlink(path)
    60         -
    61         -
    62         -def download(path, url, probably_big, verbose):
    63         -    for x in range(0, 4):
    64         -        try:
    65         -            _download(path, url, probably_big, verbose, True)
    66         -            return
    67         -        except RuntimeError:
    68         -            print("\nspurious failure, trying again")
    69         -    _download(path, url, probably_big, verbose, False)
    70         -
    71         -
    72         -def _download(path, url, probably_big, verbose, exception):
    73         -    if probably_big or verbose:
    74         -        print("downloading {}".format(url))
    75         -    # see http://serverfault.com/questions/301128/how-to-download
    76         -    if sys.platform == 'win32':
    77         -        run(["PowerShell.exe", "/nologo", "-Command",
    78         -             "(New-Object System.Net.WebClient)"
    79         -             ".DownloadFile('{}', '{}')".format(url, path)],
    80         -            verbose=verbose,
    81         -            exception=exception)
    82         -    else:
    83         -        if probably_big or verbose:
    84         -            option = "-#"
    85         -        else:
    86         -            option = "-s"
    87         -        run(["curl", option, "--retry", "3", "-Sf", "-o", path, url],
    88         -            verbose=verbose,
    89         -            exception=exception)
    90         -
    91         -
    92         -def verify(path, sha_path, verbose):
    93         -    if verbose:
    94         -        print("verifying " + path)
    95         -    with open(path, "rb") as f:
    96         -        found = hashlib.sha256(f.read()).hexdigest()
    97         -    with open(sha_path, "r") as f:
    98         -        expected = f.readline().split()[0]
    99         -    verified = found == expected
   100         -    if not verified:
   101         -        print("invalid checksum:\n"
   102         -               "    found:    {}\n"
   103         -               "    expected: {}".format(found, expected))
   104         -    return verified
   105         -
   106         -
   107         -def unpack(tarball, dst, verbose=False, match=None):
   108         -    print("extracting " + tarball)
   109         -    fname = os.path.basename(tarball).replace(".tar.gz", "")
   110         -    with contextlib.closing(tarfile.open(tarball)) as tar:
   111         -        for p in tar.getnames():
   112         -            if "/" not in p:
   113         -                continue
   114         -            name = p.replace(fname + "/", "", 1)
   115         -            if match is not None and not name.startswith(match):
   116         -                continue
   117         -            name = name[len(match) + 1:]
   118         -
   119         -            fp = os.path.join(dst, name)
   120         -            if verbose:
   121         -                print("  extracting " + p)
   122         -            tar.extract(p, dst)
   123         -            tp = os.path.join(dst, p)
   124         -            if os.path.isdir(tp) and os.path.exists(fp):
   125         -                continue
   126         -            shutil.move(tp, fp)
   127         -    shutil.rmtree(os.path.join(dst, fname))
   128         -
   129         -def run(args, verbose=False, exception=False):
   130         -    if verbose:
   131         -        print("running: " + ' '.join(args))
   132         -    sys.stdout.flush()
   133         -    # Use Popen here instead of call() as it apparently keeps PowerShell on
   134         -    # Windows from locking up while waiting for input.
   135         -    ret = subprocess.Popen(args)
   136         -    code = ret.wait()
   137         -    if code != 0:
   138         -        err = "failed to run: " + ' '.join(args)
   139         -        if verbose or exception:
   140         -            raise RuntimeError(err)
   141         -        sys.exit(err)
   142         -
   143         -def stage0_data(rust_root):
   144         -    nightlies = os.path.join(rust_root, "src/stage0.txt")
   145         -    data = {}
   146         -    with open(nightlies, 'r') as nightlies:
   147         -        for line in nightlies:
   148         -            line = line.rstrip()  # Strip newline character, '\n'
   149         -            if line.startswith("#") or line == '':
   150         -                continue
   151         -            a, b = line.split(": ", 1)
   152         -            data[a] = b
   153         -    return data
   154         -
   155         -def format_build_time(duration):
   156         -    return str(datetime.timedelta(seconds=int(duration)))
   157         -
   158         -
   159         -class RustBuild(object):
   160         -    def download_stage0(self):
   161         -        cache_dst = os.path.join(self.build_dir, "cache")
   162         -        rustc_cache = os.path.join(cache_dst, self.stage0_rustc_date())
   163         -        cargo_cache = os.path.join(cache_dst, self.stage0_cargo_rev())
   164         -        if not os.path.exists(rustc_cache):
   165         -            os.makedirs(rustc_cache)
   166         -        if not os.path.exists(cargo_cache):
   167         -            os.makedirs(cargo_cache)
   168         -
   169         -        if self.rustc().startswith(self.bin_root()) and \
   170         -                (not os.path.exists(self.rustc()) or self.rustc_out_of_date()):
   171         -            self.print_what_it_means_to_bootstrap()
   172         -            if os.path.exists(self.bin_root()):
   173         -                shutil.rmtree(self.bin_root())
   174         -            channel = self.stage0_rustc_channel()
   175         -            filename = "rust-std-{}-{}.tar.gz".format(channel, self.build)
   176         -            url = "https://static.rust-lang.org/dist/" + self.stage0_rustc_date()
   177         -            tarball = os.path.join(rustc_cache, filename)
   178         -            if not os.path.exists(tarball):
   179         -                get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
   180         -            unpack(tarball, self.bin_root(),
   181         -                   match="rust-std-" + self.build,
   182         -                   verbose=self.verbose)
   183         -
   184         -            filename = "rustc-{}-{}.tar.gz".format(channel, self.build)
   185         -            url = "https://static.rust-lang.org/dist/" + self.stage0_rustc_date()
   186         -            tarball = os.path.join(rustc_cache, filename)
   187         -            if not os.path.exists(tarball):
   188         -                get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
   189         -            unpack(tarball, self.bin_root(), match="rustc", verbose=self.verbose)
   190         -            self.fix_executable(self.bin_root() + "/bin/rustc")
   191         -            self.fix_executable(self.bin_root() + "/bin/rustdoc")
   192         -            with open(self.rustc_stamp(), 'w') as f:
   193         -                f.write(self.stage0_rustc_date())
   194         -
   195         -        if self.cargo().startswith(self.bin_root()) and \
   196         -                (not os.path.exists(self.cargo()) or self.cargo_out_of_date()):
   197         -            self.print_what_it_means_to_bootstrap()
   198         -            filename = "cargo-nightly-{}.tar.gz".format(self.build)
   199         -            url = "https://s3.amazonaws.com/rust-lang-ci/cargo-builds/" + self.stage0_cargo_rev()
   200         -            tarball = os.path.join(cargo_cache, filename)
   201         -            if not os.path.exists(tarball):
   202         -                get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
   203         -            unpack(tarball, self.bin_root(), match="cargo", verbose=self.verbose)
   204         -            self.fix_executable(self.bin_root() + "/bin/cargo")
   205         -            with open(self.cargo_stamp(), 'w') as f:
   206         -                f.write(self.stage0_cargo_rev())
   207         -
   208         -    def fix_executable(self, fname):
   209         -        # If we're on NixOS we need to change the path to the dynamic loader
   210         -
   211         -        default_encoding = sys.getdefaultencoding()
   212         -        try:
   213         -            ostype = subprocess.check_output(['uname', '-s']).strip().decode(default_encoding)
   214         -        except (subprocess.CalledProcessError, WindowsError):
   215         -            return
   216         -
   217         -        if ostype != "Linux":
   218         -            return
   219         -
   220         -        if not os.path.exists("/etc/NIXOS"):
   221         -            return
   222         -        if os.path.exists("/lib"):
   223         -            return
   224         -
   225         -        # At this point we're pretty sure the user is running NixOS
   226         -        print("info: you seem to be running NixOS. Attempting to patch " + fname)
   227         -
   228         -        try:
   229         -            interpreter = subprocess.check_output(["patchelf", "--print-interpreter", fname])
   230         -            interpreter = interpreter.strip().decode(default_encoding)
   231         -        except subprocess.CalledProcessError as e:
   232         -            print("warning: failed to call patchelf: %s" % e)
   233         -            return
   234         -
   235         -        loader = interpreter.split("/")[-1]
   236         -
   237         -        try:
   238         -            ldd_output = subprocess.check_output(['ldd', '/run/current-system/sw/bin/sh'])
   239         -            ldd_output = ldd_output.strip().decode(default_encoding)
   240         -        except subprocess.CalledProcessError as e:
   241         -            print("warning: unable to call ldd: %s" % e)
   242         -            return
   243         -
   244         -        for line in ldd_output.splitlines():
   245         -            libname = line.split()[0]
   246         -            if libname.endswith(loader):
   247         -                loader_path = libname[:len(libname) - len(loader)]
   248         -                break
   249         -        else:
   250         -            print("warning: unable to find the path to the dynamic linker")
   251         -            return
   252         -
   253         -        correct_interpreter = loader_path + loader
   254         -
   255         -        try:
   256         -            subprocess.check_output(["patchelf", "--set-interpreter", correct_interpreter, fname])
   257         -        except subprocess.CalledProcessError as e:
   258         -            print("warning: failed to call patchelf: %s" % e)
   259         -            return
   260         -
   261         -    def stage0_cargo_rev(self):
   262         -        return self._cargo_rev
   263         -
   264         -    def stage0_rustc_date(self):
   265         -        return self._rustc_date
   266         -
   267         -    def stage0_rustc_channel(self):
   268         -        return self._rustc_channel
   269         -
   270         -    def rustc_stamp(self):
   271         -        return os.path.join(self.bin_root(), '.rustc-stamp')
   272         -
   273         -    def cargo_stamp(self):
   274         -        return os.path.join(self.bin_root(), '.cargo-stamp')
   275         -
   276         -    def rustc_out_of_date(self):
   277         -        if not os.path.exists(self.rustc_stamp()) or self.clean:
   278         -            return True
   279         -        with open(self.rustc_stamp(), 'r') as f:
   280         -            return self.stage0_rustc_date() != f.read()
   281         -
   282         -    def cargo_out_of_date(self):
   283         -        if not os.path.exists(self.cargo_stamp()) or self.clean:
   284         -            return True
   285         -        with open(self.cargo_stamp(), 'r') as f:
   286         -            return self.stage0_cargo_rev() != f.read()
   287         -
   288         -    def bin_root(self):
   289         -        return os.path.join(self.build_dir, self.build, "stage0")
   290         -
   291         -    def get_toml(self, key):
   292         -        for line in self.config_toml.splitlines():
   293         -            if line.startswith(key + ' ='):
   294         -                return self.get_string(line)
   295         -        return None
   296         -
   297         -    def get_mk(self, key):
   298         -        for line in iter(self.config_mk.splitlines()):
   299         -            if line.startswith(key + ' '):
   300         -                var = line[line.find(':=') + 2:].strip()
   301         -                if var != '':
   302         -                    return var
   303         -        return None
   304         -
   305         -    def cargo(self):
   306         -        config = self.get_toml('cargo')
   307         -        if config:
   308         -            return config
   309         -        config = self.get_mk('CFG_LOCAL_RUST_ROOT')
   310         -        if config:
   311         -            return config + '/bin/cargo' + self.exe_suffix()
   312         -        return os.path.join(self.bin_root(), "bin/cargo" + self.exe_suffix())
   313         -
   314         -    def rustc(self):
   315         -        config = self.get_toml('rustc')
   316         -        if config:
   317         -            return config
   318         -        config = self.get_mk('CFG_LOCAL_RUST_ROOT')
   319         -        if config:
   320         -            return config + '/bin/rustc' + self.exe_suffix()
   321         -        return os.path.join(self.bin_root(), "bin/rustc" + self.exe_suffix())
   322         -
   323         -    def get_string(self, line):
   324         -        start = line.find('"')
   325         -        end = start + 1 + line[start + 1:].find('"')
   326         -        return line[start + 1:end]
   327         -
   328         -    def exe_suffix(self):
   329         -        if sys.platform == 'win32':
   330         -            return '.exe'
   331         -        else:
   332         -            return ''
   333         -
   334         -    def print_what_it_means_to_bootstrap(self):
   335         -        if hasattr(self, 'printed'):
   336         -            return
   337         -        self.printed = True
   338         -        if os.path.exists(self.bootstrap_binary()):
   339         -            return
   340         -        if '--help' not in sys.argv or len(sys.argv) == 1:
   341         -            return
   342         -
   343         -        print('info: the build system for Rust is written in Rust, so this')
   344         -        print('      script is now going to download a stage0 rust compiler')
   345         -        print('      and then compile the build system itself')
   346         -        print('')
   347         -        print('info: in the meantime you can read more about rustbuild at')
   348         -        print('      src/bootstrap/README.md before the download finishes')
   349         -
   350         -    def bootstrap_binary(self):
   351         -        return os.path.join(self.build_dir, "bootstrap/debug/bootstrap")
   352         -
   353         -    def build_bootstrap(self):
   354         -        self.print_what_it_means_to_bootstrap()
   355         -        build_dir = os.path.join(self.build_dir, "bootstrap")
   356         -        if self.clean and os.path.exists(build_dir):
   357         -            shutil.rmtree(build_dir)
   358         -        env = os.environ.copy()
   359         -        env["CARGO_TARGET_DIR"] = build_dir
   360         -        env["RUSTC"] = self.rustc()
   361         -        env["LD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
   362         -                                 ((os.pathsep + env["LD_LIBRARY_PATH"])
   363         -                                  if "LD_LIBRARY_PATH" in env else "")
   364         -        env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
   365         -                                   ((os.pathsep + env["DYLD_LIBRARY_PATH"])
   366         -                                    if "DYLD_LIBRARY_PATH" in env else "")
   367         -        env["PATH"] = os.path.join(self.bin_root(), "bin") + \
   368         -                      os.pathsep + env["PATH"]
   369         -        if not os.path.isfile(self.cargo()):
   370         -            raise Exception("no cargo executable found at `%s`" % self.cargo())
   371         -        args = [self.cargo(), "build", "--manifest-path",
   372         -                os.path.join(self.rust_root, "src/bootstrap/Cargo.toml")]
   373         -        if self.use_locked_deps:
   374         -            args.append("--locked")
   375         -        if self.use_vendored_sources:
   376         -            args.append("--frozen")
   377         -        self.run(args, env)
   378         -
   379         -    def run(self, args, env):
   380         -        proc = subprocess.Popen(args, env=env)
   381         -        ret = proc.wait()
   382         -        if ret != 0:
   383         -            sys.exit(ret)
   384         -
   385         -    def build_triple(self):
   386         -        default_encoding = sys.getdefaultencoding()
   387         -        config = self.get_toml('build')
   388         -        if config:
   389         -            return config
   390         -        config = self.get_mk('CFG_BUILD')
   391         -        if config:
   392         -            return config
   393         -        try:
   394         -            ostype = subprocess.check_output(['uname', '-s']).strip().decode(default_encoding)
   395         -            cputype = subprocess.check_output(['uname', '-m']).strip().decode(default_encoding)
   396         -        except (subprocess.CalledProcessError, OSError):
   397         -            if sys.platform == 'win32':
   398         -                return 'x86_64-pc-windows-msvc'
   399         -            err = "uname not found"
   400         -            if self.verbose:
   401         -                raise Exception(err)
   402         -            sys.exit(err)
   403         -
   404         -        # Darwin's `uname -m` lies and always returns i386. We have to use
   405         -        # sysctl instead.
   406         -        if ostype == 'Darwin' and cputype == 'i686':
   407         -            args = ['sysctl', 'hw.optional.x86_64']
   408         -            sysctl = subprocess.check_output(args).decode(default_encoding)
   409         -            if ': 1' in sysctl:
   410         -                cputype = 'x86_64'
   411         -
   412         -        # The goal here is to come up with the same triple as LLVM would,
   413         -        # at least for the subset of platforms we're willing to target.
   414         -        if ostype == 'Linux':
   415         -            ostype = 'unknown-linux-gnu'
   416         -        elif ostype == 'FreeBSD':
   417         -            ostype = 'unknown-freebsd'
   418         -        elif ostype == 'DragonFly':
   419         -            ostype = 'unknown-dragonfly'
   420         -        elif ostype == 'Bitrig':
   421         -            ostype = 'unknown-bitrig'
   422         -        elif ostype == 'OpenBSD':
   423         -            ostype = 'unknown-openbsd'
   424         -        elif ostype == 'NetBSD':
   425         -            ostype = 'unknown-netbsd'
   426         -        elif ostype == 'SunOS':
   427         -            ostype = 'sun-solaris'
   428         -            # On Solaris, uname -m will return a machine classification instead
   429         -            # of a cpu type, so uname -p is recommended instead.  However, the
   430         -            # output from that option is too generic for our purposes (it will
   431         -            # always emit 'i386' on x86/amd64 systems).  As such, isainfo -k
   432         -            # must be used instead.
   433         -            try:
   434         -                cputype = subprocess.check_output(['isainfo',
   435         -                  '-k']).strip().decode(default_encoding)
   436         -            except (subprocess.CalledProcessError, OSError):
   437         -                err = "isainfo not found"
   438         -                if self.verbose:
   439         -                    raise Exception(err)
   440         -                sys.exit(err)
   441         -        elif ostype == 'Darwin':
   442         -            ostype = 'apple-darwin'
   443         -        elif ostype == 'Haiku':
   444         -            ostype = 'unknown-haiku'
   445         -        elif ostype.startswith('MINGW'):
   446         -            # msys' `uname` does not print gcc configuration, but prints msys
   447         -            # configuration. so we cannot believe `uname -m`:
   448         -            # msys1 is always i686 and msys2 is always x86_64.
   449         -            # instead, msys defines $MSYSTEM which is MINGW32 on i686 and
   450         -            # MINGW64 on x86_64.
   451         -            ostype = 'pc-windows-gnu'
   452         -            cputype = 'i686'
   453         -            if os.environ.get('MSYSTEM') == 'MINGW64':
   454         -                cputype = 'x86_64'
   455         -        elif ostype.startswith('MSYS'):
   456         -            ostype = 'pc-windows-gnu'
   457         -        elif ostype.startswith('CYGWIN_NT'):
   458         -            cputype = 'i686'
   459         -            if ostype.endswith('WOW64'):
   460         -                cputype = 'x86_64'
   461         -            ostype = 'pc-windows-gnu'
   462         -        else:
   463         -            err = "unknown OS type: " + ostype
   464         -            if self.verbose:
   465         -                raise ValueError(err)
   466         -            sys.exit(err)
   467         -
   468         -        if cputype in {'i386', 'i486', 'i686', 'i786', 'x86'}:
   469         -            cputype = 'i686'
   470         -        elif cputype in {'xscale', 'arm'}:
   471         -            cputype = 'arm'
   472         -        elif cputype in {'armv6l', 'armv7l', 'armv8l'}:
   473         -            cputype = 'arm'
   474         -            ostype += 'eabihf'
   475         -        elif cputype == 'armv7l':
   476         -            cputype = 'armv7'
   477         -            ostype += 'eabihf'
   478         -        elif cputype == 'aarch64':
   479         -            cputype = 'aarch64'
   480         -        elif cputype == 'arm64':
   481         -            cputype = 'aarch64'
   482         -        elif cputype == 'mips':
   483         -            if sys.byteorder == 'big':
   484         -                cputype = 'mips'
   485         -            elif sys.byteorder == 'little':
   486         -                cputype = 'mipsel'
   487         -            else:
   488         -                raise ValueError('unknown byteorder: ' + sys.byteorder)
   489         -        elif cputype == 'mips64':
   490         -            if sys.byteorder == 'big':
   491         -                cputype = 'mips64'
   492         -            elif sys.byteorder == 'little':
   493         -                cputype = 'mips64el'
   494         -            else:
   495         -                raise ValueError('unknown byteorder: ' + sys.byteorder)
   496         -            # only the n64 ABI is supported, indicate it
   497         -            ostype += 'abi64'
   498         -        elif cputype in {'powerpc', 'ppc'}:
   499         -            cputype = 'powerpc'
   500         -        elif cputype in {'powerpc64', 'ppc64'}:
   501         -            cputype = 'powerpc64'
   502         -        elif cputype in {'powerpc64le', 'ppc64le'}:
   503         -            cputype = 'powerpc64le'
   504         -        elif cputype == 'sparcv9':
   505         -            pass
   506         -        elif cputype in {'amd64', 'x86_64', 'x86-64', 'x64'}:
   507         -            cputype = 'x86_64'
   508         -        elif cputype == 's390x':
   509         -            cputype = 's390x'
   510         -        elif cputype == 'BePC':
   511         -            cputype = 'i686'
   512         -        else:
   513         -            err = "unknown cpu type: " + cputype
   514         -            if self.verbose:
   515         -                raise ValueError(err)
   516         -            sys.exit(err)
   517         -
   518         -        return "{}-{}".format(cputype, ostype)
   519         -
   520         -def bootstrap():
   521         -    parser = argparse.ArgumentParser(description='Build rust')
   522         -    parser.add_argument('--config')
   523         -    parser.add_argument('--clean', action='store_true')
   524         -    parser.add_argument('-v', '--verbose', action='store_true')
   525         -
   526         -    args = [a for a in sys.argv if a != '-h' and a != '--help']
   527         -    args, _ = parser.parse_known_args(args)
   528         -
   529         -    # Configure initial bootstrap
   530         -    rb = RustBuild()
   531         -    rb.config_toml = ''
   532         -    rb.config_mk = ''
   533         -    rb.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
   534         -    rb.build_dir = os.path.join(os.getcwd(), "build")
   535         -    rb.verbose = args.verbose
   536         -    rb.clean = args.clean
   537         -
   538         -    try:
   539         -        with open(args.config or 'config.toml') as config:
   540         -            rb.config_toml = config.read()
   541         -    except:
   542         -        pass
   543         -    try:
   544         -        rb.config_mk = open('config.mk').read()
   545         -    except:
   546         -        pass
   547         -
   548         -    rb.use_vendored_sources = '\nvendor = true' in rb.config_toml or \
   549         -                              'CFG_ENABLE_VENDOR' in rb.config_mk
   550         -
   551         -    rb.use_locked_deps = '\nlocked-deps = true' in rb.config_toml or \
   552         -                         'CFG_ENABLE_LOCKED_DEPS' in rb.config_mk
   553         -
   554         -    if 'SUDO_USER' in os.environ and not rb.use_vendored_sources:
   555         -        if os.environ.get('USER') != os.environ['SUDO_USER']:
   556         -            rb.use_vendored_sources = True
   557         -            print('info: looks like you are running this command under `sudo`')
   558         -            print('      and so in order to preserve your $HOME this will now')
   559         -            print('      use vendored sources by default. Note that if this')
   560         -            print('      does not work you should run a normal build first')
   561         -            print('      before running a command like `sudo make install`')
   562         -
   563         -    if rb.use_vendored_sources:
   564         -        if not os.path.exists('.cargo'):
   565         -            os.makedirs('.cargo')
   566         -        with open('.cargo/config','w') as f:
   567         -            f.write("""
   568         -                [source.crates-io]
   569         -                replace-with = 'vendored-sources'
   570         -                registry = 'https://example.com'
   571         -
   572         -                [source.vendored-sources]
   573         -                directory = '{}/src/vendor'
   574         -            """.format(rb.rust_root))
   575         -    else:
   576         -        if os.path.exists('.cargo'):
   577         -            shutil.rmtree('.cargo')
   578         -
   579         -    data = stage0_data(rb.rust_root)
   580         -    rb._rustc_channel, rb._rustc_date = data['rustc'].split('-', 1)
   581         -    rb._cargo_rev = data['cargo']
   582         -
   583         -    # Fetch/build the bootstrap
   584         -    rb.build = rb.build_triple()
   585         -    rb.download_stage0()
   586         -    sys.stdout.flush()
   587         -    rb.build_bootstrap()
   588         -    sys.stdout.flush()
   589         -
   590         -    # Run the bootstrap
   591         -    args = [rb.bootstrap_binary()]
   592         -    args.extend(sys.argv[1:])
   593         -    env = os.environ.copy()
   594         -    env["BUILD"] = rb.build
   595         -    env["SRC"] = rb.rust_root
   596         -    env["BOOTSTRAP_PARENT_ID"] = str(os.getpid())
   597         -    rb.run(args, env)
   598         -
   599         -def main():
   600         -    start_time = time()
   601         -    try:
   602         -        bootstrap()
   603         -        print("Build completed successfully in %s" % format_build_time(time() - start_time))
   604         -    except (SystemExit, KeyboardInterrupt) as e:
   605         -        if hasattr(e, 'code') and isinstance(e.code, int):
   606         -            exit_code = e.code
   607         -        else:
   608         -            exit_code = 1
   609         -            print(e)
   610         -        print("Build completed unsuccessfully in %s" % format_build_time(time() - start_time))
   611         -        sys.exit(exit_code)
   612         -
   613         -if __name__ == '__main__':
   614         -    main()
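
A small worked example of the stage0 bookkeeping implemented by stage0_data() and bootstrap() above, assuming a hypothetical src/stage0.txt whose values are purely illustrative:

    # stage0.txt uses "key: value" lines; comments and blanks are skipped.
    sample = """\
    # this line is ignored
    rustc: beta-2017-03-21
    cargo: 0123456789abcdef0123456789abcdef01234567
    """

    data = {}
    for line in sample.splitlines():
        line = line.rstrip()
        if line.startswith("#") or line == "":
            continue
        key, value = line.split(": ", 1)
        data[key] = value

    # bootstrap() then splits the rustc entry into channel and snapshot date,
    # which download_stage0() uses to build the tarball URLs.
    channel, date = data["rustc"].split("-", 1)
    print(channel)        # beta
    print(date)           # 2017-03-21
    print(data["cargo"])  # revision appended to the cargo-builds S3 URL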

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/cc.rs version [2af5c09bc2].

     1         -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! C-compiler probing and detection.
    12         -//!
    13         -//! This module will fill out the `cc` and `cxx` maps of `Build` by looking for
    14         -//! C and C++ compilers for each target configured. A compiler is found through
    15         -//! a number of vectors (in order of precedence)
    16         -//!
    17         -//! 1. Configuration via `target.$target.cc` in `config.toml`.
    18         -//! 2. Configuration via `target.$target.android-ndk` in `config.toml`, if
    19         -//!    applicable
    20         -//! 3. Special logic to probe on OpenBSD
    21         -//! 4. The `CC_$target` environment variable.
    22         -//! 5. The `CC` environment variable.
    23         -//! 6. "cc"
    24         -//!
    25         -//! Some of this logic is implemented here, but much of it is farmed out to the
    26         -//! `gcc` crate itself, so we end up having the same fallbacks as there.
    27         -//! Similar logic is then used to find a C++ compiler, with s/cc/c++/ applied
    28         -//! throughout.
    29         -//!
    30         -//! It is intended that after this module has run no C/C++ compiler will
    31         -//! ever be probed for. Instead the compilers found here will be used for
    32         -//! everything.
    33         -
    34         -use std::process::Command;
    35         -
    36         -use build_helper::{cc2ar, output};
    37         -use gcc;
    38         -
    39         -use Build;
    40         -use config::Target;
    41         -
    42         -pub fn find(build: &mut Build) {
    43         -    // For all targets we're going to need a C compiler for building some shims
    44         -    // and such as well as for being a linker for Rust code.
    45         -    for target in build.config.target.iter() {
    46         -        let mut cfg = gcc::Config::new();
    47         -        cfg.cargo_metadata(false).opt_level(0).debug(false)
    48         -           .target(target).host(&build.config.build);
    49         -
    50         -        let config = build.config.target_config.get(target);
    51         -        if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
    52         -            cfg.compiler(cc);
    53         -        } else {
    54         -            set_compiler(&mut cfg, "gcc", target, config, build);
    55         -        }
    56         -
    57         -        let compiler = cfg.get_compiler();
    58         -        let ar = cc2ar(compiler.path(), target);
    59         -        build.verbose(&format!("CC_{} = {:?}", target, compiler.path()));
    60         -        if let Some(ref ar) = ar {
    61         -            build.verbose(&format!("AR_{} = {:?}", target, ar));
    62         -        }
    63         -        build.cc.insert(target.to_string(), (compiler, ar));
    64         -    }
    65         -
    66         -    // For all host triples we need to find a C++ compiler as well
    67         -    for host in build.config.host.iter() {
    68         -        let mut cfg = gcc::Config::new();
    69         -        cfg.cargo_metadata(false).opt_level(0).debug(false).cpp(true)
    70         -           .target(host).host(&build.config.build);
    71         -        let config = build.config.target_config.get(host);
    72         -        if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
    73         -            cfg.compiler(cxx);
    74         -        } else {
    75         -            set_compiler(&mut cfg, "g++", host, config, build);
    76         -        }
    77         -        let compiler = cfg.get_compiler();
    78         -        build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
    79         -        build.cxx.insert(host.to_string(), compiler);
    80         -    }
    81         -}
    82         -
    83         -fn set_compiler(cfg: &mut gcc::Config,
    84         -                gnu_compiler: &str,
    85         -                target: &str,
    86         -                config: Option<&Target>,
    87         -                build: &Build) {
    88         -    match target {
    89         -        // When compiling for android we may have the NDK configured in the
    90         -        // config.toml in which case we look there. Otherwise the default
    91         -        // compiler already takes into account the triple in question.
    92         -        t if t.contains("android") => {
    93         -            if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
    94         -                let target = target.replace("armv7", "arm");
    95         -                let compiler = format!("{}-{}", target, gnu_compiler);
    96         -                cfg.compiler(ndk.join("bin").join(compiler));
    97         -            }
    98         -        }
    99         -
   100         -        // The default gcc version from OpenBSD may be too old; if that is the
   101         -        // case, try egcc, a gcc version from ports.
   102         -        t if t.contains("openbsd") => {
   103         -            let c = cfg.get_compiler();
   104         -            if !c.path().ends_with(gnu_compiler) {
   105         -                return
   106         -            }
   107         -
   108         -            let output = output(c.to_command().arg("--version"));
   109         -            let i = match output.find(" 4.") {
   110         -                Some(i) => i,
   111         -                None => return,
   112         -            };
   113         -            match output[i + 3..].chars().next().unwrap() {
   114         -                '0' ... '6' => {}
   115         -                _ => return,
   116         -            }
   117         -            let alternative = format!("e{}", gnu_compiler);
   118         -            if Command::new(&alternative).output().is_ok() {
   119         -                cfg.compiler(alternative);
   120         -            }
   121         -        }
   122         -
   123         -        "mips-unknown-linux-musl" => {
   124         -            if cfg.get_compiler().path().to_str() == Some("gcc") {
   125         -                cfg.compiler("mips-linux-musl-gcc");
   126         -            }
   127         -        }
   128         -        "mipsel-unknown-linux-musl" => {
   129         -            if cfg.get_compiler().path().to_str() == Some("gcc") {
   130         -                cfg.compiler("mipsel-linux-musl-gcc");
   131         -            }
   132         -        }
   133         -
   134         -        t if t.contains("musl") => {
   135         -            if let Some(root) = build.musl_root(target) {
   136         -                let guess = root.join("bin/musl-gcc");
   137         -                if guess.exists() {
   138         -                    cfg.compiler(guess);
   139         -                }
   140         -            }
   141         -        }
   142         -
   143         -        _ => {}
   144         -    }
   145         -}
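
The OpenBSD branch above decides whether to fall back to egcc by inspecting the version banner of the default compiler. A minimal Python sketch of the same string test, with hypothetical banners; only a digit of 0 through 6 immediately after " 4." triggers the fallback:

    def wants_egcc(version_banner):
        # Mirror of the check in set_compiler(): find " 4." and look at the
        # character right after the dot.
        i = version_banner.find(" 4.")
        if i == -1:
            return False
        return version_banner[i + 3] in "0123456"

    print(wants_egcc("gcc (GCC) 4.2.1 20070719"))  # True  -> try egcc from ports
    print(wants_egcc("gcc (GCC) 6.3.0"))           # False -> keep the default cc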

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/channel.rs version [9c6be0d1dd].

     1         -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Build configuration for Rust's release channels.
    12         -//!
    13         -//! Implements the stable/beta/nightly channel distinctions by setting various
    14         -//! flags like the `unstable_features`, calculating variables like `release` and
    15         -//! `package_vers`, and otherwise indicating to the compiler what it should
    16         -//! print out as part of its version information.
    17         -
    18         -use std::path::Path;
    19         -use std::process::Command;
    20         -
    21         -use build_helper::output;
    22         -
    23         -use Build;
    24         -
    25         -// The version number
    26         -pub const CFG_RELEASE_NUM: &'static str = "1.17.0";
    27         -
    28         -// An optional number to put after the label, e.g. '.2' -> '-beta.2'
    29         -// Be sure to make this starts with a dot to conform to semver pre-release
    30         -// Be sure to make this start with a dot to conform to semver pre-release
    31         -pub const CFG_PRERELEASE_VERSION: &'static str = ".1";
    32         -
    33         -pub struct GitInfo {
    34         -    inner: Option<Info>,
    35         -}
    36         -
    37         -struct Info {
    38         -    commit_date: String,
    39         -    sha: String,
    40         -    short_sha: String,
    41         -}
    42         -
    43         -impl GitInfo {
    44         -    pub fn new(dir: &Path) -> GitInfo {
    45         -        // See if this even begins to look like a git dir
    46         -        if !dir.join(".git").exists() {
    47         -            return GitInfo { inner: None }
    48         -        }
    49         -
    50         -        // Make sure git commands work
    51         -        let out = Command::new("git")
    52         -                          .arg("rev-parse")
    53         -                          .current_dir(dir)
    54         -                          .output()
    55         -                          .expect("failed to spawn git");
    56         -        if !out.status.success() {
    57         -            return GitInfo { inner: None }
    58         -        }
    59         -
    60         -        // Ok, let's scrape some info
    61         -        let ver_date = output(Command::new("git").current_dir(dir)
    62         -                                      .arg("log").arg("-1")
    63         -                                      .arg("--date=short")
    64         -                                      .arg("--pretty=format:%cd"));
    65         -        let ver_hash = output(Command::new("git").current_dir(dir)
    66         -                                      .arg("rev-parse").arg("HEAD"));
    67         -        let short_ver_hash = output(Command::new("git")
    68         -                                            .current_dir(dir)
    69         -                                            .arg("rev-parse")
    70         -                                            .arg("--short=9")
    71         -                                            .arg("HEAD"));
    72         -        GitInfo {
    73         -            inner: Some(Info {
    74         -                commit_date: ver_date.trim().to_string(),
    75         -                sha: ver_hash.trim().to_string(),
    76         -                short_sha: short_ver_hash.trim().to_string(),
    77         -            }),
    78         -        }
    79         -    }
    80         -
    81         -    pub fn sha(&self) -> Option<&str> {
    82         -        self.inner.as_ref().map(|s| &s.sha[..])
    83         -    }
    84         -
    85         -    pub fn sha_short(&self) -> Option<&str> {
    86         -        self.inner.as_ref().map(|s| &s.short_sha[..])
    87         -    }
    88         -
    89         -    pub fn commit_date(&self) -> Option<&str> {
    90         -        self.inner.as_ref().map(|s| &s.commit_date[..])
    91         -    }
    92         -
    93         -    pub fn version(&self, build: &Build, num: &str) -> String {
    94         -        let mut version = build.release(num);
    95         -        if let Some(ref inner) = self.inner {
    96         -            version.push_str(" (");
    97         -            version.push_str(&inner.short_sha);
    98         -            version.push_str(" ");
    99         -            version.push_str(&inner.commit_date);
   100         -            version.push_str(")");
   101         -        }
   102         -        return version
   103         -    }
   104         -}
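
For a concrete sense of what version() above produces, a short sketch with made-up git metadata, assuming build.release(num) yields "1.17.0-beta.1" on the beta channel (CFG_RELEASE_NUM plus the "-beta" label and CFG_PRERELEASE_VERSION):

    release = "1.17.0-beta.1"   # assumed result of build.release(num)
    short_sha = "abcdef012"     # git rev-parse --short=9 HEAD (illustrative)
    commit_date = "2017-05-10"  # git log -1 --date=short --pretty=format:%cd (illustrative)

    version = "{} ({} {})".format(release, short_sha, commit_date)
    print(version)  # 1.17.0-beta.1 (abcdef012 2017-05-10)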

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/check.rs version [00c539931a].

     1         -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Implementation of the test-related targets of the build system.
    12         -//!
    13         -//! This file implements the various regression test suites that we execute on
    14         -//! our CI.
    15         -
    16         -extern crate build_helper;
    17         -
    18         -use std::collections::HashSet;
    19         -use std::env;
    20         -use std::fmt;
    21         -use std::fs;
    22         -use std::path::{PathBuf, Path};
    23         -use std::process::Command;
    24         -
    25         -use build_helper::output;
    26         -
    27         -use {Build, Compiler, Mode};
    28         -use dist;
    29         -use util::{self, dylib_path, dylib_path_var, exe};
    30         -
    31         -const ADB_TEST_DIR: &'static str = "/data/tmp";
    32         -
    33         -/// The two modes of the test runner: tests or benchmarks.
    34         -#[derive(Copy, Clone)]
    35         -pub enum TestKind {
    36         -    /// Run `cargo test`
    37         -    Test,
    38         -    /// Run `cargo bench`
    39         -    Bench,
    40         -}
    41         -
    42         -impl TestKind {
    43         -    // Return the cargo subcommand for this test kind
    44         -    fn subcommand(self) -> &'static str {
    45         -        match self {
    46         -            TestKind::Test => "test",
    47         -            TestKind::Bench => "bench",
    48         -        }
    49         -    }
    50         -}
    51         -
    52         -impl fmt::Display for TestKind {
    53         -    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    54         -        f.write_str(match *self {
    55         -            TestKind::Test => "Testing",
    56         -            TestKind::Bench => "Benchmarking",
    57         -        })
    58         -    }
    59         -}
    60         -
    61         -/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
    62         -///
    63         -/// This tool in `src/tools` will verify the validity of all our links in the
    64         -/// documentation to ensure we don't have a bunch of dead ones.
    65         -pub fn linkcheck(build: &Build, host: &str) {
    66         -    println!("Linkcheck ({})", host);
    67         -    let compiler = Compiler::new(0, host);
    68         -
    69         -    let _time = util::timeit();
    70         -    build.run(build.tool_cmd(&compiler, "linkchecker")
    71         -                   .arg(build.out.join(host).join("doc")));
    72         -}
    73         -
    74         -/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
    75         -///
    76         -/// This tool in `src/tools` will check out a few Rust projects and run `cargo
    77         -/// test` to ensure that we don't regress the test suites there.
    78         -pub fn cargotest(build: &Build, stage: u32, host: &str) {
    79         -    let ref compiler = Compiler::new(stage, host);
    80         -
    81         -    // Configure PATH to find the right rustc. NB. we have to use PATH
    82         -    // and not RUSTC because the Cargo test suite has tests that will
    83         -    // fail if rustc is not spelled `rustc`.
    84         -    let path = build.sysroot(compiler).join("bin");
    85         -    let old_path = ::std::env::var("PATH").expect("");
    86         -    let sep = if cfg!(windows) { ";" } else { ":" };
    87         -    let ref newpath = format!("{}{}{}", path.display(), sep, old_path);
    88         -
    89         -    // Note that this is a short, cryptic, and not scoped directory name. This
    90         -    // is currently to minimize the length of path on Windows where we otherwise
    91         -    // quickly run into path name limit constraints.
    92         -    let out_dir = build.out.join("ct");
    93         -    t!(fs::create_dir_all(&out_dir));
    94         -
    95         -    let _time = util::timeit();
    96         -    let mut cmd = Command::new(build.tool(&Compiler::new(0, host), "cargotest"));
    97         -    build.prepare_tool_cmd(compiler, &mut cmd);
    98         -    build.run(cmd.env("PATH", newpath)
    99         -                 .arg(&build.cargo)
   100         -                 .arg(&out_dir));
   101         -}
   102         -
   103         -/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
   104         -///
   105         -/// This tool in `src/tools` checks up on various bits and pieces of style and
   106         -/// otherwise just implements a few lint-like checks that are specific to the
   107         -/// compiler itself.
   108         -pub fn tidy(build: &Build, host: &str) {
   109         -    println!("tidy check ({})", host);
   110         -    let compiler = Compiler::new(0, host);
   111         -    let mut cmd = build.tool_cmd(&compiler, "tidy");
   112         -    cmd.arg(build.src.join("src"));
   113         -    if !build.config.vendor {
   114         -        cmd.arg("--no-vendor");
   115         -    }
   116         -    build.run(&mut cmd);
   117         -}
   118         -
   119         -fn testdir(build: &Build, host: &str) -> PathBuf {
   120         -    build.out.join(host).join("test")
   121         -}
   122         -
   123         -/// Executes the `compiletest` tool to run a suite of tests.
   124         -///
   125         -/// Compiles all tests with `compiler` for `target` with the specified
   126         -/// compiletest `mode` and `suite` arguments. For example `mode` can be
   127         -/// "run-pass" or `suite` can be something like `debuginfo`.
   128         -pub fn compiletest(build: &Build,
   129         -                   compiler: &Compiler,
   130         -                   target: &str,
   131         -                   mode: &str,
   132         -                   suite: &str) {
   133         -    println!("Check compiletest suite={} mode={} ({} -> {})",
   134         -             suite, mode, compiler.host, target);
   135         -    let mut cmd = Command::new(build.tool(&Compiler::new(0, compiler.host),
   136         -                                          "compiletest"));
   137         -    build.prepare_tool_cmd(compiler, &mut cmd);
   138         -
   139         -    // compiletest currently has... a lot of arguments, so let's just pass all
   140         -    // of them!
   141         -
   142         -    cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
   143         -    cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
   144         -    cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
   145         -    cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
   146         -    cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
   147         -    cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
   148         -    cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
   149         -    cmd.arg("--mode").arg(mode);
   150         -    cmd.arg("--target").arg(target);
   151         -    cmd.arg("--host").arg(compiler.host);
   152         -    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build));
   153         -
   154         -    if let Some(nodejs) = build.config.nodejs.as_ref() {
   155         -        cmd.arg("--nodejs").arg(nodejs);
   156         -    }
   157         -
   158         -    let mut flags = vec!["-Crpath".to_string()];
   159         -    if build.config.rust_optimize_tests {
   160         -        flags.push("-O".to_string());
   161         -    }
   162         -    if build.config.rust_debuginfo_tests {
   163         -        flags.push("-g".to_string());
   164         -    }
   165         -
   166         -    let mut hostflags = build.rustc_flags(&compiler.host);
   167         -    hostflags.extend(flags.clone());
   168         -    cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
   169         -
   170         -    let mut targetflags = build.rustc_flags(&target);
   171         -    targetflags.extend(flags);
   172         -    targetflags.push(format!("-Lnative={}",
   173         -                             build.test_helpers_out(target).display()));
   174         -    cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
   175         -
   176         -    cmd.arg("--docck-python").arg(build.python());
   177         -
   178         -    if build.config.build.ends_with("apple-darwin") {
   179         -        // Force /usr/bin/python on macOS for LLDB tests because we're loading the
   180         -        // LLDB plugin's compiled module which only works with the system python
   181         -        // (namely not Homebrew-installed python)
   182         -        cmd.arg("--lldb-python").arg("/usr/bin/python");
   183         -    } else {
   184         -        cmd.arg("--lldb-python").arg(build.python());
   185         -    }
   186         -
   187         -    if let Some(ref gdb) = build.config.gdb {
   188         -        cmd.arg("--gdb").arg(gdb);
   189         -    }
   190         -    if let Some(ref vers) = build.lldb_version {
   191         -        cmd.arg("--lldb-version").arg(vers);
   192         -    }
   193         -    if let Some(ref dir) = build.lldb_python_dir {
   194         -        cmd.arg("--lldb-python-dir").arg(dir);
   195         -    }
   196         -    let llvm_config = build.llvm_config(target);
   197         -    let llvm_version = output(Command::new(&llvm_config).arg("--version"));
   198         -    cmd.arg("--llvm-version").arg(llvm_version);
   199         -
   200         -    cmd.args(&build.flags.cmd.test_args());
   201         -
   202         -    if build.config.verbose() || build.flags.verbose() {
   203         -        cmd.arg("--verbose");
   204         -    }
   205         -
   206         -    if build.config.quiet_tests {
   207         -        cmd.arg("--quiet");
   208         -    }
   209         -
   210         -    // Only pass correct values for these flags for the `run-make` suite as it
   211         -    // requires that a C++ compiler was configured which isn't always the case.
   212         -    if suite == "run-make" {
   213         -        let llvm_components = output(Command::new(&llvm_config).arg("--components"));
   214         -        let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
   215         -        cmd.arg("--cc").arg(build.cc(target))
   216         -           .arg("--cxx").arg(build.cxx(target))
   217         -           .arg("--cflags").arg(build.cflags(target).join(" "))
   218         -           .arg("--llvm-components").arg(llvm_components.trim())
   219         -           .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
   220         -    } else {
   221         -        cmd.arg("--cc").arg("")
   222         -           .arg("--cxx").arg("")
   223         -           .arg("--cflags").arg("")
   224         -           .arg("--llvm-components").arg("")
   225         -           .arg("--llvm-cxxflags").arg("");
   226         -    }
   227         -
   228         -    if build.qemu_rootfs(target).is_some() {
   229         -        cmd.arg("--qemu-test-client")
   230         -           .arg(build.tool(&Compiler::new(0, &build.config.build),
   231         -                           "qemu-test-client"));
   232         -    }
   233         -
   234         -    // Running a C compiler on MSVC requires a few env vars to be set, so be
   235         -    // sure to set them here.
   236         -    //
   237         -    // Note that if we encounter `PATH` we make sure to append to our own `PATH`
   238         -    // rather than stomp over it.
   239         -    if target.contains("msvc") {
   240         -        for &(ref k, ref v) in build.cc[target].0.env() {
   241         -            if k != "PATH" {
   242         -                cmd.env(k, v);
   243         -            }
   244         -        }
   245         -    }
   246         -    cmd.env("RUSTC_BOOTSTRAP", "1");
   247         -    build.add_rust_test_threads(&mut cmd);
   248         -
   249         -    if build.config.sanitizers {
   250         -        cmd.env("SANITIZER_SUPPORT", "1");
   251         -    }
   252         -
   253         -    cmd.arg("--adb-path").arg("adb");
   254         -    cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
   255         -    if target.contains("android") {
   256         -        // Assume that cc for this target comes from the android sysroot
   257         -        cmd.arg("--android-cross-path")
   258         -           .arg(build.cc(target).parent().unwrap().parent().unwrap());
   259         -    } else {
   260         -        cmd.arg("--android-cross-path").arg("");
   261         -    }
   262         -
   263         -    let _time = util::timeit();
   264         -    build.run(&mut cmd);
   265         -}
   266         -
   267         -/// Run `rustdoc --test` for all documentation in `src/doc`.
   268         -///
   269         -/// This will run all tests in our markdown documentation (e.g. the book)
   270         -/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
   271         -/// `compiler`.
   272         -pub fn docs(build: &Build, compiler: &Compiler) {
   273         -    // Do a breadth-first traversal of the `src/doc` directory and just run
   274         -    // tests for all files that end in `*.md`
   275         -    let mut stack = vec![build.src.join("src/doc")];
   276         -    let _time = util::timeit();
   277         -
   278         -    while let Some(p) = stack.pop() {
   279         -        if p.is_dir() {
   280         -            stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
   281         -            continue
   282         -        }
   283         -
   284         -        if p.extension().and_then(|s| s.to_str()) != Some("md") {
   285         -            continue
   286         -        }
   287         -
   288         -        // The nostarch directory in the book is for the No Starch Press edition, so it
   289         -        // isn't guaranteed to build. We don't care if it doesn't build, so skip it.
   290         -        use std::ffi::OsStr;
   291         -        let path: &OsStr = p.as_ref();
   292         -        if let Some(path) = path.to_str() {
   293         -            if path.contains("nostarch") {
   294         -                continue;
   295         -            }
   296         -        }
   297         -
   298         -        println!("doc tests for: {}", p.display());
   299         -        markdown_test(build, compiler, &p);
   300         -    }
   301         -}
   302         -
   303         -/// Run the error index generator tool to execute the tests located in the error
   304         -/// index.
   305         -///
   306         -/// The `error_index_generator` tool lives in `src/tools` and is used to
   307         -/// generate a markdown file from the error indexes of the code base which is
   308         -/// then passed to `rustdoc --test`.
   309         -pub fn error_index(build: &Build, compiler: &Compiler) {
   310         -    println!("Testing error-index stage{}", compiler.stage);
   311         -
   312         -    let dir = testdir(build, compiler.host);
   313         -    t!(fs::create_dir_all(&dir));
   314         -    let output = dir.join("error-index.md");
   315         -
   316         -    let _time = util::timeit();
   317         -    build.run(build.tool_cmd(&Compiler::new(0, compiler.host),
   318         -                             "error_index_generator")
   319         -                   .arg("markdown")
   320         -                   .arg(&output)
   321         -                   .env("CFG_BUILD", &build.config.build));
   322         -
   323         -    markdown_test(build, compiler, &output);
   324         -}
   325         -
   326         -fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
   327         -    let mut cmd = Command::new(build.rustdoc(compiler));
   328         -    build.add_rustc_lib_path(compiler, &mut cmd);
   329         -    build.add_rust_test_threads(&mut cmd);
   330         -    cmd.arg("--test");
   331         -    cmd.arg(markdown);
   332         -    cmd.env("RUSTC_BOOTSTRAP", "1");
   333         -
   334         -    let mut test_args = build.flags.cmd.test_args().join(" ");
   335         -    if build.config.quiet_tests {
   336         -        test_args.push_str(" --quiet");
   337         -    }
   338         -    cmd.arg("--test-args").arg(test_args);
   339         -
   340         -    build.run(&mut cmd);
   341         -}
   342         -
   343         -/// Run all unit tests plus documentation tests for an entire crate DAG defined
   344         -/// by a `Cargo.toml`
   345         -///
   346         -/// This is what runs tests for crates like the standard library, compiler, etc.
   347         -/// It essentially is the driver for running `cargo test`.
   348         -///
   349         -/// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
   350         -/// arguments, and those arguments are discovered from `cargo metadata`.
   351         -pub fn krate(build: &Build,
   352         -             compiler: &Compiler,
   353         -             target: &str,
   354         -             mode: Mode,
   355         -             test_kind: TestKind,
   356         -             krate: Option<&str>) {
   357         -    let (name, path, features, root) = match mode {
   358         -        Mode::Libstd => {
   359         -            ("libstd", "src/libstd", build.std_features(), "std")
   360         -        }
   361         -        Mode::Libtest => {
   362         -            ("libtest", "src/libtest", String::new(), "test")
   363         -        }
   364         -        Mode::Librustc => {
   365         -            ("librustc", "src/rustc", build.rustc_features(), "rustc-main")
   366         -        }
   367         -        _ => panic!("can only test libraries"),
   368         -    };
   369         -    println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
   370         -             compiler.host, target);
   371         -
   372         -    // If we're not doing a full bootstrap but we're testing a stage2 version of
   373         -    // libstd, then what we're actually testing is the libstd produced in
   374         -    // stage1. Reflect that here by updating the compiler that we're working
   375         -    // with automatically.
   376         -    let compiler = if build.force_use_stage1(compiler, target) {
   377         -        Compiler::new(1, compiler.host)
   378         -    } else {
   379         -        compiler.clone()
   380         -    };
   381         -
   382         -    // Build up the base `cargo test` command.
   383         -    //
   384         -    // Pass in some standard flags then iterate over the graph we've discovered
   385         -    // in `cargo metadata` with the maps above and figure out what `-p`
   386         -    // arguments need to get passed.
   387         -    let mut cargo = build.cargo(&compiler, mode, target, test_kind.subcommand());
   388         -    cargo.arg("--manifest-path")
   389         -         .arg(build.src.join(path).join("Cargo.toml"))
   390         -         .arg("--features").arg(features);
   391         -
   392         -    match krate {
   393         -        Some(krate) => {
   394         -            cargo.arg("-p").arg(krate);
   395         -        }
   396         -        None => {
   397         -            let mut visited = HashSet::new();
   398         -            let mut next = vec![root];
   399         -            while let Some(name) = next.pop() {
   400         -                // Right now jemalloc is our only target-specific crate in the
   401         -                // sense that it's not present on all platforms. Special-case
   402         -                // skipping it here for now, but if we add more such crates this
   403         -                // probably wants to be generalized.
   404         -                //
   405         -                // Also skip `build_helper` as it's not compiled normally for
   406         -                // target during the bootstrap and it's just meant to be a
   407         -                // helper crate, not tested. If it leaks through then it ends up
   408         -                // messing with various mtime calculations and such.
   409         -                if !name.contains("jemalloc") && name != "build_helper" {
   410         -                    cargo.arg("-p").arg(&format!("{}:0.0.0", name));
   411         -                }
   412         -                for dep in build.crates[name].deps.iter() {
   413         -                    if visited.insert(dep) {
   414         -                        next.push(dep);
   415         -                    }
   416         -                }
   417         -            }
   418         -        }
   419         -    }
   420         -
   421         -    // The tests are going to run with the *target* libraries, so we need to
   422         -    // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
   423         -    //
   424         -    // Note that to run the compiler we need to run with the *host* libraries,
   425         -    // but our wrapper scripts arrange for that to be the case anyway.
   426         -    let mut dylib_path = dylib_path();
   427         -    dylib_path.insert(0, build.sysroot_libdir(&compiler, target));
   428         -    cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
   429         -
   430         -    if target.contains("android") ||
   431         -       target.contains("emscripten") ||
   432         -       build.qemu_rootfs(target).is_some() {
   433         -        cargo.arg("--no-run");
   434         -    }
   435         -
   436         -    cargo.arg("--");
   437         -
   438         -    if build.config.quiet_tests {
   439         -        cargo.arg("--quiet");
   440         -    }
   441         -
   442         -    let _time = util::timeit();
   443         -
   444         -    if target.contains("android") {
   445         -        build.run(&mut cargo);
   446         -        krate_android(build, &compiler, target, mode);
   447         -    } else if target.contains("emscripten") {
   448         -        build.run(&mut cargo);
   449         -        krate_emscripten(build, &compiler, target, mode);
   450         -    } else if build.qemu_rootfs(target).is_some() {
   451         -        build.run(&mut cargo);
   452         -        krate_qemu(build, &compiler, target, mode);
   453         -    } else {
   454         -        cargo.args(&build.flags.cmd.test_args());
   455         -        build.run(&mut cargo);
   456         -    }
   457         -}
   458         -
   459         -fn krate_android(build: &Build,
   460         -                 compiler: &Compiler,
   461         -                 target: &str,
   462         -                 mode: Mode) {
   463         -    let mut tests = Vec::new();
   464         -    let out_dir = build.cargo_out(compiler, mode, target);
   465         -    find_tests(&out_dir, target, &mut tests);
   466         -    find_tests(&out_dir.join("deps"), target, &mut tests);
   467         -
   468         -    for test in tests {
   469         -        build.run(Command::new("adb").arg("push").arg(&test).arg(ADB_TEST_DIR));
   470         -
   471         -        let test_file_name = test.file_name().unwrap().to_string_lossy();
   472         -        let log = format!("{}/check-stage{}-T-{}-H-{}-{}.log",
   473         -                          ADB_TEST_DIR,
   474         -                          compiler.stage,
   475         -                          target,
   476         -                          compiler.host,
   477         -                          test_file_name);
   478         -        let quiet = if build.config.quiet_tests { "--quiet" } else { "" };
   479         -        let program = format!("(cd {dir}; \
   480         -                                LD_LIBRARY_PATH=./{target} ./{test} \
   481         -                                    --logfile {log} \
   482         -                                    {quiet} \
   483         -                                    {args})",
   484         -                              dir = ADB_TEST_DIR,
   485         -                              target = target,
   486         -                              test = test_file_name,
   487         -                              log = log,
   488         -                              quiet = quiet,
   489         -                              args = build.flags.cmd.test_args().join(" "));
   490         -
   491         -        let output = output(Command::new("adb").arg("shell").arg(&program));
   492         -        println!("{}", output);
   493         -
   494         -        t!(fs::create_dir_all(build.out.join("tmp")));
   495         -        build.run(Command::new("adb")
   496         -                          .arg("pull")
   497         -                          .arg(&log)
   498         -                          .arg(build.out.join("tmp")));
   499         -        build.run(Command::new("adb").arg("shell").arg("rm").arg(&log));
   500         -        if !output.contains("result: ok") {
   501         -            panic!("some tests failed");
   502         -        }
   503         -    }
   504         -}
   505         -
   506         -fn krate_emscripten(build: &Build,
   507         -                    compiler: &Compiler,
   508         -                    target: &str,
   509         -                    mode: Mode) {
   510         -    let mut tests = Vec::new();
   511         -    let out_dir = build.cargo_out(compiler, mode, target);
   512         -    find_tests(&out_dir, target, &mut tests);
   513         -    find_tests(&out_dir.join("deps"), target, &mut tests);
   514         -
   515         -    for test in tests {
   516         -        let test_file_name = test.to_string_lossy().into_owned();
   517         -        println!("running {}", test_file_name);
   518         -        let nodejs = build.config.nodejs.as_ref().expect("nodejs not configured");
   519         -        let mut cmd = Command::new(nodejs);
   520         -        cmd.arg(&test_file_name);
   521         -        if build.config.quiet_tests {
   522         -            cmd.arg("--quiet");
   523         -        }
   524         -        build.run(&mut cmd);
   525         -    }
   526         -}
   527         -
   528         -fn krate_qemu(build: &Build,
   529         -              compiler: &Compiler,
   530         -              target: &str,
   531         -              mode: Mode) {
   532         -    let mut tests = Vec::new();
   533         -    let out_dir = build.cargo_out(compiler, mode, target);
   534         -    find_tests(&out_dir, target, &mut tests);
   535         -    find_tests(&out_dir.join("deps"), target, &mut tests);
   536         -
   537         -    let tool = build.tool(&Compiler::new(0, &build.config.build),
   538         -                          "qemu-test-client");
   539         -    for test in tests {
   540         -        let mut cmd = Command::new(&tool);
   541         -        cmd.arg("run")
   542         -           .arg(&test);
   543         -        if build.config.quiet_tests {
   544         -            cmd.arg("--quiet");
   545         -        }
   546         -        cmd.args(&build.flags.cmd.test_args());
   547         -        build.run(&mut cmd);
   548         -    }
   549         -}
   550         -
   551         -
   552         -fn find_tests(dir: &Path,
   553         -              target: &str,
   554         -              dst: &mut Vec<PathBuf>) {
   555         -    for e in t!(dir.read_dir()).map(|e| t!(e)) {
   556         -        let file_type = t!(e.file_type());
   557         -        if !file_type.is_file() {
   558         -            continue
   559         -        }
   560         -        let filename = e.file_name().into_string().unwrap();
   561         -        if (target.contains("windows") && filename.ends_with(".exe")) ||
   562         -           (!target.contains("windows") && !filename.contains(".")) ||
   563         -           (target.contains("emscripten") && filename.ends_with(".js")) {
   564         -            dst.push(e.path());
   565         -        }
   566         -    }
   567         -}
   568         -
   569         -pub fn emulator_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
   570         -    if target.contains("android") {
   571         -        android_copy_libs(build, compiler, target)
   572         -    } else if let Some(s) = build.qemu_rootfs(target) {
   573         -        qemu_copy_libs(build, compiler, target, s)
   574         -    }
   575         -}
   576         -
   577         -fn android_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
   578         -    println!("Android copy libs to emulator ({})", target);
   579         -    build.run(Command::new("adb").arg("wait-for-device"));
   580         -    build.run(Command::new("adb").arg("remount"));
   581         -    build.run(Command::new("adb").args(&["shell", "rm", "-r", ADB_TEST_DIR]));
   582         -    build.run(Command::new("adb").args(&["shell", "mkdir", ADB_TEST_DIR]));
   583         -    build.run(Command::new("adb")
   584         -                      .arg("push")
   585         -                      .arg(build.src.join("src/etc/adb_run_wrapper.sh"))
   586         -                      .arg(ADB_TEST_DIR));
   587         -
   588         -    let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
   589         -    build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
   590         -
   591         -    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
   592         -        let f = t!(f);
   593         -        let name = f.file_name().into_string().unwrap();
   594         -        if util::is_dylib(&name) {
   595         -            build.run(Command::new("adb")
   596         -                              .arg("push")
   597         -                              .arg(f.path())
   598         -                              .arg(&target_dir));
   599         -        }
   600         -    }
   601         -}
   602         -
   603         -fn qemu_copy_libs(build: &Build,
   604         -                  compiler: &Compiler,
   605         -                  target: &str,
   606         -                  rootfs: &Path) {
   607         -    println!("QEMU copy libs to emulator ({})", target);
   608         -    assert!(target.starts_with("arm"), "only works with arm for now");
   609         -    t!(fs::create_dir_all(build.out.join("tmp")));
   610         -
   611         -    // Copy our freshly compiled test server over to the rootfs
   612         -    let server = build.cargo_out(compiler, Mode::Tool, target)
   613         -                      .join(exe("qemu-test-server", target));
   614         -    t!(fs::copy(&server, rootfs.join("testd")));
   615         -
   616         -    // Spawn the emulator and wait for it to come online
   617         -    let tool = build.tool(&Compiler::new(0, &build.config.build),
   618         -                          "qemu-test-client");
   619         -    build.run(Command::new(&tool)
   620         -                      .arg("spawn-emulator")
   621         -                      .arg(rootfs)
   622         -                      .arg(build.out.join("tmp")));
   623         -
   624         -    // Push all our dylibs to the emulator
   625         -    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
   626         -        let f = t!(f);
   627         -        let name = f.file_name().into_string().unwrap();
   628         -        if util::is_dylib(&name) {
   629         -            build.run(Command::new(&tool)
   630         -                              .arg("push")
   631         -                              .arg(f.path()));
   632         -        }
   633         -    }
   634         -}
   635         -
   636         -/// Run "distcheck", a 'make check' from a tarball
   637         -pub fn distcheck(build: &Build) {
   638         -    if build.config.build != "x86_64-unknown-linux-gnu" {
   639         -        return
   640         -    }
   641         -    if !build.config.host.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
   642         -        return
   643         -    }
   644         -    if !build.config.target.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
   645         -        return
   646         -    }
   647         -
   648         -    let dir = build.out.join("tmp").join("distcheck");
   649         -    let _ = fs::remove_dir_all(&dir);
   650         -    t!(fs::create_dir_all(&dir));
   651         -
   652         -    let mut cmd = Command::new("tar");
   653         -    cmd.arg("-xzf")
   654         -       .arg(dist::rust_src_location(build))
   655         -       .arg("--strip-components=1")
   656         -       .current_dir(&dir);
   657         -    build.run(&mut cmd);
   658         -    build.run(Command::new("./configure")
   659         -                     .args(&build.config.configure_args)
   660         -                     .arg("--enable-vendor")
   661         -                     .current_dir(&dir));
   662         -    build.run(Command::new(build_helper::make(&build.config.build))
   663         -                     .arg("check")
   664         -                     .current_dir(&dir));
   665         -}
   666         -
   667         -/// Test the build system itself
   668         -pub fn bootstrap(build: &Build) {
   669         -    let mut cmd = Command::new(&build.cargo);
   670         -    cmd.arg("test")
   671         -       .current_dir(build.src.join("src/bootstrap"))
   672         -       .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
   673         -       .env("RUSTC", &build.rustc);
   674         -    cmd.arg("--").args(&build.flags.cmd.test_args());
   675         -    build.run(&mut cmd);
   676         -}
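
As an aside on the deleted check.rs above: the `find_tests` helper near its end selects runnable test executables from Cargo's output directory purely by target triple and file name. The standalone sketch below restates that filter; the function name and the example target triples are illustrative and are not part of the deleted file.

    // Standalone restatement of the file-name filter used by `find_tests`:
    // Windows test binaries end in `.exe`, other native targets produce
    // extension-less binaries, and Emscripten targets produce `.js` files.
    fn looks_like_test_binary(target: &str, filename: &str) -> bool {
        (target.contains("windows") && filename.ends_with(".exe")) ||
            (!target.contains("windows") && !filename.contains('.')) ||
            (target.contains("emscripten") && filename.ends_with(".js"))
    }

    fn main() {
        assert!(looks_like_test_binary("x86_64-pc-windows-msvc", "std-0a1b2c.exe"));
        assert!(looks_like_test_binary("x86_64-unknown-linux-gnu", "std-0a1b2c"));
        assert!(looks_like_test_binary("asmjs-unknown-emscripten", "std-0a1b2c.js"));
        assert!(!looks_like_test_binary("x86_64-unknown-linux-gnu", "libstd-0a1b2c.rlib"));
    }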

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/clean.rs version [ed821a18ff].

     1         -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Implementation of `make clean` in rustbuild.
    12         -//!
    13         -//! Responsible for cleaning out a build directory of all old and stale
    14         -//! artifacts to prepare for a fresh build. Currently doesn't remove the
    15         -//! `build/cache` directory (download cache) or the `build/$target/llvm`
    16         -//! directory as we want that cached between builds.
    17         -
    18         -use std::fs;
    19         -use std::io::{self, ErrorKind};
    20         -use std::path::Path;
    21         -
    22         -use Build;
    23         -
    24         -pub fn clean(build: &Build) {
    25         -    rm_rf("tmp".as_ref());
    26         -    rm_rf(&build.out.join("tmp"));
    27         -    rm_rf(&build.out.join("dist"));
    28         -
    29         -    for host in build.config.host.iter() {
    30         -        let entries = match build.out.join(host).read_dir() {
    31         -            Ok(iter) => iter,
    32         -            Err(_) => continue,
    33         -        };
    34         -
    35         -        for entry in entries {
    36         -            let entry = t!(entry);
    37         -            if entry.file_name().to_str() == Some("llvm") {
    38         -                continue
    39         -            }
    40         -            let path = t!(entry.path().canonicalize());
    41         -            rm_rf(&path);
    42         -        }
    43         -    }
    44         -}
    45         -
    46         -fn rm_rf(path: &Path) {
    47         -    if !path.exists() {
    48         -        return
    49         -    }
    50         -    if path.is_file() {
    51         -        return do_op(path, "remove file", |p| fs::remove_file(p));
    52         -    }
    53         -
    54         -    for file in t!(fs::read_dir(path)) {
    55         -        let file = t!(file).path();
    56         -
    57         -        if file.is_dir() {
    58         -            rm_rf(&file);
    59         -        } else {
    60         -            // On Windows we can't remove a readonly file, and git will
    61         -            // often clone files as readonly. As a result, we have some
    62         -            // special logic to remove readonly files on Windows.
    63         -            do_op(&file, "remove file", |p| fs::remove_file(p));
    64         -        }
    65         -    }
    66         -    do_op(path, "remove dir", |p| fs::remove_dir(p));
    67         -}
    68         -
    69         -fn do_op<F>(path: &Path, desc: &str, mut f: F)
    70         -    where F: FnMut(&Path) -> io::Result<()>
    71         -{
    72         -    match f(path) {
    73         -        Ok(()) => {}
    74         -        Err(ref e) if cfg!(windows) &&
    75         -                      e.kind() == ErrorKind::PermissionDenied => {
    76         -            let mut p = t!(path.metadata()).permissions();
    77         -            p.set_readonly(false);
    78         -            t!(fs::set_permissions(path, p));
    79         -            f(path).unwrap_or_else(|e| {
    80         -                panic!("failed to {} {}: {}", desc, path.display(), e);
    81         -            })
    82         -        }
    83         -        Err(e) => {
    84         -            panic!("failed to {} {}: {}", desc, path.display(), e);
    85         -        }
    86         -    }
    87         -}
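
The `do_op` fallback in the deleted clean.rs above exists because files checked out as readonly on Windows cannot be removed directly, so the readonly bit is cleared and the operation retried. Below is a minimal sketch of that retry pattern in isolation; the helper name and the scratch file used in `main` are made up for illustration.

    use std::fs;
    use std::io;
    use std::path::Path;

    // If removal fails with PermissionDenied on Windows, clear the readonly
    // attribute and retry once; on other platforms the error passes through.
    fn remove_file_clearing_readonly(path: &Path) -> io::Result<()> {
        match fs::remove_file(path) {
            Err(ref e) if cfg!(windows) && e.kind() == io::ErrorKind::PermissionDenied => {
                let mut perms = fs::metadata(path)?.permissions();
                perms.set_readonly(false);
                fs::set_permissions(path, perms)?;
                fs::remove_file(path)
            }
            other => other,
        }
    }

    fn main() -> io::Result<()> {
        // Create a throwaway file and delete it through the helper.
        let path = Path::new("scratch_remove_me.txt");
        fs::write(path, b"tmp")?;
        remove_file_clearing_readonly(path)
    }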

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/compile.rs version [9f852c43c3].

     1         -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Implementation of compiling various phases of the compiler and standard
    12         -//! library.
    13         -//!
    14         -//! This module contains some of the real meat in the rustbuild build system
    15         -//! which is where Cargo is used to compile the standard library, libtest, and
    16         -//! the compiler. This module is also responsible for assembling the sysroot as it
    17         -//! goes along from the output of the previous stage.
    18         -
    19         -use std::collections::HashMap;
    20         -use std::fs::{self, File};
    21         -use std::path::{Path, PathBuf};
    22         -use std::process::Command;
    23         -use std::env;
    24         -
    25         -use build_helper::{output, mtime, up_to_date};
    26         -use filetime::FileTime;
    27         -
    28         -use channel::GitInfo;
    29         -use util::{exe, libdir, is_dylib, copy};
    30         -use {Build, Compiler, Mode};
    31         -
    32         -/// Build the standard library.
    33         -///
    34         -/// This will build the standard library for a particular stage of the build
    35         -/// using the `compiler` targeting the `target` architecture. The artifacts
    36         -/// created will also be linked into the sysroot directory.
    37         -pub fn std(build: &Build, target: &str, compiler: &Compiler) {
    38         -    let libdir = build.sysroot_libdir(compiler, target);
    39         -    t!(fs::create_dir_all(&libdir));
    40         -
    41         -    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
    42         -             compiler.host, target);
    43         -
    44         -    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
    45         -    build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
    46         -    let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
    47         -    let mut features = build.std_features();
    48         -
    49         -    if let Ok(target) = env::var("MACOSX_STD_DEPLOYMENT_TARGET") {
    50         -        cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
    51         -    }
    52         -
    53         -    // When doing a local rebuild we tell cargo that we're stage1 rather than
    54         -    // stage0. This works fine if the local rust and being-built rust have the
    55         -    // same view of what the default allocator is, but fails otherwise. Since
    56         -    // we don't have a way to express an allocator preference yet, work
    57         -    // around the issue in the case of a local rebuild with jemalloc disabled.
    58         -    if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
    59         -        features.push_str(" force_alloc_system");
    60         -    }
    61         -
    62         -    if compiler.stage != 0 && build.config.sanitizers {
    63         -        // This variable is used by the sanitizer runtime crates, e.g.
    64         -        // rustc_lsan, to build the sanitizer runtime from C code
    65         -        // When this variable is missing, those crates won't compile the C code,
    66         -        // so we don't set this variable during stage0 where llvm-config is
    67         -        // missing
    68         -        // We also only build the runtimes when --enable-sanitizers (or its
    69         -        // config.toml equivalent) is used
    70         -        cargo.env("LLVM_CONFIG", build.llvm_config(target));
    71         -    }
    72         -    cargo.arg("--features").arg(features)
    73         -         .arg("--manifest-path")
    74         -         .arg(build.src.join("src/libstd/Cargo.toml"));
    75         -
    76         -    if let Some(target) = build.config.target_config.get(target) {
    77         -        if let Some(ref jemalloc) = target.jemalloc {
    78         -            cargo.env("JEMALLOC_OVERRIDE", jemalloc);
    79         -        }
    80         -    }
    81         -    if target.contains("musl") {
    82         -        if let Some(p) = build.musl_root(target) {
    83         -            cargo.env("MUSL_ROOT", p);
    84         -        }
    85         -    }
    86         -
    87         -    build.run(&mut cargo);
    88         -    update_mtime(build, &libstd_stamp(build, &compiler, target));
    89         -}
    90         -
    91         -/// Link all libstd rlibs/dylibs into the sysroot location.
    92         -///
    93         -/// Links those artifacts generated by `compiler` into the `stage` compiler's
    94         -/// sysroot for the specified `host` and `target`.
    95         -///
    96         -/// Note that this assumes that `compiler` has already generated the libstd
    97         -/// libraries for `target`, and this method will find them in the relevant
    98         -/// output directory.
    99         -pub fn std_link(build: &Build,
   100         -                compiler: &Compiler,
   101         -                target_compiler: &Compiler,
   102         -                target: &str) {
   103         -    println!("Copying stage{} std from stage{} ({} -> {} / {})",
   104         -             target_compiler.stage,
   105         -             compiler.stage,
   106         -             compiler.host,
   107         -             target_compiler.host,
   108         -             target);
   109         -    let libdir = build.sysroot_libdir(&target_compiler, target);
   110         -    let out_dir = build.cargo_out(&compiler, Mode::Libstd, target);
   111         -
   112         -    t!(fs::create_dir_all(&libdir));
   113         -    add_to_sysroot(&out_dir, &libdir);
   114         -
   115         -    if target.contains("musl") && !target.contains("mips") {
   116         -        copy_musl_third_party_objects(build, target, &libdir);
   117         -    }
   118         -}
   119         -
   120         -/// Copies the crt(1,i,n).o startup objects
   121         -///
   122         -/// Only required for musl targets that statically link to libc
   123         -fn copy_musl_third_party_objects(build: &Build, target: &str, into: &Path) {
   124         -    for &obj in &["crt1.o", "crti.o", "crtn.o"] {
   125         -        copy(&build.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj));
   126         -    }
   127         -}
   128         -
   129         -/// Build and prepare startup objects like rsbegin.o and rsend.o
   130         -///
   131         -/// These are primarily used on Windows right now for linking executables/dlls.
   132         -/// They don't require any library support as they're just plain old object
   133         -/// files, so we just use the nightly snapshot compiler to always build them (as
   134         -/// no other compilers are guaranteed to be available).
   135         -pub fn build_startup_objects(build: &Build, for_compiler: &Compiler, target: &str) {
   136         -    if !target.contains("pc-windows-gnu") {
   137         -        return
   138         -    }
   139         -
   140         -    let compiler = Compiler::new(0, &build.config.build);
   141         -    let compiler_path = build.compiler_path(&compiler);
   142         -    let src_dir = &build.src.join("src/rtstartup");
   143         -    let dst_dir = &build.native_dir(target).join("rtstartup");
   144         -    let sysroot_dir = &build.sysroot_libdir(for_compiler, target);
   145         -    t!(fs::create_dir_all(dst_dir));
   146         -    t!(fs::create_dir_all(sysroot_dir));
   147         -
   148         -    for file in &["rsbegin", "rsend"] {
   149         -        let src_file = &src_dir.join(file.to_string() + ".rs");
   150         -        let dst_file = &dst_dir.join(file.to_string() + ".o");
   151         -        if !up_to_date(src_file, dst_file) {
   152         -            let mut cmd = Command::new(&compiler_path);
   153         -            build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
   154         -                        .arg("--target").arg(target)
   155         -                        .arg("--emit=obj")
   156         -                        .arg("--out-dir").arg(dst_dir)
   157         -                        .arg(src_file));
   158         -        }
   159         -
   160         -        copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
   161         -    }
   162         -
   163         -    for obj in ["crt2.o", "dllcrt2.o"].iter() {
   164         -        copy(&compiler_file(build.cc(target), obj), &sysroot_dir.join(obj));
   165         -    }
   166         -}
   167         -
   168         -/// Build libtest.
   169         -///
   170         -/// This will build libtest and supporting libraries for a particular stage of
   171         -/// the build using the `compiler` targeting the `target` architecture. The
   172         -/// artifacts created will also be linked into the sysroot directory.
   173         -pub fn test(build: &Build, target: &str, compiler: &Compiler) {
   174         -    println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
   175         -             compiler.host, target);
   176         -    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
   177         -    build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
   178         -    let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
   179         -    if let Ok(target) = env::var("MACOSX_STD_DEPLOYMENT_TARGET") {
   180         -        cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
   181         -    }
   182         -    cargo.arg("--manifest-path")
   183         -         .arg(build.src.join("src/libtest/Cargo.toml"));
   184         -    build.run(&mut cargo);
   185         -    update_mtime(build, &libtest_stamp(build, compiler, target));
   186         -}
   187         -
   188         -/// Same as `std_link`, only for libtest
   189         -pub fn test_link(build: &Build,
   190         -                 compiler: &Compiler,
   191         -                 target_compiler: &Compiler,
   192         -                 target: &str) {
   193         -    println!("Copying stage{} test from stage{} ({} -> {} / {})",
   194         -             target_compiler.stage,
   195         -             compiler.stage,
   196         -             compiler.host,
   197         -             target_compiler.host,
   198         -             target);
   199         -    let libdir = build.sysroot_libdir(&target_compiler, target);
   200         -    let out_dir = build.cargo_out(&compiler, Mode::Libtest, target);
   201         -    add_to_sysroot(&out_dir, &libdir);
   202         -}
   203         -
   204         -/// Build the compiler.
   205         -///
   206         -/// This will build the compiler for a particular stage of the build using
   207         -/// the `compiler` targeting the `target` architecture. The artifacts
   208         -/// created will also be linked into the sysroot directory.
   209         -pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
   210         -    println!("Building stage{} compiler artifacts ({} -> {})",
   211         -             compiler.stage, compiler.host, target);
   212         -
   213         -    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
   214         -    build.clear_if_dirty(&out_dir, &libtest_stamp(build, compiler, target));
   215         -
   216         -    let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
   217         -    cargo.arg("--features").arg(build.rustc_features())
   218         -         .arg("--manifest-path")
   219         -         .arg(build.src.join("src/rustc/Cargo.toml"));
   220         -
   221         -    // Set some configuration variables picked up by build scripts and
   222         -    // the compiler alike
   223         -    cargo.env("CFG_RELEASE", build.rust_release())
   224         -         .env("CFG_RELEASE_CHANNEL", &build.config.channel)
   225         -         .env("CFG_VERSION", build.rust_version())
   226         -         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or(PathBuf::new()));
   227         -
   228         -    if compiler.stage == 0 {
   229         -        cargo.env("CFG_LIBDIR_RELATIVE", "lib");
   230         -    } else {
   231         -        let libdir_relative = build.config.libdir_relative.clone().unwrap_or(PathBuf::from("lib"));
   232         -        cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
   233         -    }
   234         -
   235         -    // If we're not building a compiler with debugging information then remove
   236         -    // these two env vars which would be set otherwise.
   237         -    if build.config.rust_debuginfo_only_std {
   238         -        cargo.env_remove("RUSTC_DEBUGINFO");
   239         -        cargo.env_remove("RUSTC_DEBUGINFO_LINES");
   240         -    }
   241         -
   242         -    if let Some(ref ver_date) = build.rust_info.commit_date() {
   243         -        cargo.env("CFG_VER_DATE", ver_date);
   244         -    }
   245         -    if let Some(ref ver_hash) = build.rust_info.sha() {
   246         -        cargo.env("CFG_VER_HASH", ver_hash);
   247         -    }
   248         -    if !build.unstable_features() {
   249         -        cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
   250         -    }
   251         -    // Flag that rust llvm is in use
   252         -    if build.is_rust_llvm(target) {
   253         -        cargo.env("LLVM_RUSTLLVM", "1");
   254         -    }
   255         -    cargo.env("LLVM_CONFIG", build.llvm_config(target));
   256         -    let target_config = build.config.target_config.get(target);
   257         -    if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
   258         -        cargo.env("CFG_LLVM_ROOT", s);
   259         -    }
   260         -    // Building with a static libstdc++ is only supported on linux right now,
   261         -    // not for MSVC or macOS
   262         -    if build.config.llvm_static_stdcpp &&
   263         -       !target.contains("windows") &&
   264         -       !target.contains("apple") {
   265         -        cargo.env("LLVM_STATIC_STDCPP",
   266         -                  compiler_file(build.cxx(target), "libstdc++.a"));
   267         -    }
   268         -    if build.config.llvm_link_shared {
   269         -        cargo.env("LLVM_LINK_SHARED", "1");
   270         -    }
   271         -    if let Some(ref s) = build.config.rustc_default_linker {
   272         -        cargo.env("CFG_DEFAULT_LINKER", s);
   273         -    }
   274         -    if let Some(ref s) = build.config.rustc_default_ar {
   275         -        cargo.env("CFG_DEFAULT_AR", s);
   276         -    }
   277         -    build.run(&mut cargo);
   278         -}
   279         -
   280         -/// Same as `std_link`, only for librustc
   281         -pub fn rustc_link(build: &Build,
   282         -                  compiler: &Compiler,
   283         -                  target_compiler: &Compiler,
   284         -                  target: &str) {
   285         -    println!("Copying stage{} rustc from stage{} ({} -> {} / {})",
   286         -             target_compiler.stage,
   287         -             compiler.stage,
   288         -             compiler.host,
   289         -             target_compiler.host,
   290         -             target);
   291         -    let libdir = build.sysroot_libdir(&target_compiler, target);
   292         -    let out_dir = build.cargo_out(&compiler, Mode::Librustc, target);
   293         -    add_to_sysroot(&out_dir, &libdir);
   294         -}
   295         -
   296         -/// Cargo's output path for the standard library in a given stage, compiled
   297         -/// by a particular compiler for the specified target.
   298         -fn libstd_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
   299         -    build.cargo_out(compiler, Mode::Libstd, target).join(".libstd.stamp")
   300         -}
   301         -
   302         -/// Cargo's output path for libtest in a given stage, compiled by a particular
   303         -/// compiler for the specified target.
   304         -fn libtest_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
   305         -    build.cargo_out(compiler, Mode::Libtest, target).join(".libtest.stamp")
   306         -}
   307         -
   308         -fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
   309         -    let out = output(Command::new(compiler)
   310         -                            .arg(format!("-print-file-name={}", file)));
   311         -    PathBuf::from(out.trim())
   312         -}
   313         -
   314         -pub fn create_sysroot(build: &Build, compiler: &Compiler) {
   315         -    let sysroot = build.sysroot(compiler);
   316         -    let _ = fs::remove_dir_all(&sysroot);
   317         -    t!(fs::create_dir_all(&sysroot));
   318         -}
   319         -
   320         -/// Prepare a new compiler from the artifacts in `stage`
   321         -///
   322         -/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
   323         -/// must have been previously produced by the `stage - 1` build.config.build
   324         -/// compiler.
   325         -pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
   326         -    // nothing to do in stage0
   327         -    if stage == 0 {
   328         -        return
   329         -    }
   330         -
   331         -    println!("Copying stage{} compiler ({})", stage, host);
   332         -
   333         -    // The compiler that we're assembling
   334         -    let target_compiler = Compiler::new(stage, host);
   335         -
   336         -    // The compiler that compiled the compiler we're assembling
   337         -    let build_compiler = Compiler::new(stage - 1, &build.config.build);
   338         -
   339         -    // Link in all dylibs to the libdir
   340         -    let sysroot = build.sysroot(&target_compiler);
   341         -    let sysroot_libdir = sysroot.join(libdir(host));
   342         -    t!(fs::create_dir_all(&sysroot_libdir));
   343         -    let src_libdir = build.sysroot_libdir(&build_compiler, host);
   344         -    for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
   345         -        let filename = f.file_name().into_string().unwrap();
   346         -        if is_dylib(&filename) {
   347         -            copy(&f.path(), &sysroot_libdir.join(&filename));
   348         -        }
   349         -    }
   350         -
   351         -    let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
   352         -
   353         -    // Link the compiler binary itself into place
   354         -    let rustc = out_dir.join(exe("rustc", host));
   355         -    let bindir = sysroot.join("bin");
   356         -    t!(fs::create_dir_all(&bindir));
   357         -    let compiler = build.compiler_path(&Compiler::new(stage, host));
   358         -    let _ = fs::remove_file(&compiler);
   359         -    copy(&rustc, &compiler);
   360         -
   361         -    // See if rustdoc exists to link it into place
   362         -    let rustdoc = exe("rustdoc", host);
   363         -    let rustdoc_src = out_dir.join(&rustdoc);
   364         -    let rustdoc_dst = bindir.join(&rustdoc);
   365         -    if fs::metadata(&rustdoc_src).is_ok() {
   366         -        let _ = fs::remove_file(&rustdoc_dst);
   367         -        copy(&rustdoc_src, &rustdoc_dst);
   368         -    }
   369         -}
   370         -
   371         -/// Link some files into a rustc sysroot.
   372         -///
   373         -/// For a particular stage this will link all of the contents of `out_dir`
   374         -/// into the sysroot of the `host` compiler, assuming the artifacts are
   375         -/// compiled for the specified `target`.
   376         -fn add_to_sysroot(out_dir: &Path, sysroot_dst: &Path) {
   377         -    // Collect the set of all files in the dependencies directory, keyed
   378         -    // off the name of the library. We assume everything is of the form
   379         -    // `foo-<hash>.{rlib,so,...}`, and there could be multiple different
   380         -    // `<hash>` values for the same name (of old builds).
   381         -    let mut map = HashMap::new();
   382         -    for file in t!(fs::read_dir(out_dir.join("deps"))).map(|f| t!(f)) {
   383         -        let filename = file.file_name().into_string().unwrap();
   384         -
   385         -        // We're only interested in linking rlibs and dylibs; other things,
   386         -        // like unit tests, don't get linked in.
   387         -        if !filename.ends_with(".rlib") &&
   388         -           !filename.ends_with(".lib") &&
   389         -           !is_dylib(&filename) {
   390         -            continue
   391         -        }
   392         -        let file = file.path();
   393         -        let dash = filename.find("-").unwrap();
   394         -        let key = (filename[..dash].to_string(),
   395         -                   file.extension().unwrap().to_owned());
   396         -        map.entry(key).or_insert(Vec::new())
   397         -           .push(file.clone());
   398         -    }
   399         -
   400         -    // For all hash values found, pick the most recent one to move into the
   401         -    // sysroot, that should be the one we just built.
   402         -    for (_, paths) in map {
   403         -        let (_, path) = paths.iter().map(|path| {
   404         -            (mtime(&path).seconds(), path)
   405         -        }).max().unwrap();
   406         -        copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
   407         -    }
   408         -}
   409         -
   410         -/// Build a tool in `src/tools`
   411         -///
   412         -/// This will build the specified tool with the specified `host` compiler in
   413         -/// `stage` into the normal cargo output directory.
   414         -pub fn tool(build: &Build, stage: u32, target: &str, tool: &str) {
   415         -    println!("Building stage{} tool {} ({})", stage, tool, target);
   416         -
   417         -    let compiler = Compiler::new(stage, &build.config.build);
   418         -
   419         -    // FIXME: need to clear out previous tool and ideally deps, may require
   420         -    //        isolating output directories or require a pseudo shim step to
   421         -    //        clear out all the info.
   422         -    //
   423         -    //        Maybe when libstd is compiled it should clear out the rustc of the
   424         -    //        corresponding stage?
   425         -    // let out_dir = build.cargo_out(stage, &host, Mode::Librustc, target);
   426         -    // build.clear_if_dirty(&out_dir, &libstd_stamp(build, stage, &host, target));
   427         -
   428         -    let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build");
   429         -    let mut dir = build.src.join(tool);
   430         -    if !dir.exists() {
   431         -        dir = build.src.join("src/tools").join(tool);
   432         -    }
   433         -    cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
   434         -
   435         -    // We don't want to build tools dynamically as they'll be running across
   436         -    // stages and such and it's just easier if they're not dynamically linked.
   437         -    cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
   438         -
   439         -    if let Some(dir) = build.openssl_install_dir(target) {
   440         -        cargo.env("OPENSSL_STATIC", "1");
   441         -        cargo.env("OPENSSL_DIR", dir);
   442         -        cargo.env("LIBZ_SYS_STATIC", "1");
   443         -    }
   444         -
   445         -    cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel);
   446         -
   447         -    let info = GitInfo::new(&dir);
   448         -    if let Some(sha) = info.sha() {
   449         -        cargo.env("CFG_COMMIT_HASH", sha);
   450         -    }
   451         -    if let Some(sha_short) = info.sha_short() {
   452         -        cargo.env("CFG_SHORT_COMMIT_HASH", sha_short);
   453         -    }
   454         -    if let Some(date) = info.commit_date() {
   455         -        cargo.env("CFG_COMMIT_DATE", date);
   456         -    }
   457         -
   458         -    build.run(&mut cargo);
   459         -}
   460         -
   461         -/// Updates the mtime of a stamp file if necessary, only changing it if it's
   462         -/// older than some other library file in the same directory.
   463         -///
   464         -/// We don't know what file Cargo is going to output (because there's a hash in
   465         -/// the file name) but we know where it's going to put it. We use this helper to
   466         -/// detect changes to that output file by looking at the modification time for
   467         -/// all files in a directory and updating the stamp if any are newer.
   468         -///
   469         -/// Note that we only consider Rust libraries as that's what we're interested in
   470         -/// propagating changes from. Files like executables are tracked elsewhere.
   471         -fn update_mtime(build: &Build, path: &Path) {
   472         -    let entries = match path.parent().unwrap().join("deps").read_dir() {
   473         -        Ok(entries) => entries,
   474         -        Err(_) => return,
   475         -    };
   476         -    let files = entries.map(|e| t!(e)).filter(|e| t!(e.file_type()).is_file());
   477         -    let files = files.filter(|e| {
   478         -        let filename = e.file_name();
   479         -        let filename = filename.to_str().unwrap();
   480         -        filename.ends_with(".rlib") ||
   481         -            filename.ends_with(".lib") ||
   482         -            is_dylib(&filename)
   483         -    });
   484         -    let max = files.max_by_key(|entry| {
   485         -        let meta = t!(entry.metadata());
   486         -        FileTime::from_last_modification_time(&meta)
   487         -    });
   488         -    let max = match max {
   489         -        Some(max) => max,
   490         -        None => return,
   491         -    };
   492         -
   493         -    if mtime(&max.path()) > mtime(path) {
   494         -        build.verbose(&format!("updating {:?} as {:?} changed", path, max.path()));
   495         -        t!(File::create(path));
   496         -    }
   497         -}
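
One detail of the deleted compile.rs above worth noting: `add_to_sysroot` tolerates stale artifacts because Cargo's `deps` directory can contain several `name-<hash>.<ext>` files for the same crate, and the entry with the newest modification time wins. The sketch below restates that selection rule over made-up (file name, mtime) pairs rather than real directory entries; the function and data here are illustrative.

    use std::collections::HashMap;

    // Group artifact names of the form `name-<hash>.<ext>` by (name, extension)
    // and keep only the entry with the largest mtime, mirroring the
    // "most recent build wins" rule in `add_to_sysroot`.
    fn newest_per_crate(files: &[(&str, u64)]) -> Vec<String> {
        let mut newest: HashMap<(String, String), (u64, String)> = HashMap::new();
        for &(filename, mtime) in files {
            // Entries are expected to look like `name-<hash>.<ext>`; skip anything else.
            let dash = match filename.find('-') {
                Some(i) => i,
                None => continue,
            };
            let ext = filename.rsplit('.').next().unwrap_or("").to_string();
            let key = (filename[..dash].to_string(), ext);
            let entry = newest.entry(key).or_insert((0, String::new()));
            if mtime >= entry.0 {
                *entry = (mtime, filename.to_string());
            }
        }
        newest.into_iter().map(|(_, (_, name))| name).collect()
    }

    fn main() {
        let picked = newest_per_crate(&[
            ("std-1111.rlib", 100),
            ("std-2222.rlib", 200), // a later rebuild of the same crate
            ("core-3333.rlib", 150),
        ]);
        assert!(picked.contains(&"std-2222.rlib".to_string()));
        assert!(!picked.contains(&"std-1111.rlib".to_string()));
        assert!(picked.contains(&"core-3333.rlib".to_string()));
    }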

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/config.rs version [0397316892].

     1         -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Serialized configuration of a build.
    12         -//!
    13         -//! This module implements parsing `config.mk` and `config.toml` configuration
    14         -//! files to tweak how the build runs.
    15         -
    16         -use std::collections::HashMap;
    17         -use std::env;
    18         -use std::fs::File;
    19         -use std::io::prelude::*;
    20         -use std::path::PathBuf;
    21         -use std::process;
    22         -
    23         -use num_cpus;
    24         -use rustc_serialize::Decodable;
    25         -use toml::{Parser, Decoder, Value};
    26         -use util::push_exe_path;
    27         -
    28         -/// Global configuration for the entire build and/or bootstrap.
    29         -///
    30         -/// This structure is derived from a combination of both `config.toml` and
    31         -/// `config.mk`. As of the time of this writing it's unlikely that `config.toml`
    32         -/// is used all that much, so this is primarily filled out by `config.mk` which
    33         -/// is generated from `./configure`.
    34         -///
    35         -/// Note that this structure is not decoded into directly; rather, it is
    36         -/// filled out from the decoded forms of the structs below. For documentation
    37         -/// of each field, see the corresponding fields in
    38         -/// `src/bootstrap/config.toml.example`.
    39         -#[derive(Default)]
    40         -pub struct Config {
    41         -    pub ccache: Option<String>,
    42         -    pub ninja: bool,
    43         -    pub verbose: usize,
    44         -    pub submodules: bool,
    45         -    pub compiler_docs: bool,
    46         -    pub docs: bool,
    47         -    pub locked_deps: bool,
    48         -    pub vendor: bool,
    49         -    pub target_config: HashMap<String, Target>,
    50         -    pub full_bootstrap: bool,
    51         -    pub extended: bool,
    52         -    pub sanitizers: bool,
    53         -
    54         -    // llvm codegen options
    55         -    pub llvm_assertions: bool,
    56         -    pub llvm_optimize: bool,
    57         -    pub llvm_release_debuginfo: bool,
    58         -    pub llvm_version_check: bool,
    59         -    pub llvm_static_stdcpp: bool,
    60         -    pub llvm_link_shared: bool,
    61         -    pub llvm_targets: Option<String>,
    62         -    pub llvm_link_jobs: Option<u32>,
    63         -    pub llvm_clean_rebuild: bool,
    64         -
    65         -    // rust codegen options
    66         -    pub rust_optimize: bool,
    67         -    pub rust_codegen_units: u32,
    68         -    pub rust_debug_assertions: bool,
    69         -    pub rust_debuginfo: bool,
    70         -    pub rust_debuginfo_lines: bool,
    71         -    pub rust_debuginfo_only_std: bool,
    72         -    pub rust_rpath: bool,
    73         -    pub rustc_default_linker: Option<String>,
    74         -    pub rustc_default_ar: Option<String>,
    75         -    pub rust_optimize_tests: bool,
    76         -    pub rust_debuginfo_tests: bool,
    77         -    pub rust_save_analysis: bool,
    78         -    pub rust_dist_src: bool,
    79         -
    80         -    pub build: String,
    81         -    pub host: Vec<String>,
    82         -    pub target: Vec<String>,
    83         -    pub rustc: Option<PathBuf>,
    84         -    pub cargo: Option<PathBuf>,
    85         -    pub local_rebuild: bool,
    86         -
    87         -    // dist misc
    88         -    pub dist_sign_folder: Option<PathBuf>,
    89         -    pub dist_upload_addr: Option<String>,
    90         -    pub dist_gpg_password_file: Option<PathBuf>,
    91         -
    92         -    // libstd features
    93         -    pub debug_jemalloc: bool,
    94         -    pub use_jemalloc: bool,
    95         -    pub backtrace: bool, // support for RUST_BACKTRACE
    96         -
    97         -    // misc
    98         -    pub channel: String,
    99         -    pub quiet_tests: bool,
   100         -    // Fallback musl-root for all targets
   101         -    pub musl_root: Option<PathBuf>,
   102         -    pub prefix: Option<PathBuf>,
   103         -    pub docdir: Option<PathBuf>,
   104         -    pub libdir: Option<PathBuf>,
   105         -    pub libdir_relative: Option<PathBuf>,
   106         -    pub mandir: Option<PathBuf>,
   107         -    pub codegen_tests: bool,
   108         -    pub nodejs: Option<PathBuf>,
   109         -    pub gdb: Option<PathBuf>,
   110         -    pub python: Option<PathBuf>,
   111         -    pub configure_args: Vec<String>,
   112         -    pub openssl_static: bool,
   113         -}
   114         -
   115         -/// Per-target configuration stored in the global configuration structure.
   116         -#[derive(Default)]
   117         -pub struct Target {
   118         -    pub llvm_config: Option<PathBuf>,
   119         -    pub jemalloc: Option<PathBuf>,
   120         -    pub cc: Option<PathBuf>,
   121         -    pub cxx: Option<PathBuf>,
   122         -    pub ndk: Option<PathBuf>,
   123         -    pub musl_root: Option<PathBuf>,
   124         -    pub qemu_rootfs: Option<PathBuf>,
   125         -}
   126         -
   127         -/// Structure of the `config.toml` file that configuration is read from.
   128         -///
   129         -/// This structure uses `Decodable` to automatically decode a TOML configuration
   130         -/// file into this format, and then this is traversed and written into the above
   131         -/// `Config` structure.
   132         -#[derive(RustcDecodable, Default)]
   133         -struct TomlConfig {
   134         -    build: Option<Build>,
   135         -    install: Option<Install>,
   136         -    llvm: Option<Llvm>,
   137         -    rust: Option<Rust>,
   138         -    target: Option<HashMap<String, TomlTarget>>,
   139         -    dist: Option<Dist>,
   140         -}
   141         -
   142         -/// TOML representation of various global build decisions.
   143         -#[derive(RustcDecodable, Default, Clone)]
   144         -struct Build {
   145         -    build: Option<String>,
   146         -    host: Vec<String>,
   147         -    target: Vec<String>,
   148         -    cargo: Option<String>,
   149         -    rustc: Option<String>,
   150         -    compiler_docs: Option<bool>,
   151         -    docs: Option<bool>,
   152         -    submodules: Option<bool>,
   153         -    gdb: Option<String>,
   154         -    locked_deps: Option<bool>,
   155         -    vendor: Option<bool>,
   156         -    nodejs: Option<String>,
   157         -    python: Option<String>,
   158         -    full_bootstrap: Option<bool>,
   159         -    extended: Option<bool>,
   160         -    verbose: Option<usize>,
   161         -    sanitizers: Option<bool>,
   162         -    openssl_static: Option<bool>,
   163         -}
   164         -
   165         -/// TOML representation of various global install decisions.
   166         -#[derive(RustcDecodable, Default, Clone)]
   167         -struct Install {
   168         -    prefix: Option<String>,
   169         -    mandir: Option<String>,
   170         -    docdir: Option<String>,
   171         -    libdir: Option<String>,
   172         -}
   173         -
   174         -/// TOML representation of how the LLVM build is configured.
   175         -#[derive(RustcDecodable, Default)]
   176         -struct Llvm {
   177         -    ccache: Option<StringOrBool>,
   178         -    ninja: Option<bool>,
   179         -    assertions: Option<bool>,
   180         -    optimize: Option<bool>,
   181         -    release_debuginfo: Option<bool>,
   182         -    version_check: Option<bool>,
   183         -    static_libstdcpp: Option<bool>,
   184         -    targets: Option<String>,
   185         -    link_jobs: Option<u32>,
   186         -    clean_rebuild: Option<bool>,
   187         -}
   188         -
   189         -#[derive(RustcDecodable, Default, Clone)]
   190         -struct Dist {
   191         -    sign_folder: Option<String>,
   192         -    gpg_password_file: Option<String>,
   193         -    upload_addr: Option<String>,
   194         -    src_tarball: Option<bool>,
   195         -}
   196         -
   197         -#[derive(RustcDecodable)]
   198         -enum StringOrBool {
   199         -    String(String),
   200         -    Bool(bool),
   201         -}
   202         -
   203         -impl Default for StringOrBool {
   204         -    fn default() -> StringOrBool {
   205         -        StringOrBool::Bool(false)
   206         -    }
   207         -}
   208         -
   209         -/// TOML representation of how the Rust build is configured.
   210         -#[derive(RustcDecodable, Default)]
   211         -struct Rust {
   212         -    optimize: Option<bool>,
   213         -    codegen_units: Option<u32>,
   214         -    debug_assertions: Option<bool>,
   215         -    debuginfo: Option<bool>,
   216         -    debuginfo_lines: Option<bool>,
   217         -    debuginfo_only_std: Option<bool>,
   218         -    debug_jemalloc: Option<bool>,
   219         -    use_jemalloc: Option<bool>,
   220         -    backtrace: Option<bool>,
   221         -    default_linker: Option<String>,
   222         -    default_ar: Option<String>,
   223         -    channel: Option<String>,
   224         -    musl_root: Option<String>,
   225         -    rpath: Option<bool>,
   226         -    optimize_tests: Option<bool>,
   227         -    debuginfo_tests: Option<bool>,
   228         -    codegen_tests: Option<bool>,
   229         -    save_analysis: Option<bool>,
   230         -}
   231         -
   232         -/// TOML representation of how each build target is configured.
   233         -#[derive(RustcDecodable, Default)]
   234         -struct TomlTarget {
   235         -    llvm_config: Option<String>,
   236         -    jemalloc: Option<String>,
   237         -    cc: Option<String>,
   238         -    cxx: Option<String>,
   239         -    android_ndk: Option<String>,
   240         -    musl_root: Option<String>,
   241         -    qemu_rootfs: Option<String>,
   242         -}
   243         -
   244         -impl Config {
   245         -    pub fn parse(build: &str, file: Option<PathBuf>) -> Config {
   246         -        let mut config = Config::default();
   247         -        config.llvm_optimize = true;
   248         -        config.use_jemalloc = true;
   249         -        config.backtrace = true;
   250         -        config.rust_optimize = true;
   251         -        config.rust_optimize_tests = true;
   252         -        config.submodules = true;
   253         -        config.docs = true;
   254         -        config.rust_rpath = true;
   255         -        config.rust_codegen_units = 1;
   256         -        config.build = build.to_string();
   257         -        config.channel = "dev".to_string();
   258         -        config.codegen_tests = true;
   259         -        config.rust_dist_src = true;
   260         -
   261         -        let toml = file.map(|file| {
   262         -            let mut f = t!(File::open(&file));
   263         -            let mut toml = String::new();
   264         -            t!(f.read_to_string(&mut toml));
   265         -            let mut p = Parser::new(&toml);
   266         -            let table = match p.parse() {
   267         -                Some(table) => table,
   268         -                None => {
   269         -                    println!("failed to parse TOML configuration:");
   270         -                    for err in p.errors.iter() {
   271         -                        let (loline, locol) = p.to_linecol(err.lo);
   272         -                        let (hiline, hicol) = p.to_linecol(err.hi);
   273         -                        println!("{}:{}-{}:{}: {}", loline, locol, hiline,
   274         -                                 hicol, err.desc);
   275         -                    }
   276         -                    process::exit(2);
   277         -                }
   278         -            };
   279         -            let mut d = Decoder::new(Value::Table(table));
   280         -            match Decodable::decode(&mut d) {
   281         -                Ok(cfg) => cfg,
   282         -                Err(e) => {
   283         -                    println!("failed to decode TOML: {}", e);
   284         -                    process::exit(2);
   285         -                }
   286         -            }
   287         -        }).unwrap_or_else(|| TomlConfig::default());
   288         -
   289         -        let build = toml.build.clone().unwrap_or(Build::default());
   290         -        set(&mut config.build, build.build.clone());
   291         -        config.host.push(config.build.clone());
   292         -        for host in build.host.iter() {
   293         -            if !config.host.contains(host) {
   294         -                config.host.push(host.clone());
   295         -            }
   296         -        }
   297         -        for target in config.host.iter().chain(&build.target) {
   298         -            if !config.target.contains(target) {
   299         -                config.target.push(target.clone());
   300         -            }
   301         -        }
   302         -        config.rustc = build.rustc.map(PathBuf::from);
   303         -        config.cargo = build.cargo.map(PathBuf::from);
   304         -        config.nodejs = build.nodejs.map(PathBuf::from);
   305         -        config.gdb = build.gdb.map(PathBuf::from);
   306         -        config.python = build.python.map(PathBuf::from);
   307         -        set(&mut config.compiler_docs, build.compiler_docs);
   308         -        set(&mut config.docs, build.docs);
   309         -        set(&mut config.submodules, build.submodules);
   310         -        set(&mut config.locked_deps, build.locked_deps);
   311         -        set(&mut config.vendor, build.vendor);
   312         -        set(&mut config.full_bootstrap, build.full_bootstrap);
   313         -        set(&mut config.extended, build.extended);
   314         -        set(&mut config.verbose, build.verbose);
   315         -        set(&mut config.sanitizers, build.sanitizers);
   316         -        set(&mut config.openssl_static, build.openssl_static);
   317         -
   318         -        if let Some(ref install) = toml.install {
   319         -            config.prefix = install.prefix.clone().map(PathBuf::from);
   320         -            config.mandir = install.mandir.clone().map(PathBuf::from);
   321         -            config.docdir = install.docdir.clone().map(PathBuf::from);
   322         -            config.libdir = install.libdir.clone().map(PathBuf::from);
   323         -        }
   324         -
   325         -        if let Some(ref llvm) = toml.llvm {
   326         -            match llvm.ccache {
   327         -                Some(StringOrBool::String(ref s)) => {
   328         -                    config.ccache = Some(s.to_string())
   329         -                }
   330         -                Some(StringOrBool::Bool(true)) => {
   331         -                    config.ccache = Some("ccache".to_string());
   332         -                }
   333         -                Some(StringOrBool::Bool(false)) | None => {}
   334         -            }
   335         -            set(&mut config.ninja, llvm.ninja);
   336         -            set(&mut config.llvm_assertions, llvm.assertions);
   337         -            set(&mut config.llvm_optimize, llvm.optimize);
   338         -            set(&mut config.llvm_release_debuginfo, llvm.release_debuginfo);
   339         -            set(&mut config.llvm_version_check, llvm.version_check);
   340         -            set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
   341         -            set(&mut config.llvm_clean_rebuild, llvm.clean_rebuild);
   342         -            config.llvm_targets = llvm.targets.clone();
   343         -            config.llvm_link_jobs = llvm.link_jobs;
   344         -        }
   345         -
   346         -        if let Some(ref rust) = toml.rust {
   347         -            set(&mut config.rust_debug_assertions, rust.debug_assertions);
   348         -            set(&mut config.rust_debuginfo, rust.debuginfo);
   349         -            set(&mut config.rust_debuginfo_lines, rust.debuginfo_lines);
   350         -            set(&mut config.rust_debuginfo_only_std, rust.debuginfo_only_std);
   351         -            set(&mut config.rust_optimize, rust.optimize);
   352         -            set(&mut config.rust_optimize_tests, rust.optimize_tests);
   353         -            set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests);
   354         -            set(&mut config.codegen_tests, rust.codegen_tests);
   355         -            set(&mut config.rust_save_analysis, rust.save_analysis);
   356         -            set(&mut config.rust_rpath, rust.rpath);
   357         -            set(&mut config.debug_jemalloc, rust.debug_jemalloc);
   358         -            set(&mut config.use_jemalloc, rust.use_jemalloc);
   359         -            set(&mut config.backtrace, rust.backtrace);
   360         -            set(&mut config.channel, rust.channel.clone());
   361         -            config.rustc_default_linker = rust.default_linker.clone();
   362         -            config.rustc_default_ar = rust.default_ar.clone();
   363         -            config.musl_root = rust.musl_root.clone().map(PathBuf::from);
   364         -
   365         -            match rust.codegen_units {
   366         -                Some(0) => config.rust_codegen_units = num_cpus::get() as u32,
   367         -                Some(n) => config.rust_codegen_units = n,
   368         -                None => {}
   369         -            }
   370         -        }
   371         -
   372         -        if let Some(ref t) = toml.target {
   373         -            for (triple, cfg) in t {
   374         -                let mut target = Target::default();
   375         -
   376         -                if let Some(ref s) = cfg.llvm_config {
   377         -                    target.llvm_config = Some(env::current_dir().unwrap().join(s));
   378         -                }
   379         -                if let Some(ref s) = cfg.jemalloc {
   380         -                    target.jemalloc = Some(env::current_dir().unwrap().join(s));
   381         -                }
   382         -                if let Some(ref s) = cfg.android_ndk {
   383         -                    target.ndk = Some(env::current_dir().unwrap().join(s));
   384         -                }
   385         -                target.cxx = cfg.cxx.clone().map(PathBuf::from);
   386         -                target.cc = cfg.cc.clone().map(PathBuf::from);
   387         -                target.musl_root = cfg.musl_root.clone().map(PathBuf::from);
   388         -                target.qemu_rootfs = cfg.qemu_rootfs.clone().map(PathBuf::from);
   389         -
   390         -                config.target_config.insert(triple.clone(), target);
   391         -            }
   392         -        }
   393         -
   394         -        if let Some(ref t) = toml.dist {
   395         -            config.dist_sign_folder = t.sign_folder.clone().map(PathBuf::from);
   396         -            config.dist_gpg_password_file = t.gpg_password_file.clone().map(PathBuf::from);
   397         -            config.dist_upload_addr = t.upload_addr.clone();
   398         -            set(&mut config.rust_dist_src, t.src_tarball);
   399         -        }
   400         -
   401         -        return config
   402         -    }
   403         -
   404         -    /// "Temporary" routine to parse `config.mk` into this configuration.
   405         -    ///
   406         -/// While we still have `./configure`, this implements the ability to decode
   407         -/// that configuration into this structure. It isn't exactly a full-blown
   408         -/// makefile parser, but hey, it gets the job done!
   409         -    pub fn update_with_config_mk(&mut self) {
   410         -        let mut config = String::new();
   411         -        File::open("config.mk").unwrap().read_to_string(&mut config).unwrap();
   412         -        for line in config.lines() {
   413         -            let mut parts = line.splitn(2, ":=").map(|s| s.trim());
   414         -            let key = parts.next().unwrap();
   415         -            let value = match parts.next() {
   416         -                Some(n) if n.starts_with('\"') => &n[1..n.len() - 1],
   417         -                Some(n) => n,
   418         -                None => continue
   419         -            };
   420         -
   421         -            macro_rules! check {
   422         -                ($(($name:expr, $val:expr),)*) => {
   423         -                    if value == "1" {
   424         -                        $(
   425         -                            if key == concat!("CFG_ENABLE_", $name) {
   426         -                                $val = true;
   427         -                                continue
   428         -                            }
   429         -                            if key == concat!("CFG_DISABLE_", $name) {
   430         -                                $val = false;
   431         -                                continue
   432         -                            }
   433         -                        )*
   434         -                    }
   435         -                }
   436         -            }
   437         -
   438         -            check! {
   439         -                ("MANAGE_SUBMODULES", self.submodules),
   440         -                ("COMPILER_DOCS", self.compiler_docs),
   441         -                ("DOCS", self.docs),
   442         -                ("LLVM_ASSERTIONS", self.llvm_assertions),
   443         -                ("LLVM_RELEASE_DEBUGINFO", self.llvm_release_debuginfo),
   444         -                ("OPTIMIZE_LLVM", self.llvm_optimize),
   445         -                ("LLVM_VERSION_CHECK", self.llvm_version_check),
   446         -                ("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp),
   447         -                ("LLVM_LINK_SHARED", self.llvm_link_shared),
   448         -                ("LLVM_CLEAN_REBUILD", self.llvm_clean_rebuild),
   449         -                ("OPTIMIZE", self.rust_optimize),
   450         -                ("DEBUG_ASSERTIONS", self.rust_debug_assertions),
   451         -                ("DEBUGINFO", self.rust_debuginfo),
   452         -                ("DEBUGINFO_LINES", self.rust_debuginfo_lines),
   453         -                ("DEBUGINFO_ONLY_STD", self.rust_debuginfo_only_std),
   454         -                ("JEMALLOC", self.use_jemalloc),
   455         -                ("DEBUG_JEMALLOC", self.debug_jemalloc),
   456         -                ("RPATH", self.rust_rpath),
   457         -                ("OPTIMIZE_TESTS", self.rust_optimize_tests),
   458         -                ("DEBUGINFO_TESTS", self.rust_debuginfo_tests),
   459         -                ("QUIET_TESTS", self.quiet_tests),
   460         -                ("LOCAL_REBUILD", self.local_rebuild),
   461         -                ("NINJA", self.ninja),
   462         -                ("CODEGEN_TESTS", self.codegen_tests),
   463         -                ("SAVE_ANALYSIS", self.rust_save_analysis),
   464         -                ("LOCKED_DEPS", self.locked_deps),
   465         -                ("VENDOR", self.vendor),
   466         -                ("FULL_BOOTSTRAP", self.full_bootstrap),
   467         -                ("EXTENDED", self.extended),
   468         -                ("SANITIZERS", self.sanitizers),
   469         -                ("DIST_SRC", self.rust_dist_src),
   470         -                ("CARGO_OPENSSL_STATIC", self.openssl_static),
   471         -            }
   472         -
   473         -            match key {
   474         -                "CFG_BUILD" if value.len() > 0 => self.build = value.to_string(),
   475         -                "CFG_HOST" if value.len() > 0 => {
   476         -                    self.host.extend(value.split(" ").map(|s| s.to_string()));
   477         -
   478         -                }
   479         -                "CFG_TARGET" if value.len() > 0 => {
   480         -                    self.target.extend(value.split(" ").map(|s| s.to_string()));
   481         -                }
   482         -                "CFG_MUSL_ROOT" if value.len() > 0 => {
   483         -                    self.musl_root = Some(parse_configure_path(value));
   484         -                }
   485         -                "CFG_MUSL_ROOT_X86_64" if value.len() > 0 => {
   486         -                    let target = "x86_64-unknown-linux-musl".to_string();
   487         -                    let target = self.target_config.entry(target)
   488         -                                     .or_insert(Target::default());
   489         -                    target.musl_root = Some(parse_configure_path(value));
   490         -                }
   491         -                "CFG_MUSL_ROOT_I686" if value.len() > 0 => {
   492         -                    let target = "i686-unknown-linux-musl".to_string();
   493         -                    let target = self.target_config.entry(target)
   494         -                                     .or_insert(Target::default());
   495         -                    target.musl_root = Some(parse_configure_path(value));
   496         -                }
   497         -                "CFG_MUSL_ROOT_ARM" if value.len() > 0 => {
   498         -                    let target = "arm-unknown-linux-musleabi".to_string();
   499         -                    let target = self.target_config.entry(target)
   500         -                                     .or_insert(Target::default());
   501         -                    target.musl_root = Some(parse_configure_path(value));
   502         -                }
   503         -                "CFG_MUSL_ROOT_ARMHF" if value.len() > 0 => {
   504         -                    let target = "arm-unknown-linux-musleabihf".to_string();
   505         -                    let target = self.target_config.entry(target)
   506         -                                     .or_insert(Target::default());
   507         -                    target.musl_root = Some(parse_configure_path(value));
   508         -                }
   509         -                "CFG_MUSL_ROOT_ARMV7" if value.len() > 0 => {
   510         -                    let target = "armv7-unknown-linux-musleabihf".to_string();
   511         -                    let target = self.target_config.entry(target)
   512         -                                     .or_insert(Target::default());
   513         -                    target.musl_root = Some(parse_configure_path(value));
   514         -                }
   515         -                "CFG_DEFAULT_AR" if value.len() > 0 => {
   516         -                    self.rustc_default_ar = Some(value.to_string());
   517         -                }
   518         -                "CFG_DEFAULT_LINKER" if value.len() > 0 => {
   519         -                    self.rustc_default_linker = Some(value.to_string());
   520         -                }
   521         -                "CFG_GDB" if value.len() > 0 => {
   522         -                    self.gdb = Some(parse_configure_path(value));
   523         -                }
   524         -                "CFG_RELEASE_CHANNEL" => {
   525         -                    self.channel = value.to_string();
   526         -                }
   527         -                "CFG_PREFIX" => {
   528         -                    self.prefix = Some(PathBuf::from(value));
   529         -                }
   530         -                "CFG_DOCDIR" => {
   531         -                    self.docdir = Some(PathBuf::from(value));
   532         -                }
   533         -                "CFG_LIBDIR" => {
   534         -                    self.libdir = Some(PathBuf::from(value));
   535         -                }
   536         -                "CFG_LIBDIR_RELATIVE" => {
   537         -                    self.libdir_relative = Some(PathBuf::from(value));
   538         -                }
   539         -                "CFG_MANDIR" => {
   540         -                    self.mandir = Some(PathBuf::from(value));
   541         -                }
   542         -                "CFG_LLVM_ROOT" if value.len() > 0 => {
   543         -                    let target = self.target_config.entry(self.build.clone())
   544         -                                     .or_insert(Target::default());
   545         -                    let root = parse_configure_path(value);
   546         -                    target.llvm_config = Some(push_exe_path(root, &["bin", "llvm-config"]));
   547         -                }
   548         -                "CFG_JEMALLOC_ROOT" if value.len() > 0 => {
   549         -                    let target = self.target_config.entry(self.build.clone())
   550         -                                     .or_insert(Target::default());
   551         -                    target.jemalloc = Some(parse_configure_path(value).join("libjemalloc_pic.a"));
   552         -                }
   553         -                "CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
   554         -                    let target = "arm-linux-androideabi".to_string();
   555         -                    let target = self.target_config.entry(target)
   556         -                                     .or_insert(Target::default());
   557         -                    target.ndk = Some(parse_configure_path(value));
   558         -                }
   559         -                "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
   560         -                    let target = "armv7-linux-androideabi".to_string();
   561         -                    let target = self.target_config.entry(target)
   562         -                                     .or_insert(Target::default());
   563         -                    target.ndk = Some(parse_configure_path(value));
   564         -                }
   565         -                "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
   566         -                    let target = "i686-linux-android".to_string();
   567         -                    let target = self.target_config.entry(target)
   568         -                                     .or_insert(Target::default());
   569         -                    target.ndk = Some(parse_configure_path(value));
   570         -                }
   571         -                "CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
   572         -                    let target = "aarch64-linux-android".to_string();
   573         -                    let target = self.target_config.entry(target)
   574         -                                     .or_insert(Target::default());
   575         -                    target.ndk = Some(parse_configure_path(value));
   576         -                }
   577         -                "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => {
   578         -                    let path = parse_configure_path(value);
   579         -                    self.rustc = Some(push_exe_path(path.clone(), &["bin", "rustc"]));
   580         -                    self.cargo = Some(push_exe_path(path, &["bin", "cargo"]));
   581         -                }
   582         -                "CFG_PYTHON" if value.len() > 0 => {
   583         -                    let path = parse_configure_path(value);
   584         -                    self.python = Some(path);
   585         -                }
   586         -                "CFG_ENABLE_CCACHE" if value == "1" => {
   587         -                    self.ccache = Some("ccache".to_string());
   588         -                }
   589         -                "CFG_ENABLE_SCCACHE" if value == "1" => {
   590         -                    self.ccache = Some("sccache".to_string());
   591         -                }
   592         -                "CFG_CONFIGURE_ARGS" if value.len() > 0 => {
   593         -                    self.configure_args = value.split_whitespace()
   594         -                                               .map(|s| s.to_string())
   595         -                                               .collect();
   596         -                }
   597         -                "CFG_QEMU_ARMHF_ROOTFS" if value.len() > 0 => {
   598         -                    let target = "arm-unknown-linux-gnueabihf".to_string();
   599         -                    let target = self.target_config.entry(target)
   600         -                                     .or_insert(Target::default());
   601         -                    target.qemu_rootfs = Some(parse_configure_path(value));
   602         -                }
   603         -                _ => {}
   604         -            }
   605         -        }
   606         -    }
   607         -
   608         -    pub fn verbose(&self) -> bool {
   609         -        self.verbose > 0
   610         -    }
   611         -
   612         -    pub fn very_verbose(&self) -> bool {
   613         -        self.verbose > 1
   614         -    }
   615         -}
   616         -
   617         -#[cfg(not(windows))]
   618         -fn parse_configure_path(path: &str) -> PathBuf {
   619         -    path.into()
   620         -}
   621         -
   622         -#[cfg(windows)]
   623         -fn parse_configure_path(path: &str) -> PathBuf {
   624         -    // on windows, configure produces unix style paths e.g. /c/some/path but we
   625         -    // only want real windows paths
   626         -
   627         -    use std::process::Command;
   628         -    use build_helper;
   629         -
   630         -    // '/' is invalid in windows paths, so we can detect unix paths by the presence of it
   631         -    if !path.contains('/') {
   632         -        return path.into();
   633         -    }
   634         -
   635         -    let win_path = build_helper::output(Command::new("cygpath").arg("-w").arg(path));
   636         -    let win_path = win_path.trim();
   637         -
   638         -    win_path.into()
   639         -}
   640         -
   641         -fn set<T>(field: &mut T, val: Option<T>) {
   642         -    if let Some(v) = val {
   643         -        *field = v;
   644         -    }
   645         -}
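
The `update_with_config_mk` routine above is deliberately not a full makefile parser: it splits each `KEY := value` line and strips one layer of surrounding quotes. The sketch below shows just that line handling with only the standard library; the function name and sample lines are made up for illustration, and unlike the routine above it only strips quotes when both the leading and trailing quote are present.

    /// Split a `KEY := value` line from config.mk into a (key, value) pair.
    fn parse_mk_line(line: &str) -> Option<(&str, &str)> {
        let mut parts = line.splitn(2, ":=").map(|s| s.trim());
        let key = parts.next()?;
        let value = parts.next()?;
        // Strip one surrounding pair of double quotes, if present.
        let value = value
            .strip_prefix('"')
            .and_then(|v| v.strip_suffix('"'))
            .unwrap_or(value);
        Some((key, value))
    }

    fn main() {
        assert_eq!(
            parse_mk_line("CFG_BUILD := \"x86_64-unknown-linux-gnu\""),
            Some(("CFG_BUILD", "x86_64-unknown-linux-gnu"))
        );
        assert_eq!(
            parse_mk_line("CFG_ENABLE_DOCS := 1"),
            Some(("CFG_ENABLE_DOCS", "1"))
        );
        // A line without `:=` yields no pair at all.
        assert_eq!(parse_mk_line("# just a comment"), None);
    }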

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/config.toml.example version [dff2f5bba7].

     1         -# Sample TOML configuration file for building Rust.
     2         -#
     3         -# To configure rustbuild, copy this file to the directory from which you will be
     4         -# running the build, and name it config.toml.
     5         -#
     6         -# All options in this file are commented out by default, with their default
     7         -# values shown. The build system by default looks for
     8         -# `config.toml` in the current directory of a build for build configuration, but
     9         -# a custom configuration file can also be specified with `--config` to the build
    10         -# system.
    11         -
    12         -# =============================================================================
    13         -# Tweaking how LLVM is compiled
    14         -# =============================================================================
    15         -[llvm]
    16         -
    17         -# Indicates whether the LLVM build is a Release or Debug build
    18         -#optimize = true
    19         -
    20         -# Indicates whether an LLVM Release build should include debug info
    21         -#release-debuginfo = false
    22         -
    23         -# Indicates whether the LLVM assertions are enabled or not
    24         -#assertions = false
    25         -
    26         -# Indicates whether ccache is used when building LLVM
    27         -#ccache = false
    28         -# or alternatively ...
    29         -#ccache = "/path/to/ccache"
    30         -
    31         -# If an external LLVM root is specified, we automatically check the version by
    32         -# default to make sure it's within the range that we're expecting, but setting
    33         -# this flag will indicate that this version check should not be done.
    34         -#version-check = false
    35         -
    36         -# Link libstdc++ statically into the librustc_llvm instead of relying on a
    37         -# dynamic version to be available.
    38         -#static-libstdcpp = false
    39         -
    40         -# Tell the LLVM build system to use Ninja instead of the platform default for
    41         -# the generated build system. This can sometimes be faster than make, for
    42         -# example.
    43         -#ninja = false
    44         -
    45         -# LLVM targets to build support for.
    46         -# Note: this is NOT related to Rust compilation targets. However, as Rust is
    47         -# dependent on LLVM for code generation, turning targets off here WILL lead to
    48         -# the resulting rustc being unable to compile for the disabled architectures.
    49         -# Also worth pointing out is that, if support for new targets is added to
    50         -# LLVM, enabling them here doesn't mean Rust automatically gains said
    51         -# support. You'll need to write a target specification at least, and most
    52         -# likely, teach rustc about the C ABI of the target. Get in touch with the
    53         -# Rust team and file an issue if you need assistance in porting!
    54         -#targets = "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX"
    55         -
    56         -# Cap the number of parallel linker invocations when compiling LLVM.
    57         -# This can be useful when building LLVM with debug info, which significantly
    58         -# increases the size of binaries and consequently the memory required by
    59         -# each linker process.
    60         -# If absent or 0, linker invocations are treated like any other job and
    61         -# controlled by rustbuild's -j parameter.
    62         -#link-jobs = 0
    63         -
    64         -# Delete LLVM build directory on LLVM rebuild.
    65         -# This option defaults to `false` for local development, but CI may want to
    66         -# always perform clean full builds (possibly accelerated by (s)ccache).
    67         -#clean-rebuild = false
    68         -
    69         -# =============================================================================
    70         -# General build configuration options
    71         -# =============================================================================
    72         -[build]
    73         -
    74         -# Build triple for the original snapshot compiler. This must be a compiler that
    75         -# nightlies are already produced for. The current platform must be able to run
    76         -# binaries of this build triple and the nightly will be used to bootstrap the
    77         -# first compiler.
    78         -#build = "x86_64-unknown-linux-gnu"    # defaults to your host platform
    79         -
    80         -# In addition to the build triple, other triples to produce full compiler
    81         -# toolchains for. Each of these triples will be bootstrapped from the build
    82         -# triple and then will continue to bootstrap themselves. This platform must
    83         -# currently be able to run all of the triples provided here.
    84         -#host = ["x86_64-unknown-linux-gnu"]   # defaults to just the build triple
    85         -
    86         -# In addition to all host triples, other triples to produce the standard library
    87         -# for. Each host triple will be used to produce a copy of the standard library
    88         -# for each target triple.
    89         -#target = ["x86_64-unknown-linux-gnu"] # defaults to just the build triple
    90         -
    91         -# Instead of downloading the src/stage0.txt version of Cargo specified, use
    92         -# this Cargo binary instead to build all Rust code
    93         -#cargo = "/path/to/bin/cargo"
    94         -
    95         -# Instead of downloading the src/stage0.txt version of the compiler
    96         -# specified, use this rustc binary instead as the stage0 snapshot compiler.
    97         -#rustc = "/path/to/bin/rustc"
    98         -
    99         -# Flag to specify whether any documentation is built. If false, rustdoc and
   100         -# friends will still be compiled but they will not be used to generate any
   101         -# documentation.
   102         -#docs = true
   103         -
   104         -# Indicate whether the compiler should be documented in addition to the standard
   105         -# library and facade crates.
   106         -#compiler-docs = false
   107         -
   108         -# Indicate whether submodules are managed and updated automatically.
   109         -#submodules = true
   110         -
   111         -# The path to (or name of) the GDB executable to use. This is only used for
   112         -# executing the debuginfo test suite.
   113         -#gdb = "gdb"
   114         -
   115         -# The node.js executable to use. Note that this is only used for the emscripten
   116         -# target when running tests; otherwise it can be omitted.
   117         -#nodejs = "node"
   118         -
   119         -# Python interpreter to use for various tasks throughout the build, notably
   120         -# rustdoc tests, the lldb python interpreter, and some dist bits and pieces.
   121         -# Note that Python 2 is currently required.
   122         -#python = "python2.7"
   123         -
   124         -# Force Cargo to check that Cargo.lock describes the precise dependency
   125         -# set that all the Cargo.toml files create, instead of updating it.
   126         -#locked-deps = false
   127         -
   128         -# Indicate whether the vendored sources are used for Rust dependencies or not
   129         -#vendor = false
   130         -
   131         -# Typically the build system will build the rust compiler twice. The second
   132         -# compiler, however, will simply use its own libraries to link against. If you
   133         -# would rather perform a full bootstrap, compiling the compiler three times,
   134         -# then you can set this option to true. You shouldn't ever need to set this
   135         -# option to true.
   136         -#full-bootstrap = false
   137         -
   138         -# Enable a build of the extended Rust tool set, which is not only the
   139         -# compiler but also tools such as Cargo. This will also produce "combined
   140         -# installers" which are used to install Rust and Cargo together. This is
   141         -# disabled by default.
   142         -#extended = false
   143         -
   144         -# Verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose
   145         -#verbose = 0
   146         -
   147         -# Build the sanitizer runtimes
   148         -#sanitizers = false
   149         -
   150         -# Indicates whether the OpenSSL linked into Cargo will be statically linked or
   151         -# not. If static linkage is specified then the build system will download a
   152         -# known-good version of OpenSSL, compile it, and link it to Cargo.
   153         -#openssl-static = false
   154         -
   155         -# =============================================================================
   156         -# General install configuration options
   157         -# =============================================================================
   158         -[install]
   159         -
   160         -# Instead of installing to /usr/local, install to this path instead.
   161         -#prefix = "/usr/local"
   162         -
   163         -# Where to install libraries in `prefix` above
   164         -#libdir = "lib"
   165         -
   166         -# Where to install man pages in `prefix` above
   167         -#mandir = "share/man"
   168         -
   169         -# Where to install documentation in `prefix` above
   170         -#docdir = "share/doc/rust"
   171         -
   172         -# =============================================================================
   173         -# Options for compiling Rust code itself
   174         -# =============================================================================
   175         -[rust]
   176         -
   177         -# Whether or not to optimize the compiler and standard library
   178         -#optimize = true
   179         -
   180         -# Number of codegen units to use for each compiler invocation. A value of 0
   181         -# means "the number of cores on this machine", and 1+ is passed through to the
   182         -# compiler.
   183         -#codegen-units = 1
   184         -
   185         -# Whether or not debug assertions are enabled for the compiler and standard
   186         -# library
   187         -#debug-assertions = false
   188         -
   189         -# Whether or not debuginfo is emitted
   190         -#debuginfo = false
   191         -
   192         -# Whether or not line number debug information is emitted
   193         -#debuginfo-lines = false
   194         -
   195         -# If debuginfo is enabled, whether to emit it only for the standard library.
   196         -# If enabled, this will not compile the compiler with debuginfo, just the
   197         -# standard library.
   198         -#debuginfo-only-std = false
   199         -
   200         -# Whether or not jemalloc is built and enabled
   201         -#use-jemalloc = true
   202         -
   203         -# Whether or not jemalloc is built with its debug option set
   204         -#debug-jemalloc = false
   205         -
   206         -# Whether or not `panic!`s generate backtraces (RUST_BACKTRACE)
   207         -#backtrace = true
   208         -
   209         -# The default linker that will be used by the generated compiler. Note that this
   210         -# is not the linker used to link said compiler.
   211         -#default-linker = "cc"
   212         -
   213         -# The default ar utility that will be used by the generated compiler if LLVM
   214         -# cannot be used. Note that this is not used to assemble said compiler.
   215         -#default-ar = "ar"
   216         -
   217         -# The "channel" for the Rust build to produce. The stable/beta channels only
   218         -# allow using stable features, whereas the nightly and dev channels allow using
   219         -# nightly features
   220         -#channel = "dev"
   221         -
   222         -# By default the `rustc` executable is built with `-Wl,-rpath` flags on Unix
   223         -# platforms to ensure that the compiler is usable by default from the build
   224         -# directory (as it links to a number of dynamic libraries). This may not be
   225         -# desired in distributions, for example.
   226         -#rpath = true
   227         -
   228         -# Flag indicating whether tests are compiled with optimizations (the -O flag) or
   229         -# with debuginfo (the -g flag)
   230         -#optimize-tests = true
   231         -#debuginfo-tests = true
   232         -
   233         -# Flag indicating whether codegen tests will be run or not. If you get an error
   234         -# saying that the FileCheck executable is missing, you may want to disable this.
   235         -#codegen-tests = true
   236         -
   237         -# Flag indicating whether the API analysis data should be saved.
   238         -#save-analysis = false
   239         -
   240         -# =============================================================================
   241         -# Options for specific targets
   242         -#
   243         -# Each of the following options is scoped to the specific target triple in
   244         -# question and is used for determining how to compile each target.
   245         -# =============================================================================
   246         -[target.x86_64-unknown-linux-gnu]
   247         -
   248         -# C compiler to be used to compile C code and link Rust code. Note that the
   249         -# default value is platform specific, and if not specified it may also depend on
   250         -# which platform is being cross-compiled to which.
   251         -#cc = "cc"
   252         -
   253         -# C++ compiler to be used to compile C++ code (e.g. LLVM and our LLVM shims).
   254         -# This is only used for host targets.
   255         -#cxx = "c++"
   256         -
   257         -# Path to the `llvm-config` binary of the installation of a custom LLVM to link
   258         -# against. Note that if this is specified we don't compile LLVM at all for this
   259         -# target.
   260         -#llvm-config = "../path/to/llvm/root/bin/llvm-config"
   261         -
   262         -# Path to the custom jemalloc static library to link into the standard library
   263         -# by default. This is only used if jemalloc is still enabled above
   264         -#jemalloc = "/path/to/jemalloc/libjemalloc_pic.a"
   265         -
   266         -# If this target is for Android, this option will be required to specify where
   267         -# the NDK for the target lives. This is used to find the C compiler to link and
   268         -# build native code.
   269         -#android-ndk = "/path/to/ndk"
   270         -
   271         -# The root location of the MUSL installation directory. The library directory
   272         -# will also need to contain libunwind.a for an unwinding implementation. Note
   273         -# that this option only makes sense for MUSL targets that produce statically
   274         -# linked binaries.
   275         -#musl-root = "..."
   276         -
   277         -# =============================================================================
   278         -# Distribution options
   279         -#
   280         -# These options are related to distribution, mostly for the Rust project itself.
   281         -# You probably won't need to concern yourself with any of these options
   282         -# =============================================================================
   283         -[dist]
   284         -
   285         -# This is the folder of artifacts that the build system will sign. All files in
   286         -# this directory will be signed with the default gpg key using the system `gpg`
   287         -# binary. The `asc` and `sha256` files will all be output into the standard dist
   288         -# output folder (currently `build/dist`)
   289         -#
   290         -# This folder should be populated ahead of time before the build system is
   291         -# invoked.
   292         -#sign-folder = "path/to/folder/to/sign"
   293         -
   294         -# This is a file which contains the password of the default gpg key. This will
   295         -# be passed to `gpg` down the road when signing all files in `sign-folder`
   296         -# above. This should be stored in plaintext.
   297         -#gpg-password-file = "path/to/gpg/password"
   298         -
   299         -# The remote address that all artifacts will eventually be uploaded to. The
   300         -# build system generates manifests which will point to these URLs, and for the
   301         -# manifests to be correct they'll have to have the right URLs encoded.
   302         -#
   303         -# Note that this address should not contain a trailing slash as file names will
   304         -# be appended to it.
   305         -#upload-addr = "https://example.com/folder"
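
Since every option above is commented out, it may help to see what a hand-written `config.toml` can look like. The snippet below is a hypothetical minimal configuration assembled only from options documented in the example above; the values are illustrative, not recommendations.

    [llvm]
    ninja = true
    assertions = false

    [build]
    build = "x86_64-unknown-linux-gnu"
    docs = false
    verbose = 1

    [rust]
    # 0 means "use the number of cores on this machine", per the comment above.
    codegen-units = 0
    channel = "dev"

    [target.x86_64-unknown-linux-gnu]
    cc = "cc"
    cxx = "c++"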

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/dist.rs version [6a1cf42725].

     1         -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Implementation of the various distribution aspects of the compiler.
    12         -//!
    13         -//! This module is responsible for creating tarballs of the standard library,
    14         -//! compiler, and documentation. This ends up being what we distribute to
    15         -//! everyone as well.
    16         -//!
    17         -//! No tarball is actually created in this file itself; rather, we still shell
    18         -//! out to `rust-installer`. This may one day be replaced with bits and
    19         -//! pieces of `rustup.rs`!
    20         -
    21         -use std::env;
    22         -use std::fs::{self, File};
    23         -use std::io::{Read, Write};
    24         -use std::path::{PathBuf, Path};
    25         -use std::process::{Command, Stdio};
    26         -
    27         -use build_helper::output;
    28         -
    29         -#[cfg(not(target_os = "solaris"))]
    30         -const SH_CMD: &'static str = "sh";
    31         -// On Solaris, sh is the historical Bourne shell, not a POSIX shell or bash.
    32         -#[cfg(target_os = "solaris")]
    33         -const SH_CMD: &'static str = "bash";
    34         -
    35         -use {Build, Compiler, Mode};
    36         -use channel;
    37         -use util::{cp_r, libdir, is_dylib, cp_filtered, copy, exe};
    38         -
    39         -fn pkgname(build: &Build, component: &str) -> String {
    40         -    assert!(component.starts_with("rust")); // does not work with cargo
    41         -    format!("{}-{}", component, build.rust_package_vers())
    42         -}
    43         -
    44         -fn distdir(build: &Build) -> PathBuf {
    45         -    build.out.join("dist")
    46         -}
    47         -
    48         -pub fn tmpdir(build: &Build) -> PathBuf {
    49         -    build.out.join("tmp/dist")
    50         -}
    51         -
    52         -/// Builds the `rust-docs` installer component.
    53         -///
    54         -/// Slurps up documentation from the `stage`'s `host`.
    55         -pub fn docs(build: &Build, stage: u32, host: &str) {
    56         -    println!("Dist docs stage{} ({})", stage, host);
    57         -    if !build.config.docs {
    58         -        println!("\tskipping - docs disabled");
    59         -        return
    60         -    }
    61         -
    62         -    let name = pkgname(build, "rust-docs");
    63         -    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
    64         -    let _ = fs::remove_dir_all(&image);
    65         -
    66         -    let dst = image.join("share/doc/rust/html");
    67         -    t!(fs::create_dir_all(&dst));
    68         -    let src = build.out.join(host).join("doc");
    69         -    cp_r(&src, &dst);
    70         -
    71         -    let mut cmd = Command::new(SH_CMD);
    72         -    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
    73         -       .arg("--product-name=Rust-Documentation")
    74         -       .arg("--rel-manifest-dir=rustlib")
    75         -       .arg("--success-message=Rust-documentation-is-installed.")
    76         -       .arg(format!("--image-dir={}", sanitize_sh(&image)))
    77         -       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
    78         -       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
    79         -       .arg(format!("--package-name={}-{}", name, host))
    80         -       .arg("--component-name=rust-docs")
    81         -       .arg("--legacy-manifest-dirs=rustlib,cargo")
    82         -       .arg("--bulk-dirs=share/doc/rust/html");
    83         -    build.run(&mut cmd);
    84         -    t!(fs::remove_dir_all(&image));
    85         -
    86         -    // As part of this step, *also* copy the docs directory to a directory which
    87         -    // buildbot typically uploads.
    88         -    if host == build.config.build {
    89         -        let dst = distdir(build).join("doc").join(build.rust_package_vers());
    90         -        t!(fs::create_dir_all(&dst));
    91         -        cp_r(&src, &dst);
    92         -    }
    93         -}
    94         -
    95         -/// Build the `rust-mingw` installer component.
    96         -///
    97         -/// This contains all the bits and pieces to run the MinGW Windows targets
    98         -/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
    99         -/// Currently just shells out to a python script, but that should be rewritten
   100         -/// in Rust.
   101         -pub fn mingw(build: &Build, host: &str) {
   102         -    println!("Dist mingw ({})", host);
   103         -    let name = pkgname(build, "rust-mingw");
   104         -    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
   105         -    let _ = fs::remove_dir_all(&image);
   106         -    t!(fs::create_dir_all(&image));
   107         -
   108         -    // The first argument to the script is a "temporary directory" which is just
   109         -    // thrown away (this contains the runtime DLLs included in the rustc package
   110         -    // above) and the second argument is where to place all the MinGW components
   111         -    // (which is what we want).
   112         -    //
   113         -    // FIXME: this script should be rewritten into Rust
   114         -    let mut cmd = Command::new(build.python());
   115         -    cmd.arg(build.src.join("src/etc/make-win-dist.py"))
   116         -       .arg(tmpdir(build))
   117         -       .arg(&image)
   118         -       .arg(host);
   119         -    build.run(&mut cmd);
   120         -
   121         -    let mut cmd = Command::new(SH_CMD);
   122         -    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
   123         -       .arg("--product-name=Rust-MinGW")
   124         -       .arg("--rel-manifest-dir=rustlib")
   125         -       .arg("--success-message=Rust-MinGW-is-installed.")
   126         -       .arg(format!("--image-dir={}", sanitize_sh(&image)))
   127         -       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
   128         -       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
   129         -       .arg(format!("--package-name={}-{}", name, host))
   130         -       .arg("--component-name=rust-mingw")
   131         -       .arg("--legacy-manifest-dirs=rustlib,cargo");
   132         -    build.run(&mut cmd);
   133         -    t!(fs::remove_dir_all(&image));
   134         -}
   135         -
   136         -/// Creates the `rustc` installer component.
   137         -pub fn rustc(build: &Build, stage: u32, host: &str) {
   138         -    println!("Dist rustc stage{} ({})", stage, host);
   139         -    let name = pkgname(build, "rustc");
   140         -    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
   141         -    let _ = fs::remove_dir_all(&image);
   142         -    let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
   143         -    let _ = fs::remove_dir_all(&overlay);
   144         -
   145         -    // Prepare the rustc "image", what will actually end up getting installed
   146         -    prepare_image(build, stage, host, &image);
   147         -
   148         -    // Prepare the overlay which is part of the tarball but won't actually be
   149         -    // installed
   150         -    let cp = |file: &str| {
   151         -        install(&build.src.join(file), &overlay, 0o644);
   152         -    };
   153         -    cp("COPYRIGHT");
   154         -    cp("LICENSE-APACHE");
   155         -    cp("LICENSE-MIT");
   156         -    cp("README.md");
   157         -    // This tiny morsel of metadata is used by rust-packaging
   158         -    let version = build.rust_version();
   159         -    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
   160         -
   161         -    // On MinGW we've got a few runtime DLL dependencies that we need to
   162         -    // include. The first argument to this script is where to put these DLLs
   163         -    // (the image we're creating), and the second argument is a junk directory
   164         -    // to ignore all other MinGW stuff the script creates.
   165         -    //
   166         -    // On 32-bit MinGW we're always including a DLL which needs some extra
   167         -    // licenses to distribute. On 64-bit MinGW we don't actually distribute
   168         -    // anything requiring us to distribute a license, but it's likely the
   169         -    // install will *also* include the rust-mingw package, which also needs
   170         -    // licenses, so to be safe we just include it here in all MinGW packages.
   171         -    //
   172         -    // FIXME: this script should be rewritten into Rust
   173         -    if host.contains("pc-windows-gnu") {
   174         -        let mut cmd = Command::new(build.python());
   175         -        cmd.arg(build.src.join("src/etc/make-win-dist.py"))
   176         -           .arg(&image)
   177         -           .arg(tmpdir(build))
   178         -           .arg(host);
   179         -        build.run(&mut cmd);
   180         -
   181         -        let dst = image.join("share/doc");
   182         -        t!(fs::create_dir_all(&dst));
   183         -        cp_r(&build.src.join("src/etc/third-party"), &dst);
   184         -    }
   185         -
   186         -    // Finally, wrap everything up in a nice tarball!
   187         -    let mut cmd = Command::new(SH_CMD);
   188         -    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
   189         -       .arg("--product-name=Rust")
   190         -       .arg("--rel-manifest-dir=rustlib")
   191         -       .arg("--success-message=Rust-is-ready-to-roll.")
   192         -       .arg(format!("--image-dir={}", sanitize_sh(&image)))
   193         -       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
   194         -       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
   195         -       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
   196         -       .arg(format!("--package-name={}-{}", name, host))
   197         -       .arg("--component-name=rustc")
   198         -       .arg("--legacy-manifest-dirs=rustlib,cargo");
   199         -    build.run(&mut cmd);
   200         -    t!(fs::remove_dir_all(&image));
   201         -    t!(fs::remove_dir_all(&overlay));
   202         -
   203         -    fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) {
   204         -        let src = build.sysroot(&Compiler::new(stage, host));
   205         -        let libdir = libdir(host);
   206         -
   207         -        // Copy rustc/rustdoc binaries
   208         -        t!(fs::create_dir_all(image.join("bin")));
   209         -        cp_r(&src.join("bin"), &image.join("bin"));
   210         -
   211         -        // Copy runtime DLLs needed by the compiler
   212         -        if libdir != "bin" {
   213         -            for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
   214         -                let name = entry.file_name();
   215         -                if let Some(s) = name.to_str() {
   216         -                    if is_dylib(s) {
   217         -                        install(&entry.path(), &image.join(libdir), 0o644);
   218         -                    }
   219         -                }
   220         -            }
   221         -        }
   222         -
   223         -        // Man pages
   224         -        t!(fs::create_dir_all(image.join("share/man/man1")));
   225         -        cp_r(&build.src.join("man"), &image.join("share/man/man1"));
   226         -
   227         -        // Debugger scripts
   228         -        debugger_scripts(build, &image, host);
   229         -
   230         -        // Misc license info
   231         -        let cp = |file: &str| {
   232         -            install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
   233         -        };
   234         -        cp("COPYRIGHT");
   235         -        cp("LICENSE-APACHE");
   236         -        cp("LICENSE-MIT");
   237         -        cp("README.md");
   238         -    }
   239         -}
   240         -
   241         -/// Copies debugger scripts for `host` into the `sysroot` specified.
   242         -pub fn debugger_scripts(build: &Build,
   243         -                        sysroot: &Path,
   244         -                        host: &str) {
   245         -    let cp_debugger_script = |file: &str| {
   246         -        let dst = sysroot.join("lib/rustlib/etc");
   247         -        t!(fs::create_dir_all(&dst));
   248         -        install(&build.src.join("src/etc/").join(file), &dst, 0o644);
   249         -    };
   250         -    if host.contains("windows-msvc") {
   251         -        // no debugger scripts
   252         -    } else {
   253         -        cp_debugger_script("debugger_pretty_printers_common.py");
   254         -
   255         -        // gdb debugger scripts
   256         -        install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
   257         -                0o755);
   258         -
   259         -        cp_debugger_script("gdb_load_rust_pretty_printers.py");
   260         -        cp_debugger_script("gdb_rust_pretty_printing.py");
   261         -
   262         -        // lldb debugger scripts
   263         -        install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
   264         -                0o755);
   265         -
   266         -        cp_debugger_script("lldb_rust_formatters.py");
   267         -    }
   268         -}
   269         -
   270         -/// Creates the `rust-std` installer component as compiled by `compiler` for the
   271         -/// target `target`.
   272         -pub fn std(build: &Build, compiler: &Compiler, target: &str) {
   273         -    println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
   274         -             target);
   275         -
   276         -    // The only true set of target libraries came from the build triple, so
   277         -    // let's reduce redundant work by only producing archives from that host.
   278         -    if compiler.host != build.config.build {
   279         -        println!("\tskipping, not a build host");
   280         -        return
   281         -    }
   282         -
   283         -    let name = pkgname(build, "rust-std");
   284         -    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
   285         -    let _ = fs::remove_dir_all(&image);
   286         -
   287         -    let dst = image.join("lib/rustlib").join(target);
   288         -    t!(fs::create_dir_all(&dst));
   289         -    let src = build.sysroot(compiler).join("lib/rustlib");
   290         -    cp_r(&src.join(target), &dst);
   291         -
   292         -    let mut cmd = Command::new(SH_CMD);
   293         -    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
   294         -       .arg("--product-name=Rust")
   295         -       .arg("--rel-manifest-dir=rustlib")
   296         -       .arg("--success-message=std-is-standing-at-the-ready.")
   297         -       .arg(format!("--image-dir={}", sanitize_sh(&image)))
   298         -       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
   299         -       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
   300         -       .arg(format!("--package-name={}-{}", name, target))
   301         -       .arg(format!("--component-name=rust-std-{}", target))
   302         -       .arg("--legacy-manifest-dirs=rustlib,cargo");
   303         -    build.run(&mut cmd);
   304         -    t!(fs::remove_dir_all(&image));
   305         -}
   306         -
   307         -pub fn rust_src_location(build: &Build) -> PathBuf {
   308         -    let plain_name = format!("rustc-{}-src", build.rust_package_vers());
   309         -    distdir(build).join(&format!("{}.tar.gz", plain_name))
   310         -}
   311         -
   312         -/// Creates a tarball of save-analysis metadata, if available.
   313         -pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
   314         -    if !build.config.rust_save_analysis {
   315         -        return
   316         -    }
   317         -
   318         -    println!("Dist analysis");
   319         -
   320         -    if compiler.host != build.config.build {
   321         -        println!("\tskipping, not a build host");
   322         -        return
   323         -    }
   324         -
   325         -    // Package save-analysis from stage1 if not doing a full bootstrap, as the
   326         -    // stage2 artifacts are simply copied from stage1 in that case.
   327         -    let compiler = if build.force_use_stage1(compiler, target) {
   328         -        Compiler::new(1, compiler.host)
   329         -    } else {
   330         -        compiler.clone()
   331         -    };
   332         -
   333         -    let name = pkgname(build, "rust-analysis");
   334         -    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
   335         -
   336         -    let src = build.stage_out(&compiler, Mode::Libstd).join(target).join("release").join("deps");
   337         -
   338         -    let image_src = src.join("save-analysis");
   339         -    let dst = image.join("lib/rustlib").join(target).join("analysis");
   340         -    t!(fs::create_dir_all(&dst));
   341         -    println!("image_src: {:?}, dst: {:?}", image_src, dst);
   342         -    cp_r(&image_src, &dst);
   343         -
   344         -    let mut cmd = Command::new(SH_CMD);
   345         -    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
   346         -       .arg("--product-name=Rust")
   347         -       .arg("--rel-manifest-dir=rustlib")
   348         -       .arg("--success-message=save-analysis-saved.")
   349         -       .arg(format!("--image-dir={}", sanitize_sh(&image)))
   350         -       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
   351         -       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
   352         -       .arg(format!("--package-name={}-{}", name, target))
   353         -       .arg(format!("--component-name=rust-analysis-{}", target))
   354         -       .arg("--legacy-manifest-dirs=rustlib,cargo");
   355         -    build.run(&mut cmd);
   356         -    t!(fs::remove_dir_all(&image));
   357         -}
   358         -
   359         -const CARGO_VENDOR_VERSION: &'static str = "0.1.4";
   360         -
   361         -/// Creates the `rust-src` installer component and the plain source tarball
   362         -pub fn rust_src(build: &Build) {
   363         -    if !build.config.rust_dist_src {
   364         -        return
   365         -    }
   366         -
   367         -    println!("Dist src");
   368         -
   369         -    let name = pkgname(build, "rust-src");
   370         -    let image = tmpdir(build).join(format!("{}-image", name));
   371         -    let _ = fs::remove_dir_all(&image);
   372         -
   373         -    let dst = image.join("lib/rustlib/src");
   374         -    let dst_src = dst.join("rust");
   375         -    t!(fs::create_dir_all(&dst_src));
   376         -
   377         -    // This is the set of root paths which will become part of the source package
   378         -    let src_files = [
   379         -        "COPYRIGHT",
   380         -        "LICENSE-APACHE",
   381         -        "LICENSE-MIT",
   382         -        "CONTRIBUTING.md",
   383         -        "README.md",
   384         -        "RELEASES.md",
   385         -        "configure",
   386         -        "x.py",
   387         -    ];
   388         -    let src_dirs = [
   389         -        "man",
   390         -        "src",
   391         -        "cargo",
   392         -    ];
   393         -
   394         -    let filter_fn = move |path: &Path| {
   395         -        let spath = match path.to_str() {
   396         -            Some(path) => path,
   397         -            None => return false,
   398         -        };
   399         -        if spath.ends_with("~") || spath.ends_with(".pyc") {
   400         -            return false
   401         -        }
   402         -        if spath.contains("llvm/test") || spath.contains("llvm\\test") {
   403         -            if spath.ends_with(".ll") ||
   404         -               spath.ends_with(".td") ||
   405         -               spath.ends_with(".s") {
   406         -                return false
   407         -            }
   408         -        }
   409         -
   410         -        let excludes = [
   411         -            "CVS", "RCS", "SCCS", ".git", ".gitignore", ".gitmodules",
   412         -            ".gitattributes", ".cvsignore", ".svn", ".arch-ids", "{arch}",
   413         -            "=RELEASE-ID", "=meta-update", "=update", ".bzr", ".bzrignore",
   414         -            ".bzrtags", ".hg", ".hgignore", ".hgtags", "_darcs",
   415         -        ];
   416         -        !path.iter()
   417         -             .map(|s| s.to_str().unwrap())
   418         -             .any(|s| excludes.contains(&s))
   419         -    };
   420         -
   421         -    // Copy the directories using our filter
   422         -    for item in &src_dirs {
   423         -        let dst = &dst_src.join(item);
   424         -        t!(fs::create_dir(dst));
   425         -        cp_filtered(&build.src.join(item), dst, &filter_fn);
   426         -    }
   427         -    // Copy the files normally
   428         -    for item in &src_files {
   429         -        copy(&build.src.join(item), &dst_src.join(item));
   430         -    }
   431         -
   432         -    // Get cargo-vendor installed, if it isn't already.
   433         -    let mut has_cargo_vendor = false;
   434         -    let mut cmd = Command::new(&build.cargo);
   435         -    for line in output(cmd.arg("install").arg("--list")).lines() {
   436         -        has_cargo_vendor |= line.starts_with("cargo-vendor ");
   437         -    }
   438         -    if !has_cargo_vendor {
   439         -        let mut cmd = Command::new(&build.cargo);
   440         -        cmd.arg("install")
   441         -           .arg("--force")
   442         -           .arg("--debug")
   443         -           .arg("--vers").arg(CARGO_VENDOR_VERSION)
   444         -           .arg("cargo-vendor")
   445         -           .env("RUSTC", &build.rustc);
   446         -        build.run(&mut cmd);
   447         -    }
   448         -
   449         -    // Vendor all Cargo dependencies
   450         -    let mut cmd = Command::new(&build.cargo);
   451         -    cmd.arg("vendor")
   452         -       .current_dir(&dst_src.join("src"));
   453         -    build.run(&mut cmd);
   454         -
   455         -    // Create source tarball in rust-installer format
   456         -    let mut cmd = Command::new(SH_CMD);
   457         -    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
   458         -       .arg("--product-name=Rust")
   459         -       .arg("--rel-manifest-dir=rustlib")
   460         -       .arg("--success-message=Awesome-Source.")
   461         -       .arg(format!("--image-dir={}", sanitize_sh(&image)))
   462         -       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
   463         -       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
   464         -       .arg(format!("--package-name={}", name))
   465         -       .arg("--component-name=rust-src")
   466         -       .arg("--legacy-manifest-dirs=rustlib,cargo");
   467         -    build.run(&mut cmd);
   468         -
   469         -    // Rename the directory so that the root folder of the tarball has the correct name
   470         -    let plain_name = format!("rustc-{}-src", build.rust_package_vers());
   471         -    let plain_dst_src = tmpdir(build).join(&plain_name);
   472         -    let _ = fs::remove_dir_all(&plain_dst_src);
   473         -    t!(fs::create_dir_all(&plain_dst_src));
   474         -    cp_r(&dst_src, &plain_dst_src);
   475         -
   476         -    // Create the version file
   477         -    write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
   478         -
   479         -    // Create plain source tarball
   480         -    let mut cmd = Command::new("tar");
   481         -    cmd.arg("-czf").arg(sanitize_sh(&rust_src_location(build)))
   482         -       .arg(&plain_name)
   483         -       .current_dir(tmpdir(build));
   484         -    build.run(&mut cmd);
   485         -
   486         -    t!(fs::remove_dir_all(&image));
   487         -    t!(fs::remove_dir_all(&plain_dst_src));
   488         -}
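// Hedged sketch, not part of the original file: a standalone approximation of
// the `filter_fn` closure above, with a few examples of what it keeps and
// drops. It mirrors the same three rules (editor/bytecode droppings, bulky
// LLVM test inputs, VCS metadata) but only lists a subset of the excludes.
#[cfg(test)]
mod rust_src_filter_sketch {
    use std::path::Path;

    fn keep(path: &Path) -> bool {
        let spath = match path.to_str() {
            Some(s) => s,
            None => return false,
        };
        if spath.ends_with("~") || spath.ends_with(".pyc") {
            return false
        }
        if (spath.contains("llvm/test") || spath.contains("llvm\\test")) &&
           (spath.ends_with(".ll") || spath.ends_with(".td") || spath.ends_with(".s")) {
            return false
        }
        let excludes = [".git", ".hg", ".svn", ".bzr", "_darcs", "CVS"];
        !path.iter()
             .map(|s| s.to_str().unwrap())
             .any(|s| excludes.contains(&s))
    }

    #[test]
    fn examples() {
        assert!(keep(Path::new("src/libstd/lib.rs")));
        assert!(!keep(Path::new("src/libstd/lib.rs~")));   // editor backup
        assert!(!keep(Path::new("src/llvm/test/foo.ll"))); // LLVM test input
        assert!(!keep(Path::new("src/.git/config")));      // VCS metadata
    }
}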
   489         -
   490         -fn install(src: &Path, dstdir: &Path, perms: u32) {
   491         -    let dst = dstdir.join(src.file_name().unwrap());
   492         -    t!(fs::create_dir_all(dstdir));
   493         -    t!(fs::copy(src, &dst));
   494         -    chmod(&dst, perms);
   495         -}
   496         -
   497         -#[cfg(unix)]
   498         -fn chmod(path: &Path, perms: u32) {
   499         -    use std::os::unix::fs::*;
   500         -    t!(fs::set_permissions(path, fs::Permissions::from_mode(perms)));
   501         -}
   502         -#[cfg(windows)]
   503         -fn chmod(_path: &Path, _perms: u32) {}
   504         -
   505         -// We have to run a few shell scripts, which choke quite a bit on both `\`
   506         -// characters and on `C:\` paths, so normalize both of them away.
   507         -pub fn sanitize_sh(path: &Path) -> String {
   508         -    let path = path.to_str().unwrap().replace("\\", "/");
   509         -    return change_drive(&path).unwrap_or(path);
   510         -
   511         -    fn change_drive(s: &str) -> Option<String> {
   512         -        let mut ch = s.chars();
   513         -        let drive = ch.next().unwrap_or('C');
   514         -        if ch.next() != Some(':') {
   515         -            return None
   516         -        }
   517         -        if ch.next() != Some('/') {
   518         -            return None
   519         -        }
   520         -        Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..]))
   521         -    }
   522         -}
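// Hedged sketch, not part of the original file: illustrates the intended
// behaviour of `sanitize_sh` on Windows-style paths (backslashes become
// forward slashes and a `C:/` drive prefix becomes `/C/`). The example paths
// are made up.
#[cfg(test)]
mod sanitize_sh_sketch {
    use super::sanitize_sh;
    use std::path::Path;

    #[test]
    fn drive_and_backslashes_are_normalized() {
        assert_eq!(sanitize_sh(Path::new("C:\\rust\\build")), "/C/rust/build");
        // Paths without a drive prefix only get their backslashes replaced.
        assert_eq!(sanitize_sh(Path::new("tmp\\dist")), "tmp/dist");
    }
}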
   523         -
   524         -fn write_file(path: &Path, data: &[u8]) {
   525         -    let mut vf = t!(fs::File::create(path));
   526         -    t!(vf.write_all(data));
   527         -}
   528         -
   529         -pub fn cargo(build: &Build, stage: u32, target: &str) {
   530         -    println!("Dist cargo stage{} ({})", stage, target);
   531         -    let compiler = Compiler::new(stage, &build.config.build);
   532         -
   533         -    let src = build.src.join("cargo");
   534         -    let etc = src.join("src/etc");
   535         -    let release_num = build.cargo_release_num();
   536         -    let name = format!("cargo-{}", build.package_vers(&release_num));
   537         -    let version = build.cargo_info.version(build, &release_num);
   538         -
   539         -    let tmp = tmpdir(build);
   540         -    let image = tmp.join("cargo-image");
   541         -    drop(fs::remove_dir_all(&image));
   542         -    t!(fs::create_dir_all(&image));
   543         -
   544         -    // Prepare the image directory
   545         -    t!(fs::create_dir_all(image.join("share/zsh/site-functions")));
   546         -    t!(fs::create_dir_all(image.join("etc/bash_completions.d")));
   547         -    let cargo = build.cargo_out(&compiler, Mode::Tool, target)
   548         -                     .join(exe("cargo", target));
   549         -    install(&cargo, &image.join("bin"), 0o755);
   550         -    for man in t!(etc.join("man").read_dir()) {
   551         -        let man = t!(man);
   552         -        install(&man.path(), &image.join("share/man/man1"), 0o644);
   553         -    }
   554         -    install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
   555         -    copy(&etc.join("cargo.bashcomp.sh"),
   556         -         &image.join("etc/bash_completions.d/cargo"));
   557         -    let doc = image.join("share/doc/cargo");
   558         -    install(&src.join("README.md"), &doc, 0o644);
   559         -    install(&src.join("LICENSE-MIT"), &doc, 0o644);
   560         -    install(&src.join("LICENSE-APACHE"), &doc, 0o644);
   561         -    install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644);
   562         -
   563         -    // Prepare the overlay
   564         -    let overlay = tmp.join("cargo-overlay");
   565         -    drop(fs::remove_dir_all(&overlay));
   566         -    t!(fs::create_dir_all(&overlay));
   567         -    install(&src.join("README.md"), &overlay, 0o644);
   568         -    install(&src.join("LICENSE-MIT"), &overlay, 0o644);
   569         -    install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
   570         -    install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644);
   571         -    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
   572         -
   573         -    // Generate the installer tarball
   574         -    let mut cmd = Command::new(SH_CMD);
   575         -    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
   576         -       .arg("--product-name=Rust")
   577         -       .arg("--rel-manifest-dir=rustlib")
   578         -       .arg("--success-message=Rust-is-ready-to-roll.")
   579         -       .arg(format!("--image-dir={}", sanitize_sh(&image)))
   580         -       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
   581         -       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
   582         -       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
   583         -       .arg(format!("--package-name={}-{}", name, target))
   584         -       .arg("--component-name=cargo")
   585         -       .arg("--legacy-manifest-dirs=rustlib,cargo");
   586         -    build.run(&mut cmd);
   587         -}
   588         -
   589         -/// Creates a combined installer for the specified target in the provided stage.
   590         -pub fn extended(build: &Build, stage: u32, target: &str) {
   591         -    println!("Dist extended stage{} ({})", stage, target);
   592         -
   593         -    let dist = distdir(build);
   594         -    let cargo_vers = build.cargo_release_num();
   595         -    let rustc_installer = dist.join(format!("{}-{}.tar.gz",
   596         -                                            pkgname(build, "rustc"),
   597         -                                            target));
   598         -    let cargo_installer = dist.join(format!("cargo-{}-{}.tar.gz",
   599         -                                            build.package_vers(&cargo_vers),
   600         -                                            target));
   601         -    let docs_installer = dist.join(format!("{}-{}.tar.gz",
   602         -                                           pkgname(build, "rust-docs"),
   603         -                                           target));
   604         -    let mingw_installer = dist.join(format!("{}-{}.tar.gz",
   605         -                                            pkgname(build, "rust-mingw"),
   606         -                                            target));
   607         -    let std_installer = dist.join(format!("{}-{}.tar.gz",
   608         -                                          pkgname(build, "rust-std"),
   609         -                                          target));
   610         -
   611         -    let tmp = tmpdir(build);
   612         -    let overlay = tmp.join("extended-overlay");
   613         -    let etc = build.src.join("src/etc/installer");
   614         -    let work = tmp.join("work");
   615         -
   616         -    let _ = fs::remove_dir_all(&overlay);
   617         -    install(&build.src.join("COPYRIGHT"), &overlay, 0o644);
   618         -    install(&build.src.join("LICENSE-APACHE"), &overlay, 0o644);
   619         -    install(&build.src.join("LICENSE-MIT"), &overlay, 0o644);
   620         -    let version = build.rust_version();
   621         -    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
   622         -    install(&etc.join("README.md"), &overlay, 0o644);
   623         -
   624         -    // When the rust-std package was split out of rustc, we needed to ensure
   625         -    // that during upgrades rustc was upgraded before rust-std, to avoid rustc
   626         -    // clobbering the std files during uninstall. To do this, ensure that rustc
   627         -    // comes before rust-std in the list below.
   628         -    let mut input_tarballs = format!("{},{},{},{}",
   629         -                                     sanitize_sh(&rustc_installer),
   630         -                                     sanitize_sh(&cargo_installer),
   631         -                                     sanitize_sh(&docs_installer),
   632         -                                     sanitize_sh(&std_installer));
   633         -    if target.contains("pc-windows-gnu") {
   634         -        input_tarballs.push_str(",");
   635         -        input_tarballs.push_str(&sanitize_sh(&mingw_installer));
   636         -    }
   637         -
   638         -    let mut cmd = Command::new(SH_CMD);
   639         -    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/combine-installers.sh")))
   640         -       .arg("--product-name=Rust")
   641         -       .arg("--rel-manifest-dir=rustlib")
   642         -       .arg("--success-message=Rust-is-ready-to-roll.")
   643         -       .arg(format!("--work-dir={}", sanitize_sh(&work)))
   644         -       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
   645         -       .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
   646         -       .arg("--legacy-manifest-dirs=rustlib,cargo")
   647         -       .arg(format!("--input-tarballs={}", input_tarballs))
   648         -       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)));
   649         -    build.run(&mut cmd);
   650         -
   651         -    let mut license = String::new();
   652         -    t!(t!(File::open(build.src.join("COPYRIGHT"))).read_to_string(&mut license));
   653         -    license.push_str("\n");
   654         -    t!(t!(File::open(build.src.join("LICENSE-APACHE"))).read_to_string(&mut license));
   655         -    license.push_str("\n");
   656         -    t!(t!(File::open(build.src.join("LICENSE-MIT"))).read_to_string(&mut license));
   657         -
   658         -    let rtf = r"{\rtf1\ansi\deff0{\fonttbl{\f0\fnil\fcharset0 Arial;}}\nowwrap\fs18";
   659         -    let mut rtf = rtf.to_string();
   660         -    rtf.push_str("\n");
   661         -    for line in license.lines() {
   662         -        rtf.push_str(line);
   663         -        rtf.push_str("\\line ");
   664         -    }
   665         -    rtf.push_str("}");
   666         -
   667         -    if target.contains("apple-darwin") {
   668         -        let pkg = tmp.join("pkg");
   669         -        let _ = fs::remove_dir_all(&pkg);
   670         -        t!(fs::create_dir_all(pkg.join("rustc")));
   671         -        t!(fs::create_dir_all(pkg.join("cargo")));
   672         -        t!(fs::create_dir_all(pkg.join("rust-docs")));
   673         -        t!(fs::create_dir_all(pkg.join("rust-std")));
   674         -
   675         -        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target)),
   676         -             &pkg.join("rustc"));
   677         -        cp_r(&work.join(&format!("cargo-nightly-{}", target)),
   678         -             &pkg.join("cargo"));
   679         -        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target)),
   680         -             &pkg.join("rust-docs"));
   681         -        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target)),
   682         -             &pkg.join("rust-std"));
   683         -
   684         -        install(&etc.join("pkg/postinstall"), &pkg.join("rustc"), 0o755);
   685         -        install(&etc.join("pkg/postinstall"), &pkg.join("cargo"), 0o755);
   686         -        install(&etc.join("pkg/postinstall"), &pkg.join("rust-docs"), 0o755);
   687         -        install(&etc.join("pkg/postinstall"), &pkg.join("rust-std"), 0o755);
   688         -
   689         -        let pkgbuild = |component: &str| {
   690         -            let mut cmd = Command::new("pkgbuild");
   691         -            cmd.arg("--identifier").arg(format!("org.rust-lang.{}", component))
   692         -               .arg("--scripts").arg(pkg.join(component))
   693         -               .arg("--nopayload")
   694         -               .arg(pkg.join(component).with_extension("pkg"));
   695         -            build.run(&mut cmd);
   696         -        };
   697         -        pkgbuild("rustc");
   698         -        pkgbuild("cargo");
   699         -        pkgbuild("rust-docs");
   700         -        pkgbuild("rust-std");
   701         -
   702         -        // create an 'uninstall' package
   703         -        install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
   704         -        pkgbuild("uninstall");
   705         -
   706         -        t!(fs::create_dir_all(pkg.join("res")));
   707         -        t!(t!(File::create(pkg.join("res/LICENSE.txt"))).write_all(license.as_bytes()));
   708         -        install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
   709         -        let mut cmd = Command::new("productbuild");
   710         -        cmd.arg("--distribution").arg(etc.join("pkg/Distribution.xml"))
   711         -           .arg("--resources").arg(pkg.join("res"))
   712         -           .arg(distdir(build).join(format!("{}-{}.pkg",
   713         -                                             pkgname(build, "rust"),
   714         -                                             target)))
   715         -           .arg("--package-path").arg(&pkg);
   716         -        build.run(&mut cmd);
   717         -    }
   718         -
   719         -    if target.contains("windows") {
   720         -        let exe = tmp.join("exe");
   721         -        let _ = fs::remove_dir_all(&exe);
   722         -        t!(fs::create_dir_all(exe.join("rustc")));
   723         -        t!(fs::create_dir_all(exe.join("cargo")));
   724         -        t!(fs::create_dir_all(exe.join("rust-docs")));
   725         -        t!(fs::create_dir_all(exe.join("rust-std")));
   726         -        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target))
   727         -                  .join("rustc"),
   728         -             &exe.join("rustc"));
   729         -        cp_r(&work.join(&format!("cargo-nightly-{}", target))
   730         -                  .join("cargo"),
   731         -             &exe.join("cargo"));
   732         -        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target))
   733         -                  .join("rust-docs"),
   734         -             &exe.join("rust-docs"));
   735         -        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target))
   736         -                  .join(format!("rust-std-{}", target)),
   737         -             &exe.join("rust-std"));
   738         -
   739         -        t!(fs::remove_file(exe.join("rustc/manifest.in")));
   740         -        t!(fs::remove_file(exe.join("cargo/manifest.in")));
   741         -        t!(fs::remove_file(exe.join("rust-docs/manifest.in")));
   742         -        t!(fs::remove_file(exe.join("rust-std/manifest.in")));
   743         -
   744         -        if target.contains("windows-gnu") {
   745         -            t!(fs::create_dir_all(exe.join("rust-mingw")));
   746         -            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-mingw"), target))
   747         -                      .join("rust-mingw"),
   748         -                 &exe.join("rust-mingw"));
   749         -            t!(fs::remove_file(exe.join("rust-mingw/manifest.in")));
   750         -        }
   751         -
   752         -        install(&etc.join("exe/rust.iss"), &exe, 0o644);
   753         -        install(&etc.join("exe/modpath.iss"), &exe, 0o644);
   754         -        install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
   755         -        install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
   756         -        t!(t!(File::create(exe.join("LICENSE.txt"))).write_all(license.as_bytes()));
   757         -
   758         -        // Generate exe installer
   759         -        let mut cmd = Command::new("iscc");
   760         -        cmd.arg("rust.iss")
   761         -           .current_dir(&exe);
   762         -        if target.contains("windows-gnu") {
   763         -            cmd.arg("/dMINGW");
   764         -        }
   765         -        add_env(build, &mut cmd, target);
   766         -        build.run(&mut cmd);
   767         -        install(&exe.join(format!("{}-{}.exe", pkgname(build, "rust"), target)),
   768         -                &distdir(build),
   769         -                0o755);
   770         -
   771         -        // Generate msi installer
   772         -        let wix = PathBuf::from(env::var_os("WIX").unwrap());
   773         -        let heat = wix.join("bin/heat.exe");
   774         -        let candle = wix.join("bin/candle.exe");
   775         -        let light = wix.join("bin/light.exe");
   776         -
   777         -        let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"];
   778         -        build.run(Command::new(&heat)
   779         -                        .current_dir(&exe)
   780         -                        .arg("dir")
   781         -                        .arg("rustc")
   782         -                        .args(&heat_flags)
   783         -                        .arg("-cg").arg("RustcGroup")
   784         -                        .arg("-dr").arg("Rustc")
   785         -                        .arg("-var").arg("var.RustcDir")
   786         -                        .arg("-out").arg(exe.join("RustcGroup.wxs")));
   787         -        build.run(Command::new(&heat)
   788         -                        .current_dir(&exe)
   789         -                        .arg("dir")
   790         -                        .arg("rust-docs")
   791         -                        .args(&heat_flags)
   792         -                        .arg("-cg").arg("DocsGroup")
   793         -                        .arg("-dr").arg("Docs")
   794         -                        .arg("-var").arg("var.DocsDir")
   795         -                        .arg("-out").arg(exe.join("DocsGroup.wxs"))
   796         -                        .arg("-t").arg(etc.join("msi/squash-components.xsl")));
   797         -        build.run(Command::new(&heat)
   798         -                        .current_dir(&exe)
   799         -                        .arg("dir")
   800         -                        .arg("cargo")
   801         -                        .args(&heat_flags)
   802         -                        .arg("-cg").arg("CargoGroup")
   803         -                        .arg("-dr").arg("Cargo")
   804         -                        .arg("-var").arg("var.CargoDir")
   805         -                        .arg("-out").arg(exe.join("CargoGroup.wxs"))
   806         -                        .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
   807         -        build.run(Command::new(&heat)
   808         -                        .current_dir(&exe)
   809         -                        .arg("dir")
   810         -                        .arg("rust-std")
   811         -                        .args(&heat_flags)
   812         -                        .arg("-cg").arg("StdGroup")
   813         -                        .arg("-dr").arg("Std")
   814         -                        .arg("-var").arg("var.StdDir")
   815         -                        .arg("-out").arg(exe.join("StdGroup.wxs")));
   816         -        if target.contains("windows-gnu") {
   817         -            build.run(Command::new(&heat)
   818         -                            .current_dir(&exe)
   819         -                            .arg("dir")
   820         -                            .arg("rust-mingw")
   821         -                            .args(&heat_flags)
   822         -                            .arg("-cg").arg("GccGroup")
   823         -                            .arg("-dr").arg("Gcc")
   824         -                            .arg("-var").arg("var.GccDir")
   825         -                            .arg("-out").arg(exe.join("GccGroup.wxs")));
   826         -        }
   827         -
   828         -        let candle = |input: &Path| {
   829         -            let output = exe.join(input.file_stem().unwrap())
   830         -                            .with_extension("wixobj");
   831         -            let arch = if target.contains("x86_64") {"x64"} else {"x86"};
   832         -            let mut cmd = Command::new(&candle);
   833         -            cmd.current_dir(&exe)
   834         -               .arg("-nologo")
   835         -               .arg("-dRustcDir=rustc")
   836         -               .arg("-dDocsDir=rust-docs")
   837         -               .arg("-dCargoDir=cargo")
   838         -               .arg("-dStdDir=rust-std")
   839         -               .arg("-arch").arg(&arch)
   840         -               .arg("-out").arg(&output)
   841         -               .arg(&input);
   842         -            add_env(build, &mut cmd, target);
   843         -
   844         -            if target.contains("windows-gnu") {
   845         -               cmd.arg("-dGccDir=rust-mingw");
   846         -            }
   847         -            build.run(&mut cmd);
   848         -        };
   849         -        candle(&etc.join("msi/rust.wxs"));
   850         -        candle(&etc.join("msi/ui.wxs"));
   851         -        candle(&etc.join("msi/rustwelcomedlg.wxs"));
   852         -        candle("RustcGroup.wxs".as_ref());
   853         -        candle("DocsGroup.wxs".as_ref());
   854         -        candle("CargoGroup.wxs".as_ref());
   855         -        candle("StdGroup.wxs".as_ref());
   856         -
   857         -        if target.contains("windows-gnu") {
   858         -            candle("GccGroup.wxs".as_ref());
   859         -        }
   860         -
   861         -        t!(t!(File::create(exe.join("LICENSE.rtf"))).write_all(rtf.as_bytes()));
   862         -        install(&etc.join("gfx/banner.bmp"), &exe, 0o644);
   863         -        install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);
   864         -
   865         -        let filename = format!("{}-{}.msi", pkgname(build, "rust"), target);
   866         -        let mut cmd = Command::new(&light);
   867         -        cmd.arg("-nologo")
   868         -           .arg("-ext").arg("WixUIExtension")
   869         -           .arg("-ext").arg("WixUtilExtension")
   870         -           .arg("-out").arg(exe.join(&filename))
   871         -           .arg("rust.wixobj")
   872         -           .arg("ui.wixobj")
   873         -           .arg("rustwelcomedlg.wixobj")
   874         -           .arg("RustcGroup.wixobj")
   875         -           .arg("DocsGroup.wixobj")
   876         -           .arg("CargoGroup.wixobj")
   877         -           .arg("StdGroup.wixobj")
   878         -           .current_dir(&exe);
   879         -
   880         -        if target.contains("windows-gnu") {
   881         -           cmd.arg("GccGroup.wixobj");
   882         -        }
   883         -        // ICE57 wrongly complains about the shortcuts
   884         -        cmd.arg("-sice:ICE57");
   885         -
   886         -        build.run(&mut cmd);
   887         -
   888         -        t!(fs::rename(exe.join(&filename), distdir(build).join(&filename)));
   889         -    }
   890         -}
   891         -
   892         -fn add_env(build: &Build, cmd: &mut Command, target: &str) {
   893         -    let mut parts = channel::CFG_RELEASE_NUM.split('.');
   894         -    cmd.env("CFG_RELEASE_INFO", build.rust_version())
   895         -       .env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM)
   896         -       .env("CFG_RELEASE", build.rust_release())
   897         -       .env("CFG_PRERELEASE_VERSION", channel::CFG_PRERELEASE_VERSION)
   898         -       .env("CFG_VER_MAJOR", parts.next().unwrap())
   899         -       .env("CFG_VER_MINOR", parts.next().unwrap())
   900         -       .env("CFG_VER_PATCH", parts.next().unwrap())
   901         -       .env("CFG_VER_BUILD", "0") // just needed to build
   902         -       .env("CFG_PACKAGE_VERS", build.rust_package_vers())
   903         -       .env("CFG_PACKAGE_NAME", pkgname(build, "rust"))
   904         -       .env("CFG_BUILD", target)
   905         -       .env("CFG_CHANNEL", &build.config.channel);
   906         -
   907         -    if target.contains("windows-gnu") {
   908         -       cmd.env("CFG_MINGW", "1")
   909         -          .env("CFG_ABI", "GNU");
   910         -    } else {
   911         -       cmd.env("CFG_MINGW", "0")
   912         -          .env("CFG_ABI", "MSVC");
   913         -    }
   914         -
   915         -    if target.contains("x86_64") {
   916         -       cmd.env("CFG_PLATFORM", "x64");
   917         -    } else {
   918         -       cmd.env("CFG_PLATFORM", "x86");
   919         -    }
   920         -}
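// Hedged sketch, not part of the original file: shows how CFG_VER_MAJOR,
// CFG_VER_MINOR and CFG_VER_PATCH fall out of splitting a release number such
// as "1.18.0" (an example value, not necessarily the real
// channel::CFG_RELEASE_NUM).
#[cfg(test)]
mod add_env_sketch {
    #[test]
    fn release_num_split() {
        let mut parts = "1.18.0".split('.');
        assert_eq!(parts.next(), Some("1"));  // CFG_VER_MAJOR
        assert_eq!(parts.next(), Some("18")); // CFG_VER_MINOR
        assert_eq!(parts.next(), Some("0"));  // CFG_VER_PATCH
    }
}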
   921         -
   922         -pub fn hash_and_sign(build: &Build) {
   923         -    let compiler = Compiler::new(0, &build.config.build);
   924         -    let mut cmd = build.tool_cmd(&compiler, "build-manifest");
   925         -    let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
   926         -        panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n")
   927         -    });
   928         -    let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| {
   929         -        panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n")
   930         -    });
   931         -    let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
   932         -        panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n")
   933         -    });
   934         -    let mut pass = String::new();
   935         -    t!(t!(File::open(&file)).read_to_string(&mut pass));
   936         -
   937         -    let today = output(Command::new("date").arg("+%Y-%m-%d"));
   938         -
   939         -    cmd.arg(sign);
   940         -    cmd.arg(distdir(build));
   941         -    cmd.arg(today.trim());
   942         -    cmd.arg(build.rust_package_vers());
   943         -    cmd.arg(build.package_vers(&build.cargo_release_num()));
   944         -    cmd.arg(addr);
   945         -
   946         -    t!(fs::create_dir_all(distdir(build)));
   947         -
   948         -    let mut child = t!(cmd.stdin(Stdio::piped()).spawn());
   949         -    t!(child.stdin.take().unwrap().write_all(pass.as_bytes()));
   950         -    let status = t!(child.wait());
   951         -    assert!(status.success());
   952         -}

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/doc.rs version [fcdfc4a443].

     1         -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Documentation generation for rustbuild.
    12         -//!
    13         -//! This module implements generation for all bits and pieces of documentation
    14         -//! for the Rust project. This notably includes suites like the rust book, the
    15         -//! nomicon, standalone documentation, etc.
    16         -//!
    17         -//! Everything here is basically just a shim around calling either `rustbook` or
    18         -//! `rustdoc`.
    19         -
    20         -use std::fs::{self, File};
    21         -use std::io::prelude::*;
    22         -use std::io;
    23         -use std::path::Path;
    24         -use std::process::Command;
    25         -
    26         -use {Build, Compiler, Mode};
    27         -use util::{cp_r, symlink_dir};
    28         -use build_helper::up_to_date;
    29         -
    30         -/// Invoke `rustbook` as compiled in `stage` for `target` for the doc book
    31         -/// `name` into the `out` path.
    32         -///
    33         -/// This will not actually generate any documentation if the documentation has
    34         -/// already been generated.
    35         -pub fn rustbook(build: &Build, target: &str, name: &str) {
    36         -    let out = build.doc_out(target);
    37         -    t!(fs::create_dir_all(&out));
    38         -
    39         -    let out = out.join(name);
    40         -    let compiler = Compiler::new(0, &build.config.build);
    41         -    let src = build.src.join("src/doc").join(name);
    42         -    let index = out.join("index.html");
    43         -    let rustbook = build.tool(&compiler, "rustbook");
    44         -    if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
    45         -        return
    46         -    }
    47         -    println!("Rustbook ({}) - {}", target, name);
    48         -    let _ = fs::remove_dir_all(&out);
    49         -    build.run(build.tool_cmd(&compiler, "rustbook")
    50         -                   .arg("build")
    51         -                   .arg(&src)
    52         -                   .arg("-d")
    53         -                   .arg(out));
    54         -}
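// Hedged sketch, not part of the original file: a simplified stand-in for the
// freshness check used above. The real build_helper::up_to_date is more
// thorough; this version only compares the modification times of two plain
// files, which is enough to illustrate the "skip if the output is at least as
// new as the input" pattern. It relies on the `fs` and `Path` imports at the
// top of this file.
#[allow(dead_code)]
fn file_up_to_date_sketch(src: &Path, dst: &Path) -> bool {
    // Missing metadata (e.g. the output does not exist yet) means "rebuild".
    let mtime = |p: &Path| fs::metadata(p).and_then(|m| m.modified()).ok();
    match (mtime(src), mtime(dst)) {
        (Some(s), Some(d)) => d >= s, // output is at least as new: skip
        _ => false,
    }
}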
    55         -
    56         -/// Build the book and associated stuff.
    57         -///
    58         -/// We need to build:
    59         -///
    60         -/// * Book (first edition)
    61         -/// * Book (second edition)
    62         -/// * Index page
    63         -/// * Redirect pages
    64         -pub fn book(build: &Build, target: &str, name: &str) {
    65         -    // build book first edition
    66         -    rustbook(build, target, &format!("{}/first-edition", name));
    67         -
    68         -    // build book second edition
    69         -    rustbook(build, target, &format!("{}/second-edition", name));
    70         -
    71         -    // build the index page
    72         -    let index = format!("{}/index.md", name);
    73         -    println!("Documenting book index ({})", target);
    74         -    invoke_rustdoc(build, target, &index);
    75         -
    76         -    // build the redirect pages
    77         -    println!("Documenting book redirect pages ({})", target);
    78         -    for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) {
    79         -        let file = t!(file);
    80         -        let path = file.path();
    81         -        let path = path.to_str().unwrap();
    82         -
    83         -        invoke_rustdoc(build, target, path);
    84         -    }
    85         -}
    86         -
    87         -fn invoke_rustdoc(build: &Build, target: &str, markdown: &str) {
    88         -    let out = build.doc_out(target);
    89         -
    90         -    let compiler = Compiler::new(0, &build.config.build);
    91         -
    92         -    let path = build.src.join("src/doc").join(markdown);
    93         -
    94         -    let rustdoc = build.rustdoc(&compiler);
    95         -
    96         -    let favicon = build.src.join("src/doc/favicon.inc");
    97         -    let footer = build.src.join("src/doc/footer.inc");
    98         -
    99         -    let version_input = build.src.join("src/doc/version_info.html.template");
   100         -    let version_info = out.join("version_info.html");
   101         -
   102         -    if !up_to_date(&version_input, &version_info) {
   103         -        let mut info = String::new();
   104         -        t!(t!(File::open(&version_input)).read_to_string(&mut info));
   105         -        let info = info.replace("VERSION", &build.rust_release())
   106         -                       .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
   107         -                       .replace("STAMP", build.rust_info.sha().unwrap_or(""));
   108         -        t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
   109         -    }
   110         -
   111         -    let mut cmd = Command::new(&rustdoc);
   112         -
   113         -    build.add_rustc_lib_path(&compiler, &mut cmd);
   114         -
   115         -    let out = out.join("book");
   116         -
   117         -    t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
   118         -
   119         -    cmd.arg("--html-after-content").arg(&footer)
   120         -        .arg("--html-before-content").arg(&version_info)
   121         -        .arg("--html-in-header").arg(&favicon)
   122         -        .arg("--markdown-playground-url")
   123         -        .arg("https://play.rust-lang.org/")
   124         -        .arg("-o").arg(&out)
   125         -        .arg(&path)
   126         -        .arg("--markdown-css")
   127         -        .arg("rust.css");
   128         -
   129         -    build.run(&mut cmd);
   130         -}
   131         -
   132         -/// Generates all standalone documentation as compiled by the rustdoc in `stage`
   133         -/// for the `target` into `out`.
   134         -///
   135         -/// This will list all of `src/doc` looking for markdown files and appropriately
   136         -/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
   137         -/// `STAMP` along with providing the various header/footer HTML we've customized.
   138         -///
   139         -/// In the end, this is just a glorified wrapper around rustdoc!
   140         -pub fn standalone(build: &Build, target: &str) {
   141         -    println!("Documenting standalone ({})", target);
   142         -    let out = build.doc_out(target);
   143         -    t!(fs::create_dir_all(&out));
   144         -
   145         -    let compiler = Compiler::new(0, &build.config.build);
   146         -
   147         -    let favicon = build.src.join("src/doc/favicon.inc");
   148         -    let footer = build.src.join("src/doc/footer.inc");
   149         -    let full_toc = build.src.join("src/doc/full-toc.inc");
   150         -    t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
   151         -
   152         -    let version_input = build.src.join("src/doc/version_info.html.template");
   153         -    let version_info = out.join("version_info.html");
   154         -
   155         -    if !up_to_date(&version_input, &version_info) {
   156         -        let mut info = String::new();
   157         -        t!(t!(File::open(&version_input)).read_to_string(&mut info));
   158         -        let info = info.replace("VERSION", &build.rust_release())
   159         -                       .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
   160         -                       .replace("STAMP", build.rust_info.sha().unwrap_or(""));
   161         -        t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
   162         -    }
   163         -
   164         -    for file in t!(fs::read_dir(build.src.join("src/doc"))) {
   165         -        let file = t!(file);
   166         -        let path = file.path();
   167         -        let filename = path.file_name().unwrap().to_str().unwrap();
   168         -        if !filename.ends_with(".md") || filename == "README.md" {
   169         -            continue
   170         -        }
   171         -
   172         -        let html = out.join(filename).with_extension("html");
   173         -        let rustdoc = build.rustdoc(&compiler);
   174         -        if up_to_date(&path, &html) &&
   175         -           up_to_date(&footer, &html) &&
   176         -           up_to_date(&favicon, &html) &&
   177         -           up_to_date(&full_toc, &html) &&
   178         -           up_to_date(&version_info, &html) &&
   179         -           up_to_date(&rustdoc, &html) {
   180         -            continue
   181         -        }
   182         -
   183         -        let mut cmd = Command::new(&rustdoc);
   184         -        build.add_rustc_lib_path(&compiler, &mut cmd);
   185         -        cmd.arg("--html-after-content").arg(&footer)
   186         -           .arg("--html-before-content").arg(&version_info)
   187         -           .arg("--html-in-header").arg(&favicon)
   188         -           .arg("--markdown-playground-url")
   189         -           .arg("https://play.rust-lang.org/")
   190         -           .arg("-o").arg(&out)
   191         -           .arg(&path);
   192         -
   193         -        if filename == "not_found.md" {
   194         -            cmd.arg("--markdown-no-toc")
   195         -               .arg("--markdown-css")
   196         -               .arg("https://doc.rust-lang.org/rust.css");
   197         -        } else {
   198         -            cmd.arg("--markdown-css").arg("rust.css");
   199         -        }
   200         -        build.run(&mut cmd);
   201         -    }
   202         -}
   203         -
   204         -/// Compile all standard library documentation.
   205         -///
   206         -/// This will generate all documentation for the standard library and its
   207         -/// dependencies. This is largely just a wrapper around `cargo doc`.
   208         -pub fn std(build: &Build, stage: u32, target: &str) {
   209         -    println!("Documenting stage{} std ({})", stage, target);
   210         -    let out = build.doc_out(target);
   211         -    t!(fs::create_dir_all(&out));
   212         -    let compiler = Compiler::new(stage, &build.config.build);
   213         -    let compiler = if build.force_use_stage1(&compiler, target) {
   214         -        Compiler::new(1, compiler.host)
   215         -    } else {
   216         -        compiler
   217         -    };
   218         -    let out_dir = build.stage_out(&compiler, Mode::Libstd)
   219         -                       .join(target).join("doc");
   220         -    let rustdoc = build.rustdoc(&compiler);
   221         -
   222         -    // Here what we're doing is creating a *symlink* (directory junction on
   223         -    // Windows) to the final output location. This is not done as an
   224         -    // optimization but rather for correctness. We've got three trees of
   225         -    // documentation, one for std, one for test, and one for rustc. It's then
   226         -    // our job to merge them all together.
   227         -    //
   228         -    // Unfortunately rustbuild doesn't know nearly as well how to merge doc
   229         -    // trees as rustdoc does itself, so instead of actually having three
   230         -    // separate trees we just have rustdoc output to the same location across
   231         -    // all of them.
   232         -    //
   233         -    // This way rustdoc generates its output directly into the shared output
   234         -    // directory and also handles the merging itself.
   235         -    let my_out = build.crate_doc_out(target);
   236         -    build.clear_if_dirty(&my_out, &rustdoc);
   237         -    t!(symlink_dir_force(&my_out, &out_dir));
   238         -
   239         -    let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
   240         -    cargo.arg("--manifest-path")
   241         -         .arg(build.src.join("src/libstd/Cargo.toml"))
   242         -         .arg("--features").arg(build.std_features());
   243         -
   244         -    // We don't want to build docs for internal std dependencies unless
   245         -    // in compiler-docs mode. When not in that mode, we whitelist the crates
   246         -    // for which docs must be built.
   247         -    if !build.config.compiler_docs {
   248         -        cargo.arg("--no-deps");
   249         -        for krate in &["alloc", "collections", "core", "std", "std_unicode"] {
   250         -            cargo.arg("-p").arg(krate);
   251         -            // Create all crate output directories first to make sure rustdoc uses
   252         -            // relative links.
   253         -            // FIXME: Cargo should probably do this itself.
   254         -            t!(fs::create_dir_all(out_dir.join(krate)));
   255         -        }
   256         -    }
   257         -
   258         -
   259         -    build.run(&mut cargo);
   260         -    cp_r(&my_out, &out);
   261         -}
   262         -
   263         -/// Compile all libtest documentation.
   264         -///
   265         -/// This will generate all documentation for libtest and its dependencies. This
   266         -/// is largely just a wrapper around `cargo doc`.
   267         -pub fn test(build: &Build, stage: u32, target: &str) {
   268         -    println!("Documenting stage{} test ({})", stage, target);
   269         -    let out = build.doc_out(target);
   270         -    t!(fs::create_dir_all(&out));
   271         -    let compiler = Compiler::new(stage, &build.config.build);
   272         -    let compiler = if build.force_use_stage1(&compiler, target) {
   273         -        Compiler::new(1, compiler.host)
   274         -    } else {
   275         -        compiler
   276         -    };
   277         -    let out_dir = build.stage_out(&compiler, Mode::Libtest)
   278         -                       .join(target).join("doc");
   279         -    let rustdoc = build.rustdoc(&compiler);
   280         -
   281         -    // See docs in std above for why we symlink
   282         -    let my_out = build.crate_doc_out(target);
   283         -    build.clear_if_dirty(&my_out, &rustdoc);
   284         -    t!(symlink_dir_force(&my_out, &out_dir));
   285         -
   286         -    let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
   287         -    cargo.arg("--manifest-path")
   288         -         .arg(build.src.join("src/libtest/Cargo.toml"));
   289         -    build.run(&mut cargo);
   290         -    cp_r(&my_out, &out);
   291         -}
   292         -
   293         -/// Generate all compiler documentation.
   294         -///
   295         -/// This will generate all documentation for the compiler libraries and their
   296         -/// dependencies. This is largely just a wrapper around `cargo doc`.
   297         -pub fn rustc(build: &Build, stage: u32, target: &str) {
   298         -    println!("Documenting stage{} compiler ({})", stage, target);
   299         -    let out = build.doc_out(target);
   300         -    t!(fs::create_dir_all(&out));
   301         -    let compiler = Compiler::new(stage, &build.config.build);
   302         -    let compiler = if build.force_use_stage1(&compiler, target) {
   303         -        Compiler::new(1, compiler.host)
   304         -    } else {
   305         -        compiler
   306         -    };
   307         -    let out_dir = build.stage_out(&compiler, Mode::Librustc)
   308         -                       .join(target).join("doc");
   309         -    let rustdoc = build.rustdoc(&compiler);
   310         -
   311         -    // See docs in std above for why we symlink
   312         -    let my_out = build.crate_doc_out(target);
   313         -    build.clear_if_dirty(&my_out, &rustdoc);
   314         -    t!(symlink_dir_force(&my_out, &out_dir));
   315         -
   316         -    let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
   317         -    cargo.arg("--manifest-path")
   318         -         .arg(build.src.join("src/rustc/Cargo.toml"))
   319         -         .arg("--features").arg(build.rustc_features());
   320         -
   321         -    if build.config.compiler_docs {
   322         -        // src/rustc/Cargo.toml contains bin crates called rustc and rustdoc
   323         -        // which would otherwise overwrite the docs for the real rustc and
   324         -        // rustdoc lib crates.
   325         -        cargo.arg("-p").arg("rustc_driver")
   326         -             .arg("-p").arg("rustdoc");
   327         -    } else {
   328         -        // Like with libstd above if compiler docs aren't enabled then we're not
   329         -        // documenting internal dependencies, so we have a whitelist.
   330         -        cargo.arg("--no-deps");
   331         -        for krate in &["proc_macro"] {
   332         -            cargo.arg("-p").arg(krate);
   333         -        }
   334         -    }
   335         -
   336         -    build.run(&mut cargo);
   337         -    cp_r(&my_out, &out);
   338         -}
   339         -
   340         -/// Generates the HTML rendered error-index by running the
   341         -/// `error_index_generator` tool.
   342         -pub fn error_index(build: &Build, target: &str) {
   343         -    println!("Documenting error index ({})", target);
   344         -    let out = build.doc_out(target);
   345         -    t!(fs::create_dir_all(&out));
   346         -    let compiler = Compiler::new(0, &build.config.build);
   347         -    let mut index = build.tool_cmd(&compiler, "error_index_generator");
   348         -    index.arg("html");
   349         -    index.arg(out.join("error-index.html"));
   350         -
   351         -    // FIXME: shouldn't have to pass this env var
   352         -    index.env("CFG_BUILD", &build.config.build);
   353         -
   354         -    build.run(&mut index);
   355         -}
   356         -
   357         -fn symlink_dir_force(src: &Path, dst: &Path) -> io::Result<()> {
   358         -    if let Ok(m) = fs::symlink_metadata(dst) {
   359         -        if m.file_type().is_dir() {
   360         -            try!(fs::remove_dir_all(dst));
   361         -        } else {
   362         -            // handle directory junctions on windows by falling back to
   363         -            // `remove_dir`.
   364         -            try!(fs::remove_file(dst).or_else(|_| {
   365         -                fs::remove_dir(dst)
   366         -            }));
   367         -        }
   368         -    }
   369         -
   370         -    symlink_dir(src, dst)
   371         -}
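
The deleted doc.rs above merges the std, test and rustc doc trees by symlinking
each Cargo doc output directory to one shared location; `symlink_dir_force`
clears whatever already occupies the destination (a plain directory, or a
file/junction on Windows) before recreating the link. A minimal standalone
sketch of that pattern follows; the platform-specific `symlink_dir` helper is
imported from elsewhere in the deleted sources, so it is spelled out here
directly against std as an assumption:

    use std::fs;
    use std::io;
    use std::path::Path;

    // Directory symlink helper: symlink() on Unix, symlink_dir() on Windows.
    #[cfg(unix)]
    fn symlink_dir(src: &Path, dst: &Path) -> io::Result<()> {
        std::os::unix::fs::symlink(src, dst)
    }
    #[cfg(windows)]
    fn symlink_dir(src: &Path, dst: &Path) -> io::Result<()> {
        std::os::windows::fs::symlink_dir(src, dst)
    }

    // Replace whatever currently occupies `dst` with a symlink to `src`.
    fn symlink_dir_force(src: &Path, dst: &Path) -> io::Result<()> {
        if let Ok(meta) = fs::symlink_metadata(dst) {
            if meta.file_type().is_dir() {
                // A real directory: remove its whole tree first.
                fs::remove_dir_all(dst)?;
            } else {
                // A symlink or directory junction: remove_file usually works,
                // but fall back to remove_dir for junctions on Windows.
                fs::remove_file(dst).or_else(|_| fs::remove_dir(dst))?;
            }
        }
        symlink_dir(src, dst)
    }

    fn main() -> io::Result<()> {
        // Hypothetical paths, purely for illustration.
        symlink_dir_force(Path::new("target/doc"), Path::new("build/doc-out"))
    }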

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/flags.rs version [81f6e410e1].

     1         -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Command-line interface of the rustbuild build system.
    12         -//!
    13         -//! This module implements the command-line parsing of the build system which
    14         -//! has various flags to configure how it's run.
    15         -
    16         -use std::env;
    17         -use std::fs;
    18         -use std::path::PathBuf;
    19         -use std::process;
    20         -
    21         -use getopts::{Matches, Options};
    22         -
    23         -use Build;
    24         -use config::Config;
    25         -use metadata;
    26         -use step;
    27         -
    28         -/// Deserialized version of all flags for this compile.
    29         -pub struct Flags {
    30         -    pub verbose: usize, // verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose
    31         -    pub on_fail: Option<String>,
    32         -    pub stage: Option<u32>,
    33         -    pub keep_stage: Option<u32>,
    34         -    pub build: String,
    35         -    pub host: Vec<String>,
    36         -    pub target: Vec<String>,
    37         -    pub config: Option<PathBuf>,
    38         -    pub src: Option<PathBuf>,
    39         -    pub jobs: Option<u32>,
    40         -    pub cmd: Subcommand,
    41         -    pub incremental: bool,
    42         -}
    43         -
    44         -impl Flags {
    45         -    pub fn verbose(&self) -> bool {
    46         -        self.verbose > 0
    47         -    }
    48         -
    49         -    pub fn very_verbose(&self) -> bool {
    50         -        self.verbose > 1
    51         -    }
    52         -}
    53         -
    54         -pub enum Subcommand {
    55         -    Build {
    56         -        paths: Vec<PathBuf>,
    57         -    },
    58         -    Doc {
    59         -        paths: Vec<PathBuf>,
    60         -    },
    61         -    Test {
    62         -        paths: Vec<PathBuf>,
    63         -        test_args: Vec<String>,
    64         -    },
    65         -    Bench {
    66         -        paths: Vec<PathBuf>,
    67         -        test_args: Vec<String>,
    68         -    },
    69         -    Clean,
    70         -    Dist {
    71         -        paths: Vec<PathBuf>,
    72         -        install: bool,
    73         -    },
    74         -}
    75         -
    76         -impl Flags {
    77         -    pub fn parse(args: &[String]) -> Flags {
    78         -        let mut opts = Options::new();
    79         -        opts.optflagmulti("v", "verbose", "use verbose output (-vv for very verbose)");
    80         -        opts.optflag("i", "incremental", "use incremental compilation");
    81         -        opts.optopt("", "config", "TOML configuration file for build", "FILE");
    82         -        opts.optopt("", "build", "build target of the stage0 compiler", "BUILD");
    83         -        opts.optmulti("", "host", "host targets to build", "HOST");
    84         -        opts.optmulti("", "target", "target targets to build", "TARGET");
    85         -        opts.optopt("", "on-fail", "command to run on failure", "CMD");
    86         -        opts.optopt("", "stage", "stage to build", "N");
    87         -        opts.optopt("", "keep-stage", "stage to keep without recompiling", "N");
    88         -        opts.optopt("", "src", "path to the root of the rust checkout", "DIR");
    89         -        opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
    90         -        opts.optflag("h", "help", "print this help message");
    91         -
    92         -        let usage = |n, opts: &Options| -> ! {
    93         -            let command = args.get(0).map(|s| &**s);
    94         -            let brief = format!("Usage: x.py {} [options] [<args>...]",
    95         -                                command.unwrap_or("<command>"));
    96         -
    97         -            println!("{}", opts.usage(&brief));
    98         -            match command {
    99         -                Some("build") => {
   100         -                    println!("\
   101         -Arguments:
   102         -    This subcommand accepts a number of positional arguments of directories to
   103         -    the crates and/or artifacts to compile. For example:
   104         -
   105         -        ./x.py build src/libcore
   106         -        ./x.py build src/libproc_macro
   107         -        ./x.py build src/libstd --stage 1
   108         -
   109         -    If no arguments are passed then the complete artifacts for that stage are
   110         -    also compiled.
   111         -
   112         -        ./x.py build
   113         -        ./x.py build --stage 1
   114         -
   115         -    For a quick build with a usable compile, you can pass:
   116         -
   117         -        ./x.py build --stage 1 src/libtest
   118         -");
   119         -                }
   120         -
   121         -                Some("test") => {
   122         -                    println!("\
   123         -Arguments:
   124         -    This subcommand accepts a number of positional arguments of directories to
   125         -    tests that should be compiled and run. For example:
   126         -
   127         -        ./x.py test src/test/run-pass
   128         -        ./x.py test src/libstd --test-args hash_map
   129         -        ./x.py test src/libstd --stage 0
   130         -
   131         -    If no arguments are passed then the complete artifacts for that stage are
   132         -    compiled and tested.
   133         -
   134         -        ./x.py test
   135         -        ./x.py test --stage 1
   136         -");
   137         -                }
   138         -
   139         -                Some("doc") => {
   140         -                    println!("\
   141         -Arguments:
   142         -    This subcommand accepts a number of positional arguments of directories of
   143         -    documentation to build. For example:
   144         -
   145         -        ./x.py doc src/doc/book
   146         -        ./x.py doc src/doc/nomicon
   147         -        ./x.py doc src/libstd
   148         -
   149         -    If no arguments are passed then everything is documented:
   150         -
   151         -        ./x.py doc
   152         -        ./x.py doc --stage 1
   153         -");
   154         -                }
   155         -
   156         -                _ => {}
   157         -            }
   158         -
   159         -            if let Some(command) = command {
   160         -                if command == "build" ||
   161         -                   command == "dist" ||
   162         -                   command == "doc" ||
   163         -                   command == "test" ||
   164         -                   command == "bench" ||
   165         -                   command == "clean"  {
   166         -                    println!("Available invocations:");
   167         -                    if args.iter().any(|a| a == "-v") {
   168         -                        let flags = Flags::parse(&["build".to_string()]);
   169         -                        let mut config = Config::default();
   170         -                        config.build = flags.build.clone();
   171         -                        let mut build = Build::new(flags, config);
   172         -                        metadata::build(&mut build);
   173         -                        step::build_rules(&build).print_help(command);
   174         -                    } else {
   175         -                        println!("    ... elided, run `./x.py {} -h -v` to see",
   176         -                                 command);
   177         -                    }
   178         -
   179         -                    println!("");
   180         -                }
   181         -            }
   182         -
   183         -println!("\
   184         -Subcommands:
   185         -    build       Compile either the compiler or libraries
   186         -    test        Build and run some test suites
   187         -    bench       Build and run some benchmarks
   188         -    doc         Build documentation
   189         -    clean       Clean out build directories
   190         -    dist        Build and/or install distribution artifacts
   191         -
   192         -To learn more about a subcommand, run `./x.py <command> -h`
   193         -");
   194         -
   195         -            process::exit(n);
   196         -        };
   197         -        if args.len() == 0 {
   198         -            println!("a command must be passed");
   199         -            usage(1, &opts);
   200         -        }
   201         -        let parse = |opts: &Options| {
   202         -            let m = opts.parse(&args[1..]).unwrap_or_else(|e| {
   203         -                println!("failed to parse options: {}", e);
   204         -                usage(1, opts);
   205         -            });
   206         -            if m.opt_present("h") {
   207         -                usage(0, opts);
   208         -            }
   209         -            return m
   210         -        };
   211         -
   212         -        let cwd = t!(env::current_dir());
   213         -        let remaining_as_path = |m: &Matches| {
   214         -            m.free.iter().map(|p| cwd.join(p)).collect::<Vec<_>>()
   215         -        };
   216         -
   217         -        let m: Matches;
   218         -        let cmd = match &args[0][..] {
   219         -            "build" => {
   220         -                m = parse(&opts);
   221         -                Subcommand::Build { paths: remaining_as_path(&m) }
   222         -            }
   223         -            "doc" => {
   224         -                m = parse(&opts);
   225         -                Subcommand::Doc { paths: remaining_as_path(&m) }
   226         -            }
   227         -            "test" => {
   228         -                opts.optmulti("", "test-args", "extra arguments", "ARGS");
   229         -                m = parse(&opts);
   230         -                Subcommand::Test {
   231         -                    paths: remaining_as_path(&m),
   232         -                    test_args: m.opt_strs("test-args"),
   233         -                }
   234         -            }
   235         -            "bench" => {
   236         -                opts.optmulti("", "test-args", "extra arguments", "ARGS");
   237         -                m = parse(&opts);
   238         -                Subcommand::Bench {
   239         -                    paths: remaining_as_path(&m),
   240         -                    test_args: m.opt_strs("test-args"),
   241         -                }
   242         -            }
   243         -            "clean" => {
   244         -                m = parse(&opts);
   245         -                if m.free.len() > 0 {
   246         -                    println!("clean takes no arguments");
   247         -                    usage(1, &opts);
   248         -                }
   249         -                Subcommand::Clean
   250         -            }
   251         -            "dist" => {
   252         -                opts.optflag("", "install", "run installer as well");
   253         -                m = parse(&opts);
   254         -                Subcommand::Dist {
   255         -                    paths: remaining_as_path(&m),
   256         -                    install: m.opt_present("install"),
   257         -                }
   258         -            }
   259         -            "--help" => usage(0, &opts),
   260         -            cmd => {
   261         -                println!("unknown command: {}", cmd);
   262         -                usage(1, &opts);
   263         -            }
   264         -        };
   265         -
   266         -
   267         -        let cfg_file = m.opt_str("config").map(PathBuf::from).or_else(|| {
   268         -            if fs::metadata("config.toml").is_ok() {
   269         -                Some(PathBuf::from("config.toml"))
   270         -            } else {
   271         -                None
   272         -            }
   273         -        });
   274         -
   275         -        let mut stage = m.opt_str("stage").map(|j| j.parse().unwrap());
   276         -
   277         -        let incremental = m.opt_present("i");
   278         -
   279         -        if incremental {
   280         -            if stage.is_none() {
   281         -                stage = Some(1);
   282         -            }
   283         -        }
   284         -
   285         -        Flags {
   286         -            verbose: m.opt_count("v"),
   287         -            stage: stage,
   288         -            on_fail: m.opt_str("on-fail"),
   289         -            keep_stage: m.opt_str("keep-stage").map(|j| j.parse().unwrap()),
   290         -            build: m.opt_str("build").unwrap_or_else(|| {
   291         -                env::var("BUILD").unwrap()
   292         -            }),
   293         -            host: split(m.opt_strs("host")),
   294         -            target: split(m.opt_strs("target")),
   295         -            config: cfg_file,
   296         -            src: m.opt_str("src").map(PathBuf::from),
   297         -            jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()),
   298         -            cmd: cmd,
   299         -            incremental: incremental,
   300         -        }
   301         -    }
   302         -}
   303         -
   304         -impl Subcommand {
   305         -    pub fn test_args(&self) -> Vec<&str> {
   306         -        match *self {
   307         -            Subcommand::Test { ref test_args, .. } |
   308         -            Subcommand::Bench { ref test_args, .. } => {
   309         -                test_args.iter().flat_map(|s| s.split_whitespace()).collect()
   310         -            }
   311         -            _ => Vec::new(),
   312         -        }
   313         -    }
   314         -}
   315         -
   316         -fn split(s: Vec<String>) -> Vec<String> {
   317         -    s.iter().flat_map(|s| s.split(',')).map(|s| s.to_string()).collect()
   318         -    s.iter().flat_map(|s| s.split(',')).map(|s| s.to_string()).collect()
   319         -}
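
The deleted flags.rs above drives its command-line handling through getopts:
global options are registered up front, subcommand-specific ones (such as
--test-args) are added just before parsing, and the leftover free arguments
are joined against the current directory to become paths. A minimal,
self-contained sketch of that parse pattern, assuming getopts as a dependency
and using a reduced option set:

    use getopts::Options;
    use std::env;
    use std::path::PathBuf;

    fn main() {
        let args: Vec<String> = env::args().skip(1).collect();

        let mut opts = Options::new();
        opts.optflagmulti("v", "verbose", "use verbose output (-vv for very verbose)");
        opts.optopt("", "stage", "stage to build", "N");
        opts.optflag("h", "help", "print this help message");

        let matches = opts.parse(&args).unwrap_or_else(|e| {
            println!("failed to parse options: {}", e);
            std::process::exit(1);
        });
        if matches.opt_present("h") {
            println!("{}", opts.usage("Usage: tool [options] [<paths>...]"));
            return;
        }

        // Free (positional) arguments become paths joined against the current
        // directory, mirroring `remaining_as_path` in the deleted flags.rs.
        let cwd = env::current_dir().expect("no current directory");
        let paths: Vec<PathBuf> = matches.free.iter().map(|p| cwd.join(p)).collect();

        let verbose = matches.opt_count("v");
        let stage: Option<u32> = matches.opt_str("stage").map(|s| s.parse().unwrap());
        println!("verbose={} stage={:?} paths={:?}", verbose, stage, paths);
    }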

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/install.rs version [cfae41ed18].

     1         -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Implementation of the install aspects of the compiler.
    12         -//!
    13         -//! This module is responsible for installing the standard library,
    14         -//! compiler, and documentation.
    15         -
    16         -use std::env;
    17         -use std::fs;
    18         -use std::path::{Path, PathBuf, Component};
    19         -use std::process::Command;
    20         -
    21         -use Build;
    22         -use dist::{sanitize_sh, tmpdir};
    23         -
    24         -/// Installs everything.
    25         -pub fn install(build: &Build, stage: u32, host: &str) {
    26         -    let prefix_default = PathBuf::from("/usr/local");
    27         -    let docdir_default = PathBuf::from("share/doc/rust");
    28         -    let mandir_default = PathBuf::from("share/man");
    29         -    let libdir_default = PathBuf::from("lib");
    30         -    let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
    31         -    let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
    32         -    let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
    33         -    let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
    34         -
    35         -    let docdir = prefix.join(docdir);
    36         -    let libdir = prefix.join(libdir);
    37         -    let mandir = prefix.join(mandir);
    38         -
    39         -    let destdir = env::var_os("DESTDIR").map(PathBuf::from);
    40         -
    41         -    let prefix = add_destdir(&prefix, &destdir);
    42         -    let docdir = add_destdir(&docdir, &destdir);
    43         -    let libdir = add_destdir(&libdir, &destdir);
    44         -    let mandir = add_destdir(&mandir, &destdir);
    45         -
    46         -    let empty_dir = build.out.join("tmp/empty_dir");
    47         -    t!(fs::create_dir_all(&empty_dir));
    48         -    if build.config.docs {
    49         -        install_sh(&build, "docs", "rust-docs", stage, host, &prefix,
    50         -                   &docdir, &libdir, &mandir, &empty_dir);
    51         -    }
    52         -    if build.config.rust_save_analysis {
    53         -        install_sh(&build, "analysis", "rust-analysis", stage, host, &prefix,
    54         -                   &docdir, &libdir, &mandir, &empty_dir);
    55         -    }
    56         -    install_sh(&build, "std", "rust-std", stage, host, &prefix,
    57         -               &docdir, &libdir, &mandir, &empty_dir);
    58         -    install_sh(&build, "rustc", "rustc", stage, host, &prefix,
    59         -               &docdir, &libdir, &mandir, &empty_dir);
    60         -    t!(fs::remove_dir_all(&empty_dir));
    61         -}
    62         -
    63         -fn install_sh(build: &Build, package: &str, name: &str, stage: u32, host: &str,
    64         -              prefix: &Path, docdir: &Path, libdir: &Path, mandir: &Path, empty_dir: &Path) {
    65         -    println!("Install {} stage{} ({})", package, stage, host);
    66         -    let package_name = format!("{}-{}-{}", name, build.rust_package_vers(), host);
    67         -
    68         -    let mut cmd = Command::new("sh");
    69         -    cmd.current_dir(empty_dir)
    70         -       .arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh")))
    71         -       .arg(format!("--prefix={}", sanitize_sh(prefix)))
    72         -       .arg(format!("--docdir={}", sanitize_sh(docdir)))
    73         -       .arg(format!("--libdir={}", sanitize_sh(libdir)))
    74         -       .arg(format!("--mandir={}", sanitize_sh(mandir)))
    75         -       .arg("--disable-ldconfig");
    76         -    build.run(&mut cmd);
    77         -}
    78         -
    79         -fn add_destdir(path: &Path, destdir: &Option<PathBuf>) -> PathBuf {
    80         -    let mut ret = match *destdir {
    81         -        Some(ref dest) => dest.clone(),
    82         -        None => return path.to_path_buf(),
    83         -    };
    84         -    for part in path.components() {
    85         -        match part {
    86         -            Component::Normal(s) => ret.push(s),
    87         -            _ => {}
    88         -        }
    89         -    }
    90         -    return ret
    91         -    return ret
    92         -}
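
`add_destdir` in the deleted install.rs implements the usual DESTDIR staging
convention: when DESTDIR is set, the absolute install prefix is re-rooted
under it by re-appending only the path's normal components. A small
standalone sketch of the same idea, with a hypothetical example path:

    use std::path::{Component, Path, PathBuf};

    // Re-root an absolute install path under an optional DESTDIR by keeping
    // only its Normal components (the root and any prefix are dropped).
    fn add_destdir(path: &Path, destdir: &Option<PathBuf>) -> PathBuf {
        let mut ret = match *destdir {
            Some(ref dest) => dest.clone(),
            None => return path.to_path_buf(),
        };
        for part in path.components() {
            if let Component::Normal(s) = part {
                ret.push(s);
            }
        }
        ret
    }

    fn main() {
        let destdir = Some(PathBuf::from("/tmp/stage"));
        // Prints "/tmp/stage/usr/local/share/doc/rust".
        let out = add_destdir(Path::new("/usr/local/share/doc/rust"), &destdir);
        println!("{}", out.display());
    }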

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/job.rs version [beec712824].

     1         -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Job management on Windows for bootstrapping
    12         -//!
    13         -//! Most of the time when you're running a build system (e.g. make) you expect
    14         -//! Ctrl-C or abnormal termination to actually terminate the entire tree of
    15         -//! process in play, not just the one at the top. This currently works "by
    16         -//! default" on Unix platforms because Ctrl-C actually sends a signal to the
    17         -//! *process group* rather than the parent process, so everything will get torn
    18         -//! down. On Windows, however, this does not happen and Ctrl-C just kills the
    19         -//! parent process.
    20         -//!
    21         -//! To achieve the same semantics on Windows we use Job Objects to ensure that
    22         -//! all processes die at the same time. Job objects have a mode of operation
    23         -//! where when all handles to the object are closed it causes all child
    24         -//! processes associated with the object to be terminated immediately.
    25         -//! Conveniently whenever a process in the job object spawns a new process the
    26         -//! child will be associated with the job object as well. This means if we add
    27         -//! ourselves to the job object we create then everything will get torn down!
    28         -//!
    29         -//! Unfortunately most of the time the build system is actually called from a
    30         -//! python wrapper (which manages things like building the build system) so this
    31         -//! all doesn't quite cut it so far. To go the last mile we duplicate the job
    32         -//! object handle into our parent process (a python process probably) and then
    33         -//! close our own handle. This means that the only handle to the job object
    34         -//! resides in the parent python process, so when python dies the whole build
    35         -//! system dies (as one would probably expect!).
    36         -//!
    37         -//! Note that this module has a #[cfg(windows)] above it as none of this logic
    38         -//! is required on Unix.
    39         -
    40         -#![allow(bad_style, dead_code)]
    41         -
    42         -use std::env;
    43         -use std::io;
    44         -use std::mem;
    45         -
    46         -type HANDLE = *mut u8;
    47         -type BOOL = i32;
    48         -type DWORD = u32;
    49         -type LPHANDLE = *mut HANDLE;
    50         -type LPVOID = *mut u8;
    51         -type JOBOBJECTINFOCLASS = i32;
    52         -type SIZE_T = usize;
    53         -type LARGE_INTEGER = i64;
    54         -type UINT = u32;
    55         -type ULONG_PTR = usize;
    56         -type ULONGLONG = u64;
    57         -
    58         -const FALSE: BOOL = 0;
    59         -const DUPLICATE_SAME_ACCESS: DWORD = 0x2;
    60         -const PROCESS_DUP_HANDLE: DWORD = 0x40;
    61         -const JobObjectExtendedLimitInformation: JOBOBJECTINFOCLASS = 9;
    62         -const JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE: DWORD = 0x2000;
    63         -const SEM_FAILCRITICALERRORS: UINT = 0x0001;
    64         -const SEM_NOGPFAULTERRORBOX: UINT = 0x0002;
    65         -
    66         -extern "system" {
    67         -    fn CreateJobObjectW(lpJobAttributes: *mut u8, lpName: *const u8) -> HANDLE;
    68         -    fn CloseHandle(hObject: HANDLE) -> BOOL;
    69         -    fn GetCurrentProcess() -> HANDLE;
    70         -    fn OpenProcess(dwDesiredAccess: DWORD,
    71         -                   bInheritHandle: BOOL,
    72         -                   dwProcessId: DWORD) -> HANDLE;
    73         -    fn DuplicateHandle(hSourceProcessHandle: HANDLE,
    74         -                       hSourceHandle: HANDLE,
    75         -                       hTargetProcessHandle: HANDLE,
    76         -                       lpTargetHandle: LPHANDLE,
    77         -                       dwDesiredAccess: DWORD,
    78         -                       bInheritHandle: BOOL,
    79         -                       dwOptions: DWORD) -> BOOL;
    80         -    fn AssignProcessToJobObject(hJob: HANDLE, hProcess: HANDLE) -> BOOL;
    81         -    fn SetInformationJobObject(hJob: HANDLE,
    82         -                               JobObjectInformationClass: JOBOBJECTINFOCLASS,
    83         -                               lpJobObjectInformation: LPVOID,
    84         -                               cbJobObjectInformationLength: DWORD) -> BOOL;
    85         -    fn SetErrorMode(mode: UINT) -> UINT;
    86         -}
    87         -
    88         -#[repr(C)]
    89         -struct JOBOBJECT_EXTENDED_LIMIT_INFORMATION {
    90         -    BasicLimitInformation: JOBOBJECT_BASIC_LIMIT_INFORMATION,
    91         -    IoInfo: IO_COUNTERS,
    92         -    ProcessMemoryLimit: SIZE_T,
    93         -    JobMemoryLimit: SIZE_T,
    94         -    PeakProcessMemoryUsed: SIZE_T,
    95         -    PeakJobMemoryUsed: SIZE_T,
    96         -}
    97         -
    98         -#[repr(C)]
    99         -struct IO_COUNTERS {
   100         -    ReadOperationCount: ULONGLONG,
   101         -    WriteOperationCount: ULONGLONG,
   102         -    OtherOperationCount: ULONGLONG,
   103         -    ReadTransferCount: ULONGLONG,
   104         -    WriteTransferCount: ULONGLONG,
   105         -    OtherTransferCount: ULONGLONG,
   106         -}
   107         -
   108         -#[repr(C)]
   109         -struct JOBOBJECT_BASIC_LIMIT_INFORMATION {
   110         -    PerProcessUserTimeLimit: LARGE_INTEGER,
   111         -    PerJobUserTimeLimit: LARGE_INTEGER,
   112         -    LimitFlags: DWORD,
   113         -    MinimumWorkingsetSize: SIZE_T,
   114         -    MaximumWorkingsetSize: SIZE_T,
   115         -    ActiveProcessLimit: DWORD,
   116         -    Affinity: ULONG_PTR,
   117         -    PriorityClass: DWORD,
   118         -    SchedulingClass: DWORD,
   119         -}
   120         -
   121         -pub unsafe fn setup() {
   122         -    // Tell Windows to not show any UI on errors (such as not finding a required dll
   123         -    // during startup or terminating abnormally).  This is important for running tests,
   124         -    // since some of them use abnormal termination by design.
   125         -    // This mode is inherited by all child processes.
   126         -    let mode = SetErrorMode(SEM_NOGPFAULTERRORBOX); // read inherited flags
   127         -    SetErrorMode(mode | SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX);
   128         -
   129         -    // Create a new job object for us to use
   130         -    let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
   131         -    assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());
   132         -
   133         -    // Indicate that when all handles to the job object are gone, all
   134         -    // processes in the object should be killed. Note that this includes our
   135         -    // entire process tree by default because we've added ourselves and our
   136         -    // children will reside in the job by default.
   137         -    let mut info = mem::zeroed::<JOBOBJECT_EXTENDED_LIMIT_INFORMATION>();
   138         -    info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
   139         -    let r = SetInformationJobObject(job,
   140         -                                    JobObjectExtendedLimitInformation,
   141         -                                    &mut info as *mut _ as LPVOID,
   142         -                                    mem::size_of_val(&info) as DWORD);
   143         -    assert!(r != 0, "{}", io::Error::last_os_error());
   144         -
   145         -    // Assign our process to this job object. Note that if this fails, one very
   146         -    // likely reason is that we are ourselves already in a job object! This can
   147         -    // happen on the build bots that we've got for Windows, or if just anyone
   148         -    // else is instrumenting the build. In this case we just bail out
   149         -    // immediately and assume that they take care of it.
   150         -    //
   151         -    // Also note that nested jobs (the reason this might fail) are supported in
   152         -    // recent versions of Windows, but at least the version of Windows that our
   153         -    // bots are running does not support nested job objects.
   154         -    let r = AssignProcessToJobObject(job, GetCurrentProcess());
   155         -    if r == 0 {
   156         -        CloseHandle(job);
   157         -        return
   158         -    }
   159         -
   160         -    // If we've got a parent process (e.g. the python script that called us)
   161         -    // then move ownership of this job object up to them. That way if the python
   162         -    // script is killed (e.g. via ctrl-c) then we'll all be torn down.
   163         -    //
   164         -    // If we don't have a parent (e.g. this was run directly) then we
   165         -    // intentionally leak the job object handle. When our process exits
   166         -    // (normally or abnormally) it will close the handle implicitly, causing all
   167         -    // processes in the job to be cleaned up.
   168         -    let pid = match env::var("BOOTSTRAP_PARENT_ID") {
   169         -        Ok(s) => s,
   170         -        Err(..) => return,
   171         -    };
   172         -
   173         -    let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap());
   174         -    assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error());
   175         -    let mut parent_handle = 0 as *mut _;
   176         -    let r = DuplicateHandle(GetCurrentProcess(), job,
   177         -                            parent, &mut parent_handle,
   178         -                            0, FALSE, DUPLICATE_SAME_ACCESS);
   179         -
   180         -    // If this failed, well at least we tried! An example of DuplicateHandle
   181         -    // failing in the past has been when the wrong python2 package spawned this
   182         -    // build system (e.g. the `python2` package in MSYS instead of
   183         -    // `mingw-w64-x86_64-python2`). Not sure why it failed, but the "failure
   184         -    // mode" here is that we only clean everything up when the build system
   185         -    // dies, not when the python parent does, so not too bad.
   186         -    if r != 0 {
   187         -        CloseHandle(job);
   188         -    }
   189         -}
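
The deleted job.rs above is compiled only on Windows; a non-Windows stub
provides an empty `job::setup()`, and the build entry point calls it once
before spawning any child processes so that closing the last handle to the
job object tears down the whole process tree. A minimal sketch of that
wiring, with stand-in module bodies (the real Windows body is the file
above):

    // The cfg-gated module pair mirrors the pattern used by the deleted
    // lib.rs further down in this check-in; the bodies here are stand-ins.
    #[cfg(windows)]
    mod job {
        pub unsafe fn setup() {
            // Real implementation: create a job object with
            // JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE, assign the current process
            // to it, and duplicate the handle into the parent named by
            // BOOTSTRAP_PARENT_ID (see job.rs above).
        }
    }

    #[cfg(not(windows))]
    mod job {
        // Nothing to do on Unix: Ctrl-C already signals the process group.
        pub unsafe fn setup() {}
    }

    fn main() {
        // Must run before any child processes are spawned.
        unsafe { job::setup(); }
        // ... the rest of the build would follow here ...
    }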

Deleted wiki_references/2017/software/Rust/src_from_GitHub/the_repository_clones/rust/src/bootstrap/lib.rs version [6b3eeb7492].

     1         -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
     2         -// file at the top-level directory of this distribution and at
     3         -// http://rust-lang.org/COPYRIGHT.
     4         -//
     5         -// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
     6         -// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
     7         -// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
     8         -// option. This file may not be copied, modified, or distributed
     9         -// except according to those terms.
    10         -
    11         -//! Implementation of rustbuild, the Rust build system.
    12         -//!
    13         -//! This module, and its descendants, are the implementation of the Rust build
    14         -//! system. Most of this build system is backed by Cargo but the outer layer
    15         -//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
    16         -//! builds, building artifacts like LLVM, etc. The goals of rustbuild are:
    17         -//!
    18         -//! * To be an easily understandable, easily extensible, and maintainable build
    19         -//!   system.
    20         -//! * Leverage standard tools in the Rust ecosystem to build the compiler, aka
    21         -//!   crates.io and Cargo.
    22         -//! * A standard interface to build across all platforms, including MSVC
    23         -//!
    24         -//! ## Architecture
    25         -//!
    26         -//! Although this build system defers most of the complicated logic to Cargo
    27         -//! itself, it still needs to maintain a list of targets and dependencies which
    28         -//! it can itself perform. Rustbuild is made up of a list of rules with
    29         -//! dependencies amongst them (created in the `step` module) and then knows how
    30         -//! to execute each in sequence. Each time rustbuild is invoked, it will simply
    31         -//! iterate through this list of steps and execute each serially in turn.  For
    32         -//! each step rustbuild relies on the step internally being incremental and
    33         -//! parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded
    34         -//! to appropriate test harnesses and such.
    35         -//!
    36         -//! Most of the "meaty" steps that matter are backed by Cargo, which does indeed
    37         -//! have its own parallelism and incremental management. Later steps, like
    38         -//! tests, aren't incremental and simply run the entire suite currently.
    39         -//!
    40         -//! When you execute `x.py build`, the steps which are executed are:
    41         -//!
    42         -//! * First, the python script is run. This will automatically download the
    43         -//!   stage0 rustc and cargo according to `src/stage0.txt`, or using the cached
    44         -//!   versions if they're available. These are then used to compile rustbuild
    45         -//!   itself (using Cargo). Finally, control is then transferred to rustbuild.
    46         -//!
    47         -//! * Rustbuild takes over, performs sanity checks, probes the environment,
    48         -//!   reads configuration, builds up a list of steps, and then starts executing
    49         -//!   them.
    50         -//!
    51         -//! * The stage0 libstd is compiled
    52         -//! * The stage0 libtest is compiled
    53         -//! * The stage0 librustc is compiled
    54         -//! * The stage1 compiler is assembled
    55         -//! * The stage1 libstd, libtest, librustc are compiled
    56         -//! * The stage2 compiler is assembled
    57         -//! * The stage2 libstd, libtest, librustc are compiled
    58         -//!
    59         -//! Each step is driven by a separate Cargo project and rustbuild orchestrates
    60         -//! copying files between steps and otherwise preparing for Cargo to run.
    61         -//!
    62         -//! ## Further information
    63         -//!
    64         -//! More documentation can be found in each respective module below, and you can
    65         -//! also check out the `src/bootstrap/README.md` file for more information.
    66         -
    67         -#![deny(warnings)]
    68         -
    69         -#[macro_use]
    70         -extern crate build_helper;
    71         -extern crate cmake;
    72         -extern crate filetime;
    73         -extern crate gcc;
    74         -extern crate getopts;
    75         -extern crate num_cpus;
    76         -extern crate rustc_serialize;
    77         -extern crate toml;
    78         -
    79         -use std::cmp;
    80         -use std::collections::HashMap;
    81         -use std::env;
    82         -use std::ffi::OsString;
    83         -use std::fs::{self, File};
    84         -use std::io::Read;
    85         -use std::path::{Component, PathBuf, Path};
    86         -use std::process::Command;
    87         -
    88         -use build_helper::{run_silent, run_suppressed, output, mtime};
    89         -
    90         -use util::{exe, libdir, add_lib_path};
    91         -
    92         -mod cc;
    93         -mod channel;
    94         -mod check;
    95         -mod clean;
    96         -mod compile;
    97         -mod metadata;
    98         -mod config;
    99         -mod dist;
   100         -mod doc;
   101         -mod flags;
   102         -mod install;
   103         -mod native;
   104         -mod sanity;
   105         -mod step;
   106         -pub mod util;
   107         -
   108         -#[cfg(windows)]
   109         -mod job;
   110         -
   111         -#[cfg(not(windows))]
   112         -mod job {
   113         -    pub unsafe fn setup() {}
   114         -}
   115         -
   116         -pub use config::Config;
   117         -pub use flags::{Flags, Subcommand};
   118         -
   119         -/// A structure representing a Rust compiler.
   120         -///
   121         -/// Each compiler has a `stage` that it is associated with and a `host` that
   122         -/// corresponds to the platform the compiler runs on. This structure is used as
   123         -/// a parameter to many methods below.
   124         -#[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)]
   125         -pub struct Compiler<'a> {
   126         -    stage: u32,
   127         -    host: &'a str,
   128         -}
   129         -
   130         -/// Global configuration for the build system.
   131         -///
   132         -/// This structure transitively contains all configuration for the build system.
   133         -/// All filesystem-encoded configuration is in `config`, all flags are in
   134         -/// `flags`, and then parsed or probed information is listed in the keys below.
   135         -///
   136         -/// This structure is a parameter of almost all methods in the build system,
   137         -/// although most functions are implemented as free functions rather than
   138         -/// methods specifically on this structure itself (to make it easier to
   139         -/// organize).
   140         -pub struct Build {
   141         -    // User-specified configuration via config.toml
   142         -    config: Config,
   143         -
   144         -    // User-specified configuration via CLI flags
   145         -    flags: Flags,
   146         -
   147         -    // Derived properties from the above two configurations
   148         -    cargo: PathBuf,
   149         -    rustc: PathBuf,
   150         -    src: PathBuf,
   151         -    out: PathBuf,
   152         -    rust_info: channel::GitInfo,
   153         -    cargo_info: channel::GitInfo,
   154         -    local_rebuild: bool,
   155         -
   156         -    // Probed tools at runtime
   157         -    lldb_version: Option<String>,
   158         -    lldb_python_dir: Option<String>,
   159         -
   160         -    // Runtime state filled in later on
   161         -    cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
   162         -    cxx: HashMap<String, gcc::Tool>,
   163         -    crates: HashMap<String, Crate>,
   164         -    is_sudo: bool,
   165         -}
   166         -
   167         -#[derive(Debug)]
   168         -struct Crate {
   169         -    name: String,
   170         -    version: String,
   171         -    deps: Vec<String>,
   172         -    path: PathBuf,
   173         -    doc_step: String,
   174         -    build_step: String,
   175         -    test_step: String,
   176         -    bench_step: String,
   177         -}
   178         -
   179         -/// The various "modes" of invoking Cargo.
   180         -///
   181         -/// These entries currently correspond to the various output directories of the
   182         -/// build system, with each mod generating output in a different directory.
   183         -#[derive(Clone, Copy, PartialEq, Eq)]
   184         -pub enum Mode {
   185         -    /// This cargo is going to build the standard library, placing output in the
   186         -    /// "stageN-std" directory.
   187         -    Libstd,
   188         -
   189         -    /// This cargo is going to build libtest, placing output in the
   190         -    /// "stageN-test" directory.
   191         -    Libtest,
   192         -
   193         -    /// This cargo is going to build librustc and compiler libraries, placing
   194         -    /// output in the "stageN-rustc" directory.
   195         -    Librustc,
   196         -
   197         -    /// This cargo is going to build some tool, placing output in the
   198         -    /// "stageN-tools" directory.
   199         -    Tool,
   200         -}
   201         -
   202         -impl Build {
   203         -    /// Creates a new set of build configuration from the `flags` on the command
   204         -    /// line and the filesystem `config`.
   205         -    ///
   206         -    /// By default all build output will be placed in the current directory.
   207         -    pub fn new(flags: Flags, config: Config) -> Build {
   208         -        let cwd = t!(env::current_dir());
   209         -        let src = flags.src.clone().or_else(|| {
   210         -            env::var_os("SRC").map(|x| x.into())
   211         -        }).unwrap_or(cwd.clone());
   212         -        let out = cwd.join("build");
   213         -
   214         -        let stage0_root = out.join(&config.build).join("stage0/bin");
   215         -        let rustc = match config.rustc {
   216         -            Some(ref s) => PathBuf::from(s),
   217         -            None => stage0_root.join(exe("rustc", &config.build)),
   218         -        };
   219         -        let cargo = match config.cargo {
   220         -            Some(ref s) => PathBuf::from(s),
   221         -            None => stage0_root.join(exe("cargo", &config.build)),
   222         -        };
   223         -        let local_rebuild = config.local_rebuild;
   224         -
   225         -        let is_sudo = match env::var_os("SUDO_USER") {
   226         -            Some(sudo_user) => {
   227         -                match env::var_os("USER") {
   228         -                    Some(user) => user != sudo_user,
   229         -                    None => false,
   230         -                }
   231         -            }
   232         -            None => false,
   233         -        };
   234         -        let rust_info = channel::GitInfo::new(&src);
   235         -        let cargo_info = channel::GitInfo::new(&src.join("cargo"));
   236         -
   237         -        Build {
   238         -            flags: flags,
   239         -            config: config,
   240         -            cargo: cargo,
   241         -            rustc: rustc,
   242         -            src: src,
   243         -            out: out,
   244         -
   245         -            rust_info: rust_info,
   246         -            cargo_info: cargo_info,
   247         -            local_rebuild: local_rebuild,
   248         -            cc: HashMap::new(),
   249         -            cxx: HashMap::new(),
   250         -            crates: HashMap::new(),
   251         -            lldb_version: None,
   252         -            lldb_python_dir: None,
   253         -            is_sudo: is_sudo,
   254         -        }
   255         -    }
   256         -
   257         -    /// Executes the entire build, as configured by the flags and configuration.
   258         -    pub fn build(&mut self) {
   259         -        unsafe {
   260         -            job::setup();
   261         -        }
   262         -
   263         -        if let Subcommand::Clean = self.flags.cmd {
   264         -            return clean::clean(self);
   265         -        }
   266         -
   267         -        self.verbose("finding compilers");
   268         -        cc::find(self);
   269         -        self.verbose("running sanity check");
   270         -        sanity::check(self);
   271         -        // If local-rust is the same major.minor as the current version, then force a local-rebuild
   272         -        let local_version_verbose = output(
   273         -            Command::new(&self.rustc).arg("--version").arg("--verbose"));
   274         -        let local_release = local_version_verbose
   275         -            .lines().filter(|x| x.starts_with("release:"))
   276         -            .next().unwrap().trim_left_matches("release:").trim();
   277         -        let my_version = channel::CFG_RELEASE_NUM;
   278         -        if local_release.split('.').take(2).eq(my_version.split('.').take(2)) {
   279         -            self.verbose(&format!("auto-detected local-rebuild {}", local_release));
   280         -            self.local_rebuild = true;
   281         -        }
   282         -        self.verbose("updating submodules");
   283         -        self.update_submodules();
   284         -        self.verbose("learning about cargo");
   285         -        metadata::build(self);
   286         -
   287         -        step::run(self);
   288         -    }
   289         -
   290         -    /// Updates all git submodules that we have.
   291         -    ///
   292         -    /// This will detect if any submodules are out of date and run the necessary
   293         -    /// commands to sync them all with upstream.
   294         -    fn update_submodules(&self) {
   295         -        struct Submodule<'a> {
   296         -            path: &'a Path,
   297         -            state: State,
   298         -        }
   299         -
   300         -        enum State {
   301         -            // The submodule may have staged/unstaged changes
   302         -            MaybeDirty,
   303         -            // Or could be initialized but never updated
   304         -            NotInitialized,
   305         -            // The submodule, itself, has extra commits but those changes haven't been committed to
   306         -            // the (outer) git repository
   307         -            OutOfSync,
   308         -        }
   309         -
   310         -        if !self.config.submodules {
   311         -            return
   312         -        }
   313         -        if fs::metadata(self.src.join(".git")).is_err() {
   314         -            return
   315         -        }
   316         -        let git = || {
   317         -            let mut cmd = Command::new("git");
   318         -            cmd.current_dir(&self.src);
   319         -            return cmd
   320         -        };
   321         -        let git_submodule = || {
   322         -            let mut cmd = Command::new("git");
   323         -            cmd.current_dir(&self.src).arg("submodule");
   324         -            return cmd
   325         -        };
   326         -
   327         -        // FIXME: this takes a seriously long time to execute on Windows and a
   328         -        //        nontrivial amount of time on Unix; we should have a better way
   329         -        //        of detecting whether we need to run all the submodule commands
   330         -        //        below.
   331         -        let out = output(git_submodule().arg("status"));
   332         -        let mut submodules = vec![];
   333         -        for line in out.lines() {
   334         -            // NOTE `git submodule status` output looks like this:
   335         -            //
   336         -            // -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc
   337         -            // +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..)
   338         -            //  e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6)
   339         -            //
   340         -            // The first character can be '-', '+' or ' ' and denotes the `State` of the submodule
   341         -            // Right next to this character is the SHA-1 of the submodule HEAD
   342         -            // And after that comes the path to the submodule
   343         -            let path = Path::new(line[1..].split(' ').skip(1).next().unwrap());
   344         -            let state = if line.starts_with('-') {
   345         -                State::NotInitialized
   346         -            } else if line.starts_with('+') {
   347         -                State::OutOfSync
   348         -            } else if line.starts_with(' ') {
   349         -                State::MaybeDirty
   350         -            } else {
   351         -                panic!("unexpected git submodule state: {:?}", line.chars().next());
   352         -            };
   353         -
   354         -            submodules.push(Submodule { path: path, state: state })
   355         -        }
   356         -
   357         -        self.run(git_submodule().arg("sync"));
   358         -
   359         -        for submodule in submodules {
   360         -            // If using llvm-root then don't touch the llvm submodule.
   361         -            if submodule.path.components().any(|c| c == Component::Normal("llvm".as_ref())) &&
   362         -                self.config.target_config.get(&self.config.build)
   363         -                    .and_then(|c| c.llvm_config.as_ref()).is_some()
   364         -            {
   365         -                continue
   366         -            }
   367         -
   368         -            if submodule.path.components().any(|c| c == Component::Normal("jemalloc".as_ref())) &&
   369         -                !self.config.use_jemalloc
   370         -            {
   371         -                continue
   372         -            }
   373         -
   374         -            // `submodule.path` is the relative path to a submodule (from the repository root)
   375         -            // `submodule_path` is the path to a submodule from the cwd
   376         -
   377         -            // use `submodule.path` when e.g. executing a submodule-specific command from the
   378         -            // repository root
   379         -            // use `submodule_path` when e.g. executing a normal git command for the submodule
   380         -            // (set via `current_dir`)
   381         -            let submodule_path = self.src.join(submodule.path);
   382         -
   383         -            match submodule.state {
   384         -                State::MaybeDirty => {
   385         -                    // drop staged changes
   386         -                    self.run(git().current_dir(&submodule_path)
   387         -                                  .args(&["reset", "--hard"]));
   388         -                    // drops unstaged changes
   389         -                    self.run(git().current_dir(&submodule_path)
   390         -                                  .args(&["clean", "-fdx"]));
   391         -                },
   392         -                State::NotInitialized => {
   393         -                    self.run(git_submodule().arg("init").arg(submodule.path));
   394         -                    self.run(git_submodule().arg("update").arg(submodule.path));
   395         -                },
   396         -                State::OutOfSync => {
   397         -                    // drops submodule commits that weren't reported to the (outer) git repository
   398         -                    self.run(git_submodule().arg("update").arg(submodule.path));
   399         -                    self.run(git().current_dir(&submodule_path)
   400         -                                  .args(&["reset", "--hard"]));
   401         -                    self.run(git().current_dir(&submodule_path)
   402         -                                  .args(&["clean", "-fdx"]));
   403         -                },
   404         -            }
   405         -        }
   406         -    }
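// Illustrative helper, not from the original file: a free-standing version of
// the status-line parsing used above, to make the mapping from a
// `git submodule status` line to a (state, path) pair explicit.
fn classify_submodule_status(line: &str) -> (char, &str) {
    // The first character is '-', '+' or ' '; the path is the second
    // whitespace-separated field after that character.
    let state = line.chars().next().unwrap();
    let path = line[1..].split(' ').nth(1).unwrap();
    (state, path)
}
// classify_submodule_status("-5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc")
// returns ('-', "src/liblibc"), which corresponds to State::NotInitialized.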
   407         -
   408         -    /// Clear out `dir` if `input` is newer.
   409         -    ///
   410         -    /// After this executes, it will also ensure that `dir` exists.
   411         -    fn clear_if_dirty(&self, dir: &Path, input: &Path) {
   412         -        let stamp = dir.join(".stamp");
   413         -        if mtime(&stamp) < mtime(input) {
   414         -            self.verbose(&format!("Dirty - {}", dir.display()));
   415         -            let _ = fs::remove_dir_all(dir);
   416         -        } else if stamp.exists() {
   417         -            return
   418         -        }
   419         -        t!(fs::create_dir_all(dir));
   420         -        t!(File::create(stamp));
   421         -    }
   422         -
   423         -    /// Prepares an invocation of `cargo` to be run.
   424         -    ///
   425         -    /// This will create a `Command` that represents a pending execution of
   426         -    /// Cargo. This cargo will be configured to use `compiler` as the actual
   427         -    /// rustc compiler, its output will be scoped by `mode`'s output directory,
   428         -    /// it will pass the `--target` flag for the specified `target`, and will be
   429         -    /// executing the Cargo command `cmd`.
   430         -    fn cargo(&self,
   431         -             compiler: &Compiler,
   432         -             mode: Mode,
   433         -             target: &str,
   434         -             cmd: &str) -> Command {
   435         -        let mut cargo = Command::new(&self.cargo);
   436         -        let out_dir = self.stage_out(compiler, mode);
   437         -        cargo.env("CARGO_TARGET_DIR", out_dir)
   438         -             .arg(cmd)
   439         -             .arg("-j").arg(self.jobs().to_string())
   440         -             .arg("--target").arg(target);
   441         -
   442         -        // FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005
   443         -        // Force cargo to output binaries with disambiguating hashes in the name
   444         -        cargo.env("__CARGO_DEFAULT_LIB_METADATA", "1");
   445         -
   446         -        let stage;
   447         -        if compiler.stage == 0 && self.local_rebuild {
   448         -            // Assume the local-rebuild rustc already has stage1 features.
   449         -            stage = 1;
   450         -        } else {
   451         -            stage = compiler.stage;
   452         -        }
   453         -
   454         -        // Customize the compiler we're running. Specify the compiler to cargo
   455         -        // as our shim and then pass it various options used to configure
   456         -        // how the actual compiler itself is called.
   457         -        //
   458         -        // These variables are primarily all read by
   459         -        // src/bootstrap/bin/{rustc.rs,rustdoc.rs}
   460         -        cargo.env("RUSTBUILD_NATIVE_DIR", self.native_dir(target))
   461         -             .env("RUSTC", self.out.join("bootstrap/debug/rustc"))
   462         -             .env("RUSTC_REAL", self.compiler_path(compiler))
   463         -             .env("RUSTC_STAGE", stage.to_string())
   464         -             .env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
   465         -             .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string())
   466         -             .env("RUSTC_CODEGEN_UNITS",
   467         -                  self.config.rust_codegen_units.to_string())
   468         -             .env("RUSTC_DEBUG_ASSERTIONS",
   469         -                  self.config.rust_debug_assertions.to_string())
   470         -             .env("RUSTC_SYSROOT", self.sysroot(compiler))
   471         -             .env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
   472         -             .env("RUSTC_RPATH", self.config.rust_rpath.to_string())
   473         -             .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
   474         -             .env("RUSTDOC_REAL", self.rustdoc(compiler))
   475         -             .env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
   476         -
   477         -        // Enable usage of unstable features
   478         -        cargo.env("RUSTC_BOOTSTRAP", "1");
   479         -        self.add_rust_test_threads(&mut cargo);
   480         -
   481         -        // Almost all of the crates that we compile as part of the bootstrap may
   482         -        // have a build script, including the standard library. To compile a
   483         -        // build script, however, it itself needs a standard library! This
   484         -        // introduces a bit of a pickle when we're compiling the standard
   485         -        // library itself.
   486         -        //
   487         -        // To work around this we actually end up using the snapshot compiler
   488         -        // (stage0) for compiling build scripts of the standard library itself.
   489         -        // The stage0 compiler is guaranteed to have a libstd available for use.
   490         -        //
   491         -        // For other crates, however, we know that we've already got a standard
   492         -        // library up and running, so we can use the normal compiler to compile
   493         -        // build scripts in that situation.
   494         -        if mode == Mode::Libstd {
   495         -            cargo.env("RUSTC_SNAPSHOT", &self.rustc)
   496         -                 .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir());
   497         -        } else {
   498         -            cargo.env("RUSTC_SNAPSHOT", self.compiler_path(compiler))
   499         -                 .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler));
   500         -        }
   501         -
   502         -        // There are two invariants we must maintain:
   503         -        // * stable crates cannot depend on unstable crates (general Rust rule),
   504         -        // * crates that end up in the sysroot must be unstable (rustbuild rule).
   505         -        //
   506         -        // In order to enforce the latter, we pass the env var
   507         -        // `RUSTBUILD_UNSTABLE` down the line for any crates which will end up
   508         -        // in the sysroot. We read this in bootstrap/bin/rustc.rs and if it is
   509         -        // set, then we pass the `rustbuild` feature to rustc when building the
   510         -        // crate.
   511         -        //
   512         -        // In turn, crates that can be used here should recognise the `rustbuild`
   513         -        // feature and opt in to `rustc_private`.
   514         -        //
   515         -        // We can't always pass `rustbuild` because crates which are outside of
   516         -        // the compiler, libs, and tests are stable and we don't want to make
   517         -        // their deps unstable (since this would break the first invariant
   518         -        // above).
   519         -        if mode != Mode::Tool {
   520         -            cargo.env("RUSTBUILD_UNSTABLE", "1");
   521         -        }
   522         -
   523         -        // Ignore incremental modes except for stage0, since we're
   524         -        // not guaranteeing correctness across builds if the compiler
   525         -        // is changing under your feet.
   526         -        if self.flags.incremental && compiler.stage == 0 {
   527         -            let incr_dir = self.incremental_dir(compiler);
   528         -            cargo.env("RUSTC_INCREMENTAL", incr_dir);
   529         -        }
   530         -
   531         -        if let Some(ref on_fail) = self.flags.on_fail {
   532         -            cargo.env("RUSTC_ON_FAIL", on_fail);
   533         -        }
   534         -
   535         -        let verbose = cmp::max(self.config.verbose, self.flags.verbose);
   536         -        cargo.env("RUSTC_VERBOSE", format!("{}", verbose));
   537         -
   538         -        // Specify various options for build scripts used throughout
   539         -        // the build.
   540         -        //
   541         -        // FIXME: the guard against msvc shouldn't need to be here
   542         -        if !target.contains("msvc") {
   543         -            cargo.env(format!("CC_{}", target), self.cc(target))
   544         -                 .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None
   545         -                 .env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
   546         -        }
   547         -
   548         -        if self.config.rust_save_analysis && compiler.is_final_stage(self) {
   549         -            cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string());
   550         -        }
   551         -
   552         -        // Environment variables *required* throughout the build
   553         -        //
   554         -        // FIXME: should update code to not require this env var
   555         -        cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
   556         -
   557         -        if self.config.verbose() || self.flags.verbose() {
   558         -            cargo.arg("-v");
   559         -        }
   560         -        // FIXME: cargo bench does not accept `--release`
   561         -        if self.config.rust_optimize && cmd != "bench" {
   562         -            cargo.arg("--release");
   563         -        }
   564         -        if self.config.locked_deps {
   565         -            cargo.arg("--locked");
   566         -        }
   567         -        if self.config.vendor || self.is_sudo {
   568         -            cargo.arg("--frozen");
   569         -        }
   570         -        return cargo
   571         -    }
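// Illustrative usage sketch, not from the original source: callers (the
// compile and test steps are assumed here) typically extend and then run the
// prepared command along these lines.
//
//     let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "build");
//     cargo.arg("--features").arg(build.std_features());
//     build.run(&mut cargo);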
   572         -
   573         -    /// Get a path to the compiler specified.
   574         -    fn compiler_path(&self, compiler: &Compiler) -> PathBuf {
   575         -        if compiler.is_snapshot(self) {
   576         -            self.rustc.clone()
   577         -        } else {
   578         -            self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host))
   579         -        }
   580         -    }
   581         -
   582         -    /// Get the specified tool built by the specified compiler
   583         -    fn tool(&self, compiler: &Compiler, tool: &str) -> PathBuf {
   584         -        self.cargo_out(compiler, Mode::Tool, compiler.host)
   585         -            .join(exe(tool, compiler.host))
   586         -    }
   587         -
   588         -    /// Get the `rustdoc` executable next to the specified compiler
   589         -    fn rustdoc(&self, compiler: &Compiler) -> PathBuf {
   590         -        let mut rustdoc = self.compiler_path(compiler);
   591         -        rustdoc.pop();
   592         -        rustdoc.push(exe("rustdoc", compiler.host));
   593         -        return rustdoc
   594         -    }
   595         -
   596         -    /// Get a `Command` which is ready to run `tool` in `stage` built for
   597         -    /// `host`.
   598         -    fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
   599         -        let mut cmd = Command::new(self.tool(&compiler, tool));
   600         -        self.prepare_tool_cmd(compiler, &mut cmd);
   601         -        return cmd
   602         -    }
   603         -
   604         -    /// Prepares the `cmd` provided to be able to run the `compiler` provided.
   605         -    ///
   606         -    /// Notably this munges the dynamic library lookup path to point to the
   607         -    /// right location to run `compiler`.
   608         -    fn prepare_tool_cmd(&self, compiler: &Compiler, cmd: &mut Command) {
   609         -        let host = compiler.host;
   610         -        let mut paths = vec![
   611         -            self.sysroot_libdir(compiler, compiler.host),
   612         -            self.cargo_out(compiler, Mode::Tool, host).join("deps"),
   613         -        ];
   614         -
   615         -        // On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make
   616         -        // mode) and that C compiler may need some extra PATH modification. Do
   617         -        // so here.
   618         -        if compiler.host.contains("msvc") {
   619         -            let curpaths = env::var_os("PATH").unwrap_or(OsString::new());
   620         -            let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
   621         -            for &(ref k, ref v) in self.cc[compiler.host].0.env() {
   622         -                if k != "PATH" {
   623         -                    continue
   624         -                }
   625         -                for path in env::split_paths(v) {
   626         -                    if !curpaths.contains(&path) {
   627         -                        paths.push(path);
   628         -                    }
   629         -                }
   630         -            }
   631         -        }
   632         -        add_lib_path(paths, cmd);
   633         -    }
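// Illustrative usage sketch, not from the original source: a tool name such as
// the "tidy" used below is assumed for the example. The binary is located with
// `tool`, wrapped by `tool_cmd`/`prepare_tool_cmd`, and then run.
//
//     let mut cmd = build.tool_cmd(&compiler, "tidy");
//     cmd.arg(build.src.join("src"));
//     build.run(&mut cmd);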
   634         -
   635         -    /// Get the space-separated set of activated features for the standard
   636         -    /// library.
   637         -    fn std_features(&self) -> String {
   638         -        let mut features = "panic-unwind".to_string();
   639         -
   640         -        if self.config.debug_jemalloc {
   641         -            features.push_str(" debug-jemalloc");
   642         -        }
   643         -        if self.config.use_jemalloc {
   644         -            features.push_str(" jemalloc");
   645         -        }
   646         -        if self.config.backtrace {
   647         -            features.push_str(" backtrace");
   648         -        }
   649         -        return features
   650         -    }
   651         -
   652         -    /// Get the space-separated set of activated features for the compiler.
   653         -    fn rustc_features(&self) -> String {
   654         -        let mut features = String::new();
   655         -        if self.config.use_jemalloc {
   656         -            features.push_str(" jemalloc");
   657         -        }
   658         -        return features
   659         -    }
   660         -
   661         -    /// Component directory that Cargo will produce output into (e.g.
   662         -    /// release/debug)
   663         -    fn cargo_dir(&self) -> &'static str {
   664         -        if self.config.rust_optimize {"release"} else {"debug"}
   665         -    }
   666         -
   667         -    /// Returns the sysroot for the `compiler` specified that *this build system
   668         -    /// generates*.
   669         -    ///
   670         -    /// That is, the sysroot for the stage0 compiler is not what the compiler
   671         -    /// thinks it is by default, but it's the same as the default for stages
   672         -    /// 1-3.
   673         -    fn sysroot(&self, compiler: &Compiler) -> PathBuf {
   674         -        if compiler.stage == 0 {
   675         -            self.out.join(compiler.host).join("stage0-sysroot")
   676         -        } else {
   677         -            self.out.join(compiler.host).join(format!("stage{}", compiler.stage))
   678         -        }
   679         -    }
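// Worked example (illustrative host triple): with the default `build` output
// directory chosen in `Build::new`,
//     sysroot(stage 0) == build/x86_64-unknown-linux-gnu/stage0-sysroot
//     sysroot(stage 1) == build/x86_64-unknown-linux-gnu/stage1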
   680         -
   681         -    /// Get the directory for incremental by-products when using the
   682         -    /// given compiler.
   683         -    fn incremental_dir(&self, compiler: &Compiler) -> PathBuf {
   684         -        self.out.join(compiler.host).join(format!("stage{}-incremental", compiler.stage))
   685         -    }
   686         -
   687         -    /// Returns the libdir where the standard library and other artifacts are
   688         -    /// found for a compiler's sysroot.
   689         -    fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf {
   690         -        self.sysroot(compiler).join("lib").join("rustlib")
   691         -            .join(target).join("lib")
   692         -    }
   693         -
   694         -    /// Returns the root directory for all output generated in a particular
   695         -    /// stage when running with a particular host compiler.
   696         -    ///
   697         -    /// The mode indicates what the root directory is for.
   698         -    fn stage_out(&self, compiler: &Compiler, mode: Mode) -> PathBuf {
   699         -        let suffix = match mode {
   700         -            Mode::Libstd => "-std",
   701         -            Mode::Libtest => "-test",
   702         -            Mode::Tool => "-tools",
   703         -            Mode::Librustc => "-rustc",
   704         -        };
   705         -        self.out.join(compiler.host)
   706         -                .join(format!("stage{}{}", compiler.stage, suffix))
   707         -    }
   708         -
   709         -    /// Returns the root output directory for all Cargo output in a given stage,
   710         -    /// running a particular compiler, whether or not we're building the
   711         -    /// standard library, and targeting the specified architecture.
   712         -    fn cargo_out(&self,
   713         -                 compiler: &Compiler,
   714         -                 mode: Mode,
   715         -                 target: &str) -> PathBuf {
   716         -        self.stage_out(compiler, mode).join(target).join(self.cargo_dir())
   717         -    }
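// Worked example (illustrative host/target placeholders): building libstd at
// stage 1 with `rust_optimize` enabled, the pieces above compose to
//     stage_out(..) == build/<host>/stage1-std
//     cargo_out(..) == build/<host>/stage1-std/<target>/release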
   718         -
   719         -    /// Root output directory for LLVM compiled for `target`
   720         -    ///
   721         -    /// Note that if LLVM is configured externally then the directory returned
   722         -    /// will likely be empty.
   723         -    fn llvm_out(&self, target: &str) -> PathBuf {
   724         -        self.out.join(target).join("llvm")
   725         -    }
   726         -
   727         -    /// Output directory for all documentation for a target
   728         -    fn doc_out(&self, target: &str) -> PathBuf {
   729         -        self.out.join(target).join("doc")
   730         -    }
   731         -
   732         -    /// Output directory for all crate documentation for a target (temporary)
   733         -    ///
   734         -    /// The artifacts here are then copied into `doc_out` above.
   735         -    fn crate_doc_out(&self, target: &str) -> PathBuf {
   736         -        self.out.join(target).join("crate-docs")
   737         -    }
   738         -
   739         -    /// Returns true if no custom `llvm-config` is set for the specified target.
   740         -    ///
   741         -    /// If no custom `llvm-config` was specified then Rust's llvm will be used.
   742         -    fn is_rust_llvm(&self, target: &str) -> bool {
   743         -        match self.config.target_config.get(target) {
   744         -            Some(ref c) => c.llvm_config.is_none(),
   745         -            None => true
   746         -        }
   747         -    }
   748         -
   749         -    /// Returns the path to `llvm-config` for the specified target.
   750         -    ///
   751         -    /// If a custom `llvm-config` was specified for target then that's returned
   752         -    /// instead.
   753         -    fn llvm_config(&self, target: &str) -> PathBuf {
   754         -        let target_config = self.config.target_config.get(target);
   755         -        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
   756         -            s.clone()
   757         -        } else {
   758         -            self.llvm_out(&self.config.build).join("bin")
   759         -                .join(exe("llvm-config", target))
   760         -        }
   761         -    }
   762         -
   763         -    /// Returns the path to `FileCheck` binary for the specified target
   764         -    fn llvm_filecheck(&self, target: &str) -> PathBuf {
   765         -        let target_config = self.config.target_config.get(target);
   766         -        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
   767         -            let llvm_bindir = output(Command::new(s).arg("--bindir"));
   768         -            Path::new(llvm_bindir.trim()).join(exe("FileCheck", target))
   769         -        } else {
   770         -            let base = self.llvm_out(&self.config.build).join("build");
   771         -            let exe = exe("FileCheck", target);
   772         -            if !self.config.ninja && self.config.build.contains("msvc") {
   773         -                base.join("Release/bin").join(exe)
   774         -            } else {
   775         -                base.join("bin").join(exe)
   776         -            }
   777         -        }
   778         -    }
   779         -
   780         -    /// Directory for libraries built from C/C++ code and shared between stages.
   781         -    fn native_dir(&self, target: &str) -> PathBuf {
   782         -        self.out.join(target).join("native")
   783         -    }
   784         -
   785         -    /// Root output directory for rust_test_helpers library compiled for
   786         -    /// `target`
   787         -    fn test_helpers_out(&self, target: &str) -> PathBuf {
   788         -        self.native_dir(target).join("rust-test-helpers")
   789         -    }
   790         -
   791         -    /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
   792         -    /// library lookup path.
   793         -    fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) {
   794         -        // Windows doesn't need dylib path munging because the dlls for the
   795         -        // compiler live next to the compiler and the system will find them
   796         -        // automatically.
   797         -        if cfg!(windows) {
   798         -            return
   799         -        }
   800         -
   801         -        add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
   802         -    }
   803         -
   804         -    /// Adds the `RUST_TEST_THREADS` env var if necessary
   805         -    fn add_rust_test_threads(&self, cmd: &mut Command) {
   806         -        if env::var_os("RUST_TEST_THREADS").is_none() {
   807         -            cmd.env("RUST_TEST_THREADS", self.jobs().to_string());
   808         -        }
   809         -    }
   810         -
   811         -    /// Returns the compiler's libdir where it stores the dynamic libraries that
   812         -    /// it itself links against.
   813         -    ///
   814         -    /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on
   815         -    /// Windows.
   816         -    fn rustc_libdir(&self, compiler: &Compiler) -> PathBuf {
   817         -        if compiler.is_snapshot(self) {
   818         -            self.rustc_snapshot_libdir()
   819         -        } else {
   820         -            self.sysroot(compiler).join(libdir(compiler.host))
   821         -        }
   822         -    }
   823         -
   824         -    /// Returns the libdir of the snapshot compiler.
   825         -    fn rustc_snapshot_libdir(&self) -> PathBuf {
   826         -        self.rustc.parent().unwrap().parent().unwrap()
   827         -            .join(libdir(&self.config.build))
   828         -    }
   829         -
   830         -    /// Runs a command, printing out nice contextual information if it fails.
   831         -    fn run(&self, cmd: &mut Command) {
   832         -        self.verbose(&format!("running: {:?}", cmd));
   833         -        run_silent(cmd)
   834         -    }
   835         -
   836         -    /// Runs a command, printing out nice contextual information if it fails.
   837         -    fn run_quiet(&self, cmd: &mut Command) {
   838         -        self.verbose(&format!("running: {:?}", cmd));
   839         -        run_suppressed(cmd)
   840         -    }
   841         -
   842         -    /// Prints a message if this build is configured in verbose mode.
   843         -    fn verbose(&self, msg: &str) {
   844         -        if self.flags.verbose() || self.config.verbose() {
   845         -            println!("{}", msg);
   846         -        }
   847         -    }
   848         -
   849         -    /// Returns the number of parallel jobs that have been configured for this
   850         -    /// build.
   851         -    fn jobs(&self) -> u32 {
   852         -        self.flags.jobs.unwrap_or(num_cpus::get() as u32)
   853         -    }
   854         -
   855         -    /// Returns the path to the C compiler for the target specified.
   856         -    fn cc(&self, target: &str) -> &Path {
   857         -        self.cc[target].0.path()
   858         -    }
   859         -
   860         -    /// Returns a list of flags to pass to the C compiler for the target
   861         -    /// specified.
   862         -    fn cflags(&self, target: &str) -> Vec<String> {
   863         -        // Filter out -O and /O (the optimization flags) that we picked up from
   864         -        // gcc-rs because the build scripts will determine that for themselves.
   865         -        let mut base = self.cc[target].0.args().iter()
   866         -                           .map(|s| s.to_string_lossy().into_owned())
   867         -                           .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
   868         -                           .collect::<Vec<_>>();
   869         -
   870         -        // If we're compiling on macOS then we add a few unconditional flags
   871         -        // indicating that we want libc++ (more filled out than libstdc++) and
   872         -        // we want to compile for 10.7. This way we can ensure that
   873         -        // LLVM/jemalloc/etc are all properly compiled.
   874         -        if target.contains("apple-darwin") {
   875         -            base.push("-stdlib=libc++".into());
   876         -        }
   877         -        return base
   878         -    }
   879         -
   880         -    /// Returns the path to the `ar` archive utility for the target specified.
   881         -    fn ar(&self, target: &str) -> Option<&Path> {
   882         -        self.cc[target].1.as_ref().map(|p| &**p)
   883         -    }
   884         -
   885         -    /// Returns the path to the C++ compiler for the target specified; panics
   886         -    /// if no C++ compiler was configured for the target.
   887         -    fn cxx(&self, target: &str) -> &Path {
   888         -        match self.cxx.get(target) {
   889         -            Some(p) => p.path(),
   890         -            None => panic!("\n\ntarget `{}` is not configured as a host,
   891         -                            only as a target\n\n", target),
   892         -        }
   893         -    }
   894         -
   895         -    /// Returns flags to pass to the compiler to generate code for `target`.
   896         -    fn rustc_flags(&self, target: &str) -> Vec<String> {
   897         -        // New flags should be added here with great caution!
   898         -        //
   899         -        // It's quite unfortunate to **require** flags to generate code for a
   900         -        // target, so it should only be passed here if absolutely necessary!
   901         -        // Most default configuration should be done through target specs rather
   902         -        // than an entry here.
   903         -
   904         -        let mut base = Vec::new();
   905         -        if target != self.config.build && !target.contains("msvc") &&
   906         -            !target.contains("emscripten") {
   907         -            base.push(format!("-Clinker={}", self.cc(target).display()));
   908         -        }
   909         -        return base
   910         -    }
   911         -
   912         -    /// Returns the "musl root" for this `target`, if defined
   913         -    fn musl_root(&self, target: &str) -> Option<&Path> {
   914         -        self.config.target_config.get(target)
   915         -            .and_then(|t| t.musl_root.as_ref())
   916         -            .or(self.config.musl_root.as_ref())
   917         -            .map(|p| &**p)
   918         -    }
   919         -
   920         -    /// Returns the root of the "rootfs" image that this target will be using,
   921         -    /// if one was configured.
   922         -    ///
   923         -    /// If `Some` is returned then that means that tests for this target are
   924         -    /// emulated with QEMU and binaries will need to be shipped to the emulator.
   925         -    fn qemu_rootfs(&self, target: &str) -> Option<&Path> {
   926         -        self.config.target_config.get(target)
   927         -            .and_then(|t| t.qemu_rootfs.as_ref())
   928         -            .map(|p| &**p)
   929         -    }
   930         -
   931         -    /// Path to the python interpreter to use
   932         -    fn python(&self) -> &Path {
   933         -        self.config.python.as_ref().unwrap()
   934         -    }
   935         -
   936         -    /// Tests whether the `compiler` compiling for `target` should be forced to
   937         -    /// use a stage1 compiler instead.
   938         -    ///
   939         -    /// Currently, by default, the build system does not perform a "full
   940         -    /// bootstrap" where we compile the compiler three times.
   941         -    /// Instead, we compile the compiler two times. The final stage (stage2)
   942         -    /// just copies the libraries from the previous stage, which is what this
   943         -    /// method detects.
   944         -    ///
   945         -    /// Here we return `true` if:
   946         -    ///
   947         -    /// * The build isn't performing a full bootstrap
   948         -    /// * The `compiler` is in the final stage, 2
   949         -    /// * We're not cross-compiling, so the artifacts are already available in
   950         -    ///   stage1
   951         -    ///
   952         -    /// When all of these conditions are met the build will lift artifacts from
   953         -    /// the previous stage forward.
   954         -    fn force_use_stage1(&self, compiler: &Compiler, target: &str) -> bool {
   955         -        !self.config.full_bootstrap &&
   956         -            compiler.stage >= 2 &&
   957         -            self.config.host.iter().any(|h| h == target)
   958         -    }
   959         -
   960         -    /// Returns the directory that OpenSSL artifacts are compiled into if
   961         -    /// configured to do so.
   962         -    fn openssl_dir(&self, target: &str) -> Option<PathBuf> {
   963         -        // OpenSSL not used on Windows
   964         -        if target.contains("windows") {
   965         -            None
   966         -        } else if self.config.openssl_static {
   967         -            Some(self.out.join(target).join("openssl"))
   968         -        } else {
   969         -            None
   970         -        }
   971         -    }
   972         -
   973         -    /// Returns the directory that OpenSSL artifacts are installed into if
   974         -    /// configured as such.
   975         -    fn openssl_install_dir(&self, target: &str) -> Option<PathBuf> {
   976         -        self.openssl_dir(target).map(|p| p.join("install"))
   977         -    }
   978         -
   979         -    /// Given `num` in the form "a.b.c" return a "release string" which
   980         -    /// describes the release version number.
   981         -    ///
   982         -    /// For example on nightly this returns "a.b.c-nightly", on beta it returns
   983         -    /// "a.b.c-beta.1" and on stable it just returns "a.b.c".
   984         -    fn release(&self, num: &str) -> String {
   985         -        match &self.config.channel[..] {
   986         -            "stable" => num.to_string(),
   987         -            "beta" => format!("{}-beta{}", num, channel::CFG_PRERELEASE_VERSION),
   988         -            "nightly" => format!("{}-nightly", num),
   989         -            _ => format!("{}-dev", num),
   990         -        }
   991         -    }
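// Worked examples of the mapping above (illustrative version number, and
// assuming CFG_PRERELEASE_VERSION expands to ".1"):
//     channel "stable"  -> release("1.17.0") == "1.17.0"
//     channel "beta"    -> release("1.17.0") == "1.17.0-beta.1"
//     channel "nightly" -> release("1.17.0") == "1.17.0-nightly"
//     anything else     -> release("1.17.0") == "1.17.0-dev"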
   992         -
   993         -    /// Returns the value of `release` above for Rust itself.
   994         -    fn rust_release(&self) -> String {
   995         -        self.release(channel::CFG_RELEASE_NUM)
   996         -    }
   997         -
   998         -    /// Returns the "package version" for a component given the `num` release
   999         -    /// number.
  1000         -    ///
  1001         -    /// The package version is typically what shows up in the names of tarballs.
  1002         -    /// For channels like beta/nightly it's just the channel name, otherwise
  1003         -    /// it's the `num` provided.
  1004         -    fn package_vers(&self, num: &str) -> String {
  1005         -        match &self.config.channel[..] {
  1006         -            "stable" => num.to_string(),
  1007         -            "beta" => "beta".to_string(),
  1008         -            "nightly" => "nightly".to_string(),
  1009         -            _ => format!("{}-dev", num),
  1010         -        }
  1011         -    }
  1012         -
  1013         -    /// Returns the value of `package_vers` above for Rust itself.
  1014         -    fn rust_package_vers(&self) -> String {
  1015         -        self.package_vers(channel::CFG_RELEASE_NUM)
  1016         -    }
  1017         -
  1018         -    /// Returns the `version` string associated with this compiler for Rust
  1019         -    /// itself.
  1020         -    ///
  1021         -    /// Note that this is a descriptive string which includes the commit date,
  1022         -    /// sha, version, etc.
  1023         -    fn rust_version(&self) -> String {
  1024         -        self.rust_info.version(self, channel::CFG_RELEASE_NUM)
  1025         -    }
  1026         -
  1027         -    /// Returns the `a.b.c` version that Cargo is at.
  1028         -    fn cargo_release_num(&self) -> String {
  1029         -        let mut toml = String::new();
  1030         -        t!(t!(File::open(self.src.join("cargo/Cargo.toml"))).read_to_string(&mut toml));
  1031         -        for line in toml.lines() {
  1032         -            let prefix = "version = \"";
  1033         -            let suffix = "\"";
  1034         -            if line.starts_with(prefix) && line.ends_with(suffix) {
  1035         -                return line[prefix.len()..line.len() - suffix.len()].to_string()
  1036         -            }
  1037         -        }
  1038         -
  1039         -        panic!("failed to find version in cargo's Cargo.toml")
  1040         -    }
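// Illustrative example of the scan above: given a cargo/Cargo.toml whose
// [package] section contains a line such as
//     version = "0.18.0"
// the loop returns "0.18.0" (the text between the quotes); the first line of
// that shape in the file wins.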
  1041         -
  1042         -    /// Returns whether unstable features should be enabled for the compiler
  1043         -    /// we're building.
  1044         -    fn unstable_features(&self) -> bool {
  1045         -        match &self.config.channel[..] {
  1046         -            "stable" | "beta" => false,
  1047         -            "nightly" | _ => true,
  1048         -        }
  1049         -    }
  1050         -}
  1051         -
  1052         -impl<'a> Compiler<'a> {
  1053         -    //