Auto merge of #116578 - antoyo:subtree-update_cg_gcc_2023-10-09, r=bjorn3

subtree update cg_gcc 2023/10/09
This commit is contained in:
bors 2023-10-11 00:04:04 +00:00
commit dcf89f43a5
82 changed files with 2856 additions and 669 deletions

View File

@ -57,8 +57,8 @@ jobs:
uses: dawidd6/action-download-artifact@v2
with:
workflow: main.yml
name: ${{ matrix.libgccjit_version.gcc }}
path: gcc-build
name: gcc-13
path: gcc-13
repo: antoyo/gcc
branch: ${{ matrix.libgccjit_version.artifacts_branch }}
event: push
@ -71,9 +71,8 @@ jobs:
- name: Setup path to libgccjit
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
run: |
echo $(readlink -f gcc-build) > gcc_path
# NOTE: the filename is still libgccjit.so even when the artifact name is different.
ln gcc-build/libgccjit.so gcc-build/libgccjit.so.0
sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb
echo /usr/lib/ > gcc_path
- name: Set env
run: |
@ -119,8 +118,8 @@ jobs:
- name: Build
run: |
./prepare_build.sh
${{ matrix.libgccjit_version.env_extra }} ./build.sh ${{ matrix.libgccjit_version.extra }}
./y.sh prepare --only-libcore
${{ matrix.libgccjit_version.env_extra }} ./y.sh build ${{ matrix.libgccjit_version.extra }}
${{ matrix.libgccjit_version.env_extra }} cargo test ${{ matrix.libgccjit_version.extra }}
./clean_all.sh
@ -128,7 +127,7 @@ jobs:
run: |
git config --global user.email "user@example.com"
git config --global user.name "User"
./prepare.sh
./y.sh prepare
# Compile is a separate step, as the actions-rs/cargo action supports error annotations
- name: Compile
@ -141,6 +140,9 @@ jobs:
if: ${{ matrix.libgccjit_version.gcc == 'libgccjit12.so' }}
run: cat failing-ui-tests12.txt >> failing-ui-tests.txt
- name: Add more failing tests because the sysroot is not compiled with LTO
run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt
- name: Run tests
run: |
${{ matrix.libgccjit_version.env_extra }} ./test.sh --release --clean --build-sysroot ${{ matrix.commands }} ${{ matrix.libgccjit_version.extra }}

View File

@ -18,8 +18,6 @@ jobs:
strategy:
fail-fast: false
matrix:
libgccjit_version:
- { gcc: "libgccjit.so", artifacts_branch: "master" }
commands: [
"--test-successful-rustc --nb-parts 2 --current-part 0",
"--test-successful-rustc --nb-parts 2 --current-part 1",
@ -40,18 +38,17 @@ jobs:
uses: dawidd6/action-download-artifact@v2
with:
workflow: main.yml
name: ${{ matrix.libgccjit_version.gcc }}
path: gcc-build
name: gcc-13
path: gcc-13
repo: antoyo/gcc
branch: ${{ matrix.libgccjit_version.artifacts_branch }}
branch: "master"
event: push
search_artifacts: true # Otherwise the action only checks the last job that ran, and that won't work since we want multiple artifacts.
- name: Setup path to libgccjit
run: |
echo $(readlink -f gcc-build) > gcc_path
# NOTE: the filename is still libgccjit.so even when the artifact name is different.
ln gcc-build/libgccjit.so gcc-build/libgccjit.so.0
sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb
echo /usr/lib/ > gcc_path
- name: Set env
run: |
@ -88,8 +85,8 @@ jobs:
- name: Build
run: |
./prepare_build.sh
./build.sh --release --release-sysroot
./y.sh prepare --only-libcore
EMBED_LTO_BITCODE=1 ./y.sh build --release --release-sysroot
cargo test
./clean_all.sh
@ -97,7 +94,9 @@ jobs:
run: |
git config --global user.email "user@example.com"
git config --global user.name "User"
./prepare.sh
./y.sh prepare
# FIXME(antoyo): we cannot enable LTO for stdarch tests currently because of some failing LTO tests using proc-macros.
echo -n 'lto = "fat"' >> build_sysroot/Cargo.toml
# Compile is a separate step, as the actions-rs/cargo action supports error annotations
- name: Compile
@ -106,6 +105,9 @@ jobs:
command: build
args: --release
- name: Add more failing tests because of undefined symbol errors (FIXME)
run: cat failing-lto-tests.txt >> failing-ui-tests.txt
- name: Run tests
run: |
./test.sh --release --clean --release-sysroot --build-sysroot ${{ matrix.commands }}
EMBED_LTO_BITCODE=1 ./test.sh --release --clean --release-sysroot --build-sysroot ${{ matrix.commands }}

View File

@ -18,8 +18,6 @@ jobs:
strategy:
fail-fast: false
matrix:
libgccjit_version:
- { gcc: "libgccjit.so", artifacts_branch: "master" }
cargo_runner: [
"sde -future -rtm_mode full --",
"",
@ -54,18 +52,17 @@ jobs:
uses: dawidd6/action-download-artifact@v2
with:
workflow: main.yml
name: ${{ matrix.libgccjit_version.gcc }}
path: gcc-build
name: gcc-13
path: gcc-13
repo: antoyo/gcc
branch: ${{ matrix.libgccjit_version.artifacts_branch }}
branch: "master"
event: push
search_artifacts: true # Otherwise the action only checks the last job that ran, and that won't work since we want multiple artifacts.
- name: Setup path to libgccjit
run: |
echo $(readlink -f gcc-build) > gcc_path
# NOTE: the filename is still libgccjit.so even when the artifact name is different.
ln gcc-build/libgccjit.so gcc-build/libgccjit.so.0
sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb
echo /usr/lib/ > gcc_path
- name: Set env
run: |
@ -102,8 +99,8 @@ jobs:
- name: Build
run: |
./prepare_build.sh
./build.sh --release --release-sysroot
./y.sh prepare --only-libcore
./y.sh build --release --release-sysroot
cargo test
- name: Clean
@ -115,7 +112,7 @@ jobs:
run: |
git config --global user.email "user@example.com"
git config --global user.name "User"
./prepare.sh
./y.sh prepare
# Compile is a separate step, as the actions-rs/cargo action supports error annotations
- name: Compile
@ -133,10 +130,11 @@ jobs:
if: ${{ !matrix.cargo_runner }}
run: |
cd build_sysroot/sysroot_src/library/stdarch/
CHANNEL=release TARGET=x86_64-unknown-linux-gnu ../../../../cargo.sh test
CHANNEL=release TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../cargo.sh test
- name: Run stdarch tests
if: ${{ matrix.cargo_runner }}
run: |
cd build_sysroot/sysroot_src/library/stdarch/
STDARCH_TEST_EVERYTHING=1 CHANNEL=release CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${{ matrix.cargo_runner }}" TARGET=x86_64-unknown-linux-gnu ../../../../cargo.sh test -- --skip rtm --skip tbm --skip sse4a
# FIXME: these tests fail when the sysroot is compiled with LTO because of a missing symbol in proc-macro.
STDARCH_TEST_EVERYTHING=1 CHANNEL=release CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${{ matrix.cargo_runner }}" TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../cargo.sh test -- --skip rtm --skip tbm --skip sse4a

View File

@ -25,3 +25,4 @@ tools/llvmint
tools/llvmint-2
# The `llvm` folder is generated by the `tools/generate_intrinsics.py` script to update intrinsics.
llvm
build_system/target

View File

@ -4,9 +4,9 @@ version = 3
[[package]]
name = "aho-corasick"
version = "0.7.18"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
dependencies = [
"memchr",
]
@ -17,12 +17,51 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
[[package]]
name = "cc"
version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "errno"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a"
dependencies = [
"errno-dragonfly",
"libc",
"windows-sys",
]
[[package]]
name = "errno-dragonfly"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "fastrand"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764"
[[package]]
name = "fm"
version = "0.1.4"
@ -35,7 +74,7 @@ dependencies = [
[[package]]
name = "gccjit"
version = "1.0.0"
source = "git+https://github.com/antoyo/gccjit.rs#d6e52626cfc6f487094a5d5ac66302baf3439984"
source = "git+https://github.com/antoyo/gccjit.rs#0b158c68bf7e46732869d90550a98e886dee8858"
dependencies = [
"gccjit_sys",
]
@ -43,7 +82,7 @@ dependencies = [
[[package]]
name = "gccjit_sys"
version = "0.0.1"
source = "git+https://github.com/antoyo/gccjit.rs#d6e52626cfc6f487094a5d5ac66302baf3439984"
source = "git+https://github.com/antoyo/gccjit.rs#0b158c68bf7e46732869d90550a98e886dee8858"
dependencies = [
"libc",
]
@ -57,25 +96,11 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "getrandom"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]]
name = "hermit-abi"
version = "0.1.19"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]
checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"
[[package]]
name = "lang_tester"
@ -95,86 +120,55 @@ dependencies = [
[[package]]
name = "libc"
version = "0.2.112"
version = "0.2.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125"
checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
[[package]]
name = "linux-raw-sys"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503"
[[package]]
name = "memchr"
version = "2.4.1"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "num_cpus"
version = "1.13.0"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "ppv-lite86"
version = "0.2.15"
name = "object"
version = "0.30.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba"
[[package]]
name = "rand"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8"
checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
"rand_hc",
]
[[package]]
name = "rand_chacha"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
dependencies = [
"getrandom",
]
[[package]]
name = "rand_hc"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7"
dependencies = [
"rand_core",
"memchr",
]
[[package]]
name = "redox_syscall"
version = "0.2.10"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff"
checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
dependencies = [
"bitflags",
"bitflags 1.3.2",
]
[[package]]
name = "regex"
version = "1.5.4"
version = "1.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f"
dependencies = [
"aho-corasick",
"memchr",
@ -183,18 +177,9 @@ dependencies = [
[[package]]
name = "regex-syntax"
version = "0.6.25"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
[[package]]
name = "remove_dir_all"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
dependencies = [
"winapi",
]
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
[[package]]
name = "rustc_codegen_gcc"
@ -202,10 +187,24 @@ version = "0.1.0"
dependencies = [
"gccjit",
"lang_tester",
"object",
"smallvec",
"tempfile",
]
[[package]]
name = "rustix"
version = "0.38.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f"
dependencies = [
"bitflags 2.4.0",
"errno",
"libc",
"linux-raw-sys",
"windows-sys",
]
[[package]]
name = "same-file"
version = "1.0.6"
@ -223,23 +222,22 @@ checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "tempfile"
version = "3.2.0"
version = "3.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22"
checksum = "dc02fddf48964c42031a0b3fe0428320ecf3a73c401040fc0096f97794310651"
dependencies = [
"cfg-if",
"libc",
"rand",
"fastrand",
"redox_syscall",
"remove_dir_all",
"winapi",
"rustix",
"windows-sys",
]
[[package]]
name = "termcolor"
version = "1.1.2"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6"
dependencies = [
"winapi-util",
]
@ -255,9 +253,9 @@ dependencies = [
[[package]]
name = "unicode-width"
version = "0.1.9"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
[[package]]
name = "wait-timeout"
@ -270,21 +268,14 @@ dependencies = [
[[package]]
name = "walkdir"
version = "2.3.2"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698"
dependencies = [
"same-file",
"winapi",
"winapi-util",
]
[[package]]
name = "wasi"
version = "0.10.2+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
[[package]]
name = "winapi"
version = "0.3.9"
@ -315,3 +306,69 @@ name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.48.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
[[package]]
name = "windows_i686_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
[[package]]
name = "windows_i686_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"

View File

@ -27,7 +27,13 @@ gccjit = { git = "https://github.com/antoyo/gccjit.rs" }
# Local copy.
#gccjit = { path = "../gccjit.rs" }
object = { version = "0.30.1", default-features = false, features = [
"std",
"read",
] }
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
# TODO(antoyo): make tempfile optional.
tempfile = "3.7.1"
[dev-dependencies]
lang_tester = "0.3.9"

View File

@ -1,6 +1,7 @@
# WIP libgccjit codegen backend for rust
[![Chat on IRC](https://img.shields.io/badge/irc.libera.chat-%23rustc__codegen__gcc-blue.svg)](https://web.libera.chat/#rustc_codegen_gcc)
[![Chat on Matrix](https://img.shields.io/badge/matrix.org-%23rustc__codegen__gcc-blue.svg)](https://matrix.to/#/#rustc_codegen_gcc:matrix.org)
This is a GCC codegen for rustc, which means it can be loaded by the existing rustc frontend, but benefits from GCC: more architectures are supported and GCC's optimizations are used.
@ -14,9 +15,7 @@ A secondary goal is to check if using the gcc backend will provide any run-time
## Building
**This requires a patched libgccjit in order to work.
The patches in [this repository](https://github.com/antoyo/libgccjit-patches) need to be applied.
(Those patches should work when applied on master, but in case they don't, they are known to work when applied on 079c23cfe079f203d5df83fea8e92a60c7d7e878.)
You can also use my [fork of gcc](https://github.com/antoyo/gcc) which already includes these patches.**
You need to use my [fork of gcc](https://github.com/antoyo/gcc) which already includes these patches.**
To build it (most of these instructions come from [here](https://gcc.gnu.org/onlinedocs/jit/internals/index.html), so don't hesitate to take a look there if you encounter an issue):
@ -66,8 +65,8 @@ $ export RUST_COMPILER_RT_ROOT="$PWD/llvm/compiler-rt"
Then you can run commands like this:
```bash
$ ./prepare.sh # download and patch sysroot src and install hyperfine for benchmarking
$ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) ./build.sh --release
$ ./y.sh prepare # download and patch sysroot src and install hyperfine for benchmarking
$ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) ./y.sh build --release
```
To run the tests:
@ -78,22 +77,29 @@ $ ./test.sh --release
## Usage
`$cg_gccjit_dir` is the directory you cloned this repo into in the following instructions.
`$CG_GCCJIT_DIR` is the directory you cloned this repo into in the following instructions:
```bash
export CG_GCCJIT_DIR=[the full path to rustc_codegen_gcc]
```
### Cargo
```bash
$ CHANNEL="release" $cg_gccjit_dir/cargo.sh run
$ CHANNEL="release" $CG_GCCJIT_DIR/cargo.sh run
```
If you compiled cg_gccjit in debug mode (i.e. you didn't pass `--release` to `./test.sh`), you should use `CHANNEL="debug"` instead or omit `CHANNEL="release"` entirely.
To use LTO, you need to set the variables `FAT_LTO=1` and `EMBED_LTO_BITCODE=1` in addition to setting `lto = "fat"` in `Cargo.toml`.
Don't set `FAT_LTO` when compiling the sysroot, though: only set `EMBED_LTO_BITCODE=1`.
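A minimal sketch of that flow (assuming the crate's `Cargo.toml` already sets `lto = "fat"` and that `$CG_GCCJIT_DIR` points at this repository as above):
```bash
# Build the sysroot with only EMBED_LTO_BITCODE set.
$ EMBED_LTO_BITCODE=1 ./y.sh build --release --release-sysroot
# Then build the crate itself with both variables set.
$ EMBED_LTO_BITCODE=1 FAT_LTO=1 CHANNEL="release" $CG_GCCJIT_DIR/cargo.sh build
```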
### Rustc
> You should prefer using the Cargo method.
```bash
$ rustc +$(cat $cg_gccjit_dir/rust-toolchain) -Cpanic=abort -Zcodegen-backend=$cg_gccjit_dir/target/release/librustc_codegen_gcc.so --sysroot $cg_gccjit_dir/build_sysroot/sysroot my_crate.rs
$ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) rustc +$(cat $CG_GCCJIT_DIR/rust-toolchain | grep 'channel' | cut -d '=' -f 2 | sed 's/"//g' | sed 's/ //g') -Cpanic=abort -Zcodegen-backend=$CG_GCCJIT_DIR/target/release/librustc_codegen_gcc.so --sysroot $CG_GCCJIT_DIR/build_sysroot/sysroot my_crate.rs
```
## Env vars
@ -105,8 +111,18 @@ $ rustc +$(cat $cg_gccjit_dir/rust-toolchain) -Cpanic=abort -Zcodegen-backend=$c
object files when their content should have been changed by a change to cg_gccjit.</dd>
<dt>CG_GCCJIT_DISPLAY_CG_TIME</dt>
<dd>Display the time it took to perform codegen for a crate</dd>
<dt>CG_RUSTFLAGS</dt>
<dd>Send additional flags to rustc. Can be used to build the sysroot without unwinding by setting `CG_RUSTFLAGS=-Cpanic=abort`.</dd>
<dt>CG_GCCJIT_DUMP_TO_FILE</dt>
<dd>Dump a C-like representation to /tmp/gccjit_dumps and enable debug info in order to debug this C-like representation.</dd>
</dl>
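For example, the sysroot can be rebuilt without unwinding support by passing `-Cpanic=abort` through `CG_RUSTFLAGS` (a sketch assuming a script that sources `config.sh`, such as `test.sh`):
```bash
$ CG_RUSTFLAGS="-Cpanic=abort" ./test.sh --release --build-sysroot
```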
## Licensing
While this crate is licensed under a dual Apache/MIT license, it links to `libgccjit`, which is under the GPLv3+, and thus the resulting toolchain (rustc + GCC codegen) will need to be released under the GPL license.
However, programs compiled with `rustc_codegen_gcc` do not need to be released under a GPL license.
## Debugging
Sometimes, libgccjit will crash and output an error like this:
@ -182,6 +198,61 @@ set substitute-path /usr/src/debug/gcc /path/to/gcc-repo/gcc
TODO(antoyo): but that's not what I remember doing.
### `failed to build archive` error
When you get this error:
```
error: failed to build archive: failed to open object file: No such file or directory (os error 2)
```
That can be caused by compiling with `lto = "fat"` while the sysroot was not compiled with LTO.
(Not sure if that's the reason since I cannot reproduce it anymore. Maybe it happened when forgetting to set `FAT_LTO`.)
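If you hit this, one thing to try (a sketch, not a confirmed fix) is rebuilding the sysroot with the LTO bitcode embedded before compiling the crate with `lto = "fat"`:
```
EMBED_LTO_BITCODE=1 ./y.sh build --release --release-sysroot
```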
### ld: cannot find crtbegin.o
When compiling an executable with libgccjit, if you set the `*LIBRARY_PATH` variables to the install directory, you will get the following errors:
```
ld: cannot find crtbegin.o: No such file or directory
ld: cannot find -lgcc: No such file or directory
ld: cannot find -lgcc: No such file or directory
libgccjit.so: error: error invoking gcc driver
```
To fix this, set the variables to `gcc-build/build/gcc`.
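For example (a sketch assuming libgccjit was built in a `gcc-build` folder next to this repository):
```
export LIBRARY_PATH="$PWD/gcc-build/build/gcc"
export LD_LIBRARY_PATH="$PWD/gcc-build/build/gcc"
```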
### How to debug GCC LTO
Run the command with `-v -save-temps`, then extract the `lto1` line from the output and run it under the debugger.
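A hedged sketch of that workflow (the `lto1` path and its arguments come from the verbose output and will differ on your machine):
```
CG_RUSTFLAGS="-Clink-args=-save-temps -v" ../cargo.sh build 2>&1 | tee build.log
grep lto1 build.log
# Copy the printed lto1 command line and run it under gdb:
# gdb --args /path/to/lto1 <arguments from the grep output>
```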
### How to send arguments to the GCC linker
```
CG_RUSTFLAGS="-Clink-args=-save-temps -v" ../cargo.sh build
```
### How to see the personality functions in the asm dump
```
CG_RUSTFLAGS="-Clink-arg=-save-temps -v -Clink-arg=-dA" ../cargo.sh build
```
### How to see the LLVM IR for a sysroot crate
```
cargo build -v --target x86_64-unknown-linux-gnu -Zbuild-std
# Take the command from the output and add --emit=llvm-ir
```
### To prevent the linker from demangling symbols
Run with:
```
COLLECT_NO_DEMANGLE=1
```
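For example (a sketch; pairing it with the cargo wrapper is an assumption):
```
COLLECT_NO_DEMANGLE=1 ../cargo.sh build
```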
### How to use a custom-build rustc
* Build the stage2 compiler (`rustup toolchain link debug-current build/x86_64-unknown-linux-gnu/stage2`).
@ -223,6 +294,11 @@ https://rust-lang.zulipchat.com/#narrow/stream/301329-t-devtools/topic/subtree.2
`rustc` needs to be built without `jemalloc` so that `mem-trace` can overload `malloc`: since `jemalloc` is linked statically, an `LD_PRELOAD`-ed library won't have a chance to intercept the calls to `malloc`.
### How to generate GIMPLE
If you need to check what gccjit is generating (GIMPLE), then take a look at how to
generate it in [gimple.md](./doc/gimple.md).
### How to build a cross-compiling libgccjit
#### Building libgccjit
@ -239,4 +315,4 @@ https://rust-lang.zulipchat.com/#narrow/stream/301329-t-devtools/topic/subtree.2
* Set `linker='-Clinker=m68k-linux-gcc'`.
* Set the path to the cross-compiling libgccjit in `gcc_path`.
* Comment the line: `context.add_command_line_option("-masm=intel");` in src/base.rs.
* (might not be necessary) Disable the compilation of libstd.so (and possibly libcore.so?).
* (might not be necessary) Disable the compilation of libstd.so (and possibly libcore.so?): Remove dylib from build_sysroot/sysroot_src/library/std/Cargo.toml.

View File

@ -1,67 +0,0 @@
#!/usr/bin/env bash
#set -x
set -e
codegen_channel=debug
sysroot_channel=debug
flags=
while [[ $# -gt 0 ]]; do
case $1 in
--release)
codegen_channel=release
shift
;;
--release-sysroot)
sysroot_channel=release
shift
;;
--no-default-features)
flags="$flags --no-default-features"
shift
;;
--features)
shift
flags="$flags --features $1"
shift
;;
*)
echo "Unknown option $1"
exit 1
;;
esac
done
if [ -f ./gcc_path ]; then
export GCC_PATH=$(cat gcc_path)
else
echo 'Please put the path to your custom build of libgccjit in the file `gcc_path`, see Readme.md for details'
exit 1
fi
export LD_LIBRARY_PATH="$GCC_PATH"
export LIBRARY_PATH="$GCC_PATH"
if [[ "$codegen_channel" == "release" ]]; then
export CHANNEL='release'
CARGO_INCREMENTAL=1 cargo rustc --release $flags
else
echo $LD_LIBRARY_PATH
export CHANNEL='debug'
cargo rustc $flags
fi
source config.sh
rm -r target/out || true
mkdir -p target/out/gccjit
echo "[BUILD] sysroot"
if [[ "$sysroot_channel" == "release" ]]; then
time ./build_sysroot/build_sysroot.sh --release
else
time ./build_sysroot/build_sysroot.sh
fi

View File

@ -2,6 +2,7 @@
authors = ["bjorn3 <bjorn3@users.noreply.github.com>"]
name = "sysroot"
version = "0.0.0"
resolver = "2"
[dependencies]
core = { path = "./sysroot_src/library/core" }
@ -18,3 +19,4 @@ rustc-std-workspace-std = { path = "./sysroot_src/library/rustc-std-workspace-st
[profile.release]
debug = true
#lto = "fat" # TODO(antoyo): re-enable when the failing LTO tests regarding proc-macros are fixed.

View File

@ -5,9 +5,9 @@
set -e
cd $(dirname "$0")
pushd ../ >/dev/null
pushd ../
source ./config.sh
popd >/dev/null
popd
# Cleanup for previous run
# v Clean target dir except for build scripts and incremental cache

View File

@ -1,39 +0,0 @@
#!/usr/bin/env bash
set -e
cd $(dirname "$0")
SRC_DIR=$(dirname $(rustup which rustc))"/../lib/rustlib/src/rust/"
DST_DIR="sysroot_src"
if [ ! -e $SRC_DIR ]; then
echo "Please install rust-src component"
exit 1
fi
rm -rf $DST_DIR
mkdir -p $DST_DIR/library
cp -r $SRC_DIR/library $DST_DIR/
pushd $DST_DIR
echo "[GIT] init"
git init
echo "[GIT] add"
git add .
echo "[GIT] commit"
# This is needed on systems where nothing is configured.
# git really needs something here, or it will fail.
# Even using --author is not enough.
git config user.email || git config user.email "none@example.com"
git config user.name || git config user.name "None"
git commit -m "Initial commit" -q
for file in $(ls ../../patches/ | grep -v patcha); do
echo "[GIT] apply" $file
git apply ../../patches/$file
git add -A
git commit --no-gpg-sign -m "Patch $file"
done
popd
echo "Successfully prepared libcore for building"

View File

@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "y"
version = "0.1.0"

View File

@ -0,0 +1,8 @@
[package]
name = "y"
version = "0.1.0"
edition = "2021"
[[bin]]
name = "y"
path = "src/main.rs"

View File

@ -0,0 +1,233 @@
use crate::config::set_config;
use crate::utils::{
get_gcc_path, run_command, run_command_with_env, run_command_with_output_and_env, walk_dir,
};
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fs;
use std::path::Path;
#[derive(Default)]
struct BuildArg {
codegen_release_channel: bool,
sysroot_release_channel: bool,
features: Vec<String>,
gcc_path: String,
}
impl BuildArg {
fn new() -> Result<Option<Self>, String> {
let gcc_path = get_gcc_path()?;
let mut build_arg = Self {
gcc_path,
..Default::default()
};
// We skip binary name and the `build` command.
let mut args = std::env::args().skip(2);
while let Some(arg) = args.next() {
match arg.as_str() {
"--release" => build_arg.codegen_release_channel = true,
"--release-sysroot" => build_arg.sysroot_release_channel = true,
"--no-default-features" => {
build_arg.features.push("--no-default-features".to_string());
}
"--features" => {
if let Some(arg) = args.next() {
build_arg.features.push("--features".to_string());
build_arg.features.push(arg.as_str().into());
} else {
return Err(
"Expected a value after `--features`, found nothing".to_string()
);
}
}
"--help" => {
Self::usage();
return Ok(None);
}
arg => return Err(format!("Unknown argument `{}`", arg)),
}
}
Ok(Some(build_arg))
}
fn usage() {
println!(
r#"
`build` command help:
--release : Build codegen in release mode
--release-sysroot : Build sysroot in release mode
--no-default-features : Add `--no-default-features` flag
--features [arg] : Add a new feature [arg]
--help : Show this help
"#
)
}
}
fn build_sysroot(
env: &mut HashMap<String, String>,
release_mode: bool,
target_triple: &str,
) -> Result<(), String> {
std::env::set_current_dir("build_sysroot")
.map_err(|error| format!("Failed to go to `build_sysroot` directory: {:?}", error))?;
// Cleanup for previous run
// Clean target dir except for build scripts and incremental cache
let _ = walk_dir(
"target",
|dir: &Path| {
for top in &["debug", "release"] {
let _ = fs::remove_dir_all(dir.join(top).join("build"));
let _ = fs::remove_dir_all(dir.join(top).join("deps"));
let _ = fs::remove_dir_all(dir.join(top).join("examples"));
let _ = fs::remove_dir_all(dir.join(top).join("native"));
let _ = walk_dir(
dir.join(top),
|sub_dir: &Path| {
if sub_dir
.file_name()
.map(|filename| filename.to_str().unwrap().starts_with("libsysroot"))
.unwrap_or(false)
{
let _ = fs::remove_dir_all(sub_dir);
}
Ok(())
},
|file: &Path| {
if file
.file_name()
.map(|filename| filename.to_str().unwrap().starts_with("libsysroot"))
.unwrap_or(false)
{
let _ = fs::remove_file(file);
}
Ok(())
},
);
}
Ok(())
},
|_| Ok(()),
);
let _ = fs::remove_file("Cargo.lock");
let _ = fs::remove_file("test_target/Cargo.lock");
let _ = fs::remove_dir_all("sysroot");
// Builds libs
let channel = if release_mode {
let rustflags = env
.get("RUSTFLAGS")
.cloned()
.unwrap_or_default();
env.insert(
"RUSTFLAGS".to_string(),
format!("{} -Zmir-opt-level=3", rustflags),
);
run_command_with_output_and_env(
&[
&"cargo",
&"build",
&"--target",
&target_triple,
&"--release",
],
None,
Some(&env),
)?;
"release"
} else {
run_command_with_output_and_env(
&[
&"cargo",
&"build",
&"--target",
&target_triple,
&"--features",
&"compiler_builtins/c",
],
None,
Some(env),
)?;
"debug"
};
// Copy files to sysroot
let sysroot_path = format!("sysroot/lib/rustlib/{}/lib/", target_triple);
fs::create_dir_all(&sysroot_path)
.map_err(|error| format!("Failed to create directory `{}`: {:?}", sysroot_path, error))?;
let copier = |dir_to_copy: &Path| {
run_command(&[&"cp", &"-r", &dir_to_copy, &sysroot_path], None).map(|_| ())
};
walk_dir(
&format!("target/{}/{}/deps", target_triple, channel),
copier,
copier,
)?;
Ok(())
}
fn build_codegen(args: &BuildArg) -> Result<(), String> {
let mut env = HashMap::new();
let current_dir =
std::env::current_dir().map_err(|error| format!("`current_dir` failed: {:?}", error))?;
if let Ok(rt_root) = std::env::var("RUST_COMPILER_RT_ROOT") {
env.insert("RUST_COMPILER_RT_ROOT".to_string(), rt_root);
} else {
env.insert(
"RUST_COMPILER_RT_ROOT".to_string(),
format!("{}", current_dir.join("llvm/compiler-rt").display()),
);
}
env.insert("LD_LIBRARY_PATH".to_string(), args.gcc_path.clone());
env.insert("LIBRARY_PATH".to_string(), args.gcc_path.clone());
let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &"rustc"];
if args.codegen_release_channel {
command.push(&"--release");
env.insert("CHANNEL".to_string(), "release".to_string());
env.insert("CARGO_INCREMENTAL".to_string(), "1".to_string());
} else {
env.insert("CHANNEL".to_string(), "debug".to_string());
}
let ref_features = args.features.iter().map(|s| s.as_str()).collect::<Vec<_>>();
for feature in &ref_features {
command.push(feature);
}
run_command_with_env(&command, None, Some(&env))?;
let config = set_config(&mut env, &[], Some(&args.gcc_path))?;
// We voluntarily ignore the error.
let _ = fs::remove_dir_all("target/out");
let gccjit_target = "target/out/gccjit";
fs::create_dir_all(gccjit_target).map_err(|error| {
format!(
"Failed to create directory `{}`: {:?}",
gccjit_target, error
)
})?;
println!("[BUILD] sysroot");
build_sysroot(
&mut env,
args.sysroot_release_channel,
&config.target_triple,
)?;
Ok(())
}
pub fn run() -> Result<(), String> {
let args = match BuildArg::new()? {
Some(args) => args,
None => return Ok(()),
};
build_codegen(&args)?;
Ok(())
}

View File

@ -0,0 +1,125 @@
use crate::utils::{get_gcc_path, get_os_name, get_rustc_host_triple};
use std::collections::HashMap;
use std::env as std_env;
pub struct ConfigInfo {
pub target_triple: String,
pub rustc_command: Vec<String>,
pub run_wrapper: Option<&'static str>,
}
// Returns the beginning of the rustc command line.
pub fn set_config(
env: &mut HashMap<String, String>,
test_flags: &[String],
gcc_path: Option<&str>,
) -> Result<ConfigInfo, String> {
env.insert("CARGO_INCREMENTAL".to_string(), "0".to_string());
let gcc_path = match gcc_path {
Some(path) => path.to_string(),
None => get_gcc_path()?,
};
env.insert("GCC_PATH".to_string(), gcc_path.clone());
let os_name = get_os_name()?;
let dylib_ext = match os_name.as_str() {
"Linux" => "so",
"Darwin" => "dylib",
os => return Err(format!("unsupported OS `{}`", os)),
};
let host_triple = get_rustc_host_triple()?;
let mut linker = None;
let mut target_triple = host_triple.as_str();
let mut run_wrapper = None;
// FIXME: handle this with a command line flag?
// let mut target_triple = "m68k-unknown-linux-gnu";
if host_triple != target_triple {
if target_triple == "m68k-unknown-linux-gnu" {
target_triple = "mips-unknown-linux-gnu";
linker = Some("-Clinker=m68k-linux-gcc");
} else if target_triple == "aarch64-unknown-linux-gnu" {
// We are cross-compiling for aarch64. Use the correct linker and run tests in qemu.
linker = Some("-Clinker=aarch64-linux-gnu-gcc");
run_wrapper = Some("qemu-aarch64 -L /usr/aarch64-linux-gnu");
} else {
return Err(format!("unknown non-native platform `{}`", target_triple));
}
}
let current_dir =
std_env::current_dir().map_err(|error| format!("`current_dir` failed: {:?}", error))?;
let channel = if let Some(channel) = env.get("CHANNEL") {
channel.as_str()
} else {
"debug"
};
let cg_backend_path = current_dir
.join("target")
.join(channel)
.join(&format!("librustc_codegen_gcc.{}", dylib_ext));
let sysroot_path = current_dir.join("build_sysroot/sysroot");
let mut rustflags = Vec::new();
if let Some(cg_rustflags) = env.get("CG_RUSTFLAGS") {
rustflags.push(cg_rustflags.clone());
}
if let Some(linker) = linker {
rustflags.push(linker.to_string());
}
rustflags.extend_from_slice(&[
"-Csymbol-mangling-version=v0".to_string(),
"-Cdebuginfo=2".to_string(),
format!("-Zcodegen-backend={}", cg_backend_path.display()),
"--sysroot".to_string(),
sysroot_path.display().to_string(),
]);
// Since we don't support ThinLTO, disable LTO completely when not trying to do LTO.
// TODO(antoyo): remove when we can handle ThinLTO.
if !env.contains_key(&"FAT_LTO".to_string()) {
rustflags.push("-Clto=off".to_string());
}
rustflags.extend_from_slice(test_flags);
// FIXME(antoyo): remove once the atomic shim is gone
if os_name == "Darwin" {
rustflags.extend_from_slice(&[
"-Clink-arg=-undefined".to_string(),
"-Clink-arg=dynamic_lookup".to_string(),
]);
}
env.insert("RUSTFLAGS".to_string(), rustflags.join(" "));
// display metadata load errors
env.insert("RUSTC_LOG".to_string(), "warn".to_string());
let sysroot = current_dir.join(&format!(
"build_sysroot/sysroot/lib/rustlib/{}/lib",
target_triple
));
let ld_library_path = format!(
"{target}:{sysroot}:{gcc_path}",
target = current_dir.join("target/out").display(),
sysroot = sysroot.display(),
);
env.insert("LD_LIBRARY_PATH".to_string(), ld_library_path.clone());
env.insert("DYLD_LIBRARY_PATH".to_string(), ld_library_path);
// NOTE: To avoid the -fno-inline errors, use /opt/gcc/bin/gcc instead of cc.
// To do so, add a symlink for cc to /opt/gcc/bin/gcc in our PATH.
// Another option would be to add the following Rust flag: -Clinker=/opt/gcc/bin/gcc
let path = std::env::var("PATH").unwrap_or_default();
env.insert("PATH".to_string(), format!("/opt/gcc/bin:{}", path));
let mut rustc_command = vec!["rustc".to_string()];
rustc_command.extend_from_slice(&rustflags);
rustc_command.extend_from_slice(&[
"-L".to_string(),
"crate=target/out".to_string(),
"--out-dir".to_string(),
"target/out".to_string(),
]);
Ok(ConfigInfo {
target_triple: target_triple.to_string(),
rustc_command,
run_wrapper,
})
}

View File

@ -0,0 +1,62 @@
use std::env;
use std::process;
mod build;
mod config;
mod prepare;
mod rustc_info;
mod utils;
macro_rules! arg_error {
($($err:tt)*) => {{
eprintln!($($err)*);
eprintln!();
usage();
std::process::exit(1);
}};
}
fn usage() {
println!(
"\
Available commands for build_system:
prepare : Run prepare command
build : Run build command
--help : Show this message"
);
}
pub enum Command {
Prepare,
Build,
}
fn main() {
if env::var("RUST_BACKTRACE").is_err() {
env::set_var("RUST_BACKTRACE", "1");
}
let command = match env::args().nth(1).as_deref() {
Some("prepare") => Command::Prepare,
Some("build") => Command::Build,
Some("--help") => {
usage();
process::exit(0);
}
Some(flag) if flag.starts_with('-') => arg_error!("Expected command found flag {}", flag),
Some(command) => arg_error!("Unknown command {}", command),
None => {
usage();
process::exit(0);
}
};
if let Err(e) = match command {
Command::Prepare => prepare::run(),
Command::Build => build::run(),
} {
eprintln!("Command failed to run: {e:?}");
process::exit(1);
}
}

View File

@ -0,0 +1,227 @@
use crate::rustc_info::get_rustc_path;
use crate::utils::{cargo_install, git_clone, run_command, run_command_with_output, walk_dir};
use std::fs;
use std::path::Path;
fn prepare_libcore(sysroot_path: &Path) -> Result<(), String> {
let rustc_path = match get_rustc_path() {
Some(path) => path,
None => return Err("`rustc` path not found".to_string()),
};
let parent = match rustc_path.parent() {
Some(path) => path,
None => return Err(format!("No parent for `{}`", rustc_path.display())),
};
let rustlib_dir = parent
.join("../lib/rustlib/src/rust")
.canonicalize()
.map_err(|error| format!("Failed to canonicalize path: {:?}", error))?;
if !rustlib_dir.is_dir() {
return Err("Please install `rust-src` component".to_string());
}
let sysroot_dir = sysroot_path.join("sysroot_src");
if sysroot_dir.is_dir() {
if let Err(error) = fs::remove_dir_all(&sysroot_dir) {
return Err(format!(
"Failed to remove `{}`: {:?}",
sysroot_dir.display(),
error,
));
}
}
let sysroot_library_dir = sysroot_dir.join("library");
fs::create_dir_all(&sysroot_library_dir).map_err(|error| {
format!(
"Failed to create folder `{}`: {:?}",
sysroot_library_dir.display(),
error,
)
})?;
run_command(
&[&"cp", &"-r", &rustlib_dir.join("library"), &sysroot_dir],
None,
)?;
println!("[GIT] init (cwd): `{}`", sysroot_dir.display());
run_command(&[&"git", &"init"], Some(&sysroot_dir))?;
println!("[GIT] add (cwd): `{}`", sysroot_dir.display());
run_command(&[&"git", &"add", &"."], Some(&sysroot_dir))?;
println!("[GIT] commit (cwd): `{}`", sysroot_dir.display());
// This is needed on systems where nothing is configured.
// git really needs something here, or it will fail.
// Even using --author is not enough.
run_command(
&[&"git", &"config", &"user.email", &"none@example.com"],
Some(&sysroot_dir),
)?;
run_command(
&[&"git", &"config", &"user.name", &"None"],
Some(&sysroot_dir),
)?;
run_command(
&[&"git", &"config", &"core.autocrlf", &"false"],
Some(&sysroot_dir),
)?;
run_command(
&[&"git", &"config", &"commit.gpgSign", &"false"],
Some(&sysroot_dir),
)?;
run_command(
&[&"git", &"commit", &"-m", &"Initial commit", &"-q"],
Some(&sysroot_dir),
)?;
let mut patches = Vec::new();
walk_dir(
"patches",
|_| Ok(()),
|file_path: &Path| {
patches.push(file_path.to_path_buf());
Ok(())
},
)?;
patches.sort();
for file_path in patches {
println!("[GIT] apply `{}`", file_path.display());
let path = Path::new("../..").join(file_path);
run_command_with_output(&[&"git", &"apply", &path], Some(&sysroot_dir))?;
run_command_with_output(&[&"git", &"add", &"-A"], Some(&sysroot_dir))?;
run_command_with_output(
&[
&"git",
&"commit",
&"--no-gpg-sign",
&"-m",
&format!("Patch {}", path.display()),
],
Some(&sysroot_dir),
)?;
}
println!("Successfully prepared libcore for building");
Ok(())
}
// build with cg_llvm for perf comparison
fn build_raytracer(repo_dir: &Path) -> Result<(), String> {
run_command(&[&"cargo", &"build"], Some(repo_dir))?;
let mv_target = repo_dir.join("raytracer_cg_llvm");
if mv_target.is_file() {
std::fs::remove_file(&mv_target)
.map_err(|e| format!("Failed to remove file `{}`: {e:?}", mv_target.display()))?;
}
run_command(
&[&"mv", &"target/debug/main", &"raytracer_cg_llvm"],
Some(repo_dir),
)?;
Ok(())
}
fn clone_and_setup<F>(repo_url: &str, checkout_commit: &str, extra: Option<F>) -> Result<(), String>
where
F: Fn(&Path) -> Result<(), String>,
{
let clone_result = git_clone(repo_url, None)?;
if !clone_result.ran_clone {
println!("`{}` has already been cloned", clone_result.repo_name);
}
let repo_path = Path::new(&clone_result.repo_name);
run_command(&[&"git", &"checkout", &"--", &"."], Some(&repo_path))?;
run_command(&[&"git", &"checkout", &checkout_commit], Some(&repo_path))?;
let filter = format!("-{}-", clone_result.repo_name);
walk_dir(
"crate_patches",
|_| Ok(()),
|file_path| {
let patch = file_path.as_os_str().to_str().unwrap();
if patch.contains(&filter) && patch.ends_with(".patch") {
run_command_with_output(
&[&"git", &"am", &file_path.canonicalize().unwrap()],
Some(&repo_path),
)?;
}
Ok(())
},
)?;
if let Some(extra) = extra {
extra(&repo_path)?;
}
Ok(())
}
struct PrepareArg {
only_libcore: bool,
}
impl PrepareArg {
fn new() -> Result<Option<Self>, String> {
let mut only_libcore = false;
for arg in std::env::args().skip(2) {
match arg.as_str() {
"--only-libcore" => only_libcore = true,
"--help" => {
Self::usage();
return Ok(None);
}
a => return Err(format!("Unknown argument `{a}`")),
}
}
Ok(Some(Self { only_libcore }))
}
fn usage() {
println!(
r#"
`prepare` command help:
--only-libcore : Only setup libcore and don't clone other repositories
--help : Show this help
"#
)
}
}
pub fn run() -> Result<(), String> {
let args = match PrepareArg::new()? {
Some(a) => a,
None => return Ok(()),
};
let sysroot_path = Path::new("build_sysroot");
prepare_libcore(sysroot_path)?;
if !args.only_libcore {
cargo_install("hyperfine")?;
let to_clone = &[
(
"https://github.com/rust-random/rand.git",
"0f933f9c7176e53b2a3c7952ded484e1783f0bf1",
None,
),
(
"https://github.com/rust-lang/regex.git",
"341f207c1071f7290e3f228c710817c280c8dca1",
None,
),
(
"https://github.com/ebobby/simple-raytracer",
"804a7a21b9e673a482797aa289a18ed480e4d813",
Some(build_raytracer),
),
];
for (repo_url, checkout_commit, cb) in to_clone {
clone_and_setup(repo_url, checkout_commit, *cb)?;
}
}
println!("Successfully ran `prepare`");
Ok(())
}

View File

@ -0,0 +1,12 @@
use std::path::{Path, PathBuf};
use crate::utils::run_command;
pub fn get_rustc_path() -> Option<PathBuf> {
if let Ok(rustc) = std::env::var("RUSTC") {
return Some(PathBuf::from(rustc));
}
run_command(&[&"rustup", &"which", &"rustc"], None)
.ok()
.map(|out| Path::new(String::from_utf8(out.stdout).unwrap().trim()).to_path_buf())
}

View File

@ -0,0 +1,240 @@
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fmt::Debug;
use std::fs;
use std::path::Path;
use std::process::{Command, ExitStatus, Output};
fn get_command_inner(
input: &[&dyn AsRef<OsStr>],
cwd: Option<&Path>,
env: Option<&HashMap<String, String>>,
) -> Command {
let (cmd, args) = match input {
[] => panic!("empty command"),
[cmd, args @ ..] => (cmd, args),
};
let mut command = Command::new(cmd);
command.args(args);
if let Some(cwd) = cwd {
command.current_dir(cwd);
}
if let Some(env) = env {
command.envs(env.iter().map(|(k, v)| (k.as_str(), v.as_str())));
}
command
}
fn check_exit_status(
input: &[&dyn AsRef<OsStr>],
cwd: Option<&Path>,
exit_status: ExitStatus,
) -> Result<(), String> {
if exit_status.success() {
Ok(())
} else {
Err(format!(
"Command `{}`{} exited with status {:?}",
input
.iter()
.map(|s| s.as_ref().to_str().unwrap())
.collect::<Vec<_>>()
.join(" "),
cwd.map(|cwd| format!(" (running in folder `{}`)", cwd.display()))
.unwrap_or_default(),
exit_status.code(),
))
}
}
fn command_error<D: Debug>(input: &[&dyn AsRef<OsStr>], cwd: &Option<&Path>, error: D) -> String {
format!(
"Command `{}`{} failed to run: {error:?}",
input
.iter()
.map(|s| s.as_ref().to_str().unwrap())
.collect::<Vec<_>>()
.join(" "),
cwd.as_ref()
.map(|cwd| format!(" (running in folder `{}`)", cwd.display(),))
.unwrap_or_default(),
)
}
pub fn run_command(input: &[&dyn AsRef<OsStr>], cwd: Option<&Path>) -> Result<Output, String> {
run_command_with_env(input, cwd, None)
}
pub fn run_command_with_env(
input: &[&dyn AsRef<OsStr>],
cwd: Option<&Path>,
env: Option<&HashMap<String, String>>,
) -> Result<Output, String> {
let output = get_command_inner(input, cwd, env)
.output()
.map_err(|e| command_error(input, &cwd, e))?;
check_exit_status(input, cwd, output.status)?;
Ok(output)
}
pub fn run_command_with_output(
input: &[&dyn AsRef<OsStr>],
cwd: Option<&Path>,
) -> Result<(), String> {
let exit_status = get_command_inner(input, cwd, None)
.spawn()
.map_err(|e| command_error(input, &cwd, e))?
.wait()
.map_err(|e| command_error(input, &cwd, e))?;
check_exit_status(input, cwd, exit_status)?;
Ok(())
}
pub fn run_command_with_output_and_env(
input: &[&dyn AsRef<OsStr>],
cwd: Option<&Path>,
env: Option<&HashMap<String, String>>,
) -> Result<(), String> {
let exit_status = get_command_inner(input, cwd, env)
.spawn()
.map_err(|e| command_error(input, &cwd, e))?
.wait()
.map_err(|e| command_error(input, &cwd, e))?;
check_exit_status(input, cwd, exit_status)?;
Ok(())
}
pub fn cargo_install(to_install: &str) -> Result<(), String> {
let output = run_command(&[&"cargo", &"install", &"--list"], None)?;
let to_install_needle = format!("{to_install} ");
// cargo install --list returns something like this:
//
// mdbook-toc v0.8.0:
// mdbook-toc
// rust-reduce v0.1.0:
// rust-reduce
//
// We are only interested in the command name so we only look for lines ending with `:`.
if String::from_utf8(output.stdout)
.unwrap()
.lines()
.any(|line| line.ends_with(':') && line.starts_with(&to_install_needle))
{
return Ok(());
}
// We voluntarily ignore this error.
if run_command_with_output(&[&"cargo", &"install", &to_install], None).is_err() {
println!("Skipping installation of `{to_install}`");
}
Ok(())
}
pub fn get_os_name() -> Result<String, String> {
let output = run_command(&[&"uname"], None)?;
let name = std::str::from_utf8(&output.stdout)
.unwrap_or("")
.trim()
.to_string();
if !name.is_empty() {
Ok(name)
} else {
Err("Failed to retrieve the OS name".to_string())
}
}
pub fn get_rustc_host_triple() -> Result<String, String> {
let output = run_command(&[&"rustc", &"-vV"], None)?;
let content = std::str::from_utf8(&output.stdout).unwrap_or("");
for line in content.split('\n').map(|line| line.trim()) {
if !line.starts_with("host:") {
continue;
}
return Ok(line.split(':').nth(1).unwrap().trim().to_string());
}
Err("Cannot find host triple".to_string())
}
pub fn get_gcc_path() -> Result<String, String> {
let content = match fs::read_to_string("gcc_path") {
Ok(content) => content,
Err(_) => {
return Err(
"Please put the path to your custom build of libgccjit in the file \
`gcc_path`, see Readme.md for details"
.into(),
)
}
};
match content
.split('\n')
.map(|line| line.trim())
.filter(|line| !line.is_empty())
.next()
{
Some(gcc_path) => {
let path = Path::new(gcc_path);
if !path.exists() {
Err(format!(
"Path `{}` contained in the `gcc_path` file doesn't exist",
gcc_path,
))
} else {
Ok(gcc_path.into())
}
}
None => Err("No path found in `gcc_path` file".into()),
}
}
pub struct CloneResult {
pub ran_clone: bool,
pub repo_name: String,
}
pub fn git_clone(to_clone: &str, dest: Option<&Path>) -> Result<CloneResult, String> {
let repo_name = to_clone.split('/').last().unwrap();
let repo_name = match repo_name.strip_suffix(".git") {
Some(n) => n.to_string(),
None => repo_name.to_string(),
};
let dest = dest
.map(|dest| dest.join(&repo_name))
.unwrap_or_else(|| Path::new(&repo_name).into());
if dest.is_dir() {
return Ok(CloneResult {
ran_clone: false,
repo_name,
});
}
run_command_with_output(&[&"git", &"clone", &to_clone, &dest], None)?;
Ok(CloneResult {
ran_clone: true,
repo_name,
})
}
pub fn walk_dir<P, D, F>(dir: P, mut dir_cb: D, mut file_cb: F) -> Result<(), String>
where
P: AsRef<Path>,
D: FnMut(&Path) -> Result<(), String>,
F: FnMut(&Path) -> Result<(), String>,
{
let dir = dir.as_ref();
for entry in fs::read_dir(dir)
.map_err(|error| format!("Failed to read dir `{}`: {:?}", dir.display(), error))?
{
let entry = entry
.map_err(|error| format!("Failed to read entry in `{}`: {:?}", dir.display(), error))?;
let entry_path = entry.path();
if entry_path.is_dir() {
dir_cb(&entry_path)?;
} else {
file_cb(&entry_path)?;
}
}
Ok(())
}

View File

@ -38,10 +38,17 @@ if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then
fi
fi
export RUSTFLAGS="$CG_RUSTFLAGS $linker -Csymbol-mangling-version=v0 -Cdebuginfo=2 -Clto=off -Zcodegen-backend=$(pwd)/target/${CHANNEL:-debug}/librustc_codegen_gcc.$dylib_ext --sysroot $(pwd)/build_sysroot/sysroot $TEST_FLAGS"
# Since we don't support ThinLTO, disable LTO completely when not trying to do LTO.
# TODO(antoyo): remove when we can handle ThinLTO.
disable_lto_flags=''
if [[ ! -v FAT_LTO ]]; then
disable_lto_flags='-Clto=off'
fi
export RUSTFLAGS="$CG_RUSTFLAGS $linker -Csymbol-mangling-version=v0 -Cdebuginfo=2 $disable_lto_flags -Zcodegen-backend=$(pwd)/target/${CHANNEL:-debug}/librustc_codegen_gcc.$dylib_ext --sysroot $(pwd)/build_sysroot/sysroot $TEST_FLAGS"
# FIXME(antoyo): remove once the atomic shim is gone
if [[ `uname` == 'Darwin' ]]; then
if [[ unamestr == 'Darwin' ]]; then
export RUSTFLAGS="$RUSTFLAGS -Clink-arg=-undefined -Clink-arg=dynamic_lookup"
fi
@ -50,3 +57,7 @@ export RUSTC_LOG=warn # display metadata load errors
export LD_LIBRARY_PATH="$(pwd)/target/out:$(pwd)/build_sysroot/sysroot/lib/rustlib/$TARGET_TRIPLE/lib:$GCC_PATH"
export DYLD_LIBRARY_PATH=$LD_LIBRARY_PATH
# NOTE: To avoid the -fno-inline errors, use /opt/gcc/bin/gcc instead of cc.
# To do so, add a symlink for cc to /opt/gcc/bin/gcc in our PATH.
# Another option would be to add the following Rust flag: -Clinker=/opt/gcc/bin/gcc
export PATH="/opt/gcc/bin:$PATH"

View File

@ -0,0 +1,17 @@
# Add support for a new function attribute
To add support for a new function attribute in libgccjit, you need to do the following steps:
1. Copy the corresponding function from `c-family/c-attribs.cc` into `jit/dummy-frontend.cc`. For example if you add the `target` attribute, the function name will be `handle_target_attribute`.
2. Copy the corresponding entry from the `c_common_attribute_table` variable in the `c-family/c-attribs.cc` file into the `jit_attribute_table` variable in `jit/dummy-frontend.cc`.
3. Add a new variant in the `gcc_jit_fn_attribute` enum in the `jit/libgccjit.h` file.
4. Add a test to ensure the attribute is correctly applied in `gcc/testsuite/jit.dg/`. Take a look at `gcc/testsuite/jit.dg/test-nonnull.c` if you want an example.
5. Run the example like this (in your `gcc-build` folder): `make check-jit RUNTESTFLAGS="-v -v -v jit.exp=jit.dg/test-nonnull.c"`
Once done, you need to update the [gccjit.rs] crate to add the new enum variant in the corresponding enum (`FnAttribute`).
Finally, you need to update this repository by calling the relevant API you added in [gccjit.rs].
To test it, build `gcc`, run `cargo update -p gccjit` and then you can test the generated output for a given Rust crate.
[gccjit.rs]: https://github.com/antoyo/gccjit.rs
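A hedged sketch of that rebuild-and-test loop (the `gcc-build` location and the `y.sh` flow are assumptions based on the rest of this repository's docs):
```bash
cd gcc-build && make && cd -   # rebuild gcc/libgccjit with the new attribute support
cargo update -p gccjit         # pull the gccjit.rs revision exposing the new FnAttribute variant
./y.sh build                   # rebuild the codegen, then inspect the generated output for a test crate
```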

View File

@ -0,0 +1,111 @@
# GIMPLE
You can see the full documentation about what GIMPLE is [here](https://gcc.gnu.org/onlinedocs/gccint/GIMPLE.html). In this document we will explain how to generate it.
First, we'll copy the content from `gcc/gcc/testsuite/jit.dg/test-const-attribute.c` into a
file named `local.c` and remove the content we're not interested in:
```diff
- /* { dg-do compile { target x86_64-*-* } } */
...
- /* We don't want set_options() in harness.h to set -O3 to see that the const
- attribute affects the optimizations. */
- #define TEST_ESCHEWS_SET_OPTIONS
- static void set_options (gcc_jit_context *ctxt, const char *argv0)
- {
- // Set "-O3".
- gcc_jit_context_set_int_option(ctxt, GCC_JIT_INT_OPTION_OPTIMIZATION_LEVEL, 3);
- }
-
- #define TEST_COMPILING_TO_FILE
- #define OUTPUT_KIND GCC_JIT_OUTPUT_KIND_ASSEMBLER
- #define OUTPUT_FILENAME "output-of-test-const-attribute.c.s"
- #include "harness.h"
...
- /* { dg-final { jit-verify-output-file-was-created "" } } */
- /* Check that the loop was optimized away */
- /* { dg-final { jit-verify-assembler-output-not "jne" } } */
```
Then we'll add a `main` function which will call the `create_code` function but
also add the calls we need to generate the GIMPLE:
```C
int main() {
gcc_jit_context *ctxt = gcc_jit_context_acquire();
// To set `-O3`, update it depending on your needs.
gcc_jit_context_set_int_option(ctxt, GCC_JIT_INT_OPTION_OPTIMIZATION_LEVEL, 3);
// Very important option to generate the gimple format.
gcc_jit_context_set_bool_option(ctxt, GCC_JIT_BOOL_OPTION_DUMP_INITIAL_GIMPLE, 1);
create_code(ctxt, NULL);
gcc_jit_context_compile(ctxt);
// If you want to compile to assembly (or any other format) directly, you can
// use the following call instead:
// gcc_jit_context_compile_to_file(ctxt, GCC_JIT_OUTPUT_KIND_ASSEMBLER, "out.s");
return 0;
}
```
Then we can compile it by using:
```console
gcc local.c -I `pwd`/gcc/gcc/jit/ -L `pwd`/gcc-build/gcc -lgccjit -o out
```
And finally when you run it:
```console
LD_LIBRARY_PATH=`pwd`/gcc-build/gcc LIBRARY_PATH=`pwd`/gcc-build/gcc ./out
```
It should display:
```c
__attribute__((const))
int xxx ()
{
int D.3394;
int sum;
int x;
<D.3377>:
x = 45;
sum = 0;
goto loop_cond;
loop_cond:
x = x >> 1;
if (x != 0) goto after_loop; else goto loop_body;
loop_body:
_1 = foo (x);
_2 = _1 * 2;
x = x + _2;
goto loop_cond;
after_loop:
D.3394 = sum;
return D.3394;
}
```
An alternative way to generate the GIMPLE is to replace:
```c
gcc_jit_context_set_bool_option(ctxt, GCC_JIT_BOOL_OPTION_DUMP_INITIAL_GIMPLE, 1);
```
with:
```c
gcc_jit_context_add_command_line_option(ctxt, "-fdump-tree-gimple");
```
(although you can have both at the same time too). Then you can compile it as before. One difference: before executing it, I recommend running:
```console
rm -rf /tmp/libgccjit-*
```
to make it easier for you to know which folder to look into.
Once the execution is done, you should have a file with a path looking like `/tmp/libgccjit-9OFqkD/fake.c.006t.gimple`, which contains the GIMPLE format.
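If you are unsure which temporary folder the run created, a quick search helps (a minimal sketch; the directory name changes on every run):
```console
find /tmp/libgccjit-* -name '*.gimple'
```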

View File

@ -0,0 +1,44 @@
This guide explains what to do to send a GCC patch for review.
All the commands should be run in the folder where you cloned GCC. First, check that your commit messages pass the GCC ChangeLog checks:
```bash
./contrib/gcc-changelog/git_check_commit.py
```
You can provide a specific commit hash:
```bash
./contrib/gcc-changelog/git_check_commit.py abdef78989
```
a range:
```bash
./contrib/gcc-changelog/git_check_commit.py HEAD~2
```
or even a comparison with a remote branch:
```bash
./contrib/gcc-changelog/git_check_commit.py upstream/master..HEAD
```
When there are no more errors, generate the git patch:
```bash
git format-patch -1 `git rev-parse --short HEAD`
```
Then you can run the remaining checks using:
```bash
contrib/check_GNU_style.sh 0001-your-patch.patch
```
When you have no more errors, you can submit the `.patch` file by sending an email
to `gcc-patches@gcc.gnu.org` and to the relevant GCC mailing lists
depending on what your patch changes. You can find the list of the mailing lists
[here](https://gcc.gnu.org/lists.html).
You can find more information about "contributing to GCC" [here](https://gcc.gnu.org/contribute.html).
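If you prefer to send the patch from the command line, here is a minimal sketch assuming `git send-email` is installed and configured for your mail account (the `--cc` address is a placeholder for whichever additional mailing list applies to your change):
```bash
git send-email --to=gcc-patches@gcc.gnu.org \
    --cc=<relevant-list>@gcc.gnu.org \
    0001-your-patch.patch
```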

View File

@ -1,5 +1,6 @@
#![feature(start, core_intrinsics, alloc_error_handler, lang_items)]
#![no_std]
#![allow(internal_features)]
extern crate alloc;
extern crate alloc_system;

View File

@ -12,7 +12,7 @@
target_arch = "mips",
target_arch = "mips32r6",
target_arch = "powerpc",
target_arch = "csky"
target_arch = "csky",
target_arch = "powerpc64"))]
const MIN_ALIGN: usize = 8;
#[cfg(any(target_arch = "x86_64",

View File

@ -2,6 +2,7 @@
#![feature(arbitrary_self_types, unsize, coerce_unsized, dispatch_from_dyn)]
#![feature(rustc_attrs)]
#![allow(internal_features)]
use std::{
ops::{Deref, CoerceUnsized, DispatchFromDyn},

View File

@ -4,7 +4,7 @@
thread_local
)]
#![no_core]
#![allow(dead_code)]
#![allow(dead_code, internal_features)]
#[no_mangle]
unsafe extern "C" fn _Unwind_Resume() {
@ -429,6 +429,15 @@ fn panic_cannot_unwind() -> ! {
}
}
#[lang = "panic_in_cleanup"]
#[rustc_nounwind]
fn panic_in_cleanup() -> ! {
unsafe {
libc::printf("panic in a destructor during cleanup\n\0" as *const str as *const i8);
intrinsics::abort();
}
}
#[lang = "panic_bounds_check"]
#[track_caller]
fn panic_bounds_check(index: usize, len: usize) -> ! {

View File

@ -5,7 +5,7 @@
extern_types, thread_local
)]
#![no_core]
#![allow(dead_code, non_camel_case_types)]
#![allow(dead_code, internal_features, non_camel_case_types)]
extern crate mini_core;

View File

@ -1,5 +1,6 @@
#![feature(start, core_intrinsics, lang_items)]
#![no_std]
#![allow(internal_features)]
#[link(name = "c")]
extern {}

View File

@ -0,0 +1,23 @@
tests/ui/lint/unsafe_code/forge_unsafe_block.rs
tests/ui/lint/unused-qualification-in-derive-expansion.rs
tests/ui/macro-quote-test.rs
tests/ui/macros/proc_macro.rs
tests/ui/panic-runtime/lto-unwind.rs
tests/ui/resolve/derive-macro-1.rs
tests/ui/resolve/derive-macro-2.rs
tests/ui/rfcs/rfc-2565-param-attrs/param-attrs-pretty.rs
tests/ui/rfcs/rfc-2565-param-attrs/issue-64682-dropping-first-attrs-in-impl-fns.rs
tests/ui/rfcs/rfc-3348-c-string-literals/edition-spans.rs
tests/ui/rust-2018/suggestions-not-always-applicable.rs
tests/ui/rust-2021/reserved-prefixes-via-macro.rs
tests/ui/underscore-imports/duplicate.rs
tests/ui/async-await/issues/issue-60674.rs
tests/ui/attributes/main-removed-2/main.rs
tests/ui/cfg/assume-incomplete-release/assume-incomplete.rs
tests/ui/crate-loading/cross-compiled-proc-macro.rs
tests/ui/derives/derive-marker-tricky.rs
tests/ui/diagnostic_namespace/existing_proc_macros.rs
tests/ui/fmt/format-args-capture-issue-106408.rs
tests/ui/fmt/indoc-issue-106408.rs
tests/ui/hygiene/issue-77523-def-site-async-await.rs
tests/ui/inherent-impls-overlap-check/no-overlap.rs

View File

@ -0,0 +1,11 @@
tests/ui/issues/issue-44056.rs
tests/ui/lto/fat-lto.rs
tests/ui/lto/debuginfo-lto.rs
tests/ui/lto/lto-many-codegen-units.rs
tests/ui/lto/issue-100772.rs
tests/ui/lto/lto-rustc-loads-linker-plugin.rs
tests/ui/panic-runtime/lto-unwind.rs
tests/ui/sanitize/issue-111184-generator-witness.rs
tests/ui/sepcomp/sepcomp-lib-lto.rs
tests/ui/lto/lto-opt-level-s.rs
tests/ui/lto/lto-opt-level-z.rs

View File

@ -1,11 +1,5 @@
tests/ui/allocator/custom-in-block.rs
tests/ui/allocator/custom-in-submodule.rs
tests/ui/allocator/custom.rs
tests/ui/allocator/hygiene.rs
tests/ui/allocator/no_std-alloc-error-handler-custom.rs
tests/ui/allocator/no_std-alloc-error-handler-default.rs
tests/ui/allocator/xcrate-use.rs
tests/ui/allocator/xcrate-use2.rs
tests/ui/asm/may_unwind.rs
tests/ui/asm/x86_64/multiple-clobber-abi.rs
tests/ui/debuginfo/debuginfo-emit-llvm-ir-and-split-debuginfo.rs
@ -14,15 +8,12 @@ tests/ui/linkage-attr/linkage1.rs
tests/ui/lto/dylib-works.rs
tests/ui/numbers-arithmetic/saturating-float-casts.rs
tests/ui/polymorphization/promoted-function.rs
tests/ui/process/nofile-limit.rs
tests/ui/sepcomp/sepcomp-cci.rs
tests/ui/sepcomp/sepcomp-extern.rs
tests/ui/sepcomp/sepcomp-fns-backwards.rs
tests/ui/sepcomp/sepcomp-fns.rs
tests/ui/sepcomp/sepcomp-statics.rs
tests/ui/simd/intrinsic/generic-arithmetic-pass.rs
tests/ui/sse2.rs
tests/ui/target-feature/missing-plusminus.rs
tests/ui/asm/x86_64/may_unwind.rs
tests/ui/backtrace.rs
tests/ui/catch-unwind-bang.rs
@ -54,8 +45,8 @@ tests/ui/issues/issue-40883.rs
tests/ui/issues/issue-43853.rs
tests/ui/issues/issue-47364.rs
tests/ui/macros/rfc-2011-nicer-assert-messages/assert-without-captures-does-not-create-unnecessary-code.rs
tests/ui/rfcs/rfc-2091-track-caller/std-panic-locations.rs
tests/ui/rfcs/rfc-1857-stabilize-drop-order/drop-order.rs
tests/ui/rfcs/rfc-2091-track-caller/std-panic-locations.rs
tests/ui/simd/issue-17170.rs
tests/ui/simd/issue-39720.rs
tests/ui/simd/issue-89193.rs
@ -65,6 +56,18 @@ tests/ui/alloc-error/default-alloc-error-hook.rs
tests/ui/generator/panic-safe.rs
tests/ui/issues/issue-14875.rs
tests/ui/issues/issue-29948.rs
tests/ui/panic-while-printing.rs
tests/ui/enum-discriminant/get_discr.rs
tests/ui/panics/nested_panic_caught.rs
tests/ui/simd/intrinsic/generic-bswap-byte.rs
tests/ui/const_prop/ice-issue-111353.rs
tests/ui/process/println-with-broken-pipe.rs
tests/ui/panic-runtime/lto-abort.rs
tests/ui/lto/thin-lto-inlines2.rs
tests/ui/lto/weak-works.rs
tests/ui/lto/thin-lto-inlines.rs
tests/ui/lto/thin-lto-global-allocator.rs
tests/ui/lto/msvc-imp-present.rs
tests/ui/lto/lto-thin-rustc-loads-linker-plugin.rs
tests/ui/lto/all-crates.rs
tests/ui/async-await/deep-futures-are-freeze.rs
tests/ui/closures/capture-unsized-by-ref.rs
tests/ui/generator/resume-after-return.rs

View File

@ -37,3 +37,4 @@ tests/ui/simd/intrinsic/generic-gather-pass.rs
tests/ui/simd/issue-85915-simd-ptrs.rs
tests/ui/issues/issue-68010-large-zst-consts.rs
tests/ui/rust-2018/proc-macro-crate-in-paths.rs
tests/ui/target-feature/missing-plusminus.rs

View File

@ -1,3 +1,7 @@
codegen_gcc_unknown_ctarget_feature_prefix =
unknown feature specified for `-Ctarget-feature`: `{$feature}`
.note = features must begin with a `+` to enable or `-` to disable it
codegen_gcc_invalid_minimum_alignment =
invalid minimum global alignment: {$err}
@ -9,3 +13,29 @@ codegen_gcc_tied_target_features = the target features {$features} must all be e
codegen_gcc_unwinding_inline_asm =
GCC backend does not support unwinding from inline asm
codegen_gcc_copy_bitcode = failed to copy bitcode to object file: {$err}
codegen_gcc_dynamic_linking_with_lto =
cannot prefer dynamic linking when performing LTO
.note = only 'staticlib', 'bin', and 'cdylib' outputs are supported with LTO
codegen_gcc_load_bitcode = failed to load bitcode of module "{$name}"
codegen_gcc_lto_disallowed = lto can only be run for executables, cdylibs and static library outputs
codegen_gcc_lto_dylib = lto cannot be used for `dylib` crate type without `-Zdylib-lto`
codegen_gcc_lto_bitcode_from_rlib = failed to get bitcode from object file for LTO ({$gcc_err})
codegen_gcc_unknown_ctarget_feature =
unknown feature specified for `-Ctarget-feature`: `{$feature}`
.note = it is still passed through to the codegen backend
.possible_feature = you might have meant: `{$rust_feature}`
.consider_filing_feature_request = consider filing a feature request
codegen_gcc_missing_features =
add the missing features in a `target_feature` attribute
codegen_gcc_target_feature_disable_or_enable =
the target features {$features} must all be either enabled or disabled together

View File

@ -1,25 +1,26 @@
From c3821e02fbd6cb5ad6e06d759fccdc9073712375 Mon Sep 17 00:00:00 2001
From b8f3eed3053c9333b5dfbeaeb2a6a65a4b3156df Mon Sep 17 00:00:00 2001
From: Antoni Boucher <bouanto@zoho.com>
Date: Tue, 7 Jun 2022 21:40:13 -0400
Subject: [PATCH] Add stdarch Cargo.toml for testing
Date: Tue, 29 Aug 2023 13:06:34 -0400
Subject: [PATCH] Patch 0001-Add-stdarch-Cargo.toml-for-testing.patch
---
library/stdarch/Cargo.toml | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
library/stdarch/Cargo.toml | 23 +++++++++++++++++++++++
1 file changed, 23 insertions(+)
create mode 100644 library/stdarch/Cargo.toml
diff --git a/library/stdarch/Cargo.toml b/library/stdarch/Cargo.toml
new file mode 100644
index 0000000..fbe0a95
index 0000000..4c63700
--- /dev/null
+++ b/library/stdarch/Cargo.toml
@@ -0,0 +1,20 @@
@@ -0,0 +1,21 @@
+[workspace]
+resolver = "1"
+members = [
+ "crates/core_arch",
+ "crates/std_detect",
+ "crates/stdarch-gen",
+ "examples/"
+ #"examples/"
+]
+exclude = [
+ "crates/wasm-assert-instr-tests"
@ -35,5 +36,5 @@ index 0000000..fbe0a95
+opt-level = 3
+incremental = true
--
2.26.2.7.g19db9cfb68.dirty
2.42.0

View File

@ -1,25 +0,0 @@
From a2d53a324a02c04b76c0e9d39dc15cd443a3b8b2 Mon Sep 17 00:00:00 2001
From: Antoni Boucher <bouanto@zoho.com>
Date: Fri, 25 Nov 2022 11:18:11 -0500
Subject: [PATCH] Disable examples
---
library/stdarch/Cargo.toml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/library/stdarch/Cargo.toml b/library/stdarch/Cargo.toml
index fbe0a95..748d72d 100644
--- a/library/stdarch/Cargo.toml
+++ b/library/stdarch/Cargo.toml
@@ -3,7 +3,7 @@ members = [
"crates/core_arch",
"crates/std_detect",
"crates/stdarch-gen",
- "examples/"
+ #"examples/"
]
exclude = [
"crates/wasm-assert-instr-tests"
--
2.26.2.7.g19db9cfb68.dirty

View File

@ -1,30 +0,0 @@
#!/usr/bin/env bash
set -e
set -v
source prepare_build.sh
cargo install hyperfine || echo "Skipping hyperfine install"
git clone https://github.com/rust-random/rand.git || echo "rust-random/rand has already been cloned"
pushd rand
git checkout -- .
git checkout 0f933f9c7176e53b2a3c7952ded484e1783f0bf1
git am ../crate_patches/*-rand-*.patch
popd
git clone https://github.com/rust-lang/regex.git || echo "rust-lang/regex has already been cloned"
pushd regex
git checkout -- .
git checkout 341f207c1071f7290e3f228c710817c280c8dca1
popd
git clone https://github.com/ebobby/simple-raytracer || echo "ebobby/simple-raytracer has already been cloned"
pushd simple-raytracer
git checkout -- .
git checkout 804a7a21b9e673a482797aa289a18ed480e4d813
# build with cg_llvm for perf comparison
cargo build
mv target/debug/main raytracer_cg_llvm
popd

View File

@ -1,5 +0,0 @@
#!/usr/bin/env bash
set -e
set -v
./build_sysroot/prepare_sysroot_src.sh

View File

@ -1,3 +1,3 @@
[toolchain]
channel = "nightly-2023-06-19"
channel = "nightly-2023-10-08"
components = ["rust-src", "rustc-dev", "llvm-tools-preview"]

View File

@ -16,7 +16,7 @@ case $1 in
done
./clean_all.sh
./prepare.sh
./y.sh prepare
;;
"commit")
git add rust-toolchain

View File

@ -3,7 +3,9 @@ use rustc_codegen_ssa::traits::{AbiBuilderMethods, BaseTypeMethods};
use rustc_data_structures::fx::FxHashSet;
use rustc_middle::bug;
use rustc_middle::ty::Ty;
use rustc_target::abi::call::{CastTarget, FnAbi, PassMode, Reg, RegKind};
#[cfg(feature = "master")]
use rustc_session::config;
use rustc_target::abi::call::{ArgAttributes, CastTarget, FnAbi, PassMode, Reg, RegKind};
use crate::builder::Builder;
use crate::context::CodegenCx;
@ -120,30 +122,50 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
}
};
#[cfg(feature = "master")]
let apply_attrs = |ty: Type<'gcc>, attrs: &ArgAttributes| {
if cx.sess().opts.optimize != config::OptLevel::No
&& attrs.regular.contains(rustc_target::abi::call::ArgAttribute::NoAlias)
{
ty.make_restrict()
} else {
ty
}
};
#[cfg(not(feature = "master"))]
let apply_attrs = |ty: Type<'gcc>, _attrs: &ArgAttributes| {
ty
};
for arg in self.args.iter() {
let arg_ty = match arg.mode {
PassMode::Ignore => continue,
PassMode::Direct(_) => arg.layout.immediate_gcc_type(cx),
PassMode::Pair(..) => {
argument_tys.push(arg.layout.scalar_pair_element_gcc_type(cx, 0));
argument_tys.push(arg.layout.scalar_pair_element_gcc_type(cx, 1));
PassMode::Pair(a, b) => {
argument_tys.push(apply_attrs(arg.layout.scalar_pair_element_gcc_type(cx, 0), &a));
argument_tys.push(apply_attrs(arg.layout.scalar_pair_element_gcc_type(cx, 1), &b));
continue;
}
PassMode::Indirect { meta_attrs: Some(_), .. } => {
unimplemented!();
}
PassMode::Cast { ref cast, pad_i32 } => {
// add padding
if pad_i32 {
argument_tys.push(Reg::i32().gcc_type(cx));
}
cast.gcc_type(cx)
let ty = cast.gcc_type(cx);
apply_attrs(ty, &cast.attrs)
}
PassMode::Indirect { meta_attrs: None, on_stack: true, .. } => {
PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: true } => {
// This is a "byval" argument, so we don't apply the `restrict` attribute on it.
on_stack_param_indices.insert(argument_tys.len());
arg.memory_ty(cx)
},
PassMode::Indirect { meta_attrs: None, on_stack: false, .. } => cx.type_ptr_to(arg.memory_ty(cx)),
PassMode::Direct(attrs) => apply_attrs(arg.layout.immediate_gcc_type(cx), &attrs),
PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => {
apply_attrs(cx.type_ptr_to(arg.memory_ty(cx)), &attrs)
}
PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => {
assert!(!on_stack);
apply_attrs(apply_attrs(cx.type_ptr_to(arg.memory_ty(cx)), &attrs), &meta_attrs)
}
};
argument_tys.push(arg_ty);
}

View File

@ -1,6 +1,6 @@
#[cfg(feature="master")]
use gccjit::FnAttribute;
use gccjit::{FunctionType, GlobalKind, ToRValue};
use gccjit::{Context, FunctionType, GlobalKind, ToRValue, Type};
use rustc_ast::expand::allocator::{
alloc_error_handler_name, default_fn_name, global_fn_name, AllocatorKind, AllocatorTy,
ALLOCATOR_METHODS, NO_ALLOC_SHIM_IS_UNSTABLE,
@ -22,7 +22,6 @@ pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut GccContext, _module_nam
};
let i8 = context.new_type::<i8>();
let i8p = i8.make_pointer();
let void = context.new_type::<()>();
if kind == AllocatorKind::Default {
for method in ALLOCATOR_METHODS {
@ -47,80 +46,22 @@ pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut GccContext, _module_nam
panic!("invalid allocator output")
}
};
let name = global_fn_name(method.name);
let from_name = global_fn_name(method.name);
let to_name = default_fn_name(method.name);
let args: Vec<_> = types.iter().enumerate()
.map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index)))
.collect();
let func = context.new_function(None, FunctionType::Exported, output.unwrap_or(void), &args, name, false);
if tcx.sess.target.options.default_hidden_visibility {
#[cfg(feature="master")]
func.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden));
}
if tcx.sess.must_emit_unwind_tables() {
// TODO(antoyo): emit unwind tables.
}
let callee = default_fn_name(method.name);
let args: Vec<_> = types.iter().enumerate()
.map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index)))
.collect();
let callee = context.new_function(None, FunctionType::Extern, output.unwrap_or(void), &args, callee, false);
#[cfg(feature="master")]
callee.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden));
let block = func.new_block("entry");
let args = args
.iter()
.enumerate()
.map(|(i, _)| func.get_param(i as i32).to_rvalue())
.collect::<Vec<_>>();
let ret = context.new_call(None, callee, &args);
//llvm::LLVMSetTailCall(ret, True);
if output.is_some() {
block.end_with_return(None, ret);
}
else {
block.end_with_void_return(None);
}
// TODO(@Commeownist): Check if we need to emit some extra debugging info in certain circumstances
// as described in https://github.com/rust-lang/rust/commit/77a96ed5646f7c3ee8897693decc4626fe380643
create_wrapper_function(tcx, context, &from_name, &to_name, &types, output);
}
}
let types = [usize, usize];
let name = "__rust_alloc_error_handler".to_string();
let args: Vec<_> = types.iter().enumerate()
.map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index)))
.collect();
let func = context.new_function(None, FunctionType::Exported, void, &args, name, false);
if tcx.sess.target.default_hidden_visibility {
#[cfg(feature="master")]
func.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden));
}
let callee = alloc_error_handler_name(alloc_error_handler_kind);
let args: Vec<_> = types.iter().enumerate()
.map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index)))
.collect();
let callee = context.new_function(None, FunctionType::Extern, void, &args, callee, false);
#[cfg(feature="master")]
callee.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden));
let block = func.new_block("entry");
let args = args
.iter()
.enumerate()
.map(|(i, _)| func.get_param(i as i32).to_rvalue())
.collect::<Vec<_>>();
let _ret = context.new_call(None, callee, &args);
//llvm::LLVMSetTailCall(ret, True);
block.end_with_void_return(None);
// FIXME(bjorn3): Add noreturn attribute
create_wrapper_function(
tcx,
context,
"__rust_alloc_error_handler",
&alloc_error_handler_name(alloc_error_handler_kind),
&[usize, usize],
None,
);
let name = OomStrategy::SYMBOL.to_string();
let global = context.new_global(None, GlobalKind::Exported, i8, name);
@ -133,3 +74,53 @@ pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut GccContext, _module_nam
let value = context.new_rvalue_from_int(i8, 0);
global.global_set_initializer_rvalue(value);
}
fn create_wrapper_function(
tcx: TyCtxt<'_>,
context: &Context<'_>,
from_name: &str,
to_name: &str,
types: &[Type<'_>],
output: Option<Type<'_>>,
) {
let void = context.new_type::<()>();
let args: Vec<_> = types.iter().enumerate()
.map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index)))
.collect();
let func = context.new_function(None, FunctionType::Exported, output.unwrap_or(void), &args, from_name, false);
if tcx.sess.target.options.default_hidden_visibility {
#[cfg(feature="master")]
func.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden));
}
if tcx.sess.must_emit_unwind_tables() {
// TODO(antoyo): emit unwind tables.
}
let args: Vec<_> = types.iter().enumerate()
.map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index)))
.collect();
let callee = context.new_function(None, FunctionType::Extern, output.unwrap_or(void), &args, to_name, false);
#[cfg(feature="master")]
callee.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden));
let block = func.new_block("entry");
let args = args
.iter()
.enumerate()
.map(|(i, _)| func.get_param(i as i32).to_rvalue())
.collect::<Vec<_>>();
let ret = context.new_call(None, callee, &args);
//llvm::LLVMSetTailCall(ret, True);
if output.is_some() {
block.end_with_return(None, ret);
}
else {
block.end_with_void_return(None);
}
// TODO(@Commeownist): Check if we need to emit some extra debugging info in certain circumstances
// as described in https://github.com/rust-lang/rust/commit/77a96ed5646f7c3ee8897693decc4626fe380643
}

View File

@ -452,10 +452,6 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
}
InlineAsmOperandRef::Const { ref string } => {
// Const operands get injected directly into the template
if att_dialect {
template_str.push('$');
}
template_str.push_str(string);
}
}

View File

@ -4,72 +4,13 @@ use gccjit::Function;
use rustc_attr::InstructionSetAttr;
#[cfg(feature="master")]
use rustc_attr::InlineAttr;
use rustc_codegen_ssa::target_features::tied_target_features;
use rustc_data_structures::fx::FxHashMap;
use rustc_middle::ty;
#[cfg(feature="master")]
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_session::Session;
use rustc_span::symbol::sym;
use smallvec::{smallvec, SmallVec};
use crate::{context::CodegenCx, errors::TiedTargetFeatures};
// Given a map from target_features to whether they are enabled or disabled,
// ensure only valid combinations are allowed.
pub fn check_tied_features(sess: &Session, features: &FxHashMap<&str, bool>) -> Option<&'static [&'static str]> {
for tied in tied_target_features(sess) {
// Tied features must be set to the same value, or not set at all
let mut tied_iter = tied.iter();
let enabled = features.get(tied_iter.next().unwrap());
if tied_iter.any(|feature| enabled != features.get(feature)) {
return Some(tied);
}
}
None
}
// TODO(antoyo): maybe move to a new module gcc_util.
// To find a list of GCC's names, check https://gcc.gnu.org/onlinedocs/gcc/Function-Attributes.html
fn to_gcc_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]> {
let arch = if sess.target.arch == "x86_64" { "x86" } else { &*sess.target.arch };
match (arch, s) {
("x86", "sse4.2") => smallvec!["sse4.2", "crc32"],
("x86", "pclmulqdq") => smallvec!["pclmul"],
("x86", "rdrand") => smallvec!["rdrnd"],
("x86", "bmi1") => smallvec!["bmi"],
("x86", "cmpxchg16b") => smallvec!["cx16"],
("x86", "avx512vaes") => smallvec!["vaes"],
("x86", "avx512gfni") => smallvec!["gfni"],
("x86", "avx512vpclmulqdq") => smallvec!["vpclmulqdq"],
// NOTE: seems like GCC requires 'avx512bw' for 'avx512vbmi2'.
("x86", "avx512vbmi2") => smallvec!["avx512vbmi2", "avx512bw"],
// NOTE: seems like GCC requires 'avx512bw' for 'avx512bitalg'.
("x86", "avx512bitalg") => smallvec!["avx512bitalg", "avx512bw"],
("aarch64", "rcpc2") => smallvec!["rcpc-immo"],
("aarch64", "dpb") => smallvec!["ccpp"],
("aarch64", "dpb2") => smallvec!["ccdp"],
("aarch64", "frintts") => smallvec!["fptoint"],
("aarch64", "fcma") => smallvec!["complxnum"],
("aarch64", "pmuv3") => smallvec!["perfmon"],
("aarch64", "paca") => smallvec!["pauth"],
("aarch64", "pacg") => smallvec!["pauth"],
// Rust ties fp and neon together. In LLVM neon implicitly enables fp,
// but we manually enable neon when a feature only implicitly enables fp
("aarch64", "f32mm") => smallvec!["f32mm", "neon"],
("aarch64", "f64mm") => smallvec!["f64mm", "neon"],
("aarch64", "fhm") => smallvec!["fp16fml", "neon"],
("aarch64", "fp16") => smallvec!["fullfp16", "neon"],
("aarch64", "jsconv") => smallvec!["jsconv", "neon"],
("aarch64", "sve") => smallvec!["sve", "neon"],
("aarch64", "sve2") => smallvec!["sve2", "neon"],
("aarch64", "sve2-aes") => smallvec!["sve2-aes", "neon"],
("aarch64", "sve2-sm4") => smallvec!["sve2-sm4", "neon"],
("aarch64", "sve2-sha3") => smallvec!["sve2-sha3", "neon"],
("aarch64", "sve2-bitperm") => smallvec!["sve2-bitperm", "neon"],
(_, s) => smallvec![s],
}
}
use crate::gcc_util::{check_tied_features, to_gcc_features};
/// Get GCC attribute for the provided inline heuristic.
#[cfg(feature="master")]
@ -114,6 +55,19 @@ pub fn from_fn_attrs<'gcc, 'tcx>(
if let Some(attr) = inline_attr(cx, inline) {
func.add_attribute(attr);
}
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
func.add_attribute(FnAttribute::Cold);
}
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::FFI_RETURNS_TWICE) {
func.add_attribute(FnAttribute::ReturnsTwice);
}
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::FFI_PURE) {
func.add_attribute(FnAttribute::Pure);
}
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::FFI_CONST) {
func.add_attribute(FnAttribute::Const);
}
}
let function_features =
@ -140,11 +94,33 @@ pub fn from_fn_attrs<'gcc, 'tcx>(
}))
.collect::<Vec<_>>();
// TODO(antoyo): check if we really need global backend features. (Maybe they could be applied
// globally?)
// TODO(antoyo): cg_llvm adds global features to each function so that LTO keep them.
// Check if GCC requires the same.
let mut global_features = cx.tcx.global_backend_features(()).iter().map(|s| s.as_str());
function_features.extend(&mut global_features);
let target_features = function_features.join(",");
let target_features = function_features
.iter()
.filter_map(|feature| {
// FIXME(antoyo): for some reason, disabling SSE results in the following error when
// compiling Rust for Linux:
// SSE register return with SSE disabled
// TODO(antoyo): support soft-float and retpoline-external-thunk.
if feature.contains("soft-float") || feature.contains("retpoline-external-thunk") || *feature == "-sse" {
return None;
}
if feature.starts_with('-') {
Some(format!("no{}", feature))
}
else if feature.starts_with('+') {
Some(feature[1..].to_string())
}
else {
Some(feature.to_string())
}
})
.collect::<Vec<_>>()
.join(",");
if !target_features.is_empty() {
#[cfg(feature="master")]
func.add_attribute(FnAttribute::Target(&target_features));

View File

@ -0,0 +1,341 @@
/// GCC requires using the same toolchain for the whole compilation when doing LTO.
/// So, we need the same version/commit of the linker (gcc) and lto front-end binaries (lto1,
/// lto-wrapper, liblto_plugin.so).
// FIXME(antoyo): the executables compiled with LTO are bigger than those compiled without LTO.
// Since it is the opposite for cg_llvm, check if this is normal.
//
// Maybe we embed the bitcode in the final binary?
// It doesn't look like we try to generate fat objects for the final binary.
// Check if the way we combine the object files make it keep the LTO sections on the final link.
// Maybe that's because the combined object files contain the IR (true) and the final link
// does not remove it?
//
// TODO(antoyo): for performance, check which optimizations the C++ frontend enables.
//
// Fix these warnings:
// /usr/bin/ld: warning: type of symbol `_RNvNvNvNtCs5JWOrf9uCus_5rayon11thread_pool19WORKER_THREAD_STATE7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o
// /usr/bin/ld: warning: type of symbol `_RNvNvNvNvNtNtNtCsAj5i4SGTR7_3std4sync4mpmc5waker17current_thread_id5DUMMY7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o
// /usr/bin/ld: warning: incremental linking of LTO and non-LTO objects; using -flinker-output=nolto-rel which will bypass whole program optimization
use std::ffi::CString;
use std::fs::{self, File};
use std::path::{Path, PathBuf};
use gccjit::OutputKind;
use object::read::archive::ArchiveFile;
use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule};
use rustc_codegen_ssa::back::symbol_export;
use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput};
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::{looks_like_rust_object_file, ModuleCodegen, ModuleKind};
use rustc_data_structures::memmap::Mmap;
use rustc_errors::{FatalError, Handler};
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::dep_graph::WorkProduct;
use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel};
use rustc_session::config::{CrateType, Lto};
use tempfile::{TempDir, tempdir};
use crate::back::write::save_temp_bitcode;
use crate::errors::{
DynamicLinkingWithLTO, LtoBitcodeFromRlib, LtoDisallowed, LtoDylib,
};
use crate::{GccCodegenBackend, GccContext, to_gcc_opt_level};
/// We keep track of the computed LTO cache keys from the previous
/// session to determine which CGUs we can reuse.
//pub const THIN_LTO_KEYS_INCR_COMP_FILE_NAME: &str = "thin-lto-past-keys.bin";
pub fn crate_type_allows_lto(crate_type: CrateType) -> bool {
match crate_type {
CrateType::Executable | CrateType::Dylib | CrateType::Staticlib | CrateType::Cdylib => true,
CrateType::Rlib | CrateType::ProcMacro => false,
}
}
struct LtoData {
// TODO(antoyo): use symbols_below_threshold.
//symbols_below_threshold: Vec<CString>,
upstream_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
tmp_path: TempDir,
}
fn prepare_lto(cgcx: &CodegenContext<GccCodegenBackend>, diag_handler: &Handler) -> Result<LtoData, FatalError> {
let export_threshold = match cgcx.lto {
// We're just doing LTO for our one crate
Lto::ThinLocal => SymbolExportLevel::Rust,
// We're doing LTO for the entire crate graph
Lto::Fat | Lto::Thin => symbol_export::crates_export_threshold(&cgcx.crate_types),
Lto::No => panic!("didn't request LTO but we're doing LTO"),
};
let tmp_path =
match tempdir() {
Ok(tmp_path) => tmp_path,
Err(error) => {
eprintln!("Cannot create temporary directory: {}", error);
return Err(FatalError);
},
};
let symbol_filter = &|&(ref name, info): &(String, SymbolExportInfo)| {
if info.level.is_below_threshold(export_threshold) || info.used {
Some(CString::new(name.as_str()).unwrap())
} else {
None
}
};
let exported_symbols = cgcx.exported_symbols.as_ref().expect("needs exported symbols for LTO");
let mut symbols_below_threshold = {
let _timer = cgcx.prof.generic_activity("GCC_lto_generate_symbols_below_threshold");
exported_symbols[&LOCAL_CRATE].iter().filter_map(symbol_filter).collect::<Vec<CString>>()
};
info!("{} symbols to preserve in this crate", symbols_below_threshold.len());
// If we're performing LTO for the entire crate graph, then for each of our
// upstream dependencies, find the corresponding rlib and load the bitcode
// from the archive.
//
// We save off all the bytecode and GCC module file path for later processing
// with either fat or thin LTO
let mut upstream_modules = Vec::new();
if cgcx.lto != Lto::ThinLocal {
// Make sure we actually can run LTO
for crate_type in cgcx.crate_types.iter() {
if !crate_type_allows_lto(*crate_type) {
diag_handler.emit_err(LtoDisallowed);
return Err(FatalError);
} else if *crate_type == CrateType::Dylib {
if !cgcx.opts.unstable_opts.dylib_lto {
diag_handler.emit_err(LtoDylib);
return Err(FatalError);
}
}
}
if cgcx.opts.cg.prefer_dynamic && !cgcx.opts.unstable_opts.dylib_lto {
diag_handler.emit_err(DynamicLinkingWithLTO);
return Err(FatalError);
}
for &(cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() {
let exported_symbols =
cgcx.exported_symbols.as_ref().expect("needs exported symbols for LTO");
{
let _timer =
cgcx.prof.generic_activity("GCC_lto_generate_symbols_below_threshold");
symbols_below_threshold
.extend(exported_symbols[&cnum].iter().filter_map(symbol_filter));
}
let archive_data = unsafe {
Mmap::map(File::open(&path).expect("couldn't open rlib"))
.expect("couldn't map rlib")
};
let archive = ArchiveFile::parse(&*archive_data).expect("wanted an rlib");
let obj_files = archive
.members()
.filter_map(|child| {
child.ok().and_then(|c| {
std::str::from_utf8(c.name()).ok().map(|name| (name.trim(), c))
})
})
.filter(|&(name, _)| looks_like_rust_object_file(name));
for (name, child) in obj_files {
info!("adding bitcode from {}", name);
let path = tmp_path.path().join(name);
match save_as_file(child.data(&*archive_data).expect("corrupt rlib"), &path) {
Ok(()) => {
let buffer = ModuleBuffer::new(path);
let module = SerializedModule::Local(buffer);
upstream_modules.push((module, CString::new(name).unwrap()));
}
Err(e) => {
diag_handler.emit_err(e);
return Err(FatalError);
}
}
}
}
}
Ok(LtoData {
//symbols_below_threshold,
upstream_modules,
tmp_path,
})
}
fn save_as_file(obj: &[u8], path: &Path) -> Result<(), LtoBitcodeFromRlib> {
fs::write(path, obj)
.map_err(|error| LtoBitcodeFromRlib {
gcc_err: format!("write object file to temp dir: {}", error)
})
}
/// Performs fat LTO by merging all modules into a single one and returning it
/// for further optimization.
pub(crate) fn run_fat(
cgcx: &CodegenContext<GccCodegenBackend>,
modules: Vec<FatLtoInput<GccCodegenBackend>>,
cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
) -> Result<LtoModuleCodegen<GccCodegenBackend>, FatalError> {
let diag_handler = cgcx.create_diag_handler();
let lto_data = prepare_lto(cgcx, &diag_handler)?;
/*let symbols_below_threshold =
lto_data.symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();*/
fat_lto(cgcx, &diag_handler, modules, cached_modules, lto_data.upstream_modules, lto_data.tmp_path,
//&symbols_below_threshold,
)
}
fn fat_lto(cgcx: &CodegenContext<GccCodegenBackend>, _diag_handler: &Handler, modules: Vec<FatLtoInput<GccCodegenBackend>>, cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>, mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>, tmp_path: TempDir,
//symbols_below_threshold: &[*const libc::c_char],
) -> Result<LtoModuleCodegen<GccCodegenBackend>, FatalError> {
let _timer = cgcx.prof.generic_activity("GCC_fat_lto_build_monolithic_module");
info!("going for a fat lto");
// Sort out all our lists of incoming modules into two lists.
//
// * `serialized_modules` (also an argument to this function) contains all
// modules that are serialized in-memory.
// * `in_memory` contains modules which are already parsed and in-memory,
// such as from multi-CGU builds.
//
// All of `cached_modules` (cached from previous incremental builds) can
// immediately go onto the `serialized_modules` modules list and then we can
// split the `modules` array into these two lists.
let mut in_memory = Vec::new();
serialized_modules.extend(cached_modules.into_iter().map(|(buffer, wp)| {
info!("pushing cached module {:?}", wp.cgu_name);
(buffer, CString::new(wp.cgu_name).unwrap())
}));
for module in modules {
match module {
FatLtoInput::InMemory(m) => in_memory.push(m),
FatLtoInput::Serialized { name, buffer } => {
info!("pushing serialized module {:?}", name);
let buffer = SerializedModule::Local(buffer);
serialized_modules.push((buffer, CString::new(name).unwrap()));
}
}
}
// Find the "costliest" module and merge everything into that codegen unit.
// All the other modules will be serialized and reparsed into the new
// context, so this hopefully avoids serializing and parsing the largest
// codegen unit.
//
// Additionally use a regular module as the base here to ensure that various
// file copy operations in the backend work correctly. The only other kind
// of module here should be an allocator one, and if your crate is smaller
// than the allocator module then the size doesn't really matter anyway.
let costliest_module = in_memory
.iter()
.enumerate()
.filter(|&(_, module)| module.kind == ModuleKind::Regular)
.map(|(i, _module)| {
//let cost = unsafe { llvm::LLVMRustModuleCost(module.module_llvm.llmod()) };
// TODO(antoyo): compute the cost of a module if GCC allows this.
(0, i)
})
.max();
// If we found a costliest module, we're good to go. Otherwise all our
// inputs were serialized which could happen in the case, for example, that
// all our inputs were incrementally reread from the cache and we're just
// re-executing the LTO passes. If that's the case deserialize the first
// module and create a linker with it.
let mut module: ModuleCodegen<GccContext> = match costliest_module {
Some((_cost, i)) => in_memory.remove(i),
None => {
unimplemented!("Incremental");
/*assert!(!serialized_modules.is_empty(), "must have at least one serialized module");
let (buffer, name) = serialized_modules.remove(0);
info!("no in-memory regular modules to choose from, parsing {:?}", name);
ModuleCodegen {
module_llvm: GccContext::parse(cgcx, &name, buffer.data(), diag_handler)?,
name: name.into_string().unwrap(),
kind: ModuleKind::Regular,
}*/
}
};
let mut serialized_bitcode = Vec::new();
{
info!("using {:?} as a base module", module.name);
// We cannot load and merge GCC contexts in memory like cg_llvm is doing.
// Instead, we combine the object files into a single object file.
for module in in_memory {
let path = tmp_path.path().to_path_buf().join(&module.name);
let path = path.to_str().expect("path");
let context = &module.module_llvm.context;
let config = cgcx.config(module.kind);
// NOTE: we need to set the optimization level here in order for LTO to do its job.
context.set_optimization_level(to_gcc_opt_level(config.opt_level));
context.add_command_line_option("-flto=auto");
context.add_command_line_option("-flto-partition=one");
context.compile_to_file(OutputKind::ObjectFile, path);
let buffer = ModuleBuffer::new(PathBuf::from(path));
let llmod_id = CString::new(&module.name[..]).unwrap();
serialized_modules.push((SerializedModule::Local(buffer), llmod_id));
}
// Sort the modules to ensure we produce deterministic results.
serialized_modules.sort_by(|module1, module2| module1.1.cmp(&module2.1));
// We add the object files and save in should_combine_object_files that we should combine
// them into a single object file when compiling later.
for (bc_decoded, name) in serialized_modules {
let _timer = cgcx
.prof
.generic_activity_with_arg_recorder("GCC_fat_lto_link_module", |recorder| {
recorder.record_arg(format!("{:?}", name))
});
info!("linking {:?}", name);
match bc_decoded {
SerializedModule::Local(ref module_buffer) => {
module.module_llvm.should_combine_object_files = true;
module.module_llvm.context.add_driver_option(module_buffer.0.to_str().expect("path"));
},
SerializedModule::FromRlib(_) => unimplemented!("from rlib"),
SerializedModule::FromUncompressedFile(_) => unimplemented!("from uncompressed file"),
}
serialized_bitcode.push(bc_decoded);
}
save_temp_bitcode(cgcx, &module, "lto.input");
// Internalize everything below threshold to help strip out more modules and such.
/*unsafe {
let ptr = symbols_below_threshold.as_ptr();
llvm::LLVMRustRunRestrictionPass(
llmod,
ptr as *const *const libc::c_char,
symbols_below_threshold.len() as libc::size_t,
);*/
save_temp_bitcode(cgcx, &module, "lto.after-restriction");
//}
}
// NOTE: save the temporary directory used by LTO so that it gets deleted after linking instead
// of now.
module.module_llvm.temp_dir = Some(tmp_path);
Ok(LtoModuleCodegen::Fat { module, _serialized_bitcode: serialized_bitcode })
}
pub struct ModuleBuffer(PathBuf);
impl ModuleBuffer {
pub fn new(path: PathBuf) -> ModuleBuffer {
ModuleBuffer(path)
}
}
impl ModuleBufferMethods for ModuleBuffer {
fn data(&self) -> &[u8] {
unimplemented!("data not needed for GCC codegen");
}
}

View File

@ -1 +1,2 @@
pub mod lto;
pub mod write;

View File

@ -2,27 +2,71 @@ use std::{env, fs};
use gccjit::OutputKind;
use rustc_codegen_ssa::{CompiledModule, ModuleCodegen};
use rustc_codegen_ssa::back::write::{CodegenContext, EmitObj, ModuleConfig};
use rustc_codegen_ssa::back::link::ensure_removed;
use rustc_codegen_ssa::back::write::{BitcodeSection, CodegenContext, EmitObj, ModuleConfig};
use rustc_errors::Handler;
use rustc_fs_util::link_or_copy;
use rustc_session::config::OutputType;
use rustc_span::fatal_error::FatalError;
use rustc_target::spec::SplitDebuginfo;
use crate::{GccCodegenBackend, GccContext};
use crate::errors::CopyBitcode;
pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, _diag_handler: &Handler, module: ModuleCodegen<GccContext>, config: &ModuleConfig) -> Result<CompiledModule, FatalError> {
let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_codegen", &*module.name);
pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, diag_handler: &Handler, module: ModuleCodegen<GccContext>, config: &ModuleConfig) -> Result<CompiledModule, FatalError> {
let _timer = cgcx.prof.generic_activity_with_arg("GCC_module_codegen", &*module.name);
{
let context = &module.module_llvm.context;
let module_name = module.name.clone();
let should_combine_object_files = module.module_llvm.should_combine_object_files;
let module_name = Some(&module_name[..]);
let _bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
// NOTE: Only generate object files with GIMPLE when this environment variable is set for
// now because this requires a particular setup (same gcc/lto1/lto-wrapper commit as libgccjit).
let fat_lto = env::var("EMBED_LTO_BITCODE").as_deref() == Ok("1");
let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name);
if config.bitcode_needed() {
if config.bitcode_needed() && fat_lto {
let _timer = cgcx
.prof
.generic_activity_with_arg("GCC_module_codegen_make_bitcode", &*module.name);
// TODO(antoyo)
/*if let Some(bitcode_filename) = bc_out.file_name() {
cgcx.prof.artifact_size(
"llvm_bitcode",
bitcode_filename.to_string_lossy(),
data.len() as u64,
);
}*/
if config.emit_bc || config.emit_obj == EmitObj::Bitcode {
let _timer = cgcx
.prof
.generic_activity_with_arg("GCC_module_codegen_emit_bitcode", &*module.name);
context.add_command_line_option("-flto=auto");
context.add_command_line_option("-flto-partition=one");
context.compile_to_file(OutputKind::ObjectFile, bc_out.to_str().expect("path to str"));
}
if config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full) {
let _timer = cgcx
.prof
.generic_activity_with_arg("GCC_module_codegen_embed_bitcode", &*module.name);
// TODO(antoyo): maybe we should call embed_bitcode to have the proper iOS fixes?
//embed_bitcode(cgcx, llcx, llmod, &config.bc_cmdline, data);
context.add_command_line_option("-flto=auto");
context.add_command_line_option("-flto-partition=one");
context.add_command_line_option("-ffat-lto-objects");
// TODO(antoyo): Send -plugin/usr/lib/gcc/x86_64-pc-linux-gnu/11.1.0/liblto_plugin.so to linker (this should be done when specifying the appropriate rustc cli argument).
context.compile_to_file(OutputKind::ObjectFile, bc_out.to_str().expect("path to str"));
}
}
if config.emit_ir {
@ -32,7 +76,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, _diag_han
if config.emit_asm {
let _timer = cgcx
.prof
.generic_activity_with_arg("LLVM_module_codegen_emit_asm", &*module.name);
.generic_activity_with_arg("GCC_module_codegen_emit_asm", &*module.name);
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
context.compile_to_file(OutputKind::Assembler, path.to_str().expect("path to str"));
}
@ -41,7 +85,7 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, _diag_han
EmitObj::ObjectCode(_) => {
let _timer = cgcx
.prof
.generic_activity_with_arg("LLVM_module_codegen_emit_obj", &*module.name);
.generic_activity_with_arg("GCC_module_codegen_emit_obj", &*module.name);
if env::var("CG_GCCJIT_DUMP_MODULE_NAMES").as_deref() == Ok("1") {
println!("Module {}", module.name);
}
@ -60,11 +104,36 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, _diag_han
context.set_debug_info(true);
context.dump_to_file(path, true);
}
context.compile_to_file(OutputKind::ObjectFile, obj_out.to_str().expect("path to str"));
if should_combine_object_files && fat_lto {
context.add_command_line_option("-flto=auto");
context.add_command_line_option("-flto-partition=one");
context.add_driver_option("-Wl,-r");
// NOTE: we need -nostdlib, otherwise, we get the following error:
// /usr/bin/ld: cannot find -lgcc_s: No such file or directory
context.add_driver_option("-nostdlib");
// NOTE: without -fuse-linker-plugin, we get the following error:
// lto1: internal compiler error: decompressed stream: Destination buffer is too small
context.add_driver_option("-fuse-linker-plugin");
// NOTE: this doesn't actually generate an executable. With the above flags, it combines the .o files together in another .o.
context.compile_to_file(OutputKind::Executable, obj_out.to_str().expect("path to str"));
}
else {
context.compile_to_file(OutputKind::ObjectFile, obj_out.to_str().expect("path to str"));
}
}
EmitObj::Bitcode => {
// TODO(antoyo)
debug!("copying bitcode {:?} to obj {:?}", bc_out, obj_out);
if let Err(err) = link_or_copy(&bc_out, &obj_out) {
diag_handler.emit_err(CopyBitcode { err });
}
if !config.emit_bc {
debug!("removing_bitcode {:?}", bc_out);
ensure_removed(diag_handler, &bc_out);
}
}
EmitObj::None => {}
@ -82,3 +151,18 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, _diag_han
pub(crate) fn link(_cgcx: &CodegenContext<GccCodegenBackend>, _diag_handler: &Handler, mut _modules: Vec<ModuleCodegen<GccContext>>) -> Result<ModuleCodegen<GccContext>, FatalError> {
unimplemented!();
}
pub(crate) fn save_temp_bitcode(cgcx: &CodegenContext<GccCodegenBackend>, _module: &ModuleCodegen<GccContext>, _name: &str) {
if !cgcx.save_temps {
return;
}
unimplemented!();
/*unsafe {
let ext = format!("{}.bc", name);
let cgu = Some(&module.name[..]);
let path = cgcx.output_filenames.temp_path_ext(&ext, cgu);
let cstr = path_to_c_string(&path);
let llmod = module.module_llvm.llmod();
llvm::LLVMWriteBitcodeToFile(llmod, cstr.as_ptr());
}*/
}

View File

@ -1,3 +1,4 @@
use std::collections::HashSet;
use std::env;
use std::time::Instant;
@ -18,6 +19,7 @@ use rustc_codegen_ssa::traits::DebugInfoMethods;
use rustc_session::config::DebugInfo;
use rustc_span::Symbol;
use crate::{LockedTargetInfo, gcc_util};
use crate::GccContext;
use crate::builder::Builder;
use crate::context::CodegenCx;
@ -50,6 +52,7 @@ pub fn global_linkage_to_gcc(linkage: Linkage) -> GlobalKind {
pub fn linkage_to_gcc(linkage: Linkage) -> FunctionType {
match linkage {
Linkage::External => FunctionType::Exported,
// TODO(antoyo): set the attribute externally_visible.
Linkage::AvailableExternally => FunctionType::Extern,
Linkage::LinkOnceAny => unimplemented!(),
Linkage::LinkOnceODR => unimplemented!(),
@ -63,7 +66,7 @@ pub fn linkage_to_gcc(linkage: Linkage) -> FunctionType {
}
}
pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, supports_128bit_integers: bool) -> (ModuleCodegen<GccContext>, u64) {
pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, target_info: LockedTargetInfo) -> (ModuleCodegen<GccContext>, u64) {
let prof_timer = tcx.prof.generic_activity("codegen_module");
let start_time = Instant::now();
@ -71,7 +74,7 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, supports_128bit_i
let (module, _) = tcx.dep_graph.with_task(
dep_node,
tcx,
(cgu_name, supports_128bit_integers),
(cgu_name, target_info),
module_codegen,
Some(dep_graph::hash_result),
);
@ -82,38 +85,28 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, supports_128bit_i
// the time we needed for codegenning it.
let cost = time_to_codegen.as_secs() * 1_000_000_000 + time_to_codegen.subsec_nanos() as u64;
fn module_codegen(tcx: TyCtxt<'_>, (cgu_name, supports_128bit_integers): (Symbol, bool)) -> ModuleCodegen<GccContext> {
fn module_codegen(tcx: TyCtxt<'_>, (cgu_name, target_info): (Symbol, LockedTargetInfo)) -> ModuleCodegen<GccContext> {
let cgu = tcx.codegen_unit(cgu_name);
// Instantiate monomorphizations without filling out definitions yet...
//let llvm_module = ModuleLlvm::new(tcx, &cgu_name.as_str());
let context = Context::default();
context.add_command_line_option("-fexceptions");
context.add_driver_option("-fexceptions");
let disabled_features: HashSet<_> = tcx.sess.opts.cg.target_feature.split(',')
.filter(|feature| feature.starts_with('-'))
.map(|string| &string[1..])
.collect();
// TODO(antoyo): only set on x86 platforms.
context.add_command_line_option("-masm=intel");
// TODO(antoyo): only add the following cli argument if the feature is supported.
context.add_command_line_option("-msse2");
context.add_command_line_option("-mavx2");
// FIXME(antoyo): the following causes an illegal instruction on vmovdqu64 in std_example on my CPU.
// Only add if the CPU supports it.
context.add_command_line_option("-msha");
context.add_command_line_option("-mpclmul");
context.add_command_line_option("-mfma");
context.add_command_line_option("-mfma4");
context.add_command_line_option("-m64");
context.add_command_line_option("-mbmi");
context.add_command_line_option("-mgfni");
//context.add_command_line_option("-mavxvnni"); // The CI doesn't support this option.
context.add_command_line_option("-mf16c");
context.add_command_line_option("-maes");
context.add_command_line_option("-mxsavec");
context.add_command_line_option("-mbmi2");
context.add_command_line_option("-mrtm");
context.add_command_line_option("-mvaes");
context.add_command_line_option("-mvpclmulqdq");
context.add_command_line_option("-mavx");
if !disabled_features.contains("avx") {
// NOTE: we always enable AVX because the equivalent of llvm.x86.sse2.cmp.pd in GCC for
// SSE2 is multiple builtins, so we use the AVX __builtin_ia32_cmppd instead.
// FIXME(antoyo): use the proper builtins for llvm.x86.sse2.cmp.pd and similar.
context.add_command_line_option("-mavx");
}
for arg in &tcx.sess.opts.cg.llvm_args {
context.add_command_line_option(arg);
@ -127,6 +120,16 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, supports_128bit_i
// NOTE: Rust relies on LLVM doing wrapping on overflow.
context.add_command_line_option("-fwrapv");
if tcx.sess.relocation_model() == rustc_target::spec::RelocModel::Static {
context.add_command_line_option("-mcmodel=kernel");
context.add_command_line_option("-fno-pie");
}
let target_cpu = gcc_util::target_cpu(tcx.sess);
if target_cpu != "generic" {
context.add_command_line_option(&format!("-march={}", target_cpu));
}
if tcx.sess.opts.unstable_opts.function_sections.unwrap_or(tcx.sess.target.function_sections) {
context.add_command_line_option("-ffunction-sections");
context.add_command_line_option("-fdata-sections");
@ -135,8 +138,14 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, supports_128bit_i
if env::var("CG_GCCJIT_DUMP_RTL").as_deref() == Ok("1") {
context.add_command_line_option("-fdump-rtl-vregs");
}
if env::var("CG_GCCJIT_DUMP_RTL_ALL").as_deref() == Ok("1") {
context.add_command_line_option("-fdump-rtl-all");
}
if env::var("CG_GCCJIT_DUMP_TREE_ALL").as_deref() == Ok("1") {
context.add_command_line_option("-fdump-tree-all");
context.add_command_line_option("-fdump-tree-all-eh");
}
if env::var("CG_GCCJIT_DUMP_IPA_ALL").as_deref() == Ok("1") {
context.add_command_line_option("-fdump-ipa-all-eh");
}
if env::var("CG_GCCJIT_DUMP_CODE").as_deref() == Ok("1") {
context.set_dump_code_on_compile(true);
@ -152,11 +161,15 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, supports_128bit_i
context.set_keep_intermediates(true);
}
if env::var("CG_GCCJIT_VERBOSE").as_deref() == Ok("1") {
context.add_driver_option("-v");
}
// NOTE: The codegen generates unreachable blocks.
context.set_allow_unreachable_blocks(true);
{
let cx = CodegenCx::new(&context, cgu, tcx, supports_128bit_integers);
let cx = CodegenCx::new(&context, cgu, tcx, target_info.supports_128bit_int());
let mono_items = cgu.items_in_deterministic_order(tcx);
for &(mono_item, data) in &mono_items {
@ -181,7 +194,9 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, supports_128bit_i
ModuleCodegen {
name: cgu_name.to_string(),
module_llvm: GccContext {
context
context,
should_combine_object_files: false,
temp_dir: None,
},
kind: ModuleKind::Regular,
}

View File

@ -247,16 +247,9 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
}
fn check_store(&mut self, val: RValue<'gcc>, ptr: RValue<'gcc>) -> RValue<'gcc> {
let dest_ptr_ty = self.cx.val_ty(ptr).make_pointer(); // TODO(antoyo): make sure make_pointer() is okay here.
let stored_ty = self.cx.val_ty(val);
let stored_ptr_ty = self.cx.type_ptr_to(stored_ty);
if dest_ptr_ty == stored_ptr_ty {
ptr
}
else {
self.bitcast(ptr, stored_ptr_ty)
}
self.bitcast(ptr, stored_ptr_ty)
}
pub fn current_func(&self) -> Function<'gcc> {
@ -500,7 +493,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
}
#[cfg(not(feature="master"))]
fn invoke(&mut self, typ: Type<'gcc>, fn_attrs: &CodegenFnAttrs, fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>, func: RValue<'gcc>, args: &[RValue<'gcc>], then: Block<'gcc>, catch: Block<'gcc>, _funclet: Option<&Funclet>) -> RValue<'gcc> {
fn invoke(&mut self, typ: Type<'gcc>, fn_attrs: Option<&CodegenFnAttrs>, fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>, func: RValue<'gcc>, args: &[RValue<'gcc>], then: Block<'gcc>, catch: Block<'gcc>, _funclet: Option<&Funclet>) -> RValue<'gcc> {
let call_site = self.call(typ, fn_attrs, None, func, args, None);
let condition = self.context.new_rvalue_from_int(self.bool_type, 1);
self.llbb().end_with_conditional(None, condition, then, catch);
@ -663,7 +656,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
}
fn unchecked_sadd(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> {
a + b
self.gcc_add(a, b)
}
fn unchecked_uadd(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> {
@ -671,7 +664,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
}
fn unchecked_ssub(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> {
a - b
self.gcc_sub(a, b)
}
fn unchecked_usub(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> {
@ -680,11 +673,11 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
}
fn unchecked_smul(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> {
a * b
self.gcc_mul(a, b)
}
fn unchecked_umul(&mut self, a: RValue<'gcc>, b: RValue<'gcc>) -> RValue<'gcc> {
a * b
self.gcc_mul(a, b)
}
fn fadd_fast(&mut self, lhs: RValue<'gcc>, rhs: RValue<'gcc>) -> RValue<'gcc> {
@ -916,7 +909,9 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
.add_eval(None, self.context.new_call(None, atomic_store, &[ptr, value, ordering]));
}
fn gep(&mut self, _typ: Type<'gcc>, ptr: RValue<'gcc>, indices: &[RValue<'gcc>]) -> RValue<'gcc> {
fn gep(&mut self, typ: Type<'gcc>, ptr: RValue<'gcc>, indices: &[RValue<'gcc>]) -> RValue<'gcc> {
// NOTE: due to opaque pointers now being used, we need to cast here.
let ptr = self.context.new_cast(None, ptr, typ.make_pointer());
let ptr_type = ptr.get_type();
let mut pointee_type = ptr.get_type();
// NOTE: we cannot use array indexing here like in inbounds_gep because array indexing is
@ -927,6 +922,12 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
// require dereferencing the pointer.
for index in indices {
pointee_type = pointee_type.get_pointee().expect("pointee type");
#[cfg(feature="master")]
let pointee_size = {
let size = self.cx.context.new_sizeof(pointee_type);
self.context.new_cast(None, size, index.get_type())
};
#[cfg(not(feature="master"))]
let pointee_size = self.context.new_rvalue_from_int(index.get_type(), pointee_type.get_size() as i32);
result = result + self.gcc_int_cast(*index * pointee_size, self.sizet_type);
}

View File

@ -1,4 +1,6 @@
use gccjit::{Function, FunctionType, GlobalKind, LValue, RValue, Type};
#[cfg(feature="master")]
use gccjit::{FnAttribute, ToRValue};
use rustc_codegen_ssa::traits::BaseTypeMethods;
use rustc_middle::ty::Ty;
use rustc_span::Symbol;
@ -114,6 +116,44 @@ fn declare_raw_fn<'gcc>(cx: &CodegenCx<'gcc, '_>, name: &str, _callconv: () /*ll
.collect();
let func = cx.context.new_function(None, cx.linkage.get(), return_type, &params, mangle_name(name), variadic);
cx.functions.borrow_mut().insert(name.to_string(), func);
#[cfg(feature="master")]
if name == "rust_eh_personality" {
// NOTE: GCC will sometimes change the personality function set on a function from
// rust_eh_personality to __gcc_personality_v0 as an optimization.
// As such, we need to create a weak alias from __gcc_personality_v0 to
// rust_eh_personality in order to avoid a linker error.
// This needs to be weak in order to still allow using the standard
// __gcc_personality_v0 when linking to it.
// Since aliases don't work (maybe because of a bug in LTO partitioning?), we
// create a wrapper function that calls rust_eh_personality.
let params: Vec<_> = param_types.into_iter().enumerate()
.map(|(index, param)| cx.context.new_parameter(None, *param, &format!("param{}", index))) // TODO(antoyo): set name.
.collect();
let gcc_func = cx.context.new_function(None, FunctionType::Exported, return_type, &params, "__gcc_personality_v0", variadic);
// We need a normal extern function for the crates that access rust_eh_personality
// without defining it, otherwise we'll get a compiler error.
//
// For the crate defining it, that needs to be a weak alias instead.
gcc_func.add_attribute(FnAttribute::Weak);
let block = gcc_func.new_block("start");
let mut args = vec![];
for param in &params {
args.push(param.to_rvalue());
}
let call = cx.context.new_call(None, func, &args);
if return_type == cx.type_void() {
block.add_eval(None, call);
block.end_with_void_return(None);
}
else {
block.end_with_return(None, call);
}
}
func
};

View File

@ -1,8 +1,36 @@
use rustc_errors::{DiagnosticArgValue, IntoDiagnosticArg};
use rustc_macros::Diagnostic;
use rustc_errors::{
DiagnosticArgValue, DiagnosticBuilder, ErrorGuaranteed, Handler, IntoDiagnostic, IntoDiagnosticArg,
};
use rustc_macros::{Diagnostic, Subdiagnostic};
use rustc_span::Span;
use std::borrow::Cow;
use crate::fluent_generated as fluent;
#[derive(Diagnostic)]
#[diag(codegen_gcc_unknown_ctarget_feature_prefix)]
#[note]
pub(crate) struct UnknownCTargetFeaturePrefix<'a> {
pub feature: &'a str,
}
#[derive(Diagnostic)]
#[diag(codegen_gcc_unknown_ctarget_feature)]
#[note]
pub(crate) struct UnknownCTargetFeature<'a> {
pub feature: &'a str,
#[subdiagnostic]
pub rust_feature: PossibleFeature<'a>,
}
#[derive(Subdiagnostic)]
pub(crate) enum PossibleFeature<'a> {
#[help(codegen_gcc_possible_feature)]
Some { rust_feature: &'a str },
#[help(codegen_gcc_consider_filing_feature_request)]
None,
}
struct ExitCode(Option<i32>);
impl IntoDiagnosticArg for ExitCode {
@ -40,3 +68,58 @@ pub(crate) struct TiedTargetFeatures {
pub span: Span,
pub features: String,
}
#[derive(Diagnostic)]
#[diag(codegen_gcc_copy_bitcode)]
pub(crate) struct CopyBitcode {
pub err: std::io::Error,
}
#[derive(Diagnostic)]
#[diag(codegen_gcc_dynamic_linking_with_lto)]
#[note]
pub(crate) struct DynamicLinkingWithLTO;
#[derive(Diagnostic)]
#[diag(codegen_gcc_load_bitcode)]
pub(crate) struct LoadBitcode {
name: String,
}
#[derive(Diagnostic)]
#[diag(codegen_gcc_lto_disallowed)]
pub(crate) struct LtoDisallowed;
#[derive(Diagnostic)]
#[diag(codegen_gcc_lto_dylib)]
pub(crate) struct LtoDylib;
#[derive(Diagnostic)]
#[diag(codegen_gcc_lto_bitcode_from_rlib)]
pub(crate) struct LtoBitcodeFromRlib {
pub gcc_err: String,
}
pub(crate) struct TargetFeatureDisableOrEnable<'a> {
pub features: &'a [&'a str],
pub span: Option<Span>,
pub missing_features: Option<MissingFeatures>,
}
#[derive(Subdiagnostic)]
#[help(codegen_gcc_missing_features)]
pub(crate) struct MissingFeatures;
impl IntoDiagnostic<'_, ErrorGuaranteed> for TargetFeatureDisableOrEnable<'_> {
fn into_diagnostic(self, sess: &'_ Handler) -> DiagnosticBuilder<'_, ErrorGuaranteed> {
let mut diag = sess.struct_err(fluent::codegen_gcc_target_feature_disable_or_enable);
if let Some(span) = self.span {
diag.set_span(span);
};
if let Some(missing_features) = self.missing_features {
diag.subdiagnostic(missing_features);
}
diag.set_arg("features", self.features.join(", "));
diag
}
}

View File

@ -0,0 +1,223 @@
#[cfg(feature="master")]
use gccjit::Context;
use smallvec::{smallvec, SmallVec};
use rustc_codegen_ssa::target_features::{
supported_target_features, tied_target_features, RUSTC_SPECIFIC_FEATURES,
};
use rustc_data_structures::fx::FxHashMap;
use rustc_middle::bug;
use rustc_session::Session;
use crate::errors::{PossibleFeature, TargetFeatureDisableOrEnable, UnknownCTargetFeature, UnknownCTargetFeaturePrefix};
/// The list of GCC features computed from CLI flags (`-Ctarget-cpu`, `-Ctarget-feature`,
/// `--target` and similar).
pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec<String> {
// Features that come earlier are overridden by conflicting features later in the string.
// Typically we'll want more explicit settings to override the implicit ones, so:
//
// * Features from -Ctarget-cpu=*; are overridden by [^1]
// * Features implied by --target; are overridden by
// * Features from -Ctarget-feature; are overridden by
// * function specific features.
//
// [^1]: target-cpu=native is handled here, other target-cpu values are handled implicitly
// through GCC march implementation.
//
// FIXME(nagisa): it isn't clear what the best interaction between features implied by
// `-Ctarget-cpu` and `--target` is. On one hand, you'd expect CLI arguments to always
// override anything that's implicit, so e.g. when there's no `--target` flag, features implied by
// the host target are overridden by `-Ctarget-cpu=*`. On the other hand, what about when both
// `--target` and `-Ctarget-cpu=*` are specified? Both then imply some target features and both
// flags are specified by the user on the CLI. It isn't as clear-cut which order of precedence
// should be taken in cases like these.
let mut features = vec![];
// Features implied by an implicit or explicit `--target`.
features.extend(
sess.target
.features
.split(',')
.filter(|v| !v.is_empty() && backend_feature_name(v).is_some())
.map(String::from),
);
// -Ctarget-features
let supported_features = supported_target_features(sess);
let mut featsmap = FxHashMap::default();
let feats = sess.opts.cg.target_feature
.split(',')
.filter_map(|s| {
let enable_disable = match s.chars().next() {
None => return None,
Some(c @ ('+' | '-')) => c,
Some(_) => {
if diagnostics {
sess.emit_warning(UnknownCTargetFeaturePrefix { feature: s });
}
return None;
}
};
let feature = backend_feature_name(s)?;
// Warn against use of GCC specific feature names on the CLI.
if diagnostics && !supported_features.iter().any(|&(v, _)| v == feature) {
let rust_feature = supported_features.iter().find_map(|&(rust_feature, _)| {
let gcc_features = to_gcc_features(sess, rust_feature);
if gcc_features.contains(&feature) && !gcc_features.contains(&rust_feature) {
Some(rust_feature)
} else {
None
}
});
let unknown_feature =
if let Some(rust_feature) = rust_feature {
UnknownCTargetFeature {
feature,
rust_feature: PossibleFeature::Some { rust_feature },
}
}
else {
UnknownCTargetFeature { feature, rust_feature: PossibleFeature::None }
};
sess.emit_warning(unknown_feature);
}
if diagnostics {
// FIXME(nagisa): figure out how to not allocate a full hashset here.
featsmap.insert(feature, enable_disable == '+');
}
// rustc-specific features do not get passed down to GCC…
if RUSTC_SPECIFIC_FEATURES.contains(&feature) {
return None;
}
// ... otherwise though we run through `to_gcc_features` when
// passing requests down to GCC. This means that all in-language
// features also work on the command line instead of having two
// different names when the GCC name and the Rust name differ.
Some(to_gcc_features(sess, feature)
.iter()
.flat_map(|feat| to_gcc_features(sess, feat).into_iter())
.map(|feature| {
if enable_disable == '-' {
format!("-{}", feature)
}
else {
feature.to_string()
}
})
.collect::<Vec<_>>(),
)
})
.flatten();
features.extend(feats);
if diagnostics {
if let Some(f) = check_tied_features(sess, &featsmap) {
sess.emit_err(TargetFeatureDisableOrEnable {
features: f,
span: None,
missing_features: None,
});
}
}
features
}
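
For illustration, the `-Ctarget-feature` expansion above can be reduced to the following self-contained sketch; the hardcoded `gcc_names` table is a hypothetical stand-in for `to_gcc_features`, which additionally needs a `Session`:

// Minimal sketch of the expansion done in `global_gcc_features`.
fn expand_target_features(target_feature: &str) -> Vec<String> {
    // Hypothetical stand-in for `to_gcc_features(sess, feature)`.
    fn gcc_names(feature: &str) -> Vec<&str> {
        match feature {
            "sse4.2" => vec!["sse4.2", "crc32"],
            "bmi1" => vec!["bmi"],
            other => vec![other],
        }
    }
    target_feature
        .split(',')
        .filter_map(|s| {
            let enable_disable = s.chars().next()?;
            let feature = s.strip_prefix(&['+', '-'][..])?;
            Some(gcc_names(feature).into_iter().map(move |name| {
                if enable_disable == '-' { format!("-{}", name) } else { name.to_string() }
            }))
        })
        .flatten()
        .collect()
}

// expand_target_features("+sse4.2,-bmi1") == ["sse4.2", "crc32", "-bmi"]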
/// Returns a feature name for the given `+feature` or `-feature` string.
///
/// Only allows features that are backend specific (i.e. not in [`RUSTC_SPECIFIC_FEATURES`]).
fn backend_feature_name(s: &str) -> Option<&str> {
// features must start with a `+` or `-`.
let feature = s.strip_prefix(&['+', '-'][..]).unwrap_or_else(|| {
bug!("target feature `{}` must begin with a `+` or `-`", s);
});
// Rustc-specific feature requests like `+crt-static` or `-crt-static`
// are not passed down to GCC.
if RUSTC_SPECIFIC_FEATURES.contains(&feature) {
return None;
}
Some(feature)
}
// To find a list of GCC's names, check https://gcc.gnu.org/onlinedocs/gcc/Function-Attributes.html
pub fn to_gcc_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]> {
let arch = if sess.target.arch == "x86_64" { "x86" } else { &*sess.target.arch };
match (arch, s) {
("x86", "sse4.2") => smallvec!["sse4.2", "crc32"],
("x86", "pclmulqdq") => smallvec!["pclmul"],
("x86", "rdrand") => smallvec!["rdrnd"],
("x86", "bmi1") => smallvec!["bmi"],
("x86", "cmpxchg16b") => smallvec!["cx16"],
("x86", "avx512vaes") => smallvec!["vaes"],
("x86", "avx512gfni") => smallvec!["gfni"],
("x86", "avx512vpclmulqdq") => smallvec!["vpclmulqdq"],
// NOTE: seems like GCC requires 'avx512bw' for 'avx512vbmi2'.
("x86", "avx512vbmi2") => smallvec!["avx512vbmi2", "avx512bw"],
// NOTE: seems like GCC requires 'avx512bw' for 'avx512bitalg'.
("x86", "avx512bitalg") => smallvec!["avx512bitalg", "avx512bw"],
("aarch64", "rcpc2") => smallvec!["rcpc-immo"],
("aarch64", "dpb") => smallvec!["ccpp"],
("aarch64", "dpb2") => smallvec!["ccdp"],
("aarch64", "frintts") => smallvec!["fptoint"],
("aarch64", "fcma") => smallvec!["complxnum"],
("aarch64", "pmuv3") => smallvec!["perfmon"],
("aarch64", "paca") => smallvec!["pauth"],
("aarch64", "pacg") => smallvec!["pauth"],
// Rust ties fp and neon together. In GCC neon implicitly enables fp,
// but we manually enable neon when a feature only implicitly enables fp
("aarch64", "f32mm") => smallvec!["f32mm", "neon"],
("aarch64", "f64mm") => smallvec!["f64mm", "neon"],
("aarch64", "fhm") => smallvec!["fp16fml", "neon"],
("aarch64", "fp16") => smallvec!["fullfp16", "neon"],
("aarch64", "jsconv") => smallvec!["jsconv", "neon"],
("aarch64", "sve") => smallvec!["sve", "neon"],
("aarch64", "sve2") => smallvec!["sve2", "neon"],
("aarch64", "sve2-aes") => smallvec!["sve2-aes", "neon"],
("aarch64", "sve2-sm4") => smallvec!["sve2-sm4", "neon"],
("aarch64", "sve2-sha3") => smallvec!["sve2-sha3", "neon"],
("aarch64", "sve2-bitperm") => smallvec!["sve2-bitperm", "neon"],
(_, s) => smallvec![s],
}
}
// Given a map from target_features to whether they are enabled or disabled,
// ensure only valid combinations are allowed.
pub fn check_tied_features(sess: &Session, features: &FxHashMap<&str, bool>) -> Option<&'static [&'static str]> {
for tied in tied_target_features(sess) {
// Tied features must be set to the same value, or not set at all
let mut tied_iter = tied.iter();
let enabled = features.get(tied_iter.next().unwrap());
if tied_iter.any(|feature| enabled != features.get(feature)) {
return Some(tied);
}
}
None
}
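
As a hedged example of the rule enforced here: on aarch64, rustc ties `paca` and `pacg` together, so requesting only one of them should be rejected. A std-only sketch of the same check (using `std::collections::HashMap` instead of `FxHashMap`):

// Same check as above: tied features must all be set to the same value, or not set at all.
use std::collections::HashMap;

fn tied_features_match(tied: &[&str], features: &HashMap<&str, bool>) -> bool {
    let mut iter = tied.iter();
    let first = features.get(iter.next().unwrap());
    iter.all(|feature| features.get(feature) == first)
}

fn main() {
    let mut features = HashMap::new();
    features.insert("paca", true); // +paca requested without +pacg
    assert!(!tied_features_match(&["paca", "pacg"], &features)); // rejected
}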
fn handle_native(name: &str) -> &str {
if name != "native" {
return name;
}
#[cfg(feature="master")]
{
// Get the native arch.
let context = Context::default();
context.get_target_info().arch().unwrap()
.to_str()
.unwrap()
}
#[cfg(not(feature="master"))]
unimplemented!();
}
pub fn target_cpu(sess: &Session) -> &str {
match sess.opts.cg.target_cpu {
Some(ref name) => handle_native(name),
None => handle_native(sess.target.cpu.as_ref()),
}
}

View File

@ -36,7 +36,6 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
self.cx.context.new_unary_op(None, operation, typ, a)
}
else {
// TODO(antoyo): use __negdi2 and __negti2 instead?
let element_type = typ.dyncast_array().expect("element type");
let values = [
self.cx.context.new_unary_op(None, UnaryOp::BitwiseNegate, element_type, self.low(a)),
@ -52,9 +51,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
self.cx.context.new_unary_op(None, UnaryOp::Minus, a.get_type(), a)
}
else {
let param_a = self.context.new_parameter(None, a_type, "a");
let func = self.context.new_function(None, FunctionType::Extern, a_type, &[param_a], "__negti2", false);
self.context.new_call(None, func, &[a])
self.gcc_add(self.gcc_not(a), self.gcc_int(a_type, 1))
}
}
@ -353,23 +350,63 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
(res.dereference(None).to_rvalue(), overflow)
}
pub fn gcc_icmp(&self, op: IntPredicate, mut lhs: RValue<'gcc>, mut rhs: RValue<'gcc>) -> RValue<'gcc> {
pub fn gcc_icmp(&mut self, op: IntPredicate, mut lhs: RValue<'gcc>, mut rhs: RValue<'gcc>) -> RValue<'gcc> {
let a_type = lhs.get_type();
let b_type = rhs.get_type();
if self.is_non_native_int_type(a_type) || self.is_non_native_int_type(b_type) {
let signed = a_type.is_compatible_with(self.i128_type);
let sign =
if signed {
""
}
else {
"u"
};
let func_name = format!("__{}cmpti2", sign);
let param_a = self.context.new_parameter(None, a_type, "a");
let param_b = self.context.new_parameter(None, b_type, "b");
let func = self.context.new_function(None, FunctionType::Extern, self.int_type, &[param_a, param_b], func_name, false);
let cmp = self.context.new_call(None, func, &[lhs, rhs]);
// This algorithm is based on compiler-rt's __cmpti2:
// https://github.com/llvm-mirror/compiler-rt/blob/f0745e8476f069296a7c71accedd061dce4cdf79/lib/builtins/cmpti2.c#L21
let result = self.current_func().new_local(None, self.int_type, "icmp_result");
let block1 = self.current_func().new_block("block1");
let block2 = self.current_func().new_block("block2");
let block3 = self.current_func().new_block("block3");
let block4 = self.current_func().new_block("block4");
let block5 = self.current_func().new_block("block5");
let block6 = self.current_func().new_block("block6");
let block7 = self.current_func().new_block("block7");
let block8 = self.current_func().new_block("block8");
let after = self.current_func().new_block("after");
let native_int_type = a_type.dyncast_array().expect("get element type");
// NOTE: cast low to its unsigned type in order to perform a comparison correctly (e.g.
// the sign is only on high).
let unsigned_type = native_int_type.to_unsigned(&self.cx);
let lhs_low = self.context.new_cast(None, self.low(lhs), unsigned_type);
let rhs_low = self.context.new_cast(None, self.low(rhs), unsigned_type);
let condition = self.context.new_comparison(None, ComparisonOp::LessThan, self.high(lhs), self.high(rhs));
self.llbb().end_with_conditional(None, condition, block1, block2);
block1.add_assignment(None, result, self.context.new_rvalue_zero(self.int_type));
block1.end_with_jump(None, after);
let condition = self.context.new_comparison(None, ComparisonOp::GreaterThan, self.high(lhs), self.high(rhs));
block2.end_with_conditional(None, condition, block3, block4);
block3.add_assignment(None, result, self.context.new_rvalue_from_int(self.int_type, 2));
block3.end_with_jump(None, after);
let condition = self.context.new_comparison(None, ComparisonOp::LessThan, lhs_low, rhs_low);
block4.end_with_conditional(None, condition, block5, block6);
block5.add_assignment(None, result, self.context.new_rvalue_zero(self.int_type));
block5.end_with_jump(None, after);
let condition = self.context.new_comparison(None, ComparisonOp::GreaterThan, lhs_low, rhs_low);
block6.end_with_conditional(None, condition, block7, block8);
block7.add_assignment(None, result, self.context.new_rvalue_from_int(self.int_type, 2));
block7.end_with_jump(None, after);
block8.add_assignment(None, result, self.context.new_rvalue_one(self.int_type));
block8.end_with_jump(None, after);
// NOTE: since jumps were added in a place rustc does not expect, the current block in the
// state needs to be updated.
self.switch_to_block(after);
let cmp = result.to_rvalue();
let (op, limit) =
match op {
IntPredicate::IntEQ => {
@ -546,7 +583,12 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
}
pub fn gcc_uint(&self, typ: Type<'gcc>, int: u64) -> RValue<'gcc> {
if self.is_native_int_type_or_bool(typ) {
if typ.is_u128(self) {
// FIXME(antoyo): libgccjit cannot create 128-bit values yet.
let num = self.context.new_rvalue_from_long(self.u64_type, int as i64);
self.gcc_int_cast(num, typ)
}
else if self.is_native_int_type_or_bool(typ) {
self.context.new_rvalue_from_long(typ, u64::try_from(int).expect("u64::try_from") as i64)
}
else {
@ -572,6 +614,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
}
}
else if typ.is_i128(self) {
// FIXME(antoyo): libgccjit cannot create 128-bit values yet.
let num = self.context.new_rvalue_from_long(self.u64_type, num as u64 as i64);
self.gcc_int_cast(num, typ)
}
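
The control flow built above for non-native 128-bit comparisons mirrors compiler-rt's __cmpti2: compare the high halves as signed, then the low halves as unsigned, producing 0 (less), 1 (equal) or 2 (greater). A plain-Rust reference sketch of the same three-way comparison:

// Reference sketch of the lowering above, following compiler-rt's __cmpti2.
fn cmpti2(lhs: i128, rhs: i128) -> i32 {
    let (lhs_high, rhs_high) = ((lhs >> 64) as i64, (rhs >> 64) as i64);
    let (lhs_low, rhs_low) = (lhs as u64, rhs as u64);
    if lhs_high < rhs_high { return 0; }
    if lhs_high > rhs_high { return 2; }
    if lhs_low < rhs_low { return 0; }
    if lhs_low > rhs_low { return 2; }
    1
}

// cmpti2(-1, 0) == 0, cmpti2(0, 0) == 1, cmpti2(1, 0) == 2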

View File

@ -2254,6 +2254,42 @@ match name {
"llvm.hexagon.prefetch" => "__builtin_HEXAGON_prefetch",
"llvm.hexagon.vmemcpy" => "__builtin_hexagon_vmemcpy",
"llvm.hexagon.vmemset" => "__builtin_hexagon_vmemset",
// loongarch
"llvm.loongarch.asrtgt.d" => "__builtin_loongarch_asrtgt_d",
"llvm.loongarch.asrtle.d" => "__builtin_loongarch_asrtle_d",
"llvm.loongarch.break" => "__builtin_loongarch_break",
"llvm.loongarch.cacop.d" => "__builtin_loongarch_cacop_d",
"llvm.loongarch.cacop.w" => "__builtin_loongarch_cacop_w",
"llvm.loongarch.cpucfg" => "__builtin_loongarch_cpucfg",
"llvm.loongarch.crc.w.b.w" => "__builtin_loongarch_crc_w_b_w",
"llvm.loongarch.crc.w.d.w" => "__builtin_loongarch_crc_w_d_w",
"llvm.loongarch.crc.w.h.w" => "__builtin_loongarch_crc_w_h_w",
"llvm.loongarch.crc.w.w.w" => "__builtin_loongarch_crc_w_w_w",
"llvm.loongarch.crcc.w.b.w" => "__builtin_loongarch_crcc_w_b_w",
"llvm.loongarch.crcc.w.d.w" => "__builtin_loongarch_crcc_w_d_w",
"llvm.loongarch.crcc.w.h.w" => "__builtin_loongarch_crcc_w_h_w",
"llvm.loongarch.crcc.w.w.w" => "__builtin_loongarch_crcc_w_w_w",
"llvm.loongarch.csrrd.d" => "__builtin_loongarch_csrrd_d",
"llvm.loongarch.csrrd.w" => "__builtin_loongarch_csrrd_w",
"llvm.loongarch.csrwr.d" => "__builtin_loongarch_csrwr_d",
"llvm.loongarch.csrwr.w" => "__builtin_loongarch_csrwr_w",
"llvm.loongarch.csrxchg.d" => "__builtin_loongarch_csrxchg_d",
"llvm.loongarch.csrxchg.w" => "__builtin_loongarch_csrxchg_w",
"llvm.loongarch.dbar" => "__builtin_loongarch_dbar",
"llvm.loongarch.ibar" => "__builtin_loongarch_ibar",
"llvm.loongarch.iocsrrd.b" => "__builtin_loongarch_iocsrrd_b",
"llvm.loongarch.iocsrrd.d" => "__builtin_loongarch_iocsrrd_d",
"llvm.loongarch.iocsrrd.h" => "__builtin_loongarch_iocsrrd_h",
"llvm.loongarch.iocsrrd.w" => "__builtin_loongarch_iocsrrd_w",
"llvm.loongarch.iocsrwr.b" => "__builtin_loongarch_iocsrwr_b",
"llvm.loongarch.iocsrwr.d" => "__builtin_loongarch_iocsrwr_d",
"llvm.loongarch.iocsrwr.h" => "__builtin_loongarch_iocsrwr_h",
"llvm.loongarch.iocsrwr.w" => "__builtin_loongarch_iocsrwr_w",
"llvm.loongarch.lddir.d" => "__builtin_loongarch_lddir_d",
"llvm.loongarch.ldpte.d" => "__builtin_loongarch_ldpte_d",
"llvm.loongarch.movfcsr2gr" => "__builtin_loongarch_movfcsr2gr",
"llvm.loongarch.movgr2fcsr" => "__builtin_loongarch_movgr2fcsr",
"llvm.loongarch.syscall" => "__builtin_loongarch_syscall",
// mips
"llvm.mips.absq.s.ph" => "__builtin_mips_absq_s_ph",
"llvm.mips.absq.s.qb" => "__builtin_mips_absq_s_qb",
@ -2954,6 +2990,8 @@ match name {
"llvm.nvvm.barrier0.and" => "__nvvm_bar0_and",
"llvm.nvvm.barrier0.or" => "__nvvm_bar0_or",
"llvm.nvvm.barrier0.popc" => "__nvvm_bar0_popc",
"llvm.nvvm.bf2h.rn" => "__nvvm_bf2h_rn",
"llvm.nvvm.bf2h.rn.ftz" => "__nvvm_bf2h_rn_ftz",
"llvm.nvvm.bitcast.d2ll" => "__nvvm_bitcast_d2ll",
"llvm.nvvm.bitcast.f2i" => "__nvvm_bitcast_f2i",
"llvm.nvvm.bitcast.i2f" => "__nvvm_bitcast_i2f",
@ -3016,8 +3054,6 @@ match name {
"llvm.nvvm.div.rz.ftz.f" => "__nvvm_div_rz_ftz_f",
"llvm.nvvm.ex2.approx.d" => "__nvvm_ex2_approx_d",
"llvm.nvvm.ex2.approx.f" => "__nvvm_ex2_approx_f",
"llvm.nvvm.ex2.approx.f16" => "__nvvm_ex2_approx_f16",
"llvm.nvvm.ex2.approx.f16x2" => "__nvvm_ex2_approx_f16x2",
"llvm.nvvm.ex2.approx.ftz.f" => "__nvvm_ex2_approx_ftz_f",
"llvm.nvvm.f2bf16.rn" => "__nvvm_f2bf16_rn",
"llvm.nvvm.f2bf16.rn.relu" => "__nvvm_f2bf16_rn_relu",
@ -3079,11 +3115,17 @@ match name {
"llvm.nvvm.fma.rn.bf16x2" => "__nvvm_fma_rn_bf16x2",
"llvm.nvvm.fma.rn.d" => "__nvvm_fma_rn_d",
"llvm.nvvm.fma.rn.f" => "__nvvm_fma_rn_f",
"llvm.nvvm.fma.rn.f16" => "__nvvm_fma_rn_f16",
"llvm.nvvm.fma.rn.f16x2" => "__nvvm_fma_rn_f16x2",
"llvm.nvvm.fma.rn.ftz.bf16" => "__nvvm_fma_rn_ftz_bf16",
"llvm.nvvm.fma.rn.ftz.bf16x2" => "__nvvm_fma_rn_ftz_bf16x2",
"llvm.nvvm.fma.rn.ftz.f" => "__nvvm_fma_rn_ftz_f",
"llvm.nvvm.fma.rn.ftz.relu.bf16" => "__nvvm_fma_rn_ftz_relu_bf16",
"llvm.nvvm.fma.rn.ftz.relu.bf16x2" => "__nvvm_fma_rn_ftz_relu_bf16x2",
"llvm.nvvm.fma.rn.ftz.sat.bf16" => "__nvvm_fma_rn_ftz_sat_bf16",
"llvm.nvvm.fma.rn.ftz.sat.bf16x2" => "__nvvm_fma_rn_ftz_sat_bf16x2",
"llvm.nvvm.fma.rn.relu.bf16" => "__nvvm_fma_rn_relu_bf16",
"llvm.nvvm.fma.rn.relu.bf16x2" => "__nvvm_fma_rn_relu_bf16x2",
"llvm.nvvm.fma.rn.sat.bf16" => "__nvvm_fma_rn_sat_bf16",
"llvm.nvvm.fma.rn.sat.bf16x2" => "__nvvm_fma_rn_sat_bf16x2",
"llvm.nvvm.fma.rp.d" => "__nvvm_fma_rp_d",
"llvm.nvvm.fma.rp.f" => "__nvvm_fma_rp_f",
"llvm.nvvm.fma.rp.ftz.f" => "__nvvm_fma_rp_ftz_f",
@ -3094,11 +3136,17 @@ match name {
"llvm.nvvm.fmax.bf16x2" => "__nvvm_fmax_bf16x2",
"llvm.nvvm.fmax.d" => "__nvvm_fmax_d",
"llvm.nvvm.fmax.f" => "__nvvm_fmax_f",
"llvm.nvvm.fmax.f16" => "__nvvm_fmax_f16",
"llvm.nvvm.fmax.f16x2" => "__nvvm_fmax_f16x2",
"llvm.nvvm.fmax.ftz.bf16" => "__nvvm_fmax_ftz_bf16",
"llvm.nvvm.fmax.ftz.bf16x2" => "__nvvm_fmax_ftz_bf16x2",
"llvm.nvvm.fmax.ftz.f" => "__nvvm_fmax_ftz_f",
"llvm.nvvm.fmax.ftz.nan.bf16" => "__nvvm_fmax_ftz_nan_bf16",
"llvm.nvvm.fmax.ftz.nan.bf16x2" => "__nvvm_fmax_ftz_nan_bf16x2",
"llvm.nvvm.fmax.ftz.nan.f" => "__nvvm_fmax_ftz_nan_f",
"llvm.nvvm.fmax.ftz.nan.xorsign.abs.bf16" => "__nvvm_fmax_ftz_nan_xorsign_abs_bf16",
"llvm.nvvm.fmax.ftz.nan.xorsign.abs.bf16x2" => "__nvvm_fmax_ftz_nan_xorsign_abs_bf16x2",
"llvm.nvvm.fmax.ftz.nan.xorsign.abs.f" => "__nvvm_fmax_ftz_nan_xorsign_abs_f",
"llvm.nvvm.fmax.ftz.xorsign.abs.bf16" => "__nvvm_fmax_ftz_xorsign_abs_bf16",
"llvm.nvvm.fmax.ftz.xorsign.abs.bf16x2" => "__nvvm_fmax_ftz_xorsign_abs_bf16x2",
"llvm.nvvm.fmax.ftz.xorsign.abs.f" => "__nvvm_fmax_ftz_xorsign_abs_f",
"llvm.nvvm.fmax.nan.bf16" => "__nvvm_fmax_nan_bf16",
"llvm.nvvm.fmax.nan.bf16x2" => "__nvvm_fmax_nan_bf16x2",
@ -3113,11 +3161,17 @@ match name {
"llvm.nvvm.fmin.bf16x2" => "__nvvm_fmin_bf16x2",
"llvm.nvvm.fmin.d" => "__nvvm_fmin_d",
"llvm.nvvm.fmin.f" => "__nvvm_fmin_f",
"llvm.nvvm.fmin.f16" => "__nvvm_fmin_f16",
"llvm.nvvm.fmin.f16x2" => "__nvvm_fmin_f16x2",
"llvm.nvvm.fmin.ftz.bf16" => "__nvvm_fmin_ftz_bf16",
"llvm.nvvm.fmin.ftz.bf16x2" => "__nvvm_fmin_ftz_bf16x2",
"llvm.nvvm.fmin.ftz.f" => "__nvvm_fmin_ftz_f",
"llvm.nvvm.fmin.ftz.nan.bf16" => "__nvvm_fmin_ftz_nan_bf16",
"llvm.nvvm.fmin.ftz.nan.bf16x2" => "__nvvm_fmin_ftz_nan_bf16x2",
"llvm.nvvm.fmin.ftz.nan.f" => "__nvvm_fmin_ftz_nan_f",
"llvm.nvvm.fmin.ftz.nan.xorsign.abs.bf16" => "__nvvm_fmin_ftz_nan_xorsign_abs_bf16",
"llvm.nvvm.fmin.ftz.nan.xorsign.abs.bf16x2" => "__nvvm_fmin_ftz_nan_xorsign_abs_bf16x2",
"llvm.nvvm.fmin.ftz.nan.xorsign.abs.f" => "__nvvm_fmin_ftz_nan_xorsign_abs_f",
"llvm.nvvm.fmin.ftz.xorsign.abs.bf16" => "__nvvm_fmin_ftz_xorsign_abs_bf16",
"llvm.nvvm.fmin.ftz.xorsign.abs.bf16x2" => "__nvvm_fmin_ftz_xorsign_abs_bf16x2",
"llvm.nvvm.fmin.ftz.xorsign.abs.f" => "__nvvm_fmin_ftz_xorsign_abs_f",
"llvm.nvvm.fmin.nan.bf16" => "__nvvm_fmin_nan_bf16",
"llvm.nvvm.fmin.nan.bf16x2" => "__nvvm_fmin_nan_bf16x2",
@ -4213,6 +4267,28 @@ match name {
"llvm.r600.read.tgid.x" => "__builtin_r600_read_tgid_x",
"llvm.r600.read.tgid.y" => "__builtin_r600_read_tgid_y",
"llvm.r600.read.tgid.z" => "__builtin_r600_read_tgid_z",
// riscv
"llvm.riscv.aes32dsi" => "__builtin_riscv_aes32dsi",
"llvm.riscv.aes32dsmi" => "__builtin_riscv_aes32dsmi",
"llvm.riscv.aes32esi" => "__builtin_riscv_aes32esi",
"llvm.riscv.aes32esmi" => "__builtin_riscv_aes32esmi",
"llvm.riscv.aes64ds" => "__builtin_riscv_aes64ds",
"llvm.riscv.aes64dsm" => "__builtin_riscv_aes64dsm",
"llvm.riscv.aes64es" => "__builtin_riscv_aes64es",
"llvm.riscv.aes64esm" => "__builtin_riscv_aes64esm",
"llvm.riscv.aes64im" => "__builtin_riscv_aes64im",
"llvm.riscv.aes64ks1i" => "__builtin_riscv_aes64ks1i",
"llvm.riscv.aes64ks2" => "__builtin_riscv_aes64ks2",
"llvm.riscv.sha512sig0" => "__builtin_riscv_sha512sig0",
"llvm.riscv.sha512sig0h" => "__builtin_riscv_sha512sig0h",
"llvm.riscv.sha512sig0l" => "__builtin_riscv_sha512sig0l",
"llvm.riscv.sha512sig1" => "__builtin_riscv_sha512sig1",
"llvm.riscv.sha512sig1h" => "__builtin_riscv_sha512sig1h",
"llvm.riscv.sha512sig1l" => "__builtin_riscv_sha512sig1l",
"llvm.riscv.sha512sum0" => "__builtin_riscv_sha512sum0",
"llvm.riscv.sha512sum0r" => "__builtin_riscv_sha512sum0r",
"llvm.riscv.sha512sum1" => "__builtin_riscv_sha512sum1",
"llvm.riscv.sha512sum1r" => "__builtin_riscv_sha512sum1r",
// s390
"llvm.s390.efpc" => "__builtin_s390_efpc",
"llvm.s390.etnd" => "__builtin_tx_nesting_depth",
@ -5912,6 +5988,18 @@ match name {
"llvm.x86.avx2.vpdpbuud.256" => "__builtin_ia32_vpdpbuud256",
"llvm.x86.avx2.vpdpbuuds.128" => "__builtin_ia32_vpdpbuuds128",
"llvm.x86.avx2.vpdpbuuds.256" => "__builtin_ia32_vpdpbuuds256",
"llvm.x86.avx2.vpdpwsud.128" => "__builtin_ia32_vpdpwsud128",
"llvm.x86.avx2.vpdpwsud.256" => "__builtin_ia32_vpdpwsud256",
"llvm.x86.avx2.vpdpwsuds.128" => "__builtin_ia32_vpdpwsuds128",
"llvm.x86.avx2.vpdpwsuds.256" => "__builtin_ia32_vpdpwsuds256",
"llvm.x86.avx2.vpdpwusd.128" => "__builtin_ia32_vpdpwusd128",
"llvm.x86.avx2.vpdpwusd.256" => "__builtin_ia32_vpdpwusd256",
"llvm.x86.avx2.vpdpwusds.128" => "__builtin_ia32_vpdpwusds128",
"llvm.x86.avx2.vpdpwusds.256" => "__builtin_ia32_vpdpwusds256",
"llvm.x86.avx2.vpdpwuud.128" => "__builtin_ia32_vpdpwuud128",
"llvm.x86.avx2.vpdpwuud.256" => "__builtin_ia32_vpdpwuud256",
"llvm.x86.avx2.vpdpwuuds.128" => "__builtin_ia32_vpdpwuuds128",
"llvm.x86.avx2.vpdpwuuds.256" => "__builtin_ia32_vpdpwuuds256",
"llvm.x86.avx2.vperm2i128" => "__builtin_ia32_permti256",
"llvm.x86.avx512.add.pd.512" => "__builtin_ia32_addpd512",
"llvm.x86.avx512.add.ps.512" => "__builtin_ia32_addps512",
@ -7909,6 +7997,16 @@ match name {
"llvm.x86.vgf2p8mulb.128" => "__builtin_ia32_vgf2p8mulb_v16qi",
"llvm.x86.vgf2p8mulb.256" => "__builtin_ia32_vgf2p8mulb_v32qi",
"llvm.x86.vgf2p8mulb.512" => "__builtin_ia32_vgf2p8mulb_v64qi",
"llvm.x86.vsha512msg1" => "__builtin_ia32_vsha512msg1",
"llvm.x86.vsha512msg2" => "__builtin_ia32_vsha512msg2",
"llvm.x86.vsha512rnds2" => "__builtin_ia32_vsha512rnds2",
"llvm.x86.vsm3msg1" => "__builtin_ia32_vsm3msg1",
"llvm.x86.vsm3msg2" => "__builtin_ia32_vsm3msg2",
"llvm.x86.vsm3rnds2" => "__builtin_ia32_vsm3rnds2",
"llvm.x86.vsm4key4128" => "__builtin_ia32_vsm4key4128",
"llvm.x86.vsm4key4256" => "__builtin_ia32_vsm4key4256",
"llvm.x86.vsm4rnds4128" => "__builtin_ia32_vsm4rnds4128",
"llvm.x86.vsm4rnds4256" => "__builtin_ia32_vsm4rnds4256",
"llvm.x86.wbinvd" => "__builtin_ia32_wbinvd",
"llvm.x86.wbnoinvd" => "__builtin_ia32_wbnoinvd",
"llvm.x86.wrfsbase.32" => "__builtin_ia32_wrfsbase32",

View File

@ -236,11 +236,17 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc
let arg2 = builder.context.new_cast(None, arg2, arg2_type);
args = vec![new_args[0], arg2].into();
},
// These builtins are sent one more argument than needed.
"__builtin_prefetch" => {
let mut new_args = args.to_vec();
new_args.pop();
args = new_args.into();
},
// The GCC version returns one value of the tuple through a pointer.
"__builtin_ia32_rdrand64_step" => {
let arg = builder.current_func().new_local(None, builder.ulonglong_type, "return_rdrand_arg");
args = vec![arg.get_address(None)].into();
},
_ => (),
}
}
@ -361,6 +367,19 @@ pub fn adjust_intrinsic_return_value<'a, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc,
// builtin twice, we overwrite the return value with a dummy value.
return_value = builder.context.new_rvalue_zero(builder.int_type);
},
"__builtin_ia32_rdrand64_step" => {
let random_number = args[0].dereference(None).to_rvalue();
let success_variable = builder.current_func().new_local(None, return_value.get_type(), "success");
builder.llbb().add_assignment(None, success_variable, return_value);
let field1 = builder.context.new_field(None, random_number.get_type(), "random_number");
let field2 = builder.context.new_field(None, return_value.get_type(), "success");
let struct_type = builder.context.new_struct_type(None, "rdrand_result", &[field1, field2]);
return_value = builder.context.new_struct_constructor(None, struct_type.as_type(), None, &[
random_number,
success_variable.to_rvalue(),
]);
},
_ => (),
}
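
As a hedged illustration of the bridge above: GCC's `__builtin_ia32_rdrand64_step` writes the random value through an out-pointer and returns a success flag, while the caller expects the value/success pair produced by `llvm.x86.rdrand.64`, so the adjusted call stores the flag and rebuilds a two-field result. A plain-Rust sketch of the same shape (the builtin itself is stubbed out):

// Stub standing in for the GCC builtin: writes through an out-pointer and
// returns 1 on success, 0 on failure.
fn rdrand64_step_stub(out: &mut u64) -> i32 {
    *out = 0x1234_5678_9abc_def0; // placeholder "random" value
    1
}

// Shape expected by the caller: (random_number, success), matching the LLVM
// intrinsic's two-element result.
fn rdrand64_pair() -> (u64, i32) {
    let mut random_number = 0u64;
    let success = rdrand64_step_stub(&mut random_number);
    (random_number, success)
}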
@ -613,6 +632,7 @@ pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function
"llvm.fshr.v8i16" => "__builtin_ia32_vpshrdv_v8hi",
"llvm.x86.fma.vfmadd.sd" => "__builtin_ia32_vfmaddsd3",
"llvm.x86.fma.vfmadd.ss" => "__builtin_ia32_vfmaddss3",
"llvm.x86.rdrand.64" => "__builtin_ia32_rdrand64_step",
// The above doc points to unknown builtins for the following, so override them:
"llvm.x86.avx2.gather.d.d" => "__builtin_ia32_gathersiv4si",

View File

@ -10,9 +10,9 @@ use rustc_codegen_ssa::base::wants_msvc_seh;
use rustc_codegen_ssa::common::IntPredicate;
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::traits::{ArgAbiMethods, BaseTypeMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods};
use rustc_codegen_ssa::traits::{ArgAbiMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods};
#[cfg(feature="master")]
use rustc_codegen_ssa::traits::MiscMethods;
use rustc_codegen_ssa::traits::{BaseTypeMethods, MiscMethods};
use rustc_codegen_ssa::errors::InvalidMonomorphization;
use rustc_middle::bug;
use rustc_middle::ty::{self, Instance, Ty};

View File

@ -2,6 +2,12 @@
* TODO(antoyo): implement equality in libgccjit based on https://zpz.github.io/blog/overloading-equality-operator-in-cpp-class-hierarchy/ (for type equality?)
* TODO(antoyo): support #[inline] attributes.
* TODO(antoyo): support LTO (gcc's equivalent to Full LTO is -flto -flto-partition=one https://documentation.suse.com/sbp/all/html/SBP-GCC-10/index.html).
* For Thin LTO, this might be helpful:
* In gcc 4.6 -fwhopr was removed and became default with -flto. The non-whopr path can still be executed via -flto-partition=none.
*
 * Maybe some missing optimizations enabled by rustc's LTO are in there: https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html
* Like -fipa-icf (should be already enabled) and maybe -fdevirtualize-at-ltrans.
 * TODO: disable debug info from always being emitted. Perhaps this slows things down?
*
* TODO(antoyo): remove the patches.
*/
@ -28,6 +34,7 @@ extern crate rustc_codegen_ssa;
extern crate rustc_data_structures;
extern crate rustc_errors;
extern crate rustc_fluent_macro;
extern crate rustc_fs_util;
extern crate rustc_hir;
extern crate rustc_macros;
extern crate rustc_metadata;
@ -35,7 +42,8 @@ extern crate rustc_middle;
extern crate rustc_session;
extern crate rustc_span;
extern crate rustc_target;
extern crate tempfile;
#[macro_use]
extern crate tracing;
// This prevents duplicating functions and statics that are already part of the host rustc process.
#[allow(unused_extern_crates)]
@ -57,6 +65,7 @@ mod coverageinfo;
mod debuginfo;
mod declare;
mod errors;
mod gcc_util;
mod int;
mod intrinsic;
mod mono_item;
@ -64,18 +73,29 @@ mod type_;
mod type_of;
use std::any::Any;
use std::sync::{Arc, Mutex};
use std::fmt::Debug;
use std::sync::Arc;
use std::sync::Mutex;
#[cfg(not(feature="master"))]
use std::sync::atomic::AtomicBool;
#[cfg(not(feature="master"))]
use std::sync::atomic::Ordering;
use crate::errors::LTONotSupported;
use gccjit::{Context, OptimizationLevel, CType};
use gccjit::{Context, OptimizationLevel};
#[cfg(feature="master")]
use gccjit::TargetInfo;
#[cfg(not(feature="master"))]
use gccjit::CType;
use errors::LTONotSupported;
use rustc_ast::expand::allocator::AllocatorKind;
use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen};
use rustc_codegen_ssa::base::codegen_crate;
use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryFn};
use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule};
use rustc_codegen_ssa::target_features::supported_target_features;
use rustc_codegen_ssa::traits::{CodegenBackend, ExtraBackendMethods, ModuleBufferMethods, ThinBufferMethods, WriteBackendMethods};
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::sync::IntoDynSyncSend;
use rustc_codegen_ssa::traits::{CodegenBackend, ExtraBackendMethods, ThinBufferMethods, WriteBackendMethods};
use rustc_errors::{DiagnosticMessage, ErrorGuaranteed, Handler, SubdiagnosticMessage};
use rustc_fluent_macro::fluent_messages;
use rustc_metadata::EncodedMetadata;
@ -88,6 +108,9 @@ use rustc_span::Symbol;
use rustc_span::fatal_error::FatalError;
use tempfile::TempDir;
use crate::back::lto::ModuleBuffer;
use crate::gcc_util::target_cpu;
fluent_messages! { "../messages.ftl" }
pub struct PrintOnPanic<F: Fn() -> String>(pub F);
@ -100,9 +123,47 @@ impl<F: Fn() -> String> Drop for PrintOnPanic<F> {
}
}
#[cfg(not(feature="master"))]
#[derive(Debug)]
pub struct TargetInfo {
supports_128bit_integers: AtomicBool,
}
#[cfg(not(feature="master"))]
impl TargetInfo {
fn cpu_supports(&self, _feature: &str) -> bool {
false
}
fn supports_128bit_int(&self) -> bool {
self.supports_128bit_integers.load(Ordering::SeqCst)
}
}
#[derive(Clone)]
pub struct LockedTargetInfo {
info: Arc<Mutex<IntoDynSyncSend<TargetInfo>>>,
}
impl Debug for LockedTargetInfo {
fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.info.lock().expect("lock").fmt(formatter)
}
}
impl LockedTargetInfo {
fn cpu_supports(&self, feature: &str) -> bool {
self.info.lock().expect("lock").cpu_supports(feature)
}
fn supports_128bit_int(&self) -> bool {
self.info.lock().expect("lock").supports_128bit_int()
}
}
#[derive(Clone)]
pub struct GccCodegenBackend {
supports_128bit_integers: Arc<Mutex<bool>>,
target_info: LockedTargetInfo,
}
impl CodegenBackend for GccCodegenBackend {
@ -111,25 +172,41 @@ impl CodegenBackend for GccCodegenBackend {
}
fn init(&self, sess: &Session) {
#[cfg(feature="master")]
{
let target_cpu = target_cpu(sess);
// Get the second TargetInfo with the correct CPU features by setting the arch.
let context = Context::default();
if target_cpu != "generic" {
context.add_command_line_option(&format!("-march={}", target_cpu));
}
**self.target_info.info.lock().expect("lock") = context.get_target_info();
}
#[cfg(feature="master")]
gccjit::set_global_personality_function_name(b"rust_eh_personality\0");
if sess.lto() != Lto::No {
if sess.lto() == Lto::Thin {
sess.emit_warning(LTONotSupported {});
}
let temp_dir = TempDir::new().expect("cannot create temporary directory");
let temp_file = temp_dir.into_path().join("result.asm");
let check_context = Context::default();
check_context.set_print_errors_to_stderr(false);
let _int128_ty = check_context.new_c_type(CType::UInt128t);
// NOTE: we cannot just call compile() as this would require other files than libgccjit.so.
check_context.compile_to_file(gccjit::OutputKind::Assembler, temp_file.to_str().expect("path to str"));
*self.supports_128bit_integers.lock().expect("lock") = check_context.get_last_error() == Ok(None);
#[cfg(not(feature="master"))]
{
let temp_dir = TempDir::new().expect("cannot create temporary directory");
let temp_file = temp_dir.into_path().join("result.asm");
let check_context = Context::default();
check_context.set_print_errors_to_stderr(false);
let _int128_ty = check_context.new_c_type(CType::UInt128t);
// NOTE: we cannot just call compile() as this would require other files than libgccjit.so.
check_context.compile_to_file(gccjit::OutputKind::Assembler, temp_file.to_str().expect("path to str"));
self.target_info.info.lock().expect("lock").supports_128bit_integers.store(check_context.get_last_error() == Ok(None), Ordering::SeqCst);
}
}
fn provide(&self, providers: &mut Providers) {
// FIXME(antoyo) compute list of enabled features from cli flags
providers.global_backend_features = |_tcx, ()| vec![];
providers.global_backend_features =
|tcx, ()| gcc_util::global_gcc_features(tcx.sess, true)
}
fn codegen_crate<'tcx>(&self, tcx: TyCtxt<'tcx>, metadata: EncodedMetadata, need_metadata_module: bool) -> Box<dyn Any> {
@ -160,7 +237,7 @@ impl CodegenBackend for GccCodegenBackend {
}
fn target_features(&self, sess: &Session, allow_unstable: bool) -> Vec<Symbol> {
target_features(sess, allow_unstable)
target_features(sess, allow_unstable, &self.target_info)
}
}
@ -168,13 +245,18 @@ impl ExtraBackendMethods for GccCodegenBackend {
fn codegen_allocator<'tcx>(&self, tcx: TyCtxt<'tcx>, module_name: &str, kind: AllocatorKind, alloc_error_handler_kind: AllocatorKind) -> Self::Module {
let mut mods = GccContext {
context: Context::default(),
should_combine_object_files: false,
temp_dir: None,
};
// TODO(antoyo): only set for x86.
mods.context.add_command_line_option("-masm=intel");
unsafe { allocator::codegen(tcx, &mut mods, module_name, kind, alloc_error_handler_kind); }
mods
}
fn compile_codegen_unit(&self, tcx: TyCtxt<'_>, cgu_name: Symbol) -> (ModuleCodegen<Self::Module>, u64) {
base::compile_codegen_unit(tcx, cgu_name, *self.supports_128bit_integers.lock().expect("lock"))
base::compile_codegen_unit(tcx, cgu_name, self.target_info.clone())
}
fn target_machine_factory(&self, _sess: &Session, _opt_level: OptLevel, _features: &[String]) -> TargetMachineFactoryFn<Self> {
@ -185,14 +267,6 @@ impl ExtraBackendMethods for GccCodegenBackend {
}
}
pub struct ModuleBuffer;
impl ModuleBufferMethods for ModuleBuffer {
fn data(&self) -> &[u8] {
unimplemented!();
}
}
pub struct ThinBuffer;
impl ThinBufferMethods for ThinBuffer {
@ -203,6 +277,9 @@ impl ThinBufferMethods for ThinBuffer {
pub struct GccContext {
context: Context<'static>,
should_combine_object_files: bool,
// Temporary directory used by LTO. We keep it here so that it's not removed before linking.
temp_dir: Option<TempDir>,
}
unsafe impl Send for GccContext {}
@ -217,18 +294,8 @@ impl WriteBackendMethods for GccCodegenBackend {
type ThinData = ();
type ThinBuffer = ThinBuffer;
fn run_fat_lto(_cgcx: &CodegenContext<Self>, mut modules: Vec<FatLtoInput<Self>>, _cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>) -> Result<LtoModuleCodegen<Self>, FatalError> {
// TODO(antoyo): implement LTO by sending -flto to libgccjit and adding the appropriate gcc linker plugins.
// NOTE: implemented elsewhere.
// TODO(antoyo): what is implemented elsewhere ^ ?
let module =
match modules.remove(0) {
FatLtoInput::InMemory(module) => module,
FatLtoInput::Serialized { .. } => {
unimplemented!();
}
};
Ok(LtoModuleCodegen::Fat { module, _serialized_bitcode: vec![] })
fn run_fat_lto(cgcx: &CodegenContext<Self>, modules: Vec<FatLtoInput<Self>>, cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>) -> Result<LtoModuleCodegen<Self>, FatalError> {
back::lto::run_fat(cgcx, modules, cached_modules)
}
fn run_thin_lto(_cgcx: &CodegenContext<Self>, _modules: Vec<(String, Self::ThinBuffer)>, _cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>) -> Result<(Vec<LtoModuleCodegen<Self>>, Vec<WorkProduct>), FatalError> {
@ -277,8 +344,19 @@ impl WriteBackendMethods for GccCodegenBackend {
/// This is the entrypoint for a hot plugged rustc_codegen_gccjit
#[no_mangle]
pub fn __rustc_codegen_backend() -> Box<dyn CodegenBackend> {
#[cfg(feature="master")]
let info = {
// Check whether the target supports 128-bit integers.
let context = Context::default();
Arc::new(Mutex::new(IntoDynSyncSend(context.get_target_info())))
};
#[cfg(not(feature="master"))]
let info = Arc::new(Mutex::new(IntoDynSyncSend(TargetInfo {
supports_128bit_integers: AtomicBool::new(false),
})));
Box::new(GccCodegenBackend {
supports_128bit_integers: Arc::new(Mutex::new(false)),
target_info: LockedTargetInfo { info },
})
}
@ -297,22 +375,7 @@ fn to_gcc_opt_level(optlevel: Option<OptLevel>) -> OptimizationLevel {
}
}
fn handle_native(name: &str) -> &str {
if name != "native" {
return name;
}
unimplemented!();
}
pub fn target_cpu(sess: &Session) -> &str {
match sess.opts.cg.target_cpu {
Some(ref name) => handle_native(name),
None => handle_native(sess.target.cpu.as_ref()),
}
}
pub fn target_features(sess: &Session, allow_unstable: bool) -> Vec<Symbol> {
pub fn target_features(sess: &Session, allow_unstable: bool, target_info: &LockedTargetInfo) -> Vec<Symbol> {
supported_target_features(sess)
.iter()
.filter_map(
@ -321,26 +384,13 @@ pub fn target_features(sess: &Session, allow_unstable: bool) -> Vec<Symbol> {
},
)
.filter(|_feature| {
// TODO(antoyo): implement a way to get enabled feature in libgccjit.
// Probably using the equivalent of __builtin_cpu_supports.
// TODO(antoyo): maybe use whatever outputs the following command:
// gcc -march=native -Q --help=target
#[cfg(feature="master")]
{
// NOTE: the CPU in the CI doesn't support sse4a, so disable it to make the stdarch tests pass in the CI.
(_feature.contains("sse") || _feature.contains("avx")) && !_feature.contains("avx512") && !_feature.contains("sse4a")
}
#[cfg(not(feature="master"))]
{
false
}
target_info.cpu_supports(_feature)
/*
adx, aes, avx, avx2, avx512bf16, avx512bitalg, avx512bw, avx512cd, avx512dq, avx512er, avx512f, avx512ifma,
avx512pf, avx512vbmi, avx512vbmi2, avx512vl, avx512vnni, avx512vp2intersect, avx512vpopcntdq,
bmi1, bmi2, cmpxchg16b, ermsb, f16c, fma, fxsr, gfni, lzcnt, movbe, pclmulqdq, popcnt, rdrand, rdseed, rtm,
sha, sse, sse2, sse3, sse4.1, sse4.2, sse4a, ssse3, tbm, vaes, vpclmulqdq, xsave, xsavec, xsaveopt, xsaves
*/
//false
})
.map(|feature| Symbol::intern(feature))
.collect()

View File

@ -182,6 +182,7 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
/// of that field's type - this is useful for taking the address of
/// that field and ensuring the struct has the right alignment.
fn gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> {
use crate::rustc_middle::ty::layout::FnAbiOf;
// This must produce the same result for `repr(transparent)` wrappers as for the inner type!
// In other words, this should generally not look at the type at all, but only at the
// layout.
@ -191,7 +192,14 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
if let Some(&ty) = cx.scalar_types.borrow().get(&self.ty) {
return ty;
}
let ty = self.scalar_gcc_type_at(cx, scalar, Size::ZERO);
let ty =
match *self.ty.kind() {
// NOTE: we cannot remove this match like in the LLVM codegen because the call
// to fn_ptr_backend_type handles the on-stack attribute.
// TODO(antoyo): find a less hackish way to handle the on-stack attribute.
ty::FnPtr(sig) => cx.fn_ptr_backend_type(&cx.fn_abi_of_fn_ptr(sig, ty::List::empty())),
_ => self.scalar_gcc_type_at(cx, scalar, Size::ZERO),
};
cx.scalar_types.borrow_mut().insert(self.ty, ty);
return ty;
}

View File

@ -3,6 +3,7 @@
# TODO(antoyo): rewrite to cargo-make (or just) or something like that to only rebuild the sysroot when needed?
set -e
#set -x
if [ -f ./gcc_path ]; then
export GCC_PATH=$(cat gcc_path)
@ -219,6 +220,7 @@ change-id = 115898
[rust]
codegen-backends = []
deny-warnings = false
verbose-tests = true
[build]
cargo = "$(rustup which cargo)"
@ -345,15 +347,19 @@ function test_rustc() {
git checkout -- tests/ui/issues/auxiliary/issue-3136-a.rs # contains //~ERROR, but shouldn't be removed
rm -r tests/ui/{abi*,extern/,unsized-locals/,proc-macro/,threads-sendsync/,thinlto/,borrowck/,chalkify/bugs/,test*,*lto*.rs,consts/const-float-bits-reject-conv.rs,consts/issue-miri-1910.rs} || true
rm tests/ui/mir/mir_heavy_promoted.rs # this tests is oom-killed in the CI.
for test in $(rg --files-with-matches "thread|lto" tests/ui); do
rm -r tests/ui/{abi*,extern/,unsized-locals/,proc-macro/,threads-sendsync/,thinlto/,borrowck/,chalkify/bugs/,test*,consts/const-float-bits-reject-conv.rs,consts/issue-miri-1910.rs} || true
rm tests/ui/mir/mir_heavy_promoted.rs # this test is oom-killed in the CI.
# Tests generating errors.
rm tests/ui/consts/const-eval/nonnull_as_ref_ub.rs tests/ui/consts/issue-94675.rs
for test in $(rg --files-with-matches "thread" tests/ui); do
rm $test
done
git checkout tests/ui/lto/auxiliary/dylib.rs
git checkout tests/ui/type-alias-impl-trait/auxiliary/cross_crate_ice.rs
git checkout tests/ui/type-alias-impl-trait/auxiliary/cross_crate_ice2.rs
git checkout tests/ui/macros/rfc-2011-nicer-assert-messages/auxiliary/common.rs
git checkout tests/ui/imports/ambiguous-1.rs
git checkout tests/ui/imports/ambiguous-4-extern.rs
git checkout tests/ui/entry-point/auxiliary/bad_main_functions.rs
RUSTC_ARGS="$TEST_FLAGS -Csymbol-mangling-version=v0 -Zcodegen-backend="$(pwd)"/../target/"$CHANNEL"/librustc_codegen_gcc."$dylib_ext" --sysroot "$(pwd)"/../build_sysroot/sysroot"

View File

@ -3,7 +3,8 @@
// Run-time:
// status: signal
#![feature(auto_traits, lang_items, no_core, start, intrinsics)]
#![feature(auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -3,7 +3,8 @@
// Run-time:
// status: signal
#![feature(auto_traits, lang_items, no_core, start, intrinsics)]
#![feature(auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -7,7 +7,8 @@
// 5
// 10
#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics)]
#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -124,7 +124,7 @@ fn main() {
// check const (ATT syntax)
let mut x: u64 = 42;
unsafe {
asm!("add {}, {}",
asm!("add ${}, {}",
const 1,
inout(reg) x,
options(att_syntax)

View File

@ -5,8 +5,8 @@
// 7 8
// 10
#![allow(unused_attributes)]
#![feature(auto_traits, lang_items, no_core, start, intrinsics, track_caller)]
#![allow(internal_features, unused_attributes)]
#![feature(auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs, track_caller)]
#![no_std]
#![no_core]

View File

@ -9,7 +9,8 @@
// Both args: 11
#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics,
unboxed_closures)]
unboxed_closures, rustc_attrs)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -5,7 +5,8 @@
// stdout: true
// 1
#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics)]
#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -4,6 +4,7 @@
// status: 0
#![feature(auto_traits, lang_items, no_core, start)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -4,6 +4,7 @@
// status: 2
#![feature(auto_traits, lang_items, no_core, start, intrinsics)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -4,6 +4,7 @@
// status: 1
#![feature(auto_traits, lang_items, no_core, start)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -4,7 +4,8 @@
// status: 0
// stdout: 1
#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics)]
#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -0,0 +1,10 @@
// Compiler:
//
// Run-time:
// status: 0
fn main() {
let mut value = (1, 1);
let ptr = &mut value as *mut (i32, i32);
println!("{:?}", ptr.wrapping_offset(10));
}

View File

@ -4,8 +4,8 @@
// stdout: Success
// status: signal
#![allow(unused_attributes)]
#![feature(auto_traits, lang_items, no_core, start, intrinsics)]
#![allow(internal_features, unused_attributes)]
#![feature(auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)]
#![no_std]
#![no_core]

View File

@ -7,8 +7,8 @@
// 6
// 11
#![allow(unused_attributes)]
#![feature(auto_traits, lang_items, no_core, start, intrinsics, track_caller)]
#![allow(internal_features, unused_attributes)]
#![feature(auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs, track_caller)]
#![no_std]
#![no_core]

View File

@ -5,8 +5,8 @@
// 39
// 10
#![allow(unused_attributes)]
#![feature(auto_traits, lang_items, no_core, start, intrinsics, arbitrary_self_types)]
#![allow(internal_features, unused_attributes)]
#![feature(auto_traits, lang_items, no_core, start, intrinsics, arbitrary_self_types, rustc_attrs)]
#![no_std]
#![no_core]

View File

@ -4,7 +4,8 @@
// status: 0
// stdout: 1
#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics)]
#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -7,6 +7,7 @@
// 42
#![feature(auto_traits, lang_items, no_core, start, intrinsics)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -4,7 +4,8 @@
// status: 0
// stdout: 5
#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics)]
#![feature(arbitrary_self_types, auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -9,7 +9,8 @@
// 12
// 1
#![feature(auto_traits, lang_items, no_core, start, intrinsics)]
#![feature(auto_traits, lang_items, no_core, start, intrinsics, rustc_attrs)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -6,6 +6,7 @@
// 2
#![feature(auto_traits, lang_items, no_core, start, intrinsics)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -5,6 +5,7 @@
// stdout: 3
#![feature(auto_traits, lang_items, no_core, start, intrinsics)]
#![allow(internal_features)]
#![no_std]
#![no_core]

View File

@ -46,10 +46,10 @@ def convert_to_string(content):
def extract_instrinsics_from_llvm(llvm_path, intrinsics):
p = subprocess.Popen(
["llvm-tblgen", "llvm/IR/Intrinsics.td"],
cwd=os.path.join(llvm_path, "llvm/include"),
stdout=subprocess.PIPE)
command = ["llvm-tblgen", "llvm/IR/Intrinsics.td"]
cwd = os.path.join(llvm_path, "llvm/include")
print("=> Running command `{}` from `{}`".format(command, cwd))
p = subprocess.Popen(command, cwd=cwd, stdout=subprocess.PIPE)
output, err = p.communicate()
lines = convert_to_string(output).splitlines()
pos = 0

View File

@ -0,0 +1,8 @@
#!/usr/bin/env bash
set -e
echo "[BUILD] build system" 1>&2
cd build_system
cargo build --release
cd ..
./build_system/target/release/y $@