Rollup of 7 pull requests #143760

Status: Closed · wants to merge 21 commits

Commits (21):
e9191ec  moved tests (Kivooeo, Jul 1, 2025)
1fb5e01  moved tests (Kivooeo, Jul 1, 2025)
d8273d3  std: sys: net: uefi: tcp4: Add timeout support (Ayush1325, Jul 7, 2025)
425cd0f  test: Verify frontmatter unpretty behavior (epage, Jul 9, 2025)
45a1e49  feat(lexer): Allow including frontmatter with 'tokenize' (epage, Jul 9, 2025)
e0f7687  rust: library: Add setsid method to CommandExt trait (LevitatingBusinessMan, Jun 12, 2025)
58d7c2d  Make UB transmutes really UB in LLVM (scottmcm, Jul 10, 2025)
dcbae2c  moved tests (Kivooeo, Jul 1, 2025)
3ad95cc  cleaned up some tests (Kivooeo, Jul 1, 2025)
259512e  cleaned up some tests (Kivooeo, Jul 1, 2025)
df53b3d  test(lexer): Add frontmatter unit test (epage, Jul 10, 2025)
a11ee56  fix: Include frontmatter in -Zunpretty output (epage, Jul 9, 2025)
f5fc872  Add `BuilderMethods::unreachable_nonterminator` (scottmcm, Jul 10, 2025)
cd2a50e  cleaned up some tests (Kivooeo, Jul 1, 2025)
fee7440  Rollup merge of #142391 - LevitatingBusinessMan:setsid, r=workingjubilee (tgross35, Jul 10, 2025)
a662802  Rollup merge of #143301 - Kivooeo:tf26, r=tgross35 (tgross35, Jul 10, 2025)
ea6c761  Rollup merge of #143302 - Kivooeo:tf27, r=tgross35 (tgross35, Jul 10, 2025)
671616a  Rollup merge of #143303 - Kivooeo:tf28, r=tgross35 (tgross35, Jul 10, 2025)
c89701c  Rollup merge of #143568 - Ayush1325:uefi-tcp4-timeout, r=tgross35 (tgross35, Jul 10, 2025)
ad7f3cc  Rollup merge of #143708 - epage:pretty, r=compiler-errors (tgross35, Jul 10, 2025)
c6d2963  Rollup merge of #143718 - scottmcm:ub-transmute-is-ub, r=WaffleLapkin (tgross35, Jul 10, 2025)
10 changes: 9 additions & 1 deletion compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -120,7 +120,7 @@ fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comment>
pos += shebang_len;
}

-for token in rustc_lexer::tokenize(&text[pos..]) {
+for token in rustc_lexer::tokenize(&text[pos..], rustc_lexer::FrontmatterAllowed::Yes) {
let token_text = &text[pos..pos + token.len as usize];
match token.kind {
rustc_lexer::TokenKind::Whitespace => {
@@ -171,6 +171,14 @@ fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comment>
})
}
}
+rustc_lexer::TokenKind::Frontmatter { .. } => {
+    code_to_the_left = false;
+    comments.push(Comment {
+        style: CommentStyle::Isolated,
+        lines: vec![token_text.to_string()],
+        pos: start_bpos + BytePos(pos as u32),
+    });
+}
_ => {
code_to_the_left = true;
}
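For reference, a minimal sketch of the kind of input this new match arm handles: a cargo-script style file whose frontmatter block `gather_comments` now records as an isolated comment, so pretty-printed output preserves it instead of dropping it (hypothetical file contents, modeled on the lexer tests added later in this rollup):

```rust
---cargo
[dependencies]
clap = "4"
---

fn main() {}
```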
14 changes: 4 additions & 10 deletions compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -207,9 +207,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
{
// These cases are all UB to actually hit, so don't emit code for them.
// (The size mismatches are reachable via `transmute_unchecked`.)
-// We can't use unreachable because that's a terminator, and we
-// need something that can be in the middle of a basic block.
-bx.assume(bx.cx().const_bool(false))
+bx.unreachable_nonterminator();
} else {
// Since in this path we have a place anyway, we can store or copy to it,
// making sure we use the destination place's alignment even if the
@@ -236,14 +234,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|| operand.layout.is_uninhabited()
|| cast.is_uninhabited()
{
-if !operand.layout.is_uninhabited() {
-    // Since this is known statically and the input could have existed
-    // without already having hit UB, might as well trap for it.
-    bx.abort();
-}
+bx.unreachable_nonterminator();

-// Because this transmute is UB, return something easy to generate,
-// since it's fine that later uses of the value are probably UB.
+// We still need to return a value of the appropriate type, but
+// it's already UB so do the easiest thing available.
return OperandValue::poison(bx, cast);
}

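A minimal sketch of code that reaches this path, assuming a nightly compiler (as the comment above notes, the size-mismatched case is only reachable via the internal `transmute_unchecked` intrinsic; this is an illustration, not part of the diff):

```rust
#![feature(core_intrinsics)]
#![allow(internal_features)]

// UB to ever call: a 4-byte to 8-byte transmute. With this change the
// backend emits the non-terminator unreachable marker and returns a
// poison value here, instead of the old `assume(false)` or an abort.
unsafe fn widen(x: u32) -> u64 {
    unsafe { core::intrinsics::transmute_unchecked(x) }
}

fn main() {
    // Taking the function pointer forces codegen without ever calling it.
    let _ = widen as unsafe fn(u32) -> u64;
}
```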
10 changes: 10 additions & 0 deletions compiler/rustc_codegen_ssa/src/traits/builder.rs
@@ -136,6 +136,16 @@ pub trait BuilderMethods<'a, 'tcx>:
) -> Self::Value;
fn unreachable(&mut self);

+/// Like [`Self::unreachable`], but for use in the middle of a basic block.
+fn unreachable_nonterminator(&mut self) {
+    // This is the preferred LLVM incantation for this per
+    // https://llvm.org/docs/Frontend/PerformanceTips.html#other-things-to-consider
+    // Other backends may override if they have a better way.
+    let const_true = self.cx().const_bool(true);
+    let poison_ptr = self.const_poison(self.cx().type_ptr());
+    self.store(const_true, poison_ptr, Align::ONE);
+}

fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
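A rough usage sketch, with a hypothetical codegen helper around the new method (`bx` is any `BuilderMethods` implementor; the flag is a stand-in for a real check such as `layout.is_uninhabited()`). The point is that the marker may be emitted mid-block: the default implementation lowers to `store i1 true, ptr poison`, which LLVM's optimizer treats as unreachable without ending the basic block:

```rust
fn emit_step<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(bx: &mut Bx, statically_ub: bool) {
    if statically_ub {
        // `bx.unreachable()` is a terminator and would end the block;
        // this marker lets codegen keep appending to the same block.
        bx.unreachable_nonterminator();
    }
    // ... emit the rest of the block as usual ...
}
```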
29 changes: 19 additions & 10 deletions compiler/rustc_lexer/src/lib.rs
@@ -273,14 +273,15 @@ pub fn strip_shebang(input: &str) -> Option<usize> {
if let Some(input_tail) = input.strip_prefix("#!") {
// Ok, this is a shebang but if the next non-whitespace token is `[`,
// then it may be valid Rust code, so consider it Rust code.
-let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).find(|tok| {
-    !matches!(
-        tok,
-        TokenKind::Whitespace
-            | TokenKind::LineComment { doc_style: None }
-            | TokenKind::BlockComment { doc_style: None, .. }
-    )
-});
+let next_non_whitespace_token =
+    tokenize(input_tail, FrontmatterAllowed::No).map(|tok| tok.kind).find(|tok| {
+        !matches!(
+            tok,
+            TokenKind::Whitespace
+                | TokenKind::LineComment { doc_style: None }
+                | TokenKind::BlockComment { doc_style: None, .. }
+        )
+    });
if next_non_whitespace_token != Some(TokenKind::OpenBracket) {
// No other choice than to consider this a shebang.
return Some(2 + input_tail.lines().next().unwrap_or_default().len());
@@ -303,8 +304,16 @@ pub fn validate_raw_str(input: &str, prefix_len: u32) -> Result<(), RawStrError>
}

/// Creates an iterator that produces tokens from the input string.
-pub fn tokenize(input: &str) -> impl Iterator<Item = Token> {
-    let mut cursor = Cursor::new(input, FrontmatterAllowed::No);
+///
+/// When parsing a full Rust document,
+/// first [`strip_shebang`] and then allow frontmatters with [`FrontmatterAllowed::Yes`].
+///
+/// When tokenizing a slice of a document, be sure to disallow frontmatters with [`FrontmatterAllowed::No`]
+pub fn tokenize(
+    input: &str,
+    frontmatter_allowed: FrontmatterAllowed,
+) -> impl Iterator<Item = Token> {
+    let mut cursor = Cursor::new(input, frontmatter_allowed);
std::iter::from_fn(move || {
let token = cursor.advance_token();
if token.kind != TokenKind::Eof { Some(token) } else { None }
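A short usage sketch of the new signature, following the doc comment above (hypothetical caller code, not part of this diff):

```rust
use rustc_lexer::{strip_shebang, tokenize, FrontmatterAllowed};

fn lex_whole_document(src: &str) {
    // Full document: strip any shebang first, then allow frontmatter.
    let start = strip_shebang(src).unwrap_or(0);
    for token in tokenize(&src[start..], FrontmatterAllowed::Yes) {
        println!("{:?}", token);
    }
}

fn lex_snippet(snippet: &str) {
    // A slice from the middle of a document cannot start with frontmatter.
    for token in tokenize(snippet, FrontmatterAllowed::No) {
        println!("{:?}", token);
    }
}
```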
87 changes: 85 additions & 2 deletions compiler/rustc_lexer/src/tests.rs
@@ -124,15 +124,17 @@ fn test_valid_shebang() {
assert_eq!(strip_shebang(input), None);
}

-fn check_lexing(src: &str, expect: Expect) {
-    let actual: String = tokenize(src).map(|token| format!("{:?}\n", token)).collect();
+fn check_lexing(src: &str, frontmatter_allowed: FrontmatterAllowed, expect: Expect) {
+    let actual: String =
+        tokenize(src, frontmatter_allowed).map(|token| format!("{:?}\n", token)).collect();
expect.assert_eq(&actual)
}

#[test]
fn smoke_test() {
check_lexing(
"/* my source file */ fn main() { println!(\"zebra\"); }\n",
+FrontmatterAllowed::No,
expect![[r#"
Token { kind: BlockComment { doc_style: None, terminated: true }, len: 20 }
Token { kind: Whitespace, len: 1 }
@@ -171,6 +173,7 @@ fn comment_flavors() {
/** outer doc block */
/*! inner doc block */
",
+FrontmatterAllowed::No,
expect![[r#"
Token { kind: Whitespace, len: 1 }
Token { kind: LineComment { doc_style: None }, len: 7 }
@@ -199,6 +202,7 @@ fn comment_flavors() {
fn nested_block_comments() {
check_lexing(
"/* /* */ */'a'",
+FrontmatterAllowed::No,
expect![[r#"
Token { kind: BlockComment { doc_style: None, terminated: true }, len: 11 }
Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
@@ -210,6 +214,7 @@ fn nested_block_comments() {
fn characters() {
check_lexing(
"'a' ' ' '\\n'",
+FrontmatterAllowed::No,
expect![[r#"
Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
Token { kind: Whitespace, len: 1 }
@@ -224,6 +229,7 @@ fn characters() {
fn lifetime() {
check_lexing(
"'abc",
+FrontmatterAllowed::No,
expect![[r#"
Token { kind: Lifetime { starts_with_number: false }, len: 4 }
"#]],
@@ -234,6 +240,7 @@ fn lifetime() {
fn raw_string() {
check_lexing(
"r###\"\"#a\\b\x00c\"\"###",
+FrontmatterAllowed::No,
expect![[r#"
Token { kind: Literal { kind: RawStr { n_hashes: Some(3) }, suffix_start: 17 }, len: 17 }
"#]],
@@ -257,6 +264,7 @@ b"a"
r###"raw"###suffix
br###"raw"###suffix
"####,
+FrontmatterAllowed::No,
expect![[r#"
Token { kind: Whitespace, len: 1 }
Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
@@ -286,3 +294,78 @@ br###"raw"###suffix
"#]],
)
}

+#[test]
+fn frontmatter_allowed() {
+    check_lexing(
+        r#"
+---cargo
+[dependencies]
+clap = "4"
+---
+fn main() {}
+"#,
+        FrontmatterAllowed::Yes,
+        expect![[r#"
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Frontmatter { has_invalid_preceding_whitespace: false, invalid_infostring: false }, len: 38 }
+            Token { kind: Whitespace, len: 2 }
+            Token { kind: Ident, len: 2 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Ident, len: 4 }
+            Token { kind: OpenParen, len: 1 }
+            Token { kind: CloseParen, len: 1 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: OpenBrace, len: 1 }
+            Token { kind: CloseBrace, len: 1 }
+            Token { kind: Whitespace, len: 1 }
+        "#]],
+    )
+}
+
+#[test]
+fn frontmatter_disallowed() {
+    check_lexing(
+        r#"
+---cargo
+[dependencies]
+clap = "4"
+---
+fn main() {}
+"#,
+        FrontmatterAllowed::No,
+        expect![[r#"
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Minus, len: 1 }
+            Token { kind: Minus, len: 1 }
+            Token { kind: Minus, len: 1 }
+            Token { kind: Ident, len: 5 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: OpenBracket, len: 1 }
+            Token { kind: Ident, len: 12 }
+            Token { kind: CloseBracket, len: 1 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Ident, len: 4 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Eq, len: 1 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Literal { kind: Str { terminated: true }, suffix_start: 3 }, len: 3 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Minus, len: 1 }
+            Token { kind: Minus, len: 1 }
+            Token { kind: Minus, len: 1 }
+            Token { kind: Whitespace, len: 2 }
+            Token { kind: Ident, len: 2 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: Ident, len: 4 }
+            Token { kind: OpenParen, len: 1 }
+            Token { kind: CloseParen, len: 1 }
+            Token { kind: Whitespace, len: 1 }
+            Token { kind: OpenBrace, len: 1 }
+            Token { kind: CloseBrace, len: 1 }
+            Token { kind: Whitespace, len: 1 }
+        "#]],
+    )
+}
8 changes: 8 additions & 0 deletions library/std/src/os/unix/process.rs
@@ -210,6 +210,9 @@ pub trait CommandExt: Sealed {
/// intentional difference from the underlying `chroot` system call.)
#[unstable(feature = "process_chroot", issue = "141298")]
fn chroot<P: AsRef<Path>>(&mut self, dir: P) -> &mut process::Command;

#[unstable(feature = "process_setsid", issue = "105376")]
fn setsid(&mut self, setsid: bool) -> &mut process::Command;
}

#[stable(feature = "rust1", since = "1.0.0")]
@@ -260,6 +263,11 @@ impl CommandExt for process::Command {
self.as_inner_mut().chroot(dir.as_ref());
self
}

+fn setsid(&mut self, setsid: bool) -> &mut process::Command {
+    self.as_inner_mut().setsid(setsid);
+    self
+}
}

/// Unix-specific extensions to [`process::ExitStatus`] and
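A hedged usage sketch of the new unstable method (nightly-only, behind the `process_setsid` feature, tracking issue #105376; the child-side details live in `as_inner_mut().setsid(..)`, presumably a `setsid(2)` call between fork and exec):

```rust
#![feature(process_setsid)]

use std::os::unix::process::CommandExt;
use std::process::{Child, Command};

fn spawn_in_new_session() -> std::io::Result<Child> {
    // Ask the child to start a new session, detaching it from the
    // controlling terminal.
    Command::new("sleep").arg("60").setsid(true).spawn()
}
```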
41 changes: 29 additions & 12 deletions library/std/src/sys/net/connection/uefi/mod.rs
@@ -1,45 +1,62 @@
use crate::fmt;
use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut};
use crate::net::{Ipv4Addr, Ipv6Addr, Shutdown, SocketAddr};
+use crate::sync::{Arc, Mutex};
use crate::sys::unsupported;
use crate::time::Duration;

mod tcp;
pub(crate) mod tcp4;

-pub struct TcpStream(tcp::Tcp);
+pub struct TcpStream {
+    inner: tcp::Tcp,
+    read_timeout: Arc<Mutex<Option<Duration>>>,
+    write_timeout: Arc<Mutex<Option<Duration>>>,
+}

impl TcpStream {
pub fn connect(addr: io::Result<&SocketAddr>) -> io::Result<TcpStream> {
-tcp::Tcp::connect(addr?).map(Self)
+let inner = tcp::Tcp::connect(addr?, None)?;
+Ok(Self {
+    inner,
+    read_timeout: Arc::new(Mutex::new(None)),
+    write_timeout: Arc::new(Mutex::new(None)),
+})
}

-pub fn connect_timeout(_: &SocketAddr, _: Duration) -> io::Result<TcpStream> {
-    unsupported()
+pub fn connect_timeout(addr: &SocketAddr, timeout: Duration) -> io::Result<TcpStream> {
+    let inner = tcp::Tcp::connect(addr, Some(timeout))?;
+    Ok(Self {
+        inner,
+        read_timeout: Arc::new(Mutex::new(None)),
+        write_timeout: Arc::new(Mutex::new(None)),
+    })
}

-pub fn set_read_timeout(&self, _: Option<Duration>) -> io::Result<()> {
-    unsupported()
+pub fn set_read_timeout(&self, t: Option<Duration>) -> io::Result<()> {
+    self.read_timeout.set(t).unwrap();
+    Ok(())
}

-pub fn set_write_timeout(&self, _: Option<Duration>) -> io::Result<()> {
-    unsupported()
+pub fn set_write_timeout(&self, t: Option<Duration>) -> io::Result<()> {
+    self.write_timeout.set(t).unwrap();
+    Ok(())
}

pub fn read_timeout(&self) -> io::Result<Option<Duration>> {
-unsupported()
+Ok(self.read_timeout.get_cloned().unwrap())
}

pub fn write_timeout(&self) -> io::Result<Option<Duration>> {
-unsupported()
+Ok(self.write_timeout.get_cloned().unwrap())
}

pub fn peek(&self, _: &mut [u8]) -> io::Result<usize> {
unsupported()
}

pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {
-self.0.read(buf)
+self.inner.read(buf, self.read_timeout()?)
}

pub fn read_buf(&self, cursor: BorrowedCursor<'_>) -> io::Result<()> {
@@ -56,7 +73,7 @@ impl TcpStream {
}

pub fn write(&self, buf: &[u8]) -> io::Result<usize> {
-self.0.write(buf)
+self.inner.write(buf, self.write_timeout()?)
}

pub fn write_vectored(&self, buf: &[IoSlice<'_>]) -> io::Result<usize> {
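With this change, the portable timeout API on `std::net::TcpStream` works on UEFI targets instead of returning `unsupported()`. A minimal sketch using only stable `std::net` calls (the address is a placeholder):

```rust
use std::io::Read;
use std::net::TcpStream;
use std::time::Duration;

fn read_with_timeouts() -> std::io::Result<Vec<u8>> {
    let mut stream = TcpStream::connect("192.0.2.1:80")?;
    stream.set_read_timeout(Some(Duration::from_secs(5)))?;
    stream.set_write_timeout(Some(Duration::from_secs(5)))?;
    let mut buf = Vec::new();
    // Reads and writes now honor the configured timeouts rather than
    // blocking indefinitely.
    stream.read_to_end(&mut buf)?;
    Ok(buf)
}
```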