Skip to content

Commit 55cd449

Browse files
committed
Auto merge of #143760 - tgross35:rollup-hyioynq, r=tgross35
Rollup of 7 pull requests Successful merges: - #142391 (rust: library: Add `setsid` method to `CommandExt` trait) - #143301 (`tests/ui`: A New Order [26/N]) - #143302 (`tests/ui`: A New Order [27/N]) - #143303 (`tests/ui`: A New Order [28/28] FINAL PART) - #143568 (std: sys: net: uefi: tcp4: Add timeout support) - #143708 (fix: Include frontmatter in -Zunpretty output ) - #143718 (Make UB transmutes really UB in LLVM) r? `@ghost` `@rustbot` modify labels: rollup
2 parents cdac44e + c6d2963 commit 55cd449

File tree

139 files changed

+1129
-668
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

139 files changed

+1129
-668
lines changed

compiler/rustc_ast_pretty/src/pprust/state.rs

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comment>
120120
pos += shebang_len;
121121
}
122122

123-
for token in rustc_lexer::tokenize(&text[pos..]) {
123+
for token in rustc_lexer::tokenize(&text[pos..], rustc_lexer::FrontmatterAllowed::Yes) {
124124
let token_text = &text[pos..pos + token.len as usize];
125125
match token.kind {
126126
rustc_lexer::TokenKind::Whitespace => {
@@ -171,6 +171,14 @@ fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comment>
171171
})
172172
}
173173
}
174+
rustc_lexer::TokenKind::Frontmatter { .. } => {
175+
code_to_the_left = false;
176+
comments.push(Comment {
177+
style: CommentStyle::Isolated,
178+
lines: vec![token_text.to_string()],
179+
pos: start_bpos + BytePos(pos as u32),
180+
});
181+
}
174182
_ => {
175183
code_to_the_left = true;
176184
}

compiler/rustc_codegen_ssa/src/mir/rvalue.rs

Lines changed: 4 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -207,9 +207,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
207207
{
208208
// These cases are all UB to actually hit, so don't emit code for them.
209209
// (The size mismatches are reachable via `transmute_unchecked`.)
210-
// We can't use unreachable because that's a terminator, and we
211-
// need something that can be in the middle of a basic block.
212-
bx.assume(bx.cx().const_bool(false))
210+
bx.unreachable_nonterminator();
213211
} else {
214212
// Since in this path we have a place anyway, we can store or copy to it,
215213
// making sure we use the destination place's alignment even if the
@@ -236,14 +234,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
236234
|| operand.layout.is_uninhabited()
237235
|| cast.is_uninhabited()
238236
{
239-
if !operand.layout.is_uninhabited() {
240-
// Since this is known statically and the input could have existed
241-
// without already having hit UB, might as well trap for it.
242-
bx.abort();
243-
}
237+
bx.unreachable_nonterminator();
244238

245-
// Because this transmute is UB, return something easy to generate,
246-
// since it's fine that later uses of the value are probably UB.
239+
// We still need to return a value of the appropriate type, but
240+
// it's already UB so do the easiest thing available.
247241
return OperandValue::poison(bx, cast);
248242
}
249243

compiler/rustc_codegen_ssa/src/traits/builder.rs

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -136,6 +136,16 @@ pub trait BuilderMethods<'a, 'tcx>:
136136
) -> Self::Value;
137137
fn unreachable(&mut self);
138138

139+
/// Like [`Self::unreachable`], but for use in the middle of a basic block.
140+
fn unreachable_nonterminator(&mut self) {
141+
// This is the preferred LLVM incantation for this per
142+
// https://llvm.org/docs/Frontend/PerformanceTips.html#other-things-to-consider
143+
// Other backends may override if they have a better way.
144+
let const_true = self.cx().const_bool(true);
145+
let poison_ptr = self.const_poison(self.cx().type_ptr());
146+
self.store(const_true, poison_ptr, Align::ONE);
147+
}
148+
139149
fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
140150
fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
141151
fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;

compiler/rustc_lexer/src/lib.rs

Lines changed: 19 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -273,14 +273,15 @@ pub fn strip_shebang(input: &str) -> Option<usize> {
273273
if let Some(input_tail) = input.strip_prefix("#!") {
274274
// Ok, this is a shebang but if the next non-whitespace token is `[`,
275275
// then it may be valid Rust code, so consider it Rust code.
276-
let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).find(|tok| {
277-
!matches!(
278-
tok,
279-
TokenKind::Whitespace
280-
| TokenKind::LineComment { doc_style: None }
281-
| TokenKind::BlockComment { doc_style: None, .. }
282-
)
283-
});
276+
let next_non_whitespace_token =
277+
tokenize(input_tail, FrontmatterAllowed::No).map(|tok| tok.kind).find(|tok| {
278+
!matches!(
279+
tok,
280+
TokenKind::Whitespace
281+
| TokenKind::LineComment { doc_style: None }
282+
| TokenKind::BlockComment { doc_style: None, .. }
283+
)
284+
});
284285
if next_non_whitespace_token != Some(TokenKind::OpenBracket) {
285286
// No other choice than to consider this a shebang.
286287
return Some(2 + input_tail.lines().next().unwrap_or_default().len());
@@ -303,8 +304,16 @@ pub fn validate_raw_str(input: &str, prefix_len: u32) -> Result<(), RawStrError>
303304
}
304305

305306
/// Creates an iterator that produces tokens from the input string.
306-
pub fn tokenize(input: &str) -> impl Iterator<Item = Token> {
307-
let mut cursor = Cursor::new(input, FrontmatterAllowed::No);
307+
///
308+
/// When parsing a full Rust document,
309+
/// first [`strip_shebang`] and then allow frontmatters with [`FrontmatterAllowed::Yes`].
310+
///
311+
/// When tokenizing a slice of a document, be sure to disallow frontmatters with [`FrontmatterAllowed::No`]
312+
pub fn tokenize(
313+
input: &str,
314+
frontmatter_allowed: FrontmatterAllowed,
315+
) -> impl Iterator<Item = Token> {
316+
let mut cursor = Cursor::new(input, frontmatter_allowed);
308317
std::iter::from_fn(move || {
309318
let token = cursor.advance_token();
310319
if token.kind != TokenKind::Eof { Some(token) } else { None }

compiler/rustc_lexer/src/tests.rs

Lines changed: 85 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -124,15 +124,17 @@ fn test_valid_shebang() {
124124
assert_eq!(strip_shebang(input), None);
125125
}
126126

127-
fn check_lexing(src: &str, expect: Expect) {
128-
let actual: String = tokenize(src).map(|token| format!("{:?}\n", token)).collect();
127+
fn check_lexing(src: &str, frontmatter_allowed: FrontmatterAllowed, expect: Expect) {
128+
let actual: String =
129+
tokenize(src, frontmatter_allowed).map(|token| format!("{:?}\n", token)).collect();
129130
expect.assert_eq(&actual)
130131
}
131132

132133
#[test]
133134
fn smoke_test() {
134135
check_lexing(
135136
"/* my source file */ fn main() { println!(\"zebra\"); }\n",
137+
FrontmatterAllowed::No,
136138
expect![[r#"
137139
Token { kind: BlockComment { doc_style: None, terminated: true }, len: 20 }
138140
Token { kind: Whitespace, len: 1 }
@@ -171,6 +173,7 @@ fn comment_flavors() {
171173
/** outer doc block */
172174
/*! inner doc block */
173175
",
176+
FrontmatterAllowed::No,
174177
expect![[r#"
175178
Token { kind: Whitespace, len: 1 }
176179
Token { kind: LineComment { doc_style: None }, len: 7 }
@@ -199,6 +202,7 @@ fn comment_flavors() {
199202
fn nested_block_comments() {
200203
check_lexing(
201204
"/* /* */ */'a'",
205+
FrontmatterAllowed::No,
202206
expect![[r#"
203207
Token { kind: BlockComment { doc_style: None, terminated: true }, len: 11 }
204208
Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
@@ -210,6 +214,7 @@ fn nested_block_comments() {
210214
fn characters() {
211215
check_lexing(
212216
"'a' ' ' '\\n'",
217+
FrontmatterAllowed::No,
213218
expect![[r#"
214219
Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
215220
Token { kind: Whitespace, len: 1 }
@@ -224,6 +229,7 @@ fn characters() {
224229
fn lifetime() {
225230
check_lexing(
226231
"'abc",
232+
FrontmatterAllowed::No,
227233
expect![[r#"
228234
Token { kind: Lifetime { starts_with_number: false }, len: 4 }
229235
"#]],
@@ -234,6 +240,7 @@ fn lifetime() {
234240
fn raw_string() {
235241
check_lexing(
236242
"r###\"\"#a\\b\x00c\"\"###",
243+
FrontmatterAllowed::No,
237244
expect![[r#"
238245
Token { kind: Literal { kind: RawStr { n_hashes: Some(3) }, suffix_start: 17 }, len: 17 }
239246
"#]],
@@ -257,6 +264,7 @@ b"a"
257264
r###"raw"###suffix
258265
br###"raw"###suffix
259266
"####,
267+
FrontmatterAllowed::No,
260268
expect![[r#"
261269
Token { kind: Whitespace, len: 1 }
262270
Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
@@ -286,3 +294,78 @@ br###"raw"###suffix
286294
"#]],
287295
)
288296
}
297+
298+
#[test]
299+
fn frontmatter_allowed() {
300+
check_lexing(
301+
r#"
302+
---cargo
303+
[dependencies]
304+
clap = "4"
305+
---
306+
307+
fn main() {}
308+
"#,
309+
FrontmatterAllowed::Yes,
310+
expect![[r#"
311+
Token { kind: Whitespace, len: 1 }
312+
Token { kind: Frontmatter { has_invalid_preceding_whitespace: false, invalid_infostring: false }, len: 38 }
313+
Token { kind: Whitespace, len: 2 }
314+
Token { kind: Ident, len: 2 }
315+
Token { kind: Whitespace, len: 1 }
316+
Token { kind: Ident, len: 4 }
317+
Token { kind: OpenParen, len: 1 }
318+
Token { kind: CloseParen, len: 1 }
319+
Token { kind: Whitespace, len: 1 }
320+
Token { kind: OpenBrace, len: 1 }
321+
Token { kind: CloseBrace, len: 1 }
322+
Token { kind: Whitespace, len: 1 }
323+
"#]],
324+
)
325+
}
326+
327+
#[test]
328+
fn frontmatter_disallowed() {
329+
check_lexing(
330+
r#"
331+
---cargo
332+
[dependencies]
333+
clap = "4"
334+
---
335+
336+
fn main() {}
337+
"#,
338+
FrontmatterAllowed::No,
339+
expect![[r#"
340+
Token { kind: Whitespace, len: 1 }
341+
Token { kind: Minus, len: 1 }
342+
Token { kind: Minus, len: 1 }
343+
Token { kind: Minus, len: 1 }
344+
Token { kind: Ident, len: 5 }
345+
Token { kind: Whitespace, len: 1 }
346+
Token { kind: OpenBracket, len: 1 }
347+
Token { kind: Ident, len: 12 }
348+
Token { kind: CloseBracket, len: 1 }
349+
Token { kind: Whitespace, len: 1 }
350+
Token { kind: Ident, len: 4 }
351+
Token { kind: Whitespace, len: 1 }
352+
Token { kind: Eq, len: 1 }
353+
Token { kind: Whitespace, len: 1 }
354+
Token { kind: Literal { kind: Str { terminated: true }, suffix_start: 3 }, len: 3 }
355+
Token { kind: Whitespace, len: 1 }
356+
Token { kind: Minus, len: 1 }
357+
Token { kind: Minus, len: 1 }
358+
Token { kind: Minus, len: 1 }
359+
Token { kind: Whitespace, len: 2 }
360+
Token { kind: Ident, len: 2 }
361+
Token { kind: Whitespace, len: 1 }
362+
Token { kind: Ident, len: 4 }
363+
Token { kind: OpenParen, len: 1 }
364+
Token { kind: CloseParen, len: 1 }
365+
Token { kind: Whitespace, len: 1 }
366+
Token { kind: OpenBrace, len: 1 }
367+
Token { kind: CloseBrace, len: 1 }
368+
Token { kind: Whitespace, len: 1 }
369+
"#]],
370+
)
371+
}

library/std/src/os/unix/process.rs

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -210,6 +210,9 @@ pub trait CommandExt: Sealed {
210210
/// intentional difference from the underlying `chroot` system call.)
211211
#[unstable(feature = "process_chroot", issue = "141298")]
212212
fn chroot<P: AsRef<Path>>(&mut self, dir: P) -> &mut process::Command;
213+
214+
#[unstable(feature = "process_setsid", issue = "105376")]
215+
fn setsid(&mut self, setsid: bool) -> &mut process::Command;
213216
}
214217

215218
#[stable(feature = "rust1", since = "1.0.0")]
@@ -260,6 +263,11 @@ impl CommandExt for process::Command {
260263
self.as_inner_mut().chroot(dir.as_ref());
261264
self
262265
}
266+
267+
fn setsid(&mut self, setsid: bool) -> &mut process::Command {
268+
self.as_inner_mut().setsid(setsid);
269+
self
270+
}
263271
}
264272

265273
/// Unix-specific extensions to [`process::ExitStatus`] and

library/std/src/sys/net/connection/uefi/mod.rs

Lines changed: 29 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,45 +1,62 @@
11
use crate::fmt;
22
use crate::io::{self, BorrowedCursor, IoSlice, IoSliceMut};
33
use crate::net::{Ipv4Addr, Ipv6Addr, Shutdown, SocketAddr};
4+
use crate::sync::{Arc, Mutex};
45
use crate::sys::unsupported;
56
use crate::time::Duration;
67

78
mod tcp;
89
pub(crate) mod tcp4;
910

10-
pub struct TcpStream(tcp::Tcp);
11+
pub struct TcpStream {
12+
inner: tcp::Tcp,
13+
read_timeout: Arc<Mutex<Option<Duration>>>,
14+
write_timeout: Arc<Mutex<Option<Duration>>>,
15+
}
1116

1217
impl TcpStream {
1318
pub fn connect(addr: io::Result<&SocketAddr>) -> io::Result<TcpStream> {
14-
tcp::Tcp::connect(addr?).map(Self)
19+
let inner = tcp::Tcp::connect(addr?, None)?;
20+
Ok(Self {
21+
inner,
22+
read_timeout: Arc::new(Mutex::new(None)),
23+
write_timeout: Arc::new(Mutex::new(None)),
24+
})
1525
}
1626

17-
pub fn connect_timeout(_: &SocketAddr, _: Duration) -> io::Result<TcpStream> {
18-
unsupported()
27+
pub fn connect_timeout(addr: &SocketAddr, timeout: Duration) -> io::Result<TcpStream> {
28+
let inner = tcp::Tcp::connect(addr, Some(timeout))?;
29+
Ok(Self {
30+
inner,
31+
read_timeout: Arc::new(Mutex::new(None)),
32+
write_timeout: Arc::new(Mutex::new(None)),
33+
})
1934
}
2035

21-
pub fn set_read_timeout(&self, _: Option<Duration>) -> io::Result<()> {
22-
unsupported()
36+
pub fn set_read_timeout(&self, t: Option<Duration>) -> io::Result<()> {
37+
self.read_timeout.set(t).unwrap();
38+
Ok(())
2339
}
2440

25-
pub fn set_write_timeout(&self, _: Option<Duration>) -> io::Result<()> {
26-
unsupported()
41+
pub fn set_write_timeout(&self, t: Option<Duration>) -> io::Result<()> {
42+
self.write_timeout.set(t).unwrap();
43+
Ok(())
2744
}
2845

2946
pub fn read_timeout(&self) -> io::Result<Option<Duration>> {
30-
unsupported()
47+
Ok(self.read_timeout.get_cloned().unwrap())
3148
}
3249

3350
pub fn write_timeout(&self) -> io::Result<Option<Duration>> {
34-
unsupported()
51+
Ok(self.write_timeout.get_cloned().unwrap())
3552
}
3653

3754
pub fn peek(&self, _: &mut [u8]) -> io::Result<usize> {
3855
unsupported()
3956
}
4057

4158
pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {
42-
self.0.read(buf)
59+
self.inner.read(buf, self.read_timeout()?)
4360
}
4461

4562
pub fn read_buf(&self, cursor: BorrowedCursor<'_>) -> io::Result<()> {
@@ -56,7 +73,7 @@ impl TcpStream {
5673
}
5774

5875
pub fn write(&self, buf: &[u8]) -> io::Result<usize> {
59-
self.0.write(buf)
76+
self.inner.write(buf, self.write_timeout()?)
6077
}
6178

6279
pub fn write_vectored(&self, buf: &[IoSlice<'_>]) -> io::Result<usize> {

0 commit comments

Comments (0)