
Commit 5a1d52d

Commit message: reuse stuff

1 parent 485c0a1 commit 5a1d52d

3 files changed: +91 -20 lines


library/proc_macro/src/bridge/client.rs

Lines changed: 6 additions & 1 deletion
@@ -2,6 +2,7 @@
 
 use std::cell::RefCell;
 use std::marker::PhantomData;
+use std::num::NonZero;
 use std::sync::atomic::AtomicU32;
 
 use super::*;
@@ -110,6 +111,10 @@ impl Clone for TokenStream {
 }
 
 impl Span {
+    pub(crate) fn dummy() -> Span {
+        Span { handle: NonZero::new(1).unwrap() }
+    }
+
     pub(crate) fn def_site() -> Span {
         Bridge::with(|bridge| bridge.globals.def_site)
     }
@@ -197,7 +202,7 @@ mod state {
     thread_local! {
         static BRIDGE_STATE: Cell<*const ()> = const { Cell::new(ptr::null()) };
         static STANDALONE: RefCell<Bridge<'static>> = RefCell::new(standalone_bridge());
-        pub(super) static USE_STANDALONE: Cell<StandaloneLevel> = const { Cell::new(StandaloneLevel::Never) };
+        pub(super) static USE_STANDALONE: Cell<StandaloneLevel> = const { Cell::new(StandaloneLevel::FallbackOnly) };
     }
 
     fn standalone_bridge() -> Bridge<'static> {
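Editor's note: the new `Span::dummy()` above reserves handle value 1 on a `NonZero` field. A minimal, self-contained sketch of that pattern follows; the type and field names are placeholders for illustration, not the bridge's actual definitions.

use std::num::NonZero;

// Stand-in for a bridge handle type; names are illustrative only.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct SpanHandle(NonZero<u32>);

impl SpanHandle {
    // Reserve handle value 1 as the "dummy" span, mirroring the
    // `Span::dummy()` added in this commit.
    fn dummy() -> SpanHandle {
        SpanHandle(NonZero::new(1).unwrap())
    }
}

fn main() {
    let s = SpanHandle::dummy();
    assert_eq!(s.0.get(), 1);
    println!("{s:?}");
}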

library/proc_macro/src/bridge/standalone.rs

Lines changed: 74 additions & 9 deletions
@@ -3,10 +3,10 @@
 use std::cell::RefCell;
 use std::ops::{Bound, Range};
 
-use crate::Delimiter;
+use crate::{Delimiter, LEGAL_PUNCT_CHARS};
 use crate::bridge::client::Symbol;
 use crate::bridge::fxhash::FxHashMap;
-use crate::bridge::{Diagnostic, ExpnGlobals, LitKind, Literal, TokenTree, server};
+use crate::bridge::{server, DelimSpan, Diagnostic, ExpnGlobals, Group, LitKind, Literal, Punct, TokenTree};
 
 pub struct NoRustc;
 
@@ -130,7 +130,22 @@ impl server::FreeFunctions for NoRustc {
     fn track_path(&mut self, _path: &str) {}
 
     fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span, Self::Symbol>, ()> {
-        todo!()
+        let mut chars = s.chars();
+        let Some(first) = chars.next() else {
+            return Err(());
+        };
+        br"";
+        cr"";
+
+        match first {
+            'b' => todo!(),
+            'c' => todo!(),
+            'r' => todo!(),
+            '0'..='9' | '-' => todo!(),
+            '\'' => todo!(),
+            '"' => todo!(),
+            _ => Err(())
+        }
     }
 
     fn emit_diagnostic(&mut self, diagnostic: Diagnostic<Self::Span>) {
@@ -148,10 +163,53 @@ impl server::TokenStream for NoRustc {
     }
 
     fn from_str(&mut self, src: &str) -> Self::TokenStream {
-        todo!()
+        /// Returns the delimiter, and whether it is the opening form.
+        fn char_to_delim(c: char) -> Option<(Delimiter, bool)> {
+            Some(match c {
+                '(' => (Delimiter::Parenthesis, true),
+                ')' => (Delimiter::Parenthesis, false),
+                '{' => (Delimiter::Brace, true),
+                '}' => (Delimiter::Brace, false),
+                '[' => (Delimiter::Bracket, true),
+                ']' => (Delimiter::Bracket, false),
+                _ => return None,
+            })
+        }
+
+        let mut unfinished_streams = vec![TokenStream::new()];
+        let mut unclosed_delimiters = Vec::new();
+        let mut current_ident = String::new();
+        for c in src.chars() {
+            if let Some((delim, is_opening)) = char_to_delim(c) {
+                if is_opening {
+                    unclosed_delimiters.push(delim);
+                    unfinished_streams.push(TokenStream::new());
+                } else if unclosed_delimiters.pop() == Some(delim) {
+                    let group = TokenTree::<_, _, Symbol>::Group(Group {
+                        delimiter: delim,
+                        stream: unfinished_streams.pop(),
+                        span: DelimSpan::from_single(Span::DUMMY)
+                    });
+                    unfinished_streams.last_mut().unwrap().0.push(group);
+                } else {
+                    panic!("cannot parse string into token stream")
+                }
+            } else if LEGAL_PUNCT_CHARS.contains(&c) {
+                unfinished_streams.last_mut().unwrap().0.push(TokenTree::Punct(Punct {
+                    ch: c as u8,
+                    joint: false, // TODO
+                    span: Span::DUMMY,
+                }));
+            }
+            match c {
+                _ => todo!(),
+            }
+        }
+        unfinished_streams[0].clone()
     }
 
     fn to_string(&mut self, tokens: &Self::TokenStream) -> String {
+        /*
         /// Returns a string containing exactly `num` '#' characters.
         /// Uses a 256-character source string literal which is always safe to
         /// index with a `u8` index.
@@ -164,7 +222,7 @@ impl server::TokenStream for NoRustc {
        ";
            const _: () = assert!(HASHES.len() == 256);
            &HASHES[..num as usize]
-        }
+        }*/
 
         let mut s = String::new();
         let mut last = String::new();
@@ -200,13 +258,20 @@ impl server::TokenStream for NoRustc {
                    }
                }
                TokenTree::Literal(lit) => {
-                    let inner = if let Some(suffix) = lit.suffix {
+                    let respanned = Literal {
+                        kind: lit.kind,
+                        symbol: lit.symbol,
+                        suffix: lit.suffix,
+                        span: super::client::Span::dummy(),
+                    };
+                    crate::Literal(respanned).to_string()
+                    /*let inner = if let Some(suffix) = lit.suffix {
                        format!("{}{suffix}", lit.symbol)
                    } else {
                        lit.symbol.to_string()
                    };
                    match lit.kind {
-                        LitKind::Byte => todo!(),
+                        LitKind::Byte => format!("b'{inner}'"),
                        LitKind::ByteStr => format!("b\"{inner}\""),
                        LitKind::ByteStrRaw(raw) => {
                            format!("br{0}\"{inner}\"{0}", get_hashes_str(raw))
@@ -220,7 +285,7 @@ impl server::TokenStream for NoRustc {
                        LitKind::Float | LitKind::Integer => inner,
                        LitKind::Str => format!("\"{inner}\""),
                        LitKind::StrRaw(raw) => format!("r{0}\"{inner}\"{0}", get_hashes_str(raw)),
-                    }
+                    }*/
                }
                TokenTree::Punct(punct) => {
                    let c = punct.ch as char;
@@ -231,7 +296,7 @@ impl server::TokenStream for NoRustc {
                }
            };
 
-            const NON_SEPARATABLE_TOKENS: &[(char, char)] = &[(':', ':'), ('-', '>')];
+            const NON_SEPARATABLE_TOKENS: &[(char, char)] = &[(':', ':'), ('-', '>'), ('=', '>')];
 
            for (first, second) in NON_SEPARATABLE_TOKENS {
                if second_last == first.to_string() && last == second.to_string() && new_part != ":"
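Editor's note: the `from_str` hunk above builds nested groups with a stack of unfinished streams, popping a stream into a `Group` when the matching closing delimiter arrives. A rough, self-contained sketch of just that technique follows; the types and names here are invented for illustration and are not the bridge's own.

// Delimiter-stack parsing: one unfinished stream per open delimiter.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Delim { Paren, Brace, Bracket }

#[derive(Debug)]
enum Tok {
    Group(Delim, Vec<Tok>),
    Char(char),
}

fn parse(src: &str) -> Result<Vec<Tok>, String> {
    // `streams[0]` is the top-level stream; each opener pushes a new one.
    let mut streams: Vec<Vec<Tok>> = vec![Vec::new()];
    let mut open: Vec<Delim> = Vec::new();

    for c in src.chars() {
        let (delim, is_open) = match c {
            '(' => (Some(Delim::Paren), true),
            ')' => (Some(Delim::Paren), false),
            '{' => (Some(Delim::Brace), true),
            '}' => (Some(Delim::Brace), false),
            '[' => (Some(Delim::Bracket), true),
            ']' => (Some(Delim::Bracket), false),
            _ => (None, false),
        };
        match delim {
            Some(d) if is_open => {
                open.push(d);
                streams.push(Vec::new());
            }
            Some(d) => {
                // A closer must match the most recent opener.
                if open.pop() != Some(d) {
                    return Err(format!("mismatched `{c}`"));
                }
                let inner = streams.pop().unwrap();
                streams.last_mut().unwrap().push(Tok::Group(d, inner));
            }
            None if !c.is_whitespace() => streams.last_mut().unwrap().push(Tok::Char(c)),
            None => {}
        }
    }
    if !open.is_empty() {
        return Err("unclosed delimiter".into());
    }
    Ok(streams.pop().unwrap())
}

fn main() {
    println!("{:?}", parse("a(b[c])").unwrap());
}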

library/proc_macro/src/lib.rs

Lines changed: 11 additions & 10 deletions
@@ -969,6 +969,11 @@ pub enum Spacing {
     Alone,
 }
 
+pub(crate) const LEGAL_PUNCT_CHARS: &[char] = &[
+    '=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^', '&', '|', '@', '.', ',', ';',
+    ':', '#', '$', '?', '\'',
+];
+
 impl Punct {
     /// Creates a new `Punct` from the given character and spacing.
     /// The `ch` argument must be a valid punctuation character permitted by the language,
@@ -978,11 +983,7 @@ impl Punct {
     /// which can be further configured with the `set_span` method below.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn new(ch: char, spacing: Spacing) -> Punct {
-        const LEGAL_CHARS: &[char] = &[
-            '=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^', '&', '|', '@', '.', ',', ';',
-            ':', '#', '$', '?', '\'',
-        ];
-        if !LEGAL_CHARS.contains(&ch) {
+        if !LEGAL_PUNCT_CHARS.contains(&ch) {
            panic!("unsupported character `{:?}`", ch);
        }
        Punct(bridge::Punct {
@@ -1178,7 +1179,7 @@ macro_rules! unsuffixed_int_literals {
        /// specified on this token, meaning that invocations like
        /// `Literal::i8_unsuffixed(1)` are equivalent to
        /// `Literal::u32_unsuffixed(1)`.
-        /// Literals created from negative numbers might not survive rountrips through
+        /// Literals created from negative numbers might not survive roundtrips through
        /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
        ///
        /// Literals created through this method have the `Span::call_site()`
@@ -1241,7 +1242,7 @@ impl Literal {
     /// This constructor is similar to those like `Literal::i8_unsuffixed` where
     /// the float's value is emitted directly into the token but no suffix is
     /// used, so it may be inferred to be a `f64` later in the compiler.
-    /// Literals created from negative numbers might not survive rountrips through
+    /// Literals created from negative numbers might not survive roundtrips through
     /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
     ///
     /// # Panics
@@ -1266,7 +1267,7 @@ impl Literal {
     /// specified is the preceding part of the token and `f32` is the suffix of
     /// the token. This token will always be inferred to be an `f32` in the
     /// compiler.
-    /// Literals created from negative numbers might not survive rountrips through
+    /// Literals created from negative numbers might not survive roundtrips through
     /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
     ///
     /// # Panics
@@ -1286,7 +1287,7 @@ impl Literal {
     /// This constructor is similar to those like `Literal::i8_unsuffixed` where
     /// the float's value is emitted directly into the token but no suffix is
     /// used, so it may be inferred to be a `f64` later in the compiler.
-    /// Literals created from negative numbers might not survive rountrips through
+    /// Literals created from negative numbers might not survive roundtrips through
     /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
     ///
     /// # Panics
@@ -1311,7 +1312,7 @@ impl Literal {
     /// specified is the preceding part of the token and `f64` is the suffix of
     /// the token. This token will always be inferred to be an `f64` in the
     /// compiler.
-    /// Literals created from negative numbers might not survive rountrips through
+    /// Literals created from negative numbers might not survive roundtrips through
     /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
     ///
     /// # Panics
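Editor's note: the first two hunks above hoist the punctuation list out of `Punct::new` into a crate-level `pub(crate) const LEGAL_PUNCT_CHARS` so the standalone bridge can reuse it. A self-contained sketch of that reuse pattern follows; the free functions here are invented for illustration, not the proc_macro API.

// One shared list, consumed by both a constructor-style check and a parser-style check.
const LEGAL_PUNCT_CHARS: &[char] = &[
    '=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^', '&', '|', '@', '.', ',', ';',
    ':', '#', '$', '?', '\'',
];

// Constructor-style check, in the spirit of `Punct::new`.
fn new_punct(ch: char) -> char {
    if !LEGAL_PUNCT_CHARS.contains(&ch) {
        panic!("unsupported character `{ch:?}`");
    }
    ch
}

// Parser-style check, in the spirit of the standalone `from_str`.
fn is_punct(ch: char) -> bool {
    LEGAL_PUNCT_CHARS.contains(&ch)
}

fn main() {
    assert!(is_punct('+'));
    assert_eq!(new_punct(';'), ';');
}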
