pw_tokenizer/lib.rs
1// Copyright 2023 The Pigweed Authors
2//
3// Licensed under the Apache License, Version 2.0 (the "License"); you may not
4// use this file except in compliance with the License. You may obtain a copy of
5// the License at
6//
7// https://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12// License for the specific language governing permissions and limitations under
13// the License.
14
15//! `pw_tokenizer` - Efficient string handling and printf style encoding.
16//!
17//! Logging is critical, but developers are often forced to choose between
18//! additional logging or saving crucial flash space. The `pw_tokenizer` crate
19//! helps address this by replacing printf-style strings with binary tokens
20//! during compilation. This enables extensive logging with substantially less
21//! memory usage.
22//!
23//! For a more in depth explanation of the systems design and motivations,
24//! see [Pigweed's pw_tokenizer module documentation](https://pigweed.dev/pw_tokenizer/).
25//!
26//! # Examples
27//!
28//! Pigweed's tokenization database uses `printf` style strings internally so
29//! those are supported directly.
30//!
31//! ```
32//! use pw_tokenizer::tokenize_printf_to_buffer;
33//!
34//! let mut buffer = [0u8; 1024];
35//! let len = tokenize_printf_to_buffer!(&mut buffer, "The answer is %d", 42)?;
36//!
37//! // 4 bytes used to encode the token and one to encode the value 42. This
38//! // is a **3.5x** reduction in size compared to the raw string!
39//! assert_eq!(len, 5);
40//! # Ok::<(), pw_status::Error>(())
41//! ```
42//!
43//! We also support Rust's `core::fmt` style syntax. These format strings are
//! converted to `printf` style at compile time to maintain compatibility with the
45//! rest of the Pigweed tokenizer ecosystem. The below example produces the
46//! same token and output as the above one.
47//!
48//! ```
49//! use pw_tokenizer::tokenize_core_fmt_to_buffer;
50//!
51//! let mut buffer = [0u8; 1024];
52//! let len = tokenize_core_fmt_to_buffer!(&mut buffer, "The answer is {}", 42 as i32)?;
53//! assert_eq!(len, 5);
54//! # Ok::<(), pw_status::Error>(())
55//! ```
56#![cfg_attr(not(feature = "std"), no_std)]
57#![deny(missing_docs)]
58
59use pw_status::Result;
60
#[doc(hidden)]
pub mod internal;

#[doc(hidden)]
// Creating a __private namespace allows us a way to get to the modules
// we need from macros by doing:
// use $crate::__private as __pw_tokenizer_crate;
//
// This is how proc macro generated code can reliably reference back to
// `pw_tokenizer` while still allowing a user to import it under a different
// name.
pub mod __private {
    // Re-exports needed by the code that this crate's proc macros expand to.
    pub use pw_bytes::concat_static_strs;
    pub use pw_format_core::{PrintfFormatter, PrintfHexFormatter, PrintfUpperHexFormatter};
    pub use pw_status::Result;
    pub use pw_stream::{Cursor, Seek, WriteInteger, WriteVarint};
    pub use pw_tokenizer_core::hash_string;
    pub use pw_tokenizer_macro::{
        _token, _tokenize_core_fmt_to_buffer, _tokenize_core_fmt_to_writer,
        _tokenize_printf_to_buffer, _tokenize_printf_to_writer,
    };

    // Also expose this crate's own public items through the alias so
    // generated code can reach them the same way.
    pub use crate::*;
}
85
/// Return the [`u32`] token for the specified string and add it to the token
/// database.
///
/// This is where the magic happens in `pw_tokenizer`! ... and by magic
/// we mean hiding information in a special linker section that ends up in the
/// final elf binary but does not get flashed to the device.
///
/// Two things are accomplished here:
/// 1) The string is hashed into its stable `u32` token. This is the value that
///    is returned from the macro.
/// 2) A [token database entry](https://pigweed.dev/pw_tokenizer/design.html#binary-database-format)
///    is generated, assigned to a unique static symbol, placed in a linker
///    section named `pw_tokenizer.entries.<TOKEN_HASH>`. A
///    [linker script](https://pigweed.googlesource.com/pigweed/pigweed/+/refs/heads/main/pw_tokenizer/pw_tokenizer_linker_sections.ld)
///    is responsible for picking these symbols up and aggregating them into a
///    single `.pw_tokenizer.entries` section in the final binary.
///
/// # Example
/// ```
/// use pw_tokenizer::token;
///
/// let token = token!("hello, \"world\"");
/// assert_eq!(token, 3537412730);
/// ```
///
/// Currently there is no support for encoding tokens to specific domains
/// or with "fixed lengths" per [`pw_tokenizer_core::hash_bytes_fixed`].
#[macro_export]
macro_rules! token {
    ($string:literal) => {{
        // The alias is how code generated by the `_token!` proc macro
        // reliably refers back to this crate. Invoke the proc macro through
        // the same alias for consistency with the other macros in this file.
        use $crate::__private as __pw_tokenizer_crate;
        __pw_tokenizer_crate::_token!($string)
    }};
}
120
/// Tokenize a `core::fmt` style format string and arguments to an [`AsMut<u8>`]
/// buffer. The format string is converted to a `printf` style format string at
/// compile time and its token is added to the token database.
///
/// See [`token`] for an explanation on how strings are tokenized and entries
/// are added to the token database. The token's domain is set to `""`.
///
/// Returns a [`pw_status::Result<usize>`] containing the number of bytes
/// written to the buffer.
///
/// `tokenize_core_fmt_to_buffer!` supports concatenation of format strings as
/// described in [`pw_format::macros::FormatAndArgs`].
///
/// # Errors
/// - [`pw_status::Error::OutOfRange`] - Buffer is not large enough to fit
///   tokenized data.
/// - [`pw_status::Error::InvalidArgument`] - Invalid buffer was provided.
///
/// # Example
///
/// ```
/// use pw_tokenizer::tokenize_core_fmt_to_buffer;
///
/// // Tokenize a format string and argument into a buffer.
/// let mut buffer = [0u8; 1024];
/// let len = tokenize_core_fmt_to_buffer!(&mut buffer, "The answer is {}", 42 as i32)?;
///
/// // 4 bytes used to encode the token and one to encode the value 42.
/// assert_eq!(len, 5);
///
/// // The format string can be composed of multiple string literals using
/// // the custom `PW_FMT_CONCAT` operator.
/// let len = tokenize_core_fmt_to_buffer!(&mut buffer, "Hello " PW_FMT_CONCAT "Pigweed")?;
///
/// // Only a single 4 byte token is emitted after concatenation of the string
/// // literals above.
/// assert_eq!(len, 4);
/// # Ok::<(), pw_status::Error>(())
/// ```
#[macro_export]
macro_rules! tokenize_core_fmt_to_buffer {
    // One or more string literals joined by the custom `PW_FMT_CONCAT`
    // separator; the proc macro performs the concatenation.
    ($buffer:expr, $($format_string:literal)PW_FMT_CONCAT+ $(, $args:expr)* $(,)?) => {{
        use $crate::__private as __pw_tokenizer_crate;
        __pw_tokenizer_crate::_tokenize_core_fmt_to_buffer!($buffer, $($format_string)PW_FMT_CONCAT+, $($args),*)
    }};
}
166
/// Tokenize a printf format string and arguments to an [`AsMut<u8>`] buffer
/// and add the format string's token to the token database.
///
/// See [`token`] for an explanation on how strings are tokenized and entries
/// are added to the token database. The token's domain is set to `""`.
///
/// Returns a [`pw_status::Result<usize>`] containing the number of bytes
/// written to the buffer.
///
/// `tokenize_printf_to_buffer!` supports concatenation of format strings as
/// described in [`pw_format::macros::FormatAndArgs`].
///
/// # Errors
/// - [`pw_status::Error::OutOfRange`] - Buffer is not large enough to fit
///   tokenized data.
/// - [`pw_status::Error::InvalidArgument`] - Invalid buffer was provided.
///
/// # Example
///
/// ```
/// use pw_tokenizer::tokenize_printf_to_buffer;
///
/// // Tokenize a format string and argument into a buffer.
/// let mut buffer = [0u8; 1024];
/// let len = tokenize_printf_to_buffer!(&mut buffer, "The answer is %d", 42)?;
///
/// // 4 bytes used to encode the token and one to encode the value 42.
/// assert_eq!(len, 5);
///
/// // The format string can be composed of multiple string literals using
/// // the custom `PW_FMT_CONCAT` operator.
/// let len = tokenize_printf_to_buffer!(&mut buffer, "Hello " PW_FMT_CONCAT "Pigweed")?;
///
/// // Only a single 4 byte token is emitted after concatenation of the string
/// // literals above.
/// assert_eq!(len, 4);
/// # Ok::<(), pw_status::Error>(())
/// ```
#[macro_export]
macro_rules! tokenize_printf_to_buffer {
    // One or more string literals joined by the custom `PW_FMT_CONCAT`
    // separator; the proc macro performs the concatenation.
    ($buffer:expr, $($format_string:literal)PW_FMT_CONCAT+ $(, $args:expr)* $(,)?) => {{
        use $crate::__private as __pw_tokenizer_crate;
        __pw_tokenizer_crate::_tokenize_printf_to_buffer!($buffer, $($format_string)PW_FMT_CONCAT+, $($args),*)
    }};
}
211
/// Deprecated alias for [`tokenize_printf_to_buffer!`].
///
/// New code should call [`tokenize_printf_to_buffer!`] directly.
#[macro_export]
macro_rules! tokenize_to_buffer {
    ($buffer:expr, $($format_string:literal)PW_FMT_CONCAT+ $(, $args:expr)* $(,)?) => {{
        $crate::tokenize_printf_to_buffer!($buffer, $($format_string)PW_FMT_CONCAT+, $($args),*)
    }};
}
219
/// Tokenize a `core::fmt` format string and arguments to a [`MessageWriter`].
/// The format string is converted to a `printf` style format string at compile
/// time and its token is added to the token database.
///
/// `tokenize_core_fmt_to_writer!` and the accompanying [`MessageWriter`] trait
/// provide an optimized API for use cases like logging where the output of the
/// tokenization will be written to a shared/ambient resource like stdio, a
/// UART, or a shared buffer.
///
/// The `writer_type` should implement [`MessageWriter`] and [`Default`] traits.
/// The writer is instantiated with the [`Default`] allowing any intermediate
/// buffers to be declared on the stack of the internal writing engine instead
/// of the caller's stack.
///
/// See [`token`] for an explanation on how strings are tokenized and entries
/// are added to the token database. The token's domain is set to `""`.
///
/// Returns a [`pw_status::Result<()>`].
///
/// `tokenize_core_fmt_to_writer!` supports concatenation of format strings as
/// described in [`pw_format::macros::FormatAndArgs`].
///
/// # Errors
/// - [`pw_status::Error::OutOfRange`] - [`MessageWriter`] does not have enough
///   space to fit tokenized data.
/// - others - `tokenize_core_fmt_to_writer!` will pass on any errors returned
///   by the [`MessageWriter`].
///
/// # Code Size
///
/// This data was collected by examining the disassembly of a test program
/// built for a Cortex M0.
///
/// | Tokenized Message   | Per Call-site Cost (bytes) |
/// | --------------------| -------------------------- |
/// | no arguments        | 10                         |
/// | one `i32` argument  | 18                         |
///
/// # Example
///
/// ```
/// use pw_status::Result;
/// use pw_stream::{Cursor, Write};
/// use pw_tokenizer::{MessageWriter, tokenize_core_fmt_to_writer};
///
/// const BUFFER_LEN: usize = 32;
///
/// // Declare a simple MessageWriter that uses a [`pw_stream::Cursor`] to
/// // maintain an internal buffer.
/// struct TestMessageWriter {
///     cursor: Cursor<[u8; BUFFER_LEN]>,
/// }
///
/// impl Default for TestMessageWriter {
///     fn default() -> Self {
///         Self {
///             cursor: Cursor::new([0u8; BUFFER_LEN]),
///         }
///     }
/// }
///
/// impl MessageWriter for TestMessageWriter {
///     fn write(&mut self, data: &[u8]) -> Result<()> {
///         self.cursor.write_all(data)
///     }
///
///     fn remaining(&self) -> usize {
///         self.cursor.remaining()
///     }
///
///     fn finalize(self) -> Result<()> {
///         let len = self.cursor.position();
///         // 4 bytes used to encode the token and one to encode the value 42.
///         assert_eq!(len, 5);
///         Ok(())
///     }
/// }
///
/// // Tokenize a format string and argument into the writer. Note how we
/// // pass in the message writer's type, not an instance of it.
/// let len = tokenize_core_fmt_to_writer!(TestMessageWriter, "The answer is {}", 42 as i32)?;
/// # Ok::<(), pw_status::Error>(())
/// ```
#[macro_export]
macro_rules! tokenize_core_fmt_to_writer {
    ($writer:expr, $($format_string:literal)PW_FMT_CONCAT+ $(, $args:expr)* $(,)?) => {{
        use $crate::__private as __pw_tokenizer_crate;
        __pw_tokenizer_crate::_tokenize_core_fmt_to_writer!($writer, $($format_string)PW_FMT_CONCAT+, $($args),*)
    }};
}
310
/// Tokenize a `printf` format string and arguments to a [`MessageWriter`] and
/// add the format string's token to the token database.
///
/// `tokenize_printf_to_writer!` and the accompanying [`MessageWriter`] trait
/// provide an optimized API for use cases like logging where the output of the
/// tokenization will be written to a shared/ambient resource like stdio, a
/// UART, or a shared buffer.
///
/// The `writer_type` should implement [`MessageWriter`] and [`Default`] traits.
/// The writer is instantiated with the [`Default`] allowing any intermediate
/// buffers to be declared on the stack of the internal writing engine instead
/// of the caller's stack.
///
/// See [`token`] for an explanation on how strings are tokenized and entries
/// are added to the token database. The token's domain is set to `""`.
///
/// Returns a [`pw_status::Result<()>`].
///
/// `tokenize_printf_to_writer!` supports concatenation of format strings as
/// described in [`pw_format::macros::FormatAndArgs`].
///
/// # Errors
/// - [`pw_status::Error::OutOfRange`] - [`MessageWriter`] does not have enough
///   space to fit tokenized data.
/// - others - `tokenize_printf_to_writer!` will pass on any errors returned
///   by the [`MessageWriter`].
///
/// # Code Size
///
/// This data was collected by examining the disassembly of a test program
/// built for a Cortex M0.
///
/// | Tokenized Message   | Per Call-site Cost (bytes) |
/// | --------------------| -------------------------- |
/// | no arguments        | 10                         |
/// | one `i32` argument  | 18                         |
///
/// # Example
///
/// ```
/// use pw_status::Result;
/// use pw_stream::{Cursor, Write};
/// use pw_tokenizer::{MessageWriter, tokenize_printf_to_writer};
///
/// const BUFFER_LEN: usize = 32;
///
/// // Declare a simple MessageWriter that uses a [`pw_stream::Cursor`] to
/// // maintain an internal buffer.
/// struct TestMessageWriter {
///     cursor: Cursor<[u8; BUFFER_LEN]>,
/// }
///
/// impl Default for TestMessageWriter {
///     fn default() -> Self {
///         Self {
///             cursor: Cursor::new([0u8; BUFFER_LEN]),
///         }
///     }
/// }
///
/// impl MessageWriter for TestMessageWriter {
///     fn write(&mut self, data: &[u8]) -> Result<()> {
///         self.cursor.write_all(data)
///     }
///
///     fn remaining(&self) -> usize {
///         self.cursor.remaining()
///     }
///
///     fn finalize(self) -> Result<()> {
///         let len = self.cursor.position();
///         // 4 bytes used to encode the token and one to encode the value 42.
///         assert_eq!(len, 5);
///         Ok(())
///     }
/// }
///
/// // Tokenize a format string and argument into the writer. Note how we
/// // pass in the message writer's type, not an instance of it.
/// let len = tokenize_printf_to_writer!(TestMessageWriter, "The answer is %d", 42)?;
/// # Ok::<(), pw_status::Error>(())
/// ```
#[macro_export]
macro_rules! tokenize_printf_to_writer {
    ($writer:expr, $($format_string:literal)PW_FMT_CONCAT+ $(, $args:expr)* $(,)?) => {{
        use $crate::__private as __pw_tokenizer_crate;
        __pw_tokenizer_crate::_tokenize_printf_to_writer!($writer, $($format_string)PW_FMT_CONCAT+, $($args),*)
    }};
}
400
/// Deprecated alias for [`tokenize_printf_to_writer!`].
///
/// New code should call [`tokenize_printf_to_writer!`] directly.
#[macro_export]
macro_rules! tokenize_to_writer {
    ($writer:expr, $($format_string:literal)PW_FMT_CONCAT+ $(, $args:expr)* $(,)?) => {{
        $crate::tokenize_printf_to_writer!($writer, $($format_string)PW_FMT_CONCAT+, $($args),*)
    }};
}
408
/// A trait used by the `tokenize_*_to_writer!` macros to output tokenized
/// messages.
///
/// For more details on how this type is used, see the
/// [`tokenize_core_fmt_to_writer!`] and [`tokenize_printf_to_writer!`]
/// documentation.
pub trait MessageWriter {
    /// Append `data` to the message.
    fn write(&mut self, data: &[u8]) -> Result<()>;

    /// Return the remaining space in this message instance.
    ///
    /// If there are no space constraints, return `usize::MAX`.
    fn remaining(&self) -> usize;

    /// Finalize message.
    ///
    /// `finalize()` is called when the tokenized message is complete.
    fn finalize(self) -> Result<()>;
}
427
#[cfg(test)]
// Untyped prints code rely on as casts to annotate type information.
#[allow(clippy::unnecessary_cast)]
#[allow(clippy::literal_string_with_formatting_args)]
mod tests {
    use super::*;
    // Lets the `pw_tokenizer::` paths emitted by macro expansions resolve
    // back to this crate when testing it from within itself.
    extern crate self as pw_tokenizer;
    use std::cell::RefCell;

    use pw_stream::{Cursor, Write};

    // This is not meant to be an exhaustive test of tokenization which is
    // covered by `pw_tokenizer_core`'s unit tests. Rather, this is testing
    // that the `token!` macro connects to that correctly.
    #[test]
    fn test_token() {
        // Known, stable hash of the string below. This is the same value
        // shown in the `token!` macro's documentation example.
        assert_eq!(token!("hello, \"world\""), 3537412730);
    }

    // Runs the printf style and core::fmt style format strings through the
    // `tokenize_*_to_buffer!` macros and checks the encoded output. An empty
    // format string skips that style.
    macro_rules! tokenize_to_buffer_test {
        ($expected_data:expr, $buffer_len:expr, $printf_fmt:literal, $core_fmt:literal $(, $args:expr)* $(,)?) => {{
            if $printf_fmt != "" {
                let mut buffer = [0u8; $buffer_len];
                let len = tokenize_printf_to_buffer!(&mut buffer, $printf_fmt, $($args),*).unwrap();
                assert_eq!(
                    &buffer[..len],
                    $expected_data,
                    "printf style input does not produce expected output",
                );
            }
            if $core_fmt != "" {
                let mut buffer = [0u8; $buffer_len];
                let len = tokenize_core_fmt_to_buffer!(&mut buffer, $core_fmt, $($args),*).unwrap();
                assert_eq!(
                    &buffer[..len],
                    $expected_data,
                    "core::fmt style input does not produce expected output",
                );
            }
        }}
    }

    macro_rules! tokenize_to_writer_test {
        ($expected_data:expr, $buffer_len:expr, $printf_fmt:literal, $core_fmt:literal $(, $args:expr)* $(,)?) => {{
            // The `MessageWriter` API is used in places like logging where it
            // accesses a shared/ambient resource (like stdio or a UART). To test
            // it in a hermetic way we declare a test specific `MessageWriter` that
            // writes its output to a scoped static variable that can be checked
            // after the test is run.

            // Since these tests are not multi-threaded, we can use a thread_local!
            // instead of a mutex.
            thread_local!(static TEST_OUTPUT: RefCell<Option<Vec<u8>>> = RefCell::new(None));

            struct TestMessageWriter {
                cursor: Cursor<[u8; $buffer_len]>,
            }

            impl Default for TestMessageWriter {
                fn default() -> Self {
                    Self {
                        cursor: Cursor::new([0u8; $buffer_len]),
                    }
                }
            }

            impl MessageWriter for TestMessageWriter {
                fn write(&mut self, data: &[u8]) -> Result<()> {
                    self.cursor.write_all(data)
                }

                fn remaining(&self) -> usize {
                    self.cursor.remaining()
                }

                fn finalize(self) -> Result<()> {
                    let write_len = self.cursor.position();
                    let data = self.cursor.into_inner();
                    TEST_OUTPUT.with(|output| *output.borrow_mut() = Some(data[..write_len].to_vec()));

                    Ok(())
                }
            }

            if $printf_fmt != "" {
                TEST_OUTPUT.with(|output| *output.borrow_mut() = None);
                tokenize_printf_to_writer!(TestMessageWriter, $printf_fmt, $($args),*).unwrap();
                TEST_OUTPUT.with(|output| {
                    assert_eq!(
                        *output.borrow(),
                        Some($expected_data.to_vec()),
                    )
                });
            }

            if $core_fmt != "" {
                TEST_OUTPUT.with(|output| *output.borrow_mut() = None);
                tokenize_core_fmt_to_writer!(TestMessageWriter, $core_fmt, $($args),*).unwrap();
                TEST_OUTPUT.with(|output| {
                    assert_eq!(
                        *output.borrow(),
                        Some($expected_data.to_vec()),
                    )
                });
            }
        }}
    }

    // Exercises both the buffer and the writer code paths with the same
    // inputs and expected output.
    macro_rules! tokenize_test {
        ($expected_data:expr, $buffer_len:expr, $printf_fmt:literal, $core_fmt:literal $(, $args:expr)* $(,)?) => {{
            tokenize_to_buffer_test!($expected_data, $buffer_len, $printf_fmt, $core_fmt, $($args),*);
            tokenize_to_writer_test!($expected_data, $buffer_len, $printf_fmt, $core_fmt, $($args),*);
        }};
    }

    #[test]
    fn bare_string_encodes_correctly() {
        tokenize_test!(
            &[0xe0, 0x92, 0xe0, 0xa], // expected buffer
            64,                       // buffer size
            "Hello Pigweed",          // printf style
            "Hello Pigweed",          // core::fmt style
        );
    }

    #[test]
    fn test_decimal_format() {
        // "as casts" are used for the integer arguments below. They are only
        // need for the core::fmt style arguments but are added so that we can
        // check that the printf and core::fmt style equivalents encode the same.
        tokenize_test!(
            &[0x52, 0x1c, 0xb0, 0x4c, 0x2], // expected buffer
            64,                             // buffer size
            "The answer is %d!",            // printf style
            "The answer is {}!",            // core::fmt style
            1 as i32
        );

        tokenize_test!(
            &[0x36, 0xd0, 0xfb, 0x69, 0x1], // expected buffer
            64,                             // buffer size
            "No! The answer is %d!",        // printf style
            "No! The answer is {}!",        // core::fmt style
            -1 as i32
        );

        tokenize_test!(
            &[0xa4, 0xad, 0x50, 0x54, 0x0],               // expected buffer
            64,                                           // buffer size
            "I think you'll find that the answer is %d!", // printf style
            "I think you'll find that the answer is {}!", // core::fmt style
            0 as i32
        );
    }

    #[test]
    fn test_misc_integer_format() {
        // %d, %i, %o, %u, %x, %X all encode integers the same.
        tokenize_test!(
            &[0x52, 0x1c, 0xb0, 0x4c, 0x2], // expected buffer
            64,                             // buffer size
            "The answer is %d!",            // printf style
            "",                             // no equivalent core::fmt style
            1
        );

        // Because %i is an alias for %d, it gets converted to a %d by the
        // `pw_format` macro infrastructure.
        tokenize_test!(
            &[0x52, 0x1c, 0xb0, 0x4c, 0x2], // expected buffer
            64,                             // buffer size
            "The answer is %i!",            // printf style
            "",                             // no equivalent core::fmt style
            1
        );

        tokenize_test!(
            &[0x5d, 0x70, 0x12, 0xb4, 0x2], // expected buffer
            64,                             // buffer size
            "The answer is %o!",            // printf style
            "",                             // no equivalent core::fmt style
            1u32
        );

        tokenize_test!(
            &[0x63, 0x58, 0x5f, 0x8f, 0x2], // expected buffer
            64,                             // buffer size
            "The answer is %u!",            // printf style
            "",                             // no equivalent core::fmt style
            1u32
        );

        tokenize_test!(
            &[0x66, 0xcc, 0x05, 0x7d, 0x2], // expected buffer
            64,                             // buffer size
            "The answer is %x!",            // printf style
            "",                             // no equivalent core::fmt style
            1u32
        );

        tokenize_test!(
            &[0x46, 0x4c, 0x16, 0x96, 0x2], // expected buffer
            64,                             // buffer size
            "The answer is %X!",            // printf style
            "",                             // no equivalent core::fmt style
            1u32
        );
    }

    #[test]
    fn test_string_format() {
        tokenize_test!(
            b"\x25\xf6\x2e\x66\x07Pigweed", // expected buffer
            64,                             // buffer size
            "Hello: %s!",                   // printf style
            "",                             // no equivalent core::fmt style
            "Pigweed"
        );
    }

    #[test]
    fn test_string_format_overflow() {
        tokenize_test!(
            b"\x25\xf6\x2e\x66\x83Pig", // expected buffer
            8,                          // buffer size
            "Hello: %s!",               // printf style
            "",                         // no equivalent core::fmt style
            "Pigweed"
        );
    }

    #[test]
    fn test_char_format() {
        tokenize_test!(
            &[0x2e, 0x52, 0xac, 0xe4, 0xa0, 0x1], // expected buffer
            64,                                   // buffer size
            "Hello: %cigweed",                    // printf style
            "",                                   // no equivalent core::fmt style
            "P".as_bytes()[0]
        );
    }

    #[test]
    fn test_untyped_format() {
        tokenize_test!(
            &[0x63, 0x58, 0x5f, 0x8f, 0x2], // expected buffer
            64,                             // buffer size
            "The answer is %u!",            // printf style
            "The answer is {}!",            // core::fmt style
            1 as u32
        );

        tokenize_test!(
            &[0x36, 0xd0, 0xfb, 0x69, 0x1], // expected buffer
            64,                             // buffer size
            "No! The answer is %v!",        // printf style
            "No! The answer is {}!",        // core::fmt style
            -1 as i32
        );

        tokenize_test!(
            b"\x25\xf6\x2e\x66\x07Pigweed", // expected buffer
            64,                             // buffer size
            "Hello: %v!",                   // printf style
            "Hello: {}!",                   // core::fmt style
            "Pigweed" as &str
        );
    }

    #[test]
    fn test_field_width_and_zero_pad_format() {
        tokenize_test!(
            &[0x3a, 0xc2, 0x1a, 0x05, 0xfc, 0xab, 0x06], // expected buffer
            64,                                          // buffer size
            "Lets go to the %x",                         // printf style
            "Lets go to the {:x}",                       // core::fmt style
            0xcafe as u32
        );

        tokenize_test!(
            &[0xf3, 0x16, 0x03, 0x99, 0xfc, 0xab, 0x06], // expected buffer
            64,                                          // buffer size
            "Lets go to the %8x",                        // printf style
            "Lets go to the {:8x}",                      // core::fmt style
            0xcafe as u32
        );

        tokenize_test!(
            &[0x44, 0xce, 0xa3, 0x7e, 0xfc, 0xab, 0x06], // expected buffer
            64,                                          // buffer size
            "Lets go to the %08x",                       // printf style
            "Lets go to the {:08x}",                     // core::fmt style
            0xcafe as u32
        );
    }

    #[test]
    fn tokenizer_supports_concatenated_printf_format_strings() {
        // Since the no argument and some arguments cases are handled differently
        // by the macro infrastructure we need to test both.
        let mut buffer = [0u8; 64];
        let len =
            tokenize_printf_to_buffer!(&mut buffer, "Hello" PW_FMT_CONCAT " Pigweed").unwrap();
        assert_eq!(&buffer[..len], &[0xe0, 0x92, 0xe0, 0xa]);

        let len = tokenize_printf_to_buffer!(&mut buffer, "Hello: " PW_FMT_CONCAT "%cigweed",
            "P".as_bytes()[0])
        .unwrap();
        assert_eq!(&buffer[..len], &[0x2e, 0x52, 0xac, 0xe4, 0xa0, 0x1]);
    }

    #[test]
    fn tokenizer_supports_concatenated_core_fmt_format_strings() {
        // Since the no argument and some arguments cases are handled differently
        // by the macro infrastructure we need to test both.
        let mut buffer = [0u8; 64];
        let len =
            tokenize_core_fmt_to_buffer!(&mut buffer, "Hello" PW_FMT_CONCAT " Pigweed").unwrap();
        assert_eq!(&buffer[..len], &[0xe0, 0x92, 0xe0, 0xa]);

        let len = tokenize_core_fmt_to_buffer!(&mut buffer, "The answer is " PW_FMT_CONCAT "{}!",
            1 as i32)
        .unwrap();
        assert_eq!(&buffer[..len], &[0x52, 0x1c, 0xb0, 0x4c, 0x2]);
    }
}
751}