diff --git a/examples/print_ast.rs b/examples/print_ast.rs
new file mode 100644
index 0000000000..a9530afae0
--- /dev/null
+++ b/examples/print_ast.rs
@@ -0,0 +1,25 @@
+/// Print the AST for a given Python file.
+use std::path::PathBuf;
+
+use anyhow::Result;
+use clap::{Parser, ValueHint};
+use rustpython_parser::parser;
+
+use ruff::fs;
+
+#[derive(Debug, Parser)]
+struct Cli {
+    #[clap(parse(from_os_str), value_hint = ValueHint::FilePath, required = true)]
+    file: PathBuf,
+}
+
+fn main() -> Result<()> {
+    let cli = Cli::parse();
+
+    let contents = fs::read_file(&cli.file)?;
+    let python_ast = parser::parse_program(&contents, &cli.file.to_string_lossy())?;
+
+    println!("{:#?}", python_ast);
+
+    Ok(())
+}
diff --git a/examples/print_tokens.rs b/examples/print_tokens.rs
new file mode 100644
index 0000000000..f320dcb88d
--- /dev/null
+++ b/examples/print_tokens.rs
@@ -0,0 +1,25 @@
+/// Print the token stream for a given Python file.
+use std::path::PathBuf;
+
+use anyhow::Result;
+use clap::{Parser, ValueHint};
+use rustpython_parser::lexer;
+
+use ruff::fs;
+
+#[derive(Debug, Parser)]
+struct Cli {
+    #[clap(parse(from_os_str), value_hint = ValueHint::FilePath, required = true)]
+    file: PathBuf,
+}
+
+fn main() -> Result<()> {
+    let cli = Cli::parse();
+
+    let contents = fs::read_file(&cli.file)?;
+    for (_, tok, _) in lexer::make_tokenizer(&contents).flatten() {
+        println!("{:#?}", tok);
+    }
+
+    Ok(())
+}