@@ -21,6 +21,7 @@ use std::str::Chars;
2121
2222use super :: dialect:: keywords:: ALL_KEYWORDS ;
2323use super :: dialect:: Dialect ;
24+ use std:: error:: Error ;
2425use std:: fmt;
2526
2627/// SQL Token enumeration
@@ -38,6 +39,10 @@ pub enum Token {
3839 NationalStringLiteral ( String ) ,
3940 /// Hexadecimal string literal: i.e.: X'deadbeef'
4041 HexStringLiteral ( String ) ,
42+ /// An unsigned numeric literal representing positional
43+ /// parameters like $1, $2, etc. in prepared statements and
44+ /// function definitions
45+ Parameter ( String ) ,
4146 /// Comma
4247 Comma ,
4348 /// Whitespace (space, tab, etc)
@@ -99,6 +104,7 @@ impl fmt::Display for Token {
99104 Token :: SingleQuotedString ( ref s) => write ! ( f, "'{}'" , s) ,
100105 Token :: NationalStringLiteral ( ref s) => write ! ( f, "N'{}'" , s) ,
101106 Token :: HexStringLiteral ( ref s) => write ! ( f, "X'{}'" , s) ,
107+ Token :: Parameter ( n) => write ! ( f, "${}" , n) ,
102108 Token :: Comma => f. write_str ( "," ) ,
103109 Token :: Whitespace ( ws) => write ! ( f, "{}" , ws) ,
104110 Token :: Eq => f. write_str ( "=" ) ,
@@ -212,6 +218,14 @@ impl fmt::Display for Whitespace {
/// Error produced when the tokenizer encounters input it cannot turn
/// into a valid [`Token`] stream. Wraps a human-readable message.
#[derive(Debug, PartialEq)]
pub struct TokenizerError(String);

impl fmt::Display for TokenizerError {
    // Surface the wrapped message directly; `Debug` keeps the
    // type-name form for diagnostics.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

// Marker impl so callers can box/propagate this as a `dyn Error`;
// the default `description`/`source` behavior is sufficient.
impl Error for TokenizerError {}

215229/// SQL Tokenizer
216230pub struct Tokenizer < ' a > {
217231 dialect : & ' a dyn Dialect ,
@@ -249,6 +263,7 @@ impl<'a> Tokenizer<'a> {
249263 Token :: Word ( w) if w. quote_style != None => self . col += w. value . len ( ) as u64 + 2 ,
250264 Token :: Number ( s) => self . col += s. len ( ) as u64 ,
251265 Token :: SingleQuotedString ( s) => self . col += s. len ( ) as u64 ,
266+ Token :: Parameter ( s) => self . col += s. len ( ) as u64 ,
252267 _ => self . col += 1 ,
253268 }
254269
@@ -415,6 +430,7 @@ impl<'a> Tokenizer<'a> {
415430 '&' => self . consume_and_return ( chars, Token :: Ampersand ) ,
416431 '{' => self . consume_and_return ( chars, Token :: LBrace ) ,
417432 '}' => self . consume_and_return ( chars, Token :: RBrace ) ,
433+ '$' => self . tokenize_parameter ( chars) ,
418434 other => self . consume_and_return ( chars, Token :: Char ( other) ) ,
419435 } ,
420436 None => Ok ( None ) ,
@@ -490,6 +506,31 @@ impl<'a> Tokenizer<'a> {
490506 }
491507 }
492508
509+ /// PostgreSQL supports positional parameters (like $1, $2, etc.) for
510+ /// prepared statements and function definitions.
511+ /// Grab the positional argument following a $ to parse it.
512+ fn tokenize_parameter (
513+ & self ,
514+ chars : & mut Peekable < Chars < ' _ > > ,
515+ ) -> Result < Option < Token > , TokenizerError > {
516+ assert_eq ! ( Some ( '$' ) , chars. next( ) ) ;
517+
518+ let n = peeking_take_while ( chars, |ch| match ch {
519+ '0' ..='9' => true ,
520+ _ => false ,
521+ } ) ;
522+
523+ if n. is_empty ( ) {
524+ return Err ( TokenizerError (
525+ "parameter marker ($) was not followed by \
526+ at least one digit"
527+ . into ( ) ,
528+ ) ) ;
529+ }
530+
531+ Ok ( Some ( Token :: Parameter ( n) ) )
532+ }
533+
493534 fn consume_and_return (
494535 & self ,
495536 chars : & mut Peekable < Chars < ' _ > > ,
0 commit comments