Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
76 changes: 76 additions & 0 deletions src/ast/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -928,6 +928,17 @@ pub enum Expr {
IsDistinctFrom(Box<Expr>, Box<Expr>),
/// `IS NOT DISTINCT FROM` operator
IsNotDistinctFrom(Box<Expr>, Box<Expr>),
/// `<expr> IS [NOT] JSON [VALUE|SCALAR|ARRAY|OBJECT] [WITH|WITHOUT UNIQUE [KEYS]]`
IsJson {
/// Expression being tested.
expr: Box<Expr>,
/// Optional JSON shape constraint.
kind: Option<JsonPredicateType>,
/// Optional duplicate-key handling constraint for JSON objects.
unique_keys: Option<JsonKeyUniqueness>,
/// `true` when `NOT` is present.
negated: bool,
},
/// `<expr> IS [ NOT ] [ form ] NORMALIZED`
IsNormalized {
/// Expression being tested.
Expand Down Expand Up @@ -1737,6 +1748,25 @@ impl fmt::Display for Expr {
Expr::IsNotNull(ast) => write!(f, "{ast} IS NOT NULL"),
Expr::IsUnknown(ast) => write!(f, "{ast} IS UNKNOWN"),
Expr::IsNotUnknown(ast) => write!(f, "{ast} IS NOT UNKNOWN"),
Expr::IsJson {
expr,
kind,
unique_keys,
negated,
} => {
write!(f, "{expr} IS ")?;
if *negated {
write!(f, "NOT ")?;
}
write!(f, "JSON")?;
if let Some(kind) = kind {
write!(f, " {kind}")?;
}
if let Some(unique_keys) = unique_keys {
write!(f, " {unique_keys}")?;
}
Ok(())
}
Expr::InList {
expr,
list,
Expand Down Expand Up @@ -8336,6 +8366,52 @@ pub enum AnalyzeFormat {
TREE,
}

/// Optional type constraint for `IS JSON`.
///
/// Written after the `JSON` keyword, e.g. `expr IS JSON ARRAY`.
/// `None` in [`Expr::IsJson`] means the bare `IS [NOT] JSON` form was used.
///
/// NOTE: variant order is significant — `Ord`/`PartialOrd` are derived,
/// so reordering variants changes comparison results.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum JsonPredicateType {
    /// `VALUE` form.
    Value,
    /// `SCALAR` form.
    Scalar,
    /// `ARRAY` form.
    Array,
    /// `OBJECT` form.
    Object,
}

impl fmt::Display for JsonPredicateType {
    /// Writes the constraint exactly as it is spelled in SQL.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Single `write_str` of a static keyword, matching the style of the
        // other keyword-only enums in this file (e.g. `AnalyzeFormat`).
        f.write_str(match self {
            JsonPredicateType::Value => "VALUE",
            JsonPredicateType::Scalar => "SCALAR",
            JsonPredicateType::Array => "ARRAY",
            JsonPredicateType::Object => "OBJECT",
        })
    }
}

/// Optional duplicate-key handling for `IS JSON`.
///
/// Written at the end of the predicate, e.g.
/// `expr IS JSON OBJECT WITH UNIQUE KEYS`; the constraint applies to
/// duplicate keys in JSON objects. `None` in [`Expr::IsJson`] means the
/// clause was omitted.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
pub enum JsonKeyUniqueness {
    /// `WITH UNIQUE KEYS` form.
    WithUniqueKeys,
    /// `WITHOUT UNIQUE KEYS` form.
    WithoutUniqueKeys,
}

impl fmt::Display for JsonKeyUniqueness {
    /// Writes the clause in its canonical form, always including the
    /// optional `KEYS` keyword.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Single `write_str` of a static phrase, matching the style of the
        // other keyword-only enums in this file (e.g. `AnalyzeFormat`).
        f.write_str(match self {
            JsonKeyUniqueness::WithUniqueKeys => "WITH UNIQUE KEYS",
            JsonKeyUniqueness::WithoutUniqueKeys => "WITHOUT UNIQUE KEYS",
        })
    }
}

impl fmt::Display for AnalyzeFormat {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
f.write_str(match self {
Expand Down
6 changes: 6 additions & 0 deletions src/ast/spans.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1476,6 +1476,12 @@ impl Spanned for Expr {
Expr::IsNotNull(expr) => expr.span(),
Expr::IsUnknown(expr) => expr.span(),
Expr::IsNotUnknown(expr) => expr.span(),
Expr::IsJson {
expr,
kind: _,
unique_keys: _,
negated: _,
} => expr.span(),
Expr::IsDistinctFrom(lhs, rhs) => lhs.span().union(&rhs.span()),
Expr::IsNotDistinctFrom(lhs, rhs) => lhs.span().union(&rhs.span()),
Expr::InList {
Expand Down
4 changes: 4 additions & 0 deletions src/dialect/ansi.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,4 +39,8 @@ impl Dialect for AnsiDialect {
fn supports_nested_comments(&self) -> bool {
true
}

/// `IS [NOT] JSON` is part of the SQL standard, so the ANSI dialect
/// opts in to the predicate.
fn supports_is_json_predicate(&self) -> bool {
    true
}
}
4 changes: 4 additions & 0 deletions src/dialect/generic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -297,6 +297,10 @@ impl Dialect for GenericDialect {
true
}

/// The generic dialect accepts the `IS [NOT] JSON` predicate, consistent
/// with its permissive, accept-most-syntax posture.
fn supports_is_json_predicate(&self) -> bool {
    true
}

fn supports_comma_separated_trim(&self) -> bool {
true
}
Expand Down
5 changes: 5 additions & 0 deletions src/dialect/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1458,6 +1458,11 @@ pub trait Dialect: Debug + Any {
false
}

/// Returns true if the dialect supports the `IS [NOT] JSON` predicate,
/// e.g. `value IS JSON OBJECT WITH UNIQUE KEYS`.
///
/// Defaults to `false`; dialects that recognize the predicate opt in by
/// overriding this method.
fn supports_is_json_predicate(&self) -> bool {
    false
}

/// Returns true if this dialect allows an optional `SIGNED` suffix after integer data types.
///
/// Example:
Expand Down
4 changes: 4 additions & 0 deletions src/dialect/oracle.rs
Original file line number Diff line number Diff line change
Expand Up @@ -119,4 +119,8 @@ impl Dialect for OracleDialect {
fn supports_insert_table_query(&self) -> bool {
true
}

/// Oracle supports the `IS [NOT] JSON` condition.
fn supports_is_json_predicate(&self) -> bool {
    true
}
}
4 changes: 4 additions & 0 deletions src/dialect/postgresql.rs
Original file line number Diff line number Diff line change
Expand Up @@ -311,6 +311,10 @@ impl Dialect for PostgreSqlDialect {
true
}

/// PostgreSQL supports the `IS [NOT] JSON` predicate.
fn supports_is_json_predicate(&self) -> bool {
    true
}

fn supports_comma_separated_trim(&self) -> bool {
true
}
Expand Down
1 change: 1 addition & 0 deletions src/keywords.rs
Original file line number Diff line number Diff line change
Expand Up @@ -912,6 +912,7 @@ define_keywords!(
SAFE_CAST,
SAMPLE,
SAVEPOINT,
SCALAR,
SCHEMA,
SCHEMAS,
SCOPE,
Expand Down
82 changes: 76 additions & 6 deletions src/parser/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -506,7 +506,6 @@ impl<'a> Parser<'a> {

match &self.peek_token_ref().token {
Token::EOF => break,

// end of statement
Token::Word(word)
if expecting_statement_delimiter && word.keyword == Keyword::END =>
Expand Down Expand Up @@ -3986,13 +3985,23 @@ impl<'a> Parser<'a> {
{
let expr2 = self.parse_expr()?;
Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
} else if self.dialect.supports_is_json_predicate()
&& self.parse_keyword(Keyword::JSON)
{
self.parse_is_json_predicate(expr, false)
} else if self.dialect.supports_is_json_predicate()
&& self.parse_keywords(&[Keyword::NOT, Keyword::JSON])
{
self.parse_is_json_predicate(expr, true)
} else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
Ok(is_normalized)
} else {
self.expected_ref(
"[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
self.peek_token_ref(),
)
let expected = if self.dialect.supports_is_json_predicate() {
"[NOT] NULL | TRUE | FALSE | DISTINCT | [NOT] JSON [VALUE | SCALAR | ARRAY | OBJECT] [WITH | WITHOUT UNIQUE [KEYS]] | [form] NORMALIZED FROM after IS"
} else {
"[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS"
};
self.expected_ref(expected, self.peek_token_ref())
}
}
Keyword::AT => {
Expand Down Expand Up @@ -8442,6 +8451,7 @@ impl<'a> Parser<'a> {
char: self.parse_identifier()?,
});
}
Some(Keyword::NULL) => break,
_ => {
break;
}
Expand Down Expand Up @@ -12237,6 +12247,43 @@ impl<'a> Parser<'a> {
}
}

/// Parse the `IS [NOT] JSON` predicate after `JSON` (and optional `NOT`) was consumed.
///
/// Grammar handled here (both trailing clauses optional):
/// `[VALUE | SCALAR | ARRAY | OBJECT] [WITH | WITHOUT UNIQUE [KEYS]]`
fn parse_is_json_predicate(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
    // Optional JSON shape constraint following the `JSON` keyword.
    let kind = match self.parse_one_of_keywords(&[
        Keyword::VALUE,
        Keyword::SCALAR,
        Keyword::ARRAY,
        Keyword::OBJECT,
    ]) {
        Some(Keyword::VALUE) => Some(JsonPredicateType::Value),
        Some(Keyword::SCALAR) => Some(JsonPredicateType::Scalar),
        Some(Keyword::ARRAY) => Some(JsonPredicateType::Array),
        Some(Keyword::OBJECT) => Some(JsonPredicateType::Object),
        _ => None,
    };

    // Optional duplicate-key constraint. Both spellings require `UNIQUE`
    // after `WITH`/`WITHOUT`, and the trailing `KEYS` is optional, so the
    // two cases share one arm instead of duplicating the parse logic.
    let unique_keys = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
        Some(with_or_without) => {
            self.expect_keyword_is(Keyword::UNIQUE)?;
            let _ = self.parse_keyword(Keyword::KEYS);
            Some(if with_or_without == Keyword::WITH {
                JsonKeyUniqueness::WithUniqueKeys
            } else {
                JsonKeyUniqueness::WithoutUniqueKeys
            })
        }
        None => None,
    };

    Ok(Expr::IsJson {
        expr: Box::new(expr),
        kind,
        unique_keys,
        negated,
    })
}

/// Parse a literal unicode normalization clause
pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
let neg = self.parse_keyword(Keyword::NOT);
Expand Down Expand Up @@ -21218,12 +21265,35 @@ mod tests {
assert_eq!(
ast,
Err(ParserError::ParserError(
"Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
"Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [NOT] JSON [VALUE | SCALAR | ARRAY | OBJECT] [WITH | WITHOUT UNIQUE [KEYS]] | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
.to_string()
))
);
}

#[test]
fn test_is_predicate_error_hint_depends_on_dialect() {
    // The "after IS" error hint should advertise the JSON predicate forms
    // only for dialects that actually support `IS [NOT] JSON`.
    let sql = "SELECT this is a syntax error";
    let json_hint = "[NOT] JSON [VALUE | SCALAR | ARRAY | OBJECT]";

    let generic_msg = match Parser::parse_sql(&GenericDialect, sql) {
        Err(ParserError::ParserError(msg)) => msg,
        other => panic!("Expected ParserError::ParserError, got: {other:?}"),
    };
    assert!(
        generic_msg.contains(json_hint),
        "Expected Generic dialect to include JSON predicate hint, got: {generic_msg}"
    );

    let mysql_msg = match Parser::parse_sql(&MySqlDialect {}, sql) {
        Err(ParserError::ParserError(msg)) => msg,
        other => panic!("Expected ParserError::ParserError, got: {other:?}"),
    };
    assert!(
        !mysql_msg.contains(json_hint),
        "Expected MySQL dialect to exclude JSON predicate hint, got: {mysql_msg}"
    );
}

#[test]
fn test_nested_explain_error() {
let sql = "EXPLAIN EXPLAIN SELECT 1";
Expand Down
Loading
Loading