--echo # See that SQL NULL is handled properly
SELECT JSON_SCHEMA_VALID(NULL, NULL);
SELECT JSON_SCHEMA_VALID('{}', NULL);
SELECT JSON_SCHEMA_VALID(NULL, '{}');

--echo # Check some basic scenarios to verify that everything is working as
--echo # expected. Note that the rapidjson library contains a bunch of tests to
--echo # verify the correctness of the JSON Schema validation, so we don't do
--echo # any extensive testing of the validation process itself here.

SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "latitude": {
      "type": "number",
      "minimum": 63
    },
    "longitude": {
      "type": "number"
    }
  },
  "required": ["latitude", "longitude"]
}','{
  "latitude": 63.444697,
  "longitude": 10.445118
}') AS should_be_valid;

SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "latitude": {
      "type": "number",
      "minimum": 63
    },
    "longitude": {
      "type": "number"
    }
  },
  "required": ["latitude", "longitude"]
}','{
  "longitude": 10.445118
}') AS should_be_invalid;

SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "latitude": {
      "type": "number",
      "minimum": 63
    },
    "longitude": {
      "type": "number"
    }
  },
  "required": ["latitude", "longitude"]
}','{
  "latitude": 62,
  "longitude": 10.445118
}') AS should_be_invalid;

SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "a_string": {
      "type": "string",
      "pattern": "[5-9]"
    }
  }
}','{
  "a_string": "8"
}') AS should_be_valid;

SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "a_string": {
      "type": "string",
      "pattern": "[5-9]"
    }
  }
}','{
  "a_string": "4"
}') AS should_be_invalid;

--error ER_NOT_SUPPORTED_YET
SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": { "$ref": "http://example.com" }
}','
{
  "latitude": 63.444697
}') AS invalid;

--error ER_JSON_DOCUMENT_TOO_DEEP
SELECT JSON_SCHEMA_VALID(REPEAT('[', 100000), JSON_OBJECT());

--error ER_JSON_DOCUMENT_TOO_DEEP
SELECT JSON_SCHEMA_VALID(JSON_OBJECT(), REPEAT('[', 100000));

--echo # Check that we can use JSON_SCHEMA_VALID as a CHECK constraint.
CREATE TABLE t1 (
  geometry JSON,
  CHECK(JSON_SCHEMA_VALID('{
    "type": "object",
    "properties": {
      "latitude": {
        "type": "number",
        "minimum": -90,
        "maximum": 90
      },
      "longitude": {
        "type": "number",
        "minimum": -180,
        "maximum": 180
      }
    },
    "required": ["latitude", "longitude"]
  }', geometry))
);

INSERT INTO t1 VALUES ('{"latitude": 0, "longitude": 0}');
INSERT INTO t1 VALUES ('{"latitude": -90, "longitude": -180}');
INSERT INTO t1 VALUES ('{"latitude": 90, "longitude": 180}');

SELECT geometry FROM t1;

--error ER_CHECK_CONSTRAINT_VIOLATED
INSERT INTO t1 VALUES ('{"latitude": 0}'); # missing required property "longitude"
--error ER_CHECK_CONSTRAINT_VIOLATED
INSERT INTO t1 VALUES ('{"latitude": 181, "longitude": 0}'); # latitude out of range

DROP TABLE t1;

--echo # Negative tests for wrong number of arguments
--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT JSON_SCHEMA_VALID();

--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT JSON_SCHEMA_VALID(NULL);

--error ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT
SELECT JSON_SCHEMA_VALID(NULL, NULL, NULL);

--echo # Invalid JSON document in either argument
SET @invalid_json = '{"foo": "bar"'; # Missing closing '}'
SET @valid_json = '{}';

SELECT JSON_SCHEMA_VALID(@valid_json, @valid_json) AS should_be_true;

--error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_SCHEMA_VALID(@invalid_json, @valid_json);

--error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_SCHEMA_VALID(@valid_json, @invalid_json);

--error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_SCHEMA_VALID(@invalid_json, @invalid_json);

--echo # Invalid regex patterns. rapidjson ignores invalid regex patterns, so
--echo # they are removed from the validation process.
SELECT JSON_SCHEMA_VALID('{"type":"string","pattern":"("}', '"abc"') AS should_be_true;

SELECT JSON_SCHEMA_VALID('{"type":"string","pattern":"[asdf@123"}', '"abc"') AS should_be_true;

--echo # Positive and negative tests for anchored regex patterns
SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "a_string": {
      "type": "string",
      "pattern": "[5-9]"
    }
  }
}','{
  "a_string": "a8"
}') AS should_be_valid;

SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "a_string": {
      "type": "string",
      "pattern": "^[5-9]"
    }
  }
}','{
  "a_string": "a8"
}') AS should_be_invalid;

SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "a_string": {
      "type": "string",
      "pattern": "[5-9]"
    }
  }
}','{
  "a_string": "8a"
}') AS should_be_valid;

SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "a_string": {
      "type": "string",
      "pattern": "[5-9]$"
    }
  }
}','{
  "a_string": "8a"
}') AS should_be_invalid;

SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "a_string": {
      "type": "string",
      "pattern": "^[5-9]$"
    }
  }
}','{
  "a_string": "8"
}') AS should_be_valid;

SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "a_string": {
      "type": "string",
      "pattern": "^[5-9]$"
    }
  }
}','{
  "a_string": "a8"
}') AS should_be_invalid;

SELECT JSON_SCHEMA_VALID('{
  "type": "object",
  "properties": {
    "a_string": {
      "type": "string",
      "pattern": "^[5-9]$"
    }
  }
}','{
  "a_string": "8a"
}') AS should_be_invalid;

--echo # The JSON Schema must be an object, so see that we don't accept any
--echo # other type as the first argument.
--error ER_INVALID_JSON_TYPE
SELECT JSON_SCHEMA_VALID('[]', '{}');

--error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_SCHEMA_VALID(CAST('test' AS JSON), '{}');
--error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_SCHEMA_VALID('{"type":"object"}', '{');

--echo # Test various scenarios with a cached JSON schema (first argument is const)
CREATE TABLE t1 (col1 JSON);
INSERT INTO t1 VALUES ('{"latitude": 0, "longitude": 0}');
INSERT INTO t1 VALUES ('{"latitude": -90, "longitude": -180}');
INSERT INTO t1 VALUES (NULL);
INSERT INTO t1 VALUES ('[]');
INSERT INTO t1 VALUES ('{"latitude": 90, "longitude": 180}');

--echo # All rows should return SQL NULL
SELECT JSON_SCHEMA_VALID(NULL, col1) FROM t1;

--echo # We should get true, true, null, false, true
SELECT JSON_SCHEMA_VALID('{"type":"object"}', col1) AS valid FROM t1;

--echo # Both arguments should be JSON
--error ER_INVALID_TYPE_FOR_JSON
SELECT JSON_SCHEMA_VALID(JSON_DEPTH(col1), col1) FROM t1;
--error ER_INVALID_TYPE_FOR_JSON
SELECT JSON_SCHEMA_VALID('{"type":"object"}', JSON_DEPTH(col1)) FROM t1;
--error ER_INVALID_TYPE_FOR_JSON
SELECT JSON_SCHEMA_VALID(123, col1) FROM t1;

DROP TABLE t1;

--echo # Test some scenarios with non-cached JSON schema (first argument is not const)
CREATE TABLE t1 (col1 JSON);
INSERT INTO t1 VALUES ('{"type":"object"}');
INSERT INTO t1 VALUES ('{"type":"array"}');
INSERT INTO t1 VALUES (NULL);
INSERT INTO t1 VALUES ('{"type":"string"}');

--echo # All rows should return SQL NULL
SELECT JSON_SCHEMA_VALID(col1, NULL) FROM t1;

--echo # We should get false, true, null, false
SELECT JSON_SCHEMA_VALID(col1, '[]') AS valid FROM t1;

DROP TABLE t1;

CREATE TABLE t1(s VARCHAR(100), d VARCHAR(100));
--echo # Invalid JSON document, non-const schema
INSERT INTO t1 VALUES('{"type":"object"}', '{');
--error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_SCHEMA_VALID(s, d) FROM t1;
--echo # Invalid JSON schema, non-const schema
UPDATE t1 SET s = '{', d = '{}';
--error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_SCHEMA_VALID(s, d) FROM t1;
DROP TABLE t1;

--echo # Ensure that our item tree transformation doesn't get stuck forever when
--echo # using prepared statements.
PREPARE stmt FROM 'SELECT JSON_SCHEMA_VALID(?, ''{}'') FROM DUAL';
SET @json_schema = '{"type":"object"}';
SET @null = NULL;
EXECUTE stmt USING @json_schema;
EXECUTE stmt USING @null;
EXECUTE stmt USING @json_schema;

--echo #
--echo # Bug#29366780 WL#11999: SIG6 IN SETUP_FIELDS() AT SQL/SQL_BASE.CC
--echo #
--error ER_INVALID_JSON_TEXT_IN_PARAM
SELECT JSON_SCHEMA_VALID(CAST('NULL' AS JSON), CAST('NULL' AS JSON));

--echo # See that we don't accept non-JSON types like geometry, bool, ints etc.
--error ER_INVALID_TYPE_FOR_JSON
SELECT JSON_SCHEMA_VALID(JSON_OBJECT(), 123);
--error ER_INVALID_TYPE_FOR_JSON
SELECT JSON_SCHEMA_VALID(JSON_OBJECT(), POINT(1, 1));
--error ER_INVALID_TYPE_FOR_JSON
SELECT JSON_SCHEMA_VALID(JSON_OBJECT(), true);

--error ER_INVALID_TYPE_FOR_JSON
SELECT JSON_SCHEMA_VALID(123, JSON_OBJECT());
--error ER_INVALID_TYPE_FOR_JSON
SELECT JSON_SCHEMA_VALID(POINT(1, 1), JSON_OBJECT());
--error ER_INVALID_TYPE_FOR_JSON
SELECT JSON_SCHEMA_VALID(true, JSON_OBJECT());

--echo #
--echo # Bug#29524331: WL#11999: ASSERTION FAILURE: `!ARGS[0]->CONST_ITEM()'
--echo # Bug#29528888: WL#11999: SIG6 IN ITEM_FUNC_JSON_SCHEMA_VALID::VAL_BOOL()
--echo #               AT ITEM_JSON_FUNC.CC
--echo #
CREATE TABLE t1 (pk INT PRIMARY KEY, j JSON);
INSERT INTO t1 VALUES (1, '{"key": "foobar"}');
SELECT JSON_SCHEMA_VALID(j, j) FROM t1 WHERE pk = 1;
SELECT JSON_SCHEMA_VALID(t2.j, t2.j)
  FROM t1, (SELECT * FROM t1 WHERE pk = 1) t2;
DROP TABLE t1;