# polardbxengine/mysql-test/t/opt_costmodel_flush.test
#
# Test of the FLUSH OPTIMIZER_COSTS command
#
# To get stable cost estimates, the test should only be run with
# 16K InnoDB page size.
CREATE TABLE t0 (
i1 INTEGER
);
INSERT INTO t0 VALUE (0),(1),(2),(3),(4),(5),(6),(7),(8),(9);
# Create a table with 100 records each having size approximately 1000 bytes
CREATE TABLE t1 (
pk INTEGER PRIMARY KEY,
i1 INTEGER NOT NULL,
c1 CHAR(250),
c2 CHAR(250),
c3 CHAR(250),
c4 CHAR(250),
INDEX i1_key (i1)
) ENGINE=InnoDB;
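# Rough sizing (consistent with the comment above): four CHAR(250) columns
# plus two integers give roughly 1 KB per row, so the 100 rows inserted below
# amount to roughly 100 KB of data, i.e. several 16K InnoDB pages.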
--echo # Since ANALYZE TABLE only reads a subset of the data, the statistics for
--echo # table t1 depend on the row order. And since the INSERT INTO ... SELECT
--echo # may be executed using different execution plans, we have added ORDER BY
--echo # to ensure that the rows have the same order every time. If not, the
--echo # estimated number of rows in EXPLAIN may change on different platforms.
INSERT INTO t1
SELECT a0.i1 + 10 * a1.i1, a0.i1, 'abc', 'def', 'ghi', 'jkl'
FROM t0 AS a0, t0 AS a1 ORDER BY a0.i1, a1.i1;
ANALYZE TABLE t1;
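# Illustration only (assumption, kept as a comment so the recorded result
# stays stable): the statistics collected by ANALYZE TABLE can be inspected
# through the InnoDB persistent statistics tables, e.g.
#   SELECT table_name, n_rows FROM mysql.innodb_table_stats
#   WHERE database_name = 'test' AND table_name = 't1';
# Only a sample of index pages is read, which is why the estimates (and the
# EXPLAIN output below) depend on the physical row order created above.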
# Test query for table scan
let query= SELECT * FROM t1;
# Create a user connection
connect (con1,localhost,root,,);
connection con1;
# Get cost estimates with default values
eval EXPLAIN FORMAT=JSON $query;
connection default;
--echo #
--echo # Test that updating the cost constants does not influence an existing
--echo # connection
--echo #
# Master connection: Update the value of the row_evaluate_cost
UPDATE mysql.server_cost
SET cost_value=0.4
WHERE cost_name="row_evaluate_cost";
FLUSH OPTIMIZER_COSTS;
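# Illustration only (kept as a comment so the recorded result stays stable):
# the new value is visible in the cost table right after the UPDATE, e.g.
#   SELECT cost_name, cost_value FROM mysql.server_cost
#   WHERE cost_name = "row_evaluate_cost";
# but a session reads the cost constants when it is created, so the already
# open connection con1 keeps its original values until it reconnects.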
# Switch to the existing user connection and verify that changing a cost
# constant does not influence an existing connection, i.e. the cost
# estimates in EXPLAIN should be the same
connection con1;
--echo #
--echo # The cost estimates should not have changed
--echo #
eval EXPLAIN FORMAT=JSON $query;
disconnect con1;
connection default;
# Reset cost constants
UPDATE mysql.server_cost
SET cost_value=DEFAULT;
--echo #
--echo # Test effect of changing the row_evaluate_cost
--echo #
# Master connection: Update the value of the row_evaluate_cost
UPDATE mysql.server_cost
SET cost_value=0.2
WHERE cost_name="row_evaluate_cost";
FLUSH OPTIMIZER_COSTS;
# Create a NEW connection
connect (con1,localhost,root,,);
connection con1;
--echo #
--echo # Verify that the cost estimate for evaluation cost has doubled
--echo #
eval EXPLAIN FORMAT=JSON $query;
disconnect con1;
connection default;
# Reset cost constants
UPDATE mysql.server_cost
SET cost_value=DEFAULT;
--echo #
--echo # Test effect of changing key_compare_cost
--echo #
--echo # Use a query that will use loose index scan, since loose index scan
--echo # compares keys in order to determine group boundaries.
--echo # Result from running query with default cost constants:
--echo #
let query= SELECT DISTINCT(i1) FROM t1;
eval EXPLAIN FORMAT=JSON $query;
# Master connection: Update the value of the key_compare_cost
UPDATE mysql.server_cost
SET cost_value=0.1
WHERE cost_name="key_compare_cost";
FLUSH OPTIMIZER_COSTS;
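# Assumed arithmetic (not stated by the test itself): with the 8.0 default of
# 0.05 for key_compare_cost, setting it to 0.1 doubles the per-comparison
# cost, which should show up as a higher read cost for the loose index scan
# in the EXPLAIN output below.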
# Create NEW connection
connect (con1,localhost,root,,);
connection con1;
--echo #
--echo # Verify that the cost estimate for read cost is increased
--echo #
eval EXPLAIN FORMAT=JSON $query;
disconnect con1;
connection default;
# Reset cost constants
UPDATE mysql.server_cost
SET cost_value=DEFAULT;
--echo #
--echo # Test effect of changing memory_temptable_create_cost
--echo #
--echo # Use a query that creates a temporary table for a subquery.
--echo # Run query with default cost constants.
--echo #
let query=
SELECT * FROM t1
WHERE i1 IN (SELECT i1 FROM t1);
eval EXPLAIN FORMAT=JSON $query;
# Master connection:
# Update the value of the memory_temptable_create_cost from default (1.0)
# to 10.0 (up by 9.0)
UPDATE mysql.server_cost
SET cost_value=10.0
WHERE cost_name="memory_temptable_create_cost";
FLUSH OPTIMIZER_COSTS;
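# Worked estimate (based on the comments above): the subquery is materialized
# into a single in-memory temporary table, so the total query cost should go
# up by exactly the per-table creation delta, 10.0 - 1.0 = 9.0.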
# Create NEW connection
connect (con1,localhost,root,,);
connection con1;
--echo #
--echo # Verify that the cost estimate for the query is increased (by 9.0)
--echo #
eval EXPLAIN FORMAT=JSON $query;
disconnect con1;
connection default;
# Reset cost constants
UPDATE mysql.server_cost
SET cost_value=DEFAULT;
--echo #
--echo # Test effect of changing memory_temptable_row_cost
--echo #
--echo # Use a query that creates a temporary table for a subquery.
--echo # Result from running query with default cost constants:
--echo #
let query=
SELECT * FROM t1
WHERE i1 IN (SELECT i1 FROM t1);
eval EXPLAIN FORMAT=JSON $query;
# Master connection:
# Update the value of the memory_temptable_row_cost from default (0.1)
# to 0.2
UPDATE mysql.server_cost
SET cost_value=0.2
WHERE cost_name="memory_temptable_row_cost";
FLUSH OPTIMIZER_COSTS;
# Create NEW connection
connect (con1,localhost,root,,);
connection con1;
--echo #
--echo # Verify that the cost estimate for the query is increased:
--echo # 100 rows need to be inserted into and read from the temporary table.
--echo # The cost of each insert and read is increased by 0.1, i.e. the cost
--echo # should be increased by 100 * 2 * 0.1 = 20.
--echo #
eval EXPLAIN FORMAT=JSON $query;
disconnect con1;
connection default;
# Reset cost constants
UPDATE mysql.server_cost
SET cost_value=DEFAULT;
# The next two tests are for the cost of disk-based temporary tables.
# To ensure the temporary table is written to a disk-based storage engine
set global internal_tmp_mem_storage_engine='memory';
set session internal_tmp_mem_storage_engine='memory';
set @max_heap_table_size_save= @@max_heap_table_size;
set max_heap_table_size= 16384;
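# Illustration only (kept as comments so the recorded result stays stable):
# with the in-memory temp table engine forced to MEMORY and a 16K
# max_heap_table_size, the materialized subquery result exceeds the limit and
# is converted to an on-disk temporary table, so the disk_temptable_* cost
# constants apply. The effective settings can be checked with e.g.
#   SHOW VARIABLES LIKE 'internal_tmp_mem_storage_engine';
#   SHOW VARIABLES LIKE 'max_heap_table_size';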
--echo #
--echo # Test effect of changing disk_temptable_create_cost
--echo #
--echo # Use a query that creates a temporary table for a subquery.
--echo # Result from running query with default cost constants:
--echo #
let query=
SELECT * FROM t1
WHERE c1 IN (SELECT c1 FROM t1);
eval EXPLAIN FORMAT=JSON $query;
# Master connection:
# Update the value of the disk_temptable_create_cost from default (20.0)
# to 40.0
UPDATE mysql.server_cost
SET cost_value=40.0
WHERE cost_name="disk_temptable_create_cost";
FLUSH OPTIMIZER_COSTS;
# Create NEW connection
connect (con1,localhost,root,,);
connection con1;
# To ensure a disk temp table is used
set max_heap_table_size= 16384;
--echo #
--echo # Verify that the cost estimate for the query is increased (by 20.0)
--echo #
eval EXPLAIN FORMAT=JSON $query;
disconnect con1;
connection default;
# Reset cost constants
UPDATE mysql.server_cost
SET cost_value=DEFAULT;
--echo #
--echo # Test effect of changing disk_temptable_row_cost
--echo #
--echo # Use a query that creates a temporary table for a subquery
--echo # Result from running query with default cost constants:
--echo #
let query=
SELECT * FROM t1
WHERE c1 IN (SELECT c1 FROM t1);
eval EXPLAIN FORMAT=JSON $query;
# Master connection:
# Update the value of the disk_temptable_row_cost from default (0.5)
# to 1.0
UPDATE mysql.server_cost
SET cost_value=1.0
WHERE cost_name="disk_temptable_row_cost";
FLUSH OPTIMIZER_COSTS;
# Create NEW connection
connect (con1,localhost,root,,);
connection con1;
set max_heap_table_size= 16384;
--echo #
--echo # Verify that the cost estimate for the query is increased:
--echo # 100 rows need to be inserted into and read from the temporary table.
--echo # The cost of each insert and read is increased by 0.5, i.e. the cost
--echo # should be increased by 100 * 2 * 0.5 = 100.
--echo #
eval EXPLAIN FORMAT=JSON $query;
disconnect con1;
connection default;
# Reset cost constants
UPDATE mysql.server_cost
SET cost_value=DEFAULT;
# Finished testing disk-based temporary tables
set max_heap_table_size= @max_heap_table_size_save;
set session internal_tmp_mem_storage_engine=default;
set global internal_tmp_mem_storage_engine=default;
--echo #
--echo # Test effect of changing the memory_block_read_cost
--echo #
--echo # Result from running query with default cost constants:
--echo #
let query= SELECT * FROM t1;
eval EXPLAIN FORMAT=JSON $query;
# Master connection: Update the value of the memory_block_read_cost
UPDATE mysql.engine_cost
SET cost_value=0.5
WHERE cost_name="memory_block_read_cost";
FLUSH OPTIMIZER_COSTS;
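# Note: unlike the constants above, memory_block_read_cost lives in
# mysql.engine_cost (per-storage-engine costs), not mysql.server_cost.
# Illustration only (kept as a comment so the recorded result stays stable):
#   SELECT engine_name, cost_name, cost_value FROM mysql.engine_cost
#   WHERE cost_name = "memory_block_read_cost";
# Assuming the 8.0 default of 0.25, setting it to 0.5 doubles the cost of
# reading a block from an in-memory buffer, which is why the read cost in
# the EXPLAIN output below is expected to double.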
# Create NEW connection
connect (con1,localhost,root,,);
connection con1;
--echo #
--echo # Verify that the cost estimate for read cost has doubled
--echo #
eval EXPLAIN FORMAT=JSON $query;
disconnect con1;
connection default;
# Reset cost constants
UPDATE mysql.engine_cost
SET cost_value=DEFAULT;
FLUSH OPTIMIZER_COSTS;
DROP TABLE t0,t1;