Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

planner: add a switch control if prepare stmt with param limit can be cached #40712

Merged
merged 21 commits into from
Jan 30, 2023
Merged
Show file tree
Hide file tree
Changes from 16 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 60 additions & 0 deletions executor/seqtest/prepared_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -806,3 +806,63 @@ func TestIssue38323(t *testing.T) {
tk.MustExec("set @a = 1;")
tk.MustQuery("execute stmt using @a, @a").Check(tk.MustQuery("explain select * from t where 1 = id and 1 = k group by id, k").Rows())
}

// TestSetPlanCacheLimitSwitch verifies that tidb_enable_plan_cache_for_param_limit
// defaults to ON in both scopes and can be toggled at session and global scope.
func TestSetPlanCacheLimitSwitch(t *testing.T) {
	store := testkit.CreateMockStore(t)
	tk := testkit.NewTestKit(t, store)
	tk.MustExec("use test")

	// Both scopes default to enabled.
	tk.MustQuery("select @@session.tidb_enable_plan_cache_for_param_limit").Check(testkit.Rows("1"))
	tk.MustQuery("select @@global.tidb_enable_plan_cache_for_param_limit").Check(testkit.Rows("1"))

	// Toggle the session-scoped variable and read it back.
	sessionSteps := []struct {
		setSQL string
		want   string
	}{
		{"set @@session.tidb_enable_plan_cache_for_param_limit = OFF;", "0"},
		{"set @@session.tidb_enable_plan_cache_for_param_limit = 1;", "1"},
	}
	for _, step := range sessionSteps {
		tk.MustExec(step.setSQL)
		tk.MustQuery("select @@session.tidb_enable_plan_cache_for_param_limit").Check(testkit.Rows(step.want))
	}

	// Toggle the global-scoped variable and read it back; SET accepts
	// mixed-case boolean literals (off/ON).
	globalSteps := []struct {
		setSQL string
		want   string
	}{
		{"set @@global.tidb_enable_plan_cache_for_param_limit = off;", "0"},
		{"set @@global.tidb_enable_plan_cache_for_param_limit = ON;", "1"},
	}
	for _, step := range globalSteps {
		tk.MustExec(step.setSQL)
		tk.MustQuery("select @@global.tidb_enable_plan_cache_for_param_limit").Check(testkit.Rows(step.want))
	}
}

// TestPlanCacheLimitSwitchEffective checks that flipping
// tidb_enable_plan_cache_for_param_limit takes effect no matter whether the
// flip happens before PREPARE, between PREPARE and EXECUTE, or after the
// plan has already been cached.
func TestPlanCacheLimitSwitchEffective(t *testing.T) {
	store := testkit.CreateMockStore(t)
	tk := testkit.NewTestKit(t, store)
	tk.MustExec("use test")
	tk.MustExec("drop table if exists t")
	tk.MustExec("create table t(a int, key(a))")

	// assertCacheHit runs the prepared statement twice and asserts whether
	// the second execution was served from the plan cache ("1") or not ("0").
	assertCacheHit := func(want string) {
		tk.MustExec("set @a = 1")
		tk.MustExec("execute stmt using @a")
		tk.MustExec("execute stmt using @a")
		tk.MustQuery("select @@last_plan_from_cache").Check(testkit.Rows(want))
	}

	// Switch turned off before PREPARE: the statement is flagged
	// un-cacheable at prepare time and a warning is reported.
	tk.MustExec("set @@session.tidb_enable_plan_cache_for_param_limit = OFF")
	tk.MustExec("prepare stmt from 'select * from t limit ?'")
	tk.MustQuery("show warnings").Check(testkit.Rows("Warning 1105 skip plan-cache: query has 'limit ?' is un-cacheable"))
	assertCacheHit("0")
	tk.MustExec("deallocate prepare stmt")

	// Switch turned off after PREPARE but before EXECUTE: the plan is
	// skipped at execution time with a dedicated warning.
	tk.MustExec("set @@session.tidb_enable_plan_cache_for_param_limit = ON")
	tk.MustExec("prepare stmt from 'select * from t limit ?'")
	tk.MustExec("set @@session.tidb_enable_plan_cache_for_param_limit = OFF")
	assertCacheHit("0")
	tk.MustExec("execute stmt using @a")
	tk.MustQuery("show warnings").Check(testkit.Rows("Warning 1105 skip plan-cache: the switch 'tidb_enable_plan_cache_for_param_limit' is off"))
	tk.MustExec("deallocate prepare stmt")

	// Switch turned off after the plan was cached: the cached entry must
	// no longer be reused.
	tk.MustExec("set @@session.tidb_enable_plan_cache_for_param_limit = ON")
	tk.MustExec("prepare stmt from 'select * from t limit ?'")
	assertCacheHit("1")
	tk.MustExec("set @@session.tidb_enable_plan_cache_for_param_limit = OFF")
	assertCacheHit("0")
	tk.MustExec("deallocate prepare stmt")
}
9 changes: 7 additions & 2 deletions planner/core/plan_cache.go
Original file line number Diff line number Diff line change
Expand Up @@ -287,7 +287,7 @@ func generateNewPlan(ctx context.Context, sctx sessionctx.Context, isNonPrepared

// check whether this plan is cacheable.
if stmtCtx.UseCache {
checkPlanCacheability(sctx, p, len(paramTypes))
checkPlanCacheability(sctx, p, len(paramTypes), len(limitParams))
}

// put this plan into the plan cache.
Expand All @@ -312,7 +312,7 @@ func generateNewPlan(ctx context.Context, sctx sessionctx.Context, isNonPrepared
}

// checkPlanCacheability checks whether this plan is cacheable and set to skip plan cache if it's uncacheable.
func checkPlanCacheability(sctx sessionctx.Context, p Plan, paramNum int) {
func checkPlanCacheability(sctx sessionctx.Context, p Plan, paramNum int, limitParamNum int) {
stmtCtx := sctx.GetSessionVars().StmtCtx
var pp PhysicalPlan
switch x := p.(type) {
Expand Down Expand Up @@ -347,6 +347,11 @@ func checkPlanCacheability(sctx sessionctx.Context, p Plan, paramNum int) {
stmtCtx.SetSkipPlanCache(errors.New("skip plan-cache: the plan with IndexMerge accessing Multi-Valued Index is un-cacheable"))
return
}

// before cache the param limit plan, check switch
if limitParamNum != 0 && !sctx.GetSessionVars().EnablePlanCacheForParamLimit {
stmtCtx.SetSkipPlanCache(errors.New("skip plan-cache: the switch 'tidb_enable_plan_cache_for_param_limit' is off"))
}
}

// RebuildPlan4CachedPlan will rebuild this plan under current user parameters.
Expand Down
12 changes: 10 additions & 2 deletions planner/core/plan_cache_lru.go
Original file line number Diff line number Diff line change
Expand Up @@ -258,14 +258,22 @@ func (l *LRUPlanCache) memoryControl() {
func (l *LRUPlanCache) pickFromBucket(bucket map[*list.Element]struct{}, matchOpts *planCacheMatchOpts) (*list.Element, bool) {
for k := range bucket {
plan := k.Value.(*planCacheEntry).PlanValue.(*PlanCacheValue)
// check param types' compatibility
ok1 := plan.matchOpts.paramTypes.CheckTypesCompatibility4PC(matchOpts.paramTypes)
if !ok1 {
continue
}

// check limit offset and key if equal and check switch if enabled
ok2 := checkUint64SliceIfEqual(plan.matchOpts.limitOffsetAndCount, matchOpts.limitOffsetAndCount)
if ok2 {
return k, true
if !ok2 {
continue
}
if len(plan.matchOpts.limitOffsetAndCount) > 0 && !l.sctx.GetSessionVars().EnablePlanCacheForParamLimit {
// offset and key slice matched, but it is a plan with param limit and the switch is disabled
continue
}
return k, true
}
return nil, false
}
Expand Down
14 changes: 10 additions & 4 deletions planner/core/plan_cache_lru_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,11 +47,13 @@ func randomPlanCacheValue(types []*types.FieldType) *PlanCacheValue {

func TestLRUPCPut(t *testing.T) {
// test initialize
lruA := NewLRUPlanCache(0, 0, 0, MockContext())
mockCtx := MockContext()
mockCtx.GetSessionVars().EnablePlanCacheForParamLimit = true
lruA := NewLRUPlanCache(0, 0, 0, mockCtx)
require.Equal(t, lruA.capacity, uint(100))

maxMemDroppedKv := make(map[kvcache.Key]kvcache.Value)
lru := NewLRUPlanCache(3, 0, 0, MockContext())
lru := NewLRUPlanCache(3, 0, 0, mockCtx)
lru.onEvict = func(key kvcache.Key, value kvcache.Value) {
maxMemDroppedKv[key] = value
}
Expand Down Expand Up @@ -131,7 +133,9 @@ func TestLRUPCPut(t *testing.T) {
}

func TestLRUPCGet(t *testing.T) {
lru := NewLRUPlanCache(3, 0, 0, MockContext())
mockCtx := MockContext()
mockCtx.GetSessionVars().EnablePlanCacheForParamLimit = true
lru := NewLRUPlanCache(3, 0, 0, mockCtx)

keys := make([]*planCacheKey, 5)
vals := make([]*PlanCacheValue, 5)
Expand Down Expand Up @@ -185,7 +189,9 @@ func TestLRUPCGet(t *testing.T) {
}

func TestLRUPCDelete(t *testing.T) {
lru := NewLRUPlanCache(3, 0, 0, MockContext())
mockCtx := MockContext()
mockCtx.GetSessionVars().EnablePlanCacheForParamLimit = true
lru := NewLRUPlanCache(3, 0, 0, mockCtx)

keys := make([]*planCacheKey, 3)
vals := make([]*PlanCacheValue, 3)
Expand Down
34 changes: 18 additions & 16 deletions planner/core/plan_cacheable_checker.go
Original file line number Diff line number Diff line change
Expand Up @@ -135,22 +135,24 @@ func (checker *cacheableChecker) Enter(in ast.Node) (out ast.Node, skipChildren
return in, true
}
}
// TODO: these commented-out lines are kept so the switch can be added in a later PR
//case *ast.Limit:
// if node.Count != nil {
// if _, isParamMarker := node.Count.(*driver.ParamMarkerExpr); isParamMarker {
// checker.cacheable = false
// checker.reason = "query has 'limit ?' is un-cacheable"
// return in, true
// }
// }
// if node.Offset != nil {
// if _, isParamMarker := node.Offset.(*driver.ParamMarkerExpr); isParamMarker {
// checker.cacheable = false
// checker.reason = "query has 'limit ?, 10' is un-cacheable"
// return in, true
// }
// }
case *ast.Limit:
if checker.sctx.GetSessionVars().EnablePlanCacheForParamLimit {
return in, false
}
if node.Count != nil {
if _, isParamMarker := node.Count.(*driver.ParamMarkerExpr); isParamMarker {
checker.cacheable = false
checker.reason = "query has 'limit ?' is un-cacheable"
return in, true
}
}
if node.Offset != nil {
if _, isParamMarker := node.Offset.(*driver.ParamMarkerExpr); isParamMarker {
checker.cacheable = false
checker.reason = "query has 'limit ?, 10' is un-cacheable"
return in, true
}
}
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

How about:

		if node.Count != nil {
			if _, isParamMarker := node.Count.(*driver.ParamMarkerExpr); isParamMarker && !checker.sctx.GetSessionVars().EnablePlanCacheForParamLimit {
				checker.cacheable = false
				checker.reason = "query has 'limit ?' is un-cacheable"
				return in, true
			}
		}
		if node.Offset != nil {
			if _, isParamMarker := node.Offset.(*driver.ParamMarkerExpr); isParamMarker && !checker.sctx.GetSessionVars().EnablePlanCacheForParamLimit {
				checker.cacheable = false
				checker.reason = "query has 'limit ?, 10' is un-cacheable"
				return in, true
			}
		}

case *ast.FrameBound:
if _, ok := node.Expr.(*driver.ParamMarkerExpr); ok {
checker.cacheable = false
Expand Down
30 changes: 21 additions & 9 deletions planner/core/plan_cacheable_checker_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -26,11 +26,14 @@ import (
"github.com/pingcap/tidb/planner/core"
"github.com/pingcap/tidb/testkit"
driver "github.com/pingcap/tidb/types/parser_driver"
"github.com/pingcap/tidb/util/mock"
"github.com/stretchr/testify/require"
)

func TestCacheable(t *testing.T) {
store := testkit.CreateMockStore(t)
mockCtx := mock.NewContext()
mockCtx.GetSessionVars().EnablePlanCacheForParamLimit = true

tk := testkit.NewTestKit(t, store)

Expand Down Expand Up @@ -87,7 +90,8 @@ func TestCacheable(t *testing.T) {
TableRefs: tableRefsClause,
Limit: limitStmt,
}
require.True(t, core.Cacheable(stmt, is))
c, _ := core.CacheableWithCtx(mockCtx, stmt, is)
require.True(t, c)

limitStmt = &ast.Limit{
Offset: &driver.ParamMarkerExpr{},
Expand All @@ -96,14 +100,16 @@ func TestCacheable(t *testing.T) {
TableRefs: tableRefsClause,
Limit: limitStmt,
}
require.True(t, core.Cacheable(stmt, is))
c, _ = core.CacheableWithCtx(mockCtx, stmt, is)
require.True(t, c)

limitStmt = &ast.Limit{}
stmt = &ast.DeleteStmt{
TableRefs: tableRefsClause,
Limit: limitStmt,
}
require.True(t, core.Cacheable(stmt, is))
c, _ = core.CacheableWithCtx(mockCtx, stmt, is)
require.True(t, c)

stmt.(*ast.DeleteStmt).TableHints = append(stmt.(*ast.DeleteStmt).TableHints, &ast.TableOptimizerHint{
HintName: model.NewCIStr(core.HintIgnorePlanCache),
Expand Down Expand Up @@ -139,7 +145,8 @@ func TestCacheable(t *testing.T) {
TableRefs: tableRefsClause,
Limit: limitStmt,
}
require.True(t, core.Cacheable(stmt, is))
c, _ = core.CacheableWithCtx(mockCtx, stmt, is)
require.True(t, c)

limitStmt = &ast.Limit{
Offset: &driver.ParamMarkerExpr{},
Expand All @@ -148,14 +155,16 @@ func TestCacheable(t *testing.T) {
TableRefs: tableRefsClause,
Limit: limitStmt,
}
require.True(t, core.Cacheable(stmt, is))
c, _ = core.CacheableWithCtx(mockCtx, stmt, is)
require.True(t, c)

limitStmt = &ast.Limit{}
stmt = &ast.UpdateStmt{
TableRefs: tableRefsClause,
Limit: limitStmt,
}
require.True(t, core.Cacheable(stmt, is))
c, _ = core.CacheableWithCtx(mockCtx, stmt, is)
require.True(t, c)

stmt.(*ast.UpdateStmt).TableHints = append(stmt.(*ast.UpdateStmt).TableHints, &ast.TableOptimizerHint{
HintName: model.NewCIStr(core.HintIgnorePlanCache),
Expand Down Expand Up @@ -188,21 +197,24 @@ func TestCacheable(t *testing.T) {
stmt = &ast.SelectStmt{
Limit: limitStmt,
}
require.True(t, core.Cacheable(stmt, is))
c, _ = core.CacheableWithCtx(mockCtx, stmt, is)
require.True(t, c)

limitStmt = &ast.Limit{
Offset: &driver.ParamMarkerExpr{},
}
stmt = &ast.SelectStmt{
Limit: limitStmt,
}
require.True(t, core.Cacheable(stmt, is))
c, _ = core.CacheableWithCtx(mockCtx, stmt, is)
require.True(t, c)

limitStmt = &ast.Limit{}
stmt = &ast.SelectStmt{
Limit: limitStmt,
}
require.True(t, core.Cacheable(stmt, is))
c, _ = core.CacheableWithCtx(mockCtx, stmt, is)
require.True(t, c)

paramExpr := &driver.ParamMarkerExpr{}
orderByClause := &ast.OrderByClause{Items: []*ast.ByItem{{Expr: paramExpr}}}
Expand Down
3 changes: 3 additions & 0 deletions sessionctx/variable/session.go
Original file line number Diff line number Diff line change
Expand Up @@ -1253,6 +1253,9 @@ type SessionVars struct {
// PreparedPlanCacheMonitor indicates whether to enable prepared plan cache monitor.
EnablePreparedPlanCacheMemoryMonitor bool

// EnablePlanCacheForParamLimit controls whether a prepared statement with a parameterized LIMIT can be cached
EnablePlanCacheForParamLimit bool

// EnableNonPreparedPlanCache indicates whether to enable non-prepared plan cache.
EnableNonPreparedPlanCache bool

Expand Down
4 changes: 4 additions & 0 deletions sessionctx/variable/sysvar.go
Original file line number Diff line number Diff line change
Expand Up @@ -2267,6 +2267,10 @@ var defaultSysVars = []*SysVar{
s.PessimisticTransactionAggressiveLocking = TiDBOptOn(val)
return nil
}},
{Scope: ScopeGlobal | ScopeSession, Name: TiDBEnablePlanCacheForParamLimit, Value: BoolToOnOff(DefTiDBEnablePlanCacheForParamLimit), Type: TypeBool, SetSession: func(s *SessionVars, val string) error {
s.EnablePlanCacheForParamLimit = TiDBOptOn(val)
return nil
}},
}

// FeedbackProbability points to the FeedbackProbability in statistics package.
Expand Down
4 changes: 4 additions & 0 deletions sessionctx/variable/tidb_vars.go
Original file line number Diff line number Diff line change
Expand Up @@ -796,6 +796,9 @@ const (
// TiDBPessimisticTransactionAggressiveLocking controls whether aggressive locking for pessimistic transaction
// is enabled.
TiDBPessimisticTransactionAggressiveLocking = "tidb_pessimistic_txn_aggressive_locking"

// TiDBEnablePlanCacheForParamLimit controls whether a prepared statement with a parameterized LIMIT can be cached
TiDBEnablePlanCacheForParamLimit = "tidb_enable_plan_cache_for_param_limit"
)

// TiDB vars that have only global scope
Expand Down Expand Up @@ -1167,6 +1170,7 @@ const (
DefTiDBTTLDeleteWorkerCount = 4
DefTiDBEnableResourceControl = false
DefTiDBPessimisticTransactionAggressiveLocking = false
DefTiDBEnablePlanCacheForParamLimit = true
)

// Process global variables.
Expand Down