pkg/statistics/handle/syncload/BUILD.bazel (3 changes: 2 additions & 1 deletion)

@@ -14,6 +14,7 @@ go_library(
         "//pkg/parser/mysql",
         "//pkg/sessionctx",
         "//pkg/sessionctx/stmtctx",
+        "//pkg/sessionctx/variable",
         "//pkg/statistics",
         "//pkg/statistics/handle/storage",
         "//pkg/statistics/handle/types",
@@ -35,7 +36,7 @@ go_test(
     srcs = ["stats_syncload_test.go"],
     flaky = True,
     race = "on",
-    shard_count = 5,
+    shard_count = 6,
     deps = [
         ":syncload",
         "//pkg/config",

pkg/statistics/handle/syncload/stats_syncload.go (17 changes: 17 additions & 0 deletions)

@@ -30,6 +30,7 @@ import (
     "github.com/pingcap/tidb/pkg/parser/mysql"
     "github.com/pingcap/tidb/pkg/sessionctx"
     "github.com/pingcap/tidb/pkg/sessionctx/stmtctx"
+    "github.com/pingcap/tidb/pkg/sessionctx/variable"
     "github.com/pingcap/tidb/pkg/statistics"
     "github.com/pingcap/tidb/pkg/statistics/handle/storage"
     statstypes "github.com/pingcap/tidb/pkg/statistics/handle/types"
@@ -300,6 +301,14 @@ func (s *statsSyncLoad) handleOneItemTask(task *statstypes.NeededItemTask) (err
             s.statsHandle.SPool().Put(se)
         }
     }()
+    var skipTypes map[string]struct{}
+    val, err := sctx.GetSessionVars().GlobalVarsAccessor.GetGlobalSysVar(variable.TiDBAnalyzeSkipColumnTypes)
+    if err != nil {
+        logutil.BgLogger().Warn("failed to get global variable", zap.Error(err))
+    } else {
+        skipTypes = variable.ParseAnalyzeSkipColumnTypes(val)
+    }
+
     item := task.Item.TableItemID
     tbl, ok := s.statsHandle.Get(item.TableID)

@@ -335,6 +344,13 @@ func (s *statsSyncLoad) handleOneItemTask(task *statstypes.NeededItemTask) (err
             // so we have to get the column info from the domain.
             wrapper.colInfo = tblInfo.Meta().GetColumnByID(item.ID)
         }
+        if skipTypes != nil {
+            _, skip := skipTypes[types.TypeToStr(wrapper.colInfo.FieldType.GetType(), wrapper.colInfo.FieldType.GetCharset())]
+            if skip {
+                return nil
+            }
+        }
+
         // If this column is not analyzed yet and we don't have it in memory.
         // We create a fake one for the pseudo estimation.
         if loadNeeded && !analyzed {
@@ -348,6 +364,7 @@ func (s *statsSyncLoad) handleOneItemTask(task *statstypes.NeededItemTask) (err
                 return nil
             }
         }
+    failpoint.Inject("handleOneItemTaskPanic", nil)
     t := time.Now()
     needUpdate := false
     wrapper, err = s.readStatsForOneItem(sctx, item, wrapper, isPkIsHandle, task.Item.FullLoad)

pkg/statistics/handle/syncload/stats_syncload_test.go (16 changes: 16 additions & 0 deletions)

@@ -55,6 +55,22 @@ func TestSyncLoadSkipUnAnalyzedItems(t *testing.T) {
     failpoint.Disable("github.com/pingcap/tidb/pkg/statistics/handle/syncload/assertSyncLoadItems")
 }

+func TestSyncLoadSkipAnalyzSkipColumnItems(t *testing.T) {
+    store, dom := testkit.CreateMockStoreAndDomain(t)
+    tk := testkit.NewTestKit(t, store)
+    tk.MustExec("use test")
+    tk.MustExec("drop table if exists t")
+    tk.MustExec("create table t(`id` bigint(20) NOT NULL AUTO_INCREMENT,content text,PRIMARY KEY (`id`))")
+    h := dom.StatsHandle()
+    h.SetLease(1)
+
+    tk.MustExec("analyze table t")
+    tk.MustExec("set @@session.tidb_analyze_skip_column_types = 'json, text, blob'") // text is not default.
+    require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/pkg/statistics/handle/syncload/handleOneItemTaskPanic", `panic`))
+    tk.MustQuery("trace plan select * from t where content ='ab'")
+    require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/pkg/statistics/handle/syncload/handleOneItemTaskPanic"))
+}
+
 func TestConcurrentLoadHist(t *testing.T) {
     store, dom := testkit.CreateMockStoreAndDomain(t)

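In short, the change makes handleOneItemTask read tidb_analyze_skip_column_types once per sync-load task, parse it into a set of type names, and return early when the requested column's type is in that set, so no histogram is read for such columns. Below is a minimal standalone sketch of that check; parseSkipTypes and shouldSkipLoad are hypothetical stand-ins for the TiDB-internal variable.ParseAnalyzeSkipColumnTypes helper and the types.TypeToStr lookup, not the actual implementation.

package main

import (
	"fmt"
	"strings"
)

// parseSkipTypes mimics, in spirit, what variable.ParseAnalyzeSkipColumnTypes does:
// it turns a value such as "json,text,blob" into a lookup set of type names.
func parseSkipTypes(val string) map[string]struct{} {
	skip := make(map[string]struct{})
	for _, name := range strings.Split(val, ",") {
		name = strings.ToLower(strings.TrimSpace(name))
		if name != "" {
			skip[name] = struct{}{}
		}
	}
	return skip
}

// shouldSkipLoad mirrors the membership test added in handleOneItemTask:
// if the column's type name is in the set, the stats load for that column is skipped.
func shouldSkipLoad(skip map[string]struct{}, colTypeName string) bool {
	_, ok := skip[strings.ToLower(colTypeName)]
	return ok
}

func main() {
	skip := parseSkipTypes("json, text, blob")
	fmt.Println(shouldSkipLoad(skip, "text"))   // true: the task returns early, no histogram is read
	fmt.Println(shouldSkipLoad(skip, "bigint")) // false: the load proceeds as before
}

The new test leans on the same early return: it enables the handleOneItemTaskPanic failpoint with the `panic` action, so if the query on the skipped text column still reached the stats-reading path, handleOneItemTask would panic and the test would fail.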