From 6061a6e09d057f86209952036a32cfbb03139edf Mon Sep 17 00:00:00 2001 From: D3Hunter Date: Wed, 13 Nov 2024 13:03:24 +0800 Subject: [PATCH 01/12] add fields --- Makefile | 2 +- pkg/lightning/checkpoints/checkpoints.go | 83 +++++-- .../checkpointspb/file_checkpoints.pb.go | 225 +++++++++++++----- .../checkpointspb/file_checkpoints.proto | 10 +- pkg/lightning/common/util.go | 4 +- 5 files changed, 233 insertions(+), 91 deletions(-) diff --git a/Makefile b/Makefile index c19da72f13b78..7e3e129d68b79 100644 --- a/Makefile +++ b/Makefile @@ -530,7 +530,7 @@ br_bins: .PHONY: data_parsers data_parsers: tools/bin/vfsgendev pkg/lightning/mydump/parser_generated.go lightning_web - PATH="$(GOPATH)/bin":"$(PATH)":"$(TOOLS)" protoc -I. -I"$(GOPATH)/src" pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto --gogofaster_out=. + PATH="$(GOPATH)/bin":"$(PATH)":"$(TOOLS)" protoc -I. -I"$(GOPATH)/pkg/mod" pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto --gogofaster_out=. tools/bin/vfsgendev -source='"github.com/pingcap/tidb/lightning/pkg/web".Res' && mv res_vfsdata.go lightning/pkg/web/ .PHONY: build_dumpling diff --git a/pkg/lightning/checkpoints/checkpoints.go b/pkg/lightning/checkpoints/checkpoints.go index 1904ddbe9ef15..b22d0f2eec029 100644 --- a/pkg/lightning/checkpoints/checkpoints.go +++ b/pkg/lightning/checkpoints/checkpoints.go @@ -71,7 +71,7 @@ const WholeTableEngineID = math.MaxInt32 // remember to increase the version number in case of incompatible change. const ( CheckpointTableNameTask = "task_v2" - CheckpointTableNameTable = "table_v9" + CheckpointTableNameTable = "table_v10" CheckpointTableNameEngine = "engine_v5" CheckpointTableNameChunk = "chunk_v5" ) @@ -113,6 +113,9 @@ const ( kv_bytes bigint unsigned NOT NULL DEFAULT 0, kv_kvs bigint unsigned NOT NULL DEFAULT 0, kv_checksum bigint unsigned NOT NULL DEFAULT 0, + auto_rand_base bigint NOT NULL DEFAULT 0, + auto_incr_base bigint NOT NULL DEFAULT 0, + auto_row_id_base bigint NOT NULL DEFAULT 0, INDEX(task_id) );` CreateEngineTableTemplate = ` @@ -168,7 +171,8 @@ const ( FROM %s.%s WHERE table_name = ? ORDER BY engine_id, path, offset;` ReadTableRemainTemplate = ` - SELECT status, alloc_base, table_id, table_info, kv_bytes, kv_kvs, kv_checksum FROM %s.%s WHERE table_name = ?;` + SELECT status, alloc_base, table_id, table_info, kv_bytes, kv_kvs, kv_checksum, auto_rand_base, auto_incr_base, auto_row_id_base + FROM %s.%s WHERE table_name = ?;` ReplaceEngineTemplate = ` REPLACE INTO %s.%s (table_name, engine_id, status) VALUES (?, ?, ?);` ReplaceChunkTemplate = ` @@ -187,7 +191,12 @@ const ( UPDATE %s.%s SET pos = ?, prev_rowid_max = ?, kvc_bytes = ?, kvc_kvs = ?, kvc_checksum = ?, columns = ? WHERE (table_name, engine_id, path, offset) = (?, ?, ?, ?);` UpdateTableRebaseTemplate = ` - UPDATE %s.%s SET alloc_base = GREATEST(?, alloc_base) WHERE table_name = ?;` + UPDATE %s.%s + SET alloc_base = GREATEST(?, alloc_base), + auto_rand_base = GREATEST(?, auto_rand_base), + auto_incr_base = GREATEST(?, auto_incr_base), + auto_row_id_base = GREATEST(?, auto_row_id_base) + WHERE table_name = ?;` UpdateTableStatusTemplate = ` UPDATE %s.%s SET status = ? WHERE table_name = ?;` UpdateTableChecksumTemplate = `UPDATE %s.%s SET kv_bytes = ?, kv_kvs = ?, kv_checksum = ? WHERE table_name = ?;` @@ -348,6 +357,12 @@ type TableCheckpoint struct { TableInfo *model.TableInfo // remote checksum before restore Checksum verify.KVChecksum + // used to record the max auto random ID without the sharding bits that has been used. 
+ AutoRandBase int64 + // used to record the max auto increment ID that has been used. + AutoIncrBase int64 + // used to record the max auto row ID that has been used. + AutoRowIDBase int64 } // DeepCopy returns a deep copy of the table checkpoint. @@ -362,6 +377,10 @@ func (cp *TableCheckpoint) DeepCopy() *TableCheckpoint { Engines: engines, TableID: cp.TableID, Checksum: cp.Checksum, + + AutoRandBase: cp.AutoRandBase, + AutoIncrBase: cp.AutoIncrBase, + AutoRowIDBase: cp.AutoRowIDBase, } } @@ -389,13 +408,17 @@ type engineCheckpointDiff struct { // TableCheckpointDiff is the difference between two table checkpoints. type TableCheckpointDiff struct { - hasStatus bool - hasRebase bool - hasChecksum bool - status CheckpointStatus - allocBase int64 - engines map[int32]engineCheckpointDiff - checksum verify.KVChecksum + hasStatus bool + // it means some XXXBase fields has been updated. + hasRebase bool + hasChecksum bool + status CheckpointStatus + allocBase int64 + engines map[int32]engineCheckpointDiff + checksum verify.KVChecksum + autoRandBase int64 + autoIncrBase int64 + autoRowIDBase int64 } // NewTableCheckpointDiff returns a new TableCheckpointDiff. @@ -433,7 +456,10 @@ func (cp *TableCheckpoint) Apply(cpd *TableCheckpointDiff) { cp.Status = cpd.status } if cpd.hasRebase { - cp.AllocBase = cpd.allocBase + cp.AllocBase = max(cp.AllocBase, cpd.allocBase) + cp.AutoRandBase = max(cp.AutoRandBase, cpd.autoRandBase) + cp.AutoIncrBase = max(cp.AutoIncrBase, cpd.autoIncrBase) + cp.AutoRowIDBase = max(cp.AutoRowIDBase, cpd.autoRowIDBase) } for engineID, engineDiff := range cpd.engines { engine := cp.Engines[engineID] @@ -932,7 +958,10 @@ func (cpdb *MySQLCheckpointsDB) Get(ctx context.Context, tableName string) (*Tab var status uint8 var kvs, bytes, checksum uint64 var rawTableInfo []byte - if err := tableRow.Scan(&status, &cp.AllocBase, &cp.TableID, &rawTableInfo, &bytes, &kvs, &checksum); err != nil { + if err := tableRow.Scan( + &status, &cp.AllocBase, &cp.TableID, &rawTableInfo, &bytes, &kvs, &checksum, + &cp.AutoRandBase, &cp.AutoIncrBase, &cp.AutoRowIDBase, + ); err != nil { if err == sql.ErrNoRows { return errors.NotFoundf("checkpoint for table %s", tableName) } @@ -954,7 +983,8 @@ func (cpdb *MySQLCheckpointsDB) Get(ctx context.Context, tableName string) (*Tab } // InsertEngineCheckpoints implements the DB interface. 
-func (cpdb *MySQLCheckpointsDB) InsertEngineCheckpoints(ctx context.Context, tableName string, checkpoints map[int32]*EngineCheckpoint) error { +func (cpdb *MySQLCheckpointsDB) InsertEngineCheckpoints(ctx context.Context, + tableName string, checkpoints map[int32]*EngineCheckpoint) error { s := common.SQLWithRetry{ DB: cpdb.db, Logger: log.FromContext(ctx).With(zap.String("table", tableName)), @@ -1052,7 +1082,7 @@ func (cpdb *MySQLCheckpointsDB) Update(taskCtx context.Context, checkpointDiffs } } if cpd.hasRebase { - if _, e := rebaseStmt.ExecContext(c, cpd.allocBase, tableName); e != nil { + if _, e := rebaseStmt.ExecContext(c, cpd.allocBase, cpd.autoRandBase, cpd.autoIncrBase, cpd.autoRowIDBase, tableName); e != nil { return errors.Trace(e) } } @@ -1330,12 +1360,15 @@ func (cpdb *FileCheckpointsDB) Get(_ context.Context, tableName string) (*TableC } cp := &TableCheckpoint{ - Status: CheckpointStatus(tableModel.Status), - AllocBase: tableModel.AllocBase, - Engines: make(map[int32]*EngineCheckpoint, len(tableModel.Engines)), - TableID: tableModel.TableID, - TableInfo: tableInfo, - Checksum: verify.MakeKVChecksum(tableModel.KvBytes, tableModel.KvKvs, tableModel.KvChecksum), + Status: CheckpointStatus(tableModel.Status), + AllocBase: tableModel.AllocBase, + Engines: make(map[int32]*EngineCheckpoint, len(tableModel.Engines)), + TableID: tableModel.TableID, + TableInfo: tableInfo, + Checksum: verify.MakeKVChecksum(tableModel.KvBytes, tableModel.KvKvs, tableModel.KvChecksum), + AutoRandBase: tableModel.AutoRandBase, + AutoIncrBase: tableModel.AutoIncrBase, + AutoRowIDBase: tableModel.AutoRowIDBase, } for engineID, engineModel := range tableModel.Engines { @@ -1434,7 +1467,10 @@ func (cpdb *FileCheckpointsDB) Update(_ context.Context, checkpointDiffs map[str tableModel.Status = uint32(cpd.status) } if cpd.hasRebase { - tableModel.AllocBase = cpd.allocBase + tableModel.AllocBase = max(tableModel.AllocBase, cpd.allocBase) + tableModel.AutoRandBase = max(tableModel.AutoRandBase, cpd.autoRandBase) + tableModel.AutoIncrBase = max(tableModel.AutoIncrBase, cpd.autoIncrBase) + tableModel.AutoRowIDBase = max(tableModel.AutoRowIDBase, cpd.autoRowIDBase) } if cpd.hasChecksum { tableModel.KvBytes = cpd.checksum.SumSize() @@ -1751,7 +1787,10 @@ func (cpdb *MySQLCheckpointsDB) DumpTables(ctx context.Context, writer io.Writer status, alloc_base, create_time, - update_time + update_time, + auto_rand_base, + auto_incr_base, + auto_row_id_base, FROM %s.%s; `, cpdb.schema, CheckpointTableNameTable)) if err != nil { diff --git a/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.pb.go b/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.pb.go index 957b712794ed2..74f93acfb7f87 100644 --- a/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.pb.go +++ b/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: file_checkpoints.proto +// source: pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto package checkpointspb @@ -35,7 +35,7 @@ func (m *CheckpointsModel) Reset() { *m = CheckpointsModel{} } func (m *CheckpointsModel) String() string { return proto.CompactTextString(m) } func (*CheckpointsModel) ProtoMessage() {} func (*CheckpointsModel) Descriptor() ([]byte, []int) { - return fileDescriptor_975f281c215c00f7, []int{0} + return fileDescriptor_c57c7b77a714394c, []int{0} } func (m *CheckpointsModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -80,7 +80,7 @@ func (m *TaskCheckpointModel) Reset() { *m = TaskCheckpointModel{} } func (m *TaskCheckpointModel) String() string { return proto.CompactTextString(m) } func (*TaskCheckpointModel) ProtoMessage() {} func (*TaskCheckpointModel) Descriptor() ([]byte, []int) { - return fileDescriptor_975f281c215c00f7, []int{1} + return fileDescriptor_c57c7b77a714394c, []int{1} } func (m *TaskCheckpointModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -112,20 +112,26 @@ var xxx_messageInfo_TaskCheckpointModel proto.InternalMessageInfo type TableCheckpointModel struct { Hash []byte `protobuf:"bytes,1,opt,name=hash,proto3" json:"hash,omitempty"` Status uint32 `protobuf:"varint,3,opt,name=status,proto3" json:"status,omitempty"` - AllocBase int64 `protobuf:"varint,4,opt,name=alloc_base,json=allocBase,proto3" json:"alloc_base,omitempty"` + AllocBase int64 `protobuf:"varint,4,opt,name=alloc_base,json=allocBase,proto3" json:"alloc_base,omitempty"` // Deprecated: Do not use. Engines map[int32]*EngineCheckpointModel `protobuf:"bytes,8,rep,name=engines,proto3" json:"engines,omitempty" protobuf_key:"zigzag32,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` TableID int64 `protobuf:"varint,9,opt,name=tableID,proto3" json:"tableID,omitempty"` KvBytes uint64 `protobuf:"varint,10,opt,name=kv_bytes,json=kvBytes,proto3" json:"kv_bytes,omitempty"` KvKvs uint64 `protobuf:"varint,11,opt,name=kv_kvs,json=kvKvs,proto3" json:"kv_kvs,omitempty"` KvChecksum uint64 `protobuf:"fixed64,12,opt,name=kv_checksum,json=kvChecksum,proto3" json:"kv_checksum,omitempty"` TableInfo []byte `protobuf:"bytes,13,opt,name=table_info,json=tableInfo,proto3" json:"table_info,omitempty"` + // used to record the max auto random ID without the sharding bits that has been used. + AutoRandBase int64 `protobuf:"varint,14,opt,name=autoRandBase,proto3" json:"autoRandBase,omitempty"` + // used to record the max auto increment ID that has been used. + AutoIncrBase int64 `protobuf:"varint,15,opt,name=autoIncrBase,proto3" json:"autoIncrBase,omitempty"` + // used to record the max auto row ID that has been used. 
+ AutoRowIDBase int64 `protobuf:"varint,16,opt,name=autoRowIDBase,proto3" json:"autoRowIDBase,omitempty"` } func (m *TableCheckpointModel) Reset() { *m = TableCheckpointModel{} } func (m *TableCheckpointModel) String() string { return proto.CompactTextString(m) } func (*TableCheckpointModel) ProtoMessage() {} func (*TableCheckpointModel) Descriptor() ([]byte, []int) { - return fileDescriptor_975f281c215c00f7, []int{2} + return fileDescriptor_c57c7b77a714394c, []int{2} } func (m *TableCheckpointModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -164,7 +170,7 @@ func (m *EngineCheckpointModel) Reset() { *m = EngineCheckpointModel{} } func (m *EngineCheckpointModel) String() string { return proto.CompactTextString(m) } func (*EngineCheckpointModel) ProtoMessage() {} func (*EngineCheckpointModel) Descriptor() ([]byte, []int) { - return fileDescriptor_975f281c215c00f7, []int{3} + return fileDescriptor_c57c7b77a714394c, []int{3} } func (m *EngineCheckpointModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -215,7 +221,7 @@ func (m *ChunkCheckpointModel) Reset() { *m = ChunkCheckpointModel{} } func (m *ChunkCheckpointModel) String() string { return proto.CompactTextString(m) } func (*ChunkCheckpointModel) ProtoMessage() {} func (*ChunkCheckpointModel) Descriptor() ([]byte, []int) { - return fileDescriptor_975f281c215c00f7, []int{4} + return fileDescriptor_c57c7b77a714394c, []int{4} } func (m *ChunkCheckpointModel) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -255,65 +261,71 @@ func init() { proto.RegisterType((*ChunkCheckpointModel)(nil), "ChunkCheckpointModel") } -func init() { proto.RegisterFile("file_checkpoints.proto", fileDescriptor_975f281c215c00f7) } +func init() { + proto.RegisterFile("pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto", fileDescriptor_c57c7b77a714394c) +} -var fileDescriptor_975f281c215c00f7 = []byte{ - // 873 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0xcd, 0x6e, 0x1c, 0x45, - 0x10, 0xf6, 0x78, 0xbc, 0x7f, 0x35, 0xbb, 0xce, 0xba, 0xb1, 0x9d, 0xc6, 0xc0, 0xb2, 0x6c, 0x38, - 0xac, 0x14, 0xb2, 0x91, 0xc2, 0x05, 0x45, 0x70, 0xc0, 0x76, 0x24, 0x22, 0x2b, 0xc2, 0x6a, 0x02, - 0x07, 0x2e, 0xa3, 0xf9, 0xe9, 0xdd, 0x1d, 0xf5, 0xce, 0xf4, 0x68, 0xba, 0x67, 0xc8, 0xe6, 0x29, - 0x78, 0x0c, 0x5e, 0x82, 0x7b, 0xc4, 0x29, 0x47, 0x8e, 0x60, 0xdf, 0x79, 0x05, 0x50, 0x57, 0x8f, - 0xbd, 0xe3, 0x68, 0x15, 0x71, 0xeb, 0xfa, 0xea, 0xeb, 0xaf, 0xab, 0x6a, 0xbe, 0xee, 0x81, 0xe3, - 0x79, 0xb2, 0xe2, 0x7e, 0xb4, 0xe4, 0x91, 0xc8, 0x65, 0x92, 0x69, 0x35, 0xcb, 0x0b, 0xa9, 0xe5, - 0xc9, 0xa3, 0x45, 0xa2, 0x97, 0x65, 0x38, 0x8b, 0x64, 0xfa, 0x78, 0x21, 0x17, 0xf2, 0x31, 0xc2, - 0x61, 0x39, 0xc7, 0x08, 0x03, 0x5c, 0x59, 0xfa, 0xe4, 0x1f, 0x07, 0x86, 0x67, 0x1b, 0x91, 0x17, - 0x32, 0xe6, 0x2b, 0x72, 0x0e, 0x5e, 0x43, 0x98, 0x3a, 0x63, 0x77, 0xea, 0x3d, 0x99, 0xcc, 0xde, - 0xe5, 0x35, 0x81, 0x67, 0x99, 0x2e, 0xd6, 0xac, 0xb9, 0x8d, 0x7c, 0x03, 0xf7, 0x74, 0xa0, 0x44, - 0xa3, 0x46, 0xba, 0x3b, 0x76, 0xa6, 0xde, 0x93, 0xc3, 0xd9, 0xcb, 0x40, 0x89, 0xcd, 0x66, 0x14, - 0x63, 0xfb, 0xfa, 0x0e, 0x78, 0xf2, 0xe3, 0x9d, 0xc2, 0x50, 0x9f, 0x0c, 0xc1, 0x15, 0x7c, 0x4d, - 0x9d, 0xb1, 0x33, 0xed, 0x31, 0xb3, 0x24, 0x0f, 0xa1, 0x55, 0x05, 0xab, 0x92, 0xd7, 0xd2, 0x47, - 0xb3, 0x97, 0x41, 0xb8, 0xe2, 0xef, 0x6a, 0x5b, 0xce, 0xd3, 0xdd, 0xaf, 0x9c, 0xc9, 0x6f, 0xbb, - 0xf0, 0xc1, 0x96, 0xe3, 0xc9, 0x7d, 0xe8, 0x60, 0xb5, 0x49, 0x8c, 0xf2, 0x2e, 0x6b, 0x9b, 0xf0, - 0x79, 0x4c, 0x3e, 0x01, 0x50, 0xb2, 
0x2c, 0x22, 0xee, 0xc7, 0x49, 0x81, 0xc7, 0xf4, 0x58, 0xcf, - 0x22, 0xe7, 0x49, 0x41, 0x28, 0x74, 0xc2, 0x20, 0x12, 0x3c, 0x8b, 0xa9, 0x8b, 0xb9, 0x9b, 0x90, - 0x3c, 0x80, 0x41, 0x92, 0xe6, 0xb2, 0xd0, 0xbc, 0xf0, 0x83, 0x38, 0x2e, 0xe8, 0x1e, 0xe6, 0xfb, - 0x37, 0xe0, 0xb7, 0x71, 0x5c, 0x90, 0x8f, 0xa0, 0xa7, 0x93, 0x38, 0xf4, 0x97, 0x52, 0x69, 0xda, - 0x42, 0x42, 0xd7, 0x00, 0xdf, 0x49, 0xa5, 0x6f, 0x93, 0x86, 0x4f, 0xdb, 0x63, 0x67, 0xda, 0xb2, - 0xc9, 0x4b, 0x59, 0x68, 0x53, 0x70, 0x1e, 0x5b, 0xe1, 0x0e, 0xee, 0x6b, 0xe7, 0x31, 0x4a, 0x4e, - 0x60, 0xa0, 0xcc, 0x01, 0xb1, 0x2f, 0x2a, 0xac, 0xb9, 0x8b, 0x69, 0xcf, 0x82, 0x17, 0x95, 0xa9, - 0xfa, 0x01, 0x0c, 0x56, 0xc9, 0x62, 0xa9, 0xb3, 0x24, 0x5b, 0xf8, 0x15, 0x2f, 0x68, 0xcf, 0xd6, - 0x76, 0x0b, 0xfe, 0xc4, 0x8b, 0xc9, 0xbf, 0xbb, 0x70, 0xb8, 0x6d, 0x9c, 0x84, 0xc0, 0xde, 0x32, - 0x50, 0x4b, 0x1c, 0x54, 0x9f, 0xe1, 0x9a, 0x1c, 0x43, 0x5b, 0xe9, 0x40, 0x97, 0x0a, 0xc7, 0x30, - 0x60, 0x75, 0x64, 0xc6, 0x17, 0xac, 0x56, 0x32, 0xf2, 0xc3, 0x40, 0x71, 0x1c, 0x81, 0xcb, 0x7a, - 0x88, 0x9c, 0x06, 0x8a, 0x93, 0xaf, 0xa1, 0xc3, 0xb3, 0x45, 0x92, 0x71, 0x45, 0xbb, 0xb5, 0xcd, - 0xb6, 0x1d, 0x39, 0x7b, 0x66, 0x49, 0xd6, 0x66, 0x37, 0x5b, 0xcc, 0xf0, 0xb5, 0x61, 0x3f, 0x3f, - 0xc7, 0x06, 0x5c, 0x76, 0x13, 0x92, 0x0f, 0xa1, 0x2b, 0x2a, 0x3f, 0x5c, 0x6b, 0xae, 0x28, 0x8c, - 0x9d, 0xe9, 0x1e, 0xeb, 0x88, 0xea, 0xd4, 0x84, 0xe4, 0x08, 0xda, 0xa2, 0xf2, 0x45, 0xa5, 0xa8, - 0x87, 0x89, 0x96, 0xa8, 0x2e, 0x2a, 0x45, 0x3e, 0x05, 0x4f, 0x54, 0xd6, 0xac, 0xaa, 0x4c, 0x69, - 0x7f, 0xec, 0x4c, 0xdb, 0x0c, 0x44, 0x75, 0x56, 0x23, 0xa6, 0x13, 0x54, 0xf7, 0x93, 0x6c, 0x2e, - 0xe9, 0x00, 0x7b, 0xef, 0xd9, 0xf3, 0xb2, 0xb9, 0x3c, 0x61, 0xd0, 0x6f, 0x16, 0xd9, 0xf4, 0xea, - 0x81, 0xf5, 0xea, 0x17, 0x77, 0xbd, 0x7a, 0x5c, 0x37, 0xf5, 0x1e, 0xb3, 0xfe, 0xee, 0xc0, 0xd1, - 0x56, 0x52, 0x63, 0xdc, 0xce, 0x9d, 0x71, 0x3f, 0x85, 0x76, 0xb4, 0x2c, 0x33, 0xa1, 0xe8, 0x6e, - 0x3d, 0xce, 0xad, 0xfb, 0x67, 0x67, 0x48, 0xb2, 0xe3, 0xac, 0x77, 0x9c, 0x5c, 0x82, 0xd7, 0x80, - 0xff, 0xcf, 0x65, 0x43, 0xfa, 0x7b, 0xea, 0xff, 0xc3, 0x85, 0xc3, 0x6d, 0x1c, 0xe3, 0xa0, 0x3c, - 0xd0, 0xcb, 0x5a, 0x1c, 0xd7, 0xa6, 0x25, 0x39, 0x9f, 0x2b, 0x6e, 0x9f, 0x09, 0x97, 0xd5, 0x11, - 0x79, 0x04, 0x24, 0x92, 0xab, 0x32, 0xcd, 0xfc, 0x9c, 0x17, 0x69, 0xa9, 0x03, 0x9d, 0xc8, 0x8c, - 0xf6, 0xc7, 0xee, 0xb4, 0xc5, 0x0e, 0x6c, 0xe6, 0x72, 0x93, 0x30, 0x9f, 0x89, 0x67, 0xb1, 0x5f, - 0x4b, 0xb5, 0xac, 0xe1, 0x78, 0x16, 0x7f, 0x6f, 0xd5, 0x86, 0xe0, 0xe6, 0x52, 0xe1, 0x6d, 0x72, - 0x99, 0x59, 0x92, 0xcf, 0x61, 0x3f, 0x2f, 0x78, 0xe5, 0x17, 0xf2, 0x97, 0x24, 0xf6, 0xd3, 0xe0, - 0x15, 0xde, 0x27, 0x97, 0xf5, 0x0d, 0xca, 0x0c, 0xf8, 0x22, 0x78, 0x65, 0xee, 0xe2, 0x86, 0xd0, - 0x45, 0x42, 0xb7, 0x68, 0x24, 0x45, 0x15, 0xd5, 0x76, 0xeb, 0xa1, 0xab, 0xba, 0xa2, 0x8a, 0xac, - 0xdf, 0xee, 0x43, 0xc7, 0x24, 0x8d, 0xe1, 0xac, 0x13, 0xdb, 0xa2, 0x8a, 0x8c, 0xe3, 0x3e, 0x83, - 0xbe, 0x49, 0xdc, 0x5a, 0xce, 0x43, 0xcb, 0x79, 0xa2, 0x8a, 0x6e, 0x3d, 0xf7, 0xb1, 0x79, 0x01, - 0x52, 0xae, 0x74, 0x90, 0xe6, 0x68, 0xb9, 0x21, 0xdb, 0x00, 0x66, 0x8a, 0x7a, 0x9d, 0x73, 0xba, - 0x8f, 0x4f, 0x03, 0xae, 0xc9, 0x18, 0xbc, 0x48, 0xa6, 0x79, 0xc1, 0x95, 0x32, 0x63, 0xba, 0x87, - 0xa9, 0x26, 0x64, 0xae, 0x86, 0x79, 0x0a, 0x7c, 0xf3, 0x71, 0x87, 0xf6, 0xc9, 0x32, 0xf1, 0x05, - 0x5f, 0x9b, 0x3e, 0xf0, 0xb7, 0xa2, 0x92, 0xd7, 0x9c, 0x1e, 0xd8, 0x26, 0x0d, 0xf0, 0x43, 0xf2, - 0x9a, 0x9f, 0x3e, 0x7c, 0xf3, 0xf7, 0x68, 0xe7, 0xcd, 0xd5, 0xc8, 0x79, 0x7b, 0x35, 0x72, 0xfe, - 0xba, 0x1a, 0x39, 0xbf, 0x5e, 0x8f, 0x76, 0xde, 0x5e, 0x8f, 
0x76, 0xfe, 0xbc, 0x1e, 0xed, 0xfc, - 0x3c, 0x68, 0x3c, 0xfd, 0x79, 0x18, 0xb6, 0xf1, 0xf7, 0xf2, 0xe5, 0x7f, 0x01, 0x00, 0x00, 0xff, - 0xff, 0xd2, 0x2d, 0x6f, 0xe5, 0xa7, 0x06, 0x00, 0x00, +var fileDescriptor_c57c7b77a714394c = []byte{ + // 940 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0xcf, 0x6e, 0xdb, 0xc6, + 0x13, 0x36, 0x45, 0xeb, 0xdf, 0x50, 0xb2, 0xe5, 0xfd, 0xd9, 0x09, 0x7f, 0x6e, 0xab, 0x2a, 0x4a, + 0x0e, 0x02, 0xd2, 0x4a, 0xa8, 0xd3, 0x43, 0x11, 0xb4, 0x45, 0x6b, 0x3b, 0x40, 0x0d, 0x23, 0xa8, + 0xb1, 0x4d, 0x7b, 0xe8, 0x85, 0xa0, 0xc8, 0x95, 0x44, 0xac, 0xc4, 0x25, 0xb8, 0x4b, 0x26, 0xca, + 0x53, 0xf4, 0x31, 0xfa, 0x12, 0xbd, 0x07, 0x3d, 0xe5, 0xd8, 0x63, 0x6b, 0xdf, 0x0b, 0xf4, 0x0d, + 0x8a, 0x9d, 0xa5, 0x24, 0x2a, 0x10, 0x82, 0xde, 0x76, 0xbf, 0xf9, 0xe6, 0xdb, 0xd9, 0xd9, 0x6f, + 0x48, 0xf8, 0x3a, 0xe1, 0xd3, 0xd1, 0x3c, 0x9a, 0xce, 0x54, 0x1c, 0xc5, 0xd3, 0x51, 0x30, 0x63, + 0x01, 0x4f, 0x44, 0x14, 0x2b, 0x59, 0x5e, 0x27, 0xe3, 0xd1, 0x24, 0x9a, 0x33, 0xaf, 0x04, 0x0d, + 0x93, 0x54, 0x28, 0x71, 0xfa, 0xf9, 0x34, 0x52, 0xb3, 0x6c, 0x3c, 0x0c, 0xc4, 0x62, 0x34, 0x15, + 0x53, 0x31, 0x42, 0x78, 0x9c, 0x4d, 0xbe, 0xc9, 0x3f, 0x1b, 0x3e, 0x19, 0x9e, 0x21, 0x88, 0x18, + 0xae, 0x4c, 0x56, 0xff, 0x6f, 0x0b, 0x3a, 0x17, 0x1b, 0xad, 0xe7, 0x22, 0x64, 0x73, 0x72, 0x09, + 0x4e, 0x49, 0xdf, 0xb5, 0x7a, 0xf6, 0xc0, 0x39, 0xeb, 0x0f, 0xdf, 0xe5, 0x95, 0x81, 0x67, 0xb1, + 0x4a, 0x97, 0xb4, 0x9c, 0x46, 0xbe, 0x82, 0x43, 0xe5, 0x4b, 0x5e, 0x2a, 0xd5, 0xad, 0xf4, 0xac, + 0x81, 0x73, 0x76, 0x3c, 0x7c, 0xe1, 0x4b, 0xbe, 0x49, 0x46, 0x31, 0x7a, 0xa0, 0xb6, 0xc0, 0xd3, + 0x1f, 0xb7, 0x0a, 0x43, 0x7d, 0xd2, 0x01, 0x9b, 0xb3, 0xa5, 0x6b, 0xf5, 0xac, 0x41, 0x93, 0xea, + 0x25, 0x79, 0x0c, 0xd5, 0xdc, 0x9f, 0x67, 0xac, 0x90, 0x3e, 0x19, 0xbe, 0xf0, 0xc7, 0x73, 0xf6, + 0xae, 0xb6, 0xe1, 0x3c, 0xad, 0x7c, 0x61, 0xf5, 0x7f, 0xad, 0xc0, 0xff, 0x76, 0x1c, 0x4f, 0xee, + 0x43, 0x1d, 0xab, 0x8d, 0x42, 0x94, 0xb7, 0x69, 0x4d, 0x6f, 0xaf, 0x42, 0xf2, 0x11, 0x80, 0x14, + 0x59, 0x1a, 0x30, 0x2f, 0x8c, 0x52, 0x3c, 0xa6, 0x49, 0x9b, 0x06, 0xb9, 0x8c, 0x52, 0xe2, 0x42, + 0x7d, 0xec, 0x07, 0x9c, 0xc5, 0xa1, 0x6b, 0x63, 0x6c, 0xb5, 0x25, 0x0f, 0xa1, 0x1d, 0x2d, 0x12, + 0x91, 0x2a, 0x96, 0x7a, 0x7e, 0x18, 0xa6, 0xee, 0x3e, 0xc6, 0x5b, 0x2b, 0xf0, 0xdb, 0x30, 0x4c, + 0xc9, 0x07, 0xd0, 0x54, 0x51, 0x38, 0xf6, 0x66, 0x42, 0x2a, 0xb7, 0x8a, 0x84, 0x86, 0x06, 0xbe, + 0x13, 0x52, 0xad, 0x83, 0x9a, 0xef, 0xd6, 0x7a, 0xd6, 0xa0, 0x6a, 0x82, 0x37, 0x22, 0x55, 0xba, + 0xe0, 0x24, 0x34, 0xc2, 0x75, 0xcc, 0xab, 0x25, 0x21, 0x4a, 0xf6, 0xa1, 0x2d, 0xf5, 0x01, 0xa1, + 0xc7, 0x73, 0xac, 0xb9, 0x81, 0x61, 0xc7, 0x80, 0xd7, 0xb9, 0xae, 0xfa, 0x21, 0xb4, 0xd7, 0x56, + 0xf3, 0x72, 0x96, 0xba, 0x4d, 0x53, 0xdb, 0x1a, 0xfc, 0x89, 0xa5, 0xfd, 0x7f, 0x6c, 0x38, 0xde, + 0xd5, 0x4e, 0x42, 0x60, 0x7f, 0xe6, 0xcb, 0x19, 0x36, 0xaa, 0x45, 0x71, 0x4d, 0xee, 0x41, 0x4d, + 0x2a, 0x5f, 0x65, 0x12, 0xdb, 0xd0, 0xa6, 0xc5, 0x8e, 0x3c, 0x00, 0xf0, 0xe7, 0x73, 0x11, 0x78, + 0x63, 0x5f, 0x32, 0x6c, 0x81, 0x7d, 0x5e, 0x71, 0x2d, 0xda, 0x44, 0xf4, 0xdc, 0x97, 0x8c, 0x7c, + 0x09, 0x75, 0x16, 0x4f, 0xa3, 0x98, 0x49, 0xb7, 0x51, 0x58, 0x6d, 0xd7, 0xb1, 0xc3, 0x67, 0x86, + 0x64, 0xac, 0xb6, 0x4a, 0xd1, 0x0f, 0xa0, 0x34, 0xfb, 0xea, 0x12, 0x2f, 0x61, 0xd3, 0xd5, 0x96, + 0xfc, 0x1f, 0x1a, 0x3c, 0xf7, 0xc6, 0x4b, 0xc5, 0xa4, 0x0b, 0x3d, 0x6b, 0xb0, 0x4f, 0xeb, 0x3c, + 0x3f, 0xd7, 0x5b, 0x72, 0x02, 0x35, 0x9e, 0x7b, 0x3c, 0x97, 0xae, 0x83, 0x81, 0x2a, 0xcf, 0xaf, + 0x73, 0x49, 0x3e, 0x06, 0x87, 
0xe7, 0xc6, 0xb0, 0x32, 0x5b, 0xb8, 0xad, 0x9e, 0x35, 0xa8, 0x51, + 0xe0, 0xf9, 0x45, 0x81, 0x68, 0x33, 0xa0, 0xba, 0x17, 0xc5, 0x13, 0xe1, 0xb6, 0xf1, 0xfe, 0x4d, + 0x73, 0x5e, 0x3c, 0x11, 0xa4, 0x0f, 0x2d, 0x3f, 0x53, 0x82, 0xfa, 0x71, 0xa8, 0x6f, 0xe6, 0x1e, + 0x60, 0x41, 0x5b, 0xd8, 0x8a, 0x73, 0x15, 0x07, 0x29, 0x72, 0x0e, 0x37, 0x9c, 0x15, 0x46, 0x1e, + 0x41, 0x1b, 0x73, 0xc4, 0xcb, 0xab, 0x4b, 0x24, 0x75, 0x90, 0xb4, 0x0d, 0x9e, 0x52, 0x68, 0x95, + 0x5b, 0x52, 0x9e, 0x8e, 0x23, 0x33, 0x1d, 0x9f, 0x6c, 0x4f, 0xc7, 0xbd, 0xa2, 0x85, 0xef, 0x19, + 0x8f, 0xdf, 0x2c, 0x38, 0xd9, 0x49, 0x2a, 0x3d, 0xb0, 0xb5, 0xf5, 0xc0, 0x4f, 0xa1, 0x16, 0xcc, + 0xb2, 0x98, 0x4b, 0xb7, 0x52, 0x3c, 0xde, 0xce, 0xfc, 0xe1, 0x05, 0x92, 0xcc, 0xe3, 0x15, 0x19, + 0xa7, 0x37, 0xe0, 0x94, 0xe0, 0xff, 0x32, 0xde, 0x48, 0x7f, 0x4f, 0xfd, 0xbf, 0xdb, 0x70, 0xbc, + 0x8b, 0xa3, 0x3d, 0x9b, 0xf8, 0x6a, 0x56, 0x88, 0xe3, 0x5a, 0x5f, 0x49, 0x4c, 0x26, 0x92, 0x99, + 0x0f, 0x93, 0x4d, 0x8b, 0x1d, 0xf9, 0x14, 0x48, 0x20, 0xe6, 0xd9, 0x22, 0xf6, 0x12, 0x96, 0x2e, + 0x32, 0xe5, 0xab, 0x48, 0xc4, 0x6e, 0xab, 0x67, 0x0f, 0xaa, 0xf4, 0xc8, 0x44, 0x6e, 0x36, 0x01, + 0x6d, 0x0a, 0x16, 0x87, 0x5e, 0x21, 0x55, 0x45, 0xa9, 0x26, 0x8b, 0xc3, 0xef, 0x8d, 0x5a, 0x07, + 0xec, 0x44, 0x48, 0x9c, 0x5f, 0x9b, 0xea, 0x25, 0x79, 0x04, 0x07, 0x49, 0xca, 0x72, 0x2f, 0x15, + 0x2f, 0xa3, 0xd0, 0x5b, 0xf8, 0xaf, 0x70, 0x82, 0x6d, 0xda, 0xd2, 0x28, 0xd5, 0xe0, 0x73, 0xff, + 0x95, 0x9e, 0xfe, 0x0d, 0xa1, 0x81, 0x84, 0x46, 0x5a, 0x0a, 0xf2, 0x3c, 0x28, 0xcc, 0xdd, 0x44, + 0x0f, 0x37, 0x78, 0x1e, 0x18, 0x77, 0xdf, 0x87, 0xba, 0x0e, 0x6a, 0x7b, 0x1b, 0xdf, 0xd7, 0x78, + 0x1e, 0x68, 0x7f, 0x3f, 0x80, 0x96, 0x0e, 0xac, 0x0d, 0xee, 0xa0, 0xc1, 0x1d, 0x9e, 0x07, 0x6b, + 0x87, 0x7f, 0xa8, 0xbf, 0x39, 0x0b, 0x26, 0x95, 0xbf, 0x48, 0xd0, 0xe0, 0x1d, 0xba, 0x01, 0x74, + 0x17, 0xd5, 0x32, 0x31, 0xc6, 0xae, 0x52, 0x5c, 0x93, 0x1e, 0x38, 0x81, 0x58, 0x24, 0x29, 0x93, + 0x52, 0xb7, 0xe9, 0x10, 0x43, 0x65, 0x48, 0x0f, 0xa2, 0xfe, 0xf8, 0x78, 0xfa, 0x71, 0x3b, 0xe6, + 0x23, 0xa9, 0xf7, 0xd7, 0x6c, 0xa9, 0xef, 0x81, 0xff, 0x33, 0x19, 0xbd, 0x66, 0xee, 0x91, 0xb9, + 0xa4, 0x06, 0x7e, 0x88, 0x5e, 0xb3, 0xf3, 0xc7, 0x6f, 0xfe, 0xea, 0xee, 0xbd, 0xb9, 0xed, 0x5a, + 0x6f, 0x6f, 0xbb, 0xd6, 0x9f, 0xb7, 0x5d, 0xeb, 0x97, 0xbb, 0xee, 0xde, 0xdb, 0xbb, 0xee, 0xde, + 0x1f, 0x77, 0xdd, 0xbd, 0x9f, 0xdb, 0x5b, 0xbf, 0xc5, 0x71, 0x0d, 0x7f, 0x68, 0x4f, 0xfe, 0x0d, + 0x00, 0x00, 0xff, 0xff, 0x1e, 0x86, 0x14, 0xc9, 0x48, 0x07, 0x00, 0x00, } func (m *CheckpointsModel) Marshal() (dAtA []byte, err error) { @@ -479,6 +491,23 @@ func (m *TableCheckpointModel) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.AutoRowIDBase != 0 { + i = encodeVarintFileCheckpoints(dAtA, i, uint64(m.AutoRowIDBase)) + i-- + dAtA[i] = 0x1 + i-- + dAtA[i] = 0x80 + } + if m.AutoIncrBase != 0 { + i = encodeVarintFileCheckpoints(dAtA, i, uint64(m.AutoIncrBase)) + i-- + dAtA[i] = 0x78 + } + if m.AutoRandBase != 0 { + i = encodeVarintFileCheckpoints(dAtA, i, uint64(m.AutoRandBase)) + i-- + dAtA[i] = 0x70 + } if len(m.TableInfo) > 0 { i -= len(m.TableInfo) copy(dAtA[i:], m.TableInfo) @@ -852,6 +881,15 @@ func (m *TableCheckpointModel) Size() (n int) { if l > 0 { n += 1 + l + sovFileCheckpoints(uint64(l)) } + if m.AutoRandBase != 0 { + n += 1 + sovFileCheckpoints(uint64(m.AutoRandBase)) + } + if m.AutoIncrBase != 0 { + n += 1 + sovFileCheckpoints(uint64(m.AutoIncrBase)) + } + if m.AutoRowIDBase != 0 { + n += 2 + sovFileCheckpoints(uint64(m.AutoRowIDBase)) + } return n } @@ -1793,6 
+1831,63 @@ func (m *TableCheckpointModel) Unmarshal(dAtA []byte) error { m.TableInfo = []byte{} } iNdEx = postIndex + case 14: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field AutoRandBase", wireType) + } + m.AutoRandBase = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowFileCheckpoints + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.AutoRandBase |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 15: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field AutoIncrBase", wireType) + } + m.AutoIncrBase = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowFileCheckpoints + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.AutoIncrBase |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 16: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field AutoRowIDBase", wireType) + } + m.AutoRowIDBase = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowFileCheckpoints + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.AutoRowIDBase |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipFileCheckpoints(dAtA[iNdEx:]) diff --git a/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto b/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto index 9be5795e19c8a..136131fca28d9 100644 --- a/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto +++ b/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto @@ -14,7 +14,7 @@ syntax = "proto3"; -import "github.com/gogo/protobuf/gogoproto/gogo.proto"; +import "github.com/gogo/protobuf@v1.3.2/gogoproto/gogo.proto"; option go_package = "checkpointspb"; option (gogoproto.goproto_getters_all) = false; @@ -40,13 +40,19 @@ message TaskCheckpointModel { message TableCheckpointModel { bytes hash = 1; uint32 status = 3; - int64 alloc_base = 4; + int64 alloc_base = 4 [deprecated=true]; map engines = 8; int64 tableID = 9; uint64 kv_bytes = 10; uint64 kv_kvs = 11; fixed64 kv_checksum = 12; bytes table_info = 13; + // used to record the max auto random ID without the sharding bits that has been used. + int64 autoRandBase = 14; + // used to record the max auto increment ID that has been used. + int64 autoIncrBase = 15; + // used to record the max auto row ID that has been used. + int64 autoRowIDBase = 16; } message EngineCheckpointModel { diff --git a/pkg/lightning/common/util.go b/pkg/lightning/common/util.go index 7017e79a646c2..445aabf418467 100644 --- a/pkg/lightning/common/util.go +++ b/pkg/lightning/common/util.go @@ -51,7 +51,9 @@ import ( const ( retryTimeout = 3 * time.Second - defaultMaxRetry = 3 + // we are using optimistic txn all the time, we need retry more because we + // might face more write conflicts during parallel import. + defaultMaxRetry = 10 ) // MySQLConnectParam records the parameters needed to connect to a MySQL database. 
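The base columns added in this first patch are merged with forward-only semantics: both TableCheckpoint.Apply and the UpdateTableRebaseTemplate SQL take the maximum of the existing and incoming value, so a stale rebase diff can never move a base backwards. The following is a minimal, self-contained sketch of that behaviour; tableCheckpoint and rebaseDiff are simplified stand-ins for illustration, not the real checkpoints package types.

package main

import "fmt"

// Simplified stand-ins for checkpoints.TableCheckpoint and TableCheckpointDiff.
type tableCheckpoint struct {
	autoRandBase  int64
	autoIncrBase  int64
	autoRowIDBase int64
}

type rebaseDiff struct {
	autoRandBase  int64
	autoIncrBase  int64
	autoRowIDBase int64
}

// apply mirrors the hasRebase branch of TableCheckpoint.Apply: each base only
// moves forward, matching GREATEST(?, col) in UpdateTableRebaseTemplate.
func (cp *tableCheckpoint) apply(d rebaseDiff) {
	cp.autoRandBase = max(cp.autoRandBase, d.autoRandBase)
	cp.autoIncrBase = max(cp.autoIncrBase, d.autoIncrBase)
	cp.autoRowIDBase = max(cp.autoRowIDBase, d.autoRowIDBase)
}

func main() {
	cp := tableCheckpoint{autoRowIDBase: 100}
	// A late-arriving diff with a smaller row-ID base cannot move the
	// checkpoint backwards, but a larger auto-random base still advances it.
	cp.apply(rebaseDiff{autoRandBase: 50, autoRowIDBase: 80})
	fmt.Printf("%+v\n", cp) // {autoRandBase:50 autoIncrBase:0 autoRowIDBase:100}
}

Applying the same max/GREATEST rule on both the in-memory and SQL paths keeps the MySQL and file checkpoint backends consistent with each other.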
From 74b83cbccdb004e27aec3c5a282336f773e8d457 Mon Sep 17 00:00:00 2001 From: D3Hunter Date: Thu, 14 Nov 2024 16:41:00 +0800 Subject: [PATCH 02/12] base --- lightning/pkg/importer/get_pre_info.go | 2 +- lightning/pkg/importer/import.go | 14 +- lightning/pkg/importer/table_import.go | 25 ++- lightning/pkg/importer/table_import_test.go | 4 +- pkg/disttask/importinto/planner.go | 2 +- pkg/disttask/importinto/task_executor.go | 2 +- pkg/executor/importer/table_import.go | 4 +- pkg/lightning/backend/kv/allocator.go | 25 +-- pkg/lightning/backend/kv/allocator_test.go | 2 +- pkg/lightning/backend/kv/base_test.go | 2 +- pkg/lightning/backend/kv/kv2sql_test.go | 4 +- pkg/lightning/backend/kv/sql2kv_test.go | 22 +-- pkg/lightning/backend/local/duplicate_test.go | 6 +- pkg/lightning/backend/tidb/tidb_test.go | 4 +- pkg/lightning/checkpoints/checkpoints.go | 43 +++-- .../checkpointspb/file_checkpoints.pb.go | 152 +++++++----------- .../checkpointspb/file_checkpoints.proto | 4 +- .../errormanager/errormanager_test.go | 4 +- .../errormanager/resolveconflict_test.go | 8 +- pkg/table/tables/index_test.go | 2 +- 20 files changed, 153 insertions(+), 178 deletions(-) diff --git a/lightning/pkg/importer/get_pre_info.go b/lightning/pkg/importer/get_pre_info.go index 775cf7ec5f3eb..77c6d6558c8e4 100644 --- a/lightning/pkg/importer/get_pre_info.go +++ b/lightning/pkg/importer/get_pre_info.go @@ -622,7 +622,7 @@ func (p *PreImportInfoGetterImpl) sampleDataFromTable( if err != nil { return 0.0, false, errors.Trace(err) } - idAlloc := kv.NewPanickingAllocators(tableInfo.SepAutoInc(), 0) + idAlloc := kv.NewPanickingAllocators(tableInfo.SepAutoInc()) tbl, err := tables.TableFromMeta(idAlloc, tableInfo) if err != nil { return 0.0, false, errors.Trace(err) diff --git a/lightning/pkg/importer/import.go b/lightning/pkg/importer/import.go index 512aa277d6b34..1f4376820fbd3 100644 --- a/lightning/pkg/importer/import.go +++ b/lightning/pkg/importer/import.go @@ -1990,20 +1990,12 @@ type deliverResult struct { } func saveCheckpoint(rc *Controller, t *TableImporter, engineID int32, chunk *checkpoints.ChunkCheckpoint) { - // We need to update the AllocBase every time we've finished a file. - // The AllocBase is determined by the maximum of the "handle" (_tidb_rowid - // or integer primary key), which can only be obtained by reading all data. 
- - var base int64 - if t.tableInfo.Core.ContainsAutoRandomBits() { - base = t.alloc.Get(autoid.AutoRandomType).Base() + 1 - } else { - base = t.alloc.Get(autoid.RowIDAllocType).Base() + 1 - } rc.saveCpCh <- saveCp{ tableName: t.tableName, merger: &checkpoints.RebaseCheckpointMerger{ - AllocBase: base, + AutoRandBase: t.alloc.Get(autoid.AutoRandomType).Base(), + AutoIncrBase: t.alloc.Get(autoid.AutoIncrementType).Base(), + AutoRowIDBase: t.alloc.Get(autoid.RowIDAllocType).Base(), }, } rc.saveCpCh <- saveCp{ diff --git a/lightning/pkg/importer/table_import.go b/lightning/pkg/importer/table_import.go index 658d17430ebc6..ea617ad1c9b83 100644 --- a/lightning/pkg/importer/table_import.go +++ b/lightning/pkg/importer/table_import.go @@ -92,7 +92,7 @@ func NewTableImporter( etcdCli *clientv3.Client, logger log.Logger, ) (*TableImporter, error) { - idAlloc := kv.NewPanickingAllocators(tableInfo.Core.SepAutoInc(), cp.AllocBase) + idAlloc := kv.NewPanickingAllocatorsWithBase(tableInfo.Core.SepAutoInc(), cp.AutoRandBase, cp.AutoIncrBase, cp.AutoRowIDBase) tbl, err := tables.TableFromMeta(idAlloc, tableInfo.Core) if err != nil { return nil, errors.Annotatef(err, "failed to tables.TableFromMeta %s", tableName) @@ -187,22 +187,31 @@ func (tr *TableImporter) importTable( } web.BroadcastTableCheckpoint(tr.tableName, cp) - // rebase the allocator so it exceeds the number of rows. - if tr.tableInfo.Core.ContainsAutoRandomBits() { - cp.AllocBase = max(cp.AllocBase, tr.tableInfo.Core.AutoRandID) - if err := tr.alloc.Get(autoid.AutoRandomType).Rebase(context.Background(), cp.AllocBase, false); err != nil { + // rebase the allocator based on the max ID from table info. + ti := tr.tableInfo.Core + if ti.ContainsAutoRandomBits() { + cp.AutoRandBase = max(cp.AutoRandBase, ti.AutoRandID) + if err := tr.alloc.Get(autoid.AutoRandomType).Rebase(context.Background(), cp.AutoRandBase, false); err != nil { return false, err } } else { - cp.AllocBase = max(cp.AllocBase, tr.tableInfo.Core.AutoIncID) - if err := tr.alloc.Get(autoid.RowIDAllocType).Rebase(context.Background(), cp.AllocBase, false); err != nil { + if ti.GetAutoIncrementColInfo() != nil && ti.SepAutoInc() { + cp.AutoIncrBase = max(cp.AutoIncrBase, ti.AutoIncID) + if err := tr.alloc.Get(autoid.AutoIncrementType).Rebase(context.Background(), cp.AutoIncrBase, false); err != nil { + return false, err + } + } + cp.AutoRowIDBase = max(cp.AutoRowIDBase, ti.AutoIncID) + if err := tr.alloc.Get(autoid.RowIDAllocType).Rebase(context.Background(), cp.AutoRowIDBase, false); err != nil { return false, err } } rc.saveCpCh <- saveCp{ tableName: tr.tableName, merger: &checkpoints.RebaseCheckpointMerger{ - AllocBase: cp.AllocBase, + AutoRandBase: cp.AutoRandBase, + AutoIncrBase: cp.AutoIncrBase, + AutoRowIDBase: cp.AutoRowIDBase, }, } } diff --git a/lightning/pkg/importer/table_import_test.go b/lightning/pkg/importer/table_import_test.go index aeb024863952f..60beb1d206baf 100644 --- a/lightning/pkg/importer/table_import_test.go +++ b/lightning/pkg/importer/table_import_test.go @@ -409,7 +409,7 @@ func (s *tableRestoreSuite) TestRestoreEngineFailed() { mockEngineWriter.EXPECT().IsSynced().Return(true).AnyTimes() mockEngineWriter.EXPECT().Close(gomock.Any()).Return(mockChunkFlushStatus, nil).AnyTimes() - tbl, err := tables.TableFromMeta(kv.NewPanickingAllocators(s.tableInfo.Core.SepAutoInc(), 0), s.tableInfo.Core) + tbl, err := tables.TableFromMeta(kv.NewPanickingAllocators(s.tableInfo.Core.SepAutoInc()), s.tableInfo.Core) require.NoError(s.T(), err) _, indexUUID := 
backend.MakeUUID("`db`.`table`", -1) _, dataUUID := backend.MakeUUID("`db`.`table`", 0) @@ -1445,7 +1445,7 @@ func (s *tableRestoreSuite) TestEstimate() { controller := gomock.NewController(s.T()) defer controller.Finish() mockEncBuilder := mock.NewMockEncodingBuilder(controller) - idAlloc := kv.NewPanickingAllocators(s.tableInfo.Core.SepAutoInc(), 0) + idAlloc := kv.NewPanickingAllocators(s.tableInfo.Core.SepAutoInc()) tbl, err := tables.TableFromMeta(idAlloc, s.tableInfo.Core) require.NoError(s.T(), err) diff --git a/pkg/disttask/importinto/planner.go b/pkg/disttask/importinto/planner.go index ff4add8bd3d88..8df53b5a13a40 100644 --- a/pkg/disttask/importinto/planner.go +++ b/pkg/disttask/importinto/planner.go @@ -239,7 +239,7 @@ func buildControllerForPlan(p *LogicalPlan) (*importer.LoadDataController, error } func buildController(plan *importer.Plan, stmt string) (*importer.LoadDataController, error) { - idAlloc := kv.NewPanickingAllocators(plan.TableInfo.SepAutoInc(), 0) + idAlloc := kv.NewPanickingAllocators(plan.TableInfo.SepAutoInc()) tbl, err := tables.TableFromMeta(idAlloc, plan.TableInfo) if err != nil { return nil, err diff --git a/pkg/disttask/importinto/task_executor.go b/pkg/disttask/importinto/task_executor.go index 31d85fbc0d0ad..2c85f63c3785a 100644 --- a/pkg/disttask/importinto/task_executor.go +++ b/pkg/disttask/importinto/task_executor.go @@ -73,7 +73,7 @@ func getTableImporter( taskMeta *TaskMeta, store tidbkv.Storage, ) (*importer.TableImporter, error) { - idAlloc := kv.NewPanickingAllocators(taskMeta.Plan.TableInfo.SepAutoInc(), 0) + idAlloc := kv.NewPanickingAllocators(taskMeta.Plan.TableInfo.SepAutoInc()) tbl, err := tables.TableFromMeta(idAlloc, taskMeta.Plan.TableInfo) if err != nil { return nil, err diff --git a/pkg/executor/importer/table_import.go b/pkg/executor/importer/table_import.go index 1939809ea7285..eecdee38d96db 100644 --- a/pkg/executor/importer/table_import.go +++ b/pkg/executor/importer/table_import.go @@ -154,7 +154,7 @@ func NewTableImporter( id string, kvStore tidbkv.Storage, ) (ti *TableImporter, err error) { - idAlloc := kv.NewPanickingAllocators(e.Table.Meta().SepAutoInc(), 0) + idAlloc := kv.NewPanickingAllocators(e.Table.Meta().SepAutoInc()) tbl, err := tables.TableFromMeta(idAlloc, e.Table.Meta()) if err != nil { return nil, errors.Annotatef(err, "failed to tables.TableFromMeta %s", e.Table.Meta().Name) @@ -234,7 +234,7 @@ type TableImporter struct { // NewTableImporterForTest creates a new table importer for test. func NewTableImporterForTest(ctx context.Context, e *LoadDataController, id string, helper local.StoreHelper) (*TableImporter, error) { - idAlloc := kv.NewPanickingAllocators(e.Table.Meta().SepAutoInc(), 0) + idAlloc := kv.NewPanickingAllocators(e.Table.Meta().SepAutoInc()) tbl, err := tables.TableFromMeta(idAlloc, e.Table.Meta()) if err != nil { return nil, errors.Annotatef(err, "failed to tables.TableFromMeta %s", e.Table.Meta().Name) diff --git a/pkg/lightning/backend/kv/allocator.go b/pkg/lightning/backend/kv/allocator.go index dfd7027a0e7de..5c97c27045b66 100644 --- a/pkg/lightning/backend/kv/allocator.go +++ b/pkg/lightning/backend/kv/allocator.go @@ -30,20 +30,27 @@ type panickingAllocator struct { ty autoid.AllocatorType } -// NewPanickingAllocators creates a PanickingAllocator shared by all allocation types. +// NewPanickingAllocators creates a PanickingAllocator with default base values. 
+func NewPanickingAllocators(sepAutoInc bool) autoid.Allocators { + return NewPanickingAllocatorsWithBase(sepAutoInc, 0, 0, 0) +} + +// NewPanickingAllocatorsWithBase creates a PanickingAllocator shared by all allocation types. // we use this to collect the max id(either _tidb_rowid or auto_increment id or auto_random) used // during import, and we will use this info to do ALTER TABLE xxx AUTO_RANDOM_BASE or AUTO_INCREMENT // on post-process phase. -// TODO: support save all bases in checkpoint. -func NewPanickingAllocators(sepAutoInc bool, base int64) autoid.Allocators { +func NewPanickingAllocatorsWithBase(sepAutoInc bool, autoRandBase, autoIncrBase, autoRowIDBase int64) autoid.Allocators { allocs := make([]autoid.Allocator, 0, 3) - for _, t := range []autoid.AllocatorType{ - autoid.RowIDAllocType, - autoid.AutoIncrementType, - autoid.AutoRandomType, + for _, t := range []struct { + Type autoid.AllocatorType + Base int64 + }{ + {Type: autoid.AutoRandomType, Base: autoRandBase}, + {Type: autoid.AutoIncrementType, Base: autoIncrBase}, + {Type: autoid.RowIDAllocType, Base: autoRowIDBase}, } { - pa := &panickingAllocator{ty: t} - pa.base.Store(base) + pa := &panickingAllocator{ty: t.Type} + pa.base.Store(t.Base) allocs = append(allocs, pa) } return autoid.NewAllocators(sepAutoInc, allocs...) diff --git a/pkg/lightning/backend/kv/allocator_test.go b/pkg/lightning/backend/kv/allocator_test.go index a88b54f4b5032..2d28968d54b76 100644 --- a/pkg/lightning/backend/kv/allocator_test.go +++ b/pkg/lightning/backend/kv/allocator_test.go @@ -22,7 +22,7 @@ import ( ) func TestAllocator(t *testing.T) { - alloc := NewPanickingAllocators(true, 0) + alloc := NewPanickingAllocators(true) require.NoError(t, alloc.Get(autoid.RowIDAllocType).Rebase(nil, 123, false)) // cannot revert back require.NoError(t, alloc.Get(autoid.RowIDAllocType).Rebase(nil, 100, false)) diff --git a/pkg/lightning/backend/kv/base_test.go b/pkg/lightning/backend/kv/base_test.go index ee3bae9d0eb57..3fba4ca6cc7cc 100644 --- a/pkg/lightning/backend/kv/base_test.go +++ b/pkg/lightning/backend/kv/base_test.go @@ -44,7 +44,7 @@ func TestLogKVConvertFailed(t *testing.T) { cols := []*model.ColumnInfo{c1} tblInfo := &model.TableInfo{ID: 1, Columns: cols, PKIsHandle: false, State: model.StatePublic} var tbl table.Table - tbl, err = tables.TableFromMeta(NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err = tables.TableFromMeta(NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) var baseKVEncoder *BaseKVEncoder diff --git a/pkg/lightning/backend/kv/kv2sql_test.go b/pkg/lightning/backend/kv/kv2sql_test.go index cf6d40341eb6a..afe076192f0be 100644 --- a/pkg/lightning/backend/kv/kv2sql_test.go +++ b/pkg/lightning/backend/kv/kv2sql_test.go @@ -42,7 +42,7 @@ func TestIterRawIndexKeysClusteredPK(t *testing.T) { require.NoError(t, err) info.State = model.StatePublic require.True(t, info.IsCommonHandle) - tbl, err := tables.TableFromMeta(kv.NewPanickingAllocators(info.SepAutoInc(), 0), info) + tbl, err := tables.TableFromMeta(kv.NewPanickingAllocators(info.SepAutoInc()), info) require.NoError(t, err) sessionOpts := &encode.SessionOptions{ @@ -83,7 +83,7 @@ func TestIterRawIndexKeysIntPK(t *testing.T) { require.NoError(t, err) info.State = model.StatePublic require.True(t, info.PKIsHandle) - tbl, err := tables.TableFromMeta(kv.NewPanickingAllocators(info.SepAutoInc(), 0), info) + tbl, err := tables.TableFromMeta(kv.NewPanickingAllocators(info.SepAutoInc()), info) require.NoError(t, err) sessionOpts := 
&encode.SessionOptions{ diff --git a/pkg/lightning/backend/kv/sql2kv_test.go b/pkg/lightning/backend/kv/sql2kv_test.go index dcece5617704b..a21e7198fe4ad 100644 --- a/pkg/lightning/backend/kv/sql2kv_test.go +++ b/pkg/lightning/backend/kv/sql2kv_test.go @@ -80,7 +80,7 @@ func TestEncode(t *testing.T) { c1 := &model.ColumnInfo{ID: 1, Name: pmodel.NewCIStr("c1"), State: model.StatePublic, Offset: 0, FieldType: *types.NewFieldType(mysql.TypeTiny)} cols := []*model.ColumnInfo{c1} tblInfo := &model.TableInfo{ID: 1, Columns: cols, PKIsHandle: false, State: model.StatePublic} - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) logger := log.Logger{Logger: zap.NewNop()} @@ -163,7 +163,7 @@ func TestDecode(t *testing.T) { c1 := &model.ColumnInfo{ID: 1, Name: pmodel.NewCIStr("c1"), State: model.StatePublic, Offset: 0, FieldType: *types.NewFieldType(mysql.TypeTiny)} cols := []*model.ColumnInfo{c1} tblInfo := &model.TableInfo{ID: 1, Columns: cols, PKIsHandle: false, State: model.StatePublic} - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) decoder, err := lkv.NewTableKVDecoder(tbl, "`test`.`c1`", &encode.SessionOptions{ SQLMode: mysql.ModeStrictAllTables, @@ -217,7 +217,7 @@ func TestDecodeIndex(t *testing.T) { State: model.StatePublic, PKIsHandle: false, } - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) if err != nil { fmt.Printf("error: %v", err.Error()) } @@ -262,7 +262,7 @@ func TestEncodeRowFormatV2(t *testing.T) { c1 := &model.ColumnInfo{ID: 1, Name: pmodel.NewCIStr("c1"), State: model.StatePublic, Offset: 0, FieldType: *types.NewFieldType(mysql.TypeTiny)} cols := []*model.ColumnInfo{c1} tblInfo := &model.TableInfo{ID: 1, Columns: cols, PKIsHandle: false, State: model.StatePublic} - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) rows := []types.Datum{ @@ -313,7 +313,7 @@ func TestEncodeTimestamp(t *testing.T) { } cols := []*model.ColumnInfo{c1} tblInfo := &model.TableInfo{ID: 1, Columns: cols, PKIsHandle: false, State: model.StatePublic} - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) encoder, err := lkv.NewTableKVEncoder(&encode.EncodingConfig{ @@ -342,7 +342,7 @@ func TestEncodeTimestamp(t *testing.T) { func TestEncodeDoubleAutoIncrement(t *testing.T) { tblInfo := mockTableInfo(t, "create table t (id double not null auto_increment, unique key `u_id` (`id`));") - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) encoder, err := lkv.NewTableKVEncoder(&encode.EncodingConfig{ @@ -406,7 +406,7 @@ func TestEncodeMissingAutoValue(t *testing.T) { }, } { tblInfo := mockTableInfo(t, testTblInfo.CreateStmt) - tbl, err := 
tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) encoder, err := lkv.NewTableKVEncoder(&encode.EncodingConfig{ @@ -458,7 +458,7 @@ func TestEncodeMissingAutoValue(t *testing.T) { func TestEncodeExpressionColumn(t *testing.T) { tblInfo := mockTableInfo(t, "create table t (id varchar(40) not null DEFAULT uuid(), unique key `u_id` (`id`));") - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) encoder, err := lkv.NewTableKVEncoder(&encode.EncodingConfig{ @@ -503,7 +503,7 @@ func mockTableInfo(t *testing.T, createSQL string) *model.TableInfo { func TestDefaultAutoRandoms(t *testing.T) { tblInfo := mockTableInfo(t, "create table t (id bigint unsigned NOT NULL auto_random primary key clustered, a varchar(100));") - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) encoder, err := lkv.NewTableKVEncoder(&encode.EncodingConfig{ Table: tbl, @@ -541,7 +541,7 @@ func TestDefaultAutoRandoms(t *testing.T) { func TestShardRowId(t *testing.T) { tblInfo := mockTableInfo(t, "create table t (s varchar(16)) shard_row_id_bits = 3;") - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) encoder, err := lkv.NewTableKVEncoder(&encode.EncodingConfig{ Table: tbl, @@ -656,7 +656,7 @@ func SetUpTest(b *testing.B) *benchSQL2KVSuite { tableInfo.State = model.StatePublic // Construct the corresponding KV encoder. - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tableInfo.SepAutoInc(), 0), tableInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tableInfo.SepAutoInc()), tableInfo) require.NoError(b, err) encoder, err := lkv.NewTableKVEncoder(&encode.EncodingConfig{ Table: tbl, diff --git a/pkg/lightning/backend/local/duplicate_test.go b/pkg/lightning/backend/local/duplicate_test.go index 9652376aacf05..de5c471615bea 100644 --- a/pkg/lightning/backend/local/duplicate_test.go +++ b/pkg/lightning/backend/local/duplicate_test.go @@ -44,7 +44,7 @@ func TestBuildDupTask(t *testing.T) { info, err := ddl.MockTableInfo(mock.NewContext(), node[0].(*ast.CreateTableStmt), 1) require.NoError(t, err) info.State = model.StatePublic - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(info.SepAutoInc(), 0), info) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(info.SepAutoInc()), info) require.NoError(t, err) // Test build duplicate detecting task. @@ -100,7 +100,7 @@ func TestBuildDupTask(t *testing.T) { info, err = ddl.MockTableInfo(mock.NewContext(), node[0].(*ast.CreateTableStmt), 1) require.NoError(t, err) info.State = model.StatePublic - tbl, err = tables.TableFromMeta(lkv.NewPanickingAllocators(info.SepAutoInc(), 0), info) + tbl, err = tables.TableFromMeta(lkv.NewPanickingAllocators(info.SepAutoInc()), info) require.NoError(t, err) require.Len(t, tbl.Meta().Indices, 3) require.Equal(t, "primary", tbl.Meta().Indices[0].Name.L) @@ -145,7 +145,7 @@ func buildTableForTestConvertToErrFoundConflictRecords(t *testing.T, node []ast. 
info, err := ddl.MockTableInfo(mockSctx, node[0].(*ast.CreateTableStmt), 108) require.NoError(t, err) info.State = model.StatePublic - tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(info.SepAutoInc(), 0), info) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(info.SepAutoInc()), info) require.NoError(t, err) sessionOpts := encode.SessionOptions{ diff --git a/pkg/lightning/backend/tidb/tidb_test.go b/pkg/lightning/backend/tidb/tidb_test.go index e0344de49d556..c8e3490ba7a29 100644 --- a/pkg/lightning/backend/tidb/tidb_test.go +++ b/pkg/lightning/backend/tidb/tidb_test.go @@ -69,7 +69,7 @@ func createMysqlSuite(t *testing.T) *mysqlSuite { cols = append(cols, col) } tblInfo := &model.TableInfo{ID: 1, Columns: cols, PKIsHandle: false, State: model.StatePublic} - tbl, err := tables.TableFromMeta(kv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(kv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) cfg := config.NewConfig() cfg.Conflict.Strategy = config.ReplaceOnDup @@ -294,7 +294,7 @@ func testStrictMode(t *testing.T) { ft.SetCharset(charset.CharsetASCII) col1 := &model.ColumnInfo{ID: 2, Name: pmodel.NewCIStr("s1"), State: model.StatePublic, Offset: 1, FieldType: ft} tblInfo := &model.TableInfo{ID: 1, Columns: []*model.ColumnInfo{col0, col1}, PKIsHandle: false, State: model.StatePublic} - tbl, err := tables.TableFromMeta(kv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(kv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) ctx := context.Background() diff --git a/pkg/lightning/checkpoints/checkpoints.go b/pkg/lightning/checkpoints/checkpoints.go index b22d0f2eec029..ef2689e37bbf4 100644 --- a/pkg/lightning/checkpoints/checkpoints.go +++ b/pkg/lightning/checkpoints/checkpoints.go @@ -105,7 +105,6 @@ const ( table_name varchar(261) NOT NULL PRIMARY KEY, hash binary(32) NOT NULL, status tinyint unsigned DEFAULT 30, - alloc_base bigint NOT NULL DEFAULT 0, table_id bigint NOT NULL DEFAULT 0, table_info longtext NOT NULL, create_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, @@ -171,7 +170,7 @@ const ( FROM %s.%s WHERE table_name = ? ORDER BY engine_id, path, offset;` ReadTableRemainTemplate = ` - SELECT status, alloc_base, table_id, table_info, kv_bytes, kv_kvs, kv_checksum, auto_rand_base, auto_incr_base, auto_row_id_base + SELECT status, table_id, table_info, kv_bytes, kv_kvs, kv_checksum, auto_rand_base, auto_incr_base, auto_row_id_base FROM %s.%s WHERE table_name = ?;` ReplaceEngineTemplate = ` REPLACE INTO %s.%s (table_name, engine_id, status) VALUES (?, ?, ?);` @@ -192,8 +191,7 @@ const ( WHERE (table_name, engine_id, path, offset) = (?, ?, ?, ?);` UpdateTableRebaseTemplate = ` UPDATE %s.%s - SET alloc_base = GREATEST(?, alloc_base), - auto_rand_base = GREATEST(?, auto_rand_base), + SET auto_rand_base = GREATEST(?, auto_rand_base), auto_incr_base = GREATEST(?, auto_incr_base), auto_row_id_base = GREATEST(?, auto_row_id_base) WHERE table_name = ?;` @@ -347,10 +345,9 @@ func (engine *EngineCheckpoint) DeepCopy() *EngineCheckpoint { // TableCheckpoint is the checkpoint for a table. type TableCheckpoint struct { - Status CheckpointStatus - AllocBase int64 - Engines map[int32]*EngineCheckpoint - TableID int64 + Status CheckpointStatus + Engines map[int32]*EngineCheckpoint + TableID int64 // TableInfo is desired table info what we want to restore. 
When add-index-by-sql is enabled, // we will first drop indexes from target table, then restore data, then add indexes back. In case // of crash, this field will be used to save the dropped indexes, so we can add them back. @@ -372,11 +369,10 @@ func (cp *TableCheckpoint) DeepCopy() *TableCheckpoint { engines[engineID] = engine.DeepCopy() } return &TableCheckpoint{ - Status: cp.Status, - AllocBase: cp.AllocBase, - Engines: engines, - TableID: cp.TableID, - Checksum: cp.Checksum, + Status: cp.Status, + Engines: engines, + TableID: cp.TableID, + Checksum: cp.Checksum, AutoRandBase: cp.AutoRandBase, AutoIncrBase: cp.AutoIncrBase, @@ -413,7 +409,6 @@ type TableCheckpointDiff struct { hasRebase bool hasChecksum bool status CheckpointStatus - allocBase int64 engines map[int32]engineCheckpointDiff checksum verify.KVChecksum autoRandBase int64 @@ -445,8 +440,8 @@ func (cpd *TableCheckpointDiff) insertEngineCheckpointDiff(engineID int32, newDi // String implements fmt.Stringer interface. func (cpd *TableCheckpointDiff) String() string { return fmt.Sprintf( - "{hasStatus:%v, hasRebase:%v, status:%d, allocBase:%d, engines:[%d]}", - cpd.hasStatus, cpd.hasRebase, cpd.status, cpd.allocBase, len(cpd.engines), + "{hasStatus:%v, hasRebase:%v, status:%d, engines:[%d], autoRandBase:%d, autoIncrBase:%d, autoRowIDBase:%d}", + cpd.hasStatus, cpd.hasRebase, cpd.status, len(cpd.engines), cpd.autoRandBase, cpd.autoIncrBase, cpd.autoRowIDBase, ) } @@ -456,7 +451,6 @@ func (cp *TableCheckpoint) Apply(cpd *TableCheckpointDiff) { cp.Status = cpd.status } if cpd.hasRebase { - cp.AllocBase = max(cp.AllocBase, cpd.allocBase) cp.AutoRandBase = max(cp.AutoRandBase, cpd.autoRandBase) cp.AutoIncrBase = max(cp.AutoIncrBase, cpd.autoIncrBase) cp.AutoRowIDBase = max(cp.AutoRowIDBase, cpd.autoRowIDBase) @@ -562,13 +556,17 @@ func (m *TableChecksumMerger) MergeInto(cpd *TableCheckpointDiff) { // RebaseCheckpointMerger is the merger for rebasing the auto-increment ID. type RebaseCheckpointMerger struct { - AllocBase int64 + AutoRandBase int64 + AutoIncrBase int64 + AutoRowIDBase int64 } // MergeInto implements TableCheckpointMerger.MergeInto. 
func (merger *RebaseCheckpointMerger) MergeInto(cpd *TableCheckpointDiff) { cpd.hasRebase = true - cpd.allocBase = max(cpd.allocBase, merger.AllocBase) + cpd.autoRandBase = max(cpd.autoRandBase, merger.AutoRandBase) + cpd.autoIncrBase = max(cpd.autoIncrBase, merger.AutoIncrBase) + cpd.autoRowIDBase = max(cpd.autoRowIDBase, merger.AutoRowIDBase) } // DestroyedTableCheckpoint is the checkpoint for a table that has been @@ -959,7 +957,7 @@ func (cpdb *MySQLCheckpointsDB) Get(ctx context.Context, tableName string) (*Tab var kvs, bytes, checksum uint64 var rawTableInfo []byte if err := tableRow.Scan( - &status, &cp.AllocBase, &cp.TableID, &rawTableInfo, &bytes, &kvs, &checksum, + &status, &cp.TableID, &rawTableInfo, &bytes, &kvs, &checksum, &cp.AutoRandBase, &cp.AutoIncrBase, &cp.AutoRowIDBase, ); err != nil { if err == sql.ErrNoRows { @@ -1082,7 +1080,7 @@ func (cpdb *MySQLCheckpointsDB) Update(taskCtx context.Context, checkpointDiffs } } if cpd.hasRebase { - if _, e := rebaseStmt.ExecContext(c, cpd.allocBase, cpd.autoRandBase, cpd.autoIncrBase, cpd.autoRowIDBase, tableName); e != nil { + if _, e := rebaseStmt.ExecContext(c, cpd.autoRandBase, cpd.autoIncrBase, cpd.autoRowIDBase, tableName); e != nil { return errors.Trace(e) } } @@ -1361,7 +1359,6 @@ func (cpdb *FileCheckpointsDB) Get(_ context.Context, tableName string) (*TableC cp := &TableCheckpoint{ Status: CheckpointStatus(tableModel.Status), - AllocBase: tableModel.AllocBase, Engines: make(map[int32]*EngineCheckpoint, len(tableModel.Engines)), TableID: tableModel.TableID, TableInfo: tableInfo, @@ -1467,7 +1464,6 @@ func (cpdb *FileCheckpointsDB) Update(_ context.Context, checkpointDiffs map[str tableModel.Status = uint32(cpd.status) } if cpd.hasRebase { - tableModel.AllocBase = max(tableModel.AllocBase, cpd.allocBase) tableModel.AutoRandBase = max(tableModel.AutoRandBase, cpd.autoRandBase) tableModel.AutoIncrBase = max(tableModel.AutoIncrBase, cpd.autoIncrBase) tableModel.AutoRowIDBase = max(tableModel.AutoRowIDBase, cpd.autoRowIDBase) @@ -1785,7 +1781,6 @@ func (cpdb *MySQLCheckpointsDB) DumpTables(ctx context.Context, writer io.Writer table_name, hex(hash) AS hash, status, - alloc_base, create_time, update_time, auto_rand_base, diff --git a/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.pb.go b/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.pb.go index 74f93acfb7f87..464d3acb93536 100644 --- a/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.pb.go +++ b/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.pb.go @@ -6,12 +6,11 @@ package checkpointspb import ( encoding_binary "encoding/binary" fmt "fmt" + _ "github.com/gogo/protobuf/gogoproto" + proto "github.com/gogo/protobuf/proto" io "io" math "math" math_bits "math/bits" - - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" ) // Reference imports to suppress errors if they are not otherwise used. @@ -112,7 +111,6 @@ var xxx_messageInfo_TaskCheckpointModel proto.InternalMessageInfo type TableCheckpointModel struct { Hash []byte `protobuf:"bytes,1,opt,name=hash,proto3" json:"hash,omitempty"` Status uint32 `protobuf:"varint,3,opt,name=status,proto3" json:"status,omitempty"` - AllocBase int64 `protobuf:"varint,4,opt,name=alloc_base,json=allocBase,proto3" json:"alloc_base,omitempty"` // Deprecated: Do not use. 
Engines map[int32]*EngineCheckpointModel `protobuf:"bytes,8,rep,name=engines,proto3" json:"engines,omitempty" protobuf_key:"zigzag32,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` TableID int64 `protobuf:"varint,9,opt,name=tableID,proto3" json:"tableID,omitempty"` KvBytes uint64 `protobuf:"varint,10,opt,name=kv_bytes,json=kvBytes,proto3" json:"kv_bytes,omitempty"` @@ -266,66 +264,65 @@ func init() { } var fileDescriptor_c57c7b77a714394c = []byte{ - // 940 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0xcf, 0x6e, 0xdb, 0xc6, - 0x13, 0x36, 0x45, 0xeb, 0xdf, 0x50, 0xb2, 0xe5, 0xfd, 0xd9, 0x09, 0x7f, 0x6e, 0xab, 0x2a, 0x4a, - 0x0e, 0x02, 0xd2, 0x4a, 0xa8, 0xd3, 0x43, 0x11, 0xb4, 0x45, 0x6b, 0x3b, 0x40, 0x0d, 0x23, 0xa8, - 0xb1, 0x4d, 0x7b, 0xe8, 0x85, 0xa0, 0xc8, 0x95, 0x44, 0xac, 0xc4, 0x25, 0xb8, 0x4b, 0x26, 0xca, - 0x53, 0xf4, 0x31, 0xfa, 0x12, 0xbd, 0x07, 0x3d, 0xe5, 0xd8, 0x63, 0x6b, 0xdf, 0x0b, 0xf4, 0x0d, - 0x8a, 0x9d, 0xa5, 0x24, 0x2a, 0x10, 0x82, 0xde, 0x76, 0xbf, 0xf9, 0xe6, 0xdb, 0xd9, 0xd9, 0x6f, - 0x48, 0xf8, 0x3a, 0xe1, 0xd3, 0xd1, 0x3c, 0x9a, 0xce, 0x54, 0x1c, 0xc5, 0xd3, 0x51, 0x30, 0x63, - 0x01, 0x4f, 0x44, 0x14, 0x2b, 0x59, 0x5e, 0x27, 0xe3, 0xd1, 0x24, 0x9a, 0x33, 0xaf, 0x04, 0x0d, - 0x93, 0x54, 0x28, 0x71, 0xfa, 0xf9, 0x34, 0x52, 0xb3, 0x6c, 0x3c, 0x0c, 0xc4, 0x62, 0x34, 0x15, - 0x53, 0x31, 0x42, 0x78, 0x9c, 0x4d, 0xbe, 0xc9, 0x3f, 0x1b, 0x3e, 0x19, 0x9e, 0x21, 0x88, 0x18, - 0xae, 0x4c, 0x56, 0xff, 0x6f, 0x0b, 0x3a, 0x17, 0x1b, 0xad, 0xe7, 0x22, 0x64, 0x73, 0x72, 0x09, - 0x4e, 0x49, 0xdf, 0xb5, 0x7a, 0xf6, 0xc0, 0x39, 0xeb, 0x0f, 0xdf, 0xe5, 0x95, 0x81, 0x67, 0xb1, - 0x4a, 0x97, 0xb4, 0x9c, 0x46, 0xbe, 0x82, 0x43, 0xe5, 0x4b, 0x5e, 0x2a, 0xd5, 0xad, 0xf4, 0xac, - 0x81, 0x73, 0x76, 0x3c, 0x7c, 0xe1, 0x4b, 0xbe, 0x49, 0x46, 0x31, 0x7a, 0xa0, 0xb6, 0xc0, 0xd3, - 0x1f, 0xb7, 0x0a, 0x43, 0x7d, 0xd2, 0x01, 0x9b, 0xb3, 0xa5, 0x6b, 0xf5, 0xac, 0x41, 0x93, 0xea, - 0x25, 0x79, 0x0c, 0xd5, 0xdc, 0x9f, 0x67, 0xac, 0x90, 0x3e, 0x19, 0xbe, 0xf0, 0xc7, 0x73, 0xf6, - 0xae, 0xb6, 0xe1, 0x3c, 0xad, 0x7c, 0x61, 0xf5, 0x7f, 0xad, 0xc0, 0xff, 0x76, 0x1c, 0x4f, 0xee, - 0x43, 0x1d, 0xab, 0x8d, 0x42, 0x94, 0xb7, 0x69, 0x4d, 0x6f, 0xaf, 0x42, 0xf2, 0x11, 0x80, 0x14, - 0x59, 0x1a, 0x30, 0x2f, 0x8c, 0x52, 0x3c, 0xa6, 0x49, 0x9b, 0x06, 0xb9, 0x8c, 0x52, 0xe2, 0x42, - 0x7d, 0xec, 0x07, 0x9c, 0xc5, 0xa1, 0x6b, 0x63, 0x6c, 0xb5, 0x25, 0x0f, 0xa1, 0x1d, 0x2d, 0x12, - 0x91, 0x2a, 0x96, 0x7a, 0x7e, 0x18, 0xa6, 0xee, 0x3e, 0xc6, 0x5b, 0x2b, 0xf0, 0xdb, 0x30, 0x4c, - 0xc9, 0x07, 0xd0, 0x54, 0x51, 0x38, 0xf6, 0x66, 0x42, 0x2a, 0xb7, 0x8a, 0x84, 0x86, 0x06, 0xbe, - 0x13, 0x52, 0xad, 0x83, 0x9a, 0xef, 0xd6, 0x7a, 0xd6, 0xa0, 0x6a, 0x82, 0x37, 0x22, 0x55, 0xba, - 0xe0, 0x24, 0x34, 0xc2, 0x75, 0xcc, 0xab, 0x25, 0x21, 0x4a, 0xf6, 0xa1, 0x2d, 0xf5, 0x01, 0xa1, - 0xc7, 0x73, 0xac, 0xb9, 0x81, 0x61, 0xc7, 0x80, 0xd7, 0xb9, 0xae, 0xfa, 0x21, 0xb4, 0xd7, 0x56, - 0xf3, 0x72, 0x96, 0xba, 0x4d, 0x53, 0xdb, 0x1a, 0xfc, 0x89, 0xa5, 0xfd, 0x7f, 0x6c, 0x38, 0xde, - 0xd5, 0x4e, 0x42, 0x60, 0x7f, 0xe6, 0xcb, 0x19, 0x36, 0xaa, 0x45, 0x71, 0x4d, 0xee, 0x41, 0x4d, - 0x2a, 0x5f, 0x65, 0x12, 0xdb, 0xd0, 0xa6, 0xc5, 0x8e, 0x3c, 0x00, 0xf0, 0xe7, 0x73, 0x11, 0x78, - 0x63, 0x5f, 0x32, 0x6c, 0x81, 0x7d, 0x5e, 0x71, 0x2d, 0xda, 0x44, 0xf4, 0xdc, 0x97, 0x8c, 0x7c, - 0x09, 0x75, 0x16, 0x4f, 0xa3, 0x98, 0x49, 0xb7, 0x51, 0x58, 0x6d, 0xd7, 0xb1, 0xc3, 0x67, 0x86, - 0x64, 0xac, 0xb6, 0x4a, 0xd1, 0x0f, 0xa0, 0x34, 0xfb, 0xea, 0x12, 0x2f, 0x61, 0xd3, 0xd5, 
0x96, - 0xfc, 0x1f, 0x1a, 0x3c, 0xf7, 0xc6, 0x4b, 0xc5, 0xa4, 0x0b, 0x3d, 0x6b, 0xb0, 0x4f, 0xeb, 0x3c, - 0x3f, 0xd7, 0x5b, 0x72, 0x02, 0x35, 0x9e, 0x7b, 0x3c, 0x97, 0xae, 0x83, 0x81, 0x2a, 0xcf, 0xaf, - 0x73, 0x49, 0x3e, 0x06, 0x87, 0xe7, 0xc6, 0xb0, 0x32, 0x5b, 0xb8, 0xad, 0x9e, 0x35, 0xa8, 0x51, - 0xe0, 0xf9, 0x45, 0x81, 0x68, 0x33, 0xa0, 0xba, 0x17, 0xc5, 0x13, 0xe1, 0xb6, 0xf1, 0xfe, 0x4d, - 0x73, 0x5e, 0x3c, 0x11, 0xa4, 0x0f, 0x2d, 0x3f, 0x53, 0x82, 0xfa, 0x71, 0xa8, 0x6f, 0xe6, 0x1e, - 0x60, 0x41, 0x5b, 0xd8, 0x8a, 0x73, 0x15, 0x07, 0x29, 0x72, 0x0e, 0x37, 0x9c, 0x15, 0x46, 0x1e, - 0x41, 0x1b, 0x73, 0xc4, 0xcb, 0xab, 0x4b, 0x24, 0x75, 0x90, 0xb4, 0x0d, 0x9e, 0x52, 0x68, 0x95, - 0x5b, 0x52, 0x9e, 0x8e, 0x23, 0x33, 0x1d, 0x9f, 0x6c, 0x4f, 0xc7, 0xbd, 0xa2, 0x85, 0xef, 0x19, - 0x8f, 0xdf, 0x2c, 0x38, 0xd9, 0x49, 0x2a, 0x3d, 0xb0, 0xb5, 0xf5, 0xc0, 0x4f, 0xa1, 0x16, 0xcc, - 0xb2, 0x98, 0x4b, 0xb7, 0x52, 0x3c, 0xde, 0xce, 0xfc, 0xe1, 0x05, 0x92, 0xcc, 0xe3, 0x15, 0x19, - 0xa7, 0x37, 0xe0, 0x94, 0xe0, 0xff, 0x32, 0xde, 0x48, 0x7f, 0x4f, 0xfd, 0xbf, 0xdb, 0x70, 0xbc, - 0x8b, 0xa3, 0x3d, 0x9b, 0xf8, 0x6a, 0x56, 0x88, 0xe3, 0x5a, 0x5f, 0x49, 0x4c, 0x26, 0x92, 0x99, - 0x0f, 0x93, 0x4d, 0x8b, 0x1d, 0xf9, 0x14, 0x48, 0x20, 0xe6, 0xd9, 0x22, 0xf6, 0x12, 0x96, 0x2e, - 0x32, 0xe5, 0xab, 0x48, 0xc4, 0x6e, 0xab, 0x67, 0x0f, 0xaa, 0xf4, 0xc8, 0x44, 0x6e, 0x36, 0x01, - 0x6d, 0x0a, 0x16, 0x87, 0x5e, 0x21, 0x55, 0x45, 0xa9, 0x26, 0x8b, 0xc3, 0xef, 0x8d, 0x5a, 0x07, - 0xec, 0x44, 0x48, 0x9c, 0x5f, 0x9b, 0xea, 0x25, 0x79, 0x04, 0x07, 0x49, 0xca, 0x72, 0x2f, 0x15, - 0x2f, 0xa3, 0xd0, 0x5b, 0xf8, 0xaf, 0x70, 0x82, 0x6d, 0xda, 0xd2, 0x28, 0xd5, 0xe0, 0x73, 0xff, - 0x95, 0x9e, 0xfe, 0x0d, 0xa1, 0x81, 0x84, 0x46, 0x5a, 0x0a, 0xf2, 0x3c, 0x28, 0xcc, 0xdd, 0x44, - 0x0f, 0x37, 0x78, 0x1e, 0x18, 0x77, 0xdf, 0x87, 0xba, 0x0e, 0x6a, 0x7b, 0x1b, 0xdf, 0xd7, 0x78, - 0x1e, 0x68, 0x7f, 0x3f, 0x80, 0x96, 0x0e, 0xac, 0x0d, 0xee, 0xa0, 0xc1, 0x1d, 0x9e, 0x07, 0x6b, - 0x87, 0x7f, 0xa8, 0xbf, 0x39, 0x0b, 0x26, 0x95, 0xbf, 0x48, 0xd0, 0xe0, 0x1d, 0xba, 0x01, 0x74, - 0x17, 0xd5, 0x32, 0x31, 0xc6, 0xae, 0x52, 0x5c, 0x93, 0x1e, 0x38, 0x81, 0x58, 0x24, 0x29, 0x93, - 0x52, 0xb7, 0xe9, 0x10, 0x43, 0x65, 0x48, 0x0f, 0xa2, 0xfe, 0xf8, 0x78, 0xfa, 0x71, 0x3b, 0xe6, - 0x23, 0xa9, 0xf7, 0xd7, 0x6c, 0xa9, 0xef, 0x81, 0xff, 0x33, 0x19, 0xbd, 0x66, 0xee, 0x91, 0xb9, - 0xa4, 0x06, 0x7e, 0x88, 0x5e, 0xb3, 0xf3, 0xc7, 0x6f, 0xfe, 0xea, 0xee, 0xbd, 0xb9, 0xed, 0x5a, - 0x6f, 0x6f, 0xbb, 0xd6, 0x9f, 0xb7, 0x5d, 0xeb, 0x97, 0xbb, 0xee, 0xde, 0xdb, 0xbb, 0xee, 0xde, - 0x1f, 0x77, 0xdd, 0xbd, 0x9f, 0xdb, 0x5b, 0xbf, 0xc5, 0x71, 0x0d, 0x7f, 0x68, 0x4f, 0xfe, 0x0d, - 0x00, 0x00, 0xff, 0xff, 0x1e, 0x86, 0x14, 0xc9, 0x48, 0x07, 0x00, 0x00, + // 925 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0xcf, 0x8e, 0xe3, 0xc4, + 0x13, 0x1e, 0x8f, 0xf3, 0xb7, 0x9c, 0xcc, 0x64, 0xfa, 0x37, 0xb3, 0xeb, 0xdf, 0x00, 0x21, 0x64, + 0xf7, 0x10, 0x69, 0x21, 0x11, 0xb3, 0x1c, 0xd0, 0x0a, 0x10, 0xcc, 0xcc, 0x4a, 0x0c, 0xa3, 0x15, + 0xa3, 0x66, 0xe1, 0xc0, 0xc5, 0x72, 0xec, 0x4e, 0x62, 0x75, 0xe2, 0xb6, 0xdc, 0x6d, 0xef, 0x66, + 0x9f, 0x82, 0xc7, 0xe0, 0x25, 0xb8, 0xaf, 0x38, 0xed, 0x81, 0x03, 0x47, 0x98, 0xb9, 0xf3, 0x0c, + 0xa8, 0xab, 0x9d, 0xc4, 0x59, 0x05, 0xc4, 0xad, 0xeb, 0xab, 0xaf, 0x3e, 0x57, 0x57, 0x7f, 0xdd, + 0x86, 0x2f, 0x12, 0x3e, 0x1d, 0xcd, 0xa3, 0xe9, 0x4c, 0xc5, 0x51, 0x3c, 0x1d, 0x05, 0x33, 0x16, + 0xf0, 0x44, 0x44, 0xb1, 0x92, 0xe5, 0x75, 0x32, 0x1e, 0x4d, 0xa2, 0x39, 0xf3, 0x4a, 0xd0, 
0x30, + 0x49, 0x85, 0x12, 0xa7, 0x9f, 0x4c, 0x23, 0x35, 0xcb, 0xc6, 0xc3, 0x40, 0x2c, 0x46, 0x53, 0x31, + 0x15, 0x23, 0x84, 0xc7, 0xd9, 0xe4, 0xcb, 0xfc, 0xe3, 0xe1, 0xe3, 0xe1, 0x19, 0x82, 0x88, 0xe1, + 0xca, 0x54, 0xf5, 0xff, 0xb2, 0xa0, 0x73, 0xb1, 0xd1, 0x7a, 0x26, 0x42, 0x36, 0x27, 0x97, 0xe0, + 0x94, 0xf4, 0x5d, 0xab, 0x67, 0x0f, 0x9c, 0xb3, 0xfe, 0xf0, 0x6d, 0x5e, 0x19, 0x78, 0x1a, 0xab, + 0x74, 0x49, 0xcb, 0x65, 0xe4, 0x73, 0x38, 0x54, 0xbe, 0xe4, 0xa5, 0x56, 0xdd, 0xfd, 0x9e, 0x35, + 0x70, 0xce, 0x8e, 0x87, 0xcf, 0x7d, 0xc9, 0x37, 0xc5, 0x28, 0x46, 0x0f, 0xd4, 0x16, 0x78, 0xfa, + 0xfd, 0x56, 0x63, 0xa8, 0x4f, 0x3a, 0x60, 0x73, 0xb6, 0x74, 0xad, 0x9e, 0x35, 0x68, 0x52, 0xbd, + 0x24, 0x8f, 0xa0, 0x9a, 0xfb, 0xf3, 0x8c, 0x15, 0xd2, 0x27, 0xc3, 0xe7, 0xfe, 0x78, 0xce, 0xde, + 0xd6, 0x36, 0x9c, 0x27, 0xfb, 0x9f, 0x5a, 0xfd, 0x9f, 0xf7, 0xe1, 0x7f, 0x3b, 0x3e, 0x4f, 0xee, + 0x43, 0x1d, 0xbb, 0x8d, 0x42, 0x94, 0xb7, 0x69, 0x4d, 0x87, 0x57, 0x21, 0x79, 0x0f, 0x40, 0x8a, + 0x2c, 0x0d, 0x98, 0x17, 0x46, 0x29, 0x7e, 0xa6, 0x49, 0x9b, 0x06, 0xb9, 0x8c, 0x52, 0xe2, 0x42, + 0x7d, 0xec, 0x07, 0x9c, 0xc5, 0xa1, 0x6b, 0x63, 0x6e, 0x15, 0x92, 0x07, 0xd0, 0x8e, 0x16, 0x89, + 0x48, 0x15, 0x4b, 0x3d, 0x3f, 0x0c, 0x53, 0xb7, 0x82, 0xf9, 0xd6, 0x0a, 0xfc, 0x2a, 0x0c, 0x53, + 0xf2, 0x0e, 0x34, 0x55, 0x14, 0x8e, 0xbd, 0x99, 0x90, 0xca, 0xad, 0x22, 0xa1, 0xa1, 0x81, 0xaf, + 0x85, 0x54, 0xeb, 0xa4, 0xe6, 0xbb, 0xb5, 0x9e, 0x35, 0xa8, 0x9a, 0xe4, 0x8d, 0x48, 0x95, 0x6e, + 0x38, 0x09, 0x8d, 0x70, 0x1d, 0xeb, 0x6a, 0x49, 0x88, 0x92, 0x7d, 0x68, 0x4b, 0xfd, 0x81, 0xd0, + 0xe3, 0x39, 0xf6, 0xdc, 0xc0, 0xb4, 0x63, 0xc0, 0xeb, 0x5c, 0x77, 0xfd, 0x00, 0xda, 0x6b, 0xab, + 0x79, 0x39, 0x4b, 0xdd, 0xa6, 0xe9, 0x6d, 0x0d, 0xfe, 0xc0, 0xd2, 0xfe, 0x6f, 0x36, 0x1c, 0xef, + 0x1a, 0x27, 0x21, 0x50, 0x99, 0xf9, 0x72, 0x86, 0x83, 0x6a, 0x51, 0x5c, 0x93, 0x7b, 0x50, 0x93, + 0xca, 0x57, 0x99, 0xc4, 0x31, 0xb4, 0x69, 0x11, 0x91, 0xcf, 0xa0, 0xce, 0xe2, 0x69, 0x14, 0x33, + 0xe9, 0x36, 0x0a, 0x1f, 0xed, 0xd2, 0x1c, 0x3e, 0x35, 0x24, 0xe3, 0xa3, 0x55, 0x89, 0x9e, 0xae, + 0xd2, 0xec, 0xab, 0x4b, 0xec, 0xd0, 0xa6, 0xab, 0x90, 0xfc, 0x1f, 0x1a, 0x3c, 0xf7, 0xc6, 0x4b, + 0xc5, 0xa4, 0x0b, 0x3d, 0x6b, 0x50, 0xa1, 0x75, 0x9e, 0x9f, 0xeb, 0x90, 0x9c, 0x40, 0x8d, 0xe7, + 0x1e, 0xcf, 0xa5, 0xeb, 0x60, 0xa2, 0xca, 0xf3, 0xeb, 0x5c, 0x92, 0xf7, 0xc1, 0xe1, 0xb9, 0x71, + 0xa3, 0xcc, 0x16, 0x6e, 0xab, 0x67, 0x0d, 0x6a, 0x14, 0x78, 0x7e, 0x51, 0x20, 0xfa, 0xa4, 0x51, + 0xdd, 0x8b, 0xe2, 0x89, 0x70, 0xdb, 0xb8, 0xb9, 0xa6, 0xf9, 0x5e, 0x3c, 0x11, 0xa4, 0x0f, 0x2d, + 0x3f, 0x53, 0x82, 0xfa, 0x71, 0x78, 0xee, 0x4b, 0xe6, 0x1e, 0x60, 0x43, 0x5b, 0xd8, 0x8a, 0x73, + 0x15, 0x07, 0x29, 0x72, 0x0e, 0x37, 0x9c, 0x15, 0x46, 0x1e, 0x42, 0x1b, 0x6b, 0xc4, 0x8b, 0xab, + 0x4b, 0x24, 0x75, 0x90, 0xb4, 0x0d, 0x9e, 0x52, 0x68, 0x95, 0x47, 0x52, 0xb6, 0xfe, 0x91, 0xb1, + 0xfe, 0x87, 0xdb, 0xd6, 0xbf, 0x57, 0x8c, 0xf0, 0x9f, 0xbd, 0xff, 0x4d, 0xa5, 0x51, 0xe9, 0x54, + 0xfb, 0xbf, 0x58, 0x70, 0xb2, 0x93, 0x5a, 0x3a, 0x43, 0x6b, 0xeb, 0x0c, 0x9f, 0x40, 0x2d, 0x98, + 0x65, 0x31, 0x97, 0xee, 0x7e, 0x71, 0x84, 0x3b, 0xeb, 0x87, 0x17, 0x48, 0x32, 0x47, 0x58, 0x54, + 0x9c, 0xde, 0x80, 0x53, 0x82, 0xff, 0xcb, 0x0d, 0x46, 0xfa, 0xbf, 0xdc, 0xe0, 0x5f, 0x6d, 0x38, + 0xde, 0xc5, 0xd1, 0xb6, 0x4c, 0x7c, 0x35, 0x2b, 0xc4, 0x71, 0xad, 0xb7, 0x24, 0x26, 0x13, 0xc9, + 0xcc, 0xdb, 0x63, 0xd3, 0x22, 0x22, 0x1f, 0x01, 0x09, 0xc4, 0x3c, 0x5b, 0xc4, 0x5e, 0xc2, 0xd2, + 0x45, 0xa6, 0x7c, 0x15, 0x89, 0xd8, 0x6d, 0xf5, 0xec, 0x41, 0x95, 0x1e, 0x99, 0xcc, 0xcd, 0x26, + 0xa1, 0xad, 0xc1, 
0xe2, 0xd0, 0x2b, 0xa4, 0xaa, 0x28, 0xd5, 0x64, 0x71, 0xf8, 0xad, 0x51, 0xeb, + 0x80, 0x9d, 0x08, 0x89, 0x57, 0xd4, 0xa6, 0x7a, 0x49, 0x1e, 0xc2, 0x41, 0x92, 0xb2, 0xdc, 0x4b, + 0xc5, 0x8b, 0x28, 0xf4, 0x16, 0xfe, 0x4b, 0xbc, 0xa4, 0x36, 0x6d, 0x69, 0x94, 0x6a, 0xf0, 0x99, + 0xff, 0x52, 0x5f, 0xf0, 0x0d, 0xa1, 0x81, 0x84, 0x46, 0x5a, 0x4a, 0xf2, 0x3c, 0x28, 0x2c, 0xde, + 0x44, 0x27, 0x37, 0x78, 0x1e, 0x18, 0x8f, 0xdf, 0x87, 0xba, 0x4e, 0x6a, 0x93, 0x1b, 0xf7, 0xd7, + 0x78, 0x1e, 0x68, 0x97, 0x7f, 0x00, 0x2d, 0x9d, 0x58, 0xdb, 0xdc, 0x41, 0x9b, 0x3b, 0x3c, 0x0f, + 0xd6, 0x3e, 0x7f, 0x57, 0x3f, 0x2b, 0x0b, 0x26, 0x95, 0xbf, 0x48, 0xd0, 0xe6, 0x1d, 0xba, 0x01, + 0xf4, 0x14, 0xd5, 0x32, 0x31, 0xf6, 0xae, 0x52, 0x5c, 0x93, 0x1e, 0x38, 0x81, 0x58, 0x24, 0x29, + 0x93, 0x52, 0x8f, 0xe9, 0x10, 0x53, 0x65, 0x48, 0x5f, 0x47, 0xfd, 0xbe, 0x78, 0xfa, 0x70, 0x3b, + 0xe6, 0x1d, 0xd4, 0xf1, 0x35, 0x5b, 0xea, 0x7d, 0xe0, 0x2f, 0x4b, 0x46, 0xaf, 0x98, 0x7b, 0x64, + 0x36, 0xa9, 0x81, 0xef, 0xa2, 0x57, 0xec, 0xfc, 0xd1, 0xeb, 0x3f, 0xbb, 0x7b, 0xaf, 0x6f, 0xbb, + 0xd6, 0x9b, 0xdb, 0xae, 0xf5, 0xc7, 0x6d, 0xd7, 0xfa, 0xe9, 0xae, 0xbb, 0xf7, 0xe6, 0xae, 0xbb, + 0xf7, 0xfb, 0x5d, 0x77, 0xef, 0xc7, 0xf6, 0xd6, 0x9f, 0x6f, 0x5c, 0xc3, 0x7f, 0xd6, 0xe3, 0xbf, + 0x03, 0x00, 0x00, 0xff, 0xff, 0xc7, 0xa4, 0xc2, 0x90, 0x2b, 0x07, 0x00, 0x00, } func (m *CheckpointsModel) Marshal() (dAtA []byte, err error) { @@ -560,11 +557,6 @@ func (m *TableCheckpointModel) MarshalToSizedBuffer(dAtA []byte) (int, error) { dAtA[i] = 0x42 } } - if m.AllocBase != 0 { - i = encodeVarintFileCheckpoints(dAtA, i, uint64(m.AllocBase)) - i-- - dAtA[i] = 0x20 - } if m.Status != 0 { i = encodeVarintFileCheckpoints(dAtA, i, uint64(m.Status)) i-- @@ -849,9 +841,6 @@ func (m *TableCheckpointModel) Size() (n int) { if m.Status != 0 { n += 1 + sovFileCheckpoints(uint64(m.Status)) } - if m.AllocBase != 0 { - n += 1 + sovFileCheckpoints(uint64(m.AllocBase)) - } if len(m.Engines) > 0 { for k, v := range m.Engines { _ = k @@ -1593,25 +1582,6 @@ func (m *TableCheckpointModel) Unmarshal(dAtA []byte) error { break } } - case 4: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field AllocBase", wireType) - } - m.AllocBase = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowFileCheckpoints - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.AllocBase |= int64(b&0x7F) << shift - if b < 0x80 { - break - } - } case 8: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Engines", wireType) diff --git a/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto b/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto index 136131fca28d9..eb4f82d07819d 100644 --- a/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto +++ b/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto @@ -40,7 +40,9 @@ message TaskCheckpointModel { message TableCheckpointModel { bytes hash = 1; uint32 status = 3; - int64 alloc_base = 4 [deprecated=true]; + // previously we use 4 to store alloc_base, but now it's deleted, reverse 4 + // to avoid reusing it. 
+ reserved 4; map engines = 8; int64 tableID = 9; uint64 kv_bytes = 10; diff --git a/pkg/lightning/errormanager/errormanager_test.go b/pkg/lightning/errormanager/errormanager_test.go index dfc41f3eb290b..f823d870e9f32 100644 --- a/pkg/lightning/errormanager/errormanager_test.go +++ b/pkg/lightning/errormanager/errormanager_test.go @@ -218,7 +218,7 @@ func TestReplaceConflictOneKey(t *testing.T) { State: model.StatePublic, } - tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(table.SepAutoInc(), 0), table) + tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(table.SepAutoInc()), table) require.NoError(t, err) sessionOpts := encode.SessionOptions{ @@ -407,7 +407,7 @@ func TestReplaceConflictOneUniqueKey(t *testing.T) { State: model.StatePublic, } - tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(table.SepAutoInc(), 0), table) + tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(table.SepAutoInc()), table) require.NoError(t, err) sessionOpts := encode.SessionOptions{ diff --git a/pkg/lightning/errormanager/resolveconflict_test.go b/pkg/lightning/errormanager/resolveconflict_test.go index 2f504fe0de2d0..c1f4edffcd840 100644 --- a/pkg/lightning/errormanager/resolveconflict_test.go +++ b/pkg/lightning/errormanager/resolveconflict_test.go @@ -52,7 +52,7 @@ func TestReplaceConflictMultipleKeysNonclusteredPk(t *testing.T) { require.NoError(t, err) info.State = model.StatePublic require.False(t, info.PKIsHandle) - tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(info.SepAutoInc(), 0), info) + tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(info.SepAutoInc()), info) require.NoError(t, err) require.False(t, tbl.Meta().HasClusteredIndex()) @@ -273,7 +273,7 @@ func TestReplaceConflictOneKeyNonclusteredPk(t *testing.T) { require.NoError(t, err) info.State = model.StatePublic require.False(t, info.PKIsHandle) - tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(info.SepAutoInc(), 0), info) + tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(info.SepAutoInc()), info) require.NoError(t, err) require.False(t, tbl.Meta().HasClusteredIndex()) @@ -441,7 +441,7 @@ func TestReplaceConflictOneUniqueKeyNonclusteredPk(t *testing.T) { require.NoError(t, err) info.State = model.StatePublic require.False(t, info.PKIsHandle) - tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(info.SepAutoInc(), 0), info) + tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(info.SepAutoInc()), info) require.NoError(t, err) require.False(t, tbl.Meta().HasClusteredIndex()) @@ -647,7 +647,7 @@ func TestReplaceConflictOneUniqueKeyNonclusteredVarcharPk(t *testing.T) { require.NoError(t, err) info.State = model.StatePublic require.False(t, info.PKIsHandle) - tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(info.SepAutoInc(), 0), info) + tbl, err := tables.TableFromMeta(tidbkv.NewPanickingAllocators(info.SepAutoInc()), info) require.NoError(t, err) require.False(t, tbl.Meta().HasClusteredIndex()) diff --git a/pkg/table/tables/index_test.go b/pkg/table/tables/index_test.go index ad45594184985..047f664d8912d 100644 --- a/pkg/table/tables/index_test.go +++ b/pkg/table/tables/index_test.go @@ -179,7 +179,7 @@ func buildTableInfo(t *testing.T, sql string) *model.TableInfo { func TestGenIndexValueFromIndex(t *testing.T) { tblInfo := buildTableInfo(t, "create table a (a int primary key, b int not null, c text, unique key key_b(b));") tblInfo.State = model.StatePublic - tbl, err := 
tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc(), 0), tblInfo) + tbl, err := tables.TableFromMeta(lkv.NewPanickingAllocators(tblInfo.SepAutoInc()), tblInfo) require.NoError(t, err) sessionOpts := encode.SessionOptions{ From a7d86250005b23c8b4ac77fcf6c48150f6fdef86 Mon Sep 17 00:00:00 2001 From: D3Hunter Date: Thu, 14 Nov 2024 19:22:58 +0800 Subject: [PATCH 03/12] change --- lightning/pkg/importer/meta_manager.go | 67 ++++++++++++-------------- lightning/pkg/importer/table_import.go | 8 +-- 2 files changed, 35 insertions(+), 40 deletions(-) diff --git a/lightning/pkg/importer/meta_manager.go b/lightning/pkg/importer/meta_manager.go index d4fe9523b08e4..a1884bef9b0c3 100644 --- a/lightning/pkg/importer/meta_manager.go +++ b/lightning/pkg/importer/meta_manager.go @@ -93,7 +93,7 @@ func (b *dbMetaMgrBuilder) TableMetaMgr(tr *TableImporter) tableMetaMgr { type tableMetaMgr interface { InitTableMeta(ctx context.Context) error - AllocTableRowIDs(ctx context.Context, rawRowIDMax int64) (*verify.KVChecksum, int64, error) + AllocTableRowIDs(ctx context.Context, requiredRowIDCnt int64) (*verify.KVChecksum, int64, error) UpdateTableStatus(ctx context.Context, status metaStatus) error UpdateTableBaseChecksum(ctx context.Context, checksum *verify.KVChecksum) error CheckAndUpdateLocalChecksum(ctx context.Context, checksum *verify.KVChecksum, hasLocalDupes bool) ( @@ -177,7 +177,7 @@ func parseMetaStatus(s string) (metaStatus, error) { } } -func (m *dbTableMetaMgr) AllocTableRowIDs(ctx context.Context, rawRowIDMax int64) (*verify.KVChecksum, int64, error) { +func (m *dbTableMetaMgr) AllocTableRowIDs(ctx context.Context, requiredRowIDCnt int64) (*verify.KVChecksum, int64, error) { conn, err := m.session.Conn(ctx) if err != nil { return nil, 0, errors.Trace(err) @@ -188,8 +188,10 @@ func (m *dbTableMetaMgr) AllocTableRowIDs(ctx context.Context, rawRowIDMax int64 DB: conn, Logger: m.tr.logger, } - var newRowIDBase, newRowIDMax int64 - curStatus := metaStatusInitial + // (myStartRowID, myEndRowID] is the range of row_id that current instance + // can use to encode the table. + var myStartRowID, myEndRowID int64 + myStatus := metaStatusInitial newStatus := metaStatusRowIDAllocated var baseTotalKvs, baseTotalBytes, baseChecksum uint64 err = exec.Exec(ctx, "enable pessimistic transaction", "SET SESSION tidb_txn_mode = 'pessimistic';") @@ -201,9 +203,16 @@ func (m *dbTableMetaMgr) AllocTableRowIDs(ctx context.Context, rawRowIDMax int64 tableChecksumingMsg := "Target table is calculating checksum. Please wait until the checksum is finished and try again." doAllocTableRowIDsFn := func() error { return exec.Transact(ctx, "init table allocator base", func(ctx context.Context, tx *sql.Tx) error { + // lightning follows below calling sequence, so at most one client + // can execute the code after the FOR UPDATE part for some table, + // even though FOR UPDATE only lock rows that matches the condition: + // - insert into table_meta with key (table_id, task_id) + // - try lock with FOR UPDATE rows, err := tx.QueryContext( ctx, - common.SprintfWithIdentifiers("SELECT task_id, row_id_base, row_id_max, total_kvs_base, total_bytes_base, checksum_base, status FROM %s.%s WHERE table_id = ? FOR UPDATE", m.schemaName, m.tableName), + common.SprintfWithIdentifiers(` +SELECT task_id, row_id_base, row_id_max, total_kvs_base, total_bytes_base, checksum_base, status +FROM %s.%s WHERE table_id = ? 
FOR UPDATE`, m.schemaName, m.tableName), m.tr.tableInfo.ID, ) if err != nil { @@ -234,16 +243,16 @@ func (m *dbTableMetaMgr) AllocTableRowIDs(ctx context.Context, rawRowIDMax int64 } if metaTaskID == m.taskID { - curStatus = status + myStatus = status baseChecksum = checksum baseTotalKvs = totalKvs baseTotalBytes = totalBytes if status >= metaStatusRowIDAllocated { - if rowIDMax-rowIDBase != rawRowIDMax { - return common.ErrAllocTableRowIDs.GenWithStack("verify allocator base failed. local: '%d', meta: '%d'", rawRowIDMax, rowIDMax-rowIDBase) + if rowIDMax-rowIDBase != requiredRowIDCnt { + return common.ErrAllocTableRowIDs.GenWithStack("verify allocator base failed. local: '%d', meta: '%d'", requiredRowIDCnt, rowIDMax-rowIDBase) } - newRowIDBase = rowIDBase - newRowIDMax = rowIDMax + myStartRowID = rowIDBase + myEndRowID = rowIDMax break } continue @@ -263,36 +272,21 @@ func (m *dbTableMetaMgr) AllocTableRowIDs(ctx context.Context, rawRowIDMax int64 } // no enough info are available, fetch row_id max for table - if curStatus == metaStatusInitial { - if needAutoID { - // maxRowIDMax is the max row_id that other tasks has allocated, we need to rebase the global autoid base first. - // TODO this is not right when AUTO_ID_CACHE=1 and have auto row id, - // the id allocators are separated in this case. - if err := common.RebaseGlobalAutoID(ctx, maxRowIDMax, m.tr, m.tr.dbInfo.ID, m.tr.tableInfo.Core); err != nil { - return errors.Trace(err) - } - newRowIDBase, newRowIDMax, err = common.AllocGlobalAutoID(ctx, rawRowIDMax, m.tr, m.tr.dbInfo.ID, m.tr.tableInfo.Core) - if err != nil { - return errors.Trace(err) - } - } else { - // Though we don't need auto ID, we still guarantee that the row ID is unique across all lightning instances. - newRowIDBase = maxRowIDMax - newRowIDMax = newRowIDBase + rawRowIDMax - } + if myStatus == metaStatusInitial { + myStartRowID = maxRowIDMax + myEndRowID = myStartRowID + requiredRowIDCnt - // table contains no data, can skip checksum - if needAutoID && newRowIDBase == 0 && newStatus < metaStatusRestoreStarted { + if needAutoID && myStartRowID == 0 && newStatus < metaStatusRestoreStarted { newStatus = metaStatusRestoreStarted } query := common.SprintfWithIdentifiers("UPDATE %s.%s SET row_id_base = ?, row_id_max = ?, status = ? WHERE table_id = ? AND task_id = ?", m.schemaName, m.tableName) - _, err := tx.ExecContext(ctx, query, newRowIDBase, newRowIDMax, newStatus.String(), m.tr.tableInfo.ID, m.taskID) + _, err := tx.ExecContext(ctx, query, myStartRowID, myEndRowID, newStatus.String(), m.tr.tableInfo.ID, m.taskID) if err != nil { return errors.Trace(err) } - curStatus = newStatus + myStatus = newStatus } return nil }) @@ -325,9 +319,10 @@ func (m *dbTableMetaMgr) AllocTableRowIDs(ctx context.Context, rawRowIDMax int64 var checksum *verify.KVChecksum // need to do checksum and update checksum meta since we are the first one. - if curStatus < metaStatusRestoreStarted { - // table contains data but haven't do checksum yet - if (newRowIDBase > 0 || !needAutoID) && m.needChecksum && baseTotalKvs == 0 { + if myStatus < metaStatusRestoreStarted { + if m.needChecksum && baseTotalKvs == 0 { + // if another instance finished import before below checksum logic, + // it will cause checksum mismatch, but it's very rare. 
remoteCk, err := DoChecksum(ctx, m.tr.tableInfo) if err != nil { return nil, 0, errors.Trace(err) @@ -354,11 +349,11 @@ func (m *dbTableMetaMgr) AllocTableRowIDs(ctx context.Context, rawRowIDMax int64 checksum = &ck } log.FromContext(ctx).Info("allocate table row_id base", zap.String("table", m.tr.tableName), - zap.Int64("row_id_base", newRowIDBase)) + zap.Int64("startRowID", myStartRowID), zap.Int64("endRowID", myEndRowID)) if checksum != nil { log.FromContext(ctx).Info("checksum base", zap.Any("checksum", checksum)) } - return checksum, newRowIDBase, nil + return checksum, myStartRowID, nil } func (m *dbTableMetaMgr) UpdateTableBaseChecksum(ctx context.Context, checksum *verify.KVChecksum) error { diff --git a/lightning/pkg/importer/table_import.go b/lightning/pkg/importer/table_import.go index ea617ad1c9b83..5ee692bdbd0e4 100644 --- a/lightning/pkg/importer/table_import.go +++ b/lightning/pkg/importer/table_import.go @@ -143,10 +143,10 @@ func (tr *TableImporter) importTable( } // fetch the max chunk row_id max value as the global max row_id - rowIDMax := int64(0) + requiredRowIDCnt := int64(0) for _, engine := range cp.Engines { - if len(engine.Chunks) > 0 && engine.Chunks[len(engine.Chunks)-1].Chunk.RowIDMax > rowIDMax { - rowIDMax = engine.Chunks[len(engine.Chunks)-1].Chunk.RowIDMax + if len(engine.Chunks) > 0 && engine.Chunks[len(engine.Chunks)-1].Chunk.RowIDMax > requiredRowIDCnt { + requiredRowIDCnt = engine.Chunks[len(engine.Chunks)-1].Chunk.RowIDMax } } versionStr, err := version.FetchVersion(ctx, rc.db) @@ -163,7 +163,7 @@ func (tr *TableImporter) importTable( return false, err } - checksum, rowIDBase, err := metaMgr.AllocTableRowIDs(ctx, rowIDMax) + checksum, rowIDBase, err := metaMgr.AllocTableRowIDs(ctx, requiredRowIDCnt) if err != nil { return false, err } From c516fdb6c02ec329852586e96c7730b793df576c Mon Sep 17 00:00:00 2001 From: D3Hunter Date: Thu, 14 Nov 2024 22:40:53 +0800 Subject: [PATCH 04/12] test --- .../checkpoints/checkpoints_file_test.go | 12 ++-- .../checkpoints/checkpoints_sql_test.go | 52 ++++++++------- pkg/lightning/checkpoints/checkpoints_test.go | 63 +++++++++++++++---- 3 files changed, 90 insertions(+), 37 deletions(-) diff --git a/pkg/lightning/checkpoints/checkpoints_file_test.go b/pkg/lightning/checkpoints/checkpoints_file_test.go index e938e060cad29..17d9fceb1e122 100644 --- a/pkg/lightning/checkpoints/checkpoints_file_test.go +++ b/pkg/lightning/checkpoints/checkpoints_file_test.go @@ -125,7 +125,9 @@ func newFileCheckpointsDB(t *testing.T, addIndexBySQL bool) *checkpoints.FileChe } scm.MergeInto(cpd) rcm := checkpoints.RebaseCheckpointMerger{ - AllocBase: 132861, + AutoRandBase: 132861, + AutoIncrBase: 132862, + AutoRowIDBase: 132863, } rcm.MergeInto(cpd) cksum := checkpoints.TableChecksumMerger{ @@ -167,9 +169,11 @@ func setInvalidStatus(cpdb *checkpoints.FileCheckpointsDB) { func TestGet(t *testing.T) { ctx := context.Background() expectT2 := &checkpoints.TableCheckpoint{ - Status: checkpoints.CheckpointStatusAllWritten, - AllocBase: 132861, - Checksum: verification.MakeKVChecksum(4492, 686, 486070148910), + Status: checkpoints.CheckpointStatusAllWritten, + AutoRandBase: 132861, + AutoIncrBase: 132862, + AutoRowIDBase: 132863, + Checksum: verification.MakeKVChecksum(4492, 686, 486070148910), Engines: map[int32]*checkpoints.EngineCheckpoint{ -1: { Status: checkpoints.CheckpointStatusLoaded, diff --git a/pkg/lightning/checkpoints/checkpoints_sql_test.go b/pkg/lightning/checkpoints/checkpoints_sql_test.go index b4776ea8b9c05..20420cbc2a50c 100644 
--- a/pkg/lightning/checkpoints/checkpoints_sql_test.go +++ b/pkg/lightning/checkpoints/checkpoints_sql_test.go @@ -139,19 +139,21 @@ func TestNormalOperations(t *testing.T) { s.mock. ExpectQuery("SELECT .+ FROM `mock-schema`\\.`table_v\\d+`"). WithArgs("`db1`.`t2`"). - WillReturnRows( - sqlmock.NewRows([]string{"status", "alloc_base", "table_id", "table_info", "kv_bytes", "kv_kvs", "kv_checksum"}). - AddRow(60, 132861, int64(2), nil, uint64(4492), uint64(686), uint64(486070148910)), - ) + WillReturnRows(sqlmock.NewRows([]string{ + "status", "table_id", "table_info", "kv_bytes", "kv_kvs", "kv_checksum", + "auto_rand_base", "auto_incr_base", "auto_row_id_base"}). + AddRow(60, int64(2), nil, uint64(4492), uint64(686), uint64(486070148910), 132861, 132862, 132863)) s.mock.ExpectCommit() cp, err := cpdb.Get(ctx, "`db1`.`t2`") require.Nil(t, err) require.Equal(t, &checkpoints.TableCheckpoint{ - Status: checkpoints.CheckpointStatusAllWritten, - AllocBase: 132861, - TableID: int64(2), - TableInfo: nil, + Status: checkpoints.CheckpointStatusAllWritten, + AutoRandBase: 132861, + AutoIncrBase: 132862, + AutoRowIDBase: 132863, + TableID: int64(2), + TableInfo: nil, Engines: map[int32]*checkpoints.EngineCheckpoint{ -1: {Status: checkpoints.CheckpointStatusLoaded}, 0: { @@ -329,7 +331,9 @@ func TestNormalOperationsWithAddIndexBySQL(t *testing.T) { } scm.MergeInto(cpd) rcm := checkpoints.RebaseCheckpointMerger{ - AllocBase: 132861, + AutoRandBase: 132861, + AutoIncrBase: 132862, + AutoRowIDBase: 132863, } rcm.MergeInto(cpd) cksum := checkpoints.TableChecksumMerger{ @@ -355,9 +359,9 @@ func TestNormalOperationsWithAddIndexBySQL(t *testing.T) { ). WillReturnResult(sqlmock.NewResult(11, 1)) s.mock. - ExpectPrepare("UPDATE `mock-schema`\\.`table_v\\d+` SET alloc_base = .+"). + ExpectPrepare("UPDATE `mock-schema`\\.`table_v\\d+` SET auto_rand_base = .+ auto_incr_base = .+ auto_row_id_base = .+"). ExpectExec(). - WithArgs(132861, "`db1`.`t2`"). + WithArgs(132861, 132862, 132863, "`db1`.`t2`"). WillReturnResult(sqlmock.NewResult(12, 1)) s.mock. ExpectPrepare("UPDATE `mock-schema`\\.`engine_v\\d+` SET status = .+"). @@ -412,17 +416,21 @@ func TestNormalOperationsWithAddIndexBySQL(t *testing.T) { ExpectQuery("SELECT .+ FROM `mock-schema`\\.`table_v\\d+`"). WithArgs("`db1`.`t2`"). WillReturnRows( - sqlmock.NewRows([]string{"status", "alloc_base", "table_id", "table_info", "kv_bytes", "kv_kvs", "kv_checksum"}). - AddRow(60, 132861, int64(2), t2Info, uint64(4492), uint64(686), uint64(486070148910)), + sqlmock.NewRows([]string{ + "status", "table_id", "table_info", "kv_bytes", "kv_kvs", "kv_checksum", + "auto_rand_base", "auto_incr_base", "auto_row_id_base"}). + AddRow(60, int64(2), t2Info, uint64(4492), uint64(686), uint64(486070148910), 132861, 132862, 132863), ) s.mock.ExpectCommit() cp, err := cpdb.Get(ctx, "`db1`.`t2`") require.Nil(t, err) require.Equal(t, &checkpoints.TableCheckpoint{ - Status: checkpoints.CheckpointStatusAllWritten, - AllocBase: 132861, - TableID: int64(2), + Status: checkpoints.CheckpointStatusAllWritten, + AutoRandBase: 132861, + AutoIncrBase: 132862, + AutoRowIDBase: 132863, + TableID: int64(2), TableInfo: &model.TableInfo{ Name: pmodel.NewCIStr("t2"), }, @@ -484,7 +492,7 @@ func TestRemoveAllCheckpoints_SQL(t *testing.T) { s.mock. ExpectQuery("SELECT .+ FROM `mock-schema`\\.`table_v\\d+`"). WithArgs("`db1`.`t2`"). 
- WillReturnRows(sqlmock.NewRows([]string{"status", "alloc_base", "table_id"})) + WillReturnRows(sqlmock.NewRows([]string{"status", "table_id"})) s.mock.ExpectRollback() cp, err := s.cpdb.Get(ctx, "`db1`.`t2`") @@ -666,16 +674,16 @@ func TestDump(t *testing.T) { s.mock. ExpectQuery("SELECT .+ FROM `mock-schema`\\.`table_v\\d+`"). - WillReturnRows( - sqlmock.NewRows([]string{"task_id", "table_name", "hash", "status", "alloc_base", "create_time", "update_time"}). - AddRow(1555555555, "`db1`.`t2`", 0, 90, 132861, tm, tm), + WillReturnRows(sqlmock.NewRows([]string{"task_id", "table_name", "hash", "status", + "create_time", "update_time", "auto_rand_base", "auto_incr_base", "auto_row_id_base"}). + AddRow(1555555555, "`db1`.`t2`", 0, 90, tm, tm, 132861, 132862, 132863), ) csvBuilder.Reset() err = s.cpdb.DumpTables(ctx, &csvBuilder) require.NoError(t, err) - require.Equal(t, "task_id,table_name,hash,status,alloc_base,create_time,update_time\n"+ - "1555555555,`db1`.`t2`,0,90,132861,2019-04-18 02:45:55 +0000 UTC,2019-04-18 02:45:55 +0000 UTC\n", + require.Equal(t, "task_id,table_name,hash,status,create_time,update_time,auto_rand_base,auto_incr_base,auto_row_id_base\n"+ + "1555555555,`db1`.`t2`,0,90,2019-04-18 02:45:55 +0000 UTC,2019-04-18 02:45:55 +0000 UTC,132861,132862,132863\n", csvBuilder.String(), ) } diff --git a/pkg/lightning/checkpoints/checkpoints_test.go b/pkg/lightning/checkpoints/checkpoints_test.go index 84467a3833f3a..0f24887c1149d 100644 --- a/pkg/lightning/checkpoints/checkpoints_test.go +++ b/pkg/lightning/checkpoints/checkpoints_test.go @@ -185,20 +185,38 @@ func TestMergeChunkCheckpoint(t *testing.T) { func TestRebaseCheckpoint(t *testing.T) { cpd := NewTableCheckpointDiff() - m := RebaseCheckpointMerger{AllocBase: 10000} + m := RebaseCheckpointMerger{ + AutoRandBase: 132861, + AutoIncrBase: 132862, + AutoRowIDBase: 132863, + } m.MergeInto(cpd) - require.Equal(t, &TableCheckpointDiff{ - hasRebase: true, - allocBase: 10000, - engines: make(map[int32]engineCheckpointDiff), - }, cpd) + expected := &TableCheckpointDiff{ + hasRebase: true, + autoRandBase: 132861, + autoIncrBase: 132862, + autoRowIDBase: 132863, + engines: make(map[int32]engineCheckpointDiff), + } + require.Equal(t, expected, cpd) + + // shouldn't go backwards + m2 := RebaseCheckpointMerger{ + AutoRandBase: 131, + AutoIncrBase: 132, + AutoRowIDBase: 133, + } + m2.MergeInto(cpd) + require.Equal(t, expected, cpd) } func TestApplyDiff(t *testing.T) { cp := TableCheckpoint{ - Status: CheckpointStatusLoaded, - AllocBase: 123, + Status: CheckpointStatusLoaded, + AutoRandBase: 131, + AutoIncrBase: 132, + AutoRowIDBase: 133, Engines: map[int32]*EngineCheckpoint{ -1: { Status: CheckpointStatusLoaded, @@ -233,7 +251,11 @@ func TestApplyDiff(t *testing.T) { (&StatusCheckpointMerger{EngineID: -1, Status: CheckpointStatusImported}).MergeInto(cpd) (&StatusCheckpointMerger{EngineID: WholeTableEngineID, Status: CheckpointStatusAllWritten}).MergeInto(cpd) (&StatusCheckpointMerger{EngineID: 1234, Status: CheckpointStatusAnalyzeSkipped}).MergeInto(cpd) - (&RebaseCheckpointMerger{AllocBase: 11111}).MergeInto(cpd) + (&RebaseCheckpointMerger{ + AutoRandBase: 1131, + AutoIncrBase: 1132, + AutoRowIDBase: 1133, + }).MergeInto(cpd) (&ChunkCheckpointMerger{ EngineID: 0, Key: ChunkCheckpointKey{Path: "/tmp/01.sql"}, @@ -263,8 +285,10 @@ func TestApplyDiff(t *testing.T) { cp.Apply(cpd) require.Equal(t, TableCheckpoint{ - Status: CheckpointStatusAllWritten, - AllocBase: 11111, + Status: CheckpointStatusAllWritten, + AutoRandBase: 1131, + 
AutoIncrBase: 1132, + AutoRowIDBase: 1133, Engines: map[int32]*EngineCheckpoint{ -1: { Status: CheckpointStatusImported, @@ -352,3 +376,20 @@ func TestSeparateCompletePath(t *testing.T) { require.Equal(t, testCase.expectPath, newPath) } } + +func TestTableCheckpointApplyBases(t *testing.T) { + tblCP := TableCheckpoint{ + AutoRowIDBase: 11, + AutoIncrBase: 12, + AutoRandBase: 13, + } + tblCP.Apply(&TableCheckpointDiff{ + hasRebase: true, + autoRowIDBase: 1, + autoIncrBase: 2, + autoRandBase: 3, + }) + require.EqualValues(t, 11, tblCP.AutoRowIDBase) + require.EqualValues(t, 12, tblCP.AutoIncrBase) + require.EqualValues(t, 13, tblCP.AutoRandBase) +} From 613e9c2ea8aa13d1c2c2f327bb54809b37905245 Mon Sep 17 00:00:00 2001 From: D3Hunter Date: Fri, 15 Nov 2024 00:47:11 +0800 Subject: [PATCH 05/12] change --- lightning/pkg/importer/table_import.go | 3 + .../lightning_checkpoint/config-file.toml | 15 ++++ .../tests/lightning_checkpoint/config.toml | 1 + lightning/tests/lightning_checkpoint/run.sh | 86 +++++++++++++++++-- .../tests/lightning_checkpoint_chunks/run.sh | 2 +- pkg/lightning/common/common.go | 36 -------- pkg/lightning/common/common_test.go | 17 +--- 7 files changed, 99 insertions(+), 61 deletions(-) create mode 100644 lightning/tests/lightning_checkpoint/config-file.toml diff --git a/lightning/pkg/importer/table_import.go b/lightning/pkg/importer/table_import.go index 5ee692bdbd0e4..17f342fc535c9 100644 --- a/lightning/pkg/importer/table_import.go +++ b/lightning/pkg/importer/table_import.go @@ -149,6 +149,9 @@ func (tr *TableImporter) importTable( requiredRowIDCnt = engine.Chunks[len(engine.Chunks)-1].Chunk.RowIDMax } } + tr.logger.Info("estimated required row id count", + zap.String("table", tr.tableName), + zap.Int64("count", requiredRowIDCnt)) versionStr, err := version.FetchVersion(ctx, rc.db) if err != nil { return false, errors.Trace(err) diff --git a/lightning/tests/lightning_checkpoint/config-file.toml b/lightning/tests/lightning_checkpoint/config-file.toml new file mode 100644 index 0000000000000..a5cd9efcc4879 --- /dev/null +++ b/lightning/tests/lightning_checkpoint/config-file.toml @@ -0,0 +1,15 @@ +[lightning] +index-concurrency = 1 +table-concurrency = 1 + +[tikv-importer] +backend = "local" +parallel-import = true + +[checkpoint] +enable = true +driver = "file" + +[mydumper] +read-block-size = 1 +filter = ['cppk_tsr.tbl1', 'cppk_tsr.tbl2', 'cppk_tsr.tbl7', 'cppk_tsr.tbl8', 'cppk_tsr.tbl9'] diff --git a/lightning/tests/lightning_checkpoint/config.toml b/lightning/tests/lightning_checkpoint/config.toml index a1d2090a4bb63..82ef5f94325e3 100644 --- a/lightning/tests/lightning_checkpoint/config.toml +++ b/lightning/tests/lightning_checkpoint/config.toml @@ -4,6 +4,7 @@ table-concurrency = 1 [tikv-importer] backend = "local" +parallel-import = true [checkpoint] enable = true diff --git a/lightning/tests/lightning_checkpoint/run.sh b/lightning/tests/lightning_checkpoint/run.sh index 76607a79760d8..c2bf20256d6ce 100755 --- a/lightning/tests/lightning_checkpoint/run.sh +++ b/lightning/tests/lightning_checkpoint/run.sh @@ -17,6 +17,7 @@ set -euE # Populate the mydumper source +CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) DBPATH="$TEST_DIR/cppk.mydump" TABLE_COUNT=9 CHUNK_COUNT=50 @@ -25,10 +26,16 @@ mkdir -p $DBPATH echo 'CREATE DATABASE cppk_tsr;' > "$DBPATH/cppk_tsr-schema-create.sql" INNER_QUERY='0' OUTER_QUERY='0' +# total 64*50 = 3200 bytes +NOISE_COL_VAL="PJNKNoQE3TX3NuMQRCP0fbtYEnI9cVcVxcnr3MRpqjoaZf1DyT" +for i in {1..6}; do + export 
NOISE_COL_VAL="$NOISE_COL_VAL$NOISE_COL_VAL" +done for i in $(seq "$TABLE_COUNT"); do + TABLE_ATTRIBUTES="" case $i in 1) - INDICES="PRIMARY KEY" + INDICES="PRIMARY KEY auto_increment" ;; 2) INDICES="UNIQUE" @@ -45,15 +52,26 @@ for i in $(seq "$TABLE_COUNT"); do 6) INDICES=", PRIMARY KEY(j)" ;; + 7) + INDICES="PRIMARY KEY auto_random" + ;; + 8) + INDICES="PRIMARY KEY auto_increment" + TABLE_ATTRIBUTES="AUTO_ID_CACHE=1" + ;; + 9) + INDICES="PRIMARY KEY nonclustered auto_increment" + TABLE_ATTRIBUTES="AUTO_ID_CACHE=1" + ;; *) INDICES="" ;; esac - echo "CREATE TABLE tbl$i(i TINYINT, j INT $INDICES);" > "$DBPATH/cppk_tsr.tbl$i-schema.sql" + echo "CREATE TABLE tbl$i(n text, i TINYINT, j bigint $INDICES) $TABLE_ATTRIBUTES;" > "$DBPATH/cppk_tsr.tbl$i-schema.sql" INNER_QUERY="$INNER_QUERY, (SELECT sum(j) FROM cppk_tsr.tbl$i) as s$i" OUTER_QUERY="$OUTER_QUERY + coalesce(s$i, 0)" for j in $(seq "$CHUNK_COUNT"); do - echo "INSERT INTO tbl$i VALUES ($i,${j}000),($i,${j}001);" > "$DBPATH/cppk_tsr.tbl$i.$j.sql" + echo "INSERT INTO tbl$i VALUES ('$NOISE_COL_VAL',$i,${j}000),('$NOISE_COL_VAL',$i,${j}001);" > "$DBPATH/cppk_tsr.tbl$i.$j.sql" done done PARTIAL_IMPORT_QUERY="SELECT *, $OUTER_QUERY AS s FROM (SELECT $INNER_QUERY) _" @@ -63,35 +81,87 @@ check_cluster_version 4 0 0 'local backend' # Set the failpoint to kill the lightning instance as soon as one table is imported # If checkpoint does work, this should only kill 9 instances of lightnings. SLOWDOWN_FAILPOINTS='github.com/pingcap/tidb/lightning/pkg/importer/SlowDownImport=sleep(250)' -export GO_FAILPOINTS="$SLOWDOWN_FAILPOINTS;github.com/pingcap/tidb/lightning/pkg/importer/FailBeforeIndexEngineImported=return" +# +# run with file checkpoint +# +run_sql 'DROP DATABASE IF EXISTS cppk_tsr' +export GO_FAILPOINTS="github.com/pingcap/tidb/lightning/pkg/importer/FailBeforeIndexEngineImported=return" +set +e +for i in $(seq 5); do + echo "******** with file checkpoint (step $i/5) ********" + run_lightning -d "$DBPATH" --enable-checkpoint=1 --config $CUR/config-file.toml 2> /dev/null + [ $? -ne 0 ] || exit 1 +done +set -e +export GO_FAILPOINTS="" +run_lightning -d "$DBPATH" --enable-checkpoint=1 --config $CUR/config-file.toml +run_sql "show table cppk_tsr.tbl1 next_row_id" +check_contains "NEXT_GLOBAL_ROW_ID: 50002" +run_sql "show table cppk_tsr.tbl2 next_row_id" +check_contains "NEXT_GLOBAL_ROW_ID: 63205" +run_sql "show table cppk_tsr.tbl7 next_row_id" +check_contains "NEXT_GLOBAL_ROW_ID: 50002" +run_sql "show table cppk_tsr.tbl8 next_row_id" +check_contains "NEXT_GLOBAL_ROW_ID: 50002" +run_sql "show table cppk_tsr.tbl9 next_row_id" +check_contains "NEXT_GLOBAL_ROW_ID: 50002" +check_contains "NEXT_GLOBAL_ROW_ID: 63205" + +# +# run with mysql checkpoint +# # Start importing the tables. run_sql 'DROP DATABASE IF EXISTS cppk_tsr' run_sql 'DROP DATABASE IF EXISTS tidb_lightning_checkpoint_test_cppk' run_sql 'DROP DATABASE IF EXISTS `tidb_lightning_checkpoint_test_cppk.1357924680.bak`' +export GO_FAILPOINTS="github.com/pingcap/tidb/lightning/pkg/importer/FailBeforeIndexEngineImported=return" # panic after saving index engine checkpoint status before saving table checkpoint status set +e for i in $(seq "$TABLE_COUNT"); do - echo "******** Importing Table Now (step $i/$TABLE_COUNT) ********" + echo "******** with mysql checkpoint (step $i/$TABLE_COUNT) ********" run_lightning -d "$DBPATH" --enable-checkpoint=1 2> /dev/null [ $? 
-ne 0 ] || exit 1 done set -e +run_sql "select concat(table_name, '|', auto_rand_base, '|', auto_incr_base, '|', auto_row_id_base) s from tidb_lightning_checkpoint_test_cppk.table_v10" +# auto-incr & auto-row-id share the same allocator, below too +check_contains 's: `cppk_tsr`.`tbl1`|0|50001|50001' +# use the estimated value of auto-row-id +check_contains 's: `cppk_tsr`.`tbl2`|0|63204|63204' +# no auto id +check_contains 's: `cppk_tsr`.`tbl4`|0|0|0' +check_contains 's: `cppk_tsr`.`tbl7`|50001|0|0' +check_contains 's: `cppk_tsr`.`tbl8`|0|50001|0' +check_contains 's: `cppk_tsr`.`tbl9`|0|50001|63204' + # at the failure of last table, all data engines are imported so finished == total grep "print lightning status" "$TEST_DIR/lightning.log" | grep -q "equal=true" -export GO_FAILPOINTS="$SLOWDOWN_FAILPOINTS" +export GO_FAILPOINTS="" # After everything is done, there should be no longer new calls to ImportEngine # (and thus `kill_lightning_after_one_import` will spare this final check) echo "******** Verify checkpoint no-op ********" run_lightning -d "$DBPATH" --enable-checkpoint=1 run_sql "$PARTIAL_IMPORT_QUERY" check_contains "s: $(( (1000 * $CHUNK_COUNT + 1001) * $CHUNK_COUNT * $TABLE_COUNT ))" -run_sql 'SELECT count(*) FROM `tidb_lightning_checkpoint_test_cppk`.table_v9 WHERE status >= 200' +run_sql 'SELECT count(*) FROM `tidb_lightning_checkpoint_test_cppk`.table_v10 WHERE status >= 200' check_contains "count(*): $TABLE_COUNT" +run_sql "show table cppk_tsr.tbl1 next_row_id" +check_contains "NEXT_GLOBAL_ROW_ID: 50002" +run_sql "show table cppk_tsr.tbl2 next_row_id" +check_contains "NEXT_GLOBAL_ROW_ID: 63205" +run_sql "show table cppk_tsr.tbl7 next_row_id" +check_contains "NEXT_GLOBAL_ROW_ID: 50002" +run_sql "show table cppk_tsr.tbl8 next_row_id" +check_contains "NEXT_GLOBAL_ROW_ID: 50002" +run_sql "show table cppk_tsr.tbl9 next_row_id" +check_contains "NEXT_GLOBAL_ROW_ID: 50002" +check_contains "NEXT_GLOBAL_ROW_ID: 63205" + # Start importing the tables. 
run_sql 'DROP DATABASE IF EXISTS cppk_tsr' run_sql 'DROP DATABASE IF EXISTS tidb_lightning_checkpoint_test_cppk' @@ -113,5 +183,5 @@ echo "******** Verify checkpoint no-op ********" run_lightning -d "$DBPATH" --enable-checkpoint=1 run_sql "$PARTIAL_IMPORT_QUERY" check_contains "s: $(( (1000 * $CHUNK_COUNT + 1001) * $CHUNK_COUNT * $TABLE_COUNT ))" -run_sql 'SELECT count(*) FROM `tidb_lightning_checkpoint_test_cppk`.table_v9 WHERE status >= 200' +run_sql 'SELECT count(*) FROM `tidb_lightning_checkpoint_test_cppk`.table_v10 WHERE status >= 200' check_contains "count(*): $TABLE_COUNT" diff --git a/lightning/tests/lightning_checkpoint_chunks/run.sh b/lightning/tests/lightning_checkpoint_chunks/run.sh index 582dd80e1daaf..d80cbdfab2d1d 100755 --- a/lightning/tests/lightning_checkpoint_chunks/run.sh +++ b/lightning/tests/lightning_checkpoint_chunks/run.sh @@ -34,7 +34,7 @@ verify_checkpoint_noop() { run_sql 'SELECT count(i), sum(i) FROM cpch_tsr.tbl;' check_contains "count(i): $(($ROW_COUNT*$CHUNK_COUNT))" check_contains "sum(i): $(( $ROW_COUNT*$CHUNK_COUNT*(($CHUNK_COUNT+2)*$ROW_COUNT + 1)/2 ))" - run_sql 'SELECT count(*) FROM `tidb_lightning_checkpoint_test_cpch.1234567890.bak`.table_v9 WHERE status >= 200' + run_sql 'SELECT count(*) FROM `tidb_lightning_checkpoint_test_cpch.1234567890.bak`.table_v10 WHERE status >= 200' check_contains "count(*): 1" } diff --git a/pkg/lightning/common/common.go b/pkg/lightning/common/common.go index 4eaff5d0a55c3..2ac4467758eb2 100644 --- a/pkg/lightning/common/common.go +++ b/pkg/lightning/common/common.go @@ -48,42 +48,6 @@ var DefaultImportVariablesTiDB = map[string]string{ "tidb_row_format_version": "1", } -// AllocGlobalAutoID allocs N consecutive autoIDs from TiDB. -func AllocGlobalAutoID(ctx context.Context, n int64, r autoid.Requirement, dbID int64, - tblInfo *model.TableInfo) (autoIDBase, autoIDMax int64, err error) { - allocators, err := GetGlobalAutoIDAlloc(r, dbID, tblInfo) - if err != nil { - return 0, 0, err - } - // there might be 2 allocators when tblInfo.SepAutoInc is true, and in this case - // RowIDAllocType will be the last one. - // we return the value of last Alloc as autoIDBase and autoIDMax, i.e. the value - // either comes from RowIDAllocType or AutoRandomType. - for _, alloc := range allocators { - autoIDBase, autoIDMax, err = alloc.Alloc(ctx, uint64(n), 1, 1) - if err != nil { - return 0, 0, err - } - } - return -} - -// RebaseGlobalAutoID rebase the autoID base to newBase. -func RebaseGlobalAutoID(ctx context.Context, newBase int64, r autoid.Requirement, dbID int64, - tblInfo *model.TableInfo) error { - allocators, err := GetGlobalAutoIDAlloc(r, dbID, tblInfo) - if err != nil { - return err - } - for _, alloc := range allocators { - err = alloc.Rebase(ctx, newBase, false) - if err != nil { - return err - } - } - return nil -} - // RebaseTableAllocators rebase the allocators of a table. // This function only rebase a table allocator when its new base is given in // `bases` param, else it will be skipped. 
diff --git a/pkg/lightning/common/common_test.go b/pkg/lightning/common/common_test.go index d6a070fefeed4..c7d01cb7c8bec 100644 --- a/pkg/lightning/common/common_test.go +++ b/pkg/lightning/common/common_test.go @@ -133,25 +133,10 @@ func TestAllocGlobalAutoID(t *testing.T) { expectAllocatorTypes: []autoid.AllocatorType{autoid.AutoRandomType}, }, } - ctx := context.Background() for _, c := range cases { ti := newTableInfo(t, 1, c.tableID, c.createTableSQL, kvStore) allocators, err := common.GetGlobalAutoIDAlloc(mockRequirement{kvStore}, 1, ti) - if c.expectErrStr == "" { - require.NoError(t, err, c.tableID) - require.NoError(t, common.RebaseGlobalAutoID(ctx, 123, mockRequirement{kvStore}, 1, ti)) - base, idMax, err := common.AllocGlobalAutoID(ctx, 100, mockRequirement{kvStore}, 1, ti) - require.NoError(t, err, c.tableID) - require.Equal(t, int64(123), base, c.tableID) - require.Equal(t, int64(223), idMax, c.tableID) - // all allocators are rebased and allocated - for _, alloc := range allocators { - base2, max2, err := alloc.Alloc(ctx, 100, 1, 1) - require.NoError(t, err, c.tableID) - require.Equal(t, int64(223), base2, c.tableID) - require.Equal(t, int64(323), max2, c.tableID) - } - } else { + if c.expectErrStr != "" { require.ErrorContains(t, err, c.expectErrStr, c.tableID) } var allocatorTypes []autoid.AllocatorType From 9c02c2cdc6661ad7560aed6a4151ea6d0f615578 Mon Sep 17 00:00:00 2001 From: D3Hunter Date: Fri, 15 Nov 2024 12:24:05 +0800 Subject: [PATCH 06/12] bazel --- pkg/lightning/checkpoints/BUILD.bazel | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/lightning/checkpoints/BUILD.bazel b/pkg/lightning/checkpoints/BUILD.bazel index 72adef289c3a8..cf08c81f1b592 100644 --- a/pkg/lightning/checkpoints/BUILD.bazel +++ b/pkg/lightning/checkpoints/BUILD.bazel @@ -36,7 +36,7 @@ go_test( embed = [":checkpoints"], flaky = True, race = "on", - shard_count = 24, + shard_count = 25, deps = [ "//br/pkg/version/build", "//pkg/lightning/checkpoints/checkpointspb", From 03e16f722e80098c1adb5443e1fa988c12ec12fa Mon Sep 17 00:00:00 2001 From: D3Hunter Date: Fri, 15 Nov 2024 12:32:11 +0800 Subject: [PATCH 07/12] change --- pkg/lightning/common/util.go | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pkg/lightning/common/util.go b/pkg/lightning/common/util.go index 445aabf418467..7017e79a646c2 100644 --- a/pkg/lightning/common/util.go +++ b/pkg/lightning/common/util.go @@ -51,9 +51,7 @@ import ( const ( retryTimeout = 3 * time.Second - // we are using optimistic txn all the time, we need retry more because we - // might face more write conflicts during parallel import. - defaultMaxRetry = 10 + defaultMaxRetry = 3 ) // MySQLConnectParam records the parameters needed to connect to a MySQL database. 
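As a minimal illustration of the per-allocator-type rebasing the patches above move to, the sketch below builds the bases map that common.RebaseTableAllocators expects from the three new checkpoint fields. It is not part of the patch series: the tableCheckpoint struct and basesFromCheckpoint helper are invented for illustration, the autoid import path is assumed to match the current repo layout, and the real wiring inside the importer is not shown in these hunks.

// Illustrative sketch, not part of the patch series: turn the three
// per-type bases recorded in the table checkpoint into the map consumed
// by common.RebaseTableAllocators, which only rebases the allocator
// types present in the map.
package main

import (
	"fmt"

	"github.com/pingcap/tidb/pkg/meta/autoid"
)

// tableCheckpoint is a stand-in for checkpoints.TableCheckpoint.
type tableCheckpoint struct {
	AutoRandBase  int64
	AutoIncrBase  int64
	AutoRowIDBase int64
}

// basesFromCheckpoint maps each recorded base to its allocator type.
// Zero bases are skipped, so allocators the table does not use are never
// rebased.
func basesFromCheckpoint(cp tableCheckpoint) map[autoid.AllocatorType]int64 {
	bases := make(map[autoid.AllocatorType]int64, 3)
	if cp.AutoRandBase > 0 {
		bases[autoid.AutoRandomType] = cp.AutoRandBase
	}
	if cp.AutoIncrBase > 0 {
		bases[autoid.AutoIncrementType] = cp.AutoIncrBase
	}
	if cp.AutoRowIDBase > 0 {
		bases[autoid.RowIDAllocType] = cp.AutoRowIDBase
	}
	return bases
}

func main() {
	// mirrors tbl8 in the updated lightning_checkpoint test: AUTO_ID_CACHE=1
	// with an auto_increment clustered PK, so only auto_incr_base is set.
	cp := tableCheckpoint{AutoIncrBase: 50001}
	fmt.Println(basesFromCheckpoint(cp))
}

Skipping zero bases matches the contract stated in the RebaseTableAllocators doc comment (an allocator is only rebased when its new base is given), so a table that only uses auto_random never has its auto-increment or row-id allocator touched.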
From c04f322ac86405696ec4546add01859b22852fcc Mon Sep 17 00:00:00 2001 From: D3Hunter Date: Fri, 15 Nov 2024 12:55:41 +0800 Subject: [PATCH 08/12] change --- pkg/lightning/checkpoints/checkpoints.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/lightning/checkpoints/checkpoints.go b/pkg/lightning/checkpoints/checkpoints.go index ef2689e37bbf4..4319ea57d6676 100644 --- a/pkg/lightning/checkpoints/checkpoints.go +++ b/pkg/lightning/checkpoints/checkpoints.go @@ -1785,7 +1785,7 @@ func (cpdb *MySQLCheckpointsDB) DumpTables(ctx context.Context, writer io.Writer update_time, auto_rand_base, auto_incr_base, - auto_row_id_base, + auto_row_id_base FROM %s.%s; `, cpdb.schema, CheckpointTableNameTable)) if err != nil { From c833bcf31394d6bfb8e95ca8d81ee5c2b791a372 Mon Sep 17 00:00:00 2001 From: D3Hunter Date: Fri, 15 Nov 2024 16:56:04 +0800 Subject: [PATCH 09/12] change --- lightning/pkg/importer/meta_manager.go | 34 +++++++++++++++++++++++--- pkg/lightning/common/common.go | 17 +++++++++++++ pkg/lightning/common/common_test.go | 13 ++++++++++ 3 files changed, 60 insertions(+), 4 deletions(-) diff --git a/lightning/pkg/importer/meta_manager.go b/lightning/pkg/importer/meta_manager.go index a1884bef9b0c3..005de988c1f37 100644 --- a/lightning/pkg/importer/meta_manager.go +++ b/lightning/pkg/importer/meta_manager.go @@ -199,7 +199,7 @@ func (m *dbTableMetaMgr) AllocTableRowIDs(ctx context.Context, requiredRowIDCnt return nil, 0, errors.Annotate(err, "enable pessimistic transaction failed") } - needAutoID := common.TableHasAutoID(m.tr.tableInfo.Core) + hasAutoID := common.TableHasAutoID(m.tr.tableInfo.Core) tableChecksumingMsg := "Target table is calculating checksum. Please wait until the checksum is finished and try again." doAllocTableRowIDsFn := func() error { return exec.Transact(ctx, "init table allocator base", func(ctx context.Context, tx *sql.Tx) error { @@ -273,10 +273,34 @@ FROM %s.%s WHERE table_id = ? FOR UPDATE`, m.schemaName, m.tableName), // no enough info are available, fetch row_id max for table if myStatus == metaStatusInitial { - myStartRowID = maxRowIDMax + if !hasAutoID { + // we still guarantee that the row ID is unique across all + // lightning instances even if the table don't have auto id. + myStartRowID = maxRowIDMax + } else if maxRowIDMax > 0 { + // someone have already allocated the auto id, we can continue + // allocating from previous maxRowIDMax. + myStartRowID = maxRowIDMax + } else { + // we are the first one to allocate the auto id, we need to + // fetch the max auto id base from the table, and allocate + // from there. + // as we only have one estimated requiredRowIDCount, but the + // table might have multiple allocators, so we use the max + // of them. + maxAutoIDBase, err := common.GetMaxAutoIDBase(m.tr, m.tr.dbInfo.ID, m.tr.tableInfo.Core) + if err != nil { + return errors.Trace(err) + } + myStartRowID = maxAutoIDBase + } myEndRowID = myStartRowID + requiredRowIDCnt - if needAutoID && myStartRowID == 0 && newStatus < metaStatusRestoreStarted { + // if we are the first one to allocate, the table has auto-id, + // and our start is 0, it means the table is empty, so we move + // the state to next one directly without going through below + // checksum branch. + if hasAutoID && myStartRowID == 0 && newStatus < metaStatusRestoreStarted { newStatus = metaStatusRestoreStarted } @@ -320,7 +344,9 @@ FROM %s.%s WHERE table_id = ? 
FOR UPDATE`, m.schemaName, m.tableName), var checksum *verify.KVChecksum // need to do checksum and update checksum meta since we are the first one. if myStatus < metaStatusRestoreStarted { - if m.needChecksum && baseTotalKvs == 0 { + // the table might have data if our StartRowID is not 0, or if the table + // don't have any auto id. + if (myStartRowID > 0 || !hasAutoID) && m.needChecksum && baseTotalKvs == 0 { // if another instance finished import before below checksum logic, // it will cause checksum mismatch, but it's very rare. remoteCk, err := DoChecksum(ctx, m.tr.tableInfo) diff --git a/pkg/lightning/common/common.go b/pkg/lightning/common/common.go index 2ac4467758eb2..7b5fcc19b4496 100644 --- a/pkg/lightning/common/common.go +++ b/pkg/lightning/common/common.go @@ -48,6 +48,23 @@ var DefaultImportVariablesTiDB = map[string]string{ "tidb_row_format_version": "1", } +// GetMaxAutoIDBase returns the max auto ID base for a table. +func GetMaxAutoIDBase(r autoid.Requirement, dbID int64, tblInfo *model.TableInfo) (int64, error) { + allocators, err := GetGlobalAutoIDAlloc(r, dbID, tblInfo) + if err != nil { + return 0, errors.Trace(err) + } + maxNextID := int64(0) + for _, alloc := range allocators { + nextID, err := alloc.NextGlobalAutoID() + if err != nil { + return 0, errors.Trace(err) + } + maxNextID = max(maxNextID, nextID) + } + return maxNextID - 1, nil +} + // RebaseTableAllocators rebase the allocators of a table. // This function only rebase a table allocator when its new base is given in // `bases` param, else it will be skipped. diff --git a/pkg/lightning/common/common_test.go b/pkg/lightning/common/common_test.go index c7d01cb7c8bec..b0fcbf5f12357 100644 --- a/pkg/lightning/common/common_test.go +++ b/pkg/lightning/common/common_test.go @@ -176,6 +176,10 @@ func TestRebaseTableAllocators(t *testing.T) { require.NoError(t, err) require.Equal(t, int64(1), id) } + maxAutoIDBase, err := common.GetMaxAutoIDBase(mockRequirement{kvStore}, 1, ti) + require.NoError(t, err) + require.Equal(t, int64(0), maxAutoIDBase) + ctx := context.Background() allocatorTypes := make([]autoid.AllocatorType, 0, len(allocators)) // rebase to 123 @@ -191,6 +195,9 @@ func TestRebaseTableAllocators(t *testing.T) { require.NoError(t, err) require.Equal(t, int64(124), nextID) } + maxAutoIDBase, err = common.GetMaxAutoIDBase(mockRequirement{kvStore}, 1, ti) + require.NoError(t, err) + require.Equal(t, int64(123), maxAutoIDBase) // this call rebase AutoIncrementType allocator to 223 require.NoError(t, common.RebaseTableAllocators(ctx, map[autoid.AllocatorType]int64{ autoid.AutoIncrementType: 223, @@ -201,6 +208,9 @@ func TestRebaseTableAllocators(t *testing.T) { next, err = allocators[1].NextGlobalAutoID() require.NoError(t, err) require.Equal(t, int64(124), next) + maxAutoIDBase, err = common.GetMaxAutoIDBase(mockRequirement{kvStore}, 1, ti) + require.NoError(t, err) + require.Equal(t, int64(223), maxAutoIDBase) // this call rebase AutoIncrementType allocator to 323, RowIDAllocType allocator to 423 require.NoError(t, common.RebaseTableAllocators(ctx, map[autoid.AllocatorType]int64{ autoid.AutoIncrementType: 323, @@ -212,4 +222,7 @@ func TestRebaseTableAllocators(t *testing.T) { next, err = allocators[1].NextGlobalAutoID() require.NoError(t, err) require.Equal(t, int64(424), next) + maxAutoIDBase, err = common.GetMaxAutoIDBase(mockRequirement{kvStore}, 1, ti) + require.NoError(t, err) + require.Equal(t, int64(423), maxAutoIDBase) } From 105f4016831ffbdcbc7450cb9b4f76fa23dbe311 Mon Sep 17 00:00:00 2001 From: 
D3Hunter Date: Fri, 15 Nov 2024 17:00:36 +0800 Subject: [PATCH 10/12] simplify --- lightning/pkg/importer/meta_manager.go | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/lightning/pkg/importer/meta_manager.go b/lightning/pkg/importer/meta_manager.go index 005de988c1f37..7c678db76a116 100644 --- a/lightning/pkg/importer/meta_manager.go +++ b/lightning/pkg/importer/meta_manager.go @@ -273,13 +273,11 @@ FROM %s.%s WHERE table_id = ? FOR UPDATE`, m.schemaName, m.tableName), // no enough info are available, fetch row_id max for table if myStatus == metaStatusInitial { - if !hasAutoID { - // we still guarantee that the row ID is unique across all - // lightning instances even if the table don't have auto id. - myStartRowID = maxRowIDMax - } else if maxRowIDMax > 0 { - // someone have already allocated the auto id, we can continue - // allocating from previous maxRowIDMax. + // if the table don't have auto id, we still guarantee that the + // row ID is unique across all lightning instances. + // or if someone have already allocated the auto id, we can continue + // allocating from previous maxRowIDMax. + if !hasAutoID || maxRowIDMax > 0 { myStartRowID = maxRowIDMax } else { // we are the first one to allocate the auto id, we need to From 4b485e0b96ae5a80f67106e7a44a65eea15b130d Mon Sep 17 00:00:00 2001 From: D3Hunter Date: Fri, 15 Nov 2024 17:02:27 +0800 Subject: [PATCH 11/12] comment --- pkg/lightning/common/common.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg/lightning/common/common.go b/pkg/lightning/common/common.go index 7b5fcc19b4496..4c16f700e4345 100644 --- a/pkg/lightning/common/common.go +++ b/pkg/lightning/common/common.go @@ -54,7 +54,8 @@ func GetMaxAutoIDBase(r autoid.Requirement, dbID int64, tblInfo *model.TableInfo if err != nil { return 0, errors.Trace(err) } - maxNextID := int64(0) + // all next auto id starts from 1. + maxNextID := int64(1) for _, alloc := range allocators { nextID, err := alloc.NextGlobalAutoID() if err != nil { From 3889d53bf74b8b2286cc8c6e6c011fd12272d9db Mon Sep 17 00:00:00 2001 From: D3Hunter Date: Tue, 19 Nov 2024 10:13:24 +0800 Subject: [PATCH 12/12] fix comments --- Makefile | 2 +- Makefile.common | 3 ++- lightning/pkg/importer/import.go | 5 +++++ 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 7e3e129d68b79..72b5e52dd7744 100644 --- a/Makefile +++ b/Makefile @@ -530,7 +530,7 @@ br_bins: .PHONY: data_parsers data_parsers: tools/bin/vfsgendev pkg/lightning/mydump/parser_generated.go lightning_web - PATH="$(GOPATH)/bin":"$(PATH)":"$(TOOLS)" protoc -I. -I"$(GOPATH)/pkg/mod" pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto --gogofaster_out=. + PATH="$(GOPATH)/bin":"$(PATH)":"$(TOOLS)" protoc -I. -I"$(GOMODCACHE)" pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto --gogofaster_out=. tools/bin/vfsgendev -source='"github.com/pingcap/tidb/lightning/pkg/web".Res' && mv res_vfsdata.go lightning/pkg/web/ .PHONY: build_dumpling diff --git a/Makefile.common b/Makefile.common index 94cd9734d2582..8a59f8a9fe96f 100644 --- a/Makefile.common +++ b/Makefile.common @@ -14,6 +14,7 @@ PROJECT=tidb GOPATH ?= $(shell go env GOPATH) +GOMODCACHE ?= $(shell go env GOMODCACHE) P=8 # Ensure GOPATH is set before running build process. @@ -132,4 +133,4 @@ ifneq ("$(CI)", "") endif BAZEL_INSTRUMENTATION_FILTER := --instrument_test_targets --instrumentation_filter=//pkg/...,//br/...,//dumpling/... 
-NOGO_FLAG=true \ No newline at end of file +NOGO_FLAG=true diff --git a/lightning/pkg/importer/import.go b/lightning/pkg/importer/import.go index 1f4376820fbd3..c4f5b9a4681f9 100644 --- a/lightning/pkg/importer/import.go +++ b/lightning/pkg/importer/import.go @@ -1990,6 +1990,11 @@ type deliverResult struct { } func saveCheckpoint(rc *Controller, t *TableImporter, engineID int32, chunk *checkpoints.ChunkCheckpoint) { + // we save the XXXBase values every time a chunk is finished. + // Note: a chunk with a larger autoID range may finish first, so the saved + // XXXBase is already the larger one; when a chunk with a smaller autoID range + // finishes later, it may have no effect on the saved XXXBase, which is fine + // because we only need the largest value. rc.saveCpCh <- saveCp{ tableName: t.tableName, merger: &checkpoints.RebaseCheckpointMerger{