diff --git a/br/pkg/lightning/restore/table_restore.go b/br/pkg/lightning/restore/table_restore.go
index 5b0ce1bc0794d..9c7657d032cc9 100644
--- a/br/pkg/lightning/restore/table_restore.go
+++ b/br/pkg/lightning/restore/table_restore.go
@@ -683,10 +683,18 @@ func (tr *TableRestore) postProcess(
 		tblInfo := tr.tableInfo.Core
 		var err error
 		if tblInfo.PKIsHandle && tblInfo.ContainsAutoRandomBits() {
-			err = AlterAutoRandom(ctx, rc.tidbGlue.GetSQLExecutor(), tr.tableName, tr.alloc.Get(autoid.AutoRandomType).Base()+1)
+			var maxAutoRandom, autoRandomTotalBits uint64
+			autoRandomTotalBits = 64
+			autoRandomBits := tblInfo.AutoRandomBits // range from (0, 15]
+			if !tblInfo.IsAutoRandomBitColUnsigned() {
+				// if auto_random is signed, leave one extra bit
+				autoRandomTotalBits = 63
+			}
+			maxAutoRandom = 1<<(autoRandomTotalBits-autoRandomBits) - 1
+			err = AlterAutoRandom(ctx, rc.tidbGlue.GetSQLExecutor(), tr.tableName, uint64(tr.alloc.Get(autoid.AutoRandomType).Base())+1, maxAutoRandom)
 		} else if common.TableHasAutoRowID(tblInfo) || tblInfo.GetAutoIncrementColInfo() != nil {
 			// only alter auto increment id iff table contains auto-increment column or generated handle
-			err = AlterAutoIncrement(ctx, rc.tidbGlue.GetSQLExecutor(), tr.tableName, tr.alloc.Get(autoid.RowIDAllocType).Base()+1)
+			err = AlterAutoIncrement(ctx, rc.tidbGlue.GetSQLExecutor(), tr.tableName, uint64(tr.alloc.Get(autoid.RowIDAllocType).Base())+1)
 		}
 		rc.alterTableLock.Unlock()
 		saveCpErr := rc.saveStatusCheckpoint(ctx, tr.tableName, checkpoints.WholeTableEngineID, err, checkpoints.CheckpointStatusAlteredAutoInc)
diff --git a/br/pkg/lightning/restore/tidb.go b/br/pkg/lightning/restore/tidb.go
index 4616ae66ac71e..860536f8f47e1 100644
--- a/br/pkg/lightning/restore/tidb.go
+++ b/br/pkg/lightning/restore/tidb.go
@@ -18,6 +18,7 @@ import (
 	"context"
 	"database/sql"
 	"fmt"
+	"math"
 	"strconv"
 	"strings"
 
@@ -391,9 +392,17 @@ func ObtainNewCollationEnabled(ctx context.Context, g glue.SQLExecutor) (bool, e
 // NOTE: since tidb can make sure the auto id is always be rebase even if the `incr` value is smaller
 // the the auto incremanet base in tidb side, we needn't fetch currently auto increment value here.
 // See: https://github.com/pingcap/tidb/blob/64698ef9a3358bfd0fdc323996bb7928a56cadca/ddl/ddl_api.go#L2528-L2533
-func AlterAutoIncrement(ctx context.Context, g glue.SQLExecutor, tableName string, incr int64) error {
-	logger := log.With(zap.String("table", tableName), zap.Int64("auto_increment", incr))
-	query := fmt.Sprintf("ALTER TABLE %s AUTO_INCREMENT=%d", tableName, incr)
+func AlterAutoIncrement(ctx context.Context, g glue.SQLExecutor, tableName string, incr uint64) error {
+	var query string
+	logger := log.With(zap.String("table", tableName), zap.Uint64("auto_increment", incr))
+	if incr > math.MaxInt64 {
+		// automatically set max value
+		logger.Warn("auto_increment out of the maximum value TiDB supports, automatically set to the max", zap.Uint64("auto_increment", incr))
+		incr = math.MaxInt64
+		query = fmt.Sprintf("ALTER TABLE %s FORCE AUTO_INCREMENT=%d", tableName, incr)
+	} else {
+		query = fmt.Sprintf("ALTER TABLE %s AUTO_INCREMENT=%d", tableName, incr)
+	}
 	task := logger.Begin(zap.InfoLevel, "alter table auto_increment")
 	err := g.ExecuteWithLog(ctx, query, "alter table auto_increment", logger)
 	task.End(zap.ErrorLevel, err)
@@ -406,8 +415,16 @@ func AlterAutoIncrement(ctx context.Context, g glue.SQLExecutor, tableName strin
 	return errors.Annotatef(err, "%s", query)
 }
 
-func AlterAutoRandom(ctx context.Context, g glue.SQLExecutor, tableName string, randomBase int64) error {
-	logger := log.With(zap.String("table", tableName), zap.Int64("auto_random", randomBase))
+func AlterAutoRandom(ctx context.Context, g glue.SQLExecutor, tableName string, randomBase uint64, maxAutoRandom uint64) error {
+	logger := log.With(zap.String("table", tableName), zap.Uint64("auto_random", randomBase))
+	if randomBase == maxAutoRandom+1 {
+		// insert a tuple with key maxAutoRandom
+		randomBase = maxAutoRandom
+	} else if randomBase > maxAutoRandom {
+		// TiDB does nothing when inserting an overflow value
+		logger.Warn("auto_random out of the maximum value TiDB supports")
+		return nil
+	}
 	query := fmt.Sprintf("ALTER TABLE %s AUTO_RANDOM_BASE=%d", tableName, randomBase)
 	task := logger.Begin(zap.InfoLevel, "alter table auto_random")
 	err := g.ExecuteWithLog(ctx, query, "alter table auto_random_base", logger)
diff --git a/br/pkg/lightning/restore/tidb_test.go b/br/pkg/lightning/restore/tidb_test.go
index 4599d64540d17..151c6c073fff0 100644
--- a/br/pkg/lightning/restore/tidb_test.go
+++ b/br/pkg/lightning/restore/tidb_test.go
@@ -17,6 +17,7 @@ package restore
 import (
 	"context"
 	"database/sql"
+	"math"
 	"testing"
 
 	"github.com/DATA-DOG/go-sqlmock"
@@ -428,11 +429,17 @@ func (s *tidbSuite) TestAlterAutoInc(c *C) {
 	s.mockDB.
 		ExpectExec("\\QALTER TABLE `db`.`table` AUTO_INCREMENT=12345\\E").
 		WillReturnResult(sqlmock.NewResult(1, 1))
+	s.mockDB.
+		ExpectExec("\\QALTER TABLE `db`.`table` FORCE AUTO_INCREMENT=9223372036854775807\\E").
+		WillReturnResult(sqlmock.NewResult(1, 1))
 	s.mockDB.
 		ExpectClose()
 
 	err := AlterAutoIncrement(ctx, s.tiGlue.GetSQLExecutor(), "`db`.`table`", 12345)
 	c.Assert(err, IsNil)
+
+	err = AlterAutoIncrement(ctx, s.tiGlue.GetSQLExecutor(), "`db`.`table`", uint64(math.MaxInt64)+1)
+	c.Assert(err, IsNil)
 }
 
 func (s *tidbSuite) TestAlterAutoRandom(c *C) {
@@ -441,10 +448,20 @@ func (s *tidbSuite) TestAlterAutoRandom(c *C) {
 	s.mockDB.
 		ExpectExec("\\QALTER TABLE `db`.`table` AUTO_RANDOM_BASE=12345\\E").
 		WillReturnResult(sqlmock.NewResult(1, 1))
+	s.mockDB.
+		ExpectExec("\\QALTER TABLE `db`.`table` AUTO_RANDOM_BASE=288230376151711743\\E").
+		WillReturnResult(sqlmock.NewResult(1, 1))
 	s.mockDB.
 		ExpectClose()
 
-	err := AlterAutoRandom(ctx, s.tiGlue.GetSQLExecutor(), "`db`.`table`", 12345)
+	err := AlterAutoRandom(ctx, s.tiGlue.GetSQLExecutor(), "`db`.`table`", 12345, 288230376151711743)
+	c.Assert(err, IsNil)
+
+	// insert 288230376151711743 and try rebase to 288230376151711744
+	err = AlterAutoRandom(ctx, s.tiGlue.GetSQLExecutor(), "`db`.`table`", 288230376151711744, 288230376151711743)
+	c.Assert(err, IsNil)
+
+	err = AlterAutoRandom(ctx, s.tiGlue.GetSQLExecutor(), "`db`.`table`", uint64(math.MaxInt64)+1, 288230376151711743)
 	c.Assert(err, IsNil)
 }
 
diff --git a/br/tests/lightning_max_incr/config.toml b/br/tests/lightning_max_incr/config.toml
new file mode 100644
index 0000000000000..d2152b47c922a
--- /dev/null
+++ b/br/tests/lightning_max_incr/config.toml
@@ -0,0 +1,2 @@
+[tikv-importer]
+backend = 'local'
diff --git a/br/tests/lightning_max_incr/data/db-schema-create.sql b/br/tests/lightning_max_incr/data/db-schema-create.sql
new file mode 100644
index 0000000000000..c88b0e3150e76
--- /dev/null
+++ b/br/tests/lightning_max_incr/data/db-schema-create.sql
@@ -0,0 +1 @@
+create database db;
\ No newline at end of file
diff --git a/br/tests/lightning_max_incr/data/db.test-schema.sql b/br/tests/lightning_max_incr/data/db.test-schema.sql
new file mode 100644
index 0000000000000..494571fe9736d
--- /dev/null
+++ b/br/tests/lightning_max_incr/data/db.test-schema.sql
@@ -0,0 +1,5 @@
+create table test(
+    a bigint auto_increment,
+    b int,
+    primary key(a)
+);
\ No newline at end of file
diff --git a/br/tests/lightning_max_incr/data/db.test.000000000.csv b/br/tests/lightning_max_incr/data/db.test.000000000.csv
new file mode 100644
index 0000000000000..cb1603f9d4a71
--- /dev/null
+++ b/br/tests/lightning_max_incr/data/db.test.000000000.csv
@@ -0,0 +1,3 @@
+"a","b"
+1,2
+9223372036854775805,3
\ No newline at end of file
diff --git a/br/tests/lightning_max_incr/data/db.test1-schema.sql b/br/tests/lightning_max_incr/data/db.test1-schema.sql
new file mode 100644
index 0000000000000..d44068487af8b
--- /dev/null
+++ b/br/tests/lightning_max_incr/data/db.test1-schema.sql
@@ -0,0 +1,5 @@
+create table test1(
+    a bigint auto_increment,
+    b int,
+    primary key(a)
+);
\ No newline at end of file
diff --git a/br/tests/lightning_max_incr/data/db.test1.000000000.csv b/br/tests/lightning_max_incr/data/db.test1.000000000.csv
new file mode 100644
index 0000000000000..abe9c63c2a028
--- /dev/null
+++ b/br/tests/lightning_max_incr/data/db.test1.000000000.csv
@@ -0,0 +1,3 @@
+"a","b"
+1,2
+9223372036854775807,3
\ No newline at end of file
diff --git a/br/tests/lightning_max_incr/run.sh b/br/tests/lightning_max_incr/run.sh
new file mode 100644
index 0000000000000..ce044c0230623
--- /dev/null
+++ b/br/tests/lightning_max_incr/run.sh
@@ -0,0 +1,52 @@
+#!/bin/sh
+#
+# Copyright 2022 PingCAP, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eux
+
+check_cluster_version 4 0 0 'local backend' || exit 0
+
+ENGINE_COUNT=6
+
+check_result() {
+    run_sql 'SHOW DATABASES;'
+    check_contains 'Database: db';
+    run_sql 'SHOW TABLES IN db;'
+    check_contains 'Tables_in_db: test'
+    check_contains 'Tables_in_db: test1'
+    run_sql 'SELECT count(*) FROM db.test;'
+    check_contains 'count(*): 2'
+    run_sql 'SELECT count(*) FROM db.test1;'
+    check_contains 'count(*): 2'
+}
+
+cleanup() {
+    rm -f $TEST_DIR/lightning.log
+    rm -rf $TEST_DIR/sst
+    run_sql 'DROP DATABASE IF EXISTS db;'
+}
+
+cleanup
+
+# db.test contains a max key (9223372036854775805) that is below the int64 maximum,
+# while db.test1 contains a key equal to the int64 maximum (9223372036854775807)
+run_lightning --sorted-kv-dir "$TEST_DIR/sst" --config "tests/$TEST_NAME/config.toml" --log-file "$TEST_DIR/lightning.log"
+check_result
+# successfully insert: max key has not reached maximum
+run_sql 'INSERT INTO db.test(b) VALUES(11);'
+# fail for insertion: db.test1 already contains the maximum int64 key
+run_sql 'INSERT INTO db.test1(b) VALUES(22);' 2>&1 | tee -a "$TEST_DIR/sql_res.$TEST_NAME.txt"
+check_contains 'ERROR'
+cleanup
diff --git a/br/tests/lightning_max_random/config.toml b/br/tests/lightning_max_random/config.toml
new file mode 100644
index 0000000000000..d2152b47c922a
--- /dev/null
+++ b/br/tests/lightning_max_random/config.toml
@@ -0,0 +1,2 @@
+[tikv-importer]
+backend = 'local'
diff --git a/br/tests/lightning_max_random/data/db-schema-create.sql b/br/tests/lightning_max_random/data/db-schema-create.sql
new file mode 100644
index 0000000000000..c88b0e3150e76
--- /dev/null
+++ b/br/tests/lightning_max_random/data/db-schema-create.sql
@@ -0,0 +1 @@
+create database db;
\ No newline at end of file
diff --git a/br/tests/lightning_max_random/data/db.test-schema.sql b/br/tests/lightning_max_random/data/db.test-schema.sql
new file mode 100644
index 0000000000000..eb1838507ea94
--- /dev/null
+++ b/br/tests/lightning_max_random/data/db.test-schema.sql
@@ -0,0 +1,5 @@
+create table test(
+    a bigint auto_random(10),
+    b int,
+    primary key(a)
+);
\ No newline at end of file
diff --git a/br/tests/lightning_max_random/data/db.test.000000000.csv b/br/tests/lightning_max_random/data/db.test.000000000.csv
new file mode 100644
index 0000000000000..a278110306af9
--- /dev/null
+++ b/br/tests/lightning_max_random/data/db.test.000000000.csv
@@ -0,0 +1,3 @@
+"a","b"
+1,2
+9007199254740990,3
\ No newline at end of file
diff --git a/br/tests/lightning_max_random/data/db.test1-schema.sql b/br/tests/lightning_max_random/data/db.test1-schema.sql
new file mode 100644
index 0000000000000..5ab8c5e9b32ef
--- /dev/null
+++ b/br/tests/lightning_max_random/data/db.test1-schema.sql
@@ -0,0 +1,5 @@
+create table test1(
+    a bigint auto_random(10),
+    b int,
+    primary key(a)
+);
\ No newline at end of file
diff --git a/br/tests/lightning_max_random/data/db.test1.000000000.csv b/br/tests/lightning_max_random/data/db.test1.000000000.csv
new file mode 100644
index 0000000000000..550f578de3e1a
--- /dev/null
+++ b/br/tests/lightning_max_random/data/db.test1.000000000.csv
@@ -0,0 +1,3 @@
+"a","b"
+1,2
+9007199254740991,3
\ No newline at end of file
diff --git a/br/tests/lightning_max_random/data/db.test2-schema.sql b/br/tests/lightning_max_random/data/db.test2-schema.sql
new file mode 100644
index 0000000000000..740458dfd43df
--- /dev/null
+++ b/br/tests/lightning_max_random/data/db.test2-schema.sql
@@ -0,0 +1,5 @@
+create table test2(
+    a bigint auto_random(10),
+    b int,
+    primary key(a)
+);
\ No newline at end of file
diff --git a/br/tests/lightning_max_random/data/db.test2.000000000.csv b/br/tests/lightning_max_random/data/db.test2.000000000.csv
new file mode 100644
index 0000000000000..8de1e50edd09a
--- /dev/null
+++ b/br/tests/lightning_max_random/data/db.test2.000000000.csv
@@ -0,0 +1,3 @@
+"a","b"
+1,2
+9007199254740992,3
\ No newline at end of file
diff --git a/br/tests/lightning_max_random/run.sh b/br/tests/lightning_max_random/run.sh
new file mode 100644
index 0000000000000..972481e5bc0a5
--- /dev/null
+++ b/br/tests/lightning_max_random/run.sh
@@ -0,0 +1,65 @@
+#!/bin/sh
+#
+# Copyright 2022 PingCAP, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eux
+
+check_cluster_version 4 0 0 'local backend' || exit 0
+
+ENGINE_COUNT=6
+
+check_result() {
+    run_sql 'SHOW DATABASES;'
+    check_contains 'Database: db';
+    run_sql 'SHOW TABLES IN db;'
+    check_contains 'Tables_in_db: test'
+    check_contains 'Tables_in_db: test1'
+    check_contains 'Tables_in_db: test2'
+    run_sql 'SELECT count(*) FROM db.test;'
+    check_contains 'count(*): 2'
+    run_sql 'SELECT count(*) FROM db.test1;'
+    check_contains 'count(*): 2'
+    run_sql 'SELECT count(*) FROM db.test2;'
+    check_contains 'count(*): 2'
+}
+
+cleanup() {
+    rm -f $TEST_DIR/lightning.log
+    rm -rf $TEST_DIR/sst
+    run_sql 'DROP DATABASE IF EXISTS db;'
+}
+
+cleanup
+
+# auto_random_max = 2^{64-1-10}-1
+# db.test contains key auto_random_max - 1
+# db.test1 contains key auto_random_max
+# db.test2 contains key auto_random_max + 1 (overflow)
+run_lightning --sorted-kv-dir "$TEST_DIR/sst" --config "tests/$TEST_NAME/config.toml" --log-file "$TEST_DIR/lightning.log"
+check_result
+# successfully insert: db.test's auto_random key has not reached the maximum
+run_sql 'INSERT INTO db.test(b) VALUES(11);'
+# fail for further insertion
+run_sql 'INSERT INTO db.test(b) VALUES(22);' 2>&1 | tee -a "$TEST_DIR/sql_res.$TEST_NAME.txt"
+check_contains 'ERROR'
+# db.test1 already holds auto_random_max: the next insert exhausts the range, then further inserts fail
+run_sql 'INSERT INTO db.test1(b) VALUES(11);'
+run_sql 'INSERT INTO db.test1(b) VALUES(22);' 2>&1 | tee -a "$TEST_DIR/sql_res.$TEST_NAME.txt"
+check_contains 'ERROR'
+# successfully insert for overflow key
+run_sql 'INSERT INTO db.test2(b) VALUES(33);'
+run_sql 'INSERT INTO db.test2(b) VALUES(44);'
+run_sql 'INSERT INTO db.test2(b) VALUES(55);'
+cleanup