use a big data slice to validate max row size

Antoine GIRARD 2019-12-02 01:10:28 +01:00 committed by Lunny Xiao
parent d8394dfdc5
commit 25bb29f88c
1 changed file with 8 additions and 2 deletions

@@ -9,8 +9,10 @@ import (
 	"fmt"
 	"math"
 	"math/big"
+	"math/rand"
 	"strconv"
 	"testing"
+	"time"
 	"xorm.io/xorm"
 	"xorm.io/xorm/convert"
@@ -116,8 +118,12 @@ func TestGetBytesMax(t *testing.T) {
 	err := testEngine.Sync2(new(Varbinary))
 	assert.NoError(t, err)
+	bigData := make([]byte, 80000) // over 8000
+	rand.Seed(time.Now().UnixNano())
+	rand.Read(bigData)
 	cnt, err := testEngine.Insert(&Varbinary{
-		Data: []byte("test"),
+		Data: bigData,
 	})
 	assert.NoError(t, err)
 	assert.EqualValues(t, 1, cnt)
@@ -126,7 +132,7 @@ func TestGetBytesMax(t *testing.T) {
 	has, err := testEngine.Get(&b)
 	assert.NoError(t, err)
 	assert.Equal(t, true, has)
-	assert.Equal(t, "test", string(b.Data))
+	assert.Equal(t, bigData, b.Data)
 }
 type ConvString string
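
For context, the updated TestGetBytesMax would read roughly as follows once the hunks above are applied. This is a sketch assembled from the diff, meant to sit in the existing integration-test file: the Varbinary struct definition, the testEngine variable, and the "var b Varbinary" declaration are assumptions taken from the surrounding test code and are not part of this commit.

	// Sketch of the test after this change. Varbinary, testEngine and the
	// declaration of b are assumed from the surrounding test file.
	func TestGetBytesMax(t *testing.T) {
		err := testEngine.Sync2(new(Varbinary))
		assert.NoError(t, err)

		// Fill an 80000-byte slice with random data, well past the 8000-byte
		// limit the test targets, so the round trip exercises max row size.
		bigData := make([]byte, 80000)
		rand.Seed(time.Now().UnixNano())
		rand.Read(bigData)

		cnt, err := testEngine.Insert(&Varbinary{
			Data: bigData,
		})
		assert.NoError(t, err)
		assert.EqualValues(t, 1, cnt)

		var b Varbinary // assumed declaration, not shown in the diff
		has, err := testEngine.Get(&b)
		assert.NoError(t, err)
		assert.Equal(t, true, has)
		assert.Equal(t, bigData, b.Data)
	}

Seeding the global generator with time.Now().UnixNano() was the usual idiom at the time of this commit; since Go 1.20 the global math/rand source is seeded automatically and rand.Seed is deprecated.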