Fix bulk SQL record inserts: choose the insert chunk size per database type

pull/595/head
hunterlong 2020-05-18 21:17:38 -07:00
parent 6de67a1906
commit bcd38ec668
3 changed files with 14 additions and 2 deletions

View File

@ -105,6 +105,18 @@ type Database interface {
ParseTime(t string) (time.Time, error)
DbType() string
GormDB() *gorm.DB
ChunkSize() int
}
// ChunkSize returns how many records to insert per batch when bulk
// inserting, tuned by database dialect: MySQL and Postgres accept
// large multi-row INSERTs, while other dialects (presumably SQLite —
// confirm against the supported drivers) need a smaller batch to stay
// within their placeholder/statement limits.
func (it *Db) ChunkSize() int {
	switch it.Database.Dialect().GetName() {
	case "mysql", "postgres":
		// Both dialects handle large batches well; merged into one
		// case to remove the duplicated return value.
		return 3000
	default:
		// Conservative size for any other dialect.
		return 100
	}
}
func (it *Db) GormDB() *gorm.DB {

View File

@ -42,7 +42,7 @@ func Samples() error {
records = append(records, failure)
createdAt = createdAt.Add(35 * time.Minute)
}
if err := gormbulk.BulkInsert(db.GormDB(), records, 3000); err != nil {
if err := gormbulk.BulkInsert(db.GormDB(), records, db.ChunkSize()); err != nil {
log.Error(err)
return err
}

View File

@ -16,7 +16,7 @@ var SampleHits = 99900.
func Samples() error {
for i := int64(1); i <= 5; i++ {
records := createHitsAt(i)
if err := gormbulk.BulkInsert(db.GormDB(), records, 3000); err != nil {
if err := gormbulk.BulkInsert(db.GormDB(), records, db.ChunkSize()); err != nil {
log.Error(err)
return err
}