kefu/models/models.go

package models

import (
	"encoding/json"
	"fmt"
	"github.com/jinzhu/gorm"
	_ "github.com/jinzhu/gorm/dialects/mysql"
	"github.com/patrickmn/go-cache"
	"io/ioutil"
	"kefu/common"
	"kefu/tools"
	"kefu/types"
	"log"
	"reflect"
	"strings"
	"time"
)
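
// DB is the package-wide gorm connection; DBcache is an in-memory cache
// (5-minute default TTL, 10-minute cleanup) created in NewConnect.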
var DB *gorm.DB
var DBcache *cache.Cache

type Model struct {
	ID        uint      `gorm:"primary_key" json:"id"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}
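
// NewConnect loads the MySQL settings from the JSON file at mysqlConfigFile,
// opens the gorm connection, tunes the connection pool and creates DBcache.
// It panics if the config file is missing or unparseable and returns an error
// if the database connection itself fails.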
func NewConnect(mysqlConfigFile string) error {
	var mysql = &types.Mysql{
		Username: "go_fly_pro",
		Password: "go_fly_pro",
		Database: "go_fly_pro",
		Server:   "127.0.0.1",
		Port:     "3306",
	}
	isExist, _ := tools.IsFileExist(mysqlConfigFile)
	if !isExist {
		panic("MySQL config file does not exist! " + mysqlConfigFile)
	}
	info, err := ioutil.ReadFile(mysqlConfigFile)
	if err != nil {
		panic("failed to read MySQL config file! " + err.Error())
	}
	err = json.Unmarshal(info, mysql)
	if err != nil {
		panic("failed to parse MySQL config file JSON! " + err.Error())
	}
	dsn := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s?charset=utf8mb4&parseTime=True&loc=Local", mysql.Username, mysql.Password, mysql.Server, mysql.Port, mysql.Database)
	DB, err = gorm.Open("mysql", dsn)
	if err != nil {
		log.Println("database connection failed:", err)
		return err
	}
	DB.SingularTable(true)
	DB.LogMode(true)
	DB.DB().SetMaxIdleConns(10)
	DB.DB().SetMaxOpenConns(100)
	DB.DB().SetConnMaxLifetime(59 * time.Second)
	// initialize configuration data
	InitConfig()
	DBcache = cache.New(5*time.Minute, 10*time.Minute)
	return nil
}
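
// Execute runs a raw SQL statement and logs any error it produces.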
func Execute(sql string) error {
	db := DB.Exec(sql)
	err := db.Error
	if err != nil {
		log.Println("models.go sql execute error:" + err.Error())
		return err
	}
	return nil
}
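
// CloseDB closes the underlying database connection.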
func CloseDB() {
	DB.Close()
}

// CheckModels reconnects to the database if the previous connection attempt failed.
func CheckModels() {
	if common.MySQLConnectFaild {
		err := NewConnect(common.ConfigDirPath + "/mysql.json")
		if err == nil {
			common.MySQLConnectFaild = false
		}
	}
}
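
// BatchInsert inserts all rows of the data slice inside one transaction, issuing
// a multi-row INSERT per chunk of batchSize rows. Because it builds raw SQL, gorm
// callbacks are skipped, so fields such as CreatedAt/UpdatedAt are inserted as-is.
//
// A minimal usage sketch (the Message model here is only an assumed example):
//
//	msgs := []Message{{Content: "hi"}, {Content: "hello"}}
//	if err := BatchInsert(DB, msgs, 500); err != nil {
//		log.Println("batch insert failed:", err)
//	}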
func BatchInsert(db *gorm.DB, data interface{}, batchSize int) error {
	value := reflect.ValueOf(data)
	if value.Kind() != reflect.Slice {
		return fmt.Errorf("BatchInsert: input data type is not slice")
	}
	length := value.Len()
	if length == 0 {
		return nil
	}
	if batchSize <= 0 {
		batchSize = length
	}
	tx := db.Begin()
	if tx.Error != nil {
		return tx.Error
	}
	// Collect the insertable columns, skipping the primary key and ignored fields.
	// Keep both the DB column name (for the SQL) and the struct field name (for reflection).
	scope := tx.NewScope(data)
	tableName := scope.TableName()
	columns := make([]string, 0, len(scope.Fields()))
	fieldNames := make([]string, 0, len(scope.Fields()))
	for _, field := range scope.Fields() {
		if !field.IsPrimaryKey && !field.IsIgnored {
			columns = append(columns, field.DBName)
			fieldNames = append(fieldNames, field.Name)
		}
	}
	prefix := fmt.Sprintf("INSERT INTO %s (%s) VALUES ", tableName, strings.Join(columns, ","))
	// Insert the slice in chunks of batchSize rows, one statement per chunk.
	for start := 0; start < length; start += batchSize {
		end := start + batchSize
		if end > length {
			end = length
		}
		values := make([]string, 0, end-start)
		args := make([]interface{}, 0, (end-start)*len(fieldNames))
		for j := start; j < end; j++ {
			elem := reflect.Indirect(value.Index(j))
			placeholders := make([]string, 0, len(fieldNames))
			for _, name := range fieldNames {
				placeholders = append(placeholders, "?")
				args = append(args, elem.FieldByName(name).Interface())
			}
			values = append(values, "("+strings.Join(placeholders, ",")+")")
		}
		if err := tx.Exec(prefix+strings.Join(values, ","), args...).Error; err != nil {
			tx.Rollback()
			return err
		}
	}
	return tx.Commit().Error
}