1
0
mirror of https://github.com/gogf/gf.git synced 2025-04-05 11:18:50 +08:00

add type mapping feature for command gf gen dao (#2722)

This commit is contained in:
John Guo 2023-06-28 10:06:33 +08:00 committed by GitHub
parent c5c2938380
commit 22696566d6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 300 additions and 114 deletions

View File

@ -9,12 +9,15 @@ package gendao
import (
"context"
"fmt"
"golang.org/x/mod/modfile"
"strings"
"github.com/gogf/gf/cmd/gf/v2/internal/utility/utils"
"github.com/gogf/gf/v2/container/garray"
"github.com/gogf/gf/v2/database/gdb"
"github.com/gogf/gf/v2/frame/g"
"github.com/gogf/gf/v2/os/gfile"
"github.com/gogf/gf/v2/os/gproc"
"github.com/gogf/gf/v2/os/gtime"
"github.com/gogf/gf/v2/text/gstr"
"github.com/gogf/gf/v2/util/gtag"
@ -48,6 +51,12 @@ CONFIGURATION SUPPORT
path: "./my-app"
prefix: "primary_"
tables: "user, userDetail"
typeMapping:
decimal:
type: decimal.Decimal
import: github.com/shopspring/decimal
numeric:
type: string
`
CGenDaoBriefPath = `directory path for generated files`
CGenDaoBriefLink = `database configuration, the same as the ORM configuration of GoFrame`
@ -69,6 +78,7 @@ CONFIGURATION SUPPORT
CGenDaoBriefNoJsonTag = `no json tag will be added for each field`
CGenDaoBriefNoModelComment = `no model comment will be added for each field`
CGenDaoBriefClear = `delete all generated go files that do not exist in database`
CGenDaoBriefTypeMapping = `custom local type mapping for generated struct attributes relevant to fields of table`
CGenDaoBriefGroup = `
specifying the configuration group name of database for generated ORM instance,
it's not necessary and the default value is "default"
@ -104,7 +114,21 @@ generated json tag case for model struct, cases are as follows:
)
var (
createdAt = gtime.Now()
createdAt = gtime.Now()
defaultTypeMapping = map[string]TypeMapping{
"decimal": {
Type: "float64",
},
"money": {
Type: "float64",
},
"numeric": {
Type: "float64",
},
"smallmoney": {
Type: "float64",
},
}
)
func init() {
@ -134,6 +158,7 @@ func init() {
`CGenDaoBriefNoJsonTag`: CGenDaoBriefNoJsonTag,
`CGenDaoBriefNoModelComment`: CGenDaoBriefNoModelComment,
`CGenDaoBriefClear`: CGenDaoBriefClear,
`CGenDaoBriefTypeMapping`: CGenDaoBriefTypeMapping,
`CGenDaoBriefGroup`: CGenDaoBriefGroup,
`CGenDaoBriefJsonCase`: CGenDaoBriefJsonCase,
`CGenDaoBriefTplDaoIndexPath`: CGenDaoBriefTplDaoIndexPath,
@ -147,30 +172,31 @@ type (
CGenDao struct{}
CGenDaoInput struct {
g.Meta `name:"dao" config:"{CGenDaoConfig}" usage:"{CGenDaoUsage}" brief:"{CGenDaoBrief}" eg:"{CGenDaoEg}" ad:"{CGenDaoAd}"`
Path string `name:"path" short:"p" brief:"{CGenDaoBriefPath}" d:"internal"`
Link string `name:"link" short:"l" brief:"{CGenDaoBriefLink}"`
Tables string `name:"tables" short:"t" brief:"{CGenDaoBriefTables}"`
TablesEx string `name:"tablesEx" short:"x" brief:"{CGenDaoBriefTablesEx}"`
Group string `name:"group" short:"g" brief:"{CGenDaoBriefGroup}" d:"default"`
Prefix string `name:"prefix" short:"f" brief:"{CGenDaoBriefPrefix}"`
RemovePrefix string `name:"removePrefix" short:"r" brief:"{CGenDaoBriefRemovePrefix}"`
JsonCase string `name:"jsonCase" short:"j" brief:"{CGenDaoBriefJsonCase}" d:"CamelLower"`
ImportPrefix string `name:"importPrefix" short:"i" brief:"{CGenDaoBriefImportPrefix}"`
DaoPath string `name:"daoPath" short:"d" brief:"{CGenDaoBriefDaoPath}" d:"dao"`
DoPath string `name:"doPath" short:"o" brief:"{CGenDaoBriefDoPath}" d:"model/do"`
EntityPath string `name:"entityPath" short:"e" brief:"{CGenDaoBriefEntityPath}" d:"model/entity"`
TplDaoIndexPath string `name:"tplDaoIndexPath" short:"t1" brief:"{CGenDaoBriefTplDaoIndexPath}"`
TplDaoInternalPath string `name:"tplDaoInternalPath" short:"t2" brief:"{CGenDaoBriefTplDaoInternalPath}"`
TplDaoDoPath string `name:"tplDaoDoPath" short:"t3" brief:"{CGenDaoBriefTplDaoDoPathPath}"`
TplDaoEntityPath string `name:"tplDaoEntityPath" short:"t4" brief:"{CGenDaoBriefTplDaoEntityPath}"`
StdTime bool `name:"stdTime" short:"s" brief:"{CGenDaoBriefStdTime}" orphan:"true"`
WithTime bool `name:"withTime" short:"w" brief:"{CGenDaoBriefWithTime}" orphan:"true"`
GJsonSupport bool `name:"gJsonSupport" short:"n" brief:"{CGenDaoBriefGJsonSupport}" orphan:"true"`
OverwriteDao bool `name:"overwriteDao" short:"v" brief:"{CGenDaoBriefOverwriteDao}" orphan:"true"`
DescriptionTag bool `name:"descriptionTag" short:"c" brief:"{CGenDaoBriefDescriptionTag}" orphan:"true"`
NoJsonTag bool `name:"noJsonTag" short:"k" brief:"{CGenDaoBriefNoJsonTag}" orphan:"true"`
NoModelComment bool `name:"noModelComment" short:"m" brief:"{CGenDaoBriefNoModelComment}" orphan:"true"`
Clear bool `name:"clear" short:"a" brief:"{CGenDaoBriefClear}" orphan:"true"`
Path string `name:"path" short:"p" brief:"{CGenDaoBriefPath}" d:"internal"`
Link string `name:"link" short:"l" brief:"{CGenDaoBriefLink}"`
Tables string `name:"tables" short:"t" brief:"{CGenDaoBriefTables}"`
TablesEx string `name:"tablesEx" short:"x" brief:"{CGenDaoBriefTablesEx}"`
Group string `name:"group" short:"g" brief:"{CGenDaoBriefGroup}" d:"default"`
Prefix string `name:"prefix" short:"f" brief:"{CGenDaoBriefPrefix}"`
RemovePrefix string `name:"removePrefix" short:"r" brief:"{CGenDaoBriefRemovePrefix}"`
JsonCase string `name:"jsonCase" short:"j" brief:"{CGenDaoBriefJsonCase}" d:"CamelLower"`
ImportPrefix string `name:"importPrefix" short:"i" brief:"{CGenDaoBriefImportPrefix}"`
DaoPath string `name:"daoPath" short:"d" brief:"{CGenDaoBriefDaoPath}" d:"dao"`
DoPath string `name:"doPath" short:"o" brief:"{CGenDaoBriefDoPath}" d:"model/do"`
EntityPath string `name:"entityPath" short:"e" brief:"{CGenDaoBriefEntityPath}" d:"model/entity"`
TplDaoIndexPath string `name:"tplDaoIndexPath" short:"t1" brief:"{CGenDaoBriefTplDaoIndexPath}"`
TplDaoInternalPath string `name:"tplDaoInternalPath" short:"t2" brief:"{CGenDaoBriefTplDaoInternalPath}"`
TplDaoDoPath string `name:"tplDaoDoPath" short:"t3" brief:"{CGenDaoBriefTplDaoDoPathPath}"`
TplDaoEntityPath string `name:"tplDaoEntityPath" short:"t4" brief:"{CGenDaoBriefTplDaoEntityPath}"`
StdTime bool `name:"stdTime" short:"s" brief:"{CGenDaoBriefStdTime}" orphan:"true"`
WithTime bool `name:"withTime" short:"w" brief:"{CGenDaoBriefWithTime}" orphan:"true"`
GJsonSupport bool `name:"gJsonSupport" short:"n" brief:"{CGenDaoBriefGJsonSupport}" orphan:"true"`
OverwriteDao bool `name:"overwriteDao" short:"v" brief:"{CGenDaoBriefOverwriteDao}" orphan:"true"`
DescriptionTag bool `name:"descriptionTag" short:"c" brief:"{CGenDaoBriefDescriptionTag}" orphan:"true"`
NoJsonTag bool `name:"noJsonTag" short:"k" brief:"{CGenDaoBriefNoJsonTag}" orphan:"true"`
NoModelComment bool `name:"noModelComment" short:"m" brief:"{CGenDaoBriefNoModelComment}" orphan:"true"`
Clear bool `name:"clear" short:"a" brief:"{CGenDaoBriefClear}" orphan:"true"`
TypeMapping map[string]TypeMapping `name:"typeMapping" short:"y" brief:"{CGenDaoBriefTypeMapping}" orphan:"true"`
}
CGenDaoOutput struct{}
@ -180,6 +206,11 @@ type (
TableNames []string
NewTableNames []string
}
TypeMapping struct {
Type string `brief:"custom attribute type name"`
Import string `brief:"custom import for this type"`
}
)
func (c CGenDao) Dao(ctx context.Context, in CGenDaoInput) (out *CGenDaoOutput, err error) {
@ -253,6 +284,17 @@ func doGenDaoForArray(ctx context.Context, index int, in CGenDaoInput) {
tableNames = array.Slice()
}
// merge default typeMapping to input typeMapping.
if in.TypeMapping == nil {
in.TypeMapping = defaultTypeMapping
} else {
for key, typeMapping := range defaultTypeMapping {
if _, ok := in.TypeMapping[key]; !ok {
in.TypeMapping[key] = typeMapping
}
}
}
// Generating dao & model go files one by one according to given table name.
newTableNames := make([]string, len(tableNames))
for i, tableName := range tableNames {
@ -263,6 +305,7 @@ func doGenDaoForArray(ctx context.Context, index int, in CGenDaoInput) {
newTableName = in.Prefix + newTableName
newTableNames[i] = newTableName
}
// Dao: index and internal.
generateDao(ctx, CGenDaoInternalInput{
CGenDaoInput: in,
@ -286,11 +329,8 @@ func doGenDaoForArray(ctx context.Context, index int, in CGenDaoInput) {
})
}
func getImportPartContent(source string, isDo bool) string {
var (
packageImportsArray = garray.NewStrArray()
)
func getImportPartContent(ctx context.Context, source string, isDo bool, appendImports []string) string {
var packageImportsArray = garray.NewStrArray()
if isDo {
packageImportsArray.Append(`"github.com/gogf/gf/v2/frame/g"`)
}
@ -307,6 +347,32 @@ func getImportPartContent(source string, isDo bool) string {
packageImportsArray.Append(`"github.com/gogf/gf/v2/encoding/gjson"`)
}
// Check and update imports in go.mod
if appendImports != nil && len(appendImports) > 0 {
goModPath := utils.GetModPath()
if goModPath == "" {
mlog.Fatal("go.mod not found in current project")
}
mod, err := modfile.Parse(goModPath, gfile.GetBytes(goModPath), nil)
if err != nil {
mlog.Fatalf("parse go.mod failed: %+v", err)
}
for _, appendImport := range appendImports {
found := false
for _, require := range mod.Require {
if gstr.Contains(appendImport, require.Mod.Path) {
found = true
break
}
}
if !found {
err = gproc.ShellRun(ctx, `go get `+appendImport)
mlog.Fatalf(`%+v`, err)
}
packageImportsArray.Append(fmt.Sprintf(`"%s"`, appendImport))
}
}
// Generate and write content to golang file.
packageImportsStr := ""
if packageImportsArray.Len() > 0 {

View File

@ -36,9 +36,9 @@ func generateDo(ctx context.Context, in CGenDaoInternalInput) {
mlog.Fatalf("fetching tables fields failed for table '%s':\n%v", tableName, err)
}
var (
newTableName = in.NewTableNames[i]
doFilePath = gfile.Join(dirPathDo, gstr.CaseSnake(newTableName)+".go")
structDefinition = generateStructDefinition(ctx, generateStructDefinitionInput{
newTableName = in.NewTableNames[i]
doFilePath = gfile.Join(dirPathDo, gstr.CaseSnake(newTableName)+".go")
structDefinition, _ = generateStructDefinition(ctx, generateStructDefinitionInput{
CGenDaoInternalInput: in,
TableName: tableName,
StructName: gstr.CaseCamel(newTableName),
@ -59,6 +59,7 @@ func generateDo(ctx context.Context, in CGenDaoInternalInput) {
},
)
modelContent := generateDoContent(
ctx,
in,
tableName,
gstr.CaseCamel(newTableName),
@ -74,15 +75,18 @@ func generateDo(ctx context.Context, in CGenDaoInternalInput) {
}
}
func generateDoContent(in CGenDaoInternalInput, tableName, tableNameCamelCase, structDefine string) string {
func generateDoContent(
ctx context.Context, in CGenDaoInternalInput, tableName, tableNameCamelCase, structDefine string,
) string {
doContent := gstr.ReplaceByMap(
getTemplateFromPathOrDefault(in.TplDaoDoPath, consts.TemplateGenDaoDoContent),
g.MapStrStr{
tplVarTableName: tableName,
tplVarPackageImports: getImportPartContent(structDefine, true),
tplVarPackageImports: getImportPartContent(ctx, structDefine, true, nil),
tplVarTableNameCamelCase: tableNameCamelCase,
tplVarStructDefine: structDefine,
})
},
)
doContent = replaceDefaultVar(in, doContent)
return doContent
}

View File

@ -30,22 +30,27 @@ func generateEntity(ctx context.Context, in CGenDaoInternalInput) {
if err != nil {
mlog.Fatalf("fetching tables fields failed for table '%s':\n%v", tableName, err)
}
var (
newTableName = in.NewTableNames[i]
entityFilePath = gfile.Join(dirPathEntity, gstr.CaseSnake(newTableName)+".go")
entityContent = generateEntityContent(
newTableName = in.NewTableNames[i]
entityFilePath = gfile.Join(dirPathEntity, gstr.CaseSnake(newTableName)+".go")
structDefinition, appendImports = generateStructDefinition(ctx, generateStructDefinitionInput{
CGenDaoInternalInput: in,
TableName: tableName,
StructName: gstr.CaseCamel(newTableName),
FieldMap: fieldMap,
IsDo: false,
})
entityContent = generateEntityContent(
ctx,
in,
newTableName,
gstr.CaseCamel(newTableName),
generateStructDefinition(ctx, generateStructDefinitionInput{
CGenDaoInternalInput: in,
TableName: tableName,
StructName: gstr.CaseCamel(newTableName),
FieldMap: fieldMap,
IsDo: false,
}),
structDefinition,
appendImports,
)
)
err = gfile.PutContents(entityFilePath, strings.TrimSpace(entityContent))
if err != nil {
mlog.Fatalf("writing content to '%s' failed: %v", entityFilePath, err)
@ -56,15 +61,18 @@ func generateEntity(ctx context.Context, in CGenDaoInternalInput) {
}
}
func generateEntityContent(in CGenDaoInternalInput, tableName, tableNameCamelCase, structDefine string) string {
func generateEntityContent(
ctx context.Context, in CGenDaoInternalInput, tableName, tableNameCamelCase, structDefine string, appendImports []string,
) string {
entityContent := gstr.ReplaceByMap(
getTemplateFromPathOrDefault(in.TplDaoEntityPath, consts.TemplateGenDaoEntityContent),
g.MapStrStr{
tplVarTableName: tableName,
tplVarPackageImports: getImportPartContent(structDefine, false),
tplVarPackageImports: getImportPartContent(ctx, structDefine, false, appendImports),
tplVarTableNameCamelCase: tableNameCamelCase,
tplVarStructDefine: structDefine,
})
},
)
entityContent = replaceDefaultVar(in, entityContent)
return entityContent
}

View File

@ -10,8 +10,8 @@ import (
"bytes"
"context"
"fmt"
"github.com/olekukonko/tablewriter"
"strings"
"github.com/gogf/gf/v2/database/gdb"
"github.com/gogf/gf/v2/frame/g"
@ -27,13 +27,18 @@ type generateStructDefinitionInput struct {
IsDo bool // Is generating DTO struct.
}
func generateStructDefinition(ctx context.Context, in generateStructDefinitionInput) string {
func generateStructDefinition(ctx context.Context, in generateStructDefinitionInput) (string, []string) {
var appendImports []string
buffer := bytes.NewBuffer(nil)
array := make([][]string, len(in.FieldMap))
names := sortFieldKeyForDao(in.FieldMap)
for index, name := range names {
var imports string
field := in.FieldMap[name]
array[index] = generateStructFieldDefinition(ctx, field, in)
array[index], imports = generateStructFieldDefinition(ctx, field, in)
if imports != "" {
appendImports = append(appendImports, imports)
}
}
tw := tablewriter.NewWriter(buffer)
tw.SetBorder(false)
@ -54,21 +59,42 @@ func generateStructDefinition(ctx context.Context, in generateStructDefinitionIn
}
buffer.WriteString(stContent)
buffer.WriteString("}")
return buffer.String()
return buffer.String(), appendImports
}
// generateStructFieldDefinition generates and returns the attribute definition for specified field.
func generateStructFieldDefinition(
ctx context.Context, field *gdb.TableField, in generateStructDefinitionInput,
) []string {
) (attrLines []string, appendImport string) {
var (
err error
typeName string
jsonTag = getJsonTagFromCase(field.Name, in.JsonCase)
)
typeName, err = in.DB.CheckLocalTypeForField(ctx, field.Type, nil)
if err != nil {
panic(err)
if in.TypeMapping != nil && len(in.TypeMapping) > 0 {
var (
tryTypeName string
)
tryTypeMatch, _ := gregex.MatchString(`(.+?)\((.+)\)`, field.Type)
if len(tryTypeMatch) == 3 {
tryTypeName = gstr.Trim(tryTypeMatch[1])
} else {
tryTypeName = gstr.Split(field.Type, " ")[0]
}
if tryTypeName != "" {
if typeMapping, ok := in.TypeMapping[strings.ToLower(tryTypeName)]; ok {
typeName = typeMapping.Name
appendImport = typeMapping.Import
}
}
}
if typeName == "" {
typeName, err = in.DB.CheckLocalTypeForField(ctx, field.Type, nil)
if err != nil {
panic(err)
}
}
switch typeName {
case gdb.LocalTypeDate, gdb.LocalTypeDatetime:
@ -94,19 +120,18 @@ func generateStructFieldDefinition(
}
var (
tagKey = "`"
result = []string{
" #" + gstr.CaseCamel(field.Name),
" #" + typeName,
}
tagKey = "`"
descriptionTag = gstr.Replace(formatComment(field.Comment), `"`, `\"`)
)
attrLines = []string{
" #" + gstr.CaseCamel(field.Name),
" #" + typeName,
}
attrLines = append(attrLines, " #"+fmt.Sprintf(tagKey+`json:"%s"`, jsonTag))
attrLines = append(attrLines, " #"+fmt.Sprintf(`description:"%s"`+tagKey, descriptionTag))
attrLines = append(attrLines, " #"+fmt.Sprintf(`// %s`, formatComment(field.Comment)))
result = append(result, " #"+fmt.Sprintf(tagKey+`json:"%s"`, jsonTag))
result = append(result, " #"+fmt.Sprintf(`description:"%s"`+tagKey, descriptionTag))
result = append(result, " #"+fmt.Sprintf(`// %s`, formatComment(field.Comment)))
for k, v := range result {
for k, v := range attrLines {
if in.NoJsonTag {
v, _ = gregex.ReplaceString(`json:".+"`, ``, v)
}
@ -116,9 +141,9 @@ func generateStructFieldDefinition(
if in.NoModelComment {
v, _ = gregex.ReplaceString(`//.+`, ``, v)
}
result[k] = v
attrLines[k] = v
}
return result
return attrLines, appendImport
}
// formatComment formats the comment string to fit the golang code without any lines.

View File

@ -113,3 +113,25 @@ func GetImportPath(filePath string) string {
suffix = gfile.Basename(oldDir) + "/" + suffix
}
}
// GetModPath retrieves and returns the file path of go.mod for current project.
// It searches from the current working directory upwards to the filesystem root,
// and returns an empty string if no go.mod file is found.
func GetModPath() string {
	const goModName = "go.mod"
	// Start the search in the current working directory itself, so a go.mod
	// located directly in the project root (the common case) is found.
	dir := gfile.Pwd()
	for {
		goModPath := gfile.Join(dir, goModName)
		if gfile.Exists(goModPath) {
			return goModPath
		}
		parent := gfile.Dir(dir)
		// gfile.Dir of the filesystem root returns the root itself;
		// stop when no further ascent is possible.
		if parent == dir {
			break
		}
		dir = parent
	}
	return ""
}

View File

@ -0,0 +1,22 @@
// Copyright GoFrame gf Author(https://goframe.org). All Rights Reserved.
//
// This Source Code Form is subject to the terms of the MIT License.
// If a copy of the MIT was not distributed with this file,
// You can obtain one at https://github.com/gogf/gf.
package utils_test
import (
"fmt"
"testing"
"github.com/gogf/gf/cmd/gf/v2/internal/utility/utils"
"github.com/gogf/gf/v2/test/gtest"
)
// Test_GetModPath prints the resolved go.mod path for manual inspection;
// it performs no assertion because the path depends on the test environment.
func Test_GetModPath(t *testing.T) {
	gtest.C(t, func(t *gtest.T) {
		fmt.Println(utils.GetModPath())
	})
}

View File

@ -9,6 +9,7 @@ package mssql_test
import (
"database/sql"
"fmt"
"github.com/gogf/gf/v2/util/gconv"
"testing"
"time"
@ -466,7 +467,7 @@ func Test_Model_Array(t *testing.T) {
gtest.C(t, func(t *gtest.T) {
all, err := db.Model(table).Where("id", g.Slice{1, 2, 3}).All()
t.AssertNil(err)
t.Assert(all.Array("ID"), g.Slice{1, 2, 3})
t.Assert(gconv.Ints(all.Array("ID")), g.Slice{1, 2, 3})
t.Assert(all.Array("NICKNAME"), g.Slice{"name_1", "name_2", "name_3"})
})
gtest.C(t, func(t *gtest.T) {
@ -2347,7 +2348,7 @@ func Test_Model_FieldCount(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["ID"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}
@ -2360,7 +2361,7 @@ func Test_Model_FieldMax(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["ID"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}
@ -2373,7 +2374,7 @@ func Test_Model_FieldMin(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["ID"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}
@ -2386,7 +2387,7 @@ func Test_Model_FieldAvg(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["ID"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}

View File

@ -3849,7 +3849,7 @@ func Test_Model_FieldCount(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["id"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}
@ -3862,7 +3862,7 @@ func Test_Model_FieldMax(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["id"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}
@ -3875,7 +3875,7 @@ func Test_Model_FieldMin(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["id"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}
@ -3888,7 +3888,7 @@ func Test_Model_FieldAvg(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["id"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}

View File

@ -3498,7 +3498,7 @@ func Test_Model_FieldCount(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["id"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}
@ -3511,7 +3511,7 @@ func Test_Model_FieldMax(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["id"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}
@ -3524,7 +3524,7 @@ func Test_Model_FieldMin(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["id"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}
@ -3537,7 +3537,7 @@ func Test_Model_FieldAvg(t *testing.T) {
t.AssertNil(err)
t.Assert(len(all), TableSize)
t.Assert(all[0]["id"], 1)
t.Assert(all[0]["total"], 1)
t.Assert(all[0]["total"].Int(), 1)
})
}

View File

@ -424,6 +424,40 @@ const (
LocalTypeJsonb = "jsonb"
)
const (
	// fieldType* constants enumerate the raw database field type names
	// (lower-cased) that CheckLocalTypeForField matches against when
	// mapping a table field type to a local Go type.
	fieldTypeBinary     = "binary"
	fieldTypeVarbinary  = "varbinary"
	fieldTypeBlob       = "blob"
	fieldTypeTinyblob   = "tinyblob"
	fieldTypeMediumblob = "mediumblob"
	fieldTypeLongblob   = "longblob"
	fieldTypeInt        = "int"
	fieldTypeTinyint    = "tinyint"
	// Both underscore and plain spellings are kept, as different drivers
	// report these integer types differently.
	fieldTypeSmallInt   = "small_int"
	fieldTypeSmallint   = "smallint"
	fieldTypeMediumInt  = "medium_int"
	fieldTypeMediumint  = "mediumint"
	fieldTypeSerial     = "serial"
	fieldTypeBigInt     = "big_int"
	fieldTypeBigint     = "bigint"
	fieldTypeBigserial  = "bigserial"
	fieldTypeReal       = "real"
	fieldTypeFloat      = "float"
	fieldTypeDouble     = "double"
	fieldTypeDecimal    = "decimal"
	fieldTypeMoney      = "money"
	fieldTypeNumeric    = "numeric"
	fieldTypeSmallmoney = "smallmoney"
	fieldTypeBool       = "bool"
	fieldTypeBit        = "bit"
	fieldTypeDate       = "date"
	fieldTypeDatetime   = "datetime"
	fieldTypeTimestamp  = "timestamp"
	fieldTypeTimestampz = "timestamptz"
	fieldTypeJson       = "json"
	fieldTypeJsonb      = "jsonb"
)
var (
// instances is the management map for instances.
instances = gmap.NewStrAnyMap(true)

View File

@ -134,54 +134,58 @@ func (c *Core) CheckLocalTypeForField(ctx context.Context, fieldType string, fie
} else {
typeName = gstr.Split(fieldType, " ")[0]
}
typeName = strings.ToLower(typeName)
switch typeName {
case
"binary",
"varbinary",
"blob",
"tinyblob",
"mediumblob",
"longblob":
fieldTypeBinary,
fieldTypeVarbinary,
fieldTypeBlob,
fieldTypeTinyblob,
fieldTypeMediumblob,
fieldTypeLongblob:
return LocalTypeBytes, nil
case
"int",
"tinyint",
"small_int",
"smallint",
"medium_int",
"mediumint",
"serial":
fieldTypeInt,
fieldTypeTinyint,
fieldTypeSmallInt,
fieldTypeSmallint,
fieldTypeMediumInt,
fieldTypeMediumint,
fieldTypeSerial:
if gstr.ContainsI(fieldType, "unsigned") {
return LocalTypeUint, nil
}
return LocalTypeInt, nil
case
"big_int",
"bigint",
"bigserial":
fieldTypeBigInt,
fieldTypeBigint,
fieldTypeBigserial:
if gstr.ContainsI(fieldType, "unsigned") {
return LocalTypeUint64, nil
}
return LocalTypeInt64, nil
case
"real":
fieldTypeReal:
return LocalTypeFloat32, nil
case
"float",
"double",
"decimal",
"money",
"numeric",
"smallmoney":
fieldTypeDecimal,
fieldTypeMoney,
fieldTypeNumeric,
fieldTypeSmallmoney:
return LocalTypeString, nil
case
fieldTypeFloat,
fieldTypeDouble:
return LocalTypeFloat64, nil
case
"bit":
fieldTypeBit:
// It is suggested using bit(1) as boolean.
if typePattern == "1" {
return LocalTypeBool, nil
@ -197,25 +201,25 @@ func (c *Core) CheckLocalTypeForField(ctx context.Context, fieldType string, fie
return LocalTypeInt64Bytes, nil
case
"bool":
fieldTypeBool:
return LocalTypeBool, nil
case
"date":
fieldTypeDate:
return LocalTypeDate, nil
case
"datetime",
"timestamp",
"timestamptz":
fieldTypeDatetime,
fieldTypeTimestamp,
fieldTypeTimestampz:
return LocalTypeDatetime, nil
case
"json":
fieldTypeJson:
return LocalTypeJson, nil
case
"jsonb":
fieldTypeJsonb:
return LocalTypeJsonb, nil
default:

View File

@ -62,7 +62,7 @@ var (
// quoteWordReg is the regular expression object for a word check.
quoteWordReg = regexp.MustCompile(`^[a-zA-Z0-9\-_]+$`)
// Priority tags for struct converting for orm field mapping.
// structTagPriority tags for struct converting for orm field mapping.
structTagPriority = append([]string{OrmTagForStruct}, gconv.StructTagPriority...)
)