
change interface ConvertDataForRecord to ConvertValueForField for package gdb (#2916)

This commit is contained in:
John Guo 2023-09-04 21:23:54 +08:00 committed by GitHub
parent 887803e495
commit b9e2b05f04
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
24 changed files with 357 additions and 507 deletions
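In short, drivers no longer override the whole-record hook ConvertDataForRecord; they override the per-field hook ConvertValueForField, and gdb.Core walks the record map and invokes it once per field together with the field type string looked up from TableFields. A minimal, hypothetical driver sketch under the new interface (the driver name and the time handling are illustrative, modeled on the dm and clickhouse changes below):

package mydriver

import (
	"context"
	"time"

	"github.com/gogf/gf/v2/database/gdb"
	"github.com/gogf/gf/v2/os/gtime"
)

// Driver embeds gdb.Core, as the built-in drivers do.
type Driver struct {
	*gdb.Core
}

// ConvertValueForField converts a single field value before it is committed to the record.
// The old per-record hook ConvertDataForRecord(ctx, value) is removed from the driver-facing
// DB interface; gdb.Core now calls this method once per field, passing the field type string.
func (d *Driver) ConvertValueForField(ctx context.Context, fieldType string, fieldValue interface{}) (interface{}, error) {
	switch v := fieldValue.(type) {
	case time.Time:
		// Store zero time as NULL, otherwise as a formatted time string.
		if v.IsZero() {
			return nil, nil
		}
		return gtime.New(v).String(), nil
	}
	// Fall back to the default conversion implemented by gdb.Core.
	return d.Core.ConvertValueForField(ctx, fieldType, fieldValue)
}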

View File

@ -111,7 +111,9 @@ jobs:
--health-retries 10
# ClickHouse backend server.
# docker run -d --name clickhouse -p 9000:9000 -p 8123:8123 -p 9001:9001 loads/clickhouse-server:22.1.3.7
# docker run -d --name clickhouse \
# -p 9000:9000 -p 8123:8123 -p 9001:9001 \
# loads/clickhouse-server:22.1.3.7
clickhouse-server:
image: loads/clickhouse-server:22.1.3.7
ports:

View File

@ -67,9 +67,10 @@ func generateStructFieldDefinition(
ctx context.Context, field *gdb.TableField, in generateStructDefinitionInput,
) (attrLines []string, appendImport string) {
var (
err error
typeName string
jsonTag = getJsonTagFromCase(field.Name, in.JsonCase)
err error
localTypeName gdb.LocalType
localTypeNameStr string
jsonTag = getJsonTagFromCase(field.Name, in.JsonCase)
)
if in.TypeMapping != nil && len(in.TypeMapping) > 0 {
@ -84,38 +85,39 @@ func generateStructFieldDefinition(
}
if tryTypeName != "" {
if typeMapping, ok := in.TypeMapping[strings.ToLower(tryTypeName)]; ok {
typeName = typeMapping.Type
localTypeNameStr = typeMapping.Type
appendImport = typeMapping.Import
}
}
}
if typeName == "" {
typeName, err = in.DB.CheckLocalTypeForField(ctx, field.Type, nil)
if localTypeNameStr == "" {
localTypeName, err = in.DB.CheckLocalTypeForField(ctx, field.Type, nil)
if err != nil {
panic(err)
}
}
switch typeName {
case gdb.LocalTypeDate, gdb.LocalTypeDatetime:
if in.StdTime {
typeName = "time.Time"
} else {
typeName = "*gtime.Time"
}
case gdb.LocalTypeInt64Bytes:
typeName = "int64"
switch localTypeName {
case gdb.LocalTypeDate, gdb.LocalTypeDatetime:
if in.StdTime {
localTypeNameStr = "time.Time"
} else {
localTypeNameStr = "*gtime.Time"
}
case gdb.LocalTypeUint64Bytes:
typeName = "uint64"
case gdb.LocalTypeInt64Bytes:
localTypeNameStr = "int64"
// Special type handle.
case gdb.LocalTypeJson, gdb.LocalTypeJsonb:
if in.GJsonSupport {
typeName = "*gjson.Json"
} else {
typeName = "string"
case gdb.LocalTypeUint64Bytes:
localTypeNameStr = "uint64"
// Special type handle.
case gdb.LocalTypeJson, gdb.LocalTypeJsonb:
if in.GJsonSupport {
localTypeNameStr = "*gjson.Json"
} else {
localTypeNameStr = "string"
}
}
}
@ -125,7 +127,7 @@ func generateStructFieldDefinition(
)
attrLines = []string{
" #" + gstr.CaseCamel(field.Name),
" #" + typeName,
" #" + localTypeNameStr,
}
attrLines = append(attrLines, " #"+fmt.Sprintf(tagKey+`json:"%s"`, jsonTag))
attrLines = append(attrLines, " #"+fmt.Sprintf(`description:"%s"`+tagKey, descriptionTag))

View File

@ -12,6 +12,8 @@ import (
"fmt"
"strings"
"github.com/olekukonko/tablewriter"
"github.com/gogf/gf/cmd/gf/v2/internal/consts"
"github.com/gogf/gf/cmd/gf/v2/internal/utility/mlog"
"github.com/gogf/gf/v2/database/gdb"
@ -23,7 +25,6 @@ import (
"github.com/gogf/gf/v2/text/gstr"
"github.com/gogf/gf/v2/util/gconv"
"github.com/gogf/gf/v2/util/gtag"
"github.com/olekukonko/tablewriter"
)
type (
@ -294,17 +295,17 @@ func generateEntityMessageDefinition(entityName string, fieldMap map[string]*gdb
// generateMessageFieldForPbEntity generates and returns the message definition for specified field.
func generateMessageFieldForPbEntity(index int, field *gdb.TableField, in CGenPbEntityInternalInput) []string {
var (
typeName string
comment string
jsonTagStr string
err error
ctx = gctx.GetInitCtx()
localTypeName gdb.LocalType
comment string
jsonTagStr string
err error
ctx = gctx.GetInitCtx()
)
typeName, err = in.DB.CheckLocalTypeForField(ctx, field.Type, nil)
localTypeName, err = in.DB.CheckLocalTypeForField(ctx, field.Type, nil)
if err != nil {
panic(err)
}
var typeMapping = map[string]string{
var typeMapping = map[gdb.LocalType]string{
gdb.LocalTypeString: "string",
gdb.LocalTypeDate: "google.protobuf.Timestamp",
gdb.LocalTypeDatetime: "google.protobuf.Timestamp",
@ -324,9 +325,9 @@ func generateMessageFieldForPbEntity(index int, field *gdb.TableField, in CGenPb
gdb.LocalTypeJson: "string",
gdb.LocalTypeJsonb: "string",
}
typeName = typeMapping[typeName]
if typeName == "" {
typeName = "string"
localTypeNameStr := typeMapping[localTypeName]
if localTypeNameStr == "" {
localTypeNameStr = "string"
}
comment = gstr.ReplaceByArray(field.Comment, g.SliceStr{
@ -350,7 +351,7 @@ func generateMessageFieldForPbEntity(index int, field *gdb.TableField, in CGenPb
}
}
return []string{
" #" + typeName,
" #" + localTypeNameStr,
" #" + formatCase(field.Name, in.NameCase),
" #= " + gconv.String(index) + jsonTagStr + ";",
" #" + fmt.Sprintf(`// %s`, comment),

View File

@ -8,13 +8,14 @@
package gmap
import (
"reflect"
"github.com/gogf/gf/v2/container/gvar"
"github.com/gogf/gf/v2/internal/deepcopy"
"github.com/gogf/gf/v2/internal/empty"
"github.com/gogf/gf/v2/internal/json"
"github.com/gogf/gf/v2/internal/rwmutex"
"github.com/gogf/gf/v2/util/gconv"
"reflect"
)
// StrAnyMap implements map[string]interface{} with RWMutex that has switch.

View File

@ -27,8 +27,6 @@ import (
"github.com/gogf/gf/v2/os/gctx"
"github.com/gogf/gf/v2/os/gtime"
"github.com/gogf/gf/v2/text/gregex"
"github.com/gogf/gf/v2/util/gconv"
"github.com/gogf/gf/v2/util/gtag"
"github.com/gogf/gf/v2/util/gutil"
)
@ -139,9 +137,7 @@ func (d *Driver) Tables(ctx context.Context, schema ...string) (tables []string,
// TableFields retrieves and returns the fields' information of specified table of current schema.
// Also see DriverMysql.TableFields.
func (d *Driver) TableFields(
ctx context.Context, table string, schema ...string,
) (fields map[string]*gdb.TableField, err error) {
func (d *Driver) TableFields(ctx context.Context, table string, schema ...string) (fields map[string]*gdb.TableField, err error) {
var (
result gdb.Result
link gdb.Link
@ -224,9 +220,8 @@ func (d *Driver) DoFilter(
if len(args) == 0 {
return originSql, args, nil
}
var index int
// Convert placeholder char '?' to string "$x".
var index int
originSql, _ = gregex.ReplaceStringFunc(`\?`, originSql, func(s string) string {
index++
return fmt.Sprintf(`$%d`, index)
@ -251,9 +246,12 @@ func (d *Driver) DoFilter(
case "UPDATE":
// MySQL eg: UPDATE table_name SET field1=new-value1, field2=new-value2 [WHERE Clause]
// Clickhouse eg: ALTER TABLE [db.]table UPDATE column1 = expr1 [, ...] WHERE filter_expr
newSql, err = gregex.ReplaceStringFuncMatch(updateFilterPattern, originSql, func(s []string) string {
return fmt.Sprintf("ALTER TABLE %s UPDATE", s[1])
})
newSql, err = gregex.ReplaceStringFuncMatch(
updateFilterPattern, originSql,
func(s []string) string {
return fmt.Sprintf("ALTER TABLE %s UPDATE", s[1])
},
)
if err != nil {
return "", nil, err
}
@ -262,9 +260,12 @@ func (d *Driver) DoFilter(
case "DELETE":
// MySQL eg: DELETE FROM table_name [WHERE Clause]
// Clickhouse eg: ALTER TABLE [db.]table [ON CLUSTER cluster] DELETE WHERE filter_expr
newSql, err = gregex.ReplaceStringFuncMatch(deleteFilterPattern, originSql, func(s []string) string {
return fmt.Sprintf("ALTER TABLE %s DELETE", s[1])
})
newSql, err = gregex.ReplaceStringFuncMatch(
deleteFilterPattern, originSql,
func(s []string) string {
return fmt.Sprintf("ALTER TABLE %s DELETE", s[1])
},
)
if err != nil {
return "", nil, err
}
@ -280,6 +281,7 @@ func (d *Driver) DoCommit(ctx context.Context, in gdb.DoCommitInput) (out gdb.Do
return d.Core.DoCommit(ctx, in)
}
// DoInsert inserts or updates data for given table.
func (d *Driver) DoInsert(
ctx context.Context, link gdb.Link, table string, list gdb.List, option gdb.DoInsertOption,
) (result sql.Result, err error) {
@ -335,86 +337,80 @@ func (d *Driver) DoInsert(
return
}
// ConvertDataForRecord converting for any data that will be inserted into table/collection as a record.
func (d *Driver) ConvertDataForRecord(ctx context.Context, value interface{}) (map[string]interface{}, error) {
m := gconv.Map(value, gtag.ORM)
// transforms a value of a particular type
for k, v := range m {
switch itemValue := v.(type) {
case time.Time:
m[k] = itemValue
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue.IsZero() {
m[k] = nil
}
case uuid.UUID:
m[k] = itemValue
case *time.Time:
m[k] = itemValue
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue == nil || itemValue.IsZero() {
m[k] = nil
}
case gtime.Time:
// for gtime type, needs to get time.Time
m[k] = itemValue.Time
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue.IsZero() {
m[k] = nil
}
case *gtime.Time:
// for gtime type, needs to get time.Time
if itemValue != nil {
m[k] = itemValue.Time
}
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue == nil || itemValue.IsZero() {
m[k] = nil
}
case decimal.Decimal:
m[k] = itemValue
case *decimal.Decimal:
m[k] = nil
if itemValue != nil {
m[k] = *itemValue
}
default:
// if the other type implements valuer for the driver package
// the converted result is used
// otherwise the interface data is committed
valuer, ok := itemValue.(driver.Valuer)
if !ok {
m[k] = itemValue
continue
}
convertedValue, err := valuer.Value()
if err != nil {
return nil, err
}
m[k] = convertedValue
// ConvertValueForField converts value to the type of the record field.
func (d *Driver) ConvertValueForField(ctx context.Context, fieldType string, fieldValue interface{}) (interface{}, error) {
switch itemValue := fieldValue.(type) {
case time.Time:
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue.IsZero() {
return nil, nil
}
case uuid.UUID:
return itemValue, nil
case *time.Time:
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue == nil || itemValue.IsZero() {
return nil, nil
}
return itemValue, nil
case gtime.Time:
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue.IsZero() {
return nil, nil
}
// for gtime type, needs to get time.Time
return itemValue.Time, nil
case *gtime.Time:
// for gtime type, needs to get time.Time
if itemValue != nil {
return itemValue.Time, nil
}
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue == nil || itemValue.IsZero() {
return nil, nil
}
case decimal.Decimal:
return itemValue, nil
case *decimal.Decimal:
if itemValue != nil {
return *itemValue, nil
}
return nil, nil
default:
// if the other type implements valuer for the driver package
// the converted result is used
// otherwise the interface data is committed
valuer, ok := itemValue.(driver.Valuer)
if !ok {
return itemValue, nil
}
convertedValue, err := valuer.Value()
if err != nil {
return nil, err
}
return convertedValue, nil
}
return m, nil
return fieldValue, nil
}
// DoDelete does "DELETE FROM ... " statement for the table.
func (d *Driver) DoDelete(ctx context.Context, link gdb.Link, table string, condition string, args ...interface{}) (result sql.Result, err error) {
ctx = d.injectNeedParsedSql(ctx)
return d.Core.DoDelete(ctx, link, table, condition, args...)
}
// DoUpdate does "UPDATE ... " statement for the table.
func (d *Driver) DoUpdate(ctx context.Context, link gdb.Link, table string, data interface{}, condition string, args ...interface{}) (result sql.Result, err error) {
ctx = d.injectNeedParsedSql(ctx)
return d.Core.DoUpdate(ctx, link, table, data, condition, args...)
@ -435,10 +431,12 @@ func (d *Driver) Replace(ctx context.Context, table string, data interface{}, ba
return nil, errUnsupportedReplace
}
// Begin starts and returns the transaction object.
func (d *Driver) Begin(ctx context.Context) (tx gdb.TX, err error) {
return nil, errUnsupportedBegin
}
// Transaction wraps the transaction logic using function `f`.
func (d *Driver) Transaction(ctx context.Context, f func(ctx context.Context, tx gdb.TX) error) error {
return errUnsupportedTransaction
}

View File

@ -27,7 +27,6 @@ import (
"github.com/gogf/gf/v2/text/gregex"
"github.com/gogf/gf/v2/text/gstr"
"github.com/gogf/gf/v2/util/gconv"
"github.com/gogf/gf/v2/util/gtag"
"github.com/gogf/gf/v2/util/gutil"
)
@ -52,16 +51,19 @@ func init() {
}
}
// New create and returns a driver that implements gdb.Driver, which supports operations for dm.
func New() gdb.Driver {
return &Driver{}
}
// New creates and returns a database object for dm.
func (d *Driver) New(core *gdb.Core, node *gdb.ConfigNode) (gdb.DB, error) {
return &Driver{
Core: core,
}, nil
}
// Open creates and returns an underlying sql.DB object for pgsql.
func (d *Driver) Open(config *gdb.ConfigNode) (db *sql.DB, err error) {
var (
source string
@ -100,10 +102,13 @@ func (d *Driver) Open(config *gdb.ConfigNode) (db *sql.DB, err error) {
return
}
// GetChars returns the security char for this type of database.
func (d *Driver) GetChars() (charLeft string, charRight string) {
return quoteChar, quoteChar
}
// Tables retrieves and returns the tables of current schema.
// It's mainly used in cli tool chain for automatically generating the models.
func (d *Driver) Tables(ctx context.Context, schema ...string) (tables []string, err error) {
var result gdb.Result
// When schema is empty, return the default link
@ -126,9 +131,8 @@ func (d *Driver) Tables(ctx context.Context, schema ...string) (tables []string,
return
}
func (d *Driver) TableFields(
ctx context.Context, table string, schema ...string,
) (fields map[string]*gdb.TableField, err error) {
// TableFields retrieves and returns the fields' information of specified table of current schema.
func (d *Driver) TableFields(ctx context.Context, table string, schema ...string) (fields map[string]*gdb.TableField, err error) {
var (
result gdb.Result
link gdb.Link
@ -173,49 +177,44 @@ func (d *Driver) TableFields(
return fields, nil
}
// ConvertDataForRecord converting for any data that will be inserted into table/collection as a record.
func (d *Driver) ConvertDataForRecord(ctx context.Context, value interface{}) (map[string]interface{}, error) {
m := gconv.Map(value, gtag.ORM)
// transforms a value of a particular type
for k, v := range m {
switch itemValue := v.(type) {
// dm does not support time.Time, so it is converted here to a time string that dm supports.
case time.Time:
m[k] = gtime.New(itemValue).String()
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue.IsZero() {
m[k] = nil
}
// dm does not support time.Time, so it is converted here to a time string that dm supports.
case *time.Time:
m[k] = gtime.New(itemValue).String()
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue == nil || itemValue.IsZero() {
m[k] = nil
}
// ConvertValueForField converts value to the type of the record field.
func (d *Driver) ConvertValueForField(ctx context.Context, fieldType string, fieldValue interface{}) (interface{}, error) {
switch itemValue := fieldValue.(type) {
// dm does not support time.Time, so it is converted here to a time string that dm supports.
case time.Time:
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue.IsZero() {
return nil, nil
}
return gtime.New(itemValue).String(), nil
// dm does not support time.Time, so it is converted here to a time string that dm supports.
case *time.Time:
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
if itemValue == nil || itemValue.IsZero() {
return nil, nil
}
return gtime.New(itemValue).String(), nil
}
return m, nil
return fieldValue, nil
}
// DoFilter deals with the sql string before commits it to underlying sql driver.
func (d *Driver) DoFilter(ctx context.Context, link gdb.Link, sql string, args []interface{}) (newSql string, newArgs []interface{}, err error) {
defer func() {
newSql, newArgs, err = d.Core.DoFilter(ctx, link, newSql, newArgs)
}()
// There should be no need to capitalize, because it has already been done during field processing.
newSql, err = gregex.ReplaceString(`["\n\t]`, "", sql)
newSql = gstr.ReplaceI(newSql, "GROUP_CONCAT", "WM_CONCAT")
// gutil.Dump("Driver.DoFilter()::newSql", newSql)
newArgs = args
// gutil.Dump("Driver.DoFilter()::newArgs", newArgs)
return
newSql, _ = gregex.ReplaceString(`["\n\t]`, "", sql)
return d.Core.DoFilter(
ctx,
link,
gstr.ReplaceI(newSql, "GROUP_CONCAT", "WM_CONCAT"),
args,
)
}
// DoInsert inserts or updates data for given table.
func (d *Driver) DoInsert(
ctx context.Context, link gdb.Link, table string, list gdb.List, option gdb.DoInsertOption,
) (result sql.Result, err error) {

View File

@ -104,17 +104,14 @@ func (d *Driver) GetChars() (charLeft string, charRight string) {
// DoFilter deals with the sql string before commits it to underlying sql driver.
func (d *Driver) DoFilter(ctx context.Context, link gdb.Link, sql string, args []interface{}) (newSql string, newArgs []interface{}, err error) {
defer func() {
newSql, newArgs, err = d.Core.DoFilter(ctx, link, newSql, newArgs)
}()
var index int
// Convert placeholder char '?' to string "@px".
str, _ := gregex.ReplaceStringFunc("\\?", sql, func(s string) string {
newSql, _ = gregex.ReplaceStringFunc("\\?", sql, func(s string) string {
index++
return fmt.Sprintf("@p%d", index)
})
str, _ = gregex.ReplaceString("\"", "", str)
return d.parseSql(str), args, nil
newSql, _ = gregex.ReplaceString("\"", "", newSql)
return d.Core.DoFilter(ctx, link, d.parseSql(newSql), args)
}
// parseSql does some replacement of the sql before commits it to underlying driver,
@ -299,14 +296,20 @@ ORDER BY a.id,a.colorder`,
return fields, nil
}
// DoInsert is not supported in mssql.
// DoInsert inserts or updates data for given table.
func (d *Driver) DoInsert(ctx context.Context, link gdb.Link, table string, list gdb.List, option gdb.DoInsertOption) (result sql.Result, err error) {
switch option.InsertOption {
case gdb.InsertOptionSave:
return nil, gerror.NewCode(gcode.CodeNotSupported, `Save operation is not supported by mssql driver`)
return nil, gerror.NewCode(
gcode.CodeNotSupported,
`Save operation is not supported by mssql driver`,
)
case gdb.InsertOptionReplace:
return nil, gerror.NewCode(gcode.CodeNotSupported, `Replace operation is not supported by mssql driver`)
return nil, gerror.NewCode(
gcode.CodeNotSupported,
`Replace operation is not supported by mssql driver`,
)
default:
return d.Core.DoInsert(ctx, link, table, list, option)

View File

@ -15,6 +15,7 @@ import (
"strings"
_ "github.com/go-sql-driver/mysql"
"github.com/gogf/gf/v2/database/gdb"
"github.com/gogf/gf/v2/errors/gcode"
"github.com/gogf/gf/v2/errors/gerror"
@ -143,9 +144,7 @@ func (d *Driver) Tables(ctx context.Context, schema ...string) (tables []string,
//
// It's using cache feature to enhance the performance, which never expires until the
// process restarts.
func (d *Driver) TableFields(
ctx context.Context, table string, schema ...string,
) (fields map[string]*gdb.TableField, err error) {
func (d *Driver) TableFields(ctx context.Context, table string, schema ...string) (fields map[string]*gdb.TableField, err error) {
var (
result gdb.Result
link gdb.Link

View File

@ -10,10 +10,7 @@ import (
"context"
"testing"
"github.com/go-sql-driver/mysql"
"github.com/gogf/gf/v2/container/gvar"
"github.com/gogf/gf/v2/database/gdb"
"github.com/gogf/gf/v2/encoding/gjson"
"github.com/gogf/gf/v2/test/gtest"
)
@ -32,61 +29,6 @@ func Test_Instance(t *testing.T) {
})
}
// Fix issue: https://github.com/gogf/gf/issues/819
func Test_Func_ConvertDataForRecord(t *testing.T) {
type Test struct {
ResetPasswordTokenAt mysql.NullTime `orm:"reset_password_token_at"`
}
gtest.C(t, func(t *gtest.T) {
c := &gdb.Core{}
m, err := c.ConvertDataForRecord(nil, new(Test))
t.AssertNil(err)
t.Assert(len(m), 1)
t.Assert(m["reset_password_token_at"], nil)
})
type TestNil struct {
JsonEmptyString *gjson.Json `orm:"json_empty_string"`
JsonNil *gjson.Json `orm:"json_nil"`
JsonNull *gjson.Json `orm:"json_null"`
VarEmptyString *gvar.Var `orm:"var_empty_string"`
VarNil *gvar.Var `orm:"var_nil"`
}
gtest.C(t, func(t *gtest.T) {
c := &gdb.Core{}
m, err := c.ConvertDataForRecord(nil, TestNil{
JsonEmptyString: gjson.New(""),
JsonNil: gjson.New(nil),
JsonNull: gjson.New(struct{}{}),
VarEmptyString: gvar.New(""),
VarNil: gvar.New(nil),
})
t.AssertNil(err)
t.Assert(len(m), 5)
valueEmptyString, exist := m["json_empty_string"]
t.Assert(exist, true)
t.Assert(valueEmptyString, nil)
valueNil, exist := m["json_nil"]
t.Assert(exist, true)
t.Assert(valueNil, nil)
valueNull, exist := m["json_null"]
t.Assert(exist, true)
t.Assert(valueNull, "null")
valueEmptyString, exist = m["var_empty_string"]
t.Assert(exist, true)
t.Assert(valueEmptyString, "")
valueNil, exist = m["var_nil"]
t.Assert(exist, true)
t.Assert(valueNil, nil)
})
}
func Test_Func_FormatSqlWithArgs(t *testing.T) {
// mysql
gtest.C(t, func(t *gtest.T) {

View File

@ -114,10 +114,6 @@ func (d *Driver) GetChars() (charLeft string, charRight string) {
// DoFilter deals with the sql string before commits it to underlying sql driver.
func (d *Driver) DoFilter(ctx context.Context, link gdb.Link, sql string, args []interface{}) (newSql string, newArgs []interface{}, err error) {
defer func() {
newSql, newArgs, err = d.Core.DoFilter(ctx, link, newSql, newArgs)
}()
var index int
// Convert placeholder char '?' to string ":vx".
newSql, _ = gregex.ReplaceStringFunc("\\?", sql, func(s string) string {
@ -125,10 +121,7 @@ func (d *Driver) DoFilter(ctx context.Context, link gdb.Link, sql string, args [
return fmt.Sprintf(":v%d", index)
})
newSql, _ = gregex.ReplaceString("\"", "", newSql)
newSql = d.parseSql(newSql)
newArgs = args
return
return d.Core.DoFilter(ctx, link, d.parseSql(newSql), args)
}
// parseSql does some replacement of the sql before commits it to underlying driver,
@ -214,9 +207,7 @@ func (d *Driver) Tables(ctx context.Context, schema ...string) (tables []string,
// TableFields retrieves and returns the fields' information of specified table of current schema.
//
// Also see DriverMysql.TableFields.
func (d *Driver) TableFields(
ctx context.Context, table string, schema ...string,
) (fields map[string]*gdb.TableField, err error) {
func (d *Driver) TableFields(ctx context.Context, table string, schema ...string) (fields map[string]*gdb.TableField, err error) {
var (
result gdb.Result
link gdb.Link
@ -259,17 +250,6 @@ FROM USER_TAB_COLUMNS WHERE TABLE_NAME = '%s' ORDER BY COLUMN_ID`,
}
// DoInsert inserts or updates data for given table.
// This function is usually used for custom interface definition, you do not need call it manually.
// The parameter `data` can be type of map/gmap/struct/*struct/[]map/[]struct, etc.
// Eg:
// Data(g.Map{"uid": 10000, "name":"john"})
// Data(g.Slice{g.Map{"uid": 10000, "name":"john"}, g.Map{"uid": 20000, "name":"smith"})
//
// The parameter `option` values are as follows:
// 0: insert: just insert, if there's unique/primary key in the data, it returns error;
// 1: replace: if there's unique/primary key in the data, it deletes it from table and inserts a new one;
// 2: save: if there's unique/primary key in the data, it updates it or else inserts a new one;
// 3: ignore: if there's unique/primary key in the data, it ignores the inserting;
func (d *Driver) DoInsert(
ctx context.Context, link gdb.Link, table string, list gdb.List, option gdb.DoInsertOption,
) (result sql.Result, err error) {

View File

@ -8,6 +8,7 @@
//
// Note:
// 1. It does not support Save/Replace features.
// 2. It does not support Insert Ignore features.
package pgsql
import (
@ -122,7 +123,7 @@ func (d *Driver) GetChars() (charLeft string, charRight string) {
}
// CheckLocalTypeForField checks and returns corresponding local golang type for given db type.
func (d *Driver) CheckLocalTypeForField(ctx context.Context, fieldType string, fieldValue interface{}) (string, error) {
func (d *Driver) CheckLocalTypeForField(ctx context.Context, fieldType string, fieldValue interface{}) (gdb.LocalType, error) {
var typeName string
match, _ := gregex.MatchString(`(.+?)\((.+)\)`, fieldType)
if len(match) == 3 {
@ -204,24 +205,21 @@ func (d *Driver) ConvertValueForLocal(ctx context.Context, fieldType string, fie
// DoFilter deals with the sql string before commits it to underlying sql driver.
func (d *Driver) DoFilter(ctx context.Context, link gdb.Link, sql string, args []interface{}) (newSql string, newArgs []interface{}, err error) {
defer func() {
newSql, newArgs, err = d.Core.DoFilter(ctx, link, newSql, newArgs)
}()
var index int
// Convert placeholder char '?' to string "$x".
sql, _ = gregex.ReplaceStringFunc(`\?`, sql, func(s string) string {
newSql, _ = gregex.ReplaceStringFunc(`\?`, sql, func(s string) string {
index++
return fmt.Sprintf(`$%d`, index)
})
// Handle pgsql jsonb feature support, which contains place holder char '?'.
// Handle pgsql jsonb feature support, which contains place-holder char '?'.
// Refer:
// https://github.com/gogf/gf/issues/1537
// https://www.postgresql.org/docs/12/functions-json.html
sql, _ = gregex.ReplaceStringFuncMatch(`(::jsonb([^\w\d]*)\$\d)`, sql, func(match []string) string {
newSql, _ = gregex.ReplaceStringFuncMatch(`(::jsonb([^\w\d]*)\$\d)`, newSql, func(match []string) string {
return fmt.Sprintf(`::jsonb%s?`, match[2])
})
newSql, _ = gregex.ReplaceString(` LIMIT (\d+),\s*(\d+)`, ` LIMIT $2 OFFSET $1`, sql)
return newSql, args, nil
newSql, _ = gregex.ReplaceString(` LIMIT (\d+),\s*(\d+)`, ` LIMIT $2 OFFSET $1`, newSql)
return d.Core.DoFilter(ctx, link, newSql, args)
}
// Tables retrieves and returns the tables of current schema.
@ -269,8 +267,6 @@ ORDER BY
}
// TableFields retrieves and returns the fields' information of specified table of current schema.
//
// Also see DriverMysql.TableFields.
func (d *Driver) TableFields(ctx context.Context, table string, schema ...string) (fields map[string]*gdb.TableField, err error) {
var (
result gdb.Result
@ -328,7 +324,7 @@ ORDER BY a.attnum`,
return fields, nil
}
// DoInsert is not supported in pgsql.
// DoInsert inserts or updates data for given table.
func (d *Driver) DoInsert(ctx context.Context, link gdb.Link, table string, list gdb.List, option gdb.DoInsertOption) (result sql.Result, err error) {
switch option.InsertOption {
case gdb.InsertOptionSave:
@ -364,6 +360,8 @@ func (d *Driver) DoInsert(ctx context.Context, link gdb.Link, table string, list
return d.Core.DoInsert(ctx, link, table, list, option)
}
// DoExec commits the sql string and its arguments to underlying driver
// through given link object and returns the execution result.
func (d *Driver) DoExec(ctx context.Context, link gdb.Link, sql string, args ...interface{}) (result sql.Result, err error) {
var (
isUseCoreDoExec bool = false // Check whether the default method needs to be used

View File

@ -7,14 +7,13 @@
// Package sqlite implements gdb.Driver, which supports operations for database SQLite.
//
// Note:
// 1. It does not support Save/Replace features.
// 1. It does not support Save features.
package sqlite
import (
"context"
"database/sql"
"fmt"
"strings"
_ "github.com/glebarez/go-sqlite"
@ -114,6 +113,22 @@ func (d *Driver) GetChars() (charLeft string, charRight string) {
// DoFilter deals with the sql string before commits it to underlying sql driver.
func (d *Driver) DoFilter(ctx context.Context, link gdb.Link, sql string, args []interface{}) (newSql string, newArgs []interface{}, err error) {
// Special insert/ignore operation for sqlite.
switch {
case gstr.HasPrefix(sql, gdb.InsertOperationIgnore):
sql = "INSERT OR IGNORE" + sql[len(gdb.InsertOperationIgnore):]
case gstr.HasPrefix(sql, gdb.InsertOperationReplace):
sql = "INSERT OR REPLACE" + sql[len(gdb.InsertOperationReplace):]
default:
if gstr.Contains(sql, gdb.InsertOnDuplicateKeyUpdate) {
return sql, args, gerror.NewCode(
gcode.CodeNotSupported,
`Save operation is not supported by sqlite driver`,
)
}
}
return d.Core.DoFilter(ctx, link, sql, args)
}
@ -126,7 +141,11 @@ func (d *Driver) Tables(ctx context.Context, schema ...string) (tables []string,
return nil, err
}
result, err = d.DoSelect(ctx, link, `SELECT NAME FROM SQLITE_MASTER WHERE TYPE='table' ORDER BY NAME`)
result, err = d.DoSelect(
ctx,
link,
`SELECT NAME FROM SQLITE_MASTER WHERE TYPE='table' ORDER BY NAME`,
)
if err != nil {
return
}
@ -141,9 +160,7 @@ func (d *Driver) Tables(ctx context.Context, schema ...string) (tables []string,
// TableFields retrieves and returns the fields' information of specified table of current schema.
//
// Also see DriverMysql.TableFields.
func (d *Driver) TableFields(
ctx context.Context, table string, schema ...string,
) (fields map[string]*gdb.TableField, err error) {
func (d *Driver) TableFields(ctx context.Context, table string, schema ...string) (fields map[string]*gdb.TableField, err error) {
var (
result gdb.Result
link gdb.Link
@ -173,83 +190,3 @@ func (d *Driver) TableFields(
}
return fields, nil
}
// DoInsert is not supported in sqlite.
func (d *Driver) DoInsert(
ctx context.Context, link gdb.Link, table string, list gdb.List, option gdb.DoInsertOption,
) (result sql.Result, err error) {
switch option.InsertOption {
case gdb.InsertOptionSave:
return nil, gerror.NewCode(gcode.CodeNotSupported, `Save operation is not supported by sqlite driver`)
case gdb.InsertOptionIgnore, gdb.InsertOptionReplace:
var (
keys []string // Field names.
values []string // Value holder string array, like: (?,?,?)
params []interface{} // Values that will be committed to underlying database driver.
onDuplicateStr string // onDuplicateStr is used in "ON DUPLICATE KEY UPDATE" statement.
)
// Handle the field names and placeholders.
for k := range list[0] {
keys = append(keys, k)
}
// Prepare the batch result pointer.
var (
charL, charR = d.GetChars()
batchResult = new(gdb.SqlResult)
keysStr = charL + strings.Join(keys, charR+","+charL) + charR
operation = "INSERT OR IGNORE"
)
if option.InsertOption == gdb.InsertOptionReplace {
operation = "INSERT OR REPLACE"
}
var (
listLength = len(list)
valueHolder = make([]string, 0)
)
for i := 0; i < listLength; i++ {
values = values[:0]
// Note that the map type is unordered,
// so it should use slice+key to retrieve the value.
for _, k := range keys {
if s, ok := list[i][k].(gdb.Raw); ok {
values = append(values, gconv.String(s))
} else {
values = append(values, "?")
params = append(params, list[i][k])
}
}
valueHolder = append(valueHolder, "("+gstr.Join(values, ",")+")")
// Batch package checks: It meets the batch number, or it is the last element.
if len(valueHolder) == option.BatchCount || (i == listLength-1 && len(valueHolder) > 0) {
var (
stdSqlResult sql.Result
affectedRows int64
)
stdSqlResult, err = d.DoExec(ctx, link, fmt.Sprintf(
"%s INTO %s(%s) VALUES%s %s",
operation, d.QuotePrefixTableName(table), keysStr,
gstr.Join(valueHolder, ","),
onDuplicateStr,
), params...)
if err != nil {
return stdSqlResult, err
}
if affectedRows, err = stdSqlResult.RowsAffected(); err != nil {
err = gerror.WrapCode(gcode.CodeDbOperationError, err, `sql.Result.RowsAffected failed`)
return stdSqlResult, err
} else {
batchResult.Result = stdSqlResult
batchResult.Affected += affectedRows
}
params = params[:0]
valueHolder = valueHolder[:0]
}
}
return batchResult, nil
default:
return d.Core.DoInsert(ctx, link, table, list, option)
}
}
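Since the sqlite driver now drops its bespoke DoInsert and instead rewrites the statement prefix in DoFilter, the effect can be seen with a small sketch using the gdb constants introduced in this change (the table and columns are illustrative):

package main

import (
	"fmt"

	"github.com/gogf/gf/v2/database/gdb"
	"github.com/gogf/gf/v2/text/gstr"
)

func main() {
	// gdb builds this prefix for an insert with InsertOptionIgnore (see GetInsertOperationByOption).
	sql := gdb.InsertOperationIgnore + " INTO `user`(`id`,`name`) VALUES(?,?)"

	// The same prefix rewrite that the sqlite DoFilter now applies.
	if gstr.HasPrefix(sql, gdb.InsertOperationIgnore) {
		sql = "INSERT OR IGNORE" + sql[len(gdb.InsertOperationIgnore):]
	}
	fmt.Println(sql) // INSERT OR IGNORE INTO `user`(`id`,`name`) VALUES(?,?)
}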

View File

@ -7,15 +7,15 @@
// Package sqlitecgo implements gdb.Driver, which supports operations for database SQLite.
//
// Note:
// 1. Using sqlitecgo is for building a 32-bit Windows operating system
// 2. You need to set the environment variable CGO_ENABLED=1 and make sure that GCC is installed on your path. windows gcc: https://jmeubank.github.io/tdm-gcc/
// 1. Using sqlitecgo is for building a 32-bit Windows operating system
// 2. You need to set the environment variable CGO_ENABLED=1 and make sure that GCC is installed
// on your path. windows gcc: https://jmeubank.github.io/tdm-gcc/
package sqlitecgo
import (
"context"
"database/sql"
"fmt"
"strings"
_ "github.com/mattn/go-sqlite3"
@ -115,6 +115,22 @@ func (d *Driver) GetChars() (charLeft string, charRight string) {
// DoFilter deals with the sql string before commits it to underlying sql driver.
func (d *Driver) DoFilter(ctx context.Context, link gdb.Link, sql string, args []interface{}) (newSql string, newArgs []interface{}, err error) {
// Special insert/ignore operation for sqlite.
switch {
case gstr.HasPrefix(sql, gdb.InsertOperationIgnore):
sql = "INSERT OR IGNORE" + sql[len(gdb.InsertOperationIgnore):]
case gstr.HasPrefix(sql, gdb.InsertOperationReplace):
sql = "INSERT OR REPLACE" + sql[len(gdb.InsertOperationReplace):]
default:
if gstr.Contains(sql, gdb.InsertOnDuplicateKeyUpdate) {
return sql, args, gerror.NewCode(
gcode.CodeNotSupported,
`Save operation is not supported by sqlite driver`,
)
}
}
return d.Core.DoFilter(ctx, link, sql, args)
}
@ -127,7 +143,11 @@ func (d *Driver) Tables(ctx context.Context, schema ...string) (tables []string,
return nil, err
}
result, err = d.DoSelect(ctx, link, `SELECT NAME FROM SQLITE_MASTER WHERE TYPE='table' ORDER BY NAME`)
result, err = d.DoSelect(
ctx,
link,
`SELECT NAME FROM SQLITE_MASTER WHERE TYPE='table' ORDER BY NAME`,
)
if err != nil {
return
}
@ -142,9 +162,7 @@ func (d *Driver) Tables(ctx context.Context, schema ...string) (tables []string,
// TableFields retrieves and returns the fields' information of specified table of current schema.
//
// Also see DriverMysql.TableFields.
func (d *Driver) TableFields(
ctx context.Context, table string, schema ...string,
) (fields map[string]*gdb.TableField, err error) {
func (d *Driver) TableFields(ctx context.Context, table string, schema ...string) (fields map[string]*gdb.TableField, err error) {
var (
result gdb.Result
link gdb.Link
@ -174,83 +192,3 @@ func (d *Driver) TableFields(
}
return fields, nil
}
// DoInsert is not supported in sqlite.
func (d *Driver) DoInsert(
ctx context.Context, link gdb.Link, table string, list gdb.List, option gdb.DoInsertOption,
) (result sql.Result, err error) {
switch option.InsertOption {
case gdb.InsertOptionSave:
return nil, gerror.NewCode(gcode.CodeNotSupported, `Save operation is not supported by sqlite driver`)
case gdb.InsertOptionIgnore, gdb.InsertOptionReplace:
var (
keys []string // Field names.
values []string // Value holder string array, like: (?,?,?)
params []interface{} // Values that will be committed to underlying database driver.
onDuplicateStr string // onDuplicateStr is used in "ON DUPLICATE KEY UPDATE" statement.
)
// Handle the field names and placeholders.
for k := range list[0] {
keys = append(keys, k)
}
// Prepare the batch result pointer.
var (
charL, charR = d.GetChars()
batchResult = new(gdb.SqlResult)
keysStr = charL + strings.Join(keys, charR+","+charL) + charR
operation = "INSERT OR IGNORE"
)
if option.InsertOption == gdb.InsertOptionReplace {
operation = "INSERT OR REPLACE"
}
var (
listLength = len(list)
valueHolder = make([]string, 0)
)
for i := 0; i < listLength; i++ {
values = values[:0]
// Note that the map type is unordered,
// so it should use slice+key to retrieve the value.
for _, k := range keys {
if s, ok := list[i][k].(gdb.Raw); ok {
values = append(values, gconv.String(s))
} else {
values = append(values, "?")
params = append(params, list[i][k])
}
}
valueHolder = append(valueHolder, "("+gstr.Join(values, ",")+")")
// Batch package checks: It meets the batch number, or it is the last element.
if len(valueHolder) == option.BatchCount || (i == listLength-1 && len(valueHolder) > 0) {
var (
stdSqlResult sql.Result
affectedRows int64
)
stdSqlResult, err = d.DoExec(ctx, link, fmt.Sprintf(
"%s INTO %s(%s) VALUES%s %s",
operation, d.QuotePrefixTableName(table), keysStr,
gstr.Join(valueHolder, ","),
onDuplicateStr,
), params...)
if err != nil {
return stdSqlResult, err
}
if affectedRows, err = stdSqlResult.RowsAffected(); err != nil {
err = gerror.WrapCode(gcode.CodeDbOperationError, err, `sql.Result.RowsAffected failed`)
return stdSqlResult, err
} else {
batchResult.Result = stdSqlResult
batchResult.Affected += affectedRows
}
params = params[:0]
valueHolder = valueHolder[:0]
}
}
return batchResult, nil
default:
return d.Core.DoInsert(ctx, link, table, list, option)
}
}

View File

@ -172,9 +172,9 @@ type DB interface {
GetChars() (charLeft string, charRight string) // See Core.GetChars.
Tables(ctx context.Context, schema ...string) (tables []string, err error) // See Core.Tables. The driver must implement this function.
TableFields(ctx context.Context, table string, schema ...string) (map[string]*TableField, error) // See Core.TableFields. The driver must implement this function.
ConvertDataForRecord(ctx context.Context, data interface{}) (map[string]interface{}, error) // See Core.ConvertDataForRecord
ConvertValueForField(ctx context.Context, fieldType string, fieldValue interface{}) (interface{}, error) // See Core.ConvertValueForField
ConvertValueForLocal(ctx context.Context, fieldType string, fieldValue interface{}) (interface{}, error) // See Core.ConvertValueForLocal
CheckLocalTypeForField(ctx context.Context, fieldType string, fieldValue interface{}) (string, error) // See Core.CheckLocalTypeForField
CheckLocalTypeForField(ctx context.Context, fieldType string, fieldValue interface{}) (LocalType, error) // See Core.CheckLocalTypeForField
}
// TX defines the interfaces for ORM transaction operations.
@ -328,11 +328,11 @@ type DoInsertOption struct {
type TableField struct {
Index int // For ordering purpose as map is unordered.
Name string // Field name.
Type string // Field type.
Type string // Field type. Eg: 'int(10) unsigned', 'varchar(64)'.
Null bool // Field can be null or not.
Key string // The index information(empty if it's not an index).
Key string // The index information(empty if it's not an index). Eg: PRI, MUL.
Default interface{} // Default value for the field.
Extra string // Extra information.
Extra string // Extra information. Eg: auto_increment.
Comment string // Field comment.
}
@ -389,10 +389,14 @@ const (
type InsertOption int
const (
InsertOptionDefault InsertOption = 0
InsertOptionReplace InsertOption = 1
InsertOptionSave InsertOption = 2
InsertOptionIgnore InsertOption = 3
InsertOptionDefault InsertOption = 0
InsertOptionReplace InsertOption = 1
InsertOptionSave InsertOption = 2
InsertOptionIgnore InsertOption = 3
InsertOperationInsert = "INSERT"
InsertOperationReplace = "REPLACE"
InsertOperationIgnore = "INSERT IGNORE"
InsertOnDuplicateKeyUpdate = "ON DUPLICATE KEY UPDATE"
)
const (
@ -407,25 +411,27 @@ const (
SqlTypeStmtQueryRowContext = "DB.Statement.QueryRowContext"
)
type LocalType string
const (
LocalTypeString = "string"
LocalTypeDate = "date"
LocalTypeDatetime = "datetime"
LocalTypeInt = "int"
LocalTypeUint = "uint"
LocalTypeInt64 = "int64"
LocalTypeUint64 = "uint64"
LocalTypeIntSlice = "[]int"
LocalTypeInt64Slice = "[]int64"
LocalTypeUint64Slice = "[]uint64"
LocalTypeInt64Bytes = "int64-bytes"
LocalTypeUint64Bytes = "uint64-bytes"
LocalTypeFloat32 = "float32"
LocalTypeFloat64 = "float64"
LocalTypeBytes = "[]byte"
LocalTypeBool = "bool"
LocalTypeJson = "json"
LocalTypeJsonb = "jsonb"
LocalTypeString LocalType = "string"
LocalTypeDate LocalType = "date"
LocalTypeDatetime LocalType = "datetime"
LocalTypeInt LocalType = "int"
LocalTypeUint LocalType = "uint"
LocalTypeInt64 LocalType = "int64"
LocalTypeUint64 LocalType = "uint64"
LocalTypeIntSlice LocalType = "[]int"
LocalTypeInt64Slice LocalType = "[]int64"
LocalTypeUint64Slice LocalType = "[]uint64"
LocalTypeInt64Bytes LocalType = "int64-bytes"
LocalTypeUint64Bytes LocalType = "uint64-bytes"
LocalTypeFloat32 LocalType = "float32"
LocalTypeFloat64 LocalType = "float64"
LocalTypeBytes LocalType = "[]byte"
LocalTypeBool LocalType = "bool"
LocalTypeJson LocalType = "json"
LocalTypeJsonb LocalType = "jsonb"
)
const (

View File

@ -422,10 +422,10 @@ func (c *Core) fieldsToSequence(ctx context.Context, table string, fields []stri
// Data(g.Slice{g.Map{"uid": 10000, "name":"john"}, g.Map{"uid": 20000, "name":"smith"}})
//
// The parameter `option` values are as follows:
// 0: insert: just insert, if there's unique/primary key in the data, it returns error;
// 1: replace: if there's unique/primary key in the data, it deletes it from table and inserts a new one;
// 2: save: if there's unique/primary key in the data, it updates it or else inserts a new one;
// 3: ignore: if there's unique/primary key in the data, it ignores the inserting;
// InsertOptionDefault: just insert, if there's unique/primary key in the data, it returns error;
// InsertOptionReplace: if there's unique/primary key in the data, it deletes it from table and inserts a new one;
// InsertOptionSave: if there's unique/primary key in the data, it updates it or else inserts a new one;
// InsertOptionIgnore: if there's unique/primary key in the data, it ignores the inserting;
func (c *Core) DoInsert(ctx context.Context, link Link, table string, list List, option DoInsertOption) (result sql.Result, err error) {
var (
keys []string // Field names.
@ -433,8 +433,10 @@ func (c *Core) DoInsert(ctx context.Context, link Link, table string, list List,
params []interface{} // Values that will be committed to underlying database driver.
onDuplicateStr string // onDuplicateStr is used in "ON DUPLICATE KEY UPDATE" statement.
)
// ============================================================================================
// Group the list by fields: different fields go into different lists.
// It uses ListMap here to keep the field sequence for data inserting.
// ============================================================================================
var keyListMap = gmap.NewListMap()
for _, item := range list {
var (
@ -574,7 +576,7 @@ func (c *Core) formatOnDuplicate(columns []string, option DoInsertOption) string
)
}
}
return fmt.Sprintf("ON DUPLICATE KEY UPDATE %s", onDuplicateStr)
return InsertOnDuplicateKeyUpdate + " " + onDuplicateStr
}
// Update does "UPDATE ... " statement for the table.
@ -633,7 +635,7 @@ func (c *Core) DoUpdate(ctx context.Context, link Link, table string, data inter
}
}
)
dataMap, err = c.db.ConvertDataForRecord(ctx, data)
dataMap, err = c.ConvertDataForRecord(ctx, data, table)
if err != nil {
return nil, err
}

View File

@ -15,6 +15,7 @@ import (
"github.com/gogf/gf/v2/encoding/gbinary"
"github.com/gogf/gf/v2/errors/gerror"
"github.com/gogf/gf/v2/internal/intlog"
"github.com/gogf/gf/v2/internal/json"
"github.com/gogf/gf/v2/os/gtime"
"github.com/gogf/gf/v2/text/gregex"
@ -23,32 +24,67 @@ import (
"github.com/gogf/gf/v2/util/gutil"
)
// GetFieldTypeStr retrieves and returns the field type string for certain field by name.
func (c *Core) GetFieldTypeStr(ctx context.Context, fieldName, table, schema string) string {
field := c.GetFieldType(ctx, fieldName, table, schema)
if field != nil {
return field.Type
}
return ""
}
// GetFieldType retrieves and returns the field type object for certain field by name.
func (c *Core) GetFieldType(ctx context.Context, fieldName, table, schema string) *TableField {
fieldsMap, err := c.db.TableFields(ctx, table, schema)
if err != nil {
intlog.Errorf(
ctx,
`TableFields failed for table "%s", schema "%s": %+v`,
table, schema, err,
)
return nil
}
for tableFieldName, tableField := range fieldsMap {
if tableFieldName == fieldName {
return tableField
}
}
return nil
}
// ConvertDataForRecord is a very important function, which converts any data that
// will be inserted into table/collection as a record.
//
// The parameter `value` should be type of *map/map/*struct/struct.
// It supports embedded struct definition for struct.
func (c *Core) ConvertDataForRecord(ctx context.Context, value interface{}) (map[string]interface{}, error) {
func (c *Core) ConvertDataForRecord(ctx context.Context, value interface{}, table string) (map[string]interface{}, error) {
var (
err error
data = DataToMapDeep(value)
)
for k, v := range data {
data[k], err = c.ConvertDataForRecordValue(ctx, v)
for fieldName, fieldValue := range data {
data[fieldName], err = c.db.ConvertValueForField(
ctx,
c.GetFieldTypeStr(ctx, fieldName, table, c.GetSchema()),
fieldValue,
)
if err != nil {
return nil, gerror.Wrapf(err, `ConvertDataForRecordValue failed for value: %#v`, v)
return nil, gerror.Wrapf(err, `ConvertDataForRecord failed for value: %#v`, fieldValue)
}
}
return data, nil
}
func (c *Core) ConvertDataForRecordValue(ctx context.Context, value interface{}) (interface{}, error) {
// ConvertValueForField converts value to the type of the record field.
// The parameter `fieldType` is the type of the target record field.
// The parameter `fieldValue` is the value to be committed to the record field.
func (c *Core) ConvertValueForField(ctx context.Context, fieldType string, fieldValue interface{}) (interface{}, error) {
var (
err error
convertedValue = value
convertedValue = fieldValue
)
// If `value` implements interface `driver.Valuer`, it then uses the interface for value converting.
if valuer, ok := value.(driver.Valuer); ok {
if valuer, ok := fieldValue.(driver.Valuer); ok {
if convertedValue, err = valuer.Value(); err != nil {
if err != nil {
return nil, err
@ -58,7 +94,7 @@ func (c *Core) ConvertDataForRecordValue(ctx context.Context, value interface{})
}
// Default value converting.
var (
rvValue = reflect.ValueOf(value)
rvValue = reflect.ValueOf(fieldValue)
rvKind = rvValue.Kind()
)
for rvKind == reflect.Ptr {
@ -68,16 +104,16 @@ func (c *Core) ConvertDataForRecordValue(ctx context.Context, value interface{})
switch rvKind {
case reflect.Slice, reflect.Array, reflect.Map:
// It should ignore the bytes type.
if _, ok := value.([]byte); !ok {
if _, ok := fieldValue.([]byte); !ok {
// Convert the value to JSON.
convertedValue, err = json.Marshal(value)
convertedValue, err = json.Marshal(fieldValue)
if err != nil {
return nil, err
}
}
case reflect.Struct:
switch r := value.(type) {
switch r := fieldValue.(type) {
// If the time is zero, it then updates it to nil,
// which will insert/update the value to database as "null".
case time.Time:
@ -109,14 +145,14 @@ func (c *Core) ConvertDataForRecordValue(ctx context.Context, value interface{})
// If `value` implements interface iNil,
// check its IsNil() function, if it returns true,
// which will insert/update the value to database as "null".
if v, ok := value.(iNil); ok && v.IsNil() {
if v, ok := fieldValue.(iNil); ok && v.IsNil() {
convertedValue = nil
} else if s, ok := value.(iString); ok {
} else if s, ok := fieldValue.(iString); ok {
// Use string conversion in default.
convertedValue = s.String()
} else {
// Convert the value to JSON.
convertedValue, err = json.Marshal(value)
convertedValue, err = json.Marshal(fieldValue)
if err != nil {
return nil, err
}
@ -127,7 +163,7 @@ func (c *Core) ConvertDataForRecordValue(ctx context.Context, value interface{})
}
// CheckLocalTypeForField checks and returns corresponding type for given db type.
func (c *Core) CheckLocalTypeForField(ctx context.Context, fieldType string, fieldValue interface{}) (string, error) {
func (c *Core) CheckLocalTypeForField(ctx context.Context, fieldType string, fieldValue interface{}) (LocalType, error) {
var (
typeName string
typePattern string
@ -275,7 +311,8 @@ func (c *Core) ConvertValueForLocal(ctx context.Context, fieldType string, field
}
switch typeName {
case LocalTypeBytes:
if strings.Contains(typeName, "binary") || strings.Contains(typeName, "blob") {
var typeNameStr = string(typeName)
if strings.Contains(typeNameStr, "binary") || strings.Contains(typeNameStr, "blob") {
return fieldValue, nil
}
return gconv.Bytes(fieldValue), nil

View File

@ -98,14 +98,14 @@ func (d *DriverWrapperDB) TableFields(
// Data(g.Slice{g.Map{"uid": 10000, "name":"john"}, g.Map{"uid": 20000, "name":"smith"}})
//
// The parameter `option` values are as follows:
// 0: insert: just insert, if there's unique/primary key in the data, it returns error;
// 1: replace: if there's unique/primary key in the data, it deletes it from table and inserts a new one;
// 2: save: if there's unique/primary key in the data, it updates it or else inserts a new one;
// 3: ignore: if there's unique/primary key in the data, it ignores the inserting;
// InsertOptionDefault: just insert, if there's unique/primary key in the data, it returns error;
// InsertOptionReplace: if there's unique/primary key in the data, it deletes it from table and inserts a new one;
// InsertOptionSave: if there's unique/primary key in the data, it updates it or else inserts a new one;
// InsertOptionIgnore: if there's unique/primary key in the data, it ignores the inserting;
func (d *DriverWrapperDB) DoInsert(ctx context.Context, link Link, table string, list List, option DoInsertOption) (result sql.Result, err error) {
// Convert data type before commit it to underlying db driver.
for i, item := range list {
list[i], err = d.DB.ConvertDataForRecord(ctx, item)
list[i], err = d.GetCore().ConvertDataForRecord(ctx, item, table)
if err != nil {
return nil, err
}

View File

@ -200,11 +200,11 @@ func GetInsertOperationByOption(option InsertOption) string {
var operator string
switch option {
case InsertOptionReplace:
operator = "REPLACE"
operator = InsertOperationReplace
case InsertOptionIgnore:
operator = "INSERT IGNORE"
operator = InsertOperationIgnore
default:
operator = "INSERT"
operator = InsertOperationInsert
}
return operator
}
@ -222,14 +222,6 @@ func DataToMapDeep(value interface{}) map[string]interface{} {
switch v.(type) {
case time.Time, *time.Time, gtime.Time, *gtime.Time, gjson.Json, *gjson.Json:
m[k] = v
default:
// Use string conversion in default.
if s, ok := v.(iString); ok {
m[k] = s.String()
} else {
m[k] = v
}
}
}
return m

View File

@ -5,6 +5,8 @@
// You can obtain one at https://github.com/gogf/gf.
// Package instance provides instances management.
//
// Note that this package is not used for cache, as it has no cache expiration.
package instance
import (

View File

@ -19,6 +19,10 @@ import (
// Func is the cache function that calculates and returns the value.
type Func func(ctx context.Context) (value interface{}, err error)
const (
DurationNoExpire = 0 // Expire duration that never expires.
)
// Default cache object.
var defaultCache = New()
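The new constant simply names the duration value 0; a hedged usage sketch with the package-level cache functions (the key and value are illustrative):

package main

import (
	"fmt"

	"github.com/gogf/gf/v2/os/gcache"
	"github.com/gogf/gf/v2/os/gctx"
)

func main() {
	ctx := gctx.New()
	// A duration of 0 means the entry never expires; DurationNoExpire names that intent.
	if err := gcache.Set(ctx, "app.name", "demo", gcache.DurationNoExpire); err != nil {
		panic(err)
	}
	value, _ := gcache.Get(ctx, "app.name")
	fmt.Println(value.String()) // demo
}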

View File

@ -68,9 +68,9 @@ const (
var (
// It is faster to parse the datetime string using regular expressions
// than using time.ParseInLocation.
timeRegex1, _ = regexp.Compile(timeRegexPattern1)
timeRegex2, _ = regexp.Compile(timeRegexPattern2)
timeRegex3, _ = regexp.Compile(timeRegexPattern3)
timeRegex1 = regexp.MustCompile(timeRegexPattern1)
timeRegex2 = regexp.MustCompile(timeRegexPattern2)
timeRegex3 = regexp.MustCompile(timeRegexPattern3)
// Month words to arabic numerals mapping.
monthMap = map[string]int{

View File

@ -8,6 +8,7 @@ package gtime
import (
"bytes"
"regexp"
"strconv"
"strings"
@ -266,7 +267,7 @@ func formatToStdLayout(format string) string {
// formatToRegexPattern converts the custom format to its corresponding regular expression.
func formatToRegexPattern(format string) string {
s := gregex.Quote(formatToStdLayout(format))
s := regexp.QuoteMeta(formatToStdLayout(format))
s, _ = gregex.ReplaceString(`[0-9]`, `[0-9]`, s)
s, _ = gregex.ReplaceString(`[A-Za-z]`, `[A-Za-z]`, s)
s, _ = gregex.ReplaceString(`\s+`, `\s+`, s)

View File

@ -15,6 +15,7 @@ import (
var (
regexMu = sync.RWMutex{}
// Cache for regex object.
// Note that:
// 1. It uses sync.RWMutex ensuring the concurrent safety.

View File

@ -11,6 +11,7 @@ import "strings"
// Str returns part of `haystack` string starting from and including
// the first occurrence of `needle` to the end of `haystack`.
// See http://php.net/manual/en/function.strstr.php.
// Eg: Str("12345", "3") => "345"
func Str(haystack string, needle string) string {
if needle == "" {
return ""
@ -24,6 +25,7 @@ func Str(haystack string, needle string) string {
// StrEx returns part of `haystack` string starting from and excluding
// the first occurrence of `needle` to the end of `haystack`.
// Eg: StrEx("12345", "3") => "45"
func StrEx(haystack string, needle string) string {
if s := Str(haystack, needle); s != "" {
return s[1:]
@ -33,6 +35,7 @@ func StrEx(haystack string, needle string) string {
// StrTill returns part of `haystack` string ending to and including
// the first occurrence of `needle` from the start of `haystack`.
// Eg: StrTill("12345", "3") => "123"
func StrTill(haystack string, needle string) string {
pos := strings.Index(haystack, needle)
if pos == NotFoundIndex || pos == 0 {
@ -43,6 +46,7 @@ func StrTill(haystack string, needle string) string {
// StrTillEx returns part of `haystack` string ending to and excluding
// the first occurrence of `needle` from the start of `haystack`.
// Eg: StrTillEx("12345", "3") => "12"
func StrTillEx(haystack string, needle string) string {
pos := strings.Index(haystack, needle)
if pos == NotFoundIndex || pos == 0 {
@ -53,6 +57,7 @@ func StrTillEx(haystack string, needle string) string {
// SubStr returns a portion of string `str` specified by the `start` and `length` parameters.
// The parameter `length` is optional, it uses the length of `str` in default.
// Eg: SubStr("12345", 1, 2) => "23"
func SubStr(str string, start int, length ...int) (substr string) {
strLength := len(str)
if start < 0 {