Added database prefix to cache keys. (#835)

Author: fangjianwei
Date: 2021-07-22 11:29:09 +08:00
Committed by: GitHub
Parent: 75952308f9
Commit: 476026e393
10 changed files with 60 additions and 49 deletions

View File

@@ -419,6 +419,10 @@ var (
             Name:  "idea",
             Usage: "for idea plugin [optional]",
         },
+        cli.StringFlag{
+            Name:  "database, db",
+            Usage: "the name of database [optional]",
+        },
     },
     Action: model.MysqlDDL,
 },
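For readers unfamiliar with the CLI library goctl appears to build on here (urfave/cli v1, judging by the `cli.StringFlag` and `*cli.Context` types in this diff), the comma in `Name: "database, db"` is what registers `-db` as a short alias of `--database`, and the value is read back with `ctx.String("database")`, as the command.go hunk below shows. A minimal, self-contained sketch of that convention, assuming urfave/cli v1 and not taken from goctl itself:

```go
package main

import (
	"fmt"
	"os"

	"github.com/urfave/cli"
)

func main() {
	app := cli.NewApp()
	app.Name = "flag-demo"
	app.Flags = []cli.Flag{
		cli.StringFlag{
			// "database, db" registers --database with -db as an alias.
			Name:  "database, db",
			Usage: "the name of database [optional]",
		},
	}
	app.Action = func(ctx *cli.Context) error {
		// Both --database=foo and -db foo are read back under the long name.
		fmt.Println("database:", ctx.String("database"))
		return nil
	}

	if err := app.Run(os.Args); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```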

View File

@@ -264,6 +264,7 @@ OPTIONS:
    --style value    the file naming format, see [https://github.com/tal-tech/go-zero/tree/master/tools/goctl/config/readme.md]
    --cache, -c      generate code with cache [optional]
    --idea           for idea plugin [optional]
+   --database, -db  the name of database [optional]
 ```
 * datasource

View File

@@ -24,6 +24,7 @@ const (
flagURL = "url" flagURL = "url"
flagTable = "table" flagTable = "table"
flagStyle = "style" flagStyle = "style"
flagDatabase = "database"
) )
var errNotMatched = errors.New("sql not matched") var errNotMatched = errors.New("sql not matched")
@@ -35,12 +36,13 @@ func MysqlDDL(ctx *cli.Context) error {
cache := ctx.Bool(flagCache) cache := ctx.Bool(flagCache)
idea := ctx.Bool(flagIdea) idea := ctx.Bool(flagIdea)
style := ctx.String(flagStyle) style := ctx.String(flagStyle)
database := ctx.String(flagDatabase)
cfg, err := config.NewConfig(style) cfg, err := config.NewConfig(style)
if err != nil { if err != nil {
return err return err
} }
return fromDDl(src, dir, cfg, cache, idea) return fromDDl(src, dir, cfg, cache, idea, database)
} }
// MyDataSource generates model code from datasource // MyDataSource generates model code from datasource
@@ -59,7 +61,7 @@ func MyDataSource(ctx *cli.Context) error {
return fromDataSource(url, pattern, dir, cfg, cache, idea) return fromDataSource(url, pattern, dir, cfg, cache, idea)
} }
func fromDDl(src, dir string, cfg *config.Config, cache, idea bool) error { func fromDDl(src, dir string, cfg *config.Config, cache, idea bool, database string) error {
log := console.NewConsole(idea) log := console.NewConsole(idea)
src = strings.TrimSpace(src) src = strings.TrimSpace(src)
if len(src) == 0 { if len(src) == 0 {
@@ -81,7 +83,7 @@ func fromDDl(src, dir string, cfg *config.Config, cache, idea bool) error {
} }
for _, file := range files { for _, file := range files {
err = generator.StartFromDDL(file, cache) err = generator.StartFromDDL(file, cache, database)
if err != nil { if err != nil {
return err return err
} }

View File

@@ -24,12 +24,12 @@ func TestFromDDl(t *testing.T) {
err := gen.Clean() err := gen.Clean()
assert.Nil(t, err) assert.Nil(t, err)
err = fromDDl("./user.sql", t.TempDir(), cfg, true, false) err = fromDDl("./user.sql", t.TempDir(), cfg, true, false, "go_zero")
assert.Equal(t, errNotMatched, err) assert.Equal(t, errNotMatched, err)
// case dir is not exists // case dir is not exists
unknownDir := filepath.Join(t.TempDir(), "test", "user.sql") unknownDir := filepath.Join(t.TempDir(), "test", "user.sql")
err = fromDDl(unknownDir, t.TempDir(), cfg, true, false) err = fromDDl(unknownDir, t.TempDir(), cfg, true, false, "go_zero")
assert.True(t, func() bool { assert.True(t, func() bool {
switch err.(type) { switch err.(type) {
case *os.PathError: case *os.PathError:
@@ -40,7 +40,7 @@ func TestFromDDl(t *testing.T) {
}()) }())
// case empty src // case empty src
err = fromDDl("", t.TempDir(), cfg, true, false) err = fromDDl("", t.TempDir(), cfg, true, false, "go_zero")
if err != nil { if err != nil {
assert.Equal(t, "expected path or path globbing patterns, but nothing found", err.Error()) assert.Equal(t, "expected path or path globbing patterns, but nothing found", err.Error())
} }
@@ -70,7 +70,7 @@ func TestFromDDl(t *testing.T) {
_, err = os.Stat(user2Sql) _, err = os.Stat(user2Sql)
assert.Nil(t, err) assert.Nil(t, err)
err = fromDDl(filepath.Join(tempDir, "user*.sql"), tempDir, cfg, true, false) err = fromDDl(filepath.Join(tempDir, "user*.sql"), tempDir, cfg, true, false, "go_zero")
assert.Nil(t, err) assert.Nil(t, err)
_, err = os.Stat(filepath.Join(tempDir, "usermodel.go")) _, err = os.Stat(filepath.Join(tempDir, "usermodel.go"))

View File

@@ -90,8 +90,8 @@ func newDefaultOption() Option {
} }
} }
func (g *defaultGenerator) StartFromDDL(filename string, withCache bool) error { func (g *defaultGenerator) StartFromDDL(filename string, withCache bool, database string) error {
modelList, err := g.genFromDDL(filename, withCache) modelList, err := g.genFromDDL(filename, withCache, database)
if err != nil { if err != nil {
return err return err
} }
@@ -174,9 +174,9 @@ func (g *defaultGenerator) createFile(modelList map[string]string) error {
} }
// ret1: key-table name,value-code // ret1: key-table name,value-code
func (g *defaultGenerator) genFromDDL(filename string, withCache bool) (map[string]string, error) { func (g *defaultGenerator) genFromDDL(filename string, withCache bool, database string) (map[string]string, error) {
m := make(map[string]string) m := make(map[string]string)
tables, err := parser.Parse(filename) tables, err := parser.Parse(filename, database)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@@ -34,7 +34,7 @@ func TestCacheModel(t *testing.T) {
}) })
assert.Nil(t, err) assert.Nil(t, err)
err = g.StartFromDDL(sqlFile, true) err = g.StartFromDDL(sqlFile, true, "go_zero")
assert.Nil(t, err) assert.Nil(t, err)
assert.True(t, func() bool { assert.True(t, func() bool {
_, err := os.Stat(filepath.Join(cacheDir, "TestUserModel.go")) _, err := os.Stat(filepath.Join(cacheDir, "TestUserModel.go"))
@@ -45,7 +45,7 @@ func TestCacheModel(t *testing.T) {
}) })
assert.Nil(t, err) assert.Nil(t, err)
err = g.StartFromDDL(sqlFile, false) err = g.StartFromDDL(sqlFile, false, "go_zero")
assert.Nil(t, err) assert.Nil(t, err)
assert.True(t, func() bool { assert.True(t, func() bool {
_, err := os.Stat(filepath.Join(noCacheDir, "testusermodel.go")) _, err := os.Stat(filepath.Join(noCacheDir, "testusermodel.go"))
@@ -72,7 +72,7 @@ func TestNamingModel(t *testing.T) {
}) })
assert.Nil(t, err) assert.Nil(t, err)
err = g.StartFromDDL(sqlFile, true) err = g.StartFromDDL(sqlFile, true, "go_zero")
assert.Nil(t, err) assert.Nil(t, err)
assert.True(t, func() bool { assert.True(t, func() bool {
_, err := os.Stat(filepath.Join(camelDir, "TestUserModel.go")) _, err := os.Stat(filepath.Join(camelDir, "TestUserModel.go"))
@@ -83,7 +83,7 @@ func TestNamingModel(t *testing.T) {
}) })
assert.Nil(t, err) assert.Nil(t, err)
err = g.StartFromDDL(sqlFile, true) err = g.StartFromDDL(sqlFile, true, "go_zero")
assert.Nil(t, err) assert.Nil(t, err)
assert.True(t, func() bool { assert.True(t, func() bool {
_, err := os.Stat(filepath.Join(snakeDir, "test_user_model.go")) _, err := os.Stat(filepath.Join(snakeDir, "test_user_model.go"))

View File

@@ -39,9 +39,9 @@ type Join []string
 func genCacheKeys(table parser.Table) (Key, []Key) {
     var primaryKey Key
     var uniqueKey []Key
-    primaryKey = genCacheKey(table.Name, []*parser.Field{&table.PrimaryKey.Field})
+    primaryKey = genCacheKey(table.Db, table.Name, []*parser.Field{&table.PrimaryKey.Field})
     for _, each := range table.UniqueIndex {
-        uniqueKey = append(uniqueKey, genCacheKey(table.Name, each))
+        uniqueKey = append(uniqueKey, genCacheKey(table.Db, table.Name, each))
     }
     sort.Slice(uniqueKey, func(i, j int) bool {
         return uniqueKey[i].VarLeft < uniqueKey[j].VarLeft

@@ -50,7 +50,7 @@ func genCacheKeys(table parser.Table) (Key, []Key) {
     return primaryKey, uniqueKey
 }

-func genCacheKey(table stringx.String, in []*parser.Field) Key {
+func genCacheKey(db stringx.String, table stringx.String, in []*parser.Field) Key {
     var (
         varLeftJoin, varRightJon, fieldNameJoin Join
         varLeft, varRight, varExpression        string

@@ -59,9 +59,9 @@ func genCacheKey(table stringx.String, in []*parser.Field) Key {
         keyLeft, keyRight, dataKeyRight, keyExpression, dataKeyExpression string
     )

-    varLeftJoin = append(varLeftJoin, "cache", table.Source())
-    varRightJon = append(varRightJon, "cache", table.Source())
-    keyLeftJoin = append(keyLeftJoin, table.Source())
+    varLeftJoin = append(varLeftJoin, "cache", db.Source(), table.Source())
+    varRightJon = append(varRightJon, "cache", db.Source(), table.Source())
+    keyLeftJoin = append(keyLeftJoin, db.Source(), table.Source())
     for _, each := range in {
         varLeftJoin = append(varLeftJoin, each.Name.Source())
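The net effect on generated keys: where goctl previously produced prefixes like `cache:user:id:`, it now inserts the (camel-cased) database name, e.g. `cache:goZero:user:id:` for `go_zero`, as the updated test expectations below confirm. A simplified, standalone sketch of that joining, not the real genCacheKey (which also builds the variable names and delegates camel-casing to goctl's stringx package):

```go
package main

import (
	"fmt"
	"strings"
)

// cacheKeyPrefix mimics the new prefix layout: "cache", then the database
// segment, then the table, then the key fields, joined by ':' and ending in ':'.
func cacheKeyPrefix(db, table string, fields ...string) string {
	parts := append([]string{"cache", db, table}, fields...)
	return strings.Join(parts, ":") + ":"
}

func main() {
	fmt.Println(cacheKeyPrefix("goZero", "user", "id"))            // cache:goZero:user:id:
	fmt.Println(cacheKeyPrefix("goZero", "user", "class", "name")) // cache:goZero:user:class:name:
	fmt.Println(cacheKeyPrefix("goZero", "user", "mobile"))        // cache:goZero:user:mobile:
}
```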

View File

@@ -36,6 +36,7 @@ func TestGenCacheKeys(t *testing.T) {
     }
     primariCacheKey, uniqueCacheKey := genCacheKeys(parser.Table{
         Name: stringx.From("user"),
+        Db:   stringx.From("go_zero"),
         PrimaryKey: parser.Primary{
             Field:         *primaryField,
             AutoIncrement: true,

@@ -70,14 +71,14 @@ func TestGenCacheKeys(t *testing.T) {
     t.Run("primaryCacheKey", func(t *testing.T) {
         assert.Equal(t, true, func() bool {
             return cacheKeyEqual(primariCacheKey, Key{
-                VarLeft:           "cacheUserIdPrefix",
-                VarRight:          `"cache:user:id:"`,
-                VarExpression:     `cacheUserIdPrefix = "cache:user:id:"`,
-                KeyLeft:           "userIdKey",
-                KeyRight:          `fmt.Sprintf("%s%v", cacheUserIdPrefix, id)`,
-                DataKeyRight:      `fmt.Sprintf("%s%v", cacheUserIdPrefix, data.Id)`,
-                KeyExpression:     `userIdKey := fmt.Sprintf("%s%v", cacheUserIdPrefix, id)`,
-                DataKeyExpression: `userIdKey := fmt.Sprintf("%s%v", cacheUserIdPrefix, data.Id)`,
+                VarLeft:           "cacheGoZeroUserIdPrefix",
+                VarRight:          `"cache:goZero:user:id:"`,
+                VarExpression:     `cacheGoZeroUserIdPrefix = "cache:goZero:user:id:"`,
+                KeyLeft:           "goZeroUserIdKey",
+                KeyRight:          `fmt.Sprintf("%s%v", cacheGoZeroUserIdPrefix, id)`,
+                DataKeyRight:      `fmt.Sprintf("%s%v", cacheGoZeroUserIdPrefix, data.Id)`,
+                KeyExpression:     `goZeroUserIdKey := fmt.Sprintf("%s%v", cacheGoZeroUserIdPrefix, id)`,
+                DataKeyExpression: `goZeroUserIdKey := fmt.Sprintf("%s%v", cacheGoZeroUserIdPrefix, data.Id)`,
                 FieldNameJoin:     []string{"id"},
             })
         }())

@@ -87,25 +88,25 @@ func TestGenCacheKeys(t *testing.T) {
         assert.Equal(t, true, func() bool {
             expected := []Key{
                 {
-                    VarLeft:           "cacheUserClassNamePrefix",
-                    VarRight:          `"cache:user:class:name:"`,
-                    VarExpression:     `cacheUserClassNamePrefix = "cache:user:class:name:"`,
-                    KeyLeft:           "userClassNameKey",
-                    KeyRight:          `fmt.Sprintf("%s%v:%v", cacheUserClassNamePrefix, class, name)`,
-                    DataKeyRight:      `fmt.Sprintf("%s%v:%v", cacheUserClassNamePrefix, data.Class, data.Name)`,
-                    KeyExpression:     `userClassNameKey := fmt.Sprintf("%s%v:%v", cacheUserClassNamePrefix, class, name)`,
-                    DataKeyExpression: `userClassNameKey := fmt.Sprintf("%s%v:%v", cacheUserClassNamePrefix, data.Class, data.Name)`,
+                    VarLeft:           "cacheGoZeroUserClassNamePrefix",
+                    VarRight:          `"cache:goZero:user:class:name:"`,
+                    VarExpression:     `cacheGoZeroUserClassNamePrefix = "cache:goZero:user:class:name:"`,
+                    KeyLeft:           "goZeroUserClassNameKey",
+                    KeyRight:          `fmt.Sprintf("%s%v:%v", cacheGoZeroUserClassNamePrefix, class, name)`,
+                    DataKeyRight:      `fmt.Sprintf("%s%v:%v", cacheGoZeroUserClassNamePrefix, data.Class, data.Name)`,
+                    KeyExpression:     `goZeroUserClassNameKey := fmt.Sprintf("%s%v:%v", cacheGoZeroUserClassNamePrefix, class, name)`,
+                    DataKeyExpression: `goZeroUserClassNameKey := fmt.Sprintf("%s%v:%v", cacheGoZeroUserClassNamePrefix, data.Class, data.Name)`,
                     FieldNameJoin:     []string{"class", "name"},
                 },
                 {
-                    VarLeft:           "cacheUserMobilePrefix",
-                    VarRight:          `"cache:user:mobile:"`,
-                    VarExpression:     `cacheUserMobilePrefix = "cache:user:mobile:"`,
-                    KeyLeft:           "userMobileKey",
-                    KeyRight:          `fmt.Sprintf("%s%v", cacheUserMobilePrefix, mobile)`,
-                    DataKeyRight:      `fmt.Sprintf("%s%v", cacheUserMobilePrefix, data.Mobile)`,
-                    KeyExpression:     `userMobileKey := fmt.Sprintf("%s%v", cacheUserMobilePrefix, mobile)`,
-                    DataKeyExpression: `userMobileKey := fmt.Sprintf("%s%v", cacheUserMobilePrefix, data.Mobile)`,
+                    VarLeft:           "cacheGoZeroUserMobilePrefix",
+                    VarRight:          `"cache:goZero:user:mobile:"`,
+                    VarExpression:     `cacheGoZeroUserMobilePrefix = "cache:goZero:user:mobile:"`,
+                    KeyLeft:           "goZeroUserMobileKey",
+                    KeyRight:          `fmt.Sprintf("%s%v", cacheGoZeroUserMobilePrefix, mobile)`,
+                    DataKeyRight:      `fmt.Sprintf("%s%v", cacheGoZeroUserMobilePrefix, data.Mobile)`,
+                    KeyExpression:     `goZeroUserMobileKey := fmt.Sprintf("%s%v", cacheGoZeroUserMobilePrefix, mobile)`,
+                    DataKeyExpression: `goZeroUserMobileKey := fmt.Sprintf("%s%v", cacheGoZeroUserMobilePrefix, data.Mobile)`,
                     FieldNameJoin:     []string{"mobile"},
                 },
             }

View File

@@ -21,6 +21,7 @@ type (
     // Table describes a mysql table
     Table struct {
         Name        stringx.String
+        Db          stringx.String
         PrimaryKey  Primary
         UniqueIndex map[string][]*Field
         Fields      []*Field

@@ -46,7 +47,7 @@ type (
 )

 // Parse parses ddl into golang structure
-func Parse(filename string) ([]*Table, error) {
+func Parse(filename string, database string) ([]*Table, error) {
     p := parser.NewParser()
     tables, err := p.From(filename)
     if err != nil {

@@ -145,6 +146,7 @@ func Parse(filename string) ([]*Table, error) {
     list = append(list, &Table{
         Name:        stringx.From(e.Name),
+        Db:          stringx.From(database),
         PrimaryKey:  primaryKey,
         UniqueIndex: uniqueIndex,
         Fields:      fields,

@@ -243,6 +245,7 @@ func ConvertDataType(table *model.Table) (*Table, error) {
     var reply Table
     reply.UniqueIndex = map[string][]*Field{}
     reply.Name = stringx.From(table.Table)
+    reply.Db = stringx.From(table.Db)
     seqInIndex := 0
     if table.PrimaryKey.Index != nil {
         seqInIndex = table.PrimaryKey.Index.SeqInIndex
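Callers of parser.Parse now supply the database name themselves, and each parsed Table carries it as Db for the cache-key generator. A hypothetical caller-side view of the updated API (the import path is assumed from the tal-tech/go-zero repository layout referenced earlier; only Parse, Table.Db, and Table.Name are taken from this diff):

```go
package main

import (
	"fmt"

	"github.com/tal-tech/go-zero/tools/goctl/model/sql/parser"
)

func main() {
	// The second argument is the new database name; it is stored on Table.Db
	// and later folded into every generated cache-key prefix.
	tables, err := parser.Parse("./user.sql", "go_zero")
	if err != nil {
		panic(err)
	}
	for _, table := range tables {
		fmt.Printf("db=%s table=%s\n", table.Db.Source(), table.Name.Source())
	}
}
```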

View File

@@ -15,7 +15,7 @@ func TestParsePlainText(t *testing.T) {
err := ioutil.WriteFile(sqlFile, []byte("plain text"), 0o777) err := ioutil.WriteFile(sqlFile, []byte("plain text"), 0o777)
assert.Nil(t, err) assert.Nil(t, err)
_, err = Parse(sqlFile) _, err = Parse(sqlFile, "go_zero")
assert.NotNil(t, err) assert.NotNil(t, err)
} }
@@ -24,7 +24,7 @@ func TestParseSelect(t *testing.T) {
err := ioutil.WriteFile(sqlFile, []byte("select * from user"), 0o777) err := ioutil.WriteFile(sqlFile, []byte("select * from user"), 0o777)
assert.Nil(t, err) assert.Nil(t, err)
tables, err := Parse(sqlFile) tables, err := Parse(sqlFile, "go_zero")
assert.Nil(t, err) assert.Nil(t, err)
assert.Equal(t, 0, len(tables)) assert.Equal(t, 0, len(tables))
} }
@@ -34,7 +34,7 @@ func TestParseCreateTable(t *testing.T) {
err := ioutil.WriteFile(sqlFile, []byte("CREATE TABLE `test_user` (\n `id` bigint NOT NULL AUTO_INCREMENT,\n `mobile` varchar(255) COLLATE utf8mb4_bin NOT NULL comment '手\\t机 号',\n `class` bigint NOT NULL comment '班级',\n `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL comment '姓\n 名',\n `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP comment '创建\\r时间',\n `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,\n PRIMARY KEY (`id`),\n UNIQUE KEY `mobile_unique` (`mobile`),\n UNIQUE KEY `class_name_unique` (`class`,`name`),\n KEY `create_index` (`create_time`),\n KEY `name_index` (`name`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;"), 0o777) err := ioutil.WriteFile(sqlFile, []byte("CREATE TABLE `test_user` (\n `id` bigint NOT NULL AUTO_INCREMENT,\n `mobile` varchar(255) COLLATE utf8mb4_bin NOT NULL comment '手\\t机 号',\n `class` bigint NOT NULL comment '班级',\n `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL comment '姓\n 名',\n `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP comment '创建\\r时间',\n `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,\n PRIMARY KEY (`id`),\n UNIQUE KEY `mobile_unique` (`mobile`),\n UNIQUE KEY `class_name_unique` (`class`,`name`),\n KEY `create_index` (`create_time`),\n KEY `name_index` (`name`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;"), 0o777)
assert.Nil(t, err) assert.Nil(t, err)
tables, err := Parse(sqlFile) tables, err := Parse(sqlFile, "go_zero")
assert.Equal(t, 1, len(tables)) assert.Equal(t, 1, len(tables))
table := tables[0] table := tables[0]
assert.Nil(t, err) assert.Nil(t, err)