Compare commits

27 Commits

| SHA1 |
|---|
| f7f3730e1a |
| 0ee7654407 |
| 16cc990fdd |
| 00061c2e5b |
| 6793f7a1de |
| c8428a7f65 |
| a5e1d0d0dc |
| 8270c7deed |
| 9f4a882a1b |
| cb7b7cb72e |
| 603c93aa4a |
| cb8d9d413a |
| ff7443c6a7 |
| b812e74d6f |
| 089cdaa75f |
| 476026e393 |
| 75952308f9 |
| df0550d6dc |
| e481b63b21 |
| e47079f0f4 |
| 9b2a279948 |
| db87fd3239 |
| 598fda0c97 |
| b0e335e7b0 |
| efdf475da4 |
| 22a1315136 |
| 5b22823018 |
@@ -106,9 +106,7 @@ func (s *Set) KeysInt() []int {
	var keys []int

	for key := range s.data {
-		if intKey, ok := key.(int); !ok {
-			continue
-		} else {
+		if intKey, ok := key.(int); ok {
			keys = append(keys, intKey)
		}
	}
@@ -121,9 +119,7 @@ func (s *Set) KeysInt64() []int64 {
	var keys []int64

	for key := range s.data {
-		if intKey, ok := key.(int64); !ok {
-			continue
-		} else {
+		if intKey, ok := key.(int64); ok {
			keys = append(keys, intKey)
		}
	}
@@ -136,9 +132,7 @@ func (s *Set) KeysUint() []uint {
	var keys []uint

	for key := range s.data {
-		if intKey, ok := key.(uint); !ok {
-			continue
-		} else {
+		if intKey, ok := key.(uint); ok {
			keys = append(keys, intKey)
		}
	}
@@ -151,9 +145,7 @@ func (s *Set) KeysUint64() []uint64 {
	var keys []uint64

	for key := range s.data {
-		if intKey, ok := key.(uint64); !ok {
-			continue
-		} else {
+		if intKey, ok := key.(uint64); ok {
			keys = append(keys, intKey)
		}
	}
@@ -166,9 +158,7 @@ func (s *Set) KeysStr() []string {
	var keys []string

	for key := range s.data {
-		if strKey, ok := key.(string); !ok {
-			continue
-		} else {
+		if strKey, ok := key.(string); ok {
			keys = append(keys, strKey)
		}
	}
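The set.go hunks above replace the negated comma-ok pattern (`!ok` / `continue` / `else`) with the direct positive check. A minimal standalone sketch of the same idiom; the map below is only illustrative and stands in for the `Set`'s internal `data` map:

```go
package main

import "fmt"

func main() {
	values := map[interface{}]struct{}{1: {}, "a": {}, 2: {}}

	// Keep a key only when the type assertion succeeds; no continue/else needed.
	var ints []int
	for key := range values {
		if v, ok := key.(int); ok {
			ints = append(ints, v)
		}
	}
	fmt.Println(ints)
}
```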
@@ -14,5 +14,5 @@ func AddWrapUpListener(fn func()) func() {
	return fn
}

-func SetTimeoutToForceQuit(duration time.Duration) {
+func SetTimeToForceQuit(duration time.Duration) {
}
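The rename above only changes the exported name (`SetTimeoutToForceQuit` to `SetTimeToForceQuit`); the signature stays the same. A hedged usage sketch, assuming the function sits in go-zero's `core/proc` package next to `AddWrapUpListener` as the hunk context suggests:

```go
package main

import (
	"time"

	"github.com/tal-tech/go-zero/core/proc"
)

func main() {
	// Give wrap-up listeners at most five seconds before the process is forced to quit.
	proc.SetTimeToForceQuit(5 * time.Second)

	waitForCalled := proc.AddWrapUpListener(func() {
		// release resources here
	})
	defer waitForCalled()
}
```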
@@ -30,7 +30,8 @@ func (t txSession) Prepare(q string) (StmtSession, error) {
	}

	return statement{
-		stmt: stmt,
+		query: q,
+		stmt:  stmt,
	}, nil
}
go.mod (10 changes)
@@ -35,16 +35,16 @@ require (
	github.com/spaolacci/murmur3 v1.1.0
	github.com/stretchr/testify v1.7.0
	github.com/urfave/cli v1.22.5
	github.com/xwb1989/sqlparser v0.0.0-20180606152119-120387863bf2
	github.com/zeromicro/antlr v0.0.1
	github.com/zeromicro/ddl-parser v0.0.0-20210712021150-63520aca7348 // indirect
	go.etcd.io/etcd/api/v3 v3.5.0
	go.etcd.io/etcd/client/v3 v3.5.0
	go.uber.org/automaxprocs v1.3.0
-	golang.org/x/net v0.0.0-20210614182718-04defd469f4e
-	golang.org/x/sys v0.0.0-20210616094352-59db8d763f22 // indirect
+	golang.org/x/net v0.0.0-20210716203947-853a461950ff
+	golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c // indirect
	golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba
-	google.golang.org/genproto v0.0.0-20210617175327-b9e0b3197ced // indirect
-	google.golang.org/grpc v1.38.0
+	google.golang.org/genproto v0.0.0-20210722135532-667f2b7c528f // indirect
+	google.golang.org/grpc v1.39.0
	gopkg.in/cheggaaa/pb.v1 v1.0.28
	gopkg.in/h2non/gock.v1 v1.0.15
	gopkg.in/yaml.v2 v2.4.0
go.sum (24 changes)
@@ -15,6 +15,8 @@ github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGn
github.com/alicebob/miniredis/v2 v2.14.1 h1:GjlbSeoJ24bzdLRs13HoMEeaRZx9kg5nHoRW7QV/nCs=
github.com/alicebob/miniredis/v2 v2.14.1/go.mod h1:uS970Sw5Gs9/iK3yBg0l9Uj9s25wXxSpQUE9EaJ/Blg=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210521184019-c5ad59b459ec h1:EEyRvzmpEUZ+I8WmD5cw/vY8EqhambkOqy5iFr0908A=
github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210521184019-c5ad59b459ec/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
@@ -32,6 +34,7 @@ github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58 h1:F1EaeKL/ta07PY
github.com/cloudflare/golz4 v0.0.0-20150217214814-ef862a3cdc58/go.mod h1:EOBUe0h4xcZ5GoxqC5SDxFQ8gwyZPKQoEzownBlhI80=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd/v22 v22.3.2 h1:D9/bQk5vlXQFZ6Kwuu6zaiXJ9oTPe68++AzAJc1DzSI=
@@ -51,7 +54,9 @@ github.com/emicklei/proto v1.9.0/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
@@ -225,8 +230,6 @@ github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5Cc
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/urfave/cli v1.22.5 h1:lNq9sAHXK2qfdI8W+GRItjCEkI+2oR4d+MEHy1CKXoU=
github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/xwb1989/sqlparser v0.0.0-20180606152119-120387863bf2 h1:zzrxE1FKn5ryBNl9eKOeqQ58Y/Qpo3Q9QNxKHX5uzzQ=
github.com/xwb1989/sqlparser v0.0.0-20180606152119-120387863bf2/go.mod h1:hzfGeIUDq/j97IG+FhNqkowIyEcD88LrW6fyU3K3WqY=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
@@ -234,12 +237,17 @@ github.com/yuin/gopher-lua v0.0.0-20191220021717-ab39c6098bdb h1:ZkM6LRnq40pR1Ox
github.com/yuin/gopher-lua v0.0.0-20191220021717-ab39c6098bdb/go.mod h1:gqRgreBUhTSL0GeU64rtZ3Uq3wtjOa/TB2YfrtkCbVQ=
github.com/zeromicro/antlr v0.0.1 h1:CQpIn/dc0pUjgGQ81y98s/NGOm2Hfru2NNio2I9mQgk=
github.com/zeromicro/antlr v0.0.1/go.mod h1:nfpjEwFR6Q4xGDJMcZnCL9tEfQRgszMwu3rDz2Z+p5M=
github.com/zeromicro/ddl-parser v0.0.0-20210710132903-bc9dbb9789b1 h1:zItUIfobEHTYD9X0fAt9QWEWIFWDa8CypF+Z62zIR+M=
github.com/zeromicro/ddl-parser v0.0.0-20210710132903-bc9dbb9789b1/go.mod h1:ISU/8NuPyEpl9pa17Py9TBPetMjtsiHrb9f5XGiYbo8=
github.com/zeromicro/ddl-parser v0.0.0-20210712021150-63520aca7348 h1:OhxL9tn28gDeJVzreIUiE5oVxZCjL3tBJ0XBNw8p5R8=
github.com/zeromicro/ddl-parser v0.0.0-20210712021150-63520aca7348/go.mod h1:ISU/8NuPyEpl9pa17Py9TBPetMjtsiHrb9f5XGiYbo8=
go.etcd.io/etcd/api/v3 v3.5.0 h1:GsV3S+OfZEOCNXdtNkBSR7kgLobAa/SO6tCxRa0GAYw=
go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
go.etcd.io/etcd/client/pkg/v3 v3.5.0 h1:2aQv6F436YnN7I4VbI8PPYrBhu+SmrTaADcf8Mi/6PU=
go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
go.etcd.io/etcd/client/v3 v3.5.0 h1:62Eh0XOro+rDwkrypAGDfgmNh5Joq+z+W9HZdlXMzek=
go.etcd.io/etcd/client/v3 v3.5.0/go.mod h1:AIKXXVX/DQXtfTEqBryiLTUXwON+GuvO6Z7lLS/oTh0=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/automaxprocs v1.3.0 h1:II28aZoGdaglS5vVNnspf28lnZpXScxtIozx1lAjdb0=
@@ -281,6 +289,8 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210614182718-04defd469f4e h1:XpT3nA5TvE525Ne3hInMh6+GETgn27Zfm9dxsThnX2Q=
golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210716203947-853a461950ff h1:j2EK/QoxYNBsXI4R7fQkkRUk8y6wnOBI+6hgPdP/6Ds=
golang.org/x/net v0.0.0-20210716203947-853a461950ff/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -318,6 +328,8 @@ golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22 h1:RqytpXGR1iVNX7psjB3ff8y7sNFinVFvkx1c8SjBkio=
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -342,6 +354,7 @@ golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roY
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -356,13 +369,18 @@ google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEY
google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
google.golang.org/genproto v0.0.0-20210617175327-b9e0b3197ced h1:c5geK1iMU3cDKtFrCVQIcjR3W+JOZMuhIyICMCTbtus=
google.golang.org/genproto v0.0.0-20210617175327-b9e0b3197ced/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24=
google.golang.org/genproto v0.0.0-20210722135532-667f2b7c528f h1:YORWxaStkWBnWgELOHTmDrqNlFXuVGEbhwbB5iK94bQ=
google.golang.org/genproto v0.0.0-20210722135532-667f2b7c528f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.38.0 h1:/9BgsAsa5nWe26HqOlvlgJnqBuktYOLCgjCPqsa56W0=
google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.39.0 h1:Klz8I9kdtkIN6EpHHUOMLCYhTn/2WAe5a0s1hcBkdTI=
google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@@ -375,6 +393,8 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ=
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
readme-cn.md (12 changes)
@@ -161,17 +161,8 @@ GO111MODULE=on GOPROXY=https://goproxy.cn/,direct go get -u github.com/tal-tech/
 [https://go-zero.dev/cn/](https://go-zero.dev/cn/)

-* 常见问题
-
-  * 因为 `etcd` 和 `grpc` 兼容性问题,请使用 `grpc@v1.29.1`
-
-    `google.golang.org/grpc v1.29.1`
-
-  * 因为 `protobuf` 兼容性问题,请使用 `protocol-gen@v1.3.2`
-
-    `go get -u github.com/golang/protobuf/protoc-gen-go@v1.3.2`
-
 * awesome 系列(更多文章见『微服务实践』公众号)

   * [快速构建高并发微服务](https://github.com/tal-tech/zero-doc/blob/main/doc/shorturl.md)
   * [快速构建高并发微服务 - 多 RPC 版](https://github.com/tal-tech/zero-doc/blob/main/docs/zero/bookstore.md)
   * [goctl 使用帮助](https://github.com/tal-tech/zero-doc/blob/main/doc/goctl.md)
@@ -224,6 +215,7 @@ go-zero 已被许多公司用于生产部署,接入场景如在线教育、电
 >33. 上海鲸思智能科技有限公司
 >34. 南宁宸升计算机科技有限公司
 >35. 秦皇岛2084team
+>36. 天翼云股份有限公司

 如果贵公司也已使用 go-zero,欢迎在 [登记地址](https://github.com/tal-tech/go-zero/issues/602) 登记,仅仅为了推广,不做其它用途。
readme.md (14 changes)
@@ -211,19 +211,9 @@ go get -u github.com/tal-tech/go-zero
 * [Rapid development of microservice systems - multiple RPCs](https://github.com/tal-tech/zero-doc/blob/main/docs/zero/bookstore-en.md)
 * [Examples](https://github.com/zeromicro/zero-examples)

-## 9. Important notes
+## 9. Chat group

-* Use grpc 1.29.1, because etcd lib doesn't support latter versions.
-
-  `google.golang.org/grpc v1.29.1`
-
-* For protobuf compatibility, use `protocol-gen@v1.3.2`.
-
-  ` go get -u github.com/golang/protobuf/protoc-gen-go@v1.3.2`
-
-## 10. Chat group
-
-Join the chat via https://join.slack.com/t/go-zero/shared_invite/zt-qxlclrv9-MWrCNkB2DpSgtEK2tVXJcw
+Join the chat via https://join.slack.com/t/go-zero/shared_invite/zt-thyennhc-_fNXFpeUJcGE_tQNZFpsdA

 ## Give a Star! ⭐
@@ -12,6 +12,7 @@ import (
const (
	formKey    = "form"
	pathKey    = "path"
+	headerKey  = "header"
	emptyJson  = "{}"
	maxMemory  = 32 << 20 // 32MB
	maxBodyLen = 8 << 20 // 8MB
@@ -20,8 +21,9 @@ const (
)

var (
-	formUnmarshaler = mapping.NewUnmarshaler(formKey, mapping.WithStringValues())
-	pathUnmarshaler = mapping.NewUnmarshaler(pathKey, mapping.WithStringValues())
+	formUnmarshaler   = mapping.NewUnmarshaler(formKey, mapping.WithStringValues())
+	pathUnmarshaler   = mapping.NewUnmarshaler(pathKey, mapping.WithStringValues())
+	headerUnmarshaler = mapping.NewUnmarshaler(headerKey, mapping.WithStringValues())
)

// Parse parses the request.
@@ -34,9 +36,28 @@ func Parse(r *http.Request, v interface{}) error {
		return err
	}

+	if err := ParseHeaders(r, v); err != nil {
+		return err
+	}
+
	return ParseJsonBody(r, v)
}

+// ParseHeaders parses the headers request.
+func ParseHeaders(r *http.Request, v interface{}) error {
+	m := map[string]interface{}{}
+	for k, v := range r.Header {
+		k = strings.ToLower(k)
+		if len(v) == 1 {
+			m[k] = v[0]
+		} else {
+			m[k] = v
+		}
+	}
+
+	return headerUnmarshaler.Unmarshal(m, v)
+}
+
// ParseForm parses the form request.
func ParseForm(r *http.Request, v interface{}) error {
	if err := r.ParseForm(); err != nil {
@@ -201,3 +201,26 @@ func BenchmarkParseAuto(b *testing.B) {
		}
	}
}

+func TestParseHeaders(t *testing.T) {
+	v := struct {
+		Name    string   `header:"name"`
+		Percent string   `header:"percent"`
+		Addrs   []string `header:"addrs"`
+	}{}
+	request, err := http.NewRequest("POST", "http://hello.com/", nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+	request.Header.Set("name", "chenquan")
+	request.Header.Set("percent", "1")
+	request.Header.Add("addrs", "addr1")
+	request.Header.Add("addrs", "addr2")
+	err = ParseHeaders(request, &v)
+	if err != nil {
+		t.Fatal(err)
+	}
+	assert.Equal(t, "chenquan", v.Name)
+	assert.Equal(t, "1", v.Percent)
+	assert.Equal(t, []string{"addr1", "addr2"}, v.Addrs)
+}
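With `ParseHeaders` wired into `Parse`, request structs can bind header values through a `header` tag, exactly as the new test exercises. A hedged sketch of how a handler might use it; the `github.com/tal-tech/go-zero/rest/httpx` import path and the field names are assumptions for illustration, not taken from this diff:

```go
package main

import (
	"net/http"

	"github.com/tal-tech/go-zero/rest/httpx"
)

type greetReq struct {
	Name    string   `header:"name"`
	Addrs   []string `header:"addrs"`
	Keyword string   `form:"keyword"`
}

func greetHandler(w http.ResponseWriter, r *http.Request) {
	var req greetReq
	// Parse now fills form, path, header and JSON body fields in one call.
	if err := httpx.Parse(r, &req); err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	w.Write([]byte("hello " + req.Name))
}

func main() {
	http.HandleFunc("/greet", greetHandler)
	http.ListenAndServe(":8888", nil)
}
```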
@@ -23,7 +23,7 @@ func StartHttps(host string, port int, certFile, keyFile string, handler http.Ha
	})
}

-func start(host string, port int, handler http.Handler, run func(srv *http.Server) error) error {
+func start(host string, port int, handler http.Handler, run func(srv *http.Server) error) (err error) {
	server := &http.Server{
		Addr:    fmt.Sprintf("%s:%d", host, port),
		Handler: handler,
@@ -31,7 +31,11 @@ func start(host string, port int, handler http.Handler, run func(srv *http.Serve
	waitForCalled := proc.AddWrapUpListener(func() {
		server.Shutdown(context.Background())
	})
-	defer waitForCalled()
+	defer func() {
+		if err == http.ErrServerClosed {
+			waitForCalled()
+		}
+	}()

	return run(server)
}
@@ -30,7 +30,11 @@ func GoCommand(c *cli.Context) error {
	apiFile := c.String("api")
	dir := c.String("dir")
	namingStyle := c.String("style")
+	home := c.String("home")
+
+	if len(home) > 0 {
+		util.RegisterGoctlHome(home)
+	}
	if len(apiFile) == 0 {
		return errors.New("missing -api")
	}
@@ -42,6 +42,12 @@ func DockerCommand(c *cli.Context) (err error) {
	}()

	goFile := c.String("go")
+	home := c.String("home")
+
+	if len(home) > 0 {
+		util.RegisterGoctlHome(home)
+	}
+
	if len(goFile) == 0 {
		return errors.New("-go can't be empty")
	}
@@ -5,6 +5,7 @@ import (
	"os"
	"runtime"

+	"github.com/logrusorgru/aurora"
	"github.com/tal-tech/go-zero/core/load"
	"github.com/tal-tech/go-zero/core/logx"
	"github.com/tal-tech/go-zero/core/stat"
@@ -31,7 +32,7 @@ import (
)

var (
-	buildVersion = "1.1.8"
+	buildVersion = "1.1.10"
	commands = []cli.Command{
		{
			Name: "upgrade",
@@ -116,6 +117,10 @@ var (
				Name:  "style",
				Usage: "the file naming format, see [https://github.com/tal-tech/go-zero/tree/master/tools/goctl/config/readme.md]",
			},
+			cli.StringFlag{
+				Name:  "home",
+				Usage: "the goctl home path of the template",
+			},
		},
		Action: gogen.GoCommand,
	},
@@ -233,6 +238,10 @@ var (
				Usage: "the port to expose, default none",
				Value: 0,
			},
+			cli.StringFlag{
+				Name:  "home",
+				Usage: "the goctl home path of the template",
+			},
		},
		Action: docker.DockerCommand,
	},
@@ -318,6 +327,10 @@ var (
				Usage: "the max replicas of deploy",
				Value: 10,
			},
+			cli.StringFlag{
+				Name:  "home",
+				Usage: "the goctl home path of the template",
+			},
		},
		Action: kube.DeploymentCommand,
	},
@@ -339,6 +352,10 @@ var (
				Name:  "idea",
				Usage: "whether the command execution environment is from idea plugin. [optional]",
			},
+			cli.StringFlag{
+				Name:  "home",
+				Usage: "the goctl home path of the template",
+			},
		},
		Action: rpc.RPCNew,
	},
@@ -350,6 +367,10 @@ var (
				Name:  "out, o",
				Usage: "the target path of proto",
			},
+			cli.StringFlag{
+				Name:  "home",
+				Usage: "the goctl home path of the template",
+			},
		},
		Action: rpc.RPCTemplate,
	},
@@ -365,6 +386,10 @@ var (
				Name:  "proto_path, I",
				Usage: `native command of protoc, specify the directory in which to search for imports. [optional]`,
			},
			cli.StringSliceFlag{
				Name:  "go_opt",
				Usage: `native command of protoc-gen-go, specify the mapping from proto to go, eg --go_opt=proto_import=go_package_import. [optional]`,
			},
			cli.StringFlag{
				Name:  "dir, d",
				Usage: `the target path of the code`,
@@ -377,6 +402,10 @@ var (
				Name:  "idea",
				Usage: "whether the command execution environment is from idea plugin. [optional]",
			},
+			cli.StringFlag{
+				Name:  "home",
+				Usage: "the goctl home path of the template",
+			},
		},
		Action: rpc.RPC,
	},
@@ -414,6 +443,14 @@ var (
				Name:  "idea",
				Usage: "for idea plugin [optional]",
			},
+			cli.StringFlag{
+				Name:  "database, db",
+				Usage: "the name of database [optional]",
+			},
+			cli.StringFlag{
+				Name:  "home",
+				Usage: "the goctl home path of the template",
+			},
		},
		Action: model.MysqlDDL,
	},
@@ -423,7 +460,7 @@ var (
		Flags: []cli.Flag{
			cli.StringFlag{
				Name:  "url",
-				Usage: `the data source of database,like "root:password@tcp(127.0.0.1:3306)/database`,
+				Usage: `the data source of database,like "root:password@tcp(127.0.0.1:3306)/database"`,
			},
			cli.StringFlag{
				Name:  "table, t",
@@ -445,8 +482,57 @@ var (
				Name:  "idea",
				Usage: "for idea plugin [optional]",
			},
+			cli.StringFlag{
+				Name:  "home",
+				Usage: "the goctl home path of the template",
+			},
		},
-		Action: model.MyDataSource,
+		Action: model.MySqlDataSource,
	},
},
},
+{
+	Name:  "pg",
+	Usage: `generate postgresql model`,
+	Subcommands: []cli.Command{
+		{
+			Name:  "datasource",
+			Usage: `generate model from datasource`,
+			Flags: []cli.Flag{
+				cli.StringFlag{
+					Name:  "url",
+					Usage: `the data source of database,like "postgres://root:password@127.0.0.1:54332/database?sslmode=disable"`,
+				},
+				cli.StringFlag{
+					Name:  "table, t",
+					Usage: `the table or table globbing patterns in the database`,
+				},
+				cli.StringFlag{
+					Name:  "schema, s",
+					Usage: `the table schema, default is [public]`,
+				},
+				cli.BoolFlag{
+					Name:  "cache, c",
+					Usage: "generate code with cache [optional]",
+				},
+				cli.StringFlag{
+					Name:  "dir, d",
+					Usage: "the target dir",
+				},
+				cli.StringFlag{
+					Name:  "style",
+					Usage: "the file naming format, see [https://github.com/tal-tech/go-zero/tree/master/tools/goctl/config/readme.md]",
+				},
+				cli.BoolFlag{
+					Name:  "idea",
+					Usage: "for idea plugin [optional]",
+				},
+				cli.StringFlag{
+					Name:  "home",
+					Usage: "the goctl home path of the template",
+				},
+			},
+			Action: model.PostgreSqlDataSource,
+		},
+	},
+},
@@ -470,6 +556,10 @@ var (
				Name:  "style",
				Usage: "the file naming format, see [https://github.com/tal-tech/go-zero/tree/master/tools/goctl/config/readme.md]",
			},
+			cli.StringFlag{
+				Name:  "home",
+				Usage: "the goctl home path of the template",
+			},
		},
		Action: mongo.Action,
	},
@@ -491,13 +581,25 @@ var (
	Usage: "template operation",
	Subcommands: []cli.Command{
		{
-			Name:  "init",
-			Usage: "initialize the all templates(force update)",
+			Name:  "init",
+			Usage: "initialize the all templates(force update)",
+			Flags: []cli.Flag{
+				cli.StringFlag{
+					Name:  "home",
+					Usage: "the goctl home path of the template",
+				},
+			},
			Action: tpl.GenTemplates,
		},
		{
-			Name:  "clean",
-			Usage: "clean the all cache templates",
+			Name:  "clean",
+			Usage: "clean the all cache templates",
+			Flags: []cli.Flag{
+				cli.StringFlag{
+					Name:  "home",
+					Usage: "the goctl home path of the template",
+				},
+			},
			Action: tpl.CleanTemplates,
		},
		{
@@ -508,6 +610,10 @@ var (
				Name:  "category,c",
				Usage: "the category of template, enum [api,rpc,model,docker,kube]",
			},
+			cli.StringFlag{
+				Name:  "home",
+				Usage: "the goctl home path of the template",
+			},
		},
		Action: tpl.UpdateTemplates,
	},
@@ -523,6 +629,10 @@ var (
				Name:  "name,n",
				Usage: "the target file name of template",
			},
+			cli.StringFlag{
+				Name:  "home",
+				Usage: "the goctl home path of the template",
+			},
		},
		Action: tpl.RevertTemplates,
	},
@@ -542,6 +652,6 @@ func main() {
	app.Commands = commands
	// cli already print error messages
	if err := app.Run(os.Args); err != nil {
-		fmt.Println("error:", err)
+		fmt.Println(aurora.Red("error: " + err.Error()))
	}
}
@@ -40,6 +40,12 @@ type Deployment struct {
// DeploymentCommand is used to generate the kubernetes deployment yaml files.
func DeploymentCommand(c *cli.Context) error {
	nodePort := c.Int("nodePort")
+	home := c.String("home")
+
+	if len(home) > 0 {
+		util.RegisterGoctlHome(home)
+	}
+
	// 0 to disable the nodePort type
	if nodePort != 0 && (nodePort < basePort || nodePort > portLimit) {
		return errors.New("nodePort should be between 30000 and 32767")
@@ -7,6 +7,7 @@ import (

	"github.com/tal-tech/go-zero/tools/goctl/config"
	"github.com/tal-tech/go-zero/tools/goctl/model/mongo/generate"
+	file "github.com/tal-tech/go-zero/tools/goctl/util"
	"github.com/urfave/cli"
)

@@ -16,6 +17,12 @@ func Action(ctx *cli.Context) error {
	c := ctx.Bool("cache")
	o := strings.TrimSpace(ctx.String("dir"))
	s := ctx.String("style")
+	home := ctx.String("home")
+
+	if len(home) > 0 {
+		file.RegisterGoctlHome(home)
+	}
+
	if len(tp) == 0 {
		return errors.New("missing type")
	}
@@ -264,6 +264,7 @@ OPTIONS:
   --style value       the file naming format, see [https://github.com/tal-tech/go-zero/tree/master/tools/goctl/config/readme.md]
   --cache, -c         generate code with cache [optional]
   --idea              for idea plugin [optional]
+  --database, -db     the name of database [optional]
```

* datasource
@@ -3,6 +3,7 @@ package builderx
import (
	"fmt"
	"reflect"
+	"strings"

	"github.com/go-xorm/builder"
)
@@ -81,13 +82,18 @@ func FieldNames(in interface{}) []string {
}

// RawFieldNames converts golang struct field into slice string
-func RawFieldNames(in interface{}) []string {
+func RawFieldNames(in interface{}, postgresSql ...bool) []string {
	out := make([]string, 0)
	v := reflect.ValueOf(in)
	if v.Kind() == reflect.Ptr {
		v = v.Elem()
	}

+	var pg bool
+	if len(postgresSql) > 0 {
+		pg = postgresSql[0]
+	}
+
	// we only accept structs
	if v.Kind() != reflect.Struct {
		panic(fmt.Errorf("ToMap only accepts structs; got %T", v))
@@ -98,11 +104,32 @@ func RawFieldNames(in interface{}) []string {
		// gets us a StructField
		fi := typ.Field(i)
		if tagv := fi.Tag.Get(dbTag); tagv != "" {
-			out = append(out, fmt.Sprintf("`%s`", tagv))
+			if pg {
+				out = append(out, fmt.Sprintf("%s", tagv))
+			} else {
+				out = append(out, fmt.Sprintf("`%s`", tagv))
+			}
		} else {
-			out = append(out, fmt.Sprintf(`"%s"`, fi.Name))
+			if pg {
+				out = append(out, fmt.Sprintf("%s", fi.Name))
+			} else {
+				out = append(out, fmt.Sprintf("`%s`", fi.Name))
+			}
		}
	}

	return out
}

+func PostgreSqlJoin(elems []string) string {
+	var b = new(strings.Builder)
+	for index, e := range elems {
+		b.WriteString(fmt.Sprintf("%s = $%d, ", e, index+1))
+	}
+
+	if b.Len() == 0 {
+		return b.String()
+	}
+
+	return b.String()[0 : b.Len()-2]
+}
@@ -118,3 +118,8 @@ func TestBuildSqlLike(t *testing.T) {
	assert.Equal(t, sql, actualSQL)
	assert.Equal(t, args, actualArgs)
}

+func TestJoin(t *testing.T) {
+	ret := PostgreSqlJoin([]string{"name", "age"})
+	assert.Equal(t, "name = $1, age = $2", ret)
+}
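Taken together, the optional `postgresSql` flag on `RawFieldNames` and the new `PostgreSqlJoin` helper let callers build PostgreSQL-flavoured fragments (unquoted column names, `$n` placeholders) from the same tagged struct. A small in-package sketch based on the code above; it assumes the package's `dbTag` constant is the `db` struct tag, and the struct is illustrative:

```go
package builderx

import "fmt"

type demoUser struct {
	ID   int64  `db:"id"`
	Name string `db:"name"`
	Age  int64  `db:"age"`
}

func demoPostgres() {
	// Passing true selects the PostgreSQL style: column names are not backtick-quoted.
	fields := RawFieldNames(&demoUser{}, true)
	fmt.Println(fields) // [id name age]

	// Positional $1, $2, ... placeholders instead of MySQL's ?.
	fmt.Println(PostgreSqlJoin([]string{"name", "age"})) // name = $1, age = $2
}
```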
@@ -2,29 +2,32 @@ package command

import (
	"errors"
	"io/ioutil"
	"path/filepath"
	"strings"

	"github.com/go-sql-driver/mysql"
	"github.com/tal-tech/go-zero/core/logx"
	"github.com/tal-tech/go-zero/core/stores/postgres"
	"github.com/tal-tech/go-zero/core/stores/sqlx"
	"github.com/tal-tech/go-zero/tools/goctl/config"
	"github.com/tal-tech/go-zero/tools/goctl/model/sql/gen"
	"github.com/tal-tech/go-zero/tools/goctl/model/sql/model"
	"github.com/tal-tech/go-zero/tools/goctl/model/sql/util"
	file "github.com/tal-tech/go-zero/tools/goctl/util"
	"github.com/tal-tech/go-zero/tools/goctl/util/console"
	"github.com/urfave/cli"
)

const (
-	flagSrc = "src"
-	flagDir = "dir"
-	flagCache = "cache"
-	flagIdea = "idea"
-	flagURL = "url"
-	flagTable = "table"
-	flagStyle = "style"
+	flagSrc      = "src"
+	flagDir      = "dir"
+	flagCache    = "cache"
+	flagIdea     = "idea"
+	flagURL      = "url"
+	flagTable    = "table"
+	flagStyle    = "style"
+	flagDatabase = "database"
+	flagSchema   = "schema"
)

var errNotMatched = errors.New("sql not matched")
@@ -36,31 +39,70 @@ func MysqlDDL(ctx *cli.Context) error {
	cache := ctx.Bool(flagCache)
	idea := ctx.Bool(flagIdea)
	style := ctx.String(flagStyle)
+	database := ctx.String(flagDatabase)
+	home := ctx.String("home")
+
+	if len(home) > 0 {
+		file.RegisterGoctlHome(home)
+	}
	cfg, err := config.NewConfig(style)
	if err != nil {
		return err
	}

-	return fromDDl(src, dir, cfg, cache, idea)
+	return fromDDl(src, dir, cfg, cache, idea, database)
}

-// MyDataSource generates model code from datasource
-func MyDataSource(ctx *cli.Context) error {
+// MySqlDataSource generates model code from datasource
+func MySqlDataSource(ctx *cli.Context) error {
	url := strings.TrimSpace(ctx.String(flagURL))
	dir := strings.TrimSpace(ctx.String(flagDir))
	cache := ctx.Bool(flagCache)
	idea := ctx.Bool(flagIdea)
	style := ctx.String(flagStyle)
+	home := ctx.String("home")
+
+	if len(home) > 0 {
+		file.RegisterGoctlHome(home)
+	}
+
	pattern := strings.TrimSpace(ctx.String(flagTable))
	cfg, err := config.NewConfig(style)
	if err != nil {
		return err
	}

-	return fromDataSource(url, pattern, dir, cfg, cache, idea)
+	return fromMysqlDataSource(url, pattern, dir, cfg, cache, idea)
}

-func fromDDl(src, dir string, cfg *config.Config, cache, idea bool) error {
+// PostgreSqlDataSource generates model code from datasource
+func PostgreSqlDataSource(ctx *cli.Context) error {
+	url := strings.TrimSpace(ctx.String(flagURL))
+	dir := strings.TrimSpace(ctx.String(flagDir))
+	cache := ctx.Bool(flagCache)
+	idea := ctx.Bool(flagIdea)
+	style := ctx.String(flagStyle)
+	schema := ctx.String(flagSchema)
+	home := ctx.String("home")
+
+	if len(home) > 0 {
+		file.RegisterGoctlHome(home)
+	}
+
+	if len(schema) == 0 {
+		schema = "public"
+	}
+
+	pattern := strings.TrimSpace(ctx.String(flagTable))
+	cfg, err := config.NewConfig(style)
+	if err != nil {
+		return err
+	}
+
+	return fromPostgreSqlDataSource(url, pattern, dir, schema, cfg, cache, idea)
+}
+
+func fromDDl(src, dir string, cfg *config.Config, cache, idea bool, database string) error {
	log := console.NewConsole(idea)
	src = strings.TrimSpace(src)
	if len(src) == 0 {
@@ -76,25 +118,22 @@ func fromDDl(src, dir string, cfg *config.Config, cache, idea bool) error {
		return errNotMatched
	}

-	var source []string
-	for _, file := range files {
-		data, err := ioutil.ReadFile(file)
-		if err != nil {
-			return err
-		}
-
-		source = append(source, string(data))
-	}
-
	generator, err := gen.NewDefaultGenerator(dir, cfg, gen.WithConsoleOption(log))
	if err != nil {
		return err
	}

-	return generator.StartFromDDL(strings.Join(source, "\n"), cache)
+	for _, file := range files {
+		err = generator.StartFromDDL(file, cache, database)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
}

-func fromDataSource(url, pattern, dir string, cfg *config.Config, cache, idea bool) error {
+func fromMysqlDataSource(url, pattern, dir string, cfg *config.Config, cache, idea bool) error {
	log := console.NewConsole(idea)
	if len(url) == 0 {
		log.Error("%v", "expected data source of mysql, but nothing found")
@@ -156,3 +195,58 @@ func fromDataSource(url, pattern, dir string, cfg *config.Config, cache, idea bo

	return generator.StartFromInformationSchema(matchTables, cache)
}

+func fromPostgreSqlDataSource(url, pattern, dir, schema string, cfg *config.Config, cache, idea bool) error {
+	log := console.NewConsole(idea)
+	if len(url) == 0 {
+		log.Error("%v", "expected data source of postgresql, but nothing found")
+		return nil
+	}
+
+	if len(pattern) == 0 {
+		log.Error("%v", "expected table or table globbing patterns, but nothing found")
+		return nil
+	}
+	db := postgres.New(url)
+	im := model.NewPostgreSqlModel(db)
+
+	tables, err := im.GetAllTables(schema)
+	if err != nil {
+		return err
+	}
+
+	matchTables := make(map[string]*model.Table)
+	for _, item := range tables {
+		match, err := filepath.Match(pattern, item)
+		if err != nil {
+			return err
+		}
+
+		if !match {
+			continue
+		}
+
+		columnData, err := im.FindColumns(schema, item)
+		if err != nil {
+			return err
+		}
+
+		table, err := columnData.Convert()
+		if err != nil {
+			return err
+		}
+
+		matchTables[item] = table
+	}
+
+	if len(matchTables) == 0 {
+		return errors.New("no tables matched")
+	}
+
+	generator, err := gen.NewDefaultGenerator(dir, cfg, gen.WithConsoleOption(log), gen.WithPostgreSql())
+	if err != nil {
+		return err
+	}
+
+	return generator.StartFromInformationSchema(matchTables, cache)
+}
@@ -24,12 +24,12 @@ func TestFromDDl(t *testing.T) {
	err := gen.Clean()
	assert.Nil(t, err)

-	err = fromDDl("./user.sql", t.TempDir(), cfg, true, false)
+	err = fromDDl("./user.sql", t.TempDir(), cfg, true, false, "go_zero")
	assert.Equal(t, errNotMatched, err)

	// case dir is not exists
	unknownDir := filepath.Join(t.TempDir(), "test", "user.sql")
-	err = fromDDl(unknownDir, t.TempDir(), cfg, true, false)
+	err = fromDDl(unknownDir, t.TempDir(), cfg, true, false, "go_zero")
	assert.True(t, func() bool {
		switch err.(type) {
		case *os.PathError:
@@ -40,7 +40,7 @@ func TestFromDDl(t *testing.T) {
	}())

	// case empty src
-	err = fromDDl("", t.TempDir(), cfg, true, false)
+	err = fromDDl("", t.TempDir(), cfg, true, false, "go_zero")
	if err != nil {
		assert.Equal(t, "expected path or path globbing patterns, but nothing found", err.Error())
	}
@@ -70,7 +70,7 @@ func TestFromDDl(t *testing.T) {
	_, err = os.Stat(user2Sql)
	assert.Nil(t, err)

-	err = fromDDl(filepath.Join(tempDir, "user*.sql"), tempDir, cfg, true, false)
+	err = fromDDl(filepath.Join(tempDir, "user*.sql"), tempDir, cfg, true, false, "go_zero")
	assert.Nil(t, err)

	_, err = os.Stat(filepath.Join(tempDir, "usermodel.go"))
@@ -3,9 +3,57 @@ package converter
import (
	"fmt"
	"strings"

+	"github.com/zeromicro/ddl-parser/parser"
)

-var commonMysqlDataTypeMap = map[string]string{
+var commonMysqlDataTypeMap = map[int]string{
+	// For consistency, all integer types are converted to int64
+	// number
+	parser.Bool:      "int64",
+	parser.Boolean:   "int64",
+	parser.TinyInt:   "int64",
+	parser.SmallInt:  "int64",
+	parser.MediumInt: "int64",
+	parser.Int:       "int64",
+	parser.MiddleInt: "int64",
+	parser.Int1:      "int64",
+	parser.Int2:      "int64",
+	parser.Int3:      "int64",
+	parser.Int4:      "int64",
+	parser.Int8:      "int64",
+	parser.Integer:   "int64",
+	parser.BigInt:    "int64",
+	parser.Float:     "float64",
+	parser.Float4:    "float64",
+	parser.Float8:    "float64",
+	parser.Double:    "float64",
+	parser.Decimal:   "float64",
+	// date&time
+	parser.Date:      "time.Time",
+	parser.DateTime:  "time.Time",
+	parser.Timestamp: "time.Time",
+	parser.Time:      "string",
+	parser.Year:      "int64",
+	// string
+	parser.Char:       "string",
+	parser.VarChar:    "string",
+	parser.Binary:     "string",
+	parser.VarBinary:  "string",
+	parser.TinyText:   "string",
+	parser.Text:       "string",
+	parser.MediumText: "string",
+	parser.LongText:   "string",
+	parser.Enum:       "string",
+	parser.Set:        "string",
+	parser.Json:       "string",
+	parser.Blob:       "string",
+	parser.LongBlob:   "string",
+	parser.MediumBlob: "string",
+	parser.TinyBlob:   "string",
+}
+
+var commonMysqlDataTypeMap2 = map[string]string{
	// For consistency, all integer types are converted to int64
	// number
	"bool": "int64",
@@ -37,13 +85,27 @@ var commonMysqlDataTypeMap = map[string]string{
	"enum":       "string",
	"set":        "string",
	"json":       "string",
	"blob":       "string",
	"longblob":   "string",
	"mediumblob": "string",
	"tinyblob":   "string",
}

// ConvertDataType converts mysql column type into golang type
-func ConvertDataType(dataBaseType string, isDefaultNull bool) (string, error) {
-	tp, ok := commonMysqlDataTypeMap[strings.ToLower(dataBaseType)]
+func ConvertDataType(dataBaseType int, isDefaultNull bool) (string, error) {
+	tp, ok := commonMysqlDataTypeMap[dataBaseType]
	if !ok {
-		return "", fmt.Errorf("unexpected database type: %s", dataBaseType)
+		return "", fmt.Errorf("unsupported database type: %v", dataBaseType)
	}

	return mayConvertNullType(tp, isDefaultNull), nil
}

+// ConvertStringDataType converts mysql column type into golang type
+func ConvertStringDataType(dataBaseType string, isDefaultNull bool) (string, error) {
+	tp, ok := commonMysqlDataTypeMap2[strings.ToLower(dataBaseType)]
+	if !ok {
+		return "", fmt.Errorf("unsupported database type: %s", dataBaseType)
+	}
+
+	return mayConvertNullType(tp, isDefaultNull), nil
@@ -4,25 +4,23 @@ import (
	"testing"

	"github.com/stretchr/testify/assert"
+	"github.com/zeromicro/ddl-parser/parser"
)

func TestConvertDataType(t *testing.T) {
-	v, err := ConvertDataType("tinyint", false)
+	v, err := ConvertDataType(parser.TinyInt, false)
	assert.Nil(t, err)
	assert.Equal(t, "int64", v)

-	v, err = ConvertDataType("tinyint", true)
+	v, err = ConvertDataType(parser.TinyInt, true)
	assert.Nil(t, err)
	assert.Equal(t, "sql.NullInt64", v)

-	v, err = ConvertDataType("timestamp", false)
+	v, err = ConvertDataType(parser.Timestamp, false)
	assert.Nil(t, err)
	assert.Equal(t, "time.Time", v)

-	v, err = ConvertDataType("timestamp", true)
+	v, err = ConvertDataType(parser.Timestamp, true)
	assert.Nil(t, err)
	assert.Equal(t, "sql.NullTime", v)

	_, err = ConvertDataType("float32", false)
	assert.NotNil(t, err)
}
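After this change, `ConvertDataType` is keyed by the ddl-parser token constants, while the old string-keyed lookup survives as `ConvertStringDataType` backed by `commonMysqlDataTypeMap2`. A brief sketch of calling both from the same package; the expected outputs are inferred from the mappings and the existing tests shown above, so treat them as assumptions:

```go
package converter

import (
	"fmt"

	"github.com/zeromicro/ddl-parser/parser"
)

func demoConvert() {
	// Token-based lookup, for columns produced by the new DDL parser.
	goType, err := ConvertDataType(parser.BigInt, false)
	fmt.Println(goType, err) // int64 <nil>

	// String-based lookup, e.g. for information_schema column type names.
	goType, err = ConvertStringDataType("timestamp", true)
	fmt.Println(goType, err) // expected sql.NullTime <nil>, mirroring the old string-based test
}
```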
@@ -9,7 +9,7 @@ import (
	"github.com/tal-tech/go-zero/tools/goctl/util/stringx"
)

-func genDelete(table Table, withCache bool) (string, string, error) {
+func genDelete(table Table, withCache, postgreSql bool) (string, string, error) {
	keySet := collection.NewSet()
	keyVariableSet := collection.NewSet()
	keySet.AddStr(table.PrimaryCacheKey.KeyExpression)
@@ -34,8 +34,9 @@ func genDelete(table Table, withCache bool) (string, string, error) {
		"lowerStartCamelPrimaryKey": stringx.From(table.PrimaryKey.Name.ToCamel()).Untitle(),
		"dataType":                  table.PrimaryKey.DataType,
		"keys":                      strings.Join(keySet.KeysStr(), "\n"),
-		"originalPrimaryKey":        wrapWithRawString(table.PrimaryKey.Name.Source()),
+		"originalPrimaryKey":        wrapWithRawString(table.PrimaryKey.Name.Source(), postgreSql),
		"keyValues":                 strings.Join(keyVariableSet.KeysStr(), ", "),
+		"postgreSql":                postgreSql,
	})
	if err != nil {
		return "", "", err
@@ -6,7 +6,7 @@ import (
	"github.com/tal-tech/go-zero/tools/goctl/util/stringx"
)

-func genFindOne(table Table, withCache bool) (string, string, error) {
+func genFindOne(table Table, withCache, postgreSql bool) (string, string, error) {
	camel := table.Name.ToCamel()
	text, err := util.LoadTemplate(category, findOneTemplateFile, template.FindOne)
	if err != nil {
@@ -19,11 +19,12 @@ func genFindOne(table Table, withCache bool) (string, string, error) {
		"withCache":                 withCache,
		"upperStartCamelObject":     camel,
		"lowerStartCamelObject":     stringx.From(camel).Untitle(),
-		"originalPrimaryKey":        wrapWithRawString(table.PrimaryKey.Name.Source()),
+		"originalPrimaryKey":        wrapWithRawString(table.PrimaryKey.Name.Source(), postgreSql),
		"lowerStartCamelPrimaryKey": stringx.From(table.PrimaryKey.Name.ToCamel()).Untitle(),
		"dataType":                  table.PrimaryKey.DataType,
		"cacheKey":                  table.PrimaryCacheKey.KeyExpression,
		"cacheKeyVariable":          table.PrimaryCacheKey.KeyLeft,
+		"postgreSql":                postgreSql,
	})
	if err != nil {
		return "", "", err
@@ -15,7 +15,7 @@ type findOneCode struct {
	cacheExtra string
}

-func genFindOneByField(table Table, withCache bool) (*findOneCode, error) {
+func genFindOneByField(table Table, withCache, postgreSql bool) (*findOneCode, error) {
	text, err := util.LoadTemplate(category, findOneByFieldTemplateFile, template.FindOneByField)
	if err != nil {
		return nil, err
@@ -25,7 +25,7 @@ func genFindOneByField(table Table, withCache bool) (*findOneCode, error) {
	var list []string
	camelTableName := table.Name.ToCamel()
	for _, key := range table.UniqueCacheKey {
-		in, paramJoinString, originalFieldString := convertJoin(key)
+		in, paramJoinString, originalFieldString := convertJoin(key, postgreSql)

		output, err := t.Execute(map[string]interface{}{
			"upperStartCamelObject": camelTableName,
@@ -38,6 +38,7 @@ func genFindOneByField(table Table, withCache bool) (*findOneCode, error) {
			"lowerStartCamelField":      paramJoinString,
			"upperStartCamelPrimaryKey": table.PrimaryKey.Name.ToCamel(),
			"originalField":             originalFieldString,
+			"postgreSql":                postgreSql,
		})
		if err != nil {
			return nil, err
@@ -87,7 +88,8 @@ func genFindOneByField(table Table, withCache bool) (*findOneCode, error) {
		"upperStartCamelObject": camelTableName,
		"primaryKeyLeft":        table.PrimaryCacheKey.VarLeft,
		"lowerStartCamelObject": stringx.From(camelTableName).Untitle(),
-		"originalPrimaryField":  wrapWithRawString(table.PrimaryKey.Name.Source()),
+		"originalPrimaryField":  wrapWithRawString(table.PrimaryKey.Name.Source(), postgreSql),
+		"postgreSql":            postgreSql,
	})
	if err != nil {
		return nil, err
@@ -106,13 +108,17 @@ func genFindOneByField(table Table, withCache bool) (*findOneCode, error) {
	}, nil
}

-func convertJoin(key Key) (in, paramJoinString, originalFieldString string) {
+func convertJoin(key Key, postgreSql bool) (in, paramJoinString, originalFieldString string) {
	var inJoin, paramJoin, argJoin Join
-	for _, f := range key.Fields {
+	for index, f := range key.Fields {
		param := stringx.From(f.Name.ToCamel()).Untitle()
		inJoin = append(inJoin, fmt.Sprintf("%s %s", param, f.DataType))
		paramJoin = append(paramJoin, param)
-		argJoin = append(argJoin, fmt.Sprintf("%s = ?", wrapWithRawString(f.Name.Source())))
+		if postgreSql {
+			argJoin = append(argJoin, fmt.Sprintf("%s = $%d", wrapWithRawString(f.Name.Source(), postgreSql), index+1))
+		} else {
+			argJoin = append(argJoin, fmt.Sprintf("%s = ?", wrapWithRawString(f.Name.Source(), postgreSql)))
+		}
	}
	if len(inJoin) > 0 {
		in = inJoin.With(", ").Source()
@@ -29,8 +29,9 @@ type (
		// source string
		dir string
		console.Console
-		pkg string
-		cfg *config.Config
+		pkg          string
+		cfg          *config.Config
+		isPostgreSql bool
	}

	// Option defines a function with argument defaultGenerator
@@ -84,14 +85,21 @@ func WithConsoleOption(c console.Console) Option {
	}
}

+// WithPostgreSql marks defaultGenerator.isPostgreSql true
+func WithPostgreSql() Option {
+	return func(generator *defaultGenerator) {
+		generator.isPostgreSql = true
+	}
+}
+
func newDefaultOption() Option {
	return func(generator *defaultGenerator) {
		generator.Console = console.NewColorConsole()
	}
}

-func (g *defaultGenerator) StartFromDDL(source string, withCache bool) error {
-	modelList, err := g.genFromDDL(source, withCache)
+func (g *defaultGenerator) StartFromDDL(filename string, withCache bool, database string) error {
+	modelList, err := g.genFromDDL(filename, withCache, database)
	if err != nil {
		return err
	}
@@ -174,21 +182,20 @@ func (g *defaultGenerator) createFile(modelList map[string]string) error {
}

// ret1: key-table name,value-code
-func (g *defaultGenerator) genFromDDL(source string, withCache bool) (map[string]string, error) {
-	ddlList := g.split(source)
+func (g *defaultGenerator) genFromDDL(filename string, withCache bool, database string) (map[string]string, error) {
	m := make(map[string]string)
-	for _, ddl := range ddlList {
-		table, err := parser.Parse(ddl)
+	tables, err := parser.Parse(filename, database)
	if err != nil {
		return nil, err
	}

+	for _, e := range tables {
+		code, err := g.genModel(*e, withCache)
+		if err != nil {
+			return nil, err
+		}
+
-		code, err := g.genModel(*table, withCache)
-		if err != nil {
-			return nil, err
-		}
-
-		m[table.Name.Source()] = code
+		m[e.Name.Source()] = code
	}

	return m, nil
@@ -220,34 +227,34 @@ func (g *defaultGenerator) genModel(in parser.Table, withCache bool) (string, er
	table.UniqueCacheKey = uniqueKey
	table.ContainsUniqueCacheKey = len(uniqueKey) > 0

-	varsCode, err := genVars(table, withCache)
+	varsCode, err := genVars(table, withCache, g.isPostgreSql)
	if err != nil {
		return "", err
	}

-	insertCode, insertCodeMethod, err := genInsert(table, withCache)
+	insertCode, insertCodeMethod, err := genInsert(table, withCache, g.isPostgreSql)
	if err != nil {
		return "", err
	}

	findCode := make([]string, 0)
-	findOneCode, findOneCodeMethod, err := genFindOne(table, withCache)
+	findOneCode, findOneCodeMethod, err := genFindOne(table, withCache, g.isPostgreSql)
	if err != nil {
		return "", err
	}

-	ret, err := genFindOneByField(table, withCache)
+	ret, err := genFindOneByField(table, withCache, g.isPostgreSql)
	if err != nil {
		return "", err
	}

	findCode = append(findCode, findOneCode, ret.findOneMethod)
-	updateCode, updateCodeMethod, err := genUpdate(table, withCache)
+	updateCode, updateCodeMethod, err := genUpdate(table, withCache, g.isPostgreSql)
	if err != nil {
		return "", err
	}

-	deleteCode, deleteCodeMethod, err := genDelete(table, withCache)
+	deleteCode, deleteCodeMethod, err := genDelete(table, withCache, g.isPostgreSql)
	if err != nil {
		return "", err
	}
@@ -259,7 +266,7 @@ func (g *defaultGenerator) genModel(in parser.Table, withCache bool) (string, er
		return "", err
	}

-	newCode, err := genNew(table, withCache)
+	newCode, err := genNew(table, withCache, g.isPostgreSql)
	if err != nil {
		return "", err
	}
@@ -310,7 +317,11 @@ func (g *defaultGenerator) executeModel(code *code) (*bytes.Buffer, error) {
	return output, nil
}

-func wrapWithRawString(v string) string {
+func wrapWithRawString(v string, postgreSql bool) string {
+	if postgreSql {
+		return v
+	}
+
	if v == "`" {
		return v
	}
@@ -2,6 +2,7 @@ package gen

import (
	"database/sql"
+	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
@@ -20,6 +21,11 @@ var source = "CREATE TABLE `test_user` (\n `id` bigint NOT NULL AUTO_INCREMENT,
func TestCacheModel(t *testing.T) {
	logx.Disable()
	_ = Clean()
+
+	sqlFile := filepath.Join(t.TempDir(), "tmp.sql")
+	err := ioutil.WriteFile(sqlFile, []byte(source), 0o777)
+	assert.Nil(t, err)
+
	dir := filepath.Join(t.TempDir(), "./testmodel")
	cacheDir := filepath.Join(dir, "cache")
	noCacheDir := filepath.Join(dir, "nocache")
@@ -28,7 +34,7 @@ func TestCacheModel(t *testing.T) {
	})
	assert.Nil(t, err)

-	err = g.StartFromDDL(source, true)
+	err = g.StartFromDDL(sqlFile, true, "go_zero")
	assert.Nil(t, err)
	assert.True(t, func() bool {
		_, err := os.Stat(filepath.Join(cacheDir, "TestUserModel.go"))
@@ -39,7 +45,7 @@ func TestCacheModel(t *testing.T) {
	})
	assert.Nil(t, err)

-	err = g.StartFromDDL(source, false)
+	err = g.StartFromDDL(sqlFile, false, "go_zero")
	assert.Nil(t, err)
	assert.True(t, func() bool {
		_, err := os.Stat(filepath.Join(noCacheDir, "testusermodel.go"))
@@ -50,6 +56,11 @@ func TestCacheModel(t *testing.T) {
func TestNamingModel(t *testing.T) {
	logx.Disable()
	_ = Clean()
+
+	sqlFile := filepath.Join(t.TempDir(), "tmp.sql")
+	err := ioutil.WriteFile(sqlFile, []byte(source), 0o777)
+	assert.Nil(t, err)
+
	dir, _ := filepath.Abs("./testmodel")
	camelDir := filepath.Join(dir, "camel")
	snakeDir := filepath.Join(dir, "snake")
@@ -61,7 +72,7 @@ func TestNamingModel(t *testing.T) {
	})
	assert.Nil(t, err)

-	err = g.StartFromDDL(source, true)
+	err = g.StartFromDDL(sqlFile, true, "go_zero")
	assert.Nil(t, err)
	assert.True(t, func() bool {
		_, err := os.Stat(filepath.Join(camelDir, "TestUserModel.go"))
@@ -72,7 +83,7 @@ func TestNamingModel(t *testing.T) {
	})
	assert.Nil(t, err)

-	err = g.StartFromDDL(source, true)
+	err = g.StartFromDDL(sqlFile, true, "go_zero")
	assert.Nil(t, err)
	assert.True(t, func() bool {
		_, err := os.Stat(filepath.Join(snakeDir, "test_user_model.go"))
@@ -81,10 +92,11 @@ func TestNamingModel(t *testing.T) {
}

func TestWrapWithRawString(t *testing.T) {
-	assert.Equal(t, "``", wrapWithRawString(""))
-	assert.Equal(t, "``", wrapWithRawString("``"))
-	assert.Equal(t, "`a`", wrapWithRawString("a"))
-	assert.Equal(t, "` `", wrapWithRawString(" "))
+	assert.Equal(t, "``", wrapWithRawString("", false))
+	assert.Equal(t, "``", wrapWithRawString("``", false))
+	assert.Equal(t, "`a`", wrapWithRawString("a", false))
+	assert.Equal(t, "a", wrapWithRawString("a", true))
+	assert.Equal(t, "` `", wrapWithRawString(" ", false))
}

func TestFields(t *testing.T) {
@@ -1,6 +1,7 @@
package gen

import (
	"fmt"
	"strings"

	"github.com/tal-tech/go-zero/core/collection"

@@ -9,7 +10,7 @@ import (
	"github.com/tal-tech/go-zero/tools/goctl/util/stringx"
)

func genInsert(table Table, withCache bool) (string, string, error) {
func genInsert(table Table, withCache, postgreSql bool) (string, string, error) {
	keySet := collection.NewSet()
	keyVariableSet := collection.NewSet()
	for _, key := range table.UniqueCacheKey {

@@ -19,6 +20,7 @@ func genInsert(table Table, withCache bool) (string, string, error) {
	expressions := make([]string, 0)
	expressionValues := make([]string, 0)
	var count int
	for _, field := range table.Fields {
		camel := field.Name.ToCamel()
		if camel == "CreateTime" || camel == "UpdateTime" {

@@ -31,7 +33,12 @@ func genInsert(table Table, withCache bool) (string, string, error) {
		}
	}

		expressions = append(expressions, "?")
		count += 1
		if postgreSql {
			expressions = append(expressions, fmt.Sprintf("$%d", count))
		} else {
			expressions = append(expressions, "?")
		}
		expressionValues = append(expressionValues, "data."+camel)
	}
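For context, a standalone sketch of what the placeholder loop now produces in PostgreSQL mode; the field names are borrowed from the test_user table used in the tests and are only illustrative:

	// With postgreSql enabled the placeholders are numbered instead of
	// emitting "?" for every insertable field.
	fields := []string{"mobile", "class", "name"}
	var expressions []string
	for i := range fields {
		expressions = append(expressions, fmt.Sprintf("$%d", i+1))
	}
	fmt.Println(strings.Join(expressions, ", ")) // $1, $2, $3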
@@ -39,9 +39,9 @@ type Join []string

func genCacheKeys(table parser.Table) (Key, []Key) {
	var primaryKey Key
	var uniqueKey []Key
	primaryKey = genCacheKey(table.Name, []*parser.Field{&table.PrimaryKey.Field})
	primaryKey = genCacheKey(table.Db, table.Name, []*parser.Field{&table.PrimaryKey.Field})
	for _, each := range table.UniqueIndex {
		uniqueKey = append(uniqueKey, genCacheKey(table.Name, each))
		uniqueKey = append(uniqueKey, genCacheKey(table.Db, table.Name, each))
	}
	sort.Slice(uniqueKey, func(i, j int) bool {
		return uniqueKey[i].VarLeft < uniqueKey[j].VarLeft

@@ -50,7 +50,7 @@ func genCacheKeys(table parser.Table) (Key, []Key) {
	return primaryKey, uniqueKey
}

func genCacheKey(table stringx.String, in []*parser.Field) Key {
func genCacheKey(db stringx.String, table stringx.String, in []*parser.Field) Key {
	var (
		varLeftJoin, varRightJon, fieldNameJoin Join
		varLeft, varRight, varExpression        string

@@ -59,9 +59,9 @@ func genCacheKey(table stringx.String, in []*parser.Field) Key {
		keyLeft, keyRight, dataKeyRight, keyExpression, dataKeyExpression string
	)

	varLeftJoin = append(varLeftJoin, "cache", table.Source())
	varRightJon = append(varRightJon, "cache", table.Source())
	keyLeftJoin = append(keyLeftJoin, table.Source())
	varLeftJoin = append(varLeftJoin, "cache", db.Source(), table.Source())
	varRightJon = append(varRightJon, "cache", db.Source(), table.Source())
	keyLeftJoin = append(keyLeftJoin, db.Source(), table.Source())

	for _, each := range in {
		varLeftJoin = append(varLeftJoin, each.Name.Source())
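A short illustration of what the extra db segment changes in the generated cache key variables; the values match the updated expectations in the keys test later in this diff:

	// Before: keys were scoped by table only.
	//   cacheUserIdPrefix = "cache:user:id:"
	// After: the database name is inserted as an extra segment.
	//   cacheGoZeroUserIdPrefix = "cache:goZero:user:id:"
	const cacheGoZeroUserIdPrefix = "cache:goZero:user:id:"
	goZeroUserIdKey := fmt.Sprintf("%s%v", cacheGoZeroUserIdPrefix, 1)
	fmt.Println(goZeroUserIdKey) // cache:goZero:user:id:1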
@@ -11,35 +11,32 @@ import (
|
||||
|
||||
func TestGenCacheKeys(t *testing.T) {
|
||||
primaryField := &parser.Field{
|
||||
Name: stringx.From("id"),
|
||||
DataBaseType: "bigint",
|
||||
DataType: "int64",
|
||||
Comment: "自增id",
|
||||
SeqInIndex: 1,
|
||||
Name: stringx.From("id"),
|
||||
DataType: "int64",
|
||||
Comment: "自增id",
|
||||
SeqInIndex: 1,
|
||||
}
|
||||
mobileField := &parser.Field{
|
||||
Name: stringx.From("mobile"),
|
||||
DataBaseType: "varchar",
|
||||
DataType: "string",
|
||||
Comment: "手机号",
|
||||
SeqInIndex: 1,
|
||||
Name: stringx.From("mobile"),
|
||||
DataType: "string",
|
||||
Comment: "手机号",
|
||||
SeqInIndex: 1,
|
||||
}
|
||||
classField := &parser.Field{
|
||||
Name: stringx.From("class"),
|
||||
DataBaseType: "varchar",
|
||||
DataType: "string",
|
||||
Comment: "班级",
|
||||
SeqInIndex: 1,
|
||||
Name: stringx.From("class"),
|
||||
DataType: "string",
|
||||
Comment: "班级",
|
||||
SeqInIndex: 1,
|
||||
}
|
||||
nameField := &parser.Field{
|
||||
Name: stringx.From("name"),
|
||||
DataBaseType: "varchar",
|
||||
DataType: "string",
|
||||
Comment: "姓名",
|
||||
SeqInIndex: 2,
|
||||
Name: stringx.From("name"),
|
||||
DataType: "string",
|
||||
Comment: "姓名",
|
||||
SeqInIndex: 2,
|
||||
}
|
||||
primariCacheKey, uniqueCacheKey := genCacheKeys(parser.Table{
|
||||
Name: stringx.From("user"),
|
||||
Db: stringx.From("go_zero"),
|
||||
PrimaryKey: parser.Primary{
|
||||
Field: *primaryField,
|
||||
AutoIncrement: true,
|
||||
@@ -53,23 +50,20 @@ func TestGenCacheKeys(t *testing.T) {
|
||||
nameField,
|
||||
},
|
||||
},
|
||||
NormalIndex: nil,
|
||||
Fields: []*parser.Field{
|
||||
primaryField,
|
||||
mobileField,
|
||||
classField,
|
||||
nameField,
|
||||
{
|
||||
Name: stringx.From("createTime"),
|
||||
DataBaseType: "timestamp",
|
||||
DataType: "time.Time",
|
||||
Comment: "创建时间",
|
||||
Name: stringx.From("createTime"),
|
||||
DataType: "time.Time",
|
||||
Comment: "创建时间",
|
||||
},
|
||||
{
|
||||
Name: stringx.From("updateTime"),
|
||||
DataBaseType: "timestamp",
|
||||
DataType: "time.Time",
|
||||
Comment: "更新时间",
|
||||
Name: stringx.From("updateTime"),
|
||||
DataType: "time.Time",
|
||||
Comment: "更新时间",
|
||||
},
|
||||
},
|
||||
})
|
||||
@@ -77,14 +71,14 @@ func TestGenCacheKeys(t *testing.T) {
|
||||
t.Run("primaryCacheKey", func(t *testing.T) {
|
||||
assert.Equal(t, true, func() bool {
|
||||
return cacheKeyEqual(primariCacheKey, Key{
|
||||
VarLeft: "cacheUserIdPrefix",
|
||||
VarRight: `"cache:user:id:"`,
|
||||
VarExpression: `cacheUserIdPrefix = "cache:user:id:"`,
|
||||
KeyLeft: "userIdKey",
|
||||
KeyRight: `fmt.Sprintf("%s%v", cacheUserIdPrefix, id)`,
|
||||
DataKeyRight: `fmt.Sprintf("%s%v", cacheUserIdPrefix, data.Id)`,
|
||||
KeyExpression: `userIdKey := fmt.Sprintf("%s%v", cacheUserIdPrefix, id)`,
|
||||
DataKeyExpression: `userIdKey := fmt.Sprintf("%s%v", cacheUserIdPrefix, data.Id)`,
|
||||
VarLeft: "cacheGoZeroUserIdPrefix",
|
||||
VarRight: `"cache:goZero:user:id:"`,
|
||||
VarExpression: `cacheGoZeroUserIdPrefix = "cache:goZero:user:id:"`,
|
||||
KeyLeft: "goZeroUserIdKey",
|
||||
KeyRight: `fmt.Sprintf("%s%v", cacheGoZeroUserIdPrefix, id)`,
|
||||
DataKeyRight: `fmt.Sprintf("%s%v", cacheGoZeroUserIdPrefix, data.Id)`,
|
||||
KeyExpression: `goZeroUserIdKey := fmt.Sprintf("%s%v", cacheGoZeroUserIdPrefix, id)`,
|
||||
DataKeyExpression: `goZeroUserIdKey := fmt.Sprintf("%s%v", cacheGoZeroUserIdPrefix, data.Id)`,
|
||||
FieldNameJoin: []string{"id"},
|
||||
})
|
||||
}())
|
||||
@@ -94,25 +88,25 @@ func TestGenCacheKeys(t *testing.T) {
|
||||
assert.Equal(t, true, func() bool {
|
||||
expected := []Key{
|
||||
{
|
||||
VarLeft: "cacheUserClassNamePrefix",
|
||||
VarRight: `"cache:user:class:name:"`,
|
||||
VarExpression: `cacheUserClassNamePrefix = "cache:user:class:name:"`,
|
||||
KeyLeft: "userClassNameKey",
|
||||
KeyRight: `fmt.Sprintf("%s%v:%v", cacheUserClassNamePrefix, class, name)`,
|
||||
DataKeyRight: `fmt.Sprintf("%s%v:%v", cacheUserClassNamePrefix, data.Class, data.Name)`,
|
||||
KeyExpression: `userClassNameKey := fmt.Sprintf("%s%v:%v", cacheUserClassNamePrefix, class, name)`,
|
||||
DataKeyExpression: `userClassNameKey := fmt.Sprintf("%s%v:%v", cacheUserClassNamePrefix, data.Class, data.Name)`,
|
||||
VarLeft: "cacheGoZeroUserClassNamePrefix",
|
||||
VarRight: `"cache:goZero:user:class:name:"`,
|
||||
VarExpression: `cacheGoZeroUserClassNamePrefix = "cache:goZero:user:class:name:"`,
|
||||
KeyLeft: "goZeroUserClassNameKey",
|
||||
KeyRight: `fmt.Sprintf("%s%v:%v", cacheGoZeroUserClassNamePrefix, class, name)`,
|
||||
DataKeyRight: `fmt.Sprintf("%s%v:%v", cacheGoZeroUserClassNamePrefix, data.Class, data.Name)`,
|
||||
KeyExpression: `goZeroUserClassNameKey := fmt.Sprintf("%s%v:%v", cacheGoZeroUserClassNamePrefix, class, name)`,
|
||||
DataKeyExpression: `goZeroUserClassNameKey := fmt.Sprintf("%s%v:%v", cacheGoZeroUserClassNamePrefix, data.Class, data.Name)`,
|
||||
FieldNameJoin: []string{"class", "name"},
|
||||
},
|
||||
{
|
||||
VarLeft: "cacheUserMobilePrefix",
|
||||
VarRight: `"cache:user:mobile:"`,
|
||||
VarExpression: `cacheUserMobilePrefix = "cache:user:mobile:"`,
|
||||
KeyLeft: "userMobileKey",
|
||||
KeyRight: `fmt.Sprintf("%s%v", cacheUserMobilePrefix, mobile)`,
|
||||
DataKeyRight: `fmt.Sprintf("%s%v", cacheUserMobilePrefix, data.Mobile)`,
|
||||
KeyExpression: `userMobileKey := fmt.Sprintf("%s%v", cacheUserMobilePrefix, mobile)`,
|
||||
DataKeyExpression: `userMobileKey := fmt.Sprintf("%s%v", cacheUserMobilePrefix, data.Mobile)`,
|
||||
VarLeft: "cacheGoZeroUserMobilePrefix",
|
||||
VarRight: `"cache:goZero:user:mobile:"`,
|
||||
VarExpression: `cacheGoZeroUserMobilePrefix = "cache:goZero:user:mobile:"`,
|
||||
KeyLeft: "goZeroUserMobileKey",
|
||||
KeyRight: `fmt.Sprintf("%s%v", cacheGoZeroUserMobilePrefix, mobile)`,
|
||||
DataKeyRight: `fmt.Sprintf("%s%v", cacheGoZeroUserMobilePrefix, data.Mobile)`,
|
||||
KeyExpression: `goZeroUserMobileKey := fmt.Sprintf("%s%v", cacheGoZeroUserMobilePrefix, mobile)`,
|
||||
DataKeyExpression: `goZeroUserMobileKey := fmt.Sprintf("%s%v", cacheGoZeroUserMobilePrefix, data.Mobile)`,
|
||||
FieldNameJoin: []string{"mobile"},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -1,20 +1,27 @@
package gen

import (
	"fmt"

	"github.com/tal-tech/go-zero/tools/goctl/model/sql/template"
	"github.com/tal-tech/go-zero/tools/goctl/util"
)

func genNew(table Table, withCache bool) (string, error) {
func genNew(table Table, withCache, postgreSql bool) (string, error) {
	text, err := util.LoadTemplate(category, modelNewTemplateFile, template.New)
	if err != nil {
		return "", err
	}

	t := fmt.Sprintf(`"%s"`, wrapWithRawString(table.Name.Source(), postgreSql))
	if postgreSql {
		t = "`" + fmt.Sprintf(`"%s"."%s"`, table.Db.Source(), table.Name.Source()) + "`"
	}

	output, err := util.With("new").
		Parse(text).
		Execute(map[string]interface{}{
			"table": wrapWithRawString(table.Name.Source()),
			"table": t,
			"withCache": withCache,
			"upperStartCamelObject": table.Name.ToCamel(),
		})
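To make the new table literal concrete, a small sketch using the go_zero database and test_user table that appear in the tests of this changeset:

	// MySQL branch: the backtick-wrapped name is quoted,
	// so the generated model carries table: "`test_user`".
	mysqlLit := fmt.Sprintf(`"%s"`, "`test_user`")
	// PostgreSQL branch: a schema-qualified, double-quoted identifier is
	// emitted as a raw string, i.e. table: `"go_zero"."test_user"`.
	pgLit := "`" + fmt.Sprintf(`"%s"."%s"`, "go_zero", "test_user") + "`"
	fmt.Println(mysqlLit, pgLit)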
@@ -9,7 +9,7 @@ import (
	"github.com/tal-tech/go-zero/tools/goctl/util/stringx"
)

func genUpdate(table Table, withCache bool) (string, string, error) {
func genUpdate(table Table, withCache, postgreSql bool) (string, string, error) {
	expressionValues := make([]string, 0)
	for _, field := range table.Fields {
		camel := field.Name.ToCamel()

@@ -50,8 +50,9 @@ func genUpdate(table Table, withCache bool) (string, string, error) {
		"primaryCacheKey": table.PrimaryCacheKey.DataKeyExpression,
		"primaryKeyVariable": table.PrimaryCacheKey.KeyLeft,
		"lowerStartCamelObject": stringx.From(camelTableName).Untitle(),
		"originalPrimaryKey": wrapWithRawString(table.PrimaryKey.Name.Source()),
		"originalPrimaryKey": wrapWithRawString(table.PrimaryKey.Name.Source(), postgreSql),
		"expressionValues": strings.Join(expressionValues, ", "),
		"postgreSql": postgreSql,
	})
	if err != nil {
		return "", "", nil
@@ -8,7 +8,7 @@ import (
	"github.com/tal-tech/go-zero/tools/goctl/util/stringx"
)

func genVars(table Table, withCache bool) (string, error) {
func genVars(table Table, withCache, postgreSql bool) (string, error) {
	keys := make([]string, 0)
	keys = append(keys, table.PrimaryCacheKey.VarExpression)
	for _, v := range table.UniqueCacheKey {

@@ -27,8 +27,9 @@ func genVars(table Table, withCache bool) (string, error) {
		"upperStartCamelObject": camel,
		"cacheKeys": strings.Join(keys, "\n"),
		"autoIncrement": table.PrimaryKey.AutoIncrement,
		"originalPrimaryKey": wrapWithRawString(table.PrimaryKey.Name.Source()),
		"originalPrimaryKey": wrapWithRawString(table.PrimaryKey.Name.Source(), postgreSql),
		"withCache": withCache,
		"postgreSql": postgreSql,
	})
	if err != nil {
		return "", err
234 tools/goctl/model/sql/model/postgresqlmodel.go Normal file
@@ -0,0 +1,234 @@
package model

import (
	"database/sql"
	"strings"

	"github.com/tal-tech/go-zero/core/stores/sqlx"
)

var (
	p2m = map[string]string{
		"int8":    "bigint",
		"numeric": "bigint",
		"float8":  "double",
		"float4":  "float",
		"int2":    "smallint",
		"int4":    "integer",
	}
)

// PostgreSqlModel gets table information from information_schema、pg_catalog
type PostgreSqlModel struct {
	conn sqlx.SqlConn
}

// PostgreColumn describes a column in table
type PostgreColumn struct {
	Num               sql.NullInt32  `db:"num"`
	Field             sql.NullString `db:"field"`
	Type              sql.NullString `db:"type"`
	NotNull           sql.NullBool   `db:"not_null"`
	Comment           sql.NullString `db:"comment"`
	ColumnDefault     sql.NullString `db:"column_default"`
	IdentityIncrement sql.NullInt32  `db:"identity_increment"`
}

// PostgreIndex describes an index for a column
type PostgreIndex struct {
	IndexName  sql.NullString `db:"index_name"`
	IndexId    sql.NullInt32  `db:"index_id"`
	IsUnique   sql.NullBool   `db:"is_unique"`
	IsPrimary  sql.NullBool   `db:"is_primary"`
	ColumnName sql.NullString `db:"column_name"`
	IndexSort  sql.NullInt32  `db:"index_sort"`
}

// NewPostgreSqlModel creates an instance and return
func NewPostgreSqlModel(conn sqlx.SqlConn) *PostgreSqlModel {
	return &PostgreSqlModel{
		conn: conn,
	}
}

// GetAllTables selects all tables from TABLE_SCHEMA
func (m *PostgreSqlModel) GetAllTables(schema string) ([]string, error) {
	query := `select table_name from information_schema.tables where table_schema = $1`
	var tables []string
	err := m.conn.QueryRows(&tables, query, schema)
	if err != nil {
		return nil, err
	}

	return tables, nil
}

// FindColumns return columns in specified database and table
func (m *PostgreSqlModel) FindColumns(schema, table string) (*ColumnData, error) {
	querySql := `select t.num,t.field,t.type,t.not_null,t.comment, c.column_default, identity_increment
from (
  SELECT a.attnum AS num,
         c.relname,
         a.attname AS field,
         t.typname AS type,
         a.atttypmod AS lengthvar,
         a.attnotnull AS not_null,
         b.description AS comment
  FROM pg_class c,
       pg_attribute a
         LEFT OUTER JOIN pg_description b ON a.attrelid = b.objoid AND a.attnum = b.objsubid,
       pg_type t
  WHERE c.relname = $1
    and a.attnum > 0
    and a.attrelid = c.oid
    and a.atttypid = t.oid
  ORDER BY a.attnum) AS t
  left join information_schema.columns AS c on t.relname = c.table_name
  and t.field = c.column_name and c.table_schema = $2`

	var reply []*PostgreColumn
	err := m.conn.QueryRowsPartial(&reply, querySql, table, schema)
	if err != nil {
		return nil, err
	}

	list, err := m.getColumns(schema, table, reply)
	if err != nil {
		return nil, err
	}

	var columnData ColumnData
	columnData.Db = schema
	columnData.Table = table
	columnData.Columns = list
	return &columnData, nil
}

func (m *PostgreSqlModel) getColumns(schema, table string, in []*PostgreColumn) ([]*Column, error) {
	index, err := m.getIndex(schema, table)
	if err != nil {
		return nil, err
	}

	var list []*Column
	for _, e := range in {
		var dft interface{}
		if len(e.ColumnDefault.String) > 0 {
			dft = e.ColumnDefault
		}

		isNullAble := "YES"
		if e.NotNull.Bool {
			isNullAble = "NO"
		}

		extra := "auto_increment"
		if e.IdentityIncrement.Int32 != 1 {
			extra = ""
		}

		if len(index[e.Field.String]) > 0 {
			for _, i := range index[e.Field.String] {
				list = append(list, &Column{
					DbColumn: &DbColumn{
						Name:            e.Field.String,
						DataType:        m.convertPostgreSqlTypeIntoMysqlType(e.Type.String),
						Extra:           extra,
						Comment:         e.Comment.String,
						ColumnDefault:   dft,
						IsNullAble:      isNullAble,
						OrdinalPosition: int(e.Num.Int32),
					},
					Index: i,
				})
			}
		} else {
			list = append(list, &Column{
				DbColumn: &DbColumn{
					Name:            e.Field.String,
					DataType:        m.convertPostgreSqlTypeIntoMysqlType(e.Type.String),
					Extra:           extra,
					Comment:         e.Comment.String,
					ColumnDefault:   dft,
					IsNullAble:      isNullAble,
					OrdinalPosition: int(e.Num.Int32),
				},
			})
		}
	}

	return list, nil
}

func (m *PostgreSqlModel) convertPostgreSqlTypeIntoMysqlType(in string) string {
	r, ok := p2m[strings.ToLower(in)]
	if ok {
		return r
	}

	return in
}

func (m *PostgreSqlModel) getIndex(schema, table string) (map[string][]*DbIndex, error) {
	indexes, err := m.FindIndex(schema, table)
	if err != nil {
		return nil, err
	}

	var index = make(map[string][]*DbIndex)
	for _, e := range indexes {
		if e.IsPrimary.Bool {
			index[e.ColumnName.String] = append(index[e.ColumnName.String], &DbIndex{
				IndexName:  indexPri,
				SeqInIndex: int(e.IndexSort.Int32),
			})
			continue
		}

		nonUnique := 0
		if !e.IsUnique.Bool {
			nonUnique = 1
		}

		index[e.ColumnName.String] = append(index[e.ColumnName.String], &DbIndex{
			IndexName:  e.IndexName.String,
			NonUnique:  nonUnique,
			SeqInIndex: int(e.IndexSort.Int32),
		})
	}
	return index, nil
}

// FindIndex finds index with given schema, table and column.
func (m *PostgreSqlModel) FindIndex(schema, table string) ([]*PostgreIndex, error) {
	querySql := `select A.INDEXNAME AS index_name,
       C.INDEXRELID AS index_id,
       C.INDISUNIQUE AS is_unique,
       C.INDISPRIMARY AS is_primary,
       G.ATTNAME AS column_name,
       G.attnum AS index_sort
from PG_AM B
       left join PG_CLASS F on
         B.OID = F.RELAM
       left join PG_STAT_ALL_INDEXES E on
         F.OID = E.INDEXRELID
       left join PG_INDEX C on
         E.INDEXRELID = C.INDEXRELID
       left outer join PG_DESCRIPTION D on
         C.INDEXRELID = D.OBJOID,
     PG_INDEXES A,
     pg_attribute G
where A.SCHEMANAME = E.SCHEMANAME
  and A.TABLENAME = E.RELNAME
  and A.INDEXNAME = E.INDEXRELNAME
  and F.oid = G.attrelid
  and E.SCHEMANAME = $1
  and E.RELNAME = $2
order by C.INDEXRELID,G.attnum`

	var reply []*PostgreIndex
	err := m.conn.QueryRowsPartial(&reply, querySql, schema, table)
	if err != nil {
		return nil, err
	}

	return reply, nil
}
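A minimal usage sketch for the new model reader. The DSN, the "public" schema, and the postgres.New helper are assumptions for illustration, not part of this changeset:

	package main

	import (
		"fmt"
		"log"

		"github.com/tal-tech/go-zero/core/stores/postgres" // assumed connection helper
		"github.com/tal-tech/go-zero/tools/goctl/model/sql/model"
	)

	func main() {
		// Placeholder DSN; replace with a real connection string.
		conn := postgres.New("postgres://user:pass@127.0.0.1:5432/go_zero?sslmode=disable")
		m := model.NewPostgreSqlModel(conn)

		tables, err := m.GetAllTables("public")
		if err != nil {
			log.Fatal(err)
		}

		for _, table := range tables {
			data, err := m.FindColumns("public", table)
			if err != nil {
				log.Fatal(err)
			}
			for _, c := range data.Columns {
				// DataType is already mapped to its MySQL-style name by p2m.
				fmt.Println(table, c.Name, c.DataType)
			}
		}
	}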
@@ -1,11 +0,0 @@
package parser

import (
	"errors"
)

var (
	errUnsupportDDL      = errors.New("unexpected type")
	errTableBodyNotFound = errors.New("create table spec not found")
	errPrimaryKey        = errors.New("unexpected join primary key")
)
@@ -2,6 +2,7 @@ package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
@@ -11,7 +12,7 @@ import (
|
||||
"github.com/tal-tech/go-zero/tools/goctl/model/sql/util"
|
||||
"github.com/tal-tech/go-zero/tools/goctl/util/console"
|
||||
"github.com/tal-tech/go-zero/tools/goctl/util/stringx"
|
||||
"github.com/xwb1989/sqlparser"
|
||||
"github.com/zeromicro/ddl-parser/parser"
|
||||
)
|
||||
|
||||
const timeImport = "time.Time"
|
||||
@@ -20,9 +21,9 @@ type (
|
||||
// Table describes a mysql table
|
||||
Table struct {
|
||||
Name stringx.String
|
||||
Db stringx.String
|
||||
PrimaryKey Primary
|
||||
UniqueIndex map[string][]*Field
|
||||
NormalIndex map[string][]*Field
|
||||
Fields []*Field
|
||||
}
|
||||
|
||||
@@ -35,7 +36,6 @@ type (
|
||||
// Field describes a table field
|
||||
Field struct {
|
||||
Name stringx.String
|
||||
DataBaseType string
|
||||
DataType string
|
||||
Comment string
|
||||
SeqInIndex int
|
||||
@@ -47,73 +47,116 @@ type (
|
||||
)
|
||||
|
||||
// Parse parses ddl into golang structure
|
||||
func Parse(ddl string) (*Table, error) {
|
||||
stmt, err := sqlparser.ParseStrictDDL(ddl)
|
||||
func Parse(filename string, database string) ([]*Table, error) {
|
||||
p := parser.NewParser()
|
||||
tables, err := p.From(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ddlStmt, ok := stmt.(*sqlparser.DDL)
|
||||
if !ok {
|
||||
return nil, errUnsupportDDL
|
||||
indexNameGen := func(column ...string) string {
|
||||
return strings.Join(column, "_")
|
||||
}
|
||||
|
||||
action := ddlStmt.Action
|
||||
if action != sqlparser.CreateStr {
|
||||
return nil, fmt.Errorf("expected [CREATE] action,but found: %s", action)
|
||||
}
|
||||
prefix := filepath.Base(filename)
|
||||
var list []*Table
|
||||
for _, e := range tables {
|
||||
columns := e.Columns
|
||||
|
||||
tableName := ddlStmt.NewName.Name.String()
|
||||
tableSpec := ddlStmt.TableSpec
|
||||
if tableSpec == nil {
|
||||
return nil, errTableBodyNotFound
|
||||
}
|
||||
var (
|
||||
primaryColumnSet = collection.NewSet()
|
||||
|
||||
columns := tableSpec.Columns
|
||||
indexes := tableSpec.Indexes
|
||||
primaryColumn, uniqueKeyMap, normalKeyMap, err := convertIndexes(indexes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
primaryColumn string
|
||||
uniqueKeyMap = make(map[string][]string)
|
||||
normalKeyMap = make(map[string][]string)
|
||||
)
|
||||
|
||||
primaryKey, fieldM, err := convertColumns(columns, primaryColumn)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, column := range columns {
|
||||
if column.Constraint != nil {
|
||||
if column.Constraint.Primary {
|
||||
primaryColumnSet.AddStr(column.Name)
|
||||
}
|
||||
|
||||
var fields []*Field
|
||||
for _, e := range fieldM {
|
||||
fields = append(fields, e)
|
||||
}
|
||||
if column.Constraint.Unique {
|
||||
indexName := indexNameGen(column.Name, "unique")
|
||||
uniqueKeyMap[indexName] = []string{column.Name}
|
||||
}
|
||||
|
||||
var (
|
||||
uniqueIndex = make(map[string][]*Field)
|
||||
normalIndex = make(map[string][]*Field)
|
||||
)
|
||||
|
||||
for indexName, each := range uniqueKeyMap {
|
||||
for _, columnName := range each {
|
||||
uniqueIndex[indexName] = append(uniqueIndex[indexName], fieldM[columnName])
|
||||
if column.Constraint.Key {
|
||||
indexName := indexNameGen(column.Name, "idx")
|
||||
uniqueKeyMap[indexName] = []string{column.Name}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for indexName, each := range normalKeyMap {
|
||||
for _, columnName := range each {
|
||||
normalIndex[indexName] = append(normalIndex[indexName], fieldM[columnName])
|
||||
for _, e := range e.Constraints {
|
||||
if len(e.ColumnPrimaryKey) > 1 {
|
||||
return nil, fmt.Errorf("%s: unexpected join primary key", prefix)
|
||||
}
|
||||
|
||||
if len(e.ColumnPrimaryKey) == 1 {
|
||||
primaryColumn = e.ColumnPrimaryKey[0]
|
||||
primaryColumnSet.AddStr(e.ColumnPrimaryKey[0])
|
||||
}
|
||||
|
||||
if len(e.ColumnUniqueKey) > 0 {
|
||||
list := append([]string(nil), e.ColumnUniqueKey...)
|
||||
list = append(list, "unique")
|
||||
indexName := indexNameGen(list...)
|
||||
uniqueKeyMap[indexName] = e.ColumnUniqueKey
|
||||
}
|
||||
}
|
||||
|
||||
if primaryColumnSet.Count() > 1 {
|
||||
return nil, fmt.Errorf("%s: unexpected join primary key", prefix)
|
||||
}
|
||||
|
||||
primaryKey, fieldM, err := convertColumns(columns, primaryColumn)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var fields []*Field
|
||||
// sort
|
||||
for _, c := range columns {
|
||||
field, ok := fieldM[c.Name]
|
||||
if ok {
|
||||
fields = append(fields, field)
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
uniqueIndex = make(map[string][]*Field)
|
||||
normalIndex = make(map[string][]*Field)
|
||||
)
|
||||
|
||||
for indexName, each := range uniqueKeyMap {
|
||||
for _, columnName := range each {
|
||||
uniqueIndex[indexName] = append(uniqueIndex[indexName], fieldM[columnName])
|
||||
}
|
||||
}
|
||||
|
||||
for indexName, each := range normalKeyMap {
|
||||
for _, columnName := range each {
|
||||
normalIndex[indexName] = append(normalIndex[indexName], fieldM[columnName])
|
||||
}
|
||||
}
|
||||
|
||||
checkDuplicateUniqueIndex(uniqueIndex, e.Name)
|
||||
|
||||
list = append(list, &Table{
|
||||
Name: stringx.From(e.Name),
|
||||
Db: stringx.From(database),
|
||||
PrimaryKey: primaryKey,
|
||||
UniqueIndex: uniqueIndex,
|
||||
Fields: fields,
|
||||
})
|
||||
}
|
||||
|
||||
checkDuplicateUniqueIndex(uniqueIndex, tableName, normalIndex)
|
||||
return &Table{
|
||||
Name: stringx.From(tableName),
|
||||
PrimaryKey: primaryKey,
|
||||
UniqueIndex: uniqueIndex,
|
||||
NormalIndex: normalIndex,
|
||||
Fields: fields,
|
||||
}, nil
|
||||
return list, nil
|
||||
}
|
||||
|
||||
func checkDuplicateUniqueIndex(uniqueIndex map[string][]*Field, tableName string, normalIndex map[string][]*Field) {
|
||||
func checkDuplicateUniqueIndex(uniqueIndex map[string][]*Field, tableName string) {
|
||||
log := console.NewColorConsole()
|
||||
uniqueSet := collection.NewSet()
|
||||
for k, i := range uniqueIndex {
|
||||
@@ -131,26 +174,9 @@ func checkDuplicateUniqueIndex(uniqueIndex map[string][]*Field, tableName string
|
||||
|
||||
uniqueSet.AddStr(joinRet)
|
||||
}
|
||||
|
||||
normalIndexSet := collection.NewSet()
|
||||
for k, i := range normalIndex {
|
||||
var list []string
|
||||
for _, e := range i {
|
||||
list = append(list, e.Name.Source())
|
||||
}
|
||||
|
||||
joinRet := strings.Join(list, ",")
|
||||
if normalIndexSet.Contains(joinRet) {
|
||||
log.Warning("table %s: duplicate index %s", tableName, joinRet)
|
||||
delete(normalIndex, k)
|
||||
continue
|
||||
}
|
||||
|
||||
normalIndexSet.Add(joinRet)
|
||||
}
|
||||
}
|
||||
|
||||
func convertColumns(columns []*sqlparser.ColumnDefinition, primaryColumn string) (Primary, map[string]*Field, error) {
|
||||
func convertColumns(columns []*parser.Column, primaryColumn string) (Primary, map[string]*Field, error) {
|
||||
var (
|
||||
primaryKey Primary
|
||||
fieldM = make(map[string]*Field)
|
||||
@@ -161,35 +187,35 @@ func convertColumns(columns []*sqlparser.ColumnDefinition, primaryColumn string)
|
||||
continue
|
||||
}
|
||||
|
||||
var comment string
|
||||
if column.Type.Comment != nil {
|
||||
comment = string(column.Type.Comment.Val)
|
||||
}
|
||||
var (
|
||||
comment string
|
||||
isDefaultNull bool
|
||||
)
|
||||
|
||||
isDefaultNull := true
|
||||
if column.Type.NotNull {
|
||||
isDefaultNull = false
|
||||
} else {
|
||||
if column.Type.Default != nil {
|
||||
if column.Constraint != nil {
|
||||
comment = column.Constraint.Comment
|
||||
isDefaultNull = !column.Constraint.HasDefaultValue
|
||||
if column.Name == primaryColumn && column.Constraint.AutoIncrement {
|
||||
isDefaultNull = false
|
||||
}
|
||||
}
|
||||
|
||||
dataType, err := converter.ConvertDataType(column.Type.Type, isDefaultNull)
|
||||
dataType, err := converter.ConvertDataType(column.DataType.Type(), isDefaultNull)
|
||||
if err != nil {
|
||||
return Primary{}, nil, err
|
||||
}
|
||||
|
||||
var field Field
|
||||
field.Name = stringx.From(column.Name.String())
|
||||
field.DataBaseType = column.Type.Type
|
||||
field.Name = stringx.From(column.Name)
|
||||
field.DataType = dataType
|
||||
field.Comment = util.TrimNewLine(comment)
|
||||
|
||||
if field.Name.Source() == primaryColumn {
|
||||
primaryKey = Primary{
|
||||
Field: field,
|
||||
AutoIncrement: bool(column.Type.Autoincrement),
|
||||
Field: field,
|
||||
}
|
||||
if column.Constraint != nil {
|
||||
primaryKey.AutoIncrement = column.Constraint.AutoIncrement
|
||||
}
|
||||
}
|
||||
|
||||
@@ -198,60 +224,6 @@ func convertColumns(columns []*sqlparser.ColumnDefinition, primaryColumn string)
|
||||
return primaryKey, fieldM, nil
|
||||
}
|
||||
|
||||
func convertIndexes(indexes []*sqlparser.IndexDefinition) (string, map[string][]string, map[string][]string, error) {
|
||||
var primaryColumn string
|
||||
uniqueKeyMap := make(map[string][]string)
|
||||
normalKeyMap := make(map[string][]string)
|
||||
|
||||
isCreateTimeOrUpdateTime := func(name string) bool {
|
||||
camelColumnName := stringx.From(name).ToCamel()
|
||||
// by default, createTime|updateTime findOne is not used.
|
||||
return camelColumnName == "CreateTime" || camelColumnName == "UpdateTime"
|
||||
}
|
||||
|
||||
for _, index := range indexes {
|
||||
info := index.Info
|
||||
if info == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
indexName := index.Info.Name.String()
|
||||
if info.Primary {
|
||||
if len(index.Columns) > 1 {
|
||||
return "", nil, nil, errPrimaryKey
|
||||
}
|
||||
columnName := index.Columns[0].Column.String()
|
||||
if isCreateTimeOrUpdateTime(columnName) {
|
||||
continue
|
||||
}
|
||||
|
||||
primaryColumn = columnName
|
||||
continue
|
||||
} else if info.Unique {
|
||||
for _, each := range index.Columns {
|
||||
columnName := each.Column.String()
|
||||
if isCreateTimeOrUpdateTime(columnName) {
|
||||
break
|
||||
}
|
||||
|
||||
uniqueKeyMap[indexName] = append(uniqueKeyMap[indexName], columnName)
|
||||
}
|
||||
} else if info.Spatial {
|
||||
// do nothing
|
||||
} else {
|
||||
for _, each := range index.Columns {
|
||||
columnName := each.Column.String()
|
||||
if isCreateTimeOrUpdateTime(columnName) {
|
||||
break
|
||||
}
|
||||
|
||||
normalKeyMap[indexName] = append(normalKeyMap[indexName], each.Column.String())
|
||||
}
|
||||
}
|
||||
}
|
||||
return primaryColumn, uniqueKeyMap, normalKeyMap, nil
|
||||
}
|
||||
|
||||
// ContainsTime returns true if contains golang type time.Time
|
||||
func (t *Table) ContainsTime() bool {
|
||||
for _, item := range t.Fields {
|
||||
@@ -265,15 +237,15 @@ func (t *Table) ContainsTime() bool {
|
||||
// ConvertDataType converts mysql data type into golang data type
|
||||
func ConvertDataType(table *model.Table) (*Table, error) {
|
||||
isPrimaryDefaultNull := table.PrimaryKey.ColumnDefault == nil && table.PrimaryKey.IsNullAble == "YES"
|
||||
primaryDataType, err := converter.ConvertDataType(table.PrimaryKey.DataType, isPrimaryDefaultNull)
|
||||
primaryDataType, err := converter.ConvertStringDataType(table.PrimaryKey.DataType, isPrimaryDefaultNull)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var reply Table
|
||||
reply.UniqueIndex = map[string][]*Field{}
|
||||
reply.NormalIndex = map[string][]*Field{}
|
||||
reply.Name = stringx.From(table.Table)
|
||||
reply.Db = stringx.From(table.Db)
|
||||
seqInIndex := 0
|
||||
if table.PrimaryKey.Index != nil {
|
||||
seqInIndex = table.PrimaryKey.Index.SeqInIndex
|
||||
@@ -282,7 +254,6 @@ func ConvertDataType(table *model.Table) (*Table, error) {
|
||||
reply.PrimaryKey = Primary{
|
||||
Field: Field{
|
||||
Name: stringx.From(table.PrimaryKey.Name),
|
||||
DataBaseType: table.PrimaryKey.DataType,
|
||||
DataType: primaryDataType,
|
||||
Comment: table.PrimaryKey.Comment,
|
||||
SeqInIndex: seqInIndex,
|
||||
@@ -338,29 +309,6 @@ func ConvertDataType(table *model.Table) (*Table, error) {
|
||||
reply.UniqueIndex[indexName] = list
|
||||
}
|
||||
|
||||
normalIndexSet := collection.NewSet()
|
||||
for indexName, each := range table.NormalIndex {
|
||||
var list []*Field
|
||||
var normalJoin []string
|
||||
for _, c := range each {
|
||||
list = append(list, fieldM[c.Name])
|
||||
normalJoin = append(normalJoin, c.Name)
|
||||
}
|
||||
|
||||
normalKey := strings.Join(normalJoin, ",")
|
||||
if normalIndexSet.Contains(normalKey) {
|
||||
log.Warning("table %s: duplicate index, %s", table.Table, normalKey)
|
||||
continue
|
||||
}
|
||||
|
||||
normalIndexSet.AddStr(normalKey)
|
||||
sort.Slice(list, func(i, j int) bool {
|
||||
return list[i].SeqInIndex < list[j].SeqInIndex
|
||||
})
|
||||
|
||||
reply.NormalIndex[indexName] = list
|
||||
}
|
||||
|
||||
return &reply, nil
|
||||
}
|
||||
|
||||
@@ -368,7 +316,7 @@ func getTableFields(table *model.Table) (map[string]*Field, error) {
|
||||
fieldM := make(map[string]*Field)
|
||||
for _, each := range table.Columns {
|
||||
isDefaultNull := each.ColumnDefault == nil && each.IsNullAble == "YES"
|
||||
dt, err := converter.ConvertDataType(each.DataType, isDefaultNull)
|
||||
dt, err := converter.ConvertStringDataType(each.DataType, isDefaultNull)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -379,7 +327,6 @@ func getTableFields(table *model.Table) (map[string]*Field, error) {
|
||||
|
||||
field := &Field{
|
||||
Name: stringx.From(each.Name),
|
||||
DataBaseType: each.DataType,
|
||||
DataType: dt,
|
||||
Comment: each.Comment,
|
||||
SeqInIndex: columnSeqInIndex,
|
||||
|
||||
@@ -1,88 +1,47 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"io/ioutil"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/tal-tech/go-zero/tools/goctl/model/sql/model"
|
||||
"github.com/tal-tech/go-zero/tools/goctl/model/sql/util"
|
||||
"github.com/tal-tech/go-zero/tools/goctl/util/stringx"
|
||||
)
|
||||
|
||||
func TestParsePlainText(t *testing.T) {
|
||||
_, err := Parse("plain text")
|
||||
sqlFile := filepath.Join(t.TempDir(), "tmp.sql")
|
||||
err := ioutil.WriteFile(sqlFile, []byte("plain text"), 0o777)
|
||||
assert.Nil(t, err)
|
||||
|
||||
_, err = Parse(sqlFile, "go_zero")
|
||||
assert.NotNil(t, err)
|
||||
}
|
||||
|
||||
func TestParseSelect(t *testing.T) {
|
||||
_, err := Parse("select * from user")
|
||||
assert.Equal(t, errUnsupportDDL, err)
|
||||
sqlFile := filepath.Join(t.TempDir(), "tmp.sql")
|
||||
err := ioutil.WriteFile(sqlFile, []byte("select * from user"), 0o777)
|
||||
assert.Nil(t, err)
|
||||
|
||||
tables, err := Parse(sqlFile, "go_zero")
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, 0, len(tables))
|
||||
}
|
||||
|
||||
func TestParseCreateTable(t *testing.T) {
|
||||
table, err := Parse("CREATE TABLE `test_user` (\n `id` bigint NOT NULL AUTO_INCREMENT,\n `mobile` varchar(255) COLLATE utf8mb4_bin NOT NULL comment '手\\t机 号',\n `class` bigint NOT NULL comment '班级',\n `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL comment '姓\n 名',\n `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP comment '创建\\r时间',\n `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,\n PRIMARY KEY (`id`),\n UNIQUE KEY `mobile_unique` (`mobile`),\n UNIQUE KEY `class_name_unique` (`class`,`name`),\n KEY `create_index` (`create_time`),\n KEY `name_index` (`name`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;")
|
||||
sqlFile := filepath.Join(t.TempDir(), "tmp.sql")
|
||||
err := ioutil.WriteFile(sqlFile, []byte("CREATE TABLE `test_user` (\n `id` bigint NOT NULL AUTO_INCREMENT,\n `mobile` varchar(255) COLLATE utf8mb4_bin NOT NULL comment '手\\t机 号',\n `class` bigint NOT NULL comment '班级',\n `name` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL comment '姓\n 名',\n `create_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP comment '创建\\r时间',\n `update_time` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,\n PRIMARY KEY (`id`),\n UNIQUE KEY `mobile_unique` (`mobile`),\n UNIQUE KEY `class_name_unique` (`class`,`name`),\n KEY `create_index` (`create_time`),\n KEY `name_index` (`name`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;"), 0o777)
|
||||
assert.Nil(t, err)
|
||||
|
||||
tables, err := Parse(sqlFile, "go_zero")
|
||||
assert.Equal(t, 1, len(tables))
|
||||
table := tables[0]
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, "test_user", table.Name.Source())
|
||||
assert.Equal(t, "id", table.PrimaryKey.Name.Source())
|
||||
assert.Equal(t, true, table.ContainsTime())
|
||||
assert.Equal(t, true, func() bool {
|
||||
mobileUniqueIndex, ok := table.UniqueIndex["mobile_unique"]
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
classNameUniqueIndex, ok := table.UniqueIndex["class_name_unique"]
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
equal := func(f1, f2 []*Field) bool {
|
||||
sort.Slice(f1, func(i, j int) bool {
|
||||
return f1[i].Name.Source() < f1[j].Name.Source()
|
||||
})
|
||||
sort.Slice(f2, func(i, j int) bool {
|
||||
return f2[i].Name.Source() < f2[j].Name.Source()
|
||||
})
|
||||
|
||||
if len(f2) != len(f2) {
|
||||
return false
|
||||
}
|
||||
|
||||
for index, f := range f1 {
|
||||
if f1[index].Name.Source() != f.Name.Source() {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
if !equal(mobileUniqueIndex, []*Field{
|
||||
{
|
||||
Name: stringx.From("mobile"),
|
||||
DataBaseType: "varchar",
|
||||
DataType: "string",
|
||||
SeqInIndex: 1,
|
||||
},
|
||||
}) {
|
||||
return false
|
||||
}
|
||||
|
||||
return equal(classNameUniqueIndex, []*Field{
|
||||
{
|
||||
Name: stringx.From("class"),
|
||||
DataBaseType: "bigint",
|
||||
DataType: "int64",
|
||||
SeqInIndex: 1,
|
||||
},
|
||||
{
|
||||
Name: stringx.From("name"),
|
||||
DataBaseType: "varchar",
|
||||
DataType: "string",
|
||||
SeqInIndex: 2,
|
||||
},
|
||||
})
|
||||
}())
|
||||
assert.Equal(t, 2, len(table.UniqueIndex))
|
||||
assert.True(t, func() bool {
|
||||
for _, e := range table.Fields {
|
||||
if e.Comment != util.TrimNewLine(e.Comment) {
|
||||
|
||||
@@ -10,9 +10,9 @@ func (m *default{{.upperStartCamelObject}}Model) Delete({{.lowerStartCamelPrimar

	{{.keys}}
	_, err {{if .containsIndexCache}}={{else}}:={{end}} m.Exec(func(conn sqlx.SqlConn) (result sql.Result, err error) {
		query := fmt.Sprintf("delete from %s where {{.originalPrimaryKey}} = ?", m.table)
		query := fmt.Sprintf("delete from %s where {{.originalPrimaryKey}} = {{if .postgreSql}}$1{{else}}?{{end}}", m.table)
		return conn.Exec(query, {{.lowerStartCamelPrimaryKey}})
	}, {{.keyValues}}){{else}}query := fmt.Sprintf("delete from %s where {{.originalPrimaryKey}} = ?", m.table)
	}, {{.keyValues}}){{else}}query := fmt.Sprintf("delete from %s where {{.originalPrimaryKey}} = {{if .postgreSql}}$1{{else}}?{{end}}", m.table)
	_,err:=m.conn.Exec(query, {{.lowerStartCamelPrimaryKey}}){{end}}
	return err
}

@@ -6,7 +6,7 @@ func (m *default{{.upperStartCamelObject}}Model) FindOne({{.lowerStartCamelPrima
	{{if .withCache}}{{.cacheKey}}
	var resp {{.upperStartCamelObject}}
	err := m.QueryRow(&resp, {{.cacheKeyVariable}}, func(conn sqlx.SqlConn, v interface{}) error {
		query := fmt.Sprintf("select %s from %s where {{.originalPrimaryKey}} = ? limit 1", {{.lowerStartCamelObject}}Rows, m.table)
		query := fmt.Sprintf("select %s from %s where {{.originalPrimaryKey}} = {{if .postgreSql}}$1{{else}}?{{end}} limit 1", {{.lowerStartCamelObject}}Rows, m.table)
		return conn.QueryRow(v, query, {{.lowerStartCamelPrimaryKey}})
	})
	switch err {

@@ -16,7 +16,7 @@ func (m *default{{.upperStartCamelObject}}Model) FindOne({{.lowerStartCamelPrima
		return nil, ErrNotFound
	default:
		return nil, err
	}{{else}}query := fmt.Sprintf("select %s from %s where {{.originalPrimaryKey}} = ? limit 1", {{.lowerStartCamelObject}}Rows, m.table)
	}{{else}}query := fmt.Sprintf("select %s from %s where {{.originalPrimaryKey}} = {{if .postgreSql}}$1{{else}}?{{end}} limit 1", {{.lowerStartCamelObject}}Rows, m.table)
	var resp {{.upperStartCamelObject}}
	err := m.conn.QueryRow(&resp, query, {{.lowerStartCamelPrimaryKey}})
	switch err {

@@ -71,7 +71,7 @@ func (m *default{{.upperStartCamelObject}}Model) formatPrimary(primary interface
}

func (m *default{{.upperStartCamelObject}}Model) queryPrimary(conn sqlx.SqlConn, v, primary interface{}) error {
	query := fmt.Sprintf("select %s from %s where {{.originalPrimaryField}} = ? limit 1", {{.lowerStartCamelObject}}Rows, m.table )
	query := fmt.Sprintf("select %s from %s where {{.originalPrimaryField}} = {{if .postgreSql}}$1{{else}}?{{end}} limit 1", {{.lowerStartCamelObject}}Rows, m.table )
	return conn.QueryRow(v, query, primary)
}
`

@@ -5,7 +5,7 @@ var New = `
func New{{.upperStartCamelObject}}Model(conn sqlx.SqlConn{{if .withCache}}, c cache.CacheConf{{end}}) {{.upperStartCamelObject}}Model {
	return &default{{.upperStartCamelObject}}Model{
		{{if .withCache}}CachedConn: sqlc.NewConn(conn, c){{else}}conn:conn{{end}},
		table: "{{.table}}",
		table: {{.table}},
	}
}
`

@@ -5,9 +5,9 @@ var Update = `
func (m *default{{.upperStartCamelObject}}Model) Update(data {{.upperStartCamelObject}}) error {
	{{if .withCache}}{{.keys}}
	_, err := m.Exec(func(conn sqlx.SqlConn) (result sql.Result, err error) {
		query := fmt.Sprintf("update %s set %s where {{.originalPrimaryKey}} = ?", m.table, {{.lowerStartCamelObject}}RowsWithPlaceHolder)
		query := fmt.Sprintf("update %s set %s where {{.originalPrimaryKey}} = {{if .postgreSql}}$1{{else}}?{{end}}", m.table, {{.lowerStartCamelObject}}RowsWithPlaceHolder)
		return conn.Exec(query, {{.expressionValues}})
	}, {{.keyValues}}){{else}}query := fmt.Sprintf("update %s set %s where {{.originalPrimaryKey}} = ?", m.table, {{.lowerStartCamelObject}}RowsWithPlaceHolder)
	}, {{.keyValues}}){{else}}query := fmt.Sprintf("update %s set %s where {{.originalPrimaryKey}} = {{if .postgreSql}}$1{{else}}?{{end}}", m.table, {{.lowerStartCamelObject}}RowsWithPlaceHolder)
	_,err:=m.conn.Exec(query, {{.expressionValues}}){{end}}
	return err
}

@@ -5,11 +5,14 @@ import "fmt"

// Vars defines a template for var block in model
var Vars = fmt.Sprintf(`
var (
	{{.lowerStartCamelObject}}FieldNames = builderx.RawFieldNames(&{{.upperStartCamelObject}}{})
	{{.lowerStartCamelObject}}FieldNames = builderx.RawFieldNames(&{{.upperStartCamelObject}}{}{{if .postgreSql}},true{{end}})
	{{.lowerStartCamelObject}}Rows = strings.Join({{.lowerStartCamelObject}}FieldNames, ",")
	{{.lowerStartCamelObject}}RowsExpectAutoSet = strings.Join(stringx.Remove({{.lowerStartCamelObject}}FieldNames, {{if .autoIncrement}}"{{.originalPrimaryKey}}",{{end}} "%screate_time%s", "%supdate_time%s"), ",")
	{{.lowerStartCamelObject}}RowsWithPlaceHolder = strings.Join(stringx.Remove({{.lowerStartCamelObject}}FieldNames, "{{.originalPrimaryKey}}", "%screate_time%s", "%supdate_time%s"), "=?,") + "=?"
	{{.lowerStartCamelObject}}RowsExpectAutoSet = {{if .postgreSql}}strings.Join(stringx.Remove({{.lowerStartCamelObject}}FieldNames, {{if .autoIncrement}}"{{.originalPrimaryKey}}",{{end}} "%screate_time%s", "%supdate_time%s"), ","){{else}}strings.Join(stringx.Remove({{.lowerStartCamelObject}}FieldNames, {{if .autoIncrement}}"{{.originalPrimaryKey}}",{{end}} "%screate_time%s", "%supdate_time%s"), ","){{end}}
	{{.lowerStartCamelObject}}RowsWithPlaceHolder = {{if .postgreSql}}builderx.PostgreSqlJoin(stringx.Remove({{.lowerStartCamelObject}}FieldNames, "{{.originalPrimaryKey}}", "%screate_time%s", "%supdate_time%s")){{else}}strings.Join(stringx.Remove({{.lowerStartCamelObject}}FieldNames, "{{.originalPrimaryKey}}", "%screate_time%s", "%supdate_time%s"), "=?,") + "=?"{{end}}

	{{if .withCache}}{{.cacheKeys}}{{end}}
)
`, "`", "`", "`", "`", "`", "`", "`", "`")
`, "", "", "", "", // postgreSql mode
	"`", "`", "`", "`",
	"", "", "", "", // postgreSql mode
	"`", "`", "`", "`")
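For context, the practical difference in the generated vars block: the MySQL branch joins the updatable columns with "=?," while the PostgreSQL branch is expected to number the placeholders. A hedged sketch of that idea; the exact output of builderx.PostgreSqlJoin is assumed here, including the starting index:

	// MySQL-style placeholder join:
	cols := []string{"mobile", "class", "name"}
	mysqlSet := strings.Join(cols, "=?,") + "=?" // mobile=?,class=?,name=?
	// Assumed PostgreSQL-style join; numbering starts at $1 purely for
	// illustration, the real helper may offset it for the primary key.
	var parts []string
	for i, c := range cols {
		parts = append(parts, fmt.Sprintf("%s = $%d", c, i+1))
	}
	pgSet := strings.Join(parts, ", ") // mobile = $1, class = $2, name = $3
	fmt.Println(mysqlSet, pgSet)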
@@ -134,15 +134,21 @@ USAGE:

OPTIONS:
   --src value, -s value         the file path of the proto source file
   --proto_path value, -I value  native command of protoc, specify the directory in which to search for imports. [optional]
   --go_opt value                native command of protoc-gen-go, specify the mapping from proto to go, eg --go_opt=proto_import=go_package_import. [optional]
   --dir value, -d value         the target path of the code
   --style value                 the file naming format, see [https://github.com/tal-tech/go-zero/tree/master/tools/goctl/config/readme.md]
   --idea                        whether the command execution environment is from idea plugin. [optional]

```

### Parameter description

* --src: required, the proto source; for now only generation from a single proto file is supported
* --proto_path: optional, a native protoc flag that specifies where proto imports are looked up; multiple paths may be given, e.g. `goctl rpc -I={path1} -I={path2} ...`; it can be omitted when there is no import. The current proto path is built in and does not need to be specified; see `protoc -h` for details on `-I`
* --proto_path: optional, a native protoc flag that specifies where proto imports are looked up; multiple paths may be given, e.g. `goctl rpc -I={path1} -I={path2} ...`
  ; it can be omitted when there is no import. The current proto path is built in and does not need to be specified; see `protoc -h` for details on `-I`
* --go_opt: optional, a native protoc-gen-go flag used to specify the go_package mapping
* --dir: optional, the target directory of the generated code; defaults to the directory of the proto file
* --style: optional, the naming style of the generated file names
* --idea: optional, whether the command is executed from the idea plugin; can be ignored when running in a terminal
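As an illustration of combining the new flag with an import path, an invocation might look like `goctl rpc proto -src greet.proto -dir . -I={path-to-imports} --go_opt=Mbase/common.proto=./base`; the proto file name here is made up, and the mapping value mirrors the one used in the generator test later in this changeset.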
@@ -6,6 +6,7 @@ import (
	"path/filepath"

	"github.com/tal-tech/go-zero/tools/goctl/rpc/generator"
	"github.com/tal-tech/go-zero/tools/goctl/util"
	"github.com/urfave/cli"
)

@@ -17,9 +18,17 @@ func RPC(c *cli.Context) error {
	out := c.String("dir")
	style := c.String("style")
	protoImportPath := c.StringSlice("proto_path")
	goOptions := c.StringSlice("go_opt")
	home := c.String("home")

	if len(home) > 0 {
		util.RegisterGoctlHome(home)
	}

	if len(src) == 0 {
		return errors.New("missing -src")
	}

	if len(out) == 0 {
		return errors.New("missing -dir")
	}

@@ -29,7 +38,7 @@ func RPC(c *cli.Context) error {
		return err
	}

	return g.Generate(src, out, protoImportPath)
	return g.Generate(src, out, protoImportPath, goOptions...)
}

// RPCNew is to generate rpc greet service, this greet service can speed

@@ -41,6 +50,11 @@ func RPCNew(c *cli.Context) error {
		return fmt.Errorf("unexpected ext: %s", ext)
	}
	style := c.String("style")
	home := c.String("home")

	if len(home) > 0 {
		util.RegisterGoctlHome(home)
	}

	protoName := rpcname + ".proto"
	filename := filepath.Join(".", rpcname, protoName)

@@ -65,6 +79,12 @@ func RPCNew(c *cli.Context) error {
// RPCTemplate is the entry for generate rpc template
func RPCTemplate(c *cli.Context) error {
	protoFile := c.String("o")
	home := c.String("home")

	if len(home) > 0 {
		util.RegisterGoctlHome(home)
	}

	if len(protoFile) == 0 {
		return errors.New("missing -o")
	}
@@ -11,8 +11,11 @@ type DefaultGenerator struct {
	log console.Console
}

// just test interface implement
var _ Generator = (*DefaultGenerator)(nil)

// NewDefaultGenerator returns an instance of DefaultGenerator
func NewDefaultGenerator() *DefaultGenerator {
func NewDefaultGenerator() Generator {
	log := console.NewColorConsole()
	return &DefaultGenerator{
		log: log,

@@ -33,5 +36,6 @@ func (g *DefaultGenerator) Prepare() error {
	}

	_, err = exec.LookPath("protoc-gen-go")

	return err
}
@@ -36,7 +36,7 @@ func NewRPCGenerator(g Generator, cfg *conf.Config) *RPCGenerator {
// Generate generates an rpc service, through the proto file,
// code storage directory, and proto import parameters to control
// the source file and target location of the rpc service that needs to be generated
func (g *RPCGenerator) Generate(src, target string, protoImportPath []string) error {
func (g *RPCGenerator) Generate(src, target string, protoImportPath []string, goOptions ...string) error {
	abs, err := filepath.Abs(target)
	if err != nil {
		return err

@@ -73,7 +73,7 @@ func (g *RPCGenerator) Generate(src, target string, protoImportPath []string) er
		return err
	}

	err = g.g.GenPb(dirCtx, protoImportPath, proto, g.cfg)
	err = g.g.GenPb(dirCtx, protoImportPath, proto, g.cfg, goOptions...)
	if err != nil {
		return err
	}
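The goOptions collected by the CLI are forwarded through Generate into GenPb; conceptually, each entry becomes an extra --go_opt flag on the protoc invocation. A rough sketch of that idea only; the actual command assembly in GenPb may differ, and the --go_out value here is an assumption:

	goOptions := []string{"Mbase/common.proto=./base"}
	args := []string{"-I=.", "--go_out=plugins=grpc:."}
	for _, op := range goOptions {
		args = append(args, "--go_opt="+op)
	}
	fmt.Println(strings.Join(args, " "))
	// -I=. --go_out=plugins=grpc:. --go_opt=Mbase/common.proto=./base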
@@ -29,7 +29,6 @@ func TestRpcGenerate(t *testing.T) {
|
||||
projectName := stringx.Rand()
|
||||
g := NewRPCGenerator(dispatcher, cfg)
|
||||
|
||||
// case go path
|
||||
src := filepath.Join(build.Default.GOPATH, "src")
|
||||
_, err = os.Stat(src)
|
||||
if err != nil {
|
||||
@@ -41,41 +40,51 @@ func TestRpcGenerate(t *testing.T) {
|
||||
defer func() {
|
||||
_ = os.RemoveAll(srcDir)
|
||||
}()
|
||||
err = g.Generate("./test.proto", projectDir, []string{src})
|
||||
common, err := filepath.Abs(".")
|
||||
assert.Nil(t, err)
|
||||
_, err = execx.Run("go test "+projectName, projectDir)
|
||||
if err != nil {
|
||||
assert.True(t, func() bool {
|
||||
return strings.Contains(err.Error(), "not in GOROOT") || strings.Contains(err.Error(), "cannot find package")
|
||||
}())
|
||||
}
|
||||
|
||||
// case go path
|
||||
t.Run("GOPATH", func(t *testing.T) {
|
||||
err = g.Generate("./test.proto", projectDir, []string{common, src}, "Mbase/common.proto=./base")
|
||||
assert.Nil(t, err)
|
||||
_, err = execx.Run("go test "+projectName, projectDir)
|
||||
if err != nil {
|
||||
assert.True(t, func() bool {
|
||||
return strings.Contains(err.Error(), "not in GOROOT") || strings.Contains(err.Error(), "cannot find package")
|
||||
}())
|
||||
}
|
||||
})
|
||||
|
||||
// case go mod
|
||||
workDir := t.TempDir()
|
||||
name := filepath.Base(workDir)
|
||||
_, err = execx.Run("go mod init "+name, workDir)
|
||||
if err != nil {
|
||||
logx.Error(err)
|
||||
return
|
||||
}
|
||||
t.Run("GOMOD", func(t *testing.T) {
|
||||
workDir := t.TempDir()
|
||||
name := filepath.Base(workDir)
|
||||
_, err = execx.Run("go mod init "+name, workDir)
|
||||
if err != nil {
|
||||
logx.Error(err)
|
||||
return
|
||||
}
|
||||
|
||||
projectDir = filepath.Join(workDir, projectName)
|
||||
err = g.Generate("./test.proto", projectDir, []string{src})
|
||||
assert.Nil(t, err)
|
||||
_, err = execx.Run("go test "+projectName, projectDir)
|
||||
if err != nil {
|
||||
assert.True(t, func() bool {
|
||||
return strings.Contains(err.Error(), "not in GOROOT") || strings.Contains(err.Error(), "cannot find package")
|
||||
}())
|
||||
}
|
||||
projectDir = filepath.Join(workDir, projectName)
|
||||
err = g.Generate("./test.proto", projectDir, []string{common, src}, "Mbase/common.proto=./base")
|
||||
assert.Nil(t, err)
|
||||
_, err = execx.Run("go test "+projectName, projectDir)
|
||||
if err != nil {
|
||||
assert.True(t, func() bool {
|
||||
return strings.Contains(err.Error(), "not in GOROOT") || strings.Contains(err.Error(), "cannot find package")
|
||||
}())
|
||||
}
|
||||
})
|
||||
|
||||
// case not in go mod and go path
|
||||
err = g.Generate("./test.proto", projectDir, []string{src})
|
||||
assert.Nil(t, err)
|
||||
_, err = execx.Run("go test "+projectName, projectDir)
|
||||
if err != nil {
|
||||
assert.True(t, func() bool {
|
||||
return strings.Contains(err.Error(), "not in GOROOT") || strings.Contains(err.Error(), "cannot find package")
|
||||
}())
|
||||
}
|
||||
t.Run("OTHER", func(t *testing.T) {
|
||||
err = g.Generate("./test.proto", projectDir, []string{common, src}, "Mbase/common.proto=./base")
|
||||
assert.Nil(t, err)
|
||||
_, err = execx.Run("go test "+projectName, projectDir)
|
||||
if err != nil {
|
||||
assert.True(t, func() bool {
|
||||
return strings.Contains(err.Error(), "not in GOROOT") || strings.Contains(err.Error(), "cannot find package")
|
||||
}())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -16,7 +16,6 @@ import (
|
||||
const (
|
||||
callTemplateText = `{{.head}}
|
||||
|
||||
//go:generate mockgen -destination ./{{.name}}_mock.go -package {{.filePackage}} -source $GOFILE
|
||||
|
||||
package {{.filePackage}}
|
||||
|
||||
@@ -50,13 +49,13 @@ func New{{.serviceName}}(cli zrpc.Client) {{.serviceName}} {
|
||||
`
|
||||
|
||||
callInterfaceFunctionTemplate = `{{if .hasComment}}{{.comment}}
|
||||
{{end}}{{.method}}(ctx context.Context,in *{{.pbRequest}}) (*{{.pbResponse}},error)`
|
||||
{{end}}{{.method}}(ctx context.Context{{if .hasReq}},in *{{.pbRequest}}{{end}}) ({{if .notStream}}*{{.pbResponse}}, {{else}}{{.streamBody}},{{end}} error)`
|
||||
|
||||
callFunctionTemplate = `
|
||||
{{if .hasComment}}{{.comment}}{{end}}
|
||||
func (m *default{{.serviceName}}) {{.method}}(ctx context.Context,in *{{.pbRequest}}) (*{{.pbResponse}}, error) {
|
||||
func (m *default{{.serviceName}}) {{.method}}(ctx context.Context{{if .hasReq}},in *{{.pbRequest}}{{end}}) ({{if .notStream}}*{{.pbResponse}}, {{else}}{{.streamBody}},{{end}} error) {
|
||||
client := {{.package}}.New{{.rpcServiceName}}Client(m.cli.Conn())
|
||||
return client.{{.method}}(ctx, in)
|
||||
return client.{{.method}}(ctx,{{if .hasReq}} in{{end}})
|
||||
}
|
||||
`
|
||||
)
|
||||
@@ -79,7 +78,7 @@ func (g *DefaultGenerator) GenCall(ctx DirContext, proto parser.Proto, cfg *conf
|
||||
return err
|
||||
}
|
||||
|
||||
iFunctions, err := g.getInterfaceFuncs(service)
|
||||
iFunctions, err := g.getInterfaceFuncs(proto.PbPackage, service)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -116,6 +115,7 @@ func (g *DefaultGenerator) genFunction(goPackage string, service parser.Service)
|
||||
}
|
||||
|
||||
comment := parser.GetComment(rpc.Doc())
|
||||
streamServer := fmt.Sprintf("%s.%s_%s%s", goPackage, parser.CamelCase(service.Name), parser.CamelCase(rpc.Name), "Client")
|
||||
buffer, err := util.With("sharedFn").Parse(text).Execute(map[string]interface{}{
|
||||
"serviceName": stringx.From(service.Name).ToCamel(),
|
||||
"rpcServiceName": parser.CamelCase(service.Name),
|
||||
@@ -125,6 +125,9 @@ func (g *DefaultGenerator) genFunction(goPackage string, service parser.Service)
|
||||
"pbResponse": parser.CamelCase(rpc.ReturnsType),
|
||||
"hasComment": len(comment) > 0,
|
||||
"comment": comment,
|
||||
"hasReq": !rpc.StreamsRequest,
|
||||
"notStream": !rpc.StreamsRequest && !rpc.StreamsReturns,
|
||||
"streamBody": streamServer,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -135,7 +138,7 @@ func (g *DefaultGenerator) genFunction(goPackage string, service parser.Service)
|
||||
return functions, nil
|
||||
}
|
||||
|
||||
func (g *DefaultGenerator) getInterfaceFuncs(service parser.Service) ([]string, error) {
|
||||
func (g *DefaultGenerator) getInterfaceFuncs(goPackage string, service parser.Service) ([]string, error) {
|
||||
functions := make([]string, 0)
|
||||
|
||||
for _, rpc := range service.RPC {
|
||||
@@ -145,13 +148,17 @@ func (g *DefaultGenerator) getInterfaceFuncs(service parser.Service) ([]string,
|
||||
}
|
||||
|
||||
comment := parser.GetComment(rpc.Doc())
|
||||
streamServer := fmt.Sprintf("%s.%s_%s%s", goPackage, parser.CamelCase(service.Name), parser.CamelCase(rpc.Name), "Client")
|
||||
buffer, err := util.With("interfaceFn").Parse(text).Execute(
|
||||
map[string]interface{}{
|
||||
"hasComment": len(comment) > 0,
|
||||
"comment": comment,
|
||||
"method": parser.CamelCase(rpc.Name),
|
||||
"hasReq": !rpc.StreamsRequest,
|
||||
"pbRequest": parser.CamelCase(rpc.RequestType),
|
||||
"notStream": !rpc.StreamsRequest && !rpc.StreamsReturns,
|
||||
"pbResponse": parser.CamelCase(rpc.ReturnsType),
|
||||
"streamBody": streamServer,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -15,5 +15,5 @@ type Generator interface {
|
||||
GenLogic(ctx DirContext, proto parser.Proto, cfg *conf.Config) error
|
||||
GenServer(ctx DirContext, proto parser.Proto, cfg *conf.Config) error
|
||||
GenSvc(ctx DirContext, proto parser.Proto, cfg *conf.Config) error
|
||||
GenPb(ctx DirContext, protoImportPath []string, proto parser.Proto, cfg *conf.Config) error
|
||||
GenPb(ctx DirContext, protoImportPath []string, proto parser.Proto, cfg *conf.Config, goOptions ...string) error
|
||||
}
|
||||
|
||||
@@ -40,10 +40,10 @@ func New{{.logicName}}(ctx context.Context,svcCtx *svc.ServiceContext) *{{.logic
{{.functions}}
`
logicFunctionTemplate = `{{if .hasComment}}{{.comment}}{{end}}
func (l *{{.logicName}}) {{.method}} (in {{.request}}) ({{.response}}, error) {
func (l *{{.logicName}}) {{.method}} ({{if .hasReq}}in {{.request}}{{if .stream}},stream {{.streamBody}}{{end}}{{else}}stream {{.streamBody}}{{end}}) ({{if .hasReply}}{{.response}},{{end}} error) {
// todo: add your logic here and delete this line

return &{{.responseType}}{}, nil
return {{if .hasReply}}&{{.responseType}}{},{{end}} nil
}
`
)
@@ -51,6 +51,7 @@ func (l *{{.logicName}}) {{.method}} (in {{.request}}) ({{.response}}, error) {
// GenLogic generates the logic file of the rpc service, which corresponds to the RPC definition items in proto.
func (g *DefaultGenerator) GenLogic(ctx DirContext, proto parser.Proto, cfg *conf.Config) error {
dir := ctx.GetLogic()
service := proto.Service.Service.Name
for _, rpc := range proto.Service.RPC {
logicFilename, err := format.FileNamingFormat(cfg.NamingFormat, rpc.Name+"_logic")
if err != nil {
@@ -58,7 +59,7 @@ func (g *DefaultGenerator) GenLogic(ctx DirContext, proto parser.Proto, cfg *con
}

filename := filepath.Join(dir.Filename, logicFilename+".go")
functions, err := g.genLogicFunction(proto.PbPackage, rpc)
functions, err := g.genLogicFunction(service, proto.PbPackage, rpc)
if err != nil {
return err
}
@@ -82,7 +83,7 @@ func (g *DefaultGenerator) GenLogic(ctx DirContext, proto parser.Proto, cfg *con
return nil
}

func (g *DefaultGenerator) genLogicFunction(goPackage string, rpc *parser.RPC) (string, error) {
func (g *DefaultGenerator) genLogicFunction(serviceName, goPackage string, rpc *parser.RPC) (string, error) {
functions := make([]string, 0)
text, err := util.LoadTemplate(category, logicFuncTemplateFileFile, logicFunctionTemplate)
if err != nil {
@@ -91,12 +92,17 @@ func (g *DefaultGenerator) genLogicFunction(goPackage string, rpc *parser.RPC) (

logicName := stringx.From(rpc.Name + "_logic").ToCamel()
comment := parser.GetComment(rpc.Doc())
streamServer := fmt.Sprintf("%s.%s_%s%s", goPackage, parser.CamelCase(serviceName), parser.CamelCase(rpc.Name), "Server")
buffer, err := util.With("fun").Parse(text).Execute(map[string]interface{}{
"logicName": logicName,
"method": parser.CamelCase(rpc.Name),
"hasReq": !rpc.StreamsRequest,
"request": fmt.Sprintf("*%s.%s", goPackage, parser.CamelCase(rpc.RequestType)),
"hasReply": !rpc.StreamsRequest && !rpc.StreamsReturns,
"response": fmt.Sprintf("*%s.%s", goPackage, parser.CamelCase(rpc.ReturnsType)),
"responseType": fmt.Sprintf("%s.%s", goPackage, parser.CamelCase(rpc.ReturnsType)),
"stream": rpc.StreamsRequest || rpc.StreamsReturns,
"streamBody": streamServer,
"hasComment": len(comment) > 0,
"comment": comment,
})
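To see what the conditional logic template expands to, here is a small sketch that runs it through Go's plain text/template with the same data keys the generator passes, for a server-streaming RPC such as rpc ServerStream (Req) returns (stream Reply). The concrete names (ServerStreamLogic, test.Req and so on) are illustrative, and goctl's util.With wrapper is replaced by text/template directly:

package main

import (
	"os"
	"text/template"
)

// The new logic function template, copied from the hunk above.
const logicFunctionTemplate = `{{if .hasComment}}{{.comment}}{{end}}
func (l *{{.logicName}}) {{.method}} ({{if .hasReq}}in {{.request}}{{if .stream}},stream {{.streamBody}}{{end}}{{else}}stream {{.streamBody}}{{end}}) ({{if .hasReply}}{{.response}},{{end}} error) {
	// todo: add your logic here and delete this line

	return {{if .hasReply}}&{{.responseType}}{},{{end}} nil
}
`

func main() {
	tpl := template.Must(template.New("fun").Parse(logicFunctionTemplate))
	// Data for a server-streaming RPC: the request is plain, the reply is
	// streamed, so hasReq and stream are true while hasReply is false.
	data := map[string]interface{}{
		"hasComment":   false,
		"comment":      "",
		"logicName":    "ServerStreamLogic",
		"method":       "ServerStream",
		"hasReq":       true,
		"request":      "*test.Req",
		"hasReply":     false,
		"response":     "*test.Reply",
		"stream":       true,
		"streamBody":   "test.TestService_ServerStreamServer",
		"responseType": "test.Reply",
	}
	_ = tpl.Execute(os.Stdout, data)
}

With hasReply false, the rendered method returns only an error and writes its replies through the stream parameter instead of a return value.
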
@@ -2,33 +2,104 @@ package generator

import (
"bytes"
"errors"
"path/filepath"
"strings"

"github.com/tal-tech/go-zero/core/collection"
conf "github.com/tal-tech/go-zero/tools/goctl/config"
"github.com/tal-tech/go-zero/tools/goctl/rpc/execx"
"github.com/tal-tech/go-zero/tools/goctl/rpc/parser"
)

const googleProtocGenGoErr = `--go_out: protoc-gen-go: plugins are not supported; use 'protoc --go-grpc_out=...' to generate gRPC`

// GenPb generates the pb.go file, which is a layer of packaging for protoc to generate gprc,
// but the commands and flags in protoc are not completely joined in goctl. At present, proto_path(-I) is introduced
func (g *DefaultGenerator) GenPb(ctx DirContext, protoImportPath []string, proto parser.Proto, _ *conf.Config) error {
func (g *DefaultGenerator) GenPb(ctx DirContext, protoImportPath []string, proto parser.Proto, _ *conf.Config, goOptions ...string) error {
dir := ctx.GetPb()
cw := new(bytes.Buffer)
base := filepath.Dir(proto.Src)
directory, base := filepath.Split(proto.Src)
directory = filepath.Clean(directory)
cw.WriteString("protoc ")
protoImportPathSet := collection.NewSet()
isSamePackage := true
for _, ip := range protoImportPath {
cw.WriteString(" -I=" + ip)
pip := " --proto_path=" + ip
if protoImportPathSet.Contains(pip) {
continue
}

abs, err := filepath.Abs(ip)
if err != nil {
return err
}

if abs == directory {
isSamePackage = true
} else {
isSamePackage = false
}

protoImportPathSet.AddStr(pip)
cw.WriteString(pip)
}
cw.WriteString(" -I=" + base)
currentPath := " --proto_path=" + directory
if !protoImportPathSet.Contains(currentPath) {
cw.WriteString(currentPath)
}

cw.WriteString(" " + proto.Name)
if strings.Contains(proto.GoPackage, "/") {
cw.WriteString(" --go_out=plugins=grpc:" + ctx.GetMain().Filename)
} else {
cw.WriteString(" --go_out=plugins=grpc:" + dir.Filename)
}

// Compatible with version 1.4.0,github.com/golang/protobuf/protoc-gen-go@v1.4.0
// --go_opt usage please see https://developers.google.com/protocol-buffers/docs/reference/go-generated#package
optSet := collection.NewSet()
for _, op := range goOptions {
opt := " --go_opt=" + op
if optSet.Contains(opt) {
continue
}

optSet.AddStr(op)
cw.WriteString(" --go_opt=" + op)
}

var currentFileOpt string
if !isSamePackage || (len(proto.GoPackage) > 0 && proto.GoPackage != proto.Package.Name) {
if filepath.IsAbs(proto.GoPackage) {
currentFileOpt = " --go_opt=M" + base + "=" + proto.GoPackage
} else if strings.Contains(proto.GoPackage, string(filepath.Separator)) {
currentFileOpt = " --go_opt=M" + base + "=./" + proto.GoPackage
} else {
currentFileOpt = " --go_opt=M" + base + "=../" + proto.GoPackage
}
} else {
currentFileOpt = " --go_opt=M" + base + "=."
}

if !optSet.Contains(currentFileOpt) {
cw.WriteString(currentFileOpt)
}

command := cw.String()
g.log.Debug(command)
_, err := execx.Run(command, "")
return err
if err != nil {
if strings.Contains(err.Error(), googleProtocGenGoErr) {
return errors.New(`Unsupported plugin protoc-gen-go which installed from the following source:
google.golang.org/protobuf/cmd/protoc-gen-go,
github.com/protocolbuffers/protobuf-go/cmd/protoc-gen-go;

Please replace it by the following command, we recommend to use version before v1.3.5:
go get -u github.com/golang/protobuf/protoc-gen-go`)
}

return err
}
return nil
}
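Put together, GenPb emits a single protoc invocation assembled in a bytes.Buffer. A rough standalone sketch of the command shape for a proto whose go_package stays inside the generated pb package; the paths below are invented for illustration, and the real command also carries every --proto_path and every --go_opt passed through goOptions:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical input: /home/user/greet/greet.proto with a plain go_package.
	var cw strings.Builder
	cw.WriteString("protoc ")
	cw.WriteString(" --proto_path=/home/user/greet")              // directory of the .proto file
	cw.WriteString(" greet.proto")                                 // proto.Name
	cw.WriteString(" --go_out=plugins=grpc:/home/user/greet/pb")   // GoPackage has no "/", so the pb dir is used
	cw.WriteString(" --go_opt=Mgreet.proto=.")                     // same package: map the file to "."
	fmt.Println(cw.String())
}
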
@@ -19,7 +19,7 @@ const (
package server

import (
"context"
{{if .notStream}}"context"{{end}}

{{.imports}}
)
@@ -38,9 +38,9 @@ func New{{.server}}Server(svcCtx *svc.ServiceContext) *{{.server}}Server {
`
functionTemplate = `
{{if .hasComment}}{{.comment}}{{end}}
func (s *{{.server}}Server) {{.method}} (ctx context.Context, in {{.request}}) ({{.response}}, error) {
l := logic.New{{.logicName}}(ctx,s.svcCtx)
return l.{{.method}}(in)
func (s *{{.server}}Server) {{.method}} ({{if .notStream}}ctx context.Context,{{if .hasReq}} in {{.request}}{{end}}{{else}}{{if .hasReq}} in {{.request}},{{end}}stream {{.streamBody}}{{end}}) ({{if .notStream}}{{.response}},{{end}}error) {
l := logic.New{{.logicName}}({{if .notStream}}ctx,{{else}}stream.Context(),{{end}}s.svcCtx)
return l.{{.method}}({{if .hasReq}}in{{if .stream}} ,stream{{end}}{{else}}{{if .stream}}stream{{end}}{{end}})
}
`
)
@@ -73,11 +73,20 @@ func (g *DefaultGenerator) GenServer(ctx DirContext, proto parser.Proto, cfg *co
return err
}

notStream := false
for _, rpc := range service.RPC {
if !rpc.StreamsRequest && !rpc.StreamsReturns {
notStream = true
break
}
}

err = util.With("server").GoFmt(true).Parse(text).SaveTo(map[string]interface{}{
"head": head,
"server": stringx.From(service.Name).ToCamel(),
"imports": strings.Join(imports.KeysStr(), util.NL),
"funcs": strings.Join(funcList, util.NL),
"head": head,
"server": stringx.From(service.Name).ToCamel(),
"imports": strings.Join(imports.KeysStr(), util.NL),
"funcs": strings.Join(funcList, util.NL),
"notStream": notStream,
}, serverFile, true)
return err
}
@@ -91,6 +100,7 @@ func (g *DefaultGenerator) genFunctions(goPackage string, service parser.Service
}

comment := parser.GetComment(rpc.Doc())
streamServer := fmt.Sprintf("%s.%s_%s%s", goPackage, parser.CamelCase(service.Name), parser.CamelCase(rpc.Name), "Server")
buffer, err := util.With("func").Parse(text).Execute(map[string]interface{}{
"server": stringx.From(service.Name).ToCamel(),
"logicName": fmt.Sprintf("%sLogic", stringx.From(rpc.Name).ToCamel()),
@@ -99,6 +109,10 @@ func (g *DefaultGenerator) genFunctions(goPackage string, service parser.Service
"response": fmt.Sprintf("*%s.%s", goPackage, parser.CamelCase(rpc.ReturnsType)),
"hasComment": len(comment) > 0,
"comment": comment,
"hasReq": !rpc.StreamsRequest,
"stream": rpc.StreamsRequest || rpc.StreamsReturns,
"notStream": !rpc.StreamsRequest && !rpc.StreamsReturns,
"streamBody": streamServer,
})
if err != nil {
return nil, err
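The same few booleans (hasReq, hasReply, notStream, stream) drive the call, logic, and server templates. A tiny standalone program that derives them from the streaming shape of each RPC; the list mirrors the streaming cases added to the test protos below:

package main

import "fmt"

type rpcDef struct {
	name                           string
	streamsRequest, streamsReturns bool
}

func main() {
	// Mirrors the boolean expressions passed to the templates above.
	rpcs := []rpcDef{
		{"ServerStream", false, true},
		{"ClientStream", true, false},
		{"Stream", true, true},
	}
	for _, r := range rpcs {
		hasReq := !r.streamsRequest
		hasReply := !r.streamsRequest && !r.streamsReturns
		notStream := !r.streamsRequest && !r.streamsReturns
		stream := r.streamsRequest || r.streamsReturns
		fmt.Printf("%-12s hasReq=%-5v hasReply=%-5v notStream=%-5v stream=%v\n",
			r.name, hasReq, hasReply, notStream, stream)
	}
}
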
@@ -59,4 +59,10 @@ service Test_Service {
rpc MapService (MapReq) returns (CommonReply);
// case repeated
rpc RepeatedService (RepeatedReq) returns (CommonReply);
// server stream
rpc ServerStream (Req) returns (stream Reply);
// client stream
rpc ClientStream (stream Req) returns (Reply);
// stream
rpc Stream(stream Req) returns (stream Reply);
}
tools/goctl/rpc/parser/stream.proto (new file, 16 lines)
@@ -0,0 +1,16 @@
syntax = "proto3";

package test;

message Req{
string input = 1;
}

message Reply{
string output = 1;
}
service TestService{
rpc ServerStream (Req) returns (stream Reply);
rpc ClientStream (stream Req) returns (Reply);
rpc Stream (stream Req) returns (stream Reply);
}
@@ -2,6 +2,7 @@ package tpl

import (
"fmt"
"path/filepath"

"github.com/logrusorgru/aurora"
"github.com/tal-tech/go-zero/core/errorx"
@@ -19,6 +20,11 @@ const templateParentPath = "/"

// GenTemplates writes the latest template text into file which is not exists
func GenTemplates(ctx *cli.Context) error {
path := ctx.String("home")
if len(path) != 0 {
util.RegisterGoctlHome(path)
}

if err := errorx.Chain(
func() error {
return gogen.GenTemplates(ctx)
@@ -47,14 +53,24 @@ func GenTemplates(ctx *cli.Context) error {
return err
}

fmt.Printf("Templates are generated in %s, %s\n", aurora.Green(dir),
abs, err := filepath.Abs(dir)
if err != nil {
return err
}

fmt.Printf("Templates are generated in %s, %s\n", aurora.Green(abs),
aurora.Red("edit on your risk!"))

return nil
}

// CleanTemplates deletes all templates
func CleanTemplates(_ *cli.Context) error {
func CleanTemplates(ctx *cli.Context) error {
path := ctx.String("home")
if len(path) != 0 {
util.RegisterGoctlHome(path)
}

err := errorx.Chain(
func() error {
return gogen.Clean()
@@ -86,7 +102,12 @@ func CleanTemplates(_ *cli.Context) error {
// UpdateTemplates writes the latest template text into file,
// it will delete the older templates if there are exists
func UpdateTemplates(ctx *cli.Context) (err error) {
path := ctx.String("home")
category := ctx.String("category")
if len(path) != 0 {
util.RegisterGoctlHome(path)
}

defer func() {
if err == nil {
fmt.Println(aurora.Green(fmt.Sprintf("%s template are update!", category)).String())
@@ -113,8 +134,13 @@ func UpdateTemplates(ctx *cli.Context) (err error) {

// RevertTemplates will overwrite the old template content with the new template
func RevertTemplates(ctx *cli.Context) (err error) {
path := ctx.String("home")
category := ctx.String("category")
filename := ctx.String("name")
if len(path) != 0 {
util.RegisterGoctlHome(path)
}

defer func() {
if err == nil {
fmt.Println(aurora.Green(fmt.Sprintf("%s template are reverted!", filename)).String())
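These template commands run their per-category steps through errorx.Chain. A small standalone sketch of that behavior, assuming (as the usage here suggests) that Chain executes the functions in order and returns the first error; the step names are made up:

package main

import (
	"errors"
	"fmt"

	"github.com/tal-tech/go-zero/core/errorx"
)

func main() {
	err := errorx.Chain(
		func() error { fmt.Println("generate api templates"); return nil },
		func() error { return errors.New("generate rpc templates failed") },
		func() error { fmt.Println("never reached"); return nil },
	)
	fmt.Println(err) // generate rpc templates failed
}
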
@@ -17,6 +17,12 @@ const (
goctlDir = ".goctl"
)

var goctlHome string

func RegisterGoctlHome(home string) {
goctlHome = home
}

// CreateIfNotExist creates a file if it is not exists
func CreateIfNotExist(file string) (*os.File, error) {
_, err := os.Stat(file)
@@ -62,6 +68,10 @@ func FileNameWithoutExt(file string) string {

// GetGoctlHome returns the path value of the goctl home where Join $HOME with .goctl
func GetGoctlHome() (string, error) {
if len(goctlHome) != 0 {
return goctlHome, nil
}

home, err := os.UserHomeDir()
if err != nil {
return "", err
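The --home flag on the template commands simply records an override before any template path is resolved. A condensed standalone sketch of the lookup order; the final join with ".goctl" is inferred from the goctlDir constant and the doc comment, since the tail of GetGoctlHome is cut off in this hunk:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

var goctlHome string

// RegisterGoctlHome records the --home override, as in the hunk above.
func RegisterGoctlHome(home string) { goctlHome = home }

// GetGoctlHome prefers the override and otherwise falls back to $HOME/.goctl.
func GetGoctlHome() (string, error) {
	if len(goctlHome) != 0 {
		return goctlHome, nil
	}
	home, err := os.UserHomeDir()
	if err != nil {
		return "", err
	}
	return filepath.Join(home, ".goctl"), nil
}

func main() {
	RegisterGoctlHome("/tmp/goctl-templates") // e.g. goctl template init --home /tmp/goctl-templates
	dir, _ := GetGoctlHome()
	fmt.Println(dir) // /tmp/goctl-templates
}
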
@@ -6,6 +6,7 @@ import (
"log"
"net"
"testing"
"time"

"github.com/stretchr/testify/assert"
"github.com/tal-tech/go-zero/core/logx"
@@ -58,6 +59,13 @@ func TestDepositServer_Deposit(t *testing.T) {
codes.OK,
"",
},
{
"valid request with long handling time",
2000.00,
nil,
codes.DeadlineExceeded,
fmt.Sprintf("context deadline exceeded"),
},
}

directClient := MustNewClient(
@@ -79,7 +87,7 @@ func TestDepositServer_Deposit(t *testing.T) {
func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn,
invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
return invoker(ctx, method, req, reply, cc, opts...)
}))
}), WithTimeout(1000*time.Millisecond))
assert.Nil(t, err)
clients := []Client{
directClient,
@@ -69,8 +69,8 @@ func (c *client) buildDialOptions(opts ...ClientOption) []grpc.DialOption {
WithUnaryClientInterceptors(
clientinterceptors.TracingInterceptor,
clientinterceptors.DurationInterceptor,
clientinterceptors.BreakerInterceptor,
clientinterceptors.PrometheusInterceptor,
clientinterceptors.BreakerInterceptor,
clientinterceptors.TimeoutInterceptor(cliOpts.Timeout),
),
}
@@ -18,26 +18,6 @@ func TimeoutInterceptor(timeout time.Duration) grpc.UnaryClientInterceptor {
ctx, cancel := context.WithTimeout(ctx, timeout)
defer cancel()

// create channel with buffer size 1 to avoid goroutine leak
done := make(chan error, 1)
panicChan := make(chan interface{}, 1)
go func() {
defer func() {
if p := recover(); p != nil {
panicChan <- p
}
}()

done <- invoker(ctx, method, req, reply, cc, opts...)
}()

select {
case p := <-panicChan:
panic(p)
case err := <-done:
return err
case <-ctx.Done():
return ctx.Err()
}
return invoker(ctx, method, req, reply, cc, opts...)
}
}
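A minimal sketch of why the goroutine and panic channel could be dropped: once context.WithTimeout has set the deadline, an invoker that watches its context (as gRPC's real invoker does) returns the deadline error on its own.

package main

import (
	"context"
	"fmt"
	"time"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Millisecond)
	defer cancel()

	// Stand-in for a gRPC invoker that respects ctx cancellation.
	invoker := func(ctx context.Context) error {
		select {
		case <-time.After(50 * time.Millisecond): // simulated slow RPC
			return nil
		case <-ctx.Done():
			return ctx.Err()
		}
	}

	fmt.Println(invoker(ctx)) // context deadline exceeded
}
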
@@ -49,25 +49,6 @@ func TestTimeoutInterceptor_timeout(t *testing.T) {
assert.Nil(t, err)
}

func TestTimeoutInterceptor_timeoutExpire(t *testing.T) {
const timeout = time.Millisecond * 10
interceptor := TimeoutInterceptor(timeout)
ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond)
defer cancel()
var wg sync.WaitGroup
wg.Add(1)
cc := new(grpc.ClientConn)
err := interceptor(ctx, "/foo", nil, nil, cc,
func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn,
opts ...grpc.CallOption) error {
defer wg.Done()
time.Sleep(time.Millisecond * 50)
return nil
})
wg.Wait()
assert.Equal(t, context.DeadlineExceeded, err)
}

func TestTimeoutInterceptor_panic(t *testing.T) {
timeouts := []time.Duration{0, time.Millisecond * 10}
for _, timeout := range timeouts {
@@ -1,6 +1,8 @@
package codes

import (
"context"

"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
@@ -10,6 +12,17 @@ func Acceptable(err error) bool {
switch status.Code(err) {
case codes.DeadlineExceeded, codes.Internal, codes.Unavailable, codes.DataLoss:
return false
case codes.Unknown:
return acceptableUnknown(err)
default:
return true
}
}

func acceptableUnknown(err error) bool {
switch err {
case context.DeadlineExceeded:
return false
default:
return true
}
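A standalone sketch of how this classification plays out; acceptable below re-implements the same switch locally so the snippet runs outside the internal package:

package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// acceptable mirrors codes.Acceptable plus the new Unknown handling.
func acceptable(err error) bool {
	switch status.Code(err) {
	case codes.DeadlineExceeded, codes.Internal, codes.Unavailable, codes.DataLoss:
		return false
	case codes.Unknown:
		return err != context.DeadlineExceeded
	default:
		return true
	}
}

func main() {
	fmt.Println(acceptable(nil))                                          // true: success
	fmt.Println(acceptable(status.Error(codes.NotFound, "missing")))      // true: business error
	fmt.Println(acceptable(status.Error(codes.DeadlineExceeded, "slow"))) // false: counts against the breaker
	fmt.Println(acceptable(context.DeadlineExceeded))                     // false: status.Code reports Unknown for plain context errors
}
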
@@ -2,6 +2,7 @@ package mock

import (
"context"
"time"

"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -16,5 +17,6 @@ func (*DepositServer) Deposit(ctx context.Context, req *DepositRequest) (*Deposi
return nil, status.Errorf(codes.InvalidArgument, "cannot deposit %v", req.GetAmount())
}

time.Sleep(time.Duration(req.GetAmount()) * time.Millisecond)
return &DepositResponse{Ok: true}, nil
}
@@ -58,10 +58,12 @@ func (s *rpcServer) Start(register RegisterFn) error {
serverinterceptors.UnaryCrashInterceptor(),
serverinterceptors.UnaryStatInterceptor(s.metrics),
serverinterceptors.UnaryPrometheusInterceptor(),
serverinterceptors.UnaryBreakerInterceptor(),
}
unaryInterceptors = append(unaryInterceptors, s.unaryInterceptors...)
streamInterceptors := []grpc.StreamServerInterceptor{
serverinterceptors.StreamCrashInterceptor,
serverinterceptors.StreamBreakerInterceptor,
}
streamInterceptors = append(streamInterceptors, s.streamInterceptors...)
options := append(s.options, WithUnaryServerInterceptors(unaryInterceptors...),
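go-zero installs these chains through its own WithUnaryServerInterceptors and WithStreamServerInterceptors options; with plain gRPC the equivalent wiring looks roughly like this sketch, where the two logging interceptors are trivial stand-ins rather than go-zero's real ones:

package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc"
)

// logUnary and logStream are placeholders for the crash/stat/prometheus/breaker
// interceptors appended above.
func logUnary(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo,
	handler grpc.UnaryHandler) (interface{}, error) {
	fmt.Println("unary call:", info.FullMethod)
	return handler(ctx, req)
}

func logStream(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo,
	handler grpc.StreamHandler) error {
	fmt.Println("stream call:", info.FullMethod)
	return handler(srv, ss)
}

func main() {
	srv := grpc.NewServer(
		grpc.ChainUnaryInterceptor(logUnary),
		grpc.ChainStreamInterceptor(logStream),
	)
	defer srv.Stop()
	fmt.Println("interceptor chains installed")
}
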
zrpc/internal/serverinterceptors/breakerinterceptor.go (new file, 33 lines)
@@ -0,0 +1,33 @@
package serverinterceptors

import (
"context"

"github.com/tal-tech/go-zero/core/breaker"
"github.com/tal-tech/go-zero/zrpc/internal/codes"
"google.golang.org/grpc"
)

// StreamBreakerInterceptor is an interceptor that acts as a circuit breaker.
func StreamBreakerInterceptor(srv interface{}, stream grpc.ServerStream, info *grpc.StreamServerInfo,
handler grpc.StreamHandler) (err error) {
breakerName := info.FullMethod
return breaker.DoWithAcceptable(breakerName, func() error {
return handler(srv, stream)
}, codes.Acceptable)
}

// UnaryBreakerInterceptor is an interceptor that acts as a circuit breaker.
func UnaryBreakerInterceptor() grpc.UnaryServerInterceptor {
return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo,
handler grpc.UnaryHandler) (resp interface{}, err error) {
breakerName := info.FullMethod
err = breaker.DoWithAcceptable(breakerName, func() error {
var err error
resp, err = handler(ctx, req)
return err
}, codes.Acceptable)

return resp, err
}
}
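Both interceptors lean on breaker.DoWithAcceptable from go-zero's public core/breaker package: the handler runs under a breaker keyed by the full method name, and only errors the acceptable callback rejects count against it. A standalone sketch with an illustrative breaker name and a stand-in acceptable function:

package main

import (
	"errors"
	"fmt"

	"github.com/tal-tech/go-zero/core/breaker"
)

var errBusiness = errors.New("invalid amount")

func main() {
	// Business failures are "acceptable": they are returned to the caller but
	// do not count against the breaker, mirroring codes.Acceptable above.
	acceptable := func(err error) bool {
		return err == nil || errors.Is(err, errBusiness)
	}

	for i := 0; i < 3; i++ {
		err := breaker.DoWithAcceptable("/mock.DepositService/Deposit", func() error {
			return errBusiness // result of the wrapped handler
		}, acceptable)
		fmt.Println(err) // invalid amount each time; the breaker stays closed
	}
}
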
zrpc/internal/serverinterceptors/breakerinterceptor_test.go (new file, 31 lines)
@@ -0,0 +1,31 @@
package serverinterceptors

import (
"context"
"testing"

"github.com/stretchr/testify/assert"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)

func TestStreamBreakerInterceptor(t *testing.T) {
err := StreamBreakerInterceptor(nil, nil, &grpc.StreamServerInfo{
FullMethod: "any",
}, func(
srv interface{}, stream grpc.ServerStream) error {
return status.New(codes.DeadlineExceeded, "any").Err()
})
assert.NotNil(t, err)
}

func TestUnaryBreakerInterceptor(t *testing.T) {
interceptor := UnaryBreakerInterceptor()
_, err := interceptor(nil, nil, &grpc.UnaryServerInfo{
FullMethod: "any",
}, func(ctx context.Context, req interface{}) (interface{}, error) {
return nil, status.New(codes.DeadlineExceeded, "any").Err()
})
assert.NotNil(t, err)
}