bump uptrace/bun dependencies from 1.2.6 to 1.2.8 (#3645)

This commit is contained in:
kim 2025-01-14 14:23:28 +00:00 committed by GitHub
commit b8ef9fc4bc
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
89 changed files with 907 additions and 4123 deletions

View file

@ -1,3 +1,44 @@
## [1.2.8](https://github.com/uptrace/bun/compare/v1.2.7...v1.2.8) (2025-01-06)
### Bug Fixes
* comment string zero bytes filtering ([34dfd68](https://github.com/uptrace/bun/commit/34dfd684e371c24b9f59e9b13ef57660931f0bde))
* get m2m table's structKey with driver.Valuer ([f107314](https://github.com/uptrace/bun/commit/f1073147dc73d01dcf8a6ee9252d354ff06a1062)), closes [#1100](https://github.com/uptrace/bun/issues/1100)
* return error when use dest with has-many/many-to-many ([8296774](https://github.com/uptrace/bun/commit/829677486b502e6d5d2ae37814488ae9f2c7386e)), closes [#606](https://github.com/uptrace/bun/issues/606)
* support scan float32 to float32/float64 ([a52e733](https://github.com/uptrace/bun/commit/a52e7339a93f84468878dcaffc42536faa44efae)), closes [#1087](https://github.com/uptrace/bun/issues/1087)
### Features
* add RelationWithOpts method to SelectQuery ([dd3ef52](https://github.com/uptrace/bun/commit/dd3ef522c8a9c656958b73ee5d546854fb7c6edf))
* enhance debugging by adding query comments in headers ([1376d18](https://github.com/uptrace/bun/commit/1376d1870bfe3d89e3630203787f1e87c503d5df))
* sort fields by struct ([5edb672](https://github.com/uptrace/bun/commit/5edb672e320be9b210f06d25c4f4b9e761c1c526)), closes [#1095](https://github.com/uptrace/bun/issues/1095)
## [1.2.7](https://github.com/uptrace/bun/compare/v1.2.6...v1.2.7) (2025-01-01)
### Bug Fixes
* do not create new migrations if nothing to migrate ([5cc961d](https://github.com/uptrace/bun/commit/5cc961d6cc461ad3534728fc4d3cae12bf8b736e))
* has many relation with driver.Valuer ([cb8c42c](https://github.com/uptrace/bun/commit/cb8c42cd3f65d95865c76a594abad815eea1df3c))
* improve range type to support driver.Valuer and sql.Scanner ([856e12b](https://github.com/uptrace/bun/commit/856e12b0d37275a6aa247370f6a8231fd89ca3e7))
* pass correct 'transactional' parameter ([ebdef1b](https://github.com/uptrace/bun/commit/ebdef1b0e9d33a5ca475ab4c2ec2fb44d11d4595))
* **pgdialect:** remove unsigned integer conversion ([ab3c679](https://github.com/uptrace/bun/commit/ab3c679d529dd20d44e789dc6f1d89f9510bde0b)), closes [uptrace/bun#624](https://github.com/uptrace/bun/issues/624)
* remove unused param on table.go and tables.go: canAddr ([d563e2d](https://github.com/uptrace/bun/commit/d563e2dbe95caeb0e00ad1b3e82283431747fe7b))
* replace the link to docs repo in CONTRIBUTING.md ([e120096](https://github.com/uptrace/bun/commit/e12009662ae1ddefcc1337cc5e32e73d77c7def0))
* trim surrounding '' in string literal in DEFAULT clause ([a0dff72](https://github.com/uptrace/bun/commit/a0dff72b6ab0ca24d00c96c923046200dd6112ed))
### Features
* add an ordered map to remove unnecessary dependencies ([9fea143](https://github.com/uptrace/bun/commit/9fea1437d8344d836670e802fd12d3476e8cad86))
* support disable dialect's feature ([5343bd7](https://github.com/uptrace/bun/commit/5343bd7fc4ceda866a7d607388ebb7a89f7f5823))
## [1.2.6](https://github.com/uptrace/bun/compare/v1.2.5...v1.2.6) (2024-11-20)

View file

@ -31,4 +31,4 @@ TAG=v1.0.0 ./scripts/tag.sh
## Documentation
To contribute to the docs visit https://github.com/go-bun/bun-docs
To contribute to the docs visit https://github.com/uptrace/bun-docs

18
vendor/github.com/uptrace/bun/db.go generated vendored
View file

@ -35,8 +35,7 @@ func WithDiscardUnknownColumns() DBOption {
type DB struct {
*sql.DB
dialect schema.Dialect
features feature.Feature
dialect schema.Dialect
queryHooks []QueryHook
@ -50,10 +49,9 @@ func NewDB(sqldb *sql.DB, dialect schema.Dialect, opts ...DBOption) *DB {
dialect.Init(sqldb)
db := &DB{
DB: sqldb,
dialect: dialect,
features: dialect.Features(),
fmter: schema.NewFormatter(dialect),
DB: sqldb,
dialect: dialect,
fmter: schema.NewFormatter(dialect),
}
for _, opt := range opts {
@ -231,7 +229,7 @@ func (db *DB) UpdateFQN(alias, column string) Ident {
// HasFeature uses feature package to report whether the underlying DBMS supports this feature.
func (db *DB) HasFeature(feat feature.Feature) bool {
return db.fmter.HasFeature(feat)
return db.dialect.Features().Has(feat)
}
//------------------------------------------------------------------------------
@ -513,7 +511,7 @@ func (tx Tx) commitTX() error {
}
func (tx Tx) commitSP() error {
if tx.Dialect().Features().Has(feature.MSSavepoint) {
if tx.db.HasFeature(feature.MSSavepoint) {
return nil
}
query := "RELEASE SAVEPOINT " + tx.name
@ -537,7 +535,7 @@ func (tx Tx) rollbackTX() error {
func (tx Tx) rollbackSP() error {
query := "ROLLBACK TO SAVEPOINT " + tx.name
if tx.Dialect().Features().Has(feature.MSSavepoint) {
if tx.db.HasFeature(feature.MSSavepoint) {
query = "ROLLBACK TRANSACTION " + tx.name
}
_, err := tx.ExecContext(tx.ctx, query)
@ -601,7 +599,7 @@ func (tx Tx) BeginTx(ctx context.Context, _ *sql.TxOptions) (Tx, error) {
qName := "SP_" + hex.EncodeToString(sp)
query := "SAVEPOINT " + qName
if tx.Dialect().Features().Has(feature.MSSavepoint) {
if tx.db.HasFeature(feature.MSSavepoint) {
query = "SAVE TRANSACTION " + qName
}
_, err = tx.ExecContext(ctx, query)

View file

@ -11,22 +11,22 @@ import (
)
var (
driverValuerType = reflect.TypeOf((*driver.Valuer)(nil)).Elem()
driverValuerType = reflect.TypeFor[driver.Valuer]()
stringType = reflect.TypeOf((*string)(nil)).Elem()
sliceStringType = reflect.TypeOf([]string(nil))
stringType = reflect.TypeFor[string]()
sliceStringType = reflect.TypeFor[[]string]()
intType = reflect.TypeOf((*int)(nil)).Elem()
sliceIntType = reflect.TypeOf([]int(nil))
intType = reflect.TypeFor[int]()
sliceIntType = reflect.TypeFor[[]int]()
int64Type = reflect.TypeOf((*int64)(nil)).Elem()
sliceInt64Type = reflect.TypeOf([]int64(nil))
int64Type = reflect.TypeFor[int64]()
sliceInt64Type = reflect.TypeFor[[]int64]()
float64Type = reflect.TypeOf((*float64)(nil)).Elem()
sliceFloat64Type = reflect.TypeOf([]float64(nil))
float64Type = reflect.TypeFor[float64]()
sliceFloat64Type = reflect.TypeFor[[]float64]()
timeType = reflect.TypeOf((*time.Time)(nil)).Elem()
sliceTimeType = reflect.TypeOf([]time.Time(nil))
timeType = reflect.TypeFor[time.Time]()
sliceTimeType = reflect.TypeFor[[]time.Time]()
)
func appendTime(buf []byte, tm time.Time) []byte {
@ -67,9 +67,9 @@ func appendMapStringString(b []byte, m map[string]string) []byte {
b = append(b, '\'')
for key, value := range m {
b = arrayAppendString(b, key)
b = appendStringElem(b, key)
b = append(b, '=', '>')
b = arrayAppendString(b, value)
b = appendStringElem(b, value)
b = append(b, ',')
}
if len(m) > 0 {

View file

@ -3,13 +3,11 @@ package pgdialect
import (
"database/sql"
"database/sql/driver"
"encoding/hex"
"fmt"
"math"
"reflect"
"strconv"
"time"
"unicode/utf8"
"github.com/uptrace/bun/dialect"
"github.com/uptrace/bun/internal"
@ -146,44 +144,21 @@ func (d *Dialect) arrayElemAppender(typ reflect.Type) schema.AppenderFunc {
}
switch typ.Kind() {
case reflect.String:
return arrayAppendStringValue
return appendStringElemValue
case reflect.Slice:
if typ.Elem().Kind() == reflect.Uint8 {
return arrayAppendBytesValue
return appendBytesElemValue
}
}
return schema.Appender(d, typ)
}
func arrayAppend(fmter schema.Formatter, b []byte, v interface{}) []byte {
switch v := v.(type) {
case int64:
return strconv.AppendInt(b, v, 10)
case float64:
return arrayAppendFloat64(b, v)
case bool:
return dialect.AppendBool(b, v)
case []byte:
return arrayAppendBytes(b, v)
case string:
return arrayAppendString(b, v)
case time.Time:
b = append(b, '"')
b = appendTime(b, v)
b = append(b, '"')
return b
default:
err := fmt.Errorf("pgdialect: can't append %T", v)
return dialect.AppendError(b, err)
}
func appendStringElemValue(fmter schema.Formatter, b []byte, v reflect.Value) []byte {
return appendStringElem(b, v.String())
}
func arrayAppendStringValue(fmter schema.Formatter, b []byte, v reflect.Value) []byte {
return arrayAppendString(b, v.String())
}
func arrayAppendBytesValue(fmter schema.Formatter, b []byte, v reflect.Value) []byte {
return arrayAppendBytes(b, v.Bytes())
func appendBytesElemValue(fmter schema.Formatter, b []byte, v reflect.Value) []byte {
return appendBytesElem(b, v.Bytes())
}
func arrayAppendDriverValue(fmter schema.Formatter, b []byte, v reflect.Value) []byte {
@ -191,7 +166,7 @@ func arrayAppendDriverValue(fmter schema.Formatter, b []byte, v reflect.Value) [
if err != nil {
return dialect.AppendError(b, err)
}
return arrayAppend(fmter, b, iface)
return appendElem(b, iface)
}
func appendStringSliceValue(fmter schema.Formatter, b []byte, v reflect.Value) []byte {
@ -208,7 +183,7 @@ func appendStringSlice(b []byte, ss []string) []byte {
b = append(b, '{')
for _, s := range ss {
b = arrayAppendString(b, s)
b = appendStringElem(b, s)
b = append(b, ',')
}
if len(ss) > 0 {
@ -496,7 +471,7 @@ func decodeIntSlice(src interface{}) ([]int, error) {
continue
}
n, err := strconv.Atoi(bytesToString(elem))
n, err := strconv.Atoi(internal.String(elem))
if err != nil {
return nil, err
}
@ -545,7 +520,7 @@ func decodeInt64Slice(src interface{}) ([]int64, error) {
continue
}
n, err := strconv.ParseInt(bytesToString(elem), 10, 64)
n, err := strconv.ParseInt(internal.String(elem), 10, 64)
if err != nil {
return nil, err
}
@ -594,7 +569,7 @@ func scanFloat64Slice(src interface{}) ([]float64, error) {
continue
}
n, err := strconv.ParseFloat(bytesToString(elem), 64)
n, err := strconv.ParseFloat(internal.String(elem), 64)
if err != nil {
return nil, err
}
@ -610,57 +585,10 @@ func scanFloat64Slice(src interface{}) ([]float64, error) {
func toBytes(src interface{}) ([]byte, error) {
switch src := src.(type) {
case string:
return stringToBytes(src), nil
return internal.Bytes(src), nil
case []byte:
return src, nil
default:
return nil, fmt.Errorf("pgdialect: got %T, wanted []byte or string", src)
}
}
//------------------------------------------------------------------------------
func arrayAppendBytes(b []byte, bs []byte) []byte {
if bs == nil {
return dialect.AppendNull(b)
}
b = append(b, `"\\x`...)
s := len(b)
b = append(b, make([]byte, hex.EncodedLen(len(bs)))...)
hex.Encode(b[s:], bs)
b = append(b, '"')
return b
}
func arrayAppendString(b []byte, s string) []byte {
b = append(b, '"')
for _, r := range s {
switch r {
case 0:
// ignore
case '\'':
b = append(b, "''"...)
case '"':
b = append(b, '\\', '"')
case '\\':
b = append(b, '\\', '\\')
default:
if r < utf8.RuneSelf {
b = append(b, byte(r))
break
}
l := len(b)
if cap(b)-l < utf8.UTFMax {
b = append(b, make([]byte, utf8.UTFMax)...)
}
n := utf8.EncodeRune(b[l:l+utf8.UTFMax], r)
b = b[:l+n]
}
}
b = append(b, '"')
return b
}

View file

@ -1 +0,0 @@
package pgdialect

View file

@ -3,7 +3,6 @@ package pgdialect
import (
"database/sql"
"fmt"
"strconv"
"strings"
"github.com/uptrace/bun"
@ -34,7 +33,7 @@ var _ schema.Dialect = (*Dialect)(nil)
var _ sqlschema.InspectorDialect = (*Dialect)(nil)
var _ sqlschema.MigratorDialect = (*Dialect)(nil)
func New() *Dialect {
func New(opts ...DialectOption) *Dialect {
d := new(Dialect)
d.tables = schema.NewTables(d)
d.features = feature.CTE |
@ -55,9 +54,22 @@ func New() *Dialect {
feature.GeneratedIdentity |
feature.CompositeIn |
feature.DeleteReturning
for _, opt := range opts {
opt(d)
}
return d
}
// DialectOption customizes the Dialect returned by New.
type DialectOption func(d *Dialect)

// WithoutFeature returns a DialectOption that removes the given feature
// flag(s) from the dialect's default feature set.
func WithoutFeature(other feature.Feature) DialectOption {
	return func(d *Dialect) {
		d.features = d.features.Remove(other)
	}
}
func (d *Dialect) Init(*sql.DB) {}
func (d *Dialect) Name() dialect.Name {
@ -115,14 +127,6 @@ func (d *Dialect) IdentQuote() byte {
return '"'
}
func (d *Dialect) AppendUint32(b []byte, n uint32) []byte {
return strconv.AppendInt(b, int64(int32(n)), 10)
}
func (d *Dialect) AppendUint64(b []byte, n uint64) []byte {
return strconv.AppendInt(b, int64(n), 10)
}
func (d *Dialect) AppendSequence(b []byte, _ *schema.Table, _ *schema.Field) []byte {
return appendGeneratedAsIdentity(b)
}

View file

@ -0,0 +1,87 @@
package pgdialect
import (
"database/sql/driver"
"encoding/hex"
"fmt"
"strconv"
"time"
"unicode/utf8"
"github.com/uptrace/bun/dialect"
)
// appendElem appends val as a single PostgreSQL element value (as used
// inside array and hstore literals) to buf and returns the extended buffer.
// Supported dynamic types are int64, float64, bool, []byte, string,
// time.Time and driver.Valuer; any other type is reported as an error via
// dialect.AppendError.
func appendElem(buf []byte, val interface{}) []byte {
	switch val := val.(type) {
	case int64:
		return strconv.AppendInt(buf, val, 10)
	case float64:
		return arrayAppendFloat64(buf, val)
	case bool:
		return dialect.AppendBool(buf, val)
	case []byte:
		return appendBytesElem(buf, val)
	case string:
		return appendStringElem(buf, val)
	case time.Time:
		// Timestamps are wrapped in double quotes inside element position.
		buf = append(buf, '"')
		buf = appendTime(buf, val)
		buf = append(buf, '"')
		return buf
	case driver.Valuer:
		// Unwrap the driver value and append the underlying value recursively.
		val2, err := val.Value()
		if err != nil {
			err := fmt.Errorf("pgdialect: can't append elem value: %w", err)
			return dialect.AppendError(buf, err)
		}
		return appendElem(buf, val2)
	default:
		err := fmt.Errorf("pgdialect: can't append elem %T", val)
		return dialect.AppendError(buf, err)
	}
}
// appendBytesElem appends bs as a double-quoted, hex-encoded ("\\x...)
// element value, or as NULL when bs is nil.
func appendBytesElem(b []byte, bs []byte) []byte {
	if bs == nil {
		return dialect.AppendNull(b)
	}

	b = append(b, `"\\x`...)
	encoded := make([]byte, hex.EncodedLen(len(bs)))
	hex.Encode(encoded, bs)
	b = append(b, encoded...)
	return append(b, '"')
}
// appendStringElem appends s as a double-quoted element value. NUL runes
// are dropped, a single quote is doubled (''), and '"' and '\' are
// backslash-escaped; every other rune is copied through as UTF-8.
func appendStringElem(b []byte, s string) []byte {
	b = append(b, '"')
	for _, r := range s {
		switch r {
		case 0:
			// PostgreSQL text values cannot contain NUL bytes; drop them.
		case '\'':
			b = append(b, "''"...)
		case '"':
			b = append(b, `\"`...)
		case '\\':
			b = append(b, `\\`...)
		default:
			if r < utf8.RuneSelf {
				b = append(b, byte(r))
			} else {
				b = utf8.AppendRune(b, r)
			}
		}
	}
	return append(b, '"')
}

View file

@ -5,8 +5,8 @@ import (
"strings"
"github.com/uptrace/bun"
"github.com/uptrace/bun/internal/ordered"
"github.com/uptrace/bun/migrate/sqlschema"
orderedmap "github.com/wk8/go-ordered-map/v2"
)
type (
@ -34,7 +34,7 @@ func newInspector(db *bun.DB, options ...sqlschema.InspectorOption) *Inspector {
func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) {
dbSchema := Schema{
Tables: orderedmap.New[string, sqlschema.Table](),
Tables: ordered.NewMap[string, sqlschema.Table](),
ForeignKeys: make(map[sqlschema.ForeignKey]string),
}
@ -61,7 +61,7 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) {
return dbSchema, err
}
colDefs := orderedmap.New[string, sqlschema.Column]()
colDefs := ordered.NewMap[string, sqlschema.Column]()
uniqueGroups := make(map[string][]string)
for _, c := range columns {
@ -72,7 +72,7 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) {
def = strings.ToLower(def)
}
colDefs.Set(c.Name, &Column{
colDefs.Store(c.Name, &Column{
Name: c.Name,
SQLType: c.DataType,
VarcharLen: c.VarcharLen,
@ -103,7 +103,7 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) {
}
}
dbSchema.Tables.Set(table.Name, &Table{
dbSchema.Tables.Store(table.Name, &Table{
Schema: table.Schema,
Name: table.Name,
Columns: colDefs,

View file

@ -0,0 +1,107 @@
package pgdialect
import (
"bytes"
"encoding/hex"
"github.com/uptrace/bun/internal/parser"
)
// pgparser parses PostgreSQL textual output values (array, hstore and
// range literals) on top of the generic parser.Parser.
type pgparser struct {
	parser.Parser

	buf []byte // scratch buffer reused across Read* calls
}

// newParser returns a pgparser positioned at the start of b.
func newParser(b []byte) *pgparser {
	p := new(pgparser)
	p.Reset(b)
	return p
}

// ReadLiteral reads an unquoted literal up to (but not including) the next
// ',' separator. ch is the first, already-consumed byte of the literal; it
// is pushed back before reading.
func (p *pgparser) ReadLiteral(ch byte) []byte {
	p.Unread()
	lit, _ := p.ReadSep(',')
	return lit
}

// ReadUnescapedSubstring reads a double-quoted substring without treating a
// doubled single quote ('') as an escape sequence.
func (p *pgparser) ReadUnescapedSubstring(ch byte) ([]byte, error) {
	return p.readSubstring(ch, false)
}

// ReadSubstring reads a double-quoted substring, collapsing a doubled
// single quote ('') into one.
func (p *pgparser) ReadSubstring(ch byte) ([]byte, error) {
	return p.readSubstring(ch, true)
}

// readSubstring consumes bytes up to the closing unescaped '"' and returns
// the unescaped content. Backslash escapes \\ and \" are collapsed to a
// single character; a lone backslash is kept verbatim. When escaped is
// true, a doubled single quote ('') is also collapsed. If the unescaped
// content looks like hex-encoded bytea output (a "\x" prefix and even
// length), it is hex-decoded before being returned.
//
// NOTE(review): the returned slice aliases p.buf (or a fresh slice in the
// hex case) and is only valid until the next Read* call — callers appear to
// copy it; confirm before retaining.
func (p *pgparser) readSubstring(ch byte, escaped bool) ([]byte, error) {
	// ch passed in is the opening quote; start from the first content byte.
	ch, err := p.ReadByte()
	if err != nil {
		return nil, err
	}

	p.buf = p.buf[:0]
	for {
		if ch == '"' {
			// Unescaped closing quote terminates the substring.
			break
		}

		// Look one byte ahead to decide how to treat ch.
		next, err := p.ReadByte()
		if err != nil {
			return nil, err
		}

		if ch == '\\' {
			switch next {
			case '\\', '"':
				// \\ or \" -> emit the escaped character and advance past it.
				p.buf = append(p.buf, next)

				ch, err = p.ReadByte()
				if err != nil {
					return nil, err
				}
			default:
				// Lone backslash: keep it and continue with the next byte.
				p.buf = append(p.buf, '\\')
				ch = next
			}
			continue
		}

		if escaped && ch == '\'' && next == '\'' {
			// '' -> a single quote; advance past the second one.
			p.buf = append(p.buf, next)
			ch, err = p.ReadByte()
			if err != nil {
				return nil, err
			}
			continue
		}

		// Ordinary byte: emit it and shift the lookahead into ch.
		p.buf = append(p.buf, ch)
		ch = next
	}

	if bytes.HasPrefix(p.buf, []byte("\\x")) && len(p.buf)%2 == 0 {
		// Presumably bytea hex output (\xDEADBEEF); decode it.
		data := p.buf[2:]
		buf := make([]byte, hex.DecodedLen(len(data)))
		n, err := hex.Decode(buf, data)
		if err != nil {
			return nil, err
		}
		return buf[:n], nil
	}

	return p.buf, nil
}

// ReadRange reads a range literal starting at ch through its closing ']'
// or ')' bracket, inclusive, and returns the raw bytes.
func (p *pgparser) ReadRange(ch byte) ([]byte, error) {
	p.buf = p.buf[:0]
	p.buf = append(p.buf, ch)

	for p.Valid() {
		ch = p.Read()
		p.buf = append(p.buf, ch)
		if ch == ']' || ch == ')' {
			break
		}
	}

	return p.buf, nil
}

View file

@ -1,15 +1,12 @@
package pgdialect
import (
"bytes"
"database/sql"
"encoding/hex"
"fmt"
"io"
"time"
"github.com/uptrace/bun/internal"
"github.com/uptrace/bun/internal/parser"
"github.com/uptrace/bun/schema"
)
@ -41,7 +38,10 @@ func NewRange[T any](lower, upper T) Range[T] {
var _ sql.Scanner = (*Range[any])(nil)
func (r *Range[T]) Scan(anySrc any) (err error) {
src := anySrc.([]byte)
src, ok := anySrc.([]byte)
if !ok {
return fmt.Errorf("pgdialect: Range can't scan %T", anySrc)
}
if len(src) == 0 {
return io.ErrUnexpectedEOF
@ -90,18 +90,6 @@ func (r *Range[T]) AppendQuery(fmt schema.Formatter, buf []byte) ([]byte, error)
return buf, nil
}
func appendElem(buf []byte, val any) []byte {
switch val := val.(type) {
case time.Time:
buf = append(buf, '"')
buf = appendTime(buf, val)
buf = append(buf, '"')
return buf
default:
panic(fmt.Errorf("unsupported range type: %T", val))
}
}
func scanElem(ptr any, src []byte) ([]byte, error) {
switch ptr := ptr.(type) {
case *time.Time:
@ -117,6 +105,17 @@ func scanElem(ptr any, src []byte) ([]byte, error) {
*ptr = tm
return src, nil
case sql.Scanner:
src, str, err := readStringLiteral(src)
if err != nil {
return nil, err
}
if err := ptr.Scan(str); err != nil {
return nil, err
}
return src, nil
default:
panic(fmt.Errorf("unsupported range type: %T", ptr))
}
@ -137,104 +136,3 @@ func readStringLiteral(src []byte) ([]byte, []byte, error) {
src = p.Remaining()
return src, str, nil
}
//------------------------------------------------------------------------------
type pgparser struct {
parser.Parser
buf []byte
}
func newParser(b []byte) *pgparser {
p := new(pgparser)
p.Reset(b)
return p
}
func (p *pgparser) ReadLiteral(ch byte) []byte {
p.Unread()
lit, _ := p.ReadSep(',')
return lit
}
func (p *pgparser) ReadUnescapedSubstring(ch byte) ([]byte, error) {
return p.readSubstring(ch, false)
}
func (p *pgparser) ReadSubstring(ch byte) ([]byte, error) {
return p.readSubstring(ch, true)
}
func (p *pgparser) readSubstring(ch byte, escaped bool) ([]byte, error) {
ch, err := p.ReadByte()
if err != nil {
return nil, err
}
p.buf = p.buf[:0]
for {
if ch == '"' {
break
}
next, err := p.ReadByte()
if err != nil {
return nil, err
}
if ch == '\\' {
switch next {
case '\\', '"':
p.buf = append(p.buf, next)
ch, err = p.ReadByte()
if err != nil {
return nil, err
}
default:
p.buf = append(p.buf, '\\')
ch = next
}
continue
}
if escaped && ch == '\'' && next == '\'' {
p.buf = append(p.buf, next)
ch, err = p.ReadByte()
if err != nil {
return nil, err
}
continue
}
p.buf = append(p.buf, ch)
ch = next
}
if bytes.HasPrefix(p.buf, []byte("\\x")) && len(p.buf)%2 == 0 {
data := p.buf[2:]
buf := make([]byte, hex.DecodedLen(len(data)))
n, err := hex.Decode(buf, data)
if err != nil {
return nil, err
}
return buf[:n], nil
}
return p.buf, nil
}
func (p *pgparser) ReadRange(ch byte) ([]byte, error) {
p.buf = p.buf[:0]
p.buf = append(p.buf, ch)
for p.Valid() {
ch = p.Read()
p.buf = append(p.buf, ch)
if ch == ']' || ch == ')' {
break
}
}
return p.buf, nil
}

View file

@ -1,11 +0,0 @@
// +build appengine
package pgdialect
func bytesToString(b []byte) string {
return string(b)
}
func stringToBytes(s string) []byte {
return []byte(s)
}

View file

@ -1,11 +0,0 @@
package pgdialect
import (
"reflect"
"github.com/uptrace/bun/schema"
)
func scanner(typ reflect.Type) schema.ScannerFunc {
return schema.Scanner(typ)
}

View file

@ -44,10 +44,10 @@ const (
)
var (
ipType = reflect.TypeOf((*net.IP)(nil)).Elem()
ipNetType = reflect.TypeOf((*net.IPNet)(nil)).Elem()
jsonRawMessageType = reflect.TypeOf((*json.RawMessage)(nil)).Elem()
nullStringType = reflect.TypeOf((*sql.NullString)(nil)).Elem()
ipType = reflect.TypeFor[net.IP]()
ipNetType = reflect.TypeFor[net.IPNet]()
jsonRawMessageType = reflect.TypeFor[json.RawMessage]()
nullStringType = reflect.TypeFor[sql.NullString]()
)
func (d *Dialect) DefaultVarcharLen() int {

View file

@ -1,18 +0,0 @@
// +build !appengine
package pgdialect
import "unsafe"
func bytesToString(b []byte) string {
return *(*string)(unsafe.Pointer(&b))
}
func stringToBytes(s string) []byte {
return *(*[]byte)(unsafe.Pointer(
&struct {
string
Cap int
}{s, len(s)},
))
}

View file

@ -2,5 +2,5 @@ package pgdialect
// Version is the current release version.
func Version() string {
return "1.2.6"
return "1.2.8"
}

View file

@ -26,7 +26,7 @@ type Dialect struct {
features feature.Feature
}
func New() *Dialect {
func New(opts ...DialectOption) *Dialect {
d := new(Dialect)
d.tables = schema.NewTables(d)
d.features = feature.CTE |
@ -42,9 +42,22 @@ func New() *Dialect {
feature.AutoIncrement |
feature.CompositeIn |
feature.DeleteReturning
for _, opt := range opts {
opt(d)
}
return d
}
// DialectOption customizes the Dialect returned by New.
type DialectOption func(d *Dialect)

// WithoutFeature returns a DialectOption that removes the given feature
// flag(s) from the dialect's default feature set.
func WithoutFeature(other feature.Feature) DialectOption {
	return func(d *Dialect) {
		d.features = d.features.Remove(other)
	}
}
func (d *Dialect) Init(*sql.DB) {}
func (d *Dialect) Name() dialect.Name {

View file

@ -1,11 +0,0 @@
package sqlitedialect
import (
"reflect"
"github.com/uptrace/bun/schema"
)
func scanner(typ reflect.Type) schema.ScannerFunc {
return schema.Scanner(typ)
}

View file

@ -2,5 +2,5 @@ package sqlitedialect
// Version is the current release version.
func Version() string {
return "1.2.6"
return "1.2.8"
}

View file

@ -16,6 +16,7 @@ import (
"github.com/uptrace/bun"
"github.com/uptrace/bun/dialect"
"github.com/uptrace/bun/internal"
"github.com/uptrace/bun/schema"
"github.com/uptrace/opentelemetry-go-extra/otelsql"
)
@ -169,7 +170,7 @@ func (h *QueryHook) eventQuery(event *bun.QueryEvent) string {
func unformattedQuery(event *bun.QueryEvent) string {
if event.IQuery != nil {
if b, err := event.IQuery.AppendQuery(schema.NewNopFormatter(), nil); err == nil {
return bytesToString(b)
return internal.String(b)
}
}
return string(event.QueryTemplate)

View file

@ -1,11 +0,0 @@
// +build appengine
package internal
func bytesToString(b []byte) string {
return string(b)
}
func stringToBytes(s string) []byte {
return []byte(s)
}

View file

@ -1,20 +0,0 @@
//go:build !appengine
// +build !appengine
package bunotel
import "unsafe"
func bytesToString(b []byte) string {
if len(b) == 0 {
return ""
}
return unsafe.String(&b[0], len(b))
}
func stringToBytes(s string) []byte {
if s == "" {
return []byte{}
}
return unsafe.Slice(unsafe.StringData(s), len(s))
}

View file

@ -2,7 +2,7 @@ package internal
import "reflect"
var ifaceType = reflect.TypeOf((*interface{})(nil)).Elem()
var ifaceType = reflect.TypeFor[interface{}]()
type MapKey struct {
iface interface{}

125
vendor/github.com/uptrace/bun/internal/ordered/map.go generated vendored Normal file
View file

@ -0,0 +1,125 @@
package ordered
// Pair is a single key-value entry of the ordered map.
type Pair[K comparable, V any] struct {
	Key   K
	Value V

	next, prev *Pair[K, V] // neighbors in the insertion-order ring
}

// Map is a map that additionally remembers the order in which keys were
// first inserted. Updating an existing key keeps its original position.
type Map[K comparable, V any] struct {
	root  *Pair[K, V]       // sentinel node of the circular doubly linked list
	zero  V                 // ready-made zero value for V
	pairs map[K]*Pair[K, V] // key -> list node
}

// NewMap creates a new ordered map, optionally seeded with initial pairs
// (stored in the order given).
func NewMap[K comparable, V any](initialData ...Pair[K, V]) *Map[K, V] {
	m := new(Map[K, V])
	m.Clear()
	for i := range initialData {
		m.Store(initialData[i].Key, initialData[i].Value)
	}
	return m
}

// Clear removes all pairs from the map, leaving it ready for reuse.
func (m *Map[K, V]) Clear() {
	if m.root != nil {
		m.root.next, m.root.prev = nil, nil // break links to help the GC
	}
	for _, node := range m.pairs {
		node.next, node.prev = nil, nil // break links to help the GC
	}

	sentinel := new(Pair[K, V])
	sentinel.next, sentinel.prev = sentinel, sentinel
	m.root = sentinel
	m.pairs = make(map[K]*Pair[K, V])
}

// Len returns the number of pairs in the map.
func (m *Map[K, V]) Len() int {
	return len(m.pairs)
}

// Load returns the value stored under key and whether the key was present.
func (m *Map[K, V]) Load(key K) (V, bool) {
	node, ok := m.pairs[key]
	if !ok {
		return m.zero, false
	}
	return node.Value, true
}

// Value returns the value stored under key, or the zero value when absent.
func (m *Map[K, V]) Value(key K) V {
	node, ok := m.pairs[key]
	if !ok {
		return m.zero
	}
	return node.Value
}

// Store inserts a new pair at the end of the order, or updates the value of
// an existing key in place (keeping its position).
func (m *Map[K, V]) Store(key K, value V) {
	if node, ok := m.pairs[key]; ok {
		node.Value = value
		return
	}

	node := &Pair[K, V]{Key: key, Value: value, prev: m.root.prev, next: m.root}
	node.prev.next = node
	m.root.prev = node
	m.pairs[key] = node
}

// Delete removes the pair stored under key, if any.
func (m *Map[K, V]) Delete(key K) {
	node, ok := m.pairs[key]
	if !ok {
		return
	}
	node.prev.next = node.next
	node.next.prev = node.prev
	node.next, node.prev = nil, nil // break links to help the GC
	delete(m.pairs, key)
}

// Range invokes yield for each pair in insertion order until yield returns
// false or the map is exhausted.
func (m *Map[K, V]) Range(yield func(key K, value V) bool) {
	for node := m.root.next; node != m.root; node = node.next {
		if !yield(node.Key, node.Value) {
			return
		}
	}
}

// Keys returns all keys in insertion order.
func (m *Map[K, V]) Keys() []K {
	out := make([]K, 0, len(m.pairs))
	for node := m.root.next; node != m.root; node = node.next {
		out = append(out, node.Key)
	}
	return out
}

// Values returns all values in insertion order.
func (m *Map[K, V]) Values() []V {
	out := make([]V, 0, len(m.pairs))
	for node := m.root.next; node != m.root; node = node.next {
		out = append(out, node.Value)
	}
	return out
}

// Pairs returns all key-value pairs in insertion order.
func (m *Map[K, V]) Pairs() []Pair[K, V] {
	out := make([]Pair[K, V], 0, len(m.pairs))
	for node := m.root.next; node != m.root; node = node.next {
		out = append(out, Pair[K, V]{Key: node.Key, Value: node.Value})
	}
	return out
}

View file

@ -196,6 +196,9 @@ func (am *AutoMigrator) plan(ctx context.Context) (*changeset, error) {
func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) (*MigrationGroup, error) {
migrations, _, err := am.createSQLMigrations(ctx, false)
if err != nil {
if err == errNothingToMigrate {
return new(MigrationGroup), nil
}
return nil, fmt.Errorf("auto migrate: %w", err)
}
@ -214,23 +217,37 @@ func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) (*
// CreateSQLMigrations writes required changes to a new migration file.
// Use migrate.Migrator to apply the generated migrations.
func (am *AutoMigrator) CreateSQLMigrations(ctx context.Context) ([]*MigrationFile, error) {
_, files, err := am.createSQLMigrations(ctx, true)
_, files, err := am.createSQLMigrations(ctx, false)
if err == errNothingToMigrate {
return files, nil
}
return files, err
}
// CreateTxSQLMigrations writes required changes to a new migration file making sure they will be executed
// in a transaction when applied. Use migrate.Migrator to apply the generated migrations.
func (am *AutoMigrator) CreateTxSQLMigrations(ctx context.Context) ([]*MigrationFile, error) {
_, files, err := am.createSQLMigrations(ctx, false)
_, files, err := am.createSQLMigrations(ctx, true)
if err == errNothingToMigrate {
return files, nil
}
return files, err
}
// errNothingToMigrate is a sentinel error which means the database is already in a desired state.
// Should not be returned to the user -- return a nil-error instead.
var errNothingToMigrate = errors.New("nothing to migrate")
func (am *AutoMigrator) createSQLMigrations(ctx context.Context, transactional bool) (*Migrations, []*MigrationFile, error) {
changes, err := am.plan(ctx)
if err != nil {
return nil, nil, fmt.Errorf("create sql migrations: %w", err)
}
if changes.Len() == 0 {
return nil, nil, errNothingToMigrate
}
name, _ := genMigrationName(am.schemaName + "_auto")
migrations := NewMigrations(am.migrationsOpts...)
migrations.Add(Migration{
@ -282,6 +299,10 @@ func (am *AutoMigrator) createSQL(_ context.Context, migrations *Migrations, fna
return mf, nil
}
func (c *changeset) Len() int {
return len(c.operations)
}
// Func creates a MigrationFunc that applies all operations all the changeset.
func (c *changeset) Func(m sqlschema.Migrator) MigrationFunc {
return func(ctx context.Context, db *bun.DB) error {

View file

@ -26,20 +26,21 @@ func (d *detector) detectChanges() *changeset {
targetTables := d.target.GetTables()
RenameCreate:
for wantName, wantTable := range targetTables.FromOldest() {
for _, wantPair := range targetTables.Pairs() {
wantName, wantTable := wantPair.Key, wantPair.Value
// A table with this name exists in the database. We assume that schema objects won't
// be renamed to an already existing name, nor do we support such cases.
// Simply check if the table definition has changed.
if haveTable, ok := currentTables.Get(wantName); ok {
if haveTable, ok := currentTables.Load(wantName); ok {
d.detectColumnChanges(haveTable, wantTable, true)
d.detectConstraintChanges(haveTable, wantTable)
continue
}
// Find all renamed tables. We assume that renamed tables have the same signature.
for haveName, haveTable := range currentTables.FromOldest() {
if _, exists := targetTables.Get(haveName); !exists && d.canRename(haveTable, wantTable) {
for _, havePair := range currentTables.Pairs() {
haveName, haveTable := havePair.Key, havePair.Value
if _, exists := targetTables.Load(haveName); !exists && d.canRename(haveTable, wantTable) {
d.changes.Add(&RenameTableOp{
TableName: haveTable.GetName(),
NewName: wantName,
@ -65,8 +66,9 @@ RenameCreate:
}
// Drop any remaining "current" tables which do not have a model.
for name, table := range currentTables.FromOldest() {
if _, keep := targetTables.Get(name); !keep {
for _, tPair := range currentTables.Pairs() {
name, table := tPair.Key, tPair.Value
if _, keep := targetTables.Load(name); !keep {
d.changes.Add(&DropTableOp{
TableName: table.GetName(),
})
@ -103,12 +105,13 @@ func (d *detector) detectColumnChanges(current, target sqlschema.Table, checkTyp
targetColumns := target.GetColumns()
ChangeRename:
for tName, tCol := range targetColumns.FromOldest() {
for _, tPair := range targetColumns.Pairs() {
tName, tCol := tPair.Key, tPair.Value
// This column exists in the database, so it hasn't been renamed, dropped, or added.
// Still, we should not delete(columns, thisColumn), because later we will need to
// check that we do not try to rename a column to a name that already exists.
if cCol, ok := currentColumns.Get(tName); ok {
if cCol, ok := currentColumns.Load(tName); ok {
if checkType && !d.equalColumns(cCol, tCol) {
d.changes.Add(&ChangeColumnTypeOp{
TableName: target.GetName(),
@ -122,9 +125,10 @@ ChangeRename:
// Column tName does not exist in the database -- it's been either renamed or added.
// Find renamed columns first.
for cName, cCol := range currentColumns.FromOldest() {
for _, cPair := range currentColumns.Pairs() {
cName, cCol := cPair.Key, cPair.Value
// Cannot rename if a column with this name already exists or the types differ.
if _, exists := targetColumns.Get(cName); exists || !d.equalColumns(tCol, cCol) {
if _, exists := targetColumns.Load(cName); exists || !d.equalColumns(tCol, cCol) {
continue
}
d.changes.Add(&RenameColumnOp{
@ -149,8 +153,9 @@ ChangeRename:
}
// Drop columns which do not exist in the target schema and were not renamed.
for cName, cCol := range currentColumns.FromOldest() {
if _, keep := targetColumns.Get(cName); !keep {
for _, cPair := range currentColumns.Pairs() {
cName, cCol := cPair.Key, cPair.Value
if _, keep := targetColumns.Load(cName); !keep {
d.changes.Add(&DropColumnOp{
TableName: target.GetName(),
ColumnName: cName,
@ -325,7 +330,7 @@ func newSignature(t sqlschema.Table, eq CompareTypeFunc) signature {
// scan iterates over table's field and counts occurrences of each unique column definition.
func (s *signature) scan(t sqlschema.Table) {
for _, icol := range t.GetColumns().FromOldest() {
for _, icol := range t.GetColumns().Values() {
scanCol := icol.(*sqlschema.BaseColumn)
// This is slightly more expensive than if the columns could be compared directly
// and we always did s.underlying[col]++, but we get type-equivalence in return.

View file

@ -4,12 +4,12 @@ import (
"slices"
"strings"
"github.com/uptrace/bun/internal/ordered"
"github.com/uptrace/bun/schema"
orderedmap "github.com/wk8/go-ordered-map/v2"
)
type Database interface {
GetTables() *orderedmap.OrderedMap[string, Table]
GetTables() *ordered.Map[string, Table]
GetForeignKeys() map[ForeignKey]string
}
@ -20,11 +20,11 @@ var _ Database = (*BaseDatabase)(nil)
// Dialects and only dialects can use it to implement the Database interface.
// Other packages must use the Database interface.
type BaseDatabase struct {
Tables *orderedmap.OrderedMap[string, Table]
Tables *ordered.Map[string, Table]
ForeignKeys map[ForeignKey]string
}
func (ds BaseDatabase) GetTables() *orderedmap.OrderedMap[string, Table] {
func (ds BaseDatabase) GetTables() *ordered.Map[string, Table] {
return ds.Tables
}

View file

@ -7,8 +7,8 @@ import (
"strings"
"github.com/uptrace/bun"
"github.com/uptrace/bun/internal/ordered"
"github.com/uptrace/bun/schema"
orderedmap "github.com/wk8/go-ordered-map/v2"
)
type InspectorDialect interface {
@ -102,25 +102,25 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Database, error) {
BaseDatabase: BaseDatabase{
ForeignKeys: make(map[ForeignKey]string),
},
Tables: orderedmap.New[string, Table](),
Tables: ordered.NewMap[string, Table](),
}
for _, t := range bmi.tables.All() {
if t.Schema != bmi.SchemaName {
continue
}
columns := orderedmap.New[string, Column]()
columns := ordered.NewMap[string, Column]()
for _, f := range t.Fields {
sqlType, length, err := parseLen(f.CreateTableSQLType)
if err != nil {
return nil, fmt.Errorf("parse length in %q: %w", f.CreateTableSQLType, err)
}
columns.Set(f.Name, &BaseColumn{
columns.Store(f.Name, &BaseColumn{
Name: f.Name,
SQLType: strings.ToLower(sqlType), // TODO(dyma): maybe this is not necessary after Column.Eq()
VarcharLen: length,
DefaultValue: exprToLower(f.SQLDefault),
DefaultValue: exprOrLiteral(f.SQLDefault),
IsNullable: !f.NotNull,
IsAutoIncrement: f.AutoIncrement,
IsIdentity: f.Identity,
@ -162,7 +162,7 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Database, error) {
// produces
// schema.Table{ Schema: "favourite", Name: "favourite.books" }
tableName := strings.TrimPrefix(t.Name, t.Schema+".")
state.Tables.Set(tableName, &BunTable{
state.Tables.Store(tableName, &BunTable{
BaseTable: BaseTable{
Schema: t.Schema,
Name: tableName,
@ -211,12 +211,13 @@ func parseLen(typ string) (string, int, error) {
return typ[:paren], length, nil
}
// exprToLower converts string to lowercase, if it does not contain a string literal 'lit'.
// exprOrLiteral converts the string to lowercase unless it is a quoted string literal 'lit',
// in which case it trims the surrounding quotes instead.
// Use it to ensure that user-defined default values in the models are always comparable
// to those returned by the database inspector, regardless of the case convention in individual drivers.
func exprToLower(s string) string {
func exprOrLiteral(s string) string {
if strings.HasPrefix(s, "'") && strings.HasSuffix(s, "'") {
return s
return strings.Trim(s, "'")
}
return strings.ToLower(s)
}
@ -225,10 +226,10 @@ func exprToLower(s string) string {
type BunModelSchema struct {
BaseDatabase
Tables *orderedmap.OrderedMap[string, Table]
Tables *ordered.Map[string, Table]
}
func (ms BunModelSchema) GetTables() *orderedmap.OrderedMap[string, Table] {
func (ms BunModelSchema) GetTables() *ordered.Map[string, Table] {
return ms.Tables
}

View file

@ -1,13 +1,13 @@
package sqlschema
import (
orderedmap "github.com/wk8/go-ordered-map/v2"
"github.com/uptrace/bun/internal/ordered"
)
type Table interface {
GetSchema() string
GetName() string
GetColumns() *orderedmap.OrderedMap[string, Column]
GetColumns() *ordered.Map[string, Column]
GetPrimaryKey() *PrimaryKey
GetUniqueConstraints() []Unique
}
@ -23,7 +23,7 @@ type BaseTable struct {
Name string
// ColumnDefinitions map each column name to the column definition.
Columns *orderedmap.OrderedMap[string, Column]
Columns *ordered.Map[string, Column]
// PrimaryKey holds the primary key definition.
// A nil value means that no primary key is defined for the table.
@ -47,7 +47,7 @@ func (td *BaseTable) GetName() string {
return td.Name
}
func (td *BaseTable) GetColumns() *orderedmap.OrderedMap[string, Column] {
func (td *BaseTable) GetColumns() *ordered.Map[string, Column] {
return td.Columns
}

View file

@ -14,8 +14,8 @@ import (
var errNilModel = errors.New("bun: Model(nil)")
var (
timeType = reflect.TypeOf((*time.Time)(nil)).Elem()
bytesType = reflect.TypeOf((*[]byte)(nil)).Elem()
timeType = reflect.TypeFor[time.Time]()
bytesType = reflect.TypeFor[[]byte]()
)
type Model = schema.Model

View file

@ -99,7 +99,7 @@ func (m *mapSliceModel) appendValues(fmter schema.Formatter, b []byte) (_ []byte
slice := *m.dest
b = append(b, "VALUES "...)
if m.db.features.Has(feature.ValuesRow) {
if m.db.HasFeature(feature.ValuesRow) {
b = append(b, "ROW("...)
} else {
b = append(b, '(')
@ -118,7 +118,7 @@ func (m *mapSliceModel) appendValues(fmter schema.Formatter, b []byte) (_ []byte
for i, el := range slice {
if i > 0 {
b = append(b, "), "...)
if m.db.features.Has(feature.ValuesRow) {
if m.db.HasFeature(feature.ValuesRow) {
b = append(b, "ROW("...)
} else {
b = append(b, '(')

View file

@ -3,6 +3,7 @@ package bun
import (
"context"
"database/sql"
"database/sql/driver"
"fmt"
"reflect"
@ -152,7 +153,13 @@ func modelKey(key []interface{}, strct reflect.Value, fields []*schema.Field) []
// The value is then used as a map key.
func indirectFieldValue(field reflect.Value) interface{} {
if field.Kind() != reflect.Ptr {
return field.Interface()
i := field.Interface()
if valuer, ok := i.(driver.Valuer); ok {
if v, err := valuer.Value(); err == nil {
return v
}
}
return i
}
if field.IsNil() {
return nil

View file

@ -103,7 +103,7 @@ func (m *m2mModel) scanM2MColumn(column string, src interface{}) error {
if err := field.Scan(dest, src); err != nil {
return err
}
m.structKey = append(m.structKey, dest.Interface())
m.structKey = append(m.structKey, indirectFieldValue(dest))
break
}
}

View file

@ -1,6 +1,6 @@
{
"name": "gobun",
"version": "1.2.6",
"version": "1.2.8",
"main": "index.js",
"repository": "git@github.com:uptrace/bun.git",
"author": "Vladimir Mihailenco <vladimir.webdev@gmail.com>",

View file

@ -201,7 +201,7 @@ func (q *baseQuery) beforeAppendModel(ctx context.Context, query Query) error {
}
func (q *baseQuery) hasFeature(feature feature.Feature) bool {
return q.db.features.Has(feature)
return q.db.HasFeature(feature)
}
//------------------------------------------------------------------------------

View file

@ -13,6 +13,7 @@ type AddColumnQuery struct {
baseQuery
ifNotExists bool
comment string
}
var _ Query = (*AddColumnQuery)(nil)
@ -85,6 +86,14 @@ func (q *AddColumnQuery) IfNotExists() *AddColumnQuery {
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *AddColumnQuery) Comment(comment string) *AddColumnQuery {
q.comment = comment
return q
}
//------------------------------------------------------------------------------
func (q *AddColumnQuery) Operation() string {
return "ADD COLUMN"
}
@ -93,6 +102,9 @@ func (q *AddColumnQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte
if q.err != nil {
return nil, q.err
}
b = appendComment(b, q.comment)
if len(q.columns) != 1 {
return nil, fmt.Errorf("bun: AddColumnQuery requires exactly one column")
}

View file

@ -11,6 +11,8 @@ import (
type DropColumnQuery struct {
baseQuery
comment string
}
var _ Query = (*DropColumnQuery)(nil)
@ -85,6 +87,14 @@ func (q *DropColumnQuery) ColumnExpr(query string, args ...interface{}) *DropCol
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *DropColumnQuery) Comment(comment string) *DropColumnQuery {
q.comment = comment
return q
}
//------------------------------------------------------------------------------
func (q *DropColumnQuery) Operation() string {
return "DROP COLUMN"
}
@ -93,6 +103,9 @@ func (q *DropColumnQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byt
if q.err != nil {
return nil, q.err
}
b = appendComment(b, q.comment)
if len(q.columns) != 1 {
return nil, fmt.Errorf("bun: DropColumnQuery requires exactly one column")
}

View file

@ -15,6 +15,8 @@ type DeleteQuery struct {
whereBaseQuery
orderLimitOffsetQuery
returningQuery
comment string
}
var _ Query = (*DeleteQuery)(nil)
@ -174,6 +176,14 @@ func (q *DeleteQuery) Returning(query string, args ...interface{}) *DeleteQuery
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *DeleteQuery) Comment(comment string) *DeleteQuery {
q.comment = comment
return q
}
//------------------------------------------------------------------------------
func (q *DeleteQuery) Operation() string {
return "DELETE"
}
@ -183,6 +193,8 @@ func (q *DeleteQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, e
return nil, q.err
}
b = appendComment(b, q.comment)
fmter = formatterWithModel(fmter, q)
if q.isSoftDelete() {
@ -201,7 +213,7 @@ func (q *DeleteQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, e
return upd.AppendQuery(fmter, b)
}
withAlias := q.db.features.Has(feature.DeleteTableAlias)
withAlias := q.db.HasFeature(feature.DeleteTableAlias)
b, err = q.appendWith(fmter, b)
if err != nil {

View file

@ -20,6 +20,7 @@ type CreateIndexQuery struct {
index schema.QueryWithArgs
using schema.QueryWithArgs
include []schema.QueryWithArgs
comment string
}
var _ Query = (*CreateIndexQuery)(nil)
@ -149,6 +150,14 @@ func (q *CreateIndexQuery) WhereOr(query string, args ...interface{}) *CreateInd
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *CreateIndexQuery) Comment(comment string) *CreateIndexQuery {
q.comment = comment
return q
}
//------------------------------------------------------------------------------
func (q *CreateIndexQuery) Operation() string {
return "CREATE INDEX"
}
@ -158,6 +167,8 @@ func (q *CreateIndexQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []by
return nil, q.err
}
b = appendComment(b, q.comment)
b = append(b, "CREATE "...)
if q.unique {

View file

@ -15,7 +15,8 @@ type DropIndexQuery struct {
concurrently bool
ifExists bool
index schema.QueryWithArgs
index schema.QueryWithArgs
comment string
}
var _ Query = (*DropIndexQuery)(nil)
@ -74,6 +75,14 @@ func (q *DropIndexQuery) Index(query string, args ...interface{}) *DropIndexQuer
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *DropIndexQuery) Comment(comment string) *DropIndexQuery {
q.comment = comment
return q
}
//------------------------------------------------------------------------------
func (q *DropIndexQuery) Operation() string {
return "DROP INDEX"
}
@ -83,6 +92,8 @@ func (q *DropIndexQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte
return nil, q.err
}
b = appendComment(b, q.comment)
b = append(b, "DROP INDEX "...)
if q.concurrently {

View file

@ -22,6 +22,7 @@ type InsertQuery struct {
ignore bool
replace bool
comment string
}
var _ Query = (*InsertQuery)(nil)
@ -164,6 +165,14 @@ func (q *InsertQuery) Replace() *InsertQuery {
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *InsertQuery) Comment(comment string) *InsertQuery {
q.comment = comment
return q
}
//------------------------------------------------------------------------------
func (q *InsertQuery) Operation() string {
return "INSERT"
}
@ -173,6 +182,8 @@ func (q *InsertQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, e
return nil, q.err
}
b = appendComment(b, q.comment)
fmter = formatterWithModel(fmter, q)
b, err = q.appendWith(fmter, b)
@ -190,7 +201,7 @@ func (q *InsertQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, e
}
b = append(b, "INTO "...)
if q.db.features.Has(feature.InsertTableAlias) && !q.on.IsZero() {
if q.db.HasFeature(feature.InsertTableAlias) && !q.on.IsZero() {
b, err = q.appendFirstTableWithAlias(fmter, b)
} else {
b, err = q.appendFirstTable(fmter, b)
@ -385,9 +396,9 @@ func (q *InsertQuery) appendSliceValues(
}
func (q *InsertQuery) getFields() ([]*schema.Field, error) {
hasIdentity := q.db.features.Has(feature.Identity)
hasIdentity := q.db.HasFeature(feature.Identity)
if len(q.columns) > 0 || q.db.features.Has(feature.DefaultPlaceholder) && !hasIdentity {
if len(q.columns) > 0 || q.db.HasFeature(feature.DefaultPlaceholder) && !hasIdentity {
return q.baseQuery.getFields()
}
@ -640,8 +651,8 @@ func (q *InsertQuery) afterInsertHook(ctx context.Context) error {
}
func (q *InsertQuery) tryLastInsertID(res sql.Result, dest []interface{}) error {
if q.db.features.Has(feature.Returning) ||
q.db.features.Has(feature.Output) ||
if q.db.HasFeature(feature.Returning) ||
q.db.HasFeature(feature.Output) ||
q.table == nil ||
len(q.table.PKs) != 1 ||
!q.table.PKs[0].AutoIncrement {

View file

@ -15,9 +15,10 @@ type MergeQuery struct {
baseQuery
returningQuery
using schema.QueryWithArgs
on schema.QueryWithArgs
when []schema.QueryAppender
using schema.QueryWithArgs
on schema.QueryWithArgs
when []schema.QueryAppender
comment string
}
var _ Query = (*MergeQuery)(nil)
@ -150,6 +151,14 @@ func (q *MergeQuery) When(expr string, args ...interface{}) *MergeQuery {
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *MergeQuery) Comment(comment string) *MergeQuery {
q.comment = comment
return q
}
//------------------------------------------------------------------------------
func (q *MergeQuery) Operation() string {
return "MERGE"
}
@ -159,6 +168,8 @@ func (q *MergeQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, er
return nil, q.err
}
b = appendComment(b, q.comment)
fmter = formatterWithModel(fmter, q)
b, err = q.appendWith(fmter, b)

View file

@ -10,8 +10,9 @@ import (
type RawQuery struct {
baseQuery
query string
args []interface{}
query string
args []interface{}
comment string
}
// Deprecated: Use NewRaw instead. When add it to IDB, it conflicts with the sql.Conn#Raw
@ -56,6 +57,12 @@ func (q *RawQuery) Scan(ctx context.Context, dest ...interface{}) error {
return err
}
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *RawQuery) Comment(comment string) *RawQuery {
q.comment = comment
return q
}
func (q *RawQuery) scanOrExec(
ctx context.Context, dest []interface{}, hasDest bool,
) (sql.Result, error) {
@ -90,6 +97,8 @@ func (q *RawQuery) scanOrExec(
}
func (q *RawQuery) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) {
b = appendComment(b, q.comment)
return fmter.AppendQuery(b, q.query, q.args...), nil
}

View file

@ -31,7 +31,8 @@ type SelectQuery struct {
having []schema.QueryWithArgs
selFor schema.QueryWithArgs
union []union
union []union
comment string
}
var _ Query = (*SelectQuery)(nil)
@ -381,6 +382,43 @@ func (q *SelectQuery) Relation(name string, apply ...func(*SelectQuery) *SelectQ
return q
}
q.applyToRelation(join, apply...)
return q
}
type RelationOpts struct {
	// Apply, if non-nil, is invoked on the relation's subquery
	// (the same way the apply callbacks passed to Relation are),
	// allowing the caller to customize the generated join/select.
	Apply func(*SelectQuery) *SelectQuery
	// AdditionalJoinOnConditions holds extra conditions that are
	// appended to the JOIN ON clause (combined with AND).
	AdditionalJoinOnConditions []schema.QueryWithArgs
}
// RelationWithOpts adds the relation named name to the query and applies the
// additional options in opts. It records an error on the query if the model
// is nil or the relation does not exist.
func (q *SelectQuery) RelationWithOpts(name string, opts RelationOpts) *SelectQuery {
	if q.tableModel == nil {
		q.setErr(errNilModel)
		return q
	}

	join := q.tableModel.join(name)
	if join == nil {
		q.setErr(fmt.Errorf("%s does not have relation=%q", q.table, name))
		return q
	}

	if apply := opts.Apply; apply != nil {
		q.applyToRelation(join, apply)
	}
	if conds := opts.AdditionalJoinOnConditions; len(conds) > 0 {
		join.additionalJoinOnConditions = conds
	}
	return q
}
func (q *SelectQuery) applyToRelation(join *relationJoin, apply ...func(*SelectQuery) *SelectQuery) {
var apply1, apply2 func(*SelectQuery) *SelectQuery
if len(join.Relation.Condition) > 0 {
@ -407,8 +445,6 @@ func (q *SelectQuery) Relation(name string, apply ...func(*SelectQuery) *SelectQ
return q
}
return q
}
func (q *SelectQuery) forEachInlineRelJoin(fn func(*relationJoin) error) error {
@ -460,11 +496,21 @@ func (q *SelectQuery) selectJoins(ctx context.Context, joins []relationJoin) err
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *SelectQuery) Comment(comment string) *SelectQuery {
q.comment = comment
return q
}
//------------------------------------------------------------------------------
func (q *SelectQuery) Operation() string {
return "SELECT"
}
func (q *SelectQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) {
b = appendComment(b, q.comment)
return q.appendQuery(fmter, b, false)
}
@ -803,6 +849,14 @@ func (q *SelectQuery) scanResult(ctx context.Context, dest ...interface{}) (sql.
if err != nil {
return nil, err
}
if len(dest) > 0 && q.tableModel != nil && len(q.tableModel.getJoins()) > 0 {
for _, j := range q.tableModel.getJoins() {
switch j.Relation.Type {
case schema.HasManyRelation, schema.ManyToManyRelation:
return nil, fmt.Errorf("When querying has-many or many-to-many relationships, you should use Model instead of the dest parameter in Scan.")
}
}
}
if q.table != nil {
if err := q.beforeSelectHook(ctx); err != nil {

View file

@ -32,6 +32,7 @@ type CreateTableQuery struct {
fks []schema.QueryWithArgs
partitionBy schema.QueryWithArgs
tablespace schema.QueryWithArgs
comment string
}
var _ Query = (*CreateTableQuery)(nil)
@ -129,6 +130,14 @@ func (q *CreateTableQuery) WithForeignKeys() *CreateTableQuery {
return q
}
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *CreateTableQuery) Comment(comment string) *CreateTableQuery {
q.comment = comment
return q
}
// ------------------------------------------------------------------------------
func (q *CreateTableQuery) Operation() string {
@ -139,6 +148,9 @@ func (q *CreateTableQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []by
if q.err != nil {
return nil, q.err
}
b = appendComment(b, q.comment)
if q.table == nil {
return nil, errNilModel
}

View file

@ -13,6 +13,7 @@ type DropTableQuery struct {
cascadeQuery
ifExists bool
comment string
}
var _ Query = (*DropTableQuery)(nil)
@ -80,6 +81,14 @@ func (q *DropTableQuery) Restrict() *DropTableQuery {
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *DropTableQuery) Comment(comment string) *DropTableQuery {
q.comment = comment
return q
}
//------------------------------------------------------------------------------
func (q *DropTableQuery) Operation() string {
return "DROP TABLE"
}
@ -89,6 +98,8 @@ func (q *DropTableQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte
return nil, q.err
}
b = appendComment(b, q.comment)
b = append(b, "DROP TABLE "...)
if q.ifExists {
b = append(b, "IF EXISTS "...)

View file

@ -14,6 +14,7 @@ type TruncateTableQuery struct {
cascadeQuery
continueIdentity bool
comment string
}
var _ Query = (*TruncateTableQuery)(nil)
@ -81,6 +82,14 @@ func (q *TruncateTableQuery) Restrict() *TruncateTableQuery {
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *TruncateTableQuery) Comment(comment string) *TruncateTableQuery {
q.comment = comment
return q
}
//------------------------------------------------------------------------------
func (q *TruncateTableQuery) Operation() string {
return "TRUNCATE TABLE"
}
@ -92,6 +101,8 @@ func (q *TruncateTableQuery) AppendQuery(
return nil, q.err
}
b = appendComment(b, q.comment)
if !fmter.HasFeature(feature.TableTruncate) {
b = append(b, "DELETE FROM "...)
@ -110,7 +121,7 @@ func (q *TruncateTableQuery) AppendQuery(
return nil, err
}
if q.db.features.Has(feature.TableIdentity) {
if q.db.HasFeature(feature.TableIdentity) {
if q.continueIdentity {
b = append(b, " CONTINUE IDENTITY"...)
} else {

View file

@ -23,6 +23,7 @@ type UpdateQuery struct {
joins []joinQuery
omitZero bool
comment string
}
var _ Query = (*UpdateQuery)(nil)
@ -243,6 +244,14 @@ func (q *UpdateQuery) Returning(query string, args ...interface{}) *UpdateQuery
//------------------------------------------------------------------------------
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *UpdateQuery) Comment(comment string) *UpdateQuery {
q.comment = comment
return q
}
//------------------------------------------------------------------------------
func (q *UpdateQuery) Operation() string {
return "UPDATE"
}
@ -252,6 +261,8 @@ func (q *UpdateQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, e
return nil, q.err
}
b = appendComment(b, q.comment)
fmter = formatterWithModel(fmter, q)
b, err = q.appendWith(fmter, b)

View file

@ -14,6 +14,7 @@ type ValuesQuery struct {
customValueQuery
withOrder bool
comment string
}
var (
@ -64,6 +65,12 @@ func (q *ValuesQuery) WithOrder() *ValuesQuery {
return q
}
// Comment adds a comment to the query, wrapped by /* ... */.
func (q *ValuesQuery) Comment(comment string) *ValuesQuery {
q.comment = comment
return q
}
func (q *ValuesQuery) AppendNamedArg(fmter schema.Formatter, b []byte, name string) ([]byte, bool) {
switch name {
case "Columns":
@ -121,6 +128,8 @@ func (q *ValuesQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, e
return nil, errNilModel
}
b = appendComment(b, q.comment)
fmter = formatterWithModel(fmter, q)
if q.tableModel != nil {
@ -145,7 +154,7 @@ func (q *ValuesQuery) appendQuery(
fields []*schema.Field,
) (_ []byte, err error) {
b = append(b, "VALUES "...)
if q.db.features.Has(feature.ValuesRow) {
if q.db.HasFeature(feature.ValuesRow) {
b = append(b, "ROW("...)
} else {
b = append(b, '(')
@ -168,7 +177,7 @@ func (q *ValuesQuery) appendQuery(
for i := 0; i < sliceLen; i++ {
if i > 0 {
b = append(b, "), "...)
if q.db.features.Has(feature.ValuesRow) {
if q.db.HasFeature(feature.ValuesRow) {
b = append(b, "ROW("...)
} else {
b = append(b, '(')

View file

@ -16,6 +16,8 @@ type relationJoin struct {
JoinModel TableModel
Relation *schema.Relation
additionalJoinOnConditions []schema.QueryWithArgs
apply func(*SelectQuery) *SelectQuery
columns []schema.QueryWithArgs
}
@ -63,7 +65,7 @@ func (j *relationJoin) manyQuery(q *SelectQuery) *SelectQuery {
var where []byte
if q.db.dialect.Features().Has(feature.CompositeIn) {
if q.db.HasFeature(feature.CompositeIn) {
return j.manyQueryCompositeIn(where, q)
}
return j.manyQueryMulti(where, q)
@ -86,6 +88,11 @@ func (j *relationJoin) manyQueryCompositeIn(where []byte, q *SelectQuery) *Selec
j.Relation.BasePKs,
)
where = append(where, ")"...)
if len(j.additionalJoinOnConditions) > 0 {
where = append(where, " AND "...)
where = appendAdditionalJoinOnConditions(q.db.Formatter(), where, j.additionalJoinOnConditions)
}
q = q.Where(internal.String(where))
if j.Relation.PolymorphicField != nil {
@ -111,6 +118,10 @@ func (j *relationJoin) manyQueryMulti(where []byte, q *SelectQuery) *SelectQuery
q = q.Where(internal.String(where))
if len(j.additionalJoinOnConditions) > 0 {
q = q.Where(internal.String(appendAdditionalJoinOnConditions(q.db.Formatter(), []byte{}, j.additionalJoinOnConditions)))
}
if j.Relation.PolymorphicField != nil {
q = q.Where("? = ?", j.Relation.PolymorphicField.SQLName, j.Relation.PolymorphicValue)
}
@ -204,6 +215,12 @@ func (j *relationJoin) m2mQuery(q *SelectQuery) *SelectQuery {
join = append(join, ") IN ("...)
join = appendChildValues(fmter, join, j.BaseModel.rootValue(), index, j.Relation.BasePKs)
join = append(join, ")"...)
if len(j.additionalJoinOnConditions) > 0 {
join = append(join, " AND "...)
join = appendAdditionalJoinOnConditions(fmter, join, j.additionalJoinOnConditions)
}
q = q.Join(internal.String(join))
joinTable := j.JoinModel.Table()
@ -330,6 +347,11 @@ func (j *relationJoin) appendHasOneJoin(
b = j.appendSoftDelete(fmter, b, q.flags)
}
if len(j.additionalJoinOnConditions) > 0 {
b = append(b, " AND "...)
b = appendAdditionalJoinOnConditions(fmter, b, j.additionalJoinOnConditions)
}
return b, nil
}
@ -417,3 +439,15 @@ func appendMultiValues(
b = append(b, ')')
return b
}
// appendAdditionalJoinOnConditions renders conditions into b as an
// " AND "-separated list formatted by fmter and returns the extended buffer.
func appendAdditionalJoinOnConditions(
	fmter schema.Formatter, b []byte, conditions []schema.QueryWithArgs,
) []byte {
	first := true
	for _, cond := range conditions {
		if !first {
			b = append(b, " AND "...)
		}
		first = false
		b = fmter.AppendQuery(b, cond.Query, cond.Args...)
	}
	return b
}

View file

@ -24,7 +24,7 @@ type BeforeAppendModelHook interface {
BeforeAppendModel(ctx context.Context, query Query) error
}
var beforeAppendModelHookType = reflect.TypeOf((*BeforeAppendModelHook)(nil)).Elem()
var beforeAppendModelHookType = reflect.TypeFor[BeforeAppendModelHook]()
//------------------------------------------------------------------------------
@ -32,7 +32,7 @@ type BeforeScanRowHook interface {
BeforeScanRow(context.Context) error
}
var beforeScanRowHookType = reflect.TypeOf((*BeforeScanRowHook)(nil)).Elem()
var beforeScanRowHookType = reflect.TypeFor[BeforeScanRowHook]()
//------------------------------------------------------------------------------
@ -40,4 +40,4 @@ type AfterScanRowHook interface {
AfterScanRow(context.Context) error
}
var afterScanRowHookType = reflect.TypeOf((*AfterScanRowHook)(nil)).Elem()
var afterScanRowHookType = reflect.TypeFor[AfterScanRowHook]()

View file

@ -10,18 +10,18 @@ import (
)
var (
bytesType = reflect.TypeOf((*[]byte)(nil)).Elem()
timePtrType = reflect.TypeOf((*time.Time)(nil))
timeType = timePtrType.Elem()
ipType = reflect.TypeOf((*net.IP)(nil)).Elem()
ipNetType = reflect.TypeOf((*net.IPNet)(nil)).Elem()
netipPrefixType = reflect.TypeOf((*netip.Prefix)(nil)).Elem()
netipAddrType = reflect.TypeOf((*netip.Addr)(nil)).Elem()
jsonRawMessageType = reflect.TypeOf((*json.RawMessage)(nil)).Elem()
bytesType = reflect.TypeFor[[]byte]()
timePtrType = reflect.TypeFor[*time.Time]()
timeType = reflect.TypeFor[time.Time]()
ipType = reflect.TypeFor[net.IP]()
ipNetType = reflect.TypeFor[net.IPNet]()
netipPrefixType = reflect.TypeFor[netip.Prefix]()
netipAddrType = reflect.TypeFor[netip.Addr]()
jsonRawMessageType = reflect.TypeFor[json.RawMessage]()
driverValuerType = reflect.TypeOf((*driver.Valuer)(nil)).Elem()
queryAppenderType = reflect.TypeOf((*QueryAppender)(nil)).Elem()
jsonMarshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
driverValuerType = reflect.TypeFor[driver.Valuer]()
queryAppenderType = reflect.TypeFor[QueryAppender]()
jsonMarshalerType = reflect.TypeFor[json.Marshaler]()
)
func indirectType(t reflect.Type) reflect.Type {

View file

@ -18,7 +18,7 @@ import (
"github.com/uptrace/bun/internal"
)
var scannerType = reflect.TypeOf((*sql.Scanner)(nil)).Elem()
var scannerType = reflect.TypeFor[sql.Scanner]()
type ScannerFunc func(dest reflect.Value, src interface{}) error
@ -38,8 +38,8 @@ func init() {
reflect.Uint32: scanUint64,
reflect.Uint64: scanUint64,
reflect.Uintptr: scanUint64,
reflect.Float32: scanFloat64,
reflect.Float64: scanFloat64,
reflect.Float32: scanFloat,
reflect.Float64: scanFloat,
reflect.Complex64: nil,
reflect.Complex128: nil,
reflect.Array: nil,
@ -214,11 +214,14 @@ func scanUint64(dest reflect.Value, src interface{}) error {
}
}
func scanFloat64(dest reflect.Value, src interface{}) error {
func scanFloat(dest reflect.Value, src interface{}) error {
switch src := src.(type) {
case nil:
dest.SetFloat(0)
return nil
case float32:
dest.SetFloat(float64(src))
return nil
case float64:
dest.SetFloat(src)
return nil

View file

@ -13,12 +13,12 @@ import (
)
var (
bunNullTimeType = reflect.TypeOf((*NullTime)(nil)).Elem()
nullTimeType = reflect.TypeOf((*sql.NullTime)(nil)).Elem()
nullBoolType = reflect.TypeOf((*sql.NullBool)(nil)).Elem()
nullFloatType = reflect.TypeOf((*sql.NullFloat64)(nil)).Elem()
nullIntType = reflect.TypeOf((*sql.NullInt64)(nil)).Elem()
nullStringType = reflect.TypeOf((*sql.NullString)(nil)).Elem()
bunNullTimeType = reflect.TypeFor[NullTime]()
nullTimeType = reflect.TypeFor[sql.NullTime]()
nullBoolType = reflect.TypeFor[sql.NullBool]()
nullFloatType = reflect.TypeFor[sql.NullFloat64]()
nullIntType = reflect.TypeFor[sql.NullInt64]()
nullStringType = reflect.TypeFor[sql.NullString]()
)
var sqlTypes = []string{

View file

@ -4,6 +4,7 @@ import (
"database/sql"
"fmt"
"reflect"
"sort"
"strings"
"time"
@ -22,7 +23,7 @@ const (
)
var (
baseModelType = reflect.TypeOf((*BaseModel)(nil)).Elem()
baseModelType = reflect.TypeFor[BaseModel]()
tableNameInflector = inflection.Plural
)
@ -75,7 +76,7 @@ type structField struct {
Table *Table
}
func (table *Table) init(dialect Dialect, typ reflect.Type, canAddr bool) {
func (table *Table) init(dialect Dialect, typ reflect.Type) {
table.dialect = dialect
table.Type = typ
table.ZeroValue = reflect.New(table.Type).Elem()
@ -90,7 +91,7 @@ func (table *Table) init(dialect Dialect, typ reflect.Type, canAddr bool) {
table.Fields = make([]*Field, 0, typ.NumField())
table.FieldMap = make(map[string]*Field, typ.NumField())
table.processFields(typ, canAddr)
table.processFields(typ)
hooks := []struct {
typ reflect.Type
@ -110,7 +111,7 @@ func (table *Table) init(dialect Dialect, typ reflect.Type, canAddr bool) {
}
}
func (t *Table) processFields(typ reflect.Type, canAddr bool) {
func (t *Table) processFields(typ reflect.Type) {
type embeddedField struct {
prefix string
index []int
@ -250,6 +251,30 @@ func (t *Table) processFields(typ reflect.Type, canAddr bool) {
t.addUnique(subfield, embfield.prefix, v)
}
}
if len(embedded) > 0 {
// https://github.com/uptrace/bun/issues/1095
// < v1.2, all fields follow the order corresponding to the struct
// >= v1.2, < v1.2.8, fields of nested structs have been moved to the end.
// >= v1.2.8, the default behavior is restored to match the original (< v1.2) order.
sortFieldsByStruct(t.allFields)
sortFieldsByStruct(t.Fields)
sortFieldsByStruct(t.PKs)
sortFieldsByStruct(t.DataFields)
}
}
// sortFieldsByStruct orders fields by their struct traversal path (Field.Index),
// restoring the declaration order of fields, including those of embedded structs.
func sortFieldsByStruct(fields []*Field) {
	sort.Slice(fields, func(i, j int) bool {
		left, right := fields[i], fields[j]
		for k := 0; k < len(left.Index) && k < len(right.Index); k++ {
			if left.Index[k] != right.Index[k] {
				return left.Index[k] < right.Index[k]
			}
		}
		// One index path is a prefix of the other (an embedded struct vs one
		// of its fields), or the paths are identical. Order the shorter path
		// first and — crucially — return false for equal elements: returning
		// true here would make less(i, j) and less(j, i) both true, which
		// violates the strict-weak-ordering contract of sort.Slice and can
		// produce a nondeterministic order.
		return len(left.Index) < len(right.Index)
	})
}
func (t *Table) addUnique(field *Field, prefix string, tagOptions []string) {

View file

@ -72,7 +72,7 @@ func (t *Tables) InProgress(typ reflect.Type) *Table {
table := new(Table)
t.inProgress[typ] = table
table.init(t.dialect, typ, false)
table.init(t.dialect, typ)
return table
}

View file

@ -5,7 +5,7 @@ import (
"reflect"
)
var isZeroerType = reflect.TypeOf((*isZeroer)(nil)).Elem()
var isZeroerType = reflect.TypeFor[isZeroer]()
type isZeroer interface {
IsZero() bool

View file

@ -1,6 +1,10 @@
package bun
import "reflect"
import (
"fmt"
"reflect"
"strings"
)
func indirect(v reflect.Value) reflect.Value {
switch v.Kind() {
@ -66,3 +70,19 @@ func sliceElemType(v reflect.Value) reflect.Type {
}
return indirectType(elemType)
}
// appendComment writes name, wrapped in /* ... */, to the head of the query
// buffer b and returns the extended buffer. An empty name is a no-op.
// NUL bytes are stripped and the comment delimiters themselves are escaped
// so a crafted comment cannot terminate early or open a nested comment
// inside the generated SQL.
func appendComment(b []byte, name string) []byte {
	if name == "" {
		return b
	}
	// Drop any zero bytes before escaping.
	var sb strings.Builder
	sb.Grow(len(name))
	for _, r := range name {
		if r != '\x00' {
			sb.WriteRune(r)
		}
	}
	sanitized := strings.ReplaceAll(sb.String(), `/*`, `/\*`)
	sanitized = strings.ReplaceAll(sanitized, `*/`, `*\/`)
	return fmt.Appendf(b, "/* %s */ ", sanitized)
}

View file

@ -2,5 +2,5 @@ package bun
// Version is the current release version.
func Version() string {
return "1.2.6"
return "1.2.8"
}