chore: better local dev with stainless script

packages/tui/sdk/internal/apijson/decoder.go  (Normal file, 670 lines added)
@@ -0,0 +1,670 @@
package apijson

import (
    "encoding/json"
    "errors"
    "fmt"
    "reflect"
    "strconv"
    "sync"
    "time"
    "unsafe"

    "github.com/tidwall/gjson"
)

// decoders is a synchronized map with roughly the following type:
// map[reflect.Type]decoderFunc
var decoders sync.Map

// Unmarshal is similar to [encoding/json.Unmarshal] and parses the JSON-encoded
// data and stores it in the given pointer.
func Unmarshal(raw []byte, to any) error {
    d := &decoderBuilder{dateFormat: time.RFC3339}
    return d.unmarshal(raw, to)
}

// UnmarshalRoot is like Unmarshal, but doesn't try to call UnmarshalJSON on the
// root element. Useful if a struct's UnmarshalJSON is overridden to use the
// behavior of this decoder versus the standard library.
func UnmarshalRoot(raw []byte, to any) error {
    d := &decoderBuilder{dateFormat: time.RFC3339, root: true}
    return d.unmarshal(raw, to)
}
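
// Illustrative sketch (not part of the generated code): decoding into a tagged
// struct with Unmarshal. The `animal` type is hypothetical; note that loose
// coercions (for example a JSON string into an int field) are accepted here
// and only downgrade the decode's exactness.
//
//    type animal struct {
//        Name string `json:"name"`
//        Legs int    `json:"legs"`
//    }
//
//    var a animal
//    err := Unmarshal([]byte(`{"name":"cat","legs":"4"}`), &a) // a.Legs == 4, err == nil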

// decoderBuilder contains the 'compile-time' state of the decoder.
type decoderBuilder struct {
    // Whether or not this is the first element and called by [UnmarshalRoot], see
    // the documentation there to see why this is necessary.
    root bool
    // The dateFormat (a format string for [time.Format]) which is chosen by the
    // last struct tag that was seen.
    dateFormat string
}

// decoderState contains the 'run-time' state of the decoder.
type decoderState struct {
    strict    bool
    exactness exactness
}

// Exactness refers to how close to the type the result was if deserialization
// was successful. This is useful in deserializing unions, where you want to try
// each entry, first with strict, then with looser validation, without actually
// having to do a lot of redundant work by marshalling twice (or maybe even more
// times).
type exactness int8

const (
    // Some values had to be fudged a bit, for example by converting a string to an
    // int, or an enum with extra values.
    loose exactness = iota
    // There are some extra arguments, but otherwise it matches the union.
    extras
    // Exactly right.
    exact
)

type decoderFunc func(node gjson.Result, value reflect.Value, state *decoderState) error

type decoderField struct {
    tag    parsedStructTag
    fn     decoderFunc
    idx    []int
    goname string
}

type decoderEntry struct {
    reflect.Type
    dateFormat string
    root       bool
}

func (d *decoderBuilder) unmarshal(raw []byte, to any) error {
    value := reflect.ValueOf(to).Elem()
    result := gjson.ParseBytes(raw)
    if !value.IsValid() {
        return fmt.Errorf("apijson: cannot marshal into invalid value")
    }
    return d.typeDecoder(value.Type())(result, value, &decoderState{strict: false, exactness: exact})
}

func (d *decoderBuilder) typeDecoder(t reflect.Type) decoderFunc {
    entry := decoderEntry{
        Type:       t,
        dateFormat: d.dateFormat,
        root:       d.root,
    }

    if fi, ok := decoders.Load(entry); ok {
        return fi.(decoderFunc)
    }

    // To deal with recursive types, populate the map with an
    // indirect func before we build it. This type waits on the
    // real func (f) to be ready and then calls it. This indirect
    // func is only used for recursive types.
    var (
        wg sync.WaitGroup
        f  decoderFunc
    )
    wg.Add(1)
    fi, loaded := decoders.LoadOrStore(entry, decoderFunc(func(node gjson.Result, v reflect.Value, state *decoderState) error {
        wg.Wait()
        return f(node, v, state)
    }))
    if loaded {
        return fi.(decoderFunc)
    }

    // Compute the real decoder and replace the indirect func with it.
    f = d.newTypeDecoder(t)
    wg.Done()
    decoders.Store(entry, f)
    return f
}
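
// A minimal sketch of why the indirect placeholder above is needed, assuming a
// self-referential type (the `node` type is illustrative only):
//
//    type node struct {
//        Value string `json:"value"`
//        Next  *node  `json:"next"`
//    }
//
// Building the decoder for node requires the decoder for *node, which requires
// node again; the placeholder stored with LoadOrStore breaks that cycle, and
// wg.Wait() makes any early caller block until the real decoder is ready.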

func indirectUnmarshalerDecoder(n gjson.Result, v reflect.Value, state *decoderState) error {
    return v.Addr().Interface().(json.Unmarshaler).UnmarshalJSON([]byte(n.Raw))
}

func unmarshalerDecoder(n gjson.Result, v reflect.Value, state *decoderState) error {
    if v.Kind() == reflect.Pointer && v.CanSet() {
        v.Set(reflect.New(v.Type().Elem()))
    }
    return v.Interface().(json.Unmarshaler).UnmarshalJSON([]byte(n.Raw))
}

func (d *decoderBuilder) newTypeDecoder(t reflect.Type) decoderFunc {
    if t.ConvertibleTo(reflect.TypeOf(time.Time{})) {
        return d.newTimeTypeDecoder(t)
    }
    if !d.root && t.Implements(reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()) {
        return unmarshalerDecoder
    }
    if !d.root && reflect.PointerTo(t).Implements(reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()) {
        if _, ok := unionVariants[t]; !ok {
            return indirectUnmarshalerDecoder
        }
    }
    d.root = false

    if _, ok := unionRegistry[t]; ok {
        return d.newUnionDecoder(t)
    }

    switch t.Kind() {
    case reflect.Pointer:
        inner := t.Elem()
        innerDecoder := d.typeDecoder(inner)

        return func(n gjson.Result, v reflect.Value, state *decoderState) error {
            if !v.IsValid() {
                return fmt.Errorf("apijson: unexpected invalid reflection value %+#v", v)
            }

            newValue := reflect.New(inner).Elem()
            err := innerDecoder(n, newValue, state)
            if err != nil {
                return err
            }

            v.Set(newValue.Addr())
            return nil
        }
    case reflect.Struct:
        return d.newStructTypeDecoder(t)
    case reflect.Array:
        fallthrough
    case reflect.Slice:
        return d.newArrayTypeDecoder(t)
    case reflect.Map:
        return d.newMapDecoder(t)
    case reflect.Interface:
        return func(node gjson.Result, value reflect.Value, state *decoderState) error {
            if !value.IsValid() {
                return fmt.Errorf("apijson: unexpected invalid value %+#v", value)
            }
            if node.Value() != nil && value.CanSet() {
                value.Set(reflect.ValueOf(node.Value()))
            }
            return nil
        }
    default:
        return d.newPrimitiveTypeDecoder(t)
    }
}

// newUnionDecoder returns a decoderFunc that deserializes into a union using an
// algorithm roughly similar to Pydantic's [smart algorithm].
//
// Conceptually this is equivalent to choosing the best schema based on how 'exact'
// the deserialization is for each of the schemas.
//
// If there is a tie in the level of exactness, then the tie is broken
// left-to-right.
//
// [smart algorithm]: https://docs.pydantic.dev/latest/concepts/unions/#smart-mode
func (d *decoderBuilder) newUnionDecoder(t reflect.Type) decoderFunc {
    unionEntry, ok := unionRegistry[t]
    if !ok {
        panic("apijson: couldn't find union of type " + t.String() + " in union registry")
    }
    decoders := []decoderFunc{}
    for _, variant := range unionEntry.variants {
        decoder := d.typeDecoder(variant.Type)
        decoders = append(decoders, decoder)
    }
    return func(n gjson.Result, v reflect.Value, state *decoderState) error {
        // If there is a discriminator match, circumvent the exactness logic entirely
        for idx, variant := range unionEntry.variants {
            decoder := decoders[idx]
            if variant.TypeFilter != n.Type {
                continue
            }

            if len(unionEntry.discriminatorKey) != 0 {
                discriminatorValue := n.Get(unionEntry.discriminatorKey).Value()
                if discriminatorValue == variant.DiscriminatorValue {
                    inner := reflect.New(variant.Type).Elem()
                    err := decoder(n, inner, state)
                    v.Set(inner)
                    return err
                }
            }
        }

        // Set bestExactness to worse than loose
        bestExactness := loose - 1
        for idx, variant := range unionEntry.variants {
            decoder := decoders[idx]
            if variant.TypeFilter != n.Type {
                continue
            }
            sub := decoderState{strict: state.strict, exactness: exact}
            inner := reflect.New(variant.Type).Elem()
            err := decoder(n, inner, &sub)
            if err != nil {
                continue
            }
            if sub.exactness == exact {
                v.Set(inner)
                return nil
            }
            if sub.exactness > bestExactness {
                v.Set(inner)
                bestExactness = sub.exactness
            }
        }

        if bestExactness < loose {
            return errors.New("apijson: was not able to coerce type as union")
        }

        if guardStrict(state, bestExactness != exact) {
            return errors.New("apijson: was not able to coerce type as union strictly")
        }

        return nil
    }
}
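
// Hedged sketch of how a union is wired up (it mirrors the shapes exercised in
// json_test.go; the Cat/Dog variants are illustrative only):
//
//    type Animal interface{ animal() }
//
//    type Cat struct {
//        Type string `json:"type"`
//        Meow bool   `json:"meow"`
//    }
//
//    func (Cat) animal() {}
//
//    type Dog struct {
//        Type string `json:"type"`
//        Bark bool   `json:"bark"`
//    }
//
//    func (Dog) animal() {}
//
//    func init() {
//        RegisterUnion(reflect.TypeOf((*Animal)(nil)).Elem(), "type",
//            UnionVariant{TypeFilter: gjson.JSON, DiscriminatorValue: "cat", Type: reflect.TypeOf(Cat{})},
//            UnionVariant{TypeFilter: gjson.JSON, DiscriminatorValue: "dog", Type: reflect.TypeOf(Dog{})},
//        )
//    }
//
// With a discriminator match the variant is picked directly; without one, every
// matching variant is tried and the most 'exact' result wins, ties going to the
// variant registered first.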

func (d *decoderBuilder) newMapDecoder(t reflect.Type) decoderFunc {
    keyType := t.Key()
    itemType := t.Elem()
    itemDecoder := d.typeDecoder(itemType)

    return func(node gjson.Result, value reflect.Value, state *decoderState) (err error) {
        mapValue := reflect.MakeMapWithSize(t, len(node.Map()))

        node.ForEach(func(key, value gjson.Result) bool {
            // It's fine for us to just use `ValueOf` here because the key types will
            // always be primitive types so we don't need to decode it using the standard pattern
            keyValue := reflect.ValueOf(key.Value())
            if !keyValue.IsValid() {
                if err == nil {
                    err = fmt.Errorf("apijson: received invalid key type %v", keyValue.String())
                }
                return false
            }
            if keyValue.Type() != keyType {
                if err == nil {
                    err = fmt.Errorf("apijson: expected key type %v but got %v", keyType, keyValue.Type())
                }
                return false
            }

            itemValue := reflect.New(itemType).Elem()
            itemerr := itemDecoder(value, itemValue, state)
            if itemerr != nil {
                if err == nil {
                    err = itemerr
                }
                return false
            }

            mapValue.SetMapIndex(keyValue, itemValue)
            return true
        })

        if err != nil {
            return err
        }
        value.Set(mapValue)
        return nil
    }
}

func (d *decoderBuilder) newArrayTypeDecoder(t reflect.Type) decoderFunc {
    itemDecoder := d.typeDecoder(t.Elem())

    return func(node gjson.Result, value reflect.Value, state *decoderState) (err error) {
        if !node.IsArray() {
            return fmt.Errorf("apijson: could not deserialize to an array")
        }

        arrayNode := node.Array()

        arrayValue := reflect.MakeSlice(reflect.SliceOf(t.Elem()), len(arrayNode), len(arrayNode))
        for i, itemNode := range arrayNode {
            err = itemDecoder(itemNode, arrayValue.Index(i), state)
            if err != nil {
                return err
            }
        }

        value.Set(arrayValue)
        return nil
    }
}

func (d *decoderBuilder) newStructTypeDecoder(t reflect.Type) decoderFunc {
    // map of json field name to struct field decoders
    decoderFields := map[string]decoderField{}
    anonymousDecoders := []decoderField{}
    extraDecoder := (*decoderField)(nil)
    inlineDecoder := (*decoderField)(nil)

    for i := 0; i < t.NumField(); i++ {
        idx := []int{i}
        field := t.FieldByIndex(idx)
        if !field.IsExported() {
            continue
        }
        // If this is an embedded struct, traverse one level deeper to extract
        // the fields and get their encoders as well.
        if field.Anonymous {
            anonymousDecoders = append(anonymousDecoders, decoderField{
                fn:  d.typeDecoder(field.Type),
                idx: idx[:],
            })
            continue
        }
        // If json tag is not present, then we skip, which is intentionally
        // different behavior from the stdlib.
        ptag, ok := parseJSONStructTag(field)
        if !ok {
            continue
        }
        // We only want to support unexported fields if they're tagged with
        // `extras` because that field shouldn't be part of the public API.
        if ptag.extras {
            extraDecoder = &decoderField{ptag, d.typeDecoder(field.Type.Elem()), idx, field.Name}
            continue
        }
        if ptag.inline {
            inlineDecoder = &decoderField{ptag, d.typeDecoder(field.Type), idx, field.Name}
            continue
        }
        if ptag.metadata {
            continue
        }

        oldFormat := d.dateFormat
        dateFormat, ok := parseFormatStructTag(field)
        if ok {
            switch dateFormat {
            case "date-time":
                d.dateFormat = time.RFC3339
            case "date":
                d.dateFormat = "2006-01-02"
            }
        }
        decoderFields[ptag.name] = decoderField{ptag, d.typeDecoder(field.Type), idx, field.Name}
        d.dateFormat = oldFormat
    }

    return func(node gjson.Result, value reflect.Value, state *decoderState) (err error) {
        if field := value.FieldByName("JSON"); field.IsValid() {
            if raw := field.FieldByName("raw"); raw.IsValid() {
                setUnexportedField(raw, node.Raw)
            }
        }

        for _, decoder := range anonymousDecoders {
            // ignore errors
            decoder.fn(node, value.FieldByIndex(decoder.idx), state)
        }

        if inlineDecoder != nil {
            var meta Field
            dest := value.FieldByIndex(inlineDecoder.idx)
            isValid := false
            if dest.IsValid() && node.Type != gjson.Null {
                err = inlineDecoder.fn(node, dest, state)
                if err == nil {
                    isValid = true
                }
            }

            if node.Type == gjson.Null {
                meta = Field{
                    raw:    node.Raw,
                    status: null,
                }
            } else if !isValid {
                meta = Field{
                    raw:    node.Raw,
                    status: invalid,
                }
            } else if isValid {
                meta = Field{
                    raw:    node.Raw,
                    status: valid,
                }
            }
            if metadata := getSubField(value, inlineDecoder.idx, inlineDecoder.goname); metadata.IsValid() {
                metadata.Set(reflect.ValueOf(meta))
            }
            return err
        }

        typedExtraType := reflect.Type(nil)
        typedExtraFields := reflect.Value{}
        if extraDecoder != nil {
            typedExtraType = value.FieldByIndex(extraDecoder.idx).Type()
            typedExtraFields = reflect.MakeMap(typedExtraType)
        }
        untypedExtraFields := map[string]Field{}

        for fieldName, itemNode := range node.Map() {
            df, explicit := decoderFields[fieldName]
            var (
                dest reflect.Value
                fn   decoderFunc
                meta Field
            )
            if explicit {
                fn = df.fn
                dest = value.FieldByIndex(df.idx)
            }
            if !explicit && extraDecoder != nil {
                dest = reflect.New(typedExtraType.Elem()).Elem()
                fn = extraDecoder.fn
            }

            isValid := false
            if dest.IsValid() && itemNode.Type != gjson.Null {
                err = fn(itemNode, dest, state)
                if err == nil {
                    isValid = true
                }
            }

            if itemNode.Type == gjson.Null {
                meta = Field{
                    raw:    itemNode.Raw,
                    status: null,
                }
            } else if !isValid {
                meta = Field{
                    raw:    itemNode.Raw,
                    status: invalid,
                }
            } else if isValid {
                meta = Field{
                    raw:    itemNode.Raw,
                    status: valid,
                }
            }

            if explicit {
                if metadata := getSubField(value, df.idx, df.goname); metadata.IsValid() {
                    metadata.Set(reflect.ValueOf(meta))
                }
            }
            if !explicit {
                untypedExtraFields[fieldName] = meta
            }
            if !explicit && extraDecoder != nil {
                typedExtraFields.SetMapIndex(reflect.ValueOf(fieldName), dest)
            }
        }

        if extraDecoder != nil && typedExtraFields.Len() > 0 {
            value.FieldByIndex(extraDecoder.idx).Set(typedExtraFields)
        }

        // Set exactness to 'extras' if there are untyped, extra fields.
        if len(untypedExtraFields) > 0 && state.exactness > extras {
            state.exactness = extras
        }

        if metadata := getSubField(value, []int{-1}, "ExtraFields"); metadata.IsValid() && len(untypedExtraFields) > 0 {
            metadata.Set(reflect.ValueOf(untypedExtraFields))
        }
        return nil
    }
}
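
// Sketch of the struct-tag conventions the decoder above recognizes, mirroring
// the shapes used in json_test.go (the widget/widgetJSON names are illustrative):
//
//    type widget struct {
//        ID          string                 `json:"id"`
//        CreatedAt   time.Time              `json:"created_at" format:"date-time"`
//        ExtraFields map[string]interface{} `json:"-,extras"`   // collects unknown keys
//        JSON        widgetJSON             `json:"-,metadata"` // per-field raw JSON and status
//    }
//
//    type widgetJSON struct {
//        ID          Field
//        CreatedAt   Field
//        ExtraFields map[string]Field
//        raw         string
//    }
//
// A `json:"-,inline"` tag instead decodes the whole payload into a single field,
// as the Inline and InlineArray test types do.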

func (d *decoderBuilder) newPrimitiveTypeDecoder(t reflect.Type) decoderFunc {
    switch t.Kind() {
    case reflect.String:
        return func(n gjson.Result, v reflect.Value, state *decoderState) error {
            v.SetString(n.String())
            if guardStrict(state, n.Type != gjson.String) {
                return fmt.Errorf("apijson: failed to parse string strictly")
            }
            // Everything that is not an object can be loosely stringified.
            if n.Type == gjson.JSON {
                return fmt.Errorf("apijson: failed to parse string")
            }
            if guardUnknown(state, v) {
                return fmt.Errorf("apijson: failed string enum validation")
            }
            return nil
        }
    case reflect.Bool:
        return func(n gjson.Result, v reflect.Value, state *decoderState) error {
            v.SetBool(n.Bool())
            if guardStrict(state, n.Type != gjson.True && n.Type != gjson.False) {
                return fmt.Errorf("apijson: failed to parse bool strictly")
            }
            // Numbers and strings that are either 'true' or 'false' can be loosely
            // deserialized as bool.
            if n.Type == gjson.String && (n.Raw != "true" && n.Raw != "false") || n.Type == gjson.JSON {
                return fmt.Errorf("apijson: failed to parse bool")
            }
            if guardUnknown(state, v) {
                return fmt.Errorf("apijson: failed bool enum validation")
            }
            return nil
        }
    case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
        return func(n gjson.Result, v reflect.Value, state *decoderState) error {
            v.SetInt(n.Int())
            if guardStrict(state, n.Type != gjson.Number || n.Num != float64(int(n.Num))) {
                return fmt.Errorf("apijson: failed to parse int strictly")
            }
            // Numbers, booleans, and strings that maybe look like numbers can be
            // loosely deserialized as numbers.
            if n.Type == gjson.JSON || (n.Type == gjson.String && !canParseAsNumber(n.Str)) {
                return fmt.Errorf("apijson: failed to parse int")
            }
            if guardUnknown(state, v) {
                return fmt.Errorf("apijson: failed int enum validation")
            }
            return nil
        }
    case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
        return func(n gjson.Result, v reflect.Value, state *decoderState) error {
            v.SetUint(n.Uint())
            if guardStrict(state, n.Type != gjson.Number || n.Num != float64(int(n.Num)) || n.Num < 0) {
                return fmt.Errorf("apijson: failed to parse uint strictly")
            }
            // Numbers, booleans, and strings that maybe look like numbers can be
            // loosely deserialized as uint.
            if n.Type == gjson.JSON || (n.Type == gjson.String && !canParseAsNumber(n.Str)) {
                return fmt.Errorf("apijson: failed to parse uint")
            }
            if guardUnknown(state, v) {
                return fmt.Errorf("apijson: failed uint enum validation")
            }
            return nil
        }
    case reflect.Float32, reflect.Float64:
        return func(n gjson.Result, v reflect.Value, state *decoderState) error {
            v.SetFloat(n.Float())
            if guardStrict(state, n.Type != gjson.Number) {
                return fmt.Errorf("apijson: failed to parse float strictly")
            }
            // Numbers, booleans, and strings that maybe look like numbers can be
            // loosely deserialized as floats.
            if n.Type == gjson.JSON || (n.Type == gjson.String && !canParseAsNumber(n.Str)) {
                return fmt.Errorf("apijson: failed to parse float")
            }
            if guardUnknown(state, v) {
                return fmt.Errorf("apijson: failed float enum validation")
            }
            return nil
        }
    default:
        return func(node gjson.Result, v reflect.Value, state *decoderState) error {
            return fmt.Errorf("unknown type received at primitive decoder: %s", t.String())
        }
    }
}

func (d *decoderBuilder) newTimeTypeDecoder(t reflect.Type) decoderFunc {
    format := d.dateFormat
    return func(n gjson.Result, v reflect.Value, state *decoderState) error {
        parsed, err := time.Parse(format, n.Str)
        if err == nil {
            v.Set(reflect.ValueOf(parsed).Convert(t))
            return nil
        }

        if guardStrict(state, true) {
            return err
        }

        layouts := []string{
            "2006-01-02",
            "2006-01-02T15:04:05Z07:00",
            "2006-01-02T15:04:05Z0700",
            "2006-01-02T15:04:05",
            "2006-01-02 15:04:05Z07:00",
            "2006-01-02 15:04:05Z0700",
            "2006-01-02 15:04:05",
        }

        for _, layout := range layouts {
            parsed, err := time.Parse(layout, n.Str)
            if err == nil {
                v.Set(reflect.ValueOf(parsed).Convert(t))
                return nil
            }
        }

        return fmt.Errorf("unable to leniently parse date-time string: %s", n.Str)
    }
}
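
// Illustrative examples of the lenient fallback above (the inputs mirror the
// *_coerce cases in json_test.go):
//
//    "2007-03-01T13:00:00Z"   parses strictly against the RFC 3339 format
//    "2007-03-01 13:03:05Z"   only parses via the fallback layouts (missing 'T')
//    "2007-03-01T13:03:05"    only parses via the fallback layouts (no timezone)
//
// Any value that needs the fallback also marks the decode as loose via
// guardStrict, which matters when the time is one variant of a union.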

func setUnexportedField(field reflect.Value, value interface{}) {
    reflect.NewAt(field.Type(), unsafe.Pointer(field.UnsafeAddr())).Elem().Set(reflect.ValueOf(value))
}

func guardStrict(state *decoderState, cond bool) bool {
    if !cond {
        return false
    }

    if state.strict {
        return true
    }

    state.exactness = loose
    return false
}

func canParseAsNumber(str string) bool {
    _, err := strconv.ParseFloat(str, 64)
    return err == nil
}

func guardUnknown(state *decoderState, v reflect.Value) bool {
    if have, ok := v.Interface().(interface{ IsKnown() bool }); guardStrict(state, ok && !have.IsKnown()) {
        return true
    }
    return false
}

packages/tui/sdk/internal/apijson/encoder.go  (Normal file, 398 lines added)
@@ -0,0 +1,398 @@
package apijson

import (
    "bytes"
    "encoding/json"
    "fmt"
    "reflect"
    "sort"
    "strconv"
    "strings"
    "sync"
    "time"

    "github.com/tidwall/sjson"

    "github.com/sst/opencode-sdk-go/internal/param"
)

var encoders sync.Map // map[encoderEntry]encoderFunc

func Marshal(value interface{}) ([]byte, error) {
    e := &encoder{dateFormat: time.RFC3339}
    return e.marshal(value)
}

func MarshalRoot(value interface{}) ([]byte, error) {
    e := &encoder{root: true, dateFormat: time.RFC3339}
    return e.marshal(value)
}
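
// Illustrative sketch (not part of the generated code): Marshal only emits
// fields whose param.Field is marked Present, so unset fields are omitted
// entirely rather than serialized as zero values. The createParams type is
// hypothetical; the Present/Null/Value fields are the ones exercised in
// field_test.go.
//
//    type createParams struct {
//        Name param.Field[string] `json:"name"`
//        Age  param.Field[int64]  `json:"age"`
//    }
//
//    b, _ := Marshal(createParams{
//        Name: param.Field[string]{Present: true, Value: "gopher"},
//    })
//    // string(b) == `{"name":"gopher"}`; "age" is absent because Present is false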

type encoder struct {
    dateFormat string
    root       bool
}

type encoderFunc func(value reflect.Value) ([]byte, error)

type encoderField struct {
    tag parsedStructTag
    fn  encoderFunc
    idx []int
}

type encoderEntry struct {
    reflect.Type
    dateFormat string
    root       bool
}

func (e *encoder) marshal(value interface{}) ([]byte, error) {
    val := reflect.ValueOf(value)
    if !val.IsValid() {
        return nil, nil
    }
    typ := val.Type()
    enc := e.typeEncoder(typ)
    return enc(val)
}

func (e *encoder) typeEncoder(t reflect.Type) encoderFunc {
    entry := encoderEntry{
        Type:       t,
        dateFormat: e.dateFormat,
        root:       e.root,
    }

    if fi, ok := encoders.Load(entry); ok {
        return fi.(encoderFunc)
    }

    // To deal with recursive types, populate the map with an
    // indirect func before we build it. This type waits on the
    // real func (f) to be ready and then calls it. This indirect
    // func is only used for recursive types.
    var (
        wg sync.WaitGroup
        f  encoderFunc
    )
    wg.Add(1)
    fi, loaded := encoders.LoadOrStore(entry, encoderFunc(func(v reflect.Value) ([]byte, error) {
        wg.Wait()
        return f(v)
    }))
    if loaded {
        return fi.(encoderFunc)
    }

    // Compute the real encoder and replace the indirect func with it.
    f = e.newTypeEncoder(t)
    wg.Done()
    encoders.Store(entry, f)
    return f
}

func marshalerEncoder(v reflect.Value) ([]byte, error) {
    return v.Interface().(json.Marshaler).MarshalJSON()
}

func indirectMarshalerEncoder(v reflect.Value) ([]byte, error) {
    return v.Addr().Interface().(json.Marshaler).MarshalJSON()
}

func (e *encoder) newTypeEncoder(t reflect.Type) encoderFunc {
    if t.ConvertibleTo(reflect.TypeOf(time.Time{})) {
        return e.newTimeTypeEncoder()
    }
    if !e.root && t.Implements(reflect.TypeOf((*json.Marshaler)(nil)).Elem()) {
        return marshalerEncoder
    }
    if !e.root && reflect.PointerTo(t).Implements(reflect.TypeOf((*json.Marshaler)(nil)).Elem()) {
        return indirectMarshalerEncoder
    }
    e.root = false
    switch t.Kind() {
    case reflect.Pointer:
        inner := t.Elem()

        innerEncoder := e.typeEncoder(inner)
        return func(v reflect.Value) ([]byte, error) {
            if !v.IsValid() || v.IsNil() {
                return nil, nil
            }
            return innerEncoder(v.Elem())
        }
    case reflect.Struct:
        return e.newStructTypeEncoder(t)
    case reflect.Array:
        fallthrough
    case reflect.Slice:
        return e.newArrayTypeEncoder(t)
    case reflect.Map:
        return e.newMapEncoder(t)
    case reflect.Interface:
        return e.newInterfaceEncoder()
    default:
        return e.newPrimitiveTypeEncoder(t)
    }
}

func (e *encoder) newPrimitiveTypeEncoder(t reflect.Type) encoderFunc {
    switch t.Kind() {
    // Note that we could use `gjson` to encode these types but it would complicate our
    // code more and this current code shouldn't cause any issues
    case reflect.String:
        return func(v reflect.Value) ([]byte, error) {
            return json.Marshal(v.Interface())
        }
    case reflect.Bool:
        return func(v reflect.Value) ([]byte, error) {
            if v.Bool() {
                return []byte("true"), nil
            }
            return []byte("false"), nil
        }
    case reflect.Int, reflect.Int16, reflect.Int32, reflect.Int64:
        return func(v reflect.Value) ([]byte, error) {
            return []byte(strconv.FormatInt(v.Int(), 10)), nil
        }
    case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64:
        return func(v reflect.Value) ([]byte, error) {
            return []byte(strconv.FormatUint(v.Uint(), 10)), nil
        }
    case reflect.Float32:
        return func(v reflect.Value) ([]byte, error) {
            return []byte(strconv.FormatFloat(v.Float(), 'f', -1, 32)), nil
        }
    case reflect.Float64:
        return func(v reflect.Value) ([]byte, error) {
            return []byte(strconv.FormatFloat(v.Float(), 'f', -1, 64)), nil
        }
    default:
        return func(v reflect.Value) ([]byte, error) {
            return nil, fmt.Errorf("unknown type received at primitive encoder: %s", t.String())
        }
    }
}

func (e *encoder) newArrayTypeEncoder(t reflect.Type) encoderFunc {
    itemEncoder := e.typeEncoder(t.Elem())

    return func(value reflect.Value) ([]byte, error) {
        json := []byte("[]")
        for i := 0; i < value.Len(); i++ {
            var value, err = itemEncoder(value.Index(i))
            if err != nil {
                return nil, err
            }
            if value == nil {
                // Assume that empty items should be inserted as `null` so that the output array
                // will be the same length as the input array
                value = []byte("null")
            }

            json, err = sjson.SetRawBytes(json, "-1", value)
            if err != nil {
                return nil, err
            }
        }

        return json, nil
    }
}

func (e *encoder) newStructTypeEncoder(t reflect.Type) encoderFunc {
    if t.Implements(reflect.TypeOf((*param.FieldLike)(nil)).Elem()) {
        return e.newFieldTypeEncoder(t)
    }

    encoderFields := []encoderField{}
    extraEncoder := (*encoderField)(nil)

    // This helper allows us to recursively collect field encoders into a flat
    // array. The parameter `index` keeps track of the access patterns necessary
    // to get to some field.
    var collectEncoderFields func(r reflect.Type, index []int)
    collectEncoderFields = func(r reflect.Type, index []int) {
        for i := 0; i < r.NumField(); i++ {
            idx := append(index, i)
            field := t.FieldByIndex(idx)
            if !field.IsExported() {
                continue
            }
            // If this is an embedded struct, traverse one level deeper to extract
            // the fields and get their encoders as well.
            if field.Anonymous {
                collectEncoderFields(field.Type, idx)
                continue
            }
            // If json tag is not present, then we skip, which is intentionally
            // different behavior from the stdlib.
            ptag, ok := parseJSONStructTag(field)
            if !ok {
                continue
            }
            // We only want to support unexported fields if they're tagged with
            // `extras` because that field shouldn't be part of the public API. We
            // also want to only keep the top-level extras.
            if ptag.extras && len(index) == 0 {
                extraEncoder = &encoderField{ptag, e.typeEncoder(field.Type.Elem()), idx}
                continue
            }
            if ptag.name == "-" {
                continue
            }

            dateFormat, ok := parseFormatStructTag(field)
            oldFormat := e.dateFormat
            if ok {
                switch dateFormat {
                case "date-time":
                    e.dateFormat = time.RFC3339
                case "date":
                    e.dateFormat = "2006-01-02"
                }
            }
            encoderFields = append(encoderFields, encoderField{ptag, e.typeEncoder(field.Type), idx})
            e.dateFormat = oldFormat
        }
    }
    collectEncoderFields(t, []int{})

    // Ensure deterministic output by sorting by lexicographic order
    sort.Slice(encoderFields, func(i, j int) bool {
        return encoderFields[i].tag.name < encoderFields[j].tag.name
    })

    return func(value reflect.Value) (json []byte, err error) {
        json = []byte("{}")

        for _, ef := range encoderFields {
            field := value.FieldByIndex(ef.idx)
            encoded, err := ef.fn(field)
            if err != nil {
                return nil, err
            }
            if encoded == nil {
                continue
            }
            json, err = sjson.SetRawBytes(json, ef.tag.name, encoded)
            if err != nil {
                return nil, err
            }
        }

        if extraEncoder != nil {
            json, err = e.encodeMapEntries(json, value.FieldByIndex(extraEncoder.idx))
            if err != nil {
                return nil, err
            }
        }
        return
    }
}

func (e *encoder) newFieldTypeEncoder(t reflect.Type) encoderFunc {
    f, _ := t.FieldByName("Value")
    enc := e.typeEncoder(f.Type)

    return func(value reflect.Value) (json []byte, err error) {
        present := value.FieldByName("Present")
        if !present.Bool() {
            return nil, nil
        }
        null := value.FieldByName("Null")
        if null.Bool() {
            return []byte("null"), nil
        }
        raw := value.FieldByName("Raw")
        if !raw.IsNil() {
            return e.typeEncoder(raw.Type())(raw)
        }
        return enc(value.FieldByName("Value"))
    }
}

func (e *encoder) newTimeTypeEncoder() encoderFunc {
    format := e.dateFormat
    return func(value reflect.Value) (json []byte, err error) {
        return []byte(`"` + value.Convert(reflect.TypeOf(time.Time{})).Interface().(time.Time).Format(format) + `"`), nil
    }
}

func (e encoder) newInterfaceEncoder() encoderFunc {
    return func(value reflect.Value) ([]byte, error) {
        value = value.Elem()
        if !value.IsValid() {
            return nil, nil
        }
        return e.typeEncoder(value.Type())(value)
    }
}

// Given a []byte of json (may either be an empty object or an object that already contains entries)
// encode all of the entries in the map to the json byte array.
func (e *encoder) encodeMapEntries(json []byte, v reflect.Value) ([]byte, error) {
    type mapPair struct {
        key   []byte
        value reflect.Value
    }

    pairs := []mapPair{}
    keyEncoder := e.typeEncoder(v.Type().Key())

    iter := v.MapRange()
    for iter.Next() {
        var encodedKeyString string
        if iter.Key().Type().Kind() == reflect.String {
            encodedKeyString = iter.Key().String()
        } else {
            var err error
            encodedKeyBytes, err := keyEncoder(iter.Key())
            if err != nil {
                return nil, err
            }
            encodedKeyString = string(encodedKeyBytes)
        }
        encodedKey := []byte(sjsonReplacer.Replace(encodedKeyString))
        pairs = append(pairs, mapPair{key: encodedKey, value: iter.Value()})
    }

    // Ensure deterministic output
    sort.Slice(pairs, func(i, j int) bool {
        return bytes.Compare(pairs[i].key, pairs[j].key) < 0
    })

    elementEncoder := e.typeEncoder(v.Type().Elem())
    for _, p := range pairs {
        encodedValue, err := elementEncoder(p.value)
        if err != nil {
            return nil, err
        }
        if len(encodedValue) == 0 {
            continue
        }
        json, err = sjson.SetRawBytes(json, string(p.key), encodedValue)
        if err != nil {
            return nil, err
        }
    }

    return json, nil
}

func (e *encoder) newMapEncoder(t reflect.Type) encoderFunc {
    return func(value reflect.Value) ([]byte, error) {
        json := []byte("{}")
        var err error
        json, err = e.encodeMapEntries(json, value)
        if err != nil {
            return nil, err
        }
        return json, nil
    }
}

// If we want to set a literal key value into JSON using sjson, we need to make sure it doesn't have
// special characters that sjson interprets as a path.
var sjsonReplacer *strings.Replacer = strings.NewReplacer(".", "\\.", ":", "\\:", "*", "\\*")
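
// Illustrative note: without this escaping, a literal map key such as "a.b"
// would be treated by sjson as a nested path and produce {"a":{"b":...}}
// instead of a single "a.b" key. For example (mirroring the
// "map_string_with_sjson_path_chars" test case):
//
//    b, _ := Marshal(map[string]string{"a.b": "x"})
//    // string(b) == `{"a.b":"x"}`, not `{"a":{"b":"x"}}`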

packages/tui/sdk/internal/apijson/field.go  (Normal file, 41 lines added)
@@ -0,0 +1,41 @@
package apijson

import "reflect"

type status uint8

const (
    missing status = iota
    null
    invalid
    valid
)

type Field struct {
    raw    string
    status status
}

// Returns true if the field is explicitly `null` _or_ if it is not present at all (i.e., missing).
// To check if the field's key is present in the JSON with an explicit null value,
// you must check `f.IsNull() && !f.IsMissing()`.
func (j Field) IsNull() bool    { return j.status <= null }
func (j Field) IsMissing() bool { return j.status == missing }
func (j Field) IsInvalid() bool { return j.status == invalid }
func (j Field) Raw() string     { return j.raw }
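
// Illustrative sketch of the status distinctions (mirroring the
// "metadata_coerce" case in json_test.go), for a payload {"b":"12","c":null}
// decoded into a struct with fields a, b, and c:
//
//    JSON.A.IsMissing() // true: the key was absent (IsNull() is also true)
//    JSON.C.IsNull()    // true: the key was present with an explicit null
//    JSON.C.IsMissing() // false
//    JSON.B.Raw()       // `"12"`: the raw bytes are retained verbatim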

func getSubField(root reflect.Value, index []int, name string) reflect.Value {
    strct := root.FieldByIndex(index[:len(index)-1])
    if !strct.IsValid() {
        panic("couldn't find encapsulating struct for field " + name)
    }
    meta := strct.FieldByName("JSON")
    if !meta.IsValid() {
        return reflect.Value{}
    }
    field := meta.FieldByName(name)
    if !field.IsValid() {
        return reflect.Value{}
    }
    return field
}

packages/tui/sdk/internal/apijson/field_test.go  (Normal file, 66 lines added)
@@ -0,0 +1,66 @@
package apijson

import (
    "testing"
    "time"

    "github.com/sst/opencode-sdk-go/internal/param"
)

type Struct struct {
    A string `json:"a"`
    B int64  `json:"b"`
}

type FieldStruct struct {
    A param.Field[string]    `json:"a"`
    B param.Field[int64]     `json:"b"`
    C param.Field[Struct]    `json:"c"`
    D param.Field[time.Time] `json:"d" format:"date"`
    E param.Field[time.Time] `json:"e" format:"date-time"`
    F param.Field[int64]     `json:"f"`
}

func TestFieldMarshal(t *testing.T) {
    tests := map[string]struct {
        value    interface{}
        expected string
    }{
        "null_string": {param.Field[string]{Present: true, Null: true}, "null"},
        "null_int":    {param.Field[int]{Present: true, Null: true}, "null"},
        "null_int64":  {param.Field[int64]{Present: true, Null: true}, "null"},
        "null_struct": {param.Field[Struct]{Present: true, Null: true}, "null"},

        "string": {param.Field[string]{Present: true, Value: "string"}, `"string"`},
        "int":    {param.Field[int]{Present: true, Value: 123}, "123"},
        "int64":  {param.Field[int64]{Present: true, Value: int64(123456789123456789)}, "123456789123456789"},
        "struct": {param.Field[Struct]{Present: true, Value: Struct{A: "yo", B: 123}}, `{"a":"yo","b":123}`},

        "string_raw": {param.Field[int]{Present: true, Raw: "string"}, `"string"`},
        "int_raw":    {param.Field[int]{Present: true, Raw: 123}, "123"},
        "int64_raw":  {param.Field[int]{Present: true, Raw: int64(123456789123456789)}, "123456789123456789"},
        "struct_raw": {param.Field[int]{Present: true, Raw: Struct{A: "yo", B: 123}}, `{"a":"yo","b":123}`},

        "param_struct": {
            FieldStruct{
                A: param.Field[string]{Present: true, Value: "hello"},
                B: param.Field[int64]{Present: true, Value: int64(12)},
                D: param.Field[time.Time]{Present: true, Value: time.Date(2023, time.March, 18, 14, 47, 38, 0, time.UTC)},
                E: param.Field[time.Time]{Present: true, Value: time.Date(2023, time.March, 18, 14, 47, 38, 0, time.UTC)},
            },
            `{"a":"hello","b":12,"d":"2023-03-18","e":"2023-03-18T14:47:38Z"}`,
        },
    }

    for name, test := range tests {
        t.Run(name, func(t *testing.T) {
            b, err := Marshal(test.value)
            if err != nil {
                t.Fatalf("didn't expect error %v", err)
            }
            if string(b) != test.expected {
                t.Fatalf("expected %s, received %s", test.expected, string(b))
            }
        })
    }
}

packages/tui/sdk/internal/apijson/json_test.go  (Normal file, 617 lines added)
@@ -0,0 +1,617 @@
|
||||
package apijson
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
|
||||
func P[T any](v T) *T { return &v }
|
||||
|
||||
type Primitives struct {
|
||||
A bool `json:"a"`
|
||||
B int `json:"b"`
|
||||
C uint `json:"c"`
|
||||
D float64 `json:"d"`
|
||||
E float32 `json:"e"`
|
||||
F []int `json:"f"`
|
||||
}
|
||||
|
||||
type PrimitivePointers struct {
|
||||
A *bool `json:"a"`
|
||||
B *int `json:"b"`
|
||||
C *uint `json:"c"`
|
||||
D *float64 `json:"d"`
|
||||
E *float32 `json:"e"`
|
||||
F *[]int `json:"f"`
|
||||
}
|
||||
|
||||
type Slices struct {
|
||||
Slice []Primitives `json:"slices"`
|
||||
}
|
||||
|
||||
type DateTime struct {
|
||||
Date time.Time `json:"date" format:"date"`
|
||||
DateTime time.Time `json:"date-time" format:"date-time"`
|
||||
}
|
||||
|
||||
type AdditionalProperties struct {
|
||||
A bool `json:"a"`
|
||||
ExtraFields map[string]interface{} `json:"-,extras"`
|
||||
}
|
||||
|
||||
type TypedAdditionalProperties struct {
|
||||
A bool `json:"a"`
|
||||
ExtraFields map[string]int `json:"-,extras"`
|
||||
}
|
||||
|
||||
type EmbeddedStruct struct {
|
||||
A bool `json:"a"`
|
||||
B string `json:"b"`
|
||||
|
||||
JSON EmbeddedStructJSON
|
||||
}
|
||||
|
||||
type EmbeddedStructJSON struct {
|
||||
A Field
|
||||
B Field
|
||||
ExtraFields map[string]Field
|
||||
raw string
|
||||
}
|
||||
|
||||
type EmbeddedStructs struct {
|
||||
EmbeddedStruct
|
||||
A *int `json:"a"`
|
||||
ExtraFields map[string]interface{} `json:"-,extras"`
|
||||
|
||||
JSON EmbeddedStructsJSON
|
||||
}
|
||||
|
||||
type EmbeddedStructsJSON struct {
|
||||
A Field
|
||||
ExtraFields map[string]Field
|
||||
raw string
|
||||
}
|
||||
|
||||
type Recursive struct {
|
||||
Name string `json:"name"`
|
||||
Child *Recursive `json:"child"`
|
||||
}
|
||||
|
||||
type JSONFieldStruct struct {
|
||||
A bool `json:"a"`
|
||||
B int64 `json:"b"`
|
||||
C string `json:"c"`
|
||||
D string `json:"d"`
|
||||
ExtraFields map[string]int64 `json:"-,extras"`
|
||||
JSON JSONFieldStructJSON `json:"-,metadata"`
|
||||
}
|
||||
|
||||
type JSONFieldStructJSON struct {
|
||||
A Field
|
||||
B Field
|
||||
C Field
|
||||
D Field
|
||||
ExtraFields map[string]Field
|
||||
raw string
|
||||
}
|
||||
|
||||
type UnknownStruct struct {
|
||||
Unknown interface{} `json:"unknown"`
|
||||
}
|
||||
|
||||
type UnionStruct struct {
|
||||
Union Union `json:"union" format:"date"`
|
||||
}
|
||||
|
||||
type Union interface {
|
||||
union()
|
||||
}
|
||||
|
||||
type Inline struct {
|
||||
InlineField Primitives `json:"-,inline"`
|
||||
JSON InlineJSON `json:"-,metadata"`
|
||||
}
|
||||
|
||||
type InlineArray struct {
|
||||
InlineField []string `json:"-,inline"`
|
||||
JSON InlineJSON `json:"-,metadata"`
|
||||
}
|
||||
|
||||
type InlineJSON struct {
|
||||
InlineField Field
|
||||
raw string
|
||||
}
|
||||
|
||||
type UnionInteger int64
|
||||
|
||||
func (UnionInteger) union() {}
|
||||
|
||||
type UnionStructA struct {
|
||||
Type string `json:"type"`
|
||||
A string `json:"a"`
|
||||
B string `json:"b"`
|
||||
}
|
||||
|
||||
func (UnionStructA) union() {}
|
||||
|
||||
type UnionStructB struct {
|
||||
Type string `json:"type"`
|
||||
A string `json:"a"`
|
||||
}
|
||||
|
||||
func (UnionStructB) union() {}
|
||||
|
||||
type UnionTime time.Time
|
||||
|
||||
func (UnionTime) union() {}
|
||||
|
||||
func init() {
|
||||
RegisterUnion(reflect.TypeOf((*Union)(nil)).Elem(), "type",
|
||||
UnionVariant{
|
||||
TypeFilter: gjson.String,
|
||||
Type: reflect.TypeOf(UnionTime{}),
|
||||
},
|
||||
UnionVariant{
|
||||
TypeFilter: gjson.Number,
|
||||
Type: reflect.TypeOf(UnionInteger(0)),
|
||||
},
|
||||
UnionVariant{
|
||||
TypeFilter: gjson.JSON,
|
||||
DiscriminatorValue: "typeA",
|
||||
Type: reflect.TypeOf(UnionStructA{}),
|
||||
},
|
||||
UnionVariant{
|
||||
TypeFilter: gjson.JSON,
|
||||
DiscriminatorValue: "typeB",
|
||||
Type: reflect.TypeOf(UnionStructB{}),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
type ComplexUnionStruct struct {
|
||||
Union ComplexUnion `json:"union"`
|
||||
}
|
||||
|
||||
type ComplexUnion interface {
|
||||
complexUnion()
|
||||
}
|
||||
|
||||
type ComplexUnionA struct {
|
||||
Boo string `json:"boo"`
|
||||
Foo bool `json:"foo"`
|
||||
}
|
||||
|
||||
func (ComplexUnionA) complexUnion() {}
|
||||
|
||||
type ComplexUnionB struct {
|
||||
Boo bool `json:"boo"`
|
||||
Foo string `json:"foo"`
|
||||
}
|
||||
|
||||
func (ComplexUnionB) complexUnion() {}
|
||||
|
||||
type ComplexUnionC struct {
|
||||
Boo int64 `json:"boo"`
|
||||
}
|
||||
|
||||
func (ComplexUnionC) complexUnion() {}
|
||||
|
||||
type ComplexUnionTypeA struct {
|
||||
Baz int64 `json:"baz"`
|
||||
Type TypeA `json:"type"`
|
||||
}
|
||||
|
||||
func (ComplexUnionTypeA) complexUnion() {}
|
||||
|
||||
type TypeA string
|
||||
|
||||
func (t TypeA) IsKnown() bool {
|
||||
return t == "a"
|
||||
}
|
||||
|
||||
type ComplexUnionTypeB struct {
|
||||
Baz int64 `json:"baz"`
|
||||
Type TypeB `json:"type"`
|
||||
}
|
||||
|
||||
type TypeB string
|
||||
|
||||
func (t TypeB) IsKnown() bool {
|
||||
return t == "b"
|
||||
}
|
||||
|
||||
type UnmarshalStruct struct {
|
||||
Foo string `json:"foo"`
|
||||
prop bool `json:"-"`
|
||||
}
|
||||
|
||||
func (r *UnmarshalStruct) UnmarshalJSON(json []byte) error {
|
||||
r.prop = true
|
||||
return UnmarshalRoot(json, r)
|
||||
}
|
||||
|
||||
func (ComplexUnionTypeB) complexUnion() {}
|
||||
|
||||
func init() {
|
||||
RegisterUnion(reflect.TypeOf((*ComplexUnion)(nil)).Elem(), "",
|
||||
UnionVariant{
|
||||
TypeFilter: gjson.JSON,
|
||||
Type: reflect.TypeOf(ComplexUnionA{}),
|
||||
},
|
||||
UnionVariant{
|
||||
TypeFilter: gjson.JSON,
|
||||
Type: reflect.TypeOf(ComplexUnionB{}),
|
||||
},
|
||||
UnionVariant{
|
||||
TypeFilter: gjson.JSON,
|
||||
Type: reflect.TypeOf(ComplexUnionC{}),
|
||||
},
|
||||
UnionVariant{
|
||||
TypeFilter: gjson.JSON,
|
||||
Type: reflect.TypeOf(ComplexUnionTypeA{}),
|
||||
},
|
||||
UnionVariant{
|
||||
TypeFilter: gjson.JSON,
|
||||
Type: reflect.TypeOf(ComplexUnionTypeB{}),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
type MarshallingUnionStruct struct {
|
||||
Union MarshallingUnion
|
||||
}
|
||||
|
||||
func (r *MarshallingUnionStruct) UnmarshalJSON(data []byte) (err error) {
|
||||
*r = MarshallingUnionStruct{}
|
||||
err = UnmarshalRoot(data, &r.Union)
|
||||
return
|
||||
}
|
||||
|
||||
func (r MarshallingUnionStruct) MarshalJSON() (data []byte, err error) {
|
||||
return MarshalRoot(r.Union)
|
||||
}
|
||||
|
||||
type MarshallingUnion interface {
|
||||
marshallingUnion()
|
||||
}
|
||||
|
||||
type MarshallingUnionA struct {
|
||||
Boo string `json:"boo"`
|
||||
}
|
||||
|
||||
func (MarshallingUnionA) marshallingUnion() {}
|
||||
|
||||
func (r *MarshallingUnionA) UnmarshalJSON(data []byte) (err error) {
|
||||
return UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
type MarshallingUnionB struct {
|
||||
Foo string `json:"foo"`
|
||||
}
|
||||
|
||||
func (MarshallingUnionB) marshallingUnion() {}
|
||||
|
||||
func (r *MarshallingUnionB) UnmarshalJSON(data []byte) (err error) {
|
||||
return UnmarshalRoot(data, r)
|
||||
}
|
||||
|
||||
func init() {
|
||||
RegisterUnion(
|
||||
reflect.TypeOf((*MarshallingUnion)(nil)).Elem(),
|
||||
"",
|
||||
UnionVariant{
|
||||
TypeFilter: gjson.JSON,
|
||||
Type: reflect.TypeOf(MarshallingUnionA{}),
|
||||
},
|
||||
UnionVariant{
|
||||
TypeFilter: gjson.JSON,
|
||||
Type: reflect.TypeOf(MarshallingUnionB{}),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
var tests = map[string]struct {
|
||||
buf string
|
||||
val interface{}
|
||||
}{
|
||||
"true": {"true", true},
|
||||
"false": {"false", false},
|
||||
"int": {"1", 1},
|
||||
"int_bigger": {"12324", 12324},
|
||||
"int_string_coerce": {`"65"`, 65},
|
||||
"int_boolean_coerce": {"true", 1},
|
||||
"int64": {"1", int64(1)},
|
||||
"int64_huge": {"123456789123456789", int64(123456789123456789)},
|
||||
"uint": {"1", uint(1)},
|
||||
"uint_bigger": {"12324", uint(12324)},
|
||||
"uint_coerce": {`"65"`, uint(65)},
|
||||
"float_1.54": {"1.54", float32(1.54)},
|
||||
"float_1.89": {"1.89", float64(1.89)},
|
||||
"string": {`"str"`, "str"},
|
||||
"string_int_coerce": {`12`, "12"},
|
||||
"array_string": {`["foo","bar"]`, []string{"foo", "bar"}},
|
||||
"array_int": {`[1,2]`, []int{1, 2}},
|
||||
"array_int_coerce": {`["1",2]`, []int{1, 2}},
|
||||
|
||||
"ptr_true": {"true", P(true)},
|
||||
"ptr_false": {"false", P(false)},
|
||||
"ptr_int": {"1", P(1)},
|
||||
"ptr_int_bigger": {"12324", P(12324)},
|
||||
"ptr_int_string_coerce": {`"65"`, P(65)},
|
||||
"ptr_int_boolean_coerce": {"true", P(1)},
|
||||
"ptr_int64": {"1", P(int64(1))},
|
||||
"ptr_int64_huge": {"123456789123456789", P(int64(123456789123456789))},
|
||||
"ptr_uint": {"1", P(uint(1))},
|
||||
"ptr_uint_bigger": {"12324", P(uint(12324))},
|
||||
"ptr_uint_coerce": {`"65"`, P(uint(65))},
|
||||
"ptr_float_1.54": {"1.54", P(float32(1.54))},
|
||||
"ptr_float_1.89": {"1.89", P(float64(1.89))},
|
||||
|
||||
"date_time": {`"2007-03-01T13:00:00Z"`, time.Date(2007, time.March, 1, 13, 0, 0, 0, time.UTC)},
|
||||
"date_time_nano_coerce": {`"2007-03-01T13:03:05.123456789Z"`, time.Date(2007, time.March, 1, 13, 3, 5, 123456789, time.UTC)},
|
||||
|
||||
"date_time_missing_t_coerce": {`"2007-03-01 13:03:05Z"`, time.Date(2007, time.March, 1, 13, 3, 5, 0, time.UTC)},
|
||||
"date_time_missing_timezone_coerce": {`"2007-03-01T13:03:05"`, time.Date(2007, time.March, 1, 13, 3, 5, 0, time.UTC)},
|
||||
// note: using -1200 to minimize probability of conflicting with the local timezone of the test runner
|
||||
// see https://en.wikipedia.org/wiki/UTC%E2%88%9212:00
|
||||
"date_time_missing_timezone_colon_coerce": {`"2007-03-01T13:03:05-1200"`, time.Date(2007, time.March, 1, 13, 3, 5, 0, time.FixedZone("", -12*60*60))},
|
||||
"date_time_nano_missing_t_coerce": {`"2007-03-01 13:03:05.123456789Z"`, time.Date(2007, time.March, 1, 13, 3, 5, 123456789, time.UTC)},
|
||||
|
||||
"map_string": {`{"foo":"bar"}`, map[string]string{"foo": "bar"}},
|
||||
"map_string_with_sjson_path_chars": {`{":a.b.c*:d*-1e.f":"bar"}`, map[string]string{":a.b.c*:d*-1e.f": "bar"}},
|
||||
"map_interface": {`{"a":1,"b":"str","c":false}`, map[string]interface{}{"a": float64(1), "b": "str", "c": false}},
|
||||
|
||||
"primitive_struct": {
|
||||
`{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4]}`,
|
||||
Primitives{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}},
|
||||
},
|
||||
|
||||
"slices": {
|
||||
`{"slices":[{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4]}]}`,
|
||||
Slices{
|
||||
Slice: []Primitives{{A: false, B: 237628372683, C: uint(654), D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}}},
|
||||
},
|
||||
},
|
||||
|
||||
"primitive_pointer_struct": {
|
||||
`{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4,5]}`,
|
||||
PrimitivePointers{
|
||||
A: P(false),
|
||||
B: P(237628372683),
|
||||
C: P(uint(654)),
|
||||
D: P(9999.43),
|
||||
E: P(float32(43.76)),
|
||||
F: &[]int{1, 2, 3, 4, 5},
|
||||
},
|
||||
},
|
||||
|
||||
"datetime_struct": {
|
||||
`{"date":"2006-01-02","date-time":"2006-01-02T15:04:05Z"}`,
|
||||
DateTime{
|
||||
Date: time.Date(2006, time.January, 2, 0, 0, 0, 0, time.UTC),
|
||||
DateTime: time.Date(2006, time.January, 2, 15, 4, 5, 0, time.UTC),
|
||||
},
|
||||
},
|
||||
|
||||
"additional_properties": {
|
||||
`{"a":true,"bar":"value","foo":true}`,
|
||||
AdditionalProperties{
|
||||
A: true,
|
||||
ExtraFields: map[string]interface{}{
|
||||
"bar": "value",
|
||||
"foo": true,
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
"embedded_struct": {
|
||||
`{"a":1,"b":"bar"}`,
|
||||
EmbeddedStructs{
|
||||
EmbeddedStruct: EmbeddedStruct{
|
||||
A: true,
|
||||
B: "bar",
|
||||
JSON: EmbeddedStructJSON{
|
||||
A: Field{raw: `1`, status: valid},
|
||||
B: Field{raw: `"bar"`, status: valid},
|
||||
raw: `{"a":1,"b":"bar"}`,
|
||||
},
|
||||
},
|
||||
A: P(1),
|
||||
ExtraFields: map[string]interface{}{"b": "bar"},
|
||||
JSON: EmbeddedStructsJSON{
|
||||
A: Field{raw: `1`, status: valid},
|
||||
ExtraFields: map[string]Field{
|
||||
"b": {raw: `"bar"`, status: valid},
|
||||
},
|
||||
raw: `{"a":1,"b":"bar"}`,
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
"recursive_struct": {
|
||||
`{"child":{"name":"Alex"},"name":"Robert"}`,
|
||||
Recursive{Name: "Robert", Child: &Recursive{Name: "Alex"}},
|
||||
},
|
||||
|
||||
"metadata_coerce": {
|
||||
`{"a":"12","b":"12","c":null,"extra_typed":12,"extra_untyped":{"foo":"bar"}}`,
|
||||
JSONFieldStruct{
|
||||
A: false,
|
||||
B: 12,
|
||||
C: "",
|
||||
JSON: JSONFieldStructJSON{
|
||||
raw: `{"a":"12","b":"12","c":null,"extra_typed":12,"extra_untyped":{"foo":"bar"}}`,
|
||||
A: Field{raw: `"12"`, status: invalid},
|
||||
B: Field{raw: `"12"`, status: valid},
|
||||
C: Field{raw: "null", status: null},
|
||||
D: Field{raw: "", status: missing},
|
||||
ExtraFields: map[string]Field{
|
||||
"extra_typed": {
|
||||
raw: "12",
|
||||
status: valid,
|
||||
},
|
||||
"extra_untyped": {
|
||||
raw: `{"foo":"bar"}`,
|
||||
status: invalid,
|
||||
},
|
||||
},
|
||||
},
|
||||
ExtraFields: map[string]int64{
|
||||
"extra_typed": 12,
|
||||
"extra_untyped": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
"unknown_struct_number": {
|
||||
`{"unknown":12}`,
|
||||
UnknownStruct{
|
||||
Unknown: 12.,
|
||||
},
|
||||
},
|
||||
|
||||
"unknown_struct_map": {
|
||||
`{"unknown":{"foo":"bar"}}`,
|
||||
UnknownStruct{
|
||||
Unknown: map[string]interface{}{
|
||||
"foo": "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
"union_integer": {
|
||||
`{"union":12}`,
|
||||
UnionStruct{
|
||||
Union: UnionInteger(12),
|
||||
},
|
||||
},
|
||||
	"union_struct_discriminated_a": {
		`{"union":{"a":"foo","b":"bar","type":"typeA"}}`,
		UnionStruct{
			Union: UnionStructA{
				Type: "typeA",
				A:    "foo",
				B:    "bar",
			},
		},
	},
	"union_struct_discriminated_b": {
		`{"union":{"a":"foo","type":"typeB"}}`,
		UnionStruct{
			Union: UnionStructB{
				Type: "typeB",
				A:    "foo",
			},
		},
	},
	"union_struct_time": {
		`{"union":"2010-05-23"}`,
		UnionStruct{
			Union: UnionTime(time.Date(2010, 05, 23, 0, 0, 0, 0, time.UTC)),
		},
	},
	"complex_union_a": {
		`{"union":{"boo":"12","foo":true}}`,
		ComplexUnionStruct{Union: ComplexUnionA{Boo: "12", Foo: true}},
	},
	"complex_union_b": {
		`{"union":{"boo":true,"foo":"12"}}`,
		ComplexUnionStruct{Union: ComplexUnionB{Boo: true, Foo: "12"}},
	},
	"complex_union_c": {
		`{"union":{"boo":12}}`,
		ComplexUnionStruct{Union: ComplexUnionC{Boo: 12}},
	},
	"complex_union_type_a": {
		`{"union":{"baz":12,"type":"a"}}`,
		ComplexUnionStruct{Union: ComplexUnionTypeA{Baz: 12, Type: TypeA("a")}},
	},
	"complex_union_type_b": {
		`{"union":{"baz":12,"type":"b"}}`,
		ComplexUnionStruct{Union: ComplexUnionTypeB{Baz: 12, Type: TypeB("b")}},
	},
	"marshalling_union_a": {
		`{"boo":"hello"}`,
		MarshallingUnionStruct{Union: MarshallingUnionA{Boo: "hello"}},
	},
	"marshalling_union_b": {
		`{"foo":"hi"}`,
		MarshallingUnionStruct{Union: MarshallingUnionB{Foo: "hi"}},
	},
	"unmarshal": {
		`{"foo":"hello"}`,
		&UnmarshalStruct{Foo: "hello", prop: true},
	},
	"array_of_unmarshal": {
		`[{"foo":"hello"}]`,
		[]UnmarshalStruct{{Foo: "hello", prop: true}},
	},
	"inline_coerce": {
		`{"a":false,"b":237628372683,"c":654,"d":9999.43,"e":43.76,"f":[1,2,3,4]}`,
		Inline{
			InlineField: Primitives{A: false, B: 237628372683, C: 0x28e, D: 9999.43, E: 43.76, F: []int{1, 2, 3, 4}},
			JSON: InlineJSON{
				InlineField: Field{raw: "{\"a\":false,\"b\":237628372683,\"c\":654,\"d\":9999.43,\"e\":43.76,\"f\":[1,2,3,4]}", status: 3},
				raw:         "{\"a\":false,\"b\":237628372683,\"c\":654,\"d\":9999.43,\"e\":43.76,\"f\":[1,2,3,4]}",
			},
		},
	},
	"inline_array_coerce": {
		`["Hello","foo","bar"]`,
		InlineArray{
			InlineField: []string{"Hello", "foo", "bar"},
			JSON: InlineJSON{
				InlineField: Field{raw: `["Hello","foo","bar"]`, status: 3},
				raw:         `["Hello","foo","bar"]`,
			},
		},
	},
}

func TestDecode(t *testing.T) {
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			result := reflect.New(reflect.TypeOf(test.val))
			if err := Unmarshal([]byte(test.buf), result.Interface()); err != nil {
				t.Fatalf("deserialization of %v failed with error %v", result, err)
			}
			if !reflect.DeepEqual(result.Elem().Interface(), test.val) {
				t.Fatalf("expected '%s' to deserialize to \n%#v\nbut got\n%#v", test.buf, test.val, result.Elem().Interface())
			}
		})
	}
}

func TestEncode(t *testing.T) {
	for name, test := range tests {
		if strings.HasSuffix(name, "_coerce") {
			continue
		}
		t.Run(name, func(t *testing.T) {
			raw, err := Marshal(test.val)
			if err != nil {
				t.Fatalf("serialization of %v failed with error %v", test.val, err)
			}
			if string(raw) != test.buf {
				t.Fatalf("expected %+#v to serialize to %s but got %s", test.val, test.buf, string(raw))
			}
		})
	}
}
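
The discriminated-union cases above only decode correctly when the union variants have been registered with the decoder. A minimal round-trip sketch of the first case follows; it assumes the UnionStruct fixtures and their registration defined elsewhere in this test package (not shown in this hunk):

// Sketch only: mirrors the "union_struct_discriminated_a" case above.
func exampleUnionRoundTrip() (UnionStruct, error) {
	var dst UnionStruct
	err := Unmarshal([]byte(`{"union":{"a":"foo","b":"bar","type":"typeA"}}`), &dst)
	// On success, dst.Union holds UnionStructA{Type: "typeA", A: "foo", B: "bar"}.
	return dst, err
}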

120
packages/tui/sdk/internal/apijson/port.go
Normal file
@@ -0,0 +1,120 @@
package apijson

import (
	"fmt"
	"reflect"
)

// Port copies over values from one struct to another struct.
func Port(from any, to any) error {
	toVal := reflect.ValueOf(to)
	fromVal := reflect.ValueOf(from)

	if toVal.Kind() != reflect.Ptr || toVal.IsNil() {
		return fmt.Errorf("destination must be a non-nil pointer")
	}

	for toVal.Kind() == reflect.Ptr {
		toVal = toVal.Elem()
	}
	toType := toVal.Type()

	for fromVal.Kind() == reflect.Ptr {
		fromVal = fromVal.Elem()
	}
	fromType := fromVal.Type()

	if toType.Kind() != reflect.Struct {
		return fmt.Errorf("destination must be a non-nil pointer to a struct (%v %v)", toType, toType.Kind())
	}

	values := map[string]reflect.Value{}
	fields := map[string]reflect.Value{}

	fromJSON := fromVal.FieldByName("JSON")
	toJSON := toVal.FieldByName("JSON")

	// Iterate through the fields of v and load all the "normal" fields in the struct to the map of
	// string to reflect.Value, as well as their raw .JSON.Foo counterpart indicated by j.
	var getFields func(t reflect.Type, v reflect.Value)
	getFields = func(t reflect.Type, v reflect.Value) {
		j := v.FieldByName("JSON")

		// Recurse into anonymous fields first, since the fields on the object should win over the fields in the
		// embedded object.
		for i := 0; i < t.NumField(); i++ {
			field := t.Field(i)
			if field.Anonymous {
				getFields(field.Type, v.Field(i))
				continue
			}
		}

		for i := 0; i < t.NumField(); i++ {
			field := t.Field(i)
			ptag, ok := parseJSONStructTag(field)
			if !ok || ptag.name == "-" {
				continue
			}
			values[ptag.name] = v.Field(i)
			if j.IsValid() {
				fields[ptag.name] = j.FieldByName(field.Name)
			}
		}
	}
	getFields(fromType, fromVal)

	// Use the values from the previous step to populate the 'to' struct.
	for i := 0; i < toType.NumField(); i++ {
		field := toType.Field(i)
		ptag, ok := parseJSONStructTag(field)
		if !ok {
			continue
		}
		if ptag.name == "-" {
			continue
		}
		if value, ok := values[ptag.name]; ok {
			delete(values, ptag.name)
			if field.Type.Kind() == reflect.Interface {
				toVal.Field(i).Set(value)
			} else {
				switch value.Kind() {
				case reflect.String:
					toVal.Field(i).SetString(value.String())
				case reflect.Bool:
					toVal.Field(i).SetBool(value.Bool())
				case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
					toVal.Field(i).SetInt(value.Int())
				case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
					toVal.Field(i).SetUint(value.Uint())
				case reflect.Float32, reflect.Float64:
					toVal.Field(i).SetFloat(value.Float())
				default:
					toVal.Field(i).Set(value)
				}
			}
		}

		if fromJSONField, ok := fields[ptag.name]; ok {
			if toJSONField := toJSON.FieldByName(field.Name); toJSONField.IsValid() {
				toJSONField.Set(fromJSONField)
			}
		}
	}

	// Finally, copy over the .JSON.raw and .JSON.ExtraFields
	if toJSON.IsValid() {
		if raw := toJSON.FieldByName("raw"); raw.IsValid() {
			setUnexportedField(raw, fromJSON.Interface().(interface{ RawJSON() string }).RawJSON())
		}

		if toExtraFields := toJSON.FieldByName("ExtraFields"); toExtraFields.IsValid() {
			if fromExtraFields := fromJSON.FieldByName("ExtraFields"); fromExtraFields.IsValid() {
				setUnexportedField(toExtraFields, fromExtraFields.Interface())
			}
		}
	}

	return nil
}
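
A short usage sketch: Port widens a narrower variant into the combined type by copying fields that share a json tag name. CardVisa and Card are the fixtures defined in port_test.go below; the widenVisa wrapper is illustrative, not part of the package.

// Sketch using the CardVisa/Card fixtures from port_test.go below.
func widenVisa(visa CardVisa) (Card, error) {
	var card Card
	// Matching fields (processor, is_foo, data, metadata, value) and the .JSON
	// metadata are copied; fields absent on CardVisa (is_bar) keep their zero value.
	if err := Port(visa, &card); err != nil {
		return Card{}, err
	}
	return card, nil
}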

257
packages/tui/sdk/internal/apijson/port_test.go
Normal file
@@ -0,0 +1,257 @@
package apijson

import (
	"reflect"
	"testing"
)

type Metadata struct {
	CreatedAt string `json:"created_at"`
}

// Card is the "combined" type of CardVisa and CardMastercard
type Card struct {
	Processor CardProcessor `json:"processor"`
	Data      any           `json:"data"`
	IsFoo     bool          `json:"is_foo"`
	IsBar     bool          `json:"is_bar"`
	Metadata  Metadata      `json:"metadata"`
	Value     interface{}   `json:"value"`

	JSON cardJSON
}

type cardJSON struct {
	Processor   Field
	Data        Field
	IsFoo       Field
	IsBar       Field
	Metadata    Field
	Value       Field
	ExtraFields map[string]Field
	raw         string
}

func (r cardJSON) RawJSON() string { return r.raw }

type CardProcessor string

// CardVisa
type CardVisa struct {
	Processor CardVisaProcessor `json:"processor"`
	Data      CardVisaData      `json:"data"`
	IsFoo     bool              `json:"is_foo"`
	Metadata  Metadata          `json:"metadata"`
	Value     string            `json:"value"`

	JSON cardVisaJSON
}

type cardVisaJSON struct {
	Processor   Field
	Data        Field
	IsFoo       Field
	Metadata    Field
	Value       Field
	ExtraFields map[string]Field
	raw         string
}

func (r cardVisaJSON) RawJSON() string { return r.raw }

type CardVisaProcessor string

type CardVisaData struct {
	Foo string `json:"foo"`
}

// CardMastercard
type CardMastercard struct {
	Processor CardMastercardProcessor `json:"processor"`
	Data      CardMastercardData      `json:"data"`
	IsBar     bool                    `json:"is_bar"`
	Metadata  Metadata                `json:"metadata"`
	Value     bool                    `json:"value"`

	JSON cardMastercardJSON
}

type cardMastercardJSON struct {
	Processor   Field
	Data        Field
	IsBar       Field
	Metadata    Field
	Value       Field
	ExtraFields map[string]Field
	raw         string
}

func (r cardMastercardJSON) RawJSON() string { return r.raw }

type CardMastercardProcessor string

type CardMastercardData struct {
	Bar int64 `json:"bar"`
}

type CommonFields struct {
	Metadata Metadata `json:"metadata"`
	Value    string   `json:"value"`

	JSON commonFieldsJSON
}

type commonFieldsJSON struct {
	Metadata    Field
	Value       Field
	ExtraFields map[string]Field
	raw         string
}

type CardEmbedded struct {
	CommonFields
	Processor CardVisaProcessor `json:"processor"`
	Data      CardVisaData      `json:"data"`
	IsFoo     bool              `json:"is_foo"`

	JSON cardEmbeddedJSON
}

type cardEmbeddedJSON struct {
	Processor   Field
	Data        Field
	IsFoo       Field
	ExtraFields map[string]Field
	raw         string
}

func (r cardEmbeddedJSON) RawJSON() string { return r.raw }

var portTests = map[string]struct {
	from any
	to   any
}{
	"visa to card": {
		CardVisa{
			Processor: "visa",
			IsFoo:     true,
			Data: CardVisaData{
				Foo: "foo",
			},
			Metadata: Metadata{
				CreatedAt: "Mar 29 2024",
			},
			Value: "value",
			JSON: cardVisaJSON{
				raw:         `{"processor":"visa","is_foo":true,"data":{"foo":"foo"}}`,
				Processor:   Field{raw: `"visa"`, status: valid},
				IsFoo:       Field{raw: `true`, status: valid},
				Data:        Field{raw: `{"foo":"foo"}`, status: valid},
				Value:       Field{raw: `"value"`, status: valid},
				ExtraFields: map[string]Field{"extra": {raw: `"yo"`, status: valid}},
			},
		},
		Card{
			Processor: "visa",
			IsFoo:     true,
			IsBar:     false,
			Data: CardVisaData{
				Foo: "foo",
			},
			Metadata: Metadata{
				CreatedAt: "Mar 29 2024",
			},
			Value: "value",
			JSON: cardJSON{
				raw:         `{"processor":"visa","is_foo":true,"data":{"foo":"foo"}}`,
				Processor:   Field{raw: `"visa"`, status: valid},
				IsFoo:       Field{raw: `true`, status: valid},
				Data:        Field{raw: `{"foo":"foo"}`, status: valid},
				Value:       Field{raw: `"value"`, status: valid},
				ExtraFields: map[string]Field{"extra": {raw: `"yo"`, status: valid}},
			},
		},
	},
	"mastercard to card": {
		CardMastercard{
			Processor: "mastercard",
			IsBar:     true,
			Data: CardMastercardData{
				Bar: 13,
			},
			Value: false,
		},
		Card{
			Processor: "mastercard",
			IsFoo:     false,
			IsBar:     true,
			Data: CardMastercardData{
				Bar: 13,
			},
			Value: false,
		},
	},
	"embedded to card": {
		CardEmbedded{
			CommonFields: CommonFields{
				Metadata: Metadata{
					CreatedAt: "Mar 29 2024",
				},
				Value: "embedded_value",
				JSON: commonFieldsJSON{
					Metadata: Field{raw: `{"created_at":"Mar 29 2024"}`, status: valid},
					Value:    Field{raw: `"embedded_value"`, status: valid},
					raw:      `should not matter`,
				},
			},
			Processor: "visa",
			IsFoo:     true,
			Data: CardVisaData{
				Foo: "embedded_foo",
			},
			JSON: cardEmbeddedJSON{
				raw:       `{"processor":"visa","is_foo":true,"data":{"foo":"embedded_foo"},"metadata":{"created_at":"Mar 29 2024"},"value":"embedded_value"}`,
				Processor: Field{raw: `"visa"`, status: valid},
				IsFoo:     Field{raw: `true`, status: valid},
				Data:      Field{raw: `{"foo":"embedded_foo"}`, status: valid},
			},
		},
		Card{
			Processor: "visa",
			IsFoo:     true,
			IsBar:     false,
			Data: CardVisaData{
				Foo: "embedded_foo",
			},
			Metadata: Metadata{
				CreatedAt: "Mar 29 2024",
			},
			Value: "embedded_value",
			JSON: cardJSON{
				raw:       `{"processor":"visa","is_foo":true,"data":{"foo":"embedded_foo"},"metadata":{"created_at":"Mar 29 2024"},"value":"embedded_value"}`,
				Processor: Field{raw: `"visa"`, status: 0x3},
				IsFoo:     Field{raw: "true", status: 0x3},
				Data:      Field{raw: `{"foo":"embedded_foo"}`, status: 0x3},
				Metadata:  Field{raw: `{"created_at":"Mar 29 2024"}`, status: 0x3},
				Value:     Field{raw: `"embedded_value"`, status: 0x3},
			},
		},
	},
}

func TestPort(t *testing.T) {
	for name, test := range portTests {
		t.Run(name, func(t *testing.T) {
			toVal := reflect.New(reflect.TypeOf(test.to))

			err := Port(test.from, toVal.Interface())
			if err != nil {
				t.Fatalf("port of %v failed with error %v", test.from, err)
			}

			if !reflect.DeepEqual(toVal.Elem().Interface(), test.to) {
				t.Fatalf("expected:\n%+#v\n\nto port to:\n%+#v\n\nbut got:\n%+#v", test.from, test.to, toVal.Elem().Interface())
			}
		})
	}
}

41
packages/tui/sdk/internal/apijson/registry.go
Normal file
@@ -0,0 +1,41 @@
package apijson

import (
	"reflect"

	"github.com/tidwall/gjson"
)

type UnionVariant struct {
	TypeFilter         gjson.Type
	DiscriminatorValue interface{}
	Type               reflect.Type
}

var unionRegistry = map[reflect.Type]unionEntry{}
var unionVariants = map[reflect.Type]interface{}{}

type unionEntry struct {
	discriminatorKey string
	variants         []UnionVariant
}

func RegisterUnion(typ reflect.Type, discriminator string, variants ...UnionVariant) {
	unionRegistry[typ] = unionEntry{
		discriminatorKey: discriminator,
		variants:         variants,
	}
	for _, variant := range variants {
		unionVariants[variant.Type] = typ
	}
}
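
A registration sketch for a discriminated union like the UnionStruct fixture in decoder_test.go. The interface name UnionStructUnion and the init hook are assumptions for illustration; the real fixtures register themselves elsewhere in the test package.

// Minimal sketch: each JSON object whose "type" key matches a discriminator
// value is decoded into the corresponding variant type.
func init() {
	RegisterUnion(
		reflect.TypeOf((*UnionStructUnion)(nil)).Elem(), // assumed union interface type
		"type",                                          // discriminator key in the payload
		UnionVariant{TypeFilter: gjson.JSON, DiscriminatorValue: "typeA", Type: reflect.TypeOf(UnionStructA{})},
		UnionVariant{TypeFilter: gjson.JSON, DiscriminatorValue: "typeB", Type: reflect.TypeOf(UnionStructB{})},
	)
}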

// Useful to wrap a union type to force it through [UnmarshalRoot], since you cannot define an
// UnmarshalJSON method on the interface itself.
type UnionUnmarshaler[T any] struct {
	Value T
}

func (c *UnionUnmarshaler[T]) UnmarshalJSON(buf []byte) error {
	return UnmarshalRoot(buf, &c.Value)
}
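
A usage sketch for the wrapper: AnimalUnion stands in for any union interface registered with RegisterUnion, and the snippet assumes "encoding/json" is imported.

// Sketch: decode into an interface-typed union via the wrapper.
func decodeAnimal(buf []byte) (AnimalUnion, error) {
	var wrapped UnionUnmarshaler[AnimalUnion]
	// encoding/json calls wrapped.UnmarshalJSON, which delegates to UnmarshalRoot.
	err := json.Unmarshal(buf, &wrapped)
	return wrapped.Value, err
}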

47
packages/tui/sdk/internal/apijson/tag.go
Normal file
@@ -0,0 +1,47 @@
package apijson

import (
	"reflect"
	"strings"
)

const jsonStructTag = "json"
const formatStructTag = "format"

type parsedStructTag struct {
	name     string
	required bool
	extras   bool
	metadata bool
	inline   bool
}

func parseJSONStructTag(field reflect.StructField) (tag parsedStructTag, ok bool) {
	raw, ok := field.Tag.Lookup(jsonStructTag)
	if !ok {
		return
	}
	parts := strings.Split(raw, ",")
	if len(parts) == 0 {
		return tag, false
	}
	tag.name = parts[0]
	for _, part := range parts[1:] {
		switch part {
		case "required":
			tag.required = true
		case "extras":
			tag.extras = true
		case "metadata":
			tag.metadata = true
		case "inline":
			tag.inline = true
		}
	}
	return
}

func parseFormatStructTag(field reflect.StructField) (format string, ok bool) {
	format, ok = field.Tag.Lookup(formatStructTag)
	return
}
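
An illustrative reading of two common tag shapes (the widget type is made up for the example; it is not part of the package):

// Illustrative only: what parseJSONStructTag recovers from typical SDK tags.
func exampleTagParsing() (parsedStructTag, parsedStructTag) {
	type widget struct {
		ID          string           `json:"id,required"`
		ExtraFields map[string]Field `json:"-,extras"`
	}
	t := reflect.TypeOf(widget{})
	idTag, _ := parseJSONStructTag(t.Field(0))    // parsedStructTag{name: "id", required: true}
	extraTag, _ := parseJSONStructTag(t.Field(1)) // parsedStructTag{name: "-", extras: true}
	return idTag, extraTag
}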