cue/ast/astutil: add and use Sanitize function
Sanitize is meant for general use by any converter that
generates CUE.
It uses the same resolution code as Resolve, allowing for
a more accurate resolution than a hand-made solution.
This CL uses it in jsonschema to fix an outstanding issue.
The stricter resolution also exposed a bug in the Proto parser,
which is also fixed in this CL by moving the Proto parser
to use Sanitize.
Fixes #379
Change-Id: I674083871d25c377658286ff878200f6a109b652
Reviewed-on: https://cue-review.googlesource.com/c/cue/+/5963
Reviewed-by: Marcel van Lohuizen <mpvl@google.com>
diff --git a/cmd/cue/cmd/orphans.go b/cmd/cue/cmd/orphans.go
index 94b2eea..de54d92 100644
--- a/cmd/cue/cmd/orphans.go
+++ b/cmd/cue/cmd/orphans.go
@@ -15,7 +15,6 @@
package cmd
import (
- "errors"
"fmt"
"path/filepath"
"regexp"
@@ -25,6 +24,7 @@
"cuelang.org/go/cue/ast"
"cuelang.org/go/cue/ast/astutil"
"cuelang.org/go/cue/build"
+ "cuelang.org/go/cue/errors"
"cuelang.org/go/cue/parser"
"cuelang.org/go/cue/token"
"cuelang.org/go/internal"
@@ -191,7 +191,18 @@
"recordCount", ast.NewLit(token.INT, strconv.Itoa(len(objs))),
)
}
- inst, err := runtime.CompileExpr(expr)
+ var f *ast.File
+ if s, ok := expr.(*ast.StructLit); ok {
+ f = &ast.File{Decls: s.Elts}
+ } else {
+ f = &ast.File{Decls: []ast.Decl{&ast.EmbedDecl{Expr: expr}}}
+ }
+ err := astutil.Sanitize(f)
+ if err != nil {
+ return nil, errors.Wrapf(err, token.NoPos,
+ "invalid combination of input files")
+ }
+ inst, err := runtime.CompileFile(f)
if err != nil {
return nil, err
}
@@ -273,7 +284,7 @@
}
}
- return f, nil
+ return f, astutil.Sanitize(f)
}
func parseFullPath(inst *cue.Instance, exprs string) (p []ast.Label, t []token.Token, err error) {
diff --git a/cmd/cue/cmd/testdata/script/def_jsonschema.txt b/cmd/cue/cmd/testdata/script/def_jsonschema.txt
index 5c6224c..258ef36 100644
--- a/cmd/cue/cmd/testdata/script/def_jsonschema.txt
+++ b/cmd/cue/cmd/testdata/script/def_jsonschema.txt
@@ -16,12 +16,14 @@
-- expect-stdout --
package schema
+import "strings"
+
#Person: {
// The person's first name.
firstName?: string
// The person's last name.
- lastName?: string
+ lastName?: strings.MinRunes(1)
// Age in years which must be equal to or greater than zero.
age?: >=0
@@ -40,7 +42,8 @@
},
"lastName": {
"type": "string",
- "description": "The person's last name."
+ "description": "The person's last name.",
+ "minLength": 1
},
"age": {
"description": "Age in years which must be equal to or greater than zero.",
@@ -63,6 +66,6 @@
-- expect-stderr2 --
age: conflicting values "twenty" and >=0 (mismatched types string and number):
- 11:7
+ 13:7
./data.yaml:1:7
-- cue.mod --
diff --git a/cmd/cue/cmd/testdata/script/import_proto.txt b/cmd/cue/cmd/testdata/script/import_proto.txt
index 6ab00a8..932c0a1 100644
--- a/cmd/cue/cmd/testdata/script/import_proto.txt
+++ b/cmd/cue/cmd/testdata/script/import_proto.txt
@@ -164,9 +164,9 @@
package v1
import (
- "googleapis.com/acme/test"
- "googleapis.com/acme/test/test"
"time"
+ "googleapis.com/acme/test"
+ "googleapis.com/acme/test/test:test_test"
)
// Attributes defines attributes.
diff --git a/cue/ast/astutil/sanitize.go b/cue/ast/astutil/sanitize.go
new file mode 100644
index 0000000..c5b3092
--- /dev/null
+++ b/cue/ast/astutil/sanitize.go
@@ -0,0 +1,355 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package astutil
+
+import (
+ "fmt"
+ "math/rand"
+ "strings"
+
+ "cuelang.org/go/cue/ast"
+ "cuelang.org/go/cue/errors"
+ "cuelang.org/go/cue/token"
+)
+
+// TODO:
+// - handle comprehensions
+// - change field from foo to "foo" if it isn't referenced, rather than
+// relying on introducing a unique alias.
+// - change a predeclared identifier reference to use the __ident form,
+// instead of introducing an alias.
+
+// Sanitize rewrites File f in place to be well formed after automated
+// construction of an AST.
+//
+// Rewrites:
+// - auto inserts imports associated with Idents
+// - unshadows imports associated with idents
+// - unshadows references for identifiers that were already resolved.
+//
+func Sanitize(f *ast.File) error {
+ z := &sanitizer{
+ file: f,
+ rand: rand.New(rand.NewSource(808)),
+
+ names: map[string]bool{},
+ importMap: map[string]*ast.ImportSpec{},
+ referenced: map[ast.Node]bool{},
+ altMap: map[ast.Node]string{},
+ }
+
+ // Gather all names.
+ walk(&scope{
+ errFn: z.errf,
+ nameFn: z.addName,
+ identFn: z.markUsed,
+ }, f)
+ if z.errs != nil {
+ return z.errs
+ }
+
+ // Add imports and unshadow.
+ s := &scope{
+ file: f,
+ errFn: z.errf,
+ identFn: z.handleIdent,
+ index: make(map[string]entry),
+ }
+ z.fileScope = s
+ walk(s, f)
+ if z.errs != nil {
+ return z.errs
+ }
+
+ z.cleanImports()
+
+ return z.errs
+}
+
+type sanitizer struct {
+ file *ast.File
+ fileScope *scope
+
+ rand *rand.Rand
+
+ // names is all used names. Can be used to determine a new unique name.
+ names map[string]bool
+ referenced map[ast.Node]bool
+
+ // altMap defines an alternative name for an existing entry link (a field,
+ // alias or let clause). As new names are globally unique, they can be
+ // safely reused for any unshadowing.
+ altMap map[ast.Node]string
+ importMap map[string]*ast.ImportSpec
+
+ errs errors.Error
+}
+
+func (z *sanitizer) errf(p token.Pos, msg string, args ...interface{}) {
+ z.errs = errors.Append(z.errs, errors.Newf(p, msg, args...))
+}
+
+func (z *sanitizer) addName(name string) {
+ z.names[name] = true
+}
+
+func (z *sanitizer) addRename(base string, n ast.Node) (alt string, new bool) {
+ if name, ok := z.altMap[n]; ok {
+ return name, false
+ }
+
+ name := z.uniqueName(base, false)
+ z.altMap[n] = name
+ return name, true
+}
+
+func (z *sanitizer) unshadow(parent ast.Node, base string, link ast.Node) string {
+ name, ok := z.altMap[link]
+ if !ok {
+ name = z.uniqueName(base, false)
+ z.altMap[link] = name
+
+ // Insert new let clause at top to refer to a declaration in possible
+ // other files.
+ let := &ast.LetClause{
+ Ident: ast.NewIdent(name),
+ Expr: ast.NewIdent(base),
+ }
+
+ var decls *[]ast.Decl
+
+ switch x := parent.(type) {
+ case *ast.File:
+ decls = &x.Decls
+ case *ast.StructLit:
+ decls = &x.Elts
+ default:
+ panic("impossible scope")
+ }
+
+ i := 0
+ for ; i < len(*decls); i++ {
+ if (*decls)[i] == link {
+ break
+ }
+ if f, ok := (*decls)[i].(*ast.Field); ok && f.Label == link {
+ break
+ }
+ }
+
+ if i > 0 {
+ ast.SetRelPos(let, token.NewSection)
+ }
+
+ a := append((*decls)[:i:i], let)
+ *decls = append(a, (*decls)[i:]...)
+ }
+ return name
+}
+
+func (z *sanitizer) markUsed(s *scope, n *ast.Ident) bool {
+ if n.Node != nil {
+ return false
+ }
+ _, _, entry := s.lookup(n.String())
+ z.referenced[entry.link] = true
+ return true
+}
+
+func (z *sanitizer) cleanImports() {
+ for _, d := range z.file.Decls {
+ switch id := d.(type) {
+ case *ast.Package, *ast.CommentGroup:
+ case *ast.ImportDecl:
+ k := 0
+ for _, s := range id.Specs {
+ if _, ok := z.referenced[s]; ok {
+ id.Specs[k] = s
+ k++
+ }
+ }
+ id.Specs = id.Specs[:k]
+
+ default:
+ return
+ }
+ }
+}
+
+func (z *sanitizer) handleIdent(s *scope, n *ast.Ident) bool {
+ if n.Node == nil {
+ return true
+ }
+
+ _, _, node := s.lookup(n.Name)
+ if node.node == nil {
+ spec, ok := n.Node.(*ast.ImportSpec)
+ if !ok {
+ // Clear node. A reference may have been moved to a different
+ // file. If not, it should be an error.
+ n.Node = nil
+ n.Scope = nil
+ return false
+ }
+
+ _ = z.addImport(spec)
+ info, _ := ParseImportSpec(spec)
+ z.fileScope.insert(info.Ident, spec, spec)
+ return true
+ }
+
+ if x, ok := n.Node.(*ast.ImportSpec); ok {
+ xi, _ := ParseImportSpec(x)
+
+ if y, ok := node.node.(*ast.ImportSpec); ok {
+ yi, _ := ParseImportSpec(y)
+ if xi.ID == yi.ID { // name must be identical as a result of lookup.
+ z.referenced[y] = true
+ n.Node = x
+ n.Scope = nil
+ return false
+ }
+ }
+
+ // Either:
+ // - the import is shadowed
+ // - an incorrect import is matched
+ // In all cases we need to create a new import with a unique name or
+ // use a previously created one.
+ spec := z.importMap[xi.ID]
+ if spec == nil {
+ name := z.uniqueName(xi.Ident, false)
+ spec = z.addImport(&ast.ImportSpec{
+ Name: ast.NewIdent(name),
+ Path: x.Path,
+ })
+ z.importMap[xi.ID] = spec
+ z.fileScope.insert(name, spec, spec)
+ }
+
+ info, _ := ParseImportSpec(spec)
+ // TODO(apply): replace n itself directly
+ n.Name = info.Ident
+ n.Node = spec
+ n.Scope = nil
+ return false
+ }
+
+ if node.node == n.Node {
+ return true
+ }
+
+ // n.Node != node.node, both are non-nil, and n.Node is not an ImportSpec.
+ // This means that either n.Node is illegal or shadowed.
+ // Look for the scope in which n.Node is defined and add an alias or let.
+
+ parent, e, ok := s.resolveScope(n.Name, n.Node)
+ if !ok {
+ // The node isn't within a legal scope within this file. It may only
+ // possibly shadow a value of another file. We add a top-level let
+ // clause to refer to this value.
+
+ // TODO(apply): better would be to have resolve use Apply so that we can replace
+ // the entire ast.Ident, rather than modifying it.
+ // TODO: resolve to new node or rely on another pass of Resolve?
+ n.Name = z.unshadow(z.file, n.Name, n)
+ n.Node = nil
+ n.Scope = nil
+
+ return false
+ }
+
+ var name string
+ // var isNew bool
+ switch x := e.link.(type) {
+ case *ast.Field: // referring to regular field.
+ name, ok = z.altMap[x]
+ if ok {
+ break
+ }
+ // If this field has no alias, introduce one with a unique name.
+ // If this has an alias, also introduce a new name. There is a
+ // possibility that the alias can be used, but it is easier to just
+ // assign a new name, assuming this case is rather rare.
+ switch y := x.Label.(type) {
+ case *ast.Alias:
+ name = z.unshadow(parent, y.Ident.Name, y)
+
+ case *ast.Ident:
+ var isNew bool
+ name, isNew = z.addRename(y.Name, x)
+ if isNew {
+ x.Label = &ast.Alias{Ident: ast.NewIdent(name), Expr: y}
+ }
+
+ default:
+ // This is an illegal reference.
+ return false
+ }
+
+ case *ast.LetClause:
+ name = z.unshadow(parent, x.Ident.Name, x)
+
+ case *ast.Alias:
+ name = z.unshadow(parent, x.Ident.Name, x)
+
+ default:
+ panic(fmt.Sprintf("unexpected link type %T", e.link))
+ }
+
+ // TODO(apply): better would be to have resolve use Apply so that we can replace
+ // the entire ast.Ident, rather than modifying it.
+ n.Name = name
+ n.Node = nil
+ n.Scope = nil
+
+ return true
+}
+
+// uniqueName returns a new globally unique name of the form
+// base_XX ... base_XXXXXXXXXXXXXX or _base or the same pattern with a '_'
+// prefix if hidden is true.
+//
+// It prefers short extensions over large ones, while ensuring the likelihood of
+// fast termination is high. There are at least two digits to make it visually
+// clearer this concerns a generated number.
+//
+func (z *sanitizer) uniqueName(base string, hidden bool) string {
+ if hidden && !strings.HasPrefix(base, "_") {
+ base = "_" + base
+ if !z.names[base] {
+ z.names[base] = true
+ return base
+ }
+ }
+
+ // TODO(go1.13): const mask = 0xff_ffff_ffff_ffff
+ const mask = 0xffffffffffffff // max bits; stay clear of int64 overflow
+ const shift = 4 // rate of growth
+ for n := int64(0x10); ; n = int64(mask&((n<<shift)-1)) + 1 {
+ num := z.rand.Intn(int(n))
+ name := fmt.Sprintf("%s_%01X", base, num)
+ if !z.names[name] {
+ z.names[name] = true
+ return name
+ }
+ }
+}
+
+func (z *sanitizer) addImport(spec *ast.ImportSpec) *ast.ImportSpec {
+ spec = insertImport(&z.file.Decls, spec)
+ z.referenced[spec] = true
+ return spec
+}
diff --git a/cue/ast/astutil/sanitize_test.go b/cue/ast/astutil/sanitize_test.go
new file mode 100644
index 0000000..997396f
--- /dev/null
+++ b/cue/ast/astutil/sanitize_test.go
@@ -0,0 +1,495 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package astutil_test
+
+import (
+ "testing"
+
+ "cuelang.org/go/cue/ast"
+ "cuelang.org/go/cue/ast/astutil"
+ "cuelang.org/go/cue/format"
+ "cuelang.org/go/internal"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSanitize(t *testing.T) {
+ testCases := []struct {
+ desc string
+ file *ast.File
+ want string
+ }{{
+ desc: "Take existing import and rename it",
+ file: func() *ast.File {
+ spec := ast.NewImport(nil, "list")
+ spec.AddComment(internal.NewComment(true, "will be renamed"))
+ return &ast.File{Decls: []ast.Decl{
+ &ast.ImportDecl{Specs: []*ast.ImportSpec{spec}},
+ &ast.EmbedDecl{
+ Expr: ast.NewStruct(
+ ast.NewIdent("list"), ast.NewCall(
+ ast.NewSel(&ast.Ident{Name: "list", Node: spec},
+ "Min")),
+ )},
+ }}
+ }(),
+ want: `import (
+ // will be renamed
+ list_1 "list"
+)
+
+{
+ list: list_1.Min()
+}
+`,
+ }, {
+ desc: "Take existing import and rename it",
+ file: func() *ast.File {
+ spec := ast.NewImport(nil, "list")
+ return &ast.File{Decls: []ast.Decl{
+ &ast.ImportDecl{Specs: []*ast.ImportSpec{spec}},
+ &ast.Field{
+ Label: ast.NewIdent("a"),
+ Value: ast.NewStruct(
+ ast.NewIdent("list"), ast.NewCall(
+ ast.NewSel(&ast.Ident{Name: "list", Node: spec}, "Min")),
+ ),
+ },
+ }}
+ }(),
+ want: `import list_1 "list"
+
+a: {
+ list: list_1.Min()
+}
+`,
+ }, {
+ desc: "One import added, one removed",
+ file: &ast.File{Decls: []ast.Decl{
+ &ast.ImportDecl{Specs: []*ast.ImportSpec{
+ {Path: ast.NewString("foo")},
+ }},
+ &ast.Field{
+ Label: ast.NewIdent("a"),
+ Value: ast.NewCall(
+ ast.NewSel(&ast.Ident{
+ Name: "bar",
+ Node: &ast.ImportSpec{Path: ast.NewString("bar")},
+ }, "Min")),
+ },
+ }},
+ want: `import "bar"
+
+a: bar.Min()
+`,
+ }, {
+ desc: "Rename duplicate import",
+ file: func() *ast.File {
+ spec1 := ast.NewImport(nil, "bar")
+ spec2 := ast.NewImport(nil, "foo/bar")
+ spec3 := ast.NewImport(ast.NewIdent("bar"), "foo")
+ return &ast.File{Decls: []ast.Decl{
+ internal.NewComment(false, "File comment"),
+ &ast.Package{Name: ast.NewIdent("pkg")},
+ &ast.Field{
+ Label: ast.NewIdent("a"),
+ Value: ast.NewStruct(
+ ast.NewIdent("b"), ast.NewCall(
+ ast.NewSel(&ast.Ident{Name: "bar", Node: spec1}, "A")),
+ ast.NewIdent("c"), ast.NewCall(
+ ast.NewSel(&ast.Ident{Name: "bar", Node: spec2}, "A")),
+ ast.NewIdent("d"), ast.NewCall(
+ ast.NewSel(&ast.Ident{Name: "bar", Node: spec3}, "A")),
+ ),
+ },
+ }}
+ }(),
+ want: `// File comment
+
+package pkg
+
+import (
+ "bar"
+ bar_1 "foo/bar"
+ bar_5 "foo"
+)
+
+a: {
+ b: bar.A()
+ c: bar_1.A()
+ d: bar_5.A()
+}
+`,
+ }, {
+ desc: "Rename duplicate import, reuse and drop",
+ file: func() *ast.File {
+ spec1 := ast.NewImport(nil, "bar")
+ spec2 := ast.NewImport(nil, "foo/bar")
+ spec3 := ast.NewImport(ast.NewIdent("bar"), "foo")
+ return &ast.File{Decls: []ast.Decl{
+ &ast.ImportDecl{Specs: []*ast.ImportSpec{
+ spec3,
+ ast.NewImport(nil, "foo"),
+ }},
+ &ast.Field{
+ Label: ast.NewIdent("a"),
+ Value: ast.NewStruct(
+ ast.NewIdent("b"), ast.NewCall(
+ ast.NewSel(&ast.Ident{Name: "bar", Node: spec1}, "A")),
+ ast.NewIdent("c"), ast.NewCall(
+ ast.NewSel(&ast.Ident{Name: "bar", Node: spec2}, "A")),
+ ast.NewIdent("d"), ast.NewCall(
+ ast.NewSel(&ast.Ident{Name: "bar", Node: spec3}, "A")),
+ ),
+ },
+ }}
+ }(),
+ want: `import (
+ bar "foo"
+ bar_1 "bar"
+ bar_5 "foo/bar"
+)
+
+a: {
+ b: bar_1.A()
+ c: bar_5.A()
+ d: bar.A()
+}
+`,
+ }, {
+ desc: "Reuse different import",
+ file: &ast.File{Decls: []ast.Decl{
+ &ast.Package{Name: ast.NewIdent("pkg")},
+ &ast.ImportDecl{Specs: []*ast.ImportSpec{
+ {Path: ast.NewString("bar")},
+ }},
+ &ast.Field{
+ Label: ast.NewIdent("a"),
+ Value: ast.NewStruct(
+ ast.NewIdent("list"), ast.NewCall(
+ ast.NewSel(&ast.Ident{
+ Name: "bar",
+ Node: &ast.ImportSpec{Path: ast.NewString("bar")},
+ }, "Min")),
+ ),
+ },
+ }},
+ want: `package pkg
+
+import "bar"
+
+a: {
+ list: bar.Min()
+}
+`,
+ }, {
+ desc: "Clear reference that does not exist in scope",
+ file: &ast.File{Decls: []ast.Decl{
+ &ast.Field{
+ Label: ast.NewIdent("a"),
+ Value: ast.NewStruct(
+ ast.NewIdent("b"), &ast.Ident{
+ Name: "c",
+ Node: ast.NewString("foo"),
+ },
+ ast.NewIdent("d"), ast.NewIdent("e"),
+ ),
+ },
+ }},
+ want: `a: {
+ b: c
+ d: e
+}
+`,
+ }, {
+ desc: "Unshadow possible reference to other file",
+ file: &ast.File{Decls: []ast.Decl{
+ &ast.Field{
+ Label: ast.NewIdent("a"),
+ Value: ast.NewStruct(
+ ast.NewIdent("b"), &ast.Ident{
+ Name: "c",
+ Node: ast.NewString("foo"),
+ },
+ ast.NewIdent("c"), ast.NewIdent("d"),
+ ),
+ },
+ }},
+ want: `a: {
+ b: c_1
+ c: d
+}
+
+let c_1 = c
+`,
+ }, {
+ desc: "Add alias to shadowed field",
+ file: func() *ast.File {
+ field := &ast.Field{
+ Label: ast.NewIdent("a"),
+ Value: ast.NewString("b"),
+ }
+ return &ast.File{Decls: []ast.Decl{
+ field,
+ &ast.Field{
+ Label: ast.NewIdent("c"),
+ Value: ast.NewStruct(
+ ast.NewIdent("a"), ast.NewStruct(),
+ ast.NewIdent("b"), &ast.Ident{
+ Name: "a",
+ Node: field.Value,
+ },
+ ast.NewIdent("c"), ast.NewIdent("d"),
+ ),
+ },
+ }}
+ }(),
+ want: `a_1=a: "b"
+c: {
+ a: {}
+ b: a_1
+ c: d
+}
+`,
+ }, {
+ desc: "Add let clause to shadowed field",
+ // Resolve both identifiers to same clause.
+ file: func() *ast.File {
+ field := &ast.Field{
+ Label: ast.NewIdent("a"),
+ Value: ast.NewString("b"),
+ }
+ return &ast.File{Decls: []ast.Decl{
+ field,
+ &ast.Field{
+ Label: ast.NewIdent("c"),
+ Value: ast.NewStruct(
+ ast.NewIdent("a"), ast.NewStruct(),
+ // Remove this reference.
+ ast.NewIdent("b"), &ast.Ident{
+ Name: "a",
+ Node: field.Value,
+ },
+ ast.NewIdent("c"), ast.NewIdent("d"),
+ ast.NewIdent("e"), &ast.Ident{
+ Name: "a",
+ Node: field.Value,
+ },
+ ),
+ },
+ }}
+ }(),
+ want: `a_1=a: "b"
+c: {
+ a: {}
+ b: a_1
+ c: d
+ e: a_1
+}
+`,
+ }, {
+ desc: "Add let clause to shadowed field",
+ // Resolve both identifiers to same clause.
+ file: func() *ast.File {
+ fieldX := &ast.Field{
+ Label: &ast.Alias{
+ Ident: ast.NewIdent("X"),
+ Expr: ast.NewIdent("a"), // shadowed
+ },
+ Value: ast.NewString("b"),
+ }
+ fieldY := &ast.Field{
+ Label: &ast.Alias{
+ Ident: ast.NewIdent("Y"), // shadowed
+ Expr: ast.NewIdent("q"), // not shadowed
+ },
+ Value: ast.NewString("b"),
+ }
+ return &ast.File{Decls: []ast.Decl{
+ fieldX,
+ fieldY,
+ &ast.Field{
+ Label: ast.NewIdent("c"),
+ Value: ast.NewStruct(
+ ast.NewIdent("a"), ast.NewStruct(),
+ ast.NewIdent("b"), &ast.Ident{
+ Name: "X",
+ Node: fieldX,
+ },
+ ast.NewIdent("c"), ast.NewIdent("d"),
+ ast.NewIdent("e"), &ast.Ident{
+ Name: "a",
+ Node: fieldX.Value,
+ },
+ ast.NewIdent("f"), &ast.Ident{
+ Name: "Y",
+ Node: fieldY,
+ },
+ ),
+ },
+ }}
+ }(),
+ want: `
+let X_1 = X
+X=a: "b"
+Y=q: "b"
+c: {
+ a: {}
+ b: X
+ c: d
+ e: X_1
+ f: Y
+}
+`,
+ }, {
+ desc: "Add let clause to nested shadowed field",
+ // Resolve both identifiers to same clause.
+ file: func() *ast.File {
+ field := &ast.Field{
+ Label: ast.NewIdent("a"),
+ Value: ast.NewString("b"),
+ }
+ return &ast.File{Decls: []ast.Decl{
+ &ast.Field{
+ Label: ast.NewIdent("b"),
+ Value: ast.NewStruct(
+ field,
+ ast.NewIdent("b"), ast.NewStruct(
+ ast.NewIdent("a"), ast.NewString("bar"),
+ ast.NewIdent("b"), &ast.Ident{
+ Name: "a",
+ Node: field.Value,
+ },
+ ast.NewIdent("e"), &ast.Ident{
+ Name: "a",
+ Node: field.Value,
+ },
+ ),
+ ),
+ },
+ }}
+ }(),
+ want: `b: {
+ a_1=a: "b"
+ b: {
+ a: "bar"
+ b: a_1
+ e: a_1
+ }
+}
+`,
+ }, {
+ desc: "Add let clause to nested shadowed field with alias",
+ // Resolve both identifiers to same clause.
+ file: func() *ast.File {
+ field := &ast.Field{
+ Label: &ast.Alias{
+ Ident: ast.NewIdent("X"),
+ Expr: ast.NewIdent("a"),
+ },
+ Value: ast.NewString("b"),
+ }
+ return &ast.File{Decls: []ast.Decl{
+ &ast.Field{
+ Label: ast.NewIdent("b"),
+ Value: ast.NewStruct(
+ field,
+ ast.NewIdent("b"), ast.NewStruct(
+ ast.NewIdent("a"), ast.NewString("bar"),
+ ast.NewIdent("b"), &ast.Ident{
+ Name: "a",
+ Node: field.Value,
+ },
+ ast.NewIdent("e"), &ast.Ident{
+ Name: "a",
+ Node: field.Value,
+ },
+ ),
+ ),
+ },
+ }}
+ }(),
+ want: `b: {
+ let X_1 = X
+ X=a: "b"
+ b: {
+ a: "bar"
+ b: X_1
+ e: X_1
+ }
+}
+`,
+ }}
+ for _, tc := range testCases {
+ t.Run(tc.desc, func(t *testing.T) {
+ err := astutil.Sanitize(tc.file)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ b, errs := format.Node(tc.file)
+ if errs != nil {
+ t.Fatal(errs)
+ }
+
+ got := string(b)
+ assert.Equal(t, got, tc.want)
+ })
+ }
+}
+
+// For testing purposes: do not remove.
+func TestX(t *testing.T) {
+ t.Skip()
+
+ field := &ast.Field{
+ Label: &ast.Alias{
+ Ident: ast.NewIdent("X"),
+ Expr: ast.NewIdent("a"),
+ },
+ Value: ast.NewString("b"),
+ }
+
+ file := &ast.File{Decls: []ast.Decl{
+ &ast.Field{
+ Label: ast.NewIdent("b"),
+ Value: ast.NewStruct(
+ field,
+ ast.NewIdent("b"), ast.NewStruct(
+ ast.NewIdent("a"), ast.NewString("bar"),
+ ast.NewIdent("b"), &ast.Ident{
+ Name: "a",
+ Node: field.Value,
+ },
+ ast.NewIdent("e"), &ast.Ident{
+ Name: "a",
+ Node: field.Value,
+ },
+ ),
+ ),
+ },
+ }}
+
+ err := astutil.Sanitize(file)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ b, errs := format.Node(file)
+ if errs != nil {
+ t.Fatal(errs)
+ }
+
+ t.Error(string(b))
+}
diff --git a/encoding/protobuf/parse.go b/encoding/protobuf/parse.go
index 833b731..c9b2fe5 100644
--- a/encoding/protobuf/parse.go
+++ b/encoding/protobuf/parse.go
@@ -20,7 +20,6 @@
"os"
"path"
"path/filepath"
- "sort"
"strconv"
"strings"
"text/scanner"
@@ -29,7 +28,9 @@
"github.com/emicklei/proto"
"cuelang.org/go/cue/ast"
+ "cuelang.org/go/cue/ast/astutil"
"cuelang.org/go/cue/errors"
+ "cuelang.org/go/cue/format"
"cuelang.org/go/cue/parser"
"cuelang.org/go/cue/token"
"cuelang.org/go/internal/source"
@@ -69,7 +70,6 @@
tfile: tfile,
imported: map[string]bool{},
symbols: map[string]bool{},
- aliases: map[string]string{},
}
defer func() {
@@ -139,34 +139,13 @@
}
}
- imports := &ast.ImportDecl{}
- importIdx := len(p.file.Decls)
- p.file.Decls = append(p.file.Decls, imports)
-
for _, e := range d.Elements {
p.topElement(e)
}
- imported := []string{}
- for k := range p.imported {
- imported = append(imported, k)
- }
- sort.Strings(imported)
- p.sorted = imported
+ err = astutil.Sanitize(p.file)
- for _, v := range imported {
- spec := ast.NewImport(nil, v)
- imports.Specs = append(imports.Specs, spec)
- p.file.Imports = append(p.file.Imports, spec)
- }
-
- if len(imports.Specs) == 0 {
- a := p.file.Decls
- copy(a[importIdx:], a[importIdx+1:])
- p.file.Decls = a[:len(a)-1]
- }
-
- return p, nil
+ return p, err
}
// A protoConverter converts a proto definition to CUE. Proto files map to
@@ -185,20 +164,24 @@
file *ast.File
current *ast.StructLit
- sorted []string
imported map[string]bool
path []string
scope []map[string]mapping // for symbols resolution within package.
symbols map[string]bool // symbols provided by package
- aliases map[string]string // for shadowed packages
}
type mapping struct {
- ref string
- cueName string
- alias string // alias for the type, if exists.
- pkg *protoConverter
+ cue func() ast.Expr // needs to be a new copy as position changes
+ pkg *protoConverter
+}
+
+func (p *protoConverter) qualifiedImportPath() string {
+ s := p.importPath()
+ if short := p.shortPkgName; short != "" && short != path.Base(s) {
+ s += ":" + short
+ }
+ return s
}
func (p *protoConverter) importPath() string {
@@ -224,12 +207,12 @@
return p.tfile.Pos(pos.Offset, 0)
}
-func (p *protoConverter) addRef(pos scanner.Position, name, cuename string) {
+func (p *protoConverter) addRef(pos scanner.Position, name string, cue func() ast.Expr) {
top := p.scope[len(p.scope)-1]
if _, ok := top[name]; ok {
failf(pos, "entity %q already defined", name)
}
- top[name] = mapping{ref: name, cueName: cuename}
+ top[name] = mapping{cue: cue}
}
func (p *protoConverter) addNames(elems []proto.Visitee) {
@@ -261,7 +244,7 @@
}
sym := strings.Join(append(p.path, name), ".")
p.symbols[sym] = true
- p.addRef(pos, name, "#"+name)
+ p.addRef(pos, name, func() ast.Expr { return ast.NewIdent("#" + name) })
}
}
@@ -269,68 +252,24 @@
p.scope = p.scope[:len(p.scope)-1]
}
-func (p *protoConverter) uniqueTop(name string, asIs bool) string {
- a := strings.SplitN(name, ".", 2)
- for i := len(p.scope) - 1; i > 0; i-- {
- if _, ok := p.scope[i][a[0]]; ok {
- first := a[0]
- alias, ok := p.aliases[first]
- if !ok {
- // TODO: this is likely to be okay, but find something better.
- alias = "_" + first + "_"
- p.file.Decls = append(p.file.Decls, &ast.LetClause{
- Ident: ast.NewIdent(alias),
- Expr: ast.NewIdent(first),
- })
- p.aliases[first] = alias
- }
- a[0] = alias
- if len(a) > 1 && !asIs {
- a[1] = "#" + a[1]
- }
- return strings.Join(a, ".")
- }
- }
-
- if e, ok := p.scope[0][name]; ok {
- return e.cueName
- }
- // TODO: do something more principled.
- switch name {
- case "time.Time", "time.Duration":
- return name
- }
- if !asIs {
- a[len(a)-1] = "#" + a[len(a)-1]
- }
- return strings.Join(a, ".")
-}
-
-func (p *protoConverter) toExpr(pos scanner.Position, name string) (expr ast.Expr) {
- expr, err := parser.ParseExpr("", name, parser.ParseComments)
- if err != nil {
- panic(fmt.Sprintf("error parsing name %q: %v", name, err))
- }
- ast.SetPos(expr, p.toCUEPos(pos))
- return expr
-}
-
-func (p *protoConverter) resolve(pos scanner.Position, name string, options []*proto.Option) string {
- if s, ok := protoToCUE(name, options); ok {
- return p.uniqueTop(s, true)
+func (p *protoConverter) resolve(pos scanner.Position, name string, options []*proto.Option) ast.Expr {
+ if expr := protoToCUE(name, options); expr != nil {
+ ast.SetPos(expr, p.toCUEPos(pos))
+ return expr
}
if strings.HasPrefix(name, ".") {
return p.resolveTopScope(pos, name[1:], options)
}
for i := len(p.scope) - 1; i > 0; i-- {
if m, ok := p.scope[i][name]; ok {
- return m.cueName
+ return m.cue()
}
}
- return p.resolveTopScope(pos, name, options)
+ expr := p.resolveTopScope(pos, name, options)
+ return expr
}
-func (p *protoConverter) resolveTopScope(pos scanner.Position, name string, options []*proto.Option) string {
+func (p *protoConverter) resolveTopScope(pos scanner.Position, name string, options []*proto.Option) ast.Expr {
for i := 0; i < len(name); i++ {
k := strings.IndexByte(name[i:], '.')
i += k
@@ -338,18 +277,23 @@
i = len(name)
}
if m, ok := p.scope[0][name[:i]]; ok {
- isPkg := false
if m.pkg != nil {
- p.imported[m.pkg.importPath()] = true
- // TODO: do something more principled.
- isPkg = true
+ p.imported[m.pkg.qualifiedImportPath()] = true
}
- cueName := name[i:]
- return p.uniqueTop(m.ref+cueName, !isPkg || m.ref == m.cueName)
+ expr := m.cue()
+ for i < len(name) {
+ name = name[i+1:]
+ if i = strings.IndexByte(name, '.'); i == -1 {
+ i = len(name)
+ }
+ expr = ast.NewSel(expr, "#"+name[:i])
+ }
+ ast.SetPos(expr, p.toCUEPos(pos))
+ return expr
}
}
failf(pos, "name %q not found", name)
- return ""
+ return nil
}
func (p *protoConverter) doImport(v *proto.Import) error {
@@ -383,11 +327,6 @@
fail(v.Position, err)
}
- prefix := ""
- if imp.importPath() != p.importPath() {
- prefix = imp.shortName() + "."
- }
-
pkgNamespace := strings.Split(imp.protoPkg, ".")
curNamespace := strings.Split(p.protoPkg, ".")
for {
@@ -398,10 +337,23 @@
}
if _, ok := p.scope[0][ref]; !ok {
pkg := imp
- if imp.importPath() == p.importPath() {
+ a := toCue(k)
+
+ var f func() ast.Expr
+
+ if imp.qualifiedImportPath() == p.qualifiedImportPath() {
pkg = nil
+ f = func() ast.Expr { return ast.NewIdent(a[0]) }
+ } else {
+ f = func() ast.Expr {
+ ident := &ast.Ident{
+ Name: imp.shortName(),
+ Node: ast.NewImport(nil, imp.qualifiedImportPath()),
+ }
+ return ast.NewSel(ident, a[0])
+ }
}
- p.scope[0][ref] = mapping{prefix + k, prefix + toCue(k), "", pkg}
+ p.scope[0][ref] = mapping{f, pkg}
}
}
if len(pkgNamespace) == 0 {
@@ -417,12 +369,12 @@
}
// TODO: this doesn't work. Do something more principled.
-func toCue(name string) string {
+func toCue(name string) []string {
a := strings.Split(name, ".")
for i, s := range a {
a[i] = "#" + s
}
- return strings.Join(a, ".")
+ return a
}
func (p *protoConverter) stringLit(pos scanner.Position, s string) *ast.BasicLit {
@@ -557,7 +509,7 @@
// All keys are converted to strings.
// TODO: support integer keys.
f.Label = ast.NewList(ast.NewIdent("string"))
- f.Value = p.toExpr(x.Position, p.resolve(x.Position, x.Type, x.Options))
+ f.Value = p.resolve(x.Position, x.Type, x.Options)
name := p.ident(x.Position, x.Name)
f = &ast.Field{
@@ -746,14 +698,16 @@
name := p.ident(x.Position, x.Name)
f.Label = name
typ := p.resolve(x.Position, x.Type, x.Options)
- f.Value = p.toExpr(x.Position, typ)
+ f.Value = typ
s.Elts = append(s.Elts, f)
o := optionParser{message: s, field: f}
// body of @protobuf tag: sequence[,type][,name=<name>][,...]
o.tags += fmt.Sprint(x.Sequence)
- if x.Type != strings.TrimLeft(typ, "#") {
+ b, _ := format.Node(typ)
+ str := string(b)
+ if x.Type != strings.TrimLeft(str, "#") {
o.tags += ",type=" + x.Type
}
if x.Name != name.Name {
diff --git a/encoding/protobuf/protobuf.go b/encoding/protobuf/protobuf.go
index a138f67..848726d 100644
--- a/encoding/protobuf/protobuf.go
+++ b/encoding/protobuf/protobuf.go
@@ -272,11 +272,6 @@
}
inst.Files = append(inst.Files, f)
- // inst.CUEFiles = append(inst.CUEFiles, f.Filename)
- // err := parser.Resolve(f)
- // if err != nil {
- // return nil, err
- // }
for pkg := range r.p.imported {
inst.ImportPaths = append(inst.ImportPaths, pkg)
@@ -311,7 +306,7 @@
if b.errs != nil {
return nil
}
- importPath := p.importPath()
+ importPath := p.qualifiedImportPath()
if importPath == "" {
err := errors.Newf(token.NoPos,
"no package clause for proto package %q in file %s", p.id, p.file.Filename)
@@ -321,7 +316,7 @@
}
dir := b.root
- path := importPath
+ path := p.importPath()
file := p.file.Filename
if !filepath.IsAbs(file) {
file = filepath.Join(b.root, p.file.Filename)
diff --git a/encoding/protobuf/testdata/attributes.proto.out.cue b/encoding/protobuf/testdata/attributes.proto.out.cue
index 1c3ff05..5ef33a6 100644
--- a/encoding/protobuf/testdata/attributes.proto.out.cue
+++ b/encoding/protobuf/testdata/attributes.proto.out.cue
@@ -14,9 +14,10 @@
package v1
import (
- "googleapis.com/acme/test"
- "googleapis.com/acme/test/test"
"time"
+ "googleapis.com/acme/test"
+ "googleapis.com/acme/test/test:test_test"
+ time_1 "time"
)
#StructWrap: {
@@ -127,17 +128,17 @@
// Holds attributes of type TIMESTAMP
time?: {
- [string]: _time_.Time
+ [string]: time_1.Time
} @protobuf(6,type=map<sint32,google.protobuf.Timestamp>,"(gogoproto.nullable)=false","(gogoproto.stdtime)")
// Holds attributes of type DURATION
durations?: {
- [string]: _time_.Duration
+ [string]: time_1.Duration
} @protobuf(7,type=map<sint32,google.protobuf.Duration>,"(gogoproto.nullable)=false","(gogoproto.stdduration)")
// Holds attributes of type BYTES
bytes?: {
- [string]: _bytes_
+ [string]: bytes_5
} @protobuf(8,type=map<sint32,bytes>)
// Holds attributes of type STRING_MAP
@@ -145,8 +146,6 @@
[string]: #StringMap
} @protobuf(9,type=map<sint32,StringMap>,string_maps,"(gogoproto.nullable)=false")
}
-let _time_ = time
-let _bytes_ = bytes
// A map of string to string. The keys and values in this map are dictionary
// indices (see the [Attributes][istio.mixer.v1.CompressedAttributes] message for an explanation)
@@ -156,3 +155,5 @@
[string]: int32
} @protobuf(1,type=map<sint32,sint32>)
}
+
+let bytes_5 = bytes
diff --git a/encoding/protobuf/testdata/client_config.proto.out.cue b/encoding/protobuf/testdata/client_config.proto.out.cue
index 5bf1119..febae34 100644
--- a/encoding/protobuf/testdata/client_config.proto.out.cue
+++ b/encoding/protobuf/testdata/client_config.proto.out.cue
@@ -20,8 +20,8 @@
package client
import (
- "istio.io/api/mixer/v1"
"time"
+ "istio.io/api/mixer/v1"
)
// Specifies the behavior when the client is unable to connect to Mixer.
diff --git a/encoding/protobuf/testdata/istio.io/api/mixer/v1/attributes_proto_gen.cue b/encoding/protobuf/testdata/istio.io/api/mixer/v1/attributes_proto_gen.cue
index 1c3ff05..5ef33a6 100644
--- a/encoding/protobuf/testdata/istio.io/api/mixer/v1/attributes_proto_gen.cue
+++ b/encoding/protobuf/testdata/istio.io/api/mixer/v1/attributes_proto_gen.cue
@@ -14,9 +14,10 @@
package v1
import (
- "googleapis.com/acme/test"
- "googleapis.com/acme/test/test"
"time"
+ "googleapis.com/acme/test"
+ "googleapis.com/acme/test/test:test_test"
+ time_1 "time"
)
#StructWrap: {
@@ -127,17 +128,17 @@
// Holds attributes of type TIMESTAMP
time?: {
- [string]: _time_.Time
+ [string]: time_1.Time
} @protobuf(6,type=map<sint32,google.protobuf.Timestamp>,"(gogoproto.nullable)=false","(gogoproto.stdtime)")
// Holds attributes of type DURATION
durations?: {
- [string]: _time_.Duration
+ [string]: time_1.Duration
} @protobuf(7,type=map<sint32,google.protobuf.Duration>,"(gogoproto.nullable)=false","(gogoproto.stdduration)")
// Holds attributes of type BYTES
bytes?: {
- [string]: _bytes_
+ [string]: bytes_5
} @protobuf(8,type=map<sint32,bytes>)
// Holds attributes of type STRING_MAP
@@ -145,8 +146,6 @@
[string]: #StringMap
} @protobuf(9,type=map<sint32,StringMap>,string_maps,"(gogoproto.nullable)=false")
}
-let _time_ = time
-let _bytes_ = bytes
// A map of string to string. The keys and values in this map are dictionary
// indices (see the [Attributes][istio.mixer.v1.CompressedAttributes] message for an explanation)
@@ -156,3 +155,5 @@
[string]: int32
} @protobuf(1,type=map<sint32,sint32>)
}
+
+let bytes_5 = bytes
diff --git a/encoding/protobuf/testdata/istio.io/api/mixer/v1/config/client/client_config_proto_gen.cue b/encoding/protobuf/testdata/istio.io/api/mixer/v1/config/client/client_config_proto_gen.cue
index f032d06..626345a 100644
--- a/encoding/protobuf/testdata/istio.io/api/mixer/v1/config/client/client_config_proto_gen.cue
+++ b/encoding/protobuf/testdata/istio.io/api/mixer/v1/config/client/client_config_proto_gen.cue
@@ -20,8 +20,8 @@
package client
import (
- "istio.io/api/mixer/v1"
"time"
+ "istio.io/api/mixer/v1"
)
// Specifies the behavior when the client is unable to connect to Mixer.
diff --git a/encoding/protobuf/testdata/istio.io/api/mixer/v1/mixer_proto_gen.cue b/encoding/protobuf/testdata/istio.io/api/mixer/v1/mixer_proto_gen.cue
index 59535a4..47e96b9 100644
--- a/encoding/protobuf/testdata/istio.io/api/mixer/v1/mixer_proto_gen.cue
+++ b/encoding/protobuf/testdata/istio.io/api/mixer/v1/mixer_proto_gen.cue
@@ -17,7 +17,7 @@
package v1
import (
- "google.golang.org/genproto/googleapis/rpc/status"
+ status_1 "google.golang.org/genproto/googleapis/rpc/status"
"time"
)
@@ -59,7 +59,7 @@
#PreconditionResult: {
// A status code of OK indicates all preconditions were satisfied. Any other code indicates not
// all preconditions were satisfied and details describe why.
- status?: _status_.#Status @protobuf(1,type=google.rpc.Status,"(gogoproto.nullable)=false")
+ status?: status_1.#Status @protobuf(1,type=google.rpc.Status,"(gogoproto.nullable)=false")
// The amount of time for which this result can be considered valid.
validDuration?: time.Duration @protobuf(2,type=google.protobuf.Duration,name=valid_duration,"(gogoproto.nullable)=false","(gogoproto.stdduration)")
@@ -98,7 +98,6 @@
[string]: #QuotaResult
} @protobuf(3,type=map<string,QuotaResult>,"(gogoproto.nullable)=false")
}
-let _status_ = status
// Describes the attributes that were used to determine the response.
// This can be used to construct a response cache.
diff --git a/encoding/protobuf/types.go b/encoding/protobuf/types.go
index 5628660..60b55af 100644
--- a/encoding/protobuf/types.go
+++ b/encoding/protobuf/types.go
@@ -15,14 +15,21 @@
package protobuf
import (
+ "fmt"
"text/scanner"
+ "cuelang.org/go/cue/ast"
+ "cuelang.org/go/cue/parser"
+ "cuelang.org/go/cue/token"
"github.com/emicklei/proto"
)
-func protoToCUE(typ string, options []*proto.Option) (ref string, ok bool) {
+func protoToCUE(typ string, options []*proto.Option) ast.Expr {
t, ok := scalars[typ]
- return t, ok
+ if !ok {
+ return nil
+ }
+ return predeclared(t)
}
var scalars = map[string]string{
@@ -48,13 +55,33 @@
"bytes": "bytes",
}
-func (p *protoConverter) setBuiltin(from, to string, pkg *protoConverter) {
- p.scope[0][from] = mapping{to, to, "", pkg}
+func predeclared(s string) ast.Expr {
+ return &ast.Ident{
+ Name: s,
+ Node: ast.NewIdent("__" + s),
+ }
+}
+
+func (p *protoConverter) setBuiltin(from string, to func() ast.Expr, pkg *protoConverter) {
+ p.scope[0][from] = mapping{to, pkg}
+}
+
+func (p *protoConverter) setBuiltinParse(from, to string, pkg *protoConverter) {
+ f := func() ast.Expr {
+ expr, err := parser.ParseExpr("", to, parser.ParseComments)
+ if err != nil {
+ panic(fmt.Sprintf("error parsing name %q: %v", to, err))
+ }
+ return expr
+ }
+ p.scope[0][from] = mapping{f, pkg}
}
var (
- pkgTime = &protoConverter{cuePkgPath: "time"}
- pkgStruct = &protoConverter{cuePkgPath: "struct"}
+ pkgTime = &protoConverter{cuePkgPath: "time"}
+ pkgStruct = &protoConverter{cuePkgPath: "struct"}
+ importTime = ast.NewImport(nil, "time")
+ importStruct = ast.NewImport(nil, "struct")
)
func (p *protoConverter) mapBuiltinPackage(pos scanner.Position, file string, required bool) (generate bool) {
@@ -63,25 +90,61 @@
case "gogoproto/gogo.proto":
case "google/protobuf/struct.proto":
- p.setBuiltin("google.protobuf.Struct", "{}", nil)
- p.setBuiltin("google.protobuf.Value", "_", nil)
- p.setBuiltin("google.protobuf.NullValue", "null", nil)
- p.setBuiltin("google.protobuf.ListValue", "[...]", nil)
- p.setBuiltin("google.protobuf.StringValue", "string", nil)
- p.setBuiltin("google.protobuf.BoolValue", "bool", nil)
- p.setBuiltin("google.protobuf.NumberValue", "number", nil)
+ p.setBuiltin("google.protobuf.Struct", func() ast.Expr {
+ return ast.NewStruct()
+ }, nil)
+
+ p.setBuiltin("google.protobuf.Value", func() ast.Expr {
+ return ast.NewIdent("_")
+ }, nil)
+
+ p.setBuiltin("google.protobuf.NullValue", func() ast.Expr {
+ return ast.NewLit(token.NULL, "null")
+ }, nil)
+
+ p.setBuiltin("google.protobuf.ListValue", func() ast.Expr {
+ return ast.NewList(&ast.Ellipsis{})
+ }, nil)
+
+ p.setBuiltin("google.protobuf.StringValue", func() ast.Expr {
+ return predeclared("string")
+ }, nil)
+
+ p.setBuiltin("google.protobuf.BoolValue", func() ast.Expr {
+ return predeclared("bool")
+ }, nil)
+
+ p.setBuiltin("google.protobuf.NumberValue", func() ast.Expr {
+ return predeclared("number")
+ }, nil)
+
return false
case "google/protobuf/empty.proto":
- p.setBuiltin("google.protobuf.Empty", "struct.MaxFields(0)", pkgStruct)
+ f := func() ast.Expr {
+ time := &ast.Ident{Name: "struct", Node: importStruct}
+ return ast.NewCall(
+ ast.NewSel(time, "MaxFields"),
+ ast.NewLit(token.INT, "0"),
+ )
+ }
+ p.setBuiltin("google.protobuf.Empty", f, pkgStruct)
return false
case "google/protobuf/duration.proto":
- p.setBuiltin("google.protobuf.Duration", "time.Duration", pkgTime)
+ f := func() ast.Expr {
+ time := &ast.Ident{Name: "time", Node: importTime}
+ return ast.NewSel(time, "Duration")
+ }
+ p.setBuiltin("google.protobuf.Duration", f, pkgTime)
return false
case "google/protobuf/timestamp.proto":
- p.setBuiltin("google.protobuf.Timestamp", "time.Time", pkgTime)
+ f := func() ast.Expr {
+ time := &ast.Ident{Name: "time", Node: importTime}
+ return ast.NewSel(time, "Time")
+ }
+ p.setBuiltin("google.protobuf.Timestamp", f, pkgTime)
return false
case "google/protobuf/any.proto":
@@ -91,7 +154,7 @@
// be used here) are represented as strings.
//
// In Structural OpenAPI this type cannot be represented.
- p.setBuiltin("google.protobuf.Any", `{
+ p.setBuiltinParse("google.protobuf.Any", `{
// A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one "/" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `+
"`type.googleapis.com/google.protobuf.Duration`"+`). The name should be in a canonical form (e.g., leading "." is not accepted).
// The remaining fields of this object correspond to fields of the proto messsage. If the embedded message is well-known and has a custom JSON representation, that representation is assigned to the 'value' field.
@@ -100,15 +163,15 @@
return false
case "google/protobuf/wrappers.proto":
- p.setBuiltin("google.protobuf.DoubleValue", `null | float`, nil)
- p.setBuiltin("google.protobuf.FloatValue", `null | float`, nil)
- p.setBuiltin("google.protobuf.Int64Value", `null | int64`, nil)
- p.setBuiltin("google.protobuf.UInt64Value", `null | uint64`, nil)
- p.setBuiltin("google.protobuf.Int32Value", `null | int32`, nil)
- p.setBuiltin("google.protobuf.UInt32Value", `null | uint32`, nil)
- p.setBuiltin("google.protobuf.BoolValue", `null | bool`, nil)
- p.setBuiltin("google.protobuf.StringValue", `null | string`, nil)
- p.setBuiltin("google.protobuf.BytesValue", `null | bytes`, nil)
+ p.setBuiltinParse("google.protobuf.DoubleValue", `null | float`, nil)
+ p.setBuiltinParse("google.protobuf.FloatValue", `null | float`, nil)
+ p.setBuiltinParse("google.protobuf.Int64Value", `null | int64`, nil)
+ p.setBuiltinParse("google.protobuf.UInt64Value", `null | uint64`, nil)
+ p.setBuiltinParse("google.protobuf.Int32Value", `null | int32`, nil)
+ p.setBuiltinParse("google.protobuf.UInt32Value", `null | uint32`, nil)
+ p.setBuiltinParse("google.protobuf.BoolValue", `null | bool`, nil)
+ p.setBuiltinParse("google.protobuf.StringValue", `null | string`, nil)
+ p.setBuiltinParse("google.protobuf.BytesValue", `null | bytes`, nil)
return false
// case "google/protobuf/field_mask.proto":
diff --git a/internal/internal.go b/internal/internal.go
index ffc8e66..87df900 100644
--- a/internal/internal.go
+++ b/internal/internal.go
@@ -100,6 +100,21 @@
return elts, e
}
+func Imports(f *ast.File) (a []ast.Decl) {
+ for _, d := range f.Decls {
+ switch x := d.(type) {
+ case *ast.CommentGroup:
+ case *ast.Package:
+ case *ast.Attribute:
+ case *ast.ImportDecl:
+ a = append(a, x)
+ default:
+ return a
+ }
+ }
+ return a
+}
+
func PackageInfo(f *ast.File) (p *ast.Package, name string, tok token.Pos) {
for _, d := range f.Decls {
switch x := d.(type) {