Commit ed5b8e0 ("interim")
1 parent: 743ccf6

23 files changed: +587 -100 lines

.gitignore

+4-1
@@ -15,8 +15,11 @@
 *.out
 
 # Dependency directories (remove the comment below to include it)
-# vendor/
+vendor/
 
 # Go workspace file
 go.work
 **/.ignore*
+.coverage
+
+.DS_Store

Makefile

+52
@@ -0,0 +1,52 @@
OWNER = dnitsch
NAME := async-api-generator
GIT_TAG := "0.0.0"
VERSION := "v$(GIT_TAG)"
REVISION := "aaaabbbb"

LDFLAGS := -ldflags="-s -w -X \"github.com/$(OWNER)/$(NAME)/cmd.Version=$(VERSION)\" -X \"github.com/$(OWNER)/$(NAME)/cmd.Revision=$(REVISION)\" -extldflags -static"

install:
	go mod tidy
	go mod vendor

install_ci:
	go mod vendor

.PHONY: clean
clean:
	rm -rf bin/*
	rm -rf dist/*
	rm -rf vendor/*
	mkdir -p dist

bingen:
	for os in darwin linux windows; do \
		GOOS=$$os CGO_ENABLED=0 go build -mod=readonly -buildvcs=false $(LDFLAGS) -o dist/$(NAME)-$$os ./cmd; \
	done

build: clean install bingen

build_ci: clean install_ci bingen

cross-build: bingen

tag:
	git tag -a $(VERSION) -m "ci tag release" $(REVISION)
	git push origin $(VERSION)

release:
	OWNER=$(OWNER) NAME=$(NAME) PAT=$(PAT) VERSION=$(VERSION) . hack/release.sh

test_prereq:
	mkdir -p .coverage
	go install github.com/jstemmer/go-junit-report/v2@latest && \
	go install github.com/axw/gocov/gocov@latest && \
	go install github.com/AlekSi/gocov-xml@latest

test: test_prereq
	go test ./... -v -mod=readonly -race -coverprofile=.coverage/out | go-junit-report > .coverage/report-junit.xml && \
	gocov convert .coverage/out | gocov-xml > .coverage/report-cobertura.xml

coverage: test
	go tool cover -html=.coverage/out
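
The `-X` flags in `LDFLAGS` stamp a version and revision into the binary at link time. The `cmd` package they point at is not part of this commit, so the following is only a sketch of the package-level variables those flags assume exist; the names are inferred from the flag values, not confirmed source.

```go
// Sketch only: variables the -ldflags "-X .../cmd.Version=..." flags assume.
// The cmd package is not included in this commit; names are inferred from LDFLAGS.
package cmd

var (
	Version  = "0.0.1-dev" // overwritten at build time via -X github.com/dnitsch/async-api-generator/cmd.Version
	Revision = "unknown"   // overwritten at build time via -X github.com/dnitsch/async-api-generator/cmd.Revision
)
```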

README.md

+22
@@ -27,3 +27,25 @@ Special consideration will need to be given to files that are not able to contai
Not using an existing parser generator (CFG/BNF/EBNF based) is deliberate, as the input source will only ever be composed of the parts we care about, i.e. `gendoc` markers, their beginning and end, and the text they enclose.

We'll use an overly simplified Pratt parser (top-down method), as we have no need for expression parsing, only statement node creation with the associated/helper attributes.
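
As an aside, the marker shape those statements come from is visible in the lexer tests added in this commit: a `//+gendoc` line carrying a comma-separated meta annotation, the enclosed text, and a closing `//-gendoc`. A minimal illustration follows; the annotation string is copied from `pkg/lexer/lexer_test.go`, while the enclosed type is a hypothetical example.

```go
// Illustrative only: gendoc markers as the lexer expects them.
// The enclosed type is hypothetical; the annotation is copied from the lexer tests.
package example

//+gendoc type=message,subtype=example,consumer=[],producer=[]
type OrderAccepted struct {
	OrderID string `json:"orderId"` // everything between the markers is captured as text
}
//-gendoc
```
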
## AsyncAPI standard

The current [AsyncAPI standard spec](https://www.asyncapi.com/docs/reference/specification/v2.6.0) is at version `2.6.0`.

The tool will deal with all the relevant sections to be able to build an AsyncAPI spec file from within a single repo.

The AsyncAPI document is built from the `Application` - i.e. the service - down; each service will have a top-level description under the `info` key, which will in turn include the properties below.

### Important Properties

- `id` name of the service. Will default to the parent folder name, unless overridden.
  - format: `urn:$business_domain:$bounded_context_domain:$service_name` => `urn:whds:packing:whds.packing.app`
- `application`
  info about the service/application, including descriptions and titles.
- `channels` outlines the topics/subscriptions or queues the application produces to or is subscribed to.
- `topic/queue/subscription`
  will each contain a message summary description, schema, and any traits - i.e. reusable components - such as the envelope for common parameters.
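
A rough sketch of how the properties just listed might be modelled in Go; the type and field names below are illustrative only and are not taken from this commit's code.

```go
// Hypothetical sketch of the AsyncAPI sections described above.
// Type and field names are illustrative, not part of this commit.
package spec

// Spec mirrors the top-level keys the generator cares about: id, the
// application info, and the channels it produces to or consumes from.
type Spec struct {
	ID       string             `yaml:"id"` // e.g. urn:whds:packing:whds.packing.app
	Info     Info               `yaml:"info"`
	Channels map[string]Channel `yaml:"channels"`
}

// Info carries the service/application title and description.
type Info struct {
	Title       string `yaml:"title"`
	Description string `yaml:"description"`
}

// Channel stands in for a topic, queue or subscription, with its message
// summary, schema and any reusable traits (e.g. a shared envelope).
type Channel struct {
	Summary string   `yaml:"summary"`
	Schema  any      `yaml:"schema"`
	Traits  []string `yaml:"traits"`
}
```
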
### EventCatalog binding

This covers the translation of AsyncAPI into an EventCatalog set-up. Whilst there are fairly standard mappings between the two formats, there are some nuances and requirements.

hack/boilerplate.go.txt

+6
@@ -0,0 +1,6 @@
/*

Copyright DNITSCH - WTFPL

*/

hack/release.sh

+10
@@ -0,0 +1,10 @@
#!/usr/bin/env bash

id=$(curl -X POST -u $OWNER:$PAT -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/$OWNER/$NAME/releases -d "{\"tag_name\":\"$VERSION\",\"generate_release_notes\":true,\"prerelease\":false}" | jq -r .id)

upload_url=https://uploads.github.com/repos/$OWNER/$NAME/releases/$id/assets

for asset in dist/*; do \
	name=$(echo $asset | cut -c 6-)
	curl -u $OWNER:$PAT -H "Content-Type: application/x-binary" -X POST --data-binary "@$asset" "$upload_url?name=$name"
done

internal/cmdutil/cmdutil.go

+33
@@ -0,0 +1,33 @@
package cmdutil

import (
	"io/fs"
	"path/filepath"
	"strings"
)

type FileList struct {
	Name string // full name of the file
	Path string // full path to the file - either relative or absolute
	Type string // type of file, e.g. schema json, CS, TF, K8sYaml, HelmYaml
}

func ListFiles(baseDir string) ([]*FileList, error) {
	// initialise the result slice
	files := []*FileList{}
	if err := filepath.WalkDir(baseDir, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}

		if !d.IsDir() {
			files = append(files, &FileList{Name: d.Name(), Path: path, Type: strings.TrimPrefix(filepath.Ext(path), ".")})
			return nil
		}
		return nil
	}); err != nil {
		// return nil even if some files were already collected before the error
		return nil, err
	}
	return files, nil
}

internal/cmdutil/cmdutil_test.go

+47
@@ -0,0 +1,47 @@
package cmdutil_test

import (
	"os"
	"strings"
	"testing"

	"github.com/dnitsch/async-api-generator/internal/cmdutil"
)

func Test_ListFiles_in_mock_directory(t *testing.T) {
	baseDir := "./test/samples"
	// debug test purposes only
	cwd, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	if strings.Contains(cwd, "internal/cmdutil") {
		baseDir = "../../test/samples"
	}
	// END DEBUG
	got, err := cmdutil.ListFiles(baseDir)
	if err != nil {
		t.Fatalf("%v", err)
	}
	ttests := []struct {
		expectName string
		expectPath string
		expectType string
	}{
		{"sample.cs", "../../test/samples/business/sample.cs", "cs"},
		{"index.md", "../../test/samples/index.md", "md"},
		{"sample.tf", "../../test/samples/infra/sample.tf", "tf"},
	}
	for idx, tt := range ttests {
		gotLf := got[idx]
		if gotLf.Name != tt.expectName {
			t.Errorf("expected names of files to be equal. got: %s, wanted: %s", gotLf.Name, tt.expectName)
		}
		if gotLf.Path != tt.expectPath {
			t.Errorf("expected path of files to be equal. got: %s, wanted: %s", gotLf.Path, tt.expectPath)
		}
		if gotLf.Type != tt.expectType {
			t.Errorf("expected types of files to be equal. got: %s, wanted: %s", gotLf.Type, tt.expectType)
		}
	}
}

pkg/crawl/crawl.go

+3
@@ -0,0 +1,3 @@

package crawl

pkg/crawl/crawl_test.go

+18
@@ -0,0 +1,18 @@
package crawl_test

import "testing"

func Test_Crawl_single_file_input(t *testing.T) {
	ttests := map[string]struct {
		objType any
	}{
		"test1": {
			objType: nil,
		},
	}
	for name, tt := range ttests {
		t.Run(name, func(t *testing.T) {
			_ = tt
		})
	}
}

pkg/lexer/lexer.go

+7-7
@@ -8,8 +8,8 @@ import (
 
 const (
 	// Literals
-	BEGIN_DOC string = " gendoc"
-	END_DOC   string = " !gendoc"
+	BEGIN_DOC string = "+gendoc"
+	END_DOC   string = "-gendoc"
 )
 
 // nonText characters captures all character sets that are _not_ assignable to TEXT
@@ -40,10 +40,10 @@ func (l *Lexer) NextToken() token.Token {
 		// if next rune is a `/` then we have to consume it from lexer
 		l.readChar()
 		if l.peekIsDocGenBegin() {
-			tok = l.readDocAnnotation(token.Token{Type: token.BEGIN_DOC_GEN, Literal: "// gendoc"})
+			tok = l.readDocAnnotation(token.Token{Type: token.BEGIN_DOC_GEN, Literal: "//+gendoc"})
 			// return l.readDocAnnotation(token.Token{Type: token.BEGIN_DOC_GEN, Literal: "// gendoc"})
 		} else if l.peekIsDocGenEnd() {
-			tok = token.Token{Type: token.END_DOC_GEN, Literal: "// !gendoc"}
+			tok = token.Token{Type: token.END_DOC_GEN, Literal: "//-gendoc"}
 		} else {
 			// it is not a doc gen marker assigning double slash as text
 			tok = token.Token{Type: token.TEXT, Literal: "//"}
@@ -105,11 +105,11 @@ func (l *Lexer) setTextSeparatorToken() token.Token {
 // readDocAnnotation reads the rest of the line identified by
 func (l *Lexer) readDocAnnotation(tok token.Token) token.Token {
 	metaTag := ""
-	for l.ch != '\n' {
+	for l.peekChar() != '\n' {
 		metaTag += string(l.peekChar())
 		l.readChar()
 	}
-	tok.MetaTags = strings.TrimSpace(metaTag)
+	tok.MetaAnnotation = strings.TrimSpace(metaTag)
 	return tok
 }
 
@@ -134,7 +134,7 @@ func (l *Lexer) peekIsDocGenBegin() bool {
 func (l *Lexer) peekIsDocGenEnd() bool {
 	count := 0
 	docGen := ""
-	for count < 8 {
+	for count < 7 {
 		count++
 		docGen += string(l.peekChar())
 		l.readChar()
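
The scan bound in `peekIsDocGenEnd` drops from 8 to 7, which lines up with the end marker literal shrinking from ` !gendoc` (eight characters) to `-gendoc` (seven). A hedged alternative sketch that derives the bound from the constant rather than hard-coding it; illustrative only, not code from this commit.

```go
// Illustrative alternative, not part of this commit: tie the peek bound to
// the END_DOC literal so the loop tracks the marker length automatically.
package lexersketch

const END_DOC = "-gendoc" // mirrors the literal introduced in pkg/lexer

// endMarkerScanLen expresses what the hard-coded `count < 7` encodes: the
// lexer peeks exactly len(END_DOC) characters after the leading "//" to
// decide whether it is looking at an end marker.
func endMarkerScanLen() int {
	return len(END_DOC) // 7
}
```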

pkg/lexer/lexer_test.go

+7-10
@@ -1,8 +1,6 @@
 package lexer_test
 
 import (
-	"fmt"
-	"strings"
 	"testing"
 
 	"github.com/dnitsch/async-api-generator/pkg/lexer"
@@ -11,11 +9,11 @@ import (
 
 func Test(t *testing.T) {
 	input := `foo stuyfsdfsf
-// gendoc type=message,subtype=example,consumer=[],producer=[]
+//+gendoc type=message,subtype=example,consumer=[],producer=[]
 class {
 	stuff string {get; set;}
 }
-// !gendoc
+//-gendoc
 
 /// <summary> ignorethis
 # another comment
@@ -28,7 +26,8 @@ class {
 		{token.SPACE, " "},
 		{token.TEXT, "stuyfsdfsf"},
 		{token.NEW_LINE, "\n"},
-		{token.BEGIN_DOC_GEN, "// gendoc"},
+		{token.BEGIN_DOC_GEN, "//+gendoc"},
+		{token.NEW_LINE, "\n"},
 		{token.TEXT, "class"},
 		{token.SPACE, " "},
 		{token.TEXT, "{"},
@@ -44,7 +43,7 @@
 		{token.NEW_LINE, "\n"},
 		{token.TEXT, "}"},
 		{token.NEW_LINE, "\n"},
-		{token.END_DOC_GEN, "// !gendoc"},
+		{token.END_DOC_GEN, "//-gendoc"},
 		{token.NEW_LINE, "\n"},
 		{token.NEW_LINE, "\n"},
 		{token.TEXT, "//"},
@@ -66,7 +65,6 @@ class {
 	for i, tt := range ttests {
 
 		tok := l.NextToken()
-		fmt.Println(tok)
 		if tok.Type != tt.expectedType {
 			t.Fatalf("tests[%d] - tokentype wrong. got=%q, expected=%q",
 				i, tok.Type, tt.expectedType)
@@ -76,9 +74,8 @@ class {
 			t.Fatalf("tests[%d] - literal wrong. got=%q, expected=%q",
 				i, tok.Literal, tt.expectedLiteral)
 		}
-		if tok.Type == token.BEGIN_DOC_GEN && len(tok.MetaTags) < 1 && strings.EqualFold(tok.MetaTags, " type=message,subtype=example,consumer=[],producer=[]\n") {
-			// if tok.MetaTags
-			t.Errorf("gendoc token should include ")
+		if tok.Type == token.BEGIN_DOC_GEN && len(tok.MetaAnnotation) < 1 && tok.MetaAnnotation != "type=message,subtype=example,consumer=[],producer=[]" {
+			t.Errorf("gendoc meta annotation should include %s", "type=message,subtype=example,consumer=[],producer=[]")
 		}
 	}
 }

pkg/ast/ast.go renamed to pkg/parser/ast.go

+14-7
@@ -1,4 +1,4 @@
-package ast
+package parser
 
 import (
 	"bytes"
@@ -34,9 +34,8 @@ type GenDoc struct {
 func (p *GenDoc) TokenLiteral() string {
 	if len(p.Statements) > 0 {
 		return p.Statements[0].TokenLiteral()
-	} else {
-		return ""
 	}
+	return ""
 }
 
 func (p *GenDoc) String() string {
@@ -49,10 +48,18 @@ func (p *GenDoc) String() string {
 	return out.String()
 }
 
-/*
-* Statements will encapsulate expressions so they can be later evaluated
-*/
+type ContentType string
+
+const (
+
+)
+
+type GenDocMetaAnnotation struct {
+	Type ContentType
+}
+
 // GenDocStatement
+// Statements will encapsulate expressions so they can be later evaluated
 type GenDocStatement struct {
 	Token token.Token // token.BEGIN_GEN_DOC token
 	Name  *EnclosedIdentifier
@@ -65,7 +72,7 @@ func (ls *GenDocStatement) String() string {
 	var out bytes.Buffer
 
 	out.WriteString(ls.TokenLiteral())
-	out.WriteString(fmt.Sprintf("%s %s", ls.Name.String(), ls.Token.MetaTags))
+	out.WriteString(fmt.Sprintf("%s %s", ls.Name.String(), ls.Token.MetaAnnotation))
 
 	if ls.Value != nil {
 		out.WriteString(ls.Value.String())
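
The new `ContentType` arrives with an empty `const` block, so its values are not defined anywhere in this commit. Purely as a hypothetical, guessed only from the `type=message,subtype=example` annotation exercised in the lexer tests, it might eventually hold something like the following:

```go
// Hypothetical only: the commit leaves the ContentType const block empty.
// These values are guessed from the `type=message,subtype=example` meta
// annotation used in pkg/lexer/lexer_test.go, not from the author's code.
package parsersketch

type ContentType string

const (
	MessageContent ContentType = "message"
	ExampleContent ContentType = "example"
)
```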
