Skip to content

feat: Add S3 as backend #1

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions docs/usage.md
Original file line number Diff line number Diff line change
Expand Up @@ -106,12 +106,12 @@ gendoc global-context --input local:///path/to/src/domain.sample --output local:

### Local Example

Point it to an input directory of any repo - e.g. `domain.Packing.DirectDespatchAggregation`.
Point it to an input directory of any repo - e.g. `domain.Foo.BarActionQux`.

This will generate the interim code that the `global-context` command then consumes as input.

```sh
gendoc single-context --input local://$FULL_PATH_TO/domain.Packing.DirectDespatchAggregation --is-service --bounded-ctx Packing --business-domain domain \
gendoc single-context --input local://$FULL_PATH_TO/domain.Foo.BarActionQux --is-service --bounded-ctx Foo --business-domain domain \
--repo "https://github.com/repo" \
--output local://$HOME/.gendoc/poc
```
Expand All @@ -124,4 +124,4 @@ This is then used as an input for the global-context and it will output a full A
gendoc global-context --input local://$HOME/.gendoc/poc/current --output local://$HOME/.gendoc/poc/processed
```

The files are emitted with the `AsyncAPI.ID` as the name in the `asyncapi` directory, e.g.: `asyncapi/urn:domain:Packing:domain.Packing.DirectDespatchAggregation.yml`.
The files are emitted with the `AsyncAPI.ID` as the name in the `asyncapi` directory, e.g.: `asyncapi/urn:domain:Foo:domain.Foo.BarActionQux.yml`.
122 changes: 122 additions & 0 deletions src/go/async-api-gen-doc/cmd/async-api-gen-doc/async_api_gen_doc.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
package asyncapigendoc

import (
"context"
"fmt"
"os"
"path/filepath"

"github.com/dnitsch/async-api-generator/internal/generate"
"github.com/dnitsch/async-api-generator/internal/parser"
"github.com/dnitsch/async-api-generator/internal/storage"
log "github.com/dnitsch/simplelog"
"github.com/spf13/cobra"
)

// Version and Revision identify the build. The values here are
// placeholders; they are intended to be overridden at build time
// (e.g. via -ldflags "-X ...Version=... -X ...Revision=...").
var (
Version string = "0.0.1"
Revision string = "1111aaaa"
)

// flags holds the persistent root-command flag values that are shared
// by all subcommands (see NewCmd for registration and defaults).
type flags struct {
// verbose switches the logger to debug-level output.
verbose bool
// dryRun runs in validate-only mode; nothing is emitted and the
// output location is ignored.
dryRun bool
// outputLocation is the destination URI, e.g. local:// or azblob://.
outputLocation string
// inputLocation is the source URI to start the search in; must
// include the protocol, same options as outputLocation.
inputLocation string
}

// AsyncApiGenDocCmd bundles the cobra root command together with its
// shared dependencies: the base context, logger, parsed flags, and the
// storage configurations resolved from the input/output flags.
type AsyncApiGenDocCmd struct {
// ctx is the base context commands run under (see Execute).
ctx context.Context
// Cmd is the underlying cobra root command; subcommands attach to it.
Cmd *cobra.Command
logger log.Loggeriface
rootFlags *flags
// outputStorageConfig is populated from the --output flag in PreRunE.
outputStorageConfig *storage.Conf
// inputLocationStorageConfig is populated from the --input flag in PreRunE.
inputLocationStorageConfig *storage.Conf
}

// NewCmd constructs the root "gendoc" command, wiring up the persistent
// flags and a PreRunE hook that resolves the input/output storage
// locations before any subcommand runs. Subcommands themselves are
// attached separately via WithCommands.
func NewCmd(ctx context.Context) *AsyncApiGenDocCmd {
	f := &flags{}
	aagd := &AsyncApiGenDocCmd{
		ctx:       ctx,
		logger:    log.New(os.Stderr, log.ErrorLvl),
		rootFlags: f,
	}
	aagd.Cmd = &cobra.Command{
		Use:     "gendoc",
		Aliases: []string{"aadg", "generator"},
		Short:   "Generator for AsyncAPI documents",
		Long: `Generator for AsyncAPI documents, functions by performing lexical analysis on source files in a given base directory.
These can then be further fed into other generator tools, e.g. client/server generators`,
		Example:      "",
		SilenceUsage: true,
		Version:      fmt.Sprintf("%s-%s", Version, Revision),
		PreRunE: func(cmd *cobra.Command, args []string) error {
			// Parse the storage locations up front so every subcommand
			// can rely on them being set.
			return aagd.setStorageLocation(f.inputLocation, f.outputLocation)
		},
	}
	aagd.Cmd.PersistentFlags().StringVarP(&f.outputLocation, "output", "o", "local://$HOME/.gendoc", `Output type and destination, currently only supports [local://, azblob://]. if dry-run is set then this is ignored`)
	aagd.Cmd.PersistentFlags().StringVarP(&f.inputLocation, "input", "i", "local://.", `Path to start the search in, Must include the protocol - see output for options`)
	aagd.Cmd.PersistentFlags().BoolVarP(&f.verbose, "verbose", "v", false, "Verbose output")
	// dry-run has no shorthand; BoolVar avoids passing an empty shorthand.
	aagd.Cmd.PersistentFlags().BoolVar(&f.dryRun, "dry-run", false, "Dry run only runs in validate mode and does not emit anything")
	return aagd
}

// WithCommands registers all known subcommands onto the root command.
func (c *AsyncApiGenDocCmd) WithCommands() {
	singleContextCmd(c)
	globalCtxCmd(c)
}

// Execute runs the root command with the context the command was
// constructed with, returning any error from command execution.
func (c *AsyncApiGenDocCmd) Execute() error {
return c.Cmd.ExecuteContext(c.ctx)
}

// config bootstraps the parsed pflags into a useable generate.Config.
//
// The search directory name is derived from the output destination's
// base path. Unless --dry-run is set, two temporary directories are
// created for interim state and downloads; the returned cleanup func
// removes them and should be deferred by the caller.
func (c *AsyncApiGenDocCmd) config(outConf *storage.Conf, sf *genDocContextFlags) (*generate.Config, func(), error) {
	dirName := filepath.Base(outConf.Destination)

	conf := &generate.Config{
		ParserConfig:  parser.Config{ServiceRepoUrl: sf.repoUrl, BusinessDomain: sf.businessDomain, BoundedDomain: sf.boundedCtxDomain, ServiceLanguage: sf.repoLang},
		SearchDirName: dirName,
		Output:        outConf,
	}
	if sf.isService {
		// use the current search dir name as the serviceId
		// this allows certain objects to __not__ have parentId or id specified
		conf.ParserConfig.ServiceId = dirName
	}

	if !c.rootFlags.dryRun {
		// create interim local dirs for interim state or interim download storage
		interim, err := os.MkdirTemp("", ".gendoc-interim-*")
		if err != nil {
			return nil, nil, err
		}
		download, err := os.MkdirTemp("", ".gendoc-download-*")
		if err != nil {
			// do not leak the already-created interim dir when the
			// second MkdirTemp fails (no cleanup func is returned here)
			_ = os.RemoveAll(interim)
			return nil, nil, err
		}
		conf.InterimOutputDir = interim
		conf.DownloadDir = download
	}

	return conf, func() {
		// RemoveAll on the empty paths set in dry-run mode is a no-op
		_ = os.RemoveAll(conf.InterimOutputDir)
		_ = os.RemoveAll(conf.DownloadDir)
	}, nil
}

// setStorageLocation parses the raw input and output location strings
// and stores the resulting storage configurations on the command.
// The input location is parsed first, so its parse error wins.
func (c *AsyncApiGenDocCmd) setStorageLocation(input, output string) error {
	parsed := make([]*storage.Conf, 0, 2)
	for _, raw := range []string{input, output} {
		conf, err := storage.ParseStorageOutputConfig(raw)
		if err != nil {
			return err
		}
		parsed = append(parsed, conf)
	}
	c.inputLocationStorageConfig = parsed[0]
	c.outputStorageConfig = parsed[1]
	return nil
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package asyncapigendoc_test

import (
"bytes"
"context"
"io"
"testing"

Expand All @@ -13,12 +14,12 @@ func Test_root_ok(t *testing.T) {
baseDir := "test/foo.sample"
b := new(bytes.Buffer)

cmd := asyncapigendoc.AsyncAPIGenCmd

cmd := asyncapigendoc.NewCmd(context.TODO())
cmd.WithCommands()
fshelper.DebugDirHelper(t, baseDir, "cmd/async-api-gen-doc", "../../")

cmd.SetArgs([]string{"--version"})
cmd.SetErr(b)
cmd.Cmd.SetArgs([]string{"--version"})
cmd.Cmd.SetErr(b)
cmd.Execute()
out, err := io.ReadAll(b)
if err != nil {
Expand Down
107 changes: 53 additions & 54 deletions src/go/async-api-gen-doc/cmd/async-api-gen-doc/global_ctx.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,66 +11,61 @@ import (
"github.com/spf13/cobra"
)

var (
globalCtxCmd = &cobra.Command{
func globalCtxCmd(rootCmd *AsyncApiGenDocCmd) {

cmd := &cobra.Command{
Use: "global-context",
Aliases: []string{"gc", "global"},
Short: `Runs the gendoc against a directory containing processed GenDocBlox.`,
Long: `Runs the gendoc against a directory containing processed GenDocBlox. Builds a hierarchical tree with the generated interim states across multiple contexts.
Source must be specified [see output] option for examples and structure`,
RunE: globalCtxExecute,
Source must be specified [see output] option for examples and structure`,
RunE: func(cmd *cobra.Command, args []string) error {
if rootCmd.rootFlags.verbose {
rootCmd.logger = log.New(os.Stdout, log.DebugLvl)
}

conf, cleanUp, err := rootCmd.config(rootCmd.inputLocationStorageConfig, &genDocContextFlags{})
if err != nil {
return err
}
defer cleanUp()
rootCmd.logger.Debugf("interim output: %s", conf.InterimOutputDir)
rootCmd.logger.Debugf("download output: %s", conf.DownloadDir)

ctx, cancel := context.WithCancel(cmd.Context())
defer cancel()

if err := fetchPrep(ctx, conf, rootCmd.inputLocationStorageConfig); err != nil {
return err
}

files, err := fshelper.ListFiles(conf.DownloadDir)
if err != nil {
return err
}

g := generate.New(conf, rootCmd.logger)

g.LoadInputsFromFiles(files)

if err := g.ConvertProcessed(); err != nil {
return err
}

if err := g.BuildContextTree(); err != nil {
return err
}

if err := g.AsyncAPIFromProcessedTree(); err != nil {
return err
}
return uploadPrep(ctx, g, rootCmd.outputStorageConfig)
},
PreRunE: func(cmd *cobra.Command, args []string) error {
return setStorageLocation(inputLocation, outputLocation)
return rootCmd.setStorageLocation(rootCmd.rootFlags.inputLocation, rootCmd.rootFlags.outputLocation)
},
}
)

func init() {
AsyncAPIGenCmd.AddCommand(globalCtxCmd)
}

func globalCtxExecute(cmd *cobra.Command, args []string) error {

if verbose {
logger = log.New(os.Stdout, log.DebugLvl)
}

conf, cleanUp, err := config(inputLocationStorageConfig)
if err != nil {
return err
}
defer cleanUp()
logger.Debugf("interim output: %s", conf.InterimOutputDir)
logger.Debugf("download output: %s", conf.DownloadDir)

ctx, cancel := context.WithCancel(cmd.Context())
defer cancel()

if err := fetchPrep(ctx, conf, inputLocationStorageConfig); err != nil {
return err
}

files, err := fshelper.ListFiles(conf.DownloadDir)
if err != nil {
return err
}

g := generate.New(conf, logger)

g.LoadInputsFromFiles(files)

if err := g.ConvertProcessed(); err != nil {
return err
}

if err := g.BuildContextTree(); err != nil {
return err
}

if err := g.AsyncAPIFromProcessedTree(); err != nil {
return err
}
return uploadPrep(ctx, g, outputStorageConfig)
rootCmd.Cmd.AddCommand(cmd)
}

// fetchPrep
Expand All @@ -81,7 +76,11 @@ func fetchPrep(ctx context.Context, conf *generate.Config, storageConf *storage.
return err
}

fetchReq := &storage.StorageFetchRequest{Destination: storageConf.Destination, ContainerName: storageConf.TopLevelFolder, EmitPath: conf.DownloadDir}
fetchReq := &storage.StorageFetchRequest{
Destination: storageConf.Destination,
ContainerName: storageConf.TopLevelFolder,
EmitPath: conf.DownloadDir,
}

if err := sc.Fetch(ctx, fetchReq); err != nil {
return err
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package asyncapigendoc_test

import (
"bytes"
"context"
"fmt"
"io"
"io/fs"
Expand All @@ -18,15 +19,16 @@ func Test_global_Analyis_runs_ok(t *testing.T) {
t.Run("azblob source and output", func(t *testing.T) {
t.Skip()

cmd := asyncapigendoc.AsyncAPIGenCmd
cmd := asyncapigendoc.NewCmd(context.TODO())
cmd.WithCommands()

b := new(bytes.Buffer)

cmd.SetArgs([]string{"global-context", "-i",
cmd.Cmd.SetArgs([]string{"global-context", "-i",
"azblob://stdevsandboxeuwdev/interim/current",
"--output", "azblob://stdevsandboxeuwdev/processed"})

cmd.SetErr(b)
cmd.Cmd.SetErr(b)
cmd.Execute()
out, err := io.ReadAll(b)
if err != nil {
Expand All @@ -46,18 +48,19 @@ func Test_global_Analyis_runs_ok(t *testing.T) {
}
defer os.RemoveAll(out)

cmd := asyncapigendoc.AsyncAPIGenCmd
cmd := asyncapigendoc.NewCmd(context.TODO())
cmd.WithCommands()

baseDir := "test/interim-generated"

b := new(bytes.Buffer)
output := fmt.Sprintf("local://%s", out)
cmd.SetArgs([]string{"global-context", "-i",
cmd.Cmd.SetArgs([]string{"global-context", "-i",
fmt.Sprintf("local://%s", fshelper.DebugDirHelper(t, baseDir, "cmd/async-api-gen-doc", "../../")),
"--verbose",
"--output", output})

cmd.SetErr(b)
cmd.Cmd.SetErr(b)
cmd.Execute()

rb, err := io.ReadAll(b)
Expand Down
Loading