From ae42dcc41338fd6d866821256c8ae947d17efc3e Mon Sep 17 00:00:00 2001 From: cohix Date: Sun, 20 Nov 2022 17:12:46 -0500 Subject: [PATCH 1/2] add e2 cli --- Makefile | 1 + e2/Dockerfile | 19 + e2/README.md | 66 +++ e2/builder/.image-ver | 1 + e2/builder/builder.go | 274 ++++++++++ e2/builder/builder.mk | 80 +++ e2/builder/docker/assemblyscript/Dockerfile | 6 + e2/builder/docker/grain/Dockerfile | 6 + e2/builder/docker/javascript/Dockerfile | 12 + e2/builder/docker/rust/Dockerfile | 11 + e2/builder/docker/swift/Dockerfile | 5 + e2/builder/docker/tinygo/Dockerfile | 29 ++ e2/builder/docker/wat/Dockerfile | 19 + e2/builder/native.go | 83 +++ e2/builder/prereq.go | 122 +++++ e2/builder/prereq_test.go | 69 +++ e2/builder/template/config.go | 48 ++ e2/builder/template/templates.go | 274 ++++++++++ e2/changelogs/v0.0.14.md | 9 + e2/changelogs/v0.0.15.md | 1 + e2/changelogs/v0.0.16.md | 1 + e2/changelogs/v0.0.17.md | 1 + e2/changelogs/v0.0.18.md | 1 + e2/changelogs/v0.0.19.md | 1 + e2/changelogs/v0.0.20.md | 1 + e2/changelogs/v0.0.21.md | 1 + e2/changelogs/v0.0.22.md | 1 + e2/changelogs/v0.1.0.md | 3 + e2/changelogs/v0.2.0.md | 3 + e2/changelogs/v0.2.1.md | 1 + e2/changelogs/v0.2.2.md | 8 + e2/changelogs/v0.3.0.md | 5 + e2/changelogs/v0.3.1.md | 1 + e2/changelogs/v0.3.2.md | 10 + e2/changelogs/v0.4.0.md | 1 + e2/changelogs/v0.4.1.md | 1 + e2/changelogs/v0.4.2.md | 1 + e2/changelogs/v0.5.0.md | 11 + e2/changelogs/v0.5.1.md | 1 + e2/changelogs/v0.5.2.md | 1 + e2/changelogs/v0.5.3.md | 1 + e2/changelogs/v0.5.4.md | 1 + e2/cli/command/build.go | 122 +++++ e2/cli/command/clean.go | 74 +++ e2/cli/command/create_handler.go | 73 +++ e2/cli/command/create_module.go | 158 ++++++ e2/cli/command/create_project.go | 108 ++++ e2/cli/command/create_release.go | 222 ++++++++ e2/cli/command/deploy.go | 70 +++ e2/cli/command/dev.go | 62 +++ e2/cli/command/docs.go | 485 ++++++++++++++++++ e2/cli/command/flags.go | 20 + e2/cli/command/push.go | 64 +++ e2/cli/command/scn.go | 18 + 
e2/cli/command/se2_create_token.go | 64 +++ e2/cli/command/se2_deploy.go | 366 +++++++++++++ e2/cli/features/development.go | 10 + e2/cli/features/public.go | 10 + e2/cli/input/input.go | 20 + e2/cli/localproxy/proxy.go | 71 +++ e2/cli/release/check.go | 166 ++++++ e2/cli/release/info.go | 14 + e2/cli/release/release.mk | 5 + e2/cli/release/version.go | 14 + e2/cli/repl/repl.go | 98 ++++ e2/cli/util/cache.go | 38 ++ e2/cli/util/exec.go | 80 +++ e2/cli/util/exec_test.go | 73 +++ e2/cli/util/log.go | 53 ++ e2/cli/util/mkdir.go | 19 + e2/cli/util/permissions.go | 16 + e2/cli/util/token.go | 45 ++ e2/cli/util/version_check.go | 30 ++ e2/deployer/deployer.go | 35 ++ e2/deployer/k8sdeployer.go | 113 ++++ e2/docs/get-started.md | 99 ++++ e2/docs/test/example.md | 18 + e2/docs/test/greeting_test.go | 22 + e2/docs/test/greetings.go | 11 + e2/e2.mk | 25 + e2/main.go | 14 + e2/packager/bundlepackager.go | 107 ++++ e2/packager/dockerimagepackager.go | 54 ++ e2/packager/packager.go | 40 ++ e2/packager/static.go | 50 ++ e2/project/context.go | 317 ++++++++++++ e2/project/tenantconfig.go | 225 ++++++++ e2/publisher/bindlepublisher.go | 224 ++++++++ e2/publisher/dockerpublisher.go | 52 ++ e2/publisher/publisher.go | 37 ++ e2/root.go | 74 +++ e2/scn/api.go | 141 +++++ e2/scn/emailverifier.go | 25 + e2/scn/environmenttoken.go | 26 + e2/scn/telemetry.go | 24 + e2/scn/types/emailverifier.go | 29 ++ e2/scn/types/environmenttoken.go | 12 + e2/scn/types/heartbeat.go | 13 + e2/scripts/smoketest.sh | 37 ++ .../assemblyscript/asconfig.json.tmpl | 12 + e2/templates/assemblyscript/package.json.tmpl | 19 + e2/templates/assemblyscript/src/index.ts | 33 ++ e2/templates/assemblyscript/src/lib.ts | 11 + e2/templates/assemblyscript/src/tsconfig.json | 6 + e2/templates/grain/index.gr | 59 +++ e2/templates/grain/lib.gr | 5 + e2/templates/javascript/package.json.tmpl | 17 + e2/templates/javascript/src/index.js | 16 + e2/templates/javascript/src/lib.js | 9 + e2/templates/javascript/webpack.config.js | 
20 + e2/templates/k8s/atmo-deployment.yaml.tmpl | 43 ++ e2/templates/k8s/atmo-svc.yaml.tmpl | 21 + e2/templates/project/.gitignore | 5 + e2/templates/project/Directive.yaml.tmpl | 16 + e2/templates/project/Dockerfile.tmpl | 5 + .../project/helloworld/.runnable.yaml.tmpl | 4 + .../project/helloworld/Cargo.toml.tmpl | 13 + e2/templates/project/helloworld/src/lib.rs | 20 + e2/templates/rust/Cargo.toml.tmpl | 13 + e2/templates/rust/src/lib.rs.tmpl | 20 + e2/templates/scc-docker/.who | 0 e2/templates/scc-docker/SCC.env.tmpl | 1 + .../scc-docker/config/scc-config.yaml | 24 + .../scc-docker/docker-compose.yml.tmpl | 48 ++ .../.suborbital/scc-atmo-deployment.yaml.tmpl | 57 ++ .../.suborbital/scc-autoscale.yaml.tmpl | 21 + .../scc-controlplane-deployment.yaml.tmpl | 144 ++++++ e2/templates/scc-k8s/config/scc-config.yaml | 24 + e2/templates/swift/.gitignore | 5 + e2/templates/swift/Package.swift.tmpl | 18 + .../Sources/{{ .Name }}.tmpl/main.swift.tmpl | 9 + e2/templates/tinygo/go.mod.tmpl | 7 + e2/templates/tinygo/main.go.tmpl | 16 + e2/templates/typescript/package.json.tmpl | 21 + e2/templates/typescript/src/index.ts | 22 + e2/templates/typescript/src/lib.ts | 9 + e2/templates/typescript/tsconfig.json | 19 + e2/templates/typescript/webpack.config.js | 29 ++ e2/templates/wat/lib.wat | 79 +++ e2/update_checker.go | 38 ++ e2/update_checker_docker.go | 7 + go.mod | 6 + go.sum | 13 + 143 files changed, 6667 insertions(+) create mode 100644 e2/Dockerfile create mode 100644 e2/README.md create mode 100644 e2/builder/.image-ver create mode 100644 e2/builder/builder.go create mode 100644 e2/builder/builder.mk create mode 100644 e2/builder/docker/assemblyscript/Dockerfile create mode 100644 e2/builder/docker/grain/Dockerfile create mode 100644 e2/builder/docker/javascript/Dockerfile create mode 100644 e2/builder/docker/rust/Dockerfile create mode 100644 e2/builder/docker/swift/Dockerfile create mode 100644 e2/builder/docker/tinygo/Dockerfile create mode 100644 
e2/builder/docker/wat/Dockerfile create mode 100644 e2/builder/native.go create mode 100644 e2/builder/prereq.go create mode 100644 e2/builder/prereq_test.go create mode 100644 e2/builder/template/config.go create mode 100644 e2/builder/template/templates.go create mode 100644 e2/changelogs/v0.0.14.md create mode 100644 e2/changelogs/v0.0.15.md create mode 100644 e2/changelogs/v0.0.16.md create mode 100644 e2/changelogs/v0.0.17.md create mode 100644 e2/changelogs/v0.0.18.md create mode 100644 e2/changelogs/v0.0.19.md create mode 100644 e2/changelogs/v0.0.20.md create mode 100644 e2/changelogs/v0.0.21.md create mode 100644 e2/changelogs/v0.0.22.md create mode 100644 e2/changelogs/v0.1.0.md create mode 100644 e2/changelogs/v0.2.0.md create mode 100644 e2/changelogs/v0.2.1.md create mode 100644 e2/changelogs/v0.2.2.md create mode 100644 e2/changelogs/v0.3.0.md create mode 100644 e2/changelogs/v0.3.1.md create mode 100644 e2/changelogs/v0.3.2.md create mode 100644 e2/changelogs/v0.4.0.md create mode 100644 e2/changelogs/v0.4.1.md create mode 100644 e2/changelogs/v0.4.2.md create mode 100644 e2/changelogs/v0.5.0.md create mode 100644 e2/changelogs/v0.5.1.md create mode 100644 e2/changelogs/v0.5.2.md create mode 100644 e2/changelogs/v0.5.3.md create mode 100644 e2/changelogs/v0.5.4.md create mode 100644 e2/cli/command/build.go create mode 100644 e2/cli/command/clean.go create mode 100644 e2/cli/command/create_handler.go create mode 100644 e2/cli/command/create_module.go create mode 100644 e2/cli/command/create_project.go create mode 100644 e2/cli/command/create_release.go create mode 100644 e2/cli/command/deploy.go create mode 100644 e2/cli/command/dev.go create mode 100644 e2/cli/command/docs.go create mode 100644 e2/cli/command/flags.go create mode 100644 e2/cli/command/push.go create mode 100644 e2/cli/command/scn.go create mode 100644 e2/cli/command/se2_create_token.go create mode 100644 e2/cli/command/se2_deploy.go create mode 100644 e2/cli/features/development.go 
create mode 100644 e2/cli/features/public.go create mode 100644 e2/cli/input/input.go create mode 100644 e2/cli/localproxy/proxy.go create mode 100644 e2/cli/release/check.go create mode 100644 e2/cli/release/info.go create mode 100644 e2/cli/release/release.mk create mode 100644 e2/cli/release/version.go create mode 100644 e2/cli/repl/repl.go create mode 100644 e2/cli/util/cache.go create mode 100644 e2/cli/util/exec.go create mode 100644 e2/cli/util/exec_test.go create mode 100644 e2/cli/util/log.go create mode 100644 e2/cli/util/mkdir.go create mode 100644 e2/cli/util/permissions.go create mode 100644 e2/cli/util/token.go create mode 100644 e2/cli/util/version_check.go create mode 100644 e2/deployer/deployer.go create mode 100644 e2/deployer/k8sdeployer.go create mode 100644 e2/docs/get-started.md create mode 100644 e2/docs/test/example.md create mode 100644 e2/docs/test/greeting_test.go create mode 100644 e2/docs/test/greetings.go create mode 100644 e2/e2.mk create mode 100644 e2/main.go create mode 100644 e2/packager/bundlepackager.go create mode 100644 e2/packager/dockerimagepackager.go create mode 100644 e2/packager/packager.go create mode 100644 e2/packager/static.go create mode 100644 e2/project/context.go create mode 100644 e2/project/tenantconfig.go create mode 100644 e2/publisher/bindlepublisher.go create mode 100644 e2/publisher/dockerpublisher.go create mode 100644 e2/publisher/publisher.go create mode 100644 e2/root.go create mode 100644 e2/scn/api.go create mode 100644 e2/scn/emailverifier.go create mode 100644 e2/scn/environmenttoken.go create mode 100644 e2/scn/telemetry.go create mode 100644 e2/scn/types/emailverifier.go create mode 100644 e2/scn/types/environmenttoken.go create mode 100644 e2/scn/types/heartbeat.go create mode 100755 e2/scripts/smoketest.sh create mode 100644 e2/templates/assemblyscript/asconfig.json.tmpl create mode 100644 e2/templates/assemblyscript/package.json.tmpl create mode 100644 e2/templates/assemblyscript/src/index.ts 
create mode 100644 e2/templates/assemblyscript/src/lib.ts create mode 100644 e2/templates/assemblyscript/src/tsconfig.json create mode 100644 e2/templates/grain/index.gr create mode 100644 e2/templates/grain/lib.gr create mode 100644 e2/templates/javascript/package.json.tmpl create mode 100644 e2/templates/javascript/src/index.js create mode 100644 e2/templates/javascript/src/lib.js create mode 100644 e2/templates/javascript/webpack.config.js create mode 100644 e2/templates/k8s/atmo-deployment.yaml.tmpl create mode 100644 e2/templates/k8s/atmo-svc.yaml.tmpl create mode 100644 e2/templates/project/.gitignore create mode 100644 e2/templates/project/Directive.yaml.tmpl create mode 100644 e2/templates/project/Dockerfile.tmpl create mode 100755 e2/templates/project/helloworld/.runnable.yaml.tmpl create mode 100755 e2/templates/project/helloworld/Cargo.toml.tmpl create mode 100755 e2/templates/project/helloworld/src/lib.rs create mode 100644 e2/templates/rust/Cargo.toml.tmpl create mode 100644 e2/templates/rust/src/lib.rs.tmpl create mode 100644 e2/templates/scc-docker/.who create mode 100644 e2/templates/scc-docker/SCC.env.tmpl create mode 100644 e2/templates/scc-docker/config/scc-config.yaml create mode 100644 e2/templates/scc-docker/docker-compose.yml.tmpl create mode 100644 e2/templates/scc-k8s/.suborbital/scc-atmo-deployment.yaml.tmpl create mode 100644 e2/templates/scc-k8s/.suborbital/scc-autoscale.yaml.tmpl create mode 100644 e2/templates/scc-k8s/.suborbital/scc-controlplane-deployment.yaml.tmpl create mode 100644 e2/templates/scc-k8s/config/scc-config.yaml create mode 100644 e2/templates/swift/.gitignore create mode 100644 e2/templates/swift/Package.swift.tmpl create mode 100644 e2/templates/swift/Sources/{{ .Name }}.tmpl/main.swift.tmpl create mode 100644 e2/templates/tinygo/go.mod.tmpl create mode 100644 e2/templates/tinygo/main.go.tmpl create mode 100644 e2/templates/typescript/package.json.tmpl create mode 100644 e2/templates/typescript/src/index.ts create 
mode 100644 e2/templates/typescript/src/lib.ts create mode 100644 e2/templates/typescript/tsconfig.json create mode 100644 e2/templates/typescript/webpack.config.js create mode 100644 e2/templates/wat/lib.wat create mode 100644 e2/update_checker.go create mode 100644 e2/update_checker_docker.go diff --git a/Makefile b/Makefile index 7bc644b6..6f784b65 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,4 @@ +include ./e2/e2.mk e2core: go build -o .bin/e2core ./main.go diff --git a/e2/Dockerfile b/e2/Dockerfile new file mode 100644 index 00000000..02ffe7b1 --- /dev/null +++ b/e2/Dockerfile @@ -0,0 +1,19 @@ +FROM golang:1.18-bullseye AS builder +WORKDIR /root/github.com/suborbital/subo + +COPY go.* ./ +RUN go mod download + +COPY subo ./subo +COPY builder ./builder +COPY deployer ./deployer +COPY packager ./packager +COPY publisher ./publisher +COPY project ./project +COPY scn ./scn +COPY *.go ./ +COPY Makefile . +RUN make subo/docker-bin + +FROM debian:bullseye +COPY --from=builder /go/bin/subo /go/bin/subo diff --git a/e2/README.md b/e2/README.md new file mode 100644 index 00000000..32b4dc1c --- /dev/null +++ b/e2/README.md @@ -0,0 +1,66 @@ +# e2, the Suborbital Extension Engine CLI + +e2 is the command-line helper for working with the Suborbital Extension Engine. e2 is used to create and build Wasm plugins, generate new projects and config files, and more over time. + +**You do not need to install language-specific tools to get started with WebAssembly and e2!** A Docker toolchain is supported (see below) that can build your plugins without needing to install language toolchains. 
+ +## Installing +### macOS (Homebrew) +If you're on Mac (M1 or Intel), the easiest way to install is via `brew`: +``` +brew tap suborbital/e2 +brew install e2 +``` + +### Install from source (requires Go) +If you use Linux or otherwise prefer to build from source, simply clone this repository or download a [source code release](https://github.com/suborbital/e2core/releases/latest) archive and run: +``` +make e2 +``` +This will install `e2` into your GOPATH (`$HOME/go/bin/e2` by default) which you may need to add to your shell's `$PATH` variable. + +e2 does not have official support for Windows. + +## Verify installation +Verify e2 was installed: +``` +e2 --help +``` + + +## Getting started +**To get started with e2, visit the [Get started guide](./docs/get-started.md).** + +## Builders +This repo contains builders for the various languages supported by Wasm Runnables. A builder is a Docker image that can build Runnables into Wasm modules, and is used internally by `e2` to build your code! See the [builders](./builder/docker) directory for more. + +## Platforms +The `e2` tool supports the following platforms and operating systems: +| | x86_64 | arm64 | +| --- | --- | --- | +| macOS | ✅ | ✅ | +| Linux | ✅ | ✅ | +| Windows* | — | — | + +_*On Windows you can use WSL._ + +The language toolchains used by `e2` support the following platforms: +| | x86_64 | arm64 | Docker | +| --- | --- | --- | --- | +| Rust | ✅ | ✅ | ✅ | +| JavaScript | ✅ | ✅ | ✅ | +| TypeScript | ✅ | ✅ | ✅ | +| TinyGo | ✅ | ✅ | ✅ | +| Grain | ✅ | ✅ | ✅ | +| AssemblyScript | ✅ | ✅ | ✅ | +| Swift | ✅ | — | 🟡 (no arm64) | + +## Contributing + +Please read the [contributing guide](./CONTRIBUTING.md) to learn about how you can contribute to e2! We welcome all types of contribution. + +By the way, our mascot is the space panda pictured below. 
+ +![SOS-Space_Panda-Dark-small](https://user-images.githubusercontent.com/5942370/129103528-8b013445-a8a2-44bb-8b39-65d912a66767.png) + +Copyright © 2021-2022 Suborbital and contributors. diff --git a/e2/builder/.image-ver b/e2/builder/.image-ver new file mode 100644 index 00000000..8ea9cc1e --- /dev/null +++ b/e2/builder/.image-ver @@ -0,0 +1 @@ +v0.5.4 diff --git a/e2/builder/builder.go b/e2/builder/builder.go new file mode 100644 index 00000000..fa7ee5bc --- /dev/null +++ b/e2/builder/builder.go @@ -0,0 +1,274 @@ +package builder + +import ( + "fmt" + "html/template" + "io/ioutil" + "os" + "path/filepath" + "runtime" + "strings" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +var dockerImageForLang = map[string]string{ + "rust": "suborbital/builder-rs", + "swift": "suborbital/builder-swift", + "assemblyscript": "suborbital/builder-as", + "tinygo": "suborbital/builder-tinygo", + "grain": "--platform linux/amd64 suborbital/builder-gr", + "typescript": "suborbital/builder-js", + "javascript": "suborbital/builder-js", + "wat": "suborbital/builder-wat", +} + +// BuildConfig is the configuration for a Builder. +type BuildConfig struct { + JsToolchain string + CommandRunner util.CommandRunner +} + +// DefaultBuildConfig is the default build configuration. +var DefaultBuildConfig = BuildConfig{ + JsToolchain: "npm", + CommandRunner: util.Command, +} + +// Builder is capable of building Wasm modules from source. +type Builder struct { + Context *project.Context + Config *BuildConfig + + results []BuildResult + + log util.FriendlyLogger +} + +// BuildResult is the results of a build including the built module and logs. +type BuildResult struct { + Succeeded bool + OutputLog string +} + +type Toolchain string + +const ( + ToolchainNative = Toolchain("native") + ToolchainDocker = Toolchain("docker") +) + +// ForDirectory creates a Builder bound to a particular directory. 
+func ForDirectory(logger util.FriendlyLogger, config *BuildConfig, dir string) (*Builder, error) { + ctx, err := project.ForDirectory(dir) + if err != nil { + return nil, errors.Wrap(err, "failed to project.ForDirectory") + } + + b := &Builder{ + Context: ctx, + Config: config, + results: []BuildResult{}, + log: logger, + } + + return b, nil +} + +func (b *Builder) BuildWithToolchain(tcn Toolchain) error { + var err error + + b.results = []BuildResult{} + + // When building in Docker mode, just collect the langs we need to build, and then + // launch the associated builder images which will do the building. + dockerLangs := map[string]bool{} + + for _, mod := range b.Context.Modules { + if !b.Context.ShouldBuildLang(mod.Module.Lang) { + continue + } + + if tcn == ToolchainNative { + b.log.LogStart(fmt.Sprintf("building runnable: %s (%s)", mod.Name, mod.Module.Lang)) + + result := &BuildResult{} + + if err := b.checkAndRunPreReqs(mod, result); err != nil { + return errors.Wrap(err, "🚫 failed to checkAndRunPreReqs") + } + + if flags, err := b.analyzeForCompilerFlags(mod); err != nil { + return errors.Wrap(err, "🚫 failed to analyzeForCompilerFlags") + } else if flags != "" { + mod.CompilerFlags = flags + } + + err = b.doNativeBuildForModule(mod, result) + + // Even if there was a failure, load the result into the builder + // since the logs of the failed build are useful. 
+ b.results = append(b.results, *result) + + if err != nil { + return errors.Wrapf(err, "🚫 failed to build %s", mod.Name) + } + + fullWasmFilepath := filepath.Join(mod.Fullpath, fmt.Sprintf("%s.wasm", mod.Name)) + b.log.LogDone(fmt.Sprintf("%s was built -> %s", mod.Name, fullWasmFilepath)) + + } else { + dockerLangs[mod.Module.Lang] = true + } + } + + if tcn == ToolchainDocker { + for lang := range dockerLangs { + result, err := b.dockerBuildForLang(lang) + if err != nil { + return errors.Wrap(err, "failed to dockerBuildForDirectory") + } + + b.results = append(b.results, *result) + } + } + + return nil +} + +// Results returns build results for all of the modules built by this builder +// returns os.ErrNotExists if none have been built yet. +func (b *Builder) Results() ([]BuildResult, error) { + if b.results == nil || len(b.results) == 0 { + return nil, os.ErrNotExist + } + + return b.results, nil +} + +func (b *Builder) dockerBuildForLang(lang string) (*BuildResult, error) { + img, err := ImageForLang(lang, b.Context.BuilderTag) + if err != nil { + return nil, errors.Wrap(err, "failed to ImageForLang") + } + + result := &BuildResult{} + + outputLog, err := b.Config.CommandRunner.Run(fmt.Sprintf("docker run --rm --mount type=bind,source=%s,target=/root/runnable %s e2 build %s --native --langs %s", b.Context.MountPath, img, b.Context.RelDockerPath, lang)) + + result.OutputLog = outputLog + + if err != nil { + result.Succeeded = false + return nil, errors.Wrap(err, "failed to Run docker command") + } + + result.Succeeded = true + + return result, nil +} + +// results and resulting file are loaded into the BuildResult pointer. 
+func (b *Builder) doNativeBuildForModule(mod project.ModuleDir, result *BuildResult) error { + cmds, err := NativeBuildCommands(mod.Module.Lang) + if err != nil { + return errors.Wrap(err, "failed to NativeBuildCommands") + } + + for _, cmd := range cmds { + cmdTmpl, err := template.New("cmd").Parse(cmd) + if err != nil { + return errors.Wrap(err, "failed to Parse command template") + } + + fullCmd := &strings.Builder{} + if err := cmdTmpl.Execute(fullCmd, mod); err != nil { + return errors.Wrap(err, "failed to Execute command template") + } + + cmdString := strings.TrimSpace(fullCmd.String()) + + // Even if the command fails, still load the output into the result object. + outputLog, err := b.Config.CommandRunner.RunInDir(cmdString, mod.Fullpath) + + result.OutputLog += outputLog + "\n" + + if err != nil { + result.Succeeded = false + return errors.Wrap(err, "failed to RunInDir") + } + + result.Succeeded = true + } + + return nil +} + +// ImageForLang returns the Docker image:tag builder for the given language. 
+func ImageForLang(lang, tag string) (string, error) { + img, ok := dockerImageForLang[lang] + if !ok { + return "", fmt.Errorf("%s is an unsupported language", lang) + } + + return fmt.Sprintf("%s:%s", img, tag), nil +} + +func (b *Builder) checkAndRunPreReqs(runnable project.ModuleDir, result *BuildResult) error { + preReqLangs, ok := PreRequisiteCommands[runtime.GOOS] + if !ok { + return fmt.Errorf("unsupported OS: %s", runtime.GOOS) + } + + preReqs, ok := preReqLangs[runnable.Module.Lang] + if !ok { + return fmt.Errorf("unsupported language: %s", runnable.Module.Lang) + } + + for _, p := range preReqs { + + filepathVar := filepath.Join(runnable.Fullpath, p.File) + + if _, err := os.Stat(filepathVar); err != nil { + if errors.Is(err, os.ErrNotExist) { + b.log.LogStart(fmt.Sprintf("missing %s, fixing...", p.File)) + + fullCmd, err := p.GetCommand(*b.Config, runnable) + if err != nil { + return errors.Wrap(err, "prereq.GetCommand") + } + + outputLog, err := b.Config.CommandRunner.RunInDir(fullCmd, runnable.Fullpath) + if err != nil { + return errors.Wrapf(err, "commandRunner.RunInDir: %s", fullCmd) + } + + result.OutputLog += outputLog + "\n" + + b.log.LogDone("fixed!") + } + } + } + + return nil +} + +// analyzeForCompilerFlags looks at the Runnable and determines if any additional compiler flags are needed +// this is initially added to support AS-JSON in AssemblyScript with its need for the --transform flag. 
+func (b *Builder) analyzeForCompilerFlags(md project.ModuleDir) (string, error) { + if md.Module.Lang == "assemblyscript" { + packageJSONBytes, err := ioutil.ReadFile(filepath.Join(md.Fullpath, "package.json")) + if err != nil { + return "", errors.Wrap(err, "failed to ReadFile package.json") + } + + if strings.Contains(string(packageJSONBytes), "json-as") { + return "--transform ./node_modules/json-as/transform", nil + } + } + + return "", nil +} diff --git a/e2/builder/builder.mk b/e2/builder/builder.mk new file mode 100644 index 00000000..cae58b7d --- /dev/null +++ b/e2/builder/builder.mk @@ -0,0 +1,80 @@ +# all paths are relative to project root +ver = $(shell cat ./builder/.image-ver | tr -d '\n') + +builder/docker: subo/docker builder/docker/rust builder/docker/swift builder/docker/as builder/docker/tinygo builder/docker/grain builder/docker/javascript builder/docker/wat + +builder/docker/publish: subo/docker/publish builder/docker/rust/publish builder/docker/swift/publish builder/docker/as/publish builder/docker/tinygo/publish builder/docker/grain/publish builder/docker/javascript/publish builder/docker/wat/publish + +builder/docker/dev/publish: subo/docker/publish builder/docker/rust/dev/publish builder/docker/swift/dev/publish builder/docker/as/dev/publish builder/docker/tinygo/dev/publish builder/docker/grain/dev/publish builder/docker/javascript/dev/publish builder/docker/wat/dev/publish + +# AssemblyScript docker targets +builder/docker/as: + DOCKER_BUILDKIT=1 docker build . -f builder/docker/assemblyscript/Dockerfile -t suborbital/builder-as:$(ver) + +builder/docker/as/publish: + docker buildx build . -f builder/docker/assemblyscript/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-as:$(ver) --push + +builder/docker/as/dev/publish: + docker buildx build . 
-f builder/docker/assemblyscript/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-as:dev --push + +# Rust docker targets +builder/docker/rust: + DOCKER_BUILDKIT=1 docker build . -f builder/docker/rust/Dockerfile -t suborbital/builder-rs:$(ver) + +builder/docker/rust/publish: + docker buildx build . -f builder/docker/rust/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-rs:$(ver) --push + +builder/docker/rust/dev/publish: + docker buildx build . -f builder/docker/rust/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-rs:dev --push + +# Swift docker targets +builder/docker/swift: + DOCKER_BUILDKIT=1 docker build . -f builder/docker/swift/Dockerfile -t suborbital/builder-swift:$(ver) + +builder/docker/swift/publish: + docker buildx build . -f builder/docker/swift/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-swift:$(ver) --push + +builder/docker/swift/dev/publish: + docker buildx build . -f builder/docker/swift/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-swift:dev --push + +# TinyGo docker targets +builder/docker/tinygo: + DOCKER_BUILDKIT=1 docker build . -f builder/docker/tinygo/Dockerfile -t suborbital/builder-tinygo:$(ver) + +builder/docker/tinygo/publish: + docker buildx build . -f builder/docker/tinygo/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-tinygo:$(ver) --push + +builder/docker/tinygo/dev/publish: + docker buildx build . -f builder/docker/tinygo/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-tinygo:dev --push + +# Grain docker targets +builder/docker/grain: + docker buildx build . -f builder/docker/grain/Dockerfile --platform linux/amd64 -t suborbital/builder-gr:$(ver) --load + +builder/docker/grain/publish: + docker buildx build . -f builder/docker/grain/Dockerfile --platform linux/amd64 -t suborbital/builder-gr:$(ver) --push + +builder/docker/grain/dev/publish: + docker buildx build . 
-f builder/docker/grain/Dockerfile --platform linux/amd64 -t suborbital/builder-gr:dev --push + +# JavaScript docker targets +builder/docker/javascript: + DOCKER_BUILDKIT=1 docker build . -f builder/docker/javascript/Dockerfile -t suborbital/builder-js:$(ver) + +builder/docker/javascript/publish: + docker buildx build . -f builder/docker/javascript/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-js:$(ver) --push + +builder/docker/javascript/dev/publish: + docker buildx build . -f builder/docker/javascript/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-js:dev --push + +# wat docker targets +builder/docker/wat: + DOCKER_BUILDKIT=1 docker build . -f builder/docker/wat/Dockerfile -t suborbital/builder-wat:$(ver) + +builder/docker/wat/publish: + docker buildx build . -f builder/docker/wat/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-wat:$(ver) --push + +builder/docker/wat/dev/publish: + docker buildx build . -f builder/docker/wat/Dockerfile --platform linux/amd64,linux/arm64 -t suborbital/builder-wat:dev --push + +.PHONY: builder/docker builder/docker/publish builder/docker/as builder/docker/as/publish builder/docker/rust builder/docker/rust/publish builder/docker/swift builder/docker/swift/publish builder/docker/tinygo builder/docker/tinygo/publish builder/docker/grain builder/docker/grain/publish builder/docker/javascript builder/docker/javascript/publish builder/docker/wat builder/docker/wat/publish diff --git a/e2/builder/docker/assemblyscript/Dockerfile b/e2/builder/docker/assemblyscript/Dockerfile new file mode 100644 index 00000000..2b2d00b6 --- /dev/null +++ b/e2/builder/docker/assemblyscript/Dockerfile @@ -0,0 +1,6 @@ +FROM suborbital/subo:dev as subo + +FROM node:16-buster-slim +WORKDIR /root/runnable +COPY --from=subo /go/bin/subo /usr/local/bin +RUN npm install -g npm@latest diff --git a/e2/builder/docker/grain/Dockerfile b/e2/builder/docker/grain/Dockerfile new file mode 100644 index 
00000000..711d571c --- /dev/null +++ b/e2/builder/docker/grain/Dockerfile @@ -0,0 +1,6 @@ +FROM suborbital/subo:dev as subo + +FROM ghcr.io/grain-lang/grain:0.4-slim +WORKDIR /root/runnable +COPY --from=subo /go/bin/subo /usr/local/bin/subo +RUN mkdir /root/suborbital diff --git a/e2/builder/docker/javascript/Dockerfile b/e2/builder/docker/javascript/Dockerfile new file mode 100644 index 00000000..b36e7bc4 --- /dev/null +++ b/e2/builder/docker/javascript/Dockerfile @@ -0,0 +1,12 @@ +FROM ghcr.io/suborbital/javy:v0.3.0 as javy +FROM suborbital/subo:dev as subo + +FROM node:16-bullseye-slim +WORKDIR /root/runnable +# Propagate our root permissions for our home folder to everyone. This allows +# npm scripts (which get run as whatever user owns the mounted runnable +# directory) to access common home folder resources (caches, etc.). +RUN mkdir /root/suborbital && chmod -R o=u /root + +COPY --from=javy /usr/local/bin/javy /usr/local/bin +COPY --from=subo /go/bin/subo /usr/local/bin diff --git a/e2/builder/docker/rust/Dockerfile b/e2/builder/docker/rust/Dockerfile new file mode 100644 index 00000000..bc410a41 --- /dev/null +++ b/e2/builder/docker/rust/Dockerfile @@ -0,0 +1,11 @@ +FROM suborbital/subo:dev as subo + +FROM rust:1.56.1-slim-buster +WORKDIR /root/runnable +COPY --from=subo /go/bin/subo /usr/local/bin + +# install the wasm target and then install something that +# doesn't exist (and ignore the error) to update the crates.io index +RUN mkdir /root/suborbital && \ + rustup target install wasm32-wasi +RUN cargo install lazy_static; exit 0 diff --git a/e2/builder/docker/swift/Dockerfile b/e2/builder/docker/swift/Dockerfile new file mode 100644 index 00000000..4fb50cae --- /dev/null +++ b/e2/builder/docker/swift/Dockerfile @@ -0,0 +1,5 @@ +FROM suborbital/subo:dev as subo + +FROM ghcr.io/swiftwasm/swift:focal +WORKDIR /root/runnable +COPY --from=subo /go/bin/subo /usr/local/bin diff --git a/e2/builder/docker/tinygo/Dockerfile b/e2/builder/docker/tinygo/Dockerfile 
new file mode 100644 index 00000000..97c7a7a2 --- /dev/null +++ b/e2/builder/docker/tinygo/Dockerfile @@ -0,0 +1,29 @@ +FROM suborbital/subo:dev as subo +FROM golang:1.18-bullseye as go + +FROM debian:bullseye-slim +RUN apt-get update && apt-get -y install wget + +WORKDIR /usr/local + +# renovate: datasource=github-releases depName=tinygo-org/tinygo +ARG TINYGO_VERSION=0.26.0 +ARG TARGETARCH + +RUN wget -O tinygo.tar.gz \ + "https://github.com/tinygo-org/tinygo/releases/download/v${TINYGO_VERSION}/tinygo${TINYGO_VERSION}.linux-${TARGETARCH}.tar.gz" && \ + tar xf tinygo.tar.gz && \ + bash -c "rm -rf tinygo/src/device/{sam,stm32,nxp,nrf,avr,esp,rp}" && \ + bash -c "rm -rf tinygo/lib/{nrfx,mingw-w64,macos-minimal-sdk}" && \ + rm -rf tinygo/src/examples && \ + rm -rf tinygo.tar.gz + +WORKDIR /root/runnable + +COPY --from=go /usr/local/go /usr/local/ +COPY --from=subo /go/bin/subo /usr/local/bin + +ENV PATH="/usr/local/tinygo/bin:/usr/local/go/bin:$PATH" + +RUN go mod download github.com/suborbital/reactr@latest && \ + rm -rf /go/pkg/mod/github.com/suborbital/reactr*/rwasm/testdata diff --git a/e2/builder/docker/wat/Dockerfile b/e2/builder/docker/wat/Dockerfile new file mode 100644 index 00000000..03d134ba --- /dev/null +++ b/e2/builder/docker/wat/Dockerfile @@ -0,0 +1,19 @@ +FROM suborbital/subo:dev as subo + +FROM debian:bullseye as builder +RUN apt-get update && \ + apt-get install pkg-config git build-essential libssl-dev clang cmake curl -y && \ + git clone -b 1.0.27 --recursive https://github.com/WebAssembly/wabt.git && \ + cd wabt && \ + git submodule update --init && \ + mkdir build && \ + cd build && \ + cmake .. && \ + cmake --build . 
+ +FROM debian:bullseye-slim +WORKDIR /root/runnable + +COPY --from=builder /wabt/bin/wat2wasm /usr/local/bin +COPY --from=subo /go/bin/subo /usr/local/bin +RUN mkdir /root/suborbital diff --git a/e2/builder/native.go b/e2/builder/native.go new file mode 100644 index 00000000..936ca9f7 --- /dev/null +++ b/e2/builder/native.go @@ -0,0 +1,83 @@ +package builder + +import ( + "fmt" + "runtime" +) + +// NativeBuildCommands returns the native build commands needed to build a Runnable of a particular language. +func NativeBuildCommands(lang string) ([]string, error) { + os := runtime.GOOS + + cmds, exists := nativeCommandsForLang[os][lang] + if !exists { + return nil, fmt.Errorf("unable to build %s Runnables natively", lang) + } + + return cmds, nil +} + +var nativeCommandsForLang = map[string]map[string][]string{ + "darwin": { + "rust": { + "cargo vendor && cargo build --target wasm32-wasi --lib --release", + "cp target/wasm32-wasi/release/{{ .UnderscoreName }}.wasm ./{{ .Name }}.wasm", + }, + "swift": { + "xcrun --toolchain swiftwasm swift build --triple wasm32-unknown-wasi -Xlinker --allow-undefined -Xlinker --export=allocate -Xlinker --export=deallocate -Xlinker --export=run_e -Xlinker --export=init", + "cp .build/debug/{{ .Name }}.wasm .", + }, + "assemblyscript": { + "npm run asbuild", + }, + "tinygo": { + "go get -d", + "go mod tidy", + "tinygo build -o {{ .Name }}.wasm -target wasi .", + }, + "grain": { + "grain compile index.gr -I _lib -o {{ .Name }}.wasm", + }, + "typescript": { + "npm run build", + }, + "javascript": { + "npm run build", + }, + "wat": { + "wat2wasm lib.wat -o {{ .Name }}.wasm", + }, + }, + "linux": { + "rust": { + "cargo vendor && cargo build --target wasm32-wasi --lib --release", + "cp target/wasm32-wasi/release/{{ .UnderscoreName }}.wasm ./{{ .Name }}.wasm", + }, + "swift": { + "swift build --triple wasm32-unknown-wasi -Xlinker --allow-undefined -Xlinker --export=allocate -Xlinker --export=deallocate -Xlinker --export=run_e -Xlinker 
--export=init", + "cp .build/debug/{{ .Name }}.wasm .", + }, + "assemblyscript": { + "chmod -R 777 ./", + "chmod +x ./node_modules/assemblyscript/bin/asc", + "./node_modules/assemblyscript/bin/asc src/index.ts --target release --use abort=src/index/abort {{ .CompilerFlags }}", + }, + "tinygo": { + "go get -d", + "go mod tidy", + "tinygo build -o {{ .Name }}.wasm -target wasi .", + }, + "grain": { + "grain compile index.gr -I _lib -o {{ .Name }}.wasm", + }, + "typescript": { + "npm run build", + }, + "javascript": { + "npm run build", + }, + "wat": { + "wat2wasm lib.wat -o {{ .Name }}.wasm", + }, + }, +} diff --git a/e2/builder/prereq.go b/e2/builder/prereq.go new file mode 100644 index 00000000..4b8c7071 --- /dev/null +++ b/e2/builder/prereq.go @@ -0,0 +1,122 @@ +package builder + +import ( + "strings" + "text/template" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/project" +) + +// Prereq is a pre-requisite file paired with the native command needed to acquire that file (if it's missing). +type Prereq struct { + File string + Command string +} + +// PreRequisiteCommands is a map of OS : language : preReq. 
+var PreRequisiteCommands = map[string]map[string][]Prereq{ + "darwin": { + "rust": {}, + "swift": {}, + "grain": { + Prereq{ + File: "_lib", + Command: "mkdir _lib", + }, + Prereq{ + File: "_lib/_lib.tar.gz", + Command: "curl -L https://github.com/suborbital/reactr/archive/v{{ .ModuleDir.Module.APIVersion }}.tar.gz -o _lib/_lib.tar.gz", + }, + Prereq{ + File: "_lib/suborbital", + Command: "tar --strip-components=3 -C _lib -xvzf _lib/_lib.tar.gz **/api/grain/suborbital/*", + }, + }, + "assemblyscript": { + Prereq{ + File: "node_modules", + Command: "{{ .BuildConfig.JsToolchain }} install", + }, + }, + "tinygo": {}, + "typescript": { + Prereq{ + File: "node_modules", + Command: "{{ .BuildConfig.JsToolchain }} install", + }, + }, + "javascript": { + Prereq{ + File: "node_modules", + Command: "{{ .BuildConfig.JsToolchain }} install", + }, + }, + "wat": {}, + }, + "linux": { + "rust": {}, + "swift": {}, + "grain": { + Prereq{ + File: "_lib", + Command: "mkdir _lib", + }, + Prereq{ + File: "_lib/_lib.tar.gz", + Command: "curl -L https://github.com/suborbital/reactr/archive/v{{ .ModuleDir.Module.APIVersion }}.tar.gz -o _lib/_lib.tar.gz", + }, + Prereq{ + File: "_lib/suborbital", + Command: "tar --wildcards --strip-components=3 -C _lib -xvzf _lib/_lib.tar.gz **/api/grain/suborbital/*", + }, + }, + "assemblyscript": { + Prereq{ + File: "node_modules", + Command: "{{ .BuildConfig.JsToolchain }} install", + }, + }, + "tinygo": {}, + "typescript": { + Prereq{ + File: "node_modules", + Command: "{{ .BuildConfig.JsToolchain }} install", + }, + }, + "javascript": { + Prereq{ + File: "node_modules", + Command: "{{ .BuildConfig.JsToolchain }} install", + }, + }, + "wat": {}, + }, +} + +// GetCommand takes a ModuleDir, and returns an executed template command string. 
+func (p Prereq) GetCommand(b BuildConfig, md project.ModuleDir) (string, error) { + cmdTmpl, err := template.New("cmd").Parse(p.Command) + if err != nil { + return "", errors.Wrapf(err, "failed to parse prerequisite Command string into template: %s", p.Command) + } + + type TemplateParams struct { + ModuleDir project.ModuleDir + BuildConfig + } + + data := TemplateParams{ + ModuleDir: md, + BuildConfig: b, + } + + var fullCmd strings.Builder + err = cmdTmpl.Execute(&fullCmd, data) + if err != nil { + return "", errors.Wrap(err, "failed to execute prerequisite Command string with runnableDir") + } + + return fullCmd.String(), nil +} diff --git a/e2/builder/prereq_test.go b/e2/builder/prereq_test.go new file mode 100644 index 00000000..706c72e1 --- /dev/null +++ b/e2/builder/prereq_test.go @@ -0,0 +1,69 @@ +package builder + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/suborbital/e2core/e2/project" + "github.com/suborbital/systemspec/tenant" +) + +func TestPrereq_GetCommand(t *testing.T) { + tests := []struct { + name string + prereq Prereq + r project.ModuleDir + want string + wantErr assert.ErrorAssertionFunc + }{ + { + name: "successfully expands template", + prereq: Prereq{ + File: "_lib/_lib.tar.gz", + Command: "curl -L https://github.com/suborbital/reactr/archive/v{{ .ModuleDir.Module.APIVersion }}.tar.gz -o _lib/_lib.tar.gz", + }, + r: project.ModuleDir{ + Module: &tenant.Module{ + APIVersion: "0.33.75", + }, + }, + want: "curl -L https://github.com/suborbital/reactr/archive/v0.33.75.tar.gz -o _lib/_lib.tar.gz", + wantErr: assert.NoError, + }, + { + name: "errors due to missing data to expand with", + prereq: Prereq{ + File: "_lib/_lib.tar.gz", + Command: "curl -L https://github.com/suborbital/reactr/archive/v{{ .ModuleDir.Module.APIVersion }}.tar.gz -o _lib/_lib.tar.gz", + }, + r: project.ModuleDir{ + Module: nil, + }, + want: "", + wantErr: assert.Error, + }, + { + name: "successfully expands command with no template tag in 
it", + prereq: Prereq{ + File: "_lib/_lib.tar.gz", + Command: "curl -L https://github.com/suborbital/reactr/archive/v2.tar.gz -o _lib/_lib.tar.gz", + }, + r: project.ModuleDir{ + Module: &tenant.Module{ + APIVersion: "0.33.75", + }, + }, + want: "curl -L https://github.com/suborbital/reactr/archive/v2.tar.gz -o _lib/_lib.tar.gz", + wantErr: assert.NoError, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.prereq.GetCommand(DefaultBuildConfig, tt.r) + + tt.wantErr(t, err) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/e2/builder/template/config.go b/e2/builder/template/config.go new file mode 100644 index 00000000..dec33205 --- /dev/null +++ b/e2/builder/template/config.go @@ -0,0 +1,48 @@ +package template + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/util" +) + +func FullPath(repo, branch string) (string, error) { + repoParts := strings.Split(repo, "/") + if len(repoParts) != 2 { + return "", fmt.Errorf("repo is invalid, contains %d parts", len(repoParts)) + } + + repoName := repoParts[1] + + root, err := TemplateRootDir() + if err != nil { + return "", errors.Wrap(err, "failed to TemplateRootDir") + } + + return filepath.Join(root, fmt.Sprintf("%s-%s", repoName, strings.ReplaceAll(branch, "/", "-")), "templates"), nil +} + +// TemplateRootDir gets the template directory for e2 and ensures it exists. 
+func TemplateRootDir() (string, error) { + tmplPath, err := util.CacheDir("templates") + if err != nil { + return "", errors.Wrap(err, "failed to CacheDir") + } + + if _, err = os.Stat(tmplPath); err != nil { + if errors.Is(err, os.ErrNotExist) { + if err := os.MkdirAll(tmplPath, util.PermDirectory); err != nil { + return "", errors.Wrap(err, "failed to MkdirAll template directory") + } + } else { + return "", errors.Wrap(err, "failed to Stat template directory") + } + } + + return tmplPath, nil +} diff --git a/e2/builder/template/templates.go b/e2/builder/template/templates.go new file mode 100644 index 00000000..49e296f3 --- /dev/null +++ b/e2/builder/template/templates.go @@ -0,0 +1,274 @@ +package template + +import ( + "fmt" + "io" + "io/ioutil" + "net/http" + "os" + "path/filepath" + "strings" + "text/template" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/systemspec/tenant" +) + +// ErrTemplateMissing and others are template related errors. +var ErrTemplateMissing = errors.New("template missing") + +type tmplData struct { + tenant.Module + NameCaps string + NameCamel string +} + +func UpdateTemplates(repo, branch string) (string, error) { + util.LogStart("downloading templates") + + repoParts := strings.Split(repo, "/") + if len(repoParts) != 2 { + return "", fmt.Errorf("repo is invalid, contains %d parts", len(repoParts)) + } + + repoName := repoParts[1] + + branchDirName := fmt.Sprintf("%s-%s", repoName, strings.ReplaceAll(branch, "/", "-")) + + templateRootPath, err := TemplateRootDir() + if err != nil { + return "", errors.Wrap(err, "failed to TemplateDir") + } + + filepathVar, err := downloadZip(repo, branch, templateRootPath) + if err != nil { + return "", errors.Wrap(err, "🚫 failed to downloadZip for templates") + } + + // The tmplPath may be different than the default if a custom URL was provided. 
+ tmplPath, err := extractZip(filepathVar, templateRootPath, branchDirName) + if err != nil { + return "", errors.Wrap(err, "🚫 failed to extractZip for templates") + } + + util.LogDone("templates downloaded") + + return tmplPath, nil +} + +// TemplatesExist returns the templates directory for the provided repo and branch. +func TemplatesExist(repo, branch string) (string, error) { + repoParts := strings.Split(repo, "/") + if len(repoParts) != 2 { + return "", fmt.Errorf("repo is invalid, contains %d parts", len(repoParts)) + } + + repoName := repoParts[1] + + templateRootPath, err := TemplateRootDir() + if err != nil { + return "", errors.Wrap(err, "failed to TemplateDir") + } + + branchDirName := fmt.Sprintf("%s-%s", repoName, strings.ReplaceAll(branch, "/", "-")) + existingPath := filepath.Join(templateRootPath, branchDirName) + + tmplPath := filepath.Join(existingPath, "templates") + + if files, err := os.ReadDir(tmplPath); err != nil { + return "", errors.Wrap(err, "failed to ReadDir") + } else if len(files) == 0 { + return "", errors.New("templates directory is empty") + } + + return tmplPath, nil +} + +// ExecRunnableTmplStr executes a template string with the module's data. +func ExecRunnableTmplStr(templateStr string, module *tenant.Module) (string, error) { + templateData := makeTemplateData(module) + + tmpl, err := template.New("tmpl").Parse(templateStr) + if err != nil { + return "", errors.Wrap(err, "failed to parse template string") + } + + builder := &strings.Builder{} + if err := tmpl.Execute(builder, templateData); err != nil { + return "", errors.Wrap(err, "failed to Execute template") + } + + return builder.String(), nil +} + +// ExecRunnableTmpl copies a template. +func ExecRunnableTmpl(cwd, name, templatesPath string, module *tenant.Module) error { + templateData := makeTemplateData(module) + + return ExecTmplDir(cwd, name, templatesPath, module.Lang, templateData) +} + +// ExecTmplDir copies a generic templated directory. 
+func ExecTmplDir(cwd, name, templatesPath, tmplName string, templateData interface{}) error { + templatePath := filepath.Join(templatesPath, tmplName) + targetPath := filepath.Join(cwd, name) + + if _, err := os.Stat(templatePath); err != nil { + if errors.Is(err, os.ErrNotExist) { + return ErrTemplateMissing + } + + return errors.Wrap(err, "failed to Stat template directory") + } + + var err = filepath.Walk(templatePath, func(path string, info os.FileInfo, _ error) error { + var relPath = strings.Replace(path, templatePath, "", 1) + if relPath == "" { + return nil + } + + targetRelPath := relPath + if strings.Contains(relPath, ".tmpl") { + tmpl, err := template.New("tmpl").Parse(strings.Replace(relPath, ".tmpl", "", -1)) + if err != nil { + return errors.Wrapf(err, "failed to parse template directory name %s", info.Name()) + } + + builder := &strings.Builder{} + if err := tmpl.Execute(builder, templateData); err != nil { + return errors.Wrapf(err, "failed to Execute template for %s", info.Name()) + } + + targetRelPath = builder.String() + } + + // Check if the target path is an existing file, and skip it if so. + if _, err := os.Stat(filepath.Join(targetPath, targetRelPath)); err != nil { + if os.IsNotExist(err) { + // That's fine, continue. + } else { + return errors.Wrap(err, "failed to Stat") + } + } else { + // If the target file already exists, we're going to skip the rest since we don't want to overwrite. 
+ return nil + } + + if info.IsDir() { + if err := os.Mkdir(filepath.Join(targetPath, targetRelPath), util.PermDirectory); err != nil { + return errors.Wrap(err, "failed to Mkdir") + } + + return nil + } + + var data, err1 = ioutil.ReadFile(filepath.Join(templatePath, relPath)) + if err1 != nil { + return err1 + } + + if strings.HasSuffix(info.Name(), ".tmpl") { + tmpl, err := template.New("tmpl").Parse(string(data)) + if err != nil { + return errors.Wrapf(err, "failed to parse template file %s", info.Name()) + } + + builder := &strings.Builder{} + if err := tmpl.Execute(builder, templateData); err != nil { + return errors.Wrapf(err, "failed to Execute template for %s", info.Name()) + } + + data = []byte(builder.String()) + } + + if err := ioutil.WriteFile(filepath.Join(targetPath, targetRelPath), data, util.PermFilePrivate); err != nil { + return errors.Wrap(err, "failed to WriteFile") + } + + return nil + }) + + return err +} + +// downloadZip downloads a ZIP from a particular branch of the Subo repo. +func downloadZip(repo, branch, targetPath string) (string, error) { + url := fmt.Sprintf("https://github.com/%s/archive/%s.zip", repo, branch) + + req, err := http.NewRequest(http.MethodGet, url, nil) + if err != nil { + return "", errors.Wrap(err, "failed to NewRequest") + } + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return "", errors.Wrap(err, "failed to Do request") + } + + if resp.StatusCode != http.StatusOK { + return "", fmt.Errorf("response was non-200: %d", resp.StatusCode) + } + + filepathVar := filepath.Join(targetPath, "e2.zip") + + // Check if the zip already exists, and delete it if it does. 
+ if _, err := os.Stat(filepathVar); err == nil { + if err := os.Remove(filepathVar); err != nil { + return "", errors.Wrap(err, "failed to delete exising templates zip") + } + } + + if err := os.MkdirAll(targetPath, util.PermDirectory); err != nil { + return "", errors.Wrap(err, "failed to MkdirAll") + } + + file, err := os.Create(filepathVar) + if err != nil { + return "", errors.Wrap(err, "failed to Open file") + } + + defer resp.Body.Close() + if _, err := io.Copy(file, resp.Body); err != nil { + return "", errors.Wrap(err, "failed to Copy data to file") + } + + return filepathVar, nil +} + +// extractZip extracts a ZIP file. +func extractZip(filePath, destPath, branchDirName string) (string, error) { + escapedFilepath := strings.ReplaceAll(filePath, " ", "\\ ") + escapedDestPath := strings.ReplaceAll(destPath, " ", "\\ ") + string(filepath.Separator) + + existingPath := filepath.Join(destPath, branchDirName) + + if _, err := os.Stat(existingPath); err == nil { + if err := os.RemoveAll(existingPath); err != nil { + return "", errors.Wrap(err, "failed to RemoveAll old templates") + } + } + + if _, err := util.Command.Run(fmt.Sprintf("unzip -q %s -d %s", escapedFilepath, escapedDestPath)); err != nil { + return "", errors.Wrap(err, "failed to Run unzip") + } + + return filepath.Join(existingPath, "templates"), nil +} + +// makeTemplateData makes data to be used in templates. +func makeTemplateData(module *tenant.Module) tmplData { + nameCamel := "" + nameParts := strings.Split(module.Name, "-") + for _, part := range nameParts { + nameCamel += strings.ToUpper(string(part[0])) + nameCamel += string(part[1:]) + } + + return tmplData{ + Module: *module, + NameCaps: strings.ToUpper(strings.Replace(module.Name, "-", "", -1)), + NameCamel: nameCamel, + } +} diff --git a/e2/changelogs/v0.0.14.md b/e2/changelogs/v0.0.14.md new file mode 100644 index 00000000..e99c6da5 --- /dev/null +++ b/e2/changelogs/v0.0.14.md @@ -0,0 +1,9 @@ +## Welcome to Subo Alpha-14! 
+ +Subo Alpha-14 is required for use with Atmo Beta-2. + +Subo now creates bundles by default when building, with the added `--no-bundle` flag if you would like to skip. + +This release will also create a git repo automatically when creating a new project. + +This release will create Runnables with API version 0.9.1 and use Atmo version 0.2.0 for the `dev` server. diff --git a/e2/changelogs/v0.0.15.md b/e2/changelogs/v0.0.15.md new file mode 100644 index 00000000..a186e866 --- /dev/null +++ b/e2/changelogs/v0.0.15.md @@ -0,0 +1 @@ +Subo Alpha-15 is a quick patch version to remove a dependency on Wasmer shared libraries that was causing issues running the Docker builders \ No newline at end of file diff --git a/e2/changelogs/v0.0.16.md b/e2/changelogs/v0.0.16.md new file mode 100644 index 00000000..469ae3e9 --- /dev/null +++ b/e2/changelogs/v0.0.16.md @@ -0,0 +1 @@ +Subo Alpha-16 brings support for AssemblyScript, and some under-the-hood changes to support future Atmo features. \ No newline at end of file diff --git a/e2/changelogs/v0.0.17.md b/e2/changelogs/v0.0.17.md new file mode 100644 index 00000000..26a4b7d8 --- /dev/null +++ b/e2/changelogs/v0.0.17.md @@ -0,0 +1 @@ +This tag is functionally identical to `v0.0.16`, but addresses an issue that caused the gobinaries service to fail building Subo (installations work again!) 
\ No newline at end of file diff --git a/e2/changelogs/v0.0.18.md b/e2/changelogs/v0.0.18.md new file mode 100644 index 00000000..f4110ae4 --- /dev/null +++ b/e2/changelogs/v0.0.18.md @@ -0,0 +1 @@ +This release includes the `subo compute deploy core` command used to install Flight Deck \ No newline at end of file diff --git a/e2/changelogs/v0.0.19.md b/e2/changelogs/v0.0.19.md new file mode 100644 index 00000000..33e13509 --- /dev/null +++ b/e2/changelogs/v0.0.19.md @@ -0,0 +1 @@ +This release brings compatibility with Atmo Beta-3 \ No newline at end of file diff --git a/e2/changelogs/v0.0.20.md b/e2/changelogs/v0.0.20.md new file mode 100644 index 00000000..8d03face --- /dev/null +++ b/e2/changelogs/v0.0.20.md @@ -0,0 +1 @@ +Alpha-20 brings some internal changes to keep up to date with Atmo Beta-3.1 \ No newline at end of file diff --git a/e2/changelogs/v0.0.21.md b/e2/changelogs/v0.0.21.md new file mode 100644 index 00000000..0e623189 --- /dev/null +++ b/e2/changelogs/v0.0.21.md @@ -0,0 +1 @@ +This release includes improvements for the `compute deploy core` command \ No newline at end of file diff --git a/e2/changelogs/v0.0.22.md b/e2/changelogs/v0.0.22.md new file mode 100644 index 00000000..5dc2f433 --- /dev/null +++ b/e2/changelogs/v0.0.22.md @@ -0,0 +1 @@ +This release improves deployment for the Suborbital Compute Core, enabling intelligent autoscaling \ No newline at end of file diff --git a/e2/changelogs/v0.1.0.md b/e2/changelogs/v0.1.0.md new file mode 100644 index 00000000..3973be98 --- /dev/null +++ b/e2/changelogs/v0.1.0.md @@ -0,0 +1,3 @@ +# Subo Beta-1 + +Welcome to the first beta of the Subo CLI! This project has remained in Alpha for a long time while we improved its usability, compatibility with language toolchains, and integrated it with our [Compute](https://suborbital.dev/compute) product. We think it's ready, so it's being elevated to Beta status! 
We'll continue to make improvements along the way, but we believe it's the best way to work with WebAssembly codebases and all of our projects/products. \ No newline at end of file diff --git a/e2/changelogs/v0.2.0.md b/e2/changelogs/v0.2.0.md new file mode 100644 index 00000000..e9a4c6a7 --- /dev/null +++ b/e2/changelogs/v0.2.0.md @@ -0,0 +1,3 @@ +## Beta-2 + +Subo Beta-2 brings much improved builder image performance, as well as support for creating and building TinyGo and Grain Runnables, and support for Suborbital Compute Beta-1 \ No newline at end of file diff --git a/e2/changelogs/v0.2.1.md b/e2/changelogs/v0.2.1.md new file mode 100644 index 00000000..6dd11ec2 --- /dev/null +++ b/e2/changelogs/v0.2.1.md @@ -0,0 +1 @@ +Subo Beta-2.1 brings the latest version of TinyGo to the Docker builder toolchain, and a small fix for Suborbital Compute users deploying locally. \ No newline at end of file diff --git a/e2/changelogs/v0.2.2.md b/e2/changelogs/v0.2.2.md new file mode 100644 index 00000000..5c1d4491 --- /dev/null +++ b/e2/changelogs/v0.2.2.md @@ -0,0 +1,8 @@ +Subo Beta-2.2 brings several bug fixes and new features. + +Features: +- Subo now automatically checks for updates and notifies you if one is available (big thank you to @denopink for this!) +- Subo's proxy port (when running `subo compute deploy`) can now be configured with the `--proxy-port` flag + +Fixes: +- Fixed a bug where Subo would crash when building a single Runnable with the `--docker` flag (#102) diff --git a/e2/changelogs/v0.3.0.md b/e2/changelogs/v0.3.0.md new file mode 100644 index 00000000..2cdf9bfc --- /dev/null +++ b/e2/changelogs/v0.3.0.md @@ -0,0 +1,5 @@ +Subo Beta-3.0 brings several bug fixes and new features. 
+ +Features: +- Add --reset flag to subo compute deploy core: resets the docker-compose.yaml or K8s manifests to the default template (does not update templates) +- Add TinyGo slim Docker image diff --git a/e2/changelogs/v0.3.1.md b/e2/changelogs/v0.3.1.md new file mode 100644 index 00000000..ed1cfdf8 --- /dev/null +++ b/e2/changelogs/v0.3.1.md @@ -0,0 +1 @@ +Subo Beta-3.1 bumps the Suborbital Compute Docker image tag from v0.1.0 to v0.1.1. \ No newline at end of file diff --git a/e2/changelogs/v0.3.2.md b/e2/changelogs/v0.3.2.md new file mode 100644 index 00000000..4c966694 --- /dev/null +++ b/e2/changelogs/v0.3.2.md @@ -0,0 +1,10 @@ +Subo Beta-3.2 brings quality of life improvements, JavaScript support, and create handler command + +* (fix): purge bad data cache from version check (pr #170) +* (tweak): reduce file and folder permissions across subo (pr #172) +* feat(builder): support for JavaScript (pr #177) +* feat(builder): introduce templating for prereq commands (pr #187) +* feat(cli): create handler command +* feat(cli): initial version of subo push using Bindle (pr #178) +* feat(ci/cd): adds a workflow to dispatch data on release to homebrew formula repo (pr #183) +* feat(ci/cd): adds golangci-lint and basic linting rules, fixes issues (pr #189) \ No newline at end of file diff --git a/e2/changelogs/v0.4.0.md b/e2/changelogs/v0.4.0.md new file mode 100644 index 00000000..cb3df2bb --- /dev/null +++ b/e2/changelogs/v0.4.0.md @@ -0,0 +1 @@ +Subo Beta-4 brings the ability to create and build JavaScript and TypeScript Runnables. \ No newline at end of file diff --git a/e2/changelogs/v0.4.1.md b/e2/changelogs/v0.4.1.md new file mode 100644 index 00000000..8664e24f --- /dev/null +++ b/e2/changelogs/v0.4.1.md @@ -0,0 +1 @@ +Subo Beta-4.1 fixes a permissions issue when building JavaScript and TypeScript Runnables on Linux. 
diff --git a/e2/changelogs/v0.4.2.md b/e2/changelogs/v0.4.2.md new file mode 100644 index 00000000..1eda11e8 --- /dev/null +++ b/e2/changelogs/v0.4.2.md @@ -0,0 +1 @@ +Subo Beta-4.2 upgrades Atmo to Beta-4.4 and upgrades Suborbital Compute to Beta-2. diff --git a/e2/changelogs/v0.5.0.md b/e2/changelogs/v0.5.0.md new file mode 100644 index 00000000..8082ce12 --- /dev/null +++ b/e2/changelogs/v0.5.0.md @@ -0,0 +1,11 @@ +Subo Beta-4.3 upgrades Atmo to Beta-4.7. + +Updates: +- Go 1.18 +- the TinyGo builder image has been updated to version v0.23.0 +- Add Wat support +- Add configurable JS toolchains +- Add ability to push docker image and deploy onto Kubernetes cluster (thanks @yashikajotwani12) + +Fixes: +- Remove update checking when inside Docker diff --git a/e2/changelogs/v0.5.1.md b/e2/changelogs/v0.5.1.md new file mode 100644 index 00000000..0e15dbd5 --- /dev/null +++ b/e2/changelogs/v0.5.1.md @@ -0,0 +1 @@ +Subo Beta-5.1 upgrades Suborbital Compute to Beta-3. diff --git a/e2/changelogs/v0.5.2.md b/e2/changelogs/v0.5.2.md new file mode 100644 index 00000000..c4060bfc --- /dev/null +++ b/e2/changelogs/v0.5.2.md @@ -0,0 +1 @@ +Subo Beta-5.2 upgrades Suborbital Compute to Beta-3.1 diff --git a/e2/changelogs/v0.5.3.md b/e2/changelogs/v0.5.3.md new file mode 100644 index 00000000..648d6cd5 --- /dev/null +++ b/e2/changelogs/v0.5.3.md @@ -0,0 +1 @@ +Subo Beta-5.3 upgrades Suborbital Compute to Beta-3.2. diff --git a/e2/changelogs/v0.5.4.md b/e2/changelogs/v0.5.4.md new file mode 100644 index 00000000..d2e6eb68 --- /dev/null +++ b/e2/changelogs/v0.5.4.md @@ -0,0 +1 @@ +Subo Beta-5.4 upgrades TinyGo to v0.25.0. 
diff --git a/e2/cli/command/build.go b/e2/cli/command/build.go new file mode 100644 index 00000000..ea0bef83 --- /dev/null +++ b/e2/cli/command/build.go @@ -0,0 +1,122 @@ +package command + +import ( + "fmt" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/suborbital/e2core/e2/builder" + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/packager" +) + +// BuildCmd returns the build command. +func BuildCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "build [dir]", + Short: "Build a WebAssembly runnable", + Long: `Build a WebAssembly runnable and/or create a Runnable Bundle`, + RunE: func(cmd *cobra.Command, args []string) error { + dir := "." + if len(args) > 0 { + dir = args[0] + } + + bdr, err := builder.ForDirectory(&util.PrintLogger{}, &builder.DefaultBuildConfig, dir) + if err != nil { + return errors.Wrap(err, "failed to builder.ForDirectory") + } + + if len(bdr.Context.Modules) == 0 { + return errors.New("🚫 no runnables found in current directory (no .runnable.yaml files found)") + } + + if bdr.Context.CwdIsRunnable { + util.LogInfo("building single Runnable (run from project root to create bundle)") + } + + langs, _ := cmd.Flags().GetStringSlice("langs") + bdr.Context.Langs = langs + + noBundle, _ := cmd.Flags().GetBool("no-bundle") + shouldBundle := !noBundle && !bdr.Context.CwdIsRunnable && len(langs) == 0 + shouldDockerBuild, _ := cmd.Flags().GetBool("docker") + + if bdr.Context.CwdIsRunnable && shouldDockerBuild { + return errors.New("🚫 cannot build Docker image for a single Runnable (must be a project)") + } + + useNative, _ := cmd.Flags().GetBool("native") + makeTarget, _ := cmd.Flags().GetString("make") + + // Determine if a custom Docker mountpath and relpath were set. + mountPath, _ := cmd.Flags().GetString("mountpath") + relPath, _ := cmd.Flags().GetString("relpath") + + if mountPath != "" { + if relPath == "" { + // Fallback to the dir arg as that's usually a sane default. 
+ relPath = dir + } + + bdr.Context.MountPath = mountPath + bdr.Context.RelDockerPath = relPath + } + + builderTag, _ := cmd.Flags().GetString("builder-tag") + if builderTag != "" { + bdr.Context.BuilderTag = builderTag + } + + if makeTarget != "" { + util.LogStart(fmt.Sprintf("make %s", makeTarget)) + _, err = util.Command.Run(fmt.Sprintf("make %s", makeTarget)) + if err != nil { + return errors.Wrapf(err, "🚫 failed to make %s", makeTarget) + } + } + + var toolchain builder.Toolchain + if useNative { + toolchain = builder.ToolchainNative + } else { + util.LogInfo("🐳 using Docker toolchain") + toolchain = builder.ToolchainDocker + } + + // The builder does the majority of the work. + if err := bdr.BuildWithToolchain(toolchain); err != nil { + return errors.Wrap(err, "failed to BuildWithToolchain") + } + + pkgr := packager.New(&util.PrintLogger{}) + pkgJobs := []packager.PackageJob{} + + if shouldBundle { + pkgJobs = append(pkgJobs, packager.NewBundlePackageJob()) + } + + if shouldDockerBuild && !bdr.Context.CwdIsRunnable { + pkgJobs = append(pkgJobs, packager.NewDockerImagePackageJob()) + } + + if err := pkgr.Package(bdr.Context, pkgJobs...); err != nil { + return errors.Wrap(err, "failed to Package") + } + + return nil + }, + } + + cmd.Flags().Bool("no-bundle", false, "if passed, a .wasm.zip bundle will not be generated") + cmd.Flags().Bool("native", false, "use native (locally installed) toolchain rather than Docker") + cmd.Flags().String("make", "", "execute the provided Make target before building the project bundle") + cmd.Flags().Bool("docker", false, "build your project's Dockerfile. 
It will be tagged {identifier}:{appVersion}") + cmd.Flags().StringSlice("langs", []string{}, "build only Runnables for the listed languages (comma-seperated)") + cmd.Flags().String("mountpath", "", "if passed, the Docker builders will mount their volumes at the provided path") + cmd.Flags().String("relpath", "", "if passed, the Docker builders will run `e2 build` using the provided path, relative to '--mountpath'") + cmd.Flags().String("builder-tag", "", "use the provided tag for builder images") + + return cmd +} diff --git a/e2/cli/command/clean.go b/e2/cli/command/clean.go new file mode 100644 index 00000000..9b352887 --- /dev/null +++ b/e2/cli/command/clean.go @@ -0,0 +1,74 @@ +package command + +import ( + "fmt" + "io/ioutil" + "os" + "path/filepath" + "strings" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +// CleanCmd removes all of the target/.build folders for Runnables and deletes the .wasm files. +func CleanCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "clean", + Short: "Remove build folders and .wasm files", + Long: "Remove all of target/.build folders and deletes .wasm files", + RunE: func(cmd *cobra.Command, args []string) error { + cwd, err := os.Getwd() + if err != nil { + return errors.Wrap(err, "failed to Getwd") + } + + bctx, err := project.ForDirectory(cwd) + if err != nil { + return errors.Wrap(err, "failed to project.ForDirectory") + } + + if len(bctx.Modules) == 0 { + return errors.New("🚫 no runnables found in current directory (no .runnable yaml files found)") + } + + util.LogStart(fmt.Sprintf("cleaning in %s", bctx.Cwd)) + + for _, r := range bctx.Modules { + // Delete target or .build folder. 
+ files, _ := ioutil.ReadDir(r.Fullpath) + + for _, file := range files { + fullPath := filepath.Join(r.Fullpath, file.Name()) + if file.IsDir() { + if file.Name() == "target" || file.Name() == ".build" { + if rErr := os.RemoveAll(fullPath); rErr != nil { + util.LogFail(errors.Wrap(rErr, "failed to RemoveAll").Error()) + continue + } + + util.LogDone(fmt.Sprintf("removed %s", file.Name())) + } + } else { + if strings.HasSuffix(file.Name(), ".wasm") || strings.HasSuffix(file.Name(), ".wasm.zip") { + if err := os.Remove(fullPath); err != nil { + util.LogInfo(errors.Wrap(err, "🚫 failed to Remove").Error()) + continue + } + + util.LogDone(fmt.Sprintf("removed %s", file.Name())) + } + } + } + } + + util.LogDone("cleaned") + return nil + }, + } + + return cmd +} diff --git a/e2/cli/command/create_handler.go b/e2/cli/command/create_handler.go new file mode 100644 index 00000000..351b523d --- /dev/null +++ b/e2/cli/command/create_handler.go @@ -0,0 +1,73 @@ +package command + +// TODO: turn this into `create workflow` +// Ref: https://github.com/suborbital/e2core/e2/issues/347 + +// import ( +// "fmt" +// "os" + +// "github.com/pkg/errors" +// "github.com/spf13/cobra" + +// "github.com/suborbital/atmo/directive" +// "github.com/suborbital/e2core/e2/project" +// "github.com/suborbital/e2core/e2/cli/util" +// ). 
+ +// func CreateHandlerCmd() *cobra.Command { +// cmd := &cobra.Command{ +// Use: "handler ", +// Short: "create a new handler", +// Long: `create a new handler in Directive.yaml`, +// Args: cobra.ExactArgs(1), +// RunE: func(cmd *cobra.Command, args []string) error { +// resource := args[0] + +// handlerType, _ := cmd.Flags().GetString(typeFlag) +// method, _ := cmd.Flags().GetString(methodFlag) + +// util.LogStart(fmt.Sprintf("creating handler for %s", resource)) + +// cwd, err := os.Getwd() +// if err != nil { +// return errors.Wrap(err, "failed to Getwd") +// } + +// bctx, err := project.ForDirectory(cwd) +// if err != nil { +// return errors.Wrap(err, "🚫 failed to project.ForDirectory") +// } + +// if bctx.Directive == nil { +// return errors.New("cannot create handler, Directive.yaml not found") +// } + +// // Create a new handler object. +// handler := directive.Handler{ +// Input: directive.Input{ +// Type: handlerType, +// Resource: resource, +// Method: method, +// }, +// } + +// // Add the handler object to the directive file. +// bctx.Directive.Handlers = append(bctx.Directive.Handlers, handler) + +// // Write Directive File which overwrites the entire file. +// if err := project.WriteDirectiveFile(bctx.Cwd, bctx.Directive); err != nil { +// return errors.Wrap(err, "failed to WriteDirectiveFile") +// } + +// util.LogDone(fmt.Sprintf("handler for %s created", resource)) + +// return nil +// }, +// } + +// cmd.Flags().String(typeFlag, "request", "the handler's input type") +// cmd.Flags().String(methodFlag, "GET", "the HTTP method for 'request' handlers") + +// return cmd +// }. 
diff --git a/e2/cli/command/create_module.go b/e2/cli/command/create_module.go new file mode 100644 index 00000000..322ec277 --- /dev/null +++ b/e2/cli/command/create_module.go @@ -0,0 +1,158 @@ +package command + +import ( + "fmt" + "io/ioutil" + "os" + "path/filepath" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + "gopkg.in/yaml.v2" + + "github.com/suborbital/e2core/e2/builder/template" + "github.com/suborbital/e2core/e2/cli/release" + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" + "github.com/suborbital/systemspec/tenant" +) + +// langAliases are aliases for languages. +var langAliases = map[string]string{ + "as": "assemblyscript", + "rs": "rust", + "go": "tinygo", + "gr": "grain", + "ts": "typescript", + "js": "javascript", +} + +// CreateRunnableError wraps errors for CreateModuleCmd() failures. +type CreateRunnableError struct { + Path string // The ouput directory for build command CreateModuleCmd(). + error // The original error. +} + +// NewCreateRunnableError cleans up and returns CreateRunnableError for CreateModuleCmd() failures. +func NewCreateRunnableError(path string, err error) CreateRunnableError { + if cleanupErr := os.RemoveAll(path); cleanupErr != nil { + err = errors.Wrap(err, "failed to clean up module outputs") + } + return CreateRunnableError{Path: path, error: err} +} + +// CreateModuleCmd returns the build command. 
+func CreateModuleCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "module ", + Short: "Create a new plugin module", + Long: `Create a new module to be used with E2Core or Suborbital Extension Engine (SE2)`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + name := args[0] + + namespace, _ := cmd.Flags().GetString(namespaceFlag) + lang, _ := cmd.Flags().GetString(langFlag) + repo, _ := cmd.Flags().GetString(repoFlag) + branch, _ := cmd.Flags().GetString(branchFlag) + + dir, _ := cmd.Flags().GetString(dirFlag) + bctx, err := project.ForDirectory(dir) + if err != nil { + return errors.Wrap(err, "🚫 failed to project.ForDirectory") + } + + if bctx.ModuleExists(name) { + return fmt.Errorf("🚫 module %s already exists", name) + } + + util.LogStart(fmt.Sprintf("creating module %s", name)) + + path, err := util.Mkdir(bctx.Cwd, name) + if err != nil { + return errors.Wrap(err, "🚫 failed to Mkdir") + } + + module, err := writeDotModule(bctx.Cwd, name, lang, namespace) + if err != nil { + return errors.Wrap(NewCreateRunnableError(path, err), "🚫 failed to writeDotRunnable") + } + + templatesPath, err := template.FullPath(repo, branch) + if err != nil { + return errors.Wrap(NewCreateRunnableError(path, err), "failed to template.FullPath") + } + + if update, _ := cmd.Flags().GetBool(updateTemplatesFlag); update { + templatesPath, err = template.UpdateTemplates(repo, branch) + if err != nil { + return errors.Wrap(NewCreateRunnableError(path, err), "🚫 failed to UpdateTemplates") + } + } + + if err := template.ExecRunnableTmpl(bctx.Cwd, name, templatesPath, module); err != nil { + // if the templates are missing, try updating them and exec again. 
+ if err == template.ErrTemplateMissing { + templatesPath, err = template.UpdateTemplates(repo, branch) + if err != nil { + return errors.Wrap(NewCreateRunnableError(path, err), "🚫 failed to UpdateTemplates") + } + + if err := template.ExecRunnableTmpl(bctx.Cwd, name, templatesPath, module); err != nil { + return errors.Wrap(NewCreateRunnableError(path, err), "🚫 failed to ExecTmplDir") + } + } else { + return errors.Wrap(NewCreateRunnableError(path, err), "🚫 failed to ExecTmplDir") + } + } + + util.LogDone(path) + + return nil + }, + } + + cwd, err := os.Getwd() + if err != nil { + cwd = "$HOME" + } + + cmd.Flags().String(dirFlag, cwd, "the directory to put the new module in") + cmd.Flags().String(langFlag, "rust", "the language of the new module") + cmd.Flags().String(namespaceFlag, "default", "the namespace for the new module") + cmd.Flags().String(repoFlag, defaultRepo, "git repo to download templates from") + cmd.Flags().String(branchFlag, defaultBranch, "git branch to download templates from") + cmd.Flags().Bool(updateTemplatesFlag, false, "update with the newest module templates") + + return cmd +} + +func writeDotModule(cwd, name, lang, namespace string) (*tenant.Module, error) { + if actual, exists := langAliases[lang]; exists { + lang = actual + } + + if valid := project.IsValidLang(lang); !valid { + return nil, fmt.Errorf("%s is not an available language", lang) + } + + module := &tenant.Module{ + Name: name, + Lang: lang, + Namespace: namespace, + APIVersion: release.FFIVersion, + } + + bytes, err := yaml.Marshal(module) + if err != nil { + return nil, errors.Wrap(err, "failed to Marshal module") + } + + path := filepath.Join(cwd, name, ".module.yaml") + + if err := ioutil.WriteFile(path, bytes, util.PermFilePrivate); err != nil { + return nil, errors.Wrap(err, "failed to WriteFile module") + } + + return module, nil +} diff --git a/e2/cli/command/create_project.go b/e2/cli/command/create_project.go new file mode 100644 index 00000000..b177765a --- 
/dev/null +++ b/e2/cli/command/create_project.go @@ -0,0 +1,108 @@ +package command + +import ( + "fmt" + "os" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/suborbital/e2core/e2/builder/template" + "github.com/suborbital/e2core/e2/cli/release" + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +const ( + defaultRepo = "suborbital/templates" + defaultBranch = "main" +) + +type projectData struct { + Name string + Environment string + APIVersion string + RuntimeVersion string +} + +// CreateProjectCmd returns the build command. +func CreateProjectCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "project ", + Short: "Create a new project", + Long: `Create a new project for E2Core or Suborbital Extension Engine (SE2)`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + name := args[0] + + cwd, err := os.Getwd() + if err != nil { + return errors.Wrap(err, "failed to Getwd") + } + + bctx, err := project.ForDirectory(cwd) + if err != nil { + return errors.Wrap(err, "🚫 failed to project.ForDirectory") + } + + util.LogStart(fmt.Sprintf("creating project %s", name)) + + path, err := util.Mkdir(bctx.Cwd, name) + if err != nil { + return errors.Wrap(err, "🚫 failed to Mkdir") + } + + branch, _ := cmd.Flags().GetString(branchFlag) + environment, _ := cmd.Flags().GetString(environmentFlag) + + templatesPath, err := template.FullPath(defaultRepo, branch) + if err != nil { + return errors.Wrap(err, "🚫 failed to template.FullPath") + } + + if update, _ := cmd.Flags().GetBool(updateTemplatesFlag); update { + templatesPath, err = template.UpdateTemplates(defaultRepo, branch) + if err != nil { + return errors.Wrap(err, "🚫 failed to UpdateTemplates") + } + } + + data := projectData{ + Name: name, + Environment: environment, + APIVersion: release.FFIVersion, + RuntimeVersion: release.RuntimeVersion, + } + + if err := template.ExecTmplDir(bctx.Cwd, name, templatesPath, "project", 
data); err != nil { + // if the templates are missing, try updating them and exec again. + if err == template.ErrTemplateMissing { + templatesPath, err = template.UpdateTemplates(defaultRepo, branch) + if err != nil { + return errors.Wrap(err, "🚫 failed to UpdateTemplates") + } + + if err := template.ExecTmplDir(bctx.Cwd, name, templatesPath, "project", data); err != nil { + return errors.Wrap(err, "🚫 failed to ExecTmplDir") + } + } else { + return errors.Wrap(err, "🚫 failed to ExecTmplDir") + } + } + + util.LogDone(path) + + if _, err := util.Command.Run(fmt.Sprintf("git init ./%s", name)); err != nil { + return errors.Wrap(err, "🚫 failed to initialize Run git init") + } + + return nil + }, + } + + cmd.Flags().String(branchFlag, defaultBranch, "git branch to download templates from") + cmd.Flags().String(environmentFlag, "com.suborbital", "project environment name (your company's reverse domain") + cmd.Flags().Bool(updateTemplatesFlag, false, "update with the newest templates") + + return cmd +} diff --git a/e2/cli/command/create_release.go b/e2/cli/command/create_release.go new file mode 100644 index 00000000..8f6985a6 --- /dev/null +++ b/e2/cli/command/create_release.go @@ -0,0 +1,222 @@ +package command + +import ( + "fmt" + "io/ioutil" + "os" + "path/filepath" + "strings" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + "golang.org/x/mod/semver" + "gopkg.in/yaml.v2" + + "github.com/suborbital/e2core/e2/cli/util" +) + +// DotSuboFile describes a .e2 file for controlling releases. +type DotSuboFile struct { + DotVersionFiles []string `yaml:"dotVersionFiles"` + PreMakeTargets []string `yaml:"preMakeTargets"` + PostMakeTargets []string `yaml:"postMakeTargets"` +} + +// CreateReleaseCmd returns the create release command +// this is only available for development builds. 
+func CreateReleaseCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "release ", + Short: "Create a new release", + Long: `Tag a new version and create a new GitHub release, configured using the .e2.yml file.`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + util.LogStart("checking release conditions") + cwd, _ := cmd.Flags().GetString("dir") + + newVersion := args[0] + releaseName := args[1] + + // ensure the version entered is sane. + if err := validateVersion(newVersion); err != nil { + return errors.Wrap(err, "failed to validateVersion") + } + + // ensure the git repo is clean, no untracked or uncommitted changes. + if err := checkGitCleanliness(); err != nil { + return errors.Wrap(err, "failed to checkGitCleanliness") + } + + // ensure the current git branch is an rc branch. + branch, err := ensureCorrectGitBranch(newVersion) + if err != nil { + return errors.Wrap(err, "failed to ensureCorrectGitBranch") + } + + // ensure a .e2.yml file is present and valid. + dotSubo, err := findDotSubo(cwd) + if err != nil { + return errors.Wrap(err, "failed to findDotSubo") + } else if dotSubo == nil { + return errors.New(".e2.yml file is missing") + } + + // ensure a changelog exists for the release. + changelogFilePath := filepath.Join(cwd, "changelogs", fmt.Sprintf("%s.md", newVersion)) + + if err := checkChangelogFileExists(changelogFilePath); err != nil { + return errors.Wrap(err, "failed to checkChangelogFileExists") + } + + // ensure each of the versionFiles contains the string of the new version. 
+ for _, f := range dotSubo.DotVersionFiles { + filePath := filepath.Join(cwd, f) + + if err := util.CheckFileForVersionString(filePath, newVersion); err != nil { + if errors.Is(err, util.ErrVersionNotPresent) { + return fmt.Errorf("required dotVersionFile %s does not contain the release version number %s", filePath, newVersion) + } + + return errors.Wrap(err, "failed to CheckFileForVersionString") + } + } + + util.LogDone("release is ready to go") + util.LogStart("running pre-make targets") + + // run all of the pre-release make targets. + for _, target := range dotSubo.PreMakeTargets { + targetWithVersion := strings.Replace(target, "{{ .Version }}", newVersion, -1) + + if _, err := util.Command.Run(fmt.Sprintf("make %s", targetWithVersion)); err != nil { + return errors.Wrapf(err, "failed to run preMakeTarget %s", target) + } + } + + util.LogDone("pre-make targets complete") + + if shouldDryRun, _ := cmd.Flags().GetBool(dryRunFlag); shouldDryRun { + util.LogDone("release conditions verified, terminating for dry run") + return nil + } + + util.LogStart("creating release") + + // ensure the local changes are pushed, create the release, and then pull down the new tag. + if _, err := util.Command.Run("git push"); err != nil { + return errors.Wrap(err, "failed to Run git push") + } + + ghCommand := fmt.Sprintf("gh release create %s --title=%s --target=%s --notes-file=%s", newVersion, releaseName, branch, changelogFilePath) + if preRelease, _ := cmd.Flags().GetBool(preReleaseFlag); preRelease { + ghCommand += " --prerelease" + } + + if _, err := util.Command.Run(ghCommand); err != nil { + return errors.Wrap(err, "failed to Run gh command") + } + + if _, err := util.Command.Run("git pull --tags"); err != nil { + return errors.Wrap(err, "failed to Run git pull command") + } + + util.LogDone("release created!") + util.LogStart("running post-make targets") + + // run all of the post-release make targets. 
+ for _, target := range dotSubo.PostMakeTargets { + targetWithVersion := strings.Replace(target, "{{ .Version }}", newVersion, -1) + + if _, err := util.Command.Run(fmt.Sprintf("make %s", targetWithVersion)); err != nil { + return errors.Wrapf(err, "failed to run postMakeTarget %s", target) + } + } + + util.LogDone("post-make targets complete") + + return nil + }, + } + + cwd, err := os.Getwd() + if err != nil { + cwd = "$HOME" + } + + cmd.Flags().String(dirFlag, cwd, "the directory to create the release for") + cmd.Flags().Bool(preReleaseFlag, false, "pass --prelease to mark the release as such") + cmd.Flags().Bool(dryRunFlag, false, "pass --dryrun to run release condition checks and pre-make targets, but don't create the release") + + return cmd +} + +func findDotSubo(cwd string) (*DotSuboFile, error) { + dotSuboPath := filepath.Join(cwd, ".e2.yml") + + dotSuboBytes, err := ioutil.ReadFile(dotSuboPath) + if err != nil { + if errors.Is(err, os.ErrNotExist) { + return nil, nil + } + + return nil, errors.Wrap(err, "failed to ReadFile") + } + + dotSubo := &DotSuboFile{} + if err := yaml.Unmarshal(dotSuboBytes, dotSubo); err != nil { + return nil, errors.Wrap(err, "failed to Unmarshal dotSubo file") + } + + return dotSubo, nil +} + +func checkChangelogFileExists(filePath string) error { + if _, err := os.Stat(filePath); err != nil { + return errors.Wrap(err, "failed to Stat changelog file") + } + + return nil +} + +func checkGitCleanliness() error { + if out, err := util.Command.Run("git diff-index --name-only HEAD"); err != nil { + return errors.Wrap(err, "failed to git diff-index") + } else if out != "" { + return errors.New("project has modified files") + } + + if out, err := util.Command.Run("git ls-files --exclude-standard --others"); err != nil { + return errors.Wrap(err, "failed to git ls-files") + } else if out != "" { + return errors.New("project has untracked files") + } + + return nil +} + +func ensureCorrectGitBranch(version string) (string, error) { + 
expectedBranch := fmt.Sprintf("rc-%s", version) + + branch, err := util.Command.Run("git branch --show-current") + if err != nil { + return "", errors.Wrap(err, "failed to Run git branch") + } + + if strings.TrimSpace(branch) != expectedBranch { + return "", errors.New("release must be created on an 'rc-*' branch, currently on " + branch + ", expected " + expectedBranch) + } + + return strings.TrimSpace(branch), nil +} + +func validateVersion(version string) error { + if !strings.HasPrefix(version, "v") { + return errors.New("version does not start with v") + } + + if !semver.IsValid(version) { + return errors.New("version is not valid semver") + } + + return nil +} diff --git a/e2/cli/command/deploy.go b/e2/cli/command/deploy.go new file mode 100644 index 00000000..73e4c1f9 --- /dev/null +++ b/e2/cli/command/deploy.go @@ -0,0 +1,70 @@ +package command + +// import ( +// "fmt" +// "os" + +// "github.com/pkg/errors" +// "github.com/spf13/cobra" + +// "github.com/suborbital/e2core/e2/cli/util" +// "github.com/suborbital/e2core/e2/deployer" +// "github.com/suborbital/e2core/e2/project" +// ) + +// var validDeployTypes = map[string]bool{ +// "kubernetes": true, +// "k8s": true, +// } + +// // DeployCmd deploys the current project. 
+// func DeployCmd() *cobra.Command { +// cmd := &cobra.Command{ +// Use: "deploy", +// Short: "Deploy an application", +// Long: "Deploy the current project to a remote environment (Kubernetes, etc.)", +// Args: cobra.ExactArgs(1), +// RunE: func(cmd *cobra.Command, args []string) error { +// deployType := args[0] +// if _, valid := validDeployTypes[deployType]; !valid { +// return fmt.Errorf("invalid deployment type %s", deployType) +// } + +// cwd, err := os.Getwd() +// if err != nil { +// return errors.Wrap(err, "failed to Getwd") +// } + +// ctx, err := project.ForDirectory(cwd) +// if err != nil { +// return errors.Wrap(err, "failed to project.ForDirectory") +// } + +// dplyr := deployer.New(&util.PrintLogger{}) +// var deployJob deployer.DeployJob + +// repo, _ := cmd.Flags().GetString(repoFlag) +// branch, _ := cmd.Flags().GetString(branchFlag) +// domain, _ := cmd.Flags().GetString(domainFlag) +// updateTemplates := cmd.Flags().Changed(updateTemplatesFlag) + +// switch deployType { +// case "kubernetes", "k8s": +// deployJob = deployer.NewK8sDeployJob(repo, branch, domain, updateTemplates) +// } + +// if err := dplyr.Deploy(ctx, deployJob); err != nil { +// return errors.Wrap(err, "failed to Deploy") +// } + +// return nil +// }, +// } + +// cmd.Flags().String(domainFlag, "", "domain name to configure TLS for (DNS must be configured post-deploy)") +// cmd.Flags().String(repoFlag, defaultRepo, "git repo to download templates from") +// cmd.Flags().String(branchFlag, defaultBranch, "git branch to download templates from") +// cmd.Flags().Bool(updateTemplatesFlag, false, "update with the newest runnable templates") + +// return cmd +// } diff --git a/e2/cli/command/dev.go b/e2/cli/command/dev.go new file mode 100644 index 00000000..cda62e00 --- /dev/null +++ b/e2/cli/command/dev.go @@ -0,0 +1,62 @@ +package command + +import ( + "fmt" + "os" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/suborbital/e2core/e2/cli/release" + 
"github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +// DevCmd returns the dev command. +func DevCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "dev", + Short: "Run a development server using Docker", + Long: `Run a development server using Docker`, + RunE: func(cmd *cobra.Command, args []string) error { + cwd, err := os.Getwd() + if err != nil { + return errors.Wrap(err, "failed to Getwd") + } + + bctx, err := project.ForDirectory(cwd) + if err != nil { + return errors.Wrap(err, "failed to project.ForDirectory") + } + + if bctx.TenantConfig == nil { + return errors.New("current directory is not a project; tenant.json is missing") + } + + port, _ := cmd.Flags().GetString("port") + verbose, _ := cmd.Flags().GetBool("verbose") + + envvar := "" + + if verbose { + envvar = "-e DELTAV_LOG_LEVEL=debug" + util.LogInfo("Running DeltaV with debug logging") + } + + dockerCmd := fmt.Sprintf("docker run -v=%s:/home/atmo -e=DELTAV_HTTP_PORT=%s %s -p=%s:%s suborbital/deltav:%s deltav start", bctx.Cwd, port, envvar, port, port, release.RuntimeVersion) + + _, err = util.Command.Run(dockerCmd) + if err != nil { + return errors.Wrap(err, "🚫 failed to run dev server") + } + + return nil + }, + } + + cmd.Flags().String("port", "8080", "set the port on which to serve the project") + cmd.Flags().BoolP("verbose", "v", false, "run with debug level logging") + cmd.Flags().Lookup("verbose").NoOptDefVal = "true" + + return cmd +} diff --git a/e2/cli/command/docs.go b/e2/cli/command/docs.go new file mode 100644 index 00000000..8057a8bd --- /dev/null +++ b/e2/cli/command/docs.go @@ -0,0 +1,485 @@ +package command + +import ( + "bytes" + "fmt" + "go/ast" + "go/doc" + "go/parser" + "go/printer" + "go/token" + "io/ioutil" + "os" + "path/filepath" + "regexp" + "strings" + "text/template" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/suborbital/e2core/e2/cli/util" +) + +const ( + supportExt = "go" + docRegexStart = `<!-- DO 
NOT REMOVE: START -->` + docRegexEnd = `<!-- DO NOT REMOVE: END -->` + actionTemplate = `<!-- {{ Snippet "[[.ExampleKey]]" }} -->` + docTemplate = ` +{{- .Action }} +{{ .RegexStart }} +{{range $i, $e := .Examples}} +{{ $e.Code }} +{{ end }} +{{ .RegexEnd -}}` + cleanActionRegex = `<!--(\s)*{{(.[^}>]*)[}$]}(\s)*-->` + cleanExtraNewLines = `(\n)*` + docRegexEnd + `(\n)*` + oldDocsRegex = docRegexStart + `((.|\s)[^>]*)` + docRegexEnd + actionPrefixRegex = `^<!--(\s)*{{` + actionPrefix = `<!-- {{` + actionSuffixRegex = `}}(\s)*-->$` + actionSuffix = `}} -->` +) + +type codeData struct { + Action string + Ext string + Package string + Function string + RegexStart string + RegexEnd string + Examples []*exampleData +} + +type exampleData struct { + Suffix string + Doc string + Code string + Output string +} + +// DocsBuildCmd returns the docs build command. +func DocsBuildCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "build [dir] [--output]", + Short: "Build code and documentation with inserted code snippets", + Long: `Build code and documentation with inserted code snippets`, + RunE: func(cmd *cobra.Command, args []string) error { + dir := "." + if len(args) > 0 { + dir = args[0] + } + + outputDir, err := cmd.Flags().GetString("output") + if err != nil { + return errors.Wrap(err, "failed to GetString") + } else if outputDir == "" { + outputDir = "." + } + + if err := generateDocs(dir, outputDir); err != nil { + return errors.Wrap(err, "failed to getUpdatedDocs") + } + + return nil + }, + } + cmd.Flags().String("output", "", "output directory for generated documentation") + + return cmd +} + +// DocsTestCmd returns the docs test command. +func DocsTestCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "test [dir]", + Short: "Test code and snippets inserts", + Long: `Test code and snippets inserts without generating new documentation`, + RunE: func(cmd *cobra.Command, args []string) error { + dir := "." 
+ if len(args) > 0 { + dir = args[0] + } + + if cache, err := util.CacheDir("e2"); err != nil { + return errors.Wrap(err, "failed to CacheDir") + } else if err := generateDocs(dir, filepath.Join(cache, "docs")); err != nil { + return errors.Wrap(err, "failed to getUpdatedDocs") + } + + return nil + }, + } + + return cmd +} + +// generateDocs generates new docs with inserted example code snippets. +func generateDocs(dir string, outputDir string) error { + files, err := getMarkdownCodeData(dir) + if err != nil { + return errors.Wrap(err, "failed to getMarkdownCodeData") + } + + // Generate docs with inserted code snippets. + for fileName, fileContent := range files["md"] { + // Clean template actions. + r := regexp.MustCompile(cleanActionRegex) + fileContent = r.ReplaceAllStringFunc(fileContent, + func(src string) string { + // remove extra whitespaces from template actions. + r := regexp.MustCompile(actionSuffixRegex) + if r.MatchString(src) { + src = r.ReplaceAllLiteralString(src, actionSuffix) + } + + r = regexp.MustCompile(actionPrefixRegex) + if r.MatchString(src) { + src = r.ReplaceAllLiteralString(src, actionPrefix) + } + + return src + }, + ) + r = regexp.MustCompile(oldDocsRegex) + // Remove old documentation examples. + fileContent = r.ReplaceAllLiteralString(fileContent, "") + // Generate new documentation examples. + filePath := filepath.Join(outputDir, fileName) + fileDir := filepath.Dir(filePath) + if info, err := os.Stat(fileDir); os.IsNotExist(err) { + if err := os.MkdirAll(fileDir, os.ModePerm); err != nil { + return errors.Wrap(err, "failed to MkdirAll") + } + } else if err != nil { + return errors.Wrap(err, "failed to Stat") + } else if !info.IsDir() { + return errors.New(fmt.Sprintf("%s is not a directory", fileDir)) + } + + file, err := os.Create(filePath) + if err != nil { + return errors.Wrap(err, "failed to Create") + } + + // Main template that replaces user template actions `{ Snippet ... 
}` with their associated code snippets + // + // User template actions types: + // `{{ Snippet "greetings" }}` => all package examples are inserted from package `greetings` + // `{{ Snippet "greetings:doNotDoThis" }}` => only package example `doNotDoThis` is inserted from package `greeting` + // {{ Snippet "greetings/Hello" }} => all function `Hello` examples are inserted from package `greetings` + // {{ Snippet "greetings/Hello:doThis" }} => only function `Hello` example `doThis` is inserted from package `greetings` + // Nonexistent examples for packages or functions will cause `doc` cmd to fail. + tmpl, err := template.New(fileName).Funcs(template.FuncMap{ + "Snippet": func(exampleKey string) (string, error) { + // Generate user template action for reinsertion. + tmpl, err := template.New("NestedTemplateAction").Delims(`[[`, `]]`).Parse(actionTemplate) + if err != nil { + return "", err + } + + var buffer bytes.Buffer + if err := tmpl.Execute(&buffer, map[string]string{"ExampleKey": exampleKey}); err != nil { + return "", err + } + + // Check example key structure. + var pkgName, funcName string + keys := strings.Split(exampleKey, "/") + if len(keys) == 1 { + pkgName = keys[0] + } else if len(keys) == 2 { + pkgName, funcName = keys[0], keys[1] + } else { + return "", errors.New("`Snippet` expects a non-empty string key `packageName/funcName:Example` or 'packageName:Example', where 'funcName' and 'Example' are both optional") + } + + // Check if supportExt files, `go` files, where found. + examples, ok := files[supportExt] + if !ok { + return "", errors.New(fmt.Sprintf("Failed to `Snippet`, no files found with ext `%s`", supportExt)) + } + + // Check if example exists. 
+ example, ok := examples[exampleKey] + if !ok { + if pkgName == "" { + return "", errors.New("`Snippet` expects a non-empty string package name") + } else if funcName == "" { + return "", errors.New(fmt.Sprintf("%s\nExamples for the package `%s` do not exist", buffer.String(), pkgName)) + } + + return "", errors.New(fmt.Sprintf("%s\nExamples for the function `%s` in the package `%s` do not exist", buffer.String(), funcName, pkgName)) + } + + return example, nil + }, + }).Delims(actionPrefix, actionSuffix).Parse(fileContent) + var buffer bytes.Buffer + if err != nil { + return errors.Wrap(err, "failed to Parse") + } else if err = tmpl.Execute(&buffer, ""); err != nil { + // Reset doc to its previous state. + if errWrite := ioutil.WriteFile(filePath, []byte(fileContent), os.ModePerm); errWrite != nil { + return errors.Wrap(errWrite, "failed to Write") + } + + return errors.Wrap(err, "failed to Execute") + } + + // weird behavior of text.templates, it adds new lines regardless of action delimiters + // This ReplaceAll compensates for it. + r = regexp.MustCompile(cleanExtraNewLines) + fileContent = r.ReplaceAllLiteralString(buffer.String(), fmt.Sprintf("\n%s\n", docRegexEnd)) + _, err = file.WriteString(fileContent) + if err != nil { + return errors.Wrap(err, "failed to WriteString") + } + } + + return nil +} + +// getMarkdownCodeData returns a mapping of markdown texts and go example code snippets. 
+func getMarkdownCodeData(dir string) (map[string]map[string]string, error) { + if _, err := os.Stat(dir); os.IsNotExist(err) { + return nil, errors.Wrap(err, fmt.Sprintf("dir %s does not exist", dir)) + } else if err != nil { + return nil, errors.Wrap(err, "failed to Stat") + } + + goSnippets, err := getGoSnippets(dir) + if err != nil { + return nil, errors.Wrap(err, "failed to getGoSnippets") + } + + mdTextss, err := getMarkdownTexts(dir) + if err != nil { + return nil, errors.Wrap(err, "failed to getMarkdownTexts") + } + + return map[string]map[string]string{"md": mdTextss, "go": goSnippets}, nil +} + +// getMarkdownTexts returns discovered markdown texts. +func getMarkdownTexts(dir string) (map[string]string, error) { + mdTexts := make(map[string]string) + err := filepath.Walk(dir, + func(path string, info os.FileInfo, err error) error { + if err != nil { + return errors.Wrap(err, "failed to Walk") + } else if info.IsDir() { + return nil + } else if !strings.HasSuffix(path, ".md") { + return nil + } + + util.LogInfo(fmt.Sprintf("processing doc '%s'", path)) + pwd, err := os.Getwd() + if err != nil { + return errors.Wrap(err, "failed to Getwd") + } + + data, err := ioutil.ReadFile(path) + if err != nil { + return errors.Wrap(err, "failed to ReadFile") + } + + keyFile := filepath.Join(strings.Replace(filepath.Dir(path), pwd, "", 1), info.Name()) + mdTexts[keyFile] = string(data) + + return nil + }, + ) + if err != nil { + return nil, errors.Wrap(err, "failed to Walk") + } + + return mdTexts, nil +} + +// getGoSnippets returns discovered go example code snippets. 
+func getGoSnippets(dir string) (map[string]string, error) { + goSnippets := make(map[string]string) + err := filepath.Walk(dir, + func(path string, info os.FileInfo, err error) error { + if err != nil { + return errors.Wrap(err, "failed to Walk") + } else if !info.IsDir() { + return nil + } + + path, err = filepath.Abs(path) + if err != nil { + return errors.Wrap(err, "failed to Abs") + } + + fset := token.NewFileSet() + pkgs, err := parser.ParseDir(fset, path, nil, parser.ParseComments) + if err != nil { + return errors.Wrap(err, "failed to ParseDir") + } + + // Get and structure package and function examples from ast to text. + for pkgName, pkg := range pkgs { + files := []*ast.File{} + for _, file := range pkg.Files { + files = append(files, file) + } + + // Get special package and function asts that contain example metadata. + pkgDoc, err := doc.NewFromFiles(fset, files, filepath.Join(path, pkgName), doc.AllDecls) + if err != nil { + return errors.Wrap(err, "failed to NewFromFiles") + } + + // Process examples associated with the package. + pkgData := codeData{ + Ext: "go", + RegexStart: docRegexStart, + RegexEnd: docRegexEnd, + Package: pkgName, + } + for _, example := range pkgDoc.Examples { + exampleKey := getExampleKey(pkgName, "", example.Suffix) + text, err := pkgData.getCodeText(exampleKey, []*doc.Example{example}, fset) + if err != nil { + return errors.Wrap(err, "failed to getCodeText") + } + + goSnippets[exampleKey] = text + } + + // Add all examples option for package. + exampleKey := getExampleKey(pkgName, "", "") + text, err := pkgData.getCodeText(exampleKey, pkgDoc.Examples, fset) + if err != nil { + return errors.Wrap(err, "failed to getCodeText") + } + + goSnippets[exampleKey] = text + // Process examples associated with this function or method. 
+ for _, funcNode := range pkgDoc.Funcs { + if len(funcNode.Examples) == 0 { + continue + } + + funcData := codeData{ + Ext: "go", + RegexStart: docRegexStart, + RegexEnd: docRegexEnd, + Package: pkgName, + Function: funcNode.Name, + } + for _, example := range funcNode.Examples { + exampleKey := getExampleKey(pkgName, example.Name, example.Suffix) + text, err := funcData.getCodeText(exampleKey, []*doc.Example{example}, fset) + if err != nil { + return errors.Wrap(err, "failed to getCodeText") + } + + goSnippets[exampleKey] = text + } + + // Add all examples option for function. + exampleKey := getExampleKey(pkgName, funcNode.Name, "") + text, err := funcData.getCodeText(exampleKey, funcNode.Examples, fset) + if err != nil { + return errors.Wrap(err, "failed to getCodeText") + } + + goSnippets[exampleKey] = text + } + } + + return nil + }, + ) + if err != nil { + return nil, errors.Wrap(err, "failed to Walk") + } + + return goSnippets, nil +} + +// getExampleKey returns keys associated to examples. +func getExampleKey(pkgName, funcName, exampleSuffix string) string { + key := pkgName + if funcName != "" { + key = fmt.Sprintf("%s/%s", pkgName, funcName) + if exampleSuffix != "" { + key = strings.Replace(key, fmt.Sprintf("_%s", exampleSuffix), fmt.Sprintf(":%s", exampleSuffix), 1) + } + } else if exampleSuffix != "" { + key = fmt.Sprintf("%s:%s", key, exampleSuffix) + } + + return key +} + +// getCodeText returns code snippets generated from example asts. +func (c *codeData) getCodeText(exampleKey string, examples []*doc.Example, fset *token.FileSet) (string, error) { + err := c.getExampleData(examples, fset) + if err != nil { + return "", errors.Wrap(err, "failed to getExampleData") + } + + // Generate user template action for reinsertion. 
+ tmpl, err := template.New("NestedTemplateAction").Delims(`[[`, `]]`).Parse(actionTemplate) + if err != nil { + return "", errors.Wrap(err, "failed to Parse") + } + + var buffer bytes.Buffer + if err := tmpl.Execute(&buffer, map[string]string{"ExampleKey": exampleKey}); err != nil { + return "", errors.Wrap(err, "failed to Execute") + } + + c.Action = buffer.String() + // Generate code snippets based on `docTemplate` templates. + tmpl, err = template.New("CodeText").Parse(docTemplate) + if err != nil { + return "", errors.Wrap(err, "failed to Parse") + } + + buffer.Reset() + if err := tmpl.Execute(&buffer, c); err != nil { + return "", errors.Wrap(err, "failed to Execute") + } + + return buffer.String(), nil +} + +// getExampleData loads example asts to codeData. +func (c *codeData) getExampleData(examples []*doc.Example, fset *token.FileSet) error { + c.Examples = nil + for i, example := range examples { + // Get example code snippets ast. + var buffer bytes.Buffer + switch n := example.Code.(type) { + case *ast.BlockStmt: + for _, n := range n.List { + if err := printer.Fprint(&buffer, fset, n); err != nil { + return errors.Wrap(err, "failed to Fprint") + } + + fmt.Fprint(&buffer, "\n") + } + } + + // Get example code snippets metadata. 
+ c.Examples = append(c.Examples, + &exampleData{ + Suffix: example.Suffix, + Doc: example.Doc, + Code: fmt.Sprintf("```go\n%s```", buffer.String()), + }, + ) + if example.Output != "" { + c.Examples[i].Output = fmt.Sprintf("`%s`", strings.TrimSpace(example.Output)) + } + } + + return nil +} diff --git a/e2/cli/command/flags.go b/e2/cli/command/flags.go new file mode 100644 index 00000000..ec7b95ee --- /dev/null +++ b/e2/cli/command/flags.go @@ -0,0 +1,20 @@ +package command + +const ( + langFlag = "lang" + dirFlag = "dir" + namespaceFlag = "namespace" + branchFlag = "branch" + versionFlag = "version" + repoFlag = "repo" + environmentFlag = "environment" + updateTemplatesFlag = "update-templates" + preReleaseFlag = "prerelease" + dryRunFlag = "dryrun" + resetFlag = "reset" + localFlag = "local" + proxyPortFlag = "proxy-port" + domainFlag = "domain" + // methodFlag = "method" + // typeFlag = "type". +) diff --git a/e2/cli/command/push.go b/e2/cli/command/push.go new file mode 100644 index 00000000..de2f66d3 --- /dev/null +++ b/e2/cli/command/push.go @@ -0,0 +1,64 @@ +package command + +import ( + "fmt" + "os" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" + "github.com/suborbital/e2core/e2/publisher" +) + +var validPublishTypes = map[string]bool{ + "bindle": true, + "docker": true, +} + +// PushCmd packages the current project into a Bindle and pushes it to a Bindle server. 
+func PushCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "push", + Short: "Publish a project", + Long: "Publish the current project to a remote server (Docker, Bindle, etc.)", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + publishType := args[0] + if _, valid := validPublishTypes[publishType]; !valid { + return fmt.Errorf("invalid publish type %s", publishType) + } + + cwd, err := os.Getwd() + if err != nil { + return errors.Wrap(err, "failed to Getwd") + } + + ctx, err := project.ForDirectory(cwd) + if err != nil { + return errors.Wrap(err, "failed to project.ForDirectory") + } + + pshr := publisher.New(&util.PrintLogger{}) + var pubJob publisher.PublishJob + + switch publishType { + case publisher.BindlePublishJobType: + pubJob = publisher.NewBindlePublishJob() + case publisher.DockerPublishJobType: + pubJob = publisher.NewDockerPublishJob() + default: + return fmt.Errorf("invalid push destination %s", publishType) + } + + if err := pshr.Publish(ctx, pubJob); err != nil { + return errors.Wrap(err, "failed to Publish") + } + + return nil + }, + } + + return cmd +} diff --git a/e2/cli/command/scn.go b/e2/cli/command/scn.go new file mode 100644 index 00000000..cc5929f1 --- /dev/null +++ b/e2/cli/command/scn.go @@ -0,0 +1,18 @@ +package command + +import ( + "os" + + "github.com/suborbital/e2core/e2/scn" +) + +func scnAPI() *scn.API { + endpoint := scn.DefaultEndpoint + if envEndpoint, exists := os.LookupEnv(scnEndpointEnvKey); exists { + endpoint = envEndpoint + } + + api := scn.New(endpoint) + + return api +} diff --git a/e2/cli/command/se2_create_token.go b/e2/cli/command/se2_create_token.go new file mode 100644 index 00000000..d8738089 --- /dev/null +++ b/e2/cli/command/se2_create_token.go @@ -0,0 +1,64 @@ +package command + +import ( + "fmt" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/suborbital/e2core/e2/cli/input" + "github.com/suborbital/e2core/e2/cli/util" +) + +const ( + 
scnEndpointEnvKey = "SUBO_SCN_ENDPOINT" +) + +// SE2CreateTokenCommand returns the dev command. +func SE2CreateTokenCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "token [email]", + Short: "Create an SE2 token", + Long: `Create a Suborbital Extension Engine (SE2) token`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + email := args[0] + + vapi, err := scnAPI().ForVerifiedEmail(email, getVerifierCode) + if err != nil { + return errors.Wrap(err, "failed to ForVerifiedEmail") + } + + token, err := vapi.CreateEnvironmentToken() + if err != nil { + return errors.Wrap(err, "failed to CreateEnvironmentToken") + } + + fmt.Println(token.Token) + + if err := util.WriteEnvironmentToken(token.Token); err != nil { + return errors.Wrap(err, "failed to WriteEnvironmentToken") + } + return nil + }, + } + + return cmd +} + +// getVerifierCode gets the 6-character code from stdin. +func getVerifierCode() (string, error) { + fmt.Print("A verification code was sent to your email address. 
" + + "Enter the code to continue, " + + "and your environment token will print below (keep it safe!): ") + code, err := input.ReadStdinString() + if err != nil { + return "", errors.Wrap(err, "failed to ReadStdinString") + } + + if len(code) != 6 { + return "", errors.New("code must be 6 characters in length") + } + + return code, nil +} diff --git a/e2/cli/command/se2_deploy.go b/e2/cli/command/se2_deploy.go new file mode 100644 index 00000000..dde076fe --- /dev/null +++ b/e2/cli/command/se2_deploy.go @@ -0,0 +1,366 @@ +package command + +import ( + "fmt" + "log" + "os" + "os/exec" + "path/filepath" + "strconv" + "strings" + "time" + + "github.com/pkg/errors" + "github.com/spf13/cobra" + + "github.com/suborbital/e2core/e2/builder/template" + "github.com/suborbital/e2core/e2/cli/input" + "github.com/suborbital/e2core/e2/cli/localproxy" + "github.com/suborbital/e2core/e2/cli/release" + "github.com/suborbital/e2core/e2/cli/repl" + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +type deployData struct { + SCCVersion string + EnvToken string + BuilderDomain string + StorageClassName string +} + +const proxyDefaultPort int = 80 + +// SE2DeployCommand returns the SE2 deploy command. 
+func SE2DeployCommand() *cobra.Command {
+	cmd := &cobra.Command{
+		Use:   "deploy",
+		Short: "Deploy SE2",
+		Long:  `Deploy Suborbital Extension Engine (SE2) using Kubernetes or Docker Compose`,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			localInstall := cmd.Flags().Changed(localFlag)
+			shouldReset := cmd.Flags().Changed(resetFlag)
+			branch, _ := cmd.Flags().GetString(branchFlag)
+			tag, _ := cmd.Flags().GetString(versionFlag)
+
+			if !localInstall {
+				if err := introAcceptance(); err != nil {
+					return err
+				}
+			}
+
+			proxyPort, _ := cmd.Flags().GetInt(proxyPortFlag)
+			// 65535 == (1<<16)-1 is the highest valid TCP port. The previous
+			// bound, (2<<16)-1 (== 131071), accepted out-of-range ports even
+			// though the error message promises 1-65535.
+			if proxyPort < 1 || proxyPort > 65535 {
+				return errors.New("🚫 proxy-port must be between 1 and 65535")
+			}
+
+			cwd, err := os.Getwd()
+			if err != nil {
+				return errors.Wrap(err, "failed to Getwd")
+			}
+
+			bctx, err := project.ForDirectory(cwd)
+			if err != nil {
+				return errors.Wrap(err, "🚫 failed to project.ForDirectory")
+			}
+
+			// if the --reset flag was passed or there's no existing manifests
+			// then we need to 'build the world' from scratch.
+			if shouldReset || !manifestsExist(bctx) {
+				util.LogStart("preparing deployment")
+
+				// if there are any existing deployment manifests sitting around, let's replace them.
+ if err := removeExistingManifests(bctx); err != nil { + return errors.Wrap(err, "failed to removeExistingManifests") + } + + _, err = util.Mkdir(bctx.Cwd, ".suborbital") + if err != nil { + return errors.Wrap(err, "🚫 failed to Mkdir") + } + + templatesPath, err := template.TemplatesExist(defaultRepo, branch) + if err != nil { + templatesPath, err = template.UpdateTemplates(defaultRepo, branch) + if err != nil { + return errors.Wrap(err, "🚫 failed to UpdateTemplates") + } + } + + envToken, err := getEnvToken() + if err != nil { + return errors.Wrap(err, "🚫 failed to getEnvToken") + } + + data := deployData{ + SCCVersion: tag, + EnvToken: envToken, + } + + templateName := "scc-docker" + + if !localInstall { + data.BuilderDomain, err = getBuilderDomain() + if err != nil { + return errors.Wrap(err, "🚫 failed to getBuilderDomain") + } + + data.StorageClassName, err = getStorageClass() + if err != nil { + return errors.Wrap(err, "🚫 failed to getStorageClass") + } + + templateName = "scc-k8s" + } + + if err := template.ExecTmplDir(bctx.Cwd, "", templatesPath, templateName, data); err != nil { + return errors.Wrap(err, "🚫 failed to ExecTmplDir") + } + + util.LogDone("ready to start installation") + } + + dryRun, _ := cmd.Flags().GetBool(dryRunFlag) + + if dryRun { + util.LogInfo("aborting due to dry-run, manifest files remain in " + bctx.Cwd) + return nil + } + + util.LogStart("installing...") + + if localInstall { + var compose string + if _, err := util.Command.Run("docker compose version 2>&1 >/dev/null"); err == nil { + // Use Compose v2 if we're positive we have it + compose = "docker compose" + } else if _, err := exec.LookPath("docker-compose"); err == nil { + // Fall back to legacy compose if available. + compose = "docker-compose" + } else { + // YOLO. Try Compose V2 anyway. Works with containerd/nerdctl. 
+ // See: https://github.com/containerd/nerdctl/issues/1368 + compose = "docker compose" + } + + command := fmt.Sprintf("%s up -d", compose) + + if _, err := util.Command.Run(command); err != nil { + util.LogInfo("Is Docker Compose installed? https://docs.docker.com/compose/install/") + return errors.Wrapf(err, "🚫 failed to run `%s`", command) + } + + util.LogInfo(fmt.Sprintf("use `docker ps` and `%s logs` to check deployment status", compose)) + + proxyPortStr := strconv.Itoa(proxyPort) + proxy := localproxy.New("editor.suborbital.network", proxyPortStr) + + go func() { + if err := proxy.Start(); err != nil { + log.Fatal(err) + } + }() + + // this is to give the proxy server some room to bind to the port and start up + // it's not ideal, but the least gross way to ensure a good experience. + time.Sleep(time.Second * 1) + + repl := repl.New(proxyPortStr) + repl.Run() + + } else { + if _, err := util.Command.Run("kubectl apply -f https://github.com/kedacore/keda/releases/download/v2.4.0/keda-2.4.0.yaml"); err != nil { + return errors.Wrap(err, "🚫 failed to install KEDA") + } + + // we don't care if this fails, so don't check error. 
+ util.Command.Run("kubectl create ns suborbital") + + if err := createConfigMap(cwd); err != nil { + return errors.Wrap(err, "failed to createConfigMap") + } + + if _, err := util.Command.Run("kubectl apply -f .suborbital/"); err != nil { + return errors.Wrap(err, "🚫 failed to kubectl apply") + } + + util.LogInfo("use `kubectl get pods -n suborbital` and `kubectl get svc -n suborbital` to check deployment status") + } + + util.LogDone("installation complete!") + + return nil + }, + } + + cmd.Flags().String(branchFlag, defaultBranch, "git branch to download templates from") + cmd.Flags().String(versionFlag, release.SCCTag, "Docker tag to use for control plane images") + cmd.Flags().Int(proxyPortFlag, proxyDefaultPort, "port that the Editor proxy listens on") + cmd.Flags().Bool(localFlag, false, "deploy locally using Docker Compose") + cmd.Flags().Bool(dryRunFlag, false, "prepare the deployment in the .suborbital directory, but do not apply it") + cmd.Flags().Bool(resetFlag, false, "reset the deployment to default (replaces docker-compose.yaml and/or Kubernetes manifests)") + + return cmd +} + +func introAcceptance() error { + fmt.Print(` +Suborbital Extension Engine (SE2) Installer + +BEFORE YOU CONTINUE: + - You must first run "e2 se2 create token <email>" to get an environment token + + - You must have kubectl installed in PATH, and it must be connected to the cluster you'd like to use + + - You must be able to set up DNS records for the builder service after this installation completes + - Choose the DNS name you'd like to use before continuing, e.g. builder.acmeco.com + + - Subo will attempt to determine the default storage class for your Kubernetes cluster, + but if is unable to do so you will need to provide one + - See the SE2 documentation for more details + + - Subo will install the KEDA autoscaler into your cluster. It will not affect any existing deployments. + +Are you ready to continue? 
(y/N): `) + + answer, err := input.ReadStdinString() + if err != nil { + return errors.Wrap(err, "failed to ReadStdinString") + } + + if !strings.EqualFold(answer, "y") { + return errors.New("aborting") + } + + return nil +} + +// getEnvToken gets the environment token from stdin. +func getEnvToken() (string, error) { + existing, err := util.ReadEnvironmentToken() + if err == nil { + util.LogInfo("using cached environment token") + return existing, nil + } + + fmt.Print("Enter your environment token: ") + token, err := input.ReadStdinString() + + if err != nil { + return "", errors.Wrap(err, "failed to ReadStdinString") + } + + if len(token) != 32 { + return "", errors.New("token must be 32 characters in length") + } + + if err := util.WriteEnvironmentToken(token); err != nil { + util.LogWarn(err.Error()) + return token, nil + + } else { + util.LogInfo("saved environment token to cache") + } + + return token, nil +} + +// getBuilderDomain gets the environment token from stdin. +func getBuilderDomain() (string, error) { + fmt.Print("Enter the domain name that will be used for the builder service: ") + domain, err := input.ReadStdinString() + if err != nil { + return "", errors.Wrap(err, "failed to ReadStdinString") + } + + if len(domain) == 0 { + return "", errors.New("domain must not be empty") + } + + return domain, nil +} + +// getStorageClass gets the storage class to use. +func getStorageClass() (string, error) { + defaultClass, err := detectStorageClass() + if err != nil { + // that's fine, continue. 
+ fmt.Println("failed to automatically detect Kubernetes storage class:", err.Error()) + } else if defaultClass != "" { + fmt.Println("using default storage class: ", defaultClass) + return defaultClass, nil + } + + fmt.Print("Enter the Kubernetes storage class to use: ") + storageClass, err := input.ReadStdinString() + if err != nil { + return "", errors.Wrap(err, "failed to ReadStdinString") + } + + if len(storageClass) == 0 { + return "", errors.New("storage class must not be empty") + } + + return storageClass, nil +} + +func detectStorageClass() (string, error) { + output, err := util.Command.Run("kubectl get storageclass --output=name") + if err != nil { + return "", errors.Wrap(err, "failed to get default storageclass") + } + + // output will look like: storageclass.storage.k8s.io/do-block-storage + // so split on the / and return the last part. + + outputParts := strings.Split(output, "/") + if len(outputParts) != 2 { + return "", errors.New("could not automatically determine storage class") + } + + return outputParts[1], nil +} + +func createConfigMap(cwd string) error { + configFilepath := filepath.Join(cwd, "config", "scc-config.yaml") + + _, err := os.Stat(configFilepath) + if err != nil { + return errors.Wrap(err, "failed to Stat scc-config.yaml") + } + + if _, err := util.Command.Run(fmt.Sprintf("kubectl create configmap scc-config --from-file=scc-config.yaml=%s -n suborbital", configFilepath)); err != nil { + return errors.Wrap(err, "failed to create configmap (you may need to run `kubectl delete configmap scc-config -n suborbital`)") + } + + return nil +} + +func manifestsExist(bctx *project.Context) bool { + if _, err := os.Stat(filepath.Join(bctx.Cwd, ".suborbital")); err == nil { + return true + } + + if _, err := os.Stat(filepath.Join(bctx.Cwd, "docker-compose.yml")); err == nil { + return true + } + + return false +} + +func removeExistingManifests(bctx *project.Context) error { + // start with a clean slate. 
+ if _, err := os.Stat(filepath.Join(bctx.Cwd, ".suborbital")); err == nil { + if err := os.RemoveAll(filepath.Join(bctx.Cwd, ".suborbital")); err != nil { + return errors.Wrap(err, "failed to RemoveAll .suborbital") + } + } + + if _, err := os.Stat(filepath.Join(bctx.Cwd, "docker-compose.yml")); err == nil { + if err := os.Remove(filepath.Join(bctx.Cwd, "docker-compose.yml")); err != nil { + return errors.Wrap(err, "failed to Remove docker-compose.yml") + } + } + + return nil +} diff --git a/e2/cli/features/development.go b/e2/cli/features/development.go new file mode 100644 index 00000000..9da1e84f --- /dev/null +++ b/e2/cli/features/development.go @@ -0,0 +1,10 @@ +//go:build development +// +build development + +package features + +// EnableReleaseCommands and others are feature flags +const ( + EnableReleaseCommands = true + EnableRegistryCommands = true +) diff --git a/e2/cli/features/public.go b/e2/cli/features/public.go new file mode 100644 index 00000000..6030a6a2 --- /dev/null +++ b/e2/cli/features/public.go @@ -0,0 +1,10 @@ +//go:build !development +// +build !development + +package features + +// EnableReleaseCommands and others are feature flags. +const ( + EnableReleaseCommands = false + EnableRegistryCommands = false +) diff --git a/e2/cli/input/input.go b/e2/cli/input/input.go new file mode 100644 index 00000000..924d587c --- /dev/null +++ b/e2/cli/input/input.go @@ -0,0 +1,20 @@ +package input + +import ( + "bufio" + "os" + + "github.com/pkg/errors" +) + +// ReadStdinString reads a string from stdin. 
+func ReadStdinString() (string, error) { + scanner := bufio.NewScanner(os.Stdin) + scanner.Scan() + + if err := scanner.Err(); err != nil { + return "", errors.Wrap(err, "failed to scanner.Scan") + } + + return scanner.Text(), nil +} diff --git a/e2/cli/localproxy/proxy.go b/e2/cli/localproxy/proxy.go new file mode 100644 index 00000000..21554795 --- /dev/null +++ b/e2/cli/localproxy/proxy.go @@ -0,0 +1,71 @@ +package localproxy + +import ( + "fmt" + "io" + "net/http" +) + +// Proxy is a proxy from the local machine to the cloud-hosted editor. +type Proxy struct { + endpoint string + server http.Server + client *http.Client +} + +// New creates a new local proxy. +func New(endpoint string, listenPort string) *Proxy { + p := &Proxy{ + endpoint: endpoint, + client: &http.Client{}, + } + + server := http.Server{ + Addr: ":" + listenPort, + Handler: p, + } + + p.server = server + + return p +} + +// Start starts the local proxy server. +func (p *Proxy) Start() error { + fmt.Println("\nPROXY: local tunnel to function editor starting") + + return p.server.ListenAndServe() +} + +func (p *Proxy) ServeHTTP(w http.ResponseWriter, r *http.Request) { + proxiedReq := *r + proxiedReq.RequestURI = "" + proxiedReq.Host = p.endpoint + proxiedReq.URL.Host = p.endpoint + proxiedReq.URL.Scheme = "https" + + resp, err := p.client.Do(&proxiedReq) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + + defer resp.Body.Close() + body, err := io.ReadAll(resp.Body) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + w.Write([]byte(err.Error())) + return + } + + header := w.Header() + for k, vals := range resp.Header { + for _, v := range vals { + header.Add(k, v) + } + } + + w.WriteHeader(resp.StatusCode) + w.Write(body) +} diff --git a/e2/cli/release/check.go b/e2/cli/release/check.go new file mode 100644 index 00000000..24aa5aac --- /dev/null +++ b/e2/cli/release/check.go @@ -0,0 +1,166 @@ +package release + 
+import ( + "bytes" + "context" + "encoding/gob" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "time" + + "github.com/google/go-github/v41/github" + "github.com/hashicorp/go-version" + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/util" +) + +const lastCheckedFilename = "subo_last_checked" +const latestReleaseFilename = "subo_latest_release" + +func getTimestampCache() (time.Time, error) { + cachePath, err := util.CacheDir() + if err != nil { + return time.Time{}, errors.Wrap(err, "failed to CacheDir") + } + + cachedTimestamp := time.Time{} + filePath := filepath.Join(cachePath, lastCheckedFilename) + if _, err = os.Stat(filePath); os.IsNotExist(err) { + } else if err != nil { + return time.Time{}, errors.Wrap(err, "failed to Stat") + } else { + data, err := ioutil.ReadFile(filePath) + if err != nil { + return time.Time{}, errors.Wrap(err, "failed to ReadFile") + } + + cachedTimestamp, err = time.Parse(time.RFC3339, string(data)) + if err != nil { + errRemove := os.Remove(filePath) + if errRemove != nil { + return time.Time{}, errors.Wrap(err, "failed to Remove bad cached timestamp") + } + return time.Time{}, errors.Wrap(err, "failed to parse cached timestamp") + } + } + return cachedTimestamp, nil +} + +func cacheTimestamp(timestamp time.Time) error { + cachePath, err := util.CacheDir() + if err != nil { + return errors.Wrap(err, "failed to CacheDir") + } + + filePath := filepath.Join(cachePath, lastCheckedFilename) + data := []byte(timestamp.Format(time.RFC3339)) + if err := ioutil.WriteFile(filePath, data, util.PermFile); err != nil { + return errors.Wrap(err, "failed to WriteFile") + } + + return nil +} + +func getLatestReleaseCache() (*github.RepositoryRelease, error) { + if cachedTimestamp, err := getTimestampCache(); err != nil { + return nil, errors.Wrap(err, "failed to getTimestampCache") + } else if currentTimestamp := time.Now().UTC(); cachedTimestamp.IsZero() || currentTimestamp.After(cachedTimestamp.Add(time.Hour)) { + // check if 
1 hour has passed since the last version check, and update the cached timestamp and latest release if so. + if err := cacheTimestamp(currentTimestamp); err != nil { + return nil, errors.Wrap(err, "failed to cacheTimestamp") + } + + return nil, nil + } + + cachePath, err := util.CacheDir() + if err != nil { + return nil, errors.Wrap(err, "failed to CacheDir") + } + + var latestRepoRelease *github.RepositoryRelease + filePath := filepath.Join(cachePath, latestReleaseFilename) + if _, err = os.Stat(filePath); os.IsNotExist(err) { + return nil, nil + } else if err != nil { + return nil, errors.Wrap(err, "faild to Stat") + } else { + data, err := ioutil.ReadFile(filePath) + if err != nil { + return nil, errors.Wrap(err, "failed to ReadFile") + } + + var buffer bytes.Buffer + buffer.Write(data) + decoder := gob.NewDecoder(&buffer) + err = decoder.Decode(&latestRepoRelease) + if err != nil { + errRemove := os.Remove(filePath) + if errRemove != nil { + return nil, errors.Wrap(err, "failed to Remove bad cached RepositoryRelease") + } + return nil, errors.Wrap(err, "failed to Decode cached RepositoryRelease") + } + } + + return latestRepoRelease, nil +} + +func cacheLatestRelease(latestRepoRelease *github.RepositoryRelease) error { + cachePath, err := util.CacheDir() + if err != nil { + return errors.Wrap(err, "failed to CacheDir") + } + + var buffer bytes.Buffer + encoder := gob.NewEncoder(&buffer) + if err = encoder.Encode(latestRepoRelease); err != nil { + return errors.Wrap(err, "failed to Encode RepositoryRelease") + } else if err := ioutil.WriteFile(filepath.Join(cachePath, latestReleaseFilename), buffer.Bytes(), util.PermFile); err != nil { + return errors.Wrap(err, "failed to WriteFile") + } + + return nil +} + +func getLatestVersion(ctx context.Context) (*version.Version, error) { + latestRepoRelease, err := getLatestReleaseCache() + if err != nil { + return nil, errors.Wrap(err, "failed to getTimestampCache") + } else if latestRepoRelease == nil { + 
latestRepoRelease, _, err = github.NewClient(nil).Repositories.GetLatestRelease(ctx, "suborbital", "e2core") + if err != nil { + return nil, errors.Wrap(err, "failed to fetch latest e2 release") + } else if err = cacheLatestRelease(latestRepoRelease); err != nil { + return nil, errors.Wrap(err, "failed to cacheLatestRelease") + } + } + + latestVersion, err := version.NewVersion(*latestRepoRelease.TagName) + if err != nil { + return nil, errors.Wrap(err, "failed to parse latest e2 version") + } + + return latestVersion, nil +} + +// CheckForLatestVersion returns an error if E2CLIDotVersion does not match the latest GitHub release or if the check fails. +func CheckForLatestVersion(ctx context.Context) (string, error) { + if latestCmdVersion, err := getLatestVersion(ctx); err != nil { + return "", errors.Wrap(err, "failed to getLatestVersion") + } else if cmdVersion, err := version.NewVersion(E2CLIDotVersion); err != nil { + return "", errors.Wrap(err, "failed to parse current e2 version") + } else if cmdVersion.LessThan(latestCmdVersion) { + return fmt.Sprintf("An upgrade for Subo is available: %s → %s. "+ + "The method for upgrading depends on the method used for"+ + " installation (see https://github."+ + "com/suborbital/e2core for details). As always, "+ + "feel free to ping us on Discord if you run into any snags! https://chat.suborbital.dev/", + cmdVersion, latestCmdVersion), nil + } + + return "", nil +} diff --git a/e2/cli/release/info.go b/e2/cli/release/info.go new file mode 100644 index 00000000..81c8edc8 --- /dev/null +++ b/e2/cli/release/info.go @@ -0,0 +1,14 @@ +package release + +import "fmt" + +// These variables are set at buildtime. See the Makefile. 
+var CommitHash = "" +var BuildTime = "" + +func Version() string { + if CommitHash != "" && BuildTime != "" { + return fmt.Sprintf(`%s %s (Built at %s)`, E2CLIDotVersion, CommitHash, BuildTime) + } + return E2CLIDotVersion +} diff --git a/e2/cli/release/release.mk b/e2/cli/release/release.mk new file mode 100644 index 00000000..b3de73eb --- /dev/null +++ b/e2/cli/release/release.mk @@ -0,0 +1,5 @@ +now = $(shell date +'%Y-%m-%dT%TZ') +commit = $(shell if [ ! -d .git ]; then echo "unknown"; else git rev-parse --short HEAD; fi) +var_path = github.com/suborbital/subo/subo/release +RELEASE_FLAGS = "-X $(var_path).CommitHash=$(commit)\ + -X $(var_path).BuildTime=$(now)" \ No newline at end of file diff --git a/e2/cli/release/version.go b/e2/cli/release/version.go new file mode 100644 index 00000000..3ca7a334 --- /dev/null +++ b/e2/cli/release/version.go @@ -0,0 +1,14 @@ +package release + +// E2CLIDotVersion represents the dot version for e2 +// it is also the image tag used for builders. +var E2CLIDotVersion = "0.5.4" + +// FFIVersion is the FFI version used by this version of e2. +var FFIVersion = "0.15.1" + +// RuntimeVersion is the default version of E2Core that will be used for new projects. +var RuntimeVersion = "0.4.7" + +// SCCTag is the docker tag used for creating new SE2 core deployments. +var SCCTag = "v0.3.1" diff --git a/e2/cli/repl/repl.go b/e2/cli/repl/repl.go new file mode 100644 index 00000000..84db63fc --- /dev/null +++ b/e2/cli/repl/repl.go @@ -0,0 +1,98 @@ +package repl + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/input" + "github.com/suborbital/systemspec/fqmn" +) + +// Repl is a 'local proxy repl' that allows the user to perform simple actions against their local install of SE2. +type Repl struct { + proxyPort string +} + +type tokenResp struct { + Token string `json:"token"` +} + +// New creates a new "local proxy repl". 
+func New(proxyPort string) *Repl {
+	return &Repl{proxyPort: proxyPort}
+}
+
+// Run starts the REPL loop: it reads an option from stdin and dispatches it
+// until a handler fails or reading stdin errors.
+func (r *Repl) Run() error {
+	fmt.Print("\n\nPress enter to launch the local SE2 REPL...")
+
+	if _, err := input.ReadStdinString(); err != nil {
+		return errors.Wrap(err, "failed to ReadStdinString")
+	}
+
+	for {
+		fmt.Println("\n\n1. Create/edit a function")
+		fmt.Print("\nChoose an option: ")
+
+		opt, err := input.ReadStdinString()
+		if err != nil {
+			return errors.Wrap(err, "failed to ReadStdinString")
+		}
+
+		var replErr error
+
+		switch opt {
+		case "1":
+			replErr = r.editFunction()
+		default:
+			fmt.Println("invalid, choose again.")
+		}
+
+		if replErr != nil {
+			// wrap replErr, not err: err is nil here, and errors.Wrap(nil, ...)
+			// returns nil, which silently discarded option failures.
+			return errors.Wrap(replErr, "error produced by option "+opt)
+		}
+	}
+}
+
+// editFunction prompts for a function name (or FQMN), fetches an editor token
+// from the local SE2 control plane, and prints the editor URL to visit.
+func (r *Repl) editFunction() error {
+	fmt.Print("\n\nTo create or edit a function, enter its name (or FQMN): ")
+	name, err := input.ReadStdinString()
+	if err != nil {
+		return errors.Wrap(err, "failed to ReadStdinString")
+	}
+
+	ident := "com.suborbital.acmeco"
+	namespace := "default"
+
+	// if the input parses as a full FQMN, use its parts instead of the defaults.
+	if FQMN, err := fqmn.Parse(name); err == nil {
+		ident = FQMN.Tenant
+		namespace = FQMN.Namespace
+		name = FQMN.Name
+	}
+
+	req, _ := http.NewRequest(http.MethodGet, fmt.Sprintf("http://local.suborbital.network:8081/api/v1/token/%s/%s/%s", ident, namespace, name), nil)
+	resp, err := http.DefaultClient.Do(req)
+	if err != nil {
+		return errors.Wrap(err, "failed to Do request")
+	}
+
+	defer resp.Body.Close()
+
+	body, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return errors.Wrap(err, "failed to ReadAll response body")
+	}
+
+	token := tokenResp{}
+	if err := json.Unmarshal(body, &token); err != nil {
+		return errors.Wrap(err, "failed to Unmarshal token response")
+	}
+
+	editorHost := "local.suborbital.network"
+	if r.proxyPort != "80" {
+		editorHost += ":" + r.proxyPort
+	}
+
+	editorURL := fmt.Sprintf("http://%s/?builder=http://local.suborbital.network:8082&token=%s&ident=%s&namespace=%s&fn=%s", editorHost, token.Token, ident, namespace, name)
+
+	fmt.Println("\n✅ visit", editorURL, "to access the editor")
+
+	return nil
+}
diff --git a/e2/cli/util/cache.go b/e2/cli/util/cache.go
new file mode 100644
index 00000000..0ef95384
---
/dev/null +++ b/e2/cli/util/cache.go @@ -0,0 +1,38 @@ +package util + +import ( + "os" + "path/filepath" + + "github.com/pkg/errors" +) + +const CacheBaseDir = "suborbital" + +// CacheDir returns the cache directory and creates it if it doesn't exist. If +// no subdirectories are passed it defaults to `suborbital/subo`. +func CacheDir(subdirectories ...string) (string, error) { + tmpPath := os.TempDir() + basePath, err := os.UserCacheDir() + + if err != nil { + // fallback if $HOME is not set. + basePath = tmpPath + } + + base := []string{basePath, CacheBaseDir} + + if len(subdirectories) == 0 { + base = append(base, "e2") + } + + targetPath := filepath.Join(append(base, subdirectories...)...) + + if _, err := os.Stat(targetPath); os.IsNotExist(err) { + if err := os.MkdirAll(targetPath, PermDirectory); err != nil { + return "", errors.Wrap(err, "failed to MkdirAll") + } + } + + return targetPath, nil +} diff --git a/e2/cli/util/exec.go b/e2/cli/util/exec.go new file mode 100644 index 00000000..c8a9d661 --- /dev/null +++ b/e2/cli/util/exec.go @@ -0,0 +1,80 @@ +package util + +import ( + "bytes" + "io" + "os" + "os/exec" + + "github.com/pkg/errors" +) + +type CommandRunner interface { + Run(cmd string) (string, error) + RunInDir(cmd, dir string) (string, error) +} + +type silentOutput bool + +const ( + SilentOutput = true + NormalOutput = false +) + +type CommandLineExecutor struct { + silent silentOutput + writer io.Writer +} + +// Command is a barebones command executor. +var Command = &CommandLineExecutor{} + +// NewCommandLineExecutor creates a new CommandLineExecutor with the given configuration. +func NewCommandLineExecutor(silent silentOutput, writer io.Writer) *CommandLineExecutor { + return &CommandLineExecutor{ + silent: silent, + writer: writer, + } +} + +// Run runs a command, outputting to terminal and returning the full output and/or error. 
+func (d *CommandLineExecutor) Run(cmd string) (string, error) { + return run(cmd, "", d.silent, d.writer) +} + +// RunInDir runs a command in the specified directory and returns the full output or error. +func (d *CommandLineExecutor) RunInDir(cmd, dir string) (string, error) { + return run(cmd, dir, d.silent, d.writer) +} + +func run(cmd, dir string, silent silentOutput, writer io.Writer) (string, error) { + // you can uncomment this below if you want to see exactly the commands being run + // fmt.Println("▶️", cmd). + + command := exec.Command("sh", "-c", cmd) + + command.Dir = dir + + var outBuf bytes.Buffer + + if silent { + command.Stdout = &outBuf + command.Stderr = &outBuf + } else if writer != nil { + command.Stdout = io.MultiWriter(os.Stdout, &outBuf, writer) + command.Stderr = io.MultiWriter(os.Stderr, &outBuf, writer) + } else { + command.Stdout = io.MultiWriter(os.Stdout, &outBuf) + command.Stderr = io.MultiWriter(os.Stderr, &outBuf) + } + + runErr := command.Run() + + outStr := outBuf.String() + + if runErr != nil { + return outStr, errors.Wrap(runErr, "failed to Run command") + } + + return outStr, nil +} diff --git a/e2/cli/util/exec_test.go b/e2/cli/util/exec_test.go new file mode 100644 index 00000000..76cd422c --- /dev/null +++ b/e2/cli/util/exec_test.go @@ -0,0 +1,73 @@ +package util + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCommandRunner_Run(t *testing.T) { + tests := []struct { + name string + cmd string + runner func() (CommandRunner, *bytes.Buffer) + want string + wantErr assert.ErrorAssertionFunc + wantBuf []byte + }{ + + { + name: "writes to a supplied io.Writer", + cmd: "echo 'the mitochondria is the powerhouse of the cell'", + runner: func() (CommandRunner, *bytes.Buffer) { + buf := new(bytes.Buffer) + return NewCommandLineExecutor(NormalOutput, buf), buf + }, + want: "the mitochondria is the powerhouse of the cell\n", + wantErr: assert.NoError, + wantBuf: []byte("the mitochondria is the 
powerhouse of the cell\n"), + }, + { + name: "writes nothing to a supplied io.Writer when silent", + cmd: "echo 'the mitochondria is the powerhouse of the cell'", + runner: func() (CommandRunner, *bytes.Buffer) { + buf := new(bytes.Buffer) + return NewCommandLineExecutor(SilentOutput, buf), buf + }, + want: "the mitochondria is the powerhouse of the cell\n", + wantErr: assert.NoError, + wantBuf: nil, + }, + { + name: "accepts a nil io.Writer", + cmd: "echo 'the mitochondria is the powerhouse of the cell'", + runner: func() (CommandRunner, *bytes.Buffer) { + return NewCommandLineExecutor(SilentOutput, nil), new(bytes.Buffer) + }, + want: "the mitochondria is the powerhouse of the cell\n", + wantErr: assert.NoError, + wantBuf: nil, + }, + { + name: "performs with the default executor", + cmd: "echo 'the mitochondria is the powerhouse of the cell'", + runner: func() (CommandRunner, *bytes.Buffer) { + return Command, new(bytes.Buffer) + }, + want: "the mitochondria is the powerhouse of the cell\n", + wantErr: assert.NoError, + wantBuf: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + runner, buf := tt.runner() + got, err := runner.Run(tt.cmd) + + tt.wantErr(t, err) + assert.Equal(t, tt.want, got) + assert.Equal(t, tt.wantBuf, buf.Bytes()) + }) + } +} diff --git a/e2/cli/util/log.go b/e2/cli/util/log.go new file mode 100644 index 00000000..274249da --- /dev/null +++ b/e2/cli/util/log.go @@ -0,0 +1,53 @@ +package util + +import ( + "fmt" +) + +// FriendlyLogger describes a logger designed to provide friendly output for interactive CLI purposes. +type FriendlyLogger interface { + LogInfo(string) + LogStart(string) + LogDone(string) + LogFail(string) + LogWarn(string) +} + +// PrintLogger is a struct wrapper around the logging functions used by Subo. 
+type PrintLogger struct{} + +func (p *PrintLogger) LogInfo(msg string) { LogInfo(msg) } +func (p *PrintLogger) LogStart(msg string) { LogStart(msg) } +func (p *PrintLogger) LogDone(msg string) { LogDone(msg) } +func (p *PrintLogger) LogFail(msg string) { LogFail(msg) } +func (p *PrintLogger) LogWarn(msg string) { LogWarn(msg) } + +// Keeping it DRY. +func log(msg string) { + fmt.Println(msg) +} + +// LogInfo logs information. +func LogInfo(msg string) { + log(fmt.Sprintf("ℹ️ %s", msg)) +} + +// LogStart logs the start of something. +func LogStart(msg string) { + log(fmt.Sprintf("⏩ START: %s", msg)) +} + +// LogDone logs the success of something. +func LogDone(msg string) { + log(fmt.Sprintf("✅ DONE: %s", msg)) +} + +// LogFail logs the failure of something. +func LogFail(msg string) { + log(fmt.Sprintf("🚫 FAILED: %s", msg)) +} + +// LogWarn logs a warning from something. +func LogWarn(msg string) { + log(fmt.Sprintf("⚠️ WARNING: %s", msg)) +} diff --git a/e2/cli/util/mkdir.go b/e2/cli/util/mkdir.go new file mode 100644 index 00000000..fc3e7e7a --- /dev/null +++ b/e2/cli/util/mkdir.go @@ -0,0 +1,19 @@ +package util + +import ( + "os" + "path/filepath" + + "github.com/pkg/errors" +) + +// Mkdir creates a new directory to contain a runnable. +func Mkdir(cwd, name string) (string, error) { + path := filepath.Join(cwd, name) + + if err := os.Mkdir(path, PermDirectory); err != nil { + return "", errors.Wrap(err, "failed to Mkdir") + } + + return path, nil +} diff --git a/e2/cli/util/permissions.go b/e2/cli/util/permissions.go new file mode 100644 index 00000000..01432123 --- /dev/null +++ b/e2/cli/util/permissions.go @@ -0,0 +1,16 @@ +package util + +import ( + "io/fs" +) + +// These constants are meant to be used as reasonable default values for files and directories created by Subo. 
+// nolint:godot +const ( + PermDirectory fs.FileMode = 0755 // rwxr-xr-x + PermDirectoryPrivate fs.FileMode = 0700 // rwx------ + PermExecutable fs.FileMode = 0755 // rwxr-xr-x + PermExecutablePrivate fs.FileMode = 0700 // rwx------ + PermFile fs.FileMode = 0644 // rw-r--r-- + PermFilePrivate fs.FileMode = 0600 // rw------- +) diff --git a/e2/cli/util/token.go b/e2/cli/util/token.go new file mode 100644 index 00000000..b8f30d2a --- /dev/null +++ b/e2/cli/util/token.go @@ -0,0 +1,45 @@ +package util + +import ( + "io/ioutil" + "path/filepath" + + "github.com/pkg/errors" +) + +func getTokenPath() (string, error) { + tokenPath, err := CacheDir("compute") + if err != nil { + return "", errors.Wrap(err, `failed to CacheDir("compute")`) + } + + return filepath.Join(tokenPath, "envtoken"), nil +} + +func WriteEnvironmentToken(tokenStr string) error { + tokenPath, err := getTokenPath() + if err != nil { + return errors.Wrap(err, "failed to getTokenPath") + } + + if err := ioutil.WriteFile(tokenPath, []byte(tokenStr), PermFilePrivate); err != nil { + return errors.Wrapf(err, "failed to write %s", tokenPath) + } + + return nil +} + +func ReadEnvironmentToken() (string, error) { + tokenPath, err := getTokenPath() + if err != nil { + return "", errors.Wrap(err, "failed to getTokenPath") + } + + buf, err := ioutil.ReadFile(tokenPath) + + if err != nil { + return "", errors.Wrapf(err, "failed to read %s", tokenPath) + } + + return string(buf), nil +} diff --git a/e2/cli/util/version_check.go b/e2/cli/util/version_check.go new file mode 100644 index 00000000..4107a9b1 --- /dev/null +++ b/e2/cli/util/version_check.go @@ -0,0 +1,30 @@ +package util + +import ( + "io/ioutil" + "strings" + + "github.com/pkg/errors" +) + +// ErrVersionNotPresent are errors related to checking for version numbers. 
+var ErrVersionNotPresent = errors.New("expected version number is not present") + +// CheckFileForVersionString returns an error if the requested file does not contain the provided versionString. +func CheckFileForVersionString(filePath string, versionString string) error { + file, err := ioutil.ReadFile(filePath) + if err != nil { + return errors.Wrap(err, "failed to ReadFile") + } + + if !strings.Contains(string(file), versionString) { + // also check if it exists without the 'v' prefix. + noV := strings.TrimPrefix(versionString, "v") + + if !strings.Contains(string(file), noV) { + return ErrVersionNotPresent + } + } + + return nil +} diff --git a/e2/deployer/deployer.go b/e2/deployer/deployer.go new file mode 100644 index 00000000..f0a55a3e --- /dev/null +++ b/e2/deployer/deployer.go @@ -0,0 +1,35 @@ +package deployer + +import ( + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +type Deployer struct { + log util.FriendlyLogger +} + +type DeployJob interface { + Type() string + Deploy(logger util.FriendlyLogger, pctx *project.Context) error +} + +// New creates a new Deployer. +func New(log util.FriendlyLogger) *Deployer { + d := &Deployer{ + log: log, + } + + return d +} + +// Deploy executes a DeployJob. 
+func (d *Deployer) Deploy(ctx *project.Context, job DeployJob) error { + if err := job.Deploy(d.log, ctx); err != nil { + return errors.Wrapf(err, "deploy job %s failed", job.Type()) + } + + return nil +} diff --git a/e2/deployer/k8sdeployer.go b/e2/deployer/k8sdeployer.go new file mode 100644 index 00000000..76be8eb0 --- /dev/null +++ b/e2/deployer/k8sdeployer.go @@ -0,0 +1,113 @@ +package deployer + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/builder/template" + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +const ( + k8sDeployJobType = "kubernetes" +) + +// K8sDeployJob represents a deployment job. +type K8sDeployJob struct { + repo string + branch string + domain string + updateTemplates bool +} + +type deploymentData struct { + Identifier string + Version int64 + ImageName string + Domain string +} + +// NewK8sDeployJob creates a new deploy job. +func NewK8sDeployJob(repo, branch, domain string, updateTemplates bool) DeployJob { + k := &K8sDeployJob{ + repo: repo, + branch: branch, + domain: domain, + updateTemplates: updateTemplates, + } + + return k +} + +// Typw returns the deploy job typw. +func (k *K8sDeployJob) Type() string { + return k8sDeployJobType +} + +// Deploy executes the deployment. 
+func (k *K8sDeployJob) Deploy(log util.FriendlyLogger, ctx *project.Context) error { + imageName, err := project.DockerNameFromConfig(ctx.TenantConfig) + if err != nil { + return errors.Wrap(err, "failed to DockerNameFromDirective") + } + + data := deploymentData{ + Identifier: strings.Replace(ctx.TenantConfig.Identifier, ".", "-", -1), + Version: ctx.TenantConfig.TenantVersion, + ImageName: imageName, + Domain: k.domain, + } + + if err := os.RemoveAll(filepath.Join(ctx.Cwd, ".deployment")); err != nil { + if !os.IsNotExist(err) { + return errors.Wrap(err, "failed to RemoveAll deployment files") + } + } + + if err := os.MkdirAll(filepath.Join(ctx.Cwd, ".deployment"), util.PermDirectory); err != nil { + return errors.Wrap(err, "failed to MkdirAll .deployment") + } + + templatesPath, err := template.FullPath(k.repo, k.branch) + if err != nil { + return errors.Wrap(err, "failed to template.FullPath") + } + + if k.updateTemplates { + templatesPath, err = template.UpdateTemplates(k.repo, k.branch) + if err != nil { + return errors.Wrap(err, "🚫 failed to UpdateTemplates") + } + } + + if err := template.ExecTmplDir(ctx.Cwd, ".deployment", templatesPath, "k8s", data); err != nil { + // if the templates are missing, try updating them and exec again. 
+ if err == template.ErrTemplateMissing { + templatesPath, err = template.UpdateTemplates(k.repo, k.branch) + if err != nil { + return errors.Wrap(err, "🚫 failed to UpdateTemplates") + } + + if err := template.ExecTmplDir(ctx.Cwd, ".deployment", templatesPath, "k8s", data); err != nil { + return errors.Wrap(err, "🚫 failed to ExecTmplDir") + } + } else { + return errors.Wrap(err, "🚫 failed to ExecTmplDir") + } + } + + if out, err := util.Command.Run("kubectl create ns suborbital"); err != nil { + log.LogWarn(fmt.Sprintf("failed to create `suborbital` namespace (may alrady exist): %s", out)) + } + + if _, err := util.Command.Run("kubectl apply -f .deployment/"); err != nil { + return errors.Wrap(err, "failed to Run kubectl apply") + } + + return nil +} diff --git a/e2/docs/get-started.md b/e2/docs/get-started.md new file mode 100644 index 00000000..6e07ddb6 --- /dev/null +++ b/e2/docs/get-started.md @@ -0,0 +1,99 @@ +# Get started + +Subo includes the WebAssembly toolchain for Suborbital projects. + +The Suborbital Development Platform aims for Wasm to be a first-class citizen. `subo` is the toolchain for building Wasm Runnables for [Reactr](https://github.com/suborbital/reactr) and [Atmo](https://github.com/suborbital/atmo). The `subo` CLI can build Wasm Runnables, and can package several Wasm Runnables into a deployable bundle. + +Building a Runnable in languages other than Go is designed to be simple and powerful: +```rust +impl runnable::Runnable for Example { + fn run(&self, input: Vec<u8>) -> Option<Vec<u8>> { + let in_string = String::from_utf8(input).unwrap(); + + Some(String::from(format!("hello {}", in_string)).as_bytes().to_vec()) + } +} +``` +subo will package your Runnable into a Wasm module that can be used by Reactr or Atmo and run just like any other Runnable! You can see examples of Runnables in the [Reactr repository](https://github.com/suborbital/reactr/tree/main/rwasm/testdata). 
+ +## Create a project +To create a new project for Atmo or Reactr, use `subo create project <name>`. This will create a new folder which contains a Directive.yaml and an example Runnable. + +Full options for `create project`: +``` +create a new project for Atmo or Reactr + +Usage: + subo create project <name> [flags] + +Flags: + --branch string git branch to download templates from (default "main") + -h, --help help for project + --update-templates update with the newest templates +``` + +## Create a Runnable +To create a new Runnable, use the create runnable command: +``` +> subo create runnable <name> +``` +Rust is chosen by default, but if you prefer Swift, just pass `--lang=swift`! You can now use the Runnable API to build your function. A directory is created for each Runnable, and each contains a `.runnable.yaml` file that includes some metadata. + +The full options for `create runnable`: +``` +Usage: + subo create <name> [flags] + +Flags: + --branch string git branch to download templates from (default "main") + --dir string the directory to put the new runnable in (default "/Users/cohix-16/Workspaces/suborbital/subo") + -h, --help help for create + --lang string the language of the new runnable (default "rust") + --namespace string the namespace for the new runnable (default "default") + --update-templates update with the newest runnable templates +``` + +## Building Wasm Runnables +**It is recommended that Docker be installed to build Wasm Runnables. See below if you do not have Docker installed.** + +To build your Runnable into a Wasm module for Reactr or Atmo, use the build command: +``` +> subo build . +``` +If the current working directory is a Runnable, subo will build it. If the current directory contains many runnables, subo will build them all. Any directory with a `.runnable.yaml` file is considered a Runnable and will be built. Building Runnables is not fully tested on Windows. 
+ +## Bundles +By default, subo will write all of the Runnables in the current directory into a bundle. Atmo uses Runnable bundles to help you build powerful web services by composing Runnables declaratively. If you want to skip bundling, you can pass `--no-bundle` to `subo build` + +The resulting bundle can also be used with a Reactr instance by calling `h.HandleBundle({path/to/bundle})`. See the [Reactr Wasm instructions](https://github.com/suborbital/reactr/blob/master/docs/wasm.md) for details. + +The full options for `build`: +``` +Usage: + subo build [dir] [flags] + +Flags: + --docker pass --docker to automatically build a Docker image based on your project's Dockerfile. It will be tagged with the 'identifier' and 'appVersion' from your Directive + -h, --help help for build + --native if passed, build runnables using native toolchain rather than Docker + --no-bundle if passed, a .wasm.zip bundle will not be generated +``` + +## Building without Docker +If you prefer not to use Docker, you can use the `--native` flag. This will cause subo to use your local machine's toolchain to build Runnables instead of Docker containers. You will need to install the toolchains yourself: +- Rust: Install the latest Rust toolchain and the additional `wasm32-wasi` target. +- Swift: Install the [SwiftWasm](https://book.swiftwasm.org/getting-started/setup.html) toolchain. If using macOS, ensure XCode developer tools are installed (xcrun is required). + +`subo` is continually evolving alongside [Reactr](https://github.com/suborbital/reactr) and [Atmo](https://github.com/suborbital/atmo). + +## Suborbital Runnable API +Reactr and Atmo provide an [API](https://atmo.suborbital.dev/runnable-api/introduction) which gives Wasm Runnables the ability to access resources and communicate with the host application. 
This API currently has capabilities such as: +- The ability to make HTTP requests +- Structured logging +- Access to persistent cache +- Access to a static filesystem +- Database access + +This API will soon have: +- The ability to render templates +- Access to blob storage diff --git a/e2/docs/test/example.md b/e2/docs/test/example.md new file mode 100644 index 00000000..c17daec8 --- /dev/null +++ b/e2/docs/test/example.md @@ -0,0 +1,18 @@ +# This is a test + +## Existing Package Examples + +<!-- {{ Snippet "greetings"}} --> + +## Package Example doNotDoThis + +<!-- {{ Snippet "greetings:doNotDoThis"}} --> + + +## Existing Function Examples + +<!-- {{ Snippet "greetings/Hello" }} --> + +## Existing Function Example doThis + +<!-- {{ Snippet "greetings/Hello:doThis" }} --> \ No newline at end of file diff --git a/e2/docs/test/greeting_test.go b/e2/docs/test/greeting_test.go new file mode 100644 index 00000000..4d44b384 --- /dev/null +++ b/e2/docs/test/greeting_test.go @@ -0,0 +1,22 @@ +package greetings + +// This comment is associated with the ExampleHello_doNotDoThis function example. +// This is showing you how not to use Hello. +func ExampleHello_doNotDoThis() { + Hello("Boo!") + // Output: Ahhh! +} + +// This comment is associated with the ExampleHello_doThis function example. +// This is showing you how to use Hello. +func ExampleHello_doThis() { + Hello("world") + // Output: Hello, world! +} + +// This comment is associated with the package example. +// This is showing you how to use Hello. +func Example() { + Hello("Foo Bar") + // Output: Hello, Foo Bar! +} diff --git a/e2/docs/test/greetings.go b/e2/docs/test/greetings.go new file mode 100644 index 00000000..3ae8e446 --- /dev/null +++ b/e2/docs/test/greetings.go @@ -0,0 +1,11 @@ +package greetings + +import "fmt" + +func Hello(who string) { + if who == "Boo!" 
{ + fmt.Printf("Ahhh!") + } else { + fmt.Printf("Hello, %s!\n", who) + } +} diff --git a/e2/e2.mk b/e2/e2.mk new file mode 100644 index 00000000..a403ee4a --- /dev/null +++ b/e2/e2.mk @@ -0,0 +1,25 @@ +include ./e2/builder/builder.mk +include ./e2/cli/release/release.mk + +GO_INSTALL=go install -ldflags $(RELEASE_FLAGS) ./e2 + +e2: + $(GO_INSTALL) + +e2/dev: + $(GO_INSTALL) -tags=development + +e2/docker-bin: + $(GO_INSTALL) -tags=docker + +e2/docker: + DOCKER_BUILDKIT=1 docker build ./e2 -t suborbital/e2:dev + +e2/docker/publish: + docker buildx build ./e2 --platform linux/amd64,linux/arm64 -t suborbital/e2:dev --push + +e2/smoketest: e2 + ./e2/scripts/smoketest.sh + + +.PHONY: e2 e2/dev e2/docker-bin e2/docker e2/docker/publish e2/smoketest diff --git a/e2/main.go b/e2/main.go new file mode 100644 index 00000000..9f231c7e --- /dev/null +++ b/e2/main.go @@ -0,0 +1,14 @@ +package main + +import ( + "os" +) + +func main() { + rootCmd := rootCommand() + if err := rootCmd.Execute(); err != nil { + os.Exit(1) + } + + checkForUpdates() +} diff --git a/e2/packager/bundlepackager.go b/e2/packager/bundlepackager.go new file mode 100644 index 00000000..50e60cfc --- /dev/null +++ b/e2/packager/bundlepackager.go @@ -0,0 +1,107 @@ +package packager + +import ( + "fmt" + "path/filepath" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" + "github.com/suborbital/systemspec/bundle" + "github.com/suborbital/systemspec/capabilities" + "github.com/suborbital/systemspec/tenant" +) + +const bundlePackageJobType = "bundle" + +type BundlePackageJob struct{} + +func NewBundlePackageJob() PackageJob { + b := &BundlePackageJob{} + + return b +} + +// Type returns the job type. +func (b *BundlePackageJob) Type() string { + return bundlePackageJobType +} + +// Package packages the application. 
+func (b *BundlePackageJob) Package(log util.FriendlyLogger, ctx *project.Context) error { + for _, r := range ctx.Modules { + if err := r.HasWasmFile(); err != nil { + return errors.Wrap(err, "missing built Wasm module") + } + } + + if ctx.TenantConfig == nil { + defaultCaps := capabilities.DefaultCapabilityConfig() + + ctx.TenantConfig = &tenant.Config{ + Identifier: "com.suborbital.app", + SpecVersion: 1, + TenantVersion: 1, + DefaultNamespace: tenant.NamespaceConfig{ + Name: "default", + Capabilities: &defaultCaps, + }, + Namespaces: []tenant.NamespaceConfig{}, + } + } else { + log.LogInfo("updating tenant version") + + ctx.TenantConfig.TenantVersion++ + } + + if err := project.WriteTenantConfig(ctx.Cwd, ctx.TenantConfig); err != nil { + return errors.Wrap(err, "failed to WriteTenantConfig") + } + + if err := project.CalculateModuleRefs(ctx.TenantConfig, ctx.Modules); err != nil { + return errors.Wrap(err, "🚫 failed to CalculateModuleRefs") + } + + if err := ctx.TenantConfig.Validate(); err != nil { + return errors.Wrap(err, "🚫 failed to Validate Directive") + } + + static, err := CollectStaticFiles(ctx.Cwd) + if err != nil { + return errors.Wrap(err, "failed to CollectStaticFiles") + } + + if len(static) > 0 { + log.LogInfo("adding static files to bundle") + } + + configBytes, err := ctx.TenantConfig.Marshal() + if err != nil { + return errors.Wrap(err, "failed to Directive.Marshal") + } + + moduleFiles, err := ctx.ModuleFiles() + if err != nil { + return errors.Wrap(err, "failed to Modules for build") + } + + for i := range moduleFiles { + defer moduleFiles[i].Close() + } + + if err := bundle.Write(configBytes, moduleFiles, static, ctx.Bundle.Fullpath); err != nil { + return errors.Wrap(err, "🚫 failed to WriteBundle") + } + + bundleRef := project.BundleRef{ + Exists: true, + Fullpath: filepath.Join(ctx.Cwd, "runnables.wasm.zip"), + } + + ctx.Bundle = bundleRef + + log.LogDone(fmt.Sprintf("bundle was created -> %s @ v%d", ctx.Bundle.Fullpath, 
ctx.TenantConfig.TenantVersion)) + + return nil +} diff --git a/e2/packager/dockerimagepackager.go b/e2/packager/dockerimagepackager.go new file mode 100644 index 00000000..f243ce19 --- /dev/null +++ b/e2/packager/dockerimagepackager.go @@ -0,0 +1,54 @@ +package packager + +import ( + "fmt" + "os" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +const dockerImagePackageJobType = "docker" + +type DockerImagePackageJob struct{} + +func NewDockerImagePackageJob() PackageJob { + b := &DockerImagePackageJob{} + + return b +} + +// Type returns the job type. +func (b *DockerImagePackageJob) Type() string { + return dockerImagePackageJobType +} + +// Package packages the application. +func (b *DockerImagePackageJob) Package(log util.FriendlyLogger, ctx *project.Context) error { + if err := ctx.HasDockerfile(); err != nil { + return errors.Wrap(err, "missing Dockerfile") + } + + if !ctx.Bundle.Exists { + return errors.New("missing project bundle") + } + + if err := os.Setenv("DOCKER_BUILDKIT", "0"); err != nil { + util.LogWarn("DOCKER_BUILDKIT=0 could not be set, Docker build may be problematic on M1 Macs.") + } + + imageName, err := project.DockerNameFromConfig(ctx.TenantConfig) + if err != nil { + return errors.Wrap(err, "failed to dockerNameFromDirective") + } + + if _, err := util.Command.Run(fmt.Sprintf("docker build . -t=%s", imageName)); err != nil { + return errors.Wrap(err, "🚫 failed to build Docker image") + } + + util.LogDone(fmt.Sprintf("built Docker image -> %s", imageName)) + + return nil +} diff --git a/e2/packager/packager.go b/e2/packager/packager.go new file mode 100644 index 00000000..676c38c3 --- /dev/null +++ b/e2/packager/packager.go @@ -0,0 +1,40 @@ +package packager + +import ( + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +// Packager is responsible for packaging and publishing projects. 
+type Packager struct { + log util.FriendlyLogger +} + +// PackageJob represents a specific type of packaging, +// for example modules into bundle, bundle into container image, etc. +type PackageJob interface { + Type() string + Package(logger util.FriendlyLogger, pctx *project.Context) error +} + +// New creates a new Packager. +func New(log util.FriendlyLogger) *Packager { + p := &Packager{ + log: log, + } + + return p +} + +// Package executes the given set of PackageJobs, returning an error if any fail. +func (p *Packager) Package(ctx *project.Context, jobs ...PackageJob) error { + for _, j := range jobs { + if err := j.Package(p.log, ctx); err != nil { + return errors.Wrapf(err, "package job %s failed", j.Type()) + } + } + + return nil +} diff --git a/e2/packager/static.go b/e2/packager/static.go new file mode 100644 index 00000000..814003f6 --- /dev/null +++ b/e2/packager/static.go @@ -0,0 +1,50 @@ +package packager + +import ( + "os" + "path/filepath" + "strings" + + "github.com/pkg/errors" + + "github.com/suborbital/systemspec/bundle" +) + +// CollectStaticFiles collects all of the files in the `static/` directory relative to cwd +// and generates a map of their relative paths. 
+func CollectStaticFiles(cwd string) (map[string]os.File, error) { + staticDir := filepath.Join(cwd, "static") + + stat, err := os.Stat(staticDir) + if err != nil { + if errors.Is(err, os.ErrNotExist) { + return nil, nil + } + + return nil, errors.Wrap(err, "failed to Stat static directory") + } else if !stat.IsDir() { + return nil, errors.New("'static' is not a directory") + } + + files := map[string]os.File{} + + filepath.Walk(staticDir, func(path string, info os.FileInfo, _ error) error { + if info.IsDir() { + return nil + } + + file, err := os.Open(path) + if err != nil { + return errors.Wrap(err, "failed to Open file: "+path) + } + + relativePath := strings.TrimPrefix(path, staticDir) + fileName := bundle.NormalizeStaticFilename(relativePath) + + files[fileName] = *file + + return nil + }) + + return files, nil +} diff --git a/e2/project/context.go b/e2/project/context.go new file mode 100644 index 00000000..3eb35db3 --- /dev/null +++ b/e2/project/context.go @@ -0,0 +1,317 @@ +package project + +import ( + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strings" + + "github.com/pkg/errors" + "gopkg.in/yaml.v2" + + "github.com/suborbital/e2core/e2/cli/release" + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/systemspec/tenant" +) + +// validLangs are the available languages. +var validLangs = map[string]struct{}{ + "rust": {}, + "swift": {}, + "assemblyscript": {}, + "tinygo": {}, + "grain": {}, + "typescript": {}, + "javascript": {}, + "wat": {}, +} + +// Context describes the context under which the tool is being run. +type Context struct { + Cwd string + CwdIsRunnable bool + Modules []ModuleDir + Bundle BundleRef + TenantConfig *tenant.Config + RuntimeVersion string + Langs []string + MountPath string + RelDockerPath string + BuilderTag string +} + +// ModuleDir represents a directory containing a Runnable. 
+type ModuleDir struct { + Name string + UnderscoreName string + Fullpath string + Module *tenant.Module + CompilerFlags string +} + +// BundleRef contains information about a bundle in the current context. +type BundleRef struct { + Exists bool + Fullpath string +} + +// ForDirectory returns the build context for the provided working directory. +func ForDirectory(dir string) (*Context, error) { + fullDir, err := filepath.Abs(dir) + if err != nil { + return nil, errors.Wrap(err, "failed to get Abs path") + } + + modules, cwdIsRunnable, err := getModuleDirs(fullDir) + if err != nil { + return nil, errors.Wrap(err, "failed to getRunnableDirs") + } + + bundle, err := bundleTargetPath(fullDir) + if err != nil { + return nil, errors.Wrap(err, "failed to bundleIfExists") + } + + config, err := readTenantConfig(fullDir) + if err != nil { + if !os.IsNotExist(errors.Cause(err)) { + return nil, errors.Wrap(err, "failed to readDirectiveFile") + } + } + + queries, err := readQueriesFile(dir) + if err != nil { + return nil, errors.Wrap(err, "failed to readQueriesFile") + } else if len(queries) > 0 { + config.DefaultNamespace.Queries = queries + } + + connections, err := readConnectionsFile(dir) + if err != nil { + return nil, errors.Wrap(err, "failed to readConnectionsFile") + } else if len(connections) > 0 { + config.DefaultNamespace.Connections = connections + } + + bctx := &Context{ + Cwd: fullDir, + CwdIsRunnable: cwdIsRunnable, + Modules: modules, + Bundle: *bundle, + TenantConfig: config, + Langs: []string{}, + MountPath: fullDir, + RelDockerPath: ".", + BuilderTag: fmt.Sprintf("v%s", release.E2CLIDotVersion), + } + + return bctx, nil +} + +// ModuleExists returns true if the context contains a module with name <name>. +func (b *Context) ModuleExists(name string) bool { + for _, r := range b.Modules { + if r.Name == name { + return true + } + } + + return false +} + +// ShouldBuildLang returns true if the provided language is safe-listed for building. 
+func (b *Context) ShouldBuildLang(lang string) bool { + if len(b.Langs) == 0 { + return true + } + + for _, l := range b.Langs { + if l == lang { + return true + } + } + + return false +} + +func (b *Context) ModuleFiles() ([]os.File, error) { + modules := []os.File{} + + for _, r := range b.Modules { + wasmPath := filepath.Join(r.Fullpath, fmt.Sprintf("%s.wasm", r.Name)) + + file, err := os.Open(wasmPath) + if err != nil { + return nil, errors.Wrapf(err, "failed to Open module file %s", wasmPath) + } + + modules = append(modules, *file) + } + + return modules, nil +} + +// HasDockerfile returns a nil error if the project's Dockerfile exists. +func (b *Context) HasDockerfile() error { + dockerfilePath := filepath.Join(b.Cwd, "Dockerfile") + + if _, err := os.Stat(dockerfilePath); err != nil { + return errors.Wrap(err, "failed to Stat Dockerfile") + } + + return nil +} + +// WasmFile returns a file object for the .wasm file. It is the caller's responsibility to close the file. +func (m *ModuleDir) WasmFile() (io.ReadCloser, error) { + modulePath := filepath.Join(m.Fullpath, fmt.Sprintf("%s.wasm", m.Name)) + + wasmFile, err := os.Open(modulePath) + if err != nil { + return nil, errors.Wrapf(err, "failed to Open %s", modulePath) + } + + return wasmFile, nil +} + +// HasWasmFile returns a nil error if the module's .wasm file exists. +func (m *ModuleDir) HasWasmFile() error { + modulePath := filepath.Join(m.Fullpath, fmt.Sprintf("%s.wasm", m.Name)) + + if _, err := os.Stat(modulePath); err != nil { + return errors.Wrapf(err, "failed to Stat %s", modulePath) + } + + return nil +} + +func getModuleDirs(cwd string) ([]ModuleDir, bool, error) { + modules := []ModuleDir{} + + // Go through all of the dirs in the current dir. + topLvlFiles, err := ioutil.ReadDir(cwd) + if err != nil { + return nil, false, errors.Wrap(err, "failed to list directory") + } + + // Check to see if we're running from within a Runnable directory + // and return true if so. 
+ moduleDir, err := getModuleFromFiles(cwd, topLvlFiles) + if err != nil { + return nil, false, errors.Wrap(err, "failed to getRunnableFromFiles") + } else if moduleDir != nil { + modules = append(modules, *moduleDir) + return modules, true, nil + } + + for _, tf := range topLvlFiles { + if !tf.IsDir() { + continue + } + + dirPath := filepath.Join(cwd, tf.Name()) + + // Determine if a .module file exists in that dir. + innerFiles, err := ioutil.ReadDir(dirPath) + if err != nil { + util.LogWarn(fmt.Sprintf("couldn't read files in %v", dirPath)) + continue + } + + moduleDir, err := getModuleFromFiles(dirPath, innerFiles) + if err != nil { + return nil, false, errors.Wrap(err, "failed to getRunnableFromFiles") + } else if moduleDir == nil { + continue + } + + modules = append(modules, *moduleDir) + } + + return modules, false, nil +} + +// ContainsModuleYaml finds any .module file in a list of files. +func ContainsModuleYaml(files []os.FileInfo) (string, bool) { + for _, f := range files { + if strings.HasPrefix(f.Name(), ".module.") { + return f.Name(), true + } + } + + return "", false +} + +// IsValidLang returns true if a language is valid. 
+func IsValidLang(lang string) bool { + _, exists := validLangs[lang] + + return exists +} + +func getModuleFromFiles(wd string, files []os.FileInfo) (*ModuleDir, error) { + filename, exists := ContainsModuleYaml(files) + if !exists { + return nil, nil + } + + moduleBytes, err := ioutil.ReadFile(filepath.Join(wd, filename)) + if err != nil { + return nil, errors.Wrap(err, "failed to ReadFile .module yaml") + } + + module := &tenant.Module{} + if err := yaml.Unmarshal(moduleBytes, &module); err != nil { + return nil, errors.Wrap(err, "failed to Unmarshal .module yaml") + } + + if module.Name == "" { + module.Name = filepath.Base(wd) + } + + if module.Namespace == "" { + module.Namespace = "default" + } + + if ok := IsValidLang(module.Lang); !ok { + return nil, fmt.Errorf("(%s) %s is not a valid lang", module.Name, module.Lang) + } + + absolutePath, err := filepath.Abs(wd) + if err != nil { + return nil, errors.Wrap(err, "failed to get Abs filepath") + } + + moduleDir := &ModuleDir{ + Name: module.Name, + UnderscoreName: strings.Replace(module.Name, "-", "_", -1), + Fullpath: absolutePath, + Module: module, + } + + return moduleDir, nil +} + +func bundleTargetPath(cwd string) (*BundleRef, error) { + path := filepath.Join(cwd, "modules.wasm.zip") + + b := &BundleRef{ + Fullpath: path, + Exists: false, + } + + _, err := os.Stat(path) + if err != nil { + if os.IsNotExist(err) { + return b, nil + } else { + return nil, err + } + } + + b.Exists = true + + return b, nil +} diff --git a/e2/project/tenantconfig.go b/e2/project/tenantconfig.go new file mode 100644 index 00000000..6d488875 --- /dev/null +++ b/e2/project/tenantconfig.go @@ -0,0 +1,225 @@ +package project + +import ( + "crypto/sha256" + "encoding/hex" + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strings" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/systemspec/fqmn" + "github.com/suborbital/systemspec/tenant" +) + +// WriteTenantConfig writes a 
tenant config to disk. +func WriteTenantConfig(cwd string, cfg *tenant.Config) error { + filePath := filepath.Join(cwd, "tenant.json") + + configBytes, err := cfg.Marshal() + if err != nil { + return errors.Wrap(err, "failed to Marshal") + } + + if err := ioutil.WriteFile(filePath, configBytes, util.PermFilePrivate); err != nil { + return errors.Wrap(err, "failed to WriteFile") + } + + return nil +} + +// readTenantConfig finds a tenant.json from disk but does not validate it. +func readTenantConfig(cwd string) (*tenant.Config, error) { + filePath := filepath.Join(cwd, "tenant.json") + + tenantBytes, err := ioutil.ReadFile(filePath) + if err != nil { + return nil, errors.Wrap(err, "failed to ReadFile for Directive") + } + + t := &tenant.Config{} + if err := t.Unmarshal(tenantBytes); err != nil { + return nil, errors.Wrap(err, "failed to Unmarshal Directive") + } + + return t, nil +} + +// readQueriesFile finds a queries.yaml from disk. +func readQueriesFile(cwd string) ([]tenant.DBQuery, error) { + filePath := filepath.Join(cwd, "Queries.yaml") + + configBytes, err := ioutil.ReadFile(filePath) + if err != nil { + if os.IsNotExist(err) { + return nil, nil + } + + return nil, errors.Wrap(err, "failed to ReadFile for Queries.yaml") + } + + t := &tenant.Config{} + if err := t.UnmarshalYaml(configBytes); err != nil { + return nil, errors.Wrap(err, "failed to Unmarshal Directive") + } + + return t.DefaultNamespace.Queries, nil +} + +// readConnectionsFile finds a queries.yaml from disk. 
+func readConnectionsFile(cwd string) ([]tenant.Connection, error) { + filePath := filepath.Join(cwd, "Connections.yaml") + + configBytes, err := ioutil.ReadFile(filePath) + if err != nil { + if os.IsNotExist(err) { + return nil, nil + } + + return nil, errors.Wrap(err, "failed to ReadFile for Queries.yaml") + } + + t := &tenant.Config{} + if err := t.UnmarshalYaml(configBytes); err != nil { + return nil, errors.Wrap(err, "failed to Unmarshal Directive") + } + + return t.DefaultNamespace.Connections, nil +} + +// CalculateModuleRefs calculates the hash refs for all modules and validates correctness of the config. +func CalculateModuleRefs(cfg *tenant.Config, mods []ModuleDir) error { + dirModules := make([]tenant.Module, len(mods)) + + // for each module, calculate its ref (a.k.a. its hash), and then add it to the context. + for i := range mods { + mod := mods[i] + modFile, err := mod.WasmFile() + if err != nil { + return errors.Wrap(err, "failed to WasmFile") + } + + defer modFile.Close() + + hash, err := calculateModuleRef(modFile) + if err != nil { + return errors.Wrap(err, "failed to calculateModuleRef") + } + + mod.Module.Ref = hash + rev := tenant.ModuleRevision{ + Ref: hash, + } + + if mod.Module.Revisions == nil { + mod.Module.Revisions = []tenant.ModuleRevision{rev} + } else { + mod.Module.Revisions = append(mod.Module.Revisions, rev) + } + + FQMN, err := fqmn.FromParts(cfg.Identifier, mod.Module.Namespace, mod.Module.Name, hash) + if err != nil { + return errors.Wrap(err, "failed to fqmn.FromParts") + } + + mod.Module.FQMN = FQMN + + dirModules[i] = *mod.Module + } + + // now that refs are calculated, ensure that all modules referenced + // in the tenant's workflows are present in the tenant's module list + workflowMods := getWorkflowFQMNList(cfg) + + missing := []string{} + + for _, modFQMN := range workflowMods { + FQMN, err := fqmn.Parse(modFQMN) + if err != nil { + return errors.Wrapf(err, "failed to parse FQMN %s", modFQMN) + } + + found := false + + 
for _, dirMod := range dirModules { + if dirMod.Name == FQMN.Name && dirMod.Namespace == FQMN.Namespace { + found = true + break + } + } + + if !found { + missing = append(missing, modFQMN) + } + } + + if len(missing) > 0 { + return fmt.Errorf("the following modules referenced in workflows were not found: %s", strings.Join(missing, ", ")) + } + + cfg.Modules = dirModules + + return cfg.Validate() +} + +// calculateModuleRef calculates the hex-encoded sha256 hash of a module file. +func calculateModuleRef(mod io.Reader) (string, error) { + hasher := sha256.New() + if _, err := io.Copy(hasher, mod); err != nil { + return "", errors.Wrap(err, "failed to Copy module contents") + } + + hashBytes := hasher.Sum(nil) + + return hex.EncodeToString(hashBytes), nil +} + +// getWorkflowFQMNList gets a full list of all functions used in the config's workflows. +func getWorkflowFQMNList(cfg *tenant.Config) []string { + modMap := map[string]bool{} + + // collect all the workflows in all of the namespaces. + workflows := []tenant.Workflow{} + workflows = append(workflows, cfg.DefaultNamespace.Workflows...) + for _, ns := range cfg.Namespaces { + workflows = append(workflows, ns.Workflows...) 
+ } + + for _, h := range workflows { + for _, step := range h.Steps { + if step.IsFn() { + modMap[step.ExecutableMod.FQMN] = true + } else if step.IsGroup() { + for _, mod := range step.Group { + modMap[mod.FQMN] = true + } + } + } + } + + mods := []string{} + for fn := range modMap { + mods = append(mods, fn) + } + + return mods +} + +func DockerNameFromConfig(cfg *tenant.Config) (string, error) { + identParts := strings.Split(cfg.Identifier, ".") + if len(identParts) != 3 { + return "", errors.New("ident has incorrect number of parts") + } + + org := identParts[1] + repo := identParts[2] + + name := fmt.Sprintf("%s/%s:%d", org, repo, cfg.TenantVersion) + + return name, nil +} diff --git a/e2/publisher/bindlepublisher.go b/e2/publisher/bindlepublisher.go new file mode 100644 index 00000000..1470aee7 --- /dev/null +++ b/e2/publisher/bindlepublisher.go @@ -0,0 +1,224 @@ +package publisher + +import ( + "crypto/sha256" + "encoding/hex" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "strings" + + bclient "github.com/deislabs/go-bindle/client" + "github.com/deislabs/go-bindle/keyring" + "github.com/deislabs/go-bindle/types" + "github.com/pelletier/go-toml" + "github.com/pkg/errors" + "gopkg.in/yaml.v2" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +const ( + BindlePublishJobType = "bindle" + suboAuthor = "Subo <subo@suborbital.dev>" +) + +type BindlePublishJob struct{} + +type parcelWrapper struct { + parcel types.Parcel + data []byte +} + +// NewBindlePublishJob returns a new PublishJob for Bindle. +func NewBindlePublishJob() PublishJob { + b := &BindlePublishJob{} + + return b +} + +// Type returns the publish job's type. +func (b *BindlePublishJob) Type() string { + return BindlePublishJobType +} + +// Publish publishes the application. 
+func (b *BindlePublishJob) Publish(log util.FriendlyLogger, ctx *project.Context) error { + if ctx.TenantConfig == nil { + return errors.New("🚫 cannot push without tenant.json file") + } + + log.LogStart(fmt.Sprintf("pushing %s@%d", ctx.TenantConfig.Identifier, ctx.TenantConfig.TenantVersion)) + + invoice := &types.Invoice{ + BindleVersion: "1.0.0", + Bindle: types.BindleSpec{ + Name: ctx.TenantConfig.Identifier, + Version: fmt.Sprintf("%d", ctx.TenantConfig.TenantVersion), + Authors: []string{ + suboAuthor, + }, + }, + Parcel: []types.Parcel{}, + } + + parcelsBySHA := map[string]parcelWrapper{} + + // add the Directive as a parcel. + configBytes, err := yaml.Marshal(ctx.TenantConfig) + if err != nil { + return errors.Wrap(err, "failed to Marshal Directive") + } + + tenantParcel := parcelForData("tenant.yaml", "application/yaml", configBytes) + + invoice.Parcel = append(invoice.Parcel, tenantParcel) + + parcelsBySHA[tenantParcel.Label.SHA256] = parcelWrapper{ + parcel: tenantParcel, + data: configBytes, + } + + // add each module as a parcel. 
+ for _, mod := range ctx.Modules { + files, err := ioutil.ReadDir(mod.Fullpath) + if err != nil { + return errors.Wrapf(err, "failed to ReadDir for %s", mod.Fullpath) + } + + for _, file := range files { + if !strings.HasSuffix(file.Name(), ".wasm") { + continue + } + + fullPath := filepath.Join(mod.Fullpath, file.Name()) + + fileBytes, err := os.ReadFile(fullPath) + if err != nil { + return errors.Wrapf(err, "failed to Open %s", fullPath) + } + + parcel := parcelForData(file.Name(), "application/wasm", fileBytes) + + invoice.Parcel = append(invoice.Parcel, parcel) + + parcelsBySHA[parcel.Label.SHA256] = parcelWrapper{ + parcel: parcel, + data: fileBytes, + } + } + } + + sigKey, privKey, err := createOrReadKeypair(suboAuthor) + if err != nil { + return errors.Wrap(err, "failed to createOrReadKeypair") + } + + if err := invoice.GenerateSignature(suboAuthor, types.RoleCreator, sigKey, privKey); err != nil { + return errors.Wrap(err, "failed to GenerateCreatorSignaure") + } + + client, err := bclient.New("http://127.0.0.1:8080/v1", nil) + if err != nil { + return errors.Wrap(err, "failed to client.New") + } + + invResp, err := client.CreateInvoice(*invoice) + if err != nil { + return errors.Wrap(err, "failed to CreateInvoice") + } + + for _, p := range invResp.Missing { + wrapper := parcelsBySHA[p.SHA256] + + if err := client.CreateParcel(invoice.Name(), p.SHA256, wrapper.data); err != nil { + return errors.Wrapf(err, "failed to CreateParcel for %s", wrapper.parcel.Label.Name) + } + } + + invoiceBytes, err := toml.Marshal(invoice) + if err != nil { + return errors.Wrap(err, "failed to Marshal invoice") + } + + invoiceBytes = append([]byte("# Autogenerated Bindle Invoice, do not edit\n\n"), invoiceBytes...) 
+ + if err := os.WriteFile(filepath.Join(ctx.Cwd, "Invoice.toml"), invoiceBytes, util.PermFile); err != nil { + return errors.Wrap(err, "failed to WriteFile for Invoice.toml") + } + + util.LogDone("pushed") + + return nil +} + +func parcelForData(name, mediaType string, data []byte) types.Parcel { + sha := sha256.New() + sha.Write(data) + + fileSHA := hex.EncodeToString(sha.Sum(nil)) + + label := types.Label{ + SHA256: fileSHA, + MediaType: mediaType, + Name: name, + Size: uint64(len(data)), + } + + parcel := types.Parcel{ + Label: label, + } + + return parcel +} + +func createOrReadKeypair(author string) (*types.SignatureKey, []byte, error) { + var sigKey *types.SignatureKey + var privKey []byte + + kr, err := keyring.LocalKeyring() + if err != nil { + sigKey, privKey, err = keyring.GenerateSignatureKey(author, "creator") + if err != nil { + return nil, nil, errors.Wrap(err, "failed to GenerateSignatureKey") + } + + if err := keyring.AddLocalKey(sigKey); err != nil { + return nil, nil, errors.Wrap(err, "failed to AddLocalKey") + } + + if err := keyring.WritePrivKey(privKey, privKeyFilepath()); err != nil { + return nil, nil, errors.Wrap(err, "failed to WritePrivateKey") + } + + return sigKey, privKey, nil + } + + // find the SignatureKey in the local Keyring. + for i, k := range kr.Key { + if k.Label == author { + sigKey = &kr.Key[i] + break + } + } + + // read the privkey from the '.ssh' location. 
+ privKey, err = keyring.ReadPrivKey(privKeyFilepath()) + if err != nil { + return nil, nil, errors.Wrap(err, "failed to ReadPrivKey") + } + + return sigKey, privKey, nil +} + +func privKeyFilepath() string { + home := "$HOME" + + if usrHome, err := os.UserHomeDir(); err == nil { + home = usrHome + } + + return filepath.Join(home, ".ssh", "bindle_ed25519") +} diff --git a/e2/publisher/dockerpublisher.go b/e2/publisher/dockerpublisher.go new file mode 100644 index 00000000..a8b58d94 --- /dev/null +++ b/e2/publisher/dockerpublisher.go @@ -0,0 +1,52 @@ +package publisher + +import ( + "fmt" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +const ( + DockerPublishJobType = "docker" +) + +type DockerPublishJob struct{} + +// NewDockerPublishJob returns a new PublishJob for Docker images. +func NewDockerPublishJob() PublishJob { + d := &DockerPublishJob{} + + return d +} + +// Type returns the publish job's type. +func (b *DockerPublishJob) Type() string { + return DockerPublishJobType +} + +// Publish publishes the application. +func (b *DockerPublishJob) Publish(log util.FriendlyLogger, ctx *project.Context) error { + if ctx.TenantConfig == nil { + return errors.New("cannot publish without tenant.json") + } + + if !ctx.Bundle.Exists { + return errors.New("cannot publish without runnables.wasm.zip, run `e2 build` first") + } + + imageName, err := project.DockerNameFromConfig(ctx.TenantConfig) + if err != nil { + return errors.Wrap(err, "failed to DockerNameFromConfig") + } + + if _, err := util.Command.Run(fmt.Sprintf("docker buildx build . 
--platform linux/amd64,linux/arm64 -t %s --push", imageName)); err != nil { + return errors.Wrap(err, "failed to Run docker") + } + + util.LogDone(fmt.Sprintf("pushed Docker image -> %s", imageName)) + + return nil +} diff --git a/e2/publisher/publisher.go b/e2/publisher/publisher.go new file mode 100644 index 00000000..2f25d6ca --- /dev/null +++ b/e2/publisher/publisher.go @@ -0,0 +1,37 @@ +package publisher + +import ( + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/cli/util" + "github.com/suborbital/e2core/e2/project" +) + +// Publisher is responsible for publishing projects. +type Publisher struct { + log util.FriendlyLogger +} + +// New creates a new Publisher. +func New(log util.FriendlyLogger) *Publisher { + p := &Publisher{ + log: log, + } + + return p +} + +// PublishJob represents an attempt to publish a packaged application. +type PublishJob interface { + Type() string + Publish(logger util.FriendlyLogger, pctx *project.Context) error +} + +// Publish executes a PublishJob. 
+func (p *Publisher) Publish(ctx *project.Context, job PublishJob) error { + if err := job.Publish(p.log, ctx); err != nil { + return errors.Wrapf(err, "publish job %s failed", job.Type()) + } + + return nil +} diff --git a/e2/root.go b/e2/root.go new file mode 100644 index 00000000..6cea5144 --- /dev/null +++ b/e2/root.go @@ -0,0 +1,74 @@ +package main + +import ( + "github.com/spf13/cobra" + + "github.com/suborbital/e2core/e2/cli/command" + "github.com/suborbital/e2core/e2/cli/features" + "github.com/suborbital/e2core/e2/cli/release" +) + +func rootCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "e2", + Short: "Suborbital Extension Engine CLI", + Version: release.Version(), + Long: `e2 is the full toolchain for using and managing the Suborbital Extension Engine (SE2).`, + RunE: func(cmd *cobra.Command, args []string) error { + cmd.Help() + return nil + }, + } + + create := &cobra.Command{ + Use: "create", + Short: "create a runnable, project, or handler", + Long: `create a new Atmo project, WebAssembly runnable or handler`, + } + + create.AddCommand(command.CreateProjectCmd()) + create.AddCommand(command.CreateModuleCmd()) + // TODO: turn into create workflow command + // Ref: https://github.com/suborbital/subo/issues/347 + // create.AddCommand(command.CreateHandlerCmd()). + + cmd.AddCommand(docsCommand()) + cmd.AddCommand(command.BuildCmd()) + cmd.AddCommand(command.CleanCmd()) + // TODO: Re-enable when dev is updated to work with e2core + // cmd.AddCommand(command.DevCmd()) + + // se2 related commands. 
+ create.AddCommand(command.SE2CreateTokenCommand()) + cmd.AddCommand(command.SE2DeployCommand()) + + // experimental hidden commands + if features.EnableReleaseCommands { + create.AddCommand(command.CreateReleaseCmd()) + } + + if features.EnableRegistryCommands { + cmd.AddCommand(command.PushCmd()) + + // TODO: figure out how not to clash with the se2 deploy commsnd + // cmd.AddCommand(command.DeployCmd()) + } + + cmd.AddCommand(create) + cmd.SetVersionTemplate("e2 CLI v{{.Version}}\n") + + return cmd +} + +func docsCommand() *cobra.Command { + docs := &cobra.Command{ + Use: "docs", + Short: "documentation generation resources", + Long: "test and generate code embedded markdown documentation", + } + + docs.AddCommand(command.DocsBuildCmd()) + docs.AddCommand(command.DocsTestCmd()) + + return docs +} diff --git a/e2/scn/api.go b/e2/scn/api.go new file mode 100644 index 00000000..e8ce8ac8 --- /dev/null +++ b/e2/scn/api.go @@ -0,0 +1,141 @@ +package scn + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/scn/types" +) + +const ( + DefaultEndpoint = "https://api.suborbital.network" + tokenRequestHeaderKey = "X-Suborbital-Env-Token" +) + +// API is an API client. +type API struct { + endpoint string +} + +// VerifiedAPI is an API that has an email-verified access level. +type VerifiedAPI struct { + api *API + verifier *types.RequestVerifier +} + +// EnvironmentAPI is an API authenticated to a particular SCN environment. +type EnvironmentAPI struct { + api *API + token string +} + +func New(endpoint string) *API { + s := &API{ + endpoint: endpoint, + } + + return s +} + +// ForVerifiedEmail verifies an email address is correct and then creates a VerifiedAPI object. 
+func (a *API) ForVerifiedEmail(email string, codeFn func() (string, error)) (*VerifiedAPI, error) { + verifier, err := a.createEmailVerifier(email) + if err != nil { + return nil, errors.Wrap(err, "failed to createEmailVerifier") + } + + code, err := codeFn() + if err != nil { + return nil, errors.Wrap(err, "failed to get verifier code") + } + + reqVerifier := &types.RequestVerifier{ + UUID: verifier.UUID, + Code: code, + } + + verified := &VerifiedAPI{ + api: a, + verifier: reqVerifier, + } + + return verified, nil +} + +// ForEnvironment returns an EnvironmentAPI scoped to the given token. +func (a *API) ForEnvironment(token string) (*EnvironmentAPI, error) { + env := &EnvironmentAPI{ + api: a, + token: token, + } + + return env, nil +} + +// do performs a request. +func (a *API) do(method string, URI string, body, result interface{}) error { + return a.doWithHeaders(method, URI, nil, body, result) +} + +// doWithHeaders performs a request with the provided headers. +func (a *API) doWithHeaders(method string, URI string, headers map[string]string, body, result interface{}) error { + var buffer io.Reader + + URL, err := url.Parse(fmt.Sprintf("%s%s", a.endpoint, URI)) + if err != nil { + return errors.Wrap(err, "faield to parse URL") + } + + if body != nil { + bodyJSON, err := json.Marshal(body) + if err != nil { + return errors.Wrap(err, "failed to Marshal") + } + + buffer = bytes.NewBuffer(bodyJSON) + } + + request, err := http.NewRequest(method, URL.String(), buffer) + if err != nil { + return errors.Wrap(err, "failed to NewRequest") + } + + if headers != nil { + reqHeader := request.Header + for k, v := range headers { + reqHeader.Set(k, v) + } + + request.Header = reqHeader + } + + resp, err := http.DefaultClient.Do(request) + if err != nil { + return errors.Wrap(err, "failed to Do request") + } + + if resp.StatusCode > 299 { + return fmt.Errorf("failed to Do request, received status code %d", resp.StatusCode) + } + + if result != nil { + defer 
resp.Body.Close() + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return errors.Wrap(err, "failed to ReadAll body") + } + + if err := json.Unmarshal(body, result); err != nil { + return errors.Wrap(err, "failed to Unmarshal body") + } + } + + return nil +} diff --git a/e2/scn/emailverifier.go b/e2/scn/emailverifier.go new file mode 100644 index 00000000..d543dafb --- /dev/null +++ b/e2/scn/emailverifier.go @@ -0,0 +1,25 @@ +package scn + +import ( + "net/http" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/scn/types" +) + +// createEmailVerifier creates an emailverifier (used internally by API.Verify). +func (a *API) createEmailVerifier(email string) (*types.EmailVerifier, error) { + uri := "/auth/v1/verifier" + + req := &types.CreateEmailVerifierRequest{ + Email: email, + } + + resp := &types.CreateEmailVerifierResponse{} + if err := a.do(http.MethodPost, uri, req, resp); err != nil { + return nil, errors.Wrap(err, "failed to Do") + } + + return &resp.Verifier, nil +} diff --git a/e2/scn/environmenttoken.go b/e2/scn/environmenttoken.go new file mode 100644 index 00000000..c4aa69f4 --- /dev/null +++ b/e2/scn/environmenttoken.go @@ -0,0 +1,26 @@ +package scn + +import ( + "net/http" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/scn/types" +) + +// CreateEnvironmentToken creates an environment token. 
+func (a *VerifiedAPI) CreateEnvironmentToken() (*types.CreateEnvironmentTokenResponse, error) { + uri := "/auth/v1/token" + + req := &types.CreateEnvironmentTokenRequest{ + Verifier: a.verifier, + Env: "", + } + + resp := &types.CreateEnvironmentTokenResponse{} + if err := a.api.do(http.MethodPost, uri, req, resp); err != nil { + return nil, errors.Wrap(err, "failed to Do") + } + + return resp, nil +} diff --git a/e2/scn/telemetry.go b/e2/scn/telemetry.go new file mode 100644 index 00000000..74762059 --- /dev/null +++ b/e2/scn/telemetry.go @@ -0,0 +1,24 @@ +package scn + +import ( + "net/http" + + "github.com/pkg/errors" + + "github.com/suborbital/e2core/e2/scn/types" +) + +// SendHeartbeat sends a telemetry heartbeat request. +func (e *EnvironmentAPI) SendHeartbeat(heartbeat *types.HeartbeatRequest) error { + uri := "/telemetry/v1/heartbeat" + + headers := map[string]string{ + tokenRequestHeaderKey: e.token, + } + + if err := e.api.doWithHeaders(http.MethodPost, uri, headers, heartbeat, nil); err != nil { + return errors.Wrap(err, "failed to doWithHeaders") + } + + return nil +} diff --git a/e2/scn/types/emailverifier.go b/e2/scn/types/emailverifier.go new file mode 100644 index 00000000..eda3f756 --- /dev/null +++ b/e2/scn/types/emailverifier.go @@ -0,0 +1,29 @@ +package types + +import "time" + +// EmailVerifier is an email verification record. +type EmailVerifier struct { + ID int64 `json:"-" db:"id"` + UUID string `json:"uuid" db:"uuid"` + UserUUID string `json:"userUuid" db:"user_uuid"` + Code string `json:"-" db:"code"` + CreatedAt *time.Time `json:"createdAt" db:"created_at"` + State string `json:"state" db:"state"` +} + +// RequestVerifier is a verifier used in an HTTP request. +type RequestVerifier struct { + UUID string `json:"uuid"` + Code string `json:"code"` +} + +// CreateEmailVerifierRequest is a request for an email verifier. 
+type CreateEmailVerifierRequest struct { + Email string `json:"email"` +} + +// CreateEmailVerifierResponse is a response to a CreateEmailVerifierRequest. +type CreateEmailVerifierResponse struct { + Verifier EmailVerifier `json:"verifier"` +} diff --git a/e2/scn/types/environmenttoken.go b/e2/scn/types/environmenttoken.go new file mode 100644 index 00000000..9f784af5 --- /dev/null +++ b/e2/scn/types/environmenttoken.go @@ -0,0 +1,12 @@ +package types + +// CreateEnvironmentTokenRequest is a request to create an environment token. +type CreateEnvironmentTokenRequest struct { + Verifier *RequestVerifier + Env string `json:"env"` +} + +// CreateEnvironmentTokenResponse is a response to a create token request. +type CreateEnvironmentTokenResponse struct { + Token string `json:"token"` +} diff --git a/e2/scn/types/heartbeat.go b/e2/scn/types/heartbeat.go new file mode 100644 index 00000000..3b8216b4 --- /dev/null +++ b/e2/scn/types/heartbeat.go @@ -0,0 +1,13 @@ +package types + +// HeartbeatRequest is a request to send heartbeat data. +type HeartbeatRequest struct { + Version string `json:"version"` + Runnables *RunnableStats `json:"runnables"` +} + +// RunnableStats are stats about runnables. +type RunnableStats struct { + TotalCount int `json:"totalCount"` + IdentCount int `json:"identCount"` +} diff --git a/e2/scripts/smoketest.sh b/e2/scripts/smoketest.sh new file mode 100755 index 00000000..6f9dfea1 --- /dev/null +++ b/e2/scripts/smoketest.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +set -eu + +TEST_PROJECT="smoketest" + +trap 'catch $? $LINENO' EXIT + +catch() { + if [[ "$1" != "0" ]]; then + echo "An Error $1 occurred on $2" + fi + + # return to origin, clear directory stack + pushd -0 > /dev/null && dirs -c + + if [[ -d "$TEST_PROJECT" ]]; then + echo "Cleaning up test artifacts..." 
+ rm -rf "$TEST_PROJECT" || echo "Failed to clean up test artifacts, if this was a permissions error try using 'sudo rm -rf $TEST_PROJECT'" + fi +} + +# create a new project +subo create project "$TEST_PROJECT" + +# enter project directory +pushd "$TEST_PROJECT" > /dev/null + +# create a runnable for each supported language +subo create runnable rs-test --lang rust +subo create runnable swift-test --lang swift +subo create runnable as-test --lang assemblyscript +subo create runnable tinygo-test --lang tinygo +subo create runnable js-test --lang javascript + +# build project bundle +subo build . \ No newline at end of file diff --git a/e2/templates/assemblyscript/asconfig.json.tmpl b/e2/templates/assemblyscript/asconfig.json.tmpl new file mode 100644 index 00000000..fac14701 --- /dev/null +++ b/e2/templates/assemblyscript/asconfig.json.tmpl @@ -0,0 +1,12 @@ +{ + "targets": { + "release": { + "binaryFile": "{{ .Name }}.wasm", + "optimizeLevel": 3, + "shrinkLevel": 1, + "converge": false, + "noAssert": false + } + }, + "options": {} +} \ No newline at end of file diff --git a/e2/templates/assemblyscript/package.json.tmpl b/e2/templates/assemblyscript/package.json.tmpl new file mode 100644 index 00000000..da5b5b25 --- /dev/null +++ b/e2/templates/assemblyscript/package.json.tmpl @@ -0,0 +1,19 @@ +{ + "name": "{{ .Name }}", + "version": "{{ .Version }}", + "description": "", + "main": "src/index.ts", + "scripts": { + "test": "node tests", + "asbuild": "asc src/index.ts --target release --use abort=src/index/abort" + }, + "author": "", + "license": "ISC", + "devDependencies": { + "assemblyscript": "^0.19" + }, + "dependencies": { + "@assemblyscript/loader": "^0.19", + "@suborbital/suborbital": "^{{ .APIVersion }}" + } +} diff --git a/e2/templates/assemblyscript/src/index.ts b/e2/templates/assemblyscript/src/index.ts new file mode 100644 index 00000000..38f997ab --- /dev/null +++ b/e2/templates/assemblyscript/src/index.ts @@ -0,0 +1,33 @@ +// DO NOT EDIT; generated file + 
+import { return_result, return_abort, toFFI, fromFFI, getIdent, setIdent } from "@suborbital/suborbital"; +import { run } from "./lib" + +export function run_e(ptr: usize, size: i32, ident: i32): void { + // set the current ident for other API methods to use + setIdent(ident) + + // read the memory that was passed as input + var inBuffer = fromFFI(ptr, size) + + // execute the Runnable + let result = run(inBuffer) + + // return the result to the host + return_result(changetype<usize>(result), result.byteLength, getIdent()) +} + +export function allocate(size: i32): usize { + return heap.alloc(size) +} + +export function deallocate(ptr: i32, _: i32): void { + heap.free(ptr) +} + +function abort(message: string | null, fileName: string | null, lineNumber: u32, columnNumber: u32): void { + let msgFFI = toFFI(String.UTF8.encode(message ? message : "")) + let fileFFI = toFFI(String.UTF8.encode(fileName ? fileName : "")) + + return_abort(msgFFI.ptr, msgFFI.size, fileFFI.ptr, fileFFI.size, lineNumber, columnNumber, getIdent()) +} \ No newline at end of file diff --git a/e2/templates/assemblyscript/src/lib.ts b/e2/templates/assemblyscript/src/lib.ts new file mode 100644 index 00000000..b644636f --- /dev/null +++ b/e2/templates/assemblyscript/src/lib.ts @@ -0,0 +1,11 @@ +import { logInfo } from "@suborbital/suborbital" + +export function run(input: ArrayBuffer): ArrayBuffer { + let inStr = String.UTF8.decode(input) + + let out = "hello, " + inStr + + logInfo(out) + + return String.UTF8.encode(out) +} \ No newline at end of file diff --git a/e2/templates/assemblyscript/src/tsconfig.json b/e2/templates/assemblyscript/src/tsconfig.json new file mode 100644 index 00000000..e28fcf25 --- /dev/null +++ b/e2/templates/assemblyscript/src/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "assemblyscript/std/assembly.json", + "include": [ + "./**/*.ts" + ] +} \ No newline at end of file diff --git a/e2/templates/grain/index.gr b/e2/templates/grain/index.gr new file mode 100644 index 
00000000..0a787611 --- /dev/null +++ b/e2/templates/grain/index.gr @@ -0,0 +1,59 @@ +// GENERATED FILE; DO NOT EDIT + +import Memory from "runtime/unsafe/memory" +import WasmI32 from "runtime/unsafe/wasmi32" +import { allocateBytes } from "runtime/dataStructures" +import Conv from "runtime/unsafe/conv" + +import Bytes from "bytes" + +import Env from "suborbital/env" +import FFI from "suborbital/ffi" +import Lib from "./lib" + +@disableGC +let bytesOfExt = (ptr, size) => { + let bytes = allocateBytes(size) + Memory.copy(WasmI32.add(bytes, 8n), ptr, size) + WasmI32.toGrain(bytes): (Bytes) +} + +@disableGC +export let run_e = (ptr, size, ident) => { + let ident = Conv.toInt32(ident) + + Memory.incRef(WasmI32.fromGrain(FFI.setIdent)) + Memory.incRef(WasmI32.fromGrain(ident)) + FFI.setIdent(ident) + + let bytes = bytesOfExt(ptr, size) + + Memory.incRef(WasmI32.fromGrain(Lib.run)) + let result = Lib.run(bytes) + match (result) { + Ok(bytes) => { + Memory.incRef(WasmI32.fromGrain(Env.returnResult)) + Memory.incRef(WasmI32.fromGrain(bytes)) + Env.returnResult(bytes, ident) + }, + Err((code, msg)) => { + Memory.incRef(WasmI32.fromGrain(Env.returnError)) + Memory.incRef(WasmI32.fromGrain(code)) + Memory.incRef(WasmI32.fromGrain(msg)) + Env.returnError(code, msg, ident) + }, + } + + Memory.decRef(WasmI32.fromGrain(result)) + void +} + +@disableGC +export let allocate = size => { + Memory.malloc(size) +} + +@disableGC +export let deallocate = (ptr, _) => { + Memory.free(ptr) +} diff --git a/e2/templates/grain/lib.gr b/e2/templates/grain/lib.gr new file mode 100644 index 00000000..ffbb0a4b --- /dev/null +++ b/e2/templates/grain/lib.gr @@ -0,0 +1,5 @@ +import Bytes from "bytes" + +export let run: Bytes -> Result<Bytes, (Int32, String)> = input => { + Ok(Bytes.concat(Bytes.fromString("hello, "), input)) +} diff --git a/e2/templates/javascript/package.json.tmpl b/e2/templates/javascript/package.json.tmpl new file mode 100644 index 00000000..d6783e4e --- /dev/null +++ 
b/e2/templates/javascript/package.json.tmpl @@ -0,0 +1,17 @@ +{ + "name": "{{ .Name }}", + "description": "", + "version": "{{ .Version }}", + "dependencies": { + "@suborbital/runnable": "^{{ .APIVersion }}", + "fastestsmallesttextencoderdecoder-encodeinto": "^1.0.22" + }, + "devDependencies": { + "webpack": "^5.38.1", + "webpack-cli": "^4.7.2" + }, + "scripts": { + "prebuild": "webpack", + "build": "javy build/index.js -o {{ .Name }}.wasm" + } +} diff --git a/e2/templates/javascript/src/index.js b/e2/templates/javascript/src/index.js new file mode 100644 index 00000000..a9db9954 --- /dev/null +++ b/e2/templates/javascript/src/index.js @@ -0,0 +1,16 @@ +import "fastestsmallesttextencoderdecoder-encodeinto/EncoderDecoderTogether.min.js"; +import { run } from "./lib"; + +import { setup, runnable } from "@suborbital/runnable"; + +const decoder = new TextDecoder(); + +export function run_e(payload, ident) { + // Imports will be injected by the runtime + setup(this.imports, ident); + + const input = decoder.decode(payload); + const result = run(input); + + runnable.returnResult(result); +} diff --git a/e2/templates/javascript/src/lib.js b/e2/templates/javascript/src/lib.js new file mode 100644 index 00000000..2c65f9cc --- /dev/null +++ b/e2/templates/javascript/src/lib.js @@ -0,0 +1,9 @@ +import { log } from "@suborbital/runnable"; + +export const run = (input) => { + let message = "Hello, " + input; + + log.info(message); + + return message; +}; diff --git a/e2/templates/javascript/webpack.config.js b/e2/templates/javascript/webpack.config.js new file mode 100644 index 00000000..4524ab7e --- /dev/null +++ b/e2/templates/javascript/webpack.config.js @@ -0,0 +1,20 @@ +const path = require("path"); +module.exports = { + mode: "production", + target: "es2019", + devtool: "cheap-module-source-map", + optimization: { + sideEffects: true, + }, + resolve: { + extensions: [".js"], + }, + output: { + libraryTarget: "umd", + globalObject: "this", + filename: "index.js", + path: 
path.join(__dirname, "build"), + library: "Suborbital", + chunkFormat: "array-push", + }, +}; diff --git a/e2/templates/k8s/atmo-deployment.yaml.tmpl b/e2/templates/k8s/atmo-deployment.yaml.tmpl new file mode 100644 index 00000000..cd8f1a72 --- /dev/null +++ b/e2/templates/k8s/atmo-deployment.yaml.tmpl @@ -0,0 +1,43 @@ +apiVersion: apps/v1 +kind: Deployment + +metadata: + name: {{ .Identifier }}-deployment + namespace: suborbital + labels: + app: atmo + +spec: + replicas: 1 + + selector: + matchLabels: + app: atmo + + template: + metadata: + labels: + app: atmo + + spec: + containers: + - name: atmo + image: {{ .ImageName }} + command: ["atmo"] + + ports: + - containerPort: 8080 + - containerPort: 443 + + env: + - name: ATMO_DOMAIN + value: {{ .Domain }} + + - name: ATMO_HTTP_PORT + value: "8080" + + - name: ATMO_LOG_LEVEL + value: "info" + + - name: APP_VERSION + value: {{ .AppVersion }} \ No newline at end of file diff --git a/e2/templates/k8s/atmo-svc.yaml.tmpl b/e2/templates/k8s/atmo-svc.yaml.tmpl new file mode 100644 index 00000000..a72b296a --- /dev/null +++ b/e2/templates/k8s/atmo-svc.yaml.tmpl @@ -0,0 +1,21 @@ +apiVersion: v1 +kind: Service + +metadata: + name: {{ .Identifier }}-svc + namespace: suborbital + +spec: + selector: + app: atmo + + ports: + - port: 80 + targetPort: 8080 + name: http + + - port: 443 + targetPort: 443 + name: https + + type: LoadBalancer \ No newline at end of file diff --git a/e2/templates/project/.gitignore b/e2/templates/project/.gitignore new file mode 100644 index 00000000..63a5765b --- /dev/null +++ b/e2/templates/project/.gitignore @@ -0,0 +1,5 @@ +target/ +.build/ +node_modules/ +runnables.wasm.zip +.deployment \ No newline at end of file diff --git a/e2/templates/project/Directive.yaml.tmpl b/e2/templates/project/Directive.yaml.tmpl new file mode 100644 index 00000000..c2806f32 --- /dev/null +++ b/e2/templates/project/Directive.yaml.tmpl @@ -0,0 +1,16 @@ +# the Directive is a complete description of your application, 
including all of its business logic. +# appVersion should be updated for each new deployment of your app. +# atmoVersion declares which version of Atmo is used for the `subo dev` command. + +identifier: {{ .Environment }}.{{ .Name }} +appVersion: v0.1.0 +atmoVersion: v{{ .AtmoVersion }} +{{ if .Headless }}headless: true{{ end }} +{{ if not .Headless }} +handlers: + - type: request + resource: /hello + method: POST + steps: + - fn: helloworld +{{ end }} \ No newline at end of file diff --git a/e2/templates/project/Dockerfile.tmpl b/e2/templates/project/Dockerfile.tmpl new file mode 100644 index 00000000..4b756cb2 --- /dev/null +++ b/e2/templates/project/Dockerfile.tmpl @@ -0,0 +1,5 @@ +FROM suborbital/atmo:v{{ .AtmoVersion }} + +COPY ./runnables.wasm.zip . + +ENTRYPOINT [ "atmo" ] diff --git a/e2/templates/project/helloworld/.runnable.yaml.tmpl b/e2/templates/project/helloworld/.runnable.yaml.tmpl new file mode 100755 index 00000000..a5ffa695 --- /dev/null +++ b/e2/templates/project/helloworld/.runnable.yaml.tmpl @@ -0,0 +1,4 @@ +name: helloworld +namespace: default +lang: rust +apiVersion: {{ .APIVersion }} diff --git a/e2/templates/project/helloworld/Cargo.toml.tmpl b/e2/templates/project/helloworld/Cargo.toml.tmpl new file mode 100755 index 00000000..365c027b --- /dev/null +++ b/e2/templates/project/helloworld/Cargo.toml.tmpl @@ -0,0 +1,13 @@ +[package] +name = "helloworld" +version = "0.1.0" +authors = ["Suborbital Runnable <info@suborbital.dev>"] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +suborbital = '{{ .APIVersion }}' \ No newline at end of file diff --git a/e2/templates/project/helloworld/src/lib.rs b/e2/templates/project/helloworld/src/lib.rs new file mode 100755 index 00000000..1202bdc1 --- /dev/null +++ b/e2/templates/project/helloworld/src/lib.rs @@ -0,0 +1,20 @@ +use suborbital::runnable::*; + +struct HelloWorld{} + 
+impl Runnable for HelloWorld { + fn run(&self, input: Vec<u8>) -> Result<Vec<u8>, RunErr> { + let in_string = String::from_utf8(input).unwrap(); + + Ok(String::from(format!("hello {}", in_string)).as_bytes().to_vec()) + } +} + + +// initialize the runner, do not edit below // +static RUNNABLE: &HelloWorld = &HelloWorld{}; + +#[no_mangle] +pub extern fn init() { + use_runnable(RUNNABLE); +} diff --git a/e2/templates/rust/Cargo.toml.tmpl b/e2/templates/rust/Cargo.toml.tmpl new file mode 100644 index 00000000..95df8f9c --- /dev/null +++ b/e2/templates/rust/Cargo.toml.tmpl @@ -0,0 +1,13 @@ +[package] +name = "{{ .Name }}" +version = "0.1.0" +authors = ["Suborbital Runnable <info@suborbital.dev>"] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +suborbital = '{{ .APIVersion }}' \ No newline at end of file diff --git a/e2/templates/rust/src/lib.rs.tmpl b/e2/templates/rust/src/lib.rs.tmpl new file mode 100644 index 00000000..45ffcea8 --- /dev/null +++ b/e2/templates/rust/src/lib.rs.tmpl @@ -0,0 +1,20 @@ +use suborbital::runnable::*; + +struct {{ .NameCamel }}{} + +impl Runnable for {{ .NameCamel }} { + fn run(&self, input: Vec<u8>) -> Result<Vec<u8>, RunErr> { + let in_string = String::from_utf8(input).unwrap(); + + Ok(String::from(format!("hello {}", in_string)).as_bytes().to_vec()) + } +} + + +// initialize the runner, do not edit below // +static RUNNABLE: &{{ .NameCamel }} = &{{ .NameCamel }}{}; + +#[no_mangle] +pub extern fn _start() { + use_runnable(RUNNABLE); +} diff --git a/e2/templates/scc-docker/.who b/e2/templates/scc-docker/.who new file mode 100644 index 00000000..e69de29b diff --git a/e2/templates/scc-docker/SCC.env.tmpl b/e2/templates/scc-docker/SCC.env.tmpl new file mode 100644 index 00000000..2a403145 --- /dev/null +++ b/e2/templates/scc-docker/SCC.env.tmpl @@ -0,0 +1 @@ +SCC_ENV_TOKEN={{ .EnvToken }} \ No newline at end of 
file diff --git a/e2/templates/scc-docker/config/scc-config.yaml b/e2/templates/scc-docker/config/scc-config.yaml new file mode 100644 index 00000000..1d17ac92 --- /dev/null +++ b/e2/templates/scc-docker/config/scc-config.yaml @@ -0,0 +1,24 @@ +networkRules: &networkRules + allowIPs: false + allowPrivate: false + allowHTTP: true + blockedDomains: + - "*.cluster.local" + - "scc-controlplane-service" + - "scc-builder-service" + +capabilities: + logger: + enabled: true + http: + enabled: true + rules: *networkRules + graphql: + enabled: true + rules: *networkRules + cache: + enabled: false + file: + enabled: false + db: + enabled: false \ No newline at end of file diff --git a/e2/templates/scc-docker/docker-compose.yml.tmpl b/e2/templates/scc-docker/docker-compose.yml.tmpl new file mode 100644 index 00000000..5d67021e --- /dev/null +++ b/e2/templates/scc-docker/docker-compose.yml.tmpl @@ -0,0 +1,48 @@ +version: '3' +services: + scc-control-plane: + image: suborbital/scc-control-plane:{{ .SCCVersion }} + command: controlplane + volumes: + - ./:/home/scn + environment: + SCC_LOG_LEVEL: info + SCC_HEADLESS: "true" + SCC_HTTP_PORT: 8081 + env_file: + - SCC.env + ports: + - "8081:8081" + networks: + - scn + + scc-builder: + image: suborbital/scc-builder:{{ .SCCVersion }} + command: builder + volumes: + - ./:/home/scn + environment: + SCC_LOG_LEVEL: info + SCC_HTTP_PORT: 8082 + ports: + - "8082:8082" + networks: + - scn + + scc-atmo: + image: suborbital/atmo:v0.4.7 + command: atmo + depends_on: + - scc-control-plane + environment: + ATMO_CONTROL_PLANE: "scc-control-plane:8081" + ATMO_HEADLESS: "true" + ATMO_LOG_LEVEL: info + ATMO_HTTP_PORT: 8080 + ports: + - "8080:8080" + networks: + - scn + +networks: + scn: \ No newline at end of file diff --git a/e2/templates/scc-k8s/.suborbital/scc-atmo-deployment.yaml.tmpl b/e2/templates/scc-k8s/.suborbital/scc-atmo-deployment.yaml.tmpl new file mode 100644 index 00000000..4aaa7ae4 --- /dev/null +++ 
b/e2/templates/scc-k8s/.suborbital/scc-atmo-deployment.yaml.tmpl @@ -0,0 +1,57 @@ +apiVersion: apps/v1 +kind: Deployment + +metadata: + name: scc-atmo-deployment + namespace: suborbital + labels: + app: scc-atmo + +spec: + replicas: 1 + + selector: + matchLabels: + app: scc-atmo + + template: + metadata: + labels: + app: scc-atmo + + spec: + containers: + - name: atmo + image: suborbital/atmo:v0.4.7 + command: ["atmo"] + + ports: + - containerPort: 8080 + + env: + - name: ATMO_HTTP_PORT + value: "8080" + + - name: ATMO_LOG_LEVEL + value: "info" + + - name: ATMO_CONTROL_PLANE + value: "scc-controlplane-service:8081" + + - name: ATMO_HEADLESS + value: "true" + +--- + +apiVersion: v1 +kind: Service +metadata: + namespace: suborbital + name: scc-atmo-service +spec: + selector: + app: scc-atmo + ports: + - protocol: TCP + port: 80 + targetPort: 8080 \ No newline at end of file diff --git a/e2/templates/scc-k8s/.suborbital/scc-autoscale.yaml.tmpl b/e2/templates/scc-k8s/.suborbital/scc-autoscale.yaml.tmpl new file mode 100644 index 00000000..0013ee90 --- /dev/null +++ b/e2/templates/scc-k8s/.suborbital/scc-autoscale.yaml.tmpl @@ -0,0 +1,21 @@ +apiVersion: keda.sh/v1alpha1 +kind: ScaledObject +metadata: + name: scc-atmo-scaledobject + namespace: suborbital + labels: + app: scc-atmo +spec: + scaleTargetRef: + name: scc-atmo-deployment + envSourceContainerName: atmo + pollingInterval: 5 + cooldownPeriod: 30 + minReplicaCount: 1 + maxReplicaCount: 50 + triggers: + - type: metrics-api + metadata: + targetValue: "1" + url: "http://scc-atmo-service.suborbital.svc.cluster.local/meta/metrics" + valueLocation: "scheduler.totalThreadCount" diff --git a/e2/templates/scc-k8s/.suborbital/scc-controlplane-deployment.yaml.tmpl b/e2/templates/scc-k8s/.suborbital/scc-controlplane-deployment.yaml.tmpl new file mode 100644 index 00000000..12603516 --- /dev/null +++ b/e2/templates/scc-k8s/.suborbital/scc-controlplane-deployment.yaml.tmpl @@ -0,0 +1,144 @@ +apiVersion: apps/v1 +kind: 
Deployment + +metadata: + name: scc-controlplane-deployment + namespace: suborbital + labels: + app: scc-controlplane + +spec: + replicas: 1 + + selector: + matchLabels: + app: scc-controlplane + + template: + metadata: + labels: + app: scc-controlplane + + spec: + containers: + - name: controlplane + image: suborbital/scc-control-plane:{{ .SCCVersion }} + command: ["controlplane"] + + ports: + - containerPort: 8081 + + env: + - name: SCC_HTTP_PORT + value: "8081" + + - name: SCC_LOG_LEVEL + value: "info" + + - name: SCC_HEADLESS + value: "true" + + - name: SCC_ENV_TOKEN + value: {{ .EnvToken }} + + volumeMounts: + - name: controlplane-storage + mountPath: "/home/scn" + - name: controlplane-config + mountPath: "/home/scn/config" + readOnly: true + + - name: builder + image: suborbital/scc-builder:{{ .SCCVersion }} + command: ["builder"] + + ports: + - containerPort: 8080 + - containerPort: 8443 + + env: + - name: SCC_DOMAIN + value: "{{ .BuilderDomain }}" + + - name: SCC_TLS_PORT + value: "8443" + + - name: SCC_LOG_LEVEL + value: "info" + + - name: SCC_CONTROL_PLANE + value: "scc-controlplane-service:8081" + + volumeMounts: + - name: controlplane-storage + mountPath: "/home/scn" + - name: controlplane-config + mountPath: "/home/scn/config" + readOnly: true + + initContainers: + - name: scc-init + image: busybox + command: ["/bin/chmod","-R","777", "/data"] + volumeMounts: + - name: controlplane-storage + mountPath: /data + + volumes: + - name: controlplane-storage + persistentVolumeClaim: + claimName: scc-controlplane-pvc + - name: controlplane-config + configMap: + name: scc-config + +--- + +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + namespace: suborbital + name: scc-controlplane-pvc +spec: + accessModes: + - ReadWriteOnce + storageClassName: {{ .StorageClassName }} + resources: + requests: + storage: 5Gi + +--- + +apiVersion: v1 +kind: Service +metadata: + namespace: suborbital + name: scc-controlplane-service +spec: + selector: + app: 
scc-controlplane + ports: + - protocol: TCP + port: 8081 + targetPort: 8081 + +--- + +apiVersion: v1 +kind: Service +metadata: + namespace: suborbital + name: scc-builder-service +spec: + selector: + app: scc-controlplane + ports: + - protocol: TCP + name: challenge + port: 80 + targetPort: 8080 + - protocol: TCP + name: https + port: 443 + targetPort: 8443 + type: LoadBalancer diff --git a/e2/templates/scc-k8s/config/scc-config.yaml b/e2/templates/scc-k8s/config/scc-config.yaml new file mode 100644 index 00000000..1d17ac92 --- /dev/null +++ b/e2/templates/scc-k8s/config/scc-config.yaml @@ -0,0 +1,24 @@ +networkRules: &networkRules + allowIPs: false + allowPrivate: false + allowHTTP: true + blockedDomains: + - "*.cluster.local" + - "scc-controlplane-service" + - "scc-builder-service" + +capabilities: + logger: + enabled: true + http: + enabled: true + rules: *networkRules + graphql: + enabled: true + rules: *networkRules + cache: + enabled: false + file: + enabled: false + db: + enabled: false \ No newline at end of file diff --git a/e2/templates/swift/.gitignore b/e2/templates/swift/.gitignore new file mode 100644 index 00000000..95c43209 --- /dev/null +++ b/e2/templates/swift/.gitignore @@ -0,0 +1,5 @@ +.DS_Store +/.build +/Packages +/*.xcodeproj +xcuserdata/ diff --git a/e2/templates/swift/Package.swift.tmpl b/e2/templates/swift/Package.swift.tmpl new file mode 100644 index 00000000..575ada96 --- /dev/null +++ b/e2/templates/swift/Package.swift.tmpl @@ -0,0 +1,18 @@ +// swift-tools-version:5.3 +// The swift-tools-version declares the minimum version of Swift required to build this package. + +import PackageDescription + +let package = Package( + name: "{{ .Name }}", + dependencies: [ + .package(name: "Suborbital", url: "https://github.com/suborbital/reactr.git", from: "{{ .APIVersion }}") + ], + targets: [ + // Targets are the basic building blocks of a package. A target can define a module or a test suite. 
+ // Targets can depend on other targets in this package, and on products in packages this package depends on. + .target( + name: "{{ .Name }}", + dependencies: ["Suborbital"]) + ] +) diff --git a/e2/templates/swift/Sources/{{ .Name }}.tmpl/main.swift.tmpl b/e2/templates/swift/Sources/{{ .Name }}.tmpl/main.swift.tmpl new file mode 100644 index 00000000..a7546a14 --- /dev/null +++ b/e2/templates/swift/Sources/{{ .Name }}.tmpl/main.swift.tmpl @@ -0,0 +1,9 @@ +import Suborbital + +class {{ .NameCamel }}: Suborbital.Runnable { + func run(input: String) -> String { + return "hello " + input + } +} + +Suborbital.Set(runnable: {{ .NameCamel }}()) diff --git a/e2/templates/tinygo/go.mod.tmpl b/e2/templates/tinygo/go.mod.tmpl new file mode 100644 index 00000000..96ccc482 --- /dev/null +++ b/e2/templates/tinygo/go.mod.tmpl @@ -0,0 +1,7 @@ +module vendor.suborbital.network/{{ .Name }} + +require ( + github.com/suborbital/reactr v{{ .APIVersion }} +) + +go 1.17 diff --git a/e2/templates/tinygo/main.go.tmpl b/e2/templates/tinygo/main.go.tmpl new file mode 100644 index 00000000..aa8d0535 --- /dev/null +++ b/e2/templates/tinygo/main.go.tmpl @@ -0,0 +1,16 @@ +package main + +import ( + "github.com/suborbital/reactr/api/tinygo/runnable" +) + +type {{ .NameCamel }} struct{} + +func (h {{ .NameCamel }}) Run(input []byte) ([]byte, error) { + return []byte("Hello, " + string(input)), nil +} + +// initialize runnable, do not edit // +func main() { + runnable.Use({{ .NameCamel }}{}) +} diff --git a/e2/templates/typescript/package.json.tmpl b/e2/templates/typescript/package.json.tmpl new file mode 100644 index 00000000..99d50c04 --- /dev/null +++ b/e2/templates/typescript/package.json.tmpl @@ -0,0 +1,21 @@ +{ + "name": "{{ .Name }}", + "description": "", + "version": "{{ .Version }}", + "dependencies": { + "@suborbital/runnable": "^{{ .APIVersion }}", + "fastestsmallesttextencoderdecoder-encodeinto": "^1.0.22" + }, + "devDependencies": { + "@types/node": "^15.12.1", + "ts-loader": 
"^9.2.3", + "ts-node": "^10.0.0", + "typescript": "^4.3.5", + "webpack": "^5.38.1", + "webpack-cli": "^4.7.2" + }, + "scripts": { + "prebuild": "webpack", + "build": "javy build/index.js -o {{ .Name }}.wasm" + } +} diff --git a/e2/templates/typescript/src/index.ts b/e2/templates/typescript/src/index.ts new file mode 100644 index 00000000..1652f866 --- /dev/null +++ b/e2/templates/typescript/src/index.ts @@ -0,0 +1,22 @@ +import "fastestsmallesttextencoderdecoder-encodeinto/EncoderDecoderTogether.min.js"; +import { run } from "./lib"; + +import { setup, runnable } from "@suborbital/runnable"; + +declare global { + var TextEncoder: any; + var TextDecoder: any; +} + +const decoder = new TextDecoder(); + +export function run_e(payload: ArrayBuffer, ident: number) { + // Imports will be injected by the runtime + // @ts-ignore + setup(this.imports, ident); + + const input = decoder.decode(payload); + const result = run(input); + + runnable.returnResult(result); +} diff --git a/e2/templates/typescript/src/lib.ts b/e2/templates/typescript/src/lib.ts new file mode 100644 index 00000000..eb6b3468 --- /dev/null +++ b/e2/templates/typescript/src/lib.ts @@ -0,0 +1,9 @@ +import { log } from "@suborbital/runnable"; + +export const run = (input: string): string => { + let message = "Hello, " + input; + + log.info(message); + + return message; +}; diff --git a/e2/templates/typescript/tsconfig.json b/e2/templates/typescript/tsconfig.json new file mode 100644 index 00000000..20d7f38d --- /dev/null +++ b/e2/templates/typescript/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "resolveJsonModule": true, + "allowJs": true, + "strict": true, + "target": "es2019", + "module": "es2020", + "moduleResolution": "node", + "outDir": "dist/", + "rootDir": "src/", + "types": [ + "node", + ], + "lib": [ + "es2019" + ] + }, + "include": ["src/**/*"] +} diff --git a/e2/templates/typescript/webpack.config.js b/e2/templates/typescript/webpack.config.js new file mode 100644 index 
00000000..e66f8511 --- /dev/null +++ b/e2/templates/typescript/webpack.config.js @@ -0,0 +1,29 @@ +const path = require("path"); +module.exports = { + mode: "production", + target: "es2019", + devtool: "cheap-module-source-map", + optimization: { + sideEffects: true, + }, + module: { + rules: [ + { + test: /\.ts$/, + use: "ts-loader", + exclude: /node_modules/, + }, + ], + }, + resolve: { + extensions: [".ts", ".js"], + }, + output: { + libraryTarget: "umd", + globalObject: "this", + filename: "index.js", + path: path.join(__dirname, "build"), + library: "Suborbital", + chunkFormat: "array-push", + }, +}; diff --git a/e2/templates/wat/lib.wat b/e2/templates/wat/lib.wat new file mode 100644 index 00000000..207d678c --- /dev/null +++ b/e2/templates/wat/lib.wat @@ -0,0 +1,79 @@ +(module + (import "env" "return_result" (func $return_result (param i32 i32 i32))) + (memory $memory 1) + (global $heap (mut i32) (i32.const 1024)) + (global $hello_size i32 (i32.const 7)) + (func $run_e (param $payload_ptr i32) (param $payload_size i32) (param $ident i32) (local $tmp1 i32) (local $tmp2 i32) + (local.set $tmp1 ;; allocate some memory + (call $allocate + (global.get $hello_size) + ) + ) + (memory.init 0 ;; load "Hello, " into that memory + (local.get $tmp1) + (i32.const 0) + (global.get $hello_size) + ) + (local.set $tmp2 + (call $concat ;; concat "Hello, " with the payload + (local.get $tmp1) + (global.get $hello_size) + (local.get $payload_ptr) + (local.get $payload_size) + ) + ) + (call $return_result ;; return the concatenated string + (local.get $tmp2) + (i32.add + (global.get $hello_size) + (local.get $payload_size) + ) + (local.get $ident) + ) + ) + (func $concat (param $a_ptr i32) (param $a_size i32) (param $b_ptr i32) (param $b_size i32) (result i32) (local $tmp i32) + (local.set $tmp + (call $allocate + (i32.add + (local.get $a_size) + (local.get $b_size) + ) + ) + ) + (memory.copy + (local.get $tmp) + (local.get $a_ptr) + (local.get $a_size) + ) + (memory.copy + 
(i32.add + (local.get $tmp) + (local.get $a_size) + ) + (local.get $b_ptr) + (local.get $b_size) + ) + (local.get $tmp) + ) + (func $allocate (param $size i32) (result i32) + ;; round size to next multiple of 8 and advance the heap pointer + global.get $heap + global.get $heap + local.get $size + i32.const 7 + i32.add + i32.const 0xfffffff8 + i32.and + i32.add + global.set $heap + ) + (func $deallocate (param $ptr i32) + ;; just leak all memory + nop + ) + (data "Hello, ") + (export "run_e" (func $run_e)) + (export "allocate" (func $allocate)) + (export "deallocate" (func $deallocate)) + (export "memory" (memory $memory)) +) diff --git a/e2/update_checker.go b/e2/update_checker.go new file mode 100644 index 00000000..7e4ca509 --- /dev/null +++ b/e2/update_checker.go @@ -0,0 +1,38 @@ +//go:build !docker + +package main + +import ( + "context" + "time" + + "github.com/suborbital/e2core/e2/cli/release" + "github.com/suborbital/e2core/e2/cli/util" +) + +const checkVersionTimeout = 500 * time.Millisecond + +func checkForUpdates() { + ctx, cancel := context.WithTimeout(context.Background(), checkVersionTimeout) + defer cancel() + + versionChan := make(chan string) + + go func() { + msg := "" + if version, err := release.CheckForLatestVersion(ctx); err != nil { + msg = err.Error() + } else if version != "" { + msg = version + } + + versionChan <- msg + }() + + select { + case msg := <-versionChan: + if msg != "" { + util.LogInfo(msg) + } + } +} diff --git a/e2/update_checker_docker.go b/e2/update_checker_docker.go new file mode 100644 index 00000000..dbfabb74 --- /dev/null +++ b/e2/update_checker_docker.go @@ -0,0 +1,7 @@ +//go:build docker + +package main + +func checkForUpdates() { + // do nothing :) +} diff --git a/go.mod b/go.mod index 1a419375..dbb5693d 100644 --- a/go.mod +++ b/go.mod @@ -4,11 +4,15 @@ go 1.19 require ( github.com/bytecodealliance/wasmtime-go v1.0.0 + github.com/deislabs/go-bindle v0.1.1-0.20220201013943-612c59d27f42 github.com/docker/go-connections 
v0.4.0 + github.com/google/go-github/v41 v41.0.0 github.com/google/uuid v1.3.0 github.com/gorilla/websocket v1.5.0 + github.com/hashicorp/go-version v1.6.0 github.com/lib/pq v1.10.6 github.com/nats-io/nats.go v1.19.1 + github.com/pelletier/go-toml v1.8.1 github.com/pkg/errors v0.9.1 github.com/plar/go-adaptive-radix-tree v1.0.4 github.com/schollz/peerdiscovery v1.6.12 @@ -31,6 +35,7 @@ require ( go.opentelemetry.io/otel/sdk v1.11.1 go.opentelemetry.io/otel/trace v1.11.1 golang.org/x/exp v0.0.0-20221114191408-850992195362 + golang.org/x/mod v0.6.0 golang.org/x/sync v0.1.0 google.golang.org/grpc v1.50.1 gopkg.in/yaml.v2 v2.4.0 @@ -57,6 +62,7 @@ require ( github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect github.com/golang/protobuf v1.5.2 // indirect + github.com/google/go-querystring v1.1.0 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.13.0 // indirect github.com/inconshreveable/mousetrap v1.0.1 // indirect github.com/jackc/chunkreader/v2 v2.0.1 // indirect diff --git a/go.sum b/go.sum index d50b4c8d..53111ff0 100644 --- a/go.sum +++ b/go.sum @@ -247,6 +247,8 @@ github.com/d2g/hardwareaddr v0.0.0-20190221164911-e7d9fbe030e4/go.mod h1:bMl4RjI github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/deislabs/go-bindle v0.1.1-0.20220201013943-612c59d27f42 h1:3cVBSH19n7v34A7IJEoZAacdkWLs1w2YlbltWETOB1Q= +github.com/deislabs/go-bindle v0.1.1-0.20220201013943-612c59d27f42/go.mod h1:Xl2ji6ePNND4HqiEeeF9Fe5X4UHaz0ZpOT1N1PE4OuQ= github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0= github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= 
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= @@ -393,6 +395,10 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-github/v41 v41.0.0 h1:HseJrM2JFf2vfiZJ8anY2hqBjdfY1Vlj/K27ueww4gg= +github.com/google/go-github/v41 v41.0.0/go.mod h1:XgmCA5H323A9rtgExdTcnDkcqp6S30AVACCBDOonIxg= +github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= +github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= @@ -438,6 +444,8 @@ github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FK github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= +github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= @@ -647,6 +655,7 @@ 
github.com/opencontainers/selinux v1.6.0/go.mod h1:VVGKuOLlE7v4PJyT6h7mNWvq1rzqi github.com/opencontainers/selinux v1.8.0/go.mod h1:RScLhm78qiWa2gbVCcGkC7tCGdgk3ogry1nUQF8Evvo= github.com/opencontainers/selinux v1.8.2/go.mod h1:MUIHuUEvKB1wtJjQdOyYRgOnLD2xAPP8dBsCoU0KuF8= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/pelletier/go-toml v1.8.1 h1:1Nf83orprkJyknT6h7zbuEGUEjcyVlCxSUGTENmNCRM= github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= @@ -873,6 +882,7 @@ golang.org/x/crypto v0.0.0-20210314154223-e6e6c4f2bb5b/go.mod h1:T9bdIzuCu7OtxOm golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220817201139-bc19a97f63c8/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.1.0 h1:MDRAIl0xIo9Io2xV565hzXHw3zVseKrJKodhohM5CjU= @@ -909,6 +919,8 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0 
h1:b9gGHsz9/HhJ3HF5DHQytPpuwocVTChQJK3AvoLRD5I= +golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1159,6 +1171,7 @@ google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7 google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/cloud v0.0.0-20151119220103-975617b05ea8/go.mod h1:0H1ncTHf11KCFhTc/+EFRbzSCOZx+VUbRMk55Yv5MYk= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= From c3c5956c33cd841b85f69a9770f2c8e60f40985d Mon Sep 17 00:00:00 2001 From: cohix <connor@suborbital.dev> Date: Sun, 20 Nov 2022 17:16:39 -0500 Subject: [PATCH 2/2] fix conflicts --- go.mod | 1 + go.sum | 3 +++ 2 files changed, 4 insertions(+) diff --git a/go.mod b/go.mod index 45febcae..c3ca5607 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ require ( github.com/google/go-github/v41 v41.0.0 github.com/google/uuid v1.3.0 github.com/gorilla/websocket v1.5.0 + github.com/hashicorp/go-version v1.6.0 github.com/lib/pq v1.10.7 github.com/nats-io/nats.go v1.19.1 github.com/pelletier/go-toml v1.8.1 diff --git a/go.sum b/go.sum index 2b7e1868..ffb2333c 100644 --- a/go.sum +++ b/go.sum @@ -883,6 +883,7 @@ golang.org/x/crypto 
v0.0.0-20210314154223-e6e6c4f2bb5b/go.mod h1:T9bdIzuCu7OtxOm golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220817201139-bc19a97f63c8/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= @@ -921,6 +922,8 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.6.0 h1:b9gGHsz9/HhJ3HF5DHQytPpuwocVTChQJK3AvoLRD5I= +golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=