Compare commits

...

6 Commits

Author SHA1 Message Date
0fdaf9fb15 test: on-demand integration tests
Some checks reported errors
continuous-integration/drone/push Build was killed
continuous-integration/drone/pr Build is failing
2025-03-16 13:16:12 +01:00
1574aa0631 refactor!: status between service/image
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
See #487 (comment)
2025-03-16 12:42:09 +01:00
1723025fbf build: go 1.24
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
We were running behind and there were quite a few deprecations to update.
These were mostly in the upstream copy/pasta package, so the change seems
quite minimal.
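Most of that deprecation churn is the move from types.ExecConfig (github.com/docker/docker/api/types) to ExecOptions in the container types package, as the hunks further down show. A minimal sketch of the new form, using the field names that appear in this changeset (the helper name and package are illustrative, not part of the codebase):

```go
package sketch

import (
	containertypes "github.com/docker/docker/api/types/container"
)

// execOptions mirrors how the updated code builds its exec configuration;
// previously this struct was types.ExecConfig from
// github.com/docker/docker/api/types, with identical field names.
func execOptions(cmd []string) containertypes.ExecOptions {
	return containertypes.ExecOptions{
		AttachStderr: true,
		AttachStdin:  true,
		AttachStdout: true,
		Cmd:          cmd,
	}
}
```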
2025-03-16 12:31:45 +01:00
a2b678caf6 test: reset after undeploy for a clean env version
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
Follows #510.
See https://build.coopcloud.tech/toolshed/abra/2620/1/5.
2025-03-16 11:49:38 +01:00
0a371ec360 fix: integration tests
All checks were successful
continuous-integration/drone/pr Build is passing
continuous-integration/drone/push Build is passing
2025-03-13 08:31:11 +01:00
e58a716fe1 feat(deploy): Simplifies deploy overview (#508)
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/pr Build is passing
This simplifies the deploy overview to show only 3 version fields:
- CURRENT DEPLOYMENT
- CURRENT ENV
- NEW DEPLOYMENT

It also fixes a few errors around version detection.
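The three fields map onto the rows now built in DeployOverview further down in this diff. A self-contained sketch of the reduced layout; PrintDeployOverview and the plain Printf rendering are illustrative only (the real function renders a styled table via formatter.CreateOverview):

```go
package sketch

import "fmt"

// PrintDeployOverview shows only the three version fields listed above;
// the CHAOS and per-env NEW VERSION rows from the old overview are gone.
func PrintDeployOverview(currentDeployment, currentEnv, newDeployment string) {
	rows := [][2]string{
		{"CURRENT DEPLOYMENT", currentDeployment},
		{"CURRENT ENV", currentEnv},
		{"NEW DEPLOYMENT", newDeployment},
	}
	for _, row := range rows {
		fmt.Printf("%-20s %s\n", row[0], row[1])
	}
}
```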

Reviewed-on: #508
Co-authored-by: p4u1 <p4u1_f4u1@riseup.net>
Co-committed-by: p4u1 <p4u1_f4u1@riseup.net>
2025-03-12 16:13:24 +00:00
846 changed files with 25735 additions and 198055 deletions

View File

@ -3,12 +3,12 @@ kind: pipeline
name: coopcloud.tech/abra name: coopcloud.tech/abra
steps: steps:
- name: make check - name: make check
image: golang:1.22 image: golang:1.24
commands: commands:
- make check - make check
- name: make test - name: make test
image: golang:1.22 image: golang:1.24
environment: environment:
CATL_URL: https://git.coopcloud.tech/toolshed/recipes-catalogue-json.git CATL_URL: https://git.coopcloud.tech/toolshed/recipes-catalogue-json.git
commands: commands:
@ -60,7 +60,31 @@ steps:
- make check - make check
- make test - make test
- name: integration test - name: on-demand integration test
image: appleboy/drone-ssh
settings:
host:
- int.coopcloud.tech
username: abra
key:
from_secret: abra_int_private_key
port: 22
command_timeout: 60m
script_stop: true
request_pty: true
script:
- |
wget https://git.coopcloud.tech/toolshed/abra/raw/branch/main/scripts/tests/run-ci-int -O run-ci-int
chmod +x run-ci-int
sh run-ci-int
when:
ref:
- int-*
depends_on:
- make check
- make test
- name: nightly integration test
image: appleboy/drone-ssh image: appleboy/drone-ssh
settings: settings:
host: host:
@ -87,3 +111,8 @@ steps:
volumes: volumes:
- name: deps - name: deps
temp: {} temp: {}
trigger:
action:
exclude:
- synchronized

View File

@ -1,5 +1,5 @@
# Build image # Build image
FROM golang:1.22-alpine AS build FROM golang:1.24-alpine AS build
ENV GOPRIVATE=coopcloud.tech ENV GOPRIVATE=coopcloud.tech

View File

@ -2,7 +2,7 @@ ABRA := ./cmd/abra
KADABRA := ./cmd/kadabra KADABRA := ./cmd/kadabra
COMMIT := $(shell git rev-list -1 HEAD) COMMIT := $(shell git rev-list -1 HEAD)
GOPATH := $(shell go env GOPATH) GOPATH := $(shell go env GOPATH)
GOVERSION := 1.22 GOVERSION := 1.24
LDFLAGS := "-X 'main.Commit=$(COMMIT)'" LDFLAGS := "-X 'main.Commit=$(COMMIT)'"
DIST_LDFLAGS := $(LDFLAGS)" -s -w" DIST_LDFLAGS := $(LDFLAGS)" -s -w"
GCFLAGS := "all=-l -B" GCFLAGS := "all=-l -B"

View File

@ -18,7 +18,7 @@ import (
"coopcloud.tech/abra/pkg/log" "coopcloud.tech/abra/pkg/log"
"coopcloud.tech/abra/pkg/upstream/container" "coopcloud.tech/abra/pkg/upstream/container"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/docker/docker/api/types" containertypes "github.com/docker/docker/api/types/container"
dockerClient "github.com/docker/docker/client" dockerClient "github.com/docker/docker/client"
"github.com/docker/docker/errdefs" "github.com/docker/docker/errdefs"
"github.com/docker/docker/pkg/archive" "github.com/docker/docker/pkg/archive"
@ -134,7 +134,7 @@ func CopyToContainer(cl *dockerClient.Client, containerID, srcPath, dstPath stri
if err != nil { if err != nil {
return err return err
} }
if _, err := container.RunExec(dcli, cl, containerID, &types.ExecConfig{ if _, err := container.RunExec(dcli, cl, containerID, &containertypes.ExecOptions{
AttachStderr: true, AttachStderr: true,
AttachStdin: true, AttachStdin: true,
AttachStdout: true, AttachStdout: true,
@ -162,7 +162,7 @@ func CopyToContainer(cl *dockerClient.Client, containerID, srcPath, dstPath stri
} }
log.Debugf("copy %s from local to %s on container", srcPath, dstPath) log.Debugf("copy %s from local to %s on container", srcPath, dstPath)
copyOpts := types.CopyToContainerOptions{AllowOverwriteDirWithFile: false, CopyUIDGID: false} copyOpts := containertypes.CopyToContainerOptions{AllowOverwriteDirWithFile: false, CopyUIDGID: false}
if err := cl.CopyToContainer(context.Background(), containerID, dstPath, content, copyOpts); err != nil { if err := cl.CopyToContainer(context.Background(), containerID, dstPath, content, copyOpts); err != nil {
return err return err
} }
@ -173,7 +173,7 @@ func CopyToContainer(cl *dockerClient.Client, containerID, srcPath, dstPath stri
if err != nil { if err != nil {
return err return err
} }
if _, err := container.RunExec(dcli, cl, containerID, &types.ExecConfig{ if _, err := container.RunExec(dcli, cl, containerID, &containertypes.ExecOptions{
AttachStderr: true, AttachStderr: true,
AttachStdin: true, AttachStdin: true,
AttachStdout: true, AttachStdout: true,

View File

@ -3,6 +3,7 @@ package app
import ( import (
"context" "context"
"fmt" "fmt"
"strings"
"coopcloud.tech/abra/cli/internal" "coopcloud.tech/abra/cli/internal"
"coopcloud.tech/abra/pkg/app" "coopcloud.tech/abra/pkg/app"
@ -66,8 +67,6 @@ checkout as-is. Recipe commit hashes are also supported as values for
var ( var (
deployWarnMessages []string deployWarnMessages []string
toDeployVersion string toDeployVersion string
isChaosCommit bool
toDeployChaosVersion = config.CHAOS_DEFAULT
) )
app := internal.ValidateApp(args) app := internal.ValidateApp(args)
@ -80,10 +79,6 @@ checkout as-is. Recipe commit hashes are also supported as values for
log.Fatal(err) log.Fatal(err)
} }
if err := lint.LintForErrors(app.Recipe); err != nil {
log.Fatal(err)
}
cl, err := client.New(app.Server) cl, err := client.New(app.Server)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
@ -100,30 +95,22 @@ checkout as-is. Recipe commit hashes are also supported as values for
log.Fatalf("%s is already deployed", app.Name) log.Fatalf("%s is already deployed", app.Name)
} }
toDeployVersion, toDeployChaosVersion, err = getDeployVersion(args, deployMeta, app) toDeployVersion, err = getDeployVersion(args, deployMeta, app)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(fmt.Errorf("get deploy version: %s", err))
} }
if !internal.Chaos { if !internal.Chaos {
isChaosCommit, err = app.Recipe.EnsureVersion(toDeployVersion) _, err = app.Recipe.EnsureVersion(toDeployVersion)
if err != nil { if err != nil {
log.Fatalf("ensure recipe: %s", err)
}
}
if err := lint.LintForErrors(app.Recipe); err != nil {
log.Fatal(err) log.Fatal(err)
} }
if isChaosCommit {
log.Debugf("assuming chaos commit: %s", toDeployVersion)
internal.Chaos = true
toDeployChaosVersion = toDeployVersion
toDeployVersion, err = app.Recipe.GetVersionLabelLocal()
if err != nil {
log.Fatal(err)
}
}
}
if err := validateSecrets(cl, app); err != nil { if err := validateSecrets(cl, app); err != nil {
log.Fatal(err) log.Fatal(err)
} }
@ -154,18 +141,14 @@ checkout as-is. Recipe commit hashes are also supported as values for
log.Fatal(err) log.Fatal(err)
} }
toDeployChaosVersionLabel := toDeployChaosVersion
if app.Recipe.Dirty {
toDeployChaosVersionLabel = formatter.AddDirtyMarker(toDeployChaosVersionLabel)
}
appPkg.ExposeAllEnv(stackName, compose, app.Env) appPkg.ExposeAllEnv(stackName, compose, app.Env)
appPkg.SetRecipeLabel(compose, stackName, app.Recipe.Name) appPkg.SetRecipeLabel(compose, stackName, app.Recipe.Name)
appPkg.SetChaosLabel(compose, stackName, internal.Chaos) appPkg.SetChaosLabel(compose, stackName, internal.Chaos)
if internal.Chaos { if internal.Chaos {
appPkg.SetChaosVersionLabel(compose, stackName, toDeployChaosVersionLabel) appPkg.SetChaosVersionLabel(compose, stackName, toDeployVersion)
} }
appPkg.SetUpdateLabel(compose, stackName, app.Env) appPkg.SetUpdateLabel(compose, stackName, app.Env)
appPkg.SetVersionLabel(compose, stackName, toDeployVersion)
envVars, err := appPkg.CheckEnv(app) envVars, err := appPkg.CheckEnv(app)
if err != nil { if err != nil {
@ -197,19 +180,12 @@ checkout as-is. Recipe commit hashes are also supported as values for
deployedVersion = deployMeta.Version deployedVersion = deployMeta.Version
} }
toWriteVersion := toDeployVersion
if internal.Chaos || isChaosCommit {
toWriteVersion = toDeployChaosVersion
}
if err := internal.DeployOverview( if err := internal.DeployOverview(
app, app,
deployWarnMessages,
deployedVersion, deployedVersion,
deployMeta.ChaosVersion,
toDeployVersion, toDeployVersion,
toDeployChaosVersion, "",
toWriteVersion, deployWarnMessages,
); err != nil { ); err != nil {
log.Fatal(err) log.Fatal(err)
} }
@ -233,43 +209,28 @@ checkout as-is. Recipe commit hashes are also supported as values for
} }
} }
if err := app.WriteRecipeVersion(toWriteVersion, false); err != nil { if err := app.WriteRecipeVersion(toDeployVersion, false); err != nil {
log.Fatalf("writing recipe version failed: %s", err) log.Fatalf("writing recipe version failed: %s", err)
} }
}, },
} }
func getChaosVersion(app app.App, toDeployVersion, toDeployChaosVersion *string) error { func getLatestVersionOrCommit(app app.App) (string, error) {
var err error
*toDeployChaosVersion, err = app.Recipe.ChaosVersion()
if err != nil {
return err
}
*toDeployVersion, err = app.Recipe.GetVersionLabelLocal()
if err != nil {
return err
}
return nil
}
func getLatestVersionOrCommit(app app.App) (string, string, error) {
versions, err := app.Recipe.Tags() versions, err := app.Recipe.Tags()
if err != nil { if err != nil {
return "", "", err return "", err
} }
if len(versions) > 0 && !internal.Chaos { if len(versions) > 0 && !internal.Chaos {
return versions[len(versions)-1], "", nil return versions[len(versions)-1], nil
} }
head, err := app.Recipe.Head() head, err := app.Recipe.Head()
if err != nil { if err != nil {
return "", "", err return "", err
} }
return "", formatter.SmallSHA(head.String()), nil return formatter.SmallSHA(head.String()), nil
} }
// validateArgsAndFlags ensures compatible args/flags. // validateArgsAndFlags ensures compatible args/flags.
@ -296,48 +257,44 @@ func validateSecrets(cl *dockerClient.Client, app app.App) error {
return nil return nil
} }
func getDeployVersion(cliArgs []string, deployMeta stack.DeployMeta, app app.App) (string, string, error) { func getDeployVersion(cliArgs []string, deployMeta stack.DeployMeta, app app.App) (string, error) {
// Chaos mode overrides everything // Chaos mode overrides everything
if internal.Chaos { if internal.Chaos {
v, err := app.Recipe.ChaosVersion() v, err := app.Recipe.ChaosVersion()
if err != nil { if err != nil {
return "", "", err return "", err
} }
cv, err := app.Recipe.GetVersionLabelLocal() log.Debugf("version: taking chaos version: %s", v)
if err != nil { return v, nil
return "", "", err
}
log.Debugf("version: taking chaos version: %s, %s", v, cv)
return v, cv, nil
} }
// Check if the deploy version is set with a cli argument // Check if the deploy version is set with a cli argument
if len(cliArgs) == 2 && cliArgs[1] != "" { if len(cliArgs) == 2 && cliArgs[1] != "" {
log.Debugf("version: taking version from cli arg: %s", cliArgs[1]) log.Debugf("version: taking version from cli arg: %s", cliArgs[1])
return cliArgs[1], "", nil return cliArgs[1], nil
} }
// Check if the recipe has a version in the .env file // Check if the recipe has a version in the .env file
if app.Recipe.EnvVersion != "" && !internal.IgnoreEnvVersion { if app.Recipe.EnvVersion != "" && !internal.IgnoreEnvVersion {
if strings.HasSuffix(app.Recipe.EnvVersionRaw, "+U") {
return "", fmt.Errorf("version: can not redeploy chaos version %s", app.Recipe.EnvVersionRaw)
}
log.Debugf("version: taking version from .env file: %s", app.Recipe.EnvVersion) log.Debugf("version: taking version from .env file: %s", app.Recipe.EnvVersion)
return app.Recipe.EnvVersion, "", nil return app.Recipe.EnvVersion, nil
} }
// Take deployed version // Take deployed version
if deployMeta.IsDeployed { if deployMeta.IsDeployed {
log.Debugf("version: taking deployed version: %s", deployMeta.Version) log.Debugf("version: taking deployed version: %s", deployMeta.Version)
return deployMeta.Version, "", nil return deployMeta.Version, nil
} }
v, vc, err := getLatestVersionOrCommit(app) v, err := getLatestVersionOrCommit(app)
log.Debugf("version: taking new recipe versio: %s, %s", v, vc) log.Debugf("version: taking new recipe version: %s", v)
if err != nil { if err != nil {
log.Fatal(err) return "", err
} }
if v == "" { return v, nil
return vc, vc, nil
}
return v, vc, nil
} }
func init() { func init() {

View File

@ -209,16 +209,7 @@ var AppNewCommand = &cobra.Command{
log.Fatal(err) log.Fatal(err)
} }
if err := app.Recipe.IsDirty(); err != nil { if err := app.WriteRecipeVersion(recipeVersion, false); err != nil {
log.Fatal(err)
}
toWriteVersion := recipeVersion
if internal.Chaos || app.Recipe.Dirty {
toWriteVersion = chaosVersion
}
if err := app.WriteRecipeVersion(toWriteVersion, false); err != nil {
log.Fatalf("writing recipe version failed: %s", err) log.Fatalf("writing recipe version failed: %s", err)
} }
}, },

View File

@ -143,10 +143,10 @@ func showPSOutput(app appPkg.App, cl *dockerClient.Client, deployedVersion, chao
row := []string{ row := []string{
containerStats["service"], containerStats["service"],
containerStats["status"],
containerStats["image"], containerStats["image"],
dVersion, dVersion,
cVersion, cVersion,
containerStats["status"],
} }
rows = append(rows, row) rows = append(rows, row)
@ -170,10 +170,10 @@ func showPSOutput(app appPkg.App, cl *dockerClient.Client, deployedVersion, chao
headers := []string{ headers := []string{
"SERVICE", "SERVICE",
"STATUS",
"IMAGE", "IMAGE",
"VERSION", "VERSION",
"CHAOS", "CHAOS",
"STATUS",
} }
table. table.

View File

@ -183,20 +183,13 @@ beforehand. See "abra app backup" for more.`,
} }
appPkg.SetUpdateLabel(compose, stackName, app.Env) appPkg.SetUpdateLabel(compose, stackName, app.Env)
chaosVersion := config.CHAOS_DEFAULT
if deployMeta.IsChaos {
chaosVersion = deployMeta.ChaosVersion
}
// NOTE(d1): no release notes implemeneted for rolling back // NOTE(d1): no release notes implemeneted for rolling back
if err := internal.NewVersionOverview( if err := internal.DeployOverview(
app, app,
downgradeWarnMessages,
"rollback",
deployMeta.Version, deployMeta.Version,
chaosVersion,
chosenDowngrade, chosenDowngrade,
"", "",
downgradeWarnMessages,
); err != nil { ); err != nil {
log.Fatal(err) log.Fatal(err)
} }
@ -249,7 +242,7 @@ func validateDowngradeVersionArg(
) error { ) error {
parsedDeployedVersion, err := tagcmp.Parse(deployMeta.Version) parsedDeployedVersion, err := tagcmp.Parse(deployMeta.Version)
if err != nil { if err != nil {
return fmt.Errorf("'%s' is not a known version for %s", deployMeta.Version, app.Recipe.Name) return fmt.Errorf("current deployment '%s' is not a known version for %s", deployMeta.Version, app.Recipe.Name)
} }
parsedSpecificVersion, err := tagcmp.Parse(specificVersion) parsedSpecificVersion, err := tagcmp.Parse(specificVersion)

View File

@ -11,7 +11,7 @@ import (
"coopcloud.tech/abra/pkg/log" "coopcloud.tech/abra/pkg/log"
"coopcloud.tech/abra/pkg/upstream/container" "coopcloud.tech/abra/pkg/upstream/container"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/docker/docker/api/types" containertypes "github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/filters" "github.com/docker/docker/api/types/filters"
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )
@ -64,7 +64,7 @@ var AppRunCommand = &cobra.Command{
} }
userCmd := args[2:] userCmd := args[2:]
execCreateOpts := types.ExecConfig{ execCreateOpts := containertypes.ExecOptions{
AttachStderr: true, AttachStderr: true,
AttachStdin: true, AttachStdin: true,
AttachStdout: true, AttachStdout: true,

View File

@ -54,21 +54,12 @@ Passing "--prune/-p" does not remove those volumes.`,
log.Fatalf("%s is not deployed?", app.Name) log.Fatalf("%s is not deployed?", app.Name)
} }
chaosVersion := config.CHAOS_DEFAULT if err := internal.DeployOverview(
if deployMeta.IsChaos {
chaosVersion = deployMeta.ChaosVersion
}
toWriteVersion := deployMeta.Version
if deployMeta.IsChaos {
toWriteVersion = chaosVersion
}
if err := internal.UndeployOverview(
app, app,
deployMeta.Version, deployMeta.Version,
chaosVersion, config.NO_DOMAIN_DEFAULT,
toWriteVersion, "",
nil,
); err != nil { ); err != nil {
log.Fatal(err) log.Fatal(err)
} }
@ -87,7 +78,7 @@ Passing "--prune/-p" does not remove those volumes.`,
} }
} }
if err := app.WriteRecipeVersion(toWriteVersion, false); err != nil { if err := app.WriteRecipeVersion(deployMeta.Version, false); err != nil {
log.Fatalf("writing recipe version failed: %s", err) log.Fatalf("writing recipe version failed: %s", err)
} }
}, },

View File

@ -43,7 +43,8 @@ beforehand. See "abra app backup" for more.`,
ValidArgsFunction: func( ValidArgsFunction: func(
cmd *cobra.Command, cmd *cobra.Command,
args []string, args []string,
toComplete string) ([]string, cobra.ShellCompDirective) { toComplete string,
) ([]string, cobra.ShellCompDirective) {
switch l := len(args); l { switch l := len(args); l {
case 0: case 0:
return autocomplete.AppNameComplete() return autocomplete.AppNameComplete()
@ -206,23 +207,21 @@ beforehand. See "abra app backup" for more.`,
return return
} }
chaosVersion := config.CHAOS_DEFAULT if upgradeReleaseNotes != "" && chosenUpgrade != "" {
if deployMeta.IsChaos { fmt.Print(upgradeReleaseNotes)
chaosVersion = deployMeta.ChaosVersion } else {
upgradeWarnMessages = append(
if deployMeta.ChaosVersion == "" {
chaosVersion = config.UNKNOWN_DEFAULT
}
}
if err := internal.NewVersionOverview(
app,
upgradeWarnMessages, upgradeWarnMessages,
"upgrade", fmt.Sprintf("no release notes available for %s", chosenUpgrade),
)
}
if err := internal.DeployOverview(
app,
deployMeta.Version, deployMeta.Version,
chaosVersion,
chosenUpgrade, chosenUpgrade,
upgradeReleaseNotes, upgradeReleaseNotes,
upgradeWarnMessages,
); err != nil { ); err != nil {
log.Fatal(err) log.Fatal(err)
} }
@ -365,7 +364,7 @@ func validateUpgradeVersionArg(
parsedDeployedVersion, err := tagcmp.Parse(deployMeta.Version) parsedDeployedVersion, err := tagcmp.Parse(deployMeta.Version)
if err != nil { if err != nil {
return err return fmt.Errorf("'%s' is not a known version", deployMeta.Version)
} }
if parsedSpecificVersion.IsLessThan(parsedDeployedVersion) && if parsedSpecificVersion.IsLessThan(parsedDeployedVersion) &&
@ -397,9 +396,7 @@ func ensureDeployed(cl *dockerClient.Client, app app.App) (stack.DeployMeta, err
return deployMeta, nil return deployMeta, nil
} }
var ( var showReleaseNotes bool
showReleaseNotes bool
)
func init() { func init() {
AppUpgradeCommand.Flags().BoolVarP( AppUpgradeCommand.Flags().BoolVarP(

View File

@ -12,6 +12,7 @@ import (
"coopcloud.tech/abra/pkg/upstream/container" "coopcloud.tech/abra/pkg/upstream/container"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/docker/docker/api/types" "github.com/docker/docker/api/types"
containertypes "github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/filters" "github.com/docker/docker/api/types/filters"
dockerClient "github.com/docker/docker/client" dockerClient "github.com/docker/docker/client"
) )
@ -47,7 +48,7 @@ func RunBackupCmdRemote(
backupCmd string, backupCmd string,
containerID string, containerID string,
execEnv []string) (io.Writer, error) { execEnv []string) (io.Writer, error) {
execBackupListOpts := types.ExecConfig{ execBackupListOpts := containertypes.ExecOptions{
AttachStderr: true, AttachStderr: true,
AttachStdin: true, AttachStdin: true,
AttachStdout: true, AttachStdout: true,

View File

@ -14,7 +14,7 @@ import (
"coopcloud.tech/abra/pkg/log" "coopcloud.tech/abra/pkg/log"
"coopcloud.tech/abra/pkg/upstream/container" "coopcloud.tech/abra/pkg/upstream/container"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/docker/docker/api/types" containertypes "github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/filters" "github.com/docker/docker/api/types/filters"
dockerClient "github.com/docker/docker/client" dockerClient "github.com/docker/docker/client"
"github.com/docker/docker/pkg/archive" "github.com/docker/docker/pkg/archive"
@ -42,7 +42,7 @@ func RunCmdRemote(
return err return err
} }
copyOpts := types.CopyToContainerOptions{AllowOverwriteDirWithFile: false, CopyUIDGID: false} copyOpts := containertypes.CopyToContainerOptions{AllowOverwriteDirWithFile: false, CopyUIDGID: false}
if err := cl.CopyToContainer(context.Background(), targetContainer.ID, "/tmp", content, copyOpts); err != nil { if err := cl.CopyToContainer(context.Background(), targetContainer.ID, "/tmp", content, copyOpts); err != nil {
return err return err
} }
@ -55,7 +55,7 @@ func RunCmdRemote(
shell := "/bin/bash" shell := "/bin/bash"
findShell := []string{"test", "-e", shell} findShell := []string{"test", "-e", shell}
execCreateOpts := types.ExecConfig{ execCreateOpts := containertypes.ExecOptions{
AttachStderr: true, AttachStderr: true,
AttachStdin: true, AttachStdin: true,
AttachStdout: true, AttachStdout: true,

View File

@ -10,7 +10,6 @@ import (
"coopcloud.tech/abra/pkg/config" "coopcloud.tech/abra/pkg/config"
"coopcloud.tech/abra/pkg/formatter" "coopcloud.tech/abra/pkg/formatter"
"coopcloud.tech/abra/pkg/log" "coopcloud.tech/abra/pkg/log"
"coopcloud.tech/abra/pkg/recipe"
"coopcloud.tech/tagcmp" "coopcloud.tech/tagcmp"
"github.com/AlecAivazis/survey/v2" "github.com/AlecAivazis/survey/v2"
"github.com/charmbracelet/lipgloss" "github.com/charmbracelet/lipgloss"
@ -38,96 +37,6 @@ func horizontal(left, mid, right string) string {
return lipgloss.JoinHorizontal(lipgloss.Left, left, mid, right) return lipgloss.JoinHorizontal(lipgloss.Left, left, mid, right)
} }
// NewVersionOverview shows an upgrade or downgrade overview
func NewVersionOverview(
app appPkg.App,
warnMessages []string,
kind,
deployedVersion,
deployedChaosVersion,
toDeployVersion,
releaseNotes string) error {
deployConfig := "compose.yml"
if composeFiles, ok := app.Env["COMPOSE_FILE"]; ok {
deployConfig = formatComposeFiles(composeFiles)
}
server := app.Server
if app.Server == "default" {
server = "local"
}
domain := app.Domain
if domain == "" {
domain = config.NO_DOMAIN_DEFAULT
}
upperKind := strings.ToUpper(kind)
envVersion, err := recipe.GetEnvVersionRaw(app.Recipe.Name)
if err != nil {
return err
}
if envVersion == "" {
envVersion = config.NO_VERSION_DEFAULT
}
rows := [][]string{
{"DOMAIN", domain},
{"RECIPE", app.Recipe.Name},
{"SERVER", server},
{"CONFIG", deployConfig},
{"CURRENT DEPLOYMENT", "---"},
{"VERSION", formatter.BoldDirtyDefault(deployedVersion)},
{"CHAOS ", formatter.BoldDirtyDefault(deployedChaosVersion)},
{upperKind, "---"},
{"VERSION", formatter.BoldDirtyDefault(toDeployVersion)},
{fmt.Sprintf("%s.ENV", strings.ToUpper(app.Domain)), "---"},
{"CURRENT VERSION", formatter.BoldDirtyDefault(envVersion)},
{"NEW VERSION", formatter.BoldDirtyDefault(toDeployVersion)},
}
overview := formatter.CreateOverview(
fmt.Sprintf("%s OVERVIEW", upperKind),
rows,
)
fmt.Println(overview)
if releaseNotes != "" && toDeployVersion != "" {
fmt.Print(releaseNotes)
} else {
warnMessages = append(
warnMessages,
fmt.Sprintf("no release notes available for %s", toDeployVersion),
)
}
for _, msg := range warnMessages {
log.Warn(msg)
}
if NoInput {
return nil
}
response := false
prompt := &survey.Confirm{Message: "proceed?"}
if err := survey.AskOne(prompt, &response); err != nil {
return err
}
if !response {
log.Fatal("deployment cancelled")
}
return nil
}
func formatComposeFiles(composeFiles string) string { func formatComposeFiles(composeFiles string) string {
return strings.ReplaceAll(composeFiles, ":", "\n") return strings.ReplaceAll(composeFiles, ":", "\n")
} }
@ -135,12 +44,10 @@ func formatComposeFiles(composeFiles string) string {
// DeployOverview shows a deployment overview // DeployOverview shows a deployment overview
func DeployOverview( func DeployOverview(
app appPkg.App, app appPkg.App,
warnMessages []string,
deployedVersion string, deployedVersion string,
deployedChaosVersion string, toDeployVersion string,
toDeployVersion, info string,
toDeployChaosVersion string, warnMessages []string,
toWriteVersion string,
) error { ) error {
deployConfig := "compose.yml" deployConfig := "compose.yml"
if composeFiles, ok := app.Env["COMPOSE_FILE"]; ok { if composeFiles, ok := app.Env["COMPOSE_FILE"]; ok {
@ -157,21 +64,7 @@ func DeployOverview(
domain = config.NO_DOMAIN_DEFAULT domain = config.NO_DOMAIN_DEFAULT
} }
if app.Recipe.Dirty { envVersion := app.Recipe.EnvVersionRaw
toWriteVersion = formatter.AddDirtyMarker(toWriteVersion)
toDeployChaosVersion = formatter.AddDirtyMarker(toDeployChaosVersion)
}
recipeName, exists := app.Env["RECIPE"]
if !exists {
recipeName = app.Env["TYPE"]
}
envVersion, err := recipe.GetEnvVersionRaw(recipeName)
if err != nil {
return err
}
if envVersion == "" { if envVersion == "" {
envVersion = config.NO_VERSION_DEFAULT envVersion = config.NO_VERSION_DEFAULT
} }
@ -181,24 +74,21 @@ func DeployOverview(
{"RECIPE", app.Recipe.Name}, {"RECIPE", app.Recipe.Name},
{"SERVER", server}, {"SERVER", server},
{"CONFIG", deployConfig}, {"CONFIG", deployConfig},
{"", ""},
{"CURRENT DEPLOYMENT", "---"}, {"CURRENT DEPLOYMENT", formatter.BoldDirtyDefault(deployedVersion)},
{"VERSION", formatter.BoldDirtyDefault(deployedVersion)}, {"ENV VERSION", formatter.BoldDirtyDefault(envVersion)},
{"CHAOS", formatter.BoldDirtyDefault(deployedChaosVersion)}, {"NEW DEPLOYMENT", formatter.BoldDirtyDefault(toDeployVersion)},
{"NEW DEPLOYMENT", "---"},
{"VERSION", formatter.BoldDirtyDefault(toDeployVersion)},
{"CHAOS", formatter.BoldDirtyDefault(toDeployChaosVersion)},
{fmt.Sprintf("%s.ENV", strings.ToUpper(app.Name)), "---"},
{"CURRENT VERSION", formatter.BoldDirtyDefault(envVersion)},
{"NEW VERSION", formatter.BoldDirtyDefault(toWriteVersion)},
} }
overview := formatter.CreateOverview("DEPLOY OVERVIEW", rows) deployType := getDeployType(deployedVersion, toDeployVersion)
overview := formatter.CreateOverview(fmt.Sprintf("%s OVERVIEW", deployType), rows)
fmt.Println(overview) fmt.Println(overview)
if info != "" {
fmt.Println(info)
}
for _, msg := range warnMessages { for _, msg := range warnMessages {
log.Warn(msg) log.Warn(msg)
} }
@ -220,76 +110,34 @@ func DeployOverview(
return nil return nil
} }
// UndeployOverview shows an undeployment overview func getDeployType(currentVersion, newVersion string) string {
func UndeployOverview( if newVersion == config.NO_DOMAIN_DEFAULT {
app appPkg.App, return "UNDEPLOY"
deployedVersion,
deployedChaosVersion,
toWriteVersion string,
) error {
deployConfig := "compose.yml"
if composeFiles, ok := app.Env["COMPOSE_FILE"]; ok {
deployConfig = formatComposeFiles(composeFiles)
} }
if strings.Contains(newVersion, "+U") {
server := app.Server return "CHAOS DEPLOY"
if app.Server == "default" {
server = "local"
} }
if strings.Contains(currentVersion, "+U") {
domain := app.Domain return "UNCHAOS DEPLOY"
if domain == "" {
domain = config.NO_DOMAIN_DEFAULT
} }
if currentVersion == newVersion {
recipeName, exists := app.Env["RECIPE"] return "REDEPLOY"
if !exists {
recipeName = app.Env["TYPE"]
} }
if currentVersion == config.NO_VERSION_DEFAULT {
envVersion, err := recipe.GetEnvVersionRaw(recipeName) return "NEW DEPLOY"
}
currentParsed, err := tagcmp.Parse(currentVersion)
if err != nil { if err != nil {
return err return "DEPLOY"
} }
newParsed, err := tagcmp.Parse(newVersion)
if envVersion == "" { if err != nil {
envVersion = config.NO_VERSION_DEFAULT return "DEPLOY"
} }
if currentParsed.IsLessThan(newParsed) {
rows := [][]string{ return "UPGRADE"
{"DOMAIN", domain},
{"RECIPE", app.Recipe.Name},
{"SERVER", server},
{"CONFIG", deployConfig},
{"CURRENT DEPLOYMENT", "---"},
{"VERSION", formatter.BoldDirtyDefault(deployedVersion)},
{"CHAOS", formatter.BoldDirtyDefault(deployedChaosVersion)},
{fmt.Sprintf("%s.ENV", strings.ToUpper(app.Name)), "---"},
{"CURRENT VERSION", formatter.BoldDirtyDefault(envVersion)},
{"NEW VERSION", formatter.BoldDirtyDefault(toWriteVersion)},
} }
return "DOWNGRADE"
overview := formatter.CreateOverview("UNDEPLOY OVERVIEW", rows)
fmt.Println(overview)
if NoInput {
return nil
}
response := false
prompt := &survey.Confirm{Message: "proceed?"}
if err := survey.AskOne(prompt, &response); err != nil {
return err
}
if !response {
log.Fatal("undeploy cancelled")
}
return nil
} }
// PostCmds parses a string of commands and executes them inside of the respective services // PostCmds parses a string of commands and executes them inside of the respective services

go.mod
View File

@ -1,6 +1,6 @@
module coopcloud.tech/abra module coopcloud.tech/abra
go 1.22.7 go 1.23.0
toolchain go1.23.1 toolchain go1.23.1
@ -8,21 +8,21 @@ require (
coopcloud.tech/tagcmp v0.0.0-20230809071031-eb3e7758d4eb coopcloud.tech/tagcmp v0.0.0-20230809071031-eb3e7758d4eb
git.coopcloud.tech/toolshed/godotenv v1.5.2-0.20250103171850-4d0ca41daa5c git.coopcloud.tech/toolshed/godotenv v1.5.2-0.20250103171850-4d0ca41daa5c
github.com/AlecAivazis/survey/v2 v2.3.7 github.com/AlecAivazis/survey/v2 v2.3.7
github.com/charmbracelet/lipgloss v1.0.0 github.com/charmbracelet/lipgloss v1.1.0
github.com/charmbracelet/log v0.4.0 github.com/charmbracelet/log v0.4.1
github.com/distribution/reference v0.6.0 github.com/distribution/reference v0.6.0
github.com/docker/cli v27.4.1+incompatible github.com/docker/cli v28.0.1+incompatible
github.com/docker/docker v27.4.1+incompatible github.com/docker/docker v28.0.1+incompatible
github.com/docker/go-units v0.5.0 github.com/docker/go-units v0.5.0
github.com/go-git/go-git/v5 v5.13.1 github.com/go-git/go-git/v5 v5.14.0
github.com/google/go-cmp v0.6.0 github.com/google/go-cmp v0.7.0
github.com/moby/sys/signal v0.7.1 github.com/moby/sys/signal v0.7.1
github.com/moby/term v0.5.2 github.com/moby/term v0.5.2
github.com/pkg/errors v0.9.1 github.com/pkg/errors v0.9.1
github.com/schollz/progressbar/v3 v3.17.1 github.com/schollz/progressbar/v3 v3.18.0
golang.org/x/term v0.28.0 golang.org/x/term v0.30.0
gopkg.in/yaml.v3 v3.0.1 gopkg.in/yaml.v3 v3.0.1
gotest.tools/v3 v3.5.1 gotest.tools/v3 v3.5.2
) )
require ( require (
@ -31,16 +31,19 @@ require (
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect
github.com/BurntSushi/toml v1.4.0 // indirect github.com/BurntSushi/toml v1.4.0 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/ProtonMail/go-crypto v1.1.3 // indirect github.com/ProtonMail/go-crypto v1.1.6 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/beorn7/perks v1.0.1 // indirect github.com/beorn7/perks v1.0.1 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/charmbracelet/x/ansi v0.6.0 // indirect github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect
github.com/cloudflare/circl v1.5.0 // indirect github.com/charmbracelet/x/ansi v0.8.0 // indirect
github.com/charmbracelet/x/cellbuf v0.0.13 // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/cloudflare/circl v1.6.0 // indirect
github.com/containerd/log v0.1.0 // indirect github.com/containerd/log v0.1.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.6 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.6 // indirect
github.com/cyphar/filepath-securejoin v0.3.6 // indirect github.com/cyphar/filepath-securejoin v0.4.1 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect
github.com/docker/distribution v2.8.3+incompatible // indirect github.com/docker/distribution v2.8.3+incompatible // indirect
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c // indirect github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c // indirect
@ -52,7 +55,7 @@ require (
github.com/fsnotify/fsnotify v1.6.0 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/ghodss/yaml v1.0.0 // indirect github.com/ghodss/yaml v1.0.0 // indirect
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
github.com/go-git/go-billy/v5 v5.6.1 // indirect github.com/go-git/go-billy/v5 v5.6.2 // indirect
github.com/go-logfmt/logfmt v0.6.0 // indirect github.com/go-logfmt/logfmt v0.6.0 // indirect
github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect
@ -60,15 +63,15 @@ require (
github.com/gogo/protobuf v1.3.2 // indirect github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
github.com/google/uuid v1.6.0 // indirect github.com/google/uuid v1.6.0 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect
github.com/klauspost/compress v1.17.11 // indirect github.com/klauspost/compress v1.18.0 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
@ -81,49 +84,50 @@ require (
github.com/moby/sys/user v0.3.0 // indirect github.com/moby/sys/user v0.3.0 // indirect
github.com/moby/sys/userns v0.1.0 // indirect github.com/moby/sys/userns v0.1.0 // indirect
github.com/morikuni/aec v1.0.0 // indirect github.com/morikuni/aec v1.0.0 // indirect
github.com/muesli/termenv v0.15.2 // indirect github.com/muesli/termenv v0.16.0 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/runc v1.1.13 // indirect github.com/opencontainers/runc v1.1.13 // indirect
github.com/opencontainers/runtime-spec v1.1.0 // indirect github.com/opencontainers/runtime-spec v1.1.0 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect github.com/pelletier/go-toml v1.9.5 // indirect
github.com/pjbgf/sha1cd v0.3.1 // indirect github.com/pjbgf/sha1cd v0.3.2 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_model v0.6.1 // indirect github.com/prometheus/client_model v0.6.1 // indirect
github.com/prometheus/common v0.61.0 // indirect github.com/prometheus/common v0.63.0 // indirect
github.com/prometheus/procfs v0.15.1 // indirect github.com/prometheus/procfs v0.15.1 // indirect
github.com/rivo/uniseg v0.4.7 // indirect github.com/rivo/uniseg v0.4.7 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect github.com/sirupsen/logrus v1.9.3 // indirect
github.com/skeema/knownhosts v1.3.0 // indirect github.com/skeema/knownhosts v1.3.1 // indirect
github.com/spf13/pflag v1.0.5 // indirect github.com/spf13/pflag v1.0.6 // indirect
github.com/xanzy/ssh-agent v0.3.3 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/xeipuuv/gojsonschema v1.2.0 // indirect github.com/xeipuuv/gojsonschema v1.2.0 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect
go.opentelemetry.io/otel v1.33.0 // indirect go.opentelemetry.io/otel v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.33.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 // indirect go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 // indirect
go.opentelemetry.io/otel/metric v1.33.0 // indirect go.opentelemetry.io/otel/metric v1.35.0 // indirect
go.opentelemetry.io/otel/sdk v1.33.0 // indirect go.opentelemetry.io/otel/sdk v1.35.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.33.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect
go.opentelemetry.io/otel/trace v1.33.0 // indirect go.opentelemetry.io/otel/trace v1.35.0 // indirect
go.opentelemetry.io/proto/otlp v1.5.0 // indirect go.opentelemetry.io/proto/otlp v1.5.0 // indirect
golang.org/x/crypto v0.32.0 // indirect golang.org/x/crypto v0.36.0 // indirect
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 // indirect golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 // indirect
golang.org/x/mod v0.22.0 // indirect golang.org/x/mod v0.24.0 // indirect
golang.org/x/net v0.34.0 // indirect golang.org/x/net v0.37.0 // indirect
golang.org/x/sync v0.10.0 // indirect golang.org/x/sync v0.12.0 // indirect
golang.org/x/text v0.21.0 // indirect golang.org/x/text v0.23.0 // indirect
golang.org/x/time v0.9.0 // indirect golang.org/x/time v0.11.0 // indirect
golang.org/x/tools v0.29.0 // indirect golang.org/x/tools v0.31.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20250313205543-e70fdf4c4cb4 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250106144421-5f5ef82da422 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 // indirect
google.golang.org/grpc v1.69.2 // indirect google.golang.org/grpc v1.71.0 // indirect
google.golang.org/protobuf v1.36.2 // indirect google.golang.org/protobuf v1.36.5 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect
) )
@ -132,19 +136,19 @@ require (
github.com/containers/image v3.0.2+incompatible github.com/containers/image v3.0.2+incompatible
github.com/containers/storage v1.38.2 // indirect github.com/containers/storage v1.38.2 // indirect
github.com/decentral1se/passgen v1.0.1 github.com/decentral1se/passgen v1.0.1
github.com/docker/docker-credential-helpers v0.8.2 // indirect github.com/docker/docker-credential-helpers v0.9.3 // indirect
github.com/fvbommel/sortorder v1.1.0 // indirect github.com/fvbommel/sortorder v1.1.0 // indirect
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
github.com/gorilla/mux v1.8.1 // indirect github.com/gorilla/mux v1.8.1 // indirect
github.com/hashicorp/go-retryablehttp v0.7.7 github.com/hashicorp/go-retryablehttp v0.7.7
github.com/moby/patternmatcher v0.6.0 // indirect github.com/moby/patternmatcher v0.6.0 // indirect
github.com/moby/sys/sequential v0.6.0 // indirect github.com/moby/sys/sequential v0.6.0 // indirect
github.com/opencontainers/image-spec v1.1.0 // indirect github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/prometheus/client_golang v1.20.5 // indirect github.com/prometheus/client_golang v1.21.1 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/spf13/cobra v1.8.1 github.com/spf13/cobra v1.9.1
github.com/stretchr/testify v1.10.0 github.com/stretchr/testify v1.10.0
github.com/theupdateframework/notary v0.7.0 // indirect github.com/theupdateframework/notary v0.7.0 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
golang.org/x/sys v0.29.0 golang.org/x/sys v0.31.0
) )

go.sum
View File

@ -81,6 +81,8 @@ github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDe
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk=
github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw=
github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
@ -136,14 +138,26 @@ github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghf
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs=
github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk=
github.com/charmbracelet/lipgloss v1.0.0 h1:O7VkGDvqEdGi93X+DeqsQ7PKHDgtQfF8j8/O2qFMQNg= github.com/charmbracelet/lipgloss v1.0.0 h1:O7VkGDvqEdGi93X+DeqsQ7PKHDgtQfF8j8/O2qFMQNg=
github.com/charmbracelet/lipgloss v1.0.0/go.mod h1:U5fy9Z+C38obMs+T+tJqst9VGzlOYGj4ri9reL3qUlo= github.com/charmbracelet/lipgloss v1.0.0/go.mod h1:U5fy9Z+C38obMs+T+tJqst9VGzlOYGj4ri9reL3qUlo=
github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY=
github.com/charmbracelet/lipgloss v1.1.0/go.mod h1:/6Q8FR2o+kj8rz4Dq0zQc3vYf7X+B0binUUBwA0aL30=
github.com/charmbracelet/log v0.4.0 h1:G9bQAcx8rWA2T3pWvx7YtPTPwgqpk7D68BX21IRW8ZM= github.com/charmbracelet/log v0.4.0 h1:G9bQAcx8rWA2T3pWvx7YtPTPwgqpk7D68BX21IRW8ZM=
github.com/charmbracelet/log v0.4.0/go.mod h1:63bXt/djrizTec0l11H20t8FDSvA4CRZJ1KH22MdptM= github.com/charmbracelet/log v0.4.0/go.mod h1:63bXt/djrizTec0l11H20t8FDSvA4CRZJ1KH22MdptM=
github.com/charmbracelet/log v0.4.1 h1:6AYnoHKADkghm/vt4neaNEXkxcXLSV2g1rdyFDOpTyk=
github.com/charmbracelet/log v0.4.1/go.mod h1:pXgyTsqsVu4N9hGdHmQ0xEA4RsXof402LX9ZgiITn2I=
github.com/charmbracelet/x/ansi v0.6.0 h1:qOznutrb93gx9oMiGf7caF7bqqubh6YIM0SWKyA08pA= github.com/charmbracelet/x/ansi v0.6.0 h1:qOznutrb93gx9oMiGf7caF7bqqubh6YIM0SWKyA08pA=
github.com/charmbracelet/x/ansi v0.6.0/go.mod h1:KBUFw1la39nl0dLl10l5ORDAqGXaeurTQmwyyVKse/Q= github.com/charmbracelet/x/ansi v0.6.0/go.mod h1:KBUFw1la39nl0dLl10l5ORDAqGXaeurTQmwyyVKse/Q=
github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE=
github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q=
github.com/charmbracelet/x/cellbuf v0.0.13 h1:/KBBKHuVRbq1lYx5BzEHBAFBP8VcQzJejZ/IA3iR28k=
github.com/charmbracelet/x/cellbuf v0.0.13/go.mod h1:xe0nKWGd3eJgtqZRaN9RjMtK7xUYchjzPr7q6kcvCCs=
github.com/charmbracelet/x/exp/golden v0.0.0-20240806155701-69247e0abc2a h1:G99klV19u0QnhiizODirwVksQB91TJKV/UaTnACcG30= github.com/charmbracelet/x/exp/golden v0.0.0-20240806155701-69247e0abc2a h1:G99klV19u0QnhiizODirwVksQB91TJKV/UaTnACcG30=
github.com/charmbracelet/x/exp/golden v0.0.0-20240806155701-69247e0abc2a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U= github.com/charmbracelet/x/exp/golden v0.0.0-20240806155701-69247e0abc2a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw= github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw=
github.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d89JlCLQIfgVcNsNN0t6T2M= github.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d89JlCLQIfgVcNsNN0t6T2M=
github.com/checkpoint-restore/go-criu/v5 v5.3.0/go.mod h1:E/eQpaFtUKGOOSEBZgmKAcn+zUUwWxqcaKZlF54wK8E= github.com/checkpoint-restore/go-criu/v5 v5.3.0/go.mod h1:E/eQpaFtUKGOOSEBZgmKAcn+zUUwWxqcaKZlF54wK8E=
@ -163,6 +177,8 @@ github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004 h1:lkAMpLVBDaj17e
github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004/go.mod h1:yMWuSON2oQp+43nFtAV/uvKQIFpSPerB57DCt9t8sSA= github.com/cloudflare/cfssl v0.0.0-20180223231731-4e2dcbde5004/go.mod h1:yMWuSON2oQp+43nFtAV/uvKQIFpSPerB57DCt9t8sSA=
github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys= github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys=
github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
github.com/cloudflare/circl v1.6.0 h1:cr5JKic4HI+LkINy2lg3W2jF8sHCVTBncJr5gIIq7qk=
github.com/cloudflare/circl v1.6.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
@ -288,6 +304,8 @@ github.com/cyphar/filepath-securejoin v0.2.2/go.mod h1:FpkQEhXnPnOthhzymB7CGsFk2
github.com/cyphar/filepath-securejoin v0.2.3/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/cyphar/filepath-securejoin v0.2.3/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
github.com/cyphar/filepath-securejoin v0.3.6 h1:4d9N5ykBnSp5Xn2JkhocYDkOpURL/18CYMpo6xB9uWM= github.com/cyphar/filepath-securejoin v0.3.6 h1:4d9N5ykBnSp5Xn2JkhocYDkOpURL/18CYMpo6xB9uWM=
github.com/cyphar/filepath-securejoin v0.3.6/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= github.com/cyphar/filepath-securejoin v0.3.6/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
github.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1SMSibvLzxjeJLnrYEVLULFNiHY9YfQ= github.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1SMSibvLzxjeJLnrYEVLULFNiHY9YfQ=
github.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s= github.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s=
github.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8= github.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8=
@ -308,6 +326,8 @@ github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyG
github.com/docker/cli v0.0.0-20191017083524-a8ff7f821017/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/cli v0.0.0-20191017083524-a8ff7f821017/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli v27.4.1+incompatible h1:VzPiUlRJ/xh+otB75gva3r05isHMo5wXDfPRi5/b4hI= github.com/docker/cli v27.4.1+incompatible h1:VzPiUlRJ/xh+otB75gva3r05isHMo5wXDfPRi5/b4hI=
github.com/docker/cli v27.4.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/cli v27.4.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/cli v28.0.1+incompatible h1:g0h5NQNda3/CxIsaZfH4Tyf6vpxFth7PYl3hgCPOKzs=
github.com/docker/cli v28.0.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY= github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY=
github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/distribution v2.7.1+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
@ -316,9 +336,13 @@ github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4Kfc
github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker v1.4.2-0.20190924003213-a8608b5b67c7/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v27.4.1+incompatible h1:ZJvcY7gfwHn1JF48PfbyXg7Jyt9ZCWDW+GGXOIxEwp4= github.com/docker/docker v27.4.1+incompatible h1:ZJvcY7gfwHn1JF48PfbyXg7Jyt9ZCWDW+GGXOIxEwp4=
github.com/docker/docker v27.4.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker v27.4.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v28.0.1+incompatible h1:FCHjSRdXhNRFjlHMTv4jUNlIBbTeRjrWfeFuJp7jpo0=
github.com/docker/docker v28.0.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y= github.com/docker/docker-credential-helpers v0.6.3/go.mod h1:WRaJzqw3CTB9bk10avuGsjVBZsD05qeibJ1/TYlvc0Y=
github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo= github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo=
github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M= github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M=
github.com/docker/docker-credential-helpers v0.9.3 h1:gAm/VtF9wgqJMoxzT3Gj5p4AqIjCBS4wrsOh9yRqcz8=
github.com/docker/docker-credential-helpers v0.9.3/go.mod h1:x+4Gbw9aGmChi3qTLZj8Dfn0TD20M/fuWy0E5+WDeCo=
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0= github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c h1:lzqkGL9b3znc+ZUgi7FlLnqjQhcXxkNM/quxIjBVMD0=
github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q= github.com/docker/go v1.5.1-1.0.20160303222718-d30aec9fd63c/go.mod h1:CADgU4DSXK5QUlFslkQu2yW2TKzFZcXq/leZfM0UH5Q=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
@ -343,6 +367,7 @@ github.com/dvsekhvalnov/jose2go v0.0.0-20170216131308-f21a8cedbbae/go.mod h1:7Bv
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc= github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/elazarl/goproxy v1.2.3 h1:xwIyKHbaP5yfT6O9KIeYJR5549MXRQkoQMRXGztz8YQ= github.com/elazarl/goproxy v1.2.3 h1:xwIyKHbaP5yfT6O9KIeYJR5549MXRQkoQMRXGztz8YQ=
github.com/elazarl/goproxy v1.2.3/go.mod h1:YfEbZtqP4AetfO6d40vWchF3znWX7C7Vd6ZMfdL8z64= github.com/elazarl/goproxy v1.2.3/go.mod h1:YfEbZtqP4AetfO6d40vWchF3znWX7C7Vd6ZMfdL8z64=
github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o=
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
@ -379,10 +404,14 @@ github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66D
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
github.com/go-git/go-billy/v5 v5.6.1 h1:u+dcrgaguSSkbjzHwelEjc0Yj300NUevrrPphk/SoRA= github.com/go-git/go-billy/v5 v5.6.1 h1:u+dcrgaguSSkbjzHwelEjc0Yj300NUevrrPphk/SoRA=
github.com/go-git/go-billy/v5 v5.6.1/go.mod h1:0AsLr1z2+Uksi4NlElmMblP5rPcDZNRCD8ujZCRR2BE= github.com/go-git/go-billy/v5 v5.6.1/go.mod h1:0AsLr1z2+Uksi4NlElmMblP5rPcDZNRCD8ujZCRR2BE=
github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM=
github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU=
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4=
github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII=
github.com/go-git/go-git/v5 v5.13.1 h1:DAQ9APonnlvSWpvolXWIuV6Q6zXy2wHbN4cVlNR5Q+M= github.com/go-git/go-git/v5 v5.13.1 h1:DAQ9APonnlvSWpvolXWIuV6Q6zXy2wHbN4cVlNR5Q+M=
github.com/go-git/go-git/v5 v5.13.1/go.mod h1:qryJB4cSBoq3FRoBRf5A77joojuBcmPJ0qu3XXXVixc= github.com/go-git/go-git/v5 v5.13.1/go.mod h1:qryJB4cSBoq3FRoBRf5A77joojuBcmPJ0qu3XXXVixc=
github.com/go-git/go-git/v5 v5.14.0 h1:/MD3lCrGjCen5WfEAzKg00MJJffKhC8gzS80ycmCi60=
github.com/go-git/go-git/v5 v5.14.0/go.mod h1:Z5Xhoia5PcWA3NF8vRLURn9E5FRhSl7dGj9ItW3Wk5k=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@ -481,6 +510,8 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-containerregistry v0.5.1/go.mod h1:Ct15B4yir3PLOP5jsy0GNeYVaIZs/MK/Jz5any1wFW0= github.com/google/go-containerregistry v0.5.1/go.mod h1:Ct15B4yir3PLOP5jsy0GNeYVaIZs/MK/Jz5any1wFW0=
github.com/google/go-intervals v0.0.2/go.mod h1:MkaR3LNRfeKLPmqgJYs4E66z5InYjmCjbbr4TQlcT6Y= github.com/google/go-intervals v0.0.2/go.mod h1:MkaR3LNRfeKLPmqgJYs4E66z5InYjmCjbbr4TQlcT6Y=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@ -524,6 +555,8 @@ github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1 h1:VNqngBF40hVlDloBruUehVYC3ArSgIyScOAyMRqBxRg= github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1 h1:VNqngBF40hVlDloBruUehVYC3ArSgIyScOAyMRqBxRg=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1/go.mod h1:RBRO7fro65R6tjKzYgLAFo0t1QEXY1Dp+i/bvpRiqiQ= github.com/grpc-ecosystem/grpc-gateway/v2 v2.25.1/go.mod h1:RBRO7fro65R6tjKzYgLAFo0t1QEXY1Dp+i/bvpRiqiQ=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 h1:5ZPtiqj0JL5oKWmcsq4VMaAW5ukBEgSGXEN89zeH1Jo=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3/go.mod h1:ndYquD05frm2vACXE1nsccT4oJzjhw2arTS2cpUD1PI=
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8= github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8=
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4= github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4=
github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@ -587,6 +620,8 @@ github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdY
github.com/klauspost/compress v1.14.2/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.14.2/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs= github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@ -620,6 +655,8 @@ github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaO
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
@ -687,6 +724,8 @@ github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7P
github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ=
github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo= github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8= github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
@ -725,6 +764,8 @@ github.com/opencontainers/image-spec v1.0.0/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zM
github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
github.com/opencontainers/image-spec v1.1.1/go.mod h1:qpqAh3Dmcf36wStyyWU+kCeDgrGnAve2nCC8+7h8Q0M=
github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
github.com/opencontainers/runc v1.0.0-rc8.0.20190926000215-3e425f80a8c9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= github.com/opencontainers/runc v1.0.0-rc8.0.20190926000215-3e425f80a8c9/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U=
@ -756,6 +797,8 @@ github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCko
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
github.com/pjbgf/sha1cd v0.3.1 h1:Dh2GYdpJnO84lIw0LJwTFXjcNbasP/bklicSznyAaPI= github.com/pjbgf/sha1cd v0.3.1 h1:Dh2GYdpJnO84lIw0LJwTFXjcNbasP/bklicSznyAaPI=
github.com/pjbgf/sha1cd v0.3.1/go.mod h1:Y8t7jSB/dEI/lQE04A1HVKteqjj9bX5O4+Cex0TCu8s= github.com/pjbgf/sha1cd v0.3.1/go.mod h1:Y8t7jSB/dEI/lQE04A1HVKteqjj9bX5O4+Cex0TCu8s=
github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1-0.20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@ -773,6 +816,8 @@ github.com/prometheus/client_golang v1.1.0/go.mod h1:I1FGZT9+L76gKKOs5djB6ezCbFQ
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y= github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y=
github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE= github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
github.com/prometheus/client_golang v1.21.1 h1:DOvXXTqVzvkIewV/CDPFdejpMCGeMcbGCQ8YOmu+Ibk=
github.com/prometheus/client_golang v1.21.1/go.mod h1:U9NM32ykUErtVBxdvD3zfi+EuFkkaBvMb09mIfe0Zgg=
github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20171117100541-99fa1f4be8e5/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
@ -788,6 +833,8 @@ github.com/prometheus/common v0.6.0/go.mod h1:eBmuwkDJBwy6iBfxCBob6t6dR6ENT/y+J+
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
github.com/prometheus/common v0.61.0 h1:3gv/GThfX0cV2lpO7gkTUwZru38mxevy90Bj8YFSRQQ= github.com/prometheus/common v0.61.0 h1:3gv/GThfX0cV2lpO7gkTUwZru38mxevy90Bj8YFSRQQ=
github.com/prometheus/common v0.61.0/go.mod h1:zr29OCN/2BsJRaFwG8QOBr41D6kkchKbpeNH7pAjb/s= github.com/prometheus/common v0.61.0/go.mod h1:zr29OCN/2BsJRaFwG8QOBr41D6kkchKbpeNH7pAjb/s=
github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k=
github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18=
github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
@ -810,7 +857,9 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww= github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
@ -818,6 +867,8 @@ github.com/safchain/ethtool v0.0.0-20190326074333-42ed695e3de8/go.mod h1:Z0q5wiB
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/schollz/progressbar/v3 v3.17.1 h1:bI1MTaoQO+v5kzklBjYNRQLoVpe0zbyRZNK6DFkVC5U= github.com/schollz/progressbar/v3 v3.17.1 h1:bI1MTaoQO+v5kzklBjYNRQLoVpe0zbyRZNK6DFkVC5U=
github.com/schollz/progressbar/v3 v3.17.1/go.mod h1:RzqpnsPQNjUyIgdglUjRLgD7sVnxN1wpmBMV+UiEbL4= github.com/schollz/progressbar/v3 v3.17.1/go.mod h1:RzqpnsPQNjUyIgdglUjRLgD7sVnxN1wpmBMV+UiEbL4=
github.com/schollz/progressbar/v3 v3.18.0 h1:uXdoHABRFmNIjUfte/Ex7WtuyVslrw2wVPQmCN62HpA=
github.com/schollz/progressbar/v3 v3.18.0/go.mod h1:IsO3lpbaGuzh8zIMzgY3+J8l4C8GjO0Y9S69eFvNsec=
github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24q7p+U= github.com/sclevine/spec v1.2.0/go.mod h1:W4J29eT/Kzv7/b9IWLB055Z+qvVC9vt0Arko24q7p+U=
github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo=
github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg= github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg=
@ -836,6 +887,8 @@ github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/skeema/knownhosts v1.3.0 h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY= github.com/skeema/knownhosts v1.3.0 h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY=
github.com/skeema/knownhosts v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M= github.com/skeema/knownhosts v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M=
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
@ -852,6 +905,8 @@ github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3
github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/jwalterweatherman v0.0.0-20141219030609-3d60171a6431/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
@ -862,6 +917,8 @@ github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnIn
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v0.0.0-20150530192845-be5ff3e4840c/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM= github.com/spf13/viper v0.0.0-20150530192845-be5ff3e4840c/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7SrnBM=
github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU= github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
@ -916,6 +973,8 @@ github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@ -938,24 +997,42 @@ go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJyS
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 h1:yd02MEjBdJkG3uabWP9apV+OuWRIXGDuJEUJbOHmCFU= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 h1:yd02MEjBdJkG3uabWP9apV+OuWRIXGDuJEUJbOHmCFU=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0/go.mod h1:umTcuxiv1n/s/S6/c2AT/g2CQ7u5C59sHDNmfSwgz7Q= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0/go.mod h1:umTcuxiv1n/s/S6/c2AT/g2CQ7u5C59sHDNmfSwgz7Q=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ=
go.opentelemetry.io/otel v1.33.0 h1:/FerN9bax5LoK51X/sI0SVYrjSE0/yUL7DpxW4K3FWw= go.opentelemetry.io/otel v1.33.0 h1:/FerN9bax5LoK51X/sI0SVYrjSE0/yUL7DpxW4K3FWw=
go.opentelemetry.io/otel v1.33.0/go.mod h1:SUUkR6csvUQl+yjReHu5uM3EtVV7MBm5FHKRlNx4I8I= go.opentelemetry.io/otel v1.33.0/go.mod h1:SUUkR6csvUQl+yjReHu5uM3EtVV7MBm5FHKRlNx4I8I=
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.33.0 h1:7F29RDmnlqk6B5d+sUqemt8TBfDqxryYW5gX6L74RFA= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.33.0 h1:7F29RDmnlqk6B5d+sUqemt8TBfDqxryYW5gX6L74RFA=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.33.0/go.mod h1:ZiGDq7xwDMKmWDrN1XsXAj0iC7hns+2DhxBFSncNHSE= go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.33.0/go.mod h1:ZiGDq7xwDMKmWDrN1XsXAj0iC7hns+2DhxBFSncNHSE=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.35.0 h1:QcFwRrZLc82r8wODjvyCbP7Ifp3UANaBSmhDSFjnqSc=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.35.0/go.mod h1:CXIWhUomyWBG/oY2/r/kLp6K/cmx9e/7DLpBuuGdLCA=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 h1:Vh5HayB/0HHfOQA7Ctx69E/Y/DcQSMPpKANYVMQ7fBA= go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 h1:Vh5HayB/0HHfOQA7Ctx69E/Y/DcQSMPpKANYVMQ7fBA=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0/go.mod h1:cpgtDBaqD/6ok/UG0jT15/uKjAY8mRA53diogHBg3UI= go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0/go.mod h1:cpgtDBaqD/6ok/UG0jT15/uKjAY8mRA53diogHBg3UI=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0 h1:1fTNlAIJZGWLP5FVu0fikVry1IsiUnXjf7QFvoNN3Xw=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0/go.mod h1:zjPK58DtkqQFn+YUMbx0M2XV3QgKU0gS9LeGohREyK4=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0 h1:5pojmb1U1AogINhN3SurB+zm/nIcusopeBNp42f45QM= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0 h1:5pojmb1U1AogINhN3SurB+zm/nIcusopeBNp42f45QM=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0/go.mod h1:57gTHJSE5S1tqg+EKsLPlTWhpHMsWlVmer+LA926XiA= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0/go.mod h1:57gTHJSE5S1tqg+EKsLPlTWhpHMsWlVmer+LA926XiA=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.35.0 h1:m639+BofXTvcY1q8CGs4ItwQarYtJPOWmVobfM1HpVI=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.35.0/go.mod h1:LjReUci/F4BUyv+y4dwnq3h/26iNOeC3wAIqgvTIZVo=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU=
go.opentelemetry.io/otel/metric v1.33.0 h1:r+JOocAyeRVXD8lZpjdQjzMadVZp2M4WmQ+5WtEnklQ= go.opentelemetry.io/otel/metric v1.33.0 h1:r+JOocAyeRVXD8lZpjdQjzMadVZp2M4WmQ+5WtEnklQ=
go.opentelemetry.io/otel/metric v1.33.0/go.mod h1:L9+Fyctbp6HFTddIxClbQkjtubW6O9QS3Ann/M82u6M= go.opentelemetry.io/otel/metric v1.33.0/go.mod h1:L9+Fyctbp6HFTddIxClbQkjtubW6O9QS3Ann/M82u6M=
go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
go.opentelemetry.io/otel/sdk v1.33.0 h1:iax7M131HuAm9QkZotNHEfstof92xM+N8sr3uHXc2IM= go.opentelemetry.io/otel/sdk v1.33.0 h1:iax7M131HuAm9QkZotNHEfstof92xM+N8sr3uHXc2IM=
go.opentelemetry.io/otel/sdk v1.33.0/go.mod h1:A1Q5oi7/9XaMlIWzPSxLRWOI8nG3FnzHJNbiENQuihM= go.opentelemetry.io/otel/sdk v1.33.0/go.mod h1:A1Q5oi7/9XaMlIWzPSxLRWOI8nG3FnzHJNbiENQuihM=
go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY=
go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg=
go.opentelemetry.io/otel/sdk/metric v1.33.0 h1:Gs5VK9/WUJhNXZgn8MR6ITatvAmKeIuCtNbsP3JkNqU= go.opentelemetry.io/otel/sdk/metric v1.33.0 h1:Gs5VK9/WUJhNXZgn8MR6ITatvAmKeIuCtNbsP3JkNqU=
go.opentelemetry.io/otel/sdk/metric v1.33.0/go.mod h1:dL5ykHZmm1B1nVRk9dDjChwDmt81MjVp3gLkQRwKf/Q= go.opentelemetry.io/otel/sdk/metric v1.33.0/go.mod h1:dL5ykHZmm1B1nVRk9dDjChwDmt81MjVp3gLkQRwKf/Q=
go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o=
go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w=
go.opentelemetry.io/otel/trace v1.33.0 h1:cCJuF7LRjUFso9LPnEAHJDB2pqzp+hbO8eu1qqW2d/s= go.opentelemetry.io/otel/trace v1.33.0 h1:cCJuF7LRjUFso9LPnEAHJDB2pqzp+hbO8eu1qqW2d/s=
go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck= go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck=
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg= go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg=
go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY= go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY=
@ -989,6 +1066,8 @@ golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@ -1003,6 +1082,8 @@ golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329 h1:9kj3STMvgqy3YA4VQXBrN7925
golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329/go.mod h1:qj5a5QZpwLU2NLQudwIN5koi3beDhSAlJwa67PuM98c= golang.org/x/exp v0.0.0-20250103183323-7d7fa50e5329/go.mod h1:qj5a5QZpwLU2NLQudwIN5koi3beDhSAlJwa67PuM98c=
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 h1:yqrTHse8TCMW1M1ZCP+VAR/l0kKxwaAIqN/il7x4voA= golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 h1:yqrTHse8TCMW1M1ZCP+VAR/l0kKxwaAIqN/il7x4voA=
golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8/go.mod h1:tujkw807nyEEAamNbDrEGzRav+ilXA7PCRAd6xsmwiU= golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8/go.mod h1:tujkw807nyEEAamNbDrEGzRav+ilXA7PCRAd6xsmwiU=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394 h1:nDVHiLt8aIbd/VzvPWN6kSOPE7+F/fNFDSXLVYkE/Iw=
golang.org/x/exp v0.0.0-20250305212735-054e65f0b394/go.mod h1:sIifuuw/Yco/y6yb6+bDNfyeQ/MdPUy/hKEMYQV17cM=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@ -1027,6 +1108,8 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4=
golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU=
golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -1072,6 +1155,8 @@ golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c=
golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -1091,6 +1176,8 @@ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -1174,6 +1261,8 @@ golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@ -1181,6 +1270,8 @@ golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y=
golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -1192,6 +1283,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -1202,6 +1295,8 @@ golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY=
golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0=
golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@ -1251,6 +1346,8 @@ golang.org/x/tools v0.28.0 h1:WuB6qZ4RPCQo5aP3WdKZS7i595EdWqWR8vqJTlwTVK8=
golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw= golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw=
golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE= golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE=
golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588= golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588=
golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU=
golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@ -1303,10 +1400,14 @@ google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d h1:
google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d/go.mod h1:2v7Z7gP2ZUOGsaFyxATQSRoBnKygqVq2Cwnvom7QiqY= google.golang.org/genproto/googleapis/api v0.0.0-20250102185135-69823020774d/go.mod h1:2v7Z7gP2ZUOGsaFyxATQSRoBnKygqVq2Cwnvom7QiqY=
google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422 h1:GVIKPyP/kLIyVOgOnTwFOrvQaQUzOzGMCxgFUOEmm24= google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422 h1:GVIKPyP/kLIyVOgOnTwFOrvQaQUzOzGMCxgFUOEmm24=
google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422/go.mod h1:b6h1vNKhxaSoEI+5jc3PJUCustfli/mRab7295pY7rw= google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422/go.mod h1:b6h1vNKhxaSoEI+5jc3PJUCustfli/mRab7295pY7rw=
google.golang.org/genproto/googleapis/api v0.0.0-20250313205543-e70fdf4c4cb4 h1:IFnXJq3UPB3oBREOodn1v1aGQeZYQclEmvWRMN0PSsY=
google.golang.org/genproto/googleapis/api v0.0.0-20250313205543-e70fdf4c4cb4/go.mod h1:c8q6Z6OCqnfVIqUFJkCzKcrj8eCvUrz+K4KRzSTuANg=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d h1:xJJRGY7TJcvIlpSrN3K6LAWgNFUILlO+OMAqtg9aqnw= google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d h1:xJJRGY7TJcvIlpSrN3K6LAWgNFUILlO+OMAqtg9aqnw=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d/go.mod h1:3ENsm/5D1mzDyhpzeRi1NR784I0BcofWBoSc5QqqMK4= google.golang.org/genproto/googleapis/rpc v0.0.0-20250102185135-69823020774d/go.mod h1:3ENsm/5D1mzDyhpzeRi1NR784I0BcofWBoSc5QqqMK4=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250106144421-5f5ef82da422 h1:3UsHvIr4Wc2aW4brOaSCmcxh9ksica6fHEr8P1XhkYw= google.golang.org/genproto/googleapis/rpc v0.0.0-20250106144421-5f5ef82da422 h1:3UsHvIr4Wc2aW4brOaSCmcxh9ksica6fHEr8P1XhkYw=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250106144421-5f5ef82da422/go.mod h1:3ENsm/5D1mzDyhpzeRi1NR784I0BcofWBoSc5QqqMK4= google.golang.org/genproto/googleapis/rpc v0.0.0-20250106144421-5f5ef82da422/go.mod h1:3ENsm/5D1mzDyhpzeRi1NR784I0BcofWBoSc5QqqMK4=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 h1:iK2jbkWL86DXjEx0qiHcRE9dE4/Ahua5k6V8OWFb//c=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4/go.mod h1:LuRYeWDFV6WOn90g357N17oMCaxpgCnbi/44qJvDn2I=
google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.0.5/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.0.5/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
@ -1328,6 +1429,8 @@ google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAG
google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
google.golang.org/grpc v1.69.2 h1:U3S9QEtbXC0bYNvRtcoklF3xGtLViumSYxWykJS+7AU= google.golang.org/grpc v1.69.2 h1:U3S9QEtbXC0bYNvRtcoklF3xGtLViumSYxWykJS+7AU=
google.golang.org/grpc v1.69.2/go.mod h1:vyjdE6jLBI76dgpDojsFGNaHlxdjXN9ghpnd2o7JGZ4= google.golang.org/grpc v1.69.2/go.mod h1:vyjdE6jLBI76dgpDojsFGNaHlxdjXN9ghpnd2o7JGZ4=
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@ -1345,6 +1448,8 @@ google.golang.org/protobuf v1.36.1 h1:yBPeRvTftaleIgM3PZ/WBIZ7XM/eEYAaEyCwvyjq/g
google.golang.org/protobuf v1.36.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= google.golang.org/protobuf v1.36.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.36.2 h1:R8FeyR1/eLmkutZOM5CWghmo5itiG9z0ktFlTVLuTmU= google.golang.org/protobuf v1.36.2 h1:R8FeyR1/eLmkutZOM5CWghmo5itiG9z0ktFlTVLuTmU=
google.golang.org/protobuf v1.36.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= google.golang.org/protobuf v1.36.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U= gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/cenkalti/backoff.v2 v2.2.1 h1:eJ9UAg01/HIHG987TwxvnzK2MgxXq97YY6rYDpY9aII= gopkg.in/cenkalti/backoff.v2 v2.2.1 h1:eJ9UAg01/HIHG987TwxvnzK2MgxXq97YY6rYDpY9aII=
@ -1390,6 +1495,8 @@ gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8=
gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU=
gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU=
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=

View File

@ -655,19 +655,6 @@ func (a App) WriteRecipeVersion(version string, dryRun bool) error {
splitted := strings.Split(line, ":") splitted := strings.Split(line, ":")
if a.Recipe.Dirty {
dirtyVersion = fmt.Sprintf("%s%s", version, config.DIRTY_DEFAULT)
if strings.Contains(line, dirtyVersion) {
skipped = true
lines = append(lines, line)
continue
}
line = fmt.Sprintf("%s:%s", splitted[0], dirtyVersion)
lines = append(lines, line)
continue
}
line = fmt.Sprintf("%s:%s", splitted[0], version) line = fmt.Sprintf("%s:%s", splitted[0], version)
lines = append(lines, line) lines = append(lines, line)
} }
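Note: with the dirty-version branch removed above, WriteRecipeVersion only ever does the plain split-and-replace; the dirty suffix is appended earlier, when the version string itself is computed (see the ChaosVersion change further down). A minimal stand-alone sketch of that remaining logic, assuming an env line of the form TYPE=foo:&lt;version&gt; (the helper name and example line are placeholders, not the real call site):

package main

import (
	"fmt"
	"strings"
)

// rewriteVersionLine mirrors the replacement kept in WriteRecipeVersion:
// split the line on ":" and swap in the new version, leaving any dirty
// suffix handling to whoever computed the version string.
func rewriteVersionLine(line, version string) string {
	parts := strings.Split(line, ":")
	return fmt.Sprintf("%s:%s", parts[0], version)
}

func main() {
	fmt.Println(rewriteVersionLine("TYPE=foo:1.2.2", "1.2.3"))
	// TYPE=foo:1.2.3
}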

View File

@ -223,16 +223,4 @@ func TestWriteRecipeVersionOverwrite(t *testing.T) {
} }
assert.Equal(t, "foo", app.Recipe.EnvVersion) assert.Equal(t, "foo", app.Recipe.EnvVersion)
app.Recipe.Dirty = true
if err := app.WriteRecipeVersion("foo+U", false); err != nil {
t.Fatal(err)
}
app, err = appPkg.GetApp(testPkg.ExpectedAppFiles, testPkg.AppName)
if err != nil {
t.Fatal(err)
}
assert.Equal(t, "foo+U", app.Recipe.EnvVersion)
} }

View File

@ -44,6 +44,16 @@ func SetChaosVersionLabel(compose *composetypes.Config, stackName string, chaosV
} }
} }
func SetVersionLabel(compose *composetypes.Config, stackName string, version string) {
for _, service := range compose.Services {
if service.Name == "app" {
log.Debugf("set label 'coop-cloud.%s.version' to %v for %s", stackName, version, stackName)
labelKey := fmt.Sprintf("coop-cloud.%s.version", stackName)
service.Deploy.Labels[labelKey] = version
}
}
}
// SetUpdateLabel adds env ENABLE_AUTO_UPDATE as label to enable/disable the // SetUpdateLabel adds env ENABLE_AUTO_UPDATE as label to enable/disable the
// auto update process for this app. The default if this variable is not set is to disable // auto update process for this app. The default if this variable is not set is to disable
// the auto update process. // the auto update process.
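Note: the new SetVersionLabel stamps the resolved version onto the "app" service's deploy labels, next to the existing chaos-version label. A self-contained sketch of the same pattern, using a pared-down service type in place of the real composetypes.Config (all names and the example version are placeholders):

package main

import "fmt"

// service is a stand-in for composetypes.ServiceConfig, just enough to
// show the labelling pattern used above.
type service struct {
	Name   string
	Labels map[string]string
}

// setVersionLabel mirrors SetVersionLabel: only the "app" service carries
// the coop-cloud.<stack>.version label.
func setVersionLabel(services []service, stackName, version string) {
	for _, s := range services {
		if s.Name == "app" {
			s.Labels[fmt.Sprintf("coop-cloud.%s.version", stackName)] = version
		}
	}
}

func main() {
	svcs := []service{{Name: "app", Labels: map[string]string{}}}
	setVersionLabel(svcs, "mystack_example_com", "1.2.3+3.1.0")
	fmt.Println(svcs[0].Labels)
}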

View File

@ -7,6 +7,7 @@ import (
"sort" "sort"
"strings" "strings"
"coopcloud.tech/abra/pkg/config"
"coopcloud.tech/abra/pkg/formatter" "coopcloud.tech/abra/pkg/formatter"
gitPkg "coopcloud.tech/abra/pkg/git" gitPkg "coopcloud.tech/abra/pkg/git"
"coopcloud.tech/abra/pkg/log" "coopcloud.tech/abra/pkg/log"
@ -45,6 +46,9 @@ func (r Recipe) Ensure(ctx EnsureContext) error {
if r.EnvVersion != "" && !ctx.IgnoreEnvVersion { if r.EnvVersion != "" && !ctx.IgnoreEnvVersion {
log.Debugf("ensuring env version %s", r.EnvVersion) log.Debugf("ensuring env version %s", r.EnvVersion)
if strings.Contains(r.EnvVersion, "+U") {
log.Fatalf("can not redeploy chaos version (%s) without --chaos", r.EnvVersion)
}
if _, err := r.EnsureVersion(r.EnvVersion); err != nil { if _, err := r.EnsureVersion(r.EnvVersion); err != nil {
return err return err
@ -274,19 +278,14 @@ func (r Recipe) EnsureUpToDate() error {
return nil return nil
} }
// IsDirty checks whether a recipe is dirty or not. N.B., if you call IsDirty // IsDirty checks whether a recipe is dirty or not.
// from another Recipe method, you should propagate the pointer reference (*). func (r *Recipe) IsDirty() (bool, error) {
func (r *Recipe) IsDirty() error {
isClean, err := gitPkg.IsClean(r.Dir) isClean, err := gitPkg.IsClean(r.Dir)
if err != nil { if err != nil {
return err return false, err
} }
if !isClean { return !isClean, nil
r.Dirty = true
}
return nil
} }
// ChaosVersion constructs a chaos mode recipe version. // ChaosVersion constructs a chaos mode recipe version.
@ -300,8 +299,12 @@ func (r *Recipe) ChaosVersion() (string, error) {
version = formatter.SmallSHA(head.String()) version = formatter.SmallSHA(head.String())
if err := r.IsDirty(); err != nil { dirty, err := r.IsDirty()
return version, err if err != nil {
return "", err
}
if dirty {
return fmt.Sprintf("%s%s", version, config.DIRTY_DEFAULT), nil
} }
return version, nil return version, nil
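Note: IsDirty now reports the dirty state instead of mutating r.Dirty, so callers such as ChaosVersion and Get decide what to do with it (append config.DIRTY_DEFAULT, set the struct field, or bail out). Underneath it still asks git whether the worktree is clean; a minimal, self-contained sketch of that check using go-git directly (a hypothetical helper, not the abra function itself):

package main

import (
	"fmt"

	"github.com/go-git/go-git/v5"
)

// isDirty reports whether the checkout at dir has uncommitted changes,
// the same question the refactored Recipe.IsDirty answers via gitPkg.IsClean.
func isDirty(dir string) (bool, error) {
	repo, err := git.PlainOpen(dir)
	if err != nil {
		return false, err
	}
	wt, err := repo.Worktree()
	if err != nil {
		return false, err
	}
	status, err := wt.Status()
	if err != nil {
		return false, err
	}
	return !status.IsClean(), nil
}

func main() {
	dirty, err := isDirty(".")
	fmt.Println(dirty, err)
}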

View File

@ -15,10 +15,6 @@ func TestIsDirty(t *testing.T) {
t.Fatal(err) t.Fatal(err)
} }
if err := r.IsDirty(); err != nil {
t.Fatal(err)
}
assert.False(t, r.Dirty) assert.False(t, r.Dirty)
fpath := filepath.Join(r.Dir, "foo.txt") fpath := filepath.Join(r.Dir, "foo.txt")
@ -31,9 +27,10 @@ func TestIsDirty(t *testing.T) {
os.Remove(fpath) os.Remove(fpath)
}) })
if err := r.IsDirty(); err != nil { dirty, err := r.IsDirty()
if err != nil {
t.Fatal(err) t.Fatal(err)
} }
assert.True(t, r.Dirty) assert.True(t, dirty)
} }

View File

@ -12,6 +12,8 @@ import (
"strconv" "strconv"
"strings" "strings"
"github.com/go-git/go-git/v5"
"coopcloud.tech/abra/pkg/catalogue" "coopcloud.tech/abra/pkg/catalogue"
"coopcloud.tech/abra/pkg/config" "coopcloud.tech/abra/pkg/config"
"coopcloud.tech/abra/pkg/formatter" "coopcloud.tech/abra/pkg/formatter"
@ -20,7 +22,6 @@ import (
"coopcloud.tech/abra/pkg/log" "coopcloud.tech/abra/pkg/log"
"coopcloud.tech/abra/pkg/web" "coopcloud.tech/abra/pkg/web"
"coopcloud.tech/tagcmp" "coopcloud.tech/tagcmp"
"github.com/go-git/go-git/v5"
) )
// RecipeCatalogueURL is the only current recipe catalogue available. // RecipeCatalogueURL is the only current recipe catalogue available.
@ -119,22 +120,9 @@ type Features struct {
SSO string `json:"sso"` SSO string `json:"sso"`
} }
func GetEnvVersionRaw(name string) (string, error) {
var version string
if strings.Contains(name, ":") {
split := strings.Split(name, ":")
if len(split) > 2 {
return version, fmt.Errorf("version seems invalid: %s", name)
}
version = split[1]
}
return version, nil
}
func Get(name string) Recipe { func Get(name string) Recipe {
version := "" version := ""
versionRaw := ""
if strings.Contains(name, ":") { if strings.Contains(name, ":") {
split := strings.Split(name, ":") split := strings.Split(name, ":")
if len(split) > 2 { if len(split) > 2 {
@ -143,6 +131,7 @@ func Get(name string) Recipe {
name = split[0] name = split[0]
version = split[1] version = split[1]
versionRaw = version
if strings.HasSuffix(version, config.DIRTY_DEFAULT) { if strings.HasSuffix(version, config.DIRTY_DEFAULT) {
version = strings.Replace(split[1], config.DIRTY_DEFAULT, "", 1) version = strings.Replace(split[1], config.DIRTY_DEFAULT, "", 1)
log.Debugf("removed dirty suffix from .env version: %s -> %s", split[1], version) log.Debugf("removed dirty suffix from .env version: %s -> %s", split[1], version)
@ -169,6 +158,7 @@ func Get(name string) Recipe {
r := Recipe{ r := Recipe{
Name: name, Name: name,
EnvVersion: version, EnvVersion: version,
EnvVersionRaw: versionRaw,
Dir: dir, Dir: dir,
GitURL: gitURL, GitURL: gitURL,
SSHURL: sshURL, SSHURL: sshURL,
@ -179,9 +169,11 @@ func Get(name string) Recipe {
AbraShPath: path.Join(dir, "abra.sh"), AbraShPath: path.Join(dir, "abra.sh"),
} }
if err := r.IsDirty(); err != nil && !errors.Is(err, git.ErrRepositoryNotExists) { dirty, err := r.IsDirty()
if err != nil && !errors.Is(err, git.ErrRepositoryNotExists) {
log.Fatalf("failed to check git status of %s: %s", r.Name, err) log.Fatalf("failed to check git status of %s: %s", r.Name, err)
} }
r.Dirty = dirty
return r return r
} }
@ -189,6 +181,7 @@ func Get(name string) Recipe {
type Recipe struct { type Recipe struct {
Name string Name string
EnvVersion string EnvVersion string
EnvVersionRaw string
Dirty bool // NOTE(d1): git terminology for unstaged changes Dirty bool // NOTE(d1): git terminology for unstaged changes
Dir string Dir string
GitURL string GitURL string
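Get() now records both the cleaned version and the raw `.env` version, so the `+U` marker survives parsing; the new guard in Ensure() uses exactly that to refuse redeploying a chaos version without --chaos. A standalone sketch of the parsing (the suffix constant stands in for config.DIRTY_DEFAULT):

```go
package main

import (
	"fmt"
	"strings"
)

const dirtySuffix = "+U" // stands in for config.DIRTY_DEFAULT

// parseRecipeRef sketches the version handling in Get(): the raw version is
// kept alongside the cleaned one so chaos deployments remain detectable.
func parseRecipeRef(ref string) (name, version, versionRaw string, err error) {
	name = ref
	if strings.Contains(ref, ":") {
		split := strings.Split(ref, ":")
		if len(split) > 2 {
			return "", "", "", fmt.Errorf("version seems invalid: %s", ref)
		}
		name, version = split[0], split[1]
		versionRaw = version
		if strings.HasSuffix(version, dirtySuffix) {
			version = strings.TrimSuffix(version, dirtySuffix)
		}
	}
	return name, version, versionRaw, nil
}

func main() {
	name, v, raw, _ := parseRecipeRef("mygit.org/myorg/cool-recipe:1e83340e+U")
	fmt.Println(name, v, raw)
	// mygit.org/myorg/cool-recipe 1e83340e 1e83340e+U
}
```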

View File

@ -34,6 +34,7 @@ func TestGet(t *testing.T) {
recipe: Recipe{ recipe: Recipe{
Name: "foo", Name: "foo",
EnvVersion: "1.2.3", EnvVersion: "1.2.3",
EnvVersionRaw: "1.2.3",
Dir: path.Join(cfg.GetAbraDir(), "/recipes/foo"), Dir: path.Join(cfg.GetAbraDir(), "/recipes/foo"),
GitURL: "https://git.coopcloud.tech/coop-cloud/foo.git", GitURL: "https://git.coopcloud.tech/coop-cloud/foo.git",
SSHURL: "ssh://git@git.coopcloud.tech:2222/coop-cloud/foo.git", SSHURL: "ssh://git@git.coopcloud.tech:2222/coop-cloud/foo.git",
@ -61,6 +62,22 @@ func TestGet(t *testing.T) {
recipe: Recipe{ recipe: Recipe{
Name: "mygit.org/myorg/cool-recipe", Name: "mygit.org/myorg/cool-recipe",
EnvVersion: "1.2.4", EnvVersion: "1.2.4",
EnvVersionRaw: "1.2.4",
Dir: path.Join(cfg.GetAbraDir(), "/recipes/mygit_org_myorg_cool-recipe"),
GitURL: "https://mygit.org/myorg/cool-recipe.git",
SSHURL: "ssh://git@mygit.org/myorg/cool-recipe.git",
ComposePath: path.Join(cfg.GetAbraDir(), "recipes/mygit_org_myorg_cool-recipe/compose.yml"),
ReadmePath: path.Join(cfg.GetAbraDir(), "recipes/mygit_org_myorg_cool-recipe/README.md"),
SampleEnvPath: path.Join(cfg.GetAbraDir(), "recipes/mygit_org_myorg_cool-recipe/.env.sample"),
AbraShPath: path.Join(cfg.GetAbraDir(), "recipes/mygit_org_myorg_cool-recipe/abra.sh"),
},
},
{
name: "mygit.org/myorg/cool-recipe:1e83340e+U",
recipe: Recipe{
Name: "mygit.org/myorg/cool-recipe",
EnvVersion: "1e83340e",
EnvVersionRaw: "1e83340e+U",
Dir: path.Join(cfg.GetAbraDir(), "/recipes/mygit_org_myorg_cool-recipe"), Dir: path.Join(cfg.GetAbraDir(), "/recipes/mygit_org_myorg_cool-recipe"),
GitURL: "https://mygit.org/myorg/cool-recipe.git", GitURL: "https://mygit.org/myorg/cool-recipe.git",
SSHURL: "ssh://git@mygit.org/myorg/cool-recipe.git", SSHURL: "ssh://git@mygit.org/myorg/cool-recipe.git",
@ -105,16 +122,3 @@ func TestGetVersionLabelLocalDoesNotUseTimeoutLabel(t *testing.T) {
assert.NotEqual(t, label, defaultTimeoutLabel) assert.NotEqual(t, label, defaultTimeoutLabel)
} }
} }
func TestDirtyMarkerRemoved(t *testing.T) {
r := Get("abra-test-recipe:1e83340e+U")
assert.Equal(t, "1e83340e", r.EnvVersion)
}
func TestGetEnvVersionRaw(t *testing.T) {
v, err := GetEnvVersionRaw("abra-test-recipe:1e83340e+U")
if err != nil {
t.Fatal(err)
}
assert.Equal(t, "1e83340e+U", v)
}

View File

@ -9,14 +9,14 @@ import (
"coopcloud.tech/abra/pkg/log" "coopcloud.tech/abra/pkg/log"
"github.com/docker/cli/cli" "github.com/docker/cli/cli"
"github.com/docker/cli/cli/command" "github.com/docker/cli/cli/command"
"github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container"
apiclient "github.com/docker/docker/client" apiclient "github.com/docker/docker/client"
) )
// RunExec runs a command on a remote container. io.Writer corresponds to the // RunExec runs a command on a remote container. io.Writer corresponds to the
// command output. // command output.
func RunExec(dockerCli command.Cli, client *apiclient.Client, containerID string, func RunExec(dockerCli command.Cli, client *apiclient.Client, containerID string,
execConfig *types.ExecConfig) (io.Writer, error) { execOptions *container.ExecOptions) (io.Writer, error) {
ctx := context.Background() ctx := context.Background()
// We need to check the tty _before_ we do the ContainerExecCreate, because // We need to check the tty _before_ we do the ContainerExecCreate, because
@ -26,13 +26,13 @@ func RunExec(dockerCli command.Cli, client *apiclient.Client, containerID string
if _, err := client.ContainerInspect(ctx, containerID); err != nil { if _, err := client.ContainerInspect(ctx, containerID); err != nil {
return nil, err return nil, err
} }
if !execConfig.Detach { if !execOptions.Detach {
if err := dockerCli.In().CheckTty(execConfig.AttachStdin, execConfig.Tty); err != nil { if err := dockerCli.In().CheckTty(execOptions.AttachStdin, execOptions.Tty); err != nil {
return nil, err return nil, err
} }
} }
response, err := client.ContainerExecCreate(ctx, containerID, *execConfig) response, err := client.ContainerExecCreate(ctx, containerID, *execOptions)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -42,40 +42,40 @@ func RunExec(dockerCli command.Cli, client *apiclient.Client, containerID string
return nil, errors.New("exec ID empty") return nil, errors.New("exec ID empty")
} }
if execConfig.Detach { if execOptions.Detach {
execStartCheck := types.ExecStartCheck{ execStartCheck := container.ExecStartOptions{
Detach: execConfig.Detach, Detach: execOptions.Detach,
Tty: execConfig.Tty, Tty: execOptions.Tty,
} }
return nil, client.ContainerExecStart(ctx, execID, execStartCheck) return nil, client.ContainerExecStart(ctx, execID, execStartCheck)
} }
return interactiveExec(ctx, dockerCli, client, execConfig, execID) return interactiveExec(ctx, dockerCli, client, execOptions, execID)
} }
func interactiveExec(ctx context.Context, dockerCli command.Cli, client *apiclient.Client, func interactiveExec(ctx context.Context, dockerCli command.Cli, client *apiclient.Client,
execConfig *types.ExecConfig, execID string) (io.Writer, error) { execOpts *container.ExecOptions, execID string) (io.Writer, error) {
// Interactive exec requested. // Interactive exec requested.
var ( var (
out, stderr io.Writer out, stderr io.Writer
in io.ReadCloser in io.ReadCloser
) )
if execConfig.AttachStdin { if execOpts.AttachStdin {
in = dockerCli.In() in = dockerCli.In()
} }
if execConfig.AttachStdout { if execOpts.AttachStdout {
out = dockerCli.Out() out = dockerCli.Out()
} }
if execConfig.AttachStderr { if execOpts.AttachStderr {
if execConfig.Tty { if execOpts.Tty {
stderr = dockerCli.Out() stderr = dockerCli.Out()
} else { } else {
stderr = dockerCli.Err() stderr = dockerCli.Err()
} }
} }
execStartCheck := types.ExecStartCheck{ execStartCheck := container.ExecStartOptions{
Tty: execConfig.Tty, Tty: execOpts.Tty,
} }
resp, err := client.ContainerExecAttach(ctx, execID, execStartCheck) resp, err := client.ContainerExecAttach(ctx, execID, execStartCheck)
if err != nil { if err != nil {
@ -94,15 +94,15 @@ func interactiveExec(ctx context.Context, dockerCli command.Cli, client *apiclie
outputStream: out, outputStream: out,
errorStream: stderr, errorStream: stderr,
resp: resp, resp: resp,
tty: execConfig.Tty, tty: execOpts.Tty,
detachKeys: execConfig.DetachKeys, detachKeys: execOpts.DetachKeys,
} }
return streamer.stream(ctx) return streamer.stream(ctx)
}() }()
}() }()
if execConfig.Tty && dockerCli.In().IsTerminal() { if execOpts.Tty && dockerCli.In().IsTerminal() {
if err := MonitorTtySize(ctx, client, dockerCli, execID, true); err != nil { if err := MonitorTtySize(ctx, client, dockerCli, execID, true); err != nil {
fmt.Fprintln(dockerCli.Err(), "Error monitoring TTY size:", err) fmt.Fprintln(dockerCli.Err(), "Error monitoring TTY size:", err)
} }
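This is the docker API rename that comes with the newer docker module: container.ExecOptions replaces types.ExecConfig and container.ExecStartOptions replaces types.ExecStartCheck. A minimal detached-exec sketch against the new types (container ID and command are placeholders, and it assumes a docker/docker module recent enough, v26 or later, to export them):

```go
package main

import (
	"context"
	"fmt"

	"github.com/docker/docker/api/types/container"
	"github.com/docker/docker/client"
)

func main() {
	cli, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation())
	if err != nil {
		panic(err)
	}
	defer cli.Close()

	ctx := context.Background()
	// container.ExecOptions is the renamed types.ExecConfig.
	resp, err := cli.ContainerExecCreate(ctx, "some-container-id", container.ExecOptions{
		Cmd:    []string{"sh", "-c", "echo hello"},
		Detach: true,
	})
	if err != nil {
		panic(err)
	}

	// Detached start mirrors the execStartCheck branch in RunExec.
	err = cli.ContainerExecStart(ctx, resp.ID, container.ExecStartOptions{Detach: true})
	fmt.Println(resp.ID, err)
}
```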

View File

@ -5,7 +5,6 @@ import (
"strings" "strings"
composetypes "github.com/docker/cli/cli/compose/types" composetypes "github.com/docker/cli/cli/compose/types"
"github.com/docker/docker/api/types"
networktypes "github.com/docker/docker/api/types/network" networktypes "github.com/docker/docker/api/types/network"
"github.com/docker/docker/api/types/swarm" "github.com/docker/docker/api/types/swarm"
) )
@ -52,13 +51,13 @@ func AddStackLabel(namespace Namespace, labels map[string]string) map[string]str
type networkMap map[string]composetypes.NetworkConfig type networkMap map[string]composetypes.NetworkConfig
// Networks from the compose-file type to the engine API type // Networks from the compose-file type to the engine API type
func Networks(namespace Namespace, networks networkMap, servicesNetworks map[string]struct{}) (map[string]types.NetworkCreate, []string) { func Networks(namespace Namespace, networks networkMap, servicesNetworks map[string]struct{}) (map[string]networktypes.CreateOptions, []string) {
if networks == nil { if networks == nil {
networks = make(map[string]composetypes.NetworkConfig) networks = make(map[string]composetypes.NetworkConfig)
} }
externalNetworks := []string{} externalNetworks := []string{}
result := make(map[string]types.NetworkCreate) result := make(map[string]networktypes.CreateOptions)
for internalName := range servicesNetworks { for internalName := range servicesNetworks {
network := networks[internalName] network := networks[internalName]
if network.External.External { if network.External.External {
@ -66,7 +65,7 @@ func Networks(namespace Namespace, networks networkMap, servicesNetworks map[str
continue continue
} }
createOpts := types.NetworkCreate{ createOpts := networktypes.CreateOptions{
Labels: AddStackLabel(namespace, network.Labels), Labels: AddStackLabel(namespace, network.Labels),
Driver: network.Driver, Driver: network.Driver,
Options: network.DriverOpts, Options: network.DriverOpts,
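Same story for networks: types.NetworkCreate becomes network.CreateOptions here, and the hunks below swap types.NetworkResource for network.Inspect and the list/inspect option structs for network.ListOptions/InspectOptions. A small sketch against the renamed types (stack name is a placeholder; assumes a docker/docker module of v26 or later):

```go
package main

import (
	"context"
	"fmt"

	"github.com/docker/docker/api/types/filters"
	networktypes "github.com/docker/docker/api/types/network"
	"github.com/docker/docker/client"
)

func main() {
	cli, err := client.NewClientWithOpts(client.FromEnv, client.WithAPIVersionNegotiation())
	if err != nil {
		panic(err)
	}
	defer cli.Close()
	ctx := context.Background()

	// CreateOptions is the renamed types.NetworkCreate.
	createOpts := networktypes.CreateOptions{
		Driver: "overlay",
		Labels: map[string]string{"com.docker.stack.namespace": "gitea_example_com"},
	}
	if _, err := cli.NetworkCreate(ctx, "gitea_example_com_default", createOpts); err != nil {
		fmt.Println("create:", err)
	}

	// ListOptions is the renamed types.NetworkListOptions; each result is a
	// network summary (what getStackNetworks now handles as networktypes.Inspect).
	f := filters.NewArgs(filters.Arg("label", "com.docker.stack.namespace=gitea_example_com"))
	nets, err := cli.NetworkList(ctx, networktypes.ListOptions{Filters: f})
	if err != nil {
		panic(err)
	}
	for _, n := range nets {
		fmt.Println(n.Name, n.ID)
	}
}
```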

View File

@ -4,7 +4,6 @@ import (
"testing" "testing"
composetypes "github.com/docker/cli/cli/compose/types" composetypes "github.com/docker/cli/cli/compose/types"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/network" "github.com/docker/docker/api/types/network"
"gotest.tools/v3/assert" "gotest.tools/v3/assert"
is "gotest.tools/v3/assert/cmp" is "gotest.tools/v3/assert/cmp"
@ -67,7 +66,7 @@ func TestNetworks(t *testing.T) {
Name: "othername", Name: "othername",
}, },
} }
expected := map[string]types.NetworkCreate{ expected := map[string]network.CreateOptions{
"foo_default": { "foo_default": {
Labels: map[string]string{ Labels: map[string]string{
LabelNamespace: "foo", LabelNamespace: "foo",

View File

@ -8,6 +8,7 @@ import (
"coopcloud.tech/abra/pkg/log" "coopcloud.tech/abra/pkg/log"
"github.com/docker/docker/api/types" "github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/network"
"github.com/docker/docker/api/types/swarm" "github.com/docker/docker/api/types/swarm"
"github.com/docker/docker/api/types/versions" "github.com/docker/docker/api/types/versions"
"github.com/docker/docker/client" "github.com/docker/docker/client"
@ -99,7 +100,7 @@ func removeServices(
func removeNetworks( func removeNetworks(
ctx context.Context, ctx context.Context,
client *apiclient.Client, client *apiclient.Client,
networks []types.NetworkResource, networks []network.Inspect,
) bool { ) bool {
var hasError bool var hasError bool
for _, network := range networks { for _, network := range networks {

View File

@ -22,6 +22,7 @@ import (
"github.com/docker/docker/api/types" "github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/filters" "github.com/docker/docker/api/types/filters"
networktypes "github.com/docker/docker/api/types/network"
"github.com/docker/docker/api/types/swarm" "github.com/docker/docker/api/types/swarm"
"github.com/docker/docker/api/types/versions" "github.com/docker/docker/api/types/versions"
"github.com/docker/docker/client" "github.com/docker/docker/client"
@ -296,7 +297,7 @@ func validateExternalNetworks(ctx context.Context, client dockerClient.NetworkAP
// local-scoped networks, so there's no need to inspect them. // local-scoped networks, so there's no need to inspect them.
continue continue
} }
network, err := client.NetworkInspect(ctx, networkName, types.NetworkInspectOptions{}) network, err := client.NetworkInspect(ctx, networkName, networktypes.InspectOptions{})
switch { switch {
case dockerClient.IsErrNotFound(err): case dockerClient.IsErrNotFound(err):
return errors.Errorf("network %q is declared as external, but could not be found. You need to create a swarm-scoped network before the stack is deployed, which you can do by running this on the server: docker network create -d overlay proxy", networkName) return errors.Errorf("network %q is declared as external, but could not be found. You need to create a swarm-scoped network before the stack is deployed, which you can do by running this on the server: docker network create -d overlay proxy", networkName)
@ -353,13 +354,13 @@ func createConfigs(ctx context.Context, cl *dockerClient.Client, configs []swarm
return nil return nil
} }
func createNetworks(ctx context.Context, cl *dockerClient.Client, namespace convert.Namespace, networks map[string]types.NetworkCreate) error { func createNetworks(ctx context.Context, cl *dockerClient.Client, namespace convert.Namespace, networks map[string]networktypes.CreateOptions) error {
existingNetworks, err := getStackNetworks(ctx, cl, namespace.Name()) existingNetworks, err := getStackNetworks(ctx, cl, namespace.Name())
if err != nil { if err != nil {
return err return err
} }
existingNetworkMap := make(map[string]types.NetworkResource) existingNetworkMap := make(map[string]networktypes.Inspect)
for _, network := range existingNetworks { for _, network := range existingNetworks {
existingNetworkMap[network.Name] = network existingNetworkMap[network.Name] = network
} }
@ -473,8 +474,8 @@ func deployServices(
return serviceIDs, nil return serviceIDs, nil
} }
func getStackNetworks(ctx context.Context, dockerclient client.APIClient, namespace string) ([]types.NetworkResource, error) { func getStackNetworks(ctx context.Context, dockerclient client.APIClient, namespace string) ([]networktypes.Inspect, error) {
return dockerclient.NetworkList(ctx, types.NetworkListOptions{Filters: getStackFilter(namespace)}) return dockerclient.NetworkList(ctx, networktypes.ListOptions{Filters: getStackFilter(namespace)})
} }
func getStackSecrets(ctx context.Context, dockerclient client.APIClient, namespace string) ([]swarm.Secret, error) { func getStackSecrets(ctx context.Context, dockerclient client.APIClient, namespace string) ([]swarm.Secret, error) {

View File

@ -22,6 +22,8 @@ setup(){
teardown(){ teardown(){
_reset_recipe _reset_recipe
_undeploy_app _undeploy_app
_undeploy_app2 "gitea.$TEST_SERVER"
_reset_app _reset_app
_reset_tags _reset_tags
@ -222,19 +224,6 @@ teardown(){
run $ABRA app deploy "gitea.$TEST_SERVER" --no-input --no-converge-checks run $ABRA app deploy "gitea.$TEST_SERVER" --no-input --no-converge-checks
assert_success assert_success
assert_output --partial "$latestVersion" assert_output --partial "$latestVersion"
run $ABRA app undeploy "gitea.$TEST_SERVER" --no-input
assert_success
run $ABRA app secret remove "gitea.$TEST_SERVER" --all --no-input
assert_success
run $ABRA app volume remove "gitea.$TEST_SERVER" --no-input
assert_success
run $ABRA app remove "gitea.$TEST_SERVER" --no-input
assert_success
assert_not_exists "$ABRA_DIR/servers/$TEST_SERVER/gitea.$TEST_SERVER.env"
} }
# bats test_tags=slow # bats test_tags=slow

View File

@ -37,17 +37,10 @@ teardown(){
--no-input --no-converge-checks --no-input --no-converge-checks
assert_success assert_success
# current deployment assert_output --partial 'NEW DEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*N/A' assert_output --partial 'CURRENT DEPLOYMENT N/A'
assert_output --regexp 'CHAOS.*false' assert_output --partial 'ENV VERSION N/A'
assert_output --partial "NEW DEPLOYMENT ${latestRelease}"
# new deployment
assert_output --regexp 'VERSION.* ' + "${latestRelease}"
assert_output --regexp 'CHAOS.*false'
# env version
assert_output --regexp 'CURRENT VERSION.*N/A'
assert_output --regexp 'NEW VERSION.*' + "${latestRelease}"
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \ run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -61,17 +54,10 @@ teardown(){
--no-input --no-converge-checks --no-input --no-converge-checks
assert_success assert_success
# current deployment assert_output --partial 'NEW DEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*N/A' assert_output --partial "CURRENT DEPLOYMENT N/A"
assert_output --regexp 'CHAOS.*false' assert_output --partial "ENV VERSION ${latestRelease}"
assert_output --partial "NEW DEPLOYMENT ${latestRelease}"
# new deployment
assert_output --regexp 'VERSION.* ' + "${latestRelease}"
assert_output --regexp 'CHAOS.*false'
# env version
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
assert_output --regexp 'NEW VERSION.*' + "${latestRelease}"
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \ run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -90,17 +76,10 @@ teardown(){
--no-input --no-converge-checks --no-input --no-converge-checks
assert_success assert_success
# current deployment assert_output --partial 'NEW DEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*N/A' assert_output --partial "CURRENT DEPLOYMENT N/A"
assert_output --regexp 'CHAOS.*false' assert_output --partial "ENV VERSION 0.1.1+1.20.2"
assert_output --partial "NEW DEPLOYMENT 0.1.1+1.20.2"
# new deployment
assert_output --regexp 'VERSION.*' + "0.1.1+1.20.2"
assert_output --regexp 'CHAOS.*false'
# env version
assert_output --regexp 'CURRENT VERSION.*' + "0.1.1+1.20.2"
assert_output --regexp 'NEW VERSION.*' + "0.1.1+1.20.2"
run grep -q "TYPE=$TEST_RECIPE:0.1.1+1.20.2" \ run grep -q "TYPE=$TEST_RECIPE:0.1.1+1.20.2" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -120,17 +99,10 @@ teardown(){
--no-input --no-converge-checks --ignore-env-version --no-input --no-converge-checks --ignore-env-version
assert_success assert_success
# current deployment assert_output --partial 'NEW DEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*N/A' assert_output --partial "CURRENT DEPLOYMENT N/A"
assert_output --regexp 'CHAOS.*false' assert_output --partial "ENV VERSION 0.1.1+1.20.2"
assert_output --partial "NEW DEPLOYMENT ${latestRelease}"
# new deployment
assert_output --regexp 'VERSION.*' + "${latestRelease}"
assert_output --regexp 'CHAOS.*false'
# env version
assert_output --regexp 'CURRENT VERSION.*' + "0.1.1+1.20.2"
assert_output --regexp 'NEW VERSION.*' + "${latestRelease}"
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \ run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -153,17 +125,10 @@ teardown(){
--no-input --no-converge-checks --chaos --no-input --no-converge-checks --chaos
assert_success assert_success
# current deployment assert_output --partial 'CHAOS DEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*' + "${latestRelease}" assert_output --partial "CURRENT DEPLOYMENT ${latestRelease}"
assert_output --regexp 'CHAOS.*false' assert_output --partial "ENV VERSION ${latestRelease}"
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}+U"
# new deployment
assert_output --regexp 'VERSION.*' + "${latestRelease}"
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}+U"
# env version
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}+U"
run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo" run rm -rf "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo" assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
@ -173,7 +138,7 @@ teardown(){
assert_success assert_success
} }
@test "chaos deploy then force deploy" { @test "can not redeploy chaos version without --chaos" {
headHash=$(_get_head_hash) headHash=$(_get_head_hash)
latestRelease=$(_latest_release) latestRelease=$(_latest_release)
@ -189,27 +154,12 @@ teardown(){
assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo" assert_not_exists "$ABRA_DIR/recipes/$TEST_RECIPE/foo"
run $ABRA app deploy "$TEST_APP_DOMAIN" \ run $ABRA app deploy "$TEST_APP_DOMAIN" \
--no-input --no-converge-checks --force --no-input --no-converge-checks --force --debug
assert_success assert_failure
assert_output --regexp 'can not redeploy chaos version .*' + "${headHash:0:8}+U"
# current deployment
assert_output --regexp 'VERSION.*' + "${latestRelease}"
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}+U"
# new deployment
assert_output --regexp 'VERSION.*' + "${latestRelease}"
assert_output --regexp 'CHAOS.*false'
# env version
assert_output --regexp 'CURRENT VERSION.*' + "${headHash:0:8}+U"
assert_output --regexp 'NEW VERSION.*' + "${latestRelease}"
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
assert_success
} }
@test "deploy then force chaos commit deploy" { @test "deploy then force commit deploy" {
headHash=$(_get_head_hash) headHash=$(_get_head_hash)
latestRelease=$(_latest_release) latestRelease=$(_latest_release)
@ -225,17 +175,10 @@ teardown(){
--no-input --no-converge-checks --force --no-input --no-converge-checks --force
assert_success assert_success
# current deployment assert_output --partial 'DEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*' + "${latestRelease}" assert_output --partial "CURRENT DEPLOYMENT ${latestRelease}"
assert_output --regexp 'CHAOS.*false' assert_output --partial "ENV VERSION ${latestRelease}"
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}"
# new deployment
assert_output --regexp 'VERSION.*' + "${latestRelease}"
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}"
# env version
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}"
run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \ run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -250,17 +193,10 @@ teardown(){
--no-input --no-converge-checks --chaos --no-input --no-converge-checks --chaos
assert_success assert_success
# current deployment assert_output --partial 'NEW DEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*N/A' assert_output --partial "CURRENT DEPLOYMENT N/A"
assert_output --regexp 'CHAOS.*false' assert_output --partial "ENV VERSION ${latestRelease}"
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}"
# new deployment
assert_output --regexp 'VERSION.*' + "${latestRelease}"
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}"
# env version
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}"
run bash -c 'echo "unstaged changes" >> "$ABRA_DIR/recipes/$TEST_RECIPE/foo"' run bash -c 'echo "unstaged changes" >> "$ABRA_DIR/recipes/$TEST_RECIPE/foo"'
assert_success assert_success
@ -270,17 +206,28 @@ teardown(){
--no-input --no-converge-checks --chaos --no-input --no-converge-checks --chaos
assert_success assert_success
# current deployment assert_output --partial 'CHAOS DEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*' + "${latestRelease}" assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}"
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}" assert_output --partial "ENV VERSION ${headHash:0:8}"
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}+U"
# new deployment run $ABRA app deploy "$TEST_APP_DOMAIN" \
assert_output --regexp 'VERSION.*' + "${latestRelease}" --no-input --no-converge-checks --chaos
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}+U" assert_success
# env version assert_output --partial 'CHAOS DEPLOY OVERVIEW'
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}" assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}+U"
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}+U" assert_output --partial "ENV VERSION ${headHash:0:8}+U"
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}+U"
run $ABRA app deploy "$TEST_APP_DOMAIN" \
--no-input --no-converge-checks --chaos
assert_success
assert_output --partial 'CHAOS DEPLOY OVERVIEW'
assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}+U"
assert_output --partial "ENV VERSION ${headHash:0:8}+U"
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}+U"
run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \ run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -302,19 +249,8 @@ teardown(){
--no-input --no-converge-checks --force --no-input --no-converge-checks --force
assert_success assert_success
# current deployment assert_output --partial 'REDEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*' + "${latestRelease}" assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}"
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}" assert_output --partial "ENV VERSION ${headHash:0:8}"
assert_output --partial "NEW DEPLOYMENT ${headHash:0:8}"
# new deployment
assert_output --regexp 'VERSION.*' + "${latestRelease}"
assert_output --regexp 'CHAOS.*false'
# env version
assert_output --regexp 'CURRENT VERSION.*' + "${headHash:0:8}"
assert_output --regexp 'NEW VERSION.*' + "${latestRelease}"
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
assert_success
} }

View File

@ -19,8 +19,8 @@ setup(){
} }
teardown(){ teardown(){
_reset_app
_undeploy_app _undeploy_app
_reset_app
_reset_recipe _reset_recipe
} }
@ -153,7 +153,7 @@ teardown(){
} }
# bats test_tags=slow # bats test_tags=slow
@test "rollback chaos deployment" { @test "rollback chaos deployment is not possible" {
tagHash=$(_get_tag_hash "0.2.0+1.21.0") tagHash=$(_get_tag_hash "0.2.0+1.21.0")
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" checkout "$tagHash" run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" checkout "$tagHash"
assert_success assert_success
@ -163,17 +163,8 @@ teardown(){
assert_output --partial "${tagHash:0:8}" assert_output --partial "${tagHash:0:8}"
run $ABRA app rollback "$TEST_APP_DOMAIN" "0.1.1+1.20.2" --no-input --no-converge-checks run $ABRA app rollback "$TEST_APP_DOMAIN" "0.1.1+1.20.2" --no-input --no-converge-checks
assert_success assert_failure
assert_output --partial "0.1.1+1.20.2" assert_output --partial 'current deployment' + "${tagHash:0:8}" + 'is not a known version'
assert_output --partial "${tagHash:0:8}"
run $ABRA app rollback "$TEST_APP_DOMAIN" "0.1.0+1.20.0" --no-input --no-converge-checks
assert_success
assert_output --partial "0.1.0+1.20.0"
tagHash=$(_get_tag_hash "0.1.1+1.20.2")
refute_output --partial "${tagHash:0:8}"
assert_output --partial "false"
} }
# bats test_tags=slow # bats test_tags=slow

View File

@ -33,16 +33,10 @@ teardown(){
--no-input --no-converge-checks --no-input --no-converge-checks
assert_success assert_success
# current deployment assert_output --partial 'DOWNGRADE OVERVIEW'
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0" assert_output --partial 'CURRENT DEPLOYMENT 0.2.0+1.21.0'
assert_output --regexp 'CHAOS.*false' assert_output --partial 'ENV VERSION 0.2.0+1.21.0'
assert_output --partial 'NEW DEPLOYMENT 0.1.0+1.20.0'
# rollback
assert_output --regexp 'VERSION.*' + "0.1.0+1.20.0"
# env version
assert_output --regexp 'CURRENT VERSION.*' + "0.2.0+1.21.0"
assert_output --regexp 'NEW VERSION.*' + "0.1.0+1.20.0"
run grep -q "TYPE=$TEST_RECIPE:0.1.0+1.20.0" \ run grep -q "TYPE=$TEST_RECIPE:0.1.0+1.20.0" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -58,16 +52,10 @@ teardown(){
--no-input --no-converge-checks --force --no-input --no-converge-checks --force
assert_success assert_success
# current deployment assert_output --partial 'REDEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0" assert_output --partial 'CURRENT DEPLOYMENT 0.2.0+1.21.0'
assert_output --regexp 'CHAOS.*false' assert_output --partial 'ENV VERSION 0.2.0+1.21.0'
assert_output --partial 'NEW DEPLOYMENT 0.2.0+1.21.0'
# rollback
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
# env version
assert_output --regexp 'CURRENT VERSION.*' + "0.2.0+1.21.0"
assert_output --regexp 'NEW VERSION.*' + "0.2.0+1.21.0"
run grep -q "TYPE=$TEST_RECIPE:0.2.0+1.21.0" \ run grep -q "TYPE=$TEST_RECIPE:0.2.0+1.21.0" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -85,16 +73,10 @@ teardown(){
--no-input --no-converge-checks --no-input --no-converge-checks
assert_success assert_success
# current deployment assert_output --partial 'DOWNGRADE OVERVIEW'
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0" assert_output --partial 'CURRENT DEPLOYMENT 0.2.0+1.21.0'
assert_output --regexp 'CHAOS.*false' assert_output --partial 'ENV VERSION N/A'
assert_output --partial 'NEW DEPLOYMENT 0.1.0+1.20.0'
# rollback
assert_output --regexp 'VERSION.*' + "0.1.0+1.20.0"
# env version
assert_output --regexp 'CURRENT VERSION.*N/A'
assert_output --regexp 'NEW VERSION.*' + "0.2.0+1.21.0"
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \ run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"

View File

@ -92,9 +92,6 @@ teardown(){
run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input
assert_success assert_success
# NOTE(d1): ensure not chaos undeploy
assert_output --partial 'false'
} }
# bats test_tags=slow # bats test_tags=slow

View File

@ -33,13 +33,10 @@ teardown(){
run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input
assert_success assert_success
# current deployment assert_output --partial 'UNDEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*' + "0.1.0+1.20.0" assert_output --partial 'CURRENT DEPLOYMENT 0.1.0+1.20.0'
assert_output --regexp 'CHAOS.*false' assert_output --partial 'ENV VERSION 0.1.0+1.20.0'
assert_output --partial 'NEW DEPLOYMENT N/A'
# env version
assert_output --regexp 'CURRENT VERSION.*' + "0.1.0+1.20.0"
assert_output --regexp 'NEW VERSION.*' + "0.1.0+1.20.0"
run grep -q "TYPE=$TEST_RECIPE:0.1.0+1.20.0" \ run grep -q "TYPE=$TEST_RECIPE:0.1.0+1.20.0" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -57,13 +54,10 @@ teardown(){
run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input
assert_success assert_success
# current deployment assert_output --partial 'UNDEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*' + "${latestRelease}" assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}"
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}" assert_output --partial "ENV VERSION ${headHash:0:8}"
assert_output --partial 'NEW DEPLOYMENT N/A'
# env version
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}"
run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \ run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -72,7 +66,6 @@ teardown(){
@test "chaos deploy with unstaged commits and undeploy" { @test "chaos deploy with unstaged commits and undeploy" {
headHash=$(_get_head_hash) headHash=$(_get_head_hash)
latestRelease=$(_latest_release)
run bash -c 'echo "unstaged changes" >> "$ABRA_DIR/recipes/$TEST_RECIPE/foo"' run bash -c 'echo "unstaged changes" >> "$ABRA_DIR/recipes/$TEST_RECIPE/foo"'
assert_success assert_success
@ -85,13 +78,10 @@ teardown(){
run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input run $ABRA app undeploy "$TEST_APP_DOMAIN" --no-input
assert_success assert_success
# current deployment assert_output --partial 'UNDEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*' + "${latestRelease}" assert_output --partial "CURRENT DEPLOYMENT ${headHash:0:8}+U"
assert_output --regexp 'CHAOS.*' + "${headHash:0:8}+U" assert_output --partial "ENV VERSION ${headHash:0:8}+U"
assert_output --partial 'NEW DEPLOYMENT N/A'
# env version
assert_output --regexp 'CURRENT VERSION.*' + "${latestRelease}"
assert_output --regexp 'NEW VERSION.*' + "${headHash:0:8}+U"
run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \ run grep -q "TYPE=$TEST_RECIPE:${headHash:0:8}" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"

View File

@ -205,7 +205,7 @@ teardown(){
} }
# bats test_tags=slow # bats test_tags=slow
@test "upgrade chaos deployment" { @test "upgrade commit deployment not possible" {
tagHash=$(_get_tag_hash "0.1.0+1.20.0") tagHash=$(_get_tag_hash "0.1.0+1.20.0")
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" checkout "$tagHash" run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" checkout "$tagHash"
assert_success assert_success
@ -215,17 +215,8 @@ teardown(){
assert_output --partial "${tagHash:0:8}" assert_output --partial "${tagHash:0:8}"
run $ABRA app upgrade "$TEST_APP_DOMAIN" "0.1.1+1.20.2" --no-input --no-converge-checks run $ABRA app upgrade "$TEST_APP_DOMAIN" "0.1.1+1.20.2" --no-input --no-converge-checks
assert_success assert_failure
assert_output --partial "0.1.1+1.20.2" assert_output --partial "not a known version"
assert_output --partial "${tagHash:0:8}"
run $ABRA app upgrade "$TEST_APP_DOMAIN" "0.2.0+1.21.0" --no-input --no-converge-checks
assert_success
assert_output --partial "0.2.0+1.21.0"
tagHash=$(_get_tag_hash "0.1.1+1.20.2")
refute_output --partial "${tagHash:0:8}"
assert_output --partial "false"
} }
@test "chaos commit upgrade not possible" { @test "chaos commit upgrade not possible" {

View File

@ -31,17 +31,10 @@ teardown(){
--no-input --no-converge-checks --no-input --no-converge-checks
assert_success assert_success
# current deployment assert_output --partial 'UPGRADE OVERVIEW'
assert_output --regexp 'VERSION.*' + "0.1.0+1.20.0" assert_output --partial 'CURRENT DEPLOYMENT 0.1.0+1.20.0'
assert_output --regexp 'CHAOS.*false' assert_output --partial 'ENV VERSION 0.1.0+1.20.0'
assert_output --partial 'NEW DEPLOYMENT 0.2.0+1.21.0'
# upgrade
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
assert_output --regexp 'CHAOS.*false'
# env version
assert_output --regexp 'CURRENT VERSION.*' + "0.1.0+1.20.0"
assert_output --regexp 'NEW VERSION.*' + "0.2.0+1.21.0"
run grep -q "TYPE=$TEST_RECIPE:0.2.0+1.21.0" \ run grep -q "TYPE=$TEST_RECIPE:0.2.0+1.21.0" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -57,17 +50,10 @@ teardown(){
--no-input --no-converge-checks --force --no-input --no-converge-checks --force
assert_success assert_success
# current deployment assert_output --partial 'REDEPLOY OVERVIEW'
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0" assert_output --partial 'CURRENT DEPLOYMENT 0.2.0+1.21.0'
assert_output --regexp 'CHAOS.*false' assert_output --partial 'ENV VERSION 0.2.0+1.21.0'
assert_output --partial 'NEW DEPLOYMENT 0.2.0+1.21.0'
# upgrade
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
assert_output --regexp 'CHAOS.*false'
# env version
assert_output --regexp 'CURRENT VERSION.*' + "0.2.0+1.21.0"
assert_output --regexp 'NEW VERSION.*' + "0.2.0+1.21.0"
run grep -q "TYPE=$TEST_RECIPE:0.2.0+1.21.0" \ run grep -q "TYPE=$TEST_RECIPE:0.2.0+1.21.0" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"
@ -87,17 +73,10 @@ teardown(){
--no-input --no-converge-checks --force --no-input --no-converge-checks --force
assert_success assert_success
# current deployment assert_output --partial 'UPGRADE OVERVIEW'
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0" assert_output --partial 'CURRENT DEPLOYMENT 0.2.0+1.21.0'
assert_output --regexp 'CHAOS.*false' assert_output --partial 'ENV VERSION N/A'
assert_output --partial 'NEW DEPLOYMENT 0.3.1+1.21.0'
# upgrade
assert_output --regexp 'VERSION.*' + "0.2.0+1.21.0"
assert_output --regexp 'CHAOS.*false'
# env version
assert_output --regexp 'CURRENT VERSION.*N/A'
assert_output --regexp 'NEW VERSION.*' + "0.2.0+1.21.0"
run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \ run grep -q "TYPE=$TEST_RECIPE:${latestRelease}" \
"$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env" "$ABRA_DIR/servers/$TEST_SERVER/$TEST_APP_DOMAIN.env"

View File

@ -30,6 +30,15 @@ _undeploy_app() {
assert_output --partial 'unknown' assert_output --partial 'unknown'
} }
_undeploy_app2() {
run $ABRA app undeploy "$1" --no-input
run $ABRA app ls --server "$TEST_SERVER" --status
assert_success
assert_output --partial "$1"
assert_output --partial 'unknown'
}
_rm_app() { _rm_app() {
# NOTE(d1): not asserting outcomes on teardown here since some might fail # NOTE(d1): not asserting outcomes on teardown here since some might fail
# depending on what the test created. all commands run through anyway # depending on what the test created. all commands run through anyway

View File

@ -38,6 +38,8 @@ _set_git_author() {
} }
_git_commit() { _git_commit() {
_set_git_author
run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" add . run git -C "$ABRA_DIR/recipes/$TEST_RECIPE" add .
assert_success assert_success

View File

@ -49,16 +49,16 @@ func ShiftNBytesLeft(dst, x []byte, n int) {
dst = append(dst, make([]byte, n/8)...) dst = append(dst, make([]byte, n/8)...)
} }
// XorBytesMut assumes equal input length, replaces X with X XOR Y // XorBytesMut replaces X with X XOR Y. len(X) must be >= len(Y).
func XorBytesMut(X, Y []byte) { func XorBytesMut(X, Y []byte) {
for i := 0; i < len(X); i++ { for i := 0; i < len(Y); i++ {
X[i] ^= Y[i] X[i] ^= Y[i]
} }
} }
// XorBytes assumes equal input length, puts X XOR Y into Z // XorBytes puts X XOR Y into Z. len(Z) and len(X) must be >= len(Y).
func XorBytes(Z, X, Y []byte) { func XorBytes(Z, X, Y []byte) {
for i := 0; i < len(X); i++ { for i := 0; i < len(Y); i++ {
Z[i] = X[i] ^ Y[i] Z[i] = X[i] ^ Y[i]
} }
} }
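The XOR helpers now iterate over len(Y), so the destination may legitimately be longer than the operand; only the first len(Y) bytes are touched. A tiny standalone check of that contract:

```go
package main

import "fmt"

// xorBytesMut mirrors the relaxed contract above: len(X) must be >= len(Y),
// and only the first len(Y) bytes of X are modified.
func xorBytesMut(X, Y []byte) {
	for i := 0; i < len(Y); i++ {
		X[i] ^= Y[i]
	}
}

func main() {
	x := []byte{0xff, 0xff, 0xff, 0xff}
	y := []byte{0x0f, 0xf0}
	xorBytesMut(x, y)
	fmt.Printf("%x\n", x) // f00fffff: the trailing bytes of x are untouched
}
```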

View File

@ -109,8 +109,10 @@ func (o *ocb) Seal(dst, nonce, plaintext, adata []byte) []byte {
if len(nonce) > o.nonceSize { if len(nonce) > o.nonceSize {
panic("crypto/ocb: Incorrect nonce length given to OCB") panic("crypto/ocb: Incorrect nonce length given to OCB")
} }
ret, out := byteutil.SliceForAppend(dst, len(plaintext)+o.tagSize) sep := len(plaintext)
o.crypt(enc, out, nonce, adata, plaintext) ret, out := byteutil.SliceForAppend(dst, sep+o.tagSize)
tag := o.crypt(enc, out[:sep], nonce, adata, plaintext)
copy(out[sep:], tag)
return ret return ret
} }
@ -122,12 +124,10 @@ func (o *ocb) Open(dst, nonce, ciphertext, adata []byte) ([]byte, error) {
return nil, ocbError("Ciphertext shorter than tag length") return nil, ocbError("Ciphertext shorter than tag length")
} }
sep := len(ciphertext) - o.tagSize sep := len(ciphertext) - o.tagSize
ret, out := byteutil.SliceForAppend(dst, len(ciphertext)) ret, out := byteutil.SliceForAppend(dst, sep)
ciphertextData := ciphertext[:sep] ciphertextData := ciphertext[:sep]
tag := ciphertext[sep:] tag := o.crypt(dec, out, nonce, adata, ciphertextData)
o.crypt(dec, out, nonce, adata, ciphertextData) if subtle.ConstantTimeCompare(tag, ciphertext[sep:]) == 1 {
if subtle.ConstantTimeCompare(ret[sep:], tag) == 1 {
ret = ret[:sep]
return ret, nil return ret, nil
} }
for i := range out { for i := range out {
@ -137,7 +137,8 @@ func (o *ocb) Open(dst, nonce, ciphertext, adata []byte) ([]byte, error) {
} }
// On instruction enc (resp. dec), crypt is the encrypt (resp. decrypt) // On instruction enc (resp. dec), crypt is the encrypt (resp. decrypt)
// function. It returns the resulting plain/ciphertext with the tag appended. // function. It writes the resulting plain/ciphertext into Y and returns
// the tag.
func (o *ocb) crypt(instruction int, Y, nonce, adata, X []byte) []byte { func (o *ocb) crypt(instruction int, Y, nonce, adata, X []byte) []byte {
// //
// Consider X as a sequence of 128-bit blocks // Consider X as a sequence of 128-bit blocks
@ -194,13 +195,14 @@ func (o *ocb) crypt(instruction int, Y, nonce, adata, X []byte) []byte {
byteutil.XorBytesMut(offset, o.mask.L[bits.TrailingZeros(uint(i+1))]) byteutil.XorBytesMut(offset, o.mask.L[bits.TrailingZeros(uint(i+1))])
blockX := X[i*blockSize : (i+1)*blockSize] blockX := X[i*blockSize : (i+1)*blockSize]
blockY := Y[i*blockSize : (i+1)*blockSize] blockY := Y[i*blockSize : (i+1)*blockSize]
byteutil.XorBytes(blockY, blockX, offset)
switch instruction { switch instruction {
case enc: case enc:
byteutil.XorBytesMut(checksum, blockX)
byteutil.XorBytes(blockY, blockX, offset)
o.block.Encrypt(blockY, blockY) o.block.Encrypt(blockY, blockY)
byteutil.XorBytesMut(blockY, offset) byteutil.XorBytesMut(blockY, offset)
byteutil.XorBytesMut(checksum, blockX)
case dec: case dec:
byteutil.XorBytes(blockY, blockX, offset)
o.block.Decrypt(blockY, blockY) o.block.Decrypt(blockY, blockY)
byteutil.XorBytesMut(blockY, offset) byteutil.XorBytesMut(blockY, offset)
byteutil.XorBytesMut(checksum, blockY) byteutil.XorBytesMut(checksum, blockY)
@ -216,31 +218,24 @@ func (o *ocb) crypt(instruction int, Y, nonce, adata, X []byte) []byte {
o.block.Encrypt(pad, offset) o.block.Encrypt(pad, offset)
chunkX := X[blockSize*m:] chunkX := X[blockSize*m:]
chunkY := Y[blockSize*m : len(X)] chunkY := Y[blockSize*m : len(X)]
byteutil.XorBytes(chunkY, chunkX, pad[:len(chunkX)])
// P_* || bit(1) || zeroes(127) - len(P_*)
switch instruction { switch instruction {
case enc: case enc:
paddedY := append(chunkX, byte(128)) byteutil.XorBytesMut(checksum, chunkX)
paddedY = append(paddedY, make([]byte, blockSize-len(chunkX)-1)...) checksum[len(chunkX)] ^= 128
byteutil.XorBytesMut(checksum, paddedY) byteutil.XorBytes(chunkY, chunkX, pad[:len(chunkX)])
// P_* || bit(1) || zeroes(127) - len(P_*)
case dec: case dec:
paddedX := append(chunkY, byte(128)) byteutil.XorBytes(chunkY, chunkX, pad[:len(chunkX)])
paddedX = append(paddedX, make([]byte, blockSize-len(chunkY)-1)...) // P_* || bit(1) || zeroes(127) - len(P_*)
byteutil.XorBytesMut(checksum, paddedX) byteutil.XorBytesMut(checksum, chunkY)
checksum[len(chunkY)] ^= 128
}
} }
byteutil.XorBytes(tag, checksum, offset) byteutil.XorBytes(tag, checksum, offset)
byteutil.XorBytesMut(tag, o.mask.lDol) byteutil.XorBytesMut(tag, o.mask.lDol)
o.block.Encrypt(tag, tag) o.block.Encrypt(tag, tag)
byteutil.XorBytesMut(tag, o.hash(adata)) byteutil.XorBytesMut(tag, o.hash(adata))
copy(Y[blockSize*m+len(chunkY):], tag[:o.tagSize]) return tag[:o.tagSize]
} else {
byteutil.XorBytes(tag, checksum, offset)
byteutil.XorBytesMut(tag, o.mask.lDol)
o.block.Encrypt(tag, tag)
byteutil.XorBytesMut(tag, o.hash(adata))
copy(Y[blockSize*m:], tag[:o.tagSize])
}
return Y
} }
// This hash function is used to compute the tag. Per design, on empty input it // This hash function is used to compute the tag. Per design, on empty input it
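Net effect of the Seal/Open rework above: crypt now returns the tag, Seal copies it behind the ciphertext and Open strips it before decrypting, so the output keeps the usual `ciphertext || tag` layout. Not OCB, but a quick standard-library illustration of that layout (zero key and nonce, for illustration only):

```go
package main

import (
	"crypto/aes"
	"crypto/cipher"
	"fmt"
)

func main() {
	key := make([]byte, 16)   // zero key: illustration only
	nonce := make([]byte, 12) // zero nonce: never reuse in real code
	block, _ := aes.NewCipher(key)
	aead, _ := cipher.NewGCM(block)

	plaintext := []byte("hello ocb layout")
	sealed := aead.Seal(nil, nonce, plaintext, nil)
	sep := len(sealed) - aead.Overhead()
	fmt.Printf("ciphertext: %x\ntag:        %x\n", sealed[:sep], sealed[sep:])

	opened, err := aead.Open(nil, nonce, sealed, nil)
	fmt.Println(string(opened), err) // hello ocb layout <nil>
}
```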

View File

@ -7,6 +7,7 @@ package armor
import ( import (
"encoding/base64" "encoding/base64"
"io" "io"
"sort"
) )
var armorHeaderSep = []byte(": ") var armorHeaderSep = []byte(": ")
@ -159,8 +160,15 @@ func encode(out io.Writer, blockType string, headers map[string]string, checksum
return return
} }
for k, v := range headers { keys := make([]string, len(headers))
err = writeSlices(out, []byte(k), armorHeaderSep, []byte(v), newline) i := 0
for k := range headers {
keys[i] = k
i++
}
sort.Strings(keys)
for _, k := range keys {
err = writeSlices(out, []byte(k), armorHeaderSep, []byte(headers[k]), newline)
if err != nil { if err != nil {
return return
} }
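Because Go randomizes map iteration, armor headers used to come out in a different order on every run; collecting and sorting the keys makes the armored output deterministic. The pattern in isolation:

```go
package main

import (
	"fmt"
	"sort"
)

func main() {
	headers := map[string]string{"Version": "GopenPGP", "Comment": "example"}

	keys := make([]string, 0, len(headers))
	for k := range headers {
		keys = append(keys, k)
	}
	sort.Strings(keys)

	for _, k := range keys {
		// Always "Comment: ..." then "Version: ...", regardless of map order.
		fmt.Printf("%s: %s\n", k, headers[k])
	}
}
```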

View File

@ -6,6 +6,7 @@
package errors // import "github.com/ProtonMail/go-crypto/openpgp/errors" package errors // import "github.com/ProtonMail/go-crypto/openpgp/errors"
import ( import (
"fmt"
"strconv" "strconv"
) )
@ -178,3 +179,22 @@ type ErrMalformedMessage string
func (dke ErrMalformedMessage) Error() string { func (dke ErrMalformedMessage) Error() string {
return "openpgp: malformed message " + string(dke) return "openpgp: malformed message " + string(dke)
} }
// ErrEncryptionKeySelection is returned if encryption key selection fails (v2 API).
type ErrEncryptionKeySelection struct {
PrimaryKeyId string
PrimaryKeyErr error
EncSelectionKeyId *string
EncSelectionErr error
}
func (eks ErrEncryptionKeySelection) Error() string {
prefix := fmt.Sprintf("openpgp: key selection for primary key %s:", eks.PrimaryKeyId)
if eks.PrimaryKeyErr != nil {
return fmt.Sprintf("%s invalid primary key: %s", prefix, eks.PrimaryKeyErr)
}
if eks.EncSelectionKeyId != nil {
return fmt.Sprintf("%s invalid encryption key %s: %s", prefix, *eks.EncSelectionKeyId, eks.EncSelectionErr)
}
return fmt.Sprintf("%s no encryption key: %s", prefix, eks.EncSelectionErr)
}

View File

@ -3,7 +3,6 @@
package packet package packet
import ( import (
"bytes"
"crypto/cipher" "crypto/cipher"
"encoding/binary" "encoding/binary"
"io" "io"
@ -15,12 +14,11 @@ import (
type aeadCrypter struct { type aeadCrypter struct {
aead cipher.AEAD aead cipher.AEAD
chunkSize int chunkSize int
initialNonce []byte nonce []byte
associatedData []byte // Chunk-independent associated data associatedData []byte // Chunk-independent associated data
chunkIndex []byte // Chunk counter chunkIndex []byte // Chunk counter
packetTag packetType // SEIP packet (v2) or AEAD Encrypted Data packet packetTag packetType // SEIP packet (v2) or AEAD Encrypted Data packet
bytesProcessed int // Amount of plaintext bytes encrypted/decrypted bytesProcessed int // Amount of plaintext bytes encrypted/decrypted
buffer bytes.Buffer // Buffered bytes across chunks
} }
// computeNonce takes the incremental index and computes an eXclusive OR with // computeNonce takes the incremental index and computes an eXclusive OR with
@ -28,12 +26,12 @@ type aeadCrypter struct {
// 5.16.1 and 5.16.2). It returns the resulting nonce. // 5.16.1 and 5.16.2). It returns the resulting nonce.
func (wo *aeadCrypter) computeNextNonce() (nonce []byte) { func (wo *aeadCrypter) computeNextNonce() (nonce []byte) {
if wo.packetTag == packetTypeSymmetricallyEncryptedIntegrityProtected { if wo.packetTag == packetTypeSymmetricallyEncryptedIntegrityProtected {
return append(wo.initialNonce, wo.chunkIndex...) return wo.nonce
} }
nonce = make([]byte, len(wo.initialNonce)) nonce = make([]byte, len(wo.nonce))
copy(nonce, wo.initialNonce) copy(nonce, wo.nonce)
offset := len(wo.initialNonce) - 8 offset := len(wo.nonce) - 8
for i := 0; i < 8; i++ { for i := 0; i < 8; i++ {
nonce[i+offset] ^= wo.chunkIndex[i] nonce[i+offset] ^= wo.chunkIndex[i]
} }
@ -62,8 +60,9 @@ func (wo *aeadCrypter) incrementIndex() error {
type aeadDecrypter struct { type aeadDecrypter struct {
aeadCrypter // Embedded ciphertext opener aeadCrypter // Embedded ciphertext opener
reader io.Reader // 'reader' is a partialLengthReader reader io.Reader // 'reader' is a partialLengthReader
chunkBytes []byte
peekedBytes []byte // Used to detect last chunk peekedBytes []byte // Used to detect last chunk
eof bool buffer []byte // Buffered decrypted bytes
} }
// Read decrypts bytes and reads them into dst. It decrypts when necessary and // Read decrypts bytes and reads them into dst. It decrypts when necessary and
@ -71,60 +70,45 @@ type aeadDecrypter struct {
// and an error. // and an error.
func (ar *aeadDecrypter) Read(dst []byte) (n int, err error) { func (ar *aeadDecrypter) Read(dst []byte) (n int, err error) {
// Return buffered plaintext bytes from previous calls // Return buffered plaintext bytes from previous calls
if ar.buffer.Len() > 0 { if len(ar.buffer) > 0 {
return ar.buffer.Read(dst) n = copy(dst, ar.buffer)
} ar.buffer = ar.buffer[n:]
return
// Return EOF if we've previously validated the final tag
if ar.eof {
return 0, io.EOF
} }
// Read a chunk // Read a chunk
tagLen := ar.aead.Overhead() tagLen := ar.aead.Overhead()
cipherChunkBuf := new(bytes.Buffer) copy(ar.chunkBytes, ar.peekedBytes) // Copy bytes peeked in previous chunk or in initialization
_, errRead := io.CopyN(cipherChunkBuf, ar.reader, int64(ar.chunkSize+tagLen)) bytesRead, errRead := io.ReadFull(ar.reader, ar.chunkBytes[tagLen:])
cipherChunk := cipherChunkBuf.Bytes() if errRead != nil && errRead != io.EOF && errRead != io.ErrUnexpectedEOF {
if errRead != nil && errRead != io.EOF {
return 0, errRead return 0, errRead
} }
if len(cipherChunk) > 0 { if bytesRead > 0 {
decrypted, errChunk := ar.openChunk(cipherChunk) ar.peekedBytes = ar.chunkBytes[bytesRead:bytesRead+tagLen]
decrypted, errChunk := ar.openChunk(ar.chunkBytes[:bytesRead])
if errChunk != nil { if errChunk != nil {
return 0, errChunk return 0, errChunk
} }
// Return decrypted bytes, buffering if necessary // Return decrypted bytes, buffering if necessary
if len(dst) < len(decrypted) {
n = copy(dst, decrypted[:len(dst)])
ar.buffer.Write(decrypted[len(dst):])
} else {
n = copy(dst, decrypted) n = copy(dst, decrypted)
} ar.buffer = decrypted[n:]
return
} }
// Check final authentication tag return 0, io.EOF
if errRead == io.EOF {
errChunk := ar.validateFinalTag(ar.peekedBytes)
if errChunk != nil {
return n, errChunk
}
ar.eof = true // Mark EOF for when we've returned all buffered data
}
return
} }
// Close is noOp. The final authentication tag of the stream was already // Close checks the final authentication tag of the stream.
// checked in the last Read call. In the future, this function could be used to // In the future, this function could also be used to wipe the reader
// wipe the reader and peeked, decrypted bytes, if necessary. // and peeked & decrypted bytes, if necessary.
func (ar *aeadDecrypter) Close() (err error) { func (ar *aeadDecrypter) Close() (err error) {
if !ar.eof {
errChunk := ar.validateFinalTag(ar.peekedBytes) errChunk := ar.validateFinalTag(ar.peekedBytes)
if errChunk != nil { if errChunk != nil {
return errChunk return errChunk
} }
}
return nil return nil
} }
@ -132,20 +116,13 @@ func (ar *aeadDecrypter) Close() (err error) {
// the underlying plaintext and an error. It accesses peeked bytes from next // the underlying plaintext and an error. It accesses peeked bytes from next
// chunk, to identify the last chunk and decrypt/validate accordingly. // chunk, to identify the last chunk and decrypt/validate accordingly.
func (ar *aeadDecrypter) openChunk(data []byte) ([]byte, error) { func (ar *aeadDecrypter) openChunk(data []byte) ([]byte, error) {
tagLen := ar.aead.Overhead()
// Restore carried bytes from last call
chunkExtra := append(ar.peekedBytes, data...)
// 'chunk' contains encrypted bytes, followed by an authentication tag.
chunk := chunkExtra[:len(chunkExtra)-tagLen]
ar.peekedBytes = chunkExtra[len(chunkExtra)-tagLen:]
adata := ar.associatedData adata := ar.associatedData
if ar.aeadCrypter.packetTag == packetTypeAEADEncrypted { if ar.aeadCrypter.packetTag == packetTypeAEADEncrypted {
adata = append(ar.associatedData, ar.chunkIndex...) adata = append(ar.associatedData, ar.chunkIndex...)
} }
nonce := ar.computeNextNonce() nonce := ar.computeNextNonce()
plainChunk, err := ar.aead.Open(nil, nonce, chunk, adata) plainChunk, err := ar.aead.Open(data[:0:len(data)], nonce, data, adata)
if err != nil { if err != nil {
return nil, errors.ErrAEADTagVerification return nil, errors.ErrAEADTagVerification
} }
@ -183,21 +160,21 @@ func (ar *aeadDecrypter) validateFinalTag(tag []byte) error {
type aeadEncrypter struct { type aeadEncrypter struct {
aeadCrypter // Embedded plaintext sealer aeadCrypter // Embedded plaintext sealer
writer io.WriteCloser // 'writer' is a partialLengthWriter writer io.WriteCloser // 'writer' is a partialLengthWriter
chunkBytes []byte
offset int
} }
// Write encrypts and writes bytes. It encrypts when necessary and buffers extra // Write encrypts and writes bytes. It encrypts when necessary and buffers extra
// plaintext bytes for next call. When the stream is finished, Close() MUST be // plaintext bytes for next call. When the stream is finished, Close() MUST be
// called to append the final tag. // called to append the final tag.
func (aw *aeadEncrypter) Write(plaintextBytes []byte) (n int, err error) { func (aw *aeadEncrypter) Write(plaintextBytes []byte) (n int, err error) {
// Append plaintextBytes to existing buffered bytes for n != len(plaintextBytes) {
n, err = aw.buffer.Write(plaintextBytes) copied := copy(aw.chunkBytes[aw.offset:aw.chunkSize], plaintextBytes[n:])
if err != nil { n += copied
return n, err aw.offset += copied
}
// Encrypt and write chunks if aw.offset == aw.chunkSize {
for aw.buffer.Len() >= aw.chunkSize { encryptedChunk, err := aw.sealChunk(aw.chunkBytes[:aw.offset])
plainChunk := aw.buffer.Next(aw.chunkSize)
encryptedChunk, err := aw.sealChunk(plainChunk)
if err != nil { if err != nil {
return n, err return n, err
} }
@ -205,6 +182,8 @@ func (aw *aeadEncrypter) Write(plaintextBytes []byte) (n int, err error) {
if err != nil { if err != nil {
return n, err return n, err
} }
aw.offset = 0
}
} }
return return
} }
@ -215,9 +194,8 @@ func (aw *aeadEncrypter) Write(plaintextBytes []byte) (n int, err error) {
func (aw *aeadEncrypter) Close() (err error) { func (aw *aeadEncrypter) Close() (err error) {
// Encrypt and write a chunk if there's buffered data left, or if we haven't // Encrypt and write a chunk if there's buffered data left, or if we haven't
// written any chunks yet. // written any chunks yet.
if aw.buffer.Len() > 0 || aw.bytesProcessed == 0 { if aw.offset > 0 || aw.bytesProcessed == 0 {
plainChunk := aw.buffer.Bytes() lastEncryptedChunk, err := aw.sealChunk(aw.chunkBytes[:aw.offset])
lastEncryptedChunk, err := aw.sealChunk(plainChunk)
if err != nil { if err != nil {
return err return err
} }
@ -263,7 +241,7 @@ func (aw *aeadEncrypter) sealChunk(data []byte) ([]byte, error) {
} }
nonce := aw.computeNextNonce() nonce := aw.computeNextNonce()
encrypted := aw.aead.Seal(nil, nonce, data, adata) encrypted := aw.aead.Seal(data[:0], nonce, data, adata)
aw.bytesProcessed += len(data) aw.bytesProcessed += len(data)
if err := aw.aeadCrypter.incrementIndex(); err != nil { if err := aw.aeadCrypter.incrementIndex(); err != nil {
return nil, err return nil, err

View File

@ -65,24 +65,28 @@ func (ae *AEADEncrypted) decrypt(key []byte) (io.ReadCloser, error) {
blockCipher := ae.cipher.new(key) blockCipher := ae.cipher.new(key)
aead := ae.mode.new(blockCipher) aead := ae.mode.new(blockCipher)
// Carry the first tagLen bytes // Carry the first tagLen bytes
chunkSize := decodeAEADChunkSize(ae.chunkSizeByte)
tagLen := ae.mode.TagLength() tagLen := ae.mode.TagLength()
peekedBytes := make([]byte, tagLen) chunkBytes := make([]byte, chunkSize+tagLen*2)
peekedBytes := chunkBytes[chunkSize+tagLen:]
n, err := io.ReadFull(ae.Contents, peekedBytes) n, err := io.ReadFull(ae.Contents, peekedBytes)
if n < tagLen || (err != nil && err != io.EOF) { if n < tagLen || (err != nil && err != io.EOF) {
return nil, errors.AEADError("Not enough data to decrypt:" + err.Error()) return nil, errors.AEADError("Not enough data to decrypt:" + err.Error())
} }
chunkSize := decodeAEADChunkSize(ae.chunkSizeByte)
return &aeadDecrypter{ return &aeadDecrypter{
aeadCrypter: aeadCrypter{ aeadCrypter: aeadCrypter{
aead: aead, aead: aead,
chunkSize: chunkSize, chunkSize: chunkSize,
initialNonce: ae.initialNonce, nonce: ae.initialNonce,
associatedData: ae.associatedData(), associatedData: ae.associatedData(),
chunkIndex: make([]byte, 8), chunkIndex: make([]byte, 8),
packetTag: packetTypeAEADEncrypted, packetTag: packetTypeAEADEncrypted,
}, },
reader: ae.Contents, reader: ae.Contents,
peekedBytes: peekedBytes}, nil chunkBytes: chunkBytes,
peekedBytes: peekedBytes,
}, nil
} }
// associatedData for chunks: tag, version, cipher, mode, chunk size byte // associatedData for chunks: tag, version, cipher, mode, chunk size byte
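One detail worth noting in the decrypter change: a single `chunkBytes` allocation of `chunkSize+tagLen*2` now backs both the working chunk and the carried-over `peekedBytes`, which simply alias its tail. A tiny illustration of the slice layout (sizes are made up):

```go
package main

import "fmt"

func main() {
	// Illustrative sizes only; the real values come from the chunk size
	// byte and the AEAD mode's tag length.
	chunkSize, tagLen := 1<<6, 16

	chunkBytes := make([]byte, chunkSize+tagLen*2) // room for one chunk+tag plus the look-ahead
	peekedBytes := chunkBytes[chunkSize+tagLen:]   // the trailing tagLen bytes, no extra allocation

	fmt.Println(len(chunkBytes), len(peekedBytes)) // 96 16
}
```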

View File

@ -173,6 +173,11 @@ type Config struct {
// weaknesses in the hash algo, potentially hindering e.g. some chosen-prefix attacks. // weaknesses in the hash algo, potentially hindering e.g. some chosen-prefix attacks.
// The default behavior, when the config or flag is nil, is to enable the feature. // The default behavior, when the config or flag is nil, is to enable the feature.
NonDeterministicSignaturesViaNotation *bool NonDeterministicSignaturesViaNotation *bool
// InsecureAllowAllKeyFlagsWhenMissing determines how a key without valid key flags is handled.
// When set to true, a key without flags is treated as if all flags are enabled.
// This behavior is consistent with GPG.
InsecureAllowAllKeyFlagsWhenMissing bool
} }
func (c *Config) Random() io.Reader { func (c *Config) Random() io.Reader {
@ -403,6 +408,13 @@ func (c *Config) RandomizeSignaturesViaNotation() bool {
return *c.NonDeterministicSignaturesViaNotation return *c.NonDeterministicSignaturesViaNotation
} }
func (c *Config) AllowAllKeyFlagsWhenMissing() bool {
if c == nil {
return false
}
return c.InsecureAllowAllKeyFlagsWhenMissing
}
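The new `InsecureAllowAllKeyFlagsWhenMissing` option is opt-in, and the getter is nil-safe, so existing callers keep the stricter default. Roughly how a caller would enable the GPG-compatible behaviour (assuming the usual `ProtonMail/go-crypto` import path for this vendored package):

```go
package main

import (
	"fmt"

	"github.com/ProtonMail/go-crypto/openpgp/packet"
)

func main() {
	cfg := &packet.Config{
		// Treat keys that carry no key-flags subpacket as if every flag
		// were set, matching GPG. Off by default.
		InsecureAllowAllKeyFlagsWhenMissing: true,
	}

	fmt.Println(cfg.AllowAllKeyFlagsWhenMissing())                   // true
	fmt.Println((*packet.Config)(nil).AllowAllKeyFlagsWhenMissing()) // false: a nil config is safe
}
```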
// BoolPointer is a helper function to set a boolean pointer in the Config. // BoolPointer is a helper function to set a boolean pointer in the Config.
// e.g., config.CheckPacketSequence = BoolPointer(true) // e.g., config.CheckPacketSequence = BoolPointer(true)
func BoolPointer(value bool) *bool { func BoolPointer(value bool) *bool {

View File

@ -1048,12 +1048,17 @@ func (pk *PublicKey) VerifyDirectKeySignature(sig *Signature) (err error) {
// KeyIdString returns the public key's fingerprint in capital hex // KeyIdString returns the public key's fingerprint in capital hex
// (e.g. "6C7EE1B8621CC013"). // (e.g. "6C7EE1B8621CC013").
func (pk *PublicKey) KeyIdString() string { func (pk *PublicKey) KeyIdString() string {
return fmt.Sprintf("%X", pk.Fingerprint[12:20]) return fmt.Sprintf("%016X", pk.KeyId)
} }
// KeyIdShortString returns the short form of public key's fingerprint // KeyIdShortString returns the short form of public key's fingerprint
// in capital hex, as shown by gpg --list-keys (e.g. "621CC013"). // in capital hex, as shown by gpg --list-keys (e.g. "621CC013").
// This function will return the full key id for v5 and v6 keys
// since the short key id is undefined for them.
func (pk *PublicKey) KeyIdShortString() string { func (pk *PublicKey) KeyIdShortString() string {
if pk.Version >= 5 {
return pk.KeyIdString()
}
return fmt.Sprintf("%X", pk.Fingerprint[16:20]) return fmt.Sprintf("%X", pk.Fingerprint[16:20])
} }
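The change formats the stored 64-bit key ID directly (zero-padded) instead of slicing the fingerprint, and falls back to the full ID for v5/v6 keys, where a 32-bit short ID is undefined. The formatting itself boils down to:

```go
package main

import "fmt"

func main() {
	var keyID uint64 = 0x6C7EE1B8621CC013 // hypothetical key ID, taken from the doc comment

	fmt.Printf("%016X\n", keyID)         // 6C7EE1B8621CC013  (KeyIdString)
	fmt.Printf("%X\n", keyID&0xFFFFFFFF) // 621CC013          (v4 short form, low 32 bits)
}
```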

View File

@ -1288,7 +1288,9 @@ func (sig *Signature) buildSubpackets(issuer PublicKey) (subpackets []outputSubp
if sig.IssuerKeyId != nil && sig.Version == 4 { if sig.IssuerKeyId != nil && sig.Version == 4 {
keyId := make([]byte, 8) keyId := make([]byte, 8)
binary.BigEndian.PutUint64(keyId, *sig.IssuerKeyId) binary.BigEndian.PutUint64(keyId, *sig.IssuerKeyId)
subpackets = append(subpackets, outputSubpacket{true, issuerSubpacket, true, keyId}) // Note: making this critical breaks RPM <=4.16.
// See: https://github.com/ProtonMail/go-crypto/issues/263
subpackets = append(subpackets, outputSubpacket{true, issuerSubpacket, false, keyId})
} }
// Notation Data // Notation Data
for _, notation := range sig.Notations { for _, notation := range sig.Notations {

View File

@ -70,8 +70,10 @@ func (se *SymmetricallyEncrypted) decryptAead(inputKey []byte) (io.ReadCloser, e
aead, nonce := getSymmetricallyEncryptedAeadInstance(se.Cipher, se.Mode, inputKey, se.Salt[:], se.associatedData()) aead, nonce := getSymmetricallyEncryptedAeadInstance(se.Cipher, se.Mode, inputKey, se.Salt[:], se.associatedData())
// Carry the first tagLen bytes // Carry the first tagLen bytes
chunkSize := decodeAEADChunkSize(se.ChunkSizeByte)
tagLen := se.Mode.TagLength() tagLen := se.Mode.TagLength()
peekedBytes := make([]byte, tagLen) chunkBytes := make([]byte, chunkSize+tagLen*2)
peekedBytes := chunkBytes[chunkSize+tagLen:]
n, err := io.ReadFull(se.Contents, peekedBytes) n, err := io.ReadFull(se.Contents, peekedBytes)
if n < tagLen || (err != nil && err != io.EOF) { if n < tagLen || (err != nil && err != io.EOF) {
return nil, errors.StructuralError("not enough data to decrypt:" + err.Error()) return nil, errors.StructuralError("not enough data to decrypt:" + err.Error())
@ -81,12 +83,13 @@ func (se *SymmetricallyEncrypted) decryptAead(inputKey []byte) (io.ReadCloser, e
aeadCrypter: aeadCrypter{ aeadCrypter: aeadCrypter{
aead: aead, aead: aead,
chunkSize: decodeAEADChunkSize(se.ChunkSizeByte), chunkSize: decodeAEADChunkSize(se.ChunkSizeByte),
initialNonce: nonce, nonce: nonce,
associatedData: se.associatedData(), associatedData: se.associatedData(),
chunkIndex: make([]byte, 8), chunkIndex: nonce[len(nonce)-8:],
packetTag: packetTypeSymmetricallyEncryptedIntegrityProtected, packetTag: packetTypeSymmetricallyEncryptedIntegrityProtected,
}, },
reader: se.Contents, reader: se.Contents,
chunkBytes: chunkBytes,
peekedBytes: peekedBytes, peekedBytes: peekedBytes,
}, nil }, nil
} }
@ -130,16 +133,20 @@ func serializeSymmetricallyEncryptedAead(ciphertext io.WriteCloser, cipherSuite
aead, nonce := getSymmetricallyEncryptedAeadInstance(cipherSuite.Cipher, cipherSuite.Mode, inputKey, salt, prefix) aead, nonce := getSymmetricallyEncryptedAeadInstance(cipherSuite.Cipher, cipherSuite.Mode, inputKey, salt, prefix)
chunkSize := decodeAEADChunkSize(chunkSizeByte)
tagLen := aead.Overhead()
chunkBytes := make([]byte, chunkSize+tagLen)
return &aeadEncrypter{ return &aeadEncrypter{
aeadCrypter: aeadCrypter{ aeadCrypter: aeadCrypter{
aead: aead, aead: aead,
chunkSize: decodeAEADChunkSize(chunkSizeByte), chunkSize: chunkSize,
associatedData: prefix, associatedData: prefix,
chunkIndex: make([]byte, 8), nonce: nonce,
initialNonce: nonce, chunkIndex: nonce[len(nonce)-8:],
packetTag: packetTypeSymmetricallyEncryptedIntegrityProtected, packetTag: packetTypeSymmetricallyEncryptedIntegrityProtected,
}, },
writer: ciphertext, writer: ciphertext,
chunkBytes: chunkBytes,
}, nil }, nil
} }
@ -149,10 +156,10 @@ func getSymmetricallyEncryptedAeadInstance(c CipherFunction, mode AEADMode, inpu
encryptionKey := make([]byte, c.KeySize()) encryptionKey := make([]byte, c.KeySize())
_, _ = readFull(hkdfReader, encryptionKey) _, _ = readFull(hkdfReader, encryptionKey)
// Last 64 bits of nonce are the counter nonce = make([]byte, mode.IvLength())
nonce = make([]byte, mode.IvLength()-8)
_, _ = readFull(hkdfReader, nonce) // Last 64 bits of nonce are the counter
_, _ = readFull(hkdfReader, nonce[:len(nonce)-8])
blockCipher := c.new(encryptionKey) blockCipher := c.new(encryptionKey)
aead = mode.new(blockCipher) aead = mode.new(blockCipher)
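The serializer now derives the full `IvLength()` nonce but only fills its prefix from HKDF, leaving the last 8 bytes zero so they can double as the chunk counter (`chunkIndex` aliases `nonce[len(nonce)-8:]`). A rough stand-alone sketch of that derivation with `x/crypto/hkdf`; the key size, IV length, and info string are placeholders, not the packet's real parameters:

```go
package main

import (
	"crypto/sha256"
	"fmt"
	"io"

	"golang.org/x/crypto/hkdf"
)

func main() {
	inputKey := make([]byte, 32) // session key (all zero here, purely for illustration)
	salt := make([]byte, 32)
	info := []byte("placeholder associated data")

	r := hkdf.New(sha256.New, inputKey, salt, info)

	encryptionKey := make([]byte, 32)
	io.ReadFull(r, encryptionKey)

	const ivLen = 16 // illustrative; the real length comes from mode.IvLength()
	nonce := make([]byte, ivLen)
	io.ReadFull(r, nonce[:len(nonce)-8]) // last 64 bits stay zero: the chunk counter

	chunkIndex := nonce[len(nonce)-8:] // shares the nonce's backing array
	fmt.Println(len(encryptionKey), len(nonce), len(chunkIndex))
}
```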

View File

@ -1,5 +1,6 @@
run: run:
tests: false tests: false
issues-exit-code: 0
issues: issues:
include: include:
@ -36,5 +37,4 @@ linters:
- govet - govet
- ineffassign - ineffassign
- staticcheck - staticcheck
- typecheck
- unused - unused

View File

@ -0,0 +1,28 @@
run:
tests: false
issues:
include:
- EXC0001
- EXC0005
- EXC0011
- EXC0012
- EXC0013
max-issues-per-linter: 0
max-same-issues: 0
linters:
enable:
- bodyclose
- gofumpt
- goimports
- gosec
- nilerr
- revive
- rowserrcheck
- sqlclosecheck
- tparallel
- unconvert
- unparam
- whitespace

View File

@ -0,0 +1,6 @@
includes:
- from_url:
url: charmbracelet/meta/main/goreleaser-lib.yaml
# yaml-language-server: $schema=https://goreleaser.com/static/schema-pro.json

21
vendor/github.com/charmbracelet/colorprofile/LICENSE generated vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020-2024 Charmbracelet, Inc
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

103
vendor/github.com/charmbracelet/colorprofile/README.md generated vendored Normal file
View File

@ -0,0 +1,103 @@
# Colorprofile
<p>
<a href="https://github.com/charmbracelet/colorprofile/releases"><img src="https://img.shields.io/github/release/charmbracelet/colorprofile.svg" alt="Latest Release"></a>
<a href="https://pkg.go.dev/github.com/charmbracelet/colorprofile?tab=doc"><img src="https://godoc.org/github.com/charmbracelet/colorprofile?status.svg" alt="GoDoc"></a>
<a href="https://github.com/charmbracelet/colorprofile/actions"><img src="https://github.com/charmbracelet/colorprofile/actions/workflows/build.yml/badge.svg" alt="Build Status"></a>
</p>
A simple, powerful—and at times magical—package for detecting terminal color
profiles and performing color (and CSI) degradation.
## Detecting the terminal's color profile
Detecting the terminal's color profile is easy.
```go
import "github.com/charmbracelet/colorprofile"
// Detect the color profile. If you're planning on writing to stderr you'd want
// to use os.Stderr instead.
p := colorprofile.Detect(os.Stdout, os.Environ())
// Comment on the profile.
fmt.Printf("You know, your colors are quite %s.", func() string {
switch p {
case colorprofile.TrueColor:
return "fancy"
case colorprofile.ANSI256:
return "1990s fancy"
case colorprofile.ANSI:
return "normcore"
case colorprofile.Ascii:
return "ancient"
case colorprofile.NoTTY:
return "naughty!"
}
return "...IDK" // this should never happen
}())
```
## Downsampling colors
When necessary, colors can be downsampled to a given profile, or manually
downsampled to a specific profile.
```go
p := colorprofile.Detect(os.Stdout, os.Environ())
c := color.RGBA{0x6b, 0x50, 0xff, 0xff} // #6b50ff
// Downsample to the detected profile, when necessary.
convertedColor := p.Convert(c)
// Or manually convert to a given profile.
ansi256Color := colorprofile.ANSI256.Convert(c)
ansiColor := colorprofile.ANSI.Convert(c)
noColor := colorprofile.Ascii.Convert(c)
noANSI := colorprofile.NoTTY.Convert(c)
```
## Automatic downsampling with a Writer
You can also magically downsample colors in ANSI output, when necessary. If
output is not a TTY, ANSI will be dropped entirely.
```go
myFancyANSI := "\x1b[38;2;107;80;255mCute \x1b[1;3mpuppy!!\x1b[m"
// Automatically downsample for the terminal at stdout.
w := colorprofile.NewWriter(os.Stdout, os.Environ())
fmt.Fprintf(w, myFancyANSI)
// Downsample to 4-bit ANSI.
w.Profile = colorprofile.ANSI
fmt.Fprintf(w, myFancyANSI)
// Ascii-fy, no colors.
w.Profile = colorprofile.Ascii
fmt.Fprintf(w, myFancyANSI)
// Strip ANSI altogether.
w.Profile = colorprofile.NoTTY
fmt.Fprintf(w, myFancyANSI) // not as fancy
```
## Feedback
We'd love to hear your thoughts on this project. Feel free to drop us a note!
- [Twitter](https://twitter.com/charmcli)
- [The Fediverse](https://mastodon.social/@charmcli)
- [Discord](https://charm.sh/chat)
## License
[MIT](https://github.com/charmbracelet/bubbletea/raw/master/LICENSE)
---
Part of [Charm](https://charm.sh).
<a href="https://charm.sh/"><img alt="The Charm logo" src="https://stuff.charm.sh/charm-badge.jpg" width="400"></a>
Charm热爱开源 • Charm loves open source • نحنُ نحب المصادر المفتوحة

287
vendor/github.com/charmbracelet/colorprofile/env.go generated vendored Normal file
View File

@ -0,0 +1,287 @@
package colorprofile
import (
"bytes"
"io"
"os/exec"
"runtime"
"strconv"
"strings"
"github.com/charmbracelet/x/term"
"github.com/xo/terminfo"
)
// Detect returns the color profile based on the terminal output, and
// environment variables. This respects NO_COLOR, CLICOLOR, and CLICOLOR_FORCE
// environment variables.
//
// The rules are as follows:
// - TERM=dumb is always treated as NoTTY unless CLICOLOR_FORCE=1 is set.
// - If COLORTERM=truecolor and the profile is not NoTTY, it gets upgraded to TrueColor.
// - Using any 256 color terminal (e.g. TERM=xterm-256color) will set the profile to ANSI256.
// - Using any color terminal (e.g. TERM=xterm-color) will set the profile to ANSI.
// - Using CLICOLOR=1 without TERM defined should be treated as ANSI if the
// output is a terminal.
// - NO_COLOR takes precedence over CLICOLOR/CLICOLOR_FORCE, and will disable
// colors but not text decoration, i.e. bold, italic, faint, etc.
//
// See https://no-color.org/ and https://bixense.com/clicolors/ for more information.
func Detect(output io.Writer, env []string) Profile {
out, ok := output.(term.File)
isatty := ok && term.IsTerminal(out.Fd())
environ := newEnviron(env)
term := environ.get("TERM")
isDumb := term == "dumb"
envp := colorProfile(isatty, environ)
if envp == TrueColor || envNoColor(environ) {
// We already know we have TrueColor, or NO_COLOR is set.
return envp
}
if isatty && !isDumb {
tip := Terminfo(term)
tmuxp := tmux(environ)
// Color profile is the maximum of env, terminfo, and tmux.
return max(envp, max(tip, tmuxp))
}
return envp
}
// Env returns the color profile based on the terminal environment variables.
// This respects NO_COLOR, CLICOLOR, and CLICOLOR_FORCE environment variables.
//
// The rules are as follows:
// - TERM=dumb is always treated as NoTTY unless CLICOLOR_FORCE=1 is set.
// - If COLORTERM=truecolor and the profile is not NoTTY, it gets upgraded to TrueColor.
// - Using any 256 color terminal (e.g. TERM=xterm-256color) will set the profile to ANSI256.
// - Using any color terminal (e.g. TERM=xterm-color) will set the profile to ANSI.
// - Using CLICOLOR=1 without TERM defined should be treated as ANSI if the
// output is a terminal.
// - NO_COLOR takes precedence over CLICOLOR/CLICOLOR_FORCE, and will disable
// colors but not text decoration, i.e. bold, italic, faint, etc.
//
// See https://no-color.org/ and https://bixense.com/clicolors/ for more information.
func Env(env []string) (p Profile) {
return colorProfile(true, newEnviron(env))
}
func colorProfile(isatty bool, env environ) (p Profile) {
isDumb := env.get("TERM") == "dumb"
envp := envColorProfile(env)
if !isatty || isDumb {
// Check if the output is a terminal.
// Treat dumb terminals as NoTTY
p = NoTTY
} else {
p = envp
}
if envNoColor(env) && isatty {
if p > Ascii {
p = Ascii
}
return
}
if cliColorForced(env) {
if p < ANSI {
p = ANSI
}
if envp > p {
p = envp
}
return
}
if cliColor(env) {
if isatty && !isDumb && p < ANSI {
p = ANSI
}
}
return p
}
// envNoColor returns true if the environment variables explicitly disable color output
// by setting NO_COLOR (https://no-color.org/).
func envNoColor(env environ) bool {
noColor, _ := strconv.ParseBool(env.get("NO_COLOR"))
return noColor
}
func cliColor(env environ) bool {
cliColor, _ := strconv.ParseBool(env.get("CLICOLOR"))
return cliColor
}
func cliColorForced(env environ) bool {
cliColorForce, _ := strconv.ParseBool(env.get("CLICOLOR_FORCE"))
return cliColorForce
}
func colorTerm(env environ) bool {
colorTerm := strings.ToLower(env.get("COLORTERM"))
return colorTerm == "truecolor" || colorTerm == "24bit" ||
colorTerm == "yes" || colorTerm == "true"
}
// envColorProfile infers the color profile from the environment.
func envColorProfile(env environ) (p Profile) {
term, ok := env.lookup("TERM")
if !ok || len(term) == 0 || term == "dumb" {
p = NoTTY
if runtime.GOOS == "windows" {
// Use Windows API to detect color profile. Windows Terminal and
// cmd.exe don't define $TERM.
if wcp, ok := windowsColorProfile(env); ok {
p = wcp
}
}
} else {
p = ANSI
}
parts := strings.Split(term, "-")
switch parts[0] {
case "alacritty",
"contour",
"foot",
"ghostty",
"kitty",
"rio",
"st",
"wezterm":
return TrueColor
case "xterm":
if len(parts) > 1 {
switch parts[1] {
case "ghostty", "kitty":
// These terminals can be defined as xterm-TERMNAME
return TrueColor
}
}
case "tmux", "screen":
if p < ANSI256 {
p = ANSI256
}
}
if isCloudShell, _ := strconv.ParseBool(env.get("GOOGLE_CLOUD_SHELL")); isCloudShell {
return TrueColor
}
// GNU Screen doesn't support TrueColor
// Tmux doesn't support $COLORTERM
if colorTerm(env) && !strings.HasPrefix(term, "screen") && !strings.HasPrefix(term, "tmux") {
return TrueColor
}
if strings.HasSuffix(term, "256color") && p < ANSI256 {
p = ANSI256
}
return
}
// Terminfo returns the color profile based on the terminal's terminfo
// database. This relies on the Tc and RGB capabilities to determine if the
// terminal supports TrueColor.
// If term is empty or "dumb", it returns NoTTY.
func Terminfo(term string) (p Profile) {
if len(term) == 0 || term == "dumb" {
return NoTTY
}
p = ANSI
ti, err := terminfo.Load(term)
if err != nil {
return
}
extbools := ti.ExtBoolCapsShort()
if _, ok := extbools["Tc"]; ok {
return TrueColor
}
if _, ok := extbools["RGB"]; ok {
return TrueColor
}
return
}
// Tmux returns the color profile based on `tmux info` output. Tmux supports
// overriding the terminal's color capabilities, so this function will return
// the color profile based on the tmux configuration.
func Tmux(env []string) Profile {
return tmux(newEnviron(env))
}
// tmux returns the color profile based on the tmux environment variables.
func tmux(env environ) (p Profile) {
if tmux, ok := env.lookup("TMUX"); !ok || len(tmux) == 0 {
// Not in tmux
return NoTTY
}
// Check if tmux has either Tc or RGB capabilities. Otherwise, return
// ANSI256.
p = ANSI256
cmd := exec.Command("tmux", "info")
out, err := cmd.Output()
if err != nil {
return
}
for _, line := range bytes.Split(out, []byte("\n")) {
if (bytes.Contains(line, []byte("Tc")) || bytes.Contains(line, []byte("RGB"))) &&
bytes.Contains(line, []byte("true")) {
return TrueColor
}
}
return
}
// environ is a map of environment variables.
type environ map[string]string
// newEnviron returns a new environment map from a slice of environment
// variables.
func newEnviron(environ []string) environ {
m := make(map[string]string, len(environ))
for _, e := range environ {
parts := strings.SplitN(e, "=", 2)
var value string
if len(parts) == 2 {
value = parts[1]
}
m[parts[0]] = value
}
return m
}
// lookup returns the value of an environment variable and a boolean indicating
// if it exists.
func (e environ) lookup(key string) (string, bool) {
v, ok := e[key]
return v, ok
}
// get returns the value of an environment variable and empty string if it
// doesn't exist.
func (e environ) get(key string) string {
v, _ := e.lookup(key)
return v
}
func max[T ~byte | ~int](a, b T) T {
if a > b {
return a
}
return b
}
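Besides `Detect`, the new package exposes `Terminfo` and `Tmux` for the individual signals it combines. A small sketch; the result obviously depends on the local terminal:

```go
package main

import (
	"fmt"
	"os"

	"github.com/charmbracelet/colorprofile"
)

func main() {
	// The combined detection most callers want.
	p := colorprofile.Detect(os.Stdout, os.Environ())

	// The individual signals it is built from.
	ti := colorprofile.Terminfo(os.Getenv("TERM")) // Tc/RGB terminfo capabilities
	tm := colorprofile.Tmux(os.Environ())          // `tmux info` overrides, if inside tmux

	fmt.Println(p, ti, tm)
}
```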

View File

@ -0,0 +1,8 @@
//go:build !windows
// +build !windows
package colorprofile
func windowsColorProfile(map[string]string) (Profile, bool) {
return 0, false
}

View File

@ -0,0 +1,45 @@
//go:build windows
// +build windows
package colorprofile
import (
"strconv"
"golang.org/x/sys/windows"
)
func windowsColorProfile(env map[string]string) (Profile, bool) {
if env["ConEmuANSI"] == "ON" {
return TrueColor, true
}
if len(env["WT_SESSION"]) > 0 {
// Windows Terminal supports TrueColor
return TrueColor, true
}
major, _, build := windows.RtlGetNtVersionNumbers()
if build < 10586 || major < 10 {
// No ANSI support before WindowsNT 10 build 10586
if len(env["ANSICON"]) > 0 {
ansiconVer := env["ANSICON_VER"]
cv, err := strconv.Atoi(ansiconVer)
if err != nil || cv < 181 {
// No 8 bit color support before ANSICON 1.81
return ANSI, true
}
return ANSI256, true
}
return NoTTY, true
}
if build < 14931 {
// No true color support before build 14931
return ANSI256, true
}
return TrueColor, true
}

399
vendor/github.com/charmbracelet/colorprofile/profile.go generated vendored Normal file
View File

@ -0,0 +1,399 @@
package colorprofile
import (
"image/color"
"math"
"github.com/charmbracelet/x/ansi"
"github.com/lucasb-eyer/go-colorful"
)
// Profile is a color profile: NoTTY, Ascii, ANSI, ANSI256, or TrueColor.
type Profile byte
const (
// NoTTY, not a terminal profile.
NoTTY Profile = iota
// Ascii, uncolored profile.
Ascii //nolint:revive
// ANSI, 4-bit color profile.
ANSI
// ANSI256, 8-bit color profile.
ANSI256
// TrueColor, 24-bit color profile.
TrueColor
)
// String returns the string representation of a Profile.
func (p Profile) String() string {
switch p {
case TrueColor:
return "TrueColor"
case ANSI256:
return "ANSI256"
case ANSI:
return "ANSI"
case Ascii:
return "Ascii"
case NoTTY:
return "NoTTY"
}
return "Unknown"
}
// Convert transforms a given Color to a Color supported within the Profile.
func (p Profile) Convert(c color.Color) color.Color {
if p <= Ascii {
return nil
}
switch c := c.(type) {
case ansi.BasicColor:
return c
case ansi.ExtendedColor:
if p == ANSI {
return ansi256ToANSIColor(c)
}
return c
case ansi.TrueColor, color.Color:
h, ok := colorful.MakeColor(c)
if !ok {
return nil
}
if p != TrueColor {
ac := hexToANSI256Color(h)
if p == ANSI {
return ansi256ToANSIColor(ac)
}
return ac
}
return c
}
return c
}
func hexToANSI256Color(c colorful.Color) ansi.ExtendedColor {
v2ci := func(v float64) int {
if v < 48 {
return 0
}
if v < 115 {
return 1
}
return int((v - 35) / 40)
}
// Calculate the nearest 0-based color index at 16..231
r := v2ci(c.R * 255.0) // 0..5 each
g := v2ci(c.G * 255.0)
b := v2ci(c.B * 255.0)
ci := 36*r + 6*g + b /* 0..215 */
// Calculate the represented colors back from the index
i2cv := [6]int{0, 0x5f, 0x87, 0xaf, 0xd7, 0xff}
cr := i2cv[r] // r/g/b, 0..255 each
cg := i2cv[g]
cb := i2cv[b]
// Calculate the nearest 0-based gray index at 232..255
var grayIdx int
average := (cr + cg + cb) / 3
if average > 238 {
grayIdx = 23
} else {
grayIdx = (average - 3) / 10 // 0..23
}
gv := 8 + 10*grayIdx // same value for r/g/b, 0..255
// Return the one which is nearer to the original input rgb value
c2 := colorful.Color{R: float64(cr) / 255.0, G: float64(cg) / 255.0, B: float64(cb) / 255.0}
g2 := colorful.Color{R: float64(gv) / 255.0, G: float64(gv) / 255.0, B: float64(gv) / 255.0}
colorDist := c.DistanceHSLuv(c2)
grayDist := c.DistanceHSLuv(g2)
if colorDist <= grayDist {
return ansi.ExtendedColor(16 + ci) //nolint:gosec
}
return ansi.ExtendedColor(232 + grayIdx) //nolint:gosec
}
func ansi256ToANSIColor(c ansi.ExtendedColor) ansi.BasicColor {
var r int
md := math.MaxFloat64
h, _ := colorful.Hex(ansiHex[c])
for i := 0; i <= 15; i++ {
hb, _ := colorful.Hex(ansiHex[i])
d := h.DistanceHSLuv(hb)
if d < md {
md = d
r = i
}
}
return ansi.BasicColor(r) //nolint:gosec
}
// RGB values of ANSI colors (0-255).
var ansiHex = []string{
"#000000",
"#800000",
"#008000",
"#808000",
"#000080",
"#800080",
"#008080",
"#c0c0c0",
"#808080",
"#ff0000",
"#00ff00",
"#ffff00",
"#0000ff",
"#ff00ff",
"#00ffff",
"#ffffff",
"#000000",
"#00005f",
"#000087",
"#0000af",
"#0000d7",
"#0000ff",
"#005f00",
"#005f5f",
"#005f87",
"#005faf",
"#005fd7",
"#005fff",
"#008700",
"#00875f",
"#008787",
"#0087af",
"#0087d7",
"#0087ff",
"#00af00",
"#00af5f",
"#00af87",
"#00afaf",
"#00afd7",
"#00afff",
"#00d700",
"#00d75f",
"#00d787",
"#00d7af",
"#00d7d7",
"#00d7ff",
"#00ff00",
"#00ff5f",
"#00ff87",
"#00ffaf",
"#00ffd7",
"#00ffff",
"#5f0000",
"#5f005f",
"#5f0087",
"#5f00af",
"#5f00d7",
"#5f00ff",
"#5f5f00",
"#5f5f5f",
"#5f5f87",
"#5f5faf",
"#5f5fd7",
"#5f5fff",
"#5f8700",
"#5f875f",
"#5f8787",
"#5f87af",
"#5f87d7",
"#5f87ff",
"#5faf00",
"#5faf5f",
"#5faf87",
"#5fafaf",
"#5fafd7",
"#5fafff",
"#5fd700",
"#5fd75f",
"#5fd787",
"#5fd7af",
"#5fd7d7",
"#5fd7ff",
"#5fff00",
"#5fff5f",
"#5fff87",
"#5fffaf",
"#5fffd7",
"#5fffff",
"#870000",
"#87005f",
"#870087",
"#8700af",
"#8700d7",
"#8700ff",
"#875f00",
"#875f5f",
"#875f87",
"#875faf",
"#875fd7",
"#875fff",
"#878700",
"#87875f",
"#878787",
"#8787af",
"#8787d7",
"#8787ff",
"#87af00",
"#87af5f",
"#87af87",
"#87afaf",
"#87afd7",
"#87afff",
"#87d700",
"#87d75f",
"#87d787",
"#87d7af",
"#87d7d7",
"#87d7ff",
"#87ff00",
"#87ff5f",
"#87ff87",
"#87ffaf",
"#87ffd7",
"#87ffff",
"#af0000",
"#af005f",
"#af0087",
"#af00af",
"#af00d7",
"#af00ff",
"#af5f00",
"#af5f5f",
"#af5f87",
"#af5faf",
"#af5fd7",
"#af5fff",
"#af8700",
"#af875f",
"#af8787",
"#af87af",
"#af87d7",
"#af87ff",
"#afaf00",
"#afaf5f",
"#afaf87",
"#afafaf",
"#afafd7",
"#afafff",
"#afd700",
"#afd75f",
"#afd787",
"#afd7af",
"#afd7d7",
"#afd7ff",
"#afff00",
"#afff5f",
"#afff87",
"#afffaf",
"#afffd7",
"#afffff",
"#d70000",
"#d7005f",
"#d70087",
"#d700af",
"#d700d7",
"#d700ff",
"#d75f00",
"#d75f5f",
"#d75f87",
"#d75faf",
"#d75fd7",
"#d75fff",
"#d78700",
"#d7875f",
"#d78787",
"#d787af",
"#d787d7",
"#d787ff",
"#d7af00",
"#d7af5f",
"#d7af87",
"#d7afaf",
"#d7afd7",
"#d7afff",
"#d7d700",
"#d7d75f",
"#d7d787",
"#d7d7af",
"#d7d7d7",
"#d7d7ff",
"#d7ff00",
"#d7ff5f",
"#d7ff87",
"#d7ffaf",
"#d7ffd7",
"#d7ffff",
"#ff0000",
"#ff005f",
"#ff0087",
"#ff00af",
"#ff00d7",
"#ff00ff",
"#ff5f00",
"#ff5f5f",
"#ff5f87",
"#ff5faf",
"#ff5fd7",
"#ff5fff",
"#ff8700",
"#ff875f",
"#ff8787",
"#ff87af",
"#ff87d7",
"#ff87ff",
"#ffaf00",
"#ffaf5f",
"#ffaf87",
"#ffafaf",
"#ffafd7",
"#ffafff",
"#ffd700",
"#ffd75f",
"#ffd787",
"#ffd7af",
"#ffd7d7",
"#ffd7ff",
"#ffff00",
"#ffff5f",
"#ffff87",
"#ffffaf",
"#ffffd7",
"#ffffff",
"#080808",
"#121212",
"#1c1c1c",
"#262626",
"#303030",
"#3a3a3a",
"#444444",
"#4e4e4e",
"#585858",
"#626262",
"#6c6c6c",
"#767676",
"#808080",
"#8a8a8a",
"#949494",
"#9e9e9e",
"#a8a8a8",
"#b2b2b2",
"#bcbcbc",
"#c6c6c6",
"#d0d0d0",
"#dadada",
"#e4e4e4",
"#eeeeee",
}

166
vendor/github.com/charmbracelet/colorprofile/writer.go generated vendored Normal file
View File

@ -0,0 +1,166 @@
package colorprofile
import (
"bytes"
"image/color"
"io"
"strconv"
"github.com/charmbracelet/x/ansi"
)
// NewWriter creates a new color profile writer that downgrades color sequences
// based on the detected color profile.
//
// If environ is nil, it will use os.Environ() to get the environment variables.
//
// It queries the given writer to determine if it supports ANSI escape codes.
// If it does, along with the given environment variables, it will determine
// the appropriate color profile to use for color formatting.
//
// This respects the NO_COLOR, CLICOLOR, and CLICOLOR_FORCE environment variables.
func NewWriter(w io.Writer, environ []string) *Writer {
return &Writer{
Forward: w,
Profile: Detect(w, environ),
}
}
// Writer represents a color profile writer that writes ANSI sequences to the
// underlying writer.
type Writer struct {
Forward io.Writer
Profile Profile
}
// Write writes the given text to the underlying writer.
func (w *Writer) Write(p []byte) (int, error) {
switch w.Profile {
case TrueColor:
return w.Forward.Write(p)
case NoTTY:
return io.WriteString(w.Forward, ansi.Strip(string(p)))
default:
return w.downsample(p)
}
}
// downsample downgrades the given text to the appropriate color profile.
func (w *Writer) downsample(p []byte) (int, error) {
var buf bytes.Buffer
var state byte
parser := ansi.GetParser()
defer ansi.PutParser(parser)
for len(p) > 0 {
parser.Reset()
seq, _, read, newState := ansi.DecodeSequence(p, state, parser)
switch {
case ansi.HasCsiPrefix(seq) && parser.Command() == 'm':
handleSgr(w, parser, &buf)
default:
// If we're not a style SGR sequence, just write the bytes.
if n, err := buf.Write(seq); err != nil {
return n, err
}
}
p = p[read:]
state = newState
}
return w.Forward.Write(buf.Bytes())
}
// WriteString writes the given text to the underlying writer.
func (w *Writer) WriteString(s string) (n int, err error) {
return w.Write([]byte(s))
}
func handleSgr(w *Writer, p *ansi.Parser, buf *bytes.Buffer) {
var style ansi.Style
params := p.Params()
for i := 0; i < len(params); i++ {
param := params[i]
switch param := param.Param(0); param {
case 0:
// SGR default parameter is 0. We use an empty string to reduce the
// number of bytes written to the buffer.
style = append(style, "")
case 30, 31, 32, 33, 34, 35, 36, 37: // 8-bit foreground color
if w.Profile < ANSI {
continue
}
style = style.ForegroundColor(
w.Profile.Convert(ansi.BasicColor(param - 30))) //nolint:gosec
case 38: // 16 or 24-bit foreground color
var c color.Color
if n := ansi.ReadStyleColor(params[i:], &c); n > 0 {
i += n - 1
}
if w.Profile < ANSI {
continue
}
style = style.ForegroundColor(w.Profile.Convert(c))
case 39: // default foreground color
if w.Profile < ANSI {
continue
}
style = style.DefaultForegroundColor()
case 40, 41, 42, 43, 44, 45, 46, 47: // 8-bit background color
if w.Profile < ANSI {
continue
}
style = style.BackgroundColor(
w.Profile.Convert(ansi.BasicColor(param - 40))) //nolint:gosec
case 48: // 16 or 24-bit background color
var c color.Color
if n := ansi.ReadStyleColor(params[i:], &c); n > 0 {
i += n - 1
}
if w.Profile < ANSI {
continue
}
style = style.BackgroundColor(w.Profile.Convert(c))
case 49: // default background color
if w.Profile < ANSI {
continue
}
style = style.DefaultBackgroundColor()
case 58: // 16 or 24-bit underline color
var c color.Color
if n := ansi.ReadStyleColor(params[i:], &c); n > 0 {
i += n - 1
}
if w.Profile < ANSI {
continue
}
style = style.UnderlineColor(w.Profile.Convert(c))
case 59: // default underline color
if w.Profile < ANSI {
continue
}
style = style.DefaultUnderlineColor()
case 90, 91, 92, 93, 94, 95, 96, 97: // 8-bit bright foreground color
if w.Profile < ANSI {
continue
}
style = style.ForegroundColor(
w.Profile.Convert(ansi.BasicColor(param - 90 + 8))) //nolint:gosec
case 100, 101, 102, 103, 104, 105, 106, 107: // 8-bit bright background color
if w.Profile < ANSI {
continue
}
style = style.BackgroundColor(
w.Profile.Convert(ansi.BasicColor(param - 100 + 8))) //nolint:gosec
default:
// If this is not a color attribute, just append it to the style.
style = append(style, strconv.Itoa(param))
}
}
_, _ = buf.WriteString(style.String())
}

View File

@ -1 +1,2 @@
ssh_example_ed25519* ssh_example_ed25519*
dist/

View File

@ -15,10 +15,22 @@ issues:
linters: linters:
enable: enable:
- bodyclose - bodyclose
- exhaustive
- goconst
- godot
- godox
- gofumpt - gofumpt
- goimports - goimports
- gomoddirectives
- goprintffuncname
- gosec - gosec
- misspell
- nakedret
- nestif
- nilerr - nilerr
- noctx
- nolintlint
- prealloc
- revive - revive
- rowserrcheck - rowserrcheck
- sqlclosecheck - sqlclosecheck
@ -26,3 +38,4 @@ linters:
- unconvert - unconvert
- unparam - unparam
- whitespace - whitespace
- wrapcheck

View File

@ -1,5 +1,5 @@
# yaml-language-server: $schema=https://goreleaser.com/static/schema-pro.json
version: 2
includes: includes:
- from_url: - from_url:
url: charmbracelet/meta/main/goreleaser-lib.yaml url: charmbracelet/meta/main/goreleaser-lib.yaml
# yaml-language-server: $schema=https://goreleaser.com/static/schema-pro.json

View File

@ -10,7 +10,7 @@
Style definitions for nice terminal layouts. Built with TUIs in mind. Style definitions for nice terminal layouts. Built with TUIs in mind.
![Lip Gloss example](https://github.com/user-attachments/assets/99c5c015-551b-4897-8cd1-bcaafa0aad5a) ![Lip Gloss example](https://github.com/user-attachments/assets/7950b1c1-e0e3-427e-8e7d-6f7f6ad17ca7)
Lip Gloss takes an expressive, declarative approach to terminal rendering. Lip Gloss takes an expressive, declarative approach to terminal rendering.
Users familiar with CSS will feel at home with Lip Gloss. Users familiar with CSS will feel at home with Lip Gloss.
@ -425,17 +425,28 @@ rows := [][]string{
Use the table package to style and render the table. Use the table package to style and render the table.
```go ```go
var (
purple = lipgloss.Color("99")
gray = lipgloss.Color("245")
lightGray = lipgloss.Color("241")
headerStyle = lipgloss.NewStyle().Foreground(purple).Bold(true).Align(lipgloss.Center)
cellStyle = lipgloss.NewStyle().Padding(0, 1).Width(14)
oddRowStyle = cellStyle.Foreground(gray)
evenRowStyle = cellStyle.Foreground(lightGray)
)
t := table.New(). t := table.New().
Border(lipgloss.NormalBorder()). Border(lipgloss.NormalBorder()).
BorderStyle(lipgloss.NewStyle().Foreground(lipgloss.Color("99"))). BorderStyle(lipgloss.NewStyle().Foreground(purple)).
StyleFunc(func(row, col int) lipgloss.Style { StyleFunc(func(row, col int) lipgloss.Style {
switch { switch {
case row == 0: case row == table.HeaderRow:
return HeaderStyle return headerStyle
case row%2 == 0: case row%2 == 0:
return EvenRowStyle return evenRowStyle
default: default:
return OddRowStyle return oddRowStyle
} }
}). }).
Headers("LANGUAGE", "FORMAL", "INFORMAL"). Headers("LANGUAGE", "FORMAL", "INFORMAL").
@ -453,6 +464,45 @@ fmt.Println(t)
![Table Example](https://github.com/charmbracelet/lipgloss/assets/42545625/6e4b70c4-f494-45da-a467-bdd27df30d5d) ![Table Example](https://github.com/charmbracelet/lipgloss/assets/42545625/6e4b70c4-f494-45da-a467-bdd27df30d5d)
> [!WARNING]
> Table `Rows` need to be declared before `Offset`, otherwise it does nothing.
### Table Borders
There are helpers to generate tables in markdown or ASCII style:
#### Markdown Table
```go
table.New().Border(lipgloss.MarkdownBorder()).BorderTop(false).BorderBottom(false)
```
```
| LANGUAGE | FORMAL | INFORMAL |
|----------|--------------|-----------|
| Chinese | Nǐn hǎo | Nǐ hǎo |
| French | Bonjour | Salut |
| Russian | Zdravstvuyte | Privet |
| Spanish | Hola | ¿Qué tal? |
```
#### ASCII Table
```go
table.New().Border(lipgloss.ASCIIBorder())
```
```
+----------+--------------+-----------+
| LANGUAGE | FORMAL | INFORMAL |
+----------+--------------+-----------+
| Chinese | Nǐn hǎo | Nǐ hǎo |
| French | Bonjour | Salut |
| Russian | Zdravstvuyte | Privet |
| Spanish | Hola | ¿Qué tal? |
+----------+--------------+-----------+
```
For more on tables see [the docs](https://pkg.go.dev/github.com/charmbracelet/lipgloss?tab=doc) and [examples](https://github.com/charmbracelet/lipgloss/tree/master/examples/table). For more on tables see [the docs](https://pkg.go.dev/github.com/charmbracelet/lipgloss?tab=doc) and [examples](https://github.com/charmbracelet/lipgloss/tree/master/examples/table).
## Rendering Lists ## Rendering Lists

19
vendor/github.com/charmbracelet/lipgloss/Taskfile.yaml generated vendored Normal file
View File

@ -0,0 +1,19 @@
# https://taskfile.dev
version: '3'
tasks:
lint:
desc: Run base linters
cmds:
- golangci-lint run
test:
desc: Run tests
cmds:
- go test ./... {{.CLI_ARGS}}
test:table:
desc: Run table tests
cmds:
- go test ./table {{.CLI_ARGS}}

View File

@ -30,8 +30,8 @@ func alignTextHorizontal(str string, pos Position, width int, style *termenv.Sty
l = s + l l = s + l
case Center: case Center:
// Note: remainder goes on the right. // Note: remainder goes on the right.
left := shortAmount / 2 //nolint:gomnd left := shortAmount / 2 //nolint:mnd
right := left + shortAmount%2 //nolint:gomnd right := left + shortAmount%2 //nolint:mnd
leftSpaces := strings.Repeat(" ", left) leftSpaces := strings.Repeat(" ", left)
rightSpaces := strings.Repeat(" ", right) rightSpaces := strings.Repeat(" ", right)
@ -69,7 +69,7 @@ func alignTextVertical(str string, pos Position, height int, _ *termenv.Style) s
case Top: case Top:
return str + strings.Repeat("\n", height-strHeight) return str + strings.Repeat("\n", height-strHeight)
case Center: case Center:
topPadding, bottomPadding := (height-strHeight)/2, (height-strHeight)/2 //nolint:gomnd topPadding, bottomPadding := (height-strHeight)/2, (height-strHeight)/2 //nolint:mnd
if strHeight+topPadding+bottomPadding > height { if strHeight+topPadding+bottomPadding > height {
topPadding-- topPadding--
} else if strHeight+topPadding+bottomPadding < height { } else if strHeight+topPadding+bottomPadding < height {

View File

@ -108,6 +108,11 @@ var (
TopRight: "█", TopRight: "█",
BottomLeft: "█", BottomLeft: "█",
BottomRight: "█", BottomRight: "█",
MiddleLeft: "█",
MiddleRight: "█",
Middle: "█",
MiddleTop: "█",
MiddleBottom: "█",
} }
outerHalfBlockBorder = Border{ outerHalfBlockBorder = Border{
@ -179,6 +184,38 @@ var (
MiddleTop: " ", MiddleTop: " ",
MiddleBottom: " ", MiddleBottom: " ",
} }
markdownBorder = Border{
Top: "-",
Bottom: "-",
Left: "|",
Right: "|",
TopLeft: "|",
TopRight: "|",
BottomLeft: "|",
BottomRight: "|",
MiddleLeft: "|",
MiddleRight: "|",
Middle: "|",
MiddleTop: "|",
MiddleBottom: "|",
}
asciiBorder = Border{
Top: "-",
Bottom: "-",
Left: "|",
Right: "|",
TopLeft: "+",
TopRight: "+",
BottomLeft: "+",
BottomRight: "+",
MiddleLeft: "+",
MiddleRight: "+",
Middle: "+",
MiddleTop: "+",
MiddleBottom: "+",
}
) )
// NormalBorder returns a standard-type border with a normal weight and 90 // NormalBorder returns a standard-type border with a normal weight and 90
@ -226,13 +263,23 @@ func HiddenBorder() Border {
return hiddenBorder return hiddenBorder
} }
// MarkdownBorder returns a table border in markdown style.
//
// Make sure to disable top and bottom border for the best result. This will
// ensure that the output is valid markdown.
//
// table.New().Border(lipgloss.MarkdownBorder()).BorderTop(false).BorderBottom(false)
func MarkdownBorder() Border {
return markdownBorder
}
// ASCIIBorder returns a table border with ASCII characters.
func ASCIIBorder() Border {
return asciiBorder
}
func (s Style) applyBorder(str string) string { func (s Style) applyBorder(str string) string {
var ( var (
topSet = s.isSet(borderTopKey)
rightSet = s.isSet(borderRightKey)
bottomSet = s.isSet(borderBottomKey)
leftSet = s.isSet(borderLeftKey)
border = s.getBorderStyle() border = s.getBorderStyle()
hasTop = s.getAsBool(borderTopKey, false) hasTop = s.getAsBool(borderTopKey, false)
hasRight = s.getAsBool(borderRightKey, false) hasRight = s.getAsBool(borderRightKey, false)
@ -252,7 +299,7 @@ func (s Style) applyBorder(str string) string {
// If a border is set and no sides have been specifically turned on or off // If a border is set and no sides have been specifically turned on or off
// render borders on all sides. // render borders on all sides.
if border != noBorder && !(topSet || rightSet || bottomSet || leftSet) { if s.implicitBorders() {
hasTop = true hasTop = true
hasRight = true hasRight = true
hasBottom = true hasBottom = true

View File

@ -35,7 +35,7 @@ func (NoColor) color(*Renderer) termenv.Color {
// //
// Deprecated. // Deprecated.
func (n NoColor) RGBA() (r, g, b, a uint32) { func (n NoColor) RGBA() (r, g, b, a uint32) {
return 0x0, 0x0, 0x0, 0xFFFF //nolint:gomnd return 0x0, 0x0, 0x0, 0xFFFF //nolint:mnd
} }
// Color specifies a color by hex or ANSI value. For example: // Color specifies a color by hex or ANSI value. For example:

View File

@ -300,7 +300,7 @@ func (s Style) GetBorderTopWidth() int {
// runes of varying widths, the widest rune is returned. If no border exists on // runes of varying widths, the widest rune is returned. If no border exists on
// the top edge, 0 is returned. // the top edge, 0 is returned.
func (s Style) GetBorderTopSize() int { func (s Style) GetBorderTopSize() int {
if !s.getAsBool(borderTopKey, false) { if !s.getAsBool(borderTopKey, false) && !s.implicitBorders() {
return 0 return 0
} }
return s.getBorderStyle().GetTopSize() return s.getBorderStyle().GetTopSize()
@ -310,7 +310,7 @@ func (s Style) GetBorderTopSize() int {
// runes of varying widths, the widest rune is returned. If no border exists on // runes of varying widths, the widest rune is returned. If no border exists on
// the left edge, 0 is returned. // the left edge, 0 is returned.
func (s Style) GetBorderLeftSize() int { func (s Style) GetBorderLeftSize() int {
if !s.getAsBool(borderLeftKey, false) { if !s.getAsBool(borderLeftKey, false) && !s.implicitBorders() {
return 0 return 0
} }
return s.getBorderStyle().GetLeftSize() return s.getBorderStyle().GetLeftSize()
@ -320,7 +320,7 @@ func (s Style) GetBorderLeftSize() int {
// contain runes of varying widths, the widest rune is returned. If no border // contain runes of varying widths, the widest rune is returned. If no border
// exists on the left edge, 0 is returned. // exists on the left edge, 0 is returned.
func (s Style) GetBorderBottomSize() int { func (s Style) GetBorderBottomSize() int {
if !s.getAsBool(borderBottomKey, false) { if !s.getAsBool(borderBottomKey, false) && !s.implicitBorders() {
return 0 return 0
} }
return s.getBorderStyle().GetBottomSize() return s.getBorderStyle().GetBottomSize()
@ -330,7 +330,7 @@ func (s Style) GetBorderBottomSize() int {
// contain runes of varying widths, the widest rune is returned. If no border // contain runes of varying widths, the widest rune is returned. If no border
// exists on the right edge, 0 is returned. // exists on the right edge, 0 is returned.
func (s Style) GetBorderRightSize() int { func (s Style) GetBorderRightSize() int {
if !s.getAsBool(borderRightKey, false) { if !s.getAsBool(borderRightKey, false) && !s.implicitBorders() {
return 0 return 0
} }
return s.getBorderStyle().GetRightSize() return s.getBorderStyle().GetRightSize()
@ -519,6 +519,20 @@ func (s Style) getBorderStyle() Border {
return s.borderStyle return s.borderStyle
} }
// Returns whether or not the style has implicit borders. This happens when
// a border style has been set but no border sides have been explicitly turned
// on or off.
func (s Style) implicitBorders() bool {
var (
borderStyle = s.getBorderStyle()
topSet = s.isSet(borderTopKey)
rightSet = s.isSet(borderRightKey)
bottomSet = s.isSet(borderBottomKey)
leftSet = s.isSet(borderLeftKey)
)
return borderStyle != noBorder && !(topSet || rightSet || bottomSet || leftSet)
}
func (s Style) getAsTransform(propKey) func(string) string { func (s Style) getAsTransform(propKey) func(string) string {
if !s.isSet(transformKey) { if !s.isSet(transformKey) {
return nil return nil

48
vendor/github.com/charmbracelet/lipgloss/ranges.go generated vendored Normal file
View File

@ -0,0 +1,48 @@
package lipgloss
import (
"strings"
"github.com/charmbracelet/x/ansi"
)
// StyleRanges allows to, given a string, style ranges of it differently.
// The function will take into account existing styles.
// Ranges should not overlap.
func StyleRanges(s string, ranges ...Range) string {
if len(ranges) == 0 {
return s
}
var buf strings.Builder
lastIdx := 0
stripped := ansi.Strip(s)
// Use Truncate and TruncateLeft to style match.MatchedIndexes without
// losing the original option style:
for _, rng := range ranges {
// Add the text before this match
if rng.Start > lastIdx {
buf.WriteString(ansi.Cut(s, lastIdx, rng.Start))
}
// Add the matched range with its highlight
buf.WriteString(rng.Style.Render(ansi.Cut(stripped, rng.Start, rng.End)))
lastIdx = rng.End
}
// Add any remaining text after the last match
buf.WriteString(ansi.TruncateLeft(s, lastIdx, ""))
return buf.String()
}
// NewRange returns a range that can be used with [StyleRanges].
func NewRange(start, end int, style Style) Range {
return Range{start, end, style}
}
// Range to be used with [StyleRanges].
type Range struct {
Start, End int
Style Style
}
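A quick usage sketch for the new `StyleRanges` helper; the offsets below assume they index into the ANSI-stripped text and, per the doc comment, must not overlap:

```go
package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss"
)

func main() {
	bold := lipgloss.NewStyle().Bold(true)
	underline := lipgloss.NewStyle().Underline(true)

	// Style "Lip" and "Gloss" differently within one string.
	out := lipgloss.StyleRanges("Lip Gloss",
		lipgloss.NewRange(0, 3, bold),
		lipgloss.NewRange(4, 9, underline),
	)
	fmt.Println(out)
}
```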

View File

@ -710,19 +710,19 @@ func whichSidesInt(i ...int) (top, right, bottom, left int, ok bool) {
left = i[0] left = i[0]
right = i[0] right = i[0]
ok = true ok = true
case 2: //nolint:gomnd case 2: //nolint:mnd
top = i[0] top = i[0]
bottom = i[0] bottom = i[0]
left = i[1] left = i[1]
right = i[1] right = i[1]
ok = true ok = true
case 3: //nolint:gomnd case 3: //nolint:mnd
top = i[0] top = i[0]
left = i[1] left = i[1]
right = i[1] right = i[1]
bottom = i[2] bottom = i[2]
ok = true ok = true
case 4: //nolint:gomnd case 4: //nolint:mnd
top = i[0] top = i[0]
right = i[1] right = i[1]
bottom = i[2] bottom = i[2]
@ -743,19 +743,19 @@ func whichSidesBool(i ...bool) (top, right, bottom, left bool, ok bool) {
left = i[0] left = i[0]
right = i[0] right = i[0]
ok = true ok = true
case 2: //nolint:gomnd case 2: //nolint:mnd
top = i[0] top = i[0]
bottom = i[0] bottom = i[0]
left = i[1] left = i[1]
right = i[1] right = i[1]
ok = true ok = true
case 3: //nolint:gomnd case 3: //nolint:mnd
top = i[0] top = i[0]
left = i[1] left = i[1]
right = i[1] right = i[1]
bottom = i[2] bottom = i[2]
ok = true ok = true
case 4: //nolint:gomnd case 4: //nolint:mnd
top = i[0] top = i[0]
right = i[1] right = i[1]
bottom = i[2] bottom = i[2]
@ -776,19 +776,19 @@ func whichSidesColor(i ...TerminalColor) (top, right, bottom, left TerminalColor
left = i[0] left = i[0]
right = i[0] right = i[0]
ok = true ok = true
case 2: //nolint:gomnd case 2: //nolint:mnd
top = i[0] top = i[0]
bottom = i[0] bottom = i[0]
left = i[1] left = i[1]
right = i[1] right = i[1]
ok = true ok = true
case 3: //nolint:gomnd case 3: //nolint:mnd
top = i[0] top = i[0]
left = i[1] left = i[1]
right = i[1] right = i[1]
bottom = i[2] bottom = i[2]
ok = true ok = true
case 4: //nolint:gomnd case 4: //nolint:mnd
top = i[0] top = i[0]
right = i[1] right = i[1]
bottom = i[2] bottom = i[2]

View File

@ -5,6 +5,7 @@ import (
"unicode" "unicode"
"github.com/charmbracelet/x/ansi" "github.com/charmbracelet/x/ansi"
"github.com/charmbracelet/x/cellbuf"
"github.com/muesli/termenv" "github.com/muesli/termenv"
) )
@ -364,7 +365,7 @@ func (s Style) Render(strs ...string) string {
// Word wrap // Word wrap
if !inline && width > 0 { if !inline && width > 0 {
wrapAt := width - leftPadding - rightPadding wrapAt := width - leftPadding - rightPadding
str = ansi.Wrap(str, wrapAt, "") str = cellbuf.Wrap(str, wrapAt, "")
} }
// Render core text // Render core text
@ -431,7 +432,7 @@ func (s Style) Render(strs ...string) string {
{ {
numLines := strings.Count(str, "\n") numLines := strings.Count(str, "\n")
if !(numLines == 0 && width == 0) { if numLines != 0 || width != 0 {
var st *termenv.Style var st *termenv.Style
if colorWhitespace || styleWhitespace { if colorWhitespace || styleWhitespace {
st = &teWhitespace st = &teWhitespace

View File

@ -0,0 +1,418 @@
package table
import (
"math"
"strings"
"github.com/charmbracelet/lipgloss"
"github.com/charmbracelet/x/ansi"
)
// resize resizes the table to fit the specified width.
//
// Given a user defined table width, we must ensure the table is exactly that
// width. This must account for all borders, column separators, and column
// data.
//
// In the case where the table is narrower than the specified table width,
// we simply expand the columns evenly to fit the width.
// For example, if a table with 3 columns takes up 50 characters total and the
// specified width is 80, we expand each column by 10 characters, adding 30
// to the total width.
//
// In the case where the table is wider than the specified table width, we
// _could_ simply shrink the columns evenly but this would result in data
// being truncated (perhaps unnecessarily). The naive approach could result
// in very poor cropping of the table data. So, instead of shrinking columns
// evenly, we calculate the median non-whitespace length of each column, and
// shrink the columns based on the largest median.
//
// For example,
//
// ┌──────┬───────────────┬──────────┐
// │ Name │ Age of Person │ Location │
// ├──────┼───────────────┼──────────┤
// │ Kini │ 40 │ New York │
// │ Eli │ 30 │ London │
// │ Iris │ 20 │ Paris │
// └──────┴───────────────┴──────────┘
//
// Median non-whitespace length vs column width of each column:
//
// Name: 4 / 5
// Age of Person: 2 / 15
// Location: 6 / 10
//
// The biggest difference is 15 - 2, so we can shrink the 2nd column by 13.
func (t *Table) resize() {
hasHeaders := len(t.headers) > 0
rows := dataToMatrix(t.data)
r := newResizer(t.width, t.height, t.headers, rows)
r.wrap = t.wrap
r.borderColumn = t.borderColumn
r.yPaddings = make([][]int, len(r.allRows))
var allRows [][]string
if hasHeaders {
allRows = append([][]string{t.headers}, rows...)
} else {
allRows = rows
}
r.rowHeights = r.defaultRowHeights()
for i, row := range allRows {
r.yPaddings[i] = make([]int, len(row))
for j := range row {
column := &r.columns[j]
// Making sure we're passing the right index to `styleFunc`. The header row should be `-1` and
// the others should start from `0`.
rowIndex := i
if hasHeaders {
rowIndex--
}
style := t.styleFunc(rowIndex, j)
topMargin, rightMargin, bottomMargin, leftMargin := style.GetMargin()
topPadding, rightPadding, bottomPadding, leftPadding := style.GetPadding()
totalHorizontalPadding := leftMargin + rightMargin + leftPadding + rightPadding
column.xPadding = max(column.xPadding, totalHorizontalPadding)
column.fixedWidth = max(column.fixedWidth, style.GetWidth())
r.rowHeights[i] = max(r.rowHeights[i], style.GetHeight())
totalVerticalPadding := topMargin + bottomMargin + topPadding + bottomPadding
r.yPaddings[i][j] = totalVerticalPadding
}
}
// A table width wasn't specified. In this case, detect according to
// content width.
if r.tableWidth <= 0 {
r.tableWidth = r.detectTableWidth()
}
t.widths, t.heights = r.optimizedWidths()
}
// resizerColumn is a column in the resizer.
type resizerColumn struct {
index int
min int
max int
median int
rows [][]string
xPadding int // horizontal padding
fixedWidth int
}
// resizer is a table resizer.
type resizer struct {
tableWidth int
tableHeight int
headers []string
allRows [][]string
rowHeights []int
columns []resizerColumn
wrap bool
borderColumn bool
yPaddings [][]int // vertical paddings
}
// newResizer creates a new resizer.
func newResizer(tableWidth, tableHeight int, headers []string, rows [][]string) *resizer {
r := &resizer{
tableWidth: tableWidth,
tableHeight: tableHeight,
headers: headers,
}
if len(headers) > 0 {
r.allRows = append([][]string{headers}, rows...)
} else {
r.allRows = rows
}
for _, row := range r.allRows {
for i, cell := range row {
cellLen := lipgloss.Width(cell)
// Header or first row. Just add as is.
if len(r.columns) <= i {
r.columns = append(r.columns, resizerColumn{
index: i,
min: cellLen,
max: cellLen,
median: cellLen,
})
continue
}
r.columns[i].rows = append(r.columns[i].rows, row)
r.columns[i].min = min(r.columns[i].min, cellLen)
r.columns[i].max = max(r.columns[i].max, cellLen)
}
}
for j := range r.columns {
widths := make([]int, len(r.columns[j].rows))
for i, row := range r.columns[j].rows {
widths[i] = lipgloss.Width(row[j])
}
r.columns[j].median = median(widths)
}
return r
}
// optimizedWidths returns the optimized column widths and row heights.
func (r *resizer) optimizedWidths() (colWidths, rowHeights []int) {
if r.maxTotal() <= r.tableWidth {
return r.expandTableWidth()
}
return r.shrinkTableWidth()
}
// detectTableWidth detects the table width.
func (r *resizer) detectTableWidth() int {
return r.maxCharCount() + r.totalHorizontalPadding() + r.totalHorizontalBorder()
}
// expandTableWidth expands the table width.
func (r *resizer) expandTableWidth() (colWidths, rowHeights []int) {
colWidths = r.maxColumnWidths()
for {
totalWidth := sum(colWidths) + r.totalHorizontalBorder()
if totalWidth >= r.tableWidth {
break
}
shorterColumnIndex := 0
shorterColumnWidth := math.MaxInt32
for j, width := range colWidths {
if width == r.columns[j].fixedWidth {
continue
}
if width < shorterColumnWidth {
shorterColumnWidth = width
shorterColumnIndex = j
}
}
colWidths[shorterColumnIndex]++
}
rowHeights = r.expandRowHeigths(colWidths)
return
}
// shrinkTableWidth shrinks the table width.
func (r *resizer) shrinkTableWidth() (colWidths, rowHeights []int) {
colWidths = r.maxColumnWidths()
// Cut width of columns that are way too big.
shrinkBiggestColumns := func(veryBigOnly bool) {
for {
totalWidth := sum(colWidths) + r.totalHorizontalBorder()
if totalWidth <= r.tableWidth {
break
}
bigColumnIndex := -math.MaxInt32
bigColumnWidth := -math.MaxInt32
for j, width := range colWidths {
if width == r.columns[j].fixedWidth {
continue
}
if veryBigOnly {
if width >= (r.tableWidth/2) && width > bigColumnWidth { //nolint:mnd
bigColumnWidth = width
bigColumnIndex = j
}
} else {
if width > bigColumnWidth {
bigColumnWidth = width
bigColumnIndex = j
}
}
}
if bigColumnIndex < 0 || colWidths[bigColumnIndex] == 0 {
break
}
colWidths[bigColumnIndex]--
}
}
// Cut width of columns that differ the most from the median.
shrinkToMedian := func() {
for {
totalWidth := sum(colWidths) + r.totalHorizontalBorder()
if totalWidth <= r.tableWidth {
break
}
biggestDiffToMedian := -math.MaxInt32
biggestDiffToMedianIndex := -math.MaxInt32
for j, width := range colWidths {
if width == r.columns[j].fixedWidth {
continue
}
diffToMedian := width - r.columns[j].median
if diffToMedian > 0 && diffToMedian > biggestDiffToMedian {
biggestDiffToMedian = diffToMedian
biggestDiffToMedianIndex = j
}
}
if biggestDiffToMedianIndex <= 0 || colWidths[biggestDiffToMedianIndex] == 0 {
break
}
colWidths[biggestDiffToMedianIndex]--
}
}
shrinkBiggestColumns(true)
shrinkToMedian()
shrinkBiggestColumns(false)
return colWidths, r.expandRowHeigths(colWidths)
}
// expandRowHeigths expands the row heights.
func (r *resizer) expandRowHeigths(colWidths []int) (rowHeights []int) {
rowHeights = r.defaultRowHeights()
if !r.wrap {
return rowHeights
}
for i, row := range r.allRows {
for j, cell := range row {
height := r.detectContentHeight(cell, colWidths[j]-r.xPaddingForCol(j)) + r.xPaddingForCell(i, j)
if height > rowHeights[i] {
rowHeights[i] = height
}
}
}
return
}
// defaultRowHeights returns the default row heights.
func (r *resizer) defaultRowHeights() (rowHeights []int) {
rowHeights = make([]int, len(r.allRows))
for i := range rowHeights {
if i < len(r.rowHeights) {
rowHeights[i] = r.rowHeights[i]
}
if rowHeights[i] < 1 {
rowHeights[i] = 1
}
}
return
}
// maxColumnWidths returns the maximum column widths.
func (r *resizer) maxColumnWidths() []int {
maxColumnWidths := make([]int, len(r.columns))
for i, col := range r.columns {
if col.fixedWidth > 0 {
maxColumnWidths[i] = col.fixedWidth
} else {
maxColumnWidths[i] = col.max + r.xPaddingForCol(col.index)
}
}
return maxColumnWidths
}
// columnCount returns the column count.
func (r *resizer) columnCount() int {
return len(r.columns)
}
// maxCharCount returns the maximum character count.
func (r *resizer) maxCharCount() int {
var count int
for _, col := range r.columns {
if col.fixedWidth > 0 {
count += col.fixedWidth - r.xPaddingForCol(col.index)
} else {
count += col.max
}
}
return count
}
// maxTotal returns the maximum total width.
func (r *resizer) maxTotal() (maxTotal int) {
for j, column := range r.columns {
if column.fixedWidth > 0 {
maxTotal += column.fixedWidth
} else {
maxTotal += column.max + r.xPaddingForCol(j)
}
}
return
}
// totalHorizontalPadding returns the total padding.
func (r *resizer) totalHorizontalPadding() (totalHorizontalPadding int) {
for _, col := range r.columns {
totalHorizontalPadding += col.xPadding
}
return
}
// xPaddingForCol returns the horizontal padding for a column.
func (r *resizer) xPaddingForCol(j int) int {
if j >= len(r.columns) {
return 0
}
return r.columns[j].xPadding
}
// xPaddingForCell returns the horizontal padding for a cell.
func (r *resizer) xPaddingForCell(i, j int) int {
if i >= len(r.yPaddings) || j >= len(r.yPaddings[i]) {
return 0
}
return r.yPaddings[i][j]
}
// totalHorizontalBorder returns the total border.
func (r *resizer) totalHorizontalBorder() int {
return (r.columnCount() * r.borderPerCell()) + r.extraBorder()
}
// borderPerCell returns number of border chars per cell.
func (r *resizer) borderPerCell() int {
if r.borderColumn {
return 1
}
return 0
}
// extraBorder returns the number of the extra border char at the end of the table.
func (r *resizer) extraBorder() int {
if r.borderColumn {
return 1
}
return 0
}
// detectContentHeight detects the content height.
func (r *resizer) detectContentHeight(content string, width int) (height int) {
if width == 0 {
return 1
}
content = strings.ReplaceAll(content, "\r\n", "\n")
for _, line := range strings.Split(content, "\n") {
height += strings.Count(ansi.Wrap(line, width, ""), "\n") + 1
}
return
}
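Exercising the resizer through the public API: give the table a fixed `Width` narrower than its natural size and, per the rule described in the comment above, the column whose width most exceeds its median content ("Age of Person" in the worked example) is shrunk first. A sketch:

```go
package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss"
	"github.com/charmbracelet/lipgloss/table"
)

func main() {
	t := table.New().
		Border(lipgloss.NormalBorder()).
		Headers("Name", "Age of Person", "Location").
		Row("Kini", "40", "New York").
		Row("Eli", "30", "London").
		Row("Iris", "20", "Paris").
		Width(28) // narrower than the natural width, so the resizer has to shrink columns

	fmt.Println(t)
}
```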

View File

@ -111,3 +111,19 @@ func (m *Filter) Rows() int {
return j return j
} }
// dataToMatrix converts an object that implements the Data interface to a matrix of strings.
func dataToMatrix(data Data) (rows [][]string) {
numRows := data.Rows()
numCols := data.Columns()
rows = make([][]string, numRows)
for i := 0; i < numRows; i++ {
rows[i] = make([]string, numCols)
for j := 0; j < numCols; j++ {
rows[i][j] = data.At(i, j)
}
}
return
}

View File

@ -61,6 +61,7 @@ type Table struct {
height int height int
useManualHeight bool useManualHeight bool
offset int offset int
wrap bool
// widths tracks the width of each column. // widths tracks the width of each column.
widths []int widths []int
@ -83,6 +84,7 @@ func New() *Table {
borderLeft: true, borderLeft: true,
borderRight: true, borderRight: true,
borderTop: true, borderTop: true,
wrap: true,
data: NewStringData(), data: NewStringData(),
} }
} }
@ -209,11 +211,20 @@ func (t *Table) Height(h int) *Table {
} }
// Offset sets the table rendering offset. // Offset sets the table rendering offset.
//
// Warning: you may declare Offset only after setting Rows. Otherwise it will be
// ignored.
func (t *Table) Offset(o int) *Table { func (t *Table) Offset(o int) *Table {
t.offset = o t.offset = o
return t return t
} }
// Wrap dictates whether or not the table content should wrap.
func (t *Table) Wrap(w bool) *Table {
t.wrap = w
return t
}
// String returns the table as a string. // String returns the table as a string.
func (t *Table) String() string { func (t *Table) String() string {
hasHeaders := len(t.headers) > 0 hasHeaders := len(t.headers) > 0
@ -231,120 +242,8 @@ func (t *Table) String() string {
} }
} }
// Initialize the widths. // Do all the sizing calculations for width and height.
t.widths = make([]int, max(len(t.headers), t.data.Columns())) t.resize()
t.heights = make([]int, btoi(hasHeaders)+t.data.Rows())
// The style function may affect width of the table. It's possible to set
// the StyleFunc after the headers and rows. Update the widths for a final
// time.
for i, cell := range t.headers {
t.widths[i] = max(t.widths[i], lipgloss.Width(t.style(HeaderRow, i).Render(cell)))
t.heights[0] = max(t.heights[0], lipgloss.Height(t.style(HeaderRow, i).Render(cell)))
}
for r := 0; r < t.data.Rows(); r++ {
for i := 0; i < t.data.Columns(); i++ {
cell := t.data.At(r, i)
rendered := t.style(r, i).Render(cell)
t.heights[r+btoi(hasHeaders)] = max(t.heights[r+btoi(hasHeaders)], lipgloss.Height(rendered))
t.widths[i] = max(t.widths[i], lipgloss.Width(rendered))
}
}
// Table Resizing Logic.
//
// Given a user defined table width, we must ensure the table is exactly that
// width. This must account for all borders, column, separators, and column
// data.
//
// In the case where the table is narrower than the specified table width,
// we simply expand the columns evenly to fit the width.
// For example, a table with 3 columns takes up 50 characters total, and the
// width specified is 80, we expand each column by 10 characters, adding 30
// to the total width.
//
// In the case where the table is wider than the specified table width, we
// _could_ simply shrink the columns evenly but this would result in data
// being truncated (perhaps unnecessarily). The naive approach could result
// in very poor cropping of the table data. So, instead of shrinking columns
// evenly, we calculate the median non-whitespace length of each column, and
// shrink the columns based on the largest median.
//
// For example,
// ┌──────┬───────────────┬──────────┐
// │ Name │ Age of Person │ Location │
// ├──────┼───────────────┼──────────┤
// │ Kini │ 40 │ New York │
// │ Eli │ 30 │ London │
// │ Iris │ 20 │ Paris │
// └──────┴───────────────┴──────────┘
//
// Median non-whitespace length vs column width of each column:
//
// Name: 4 / 5
// Age of Person: 2 / 15
// Location: 6 / 10
//
// The biggest difference is 15 - 2, so we can shrink the 2nd column by 13.
width := t.computeWidth()
if width < t.width && t.width > 0 {
// Table is too narrow, expand the columns evenly until it reaches the
// desired width.
var i int
for width < t.width {
t.widths[i]++
width++
i = (i + 1) % len(t.widths)
}
} else if width > t.width && t.width > 0 {
// Table is too wide, calculate the median non-whitespace length of each
// column, and shrink the columns based on the largest difference.
columnMedians := make([]int, len(t.widths))
for c := range t.widths {
trimmedWidth := make([]int, t.data.Rows())
for r := 0; r < t.data.Rows(); r++ {
renderedCell := t.style(r+btoi(hasHeaders), c).Render(t.data.At(r, c))
nonWhitespaceChars := lipgloss.Width(strings.TrimRight(renderedCell, " "))
trimmedWidth[r] = nonWhitespaceChars + 1
}
columnMedians[c] = median(trimmedWidth)
}
// Find the biggest differences between the median and the column width.
// Shrink the columns based on the largest difference.
differences := make([]int, len(t.widths))
for i := range t.widths {
differences[i] = t.widths[i] - columnMedians[i]
}
for width > t.width {
index, _ := largest(differences)
if differences[index] < 1 {
break
}
shrink := min(differences[index], width-t.width)
t.widths[index] -= shrink
width -= shrink
differences[index] = 0
}
// Table is still too wide, begin shrinking the columns based on the
// largest column.
for width > t.width {
index, _ := largest(t.widths)
if t.widths[index] < 1 {
break
}
t.widths[index]--
width--
}
}
var sb strings.Builder var sb strings.Builder
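The long comment removed above documents the old width-reduction heuristic that the new resize() call replaces. As a standalone illustration (not the library's actual code), here is a sketch of that median-based shrinking idea, using the worked Name / Age of Person / Location example from the comment:

```go
package main

import (
	"fmt"
	"sort"
)

// median returns the median of a slice of ints (0 for an empty slice).
func median(n []int) int {
	if len(n) == 0 {
		return 0
	}
	s := append([]int(nil), n...)
	sort.Ints(s)
	if len(s)%2 == 0 {
		h := len(s) / 2
		return (s[h-1] + s[h]) / 2
	}
	return s[len(s)/2]
}

// shrink reduces column widths until the total fits target, preferring the
// columns whose width exceeds their median content length the most.
func shrink(widths, medians []int, target int) []int {
	total := 0
	for _, w := range widths {
		total += w
	}
	diff := make([]int, len(widths))
	for i := range widths {
		diff[i] = widths[i] - medians[i]
	}
	for total > target {
		// Pick the column with the largest width-vs-median difference.
		idx, best := 0, diff[0]
		for i, d := range diff {
			if d > best {
				idx, best = i, d
			}
		}
		if best < 1 {
			break // nothing left to shrink without cutting below the medians
		}
		cut := best
		if total-target < cut {
			cut = total - target
		}
		widths[idx] -= cut
		total -= cut
		diff[idx] = 0
	}
	return widths
}

func main() {
	// Column widths 5/15/10 and median content lengths 4/2/6, target 20.
	fmt.Println(shrink([]int{5, 15, 10}, []int{4, 2, 6}, 20))
	// → [5 5 10]: the "Age of Person" column absorbs the whole reduction.
}
```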
@ -393,15 +292,6 @@ func (t *Table) String() string {
Render(sb.String()) Render(sb.String())
} }
// computeWidth computes the width of the table in its current configuration.
func (t *Table) computeWidth() int {
width := sum(t.widths) + btoi(t.borderLeft) + btoi(t.borderRight)
if t.borderColumn {
width += len(t.widths) - 1
}
return width
}
// computeHeight computes the height of the table in its current configuration. // computeHeight computes the height of the table in its current configuration.
func (t *Table) computeHeight() int { func (t *Table) computeHeight() int {
hasHeaders := len(t.headers) > 0 hasHeaders := len(t.headers) > 0
@ -553,13 +443,17 @@ func (t *Table) constructRow(index int, isOverflow bool) string {
} }
cellStyle := t.style(index, c) cellStyle := t.style(index, c)
if !t.wrap {
length := (cellWidth * height) - cellStyle.GetHorizontalPadding()
cell = ansi.Truncate(cell, length, "…")
}
cells = append(cells, cellStyle. cells = append(cells, cellStyle.
// Account for the margins in the cell sizing. // Account for the margins in the cell sizing.
Height(height-cellStyle.GetVerticalMargins()). Height(height-cellStyle.GetVerticalMargins()).
MaxHeight(height). MaxHeight(height).
Width(t.widths[c]-cellStyle.GetHorizontalMargins()). Width(t.widths[c]-cellStyle.GetHorizontalMargins()).
MaxWidth(t.widths[c]). MaxWidth(t.widths[c]).
Render(ansi.Truncate(cell, cellWidth*height, "…"))) Render(cell))
if c < t.data.Columns()-1 && t.borderColumn { if c < t.data.Columns()-1 && t.borderColumn {
cells = append(cells, left) cells = append(cells, left)
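The net effect of the wrap/truncate change is easiest to see from the public API. A minimal sketch using the vendored lipgloss table package (output shapes are illustrative):

```go
package main

import (
	"fmt"

	"github.com/charmbracelet/lipgloss/table"
)

func main() {
	t := table.New().
		Headers("SERVICE", "STATUS").
		Row("app", "deployed, waiting to become healthy").
		Row("db", "deployed").
		Width(30)

	// Default behaviour: long cells wrap onto additional lines.
	fmt.Println(t.String())

	// With Wrap(false), over-long cells are truncated with "…" instead.
	fmt.Println(t.Wrap(false).String())
}
```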

View File

@ -20,7 +20,7 @@ func max(a, b int) int { //nolint:predeclared
return b return b
} }
// min returns the greater of two integers. // min returns the smaller of two integers.
func min(a, b int) int { //nolint:predeclared func min(a, b int) int { //nolint:predeclared
if a < b { if a < b {
return a return a
@ -45,20 +45,8 @@ func median(n []int) int {
return 0 return 0
} }
if len(n)%2 == 0 { if len(n)%2 == 0 {
h := len(n) / 2 //nolint:gomnd h := len(n) / 2 //nolint:mnd
return (n[h-1] + n[h]) / 2 //nolint:gomnd return (n[h-1] + n[h]) / 2 //nolint:mnd
} }
return n[len(n)/2] return n[len(n)/2]
} }
// largest returns the largest element and it's index from a slice of integers.
func largest(n []int) (int, int) { //nolint:unparam
var largest, index int
for i, e := range n {
if n[i] > n[index] {
largest = e
index = i
}
}
return index, largest
}

View File

@ -15,20 +15,27 @@ issues:
linters: linters:
enable: enable:
- bodyclose - bodyclose
- dupl - exhaustive
- exportloopref
- goconst - goconst
- godot - godot
- godox - godox
- gofumpt
- goimports - goimports
- gomoddirectives
- goprintffuncname - goprintffuncname
- gosec - gosec
- misspell - misspell
- nakedret
- nestif
- nilerr
- noctx
- nolintlint - nolintlint
- prealloc - prealloc
- revive - revive
- rowserrcheck - rowserrcheck
- sqlclosecheck - sqlclosecheck
- tparallel
- unconvert - unconvert
- unparam - unparam
- whitespace - whitespace
- wrapcheck

View File

@ -1,61 +1,151 @@
package log package log
import ( import (
"bytes"
"encoding/json" "encoding/json"
"fmt" "fmt"
"time" "time"
) )
func (l *Logger) jsonFormatter(keyvals ...interface{}) { func (l *Logger) jsonFormatter(keyvals ...interface{}) {
m := make(map[string]interface{}, len(keyvals)/2) jw := &jsonWriter{w: &l.b}
for i := 0; i < len(keyvals); i += 2 { jw.start()
switch keyvals[i] {
case TimestampKey: i := 0
if t, ok := keyvals[i+1].(time.Time); ok { for i < len(keyvals) {
m[TimestampKey] = t.Format(l.timeFormat) switch kv := keyvals[i].(type) {
} case slogAttr:
case LevelKey: l.jsonFormatterRoot(jw, kv.Key, kv.Value)
if level, ok := keyvals[i+1].(Level); ok { i++
m[LevelKey] = level.String()
}
case CallerKey:
if caller, ok := keyvals[i+1].(string); ok {
m[CallerKey] = caller
}
case PrefixKey:
if prefix, ok := keyvals[i+1].(string); ok {
m[PrefixKey] = prefix
}
case MessageKey:
if msg := keyvals[i+1]; msg != nil {
m[MessageKey] = fmt.Sprint(msg)
}
default: default:
var ( if i+1 < len(keyvals) {
key string l.jsonFormatterRoot(jw, keyvals[i], keyvals[i+1])
val interface{}
)
switch k := keyvals[i].(type) {
case fmt.Stringer:
key = k.String()
case error:
key = k.Error()
default:
key = fmt.Sprint(k)
} }
switch v := keyvals[i+1].(type) { i += 2
case error:
val = v.Error()
case fmt.Stringer:
val = v.String()
default:
val = v
}
m[key] = val
} }
} }
e := json.NewEncoder(&l.b) jw.end()
e.SetEscapeHTML(false) l.b.WriteRune('\n')
_ = e.Encode(m) }
func (l *Logger) jsonFormatterRoot(jw *jsonWriter, key, value any) {
switch key {
case TimestampKey:
if t, ok := value.(time.Time); ok {
jw.objectItem(TimestampKey, t.Format(l.timeFormat))
}
case LevelKey:
if level, ok := value.(Level); ok {
jw.objectItem(LevelKey, level.String())
}
case CallerKey:
if caller, ok := value.(string); ok {
jw.objectItem(CallerKey, caller)
}
case PrefixKey:
if prefix, ok := value.(string); ok {
jw.objectItem(PrefixKey, prefix)
}
case MessageKey:
if msg := value; msg != nil {
jw.objectItem(MessageKey, fmt.Sprint(msg))
}
default:
l.jsonFormatterItem(jw, key, value)
}
}
func (l *Logger) jsonFormatterItem(jw *jsonWriter, key, value any) {
switch k := key.(type) {
case fmt.Stringer:
jw.objectKey(k.String())
case error:
jw.objectKey(k.Error())
default:
jw.objectKey(fmt.Sprint(k))
}
switch v := value.(type) {
case error:
jw.objectValue(v.Error())
case slogLogValuer:
l.writeSlogValue(jw, v.LogValue())
case slogValue:
l.writeSlogValue(jw, v.Resolve())
case fmt.Stringer:
jw.objectValue(v.String())
default:
jw.objectValue(v)
}
}
func (l *Logger) writeSlogValue(jw *jsonWriter, v slogValue) {
switch v.Kind() { //nolint:exhaustive
case slogKindGroup:
jw.start()
for _, attr := range v.Group() {
l.jsonFormatterItem(jw, attr.Key, attr.Value)
}
jw.end()
default:
jw.objectValue(v.Any())
}
}
type jsonWriter struct {
w *bytes.Buffer
d int
}
func (w *jsonWriter) start() {
w.w.WriteRune('{')
w.d = 0
}
func (w *jsonWriter) end() {
w.w.WriteRune('}')
}
func (w *jsonWriter) objectItem(key string, value any) {
w.objectKey(key)
w.objectValue(value)
}
func (w *jsonWriter) objectKey(key string) {
if w.d > 0 {
w.w.WriteRune(',')
}
w.d++
pos := w.w.Len()
err := w.writeEncoded(key)
if err != nil {
w.w.Truncate(pos)
w.w.WriteString(`"invalid key"`)
}
w.w.WriteRune(':')
}
func (w *jsonWriter) objectValue(value any) {
pos := w.w.Len()
err := w.writeEncoded(value)
if err != nil {
w.w.Truncate(pos)
w.w.WriteString(`"invalid value"`)
}
}
func (w *jsonWriter) writeEncoded(v any) error {
e := json.NewEncoder(w.w)
e.SetEscapeHTML(false)
if err := e.Encode(v); err != nil {
return fmt.Errorf("failed to encode value: %w", err)
}
// trailing \n added by json.Encode
b := w.w.Bytes()
if len(b) > 0 && b[len(b)-1] == '\n' {
w.w.Truncate(w.w.Len() - 1)
}
return nil
} }
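For reference, a minimal sketch of the JSON formatter whose internals changed above; one user-visible consequence of streaming through jsonWriter instead of collecting into a map first is that field order now follows the call order:

```go
package main

import (
	"os"

	"github.com/charmbracelet/log"
)

func main() {
	logger := log.NewWithOptions(os.Stderr, log.Options{
		Formatter:       log.JSONFormatter,
		ReportTimestamp: true,
	})
	// Keys and values are written in the order given.
	logger.Info("deploy finished", "app", "example.com", "version", "1.2.0+abc123")
}
```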

View File

@ -8,7 +8,7 @@ import (
) )
// Level is a logging level. // Level is a logging level.
type Level int32 type Level int
const ( const (
// DebugLevel is the debug level. // DebugLevel is the debug level.
@ -22,12 +22,12 @@ const (
// FatalLevel is the fatal level. // FatalLevel is the fatal level.
FatalLevel Level = 12 FatalLevel Level = 12
// noLevel is used with log.Print. // noLevel is used with log.Print.
noLevel Level = math.MaxInt32 noLevel Level = math.MaxInt
) )
// String returns the string representation of the level. // String returns the string representation of the level.
func (l Level) String() string { func (l Level) String() string {
switch l { switch l { //nolint:exhaustive
case DebugLevel: case DebugLevel:
return "debug" return "debug"
case InfoLevel: case InfoLevel:

View File

@ -1,15 +0,0 @@
//go:build go1.21
// +build go1.21
package log
import "log/slog"
// fromSlogLevel converts slog.Level to log.Level.
var fromSlogLevel = map[slog.Level]Level{
slog.LevelDebug: DebugLevel,
slog.LevelInfo: InfoLevel,
slog.LevelWarn: WarnLevel,
slog.LevelError: ErrorLevel,
slog.Level(12): FatalLevel,
}

View File

@ -1,15 +0,0 @@
//go:build !go1.21
// +build !go1.21
package log
import "golang.org/x/exp/slog"
// fromSlogLevel converts slog.Level to log.Level.
var fromSlogLevel = map[slog.Level]Level{
slog.LevelDebug: DebugLevel,
slog.LevelInfo: InfoLevel,
slog.LevelWarn: WarnLevel,
slog.LevelError: ErrorLevel,
slog.Level(12): FatalLevel,
}

View File

@ -30,7 +30,7 @@ type Logger struct {
isDiscard uint32 isDiscard uint32
level int32 level int64
prefix string prefix string
timeFunc TimeFunction timeFunc TimeFunction
timeFormat string timeFormat string
@ -59,7 +59,7 @@ func (l *Logger) Log(level Level, msg interface{}, keyvals ...interface{}) {
} }
// check if the level is allowed // check if the level is allowed
if atomic.LoadInt32(&l.level) > int32(level) { if atomic.LoadInt64(&l.level) > int64(level) {
return return
} }
@ -129,6 +129,8 @@ func (l *Logger) handle(level Level, ts time.Time, frames []runtime.Frame, msg i
l.logfmtFormatter(kvs...) l.logfmtFormatter(kvs...)
case JSONFormatter: case JSONFormatter:
l.jsonFormatter(kvs...) l.jsonFormatter(kvs...)
case TextFormatter:
fallthrough
default: default:
l.textFormatter(kvs...) l.textFormatter(kvs...)
} }
@ -234,7 +236,7 @@ func (l *Logger) GetLevel() Level {
func (l *Logger) SetLevel(level Level) { func (l *Logger) SetLevel(level Level) {
l.mu.Lock() l.mu.Lock()
defer l.mu.Unlock() defer l.mu.Unlock()
atomic.StoreInt32(&l.level, int32(level)) atomic.StoreInt64(&l.level, int64(level))
} }
// GetPrefix returns the current prefix. // GetPrefix returns the current prefix.
@ -334,7 +336,8 @@ func (l *Logger) With(keyvals ...interface{}) *Logger {
sl.b = bytes.Buffer{} sl.b = bytes.Buffer{}
sl.mu = &sync.RWMutex{} sl.mu = &sync.RWMutex{}
sl.helpers = &sync.Map{} sl.helpers = &sync.Map{}
sl.fields = append(l.fields, keyvals...) sl.fields = append(make([]interface{}, 0, len(l.fields)+len(keyvals)), l.fields...)
sl.fields = append(sl.fields, keyvals...)
sl.styles = &st sl.styles = &st
return &sl return &sl
} }

View File

@ -10,11 +10,20 @@ import (
"sync/atomic" "sync/atomic"
) )
// type aliases for slog.
type (
slogAttr = slog.Attr
slogValue = slog.Value
slogLogValuer = slog.LogValuer
)
const slogKindGroup = slog.KindGroup
// Enabled reports whether the logger is enabled for the given level. // Enabled reports whether the logger is enabled for the given level.
// //
// Implements slog.Handler. // Implements slog.Handler.
func (l *Logger) Enabled(_ context.Context, level slog.Level) bool { func (l *Logger) Enabled(_ context.Context, level slog.Level) bool {
return atomic.LoadInt32(&l.level) <= int32(fromSlogLevel[level]) return atomic.LoadInt64(&l.level) <= int64(level)
} }
// Handle handles the Record. It will only be called if Enabled returns true. // Handle handles the Record. It will only be called if Enabled returns true.
@ -27,13 +36,13 @@ func (l *Logger) Handle(ctx context.Context, record slog.Record) error {
fields := make([]interface{}, 0, record.NumAttrs()*2) fields := make([]interface{}, 0, record.NumAttrs()*2)
record.Attrs(func(a slog.Attr) bool { record.Attrs(func(a slog.Attr) bool {
fields = append(fields, a.Key, a.Value.String()) fields = append(fields, a.Key, a.Value)
return true return true
}) })
// Get the caller frame using the record's PC. // Get the caller frame using the record's PC.
frames := runtime.CallersFrames([]uintptr{record.PC}) frames := runtime.CallersFrames([]uintptr{record.PC})
frame, _ := frames.Next() frame, _ := frames.Next()
l.handle(fromSlogLevel[record.Level], l.timeFunc(record.Time), []runtime.Frame{frame}, record.Message, fields...) l.handle(Level(record.Level), l.timeFunc(record.Time), []runtime.Frame{frame}, record.Message, fields...)
return nil return nil
} }
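With the fromSlogLevel table gone, levels convert to and from slog purely by value (the numeric levels line up, e.g. FatalLevel = 12 above). A small sketch of driving the logger through log/slog, which is the path this handler serves:

```go
package main

import (
	"log/slog"
	"os"

	"github.com/charmbracelet/log"
)

func main() {
	handler := log.NewWithOptions(os.Stderr, log.Options{Level: log.DebugLevel})

	// *log.Logger implements slog.Handler, so it can back a slog.Logger.
	logger := slog.New(handler)
	logger.Debug("attr values are now passed through unresolved", slog.Int("attempt", 3))
}
```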

View File

@ -11,11 +11,20 @@ import (
"golang.org/x/exp/slog" "golang.org/x/exp/slog"
) )
// type aliases for slog.
type (
slogAttr = slog.Attr
slogValue = slog.Value
slogLogValuer = slog.LogValuer
)
const slogKindGroup = slog.KindGroup
// Enabled reports whether the logger is enabled for the given level. // Enabled reports whether the logger is enabled for the given level.
// //
// Implements slog.Handler. // Implements slog.Handler.
func (l *Logger) Enabled(_ context.Context, level slog.Level) bool { func (l *Logger) Enabled(_ context.Context, level slog.Level) bool {
return atomic.LoadInt32(&l.level) <= int32(fromSlogLevel[level]) return atomic.LoadInt64(&l.level) <= int64(level)
} }
// Handle handles the Record. It will only be called if Enabled returns true. // Handle handles the Record. It will only be called if Enabled returns true.
@ -24,13 +33,13 @@ func (l *Logger) Enabled(_ context.Context, level slog.Level) bool {
func (l *Logger) Handle(_ context.Context, record slog.Record) error { func (l *Logger) Handle(_ context.Context, record slog.Record) error {
fields := make([]interface{}, 0, record.NumAttrs()*2) fields := make([]interface{}, 0, record.NumAttrs()*2)
record.Attrs(func(a slog.Attr) bool { record.Attrs(func(a slog.Attr) bool {
fields = append(fields, a.Key, a.Value.String()) fields = append(fields, a.Key, a.Value)
return true return true
}) })
// Get the caller frame using the record's PC. // Get the caller frame using the record's PC.
frames := runtime.CallersFrames([]uintptr{record.PC}) frames := runtime.CallersFrames([]uintptr{record.PC})
frame, _ := frames.Next() frame, _ := frames.Next()
l.handle(fromSlogLevel[record.Level], l.timeFunc(record.Time), []runtime.Frame{frame}, record.Message, fields...) l.handle(Level(record.Level), l.timeFunc(record.Time), []runtime.Frame{frame}, record.Message, fields...)
return nil return nil
} }

View File

@ -7,6 +7,7 @@ import (
"log" "log"
"os" "os"
"sync" "sync"
"sync/atomic"
"time" "time"
"github.com/muesli/termenv" "github.com/muesli/termenv"
@ -17,25 +18,27 @@ var (
registry = sync.Map{} registry = sync.Map{}
// defaultLogger is the default global logger instance. // defaultLogger is the default global logger instance.
defaultLogger atomic.Pointer[Logger]
defaultLoggerOnce sync.Once defaultLoggerOnce sync.Once
defaultLogger *Logger
) )
// Default returns the default logger. The default logger comes with timestamp enabled. // Default returns the default logger. The default logger comes with timestamp enabled.
func Default() *Logger { func Default() *Logger {
dl := defaultLogger.Load()
if dl == nil {
defaultLoggerOnce.Do(func() { defaultLoggerOnce.Do(func() {
if defaultLogger != nil { defaultLogger.CompareAndSwap(
// already set via SetDefault. nil, NewWithOptions(os.Stderr, Options{ReportTimestamp: true}),
return )
}
defaultLogger = NewWithOptions(os.Stderr, Options{ReportTimestamp: true})
}) })
return defaultLogger dl = defaultLogger.Load()
}
return dl
} }
// SetDefault sets the default global logger. // SetDefault sets the default global logger.
func SetDefault(logger *Logger) { func SetDefault(logger *Logger) {
defaultLogger = logger defaultLogger.Store(logger)
} }
// New returns a new logger with the default options. // New returns a new logger with the default options.
@ -49,7 +52,7 @@ func NewWithOptions(w io.Writer, o Options) *Logger {
b: bytes.Buffer{}, b: bytes.Buffer{},
mu: &sync.RWMutex{}, mu: &sync.RWMutex{},
helpers: &sync.Map{}, helpers: &sync.Map{},
level: int32(o.Level), level: int64(o.Level),
reportTimestamp: o.ReportTimestamp, reportTimestamp: o.ReportTimestamp,
reportCaller: o.ReportCaller, reportCaller: o.ReportCaller,
prefix: o.Prefix, prefix: o.Prefix,
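Storing the default logger in an atomic.Pointer lets SetDefault race with the package-level helpers without extra locking. A hedged sketch of the pattern this enables:

```go
package main

import (
	"os"
	"sync"

	"github.com/charmbracelet/log"
)

func main() {
	var wg sync.WaitGroup

	// One goroutine swaps the default logger while others log through the
	// package-level helpers; the atomic.Pointer keeps this race-free.
	wg.Add(1)
	go func() {
		defer wg.Done()
		log.SetDefault(log.NewWithOptions(os.Stderr, log.Options{Prefix: "abra"}))
	}()

	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			log.Info("working")
		}()
	}
	wg.Wait()
}
```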

View File

@ -14,7 +14,7 @@ func (l *stdLogWriter) Write(p []byte) (n int, err error) {
str := strings.TrimSuffix(string(p), "\n") str := strings.TrimSuffix(string(p), "\n")
if l.opt != nil { if l.opt != nil {
switch l.opt.ForceLevel { switch l.opt.ForceLevel { //nolint:exhaustive
case DebugLevel: case DebugLevel:
l.l.Debug(str) l.l.Debug(str)
case InfoLevel: case InfoLevel:

View File

@ -21,7 +21,7 @@ func (l *Logger) writeIndent(w io.Writer, str string, indent string, newline boo
// kindly borrowed from hclog // kindly borrowed from hclog
for { for {
nl := strings.IndexByte(str, '\n') nl := strings.IndexByte(str, '\n')
if nl == -1 { if nl == -1 { //nolint:nestif
if str != "" { if str != "" {
_, _ = w.Write([]byte(indent)) _, _ = w.Write([]byte(indent))
val := escapeStringForOutput(str, false) val := escapeStringForOutput(str, false)

View File

@ -178,7 +178,7 @@ func ansiToRGB(ansi uint32) (uint32, uint32, uint32) {
// //
// r, g, b := hexToRGB(0x0000FF) // r, g, b := hexToRGB(0x0000FF)
func hexToRGB(hex uint32) (uint32, uint32, uint32) { func hexToRGB(hex uint32) (uint32, uint32, uint32) {
return hex >> 16, hex >> 8 & 0xff, hex & 0xff return hex >> 16 & 0xff, hex >> 8 & 0xff, hex & 0xff
} }
// toRGBA converts an RGB 8-bit color values to 32-bit color values suitable // toRGBA converts an RGB 8-bit color values to 32-bit color values suitable
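The added & 0xff mask only changes behaviour when the input carries bits above the 24-bit RGB range. Since the helper is unexported, this standalone snippet just reproduces the arithmetic before and after the fix:

```go
package main

import "fmt"

func hexToRGBOld(hex uint32) (uint32, uint32, uint32) {
	return hex >> 16, hex >> 8 & 0xff, hex & 0xff
}

func hexToRGBNew(hex uint32) (uint32, uint32, uint32) {
	return hex >> 16 & 0xff, hex >> 8 & 0xff, hex & 0xff
}

func main() {
	// A plain 24-bit value behaves the same either way.
	fmt.Println(hexToRGBOld(0x112233)) // 17 34 51
	fmt.Println(hexToRGBNew(0x112233)) // 17 34 51

	// A value with stray high bits (e.g. an alpha byte) used to leak into red.
	fmt.Println(hexToRGBOld(0xFF112233)) // 65297 34 51
	fmt.Println(hexToRGBNew(0xFF112233)) // 17 34 51
}
```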

View File

@ -1,120 +0,0 @@
package ansi
import (
"bytes"
"strconv"
)
// CsiSequence represents a control sequence introducer (CSI) sequence.
//
// The sequence starts with a CSI sequence, CSI (0x9B) in a 8-bit environment
// or ESC [ (0x1B 0x5B) in a 7-bit environment, followed by any number of
// parameters in the range of 0x30-0x3F, then by any number of intermediate
// byte in the range of 0x20-0x2F, then finally with a single final byte in the
// range of 0x20-0x7E.
//
// CSI P..P I..I F
//
// See ECMA-48 § 5.4.
type CsiSequence struct {
// Params contains the raw parameters of the sequence.
// This is a slice of integers, where each integer is a 32-bit integer
// containing the parameter value in the lower 31 bits and a flag in the
// most significant bit indicating whether there are more sub-parameters.
Params []Parameter
// Cmd contains the raw command of the sequence.
// The command is a 32-bit integer containing the CSI command byte in the
// lower 8 bits, the private marker in the next 8 bits, and the intermediate
// byte in the next 8 bits.
//
// CSI ? u
//
// Is represented as:
//
// 'u' | '?' << 8
Cmd Command
}
var _ Sequence = CsiSequence{}
// Clone returns a deep copy of the CSI sequence.
func (s CsiSequence) Clone() Sequence {
return CsiSequence{
Params: append([]Parameter(nil), s.Params...),
Cmd: s.Cmd,
}
}
// Marker returns the marker byte of the CSI sequence.
// This is always gonna be one of the following '<' '=' '>' '?' and in the
// range of 0x3C-0x3F.
// Zero is returned if the sequence does not have a marker.
func (s CsiSequence) Marker() int {
return s.Cmd.Marker()
}
// Intermediate returns the intermediate byte of the CSI sequence.
// An intermediate byte is in the range of 0x20-0x2F. This includes these
// characters from ' ', '!', '"', '#', '$', '%', '&', ”', '(', ')', '*', '+',
// ',', '-', '.', '/'.
// Zero is returned if the sequence does not have an intermediate byte.
func (s CsiSequence) Intermediate() int {
return s.Cmd.Intermediate()
}
// Command returns the command byte of the CSI sequence.
func (s CsiSequence) Command() int {
return s.Cmd.Command()
}
// Param is a helper that returns the parameter at the given index and falls
// back to the default value if the parameter is missing. If the index is out
// of bounds, it returns the default value and false.
func (s CsiSequence) Param(i, def int) (int, bool) {
if i < 0 || i >= len(s.Params) {
return def, false
}
return s.Params[i].Param(def), true
}
// String returns a string representation of the sequence.
// The string will always be in the 7-bit format i.e (ESC [ P..P I..I F).
func (s CsiSequence) String() string {
return s.buffer().String()
}
// buffer returns a buffer containing the sequence.
func (s CsiSequence) buffer() *bytes.Buffer {
var b bytes.Buffer
b.WriteString("\x1b[")
if m := s.Marker(); m != 0 {
b.WriteByte(byte(m))
}
for i, p := range s.Params {
param := p.Param(-1)
if param >= 0 {
b.WriteString(strconv.Itoa(param))
}
if i < len(s.Params)-1 {
if p.HasMore() {
b.WriteByte(':')
} else {
b.WriteByte(';')
}
}
}
if i := s.Intermediate(); i != 0 {
b.WriteByte(byte(i))
}
if cmd := s.Command(); cmd != 0 {
b.WriteByte(byte(cmd))
}
return &b
}
// Bytes returns the byte representation of the sequence.
// The bytes will always be in the 7-bit format i.e (ESC [ P..P I..I F).
func (s CsiSequence) Bytes() []byte {
return s.buffer().Bytes()
}

View File

@ -14,7 +14,7 @@ import (
// //
// See https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h3-PC-Style-Function-Keys // See https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h3-PC-Style-Function-Keys
const ( const (
RequestNameVersion = "\x1b[>0q" RequestNameVersion = "\x1b[>q"
XTVERSION = RequestNameVersion XTVERSION = RequestNameVersion
) )
@ -24,6 +24,7 @@ const (
// DCS > | text ST // DCS > | text ST
// //
// See https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h3-PC-Style-Function-Keys // See https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h3-PC-Style-Function-Keys
//
// Deprecated: use [RequestNameVersion] instead. // Deprecated: use [RequestNameVersion] instead.
const RequestXTVersion = RequestNameVersion const RequestXTVersion = RequestNameVersion
@ -40,7 +41,7 @@ const RequestXTVersion = RequestNameVersion
// See https://vt100.net/docs/vt510-rm/DA1.html // See https://vt100.net/docs/vt510-rm/DA1.html
func PrimaryDeviceAttributes(attrs ...int) string { func PrimaryDeviceAttributes(attrs ...int) string {
if len(attrs) == 0 { if len(attrs) == 0 {
return "\x1b[c" return RequestPrimaryDeviceAttributes
} else if len(attrs) == 1 && attrs[0] == 0 { } else if len(attrs) == 1 && attrs[0] == 0 {
return "\x1b[0c" return "\x1b[0c"
} }
@ -75,7 +76,7 @@ const RequestPrimaryDeviceAttributes = "\x1b[c"
// See https://vt100.net/docs/vt510-rm/DA2.html // See https://vt100.net/docs/vt510-rm/DA2.html
func SecondaryDeviceAttributes(attrs ...int) string { func SecondaryDeviceAttributes(attrs ...int) string {
if len(attrs) == 0 { if len(attrs) == 0 {
return "\x1b[>c" return RequestSecondaryDeviceAttributes
} }
as := make([]string, len(attrs)) as := make([]string, len(attrs))
@ -90,6 +91,14 @@ func DA2(attrs ...int) string {
return SecondaryDeviceAttributes(attrs...) return SecondaryDeviceAttributes(attrs...)
} }
// RequestSecondaryDeviceAttributes is a control sequence that requests the
// terminal's secondary device attributes (DA2).
//
// CSI > c
//
// See https://vt100.net/docs/vt510-rm/DA2.html
const RequestSecondaryDeviceAttributes = "\x1b[>c"
// TertiaryDeviceAttributes (DA3) is a control sequence that reports the // TertiaryDeviceAttributes (DA3) is a control sequence that reports the
// terminal's tertiary device attributes. // terminal's tertiary device attributes.
// //
@ -106,7 +115,7 @@ func DA2(attrs ...int) string {
func TertiaryDeviceAttributes(unitID string) string { func TertiaryDeviceAttributes(unitID string) string {
switch unitID { switch unitID {
case "": case "":
return "\x1b[=c" return RequestTertiaryDeviceAttributes
case "0": case "0":
return "\x1b[=0c" return "\x1b[=0c"
} }
@ -118,3 +127,11 @@ func TertiaryDeviceAttributes(unitID string) string {
func DA3(unitID string) string { func DA3(unitID string) string {
return TertiaryDeviceAttributes(unitID) return TertiaryDeviceAttributes(unitID)
} }
// RequestTertiaryDeviceAttributes is a control sequence that requests the
// terminal's tertiary device attributes (DA3).
//
// CSI = c
//
// See https://vt100.net/docs/vt510-rm/DA3.html
const RequestTertiaryDeviceAttributes = "\x1b[=c"
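A small sketch of how these request constants are used in practice; replies come back on stdin as CSI sequences, and reading them needs the terminal in raw mode, which is left out here:

```go
package main

import (
	"fmt"

	"github.com/charmbracelet/x/ansi"
)

func main() {
	// Ask the terminal for its primary, secondary and tertiary device
	// attributes. The DA2 reply, for example, arrives as "CSI > Pp ; Pv ; Pc c";
	// parsing the responses is out of scope for this sketch.
	fmt.Print(ansi.RequestPrimaryDeviceAttributes)
	fmt.Print(ansi.RequestSecondaryDeviceAttributes)
	fmt.Print(ansi.RequestTertiaryDeviceAttributes)
}
```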

View File

@ -36,6 +36,8 @@ const (
// //
// Where Pl is the line number and Pc is the column number. // Where Pl is the line number and Pc is the column number.
// See: https://vt100.net/docs/vt510-rm/CPR.html // See: https://vt100.net/docs/vt510-rm/CPR.html
//
// Deprecated: use [RequestCursorPositionReport] instead.
const RequestCursorPosition = "\x1b[6n" const RequestCursorPosition = "\x1b[6n"
// RequestExtendedCursorPosition (DECXCPR) is a sequence for requesting the // RequestExtendedCursorPosition (DECXCPR) is a sequence for requesting the
@ -51,6 +53,8 @@ const RequestCursorPosition = "\x1b[6n"
// Where Pl is the line number, Pc is the column number, and Pp is the page // Where Pl is the line number, Pc is the column number, and Pp is the page
// number. // number.
// See: https://vt100.net/docs/vt510-rm/DECXCPR.html // See: https://vt100.net/docs/vt510-rm/DECXCPR.html
//
// Deprecated: use [RequestExtendedCursorPositionReport] instead.
const RequestExtendedCursorPosition = "\x1b[?6n" const RequestExtendedCursorPosition = "\x1b[?6n"
// CursorUp (CUU) returns a sequence for moving the cursor up n cells. // CursorUp (CUU) returns a sequence for moving the cursor up n cells.

View File

@ -1,133 +0,0 @@
package ansi
import (
"bytes"
"strconv"
"strings"
)
// DcsSequence represents a Device Control String (DCS) escape sequence.
//
// The DCS sequence is used to send device control strings to the terminal. The
// sequence starts with the C1 control code character DCS (0x9B) or ESC P in
// 7-bit environments, followed by parameter bytes, intermediate bytes, a
// command byte, followed by data bytes, and ends with the C1 control code
// character ST (0x9C) or ESC \ in 7-bit environments.
//
// This follows the parameter string format.
// See ECMA-48 § 5.4.1
type DcsSequence struct {
// Params contains the raw parameters of the sequence.
// This is a slice of integers, where each integer is a 32-bit integer
// containing the parameter value in the lower 31 bits and a flag in the
// most significant bit indicating whether there are more sub-parameters.
Params []Parameter
// Data contains the string raw data of the sequence.
// This is the data between the final byte and the escape sequence terminator.
Data []byte
// Cmd contains the raw command of the sequence.
// The command is a 32-bit integer containing the DCS command byte in the
// lower 8 bits, the private marker in the next 8 bits, and the intermediate
// byte in the next 8 bits.
//
// DCS > 0 ; 1 $ r <data> ST
//
// Is represented as:
//
// 'r' | '>' << 8 | '$' << 16
Cmd Command
}
var _ Sequence = DcsSequence{}
// Clone returns a deep copy of the DCS sequence.
func (s DcsSequence) Clone() Sequence {
return DcsSequence{
Params: append([]Parameter(nil), s.Params...),
Data: append([]byte(nil), s.Data...),
Cmd: s.Cmd,
}
}
// Split returns a slice of data split by the semicolon.
func (s DcsSequence) Split() []string {
return strings.Split(string(s.Data), ";")
}
// Marker returns the marker byte of the DCS sequence.
// This is always gonna be one of the following '<' '=' '>' '?' and in the
// range of 0x3C-0x3F.
// Zero is returned if the sequence does not have a marker.
func (s DcsSequence) Marker() int {
return s.Cmd.Marker()
}
// Intermediate returns the intermediate byte of the DCS sequence.
// An intermediate byte is in the range of 0x20-0x2F. This includes these
// characters from ' ', '!', '"', '#', '$', '%', '&', ”', '(', ')', '*', '+',
// ',', '-', '.', '/'.
// Zero is returned if the sequence does not have an intermediate byte.
func (s DcsSequence) Intermediate() int {
return s.Cmd.Intermediate()
}
// Command returns the command byte of the CSI sequence.
func (s DcsSequence) Command() int {
return s.Cmd.Command()
}
// Param is a helper that returns the parameter at the given index and falls
// back to the default value if the parameter is missing. If the index is out
// of bounds, it returns the default value and false.
func (s DcsSequence) Param(i, def int) (int, bool) {
if i < 0 || i >= len(s.Params) {
return def, false
}
return s.Params[i].Param(def), true
}
// String returns a string representation of the sequence.
// The string will always be in the 7-bit format i.e (ESC P p..p i..i f <data> ESC \).
func (s DcsSequence) String() string {
return s.buffer().String()
}
// buffer returns a buffer containing the sequence.
func (s DcsSequence) buffer() *bytes.Buffer {
var b bytes.Buffer
b.WriteString("\x1bP")
if m := s.Marker(); m != 0 {
b.WriteByte(byte(m))
}
for i, p := range s.Params {
param := p.Param(-1)
if param >= 0 {
b.WriteString(strconv.Itoa(param))
}
if i < len(s.Params)-1 {
if p.HasMore() {
b.WriteByte(':')
} else {
b.WriteByte(';')
}
}
}
if i := s.Intermediate(); i != 0 {
b.WriteByte(byte(i))
}
if cmd := s.Command(); cmd != 0 {
b.WriteByte(byte(cmd))
}
b.Write(s.Data)
b.WriteByte(ESC)
b.WriteByte('\\')
return &b
}
// Bytes returns the byte representation of the sequence.
// The bytes will always be in the 7-bit format i.e (ESC P p..p i..i F <data> ESC \).
func (s DcsSequence) Bytes() []byte {
return s.buffer().Bytes()
}

199
vendor/github.com/charmbracelet/x/ansi/graphics.go generated vendored Normal file
View File

@ -0,0 +1,199 @@
package ansi
import (
"bytes"
"encoding/base64"
"errors"
"fmt"
"image"
"io"
"os"
"strings"
"github.com/charmbracelet/x/ansi/kitty"
)
// KittyGraphics returns a sequence that encodes the given image in the Kitty
// graphics protocol.
//
// APC G [comma separated options] ; [base64 encoded payload] ST
//
// See https://sw.kovidgoyal.net/kitty/graphics-protocol/
func KittyGraphics(payload []byte, opts ...string) string {
var buf bytes.Buffer
buf.WriteString("\x1b_G")
buf.WriteString(strings.Join(opts, ","))
if len(payload) > 0 {
buf.WriteString(";")
buf.Write(payload)
}
buf.WriteString("\x1b\\")
return buf.String()
}
var (
// KittyGraphicsTempDir is the directory where temporary files are stored.
// This is used in [WriteKittyGraphics] along with [os.CreateTemp].
KittyGraphicsTempDir = ""
// KittyGraphicsTempPattern is the pattern used to create temporary files.
// This is used in [WriteKittyGraphics] along with [os.CreateTemp].
// The Kitty Graphics protocol requires the file path to contain the
// substring "tty-graphics-protocol".
KittyGraphicsTempPattern = "tty-graphics-protocol-*"
)
// WriteKittyGraphics writes an image using the Kitty Graphics protocol with
// the given options to w. It chunks the written data if o.Chunk is true.
//
// You can omit m and use nil when rendering an image from a file. In this
// case, you must provide a file path in o.File and use o.Transmission =
// [kitty.File]. You can also use o.Transmission = [kitty.TempFile] to write
// the image to a temporary file. In that case, the file path is ignored, and
// the image is written to a temporary file that is automatically deleted by
// the terminal.
//
// See https://sw.kovidgoyal.net/kitty/graphics-protocol/
func WriteKittyGraphics(w io.Writer, m image.Image, o *kitty.Options) error {
if o == nil {
o = &kitty.Options{}
}
if o.Transmission == 0 && len(o.File) != 0 {
o.Transmission = kitty.File
}
var data bytes.Buffer // the data to be encoded into base64
e := &kitty.Encoder{
Compress: o.Compression == kitty.Zlib,
Format: o.Format,
}
switch o.Transmission {
case kitty.Direct:
if err := e.Encode(&data, m); err != nil {
return fmt.Errorf("failed to encode direct image: %w", err)
}
case kitty.SharedMemory:
// TODO: Implement shared memory
return fmt.Errorf("shared memory transmission is not yet implemented")
case kitty.File:
if len(o.File) == 0 {
return kitty.ErrMissingFile
}
f, err := os.Open(o.File)
if err != nil {
return fmt.Errorf("failed to open file: %w", err)
}
defer f.Close() //nolint:errcheck
stat, err := f.Stat()
if err != nil {
return fmt.Errorf("failed to get file info: %w", err)
}
mode := stat.Mode()
if !mode.IsRegular() {
return fmt.Errorf("file is not a regular file")
}
// Write the file path to the buffer
if _, err := data.WriteString(f.Name()); err != nil {
return fmt.Errorf("failed to write file path to buffer: %w", err)
}
case kitty.TempFile:
f, err := os.CreateTemp(KittyGraphicsTempDir, KittyGraphicsTempPattern)
if err != nil {
return fmt.Errorf("failed to create file: %w", err)
}
defer f.Close() //nolint:errcheck
if err := e.Encode(f, m); err != nil {
return fmt.Errorf("failed to encode image to file: %w", err)
}
// Write the file path to the buffer
if _, err := data.WriteString(f.Name()); err != nil {
return fmt.Errorf("failed to write file path to buffer: %w", err)
}
}
// Encode image to base64
var payload bytes.Buffer // the base64 encoded image to be written to w
b64 := base64.NewEncoder(base64.StdEncoding, &payload)
if _, err := data.WriteTo(b64); err != nil {
return fmt.Errorf("failed to write base64 encoded image to payload: %w", err)
}
if err := b64.Close(); err != nil {
return err
}
// If not chunking, write all at once
if !o.Chunk {
_, err := io.WriteString(w, KittyGraphics(payload.Bytes(), o.Options()...))
return err
}
// Write in chunks
var (
err error
n int
)
chunk := make([]byte, kitty.MaxChunkSize)
isFirstChunk := true
for {
// Stop if we read less than the chunk size [kitty.MaxChunkSize].
n, err = io.ReadFull(&payload, chunk)
if errors.Is(err, io.ErrUnexpectedEOF) || errors.Is(err, io.EOF) {
break
}
if err != nil {
return fmt.Errorf("failed to read chunk: %w", err)
}
opts := buildChunkOptions(o, isFirstChunk, false)
if _, err := io.WriteString(w, KittyGraphics(chunk[:n], opts...)); err != nil {
return err
}
isFirstChunk = false
}
// Write the last chunk
opts := buildChunkOptions(o, isFirstChunk, true)
_, err = io.WriteString(w, KittyGraphics(chunk[:n], opts...))
return err
}
// buildChunkOptions creates the options slice for a chunk
func buildChunkOptions(o *kitty.Options, isFirstChunk, isLastChunk bool) []string {
var opts []string
if isFirstChunk {
opts = o.Options()
} else {
// These options are allowed in subsequent chunks
if o.Quite > 0 {
opts = append(opts, fmt.Sprintf("q=%d", o.Quite))
}
if o.Action == kitty.Frame {
opts = append(opts, "a=f")
}
}
if !isFirstChunk || !isLastChunk {
// We don't need to encode the (m=) option when we only have one chunk.
if isLastChunk {
opts = append(opts, "m=0")
} else {
opts = append(opts, "m=1")
}
}
return opts
}
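A hedged usage sketch for the new WriteKittyGraphics helper, transmitting a tiny in-memory image directly with chunking enabled (whether anything is displayed depends on the terminal):

```go
package main

import (
	"image"
	"image/color"
	"os"

	"github.com/charmbracelet/x/ansi"
	"github.com/charmbracelet/x/ansi/kitty"
)

func main() {
	// Build a tiny 2x2 image to transmit.
	img := image.NewRGBA(image.Rect(0, 0, 2, 2))
	img.Set(0, 0, color.RGBA{R: 0xff, A: 0xff})
	img.Set(1, 1, color.RGBA{B: 0xff, A: 0xff})

	opts := &kitty.Options{
		Action: kitty.TransmitAndPut, // transmit and display in one go
		Chunk:  true,                 // split the base64 payload into 4KiB chunks
	}
	if err := ansi.WriteKittyGraphics(os.Stdout, img, opts); err != nil {
		panic(err)
	}
}
```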

18
vendor/github.com/charmbracelet/x/ansi/iterm2.go generated vendored Normal file
View File

@ -0,0 +1,18 @@
package ansi
import "fmt"
// ITerm2 returns a sequence that uses the iTerm2 proprietary protocol. Use the
// iterm2 package for a more convenient API.
//
// OSC 1337 ; key = value ST
//
// Example:
//
// ITerm2(iterm2.File{...})
//
// See https://iterm2.com/documentation-escape-codes.html
// See https://iterm2.com/documentation-images.html
func ITerm2(data any) string {
return "\x1b]1337;" + fmt.Sprint(data) + "\x07"
}
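A minimal sketch for ansi.ITerm2; the payload below only illustrates the OSC 1337 key=value form and assumes the arguments are already encoded per the iTerm2 inline-image spec:

```go
package main

import (
	"encoding/base64"
	"fmt"

	"github.com/charmbracelet/x/ansi"
)

func main() {
	payload := base64.StdEncoding.EncodeToString([]byte("hello"))
	// Illustrative only: the receiving terminal decides what it accepts.
	fmt.Print(ansi.ITerm2("File=inline=1:" + payload))
}
```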

View File

@ -72,7 +72,7 @@ func PushKittyKeyboard(flags int) string {
// Keyboard stack to disable the protocol. // Keyboard stack to disable the protocol.
// //
// This is equivalent to PushKittyKeyboard(0). // This is equivalent to PushKittyKeyboard(0).
const DisableKittyKeyboard = "\x1b[>0u" const DisableKittyKeyboard = "\x1b[>u"
// PopKittyKeyboard returns a sequence to pop n number of flags from the // PopKittyKeyboard returns a sequence to pop n number of flags from the
// terminal Kitty Keyboard stack. // terminal Kitty Keyboard stack.

View File

@ -0,0 +1,85 @@
package kitty
import (
"compress/zlib"
"fmt"
"image"
"image/color"
"image/png"
"io"
)
// Decoder is a decoder for the Kitty graphics protocol. It supports decoding
// images in the 24-bit [RGB], 32-bit [RGBA], and [PNG] formats. It can also
// decompress data using zlib.
// The default format is 32-bit [RGBA].
type Decoder struct {
// Uses zlib decompression.
Decompress bool
// Can be one of [RGB], [RGBA], or [PNG].
Format int
// Width of the image in pixels. This can be omitted if the image is [PNG]
// formatted.
Width int
// Height of the image in pixels. This can be omitted if the image is [PNG]
// formatted.
Height int
}
// Decode decodes the image data from r in the specified format.
func (d *Decoder) Decode(r io.Reader) (image.Image, error) {
if d.Decompress {
zr, err := zlib.NewReader(r)
if err != nil {
return nil, fmt.Errorf("failed to create zlib reader: %w", err)
}
defer zr.Close() //nolint:errcheck
r = zr
}
if d.Format == 0 {
d.Format = RGBA
}
switch d.Format {
case RGBA, RGB:
return d.decodeRGBA(r, d.Format == RGBA)
case PNG:
return png.Decode(r)
default:
return nil, fmt.Errorf("unsupported format: %d", d.Format)
}
}
// decodeRGBA decodes the image data in 32-bit RGBA or 24-bit RGB formats.
func (d *Decoder) decodeRGBA(r io.Reader, alpha bool) (image.Image, error) {
m := image.NewRGBA(image.Rect(0, 0, d.Width, d.Height))
var buf []byte
if alpha {
buf = make([]byte, 4)
} else {
buf = make([]byte, 3)
}
for y := 0; y < d.Height; y++ {
for x := 0; x < d.Width; x++ {
if _, err := io.ReadFull(r, buf[:]); err != nil {
return nil, fmt.Errorf("failed to read pixel data: %w", err)
}
if alpha {
m.SetRGBA(x, y, color.RGBA{buf[0], buf[1], buf[2], buf[3]})
} else {
m.SetRGBA(x, y, color.RGBA{buf[0], buf[1], buf[2], 0xff})
}
}
}
return m, nil
}
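A short sketch of the Decoder reading raw 24-bit RGB bytes; Width and Height must be supplied for the raw formats, as the field comments above note:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/charmbracelet/x/ansi/kitty"
)

func main() {
	// Two pixels of raw 24-bit RGB data: red, then blue.
	raw := []byte{0xff, 0x00, 0x00, 0x00, 0x00, 0xff}

	d := kitty.Decoder{Format: kitty.RGB, Width: 2, Height: 1}
	img, err := d.Decode(bytes.NewReader(raw))
	if err != nil {
		panic(err)
	}
	fmt.Println(img.At(0, 0)) // {255 0 0 255}
	fmt.Println(img.At(1, 0)) // {0 0 255 255}
}
```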

View File

@ -0,0 +1,64 @@
package kitty
import (
"compress/zlib"
"fmt"
"image"
"image/png"
"io"
)
// Encoder is an encoder for the Kitty graphics protocol. It supports encoding
// images in the 24-bit [RGB], 32-bit [RGBA], and [PNG] formats, and
// compressing the data using zlib.
// The default format is 32-bit [RGBA].
type Encoder struct {
// Uses zlib compression.
Compress bool
// Can be one of [RGBA], [RGB], or [PNG].
Format int
}
// Encode encodes the image data in the specified format and writes it to w.
func (e *Encoder) Encode(w io.Writer, m image.Image) error {
if m == nil {
return nil
}
if e.Compress {
zw := zlib.NewWriter(w)
defer zw.Close() //nolint:errcheck
w = zw
}
if e.Format == 0 {
e.Format = RGBA
}
switch e.Format {
case RGBA, RGB:
bounds := m.Bounds()
for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
for x := bounds.Min.X; x < bounds.Max.X; x++ {
r, g, b, a := m.At(x, y).RGBA()
switch e.Format {
case RGBA:
w.Write([]byte{byte(r >> 8), byte(g >> 8), byte(b >> 8), byte(a >> 8)}) //nolint:errcheck
case RGB:
w.Write([]byte{byte(r >> 8), byte(g >> 8), byte(b >> 8)}) //nolint:errcheck
}
}
}
case PNG:
if err := png.Encode(w, m); err != nil {
return fmt.Errorf("failed to encode PNG: %w", err)
}
default:
return fmt.Errorf("unsupported format: %d", e.Format)
}
return nil
}
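And the matching Encoder, sketched here writing zlib-compressed 32-bit RGBA data into a buffer, which is the same payload WriteKittyGraphics base64-encodes further up:

```go
package main

import (
	"bytes"
	"fmt"
	"image"

	"github.com/charmbracelet/x/ansi/kitty"
)

func main() {
	img := image.NewRGBA(image.Rect(0, 0, 4, 4))

	var buf bytes.Buffer
	e := kitty.Encoder{Compress: true, Format: kitty.RGBA}
	if err := e.Encode(&buf, img); err != nil {
		panic(err)
	}
	// 4x4 pixels at 4 bytes each, zlib-wrapped; the exact size depends on compression.
	fmt.Println(buf.Len(), "bytes of compressed RGBA data")
}
```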

View File

@ -0,0 +1,414 @@
package kitty
import "errors"
// ErrMissingFile is returned when the file path is missing.
var ErrMissingFile = errors.New("missing file path")
// MaxChunkSize is the maximum chunk size for the image data.
const MaxChunkSize = 1024 * 4
// Placeholder is a special Unicode character that can be used as a placeholder
// for an image.
const Placeholder = '\U0010EEEE'
// Graphics image format.
const (
// 32-bit RGBA format.
RGBA = 32
// 24-bit RGB format.
RGB = 24
// PNG format.
PNG = 100
)
// Compression types.
const (
Zlib = 'z'
)
// Transmission types.
const (
// The data transmitted directly in the escape sequence.
Direct = 'd'
// The data transmitted in a regular file.
File = 'f'
// A temporary file is used and deleted after transmission.
TempFile = 't'
// A shared memory object.
// For POSIX see https://pubs.opengroup.org/onlinepubs/9699919799/functions/shm_open.html
// For Windows see https://docs.microsoft.com/en-us/windows/win32/memory/creating-named-shared-memory
SharedMemory = 's'
)
// Action types.
const (
// Transmit image data.
Transmit = 't'
// TransmitAndPut transmit image data and display (put) it.
TransmitAndPut = 'T'
// Query terminal for image info.
Query = 'q'
// Put (display) previously transmitted image.
Put = 'p'
// Delete image.
Delete = 'd'
// Frame transmits data for animation frames.
Frame = 'f'
// Animate controls animation.
Animate = 'a'
// Compose composes animation frames.
Compose = 'c'
)
// Delete types.
const (
// Delete all placements visible on screen
DeleteAll = 'a'
// Delete all images with the specified id, specified using the i key. If
// you specify a p key for the placement id as well, then only the
// placement with the specified image id and placement id will be deleted.
DeleteID = 'i'
// Delete newest image with the specified number, specified using the I
// key. If you specify a p key for the placement id as well, then only the
// placement with the specified number and placement id will be deleted.
DeleteNumber = 'n'
// Delete all placements that intersect with the current cursor position.
DeleteCursor = 'c'
// Delete animation frames.
DeleteFrames = 'f'
// Delete all placements that intersect a specific cell, the cell is
// specified using the x and y keys
DeleteCell = 'p'
// Delete all placements that intersect a specific cell having a specific
// z-index. The cell and z-index is specified using the x, y and z keys.
DeleteCellZ = 'q'
// Delete all images whose id is greater than or equal to the value of the x
// key and less than or equal to the value of the y.
DeleteRange = 'r'
// Delete all placements that intersect the specified column, specified using
// the x key.
DeleteColumn = 'x'
// Delete all placements that intersect the specified row, specified using
// the y key.
DeleteRow = 'y'
// Delete all placements that have the specified z-index, specified using the
// z key.
DeleteZ = 'z'
)
// Diacritic returns the diacritic rune at the specified index. If the index is
// out of bounds, the first diacritic rune is returned.
func Diacritic(i int) rune {
if i < 0 || i >= len(diacritics) {
return diacritics[0]
}
return diacritics[i]
}
// From https://sw.kovidgoyal.net/kitty/_downloads/f0a0de9ec8d9ff4456206db8e0814937/rowcolumn-diacritics.txt
// See https://sw.kovidgoyal.net/kitty/graphics-protocol/#unicode-placeholders for further explanation.
var diacritics = []rune{
'\u0305',
'\u030D',
'\u030E',
'\u0310',
'\u0312',
'\u033D',
'\u033E',
'\u033F',
'\u0346',
'\u034A',
'\u034B',
'\u034C',
'\u0350',
'\u0351',
'\u0352',
'\u0357',
'\u035B',
'\u0363',
'\u0364',
'\u0365',
'\u0366',
'\u0367',
'\u0368',
'\u0369',
'\u036A',
'\u036B',
'\u036C',
'\u036D',
'\u036E',
'\u036F',
'\u0483',
'\u0484',
'\u0485',
'\u0486',
'\u0487',
'\u0592',
'\u0593',
'\u0594',
'\u0595',
'\u0597',
'\u0598',
'\u0599',
'\u059C',
'\u059D',
'\u059E',
'\u059F',
'\u05A0',
'\u05A1',
'\u05A8',
'\u05A9',
'\u05AB',
'\u05AC',
'\u05AF',
'\u05C4',
'\u0610',
'\u0611',
'\u0612',
'\u0613',
'\u0614',
'\u0615',
'\u0616',
'\u0617',
'\u0657',
'\u0658',
'\u0659',
'\u065A',
'\u065B',
'\u065D',
'\u065E',
'\u06D6',
'\u06D7',
'\u06D8',
'\u06D9',
'\u06DA',
'\u06DB',
'\u06DC',
'\u06DF',
'\u06E0',
'\u06E1',
'\u06E2',
'\u06E4',
'\u06E7',
'\u06E8',
'\u06EB',
'\u06EC',
'\u0730',
'\u0732',
'\u0733',
'\u0735',
'\u0736',
'\u073A',
'\u073D',
'\u073F',
'\u0740',
'\u0741',
'\u0743',
'\u0745',
'\u0747',
'\u0749',
'\u074A',
'\u07EB',
'\u07EC',
'\u07ED',
'\u07EE',
'\u07EF',
'\u07F0',
'\u07F1',
'\u07F3',
'\u0816',
'\u0817',
'\u0818',
'\u0819',
'\u081B',
'\u081C',
'\u081D',
'\u081E',
'\u081F',
'\u0820',
'\u0821',
'\u0822',
'\u0823',
'\u0825',
'\u0826',
'\u0827',
'\u0829',
'\u082A',
'\u082B',
'\u082C',
'\u082D',
'\u0951',
'\u0953',
'\u0954',
'\u0F82',
'\u0F83',
'\u0F86',
'\u0F87',
'\u135D',
'\u135E',
'\u135F',
'\u17DD',
'\u193A',
'\u1A17',
'\u1A75',
'\u1A76',
'\u1A77',
'\u1A78',
'\u1A79',
'\u1A7A',
'\u1A7B',
'\u1A7C',
'\u1B6B',
'\u1B6D',
'\u1B6E',
'\u1B6F',
'\u1B70',
'\u1B71',
'\u1B72',
'\u1B73',
'\u1CD0',
'\u1CD1',
'\u1CD2',
'\u1CDA',
'\u1CDB',
'\u1CE0',
'\u1DC0',
'\u1DC1',
'\u1DC3',
'\u1DC4',
'\u1DC5',
'\u1DC6',
'\u1DC7',
'\u1DC8',
'\u1DC9',
'\u1DCB',
'\u1DCC',
'\u1DD1',
'\u1DD2',
'\u1DD3',
'\u1DD4',
'\u1DD5',
'\u1DD6',
'\u1DD7',
'\u1DD8',
'\u1DD9',
'\u1DDA',
'\u1DDB',
'\u1DDC',
'\u1DDD',
'\u1DDE',
'\u1DDF',
'\u1DE0',
'\u1DE1',
'\u1DE2',
'\u1DE3',
'\u1DE4',
'\u1DE5',
'\u1DE6',
'\u1DFE',
'\u20D0',
'\u20D1',
'\u20D4',
'\u20D5',
'\u20D6',
'\u20D7',
'\u20DB',
'\u20DC',
'\u20E1',
'\u20E7',
'\u20E9',
'\u20F0',
'\u2CEF',
'\u2CF0',
'\u2CF1',
'\u2DE0',
'\u2DE1',
'\u2DE2',
'\u2DE3',
'\u2DE4',
'\u2DE5',
'\u2DE6',
'\u2DE7',
'\u2DE8',
'\u2DE9',
'\u2DEA',
'\u2DEB',
'\u2DEC',
'\u2DED',
'\u2DEE',
'\u2DEF',
'\u2DF0',
'\u2DF1',
'\u2DF2',
'\u2DF3',
'\u2DF4',
'\u2DF5',
'\u2DF6',
'\u2DF7',
'\u2DF8',
'\u2DF9',
'\u2DFA',
'\u2DFB',
'\u2DFC',
'\u2DFD',
'\u2DFE',
'\u2DFF',
'\uA66F',
'\uA67C',
'\uA67D',
'\uA6F0',
'\uA6F1',
'\uA8E0',
'\uA8E1',
'\uA8E2',
'\uA8E3',
'\uA8E4',
'\uA8E5',
'\uA8E6',
'\uA8E7',
'\uA8E8',
'\uA8E9',
'\uA8EA',
'\uA8EB',
'\uA8EC',
'\uA8ED',
'\uA8EE',
'\uA8EF',
'\uA8F0',
'\uA8F1',
'\uAAB0',
'\uAAB2',
'\uAAB3',
'\uAAB7',
'\uAAB8',
'\uAABE',
'\uAABF',
'\uAAC1',
'\uFE20',
'\uFE21',
'\uFE22',
'\uFE23',
'\uFE24',
'\uFE25',
'\uFE26',
'\U00010A0F',
'\U00010A38',
'\U0001D185',
'\U0001D186',
'\U0001D187',
'\U0001D188',
'\U0001D189',
'\U0001D1AA',
'\U0001D1AB',
'\U0001D1AC',
'\U0001D1AD',
'\U0001D242',
'\U0001D243',
'\U0001D244',
}

367
vendor/github.com/charmbracelet/x/ansi/kitty/options.go generated vendored Normal file
View File

@ -0,0 +1,367 @@
package kitty
import (
"encoding"
"fmt"
"strconv"
"strings"
)
var (
_ encoding.TextMarshaler = Options{}
_ encoding.TextUnmarshaler = &Options{}
)
// Options represents a Kitty Graphics Protocol options.
type Options struct {
// Common options.
// Action (a=t) is the action to be performed on the image. Can be one of
// [Transmit], [TransmitDisplay], [Query], [Put], [Delete], [Frame],
// [Animate], [Compose].
Action byte
// Quite mode (q=0) is the quiet mode. Can be either zero, one, or two
// where zero is the default, 1 suppresses OK responses, and 2 suppresses
// both OK and error responses.
Quite byte
// Transmission options.
// ID (i=) is the image ID. The ID is a unique identifier for the image.
// Must be a positive integer up to [math.MaxUint32].
ID int
// PlacementID (p=) is the placement ID. The placement ID is a unique
// identifier for the placement of the image. Must be a positive integer up
// to [math.MaxUint32].
PlacementID int
// Number (I=0) is the number of images to be transmitted.
Number int
// Format (f=32) is the image format. One of [RGBA], [RGB], [PNG].
Format int
// ImageWidth (s=0) is the transmitted image width.
ImageWidth int
// ImageHeight (v=0) is the transmitted image height.
ImageHeight int
// Compression (o=) is the image compression type. Can be [Zlib] or zero.
Compression byte
// Transmission (t=d) is the image transmission type. Can be [Direct], [File],
// [TempFile], or[SharedMemory].
Transmission byte
// File is the file path to be used when the transmission type is [File].
// If [Options.Transmission] is omitted i.e. zero and this is non-empty,
// the transmission type is set to [File].
File string
// Size (S=0) is the size to be read from the transmission medium.
Size int
// Offset (O=0) is the offset byte to start reading from the transmission
// medium.
Offset int
// Chunk (m=) whether the image is transmitted in chunks. Can be either
// zero or one. When true, the image is transmitted in chunks. Each chunk
// must be a multiple of 4, and up to [MaxChunkSize] bytes. Each chunk must
// have the m=1 option except for the last chunk which must have m=0.
Chunk bool
// Display options.
// X (x=0) is the pixel X coordinate of the image to start displaying.
X int
// Y (y=0) is the pixel Y coordinate of the image to start displaying.
Y int
// Z (z=0) is the Z coordinate of the image to display.
Z int
// Width (w=0) is the width of the image to display.
Width int
// Height (h=0) is the height of the image to display.
Height int
// OffsetX (X=0) is the OffsetX coordinate of the cursor cell to start
// displaying the image. OffsetX=0 is the leftmost cell. This must be
// smaller than the terminal cell width.
OffsetX int
// OffsetY (Y=0) is the OffsetY coordinate of the cursor cell to start
// displaying the image. OffsetY=0 is the topmost cell. This must be
// smaller than the terminal cell height.
OffsetY int
// Columns (c=0) is the number of columns to display the image. The image
// will be scaled to fit the number of columns.
Columns int
// Rows (r=0) is the number of rows to display the image. The image will be
// scaled to fit the number of rows.
Rows int
// VirtualPlacement (U=0) whether to use virtual placement. This is used
// with Unicode [Placeholder] to display images.
VirtualPlacement bool
// DoNotMoveCursor (C=0) whether to move the cursor after displaying the
// image.
DoNotMoveCursor bool
// ParentID (P=0) is the parent image ID. The parent ID is the ID of the
// image that is the parent of the current image. This is used with Unicode
// [Placeholder] to display images relative to the parent image.
ParentID int
// ParentPlacementID (Q=0) is the parent placement ID. The parent placement
// ID is the ID of the placement of the parent image. This is used with
// Unicode [Placeholder] to display images relative to the parent image.
ParentPlacementID int
// Delete options.
// Delete (d=a) is the delete action. Can be one of [DeleteAll],
// [DeleteID], [DeleteNumber], [DeleteCursor], [DeleteFrames],
// [DeleteCell], [DeleteCellZ], [DeleteRange], [DeleteColumn], [DeleteRow],
// [DeleteZ].
Delete byte
// DeleteResources indicates whether to delete the resources associated
// with the image.
DeleteResources bool
}
// Options returns the options as a slice of a key-value pairs.
func (o *Options) Options() (opts []string) {
opts = []string{}
if o.Format == 0 {
o.Format = RGBA
}
if o.Action == 0 {
o.Action = Transmit
}
if o.Delete == 0 {
o.Delete = DeleteAll
}
if o.Transmission == 0 {
if len(o.File) > 0 {
o.Transmission = File
} else {
o.Transmission = Direct
}
}
if o.Format != RGBA {
opts = append(opts, fmt.Sprintf("f=%d", o.Format))
}
if o.Quite > 0 {
opts = append(opts, fmt.Sprintf("q=%d", o.Quite))
}
if o.ID > 0 {
opts = append(opts, fmt.Sprintf("i=%d", o.ID))
}
if o.PlacementID > 0 {
opts = append(opts, fmt.Sprintf("p=%d", o.PlacementID))
}
if o.Number > 0 {
opts = append(opts, fmt.Sprintf("I=%d", o.Number))
}
if o.ImageWidth > 0 {
opts = append(opts, fmt.Sprintf("s=%d", o.ImageWidth))
}
if o.ImageHeight > 0 {
opts = append(opts, fmt.Sprintf("v=%d", o.ImageHeight))
}
if o.Transmission != Direct {
opts = append(opts, fmt.Sprintf("t=%c", o.Transmission))
}
if o.Size > 0 {
opts = append(opts, fmt.Sprintf("S=%d", o.Size))
}
if o.Offset > 0 {
opts = append(opts, fmt.Sprintf("O=%d", o.Offset))
}
if o.Compression == Zlib {
opts = append(opts, fmt.Sprintf("o=%c", o.Compression))
}
if o.VirtualPlacement {
opts = append(opts, "U=1")
}
if o.DoNotMoveCursor {
opts = append(opts, "C=1")
}
if o.ParentID > 0 {
opts = append(opts, fmt.Sprintf("P=%d", o.ParentID))
}
if o.ParentPlacementID > 0 {
opts = append(opts, fmt.Sprintf("Q=%d", o.ParentPlacementID))
}
if o.X > 0 {
opts = append(opts, fmt.Sprintf("x=%d", o.X))
}
if o.Y > 0 {
opts = append(opts, fmt.Sprintf("y=%d", o.Y))
}
if o.Z > 0 {
opts = append(opts, fmt.Sprintf("z=%d", o.Z))
}
if o.Width > 0 {
opts = append(opts, fmt.Sprintf("w=%d", o.Width))
}
if o.Height > 0 {
opts = append(opts, fmt.Sprintf("h=%d", o.Height))
}
if o.OffsetX > 0 {
opts = append(opts, fmt.Sprintf("X=%d", o.OffsetX))
}
if o.OffsetY > 0 {
opts = append(opts, fmt.Sprintf("Y=%d", o.OffsetY))
}
if o.Columns > 0 {
opts = append(opts, fmt.Sprintf("c=%d", o.Columns))
}
if o.Rows > 0 {
opts = append(opts, fmt.Sprintf("r=%d", o.Rows))
}
if o.Delete != DeleteAll || o.DeleteResources {
da := o.Delete
if o.DeleteResources {
da = da - ' ' // to uppercase
}
opts = append(opts, fmt.Sprintf("d=%c", da))
}
if o.Action != Transmit {
opts = append(opts, fmt.Sprintf("a=%c", o.Action))
}
return
}
// String returns the string representation of the options.
func (o Options) String() string {
return strings.Join(o.Options(), ",")
}
// MarshalText returns the string representation of the options.
func (o Options) MarshalText() ([]byte, error) {
return []byte(o.String()), nil
}
// UnmarshalText parses the options from the given string.
func (o *Options) UnmarshalText(text []byte) error {
opts := strings.Split(string(text), ",")
for _, opt := range opts {
ps := strings.SplitN(opt, "=", 2)
if len(ps) != 2 || len(ps[1]) == 0 {
continue
}
switch ps[0] {
case "a":
o.Action = ps[1][0]
case "o":
o.Compression = ps[1][0]
case "t":
o.Transmission = ps[1][0]
case "d":
d := ps[1][0]
if d >= 'A' && d <= 'Z' {
o.DeleteResources = true
d = d + ' ' // to lowercase
}
o.Delete = d
case "i", "q", "p", "I", "f", "s", "v", "S", "O", "m", "x", "y", "z", "w", "h", "X", "Y", "c", "r", "U", "P", "Q":
v, err := strconv.Atoi(ps[1])
if err != nil {
continue
}
switch ps[0] {
case "i":
o.ID = v
case "q":
o.Quite = byte(v)
case "p":
o.PlacementID = v
case "I":
o.Number = v
case "f":
o.Format = v
case "s":
o.ImageWidth = v
case "v":
o.ImageHeight = v
case "S":
o.Size = v
case "O":
o.Offset = v
case "m":
o.Chunk = v == 0 || v == 1
case "x":
o.X = v
case "y":
o.Y = v
case "z":
o.Z = v
case "w":
o.Width = v
case "h":
o.Height = v
case "X":
o.OffsetX = v
case "Y":
o.OffsetY = v
case "c":
o.Columns = v
case "r":
o.Rows = v
case "U":
o.VirtualPlacement = v == 1
case "P":
o.ParentID = v
case "Q":
o.ParentPlacementID = v
}
}
}
return nil
}
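
A rough usage sketch for the Options type above, showing the string form produced by Options()/String() and a parse back through UnmarshalText. The kitty import path and the exact option characters in the output comment are assumptions based on the code shown here, not verified against the package:

package main

import (
	"fmt"

	"github.com/charmbracelet/x/ansi/kitty" // assumed import path for the file above
)

func main() {
	o := kitty.Options{
		ID:          42,
		File:        "/tmp/image.png", // leaving Transmission unset lets Options() infer file transmission
		Compression: kitty.Zlib,
	}

	// Likely prints something like "i=42,t=f,o=z" (field order follows the Options() method above).
	fmt.Println(o.String())

	var parsed kitty.Options
	_ = parsed.UnmarshalText([]byte(o.String()))
	fmt.Println(parsed.ID, parsed.Compression == kitty.Zlib) // 42 true
}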

172
vendor/github.com/charmbracelet/x/ansi/method.go generated vendored Normal file
View File

@ -0,0 +1,172 @@
package ansi
// Method is a type that represents the how the renderer should calculate the
// display width of cells.
type Method uint8
// Display width modes.
const (
WcWidth Method = iota
GraphemeWidth
)
// StringWidth returns the width of a string in cells. This is the number of
// cells that the string will occupy when printed in a terminal. ANSI escape
// codes are ignored and wide characters (such as East Asians and emojis) are
// accounted for.
func (m Method) StringWidth(s string) int {
return stringWidth(m, s)
}
// Truncate truncates a string to a given length, adding a tail to the end if
// the string is longer than the given length. This function is aware of ANSI
// escape codes and will not break them, and accounts for wide-characters (such
// as East-Asian characters and emojis).
func (m Method) Truncate(s string, length int, tail string) string {
return truncate(m, s, length, tail)
}
// TruncateLeft truncates a string to a given length, adding a prefix to the
// beginning if the string is longer than the given length. This function is
// aware of ANSI escape codes and will not break them, and accounts for
// wide-characters (such as East-Asian characters and emojis).
func (m Method) TruncateLeft(s string, length int, prefix string) string {
return truncateLeft(m, s, length, prefix)
}
// Cut the string, without adding any prefix or tail strings. This function is
// aware of ANSI escape codes and will not break them, and accounts for
// wide-characters (such as East-Asian characters and emojis). Note that the
// [left] parameter is inclusive, while [right] isn't.
func (m Method) Cut(s string, left, right int) string {
return cut(m, s, left, right)
}
// Hardwrap wraps a string or a block of text to a given line length, breaking
// word boundaries. This will preserve ANSI escape codes and will account for
// wide-characters in the string.
// When preserveSpace is true, spaces at the beginning of a line will be
// preserved.
// This treats the text as a sequence of graphemes.
func (m Method) Hardwrap(s string, length int, preserveSpace bool) string {
return hardwrap(m, s, length, preserveSpace)
}
// Wordwrap wraps a string or a block of text to a given line length, not
// breaking word boundaries. This will preserve ANSI escape codes and will
// account for wide-characters in the string.
// The breakpoints string is a list of characters that are considered
// breakpoints for word wrapping. A hyphen (-) is always considered a
// breakpoint.
//
// Note: breakpoints must be a string of 1-cell wide rune characters.
func (m Method) Wordwrap(s string, length int, breakpoints string) string {
return wordwrap(m, s, length, breakpoints)
}
// Wrap wraps a string or a block of text to a given line length, breaking word
// boundaries if necessary. This will preserve ANSI escape codes and will
// account for wide-characters in the string. The breakpoints string is a list
// of characters that are considered breakpoints for word wrapping. A hyphen
// (-) is always considered a breakpoint.
//
// Note: breakpoints must be a string of 1-cell wide rune characters.
func (m Method) Wrap(s string, length int, breakpoints string) string {
return wrap(m, s, length, breakpoints)
}
// DecodeSequence decodes the first ANSI escape sequence or a printable
// grapheme from the given data. It returns the sequence slice, the number of
// bytes read, the cell width for each sequence, and the new state.
//
// The cell width will always be 0 for control and escape sequences, 1 for
// ASCII printable characters, and the number of cells other Unicode characters
// occupy. It uses the uniseg package to calculate the width of Unicode
// graphemes and characters. This means it will always do grapheme clustering
// (mode 2027).
//
// Passing a non-nil [*Parser] as the last argument will allow the decoder to
// collect sequence parameters, data, and commands. The parser cmd will have
// the packed command value that contains intermediate and prefix characters.
// In the case of a OSC sequence, the cmd will be the OSC command number. Use
// [Cmd] and [Param] types to unpack command intermediates and prefixes as well
// as parameters.
//
// Zero [Cmd] means the CSI, DCS, or ESC sequence is invalid. Moreover, checking the
// validity of other data sequences, OSC, DCS, etc, will require checking for
// the returned sequence terminator bytes such as ST (ESC \\) and BEL).
//
// We store the command byte in [Cmd] in the most significant byte, the
// prefix byte in the next byte, and the intermediate byte in the least
// significant byte. This is done to avoid using a struct to store the command
// and its intermediates and prefixes. The command byte is always the least
// significant byte i.e. [Cmd & 0xff]. Use the [Cmd] type to unpack the
// command, intermediate, and prefix bytes. Note that we only collect the last
// prefix character and intermediate byte.
//
// The [p.Params] slice will contain the parameters of the sequence. Any
// sub-parameter will have the [parser.HasMoreFlag] set. Use the [Param] type
// to unpack the parameters.
//
// Example:
//
// var state byte // the initial state is always zero [NormalState]
// p := NewParser(32, 1024) // create a new parser with a 32 params buffer and 1024 data buffer (optional)
// input := []byte("\x1b[31mHello, World!\x1b[0m")
// for len(input) > 0 {
// seq, width, n, newState := DecodeSequence(input, state, p)
// log.Printf("seq: %q, width: %d", seq, width)
// state = newState
// input = input[n:]
// }
func (m Method) DecodeSequence(data []byte, state byte, p *Parser) (seq []byte, width, n int, newState byte) {
return decodeSequence(m, data, state, p)
}
// DecodeSequenceInString decodes the first ANSI escape sequence or a printable
// grapheme from the given data. It returns the sequence slice, the number of
// bytes read, the cell width for each sequence, and the new state.
//
// The cell width will always be 0 for control and escape sequences, 1 for
// ASCII printable characters, and the number of cells other Unicode characters
// occupy. It uses the uniseg package to calculate the width of Unicode
// graphemes and characters. This means it will always do grapheme clustering
// (mode 2027).
//
// Passing a non-nil [*Parser] as the last argument will allow the decoder to
// collect sequence parameters, data, and commands. The parser cmd will have
// the packed command value that contains intermediate and prefix characters.
// In the case of a OSC sequence, the cmd will be the OSC command number. Use
// [Cmd] and [Param] types to unpack command intermediates and prefixes as well
// as parameters.
//
// Zero [Cmd] means the CSI, DCS, or ESC sequence is invalid. Moreover, checking the
// validity of other data sequences, OSC, DCS, etc, will require checking for
// the returned sequence terminator bytes such as ST (ESC \\) and BEL).
//
// We store the command byte in [Cmd] in the most significant byte, the
// prefix byte in the next byte, and the intermediate byte in the least
// significant byte. This is done to avoid using a struct to store the command
// and its intermediates and prefixes. The command byte is always the least
// significant byte i.e. [Cmd & 0xff]. Use the [Cmd] type to unpack the
// command, intermediate, and prefix bytes. Note that we only collect the last
// prefix character and intermediate byte.
//
// The [p.Params] slice will contain the parameters of the sequence. Any
// sub-parameter will have the [parser.HasMoreFlag] set. Use the [Param] type
// to unpack the parameters.
//
// Example:
//
// var state byte // the initial state is always zero [NormalState]
// p := NewParser(32, 1024) // create a new parser with a 32 params buffer and 1024 data buffer (optional)
// input := []byte("\x1b[31mHello, World!\x1b[0m")
// for len(input) > 0 {
// seq, width, n, newState := DecodeSequenceInString(input, state, p)
// log.Printf("seq: %q, width: %d", seq, width)
// state = newState
// input = input[n:]
// }
func (m Method) DecodeSequenceInString(data string, state byte, p *Parser) (seq string, width, n int, newState byte) {
return decodeSequence(m, data, state, p)
}
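
As a small sketch of the width handling described above (the import path is taken from the vendored file path; the printed widths follow from the doc comments, with wide CJK runes counting as two cells and escape codes ignored):

package main

import (
	"fmt"

	"github.com/charmbracelet/x/ansi"
)

func main() {
	colored := "\x1b[31m世界!\x1b[0m" // red "世界!", escape codes carry no width

	fmt.Println(ansi.WcWidth.StringWidth(colored))       // 5: two double-width runes plus "!"
	fmt.Println(ansi.GraphemeWidth.StringWidth(colored)) // 5

	// Truncate to 4 cells, appending "…" without breaking the escape sequences.
	fmt.Println(ansi.GraphemeWidth.Truncate(colored, 4, "…"))

	// Word-wrap to 7 cells per line; no extra breakpoints beyond the defaults.
	fmt.Println(ansi.GraphemeWidth.Wordwrap("hello wrapped world", 7, ""))
}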

View File

@ -51,7 +51,8 @@ type Mode interface {
// SetMode (SM) returns a sequence to set a mode.
// The mode arguments are a list of modes to set.
//
- // If one of the modes is a [DECMode], the sequence will use the DEC format.
+ // If one of the modes is a [DECMode], the function will returns two escape
+ // sequences.
//
// ANSI format:
//
@ -74,7 +75,8 @@ func SM(modes ...Mode) string {
// ResetMode (RM) returns a sequence to reset a mode.
// The mode arguments are a list of modes to reset.
//
- // If one of the modes is a [DECMode], the sequence will use the DEC format.
+ // If one of the modes is a [DECMode], the function will returns two escape
+ // sequences.
//
// ANSI format:
//
@ -94,9 +96,9 @@ func RM(modes ...Mode) string {
return ResetMode(modes...)
}
- func setMode(reset bool, modes ...Mode) string {
+ func setMode(reset bool, modes ...Mode) (s string) {
if len(modes) == 0 {
- return ""
+ return
}
cmd := "h"
@ -113,21 +115,24 @@ func setMode(reset bool, modes ...Mode) string {
return seq + strconv.Itoa(modes[0].Mode()) + cmd
}
- var dec bool
- list := make([]string, len(modes))
- for i, m := range modes {
- list[i] = strconv.Itoa(m.Mode())
+ dec := make([]string, 0, len(modes)/2)
+ ansi := make([]string, 0, len(modes)/2)
+ for _, m := range modes {
switch m.(type) {
case DECMode:
- dec = true
+ dec = append(dec, strconv.Itoa(m.Mode()))
+ case ANSIMode:
+ ansi = append(ansi, strconv.Itoa(m.Mode()))
}
}
- if dec {
- seq += "?"
+ if len(ansi) > 0 {
+ s += seq + strings.Join(ansi, ";") + cmd
}
- return seq + strings.Join(list, ";") + cmd
+ if len(dec) > 0 {
+ s += seq + "?" + strings.Join(dec, ";") + cmd
+ }
+ return
}
// RequestMode (DECRQM) returns a sequence to request a mode from the terminal.
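
To make the behavioural change above concrete: with a mix of ANSI and DEC modes, setMode used to emit a single DEC-form sequence, and now emits one ANSI sequence followed by one DEC sequence. A hedged sketch, assuming ANSIMode and DECMode are plain integer-backed Mode implementations, as the type switch above suggests:

package main

import (
	"fmt"

	"github.com/charmbracelet/x/ansi"
)

func main() {
	// Insert/replace mode (ANSI mode 4) together with text-cursor-enable (DEC mode 25).
	seq := ansi.SetMode(ansi.ANSIMode(4), ansi.DECMode(25))

	// Old code above:  "\x1b[?4;25h"        (one sequence, DEC form for both modes)
	// New code above:  "\x1b[4h\x1b[?25h"   (one ANSI sequence, then one DEC sequence)
	fmt.Printf("%q\n", seq)
}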

View File

@ -26,18 +26,27 @@ type MouseButton byte
// Other buttons are not supported.
const (
MouseNone MouseButton = iota
- MouseLeft
- MouseMiddle
- MouseRight
- MouseWheelUp
- MouseWheelDown
- MouseWheelLeft
- MouseWheelRight
- MouseBackward
- MouseForward
+ MouseButton1
+ MouseButton2
+ MouseButton3
+ MouseButton4
+ MouseButton5
+ MouseButton6
+ MouseButton7
+ MouseButton8
+ MouseButton9
MouseButton10
MouseButton11
+ MouseLeft = MouseButton1
+ MouseMiddle = MouseButton2
+ MouseRight = MouseButton3
+ MouseWheelUp = MouseButton4
+ MouseWheelDown = MouseButton5
+ MouseWheelLeft = MouseButton6
+ MouseWheelRight = MouseButton7
+ MouseBackward = MouseButton8
+ MouseForward = MouseButton9
MouseRelease = MouseNone
)
@ -61,7 +70,7 @@ func (b MouseButton) String() string {
return mouseButtons[b]
}
- // Button returns a byte representing a mouse button.
+ // EncodeMouseButton returns a byte representing a mouse button.
// The button is a bitmask of the following leftmost values:
//
// - The first two bits are the button number:
@ -85,7 +94,7 @@ func (b MouseButton) String() string {
//
// If button is [MouseNone], and motion is false, this returns a release event.
// If button is undefined, this function returns 0xff.
- func (b MouseButton) Button(motion, shift, alt, ctrl bool) (m byte) {
+ func EncodeMouseButton(b MouseButton, motion, shift, alt, ctrl bool) (m byte) {
// mouse bit shifts
const (
bitShift = 0b0000_0100
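
The second hunk turns the MouseButton.Button method into a package-level EncodeMouseButton function; a sketch of the new call form (the printed byte value is not asserted here):

package main

import (
	"fmt"

	"github.com/charmbracelet/x/ansi"
)

func main() {
	// Old form: ansi.MouseLeft.Button(false, false, false, true)
	// New form: the button is passed as the first argument.
	b := ansi.EncodeMouseButton(ansi.MouseLeft, false, false, false, true) // motion, shift, alt, ctrl
	fmt.Printf("%08b\n", b)
}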

Some files were not shown because too many files have changed in this diff